Compare commits

15 Commits

d5476b54d7...main

| SHA1 |
| --- |
| 0c0e31db38 |
| 2e23b37405 |
| 63a68b70b4 |
| 6528ca854b |
| 940b58331d |
| c26fa892ee |
| 4c84437022 |
| 3fdaebf66d |
| 50a404984d |
| 89904824bb |
| 94daf81a85 |
| 607a6a0bed |
| 8b2141df16 |
| 7fefe0ecf6 |
| f99f86bc37 |
AST.jai | 99

@@ -8,23 +8,17 @@ AST_Kind :: enum {
 	Function;
 	Return;
 
-	// @Incomplete(nb): Should these three really be their own block types?
-	// Maybe they at least shouldn't need to have their own tokens...
-	Properties;
-	Meta;
-	Instance;
 	//==
 	// Directives
 	If_Directive;
 
-	// Hint;
-	// Type;
-	// Operator;
+	Access;
 	Call;
 	Struct;
 	If;
 	For;
 	CBuffer;
+	Buffer;
 	FieldList;
 	ArgList;
 	Variable;
@@ -261,9 +255,15 @@ pretty_print_binary :: (node : *AST_Node, indentation : int, builder : *String_B
 	if !skip_indent {
 		indent(builder, indentation);
 	}
 
+	if node.token.kind == .TOKEN_LEFTBRACKET {
+		pretty_print_node(node.children[0], 0, builder);
+		append(builder, "[");
+		pretty_print_node(node.children[1], 0, builder);
+		append(builder, "]");
+	} else {
 	append(builder, "(");
 	op := node.token;
 
 	print_to_builder(builder, op_to_string(op));
 	append(builder, " ");
 
@@ -273,6 +273,19 @@ pretty_print_binary :: (node : *AST_Node, indentation : int, builder : *String_B
 	append(builder, ")");
 	}
 
+	}
+
+pretty_print_access :: (node : *AST_Node, indentation : int, builder : *String_Builder, skip_indent := false) {
+	if !skip_indent {
+		indent(builder, indentation);
+	}
+
+	pretty_print_node(node.children[0], 0, builder);
+	append(builder, ".");
+	pretty_print_node(node.children[1], 0, builder);
+}
+
 pretty_print_unary :: (node : *AST_Node, indentation : int, builder : *String_Builder, skip_indent := false) {
 	if !skip_indent {
 		indent(builder, indentation);
@@ -356,6 +369,29 @@ pretty_print_node :: (node : *AST_Node, indentation : int, builder : *String_Bui
 		case .If; {
 			pretty_print_if(node, indentation, builder, skip_indent);
 		}
+		case .If_Directive; {
+			if !skip_indent {
+				indent(builder, indentation);
+			}
+			append(builder, "(#if ");
+
+			condition := node.children[0];
+			pretty_print_node(condition, 0, builder);
+			append(builder, "\n");
+
+			body := node.children[1];
+			// indent(builder,indentation + 4);
+			// append(builder, "(");
+			pretty_print_node(body, indentation + 4, builder);
+			// append(builder, ")");
+
+			if node.children.count == 3 { //@Note: Else branch
+				append(builder, "\n");
+				pretty_print_node(node.children[2], indentation + 4, builder);
+			}
+
+			append(builder, ")");
+		}
 		case .For; {
 			pretty_print_for(node, indentation, builder, skip_indent);
 		}
@@ -378,6 +414,9 @@ pretty_print_node :: (node : *AST_Node, indentation : int, builder : *String_Bui
 		case .Binary; {
 			pretty_print_binary(node, indentation, builder, skip_indent);
 		}
+		case .Access; {
+			pretty_print_access(node, indentation, builder, skip_indent);
+		}
 		case .Unary; {
 			pretty_print_unary(node, indentation, builder, skip_indent);
 		}
@@ -446,23 +485,24 @@ pretty_print_declaration :: (declaration : *AST_Node, indentation : int, builder
 		append(builder, "#if ");
 	}
 
-	if declaration.kind == .Properties {
-		append(builder, "properties");
-		if declaration.name.count > 0 {
-			print_to_builder(builder, " %", declaration.name);
-		}
-	} else if declaration.kind == .Instance {
-		append(builder, "instance");
-	} else if declaration.kind == .Meta {
-		append(builder, "meta");
-	}
-	else {
 	if declaration.kind == .Struct {
 		append(builder, "struct ");
 	} else if declaration.kind == .CBuffer {
 		append(builder, "constant_buffer ");
+	} else if declaration.kind == .Buffer {
+		append(builder, "buffer ");
 	}
 	print_to_builder(builder, "%", declaration.name);
+
+	if declaration.kind == .CBuffer || declaration.kind == .Buffer{
+		for hint : declaration.hint_tokens {
+			if hint.string_value.count > 0 {
+				print_to_builder(builder, " (@%)", hint.string_value);
+			}
+		}
+		// if declaration.kind != .If_Directive {
+		// 	print_to_builder(builder, "%", declaration.name);
+		// }
 	}
 
 	if declaration.kind == .Function && declaration.token.kind == .TOKEN_IDENTIFIER{
@@ -479,9 +519,26 @@ pretty_print_declaration :: (declaration : *AST_Node, indentation : int, builder
 		pretty_print_node(declaration.children[0], 0, builder);
 		append(builder, "\n");
 		pretty_print_node(declaration.children[1], indentation + 5, builder);
+
+		if declaration.children.count > 2 {
+			append(builder, "\n");
+			if declaration.children[2].kind == .If_Directive {
+				pretty_print_declaration(declaration.children[2], indentation + 5, builder);
+			} else {
+				pretty_print_node(declaration.children[2], indentation + 5, builder);
+			}
+		}
 	} else {
 		print_to_builder(builder, "\n");
-		pretty_print_children(declaration, indentation + 1, builder, flags = .NewLine);
+		flags := Children_Print_Flags.NewLine;
+
+		if declaration.parent && declaration.parent.parent {
+			if declaration.parent.parent.kind == .If_Directive {
+				indent(builder, indentation - 1); //@Note: Hack the indent for now... Wow this is stupid, but it works!
+			}
+		}
+		pretty_print_children(declaration, indentation + 1, builder, flags = flags);
 	}
 
 }
File diff suppressed because it is too large
Codegen.jai | 209

@@ -5,38 +5,26 @@
 /////////////////////////////////////
 //~ nbr: Codegen TODOs
 //
-// [ ] Prefix output of property values with __PROPERTIES so we don't get name clashes
 
 Output_Language :: enum {
 	HLSL;
 	GLSL; // @Incomplete
 	MLSL; // @Incomplete
+	// SPIRV; // @Incomplete: Should we do this?
 }
 
 Codegen_State :: struct {
 	path : string;
 
-	// scope_stack : Scope_Stack;
 	current_scope : Scope_Handle;
 
-	// type_variables : []Type_Variable;
-	// root : *AST_Node;
-
 	output_language : Output_Language;
 
 	builder : String_Builder;
 
-	result : *Compiler_Context;
+	ctx : *Compiler_Context;
 }
 
-// Codegen_Result :: struct {
-// 	messages : [..]Compiler_Message;
-
-// 	had_error : bool;
-
-// 	result_text : string; // @Incomplete(nb): Result for now, should likely be far more sophisticated.
-// }
-
 Reserved_HLSL_Words :: string.[
 	"texture",
 	"sampler",
@@ -56,7 +44,7 @@ Reserved_GLSL_Words :: string.[
 	""
 ];
 
-init_codegen_state :: (state : *Codegen_State, result : *Compiler_Context, output_language : Output_Language) {
+init_codegen_state :: (state : *Codegen_State, ctx : *Compiler_Context, output_language : Output_Language) {
 	state.current_scope = cast(Scope_Handle)1;
 	state.output_language = output_language;
 	init_string_builder(*state.builder);
@@ -66,7 +54,11 @@ indent :: (state : *Codegen_State, indentation : int) {
 	for 1..indentation append(*state.builder, " ");
 }
 
-hlsl_type_to_string :: (type_variable : Type_Variable) -> string {
+hlsl_type_to_string :: (variables : []Type_Variable, type_handle : Type_Variable_Handle) -> string {
+	return hlsl_type_to_string(variables, from_handle(variables, type_handle));
+}
+
+hlsl_type_to_string :: (variables : []Type_Variable, type_variable : Type_Variable) -> string {
 	if type_variable.type == {
 		case .Invalid;
 			return "{{invalid}}";
@@ -95,28 +87,21 @@ hlsl_type_to_string :: (type_variable : Type_Variable) -> string {
 			return type_variable.typename;
 		}
 		case .Array;
-			return "array";
+			return hlsl_type_to_string(variables, type_variable.element_type);
 	}
 
 	return "";
 }
 
 emit_field :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
-	find_result := find_symbol(state.result.scope_stack, node.name, state.current_scope);
+	find_result := find_symbol(state.ctx.scope_stack, node.name, state.current_scope);
 
-	field := from_handle(state.result.type_variables, find_result.type_variable);
+	field := from_handle(state.ctx.type_variables, find_result.type_variable);
 
 	indent(state, indentation);
 
-	print_to_builder(*state.builder, "% ", hlsl_type_to_string(field));
+	print_to_builder(*state.builder, "% ", hlsl_type_to_string(state.ctx.type_variables, field));
 
-	if field.struct_field_parent {
-		parent_tv := from_handle(state.result.type_variables, field.struct_field_parent.type_variable);
-
-		if parent_tv.typename == "properties" {
-			append(*state.builder, "__PROPERTIES__");
-		}
-	}
 	print_to_builder(*state.builder, "%", node.name);
 
 	if field.type == .Sampler {
@@ -130,41 +115,51 @@ emit_field :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
 	if node.children.count == 1 {
 		child := node.children[0];
 
+		if field.type == .Array {
+			append(*state.builder, "[");
+			emit_node(state, child, 0);
+			append(*state.builder, "]");
+		} else {
 		print_to_builder(*state.builder, " = ");
 		emit_node(state, child, 0);
+		}
 	}
 
 	if node.parent.kind == .Block {
 		append(*state.builder, ";");
 	}
 
 
 	for i :0..field.children.count - 1 {
-		child := from_handle(state.result.type_variables, field.children[i]);
+		child := from_handle(state.ctx.type_variables, field.children[i]);
 		emit_node(state, child.source_node, 0);
 	}
 
 	for hint : node.hint_tokens {
-		if hint.ident_value == "position" {
-			// @Incomplete(nb): Should be a lookup table somewhere
+		if lookup_hint(hint.ident_value) == .Position {
 			append(*state.builder, " : POSITION");
-		} else if hint.ident_value == "uv" {
+		} else if lookup_hint(hint.ident_value) == .UV {
 			append(*state.builder, " : TEXCOORD0");
-		} else if hint.ident_value == "outposition" {
+		} else if lookup_hint(hint.ident_value) == .Output_Position {
 			append(*state.builder, " : SV_POSITION");
 		}
 	}
 }
 
 emit_block :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
+	previous_scope := state.current_scope;
+
 	for statement : node.children {
+		if statement.type_variable {
+			state.current_scope = from_handle(state.ctx.type_variables, statement.type_variable).scope;
+		}
+
 		emit_node(state, statement, indentation);
 
 		if it_index < node.children.count {
 			append(*state.builder, "\n");
 		}
 	}
+	state.current_scope = previous_scope;
 }
 
 emit_call :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
@@ -226,59 +221,9 @@ emit_call :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
 	append(*state.builder, ")");
 }
 
-emit_properties :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
-	find_result := find_symbol(state.result.scope_stack, ifx node.name.count > 0 then node.name else "properties", state.current_scope);
-
-	if !find_result {
-		message : Compiler_Message;
-		message.message_kind = .Internal_Error;
-		message.path = state.path;
-		message.message = "Attempting to generate undeclared properties buffer. This should never happen at this stage.";
-		array_add(*state.result.messages, message);
-	}
-	assert(find_result != null, "Attempting to generate undeclared properties buffer. This should never happen at this stage.");
-
-	variable := from_handle(state.result.type_variables, find_result.type_variable);
-
-	print_to_builder(*state.builder, "cbuffer __PROPERTIES : register(b%) \n{\n", variable.resource_index);
-
-	previous_scope := state.current_scope;
-	state.current_scope = variable.scope;
-
-	resources : Static_Array(*AST_Node, 8);
-
-	for child : node.children {
-		if child.kind == .FieldList {
-			for field : child.children {
-				tv := from_handle(state.result.type_variables, field.type_variable);
-				if tv.type == .Sampler || tv.type == .Texture2D {
-					array_add(*resources, field);
-					continue;
-				}
-				emit_node(state, field, 1);
-
-				append(*state.builder, ";\n");
-			}
-		}
-	}
-
-	append(*state.builder, "}\n\n");
-
-	for i : 0..resources.count - 1 {
-		resource := resources[i];
-		emit_node(state, resource, 0);
-
-		append(*state.builder, ";\n");
-	}
-
-	append(*state.builder, "\n");
-
-	state.current_scope = previous_scope;
-}
-
 emit_function :: (state : *Codegen_State, node : *AST_Node, indentation : int, emit_body := true) {
 	name := get_actual_function_name(node);
-	find_result := find_symbol(state.result.scope_stack, name, state.current_scope);
+	find_result := find_symbol(state.ctx.scope_stack, name, state.current_scope);
 
 	assert(find_result != null, "Attempting to generate undeclared function. This should never happen at this stage.");
 	if !find_result {
@@ -286,17 +231,17 @@ emit_function :: (state : *Codegen_State, node : *AST_Node, indentation : int, e
 		message.message_kind = .Internal_Error;
 		message.path = state.path;
 		message.message = "Attempting to generate undeclared function. This should never happen at this stage.";
-		array_add(*state.result.messages, message);
+		array_add(*state.ctx.messages, message);
 	}
 
 	for func : find_result.functions {
-		function_variable := from_handle(state.result.type_variables, func.type_variable);
+		function_variable := from_handle(state.ctx.type_variables, func.type_variable);
 
 		indent(state, indentation);
 
 		if function_variable.return_type_variable {
-			return_variable := from_handle(state.result.type_variables, function_variable.return_type_variable);
-			print_to_builder(*state.builder, "% ", hlsl_type_to_string(return_variable));
+			return_variable := from_handle(state.ctx.type_variables, function_variable.return_type_variable);
+			print_to_builder(*state.builder, "% ", hlsl_type_to_string(state.ctx.type_variables, return_variable));
 		} else {
 			append(*state.builder, "void ");
 		}
@@ -421,9 +366,6 @@ emit_node :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
 		}
 		case .Float; {
 			print_to_builder(*state.builder, "%f", formatFloat(node.float_value, zero_removal=.ONE_ZERO_AFTER_DECIMAL));
-		}
-		case .Properties; {
-
 		}
 		case .Field; {
 			emit_field(state, node, indentation);
@@ -435,46 +377,57 @@ emit_node :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
 		case .Variable; {
 			indent(*state.builder, indentation);
 
-			type_var := from_handle(state.result.type_variables, node.type_variable);
+			type_var := from_handle(state.ctx.type_variables, node.type_variable);
-			is_properties := type_var.typename == "properties";
-
-			if !is_properties {
-				if type_var.struct_field_parent {
-					parent_tv := from_handle(state.result.type_variables, type_var.struct_field_parent.type_variable);
-
-					if parent_tv.typename == "properties" {
-						append(*state.builder, "__PROPERTIES__");
-					}
-				}
 			print_to_builder(*state.builder, "%", node.name);
-			}
 
 			if node.children.count > 0 {
-				if !is_properties {
 				append(*state.builder, ".");
-				}
 				emit_node(state, node.children[0], 0);
 			}
 		}
+		case .Access; {
+			indent(*state.builder, indentation);
+
+			lhs := node.children[0];
+			rhs := node.children[1];
+
+			emit_node(state, lhs, 0);
+
+			print_to_builder(*state.builder, "%.", node.name);
+			emit_node(state, rhs, 0);
+		}
 		case .Binary; {
 			indent(*state.builder, indentation);
 
-			if node.token.kind != .TOKEN_ASSIGN {
+			if node.token.kind != .TOKEN_ASSIGN && node.token.kind != .TOKEN_LEFTBRACKET {
+				if (node.parent.kind == .Binary && node.parent.token.kind != .TOKEN_ASSIGN) || node.parent.kind == .Access {
 				append(*state.builder, "(");
 			}
+			}
 
 			lhs := node.children[0];
 			rhs := node.children[1];
-			emit_node(state, lhs, 0);
 
+			if node.token.kind == .TOKEN_LEFTBRACKET {
+				emit_node(state, lhs, 0);
+				append(*state.builder, "[");
+				emit_node(state, rhs, 0);
+				append(*state.builder, "]");
+			} else {
+				emit_node(state, lhs, 0);
 			append(*state.builder, " ");
 			emit_operator(state, node.token.kind);
 			append(*state.builder, " ");
 			emit_node(state, rhs, 0);
-			if node.token.kind != .TOKEN_ASSIGN {
+			}
+
+			if node.token.kind != .TOKEN_ASSIGN && node.token.kind != .TOKEN_LEFTBRACKET {
+				if (node.parent.kind == .Binary && node.parent.token.kind != .TOKEN_ASSIGN) || node.parent.kind == .Access {
 				append(*state.builder, ")");
 			}
 			}
+		}
 		case .Unary; {
 			indent(*state.builder, indentation);
 
@@ -522,7 +475,9 @@ emit_node :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
 			append(*state.builder, "if ");
 
 			cond := node.children[0];
+			append(*state.builder, "(");
 			emit_node(state, cond, 0);
+			append(*state.builder, ")");
 
 			body := node.children[1];
 			append(*state.builder, "\n");
@@ -569,11 +524,16 @@ emit_field_list :: (state : *Codegen_State, field_list : *AST_Node, indentation
 	}
 }
 
-emit_struct :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
+emit_struct :: (state : *Codegen_State, node : *AST_Node, indentation : int, name : string = "") {
+	if name.count > 0 {
+		print_to_builder(*state.builder, "struct %", name);
+	} else {
 	print_to_builder(*state.builder, "struct %", node.name);
+	}
 
 	current_scope := state.current_scope;
-	state.current_scope = from_handle(state.result.type_variables, node.type_variable).scope;
+	state.current_scope = from_handle(state.ctx.type_variables, node.type_variable).scope;
 
 	field_list := node.children[0];
 
@@ -590,11 +550,11 @@ emit_struct :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
 }
 
 emit_cbuffer :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
-	variable := from_handle(state.result.type_variables, node.type_variable);
+	variable := from_handle(state.ctx.type_variables, node.type_variable);
 	print_to_builder(*state.builder, "cbuffer % : register(b%)", variable.name, variable.resource_index);
 
 	current_scope := state.current_scope;
-	state.current_scope = from_handle(state.result.type_variables, node.type_variable).scope;
+	state.current_scope = from_handle(state.ctx.type_variables, node.type_variable).scope;
 
 	field_list := node.children[0];
 
@@ -610,17 +570,26 @@ emit_cbuffer :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
 	state.current_scope = current_scope;
 }
 
+emit_buffer :: (state : *Codegen_State, node : *AST_Node, indentation : int) {
+	variable := from_handle(state.ctx.type_variables, node.type_variable);
+	element := from_handle(state.ctx.type_variables, variable.element_type);
+
+	emit_struct(state, node, indentation, element.typename);
+
+	print_to_builder(*state.builder, "StructuredBuffer<%> % : register(t%);\n\n", element.typename, variable.name, variable.resource_index);
+}
+
 emit_declaration :: (state : *Codegen_State, node : *AST_Node) {
 	if node.kind == {
 		case .Function; {
 			emit_function(state, node, 0);
 		}
-		case .Properties; {
-			emit_properties(state, node, 0);
-		}
 		case .CBuffer; {
 			emit_cbuffer(state, node, 0);
 		}
+		case .Buffer; {
+			emit_buffer(state, node, 0);
+		}
 		case .Struct; {
 			emit_struct(state, node, 0);
 		}
@@ -643,7 +612,7 @@ codegen :: (result : *Compiler_Context, output_language : Output_Language, alloc
 	defer clear_context_allocators();
 
 	state : Codegen_State;
-	state.result = result;
+	state.ctx = result;
 	state.current_scope = cast(Scope_Handle)1;
 	state.output_language = output_language;
 	init_string_builder(*state.builder);
@@ -657,7 +626,7 @@ codegen :: (state : *Codegen_State) {
 	found_function : bool = false;
 	// found_struct : bool = false;
 
-	// for variable : state.result.type_variables {
+	// for variable : state.ctx.type_variables {
 	// 	if variable.type == .Struct && variable.kind == .Declaration && !variable.builtin {
 	// 		if variable.source_node.kind == .Properties continue;
 	// 		if variable.source_node.kind == .Meta continue;
@@ -670,7 +639,7 @@ codegen :: (state : *Codegen_State) {
 	// 		append(*state.builder, "\n");
 	// 	}
 
-	for variable : state.result.type_variables {
+	for variable : state.ctx.type_variables {
 		if variable.type == .Function && !variable.builtin
 			&& !variable.source_node.vertex_entry_point && !variable.source_node.pixel_entry_point {
 			emit_function(state, variable.source_node, 0, false);
@@ -681,14 +650,14 @@ codegen :: (state : *Codegen_State) {
 		append(*state.builder, "\n");
 	}
 
-	for declaration : state.result.root.children {
+	for declaration : state.ctx.root.children {
 		if declaration.foreign_declaration {
 			continue;
 		}
 		emit_declaration(state, declaration);
 	}
 
-	state.result.codegen_result_text = builder_to_string(*state.builder);
+	state.ctx.codegen_result_text = builder_to_string(*state.builder);
 }
 
 #scope_module
Error.jai | 12

@@ -102,20 +102,20 @@ copy_messages :: (source : []Compiler_Message, dest : *[..]Compiler_Message) {
 	}
 }
 
-report_messages :: (messages : []Compiler_Message) -> string {
+report_messages :: (ctx : *Compiler_Context, messages : []Compiler_Message) -> string {
 	builder : String_Builder;
 	init_string_builder(*builder);
 	for message : messages {
-		report_message(*builder, message);
+		report_message(ctx, *builder, message);
 	}
 	return builder_to_string(*builder);
 }
 
-report_message :: (builder : *String_Builder, message : Compiler_Message) {
-	report_message(builder, message.path, message.message, message.source_locations, message.message_kind, message.report_source_location);
+report_message :: (ctx : *Compiler_Context, builder : *String_Builder, message : Compiler_Message) {
+	report_message(ctx, builder, message.path, message.message, message.source_locations, message.message_kind, message.report_source_location);
 }
 
-report_message :: (builder : *String_Builder, path : string, message : string, source_locations : []Source_Range, kind : Message_Kind, report_source_location : bool = false) {
+report_message :: (ctx : *Compiler_Context, builder : *String_Builder, path : string, message : string, source_locations : []Source_Range, kind : Message_Kind, report_source_location : bool = false) {
 	append(builder, "\x1b[1;37m");
 	if path.count > 0 {
 		print_to_builder(builder, "%:", path);
@@ -140,7 +140,7 @@ report_message :: (builder : *String_Builder, path : string, message : string, s
 	if report_source_location {
 		for location : source_locations {
 			append(builder, "\t");
-			print_from_source_location(builder, location);
+			print_from_source_location(ctx, builder, location);
 			append(builder, "\n\t");
 			begin := location.begin;
 
Ink.jai | 71

@@ -23,7 +23,7 @@ LEXER_FOLDER :: "lex";
 PARSER_FOLDER :: "parse";
 CODEGEN_FOLDER :: "codegen";
 COMPILED_FOLDER :: "compiled";
-SEMANTIC_ANALYSIS_FOLDER :: "semant";
+CHECK_FOLDER :: "check";
 TESTS_FOLDER :: "test";
 
 SHADER_EXTENSION :: "ink";
@@ -32,7 +32,7 @@ SUITE_EXTENSION :: "suite";
 Stage_Flags :: enum_flags u16 {
 	Lexer :: 0x1;
 	Parser :: 0x2;
-	Semantic_Analysis :: 0x4;
+	Check :: 0x4;
 	Codegen :: 0x8;
 	Compile :: 0x10;
 }
@@ -97,10 +97,10 @@ get_golden_path :: (file_path : string, stage : Stage_Flags) -> string {
 			make_directory_if_it_does_not_exist(dir);
 			array_add(*path.words, PARSER_FOLDER);
 		}
-		case .Semantic_Analysis; {
-			dir := tprint("%/%", TESTS_FOLDER, SEMANTIC_ANALYSIS_FOLDER);
+		case .Check; {
+			dir := tprint("%/%", TESTS_FOLDER, CHECK_FOLDER);
 			make_directory_if_it_does_not_exist(dir);
-			array_add(*path.words, SEMANTIC_ANALYSIS_FOLDER);
+			array_add(*path.words, CHECK_FOLDER);
 		}
 		case .Codegen; {
 			dir := tprint("%/%", TESTS_FOLDER, CODEGEN_FOLDER);
@@ -189,7 +189,7 @@ run_codegen_test :: (ctx : *Compiler_Context, output_type : Output_Type = 0) ->
 
 	if ctx.had_error {
 		result.type = .Failed;
-		result_text = report_messages(ctx.messages);
+		result_text = report_messages(ctx, ctx.messages);
 		return result;
 	}
 
@@ -227,16 +227,35 @@ run_compile_test :: (path : string, output_type : Output_Type = 0) -> Result, Co
 			print_to_builder(*sb, "[pixel entry point] - %\n", ctx.pixel_entry_point.name);
 		}
 
-		for cb : ctx.cbuffers {
-			print_to_builder(*sb, "[constant_buffer] - % - %\n", cb.name, cb.buffer_index);
+		for buf : ctx.buffers {
+			if buf.kind == {
+				case .Constant; {
+					print_to_builder(*sb, "[constant_buffer] - % - %", buf.name, buf.buffer_index);
+
+				}
+				case .Structured; {
+					print_to_builder(*sb, "[buffer] - % - %", buf.name, buf.buffer_index);
+				}
+
+				if buf.hints.count > 0 {
+					for hint : buf.hints {
+						print_to_builder(*sb, " (@%)", hint.custom_hint_name);
+					}
+				}
+
+				append(*sb, "\n");
+
 			indent(*sb, 1);
-			for field : cb.fields {
+			for field : buf.fields {
 				append(*sb, "[field] - ");
-				pretty_print_field(*sb, *field.base_field);
+				pretty_print_field(*sb, *field);
+				append(*sb, "\n");
+				indent(*sb, 1);
 			}
 		}
 
+		}
 
 		result.info_text = builder_to_string(*sb);
 	}
 
@@ -261,7 +280,7 @@ run_lexer_test :: (file_path : string, ctx : *Compiler_Context, output_type : Ou
 	lex(ctx);
 	if ctx.had_error {
 		result.type = .Failed;
-		result_text = report_messages(ctx.messages);
+		result_text = report_messages(ctx, ctx.messages);
 	} else {
 		result_text = pretty_print_tokens(ctx.tokens, context.allocator);
 	}
@@ -300,7 +319,7 @@ run_parser_test :: (ctx : *Compiler_Context, output_type : Output_Type = 0) -> R
 
 	if ctx.had_error {
 		result.type = .Failed;
-		result_text = report_messages(ctx.messages);
+		result_text = report_messages(ctx, ctx.messages);
 	} else {
 		result_text = pretty_print_ast(ctx.root, context.allocator);
 	}
@@ -316,7 +335,7 @@ run_parser_test :: (ctx : *Compiler_Context, output_type : Output_Type = 0) -> R
 	return result;
 }
 
-run_semantic_analysis_test :: (ctx : *Compiler_Context, output_type : Output_Type = 0) -> Result {
+run_check_test :: (ctx : *Compiler_Context, output_type : Output_Type = 0) -> Result {
 	result : Result;
 	result.path = ctx.file.path;
 	result_text : string;
@@ -325,7 +344,7 @@ run_semantic_analysis_test :: (ctx : *Compiler_Context, output_type : Output_Typ
 
 	if ctx.had_error {
 		result.type = .Failed;
-		result_text = report_messages(ctx.messages);
+		result_text = report_messages(ctx, ctx.messages);
 	} else {
 		result_text = pretty_print_symbol_table(ctx, context.allocator);
 	}
@@ -336,12 +355,12 @@ run_semantic_analysis_test :: (ctx : *Compiler_Context, output_type : Output_Typ
 		return result;
 	}
 
-	golden_path := get_golden_path(ctx.file.path, .Semantic_Analysis);
+	golden_path := get_golden_path(ctx.file.path, .Check);
 	do_golden_comparison(golden_path, result_text, *result, output_type);
 	return result;
 }
 
-run_semantic_analysis_test :: (file_path : string, ctx : *Compiler_Context, output_type : Output_Type = 0) -> Result {
+run_check_test :: (file_path : string, ctx : *Compiler_Context, output_type : Output_Type = 0) -> Result {
 	result : Result;
 	result.path = file_path;
 
@@ -352,7 +371,7 @@ run_semantic_analysis_test :: (file_path : string, ctx : *Compiler_Context, outp
 		return result;
 	}
 
-	result = run_semantic_analysis_test(ctx, output_type);
+	result = run_check_test(ctx, output_type);
 
 	return result;
 }
@@ -389,17 +408,17 @@ run_test_new :: (file_path : string, stage_flags : Stage_Flags, results : *[..]R
 		record_result(results, result);
 	}
 
-	if stage_flags & .Semantic_Analysis {
+	if stage_flags & .Check {
 		if stage_flags & .Parser && (result.type == .Passed || result.type == .Golden_Output) {
-			result = run_semantic_analysis_test(*ctx, output_type);
+			result = run_check_test(*ctx, output_type);
 		} else {
-			result = run_semantic_analysis_test(file_path, *ctx, output_type);
+			result = run_check_test(file_path, *ctx, output_type);
 		}
 		record_result(results, result);
 	}
 
 	if stage_flags & .Codegen {
-		if stage_flags & .Semantic_Analysis && (result.type == .Passed || result.type == .Golden_Output) {
+		if stage_flags & .Check && (result.type == .Passed || result.type == .Golden_Output) {
 			result = run_codegen_test(*ctx, output_type);
 		} else {
 			result = run_codegen_test(file_path, *ctx, output_type);
@@ -554,8 +573,8 @@ read_suite :: (file_path : string, suite : *Test_Suite, allocator := temp) -> bo
 		stage_flags |= .Lexer;
 	} else if equal(trimmed, "parse") {
 		stage_flags |= .Parser;
-	} else if equal(trimmed, "semant") {
-		stage_flags |= .Semantic_Analysis;
+	} else if equal(trimmed, "check") {
+		stage_flags |= .Check;
 	} else if equal(trimmed, "codegen") {
 		stage_flags |= .Codegen;
 	} else if equal(trimmed, "compile") {
@@ -577,7 +596,7 @@ stage_to_string :: (stage : Stage_Flags) -> string {
 	if #complete stage == {
 		case .Lexer; return "lexing";
 		case .Parser; return "parsing";
-		case .Semantic_Analysis; return "semantic checking";
+		case .Check; return "checking";
 		case .Codegen; return "codegen";
 		case .Compile; return "compiled";
 		case; return "";
@@ -671,8 +690,8 @@ main :: () {
 		current_suite.test_cases[cases - 1].stage_flags |= .Lexer;
 	} else if arg == "-parse" {
 		current_suite.test_cases[cases - 1].stage_flags |= .Parser;
-	} else if arg == "-semant" {
-		current_suite.test_cases[cases - 1].stage_flags |= .Semantic_Analysis;
+	} else if arg == "-check" {
+		current_suite.test_cases[cases - 1].stage_flags |= .Check;
 	} else if arg == "-codegen" {
 		current_suite.test_cases[cases - 1].stage_flags |= .Codegen;
 	} else if arg == "-compile" {
Lexing.jai | 33

@@ -11,6 +11,7 @@ Lexer :: struct {
 }
 
 Token_Kind :: enum {
+	TOKEN_INVALID :: 0;
 	TOKEN_FLOATLITERAL;
 	TOKEN_INTLITERAL;
 
@@ -54,6 +55,7 @@ Token_Kind :: enum {
 
 	// Keywords
 	TOKEN_BOOL;
+	TOKEN_BUFFER;
 
 	TOKEN_CASE;
 	TOKEN_CBUFFER;
@@ -90,7 +92,7 @@ Token_Kind :: enum {
 	TOKEN_OUT;
 
 	TOKEN_PIXEL;
-	TOKEN_PROPERTIES;
+	TOKEN_PLEX;
 
 	TOKEN_RETURN;
 	TOKEN_REGISTER;
@@ -215,10 +217,11 @@ identifier_kind :: (using lexer : *Lexer) -> Token_Kind {
 	identifier.count = length;
 
 	if identifier == "bool" return .TOKEN_BOOL;
+	if identifier == "Buffer" return .TOKEN_BUFFER;
 	if identifier == "case" return .TOKEN_CASE;
 	if identifier == "columnmajor" return .TOKEN_COLUMNMAJOR;
 	if identifier == "const" return .TOKEN_CONST;
-	if identifier == "constant_buffer" return .TOKEN_CONSTANT_BUFFER;
+	if identifier == "Constant_Buffer" return .TOKEN_CONSTANT_BUFFER;
 	if identifier == "continue" return .TOKEN_CONTINUE;
 	if identifier == "default" return .TOKEN_DEFAULT;
 	if identifier == "directive" return .TOKEN_DIRECTIVE;
@@ -242,10 +245,10 @@ identifier_kind :: (using lexer : *Lexer) -> Token_Kind {
 	if identifier == "optional" return .TOKEN_OPTIONAL;
 	if identifier == "out" return .TOKEN_OUT;
 	if identifier == "pixel" return .TOKEN_PIXEL;
-	if identifier == "properties" return .TOKEN_PROPERTIES;
 	if identifier == "return" return .TOKEN_RETURN;
 	if identifier == "register" return .TOKEN_REGISTER;
 	if identifier == "struct" return .TOKEN_STRUCT;
+	if identifier == "plex" return .TOKEN_STRUCT;
 	if identifier == "switch" return .TOKEN_SWITCH;
 	if identifier == "true" return .TOKEN_TRUE;
 	if identifier == "unorm" return .TOKEN_UNORM;
@@ -386,7 +389,12 @@ make_directive :: (lexer : *Lexer) -> *Token {
 		for tok : ctx.tokens {
 			lexer.ctx.tokens[it_index] = tok;
 		}
-		return scan_next_token(lexer);;
+		return scan_next_token(lexer);
+	} else if ident.ident_value == "add_define" {
+		new_define := scan_next_token(lexer);
+		add_define(*lexer.ctx.environment, new_define.ident_value);
+		lexer.ctx.tokens.count -= 2;
+		return scan_next_token(lexer);
 	}
 	return ident;
 }
@@ -739,10 +747,20 @@ print_token_pointer :: (builder : *String_Builder, token : Token) {
 	}
 }
 
-print_from_source_location :: (builder : *String_Builder, source_location : Source_Range, indentation : int = 0) {
+print_from_source_location :: (ctx : *Compiler_Context, builder : *String_Builder, source_location : Source_Range, indentation : int = 0) {
 	current := source_location.begin;
 	begin := source_location.begin;
 	end := source_location.end;
 
+	if begin.builtin {
+		for i : begin.index..end.index - 1 {
+			tok := ctx.tokens[i];
+			text : string;
+			text.data = tok.source;
+			text.count = tok.length;
+			print_to_builder(builder, "%", text);
+		}
+	} else {
 	begin_pos := 0;
 	token_string : string;
 	count := end.index - begin.index + end.length;
@@ -768,13 +786,14 @@ print_from_source_location :: (builder : *String_Builder, source_location : Sour
 		print_to_builder(builder, "%", token_string);
 	}
 }
+}
 
-print_from_source_location :: (source_location : Source_Range, allocator := context.allocator, indentation : int = 0) -> string {
+print_from_source_location :: (ctx : *Compiler_Context, source_location : Source_Range, allocator := context.allocator, indentation : int = 0) -> string {
 	sc := get_scratch();
 	defer scratch_end(sc);
 	builder : String_Builder;
 	init_string_builder(*builder,, sc.allocator);
-	print_from_source_location(*builder, source_location,, sc.allocator);
+	print_from_source_location(ctx, *builder, source_location,, sc.allocator);
 	return builder_to_string(*builder,, allocator);
 }
 
358
Parsing.jai
358
Parsing.jai
@@ -1,13 +1,3 @@
|
|||||||
#import "Flat_Pool";
|
|
||||||
|
|
||||||
// #load "qpwodkqopwkd.jai";
|
|
||||||
|
|
||||||
/**
|
|
||||||
* TODO:
|
|
||||||
* if parsing
|
|
||||||
* for/while loop parsing
|
|
||||||
**/
|
|
||||||
|
|
||||||
////////////////////////////
|
////////////////////////////
|
||||||
//@nb - Parse_state state
|
//@nb - Parse_state state
|
||||||
Parse_State :: struct {
|
Parse_State :: struct {
|
||||||
@@ -19,15 +9,6 @@ Parse_State :: struct {
|
|||||||
ctx : *Compiler_Context;
|
ctx : *Compiler_Context;
|
||||||
}
|
}
|
||||||
|
|
||||||
////////////////////////////
|
|
||||||
//@nb - Result and error handling
|
|
||||||
Parse_Error_Kind :: enum {
|
|
||||||
Parse_Error_Type_Missing;
|
|
||||||
Parse_Error_Expected_Expression;
|
|
||||||
Parse_Error_Empty_Block;
|
|
||||||
Parse_Error_Unexpected_Token;
|
|
||||||
}
|
|
||||||
|
|
||||||
////////////////////////////
|
////////////////////////////
|
||||||
//@nb - Parsing helper types
|
//@nb - Parsing helper types
|
||||||
Separator_Type :: enum {
|
Separator_Type :: enum {
|
||||||
@@ -75,7 +56,7 @@ parse_rules :: #run -> [(cast(int)Token_Kind.TOKEN_ERROR) + 1]Parse_Rule {
|
|||||||
rules[Token_Kind.TOKEN_RIGHTBRACKET] = .{null, null, .PREC_NONE};
|
rules[Token_Kind.TOKEN_RIGHTBRACKET] = .{null, null, .PREC_NONE};
|
||||||
rules[Token_Kind.TOKEN_COMMA] = .{null, null, .PREC_NONE};
|
rules[Token_Kind.TOKEN_COMMA] = .{null, null, .PREC_NONE};
|
||||||
rules[Token_Kind.TOKEN_DOT] = .{null, dot, .PREC_CALL};
|
rules[Token_Kind.TOKEN_DOT] = .{null, dot, .PREC_CALL};
|
||||||
rules[Token_Kind.TOKEN_PROPERTIES] = .{named_variable, null, .PREC_CALL};
|
// rules[Token_Kind.TOKEN_PROPERTIES] = .{named_variable, null, .PREC_CALL};
|
||||||
rules[Token_Kind.TOKEN_MINUS] = .{unary, binary, .PREC_TERM};
|
rules[Token_Kind.TOKEN_MINUS] = .{unary, binary, .PREC_TERM};
|
||||||
rules[Token_Kind.TOKEN_PLUS] = .{null, binary, .PREC_TERM};
|
rules[Token_Kind.TOKEN_PLUS] = .{null, binary, .PREC_TERM};
|
||||||
rules[Token_Kind.TOKEN_SEMICOLON] = .{null, null, .PREC_NONE};
|
rules[Token_Kind.TOKEN_SEMICOLON] = .{null, null, .PREC_NONE};
|
||||||
@@ -186,7 +167,7 @@ unexpected_token :: (state : *Parse_State, token : Token, message : string) {
|
|||||||
|
|
||||||
indent(*builder, 1);
|
indent(*builder, 1);
|
||||||
cyan(*builder);
|
cyan(*builder);
|
||||||
print_to_builder(*builder, "%\n", print_from_source_location(location));
|
print_to_builder(*builder, "%\n", print_from_source_location(state.ctx, location));
|
||||||
|
|
||||||
indent(*builder, 1);
|
indent(*builder, 1);
|
||||||
print_token_pointer(*builder, token);
|
print_token_pointer(*builder, token);
|
||||||
@@ -207,7 +188,7 @@ else_if_without_if :: (state : *Parse_State) {
|
|||||||
|
|
||||||
indent(*builder, 1);
|
indent(*builder, 1);
|
||||||
cyan(*builder);
|
cyan(*builder);
|
||||||
print_to_builder(*builder, "%\n", print_from_source_location(location));
|
print_to_builder(*builder, "%\n", print_from_source_location(state.ctx, location));
|
||||||
|
|
||||||
indent(*builder, 1);
|
indent(*builder, 1);
|
||||||
print_token_pointer(*builder, token);
|
print_token_pointer(*builder, token);
|
||||||
@@ -229,7 +210,7 @@ else_without_if :: (state : *Parse_State) {
|
|||||||
|
|
||||||
indent(*builder, 1);
|
indent(*builder, 1);
|
||||||
cyan(*builder);
|
cyan(*builder);
|
||||||
print_to_builder(*builder, "%\n", print_from_source_location(location));
|
print_to_builder(*builder, "%\n", print_from_source_location(state.ctx, location));
|
||||||
|
|
||||||
indent(*builder, 1);
|
indent(*builder, 1);
|
||||||
print_token_pointer(*builder, token);
|
print_token_pointer(*builder, token);
|
||||||
@@ -249,7 +230,7 @@ unable_to_parse_statement :: (state : *Parse_State, token : Token, message : str
|
|||||||
|
|
||||||
indent(*builder, 1);
|
indent(*builder, 1);
|
||||||
cyan(*builder);
|
cyan(*builder);
|
||||||
print_to_builder(*builder, "%\n", print_from_source_location(location));
|
print_to_builder(*builder, "%\n", print_from_source_location(state.ctx, location));
|
||||||
|
|
||||||
|
|
||||||
indent(*builder, 1);
|
indent(*builder, 1);
|
||||||
@@ -269,7 +250,7 @@ expected_expression :: (state : *Parse_State, token : Token, message : string) {
|
|||||||
|
|
||||||
indent(*builder, 1);
|
indent(*builder, 1);
|
||||||
cyan(*builder);
|
cyan(*builder);
|
||||||
print_to_builder(*builder, "%\n", print_from_source_location(location));
|
print_to_builder(*builder, "%\n", print_from_source_location(state.ctx, location));
|
||||||
|
|
||||||
indent(*builder, 1);
|
indent(*builder, 1);
|
||||||
print_token_pointer(*builder, token);
|
print_token_pointer(*builder, token);
|
||||||
@@ -288,7 +269,7 @@ missing_type_specifier :: (state : *Parse_State, token : Token, message : string
|
|||||||
|
|
||||||
indent(*builder, 1);
|
indent(*builder, 1);
|
||||||
cyan(*builder);
|
cyan(*builder);
|
||||||
print_to_builder(*builder, "%\n", print_from_source_location(location));
|
print_to_builder(*builder, "%\n", print_from_source_location(state.ctx, location));
|
||||||
indent(*builder, 1);
|
indent(*builder, 1);
|
||||||
|
|
||||||
loc := location.begin;
|
loc := location.begin;
|
||||||
@@ -312,7 +293,7 @@ empty_block :: (state : *Parse_State, token : Token, message : string) {
|
|||||||
|
|
||||||
indent(*builder, 1);
|
indent(*builder, 1);
|
||||||
cyan(*builder);
|
cyan(*builder);
|
||||||
print_to_builder(*builder, "%\n", print_from_source_location(location));
|
print_to_builder(*builder, "%\n", print_from_source_location(state.ctx, location));
|
||||||
indent(*builder, 1);
|
indent(*builder, 1);
|
||||||
|
|
||||||
loc := location.begin;
|
loc := location.begin;
|
||||||
@@ -336,7 +317,26 @@ unable_to_open_file :: (state : *Parse_State, path : string, token : Token) {
|
|||||||
|
|
||||||
indent(*builder, 1);
|
indent(*builder, 1);
|
||||||
cyan(*builder);
|
cyan(*builder);
|
||||||
print_to_builder(*builder, "%\n", print_from_source_location(location));
|
print_to_builder(*builder, "%\n", print_from_source_location(state.ctx, location));
|
||||||
|
indent(*builder, 1);
|
||||||
|
|
||||||
|
loc := location.begin;
|
||||||
|
print_token_pointer(*builder, loc);
|
||||||
|
|
||||||
|
final_message := builder_to_string(*builder);
|
||||||
|
record_error(state, token, final_message, false);
|
||||||
|
}
|
||||||
|
|
||||||
|
entry_point_requires_return_value :: (state : *Parse_State, token : Token) {
|
||||||
|
builder : String_Builder;
|
||||||
|
init_string_builder(*builder,, temp);
|
||||||
|
|
||||||
|
print_to_builder(*builder, "Entry point '%' requires return value\n\n", token.ident_value);
|
||||||
|
|
||||||
|
location := generate_source_location_from_token(state, token);
|
||||||
|
indent(*builder, 1);
|
||||||
|
cyan(*builder);
|
||||||
|
print_to_builder(*builder, "%\n", print_from_source_location(state.ctx, location));
|
||||||
indent(*builder, 1);
|
indent(*builder, 1);
|
||||||
|
|
||||||
loc := location.begin;
|
loc := location.begin;
|
||||||
@@ -383,29 +383,14 @@ make_node :: (parse_state : *Parse_State, kind : AST_Kind) -> *AST_Node {
|
|||||||
return make_node(*parse_state.ctx.nodes, kind);
|
return make_node(*parse_state.ctx.nodes, kind);
|
||||||
}
|
}
|
||||||
|
|
||||||
// new_builtin_node :: (nodes : *[..]AST_Node, kind : AST_Kind) -> *AST_Node {
|
make_builtin_token :: (tokens : *[..]Token, kind : Token_Kind, text : string, col : *int, line : *int) -> *Token {
|
||||||
// node := make_node(parse_state, kind);
|
|
||||||
// node.builtin = true;
|
|
||||||
// return node;
|
|
||||||
// }
|
|
||||||
|
|
||||||
make_builtin_token :: (tokens : *[..]Token, builder : *String_Builder, kind : Token_Kind, text : string, col : *int, line : *int) -> *Token {
|
|
||||||
tok : Token;
|
tok : Token;
|
||||||
tok.kind = kind;
|
tok.kind = kind;
|
||||||
|
|
||||||
start := 0;
|
start := 0;
|
||||||
|
|
||||||
buffer := get_current_buffer(builder);
|
|
||||||
|
|
||||||
if buffer {
|
|
||||||
start := buffer.count;
|
|
||||||
}
|
|
||||||
tok.column = col.*;
|
tok.column = col.*;
|
||||||
|
|
||||||
print_to_builder(builder, "%", text);
|
|
||||||
buffer = get_current_buffer(builder);
|
|
||||||
end := buffer.count;
|
|
||||||
|
|
||||||
for c : text {
|
for c : text {
|
||||||
if c == #char "\n" {
|
if c == #char "\n" {
|
||||||
line.* += 1;
|
line.* += 1;
|
||||||
@@ -415,9 +400,11 @@ make_builtin_token :: (tokens : *[..]Token, builder : *String_Builder, kind : To
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
tok.index = buffer.count - text.count;
|
tok.index = tokens.count;
|
||||||
tok.length = text.count;
|
tok.length = text.count;
|
||||||
tok.builtin = true;
|
tok.builtin = true;
|
||||||
|
tok.source = text.data;
|
||||||
|
tok.ident_value = text;
|
||||||
|
|
||||||
array_add(tokens, tok);
|
array_add(tokens, tok);
|
||||||
|
|
||||||
@@ -427,9 +414,6 @@ make_builtin_token :: (tokens : *[..]Token, builder : *String_Builder, kind : To
|
|||||||
new_builtin_struct_node :: (ctx : *Compiler_Context, name : string, members : []Arg) -> *AST_Node {
|
new_builtin_struct_node :: (ctx : *Compiler_Context, name : string, members : []Arg) -> *AST_Node {
|
||||||
sc := get_scratch(context.allocator);
|
sc := get_scratch(context.allocator);
|
||||||
defer scratch_end(sc);
|
defer scratch_end(sc);
|
||||||
builder : String_Builder;
|
|
||||||
builder.allocator = sc.allocator; // I want to find a good way to use scratch here...
|
|
||||||
|
|
||||||
node := make_node(*ctx.nodes, .Struct);
|
node := make_node(*ctx.nodes, .Struct);
|
||||||
|
|
||||||
source_location : Source_Range;
|
source_location : Source_Range;
|
||||||
@@ -439,17 +423,13 @@ new_builtin_struct_node :: (ctx : *Compiler_Context, name : string, members : []
|
|||||||
|
|
||||||
tok_index := ctx.tokens.count;
|
tok_index := ctx.tokens.count;
|
||||||
|
|
||||||
ident_token := make_builtin_token(*ctx.tokens, *builder, .TOKEN_IDENTIFIER, tprint("%", name), *col, *line);
|
ident_token := make_builtin_token(*ctx.tokens, .TOKEN_IDENTIFIER, name, *col, *line);
|
||||||
ident_token.ident_value = name;
|
ident_token.ident_value = name;
|
||||||
source_location.begin = ident_token;
|
source_location.begin = ident_token;
|
||||||
|
|
||||||
append(*builder, " ");
|
make_builtin_token(*ctx.tokens, .TOKEN_DOUBLECOLON, " :: ", *col, *line);
|
||||||
make_builtin_token(*ctx.tokens, *builder, .TOKEN_DOUBLECOLON, "::", *col, *line);
|
make_builtin_token(*ctx.tokens, .TOKEN_STRUCT, "struct ", *col, *line);
|
||||||
append(*builder, " ");
|
make_builtin_token(*ctx.tokens, .TOKEN_LEFTBRACE, "{\n\t", *col, *line);
|
||||||
make_builtin_token(*ctx.tokens, *builder, .TOKEN_STRUCT, "struct", *col, *line);
|
|
||||||
append(*builder, " ");
|
|
||||||
make_builtin_token(*ctx.tokens, *builder, .TOKEN_LEFTBRACE, "{", *col, *line);
|
|
||||||
append(*builder, "\n");
|
|
||||||
line += 1;
|
line += 1;
|
||||||
col = 0;
|
col = 0;
|
||||||
|
|
||||||
@@ -460,18 +440,14 @@ new_builtin_struct_node :: (ctx : *Compiler_Context, name : string, members : []
|
|||||||
field := make_node(*ctx.nodes, .Field);
|
field := make_node(*ctx.nodes, .Field);
|
||||||
field_source_loc : Source_Range;
|
field_source_loc : Source_Range;
|
||||||
|
|
||||||
indent(*builder, 1);
|
field_ident := make_builtin_token(*ctx.tokens, .TOKEN_IDENTIFIER, member.name, *col, *line);
|
||||||
field_ident := make_builtin_token(*ctx.tokens, *builder, .TOKEN_IDENTIFIER, tprint("%", member.name), *col, *line);
|
|
||||||
field_source_loc.begin = field_ident;
|
field_source_loc.begin = field_ident;
|
||||||
field.token = field_ident;
|
field.token = field_ident;
|
||||||
field.name = member.name;
|
field.name = member.name;
|
||||||
|
|
||||||
append(*builder, " ");
|
make_builtin_token(*ctx.tokens, .TOKEN_COLON, ": ", *col, *line);
|
||||||
make_builtin_token(*ctx.tokens, *builder, .TOKEN_COLON, ":", *col, *line);
|
make_builtin_token(*ctx.tokens, .TOKEN_IDENTIFIER, member.typename, *col, *line);
|
||||||
append(*builder, " ");
|
semicolon_tok := make_builtin_token(*ctx.tokens, .TOKEN_SEMICOLON, ";", *col, *line);
|
||||||
make_builtin_token(*ctx.tokens, *builder, .TOKEN_IDENTIFIER, tprint("%", member.typename), *col, *line);
|
|
||||||
semicolon_tok := make_builtin_token(*ctx.tokens, *builder, .TOKEN_SEMICOLON, ";", *col, *line);
|
|
||||||
append(*builder, "\n");
|
|
||||||
col = 0;
|
col = 0;
|
||||||
line += 1;
|
line += 1;
|
||||||
|
|
||||||
@@ -481,27 +457,10 @@ new_builtin_struct_node :: (ctx : *Compiler_Context, name : string, members : []
|
|||||||
add_child(field_list, field);
|
add_child(field_list, field);
|
||||||
}
|
}
|
||||||
|
|
||||||
brace_token := make_builtin_token(*ctx.tokens, *builder, .TOKEN_RIGHTBRACE, "}", *col, *line);
|
brace_token := make_builtin_token(*ctx.tokens, .TOKEN_RIGHTBRACE, "\n}", *col, *line);
|
||||||
append(*builder, "\n");
|
|
||||||
|
|
||||||
source_location.end = brace_token;
|
source_location.end = brace_token;
|
||||||
|
|
||||||
source := builder_to_string(*builder,, context.allocator);
|
|
||||||
|
|
||||||
source_location.begin.source = *source.data[source_location.begin.column];
|
|
||||||
source_location.end.source = *source.data[source_location.end.column];
|
|
||||||
|
|
||||||
for i : tok_index..ctx.tokens.count - 1 {
|
|
||||||
tok := ctx.tokens[i];
|
|
||||||
tok.source = *source.data[tok.column];
|
|
||||||
}
|
|
||||||
|
|
||||||
for field : field_list.children {
|
|
||||||
field.source_location.begin.source = *source.data[field.source_location.begin.column];
|
|
||||||
field.source_location.end.source = *source.data[field.source_location.end.column];
|
|
||||||
// field.source_location.main_token.source = *source.data[tok.column];
|
|
||||||
}
|
|
||||||
|
|
||||||
node.source_location = source_location;
|
node.source_location = source_location;
|
||||||
|
|
||||||
return node;
|
return node;
|
||||||
@@ -510,8 +469,6 @@ new_builtin_struct_node :: (ctx : *Compiler_Context, name : string, members : []
|
|||||||
new_builtin_function_node :: (ctx : *Compiler_Context, name : string, members : []Arg, return_var : Arg) -> *AST_Node {
|
new_builtin_function_node :: (ctx : *Compiler_Context, name : string, members : []Arg, return_var : Arg) -> *AST_Node {
|
||||||
sc := get_scratch(context.allocator);
|
sc := get_scratch(context.allocator);
|
||||||
defer scratch_end(sc);
|
defer scratch_end(sc);
|
||||||
builder : String_Builder;
|
|
||||||
builder.allocator = sc.allocator; // I want to find a good way to use scratch here...
|
|
||||||
|
|
||||||
node := make_node(*ctx.nodes, .Function);
|
node := make_node(*ctx.nodes, .Function);
|
||||||
|
|
||||||
@@ -522,13 +479,11 @@ new_builtin_function_node :: (ctx : *Compiler_Context, name : string, members :
|
|||||||
|
|
||||||
tok_index := ctx.tokens.count;
|
tok_index := ctx.tokens.count;
|
||||||
|
|
||||||
ident_token := make_builtin_token(*ctx.tokens, *builder, .TOKEN_IDENTIFIER, tprint("%", name), *col, *line);
|
ident_token := make_builtin_token(*ctx.tokens, .TOKEN_IDENTIFIER, name, *col, *line);
|
||||||
source_location.begin = ident_token;
|
source_location.begin = ident_token;
|
||||||
|
|
||||||
append(*builder, " ");
|
make_builtin_token(*ctx.tokens, .TOKEN_DOUBLECOLON, " :: ", *col, *line);
|
||||||
make_builtin_token(*ctx.tokens, *builder, .TOKEN_DOUBLECOLON, "::", *col, *line);
|
make_builtin_token(*ctx.tokens, .TOKEN_LEFTPAREN, "(", *col, *line);
|
||||||
append(*builder, " ");
|
|
||||||
make_builtin_token(*ctx.tokens, *builder, .TOKEN_LEFTPAREN, "(", *col, *line);
|
|
||||||
field_list := make_node(*ctx.nodes, .FieldList);
|
field_list := make_node(*ctx.nodes, .FieldList);
|
||||||
add_child(node, field_list);
|
add_child(node, field_list);
|
||||||
|
|
||||||
@@ -536,14 +491,12 @@ new_builtin_function_node :: (ctx : *Compiler_Context, name : string, members :
|
|||||||
field := make_node(*ctx.nodes, .Field);
|
field := make_node(*ctx.nodes, .Field);
|
||||||
field_source_loc : Source_Range;
|
field_source_loc : Source_Range;
|
||||||
|
|
||||||
// field_ident := make_builtin_token(*ctx.tokens, *builder, .TOKEN_IDENTIFIER, tprint("%", member.name), *col, *line);
|
type_tok := make_builtin_token(*ctx.tokens, .TOKEN_IDENTIFIER, member.typename, *col, *line);
|
||||||
type_tok := make_builtin_token(*ctx.tokens, *builder, .TOKEN_IDENTIFIER, tprint("%", member.typename), *col, *line);
|
|
||||||
field_source_loc.begin = type_tok;
|
field_source_loc.begin = type_tok;
|
||||||
field.token = type_tok;
|
field.token = type_tok;
|
||||||
|
|
||||||
if it_index < members.count - 1 {
|
if it_index < members.count - 1 {
|
||||||
make_builtin_token(*ctx.tokens, *builder, .TOKEN_COMMA, ",", *col, *line);
|
make_builtin_token(*ctx.tokens, .TOKEN_COMMA, ", ", *col, *line);
|
||||||
append(*builder, " ");
|
|
||||||
}
|
}
|
||||||
|
|
||||||
field_source_loc.end = type_tok;
|
field_source_loc.end = type_tok;
|
||||||
@@ -552,34 +505,18 @@ new_builtin_function_node :: (ctx : *Compiler_Context, name : string, members :
|
|||||||
add_child(field_list, field);
|
add_child(field_list, field);
|
||||||
}
|
}
|
||||||
|
|
||||||
make_builtin_token(*ctx.tokens, *builder, .TOKEN_RIGHTPAREN, ")", *col, *line);
|
make_builtin_token(*ctx.tokens, .TOKEN_RIGHTPAREN, ")", *col, *line);
|
||||||
semicolon_tok := make_builtin_token(*ctx.tokens, *builder, .TOKEN_SEMICOLON, ";", *col, *line);
|
semicolon_tok := make_builtin_token(*ctx.tokens, .TOKEN_SEMICOLON, ";", *col, *line);
|
||||||
|
|
||||||
source_location.end = semicolon_tok;
|
source_location.end = semicolon_tok;
|
||||||
|
|
||||||
source := builder_to_string(*builder,, context.allocator);
|
|
||||||
|
|
||||||
source_location.begin.source = *source.data[source_location.begin.column];
|
|
||||||
source_location.end.source = *source.data[source_location.end.column];
|
|
||||||
|
|
||||||
for i : tok_index..ctx.tokens.count - 1 {
|
|
||||||
tok := ctx.tokens[i];
|
|
||||||
tok.source = *source.data[tok.column];
|
|
||||||
}
|
|
||||||
|
|
||||||
for field : field_list.children {
|
|
||||||
field.source_location.begin.source = *source.data[field.source_location.begin.column];
|
|
||||||
field.source_location.end.source = *source.data[field.source_location.end.column];
|
|
||||||
// field.source_location.main_token.source = *source.data[tok.column];
|
|
||||||
}
|
|
||||||
|
|
||||||
node.source_location = source_location;
|
node.source_location = source_location;
|
||||||
|
|
||||||
return node;
|
return node;
|
||||||
}
|
}
|
||||||
|
|
||||||
get_field_list :: (struct_or_func : *AST_Node) -> *AST_Node {
|
get_field_list :: (struct_or_func : *AST_Node) -> *AST_Node {
|
||||||
assert(struct_or_func.kind == .Function || struct_or_func.kind == .Struct || struct_or_func.kind == .Properties);
|
assert(struct_or_func.kind == .Function || struct_or_func.kind == .Struct || struct_or_func.kind == .CBuffer);
|
||||||
return struct_or_func.children[0];
|
return struct_or_func.children[0];
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -771,13 +708,12 @@ array_access :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
|
|||||||
identifier := parse_state.ctx.tokens[parse_state.current_token_index - 3];
|
identifier := parse_state.ctx.tokens[parse_state.current_token_index - 3];
|
||||||
left_bracket := parse_state.ctx.tokens[parse_state.current_token_index - 2];
|
left_bracket := parse_state.ctx.tokens[parse_state.current_token_index - 2];
|
||||||
|
|
||||||
array_access := make_node(parse_state, .Unary);
|
array_access := make_node(parse_state, .Binary);
|
||||||
array_access.token = left_bracket;
|
array_access.token = left_bracket;
|
||||||
array_index := expression(parse_state);
|
array_index := expression(parse_state);
|
||||||
|
add_child(array_access, left);
|
||||||
add_child(array_access, array_index);
|
add_child(array_access, array_index);
|
||||||
|
|
||||||
add_child(left, array_access);
|
|
||||||
|
|
||||||
consume(parse_state, .TOKEN_RIGHTBRACKET, "Expected ']' after array index.");
|
consume(parse_state, .TOKEN_RIGHTBRACKET, "Expected ']' after array index.");
|
||||||
|
|
||||||
source_location : Source_Range;
|
source_location : Source_Range;
|
||||||
@@ -794,8 +730,8 @@ array_access :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
|
|||||||
}
|
}
|
||||||
|
|
||||||
source_location.end = parse_state.previous;
|
source_location.end = parse_state.previous;
|
||||||
left.source_location = source_location;
|
array_access.source_location = source_location;
|
||||||
return left;
|
return array_access;
|
||||||
}
|
}
|
||||||
|
|
||||||
unary :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
|
unary :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
|
||||||
@@ -838,62 +774,31 @@ directive :: (state : *Parse_State) -> *AST_Node {
|
|||||||
if_directive := make_node(state, .If_Directive);
|
if_directive := make_node(state, .If_Directive);
|
||||||
|
|
||||||
source_location : Source_Range;
|
source_location : Source_Range;
|
||||||
// source_location.begin = state.previous;
|
if state.previous {
|
||||||
|
source_location.begin = state.previous;
|
||||||
|
} else {
|
||||||
|
source_location.begin = state.current;
|
||||||
|
}
|
||||||
|
|
||||||
advance(state);
|
advance(state);
|
||||||
|
|
||||||
cond := expression(state);
|
cond := expression(state);
|
||||||
add_child(if_directive, cond);
|
add_child(if_directive, cond);
|
||||||
|
|
||||||
|
source_location.end = state.previous;
|
||||||
|
advance_to_sync_point(state);
|
||||||
|
|
||||||
if_body := block(state);
|
if_body := block(state);
|
||||||
add_child(if_directive, if_body);
|
add_child(if_directive, if_body);
|
||||||
|
|
||||||
|
if match(state, .TOKEN_ELSE) {
|
||||||
|
else_node := else_statement(state);
|
||||||
|
add_child(if_directive, else_node);
|
||||||
|
}
|
||||||
|
|
||||||
if_directive.source_location = source_location;
|
if_directive.source_location = source_location;
|
||||||
|
|
||||||
return if_directive;
|
return if_directive;
|
||||||
} else if state.current.ident_value == "load" {
|
|
||||||
advance(state);
|
|
||||||
|
|
||||||
if check(state, .TOKEN_STRING) {
|
|
||||||
// path_tok := state.current;
|
|
||||||
// path := path_tok.string_value;
|
|
||||||
|
|
||||||
// advance(state);
|
|
||||||
|
|
||||||
// result : Compiler_Context;
|
|
||||||
// ctx.allocator = state.ctx.allocator;
|
|
||||||
// ctx.environment = state.ctx.environment;
|
|
||||||
|
|
||||||
// ctx.file = make_file(*result, path);
|
|
||||||
|
|
||||||
// if ctx.file.source.count == 0 {
|
|
||||||
// unable_to_open_file(state, path, path_tok);
|
|
||||||
// advance_to_sync_point(state);
|
|
||||||
// advance(state);
|
|
||||||
// return null;
|
|
||||||
// }
|
|
||||||
|
|
||||||
// consume(state, .TOKEN_SEMICOLON, "Expected ';' after #load directive");
|
|
||||||
|
|
||||||
// lex(*result);
|
|
||||||
|
|
||||||
// count := state.ctx.tokens..count;
|
|
||||||
// current_idx := state.current_token_index;
|
|
||||||
// result_count := ctx.tokens..count;
|
|
||||||
|
|
||||||
// // state.ctx.tokens..count -= 1;
|
|
||||||
// array_resize(*state.ctx.tokens., count + result_count - 1);
|
|
||||||
|
|
||||||
// memcpy(*state.ctx.tokens[current_idx + result_count - 1], *state.ctx.tokens[current_idx], size_of(Token) * (count - current_idx));
|
|
||||||
|
|
||||||
// for *tok : ctx.tokens. {
|
|
||||||
// if tok.kind == .TOKEN_EOF {
|
|
||||||
// break;
|
|
||||||
// }
|
|
||||||
// tok.builtin = true;
|
|
||||||
// state.ctx.tokens[it_index] = tok.*;
|
|
||||||
// }
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return null;
|
return null;
|
||||||
@@ -931,34 +836,31 @@ dot :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
|
|||||||
|
|
||||||
source_location : Source_Range;
|
source_location : Source_Range;
|
||||||
source_location.begin = left.source_location.begin;
|
source_location.begin = left.source_location.begin;
|
||||||
|
source_location.main_token = identifier;
|
||||||
|
|
||||||
|
access := make_node(parse_state, .Access);
|
||||||
|
|
||||||
|
variable := make_node(parse_state, .Variable);
|
||||||
|
variable.name = identifier.ident_value;
|
||||||
|
|
||||||
|
add_child(access, left);
|
||||||
|
add_child(access, variable);
|
||||||
|
|
||||||
if check_any(parse_state, .TOKEN_ASSIGN, .TOKEN_MINUSEQUALS, .TOKEN_PLUSEQUALS, .TOKEN_DIVEQUALS, .TOKEN_MODEQUALS, .TOKEN_TIMESEQUALS) {
|
if check_any(parse_state, .TOKEN_ASSIGN, .TOKEN_MINUSEQUALS, .TOKEN_PLUSEQUALS, .TOKEN_DIVEQUALS, .TOKEN_MODEQUALS, .TOKEN_TIMESEQUALS) {
|
||||||
advance(parse_state);
|
advance(parse_state);
|
||||||
variable := make_node(parse_state, .Variable);
|
access.source_location = generate_source_location_from_token(parse_state, identifier);
|
||||||
variable.source_location = generate_source_location_from_token(parse_state, identifier);
|
|
||||||
variable.name = identifier.ident_value;
|
|
||||||
|
|
||||||
add_child(left, variable);
|
|
||||||
|
|
||||||
node := make_node(parse_state, .Binary);
|
node := make_node(parse_state, .Binary);
|
||||||
node.token = parse_state.previous;
|
node.token = parse_state.previous;
|
||||||
add_child(node, left);
|
node.source_location = generate_source_location_from_token(parse_state, node.token);
|
||||||
|
add_child(node, access);
|
||||||
add_child(node, expression(parse_state));
|
add_child(node, expression(parse_state));
|
||||||
return node;
|
return node;
|
||||||
}
|
}
|
||||||
variable := make_node(parse_state, .Variable);
|
|
||||||
variable.name = identifier.ident_value;
|
|
||||||
|
|
||||||
if check(parse_state, .TOKEN_DOT) {
|
source_location.end = parse_state.current;
|
||||||
advance(parse_state);
|
access.source_location = source_location;
|
||||||
dot(parse_state, variable);
|
return access;
|
||||||
}
|
|
||||||
|
|
||||||
add_child(left, variable);
|
|
||||||
|
|
||||||
source_location.end = parse_state.previous;
|
|
||||||
variable.source_location = source_location;
|
|
||||||
return left;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
integer :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
|
integer :: (parse_state : *Parse_State, left : *AST_Node) -> *AST_Node {
|
||||||
@@ -1256,13 +1158,15 @@ statement :: (parse_state : *Parse_State) -> *AST_Node {
|
|||||||
else_statement :: (parse_state : *Parse_State) -> *AST_Node {
|
else_statement :: (parse_state : *Parse_State) -> *AST_Node {
|
||||||
if check(parse_state, .TOKEN_IF) {
|
if check(parse_state, .TOKEN_IF) {
|
||||||
return statement(parse_state);
|
return statement(parse_state);
|
||||||
|
} else if check(parse_state, .TOKEN_DIRECTIVE) && parse_state.current.ident_value == "if" {
|
||||||
|
return directive(parse_state);
|
||||||
}
|
}
|
||||||
return block(parse_state);
|
return block(parse_state);
|
||||||
}
|
}
|
||||||
|
|
||||||
block :: (parse_state : *Parse_State) -> *AST_Node {
|
block :: (parse_state : *Parse_State) -> *AST_Node {
|
||||||
node : *AST_Node = make_node(parse_state, .Block);
|
node : *AST_Node = make_node(parse_state, .Block);
|
||||||
array_reserve(*node.children, 1024);
|
array_reserve(*node.children, 32);
|
||||||
|
|
||||||
source_location : Source_Range;
|
source_location : Source_Range;
|
||||||
|
|
||||||
@@ -1361,10 +1265,22 @@ function_declaration :: (parse_state : *Parse_State, identifier_token : *Token,
|
|||||||
case .Vertex; {
|
case .Vertex; {
|
||||||
node.vertex_entry_point = true;
|
node.vertex_entry_point = true;
|
||||||
name = sprint("vs_%", function_name_token.ident_value);
|
name = sprint("vs_%", function_name_token.ident_value);
|
||||||
|
|
||||||
|
// if return_type_token.kind == .TOKEN_INVALID {
|
||||||
|
// entry_point_requires_return_value(parse_state, function_name_token);
|
||||||
|
// advance_to_sync_point(parse_state);
|
||||||
|
// return error_node(parse_state, "");
|
||||||
|
// }
|
||||||
}
|
}
|
||||||
case .Pixel; {
|
case .Pixel; {
|
||||||
node.pixel_entry_point = true;
|
node.pixel_entry_point = true;
|
||||||
name = sprint("ps_%", function_name_token.ident_value);
|
name = sprint("ps_%", function_name_token.ident_value);
|
||||||
|
|
||||||
|
// if return_type_token.kind == .TOKEN_INVALID {
|
||||||
|
// entry_point_requires_return_value(parse_state, function_name_token);
|
||||||
|
// advance_to_sync_point(parse_state);
|
||||||
|
// return error_node(parse_state, "");
|
||||||
|
// }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1386,67 +1302,40 @@ function_declaration :: (parse_state : *Parse_State, identifier_token : *Token,
|
|||||||
return node;
|
return node;
|
||||||
}
|
}
|
||||||
|
|
||||||
instance_block :: (parse_state : *Parse_State) -> *AST_Node {
|
buffer :: (state : *Parse_State, identifier_token : *Token = null) -> *AST_Node {
|
||||||
node : *AST_Node;
|
node : *AST_Node = make_node(state, .Buffer);
|
||||||
|
|
||||||
source_location : Source_Range;
|
source_location : Source_Range;
|
||||||
source_location.begin = parse_state.current;
|
source_location.begin = state.current;
|
||||||
|
|
||||||
consume(parse_state, .TOKEN_LEFTBRACE, "Expect '{' after 'instance' keyword");
|
if check(state, .TOKEN_AT) {
|
||||||
properties := field_list(parse_state, .Semicolon);
|
while check(state, .TOKEN_AT) {
|
||||||
|
advance(state);
|
||||||
node = make_node(parse_state, .Instance);
|
// @Incomplete(niels): this is a mapping
|
||||||
add_child(node, properties);
|
if check(state, .TOKEN_IDENTIFIER) {
|
||||||
|
array_add(*node.hint_tokens, state.current);
|
||||||
consume(parse_state, .TOKEN_RIGHTBRACE, "Expect '}' after instance block");
|
advance(state);
|
||||||
source_location.end = parse_state.previous;
|
}
|
||||||
node.source_location = source_location;
|
}
|
||||||
|
|
||||||
return node;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
meta_block :: (parse_state : *Parse_State) -> *AST_Node {
|
consume(state, .TOKEN_LEFTBRACE, "Expect '{' after 'buffer' keyword");
|
||||||
node : *AST_Node;
|
buffer := field_list(state, .Semicolon);
|
||||||
|
node.array_field = true;
|
||||||
|
|
||||||
source_location : Source_Range;
|
|
||||||
source_location.begin = parse_state.current;
|
|
||||||
|
|
||||||
consume(parse_state, .TOKEN_LEFTBRACE, "Expect '{' after 'meta' keyword");
|
|
||||||
properties := field_list(parse_state, .Semicolon);
|
|
||||||
|
|
||||||
node = make_node(parse_state, .Meta);
|
|
||||||
add_child(node, properties);
|
|
||||||
|
|
||||||
consume(parse_state, .TOKEN_RIGHTBRACE, "Expect '}' after meta block");
|
|
||||||
source_location.end = parse_state.previous;
|
|
||||||
node.source_location = source_location;
|
|
||||||
|
|
||||||
return node;
|
|
||||||
}
|
|
||||||
|
|
||||||
property_block :: (parse_state : *Parse_State, identifier_token : *Token = null) -> *AST_Node {
|
|
||||||
node : *AST_Node;
|
|
||||||
source_location : Source_Range;
|
|
||||||
source_location.begin = parse_state.current;
|
|
||||||
|
|
||||||
consume(parse_state, .TOKEN_LEFTBRACE, "Expect '{' after 'property' keyword");
|
|
||||||
properties := field_list(parse_state, .Semicolon);
|
|
||||||
|
|
||||||
node = make_node(parse_state, .Properties);
|
|
||||||
if identifier_token {
|
if identifier_token {
|
||||||
node.name = identifier_token.ident_value;
|
node.name = identifier_token.ident_value;
|
||||||
}
|
}
|
||||||
add_child(node, properties);
|
add_child(node, buffer);
|
||||||
|
|
||||||
consume(parse_state, .TOKEN_RIGHTBRACE, "Expect '}' after 'property' keyword");
|
consume(state, .TOKEN_RIGHTBRACE, "Expect '}' after 'buffer' block");
|
||||||
source_location.end = parse_state.previous;
|
source_location.end = state.previous;
|
||||||
node.source_location = source_location;
|
node.source_location = source_location;
|
||||||
|
|
||||||
return node;
|
return node;
|
||||||
}
|
}
|
||||||
|
|
||||||
constant_buffer :: (parse_state : *Parse_State, identifier_token : *Token = null) -> *AST_Node {
|
constant_buffer :: (parse_state : *Parse_State, identifier_token : *Token = null) -> *AST_Node {
|
||||||
node : *AST_Node;
|
node : *AST_Node = make_node(parse_state, .CBuffer);
|
||||||
source_location : Source_Range;
|
source_location : Source_Range;
|
||||||
source_location.begin = parse_state.current;
|
source_location.begin = parse_state.current;
|
||||||
|
|
||||||
@@ -1464,7 +1353,6 @@ constant_buffer :: (parse_state : *Parse_State, identifier_token : *Token = null
|
|||||||
consume(parse_state, .TOKEN_LEFTBRACE, "Expect '{' after 'constant_buffer' keyword");
|
consume(parse_state, .TOKEN_LEFTBRACE, "Expect '{' after 'constant_buffer' keyword");
|
||||||
buffer := field_list(parse_state, .Semicolon);
|
buffer := field_list(parse_state, .Semicolon);
|
||||||
|
|
||||||
node = make_node(parse_state, .CBuffer);
|
|
||||||
if identifier_token {
|
if identifier_token {
|
||||||
node.name = identifier_token.ident_value;
|
node.name = identifier_token.ident_value;
|
||||||
}
|
}
|
||||||
@@ -1506,10 +1394,10 @@ const_declaration :: (parse_state : *Parse_State, identifier_token : *Token) ->
|
|||||||
return struct_declaration(parse_state, identifier_token);
|
return struct_declaration(parse_state, identifier_token);
|
||||||
} else if check(parse_state, .TOKEN_LEFTPAREN) {
|
} else if check(parse_state, .TOKEN_LEFTPAREN) {
|
||||||
return function_declaration(parse_state, identifier_token, .None);
|
return function_declaration(parse_state, identifier_token, .None);
|
||||||
} else if match(parse_state, .TOKEN_PROPERTIES) {
|
|
||||||
return property_block(parse_state, identifier_token);
|
|
||||||
} else if match(parse_state, .TOKEN_CONSTANT_BUFFER) {
|
} else if match(parse_state, .TOKEN_CONSTANT_BUFFER) {
|
||||||
return constant_buffer(parse_state, identifier_token);
|
return constant_buffer(parse_state, identifier_token);
|
||||||
|
} else if match(parse_state, .TOKEN_BUFFER) {
|
||||||
|
return buffer(parse_state, identifier_token);
|
||||||
}
|
}
|
||||||
return error_node(parse_state, tprint("Couldn't parse constant declaration at token %\n", parse_state.current.*));
|
return error_node(parse_state, tprint("Couldn't parse constant declaration at token %\n", parse_state.current.*));
|
||||||
}
|
}
|
||||||
@@ -1517,13 +1405,7 @@ const_declaration :: (parse_state : *Parse_State, identifier_token : *Token) ->
|
|||||||
declaration :: (parse_state : *Parse_State) -> *AST_Node {
|
declaration :: (parse_state : *Parse_State) -> *AST_Node {
|
||||||
skip_statement := false;
|
skip_statement := false;
|
||||||
decl_node : *AST_Node;
|
decl_node : *AST_Node;
|
||||||
if match(parse_state, .TOKEN_PROPERTIES) {
|
if match(parse_state, .TOKEN_VERTEX) {
|
||||||
decl_node = property_block(parse_state);
|
|
||||||
} else if match(parse_state, .TOKEN_INSTANCE) {
|
|
||||||
decl_node = instance_block(parse_state);
|
|
||||||
} else if match(parse_state, .TOKEN_META) {
|
|
||||||
decl_node = meta_block(parse_state);
|
|
||||||
} else if match(parse_state, .TOKEN_VERTEX) {
|
|
||||||
vertex_token := parse_state.previous;
|
vertex_token := parse_state.previous;
|
||||||
identifier := parse_state.current;
|
identifier := parse_state.current;
|
||||||
|
|
||||||
@@ -1539,8 +1421,6 @@ declaration :: (parse_state : *Parse_State) -> *AST_Node {
|
|||||||
consume(parse_state, .TOKEN_DOUBLECOLON, "Expect '::' after pixel entry point declaration.");
|
consume(parse_state, .TOKEN_DOUBLECOLON, "Expect '::' after pixel entry point declaration.");
|
||||||
|
|
||||||
decl_node = function_declaration(parse_state, identifier, .Pixel);
|
decl_node = function_declaration(parse_state, identifier, .Pixel);
|
||||||
} else if check(parse_state, .TOKEN_LEFTPAREN) {
|
|
||||||
decl_node = call(parse_state, null);
|
|
||||||
} else if check(parse_state, .TOKEN_DIRECTIVE) {
|
} else if check(parse_state, .TOKEN_DIRECTIVE) {
|
||||||
decl_node = directive(parse_state);
|
decl_node = directive(parse_state);
|
||||||
skip_statement = true;
|
skip_statement = true;
|
||||||
@@ -1610,4 +1490,4 @@ parse :: (ctx : *Compiler_Context, allocator := temp) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#load "AST.jai";
|
#load "ast.jai";
|
||||||
|
|||||||
76 README.md
@@ -20,15 +20,15 @@ There is basic support for most HLSL built-in math functions for the following t
|
|||||||
- Vector types: float2, float3, float4, int2, int3, int4
|
- Vector types: float2, float3, float4, int2, int3, int4
|
||||||
- Matrices: float4x4
|
- Matrices: float4x4
|
||||||
All of the above can be constructed with their namesake constructors, e.g. `float4(x, y, z, w);`.
|
All of the above can be constructed with their namesake constructors, e.g. `float4(x, y, z, w);`.
|
||||||
We don't yet support textures and samplers.
|
We also support samplers and `Texture2D`.
|
||||||
|
|
||||||
If you want to declare and use variables you can do it as follows
|
If you want to declare and use variables you can do it as follows
|
||||||
```hlsl
|
```hlsl
|
||||||
x : float = 2.0; // no 'f' suffix required or even supported (it gives an error)
|
x : float = 2.0; // no 'f' suffix required or even supported (it gives an error)
|
||||||
y : float = 4.0;
|
y : float = 4.0;
|
||||||
v : float2 = float2(x, y);
|
v : float2 = float2(x, y);
|
||||||
|
v2 := float2(x, y);
|
||||||
```
|
```
|
||||||
For now it is required to specify the type of the variable (no type inference).
|
|
||||||
|
|
||||||
You can also do arithmetic as you would expect
|
You can also do arithmetic as you would expect
|
||||||
```
|
```
|
||||||
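// Illustrative sketch only; the operators used here are assumed, since the README's
// concrete arithmetic examples lie outside this hunk.
x : float = 2.0;
y : float = 4.0;
s : float = x * y + x / y - 1.0;
v : float2 = float2(x, y) + float2(s, s);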
@@ -43,6 +43,7 @@ Camera_Data :: struct {
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
And there is a special struct called `properties`, which is used for custom data you want to pass in.
|
And there is a special struct called `properties`, which is used for custom data you want to pass in.
|
||||||
|
**Note:** Properties will likely be deprecated, since the language now supports `@` hints to easily mark buffers and values with metadata.
|
||||||
```hlsl
|
```hlsl
|
||||||
properties {
|
properties {
|
||||||
projection : float4x4;
|
projection : float4x4;
|
||||||
@@ -53,13 +54,14 @@ which will be exposed in the compiled result. `properties` can be renamed to a c
|
|||||||
```
|
```
|
||||||
p :: properties {
|
p :: properties {
|
||||||
...
|
...
|
||||||
}
|
}
||||||
|
|
||||||
```
|
```
|
||||||
|
|
||||||
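As a rough sketch of the hinted replacement mentioned in the note above (the `constant_buffer @properties` shape comes from the TODO list in module.jai further down this diff; the field name here is made up for illustration):
```hlsl
props :: constant_buffer @properties {
    tint : float4; // hypothetical field, for illustration only
}
```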
You can also define constant buffers
|
You can also define constant buffers
|
||||||
|
|
||||||
```
|
```
|
||||||
camera :: Constant_Buffer {
|
camera :: constant_buffer {
|
||||||
projection : float4x4;
|
projection : float4x4;
|
||||||
view : float4x4;
|
view : float4x4;
|
||||||
}
|
}
|
||||||
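// Illustrative sketch of using the buffer above from a vertex entry point; the entry-point
// shape matches other examples in this diff, but the matrix multiplication syntax is assumed.
vertex main :: (pos : float4) -> float4 @position {
    return camera.projection * camera.view * pos;
}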
@@ -70,69 +72,68 @@ camera :: Constant_Buffer {
|
|||||||
To compile a shader and use the result, you can do the following in jai
|
To compile a shader and use the result, you can do the following in jai
|
||||||
```jai
|
```jai
|
||||||
|
|
||||||
parse_shader :: (path : string, allocator : Allocator) -> Compilation_Result {
|
|
||||||
// In the future, you can pass environment defines to the compiler.
|
// In the future, you can pass environment defines to the compiler.
|
||||||
compiler : Shader_Compiler;
|
ctx : Compiler_Context;
|
||||||
|
compile_file(*ctx, "shader.shd", temp);
|
||||||
|
|
||||||
return compile_file(*compiler, path,, allocator);
|
if ctx.had_error {
|
||||||
}
|
log_error("%\n", report_messages(ctx.messages),, temp);
|
||||||
|
|
||||||
result := parse_shader("shader.shd", allocator);
|
|
||||||
if result.had_error {
|
|
||||||
log_error("%\n", report_messages(result.messages),, temp);
|
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
collection := result.collection;
|
// The ctx now contains all the needed information like the source text, entry points, constant buffers etc.
|
||||||
variant := collection.variants[0];
|
|
||||||
}
|
|
||||||
```
|
```
|
||||||
|
|
||||||
When parsing a shader you get the following struct as a result
|
When parsing a shader you get the following struct as a result
|
||||||
```
|
```
|
||||||
Compilation_Result :: struct {
|
Compiler_Context :: struct {
|
||||||
messages : [..]Compiler_Message;
|
file : Input_File;
|
||||||
|
|
||||||
had_error : bool;
|
environment : Environment;
|
||||||
|
|
||||||
collection : Shader_Variant_Collection;
|
tokens : [..]Token;
|
||||||
}
|
root : *AST_Node;
|
||||||
```
|
nodes : [..]AST_Node;
|
||||||
|
|
||||||
A `Shader_Variant_Collection` looks as follows
|
codegen_result_text : string;
|
||||||
```
|
|
||||||
Shader_Variant_Collection :: struct {
|
|
||||||
properties : Properties;
|
|
||||||
|
|
||||||
max_constant_buffers :: 16;
|
constant_buffers : Static_Array(Type_Variable_Handle, 16);
|
||||||
cbuffers : Static_Array(Constant_Buffer, max_constant_buffers);
|
|
||||||
|
|
||||||
variants : [..]Shader_Variant;
|
scope_stack : Scope_Stack;
|
||||||
}
|
type_variables : [..]Type_Variable;
|
||||||
|
|
||||||
Shader_Variant :: struct {
|
property_name : string;
|
||||||
text : string;
|
|
||||||
|
|
||||||
vertex_entry_point : struct {
|
vertex_entry_point : struct {
|
||||||
|
node : *AST_Node;
|
||||||
name : string;
|
name : string;
|
||||||
|
|
||||||
input : [..]Field;
|
input : [..]Field;
|
||||||
}
|
}
|
||||||
|
|
||||||
pixel_entry_point : struct {
|
pixel_entry_point : struct {
|
||||||
|
node : *AST_Node;
|
||||||
name : string;
|
name : string;
|
||||||
|
|
||||||
return_value : Field;
|
return_value : Field;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
properties : Properties;
|
||||||
|
|
||||||
|
max_constant_buffers :: 16;
|
||||||
|
|
||||||
|
cbuffers : Static_Array(Constant_Buffer, max_constant_buffers);
|
||||||
|
|
||||||
|
had_error : bool;
|
||||||
|
messages : [..]Compiler_Message;
|
||||||
}
|
}
|
||||||
|
|
||||||
Constant_Buffer :: struct {
|
Constant_Buffer :: struct {
|
||||||
register : int;
|
|
||||||
|
|
||||||
name : string;
|
name : string;
|
||||||
|
|
||||||
fields : Static_Array(Property_Field, 16);
|
fields : Static_Array(Property_Field, 16);
|
||||||
|
|
||||||
|
// hints : Field_Hint; // optional hint...
|
||||||
|
hints : [..]Field_Hint;
|
||||||
|
|
||||||
buffer_index : u32;
|
buffer_index : u32;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -192,11 +193,10 @@ Hint_Kind :: enum {
|
|||||||
|
|
||||||
## Notable missing features
|
## Notable missing features
|
||||||
|
|
||||||
- Control flow: if/else, for, while, switch etc.
|
- `while` loops
|
||||||
- Arrays
|
- Arrays
|
||||||
- Textures and samplers
|
|
||||||
- Multiple render targets
|
- Multiple render targets
|
||||||
- Custom buffers/structured buffers
|
- Custom buffers/structured buffers
|
||||||
- Interpolation specifiers
|
- Interpolation specifiers
|
||||||
- Proper variant handling with environment defines
|
- Proper variant handling with environment defines
|
||||||
- Include/importing files such as shared utils etc.
|
- Importing files such as shared utils etc. with something other than textual `#load`
|
||||||
30 first.jai
@@ -13,46 +13,33 @@ build :: () {
|
|||||||
}
|
}
|
||||||
|
|
||||||
EXECUTABLE_NAME :: "ink";
|
EXECUTABLE_NAME :: "ink";
|
||||||
MAIN_FILE :: "Ink.jai";
|
MAIN_FILE :: "ink.jai";
|
||||||
|
|
||||||
options := get_build_options(w);
|
options := get_build_options(w);
|
||||||
|
|
||||||
options.write_added_strings = true;
|
|
||||||
|
|
||||||
args := options.compile_time_command_line;
|
args := options.compile_time_command_line;
|
||||||
|
|
||||||
intercept_flags: Intercept_Flags;
|
profile : bool = false;
|
||||||
plugin_start_index := -1;
|
|
||||||
|
|
||||||
for arg : args {
|
for arg : args {
|
||||||
if arg == {
|
if arg == {
|
||||||
case "check"; {
|
case "check"; {
|
||||||
options.output_type = .NO_OUTPUT;
|
options.output_type = .NO_OUTPUT;
|
||||||
}
|
}
|
||||||
|
case "profile"; {
|
||||||
|
profile = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
it := args[it_index];
|
|
||||||
|
|
||||||
if !it continue;
|
|
||||||
|
|
||||||
if it[0] == #char "+" {
|
|
||||||
plugin_start_index = it_index;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
intercept_flags: Intercept_Flags;
|
||||||
plugins_to_create: [..] Plugin_To_Create;
|
plugins_to_create: [..] Plugin_To_Create;
|
||||||
|
|
||||||
|
if profile {
|
||||||
tracy : Plugin_To_Create;
|
tracy : Plugin_To_Create;
|
||||||
tracy.name = "tracy";
|
tracy.name = "tracy";
|
||||||
array_add(*plugins_to_create, tracy);
|
array_add(*plugins_to_create, tracy);
|
||||||
// got_error := false;
|
}
|
||||||
// if plugin_start_index >= 0 {
|
|
||||||
// success:, plugins_to_create = parse_plugin_arguments(args, plugin_start_index);
|
|
||||||
// if !success got_error = true;
|
|
||||||
// }
|
|
||||||
|
|
||||||
// if got_error {
|
|
||||||
// exit(1);
|
|
||||||
// }
|
|
||||||
|
|
||||||
success := init_plugins(plugins_to_create, *plugins, w);
|
success := init_plugins(plugins_to_create, *plugins, w);
|
||||||
if !success {
|
if !success {
|
||||||
@@ -63,7 +50,6 @@ build :: () {
|
|||||||
new_path: [..] string;
|
new_path: [..] string;
|
||||||
array_add(*new_path, ..options.import_path);
|
array_add(*new_path, ..options.import_path);
|
||||||
array_add(*new_path, "modules");
|
array_add(*new_path, "modules");
|
||||||
// array_add(*new_path, "modules/shader_parsing");
|
|
||||||
options.import_path = new_path;
|
options.import_path = new_path;
|
||||||
options.output_executable_name = EXECUTABLE_NAME;
|
options.output_executable_name = EXECUTABLE_NAME;
|
||||||
|
|
||||||
|
|||||||
233 module.jai
@@ -1,11 +1,35 @@
|
|||||||
#load "Lexing.jai";
|
#load "lexing.jai";
|
||||||
#load "Error.jai";
|
#load "error.jai";
|
||||||
#load "Parsing.jai";
|
#load "parsing.jai";
|
||||||
#load "Semantic_Analysis.jai";
|
#load "check.jai";
|
||||||
#load "Codegen.jai";
|
#load "codegen.jai";
|
||||||
|
|
||||||
#import "File_Utilities";
|
#import "File_Utilities";
|
||||||
|
|
||||||
|
/* TODO
|
||||||
|
- [x] Remove builtin string building and replace it with ad-hoc string building during error reporting; we are already building a string there anyway, so we can just pass in the string builder
|
||||||
|
- [ ] Support structured buffers (ro, rw, w)
|
||||||
|
- [ ] Support mesh and amplification shaders
|
||||||
|
- [ ] Support compute shaders
|
||||||
|
- [x] Support #if at top level
|
||||||
|
- [x] Support #if at block level
|
||||||
|
- [x] Remove properties block and just use hinted constant buffers instead
|
||||||
|
```
|
||||||
|
props :: constant_buffer @properties {
|
||||||
|
[...]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
- [ ] while loops
|
||||||
|
- [ ] for-each loops
|
||||||
|
- [ ] add parameters to hints (meta properties, resource binding indices if needed)
|
||||||
|
- [ ] consider @entry(stage) syntax instead of the forced keyword
|
||||||
|
- [ ] Add flags to compiler
|
||||||
|
- [ ] Generate output flag(s)
|
||||||
|
- [ ] Possibly final stage flag, so you can just call compile_file and it only does what you need.
|
||||||
|
- This flag should probably specify the _last_ stage you need rather than which stages to run, since the latter doesn't make sense.
|
||||||
|
- [ ] Multiple output languages?
|
||||||
|
*/
|
||||||
|
|
||||||
add_define :: (env : *Environment, key : string) {
|
add_define :: (env : *Environment, key : string) {
|
||||||
for define : env.defines {
|
for define : env.defines {
|
||||||
if define == key {
|
if define == key {
|
||||||
@@ -55,10 +79,33 @@ Hint_Kind :: enum {
|
|||||||
Position;
|
Position;
|
||||||
UV;
|
UV;
|
||||||
Target;
|
Target;
|
||||||
|
Output_Position;
|
||||||
|
|
||||||
Custom;
|
Custom;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
Hint_Names :: #run -> [(cast(int)Hint_Kind.Target) + 1]string {
|
||||||
|
names : [(cast(int)Hint_Kind.Target) + 1]string;
|
||||||
|
names[Hint_Kind.Position] = "position";
|
||||||
|
names[Hint_Kind.UV] = "uv";
|
||||||
|
names[Hint_Kind.Target] = "target";
|
||||||
|
|
||||||
|
return names;
|
||||||
|
}
|
||||||
|
|
||||||
|
lookup_hint :: (name : string) -> Hint_Kind {
|
||||||
|
if name == "position" {
|
||||||
|
return Hint_Kind.Position;
|
||||||
|
} else if name == "uv" {
|
||||||
|
return Hint_Kind.UV;
|
||||||
|
} else if starts_with(name, "target") {
|
||||||
|
return Hint_Kind.Target;
|
||||||
|
} else if name == "outposition" {
|
||||||
|
return Hint_Kind.Output_Position;
|
||||||
|
}
|
||||||
|
return .None;
|
||||||
|
}
|
||||||
|
|
||||||
Field_Hint :: struct {
|
Field_Hint :: struct {
|
||||||
kind : Hint_Kind;
|
kind : Hint_Kind;
|
||||||
|
|
||||||
@@ -81,57 +128,22 @@ Entry_Point :: struct {
|
|||||||
return_value : Field;
|
return_value : Field;
|
||||||
}
|
}
|
||||||
|
|
||||||
Shader_Variant :: struct {
|
Buffer_Kind :: enum {
|
||||||
text : string;
|
Constant;
|
||||||
|
Structured;
|
||||||
|
}
|
||||||
|
|
||||||
vertex_entry_point : struct {
|
Buffer :: struct {
|
||||||
|
kind : Buffer_Kind;
|
||||||
name : string;
|
name : string;
|
||||||
|
|
||||||
input : [..]Field;
|
fields : Static_Array(Field, 16);
|
||||||
}
|
|
||||||
|
|
||||||
pixel_entry_point : struct {
|
|
||||||
name : string;
|
|
||||||
|
|
||||||
return_value : Field;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Property_Field :: struct {
|
|
||||||
base_field : Field;
|
|
||||||
|
|
||||||
// @Incomplete(nb): Editor information, min max, etc.
|
|
||||||
// This should also be compiled out for ship
|
|
||||||
}
|
|
||||||
|
|
||||||
Properties :: struct {
|
|
||||||
fields : [..]Property_Field;
|
|
||||||
|
|
||||||
buffer_index : u32;
|
|
||||||
}
|
|
||||||
|
|
||||||
Constant_Buffer :: struct {
|
|
||||||
register : int;
|
|
||||||
|
|
||||||
name : string;
|
|
||||||
|
|
||||||
fields : Static_Array(Property_Field, 16);
|
|
||||||
|
|
||||||
// hints : Field_Hint; // optional hint...
|
|
||||||
hints : [..]Field_Hint;
|
hints : [..]Field_Hint;
|
||||||
|
|
||||||
buffer_index : u32;
|
buffer_index : u32;
|
||||||
}
|
}
|
||||||
|
|
||||||
Shader_Variant_Collection :: struct {
|
|
||||||
properties : Properties;
|
|
||||||
|
|
||||||
max_constant_buffers :: 16;
|
|
||||||
cbuffers : Static_Array(Constant_Buffer, max_constant_buffers);
|
|
||||||
|
|
||||||
variants : [..]Shader_Variant;
|
|
||||||
}
|
|
||||||
|
|
||||||
Input_File :: struct {
|
Input_File :: struct {
|
||||||
source : string;
|
source : string;
|
||||||
path : string;
|
path : string;
|
||||||
@@ -148,13 +160,12 @@ Compiler_Context :: struct {
|
|||||||
|
|
||||||
codegen_result_text : string;
|
codegen_result_text : string;
|
||||||
|
|
||||||
constant_buffers : Static_Array(Type_Variable_Handle, 16);
|
typed_buffers : Static_Array(Type_Variable_Handle, 32);
|
||||||
|
// structured_buffers : Static_Array(Type_Variable_Handle, 16);
|
||||||
|
|
||||||
scope_stack : Scope_Stack;
|
scope_stack : Scope_Stack;
|
||||||
type_variables : [..]Type_Variable;
|
type_variables : [..]Type_Variable;
|
||||||
|
|
||||||
property_name : string;
|
|
||||||
|
|
||||||
vertex_entry_point : struct {
|
vertex_entry_point : struct {
|
||||||
node : *AST_Node;
|
node : *AST_Node;
|
||||||
name : string;
|
name : string;
|
||||||
@@ -167,11 +178,9 @@ Compiler_Context :: struct {
|
|||||||
return_value : Field;
|
return_value : Field;
|
||||||
}
|
}
|
||||||
|
|
||||||
properties : Properties;
|
max_buffers :: 32;
|
||||||
|
|
||||||
max_constant_buffers :: 16;
|
buffers : Static_Array(Buffer, max_buffers);
|
||||||
|
|
||||||
cbuffers : Static_Array(Constant_Buffer, max_constant_buffers);
|
|
||||||
|
|
||||||
had_error : bool;
|
had_error : bool;
|
||||||
messages : [..]Compiler_Message;
|
messages : [..]Compiler_Message;
|
||||||
@@ -250,7 +259,7 @@ Min_Field_Name :: 10;
|
|||||||
pretty_print_field :: (builder : *String_Builder, field : *Field) {
|
pretty_print_field :: (builder : *String_Builder, field : *Field) {
|
||||||
if field.name.count > 0 {
|
if field.name.count > 0 {
|
||||||
print_to_builder(builder, "% ", field.name);
|
print_to_builder(builder, "% ", field.name);
|
||||||
append(builder, "- ");
|
append(builder, ": ");
|
||||||
} else {
|
} else {
|
||||||
append(builder, "return - ");
|
append(builder, "return - ");
|
||||||
}
|
}
|
||||||
@@ -279,10 +288,17 @@ pretty_print_field :: (builder : *String_Builder, field : *Field) {
|
|||||||
case .Struct; {
|
case .Struct; {
|
||||||
print_to_builder(builder, "struct : % {", type.name);
|
print_to_builder(builder, "struct : % {", type.name);
|
||||||
|
|
||||||
|
newline_after := type.children.count / 4;
|
||||||
|
|
||||||
for *child : type.children {
|
for *child : type.children {
|
||||||
pretty_print_field(builder, child);
|
pretty_print_field(builder, child);
|
||||||
if it_index < type.children.count - 1 {
|
if it_index < type.children.count - 1 {
|
||||||
append(builder, " ");
|
append(builder, ", ");
|
||||||
|
}
|
||||||
|
|
||||||
|
if it_index % newline_after == 0 {
|
||||||
|
append(builder, "\n");
|
||||||
|
indent(builder, 4);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -312,11 +328,7 @@ pretty_print_field :: (builder : *String_Builder, field : *Field) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
type_variable_to_field :: (checker : *Semantic_Checker, variable : Type_Variable_Handle) -> Field {
|
type_variable_to_field :: (ctx : *Compiler_Context, variable : *Type_Variable) -> Field {
|
||||||
return type_variable_to_field(checker, from_handle(checker, variable));
|
|
||||||
}
|
|
||||||
|
|
||||||
type_variable_to_field :: (type_variables : []Type_Variable, scope_stack : Scope_Stack, variable : *Type_Variable) -> Field {
|
|
||||||
field : Field;
|
field : Field;
|
||||||
|
|
||||||
field.name = variable.name;
|
field.name = variable.name;
|
||||||
@@ -349,14 +361,14 @@ type_variable_to_field :: (type_variables : []Type_Variable, scope_stack : Scope
|
|||||||
case .Struct; {
|
case .Struct; {
|
||||||
type.kind = Field_Kind.Struct;
|
type.kind = Field_Kind.Struct;
|
||||||
|
|
||||||
find_result := find_symbol(scope_stack, variable.typename, xx 1);
|
find_result := find_symbol(ctx.scope_stack, variable.typename, xx 1);
|
||||||
assert(find_result != null, "Internal compiler error\n");
|
assert(find_result != null, "Internal compiler error\n");
|
||||||
|
|
||||||
type_var := from_handle(type_variables, find_result.type_variable);
|
type_var := from_handle(ctx.type_variables, find_result.type_variable);
|
||||||
|
|
||||||
for i : 0..type_var.children.count - 1 {
|
for i : 0..type_var.children.count - 1 {
|
||||||
child := type_var.children[i];
|
child := type_var.children[i];
|
||||||
child_field := type_variable_to_field(type_variables, scope_stack, child);
|
child_field := type_variable_to_field(ctx, child);
|
||||||
array_add(*type.children, child_field);
|
array_add(*type.children, child_field);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -367,13 +379,11 @@ type_variable_to_field :: (type_variables : []Type_Variable, scope_stack : Scope
|
|||||||
for hint : variable.source_node.hint_tokens {
|
for hint : variable.source_node.hint_tokens {
|
||||||
field_hint : Field_Hint;
|
field_hint : Field_Hint;
|
||||||
|
|
||||||
if hint.ident_value == "position" {
|
if lookup_hint(hint.ident_value) == .Position {
|
||||||
// @Incomplete(nb): Should be a lookup table somewhere
|
|
||||||
field_hint.kind = .Position;
|
field_hint.kind = .Position;
|
||||||
} else if hint.ident_value == "uv" {
|
} else if lookup_hint(hint.ident_value) == .UV {
|
||||||
field_hint.kind = .UV;
|
field_hint.kind = .UV;
|
||||||
} else if starts_with(hint.ident_value, "target") {
|
} else if lookup_hint(hint.ident_value) == .Target {
|
||||||
// @Incomplete(nb): Should be a lookup table somewhere
|
|
||||||
index_str : string;
|
index_str : string;
|
||||||
index_str.data = *hint.ident_value.data[7];
|
index_str.data = *hint.ident_value.data[7];
|
||||||
index_str.count = 1;
|
index_str.count = 1;
|
||||||
@@ -395,12 +405,39 @@ type_variable_to_field :: (type_variables : []Type_Variable, scope_stack : Scope
|
|||||||
return field;
|
return field;
|
||||||
}
|
}
|
||||||
|
|
||||||
type_variable_to_field :: (type_variables : []Type_Variable, scope_stack : Scope_Stack, variable : Type_Variable_Handle) -> Field {
|
type_variable_to_field :: (ctx : *Compiler_Context, variable : Type_Variable_Handle) -> Field {
|
||||||
return type_variable_to_field(type_variables, scope_stack, from_handle(type_variables, variable));
|
return type_variable_to_field(ctx, from_handle(ctx.type_variables, variable));
|
||||||
}
|
}
|
||||||
|
|
||||||
type_variable_to_field :: (checker : *Semantic_Checker, variable : *Type_Variable) -> Field {
|
generate_buffer :: (ctx : *Compiler_Context, type_handle : Type_Variable_Handle, buffers : *Static_Array) {
|
||||||
return type_variable_to_field(checker.ctx.type_variables, checker.ctx.scope_stack, variable);
|
variable := from_handle(ctx.type_variables, type_handle);
|
||||||
|
|
||||||
|
buffer := array_add(buffers);
|
||||||
|
|
||||||
|
if variable.type == {
|
||||||
|
case .CBuffer; {
|
||||||
|
buffer.kind = .Constant;
|
||||||
|
}
|
||||||
|
case .Buffer; {
|
||||||
|
buffer.kind = .Structured;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
buffer.name = variable.name;
|
||||||
|
|
||||||
|
for i : 0..variable.children.count - 1 {
|
||||||
|
child := variable.children[i];
|
||||||
|
field : Field = type_variable_to_field(ctx, from_handle(ctx.type_variables, child));
|
||||||
|
array_add(*buffer.fields, field);
|
||||||
|
}
|
||||||
|
|
||||||
|
buffer.buffer_index = variable.resource_index;
|
||||||
|
|
||||||
|
for hint : variable.source_node.hint_tokens {
|
||||||
|
field_hint : Field_Hint;
|
||||||
|
field_hint.custom_hint_name = hint.ident_value;
|
||||||
|
field_hint.kind = .Custom;
|
||||||
|
array_add(*buffer.hints, field_hint);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
generate_output_data :: (ctx : *Compiler_Context) {
|
generate_output_data :: (ctx : *Compiler_Context) {
|
||||||
@@ -420,65 +457,31 @@ generate_output_data :: (ctx : *Compiler_Context) {
|
|||||||
field_list := node.children[0];
|
field_list := node.children[0];
|
||||||
for child : field_list.children {
|
for child : field_list.children {
|
||||||
tv := from_handle(ctx.type_variables, child.type_variable);
|
tv := from_handle(ctx.type_variables, child.type_variable);
|
||||||
field := type_variable_to_field(ctx.type_variables, ctx.scope_stack, tv);
|
field := type_variable_to_field(ctx, tv);
|
||||||
array_add(*ctx.vertex_entry_point.input, field);
|
array_add(*ctx.vertex_entry_point.input, field);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
for buffer_variable : ctx.constant_buffers {
|
for buffer_variable : ctx.typed_buffers {
|
||||||
variable := from_handle(ctx.type_variables, buffer_variable);
|
generate_buffer(ctx, buffer_variable, *ctx.buffers);
|
||||||
|
|
||||||
cb := array_add(*ctx.cbuffers);
|
|
||||||
|
|
||||||
for i : 0..variable.children.count - 1 {
|
|
||||||
child := variable.children[i];
|
|
||||||
field : Property_Field;
|
|
||||||
field.base_field = type_variable_to_field(ctx.type_variables, ctx.scope_stack, from_handle(ctx.type_variables, child));
|
|
||||||
array_add(*cb.fields, field);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
cb.buffer_index = variable.resource_index;
|
|
||||||
|
|
||||||
for hint : variable.source_node.hint_tokens {
|
|
||||||
field_hint : Field_Hint;
|
|
||||||
field_hint.custom_hint_name = hint.ident_value;
|
|
||||||
field_hint.kind = .Custom;
|
|
||||||
array_add(*cb.hints, field_hint);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
find_result := find_symbol(*ctx.scope_stack, ctx.property_name, xx 1);
|
|
||||||
if find_result {
|
|
||||||
property_variable := from_handle(ctx.type_variables, find_result.type_variable);
|
|
||||||
|
|
||||||
for i : 0..property_variable.children.count - 1 {
|
|
||||||
child := property_variable.children[i];
|
|
||||||
field := type_variable_to_field(ctx.type_variables, ctx.scope_stack, from_handle(ctx.type_variables, child));
|
|
||||||
prop_field : Property_Field;
|
|
||||||
prop_field.base_field = field;
|
|
||||||
array_add(*ctx.properties.fields, prop_field);
|
|
||||||
}
|
|
||||||
ctx.properties.buffer_index = property_variable.resource_index;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
if ctx.pixel_entry_point.node {
|
if ctx.pixel_entry_point.node {
|
||||||
ctx.pixel_entry_point.name = ctx.pixel_entry_point.node.name;
|
ctx.pixel_entry_point.name = ctx.pixel_entry_point.node.name;
|
||||||
|
|
||||||
type_variable := from_handle(ctx.type_variables, ctx.pixel_entry_point.node.type_variable);
|
type_variable := from_handle(ctx.type_variables, ctx.pixel_entry_point.node.type_variable);
|
||||||
assert(type_variable.type == .Function);
|
assert(type_variable.type == .Function);
|
||||||
|
|
||||||
field := type_variable_to_field(ctx.type_variables, ctx.scope_stack, type_variable.return_type_variable);
|
if type_variable.return_type_variable > 0 {
|
||||||
|
field := type_variable_to_field(ctx, type_variable.return_type_variable);
|
||||||
for hint : type_variable.source_node.hint_tokens {
|
for hint : type_variable.source_node.hint_tokens {
|
||||||
field_hint : Field_Hint;
|
field_hint : Field_Hint;
|
||||||
|
|
||||||
if hint.ident_value == "position" {
|
if lookup_hint(hint.ident_value) == .Position {
|
||||||
// @Incomplete(nb): Should be a lookup table somewhere
|
|
||||||
field_hint.kind = .Position;
|
field_hint.kind = .Position;
|
||||||
} else if starts_with(hint.ident_value, "target") {
|
} else if lookup_hint(hint.ident_value) == .Target {
|
||||||
// @Incomplete(nb): Should be a lookup table somewhere
|
|
||||||
index_str : string;
|
index_str : string;
|
||||||
index_str.data = *hint.ident_value.data[7];
|
index_str.data = *hint.ident_value.data[7];
|
||||||
index_str.count = 1;
|
index_str.count = 1;
|
||||||
@@ -493,10 +496,10 @@ generate_output_data :: (ctx : *Compiler_Context) {
|
|||||||
}
|
}
|
||||||
array_add(*field.hints, field_hint);
|
array_add(*field.hints, field_hint);
|
||||||
}
|
}
|
||||||
|
|
||||||
ctx.pixel_entry_point.return_value = field;
|
ctx.pixel_entry_point.return_value = field;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
compile_file :: (ctx : *Compiler_Context, path : string, allocator : Allocator = temp) {
|
compile_file :: (ctx : *Compiler_Context, path : string, allocator : Allocator = temp) {
|
||||||
new_context := context;
|
new_context := context;
|
||||||
|
|||||||
4 test/arithmetic_parens.ink (new file)
@@ -0,0 +1,4 @@
|
|||||||
|
vertex main :: () {
|
||||||
|
v : float2;
|
||||||
|
v.x = (2.0 + ((4.0 - 2.0) * 1.5)) * 3.0;
|
||||||
|
}
|
||||||
@@ -1,5 +1,6 @@
|
|||||||
vertex main :: () -> float4 @position {
|
vertex main :: () -> float4 @position {
|
||||||
arr : [16].float4;
|
arr : [16].float4;
|
||||||
arr[0] = float4(1,1,1);
|
arr[0] = float4(1, 1, 1, 1);
|
||||||
return arr[0];
|
pos := arr[1];
|
||||||
|
return pos;
|
||||||
}
|
}
|
||||||
|
|||||||
5 test/assign_temporary.ink (new file)
@@ -0,0 +1,5 @@
|
|||||||
|
vertex main :: () {
|
||||||
|
a : float2;
|
||||||
|
b : float2;
|
||||||
|
(a + b).x = 2.0;
|
||||||
|
}
|
||||||
10 test/bad_double_access.ink (new file)
@@ -0,0 +1,10 @@
|
|||||||
|
P :: struct {
|
||||||
|
v : float2;
|
||||||
|
}
|
||||||
|
|
||||||
|
vertex main :: () {
|
||||||
|
p : P;
|
||||||
|
p.v.x.y = 2.0;
|
||||||
|
// v : float2;
|
||||||
|
// v.x.y.z = 2.0;
|
||||||
|
}
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
properties {
|
properties :: Constant_Buffer @properties {
|
||||||
color : float4;
|
color : float4;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
11 test/buffers.ink (new file)
@@ -0,0 +1,11 @@
|
|||||||
|
property_buffer :: Buffer {
|
||||||
|
color : float4;
|
||||||
|
}
|
||||||
|
|
||||||
|
const_buffer :: Constant_Buffer {
|
||||||
|
color : float4;
|
||||||
|
}
|
||||||
|
|
||||||
|
pixel main :: (index : int) {
|
||||||
|
return property_buffer[index].color;
|
||||||
|
}
|
||||||
@@ -1,7 +1,6 @@
|
|||||||
scope (global) [
|
scope (global) [
|
||||||
[vertex__vs_main] : ()
|
[vertex__vs_main] : ()
|
||||||
scope (vertex__vs_main) [
|
scope (vertex__vs_main) [
|
||||||
[i] : int
|
[v] : float2
|
||||||
[x] : int
|
|
||||||
]
|
]
|
||||||
]
|
]
|
||||||
7 test/check/arrays.golden Normal file
@@ -0,0 +1,7 @@
+scope (global) [
+    [vertex__vs_main] : () -> float4
+    scope (vertex__vs_main) [
+        [pos] : float4
+        [arr] : [16].float4
+    ]
+]

6 test/check/bad_double_access.golden Normal file
@@ -0,0 +1,6 @@
+test/bad_double_access.ink:7,4: error: Attempting to access a field on a primitive type 'float'.
+    p.v.x.
+        ^
+declaration:
+    x: float
+

10 test/check/double_access.golden Normal file
@@ -0,0 +1,10 @@
+scope (global) [
+    [vertex__vs_main] : ()
+    [p] : {v : float2}
+    scope (p) [
+        [v] : float2
+    ]
+    scope (vertex__vs_main) [
+        [x] : float
+    ]
+]

10 test/check/for_i_loop.golden Normal file
@@ -0,0 +1,10 @@
+scope (global) [
+    [vertex__vs_main] : ()
+    scope (vertex__vs_main) [
+        [x] : int
+        scope (block) [
+            [i] : int
+            scope (block) []
+        ]
+    ]
+]

4 test/check/for_index_outside.golden Normal file
@@ -0,0 +1,4 @@
+test/for_index_outside.ink:6,0: error: Use of undeclared symbol 'i'
+    i += 1;
+    ^
+
13 test/check/hinted_cbuffer.golden Normal file
@@ -0,0 +1,13 @@
+scope (global) [
+    [vertex__vs_main] : (pos : float4) -> float4
+    [props] : {projection : float4x4, view : float4x4}
+    scope (props) [
+        [projection] : float4x4
+        [view] : float4x4
+    ]
+    scope (vertex__vs_main) [
+        [pos] : float4
+        [mv] : float4
+        [mvp] : float4
+    ]
+]

8 test/check/if_def_block.golden Normal file
@@ -0,0 +1,8 @@
+scope (global) [
+    [pixel__ps_main] : ()
+    scope (pixel__ps_main) [ scope (block) [
+        [alpha_color] : float4
+        [f] : float
+    ]
+    ]
+]

4 test/check/if_def_expression.golden Normal file
@@ -0,0 +1,4 @@
+scope (global) [
+    [vertex__vs_console_main] : ()
+    scope (vertex__vs_console_main) []
+]

8 test/check/ifdefs.golden Normal file
@@ -0,0 +1,8 @@
+scope (global) [
+    [vertex__vs_skinning_main] : ()
+    [pixel__ps_main] : ()
+    scope (vertex__vs_skinning_main) [
+        [x] : float
+    ]
+    scope (pixel__ps_main) []
+]
@@ -2,5 +2,8 @@ scope (global) [
     [vertex__vs_main] : (pos : float3) -> float4
     scope (vertex__vs_main) [
         [pos] : float3
+        scope (block) [ scope (block) []
+        scope (block) []
+        ]
     ]
 ]

8 test/check/rvalue_binary.golden Normal file
@@ -0,0 +1,8 @@
+scope (global) [
+    [vertex__vs_main] : ()
+    scope (vertex__vs_main) [
+        [b] : float2
+        [x] : float
+        [a] : float2
+    ]
+]

@@ -2,5 +2,8 @@ scope (global) [
     [vertex__vs_main] : (pos : float3) -> float4
     scope (vertex__vs_main) [
         [pos] : float3
+        scope (block) []
+        scope (block) []
+        scope (block) []
     ]
 ]

@@ -2,5 +2,6 @@ scope (global) [
     [vertex__vs_main] : (pos : float3) -> float4
     scope (vertex__vs_main) [
         [pos] : float3
+        scope (block) []
     ]
 ]

@@ -2,5 +2,7 @@ scope (global) [
     [vertex__vs_main] : (pos : float3) -> float4
     scope (vertex__vs_main) [
         [pos] : float3
+        scope (block) []
+        scope (block) []
     ]
 ]

6 test/check/temp_access.golden Normal file
@@ -0,0 +1,6 @@
+test/temp_access.ink:5,10: error: Cannot assign to an lvalue.
+    (a + b).x = 2.0;
+    ^^^^^^^^^^^
+
+
+
@@ -7,15 +7,15 @@
     result : float4 = float4(1.0, foo * res, 0.0, 1.0);
                       ^
  Possible overloads:
-    float4 :: (float, float, float, float); (test/wrong_multiply.ink:0)
+    float4 :: (float, float, float, float)
-    float4 :: (float2, float2); (test/wrong_multiply.ink:0)
+    float4 :: (float2, float2)
-    float4 :: (float2, float, float); (test/wrong_multiply.ink:0)
+    float4 :: (float2, float, float)
-    float4 :: (float, float2, float); (test/wrong_multiply.ink:0)
+    float4 :: (float, float2, float)
-    float4 :: (float, float, float2); (test/wrong_multiply.ink:0)
+    float4 :: (float, float, float2)
-    float4 :: (float, float3); (test/wrong_multiply.ink:0)
+    float4 :: (float, float3)
-    float4 :: (float3, float); (test/wrong_multiply.ink:0)
+    float4 :: (float3, float)
-    float4 :: (float4); (test/wrong_multiply.ink:0)
+    float4 :: (float4)
-    float4 :: (float); (test/wrong_multiply.ink:0)
+    float4 :: (float)

 test/wrong_multiply.ink:4,34: error: Type mismatch. Expected float got float2
  found:

30 test/check/wrong_type_for_function.golden Normal file
@@ -0,0 +1,30 @@
+test/wrong_type_for_function.ink:11,17: error: Procedure call did not match any of the possible overloads for 'float4'
+ found:
+    color : float4 = float4(y, 1.0, 1.0, 1.0);
+                     ^^^^^^
+
+ While matching argument 1 in function call.
+    color : float4 = float4(y, 1.0, 1.0, 1.0);
+                            ^
+ Possible overloads:
+    float4 :: (float, float, float, float)
+    float4 :: (float2, float2)
+    float4 :: (float2, float, float)
+    float4 :: (float, float2, float)
+    float4 :: (float, float, float2)
+    float4 :: (float, float3)
+    float4 :: (float3, float)
+    float4 :: (float4)
+    float4 :: (float)
+
+test/wrong_type_for_function.ink:11,24: error: Type mismatch. Expected float got float2
+ found:
+    color : float4 = float4(y, 1.0, 1.0, 1.0);
+                            ^
+expected:
+    float
+
+got:
+    y : float2 = foo()
+
+
47 test/check_all.suite Normal file
@@ -0,0 +1,47 @@
+test/assign_arithmetic_expression.ink check
+test/arithmetic_parens.ink check
+test/basic_property_and_return_value.ink check
+test/builtin_types.ink check
+test/complicated_computation.ink check
+test/constant_buffer.ink check
+test/bad_double_access.ink check
+test/double_access.ink check
+test/empty_struct.ink check
+test/empty_vertex_main.ink check
+test/empty_vertex_main_with_position_parameter.ink check
+test/field_assignment.ink check
+test/for_i_loop.ink check
+test/function_call.ink check
+test/function_call_out_of_order_declaration.ink check
+test/function_call_return.ink check
+test/functions_with_same_name.ink check
+test/function_with_int_return.ink check
+test/if_cond_assign.ink check
+test/ifdefs.ink check
+test/if_def_block.ink check
+test/if_def_expression.ink check
+test/inferred_types.ink check
+test/multiple_functions.ink check
+test/multiple_semicolons_everywhere.ink check
+test/nested_if.ink check
+test/non_bool_cond.ink check
+test/pass_and_access_struct_fields_in_functions.ink check
+test/passthrough.ink check
+test/redeclared_variable.ink check
+test/rvalue_binary.ink check
+test/simple_else_if.ink check
+test/simple_if_else.ink check
+test/simple_if.ink check
+test/simple_struct_access.ink check
+test/struct_access_primitive_type.ink check
+test/struct_within_struct.ink check
+test/temp_access.ink check
+test/type_as_variable_name.ink check
+test/unary.ink check
+test/undeclared_function.ink check
+test/undeclared_symbol.ink check
+test/unknown_overload.ink check
+test/use_builtin_functions.ink check
+test/wrong_argument_count.ink check
+test/wrong_multiply.ink check
+test/wrong_type_for_function.ink check
6 test/codegen/arithmetic_parens.golden Normal file
@@ -0,0 +1,6 @@
+void vs_main()
+{
+    float2 v;
+    v.x = (2.0f + ((4.0f - 2.0f) * 1.5f)) * 3.0f;
+}
+

6 test/codegen/arrays.golden Normal file
@@ -0,0 +1,6 @@
+float4 vs_main() : SV_POSITION
+{
+    float4 arr[16];
+    return arr[0];
+}
+

@@ -1,5 +1,5 @@
 void vs_main()
 {
-    float x = (2.0f + 5.0f);
+    float x = 2.0f + 5.0f;
 }

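This hunk and the codegen hunks further down all change the same way: the generated HLSL no longer wraps a statement-level expression in an extra pair of parentheses, while nested sub-expressions stay parenthesized (compare the arithmetic_parens and rvalue_binary goldens, and the unchanged inner parens in "(y * y) + x" below). The emitter itself is not part of this diff, so the following is only a hedged, self-contained illustration of that rule; Expr, emit and every other name here are invented for the example:

#import "Basic";

// Hedged illustration only, not the compiler's emitter: a binary expression
// is wrapped in parentheses when it is nested inside another expression,
// and left bare when it is the whole right-hand side of a statement.
Expr :: struct {
    op    : string;   // "" marks a leaf
    value : string;   // leaf text, e.g. a variable name
    left  : *Expr;
    right : *Expr;
}

emit :: (e : *Expr, builder : *String_Builder, nested := false) {
    if e.op == "" {
        append(builder, e.value);
        return;
    }

    if nested  append(builder, "(");
    emit(e.left,  builder, nested = true);
    print_to_builder(builder, " % ", e.op);
    emit(e.right, builder, nested = true);
    if nested  append(builder, ")");
}

main :: () {
    x_leaf : Expr;  x_leaf.value = "x";
    y_leaf : Expr;  y_leaf.value = "y";

    product : Expr;
    product.op    = "*";
    product.left  = *y_leaf;
    product.right = *y_leaf;

    sum : Expr;
    sum.op    = "+";
    sum.left  = *product;
    sum.right = *x_leaf;

    builder : String_Builder;
    append(*builder, "float z = ");
    emit(*sum, *builder);             // statement level: no outer parentheses
    append(*builder, ";");
    print("%\n", builder_to_string(*builder));   // float z = (y * y) + x;
}

Running it prints "float z = (y * y) + x;", which matches the shape of the updated goldens; the real emitter may of course arrive at the same output differently.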
@@ -1,9 +1,8 @@
-cbuffer __PROPERTIES : register(b0)
+cbuffer properties : register(b0)
 {
-    float4 __PROPERTIES__color;
+    float4 color;
 }

-
 float3 vs_main(float3 pos : POSITION) : SV_POSITION
 {
     return pos;
@@ -11,6 +10,6 @@ float3 vs_main(float3 pos : POSITION) : SV_POSITION

 float4 ps_main() : SV_TARGET
 {
-    return __PROPERTIES__color;
+    return properties.color;
 }


@@ -21,8 +21,8 @@ void vs_main()
     v4 = float4(2.0f, 2.0f, 2.0f, 2.0f);
     v2.x = 2.0f;
     v2.y = 2.0f;
-    float p = (v2.x + v3.z);
+    float p = v2.x + v3.z;
-    float q = (v4.w + v2.x);
+    float q = v4.w + v2.x;
     float4x4 m;
 }


@@ -2,6 +2,6 @@ void vs_main()
 {
     float x = 5.0f;
     float y = 3000.0f;
-    float z = ((y * y) + x);
+    float z = (y * y) + x;
 }

13 test/codegen/hinted_cbuffer.golden Normal file
@@ -0,0 +1,13 @@
+cbuffer props : register(b0)
+{
+    float4x4 projection;
+    float4x4 view;
+}
+
+float4 vs_main(float4 pos : POSITION) : SV_POSITION
+{
+    float4 mv = mul(props.view, pos);
+    float4 mvp = mul(props.projection, mv);
+    return mvp;
+}
+

7 test/codegen/if_def_block.golden Normal file
@@ -0,0 +1,7 @@
+void ps_main()
+{
+
+    float4 alpha_color = float4(1, 0, 0, 1);
+    float f = 2.0f;
+}
+

4 test/codegen/if_def_expression.golden Normal file
@@ -0,0 +1,4 @@
+void vs_console_main()
+{
+}
+

9 test/codegen/ifdefs.golden Normal file
@@ -0,0 +1,9 @@
+void ps_main()
+{
+}
+
+void vs_skinning_main()
+{
+    float x = 5.0f;
+}
+
@@ -8,7 +8,7 @@ int foo()

 float bar()
 {
-    return (1235.0f * 500);
+    return 1235.0f * 500;
 }

 void vs_main()

@@ -7,7 +7,7 @@ struct Foo

 float foo(Foo f)
 {
-    return (f.some_data * 2.0f);
+    return f.some_data * 2.0f;
 }

 void vs_main()

7 test/codegen/rvalue_binary.golden Normal file
@@ -0,0 +1,7 @@
+void vs_main()
+{
+    float2 a;
+    float2 b;
+    float x = (a + b).x;
+}
+
@@ -1,4 +1,5 @@
 test/assign_arithmetic_expression.ink codegen
+test/arithmetic_parens.ink codegen
 test/basic_property_and_return_value.ink codegen
 test/builtin_types.ink codegen
 test/complicated_computation.ink codegen
@@ -10,14 +11,16 @@ test/field_assignment.ink codegen
 test/function_call.ink codegen
 test/function_call_out_of_order_declaration.ink codegen
 test/function_call_return.ink codegen
+test/ifdefs.ink codegen
+test/if_def_block.ink codegen
+test/if_def_expression.ink codegen
 test/inferred_types.ink codegen
-test/meta_block.ink codegen
 test/multiple_functions.ink codegen
 test/multiple_semicolons_everywhere.ink codegen
 test/nested_if.ink codegen
 test/pass_and_access_struct_fields_in_functions.ink codegen
 test/passthrough.ink codegen
-test/property_rename.ink codegen
+test/rvalue_binary.ink codegen
 test/simple_else_if.ink codegen
 test/simple_if_else.ink codegen
 test/simple_if.ink codegen

@@ -1,4 +1,5 @@
 test/assign_arithmetic_expression.ink compile
+test/arithmetic_parens.ink compile
 test/basic_property_and_return_value.ink compile
 test/builtin_types.ink compile
 test/complicated_computation.ink compile
@@ -11,12 +12,15 @@ test/function_call.ink compile
 test/function_call_out_of_order_declaration.ink compile
 test/function_call_return.ink compile
 test/functions_with_same_name.ink compile
+test/ifdefs.ink compile
+test/if_def_block.ink compile
+test/if_def_expression.ink compile
 test/inferred_types.ink compile
-test/meta_block.ink compile
 test/multiple_functions.ink compile
 test/multiple_semicolons_everywhere.ink compile
 test/pass_and_access_struct_fields_in_functions.ink compile
 test/passthrough.ink compile
+test/rvalue_binary.ink compile
 test/simple_else_if.ink compile
 test/simple_if_else.ink compile
 test/simple_if.ink compile
1 test/compiled/assign_arithmetic_expression.golden Normal file
@@ -0,0 +1 @@
+[vertex entry point] - vs_main

13 test/compiled/hinted_cbuffer.golden Normal file
@@ -0,0 +1,13 @@
+[vertex entry point] - vs_main
+[constant_buffer] - props - 0 (@properties)
+[field] - projection : struct : float4x4 {m11 : float,
+m12 : float, m13 : float, m14 : float, m21 : float,
+m22 : float, m23 : float, m24 : float, m31 : float,
+m32 : float, m33 : float, m34 : float, m41 : float,
+m42 : float, m43 : float, m44 : float} (@projection)
+[field] - view : struct : float4x4 {m11 : float,
+m12 : float, m13 : float, m14 : float, m21 : float,
+m22 : float, m23 : float, m24 : float, m31 : float,
+m32 : float, m33 : float, m34 : float, m41 : float,
+m42 : float, m43 : float, m44 : float} (@view)
+
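The compiled golden above spells a float4x4 out as sixteen scalar fields named by row and column. Purely as a standalone sanity check (this snippet is not taken from the compiler), the same m11 through m44 naming can be reproduced like so:

#import "Basic";

main :: () {
    // Prints the sixteen float4x4 member names listed in the golden above.
    for row : 1..4 {
        for col : 1..4 {
            print("m%1%2 : float\n", row, col);
        }
    }
}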
1 test/compiled/if_def_block.golden Normal file
@@ -0,0 +1 @@
+[pixel entry point] - ps_main

1 test/compiled/if_def_expression.golden Normal file
@@ -0,0 +1 @@
+[vertex entry point] - vs_console_main

2 test/compiled/ifdefs.golden Normal file
@@ -0,0 +1,2 @@
+[vertex entry point] - vs_skinning_main
+[pixel entry point] - ps_main

@@ -1,4 +1,4 @@
-camera :: constant_buffer {
+camera :: Constant_Buffer {
     projection : float4x4;
     view : float4x4;
 }

@@ -1,4 +1,4 @@
-p :: properties {
+p :: Constant_Buffer {
     v : float2;
 }

7 test/for_index_outside.ink Normal file
@@ -0,0 +1,7 @@
+vertex main :: () {
+    for i : 0..10 {
+        x : float;
+    }
+
+    i += 1;
+}

10 test/hinted_cbuffer.ink Normal file
@@ -0,0 +1,10 @@
+props :: constant_buffer @properties {
+    projection : float4x4 @projection;
+    view : float4x4 @view;
+}
+
+vertex main :: (pos : float4 @position) -> float4 @position {
+    mv : float4 = mul(props.view, pos);
+    mvp : float4 = mul(props.projection, mv);
+    return mvp;
+}

11 test/if_def_block.ink Normal file
@@ -0,0 +1,11 @@
+#add_define Alpha
+
+pixel main :: () {
+    #if Env.Alpha {
+        alpha_color := float4(1, 0, 0, 1);
+        f := 2.0;
+    } else {
+        color := float3(0, 0, 0);
+        g := 5.0;
+    }
+}
Some files were not shown because too many files have changed in this diff.