diff --git a/CMakeLists.txt b/CMakeLists.txt
index d435092723..0aad51c7bc 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -576,7 +576,8 @@ set(ZIG_STD_FILES
"unicode.zig"
"zig/ast.zig"
"zig/index.zig"
- "zig/parser.zig"
+ "zig/parse.zig"
+ "zig/render.zig"
"zig/tokenizer.zig"
)
diff --git a/README.md b/README.md
index 1f23e133f8..552b784a50 100644
--- a/README.md
+++ b/README.md
@@ -1,9 +1,9 @@
-<img src="http://ziglang.org/zig-logo.svg" alt="ZIG">
+<img src="https://ziglang.org/zig-logo.svg" alt="ZIG">
A programming language designed for robustness, optimality, and
clarity.
-[ziglang.org](http://ziglang.org)
+[ziglang.org](https://ziglang.org)
## Feature Highlights
diff --git a/doc/langref.html.in b/doc/langref.html.in
index 644cc5b9e0..cdfe7d9228 100644
--- a/doc/langref.html.in
+++ b/doc/langref.html.in
@@ -4485,17 +4485,58 @@ mem.set(u8, dest, c);
If no overflow or underflow occurs, returns false.
{#header_close#}
+ {#header_open|@newStackCall#}
+ @newStackCall(new_stack: []u8, function: var, args: ...) -> var
+
+ This calls a function, in the same way that invoking an expression with parentheses does. However,
+ instead of using the same stack as the caller, the function uses the stack provided in the new_stack
+ parameter.
+
+ {#code_begin|test#}
+const std = @import("std");
+const assert = std.debug.assert;
+
+var new_stack_bytes: [1024]u8 = undefined;
+
+test "calling a function with a new stack" {
+ const arg = 1234;
+
+ const a = @newStackCall(new_stack_bytes[0..512], targetFunction, arg);
+ const b = @newStackCall(new_stack_bytes[512..], targetFunction, arg);
+ _ = targetFunction(arg);
+
+ assert(arg == 1234);
+ assert(a < b);
+}
+
+fn targetFunction(x: i32) usize {
+ assert(x == 1234);
+
+ var local_variable: i32 = 42;
+ const ptr = &local_variable;
+ *ptr += 1;
+
+ assert(local_variable == 43);
+ return @ptrToInt(ptr);
+}
+ {#code_end#}
+ {#header_close#}
{#header_open|@noInlineCall#}
@noInlineCall(function: var, args: ...) -> var
This calls a function, in the same way that invoking an expression with parentheses does:
- const assert = @import("std").debug.assert;
+ {#code_begin|test#}
+const assert = @import("std").debug.assert;
+
test "noinline function call" {
assert(@noInlineCall(add, 3, 9) == 12);
}
-fn add(a: i32, b: i32) -> i32 { a + b }
+fn add(a: i32, b: i32) i32 {
+ return a + b;
+}
+ {#code_end#}
Unlike a normal function call, however, @noInlineCall guarantees that the call
will not be inlined. If the call must be inlined, a compile error is emitted.
@@ -6453,7 +6494,7 @@ hljs.registerLanguage("zig", function(t) {
a = t.IR + "\\s*\\(",
c = {
keyword: "const align var extern stdcallcc nakedcc volatile export pub noalias inline struct packed enum union break return try catch test continue unreachable comptime and or asm defer errdefer if else switch while for fn use bool f32 f64 void type noreturn error i8 u8 i16 u16 i32 u32 i64 u64 isize usize i8w u8w i16w i32w u32w i64w u64w isizew usizew c_short c_ushort c_int c_uint c_long c_ulong c_longlong c_ulonglong",
- built_in: "atomicLoad breakpoint returnAddress frameAddress fieldParentPtr setFloatMode IntType OpaqueType compileError compileLog setCold setRuntimeSafety setEvalBranchQuota offsetOf memcpy inlineCall setGlobalLinkage setGlobalSection divTrunc divFloor enumTagName intToPtr ptrToInt panic canImplicitCast ptrCast bitCast rem mod memset sizeOf alignOf alignCast maxValue minValue memberCount memberName memberType typeOf addWithOverflow subWithOverflow mulWithOverflow shlWithOverflow shlExact shrExact cInclude cDefine cUndef ctz clz import cImport errorName embedFile cmpxchgStrong cmpxchgWeak fence divExact truncate atomicRmw sqrt field typeInfo",
+ built_in: "atomicLoad breakpoint returnAddress frameAddress fieldParentPtr setFloatMode IntType OpaqueType compileError compileLog setCold setRuntimeSafety setEvalBranchQuota offsetOf memcpy inlineCall setGlobalLinkage setGlobalSection divTrunc divFloor enumTagName intToPtr ptrToInt panic canImplicitCast ptrCast bitCast rem mod memset sizeOf alignOf alignCast maxValue minValue memberCount memberName memberType typeOf addWithOverflow subWithOverflow mulWithOverflow shlWithOverflow shlExact shrExact cInclude cDefine cUndef ctz clz import cImport errorName embedFile cmpxchgStrong cmpxchgWeak fence divExact truncate atomicRmw sqrt field typeInfo newStackCall",
literal: "true false null undefined"
},
n = [e, t.CLCM, t.CBCM, s, r];
diff --git a/src-self-hosted/main.zig b/src-self-hosted/main.zig
index c1a6bbe99a..22f49e80d9 100644
--- a/src-self-hosted/main.zig
+++ b/src-self-hosted/main.zig
@@ -637,14 +637,12 @@ const usage_fmt =
\\
\\Options:
\\ --help Print this help and exit
- \\ --keep-backups Retain backup entries for every file
\\
\\
;
const args_fmt_spec = []Flag {
Flag.Bool("--help"),
- Flag.Bool("--keep-backups"),
};
fn cmdFmt(allocator: &Allocator, args: []const []const u8) !void {
@@ -671,34 +669,46 @@ fn cmdFmt(allocator: &Allocator, args: []const []const u8) !void {
};
defer allocator.free(source_code);
- var tokenizer = std.zig.Tokenizer.init(source_code);
- var parser = std.zig.Parser.init(&tokenizer, allocator, file_path);
- defer parser.deinit();
-
- var tree = parser.parse() catch |err| {
+ var tree = std.zig.parse(allocator, source_code) catch |err| {
try stderr.print("error parsing file '{}': {}\n", file_path, err);
continue;
};
defer tree.deinit();
- var original_file_backup = try Buffer.init(allocator, file_path);
- defer original_file_backup.deinit();
- try original_file_backup.append(".backup");
- try os.rename(allocator, file_path, original_file_backup.toSliceConst());
+ var error_it = tree.errors.iterator(0);
+ while (error_it.next()) |parse_error| {
+ const token = tree.tokens.at(parse_error.loc());
+ const loc = tree.tokenLocation(0, parse_error.loc());
+ try stderr.print("{}:{}:{}: error: ", file_path, loc.line + 1, loc.column + 1);
+ try tree.renderError(parse_error, stderr);
+ try stderr.print("\n{}\n", source_code[loc.line_start..loc.line_end]);
+ {
+ var i: usize = 0;
+ while (i < loc.column) : (i += 1) {
+ try stderr.write(" ");
+ }
+ }
+ {
+ const caret_count = token.end - token.start;
+ var i: usize = 0;
+ while (i < caret_count) : (i += 1) {
+ try stderr.write("~");
+ }
+ }
+ try stderr.write("\n");
+ }
+ if (tree.errors.len != 0) {
+ continue;
+ }
try stderr.print("{}\n", file_path);
- // TODO: BufferedAtomicFile has some access problems.
- var out_file = try os.File.openWrite(allocator, file_path);
- defer out_file.close();
+ const baf = try io.BufferedAtomicFile.create(allocator, file_path);
+ defer baf.destroy();
- var out_file_stream = io.FileOutStream.init(&out_file);
- try parser.renderSource(out_file_stream.stream, tree.root_node);
-
- if (!flags.present("keep-backups")) {
- try os.deleteFile(allocator, original_file_backup.toSliceConst());
- }
+ try std.zig.render(allocator, baf.stream(), &tree);
+ try baf.finish();
}
}
diff --git a/src-self-hosted/module.zig b/src-self-hosted/module.zig
index eec30749e2..ccbd683bdc 100644
--- a/src-self-hosted/module.zig
+++ b/src-self-hosted/module.zig
@@ -8,9 +8,7 @@ const c = @import("c.zig");
const builtin = @import("builtin");
const Target = @import("target.zig").Target;
const warn = std.debug.warn;
-const Tokenizer = std.zig.Tokenizer;
const Token = std.zig.Token;
-const Parser = std.zig.Parser;
const ArrayList = std.ArrayList;
pub const Module = struct {
@@ -246,34 +244,17 @@ pub const Module = struct {
warn("{}", source_code);
- warn("====tokenization:====\n");
- {
- var tokenizer = Tokenizer.init(source_code);
- while (true) {
- const token = tokenizer.next();
- tokenizer.dump(token);
- if (token.id == Token.Id.Eof) {
- break;
- }
- }
- }
-
warn("====parse:====\n");
- var tokenizer = Tokenizer.init(source_code);
- var parser = Parser.init(&tokenizer, self.allocator, root_src_real_path);
- defer parser.deinit();
-
- var tree = try parser.parse();
+ var tree = try std.zig.parse(self.allocator, source_code);
defer tree.deinit();
var stderr_file = try std.io.getStdErr();
var stderr_file_out_stream = std.io.FileOutStream.init(&stderr_file);
const out_stream = &stderr_file_out_stream.stream;
- try parser.renderAst(out_stream, tree.root_node);
warn("====fmt:====\n");
- try parser.renderSource(out_stream, tree.root_node);
+ try std.zig.render(self.allocator, out_stream, &tree);
warn("====ir:====\n");
warn("TODO\n\n");
diff --git a/src/all_types.hpp b/src/all_types.hpp
index 6f48c4ed36..9c156fb58b 100644
--- a/src/all_types.hpp
+++ b/src/all_types.hpp
@@ -1345,6 +1345,7 @@ enum BuiltinFnId {
BuiltinFnIdOffsetOf,
BuiltinFnIdInlineCall,
BuiltinFnIdNoInlineCall,
+ BuiltinFnIdNewStackCall,
BuiltinFnIdTypeId,
BuiltinFnIdShlExact,
BuiltinFnIdShrExact,
@@ -1661,8 +1662,13 @@ struct CodeGen {
LLVMValueRef coro_alloc_helper_fn_val;
LLVMValueRef merge_err_ret_traces_fn_val;
LLVMValueRef add_error_return_trace_addr_fn_val;
+ LLVMValueRef stacksave_fn_val;
+ LLVMValueRef stackrestore_fn_val;
+ LLVMValueRef write_register_fn_val;
bool error_during_imports;
+ LLVMValueRef sp_md_node;
+
const char **clang_argv;
size_t clang_argv_len;
     ZigList<const char *> lib_dirs;
@@ -2285,6 +2291,7 @@ struct IrInstructionCall {
bool is_async;
IrInstruction *async_allocator;
+ IrInstruction *new_stack;
};
struct IrInstructionConst {
diff --git a/src/bigint.cpp b/src/bigint.cpp
index 64bc59e5cf..367ae79b6c 100644
--- a/src/bigint.cpp
+++ b/src/bigint.cpp
@@ -1425,7 +1425,7 @@ void bigint_shr(BigInt *dest, const BigInt *op1, const BigInt *op2) {
uint64_t digit = op1_digits[op_digit_index];
size_t dest_digit_index = op_digit_index - digit_shift_count;
dest->data.digits[dest_digit_index] = carry | (digit >> leftover_shift_count);
- carry = digit << leftover_shift_count;
+ carry = digit << (64 - leftover_shift_count);
if (dest_digit_index == 0) { break; }
op_digit_index -= 1;
diff --git a/src/codegen.cpp b/src/codegen.cpp
index 4e58f86d4b..f1e102392a 100644
--- a/src/codegen.cpp
+++ b/src/codegen.cpp
@@ -938,6 +938,53 @@ static LLVMValueRef get_memcpy_fn_val(CodeGen *g) {
return g->memcpy_fn_val;
}
+static LLVMValueRef get_stacksave_fn_val(CodeGen *g) {
+ if (g->stacksave_fn_val)
+ return g->stacksave_fn_val;
+
+ // declare i8* @llvm.stacksave()
+
+ LLVMTypeRef fn_type = LLVMFunctionType(LLVMPointerType(LLVMInt8Type(), 0), nullptr, 0, false);
+ g->stacksave_fn_val = LLVMAddFunction(g->module, "llvm.stacksave", fn_type);
+ assert(LLVMGetIntrinsicID(g->stacksave_fn_val));
+
+ return g->stacksave_fn_val;
+}
+
+static LLVMValueRef get_stackrestore_fn_val(CodeGen *g) {
+ if (g->stackrestore_fn_val)
+ return g->stackrestore_fn_val;
+
+ // declare void @llvm.stackrestore(i8* %ptr)
+
+ LLVMTypeRef param_type = LLVMPointerType(LLVMInt8Type(), 0);
+    LLVMTypeRef fn_type = LLVMFunctionType(LLVMVoidType(), &param_type, 1, false);
+ g->stackrestore_fn_val = LLVMAddFunction(g->module, "llvm.stackrestore", fn_type);
+ assert(LLVMGetIntrinsicID(g->stackrestore_fn_val));
+
+ return g->stackrestore_fn_val;
+}
+
+static LLVMValueRef get_write_register_fn_val(CodeGen *g) {
+ if (g->write_register_fn_val)
+ return g->write_register_fn_val;
+
+ // declare void @llvm.write_register.i64(metadata, i64 @value)
+ // !0 = !{!"sp\00"}
+
+ LLVMTypeRef param_types[] = {
+ LLVMMetadataTypeInContext(LLVMGetGlobalContext()),
+ LLVMIntType(g->pointer_size_bytes * 8),
+ };
+
+ LLVMTypeRef fn_type = LLVMFunctionType(LLVMVoidType(), param_types, 2, false);
+ Buf *name = buf_sprintf("llvm.write_register.i%d", g->pointer_size_bytes * 8);
+ g->write_register_fn_val = LLVMAddFunction(g->module, buf_ptr(name), fn_type);
+ assert(LLVMGetIntrinsicID(g->write_register_fn_val));
+
+ return g->write_register_fn_val;
+}
+
static LLVMValueRef get_coro_destroy_fn_val(CodeGen *g) {
if (g->coro_destroy_fn_val)
return g->coro_destroy_fn_val;
@@ -2901,6 +2948,38 @@ static size_t get_async_err_code_arg_index(CodeGen *g, FnTypeId *fn_type_id) {
return 1 + get_async_allocator_arg_index(g, fn_type_id);
}
+
+static LLVMValueRef get_new_stack_addr(CodeGen *g, LLVMValueRef new_stack) {
+ LLVMValueRef ptr_field_ptr = LLVMBuildStructGEP(g->builder, new_stack, (unsigned)slice_ptr_index, "");
+ LLVMValueRef len_field_ptr = LLVMBuildStructGEP(g->builder, new_stack, (unsigned)slice_len_index, "");
+
+ LLVMValueRef ptr_value = gen_load_untyped(g, ptr_field_ptr, 0, false, "");
+ LLVMValueRef len_value = gen_load_untyped(g, len_field_ptr, 0, false, "");
+
+ LLVMValueRef ptr_addr = LLVMBuildPtrToInt(g->builder, ptr_value, LLVMTypeOf(len_value), "");
+ LLVMValueRef end_addr = LLVMBuildNUWAdd(g->builder, ptr_addr, len_value, "");
+ LLVMValueRef align_amt = LLVMConstInt(LLVMTypeOf(end_addr), get_abi_alignment(g, g->builtin_types.entry_usize), false);
+ LLVMValueRef align_adj = LLVMBuildURem(g->builder, end_addr, align_amt, "");
+ return LLVMBuildNUWSub(g->builder, end_addr, align_adj, "");
+}
+
+static void gen_set_stack_pointer(CodeGen *g, LLVMValueRef aligned_end_addr) {
+ LLVMValueRef write_register_fn_val = get_write_register_fn_val(g);
+
+ if (g->sp_md_node == nullptr) {
+ Buf *sp_reg_name = buf_create_from_str(arch_stack_pointer_register_name(&g->zig_target.arch));
+ LLVMValueRef str_node = LLVMMDString(buf_ptr(sp_reg_name), buf_len(sp_reg_name) + 1);
+ g->sp_md_node = LLVMMDNode(&str_node, 1);
+ }
+
+ LLVMValueRef params[] = {
+ g->sp_md_node,
+ aligned_end_addr,
+ };
+
+ LLVMBuildCall(g->builder, write_register_fn_val, params, 2, "");
+}
+
static LLVMValueRef ir_render_call(CodeGen *g, IrExecutable *executable, IrInstructionCall *instruction) {
LLVMValueRef fn_val;
TypeTableEntry *fn_type;
@@ -2967,8 +3046,23 @@ static LLVMValueRef ir_render_call(CodeGen *g, IrExecutable *executable, IrInstr
}
LLVMCallConv llvm_cc = get_llvm_cc(g, fn_type->data.fn.fn_type_id.cc);
- LLVMValueRef result = ZigLLVMBuildCall(g->builder, fn_val,
- gen_param_values, (unsigned)gen_param_index, llvm_cc, fn_inline, "");
+ LLVMValueRef result;
+
+ if (instruction->new_stack == nullptr) {
+ result = ZigLLVMBuildCall(g->builder, fn_val,
+ gen_param_values, (unsigned)gen_param_index, llvm_cc, fn_inline, "");
+ } else {
+ LLVMValueRef stacksave_fn_val = get_stacksave_fn_val(g);
+ LLVMValueRef stackrestore_fn_val = get_stackrestore_fn_val(g);
+
+ LLVMValueRef new_stack_addr = get_new_stack_addr(g, ir_llvm_value(g, instruction->new_stack));
+ LLVMValueRef old_stack_ref = LLVMBuildCall(g->builder, stacksave_fn_val, nullptr, 0, "");
+ gen_set_stack_pointer(g, new_stack_addr);
+ result = ZigLLVMBuildCall(g->builder, fn_val,
+ gen_param_values, (unsigned)gen_param_index, llvm_cc, fn_inline, "");
+ LLVMBuildCall(g->builder, stackrestore_fn_val, &old_stack_ref, 1, "");
+ }
+
for (size_t param_i = 0; param_i < fn_type_id->param_count; param_i += 1) {
FnGenParamInfo *gen_info = &fn_type->data.fn.gen_param_info[param_i];
@@ -6171,6 +6265,7 @@ static void define_builtin_fns(CodeGen *g) {
create_builtin_fn(g, BuiltinFnIdSqrt, "sqrt", 2);
create_builtin_fn(g, BuiltinFnIdInlineCall, "inlineCall", SIZE_MAX);
create_builtin_fn(g, BuiltinFnIdNoInlineCall, "noInlineCall", SIZE_MAX);
+ create_builtin_fn(g, BuiltinFnIdNewStackCall, "newStackCall", SIZE_MAX);
create_builtin_fn(g, BuiltinFnIdTypeId, "typeId", 1);
create_builtin_fn(g, BuiltinFnIdShlExact, "shlExact", 2);
create_builtin_fn(g, BuiltinFnIdShrExact, "shrExact", 2);
diff --git a/src/ir.cpp b/src/ir.cpp
index a069271af7..9691e9e635 100644
--- a/src/ir.cpp
+++ b/src/ir.cpp
@@ -1102,7 +1102,8 @@ static IrInstruction *ir_build_union_field_ptr_from(IrBuilder *irb, IrInstructio
static IrInstruction *ir_build_call(IrBuilder *irb, Scope *scope, AstNode *source_node,
FnTableEntry *fn_entry, IrInstruction *fn_ref, size_t arg_count, IrInstruction **args,
- bool is_comptime, FnInline fn_inline, bool is_async, IrInstruction *async_allocator)
+ bool is_comptime, FnInline fn_inline, bool is_async, IrInstruction *async_allocator,
+ IrInstruction *new_stack)
{
IrInstructionCall *call_instruction = ir_build_instruction(irb, scope, source_node);
call_instruction->fn_entry = fn_entry;
@@ -1113,6 +1114,7 @@ static IrInstruction *ir_build_call(IrBuilder *irb, Scope *scope, AstNode *sourc
call_instruction->arg_count = arg_count;
call_instruction->is_async = is_async;
call_instruction->async_allocator = async_allocator;
+ call_instruction->new_stack = new_stack;
if (fn_ref)
ir_ref_instruction(fn_ref, irb->current_basic_block);
@@ -1120,16 +1122,19 @@ static IrInstruction *ir_build_call(IrBuilder *irb, Scope *scope, AstNode *sourc
ir_ref_instruction(args[i], irb->current_basic_block);
if (async_allocator)
ir_ref_instruction(async_allocator, irb->current_basic_block);
+ if (new_stack != nullptr)
+ ir_ref_instruction(new_stack, irb->current_basic_block);
return &call_instruction->base;
}
static IrInstruction *ir_build_call_from(IrBuilder *irb, IrInstruction *old_instruction,
FnTableEntry *fn_entry, IrInstruction *fn_ref, size_t arg_count, IrInstruction **args,
- bool is_comptime, FnInline fn_inline, bool is_async, IrInstruction *async_allocator)
+ bool is_comptime, FnInline fn_inline, bool is_async, IrInstruction *async_allocator,
+ IrInstruction *new_stack)
{
IrInstruction *new_instruction = ir_build_call(irb, old_instruction->scope,
- old_instruction->source_node, fn_entry, fn_ref, arg_count, args, is_comptime, fn_inline, is_async, async_allocator);
+ old_instruction->source_node, fn_entry, fn_ref, arg_count, args, is_comptime, fn_inline, is_async, async_allocator, new_stack);
ir_link_new_instruction(new_instruction, old_instruction);
return new_instruction;
}
@@ -4303,7 +4308,37 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
}
FnInline fn_inline = (builtin_fn->id == BuiltinFnIdInlineCall) ? FnInlineAlways : FnInlineNever;
- IrInstruction *call = ir_build_call(irb, scope, node, nullptr, fn_ref, arg_count, args, false, fn_inline, false, nullptr);
+ IrInstruction *call = ir_build_call(irb, scope, node, nullptr, fn_ref, arg_count, args, false, fn_inline, false, nullptr, nullptr);
+ return ir_lval_wrap(irb, scope, call, lval);
+ }
+ case BuiltinFnIdNewStackCall:
+ {
+ if (node->data.fn_call_expr.params.length == 0) {
+ add_node_error(irb->codegen, node, buf_sprintf("expected at least 1 argument, found 0"));
+ return irb->codegen->invalid_instruction;
+ }
+
+ AstNode *new_stack_node = node->data.fn_call_expr.params.at(0);
+ IrInstruction *new_stack = ir_gen_node(irb, new_stack_node, scope);
+ if (new_stack == irb->codegen->invalid_instruction)
+ return new_stack;
+
+ AstNode *fn_ref_node = node->data.fn_call_expr.params.at(1);
+ IrInstruction *fn_ref = ir_gen_node(irb, fn_ref_node, scope);
+ if (fn_ref == irb->codegen->invalid_instruction)
+ return fn_ref;
+
+ size_t arg_count = node->data.fn_call_expr.params.length - 2;
+
+                IrInstruction **args = allocate<IrInstruction *>(arg_count);
+ for (size_t i = 0; i < arg_count; i += 1) {
+ AstNode *arg_node = node->data.fn_call_expr.params.at(i + 2);
+ args[i] = ir_gen_node(irb, arg_node, scope);
+ if (args[i] == irb->codegen->invalid_instruction)
+ return args[i];
+ }
+
+ IrInstruction *call = ir_build_call(irb, scope, node, nullptr, fn_ref, arg_count, args, false, FnInlineAuto, false, nullptr, new_stack);
return ir_lval_wrap(irb, scope, call, lval);
}
case BuiltinFnIdTypeId:
@@ -4513,7 +4548,7 @@ static IrInstruction *ir_gen_fn_call(IrBuilder *irb, Scope *scope, AstNode *node
}
}
- IrInstruction *fn_call = ir_build_call(irb, scope, node, nullptr, fn_ref, arg_count, args, false, FnInlineAuto, is_async, async_allocator);
+ IrInstruction *fn_call = ir_build_call(irb, scope, node, nullptr, fn_ref, arg_count, args, false, FnInlineAuto, is_async, async_allocator, nullptr);
return ir_lval_wrap(irb, scope, fn_call, lval);
}
@@ -6831,7 +6866,7 @@ bool ir_gen(CodeGen *codegen, AstNode *node, Scope *scope, IrExecutable *ir_exec
             IrInstruction **args = allocate<IrInstruction *>(arg_count);
args[0] = implicit_allocator_ptr; // self
args[1] = mem_slice; // old_mem
- ir_build_call(irb, scope, node, nullptr, free_fn, arg_count, args, false, FnInlineAuto, false, nullptr);
+ ir_build_call(irb, scope, node, nullptr, free_fn, arg_count, args, false, FnInlineAuto, false, nullptr, nullptr);
IrBasicBlock *resume_block = ir_create_basic_block(irb, scope, "Resume");
ir_build_cond_br(irb, scope, node, resume_awaiter, resume_block, irb->exec->coro_suspend_block, const_bool_false);
@@ -8692,6 +8727,10 @@ static void copy_const_val(ConstExprValue *dest, ConstExprValue *src, bool same_
*dest = *src;
if (!same_global_refs) {
dest->global_refs = global_refs;
+ if (dest->type->id == TypeTableEntryIdStruct) {
+        if (dest->type->id == TypeTableEntryIdStruct) {
+            dest->data.x_struct.fields = allocate_nonzero<ConstExprValue>(dest->type->data.structure.src_field_count);
+ memcpy(dest->data.x_struct.fields, src->data.x_struct.fields, sizeof(ConstExprValue) * dest->type->data.structure.src_field_count);
+ }
}
}
@@ -11676,7 +11715,8 @@ static TypeTableEntry *ir_analyze_instruction_decl_var(IrAnalyze *ira, IrInstruc
if (var->mem_slot_index != SIZE_MAX) {
assert(var->mem_slot_index < ira->exec_context.mem_slot_count);
ConstExprValue *mem_slot = &ira->exec_context.mem_slot_list[var->mem_slot_index];
- *mem_slot = casted_init_value->value;
+ copy_const_val(mem_slot, &casted_init_value->value,
+ !is_comptime_var || var->gen_is_const);
if (is_comptime_var || (var_class_requires_const && var->gen_is_const)) {
ir_build_const_from(ira, &decl_var_instruction->base);
@@ -11993,7 +12033,7 @@ static IrInstruction *ir_analyze_async_call(IrAnalyze *ira, IrInstructionCall *c
TypeTableEntry *async_return_type = get_error_union_type(ira->codegen, alloc_fn_error_set_type, promise_type);
IrInstruction *result = ir_build_call(&ira->new_irb, call_instruction->base.scope, call_instruction->base.source_node,
- fn_entry, fn_ref, arg_count, casted_args, false, FnInlineAuto, true, async_allocator_inst);
+ fn_entry, fn_ref, arg_count, casted_args, false, FnInlineAuto, true, async_allocator_inst, nullptr);
result->value.type = async_return_type;
return result;
}
@@ -12363,6 +12403,19 @@ static TypeTableEntry *ir_analyze_fn_call(IrAnalyze *ira, IrInstructionCall *cal
return ir_finish_anal(ira, return_type);
}
+ IrInstruction *casted_new_stack = nullptr;
+ if (call_instruction->new_stack != nullptr) {
+ TypeTableEntry *u8_ptr = get_pointer_to_type(ira->codegen, ira->codegen->builtin_types.entry_u8, false);
+ TypeTableEntry *u8_slice = get_slice_type(ira->codegen, u8_ptr);
+ IrInstruction *new_stack = call_instruction->new_stack->other;
+ if (type_is_invalid(new_stack->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ casted_new_stack = ir_implicit_cast(ira, new_stack, u8_slice);
+ if (type_is_invalid(casted_new_stack->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
+ }
+
if (fn_type->data.fn.is_generic) {
if (!fn_entry) {
ir_add_error(ira, call_instruction->fn_ref,
@@ -12589,7 +12642,7 @@ static TypeTableEntry *ir_analyze_fn_call(IrAnalyze *ira, IrInstructionCall *cal
assert(async_allocator_inst == nullptr);
IrInstruction *new_call_instruction = ir_build_call_from(&ira->new_irb, &call_instruction->base,
impl_fn, nullptr, impl_param_count, casted_args, false, fn_inline,
- call_instruction->is_async, nullptr);
+ call_instruction->is_async, nullptr, casted_new_stack);
ir_add_alloca(ira, new_call_instruction, return_type);
@@ -12680,7 +12733,7 @@ static TypeTableEntry *ir_analyze_fn_call(IrAnalyze *ira, IrInstructionCall *cal
IrInstruction *new_call_instruction = ir_build_call_from(&ira->new_irb, &call_instruction->base,
- fn_entry, fn_ref, call_param_count, casted_args, false, fn_inline, false, nullptr);
+ fn_entry, fn_ref, call_param_count, casted_args, false, fn_inline, false, nullptr, casted_new_stack);
ir_add_alloca(ira, new_call_instruction, return_type);
return ir_finish_anal(ira, return_type);
@@ -14715,7 +14768,7 @@ static IrInstruction *ir_analyze_union_tag(IrAnalyze *ira, IrInstruction *source
}
if (value->value.type->id != TypeTableEntryIdUnion) {
- ir_add_error(ira, source_instr,
+ ir_add_error(ira, value,
buf_sprintf("expected enum or union type, found '%s'", buf_ptr(&value->value.type->name)));
return ira->codegen->invalid_instruction;
}
@@ -18036,7 +18089,11 @@ static TypeTableEntry *ir_analyze_instruction_check_switch_prongs(IrAnalyze *ira
if (type_is_invalid(end_value->value.type))
return ira->codegen->builtin_types.entry_invalid;
- assert(start_value->value.type->id == TypeTableEntryIdEnum);
+ if (start_value->value.type->id != TypeTableEntryIdEnum) {
+ ir_add_error(ira, range->start, buf_sprintf("not an enum type"));
+ return ira->codegen->builtin_types.entry_invalid;
+ }
+
BigInt start_index;
bigint_init_bigint(&start_index, &start_value->value.data.x_enum_tag);
diff --git a/src/target.cpp b/src/target.cpp
index 5008b51a09..57970888fc 100644
--- a/src/target.cpp
+++ b/src/target.cpp
@@ -896,3 +896,65 @@ bool target_can_exec(const ZigTarget *host_target, const ZigTarget *guest_target
return false;
}
+
+const char *arch_stack_pointer_register_name(const ArchType *arch) {
+ switch (arch->arch) {
+ case ZigLLVM_UnknownArch:
+ zig_unreachable();
+ case ZigLLVM_x86:
+ return "sp";
+ case ZigLLVM_x86_64:
+ return "rsp";
+
+ case ZigLLVM_aarch64:
+ case ZigLLVM_arm:
+ case ZigLLVM_thumb:
+ case ZigLLVM_aarch64_be:
+ case ZigLLVM_amdgcn:
+ case ZigLLVM_amdil:
+ case ZigLLVM_amdil64:
+ case ZigLLVM_armeb:
+ case ZigLLVM_arc:
+ case ZigLLVM_avr:
+ case ZigLLVM_bpfeb:
+ case ZigLLVM_bpfel:
+ case ZigLLVM_hexagon:
+ case ZigLLVM_lanai:
+ case ZigLLVM_hsail:
+ case ZigLLVM_hsail64:
+ case ZigLLVM_kalimba:
+ case ZigLLVM_le32:
+ case ZigLLVM_le64:
+ case ZigLLVM_mips:
+ case ZigLLVM_mips64:
+ case ZigLLVM_mips64el:
+ case ZigLLVM_mipsel:
+ case ZigLLVM_msp430:
+ case ZigLLVM_nios2:
+ case ZigLLVM_nvptx:
+ case ZigLLVM_nvptx64:
+ case ZigLLVM_ppc64le:
+ case ZigLLVM_r600:
+ case ZigLLVM_renderscript32:
+ case ZigLLVM_renderscript64:
+ case ZigLLVM_riscv32:
+ case ZigLLVM_riscv64:
+ case ZigLLVM_shave:
+ case ZigLLVM_sparc:
+ case ZigLLVM_sparcel:
+ case ZigLLVM_sparcv9:
+ case ZigLLVM_spir:
+ case ZigLLVM_spir64:
+ case ZigLLVM_systemz:
+ case ZigLLVM_tce:
+ case ZigLLVM_tcele:
+ case ZigLLVM_thumbeb:
+ case ZigLLVM_wasm32:
+ case ZigLLVM_wasm64:
+ case ZigLLVM_xcore:
+ case ZigLLVM_ppc:
+ case ZigLLVM_ppc64:
+ zig_panic("TODO populate this table with stack pointer register name for this CPU architecture");
+ }
+ zig_unreachable();
+}
diff --git a/src/target.hpp b/src/target.hpp
index 614b0627d5..5a118f6d8d 100644
--- a/src/target.hpp
+++ b/src/target.hpp
@@ -77,6 +77,8 @@ size_t target_arch_count(void);
const ArchType *get_target_arch(size_t index);
void get_arch_name(char *out_str, const ArchType *arch);
+const char *arch_stack_pointer_register_name(const ArchType *arch);
+
size_t target_vendor_count(void);
ZigLLVM_VendorType get_target_vendor(size_t index);
diff --git a/std/buffer.zig b/std/buffer.zig
index cf530c3c9e..90d63719e3 100644
--- a/std/buffer.zig
+++ b/std/buffer.zig
@@ -94,26 +94,10 @@ pub const Buffer = struct {
mem.copy(u8, self.list.toSlice()[old_len..], m);
}
- // TODO: remove, use OutStream for this
- pub fn appendFormat(self: &Buffer, comptime format: []const u8, args: ...) !void {
- return fmt.format(self, append, format, args);
- }
-
- // TODO: remove, use OutStream for this
pub fn appendByte(self: &Buffer, byte: u8) !void {
- return self.appendByteNTimes(byte, 1);
- }
-
- // TODO: remove, use OutStream for this
- pub fn appendByteNTimes(self: &Buffer, byte: u8, count: usize) !void {
- var prev_size: usize = self.len();
- const new_size = prev_size + count;
- try self.resize(new_size);
-
- var i: usize = prev_size;
- while (i < new_size) : (i += 1) {
- self.list.items[i] = byte;
- }
+ const old_len = self.len();
+ try self.resize(old_len + 1);
+ self.list.toSlice()[old_len] = byte;
}
pub fn eql(self: &const Buffer, m: []const u8) bool {
@@ -149,7 +133,7 @@ test "simple Buffer" {
var buf = try Buffer.init(debug.global_allocator, "");
assert(buf.len() == 0);
try buf.append("hello");
- try buf.appendByte(' ');
+ try buf.append(" ");
try buf.append("world");
assert(buf.eql("hello world"));
assert(mem.eql(u8, cstr.toSliceConst(buf.toSliceConst().ptr), buf.toSliceConst()));
diff --git a/std/io_test.zig b/std/io_test.zig
index 89959b7b54..5f53556785 100644
--- a/std/io_test.zig
+++ b/std/io_test.zig
@@ -1,6 +1,5 @@
const std = @import("index.zig");
const io = std.io;
-const allocator = std.debug.global_allocator;
const DefaultPrng = std.rand.DefaultPrng;
const assert = std.debug.assert;
const mem = std.mem;
@@ -8,6 +7,9 @@ const os = std.os;
const builtin = @import("builtin");
test "write a file, read it, then delete it" {
+ var raw_bytes: [200 * 1024]u8 = undefined;
+ var allocator = &std.heap.FixedBufferAllocator.init(raw_bytes[0..]).allocator;
+
var data: [1024]u8 = undefined;
var prng = DefaultPrng.init(1234);
prng.random.bytes(data[0..]);
@@ -44,3 +46,17 @@ test "write a file, read it, then delete it" {
}
try os.deleteFile(allocator, tmp_file_name);
}
+
+test "BufferOutStream" {
+ var bytes: [100]u8 = undefined;
+ var allocator = &std.heap.FixedBufferAllocator.init(bytes[0..]).allocator;
+
+ var buffer = try std.Buffer.initSize(allocator, 0);
+ var buf_stream = &std.io.BufferOutStream.init(&buffer).stream;
+
+ const x: i32 = 42;
+ const y: i32 = 1234;
+ try buf_stream.print("x: {}\ny: {}\n", x, y);
+
+ assert(mem.eql(u8, buffer.toSlice(), "x: 42\ny: 1234\n"));
+}
diff --git a/std/os/child_process.zig b/std/os/child_process.zig
index 6c0c21f64a..dc3a582707 100644
--- a/std/os/child_process.zig
+++ b/std/os/child_process.zig
@@ -661,6 +661,8 @@ fn windowsCreateCommandLine(allocator: &mem.Allocator, argv: []const []const u8)
var buf = try Buffer.initSize(allocator, 0);
defer buf.deinit();
+ var buf_stream = &io.BufferOutStream.init(&buf).stream;
+
for (argv) |arg, arg_i| {
if (arg_i != 0) try buf.appendByte(' ');
if (mem.indexOfAny(u8, arg, " \t\n\"") == null) {
@@ -673,18 +675,18 @@ fn windowsCreateCommandLine(allocator: &mem.Allocator, argv: []const []const u8)
switch (byte) {
'\\' => backslash_count += 1,
'"' => {
- try buf.appendByteNTimes('\\', backslash_count * 2 + 1);
+ try buf_stream.writeByteNTimes('\\', backslash_count * 2 + 1);
try buf.appendByte('"');
backslash_count = 0;
},
else => {
- try buf.appendByteNTimes('\\', backslash_count);
+ try buf_stream.writeByteNTimes('\\', backslash_count);
try buf.appendByte(byte);
backslash_count = 0;
},
}
}
- try buf.appendByteNTimes('\\', backslash_count * 2);
+ try buf_stream.writeByteNTimes('\\', backslash_count * 2);
try buf.appendByte('"');
}
diff --git a/std/segmented_list.zig b/std/segmented_list.zig
index e9abc7adf9..280c6a7f9b 100644
--- a/std/segmented_list.zig
+++ b/std/segmented_list.zig
@@ -91,6 +91,8 @@ pub fn SegmentedList(comptime T: type, comptime prealloc_item_count: usize) type
allocator: &Allocator,
len: usize,
+ pub const prealloc_count = prealloc_item_count;
+
/// Deinitialize with `deinit`
pub fn init(allocator: &Allocator) Self {
return Self{
@@ -283,6 +285,15 @@ pub fn SegmentedList(comptime T: type, comptime prealloc_item_count: usize) type
return &it.list.dynamic_segments[it.shelf_index][it.box_index];
}
+
+ pub fn peek(it: &Iterator) ?&T {
+ if (it.index >= it.list.len)
+ return null;
+ if (it.index < prealloc_item_count)
+ return &it.list.prealloc_segment[it.index];
+
+ return &it.list.dynamic_segments[it.shelf_index][it.box_index];
+ }
};
pub fn iterator(self: &Self, start_index: usize) Iterator {
diff --git a/std/zig/ast.zig b/std/zig/ast.zig
index d1d7fe7914..596b57bdb6 100644
--- a/std/zig/ast.zig
+++ b/std/zig/ast.zig
@@ -1,12 +1,232 @@
const std = @import("../index.zig");
const assert = std.debug.assert;
-const ArrayList = std.ArrayList;
-const Token = std.zig.Token;
+const SegmentedList = std.SegmentedList;
const mem = std.mem;
+const Token = std.zig.Token;
+
+pub const TokenIndex = usize;
+
+pub const Tree = struct {
+ source: []const u8,
+ tokens: TokenList,
+ root_node: &Node.Root,
+ arena_allocator: std.heap.ArenaAllocator,
+ errors: ErrorList,
+
+ pub const TokenList = SegmentedList(Token, 64);
+ pub const ErrorList = SegmentedList(Error, 0);
+
+ pub fn deinit(self: &Tree) void {
+ self.arena_allocator.deinit();
+ }
+
+ pub fn renderError(self: &Tree, parse_error: &Error, stream: var) !void {
+ return parse_error.render(&self.tokens, stream);
+ }
+
+ pub fn tokenSlice(self: &Tree, token_index: TokenIndex) []const u8 {
+ return self.tokenSlicePtr(self.tokens.at(token_index));
+ }
+
+ pub fn tokenSlicePtr(self: &Tree, token: &const Token) []const u8 {
+ return self.source[token.start..token.end];
+ }
+
+ pub const Location = struct {
+ line: usize,
+ column: usize,
+ line_start: usize,
+ line_end: usize,
+ };
+
+ pub fn tokenLocationPtr(self: &Tree, start_index: usize, token: &const Token) Location {
+ var loc = Location {
+ .line = 0,
+ .column = 0,
+ .line_start = start_index,
+ .line_end = self.source.len,
+ };
+ const token_start = token.start;
+ for (self.source[start_index..]) |c, i| {
+ if (i + start_index == token_start) {
+ loc.line_end = i + start_index;
+ while (loc.line_end < self.source.len and self.source[loc.line_end] != '\n') : (loc.line_end += 1) {}
+ return loc;
+ }
+ if (c == '\n') {
+ loc.line += 1;
+ loc.column = 0;
+ loc.line_start = i + 1;
+ } else {
+ loc.column += 1;
+ }
+ }
+ return loc;
+ }
+
+ pub fn tokenLocation(self: &Tree, start_index: usize, token_index: TokenIndex) Location {
+ return self.tokenLocationPtr(start_index, self.tokens.at(token_index));
+ }
+
+ pub fn dump(self: &Tree) void {
+ self.root_node.base.dump(0);
+ }
+
+};
+
+pub const Error = union(enum) {
+ InvalidToken: InvalidToken,
+ ExpectedVarDeclOrFn: ExpectedVarDeclOrFn,
+ ExpectedAggregateKw: ExpectedAggregateKw,
+ UnattachedDocComment: UnattachedDocComment,
+ ExpectedEqOrSemi: ExpectedEqOrSemi,
+ ExpectedSemiOrLBrace: ExpectedSemiOrLBrace,
+ ExpectedLabelable: ExpectedLabelable,
+ ExpectedInlinable: ExpectedInlinable,
+ ExpectedAsmOutputReturnOrType: ExpectedAsmOutputReturnOrType,
+ ExpectedCall: ExpectedCall,
+ ExpectedCallOrFnProto: ExpectedCallOrFnProto,
+ ExpectedSliceOrRBracket: ExpectedSliceOrRBracket,
+ ExtraAlignQualifier: ExtraAlignQualifier,
+ ExtraConstQualifier: ExtraConstQualifier,
+ ExtraVolatileQualifier: ExtraVolatileQualifier,
+ ExpectedPrimaryExpr: ExpectedPrimaryExpr,
+ ExpectedToken: ExpectedToken,
+ ExpectedCommaOrEnd: ExpectedCommaOrEnd,
+
+ pub fn render(self: &Error, tokens: &Tree.TokenList, stream: var) !void {
+ switch (*self) {
+ // TODO https://github.com/zig-lang/zig/issues/683
+ @TagType(Error).InvalidToken => |*x| return x.render(tokens, stream),
+ @TagType(Error).ExpectedVarDeclOrFn => |*x| return x.render(tokens, stream),
+ @TagType(Error).ExpectedAggregateKw => |*x| return x.render(tokens, stream),
+ @TagType(Error).UnattachedDocComment => |*x| return x.render(tokens, stream),
+ @TagType(Error).ExpectedEqOrSemi => |*x| return x.render(tokens, stream),
+ @TagType(Error).ExpectedSemiOrLBrace => |*x| return x.render(tokens, stream),
+ @TagType(Error).ExpectedLabelable => |*x| return x.render(tokens, stream),
+ @TagType(Error).ExpectedInlinable => |*x| return x.render(tokens, stream),
+ @TagType(Error).ExpectedAsmOutputReturnOrType => |*x| return x.render(tokens, stream),
+ @TagType(Error).ExpectedCall => |*x| return x.render(tokens, stream),
+ @TagType(Error).ExpectedCallOrFnProto => |*x| return x.render(tokens, stream),
+ @TagType(Error).ExpectedSliceOrRBracket => |*x| return x.render(tokens, stream),
+ @TagType(Error).ExtraAlignQualifier => |*x| return x.render(tokens, stream),
+ @TagType(Error).ExtraConstQualifier => |*x| return x.render(tokens, stream),
+ @TagType(Error).ExtraVolatileQualifier => |*x| return x.render(tokens, stream),
+ @TagType(Error).ExpectedPrimaryExpr => |*x| return x.render(tokens, stream),
+ @TagType(Error).ExpectedToken => |*x| return x.render(tokens, stream),
+ @TagType(Error).ExpectedCommaOrEnd => |*x| return x.render(tokens, stream),
+ }
+ }
+
+ pub fn loc(self: &Error) TokenIndex {
+ switch (*self) {
+ // TODO https://github.com/zig-lang/zig/issues/683
+ @TagType(Error).InvalidToken => |x| return x.token,
+ @TagType(Error).ExpectedVarDeclOrFn => |x| return x.token,
+ @TagType(Error).ExpectedAggregateKw => |x| return x.token,
+ @TagType(Error).UnattachedDocComment => |x| return x.token,
+ @TagType(Error).ExpectedEqOrSemi => |x| return x.token,
+ @TagType(Error).ExpectedSemiOrLBrace => |x| return x.token,
+ @TagType(Error).ExpectedLabelable => |x| return x.token,
+ @TagType(Error).ExpectedInlinable => |x| return x.token,
+ @TagType(Error).ExpectedAsmOutputReturnOrType => |x| return x.token,
+ @TagType(Error).ExpectedCall => |x| return x.node.firstToken(),
+ @TagType(Error).ExpectedCallOrFnProto => |x| return x.node.firstToken(),
+ @TagType(Error).ExpectedSliceOrRBracket => |x| return x.token,
+ @TagType(Error).ExtraAlignQualifier => |x| return x.token,
+ @TagType(Error).ExtraConstQualifier => |x| return x.token,
+ @TagType(Error).ExtraVolatileQualifier => |x| return x.token,
+ @TagType(Error).ExpectedPrimaryExpr => |x| return x.token,
+ @TagType(Error).ExpectedToken => |x| return x.token,
+ @TagType(Error).ExpectedCommaOrEnd => |x| return x.token,
+ }
+ }
+
+ pub const InvalidToken = SingleTokenError("Invalid token {}");
+ pub const ExpectedVarDeclOrFn = SingleTokenError("Expected variable declaration or function, found {}");
+ pub const ExpectedAggregateKw = SingleTokenError("Expected " ++
+ @tagName(Token.Id.Keyword_struct) ++ ", " ++ @tagName(Token.Id.Keyword_union) ++ ", or " ++
+ @tagName(Token.Id.Keyword_enum) ++ ", found {}");
+ pub const ExpectedEqOrSemi = SingleTokenError("Expected '=' or ';', found {}");
+ pub const ExpectedSemiOrLBrace = SingleTokenError("Expected ';' or '{{', found {}");
+ pub const ExpectedLabelable = SingleTokenError("Expected 'while', 'for', 'inline', 'suspend', or '{{', found {}");
+ pub const ExpectedInlinable = SingleTokenError("Expected 'while' or 'for', found {}");
+ pub const ExpectedAsmOutputReturnOrType = SingleTokenError("Expected '->' or " ++
+ @tagName(Token.Id.Identifier) ++ ", found {}");
+ pub const ExpectedSliceOrRBracket = SingleTokenError("Expected ']' or '..', found {}");
+ pub const ExpectedPrimaryExpr = SingleTokenError("Expected primary expression, found {}");
+
+ pub const UnattachedDocComment = SimpleError("Unattached documentation comment");
+ pub const ExtraAlignQualifier = SimpleError("Extra align qualifier");
+ pub const ExtraConstQualifier = SimpleError("Extra const qualifier");
+ pub const ExtraVolatileQualifier = SimpleError("Extra volatile qualifier");
+
+ pub const ExpectedCall = struct {
+ node: &Node,
+
+ pub fn render(self: &ExpectedCall, tokens: &Tree.TokenList, stream: var) !void {
+ return stream.print("expected " ++ @tagName(@TagType(Node.SuffixOp.Op).Call) ++ ", found {}",
+ @tagName(self.node.id));
+ }
+ };
+
+ pub const ExpectedCallOrFnProto = struct {
+ node: &Node,
+
+ pub fn render(self: &ExpectedCallOrFnProto, tokens: &Tree.TokenList, stream: var) !void {
+ return stream.print("expected " ++ @tagName(@TagType(Node.SuffixOp.Op).Call) ++ " or " ++
+ @tagName(Node.Id.FnProto) ++ ", found {}", @tagName(self.node.id));
+ }
+ };
+
+ pub const ExpectedToken = struct {
+ token: TokenIndex,
+ expected_id: @TagType(Token.Id),
+
+ pub fn render(self: &ExpectedToken, tokens: &Tree.TokenList, stream: var) !void {
+ const token_name = @tagName(tokens.at(self.token).id);
+ return stream.print("expected {}, found {}", @tagName(self.expected_id), token_name);
+ }
+ };
+
+ pub const ExpectedCommaOrEnd = struct {
+ token: TokenIndex,
+ end_id: @TagType(Token.Id),
+
+ pub fn render(self: &ExpectedCommaOrEnd, tokens: &Tree.TokenList, stream: var) !void {
+ const token_name = @tagName(tokens.at(self.token).id);
+ return stream.print("expected ',' or {}, found {}", @tagName(self.end_id), token_name);
+ }
+ };
+
+ fn SingleTokenError(comptime msg: []const u8) type {
+ return struct {
+ const ThisError = this;
+
+ token: TokenIndex,
+
+ pub fn render(self: &ThisError, tokens: &Tree.TokenList, stream: var) !void {
+ const token_name = @tagName(tokens.at(self.token).id);
+ return stream.print(msg, token_name);
+ }
+ };
+ }
+
+ fn SimpleError(comptime msg: []const u8) type {
+ return struct {
+ const ThisError = this;
+
+ token: TokenIndex,
+
+ pub fn render(self: &ThisError, tokens: &Tree.TokenList, stream: var) !void {
+ return stream.write(msg);
+ }
+ };
+ }
+};
pub const Node = struct {
id: Id,
- same_line_comment: ?&Token,
pub const Id = enum {
// Top level
@@ -95,7 +315,7 @@ pub const Node = struct {
unreachable;
}
- pub fn firstToken(base: &Node) Token {
+ pub fn firstToken(base: &Node) TokenIndex {
comptime var i = 0;
inline while (i < @memberCount(Id)) : (i += 1) {
if (base.id == @field(Id, @memberName(Id, i))) {
@@ -106,7 +326,7 @@ pub const Node = struct {
unreachable;
}
- pub fn lastToken(base: &Node) Token {
+ pub fn lastToken(base: &Node) TokenIndex {
comptime var i = 0;
inline while (i < @memberCount(Id)) : (i += 1) {
if (base.id == @field(Id, @memberName(Id, i))) {
@@ -127,42 +347,132 @@ pub const Node = struct {
unreachable;
}
+ pub fn requireSemiColon(base: &const Node) bool {
+ var n = base;
+ while (true) {
+ switch (n.id) {
+ Id.Root,
+ Id.StructField,
+ Id.UnionTag,
+ Id.EnumTag,
+ Id.ParamDecl,
+ Id.Block,
+ Id.Payload,
+ Id.PointerPayload,
+ Id.PointerIndexPayload,
+ Id.Switch,
+ Id.SwitchCase,
+ Id.SwitchElse,
+ Id.FieldInitializer,
+ Id.DocComment,
+ Id.LineComment,
+ Id.TestDecl => return false,
+ Id.While => {
+ const while_node = @fieldParentPtr(While, "base", n);
+ if (while_node.@"else") |@"else"| {
+ n = @"else".base;
+ continue;
+ }
+
+ return while_node.body.id != Id.Block;
+ },
+ Id.For => {
+ const for_node = @fieldParentPtr(For, "base", n);
+ if (for_node.@"else") |@"else"| {
+ n = @"else".base;
+ continue;
+ }
+
+ return for_node.body.id != Id.Block;
+ },
+ Id.If => {
+ const if_node = @fieldParentPtr(If, "base", n);
+ if (if_node.@"else") |@"else"| {
+ n = @"else".base;
+ continue;
+ }
+
+ return if_node.body.id != Id.Block;
+ },
+ Id.Else => {
+ const else_node = @fieldParentPtr(Else, "base", n);
+ n = else_node.body;
+ continue;
+ },
+ Id.Defer => {
+ const defer_node = @fieldParentPtr(Defer, "base", n);
+ return defer_node.expr.id != Id.Block;
+ },
+ Id.Comptime => {
+ const comptime_node = @fieldParentPtr(Comptime, "base", n);
+ return comptime_node.expr.id != Id.Block;
+ },
+ Id.Suspend => {
+ const suspend_node = @fieldParentPtr(Suspend, "base", n);
+ if (suspend_node.body) |body| {
+ return body.id != Id.Block;
+ }
+
+ return true;
+ },
+ else => return true,
+ }
+ }
+ }
+
+ pub fn dump(self: &Node, indent: usize) void {
+ {
+ var i: usize = 0;
+ while (i < indent) : (i += 1) {
+ std.debug.warn(" ");
+ }
+ }
+ std.debug.warn("{}\n", @tagName(self.id));
+
+ var child_i: usize = 0;
+ while (self.iterate(child_i)) |child| : (child_i += 1) {
+ child.dump(indent + 2);
+ }
+ }
+
pub const Root = struct {
base: Node,
doc_comments: ?&DocComment,
- decls: ArrayList(&Node),
- eof_token: Token,
+ decls: DeclList,
+ eof_token: TokenIndex,
+
+ pub const DeclList = SegmentedList(&Node, 4);
pub fn iterate(self: &Root, index: usize) ?&Node {
if (index < self.decls.len) {
- return self.decls.items[self.decls.len - index - 1];
+ return *self.decls.at(index);
}
return null;
}
- pub fn firstToken(self: &Root) Token {
- return if (self.decls.len == 0) self.eof_token else self.decls.at(0).firstToken();
+ pub fn firstToken(self: &Root) TokenIndex {
+ return if (self.decls.len == 0) self.eof_token else (*self.decls.at(0)).firstToken();
}
- pub fn lastToken(self: &Root) Token {
- return if (self.decls.len == 0) self.eof_token else self.decls.at(self.decls.len - 1).lastToken();
+ pub fn lastToken(self: &Root) TokenIndex {
+ return if (self.decls.len == 0) self.eof_token else (*self.decls.at(self.decls.len - 1)).lastToken();
}
};
pub const VarDecl = struct {
base: Node,
doc_comments: ?&DocComment,
- visib_token: ?Token,
- name_token: Token,
- eq_token: Token,
- mut_token: Token,
- comptime_token: ?Token,
- extern_export_token: ?Token,
+ visib_token: ?TokenIndex,
+ name_token: TokenIndex,
+ eq_token: TokenIndex,
+ mut_token: TokenIndex,
+ comptime_token: ?TokenIndex,
+ extern_export_token: ?TokenIndex,
lib_name: ?&Node,
type_node: ?&Node,
align_node: ?&Node,
init_node: ?&Node,
- semicolon_token: Token,
+ semicolon_token: TokenIndex,
pub fn iterate(self: &VarDecl, index: usize) ?&Node {
var i = index;
@@ -185,7 +495,7 @@ pub const Node = struct {
return null;
}
- pub fn firstToken(self: &VarDecl) Token {
+ pub fn firstToken(self: &VarDecl) TokenIndex {
if (self.visib_token) |visib_token| return visib_token;
if (self.comptime_token) |comptime_token| return comptime_token;
if (self.extern_export_token) |extern_export_token| return extern_export_token;
@@ -193,7 +503,7 @@ pub const Node = struct {
return self.mut_token;
}
- pub fn lastToken(self: &VarDecl) Token {
+ pub fn lastToken(self: &VarDecl) TokenIndex {
return self.semicolon_token;
}
};
@@ -201,9 +511,9 @@ pub const Node = struct {
pub const Use = struct {
base: Node,
doc_comments: ?&DocComment,
- visib_token: ?Token,
+ visib_token: ?TokenIndex,
expr: &Node,
- semicolon_token: Token,
+ semicolon_token: TokenIndex,
pub fn iterate(self: &Use, index: usize) ?&Node {
var i = index;
@@ -214,48 +524,52 @@ pub const Node = struct {
return null;
}
- pub fn firstToken(self: &Use) Token {
+ pub fn firstToken(self: &Use) TokenIndex {
if (self.visib_token) |visib_token| return visib_token;
return self.expr.firstToken();
}
- pub fn lastToken(self: &Use) Token {
+ pub fn lastToken(self: &Use) TokenIndex {
return self.semicolon_token;
}
};
pub const ErrorSetDecl = struct {
base: Node,
- error_token: Token,
- decls: ArrayList(&Node),
- rbrace_token: Token,
+ error_token: TokenIndex,
+ decls: DeclList,
+ rbrace_token: TokenIndex,
+
+ pub const DeclList = SegmentedList(&Node, 2);
pub fn iterate(self: &ErrorSetDecl, index: usize) ?&Node {
var i = index;
- if (i < self.decls.len) return self.decls.at(i);
+ if (i < self.decls.len) return *self.decls.at(i);
i -= self.decls.len;
return null;
}
- pub fn firstToken(self: &ErrorSetDecl) Token {
+ pub fn firstToken(self: &ErrorSetDecl) TokenIndex {
return self.error_token;
}
- pub fn lastToken(self: &ErrorSetDecl) Token {
+ pub fn lastToken(self: &ErrorSetDecl) TokenIndex {
return self.rbrace_token;
}
};
pub const ContainerDecl = struct {
base: Node,
- ltoken: Token,
+ ltoken: TokenIndex,
layout: Layout,
kind: Kind,
init_arg_expr: InitArg,
- fields_and_decls: ArrayList(&Node),
- rbrace_token: Token,
+ fields_and_decls: DeclList,
+ rbrace_token: TokenIndex,
+
+ pub const DeclList = Root.DeclList;
const Layout = enum {
Auto,
@@ -287,17 +601,17 @@ pub const Node = struct {
InitArg.Enum => { }
}
- if (i < self.fields_and_decls.len) return self.fields_and_decls.at(i);
+ if (i < self.fields_and_decls.len) return *self.fields_and_decls.at(i);
i -= self.fields_and_decls.len;
return null;
}
- pub fn firstToken(self: &ContainerDecl) Token {
+ pub fn firstToken(self: &ContainerDecl) TokenIndex {
return self.ltoken;
}
- pub fn lastToken(self: &ContainerDecl) Token {
+ pub fn lastToken(self: &ContainerDecl) TokenIndex {
return self.rbrace_token;
}
};
@@ -305,8 +619,8 @@ pub const Node = struct {
pub const StructField = struct {
base: Node,
doc_comments: ?&DocComment,
- visib_token: ?Token,
- name_token: Token,
+ visib_token: ?TokenIndex,
+ name_token: TokenIndex,
type_expr: &Node,
pub fn iterate(self: &StructField, index: usize) ?&Node {
@@ -318,12 +632,12 @@ pub const Node = struct {
return null;
}
- pub fn firstToken(self: &StructField) Token {
+ pub fn firstToken(self: &StructField) TokenIndex {
if (self.visib_token) |visib_token| return visib_token;
return self.name_token;
}
- pub fn lastToken(self: &StructField) Token {
+ pub fn lastToken(self: &StructField) TokenIndex {
return self.type_expr.lastToken();
}
};
@@ -331,7 +645,7 @@ pub const Node = struct {
pub const UnionTag = struct {
base: Node,
doc_comments: ?&DocComment,
- name_token: Token,
+ name_token: TokenIndex,
type_expr: ?&Node,
value_expr: ?&Node,
@@ -351,11 +665,11 @@ pub const Node = struct {
return null;
}
- pub fn firstToken(self: &UnionTag) Token {
+ pub fn firstToken(self: &UnionTag) TokenIndex {
return self.name_token;
}
- pub fn lastToken(self: &UnionTag) Token {
+ pub fn lastToken(self: &UnionTag) TokenIndex {
if (self.value_expr) |value_expr| {
return value_expr.lastToken();
}
@@ -370,7 +684,7 @@ pub const Node = struct {
pub const EnumTag = struct {
base: Node,
doc_comments: ?&DocComment,
- name_token: Token,
+ name_token: TokenIndex,
value: ?&Node,
pub fn iterate(self: &EnumTag, index: usize) ?&Node {
@@ -384,11 +698,11 @@ pub const Node = struct {
return null;
}
- pub fn firstToken(self: &EnumTag) Token {
+ pub fn firstToken(self: &EnumTag) TokenIndex {
return self.name_token;
}
- pub fn lastToken(self: &EnumTag) Token {
+ pub fn lastToken(self: &EnumTag) TokenIndex {
if (self.value) |value| {
return value.lastToken();
}
@@ -400,7 +714,7 @@ pub const Node = struct {
pub const ErrorTag = struct {
base: Node,
doc_comments: ?&DocComment,
- name_token: Token,
+ name_token: TokenIndex,
pub fn iterate(self: &ErrorTag, index: usize) ?&Node {
var i = index;
@@ -413,37 +727,37 @@ pub const Node = struct {
return null;
}
- pub fn firstToken(self: &ErrorTag) Token {
+ pub fn firstToken(self: &ErrorTag) TokenIndex {
return self.name_token;
}
- pub fn lastToken(self: &ErrorTag) Token {
+ pub fn lastToken(self: &ErrorTag) TokenIndex {
return self.name_token;
}
};
pub const Identifier = struct {
base: Node,
- token: Token,
+ token: TokenIndex,
pub fn iterate(self: &Identifier, index: usize) ?&Node {
return null;
}
- pub fn firstToken(self: &Identifier) Token {
+ pub fn firstToken(self: &Identifier) TokenIndex {
return self.token;
}
- pub fn lastToken(self: &Identifier) Token {
+ pub fn lastToken(self: &Identifier) TokenIndex {
return self.token;
}
};
pub const AsyncAttribute = struct {
base: Node,
- async_token: Token,
+ async_token: TokenIndex,
allocator_type: ?&Node,
- rangle_bracket: ?Token,
+ rangle_bracket: ?TokenIndex,
pub fn iterate(self: &AsyncAttribute, index: usize) ?&Node {
var i = index;
@@ -456,11 +770,11 @@ pub const Node = struct {
return null;
}
- pub fn firstToken(self: &AsyncAttribute) Token {
+ pub fn firstToken(self: &AsyncAttribute) TokenIndex {
return self.async_token;
}
- pub fn lastToken(self: &AsyncAttribute) Token {
+ pub fn lastToken(self: &AsyncAttribute) TokenIndex {
if (self.rangle_bracket) |rangle_bracket| {
return rangle_bracket;
}
@@ -472,19 +786,21 @@ pub const Node = struct {
pub const FnProto = struct {
base: Node,
doc_comments: ?&DocComment,
- visib_token: ?Token,
- fn_token: Token,
- name_token: ?Token,
- params: ArrayList(&Node),
+ visib_token: ?TokenIndex,
+ fn_token: TokenIndex,
+ name_token: ?TokenIndex,
+ params: ParamList,
return_type: ReturnType,
- var_args_token: ?Token,
- extern_export_inline_token: ?Token,
- cc_token: ?Token,
+ var_args_token: ?TokenIndex,
+ extern_export_inline_token: ?TokenIndex,
+ cc_token: ?TokenIndex,
async_attr: ?&AsyncAttribute,
body_node: ?&Node,
lib_name: ?&Node, // populated if this is an extern declaration
align_expr: ?&Node, // populated if align(A) is present
+ pub const ParamList = SegmentedList(&Node, 2);
+
pub const ReturnType = union(enum) {
Explicit: &Node,
InferErrorSet: &Node,
@@ -493,8 +809,16 @@ pub const Node = struct {
pub fn iterate(self: &FnProto, index: usize) ?&Node {
var i = index;
- if (self.body_node) |body_node| {
- if (i < 1) return body_node;
+ if (self.lib_name) |lib_name| {
+ if (i < 1) return lib_name;
+ i -= 1;
+ }
+
+ if (i < self.params.len) return *self.params.at(self.params.len - i - 1);
+ i -= self.params.len;
+
+ if (self.align_expr) |align_expr| {
+ if (i < 1) return align_expr;
i -= 1;
}
@@ -510,23 +834,16 @@ pub const Node = struct {
},
}
- if (self.align_expr) |align_expr| {
- if (i < 1) return align_expr;
+ if (self.body_node) |body_node| {
+ if (i < 1) return body_node;
i -= 1;
}
- if (i < self.params.len) return self.params.items[self.params.len - i - 1];
- i -= self.params.len;
-
- if (self.lib_name) |lib_name| {
- if (i < 1) return lib_name;
- i -= 1;
- }
return null;
}
- pub fn firstToken(self: &FnProto) Token {
+ pub fn firstToken(self: &FnProto) TokenIndex {
if (self.visib_token) |visib_token| return visib_token;
if (self.extern_export_inline_token) |extern_export_inline_token| return extern_export_inline_token;
assert(self.lib_name == null);
@@ -534,7 +851,7 @@ pub const Node = struct {
return self.fn_token;
}
- pub fn lastToken(self: &FnProto) Token {
+ pub fn lastToken(self: &FnProto) TokenIndex {
if (self.body_node) |body_node| return body_node.lastToken();
switch (self.return_type) {
// TODO allow this and next prong to share bodies since the types are the same
@@ -546,11 +863,11 @@ pub const Node = struct {
pub const PromiseType = struct {
base: Node,
- promise_token: Token,
+ promise_token: TokenIndex,
result: ?Result,
pub const Result = struct {
- arrow_token: Token,
+ arrow_token: TokenIndex,
return_type: &Node,
};
@@ -565,11 +882,11 @@ pub const Node = struct {
return null;
}
- pub fn firstToken(self: &PromiseType) Token {
+ pub fn firstToken(self: &PromiseType) TokenIndex {
return self.promise_token;
}
- pub fn lastToken(self: &PromiseType) Token {
+ pub fn lastToken(self: &PromiseType) TokenIndex {
if (self.result) |result| return result.return_type.lastToken();
return self.promise_token;
}
@@ -577,11 +894,11 @@ pub const Node = struct {
pub const ParamDecl = struct {
base: Node,
- comptime_token: ?Token,
- noalias_token: ?Token,
- name_token: ?Token,
+ comptime_token: ?TokenIndex,
+ noalias_token: ?TokenIndex,
+ name_token: ?TokenIndex,
type_node: &Node,
- var_args_token: ?Token,
+ var_args_token: ?TokenIndex,
pub fn iterate(self: &ParamDecl, index: usize) ?&Node {
var i = index;
@@ -592,14 +909,14 @@ pub const Node = struct {
return null;
}
- pub fn firstToken(self: &ParamDecl) Token {
+ pub fn firstToken(self: &ParamDecl) TokenIndex {
if (self.comptime_token) |comptime_token| return comptime_token;
if (self.noalias_token) |noalias_token| return noalias_token;
if (self.name_token) |name_token| return name_token;
return self.type_node.firstToken();
}
- pub fn lastToken(self: &ParamDecl) Token {
+ pub fn lastToken(self: &ParamDecl) TokenIndex {
if (self.var_args_token) |var_args_token| return var_args_token;
return self.type_node.lastToken();
}
@@ -607,21 +924,23 @@ pub const Node = struct {
pub const Block = struct {
base: Node,
- label: ?Token,
- lbrace: Token,
- statements: ArrayList(&Node),
- rbrace: Token,
+ label: ?TokenIndex,
+ lbrace: TokenIndex,
+ statements: StatementList,
+ rbrace: TokenIndex,
+
+ pub const StatementList = Root.DeclList;
pub fn iterate(self: &Block, index: usize) ?&Node {
var i = index;
- if (i < self.statements.len) return self.statements.items[i];
+ if (i < self.statements.len) return *self.statements.at(i);
i -= self.statements.len;
return null;
}
- pub fn firstToken(self: &Block) Token {
+ pub fn firstToken(self: &Block) TokenIndex {
if (self.label) |label| {
return label;
}
@@ -629,14 +948,14 @@ pub const Node = struct {
return self.lbrace;
}
- pub fn lastToken(self: &Block) Token {
+ pub fn lastToken(self: &Block) TokenIndex {
return self.rbrace;
}
};
pub const Defer = struct {
base: Node,
- defer_token: Token,
+ defer_token: TokenIndex,
kind: Kind,
expr: &Node,
@@ -654,11 +973,11 @@ pub const Node = struct {
return null;
}
- pub fn firstToken(self: &Defer) Token {
+ pub fn firstToken(self: &Defer) TokenIndex {
return self.defer_token;
}
- pub fn lastToken(self: &Defer) Token {
+ pub fn lastToken(self: &Defer) TokenIndex {
return self.expr.lastToken();
}
};
@@ -666,7 +985,7 @@ pub const Node = struct {
pub const Comptime = struct {
base: Node,
doc_comments: ?&DocComment,
- comptime_token: Token,
+ comptime_token: TokenIndex,
expr: &Node,
pub fn iterate(self: &Comptime, index: usize) ?&Node {
@@ -678,20 +997,20 @@ pub const Node = struct {
return null;
}
- pub fn firstToken(self: &Comptime) Token {
+ pub fn firstToken(self: &Comptime) TokenIndex {
return self.comptime_token;
}
- pub fn lastToken(self: &Comptime) Token {
+ pub fn lastToken(self: &Comptime) TokenIndex {
return self.expr.lastToken();
}
};
pub const Payload = struct {
base: Node,
- lpipe: Token,
+ lpipe: TokenIndex,
error_symbol: &Node,
- rpipe: Token,
+ rpipe: TokenIndex,
pub fn iterate(self: &Payload, index: usize) ?&Node {
var i = index;
@@ -702,21 +1021,21 @@ pub const Node = struct {
return null;
}
- pub fn firstToken(self: &Payload) Token {
+ pub fn firstToken(self: &Payload) TokenIndex {
return self.lpipe;
}
- pub fn lastToken(self: &Payload) Token {
+ pub fn lastToken(self: &Payload) TokenIndex {
return self.rpipe;
}
};
pub const PointerPayload = struct {
base: Node,
- lpipe: Token,
- ptr_token: ?Token,
+ lpipe: TokenIndex,
+ ptr_token: ?TokenIndex,
value_symbol: &Node,
- rpipe: Token,
+ rpipe: TokenIndex,
pub fn iterate(self: &PointerPayload, index: usize) ?&Node {
var i = index;
@@ -727,22 +1046,22 @@ pub const Node = struct {
return null;
}
- pub fn firstToken(self: &PointerPayload) Token {
+ pub fn firstToken(self: &PointerPayload) TokenIndex {
return self.lpipe;
}
- pub fn lastToken(self: &PointerPayload) Token {
+ pub fn lastToken(self: &PointerPayload) TokenIndex {
return self.rpipe;
}
};
pub const PointerIndexPayload = struct {
base: Node,
- lpipe: Token,
- ptr_token: ?Token,
+ lpipe: TokenIndex,
+ ptr_token: ?TokenIndex,
value_symbol: &Node,
index_symbol: ?&Node,
- rpipe: Token,
+ rpipe: TokenIndex,
pub fn iterate(self: &PointerIndexPayload, index: usize) ?&Node {
var i = index;
@@ -758,18 +1077,18 @@ pub const Node = struct {
return null;
}
- pub fn firstToken(self: &PointerIndexPayload) Token {
+ pub fn firstToken(self: &PointerIndexPayload) TokenIndex {
return self.lpipe;
}
- pub fn lastToken(self: &PointerIndexPayload) Token {
+ pub fn lastToken(self: &PointerIndexPayload) TokenIndex {
return self.rpipe;
}
};
pub const Else = struct {
base: Node,
- else_token: Token,
+ else_token: TokenIndex,
payload: ?&Node,
body: &Node,
@@ -787,22 +1106,24 @@ pub const Node = struct {
return null;
}
- pub fn firstToken(self: &Else) Token {
+ pub fn firstToken(self: &Else) TokenIndex {
return self.else_token;
}
- pub fn lastToken(self: &Else) Token {
+ pub fn lastToken(self: &Else) TokenIndex {
return self.body.lastToken();
}
};
pub const Switch = struct {
base: Node,
- switch_token: Token,
+ switch_token: TokenIndex,
expr: &Node,
/// these can be SwitchCase nodes or LineComment nodes
- cases: ArrayList(&Node),
- rbrace: Token,
+ cases: CaseList,
+ rbrace: TokenIndex,
+
+ pub const CaseList = SegmentedList(&Node, 2);
pub fn iterate(self: &Switch, index: usize) ?&Node {
var i = index;
@@ -810,31 +1131,33 @@ pub const Node = struct {
if (i < 1) return self.expr;
i -= 1;
- if (i < self.cases.len) return self.cases.at(i);
+ if (i < self.cases.len) return *self.cases.at(i);
i -= self.cases.len;
return null;
}
- pub fn firstToken(self: &Switch) Token {
+ pub fn firstToken(self: &Switch) TokenIndex {
return self.switch_token;
}
- pub fn lastToken(self: &Switch) Token {
+ pub fn lastToken(self: &Switch) TokenIndex {
return self.rbrace;
}
};
pub const SwitchCase = struct {
base: Node,
- items: ArrayList(&Node),
+ items: ItemList,
payload: ?&Node,
expr: &Node,
+ pub const ItemList = SegmentedList(&Node, 1);
+
pub fn iterate(self: &SwitchCase, index: usize) ?&Node {
var i = index;
- if (i < self.items.len) return self.items.at(i);
+ if (i < self.items.len) return *self.items.at(i);
i -= self.items.len;
if (self.payload) |payload| {
@@ -848,37 +1171,37 @@ pub const Node = struct {
return null;
}
- pub fn firstToken(self: &SwitchCase) Token {
- return self.items.at(0).firstToken();
+ pub fn firstToken(self: &SwitchCase) TokenIndex {
+ return (*self.items.at(0)).firstToken();
}
- pub fn lastToken(self: &SwitchCase) Token {
+ pub fn lastToken(self: &SwitchCase) TokenIndex {
return self.expr.lastToken();
}
};
pub const SwitchElse = struct {
base: Node,
- token: Token,
+ token: TokenIndex,
pub fn iterate(self: &SwitchElse, index: usize) ?&Node {
return null;
}
- pub fn firstToken(self: &SwitchElse) Token {
+ pub fn firstToken(self: &SwitchElse) TokenIndex {
return self.token;
}
- pub fn lastToken(self: &SwitchElse) Token {
+ pub fn lastToken(self: &SwitchElse) TokenIndex {
return self.token;
}
};
pub const While = struct {
base: Node,
- label: ?Token,
- inline_token: ?Token,
- while_token: Token,
+ label: ?TokenIndex,
+ inline_token: ?TokenIndex,
+ while_token: TokenIndex,
condition: &Node,
payload: ?&Node,
continue_expr: ?&Node,
@@ -912,7 +1235,7 @@ pub const Node = struct {
return null;
}
- pub fn firstToken(self: &While) Token {
+ pub fn firstToken(self: &While) TokenIndex {
if (self.label) |label| {
return label;
}
@@ -924,7 +1247,7 @@ pub const Node = struct {
return self.while_token;
}
- pub fn lastToken(self: &While) Token {
+ pub fn lastToken(self: &While) TokenIndex {
if (self.@"else") |@"else"| {
return @"else".body.lastToken();
}
@@ -935,9 +1258,9 @@ pub const Node = struct {
pub const For = struct {
base: Node,
- label: ?Token,
- inline_token: ?Token,
- for_token: Token,
+ label: ?TokenIndex,
+ inline_token: ?TokenIndex,
+ for_token: TokenIndex,
array_expr: &Node,
payload: ?&Node,
body: &Node,
@@ -965,7 +1288,7 @@ pub const Node = struct {
return null;
}
- pub fn firstToken(self: &For) Token {
+ pub fn firstToken(self: &For) TokenIndex {
if (self.label) |label| {
return label;
}
@@ -977,7 +1300,7 @@ pub const Node = struct {
return self.for_token;
}
- pub fn lastToken(self: &For) Token {
+ pub fn lastToken(self: &For) TokenIndex {
if (self.@"else") |@"else"| {
return @"else".body.lastToken();
}
@@ -988,7 +1311,7 @@ pub const Node = struct {
pub const If = struct {
base: Node,
- if_token: Token,
+ if_token: TokenIndex,
condition: &Node,
payload: ?&Node,
body: &Node,
@@ -1016,11 +1339,11 @@ pub const Node = struct {
return null;
}
- pub fn firstToken(self: &If) Token {
+ pub fn firstToken(self: &If) TokenIndex {
return self.if_token;
}
- pub fn lastToken(self: &If) Token {
+ pub fn lastToken(self: &If) TokenIndex {
if (self.@"else") |@"else"| {
return @"else".body.lastToken();
}
@@ -1031,7 +1354,7 @@ pub const Node = struct {
pub const InfixOp = struct {
base: Node,
- op_token: Token,
+ op_token: TokenIndex,
lhs: &Node,
op: Op,
rhs: &Node,
@@ -1146,18 +1469,18 @@ pub const Node = struct {
return null;
}
- pub fn firstToken(self: &InfixOp) Token {
+ pub fn firstToken(self: &InfixOp) TokenIndex {
return self.lhs.firstToken();
}
- pub fn lastToken(self: &InfixOp) Token {
+ pub fn lastToken(self: &InfixOp) TokenIndex {
return self.rhs.lastToken();
}
};
pub const PrefixOp = struct {
base: Node,
- op_token: Token,
+ op_token: TokenIndex,
op: Op,
rhs: &Node,
@@ -1168,7 +1491,6 @@ pub const Node = struct {
BitNot,
BoolNot,
Cancel,
- Deref,
MaybeType,
Negation,
NegationWrap,
@@ -1180,10 +1502,10 @@ pub const Node = struct {
const AddrOfInfo = struct {
align_expr: ?&Node,
- bit_offset_start_token: ?Token,
- bit_offset_end_token: ?Token,
- const_token: ?Token,
- volatile_token: ?Token,
+ bit_offset_start_token: ?TokenIndex,
+ bit_offset_end_token: ?TokenIndex,
+ const_token: ?TokenIndex,
+ volatile_token: ?TokenIndex,
};
pub fn iterate(self: &PrefixOp, index: usize) ?&Node {
@@ -1210,7 +1532,6 @@ pub const Node = struct {
Op.BitNot,
Op.BoolNot,
Op.Cancel,
- Op.Deref,
Op.MaybeType,
Op.Negation,
Op.NegationWrap,
@@ -1225,19 +1546,19 @@ pub const Node = struct {
return null;
}
- pub fn firstToken(self: &PrefixOp) Token {
+ pub fn firstToken(self: &PrefixOp) TokenIndex {
return self.op_token;
}
- pub fn lastToken(self: &PrefixOp) Token {
+ pub fn lastToken(self: &PrefixOp) TokenIndex {
return self.rhs.lastToken();
}
};
pub const FieldInitializer = struct {
base: Node,
- period_token: Token,
- name_token: Token,
+ period_token: TokenIndex,
+ name_token: TokenIndex,
expr: &Node,
pub fn iterate(self: &FieldInitializer, index: usize) ?&Node {
@@ -1249,11 +1570,11 @@ pub const Node = struct {
return null;
}
- pub fn firstToken(self: &FieldInitializer) Token {
+ pub fn firstToken(self: &FieldInitializer) TokenIndex {
return self.period_token;
}
- pub fn lastToken(self: &FieldInitializer) Token {
+ pub fn lastToken(self: &FieldInitializer) TokenIndex {
return self.expr.lastToken();
}
};
@@ -1262,24 +1583,29 @@ pub const Node = struct {
base: Node,
lhs: &Node,
op: Op,
- rtoken: Token,
+ rtoken: TokenIndex,
- const Op = union(enum) {
- Call: CallInfo,
+ pub const Op = union(enum) {
+ Call: Call,
ArrayAccess: &Node,
- Slice: SliceRange,
- ArrayInitializer: ArrayList(&Node),
- StructInitializer: ArrayList(&Node),
- };
+ Slice: Slice,
+ ArrayInitializer: InitList,
+ StructInitializer: InitList,
+ Deref,
- const CallInfo = struct {
- params: ArrayList(&Node),
- async_attr: ?&AsyncAttribute,
- };
+ pub const InitList = SegmentedList(&Node, 2);
- const SliceRange = struct {
- start: &Node,
- end: ?&Node,
+ pub const Call = struct {
+ params: ParamList,
+ async_attr: ?&AsyncAttribute,
+
+ pub const ParamList = SegmentedList(&Node, 2);
+ };
+
+ pub const Slice = struct {
+ start: &Node,
+ end: ?&Node,
+ };
};
pub fn iterate(self: &SuffixOp, index: usize) ?&Node {
@@ -1289,15 +1615,15 @@ pub const Node = struct {
i -= 1;
switch (self.op) {
- Op.Call => |call_info| {
- if (i < call_info.params.len) return call_info.params.at(i);
+ @TagType(Op).Call => |*call_info| {
+ if (i < call_info.params.len) return *call_info.params.at(i);
i -= call_info.params.len;
},
Op.ArrayAccess => |index_expr| {
if (i < 1) return index_expr;
i -= 1;
},
- Op.Slice => |range| {
+ @TagType(Op).Slice => |range| {
if (i < 1) return range.start;
i -= 1;
@@ -1306,12 +1632,12 @@ pub const Node = struct {
i -= 1;
}
},
- Op.ArrayInitializer => |exprs| {
- if (i < exprs.len) return exprs.at(i);
+ Op.ArrayInitializer => |*exprs| {
+ if (i < exprs.len) return *exprs.at(i);
i -= exprs.len;
},
- Op.StructInitializer => |fields| {
- if (i < fields.len) return fields.at(i);
+ Op.StructInitializer => |*fields| {
+ if (i < fields.len) return *fields.at(i);
i -= fields.len;
},
}
@@ -1319,20 +1645,20 @@ pub const Node = struct {
return null;
}
- pub fn firstToken(self: &SuffixOp) Token {
+ pub fn firstToken(self: &SuffixOp) TokenIndex {
return self.lhs.firstToken();
}
- pub fn lastToken(self: &SuffixOp) Token {
+ pub fn lastToken(self: &SuffixOp) TokenIndex {
return self.rtoken;
}
};
pub const GroupedExpression = struct {
base: Node,
- lparen: Token,
+ lparen: TokenIndex,
expr: &Node,
- rparen: Token,
+ rparen: TokenIndex,
pub fn iterate(self: &GroupedExpression, index: usize) ?&Node {
var i = index;
@@ -1343,18 +1669,18 @@ pub const Node = struct {
return null;
}
- pub fn firstToken(self: &GroupedExpression) Token {
+ pub fn firstToken(self: &GroupedExpression) TokenIndex {
return self.lparen;
}
- pub fn lastToken(self: &GroupedExpression) Token {
+ pub fn lastToken(self: &GroupedExpression) TokenIndex {
return self.rparen;
}
};
pub const ControlFlowExpression = struct {
base: Node,
- ltoken: Token,
+ ltoken: TokenIndex,
kind: Kind,
rhs: ?&Node,
@@ -1391,11 +1717,11 @@ pub const Node = struct {
return null;
}
- pub fn firstToken(self: &ControlFlowExpression) Token {
+ pub fn firstToken(self: &ControlFlowExpression) TokenIndex {
return self.ltoken;
}
- pub fn lastToken(self: &ControlFlowExpression) Token {
+ pub fn lastToken(self: &ControlFlowExpression) TokenIndex {
if (self.rhs) |rhs| {
return rhs.lastToken();
}
@@ -1420,8 +1746,8 @@ pub const Node = struct {
pub const Suspend = struct {
base: Node,
- label: ?Token,
- suspend_token: Token,
+ label: ?TokenIndex,
+ suspend_token: TokenIndex,
payload: ?&Node,
body: ?&Node,
@@ -1441,12 +1767,12 @@ pub const Node = struct {
return null;
}
- pub fn firstToken(self: &Suspend) Token {
+ pub fn firstToken(self: &Suspend) TokenIndex {
if (self.label) |label| return label;
return self.suspend_token;
}
- pub fn lastToken(self: &Suspend) Token {
+ pub fn lastToken(self: &Suspend) TokenIndex {
if (self.body) |body| {
return body.lastToken();
}
@@ -1461,177 +1787,181 @@ pub const Node = struct {
pub const IntegerLiteral = struct {
base: Node,
- token: Token,
+ token: TokenIndex,
pub fn iterate(self: &IntegerLiteral, index: usize) ?&Node {
return null;
}
- pub fn firstToken(self: &IntegerLiteral) Token {
+ pub fn firstToken(self: &IntegerLiteral) TokenIndex {
return self.token;
}
- pub fn lastToken(self: &IntegerLiteral) Token {
+ pub fn lastToken(self: &IntegerLiteral) TokenIndex {
return self.token;
}
};
pub const FloatLiteral = struct {
base: Node,
- token: Token,
+ token: TokenIndex,
pub fn iterate(self: &FloatLiteral, index: usize) ?&Node {
return null;
}
- pub fn firstToken(self: &FloatLiteral) Token {
+ pub fn firstToken(self: &FloatLiteral) TokenIndex {
return self.token;
}
- pub fn lastToken(self: &FloatLiteral) Token {
+ pub fn lastToken(self: &FloatLiteral) TokenIndex {
return self.token;
}
};
pub const BuiltinCall = struct {
base: Node,
- builtin_token: Token,
- params: ArrayList(&Node),
- rparen_token: Token,
+ builtin_token: TokenIndex,
+ params: ParamList,
+ rparen_token: TokenIndex,
+
+ pub const ParamList = SegmentedList(&Node, 2);
pub fn iterate(self: &BuiltinCall, index: usize) ?&Node {
var i = index;
- if (i < self.params.len) return self.params.at(i);
+ if (i < self.params.len) return *self.params.at(i);
i -= self.params.len;
return null;
}
- pub fn firstToken(self: &BuiltinCall) Token {
+ pub fn firstToken(self: &BuiltinCall) TokenIndex {
return self.builtin_token;
}
- pub fn lastToken(self: &BuiltinCall) Token {
+ pub fn lastToken(self: &BuiltinCall) TokenIndex {
return self.rparen_token;
}
};
pub const StringLiteral = struct {
base: Node,
- token: Token,
+ token: TokenIndex,
pub fn iterate(self: &StringLiteral, index: usize) ?&Node {
return null;
}
- pub fn firstToken(self: &StringLiteral) Token {
+ pub fn firstToken(self: &StringLiteral) TokenIndex {
return self.token;
}
- pub fn lastToken(self: &StringLiteral) Token {
+ pub fn lastToken(self: &StringLiteral) TokenIndex {
return self.token;
}
};
pub const MultilineStringLiteral = struct {
base: Node,
- tokens: ArrayList(Token),
+ lines: LineList,
+
+ pub const LineList = SegmentedList(TokenIndex, 4);
pub fn iterate(self: &MultilineStringLiteral, index: usize) ?&Node {
return null;
}
- pub fn firstToken(self: &MultilineStringLiteral) Token {
- return self.tokens.at(0);
+ pub fn firstToken(self: &MultilineStringLiteral) TokenIndex {
+ return *self.lines.at(0);
}
- pub fn lastToken(self: &MultilineStringLiteral) Token {
- return self.tokens.at(self.tokens.len - 1);
+ pub fn lastToken(self: &MultilineStringLiteral) TokenIndex {
+ return *self.lines.at(self.lines.len - 1);
}
};
pub const CharLiteral = struct {
base: Node,
- token: Token,
+ token: TokenIndex,
pub fn iterate(self: &CharLiteral, index: usize) ?&Node {
return null;
}
- pub fn firstToken(self: &CharLiteral) Token {
+ pub fn firstToken(self: &CharLiteral) TokenIndex {
return self.token;
}
- pub fn lastToken(self: &CharLiteral) Token {
+ pub fn lastToken(self: &CharLiteral) TokenIndex {
return self.token;
}
};
pub const BoolLiteral = struct {
base: Node,
- token: Token,
+ token: TokenIndex,
pub fn iterate(self: &BoolLiteral, index: usize) ?&Node {
return null;
}
- pub fn firstToken(self: &BoolLiteral) Token {
+ pub fn firstToken(self: &BoolLiteral) TokenIndex {
return self.token;
}
- pub fn lastToken(self: &BoolLiteral) Token {
+ pub fn lastToken(self: &BoolLiteral) TokenIndex {
return self.token;
}
};
pub const NullLiteral = struct {
base: Node,
- token: Token,
+ token: TokenIndex,
pub fn iterate(self: &NullLiteral, index: usize) ?&Node {
return null;
}
- pub fn firstToken(self: &NullLiteral) Token {
+ pub fn firstToken(self: &NullLiteral) TokenIndex {
return self.token;
}
- pub fn lastToken(self: &NullLiteral) Token {
+ pub fn lastToken(self: &NullLiteral) TokenIndex {
return self.token;
}
};
pub const UndefinedLiteral = struct {
base: Node,
- token: Token,
+ token: TokenIndex,
pub fn iterate(self: &UndefinedLiteral, index: usize) ?&Node {
return null;
}
- pub fn firstToken(self: &UndefinedLiteral) Token {
+ pub fn firstToken(self: &UndefinedLiteral) TokenIndex {
return self.token;
}
- pub fn lastToken(self: &UndefinedLiteral) Token {
+ pub fn lastToken(self: &UndefinedLiteral) TokenIndex {
return self.token;
}
};
pub const ThisLiteral = struct {
base: Node,
- token: Token,
+ token: TokenIndex,
pub fn iterate(self: &ThisLiteral, index: usize) ?&Node {
return null;
}
- pub fn firstToken(self: &ThisLiteral) Token {
+ pub fn firstToken(self: &ThisLiteral) TokenIndex {
return self.token;
}
- pub fn lastToken(self: &ThisLiteral) Token {
+ pub fn lastToken(self: &ThisLiteral) TokenIndex {
return self.token;
}
};
@@ -1670,11 +2000,11 @@ pub const Node = struct {
return null;
}
- pub fn firstToken(self: &AsmOutput) Token {
+ pub fn firstToken(self: &AsmOutput) TokenIndex {
return self.symbolic_name.firstToken();
}
- pub fn lastToken(self: &AsmOutput) Token {
+ pub fn lastToken(self: &AsmOutput) TokenIndex {
return switch (self.kind) {
Kind.Variable => |variable_name| variable_name.lastToken(),
Kind.Return => |return_type| return_type.lastToken(),
@@ -1703,139 +2033,144 @@ pub const Node = struct {
return null;
}
- pub fn firstToken(self: &AsmInput) Token {
+ pub fn firstToken(self: &AsmInput) TokenIndex {
return self.symbolic_name.firstToken();
}
- pub fn lastToken(self: &AsmInput) Token {
+ pub fn lastToken(self: &AsmInput) TokenIndex {
return self.expr.lastToken();
}
};
pub const Asm = struct {
base: Node,
- asm_token: Token,
- volatile_token: ?Token,
+ asm_token: TokenIndex,
+ volatile_token: ?TokenIndex,
template: &Node,
- //tokens: ArrayList(AsmToken),
- outputs: ArrayList(&AsmOutput),
- inputs: ArrayList(&AsmInput),
- cloppers: ArrayList(&Node),
- rparen: Token,
+ outputs: OutputList,
+ inputs: InputList,
+ clobbers: ClobberList,
+ rparen: TokenIndex,
+
+ const OutputList = SegmentedList(&AsmOutput, 2);
+ const InputList = SegmentedList(&AsmInput, 2);
+ const ClobberList = SegmentedList(&Node, 2);
pub fn iterate(self: &Asm, index: usize) ?&Node {
var i = index;
- if (i < self.outputs.len) return &self.outputs.at(index).base;
+ if (i < self.outputs.len) return &(*self.outputs.at(index)).base;
i -= self.outputs.len;
- if (i < self.inputs.len) return &self.inputs.at(index).base;
+ if (i < self.inputs.len) return &(*self.inputs.at(index)).base;
i -= self.inputs.len;
- if (i < self.cloppers.len) return self.cloppers.at(index);
- i -= self.cloppers.len;
+ if (i < self.clobbers.len) return *self.clobbers.at(index);
+ i -= self.clobbers.len;
return null;
}
- pub fn firstToken(self: &Asm) Token {
+ pub fn firstToken(self: &Asm) TokenIndex {
return self.asm_token;
}
- pub fn lastToken(self: &Asm) Token {
+ pub fn lastToken(self: &Asm) TokenIndex {
return self.rparen;
}
};
pub const Unreachable = struct {
base: Node,
- token: Token,
+ token: TokenIndex,
pub fn iterate(self: &Unreachable, index: usize) ?&Node {
return null;
}
- pub fn firstToken(self: &Unreachable) Token {
+ pub fn firstToken(self: &Unreachable) TokenIndex {
return self.token;
}
- pub fn lastToken(self: &Unreachable) Token {
+ pub fn lastToken(self: &Unreachable) TokenIndex {
return self.token;
}
};
pub const ErrorType = struct {
base: Node,
- token: Token,
+ token: TokenIndex,
pub fn iterate(self: &ErrorType, index: usize) ?&Node {
return null;
}
- pub fn firstToken(self: &ErrorType) Token {
+ pub fn firstToken(self: &ErrorType) TokenIndex {
return self.token;
}
- pub fn lastToken(self: &ErrorType) Token {
+ pub fn lastToken(self: &ErrorType) TokenIndex {
return self.token;
}
};
pub const VarType = struct {
base: Node,
- token: Token,
+ token: TokenIndex,
pub fn iterate(self: &VarType, index: usize) ?&Node {
return null;
}
- pub fn firstToken(self: &VarType) Token {
+ pub fn firstToken(self: &VarType) TokenIndex {
return self.token;
}
- pub fn lastToken(self: &VarType) Token {
+ pub fn lastToken(self: &VarType) TokenIndex {
return self.token;
}
};
pub const LineComment = struct {
base: Node,
- token: Token,
+ token: TokenIndex,
pub fn iterate(self: &LineComment, index: usize) ?&Node {
return null;
}
- pub fn firstToken(self: &LineComment) Token {
+ pub fn firstToken(self: &LineComment) TokenIndex {
return self.token;
}
- pub fn lastToken(self: &LineComment) Token {
+ pub fn lastToken(self: &LineComment) TokenIndex {
return self.token;
}
};
pub const DocComment = struct {
base: Node,
- lines: ArrayList(Token),
+ lines: LineList,
+
+ pub const LineList = SegmentedList(TokenIndex, 4);
pub fn iterate(self: &DocComment, index: usize) ?&Node {
return null;
}
- pub fn firstToken(self: &DocComment) Token {
- return self.lines.at(0);
+ pub fn firstToken(self: &DocComment) TokenIndex {
+ return *self.lines.at(0);
}
- pub fn lastToken(self: &DocComment) Token {
- return self.lines.at(self.lines.len - 1);
+ pub fn lastToken(self: &DocComment) TokenIndex {
+ return *self.lines.at(self.lines.len - 1);
}
};
pub const TestDecl = struct {
base: Node,
doc_comments: ?&DocComment,
- test_token: Token,
+ test_token: TokenIndex,
name: &Node,
body_node: &Node,
@@ -1848,11 +2183,11 @@ pub const Node = struct {
return null;
}
- pub fn firstToken(self: &TestDecl) Token {
+ pub fn firstToken(self: &TestDecl) TokenIndex {
return self.test_token;
}
- pub fn lastToken(self: &TestDecl) Token {
+ pub fn lastToken(self: &TestDecl) TokenIndex {
return self.body_node.lastToken();
}
};
diff --git a/std/zig/index.zig b/std/zig/index.zig
index 32699935d9..4dd68fa8b3 100644
--- a/std/zig/index.zig
+++ b/std/zig/index.zig
@@ -1,11 +1,13 @@
const tokenizer = @import("tokenizer.zig");
pub const Token = tokenizer.Token;
pub const Tokenizer = tokenizer.Tokenizer;
-pub const Parser = @import("parser.zig").Parser;
+pub const parse = @import("parse.zig").parse;
+pub const render = @import("render.zig").render;
pub const ast = @import("ast.zig");
test "std.zig tests" {
- _ = @import("tokenizer.zig");
- _ = @import("parser.zig");
_ = @import("ast.zig");
+ _ = @import("parse.zig");
+ _ = @import("render.zig");
+ _ = @import("tokenizer.zig");
}
diff --git a/std/zig/parse.zig b/std/zig/parse.zig
new file mode 100644
index 0000000000..f88fdfab62
--- /dev/null
+++ b/std/zig/parse.zig
@@ -0,0 +1,3470 @@
+const std = @import("../index.zig");
+const assert = std.debug.assert;
+const mem = std.mem;
+const ast = std.zig.ast;
+const Tokenizer = std.zig.Tokenizer;
+const Token = std.zig.Token;
+const TokenIndex = ast.TokenIndex;
+const Error = ast.Error;
+
+/// Result should be freed with tree.deinit() when there are
+/// no more references to any of the tokens or nodes.
+pub fn parse(allocator: &mem.Allocator, source: []const u8) !ast.Tree {
+ var tree_arena = std.heap.ArenaAllocator.init(allocator);
+ errdefer tree_arena.deinit();
+
+ var stack = std.ArrayList(State).init(allocator);
+ defer stack.deinit();
+
+ const arena = &tree_arena.allocator;
+ const root_node = try createNode(arena, ast.Node.Root,
+ ast.Node.Root {
+ .base = undefined,
+ .decls = ast.Node.Root.DeclList.init(arena),
+ .doc_comments = null,
+ // initialized when we get the eof token
+ .eof_token = undefined,
+ }
+ );
+
+ var tree = ast.Tree {
+ .source = source,
+ .root_node = root_node,
+ .arena_allocator = tree_arena,
+ .tokens = ast.Tree.TokenList.init(arena),
+ .errors = ast.Tree.ErrorList.init(arena),
+ };
+
+ var tokenizer = Tokenizer.init(tree.source);
+ while (true) {
+ const token_ptr = try tree.tokens.addOne();
+ *token_ptr = tokenizer.next();
+ if (token_ptr.id == Token.Id.Eof)
+ break;
+ }
+ var tok_it = tree.tokens.iterator(0);
+
+ try stack.append(State.TopLevel);
+
+ while (true) {
+ // This gives us 1 free push that can't fail
+ const state = stack.pop();
+
+ switch (state) {
+ State.TopLevel => {
+ while (try eatLineComment(arena, &tok_it, &tree)) |line_comment| {
+ try root_node.decls.push(&line_comment.base);
+ }
+
+ const comments = try eatDocComments(arena, &tok_it, &tree);
+
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ switch (token_ptr.id) {
+ Token.Id.Keyword_test => {
+ stack.append(State.TopLevel) catch unreachable;
+
+ const block = try arena.construct(ast.Node.Block {
+ .base = ast.Node {
+ .id = ast.Node.Id.Block,
+ },
+ .label = null,
+ .lbrace = undefined,
+ .statements = ast.Node.Block.StatementList.init(arena),
+ .rbrace = undefined,
+ });
+ const test_node = try arena.construct(ast.Node.TestDecl {
+ .base = ast.Node {
+ .id = ast.Node.Id.TestDecl,
+ },
+ .doc_comments = comments,
+ .test_token = token_index,
+ .name = undefined,
+ .body_node = &block.base,
+ });
+ try root_node.decls.push(&test_node.base);
+ try stack.append(State { .Block = block });
+ try stack.append(State {
+ .ExpectTokenSave = ExpectTokenSave {
+ .id = Token.Id.LBrace,
+ .ptr = &block.rbrace,
+ }
+ });
+ try stack.append(State { .StringLiteral = OptionalCtx { .Required = &test_node.name } });
+ continue;
+ },
+ Token.Id.Eof => {
+ root_node.eof_token = token_index;
+ root_node.doc_comments = comments;
+ return tree;
+ },
+ Token.Id.Keyword_pub => {
+ stack.append(State.TopLevel) catch unreachable;
+ try stack.append(State {
+ .TopLevelExtern = TopLevelDeclCtx {
+ .decls = &root_node.decls,
+ .visib_token = token_index,
+ .extern_export_inline_token = null,
+ .lib_name = null,
+ .comments = comments,
+ }
+ });
+ continue;
+ },
+ Token.Id.Keyword_comptime => {
+ const block = try createNode(arena, ast.Node.Block,
+ ast.Node.Block {
+ .base = undefined,
+ .label = null,
+ .lbrace = undefined,
+ .statements = ast.Node.Block.StatementList.init(arena),
+ .rbrace = undefined,
+ }
+ );
+ const node = try arena.construct(ast.Node.Comptime {
+ .base = ast.Node {
+ .id = ast.Node.Id.Comptime,
+ },
+ .comptime_token = token_index,
+ .expr = &block.base,
+ .doc_comments = comments,
+ });
+ try root_node.decls.push(&node.base);
+
+ stack.append(State.TopLevel) catch unreachable;
+ try stack.append(State { .Block = block });
+ try stack.append(State {
+ .ExpectTokenSave = ExpectTokenSave {
+ .id = Token.Id.LBrace,
+ .ptr = &block.rbrace,
+ }
+ });
+ continue;
+ },
+ else => {
+ putBackToken(&tok_it, &tree);
+ stack.append(State.TopLevel) catch unreachable;
+ try stack.append(State {
+ .TopLevelExtern = TopLevelDeclCtx {
+ .decls = &root_node.decls,
+ .visib_token = null,
+ .extern_export_inline_token = null,
+ .lib_name = null,
+ .comments = comments,
+ }
+ });
+ continue;
+ },
+ }
+ },
+ State.TopLevelExtern => |ctx| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ switch (token_ptr.id) {
+ Token.Id.Keyword_export, Token.Id.Keyword_inline => {
+ stack.append(State {
+ .TopLevelDecl = TopLevelDeclCtx {
+ .decls = ctx.decls,
+ .visib_token = ctx.visib_token,
+ .extern_export_inline_token = AnnotatedToken {
+ .index = token_index,
+ .ptr = token_ptr,
+ },
+ .lib_name = null,
+ .comments = ctx.comments,
+ },
+ }) catch unreachable;
+ continue;
+ },
+ Token.Id.Keyword_extern => {
+ stack.append(State {
+ .TopLevelLibname = TopLevelDeclCtx {
+ .decls = ctx.decls,
+ .visib_token = ctx.visib_token,
+ .extern_export_inline_token = AnnotatedToken {
+ .index = token_index,
+ .ptr = token_ptr,
+ },
+ .lib_name = null,
+ .comments = ctx.comments,
+ },
+ }) catch unreachable;
+ continue;
+ },
+ else => {
+ putBackToken(&tok_it, &tree);
+ stack.append(State { .TopLevelDecl = ctx }) catch unreachable;
+ continue;
+ }
+ }
+ },
+ State.TopLevelLibname => |ctx| {
+ const lib_name = blk: {
+ const lib_name_token = nextToken(&tok_it, &tree);
+ const lib_name_token_index = lib_name_token.index;
+ const lib_name_token_ptr = lib_name_token.ptr;
+ break :blk (try parseStringLiteral(arena, &tok_it, lib_name_token_ptr, lib_name_token_index, &tree)) ?? {
+ putBackToken(&tok_it, &tree);
+ break :blk null;
+ };
+ };
+
+ stack.append(State {
+ .TopLevelDecl = TopLevelDeclCtx {
+ .decls = ctx.decls,
+ .visib_token = ctx.visib_token,
+ .extern_export_inline_token = ctx.extern_export_inline_token,
+ .lib_name = lib_name,
+ .comments = ctx.comments,
+ },
+ }) catch unreachable;
+ continue;
+ },
+ State.TopLevelDecl => |ctx| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ switch (token_ptr.id) {
+ Token.Id.Keyword_use => {
+ if (ctx.extern_export_inline_token) |annotated_token| {
+ *(try tree.errors.addOne()) = Error {
+ .InvalidToken = Error.InvalidToken { .token = annotated_token.index },
+ };
+ return tree;
+ }
+
+ const node = try arena.construct(ast.Node.Use {
+ .base = ast.Node {.id = ast.Node.Id.Use },
+ .visib_token = ctx.visib_token,
+ .expr = undefined,
+ .semicolon_token = undefined,
+ .doc_comments = ctx.comments,
+ });
+ try ctx.decls.push(&node.base);
+
+ stack.append(State {
+ .ExpectTokenSave = ExpectTokenSave {
+ .id = Token.Id.Semicolon,
+ .ptr = &node.semicolon_token,
+ }
+ }) catch unreachable;
+ try stack.append(State { .Expression = OptionalCtx { .Required = &node.expr } });
+ continue;
+ },
+ Token.Id.Keyword_var, Token.Id.Keyword_const => {
+ if (ctx.extern_export_inline_token) |annotated_token| {
+ if (annotated_token.ptr.id == Token.Id.Keyword_inline) {
+ *(try tree.errors.addOne()) = Error {
+ .InvalidToken = Error.InvalidToken { .token = annotated_token.index },
+ };
+ return tree;
+ }
+ }
+
+ try stack.append(State {
+ .VarDecl = VarDeclCtx {
+ .comments = ctx.comments,
+ .visib_token = ctx.visib_token,
+ .lib_name = ctx.lib_name,
+ .comptime_token = null,
+ .extern_export_token = if (ctx.extern_export_inline_token) |at| at.index else null,
+ .mut_token = token_index,
+ .list = ctx.decls
+ }
+ });
+ continue;
+ },
+ Token.Id.Keyword_fn, Token.Id.Keyword_nakedcc,
+ Token.Id.Keyword_stdcallcc, Token.Id.Keyword_async => {
+ const fn_proto = try arena.construct(ast.Node.FnProto {
+ .base = ast.Node {
+ .id = ast.Node.Id.FnProto,
+ },
+ .doc_comments = ctx.comments,
+ .visib_token = ctx.visib_token,
+ .name_token = null,
+ .fn_token = undefined,
+ .params = ast.Node.FnProto.ParamList.init(arena),
+ .return_type = undefined,
+ .var_args_token = null,
+ .extern_export_inline_token = if (ctx.extern_export_inline_token) |at| at.index else null,
+ .cc_token = null,
+ .async_attr = null,
+ .body_node = null,
+ .lib_name = ctx.lib_name,
+ .align_expr = null,
+ });
+ try ctx.decls.push(&fn_proto.base);
+ stack.append(State { .FnDef = fn_proto }) catch unreachable;
+ try stack.append(State { .FnProto = fn_proto });
+
+ switch (token_ptr.id) {
+ Token.Id.Keyword_nakedcc, Token.Id.Keyword_stdcallcc => {
+ fn_proto.cc_token = token_index;
+ try stack.append(State {
+ .ExpectTokenSave = ExpectTokenSave {
+ .id = Token.Id.Keyword_fn,
+ .ptr = &fn_proto.fn_token,
+ }
+ });
+ continue;
+ },
+ Token.Id.Keyword_async => {
+ const async_node = try createNode(arena, ast.Node.AsyncAttribute,
+ ast.Node.AsyncAttribute {
+ .base = undefined,
+ .async_token = token_index,
+ .allocator_type = null,
+ .rangle_bracket = null,
+ }
+ );
+ fn_proto.async_attr = async_node;
+
+ try stack.append(State {
+ .ExpectTokenSave = ExpectTokenSave {
+ .id = Token.Id.Keyword_fn,
+ .ptr = &fn_proto.fn_token,
+ }
+ });
+ try stack.append(State { .AsyncAllocator = async_node });
+ continue;
+ },
+ Token.Id.Keyword_fn => {
+ fn_proto.fn_token = token_index;
+ continue;
+ },
+ else => unreachable,
+ }
+ },
+ else => {
+ *(try tree.errors.addOne()) = Error {
+ .ExpectedVarDeclOrFn = Error.ExpectedVarDeclOrFn { .token = token_index },
+ };
+ return tree;
+ },
+ }
+ },
+ State.TopLevelExternOrField => |ctx| {
+ if (eatToken(&tok_it, &tree, Token.Id.Identifier)) |identifier| {
+ std.debug.assert(ctx.container_decl.kind == ast.Node.ContainerDecl.Kind.Struct);
+ const node = try arena.construct(ast.Node.StructField {
+ .base = ast.Node {
+ .id = ast.Node.Id.StructField,
+ },
+ .doc_comments = ctx.comments,
+ .visib_token = ctx.visib_token,
+ .name_token = identifier,
+ .type_expr = undefined,
+ });
+ const node_ptr = try ctx.container_decl.fields_and_decls.addOne();
+ *node_ptr = &node.base;
+
+ stack.append(State { .FieldListCommaOrEnd = ctx.container_decl }) catch unreachable;
+ try stack.append(State { .Expression = OptionalCtx { .Required = &node.type_expr } });
+ try stack.append(State { .ExpectToken = Token.Id.Colon });
+ continue;
+ }
+
+ stack.append(State{ .ContainerDecl = ctx.container_decl }) catch unreachable;
+ try stack.append(State {
+ .TopLevelExtern = TopLevelDeclCtx {
+ .decls = &ctx.container_decl.fields_and_decls,
+ .visib_token = ctx.visib_token,
+ .extern_export_inline_token = null,
+ .lib_name = null,
+ .comments = ctx.comments,
+ }
+ });
+ continue;
+ },
+
+ State.FieldInitValue => |ctx| {
+ const eq_tok = nextToken(&tok_it, &tree);
+ const eq_tok_index = eq_tok.index;
+ const eq_tok_ptr = eq_tok.ptr;
+ if (eq_tok_ptr.id != Token.Id.Equal) {
+ putBackToken(&tok_it, &tree);
+ continue;
+ }
+ stack.append(State { .Expression = ctx }) catch unreachable;
+ continue;
+ },
+
+ State.ContainerKind => |ctx| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ const node = try createToCtxNode(arena, ctx.opt_ctx, ast.Node.ContainerDecl,
+ ast.Node.ContainerDecl {
+ .base = undefined,
+ .ltoken = ctx.ltoken,
+ .layout = ctx.layout,
+ .kind = switch (token_ptr.id) {
+ Token.Id.Keyword_struct => ast.Node.ContainerDecl.Kind.Struct,
+ Token.Id.Keyword_union => ast.Node.ContainerDecl.Kind.Union,
+ Token.Id.Keyword_enum => ast.Node.ContainerDecl.Kind.Enum,
+ else => {
+ *(try tree.errors.addOne()) = Error {
+ .ExpectedAggregateKw = Error.ExpectedAggregateKw { .token = token_index },
+ };
+ return tree;
+ },
+ },
+ .init_arg_expr = ast.Node.ContainerDecl.InitArg.None,
+ .fields_and_decls = ast.Node.ContainerDecl.DeclList.init(arena),
+ .rbrace_token = undefined,
+ }
+ );
+
+ stack.append(State { .ContainerDecl = node }) catch unreachable;
+ try stack.append(State { .ExpectToken = Token.Id.LBrace });
+ try stack.append(State { .ContainerInitArgStart = node });
+ continue;
+ },
+
+ State.ContainerInitArgStart => |container_decl| {
+ if (eatToken(&tok_it, &tree, Token.Id.LParen) == null) {
+ continue;
+ }
+
+ stack.append(State { .ExpectToken = Token.Id.RParen }) catch unreachable;
+ try stack.append(State { .ContainerInitArg = container_decl });
+ continue;
+ },
+
+ State.ContainerInitArg => |container_decl| {
+ const init_arg_token = nextToken(&tok_it, &tree);
+ const init_arg_token_index = init_arg_token.index;
+ const init_arg_token_ptr = init_arg_token.ptr;
+ switch (init_arg_token_ptr.id) {
+ Token.Id.Keyword_enum => {
+ container_decl.init_arg_expr = ast.Node.ContainerDecl.InitArg {.Enum = null};
+ const lparen_tok = nextToken(&tok_it, &tree);
+ const lparen_tok_index = lparen_tok.index;
+ const lparen_tok_ptr = lparen_tok.ptr;
+ if (lparen_tok_ptr.id == Token.Id.LParen) {
+ try stack.append(State { .ExpectToken = Token.Id.RParen } );
+ try stack.append(State { .Expression = OptionalCtx {
+ .RequiredNull = &container_decl.init_arg_expr.Enum,
+ } });
+ } else {
+ putBackToken(&tok_it, &tree);
+ }
+ },
+ else => {
+ putBackToken(&tok_it, &tree);
+ container_decl.init_arg_expr = ast.Node.ContainerDecl.InitArg { .Type = undefined };
+ stack.append(State { .Expression = OptionalCtx { .Required = &container_decl.init_arg_expr.Type } }) catch unreachable;
+ },
+ }
+ continue;
+ },
+
+ State.ContainerDecl => |container_decl| {
+ while (try eatLineComment(arena, &tok_it, &tree)) |line_comment| {
+ try container_decl.fields_and_decls.push(&line_comment.base);
+ }
+
+ const comments = try eatDocComments(arena, &tok_it, &tree);
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ switch (token_ptr.id) {
+ Token.Id.Identifier => {
+ switch (container_decl.kind) {
+ ast.Node.ContainerDecl.Kind.Struct => {
+ const node = try arena.construct(ast.Node.StructField {
+ .base = ast.Node {
+ .id = ast.Node.Id.StructField,
+ },
+ .doc_comments = comments,
+ .visib_token = null,
+ .name_token = token_index,
+ .type_expr = undefined,
+ });
+ const node_ptr = try container_decl.fields_and_decls.addOne();
+ *node_ptr = &node.base;
+
+ try stack.append(State { .FieldListCommaOrEnd = container_decl });
+ try stack.append(State { .TypeExprBegin = OptionalCtx { .Required = &node.type_expr } });
+ try stack.append(State { .ExpectToken = Token.Id.Colon });
+ continue;
+ },
+ ast.Node.ContainerDecl.Kind.Union => {
+ const node = try arena.construct(ast.Node.UnionTag {
+ .base = ast.Node {.id = ast.Node.Id.UnionTag },
+ .name_token = token_index,
+ .type_expr = null,
+ .value_expr = null,
+ .doc_comments = comments,
+ });
+ try container_decl.fields_and_decls.push(&node.base);
+
+ stack.append(State { .FieldListCommaOrEnd = container_decl }) catch unreachable;
+ try stack.append(State { .FieldInitValue = OptionalCtx { .RequiredNull = &node.value_expr } });
+ try stack.append(State { .TypeExprBegin = OptionalCtx { .RequiredNull = &node.type_expr } });
+ try stack.append(State { .IfToken = Token.Id.Colon });
+ continue;
+ },
+ ast.Node.ContainerDecl.Kind.Enum => {
+ const node = try arena.construct(ast.Node.EnumTag {
+ .base = ast.Node { .id = ast.Node.Id.EnumTag },
+ .name_token = token_index,
+ .value = null,
+ .doc_comments = comments,
+ });
+ try container_decl.fields_and_decls.push(&node.base);
+
+ stack.append(State { .FieldListCommaOrEnd = container_decl }) catch unreachable;
+ try stack.append(State { .Expression = OptionalCtx { .RequiredNull = &node.value } });
+ try stack.append(State { .IfToken = Token.Id.Equal });
+ continue;
+ },
+ }
+ },
+ Token.Id.Keyword_pub => {
+ switch (container_decl.kind) {
+ ast.Node.ContainerDecl.Kind.Struct => {
+ try stack.append(State {
+ .TopLevelExternOrField = TopLevelExternOrFieldCtx {
+ .visib_token = token_index,
+ .container_decl = container_decl,
+ .comments = comments,
+ }
+ });
+ continue;
+ },
+ else => {
+ stack.append(State{ .ContainerDecl = container_decl }) catch unreachable;
+ try stack.append(State {
+ .TopLevelExtern = TopLevelDeclCtx {
+ .decls = &container_decl.fields_and_decls,
+ .visib_token = token_index,
+ .extern_export_inline_token = null,
+ .lib_name = null,
+ .comments = comments,
+ }
+ });
+ continue;
+ }
+ }
+ },
+ Token.Id.Keyword_export => {
+ stack.append(State{ .ContainerDecl = container_decl }) catch unreachable;
+ try stack.append(State {
+ .TopLevelExtern = TopLevelDeclCtx {
+ .decls = &container_decl.fields_and_decls,
+ .visib_token = token_index,
+ .extern_export_inline_token = null,
+ .lib_name = null,
+ .comments = comments,
+ }
+ });
+ continue;
+ },
+ Token.Id.RBrace => {
+ if (comments != null) {
+ *(try tree.errors.addOne()) = Error {
+ .UnattachedDocComment = Error.UnattachedDocComment { .token = token_index },
+ };
+ return tree;
+ }
+ container_decl.rbrace_token = token_index;
+ continue;
+ },
+ else => {
+ putBackToken(&tok_it, &tree);
+ stack.append(State{ .ContainerDecl = container_decl }) catch unreachable;
+ try stack.append(State {
+ .TopLevelExtern = TopLevelDeclCtx {
+ .decls = &container_decl.fields_and_decls,
+ .visib_token = null,
+ .extern_export_inline_token = null,
+ .lib_name = null,
+ .comments = comments,
+ }
+ });
+ continue;
+ }
+ }
+ },
+
+
+ State.VarDecl => |ctx| {
+ const var_decl = try arena.construct(ast.Node.VarDecl {
+ .base = ast.Node {
+ .id = ast.Node.Id.VarDecl,
+ },
+ .doc_comments = ctx.comments,
+ .visib_token = ctx.visib_token,
+ .mut_token = ctx.mut_token,
+ .comptime_token = ctx.comptime_token,
+ .extern_export_token = ctx.extern_export_token,
+ .type_node = null,
+ .align_node = null,
+ .init_node = null,
+ .lib_name = ctx.lib_name,
+ // initialized later
+ .name_token = undefined,
+ .eq_token = undefined,
+ .semicolon_token = undefined,
+ });
+ try ctx.list.push(&var_decl.base);
+
+ try stack.append(State { .VarDeclAlign = var_decl });
+ try stack.append(State { .TypeExprBegin = OptionalCtx { .RequiredNull = &var_decl.type_node} });
+ try stack.append(State { .IfToken = Token.Id.Colon });
+ try stack.append(State {
+ .ExpectTokenSave = ExpectTokenSave {
+ .id = Token.Id.Identifier,
+ .ptr = &var_decl.name_token,
+ }
+ });
+ continue;
+ },
+ State.VarDeclAlign => |var_decl| {
+ try stack.append(State { .VarDeclEq = var_decl });
+
+ const next_token = nextToken(&tok_it, &tree);
+ const next_token_index = next_token.index;
+ const next_token_ptr = next_token.ptr;
+ if (next_token_ptr.id == Token.Id.Keyword_align) {
+ try stack.append(State { .ExpectToken = Token.Id.RParen });
+ try stack.append(State { .Expression = OptionalCtx { .RequiredNull = &var_decl.align_node} });
+ try stack.append(State { .ExpectToken = Token.Id.LParen });
+ continue;
+ }
+
+ putBackToken(&tok_it, &tree);
+ continue;
+ },
+ State.VarDeclEq => |var_decl| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ switch (token_ptr.id) {
+ Token.Id.Equal => {
+ var_decl.eq_token = token_index;
+ stack.append(State {
+ .ExpectTokenSave = ExpectTokenSave {
+ .id = Token.Id.Semicolon,
+ .ptr = &var_decl.semicolon_token,
+ },
+ }) catch unreachable;
+ try stack.append(State { .Expression = OptionalCtx { .RequiredNull = &var_decl.init_node } });
+ continue;
+ },
+ Token.Id.Semicolon => {
+ var_decl.semicolon_token = token_index;
+ continue;
+ },
+ else => {
+ *(try tree.errors.addOne()) = Error {
+ .ExpectedEqOrSemi = Error.ExpectedEqOrSemi { .token = token_index },
+ };
+ return tree;
+ }
+ }
+ },
+
+
+ State.FnDef => |fn_proto| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ switch(token_ptr.id) {
+ Token.Id.LBrace => {
+ const block = try arena.construct(ast.Node.Block {
+ .base = ast.Node { .id = ast.Node.Id.Block },
+ .label = null,
+ .lbrace = token_index,
+ .statements = ast.Node.Block.StatementList.init(arena),
+ .rbrace = undefined,
+ });
+ fn_proto.body_node = &block.base;
+ stack.append(State { .Block = block }) catch unreachable;
+ continue;
+ },
+ Token.Id.Semicolon => continue,
+ else => {
+ *(try tree.errors.addOne()) = Error {
+ .ExpectedSemiOrLBrace = Error.ExpectedSemiOrLBrace { .token = token_index },
+ };
+ return tree;
+ },
+ }
+ },
+ State.FnProto => |fn_proto| {
+ stack.append(State { .FnProtoAlign = fn_proto }) catch unreachable;
+ try stack.append(State { .ParamDecl = fn_proto });
+ try stack.append(State { .ExpectToken = Token.Id.LParen });
+
+ if (eatToken(&tok_it, &tree, Token.Id.Identifier)) |name_token| {
+ fn_proto.name_token = name_token;
+ }
+ continue;
+ },
+ State.FnProtoAlign => |fn_proto| {
+ stack.append(State { .FnProtoReturnType = fn_proto }) catch unreachable;
+
+ if (eatToken(&tok_it, &tree, Token.Id.Keyword_align)) |align_token| {
+ try stack.append(State { .ExpectToken = Token.Id.RParen });
+ try stack.append(State { .Expression = OptionalCtx { .RequiredNull = &fn_proto.align_expr } });
+ try stack.append(State { .ExpectToken = Token.Id.LParen });
+ }
+ continue;
+ },
+ State.FnProtoReturnType => |fn_proto| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ switch (token_ptr.id) {
+ Token.Id.Bang => {
+ fn_proto.return_type = ast.Node.FnProto.ReturnType { .InferErrorSet = undefined };
+ stack.append(State {
+ .TypeExprBegin = OptionalCtx { .Required = &fn_proto.return_type.InferErrorSet },
+ }) catch unreachable;
+ continue;
+ },
+ else => {
+ // TODO: this is a special case. Remove this when #760 is fixed
+ if (token_ptr.id == Token.Id.Keyword_error) {
+ if ((??tok_it.peek()).id == Token.Id.LBrace) {
+ const error_type_node = try arena.construct(ast.Node.ErrorType {
+ .base = ast.Node { .id = ast.Node.Id.ErrorType },
+ .token = token_index,
+ });
+ fn_proto.return_type = ast.Node.FnProto.ReturnType {
+ .Explicit = &error_type_node.base,
+ };
+ continue;
+ }
+ }
+
+ putBackToken(&tok_it, &tree);
+ fn_proto.return_type = ast.Node.FnProto.ReturnType { .Explicit = undefined };
+ stack.append(State { .TypeExprBegin = OptionalCtx { .Required = &fn_proto.return_type.Explicit }, }) catch unreachable;
+ continue;
+ },
+ }
+ },
+
+
+ State.ParamDecl => |fn_proto| {
+ if (eatToken(&tok_it, &tree, Token.Id.RParen)) |_| {
+ continue;
+ }
+ const param_decl = try arena.construct(ast.Node.ParamDecl {
+ .base = ast.Node {.id = ast.Node.Id.ParamDecl },
+ .comptime_token = null,
+ .noalias_token = null,
+ .name_token = null,
+ .type_node = undefined,
+ .var_args_token = null,
+ });
+ try fn_proto.params.push(¶m_decl.base);
+
+ stack.append(State {
+ .ParamDeclEnd = ParamDeclEndCtx {
+ .param_decl = param_decl,
+ .fn_proto = fn_proto,
+ }
+ }) catch unreachable;
+ try stack.append(State { .ParamDeclName = param_decl });
+ try stack.append(State { .ParamDeclAliasOrComptime = param_decl });
+ continue;
+ },
+ State.ParamDeclAliasOrComptime => |param_decl| {
+ if (eatToken(&tok_it, &tree, Token.Id.Keyword_comptime)) |comptime_token| {
+ param_decl.comptime_token = comptime_token;
+ } else if (eatToken(&tok_it, &tree, Token.Id.Keyword_noalias)) |noalias_token| {
+ param_decl.noalias_token = noalias_token;
+ }
+ continue;
+ },
+ State.ParamDeclName => |param_decl| {
+ // TODO: Here, we eat two tokens in one state. This means that we can't have
+ // comments between these two tokens.
+ if (eatToken(&tok_it, &tree, Token.Id.Identifier)) |ident_token| {
+ if (eatToken(&tok_it, &tree, Token.Id.Colon)) |_| {
+ param_decl.name_token = ident_token;
+ } else {
+ putBackToken(&tok_it, &tree);
+ }
+ }
+ continue;
+ },
+ State.ParamDeclEnd => |ctx| {
+ if (eatToken(&tok_it, &tree, Token.Id.Ellipsis3)) |ellipsis3| {
+ ctx.param_decl.var_args_token = ellipsis3;
+ stack.append(State { .ExpectToken = Token.Id.RParen }) catch unreachable;
+ continue;
+ }
+
+ try stack.append(State { .ParamDeclComma = ctx.fn_proto });
+ try stack.append(State {
+ .TypeExprBegin = OptionalCtx { .Required = &ctx.param_decl.type_node }
+ });
+ continue;
+ },
+ State.ParamDeclComma => |fn_proto| {
+ switch (expectCommaOrEnd(&tok_it, &tree, Token.Id.RParen)) {
+ ExpectCommaOrEndResult.end_token => |t| {
+ if (t == null) {
+ stack.append(State { .ParamDecl = fn_proto }) catch unreachable;
+ }
+ continue;
+ },
+ ExpectCommaOrEndResult.parse_error => |e| {
+ try tree.errors.push(e);
+ return tree;
+ },
+ }
+ },
+
+ State.MaybeLabeledExpression => |ctx| {
+ if (eatToken(&tok_it, &tree, Token.Id.Colon)) |_| {
+ stack.append(State {
+ .LabeledExpression = LabelCtx {
+ .label = ctx.label,
+ .opt_ctx = ctx.opt_ctx,
+ }
+ }) catch unreachable;
+ continue;
+ }
+
+ _ = try createToCtxLiteral(arena, ctx.opt_ctx, ast.Node.Identifier, ctx.label);
+ continue;
+ },
+ State.LabeledExpression => |ctx| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ switch (token_ptr.id) {
+ Token.Id.LBrace => {
+ const block = try createToCtxNode(arena, ctx.opt_ctx, ast.Node.Block,
+ ast.Node.Block {
+ .base = undefined,
+ .label = ctx.label,
+ .lbrace = token_index,
+ .statements = ast.Node.Block.StatementList.init(arena),
+ .rbrace = undefined,
+ }
+ );
+ stack.append(State { .Block = block }) catch unreachable;
+ continue;
+ },
+ Token.Id.Keyword_while => {
+ stack.append(State {
+ .While = LoopCtx {
+ .label = ctx.label,
+ .inline_token = null,
+ .loop_token = token_index,
+ .opt_ctx = ctx.opt_ctx.toRequired(),
+ }
+ }) catch unreachable;
+ continue;
+ },
+ Token.Id.Keyword_for => {
+ stack.append(State {
+ .For = LoopCtx {
+ .label = ctx.label,
+ .inline_token = null,
+ .loop_token = token_index,
+ .opt_ctx = ctx.opt_ctx.toRequired(),
+ }
+ }) catch unreachable;
+ continue;
+ },
+ Token.Id.Keyword_suspend => {
+ const node = try arena.construct(ast.Node.Suspend {
+ .base = ast.Node {
+ .id = ast.Node.Id.Suspend,
+ },
+ .label = ctx.label,
+ .suspend_token = token_index,
+ .payload = null,
+ .body = null,
+ });
+ ctx.opt_ctx.store(&node.base);
+ stack.append(State { .SuspendBody = node }) catch unreachable;
+ try stack.append(State { .Payload = OptionalCtx { .Optional = &node.payload } });
+ continue;
+ },
+ Token.Id.Keyword_inline => {
+ stack.append(State {
+ .Inline = InlineCtx {
+ .label = ctx.label,
+ .inline_token = token_index,
+ .opt_ctx = ctx.opt_ctx.toRequired(),
+ }
+ }) catch unreachable;
+ continue;
+ },
+ else => {
+ if (ctx.opt_ctx != OptionalCtx.Optional) {
+ *(try tree.errors.addOne()) = Error {
+ .ExpectedLabelable = Error.ExpectedLabelable { .token = token_index },
+ };
+ return tree;
+ }
+
+ putBackToken(&tok_it, &tree);
+ continue;
+ },
+ }
+ },
+ State.Inline => |ctx| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ switch (token_ptr.id) {
+ Token.Id.Keyword_while => {
+ stack.append(State {
+ .While = LoopCtx {
+ .inline_token = ctx.inline_token,
+ .label = ctx.label,
+ .loop_token = token_index,
+ .opt_ctx = ctx.opt_ctx.toRequired(),
+ }
+ }) catch unreachable;
+ continue;
+ },
+ Token.Id.Keyword_for => {
+ stack.append(State {
+ .For = LoopCtx {
+ .inline_token = ctx.inline_token,
+ .label = ctx.label,
+ .loop_token = token_index,
+ .opt_ctx = ctx.opt_ctx.toRequired(),
+ }
+ }) catch unreachable;
+ continue;
+ },
+ else => {
+ if (ctx.opt_ctx != OptionalCtx.Optional) {
+ *(try tree.errors.addOne()) = Error {
+ .ExpectedInlinable = Error.ExpectedInlinable { .token = token_index },
+ };
+ return tree;
+ }
+
+ putBackToken(&tok_it, &tree);
+ continue;
+ },
+ }
+ },
+ State.While => |ctx| {
+ const node = try createToCtxNode(arena, ctx.opt_ctx, ast.Node.While,
+ ast.Node.While {
+ .base = undefined,
+ .label = ctx.label,
+ .inline_token = ctx.inline_token,
+ .while_token = ctx.loop_token,
+ .condition = undefined,
+ .payload = null,
+ .continue_expr = null,
+ .body = undefined,
+ .@"else" = null,
+ }
+ );
+ stack.append(State { .Else = &node.@"else" }) catch unreachable;
+ try stack.append(State { .Expression = OptionalCtx { .Required = &node.body } });
+ try stack.append(State { .WhileContinueExpr = &node.continue_expr });
+ try stack.append(State { .IfToken = Token.Id.Colon });
+ try stack.append(State { .PointerPayload = OptionalCtx { .Optional = &node.payload } });
+ try stack.append(State { .ExpectToken = Token.Id.RParen });
+ try stack.append(State { .Expression = OptionalCtx { .Required = &node.condition } });
+ try stack.append(State { .ExpectToken = Token.Id.LParen });
+ continue;
+ },
+ State.WhileContinueExpr => |dest| {
+ stack.append(State { .ExpectToken = Token.Id.RParen }) catch unreachable;
+ try stack.append(State { .AssignmentExpressionBegin = OptionalCtx { .RequiredNull = dest } });
+ try stack.append(State { .ExpectToken = Token.Id.LParen });
+ continue;
+ },
+ State.For => |ctx| {
+ const node = try createToCtxNode(arena, ctx.opt_ctx, ast.Node.For,
+ ast.Node.For {
+ .base = undefined,
+ .label = ctx.label,
+ .inline_token = ctx.inline_token,
+ .for_token = ctx.loop_token,
+ .array_expr = undefined,
+ .payload = null,
+ .body = undefined,
+ .@"else" = null,
+ }
+ );
+ stack.append(State { .Else = &node.@"else" }) catch unreachable;
+ try stack.append(State { .Expression = OptionalCtx { .Required = &node.body } });
+ try stack.append(State { .PointerIndexPayload = OptionalCtx { .Optional = &node.payload } });
+ try stack.append(State { .ExpectToken = Token.Id.RParen });
+ try stack.append(State { .Expression = OptionalCtx { .Required = &node.array_expr } });
+ try stack.append(State { .ExpectToken = Token.Id.LParen });
+ continue;
+ },
+ State.Else => |dest| {
+ if (eatToken(&tok_it, &tree, Token.Id.Keyword_else)) |else_token| {
+ const node = try createNode(arena, ast.Node.Else,
+ ast.Node.Else {
+ .base = undefined,
+ .else_token = else_token,
+ .payload = null,
+ .body = undefined,
+ }
+ );
+ *dest = node;
+
+ stack.append(State { .Expression = OptionalCtx { .Required = &node.body } }) catch unreachable;
+ try stack.append(State { .Payload = OptionalCtx { .Optional = &node.payload } });
+ continue;
+ } else {
+ continue;
+ }
+ },
+
+
+ State.Block => |block| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ switch (token_ptr.id) {
+ Token.Id.RBrace => {
+ block.rbrace = token_index;
+ continue;
+ },
+ else => {
+ putBackToken(&tok_it, &tree);
+ stack.append(State { .Block = block }) catch unreachable;
+
+ var any_comments = false;
+ while (try eatLineComment(arena, &tok_it, &tree)) |line_comment| {
+ try block.statements.push(&line_comment.base);
+ any_comments = true;
+ }
+ if (any_comments) continue;
+
+ try stack.append(State { .Statement = block });
+ continue;
+ },
+ }
+ },
+ State.Statement => |block| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ switch (token_ptr.id) {
+ Token.Id.Keyword_comptime => {
+ stack.append(State {
+ .ComptimeStatement = ComptimeStatementCtx {
+ .comptime_token = token_index,
+ .block = block,
+ }
+ }) catch unreachable;
+ continue;
+ },
+ Token.Id.Keyword_var, Token.Id.Keyword_const => {
+ stack.append(State {
+ .VarDecl = VarDeclCtx {
+ .comments = null,
+ .visib_token = null,
+ .comptime_token = null,
+ .extern_export_token = null,
+ .lib_name = null,
+ .mut_token = token_index,
+ .list = &block.statements,
+ }
+ }) catch unreachable;
+ continue;
+ },
+ Token.Id.Keyword_defer, Token.Id.Keyword_errdefer => {
+ const node = try arena.construct(ast.Node.Defer {
+ .base = ast.Node {
+ .id = ast.Node.Id.Defer,
+ },
+ .defer_token = token_index,
+ .kind = switch (token_ptr.id) {
+ Token.Id.Keyword_defer => ast.Node.Defer.Kind.Unconditional,
+ Token.Id.Keyword_errdefer => ast.Node.Defer.Kind.Error,
+ else => unreachable,
+ },
+ .expr = undefined,
+ });
+ const node_ptr = try block.statements.addOne();
+ *node_ptr = &node.base;
+
+ stack.append(State { .Semicolon = node_ptr }) catch unreachable;
+ try stack.append(State { .AssignmentExpressionBegin = OptionalCtx{ .Required = &node.expr } });
+ continue;
+ },
+ Token.Id.LBrace => {
+ const inner_block = try arena.construct(ast.Node.Block {
+ .base = ast.Node { .id = ast.Node.Id.Block },
+ .label = null,
+ .lbrace = token_index,
+ .statements = ast.Node.Block.StatementList.init(arena),
+ .rbrace = undefined,
+ });
+ try block.statements.push(&inner_block.base);
+
+ stack.append(State { .Block = inner_block }) catch unreachable;
+ continue;
+ },
+ else => {
+ putBackToken(&tok_it, &tree);
+ const statement = try block.statements.addOne();
+ try stack.append(State { .Semicolon = statement });
+ try stack.append(State { .AssignmentExpressionBegin = OptionalCtx{ .Required = statement } });
+ continue;
+ }
+ }
+ },
+ State.ComptimeStatement => |ctx| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ switch (token_ptr.id) {
+ Token.Id.Keyword_var, Token.Id.Keyword_const => {
+ stack.append(State {
+ .VarDecl = VarDeclCtx {
+ .comments = null,
+ .visib_token = null,
+ .comptime_token = ctx.comptime_token,
+ .extern_export_token = null,
+ .lib_name = null,
+ .mut_token = token_index,
+ .list = &ctx.block.statements,
+ }
+ }) catch unreachable;
+ continue;
+ },
+ else => {
+ putBackToken(&tok_it, &tree);
+ putBackToken(&tok_it, &tree);
+ const statement = try ctx.block.statements.addOne();
+ try stack.append(State { .Semicolon = statement });
+ try stack.append(State { .Expression = OptionalCtx { .Required = statement } });
+ continue;
+ }
+ }
+ },
+ State.Semicolon => |node_ptr| {
+ const node = *node_ptr;
+ if (node.requireSemiColon()) {
+ stack.append(State { .ExpectToken = Token.Id.Semicolon }) catch unreachable;
+ continue;
+ }
+ continue;
+ },
+
+ State.AsmOutputItems => |items| {
+ const lbracket = nextToken(&tok_it, &tree);
+ const lbracket_index = lbracket.index;
+ const lbracket_ptr = lbracket.ptr;
+ if (lbracket_ptr.id != Token.Id.LBracket) {
+ putBackToken(&tok_it, &tree);
+ continue;
+ }
+
+ const node = try createNode(arena, ast.Node.AsmOutput,
+ ast.Node.AsmOutput {
+ .base = undefined,
+ .symbolic_name = undefined,
+ .constraint = undefined,
+ .kind = undefined,
+ }
+ );
+ try items.push(node);
+
+ stack.append(State { .AsmOutputItems = items }) catch unreachable;
+ try stack.append(State { .IfToken = Token.Id.Comma });
+ try stack.append(State { .ExpectToken = Token.Id.RParen });
+ try stack.append(State { .AsmOutputReturnOrType = node });
+ try stack.append(State { .ExpectToken = Token.Id.LParen });
+ try stack.append(State { .StringLiteral = OptionalCtx { .Required = &node.constraint } });
+ try stack.append(State { .ExpectToken = Token.Id.RBracket });
+ try stack.append(State { .Identifier = OptionalCtx { .Required = &node.symbolic_name } });
+ continue;
+ },
+ State.AsmOutputReturnOrType => |node| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ switch (token_ptr.id) {
+ Token.Id.Identifier => {
+ node.kind = ast.Node.AsmOutput.Kind { .Variable = try createLiteral(arena, ast.Node.Identifier, token_index) };
+ continue;
+ },
+ Token.Id.Arrow => {
+ node.kind = ast.Node.AsmOutput.Kind { .Return = undefined };
+ try stack.append(State { .TypeExprBegin = OptionalCtx { .Required = &node.kind.Return } });
+ continue;
+ },
+ else => {
+ *(try tree.errors.addOne()) = Error {
+ .ExpectedAsmOutputReturnOrType = Error.ExpectedAsmOutputReturnOrType {
+ .token = token_index,
+ },
+ };
+ return tree;
+ },
+ }
+ },
+ State.AsmInputItems => |items| {
+ const lbracket = nextToken(&tok_it, &tree);
+ const lbracket_index = lbracket.index;
+ const lbracket_ptr = lbracket.ptr;
+ if (lbracket_ptr.id != Token.Id.LBracket) {
+ putBackToken(&tok_it, &tree);
+ continue;
+ }
+
+ const node = try createNode(arena, ast.Node.AsmInput,
+ ast.Node.AsmInput {
+ .base = undefined,
+ .symbolic_name = undefined,
+ .constraint = undefined,
+ .expr = undefined,
+ }
+ );
+ try items.push(node);
+
+ stack.append(State { .AsmInputItems = items }) catch unreachable;
+ try stack.append(State { .IfToken = Token.Id.Comma });
+ try stack.append(State { .ExpectToken = Token.Id.RParen });
+ try stack.append(State { .Expression = OptionalCtx { .Required = &node.expr } });
+ try stack.append(State { .ExpectToken = Token.Id.LParen });
+ try stack.append(State { .StringLiteral = OptionalCtx { .Required = &node.constraint } });
+ try stack.append(State { .ExpectToken = Token.Id.RBracket });
+ try stack.append(State { .Identifier = OptionalCtx { .Required = &node.symbolic_name } });
+ continue;
+ },
+ State.AsmClobberItems => |items| {
+ stack.append(State { .AsmClobberItems = items }) catch unreachable;
+ try stack.append(State { .IfToken = Token.Id.Comma });
+ try stack.append(State { .StringLiteral = OptionalCtx { .Required = try items.addOne() } });
+ continue;
+ },
+
+
+ State.ExprListItemOrEnd => |list_state| {
+ if (eatToken(&tok_it, &tree, list_state.end)) |token_index| {
+ *list_state.ptr = token_index;
+ continue;
+ }
+
+ stack.append(State { .ExprListCommaOrEnd = list_state }) catch unreachable;
+ try stack.append(State { .Expression = OptionalCtx { .Required = try list_state.list.addOne() } });
+ continue;
+ },
+ State.ExprListCommaOrEnd => |list_state| {
+ switch (expectCommaOrEnd(&tok_it, &tree, list_state.end)) {
+ ExpectCommaOrEndResult.end_token => |maybe_end| if (maybe_end) |end| {
+ *list_state.ptr = end;
+ continue;
+ } else {
+ stack.append(State { .ExprListItemOrEnd = list_state }) catch unreachable;
+ continue;
+ },
+ ExpectCommaOrEndResult.parse_error => |e| {
+ try tree.errors.push(e);
+ return tree;
+ },
+ }
+ },
+ State.FieldInitListItemOrEnd => |list_state| {
+ while (try eatLineComment(arena, &tok_it, &tree)) |line_comment| {
+ try list_state.list.push(&line_comment.base);
+ }
+
+ if (eatToken(&tok_it, &tree, Token.Id.RBrace)) |rbrace| {
+ *list_state.ptr = rbrace;
+ continue;
+ }
+
+ const node = try arena.construct(ast.Node.FieldInitializer {
+ .base = ast.Node {
+ .id = ast.Node.Id.FieldInitializer,
+ },
+ .period_token = undefined,
+ .name_token = undefined,
+ .expr = undefined,
+ });
+ try list_state.list.push(&node.base);
+
+ stack.append(State { .FieldInitListCommaOrEnd = list_state }) catch unreachable;
+ try stack.append(State { .Expression = OptionalCtx{ .Required = &node.expr } });
+ try stack.append(State { .ExpectToken = Token.Id.Equal });
+ try stack.append(State {
+ .ExpectTokenSave = ExpectTokenSave {
+ .id = Token.Id.Identifier,
+ .ptr = &node.name_token,
+ }
+ });
+ try stack.append(State {
+ .ExpectTokenSave = ExpectTokenSave {
+ .id = Token.Id.Period,
+ .ptr = &node.period_token,
+ }
+ });
+ continue;
+ },
+ State.FieldInitListCommaOrEnd => |list_state| {
+ switch (expectCommaOrEnd(&tok_it, &tree, Token.Id.RBrace)) {
+ ExpectCommaOrEndResult.end_token => |maybe_end| if (maybe_end) |end| {
+ *list_state.ptr = end;
+ continue;
+ } else {
+ stack.append(State { .FieldInitListItemOrEnd = list_state }) catch unreachable;
+ continue;
+ },
+ ExpectCommaOrEndResult.parse_error => |e| {
+ try tree.errors.push(e);
+ return tree;
+ },
+ }
+ },
+ State.FieldListCommaOrEnd => |container_decl| {
+ switch (expectCommaOrEnd(&tok_it, &tree, Token.Id.RBrace)) {
+ ExpectCommaOrEndResult.end_token => |maybe_end| if (maybe_end) |end| {
+ container_decl.rbrace_token = end;
+ continue;
+ } else {
+ try stack.append(State { .ContainerDecl = container_decl });
+ continue;
+ },
+ ExpectCommaOrEndResult.parse_error => |e| {
+ try tree.errors.push(e);
+ return tree;
+ },
+ }
+ },
+ State.ErrorTagListItemOrEnd => |list_state| {
+ while (try eatLineComment(arena, &tok_it, &tree)) |line_comment| {
+ try list_state.list.push(&line_comment.base);
+ }
+
+ if (eatToken(&tok_it, &tree, Token.Id.RBrace)) |rbrace| {
+ *list_state.ptr = rbrace;
+ continue;
+ }
+
+ const node_ptr = try list_state.list.addOne();
+
+ try stack.append(State { .ErrorTagListCommaOrEnd = list_state });
+ try stack.append(State { .ErrorTag = node_ptr });
+ continue;
+ },
+ State.ErrorTagListCommaOrEnd => |list_state| {
+ switch (expectCommaOrEnd(&tok_it, &tree, Token.Id.RBrace)) {
+ ExpectCommaOrEndResult.end_token => |maybe_end| if (maybe_end) |end| {
+ *list_state.ptr = end;
+ continue;
+ } else {
+ stack.append(State { .ErrorTagListItemOrEnd = list_state }) catch unreachable;
+ continue;
+ },
+ ExpectCommaOrEndResult.parse_error => |e| {
+ try tree.errors.push(e);
+ return tree;
+ },
+ }
+ },
+ State.SwitchCaseOrEnd => |list_state| {
+ while (try eatLineComment(arena, &tok_it, &tree)) |line_comment| {
+ try list_state.list.push(&line_comment.base);
+ }
+
+ if (eatToken(&tok_it, &tree, Token.Id.RBrace)) |rbrace| {
+ *list_state.ptr = rbrace;
+ continue;
+ }
+
+ const comments = try eatDocComments(arena, &tok_it, &tree);
+ const node = try arena.construct(ast.Node.SwitchCase {
+ .base = ast.Node {
+ .id = ast.Node.Id.SwitchCase,
+ },
+ .items = ast.Node.SwitchCase.ItemList.init(arena),
+ .payload = null,
+ .expr = undefined,
+ });
+ try list_state.list.push(&node.base);
+ try stack.append(State { .SwitchCaseCommaOrEnd = list_state });
+ try stack.append(State { .AssignmentExpressionBegin = OptionalCtx { .Required = &node.expr } });
+ try stack.append(State { .PointerPayload = OptionalCtx { .Optional = &node.payload } });
+ try stack.append(State { .SwitchCaseFirstItem = &node.items });
+
+ continue;
+ },
+
+ State.SwitchCaseCommaOrEnd => |list_state| {
+ switch (expectCommaOrEnd(&tok_it, &tree, Token.Id.RParen)) {
+ ExpectCommaOrEndResult.end_token => |maybe_end| if (maybe_end) |end| {
+ *list_state.ptr = end;
+ continue;
+ } else {
+ try stack.append(State { .SwitchCaseOrEnd = list_state });
+ continue;
+ },
+ ExpectCommaOrEndResult.parse_error => |e| {
+ try tree.errors.push(e);
+ return tree;
+ },
+ }
+ },
+
+ State.SwitchCaseFirstItem => |case_items| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ if (token_ptr.id == Token.Id.Keyword_else) {
+ const else_node = try arena.construct(ast.Node.SwitchElse {
+ .base = ast.Node{ .id = ast.Node.Id.SwitchElse},
+ .token = token_index,
+ });
+ try case_items.push(&else_node.base);
+
+ try stack.append(State { .ExpectToken = Token.Id.EqualAngleBracketRight });
+ continue;
+ } else {
+ putBackToken(&tok_it, &tree);
+ try stack.append(State { .SwitchCaseItem = case_items });
+ continue;
+ }
+ },
+ State.SwitchCaseItem => |case_items| {
+ stack.append(State { .SwitchCaseItemCommaOrEnd = case_items }) catch unreachable;
+ try stack.append(State { .RangeExpressionBegin = OptionalCtx { .Required = try case_items.addOne() } });
+ },
+ State.SwitchCaseItemCommaOrEnd => |case_items| {
+ switch (expectCommaOrEnd(&tok_it, &tree, Token.Id.EqualAngleBracketRight)) {
+ ExpectCommaOrEndResult.end_token => |t| {
+ if (t == null) {
+ stack.append(State { .SwitchCaseItem = case_items }) catch unreachable;
+ }
+ continue;
+ },
+ ExpectCommaOrEndResult.parse_error => |e| {
+ try tree.errors.push(e);
+ return tree;
+ },
+ }
+ continue;
+ },
+
+
+ State.SuspendBody => |suspend_node| {
+ if (suspend_node.payload != null) {
+ try stack.append(State { .AssignmentExpressionBegin = OptionalCtx { .RequiredNull = &suspend_node.body } });
+ }
+ continue;
+ },
+ State.AsyncAllocator => |async_node| {
+ if (eatToken(&tok_it, &tree, Token.Id.AngleBracketLeft) == null) {
+ continue;
+ }
+
+ async_node.rangle_bracket = TokenIndex(0);
+ try stack.append(State {
+ .ExpectTokenSave = ExpectTokenSave {
+ .id = Token.Id.AngleBracketRight,
+ .ptr = &??async_node.rangle_bracket,
+ }
+ });
+ try stack.append(State { .TypeExprBegin = OptionalCtx { .RequiredNull = &async_node.allocator_type } });
+ continue;
+ },
+ State.AsyncEnd => |ctx| {
+ const node = ctx.ctx.get() ?? continue;
+
+ switch (node.id) {
+ ast.Node.Id.FnProto => {
+ const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", node);
+ fn_proto.async_attr = ctx.attribute;
+ continue;
+ },
+ ast.Node.Id.SuffixOp => {
+ const suffix_op = @fieldParentPtr(ast.Node.SuffixOp, "base", node);
+ if (suffix_op.op == @TagType(ast.Node.SuffixOp.Op).Call) {
+ suffix_op.op.Call.async_attr = ctx.attribute;
+ continue;
+ }
+
+ *(try tree.errors.addOne()) = Error {
+ .ExpectedCall = Error.ExpectedCall { .node = node },
+ };
+ return tree;
+ },
+ else => {
+ *(try tree.errors.addOne()) = Error {
+ .ExpectedCallOrFnProto = Error.ExpectedCallOrFnProto { .node = node },
+ };
+ return tree;
+ }
+ }
+ },
+
+
+ State.ExternType => |ctx| {
+ if (eatToken(&tok_it, &tree, Token.Id.Keyword_fn)) |fn_token| {
+ const fn_proto = try arena.construct(ast.Node.FnProto {
+ .base = ast.Node {
+ .id = ast.Node.Id.FnProto,
+ },
+ .doc_comments = ctx.comments,
+ .visib_token = null,
+ .name_token = null,
+ .fn_token = fn_token,
+ .params = ast.Node.FnProto.ParamList.init(arena),
+ .return_type = undefined,
+ .var_args_token = null,
+ .extern_export_inline_token = ctx.extern_token,
+ .cc_token = null,
+ .async_attr = null,
+ .body_node = null,
+ .lib_name = null,
+ .align_expr = null,
+ });
+ ctx.opt_ctx.store(&fn_proto.base);
+ stack.append(State { .FnProto = fn_proto }) catch unreachable;
+ continue;
+ }
+
+ stack.append(State {
+ .ContainerKind = ContainerKindCtx {
+ .opt_ctx = ctx.opt_ctx,
+ .ltoken = ctx.extern_token,
+ .layout = ast.Node.ContainerDecl.Layout.Extern,
+ },
+ }) catch unreachable;
+ continue;
+ },
+ State.SliceOrArrayAccess => |node| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ switch (token_ptr.id) {
+ Token.Id.Ellipsis2 => {
+ const start = node.op.ArrayAccess;
+ node.op = ast.Node.SuffixOp.Op {
+ .Slice = ast.Node.SuffixOp.Op.Slice {
+ .start = start,
+ .end = null,
+ }
+ };
+
+ stack.append(State {
+ .ExpectTokenSave = ExpectTokenSave {
+ .id = Token.Id.RBracket,
+ .ptr = &node.rtoken,
+ }
+ }) catch unreachable;
+ try stack.append(State { .Expression = OptionalCtx { .Optional = &node.op.Slice.end } });
+ continue;
+ },
+ Token.Id.RBracket => {
+ node.rtoken = token_index;
+ continue;
+ },
+ else => {
+ *(try tree.errors.addOne()) = Error {
+ .ExpectedSliceOrRBracket = Error.ExpectedSliceOrRBracket { .token = token_index },
+ };
+ return tree;
+ }
+ }
+ },
+ State.SliceOrArrayType => |node| {
+ if (eatToken(&tok_it, &tree, Token.Id.RBracket)) |_| {
+ node.op = ast.Node.PrefixOp.Op {
+ .SliceType = ast.Node.PrefixOp.AddrOfInfo {
+ .align_expr = null,
+ .bit_offset_start_token = null,
+ .bit_offset_end_token = null,
+ .const_token = null,
+ .volatile_token = null,
+ }
+ };
+ stack.append(State { .TypeExprBegin = OptionalCtx { .Required = &node.rhs } }) catch unreachable;
+ try stack.append(State { .AddrOfModifiers = &node.op.SliceType });
+ continue;
+ }
+
+ node.op = ast.Node.PrefixOp.Op { .ArrayType = undefined };
+ stack.append(State { .TypeExprBegin = OptionalCtx { .Required = &node.rhs } }) catch unreachable;
+ try stack.append(State { .ExpectToken = Token.Id.RBracket });
+ try stack.append(State { .Expression = OptionalCtx { .Required = &node.op.ArrayType } });
+ continue;
+ },
+ State.AddrOfModifiers => |addr_of_info| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ switch (token_ptr.id) {
+ Token.Id.Keyword_align => {
+ stack.append(state) catch unreachable;
+ if (addr_of_info.align_expr != null) {
+ *(try tree.errors.addOne()) = Error {
+ .ExtraAlignQualifier = Error.ExtraAlignQualifier { .token = token_index },
+ };
+ return tree;
+ }
+ try stack.append(State { .ExpectToken = Token.Id.RParen });
+ try stack.append(State { .Expression = OptionalCtx { .RequiredNull = &addr_of_info.align_expr} });
+ try stack.append(State { .ExpectToken = Token.Id.LParen });
+ continue;
+ },
+ Token.Id.Keyword_const => {
+ stack.append(state) catch unreachable;
+ if (addr_of_info.const_token != null) {
+ *(try tree.errors.addOne()) = Error {
+ .ExtraConstQualifier = Error.ExtraConstQualifier { .token = token_index },
+ };
+ return tree;
+ }
+ addr_of_info.const_token = token_index;
+ continue;
+ },
+ Token.Id.Keyword_volatile => {
+ stack.append(state) catch unreachable;
+ if (addr_of_info.volatile_token != null) {
+ *(try tree.errors.addOne()) = Error {
+ .ExtraVolatileQualifier = Error.ExtraVolatileQualifier { .token = token_index },
+ };
+ return tree;
+ }
+ addr_of_info.volatile_token = token_index;
+ continue;
+ },
+ else => {
+ putBackToken(&tok_it, &tree);
+ continue;
+ },
+ }
+ },
+
+
+ State.Payload => |opt_ctx| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ if (token_ptr.id != Token.Id.Pipe) {
+ if (opt_ctx != OptionalCtx.Optional) {
+ *(try tree.errors.addOne()) = Error {
+ .ExpectedToken = Error.ExpectedToken {
+ .token = token_index,
+ .expected_id = Token.Id.Pipe,
+ },
+ };
+ return tree;
+ }
+
+ putBackToken(&tok_it, &tree);
+ continue;
+ }
+
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.Payload,
+ ast.Node.Payload {
+ .base = undefined,
+ .lpipe = token_index,
+ .error_symbol = undefined,
+ .rpipe = undefined
+ }
+ );
+
+ stack.append(State {
+ .ExpectTokenSave = ExpectTokenSave {
+ .id = Token.Id.Pipe,
+ .ptr = &node.rpipe,
+ }
+ }) catch unreachable;
+ try stack.append(State { .Identifier = OptionalCtx { .Required = &node.error_symbol } });
+ continue;
+ },
+ State.PointerPayload => |opt_ctx| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ if (token_ptr.id != Token.Id.Pipe) {
+ if (opt_ctx != OptionalCtx.Optional) {
+ *(try tree.errors.addOne()) = Error {
+ .ExpectedToken = Error.ExpectedToken {
+ .token = token_index,
+ .expected_id = Token.Id.Pipe,
+ },
+ };
+ return tree;
+ }
+
+ putBackToken(&tok_it, &tree);
+ continue;
+ }
+
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.PointerPayload,
+ ast.Node.PointerPayload {
+ .base = undefined,
+ .lpipe = token_index,
+ .ptr_token = null,
+ .value_symbol = undefined,
+ .rpipe = undefined
+ }
+ );
+
+ try stack.append(State {
+ .ExpectTokenSave = ExpectTokenSave {
+ .id = Token.Id.Pipe,
+ .ptr = &node.rpipe,
+ }
+ });
+ try stack.append(State { .Identifier = OptionalCtx { .Required = &node.value_symbol } });
+ try stack.append(State {
+ .OptionalTokenSave = OptionalTokenSave {
+ .id = Token.Id.Asterisk,
+ .ptr = &node.ptr_token,
+ }
+ });
+ continue;
+ },
+ State.PointerIndexPayload => |opt_ctx| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ if (token_ptr.id != Token.Id.Pipe) {
+ if (opt_ctx != OptionalCtx.Optional) {
+ *(try tree.errors.addOne()) = Error {
+ .ExpectedToken = Error.ExpectedToken {
+ .token = token_index,
+ .expected_id = Token.Id.Pipe,
+ },
+ };
+ return tree;
+ }
+
+ putBackToken(&tok_it, &tree);
+ continue;
+ }
+
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.PointerIndexPayload,
+ ast.Node.PointerIndexPayload {
+ .base = undefined,
+ .lpipe = token_index,
+ .ptr_token = null,
+ .value_symbol = undefined,
+ .index_symbol = null,
+ .rpipe = undefined
+ }
+ );
+
+ stack.append(State {
+ .ExpectTokenSave = ExpectTokenSave {
+ .id = Token.Id.Pipe,
+ .ptr = &node.rpipe,
+ }
+ }) catch unreachable;
+ try stack.append(State { .Identifier = OptionalCtx { .RequiredNull = &node.index_symbol } });
+ try stack.append(State { .IfToken = Token.Id.Comma });
+ try stack.append(State { .Identifier = OptionalCtx { .Required = &node.value_symbol } });
+ try stack.append(State {
+ .OptionalTokenSave = OptionalTokenSave {
+ .id = Token.Id.Asterisk,
+ .ptr = &node.ptr_token,
+ }
+ });
+ continue;
+ },
+
+
+ State.Expression => |opt_ctx| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ switch (token_ptr.id) {
+ Token.Id.Keyword_return, Token.Id.Keyword_break, Token.Id.Keyword_continue => {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.ControlFlowExpression,
+ ast.Node.ControlFlowExpression {
+ .base = undefined,
+ .ltoken = token_index,
+ .kind = undefined,
+ .rhs = null,
+ }
+ );
+
+ stack.append(State { .Expression = OptionalCtx { .Optional = &node.rhs } }) catch unreachable;
+
+ switch (token_ptr.id) {
+ Token.Id.Keyword_break => {
+ node.kind = ast.Node.ControlFlowExpression.Kind { .Break = null };
+ try stack.append(State { .Identifier = OptionalCtx { .RequiredNull = &node.kind.Break } });
+ try stack.append(State { .IfToken = Token.Id.Colon });
+ },
+ Token.Id.Keyword_continue => {
+ node.kind = ast.Node.ControlFlowExpression.Kind { .Continue = null };
+ try stack.append(State { .Identifier = OptionalCtx { .RequiredNull = &node.kind.Continue } });
+ try stack.append(State { .IfToken = Token.Id.Colon });
+ },
+ Token.Id.Keyword_return => {
+ node.kind = ast.Node.ControlFlowExpression.Kind.Return;
+ },
+ else => unreachable,
+ }
+ continue;
+ },
+ Token.Id.Keyword_try, Token.Id.Keyword_cancel, Token.Id.Keyword_resume => {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.PrefixOp,
+ ast.Node.PrefixOp {
+ .base = undefined,
+ .op_token = token_index,
+ .op = switch (token_ptr.id) {
+ Token.Id.Keyword_try => ast.Node.PrefixOp.Op { .Try = void{} },
+ Token.Id.Keyword_cancel => ast.Node.PrefixOp.Op { .Cancel = void{} },
+ Token.Id.Keyword_resume => ast.Node.PrefixOp.Op { .Resume = void{} },
+ else => unreachable,
+ },
+ .rhs = undefined,
+ }
+ );
+
+ stack.append(State { .Expression = OptionalCtx { .Required = &node.rhs } }) catch unreachable;
+ continue;
+ },
+ else => {
+ if (!try parseBlockExpr(&stack, arena, opt_ctx, token_ptr, token_index)) {
+ putBackToken(&tok_it, &tree);
+ stack.append(State { .UnwrapExpressionBegin = opt_ctx }) catch unreachable;
+ }
+ continue;
+ }
+ }
+ },
+ State.RangeExpressionBegin => |opt_ctx| {
+ stack.append(State { .RangeExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State { .Expression = opt_ctx });
+ continue;
+ },
+ State.RangeExpressionEnd => |opt_ctx| {
+ const lhs = opt_ctx.get() ?? continue;
+
+ if (eatToken(&tok_it, &tree, Token.Id.Ellipsis3)) |ellipsis3| {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
+ ast.Node.InfixOp {
+ .base = undefined,
+ .lhs = lhs,
+ .op_token = ellipsis3,
+ .op = ast.Node.InfixOp.Op.Range,
+ .rhs = undefined,
+ }
+ );
+ stack.append(State { .Expression = OptionalCtx { .Required = &node.rhs } }) catch unreachable;
+ continue;
+ }
+ },
+ State.AssignmentExpressionBegin => |opt_ctx| {
+ stack.append(State { .AssignmentExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State { .Expression = opt_ctx });
+ continue;
+ },
+
+ State.AssignmentExpressionEnd => |opt_ctx| {
+ const lhs = opt_ctx.get() ?? continue;
+
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ if (tokenIdToAssignment(token_ptr.id)) |ass_id| {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
+ ast.Node.InfixOp {
+ .base = undefined,
+ .lhs = lhs,
+ .op_token = token_index,
+ .op = ass_id,
+ .rhs = undefined,
+ }
+ );
+ stack.append(State { .AssignmentExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State { .Expression = OptionalCtx { .Required = &node.rhs } });
+ continue;
+ } else {
+ putBackToken(&tok_it, &tree);
+ continue;
+ }
+ },
+
+ State.UnwrapExpressionBegin => |opt_ctx| {
+ stack.append(State { .UnwrapExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State { .BoolOrExpressionBegin = opt_ctx });
+ continue;
+ },
+
+ State.UnwrapExpressionEnd => |opt_ctx| {
+ const lhs = opt_ctx.get() ?? continue;
+
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ if (tokenIdToUnwrapExpr(token_ptr.id)) |unwrap_id| {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
+ ast.Node.InfixOp {
+ .base = undefined,
+ .lhs = lhs,
+ .op_token = token_index,
+ .op = unwrap_id,
+ .rhs = undefined,
+ }
+ );
+
+ stack.append(State { .UnwrapExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State { .Expression = OptionalCtx { .Required = &node.rhs } });
+
+ if (node.op == ast.Node.InfixOp.Op.Catch) {
+ try stack.append(State { .Payload = OptionalCtx { .Optional = &node.op.Catch } });
+ }
+ continue;
+ } else {
+ putBackToken(&tok_it, &tree);
+ continue;
+ }
+ },
+
+ State.BoolOrExpressionBegin => |opt_ctx| {
+ stack.append(State { .BoolOrExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State { .BoolAndExpressionBegin = opt_ctx });
+ continue;
+ },
+
+ State.BoolOrExpressionEnd => |opt_ctx| {
+ const lhs = opt_ctx.get() ?? continue;
+
+ if (eatToken(&tok_it, &tree, Token.Id.Keyword_or)) |or_token| {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
+ ast.Node.InfixOp {
+ .base = undefined,
+ .lhs = lhs,
+ .op_token = or_token,
+ .op = ast.Node.InfixOp.Op.BoolOr,
+ .rhs = undefined,
+ }
+ );
+ stack.append(State { .BoolOrExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State { .BoolAndExpressionBegin = OptionalCtx { .Required = &node.rhs } });
+ continue;
+ }
+ },
+
+ State.BoolAndExpressionBegin => |opt_ctx| {
+ stack.append(State { .BoolAndExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State { .ComparisonExpressionBegin = opt_ctx });
+ continue;
+ },
+
+ State.BoolAndExpressionEnd => |opt_ctx| {
+ const lhs = opt_ctx.get() ?? continue;
+
+ if (eatToken(&tok_it, &tree, Token.Id.Keyword_and)) |and_token| {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
+ ast.Node.InfixOp {
+ .base = undefined,
+ .lhs = lhs,
+ .op_token = and_token,
+ .op = ast.Node.InfixOp.Op.BoolAnd,
+ .rhs = undefined,
+ }
+ );
+ stack.append(State { .BoolAndExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State { .ComparisonExpressionBegin = OptionalCtx { .Required = &node.rhs } });
+ continue;
+ }
+ },
+
+ State.ComparisonExpressionBegin => |opt_ctx| {
+ stack.append(State { .ComparisonExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State { .BinaryOrExpressionBegin = opt_ctx });
+ continue;
+ },
+
+ State.ComparisonExpressionEnd => |opt_ctx| {
+ const lhs = opt_ctx.get() ?? continue;
+
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ if (tokenIdToComparison(token_ptr.id)) |comp_id| {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
+ ast.Node.InfixOp {
+ .base = undefined,
+ .lhs = lhs,
+ .op_token = token_index,
+ .op = comp_id,
+ .rhs = undefined,
+ }
+ );
+ stack.append(State { .ComparisonExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State { .BinaryOrExpressionBegin = OptionalCtx { .Required = &node.rhs } });
+ continue;
+ } else {
+ putBackToken(&tok_it, &tree);
+ continue;
+ }
+ },
+
+ State.BinaryOrExpressionBegin => |opt_ctx| {
+ stack.append(State { .BinaryOrExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State { .BinaryXorExpressionBegin = opt_ctx });
+ continue;
+ },
+
+ State.BinaryOrExpressionEnd => |opt_ctx| {
+ const lhs = opt_ctx.get() ?? continue;
+
+ if (eatToken(&tok_it, &tree, Token.Id.Pipe)) |pipe| {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
+ ast.Node.InfixOp {
+ .base = undefined,
+ .lhs = lhs,
+ .op_token = pipe,
+ .op = ast.Node.InfixOp.Op.BitOr,
+ .rhs = undefined,
+ }
+ );
+ stack.append(State { .BinaryOrExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State { .BinaryXorExpressionBegin = OptionalCtx { .Required = &node.rhs } });
+ continue;
+ }
+ },
+
+ State.BinaryXorExpressionBegin => |opt_ctx| {
+ stack.append(State { .BinaryXorExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State { .BinaryAndExpressionBegin = opt_ctx });
+ continue;
+ },
+
+ State.BinaryXorExpressionEnd => |opt_ctx| {
+ const lhs = opt_ctx.get() ?? continue;
+
+ if (eatToken(&tok_it, &tree, Token.Id.Caret)) |caret| {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
+ ast.Node.InfixOp {
+ .base = undefined,
+ .lhs = lhs,
+ .op_token = caret,
+ .op = ast.Node.InfixOp.Op.BitXor,
+ .rhs = undefined,
+ }
+ );
+ stack.append(State { .BinaryXorExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State { .BinaryAndExpressionBegin = OptionalCtx { .Required = &node.rhs } });
+ continue;
+ }
+ },
+
+ State.BinaryAndExpressionBegin => |opt_ctx| {
+ stack.append(State { .BinaryAndExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State { .BitShiftExpressionBegin = opt_ctx });
+ continue;
+ },
+
+ State.BinaryAndExpressionEnd => |opt_ctx| {
+ const lhs = opt_ctx.get() ?? continue;
+
+ if (eatToken(&tok_it, &tree, Token.Id.Ampersand)) |ampersand| {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
+ ast.Node.InfixOp {
+ .base = undefined,
+ .lhs = lhs,
+ .op_token = ampersand,
+ .op = ast.Node.InfixOp.Op.BitAnd,
+ .rhs = undefined,
+ }
+ );
+ stack.append(State { .BinaryAndExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State { .BitShiftExpressionBegin = OptionalCtx { .Required = &node.rhs } });
+ continue;
+ }
+ },
+
+ State.BitShiftExpressionBegin => |opt_ctx| {
+ stack.append(State { .BitShiftExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State { .AdditionExpressionBegin = opt_ctx });
+ continue;
+ },
+
+ State.BitShiftExpressionEnd => |opt_ctx| {
+ const lhs = opt_ctx.get() ?? continue;
+
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ if (tokenIdToBitShift(token_ptr.id)) |bitshift_id| {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
+ ast.Node.InfixOp {
+ .base = undefined,
+ .lhs = lhs,
+ .op_token = token_index,
+ .op = bitshift_id,
+ .rhs = undefined,
+ }
+ );
+ stack.append(State { .BitShiftExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State { .AdditionExpressionBegin = OptionalCtx { .Required = &node.rhs } });
+ continue;
+ } else {
+ putBackToken(&tok_it, &tree);
+ continue;
+ }
+ },
+
+ State.AdditionExpressionBegin => |opt_ctx| {
+ stack.append(State { .AdditionExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State { .MultiplyExpressionBegin = opt_ctx });
+ continue;
+ },
+
+ State.AdditionExpressionEnd => |opt_ctx| {
+ const lhs = opt_ctx.get() ?? continue;
+
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ if (tokenIdToAddition(token_ptr.id)) |add_id| {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
+ ast.Node.InfixOp {
+ .base = undefined,
+ .lhs = lhs,
+ .op_token = token_index,
+ .op = add_id,
+ .rhs = undefined,
+ }
+ );
+ stack.append(State { .AdditionExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State { .MultiplyExpressionBegin = OptionalCtx { .Required = &node.rhs } });
+ continue;
+ } else {
+ putBackToken(&tok_it, &tree);
+ continue;
+ }
+ },
+
+ State.MultiplyExpressionBegin => |opt_ctx| {
+ stack.append(State { .MultiplyExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State { .CurlySuffixExpressionBegin = opt_ctx });
+ continue;
+ },
+
+ State.MultiplyExpressionEnd => |opt_ctx| {
+ const lhs = opt_ctx.get() ?? continue;
+
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ if (tokenIdToMultiply(token_ptr.id)) |mult_id| {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
+ ast.Node.InfixOp {
+ .base = undefined,
+ .lhs = lhs,
+ .op_token = token_index,
+ .op = mult_id,
+ .rhs = undefined,
+ }
+ );
+ stack.append(State { .MultiplyExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State { .CurlySuffixExpressionBegin = OptionalCtx { .Required = &node.rhs } });
+ continue;
+ } else {
+ putBackToken(&tok_it, &tree);
+ continue;
+ }
+ },
+
+ State.CurlySuffixExpressionBegin => |opt_ctx| {
+ stack.append(State { .CurlySuffixExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State { .IfToken = Token.Id.LBrace });
+ try stack.append(State { .TypeExprBegin = opt_ctx });
+ continue;
+ },
+
+ State.CurlySuffixExpressionEnd => |opt_ctx| {
+ const lhs = opt_ctx.get() ?? continue;
+
+ if ((??tok_it.peek()).id == Token.Id.Period) {
+ const node = try arena.construct(ast.Node.SuffixOp {
+ .base = ast.Node { .id = ast.Node.Id.SuffixOp },
+ .lhs = lhs,
+ .op = ast.Node.SuffixOp.Op {
+ .StructInitializer = ast.Node.SuffixOp.Op.InitList.init(arena),
+ },
+ .rtoken = undefined,
+ });
+ opt_ctx.store(&node.base);
+
+ stack.append(State { .CurlySuffixExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State { .IfToken = Token.Id.LBrace });
+ try stack.append(State {
+ .FieldInitListItemOrEnd = ListSave(@typeOf(node.op.StructInitializer)) {
+ .list = &node.op.StructInitializer,
+ .ptr = &node.rtoken,
+ }
+ });
+ continue;
+ }
+
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.SuffixOp,
+ ast.Node.SuffixOp {
+ .base = undefined,
+ .lhs = lhs,
+ .op = ast.Node.SuffixOp.Op {
+ .ArrayInitializer = ast.Node.SuffixOp.Op.InitList.init(arena),
+ },
+ .rtoken = undefined,
+ }
+ );
+ stack.append(State { .CurlySuffixExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State { .IfToken = Token.Id.LBrace });
+ try stack.append(State {
+ .ExprListItemOrEnd = ExprListCtx {
+ .list = &node.op.ArrayInitializer,
+ .end = Token.Id.RBrace,
+ .ptr = &node.rtoken,
+ }
+ });
+ continue;
+ },
+
+ State.TypeExprBegin => |opt_ctx| {
+ stack.append(State { .TypeExprEnd = opt_ctx }) catch unreachable;
+ try stack.append(State { .PrefixOpExpression = opt_ctx });
+ continue;
+ },
+
+ State.TypeExprEnd => |opt_ctx| {
+ const lhs = opt_ctx.get() ?? continue;
+
+ if (eatToken(&tok_it, &tree, Token.Id.Bang)) |bang| {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
+ ast.Node.InfixOp {
+ .base = undefined,
+ .lhs = lhs,
+ .op_token = bang,
+ .op = ast.Node.InfixOp.Op.ErrorUnion,
+ .rhs = undefined,
+ }
+ );
+ stack.append(State { .TypeExprEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State { .PrefixOpExpression = OptionalCtx { .Required = &node.rhs } });
+ continue;
+ }
+ },
+
+ State.PrefixOpExpression => |opt_ctx| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ if (tokenIdToPrefixOp(token_ptr.id)) |prefix_id| {
+ var node = try createToCtxNode(arena, opt_ctx, ast.Node.PrefixOp,
+ ast.Node.PrefixOp {
+ .base = undefined,
+ .op_token = token_index,
+ .op = prefix_id,
+ .rhs = undefined,
+ }
+ );
+
+ // Treat '**' token as two derefs
+ if (token_ptr.id == Token.Id.AsteriskAsterisk) {
+ const child = try createNode(arena, ast.Node.PrefixOp,
+ ast.Node.PrefixOp {
+ .base = undefined,
+ .op_token = token_index,
+ .op = prefix_id,
+ .rhs = undefined,
+ }
+ );
+ node.rhs = &child.base;
+ node = child;
+ }
+
+ stack.append(State { .TypeExprBegin = OptionalCtx { .Required = &node.rhs } }) catch unreachable;
+ if (node.op == ast.Node.PrefixOp.Op.AddrOf) {
+ try stack.append(State { .AddrOfModifiers = &node.op.AddrOf });
+ }
+ continue;
+ } else {
+ putBackToken(&tok_it, &tree);
+ stack.append(State { .SuffixOpExpressionBegin = opt_ctx }) catch unreachable;
+ continue;
+ }
+ },
+
+ State.SuffixOpExpressionBegin => |opt_ctx| {
+ if (eatToken(&tok_it, &tree, Token.Id.Keyword_async)) |async_token| {
+ const async_node = try createNode(arena, ast.Node.AsyncAttribute,
+ ast.Node.AsyncAttribute {
+ .base = undefined,
+ .async_token = async_token,
+ .allocator_type = null,
+ .rangle_bracket = null,
+ }
+ );
+ stack.append(State {
+ .AsyncEnd = AsyncEndCtx {
+ .ctx = opt_ctx,
+ .attribute = async_node,
+ }
+ }) catch unreachable;
+ try stack.append(State { .SuffixOpExpressionEnd = opt_ctx.toRequired() });
+ try stack.append(State { .PrimaryExpression = opt_ctx.toRequired() });
+ try stack.append(State { .AsyncAllocator = async_node });
+ continue;
+ }
+
+ stack.append(State { .SuffixOpExpressionEnd = opt_ctx }) catch unreachable;
+ try stack.append(State { .PrimaryExpression = opt_ctx });
+ continue;
+ },
+
+ State.SuffixOpExpressionEnd => |opt_ctx| {
+ const lhs = opt_ctx.get() ?? continue;
+
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ switch (token_ptr.id) {
+ Token.Id.LParen => {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.SuffixOp,
+ ast.Node.SuffixOp {
+ .base = undefined,
+ .lhs = lhs,
+ .op = ast.Node.SuffixOp.Op {
+ .Call = ast.Node.SuffixOp.Op.Call {
+ .params = ast.Node.SuffixOp.Op.Call.ParamList.init(arena),
+ .async_attr = null,
+ }
+ },
+ .rtoken = undefined,
+ }
+ );
+ stack.append(State { .SuffixOpExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State {
+ .ExprListItemOrEnd = ExprListCtx {
+ .list = &node.op.Call.params,
+ .end = Token.Id.RParen,
+ .ptr = &node.rtoken,
+ }
+ });
+ continue;
+ },
+ Token.Id.LBracket => {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.SuffixOp,
+ ast.Node.SuffixOp {
+ .base = undefined,
+ .lhs = lhs,
+ .op = ast.Node.SuffixOp.Op {
+ .ArrayAccess = undefined,
+ },
+ .rtoken = undefined
+ }
+ );
+ stack.append(State { .SuffixOpExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State { .SliceOrArrayAccess = node });
+ try stack.append(State { .Expression = OptionalCtx { .Required = &node.op.ArrayAccess }});
+ continue;
+ },
+ Token.Id.Period => {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
+ ast.Node.InfixOp {
+ .base = undefined,
+ .lhs = lhs,
+ .op_token = token_index,
+ .op = ast.Node.InfixOp.Op.Period,
+ .rhs = undefined,
+ }
+ );
+ stack.append(State { .SuffixOpExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
+ try stack.append(State { .Identifier = OptionalCtx { .Required = &node.rhs } });
+ continue;
+ },
+ else => {
+ putBackToken(&tok_it, &tree);
+ continue;
+ },
+ }
+ },
+
+ State.PrimaryExpression => |opt_ctx| {
+ const token = nextToken(&tok_it, &tree);
+ switch (token.ptr.id) {
+ Token.Id.IntegerLiteral => {
+ _ = try createToCtxLiteral(arena, opt_ctx, ast.Node.StringLiteral, token.index);
+ continue;
+ },
+ Token.Id.FloatLiteral => {
+ _ = try createToCtxLiteral(arena, opt_ctx, ast.Node.FloatLiteral, token.index);
+ continue;
+ },
+ Token.Id.CharLiteral => {
+ _ = try createToCtxLiteral(arena, opt_ctx, ast.Node.CharLiteral, token.index);
+ continue;
+ },
+ Token.Id.Keyword_undefined => {
+ _ = try createToCtxLiteral(arena, opt_ctx, ast.Node.UndefinedLiteral, token.index);
+ continue;
+ },
+ Token.Id.Keyword_true, Token.Id.Keyword_false => {
+ _ = try createToCtxLiteral(arena, opt_ctx, ast.Node.BoolLiteral, token.index);
+ continue;
+ },
+ Token.Id.Keyword_null => {
+ _ = try createToCtxLiteral(arena, opt_ctx, ast.Node.NullLiteral, token.index);
+ continue;
+ },
+ Token.Id.Keyword_this => {
+ _ = try createToCtxLiteral(arena, opt_ctx, ast.Node.ThisLiteral, token.index);
+ continue;
+ },
+ Token.Id.Keyword_var => {
+ _ = try createToCtxLiteral(arena, opt_ctx, ast.Node.VarType, token.index);
+ continue;
+ },
+ Token.Id.Keyword_unreachable => {
+ _ = try createToCtxLiteral(arena, opt_ctx, ast.Node.Unreachable, token.index);
+ continue;
+ },
+ Token.Id.Keyword_promise => {
+ const node = try arena.construct(ast.Node.PromiseType {
+ .base = ast.Node {
+ .id = ast.Node.Id.PromiseType,
+ },
+ .promise_token = token.index,
+ .result = null,
+ });
+ opt_ctx.store(&node.base);
+ const next_token = nextToken(&tok_it, &tree);
+ const next_token_index = next_token.index;
+ const next_token_ptr = next_token.ptr;
+ if (next_token_ptr.id != Token.Id.Arrow) {
+ putBackToken(&tok_it, &tree);
+ continue;
+ }
+ node.result = ast.Node.PromiseType.Result {
+ .arrow_token = next_token_index,
+ .return_type = undefined,
+ };
+ const return_type_ptr = &((??node.result).return_type);
+ try stack.append(State { .Expression = OptionalCtx { .Required = return_type_ptr, } });
+ continue;
+ },
+ Token.Id.StringLiteral, Token.Id.MultilineStringLiteralLine => {
+ opt_ctx.store((try parseStringLiteral(arena, &tok_it, token.ptr, token.index, &tree)) ?? unreachable);
+ continue;
+ },
+ Token.Id.LParen => {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.GroupedExpression,
+ ast.Node.GroupedExpression {
+ .base = undefined,
+ .lparen = token.index,
+ .expr = undefined,
+ .rparen = undefined,
+ }
+ );
+ stack.append(State {
+ .ExpectTokenSave = ExpectTokenSave {
+ .id = Token.Id.RParen,
+ .ptr = &node.rparen,
+ }
+ }) catch unreachable;
+ try stack.append(State { .Expression = OptionalCtx { .Required = &node.expr } });
+ continue;
+ },
+ Token.Id.Builtin => {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.BuiltinCall,
+ ast.Node.BuiltinCall {
+ .base = undefined,
+ .builtin_token = token.index,
+ .params = ast.Node.BuiltinCall.ParamList.init(arena),
+ .rparen_token = undefined,
+ }
+ );
+ stack.append(State {
+ .ExprListItemOrEnd = ExprListCtx {
+ .list = &node.params,
+ .end = Token.Id.RParen,
+ .ptr = &node.rparen_token,
+ }
+ }) catch unreachable;
+ try stack.append(State { .ExpectToken = Token.Id.LParen, });
+ continue;
+ },
+ Token.Id.LBracket => {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.PrefixOp,
+ ast.Node.PrefixOp {
+ .base = undefined,
+ .op_token = token.index,
+ .op = undefined,
+ .rhs = undefined,
+ }
+ );
+ stack.append(State { .SliceOrArrayType = node }) catch unreachable;
+ continue;
+ },
+ Token.Id.Keyword_error => {
+ stack.append(State {
+ .ErrorTypeOrSetDecl = ErrorTypeOrSetDeclCtx {
+ .error_token = token.index,
+ .opt_ctx = opt_ctx
+ }
+ }) catch unreachable;
+ continue;
+ },
+ Token.Id.Keyword_packed => {
+ stack.append(State {
+ .ContainerKind = ContainerKindCtx {
+ .opt_ctx = opt_ctx,
+ .ltoken = token.index,
+ .layout = ast.Node.ContainerDecl.Layout.Packed,
+ },
+ }) catch unreachable;
+ continue;
+ },
+ Token.Id.Keyword_extern => {
+ stack.append(State {
+ .ExternType = ExternTypeCtx {
+ .opt_ctx = opt_ctx,
+ .extern_token = token.index,
+ .comments = null,
+ },
+ }) catch unreachable;
+ continue;
+ },
+ Token.Id.Keyword_struct, Token.Id.Keyword_union, Token.Id.Keyword_enum => {
+ putBackToken(&tok_it, &tree);
+ stack.append(State {
+ .ContainerKind = ContainerKindCtx {
+ .opt_ctx = opt_ctx,
+ .ltoken = token.index,
+ .layout = ast.Node.ContainerDecl.Layout.Auto,
+ },
+ }) catch unreachable;
+ continue;
+ },
+ Token.Id.Identifier => {
+ stack.append(State {
+ .MaybeLabeledExpression = MaybeLabeledExpressionCtx {
+ .label = token.index,
+ .opt_ctx = opt_ctx
+ }
+ }) catch unreachable;
+ continue;
+ },
+ Token.Id.Keyword_fn => {
+ const fn_proto = try arena.construct(ast.Node.FnProto {
+ .base = ast.Node {
+ .id = ast.Node.Id.FnProto,
+ },
+ .doc_comments = null,
+ .visib_token = null,
+ .name_token = null,
+ .fn_token = token.index,
+ .params = ast.Node.FnProto.ParamList.init(arena),
+ .return_type = undefined,
+ .var_args_token = null,
+ .extern_export_inline_token = null,
+ .cc_token = null,
+ .async_attr = null,
+ .body_node = null,
+ .lib_name = null,
+ .align_expr = null,
+ });
+ opt_ctx.store(&fn_proto.base);
+ stack.append(State { .FnProto = fn_proto }) catch unreachable;
+ continue;
+ },
+ Token.Id.Keyword_nakedcc, Token.Id.Keyword_stdcallcc => {
+ const fn_proto = try arena.construct(ast.Node.FnProto {
+ .base = ast.Node {
+ .id = ast.Node.Id.FnProto,
+ },
+ .doc_comments = null,
+ .visib_token = null,
+ .name_token = null,
+ .fn_token = undefined,
+ .params = ast.Node.FnProto.ParamList.init(arena),
+ .return_type = undefined,
+ .var_args_token = null,
+ .extern_export_inline_token = null,
+ .cc_token = token.index,
+ .async_attr = null,
+ .body_node = null,
+ .lib_name = null,
+ .align_expr = null,
+ });
+ opt_ctx.store(&fn_proto.base);
+ stack.append(State { .FnProto = fn_proto }) catch unreachable;
+ try stack.append(State {
+ .ExpectTokenSave = ExpectTokenSave {
+ .id = Token.Id.Keyword_fn,
+ .ptr = &fn_proto.fn_token
+ }
+ });
+ continue;
+ },
+ Token.Id.Keyword_asm => {
+ const node = try createToCtxNode(arena, opt_ctx, ast.Node.Asm,
+ ast.Node.Asm {
+ .base = undefined,
+ .asm_token = token.index,
+ .volatile_token = null,
+ .template = undefined,
+ .outputs = ast.Node.Asm.OutputList.init(arena),
+ .inputs = ast.Node.Asm.InputList.init(arena),
+ .clobbers = ast.Node.Asm.ClobberList.init(arena),
+ .rparen = undefined,
+ }
+ );
+ stack.append(State {
+ .ExpectTokenSave = ExpectTokenSave {
+ .id = Token.Id.RParen,
+ .ptr = &node.rparen,
+ }
+ }) catch unreachable;
+ try stack.append(State { .AsmClobberItems = &node.clobbers });
+ try stack.append(State { .IfToken = Token.Id.Colon });
+ try stack.append(State { .AsmInputItems = &node.inputs });
+ try stack.append(State { .IfToken = Token.Id.Colon });
+ try stack.append(State { .AsmOutputItems = &node.outputs });
+ try stack.append(State { .IfToken = Token.Id.Colon });
+ try stack.append(State { .StringLiteral = OptionalCtx { .Required = &node.template } });
+ try stack.append(State { .ExpectToken = Token.Id.LParen });
+ try stack.append(State {
+ .OptionalTokenSave = OptionalTokenSave {
+ .id = Token.Id.Keyword_volatile,
+ .ptr = &node.volatile_token,
+ }
+ });
+ },
+ Token.Id.Keyword_inline => {
+ stack.append(State {
+ .Inline = InlineCtx {
+ .label = null,
+ .inline_token = token.index,
+ .opt_ctx = opt_ctx,
+ }
+ }) catch unreachable;
+ continue;
+ },
+ else => {
+ if (!try parseBlockExpr(&stack, arena, opt_ctx, token.ptr, token.index)) {
+ putBackToken(&tok_it, &tree);
+ if (opt_ctx != OptionalCtx.Optional) {
+ *(try tree.errors.addOne()) = Error {
+ .ExpectedPrimaryExpr = Error.ExpectedPrimaryExpr { .token = token.index },
+ };
+ return tree;
+ }
+ }
+ continue;
+ }
+ }
+ },
+
+
+ State.ErrorTypeOrSetDecl => |ctx| {
+ if (eatToken(&tok_it, &tree, Token.Id.LBrace) == null) {
+ _ = try createToCtxLiteral(arena, ctx.opt_ctx, ast.Node.ErrorType, ctx.error_token);
+ continue;
+ }
+
+ const node = try arena.construct(ast.Node.ErrorSetDecl {
+ .base = ast.Node {
+ .id = ast.Node.Id.ErrorSetDecl,
+ },
+ .error_token = ctx.error_token,
+ .decls = ast.Node.ErrorSetDecl.DeclList.init(arena),
+ .rbrace_token = undefined,
+ });
+ ctx.opt_ctx.store(&node.base);
+
+ stack.append(State {
+ .ErrorTagListItemOrEnd = ListSave(@typeOf(node.decls)) {
+ .list = &node.decls,
+ .ptr = &node.rbrace_token,
+ }
+ }) catch unreachable;
+ continue;
+ },
+ State.StringLiteral => |opt_ctx| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ opt_ctx.store(
+ (try parseStringLiteral(arena, &tok_it, token_ptr, token_index, &tree)) ?? {
+ putBackToken(&tok_it, &tree);
+ if (opt_ctx != OptionalCtx.Optional) {
+ *(try tree.errors.addOne()) = Error {
+ .ExpectedPrimaryExpr = Error.ExpectedPrimaryExpr { .token = token_index },
+ };
+ return tree;
+ }
+
+ continue;
+ }
+ );
+ },
+
+ State.Identifier => |opt_ctx| {
+ if (eatToken(&tok_it, &tree, Token.Id.Identifier)) |ident_token| {
+ _ = try createToCtxLiteral(arena, opt_ctx, ast.Node.Identifier, ident_token);
+ continue;
+ }
+
+ if (opt_ctx != OptionalCtx.Optional) {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ *(try tree.errors.addOne()) = Error {
+ .ExpectedToken = Error.ExpectedToken {
+ .token = token_index,
+ .expected_id = Token.Id.Identifier,
+ },
+ };
+ return tree;
+ }
+ },
+
+ State.ErrorTag => |node_ptr| {
+ const comments = try eatDocComments(arena, &tok_it, &tree);
+ const ident_token = nextToken(&tok_it, &tree);
+ const ident_token_index = ident_token.index;
+ const ident_token_ptr = ident_token.ptr;
+ if (ident_token_ptr.id != Token.Id.Identifier) {
+ *(try tree.errors.addOne()) = Error {
+ .ExpectedToken = Error.ExpectedToken {
+ .token = ident_token_index,
+ .expected_id = Token.Id.Identifier,
+ },
+ };
+ return tree;
+ }
+
+ const node = try arena.construct(ast.Node.ErrorTag {
+ .base = ast.Node {
+ .id = ast.Node.Id.ErrorTag,
+ },
+ .doc_comments = comments,
+ .name_token = ident_token_index,
+ });
+ *node_ptr = &node.base;
+ continue;
+ },
+
+ State.ExpectToken => |token_id| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ if (token_ptr.id != token_id) {
+ *(try tree.errors.addOne()) = Error {
+ .ExpectedToken = Error.ExpectedToken {
+ .token = token_index,
+ .expected_id = token_id,
+ },
+ };
+ return tree;
+ }
+ continue;
+ },
+ State.ExpectTokenSave => |expect_token_save| {
+ const token = nextToken(&tok_it, &tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ if (token_ptr.id != expect_token_save.id) {
+ *(try tree.errors.addOne()) = Error {
+ .ExpectedToken = Error.ExpectedToken {
+ .token = token_index,
+ .expected_id = expect_token_save.id,
+ },
+ };
+ return tree;
+ }
+ *expect_token_save.ptr = token_index;
+ continue;
+ },
+ State.IfToken => |token_id| {
+ if (eatToken(&tok_it, &tree, token_id)) |_| {
+ continue;
+ }
+
+ _ = stack.pop();
+ continue;
+ },
+ State.IfTokenSave => |if_token_save| {
+ if (eatToken(&tok_it, &tree, if_token_save.id)) |token_index| {
+ *if_token_save.ptr = token_index;
+ continue;
+ }
+
+ _ = stack.pop();
+ continue;
+ },
+ State.OptionalTokenSave => |optional_token_save| {
+ if (eatToken(&tok_it, &tree, optional_token_save.id)) |token_index| {
+ *optional_token_save.ptr = token_index;
+ continue;
+ }
+
+ continue;
+ },
+ }
+ }
+}
+
+// A token paired with its index into the tree's token list, so handlers can
+// use both the token data and its position without a second lookup.
+const AnnotatedToken = struct {
+    ptr: &Token,
+    index: TokenIndex,
+};
+
+// Context for parsing a top-level declaration: the list to append the
+// finished node to, plus tokens already consumed in front of the
+// declaration (visibility, extern/export/inline, lib name, doc comments).
+const TopLevelDeclCtx = struct {
+    decls: &ast.Node.Root.DeclList,
+    visib_token: ?TokenIndex,
+    extern_export_inline_token: ?AnnotatedToken,
+    lib_name: ?&ast.Node,
+    comments: ?&ast.Node.DocComment,
+};
+
+// Context for parsing a variable declaration; carries the tokens consumed
+// before the declaration proper and the list the finished node goes into.
+// (The handler for this context is not in this chunk — field meanings are
+// taken from the field names; verify against the VarDecl state.)
+const VarDeclCtx = struct {
+    mut_token: TokenIndex,
+    visib_token: ?TokenIndex,
+    comptime_token: ?TokenIndex,
+    extern_export_token: ?TokenIndex,
+    lib_name: ?&ast.Node,
+    list: &ast.Node.Root.DeclList,
+    comments: ?&ast.Node.DocComment,
+};
+
+// Context for a declaration inside a container: the visibility token that
+// preceded it, the enclosing container node, and preceding doc comments.
+// (Handler not in this chunk — confirm semantics against its state.)
+const TopLevelExternOrFieldCtx = struct {
+    visib_token: TokenIndex,
+    container_decl: &ast.Node.ContainerDecl,
+    comments: ?&ast.Node.DocComment,
+};
+
+// Pushed when `extern` is seen in a primary expression (see the
+// Token.Id.Keyword_extern case): the destination for the resulting node,
+// the `extern` token itself, and any preceding doc comments.
+const ExternTypeCtx = struct {
+    opt_ctx: OptionalCtx,
+    extern_token: TokenIndex,
+    comments: ?&ast.Node.DocComment,
+};
+
+const ContainerKindCtx = struct {
+ opt_ctx: OptionalCtx,
+ ltoken: TokenIndex,
+ layout: ast.Node.ContainerDecl.Layout,
+};
+
+const ExpectTokenSave = struct {
+ id: @TagType(Token.Id),
+ ptr: &TokenIndex,
+};
+
+const OptionalTokenSave = struct {
+ id: @TagType(Token.Id),
+ ptr: &?TokenIndex,
+};
+
+const ExprListCtx = struct {
+ list: &ast.Node.SuffixOp.Op.InitList,
+ end: Token.Id,
+ ptr: &TokenIndex,
+};
+
+fn ListSave(comptime List: type) type {
+ return struct {
+ list: &List,
+ ptr: &TokenIndex,
+ };
+}
+
+const MaybeLabeledExpressionCtx = struct {
+ label: TokenIndex,
+ opt_ctx: OptionalCtx,
+};
+
+const LabelCtx = struct {
+ label: ?TokenIndex,
+ opt_ctx: OptionalCtx,
+};
+
+const InlineCtx = struct {
+ label: ?TokenIndex,
+ inline_token: ?TokenIndex,
+ opt_ctx: OptionalCtx,
+};
+
+const LoopCtx = struct {
+ label: ?TokenIndex,
+ inline_token: ?TokenIndex,
+ loop_token: TokenIndex,
+ opt_ctx: OptionalCtx,
+};
+
+const AsyncEndCtx = struct {
+ ctx: OptionalCtx,
+ attribute: &ast.Node.AsyncAttribute,
+};
+
+const ErrorTypeOrSetDeclCtx = struct {
+ opt_ctx: OptionalCtx,
+ error_token: TokenIndex,
+};
+
+const ParamDeclEndCtx = struct {
+ fn_proto: &ast.Node.FnProto,
+ param_decl: &ast.Node.ParamDecl,
+};
+
+const ComptimeStatementCtx = struct {
+ comptime_token: TokenIndex,
+ block: &ast.Node.Block,
+};
+
+const OptionalCtx = union(enum) {
+ Optional: &?&ast.Node,
+ RequiredNull: &?&ast.Node,
+ Required: &&ast.Node,
+
+ pub fn store(self: &const OptionalCtx, value: &ast.Node) void {
+ switch (*self) {
+ OptionalCtx.Optional => |ptr| *ptr = value,
+ OptionalCtx.RequiredNull => |ptr| *ptr = value,
+ OptionalCtx.Required => |ptr| *ptr = value,
+ }
+ }
+
+ pub fn get(self: &const OptionalCtx) ?&ast.Node {
+ switch (*self) {
+ OptionalCtx.Optional => |ptr| return *ptr,
+ OptionalCtx.RequiredNull => |ptr| return ??*ptr,
+ OptionalCtx.Required => |ptr| return *ptr,
+ }
+ }
+
+ pub fn toRequired(self: &const OptionalCtx) OptionalCtx {
+ switch (*self) {
+ OptionalCtx.Optional => |ptr| {
+ return OptionalCtx { .RequiredNull = ptr };
+ },
+ OptionalCtx.RequiredNull => |ptr| return *self,
+ OptionalCtx.Required => |ptr| return *self,
+ }
+ }
+};
+
+const AddCommentsCtx = struct {
+ node_ptr: &&ast.Node,
+ comments: ?&ast.Node.DocComment,
+};
+
+const State = union(enum) {
+ TopLevel,
+ TopLevelExtern: TopLevelDeclCtx,
+ TopLevelLibname: TopLevelDeclCtx,
+ TopLevelDecl: TopLevelDeclCtx,
+ TopLevelExternOrField: TopLevelExternOrFieldCtx,
+
+ ContainerKind: ContainerKindCtx,
+ ContainerInitArgStart: &ast.Node.ContainerDecl,
+ ContainerInitArg: &ast.Node.ContainerDecl,
+ ContainerDecl: &ast.Node.ContainerDecl,
+
+ VarDecl: VarDeclCtx,
+ VarDeclAlign: &ast.Node.VarDecl,
+ VarDeclEq: &ast.Node.VarDecl,
+
+ FnDef: &ast.Node.FnProto,
+ FnProto: &ast.Node.FnProto,
+ FnProtoAlign: &ast.Node.FnProto,
+ FnProtoReturnType: &ast.Node.FnProto,
+
+ ParamDecl: &ast.Node.FnProto,
+ ParamDeclAliasOrComptime: &ast.Node.ParamDecl,
+ ParamDeclName: &ast.Node.ParamDecl,
+ ParamDeclEnd: ParamDeclEndCtx,
+ ParamDeclComma: &ast.Node.FnProto,
+
+ MaybeLabeledExpression: MaybeLabeledExpressionCtx,
+ LabeledExpression: LabelCtx,
+ Inline: InlineCtx,
+ While: LoopCtx,
+ WhileContinueExpr: &?&ast.Node,
+ For: LoopCtx,
+ Else: &?&ast.Node.Else,
+
+ Block: &ast.Node.Block,
+ Statement: &ast.Node.Block,
+ ComptimeStatement: ComptimeStatementCtx,
+ Semicolon: &&ast.Node,
+
+ AsmOutputItems: &ast.Node.Asm.OutputList,
+ AsmOutputReturnOrType: &ast.Node.AsmOutput,
+ AsmInputItems: &ast.Node.Asm.InputList,
+ AsmClobberItems: &ast.Node.Asm.ClobberList,
+
+ ExprListItemOrEnd: ExprListCtx,
+ ExprListCommaOrEnd: ExprListCtx,
+ FieldInitListItemOrEnd: ListSave(ast.Node.SuffixOp.Op.InitList),
+ FieldInitListCommaOrEnd: ListSave(ast.Node.SuffixOp.Op.InitList),
+ FieldListCommaOrEnd: &ast.Node.ContainerDecl,
+ FieldInitValue: OptionalCtx,
+ ErrorTagListItemOrEnd: ListSave(ast.Node.ErrorSetDecl.DeclList),
+ ErrorTagListCommaOrEnd: ListSave(ast.Node.ErrorSetDecl.DeclList),
+ SwitchCaseOrEnd: ListSave(ast.Node.Switch.CaseList),
+ SwitchCaseCommaOrEnd: ListSave(ast.Node.Switch.CaseList),
+ SwitchCaseFirstItem: &ast.Node.SwitchCase.ItemList,
+ SwitchCaseItem: &ast.Node.SwitchCase.ItemList,
+ SwitchCaseItemCommaOrEnd: &ast.Node.SwitchCase.ItemList,
+
+ SuspendBody: &ast.Node.Suspend,
+ AsyncAllocator: &ast.Node.AsyncAttribute,
+ AsyncEnd: AsyncEndCtx,
+
+ ExternType: ExternTypeCtx,
+ SliceOrArrayAccess: &ast.Node.SuffixOp,
+ SliceOrArrayType: &ast.Node.PrefixOp,
+ AddrOfModifiers: &ast.Node.PrefixOp.AddrOfInfo,
+
+ Payload: OptionalCtx,
+ PointerPayload: OptionalCtx,
+ PointerIndexPayload: OptionalCtx,
+
+ Expression: OptionalCtx,
+ RangeExpressionBegin: OptionalCtx,
+ RangeExpressionEnd: OptionalCtx,
+ AssignmentExpressionBegin: OptionalCtx,
+ AssignmentExpressionEnd: OptionalCtx,
+ UnwrapExpressionBegin: OptionalCtx,
+ UnwrapExpressionEnd: OptionalCtx,
+ BoolOrExpressionBegin: OptionalCtx,
+ BoolOrExpressionEnd: OptionalCtx,
+ BoolAndExpressionBegin: OptionalCtx,
+ BoolAndExpressionEnd: OptionalCtx,
+ ComparisonExpressionBegin: OptionalCtx,
+ ComparisonExpressionEnd: OptionalCtx,
+ BinaryOrExpressionBegin: OptionalCtx,
+ BinaryOrExpressionEnd: OptionalCtx,
+ BinaryXorExpressionBegin: OptionalCtx,
+ BinaryXorExpressionEnd: OptionalCtx,
+ BinaryAndExpressionBegin: OptionalCtx,
+ BinaryAndExpressionEnd: OptionalCtx,
+ BitShiftExpressionBegin: OptionalCtx,
+ BitShiftExpressionEnd: OptionalCtx,
+ AdditionExpressionBegin: OptionalCtx,
+ AdditionExpressionEnd: OptionalCtx,
+ MultiplyExpressionBegin: OptionalCtx,
+ MultiplyExpressionEnd: OptionalCtx,
+ CurlySuffixExpressionBegin: OptionalCtx,
+ CurlySuffixExpressionEnd: OptionalCtx,
+ TypeExprBegin: OptionalCtx,
+ TypeExprEnd: OptionalCtx,
+ PrefixOpExpression: OptionalCtx,
+ SuffixOpExpressionBegin: OptionalCtx,
+ SuffixOpExpressionEnd: OptionalCtx,
+ PrimaryExpression: OptionalCtx,
+
+ ErrorTypeOrSetDecl: ErrorTypeOrSetDeclCtx,
+ StringLiteral: OptionalCtx,
+ Identifier: OptionalCtx,
+ ErrorTag: &&ast.Node,
+
+
+ IfToken: @TagType(Token.Id),
+ IfTokenSave: ExpectTokenSave,
+ ExpectToken: @TagType(Token.Id),
+ ExpectTokenSave: ExpectTokenSave,
+ OptionalTokenSave: OptionalTokenSave,
+};
+
+fn eatDocComments(arena: &mem.Allocator, tok_it: &ast.Tree.TokenList.Iterator, tree: &ast.Tree) !?&ast.Node.DocComment {
+ var result: ?&ast.Node.DocComment = null;
+ while (true) {
+ if (eatToken(tok_it, tree, Token.Id.DocComment)) |line_comment| {
+ const node = blk: {
+ if (result) |comment_node| {
+ break :blk comment_node;
+ } else {
+ const comment_node = try arena.construct(ast.Node.DocComment {
+ .base = ast.Node {
+ .id = ast.Node.Id.DocComment,
+ },
+ .lines = ast.Node.DocComment.LineList.init(arena),
+ });
+ result = comment_node;
+ break :blk comment_node;
+ }
+ };
+ try node.lines.push(line_comment);
+ continue;
+ }
+ break;
+ }
+ return result;
+}
+
+fn eatLineComment(arena: &mem.Allocator, tok_it: &ast.Tree.TokenList.Iterator, tree: &ast.Tree) !?&ast.Node.LineComment {
+ const token = eatToken(tok_it, tree, Token.Id.LineComment) ?? return null;
+ return try arena.construct(ast.Node.LineComment {
+ .base = ast.Node {
+ .id = ast.Node.Id.LineComment,
+ },
+ .token = token,
+ });
+}
+
+fn parseStringLiteral(arena: &mem.Allocator, tok_it: &ast.Tree.TokenList.Iterator,
+ token_ptr: &const Token, token_index: TokenIndex, tree: &ast.Tree) !?&ast.Node
+{
+ switch (token_ptr.id) {
+ Token.Id.StringLiteral => {
+ return &(try createLiteral(arena, ast.Node.StringLiteral, token_index)).base;
+ },
+ Token.Id.MultilineStringLiteralLine => {
+ const node = try arena.construct(ast.Node.MultilineStringLiteral {
+ .base = ast.Node { .id = ast.Node.Id.MultilineStringLiteral },
+ .lines = ast.Node.MultilineStringLiteral.LineList.init(arena),
+ });
+ try node.lines.push(token_index);
+ while (true) {
+ const multiline_str = nextToken(tok_it, tree);
+ const multiline_str_index = multiline_str.index;
+ const multiline_str_ptr = multiline_str.ptr;
+ if (multiline_str_ptr.id != Token.Id.MultilineStringLiteralLine) {
+ putBackToken(tok_it, tree);
+ break;
+ }
+
+ try node.lines.push(multiline_str_index);
+ }
+
+ return &node.base;
+ },
+ // TODO: We shouldn't need a cast, but:
+ // zig: /home/jc/Documents/zig/src/ir.cpp:7962: TypeTableEntry* ir_resolve_peer_types(IrAnalyze*, AstNode*, IrInstruction**, size_t): Assertion `err_set_type != nullptr' failed.
+ else => return (?&ast.Node)(null),
+ }
+}
+
+fn parseBlockExpr(stack: &std.ArrayList(State), arena: &mem.Allocator, ctx: &const OptionalCtx,
+ token_ptr: &const Token, token_index: TokenIndex) !bool {
+ switch (token_ptr.id) {
+ Token.Id.Keyword_suspend => {
+ const node = try createToCtxNode(arena, ctx, ast.Node.Suspend,
+ ast.Node.Suspend {
+ .base = undefined,
+ .label = null,
+ .suspend_token = token_index,
+ .payload = null,
+ .body = null,
+ }
+ );
+
+ stack.append(State { .SuspendBody = node }) catch unreachable;
+ try stack.append(State { .Payload = OptionalCtx { .Optional = &node.payload } });
+ return true;
+ },
+ Token.Id.Keyword_if => {
+ const node = try createToCtxNode(arena, ctx, ast.Node.If,
+ ast.Node.If {
+ .base = undefined,
+ .if_token = token_index,
+ .condition = undefined,
+ .payload = null,
+ .body = undefined,
+ .@"else" = null,
+ }
+ );
+
+ stack.append(State { .Else = &node.@"else" }) catch unreachable;
+ try stack.append(State { .Expression = OptionalCtx { .Required = &node.body } });
+ try stack.append(State { .PointerPayload = OptionalCtx { .Optional = &node.payload } });
+ try stack.append(State { .ExpectToken = Token.Id.RParen });
+ try stack.append(State { .Expression = OptionalCtx { .Required = &node.condition } });
+ try stack.append(State { .ExpectToken = Token.Id.LParen });
+ return true;
+ },
+ Token.Id.Keyword_while => {
+ stack.append(State {
+ .While = LoopCtx {
+ .label = null,
+ .inline_token = null,
+ .loop_token = token_index,
+ .opt_ctx = *ctx,
+ }
+ }) catch unreachable;
+ return true;
+ },
+ Token.Id.Keyword_for => {
+ stack.append(State {
+ .For = LoopCtx {
+ .label = null,
+ .inline_token = null,
+ .loop_token = token_index,
+ .opt_ctx = *ctx,
+ }
+ }) catch unreachable;
+ return true;
+ },
+ Token.Id.Keyword_switch => {
+ const node = try arena.construct(ast.Node.Switch {
+ .base = ast.Node {
+ .id = ast.Node.Id.Switch,
+ },
+ .switch_token = token_index,
+ .expr = undefined,
+ .cases = ast.Node.Switch.CaseList.init(arena),
+ .rbrace = undefined,
+ });
+ ctx.store(&node.base);
+
+ stack.append(State {
+ .SwitchCaseOrEnd = ListSave(@typeOf(node.cases)) {
+ .list = &node.cases,
+ .ptr = &node.rbrace,
+ },
+ }) catch unreachable;
+ try stack.append(State { .ExpectToken = Token.Id.LBrace });
+ try stack.append(State { .ExpectToken = Token.Id.RParen });
+ try stack.append(State { .Expression = OptionalCtx { .Required = &node.expr } });
+ try stack.append(State { .ExpectToken = Token.Id.LParen });
+ return true;
+ },
+ Token.Id.Keyword_comptime => {
+ const node = try createToCtxNode(arena, ctx, ast.Node.Comptime,
+ ast.Node.Comptime {
+ .base = undefined,
+ .comptime_token = token_index,
+ .expr = undefined,
+ .doc_comments = null,
+ }
+ );
+ try stack.append(State { .Expression = OptionalCtx { .Required = &node.expr } });
+ return true;
+ },
+ Token.Id.LBrace => {
+ const block = try arena.construct(ast.Node.Block {
+ .base = ast.Node {.id = ast.Node.Id.Block },
+ .label = null,
+ .lbrace = token_index,
+ .statements = ast.Node.Block.StatementList.init(arena),
+ .rbrace = undefined,
+ });
+ ctx.store(&block.base);
+ stack.append(State { .Block = block }) catch unreachable;
+ return true;
+ },
+ else => {
+ return false;
+ }
+ }
+}
+
+const ExpectCommaOrEndResult = union(enum) {
+ end_token: ?TokenIndex,
+ parse_error: Error,
+};
+
+fn expectCommaOrEnd(tok_it: &ast.Tree.TokenList.Iterator, tree: &ast.Tree, end: @TagType(Token.Id)) ExpectCommaOrEndResult {
+ const token = nextToken(tok_it, tree);
+ const token_index = token.index;
+ const token_ptr = token.ptr;
+ switch (token_ptr.id) {
+ Token.Id.Comma => return ExpectCommaOrEndResult { .end_token = null},
+ else => {
+ if (end == token_ptr.id) {
+ return ExpectCommaOrEndResult { .end_token = token_index };
+ }
+
+ return ExpectCommaOrEndResult {
+ .parse_error = Error {
+ .ExpectedCommaOrEnd = Error.ExpectedCommaOrEnd {
+ .token = token_index,
+ .end_id = end,
+ },
+ },
+ };
+ },
+ }
+}
+
+fn tokenIdToAssignment(id: &const Token.Id) ?ast.Node.InfixOp.Op {
+ // TODO: We have to cast all cases because of this:
+ // error: expected type '?InfixOp', found '?@TagType(InfixOp)'
+ return switch (*id) {
+ Token.Id.AmpersandEqual => ast.Node.InfixOp.Op { .AssignBitAnd = {} },
+ Token.Id.AngleBracketAngleBracketLeftEqual => ast.Node.InfixOp.Op { .AssignBitShiftLeft = {} },
+ Token.Id.AngleBracketAngleBracketRightEqual => ast.Node.InfixOp.Op { .AssignBitShiftRight = {} },
+ Token.Id.AsteriskEqual => ast.Node.InfixOp.Op { .AssignTimes = {} },
+ Token.Id.AsteriskPercentEqual => ast.Node.InfixOp.Op { .AssignTimesWarp = {} },
+ Token.Id.CaretEqual => ast.Node.InfixOp.Op { .AssignBitXor = {} },
+ Token.Id.Equal => ast.Node.InfixOp.Op { .Assign = {} },
+ Token.Id.MinusEqual => ast.Node.InfixOp.Op { .AssignMinus = {} },
+ Token.Id.MinusPercentEqual => ast.Node.InfixOp.Op { .AssignMinusWrap = {} },
+ Token.Id.PercentEqual => ast.Node.InfixOp.Op { .AssignMod = {} },
+ Token.Id.PipeEqual => ast.Node.InfixOp.Op { .AssignBitOr = {} },
+ Token.Id.PlusEqual => ast.Node.InfixOp.Op { .AssignPlus = {} },
+ Token.Id.PlusPercentEqual => ast.Node.InfixOp.Op { .AssignPlusWrap = {} },
+ Token.Id.SlashEqual => ast.Node.InfixOp.Op { .AssignDiv = {} },
+ else => null,
+ };
+}
+
+fn tokenIdToUnwrapExpr(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op {
+ return switch (id) {
+ Token.Id.Keyword_catch => ast.Node.InfixOp.Op { .Catch = null },
+ Token.Id.QuestionMarkQuestionMark => ast.Node.InfixOp.Op { .UnwrapMaybe = void{} },
+ else => null,
+ };
+}
+
+fn tokenIdToComparison(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op {
+ return switch (id) {
+ Token.Id.BangEqual => ast.Node.InfixOp.Op { .BangEqual = void{} },
+ Token.Id.EqualEqual => ast.Node.InfixOp.Op { .EqualEqual = void{} },
+ Token.Id.AngleBracketLeft => ast.Node.InfixOp.Op { .LessThan = void{} },
+ Token.Id.AngleBracketLeftEqual => ast.Node.InfixOp.Op { .LessOrEqual = void{} },
+ Token.Id.AngleBracketRight => ast.Node.InfixOp.Op { .GreaterThan = void{} },
+ Token.Id.AngleBracketRightEqual => ast.Node.InfixOp.Op { .GreaterOrEqual = void{} },
+ else => null,
+ };
+}
+
+fn tokenIdToBitShift(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op {
+ return switch (id) {
+ Token.Id.AngleBracketAngleBracketLeft => ast.Node.InfixOp.Op { .BitShiftLeft = void{} },
+ Token.Id.AngleBracketAngleBracketRight => ast.Node.InfixOp.Op { .BitShiftRight = void{} },
+ else => null,
+ };
+}
+
+fn tokenIdToAddition(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op {
+ return switch (id) {
+ Token.Id.Minus => ast.Node.InfixOp.Op { .Sub = void{} },
+ Token.Id.MinusPercent => ast.Node.InfixOp.Op { .SubWrap = void{} },
+ Token.Id.Plus => ast.Node.InfixOp.Op { .Add = void{} },
+ Token.Id.PlusPercent => ast.Node.InfixOp.Op { .AddWrap = void{} },
+ Token.Id.PlusPlus => ast.Node.InfixOp.Op { .ArrayCat = void{} },
+ else => null,
+ };
+}
+
+fn tokenIdToMultiply(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op {
+ return switch (id) {
+ Token.Id.Slash => ast.Node.InfixOp.Op { .Div = void{} },
+ Token.Id.Asterisk => ast.Node.InfixOp.Op { .Mult = void{} },
+ Token.Id.AsteriskAsterisk => ast.Node.InfixOp.Op { .ArrayMult = void{} },
+ Token.Id.AsteriskPercent => ast.Node.InfixOp.Op { .MultWrap = void{} },
+ Token.Id.Percent => ast.Node.InfixOp.Op { .Mod = void{} },
+ Token.Id.PipePipe => ast.Node.InfixOp.Op { .MergeErrorSets = void{} },
+ else => null,
+ };
+}
+
+fn tokenIdToPrefixOp(id: @TagType(Token.Id)) ?ast.Node.PrefixOp.Op {
+ return switch (id) {
+ Token.Id.Bang => ast.Node.PrefixOp.Op { .BoolNot = void{} },
+ Token.Id.Tilde => ast.Node.PrefixOp.Op { .BitNot = void{} },
+ Token.Id.Minus => ast.Node.PrefixOp.Op { .Negation = void{} },
+ Token.Id.MinusPercent => ast.Node.PrefixOp.Op { .NegationWrap = void{} },
+ Token.Id.Asterisk, Token.Id.AsteriskAsterisk => ast.Node.PrefixOp.Op { .Deref = void{} },
+ Token.Id.Ampersand => ast.Node.PrefixOp.Op {
+ .AddrOf = ast.Node.PrefixOp.AddrOfInfo {
+ .align_expr = null,
+ .bit_offset_start_token = null,
+ .bit_offset_end_token = null,
+ .const_token = null,
+ .volatile_token = null,
+ },
+ },
+ Token.Id.QuestionMark => ast.Node.PrefixOp.Op { .MaybeType = void{} },
+ Token.Id.QuestionMarkQuestionMark => ast.Node.PrefixOp.Op { .UnwrapMaybe = void{} },
+ Token.Id.Keyword_await => ast.Node.PrefixOp.Op { .Await = void{} },
+ Token.Id.Keyword_try => ast.Node.PrefixOp.Op { .Try = void{ } },
+ else => null,
+ };
+}
+
+fn createNode(arena: &mem.Allocator, comptime T: type, init_to: &const T) !&T {
+ const node = try arena.create(T);
+ *node = *init_to;
+ node.base = blk: {
+ const id = ast.Node.typeToId(T);
+ break :blk ast.Node {
+ .id = id,
+ };
+ };
+
+ return node;
+}
+
+fn createToCtxNode(arena: &mem.Allocator, opt_ctx: &const OptionalCtx, comptime T: type, init_to: &const T) !&T {
+ const node = try createNode(arena, T, init_to);
+ opt_ctx.store(&node.base);
+
+ return node;
+}
+
+fn createLiteral(arena: &mem.Allocator, comptime T: type, token_index: TokenIndex) !&T {
+ return createNode(arena, T,
+ T {
+ .base = undefined,
+ .token = token_index,
+ }
+ );
+}
+
+fn createToCtxLiteral(arena: &mem.Allocator, opt_ctx: &const OptionalCtx, comptime T: type, token_index: TokenIndex) !&T {
+ const node = try createLiteral(arena, T, token_index);
+ opt_ctx.store(&node.base);
+
+ return node;
+}
+
+fn eatToken(tok_it: &ast.Tree.TokenList.Iterator, tree: &ast.Tree, id: @TagType(Token.Id)) ?TokenIndex {
+ const token = nextToken(tok_it, tree);
+
+ if (token.ptr.id == id)
+ return token.index;
+
+ putBackToken(tok_it, tree);
+ return null;
+}
+
+fn nextToken(tok_it: &ast.Tree.TokenList.Iterator, tree: &ast.Tree) AnnotatedToken {
+ const result = AnnotatedToken {
+ .index = tok_it.index,
+ .ptr = ??tok_it.next(),
+ };
+ // possibly skip a following same line token
+ const token = tok_it.next() ?? return result;
+ if (token.id != Token.Id.LineComment) {
+ putBackToken(tok_it, tree);
+ return result;
+ }
+ const loc = tree.tokenLocationPtr(result.ptr.end, token);
+ if (loc.line != 0) {
+ putBackToken(tok_it, tree);
+ }
+ return result;
+}
+
+fn putBackToken(tok_it: &ast.Tree.TokenList.Iterator, tree: &ast.Tree) void {
+ const prev_tok = ??tok_it.prev();
+ if (prev_tok.id == Token.Id.LineComment) {
+ const minus2_tok = tok_it.prev() ?? return;
+ const loc = tree.tokenLocationPtr(minus2_tok.end, prev_tok);
+ if (loc.line != 0) {
+ _ = tok_it.next();
+ }
+ }
+}
+
+test "std.zig.parser" {
+ _ = @import("parser_test.zig");
+}
diff --git a/std/zig/parser.zig b/std/zig/parser.zig
deleted file mode 100644
index e4b2aa620a..0000000000
--- a/std/zig/parser.zig
+++ /dev/null
@@ -1,4734 +0,0 @@
-const std = @import("../index.zig");
-const assert = std.debug.assert;
-const ArrayList = std.ArrayList;
-const mem = std.mem;
-const ast = std.zig.ast;
-const Tokenizer = std.zig.Tokenizer;
-const Token = std.zig.Token;
-const builtin = @import("builtin");
-const io = std.io;
-
-// TODO when we make parse errors into error types instead of printing directly,
-// get rid of this
-const warn = std.debug.warn;
-
-pub const Parser = struct {
- util_allocator: &mem.Allocator,
- tokenizer: &Tokenizer,
- put_back_tokens: [2]Token,
- put_back_count: usize,
- source_file_name: []const u8,
-
- pub const Tree = struct {
- root_node: &ast.Node.Root,
- arena_allocator: std.heap.ArenaAllocator,
-
- pub fn deinit(self: &Tree) void {
- self.arena_allocator.deinit();
- }
- };
-
- // This memory contents are used only during a function call. It's used to repurpose memory;
- // we reuse the same bytes for the stack data structure used by parsing, tree rendering, and
- // source rendering.
- const utility_bytes_align = @alignOf( union { a: RenderAstFrame, b: State, c: RenderState } );
- utility_bytes: []align(utility_bytes_align) u8,
-
- /// allocator must outlive the returned Parser and all the parse trees you create with it.
- pub fn init(tokenizer: &Tokenizer, allocator: &mem.Allocator, source_file_name: []const u8) Parser {
- return Parser {
- .util_allocator = allocator,
- .tokenizer = tokenizer,
- .put_back_tokens = undefined,
- .put_back_count = 0,
- .source_file_name = source_file_name,
- .utility_bytes = []align(utility_bytes_align) u8{},
- };
- }
-
- pub fn deinit(self: &Parser) void {
- self.util_allocator.free(self.utility_bytes);
- }
-
- const TopLevelDeclCtx = struct {
- decls: &ArrayList(&ast.Node),
- visib_token: ?Token,
- extern_export_inline_token: ?Token,
- lib_name: ?&ast.Node,
- comments: ?&ast.Node.DocComment,
- };
-
- const VarDeclCtx = struct {
- mut_token: Token,
- visib_token: ?Token,
- comptime_token: ?Token,
- extern_export_token: ?Token,
- lib_name: ?&ast.Node,
- list: &ArrayList(&ast.Node),
- comments: ?&ast.Node.DocComment,
- };
-
- const TopLevelExternOrFieldCtx = struct {
- visib_token: Token,
- container_decl: &ast.Node.ContainerDecl,
- comments: ?&ast.Node.DocComment,
- };
-
- const ExternTypeCtx = struct {
- opt_ctx: OptionalCtx,
- extern_token: Token,
- comments: ?&ast.Node.DocComment,
- };
-
- const ContainerKindCtx = struct {
- opt_ctx: OptionalCtx,
- ltoken: Token,
- layout: ast.Node.ContainerDecl.Layout,
- };
-
- const ExpectTokenSave = struct {
- id: Token.Id,
- ptr: &Token,
- };
-
- const OptionalTokenSave = struct {
- id: Token.Id,
- ptr: &?Token,
- };
-
- const ExprListCtx = struct {
- list: &ArrayList(&ast.Node),
- end: Token.Id,
- ptr: &Token,
- };
-
- fn ListSave(comptime T: type) type {
- return struct {
- list: &ArrayList(T),
- ptr: &Token,
- };
- }
-
- const MaybeLabeledExpressionCtx = struct {
- label: Token,
- opt_ctx: OptionalCtx,
- };
-
- const LabelCtx = struct {
- label: ?Token,
- opt_ctx: OptionalCtx,
- };
-
- const InlineCtx = struct {
- label: ?Token,
- inline_token: ?Token,
- opt_ctx: OptionalCtx,
- };
-
- const LoopCtx = struct {
- label: ?Token,
- inline_token: ?Token,
- loop_token: Token,
- opt_ctx: OptionalCtx,
- };
-
- const AsyncEndCtx = struct {
- ctx: OptionalCtx,
- attribute: &ast.Node.AsyncAttribute,
- };
-
- const ErrorTypeOrSetDeclCtx = struct {
- opt_ctx: OptionalCtx,
- error_token: Token,
- };
-
- const ParamDeclEndCtx = struct {
- fn_proto: &ast.Node.FnProto,
- param_decl: &ast.Node.ParamDecl,
- };
-
- const ComptimeStatementCtx = struct {
- comptime_token: Token,
- block: &ast.Node.Block,
- };
-
- const OptionalCtx = union(enum) {
- Optional: &?&ast.Node,
- RequiredNull: &?&ast.Node,
- Required: &&ast.Node,
-
- pub fn store(self: &const OptionalCtx, value: &ast.Node) void {
- switch (*self) {
- OptionalCtx.Optional => |ptr| *ptr = value,
- OptionalCtx.RequiredNull => |ptr| *ptr = value,
- OptionalCtx.Required => |ptr| *ptr = value,
- }
- }
-
- pub fn get(self: &const OptionalCtx) ?&ast.Node {
- switch (*self) {
- OptionalCtx.Optional => |ptr| return *ptr,
- OptionalCtx.RequiredNull => |ptr| return ??*ptr,
- OptionalCtx.Required => |ptr| return *ptr,
- }
- }
-
- pub fn toRequired(self: &const OptionalCtx) OptionalCtx {
- switch (*self) {
- OptionalCtx.Optional => |ptr| {
- return OptionalCtx { .RequiredNull = ptr };
- },
- OptionalCtx.RequiredNull => |ptr| return *self,
- OptionalCtx.Required => |ptr| return *self,
- }
- }
- };
-
- const AddCommentsCtx = struct {
- node_ptr: &&ast.Node,
- comments: ?&ast.Node.DocComment,
- };
-
- const State = union(enum) {
- TopLevel,
- TopLevelExtern: TopLevelDeclCtx,
- TopLevelLibname: TopLevelDeclCtx,
- TopLevelDecl: TopLevelDeclCtx,
- TopLevelExternOrField: TopLevelExternOrFieldCtx,
-
- ContainerKind: ContainerKindCtx,
- ContainerInitArgStart: &ast.Node.ContainerDecl,
- ContainerInitArg: &ast.Node.ContainerDecl,
- ContainerDecl: &ast.Node.ContainerDecl,
-
- VarDecl: VarDeclCtx,
- VarDeclAlign: &ast.Node.VarDecl,
- VarDeclEq: &ast.Node.VarDecl,
-
- FnDef: &ast.Node.FnProto,
- FnProto: &ast.Node.FnProto,
- FnProtoAlign: &ast.Node.FnProto,
- FnProtoReturnType: &ast.Node.FnProto,
-
- ParamDecl: &ast.Node.FnProto,
- ParamDeclAliasOrComptime: &ast.Node.ParamDecl,
- ParamDeclName: &ast.Node.ParamDecl,
- ParamDeclEnd: ParamDeclEndCtx,
- ParamDeclComma: &ast.Node.FnProto,
-
- MaybeLabeledExpression: MaybeLabeledExpressionCtx,
- LabeledExpression: LabelCtx,
- Inline: InlineCtx,
- While: LoopCtx,
- WhileContinueExpr: &?&ast.Node,
- For: LoopCtx,
- Else: &?&ast.Node.Else,
-
- Block: &ast.Node.Block,
- Statement: &ast.Node.Block,
- ComptimeStatement: ComptimeStatementCtx,
- Semicolon: &&ast.Node,
- LookForSameLineComment: &&ast.Node,
- LookForSameLineCommentDirect: &ast.Node,
-
- AsmOutputItems: &ArrayList(&ast.Node.AsmOutput),
- AsmOutputReturnOrType: &ast.Node.AsmOutput,
- AsmInputItems: &ArrayList(&ast.Node.AsmInput),
- AsmClopperItems: &ArrayList(&ast.Node),
-
- ExprListItemOrEnd: ExprListCtx,
- ExprListCommaOrEnd: ExprListCtx,
- FieldInitListItemOrEnd: ListSave(&ast.Node),
- FieldInitListCommaOrEnd: ListSave(&ast.Node),
- FieldListCommaOrEnd: &ast.Node.ContainerDecl,
- FieldInitValue: OptionalCtx,
- ErrorTagListItemOrEnd: ListSave(&ast.Node),
- ErrorTagListCommaOrEnd: ListSave(&ast.Node),
- SwitchCaseOrEnd: ListSave(&ast.Node),
- SwitchCaseCommaOrEnd: ListSave(&ast.Node),
- SwitchCaseFirstItem: &ArrayList(&ast.Node),
- SwitchCaseItem: &ArrayList(&ast.Node),
- SwitchCaseItemCommaOrEnd: &ArrayList(&ast.Node),
-
- SuspendBody: &ast.Node.Suspend,
- AsyncAllocator: &ast.Node.AsyncAttribute,
- AsyncEnd: AsyncEndCtx,
-
- ExternType: ExternTypeCtx,
- SliceOrArrayAccess: &ast.Node.SuffixOp,
- SliceOrArrayType: &ast.Node.PrefixOp,
- AddrOfModifiers: &ast.Node.PrefixOp.AddrOfInfo,
-
- Payload: OptionalCtx,
- PointerPayload: OptionalCtx,
- PointerIndexPayload: OptionalCtx,
-
- Expression: OptionalCtx,
- RangeExpressionBegin: OptionalCtx,
- RangeExpressionEnd: OptionalCtx,
- AssignmentExpressionBegin: OptionalCtx,
- AssignmentExpressionEnd: OptionalCtx,
- UnwrapExpressionBegin: OptionalCtx,
- UnwrapExpressionEnd: OptionalCtx,
- BoolOrExpressionBegin: OptionalCtx,
- BoolOrExpressionEnd: OptionalCtx,
- BoolAndExpressionBegin: OptionalCtx,
- BoolAndExpressionEnd: OptionalCtx,
- ComparisonExpressionBegin: OptionalCtx,
- ComparisonExpressionEnd: OptionalCtx,
- BinaryOrExpressionBegin: OptionalCtx,
- BinaryOrExpressionEnd: OptionalCtx,
- BinaryXorExpressionBegin: OptionalCtx,
- BinaryXorExpressionEnd: OptionalCtx,
- BinaryAndExpressionBegin: OptionalCtx,
- BinaryAndExpressionEnd: OptionalCtx,
- BitShiftExpressionBegin: OptionalCtx,
- BitShiftExpressionEnd: OptionalCtx,
- AdditionExpressionBegin: OptionalCtx,
- AdditionExpressionEnd: OptionalCtx,
- MultiplyExpressionBegin: OptionalCtx,
- MultiplyExpressionEnd: OptionalCtx,
- CurlySuffixExpressionBegin: OptionalCtx,
- CurlySuffixExpressionEnd: OptionalCtx,
- TypeExprBegin: OptionalCtx,
- TypeExprEnd: OptionalCtx,
- PrefixOpExpression: OptionalCtx,
- SuffixOpExpressionBegin: OptionalCtx,
- SuffixOpExpressionEnd: OptionalCtx,
- PrimaryExpression: OptionalCtx,
-
- ErrorTypeOrSetDecl: ErrorTypeOrSetDeclCtx,
- StringLiteral: OptionalCtx,
- Identifier: OptionalCtx,
- ErrorTag: &&ast.Node,
-
-
- IfToken: @TagType(Token.Id),
- IfTokenSave: ExpectTokenSave,
- ExpectToken: @TagType(Token.Id),
- ExpectTokenSave: ExpectTokenSave,
- OptionalTokenSave: OptionalTokenSave,
- };
-
- /// Returns an AST tree, allocated with the parser's allocator.
- /// Result should be freed with tree.deinit() when there are
- /// no more references to any AST nodes of the tree.
- pub fn parse(self: &Parser) !Tree {
- var stack = self.initUtilityArrayList(State);
- defer self.deinitUtilityArrayList(stack);
-
- var arena_allocator = std.heap.ArenaAllocator.init(self.util_allocator);
- errdefer arena_allocator.deinit();
-
- const arena = &arena_allocator.allocator;
- const root_node = try self.createNode(arena, ast.Node.Root,
- ast.Node.Root {
- .base = undefined,
- .decls = ArrayList(&ast.Node).init(arena),
- .doc_comments = null,
- // initialized when we get the eof token
- .eof_token = undefined,
- }
- );
-
- try stack.append(State.TopLevel);
-
- while (true) {
- //{
- // const token = self.getNextToken();
- // warn("{} ", @tagName(token.id));
- // self.putBackToken(token);
- // var i: usize = stack.len;
- // while (i != 0) {
- // i -= 1;
- // warn("{} ", @tagName(stack.items[i]));
- // }
- // warn("\n");
- //}
-
- // This gives us 1 free append that can't fail
- const state = stack.pop();
-
- switch (state) {
- State.TopLevel => {
- while (try self.eatLineComment(arena)) |line_comment| {
- try root_node.decls.append(&line_comment.base);
- }
-
- const comments = try self.eatDocComments(arena);
- const token = self.getNextToken();
- switch (token.id) {
- Token.Id.Keyword_test => {
- stack.append(State.TopLevel) catch unreachable;
-
- const block = try arena.construct(ast.Node.Block {
- .base = ast.Node {
- .id = ast.Node.Id.Block,
- .same_line_comment = null,
- },
- .label = null,
- .lbrace = undefined,
- .statements = ArrayList(&ast.Node).init(arena),
- .rbrace = undefined,
- });
- const test_node = try arena.construct(ast.Node.TestDecl {
- .base = ast.Node {
- .id = ast.Node.Id.TestDecl,
- .same_line_comment = null,
- },
- .doc_comments = comments,
- .test_token = token,
- .name = undefined,
- .body_node = &block.base,
- });
- try root_node.decls.append(&test_node.base);
- try stack.append(State { .Block = block });
- try stack.append(State {
- .ExpectTokenSave = ExpectTokenSave {
- .id = Token.Id.LBrace,
- .ptr = &block.rbrace,
- }
- });
- try stack.append(State { .StringLiteral = OptionalCtx { .Required = &test_node.name } });
- continue;
- },
- Token.Id.Eof => {
- root_node.eof_token = token;
- root_node.doc_comments = comments;
- return Tree {
- .root_node = root_node,
- .arena_allocator = arena_allocator,
- };
- },
- Token.Id.Keyword_pub => {
- stack.append(State.TopLevel) catch unreachable;
- try stack.append(State {
- .TopLevelExtern = TopLevelDeclCtx {
- .decls = &root_node.decls,
- .visib_token = token,
- .extern_export_inline_token = null,
- .lib_name = null,
- .comments = comments,
- }
- });
- continue;
- },
- Token.Id.Keyword_comptime => {
- const block = try self.createNode(arena, ast.Node.Block,
- ast.Node.Block {
- .base = undefined,
- .label = null,
- .lbrace = undefined,
- .statements = ArrayList(&ast.Node).init(arena),
- .rbrace = undefined,
- }
- );
- const node = try self.createAttachNode(arena, &root_node.decls, ast.Node.Comptime,
- ast.Node.Comptime {
- .base = undefined,
- .comptime_token = token,
- .expr = &block.base,
- .doc_comments = comments,
- }
- );
- stack.append(State.TopLevel) catch unreachable;
- try stack.append(State { .Block = block });
- try stack.append(State {
- .ExpectTokenSave = ExpectTokenSave {
- .id = Token.Id.LBrace,
- .ptr = &block.rbrace,
- }
- });
- continue;
- },
- else => {
- self.putBackToken(token);
- stack.append(State.TopLevel) catch unreachable;
- try stack.append(State {
- .TopLevelExtern = TopLevelDeclCtx {
- .decls = &root_node.decls,
- .visib_token = null,
- .extern_export_inline_token = null,
- .lib_name = null,
- .comments = comments,
- }
- });
- continue;
- },
- }
- },
- State.TopLevelExtern => |ctx| {
- const token = self.getNextToken();
- switch (token.id) {
- Token.Id.Keyword_export, Token.Id.Keyword_inline => {
- stack.append(State {
- .TopLevelDecl = TopLevelDeclCtx {
- .decls = ctx.decls,
- .visib_token = ctx.visib_token,
- .extern_export_inline_token = token,
- .lib_name = null,
- .comments = ctx.comments,
- },
- }) catch unreachable;
- continue;
- },
- Token.Id.Keyword_extern => {
- stack.append(State {
- .TopLevelLibname = TopLevelDeclCtx {
- .decls = ctx.decls,
- .visib_token = ctx.visib_token,
- .extern_export_inline_token = token,
- .lib_name = null,
- .comments = ctx.comments,
- },
- }) catch unreachable;
- continue;
- },
- else => {
- self.putBackToken(token);
- stack.append(State { .TopLevelDecl = ctx }) catch unreachable;
- continue;
- }
- }
- },
- State.TopLevelLibname => |ctx| {
- const lib_name = blk: {
- const lib_name_token = self.getNextToken();
- break :blk (try self.parseStringLiteral(arena, lib_name_token)) ?? {
- self.putBackToken(lib_name_token);
- break :blk null;
- };
- };
-
- stack.append(State {
- .TopLevelDecl = TopLevelDeclCtx {
- .decls = ctx.decls,
- .visib_token = ctx.visib_token,
- .extern_export_inline_token = ctx.extern_export_inline_token,
- .lib_name = lib_name,
- .comments = ctx.comments,
- },
- }) catch unreachable;
- continue;
- },
- State.TopLevelDecl => |ctx| {
- const token = self.getNextToken();
- switch (token.id) {
- Token.Id.Keyword_use => {
- if (ctx.extern_export_inline_token != null) {
- return self.parseError(token, "Invalid token {}", @tagName((??ctx.extern_export_inline_token).id));
- }
-
- const node = try self.createAttachNode(arena, ctx.decls, ast.Node.Use,
- ast.Node.Use {
- .base = undefined,
- .visib_token = ctx.visib_token,
- .expr = undefined,
- .semicolon_token = undefined,
- .doc_comments = ctx.comments,
- }
- );
- stack.append(State {
- .ExpectTokenSave = ExpectTokenSave {
- .id = Token.Id.Semicolon,
- .ptr = &node.semicolon_token,
- }
- }) catch unreachable;
- try stack.append(State { .Expression = OptionalCtx { .Required = &node.expr } });
- continue;
- },
- Token.Id.Keyword_var, Token.Id.Keyword_const => {
- if (ctx.extern_export_inline_token) |extern_export_inline_token| {
- if (extern_export_inline_token.id == Token.Id.Keyword_inline) {
- return self.parseError(token, "Invalid token {}", @tagName(extern_export_inline_token.id));
- }
- }
-
- try stack.append(State {
- .VarDecl = VarDeclCtx {
- .comments = ctx.comments,
- .visib_token = ctx.visib_token,
- .lib_name = ctx.lib_name,
- .comptime_token = null,
- .extern_export_token = ctx.extern_export_inline_token,
- .mut_token = token,
- .list = ctx.decls
- }
- });
- continue;
- },
- Token.Id.Keyword_fn, Token.Id.Keyword_nakedcc,
- Token.Id.Keyword_stdcallcc, Token.Id.Keyword_async => {
- const fn_proto = try arena.construct(ast.Node.FnProto {
- .base = ast.Node {
- .id = ast.Node.Id.FnProto,
- .same_line_comment = null,
- },
- .doc_comments = ctx.comments,
- .visib_token = ctx.visib_token,
- .name_token = null,
- .fn_token = undefined,
- .params = ArrayList(&ast.Node).init(arena),
- .return_type = undefined,
- .var_args_token = null,
- .extern_export_inline_token = ctx.extern_export_inline_token,
- .cc_token = null,
- .async_attr = null,
- .body_node = null,
- .lib_name = ctx.lib_name,
- .align_expr = null,
- });
- try ctx.decls.append(&fn_proto.base);
- stack.append(State { .FnDef = fn_proto }) catch unreachable;
- try stack.append(State { .FnProto = fn_proto });
-
- switch (token.id) {
- Token.Id.Keyword_nakedcc, Token.Id.Keyword_stdcallcc => {
- fn_proto.cc_token = token;
- try stack.append(State {
- .ExpectTokenSave = ExpectTokenSave {
- .id = Token.Id.Keyword_fn,
- .ptr = &fn_proto.fn_token,
- }
- });
- continue;
- },
- Token.Id.Keyword_async => {
- const async_node = try self.createNode(arena, ast.Node.AsyncAttribute,
- ast.Node.AsyncAttribute {
- .base = undefined,
- .async_token = token,
- .allocator_type = null,
- .rangle_bracket = null,
- }
- );
- fn_proto.async_attr = async_node;
-
- try stack.append(State {
- .ExpectTokenSave = ExpectTokenSave {
- .id = Token.Id.Keyword_fn,
- .ptr = &fn_proto.fn_token,
- }
- });
- try stack.append(State { .AsyncAllocator = async_node });
- continue;
- },
- Token.Id.Keyword_fn => {
- fn_proto.fn_token = token;
- continue;
- },
- else => unreachable,
- }
- },
- else => {
- return self.parseError(token, "expected variable declaration or function, found {}", @tagName(token.id));
- },
- }
- },
- State.TopLevelExternOrField => |ctx| {
- if (self.eatToken(Token.Id.Identifier)) |identifier| {
- std.debug.assert(ctx.container_decl.kind == ast.Node.ContainerDecl.Kind.Struct);
- const node = try arena.construct(ast.Node.StructField {
- .base = ast.Node {
- .id = ast.Node.Id.StructField,
- .same_line_comment = null,
- },
- .doc_comments = ctx.comments,
- .visib_token = ctx.visib_token,
- .name_token = identifier,
- .type_expr = undefined,
- });
- const node_ptr = try ctx.container_decl.fields_and_decls.addOne();
- *node_ptr = &node.base;
-
- stack.append(State { .FieldListCommaOrEnd = ctx.container_decl }) catch unreachable;
- try stack.append(State { .Expression = OptionalCtx { .Required = &node.type_expr } });
- try stack.append(State { .ExpectToken = Token.Id.Colon });
- continue;
- }
-
- stack.append(State{ .ContainerDecl = ctx.container_decl }) catch unreachable;
- try stack.append(State {
- .TopLevelExtern = TopLevelDeclCtx {
- .decls = &ctx.container_decl.fields_and_decls,
- .visib_token = ctx.visib_token,
- .extern_export_inline_token = null,
- .lib_name = null,
- .comments = ctx.comments,
- }
- });
- continue;
- },
-
- State.FieldInitValue => |ctx| {
- const eq_tok = self.getNextToken();
- if (eq_tok.id != Token.Id.Equal) {
- self.putBackToken(eq_tok);
- continue;
- }
- stack.append(State { .Expression = ctx }) catch unreachable;
- continue;
- },
-
- State.ContainerKind => |ctx| {
- const token = self.getNextToken();
- const node = try self.createToCtxNode(arena, ctx.opt_ctx, ast.Node.ContainerDecl,
- ast.Node.ContainerDecl {
- .base = undefined,
- .ltoken = ctx.ltoken,
- .layout = ctx.layout,
- .kind = switch (token.id) {
- Token.Id.Keyword_struct => ast.Node.ContainerDecl.Kind.Struct,
- Token.Id.Keyword_union => ast.Node.ContainerDecl.Kind.Union,
- Token.Id.Keyword_enum => ast.Node.ContainerDecl.Kind.Enum,
- else => {
- return self.parseError(token, "expected {}, {} or {}, found {}",
- @tagName(Token.Id.Keyword_struct),
- @tagName(Token.Id.Keyword_union),
- @tagName(Token.Id.Keyword_enum),
- @tagName(token.id));
- },
- },
- .init_arg_expr = ast.Node.ContainerDecl.InitArg.None,
- .fields_and_decls = ArrayList(&ast.Node).init(arena),
- .rbrace_token = undefined,
- }
- );
-
- stack.append(State { .ContainerDecl = node }) catch unreachable;
- try stack.append(State { .ExpectToken = Token.Id.LBrace });
- try stack.append(State { .ContainerInitArgStart = node });
- continue;
- },
-
- State.ContainerInitArgStart => |container_decl| {
- if (self.eatToken(Token.Id.LParen) == null) {
- continue;
- }
-
- stack.append(State { .ExpectToken = Token.Id.RParen }) catch unreachable;
- try stack.append(State { .ContainerInitArg = container_decl });
- continue;
- },
-
- State.ContainerInitArg => |container_decl| {
- const init_arg_token = self.getNextToken();
- switch (init_arg_token.id) {
- Token.Id.Keyword_enum => {
- container_decl.init_arg_expr = ast.Node.ContainerDecl.InitArg {.Enum = null};
- const lparen_tok = self.getNextToken();
- if (lparen_tok.id == Token.Id.LParen) {
- try stack.append(State { .ExpectToken = Token.Id.RParen } );
- try stack.append(State { .Expression = OptionalCtx {
- .RequiredNull = &container_decl.init_arg_expr.Enum,
- } });
- } else {
- self.putBackToken(lparen_tok);
- }
- },
- else => {
- self.putBackToken(init_arg_token);
- container_decl.init_arg_expr = ast.Node.ContainerDecl.InitArg { .Type = undefined };
- stack.append(State { .Expression = OptionalCtx { .Required = &container_decl.init_arg_expr.Type } }) catch unreachable;
- },
- }
- continue;
- },
-
- State.ContainerDecl => |container_decl| {
- while (try self.eatLineComment(arena)) |line_comment| {
- try container_decl.fields_and_decls.append(&line_comment.base);
- }
-
- const comments = try self.eatDocComments(arena);
- const token = self.getNextToken();
- switch (token.id) {
- Token.Id.Identifier => {
- switch (container_decl.kind) {
- ast.Node.ContainerDecl.Kind.Struct => {
- const node = try arena.construct(ast.Node.StructField {
- .base = ast.Node {
- .id = ast.Node.Id.StructField,
- .same_line_comment = null,
- },
- .doc_comments = comments,
- .visib_token = null,
- .name_token = token,
- .type_expr = undefined,
- });
- const node_ptr = try container_decl.fields_and_decls.addOne();
- *node_ptr = &node.base;
-
- try stack.append(State { .FieldListCommaOrEnd = container_decl });
- try stack.append(State { .TypeExprBegin = OptionalCtx { .Required = &node.type_expr } });
- try stack.append(State { .ExpectToken = Token.Id.Colon });
- continue;
- },
- ast.Node.ContainerDecl.Kind.Union => {
- const node = try self.createAttachNode(arena, &container_decl.fields_and_decls, ast.Node.UnionTag,
- ast.Node.UnionTag {
- .base = undefined,
- .name_token = token,
- .type_expr = null,
- .value_expr = null,
- .doc_comments = comments,
- }
- );
-
- stack.append(State { .FieldListCommaOrEnd = container_decl }) catch unreachable;
- try stack.append(State { .FieldInitValue = OptionalCtx { .RequiredNull = &node.value_expr } });
- try stack.append(State { .TypeExprBegin = OptionalCtx { .RequiredNull = &node.type_expr } });
- try stack.append(State { .IfToken = Token.Id.Colon });
- continue;
- },
- ast.Node.ContainerDecl.Kind.Enum => {
- const node = try self.createAttachNode(arena, &container_decl.fields_and_decls, ast.Node.EnumTag,
- ast.Node.EnumTag {
- .base = undefined,
- .name_token = token,
- .value = null,
- .doc_comments = comments,
- }
- );
-
- stack.append(State { .FieldListCommaOrEnd = container_decl }) catch unreachable;
- try stack.append(State { .Expression = OptionalCtx { .RequiredNull = &node.value } });
- try stack.append(State { .IfToken = Token.Id.Equal });
- continue;
- },
- }
- },
- Token.Id.Keyword_pub => {
- switch (container_decl.kind) {
- ast.Node.ContainerDecl.Kind.Struct => {
- try stack.append(State {
- .TopLevelExternOrField = TopLevelExternOrFieldCtx {
- .visib_token = token,
- .container_decl = container_decl,
- .comments = comments,
- }
- });
- continue;
- },
- else => {
- stack.append(State{ .ContainerDecl = container_decl }) catch unreachable;
- try stack.append(State {
- .TopLevelExtern = TopLevelDeclCtx {
- .decls = &container_decl.fields_and_decls,
- .visib_token = token,
- .extern_export_inline_token = null,
- .lib_name = null,
- .comments = comments,
- }
- });
- continue;
- }
- }
- },
- Token.Id.Keyword_export => {
- stack.append(State{ .ContainerDecl = container_decl }) catch unreachable;
- try stack.append(State {
- .TopLevelExtern = TopLevelDeclCtx {
- .decls = &container_decl.fields_and_decls,
- .visib_token = token,
- .extern_export_inline_token = null,
- .lib_name = null,
- .comments = comments,
- }
- });
- continue;
- },
- Token.Id.RBrace => {
- if (comments != null) {
- return self.parseError(token, "doc comments must be attached to a node");
- }
- container_decl.rbrace_token = token;
- continue;
- },
- else => {
- self.putBackToken(token);
- stack.append(State{ .ContainerDecl = container_decl }) catch unreachable;
- try stack.append(State {
- .TopLevelExtern = TopLevelDeclCtx {
- .decls = &container_decl.fields_and_decls,
- .visib_token = null,
- .extern_export_inline_token = null,
- .lib_name = null,
- .comments = comments,
- }
- });
- continue;
- }
- }
- },
-
-
- State.VarDecl => |ctx| {
- const var_decl = try arena.construct(ast.Node.VarDecl {
- .base = ast.Node {
- .id = ast.Node.Id.VarDecl,
- .same_line_comment = null,
- },
- .doc_comments = ctx.comments,
- .visib_token = ctx.visib_token,
- .mut_token = ctx.mut_token,
- .comptime_token = ctx.comptime_token,
- .extern_export_token = ctx.extern_export_token,
- .type_node = null,
- .align_node = null,
- .init_node = null,
- .lib_name = ctx.lib_name,
- // initialized later
- .name_token = undefined,
- .eq_token = undefined,
- .semicolon_token = undefined,
- });
- try ctx.list.append(&var_decl.base);
-
- try stack.append(State { .LookForSameLineCommentDirect = &var_decl.base });
- try stack.append(State { .VarDeclAlign = var_decl });
- try stack.append(State { .TypeExprBegin = OptionalCtx { .RequiredNull = &var_decl.type_node} });
- try stack.append(State { .IfToken = Token.Id.Colon });
- try stack.append(State {
- .ExpectTokenSave = ExpectTokenSave {
- .id = Token.Id.Identifier,
- .ptr = &var_decl.name_token,
- }
- });
- continue;
- },
- State.VarDeclAlign => |var_decl| {
- try stack.append(State { .VarDeclEq = var_decl });
-
- const next_token = self.getNextToken();
- if (next_token.id == Token.Id.Keyword_align) {
- try stack.append(State { .ExpectToken = Token.Id.RParen });
- try stack.append(State { .Expression = OptionalCtx { .RequiredNull = &var_decl.align_node} });
- try stack.append(State { .ExpectToken = Token.Id.LParen });
- continue;
- }
-
- self.putBackToken(next_token);
- continue;
- },
- State.VarDeclEq => |var_decl| {
- const token = self.getNextToken();
- switch (token.id) {
- Token.Id.Equal => {
- var_decl.eq_token = token;
- stack.append(State {
- .ExpectTokenSave = ExpectTokenSave {
- .id = Token.Id.Semicolon,
- .ptr = &var_decl.semicolon_token,
- },
- }) catch unreachable;
- try stack.append(State { .Expression = OptionalCtx { .RequiredNull = &var_decl.init_node } });
- continue;
- },
- Token.Id.Semicolon => {
- var_decl.semicolon_token = token;
- continue;
- },
- else => {
- return self.parseError(token, "expected '=' or ';', found {}", @tagName(token.id));
- }
- }
- },
-
-
- State.FnDef => |fn_proto| {
- const token = self.getNextToken();
- switch(token.id) {
- Token.Id.LBrace => {
- const block = try self.createNode(arena, ast.Node.Block,
- ast.Node.Block {
- .base = undefined,
- .label = null,
- .lbrace = token,
- .statements = ArrayList(&ast.Node).init(arena),
- .rbrace = undefined,
- }
- );
- fn_proto.body_node = &block.base;
- stack.append(State { .Block = block }) catch unreachable;
- continue;
- },
- Token.Id.Semicolon => continue,
- else => {
- return self.parseError(token, "expected ';' or '{{', found {}", @tagName(token.id));
- },
- }
- },
- State.FnProto => |fn_proto| {
- stack.append(State { .FnProtoAlign = fn_proto }) catch unreachable;
- try stack.append(State { .ParamDecl = fn_proto });
- try stack.append(State { .ExpectToken = Token.Id.LParen });
-
- if (self.eatToken(Token.Id.Identifier)) |name_token| {
- fn_proto.name_token = name_token;
- }
- continue;
- },
- State.FnProtoAlign => |fn_proto| {
- stack.append(State { .FnProtoReturnType = fn_proto }) catch unreachable;
-
- if (self.eatToken(Token.Id.Keyword_align)) |align_token| {
- try stack.append(State { .ExpectToken = Token.Id.RParen });
- try stack.append(State { .Expression = OptionalCtx { .RequiredNull = &fn_proto.align_expr } });
- try stack.append(State { .ExpectToken = Token.Id.LParen });
- }
- continue;
- },
- State.FnProtoReturnType => |fn_proto| {
- const token = self.getNextToken();
- switch (token.id) {
- Token.Id.Bang => {
- fn_proto.return_type = ast.Node.FnProto.ReturnType { .InferErrorSet = undefined };
- stack.append(State {
- .TypeExprBegin = OptionalCtx { .Required = &fn_proto.return_type.InferErrorSet },
- }) catch unreachable;
- continue;
- },
- else => {
- // TODO: this is a special case. Remove this when #760 is fixed
- if (token.id == Token.Id.Keyword_error) {
- if (self.isPeekToken(Token.Id.LBrace)) {
- fn_proto.return_type = ast.Node.FnProto.ReturnType {
- .Explicit = &(try self.createLiteral(arena, ast.Node.ErrorType, token)).base
- };
- continue;
- }
- }
-
- self.putBackToken(token);
- fn_proto.return_type = ast.Node.FnProto.ReturnType { .Explicit = undefined };
- stack.append(State { .TypeExprBegin = OptionalCtx { .Required = &fn_proto.return_type.Explicit }, }) catch unreachable;
- continue;
- },
- }
- },
-
-
- State.ParamDecl => |fn_proto| {
- if (self.eatToken(Token.Id.RParen)) |_| {
- continue;
- }
- const param_decl = try self.createAttachNode(arena, &fn_proto.params, ast.Node.ParamDecl,
- ast.Node.ParamDecl {
- .base = undefined,
- .comptime_token = null,
- .noalias_token = null,
- .name_token = null,
- .type_node = undefined,
- .var_args_token = null,
- },
- );
-
- stack.append(State {
- .ParamDeclEnd = ParamDeclEndCtx {
- .param_decl = param_decl,
- .fn_proto = fn_proto,
- }
- }) catch unreachable;
- try stack.append(State { .ParamDeclName = param_decl });
- try stack.append(State { .ParamDeclAliasOrComptime = param_decl });
- continue;
- },
- State.ParamDeclAliasOrComptime => |param_decl| {
- if (self.eatToken(Token.Id.Keyword_comptime)) |comptime_token| {
- param_decl.comptime_token = comptime_token;
- } else if (self.eatToken(Token.Id.Keyword_noalias)) |noalias_token| {
- param_decl.noalias_token = noalias_token;
- }
- continue;
- },
- State.ParamDeclName => |param_decl| {
- // TODO: Here, we eat two tokens in one state. This means that we can't have
- // comments between these two tokens.
- if (self.eatToken(Token.Id.Identifier)) |ident_token| {
- if (self.eatToken(Token.Id.Colon)) |_| {
- param_decl.name_token = ident_token;
- } else {
- self.putBackToken(ident_token);
- }
- }
- continue;
- },
- State.ParamDeclEnd => |ctx| {
- if (self.eatToken(Token.Id.Ellipsis3)) |ellipsis3| {
- ctx.param_decl.var_args_token = ellipsis3;
- stack.append(State { .ExpectToken = Token.Id.RParen }) catch unreachable;
- continue;
- }
-
- try stack.append(State { .ParamDeclComma = ctx.fn_proto });
- try stack.append(State {
- .TypeExprBegin = OptionalCtx { .Required = &ctx.param_decl.type_node }
- });
- continue;
- },
- State.ParamDeclComma => |fn_proto| {
- if ((try self.expectCommaOrEnd(Token.Id.RParen)) == null) {
- stack.append(State { .ParamDecl = fn_proto }) catch unreachable;
- }
- continue;
- },
-
- State.MaybeLabeledExpression => |ctx| {
- if (self.eatToken(Token.Id.Colon)) |_| {
- stack.append(State {
- .LabeledExpression = LabelCtx {
- .label = ctx.label,
- .opt_ctx = ctx.opt_ctx,
- }
- }) catch unreachable;
- continue;
- }
-
- _ = try self.createToCtxLiteral(arena, ctx.opt_ctx, ast.Node.Identifier, ctx.label);
- continue;
- },
- State.LabeledExpression => |ctx| {
- const token = self.getNextToken();
- switch (token.id) {
- Token.Id.LBrace => {
- const block = try self.createToCtxNode(arena, ctx.opt_ctx, ast.Node.Block,
- ast.Node.Block {
- .base = undefined,
- .label = ctx.label,
- .lbrace = token,
- .statements = ArrayList(&ast.Node).init(arena),
- .rbrace = undefined,
- }
- );
- stack.append(State { .Block = block }) catch unreachable;
- continue;
- },
- Token.Id.Keyword_while => {
- stack.append(State {
- .While = LoopCtx {
- .label = ctx.label,
- .inline_token = null,
- .loop_token = token,
- .opt_ctx = ctx.opt_ctx.toRequired(),
- }
- }) catch unreachable;
- continue;
- },
- Token.Id.Keyword_for => {
- stack.append(State {
- .For = LoopCtx {
- .label = ctx.label,
- .inline_token = null,
- .loop_token = token,
- .opt_ctx = ctx.opt_ctx.toRequired(),
- }
- }) catch unreachable;
- continue;
- },
- Token.Id.Keyword_suspend => {
- const node = try arena.construct(ast.Node.Suspend {
- .base = ast.Node {
- .id = ast.Node.Id.Suspend,
- .same_line_comment = null,
- },
- .label = ctx.label,
- .suspend_token = token,
- .payload = null,
- .body = null,
- });
- ctx.opt_ctx.store(&node.base);
- stack.append(State { .SuspendBody = node }) catch unreachable;
- try stack.append(State { .Payload = OptionalCtx { .Optional = &node.payload } });
- continue;
- },
- Token.Id.Keyword_inline => {
- stack.append(State {
- .Inline = InlineCtx {
- .label = ctx.label,
- .inline_token = token,
- .opt_ctx = ctx.opt_ctx.toRequired(),
- }
- }) catch unreachable;
- continue;
- },
- else => {
- if (ctx.opt_ctx != OptionalCtx.Optional) {
- return self.parseError(token, "expected 'while', 'for', 'inline' or '{{', found {}", @tagName(token.id));
- }
-
- self.putBackToken(token);
- continue;
- },
- }
- },
- State.Inline => |ctx| {
- const token = self.getNextToken();
- switch (token.id) {
- Token.Id.Keyword_while => {
- stack.append(State {
- .While = LoopCtx {
- .inline_token = ctx.inline_token,
- .label = ctx.label,
- .loop_token = token,
- .opt_ctx = ctx.opt_ctx.toRequired(),
- }
- }) catch unreachable;
- continue;
- },
- Token.Id.Keyword_for => {
- stack.append(State {
- .For = LoopCtx {
- .inline_token = ctx.inline_token,
- .label = ctx.label,
- .loop_token = token,
- .opt_ctx = ctx.opt_ctx.toRequired(),
- }
- }) catch unreachable;
- continue;
- },
- else => {
- if (ctx.opt_ctx != OptionalCtx.Optional) {
- return self.parseError(token, "expected 'while' or 'for', found {}", @tagName(token.id));
- }
-
- self.putBackToken(token);
- continue;
- },
- }
- },
- State.While => |ctx| {
- const node = try self.createToCtxNode(arena, ctx.opt_ctx, ast.Node.While,
- ast.Node.While {
- .base = undefined,
- .label = ctx.label,
- .inline_token = ctx.inline_token,
- .while_token = ctx.loop_token,
- .condition = undefined,
- .payload = null,
- .continue_expr = null,
- .body = undefined,
- .@"else" = null,
- }
- );
- stack.append(State { .Else = &node.@"else" }) catch unreachable;
- try stack.append(State { .Expression = OptionalCtx { .Required = &node.body } });
- try stack.append(State { .WhileContinueExpr = &node.continue_expr });
- try stack.append(State { .IfToken = Token.Id.Colon });
- try stack.append(State { .PointerPayload = OptionalCtx { .Optional = &node.payload } });
- try stack.append(State { .ExpectToken = Token.Id.RParen });
- try stack.append(State { .Expression = OptionalCtx { .Required = &node.condition } });
- try stack.append(State { .ExpectToken = Token.Id.LParen });
- continue;
- },
- State.WhileContinueExpr => |dest| {
- stack.append(State { .ExpectToken = Token.Id.RParen }) catch unreachable;
- try stack.append(State { .AssignmentExpressionBegin = OptionalCtx { .RequiredNull = dest } });
- try stack.append(State { .ExpectToken = Token.Id.LParen });
- continue;
- },
- State.For => |ctx| {
- const node = try self.createToCtxNode(arena, ctx.opt_ctx, ast.Node.For,
- ast.Node.For {
- .base = undefined,
- .label = ctx.label,
- .inline_token = ctx.inline_token,
- .for_token = ctx.loop_token,
- .array_expr = undefined,
- .payload = null,
- .body = undefined,
- .@"else" = null,
- }
- );
- stack.append(State { .Else = &node.@"else" }) catch unreachable;
- try stack.append(State { .Expression = OptionalCtx { .Required = &node.body } });
- try stack.append(State { .PointerIndexPayload = OptionalCtx { .Optional = &node.payload } });
- try stack.append(State { .ExpectToken = Token.Id.RParen });
- try stack.append(State { .Expression = OptionalCtx { .Required = &node.array_expr } });
- try stack.append(State { .ExpectToken = Token.Id.LParen });
- continue;
- },
- State.Else => |dest| {
- if (self.eatToken(Token.Id.Keyword_else)) |else_token| {
- const node = try self.createNode(arena, ast.Node.Else,
- ast.Node.Else {
- .base = undefined,
- .else_token = else_token,
- .payload = null,
- .body = undefined,
- }
- );
- *dest = node;
-
- stack.append(State { .Expression = OptionalCtx { .Required = &node.body } }) catch unreachable;
- try stack.append(State { .Payload = OptionalCtx { .Optional = &node.payload } });
- continue;
- } else {
- continue;
- }
- },
-
-
- State.Block => |block| {
- const token = self.getNextToken();
- switch (token.id) {
- Token.Id.RBrace => {
- block.rbrace = token;
- continue;
- },
- else => {
- self.putBackToken(token);
- stack.append(State { .Block = block }) catch unreachable;
-
- var any_comments = false;
- while (try self.eatLineComment(arena)) |line_comment| {
- try block.statements.append(&line_comment.base);
- any_comments = true;
- }
- if (any_comments) continue;
-
- try stack.append(State { .Statement = block });
- continue;
- },
- }
- },
- State.Statement => |block| {
- const token = self.getNextToken();
- switch (token.id) {
- Token.Id.Keyword_comptime => {
- stack.append(State {
- .ComptimeStatement = ComptimeStatementCtx {
- .comptime_token = token,
- .block = block,
- }
- }) catch unreachable;
- continue;
- },
- Token.Id.Keyword_var, Token.Id.Keyword_const => {
- stack.append(State {
- .VarDecl = VarDeclCtx {
- .comments = null,
- .visib_token = null,
- .comptime_token = null,
- .extern_export_token = null,
- .lib_name = null,
- .mut_token = token,
- .list = &block.statements,
- }
- }) catch unreachable;
- continue;
- },
- Token.Id.Keyword_defer, Token.Id.Keyword_errdefer => {
- const node = try arena.construct(ast.Node.Defer {
- .base = ast.Node {
- .id = ast.Node.Id.Defer,
- .same_line_comment = null,
- },
- .defer_token = token,
- .kind = switch (token.id) {
- Token.Id.Keyword_defer => ast.Node.Defer.Kind.Unconditional,
- Token.Id.Keyword_errdefer => ast.Node.Defer.Kind.Error,
- else => unreachable,
- },
- .expr = undefined,
- });
- const node_ptr = try block.statements.addOne();
- *node_ptr = &node.base;
-
- stack.append(State { .Semicolon = node_ptr }) catch unreachable;
- try stack.append(State { .AssignmentExpressionBegin = OptionalCtx{ .Required = &node.expr } });
- continue;
- },
- Token.Id.LBrace => {
- const inner_block = try self.createAttachNode(arena, &block.statements, ast.Node.Block,
- ast.Node.Block {
- .base = undefined,
- .label = null,
- .lbrace = token,
- .statements = ArrayList(&ast.Node).init(arena),
- .rbrace = undefined,
- }
- );
- stack.append(State { .Block = inner_block }) catch unreachable;
- continue;
- },
- else => {
- self.putBackToken(token);
- const statement = try block.statements.addOne();
- stack.append(State { .LookForSameLineComment = statement }) catch unreachable;
- try stack.append(State { .Semicolon = statement });
- try stack.append(State { .AssignmentExpressionBegin = OptionalCtx{ .Required = statement } });
- continue;
- }
- }
- },
- State.ComptimeStatement => |ctx| {
- const token = self.getNextToken();
- switch (token.id) {
- Token.Id.Keyword_var, Token.Id.Keyword_const => {
- stack.append(State {
- .VarDecl = VarDeclCtx {
- .comments = null,
- .visib_token = null,
- .comptime_token = ctx.comptime_token,
- .extern_export_token = null,
- .lib_name = null,
- .mut_token = token,
- .list = &ctx.block.statements,
- }
- }) catch unreachable;
- continue;
- },
- else => {
- self.putBackToken(token);
- self.putBackToken(ctx.comptime_token);
- const statement = try ctx.block.statements.addOne();
- stack.append(State { .LookForSameLineComment = statement }) catch unreachable;
- try stack.append(State { .Semicolon = statement });
- try stack.append(State { .Expression = OptionalCtx { .Required = statement } });
- continue;
- }
- }
- },
- State.Semicolon => |node_ptr| {
- const node = *node_ptr;
- if (requireSemiColon(node)) {
- stack.append(State { .ExpectToken = Token.Id.Semicolon }) catch unreachable;
- continue;
- }
- continue;
- },
-
- State.LookForSameLineComment => |node_ptr| {
- try self.lookForSameLineComment(arena, *node_ptr);
- continue;
- },
-
- State.LookForSameLineCommentDirect => |node| {
- try self.lookForSameLineComment(arena, node);
- continue;
- },
-
-
- State.AsmOutputItems => |items| {
- const lbracket = self.getNextToken();
- if (lbracket.id != Token.Id.LBracket) {
- self.putBackToken(lbracket);
- continue;
- }
-
- const node = try self.createNode(arena, ast.Node.AsmOutput,
- ast.Node.AsmOutput {
- .base = undefined,
- .symbolic_name = undefined,
- .constraint = undefined,
- .kind = undefined,
- }
- );
- try items.append(node);
-
- stack.append(State { .AsmOutputItems = items }) catch unreachable;
- try stack.append(State { .IfToken = Token.Id.Comma });
- try stack.append(State { .ExpectToken = Token.Id.RParen });
- try stack.append(State { .AsmOutputReturnOrType = node });
- try stack.append(State { .ExpectToken = Token.Id.LParen });
- try stack.append(State { .StringLiteral = OptionalCtx { .Required = &node.constraint } });
- try stack.append(State { .ExpectToken = Token.Id.RBracket });
- try stack.append(State { .Identifier = OptionalCtx { .Required = &node.symbolic_name } });
- continue;
- },
- State.AsmOutputReturnOrType => |node| {
- const token = self.getNextToken();
- switch (token.id) {
- Token.Id.Identifier => {
- node.kind = ast.Node.AsmOutput.Kind { .Variable = try self.createLiteral(arena, ast.Node.Identifier, token) };
- continue;
- },
- Token.Id.Arrow => {
- node.kind = ast.Node.AsmOutput.Kind { .Return = undefined };
- try stack.append(State { .TypeExprBegin = OptionalCtx { .Required = &node.kind.Return } });
- continue;
- },
- else => {
- return self.parseError(token, "expected '->' or {}, found {}",
- @tagName(Token.Id.Identifier),
- @tagName(token.id));
- },
- }
- },
- State.AsmInputItems => |items| {
- const lbracket = self.getNextToken();
- if (lbracket.id != Token.Id.LBracket) {
- self.putBackToken(lbracket);
- continue;
- }
-
- const node = try self.createNode(arena, ast.Node.AsmInput,
- ast.Node.AsmInput {
- .base = undefined,
- .symbolic_name = undefined,
- .constraint = undefined,
- .expr = undefined,
- }
- );
- try items.append(node);
-
- stack.append(State { .AsmInputItems = items }) catch unreachable;
- try stack.append(State { .IfToken = Token.Id.Comma });
- try stack.append(State { .ExpectToken = Token.Id.RParen });
- try stack.append(State { .Expression = OptionalCtx { .Required = &node.expr } });
- try stack.append(State { .ExpectToken = Token.Id.LParen });
- try stack.append(State { .StringLiteral = OptionalCtx { .Required = &node.constraint } });
- try stack.append(State { .ExpectToken = Token.Id.RBracket });
- try stack.append(State { .Identifier = OptionalCtx { .Required = &node.symbolic_name } });
- continue;
- },
- State.AsmClopperItems => |items| {
- stack.append(State { .AsmClopperItems = items }) catch unreachable;
- try stack.append(State { .IfToken = Token.Id.Comma });
- try stack.append(State { .StringLiteral = OptionalCtx { .Required = try items.addOne() } });
- continue;
- },
-
-
- State.ExprListItemOrEnd => |list_state| {
- if (self.eatToken(list_state.end)) |token| {
- *list_state.ptr = token;
- continue;
- }
-
- stack.append(State { .ExprListCommaOrEnd = list_state }) catch unreachable;
- try stack.append(State { .Expression = OptionalCtx { .Required = try list_state.list.addOne() } });
- continue;
- },
- State.ExprListCommaOrEnd => |list_state| {
- if (try self.expectCommaOrEnd(list_state.end)) |end| {
- *list_state.ptr = end;
- continue;
- } else {
- stack.append(State { .ExprListItemOrEnd = list_state }) catch unreachable;
- continue;
- }
- },
- State.FieldInitListItemOrEnd => |list_state| {
- while (try self.eatLineComment(arena)) |line_comment| {
- try list_state.list.append(&line_comment.base);
- }
-
- if (self.eatToken(Token.Id.RBrace)) |rbrace| {
- *list_state.ptr = rbrace;
- continue;
- }
-
- const node = try arena.construct(ast.Node.FieldInitializer {
- .base = ast.Node {
- .id = ast.Node.Id.FieldInitializer,
- .same_line_comment = null,
- },
- .period_token = undefined,
- .name_token = undefined,
- .expr = undefined,
- });
- try list_state.list.append(&node.base);
-
- stack.append(State { .FieldInitListCommaOrEnd = list_state }) catch unreachable;
- try stack.append(State { .Expression = OptionalCtx{ .Required = &node.expr } });
- try stack.append(State { .ExpectToken = Token.Id.Equal });
- try stack.append(State {
- .ExpectTokenSave = ExpectTokenSave {
- .id = Token.Id.Identifier,
- .ptr = &node.name_token,
- }
- });
- try stack.append(State {
- .ExpectTokenSave = ExpectTokenSave {
- .id = Token.Id.Period,
- .ptr = &node.period_token,
- }
- });
- continue;
- },
- State.FieldInitListCommaOrEnd => |list_state| {
- if (try self.expectCommaOrEnd(Token.Id.RBrace)) |end| {
- *list_state.ptr = end;
- continue;
- } else {
- stack.append(State { .FieldInitListItemOrEnd = list_state }) catch unreachable;
- continue;
- }
- },
- State.FieldListCommaOrEnd => |container_decl| {
- if (try self.expectCommaOrEnd(Token.Id.RBrace)) |end| {
- container_decl.rbrace_token = end;
- continue;
- }
-
- try self.lookForSameLineComment(arena, container_decl.fields_and_decls.toSlice()[container_decl.fields_and_decls.len - 1]);
- try stack.append(State { .ContainerDecl = container_decl });
- continue;
- },
- State.ErrorTagListItemOrEnd => |list_state| {
- while (try self.eatLineComment(arena)) |line_comment| {
- try list_state.list.append(&line_comment.base);
- }
-
- if (self.eatToken(Token.Id.RBrace)) |rbrace| {
- *list_state.ptr = rbrace;
- continue;
- }
-
- const node_ptr = try list_state.list.addOne();
-
- try stack.append(State { .ErrorTagListCommaOrEnd = list_state });
- try stack.append(State { .ErrorTag = node_ptr });
- continue;
- },
- State.ErrorTagListCommaOrEnd => |list_state| {
- if (try self.expectCommaOrEnd(Token.Id.RBrace)) |end| {
- *list_state.ptr = end;
- continue;
- } else {
- stack.append(State { .ErrorTagListItemOrEnd = list_state }) catch unreachable;
- continue;
- }
- },
- State.SwitchCaseOrEnd => |list_state| {
- while (try self.eatLineComment(arena)) |line_comment| {
- try list_state.list.append(&line_comment.base);
- }
-
- if (self.eatToken(Token.Id.RBrace)) |rbrace| {
- *list_state.ptr = rbrace;
- continue;
- }
-
- const comments = try self.eatDocComments(arena);
- const node = try arena.construct(ast.Node.SwitchCase {
- .base = ast.Node {
- .id = ast.Node.Id.SwitchCase,
- .same_line_comment = null,
- },
- .items = ArrayList(&ast.Node).init(arena),
- .payload = null,
- .expr = undefined,
- });
- try list_state.list.append(&node.base);
- try stack.append(State { .SwitchCaseCommaOrEnd = list_state });
- try stack.append(State { .AssignmentExpressionBegin = OptionalCtx { .Required = &node.expr } });
- try stack.append(State { .PointerPayload = OptionalCtx { .Optional = &node.payload } });
- try stack.append(State { .SwitchCaseFirstItem = &node.items });
-
- continue;
- },
-
- State.SwitchCaseCommaOrEnd => |list_state| {
- if (try self.expectCommaOrEnd(Token.Id.RBrace)) |end| {
- *list_state.ptr = end;
- continue;
- }
-
- const node = list_state.list.toSlice()[list_state.list.len - 1];
- try self.lookForSameLineComment(arena, node);
- try stack.append(State { .SwitchCaseOrEnd = list_state });
- continue;
- },
-
- State.SwitchCaseFirstItem => |case_items| {
- const token = self.getNextToken();
- if (token.id == Token.Id.Keyword_else) {
- const else_node = try self.createAttachNode(arena, case_items, ast.Node.SwitchElse,
- ast.Node.SwitchElse {
- .base = undefined,
- .token = token,
- }
- );
- try stack.append(State { .ExpectToken = Token.Id.EqualAngleBracketRight });
- continue;
- } else {
- self.putBackToken(token);
- try stack.append(State { .SwitchCaseItem = case_items });
- continue;
- }
- },
- State.SwitchCaseItem => |case_items| {
- stack.append(State { .SwitchCaseItemCommaOrEnd = case_items }) catch unreachable;
- try stack.append(State { .RangeExpressionBegin = OptionalCtx { .Required = try case_items.addOne() } });
- },
- State.SwitchCaseItemCommaOrEnd => |case_items| {
- if ((try self.expectCommaOrEnd(Token.Id.EqualAngleBracketRight)) == null) {
- stack.append(State { .SwitchCaseItem = case_items }) catch unreachable;
- }
- continue;
- },
-
-
- State.SuspendBody => |suspend_node| {
- if (suspend_node.payload != null) {
- try stack.append(State { .AssignmentExpressionBegin = OptionalCtx { .RequiredNull = &suspend_node.body } });
- }
- continue;
- },
- State.AsyncAllocator => |async_node| {
- if (self.eatToken(Token.Id.AngleBracketLeft) == null) {
- continue;
- }
-
- async_node.rangle_bracket = Token(undefined);
- try stack.append(State {
- .ExpectTokenSave = ExpectTokenSave {
- .id = Token.Id.AngleBracketRight,
- .ptr = &??async_node.rangle_bracket,
- }
- });
- try stack.append(State { .TypeExprBegin = OptionalCtx { .RequiredNull = &async_node.allocator_type } });
- continue;
- },
- State.AsyncEnd => |ctx| {
- const node = ctx.ctx.get() ?? continue;
-
- switch (node.id) {
- ast.Node.Id.FnProto => {
- const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", node);
- fn_proto.async_attr = ctx.attribute;
- continue;
- },
- ast.Node.Id.SuffixOp => {
- const suffix_op = @fieldParentPtr(ast.Node.SuffixOp, "base", node);
- if (suffix_op.op == ast.Node.SuffixOp.Op.Call) {
- suffix_op.op.Call.async_attr = ctx.attribute;
- continue;
- }
-
- return self.parseError(node.firstToken(), "expected {}, found {}.",
- @tagName(ast.Node.SuffixOp.Op.Call),
- @tagName(suffix_op.op));
- },
- else => {
- return self.parseError(node.firstToken(), "expected {} or {}, found {}.",
- @tagName(ast.Node.SuffixOp.Op.Call),
- @tagName(ast.Node.Id.FnProto),
- @tagName(node.id));
- }
- }
- },
-
-
- State.ExternType => |ctx| {
- if (self.eatToken(Token.Id.Keyword_fn)) |fn_token| {
- const fn_proto = try arena.construct(ast.Node.FnProto {
- .base = ast.Node {
- .id = ast.Node.Id.FnProto,
- .same_line_comment = null,
- },
- .doc_comments = ctx.comments,
- .visib_token = null,
- .name_token = null,
- .fn_token = fn_token,
- .params = ArrayList(&ast.Node).init(arena),
- .return_type = undefined,
- .var_args_token = null,
- .extern_export_inline_token = ctx.extern_token,
- .cc_token = null,
- .async_attr = null,
- .body_node = null,
- .lib_name = null,
- .align_expr = null,
- });
- ctx.opt_ctx.store(&fn_proto.base);
- stack.append(State { .FnProto = fn_proto }) catch unreachable;
- continue;
- }
-
- stack.append(State {
- .ContainerKind = ContainerKindCtx {
- .opt_ctx = ctx.opt_ctx,
- .ltoken = ctx.extern_token,
- .layout = ast.Node.ContainerDecl.Layout.Extern,
- },
- }) catch unreachable;
- continue;
- },
- State.SliceOrArrayAccess => |node| {
- var token = self.getNextToken();
- switch (token.id) {
- Token.Id.Ellipsis2 => {
- const start = node.op.ArrayAccess;
- node.op = ast.Node.SuffixOp.Op {
- .Slice = ast.Node.SuffixOp.SliceRange {
- .start = start,
- .end = null,
- }
- };
-
- stack.append(State {
- .ExpectTokenSave = ExpectTokenSave {
- .id = Token.Id.RBracket,
- .ptr = &node.rtoken,
- }
- }) catch unreachable;
- try stack.append(State { .Expression = OptionalCtx { .Optional = &node.op.Slice.end } });
- continue;
- },
- Token.Id.RBracket => {
- node.rtoken = token;
- continue;
- },
- else => {
- return self.parseError(token, "expected ']' or '..', found {}", @tagName(token.id));
- }
- }
- },
- State.SliceOrArrayType => |node| {
- if (self.eatToken(Token.Id.RBracket)) |_| {
- node.op = ast.Node.PrefixOp.Op {
- .SliceType = ast.Node.PrefixOp.AddrOfInfo {
- .align_expr = null,
- .bit_offset_start_token = null,
- .bit_offset_end_token = null,
- .const_token = null,
- .volatile_token = null,
- }
- };
- stack.append(State { .TypeExprBegin = OptionalCtx { .Required = &node.rhs } }) catch unreachable;
- try stack.append(State { .AddrOfModifiers = &node.op.SliceType });
- continue;
- }
-
- node.op = ast.Node.PrefixOp.Op { .ArrayType = undefined };
- stack.append(State { .TypeExprBegin = OptionalCtx { .Required = &node.rhs } }) catch unreachable;
- try stack.append(State { .ExpectToken = Token.Id.RBracket });
- try stack.append(State { .Expression = OptionalCtx { .Required = &node.op.ArrayType } });
- continue;
- },
- State.AddrOfModifiers => |addr_of_info| {
- var token = self.getNextToken();
- switch (token.id) {
- Token.Id.Keyword_align => {
- stack.append(state) catch unreachable;
- if (addr_of_info.align_expr != null) {
- return self.parseError(token, "multiple align qualifiers");
- }
- try stack.append(State { .ExpectToken = Token.Id.RParen });
- try stack.append(State { .Expression = OptionalCtx { .RequiredNull = &addr_of_info.align_expr} });
- try stack.append(State { .ExpectToken = Token.Id.LParen });
- continue;
- },
- Token.Id.Keyword_const => {
- stack.append(state) catch unreachable;
- if (addr_of_info.const_token != null) {
- return self.parseError(token, "duplicate qualifier: const");
- }
- addr_of_info.const_token = token;
- continue;
- },
- Token.Id.Keyword_volatile => {
- stack.append(state) catch unreachable;
- if (addr_of_info.volatile_token != null) {
- return self.parseError(token, "duplicate qualifier: volatile");
- }
- addr_of_info.volatile_token = token;
- continue;
- },
- else => {
- self.putBackToken(token);
- continue;
- },
- }
- },
-
-
- State.Payload => |opt_ctx| {
- const token = self.getNextToken();
- if (token.id != Token.Id.Pipe) {
- if (opt_ctx != OptionalCtx.Optional) {
- return self.parseError(token, "expected {}, found {}.",
- @tagName(Token.Id.Pipe),
- @tagName(token.id));
- }
-
- self.putBackToken(token);
- continue;
- }
-
- const node = try self.createToCtxNode(arena, opt_ctx, ast.Node.Payload,
- ast.Node.Payload {
- .base = undefined,
- .lpipe = token,
- .error_symbol = undefined,
- .rpipe = undefined
- }
- );
-
- stack.append(State {
- .ExpectTokenSave = ExpectTokenSave {
- .id = Token.Id.Pipe,
- .ptr = &node.rpipe,
- }
- }) catch unreachable;
- try stack.append(State { .Identifier = OptionalCtx { .Required = &node.error_symbol } });
- continue;
- },
- State.PointerPayload => |opt_ctx| {
- const token = self.getNextToken();
- if (token.id != Token.Id.Pipe) {
- if (opt_ctx != OptionalCtx.Optional) {
- return self.parseError(token, "expected {}, found {}.",
- @tagName(Token.Id.Pipe),
- @tagName(token.id));
- }
-
- self.putBackToken(token);
- continue;
- }
-
- const node = try self.createToCtxNode(arena, opt_ctx, ast.Node.PointerPayload,
- ast.Node.PointerPayload {
- .base = undefined,
- .lpipe = token,
- .ptr_token = null,
- .value_symbol = undefined,
- .rpipe = undefined
- }
- );
-
- stack.append(State {.LookForSameLineCommentDirect = &node.base }) catch unreachable;
- try stack.append(State {
- .ExpectTokenSave = ExpectTokenSave {
- .id = Token.Id.Pipe,
- .ptr = &node.rpipe,
- }
- });
- try stack.append(State { .Identifier = OptionalCtx { .Required = &node.value_symbol } });
- try stack.append(State {
- .OptionalTokenSave = OptionalTokenSave {
- .id = Token.Id.Asterisk,
- .ptr = &node.ptr_token,
- }
- });
- continue;
- },
- State.PointerIndexPayload => |opt_ctx| {
- const token = self.getNextToken();
- if (token.id != Token.Id.Pipe) {
- if (opt_ctx != OptionalCtx.Optional) {
- return self.parseError(token, "expected {}, found {}.",
- @tagName(Token.Id.Pipe),
- @tagName(token.id));
- }
-
- self.putBackToken(token);
- continue;
- }
-
- const node = try self.createToCtxNode(arena, opt_ctx, ast.Node.PointerIndexPayload,
- ast.Node.PointerIndexPayload {
- .base = undefined,
- .lpipe = token,
- .ptr_token = null,
- .value_symbol = undefined,
- .index_symbol = null,
- .rpipe = undefined
- }
- );
-
- stack.append(State {
- .ExpectTokenSave = ExpectTokenSave {
- .id = Token.Id.Pipe,
- .ptr = &node.rpipe,
- }
- }) catch unreachable;
- try stack.append(State { .Identifier = OptionalCtx { .RequiredNull = &node.index_symbol } });
- try stack.append(State { .IfToken = Token.Id.Comma });
- try stack.append(State { .Identifier = OptionalCtx { .Required = &node.value_symbol } });
- try stack.append(State {
- .OptionalTokenSave = OptionalTokenSave {
- .id = Token.Id.Asterisk,
- .ptr = &node.ptr_token,
- }
- });
- continue;
- },
-
-
- State.Expression => |opt_ctx| {
- const token = self.getNextToken();
- switch (token.id) {
- Token.Id.Keyword_return, Token.Id.Keyword_break, Token.Id.Keyword_continue => {
- const node = try self.createToCtxNode(arena, opt_ctx, ast.Node.ControlFlowExpression,
- ast.Node.ControlFlowExpression {
- .base = undefined,
- .ltoken = token,
- .kind = undefined,
- .rhs = null,
- }
- );
-
- stack.append(State { .Expression = OptionalCtx { .Optional = &node.rhs } }) catch unreachable;
-
- switch (token.id) {
- Token.Id.Keyword_break => {
- node.kind = ast.Node.ControlFlowExpression.Kind { .Break = null };
- try stack.append(State { .Identifier = OptionalCtx { .RequiredNull = &node.kind.Break } });
- try stack.append(State { .IfToken = Token.Id.Colon });
- },
- Token.Id.Keyword_continue => {
- node.kind = ast.Node.ControlFlowExpression.Kind { .Continue = null };
- try stack.append(State { .Identifier = OptionalCtx { .RequiredNull = &node.kind.Continue } });
- try stack.append(State { .IfToken = Token.Id.Colon });
- },
- Token.Id.Keyword_return => {
- node.kind = ast.Node.ControlFlowExpression.Kind.Return;
- },
- else => unreachable,
- }
- continue;
- },
- Token.Id.Keyword_try, Token.Id.Keyword_cancel, Token.Id.Keyword_resume => {
- const node = try self.createToCtxNode(arena, opt_ctx, ast.Node.PrefixOp,
- ast.Node.PrefixOp {
- .base = undefined,
- .op_token = token,
- .op = switch (token.id) {
- Token.Id.Keyword_try => ast.Node.PrefixOp.Op { .Try = void{} },
- Token.Id.Keyword_cancel => ast.Node.PrefixOp.Op { .Cancel = void{} },
- Token.Id.Keyword_resume => ast.Node.PrefixOp.Op { .Resume = void{} },
- else => unreachable,
- },
- .rhs = undefined,
- }
- );
-
- stack.append(State { .Expression = OptionalCtx { .Required = &node.rhs } }) catch unreachable;
- continue;
- },
- else => {
- if (!try self.parseBlockExpr(&stack, arena, opt_ctx, token)) {
- self.putBackToken(token);
- stack.append(State { .UnwrapExpressionBegin = opt_ctx }) catch unreachable;
- }
- continue;
- }
- }
- },
- State.RangeExpressionBegin => |opt_ctx| {
- stack.append(State { .RangeExpressionEnd = opt_ctx }) catch unreachable;
- try stack.append(State { .Expression = opt_ctx });
- continue;
- },
- State.RangeExpressionEnd => |opt_ctx| {
- const lhs = opt_ctx.get() ?? continue;
-
- if (self.eatToken(Token.Id.Ellipsis3)) |ellipsis3| {
- const node = try self.createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
- ast.Node.InfixOp {
- .base = undefined,
- .lhs = lhs,
- .op_token = ellipsis3,
- .op = ast.Node.InfixOp.Op.Range,
- .rhs = undefined,
- }
- );
- stack.append(State { .Expression = OptionalCtx { .Required = &node.rhs } }) catch unreachable;
- continue;
- }
- },
- State.AssignmentExpressionBegin => |opt_ctx| {
- stack.append(State { .AssignmentExpressionEnd = opt_ctx }) catch unreachable;
- try stack.append(State { .Expression = opt_ctx });
- continue;
- },
-
- State.AssignmentExpressionEnd => |opt_ctx| {
- const lhs = opt_ctx.get() ?? continue;
-
- const token = self.getNextToken();
- if (tokenIdToAssignment(token.id)) |ass_id| {
- const node = try self.createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
- ast.Node.InfixOp {
- .base = undefined,
- .lhs = lhs,
- .op_token = token,
- .op = ass_id,
- .rhs = undefined,
- }
- );
- stack.append(State { .AssignmentExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
- try stack.append(State { .Expression = OptionalCtx { .Required = &node.rhs } });
- continue;
- } else {
- self.putBackToken(token);
- continue;
- }
- },
-
- State.UnwrapExpressionBegin => |opt_ctx| {
- stack.append(State { .UnwrapExpressionEnd = opt_ctx }) catch unreachable;
- try stack.append(State { .BoolOrExpressionBegin = opt_ctx });
- continue;
- },
-
- State.UnwrapExpressionEnd => |opt_ctx| {
- const lhs = opt_ctx.get() ?? continue;
-
- const token = self.getNextToken();
- if (tokenIdToUnwrapExpr(token.id)) |unwrap_id| {
- const node = try self.createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
- ast.Node.InfixOp {
- .base = undefined,
- .lhs = lhs,
- .op_token = token,
- .op = unwrap_id,
- .rhs = undefined,
- }
- );
-
- stack.append(State { .UnwrapExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
- try stack.append(State { .Expression = OptionalCtx { .Required = &node.rhs } });
-
- if (node.op == ast.Node.InfixOp.Op.Catch) {
- try stack.append(State { .Payload = OptionalCtx { .Optional = &node.op.Catch } });
- }
- continue;
- } else {
- self.putBackToken(token);
- continue;
- }
- },
-
- State.BoolOrExpressionBegin => |opt_ctx| {
- stack.append(State { .BoolOrExpressionEnd = opt_ctx }) catch unreachable;
- try stack.append(State { .BoolAndExpressionBegin = opt_ctx });
- continue;
- },
-
- State.BoolOrExpressionEnd => |opt_ctx| {
- const lhs = opt_ctx.get() ?? continue;
-
- if (self.eatToken(Token.Id.Keyword_or)) |or_token| {
- const node = try self.createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
- ast.Node.InfixOp {
- .base = undefined,
- .lhs = lhs,
- .op_token = or_token,
- .op = ast.Node.InfixOp.Op.BoolOr,
- .rhs = undefined,
- }
- );
- stack.append(State { .BoolOrExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
- try stack.append(State { .BoolAndExpressionBegin = OptionalCtx { .Required = &node.rhs } });
- continue;
- }
- },
-
- State.BoolAndExpressionBegin => |opt_ctx| {
- stack.append(State { .BoolAndExpressionEnd = opt_ctx }) catch unreachable;
- try stack.append(State { .ComparisonExpressionBegin = opt_ctx });
- continue;
- },
-
- State.BoolAndExpressionEnd => |opt_ctx| {
- const lhs = opt_ctx.get() ?? continue;
-
- if (self.eatToken(Token.Id.Keyword_and)) |and_token| {
- const node = try self.createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
- ast.Node.InfixOp {
- .base = undefined,
- .lhs = lhs,
- .op_token = and_token,
- .op = ast.Node.InfixOp.Op.BoolAnd,
- .rhs = undefined,
- }
- );
- stack.append(State { .BoolAndExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
- try stack.append(State { .ComparisonExpressionBegin = OptionalCtx { .Required = &node.rhs } });
- continue;
- }
- },
-
- State.ComparisonExpressionBegin => |opt_ctx| {
- stack.append(State { .ComparisonExpressionEnd = opt_ctx }) catch unreachable;
- try stack.append(State { .BinaryOrExpressionBegin = opt_ctx });
- continue;
- },
-
- State.ComparisonExpressionEnd => |opt_ctx| {
- const lhs = opt_ctx.get() ?? continue;
-
- const token = self.getNextToken();
- if (tokenIdToComparison(token.id)) |comp_id| {
- const node = try self.createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
- ast.Node.InfixOp {
- .base = undefined,
- .lhs = lhs,
- .op_token = token,
- .op = comp_id,
- .rhs = undefined,
- }
- );
- stack.append(State { .ComparisonExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
- try stack.append(State { .BinaryOrExpressionBegin = OptionalCtx { .Required = &node.rhs } });
- continue;
- } else {
- self.putBackToken(token);
- continue;
- }
- },
-
- State.BinaryOrExpressionBegin => |opt_ctx| {
- stack.append(State { .BinaryOrExpressionEnd = opt_ctx }) catch unreachable;
- try stack.append(State { .BinaryXorExpressionBegin = opt_ctx });
- continue;
- },
-
- State.BinaryOrExpressionEnd => |opt_ctx| {
- const lhs = opt_ctx.get() ?? continue;
-
- if (self.eatToken(Token.Id.Pipe)) |pipe| {
- const node = try self.createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
- ast.Node.InfixOp {
- .base = undefined,
- .lhs = lhs,
- .op_token = pipe,
- .op = ast.Node.InfixOp.Op.BitOr,
- .rhs = undefined,
- }
- );
- stack.append(State { .BinaryOrExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
- try stack.append(State { .BinaryXorExpressionBegin = OptionalCtx { .Required = &node.rhs } });
- continue;
- }
- },
-
- State.BinaryXorExpressionBegin => |opt_ctx| {
- stack.append(State { .BinaryXorExpressionEnd = opt_ctx }) catch unreachable;
- try stack.append(State { .BinaryAndExpressionBegin = opt_ctx });
- continue;
- },
-
- State.BinaryXorExpressionEnd => |opt_ctx| {
- const lhs = opt_ctx.get() ?? continue;
-
- if (self.eatToken(Token.Id.Caret)) |caret| {
- const node = try self.createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
- ast.Node.InfixOp {
- .base = undefined,
- .lhs = lhs,
- .op_token = caret,
- .op = ast.Node.InfixOp.Op.BitXor,
- .rhs = undefined,
- }
- );
- stack.append(State { .BinaryXorExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
- try stack.append(State { .BinaryAndExpressionBegin = OptionalCtx { .Required = &node.rhs } });
- continue;
- }
- },
-
- State.BinaryAndExpressionBegin => |opt_ctx| {
- stack.append(State { .BinaryAndExpressionEnd = opt_ctx }) catch unreachable;
- try stack.append(State { .BitShiftExpressionBegin = opt_ctx });
- continue;
- },
-
- State.BinaryAndExpressionEnd => |opt_ctx| {
- const lhs = opt_ctx.get() ?? continue;
-
- if (self.eatToken(Token.Id.Ampersand)) |ampersand| {
- const node = try self.createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
- ast.Node.InfixOp {
- .base = undefined,
- .lhs = lhs,
- .op_token = ampersand,
- .op = ast.Node.InfixOp.Op.BitAnd,
- .rhs = undefined,
- }
- );
- stack.append(State { .BinaryAndExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
- try stack.append(State { .BitShiftExpressionBegin = OptionalCtx { .Required = &node.rhs } });
- continue;
- }
- },
-
- State.BitShiftExpressionBegin => |opt_ctx| {
- stack.append(State { .BitShiftExpressionEnd = opt_ctx }) catch unreachable;
- try stack.append(State { .AdditionExpressionBegin = opt_ctx });
- continue;
- },
-
- State.BitShiftExpressionEnd => |opt_ctx| {
- const lhs = opt_ctx.get() ?? continue;
-
- const token = self.getNextToken();
- if (tokenIdToBitShift(token.id)) |bitshift_id| {
- const node = try self.createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
- ast.Node.InfixOp {
- .base = undefined,
- .lhs = lhs,
- .op_token = token,
- .op = bitshift_id,
- .rhs = undefined,
- }
- );
- stack.append(State { .BitShiftExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
- try stack.append(State { .AdditionExpressionBegin = OptionalCtx { .Required = &node.rhs } });
- continue;
- } else {
- self.putBackToken(token);
- continue;
- }
- },
-
- State.AdditionExpressionBegin => |opt_ctx| {
- stack.append(State { .AdditionExpressionEnd = opt_ctx }) catch unreachable;
- try stack.append(State { .MultiplyExpressionBegin = opt_ctx });
- continue;
- },
-
- State.AdditionExpressionEnd => |opt_ctx| {
- const lhs = opt_ctx.get() ?? continue;
-
- const token = self.getNextToken();
- if (tokenIdToAddition(token.id)) |add_id| {
- const node = try self.createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
- ast.Node.InfixOp {
- .base = undefined,
- .lhs = lhs,
- .op_token = token,
- .op = add_id,
- .rhs = undefined,
- }
- );
- stack.append(State { .AdditionExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
- try stack.append(State { .MultiplyExpressionBegin = OptionalCtx { .Required = &node.rhs } });
- continue;
- } else {
- self.putBackToken(token);
- continue;
- }
- },
-
- State.MultiplyExpressionBegin => |opt_ctx| {
- stack.append(State { .MultiplyExpressionEnd = opt_ctx }) catch unreachable;
- try stack.append(State { .CurlySuffixExpressionBegin = opt_ctx });
- continue;
- },
-
- State.MultiplyExpressionEnd => |opt_ctx| {
- const lhs = opt_ctx.get() ?? continue;
-
- const token = self.getNextToken();
- if (tokenIdToMultiply(token.id)) |mult_id| {
- const node = try self.createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
- ast.Node.InfixOp {
- .base = undefined,
- .lhs = lhs,
- .op_token = token,
- .op = mult_id,
- .rhs = undefined,
- }
- );
- stack.append(State { .MultiplyExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
- try stack.append(State { .CurlySuffixExpressionBegin = OptionalCtx { .Required = &node.rhs } });
- continue;
- } else {
- self.putBackToken(token);
- continue;
- }
- },
-
- State.CurlySuffixExpressionBegin => |opt_ctx| {
- stack.append(State { .CurlySuffixExpressionEnd = opt_ctx }) catch unreachable;
- try stack.append(State { .IfToken = Token.Id.LBrace });
- try stack.append(State { .TypeExprBegin = opt_ctx });
- continue;
- },
-
- State.CurlySuffixExpressionEnd => |opt_ctx| {
- const lhs = opt_ctx.get() ?? continue;
-
- if (self.isPeekToken(Token.Id.Period)) {
- const node = try self.createToCtxNode(arena, opt_ctx, ast.Node.SuffixOp,
- ast.Node.SuffixOp {
- .base = undefined,
- .lhs = lhs,
- .op = ast.Node.SuffixOp.Op {
- .StructInitializer = ArrayList(&ast.Node).init(arena),
- },
- .rtoken = undefined,
- }
- );
- stack.append(State { .CurlySuffixExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
- try stack.append(State { .IfToken = Token.Id.LBrace });
- try stack.append(State {
- .FieldInitListItemOrEnd = ListSave(&ast.Node) {
- .list = &node.op.StructInitializer,
- .ptr = &node.rtoken,
- }
- });
- continue;
- }
-
- const node = try self.createToCtxNode(arena, opt_ctx, ast.Node.SuffixOp,
- ast.Node.SuffixOp {
- .base = undefined,
- .lhs = lhs,
- .op = ast.Node.SuffixOp.Op {
- .ArrayInitializer = ArrayList(&ast.Node).init(arena),
- },
- .rtoken = undefined,
- }
- );
- stack.append(State { .CurlySuffixExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
- try stack.append(State { .IfToken = Token.Id.LBrace });
- try stack.append(State {
- .ExprListItemOrEnd = ExprListCtx {
- .list = &node.op.ArrayInitializer,
- .end = Token.Id.RBrace,
- .ptr = &node.rtoken,
- }
- });
- continue;
- },
-
- State.TypeExprBegin => |opt_ctx| {
- stack.append(State { .TypeExprEnd = opt_ctx }) catch unreachable;
- try stack.append(State { .PrefixOpExpression = opt_ctx });
- continue;
- },
-
- State.TypeExprEnd => |opt_ctx| {
- const lhs = opt_ctx.get() ?? continue;
-
- if (self.eatToken(Token.Id.Bang)) |bang| {
- const node = try self.createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
- ast.Node.InfixOp {
- .base = undefined,
- .lhs = lhs,
- .op_token = bang,
- .op = ast.Node.InfixOp.Op.ErrorUnion,
- .rhs = undefined,
- }
- );
- stack.append(State { .TypeExprEnd = opt_ctx.toRequired() }) catch unreachable;
- try stack.append(State { .PrefixOpExpression = OptionalCtx { .Required = &node.rhs } });
- continue;
- }
- },
-
- State.PrefixOpExpression => |opt_ctx| {
- const token = self.getNextToken();
- if (tokenIdToPrefixOp(token.id)) |prefix_id| {
- var node = try self.createToCtxNode(arena, opt_ctx, ast.Node.PrefixOp,
- ast.Node.PrefixOp {
- .base = undefined,
- .op_token = token,
- .op = prefix_id,
- .rhs = undefined,
- }
- );
-
- // Treat '**' token as two derefs
- if (token.id == Token.Id.AsteriskAsterisk) {
- const child = try self.createNode(arena, ast.Node.PrefixOp,
- ast.Node.PrefixOp {
- .base = undefined,
- .op_token = token,
- .op = prefix_id,
- .rhs = undefined,
- }
- );
- node.rhs = &child.base;
- node = child;
- }
-
- stack.append(State { .TypeExprBegin = OptionalCtx { .Required = &node.rhs } }) catch unreachable;
- if (node.op == ast.Node.PrefixOp.Op.AddrOf) {
- try stack.append(State { .AddrOfModifiers = &node.op.AddrOf });
- }
- continue;
- } else {
- self.putBackToken(token);
- stack.append(State { .SuffixOpExpressionBegin = opt_ctx }) catch unreachable;
- continue;
- }
- },
-
- State.SuffixOpExpressionBegin => |opt_ctx| {
- if (self.eatToken(Token.Id.Keyword_async)) |async_token| {
- const async_node = try self.createNode(arena, ast.Node.AsyncAttribute,
- ast.Node.AsyncAttribute {
- .base = undefined,
- .async_token = async_token,
- .allocator_type = null,
- .rangle_bracket = null,
- }
- );
- stack.append(State {
- .AsyncEnd = AsyncEndCtx {
- .ctx = opt_ctx,
- .attribute = async_node,
- }
- }) catch unreachable;
- try stack.append(State { .SuffixOpExpressionEnd = opt_ctx.toRequired() });
- try stack.append(State { .PrimaryExpression = opt_ctx.toRequired() });
- try stack.append(State { .AsyncAllocator = async_node });
- continue;
- }
-
- stack.append(State { .SuffixOpExpressionEnd = opt_ctx }) catch unreachable;
- try stack.append(State { .PrimaryExpression = opt_ctx });
- continue;
- },
-
- State.SuffixOpExpressionEnd => |opt_ctx| {
- const lhs = opt_ctx.get() ?? continue;
-
- const token = self.getNextToken();
- switch (token.id) {
- Token.Id.LParen => {
- const node = try self.createToCtxNode(arena, opt_ctx, ast.Node.SuffixOp,
- ast.Node.SuffixOp {
- .base = undefined,
- .lhs = lhs,
- .op = ast.Node.SuffixOp.Op {
- .Call = ast.Node.SuffixOp.CallInfo {
- .params = ArrayList(&ast.Node).init(arena),
- .async_attr = null,
- }
- },
- .rtoken = undefined,
- }
- );
- stack.append(State { .SuffixOpExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
- try stack.append(State {
- .ExprListItemOrEnd = ExprListCtx {
- .list = &node.op.Call.params,
- .end = Token.Id.RParen,
- .ptr = &node.rtoken,
- }
- });
- continue;
- },
- Token.Id.LBracket => {
- const node = try self.createToCtxNode(arena, opt_ctx, ast.Node.SuffixOp,
- ast.Node.SuffixOp {
- .base = undefined,
- .lhs = lhs,
- .op = ast.Node.SuffixOp.Op {
- .ArrayAccess = undefined,
- },
- .rtoken = undefined
- }
- );
- stack.append(State { .SuffixOpExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
- try stack.append(State { .SliceOrArrayAccess = node });
- try stack.append(State { .Expression = OptionalCtx { .Required = &node.op.ArrayAccess }});
- continue;
- },
- Token.Id.Period => {
- const node = try self.createToCtxNode(arena, opt_ctx, ast.Node.InfixOp,
- ast.Node.InfixOp {
- .base = undefined,
- .lhs = lhs,
- .op_token = token,
- .op = ast.Node.InfixOp.Op.Period,
- .rhs = undefined,
- }
- );
- stack.append(State { .SuffixOpExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
- try stack.append(State { .Identifier = OptionalCtx { .Required = &node.rhs } });
- continue;
- },
- else => {
- self.putBackToken(token);
- continue;
- },
- }
- },
-
- State.PrimaryExpression => |opt_ctx| {
- const token = self.getNextToken();
- switch (token.id) {
- Token.Id.IntegerLiteral => {
- _ = try self.createToCtxLiteral(arena, opt_ctx, ast.Node.StringLiteral, token);
- continue;
- },
- Token.Id.FloatLiteral => {
- _ = try self.createToCtxLiteral(arena, opt_ctx, ast.Node.FloatLiteral, token);
- continue;
- },
- Token.Id.CharLiteral => {
- _ = try self.createToCtxLiteral(arena, opt_ctx, ast.Node.CharLiteral, token);
- continue;
- },
- Token.Id.Keyword_undefined => {
- _ = try self.createToCtxLiteral(arena, opt_ctx, ast.Node.UndefinedLiteral, token);
- continue;
- },
- Token.Id.Keyword_true, Token.Id.Keyword_false => {
- _ = try self.createToCtxLiteral(arena, opt_ctx, ast.Node.BoolLiteral, token);
- continue;
- },
- Token.Id.Keyword_null => {
- _ = try self.createToCtxLiteral(arena, opt_ctx, ast.Node.NullLiteral, token);
- continue;
- },
- Token.Id.Keyword_this => {
- _ = try self.createToCtxLiteral(arena, opt_ctx, ast.Node.ThisLiteral, token);
- continue;
- },
- Token.Id.Keyword_var => {
- _ = try self.createToCtxLiteral(arena, opt_ctx, ast.Node.VarType, token);
- continue;
- },
- Token.Id.Keyword_unreachable => {
- _ = try self.createToCtxLiteral(arena, opt_ctx, ast.Node.Unreachable, token);
- continue;
- },
- Token.Id.Keyword_promise => {
- const node = try arena.construct(ast.Node.PromiseType {
- .base = ast.Node {
- .id = ast.Node.Id.PromiseType,
- .same_line_comment = null,
- },
- .promise_token = token,
- .result = null,
- });
- opt_ctx.store(&node.base);
- const next_token = self.getNextToken();
- if (next_token.id != Token.Id.Arrow) {
- self.putBackToken(next_token);
- continue;
- }
- node.result = ast.Node.PromiseType.Result {
- .arrow_token = next_token,
- .return_type = undefined,
- };
- const return_type_ptr = &((??node.result).return_type);
- try stack.append(State { .Expression = OptionalCtx { .Required = return_type_ptr, } });
- continue;
- },
- Token.Id.StringLiteral, Token.Id.MultilineStringLiteralLine => {
- opt_ctx.store((try self.parseStringLiteral(arena, token)) ?? unreachable);
- continue;
- },
- Token.Id.LParen => {
- const node = try self.createToCtxNode(arena, opt_ctx, ast.Node.GroupedExpression,
- ast.Node.GroupedExpression {
- .base = undefined,
- .lparen = token,
- .expr = undefined,
- .rparen = undefined,
- }
- );
- stack.append(State {
- .ExpectTokenSave = ExpectTokenSave {
- .id = Token.Id.RParen,
- .ptr = &node.rparen,
- }
- }) catch unreachable;
- try stack.append(State { .Expression = OptionalCtx { .Required = &node.expr } });
- continue;
- },
- Token.Id.Builtin => {
- const node = try self.createToCtxNode(arena, opt_ctx, ast.Node.BuiltinCall,
- ast.Node.BuiltinCall {
- .base = undefined,
- .builtin_token = token,
- .params = ArrayList(&ast.Node).init(arena),
- .rparen_token = undefined,
- }
- );
- stack.append(State {
- .ExprListItemOrEnd = ExprListCtx {
- .list = &node.params,
- .end = Token.Id.RParen,
- .ptr = &node.rparen_token,
- }
- }) catch unreachable;
- try stack.append(State { .ExpectToken = Token.Id.LParen, });
- continue;
- },
- Token.Id.LBracket => {
- const node = try self.createToCtxNode(arena, opt_ctx, ast.Node.PrefixOp,
- ast.Node.PrefixOp {
- .base = undefined,
- .op_token = token,
- .op = undefined,
- .rhs = undefined,
- }
- );
- stack.append(State { .SliceOrArrayType = node }) catch unreachable;
- continue;
- },
- Token.Id.Keyword_error => {
- stack.append(State {
- .ErrorTypeOrSetDecl = ErrorTypeOrSetDeclCtx {
- .error_token = token,
- .opt_ctx = opt_ctx
- }
- }) catch unreachable;
- continue;
- },
- Token.Id.Keyword_packed => {
- stack.append(State {
- .ContainerKind = ContainerKindCtx {
- .opt_ctx = opt_ctx,
- .ltoken = token,
- .layout = ast.Node.ContainerDecl.Layout.Packed,
- },
- }) catch unreachable;
- continue;
- },
- Token.Id.Keyword_extern => {
- stack.append(State {
- .ExternType = ExternTypeCtx {
- .opt_ctx = opt_ctx,
- .extern_token = token,
- .comments = null,
- },
- }) catch unreachable;
- continue;
- },
- Token.Id.Keyword_struct, Token.Id.Keyword_union, Token.Id.Keyword_enum => {
- self.putBackToken(token);
- stack.append(State {
- .ContainerKind = ContainerKindCtx {
- .opt_ctx = opt_ctx,
- .ltoken = token,
- .layout = ast.Node.ContainerDecl.Layout.Auto,
- },
- }) catch unreachable;
- continue;
- },
- Token.Id.Identifier => {
- stack.append(State {
- .MaybeLabeledExpression = MaybeLabeledExpressionCtx {
- .label = token,
- .opt_ctx = opt_ctx
- }
- }) catch unreachable;
- continue;
- },
- Token.Id.Keyword_fn => {
- const fn_proto = try arena.construct(ast.Node.FnProto {
- .base = ast.Node {
- .id = ast.Node.Id.FnProto,
- .same_line_comment = null,
- },
- .doc_comments = null,
- .visib_token = null,
- .name_token = null,
- .fn_token = token,
- .params = ArrayList(&ast.Node).init(arena),
- .return_type = undefined,
- .var_args_token = null,
- .extern_export_inline_token = null,
- .cc_token = null,
- .async_attr = null,
- .body_node = null,
- .lib_name = null,
- .align_expr = null,
- });
- opt_ctx.store(&fn_proto.base);
- stack.append(State { .FnProto = fn_proto }) catch unreachable;
- continue;
- },
- Token.Id.Keyword_nakedcc, Token.Id.Keyword_stdcallcc => {
- const fn_proto = try arena.construct(ast.Node.FnProto {
- .base = ast.Node {
- .id = ast.Node.Id.FnProto,
- .same_line_comment = null,
- },
- .doc_comments = null,
- .visib_token = null,
- .name_token = null,
- .fn_token = undefined,
- .params = ArrayList(&ast.Node).init(arena),
- .return_type = undefined,
- .var_args_token = null,
- .extern_export_inline_token = null,
- .cc_token = token,
- .async_attr = null,
- .body_node = null,
- .lib_name = null,
- .align_expr = null,
- });
- opt_ctx.store(&fn_proto.base);
- stack.append(State { .FnProto = fn_proto }) catch unreachable;
- try stack.append(State {
- .ExpectTokenSave = ExpectTokenSave {
- .id = Token.Id.Keyword_fn,
- .ptr = &fn_proto.fn_token
- }
- });
- continue;
- },
- Token.Id.Keyword_asm => {
- const node = try self.createToCtxNode(arena, opt_ctx, ast.Node.Asm,
- ast.Node.Asm {
- .base = undefined,
- .asm_token = token,
- .volatile_token = null,
- .template = undefined,
- //.tokens = ArrayList(ast.Node.Asm.AsmToken).init(arena),
- .outputs = ArrayList(&ast.Node.AsmOutput).init(arena),
- .inputs = ArrayList(&ast.Node.AsmInput).init(arena),
- .cloppers = ArrayList(&ast.Node).init(arena),
- .rparen = undefined,
- }
- );
- stack.append(State {
- .ExpectTokenSave = ExpectTokenSave {
- .id = Token.Id.RParen,
- .ptr = &node.rparen,
- }
- }) catch unreachable;
- try stack.append(State { .AsmClopperItems = &node.cloppers });
- try stack.append(State { .IfToken = Token.Id.Colon });
- try stack.append(State { .AsmInputItems = &node.inputs });
- try stack.append(State { .IfToken = Token.Id.Colon });
- try stack.append(State { .AsmOutputItems = &node.outputs });
- try stack.append(State { .IfToken = Token.Id.Colon });
- try stack.append(State { .StringLiteral = OptionalCtx { .Required = &node.template } });
- try stack.append(State { .ExpectToken = Token.Id.LParen });
- try stack.append(State {
- .OptionalTokenSave = OptionalTokenSave {
- .id = Token.Id.Keyword_volatile,
- .ptr = &node.volatile_token,
- }
- });
- },
- Token.Id.Keyword_inline => {
- stack.append(State {
- .Inline = InlineCtx {
- .label = null,
- .inline_token = token,
- .opt_ctx = opt_ctx,
- }
- }) catch unreachable;
- continue;
- },
- else => {
- if (!try self.parseBlockExpr(&stack, arena, opt_ctx, token)) {
- self.putBackToken(token);
- if (opt_ctx != OptionalCtx.Optional) {
- return self.parseError(token, "expected primary expression, found {}", @tagName(token.id));
- }
- }
- continue;
- }
- }
- },
-
-
- State.ErrorTypeOrSetDecl => |ctx| {
- if (self.eatToken(Token.Id.LBrace) == null) {
- _ = try self.createToCtxLiteral(arena, ctx.opt_ctx, ast.Node.ErrorType, ctx.error_token);
- continue;
- }
-
- const node = try arena.construct(ast.Node.ErrorSetDecl {
- .base = ast.Node {
- .id = ast.Node.Id.ErrorSetDecl,
- .same_line_comment = null,
- },
- .error_token = ctx.error_token,
- .decls = ArrayList(&ast.Node).init(arena),
- .rbrace_token = undefined,
- });
- ctx.opt_ctx.store(&node.base);
-
- stack.append(State {
- .ErrorTagListItemOrEnd = ListSave(&ast.Node) {
- .list = &node.decls,
- .ptr = &node.rbrace_token,
- }
- }) catch unreachable;
- continue;
- },
- State.StringLiteral => |opt_ctx| {
- const token = self.getNextToken();
- opt_ctx.store(
- (try self.parseStringLiteral(arena, token)) ?? {
- self.putBackToken(token);
- if (opt_ctx != OptionalCtx.Optional) {
- return self.parseError(token, "expected primary expression, found {}", @tagName(token.id));
- }
-
- continue;
- }
- );
- },
-
- State.Identifier => |opt_ctx| {
- if (self.eatToken(Token.Id.Identifier)) |ident_token| {
- _ = try self.createToCtxLiteral(arena, opt_ctx, ast.Node.Identifier, ident_token);
- continue;
- }
-
- if (opt_ctx != OptionalCtx.Optional) {
- const token = self.getNextToken();
- return self.parseError(token, "expected identifier, found {}", @tagName(token.id));
- }
- },
-
- State.ErrorTag => |node_ptr| {
- const comments = try self.eatDocComments(arena);
- const ident_token = self.getNextToken();
- if (ident_token.id != Token.Id.Identifier) {
- return self.parseError(ident_token, "expected {}, found {}",
- @tagName(Token.Id.Identifier), @tagName(ident_token.id));
- }
-
- const node = try arena.construct(ast.Node.ErrorTag {
- .base = ast.Node {
- .id = ast.Node.Id.ErrorTag,
- .same_line_comment = null,
- },
- .doc_comments = comments,
- .name_token = ident_token,
- });
- *node_ptr = &node.base;
- continue;
- },
-
- State.ExpectToken => |token_id| {
- _ = try self.expectToken(token_id);
- continue;
- },
- State.ExpectTokenSave => |expect_token_save| {
- *expect_token_save.ptr = try self.expectToken(expect_token_save.id);
- continue;
- },
- State.IfToken => |token_id| {
- if (self.eatToken(token_id)) |_| {
- continue;
- }
-
- _ = stack.pop();
- continue;
- },
- State.IfTokenSave => |if_token_save| {
- if (self.eatToken(if_token_save.id)) |token| {
- *if_token_save.ptr = token;
- continue;
- }
-
- _ = stack.pop();
- continue;
- },
- State.OptionalTokenSave => |optional_token_save| {
- if (self.eatToken(optional_token_save.id)) |token| {
- *optional_token_save.ptr = token;
- continue;
- }
-
- continue;
- },
- }
- }
- }
-
- fn eatDocComments(self: &Parser, arena: &mem.Allocator) !?&ast.Node.DocComment {
- var result: ?&ast.Node.DocComment = null;
- while (true) {
- if (self.eatToken(Token.Id.DocComment)) |line_comment| {
- const node = blk: {
- if (result) |comment_node| {
- break :blk comment_node;
- } else {
- const comment_node = try arena.construct(ast.Node.DocComment {
- .base = ast.Node {
- .id = ast.Node.Id.DocComment,
- .same_line_comment = null,
- },
- .lines = ArrayList(Token).init(arena),
- });
- result = comment_node;
- break :blk comment_node;
- }
- };
- try node.lines.append(line_comment);
- continue;
- }
- break;
- }
- return result;
- }
-
- fn eatLineComment(self: &Parser, arena: &mem.Allocator) !?&ast.Node.LineComment {
- const token = self.eatToken(Token.Id.LineComment) ?? return null;
- return try arena.construct(ast.Node.LineComment {
- .base = ast.Node {
- .id = ast.Node.Id.LineComment,
- .same_line_comment = null,
- },
- .token = token,
- });
- }
-
- fn requireSemiColon(node: &const ast.Node) bool {
- var n = node;
- while (true) {
- switch (n.id) {
- ast.Node.Id.Root,
- ast.Node.Id.StructField,
- ast.Node.Id.UnionTag,
- ast.Node.Id.EnumTag,
- ast.Node.Id.ParamDecl,
- ast.Node.Id.Block,
- ast.Node.Id.Payload,
- ast.Node.Id.PointerPayload,
- ast.Node.Id.PointerIndexPayload,
- ast.Node.Id.Switch,
- ast.Node.Id.SwitchCase,
- ast.Node.Id.SwitchElse,
- ast.Node.Id.FieldInitializer,
- ast.Node.Id.DocComment,
- ast.Node.Id.LineComment,
- ast.Node.Id.TestDecl => return false,
- ast.Node.Id.While => {
- const while_node = @fieldParentPtr(ast.Node.While, "base", n);
- if (while_node.@"else") |@"else"| {
- n = @"else".base;
- continue;
- }
-
- return while_node.body.id != ast.Node.Id.Block;
- },
- ast.Node.Id.For => {
- const for_node = @fieldParentPtr(ast.Node.For, "base", n);
- if (for_node.@"else") |@"else"| {
- n = @"else".base;
- continue;
- }
-
- return for_node.body.id != ast.Node.Id.Block;
- },
- ast.Node.Id.If => {
- const if_node = @fieldParentPtr(ast.Node.If, "base", n);
- if (if_node.@"else") |@"else"| {
- n = @"else".base;
- continue;
- }
-
- return if_node.body.id != ast.Node.Id.Block;
- },
- ast.Node.Id.Else => {
- const else_node = @fieldParentPtr(ast.Node.Else, "base", n);
- n = else_node.body;
- continue;
- },
- ast.Node.Id.Defer => {
- const defer_node = @fieldParentPtr(ast.Node.Defer, "base", n);
- return defer_node.expr.id != ast.Node.Id.Block;
- },
- ast.Node.Id.Comptime => {
- const comptime_node = @fieldParentPtr(ast.Node.Comptime, "base", n);
- return comptime_node.expr.id != ast.Node.Id.Block;
- },
- ast.Node.Id.Suspend => {
- const suspend_node = @fieldParentPtr(ast.Node.Suspend, "base", n);
- if (suspend_node.body) |body| {
- return body.id != ast.Node.Id.Block;
- }
-
- return true;
- },
- else => return true,
- }
- }
- }
-
- fn lookForSameLineComment(self: &Parser, arena: &mem.Allocator, node: &ast.Node) !void {
- const node_last_token = node.lastToken();
-
- const line_comment_token = self.getNextToken();
- if (line_comment_token.id != Token.Id.DocComment and line_comment_token.id != Token.Id.LineComment) {
- self.putBackToken(line_comment_token);
- return;
- }
-
- const offset_loc = self.tokenizer.getTokenLocation(node_last_token.end, line_comment_token);
- const different_line = offset_loc.line != 0;
- if (different_line) {
- self.putBackToken(line_comment_token);
- return;
- }
-
- node.same_line_comment = try arena.construct(line_comment_token);
- }
-
- fn parseStringLiteral(self: &Parser, arena: &mem.Allocator, token: &const Token) !?&ast.Node {
- switch (token.id) {
- Token.Id.StringLiteral => {
- return &(try self.createLiteral(arena, ast.Node.StringLiteral, token)).base;
- },
- Token.Id.MultilineStringLiteralLine => {
- const node = try self.createNode(arena, ast.Node.MultilineStringLiteral,
- ast.Node.MultilineStringLiteral {
- .base = undefined,
- .tokens = ArrayList(Token).init(arena),
- }
- );
- try node.tokens.append(token);
- while (true) {
- const multiline_str = self.getNextToken();
- if (multiline_str.id != Token.Id.MultilineStringLiteralLine) {
- self.putBackToken(multiline_str);
- break;
- }
-
- try node.tokens.append(multiline_str);
- }
-
- return &node.base;
- },
- // TODO: We shouldn't need a cast, but:
- // zig: /home/jc/Documents/zig/src/ir.cpp:7962: TypeTableEntry* ir_resolve_peer_types(IrAnalyze*, AstNode*, IrInstruction**, size_t): Assertion `err_set_type != nullptr' failed.
- else => return (?&ast.Node)(null),
- }
- }
-
- fn parseBlockExpr(self: &Parser, stack: &ArrayList(State), arena: &mem.Allocator, ctx: &const OptionalCtx, token: &const Token) !bool {
- switch (token.id) {
- Token.Id.Keyword_suspend => {
- const node = try self.createToCtxNode(arena, ctx, ast.Node.Suspend,
- ast.Node.Suspend {
- .base = undefined,
- .label = null,
- .suspend_token = *token,
- .payload = null,
- .body = null,
- }
- );
-
- stack.append(State { .SuspendBody = node }) catch unreachable;
- try stack.append(State { .Payload = OptionalCtx { .Optional = &node.payload } });
- return true;
- },
- Token.Id.Keyword_if => {
- const node = try self.createToCtxNode(arena, ctx, ast.Node.If,
- ast.Node.If {
- .base = undefined,
- .if_token = *token,
- .condition = undefined,
- .payload = null,
- .body = undefined,
- .@"else" = null,
- }
- );
-
- stack.append(State { .Else = &node.@"else" }) catch unreachable;
- try stack.append(State { .Expression = OptionalCtx { .Required = &node.body } });
- try stack.append(State { .PointerPayload = OptionalCtx { .Optional = &node.payload } });
- try stack.append(State { .LookForSameLineComment = &node.condition });
- try stack.append(State { .ExpectToken = Token.Id.RParen });
- try stack.append(State { .Expression = OptionalCtx { .Required = &node.condition } });
- try stack.append(State { .ExpectToken = Token.Id.LParen });
- return true;
- },
- Token.Id.Keyword_while => {
- stack.append(State {
- .While = LoopCtx {
- .label = null,
- .inline_token = null,
- .loop_token = *token,
- .opt_ctx = *ctx,
- }
- }) catch unreachable;
- return true;
- },
- Token.Id.Keyword_for => {
- stack.append(State {
- .For = LoopCtx {
- .label = null,
- .inline_token = null,
- .loop_token = *token,
- .opt_ctx = *ctx,
- }
- }) catch unreachable;
- return true;
- },
- Token.Id.Keyword_switch => {
- const node = try arena.construct(ast.Node.Switch {
- .base = ast.Node {
- .id = ast.Node.Id.Switch,
- .same_line_comment = null,
- },
- .switch_token = *token,
- .expr = undefined,
- .cases = ArrayList(&ast.Node).init(arena),
- .rbrace = undefined,
- });
- ctx.store(&node.base);
-
- stack.append(State {
- .SwitchCaseOrEnd = ListSave(&ast.Node) {
- .list = &node.cases,
- .ptr = &node.rbrace,
- },
- }) catch unreachable;
- try stack.append(State { .ExpectToken = Token.Id.LBrace });
- try stack.append(State { .ExpectToken = Token.Id.RParen });
- try stack.append(State { .Expression = OptionalCtx { .Required = &node.expr } });
- try stack.append(State { .ExpectToken = Token.Id.LParen });
- return true;
- },
- Token.Id.Keyword_comptime => {
- const node = try self.createToCtxNode(arena, ctx, ast.Node.Comptime,
- ast.Node.Comptime {
- .base = undefined,
- .comptime_token = *token,
- .expr = undefined,
- .doc_comments = null,
- }
- );
- try stack.append(State { .Expression = OptionalCtx { .Required = &node.expr } });
- return true;
- },
- Token.Id.LBrace => {
- const block = try self.createToCtxNode(arena, ctx, ast.Node.Block,
- ast.Node.Block {
- .base = undefined,
- .label = null,
- .lbrace = *token,
- .statements = ArrayList(&ast.Node).init(arena),
- .rbrace = undefined,
- }
- );
- stack.append(State { .Block = block }) catch unreachable;
- return true;
- },
- else => {
- return false;
- }
- }
- }
-
- fn expectCommaOrEnd(self: &Parser, end: @TagType(Token.Id)) !?Token {
- var token = self.getNextToken();
- switch (token.id) {
- Token.Id.Comma => return null,
- else => {
- if (end == token.id) {
- return token;
- }
-
- return self.parseError(token, "expected ',' or {}, found {}", @tagName(end), @tagName(token.id));
- },
- }
- }
-
- fn tokenIdToAssignment(id: &const Token.Id) ?ast.Node.InfixOp.Op {
- // TODO: We have to cast all cases because of this:
- // error: expected type '?InfixOp', found '?@TagType(InfixOp)'
- return switch (*id) {
- Token.Id.AmpersandEqual => ast.Node.InfixOp.Op { .AssignBitAnd = void{} },
- Token.Id.AngleBracketAngleBracketLeftEqual => ast.Node.InfixOp.Op { .AssignBitShiftLeft = void{} },
- Token.Id.AngleBracketAngleBracketRightEqual => ast.Node.InfixOp.Op { .AssignBitShiftRight = void{} },
- Token.Id.AsteriskEqual => ast.Node.InfixOp.Op { .AssignTimes = void{} },
- Token.Id.AsteriskPercentEqual => ast.Node.InfixOp.Op { .AssignTimesWarp = void{} },
- Token.Id.CaretEqual => ast.Node.InfixOp.Op { .AssignBitXor = void{} },
- Token.Id.Equal => ast.Node.InfixOp.Op { .Assign = void{} },
- Token.Id.MinusEqual => ast.Node.InfixOp.Op { .AssignMinus = void{} },
- Token.Id.MinusPercentEqual => ast.Node.InfixOp.Op { .AssignMinusWrap = void{} },
- Token.Id.PercentEqual => ast.Node.InfixOp.Op { .AssignMod = void{} },
- Token.Id.PipeEqual => ast.Node.InfixOp.Op { .AssignBitOr = void{} },
- Token.Id.PlusEqual => ast.Node.InfixOp.Op { .AssignPlus = void{} },
- Token.Id.PlusPercentEqual => ast.Node.InfixOp.Op { .AssignPlusWrap = void{} },
- Token.Id.SlashEqual => ast.Node.InfixOp.Op { .AssignDiv = void{} },
- else => null,
- };
- }
-
- fn tokenIdToUnwrapExpr(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op {
- return switch (id) {
- Token.Id.Keyword_catch => ast.Node.InfixOp.Op { .Catch = null },
- Token.Id.QuestionMarkQuestionMark => ast.Node.InfixOp.Op { .UnwrapMaybe = void{} },
- else => null,
- };
- }
-
- fn tokenIdToComparison(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op {
- return switch (id) {
- Token.Id.BangEqual => ast.Node.InfixOp.Op { .BangEqual = void{} },
- Token.Id.EqualEqual => ast.Node.InfixOp.Op { .EqualEqual = void{} },
- Token.Id.AngleBracketLeft => ast.Node.InfixOp.Op { .LessThan = void{} },
- Token.Id.AngleBracketLeftEqual => ast.Node.InfixOp.Op { .LessOrEqual = void{} },
- Token.Id.AngleBracketRight => ast.Node.InfixOp.Op { .GreaterThan = void{} },
- Token.Id.AngleBracketRightEqual => ast.Node.InfixOp.Op { .GreaterOrEqual = void{} },
- else => null,
- };
- }
-
- fn tokenIdToBitShift(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op {
- return switch (id) {
- Token.Id.AngleBracketAngleBracketLeft => ast.Node.InfixOp.Op { .BitShiftLeft = void{} },
- Token.Id.AngleBracketAngleBracketRight => ast.Node.InfixOp.Op { .BitShiftRight = void{} },
- else => null,
- };
- }
-
- fn tokenIdToAddition(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op {
- return switch (id) {
- Token.Id.Minus => ast.Node.InfixOp.Op { .Sub = void{} },
- Token.Id.MinusPercent => ast.Node.InfixOp.Op { .SubWrap = void{} },
- Token.Id.Plus => ast.Node.InfixOp.Op { .Add = void{} },
- Token.Id.PlusPercent => ast.Node.InfixOp.Op { .AddWrap = void{} },
- Token.Id.PlusPlus => ast.Node.InfixOp.Op { .ArrayCat = void{} },
- else => null,
- };
- }
-
- fn tokenIdToMultiply(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op {
- return switch (id) {
- Token.Id.Slash => ast.Node.InfixOp.Op { .Div = void{} },
- Token.Id.Asterisk => ast.Node.InfixOp.Op { .Mult = void{} },
- Token.Id.AsteriskAsterisk => ast.Node.InfixOp.Op { .ArrayMult = void{} },
- Token.Id.AsteriskPercent => ast.Node.InfixOp.Op { .MultWrap = void{} },
- Token.Id.Percent => ast.Node.InfixOp.Op { .Mod = void{} },
- Token.Id.PipePipe => ast.Node.InfixOp.Op { .MergeErrorSets = void{} },
- else => null,
- };
- }
-
- fn tokenIdToPrefixOp(id: @TagType(Token.Id)) ?ast.Node.PrefixOp.Op {
- return switch (id) {
- Token.Id.Bang => ast.Node.PrefixOp.Op { .BoolNot = void{} },
- Token.Id.Tilde => ast.Node.PrefixOp.Op { .BitNot = void{} },
- Token.Id.Minus => ast.Node.PrefixOp.Op { .Negation = void{} },
- Token.Id.MinusPercent => ast.Node.PrefixOp.Op { .NegationWrap = void{} },
- Token.Id.Asterisk, Token.Id.AsteriskAsterisk => ast.Node.PrefixOp.Op { .Deref = void{} },
- Token.Id.Ampersand => ast.Node.PrefixOp.Op {
- .AddrOf = ast.Node.PrefixOp.AddrOfInfo {
- .align_expr = null,
- .bit_offset_start_token = null,
- .bit_offset_end_token = null,
- .const_token = null,
- .volatile_token = null,
- },
- },
- Token.Id.QuestionMark => ast.Node.PrefixOp.Op { .MaybeType = void{} },
- Token.Id.QuestionMarkQuestionMark => ast.Node.PrefixOp.Op { .UnwrapMaybe = void{} },
- Token.Id.Keyword_await => ast.Node.PrefixOp.Op { .Await = void{} },
- Token.Id.Keyword_try => ast.Node.PrefixOp.Op { .Try = void{ } },
- else => null,
- };
- }
-
- fn createNode(self: &Parser, arena: &mem.Allocator, comptime T: type, init_to: &const T) !&T {
- const node = try arena.create(T);
- *node = *init_to;
- node.base = blk: {
- const id = ast.Node.typeToId(T);
- break :blk ast.Node {
- .id = id,
- .same_line_comment = null,
- };
- };
-
- return node;
- }
-
- fn createAttachNode(self: &Parser, arena: &mem.Allocator, list: &ArrayList(&ast.Node), comptime T: type, init_to: &const T) !&T {
- const node = try self.createNode(arena, T, init_to);
- try list.append(&node.base);
-
- return node;
- }
-
- fn createToCtxNode(self: &Parser, arena: &mem.Allocator, opt_ctx: &const OptionalCtx, comptime T: type, init_to: &const T) !&T {
- const node = try self.createNode(arena, T, init_to);
- opt_ctx.store(&node.base);
-
- return node;
- }
-
- fn createLiteral(self: &Parser, arena: &mem.Allocator, comptime T: type, token: &const Token) !&T {
- return self.createNode(arena, T,
- T {
- .base = undefined,
- .token = *token,
- }
- );
- }
-
- fn createToCtxLiteral(self: &Parser, arena: &mem.Allocator, opt_ctx: &const OptionalCtx, comptime T: type, token: &const Token) !&T {
- const node = try self.createLiteral(arena, T, token);
- opt_ctx.store(&node.base);
-
- return node;
- }
-
- fn parseError(self: &Parser, token: &const Token, comptime fmt: []const u8, args: ...) (error{ParseError}) {
- const loc = self.tokenizer.getTokenLocation(0, token);
- warn("{}:{}:{}: error: " ++ fmt ++ "\n", self.source_file_name, loc.line + 1, loc.column + 1, args);
- warn("{}\n", self.tokenizer.buffer[loc.line_start..loc.line_end]);
- {
- var i: usize = 0;
- while (i < loc.column) : (i += 1) {
- warn(" ");
- }
- }
- {
- const caret_count = token.end - token.start;
- var i: usize = 0;
- while (i < caret_count) : (i += 1) {
- warn("~");
- }
- }
- warn("\n");
- return error.ParseError;
- }
-
- fn expectToken(self: &Parser, id: @TagType(Token.Id)) !Token {
- const token = self.getNextToken();
- if (token.id != id) {
- return self.parseError(token, "expected {}, found {}", @tagName(id), @tagName(token.id));
- }
- return token;
- }
-
- fn eatToken(self: &Parser, id: @TagType(Token.Id)) ?Token {
- if (self.isPeekToken(id)) {
- return self.getNextToken();
- }
- return null;
- }
-
- fn putBackToken(self: &Parser, token: &const Token) void {
- self.put_back_tokens[self.put_back_count] = *token;
- self.put_back_count += 1;
- }
-
- fn getNextToken(self: &Parser) Token {
- if (self.put_back_count != 0) {
- const put_back_index = self.put_back_count - 1;
- const put_back_token = self.put_back_tokens[put_back_index];
- self.put_back_count = put_back_index;
- return put_back_token;
- } else {
- return self.tokenizer.next();
- }
- }
-
- fn isPeekToken(self: &Parser, id: @TagType(Token.Id)) bool {
- const token = self.getNextToken();
- defer self.putBackToken(token);
- return id == token.id;
- }
-
- const RenderAstFrame = struct {
- node: &ast.Node,
- indent: usize,
- };
-
- pub fn renderAst(self: &Parser, stream: var, root_node: &ast.Node.Root) !void {
- var stack = self.initUtilityArrayList(RenderAstFrame);
- defer self.deinitUtilityArrayList(stack);
-
- try stack.append(RenderAstFrame {
- .node = &root_node.base,
- .indent = 0,
- });
-
- while (stack.popOrNull()) |frame| {
- {
- var i: usize = 0;
- while (i < frame.indent) : (i += 1) {
- try stream.print(" ");
- }
- }
- try stream.print("{}\n", @tagName(frame.node.id));
- var child_i: usize = 0;
- while (frame.node.iterate(child_i)) |child| : (child_i += 1) {
- try stack.append(RenderAstFrame {
- .node = child,
- .indent = frame.indent + 2,
- });
- }
- }
- }
-
- const RenderState = union(enum) {
- TopLevelDecl: &ast.Node,
- ParamDecl: &ast.Node,
- Text: []const u8,
- Expression: &ast.Node,
- VarDecl: &ast.Node.VarDecl,
- Statement: &ast.Node,
- PrintIndent,
- Indent: usize,
- PrintSameLineComment: ?&Token,
- PrintLineComment: &Token,
- };
-
- pub fn renderSource(self: &Parser, stream: var, root_node: &ast.Node.Root) !void {
- var stack = self.initUtilityArrayList(RenderState);
- defer self.deinitUtilityArrayList(stack);
-
- {
- try stack.append(RenderState { .Text = "\n"});
-
- var i = root_node.decls.len;
- while (i != 0) {
- i -= 1;
- const decl = root_node.decls.items[i];
- try stack.append(RenderState {.TopLevelDecl = decl});
- if (i != 0) {
- try stack.append(RenderState {
- .Text = blk: {
- const prev_node = root_node.decls.at(i - 1);
- const loc = self.tokenizer.getTokenLocation(prev_node.lastToken().end, decl.firstToken());
- if (loc.line >= 2) {
- break :blk "\n\n";
- }
- break :blk "\n";
- },
- });
- }
- }
- }
-
- const indent_delta = 4;
- var indent: usize = 0;
- while (stack.popOrNull()) |state| {
- switch (state) {
- RenderState.TopLevelDecl => |decl| {
- try stack.append(RenderState { .PrintSameLineComment = decl.same_line_comment } );
- switch (decl.id) {
- ast.Node.Id.FnProto => {
- const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", decl);
- try self.renderComments(stream, fn_proto, indent);
-
- if (fn_proto.body_node) |body_node| {
- stack.append(RenderState { .Expression = body_node}) catch unreachable;
- try stack.append(RenderState { .Text = " "});
- } else {
- stack.append(RenderState { .Text = ";" }) catch unreachable;
- }
-
- try stack.append(RenderState { .Expression = decl });
- },
- ast.Node.Id.Use => {
- const use_decl = @fieldParentPtr(ast.Node.Use, "base", decl);
- if (use_decl.visib_token) |visib_token| {
- try stream.print("{} ", self.tokenizer.getTokenSlice(visib_token));
- }
- try stream.print("use ");
- try stack.append(RenderState { .Text = ";" });
- try stack.append(RenderState { .Expression = use_decl.expr });
- },
- ast.Node.Id.VarDecl => {
- const var_decl = @fieldParentPtr(ast.Node.VarDecl, "base", decl);
- try self.renderComments(stream, var_decl, indent);
- try stack.append(RenderState { .VarDecl = var_decl});
- },
- ast.Node.Id.TestDecl => {
- const test_decl = @fieldParentPtr(ast.Node.TestDecl, "base", decl);
- try self.renderComments(stream, test_decl, indent);
- try stream.print("test ");
- try stack.append(RenderState { .Expression = test_decl.body_node });
- try stack.append(RenderState { .Text = " " });
- try stack.append(RenderState { .Expression = test_decl.name });
- },
- ast.Node.Id.StructField => {
- const field = @fieldParentPtr(ast.Node.StructField, "base", decl);
- try self.renderComments(stream, field, indent);
- if (field.visib_token) |visib_token| {
- try stream.print("{} ", self.tokenizer.getTokenSlice(visib_token));
- }
- try stream.print("{}: ", self.tokenizer.getTokenSlice(field.name_token));
- try stack.append(RenderState { .Text = "," });
- try stack.append(RenderState { .Expression = field.type_expr});
- },
- ast.Node.Id.UnionTag => {
- const tag = @fieldParentPtr(ast.Node.UnionTag, "base", decl);
- try self.renderComments(stream, tag, indent);
- try stream.print("{}", self.tokenizer.getTokenSlice(tag.name_token));
-
- try stack.append(RenderState { .Text = "," });
-
- if (tag.value_expr) |value_expr| {
- try stack.append(RenderState { .Expression = value_expr });
- try stack.append(RenderState { .Text = " = " });
- }
-
- if (tag.type_expr) |type_expr| {
- try stream.print(": ");
- try stack.append(RenderState { .Expression = type_expr});
- }
- },
- ast.Node.Id.EnumTag => {
- const tag = @fieldParentPtr(ast.Node.EnumTag, "base", decl);
- try self.renderComments(stream, tag, indent);
- try stream.print("{}", self.tokenizer.getTokenSlice(tag.name_token));
-
- try stack.append(RenderState { .Text = "," });
- if (tag.value) |value| {
- try stream.print(" = ");
- try stack.append(RenderState { .Expression = value});
- }
- },
- ast.Node.Id.ErrorTag => {
- const tag = @fieldParentPtr(ast.Node.ErrorTag, "base", decl);
- try self.renderComments(stream, tag, indent);
- try stream.print("{}", self.tokenizer.getTokenSlice(tag.name_token));
- },
- ast.Node.Id.Comptime => {
- if (requireSemiColon(decl)) {
- try stack.append(RenderState { .Text = ";" });
- }
- try stack.append(RenderState { .Expression = decl });
- },
- ast.Node.Id.LineComment => {
- const line_comment_node = @fieldParentPtr(ast.Node.LineComment, "base", decl);
- try stream.write(self.tokenizer.getTokenSlice(line_comment_node.token));
- },
- else => unreachable,
- }
- },
-
- RenderState.VarDecl => |var_decl| {
- try stack.append(RenderState { .Text = ";" });
- if (var_decl.init_node) |init_node| {
- try stack.append(RenderState { .Expression = init_node });
- const text = if (init_node.id == ast.Node.Id.MultilineStringLiteral) " =" else " = ";
- try stack.append(RenderState { .Text = text });
- }
- if (var_decl.align_node) |align_node| {
- try stack.append(RenderState { .Text = ")" });
- try stack.append(RenderState { .Expression = align_node });
- try stack.append(RenderState { .Text = " align(" });
- }
- if (var_decl.type_node) |type_node| {
- try stack.append(RenderState { .Expression = type_node });
- try stack.append(RenderState { .Text = ": " });
- }
- try stack.append(RenderState { .Text = self.tokenizer.getTokenSlice(var_decl.name_token) });
- try stack.append(RenderState { .Text = " " });
- try stack.append(RenderState { .Text = self.tokenizer.getTokenSlice(var_decl.mut_token) });
-
- if (var_decl.comptime_token) |comptime_token| {
- try stack.append(RenderState { .Text = " " });
- try stack.append(RenderState { .Text = self.tokenizer.getTokenSlice(comptime_token) });
- }
-
- if (var_decl.extern_export_token) |extern_export_token| {
- if (var_decl.lib_name != null) {
- try stack.append(RenderState { .Text = " " });
- try stack.append(RenderState { .Expression = ??var_decl.lib_name });
- }
- try stack.append(RenderState { .Text = " " });
- try stack.append(RenderState { .Text = self.tokenizer.getTokenSlice(extern_export_token) });
- }
-
- if (var_decl.visib_token) |visib_token| {
- try stack.append(RenderState { .Text = " " });
- try stack.append(RenderState { .Text = self.tokenizer.getTokenSlice(visib_token) });
- }
- },
-
- RenderState.ParamDecl => |base| {
- const param_decl = @fieldParentPtr(ast.Node.ParamDecl, "base", base);
- if (param_decl.comptime_token) |comptime_token| {
- try stream.print("{} ", self.tokenizer.getTokenSlice(comptime_token));
- }
- if (param_decl.noalias_token) |noalias_token| {
- try stream.print("{} ", self.tokenizer.getTokenSlice(noalias_token));
- }
- if (param_decl.name_token) |name_token| {
- try stream.print("{}: ", self.tokenizer.getTokenSlice(name_token));
- }
- if (param_decl.var_args_token) |var_args_token| {
- try stream.print("{}", self.tokenizer.getTokenSlice(var_args_token));
- } else {
- try stack.append(RenderState { .Expression = param_decl.type_node});
- }
- },
- RenderState.Text => |bytes| {
- try stream.write(bytes);
- },
- RenderState.Expression => |base| switch (base.id) {
- ast.Node.Id.Identifier => {
- const identifier = @fieldParentPtr(ast.Node.Identifier, "base", base);
- try stream.print("{}", self.tokenizer.getTokenSlice(identifier.token));
- },
- ast.Node.Id.Block => {
- const block = @fieldParentPtr(ast.Node.Block, "base", base);
- if (block.label) |label| {
- try stream.print("{}: ", self.tokenizer.getTokenSlice(label));
- }
-
- if (block.statements.len == 0) {
- try stream.write("{}");
- } else {
- try stream.write("{");
- try stack.append(RenderState { .Text = "}"});
- try stack.append(RenderState.PrintIndent);
- try stack.append(RenderState { .Indent = indent});
- try stack.append(RenderState { .Text = "\n"});
- var i = block.statements.len;
- while (i != 0) {
- i -= 1;
- const statement_node = block.statements.items[i];
- try stack.append(RenderState { .Statement = statement_node});
- try stack.append(RenderState.PrintIndent);
- try stack.append(RenderState { .Indent = indent + indent_delta});
- try stack.append(RenderState {
- .Text = blk: {
- if (i != 0) {
- const prev_node = block.statements.items[i - 1];
- const loc = self.tokenizer.getTokenLocation(prev_node.lastToken().end, statement_node.firstToken());
- if (loc.line >= 2) {
- break :blk "\n\n";
- }
- }
- break :blk "\n";
- },
- });
- }
- }
- },
- ast.Node.Id.Defer => {
- const defer_node = @fieldParentPtr(ast.Node.Defer, "base", base);
- try stream.print("{} ", self.tokenizer.getTokenSlice(defer_node.defer_token));
- try stack.append(RenderState { .Expression = defer_node.expr });
- },
- ast.Node.Id.Comptime => {
- const comptime_node = @fieldParentPtr(ast.Node.Comptime, "base", base);
- try stream.print("{} ", self.tokenizer.getTokenSlice(comptime_node.comptime_token));
- try stack.append(RenderState { .Expression = comptime_node.expr });
- },
- ast.Node.Id.AsyncAttribute => {
- const async_attr = @fieldParentPtr(ast.Node.AsyncAttribute, "base", base);
- try stream.print("{}", self.tokenizer.getTokenSlice(async_attr.async_token));
-
- if (async_attr.allocator_type) |allocator_type| {
- try stack.append(RenderState { .Text = ">" });
- try stack.append(RenderState { .Expression = allocator_type });
- try stack.append(RenderState { .Text = "<" });
- }
- },
- ast.Node.Id.Suspend => {
- const suspend_node = @fieldParentPtr(ast.Node.Suspend, "base", base);
- if (suspend_node.label) |label| {
- try stream.print("{}: ", self.tokenizer.getTokenSlice(label));
- }
- try stream.print("{}", self.tokenizer.getTokenSlice(suspend_node.suspend_token));
-
- if (suspend_node.body) |body| {
- try stack.append(RenderState { .Expression = body });
- try stack.append(RenderState { .Text = " " });
- }
-
- if (suspend_node.payload) |payload| {
- try stack.append(RenderState { .Expression = payload });
- try stack.append(RenderState { .Text = " " });
- }
- },
- ast.Node.Id.InfixOp => {
- const prefix_op_node = @fieldParentPtr(ast.Node.InfixOp, "base", base);
- try stack.append(RenderState { .Expression = prefix_op_node.rhs });
-
- if (prefix_op_node.op == ast.Node.InfixOp.Op.Catch) {
- if (prefix_op_node.op.Catch) |payload| {
- try stack.append(RenderState { .Text = " " });
- try stack.append(RenderState { .Expression = payload });
- }
- try stack.append(RenderState { .Text = " catch " });
- } else {
- const text = switch (prefix_op_node.op) {
- ast.Node.InfixOp.Op.Add => " + ",
- ast.Node.InfixOp.Op.AddWrap => " +% ",
- ast.Node.InfixOp.Op.ArrayCat => " ++ ",
- ast.Node.InfixOp.Op.ArrayMult => " ** ",
- ast.Node.InfixOp.Op.Assign => " = ",
- ast.Node.InfixOp.Op.AssignBitAnd => " &= ",
- ast.Node.InfixOp.Op.AssignBitOr => " |= ",
- ast.Node.InfixOp.Op.AssignBitShiftLeft => " <<= ",
- ast.Node.InfixOp.Op.AssignBitShiftRight => " >>= ",
- ast.Node.InfixOp.Op.AssignBitXor => " ^= ",
- ast.Node.InfixOp.Op.AssignDiv => " /= ",
- ast.Node.InfixOp.Op.AssignMinus => " -= ",
- ast.Node.InfixOp.Op.AssignMinusWrap => " -%= ",
- ast.Node.InfixOp.Op.AssignMod => " %= ",
- ast.Node.InfixOp.Op.AssignPlus => " += ",
- ast.Node.InfixOp.Op.AssignPlusWrap => " +%= ",
- ast.Node.InfixOp.Op.AssignTimes => " *= ",
- ast.Node.InfixOp.Op.AssignTimesWarp => " *%= ",
- ast.Node.InfixOp.Op.BangEqual => " != ",
- ast.Node.InfixOp.Op.BitAnd => " & ",
- ast.Node.InfixOp.Op.BitOr => " | ",
- ast.Node.InfixOp.Op.BitShiftLeft => " << ",
- ast.Node.InfixOp.Op.BitShiftRight => " >> ",
- ast.Node.InfixOp.Op.BitXor => " ^ ",
- ast.Node.InfixOp.Op.BoolAnd => " and ",
- ast.Node.InfixOp.Op.BoolOr => " or ",
- ast.Node.InfixOp.Op.Div => " / ",
- ast.Node.InfixOp.Op.EqualEqual => " == ",
- ast.Node.InfixOp.Op.ErrorUnion => "!",
- ast.Node.InfixOp.Op.GreaterOrEqual => " >= ",
- ast.Node.InfixOp.Op.GreaterThan => " > ",
- ast.Node.InfixOp.Op.LessOrEqual => " <= ",
- ast.Node.InfixOp.Op.LessThan => " < ",
- ast.Node.InfixOp.Op.MergeErrorSets => " || ",
- ast.Node.InfixOp.Op.Mod => " % ",
- ast.Node.InfixOp.Op.Mult => " * ",
- ast.Node.InfixOp.Op.MultWrap => " *% ",
- ast.Node.InfixOp.Op.Period => ".",
- ast.Node.InfixOp.Op.Sub => " - ",
- ast.Node.InfixOp.Op.SubWrap => " -% ",
- ast.Node.InfixOp.Op.UnwrapMaybe => " ?? ",
- ast.Node.InfixOp.Op.Range => " ... ",
- ast.Node.InfixOp.Op.Catch => unreachable,
- };
-
- try stack.append(RenderState { .Text = text });
- }
- try stack.append(RenderState { .Expression = prefix_op_node.lhs });
- },
- ast.Node.Id.PrefixOp => {
- const prefix_op_node = @fieldParentPtr(ast.Node.PrefixOp, "base", base);
- if (prefix_op_node.op != ast.Node.PrefixOp.Op.Deref) {
- try stack.append(RenderState { .Expression = prefix_op_node.rhs });
- }
- switch (prefix_op_node.op) {
- ast.Node.PrefixOp.Op.AddrOf => |addr_of_info| {
- try stream.write("&");
- if (addr_of_info.volatile_token != null) {
- try stack.append(RenderState { .Text = "volatile "});
- }
- if (addr_of_info.const_token != null) {
- try stack.append(RenderState { .Text = "const "});
- }
- if (addr_of_info.align_expr) |align_expr| {
- try stream.print("align(");
- try stack.append(RenderState { .Text = ") "});
- try stack.append(RenderState { .Expression = align_expr});
- }
- },
- ast.Node.PrefixOp.Op.SliceType => |addr_of_info| {
- try stream.write("[]");
- if (addr_of_info.volatile_token != null) {
- try stack.append(RenderState { .Text = "volatile "});
- }
- if (addr_of_info.const_token != null) {
- try stack.append(RenderState { .Text = "const "});
- }
- if (addr_of_info.align_expr) |align_expr| {
- try stream.print("align(");
- try stack.append(RenderState { .Text = ") "});
- try stack.append(RenderState { .Expression = align_expr});
- }
- },
- ast.Node.PrefixOp.Op.ArrayType => |array_index| {
- try stack.append(RenderState { .Text = "]"});
- try stack.append(RenderState { .Expression = array_index});
- try stack.append(RenderState { .Text = "["});
- },
- ast.Node.PrefixOp.Op.BitNot => try stream.write("~"),
- ast.Node.PrefixOp.Op.BoolNot => try stream.write("!"),
- ast.Node.PrefixOp.Op.Deref => {
- try stack.append(RenderState { .Text = ".*" });
- try stack.append(RenderState { .Expression = prefix_op_node.rhs });
- },
- ast.Node.PrefixOp.Op.Negation => try stream.write("-"),
- ast.Node.PrefixOp.Op.NegationWrap => try stream.write("-%"),
- ast.Node.PrefixOp.Op.Try => try stream.write("try "),
- ast.Node.PrefixOp.Op.UnwrapMaybe => try stream.write("??"),
- ast.Node.PrefixOp.Op.MaybeType => try stream.write("?"),
- ast.Node.PrefixOp.Op.Await => try stream.write("await "),
- ast.Node.PrefixOp.Op.Cancel => try stream.write("cancel "),
- ast.Node.PrefixOp.Op.Resume => try stream.write("resume "),
- }
- },
- ast.Node.Id.SuffixOp => {
- const suffix_op = @fieldParentPtr(ast.Node.SuffixOp, "base", base);
-
- switch (suffix_op.op) {
- ast.Node.SuffixOp.Op.Call => |call_info| {
- try stack.append(RenderState { .Text = ")"});
- var i = call_info.params.len;
- while (i != 0) {
- i -= 1;
- const param_node = call_info.params.at(i);
- try stack.append(RenderState { .Expression = param_node});
- if (i != 0) {
- try stack.append(RenderState { .Text = ", " });
- }
- }
- try stack.append(RenderState { .Text = "("});
- try stack.append(RenderState { .Expression = suffix_op.lhs });
-
- if (call_info.async_attr) |async_attr| {
- try stack.append(RenderState { .Text = " "});
- try stack.append(RenderState { .Expression = &async_attr.base });
- }
- },
- ast.Node.SuffixOp.Op.ArrayAccess => |index_expr| {
- try stack.append(RenderState { .Text = "]"});
- try stack.append(RenderState { .Expression = index_expr});
- try stack.append(RenderState { .Text = "["});
- try stack.append(RenderState { .Expression = suffix_op.lhs });
- },
- ast.Node.SuffixOp.Op.Slice => |range| {
- try stack.append(RenderState { .Text = "]"});
- if (range.end) |end| {
- try stack.append(RenderState { .Expression = end});
- }
- try stack.append(RenderState { .Text = ".."});
- try stack.append(RenderState { .Expression = range.start});
- try stack.append(RenderState { .Text = "["});
- try stack.append(RenderState { .Expression = suffix_op.lhs });
- },
- ast.Node.SuffixOp.Op.StructInitializer => |field_inits| {
- if (field_inits.len == 0) {
- try stack.append(RenderState { .Text = "{}" });
- try stack.append(RenderState { .Expression = suffix_op.lhs });
- continue;
- }
- if (field_inits.len == 1) {
- const field_init = field_inits.at(0);
-
- try stack.append(RenderState { .Text = " }" });
- try stack.append(RenderState { .Expression = field_init });
- try stack.append(RenderState { .Text = "{ " });
- try stack.append(RenderState { .Expression = suffix_op.lhs });
- continue;
- }
- try stack.append(RenderState { .Text = "}"});
- try stack.append(RenderState.PrintIndent);
- try stack.append(RenderState { .Indent = indent });
- try stack.append(RenderState { .Text = "\n" });
- var i = field_inits.len;
- while (i != 0) {
- i -= 1;
- const field_init = field_inits.at(i);
- if (field_init.id != ast.Node.Id.LineComment) {
- try stack.append(RenderState { .Text = "," });
- }
- try stack.append(RenderState { .Expression = field_init });
- try stack.append(RenderState.PrintIndent);
- if (i != 0) {
- try stack.append(RenderState { .Text = blk: {
- const prev_node = field_inits.at(i - 1);
- const loc = self.tokenizer.getTokenLocation(prev_node.lastToken().end, field_init.firstToken());
- if (loc.line >= 2) {
- break :blk "\n\n";
- }
- break :blk "\n";
- }});
- }
- }
- try stack.append(RenderState { .Indent = indent + indent_delta });
- try stack.append(RenderState { .Text = "{\n"});
- try stack.append(RenderState { .Expression = suffix_op.lhs });
- },
- ast.Node.SuffixOp.Op.ArrayInitializer => |exprs| {
- if (exprs.len == 0) {
- try stack.append(RenderState { .Text = "{}" });
- try stack.append(RenderState { .Expression = suffix_op.lhs });
- continue;
- }
- if (exprs.len == 1) {
- const expr = exprs.at(0);
-
- try stack.append(RenderState { .Text = "}" });
- try stack.append(RenderState { .Expression = expr });
- try stack.append(RenderState { .Text = "{" });
- try stack.append(RenderState { .Expression = suffix_op.lhs });
- continue;
- }
-
- try stack.append(RenderState { .Text = "}"});
- try stack.append(RenderState.PrintIndent);
- try stack.append(RenderState { .Indent = indent });
- var i = exprs.len;
- while (i != 0) {
- i -= 1;
- const expr = exprs.at(i);
- try stack.append(RenderState { .Text = ",\n" });
- try stack.append(RenderState { .Expression = expr });
- try stack.append(RenderState.PrintIndent);
- }
- try stack.append(RenderState { .Indent = indent + indent_delta });
- try stack.append(RenderState { .Text = "{\n"});
- try stack.append(RenderState { .Expression = suffix_op.lhs });
- },
- }
- },
- ast.Node.Id.ControlFlowExpression => {
- const flow_expr = @fieldParentPtr(ast.Node.ControlFlowExpression, "base", base);
-
- if (flow_expr.rhs) |rhs| {
- try stack.append(RenderState { .Expression = rhs });
- try stack.append(RenderState { .Text = " " });
- }
-
- switch (flow_expr.kind) {
- ast.Node.ControlFlowExpression.Kind.Break => |maybe_label| {
- try stream.print("break");
- if (maybe_label) |label| {
- try stream.print(" :");
- try stack.append(RenderState { .Expression = label });
- }
- },
- ast.Node.ControlFlowExpression.Kind.Continue => |maybe_label| {
- try stream.print("continue");
- if (maybe_label) |label| {
- try stream.print(" :");
- try stack.append(RenderState { .Expression = label });
- }
- },
- ast.Node.ControlFlowExpression.Kind.Return => {
- try stream.print("return");
- },
-
- }
- },
- ast.Node.Id.Payload => {
- const payload = @fieldParentPtr(ast.Node.Payload, "base", base);
- try stack.append(RenderState { .Text = "|"});
- try stack.append(RenderState { .Expression = payload.error_symbol });
- try stack.append(RenderState { .Text = "|"});
- },
- ast.Node.Id.PointerPayload => {
- const payload = @fieldParentPtr(ast.Node.PointerPayload, "base", base);
- try stack.append(RenderState { .Text = "|"});
- try stack.append(RenderState { .Expression = payload.value_symbol });
-
- if (payload.ptr_token) |ptr_token| {
- try stack.append(RenderState { .Text = self.tokenizer.getTokenSlice(ptr_token) });
- }
-
- try stack.append(RenderState { .Text = "|"});
- },
- ast.Node.Id.PointerIndexPayload => {
- const payload = @fieldParentPtr(ast.Node.PointerIndexPayload, "base", base);
- try stack.append(RenderState { .Text = "|"});
-
- if (payload.index_symbol) |index_symbol| {
- try stack.append(RenderState { .Expression = index_symbol });
- try stack.append(RenderState { .Text = ", "});
- }
-
- try stack.append(RenderState { .Expression = payload.value_symbol });
-
- if (payload.ptr_token) |ptr_token| {
- try stack.append(RenderState { .Text = self.tokenizer.getTokenSlice(ptr_token) });
- }
-
- try stack.append(RenderState { .Text = "|"});
- },
- ast.Node.Id.GroupedExpression => {
- const grouped_expr = @fieldParentPtr(ast.Node.GroupedExpression, "base", base);
- try stack.append(RenderState { .Text = ")"});
- try stack.append(RenderState { .Expression = grouped_expr.expr });
- try stack.append(RenderState { .Text = "("});
- },
- ast.Node.Id.FieldInitializer => {
- const field_init = @fieldParentPtr(ast.Node.FieldInitializer, "base", base);
- try stream.print(".{} = ", self.tokenizer.getTokenSlice(field_init.name_token));
- try stack.append(RenderState { .Expression = field_init.expr });
- },
- ast.Node.Id.IntegerLiteral => {
- const integer_literal = @fieldParentPtr(ast.Node.IntegerLiteral, "base", base);
- try stream.print("{}", self.tokenizer.getTokenSlice(integer_literal.token));
- },
- ast.Node.Id.FloatLiteral => {
- const float_literal = @fieldParentPtr(ast.Node.FloatLiteral, "base", base);
- try stream.print("{}", self.tokenizer.getTokenSlice(float_literal.token));
- },
- ast.Node.Id.StringLiteral => {
- const string_literal = @fieldParentPtr(ast.Node.StringLiteral, "base", base);
- try stream.print("{}", self.tokenizer.getTokenSlice(string_literal.token));
- },
- ast.Node.Id.CharLiteral => {
- const char_literal = @fieldParentPtr(ast.Node.CharLiteral, "base", base);
- try stream.print("{}", self.tokenizer.getTokenSlice(char_literal.token));
- },
- ast.Node.Id.BoolLiteral => {
- const bool_literal = @fieldParentPtr(ast.Node.CharLiteral, "base", base);
- try stream.print("{}", self.tokenizer.getTokenSlice(bool_literal.token));
- },
- ast.Node.Id.NullLiteral => {
- const null_literal = @fieldParentPtr(ast.Node.NullLiteral, "base", base);
- try stream.print("{}", self.tokenizer.getTokenSlice(null_literal.token));
- },
- ast.Node.Id.ThisLiteral => {
- const this_literal = @fieldParentPtr(ast.Node.ThisLiteral, "base", base);
- try stream.print("{}", self.tokenizer.getTokenSlice(this_literal.token));
- },
- ast.Node.Id.Unreachable => {
- const unreachable_node = @fieldParentPtr(ast.Node.Unreachable, "base", base);
- try stream.print("{}", self.tokenizer.getTokenSlice(unreachable_node.token));
- },
- ast.Node.Id.ErrorType => {
- const error_type = @fieldParentPtr(ast.Node.ErrorType, "base", base);
- try stream.print("{}", self.tokenizer.getTokenSlice(error_type.token));
- },
- ast.Node.Id.VarType => {
- const var_type = @fieldParentPtr(ast.Node.VarType, "base", base);
- try stream.print("{}", self.tokenizer.getTokenSlice(var_type.token));
- },
- ast.Node.Id.ContainerDecl => {
- const container_decl = @fieldParentPtr(ast.Node.ContainerDecl, "base", base);
-
- switch (container_decl.layout) {
- ast.Node.ContainerDecl.Layout.Packed => try stream.print("packed "),
- ast.Node.ContainerDecl.Layout.Extern => try stream.print("extern "),
- ast.Node.ContainerDecl.Layout.Auto => { },
- }
-
- switch (container_decl.kind) {
- ast.Node.ContainerDecl.Kind.Struct => try stream.print("struct"),
- ast.Node.ContainerDecl.Kind.Enum => try stream.print("enum"),
- ast.Node.ContainerDecl.Kind.Union => try stream.print("union"),
- }
-
- const fields_and_decls = container_decl.fields_and_decls.toSliceConst();
- if (fields_and_decls.len == 0) {
- try stack.append(RenderState { .Text = "{}"});
- } else {
- try stack.append(RenderState { .Text = "}"});
- try stack.append(RenderState.PrintIndent);
- try stack.append(RenderState { .Indent = indent });
- try stack.append(RenderState { .Text = "\n"});
-
- var i = fields_and_decls.len;
- while (i != 0) {
- i -= 1;
- const node = fields_and_decls[i];
- try stack.append(RenderState { .TopLevelDecl = node});
- try stack.append(RenderState.PrintIndent);
- try stack.append(RenderState {
- .Text = blk: {
- if (i != 0) {
- const prev_node = fields_and_decls[i - 1];
- const loc = self.tokenizer.getTokenLocation(prev_node.lastToken().end, node.firstToken());
- if (loc.line >= 2) {
- break :blk "\n\n";
- }
- }
- break :blk "\n";
- },
- });
- }
- try stack.append(RenderState { .Indent = indent + indent_delta});
- try stack.append(RenderState { .Text = "{"});
- }
-
- switch (container_decl.init_arg_expr) {
- ast.Node.ContainerDecl.InitArg.None => try stack.append(RenderState { .Text = " "}),
- ast.Node.ContainerDecl.InitArg.Enum => |enum_tag_type| {
- if (enum_tag_type) |expr| {
- try stack.append(RenderState { .Text = ")) "});
- try stack.append(RenderState { .Expression = expr});
- try stack.append(RenderState { .Text = "(enum("});
- } else {
- try stack.append(RenderState { .Text = "(enum) "});
- }
- },
- ast.Node.ContainerDecl.InitArg.Type => |type_expr| {
- try stack.append(RenderState { .Text = ") "});
- try stack.append(RenderState { .Expression = type_expr});
- try stack.append(RenderState { .Text = "("});
- },
- }
- },
- ast.Node.Id.ErrorSetDecl => {
- const err_set_decl = @fieldParentPtr(ast.Node.ErrorSetDecl, "base", base);
-
- const decls = err_set_decl.decls.toSliceConst();
- if (decls.len == 0) {
- try stream.write("error{}");
- continue;
- }
-
- if (decls.len == 1) blk: {
- const node = decls[0];
-
- // if there are any doc comments or same line comments
- // don't try to put it all on one line
- if (node.same_line_comment != null) break :blk;
- if (node.cast(ast.Node.ErrorTag)) |tag| {
- if (tag.doc_comments != null) break :blk;
- } else {
- break :blk;
- }
-
-
- try stream.write("error{");
- try stack.append(RenderState { .Text = "}" });
- try stack.append(RenderState { .TopLevelDecl = node });
- continue;
- }
-
- try stream.write("error{");
-
- try stack.append(RenderState { .Text = "}"});
- try stack.append(RenderState.PrintIndent);
- try stack.append(RenderState { .Indent = indent });
- try stack.append(RenderState { .Text = "\n"});
-
- var i = decls.len;
- while (i != 0) {
- i -= 1;
- const node = decls[i];
- if (node.id != ast.Node.Id.LineComment) {
- try stack.append(RenderState { .Text = "," });
- }
- try stack.append(RenderState { .TopLevelDecl = node });
- try stack.append(RenderState.PrintIndent);
- try stack.append(RenderState {
- .Text = blk: {
- if (i != 0) {
- const prev_node = decls[i - 1];
- const loc = self.tokenizer.getTokenLocation(prev_node.lastToken().end, node.firstToken());
- if (loc.line >= 2) {
- break :blk "\n\n";
- }
- }
- break :blk "\n";
- },
- });
- }
- try stack.append(RenderState { .Indent = indent + indent_delta});
- },
- ast.Node.Id.MultilineStringLiteral => {
- const multiline_str_literal = @fieldParentPtr(ast.Node.MultilineStringLiteral, "base", base);
- try stream.print("\n");
-
- var i : usize = 0;
- while (i < multiline_str_literal.tokens.len) : (i += 1) {
- const t = multiline_str_literal.tokens.at(i);
- try stream.writeByteNTimes(' ', indent + indent_delta);
- try stream.print("{}", self.tokenizer.getTokenSlice(t));
- }
- try stream.writeByteNTimes(' ', indent);
- },
- ast.Node.Id.UndefinedLiteral => {
- const undefined_literal = @fieldParentPtr(ast.Node.UndefinedLiteral, "base", base);
- try stream.print("{}", self.tokenizer.getTokenSlice(undefined_literal.token));
- },
- ast.Node.Id.BuiltinCall => {
- const builtin_call = @fieldParentPtr(ast.Node.BuiltinCall, "base", base);
- try stream.print("{}(", self.tokenizer.getTokenSlice(builtin_call.builtin_token));
- try stack.append(RenderState { .Text = ")"});
- var i = builtin_call.params.len;
- while (i != 0) {
- i -= 1;
- const param_node = builtin_call.params.at(i);
- try stack.append(RenderState { .Expression = param_node});
- if (i != 0) {
- try stack.append(RenderState { .Text = ", " });
- }
- }
- },
- ast.Node.Id.FnProto => {
- const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", base);
-
- switch (fn_proto.return_type) {
- ast.Node.FnProto.ReturnType.Explicit => |node| {
- try stack.append(RenderState { .Expression = node});
- },
- ast.Node.FnProto.ReturnType.InferErrorSet => |node| {
- try stack.append(RenderState { .Expression = node});
- try stack.append(RenderState { .Text = "!"});
- },
- }
-
- if (fn_proto.align_expr) |align_expr| {
- try stack.append(RenderState { .Text = ") " });
- try stack.append(RenderState { .Expression = align_expr});
- try stack.append(RenderState { .Text = "align(" });
- }
-
- try stack.append(RenderState { .Text = ") " });
- var i = fn_proto.params.len;
- while (i != 0) {
- i -= 1;
- const param_decl_node = fn_proto.params.items[i];
- try stack.append(RenderState { .ParamDecl = param_decl_node});
- if (i != 0) {
- try stack.append(RenderState { .Text = ", " });
- }
- }
-
- try stack.append(RenderState { .Text = "(" });
- if (fn_proto.name_token) |name_token| {
- try stack.append(RenderState { .Text = self.tokenizer.getTokenSlice(name_token) });
- try stack.append(RenderState { .Text = " " });
- }
-
- try stack.append(RenderState { .Text = "fn" });
-
- if (fn_proto.async_attr) |async_attr| {
- try stack.append(RenderState { .Text = " " });
- try stack.append(RenderState { .Expression = &async_attr.base });
- }
-
- if (fn_proto.cc_token) |cc_token| {
- try stack.append(RenderState { .Text = " " });
- try stack.append(RenderState { .Text = self.tokenizer.getTokenSlice(cc_token) });
- }
-
- if (fn_proto.lib_name) |lib_name| {
- try stack.append(RenderState { .Text = " " });
- try stack.append(RenderState { .Expression = lib_name });
- }
- if (fn_proto.extern_export_inline_token) |extern_export_inline_token| {
- try stack.append(RenderState { .Text = " " });
- try stack.append(RenderState { .Text = self.tokenizer.getTokenSlice(extern_export_inline_token) });
- }
-
- if (fn_proto.visib_token) |visib_token| {
- assert(visib_token.id == Token.Id.Keyword_pub or visib_token.id == Token.Id.Keyword_export);
- try stack.append(RenderState { .Text = " " });
- try stack.append(RenderState { .Text = self.tokenizer.getTokenSlice(visib_token) });
- }
- },
- ast.Node.Id.PromiseType => {
- const promise_type = @fieldParentPtr(ast.Node.PromiseType, "base", base);
- try stream.write(self.tokenizer.getTokenSlice(promise_type.promise_token));
- if (promise_type.result) |result| {
- try stream.write(self.tokenizer.getTokenSlice(result.arrow_token));
- try stack.append(RenderState { .Expression = result.return_type});
- }
- },
- ast.Node.Id.LineComment => {
- const line_comment_node = @fieldParentPtr(ast.Node.LineComment, "base", base);
- try stream.write(self.tokenizer.getTokenSlice(line_comment_node.token));
- },
- ast.Node.Id.DocComment => unreachable, // doc comments are attached to nodes
- ast.Node.Id.Switch => {
- const switch_node = @fieldParentPtr(ast.Node.Switch, "base", base);
- const cases = switch_node.cases.toSliceConst();
-
- try stream.print("{} (", self.tokenizer.getTokenSlice(switch_node.switch_token));
-
- if (cases.len == 0) {
- try stack.append(RenderState { .Text = ") {}"});
- try stack.append(RenderState { .Expression = switch_node.expr });
- continue;
- }
-
- try stack.append(RenderState { .Text = "}"});
- try stack.append(RenderState.PrintIndent);
- try stack.append(RenderState { .Indent = indent });
- try stack.append(RenderState { .Text = "\n"});
-
- var i = cases.len;
- while (i != 0) {
- i -= 1;
- const node = cases[i];
- try stack.append(RenderState { .Expression = node});
- try stack.append(RenderState.PrintIndent);
- try stack.append(RenderState {
- .Text = blk: {
- if (i != 0) {
- const prev_node = cases[i - 1];
- const loc = self.tokenizer.getTokenLocation(prev_node.lastToken().end, node.firstToken());
- if (loc.line >= 2) {
- break :blk "\n\n";
- }
- }
- break :blk "\n";
- },
- });
- }
- try stack.append(RenderState { .Indent = indent + indent_delta});
- try stack.append(RenderState { .Text = ") {"});
- try stack.append(RenderState { .Expression = switch_node.expr });
- },
- ast.Node.Id.SwitchCase => {
- const switch_case = @fieldParentPtr(ast.Node.SwitchCase, "base", base);
-
- try stack.append(RenderState { .PrintSameLineComment = base.same_line_comment });
- try stack.append(RenderState { .Text = "," });
- try stack.append(RenderState { .Expression = switch_case.expr });
- if (switch_case.payload) |payload| {
- try stack.append(RenderState { .Text = " " });
- try stack.append(RenderState { .Expression = payload });
- }
- try stack.append(RenderState { .Text = " => "});
-
- const items = switch_case.items.toSliceConst();
- var i = items.len;
- while (i != 0) {
- i -= 1;
- try stack.append(RenderState { .Expression = items[i] });
-
- if (i != 0) {
- try stack.append(RenderState.PrintIndent);
- try stack.append(RenderState { .Text = ",\n" });
- }
- }
- },
- ast.Node.Id.SwitchElse => {
- const switch_else = @fieldParentPtr(ast.Node.SwitchElse, "base", base);
- try stream.print("{}", self.tokenizer.getTokenSlice(switch_else.token));
- },
- ast.Node.Id.Else => {
- const else_node = @fieldParentPtr(ast.Node.Else, "base", base);
- try stream.print("{}", self.tokenizer.getTokenSlice(else_node.else_token));
-
- switch (else_node.body.id) {
- ast.Node.Id.Block, ast.Node.Id.If,
- ast.Node.Id.For, ast.Node.Id.While,
- ast.Node.Id.Switch => {
- try stream.print(" ");
- try stack.append(RenderState { .Expression = else_node.body });
- },
- else => {
- try stack.append(RenderState { .Indent = indent });
- try stack.append(RenderState { .Expression = else_node.body });
- try stack.append(RenderState.PrintIndent);
- try stack.append(RenderState { .Indent = indent + indent_delta });
- try stack.append(RenderState { .Text = "\n" });
- }
- }
-
- if (else_node.payload) |payload| {
- try stack.append(RenderState { .Text = " " });
- try stack.append(RenderState { .Expression = payload });
- }
- },
- ast.Node.Id.While => {
- const while_node = @fieldParentPtr(ast.Node.While, "base", base);
- if (while_node.label) |label| {
- try stream.print("{}: ", self.tokenizer.getTokenSlice(label));
- }
-
- if (while_node.inline_token) |inline_token| {
- try stream.print("{} ", self.tokenizer.getTokenSlice(inline_token));
- }
-
- try stream.print("{} ", self.tokenizer.getTokenSlice(while_node.while_token));
-
- if (while_node.@"else") |@"else"| {
- try stack.append(RenderState { .Expression = &@"else".base });
-
- if (while_node.body.id == ast.Node.Id.Block) {
- try stack.append(RenderState { .Text = " " });
- } else {
- try stack.append(RenderState.PrintIndent);
- try stack.append(RenderState { .Text = "\n" });
- }
- }
-
- if (while_node.body.id == ast.Node.Id.Block) {
- try stack.append(RenderState { .Expression = while_node.body });
- try stack.append(RenderState { .Text = " " });
- } else {
- try stack.append(RenderState { .Indent = indent });
- try stack.append(RenderState { .Expression = while_node.body });
- try stack.append(RenderState.PrintIndent);
- try stack.append(RenderState { .Indent = indent + indent_delta });
- try stack.append(RenderState { .Text = "\n" });
- }
-
- if (while_node.continue_expr) |continue_expr| {
- try stack.append(RenderState { .Text = ")" });
- try stack.append(RenderState { .Expression = continue_expr });
- try stack.append(RenderState { .Text = ": (" });
- try stack.append(RenderState { .Text = " " });
- }
-
- if (while_node.payload) |payload| {
- try stack.append(RenderState { .Expression = payload });
- try stack.append(RenderState { .Text = " " });
- }
-
- try stack.append(RenderState { .Text = ")" });
- try stack.append(RenderState { .Expression = while_node.condition });
- try stack.append(RenderState { .Text = "(" });
- },
- ast.Node.Id.For => {
- const for_node = @fieldParentPtr(ast.Node.For, "base", base);
- if (for_node.label) |label| {
- try stream.print("{}: ", self.tokenizer.getTokenSlice(label));
- }
-
- if (for_node.inline_token) |inline_token| {
- try stream.print("{} ", self.tokenizer.getTokenSlice(inline_token));
- }
-
- try stream.print("{} ", self.tokenizer.getTokenSlice(for_node.for_token));
-
- if (for_node.@"else") |@"else"| {
- try stack.append(RenderState { .Expression = &@"else".base });
-
- if (for_node.body.id == ast.Node.Id.Block) {
- try stack.append(RenderState { .Text = " " });
- } else {
- try stack.append(RenderState.PrintIndent);
- try stack.append(RenderState { .Text = "\n" });
- }
- }
-
- if (for_node.body.id == ast.Node.Id.Block) {
- try stack.append(RenderState { .Expression = for_node.body });
- try stack.append(RenderState { .Text = " " });
- } else {
- try stack.append(RenderState { .Indent = indent });
- try stack.append(RenderState { .Expression = for_node.body });
- try stack.append(RenderState.PrintIndent);
- try stack.append(RenderState { .Indent = indent + indent_delta });
- try stack.append(RenderState { .Text = "\n" });
- }
-
- if (for_node.payload) |payload| {
- try stack.append(RenderState { .Expression = payload });
- try stack.append(RenderState { .Text = " " });
- }
-
- try stack.append(RenderState { .Text = ")" });
- try stack.append(RenderState { .Expression = for_node.array_expr });
- try stack.append(RenderState { .Text = "(" });
- },
- ast.Node.Id.If => {
- const if_node = @fieldParentPtr(ast.Node.If, "base", base);
- try stream.print("{} ", self.tokenizer.getTokenSlice(if_node.if_token));
-
- switch (if_node.body.id) {
- ast.Node.Id.Block, ast.Node.Id.If,
- ast.Node.Id.For, ast.Node.Id.While,
- ast.Node.Id.Switch => {
- if (if_node.@"else") |@"else"| {
- try stack.append(RenderState { .Expression = &@"else".base });
-
- if (if_node.body.id == ast.Node.Id.Block) {
- try stack.append(RenderState { .Text = " " });
- } else {
- try stack.append(RenderState.PrintIndent);
- try stack.append(RenderState { .Text = "\n" });
- }
- }
- },
- else => {
- if (if_node.@"else") |@"else"| {
- try stack.append(RenderState { .Expression = @"else".body });
-
- if (@"else".payload) |payload| {
- try stack.append(RenderState { .Text = " " });
- try stack.append(RenderState { .Expression = payload });
- }
-
- try stack.append(RenderState { .Text = " " });
- try stack.append(RenderState { .Text = self.tokenizer.getTokenSlice(@"else".else_token) });
- try stack.append(RenderState { .Text = " " });
- }
- }
- }
-
- if (if_node.condition.same_line_comment) |comment| {
- try stack.append(RenderState { .Indent = indent });
- try stack.append(RenderState { .Expression = if_node.body });
- try stack.append(RenderState.PrintIndent);
- try stack.append(RenderState { .Indent = indent + indent_delta });
- try stack.append(RenderState { .Text = "\n" });
- try stack.append(RenderState { .PrintLineComment = comment });
- } else {
- try stack.append(RenderState { .Expression = if_node.body });
- }
-
-
- try stack.append(RenderState { .Text = " " });
-
- if (if_node.payload) |payload| {
- try stack.append(RenderState { .Expression = payload });
- try stack.append(RenderState { .Text = " " });
- }
-
- try stack.append(RenderState { .Text = ")" });
- try stack.append(RenderState { .Expression = if_node.condition });
- try stack.append(RenderState { .Text = "(" });
- },
- ast.Node.Id.Asm => {
- const asm_node = @fieldParentPtr(ast.Node.Asm, "base", base);
- try stream.print("{} ", self.tokenizer.getTokenSlice(asm_node.asm_token));
-
- if (asm_node.volatile_token) |volatile_token| {
- try stream.print("{} ", self.tokenizer.getTokenSlice(volatile_token));
- }
-
- try stack.append(RenderState { .Indent = indent });
- try stack.append(RenderState { .Text = ")" });
- {
- const cloppers = asm_node.cloppers.toSliceConst();
- var i = cloppers.len;
- while (i != 0) {
- i -= 1;
- try stack.append(RenderState { .Expression = cloppers[i] });
-
- if (i != 0) {
- try stack.append(RenderState { .Text = ", " });
- }
- }
- }
- try stack.append(RenderState { .Text = ": " });
- try stack.append(RenderState.PrintIndent);
- try stack.append(RenderState { .Indent = indent + indent_delta });
- try stack.append(RenderState { .Text = "\n" });
- {
- const inputs = asm_node.inputs.toSliceConst();
- var i = inputs.len;
- while (i != 0) {
- i -= 1;
- const node = inputs[i];
- try stack.append(RenderState { .Expression = &node.base});
-
- if (i != 0) {
- try stack.append(RenderState.PrintIndent);
- try stack.append(RenderState {
- .Text = blk: {
- const prev_node = inputs[i - 1];
- const loc = self.tokenizer.getTokenLocation(prev_node.lastToken().end, node.firstToken());
- if (loc.line >= 2) {
- break :blk "\n\n";
- }
- break :blk "\n";
- },
- });
- try stack.append(RenderState { .Text = "," });
- }
- }
- }
- try stack.append(RenderState { .Indent = indent + indent_delta + 2});
- try stack.append(RenderState { .Text = ": "});
- try stack.append(RenderState.PrintIndent);
- try stack.append(RenderState { .Indent = indent + indent_delta});
- try stack.append(RenderState { .Text = "\n" });
- {
- const outputs = asm_node.outputs.toSliceConst();
- var i = outputs.len;
- while (i != 0) {
- i -= 1;
- const node = outputs[i];
- try stack.append(RenderState { .Expression = &node.base});
-
- if (i != 0) {
- try stack.append(RenderState.PrintIndent);
- try stack.append(RenderState {
- .Text = blk: {
- const prev_node = outputs[i - 1];
- const loc = self.tokenizer.getTokenLocation(prev_node.lastToken().end, node.firstToken());
- if (loc.line >= 2) {
- break :blk "\n\n";
- }
- break :blk "\n";
- },
- });
- try stack.append(RenderState { .Text = "," });
- }
- }
- }
- try stack.append(RenderState { .Indent = indent + indent_delta + 2});
- try stack.append(RenderState { .Text = ": "});
- try stack.append(RenderState.PrintIndent);
- try stack.append(RenderState { .Indent = indent + indent_delta});
- try stack.append(RenderState { .Text = "\n" });
- try stack.append(RenderState { .Expression = asm_node.template });
- try stack.append(RenderState { .Text = "(" });
- },
- ast.Node.Id.AsmInput => {
- const asm_input = @fieldParentPtr(ast.Node.AsmInput, "base", base);
-
- try stack.append(RenderState { .Text = ")"});
- try stack.append(RenderState { .Expression = asm_input.expr});
- try stack.append(RenderState { .Text = " ("});
- try stack.append(RenderState { .Expression = asm_input.constraint });
- try stack.append(RenderState { .Text = "] "});
- try stack.append(RenderState { .Expression = asm_input.symbolic_name });
- try stack.append(RenderState { .Text = "["});
- },
- ast.Node.Id.AsmOutput => {
- const asm_output = @fieldParentPtr(ast.Node.AsmOutput, "base", base);
-
- try stack.append(RenderState { .Text = ")"});
- switch (asm_output.kind) {
- ast.Node.AsmOutput.Kind.Variable => |variable_name| {
- try stack.append(RenderState { .Expression = &variable_name.base});
- },
- ast.Node.AsmOutput.Kind.Return => |return_type| {
- try stack.append(RenderState { .Expression = return_type});
- try stack.append(RenderState { .Text = "-> "});
- },
- }
- try stack.append(RenderState { .Text = " ("});
- try stack.append(RenderState { .Expression = asm_output.constraint });
- try stack.append(RenderState { .Text = "] "});
- try stack.append(RenderState { .Expression = asm_output.symbolic_name });
- try stack.append(RenderState { .Text = "["});
- },
-
- ast.Node.Id.StructField,
- ast.Node.Id.UnionTag,
- ast.Node.Id.EnumTag,
- ast.Node.Id.ErrorTag,
- ast.Node.Id.Root,
- ast.Node.Id.VarDecl,
- ast.Node.Id.Use,
- ast.Node.Id.TestDecl,
- ast.Node.Id.ParamDecl => unreachable,
- },
- RenderState.Statement => |base| {
- try stack.append(RenderState { .PrintSameLineComment = base.same_line_comment } );
- switch (base.id) {
- ast.Node.Id.VarDecl => {
- const var_decl = @fieldParentPtr(ast.Node.VarDecl, "base", base);
- try stack.append(RenderState { .VarDecl = var_decl});
- },
- else => {
- if (requireSemiColon(base)) {
- try stack.append(RenderState { .Text = ";" });
- }
- try stack.append(RenderState { .Expression = base });
- },
- }
- },
- RenderState.Indent => |new_indent| indent = new_indent,
- RenderState.PrintIndent => try stream.writeByteNTimes(' ', indent),
- RenderState.PrintSameLineComment => |maybe_comment| blk: {
- const comment_token = maybe_comment ?? break :blk;
- try stream.print(" {}", self.tokenizer.getTokenSlice(comment_token));
- },
- RenderState.PrintLineComment => |comment_token| {
- try stream.write(self.tokenizer.getTokenSlice(comment_token));
- },
- }
- }
- }
-
- fn renderComments(self: &Parser, stream: var, node: var, indent: usize) !void {
- const comment = node.doc_comments ?? return;
- for (comment.lines.toSliceConst()) |line_token| {
- try stream.print("{}\n", self.tokenizer.getTokenSlice(line_token));
- try stream.writeByteNTimes(' ', indent);
- }
- }
-
- fn initUtilityArrayList(self: &Parser, comptime T: type) ArrayList(T) {
- const new_byte_count = self.utility_bytes.len - self.utility_bytes.len % @sizeOf(T);
- self.utility_bytes = self.util_allocator.alignedShrink(u8, utility_bytes_align, self.utility_bytes, new_byte_count);
- const typed_slice = ([]T)(self.utility_bytes);
- return ArrayList(T) {
- .allocator = self.util_allocator,
- .items = typed_slice,
- .len = 0,
- };
- }
-
- fn deinitUtilityArrayList(self: &Parser, list: var) void {
- self.utility_bytes = ([]align(utility_bytes_align) u8)(list.items);
- }
-
-};
-
-test "std.zig.parser" {
- _ = @import("parser_test.zig");
-}
diff --git a/std/zig/parser_test.zig b/std/zig/parser_test.zig
index e1d75d8380..29b231a4db 100644
--- a/std/zig/parser_test.zig
+++ b/std/zig/parser_test.zig
@@ -1,19 +1,53 @@
-test "zig fmt: same-line comment after non-block if expression" {
+test "zig fmt: same-line comment after a statement" {
try testCanonical(
- \\comptime {
- \\ if (sr > n_uword_bits - 1) {
- \\ // d > r
- \\ return 0;
+ \\test "" {
+ \\ a = b;
+ \\ debug.assert(H.digest_size <= H.block_size); // HMAC makes this assumption
+ \\ a = b;
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: same-line comment after var decl in struct" {
+ try testCanonical(
+ \\pub const vfs_cap_data = extern struct {
+ \\ const Data = struct {}; // when on disk.
+ \\};
+ \\
+ );
+}
+
+test "zig fmt: same-line comment after field decl" {
+ try testCanonical(
+ \\pub const dirent = extern struct {
+ \\ d_name: u8,
+ \\ d_name: u8, // comment 1
+ \\ d_name: u8,
+ \\ d_name: u8, // comment 2
+ \\ d_name: u8,
+ \\};
+ \\
+ );
+}
+
+test "zig fmt: same-line comment after switch prong" {
+ try testCanonical(
+ \\test "" {
+ \\ switch (err) {
+ \\ error.PathAlreadyExists => {}, // comment 2
+ \\ else => return err, // comment 1
\\ }
\\}
\\
);
}
-test "zig fmt: switch with empty body" {
+test "zig fmt: same-line comment after non-block if expression" {
try testCanonical(
- \\test "" {
- \\ foo() catch |err| switch (err) {};
+ \\comptime {
+ \\ if (sr > n_uword_bits - 1) // d > r
+ \\ return 0;
\\}
\\
);
@@ -28,6 +62,15 @@ test "zig fmt: same-line comment on comptime expression" {
);
}
+test "zig fmt: switch with empty body" {
+ try testCanonical(
+ \\test "" {
+ \\ foo() catch |err| switch (err) {};
+ \\}
+ \\
+ );
+}
+
test "zig fmt: float literal with exponent" {
try testCanonical(
\\pub const f64_true_min = 4.94065645841246544177e-324;
@@ -154,18 +197,6 @@ test "zig fmt: comments before switch prong" {
);
}
-test "zig fmt: same-line comment after switch prong" {
- try testCanonical(
- \\test "" {
- \\ switch (err) {
- \\ error.PathAlreadyExists => {}, // comment 2
- \\ else => return err, // comment 1
- \\ }
- \\}
- \\
- );
-}
-
test "zig fmt: comments before var decl in struct" {
try testCanonical(
\\pub const vfs_cap_data = extern struct {
@@ -191,28 +222,6 @@ test "zig fmt: comments before var decl in struct" {
);
}
-test "zig fmt: same-line comment after var decl in struct" {
- try testCanonical(
- \\pub const vfs_cap_data = extern struct {
- \\ const Data = struct {}; // when on disk.
- \\};
- \\
- );
-}
-
-test "zig fmt: same-line comment after field decl" {
- try testCanonical(
- \\pub const dirent = extern struct {
- \\ d_name: u8,
- \\ d_name: u8, // comment 1
- \\ d_name: u8,
- \\ d_name: u8, // comment 2
- \\ d_name: u8,
- \\};
- \\
- );
-}
-
test "zig fmt: array literal with 1 item on 1 line" {
try testCanonical(
\\var s = []const u64{0} ** 25;
@@ -220,17 +229,6 @@ test "zig fmt: array literal with 1 item on 1 line" {
);
}
-test "zig fmt: same-line comment after a statement" {
- try testCanonical(
- \\test "" {
- \\ a = b;
- \\ debug.assert(H.digest_size <= H.block_size); // HMAC makes this assumption
- \\ a = b;
- \\}
- \\
- );
-}
-
test "zig fmt: comments before global variables" {
try testCanonical(
\\/// Foo copies keys and values before they go into the map, and
@@ -1094,25 +1092,48 @@ test "zig fmt: error return" {
const std = @import("std");
const mem = std.mem;
const warn = std.debug.warn;
-const Tokenizer = std.zig.Tokenizer;
-const Parser = std.zig.Parser;
const io = std.io;
var fixed_buffer_mem: [100 * 1024]u8 = undefined;
fn testParse(source: []const u8, allocator: &mem.Allocator) ![]u8 {
- var tokenizer = Tokenizer.init(source);
- var parser = Parser.init(&tokenizer, allocator, "(memory buffer)");
- defer parser.deinit();
+ var stderr_file = try io.getStdErr();
+ var stderr = &io.FileOutStream.init(&stderr_file).stream;
- var tree = try parser.parse();
+ var tree = try std.zig.parse(allocator, source);
defer tree.deinit();
+ var error_it = tree.errors.iterator(0);
+ while (error_it.next()) |parse_error| {
+ const token = tree.tokens.at(parse_error.loc());
+ const loc = tree.tokenLocation(0, parse_error.loc());
+ try stderr.print("(memory buffer):{}:{}: error: ", loc.line + 1, loc.column + 1);
+ try tree.renderError(parse_error, stderr);
+ try stderr.print("\n{}\n", source[loc.line_start..loc.line_end]);
+ {
+ var i: usize = 0;
+ while (i < loc.column) : (i += 1) {
+ try stderr.write(" ");
+ }
+ }
+ {
+ const caret_count = token.end - token.start;
+ var i: usize = 0;
+ while (i < caret_count) : (i += 1) {
+ try stderr.write("~");
+ }
+ }
+ try stderr.write("\n");
+ }
+ if (tree.errors.len != 0) {
+ return error.ParseError;
+ }
+
var buffer = try std.Buffer.initSize(allocator, 0);
errdefer buffer.deinit();
var buffer_out_stream = io.BufferOutStream.init(&buffer);
- try parser.renderSource(&buffer_out_stream.stream, tree.root_node);
+ try std.zig.render(allocator, &buffer_out_stream.stream, &tree);
return buffer.toOwnedSlice();
}
@@ -1151,6 +1172,7 @@ fn testTransform(source: []const u8, expected_source: []const u8) !void {
}
},
error.ParseError => @panic("test failed"),
+ else => @panic("test failed"),
}
}
}
diff --git a/std/zig/render.zig b/std/zig/render.zig
new file mode 100644
index 0000000000..24a6566b56
--- /dev/null
+++ b/std/zig/render.zig
@@ -0,0 +1,1227 @@
+const std = @import("../index.zig");
+const builtin = @import("builtin");
+const assert = std.debug.assert;
+const mem = std.mem;
+const ast = std.zig.ast;
+const Token = std.zig.Token;
+
+const indent_delta = 4;
+
+pub const Error = error {
+ /// Ran out of memory allocating call stack frames to complete rendering.
+ OutOfMemory,
+};
+
+pub fn render(allocator: &mem.Allocator, stream: var, tree: &ast.Tree) (@typeOf(stream).Child.Error || Error)!void {
+ comptime assert(@typeId(@typeOf(stream)) == builtin.TypeId.Pointer);
+
+ var it = tree.root_node.decls.iterator(0);
+ while (it.next()) |decl| {
+ try renderTopLevelDecl(allocator, stream, tree, 0, *decl);
+ if (it.peek()) |next_decl| {
+ const n = if (nodeLineOffset(tree, *decl, *next_decl) >= 2) u8(2) else u8(1);
+ try stream.writeByteNTimes('\n', n);
+ }
+ }
+ try stream.write("\n");
+}
+
+fn nodeLineOffset(tree: &ast.Tree, a: &ast.Node, b: &ast.Node) usize {
+ const a_last_token = tree.tokens.at(a.lastToken());
+ const loc = tree.tokenLocation(a_last_token.end, b.firstToken());
+ return loc.line;
+}
+
+fn renderTopLevelDecl(allocator: &mem.Allocator, stream: var, tree: &ast.Tree, indent: usize, decl: &ast.Node) (@typeOf(stream).Child.Error || Error)!void {
+ switch (decl.id) {
+ ast.Node.Id.FnProto => {
+ const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", decl);
+
+ try renderComments(tree, stream, fn_proto, indent);
+ try renderExpression(allocator, stream, tree, indent, decl);
+
+ if (fn_proto.body_node) |body_node| {
+ try stream.write(" ");
+ try renderExpression(allocator, stream, tree, indent, body_node);
+ } else {
+ try stream.write(";");
+ }
+ },
+ ast.Node.Id.Use => {
+ const use_decl = @fieldParentPtr(ast.Node.Use, "base", decl);
+
+ if (use_decl.visib_token) |visib_token| {
+ try stream.print("{} ", tree.tokenSlice(visib_token));
+ }
+ try stream.write("use ");
+ try renderExpression(allocator, stream, tree, indent, use_decl.expr);
+ try stream.write(";");
+ },
+ ast.Node.Id.VarDecl => {
+ const var_decl = @fieldParentPtr(ast.Node.VarDecl, "base", decl);
+
+ try renderComments(tree, stream, var_decl, indent);
+ try renderVarDecl(allocator, stream, tree, indent, var_decl);
+ },
+ ast.Node.Id.TestDecl => {
+ const test_decl = @fieldParentPtr(ast.Node.TestDecl, "base", decl);
+
+ try renderComments(tree, stream, test_decl, indent);
+ try stream.write("test ");
+ try renderExpression(allocator, stream, tree, indent, test_decl.name);
+ try stream.write(" ");
+ try renderExpression(allocator, stream, tree, indent, test_decl.body_node);
+ },
+ ast.Node.Id.StructField => {
+ const field = @fieldParentPtr(ast.Node.StructField, "base", decl);
+
+ try renderComments(tree, stream, field, indent);
+ if (field.visib_token) |visib_token| {
+ try stream.print("{} ", tree.tokenSlice(visib_token));
+ }
+ try stream.print("{}: ", tree.tokenSlice(field.name_token));
+ try renderExpression(allocator, stream, tree, indent, field.type_expr);
+ try renderToken(tree, stream, field.lastToken() + 1, indent, true);
+ },
+ ast.Node.Id.UnionTag => {
+ const tag = @fieldParentPtr(ast.Node.UnionTag, "base", decl);
+
+ try renderComments(tree, stream, tag, indent);
+ try stream.print("{}", tree.tokenSlice(tag.name_token));
+
+ if (tag.type_expr) |type_expr| {
+ try stream.print(": ");
+ try renderExpression(allocator, stream, tree, indent, type_expr);
+ }
+
+ if (tag.value_expr) |value_expr| {
+ try stream.print(" = ");
+ try renderExpression(allocator, stream, tree, indent, value_expr);
+ }
+
+ try stream.write(",");
+ },
+ ast.Node.Id.EnumTag => {
+ const tag = @fieldParentPtr(ast.Node.EnumTag, "base", decl);
+
+ try renderComments(tree, stream, tag, indent);
+ try stream.print("{}", tree.tokenSlice(tag.name_token));
+
+ if (tag.value) |value| {
+ try stream.print(" = ");
+ try renderExpression(allocator, stream, tree, indent, value);
+ }
+
+ try stream.write(",");
+ },
+ ast.Node.Id.ErrorTag => {
+ const tag = @fieldParentPtr(ast.Node.ErrorTag, "base", decl);
+
+ try renderComments(tree, stream, tag, indent);
+ try stream.print("{}", tree.tokenSlice(tag.name_token));
+ },
+ ast.Node.Id.Comptime => {
+ try renderExpression(allocator, stream, tree, indent, decl);
+ try maybeRenderSemicolon(stream, tree, indent, decl);
+ },
+ ast.Node.Id.LineComment => {
+ const line_comment_node = @fieldParentPtr(ast.Node.LineComment, "base", decl);
+
+ try stream.write(tree.tokenSlice(line_comment_node.token));
+ },
+ else => unreachable,
+ }
+}
+
+fn renderExpression(allocator: &mem.Allocator, stream: var, tree: &ast.Tree, indent: usize, base: &ast.Node) (@typeOf(stream).Child.Error || Error)!void {
+ switch (base.id) {
+ ast.Node.Id.Identifier => {
+ const identifier = @fieldParentPtr(ast.Node.Identifier, "base", base);
+ try stream.print("{}", tree.tokenSlice(identifier.token));
+ },
+ ast.Node.Id.Block => {
+ const block = @fieldParentPtr(ast.Node.Block, "base", base);
+ if (block.label) |label| {
+ try stream.print("{}: ", tree.tokenSlice(label));
+ }
+
+ if (block.statements.len == 0) {
+ try stream.write("{}");
+ } else {
+ try stream.write("{\n");
+ const block_indent = indent + indent_delta;
+
+ var it = block.statements.iterator(0);
+ while (it.next()) |statement| {
+ try stream.writeByteNTimes(' ', block_indent);
+ try renderStatement(allocator, stream, tree, block_indent, *statement);
+
+ if (it.peek()) |next_statement| {
+ const n = if (nodeLineOffset(tree, *statement, *next_statement) >= 2) u8(2) else u8(1);
+ try stream.writeByteNTimes('\n', n);
+ }
+ }
+
+ try stream.write("\n");
+ try stream.writeByteNTimes(' ', indent);
+ try stream.write("}");
+ }
+ },
+ ast.Node.Id.Defer => {
+ const defer_node = @fieldParentPtr(ast.Node.Defer, "base", base);
+ try stream.print("{} ", tree.tokenSlice(defer_node.defer_token));
+ try renderExpression(allocator, stream, tree, indent, defer_node.expr);
+ },
+ ast.Node.Id.Comptime => {
+ const comptime_node = @fieldParentPtr(ast.Node.Comptime, "base", base);
+ try stream.print("{} ", tree.tokenSlice(comptime_node.comptime_token));
+ try renderExpression(allocator, stream, tree, indent, comptime_node.expr);
+ },
+ ast.Node.Id.AsyncAttribute => {
+ const async_attr = @fieldParentPtr(ast.Node.AsyncAttribute, "base", base);
+ try stream.print("{}", tree.tokenSlice(async_attr.async_token));
+
+ if (async_attr.allocator_type) |allocator_type| {
+ try stream.write("<");
+ try renderExpression(allocator, stream, tree, indent, allocator_type);
+ try stream.write(">");
+ }
+ },
+ ast.Node.Id.Suspend => {
+ const suspend_node = @fieldParentPtr(ast.Node.Suspend, "base", base);
+ if (suspend_node.label) |label| {
+ try stream.print("{}: ", tree.tokenSlice(label));
+ }
+ try stream.print("{}", tree.tokenSlice(suspend_node.suspend_token));
+
+ if (suspend_node.payload) |payload| {
+ try stream.write(" ");
+ try renderExpression(allocator, stream, tree, indent, payload);
+ }
+
+ if (suspend_node.body) |body| {
+ try stream.write(" ");
+ try renderExpression(allocator, stream, tree, indent, body);
+ }
+
+ },
+
+ ast.Node.Id.InfixOp => {
+ const infix_op_node = @fieldParentPtr(ast.Node.InfixOp, "base", base);
+
+ try renderExpression(allocator, stream, tree, indent, infix_op_node.lhs);
+
+ const text = switch (infix_op_node.op) {
+ ast.Node.InfixOp.Op.Add => " + ",
+ ast.Node.InfixOp.Op.AddWrap => " +% ",
+ ast.Node.InfixOp.Op.ArrayCat => " ++ ",
+ ast.Node.InfixOp.Op.ArrayMult => " ** ",
+ ast.Node.InfixOp.Op.Assign => " = ",
+ ast.Node.InfixOp.Op.AssignBitAnd => " &= ",
+ ast.Node.InfixOp.Op.AssignBitOr => " |= ",
+ ast.Node.InfixOp.Op.AssignBitShiftLeft => " <<= ",
+ ast.Node.InfixOp.Op.AssignBitShiftRight => " >>= ",
+ ast.Node.InfixOp.Op.AssignBitXor => " ^= ",
+ ast.Node.InfixOp.Op.AssignDiv => " /= ",
+ ast.Node.InfixOp.Op.AssignMinus => " -= ",
+ ast.Node.InfixOp.Op.AssignMinusWrap => " -%= ",
+ ast.Node.InfixOp.Op.AssignMod => " %= ",
+ ast.Node.InfixOp.Op.AssignPlus => " += ",
+ ast.Node.InfixOp.Op.AssignPlusWrap => " +%= ",
+ ast.Node.InfixOp.Op.AssignTimes => " *= ",
+ ast.Node.InfixOp.Op.AssignTimesWarp => " *%= ",
+ ast.Node.InfixOp.Op.BangEqual => " != ",
+ ast.Node.InfixOp.Op.BitAnd => " & ",
+ ast.Node.InfixOp.Op.BitOr => " | ",
+ ast.Node.InfixOp.Op.BitShiftLeft => " << ",
+ ast.Node.InfixOp.Op.BitShiftRight => " >> ",
+ ast.Node.InfixOp.Op.BitXor => " ^ ",
+ ast.Node.InfixOp.Op.BoolAnd => " and ",
+ ast.Node.InfixOp.Op.BoolOr => " or ",
+ ast.Node.InfixOp.Op.Div => " / ",
+ ast.Node.InfixOp.Op.EqualEqual => " == ",
+ ast.Node.InfixOp.Op.ErrorUnion => "!",
+ ast.Node.InfixOp.Op.GreaterOrEqual => " >= ",
+ ast.Node.InfixOp.Op.GreaterThan => " > ",
+ ast.Node.InfixOp.Op.LessOrEqual => " <= ",
+ ast.Node.InfixOp.Op.LessThan => " < ",
+ ast.Node.InfixOp.Op.MergeErrorSets => " || ",
+ ast.Node.InfixOp.Op.Mod => " % ",
+ ast.Node.InfixOp.Op.Mult => " * ",
+ ast.Node.InfixOp.Op.MultWrap => " *% ",
+ ast.Node.InfixOp.Op.Period => ".",
+ ast.Node.InfixOp.Op.Sub => " - ",
+ ast.Node.InfixOp.Op.SubWrap => " -% ",
+ ast.Node.InfixOp.Op.UnwrapMaybe => " ?? ",
+ ast.Node.InfixOp.Op.Range => " ... ",
+ ast.Node.InfixOp.Op.Catch => |maybe_payload| blk: {
+ try stream.write(" catch ");
+ if (maybe_payload) |payload| {
+ try renderExpression(allocator, stream, tree, indent, payload);
+ try stream.write(" ");
+ }
+ break :blk "";
+ },
+ };
+
+ try stream.write(text);
+ try renderExpression(allocator, stream, tree, indent, infix_op_node.rhs);
+ },
+
+ ast.Node.Id.PrefixOp => {
+ const prefix_op_node = @fieldParentPtr(ast.Node.PrefixOp, "base", base);
+
+ switch (prefix_op_node.op) {
+ ast.Node.PrefixOp.Op.AddrOf => |addr_of_info| {
+ try stream.write("&");
+ if (addr_of_info.align_expr) |align_expr| {
+ try stream.write("align(");
+ try renderExpression(allocator, stream, tree, indent, align_expr);
+ try stream.write(") ");
+ }
+ if (addr_of_info.const_token != null) {
+ try stream.write("const ");
+ }
+ if (addr_of_info.volatile_token != null) {
+ try stream.write("volatile ");
+ }
+ },
+ ast.Node.PrefixOp.Op.SliceType => |addr_of_info| {
+ try stream.write("[]");
+ if (addr_of_info.align_expr) |align_expr| {
+ try stream.print("align(");
+ try renderExpression(allocator, stream, tree, indent, align_expr);
+ try stream.print(") ");
+ }
+ if (addr_of_info.const_token != null) {
+ try stream.print("const ");
+ }
+ if (addr_of_info.volatile_token != null) {
+ try stream.print("volatile ");
+ }
+ },
+ ast.Node.PrefixOp.Op.ArrayType => |array_index| {
+ try stream.print("[");
+ try renderExpression(allocator, stream, tree, indent, array_index);
+ try stream.print("]");
+ },
+ ast.Node.PrefixOp.Op.BitNot => try stream.write("~"),
+ ast.Node.PrefixOp.Op.BoolNot => try stream.write("!"),
+ ast.Node.PrefixOp.Op.Negation => try stream.write("-"),
+ ast.Node.PrefixOp.Op.NegationWrap => try stream.write("-%"),
+ ast.Node.PrefixOp.Op.Try => try stream.write("try "),
+ ast.Node.PrefixOp.Op.UnwrapMaybe => try stream.write("??"),
+ ast.Node.PrefixOp.Op.MaybeType => try stream.write("?"),
+ ast.Node.PrefixOp.Op.Await => try stream.write("await "),
+ ast.Node.PrefixOp.Op.Cancel => try stream.write("cancel "),
+ ast.Node.PrefixOp.Op.Resume => try stream.write("resume "),
+ }
+
+ try renderExpression(allocator, stream, tree, indent, prefix_op_node.rhs);
+ },
+
+ ast.Node.Id.SuffixOp => {
+ const suffix_op = @fieldParentPtr(ast.Node.SuffixOp, "base", base);
+
+ switch (suffix_op.op) {
+ @TagType(ast.Node.SuffixOp.Op).Call => |*call_info| {
+ if (call_info.async_attr) |async_attr| {
+ try renderExpression(allocator, stream, tree, indent, &async_attr.base);
+ try stream.write(" ");
+ }
+
+ try renderExpression(allocator, stream, tree, indent, suffix_op.lhs);
+ try stream.write("(");
+
+ var it = call_info.params.iterator(0);
+ while (it.next()) |param_node| {
+ try renderExpression(allocator, stream, tree, indent, *param_node);
+ if (it.peek() != null) {
+ try stream.write(", ");
+ }
+ }
+
+ try stream.write(")");
+ },
+
+ ast.Node.SuffixOp.Op.ArrayAccess => |index_expr| {
+ try renderExpression(allocator, stream, tree, indent, suffix_op.lhs);
+ try stream.write("[");
+ try renderExpression(allocator, stream, tree, indent, index_expr);
+ try stream.write("]");
+ },
+
+            ast.Node.SuffixOp.Op.Deref => {
+ try renderExpression(allocator, stream, tree, indent, suffix_op.lhs);
+ try stream.write(".*");
+ },
+
+ @TagType(ast.Node.SuffixOp.Op).Slice => |range| {
+ try renderExpression(allocator, stream, tree, indent, suffix_op.lhs);
+ try stream.write("[");
+ try renderExpression(allocator, stream, tree, indent, range.start);
+ try stream.write("..");
+ if (range.end) |end| {
+ try renderExpression(allocator, stream, tree, indent, end);
+ }
+ try stream.write("]");
+ },
+
+ ast.Node.SuffixOp.Op.StructInitializer => |*field_inits| {
+ if (field_inits.len == 0) {
+ try renderExpression(allocator, stream, tree, indent, suffix_op.lhs);
+ try stream.write("{}");
+ return;
+ }
+
+ if (field_inits.len == 1) {
+ const field_init = *field_inits.at(0);
+
+ try renderExpression(allocator, stream, tree, indent, suffix_op.lhs);
+ try stream.write("{ ");
+ try renderExpression(allocator, stream, tree, indent, field_init);
+ try stream.write(" }");
+ return;
+ }
+
+ try renderExpression(allocator, stream, tree, indent, suffix_op.lhs);
+ try stream.write("{\n");
+
+ const new_indent = indent + indent_delta;
+
+ var it = field_inits.iterator(0);
+ while (it.next()) |field_init| {
+ try stream.writeByteNTimes(' ', new_indent);
+ try renderExpression(allocator, stream, tree, new_indent, *field_init);
+ if ((*field_init).id != ast.Node.Id.LineComment) {
+ try stream.write(",");
+ }
+ if (it.peek()) |next_field_init| {
+ const n = if (nodeLineOffset(tree, *field_init, *next_field_init) >= 2) u8(2) else u8(1);
+ try stream.writeByteNTimes('\n', n);
+ }
+ }
+
+ try stream.write("\n");
+ try stream.writeByteNTimes(' ', indent);
+ try stream.write("}");
+ },
+
+ ast.Node.SuffixOp.Op.ArrayInitializer => |*exprs| {
+
+ if (exprs.len == 0) {
+ try renderExpression(allocator, stream, tree, indent, suffix_op.lhs);
+ try stream.write("{}");
+ return;
+ }
+ if (exprs.len == 1) {
+ const expr = *exprs.at(0);
+
+ try renderExpression(allocator, stream, tree, indent, suffix_op.lhs);
+ try stream.write("{");
+ try renderExpression(allocator, stream, tree, indent, expr);
+ try stream.write("}");
+ return;
+ }
+
+ try renderExpression(allocator, stream, tree, indent, suffix_op.lhs);
+
+ const new_indent = indent + indent_delta;
+ try stream.write("{\n");
+
+ var it = exprs.iterator(0);
+ while (it.next()) |expr| {
+ try stream.writeByteNTimes(' ', new_indent);
+ try renderExpression(allocator, stream, tree, new_indent, *expr);
+ try stream.write(",");
+
+ if (it.peek()) |next_expr| {
+ const n = if (nodeLineOffset(tree, *expr, *next_expr) >= 2) u8(2) else u8(1);
+ try stream.writeByteNTimes('\n', n);
+ }
+ }
+
+ try stream.write("\n");
+ try stream.writeByteNTimes(' ', indent);
+ try stream.write("}");
+ },
+ }
+ },
+
+ ast.Node.Id.ControlFlowExpression => {
+ const flow_expr = @fieldParentPtr(ast.Node.ControlFlowExpression, "base", base);
+
+ switch (flow_expr.kind) {
+ ast.Node.ControlFlowExpression.Kind.Break => |maybe_label| {
+ try stream.print("break");
+ if (maybe_label) |label| {
+ try stream.print(" :");
+ try renderExpression(allocator, stream, tree, indent, label);
+ }
+ },
+ ast.Node.ControlFlowExpression.Kind.Continue => |maybe_label| {
+ try stream.print("continue");
+ if (maybe_label) |label| {
+ try stream.print(" :");
+ try renderExpression(allocator, stream, tree, indent, label);
+ }
+ },
+ ast.Node.ControlFlowExpression.Kind.Return => {
+ try stream.print("return");
+ },
+
+ }
+
+ if (flow_expr.rhs) |rhs| {
+ try stream.write(" ");
+ try renderExpression(allocator, stream, tree, indent, rhs);
+ }
+ },
+
+ ast.Node.Id.Payload => {
+ const payload = @fieldParentPtr(ast.Node.Payload, "base", base);
+
+ try stream.write("|");
+ try renderExpression(allocator, stream, tree, indent, payload.error_symbol);
+ try stream.write("|");
+ },
+
+ ast.Node.Id.PointerPayload => {
+ const payload = @fieldParentPtr(ast.Node.PointerPayload, "base", base);
+
+ try stream.write("|");
+ if (payload.ptr_token) |ptr_token| {
+ try stream.write(tree.tokenSlice(ptr_token));
+ }
+ try renderExpression(allocator, stream, tree, indent, payload.value_symbol);
+ try stream.write("|");
+ },
+
+ ast.Node.Id.PointerIndexPayload => {
+ const payload = @fieldParentPtr(ast.Node.PointerIndexPayload, "base", base);
+
+ try stream.write("|");
+ if (payload.ptr_token) |ptr_token| {
+ try stream.write(tree.tokenSlice(ptr_token));
+ }
+ try renderExpression(allocator, stream, tree, indent, payload.value_symbol);
+
+ if (payload.index_symbol) |index_symbol| {
+ try stream.write(", ");
+ try renderExpression(allocator, stream, tree, indent, index_symbol);
+ }
+
+ try stream.write("|");
+ },
+
+ ast.Node.Id.GroupedExpression => {
+ const grouped_expr = @fieldParentPtr(ast.Node.GroupedExpression, "base", base);
+
+ try stream.write("(");
+ try renderExpression(allocator, stream, tree, indent, grouped_expr.expr);
+ try stream.write(")");
+ },
+
+ ast.Node.Id.FieldInitializer => {
+ const field_init = @fieldParentPtr(ast.Node.FieldInitializer, "base", base);
+
+ try stream.print(".{} = ", tree.tokenSlice(field_init.name_token));
+ try renderExpression(allocator, stream, tree, indent, field_init.expr);
+ },
+
+ ast.Node.Id.IntegerLiteral => {
+ const integer_literal = @fieldParentPtr(ast.Node.IntegerLiteral, "base", base);
+ try stream.print("{}", tree.tokenSlice(integer_literal.token));
+ },
+ ast.Node.Id.FloatLiteral => {
+ const float_literal = @fieldParentPtr(ast.Node.FloatLiteral, "base", base);
+ try stream.print("{}", tree.tokenSlice(float_literal.token));
+ },
+ ast.Node.Id.StringLiteral => {
+ const string_literal = @fieldParentPtr(ast.Node.StringLiteral, "base", base);
+ try stream.print("{}", tree.tokenSlice(string_literal.token));
+ },
+ ast.Node.Id.CharLiteral => {
+ const char_literal = @fieldParentPtr(ast.Node.CharLiteral, "base", base);
+ try stream.print("{}", tree.tokenSlice(char_literal.token));
+ },
+ ast.Node.Id.BoolLiteral => {
+            const bool_literal = @fieldParentPtr(ast.Node.BoolLiteral, "base", base);
+ try stream.print("{}", tree.tokenSlice(bool_literal.token));
+ },
+ ast.Node.Id.NullLiteral => {
+ const null_literal = @fieldParentPtr(ast.Node.NullLiteral, "base", base);
+ try stream.print("{}", tree.tokenSlice(null_literal.token));
+ },
+ ast.Node.Id.ThisLiteral => {
+ const this_literal = @fieldParentPtr(ast.Node.ThisLiteral, "base", base);
+ try stream.print("{}", tree.tokenSlice(this_literal.token));
+ },
+ ast.Node.Id.Unreachable => {
+ const unreachable_node = @fieldParentPtr(ast.Node.Unreachable, "base", base);
+ try stream.print("{}", tree.tokenSlice(unreachable_node.token));
+ },
+ ast.Node.Id.ErrorType => {
+ const error_type = @fieldParentPtr(ast.Node.ErrorType, "base", base);
+ try stream.print("{}", tree.tokenSlice(error_type.token));
+ },
+ ast.Node.Id.VarType => {
+ const var_type = @fieldParentPtr(ast.Node.VarType, "base", base);
+ try stream.print("{}", tree.tokenSlice(var_type.token));
+ },
+ ast.Node.Id.ContainerDecl => {
+ const container_decl = @fieldParentPtr(ast.Node.ContainerDecl, "base", base);
+
+ switch (container_decl.layout) {
+ ast.Node.ContainerDecl.Layout.Packed => try stream.print("packed "),
+ ast.Node.ContainerDecl.Layout.Extern => try stream.print("extern "),
+ ast.Node.ContainerDecl.Layout.Auto => { },
+ }
+
+ switch (container_decl.kind) {
+ ast.Node.ContainerDecl.Kind.Struct => try stream.print("struct"),
+ ast.Node.ContainerDecl.Kind.Enum => try stream.print("enum"),
+ ast.Node.ContainerDecl.Kind.Union => try stream.print("union"),
+ }
+
+ switch (container_decl.init_arg_expr) {
+ ast.Node.ContainerDecl.InitArg.None => try stream.write(" "),
+ ast.Node.ContainerDecl.InitArg.Enum => |enum_tag_type| {
+ if (enum_tag_type) |expr| {
+ try stream.write("(enum(");
+ try renderExpression(allocator, stream, tree, indent, expr);
+ try stream.write(")) ");
+ } else {
+ try stream.write("(enum) ");
+ }
+ },
+ ast.Node.ContainerDecl.InitArg.Type => |type_expr| {
+ try stream.write("(");
+ try renderExpression(allocator, stream, tree, indent, type_expr);
+ try stream.write(") ");
+ },
+ }
+
+ if (container_decl.fields_and_decls.len == 0) {
+ try stream.write("{}");
+ } else {
+ try stream.write("{\n");
+ const new_indent = indent + indent_delta;
+
+ var it = container_decl.fields_and_decls.iterator(0);
+ while (it.next()) |decl| {
+ try stream.writeByteNTimes(' ', new_indent);
+ try renderTopLevelDecl(allocator, stream, tree, new_indent, *decl);
+
+ if (it.peek()) |next_decl| {
+ const n = if (nodeLineOffset(tree, *decl, *next_decl) >= 2) u8(2) else u8(1);
+ try stream.writeByteNTimes('\n', n);
+ }
+ }
+
+ try stream.write("\n");
+ try stream.writeByteNTimes(' ', indent);
+ try stream.write("}");
+ }
+ },
+
+ ast.Node.Id.ErrorSetDecl => {
+ const err_set_decl = @fieldParentPtr(ast.Node.ErrorSetDecl, "base", base);
+
+ if (err_set_decl.decls.len == 0) {
+ try stream.write("error{}");
+ return;
+ }
+
+ if (err_set_decl.decls.len == 1) blk: {
+ const node = *err_set_decl.decls.at(0);
+
+ // if there are any doc comments or same line comments
+ // don't try to put it all on one line
+ if (node.cast(ast.Node.ErrorTag)) |tag| {
+ if (tag.doc_comments != null) break :blk;
+ } else {
+ break :blk;
+ }
+
+
+ try stream.write("error{");
+ try renderTopLevelDecl(allocator, stream, tree, indent, node);
+ try stream.write("}");
+ return;
+ }
+
+ try stream.write("error{\n");
+ const new_indent = indent + indent_delta;
+
+ var it = err_set_decl.decls.iterator(0);
+ while (it.next()) |node| {
+ try stream.writeByteNTimes(' ', new_indent);
+ try renderTopLevelDecl(allocator, stream, tree, new_indent, *node);
+ if ((*node).id != ast.Node.Id.LineComment) {
+ try stream.write(",");
+ }
+ if (it.peek()) |next_node| {
+ const n = if (nodeLineOffset(tree, *node, *next_node) >= 2) u8(2) else u8(1);
+ try stream.writeByteNTimes('\n', n);
+ }
+ }
+
+ try stream.write("\n");
+ try stream.writeByteNTimes(' ', indent);
+ try stream.write("}");
+ },
+
+ ast.Node.Id.MultilineStringLiteral => {
+ const multiline_str_literal = @fieldParentPtr(ast.Node.MultilineStringLiteral, "base", base);
+ try stream.print("\n");
+
+ var i : usize = 0;
+ while (i < multiline_str_literal.lines.len) : (i += 1) {
+ const t = *multiline_str_literal.lines.at(i);
+ try stream.writeByteNTimes(' ', indent + indent_delta);
+ try stream.print("{}", tree.tokenSlice(t));
+ }
+ try stream.writeByteNTimes(' ', indent);
+ },
+ ast.Node.Id.UndefinedLiteral => {
+ const undefined_literal = @fieldParentPtr(ast.Node.UndefinedLiteral, "base", base);
+ try stream.print("{}", tree.tokenSlice(undefined_literal.token));
+ },
+
+ ast.Node.Id.BuiltinCall => {
+ const builtin_call = @fieldParentPtr(ast.Node.BuiltinCall, "base", base);
+ try stream.print("{}(", tree.tokenSlice(builtin_call.builtin_token));
+
+ var it = builtin_call.params.iterator(0);
+ while (it.next()) |param_node| {
+ try renderExpression(allocator, stream, tree, indent, *param_node);
+ if (it.peek() != null) {
+ try stream.write(", ");
+ }
+ }
+ try stream.write(")");
+ },
+
+ ast.Node.Id.FnProto => {
+ const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", base);
+
+ if (fn_proto.visib_token) |visib_token_index| {
+ const visib_token = tree.tokens.at(visib_token_index);
+ assert(visib_token.id == Token.Id.Keyword_pub or visib_token.id == Token.Id.Keyword_export);
+ try stream.print("{} ", tree.tokenSlice(visib_token_index));
+ }
+
+ if (fn_proto.extern_export_inline_token) |extern_export_inline_token| {
+ try stream.print("{} ", tree.tokenSlice(extern_export_inline_token));
+ }
+
+ if (fn_proto.lib_name) |lib_name| {
+ try renderExpression(allocator, stream, tree, indent, lib_name);
+ try stream.write(" ");
+ }
+
+ if (fn_proto.cc_token) |cc_token| {
+ try stream.print("{} ", tree.tokenSlice(cc_token));
+ }
+
+ if (fn_proto.async_attr) |async_attr| {
+ try renderExpression(allocator, stream, tree, indent, &async_attr.base);
+ try stream.write(" ");
+ }
+
+ try stream.write("fn");
+
+ if (fn_proto.name_token) |name_token| {
+ try stream.print(" {}", tree.tokenSlice(name_token));
+ }
+
+ try stream.write("(");
+
+ var it = fn_proto.params.iterator(0);
+ while (it.next()) |param_decl_node| {
+ try renderParamDecl(allocator, stream, tree, indent, *param_decl_node);
+
+ if (it.peek() != null) {
+ try stream.write(", ");
+ }
+ }
+
+ try stream.write(") ");
+
+ if (fn_proto.align_expr) |align_expr| {
+ try stream.write("align(");
+ try renderExpression(allocator, stream, tree, indent, align_expr);
+ try stream.write(") ");
+ }
+
+ switch (fn_proto.return_type) {
+ ast.Node.FnProto.ReturnType.Explicit => |node| {
+ try renderExpression(allocator, stream, tree, indent, node);
+ },
+ ast.Node.FnProto.ReturnType.InferErrorSet => |node| {
+ try stream.write("!");
+ try renderExpression(allocator, stream, tree, indent, node);
+ },
+ }
+
+ },
+
+ ast.Node.Id.PromiseType => {
+ const promise_type = @fieldParentPtr(ast.Node.PromiseType, "base", base);
+ try stream.write(tree.tokenSlice(promise_type.promise_token));
+ if (promise_type.result) |result| {
+ try stream.write(tree.tokenSlice(result.arrow_token));
+ try renderExpression(allocator, stream, tree, indent, result.return_type);
+ }
+ },
+
+ ast.Node.Id.LineComment => {
+ const line_comment_node = @fieldParentPtr(ast.Node.LineComment, "base", base);
+ try stream.write(tree.tokenSlice(line_comment_node.token));
+ },
+
+ ast.Node.Id.DocComment => unreachable, // doc comments are attached to nodes
+
+ ast.Node.Id.Switch => {
+ const switch_node = @fieldParentPtr(ast.Node.Switch, "base", base);
+
+ try stream.print("{} (", tree.tokenSlice(switch_node.switch_token));
+ if (switch_node.cases.len == 0) {
+ try renderExpression(allocator, stream, tree, indent, switch_node.expr);
+ try stream.write(") {}");
+ return;
+ }
+
+ try renderExpression(allocator, stream, tree, indent, switch_node.expr);
+ try stream.write(") {\n");
+
+ const new_indent = indent + indent_delta;
+
+ var it = switch_node.cases.iterator(0);
+ while (it.next()) |node| {
+ try stream.writeByteNTimes(' ', new_indent);
+ try renderExpression(allocator, stream, tree, new_indent, *node);
+
+ if (it.peek()) |next_node| {
+ const n = if (nodeLineOffset(tree, *node, *next_node) >= 2) u8(2) else u8(1);
+ try stream.writeByteNTimes('\n', n);
+ }
+ }
+
+ try stream.write("\n");
+ try stream.writeByteNTimes(' ', indent);
+ try stream.write("}");
+ },
+
+ ast.Node.Id.SwitchCase => {
+ const switch_case = @fieldParentPtr(ast.Node.SwitchCase, "base", base);
+
+ var it = switch_case.items.iterator(0);
+ while (it.next()) |node| {
+ try renderExpression(allocator, stream, tree, indent, *node);
+
+ if (it.peek() != null) {
+ try stream.write(",\n");
+ try stream.writeByteNTimes(' ', indent);
+ }
+ }
+
+ try stream.write(" => ");
+
+ if (switch_case.payload) |payload| {
+ try renderExpression(allocator, stream, tree, indent, payload);
+ try stream.write(" ");
+ }
+
+ try renderExpression(allocator, stream, tree, indent, switch_case.expr);
+ try renderToken(tree, stream, switch_case.lastToken() + 1, indent, true);
+ },
+ ast.Node.Id.SwitchElse => {
+ const switch_else = @fieldParentPtr(ast.Node.SwitchElse, "base", base);
+ try stream.print("{}", tree.tokenSlice(switch_else.token));
+ },
+ ast.Node.Id.Else => {
+ const else_node = @fieldParentPtr(ast.Node.Else, "base", base);
+ try stream.print("{}", tree.tokenSlice(else_node.else_token));
+
+ const block_body = switch (else_node.body.id) {
+ ast.Node.Id.Block, ast.Node.Id.If,
+ ast.Node.Id.For, ast.Node.Id.While,
+ ast.Node.Id.Switch => true,
+ else => false,
+ };
+
+ if (block_body) {
+ try stream.write(" ");
+ }
+
+ if (else_node.payload) |payload| {
+ try renderExpression(allocator, stream, tree, indent, payload);
+ try stream.write(" ");
+ }
+
+ if (block_body) {
+ try renderExpression(allocator, stream, tree, indent, else_node.body);
+ } else {
+ try stream.write("\n");
+ try stream.writeByteNTimes(' ', indent + indent_delta);
+ try renderExpression(allocator, stream, tree, indent, else_node.body);
+ }
+ },
+
+ ast.Node.Id.While => {
+ const while_node = @fieldParentPtr(ast.Node.While, "base", base);
+
+ if (while_node.label) |label| {
+ try stream.print("{}: ", tree.tokenSlice(label));
+ }
+
+ if (while_node.inline_token) |inline_token| {
+ try stream.print("{} ", tree.tokenSlice(inline_token));
+ }
+
+ try stream.print("{} (", tree.tokenSlice(while_node.while_token));
+ try renderExpression(allocator, stream, tree, indent, while_node.condition);
+ try stream.write(")");
+
+ if (while_node.payload) |payload| {
+ try stream.write(" ");
+ try renderExpression(allocator, stream, tree, indent, payload);
+ }
+
+ if (while_node.continue_expr) |continue_expr| {
+ try stream.write(" : (");
+ try renderExpression(allocator, stream, tree, indent, continue_expr);
+ try stream.write(")");
+ }
+
+ if (while_node.body.id == ast.Node.Id.Block) {
+ try stream.write(" ");
+ try renderExpression(allocator, stream, tree, indent, while_node.body);
+ } else {
+ try stream.write("\n");
+ try stream.writeByteNTimes(' ', indent + indent_delta);
+ try renderExpression(allocator, stream, tree, indent, while_node.body);
+ }
+
+ if (while_node.@"else") |@"else"| {
+ if (while_node.body.id == ast.Node.Id.Block) {
+ try stream.write(" ");
+ } else {
+ try stream.write("\n");
+ try stream.writeByteNTimes(' ', indent);
+ }
+
+ try renderExpression(allocator, stream, tree, indent, &@"else".base);
+ }
+ },
+
+ ast.Node.Id.For => {
+ const for_node = @fieldParentPtr(ast.Node.For, "base", base);
+ if (for_node.label) |label| {
+ try stream.print("{}: ", tree.tokenSlice(label));
+ }
+
+ if (for_node.inline_token) |inline_token| {
+ try stream.print("{} ", tree.tokenSlice(inline_token));
+ }
+
+ try stream.print("{} (", tree.tokenSlice(for_node.for_token));
+ try renderExpression(allocator, stream, tree, indent, for_node.array_expr);
+ try stream.write(")");
+
+ if (for_node.payload) |payload| {
+ try stream.write(" ");
+ try renderExpression(allocator, stream, tree, indent, payload);
+ }
+
+ if (for_node.body.id == ast.Node.Id.Block) {
+ try stream.write(" ");
+ try renderExpression(allocator, stream, tree, indent, for_node.body);
+ } else {
+ try stream.write("\n");
+ try stream.writeByteNTimes(' ', indent + indent_delta);
+ try renderExpression(allocator, stream, tree, indent, for_node.body);
+ }
+
+ if (for_node.@"else") |@"else"| {
+ if (for_node.body.id == ast.Node.Id.Block) {
+ try stream.write(" ");
+ } else {
+ try stream.write("\n");
+ try stream.writeByteNTimes(' ', indent);
+ }
+
+ try renderExpression(allocator, stream, tree, indent, &@"else".base);
+ }
+ },
+
+ ast.Node.Id.If => {
+ const if_node = @fieldParentPtr(ast.Node.If, "base", base);
+ try stream.print("{} (", tree.tokenSlice(if_node.if_token));
+
+ try renderExpression(allocator, stream, tree, indent, if_node.condition);
+ try renderToken(tree, stream, if_node.condition.lastToken() + 1, indent, false);
+
+ if (if_node.payload) |payload| {
+ try renderExpression(allocator, stream, tree, indent, payload);
+ try stream.write(" ");
+ }
+
+ try renderExpression(allocator, stream, tree, indent, if_node.body);
+
+ switch (if_node.body.id) {
+ ast.Node.Id.Block, ast.Node.Id.If, ast.Node.Id.For, ast.Node.Id.While, ast.Node.Id.Switch => {
+ if (if_node.@"else") |@"else"| {
+ if (if_node.body.id == ast.Node.Id.Block) {
+ try stream.write(" ");
+ } else {
+ try stream.write("\n");
+ try stream.writeByteNTimes(' ', indent);
+ }
+
+ try renderExpression(allocator, stream, tree, indent, &@"else".base);
+ }
+ },
+ else => {
+ if (if_node.@"else") |@"else"| {
+ try stream.print(" {} ", tree.tokenSlice(@"else".else_token));
+
+ if (@"else".payload) |payload| {
+ try renderExpression(allocator, stream, tree, indent, payload);
+ try stream.write(" ");
+ }
+
+ try renderExpression(allocator, stream, tree, indent, @"else".body);
+ }
+ }
+ }
+ },
+
+ ast.Node.Id.Asm => {
+ const asm_node = @fieldParentPtr(ast.Node.Asm, "base", base);
+ try stream.print("{} ", tree.tokenSlice(asm_node.asm_token));
+
+ if (asm_node.volatile_token) |volatile_token| {
+ try stream.print("{} ", tree.tokenSlice(volatile_token));
+ }
+
+ try stream.print("(");
+ try renderExpression(allocator, stream, tree, indent, asm_node.template);
+ try stream.print("\n");
+ const indent_once = indent + indent_delta;
+ try stream.writeByteNTimes(' ', indent_once);
+ try stream.print(": ");
+ const indent_extra = indent_once + 2;
+
+ {
+ var it = asm_node.outputs.iterator(0);
+ while (it.next()) |asm_output| {
+ const node = &(*asm_output).base;
+ try renderExpression(allocator, stream, tree, indent_extra, node);
+
+ if (it.peek()) |next_asm_output| {
+ const next_node = &(*next_asm_output).base;
+ const n = if (nodeLineOffset(tree, node, next_node) >= 2) u8(2) else u8(1);
+ try stream.writeByte(',');
+ try stream.writeByteNTimes('\n', n);
+ try stream.writeByteNTimes(' ', indent_extra);
+ }
+ }
+ }
+
+ try stream.write("\n");
+ try stream.writeByteNTimes(' ', indent_once);
+ try stream.write(": ");
+
+ {
+ var it = asm_node.inputs.iterator(0);
+ while (it.next()) |asm_input| {
+ const node = &(*asm_input).base;
+ try renderExpression(allocator, stream, tree, indent_extra, node);
+
+ if (it.peek()) |next_asm_input| {
+ const next_node = &(*next_asm_input).base;
+ const n = if (nodeLineOffset(tree, node, next_node) >= 2) u8(2) else u8(1);
+ try stream.writeByte(',');
+ try stream.writeByteNTimes('\n', n);
+ try stream.writeByteNTimes(' ', indent_extra);
+ }
+ }
+ }
+
+ try stream.write("\n");
+ try stream.writeByteNTimes(' ', indent_once);
+ try stream.write(": ");
+
+ {
+ var it = asm_node.clobbers.iterator(0);
+ while (it.next()) |node| {
+ try renderExpression(allocator, stream, tree, indent_once, *node);
+
+ if (it.peek() != null) {
+ try stream.write(", ");
+ }
+ }
+ }
+
+ try stream.write(")");
+ },
+
+ ast.Node.Id.AsmInput => {
+ const asm_input = @fieldParentPtr(ast.Node.AsmInput, "base", base);
+
+ try stream.write("[");
+ try renderExpression(allocator, stream, tree, indent, asm_input.symbolic_name);
+ try stream.write("] ");
+ try renderExpression(allocator, stream, tree, indent, asm_input.constraint);
+ try stream.write(" (");
+ try renderExpression(allocator, stream, tree, indent, asm_input.expr);
+ try stream.write(")");
+ },
+
+ ast.Node.Id.AsmOutput => {
+ const asm_output = @fieldParentPtr(ast.Node.AsmOutput, "base", base);
+
+ try stream.write("[");
+ try renderExpression(allocator, stream, tree, indent, asm_output.symbolic_name);
+ try stream.write("] ");
+ try renderExpression(allocator, stream, tree, indent, asm_output.constraint);
+ try stream.write(" (");
+
+ switch (asm_output.kind) {
+ ast.Node.AsmOutput.Kind.Variable => |variable_name| {
+ try renderExpression(allocator, stream, tree, indent, &variable_name.base);
+ },
+ ast.Node.AsmOutput.Kind.Return => |return_type| {
+ try stream.write("-> ");
+ try renderExpression(allocator, stream, tree, indent, return_type);
+ },
+ }
+
+ try stream.write(")");
+ },
+
+ ast.Node.Id.StructField,
+ ast.Node.Id.UnionTag,
+ ast.Node.Id.EnumTag,
+ ast.Node.Id.ErrorTag,
+ ast.Node.Id.Root,
+ ast.Node.Id.VarDecl,
+ ast.Node.Id.Use,
+ ast.Node.Id.TestDecl,
+ ast.Node.Id.ParamDecl => unreachable,
+ }
+}
+
+/// Renders a complete variable declaration on the current line:
+/// [pub] [extern|export ["lib"]] [comptime] var|const name[: type]
+/// [align(expr)] [= init];
+/// Ends by rendering the trailing semicolon token, which may carry a
+/// same-line comment (handled by renderToken with line_break = true).
+fn renderVarDecl(allocator: &mem.Allocator, stream: var, tree: &ast.Tree, indent: usize, var_decl: &ast.Node.VarDecl) (@typeOf(stream).Child.Error || Error)!void {
+    if (var_decl.visib_token) |visib_token| {
+        try stream.print("{} ", tree.tokenSlice(visib_token));
+    }
+
+    if (var_decl.extern_export_token) |extern_export_token| {
+        try stream.print("{} ", tree.tokenSlice(extern_export_token));
+
+        // A library name string is only rendered when extern/export is present.
+        if (var_decl.lib_name) |lib_name| {
+            try renderExpression(allocator, stream, tree, indent, lib_name);
+            try stream.write(" ");
+        }
+    }
+
+    if (var_decl.comptime_token) |comptime_token| {
+        try stream.print("{} ", tree.tokenSlice(comptime_token));
+    }
+
+    // Mutability keyword ("var"/"const") followed by the declared name.
+    try stream.print("{} {}", tree.tokenSlice(var_decl.mut_token), tree.tokenSlice(var_decl.name_token));
+
+    if (var_decl.type_node) |type_node| {
+        try stream.write(": ");
+        try renderExpression(allocator, stream, tree, indent, type_node);
+    }
+
+    if (var_decl.align_node) |align_node| {
+        try stream.write(" align(");
+        try renderExpression(allocator, stream, tree, indent, align_node);
+        try stream.write(")");
+    }
+
+    if (var_decl.init_node) |init_node| {
+        // For multiline string literals "=" gets no trailing space -- presumably
+        // because the literal itself begins on the following line. TODO confirm.
+        const text = if (init_node.id == ast.Node.Id.MultilineStringLiteral) " =" else " = ";
+        try stream.write(text);
+        try renderExpression(allocator, stream, tree, indent, init_node);
+    }
+
+    // line_break = true: the caller terminates the line after the semicolon.
+    try renderToken(tree, stream, var_decl.semicolon_token, indent, true);
+}
+
+/// If `base` is a node kind that must be terminated with ";", renders the
+/// semicolon token immediately following the node's last token (including any
+/// same-line trailing comment, via renderToken). Asserts that the token at
+/// that position really is a semicolon; no-op for nodes that need none.
+fn maybeRenderSemicolon(stream: var, tree: &ast.Tree, indent: usize, base: &ast.Node) (@typeOf(stream).Child.Error || Error)!void {
+    if (base.requireSemiColon()) {
+        const semicolon_index = base.lastToken() + 1;
+        assert(tree.tokens.at(semicolon_index).id == Token.Id.Semicolon);
+        try renderToken(tree, stream, semicolon_index, indent, true);
+    }
+}
+
+/// Renders a single function parameter declaration:
+/// [comptime] [noalias] [name:] type-or-varargs-token.
+/// A var-args token and a type expression are mutually exclusive: when the
+/// var-args token is present it is rendered instead of the type node.
+fn renderParamDecl(allocator: &mem.Allocator, stream: var, tree: &ast.Tree, indent: usize, base: &ast.Node) (@typeOf(stream).Child.Error || Error)!void {
+    const param_decl = @fieldParentPtr(ast.Node.ParamDecl, "base", base);
+    if (param_decl.comptime_token) |comptime_token| {
+        try stream.print("{} ", tree.tokenSlice(comptime_token));
+    }
+    if (param_decl.noalias_token) |noalias_token| {
+        try stream.print("{} ", tree.tokenSlice(noalias_token));
+    }
+    // Unnamed parameters (e.g. in extern prototypes) simply omit "name: ".
+    if (param_decl.name_token) |name_token| {
+        try stream.print("{}: ", tree.tokenSlice(name_token));
+    }
+    if (param_decl.var_args_token) |var_args_token| {
+        try stream.print("{}", tree.tokenSlice(var_args_token));
+    } else {
+        try renderExpression(allocator, stream, tree, indent, param_decl.type_node);
+    }
+}
+
+/// Renders one statement inside a block. Variable declarations get dedicated
+/// handling (they render their own semicolon); every other statement is
+/// rendered as an expression, followed by a semicolon only when the node kind
+/// requires one (see maybeRenderSemicolon).
+fn renderStatement(allocator: &mem.Allocator, stream: var, tree: &ast.Tree, indent: usize, base: &ast.Node) (@typeOf(stream).Child.Error || Error)!void {
+    switch (base.id) {
+        ast.Node.Id.VarDecl => {
+            const var_decl = @fieldParentPtr(ast.Node.VarDecl, "base", base);
+            try renderVarDecl(allocator, stream, tree, indent, var_decl);
+        },
+        else => {
+            try renderExpression(allocator, stream, tree, indent, base);
+            try maybeRenderSemicolon(stream, tree, indent, base);
+        },
+    }
+}
+
+/// Writes the source text of the token at `token_index`. If the very next
+/// token is a line comment sitting on the same source line (zero newlines
+/// between them), the comment is re-emitted after a single space so it is
+/// preserved by the formatter.
+///
+/// `line_break` = true means the caller ends the line itself, so no separator
+/// is appended here. When false, a single space normally separates this token
+/// from the next -- except after an inline comment, where a newline plus one
+/// extra indent level is emitted instead (a space would put the next token
+/// inside the comment).
+fn renderToken(tree: &ast.Tree, stream: var, token_index: ast.TokenIndex, indent: usize, line_break: bool) (@typeOf(stream).Child.Error || Error)!void {
+    const token = tree.tokens.at(token_index);
+    try stream.write(tree.tokenSlicePtr(token));
+
+    const next_token = tree.tokens.at(token_index + 1);
+    if (next_token.id == Token.Id.LineComment) {
+        // loc.line == 0 means the comment starts on the same line as `token`.
+        const loc = tree.tokenLocationPtr(token.end, next_token);
+        if (loc.line == 0) {
+            try stream.print(" {}", tree.tokenSlicePtr(next_token));
+            if (!line_break) {
+                try stream.write("\n");
+                try stream.writeByteNTimes(' ', indent + indent_delta);
+                return;
+            }
+        }
+    }
+
+    if (!line_break) {
+        try stream.writeByte(' ');
+    }
+}
+
+/// Renders the doc-comment lines attached to `node`, if any, one per output
+/// line, re-indenting after each so the declaration that follows lines up.
+/// `node` is duck-typed: any node type with an optional `doc_comments` field.
+/// Returns immediately when the node carries no doc comments ("??" is the
+/// orelse operator in this Zig dialect).
+fn renderComments(tree: &ast.Tree, stream: var, node: var, indent: usize) (@typeOf(stream).Child.Error || Error)!void {
+    const comment = node.doc_comments ?? return;
+    var it = comment.lines.iterator(0);
+    while (it.next()) |line_token_index| {
+        try stream.print("{}\n", tree.tokenSlice(*line_token_index));
+        try stream.writeByteNTimes(' ', indent);
+    }
+}
+
diff --git a/std/zig/tokenizer.zig b/std/zig/tokenizer.zig
index 31dc06b695..b0e5014a1a 100644
--- a/std/zig/tokenizer.zig
+++ b/std/zig/tokenizer.zig
@@ -195,37 +195,6 @@ pub const Tokenizer = struct {
index: usize,
pending_invalid_token: ?Token,
- pub const Location = struct {
- line: usize,
- column: usize,
- line_start: usize,
- line_end: usize,
- };
-
- pub fn getTokenLocation(self: &Tokenizer, start_index: usize, token: &const Token) Location {
- var loc = Location {
- .line = 0,
- .column = 0,
- .line_start = start_index,
- .line_end = self.buffer.len,
- };
- for (self.buffer[start_index..]) |c, i| {
- if (i + start_index == token.start) {
- loc.line_end = i + start_index;
- while (loc.line_end < self.buffer.len and self.buffer[loc.line_end] != '\n') : (loc.line_end += 1) {}
- return loc;
- }
- if (c == '\n') {
- loc.line += 1;
- loc.column = 0;
- loc.line_start = i + 1;
- } else {
- loc.column += 1;
- }
- }
- return loc;
- }
-
/// For debugging purposes
pub fn dump(self: &Tokenizer, token: &const Token) void {
std.debug.warn("{} \"{}\"\n", @tagName(token.id), self.buffer[token.start..token.end]);
@@ -1047,10 +1016,6 @@ pub const Tokenizer = struct {
return result;
}
- pub fn getTokenSlice(self: &const Tokenizer, token: &const Token) []const u8 {
- return self.buffer[token.start..token.end];
- }
-
fn checkLiteralCharacter(self: &Tokenizer) void {
if (self.pending_invalid_token != null) return;
const invalid_length = self.getInvalidCharacterLength();
diff --git a/test/behavior.zig b/test/behavior.zig
index 5239d61941..3341fe717d 100644
--- a/test/behavior.zig
+++ b/test/behavior.zig
@@ -23,6 +23,7 @@ comptime {
_ = @import("cases/eval.zig");
_ = @import("cases/field_parent_ptr.zig");
_ = @import("cases/fn.zig");
+ _ = @import("cases/fn_in_struct_in_comptime.zig");
_ = @import("cases/for.zig");
_ = @import("cases/generics.zig");
_ = @import("cases/if.zig");
@@ -32,12 +33,12 @@ comptime {
_ = @import("cases/math.zig");
_ = @import("cases/misc.zig");
_ = @import("cases/namespace_depends_on_compile_var/index.zig");
+ _ = @import("cases/new_stack_call.zig");
_ = @import("cases/null.zig");
_ = @import("cases/pointers.zig");
_ = @import("cases/pub_enum/index.zig");
_ = @import("cases/ref_var_in_if_after_if_2nd_switch_prong.zig");
_ = @import("cases/reflection.zig");
- _ = @import("cases/type_info.zig");
_ = @import("cases/sizeof_and_typeof.zig");
_ = @import("cases/slice.zig");
_ = @import("cases/struct.zig");
@@ -49,10 +50,10 @@ comptime {
_ = @import("cases/syntax.zig");
_ = @import("cases/this.zig");
_ = @import("cases/try.zig");
+ _ = @import("cases/type_info.zig");
_ = @import("cases/undefined.zig");
_ = @import("cases/union.zig");
_ = @import("cases/var_args.zig");
_ = @import("cases/void.zig");
_ = @import("cases/while.zig");
- _ = @import("cases/fn_in_struct_in_comptime.zig");
}
diff --git a/test/cases/eval.zig b/test/cases/eval.zig
index 2571686b0b..d7ad68b74e 100644
--- a/test/cases/eval.zig
+++ b/test/cases/eval.zig
@@ -569,3 +569,20 @@ test "runtime 128 bit integer division" {
var c = a / b;
assert(c == 15231399999);
}
+
+pub const Info = struct {
+ version: u8,
+};
+
+pub const diamond_info = Info {
+ .version = 0,
+};
+
+test "comptime modification of const struct field" {
+ comptime {
+ var res = diamond_info;
+ res.version = 1;
+ assert(diamond_info.version == 0);
+ assert(res.version == 1);
+ }
+}
diff --git a/test/cases/math.zig b/test/cases/math.zig
index 406ffec8d4..3c6156a3ea 100644
--- a/test/cases/math.zig
+++ b/test/cases/math.zig
@@ -352,6 +352,14 @@ test "big number multi-limb shift and mask" {
}
}
+test "big number multi-limb partial shift right" {
+ comptime {
+ var a = 0x1ffffffffeeeeeeee;
+ a >>= 16;
+ assert(a == 0x1ffffffffeeee);
+ }
+}
+
test "xor" {
test_xor();
comptime test_xor();
diff --git a/test/cases/new_stack_call.zig b/test/cases/new_stack_call.zig
new file mode 100644
index 0000000000..5912550d54
--- /dev/null
+++ b/test/cases/new_stack_call.zig
@@ -0,0 +1,26 @@
+const std = @import("std");
+const assert = std.debug.assert;
+
+var new_stack_bytes: [1024]u8 = undefined;
+
+test "calling a function with a new stack" {
+ const arg = 1234;
+
+ const a = @newStackCall(new_stack_bytes[0..512], targetFunction, arg);
+ const b = @newStackCall(new_stack_bytes[512..], targetFunction, arg);
+ _ = targetFunction(arg);
+
+ assert(arg == 1234);
+ assert(a < b);
+}
+
+fn targetFunction(x: i32) usize {
+ assert(x == 1234);
+
+ var local_variable: i32 = 42;
+ const ptr = &local_variable;
+ ptr.* += 1;
+
+ assert(local_variable == 43);
+ return @ptrToInt(ptr);
+}