diff --git a/doc/docgen.zig b/doc/docgen.zig
index 92a7353fa1..7886c7cc90 100644
--- a/doc/docgen.zig
+++ b/doc/docgen.zig
@@ -776,7 +776,7 @@ fn tokenizeAndPrintRaw(docgen_tokenizer: *Tokenizer, out: var, source_token: Tok
next_tok_is_fn = false;
const token = tokenizer.next();
- try writeEscaped(out, src[index..token.start]);
+ try writeEscaped(out, src[index..token.loc.start]);
switch (token.id) {
.Eof => break,
@@ -827,13 +827,13 @@ fn tokenizeAndPrintRaw(docgen_tokenizer: *Tokenizer, out: var, source_token: Tok
.Keyword_while,
=> {
try out.writeAll("<span class=\"tok-kw\">");
- try writeEscaped(out, src[token.start..token.end]);
+ try writeEscaped(out, src[token.loc.start..token.loc.end]);
try out.writeAll("</span>");
},
.Keyword_fn => {
try out.writeAll("<span class=\"tok-kw\">");
- try writeEscaped(out, src[token.start..token.end]);
+ try writeEscaped(out, src[token.loc.start..token.loc.end]);
try out.writeAll("</span>");
next_tok_is_fn = true;
},
@@ -844,7 +844,7 @@ fn tokenizeAndPrintRaw(docgen_tokenizer: *Tokenizer, out: var, source_token: Tok
.Keyword_false,
=> {
try out.writeAll("<span class=\"tok-null\">");
- try writeEscaped(out, src[token.start..token.end]);
+ try writeEscaped(out, src[token.loc.start..token.loc.end]);
try out.writeAll("</span>");
},
@@ -853,13 +853,13 @@ fn tokenizeAndPrintRaw(docgen_tokenizer: *Tokenizer, out: var, source_token: Tok
.CharLiteral,
=> {
try out.writeAll("<span class=\"tok-str\">");
- try writeEscaped(out, src[token.start..token.end]);
+ try writeEscaped(out, src[token.loc.start..token.loc.end]);
try out.writeAll("</span>");
},
.Builtin => {
try out.writeAll("<span class=\"tok-builtin\">");
- try writeEscaped(out, src[token.start..token.end]);
+ try writeEscaped(out, src[token.loc.start..token.loc.end]);
try out.writeAll("</span>");
},
@@ -869,34 +869,34 @@ fn tokenizeAndPrintRaw(docgen_tokenizer: *Tokenizer, out: var, source_token: Tok
.ShebangLine,
=> {
try out.writeAll("");
},
.Identifier => {
if (prev_tok_was_fn) {
try out.writeAll("<span class=\"tok-fn\">");
- try writeEscaped(out, src[token.start..token.end]);
+ try writeEscaped(out, src[token.loc.start..token.loc.end]);
try out.writeAll("</span>");
} else {
const is_int = blk: {
- if (src[token.start] != 'i' and src[token.start] != 'u')
+ if (src[token.loc.start] != 'i' and src[token.loc.start] != 'u')
break :blk false;
- var i = token.start + 1;
- if (i == token.end)
+ var i = token.loc.start + 1;
+ if (i == token.loc.end)
break :blk false;
- while (i != token.end) : (i += 1) {
+ while (i != token.loc.end) : (i += 1) {
if (src[i] < '0' or src[i] > '9')
break :blk false;
}
break :blk true;
};
- if (is_int or isType(src[token.start..token.end])) {
+ if (is_int or isType(src[token.loc.start..token.loc.end])) {
try out.writeAll("<span class=\"tok-type\">");
- try writeEscaped(out, src[token.start..token.end]);
+ try writeEscaped(out, src[token.loc.start..token.loc.end]);
try out.writeAll("</span>");
} else {
- try writeEscaped(out, src[token.start..token.end]);
+ try writeEscaped(out, src[token.loc.start..token.loc.end]);
}
}
},
@@ -905,7 +905,7 @@ fn tokenizeAndPrintRaw(docgen_tokenizer: *Tokenizer, out: var, source_token: Tok
.FloatLiteral,
=> {
try out.writeAll("<span class=\"tok-number\">");
- try writeEscaped(out, src[token.start..token.end]);
+ try writeEscaped(out, src[token.loc.start..token.loc.end]);
try out.writeAll("</span>");
},
@@ -963,7 +963,7 @@ fn tokenizeAndPrintRaw(docgen_tokenizer: *Tokenizer, out: var, source_token: Tok
.AngleBracketAngleBracketRight,
.AngleBracketAngleBracketRightEqual,
.Tilde,
- => try writeEscaped(out, src[token.start..token.end]),
+ => try writeEscaped(out, src[token.loc.start..token.loc.end]),
.Invalid, .Invalid_ampersands => return parseError(
docgen_tokenizer,
@@ -972,7 +972,7 @@ fn tokenizeAndPrintRaw(docgen_tokenizer: *Tokenizer, out: var, source_token: Tok
.{},
),
}
- index = token.end;
+ index = token.loc.end;
}
try out.writeAll("</code>");
}
diff --git a/lib/std/heap/arena_allocator.zig b/lib/std/heap/arena_allocator.zig
index daed17d6b3..b41399772a 100644
--- a/lib/std/heap/arena_allocator.zig
+++ b/lib/std/heap/arena_allocator.zig
@@ -46,13 +46,9 @@ pub const ArenaAllocator = struct {
}
fn createNode(self: *ArenaAllocator, prev_len: usize, minimum_size: usize) !*BufNode {
- const actual_min_size = minimum_size + @sizeOf(BufNode);
- var len = prev_len;
- while (true) {
- len += len / 2;
- len += mem.page_size - @rem(len, mem.page_size);
- if (len >= actual_min_size) break;
- }
+ const actual_min_size = minimum_size + (@sizeOf(BufNode) + 16);
+ const big_enough_len = prev_len + actual_min_size;
+ const len = big_enough_len + big_enough_len / 2;
const buf = try self.child_allocator.alignedAlloc(u8, @alignOf(BufNode), len);
const buf_node_slice = mem.bytesAsSlice(BufNode, buf[0..@sizeOf(BufNode)]);
const buf_node = &buf_node_slice[0];
diff --git a/lib/std/linked_list.zig b/lib/std/linked_list.zig
index 50acbb2c9f..03520b0640 100644
--- a/lib/std/linked_list.zig
+++ b/lib/std/linked_list.zig
@@ -21,6 +21,8 @@ pub fn SinglyLinkedList(comptime T: type) type {
next: ?*Node = null,
data: T,
+ pub const Data = T;
+
pub fn init(data: T) Node {
return Node{
.data = data,
@@ -47,29 +49,30 @@ pub fn SinglyLinkedList(comptime T: type) type {
node.next = next_node.next;
return next_node;
}
+
+ /// Iterate over the singly-linked list from this node, until the final node is found.
+ /// This operation is O(N).
+ pub fn findLast(node: *Node) *Node {
+ var it = node;
+ while (true) {
+ it = it.next orelse return it;
+ }
+ }
+
+ /// Iterate over each next node, returning the count of all nodes except the starting one.
+ /// This operation is O(N).
+ pub fn countChildren(node: *const Node) usize {
+ var count: usize = 0;
+ var it: ?*const Node = node;
+ while (it) |n| : (it = n.next) {
+ count += 1;
+ }
+ return count;
+ }
};
first: ?*Node = null,
- /// Initialize a linked list.
- ///
- /// Returns:
- /// An empty linked list.
- pub fn init() Self {
- return Self{
- .first = null,
- };
- }
-
- /// Insert a new node after an existing one.
- ///
- /// Arguments:
- /// node: Pointer to a node in the list.
- /// new_node: Pointer to the new node to insert.
- pub fn insertAfter(list: *Self, node: *Node, new_node: *Node) void {
- node.insertAfter(new_node);
- }
-
/// Insert a new node at the head.
///
/// Arguments:
@@ -105,64 +108,33 @@ pub fn SinglyLinkedList(comptime T: type) type {
return first;
}
- /// Allocate a new node.
- ///
- /// Arguments:
- /// allocator: Dynamic memory allocator.
- ///
- /// Returns:
- /// A pointer to the new node.
- pub fn allocateNode(list: *Self, allocator: *Allocator) !*Node {
- return allocator.create(Node);
- }
-
- /// Deallocate a node.
- ///
- /// Arguments:
- /// node: Pointer to the node to deallocate.
- /// allocator: Dynamic memory allocator.
- pub fn destroyNode(list: *Self, node: *Node, allocator: *Allocator) void {
- allocator.destroy(node);
- }
-
- /// Allocate and initialize a node and its data.
- ///
- /// Arguments:
- /// data: The data to put inside the node.
- /// allocator: Dynamic memory allocator.
- ///
- /// Returns:
- /// A pointer to the new node.
- pub fn createNode(list: *Self, data: T, allocator: *Allocator) !*Node {
- var node = try list.allocateNode(allocator);
- node.* = Node.init(data);
- return node;
+ /// Iterate over all nodes, returning the count.
+ /// This operation is O(N).
+ pub fn len(list: Self) usize {
+ if (list.first) |n| {
+ return 1 + n.countChildren();
+ } else {
+ return 0;
+ }
}
};
}
test "basic SinglyLinkedList test" {
- const allocator = testing.allocator;
- var list = SinglyLinkedList(u32).init();
+ const L = SinglyLinkedList(u32);
+ var list = L{};
- var one = try list.createNode(1, allocator);
- var two = try list.createNode(2, allocator);
- var three = try list.createNode(3, allocator);
- var four = try list.createNode(4, allocator);
- var five = try list.createNode(5, allocator);
- defer {
- list.destroyNode(one, allocator);
- list.destroyNode(two, allocator);
- list.destroyNode(three, allocator);
- list.destroyNode(four, allocator);
- list.destroyNode(five, allocator);
- }
+ var one = L.Node{.data = 1};
+ var two = L.Node{.data = 2};
+ var three = L.Node{.data = 3};
+ var four = L.Node{.data = 4};
+ var five = L.Node{.data = 5};
- list.prepend(two); // {2}
- list.insertAfter(two, five); // {2, 5}
- list.prepend(one); // {1, 2, 5}
- list.insertAfter(two, three); // {1, 2, 3, 5}
- list.insertAfter(three, four); // {1, 2, 3, 4, 5}
+ list.prepend(&two); // {2}
+ two.insertAfter(&five); // {2, 5}
+ list.prepend(&one); // {1, 2, 5}
+ two.insertAfter(&three); // {1, 2, 3, 5}
+ three.insertAfter(&four); // {1, 2, 3, 4, 5}
// Traverse forwards.
{
@@ -175,7 +147,7 @@ test "basic SinglyLinkedList test" {
}
_ = list.popFirst(); // {2, 3, 4, 5}
- _ = list.remove(five); // {2, 3, 4}
+ _ = list.remove(&five); // {2, 3, 4}
_ = two.removeNext(); // {2, 4}
testing.expect(list.first.?.data == 2);
diff --git a/lib/std/rand.zig b/lib/std/rand.zig
index 45a1172b3d..1e5bb37592 100644
--- a/lib/std/rand.zig
+++ b/lib/std/rand.zig
@@ -658,7 +658,7 @@ pub const Xoroshiro128 = struct {
self.s[1] = s1;
}
- fn seed(self: *Xoroshiro128, init_s: u64) void {
+ pub fn seed(self: *Xoroshiro128, init_s: u64) void {
// Xoroshiro requires 128-bits of seed.
var gen = SplitMix64.init(init_s);
diff --git a/lib/std/testing.zig b/lib/std/testing.zig
index 34bebad043..2d136d56c9 100644
--- a/lib/std/testing.zig
+++ b/lib/std/testing.zig
@@ -320,10 +320,11 @@ fn printWithVisibleNewlines(source: []const u8) void {
}
fn printLine(line: []const u8) void {
- switch (line[line.len - 1]) {
+ if (line.len != 0) switch (line[line.len - 1]) {
- ' ', '\t' => warn("{}⏎\n", .{line}), // Carriage return symbol,
+ ' ', '\t' => return warn("{}⏎\n", .{line}), // Carriage return symbol,
- else => warn("{}\n", .{line}),
- }
+ else => {},
+ };
+ warn("{}\n", .{line});
}
test "" {
diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig
index fcbb090ace..a257b9e1c3 100644
--- a/lib/std/zig/ast.zig
+++ b/lib/std/zig/ast.zig
@@ -1,51 +1,50 @@
const std = @import("../std.zig");
const assert = std.debug.assert;
const testing = std.testing;
-const SegmentedList = std.SegmentedList;
const mem = std.mem;
const Token = std.zig.Token;
pub const TokenIndex = usize;
+pub const NodeIndex = usize;
pub const Tree = struct {
+ /// Reference to externally-owned data.
source: []const u8,
- tokens: TokenList,
-
- /// undefined on parse error (errors not empty)
+ token_ids: []const Token.Id,
+ token_locs: []const Token.Loc,
+ errors: []const Error,
+ /// undefined on parse error (when errors field is not empty)
root_node: *Node.Root,
- arena_allocator: std.heap.ArenaAllocator,
- errors: ErrorList,
+
+ arena: std.heap.ArenaAllocator.State,
+ gpa: *mem.Allocator,
/// translate-c uses this to avoid having to emit correct newlines
/// TODO get rid of this hack
generated: bool = false,
- pub const TokenList = SegmentedList(Token, 64);
- pub const ErrorList = SegmentedList(Error, 0);
-
pub fn deinit(self: *Tree) void {
- // Here we copy the arena allocator into stack memory, because
- // otherwise it would destroy itself while it was still working.
- var arena_allocator = self.arena_allocator;
- arena_allocator.deinit();
- // self is destroyed
+ self.gpa.free(self.token_ids);
+ self.gpa.free(self.token_locs);
+ self.gpa.free(self.errors);
+ self.arena.promote(self.gpa).deinit();
}
- pub fn renderError(self: *Tree, parse_error: *Error, stream: var) !void {
- return parse_error.render(&self.tokens, stream);
+ pub fn renderError(self: *Tree, parse_error: *const Error, stream: var) !void {
+ return parse_error.render(self.token_ids, stream);
}
pub fn tokenSlice(self: *Tree, token_index: TokenIndex) []const u8 {
- return self.tokenSlicePtr(self.tokens.at(token_index));
+ return self.tokenSliceLoc(self.token_locs[token_index]);
}
- pub fn tokenSlicePtr(self: *Tree, token: *const Token) []const u8 {
+ pub fn tokenSliceLoc(self: *Tree, token: Token.Loc) []const u8 {
return self.source[token.start..token.end];
}
pub fn getNodeSource(self: *const Tree, node: *const Node) []const u8 {
- const first_token = self.tokens.at(node.firstToken());
- const last_token = self.tokens.at(node.lastToken());
+ const first_token = self.token_locs[node.firstToken()];
+ const last_token = self.token_locs[node.lastToken()];
return self.source[first_token.start..last_token.end];
}
@@ -57,7 +56,7 @@ pub const Tree = struct {
};
/// Return the Location of the token relative to the offset specified by `start_index`.
- pub fn tokenLocationPtr(self: *Tree, start_index: usize, token: *const Token) Location {
+ pub fn tokenLocationLoc(self: *Tree, start_index: usize, token: Token.Loc) Location {
var loc = Location{
.line = 0,
.column = 0,
@@ -85,14 +84,14 @@ pub const Tree = struct {
}
pub fn tokenLocation(self: *Tree, start_index: usize, token_index: TokenIndex) Location {
- return self.tokenLocationPtr(start_index, self.tokens.at(token_index));
+ return self.tokenLocationLoc(start_index, self.token_locs[token_index]);
}
pub fn tokensOnSameLine(self: *Tree, token1_index: TokenIndex, token2_index: TokenIndex) bool {
- return self.tokensOnSameLinePtr(self.tokens.at(token1_index), self.tokens.at(token2_index));
+ return self.tokensOnSameLineLoc(self.token_locs[token1_index], self.token_locs[token2_index]);
}
- pub fn tokensOnSameLinePtr(self: *Tree, token1: *const Token, token2: *const Token) bool {
+ pub fn tokensOnSameLineLoc(self: *Tree, token1: Token.Loc, token2: Token.Loc) bool {
return mem.indexOfScalar(u8, self.source[token1.end..token2.start], '\n') == null;
}
@@ -103,7 +102,7 @@ pub const Tree = struct {
/// Skips over comments
pub fn prevToken(self: *Tree, token_index: TokenIndex) TokenIndex {
var index = token_index - 1;
- while (self.tokens.at(index).id == Token.Id.LineComment) {
+ while (self.token_ids[index] == Token.Id.LineComment) {
index -= 1;
}
return index;
@@ -112,7 +111,7 @@ pub const Tree = struct {
/// Skips over comments
pub fn nextToken(self: *Tree, token_index: TokenIndex) TokenIndex {
var index = token_index + 1;
- while (self.tokens.at(index).id == Token.Id.LineComment) {
+ while (self.token_ids[index] == Token.Id.LineComment) {
index += 1;
}
return index;
@@ -169,7 +168,7 @@ pub const Error = union(enum) {
DeclBetweenFields: DeclBetweenFields,
InvalidAnd: InvalidAnd,
- pub fn render(self: *const Error, tokens: *Tree.TokenList, stream: var) !void {
+ pub fn render(self: *const Error, tokens: []const Token.Id, stream: var) !void {
switch (self.*) {
.InvalidToken => |*x| return x.render(tokens, stream),
.ExpectedContainerMembers => |*x| return x.render(tokens, stream),
@@ -324,8 +323,8 @@ pub const Error = union(enum) {
pub const ExpectedCall = struct {
node: *Node,
- pub fn render(self: *const ExpectedCall, tokens: *Tree.TokenList, stream: var) !void {
- return stream.print("expected " ++ @tagName(@TagType(Node.SuffixOp.Op).Call) ++ ", found {}", .{
+ pub fn render(self: *const ExpectedCall, tokens: []const Token.Id, stream: var) !void {
+ return stream.print("expected " ++ @tagName(Node.Id.Call) ++ ", found {}", .{
@tagName(self.node.id),
});
}
@@ -334,8 +333,8 @@ pub const Error = union(enum) {
pub const ExpectedCallOrFnProto = struct {
node: *Node,
- pub fn render(self: *const ExpectedCallOrFnProto, tokens: *Tree.TokenList, stream: var) !void {
- return stream.print("expected " ++ @tagName(@TagType(Node.SuffixOp.Op).Call) ++ " or " ++
+ pub fn render(self: *const ExpectedCallOrFnProto, tokens: []const Token.Id, stream: var) !void {
+ return stream.print("expected " ++ @tagName(Node.Id.Call) ++ " or " ++
@tagName(Node.Id.FnProto) ++ ", found {}", .{@tagName(self.node.id)});
}
};
@@ -344,14 +343,14 @@ pub const Error = union(enum) {
token: TokenIndex,
expected_id: Token.Id,
- pub fn render(self: *const ExpectedToken, tokens: *Tree.TokenList, stream: var) !void {
- const found_token = tokens.at(self.token);
- switch (found_token.id) {
+ pub fn render(self: *const ExpectedToken, tokens: []const Token.Id, stream: var) !void {
+ const found_token = tokens[self.token];
+ switch (found_token) {
.Invalid => {
return stream.print("expected '{}', found invalid bytes", .{self.expected_id.symbol()});
},
else => {
- const token_name = found_token.id.symbol();
+ const token_name = found_token.symbol();
return stream.print("expected '{}', found '{}'", .{ self.expected_id.symbol(), token_name });
},
}
@@ -362,11 +361,11 @@ pub const Error = union(enum) {
token: TokenIndex,
end_id: Token.Id,
- pub fn render(self: *const ExpectedCommaOrEnd, tokens: *Tree.TokenList, stream: var) !void {
- const actual_token = tokens.at(self.token);
+ pub fn render(self: *const ExpectedCommaOrEnd, tokens: []const Token.Id, stream: var) !void {
+ const actual_token = tokens[self.token];
return stream.print("expected ',' or '{}', found '{}'", .{
self.end_id.symbol(),
- actual_token.id.symbol(),
+ actual_token.symbol(),
});
}
};
@@ -377,9 +376,9 @@ pub const Error = union(enum) {
token: TokenIndex,
- pub fn render(self: *const ThisError, tokens: *Tree.TokenList, stream: var) !void {
- const actual_token = tokens.at(self.token);
- return stream.print(msg, .{actual_token.id.symbol()});
+ pub fn render(self: *const ThisError, tokens: []const Token.Id, stream: var) !void {
+ const actual_token = tokens[self.token];
+ return stream.print(msg, .{actual_token.symbol()});
}
};
}
@@ -390,7 +389,7 @@ pub const Error = union(enum) {
token: TokenIndex,
- pub fn render(self: *const ThisError, tokens: *Tree.TokenList, stream: var) !void {
+ pub fn render(self: *const ThisError, tokens: []const Token.Id, stream: var) !void {
return stream.writeAll(msg);
}
};
@@ -413,7 +412,19 @@ pub const Node = struct {
// Operators
InfixOp,
PrefixOp,
+ /// Not all suffix operations are under this tag. To save memory, some
+ /// suffix operations have dedicated Node tags.
SuffixOp,
+ /// `T{a, b}`
+ ArrayInitializer,
+ /// ArrayInitializer but with `.` instead of a left-hand-side operand.
+ ArrayInitializerDot,
+ /// `T{.a = b}`
+ StructInitializer,
+ /// StructInitializer but with `.` instead of a left-hand-side operand.
+ StructInitializerDot,
+ /// `foo()`
+ Call,
// Control flow
Switch,
@@ -460,9 +471,6 @@ pub const Node = struct {
PointerIndexPayload,
ContainerField,
ErrorTag,
- AsmInput,
- AsmOutput,
- ParamDecl,
FieldInitializer,
};
@@ -518,7 +526,6 @@ pub const Node = struct {
switch (n.id) {
.Root,
.ContainerField,
- .ParamDecl,
.Block,
.Payload,
.PointerPayload,
@@ -602,26 +609,57 @@ pub const Node = struct {
}
}
+ /// The decls data follows this struct in memory as an array of Node pointers.
pub const Root = struct {
base: Node = Node{ .id = .Root },
- decls: DeclList,
eof_token: TokenIndex,
+ decls_len: NodeIndex,
- pub const DeclList = SegmentedList(*Node, 4);
+ /// After this the caller must initialize the decls list.
+ pub fn create(allocator: *mem.Allocator, decls_len: NodeIndex, eof_token: TokenIndex) !*Root {
+ const bytes = try allocator.alignedAlloc(u8, @alignOf(Root), sizeInBytes(decls_len));
+ const self = @ptrCast(*Root, bytes.ptr);
+ self.* = .{
+ .eof_token = eof_token,
+ .decls_len = decls_len,
+ };
+ return self;
+ }
- pub fn iterate(self: *Root, index: usize) ?*Node {
- if (index < self.decls.len) {
- return self.decls.at(index).*;
- }
+ pub fn destroy(self: *Root, allocator: *mem.Allocator) void {
+ const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.decls_len)];
+ allocator.free(bytes);
+ }
+
+ pub fn iterate(self: *const Root, index: usize) ?*Node {
+ var i = index;
+
+ if (i < self.decls_len) return self.declsConst()[i];
return null;
}
+ pub fn decls(self: *Root) []*Node {
+ const decls_start = @ptrCast([*]u8, self) + @sizeOf(Root);
+ return @ptrCast([*]*Node, decls_start)[0..self.decls_len];
+ }
+
+ pub fn declsConst(self: *const Root) []const *Node {
+ const decls_start = @ptrCast([*]const u8, self) + @sizeOf(Root);
+ return @ptrCast([*]const *Node, decls_start)[0..self.decls_len];
+ }
+
pub fn firstToken(self: *const Root) TokenIndex {
- return if (self.decls.len == 0) self.eof_token else (self.decls.at(0).*).firstToken();
+ if (self.decls_len == 0) return self.eof_token;
+ return self.declsConst()[0].firstToken();
}
pub fn lastToken(self: *const Root) TokenIndex {
- return if (self.decls.len == 0) self.eof_token else (self.decls.at(self.decls.len - 1).*).lastToken();
+ if (self.decls_len == 0) return self.eof_token;
+ return self.declsConst()[self.decls_len - 1].lastToken();
+ }
+
+ fn sizeInBytes(decls_len: NodeIndex) usize {
+ return @sizeOf(Root) + @sizeOf(*Node) * @as(usize, decls_len);
}
};
@@ -642,7 +680,7 @@ pub const Node = struct {
init_node: ?*Node,
semicolon_token: TokenIndex,
- pub fn iterate(self: *VarDecl, index: usize) ?*Node {
+ pub fn iterate(self: *const VarDecl, index: usize) ?*Node {
var i = index;
if (self.type_node) |type_node| {
@@ -668,6 +706,7 @@ pub const Node = struct {
return null;
}
+
pub fn firstToken(self: *const VarDecl) TokenIndex {
if (self.visib_token) |visib_token| return visib_token;
if (self.thread_local_token) |thread_local_token| return thread_local_token;
@@ -690,7 +729,7 @@ pub const Node = struct {
expr: *Node,
semicolon_token: TokenIndex,
- pub fn iterate(self: *Use, index: usize) ?*Node {
+ pub fn iterate(self: *const Use, index: usize) ?*Node {
var i = index;
if (i < 1) return self.expr;
@@ -712,16 +751,25 @@ pub const Node = struct {
pub const ErrorSetDecl = struct {
base: Node = Node{ .id = .ErrorSetDecl },
error_token: TokenIndex,
- decls: DeclList,
rbrace_token: TokenIndex,
+ decls_len: NodeIndex,
- pub const DeclList = SegmentedList(*Node, 2);
+ /// After this the caller must initialize the decls list.
+ pub fn alloc(allocator: *mem.Allocator, decls_len: NodeIndex) !*ErrorSetDecl {
+ const bytes = try allocator.alignedAlloc(u8, @alignOf(ErrorSetDecl), sizeInBytes(decls_len));
+ return @ptrCast(*ErrorSetDecl, bytes.ptr);
+ }
- pub fn iterate(self: *ErrorSetDecl, index: usize) ?*Node {
+ pub fn free(self: *ErrorSetDecl, allocator: *mem.Allocator) void {
+ const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.decls_len)];
+ allocator.free(bytes);
+ }
+
+ pub fn iterate(self: *const ErrorSetDecl, index: usize) ?*Node {
var i = index;
- if (i < self.decls.len) return self.decls.at(i).*;
- i -= self.decls.len;
+ if (i < self.decls_len) return self.declsConst()[i];
+ i -= self.decls_len;
return null;
}
@@ -733,18 +781,31 @@ pub const Node = struct {
pub fn lastToken(self: *const ErrorSetDecl) TokenIndex {
return self.rbrace_token;
}
+
+ pub fn decls(self: *ErrorSetDecl) []*Node {
+ const decls_start = @ptrCast([*]u8, self) + @sizeOf(ErrorSetDecl);
+ return @ptrCast([*]*Node, decls_start)[0..self.decls_len];
+ }
+
+ pub fn declsConst(self: *const ErrorSetDecl) []const *Node {
+ const decls_start = @ptrCast([*]const u8, self) + @sizeOf(ErrorSetDecl);
+ return @ptrCast([*]const *Node, decls_start)[0..self.decls_len];
+ }
+
+ fn sizeInBytes(decls_len: NodeIndex) usize {
+ return @sizeOf(ErrorSetDecl) + @sizeOf(*Node) * @as(usize, decls_len);
+ }
};
+ /// The fields and decls Node pointers directly follow this struct in memory.
pub const ContainerDecl = struct {
base: Node = Node{ .id = .ContainerDecl },
- layout_token: ?TokenIndex,
kind_token: TokenIndex,
- init_arg_expr: InitArg,
- fields_and_decls: DeclList,
+ layout_token: ?TokenIndex,
lbrace_token: TokenIndex,
rbrace_token: TokenIndex,
-
- pub const DeclList = Root.DeclList;
+ fields_and_decls_len: NodeIndex,
+ init_arg_expr: InitArg,
pub const InitArg = union(enum) {
None,
@@ -752,7 +813,18 @@ pub const Node = struct {
Type: *Node,
};
- pub fn iterate(self: *ContainerDecl, index: usize) ?*Node {
+ /// After this the caller must initialize the fields_and_decls list.
+ pub fn alloc(allocator: *mem.Allocator, fields_and_decls_len: NodeIndex) !*ContainerDecl {
+ const bytes = try allocator.alignedAlloc(u8, @alignOf(ContainerDecl), sizeInBytes(fields_and_decls_len));
+ return @ptrCast(*ContainerDecl, bytes.ptr);
+ }
+
+ pub fn free(self: *ContainerDecl, allocator: *mem.Allocator) void {
+ const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.fields_and_decls_len)];
+ allocator.free(bytes);
+ }
+
+ pub fn iterate(self: *const ContainerDecl, index: usize) ?*Node {
var i = index;
switch (self.init_arg_expr) {
@@ -763,8 +835,8 @@ pub const Node = struct {
.None, .Enum => {},
}
- if (i < self.fields_and_decls.len) return self.fields_and_decls.at(i).*;
- i -= self.fields_and_decls.len;
+ if (i < self.fields_and_decls_len) return self.fieldsAndDeclsConst()[i];
+ i -= self.fields_and_decls_len;
return null;
}
@@ -779,6 +851,20 @@ pub const Node = struct {
pub fn lastToken(self: *const ContainerDecl) TokenIndex {
return self.rbrace_token;
}
+
+ pub fn fieldsAndDecls(self: *ContainerDecl) []*Node {
+ const decls_start = @ptrCast([*]u8, self) + @sizeOf(ContainerDecl);
+ return @ptrCast([*]*Node, decls_start)[0..self.fields_and_decls_len];
+ }
+
+ pub fn fieldsAndDeclsConst(self: *const ContainerDecl) []const *Node {
+ const decls_start = @ptrCast([*]const u8, self) + @sizeOf(ContainerDecl);
+ return @ptrCast([*]const *Node, decls_start)[0..self.fields_and_decls_len];
+ }
+
+ fn sizeInBytes(fields_and_decls_len: NodeIndex) usize {
+ return @sizeOf(ContainerDecl) + @sizeOf(*Node) * @as(usize, fields_and_decls_len);
+ }
};
pub const ContainerField = struct {
@@ -790,7 +876,7 @@ pub const Node = struct {
value_expr: ?*Node,
align_expr: ?*Node,
- pub fn iterate(self: *ContainerField, index: usize) ?*Node {
+ pub fn iterate(self: *const ContainerField, index: usize) ?*Node {
var i = index;
if (self.type_expr) |type_expr| {
@@ -837,7 +923,7 @@ pub const Node = struct {
doc_comments: ?*DocComment,
name_token: TokenIndex,
- pub fn iterate(self: *ErrorTag, index: usize) ?*Node {
+ pub fn iterate(self: *const ErrorTag, index: usize) ?*Node {
var i = index;
if (self.doc_comments) |comments| {
@@ -861,7 +947,7 @@ pub const Node = struct {
base: Node = Node{ .id = .Identifier },
token: TokenIndex,
- pub fn iterate(self: *Identifier, index: usize) ?*Node {
+ pub fn iterate(self: *const Identifier, index: usize) ?*Node {
return null;
}
@@ -874,13 +960,14 @@ pub const Node = struct {
}
};
+ /// The params are directly after the FnProto in memory.
pub const FnProto = struct {
base: Node = Node{ .id = .FnProto },
doc_comments: ?*DocComment,
visib_token: ?TokenIndex,
fn_token: TokenIndex,
name_token: ?TokenIndex,
- params: ParamList,
+ params_len: NodeIndex,
return_type: ReturnType,
var_args_token: ?TokenIndex,
extern_export_inline_token: ?TokenIndex,
@@ -892,15 +979,69 @@ pub const Node = struct {
is_extern_prototype: bool = false, // TODO: Remove once extern fn rewriting is
is_async: bool = false, // TODO: remove once async fn rewriting is
- pub const ParamList = SegmentedList(*Node, 2);
-
pub const ReturnType = union(enum) {
Explicit: *Node,
InferErrorSet: *Node,
Invalid: TokenIndex,
};
- pub fn iterate(self: *FnProto, index: usize) ?*Node {
+ pub const ParamDecl = struct {
+ doc_comments: ?*DocComment,
+ comptime_token: ?TokenIndex,
+ noalias_token: ?TokenIndex,
+ name_token: ?TokenIndex,
+ param_type: ParamType,
+
+ pub const ParamType = union(enum) {
+ var_type: *Node,
+ var_args: TokenIndex,
+ type_expr: *Node,
+ };
+
+ pub fn iterate(self: *const ParamDecl, index: usize) ?*Node {
+ var i = index;
+
+ if (i < 1) {
+ switch (self.param_type) {
+ .var_args => return null,
+ .var_type, .type_expr => |node| return node,
+ }
+ }
+ i -= 1;
+
+ return null;
+ }
+
+ pub fn firstToken(self: *const ParamDecl) TokenIndex {
+ if (self.comptime_token) |comptime_token| return comptime_token;
+ if (self.noalias_token) |noalias_token| return noalias_token;
+ if (self.name_token) |name_token| return name_token;
+ switch (self.param_type) {
+ .var_args => |tok| return tok,
+ .var_type, .type_expr => |node| return node.firstToken(),
+ }
+ }
+
+ pub fn lastToken(self: *const ParamDecl) TokenIndex {
+ switch (self.param_type) {
+ .var_args => |tok| return tok,
+ .var_type, .type_expr => |node| return node.lastToken(),
+ }
+ }
+ };
+
+ /// After this the caller must initialize the params list.
+ pub fn alloc(allocator: *mem.Allocator, params_len: NodeIndex) !*FnProto {
+ const bytes = try allocator.alignedAlloc(u8, @alignOf(FnProto), sizeInBytes(params_len));
+ return @ptrCast(*FnProto, bytes.ptr);
+ }
+
+ pub fn free(self: *FnProto, allocator: *mem.Allocator) void {
+ const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.params_len)];
+ allocator.free(bytes);
+ }
+
+ pub fn iterate(self: *const FnProto, index: usize) ?*Node {
var i = index;
if (self.lib_name) |lib_name| {
@@ -908,8 +1049,18 @@ pub const Node = struct {
i -= 1;
}
- if (i < self.params.len) return self.params.at(self.params.len - i - 1).*;
- i -= self.params.len;
+ const params_len: NodeIndex = if (self.params_len == 0) 0 else switch (self.paramsConst()[self.params_len - 1].param_type) {
+ .var_type, .type_expr => self.params_len,
+ .var_args => self.params_len - 1,
+ };
+ if (i < params_len) {
+ switch (self.paramsConst()[i].param_type) {
+ .var_type => |n| return n,
+ .var_args => unreachable,
+ .type_expr => |n| return n,
+ }
+ }
+ i -= params_len;
if (self.align_expr) |align_expr| {
if (i < 1) return align_expr;
@@ -951,6 +1102,20 @@ pub const Node = struct {
.Invalid => |tok| return tok,
}
}
+
+ pub fn params(self: *FnProto) []ParamDecl {
+ const decls_start = @ptrCast([*]u8, self) + @sizeOf(FnProto);
+ return @ptrCast([*]ParamDecl, decls_start)[0..self.params_len];
+ }
+
+ pub fn paramsConst(self: *const FnProto) []const ParamDecl {
+ const decls_start = @ptrCast([*]const u8, self) + @sizeOf(FnProto);
+ return @ptrCast([*]const ParamDecl, decls_start)[0..self.params_len];
+ }
+
+ fn sizeInBytes(params_len: NodeIndex) usize {
+ return @sizeOf(FnProto) + @sizeOf(ParamDecl) * @as(usize, params_len);
+ }
};
pub const AnyFrameType = struct {
@@ -963,7 +1128,7 @@ pub const Node = struct {
return_type: *Node,
};
- pub fn iterate(self: *AnyFrameType, index: usize) ?*Node {
+ pub fn iterate(self: *const AnyFrameType, index: usize) ?*Node {
var i = index;
if (self.result) |result| {
@@ -984,66 +1149,30 @@ pub const Node = struct {
}
};
- pub const ParamDecl = struct {
- base: Node = Node{ .id = .ParamDecl },
- doc_comments: ?*DocComment,
- comptime_token: ?TokenIndex,
- noalias_token: ?TokenIndex,
- name_token: ?TokenIndex,
- param_type: ParamType,
-
- pub const ParamType = union(enum) {
- var_type: *Node,
- var_args: TokenIndex,
- type_expr: *Node,
- };
-
- pub fn iterate(self: *ParamDecl, index: usize) ?*Node {
- var i = index;
-
- if (i < 1) {
- switch (self.param_type) {
- .var_args => return null,
- .var_type, .type_expr => |node| return node,
- }
- }
- i -= 1;
-
- return null;
- }
-
- pub fn firstToken(self: *const ParamDecl) TokenIndex {
- if (self.comptime_token) |comptime_token| return comptime_token;
- if (self.noalias_token) |noalias_token| return noalias_token;
- if (self.name_token) |name_token| return name_token;
- switch (self.param_type) {
- .var_args => |tok| return tok,
- .var_type, .type_expr => |node| return node.firstToken(),
- }
- }
-
- pub fn lastToken(self: *const ParamDecl) TokenIndex {
- switch (self.param_type) {
- .var_args => |tok| return tok,
- .var_type, .type_expr => |node| return node.lastToken(),
- }
- }
- };
-
+ /// The statements of the block follow Block directly in memory.
pub const Block = struct {
base: Node = Node{ .id = .Block },
- label: ?TokenIndex,
+ statements_len: NodeIndex,
lbrace: TokenIndex,
- statements: StatementList,
rbrace: TokenIndex,
+ label: ?TokenIndex,
- pub const StatementList = Root.DeclList;
+ /// After this the caller must initialize the statements list.
+ pub fn alloc(allocator: *mem.Allocator, statements_len: NodeIndex) !*Block {
+ const bytes = try allocator.alignedAlloc(u8, @alignOf(Block), sizeInBytes(statements_len));
+ return @ptrCast(*Block, bytes.ptr);
+ }
- pub fn iterate(self: *Block, index: usize) ?*Node {
+ pub fn free(self: *Block, allocator: *mem.Allocator) void {
+ const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.statements_len)];
+ allocator.free(bytes);
+ }
+
+ pub fn iterate(self: *const Block, index: usize) ?*Node {
var i = index;
- if (i < self.statements.len) return self.statements.at(i).*;
- i -= self.statements.len;
+ if (i < self.statements_len) return self.statementsConst()[i];
+ i -= self.statements_len;
return null;
}
@@ -1059,6 +1188,20 @@ pub const Node = struct {
pub fn lastToken(self: *const Block) TokenIndex {
return self.rbrace;
}
+
+ pub fn statements(self: *Block) []*Node {
+ const decls_start = @ptrCast([*]u8, self) + @sizeOf(Block);
+ return @ptrCast([*]*Node, decls_start)[0..self.statements_len];
+ }
+
+ pub fn statementsConst(self: *const Block) []const *Node {
+ const decls_start = @ptrCast([*]const u8, self) + @sizeOf(Block);
+ return @ptrCast([*]const *Node, decls_start)[0..self.statements_len];
+ }
+
+ fn sizeInBytes(statements_len: NodeIndex) usize {
+ return @sizeOf(Block) + @sizeOf(*Node) * @as(usize, statements_len);
+ }
};
pub const Defer = struct {
@@ -1067,7 +1210,7 @@ pub const Node = struct {
payload: ?*Node,
expr: *Node,
- pub fn iterate(self: *Defer, index: usize) ?*Node {
+ pub fn iterate(self: *const Defer, index: usize) ?*Node {
var i = index;
if (i < 1) return self.expr;
@@ -1091,7 +1234,7 @@ pub const Node = struct {
comptime_token: TokenIndex,
expr: *Node,
- pub fn iterate(self: *Comptime, index: usize) ?*Node {
+ pub fn iterate(self: *const Comptime, index: usize) ?*Node {
var i = index;
if (i < 1) return self.expr;
@@ -1114,7 +1257,7 @@ pub const Node = struct {
nosuspend_token: TokenIndex,
expr: *Node,
- pub fn iterate(self: *Nosuspend, index: usize) ?*Node {
+ pub fn iterate(self: *const Nosuspend, index: usize) ?*Node {
var i = index;
if (i < 1) return self.expr;
@@ -1138,7 +1281,7 @@ pub const Node = struct {
error_symbol: *Node,
rpipe: TokenIndex,
- pub fn iterate(self: *Payload, index: usize) ?*Node {
+ pub fn iterate(self: *const Payload, index: usize) ?*Node {
var i = index;
if (i < 1) return self.error_symbol;
@@ -1163,7 +1306,7 @@ pub const Node = struct {
value_symbol: *Node,
rpipe: TokenIndex,
- pub fn iterate(self: *PointerPayload, index: usize) ?*Node {
+ pub fn iterate(self: *const PointerPayload, index: usize) ?*Node {
var i = index;
if (i < 1) return self.value_symbol;
@@ -1189,7 +1332,7 @@ pub const Node = struct {
index_symbol: ?*Node,
rpipe: TokenIndex,
- pub fn iterate(self: *PointerIndexPayload, index: usize) ?*Node {
+ pub fn iterate(self: *const PointerIndexPayload, index: usize) ?*Node {
var i = index;
if (i < 1) return self.value_symbol;
@@ -1218,7 +1361,7 @@ pub const Node = struct {
payload: ?*Node,
body: *Node,
- pub fn iterate(self: *Else, index: usize) ?*Node {
+ pub fn iterate(self: *const Else, index: usize) ?*Node {
var i = index;
if (self.payload) |payload| {
@@ -1241,25 +1384,34 @@ pub const Node = struct {
}
};
+ /// The cases node pointers are found in memory after Switch.
+ /// They must be SwitchCase or SwitchElse nodes.
pub const Switch = struct {
base: Node = Node{ .id = .Switch },
switch_token: TokenIndex,
+ rbrace: TokenIndex,
+ cases_len: NodeIndex,
expr: *Node,
- /// these must be SwitchCase nodes
- cases: CaseList,
- rbrace: TokenIndex,
+ /// After this the caller must initialize the cases list.
+ pub fn alloc(allocator: *mem.Allocator, cases_len: NodeIndex) !*Switch {
+ const bytes = try allocator.alignedAlloc(u8, @alignOf(Switch), sizeInBytes(cases_len));
+ return @ptrCast(*Switch, bytes.ptr);
+ }
- pub const CaseList = SegmentedList(*Node, 2);
+ pub fn free(self: *Switch, allocator: *mem.Allocator) void {
+ const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.cases_len)];
+ allocator.free(bytes);
+ }
- pub fn iterate(self: *Switch, index: usize) ?*Node {
+ pub fn iterate(self: *const Switch, index: usize) ?*Node {
var i = index;
if (i < 1) return self.expr;
i -= 1;
- if (i < self.cases.len) return self.cases.at(i).*;
- i -= self.cases.len;
+ if (i < self.cases_len) return self.casesConst()[i];
+ i -= self.cases_len;
return null;
}
@@ -1271,22 +1423,46 @@ pub const Node = struct {
pub fn lastToken(self: *const Switch) TokenIndex {
return self.rbrace;
}
+
+ pub fn cases(self: *Switch) []*Node {
+ const decls_start = @ptrCast([*]u8, self) + @sizeOf(Switch);
+ return @ptrCast([*]*Node, decls_start)[0..self.cases_len];
+ }
+
+ pub fn casesConst(self: *const Switch) []const *Node {
+ const decls_start = @ptrCast([*]const u8, self) + @sizeOf(Switch);
+ return @ptrCast([*]const *Node, decls_start)[0..self.cases_len];
+ }
+
+ fn sizeInBytes(cases_len: NodeIndex) usize {
+ return @sizeOf(Switch) + @sizeOf(*Node) * @as(usize, cases_len);
+ }
};
+ /// Items sub-nodes appear in memory directly following SwitchCase.
pub const SwitchCase = struct {
base: Node = Node{ .id = .SwitchCase },
- items: ItemList,
arrow_token: TokenIndex,
payload: ?*Node,
expr: *Node,
+ items_len: NodeIndex,
- pub const ItemList = SegmentedList(*Node, 1);
+ /// After this the caller must initialize the items list.
+ pub fn alloc(allocator: *mem.Allocator, items_len: NodeIndex) !*SwitchCase {
+ const bytes = try allocator.alignedAlloc(u8, @alignOf(SwitchCase), sizeInBytes(items_len));
+ return @ptrCast(*SwitchCase, bytes.ptr);
+ }
- pub fn iterate(self: *SwitchCase, index: usize) ?*Node {
+ pub fn free(self: *SwitchCase, allocator: *mem.Allocator) void {
+ const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.items_len)];
+ allocator.free(bytes);
+ }
+
+ pub fn iterate(self: *const SwitchCase, index: usize) ?*Node {
var i = index;
- if (i < self.items.len) return self.items.at(i).*;
- i -= self.items.len;
+ if (i < self.items_len) return self.itemsConst()[i];
+ i -= self.items_len;
if (self.payload) |payload| {
if (i < 1) return payload;
@@ -1300,19 +1476,33 @@ pub const Node = struct {
}
pub fn firstToken(self: *const SwitchCase) TokenIndex {
- return (self.items.at(0).*).firstToken();
+ return self.itemsConst()[0].firstToken();
}
pub fn lastToken(self: *const SwitchCase) TokenIndex {
return self.expr.lastToken();
}
+
+ pub fn items(self: *SwitchCase) []*Node {
+ const decls_start = @ptrCast([*]u8, self) + @sizeOf(SwitchCase);
+ return @ptrCast([*]*Node, decls_start)[0..self.items_len];
+ }
+
+ pub fn itemsConst(self: *const SwitchCase) []const *Node {
+ const decls_start = @ptrCast([*]const u8, self) + @sizeOf(SwitchCase);
+ return @ptrCast([*]const *Node, decls_start)[0..self.items_len];
+ }
+
+ fn sizeInBytes(items_len: NodeIndex) usize {
+ return @sizeOf(SwitchCase) + @sizeOf(*Node) * @as(usize, items_len);
+ }
};
pub const SwitchElse = struct {
base: Node = Node{ .id = .SwitchElse },
token: TokenIndex,
- pub fn iterate(self: *SwitchElse, index: usize) ?*Node {
+ pub fn iterate(self: *const SwitchElse, index: usize) ?*Node {
return null;
}
@@ -1336,7 +1526,7 @@ pub const Node = struct {
body: *Node,
@"else": ?*Else,
- pub fn iterate(self: *While, index: usize) ?*Node {
+ pub fn iterate(self: *const While, index: usize) ?*Node {
var i = index;
if (i < 1) return self.condition;
@@ -1394,7 +1584,7 @@ pub const Node = struct {
body: *Node,
@"else": ?*Else,
- pub fn iterate(self: *For, index: usize) ?*Node {
+ pub fn iterate(self: *const For, index: usize) ?*Node {
var i = index;
if (i < 1) return self.array_expr;
@@ -1443,7 +1633,7 @@ pub const Node = struct {
body: *Node,
@"else": ?*Else,
- pub fn iterate(self: *If, index: usize) ?*Node {
+ pub fn iterate(self: *const If, index: usize) ?*Node {
var i = index;
if (i < 1) return self.condition;
@@ -1531,7 +1721,7 @@ pub const Node = struct {
UnwrapOptional,
};
- pub fn iterate(self: *InfixOp, index: usize) ?*Node {
+ pub fn iterate(self: *const InfixOp, index: usize) ?*Node {
var i = index;
if (i < 1) return self.lhs;
@@ -1649,7 +1839,7 @@ pub const Node = struct {
};
};
- pub fn iterate(self: *PrefixOp, index: usize) ?*Node {
+ pub fn iterate(self: *const PrefixOp, index: usize) ?*Node {
var i = index;
switch (self.op) {
@@ -1707,7 +1897,7 @@ pub const Node = struct {
name_token: TokenIndex,
expr: *Node,
- pub fn iterate(self: *FieldInitializer, index: usize) ?*Node {
+ pub fn iterate(self: *const FieldInitializer, index: usize) ?*Node {
var i = index;
if (i < 1) return self.expr;
@@ -1725,35 +1915,279 @@ pub const Node = struct {
}
};
+ /// Elements occur directly in memory after ArrayInitializer.
+ pub const ArrayInitializer = struct {
+ base: Node = Node{ .id = .ArrayInitializer },
+ rtoken: TokenIndex,
+ list_len: NodeIndex,
+ lhs: *Node,
+
+ /// After this the caller must initialize the list.
+ pub fn alloc(allocator: *mem.Allocator, list_len: NodeIndex) !*ArrayInitializer {
+ const bytes = try allocator.alignedAlloc(u8, @alignOf(ArrayInitializer), sizeInBytes(list_len));
+ return @ptrCast(*ArrayInitializer, bytes.ptr);
+ }
+
+ pub fn free(self: *ArrayInitializer, allocator: *mem.Allocator) void {
+ const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.list_len)];
+ allocator.free(bytes);
+ }
+
+ pub fn iterate(self: *const ArrayInitializer, index: usize) ?*Node {
+ var i = index;
+
+ if (i < 1) return self.lhs;
+ i -= 1;
+
+ if (i < self.list_len) return self.listConst()[i];
+ i -= self.list_len;
+
+ return null;
+ }
+
+ pub fn firstToken(self: *const ArrayInitializer) TokenIndex {
+ return self.lhs.firstToken();
+ }
+
+ pub fn lastToken(self: *const ArrayInitializer) TokenIndex {
+ return self.rtoken;
+ }
+
+ pub fn list(self: *ArrayInitializer) []*Node {
+ const decls_start = @ptrCast([*]u8, self) + @sizeOf(ArrayInitializer);
+ return @ptrCast([*]*Node, decls_start)[0..self.list_len];
+ }
+
+ pub fn listConst(self: *const ArrayInitializer) []const *Node {
+ const decls_start = @ptrCast([*]const u8, self) + @sizeOf(ArrayInitializer);
+ return @ptrCast([*]const *Node, decls_start)[0..self.list_len];
+ }
+
+ fn sizeInBytes(list_len: NodeIndex) usize {
+ return @sizeOf(ArrayInitializer) + @sizeOf(*Node) * @as(usize, list_len);
+ }
+ };
+
+ /// Elements occur directly in memory after ArrayInitializerDot.
+ pub const ArrayInitializerDot = struct {
+ base: Node = Node{ .id = .ArrayInitializerDot },
+ dot: TokenIndex,
+ rtoken: TokenIndex,
+ list_len: NodeIndex,
+
+ /// After this the caller must initialize the list.
+ pub fn alloc(allocator: *mem.Allocator, list_len: NodeIndex) !*ArrayInitializerDot {
+ const bytes = try allocator.alignedAlloc(u8, @alignOf(ArrayInitializerDot), sizeInBytes(list_len));
+ return @ptrCast(*ArrayInitializerDot, bytes.ptr);
+ }
+
+ pub fn free(self: *ArrayInitializerDot, allocator: *mem.Allocator) void {
+ const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.list_len)];
+ allocator.free(bytes);
+ }
+
+ pub fn iterate(self: *const ArrayInitializerDot, index: usize) ?*Node {
+ var i = index;
+
+ if (i < self.list_len) return self.listConst()[i];
+ i -= self.list_len;
+
+ return null;
+ }
+
+ pub fn firstToken(self: *const ArrayInitializerDot) TokenIndex {
+ return self.dot;
+ }
+
+ pub fn lastToken(self: *const ArrayInitializerDot) TokenIndex {
+ return self.rtoken;
+ }
+
+ pub fn list(self: *ArrayInitializerDot) []*Node {
+ const decls_start = @ptrCast([*]u8, self) + @sizeOf(ArrayInitializerDot);
+ return @ptrCast([*]*Node, decls_start)[0..self.list_len];
+ }
+
+ pub fn listConst(self: *const ArrayInitializerDot) []const *Node {
+ const decls_start = @ptrCast([*]const u8, self) + @sizeOf(ArrayInitializerDot);
+ return @ptrCast([*]const *Node, decls_start)[0..self.list_len];
+ }
+
+ fn sizeInBytes(list_len: NodeIndex) usize {
+ return @sizeOf(ArrayInitializerDot) + @sizeOf(*Node) * @as(usize, list_len);
+ }
+ };
+
+ /// Elements occur directly in memory after StructInitializer.
+ pub const StructInitializer = struct {
+ base: Node = Node{ .id = .StructInitializer },
+ rtoken: TokenIndex,
+ list_len: NodeIndex,
+ lhs: *Node,
+
+ /// After this the caller must initialize the list.
+ pub fn alloc(allocator: *mem.Allocator, list_len: NodeIndex) !*StructInitializer {
+ const bytes = try allocator.alignedAlloc(u8, @alignOf(StructInitializer), sizeInBytes(list_len));
+ return @ptrCast(*StructInitializer, bytes.ptr);
+ }
+
+ pub fn free(self: *StructInitializer, allocator: *mem.Allocator) void {
+ const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.list_len)];
+ allocator.free(bytes);
+ }
+
+ pub fn iterate(self: *const StructInitializer, index: usize) ?*Node {
+ var i = index;
+
+ if (i < 1) return self.lhs;
+ i -= 1;
+
+ if (i < self.list_len) return self.listConst()[i];
+ i -= self.list_len;
+
+ return null;
+ }
+
+ pub fn firstToken(self: *const StructInitializer) TokenIndex {
+ return self.lhs.firstToken();
+ }
+
+ pub fn lastToken(self: *const StructInitializer) TokenIndex {
+ return self.rtoken;
+ }
+
+ pub fn list(self: *StructInitializer) []*Node {
+ const decls_start = @ptrCast([*]u8, self) + @sizeOf(StructInitializer);
+ return @ptrCast([*]*Node, decls_start)[0..self.list_len];
+ }
+
+ pub fn listConst(self: *const StructInitializer) []const *Node {
+ const decls_start = @ptrCast([*]const u8, self) + @sizeOf(StructInitializer);
+ return @ptrCast([*]const *Node, decls_start)[0..self.list_len];
+ }
+
+ fn sizeInBytes(list_len: NodeIndex) usize {
+ return @sizeOf(StructInitializer) + @sizeOf(*Node) * @as(usize, list_len);
+ }
+ };
+
+ /// Elements occur directly in memory after StructInitializerDot.
+ pub const StructInitializerDot = struct {
+ base: Node = Node{ .id = .StructInitializerDot },
+ dot: TokenIndex,
+ rtoken: TokenIndex,
+ list_len: NodeIndex,
+
+ /// After this the caller must initialize the list.
+ pub fn alloc(allocator: *mem.Allocator, list_len: NodeIndex) !*StructInitializerDot {
+ const bytes = try allocator.alignedAlloc(u8, @alignOf(StructInitializerDot), sizeInBytes(list_len));
+ return @ptrCast(*StructInitializerDot, bytes.ptr);
+ }
+
+ pub fn free(self: *StructInitializerDot, allocator: *mem.Allocator) void {
+ const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.list_len)];
+ allocator.free(bytes);
+ }
+
+ pub fn iterate(self: *const StructInitializerDot, index: usize) ?*Node {
+ var i = index;
+
+ if (i < self.list_len) return self.listConst()[i];
+ i -= self.list_len;
+
+ return null;
+ }
+
+ pub fn firstToken(self: *const StructInitializerDot) TokenIndex {
+ return self.dot;
+ }
+
+ pub fn lastToken(self: *const StructInitializerDot) TokenIndex {
+ return self.rtoken;
+ }
+
+ pub fn list(self: *StructInitializerDot) []*Node {
+ const decls_start = @ptrCast([*]u8, self) + @sizeOf(StructInitializerDot);
+ return @ptrCast([*]*Node, decls_start)[0..self.list_len];
+ }
+
+ pub fn listConst(self: *const StructInitializerDot) []const *Node {
+ const decls_start = @ptrCast([*]const u8, self) + @sizeOf(StructInitializerDot);
+ return @ptrCast([*]const *Node, decls_start)[0..self.list_len];
+ }
+
+ fn sizeInBytes(list_len: NodeIndex) usize {
+ return @sizeOf(StructInitializerDot) + @sizeOf(*Node) * @as(usize, list_len);
+ }
+ };
+
+ /// Parameter nodes directly follow Call in memory.
+ pub const Call = struct {
+ base: Node = Node{ .id = .Call },
+ lhs: *Node,
+ rtoken: TokenIndex,
+ params_len: NodeIndex,
+ async_token: ?TokenIndex,
+
+ /// After this the caller must initialize the params list.
+ pub fn alloc(allocator: *mem.Allocator, params_len: NodeIndex) !*Call {
+ const bytes = try allocator.alignedAlloc(u8, @alignOf(Call), sizeInBytes(params_len));
+ return @ptrCast(*Call, bytes.ptr);
+ }
+
+ pub fn free(self: *Call, allocator: *mem.Allocator) void {
+ const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.params_len)];
+ allocator.free(bytes);
+ }
+
+ pub fn iterate(self: *const Call, index: usize) ?*Node {
+ var i = index;
+
+ if (i < 1) return self.lhs;
+ i -= 1;
+
+ if (i < self.params_len) return self.paramsConst()[i];
+ i -= self.params_len;
+
+ return null;
+ }
+
+ pub fn firstToken(self: *const Call) TokenIndex {
+ if (self.async_token) |async_token| return async_token;
+ return self.lhs.firstToken();
+ }
+
+ pub fn lastToken(self: *const Call) TokenIndex {
+ return self.rtoken;
+ }
+
+ pub fn params(self: *Call) []*Node {
+ const decls_start = @ptrCast([*]u8, self) + @sizeOf(Call);
+ return @ptrCast([*]*Node, decls_start)[0..self.params_len];
+ }
+
+ pub fn paramsConst(self: *const Call) []const *Node {
+ const decls_start = @ptrCast([*]const u8, self) + @sizeOf(Call);
+ return @ptrCast([*]const *Node, decls_start)[0..self.params_len];
+ }
+
+ fn sizeInBytes(params_len: NodeIndex) usize {
+ return @sizeOf(Call) + @sizeOf(*Node) * @as(usize, params_len);
+ }
+ };
+
pub const SuffixOp = struct {
base: Node = Node{ .id = .SuffixOp },
- lhs: Lhs,
op: Op,
+ lhs: *Node,
rtoken: TokenIndex,
- pub const Lhs = union(enum) {
- node: *Node,
- dot: TokenIndex,
- };
-
pub const Op = union(enum) {
- Call: Call,
ArrayAccess: *Node,
Slice: Slice,
- ArrayInitializer: InitList,
- StructInitializer: InitList,
Deref,
UnwrapOptional,
- pub const InitList = SegmentedList(*Node, 2);
-
- pub const Call = struct {
- params: ParamList,
- async_token: ?TokenIndex,
-
- pub const ParamList = SegmentedList(*Node, 2);
- };
-
pub const Slice = struct {
start: *Node,
end: ?*Node,
@@ -1761,22 +2195,13 @@ pub const Node = struct {
};
};
- pub fn iterate(self: *SuffixOp, index: usize) ?*Node {
+ pub fn iterate(self: *const SuffixOp, index: usize) ?*Node {
var i = index;
- switch (self.lhs) {
- .node => |node| {
- if (i == 0) return node;
- i -= 1;
- },
- .dot => {},
- }
+ if (i == 0) return self.lhs;
+ i -= 1;
switch (self.op) {
- .Call => |*call_info| {
- if (i < call_info.params.len) return call_info.params.at(i).*;
- i -= call_info.params.len;
- },
.ArrayAccess => |index_expr| {
if (i < 1) return index_expr;
i -= 1;
@@ -1794,14 +2219,6 @@ pub const Node = struct {
i -= 1;
}
},
- .ArrayInitializer => |*exprs| {
- if (i < exprs.len) return exprs.at(i).*;
- i -= exprs.len;
- },
- .StructInitializer => |*fields| {
- if (i < fields.len) return fields.at(i).*;
- i -= fields.len;
- },
.UnwrapOptional,
.Deref,
=> {},
@@ -1811,14 +2228,7 @@ pub const Node = struct {
}
pub fn firstToken(self: *const SuffixOp) TokenIndex {
- switch (self.op) {
- .Call => |*call_info| if (call_info.async_token) |async_token| return async_token,
- else => {},
- }
- switch (self.lhs) {
- .node => |node| return node.firstToken(),
- .dot => |dot| return dot,
- }
+ return self.lhs.firstToken();
}
pub fn lastToken(self: *const SuffixOp) TokenIndex {
@@ -1832,7 +2242,7 @@ pub const Node = struct {
expr: *Node,
rparen: TokenIndex,
- pub fn iterate(self: *GroupedExpression, index: usize) ?*Node {
+ pub fn iterate(self: *const GroupedExpression, index: usize) ?*Node {
var i = index;
if (i < 1) return self.expr;
@@ -1862,7 +2272,7 @@ pub const Node = struct {
Return,
};
- pub fn iterate(self: *ControlFlowExpression, index: usize) ?*Node {
+ pub fn iterate(self: *const ControlFlowExpression, index: usize) ?*Node {
var i = index;
switch (self.kind) {
@@ -1910,7 +2320,7 @@ pub const Node = struct {
suspend_token: TokenIndex,
body: ?*Node,
- pub fn iterate(self: *Suspend, index: usize) ?*Node {
+ pub fn iterate(self: *const Suspend, index: usize) ?*Node {
var i = index;
if (self.body) |body| {
@@ -1938,7 +2348,7 @@ pub const Node = struct {
base: Node = Node{ .id = .IntegerLiteral },
token: TokenIndex,
- pub fn iterate(self: *IntegerLiteral, index: usize) ?*Node {
+ pub fn iterate(self: *const IntegerLiteral, index: usize) ?*Node {
return null;
}
@@ -1956,7 +2366,7 @@ pub const Node = struct {
dot: TokenIndex,
name: TokenIndex,
- pub fn iterate(self: *EnumLiteral, index: usize) ?*Node {
+ pub fn iterate(self: *const EnumLiteral, index: usize) ?*Node {
return null;
}
@@ -1973,7 +2383,7 @@ pub const Node = struct {
base: Node = Node{ .id = .FloatLiteral },
token: TokenIndex,
- pub fn iterate(self: *FloatLiteral, index: usize) ?*Node {
+ pub fn iterate(self: *const FloatLiteral, index: usize) ?*Node {
return null;
}
@@ -1986,19 +2396,29 @@ pub const Node = struct {
}
};
+ /// Parameters are in memory following BuiltinCall.
pub const BuiltinCall = struct {
base: Node = Node{ .id = .BuiltinCall },
+ params_len: NodeIndex,
builtin_token: TokenIndex,
- params: ParamList,
rparen_token: TokenIndex,
- pub const ParamList = SegmentedList(*Node, 2);
+ /// After this the caller must initialize the params list.
+ pub fn alloc(allocator: *mem.Allocator, params_len: NodeIndex) !*BuiltinCall {
+ const bytes = try allocator.alignedAlloc(u8, @alignOf(BuiltinCall), sizeInBytes(params_len));
+ return @ptrCast(*BuiltinCall, bytes.ptr);
+ }
- pub fn iterate(self: *BuiltinCall, index: usize) ?*Node {
+ pub fn free(self: *BuiltinCall, allocator: *mem.Allocator) void {
+ const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.params_len)];
+ allocator.free(bytes);
+ }
+
+ pub fn iterate(self: *const BuiltinCall, index: usize) ?*Node {
var i = index;
- if (i < self.params.len) return self.params.at(i).*;
- i -= self.params.len;
+ if (i < self.params_len) return self.paramsConst()[i];
+ i -= self.params_len;
return null;
}
@@ -2010,13 +2430,27 @@ pub const Node = struct {
pub fn lastToken(self: *const BuiltinCall) TokenIndex {
return self.rparen_token;
}
+
+ pub fn params(self: *BuiltinCall) []*Node {
+ const decls_start = @ptrCast([*]u8, self) + @sizeOf(BuiltinCall);
+ return @ptrCast([*]*Node, decls_start)[0..self.params_len];
+ }
+
+ pub fn paramsConst(self: *const BuiltinCall) []const *Node {
+ const decls_start = @ptrCast([*]const u8, self) + @sizeOf(BuiltinCall);
+ return @ptrCast([*]const *Node, decls_start)[0..self.params_len];
+ }
+
+ fn sizeInBytes(params_len: NodeIndex) usize {
+ return @sizeOf(BuiltinCall) + @sizeOf(*Node) * @as(usize, params_len);
+ }
};
pub const StringLiteral = struct {
base: Node = Node{ .id = .StringLiteral },
token: TokenIndex,
- pub fn iterate(self: *StringLiteral, index: usize) ?*Node {
+ pub fn iterate(self: *const StringLiteral, index: usize) ?*Node {
return null;
}
@@ -2029,22 +2463,46 @@ pub const Node = struct {
}
};
+ /// The string literal tokens appear directly in memory after MultilineStringLiteral.
pub const MultilineStringLiteral = struct {
base: Node = Node{ .id = .MultilineStringLiteral },
- lines: LineList,
+ lines_len: TokenIndex,
- pub const LineList = SegmentedList(TokenIndex, 4);
+ /// After this the caller must initialize the lines list.
+ pub fn alloc(allocator: *mem.Allocator, lines_len: NodeIndex) !*MultilineStringLiteral {
+ const bytes = try allocator.alignedAlloc(u8, @alignOf(MultilineStringLiteral), sizeInBytes(lines_len));
+ return @ptrCast(*MultilineStringLiteral, bytes.ptr);
+ }
- pub fn iterate(self: *MultilineStringLiteral, index: usize) ?*Node {
+ pub fn free(self: *MultilineStringLiteral, allocator: *mem.Allocator) void {
+ const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.lines_len)];
+ allocator.free(bytes);
+ }
+
+ pub fn iterate(self: *const MultilineStringLiteral, index: usize) ?*Node {
return null;
}
pub fn firstToken(self: *const MultilineStringLiteral) TokenIndex {
- return self.lines.at(0).*;
+ return self.linesConst()[0];
}
pub fn lastToken(self: *const MultilineStringLiteral) TokenIndex {
- return self.lines.at(self.lines.len - 1).*;
+ return self.linesConst()[self.lines_len - 1];
+ }
+
+ pub fn lines(self: *MultilineStringLiteral) []TokenIndex {
+ const decls_start = @ptrCast([*]u8, self) + @sizeOf(MultilineStringLiteral);
+ return @ptrCast([*]TokenIndex, decls_start)[0..self.lines_len];
+ }
+
+ pub fn linesConst(self: *const MultilineStringLiteral) []const TokenIndex {
+ const decls_start = @ptrCast([*]const u8, self) + @sizeOf(MultilineStringLiteral);
+ return @ptrCast([*]const TokenIndex, decls_start)[0..self.lines_len];
+ }
+
+ fn sizeInBytes(lines_len: NodeIndex) usize {
+ return @sizeOf(MultilineStringLiteral) + @sizeOf(TokenIndex) * @as(usize, lines_len);
}
};
@@ -2052,7 +2510,7 @@ pub const Node = struct {
base: Node = Node{ .id = .CharLiteral },
token: TokenIndex,
- pub fn iterate(self: *CharLiteral, index: usize) ?*Node {
+ pub fn iterate(self: *const CharLiteral, index: usize) ?*Node {
return null;
}
@@ -2069,7 +2527,7 @@ pub const Node = struct {
base: Node = Node{ .id = .BoolLiteral },
token: TokenIndex,
- pub fn iterate(self: *BoolLiteral, index: usize) ?*Node {
+ pub fn iterate(self: *const BoolLiteral, index: usize) ?*Node {
return null;
}
@@ -2086,7 +2544,7 @@ pub const Node = struct {
base: Node = Node{ .id = .NullLiteral },
token: TokenIndex,
- pub fn iterate(self: *NullLiteral, index: usize) ?*Node {
+ pub fn iterate(self: *const NullLiteral, index: usize) ?*Node {
return null;
}
@@ -2103,7 +2561,7 @@ pub const Node = struct {
base: Node = Node{ .id = .UndefinedLiteral },
token: TokenIndex,
- pub fn iterate(self: *UndefinedLiteral, index: usize) ?*Node {
+ pub fn iterate(self: *const UndefinedLiteral, index: usize) ?*Node {
return null;
}
@@ -2116,105 +2574,114 @@ pub const Node = struct {
}
};
- pub const AsmOutput = struct {
- base: Node = Node{ .id = .AsmOutput },
- lbracket: TokenIndex,
- symbolic_name: *Node,
- constraint: *Node,
- kind: Kind,
- rparen: TokenIndex,
-
- pub const Kind = union(enum) {
- Variable: *Identifier,
- Return: *Node,
- };
-
- pub fn iterate(self: *AsmOutput, index: usize) ?*Node {
- var i = index;
-
- if (i < 1) return self.symbolic_name;
- i -= 1;
-
- if (i < 1) return self.constraint;
- i -= 1;
-
- switch (self.kind) {
- .Variable => |variable_name| {
- if (i < 1) return &variable_name.base;
- i -= 1;
- },
- .Return => |return_type| {
- if (i < 1) return return_type;
- i -= 1;
- },
- }
-
- return null;
- }
-
- pub fn firstToken(self: *const AsmOutput) TokenIndex {
- return self.lbracket;
- }
-
- pub fn lastToken(self: *const AsmOutput) TokenIndex {
- return self.rparen;
- }
- };
-
- pub const AsmInput = struct {
- base: Node = Node{ .id = .AsmInput },
- lbracket: TokenIndex,
- symbolic_name: *Node,
- constraint: *Node,
- expr: *Node,
- rparen: TokenIndex,
-
- pub fn iterate(self: *AsmInput, index: usize) ?*Node {
- var i = index;
-
- if (i < 1) return self.symbolic_name;
- i -= 1;
-
- if (i < 1) return self.constraint;
- i -= 1;
-
- if (i < 1) return self.expr;
- i -= 1;
-
- return null;
- }
-
- pub fn firstToken(self: *const AsmInput) TokenIndex {
- return self.lbracket;
- }
-
- pub fn lastToken(self: *const AsmInput) TokenIndex {
- return self.rparen;
- }
- };
-
pub const Asm = struct {
base: Node = Node{ .id = .Asm },
asm_token: TokenIndex,
+ rparen: TokenIndex,
volatile_token: ?TokenIndex,
template: *Node,
- outputs: OutputList,
- inputs: InputList,
- clobbers: ClobberList,
- rparen: TokenIndex,
+ outputs: []Output,
+ inputs: []Input,
+ /// A clobber node must be a StringLiteral or MultilineStringLiteral.
+ clobbers: []*Node,
- pub const OutputList = SegmentedList(*AsmOutput, 2);
- pub const InputList = SegmentedList(*AsmInput, 2);
- pub const ClobberList = SegmentedList(*Node, 2);
+ pub const Output = struct {
+ lbracket: TokenIndex,
+ symbolic_name: *Node,
+ constraint: *Node,
+ kind: Kind,
+ rparen: TokenIndex,
- pub fn iterate(self: *Asm, index: usize) ?*Node {
+ pub const Kind = union(enum) {
+ Variable: *Identifier,
+ Return: *Node,
+ };
+
+ pub fn iterate(self: *const Output, index: usize) ?*Node {
+ var i = index;
+
+ if (i < 1) return self.symbolic_name;
+ i -= 1;
+
+ if (i < 1) return self.constraint;
+ i -= 1;
+
+ switch (self.kind) {
+ .Variable => |variable_name| {
+ if (i < 1) return &variable_name.base;
+ i -= 1;
+ },
+ .Return => |return_type| {
+ if (i < 1) return return_type;
+ i -= 1;
+ },
+ }
+
+ return null;
+ }
+
+ pub fn firstToken(self: *const Output) TokenIndex {
+ return self.lbracket;
+ }
+
+ pub fn lastToken(self: *const Output) TokenIndex {
+ return self.rparen;
+ }
+ };
+
+ pub const Input = struct {
+ lbracket: TokenIndex,
+ symbolic_name: *Node,
+ constraint: *Node,
+ expr: *Node,
+ rparen: TokenIndex,
+
+ pub fn iterate(self: *const Input, index: usize) ?*Node {
+ var i = index;
+
+ if (i < 1) return self.symbolic_name;
+ i -= 1;
+
+ if (i < 1) return self.constraint;
+ i -= 1;
+
+ if (i < 1) return self.expr;
+ i -= 1;
+
+ return null;
+ }
+
+ pub fn firstToken(self: *const Input) TokenIndex {
+ return self.lbracket;
+ }
+
+ pub fn lastToken(self: *const Input) TokenIndex {
+ return self.rparen;
+ }
+ };
+
+
+ pub fn iterate(self: *const Asm, index: usize) ?*Node {
var i = index;
- if (i < self.outputs.len) return &self.outputs.at(i).*.base;
- i -= self.outputs.len;
+ if (i < self.outputs.len * 3) switch (i % 3) {
+ 0 => return self.outputs[i / 3].symbolic_name,
+ 1 => return self.outputs[i / 3].constraint,
+ 2 => switch (self.outputs[i / 3].kind) {
+ .Variable => |variable_name| return &variable_name.base,
+ .Return => |return_type| return return_type,
+ },
+ else => unreachable,
+ };
+ i -= self.outputs.len * 3;
- if (i < self.inputs.len) return &self.inputs.at(i).*.base;
- i -= self.inputs.len;
+ if (i < self.inputs.len * 3) switch (i % 3) {
+ 0 => return self.inputs[i / 3].symbolic_name,
+ 1 => return self.inputs[i / 3].constraint,
+ 2 => return self.inputs[i / 3].expr,
+ else => unreachable,
+ };
+ i -= self.inputs.len * 3;
return null;
}
@@ -2232,7 +2699,7 @@ pub const Node = struct {
base: Node = Node{ .id = .Unreachable },
token: TokenIndex,
- pub fn iterate(self: *Unreachable, index: usize) ?*Node {
+ pub fn iterate(self: *const Unreachable, index: usize) ?*Node {
return null;
}
@@ -2249,7 +2716,7 @@ pub const Node = struct {
base: Node = Node{ .id = .ErrorType },
token: TokenIndex,
- pub fn iterate(self: *ErrorType, index: usize) ?*Node {
+ pub fn iterate(self: *const ErrorType, index: usize) ?*Node {
return null;
}
@@ -2266,7 +2733,7 @@ pub const Node = struct {
base: Node = Node{ .id = .VarType },
token: TokenIndex,
- pub fn iterate(self: *VarType, index: usize) ?*Node {
+ pub fn iterate(self: *const VarType, index: usize) ?*Node {
return null;
}
@@ -2281,20 +2748,23 @@ pub const Node = struct {
pub const DocComment = struct {
base: Node = Node{ .id = .DocComment },
- lines: LineList,
+ /// Points to the first doc comment token. API users are expected to iterate over the
+ /// tokens array, looking for more doc comments, ignoring line comments, and stopping
+ /// at the first other token.
+ first_line: TokenIndex,
- pub const LineList = SegmentedList(TokenIndex, 4);
-
- pub fn iterate(self: *DocComment, index: usize) ?*Node {
+ pub fn iterate(self: *const DocComment, index: usize) ?*Node {
return null;
}
pub fn firstToken(self: *const DocComment) TokenIndex {
- return self.lines.at(0).*;
+ return self.first_line;
}
+ /// Returns the first doc comment line. Be careful, this may not be the desired behavior,
+ /// which would require the tokens array.
pub fn lastToken(self: *const DocComment) TokenIndex {
- return self.lines.at(self.lines.len - 1).*;
+ return self.first_line;
}
};
@@ -2305,7 +2775,7 @@ pub const Node = struct {
name: *Node,
body_node: *Node,
- pub fn iterate(self: *TestDecl, index: usize) ?*Node {
+ pub fn iterate(self: *const TestDecl, index: usize) ?*Node {
var i = index;
if (i < 1) return self.body_node;
@@ -2327,7 +2797,7 @@ pub const Node = struct {
test "iterate" {
var root = Node.Root{
.base = Node{ .id = Node.Id.Root },
- .decls = Node.Root.DeclList.init(std.testing.allocator),
+ .decls_len = 0,
.eof_token = 0,
};
var base = &root.base;
diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig
index 6dfd1120b6..a8aadc5ef7 100644
--- a/lib/std/zig/parse.zig
+++ b/lib/std/zig/parse.zig
@@ -6,3304 +6,3387 @@ const Node = ast.Node;
const Tree = ast.Tree;
const AstError = ast.Error;
const TokenIndex = ast.TokenIndex;
+const NodeIndex = ast.NodeIndex;
const Token = std.zig.Token;
-const TokenIterator = Tree.TokenList.Iterator;
pub const Error = error{ParseError} || Allocator.Error;
/// Result should be freed with tree.deinit() when there are
/// no more references to any of the tokens or nodes.
-pub fn parse(allocator: *Allocator, source: []const u8) Allocator.Error!*Tree {
- const tree = blk: {
- // This block looks unnecessary, but is a "foot-shield" to prevent the SegmentedLists
- // from being initialized with a pointer to this `arena`, which is created on
- // the stack. Following code should instead refer to `&tree.arena_allocator`, a
- // pointer to data which lives safely on the heap and will outlive `parse`. See:
- // https://github.com/ziglang/zig/commit/cb4fb14b6e66bd213575f69eec9598be8394fae6
- var arena = std.heap.ArenaAllocator.init(allocator);
- errdefer arena.deinit();
- const tree = try arena.allocator.create(ast.Tree);
- tree.* = .{
- .source = source,
- .root_node = undefined,
- .arena_allocator = arena,
- .tokens = undefined,
- .errors = undefined,
- };
- break :blk tree;
- };
- errdefer tree.deinit();
- const arena = &tree.arena_allocator.allocator;
-
- tree.tokens = ast.Tree.TokenList.init(arena);
- tree.errors = ast.Tree.ErrorList.init(arena);
+pub fn parse(gpa: *Allocator, source: []const u8) Allocator.Error!*Tree {
+ // TODO optimization idea: ensureCapacity on the tokens list and
+ // then appendAssumeCapacity inside the loop.
+ var token_ids = std.ArrayList(Token.Id).init(gpa);
+ defer token_ids.deinit();
+ var token_locs = std.ArrayList(Token.Loc).init(gpa);
+ defer token_locs.deinit();
var tokenizer = std.zig.Tokenizer.init(source);
while (true) {
- const tree_token = try tree.tokens.addOne();
- tree_token.* = tokenizer.next();
- if (tree_token.id == .Eof) break;
+ const token = tokenizer.next();
+ try token_ids.append(token.id);
+ try token_locs.append(token.loc);
+ if (token.id == .Eof) break;
}
- var it = tree.tokens.iterator(0);
- while (it.peek().?.id == .LineComment) _ = it.next();
+ var parser: Parser = .{
+ .source = source,
+ .arena = std.heap.ArenaAllocator.init(gpa),
+ .gpa = gpa,
+ .token_ids = token_ids.items,
+ .token_locs = token_locs.items,
+ .errors = .{},
+ .tok_i = 0,
+ };
+ defer parser.errors.deinit(gpa);
+ errdefer parser.arena.deinit();
- tree.root_node = try parseRoot(arena, &it, tree);
+ while (token_ids.items[parser.tok_i] == .LineComment) parser.tok_i += 1;
+ const root_node = try parser.parseRoot();
+
+ const tree = try parser.arena.allocator.create(Tree);
+ tree.* = .{
+ .gpa = gpa,
+ .source = source,
+ .token_ids = token_ids.toOwnedSlice(),
+ .token_locs = token_locs.toOwnedSlice(),
+ .errors = parser.errors.toOwnedSlice(gpa),
+ .root_node = root_node,
+ .arena = parser.arena.state,
+ };
return tree;
}
-/// Root <- skip ContainerMembers eof
-fn parseRoot(arena: *Allocator, it: *TokenIterator, tree: *Tree) Allocator.Error!*Node.Root {
- const node = try arena.create(Node.Root);
- node.* = .{
- .decls = try parseContainerMembers(arena, it, tree, true),
+/// Represents in-progress parsing, will be converted to an ast.Tree after completion.
+const Parser = struct {
+ arena: std.heap.ArenaAllocator,
+ gpa: *Allocator,
+ source: []const u8,
+ token_ids: []const Token.Id,
+ token_locs: []const Token.Loc,
+ tok_i: TokenIndex,
+ errors: std.ArrayListUnmanaged(AstError),
+
+ /// Root <- skip ContainerMembers eof
+ fn parseRoot(p: *Parser) Allocator.Error!*Node.Root {
+ const decls = try parseContainerMembers(p, true);
+ defer p.gpa.free(decls);
+
// parseContainerMembers will try to skip as much
// invalid tokens as it can so this can only be the EOF
- .eof_token = eatToken(it, .Eof).?,
- };
- return node;
-}
+ const eof_token = p.eatToken(.Eof).?;
-/// ContainerMembers
-/// <- TestDecl ContainerMembers
-/// / TopLevelComptime ContainerMembers
-/// / KEYWORD_pub? TopLevelDecl ContainerMembers
-/// / ContainerField COMMA ContainerMembers
-/// / ContainerField
-/// /
-fn parseContainerMembers(arena: *Allocator, it: *TokenIterator, tree: *Tree, top_level: bool) !Node.Root.DeclList {
- var list = Node.Root.DeclList.init(arena);
+ const decls_len = @intCast(NodeIndex, decls.len);
+ const node = try Node.Root.create(&p.arena.allocator, decls_len, eof_token);
+ std.mem.copy(*Node, node.decls(), decls);
- var field_state: union(enum) {
- /// no fields have been seen
- none,
- /// currently parsing fields
- seen,
- /// saw fields and then a declaration after them.
- /// payload is first token of previous declaration.
- end: TokenIndex,
- /// ther was a declaration between fields, don't report more errors
- err,
- } = .none;
+ return node;
+ }
- while (true) {
- if (try parseContainerDocComments(arena, it, tree)) |node| {
- try list.push(node);
- continue;
- }
+ /// ContainerMembers
+ /// <- TestDecl ContainerMembers
+ /// / TopLevelComptime ContainerMembers
+ /// / KEYWORD_pub? TopLevelDecl ContainerMembers
+ /// / ContainerField COMMA ContainerMembers
+ /// / ContainerField
+ /// /
+ fn parseContainerMembers(p: *Parser, top_level: bool) ![]*Node {
+ var list = std.ArrayList(*Node).init(p.gpa);
+ defer list.deinit();
- const doc_comments = try parseDocComment(arena, it, tree);
+ var field_state: union(enum) {
+ /// no fields have been seen
+ none,
+ /// currently parsing fields
+ seen,
+ /// saw fields and then a declaration after them.
+ /// payload is first token of previous declaration.
+ end: TokenIndex,
+            /// there was a declaration between fields, don't report more errors
+ err,
+ } = .none;
- if (parseTestDecl(arena, it, tree) catch |err| switch (err) {
- error.OutOfMemory => return error.OutOfMemory,
- error.ParseError => {
- findNextContainerMember(it);
+ while (true) {
+ if (try p.parseContainerDocComments()) |node| {
+ try list.append(node);
continue;
- },
- }) |node| {
- if (field_state == .seen) {
- field_state = .{ .end = node.firstToken() };
}
- node.cast(Node.TestDecl).?.doc_comments = doc_comments;
- try list.push(node);
- continue;
- }
- if (parseTopLevelComptime(arena, it, tree) catch |err| switch (err) {
- error.OutOfMemory => return error.OutOfMemory,
- error.ParseError => {
- findNextContainerMember(it);
+ const doc_comments = try p.parseDocComment();
+
+ if (p.parseTestDecl() catch |err| switch (err) {
+ error.OutOfMemory => return error.OutOfMemory,
+ error.ParseError => {
+ p.findNextContainerMember();
+ continue;
+ },
+ }) |node| {
+ if (field_state == .seen) {
+ field_state = .{ .end = node.firstToken() };
+ }
+ node.cast(Node.TestDecl).?.doc_comments = doc_comments;
+ try list.append(node);
continue;
- },
- }) |node| {
- if (field_state == .seen) {
- field_state = .{ .end = node.firstToken() };
}
- node.cast(Node.Comptime).?.doc_comments = doc_comments;
- try list.push(node);
- continue;
- }
- const visib_token = eatToken(it, .Keyword_pub);
-
- if (parseTopLevelDecl(arena, it, tree) catch |err| switch (err) {
- error.OutOfMemory => return error.OutOfMemory,
- error.ParseError => {
- findNextContainerMember(it);
+ if (p.parseTopLevelComptime() catch |err| switch (err) {
+ error.OutOfMemory => return error.OutOfMemory,
+ error.ParseError => {
+ p.findNextContainerMember();
+ continue;
+ },
+ }) |node| {
+ if (field_state == .seen) {
+ field_state = .{ .end = node.firstToken() };
+ }
+ node.cast(Node.Comptime).?.doc_comments = doc_comments;
+ try list.append(node);
continue;
- },
- }) |node| {
- if (field_state == .seen) {
- field_state = .{ .end = visib_token orelse node.firstToken() };
}
- switch (node.id) {
- .FnProto => {
- node.cast(Node.FnProto).?.doc_comments = doc_comments;
- node.cast(Node.FnProto).?.visib_token = visib_token;
+
+ const visib_token = p.eatToken(.Keyword_pub);
+
+ if (p.parseTopLevelDecl() catch |err| switch (err) {
+ error.OutOfMemory => return error.OutOfMemory,
+ error.ParseError => {
+ p.findNextContainerMember();
+ continue;
},
- .VarDecl => {
- node.cast(Node.VarDecl).?.doc_comments = doc_comments;
- node.cast(Node.VarDecl).?.visib_token = visib_token;
- },
- .Use => {
- node.cast(Node.Use).?.doc_comments = doc_comments;
- node.cast(Node.Use).?.visib_token = visib_token;
- },
- else => unreachable,
- }
- try list.push(node);
- if (try parseAppendedDocComment(arena, it, tree, node.lastToken())) |appended_comment| {
+ }) |node| {
+ if (field_state == .seen) {
+ field_state = .{ .end = visib_token orelse node.firstToken() };
+ }
switch (node.id) {
- .FnProto => {},
- .VarDecl => node.cast(Node.VarDecl).?.doc_comments = appended_comment,
- .Use => node.cast(Node.Use).?.doc_comments = appended_comment,
+ .FnProto => {
+ node.cast(Node.FnProto).?.doc_comments = doc_comments;
+ node.cast(Node.FnProto).?.visib_token = visib_token;
+ },
+ .VarDecl => {
+ node.cast(Node.VarDecl).?.doc_comments = doc_comments;
+ node.cast(Node.VarDecl).?.visib_token = visib_token;
+ },
+ .Use => {
+ node.cast(Node.Use).?.doc_comments = doc_comments;
+ node.cast(Node.Use).?.visib_token = visib_token;
+ },
else => unreachable,
}
- }
- continue;
- }
-
- if (visib_token != null) {
- try tree.errors.push(.{
- .ExpectedPubItem = .{ .token = it.index },
- });
- // ignore this pub
- continue;
- }
-
- if (parseContainerField(arena, it, tree) catch |err| switch (err) {
- error.OutOfMemory => return error.OutOfMemory,
- error.ParseError => {
- // attempt to recover
- findNextContainerMember(it);
+ try list.append(node);
+ if (try p.parseAppendedDocComment(node.lastToken())) |appended_comment| {
+ switch (node.id) {
+ .FnProto => {},
+ .VarDecl => node.cast(Node.VarDecl).?.doc_comments = appended_comment,
+ .Use => node.cast(Node.Use).?.doc_comments = appended_comment,
+ else => unreachable,
+ }
+ }
continue;
- },
- }) |node| {
- switch (field_state) {
- .none => field_state = .seen,
- .err, .seen => {},
- .end => |tok| {
- try tree.errors.push(.{
- .DeclBetweenFields = .{ .token = tok },
- });
- // continue parsing, error will be reported later
- field_state = .err;
- },
}
- const field = node.cast(Node.ContainerField).?;
- field.doc_comments = doc_comments;
- try list.push(node);
- const comma = eatToken(it, .Comma) orelse {
- // try to continue parsing
- const index = it.index;
- findNextContainerMember(it);
- const next = it.peek().?.id;
- switch (next) {
- .Eof => break,
- else => {
- if (next == .RBrace) {
- if (!top_level) break;
- _ = nextToken(it);
- }
+ if (visib_token != null) {
+ try p.errors.append(p.gpa, .{
+ .ExpectedPubItem = .{ .token = p.tok_i },
+ });
+ // ignore this pub
+ continue;
+ }
- // add error and continue
- try tree.errors.push(.{
- .ExpectedToken = .{ .token = index, .expected_id = .Comma },
+ if (p.parseContainerField() catch |err| switch (err) {
+ error.OutOfMemory => return error.OutOfMemory,
+ error.ParseError => {
+ // attempt to recover
+ p.findNextContainerMember();
+ continue;
+ },
+ }) |node| {
+ switch (field_state) {
+ .none => field_state = .seen,
+ .err, .seen => {},
+ .end => |tok| {
+ try p.errors.append(p.gpa, .{
+ .DeclBetweenFields = .{ .token = tok },
});
- continue;
+ // continue parsing, error will be reported later
+ field_state = .err;
},
}
- };
- if (try parseAppendedDocComment(arena, it, tree, comma)) |appended_comment|
- field.doc_comments = appended_comment;
- continue;
- }
- // Dangling doc comment
- if (doc_comments != null) {
- try tree.errors.push(.{
- .UnattachedDocComment = .{ .token = doc_comments.?.firstToken() },
- });
- }
+ const field = node.cast(Node.ContainerField).?;
+ field.doc_comments = doc_comments;
+ try list.append(node);
+ const comma = p.eatToken(.Comma) orelse {
+ // try to continue parsing
+ const index = p.tok_i;
+ p.findNextContainerMember();
+ const next = p.token_ids[p.tok_i];
+ switch (next) {
+ .Eof => break,
+ else => {
+ if (next == .RBrace) {
+ if (!top_level) break;
+ _ = p.nextToken();
+ }
- const next = it.peek().?.id;
- switch (next) {
- .Eof => break,
- .Keyword_comptime => {
- _ = nextToken(it);
- try tree.errors.push(.{
- .ExpectedBlockOrField = .{ .token = it.index },
+ // add error and continue
+ try p.errors.append(p.gpa, .{
+ .ExpectedToken = .{ .token = index, .expected_id = .Comma },
+ });
+ continue;
+ },
+ }
+ };
+ if (try p.parseAppendedDocComment(comma)) |appended_comment|
+ field.doc_comments = appended_comment;
+ continue;
+ }
+
+ // Dangling doc comment
+ if (doc_comments != null) {
+ try p.errors.append(p.gpa, .{
+ .UnattachedDocComment = .{ .token = doc_comments.?.firstToken() },
});
- },
- else => {
- const index = it.index;
- if (next == .RBrace) {
- if (!top_level) break;
- _ = nextToken(it);
- }
+ }
- // this was likely not supposed to end yet,
- // try to find the next declaration
- findNextContainerMember(it);
- try tree.errors.push(.{
- .ExpectedContainerMembers = .{ .token = index },
- });
- },
+ const next = p.token_ids[p.tok_i];
+ switch (next) {
+ .Eof => break,
+ .Keyword_comptime => {
+ _ = p.nextToken();
+ try p.errors.append(p.gpa, .{
+ .ExpectedBlockOrField = .{ .token = p.tok_i },
+ });
+ },
+ else => {
+ const index = p.tok_i;
+ if (next == .RBrace) {
+ if (!top_level) break;
+ _ = p.nextToken();
+ }
+
+ // this was likely not supposed to end yet,
+ // try to find the next declaration
+ p.findNextContainerMember();
+ try p.errors.append(p.gpa, .{
+ .ExpectedContainerMembers = .{ .token = index },
+ });
+ },
+ }
+ }
+
+ return list.toOwnedSlice();
+ }
+
+ /// Attempts to find next container member by searching for certain tokens
+ fn findNextContainerMember(p: *Parser) void {
+ var level: u32 = 0;
+ while (true) {
+ const tok = p.nextToken();
+ switch (p.token_ids[tok]) {
+ // any of these can start a new top level declaration
+ .Keyword_test,
+ .Keyword_comptime,
+ .Keyword_pub,
+ .Keyword_export,
+ .Keyword_extern,
+ .Keyword_inline,
+ .Keyword_noinline,
+ .Keyword_usingnamespace,
+ .Keyword_threadlocal,
+ .Keyword_const,
+ .Keyword_var,
+ .Keyword_fn,
+ .Identifier,
+ => {
+ if (level == 0) {
+ p.putBackToken(tok);
+ return;
+ }
+ },
+ .Comma, .Semicolon => {
+ // this decl was likely meant to end here
+ if (level == 0) {
+ return;
+ }
+ },
+ .LParen, .LBracket, .LBrace => level += 1,
+ .RParen, .RBracket => {
+ if (level != 0) level -= 1;
+ },
+ .RBrace => {
+ if (level == 0) {
+ // end of container, exit
+ p.putBackToken(tok);
+ return;
+ }
+ level -= 1;
+ },
+ .Eof => {
+ p.putBackToken(tok);
+ return;
+ },
+ else => {},
+ }
}
}
- return list;
-}
-
-/// Attempts to find next container member by searching for certain tokens
-fn findNextContainerMember(it: *TokenIterator) void {
- var level: u32 = 0;
- while (true) {
- const tok = nextToken(it);
- switch (tok.ptr.id) {
- // any of these can start a new top level declaration
- .Keyword_test,
- .Keyword_comptime,
- .Keyword_pub,
- .Keyword_export,
- .Keyword_extern,
- .Keyword_inline,
- .Keyword_noinline,
- .Keyword_usingnamespace,
- .Keyword_threadlocal,
- .Keyword_const,
- .Keyword_var,
- .Keyword_fn,
- .Identifier,
- => {
- if (level == 0) {
- putBackToken(it, tok.index);
+ /// Attempts to find the next statement by searching for a semicolon
+ fn findNextStmt(p: *Parser) void {
+ var level: u32 = 0;
+ while (true) {
+ const tok = p.nextToken();
+ switch (p.token_ids[tok]) {
+ .LBrace => level += 1,
+ .RBrace => {
+ if (level == 0) {
+ p.putBackToken(tok);
+ return;
+ }
+ level -= 1;
+ },
+ .Semicolon => {
+ if (level == 0) {
+ return;
+ }
+ },
+ .Eof => {
+ p.putBackToken(tok);
return;
- }
- },
- .Comma, .Semicolon => {
- // this decl was likely meant to end here
- if (level == 0) {
- return;
- }
- },
- .LParen, .LBracket, .LBrace => level += 1,
- .RParen, .RBracket => {
- if (level != 0) level -= 1;
- },
- .RBrace => {
- if (level == 0) {
- // end of container, exit
- putBackToken(it, tok.index);
- return;
- }
- level -= 1;
- },
- .Eof => {
- putBackToken(it, tok.index);
- return;
- },
- else => {},
+ },
+ else => {},
+ }
}
}
-}
-/// Attempts to find the next statement by searching for a semicolon
-fn findNextStmt(it: *TokenIterator) void {
- var level: u32 = 0;
- while (true) {
- const tok = nextToken(it);
- switch (tok.ptr.id) {
- .LBrace => level += 1,
- .RBrace => {
- if (level == 0) {
- putBackToken(it, tok.index);
- return;
- }
- level -= 1;
- },
- .Semicolon => {
- if (level == 0) {
- return;
- }
- },
- .Eof => {
- putBackToken(it, tok.index);
- return;
- },
- else => {},
+ /// Eat a multiline container doc comment
+ fn parseContainerDocComments(p: *Parser) !?*Node {
+ if (p.eatToken(.ContainerDocComment)) |first_line| {
+ while (p.eatToken(.ContainerDocComment)) |_| {}
+ const node = try p.arena.allocator.create(Node.DocComment);
+ node.* = .{ .first_line = first_line };
+ return &node.base;
}
- }
-}
-
-/// Eat a multiline container doc comment
-fn parseContainerDocComments(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- var lines = Node.DocComment.LineList.init(arena);
- while (eatToken(it, .ContainerDocComment)) |line| {
- try lines.push(line);
- }
-
- if (lines.len == 0) return null;
-
- const node = try arena.create(Node.DocComment);
- node.* = .{
- .lines = lines,
- };
- return &node.base;
-}
-
-/// TestDecl <- KEYWORD_test STRINGLITERALSINGLE Block
-fn parseTestDecl(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const test_token = eatToken(it, .Keyword_test) orelse return null;
- const name_node = try expectNode(arena, it, tree, parseStringLiteralSingle, .{
- .ExpectedStringLiteral = .{ .token = it.index },
- });
- const block_node = try expectNode(arena, it, tree, parseBlock, .{
- .ExpectedLBrace = .{ .token = it.index },
- });
-
- const test_node = try arena.create(Node.TestDecl);
- test_node.* = .{
- .doc_comments = null,
- .test_token = test_token,
- .name = name_node,
- .body_node = block_node,
- };
- return &test_node.base;
-}
-
-/// TopLevelComptime <- KEYWORD_comptime BlockExpr
-fn parseTopLevelComptime(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const tok = eatToken(it, .Keyword_comptime) orelse return null;
- const lbrace = eatToken(it, .LBrace) orelse {
- putBackToken(it, tok);
return null;
- };
- putBackToken(it, lbrace);
- const block_node = try expectNode(arena, it, tree, parseBlockExpr, .{
- .ExpectedLabelOrLBrace = .{ .token = it.index },
- });
-
- const comptime_node = try arena.create(Node.Comptime);
- comptime_node.* = .{
- .doc_comments = null,
- .comptime_token = tok,
- .expr = block_node,
- };
- return &comptime_node.base;
-}
-
-/// TopLevelDecl
-/// <- (KEYWORD_export / KEYWORD_extern STRINGLITERALSINGLE? / (KEYWORD_inline / KEYWORD_noinline))? FnProto (SEMICOLON / Block)
-/// / (KEYWORD_export / KEYWORD_extern STRINGLITERALSINGLE?)? KEYWORD_threadlocal? VarDecl
-/// / KEYWORD_usingnamespace Expr SEMICOLON
-fn parseTopLevelDecl(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- var lib_name: ?*Node = null;
- const extern_export_inline_token = blk: {
- if (eatToken(it, .Keyword_export)) |token| break :blk token;
- if (eatToken(it, .Keyword_extern)) |token| {
- lib_name = try parseStringLiteralSingle(arena, it, tree);
- break :blk token;
- }
- if (eatToken(it, .Keyword_inline)) |token| break :blk token;
- if (eatToken(it, .Keyword_noinline)) |token| break :blk token;
- break :blk null;
- };
-
- if (try parseFnProto(arena, it, tree)) |node| {
- const fn_node = node.cast(Node.FnProto).?;
- fn_node.*.extern_export_inline_token = extern_export_inline_token;
- fn_node.*.lib_name = lib_name;
- if (eatToken(it, .Semicolon)) |_| return node;
-
- if (try expectNodeRecoverable(arena, it, tree, parseBlock, .{
- // since parseBlock only return error.ParseError on
- // a missing '}' we can assume this function was
- // supposed to end here.
- .ExpectedSemiOrLBrace = .{ .token = it.index },
- })) |body_node| {
- fn_node.body_node = body_node;
- }
- return node;
}
- if (extern_export_inline_token) |token| {
- if (tree.tokens.at(token).id == .Keyword_inline or
- tree.tokens.at(token).id == .Keyword_noinline)
- {
- try tree.errors.push(.{
- .ExpectedFn = .{ .token = it.index },
+ /// TestDecl <- KEYWORD_test STRINGLITERALSINGLE Block
+ fn parseTestDecl(p: *Parser) !?*Node {
+ const test_token = p.eatToken(.Keyword_test) orelse return null;
+ const name_node = try p.expectNode(parseStringLiteralSingle, .{
+ .ExpectedStringLiteral = .{ .token = p.tok_i },
+ });
+ const block_node = try p.expectNode(parseBlock, .{
+ .ExpectedLBrace = .{ .token = p.tok_i },
+ });
+
+ const test_node = try p.arena.allocator.create(Node.TestDecl);
+ test_node.* = .{
+ .doc_comments = null,
+ .test_token = test_token,
+ .name = name_node,
+ .body_node = block_node,
+ };
+ return &test_node.base;
+ }
+
+ /// TopLevelComptime <- KEYWORD_comptime BlockExpr
+ fn parseTopLevelComptime(p: *Parser) !?*Node {
+ const tok = p.eatToken(.Keyword_comptime) orelse return null;
+ const lbrace = p.eatToken(.LBrace) orelse {
+ p.putBackToken(tok);
+ return null;
+ };
+ p.putBackToken(lbrace);
+ const block_node = try p.expectNode(parseBlockExpr, .{
+ .ExpectedLabelOrLBrace = .{ .token = p.tok_i },
+ });
+
+ const comptime_node = try p.arena.allocator.create(Node.Comptime);
+ comptime_node.* = .{
+ .doc_comments = null,
+ .comptime_token = tok,
+ .expr = block_node,
+ };
+ return &comptime_node.base;
+ }
+
+ /// TopLevelDecl
+ /// <- (KEYWORD_export / KEYWORD_extern STRINGLITERALSINGLE? / (KEYWORD_inline / KEYWORD_noinline))? FnProto (SEMICOLON / Block)
+ /// / (KEYWORD_export / KEYWORD_extern STRINGLITERALSINGLE?)? KEYWORD_threadlocal? VarDecl
+ /// / KEYWORD_usingnamespace Expr SEMICOLON
+ fn parseTopLevelDecl(p: *Parser) !?*Node {
+ var lib_name: ?*Node = null;
+ const extern_export_inline_token = blk: {
+ if (p.eatToken(.Keyword_export)) |token| break :blk token;
+ if (p.eatToken(.Keyword_extern)) |token| {
+ lib_name = try p.parseStringLiteralSingle();
+ break :blk token;
+ }
+ if (p.eatToken(.Keyword_inline)) |token| break :blk token;
+ if (p.eatToken(.Keyword_noinline)) |token| break :blk token;
+ break :blk null;
+ };
+
+ if (try p.parseFnProto()) |node| {
+ const fn_node = node.cast(Node.FnProto).?;
+ fn_node.*.extern_export_inline_token = extern_export_inline_token;
+ fn_node.*.lib_name = lib_name;
+ if (p.eatToken(.Semicolon)) |_| return node;
+
+ if (try p.expectNodeRecoverable(parseBlock, .{
+            // since parseBlock only returns error.ParseError on
+ // a missing '}' we can assume this function was
+ // supposed to end here.
+ .ExpectedSemiOrLBrace = .{ .token = p.tok_i },
+ })) |body_node| {
+ fn_node.body_node = body_node;
+ }
+ return node;
+ }
+
+ if (extern_export_inline_token) |token| {
+ if (p.token_ids[token] == .Keyword_inline or
+ p.token_ids[token] == .Keyword_noinline)
+ {
+ try p.errors.append(p.gpa, .{
+ .ExpectedFn = .{ .token = p.tok_i },
+ });
+ return error.ParseError;
+ }
+ }
+
+ const thread_local_token = p.eatToken(.Keyword_threadlocal);
+
+ if (try p.parseVarDecl()) |node| {
+ var var_decl = node.cast(Node.VarDecl).?;
+ var_decl.*.thread_local_token = thread_local_token;
+ var_decl.*.comptime_token = null;
+ var_decl.*.extern_export_token = extern_export_inline_token;
+ var_decl.*.lib_name = lib_name;
+ return node;
+ }
+
+ if (thread_local_token != null) {
+ try p.errors.append(p.gpa, .{
+ .ExpectedVarDecl = .{ .token = p.tok_i },
});
+ // ignore this and try again;
return error.ParseError;
}
- }
- const thread_local_token = eatToken(it, .Keyword_threadlocal);
-
- if (try parseVarDecl(arena, it, tree)) |node| {
- var var_decl = node.cast(Node.VarDecl).?;
- var_decl.*.thread_local_token = thread_local_token;
- var_decl.*.comptime_token = null;
- var_decl.*.extern_export_token = extern_export_inline_token;
- var_decl.*.lib_name = lib_name;
- return node;
- }
-
- if (thread_local_token != null) {
- try tree.errors.push(.{
- .ExpectedVarDecl = .{ .token = it.index },
- });
- // ignore this and try again;
- return error.ParseError;
- }
-
- if (extern_export_inline_token) |token| {
- try tree.errors.push(.{
- .ExpectedVarDeclOrFn = .{ .token = it.index },
- });
- // ignore this and try again;
- return error.ParseError;
- }
-
- return try parseUse(arena, it, tree);
-}
-
-/// FnProto <- KEYWORD_fn IDENTIFIER? LPAREN ParamDeclList RPAREN ByteAlign? LinkSection? EXCLAMATIONMARK? (KEYWORD_var / TypeExpr)
-fn parseFnProto(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- // TODO: Remove once extern/async fn rewriting is
- var is_async = false;
- var is_extern = false;
- const cc_token: ?usize = blk: {
- if (eatToken(it, .Keyword_extern)) |token| {
- is_extern = true;
- break :blk token;
- }
- if (eatToken(it, .Keyword_async)) |token| {
- is_async = true;
- break :blk token;
- }
- break :blk null;
- };
- const fn_token = eatToken(it, .Keyword_fn) orelse {
- if (cc_token) |token|
- putBackToken(it, token);
- return null;
- };
- const name_token = eatToken(it, .Identifier);
- const lparen = try expectToken(it, tree, .LParen);
- const params = try parseParamDeclList(arena, it, tree);
- const rparen = try expectToken(it, tree, .RParen);
- const align_expr = try parseByteAlign(arena, it, tree);
- const section_expr = try parseLinkSection(arena, it, tree);
- const callconv_expr = try parseCallconv(arena, it, tree);
- const exclamation_token = eatToken(it, .Bang);
-
- const return_type_expr = (try parseVarType(arena, it, tree)) orelse
- try expectNodeRecoverable(arena, it, tree, parseTypeExpr, .{
- // most likely the user forgot to specify the return type.
- // Mark return type as invalid and try to continue.
- .ExpectedReturnType = .{ .token = it.index },
- });
-
- // TODO https://github.com/ziglang/zig/issues/3750
- const R = Node.FnProto.ReturnType;
- const return_type = if (return_type_expr == null)
- R{ .Invalid = rparen }
- else if (exclamation_token != null)
- R{ .InferErrorSet = return_type_expr.? }
- else
- R{ .Explicit = return_type_expr.? };
-
- const var_args_token = if (params.len > 0) blk: {
- const param_type = params.at(params.len - 1).*.cast(Node.ParamDecl).?.param_type;
- break :blk if (param_type == .var_args) param_type.var_args else null;
- } else
- null;
-
- const fn_proto_node = try arena.create(Node.FnProto);
- fn_proto_node.* = .{
- .doc_comments = null,
- .visib_token = null,
- .fn_token = fn_token,
- .name_token = name_token,
- .params = params,
- .return_type = return_type,
- .var_args_token = var_args_token,
- .extern_export_inline_token = null,
- .body_node = null,
- .lib_name = null,
- .align_expr = align_expr,
- .section_expr = section_expr,
- .callconv_expr = callconv_expr,
- .is_extern_prototype = is_extern,
- .is_async = is_async,
- };
-
- return &fn_proto_node.base;
-}
-
-/// VarDecl <- (KEYWORD_const / KEYWORD_var) IDENTIFIER (COLON TypeExpr)? ByteAlign? LinkSection? (EQUAL Expr)? SEMICOLON
-fn parseVarDecl(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const mut_token = eatToken(it, .Keyword_const) orelse
- eatToken(it, .Keyword_var) orelse
- return null;
-
- const name_token = try expectToken(it, tree, .Identifier);
- const type_node = if (eatToken(it, .Colon) != null)
- try expectNode(arena, it, tree, parseTypeExpr, .{
- .ExpectedTypeExpr = .{ .token = it.index },
- })
- else
- null;
- const align_node = try parseByteAlign(arena, it, tree);
- const section_node = try parseLinkSection(arena, it, tree);
- const eq_token = eatToken(it, .Equal);
- const init_node = if (eq_token != null) blk: {
- break :blk try expectNode(arena, it, tree, parseExpr, .{
- .ExpectedExpr = .{ .token = it.index },
- });
- } else null;
- const semicolon_token = try expectToken(it, tree, .Semicolon);
-
- const node = try arena.create(Node.VarDecl);
- node.* = .{
- .doc_comments = null,
- .visib_token = null,
- .thread_local_token = null,
- .name_token = name_token,
- .eq_token = eq_token,
- .mut_token = mut_token,
- .comptime_token = null,
- .extern_export_token = null,
- .lib_name = null,
- .type_node = type_node,
- .align_node = align_node,
- .section_node = section_node,
- .init_node = init_node,
- .semicolon_token = semicolon_token,
- };
- return &node.base;
-}
-
-/// ContainerField <- KEYWORD_comptime? IDENTIFIER (COLON TypeExpr ByteAlign?)? (EQUAL Expr)?
-fn parseContainerField(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const comptime_token = eatToken(it, .Keyword_comptime);
- const name_token = eatToken(it, .Identifier) orelse {
- if (comptime_token) |t| putBackToken(it, t);
- return null;
- };
-
- var align_expr: ?*Node = null;
- var type_expr: ?*Node = null;
- if (eatToken(it, .Colon)) |_| {
- if (eatToken(it, .Keyword_var)) |var_tok| {
- const node = try arena.create(ast.Node.VarType);
- node.* = .{ .token = var_tok };
- type_expr = &node.base;
- } else {
- type_expr = try expectNode(arena, it, tree, parseTypeExpr, .{
- .ExpectedTypeExpr = .{ .token = it.index },
+ if (extern_export_inline_token) |token| {
+ try p.errors.append(p.gpa, .{
+ .ExpectedVarDeclOrFn = .{ .token = p.tok_i },
});
- align_expr = try parseByteAlign(arena, it, tree);
+ // ignore this and try again;
+ return error.ParseError;
}
+
+ return p.parseUse();
}
- const value_expr = if (eatToken(it, .Equal)) |_|
- try expectNode(arena, it, tree, parseExpr, .{
- .ExpectedExpr = .{ .token = it.index },
- })
- else
- null;
+ /// FnProto <- KEYWORD_fn IDENTIFIER? LPAREN ParamDeclList RPAREN ByteAlign? LinkSection? EXCLAMATIONMARK? (KEYWORD_var / TypeExpr)
+ fn parseFnProto(p: *Parser) !?*Node {
+ // TODO: Remove once extern/async fn rewriting is
+ var is_async = false;
+ var is_extern = false;
+ const cc_token: ?TokenIndex = blk: {
+ if (p.eatToken(.Keyword_extern)) |token| {
+ is_extern = true;
+ break :blk token;
+ }
+ if (p.eatToken(.Keyword_async)) |token| {
+ is_async = true;
+ break :blk token;
+ }
+ break :blk null;
+ };
+ const fn_token = p.eatToken(.Keyword_fn) orelse {
+ if (cc_token) |token|
+ p.putBackToken(token);
+ return null;
+ };
+ const name_token = p.eatToken(.Identifier);
+ const lparen = try p.expectToken(.LParen);
+ const params = try p.parseParamDeclList();
+ defer p.gpa.free(params);
+ const rparen = try p.expectToken(.RParen);
+ const align_expr = try p.parseByteAlign();
+ const section_expr = try p.parseLinkSection();
+ const callconv_expr = try p.parseCallconv();
+ const exclamation_token = p.eatToken(.Bang);
- const node = try arena.create(Node.ContainerField);
- node.* = .{
- .doc_comments = null,
- .comptime_token = comptime_token,
- .name_token = name_token,
- .type_expr = type_expr,
- .value_expr = value_expr,
- .align_expr = align_expr,
- };
- return &node.base;
-}
-
-/// Statement
-/// <- KEYWORD_comptime? VarDecl
-/// / KEYWORD_comptime BlockExprStatement
-/// / KEYWORD_nosuspend BlockExprStatement
-/// / KEYWORD_suspend (SEMICOLON / BlockExprStatement)
-/// / KEYWORD_defer BlockExprStatement
-/// / KEYWORD_errdefer Payload? BlockExprStatement
-/// / IfStatement
-/// / LabeledStatement
-/// / SwitchExpr
-/// / AssignExpr SEMICOLON
-fn parseStatement(arena: *Allocator, it: *TokenIterator, tree: *Tree) Error!?*Node {
- const comptime_token = eatToken(it, .Keyword_comptime);
-
- const var_decl_node = try parseVarDecl(arena, it, tree);
- if (var_decl_node) |node| {
- const var_decl = node.cast(Node.VarDecl).?;
- var_decl.comptime_token = comptime_token;
- return node;
- }
-
- if (comptime_token) |token| {
- const block_expr = try expectNode(arena, it, tree, parseBlockExprStatement, .{
- .ExpectedBlockOrAssignment = .{ .token = it.index },
+ const return_type_expr = (try p.parseVarType()) orelse
+ try p.expectNodeRecoverable(parseTypeExpr, .{
+ // most likely the user forgot to specify the return type.
+ // Mark return type as invalid and try to continue.
+ .ExpectedReturnType = .{ .token = p.tok_i },
});
- const node = try arena.create(Node.Comptime);
- node.* = .{
+ // TODO https://github.com/ziglang/zig/issues/3750
+ const R = Node.FnProto.ReturnType;
+ const return_type = if (return_type_expr == null)
+ R{ .Invalid = rparen }
+ else if (exclamation_token != null)
+ R{ .InferErrorSet = return_type_expr.? }
+ else
+ R{ .Explicit = return_type_expr.? };
+
+ const var_args_token = if (params.len > 0) blk: {
+ const param_type = params[params.len - 1].param_type;
+ break :blk if (param_type == .var_args) param_type.var_args else null;
+ } else
+ null;
+
+ const fn_proto_node = try Node.FnProto.alloc(&p.arena.allocator, params.len);
+ fn_proto_node.* = .{
.doc_comments = null,
- .comptime_token = token,
- .expr = block_expr,
+ .visib_token = null,
+ .fn_token = fn_token,
+ .name_token = name_token,
+ .params_len = params.len,
+ .return_type = return_type,
+ .var_args_token = var_args_token,
+ .extern_export_inline_token = null,
+ .body_node = null,
+ .lib_name = null,
+ .align_expr = align_expr,
+ .section_expr = section_expr,
+ .callconv_expr = callconv_expr,
+ .is_extern_prototype = is_extern,
+ .is_async = is_async,
};
- return &node.base;
+ std.mem.copy(Node.FnProto.ParamDecl, fn_proto_node.params(), params);
+
+ return &fn_proto_node.base;
}
- if (eatToken(it, .Keyword_nosuspend)) |nosuspend_token| {
- const block_expr = try expectNode(arena, it, tree, parseBlockExprStatement, .{
- .ExpectedBlockOrAssignment = .{ .token = it.index },
- });
+ /// VarDecl <- (KEYWORD_const / KEYWORD_var) IDENTIFIER (COLON TypeExpr)? ByteAlign? LinkSection? (EQUAL Expr)? SEMICOLON
+ fn parseVarDecl(p: *Parser) !?*Node {
+ const mut_token = p.eatToken(.Keyword_const) orelse
+ p.eatToken(.Keyword_var) orelse
+ return null;
- const node = try arena.create(Node.Nosuspend);
- node.* = .{
- .nosuspend_token = nosuspend_token,
- .expr = block_expr,
- };
- return &node.base;
- }
-
- if (eatToken(it, .Keyword_suspend)) |suspend_token| {
- const semicolon = eatToken(it, .Semicolon);
-
- const body_node = if (semicolon == null) blk: {
- break :blk try expectNode(arena, it, tree, parseBlockExprStatement, .{
- .ExpectedBlockOrExpression = .{ .token = it.index },
- });
- } else null;
-
- const node = try arena.create(Node.Suspend);
- node.* = .{
- .suspend_token = suspend_token,
- .body = body_node,
- };
- return &node.base;
- }
-
- const defer_token = eatToken(it, .Keyword_defer) orelse eatToken(it, .Keyword_errdefer);
- if (defer_token) |token| {
- const payload = if (tree.tokens.at(token).id == .Keyword_errdefer)
- try parsePayload(arena, it, tree)
+ const name_token = try p.expectToken(.Identifier);
+ const type_node = if (p.eatToken(.Colon) != null)
+ try p.expectNode(parseTypeExpr, .{
+ .ExpectedTypeExpr = .{ .token = p.tok_i },
+ })
else
null;
- const expr_node = try expectNode(arena, it, tree, parseBlockExprStatement, .{
- .ExpectedBlockOrExpression = .{ .token = it.index },
- });
- const node = try arena.create(Node.Defer);
+ const align_node = try p.parseByteAlign();
+ const section_node = try p.parseLinkSection();
+ const eq_token = p.eatToken(.Equal);
+ const init_node = if (eq_token != null) blk: {
+ break :blk try p.expectNode(parseExpr, .{
+ .ExpectedExpr = .{ .token = p.tok_i },
+ });
+ } else null;
+ const semicolon_token = try p.expectToken(.Semicolon);
+
+ const node = try p.arena.allocator.create(Node.VarDecl);
node.* = .{
- .defer_token = token,
- .expr = expr_node,
- .payload = payload,
+ .doc_comments = null,
+ .visib_token = null,
+ .thread_local_token = null,
+ .name_token = name_token,
+ .eq_token = eq_token,
+ .mut_token = mut_token,
+ .comptime_token = null,
+ .extern_export_token = null,
+ .lib_name = null,
+ .type_node = type_node,
+ .align_node = align_node,
+ .section_node = section_node,
+ .init_node = init_node,
+ .semicolon_token = semicolon_token,
};
return &node.base;
}
- if (try parseIfStatement(arena, it, tree)) |node| return node;
- if (try parseLabeledStatement(arena, it, tree)) |node| return node;
- if (try parseSwitchExpr(arena, it, tree)) |node| return node;
- if (try parseAssignExpr(arena, it, tree)) |node| {
- _ = try expectTokenRecoverable(it, tree, .Semicolon);
- return node;
- }
-
- return null;
-}
-
-/// IfStatement
-/// <- IfPrefix BlockExpr ( KEYWORD_else Payload? Statement )?
-/// / IfPrefix AssignExpr ( SEMICOLON / KEYWORD_else Payload? Statement )
-fn parseIfStatement(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const if_node = (try parseIfPrefix(arena, it, tree)) orelse return null;
- const if_prefix = if_node.cast(Node.If).?;
-
- const block_expr = (try parseBlockExpr(arena, it, tree));
- const assign_expr = if (block_expr == null)
- try expectNode(arena, it, tree, parseAssignExpr, .{
- .ExpectedBlockOrAssignment = .{ .token = it.index },
- })
- else
- null;
-
- const semicolon = if (assign_expr != null) eatToken(it, .Semicolon) else null;
-
- const else_node = if (semicolon == null) blk: {
- const else_token = eatToken(it, .Keyword_else) orelse break :blk null;
- const payload = try parsePayload(arena, it, tree);
- const else_body = try expectNode(arena, it, tree, parseStatement, .{
- .InvalidToken = .{ .token = it.index },
- });
-
- const node = try arena.create(Node.Else);
- node.* = .{
- .else_token = else_token,
- .payload = payload,
- .body = else_body,
+ /// ContainerField <- KEYWORD_comptime? IDENTIFIER (COLON TypeExpr ByteAlign?)? (EQUAL Expr)?
+ fn parseContainerField(p: *Parser) !?*Node {
+ const comptime_token = p.eatToken(.Keyword_comptime);
+ const name_token = p.eatToken(.Identifier) orelse {
+ if (comptime_token) |t| p.putBackToken(t);
+ return null;
};
- break :blk node;
- } else null;
+ var align_expr: ?*Node = null;
+ var type_expr: ?*Node = null;
+ if (p.eatToken(.Colon)) |_| {
+ if (p.eatToken(.Keyword_var)) |var_tok| {
+ const node = try p.arena.allocator.create(Node.VarType);
+ node.* = .{ .token = var_tok };
+ type_expr = &node.base;
+ } else {
+ type_expr = try p.expectNode(parseTypeExpr, .{
+ .ExpectedTypeExpr = .{ .token = p.tok_i },
+ });
+ align_expr = try p.parseByteAlign();
+ }
+ }
- if (block_expr) |body| {
- if_prefix.body = body;
- if_prefix.@"else" = else_node;
- return if_node;
+ const value_expr = if (p.eatToken(.Equal)) |_|
+ try p.expectNode(parseExpr, .{
+ .ExpectedExpr = .{ .token = p.tok_i },
+ })
+ else
+ null;
+
+ const node = try p.arena.allocator.create(Node.ContainerField);
+ node.* = .{
+ .doc_comments = null,
+ .comptime_token = comptime_token,
+ .name_token = name_token,
+ .type_expr = type_expr,
+ .value_expr = value_expr,
+ .align_expr = align_expr,
+ };
+ return &node.base;
}
- if (assign_expr) |body| {
- if_prefix.body = body;
- if (semicolon != null) return if_node;
- if (else_node != null) {
+ /// Statement
+ /// <- KEYWORD_comptime? VarDecl
+ /// / KEYWORD_comptime BlockExprStatement
+ /// / KEYWORD_nosuspend BlockExprStatement
+ /// / KEYWORD_suspend (SEMICOLON / BlockExprStatement)
+ /// / KEYWORD_defer BlockExprStatement
+ /// / KEYWORD_errdefer Payload? BlockExprStatement
+ /// / IfStatement
+ /// / LabeledStatement
+ /// / SwitchExpr
+ /// / AssignExpr SEMICOLON
+ fn parseStatement(p: *Parser) Error!?*Node {
+ const comptime_token = p.eatToken(.Keyword_comptime);
+
+ const var_decl_node = try p.parseVarDecl();
+ if (var_decl_node) |node| {
+ const var_decl = node.cast(Node.VarDecl).?;
+ var_decl.comptime_token = comptime_token;
+ return node;
+ }
+
+ if (comptime_token) |token| {
+ const block_expr = try p.expectNode(parseBlockExprStatement, .{
+ .ExpectedBlockOrAssignment = .{ .token = p.tok_i },
+ });
+
+ const node = try p.arena.allocator.create(Node.Comptime);
+ node.* = .{
+ .doc_comments = null,
+ .comptime_token = token,
+ .expr = block_expr,
+ };
+ return &node.base;
+ }
+
+ if (p.eatToken(.Keyword_nosuspend)) |nosuspend_token| {
+ const block_expr = try p.expectNode(parseBlockExprStatement, .{
+ .ExpectedBlockOrAssignment = .{ .token = p.tok_i },
+ });
+
+ const node = try p.arena.allocator.create(Node.Nosuspend);
+ node.* = .{
+ .nosuspend_token = nosuspend_token,
+ .expr = block_expr,
+ };
+ return &node.base;
+ }
+
+ if (p.eatToken(.Keyword_suspend)) |suspend_token| {
+ const semicolon = p.eatToken(.Semicolon);
+
+ const body_node = if (semicolon == null) blk: {
+ break :blk try p.expectNode(parseBlockExprStatement, .{
+ .ExpectedBlockOrExpression = .{ .token = p.tok_i },
+ });
+ } else null;
+
+ const node = try p.arena.allocator.create(Node.Suspend);
+ node.* = .{
+ .suspend_token = suspend_token,
+ .body = body_node,
+ };
+ return &node.base;
+ }
+
+ const defer_token = p.eatToken(.Keyword_defer) orelse p.eatToken(.Keyword_errdefer);
+ if (defer_token) |token| {
+ const payload = if (p.token_ids[token] == .Keyword_errdefer)
+ try p.parsePayload()
+ else
+ null;
+ const expr_node = try p.expectNode(parseBlockExprStatement, .{
+ .ExpectedBlockOrExpression = .{ .token = p.tok_i },
+ });
+ const node = try p.arena.allocator.create(Node.Defer);
+ node.* = .{
+ .defer_token = token,
+ .expr = expr_node,
+ .payload = payload,
+ };
+ return &node.base;
+ }
+
+ if (try p.parseIfStatement()) |node| return node;
+ if (try p.parseLabeledStatement()) |node| return node;
+ if (try p.parseSwitchExpr()) |node| return node;
+ if (try p.parseAssignExpr()) |node| {
+ _ = try p.expectTokenRecoverable(.Semicolon);
+ return node;
+ }
+
+ return null;
+ }
+
+ /// IfStatement
+ /// <- IfPrefix BlockExpr ( KEYWORD_else Payload? Statement )?
+ /// / IfPrefix AssignExpr ( SEMICOLON / KEYWORD_else Payload? Statement )
+ fn parseIfStatement(p: *Parser) !?*Node {
+ const if_node = (try p.parseIfPrefix()) orelse return null;
+ const if_prefix = if_node.cast(Node.If).?;
+
+ const block_expr = (try p.parseBlockExpr());
+ const assign_expr = if (block_expr == null)
+ try p.expectNode(parseAssignExpr, .{
+ .ExpectedBlockOrAssignment = .{ .token = p.tok_i },
+ })
+ else
+ null;
+
+ const semicolon = if (assign_expr != null) p.eatToken(.Semicolon) else null;
+
+ const else_node = if (semicolon == null) blk: {
+ const else_token = p.eatToken(.Keyword_else) orelse break :blk null;
+ const payload = try p.parsePayload();
+ const else_body = try p.expectNode(parseStatement, .{
+ .InvalidToken = .{ .token = p.tok_i },
+ });
+
+ const node = try p.arena.allocator.create(Node.Else);
+ node.* = .{
+ .else_token = else_token,
+ .payload = payload,
+ .body = else_body,
+ };
+
+ break :blk node;
+ } else null;
+
+ if (block_expr) |body| {
+ if_prefix.body = body;
if_prefix.@"else" = else_node;
return if_node;
}
- try tree.errors.push(.{
- .ExpectedSemiOrElse = .{ .token = it.index },
- });
+
+ if (assign_expr) |body| {
+ if_prefix.body = body;
+ if (semicolon != null) return if_node;
+ if (else_node != null) {
+ if_prefix.@"else" = else_node;
+ return if_node;
+ }
+ try p.errors.append(p.gpa, .{
+ .ExpectedSemiOrElse = .{ .token = p.tok_i },
+ });
+ }
+
+ return if_node;
}
- return if_node;
-}
+ /// LabeledStatement <- BlockLabel? (Block / LoopStatement)
+ fn parseLabeledStatement(p: *Parser) !?*Node {
+ var colon: TokenIndex = undefined;
+ const label_token = p.parseBlockLabel(&colon);
-/// LabeledStatement <- BlockLabel? (Block / LoopStatement)
-fn parseLabeledStatement(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- var colon: TokenIndex = undefined;
- const label_token = parseBlockLabel(arena, it, tree, &colon);
+ if (try p.parseBlock()) |node| {
+ node.cast(Node.Block).?.label = label_token;
+ return node;
+ }
- if (try parseBlock(arena, it, tree)) |node| {
- node.cast(Node.Block).?.label = label_token;
- return node;
+ if (try p.parseLoopStatement()) |node| {
+ if (node.cast(Node.For)) |for_node| {
+ for_node.label = label_token;
+ } else if (node.cast(Node.While)) |while_node| {
+ while_node.label = label_token;
+ } else unreachable;
+ return node;
+ }
+
+ if (label_token != null) {
+ try p.errors.append(p.gpa, .{
+ .ExpectedLabelable = .{ .token = p.tok_i },
+ });
+ return error.ParseError;
+ }
+
+ return null;
}
- if (try parseLoopStatement(arena, it, tree)) |node| {
- if (node.cast(Node.For)) |for_node| {
- for_node.label = label_token;
- } else if (node.cast(Node.While)) |while_node| {
- while_node.label = label_token;
- } else unreachable;
- return node;
- }
+ /// LoopStatement <- KEYWORD_inline? (ForStatement / WhileStatement)
+ fn parseLoopStatement(p: *Parser) !?*Node {
+ const inline_token = p.eatToken(.Keyword_inline);
- if (label_token != null) {
- try tree.errors.push(.{
- .ExpectedLabelable = .{ .token = it.index },
+ if (try p.parseForStatement()) |node| {
+ node.cast(Node.For).?.inline_token = inline_token;
+ return node;
+ }
+
+ if (try p.parseWhileStatement()) |node| {
+ node.cast(Node.While).?.inline_token = inline_token;
+ return node;
+ }
+ if (inline_token == null) return null;
+
+ // If we've seen "inline", there should have been a "for" or "while"
+ try p.errors.append(p.gpa, .{
+ .ExpectedInlinable = .{ .token = p.tok_i },
});
return error.ParseError;
}
- return null;
-}
+ /// ForStatement
+ /// <- ForPrefix BlockExpr ( KEYWORD_else Statement )?
+ /// / ForPrefix AssignExpr ( SEMICOLON / KEYWORD_else Statement )
+ fn parseForStatement(p: *Parser) !?*Node {
+ const node = (try p.parseForPrefix()) orelse return null;
+ const for_prefix = node.cast(Node.For).?;
-/// LoopStatement <- KEYWORD_inline? (ForStatement / WhileStatement)
-fn parseLoopStatement(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const inline_token = eatToken(it, .Keyword_inline);
+ if (try p.parseBlockExpr()) |block_expr_node| {
+ for_prefix.body = block_expr_node;
- if (try parseForStatement(arena, it, tree)) |node| {
- node.cast(Node.For).?.inline_token = inline_token;
- return node;
- }
+ if (p.eatToken(.Keyword_else)) |else_token| {
+ const statement_node = try p.expectNode(parseStatement, .{
+ .InvalidToken = .{ .token = p.tok_i },
+ });
- if (try parseWhileStatement(arena, it, tree)) |node| {
- node.cast(Node.While).?.inline_token = inline_token;
- return node;
- }
- if (inline_token == null) return null;
+ const else_node = try p.arena.allocator.create(Node.Else);
+ else_node.* = .{
+ .else_token = else_token,
+ .payload = null,
+ .body = statement_node,
+ };
+ for_prefix.@"else" = else_node;
- // If we've seen "inline", there should have been a "for" or "while"
- try tree.errors.push(.{
- .ExpectedInlinable = .{ .token = it.index },
- });
- return error.ParseError;
-}
-
-/// ForStatement
-/// <- ForPrefix BlockExpr ( KEYWORD_else Statement )?
-/// / ForPrefix AssignExpr ( SEMICOLON / KEYWORD_else Statement )
-fn parseForStatement(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const node = (try parseForPrefix(arena, it, tree)) orelse return null;
- const for_prefix = node.cast(Node.For).?;
-
- if (try parseBlockExpr(arena, it, tree)) |block_expr_node| {
- for_prefix.body = block_expr_node;
-
- if (eatToken(it, .Keyword_else)) |else_token| {
- const statement_node = try expectNode(arena, it, tree, parseStatement, .{
- .InvalidToken = .{ .token = it.index },
- });
-
- const else_node = try arena.create(Node.Else);
- else_node.* = .{
- .else_token = else_token,
- .payload = null,
- .body = statement_node,
- };
- for_prefix.@"else" = else_node;
+ return node;
+ }
return node;
}
- return node;
- }
+ if (try p.parseAssignExpr()) |assign_expr| {
+ for_prefix.body = assign_expr;
- if (try parseAssignExpr(arena, it, tree)) |assign_expr| {
- for_prefix.body = assign_expr;
+ if (p.eatToken(.Semicolon) != null) return node;
- if (eatToken(it, .Semicolon) != null) return node;
+ if (p.eatToken(.Keyword_else)) |else_token| {
+ const statement_node = try p.expectNode(parseStatement, .{
+ .ExpectedStatement = .{ .token = p.tok_i },
+ });
- if (eatToken(it, .Keyword_else)) |else_token| {
- const statement_node = try expectNode(arena, it, tree, parseStatement, .{
- .ExpectedStatement = .{ .token = it.index },
+ const else_node = try p.arena.allocator.create(Node.Else);
+ else_node.* = .{
+ .else_token = else_token,
+ .payload = null,
+ .body = statement_node,
+ };
+ for_prefix.@"else" = else_node;
+ return node;
+ }
+
+ try p.errors.append(p.gpa, .{
+ .ExpectedSemiOrElse = .{ .token = p.tok_i },
});
- const else_node = try arena.create(Node.Else);
- else_node.* = .{
- .else_token = else_token,
- .payload = null,
- .body = statement_node,
- };
- for_prefix.@"else" = else_node;
- return node;
- }
-
- try tree.errors.push(.{
- .ExpectedSemiOrElse = .{ .token = it.index },
- });
-
- return node;
- }
-
- return null;
-}
-
-/// WhileStatement
-/// <- WhilePrefix BlockExpr ( KEYWORD_else Payload? Statement )?
-/// / WhilePrefix AssignExpr ( SEMICOLON / KEYWORD_else Payload? Statement )
-fn parseWhileStatement(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const node = (try parseWhilePrefix(arena, it, tree)) orelse return null;
- const while_prefix = node.cast(Node.While).?;
-
- if (try parseBlockExpr(arena, it, tree)) |block_expr_node| {
- while_prefix.body = block_expr_node;
-
- if (eatToken(it, .Keyword_else)) |else_token| {
- const payload = try parsePayload(arena, it, tree);
-
- const statement_node = try expectNode(arena, it, tree, parseStatement, .{
- .InvalidToken = .{ .token = it.index },
- });
-
- const else_node = try arena.create(Node.Else);
- else_node.* = .{
- .else_token = else_token,
- .payload = payload,
- .body = statement_node,
- };
- while_prefix.@"else" = else_node;
-
return node;
}
- return node;
+ return null;
}
- if (try parseAssignExpr(arena, it, tree)) |assign_expr_node| {
- while_prefix.body = assign_expr_node;
+ /// WhileStatement
+ /// <- WhilePrefix BlockExpr ( KEYWORD_else Payload? Statement )?
+ /// / WhilePrefix AssignExpr ( SEMICOLON / KEYWORD_else Payload? Statement )
+ fn parseWhileStatement(p: *Parser) !?*Node {
+ const node = (try p.parseWhilePrefix()) orelse return null;
+ const while_prefix = node.cast(Node.While).?;
- if (eatToken(it, .Semicolon) != null) return node;
+ if (try p.parseBlockExpr()) |block_expr_node| {
+ while_prefix.body = block_expr_node;
- if (eatToken(it, .Keyword_else)) |else_token| {
- const payload = try parsePayload(arena, it, tree);
+ if (p.eatToken(.Keyword_else)) |else_token| {
+ const payload = try p.parsePayload();
- const statement_node = try expectNode(arena, it, tree, parseStatement, .{
- .ExpectedStatement = .{ .token = it.index },
- });
+ const statement_node = try p.expectNode(parseStatement, .{
+ .InvalidToken = .{ .token = p.tok_i },
+ });
+
+ const else_node = try p.arena.allocator.create(Node.Else);
+ else_node.* = .{
+ .else_token = else_token,
+ .payload = payload,
+ .body = statement_node,
+ };
+ while_prefix.@"else" = else_node;
+
+ return node;
+ }
- const else_node = try arena.create(Node.Else);
- else_node.* = .{
- .else_token = else_token,
- .payload = payload,
- .body = statement_node,
- };
- while_prefix.@"else" = else_node;
return node;
}
- try tree.errors.push(.{
- .ExpectedSemiOrElse = .{ .token = it.index },
- });
+ if (try p.parseAssignExpr()) |assign_expr_node| {
+ while_prefix.body = assign_expr_node;
- return node;
+ if (p.eatToken(.Semicolon) != null) return node;
+
+ if (p.eatToken(.Keyword_else)) |else_token| {
+ const payload = try p.parsePayload();
+
+ const statement_node = try p.expectNode(parseStatement, .{
+ .ExpectedStatement = .{ .token = p.tok_i },
+ });
+
+ const else_node = try p.arena.allocator.create(Node.Else);
+ else_node.* = .{
+ .else_token = else_token,
+ .payload = payload,
+ .body = statement_node,
+ };
+ while_prefix.@"else" = else_node;
+ return node;
+ }
+
+ try p.errors.append(p.gpa, .{
+ .ExpectedSemiOrElse = .{ .token = p.tok_i },
+ });
+
+ return node;
+ }
+
+ return null;
}
- return null;
-}
-
-/// BlockExprStatement
-/// <- BlockExpr
-/// / AssignExpr SEMICOLON
-fn parseBlockExprStatement(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- if (try parseBlockExpr(arena, it, tree)) |node| return node;
- if (try parseAssignExpr(arena, it, tree)) |node| {
- _ = try expectTokenRecoverable(it, tree, .Semicolon);
- return node;
- }
- return null;
-}
-
-/// BlockExpr <- BlockLabel? Block
-fn parseBlockExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) Error!?*Node {
- var colon: TokenIndex = undefined;
- const label_token = parseBlockLabel(arena, it, tree, &colon);
- const block_node = (try parseBlock(arena, it, tree)) orelse {
- if (label_token) |label| {
- putBackToken(it, label + 1); // ":"
- putBackToken(it, label); // IDENTIFIER
+ /// BlockExprStatement
+ /// <- BlockExpr
+ /// / AssignExpr SEMICOLON
+ fn parseBlockExprStatement(p: *Parser) !?*Node {
+ if (try p.parseBlockExpr()) |node| return node;
+ if (try p.parseAssignExpr()) |node| {
+ _ = try p.expectTokenRecoverable(.Semicolon);
+ return node;
}
return null;
- };
- block_node.cast(Node.Block).?.label = label_token;
- return block_node;
-}
-
-/// AssignExpr <- Expr (AssignOp Expr)?
-fn parseAssignExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- return parseBinOpExpr(arena, it, tree, parseAssignOp, parseExpr, .Once);
-}
-
-/// Expr <- KEYWORD_try* BoolOrExpr
-fn parseExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) Error!?*Node {
- return parsePrefixOpExpr(arena, it, tree, parseTry, parseBoolOrExpr);
-}
-
-/// BoolOrExpr <- BoolAndExpr (KEYWORD_or BoolAndExpr)*
-fn parseBoolOrExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- return parseBinOpExpr(
- arena,
- it,
- tree,
- SimpleBinOpParseFn(.Keyword_or, Node.InfixOp.Op.BoolOr),
- parseBoolAndExpr,
- .Infinitely,
- );
-}
-
-/// BoolAndExpr <- CompareExpr (KEYWORD_and CompareExpr)*
-fn parseBoolAndExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- return parseBinOpExpr(
- arena,
- it,
- tree,
- SimpleBinOpParseFn(.Keyword_and, .BoolAnd),
- parseCompareExpr,
- .Infinitely,
- );
-}
-
-/// CompareExpr <- BitwiseExpr (CompareOp BitwiseExpr)?
-fn parseCompareExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- return parseBinOpExpr(arena, it, tree, parseCompareOp, parseBitwiseExpr, .Once);
-}
-
-/// BitwiseExpr <- BitShiftExpr (BitwiseOp BitShiftExpr)*
-fn parseBitwiseExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- return parseBinOpExpr(arena, it, tree, parseBitwiseOp, parseBitShiftExpr, .Infinitely);
-}
-
-/// BitShiftExpr <- AdditionExpr (BitShiftOp AdditionExpr)*
-fn parseBitShiftExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- return parseBinOpExpr(arena, it, tree, parseBitShiftOp, parseAdditionExpr, .Infinitely);
-}
-
-/// AdditionExpr <- MultiplyExpr (AdditionOp MultiplyExpr)*
-fn parseAdditionExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- return parseBinOpExpr(arena, it, tree, parseAdditionOp, parseMultiplyExpr, .Infinitely);
-}
-
-/// MultiplyExpr <- PrefixExpr (MultiplyOp PrefixExpr)*
-fn parseMultiplyExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- return parseBinOpExpr(arena, it, tree, parseMultiplyOp, parsePrefixExpr, .Infinitely);
-}
-
-/// PrefixExpr <- PrefixOp* PrimaryExpr
-fn parsePrefixExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- return parsePrefixOpExpr(arena, it, tree, parsePrefixOp, parsePrimaryExpr);
-}
-
-/// PrimaryExpr
-/// <- AsmExpr
-/// / IfExpr
-/// / KEYWORD_break BreakLabel? Expr?
-/// / KEYWORD_comptime Expr
-/// / KEYWORD_nosuspend Expr
-/// / KEYWORD_continue BreakLabel?
-/// / KEYWORD_resume Expr
-/// / KEYWORD_return Expr?
-/// / BlockLabel? LoopExpr
-/// / Block
-/// / CurlySuffixExpr
-fn parsePrimaryExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- if (try parseAsmExpr(arena, it, tree)) |node| return node;
- if (try parseIfExpr(arena, it, tree)) |node| return node;
-
- if (eatToken(it, .Keyword_break)) |token| {
- const label = try parseBreakLabel(arena, it, tree);
- const expr_node = try parseExpr(arena, it, tree);
- const node = try arena.create(Node.ControlFlowExpression);
- node.* = .{
- .ltoken = token,
- .kind = .{ .Break = label },
- .rhs = expr_node,
- };
- return &node.base;
}
- if (eatToken(it, .Keyword_comptime)) |token| {
- const expr_node = try expectNode(arena, it, tree, parseExpr, .{
- .ExpectedExpr = .{ .token = it.index },
- });
- const node = try arena.create(Node.Comptime);
- node.* = .{
- .doc_comments = null,
- .comptime_token = token,
- .expr = expr_node,
- };
- return &node.base;
- }
-
- if (eatToken(it, .Keyword_nosuspend)) |token| {
- const expr_node = try expectNode(arena, it, tree, parseExpr, .{
- .ExpectedExpr = .{ .token = it.index },
- });
- const node = try arena.create(Node.Nosuspend);
- node.* = .{
- .nosuspend_token = token,
- .expr = expr_node,
- };
- return &node.base;
- }
-
- if (eatToken(it, .Keyword_continue)) |token| {
- const label = try parseBreakLabel(arena, it, tree);
- const node = try arena.create(Node.ControlFlowExpression);
- node.* = .{
- .ltoken = token,
- .kind = .{ .Continue = label },
- .rhs = null,
- };
- return &node.base;
- }
-
- if (eatToken(it, .Keyword_resume)) |token| {
- const expr_node = try expectNode(arena, it, tree, parseExpr, .{
- .ExpectedExpr = .{ .token = it.index },
- });
- const node = try arena.create(Node.PrefixOp);
- node.* = .{
- .op_token = token,
- .op = .Resume,
- .rhs = expr_node,
- };
- return &node.base;
- }
-
- if (eatToken(it, .Keyword_return)) |token| {
- const expr_node = try parseExpr(arena, it, tree);
- const node = try arena.create(Node.ControlFlowExpression);
- node.* = .{
- .ltoken = token,
- .kind = .Return,
- .rhs = expr_node,
- };
- return &node.base;
- }
-
- var colon: TokenIndex = undefined;
- const label = parseBlockLabel(arena, it, tree, &colon);
- if (try parseLoopExpr(arena, it, tree)) |node| {
- if (node.cast(Node.For)) |for_node| {
- for_node.label = label;
- } else if (node.cast(Node.While)) |while_node| {
- while_node.label = label;
- } else unreachable;
- return node;
- }
- if (label) |token| {
- putBackToken(it, token + 1); // ":"
- putBackToken(it, token); // IDENTIFIER
- }
-
- if (try parseBlock(arena, it, tree)) |node| return node;
- if (try parseCurlySuffixExpr(arena, it, tree)) |node| return node;
-
- return null;
-}
-
-/// IfExpr <- IfPrefix Expr (KEYWORD_else Payload? Expr)?
-fn parseIfExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- return parseIf(arena, it, tree, parseExpr);
-}
-
-/// Block <- LBRACE Statement* RBRACE
-fn parseBlock(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const lbrace = eatToken(it, .LBrace) orelse return null;
-
- var statements = Node.Block.StatementList.init(arena);
- while (true) {
- const statement = (parseStatement(arena, it, tree) catch |err| switch (err) {
- error.OutOfMemory => return error.OutOfMemory,
- error.ParseError => {
- // try to skip to the next statement
- findNextStmt(it);
- continue;
- },
- }) orelse break;
- try statements.push(statement);
- }
-
- const rbrace = try expectToken(it, tree, .RBrace);
-
- const block_node = try arena.create(Node.Block);
- block_node.* = .{
- .label = null,
- .lbrace = lbrace,
- .statements = statements,
- .rbrace = rbrace,
- };
-
- return &block_node.base;
-}
-
-/// LoopExpr <- KEYWORD_inline? (ForExpr / WhileExpr)
-fn parseLoopExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const inline_token = eatToken(it, .Keyword_inline);
-
- if (try parseForExpr(arena, it, tree)) |node| {
- node.cast(Node.For).?.inline_token = inline_token;
- return node;
- }
-
- if (try parseWhileExpr(arena, it, tree)) |node| {
- node.cast(Node.While).?.inline_token = inline_token;
- return node;
- }
-
- if (inline_token == null) return null;
-
- // If we've seen "inline", there should have been a "for" or "while"
- try tree.errors.push(.{
- .ExpectedInlinable = .{ .token = it.index },
- });
- return error.ParseError;
-}
-
-/// ForExpr <- ForPrefix Expr (KEYWORD_else Expr)?
-fn parseForExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const node = (try parseForPrefix(arena, it, tree)) orelse return null;
- const for_prefix = node.cast(Node.For).?;
-
- const body_node = try expectNode(arena, it, tree, parseExpr, .{
- .ExpectedExpr = .{ .token = it.index },
- });
- for_prefix.body = body_node;
-
- if (eatToken(it, .Keyword_else)) |else_token| {
- const body = try expectNode(arena, it, tree, parseExpr, .{
- .ExpectedExpr = .{ .token = it.index },
- });
-
- const else_node = try arena.create(Node.Else);
- else_node.* = .{
- .else_token = else_token,
- .payload = null,
- .body = body,
- };
-
- for_prefix.@"else" = else_node;
- }
-
- return node;
-}
-
-/// WhileExpr <- WhilePrefix Expr (KEYWORD_else Payload? Expr)?
-fn parseWhileExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const node = (try parseWhilePrefix(arena, it, tree)) orelse return null;
- const while_prefix = node.cast(Node.While).?;
-
- const body_node = try expectNode(arena, it, tree, parseExpr, .{
- .ExpectedExpr = .{ .token = it.index },
- });
- while_prefix.body = body_node;
-
- if (eatToken(it, .Keyword_else)) |else_token| {
- const payload = try parsePayload(arena, it, tree);
- const body = try expectNode(arena, it, tree, parseExpr, .{
- .ExpectedExpr = .{ .token = it.index },
- });
-
- const else_node = try arena.create(Node.Else);
- else_node.* = .{
- .else_token = else_token,
- .payload = payload,
- .body = body,
- };
-
- while_prefix.@"else" = else_node;
- }
-
- return node;
-}
-
-/// CurlySuffixExpr <- TypeExpr InitList?
-fn parseCurlySuffixExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const type_expr = (try parseTypeExpr(arena, it, tree)) orelse return null;
- const suffix_op = (try parseInitList(arena, it, tree)) orelse return type_expr;
- suffix_op.lhs.node = type_expr;
- return &suffix_op.base;
-}
-
-/// InitList
-/// <- LBRACE FieldInit (COMMA FieldInit)* COMMA? RBRACE
-/// / LBRACE Expr (COMMA Expr)* COMMA? RBRACE
-/// / LBRACE RBRACE
-fn parseInitList(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node.SuffixOp {
- const lbrace = eatToken(it, .LBrace) orelse return null;
- var init_list = Node.SuffixOp.Op.InitList.init(arena);
-
- const op: Node.SuffixOp.Op = blk: {
- if (try parseFieldInit(arena, it, tree)) |field_init| {
- try init_list.push(field_init);
- while (eatToken(it, .Comma)) |_| {
- const next = (try parseFieldInit(arena, it, tree)) orelse break;
- try init_list.push(next);
+ /// BlockExpr <- BlockLabel? Block
+ fn parseBlockExpr(p: *Parser) Error!?*Node {
+ var colon: TokenIndex = undefined;
+ const label_token = p.parseBlockLabel(&colon);
+ const block_node = (try p.parseBlock()) orelse {
+ if (label_token) |label| {
+ p.putBackToken(label + 1); // ":"
+ p.putBackToken(label); // IDENTIFIER
}
- break :blk .{ .StructInitializer = init_list };
- }
-
- if (try parseExpr(arena, it, tree)) |expr| {
- try init_list.push(expr);
- while (eatToken(it, .Comma)) |_| {
- const next = (try parseExpr(arena, it, tree)) orelse break;
- try init_list.push(next);
- }
- break :blk .{ .ArrayInitializer = init_list };
- }
-
- break :blk .{ .StructInitializer = init_list };
- };
-
- const node = try arena.create(Node.SuffixOp);
- node.* = .{
- .lhs = .{ .node = undefined }, // set by caller
- .op = op,
- .rtoken = try expectToken(it, tree, .RBrace),
- };
- return node;
-}
-
-/// TypeExpr <- PrefixTypeOp* ErrorUnionExpr
-fn parseTypeExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) Error!?*Node {
- return parsePrefixOpExpr(arena, it, tree, parsePrefixTypeOp, parseErrorUnionExpr);
-}
-
-/// ErrorUnionExpr <- SuffixExpr (EXCLAMATIONMARK TypeExpr)?
-fn parseErrorUnionExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const suffix_expr = (try parseSuffixExpr(arena, it, tree)) orelse return null;
-
- if (try SimpleBinOpParseFn(.Bang, Node.InfixOp.Op.ErrorUnion)(arena, it, tree)) |node| {
- const error_union = node.cast(Node.InfixOp).?;
- const type_expr = try expectNode(arena, it, tree, parseTypeExpr, .{
- .ExpectedTypeExpr = .{ .token = it.index },
- });
- error_union.lhs = suffix_expr;
- error_union.rhs = type_expr;
- return node;
+ return null;
+ };
+ block_node.cast(Node.Block).?.label = label_token;
+ return block_node;
}
- return suffix_expr;
-}
+ /// AssignExpr <- Expr (AssignOp Expr)?
+ fn parseAssignExpr(p: *Parser) !?*Node {
+ return p.parseBinOpExpr(parseAssignOp, parseExpr, .Once);
+ }
-/// SuffixExpr
-/// <- KEYWORD_async PrimaryTypeExpr SuffixOp* FnCallArguments
-/// / PrimaryTypeExpr (SuffixOp / FnCallArguments)*
-fn parseSuffixExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const maybe_async = eatToken(it, .Keyword_async);
- if (maybe_async) |async_token| {
- const token_fn = eatToken(it, .Keyword_fn);
- if (token_fn != null) {
- // TODO: remove this hack when async fn rewriting is
- // HACK: If we see the keyword `fn`, then we assume that
- // we are parsing an async fn proto, and not a call.
- // We therefore put back all tokens consumed by the async
- // prefix...
- putBackToken(it, token_fn.?);
- putBackToken(it, async_token);
- return parsePrimaryTypeExpr(arena, it, tree);
- }
- var res = try expectNode(arena, it, tree, parsePrimaryTypeExpr, .{
- .ExpectedPrimaryTypeExpr = .{ .token = it.index },
- });
+ /// Expr <- KEYWORD_try* BoolOrExpr
+ fn parseExpr(p: *Parser) Error!?*Node {
+ return p.parsePrefixOpExpr(parseTry, parseBoolOrExpr);
+ }
- while (try parseSuffixOp(arena, it, tree)) |node| {
- switch (node.id) {
- .SuffixOp => node.cast(Node.SuffixOp).?.lhs = .{ .node = res },
- .InfixOp => node.cast(Node.InfixOp).?.lhs = res,
- else => unreachable,
- }
- res = node;
+ /// BoolOrExpr <- BoolAndExpr (KEYWORD_or BoolAndExpr)*
+ fn parseBoolOrExpr(p: *Parser) !?*Node {
+ return p.parseBinOpExpr(
+ SimpleBinOpParseFn(.Keyword_or, Node.InfixOp.Op.BoolOr),
+ parseBoolAndExpr,
+ .Infinitely,
+ );
+ }
+
+ /// BoolAndExpr <- CompareExpr (KEYWORD_and CompareExpr)*
+ fn parseBoolAndExpr(p: *Parser) !?*Node {
+ return p.parseBinOpExpr(
+ SimpleBinOpParseFn(.Keyword_and, .BoolAnd),
+ parseCompareExpr,
+ .Infinitely,
+ );
+ }
+
+ /// CompareExpr <- BitwiseExpr (CompareOp BitwiseExpr)?
+ fn parseCompareExpr(p: *Parser) !?*Node {
+ return p.parseBinOpExpr(parseCompareOp, parseBitwiseExpr, .Once);
+ }
+
+ /// BitwiseExpr <- BitShiftExpr (BitwiseOp BitShiftExpr)*
+ fn parseBitwiseExpr(p: *Parser) !?*Node {
+ return p.parseBinOpExpr(parseBitwiseOp, parseBitShiftExpr, .Infinitely);
+ }
+
+ /// BitShiftExpr <- AdditionExpr (BitShiftOp AdditionExpr)*
+ fn parseBitShiftExpr(p: *Parser) !?*Node {
+ return p.parseBinOpExpr(parseBitShiftOp, parseAdditionExpr, .Infinitely);
+ }
+
+ /// AdditionExpr <- MultiplyExpr (AdditionOp MultiplyExpr)*
+ fn parseAdditionExpr(p: *Parser) !?*Node {
+ return p.parseBinOpExpr(parseAdditionOp, parseMultiplyExpr, .Infinitely);
+ }
+
+ /// MultiplyExpr <- PrefixExpr (MultiplyOp PrefixExpr)*
+ fn parseMultiplyExpr(p: *Parser) !?*Node {
+ return p.parseBinOpExpr(parseMultiplyOp, parsePrefixExpr, .Infinitely);
+ }
+
+ /// PrefixExpr <- PrefixOp* PrimaryExpr
+ fn parsePrefixExpr(p: *Parser) !?*Node {
+ return p.parsePrefixOpExpr(parsePrefixOp, parsePrimaryExpr);
+ }
+
+ /// PrimaryExpr
+ /// <- AsmExpr
+ /// / IfExpr
+ /// / KEYWORD_break BreakLabel? Expr?
+ /// / KEYWORD_comptime Expr
+ /// / KEYWORD_nosuspend Expr
+ /// / KEYWORD_continue BreakLabel?
+ /// / KEYWORD_resume Expr
+ /// / KEYWORD_return Expr?
+ /// / BlockLabel? LoopExpr
+ /// / Block
+ /// / CurlySuffixExpr
+ fn parsePrimaryExpr(p: *Parser) !?*Node {
+ if (try p.parseAsmExpr()) |node| return node;
+ if (try p.parseIfExpr()) |node| return node;
+
+ if (p.eatToken(.Keyword_break)) |token| {
+ const label = try p.parseBreakLabel();
+ const expr_node = try p.parseExpr();
+ const node = try p.arena.allocator.create(Node.ControlFlowExpression);
+ node.* = .{
+ .ltoken = token,
+ .kind = .{ .Break = label },
+ .rhs = expr_node,
+ };
+ return &node.base;
}
- const params = (try parseFnCallArguments(arena, it, tree)) orelse {
- try tree.errors.push(.{
- .ExpectedParamList = .{ .token = it.index },
+ if (p.eatToken(.Keyword_comptime)) |token| {
+ const expr_node = try p.expectNode(parseExpr, .{
+ .ExpectedExpr = .{ .token = p.tok_i },
});
- // ignore this, continue parsing
- return res;
- };
- const node = try arena.create(Node.SuffixOp);
- node.* = .{
- .lhs = .{ .node = res },
- .op = .{
- .Call = .{
- .params = params.list,
- .async_token = async_token,
- },
- },
- .rtoken = params.rparen,
- };
- return &node.base;
+ const node = try p.arena.allocator.create(Node.Comptime);
+ node.* = .{
+ .doc_comments = null,
+ .comptime_token = token,
+ .expr = expr_node,
+ };
+ return &node.base;
+ }
+
+ if (p.eatToken(.Keyword_nosuspend)) |token| {
+ const expr_node = try p.expectNode(parseExpr, .{
+ .ExpectedExpr = .{ .token = p.tok_i },
+ });
+ const node = try p.arena.allocator.create(Node.Nosuspend);
+ node.* = .{
+ .nosuspend_token = token,
+ .expr = expr_node,
+ };
+ return &node.base;
+ }
+
+ if (p.eatToken(.Keyword_continue)) |token| {
+ const label = try p.parseBreakLabel();
+ const node = try p.arena.allocator.create(Node.ControlFlowExpression);
+ node.* = .{
+ .ltoken = token,
+ .kind = .{ .Continue = label },
+ .rhs = null,
+ };
+ return &node.base;
+ }
+
+ if (p.eatToken(.Keyword_resume)) |token| {
+ const expr_node = try p.expectNode(parseExpr, .{
+ .ExpectedExpr = .{ .token = p.tok_i },
+ });
+ const node = try p.arena.allocator.create(Node.PrefixOp);
+ node.* = .{
+ .op_token = token,
+ .op = .Resume,
+ .rhs = expr_node,
+ };
+ return &node.base;
+ }
+
+ if (p.eatToken(.Keyword_return)) |token| {
+ const expr_node = try p.parseExpr();
+ const node = try p.arena.allocator.create(Node.ControlFlowExpression);
+ node.* = .{
+ .ltoken = token,
+ .kind = .Return,
+ .rhs = expr_node,
+ };
+ return &node.base;
+ }
+
+ var colon: TokenIndex = undefined;
+ const label = p.parseBlockLabel(&colon);
+ if (try p.parseLoopExpr()) |node| {
+ if (node.cast(Node.For)) |for_node| {
+ for_node.label = label;
+ } else if (node.cast(Node.While)) |while_node| {
+ while_node.label = label;
+ } else unreachable;
+ return node;
+ }
+ if (label) |token| {
+ p.putBackToken(token + 1); // ":"
+ p.putBackToken(token); // IDENTIFIER
+ }
+
+ if (try p.parseBlock()) |node| return node;
+ if (try p.parseCurlySuffixExpr()) |node| return node;
+
+ return null;
}
- if (try parsePrimaryTypeExpr(arena, it, tree)) |expr| {
- var res = expr;
+
+ /// IfExpr <- IfPrefix Expr (KEYWORD_else Payload? Expr)?
+ fn parseIfExpr(p: *Parser) !?*Node {
+ return p.parseIf(parseExpr);
+ }
+
+ /// Block <- LBRACE Statement* RBRACE
+ fn parseBlock(p: *Parser) !?*Node {
+ const lbrace = p.eatToken(.LBrace) orelse return null;
+
+ var statements = std.ArrayList(*Node).init(p.gpa);
+ defer statements.deinit();
while (true) {
- if (try parseSuffixOp(arena, it, tree)) |node| {
+ const statement = (p.parseStatement() catch |err| switch (err) {
+ error.OutOfMemory => return error.OutOfMemory,
+ error.ParseError => {
+ // try to skip to the next statement
+ p.findNextStmt();
+ continue;
+ },
+ }) orelse break;
+ try statements.append(statement);
+ }
+
+ const rbrace = try p.expectToken(.RBrace);
+
+ const statements_len = @intCast(NodeIndex, statements.items.len);
+
+ const block_node = try Node.Block.alloc(&p.arena.allocator, statements_len);
+ block_node.* = .{
+ .label = null,
+ .lbrace = lbrace,
+ .statements_len = statements_len,
+ .rbrace = rbrace,
+ };
+ std.mem.copy(*Node, block_node.statements(), statements.items);
+
+ return &block_node.base;
+ }
+
+ /// LoopExpr <- KEYWORD_inline? (ForExpr / WhileExpr)
+ fn parseLoopExpr(p: *Parser) !?*Node {
+ const inline_token = p.eatToken(.Keyword_inline);
+
+ if (try p.parseForExpr()) |node| {
+ node.cast(Node.For).?.inline_token = inline_token;
+ return node;
+ }
+
+ if (try p.parseWhileExpr()) |node| {
+ node.cast(Node.While).?.inline_token = inline_token;
+ return node;
+ }
+
+ if (inline_token == null) return null;
+
+ // If we've seen "inline", there should have been a "for" or "while"
+ try p.errors.append(p.gpa, .{
+ .ExpectedInlinable = .{ .token = p.tok_i },
+ });
+ return error.ParseError;
+ }
+
+ /// ForExpr <- ForPrefix Expr (KEYWORD_else Expr)?
+ fn parseForExpr(p: *Parser) !?*Node {
+ const node = (try p.parseForPrefix()) orelse return null;
+ const for_prefix = node.cast(Node.For).?;
+
+ const body_node = try p.expectNode(parseExpr, .{
+ .ExpectedExpr = .{ .token = p.tok_i },
+ });
+ for_prefix.body = body_node;
+
+ if (p.eatToken(.Keyword_else)) |else_token| {
+ const body = try p.expectNode(parseExpr, .{
+ .ExpectedExpr = .{ .token = p.tok_i },
+ });
+
+ const else_node = try p.arena.allocator.create(Node.Else);
+ else_node.* = .{
+ .else_token = else_token,
+ .payload = null,
+ .body = body,
+ };
+
+ for_prefix.@"else" = else_node;
+ }
+
+ return node;
+ }
+
+ /// WhileExpr <- WhilePrefix Expr (KEYWORD_else Payload? Expr)?
+ fn parseWhileExpr(p: *Parser) !?*Node {
+ const node = (try p.parseWhilePrefix()) orelse return null;
+ const while_prefix = node.cast(Node.While).?;
+
+ const body_node = try p.expectNode(parseExpr, .{
+ .ExpectedExpr = .{ .token = p.tok_i },
+ });
+ while_prefix.body = body_node;
+
+ if (p.eatToken(.Keyword_else)) |else_token| {
+ const payload = try p.parsePayload();
+ const body = try p.expectNode(parseExpr, .{
+ .ExpectedExpr = .{ .token = p.tok_i },
+ });
+
+ const else_node = try p.arena.allocator.create(Node.Else);
+ else_node.* = .{
+ .else_token = else_token,
+ .payload = payload,
+ .body = body,
+ };
+
+ while_prefix.@"else" = else_node;
+ }
+
+ return node;
+ }
+
+ /// CurlySuffixExpr <- TypeExpr InitList?
+ fn parseCurlySuffixExpr(p: *Parser) !?*Node {
+ const lhs = (try p.parseTypeExpr()) orelse return null;
+ const suffix_op = (try p.parseInitList(lhs)) orelse return lhs;
+ return suffix_op;
+ }
+
+ /// InitList
+ /// <- LBRACE FieldInit (COMMA FieldInit)* COMMA? RBRACE
+ /// / LBRACE Expr (COMMA Expr)* COMMA? RBRACE
+ /// / LBRACE RBRACE
+ fn parseInitList(p: *Parser, lhs: *Node) !?*Node {
+ const lbrace = p.eatToken(.LBrace) orelse return null;
+ var init_list = std.ArrayList(*Node).init(p.gpa);
+ defer init_list.deinit();
+
+ if (try p.parseFieldInit()) |field_init| {
+ try init_list.append(field_init);
+ while (p.eatToken(.Comma)) |_| {
+ const next = (try p.parseFieldInit()) orelse break;
+ try init_list.append(next);
+ }
+ const node = try Node.StructInitializer.alloc(&p.arena.allocator, init_list.items.len);
+ node.* = .{
+ .lhs = lhs,
+ .rtoken = try p.expectToken(.RBrace),
+ .list_len = init_list.items.len,
+ };
+ std.mem.copy(*Node, node.list(), init_list.items);
+ return &node.base;
+ }
+
+ if (try p.parseExpr()) |expr| {
+ try init_list.append(expr);
+ while (p.eatToken(.Comma)) |_| {
+ const next = (try p.parseExpr()) orelse break;
+ try init_list.append(next);
+ }
+ const node = try Node.ArrayInitializer.alloc(&p.arena.allocator, init_list.items.len);
+ node.* = .{
+ .lhs = lhs,
+ .rtoken = try p.expectToken(.RBrace),
+ .list_len = init_list.items.len,
+ };
+ std.mem.copy(*Node, node.list(), init_list.items);
+ return &node.base;
+ }
+
+ const node = try p.arena.allocator.create(Node.StructInitializer);
+ node.* = .{
+ .lhs = lhs,
+ .rtoken = try p.expectToken(.RBrace),
+ .list_len = 0,
+ };
+ return &node.base;
+ }
+
+ /// AnonInitList: anonymous (DOT-prefixed) variant of InitList
+ /// <- LBRACE FieldInit (COMMA FieldInit)* COMMA? RBRACE
+ /// / LBRACE Expr (COMMA Expr)* COMMA? RBRACE
+ /// / LBRACE RBRACE
+ fn parseAnonInitList(p: *Parser, dot: TokenIndex) !?*Node {
+ const lbrace = p.eatToken(.LBrace) orelse return null;
+ var init_list = std.ArrayList(*Node).init(p.gpa);
+ defer init_list.deinit();
+
+ if (try p.parseFieldInit()) |field_init| {
+ try init_list.append(field_init);
+ while (p.eatToken(.Comma)) |_| {
+ const next = (try p.parseFieldInit()) orelse break;
+ try init_list.append(next);
+ }
+ const node = try Node.StructInitializerDot.alloc(&p.arena.allocator, init_list.items.len);
+ node.* = .{
+ .dot = dot,
+ .rtoken = try p.expectToken(.RBrace),
+ .list_len = init_list.items.len,
+ };
+ std.mem.copy(*Node, node.list(), init_list.items);
+ return &node.base;
+ }
+
+ if (try p.parseExpr()) |expr| {
+ try init_list.append(expr);
+ while (p.eatToken(.Comma)) |_| {
+ const next = (try p.parseExpr()) orelse break;
+ try init_list.append(next);
+ }
+ const node = try Node.ArrayInitializerDot.alloc(&p.arena.allocator, init_list.items.len);
+ node.* = .{
+ .dot = dot,
+ .rtoken = try p.expectToken(.RBrace),
+ .list_len = init_list.items.len,
+ };
+ std.mem.copy(*Node, node.list(), init_list.items);
+ return &node.base;
+ }
+
+ const node = try p.arena.allocator.create(Node.StructInitializerDot);
+ node.* = .{
+ .dot = dot,
+ .rtoken = try p.expectToken(.RBrace),
+ .list_len = 0,
+ };
+ return &node.base;
+ }
+
+ /// TypeExpr <- PrefixTypeOp* ErrorUnionExpr
+ fn parseTypeExpr(p: *Parser) Error!?*Node {
+ return p.parsePrefixOpExpr(parsePrefixTypeOp, parseErrorUnionExpr);
+ }
+
+ /// ErrorUnionExpr <- SuffixExpr (EXCLAMATIONMARK TypeExpr)?
+ fn parseErrorUnionExpr(p: *Parser) !?*Node {
+ const suffix_expr = (try p.parseSuffixExpr()) orelse return null;
+
+ if (try SimpleBinOpParseFn(.Bang, Node.InfixOp.Op.ErrorUnion)(p)) |node| {
+ const error_union = node.cast(Node.InfixOp).?;
+ const type_expr = try p.expectNode(parseTypeExpr, .{
+ .ExpectedTypeExpr = .{ .token = p.tok_i },
+ });
+ error_union.lhs = suffix_expr;
+ error_union.rhs = type_expr;
+ return node;
+ }
+
+ return suffix_expr;
+ }
+
+ /// SuffixExpr
+ /// <- KEYWORD_async PrimaryTypeExpr SuffixOp* FnCallArguments
+ /// / PrimaryTypeExpr (SuffixOp / FnCallArguments)*
+ fn parseSuffixExpr(p: *Parser) !?*Node {
+ const maybe_async = p.eatToken(.Keyword_async);
+ if (maybe_async) |async_token| {
+ const token_fn = p.eatToken(.Keyword_fn);
+ if (token_fn != null) {
+ // TODO: remove this hack when async fn rewriting is complete.
+ // HACK: If we see the keyword `fn`, then we assume that
+ // we are parsing an async fn proto, and not a call.
+ // We therefore put back all tokens consumed by the async
+ // prefix...
+ p.putBackToken(token_fn.?);
+ p.putBackToken(async_token);
+ return p.parsePrimaryTypeExpr();
+ }
+ var res = try p.expectNode(parsePrimaryTypeExpr, .{
+ .ExpectedPrimaryTypeExpr = .{ .token = p.tok_i },
+ });
+
+ while (try p.parseSuffixOp()) |node| {
switch (node.id) {
- .SuffixOp => node.cast(Node.SuffixOp).?.lhs = .{ .node = res },
+ .SuffixOp => node.cast(Node.SuffixOp).?.lhs = res,
.InfixOp => node.cast(Node.InfixOp).?.lhs = res,
else => unreachable,
}
res = node;
- continue;
}
- if (try parseFnCallArguments(arena, it, tree)) |params| {
- const call = try arena.create(Node.SuffixOp);
- call.* = .{
- .lhs = .{ .node = res },
- .op = .{
- .Call = .{
- .params = params.list,
- .async_token = null,
- },
- },
- .rtoken = params.rparen,
- };
- res = &call.base;
- continue;
- }
- break;
+
+ const params = (try p.parseFnCallArguments()) orelse {
+ try p.errors.append(p.gpa, .{
+ .ExpectedParamList = .{ .token = p.tok_i },
+ });
+ // ignore this, continue parsing
+ return res;
+ };
+ defer p.gpa.free(params.list);
+ const node = try Node.Call.alloc(&p.arena.allocator, params.list.len);
+ node.* = .{
+ .lhs = res,
+ .params_len = params.list.len,
+ .async_token = async_token,
+ .rtoken = params.rparen,
+ };
+ std.mem.copy(*Node, node.params(), params.list);
+ return &node.base;
}
- return res;
+ if (try p.parsePrimaryTypeExpr()) |expr| {
+ var res = expr;
+
+ while (true) {
+ if (try p.parseSuffixOp()) |node| {
+ switch (node.id) {
+ .SuffixOp => node.cast(Node.SuffixOp).?.lhs = res,
+ .InfixOp => node.cast(Node.InfixOp).?.lhs = res,
+ else => unreachable,
+ }
+ res = node;
+ continue;
+ }
+ if (try p.parseFnCallArguments()) |params| {
+ defer p.gpa.free(params.list);
+ const call = try Node.Call.alloc(&p.arena.allocator, params.list.len);
+ call.* = .{
+ .lhs = res,
+ .params_len = params.list.len,
+ .async_token = null,
+ .rtoken = params.rparen,
+ };
+ std.mem.copy(*Node, call.params(), params.list);
+ res = &call.base;
+ continue;
+ }
+ break;
+ }
+ return res;
+ }
+
+ return null;
}
- return null;
-}
+ /// PrimaryTypeExpr
+ /// <- BUILTINIDENTIFIER FnCallArguments
+ /// / CHAR_LITERAL
+ /// / ContainerDecl
+ /// / DOT IDENTIFIER
+ /// / ErrorSetDecl
+ /// / FLOAT
+ /// / FnProto
+ /// / GroupedExpr
+ /// / LabeledTypeExpr
+ /// / IDENTIFIER
+ /// / IfTypeExpr
+ /// / INTEGER
+ /// / KEYWORD_comptime TypeExpr
+ /// / KEYWORD_error DOT IDENTIFIER
+ /// / KEYWORD_false
+ /// / KEYWORD_null
+ /// / KEYWORD_anyframe
+ /// / KEYWORD_true
+ /// / KEYWORD_undefined
+ /// / KEYWORD_unreachable
+ /// / STRINGLITERAL
+ /// / SwitchExpr
+ fn parsePrimaryTypeExpr(p: *Parser) !?*Node {
+ if (try p.parseBuiltinCall()) |node| return node;
+ if (p.eatToken(.CharLiteral)) |token| {
+ const node = try p.arena.allocator.create(Node.CharLiteral);
+ node.* = .{
+ .token = token,
+ };
+ return &node.base;
+ }
+ if (try p.parseContainerDecl()) |node| return node;
+ if (try p.parseAnonLiteral()) |node| return node;
+ if (try p.parseErrorSetDecl()) |node| return node;
+ if (try p.parseFloatLiteral()) |node| return node;
+ if (try p.parseFnProto()) |node| return node;
+ if (try p.parseGroupedExpr()) |node| return node;
+ if (try p.parseLabeledTypeExpr()) |node| return node;
+ if (try p.parseIdentifier()) |node| return node;
+ if (try p.parseIfTypeExpr()) |node| return node;
+ if (try p.parseIntegerLiteral()) |node| return node;
+ if (p.eatToken(.Keyword_comptime)) |token| {
+ const expr = (try p.parseTypeExpr()) orelse return null;
+ const node = try p.arena.allocator.create(Node.Comptime);
+ node.* = .{
+ .doc_comments = null,
+ .comptime_token = token,
+ .expr = expr,
+ };
+ return &node.base;
+ }
+ if (p.eatToken(.Keyword_error)) |token| {
+ const period = try p.expectTokenRecoverable(.Period);
+ const identifier = try p.expectNodeRecoverable(parseIdentifier, .{
+ .ExpectedIdentifier = .{ .token = p.tok_i },
+ });
+ const global_error_set = try p.createLiteral(Node.ErrorType, token);
+ if (period == null or identifier == null) return global_error_set;
-/// PrimaryTypeExpr
-/// <- BUILTINIDENTIFIER FnCallArguments
-/// / CHAR_LITERAL
-/// / ContainerDecl
-/// / DOT IDENTIFIER
-/// / ErrorSetDecl
-/// / FLOAT
-/// / FnProto
-/// / GroupedExpr
-/// / LabeledTypeExpr
-/// / IDENTIFIER
-/// / IfTypeExpr
-/// / INTEGER
-/// / KEYWORD_comptime TypeExpr
-/// / KEYWORD_error DOT IDENTIFIER
-/// / KEYWORD_false
-/// / KEYWORD_null
-/// / KEYWORD_anyframe
-/// / KEYWORD_true
-/// / KEYWORD_undefined
-/// / KEYWORD_unreachable
-/// / STRINGLITERAL
-/// / SwitchExpr
-fn parsePrimaryTypeExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- if (try parseBuiltinCall(arena, it, tree)) |node| return node;
- if (eatToken(it, .CharLiteral)) |token| {
- const node = try arena.create(Node.CharLiteral);
- node.* = .{
- .token = token,
+ const node = try p.arena.allocator.create(Node.InfixOp);
+ node.* = .{
+ .op_token = period.?,
+ .lhs = global_error_set,
+ .op = .Period,
+ .rhs = identifier.?,
+ };
+ return &node.base;
+ }
+ if (p.eatToken(.Keyword_false)) |token| return p.createLiteral(Node.BoolLiteral, token);
+ if (p.eatToken(.Keyword_null)) |token| return p.createLiteral(Node.NullLiteral, token);
+ if (p.eatToken(.Keyword_anyframe)) |token| {
+ const node = try p.arena.allocator.create(Node.AnyFrameType);
+ node.* = .{
+ .anyframe_token = token,
+ .result = null,
+ };
+ return &node.base;
+ }
+ if (p.eatToken(.Keyword_true)) |token| return p.createLiteral(Node.BoolLiteral, token);
+ if (p.eatToken(.Keyword_undefined)) |token| return p.createLiteral(Node.UndefinedLiteral, token);
+ if (p.eatToken(.Keyword_unreachable)) |token| return p.createLiteral(Node.Unreachable, token);
+ if (try p.parseStringLiteral()) |node| return node;
+ if (try p.parseSwitchExpr()) |node| return node;
+
+ return null;
+ }
+
+ /// ContainerDecl <- (KEYWORD_extern / KEYWORD_packed)? ContainerDeclAuto
+ fn parseContainerDecl(p: *Parser) !?*Node {
+ const layout_token = p.eatToken(.Keyword_extern) orelse
+ p.eatToken(.Keyword_packed);
+
+ const node = (try p.parseContainerDeclAuto()) orelse {
+ if (layout_token) |token|
+ p.putBackToken(token);
+ return null;
};
+ node.cast(Node.ContainerDecl).?.*.layout_token = layout_token;
+ return node;
+ }
+
+ /// ErrorSetDecl <- KEYWORD_error LBRACE IdentifierList RBRACE
+ fn parseErrorSetDecl(p: *Parser) !?*Node {
+ const error_token = p.eatToken(.Keyword_error) orelse return null;
+ if (p.eatToken(.LBrace) == null) {
+ // Might parse as `KEYWORD_error DOT IDENTIFIER` later in PrimaryTypeExpr, so don't error
+ p.putBackToken(error_token);
+ return null;
+ }
+ const decls = try p.parseErrorTagList();
+ defer p.gpa.free(decls);
+ const rbrace = try p.expectToken(.RBrace);
+
+ const node = try Node.ErrorSetDecl.alloc(&p.arena.allocator, decls.len);
+ node.* = .{
+ .error_token = error_token,
+ .decls_len = decls.len,
+ .rbrace_token = rbrace,
+ };
+ std.mem.copy(*Node, node.decls(), decls);
return &node.base;
}
- if (try parseContainerDecl(arena, it, tree)) |node| return node;
- if (try parseAnonLiteral(arena, it, tree)) |node| return node;
- if (try parseErrorSetDecl(arena, it, tree)) |node| return node;
- if (try parseFloatLiteral(arena, it, tree)) |node| return node;
- if (try parseFnProto(arena, it, tree)) |node| return node;
- if (try parseGroupedExpr(arena, it, tree)) |node| return node;
- if (try parseLabeledTypeExpr(arena, it, tree)) |node| return node;
- if (try parseIdentifier(arena, it, tree)) |node| return node;
- if (try parseIfTypeExpr(arena, it, tree)) |node| return node;
- if (try parseIntegerLiteral(arena, it, tree)) |node| return node;
- if (eatToken(it, .Keyword_comptime)) |token| {
- const expr = (try parseTypeExpr(arena, it, tree)) orelse return null;
- const node = try arena.create(Node.Comptime);
- node.* = .{
- .doc_comments = null,
- .comptime_token = token,
- .expr = expr,
- };
- return &node.base;
- }
- if (eatToken(it, .Keyword_error)) |token| {
- const period = try expectTokenRecoverable(it, tree, .Period);
- const identifier = try expectNodeRecoverable(arena, it, tree, parseIdentifier, .{
- .ExpectedIdentifier = .{ .token = it.index },
+
+ /// GroupedExpr <- LPAREN Expr RPAREN
+ fn parseGroupedExpr(p: *Parser) !?*Node {
+ const lparen = p.eatToken(.LParen) orelse return null;
+ const expr = try p.expectNode(parseExpr, .{
+ .ExpectedExpr = .{ .token = p.tok_i },
});
- const global_error_set = try createLiteral(arena, Node.ErrorType, token);
- if (period == null or identifier == null) return global_error_set;
+ const rparen = try p.expectToken(.RParen);
- const node = try arena.create(Node.InfixOp);
+ const node = try p.arena.allocator.create(Node.GroupedExpression);
node.* = .{
- .op_token = period.?,
- .lhs = global_error_set,
- .op = .Period,
- .rhs = identifier.?,
+ .lparen = lparen,
+ .expr = expr,
+ .rparen = rparen,
};
return &node.base;
}
- if (eatToken(it, .Keyword_false)) |token| return createLiteral(arena, Node.BoolLiteral, token);
- if (eatToken(it, .Keyword_null)) |token| return createLiteral(arena, Node.NullLiteral, token);
- if (eatToken(it, .Keyword_anyframe)) |token| {
- const node = try arena.create(Node.AnyFrameType);
- node.* = .{
- .anyframe_token = token,
- .result = null,
- };
- return &node.base;
+
+ /// IfTypeExpr <- IfPrefix TypeExpr (KEYWORD_else Payload? TypeExpr)?
+ fn parseIfTypeExpr(p: *Parser) !?*Node {
+ return p.parseIf(parseTypeExpr);
}
- if (eatToken(it, .Keyword_true)) |token| return createLiteral(arena, Node.BoolLiteral, token);
- if (eatToken(it, .Keyword_undefined)) |token| return createLiteral(arena, Node.UndefinedLiteral, token);
- if (eatToken(it, .Keyword_unreachable)) |token| return createLiteral(arena, Node.Unreachable, token);
- if (try parseStringLiteral(arena, it, tree)) |node| return node;
- if (try parseSwitchExpr(arena, it, tree)) |node| return node;
- return null;
-}
+ /// LabeledTypeExpr
+ /// <- BlockLabel Block
+ /// / BlockLabel? LoopTypeExpr
+ fn parseLabeledTypeExpr(p: *Parser) !?*Node {
+ var colon: TokenIndex = undefined;
+ const label = p.parseBlockLabel(&colon);
-/// ContainerDecl <- (KEYWORD_extern / KEYWORD_packed)? ContainerDeclAuto
-fn parseContainerDecl(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const layout_token = eatToken(it, .Keyword_extern) orelse
- eatToken(it, .Keyword_packed);
+ if (label) |token| {
+ if (try p.parseBlock()) |node| {
+ node.cast(Node.Block).?.label = token;
+ return node;
+ }
+ }
- const node = (try parseContainerDeclAuto(arena, it, tree)) orelse {
- if (layout_token) |token|
- putBackToken(it, token);
- return null;
- };
- node.cast(Node.ContainerDecl).?.*.layout_token = layout_token;
- return node;
-}
-
-/// ErrorSetDecl <- KEYWORD_error LBRACE IdentifierList RBRACE
-fn parseErrorSetDecl(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const error_token = eatToken(it, .Keyword_error) orelse return null;
- if (eatToken(it, .LBrace) == null) {
- // Might parse as `KEYWORD_error DOT IDENTIFIER` later in PrimaryTypeExpr, so don't error
- putBackToken(it, error_token);
- return null;
- }
- const decls = try parseErrorTagList(arena, it, tree);
- const rbrace = try expectToken(it, tree, .RBrace);
-
- const node = try arena.create(Node.ErrorSetDecl);
- node.* = .{
- .error_token = error_token,
- .decls = decls,
- .rbrace_token = rbrace,
- };
- return &node.base;
-}
-
-/// GroupedExpr <- LPAREN Expr RPAREN
-fn parseGroupedExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const lparen = eatToken(it, .LParen) orelse return null;
- const expr = try expectNode(arena, it, tree, parseExpr, .{
- .ExpectedExpr = .{ .token = it.index },
- });
- const rparen = try expectToken(it, tree, .RParen);
-
- const node = try arena.create(Node.GroupedExpression);
- node.* = .{
- .lparen = lparen,
- .expr = expr,
- .rparen = rparen,
- };
- return &node.base;
-}
-
-/// IfTypeExpr <- IfPrefix TypeExpr (KEYWORD_else Payload? TypeExpr)?
-fn parseIfTypeExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- return parseIf(arena, it, tree, parseTypeExpr);
-}
-
-/// LabeledTypeExpr
-/// <- BlockLabel Block
-/// / BlockLabel? LoopTypeExpr
-fn parseLabeledTypeExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- var colon: TokenIndex = undefined;
- const label = parseBlockLabel(arena, it, tree, &colon);
-
- if (label) |token| {
- if (try parseBlock(arena, it, tree)) |node| {
- node.cast(Node.Block).?.label = token;
+ if (try p.parseLoopTypeExpr()) |node| {
+ switch (node.id) {
+ .For => node.cast(Node.For).?.label = label,
+ .While => node.cast(Node.While).?.label = label,
+ else => unreachable,
+ }
return node;
}
- }
- if (try parseLoopTypeExpr(arena, it, tree)) |node| {
- switch (node.id) {
- .For => node.cast(Node.For).?.label = label,
- .While => node.cast(Node.While).?.label = label,
- else => unreachable,
+ if (label) |token| {
+ p.putBackToken(colon);
+ p.putBackToken(token);
}
- return node;
+ return null;
}
- if (label) |token| {
- putBackToken(it, colon);
- putBackToken(it, token);
- }
- return null;
-}
+ /// LoopTypeExpr <- KEYWORD_inline? (ForTypeExpr / WhileTypeExpr)
+ fn parseLoopTypeExpr(p: *Parser) !?*Node {
+ const inline_token = p.eatToken(.Keyword_inline);
-/// LoopTypeExpr <- KEYWORD_inline? (ForTypeExpr / WhileTypeExpr)
-fn parseLoopTypeExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const inline_token = eatToken(it, .Keyword_inline);
-
- if (try parseForTypeExpr(arena, it, tree)) |node| {
- node.cast(Node.For).?.inline_token = inline_token;
- return node;
- }
-
- if (try parseWhileTypeExpr(arena, it, tree)) |node| {
- node.cast(Node.While).?.inline_token = inline_token;
- return node;
- }
-
- if (inline_token == null) return null;
-
- // If we've seen "inline", there should have been a "for" or "while"
- try tree.errors.push(.{
- .ExpectedInlinable = .{ .token = it.index },
- });
- return error.ParseError;
-}
-
-/// ForTypeExpr <- ForPrefix TypeExpr (KEYWORD_else TypeExpr)?
-fn parseForTypeExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const node = (try parseForPrefix(arena, it, tree)) orelse return null;
- const for_prefix = node.cast(Node.For).?;
-
- const type_expr = try expectNode(arena, it, tree, parseTypeExpr, .{
- .ExpectedTypeExpr = .{ .token = it.index },
- });
- for_prefix.body = type_expr;
-
- if (eatToken(it, .Keyword_else)) |else_token| {
- const else_expr = try expectNode(arena, it, tree, parseTypeExpr, .{
- .ExpectedTypeExpr = .{ .token = it.index },
- });
-
- const else_node = try arena.create(Node.Else);
- else_node.* = .{
- .else_token = else_token,
- .payload = null,
- .body = else_expr,
- };
-
- for_prefix.@"else" = else_node;
- }
-
- return node;
-}
-
-/// WhileTypeExpr <- WhilePrefix TypeExpr (KEYWORD_else Payload? TypeExpr)?
-fn parseWhileTypeExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const node = (try parseWhilePrefix(arena, it, tree)) orelse return null;
- const while_prefix = node.cast(Node.While).?;
-
- const type_expr = try expectNode(arena, it, tree, parseTypeExpr, .{
- .ExpectedTypeExpr = .{ .token = it.index },
- });
- while_prefix.body = type_expr;
-
- if (eatToken(it, .Keyword_else)) |else_token| {
- const payload = try parsePayload(arena, it, tree);
-
- const else_expr = try expectNode(arena, it, tree, parseTypeExpr, .{
- .ExpectedTypeExpr = .{ .token = it.index },
- });
-
- const else_node = try arena.create(Node.Else);
- else_node.* = .{
- .else_token = else_token,
- .payload = null,
- .body = else_expr,
- };
-
- while_prefix.@"else" = else_node;
- }
-
- return node;
-}
-
-/// SwitchExpr <- KEYWORD_switch LPAREN Expr RPAREN LBRACE SwitchProngList RBRACE
-fn parseSwitchExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const switch_token = eatToken(it, .Keyword_switch) orelse return null;
- _ = try expectToken(it, tree, .LParen);
- const expr_node = try expectNode(arena, it, tree, parseExpr, .{
- .ExpectedExpr = .{ .token = it.index },
- });
- _ = try expectToken(it, tree, .RParen);
- _ = try expectToken(it, tree, .LBrace);
- const cases = try parseSwitchProngList(arena, it, tree);
- const rbrace = try expectToken(it, tree, .RBrace);
-
- const node = try arena.create(Node.Switch);
- node.* = .{
- .switch_token = switch_token,
- .expr = expr_node,
- .cases = cases,
- .rbrace = rbrace,
- };
- return &node.base;
-}
-
-/// AsmExpr <- KEYWORD_asm KEYWORD_volatile? LPAREN Expr AsmOutput? RPAREN
-fn parseAsmExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const asm_token = eatToken(it, .Keyword_asm) orelse return null;
- const volatile_token = eatToken(it, .Keyword_volatile);
- _ = try expectToken(it, tree, .LParen);
- const template = try expectNode(arena, it, tree, parseExpr, .{
- .ExpectedExpr = .{ .token = it.index },
- });
-
- const node = try arena.create(Node.Asm);
- node.* = .{
- .asm_token = asm_token,
- .volatile_token = volatile_token,
- .template = template,
- .outputs = Node.Asm.OutputList.init(arena),
- .inputs = Node.Asm.InputList.init(arena),
- .clobbers = Node.Asm.ClobberList.init(arena),
- .rparen = undefined,
- };
-
- try parseAsmOutput(arena, it, tree, node);
- node.rparen = try expectToken(it, tree, .RParen);
- return &node.base;
-}
-
-/// DOT IDENTIFIER
-fn parseAnonLiteral(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const dot = eatToken(it, .Period) orelse return null;
-
- // anon enum literal
- if (eatToken(it, .Identifier)) |name| {
- const node = try arena.create(Node.EnumLiteral);
- node.* = .{
- .dot = dot,
- .name = name,
- };
- return &node.base;
- }
-
- // anon container literal
- if (try parseInitList(arena, it, tree)) |node| {
- node.lhs = .{ .dot = dot };
- return &node.base;
- }
-
- putBackToken(it, dot);
- return null;
-}
-
-/// AsmOutput <- COLON AsmOutputList AsmInput?
-fn parseAsmOutput(arena: *Allocator, it: *TokenIterator, tree: *Tree, asm_node: *Node.Asm) !void {
- if (eatToken(it, .Colon) == null) return;
- asm_node.outputs = try parseAsmOutputList(arena, it, tree);
- try parseAsmInput(arena, it, tree, asm_node);
-}
-
-/// AsmOutputItem <- LBRACKET IDENTIFIER RBRACKET STRINGLITERAL LPAREN (MINUSRARROW TypeExpr / IDENTIFIER) RPAREN
-fn parseAsmOutputItem(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node.AsmOutput {
- const lbracket = eatToken(it, .LBracket) orelse return null;
- const name = try expectNode(arena, it, tree, parseIdentifier, .{
- .ExpectedIdentifier = .{ .token = it.index },
- });
- _ = try expectToken(it, tree, .RBracket);
-
- const constraint = try expectNode(arena, it, tree, parseStringLiteral, .{
- .ExpectedStringLiteral = .{ .token = it.index },
- });
-
- _ = try expectToken(it, tree, .LParen);
- const kind: Node.AsmOutput.Kind = blk: {
- if (eatToken(it, .Arrow) != null) {
- const return_ident = try expectNode(arena, it, tree, parseTypeExpr, .{
- .ExpectedTypeExpr = .{ .token = it.index },
- });
- break :blk .{ .Return = return_ident };
+ if (try p.parseForTypeExpr()) |node| {
+ node.cast(Node.For).?.inline_token = inline_token;
+ return node;
}
- const variable = try expectNode(arena, it, tree, parseIdentifier, .{
- .ExpectedIdentifier = .{ .token = it.index },
- });
- break :blk .{ .Variable = variable.cast(Node.Identifier).? };
- };
- const rparen = try expectToken(it, tree, .RParen);
- const node = try arena.create(Node.AsmOutput);
- node.* = .{
- .lbracket = lbracket,
- .symbolic_name = name,
- .constraint = constraint,
- .kind = kind,
- .rparen = rparen,
- };
- return node;
-}
+ if (try p.parseWhileTypeExpr()) |node| {
+ node.cast(Node.While).?.inline_token = inline_token;
+ return node;
+ }
-/// AsmInput <- COLON AsmInputList AsmClobbers?
-fn parseAsmInput(arena: *Allocator, it: *TokenIterator, tree: *Tree, asm_node: *Node.Asm) !void {
- if (eatToken(it, .Colon) == null) return;
- asm_node.inputs = try parseAsmInputList(arena, it, tree);
- try parseAsmClobbers(arena, it, tree, asm_node);
-}
+ if (inline_token == null) return null;
-/// AsmInputItem <- LBRACKET IDENTIFIER RBRACKET STRINGLITERAL LPAREN Expr RPAREN
-fn parseAsmInputItem(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node.AsmInput {
- const lbracket = eatToken(it, .LBracket) orelse return null;
- const name = try expectNode(arena, it, tree, parseIdentifier, .{
- .ExpectedIdentifier = .{ .token = it.index },
- });
- _ = try expectToken(it, tree, .RBracket);
-
- const constraint = try expectNode(arena, it, tree, parseStringLiteral, .{
- .ExpectedStringLiteral = .{ .token = it.index },
- });
-
- _ = try expectToken(it, tree, .LParen);
- const expr = try expectNode(arena, it, tree, parseExpr, .{
- .ExpectedExpr = .{ .token = it.index },
- });
- const rparen = try expectToken(it, tree, .RParen);
-
- const node = try arena.create(Node.AsmInput);
- node.* = .{
- .lbracket = lbracket,
- .symbolic_name = name,
- .constraint = constraint,
- .expr = expr,
- .rparen = rparen,
- };
- return node;
-}
-
-/// AsmClobbers <- COLON StringList
-/// StringList <- (STRINGLITERAL COMMA)* STRINGLITERAL?
-fn parseAsmClobbers(arena: *Allocator, it: *TokenIterator, tree: *Tree, asm_node: *Node.Asm) !void {
- if (eatToken(it, .Colon) == null) return;
- asm_node.clobbers = try ListParseFn(
- Node.Asm.ClobberList,
- parseStringLiteral,
- )(arena, it, tree);
-}
-
-/// BreakLabel <- COLON IDENTIFIER
-fn parseBreakLabel(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- _ = eatToken(it, .Colon) orelse return null;
- return try expectNode(arena, it, tree, parseIdentifier, .{
- .ExpectedIdentifier = .{ .token = it.index },
- });
-}
-
-/// BlockLabel <- IDENTIFIER COLON
-fn parseBlockLabel(arena: *Allocator, it: *TokenIterator, tree: *Tree, colon_token: *TokenIndex) ?TokenIndex {
- const identifier = eatToken(it, .Identifier) orelse return null;
- if (eatToken(it, .Colon)) |colon| {
- colon_token.* = colon;
- return identifier;
- }
- putBackToken(it, identifier);
- return null;
-}
-
-/// FieldInit <- DOT IDENTIFIER EQUAL Expr
-fn parseFieldInit(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const period_token = eatToken(it, .Period) orelse return null;
- const name_token = eatToken(it, .Identifier) orelse {
- // Because of anon literals `.{` is also valid.
- putBackToken(it, period_token);
- return null;
- };
- const eq_token = eatToken(it, .Equal) orelse {
- // `.Name` may also be an enum literal, which is a later rule.
- putBackToken(it, name_token);
- putBackToken(it, period_token);
- return null;
- };
- const expr_node = try expectNode(arena, it, tree, parseExpr, .{
- .ExpectedExpr = .{ .token = it.index },
- });
-
- const node = try arena.create(Node.FieldInitializer);
- node.* = .{
- .period_token = period_token,
- .name_token = name_token,
- .expr = expr_node,
- };
- return &node.base;
-}
-
-/// WhileContinueExpr <- COLON LPAREN AssignExpr RPAREN
-fn parseWhileContinueExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- _ = eatToken(it, .Colon) orelse return null;
- _ = try expectToken(it, tree, .LParen);
- const node = try expectNode(arena, it, tree, parseAssignExpr, .{
- .ExpectedExprOrAssignment = .{ .token = it.index },
- });
- _ = try expectToken(it, tree, .RParen);
- return node;
-}
-
-/// LinkSection <- KEYWORD_linksection LPAREN Expr RPAREN
-fn parseLinkSection(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- _ = eatToken(it, .Keyword_linksection) orelse return null;
- _ = try expectToken(it, tree, .LParen);
- const expr_node = try expectNode(arena, it, tree, parseExpr, .{
- .ExpectedExpr = .{ .token = it.index },
- });
- _ = try expectToken(it, tree, .RParen);
- return expr_node;
-}
-
-/// CallConv <- KEYWORD_callconv LPAREN Expr RPAREN
-fn parseCallconv(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- _ = eatToken(it, .Keyword_callconv) orelse return null;
- _ = try expectToken(it, tree, .LParen);
- const expr_node = try expectNode(arena, it, tree, parseExpr, .{
- .ExpectedExpr = .{ .token = it.index },
- });
- _ = try expectToken(it, tree, .RParen);
- return expr_node;
-}
-
-/// ParamDecl <- (KEYWORD_noalias / KEYWORD_comptime)? (IDENTIFIER COLON)? ParamType
-fn parseParamDecl(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const doc_comments = try parseDocComment(arena, it, tree);
- const noalias_token = eatToken(it, .Keyword_noalias);
- const comptime_token = if (noalias_token == null) eatToken(it, .Keyword_comptime) else null;
- const name_token = blk: {
- const identifier = eatToken(it, .Identifier) orelse break :blk null;
- if (eatToken(it, .Colon) != null) break :blk identifier;
- putBackToken(it, identifier); // ParamType may also be an identifier
- break :blk null;
- };
- const param_type = (try parseParamType(arena, it, tree)) orelse {
- // Only return cleanly if no keyword, identifier, or doc comment was found
- if (noalias_token == null and
- comptime_token == null and
- name_token == null and
- doc_comments == null) return null;
- try tree.errors.push(.{
- .ExpectedParamType = .{ .token = it.index },
+ // If we've seen "inline", there should have been a "for" or "while"
+ try p.errors.append(p.gpa, .{
+ .ExpectedInlinable = .{ .token = p.tok_i },
});
return error.ParseError;
- };
+ }
- const param_decl = try arena.create(Node.ParamDecl);
- param_decl.* = .{
- .doc_comments = doc_comments,
- .comptime_token = comptime_token,
- .noalias_token = noalias_token,
- .name_token = name_token,
- .param_type = param_type,
- };
- return &param_decl.base;
-}
+ /// ForTypeExpr <- ForPrefix TypeExpr (KEYWORD_else TypeExpr)?
+ fn parseForTypeExpr(p: *Parser) !?*Node {
+ const node = (try p.parseForPrefix()) orelse return null;
+ const for_prefix = node.cast(Node.For).?;
-/// ParamType
-/// <- KEYWORD_var
-/// / DOT3
-/// / TypeExpr
-fn parseParamType(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?Node.ParamDecl.ParamType {
- // TODO cast from tuple to error union is broken
- const P = Node.ParamDecl.ParamType;
- if (try parseVarType(arena, it, tree)) |node| return P{ .var_type = node };
- if (eatToken(it, .Ellipsis3)) |token| return P{ .var_args = token };
- if (try parseTypeExpr(arena, it, tree)) |node| return P{ .type_expr = node };
- return null;
-}
-
-/// IfPrefix <- KEYWORD_if LPAREN Expr RPAREN PtrPayload?
-fn parseIfPrefix(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const if_token = eatToken(it, .Keyword_if) orelse return null;
- _ = try expectToken(it, tree, .LParen);
- const condition = try expectNode(arena, it, tree, parseExpr, .{
- .ExpectedExpr = .{ .token = it.index },
- });
- _ = try expectToken(it, tree, .RParen);
- const payload = try parsePtrPayload(arena, it, tree);
-
- const node = try arena.create(Node.If);
- node.* = .{
- .if_token = if_token,
- .condition = condition,
- .payload = payload,
- .body = undefined, // set by caller
- .@"else" = null,
- };
- return &node.base;
-}
-
-/// WhilePrefix <- KEYWORD_while LPAREN Expr RPAREN PtrPayload? WhileContinueExpr?
-fn parseWhilePrefix(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const while_token = eatToken(it, .Keyword_while) orelse return null;
-
- _ = try expectToken(it, tree, .LParen);
- const condition = try expectNode(arena, it, tree, parseExpr, .{
- .ExpectedExpr = .{ .token = it.index },
- });
- _ = try expectToken(it, tree, .RParen);
-
- const payload = try parsePtrPayload(arena, it, tree);
- const continue_expr = try parseWhileContinueExpr(arena, it, tree);
-
- const node = try arena.create(Node.While);
- node.* = .{
- .label = null,
- .inline_token = null,
- .while_token = while_token,
- .condition = condition,
- .payload = payload,
- .continue_expr = continue_expr,
- .body = undefined, // set by caller
- .@"else" = null,
- };
- return &node.base;
-}
-
-/// ForPrefix <- KEYWORD_for LPAREN Expr RPAREN PtrIndexPayload
-fn parseForPrefix(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const for_token = eatToken(it, .Keyword_for) orelse return null;
-
- _ = try expectToken(it, tree, .LParen);
- const array_expr = try expectNode(arena, it, tree, parseExpr, .{
- .ExpectedExpr = .{ .token = it.index },
- });
- _ = try expectToken(it, tree, .RParen);
-
- const payload = try expectNode(arena, it, tree, parsePtrIndexPayload, .{
- .ExpectedPayload = .{ .token = it.index },
- });
-
- const node = try arena.create(Node.For);
- node.* = .{
- .label = null,
- .inline_token = null,
- .for_token = for_token,
- .array_expr = array_expr,
- .payload = payload,
- .body = undefined, // set by caller
- .@"else" = null,
- };
- return &node.base;
-}
-
-/// Payload <- PIPE IDENTIFIER PIPE
-fn parsePayload(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const lpipe = eatToken(it, .Pipe) orelse return null;
- const identifier = try expectNode(arena, it, tree, parseIdentifier, .{
- .ExpectedIdentifier = .{ .token = it.index },
- });
- const rpipe = try expectToken(it, tree, .Pipe);
-
- const node = try arena.create(Node.Payload);
- node.* = .{
- .lpipe = lpipe,
- .error_symbol = identifier,
- .rpipe = rpipe,
- };
- return &node.base;
-}
-
-/// PtrPayload <- PIPE ASTERISK? IDENTIFIER PIPE
-fn parsePtrPayload(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const lpipe = eatToken(it, .Pipe) orelse return null;
- const asterisk = eatToken(it, .Asterisk);
- const identifier = try expectNode(arena, it, tree, parseIdentifier, .{
- .ExpectedIdentifier = .{ .token = it.index },
- });
- const rpipe = try expectToken(it, tree, .Pipe);
-
- const node = try arena.create(Node.PointerPayload);
- node.* = .{
- .lpipe = lpipe,
- .ptr_token = asterisk,
- .value_symbol = identifier,
- .rpipe = rpipe,
- };
- return &node.base;
-}
-
-/// PtrIndexPayload <- PIPE ASTERISK? IDENTIFIER (COMMA IDENTIFIER)? PIPE
-fn parsePtrIndexPayload(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const lpipe = eatToken(it, .Pipe) orelse return null;
- const asterisk = eatToken(it, .Asterisk);
- const identifier = try expectNode(arena, it, tree, parseIdentifier, .{
- .ExpectedIdentifier = .{ .token = it.index },
- });
-
- const index = if (eatToken(it, .Comma) == null)
- null
- else
- try expectNode(arena, it, tree, parseIdentifier, .{
- .ExpectedIdentifier = .{ .token = it.index },
+ const type_expr = try p.expectNode(parseTypeExpr, .{
+ .ExpectedTypeExpr = .{ .token = p.tok_i },
});
+ for_prefix.body = type_expr;
- const rpipe = try expectToken(it, tree, .Pipe);
-
- const node = try arena.create(Node.PointerIndexPayload);
- node.* = .{
- .lpipe = lpipe,
- .ptr_token = asterisk,
- .value_symbol = identifier,
- .index_symbol = index,
- .rpipe = rpipe,
- };
- return &node.base;
-}
-
-/// SwitchProng <- SwitchCase EQUALRARROW PtrPayload? AssignExpr
-fn parseSwitchProng(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const node = (try parseSwitchCase(arena, it, tree)) orelse return null;
- const arrow = try expectToken(it, tree, .EqualAngleBracketRight);
- const payload = try parsePtrPayload(arena, it, tree);
- const expr = try expectNode(arena, it, tree, parseAssignExpr, .{
- .ExpectedExprOrAssignment = .{ .token = it.index },
- });
-
- const switch_case = node.cast(Node.SwitchCase).?;
- switch_case.arrow_token = arrow;
- switch_case.payload = payload;
- switch_case.expr = expr;
-
- return node;
-}
-
-/// SwitchCase
-/// <- SwitchItem (COMMA SwitchItem)* COMMA?
-/// / KEYWORD_else
-fn parseSwitchCase(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- var list = Node.SwitchCase.ItemList.init(arena);
-
- if (try parseSwitchItem(arena, it, tree)) |first_item| {
- try list.push(first_item);
- while (eatToken(it, .Comma) != null) {
- const next_item = (try parseSwitchItem(arena, it, tree)) orelse break;
- try list.push(next_item);
- }
- } else if (eatToken(it, .Keyword_else)) |else_token| {
- const else_node = try arena.create(Node.SwitchElse);
- else_node.* = .{
- .token = else_token,
- };
- try list.push(&else_node.base);
- } else return null;
-
- const node = try arena.create(Node.SwitchCase);
- node.* = .{
- .items = list,
- .arrow_token = undefined, // set by caller
- .payload = null,
- .expr = undefined, // set by caller
- };
- return &node.base;
-}
-
-/// SwitchItem <- Expr (DOT3 Expr)?
-fn parseSwitchItem(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const expr = (try parseExpr(arena, it, tree)) orelse return null;
- if (eatToken(it, .Ellipsis3)) |token| {
- const range_end = try expectNode(arena, it, tree, parseExpr, .{
- .ExpectedExpr = .{ .token = it.index },
- });
-
- const node = try arena.create(Node.InfixOp);
- node.* = .{
- .op_token = token,
- .lhs = expr,
- .op = .Range,
- .rhs = range_end,
- };
- return &node.base;
- }
- return expr;
-}
-
-/// AssignOp
-/// <- ASTERISKEQUAL
-/// / SLASHEQUAL
-/// / PERCENTEQUAL
-/// / PLUSEQUAL
-/// / MINUSEQUAL
-/// / LARROW2EQUAL
-/// / RARROW2EQUAL
-/// / AMPERSANDEQUAL
-/// / CARETEQUAL
-/// / PIPEEQUAL
-/// / ASTERISKPERCENTEQUAL
-/// / PLUSPERCENTEQUAL
-/// / MINUSPERCENTEQUAL
-/// / EQUAL
-fn parseAssignOp(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const token = nextToken(it);
- const op: Node.InfixOp.Op = switch (token.ptr.id) {
- .AsteriskEqual => .AssignMul,
- .SlashEqual => .AssignDiv,
- .PercentEqual => .AssignMod,
- .PlusEqual => .AssignAdd,
- .MinusEqual => .AssignSub,
- .AngleBracketAngleBracketLeftEqual => .AssignBitShiftLeft,
- .AngleBracketAngleBracketRightEqual => .AssignBitShiftRight,
- .AmpersandEqual => .AssignBitAnd,
- .CaretEqual => .AssignBitXor,
- .PipeEqual => .AssignBitOr,
- .AsteriskPercentEqual => .AssignMulWrap,
- .PlusPercentEqual => .AssignAddWrap,
- .MinusPercentEqual => .AssignSubWrap,
- .Equal => .Assign,
- else => {
- putBackToken(it, token.index);
- return null;
- },
- };
-
- const node = try arena.create(Node.InfixOp);
- node.* = .{
- .op_token = token.index,
- .lhs = undefined, // set by caller
- .op = op,
- .rhs = undefined, // set by caller
- };
- return &node.base;
-}
-
-/// CompareOp
-/// <- EQUALEQUAL
-/// / EXCLAMATIONMARKEQUAL
-/// / LARROW
-/// / RARROW
-/// / LARROWEQUAL
-/// / RARROWEQUAL
-fn parseCompareOp(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const token = nextToken(it);
- const op: Node.InfixOp.Op = switch (token.ptr.id) {
- .EqualEqual => .EqualEqual,
- .BangEqual => .BangEqual,
- .AngleBracketLeft => .LessThan,
- .AngleBracketRight => .GreaterThan,
- .AngleBracketLeftEqual => .LessOrEqual,
- .AngleBracketRightEqual => .GreaterOrEqual,
- else => {
- putBackToken(it, token.index);
- return null;
- },
- };
-
- return try createInfixOp(arena, token.index, op);
-}
-
-/// BitwiseOp
-/// <- AMPERSAND
-/// / CARET
-/// / PIPE
-/// / KEYWORD_orelse
-/// / KEYWORD_catch Payload?
-fn parseBitwiseOp(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const token = nextToken(it);
- const op: Node.InfixOp.Op = switch (token.ptr.id) {
- .Ampersand => .BitAnd,
- .Caret => .BitXor,
- .Pipe => .BitOr,
- .Keyword_orelse => .UnwrapOptional,
- .Keyword_catch => .{ .Catch = try parsePayload(arena, it, tree) },
- else => {
- putBackToken(it, token.index);
- return null;
- },
- };
-
- return try createInfixOp(arena, token.index, op);
-}
-
-/// BitShiftOp
-/// <- LARROW2
-/// / RARROW2
-fn parseBitShiftOp(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const token = nextToken(it);
- const op: Node.InfixOp.Op = switch (token.ptr.id) {
- .AngleBracketAngleBracketLeft => .BitShiftLeft,
- .AngleBracketAngleBracketRight => .BitShiftRight,
- else => {
- putBackToken(it, token.index);
- return null;
- },
- };
-
- return try createInfixOp(arena, token.index, op);
-}
-
-/// AdditionOp
-/// <- PLUS
-/// / MINUS
-/// / PLUS2
-/// / PLUSPERCENT
-/// / MINUSPERCENT
-fn parseAdditionOp(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const token = nextToken(it);
- const op: Node.InfixOp.Op = switch (token.ptr.id) {
- .Plus => .Add,
- .Minus => .Sub,
- .PlusPlus => .ArrayCat,
- .PlusPercent => .AddWrap,
- .MinusPercent => .SubWrap,
- else => {
- putBackToken(it, token.index);
- return null;
- },
- };
-
- return try createInfixOp(arena, token.index, op);
-}
-
-/// MultiplyOp
-/// <- PIPE2
-/// / ASTERISK
-/// / SLASH
-/// / PERCENT
-/// / ASTERISK2
-/// / ASTERISKPERCENT
-fn parseMultiplyOp(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const token = nextToken(it);
- const op: Node.InfixOp.Op = switch (token.ptr.id) {
- .PipePipe => .MergeErrorSets,
- .Asterisk => .Mul,
- .Slash => .Div,
- .Percent => .Mod,
- .AsteriskAsterisk => .ArrayMult,
- .AsteriskPercent => .MulWrap,
- else => {
- putBackToken(it, token.index);
- return null;
- },
- };
-
- return try createInfixOp(arena, token.index, op);
-}
-
-/// PrefixOp
-/// <- EXCLAMATIONMARK
-/// / MINUS
-/// / TILDE
-/// / MINUSPERCENT
-/// / AMPERSAND
-/// / KEYWORD_try
-/// / KEYWORD_await
-fn parsePrefixOp(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const token = nextToken(it);
- const op: Node.PrefixOp.Op = switch (token.ptr.id) {
- .Bang => .BoolNot,
- .Minus => .Negation,
- .Tilde => .BitNot,
- .MinusPercent => .NegationWrap,
- .Ampersand => .AddressOf,
- .Keyword_try => .Try,
- .Keyword_await => .Await,
- else => {
- putBackToken(it, token.index);
- return null;
- },
- };
-
- const node = try arena.create(Node.PrefixOp);
- node.* = .{
- .op_token = token.index,
- .op = op,
- .rhs = undefined, // set by caller
- };
- return &node.base;
-}
-
-// TODO: ArrayTypeStart is either an array or a slice, but const/allowzero only work on
-// pointers. Consider updating this rule:
-// ...
-// / ArrayTypeStart
-// / SliceTypeStart (ByteAlign / KEYWORD_const / KEYWORD_volatile / KEYWORD_allowzero)*
-// / PtrTypeStart ...
-
-/// PrefixTypeOp
-/// <- QUESTIONMARK
-/// / KEYWORD_anyframe MINUSRARROW
-/// / ArrayTypeStart (ByteAlign / KEYWORD_const / KEYWORD_volatile / KEYWORD_allowzero)*
-/// / PtrTypeStart (KEYWORD_align LPAREN Expr (COLON INTEGER COLON INTEGER)? RPAREN / KEYWORD_const / KEYWORD_volatile / KEYWORD_allowzero)*
-fn parsePrefixTypeOp(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- if (eatToken(it, .QuestionMark)) |token| {
- const node = try arena.create(Node.PrefixOp);
- node.* = .{
- .op_token = token,
- .op = .OptionalType,
- .rhs = undefined, // set by caller
- };
- return &node.base;
- }
-
- // TODO: Returning a AnyFrameType instead of PrefixOp makes casting and setting .rhs or
- // .return_type more difficult for the caller (see parsePrefixOpExpr helper).
- // Consider making the AnyFrameType a member of PrefixOp and add a
- // PrefixOp.AnyFrameType variant?
- if (eatToken(it, .Keyword_anyframe)) |token| {
- const arrow = eatToken(it, .Arrow) orelse {
- putBackToken(it, token);
- return null;
- };
- const node = try arena.create(Node.AnyFrameType);
- node.* = .{
- .anyframe_token = token,
- .result = .{
- .arrow_token = arrow,
- .return_type = undefined, // set by caller
- },
- };
- return &node.base;
- }
-
- if (try parsePtrTypeStart(arena, it, tree)) |node| {
- // If the token encountered was **, there will be two nodes instead of one.
- // The attributes should be applied to the rightmost operator.
- const prefix_op = node.cast(Node.PrefixOp).?;
- var ptr_info = if (tree.tokens.at(prefix_op.op_token).id == .AsteriskAsterisk)
- &prefix_op.rhs.cast(Node.PrefixOp).?.op.PtrType
- else
- &prefix_op.op.PtrType;
-
- while (true) {
- if (eatToken(it, .Keyword_align)) |align_token| {
- const lparen = try expectToken(it, tree, .LParen);
- const expr_node = try expectNode(arena, it, tree, parseExpr, .{
- .ExpectedExpr = .{ .token = it.index },
- });
-
- // Optional bit range
- const bit_range = if (eatToken(it, .Colon)) |_| bit_range_value: {
- const range_start = try expectNode(arena, it, tree, parseIntegerLiteral, .{
- .ExpectedIntegerLiteral = .{ .token = it.index },
- });
- _ = try expectToken(it, tree, .Colon);
- const range_end = try expectNode(arena, it, tree, parseIntegerLiteral, .{
- .ExpectedIntegerLiteral = .{ .token = it.index },
- });
-
- break :bit_range_value Node.PrefixOp.PtrInfo.Align.BitRange{
- .start = range_start,
- .end = range_end,
- };
- } else null;
- _ = try expectToken(it, tree, .RParen);
-
- if (ptr_info.align_info != null) {
- try tree.errors.push(.{
- .ExtraAlignQualifier = .{ .token = it.index - 1 },
- });
- continue;
- }
-
- ptr_info.align_info = Node.PrefixOp.PtrInfo.Align{
- .node = expr_node,
- .bit_range = bit_range,
- };
-
- continue;
- }
- if (eatToken(it, .Keyword_const)) |const_token| {
- if (ptr_info.const_token != null) {
- try tree.errors.push(.{
- .ExtraConstQualifier = .{ .token = it.index - 1 },
- });
- continue;
- }
- ptr_info.const_token = const_token;
- continue;
- }
- if (eatToken(it, .Keyword_volatile)) |volatile_token| {
- if (ptr_info.volatile_token != null) {
- try tree.errors.push(.{
- .ExtraVolatileQualifier = .{ .token = it.index - 1 },
- });
- continue;
- }
- ptr_info.volatile_token = volatile_token;
- continue;
- }
- if (eatToken(it, .Keyword_allowzero)) |allowzero_token| {
- if (ptr_info.allowzero_token != null) {
- try tree.errors.push(.{
- .ExtraAllowZeroQualifier = .{ .token = it.index - 1 },
- });
- continue;
- }
- ptr_info.allowzero_token = allowzero_token;
- continue;
- }
- break;
- }
-
- return node;
- }
-
- if (try parseArrayTypeStart(arena, it, tree)) |node| {
- switch (node.cast(Node.PrefixOp).?.op) {
- .ArrayType => {},
- .SliceType => |*slice_type| {
- // Collect pointer qualifiers in any order, but disallow duplicates
- while (true) {
- if (try parseByteAlign(arena, it, tree)) |align_expr| {
- if (slice_type.align_info != null) {
- try tree.errors.push(.{
- .ExtraAlignQualifier = .{ .token = it.index - 1 },
- });
- continue;
- }
- slice_type.align_info = Node.PrefixOp.PtrInfo.Align{
- .node = align_expr,
- .bit_range = null,
- };
- continue;
- }
- if (eatToken(it, .Keyword_const)) |const_token| {
- if (slice_type.const_token != null) {
- try tree.errors.push(.{
- .ExtraConstQualifier = .{ .token = it.index - 1 },
- });
- continue;
- }
- slice_type.const_token = const_token;
- continue;
- }
- if (eatToken(it, .Keyword_volatile)) |volatile_token| {
- if (slice_type.volatile_token != null) {
- try tree.errors.push(.{
- .ExtraVolatileQualifier = .{ .token = it.index - 1 },
- });
- continue;
- }
- slice_type.volatile_token = volatile_token;
- continue;
- }
- if (eatToken(it, .Keyword_allowzero)) |allowzero_token| {
- if (slice_type.allowzero_token != null) {
- try tree.errors.push(.{
- .ExtraAllowZeroQualifier = .{ .token = it.index - 1 },
- });
- continue;
- }
- slice_type.allowzero_token = allowzero_token;
- continue;
- }
- break;
- }
- },
- else => unreachable,
- }
- return node;
- }
-
- return null;
-}
-
-/// SuffixOp
-/// <- LBRACKET Expr (DOT2 (Expr (COLON Expr)?)?)? RBRACKET
-/// / DOT IDENTIFIER
-/// / DOTASTERISK
-/// / DOTQUESTIONMARK
-fn parseSuffixOp(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const OpAndToken = struct {
- op: Node.SuffixOp.Op,
- token: TokenIndex,
- };
- const op_and_token: OpAndToken = blk: {
- if (eatToken(it, .LBracket)) |_| {
- const index_expr = try expectNode(arena, it, tree, parseExpr, .{
- .ExpectedExpr = .{ .token = it.index },
+ if (p.eatToken(.Keyword_else)) |else_token| {
+ const else_expr = try p.expectNode(parseTypeExpr, .{
+ .ExpectedTypeExpr = .{ .token = p.tok_i },
});
- if (eatToken(it, .Ellipsis2) != null) {
- const end_expr = try parseExpr(arena, it, tree);
- const sentinel: ?*ast.Node = if (eatToken(it, .Colon) != null)
- try parseExpr(arena, it, tree)
- else
- null;
- break :blk .{
- .op = .{
- .Slice = .{
- .start = index_expr,
- .end = end_expr,
- .sentinel = sentinel,
- },
- },
- .token = try expectToken(it, tree, .RBracket),
- };
- }
-
- break :blk .{
- .op = .{ .ArrayAccess = index_expr },
- .token = try expectToken(it, tree, .RBracket),
+ const else_node = try p.arena.allocator.create(Node.Else);
+ else_node.* = .{
+ .else_token = else_token,
+ .payload = null,
+ .body = else_expr,
};
+
+ for_prefix.@"else" = else_node;
}
- if (eatToken(it, .PeriodAsterisk)) |period_asterisk| {
- break :blk .{ .op = .Deref, .token = period_asterisk };
- }
+ return node;
+ }
- if (eatToken(it, .Period)) |period| {
- if (try parseIdentifier(arena, it, tree)) |identifier| {
- // TODO: It's a bit weird to return an InfixOp from the SuffixOp parser.
- // Should there be an ast.Node.SuffixOp.FieldAccess variant? Or should
- // this grammar rule be altered?
- const node = try arena.create(Node.InfixOp);
- node.* = .{
- .op_token = period,
- .lhs = undefined, // set by caller
- .op = .Period,
- .rhs = identifier,
- };
- return &node.base;
- }
- if (eatToken(it, .QuestionMark)) |question_mark| {
- break :blk .{ .op = .UnwrapOptional, .token = question_mark };
- }
- try tree.errors.push(.{
- .ExpectedSuffixOp = .{ .token = it.index },
+ /// WhileTypeExpr <- WhilePrefix TypeExpr (KEYWORD_else Payload? TypeExpr)?
+ fn parseWhileTypeExpr(p: *Parser) !?*Node {
+ const node = (try p.parseWhilePrefix()) orelse return null;
+ const while_prefix = node.cast(Node.While).?;
+
+ const type_expr = try p.expectNode(parseTypeExpr, .{
+ .ExpectedTypeExpr = .{ .token = p.tok_i },
+ });
+ while_prefix.body = type_expr;
+
+ if (p.eatToken(.Keyword_else)) |else_token| {
+ const payload = try p.parsePayload();
+
+ const else_expr = try p.expectNode(parseTypeExpr, .{
+ .ExpectedTypeExpr = .{ .token = p.tok_i },
});
- return null;
+
+ const else_node = try p.arena.allocator.create(Node.Else);
+ else_node.* = .{
+ .else_token = else_token,
+ .payload = null,
+ .body = else_expr,
+ };
+
+ while_prefix.@"else" = else_node;
}
- return null;
- };
+ return node;
+ }
- const node = try arena.create(Node.SuffixOp);
- node.* = .{
- .lhs = undefined, // set by caller
- .op = op_and_token.op,
- .rtoken = op_and_token.token,
- };
- return &node.base;
-}
+ /// SwitchExpr <- KEYWORD_switch LPAREN Expr RPAREN LBRACE SwitchProngList RBRACE
+ fn parseSwitchExpr(p: *Parser) !?*Node {
+ const switch_token = p.eatToken(.Keyword_switch) orelse return null;
+ _ = try p.expectToken(.LParen);
+ const expr_node = try p.expectNode(parseExpr, .{
+ .ExpectedExpr = .{ .token = p.tok_i },
+ });
+ _ = try p.expectToken(.RParen);
+ _ = try p.expectToken(.LBrace);
+ const cases = try p.parseSwitchProngList();
+ defer p.gpa.free(cases);
+ const rbrace = try p.expectToken(.RBrace);
-/// FnCallArguments <- LPAREN ExprList RPAREN
-/// ExprList <- (Expr COMMA)* Expr?
-fn parseFnCallArguments(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?AnnotatedParamList {
- if (eatToken(it, .LParen) == null) return null;
- const list = try ListParseFn(Node.FnProto.ParamList, parseExpr)(arena, it, tree);
- const rparen = try expectToken(it, tree, .RParen);
- return AnnotatedParamList{ .list = list, .rparen = rparen };
-}
-
-const AnnotatedParamList = struct {
- list: Node.FnProto.ParamList, // NOTE: may also be any other type SegmentedList(*Node, 2)
- rparen: TokenIndex,
-};
-
-/// ArrayTypeStart <- LBRACKET Expr? RBRACKET
-fn parseArrayTypeStart(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const lbracket = eatToken(it, .LBracket) orelse return null;
- const expr = try parseExpr(arena, it, tree);
- const sentinel = if (eatToken(it, .Colon)) |_|
- try expectNode(arena, it, tree, parseExpr, .{
- .ExpectedExpr = .{ .token = it.index },
- })
- else
- null;
- const rbracket = try expectToken(it, tree, .RBracket);
-
- const op: Node.PrefixOp.Op = if (expr) |len_expr|
- .{
- .ArrayType = .{
- .len_expr = len_expr,
- .sentinel = sentinel,
- },
- }
- else
- .{
- .SliceType = Node.PrefixOp.PtrInfo{
- .allowzero_token = null,
- .align_info = null,
- .const_token = null,
- .volatile_token = null,
- .sentinel = sentinel,
- },
- };
-
- const node = try arena.create(Node.PrefixOp);
- node.* = .{
- .op_token = lbracket,
- .op = op,
- .rhs = undefined, // set by caller
- };
- return &node.base;
-}
-
-/// PtrTypeStart
-/// <- ASTERISK
-/// / ASTERISK2
-/// / PTRUNKNOWN
-/// / PTRC
-fn parsePtrTypeStart(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- if (eatToken(it, .Asterisk)) |asterisk| {
- const sentinel = if (eatToken(it, .Colon)) |_|
- try expectNode(arena, it, tree, parseExpr, .{
- .ExpectedExpr = .{ .token = it.index },
- })
- else
- null;
- const node = try arena.create(Node.PrefixOp);
+ const node = try Node.Switch.alloc(&p.arena.allocator, cases.len);
node.* = .{
- .op_token = asterisk,
- .op = .{ .PtrType = .{ .sentinel = sentinel } },
- .rhs = undefined, // set by caller
+ .switch_token = switch_token,
+ .expr = expr_node,
+ .cases_len = cases.len,
+ .rbrace = rbrace,
};
+ std.mem.copy(*Node, node.cases(), cases);
return &node.base;
}
- if (eatToken(it, .AsteriskAsterisk)) |double_asterisk| {
- const node = try arena.create(Node.PrefixOp);
- node.* = .{
- .op_token = double_asterisk,
- .op = .{ .PtrType = .{} },
- .rhs = undefined, // set by caller
- };
+ /// AsmExpr <- KEYWORD_asm KEYWORD_volatile? LPAREN Expr AsmOutput? RPAREN
+ /// AsmOutput <- COLON AsmOutputList AsmInput?
+ /// AsmInput <- COLON AsmInputList AsmClobbers?
+ /// AsmClobbers <- COLON StringList
+ /// StringList <- (STRINGLITERAL COMMA)* STRINGLITERAL?
+ fn parseAsmExpr(p: *Parser) !?*Node {
+ const asm_token = p.eatToken(.Keyword_asm) orelse return null;
+ const volatile_token = p.eatToken(.Keyword_volatile);
+ _ = try p.expectToken(.LParen);
+ const template = try p.expectNode(parseExpr, .{
+ .ExpectedExpr = .{ .token = p.tok_i },
+ });
- // Special case for **, which is its own token
- const child = try arena.create(Node.PrefixOp);
- child.* = .{
- .op_token = double_asterisk,
- .op = .{ .PtrType = .{} },
- .rhs = undefined, // set by caller
- };
- node.rhs = &child.base;
+ var arena_outputs: []Node.Asm.Output = &[0]Node.Asm.Output{};
+ var arena_inputs: []Node.Asm.Input = &[0]Node.Asm.Input{};
+ var arena_clobbers: []*Node = &[0]*Node{};
- return &node.base;
- }
- if (eatToken(it, .LBracket)) |lbracket| {
- const asterisk = eatToken(it, .Asterisk) orelse {
- putBackToken(it, lbracket);
- return null;
- };
- if (eatToken(it, .Identifier)) |ident| {
- if (!std.mem.eql(u8, tree.tokenSlice(ident), "c")) {
- putBackToken(it, ident);
- } else {
- _ = try expectToken(it, tree, .RBracket);
- const node = try arena.create(Node.PrefixOp);
- node.* = .{
- .op_token = lbracket,
- .op = .{ .PtrType = .{} },
- .rhs = undefined, // set by caller
- };
- return &node.base;
- }
- }
- const sentinel = if (eatToken(it, .Colon)) |_|
- try expectNode(arena, it, tree, parseExpr, .{
- .ExpectedExpr = .{ .token = it.index },
- })
- else
- null;
- _ = try expectToken(it, tree, .RBracket);
- const node = try arena.create(Node.PrefixOp);
- node.* = .{
- .op_token = lbracket,
- .op = .{ .PtrType = .{ .sentinel = sentinel } },
- .rhs = undefined, // set by caller
- };
- return &node.base;
- }
- return null;
-}
+ if (p.eatToken(.Colon) != null) {
+ const outputs = try p.parseAsmOutputList();
+ defer p.gpa.free(outputs);
+ arena_outputs = try p.arena.allocator.dupe(Node.Asm.Output, outputs);
-/// ContainerDeclAuto <- ContainerDeclType LBRACE ContainerMembers RBRACE
-fn parseContainerDeclAuto(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const node = (try parseContainerDeclType(arena, it, tree)) orelse return null;
- const lbrace = try expectToken(it, tree, .LBrace);
- const members = try parseContainerMembers(arena, it, tree, false);
- const rbrace = try expectToken(it, tree, .RBrace);
+ if (p.eatToken(.Colon) != null) {
+ const inputs = try p.parseAsmInputList();
+ defer p.gpa.free(inputs);
+ arena_inputs = try p.arena.allocator.dupe(Node.Asm.Input, inputs);
- const decl_type = node.cast(Node.ContainerDecl).?;
- decl_type.fields_and_decls = members;
- decl_type.lbrace_token = lbrace;
- decl_type.rbrace_token = rbrace;
-
- return node;
-}
-
-/// ContainerDeclType
-/// <- KEYWORD_struct
-/// / KEYWORD_enum (LPAREN Expr RPAREN)?
-/// / KEYWORD_union (LPAREN (KEYWORD_enum (LPAREN Expr RPAREN)? / Expr) RPAREN)?
-fn parseContainerDeclType(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const kind_token = nextToken(it);
-
- const init_arg_expr = switch (kind_token.ptr.id) {
- .Keyword_struct => Node.ContainerDecl.InitArg{ .None = {} },
- .Keyword_enum => blk: {
- if (eatToken(it, .LParen) != null) {
- const expr = try expectNode(arena, it, tree, parseExpr, .{
- .ExpectedExpr = .{ .token = it.index },
- });
- _ = try expectToken(it, tree, .RParen);
- break :blk Node.ContainerDecl.InitArg{ .Type = expr };
- }
- break :blk Node.ContainerDecl.InitArg{ .None = {} };
- },
- .Keyword_union => blk: {
- if (eatToken(it, .LParen) != null) {
- if (eatToken(it, .Keyword_enum) != null) {
- if (eatToken(it, .LParen) != null) {
- const expr = try expectNode(arena, it, tree, parseExpr, .{
- .ExpectedExpr = .{ .token = it.index },
- });
- _ = try expectToken(it, tree, .RParen);
- _ = try expectToken(it, tree, .RParen);
- break :blk Node.ContainerDecl.InitArg{ .Enum = expr };
- }
- _ = try expectToken(it, tree, .RParen);
- break :blk Node.ContainerDecl.InitArg{ .Enum = null };
- }
- const expr = try expectNode(arena, it, tree, parseExpr, .{
- .ExpectedExpr = .{ .token = it.index },
- });
- _ = try expectToken(it, tree, .RParen);
- break :blk Node.ContainerDecl.InitArg{ .Type = expr };
- }
- break :blk Node.ContainerDecl.InitArg{ .None = {} };
- },
- else => {
- putBackToken(it, kind_token.index);
- return null;
- },
- };
-
- const node = try arena.create(Node.ContainerDecl);
- node.* = .{
- .layout_token = null,
- .kind_token = kind_token.index,
- .init_arg_expr = init_arg_expr,
- .fields_and_decls = undefined, // set by caller
- .lbrace_token = undefined, // set by caller
- .rbrace_token = undefined, // set by caller
- };
- return &node.base;
-}
-
-/// ByteAlign <- KEYWORD_align LPAREN Expr RPAREN
-fn parseByteAlign(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- _ = eatToken(it, .Keyword_align) orelse return null;
- _ = try expectToken(it, tree, .LParen);
- const expr = try expectNode(arena, it, tree, parseExpr, .{
- .ExpectedExpr = .{ .token = it.index },
- });
- _ = try expectToken(it, tree, .RParen);
- return expr;
-}
-
-/// IdentifierList <- (IDENTIFIER COMMA)* IDENTIFIER?
-/// Only ErrorSetDecl parses an IdentifierList
-fn parseErrorTagList(arena: *Allocator, it: *TokenIterator, tree: *Tree) !Node.ErrorSetDecl.DeclList {
- return try ListParseFn(Node.ErrorSetDecl.DeclList, parseErrorTag)(arena, it, tree);
-}
-
-/// SwitchProngList <- (SwitchProng COMMA)* SwitchProng?
-fn parseSwitchProngList(arena: *Allocator, it: *TokenIterator, tree: *Tree) !Node.Switch.CaseList {
- return try ListParseFn(Node.Switch.CaseList, parseSwitchProng)(arena, it, tree);
-}
-
-/// AsmOutputList <- (AsmOutputItem COMMA)* AsmOutputItem?
-fn parseAsmOutputList(arena: *Allocator, it: *TokenIterator, tree: *Tree) Error!Node.Asm.OutputList {
- return try ListParseFn(Node.Asm.OutputList, parseAsmOutputItem)(arena, it, tree);
-}
-
-/// AsmInputList <- (AsmInputItem COMMA)* AsmInputItem?
-fn parseAsmInputList(arena: *Allocator, it: *TokenIterator, tree: *Tree) Error!Node.Asm.InputList {
- return try ListParseFn(Node.Asm.InputList, parseAsmInputItem)(arena, it, tree);
-}
-
-/// ParamDeclList <- (ParamDecl COMMA)* ParamDecl?
-fn parseParamDeclList(arena: *Allocator, it: *TokenIterator, tree: *Tree) !Node.FnProto.ParamList {
- return try ListParseFn(Node.FnProto.ParamList, parseParamDecl)(arena, it, tree);
-}
-
-fn ParseFn(comptime T: type) type {
- return fn (*Allocator, *TokenIterator, *Tree) Error!T;
-}
-
-const NodeParseFn = fn (*Allocator, *TokenIterator, *Tree) Error!?*Node;
-
-fn ListParseFn(comptime L: type, comptime nodeParseFn: var) ParseFn(L) {
- return struct {
- pub fn parse(arena: *Allocator, it: *TokenIterator, tree: *Tree) !L {
- var list = L.init(arena);
- while (try nodeParseFn(arena, it, tree)) |node| {
- try list.push(node);
-
- switch (it.peek().?.id) {
- .Comma => _ = nextToken(it),
- // all possible delimiters
- .Colon, .RParen, .RBrace, .RBracket => break,
- else => {
- // this is likely just a missing comma,
- // continue parsing this list and give an error
- try tree.errors.push(.{
- .ExpectedToken = .{ .token = it.index, .expected_id = .Comma },
- });
- },
+ if (p.eatToken(.Colon) != null) {
+ const clobbers = try ListParseFn(*Node, parseStringLiteral)(p);
+ defer p.gpa.free(clobbers);
+ arena_clobbers = try p.arena.allocator.dupe(*Node, clobbers);
}
}
- return list;
}
- }.parse;
-}
-fn SimpleBinOpParseFn(comptime token: Token.Id, comptime op: Node.InfixOp.Op) NodeParseFn {
- return struct {
- pub fn parse(arena: *Allocator, it: *TokenIterator, tree: *Tree) Error!?*Node {
- const op_token = if (token == .Keyword_and) switch (it.peek().?.id) {
- .Keyword_and => nextToken(it).index,
- .Invalid_ampersands => blk: {
- try tree.errors.push(.{
- .InvalidAnd = .{ .token = it.index },
- });
- break :blk nextToken(it).index;
- },
- else => return null,
- } else eatToken(it, token) orelse return null;
+ const node = try p.arena.allocator.create(Node.Asm);
+ node.* = .{
+ .asm_token = asm_token,
+ .volatile_token = volatile_token,
+ .template = template,
+ .outputs = arena_outputs,
+ .inputs = arena_inputs,
+ .clobbers = arena_clobbers,
+ .rparen = try p.expectToken(.RParen),
+ };
- const node = try arena.create(Node.InfixOp);
+ return &node.base;
+ }
+
+ /// DOT IDENTIFIER
+ fn parseAnonLiteral(p: *Parser) !?*Node {
+ const dot = p.eatToken(.Period) orelse return null;
+
+ // anon enum literal
+ if (p.eatToken(.Identifier)) |name| {
+ const node = try p.arena.allocator.create(Node.EnumLiteral);
node.* = .{
- .op_token = op_token,
- .lhs = undefined, // set by caller
- .op = op,
+ .dot = dot,
+ .name = name,
+ };
+ return &node.base;
+ }
+
+ if (try p.parseAnonInitList(dot)) |node| {
+ return node;
+ }
+
+ p.putBackToken(dot);
+ return null;
+ }
+
+ /// AsmOutputItem <- LBRACKET IDENTIFIER RBRACKET STRINGLITERAL LPAREN (MINUSRARROW TypeExpr / IDENTIFIER) RPAREN
+ fn parseAsmOutputItem(p: *Parser) !?Node.Asm.Output {
+ const lbracket = p.eatToken(.LBracket) orelse return null;
+ const name = try p.expectNode(parseIdentifier, .{
+ .ExpectedIdentifier = .{ .token = p.tok_i },
+ });
+ _ = try p.expectToken(.RBracket);
+
+ const constraint = try p.expectNode(parseStringLiteral, .{
+ .ExpectedStringLiteral = .{ .token = p.tok_i },
+ });
+
+ _ = try p.expectToken(.LParen);
+ const kind: Node.Asm.Output.Kind = blk: {
+ if (p.eatToken(.Arrow) != null) {
+ const return_ident = try p.expectNode(parseTypeExpr, .{
+ .ExpectedTypeExpr = .{ .token = p.tok_i },
+ });
+ break :blk .{ .Return = return_ident };
+ }
+ const variable = try p.expectNode(parseIdentifier, .{
+ .ExpectedIdentifier = .{ .token = p.tok_i },
+ });
+ break :blk .{ .Variable = variable.cast(Node.Identifier).? };
+ };
+ const rparen = try p.expectToken(.RParen);
+
+ return Node.Asm.Output{
+ .lbracket = lbracket,
+ .symbolic_name = name,
+ .constraint = constraint,
+ .kind = kind,
+ .rparen = rparen,
+ };
+ }
+
+ /// AsmInputItem <- LBRACKET IDENTIFIER RBRACKET STRINGLITERAL LPAREN Expr RPAREN
+ fn parseAsmInputItem(p: *Parser) !?Node.Asm.Input {
+ const lbracket = p.eatToken(.LBracket) orelse return null;
+ const name = try p.expectNode(parseIdentifier, .{
+ .ExpectedIdentifier = .{ .token = p.tok_i },
+ });
+ _ = try p.expectToken(.RBracket);
+
+ const constraint = try p.expectNode(parseStringLiteral, .{
+ .ExpectedStringLiteral = .{ .token = p.tok_i },
+ });
+
+ _ = try p.expectToken(.LParen);
+ const expr = try p.expectNode(parseExpr, .{
+ .ExpectedExpr = .{ .token = p.tok_i },
+ });
+ const rparen = try p.expectToken(.RParen);
+
+ return Node.Asm.Input{
+ .lbracket = lbracket,
+ .symbolic_name = name,
+ .constraint = constraint,
+ .expr = expr,
+ .rparen = rparen,
+ };
+ }
+
+ /// BreakLabel <- COLON IDENTIFIER
+ fn parseBreakLabel(p: *Parser) !?*Node {
+ _ = p.eatToken(.Colon) orelse return null;
+ return p.expectNode(parseIdentifier, .{
+ .ExpectedIdentifier = .{ .token = p.tok_i },
+ });
+ }
+
+ /// BlockLabel <- IDENTIFIER COLON
+ fn parseBlockLabel(p: *Parser, colon_token: *TokenIndex) ?TokenIndex {
+ const identifier = p.eatToken(.Identifier) orelse return null;
+ if (p.eatToken(.Colon)) |colon| {
+ colon_token.* = colon;
+ return identifier;
+ }
+ p.putBackToken(identifier);
+ return null;
+ }
+
+ /// FieldInit <- DOT IDENTIFIER EQUAL Expr
+ fn parseFieldInit(p: *Parser) !?*Node {
+ const period_token = p.eatToken(.Period) orelse return null;
+ const name_token = p.eatToken(.Identifier) orelse {
+ // Because of anon literals `.{` is also valid.
+ p.putBackToken(period_token);
+ return null;
+ };
+ const eq_token = p.eatToken(.Equal) orelse {
+ // `.Name` may also be an enum literal, which is a later rule.
+ p.putBackToken(name_token);
+ p.putBackToken(period_token);
+ return null;
+ };
+ const expr_node = try p.expectNode(parseExpr, .{
+ .ExpectedExpr = .{ .token = p.tok_i },
+ });
+
+ const node = try p.arena.allocator.create(Node.FieldInitializer);
+ node.* = .{
+ .period_token = period_token,
+ .name_token = name_token,
+ .expr = expr_node,
+ };
+ return &node.base;
+ }
+
+ /// WhileContinueExpr <- COLON LPAREN AssignExpr RPAREN
+ fn parseWhileContinueExpr(p: *Parser) !?*Node {
+ _ = p.eatToken(.Colon) orelse return null;
+ _ = try p.expectToken(.LParen);
+ const node = try p.expectNode(parseAssignExpr, .{
+ .ExpectedExprOrAssignment = .{ .token = p.tok_i },
+ });
+ _ = try p.expectToken(.RParen);
+ return node;
+ }
+
+ /// LinkSection <- KEYWORD_linksection LPAREN Expr RPAREN
+ fn parseLinkSection(p: *Parser) !?*Node {
+ _ = p.eatToken(.Keyword_linksection) orelse return null;
+ _ = try p.expectToken(.LParen);
+ const expr_node = try p.expectNode(parseExpr, .{
+ .ExpectedExpr = .{ .token = p.tok_i },
+ });
+ _ = try p.expectToken(.RParen);
+ return expr_node;
+ }
+
+ /// CallConv <- KEYWORD_callconv LPAREN Expr RPAREN
+ fn parseCallconv(p: *Parser) !?*Node {
+ _ = p.eatToken(.Keyword_callconv) orelse return null;
+ _ = try p.expectToken(.LParen);
+ const expr_node = try p.expectNode(parseExpr, .{
+ .ExpectedExpr = .{ .token = p.tok_i },
+ });
+ _ = try p.expectToken(.RParen);
+ return expr_node;
+ }
+
+ /// ParamDecl <- (KEYWORD_noalias / KEYWORD_comptime)? (IDENTIFIER COLON)? ParamType
+ fn parseParamDecl(p: *Parser) !?Node.FnProto.ParamDecl {
+ const doc_comments = try p.parseDocComment();
+ const noalias_token = p.eatToken(.Keyword_noalias);
+ const comptime_token = if (noalias_token == null) p.eatToken(.Keyword_comptime) else null;
+ const name_token = blk: {
+ const identifier = p.eatToken(.Identifier) orelse break :blk null;
+ if (p.eatToken(.Colon) != null) break :blk identifier;
+ p.putBackToken(identifier); // ParamType may also be an identifier
+ break :blk null;
+ };
+ const param_type = (try p.parseParamType()) orelse {
+ // Only return cleanly if no keyword, identifier, or doc comment was found
+ if (noalias_token == null and
+ comptime_token == null and
+ name_token == null and
+ doc_comments == null)
+ {
+ return null;
+ }
+ try p.errors.append(p.gpa, .{
+ .ExpectedParamType = .{ .token = p.tok_i },
+ });
+ return error.ParseError;
+ };
+
+ return Node.FnProto.ParamDecl{
+ .doc_comments = doc_comments,
+ .comptime_token = comptime_token,
+ .noalias_token = noalias_token,
+ .name_token = name_token,
+ .param_type = param_type,
+ };
+ }
+
+ /// ParamType
+ /// <- KEYWORD_var
+ /// / DOT3
+ /// / TypeExpr
+ fn parseParamType(p: *Parser) !?Node.FnProto.ParamDecl.ParamType {
+ // TODO cast from tuple to error union is broken
+ const P = Node.FnProto.ParamDecl.ParamType;
+ if (try p.parseVarType()) |node| return P{ .var_type = node };
+ if (p.eatToken(.Ellipsis3)) |token| return P{ .var_args = token };
+ if (try p.parseTypeExpr()) |node| return P{ .type_expr = node };
+ return null;
+ }
+
+ /// IfPrefix <- KEYWORD_if LPAREN Expr RPAREN PtrPayload?
+ fn parseIfPrefix(p: *Parser) !?*Node {
+ const if_token = p.eatToken(.Keyword_if) orelse return null;
+ _ = try p.expectToken(.LParen);
+ const condition = try p.expectNode(parseExpr, .{
+ .ExpectedExpr = .{ .token = p.tok_i },
+ });
+ _ = try p.expectToken(.RParen);
+ const payload = try p.parsePtrPayload();
+
+ const node = try p.arena.allocator.create(Node.If);
+ node.* = .{
+ .if_token = if_token,
+ .condition = condition,
+ .payload = payload,
+ .body = undefined, // set by caller
+ .@"else" = null,
+ };
+ return &node.base;
+ }
+
+ /// WhilePrefix <- KEYWORD_while LPAREN Expr RPAREN PtrPayload? WhileContinueExpr?
+ fn parseWhilePrefix(p: *Parser) !?*Node {
+ const while_token = p.eatToken(.Keyword_while) orelse return null;
+
+ _ = try p.expectToken(.LParen);
+ const condition = try p.expectNode(parseExpr, .{
+ .ExpectedExpr = .{ .token = p.tok_i },
+ });
+ _ = try p.expectToken(.RParen);
+
+ const payload = try p.parsePtrPayload();
+ const continue_expr = try p.parseWhileContinueExpr();
+
+ const node = try p.arena.allocator.create(Node.While);
+ node.* = .{
+ .label = null,
+ .inline_token = null,
+ .while_token = while_token,
+ .condition = condition,
+ .payload = payload,
+ .continue_expr = continue_expr,
+ .body = undefined, // set by caller
+ .@"else" = null,
+ };
+ return &node.base;
+ }
+
+ /// ForPrefix <- KEYWORD_for LPAREN Expr RPAREN PtrIndexPayload
+ fn parseForPrefix(p: *Parser) !?*Node {
+ const for_token = p.eatToken(.Keyword_for) orelse return null;
+
+ _ = try p.expectToken(.LParen);
+ const array_expr = try p.expectNode(parseExpr, .{
+ .ExpectedExpr = .{ .token = p.tok_i },
+ });
+ _ = try p.expectToken(.RParen);
+
+ const payload = try p.expectNode(parsePtrIndexPayload, .{
+ .ExpectedPayload = .{ .token = p.tok_i },
+ });
+
+ const node = try p.arena.allocator.create(Node.For);
+ node.* = .{
+ .label = null,
+ .inline_token = null,
+ .for_token = for_token,
+ .array_expr = array_expr,
+ .payload = payload,
+ .body = undefined, // set by caller
+ .@"else" = null,
+ };
+ return &node.base;
+ }
+
+ /// Payload <- PIPE IDENTIFIER PIPE
+ fn parsePayload(p: *Parser) !?*Node {
+ const lpipe = p.eatToken(.Pipe) orelse return null;
+ const identifier = try p.expectNode(parseIdentifier, .{
+ .ExpectedIdentifier = .{ .token = p.tok_i },
+ });
+ const rpipe = try p.expectToken(.Pipe);
+
+ const node = try p.arena.allocator.create(Node.Payload);
+ node.* = .{
+ .lpipe = lpipe,
+ .error_symbol = identifier,
+ .rpipe = rpipe,
+ };
+ return &node.base;
+ }
+
+ /// PtrPayload <- PIPE ASTERISK? IDENTIFIER PIPE
+ fn parsePtrPayload(p: *Parser) !?*Node {
+ const lpipe = p.eatToken(.Pipe) orelse return null;
+ const asterisk = p.eatToken(.Asterisk);
+ const identifier = try p.expectNode(parseIdentifier, .{
+ .ExpectedIdentifier = .{ .token = p.tok_i },
+ });
+ const rpipe = try p.expectToken(.Pipe);
+
+ const node = try p.arena.allocator.create(Node.PointerPayload);
+ node.* = .{
+ .lpipe = lpipe,
+ .ptr_token = asterisk,
+ .value_symbol = identifier,
+ .rpipe = rpipe,
+ };
+ return &node.base;
+ }
+
+ /// PtrIndexPayload <- PIPE ASTERISK? IDENTIFIER (COMMA IDENTIFIER)? PIPE
+ fn parsePtrIndexPayload(p: *Parser) !?*Node {
+ const lpipe = p.eatToken(.Pipe) orelse return null;
+ const asterisk = p.eatToken(.Asterisk);
+ const identifier = try p.expectNode(parseIdentifier, .{
+ .ExpectedIdentifier = .{ .token = p.tok_i },
+ });
+
+ const index = if (p.eatToken(.Comma) == null)
+ null
+ else
+ try p.expectNode(parseIdentifier, .{
+ .ExpectedIdentifier = .{ .token = p.tok_i },
+ });
+
+ const rpipe = try p.expectToken(.Pipe);
+
+ const node = try p.arena.allocator.create(Node.PointerIndexPayload);
+ node.* = .{
+ .lpipe = lpipe,
+ .ptr_token = asterisk,
+ .value_symbol = identifier,
+ .index_symbol = index,
+ .rpipe = rpipe,
+ };
+ return &node.base;
+ }
+
+ /// SwitchProng <- SwitchCase EQUALRARROW PtrPayload? AssignExpr
+ fn parseSwitchProng(p: *Parser) !?*Node {
+ const node = (try p.parseSwitchCase()) orelse return null;
+ const arrow = try p.expectToken(.EqualAngleBracketRight);
+ const payload = try p.parsePtrPayload();
+ const expr = try p.expectNode(parseAssignExpr, .{
+ .ExpectedExprOrAssignment = .{ .token = p.tok_i },
+ });
+
+ const switch_case = node.cast(Node.SwitchCase).?;
+ switch_case.arrow_token = arrow;
+ switch_case.payload = payload;
+ switch_case.expr = expr;
+
+ return node;
+ }
+
+ /// SwitchCase
+ /// <- SwitchItem (COMMA SwitchItem)* COMMA?
+ /// / KEYWORD_else
+ fn parseSwitchCase(p: *Parser) !?*Node {
+ var list = std.ArrayList(*Node).init(p.gpa);
+ defer list.deinit();
+
+ if (try p.parseSwitchItem()) |first_item| {
+ try list.append(first_item);
+ while (p.eatToken(.Comma) != null) {
+ const next_item = (try p.parseSwitchItem()) orelse break;
+ try list.append(next_item);
+ }
+ } else if (p.eatToken(.Keyword_else)) |else_token| {
+ const else_node = try p.arena.allocator.create(Node.SwitchElse);
+ else_node.* = .{
+ .token = else_token,
+ };
+ try list.append(&else_node.base);
+ } else return null;
+
+ const node = try Node.SwitchCase.alloc(&p.arena.allocator, list.items.len);
+ node.* = .{
+ .items_len = list.items.len,
+ .arrow_token = undefined, // set by caller
+ .payload = null,
+ .expr = undefined, // set by caller
+ };
+ std.mem.copy(*Node, node.items(), list.items);
+ return &node.base;
+ }
+
+ /// SwitchItem <- Expr (DOT3 Expr)?
+ fn parseSwitchItem(p: *Parser) !?*Node {
+ const expr = (try p.parseExpr()) orelse return null;
+ if (p.eatToken(.Ellipsis3)) |token| {
+ const range_end = try p.expectNode(parseExpr, .{
+ .ExpectedExpr = .{ .token = p.tok_i },
+ });
+
+ const node = try p.arena.allocator.create(Node.InfixOp);
+ node.* = .{
+ .op_token = token,
+ .lhs = expr,
+ .op = .Range,
+ .rhs = range_end,
+ };
+ return &node.base;
+ }
+ return expr;
+ }
+
+ /// AssignOp
+ /// <- ASTERISKEQUAL
+ /// / SLASHEQUAL
+ /// / PERCENTEQUAL
+ /// / PLUSEQUAL
+ /// / MINUSEQUAL
+ /// / LARROW2EQUAL
+ /// / RARROW2EQUAL
+ /// / AMPERSANDEQUAL
+ /// / CARETEQUAL
+ /// / PIPEEQUAL
+ /// / ASTERISKPERCENTEQUAL
+ /// / PLUSPERCENTEQUAL
+ /// / MINUSPERCENTEQUAL
+ /// / EQUAL
+ fn parseAssignOp(p: *Parser) !?*Node {
+ const token = p.nextToken();
+ const op: Node.InfixOp.Op = switch (p.token_ids[token]) {
+ .AsteriskEqual => .AssignMul,
+ .SlashEqual => .AssignDiv,
+ .PercentEqual => .AssignMod,
+ .PlusEqual => .AssignAdd,
+ .MinusEqual => .AssignSub,
+ .AngleBracketAngleBracketLeftEqual => .AssignBitShiftLeft,
+ .AngleBracketAngleBracketRightEqual => .AssignBitShiftRight,
+ .AmpersandEqual => .AssignBitAnd,
+ .CaretEqual => .AssignBitXor,
+ .PipeEqual => .AssignBitOr,
+ .AsteriskPercentEqual => .AssignMulWrap,
+ .PlusPercentEqual => .AssignAddWrap,
+ .MinusPercentEqual => .AssignSubWrap,
+ .Equal => .Assign,
+ else => {
+ p.putBackToken(token);
+ return null;
+ },
+ };
+
+ const node = try p.arena.allocator.create(Node.InfixOp);
+ node.* = .{
+ .op_token = token,
+ .lhs = undefined, // set by caller
+ .op = op,
+ .rhs = undefined, // set by caller
+ };
+ return &node.base;
+ }
+
+ /// CompareOp
+ /// <- EQUALEQUAL
+ /// / EXCLAMATIONMARKEQUAL
+ /// / LARROW
+ /// / RARROW
+ /// / LARROWEQUAL
+ /// / RARROWEQUAL
+ fn parseCompareOp(p: *Parser) !?*Node {
+ const token = p.nextToken();
+ const op: Node.InfixOp.Op = switch (p.token_ids[token]) {
+ .EqualEqual => .EqualEqual,
+ .BangEqual => .BangEqual,
+ .AngleBracketLeft => .LessThan,
+ .AngleBracketRight => .GreaterThan,
+ .AngleBracketLeftEqual => .LessOrEqual,
+ .AngleBracketRightEqual => .GreaterOrEqual,
+ else => {
+ p.putBackToken(token);
+ return null;
+ },
+ };
+
+ return p.createInfixOp(token, op);
+ }
+
+ /// BitwiseOp
+ /// <- AMPERSAND
+ /// / CARET
+ /// / PIPE
+ /// / KEYWORD_orelse
+ /// / KEYWORD_catch Payload?
+ fn parseBitwiseOp(p: *Parser) !?*Node {
+ const token = p.nextToken();
+ const op: Node.InfixOp.Op = switch (p.token_ids[token]) {
+ .Ampersand => .BitAnd,
+ .Caret => .BitXor,
+ .Pipe => .BitOr,
+ .Keyword_orelse => .UnwrapOptional,
+ .Keyword_catch => .{ .Catch = try p.parsePayload() },
+ else => {
+ p.putBackToken(token);
+ return null;
+ },
+ };
+
+ return p.createInfixOp(token, op);
+ }
+
+ /// BitShiftOp
+ /// <- LARROW2
+ /// / RARROW2
+ fn parseBitShiftOp(p: *Parser) !?*Node {
+ const token = p.nextToken();
+ const op: Node.InfixOp.Op = switch (p.token_ids[token]) {
+ .AngleBracketAngleBracketLeft => .BitShiftLeft,
+ .AngleBracketAngleBracketRight => .BitShiftRight,
+ else => {
+ p.putBackToken(token);
+ return null;
+ },
+ };
+
+ return p.createInfixOp(token, op);
+ }
+
+ /// AdditionOp
+ /// <- PLUS
+ /// / MINUS
+ /// / PLUS2
+ /// / PLUSPERCENT
+ /// / MINUSPERCENT
+ fn parseAdditionOp(p: *Parser) !?*Node {
+ const token = p.nextToken();
+ const op: Node.InfixOp.Op = switch (p.token_ids[token]) {
+ .Plus => .Add,
+ .Minus => .Sub,
+ .PlusPlus => .ArrayCat,
+ .PlusPercent => .AddWrap,
+ .MinusPercent => .SubWrap,
+ else => {
+ p.putBackToken(token);
+ return null;
+ },
+ };
+
+ return p.createInfixOp(token, op);
+ }
+
+ /// MultiplyOp
+ /// <- PIPE2
+ /// / ASTERISK
+ /// / SLASH
+ /// / PERCENT
+ /// / ASTERISK2
+ /// / ASTERISKPERCENT
+ fn parseMultiplyOp(p: *Parser) !?*Node {
+ const token = p.nextToken();
+ const op: Node.InfixOp.Op = switch (p.token_ids[token]) {
+ .PipePipe => .MergeErrorSets,
+ .Asterisk => .Mul,
+ .Slash => .Div,
+ .Percent => .Mod,
+ .AsteriskAsterisk => .ArrayMult,
+ .AsteriskPercent => .MulWrap,
+ else => {
+ p.putBackToken(token);
+ return null;
+ },
+ };
+
+ return p.createInfixOp(token, op);
+ }
+
+ /// PrefixOp
+ /// <- EXCLAMATIONMARK
+ /// / MINUS
+ /// / TILDE
+ /// / MINUSPERCENT
+ /// / AMPERSAND
+ /// / KEYWORD_try
+ /// / KEYWORD_await
+ fn parsePrefixOp(p: *Parser) !?*Node {
+ const token = p.nextToken();
+ const op: Node.PrefixOp.Op = switch (p.token_ids[token]) {
+ .Bang => .BoolNot,
+ .Minus => .Negation,
+ .Tilde => .BitNot,
+ .MinusPercent => .NegationWrap,
+ .Ampersand => .AddressOf,
+ .Keyword_try => .Try,
+ .Keyword_await => .Await,
+ else => {
+ p.putBackToken(token);
+ return null;
+ },
+ };
+
+ const node = try p.arena.allocator.create(Node.PrefixOp);
+ node.* = .{
+ .op_token = token,
+ .op = op,
+ .rhs = undefined, // set by caller
+ };
+ return &node.base;
+ }
+
+ // TODO: ArrayTypeStart is either an array or a slice, but const/allowzero only work on
+ // pointers. Consider updating this rule:
+ // ...
+ // / ArrayTypeStart
+ // / SliceTypeStart (ByteAlign / KEYWORD_const / KEYWORD_volatile / KEYWORD_allowzero)*
+ // / PtrTypeStart ...
+
+ /// PrefixTypeOp
+ /// <- QUESTIONMARK
+ /// / KEYWORD_anyframe MINUSRARROW
+ /// / ArrayTypeStart (ByteAlign / KEYWORD_const / KEYWORD_volatile / KEYWORD_allowzero)*
+ /// / PtrTypeStart (KEYWORD_align LPAREN Expr (COLON INTEGER COLON INTEGER)? RPAREN / KEYWORD_const / KEYWORD_volatile / KEYWORD_allowzero)*
+ fn parsePrefixTypeOp(p: *Parser) !?*Node {
+ if (p.eatToken(.QuestionMark)) |token| {
+ const node = try p.arena.allocator.create(Node.PrefixOp);
+ node.* = .{
+ .op_token = token,
+ .op = .OptionalType,
.rhs = undefined, // set by caller
};
return &node.base;
}
- }.parse;
-}
-// Helper parsers not included in the grammar
-
-fn parseBuiltinCall(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const token = eatToken(it, .Builtin) orelse return null;
- const params = (try parseFnCallArguments(arena, it, tree)) orelse {
- try tree.errors.push(.{
- .ExpectedParamList = .{ .token = it.index },
- });
-
- // lets pretend this was an identifier so we can continue parsing
- const node = try arena.create(Node.Identifier);
- node.* = .{
- .token = token,
- };
- return &node.base;
- };
- const node = try arena.create(Node.BuiltinCall);
- node.* = .{
- .builtin_token = token,
- .params = params.list,
- .rparen_token = params.rparen,
- };
- return &node.base;
-}
-
-fn parseErrorTag(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const doc_comments = try parseDocComment(arena, it, tree); // no need to rewind on failure
- const token = eatToken(it, .Identifier) orelse return null;
-
- const node = try arena.create(Node.ErrorTag);
- node.* = .{
- .doc_comments = doc_comments,
- .name_token = token,
- };
- return &node.base;
-}
-
-fn parseIdentifier(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const token = eatToken(it, .Identifier) orelse return null;
- const node = try arena.create(Node.Identifier);
- node.* = .{
- .token = token,
- };
- return &node.base;
-}
-
-fn parseVarType(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const token = eatToken(it, .Keyword_var) orelse return null;
- const node = try arena.create(Node.VarType);
- node.* = .{
- .token = token,
- };
- return &node.base;
-}
-
-fn createLiteral(arena: *Allocator, comptime T: type, token: TokenIndex) !*Node {
- const result = try arena.create(T);
- result.* = T{
- .base = Node{ .id = Node.typeToId(T) },
- .token = token,
- };
- return &result.base;
-}
-
-fn parseStringLiteralSingle(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- if (eatToken(it, .StringLiteral)) |token| {
- const node = try arena.create(Node.StringLiteral);
- node.* = .{
- .token = token,
- };
- return &node.base;
- }
- return null;
-}
-
-// string literal or multiline string literal
-fn parseStringLiteral(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- if (try parseStringLiteralSingle(arena, it, tree)) |node| return node;
-
- if (eatToken(it, .MultilineStringLiteralLine)) |first_line| {
- const node = try arena.create(Node.MultilineStringLiteral);
- node.* = .{
- .lines = Node.MultilineStringLiteral.LineList.init(arena),
- };
- try node.lines.push(first_line);
- while (eatToken(it, .MultilineStringLiteralLine)) |line|
- try node.lines.push(line);
-
- return &node.base;
- }
-
- return null;
-}
-
-fn parseIntegerLiteral(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const token = eatToken(it, .IntegerLiteral) orelse return null;
- const node = try arena.create(Node.IntegerLiteral);
- node.* = .{
- .token = token,
- };
- return &node.base;
-}
-
-fn parseFloatLiteral(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const token = eatToken(it, .FloatLiteral) orelse return null;
- const node = try arena.create(Node.FloatLiteral);
- node.* = .{
- .token = token,
- };
- return &node.base;
-}
-
-fn parseTry(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const token = eatToken(it, .Keyword_try) orelse return null;
- const node = try arena.create(Node.PrefixOp);
- node.* = .{
- .op_token = token,
- .op = .Try,
- .rhs = undefined, // set by caller
- };
- return &node.base;
-}
-
-fn parseUse(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
- const token = eatToken(it, .Keyword_usingnamespace) orelse return null;
- const node = try arena.create(Node.Use);
- node.* = .{
- .doc_comments = null,
- .visib_token = null,
- .use_token = token,
- .expr = try expectNode(arena, it, tree, parseExpr, .{
- .ExpectedExpr = .{ .token = it.index },
- }),
- .semicolon_token = try expectToken(it, tree, .Semicolon),
- };
- return &node.base;
-}
-
-/// IfPrefix Body (KEYWORD_else Payload? Body)?
-fn parseIf(arena: *Allocator, it: *TokenIterator, tree: *Tree, bodyParseFn: NodeParseFn) !?*Node {
- const node = (try parseIfPrefix(arena, it, tree)) orelse return null;
- const if_prefix = node.cast(Node.If).?;
-
- if_prefix.body = try expectNode(arena, it, tree, bodyParseFn, .{
- .InvalidToken = .{ .token = it.index },
- });
-
- const else_token = eatToken(it, .Keyword_else) orelse return node;
- const payload = try parsePayload(arena, it, tree);
- const else_expr = try expectNode(arena, it, tree, bodyParseFn, .{
- .InvalidToken = .{ .token = it.index },
- });
- const else_node = try arena.create(Node.Else);
- else_node.* = .{
- .else_token = else_token,
- .payload = payload,
- .body = else_expr,
- };
- if_prefix.@"else" = else_node;
-
- return node;
-}
-
-/// Eat a multiline doc comment
-fn parseDocComment(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node.DocComment {
- var lines = Node.DocComment.LineList.init(arena);
- while (eatToken(it, .DocComment)) |line| {
- try lines.push(line);
- }
-
- if (lines.len == 0) return null;
-
- const node = try arena.create(Node.DocComment);
- node.* = .{
- .lines = lines,
- };
- return node;
-}
-
-/// Eat a single-line doc comment on the same line as another node
-fn parseAppendedDocComment(arena: *Allocator, it: *TokenIterator, tree: *Tree, after_token: TokenIndex) !?*Node.DocComment {
- const comment_token = eatToken(it, .DocComment) orelse return null;
- if (tree.tokensOnSameLine(after_token, comment_token)) {
- const node = try arena.create(Node.DocComment);
- node.* = .{
- .lines = Node.DocComment.LineList.init(arena),
- };
- try node.lines.push(comment_token);
- return node;
- }
- putBackToken(it, comment_token);
- return null;
-}
-
-/// Op* Child
-fn parsePrefixOpExpr(
- arena: *Allocator,
- it: *TokenIterator,
- tree: *Tree,
- opParseFn: NodeParseFn,
- childParseFn: NodeParseFn,
-) Error!?*Node {
- if (try opParseFn(arena, it, tree)) |first_op| {
- var rightmost_op = first_op;
- while (true) {
- switch (rightmost_op.id) {
- .PrefixOp => {
- var prefix_op = rightmost_op.cast(Node.PrefixOp).?;
- // If the token encountered was **, there will be two nodes
- if (tree.tokens.at(prefix_op.op_token).id == .AsteriskAsterisk) {
- rightmost_op = prefix_op.rhs;
- prefix_op = rightmost_op.cast(Node.PrefixOp).?;
- }
- if (try opParseFn(arena, it, tree)) |rhs| {
- prefix_op.rhs = rhs;
- rightmost_op = rhs;
- } else break;
+ // TODO: Returning a AnyFrameType instead of PrefixOp makes casting and setting .rhs or
+ // .return_type more difficult for the caller (see parsePrefixOpExpr helper).
+ // Consider making the AnyFrameType a member of PrefixOp and add a
+ // PrefixOp.AnyFrameType variant?
+ if (p.eatToken(.Keyword_anyframe)) |token| {
+ const arrow = p.eatToken(.Arrow) orelse {
+ p.putBackToken(token);
+ return null;
+ };
+ const node = try p.arena.allocator.create(Node.AnyFrameType);
+ node.* = .{
+ .anyframe_token = token,
+ .result = .{
+ .arrow_token = arrow,
+ .return_type = undefined, // set by caller
},
- .AnyFrameType => {
- const prom = rightmost_op.cast(Node.AnyFrameType).?;
- if (try opParseFn(arena, it, tree)) |rhs| {
- prom.result.?.return_type = rhs;
- rightmost_op = rhs;
- } else break;
+ };
+ return &node.base;
+ }
+
+ if (try p.parsePtrTypeStart()) |node| {
+ // If the token encountered was **, there will be two nodes instead of one.
+ // The attributes should be applied to the rightmost operator.
+ const prefix_op = node.cast(Node.PrefixOp).?;
+ var ptr_info = if (p.token_ids[prefix_op.op_token] == .AsteriskAsterisk)
+ &prefix_op.rhs.cast(Node.PrefixOp).?.op.PtrType
+ else
+ &prefix_op.op.PtrType;
+
+ while (true) {
+ if (p.eatToken(.Keyword_align)) |align_token| {
+ const lparen = try p.expectToken(.LParen);
+ const expr_node = try p.expectNode(parseExpr, .{
+ .ExpectedExpr = .{ .token = p.tok_i },
+ });
+
+ // Optional bit range
+ const bit_range = if (p.eatToken(.Colon)) |_| bit_range_value: {
+ const range_start = try p.expectNode(parseIntegerLiteral, .{
+ .ExpectedIntegerLiteral = .{ .token = p.tok_i },
+ });
+ _ = try p.expectToken(.Colon);
+ const range_end = try p.expectNode(parseIntegerLiteral, .{
+ .ExpectedIntegerLiteral = .{ .token = p.tok_i },
+ });
+
+ break :bit_range_value Node.PrefixOp.PtrInfo.Align.BitRange{
+ .start = range_start,
+ .end = range_end,
+ };
+ } else null;
+ _ = try p.expectToken(.RParen);
+
+ if (ptr_info.align_info != null) {
+ try p.errors.append(p.gpa, .{
+ .ExtraAlignQualifier = .{ .token = p.tok_i - 1 },
+ });
+ continue;
+ }
+
+ ptr_info.align_info = Node.PrefixOp.PtrInfo.Align{
+ .node = expr_node,
+ .bit_range = bit_range,
+ };
+
+ continue;
+ }
+ if (p.eatToken(.Keyword_const)) |const_token| {
+ if (ptr_info.const_token != null) {
+ try p.errors.append(p.gpa, .{
+ .ExtraConstQualifier = .{ .token = p.tok_i - 1 },
+ });
+ continue;
+ }
+ ptr_info.const_token = const_token;
+ continue;
+ }
+ if (p.eatToken(.Keyword_volatile)) |volatile_token| {
+ if (ptr_info.volatile_token != null) {
+ try p.errors.append(p.gpa, .{
+ .ExtraVolatileQualifier = .{ .token = p.tok_i - 1 },
+ });
+ continue;
+ }
+ ptr_info.volatile_token = volatile_token;
+ continue;
+ }
+ if (p.eatToken(.Keyword_allowzero)) |allowzero_token| {
+ if (ptr_info.allowzero_token != null) {
+ try p.errors.append(p.gpa, .{
+ .ExtraAllowZeroQualifier = .{ .token = p.tok_i - 1 },
+ });
+ continue;
+ }
+ ptr_info.allowzero_token = allowzero_token;
+ continue;
+ }
+ break;
+ }
+
+ return node;
+ }
+
+ if (try p.parseArrayTypeStart()) |node| {
+ switch (node.cast(Node.PrefixOp).?.op) {
+ .ArrayType => {},
+ .SliceType => |*slice_type| {
+ // Collect pointer qualifiers in any order, but disallow duplicates
+ while (true) {
+ if (try p.parseByteAlign()) |align_expr| {
+ if (slice_type.align_info != null) {
+ try p.errors.append(p.gpa, .{
+ .ExtraAlignQualifier = .{ .token = p.tok_i - 1 },
+ });
+ continue;
+ }
+ slice_type.align_info = Node.PrefixOp.PtrInfo.Align{
+ .node = align_expr,
+ .bit_range = null,
+ };
+ continue;
+ }
+ if (p.eatToken(.Keyword_const)) |const_token| {
+ if (slice_type.const_token != null) {
+ try p.errors.append(p.gpa, .{
+ .ExtraConstQualifier = .{ .token = p.tok_i - 1 },
+ });
+ continue;
+ }
+ slice_type.const_token = const_token;
+ continue;
+ }
+ if (p.eatToken(.Keyword_volatile)) |volatile_token| {
+ if (slice_type.volatile_token != null) {
+ try p.errors.append(p.gpa, .{
+ .ExtraVolatileQualifier = .{ .token = p.tok_i - 1 },
+ });
+ continue;
+ }
+ slice_type.volatile_token = volatile_token;
+ continue;
+ }
+ if (p.eatToken(.Keyword_allowzero)) |allowzero_token| {
+ if (slice_type.allowzero_token != null) {
+ try p.errors.append(p.gpa, .{
+ .ExtraAllowZeroQualifier = .{ .token = p.tok_i - 1 },
+ });
+ continue;
+ }
+ slice_type.allowzero_token = allowzero_token;
+ continue;
+ }
+ break;
+ }
},
else => unreachable,
}
+ return node;
}
- // If any prefix op existed, a child node on the RHS is required
- switch (rightmost_op.id) {
- .PrefixOp => {
- const prefix_op = rightmost_op.cast(Node.PrefixOp).?;
- prefix_op.rhs = try expectNode(arena, it, tree, childParseFn, .{
- .InvalidToken = .{ .token = it.index },
- });
- },
- .AnyFrameType => {
- const prom = rightmost_op.cast(Node.AnyFrameType).?;
- prom.result.?.return_type = try expectNode(arena, it, tree, childParseFn, .{
- .InvalidToken = .{ .token = it.index },
- });
- },
- else => unreachable,
- }
-
- return first_op;
- }
-
- // Otherwise, the child node is optional
- return try childParseFn(arena, it, tree);
-}
-
-/// Child (Op Child)*
-/// Child (Op Child)?
-fn parseBinOpExpr(
- arena: *Allocator,
- it: *TokenIterator,
- tree: *Tree,
- opParseFn: NodeParseFn,
- childParseFn: NodeParseFn,
- chain: enum {
- Once,
- Infinitely,
- },
-) Error!?*Node {
- var res = (try childParseFn(arena, it, tree)) orelse return null;
-
- while (try opParseFn(arena, it, tree)) |node| {
- const right = try expectNode(arena, it, tree, childParseFn, .{
- .InvalidToken = .{ .token = it.index },
- });
- const left = res;
- res = node;
-
- const op = node.cast(Node.InfixOp).?;
- op.*.lhs = left;
- op.*.rhs = right;
-
- switch (chain) {
- .Once => break,
- .Infinitely => continue,
- }
- }
-
- return res;
-}
-
-fn createInfixOp(arena: *Allocator, index: TokenIndex, op: Node.InfixOp.Op) !*Node {
- const node = try arena.create(Node.InfixOp);
- node.* = .{
- .op_token = index,
- .lhs = undefined, // set by caller
- .op = op,
- .rhs = undefined, // set by caller
- };
- return &node.base;
-}
-
-fn eatToken(it: *TokenIterator, id: Token.Id) ?TokenIndex {
- return if (eatAnnotatedToken(it, id)) |token| token.index else null;
-}
-
-fn eatAnnotatedToken(it: *TokenIterator, id: Token.Id) ?AnnotatedToken {
- return if (it.peek().?.id == id) nextToken(it) else null;
-}
-
-fn expectToken(it: *TokenIterator, tree: *Tree, id: Token.Id) Error!TokenIndex {
- return (try expectTokenRecoverable(it, tree, id)) orelse
- error.ParseError;
-}
-
-fn expectTokenRecoverable(it: *TokenIterator, tree: *Tree, id: Token.Id) !?TokenIndex {
- const token = nextToken(it);
- if (token.ptr.id != id) {
- try tree.errors.push(.{
- .ExpectedToken = .{ .token = token.index, .expected_id = id },
- });
- // go back so that we can recover properly
- putBackToken(it, token.index);
return null;
}
- return token.index;
-}
-fn nextToken(it: *TokenIterator) AnnotatedToken {
- const result = AnnotatedToken{
- .index = it.index,
- .ptr = it.next().?,
+ /// SuffixOp
+ /// <- LBRACKET Expr (DOT2 (Expr (COLON Expr)?)?)? RBRACKET
+ /// / DOT IDENTIFIER
+ /// / DOTASTERISK
+ /// / DOTQUESTIONMARK
+ fn parseSuffixOp(p: *Parser) !?*Node {
+ const OpAndToken = struct {
+ op: Node.SuffixOp.Op,
+ token: TokenIndex,
+ };
+ const op_and_token: OpAndToken = blk: {
+ if (p.eatToken(.LBracket)) |_| {
+ const index_expr = try p.expectNode(parseExpr, .{
+ .ExpectedExpr = .{ .token = p.tok_i },
+ });
+
+ if (p.eatToken(.Ellipsis2) != null) {
+ const end_expr = try p.parseExpr();
+ const sentinel: ?*Node = if (p.eatToken(.Colon) != null)
+ try p.parseExpr()
+ else
+ null;
+ break :blk .{
+ .op = .{
+ .Slice = .{
+ .start = index_expr,
+ .end = end_expr,
+ .sentinel = sentinel,
+ },
+ },
+ .token = try p.expectToken(.RBracket),
+ };
+ }
+
+ break :blk .{
+ .op = .{ .ArrayAccess = index_expr },
+ .token = try p.expectToken(.RBracket),
+ };
+ }
+
+ if (p.eatToken(.PeriodAsterisk)) |period_asterisk| {
+ break :blk .{ .op = .Deref, .token = period_asterisk };
+ }
+
+ if (p.eatToken(.Period)) |period| {
+ if (try p.parseIdentifier()) |identifier| {
+ // TODO: It's a bit weird to return an InfixOp from the SuffixOp parser.
+ // Should there be an Node.SuffixOp.FieldAccess variant? Or should
+ // this grammar rule be altered?
+ const node = try p.arena.allocator.create(Node.InfixOp);
+ node.* = .{
+ .op_token = period,
+ .lhs = undefined, // set by caller
+ .op = .Period,
+ .rhs = identifier,
+ };
+ return &node.base;
+ }
+ if (p.eatToken(.QuestionMark)) |question_mark| {
+ break :blk .{ .op = .UnwrapOptional, .token = question_mark };
+ }
+ try p.errors.append(p.gpa, .{
+ .ExpectedSuffixOp = .{ .token = p.tok_i },
+ });
+ return null;
+ }
+
+ return null;
+ };
+
+ const node = try p.arena.allocator.create(Node.SuffixOp);
+ node.* = .{
+ .lhs = undefined, // set by caller
+ .op = op_and_token.op,
+ .rtoken = op_and_token.token,
+ };
+ return &node.base;
+ }
+
+ /// FnCallArguments <- LPAREN ExprList RPAREN
+ /// ExprList <- (Expr COMMA)* Expr?
+ fn parseFnCallArguments(p: *Parser) !?AnnotatedParamList {
+ if (p.eatToken(.LParen) == null) return null;
+ const list = try ListParseFn(*Node, parseExpr)(p);
+ errdefer p.gpa.free(list);
+ const rparen = try p.expectToken(.RParen);
+ return AnnotatedParamList{ .list = list, .rparen = rparen };
+ }
+
+ const AnnotatedParamList = struct {
+ list: []*Node,
+ rparen: TokenIndex,
};
- assert(result.ptr.id != .LineComment);
- while (true) {
- const next_tok = it.peek() orelse return result;
- if (next_tok.id != .LineComment) return result;
- _ = it.next();
+ /// ArrayTypeStart <- LBRACKET Expr? RBRACKET
+ fn parseArrayTypeStart(p: *Parser) !?*Node {
+ const lbracket = p.eatToken(.LBracket) orelse return null;
+ const expr = try p.parseExpr();
+ const sentinel = if (p.eatToken(.Colon)) |_|
+ try p.expectNode(parseExpr, .{
+ .ExpectedExpr = .{ .token = p.tok_i },
+ })
+ else
+ null;
+ const rbracket = try p.expectToken(.RBracket);
+
+ const op: Node.PrefixOp.Op = if (expr) |len_expr|
+ .{
+ .ArrayType = .{
+ .len_expr = len_expr,
+ .sentinel = sentinel,
+ },
+ }
+ else
+ .{
+ .SliceType = Node.PrefixOp.PtrInfo{
+ .allowzero_token = null,
+ .align_info = null,
+ .const_token = null,
+ .volatile_token = null,
+ .sentinel = sentinel,
+ },
+ };
+
+ const node = try p.arena.allocator.create(Node.PrefixOp);
+ node.* = .{
+ .op_token = lbracket,
+ .op = op,
+ .rhs = undefined, // set by caller
+ };
+ return &node.base;
}
-}
-fn putBackToken(it: *TokenIterator, putting_back: TokenIndex) void {
- while (true) {
- const prev_tok = it.prev() orelse return;
- if (prev_tok.id == .LineComment) continue;
- assert(it.list.at(putting_back) == prev_tok);
- return;
+ /// PtrTypeStart
+ /// <- ASTERISK
+ /// / ASTERISK2
+ /// / PTRUNKNOWN
+ /// / PTRC
+ fn parsePtrTypeStart(p: *Parser) !?*Node {
+ if (p.eatToken(.Asterisk)) |asterisk| {
+ const sentinel = if (p.eatToken(.Colon)) |_|
+ try p.expectNode(parseExpr, .{
+ .ExpectedExpr = .{ .token = p.tok_i },
+ })
+ else
+ null;
+ const node = try p.arena.allocator.create(Node.PrefixOp);
+ node.* = .{
+ .op_token = asterisk,
+ .op = .{ .PtrType = .{ .sentinel = sentinel } },
+ .rhs = undefined, // set by caller
+ };
+ return &node.base;
+ }
+
+ if (p.eatToken(.AsteriskAsterisk)) |double_asterisk| {
+ const node = try p.arena.allocator.create(Node.PrefixOp);
+ node.* = .{
+ .op_token = double_asterisk,
+ .op = .{ .PtrType = .{} },
+ .rhs = undefined, // set by caller
+ };
+
+ // Special case for **, which is its own token
+ const child = try p.arena.allocator.create(Node.PrefixOp);
+ child.* = .{
+ .op_token = double_asterisk,
+ .op = .{ .PtrType = .{} },
+ .rhs = undefined, // set by caller
+ };
+ node.rhs = &child.base;
+
+ return &node.base;
+ }
+ if (p.eatToken(.LBracket)) |lbracket| {
+ const asterisk = p.eatToken(.Asterisk) orelse {
+ p.putBackToken(lbracket);
+ return null;
+ };
+ if (p.eatToken(.Identifier)) |ident| {
+ const token_loc = p.token_locs[ident];
+ const token_slice = p.source[token_loc.start..token_loc.end];
+ if (!std.mem.eql(u8, token_slice, "c")) {
+ p.putBackToken(ident);
+ } else {
+ _ = try p.expectToken(.RBracket);
+ const node = try p.arena.allocator.create(Node.PrefixOp);
+ node.* = .{
+ .op_token = lbracket,
+ .op = .{ .PtrType = .{} },
+ .rhs = undefined, // set by caller
+ };
+ return &node.base;
+ }
+ }
+ const sentinel = if (p.eatToken(.Colon)) |_|
+ try p.expectNode(parseExpr, .{
+ .ExpectedExpr = .{ .token = p.tok_i },
+ })
+ else
+ null;
+ _ = try p.expectToken(.RBracket);
+ const node = try p.arena.allocator.create(Node.PrefixOp);
+ node.* = .{
+ .op_token = lbracket,
+ .op = .{ .PtrType = .{ .sentinel = sentinel } },
+ .rhs = undefined, // set by caller
+ };
+ return &node.base;
+ }
+ return null;
}
-}
-const AnnotatedToken = struct {
- index: TokenIndex,
- ptr: *Token,
+ /// ContainerDeclAuto <- ContainerDeclType LBRACE ContainerMembers RBRACE
+ fn parseContainerDeclAuto(p: *Parser) !?*Node {
+ const container_decl_type = (try p.parseContainerDeclType()) orelse return null;
+ const lbrace = try p.expectToken(.LBrace);
+ const members = try p.parseContainerMembers(false);
+ defer p.gpa.free(members);
+ const rbrace = try p.expectToken(.RBrace);
+
+ const members_len = @intCast(NodeIndex, members.len);
+ const node = try Node.ContainerDecl.alloc(&p.arena.allocator, members_len);
+ node.* = .{
+ .layout_token = null,
+ .kind_token = container_decl_type.kind_token,
+ .init_arg_expr = container_decl_type.init_arg_expr,
+ .fields_and_decls_len = members_len,
+ .lbrace_token = lbrace,
+ .rbrace_token = rbrace,
+ };
+ std.mem.copy(*Node, node.fieldsAndDecls(), members);
+ return &node.base;
+ }
+
+ /// Holds temporary data until we are ready to construct the full ContainerDecl AST node.
+ const ContainerDeclType = struct {
+ kind_token: TokenIndex,
+ init_arg_expr: Node.ContainerDecl.InitArg,
+ };
+
+ /// ContainerDeclType
+ /// <- KEYWORD_struct
+ /// / KEYWORD_enum (LPAREN Expr RPAREN)?
+ /// / KEYWORD_union (LPAREN (KEYWORD_enum (LPAREN Expr RPAREN)? / Expr) RPAREN)?
+ fn parseContainerDeclType(p: *Parser) !?ContainerDeclType {
+ const kind_token = p.nextToken();
+
+ const init_arg_expr = switch (p.token_ids[kind_token]) {
+ .Keyword_struct => Node.ContainerDecl.InitArg{ .None = {} },
+ .Keyword_enum => blk: {
+ if (p.eatToken(.LParen) != null) {
+ const expr = try p.expectNode(parseExpr, .{
+ .ExpectedExpr = .{ .token = p.tok_i },
+ });
+ _ = try p.expectToken(.RParen);
+ break :blk Node.ContainerDecl.InitArg{ .Type = expr };
+ }
+ break :blk Node.ContainerDecl.InitArg{ .None = {} };
+ },
+ .Keyword_union => blk: {
+ if (p.eatToken(.LParen) != null) {
+ if (p.eatToken(.Keyword_enum) != null) {
+ if (p.eatToken(.LParen) != null) {
+ const expr = try p.expectNode(parseExpr, .{
+ .ExpectedExpr = .{ .token = p.tok_i },
+ });
+ _ = try p.expectToken(.RParen);
+ _ = try p.expectToken(.RParen);
+ break :blk Node.ContainerDecl.InitArg{ .Enum = expr };
+ }
+ _ = try p.expectToken(.RParen);
+ break :blk Node.ContainerDecl.InitArg{ .Enum = null };
+ }
+ const expr = try p.expectNode(parseExpr, .{
+ .ExpectedExpr = .{ .token = p.tok_i },
+ });
+ _ = try p.expectToken(.RParen);
+ break :blk Node.ContainerDecl.InitArg{ .Type = expr };
+ }
+ break :blk Node.ContainerDecl.InitArg{ .None = {} };
+ },
+ else => {
+ p.putBackToken(kind_token);
+ return null;
+ },
+ };
+
+ return ContainerDeclType{
+ .kind_token = kind_token,
+ .init_arg_expr = init_arg_expr,
+ };
+ }
+
+ /// ByteAlign <- KEYWORD_align LPAREN Expr RPAREN
+ fn parseByteAlign(p: *Parser) !?*Node {
+ _ = p.eatToken(.Keyword_align) orelse return null;
+ _ = try p.expectToken(.LParen);
+ const expr = try p.expectNode(parseExpr, .{
+ .ExpectedExpr = .{ .token = p.tok_i },
+ });
+ _ = try p.expectToken(.RParen);
+ return expr;
+ }
+
+ /// IdentifierList <- (IDENTIFIER COMMA)* IDENTIFIER?
+ /// Only ErrorSetDecl parses an IdentifierList
+ fn parseErrorTagList(p: *Parser) ![]*Node {
+ return ListParseFn(*Node, parseErrorTag)(p);
+ }
+
+ /// SwitchProngList <- (SwitchProng COMMA)* SwitchProng?
+ fn parseSwitchProngList(p: *Parser) ![]*Node {
+ return ListParseFn(*Node, parseSwitchProng)(p);
+ }
+
+ /// AsmOutputList <- (AsmOutputItem COMMA)* AsmOutputItem?
+ fn parseAsmOutputList(p: *Parser) Error![]Node.Asm.Output {
+ return ListParseFn(Node.Asm.Output, parseAsmOutputItem)(p);
+ }
+
+ /// AsmInputList <- (AsmInputItem COMMA)* AsmInputItem?
+ fn parseAsmInputList(p: *Parser) Error![]Node.Asm.Input {
+ return ListParseFn(Node.Asm.Input, parseAsmInputItem)(p);
+ }
+
+ /// ParamDeclList <- (ParamDecl COMMA)* ParamDecl?
+ fn parseParamDeclList(p: *Parser) ![]Node.FnProto.ParamDecl {
+ return ListParseFn(Node.FnProto.ParamDecl, parseParamDecl)(p);
+ }
+
+ const NodeParseFn = fn (p: *Parser) Error!?*Node;
+
+ fn ListParseFn(comptime E: type, comptime nodeParseFn: var) ParseFn([]E) {
+ return struct {
+ pub fn parse(p: *Parser) ![]E {
+ var list = std.ArrayList(E).init(p.gpa);
+ defer list.deinit();
+
+ while (try nodeParseFn(p)) |item| {
+ try list.append(item);
+
+ switch (p.token_ids[p.tok_i]) {
+ .Comma => _ = p.nextToken(),
+ // all possible delimiters
+ .Colon, .RParen, .RBrace, .RBracket => break,
+ else => {
+ // this is likely just a missing comma,
+ // continue parsing this list and give an error
+ try p.errors.append(p.gpa, .{
+ .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma },
+ });
+ },
+ }
+ }
+ return list.toOwnedSlice();
+ }
+ }.parse;
+ }
+
+ fn SimpleBinOpParseFn(comptime token: Token.Id, comptime op: Node.InfixOp.Op) NodeParseFn {
+ return struct {
+ pub fn parse(p: *Parser) Error!?*Node {
+ const op_token = if (token == .Keyword_and) switch (p.token_ids[p.tok_i]) {
+ .Keyword_and => p.nextToken(),
+ .Invalid_ampersands => blk: {
+ try p.errors.append(p.gpa, .{
+ .InvalidAnd = .{ .token = p.tok_i },
+ });
+ break :blk p.nextToken();
+ },
+ else => return null,
+ } else p.eatToken(token) orelse return null;
+
+ const node = try p.arena.allocator.create(Node.InfixOp);
+ node.* = .{
+ .op_token = op_token,
+ .lhs = undefined, // set by caller
+ .op = op,
+ .rhs = undefined, // set by caller
+ };
+ return &node.base;
+ }
+ }.parse;
+ }
+
+ // Helper parsers not included in the grammar
+
+ fn parseBuiltinCall(p: *Parser) !?*Node {
+ const token = p.eatToken(.Builtin) orelse return null;
+ const params = (try p.parseFnCallArguments()) orelse {
+ try p.errors.append(p.gpa, .{
+ .ExpectedParamList = .{ .token = p.tok_i },
+ });
+
+ // lets pretend this was an identifier so we can continue parsing
+ const node = try p.arena.allocator.create(Node.Identifier);
+ node.* = .{
+ .token = token,
+ };
+ return &node.base;
+ };
+ defer p.gpa.free(params.list);
+
+ const node = try Node.BuiltinCall.alloc(&p.arena.allocator, params.list.len);
+ node.* = .{
+ .builtin_token = token,
+ .params_len = params.list.len,
+ .rparen_token = params.rparen,
+ };
+ std.mem.copy(*Node, node.params(), params.list);
+ return &node.base;
+ }
+
+ fn parseErrorTag(p: *Parser) !?*Node {
+ const doc_comments = try p.parseDocComment(); // no need to rewind on failure
+ const token = p.eatToken(.Identifier) orelse return null;
+
+ const node = try p.arena.allocator.create(Node.ErrorTag);
+ node.* = .{
+ .doc_comments = doc_comments,
+ .name_token = token,
+ };
+ return &node.base;
+ }
+
+ fn parseIdentifier(p: *Parser) !?*Node {
+ const token = p.eatToken(.Identifier) orelse return null;
+ const node = try p.arena.allocator.create(Node.Identifier);
+ node.* = .{
+ .token = token,
+ };
+ return &node.base;
+ }
+
+ fn parseVarType(p: *Parser) !?*Node {
+ const token = p.eatToken(.Keyword_var) orelse return null;
+ const node = try p.arena.allocator.create(Node.VarType);
+ node.* = .{
+ .token = token,
+ };
+ return &node.base;
+ }
+
+ fn createLiteral(p: *Parser, comptime T: type, token: TokenIndex) !*Node {
+ const result = try p.arena.allocator.create(T);
+ result.* = T{
+ .base = Node{ .id = Node.typeToId(T) },
+ .token = token,
+ };
+ return &result.base;
+ }
+
+ fn parseStringLiteralSingle(p: *Parser) !?*Node {
+ if (p.eatToken(.StringLiteral)) |token| {
+ const node = try p.arena.allocator.create(Node.StringLiteral);
+ node.* = .{
+ .token = token,
+ };
+ return &node.base;
+ }
+ return null;
+ }
+
+ // string literal or multiline string literal
+ fn parseStringLiteral(p: *Parser) !?*Node {
+ if (try p.parseStringLiteralSingle()) |node| return node;
+
+ if (p.eatToken(.MultilineStringLiteralLine)) |first_line| {
+ const start_tok_i = p.tok_i;
+ var tok_i = start_tok_i;
+ var count: usize = 1; // including first_line
+ while (true) : (tok_i += 1) {
+ switch (p.token_ids[tok_i]) {
+ .LineComment => continue,
+ .MultilineStringLiteralLine => count += 1,
+ else => break,
+ }
+ }
+
+ const node = try Node.MultilineStringLiteral.alloc(&p.arena.allocator, count);
+ node.* = .{ .lines_len = count };
+ const lines = node.lines();
+ tok_i = start_tok_i;
+ lines[0] = first_line;
+ count = 1;
+ while (true) : (tok_i += 1) {
+ switch (p.token_ids[tok_i]) {
+ .LineComment => continue,
+ .MultilineStringLiteralLine => {
+ lines[count] = tok_i;
+ count += 1;
+ },
+ else => break,
+ }
+ }
+ p.tok_i = tok_i;
+ return &node.base;
+ }
+
+ return null;
+ }
+
+ fn parseIntegerLiteral(p: *Parser) !?*Node {
+ const token = p.eatToken(.IntegerLiteral) orelse return null;
+ const node = try p.arena.allocator.create(Node.IntegerLiteral);
+ node.* = .{
+ .token = token,
+ };
+ return &node.base;
+ }
+
+ fn parseFloatLiteral(p: *Parser) !?*Node {
+ const token = p.eatToken(.FloatLiteral) orelse return null;
+ const node = try p.arena.allocator.create(Node.FloatLiteral);
+ node.* = .{
+ .token = token,
+ };
+ return &node.base;
+ }
+
+ fn parseTry(p: *Parser) !?*Node {
+ const token = p.eatToken(.Keyword_try) orelse return null;
+ const node = try p.arena.allocator.create(Node.PrefixOp);
+ node.* = .{
+ .op_token = token,
+ .op = .Try,
+ .rhs = undefined, // set by caller
+ };
+ return &node.base;
+ }
+
+ fn parseUse(p: *Parser) !?*Node {
+ const token = p.eatToken(.Keyword_usingnamespace) orelse return null;
+ const node = try p.arena.allocator.create(Node.Use);
+ node.* = .{
+ .doc_comments = null,
+ .visib_token = null,
+ .use_token = token,
+ .expr = try p.expectNode(parseExpr, .{
+ .ExpectedExpr = .{ .token = p.tok_i },
+ }),
+ .semicolon_token = try p.expectToken(.Semicolon),
+ };
+ return &node.base;
+ }
+
+ /// IfPrefix Body (KEYWORD_else Payload? Body)?
+ fn parseIf(p: *Parser, bodyParseFn: NodeParseFn) !?*Node {
+ const node = (try p.parseIfPrefix()) orelse return null;
+ const if_prefix = node.cast(Node.If).?;
+
+ if_prefix.body = try p.expectNode(bodyParseFn, .{
+ .InvalidToken = .{ .token = p.tok_i },
+ });
+
+ const else_token = p.eatToken(.Keyword_else) orelse return node;
+ const payload = try p.parsePayload();
+ const else_expr = try p.expectNode(bodyParseFn, .{
+ .InvalidToken = .{ .token = p.tok_i },
+ });
+ const else_node = try p.arena.allocator.create(Node.Else);
+ else_node.* = .{
+ .else_token = else_token,
+ .payload = payload,
+ .body = else_expr,
+ };
+ if_prefix.@"else" = else_node;
+
+ return node;
+ }
+
+ /// Eat a multiline doc comment
+ fn parseDocComment(p: *Parser) !?*Node.DocComment {
+ if (p.eatToken(.DocComment)) |first_line| {
+ while (p.eatToken(.DocComment)) |_| {}
+ const node = try p.arena.allocator.create(Node.DocComment);
+ node.* = .{ .first_line = first_line };
+ return node;
+ }
+ return null;
+ }
+
+ fn tokensOnSameLine(p: *Parser, token1: TokenIndex, token2: TokenIndex) bool {
+ return std.mem.indexOfScalar(u8, p.source[p.token_locs[token1].end..p.token_locs[token2].start], '\n') == null;
+ }
+
+ /// Eat a single-line doc comment on the same line as another node
+ fn parseAppendedDocComment(p: *Parser, after_token: TokenIndex) !?*Node.DocComment {
+ const comment_token = p.eatToken(.DocComment) orelse return null;
+ if (p.tokensOnSameLine(after_token, comment_token)) {
+ const node = try p.arena.allocator.create(Node.DocComment);
+ node.* = .{ .first_line = comment_token };
+ return node;
+ }
+ p.putBackToken(comment_token);
+ return null;
+ }
+
+ /// Op* Child
+ fn parsePrefixOpExpr(p: *Parser, opParseFn: NodeParseFn, childParseFn: NodeParseFn) Error!?*Node {
+ if (try opParseFn(p)) |first_op| {
+ var rightmost_op = first_op;
+ while (true) {
+ switch (rightmost_op.id) {
+ .PrefixOp => {
+ var prefix_op = rightmost_op.cast(Node.PrefixOp).?;
+ // If the token encountered was **, there will be two nodes
+ if (p.token_ids[prefix_op.op_token] == .AsteriskAsterisk) {
+ rightmost_op = prefix_op.rhs;
+ prefix_op = rightmost_op.cast(Node.PrefixOp).?;
+ }
+ if (try opParseFn(p)) |rhs| {
+ prefix_op.rhs = rhs;
+ rightmost_op = rhs;
+ } else break;
+ },
+ .AnyFrameType => {
+ const prom = rightmost_op.cast(Node.AnyFrameType).?;
+ if (try opParseFn(p)) |rhs| {
+ prom.result.?.return_type = rhs;
+ rightmost_op = rhs;
+ } else break;
+ },
+ else => unreachable,
+ }
+ }
+
+ // If any prefix op existed, a child node on the RHS is required
+ switch (rightmost_op.id) {
+ .PrefixOp => {
+ const prefix_op = rightmost_op.cast(Node.PrefixOp).?;
+ prefix_op.rhs = try p.expectNode(childParseFn, .{
+ .InvalidToken = .{ .token = p.tok_i },
+ });
+ },
+ .AnyFrameType => {
+ const prom = rightmost_op.cast(Node.AnyFrameType).?;
+ prom.result.?.return_type = try p.expectNode(childParseFn, .{
+ .InvalidToken = .{ .token = p.tok_i },
+ });
+ },
+ else => unreachable,
+ }
+
+ return first_op;
+ }
+
+ // Otherwise, the child node is optional
+ return childParseFn(p);
+ }
+
+ /// Child (Op Child)*
+ /// Child (Op Child)?
+ fn parseBinOpExpr(
+ p: *Parser,
+ opParseFn: NodeParseFn,
+ childParseFn: NodeParseFn,
+ chain: enum {
+ Once,
+ Infinitely,
+ },
+ ) Error!?*Node {
+ var res = (try childParseFn(p)) orelse return null;
+
+ while (try opParseFn(p)) |node| {
+ const right = try p.expectNode(childParseFn, .{
+ .InvalidToken = .{ .token = p.tok_i },
+ });
+ const left = res;
+ res = node;
+
+ const op = node.cast(Node.InfixOp).?;
+ op.*.lhs = left;
+ op.*.rhs = right;
+
+ switch (chain) {
+ .Once => break,
+ .Infinitely => continue,
+ }
+ }
+
+ return res;
+ }
+
+ fn createInfixOp(p: *Parser, index: TokenIndex, op: Node.InfixOp.Op) !*Node {
+ const node = try p.arena.allocator.create(Node.InfixOp);
+ node.* = .{
+ .op_token = index,
+ .lhs = undefined, // set by caller
+ .op = op,
+ .rhs = undefined, // set by caller
+ };
+ return &node.base;
+ }
+
+ fn eatToken(p: *Parser, id: Token.Id) ?TokenIndex {
+ return if (p.token_ids[p.tok_i] == id) p.nextToken() else null;
+ }
+
+ fn expectToken(p: *Parser, id: Token.Id) Error!TokenIndex {
+ return (try p.expectTokenRecoverable(id)) orelse error.ParseError;
+ }
+
+ fn expectTokenRecoverable(p: *Parser, id: Token.Id) !?TokenIndex {
+ const token = p.nextToken();
+ if (p.token_ids[token] != id) {
+ try p.errors.append(p.gpa, .{
+ .ExpectedToken = .{ .token = token, .expected_id = id },
+ });
+ // go back so that we can recover properly
+ p.putBackToken(token);
+ return null;
+ }
+ return token;
+ }
+
+ fn nextToken(p: *Parser) TokenIndex {
+ const result = p.tok_i;
+ p.tok_i += 1;
+ assert(p.token_ids[result] != .LineComment);
+ if (p.tok_i >= p.token_ids.len) return result;
+
+ while (true) {
+ if (p.token_ids[p.tok_i] != .LineComment) return result;
+ p.tok_i += 1;
+ }
+ }
+
+ fn putBackToken(p: *Parser, putting_back: TokenIndex) void {
+ while (p.tok_i > 0) {
+ p.tok_i -= 1;
+ if (p.token_ids[p.tok_i] == .LineComment) continue;
+ assert(putting_back == p.tok_i);
+ return;
+ }
+ }
+
+ fn expectNode(
+ p: *Parser,
+ parseFn: NodeParseFn,
+ /// if parsing fails
+ err: AstError,
+ ) Error!*Node {
+ return (try p.expectNodeRecoverable(parseFn, err)) orelse return error.ParseError;
+ }
+
+ fn expectNodeRecoverable(
+ p: *Parser,
+ parseFn: NodeParseFn,
+ /// if parsing fails
+ err: AstError,
+ ) !?*Node {
+ return (try parseFn(p)) orelse {
+ try p.errors.append(p.gpa, err);
+ return null;
+ };
+ }
};
-fn expectNode(
- arena: *Allocator,
- it: *TokenIterator,
- tree: *Tree,
- parseFn: NodeParseFn,
- err: AstError, // if parsing fails
-) Error!*Node {
- return (try expectNodeRecoverable(arena, it, tree, parseFn, err)) orelse
- return error.ParseError;
+fn ParseFn(comptime T: type) type {
+ return fn (p: *Parser) Error!T;
}
-fn expectNodeRecoverable(
- arena: *Allocator,
- it: *TokenIterator,
- tree: *Tree,
- parseFn: NodeParseFn,
- err: AstError, // if parsing fails
-) !?*Node {
- return (try parseFn(arena, it, tree)) orelse {
- try tree.errors.push(err);
- return null;
- };
-}
test "std.zig.parser" {
_ = @import("parser_test.zig");
diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig
index 0a3a47cf94..25fc48e46d 100644
--- a/lib/std/zig/parser_test.zig
+++ b/lib/std/zig/parser_test.zig
@@ -3180,9 +3180,8 @@ fn testParse(source: []const u8, allocator: *mem.Allocator, anything_changed: *b
const tree = try std.zig.parse(allocator, source);
defer tree.deinit();
- var error_it = tree.errors.iterator(0);
- while (error_it.next()) |parse_error| {
- const token = tree.tokens.at(parse_error.loc());
+ for (tree.errors) |*parse_error| {
+ const token = tree.token_locs[parse_error.loc()];
const loc = tree.tokenLocation(0, parse_error.loc());
try stderr.print("(memory buffer):{}:{}: error: ", .{ loc.line + 1, loc.column + 1 });
try tree.renderError(parse_error, stderr);
@@ -3271,8 +3270,6 @@ fn testError(source: []const u8, expected_errors: []const Error) !void {
std.testing.expect(tree.errors.len == expected_errors.len);
for (expected_errors) |expected, i| {
- const err = tree.errors.at(i);
-
- std.testing.expect(expected == err.*);
+ std.testing.expect(expected == tree.errors[i]);
}
}
diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig
index 2de5022876..690592bc17 100644
--- a/lib/std/zig/render.zig
+++ b/lib/std/zig/render.zig
@@ -67,24 +67,23 @@ fn renderRoot(
stream: var,
tree: *ast.Tree,
) (@TypeOf(stream).Error || Error)!void {
- var tok_it = tree.tokens.iterator(0);
-
// render all the line comments at the beginning of the file
- while (tok_it.next()) |token| {
- if (token.id != .LineComment) break;
- try stream.print("{}\n", .{mem.trimRight(u8, tree.tokenSlicePtr(token), " ")});
- if (tok_it.peek()) |next_token| {
- const loc = tree.tokenLocationPtr(token.end, next_token);
- if (loc.line >= 2) {
- try stream.writeByte('\n');
- }
+ for (tree.token_ids) |token_id, i| {
+ if (token_id != .LineComment) break;
+ const token_loc = tree.token_locs[i];
+ try stream.print("{}\n", .{mem.trimRight(u8, tree.tokenSliceLoc(token_loc), " ")});
+ const next_token = tree.token_locs[i + 1];
+ const loc = tree.tokenLocationLoc(token_loc.end, next_token);
+ if (loc.line >= 2) {
+ try stream.writeByte('\n');
}
}
var start_col: usize = 0;
- var it = tree.root_node.decls.iterator(0);
+ var decl_i: ast.NodeIndex = 0;
+ const root_decls = tree.root_node.decls();
while (true) {
- var decl = (it.next() orelse return).*;
+ var decl = root_decls[decl_i];
// This loop does the following:
//
@@ -103,8 +102,8 @@ fn renderRoot(
while (token_index != 0) {
token_index -= 1;
- const token = tree.tokens.at(token_index);
- switch (token.id) {
+ const token_id = tree.token_ids[token_index];
+ switch (token_id) {
.LineComment => {},
.DocComment => {
copy_start_token_index = token_index;
@@ -113,12 +112,13 @@ fn renderRoot(
else => break,
}
- if (mem.eql(u8, mem.trim(u8, tree.tokenSlicePtr(token)[2..], " "), "zig fmt: off")) {
+ const token_loc = tree.token_locs[token_index];
+ if (mem.eql(u8, mem.trim(u8, tree.tokenSliceLoc(token_loc)[2..], " "), "zig fmt: off")) {
if (!found_fmt_directive) {
fmt_active = false;
found_fmt_directive = true;
}
- } else if (mem.eql(u8, mem.trim(u8, tree.tokenSlicePtr(token)[2..], " "), "zig fmt: on")) {
+ } else if (mem.eql(u8, mem.trim(u8, tree.tokenSliceLoc(token_loc)[2..], " "), "zig fmt: on")) {
if (!found_fmt_directive) {
fmt_active = true;
found_fmt_directive = true;
@@ -133,25 +133,28 @@ fn renderRoot(
token_index = decl.firstToken();
while (!fmt_active) {
- decl = (it.next() orelse {
+ decl_i += 1;
+ if (decl_i >= root_decls.len) {
// If there's no next reformatted `decl`, just copy the
// remaining input tokens and bail out.
- const start = tree.tokens.at(copy_start_token_index).start;
+ const start = tree.token_locs[copy_start_token_index].start;
try copyFixingWhitespace(stream, tree.source[start..]);
return;
- }).*;
+ }
+ decl = root_decls[decl_i];
var decl_first_token_index = decl.firstToken();
while (token_index < decl_first_token_index) : (token_index += 1) {
- const token = tree.tokens.at(token_index);
- switch (token.id) {
+ const token_id = tree.token_ids[token_index];
+ switch (token_id) {
.LineComment => {},
.Eof => unreachable,
else => continue,
}
- if (mem.eql(u8, mem.trim(u8, tree.tokenSlicePtr(token)[2..], " "), "zig fmt: on")) {
+ const token_loc = tree.token_locs[token_index];
+ if (mem.eql(u8, mem.trim(u8, tree.tokenSliceLoc(token_loc)[2..], " "), "zig fmt: on")) {
fmt_active = true;
- } else if (mem.eql(u8, mem.trim(u8, tree.tokenSlicePtr(token)[2..], " "), "zig fmt: off")) {
+ } else if (mem.eql(u8, mem.trim(u8, tree.tokenSliceLoc(token_loc)[2..], " "), "zig fmt: off")) {
fmt_active = false;
}
}
@@ -163,8 +166,8 @@ fn renderRoot(
token_index = copy_end_token_index;
while (token_index != 0) {
token_index -= 1;
- const token = tree.tokens.at(token_index);
- switch (token.id) {
+ const token_id = tree.token_ids[token_index];
+ switch (token_id) {
.LineComment => {},
.DocComment => {
copy_end_token_index = token_index;
@@ -174,30 +177,33 @@ fn renderRoot(
}
}
- const start = tree.tokens.at(copy_start_token_index).start;
- const end = tree.tokens.at(copy_end_token_index).start;
+ const start = tree.token_locs[copy_start_token_index].start;
+ const end = tree.token_locs[copy_end_token_index].start;
try copyFixingWhitespace(stream, tree.source[start..end]);
}
try renderTopLevelDecl(allocator, stream, tree, 0, &start_col, decl);
- if (it.peek()) |next_decl| {
- try renderExtraNewline(tree, stream, &start_col, next_decl.*);
- }
+ decl_i += 1;
+ if (decl_i >= root_decls.len) return;
+ try renderExtraNewline(tree, stream, &start_col, root_decls[decl_i]);
}
}
fn renderExtraNewline(tree: *ast.Tree, stream: var, start_col: *usize, node: *ast.Node) @TypeOf(stream).Error!void {
- const first_token = node.firstToken();
+ return renderExtraNewlineToken(tree, stream, start_col, node.firstToken());
+}
+
+fn renderExtraNewlineToken(tree: *ast.Tree, stream: var, start_col: *usize, first_token: ast.TokenIndex,) @TypeOf(stream).Error!void {
var prev_token = first_token;
if (prev_token == 0) return;
var newline_threshold: usize = 2;
- while (tree.tokens.at(prev_token - 1).id == .DocComment) {
- if (tree.tokenLocation(tree.tokens.at(prev_token - 1).end, prev_token).line == 1) {
+ while (tree.token_ids[prev_token - 1] == .DocComment) {
+ if (tree.tokenLocation(tree.token_locs[prev_token - 1].end, prev_token).line == 1) {
newline_threshold += 1;
}
prev_token -= 1;
}
- const prev_token_end = tree.tokens.at(prev_token - 1).end;
+ const prev_token_end = tree.token_locs[prev_token - 1].end;
const loc = tree.tokenLocation(prev_token_end, first_token);
if (loc.line >= newline_threshold) {
try stream.writeByte('\n');
@@ -262,7 +268,7 @@ fn renderContainerDecl(allocator: *mem.Allocator, stream: var, tree: *ast.Tree,
const src_has_trailing_comma = blk: {
const maybe_comma = tree.nextToken(field.lastToken());
- break :blk tree.tokens.at(maybe_comma).id == .Comma;
+ break :blk tree.token_ids[maybe_comma] == .Comma;
};
// The trailing comma is emitted at the end, but if it's not present
@@ -324,11 +330,18 @@ fn renderContainerDecl(allocator: *mem.Allocator, stream: var, tree: *ast.Tree,
.DocComment => {
const comment = @fieldParentPtr(ast.Node.DocComment, "base", decl);
- var it = comment.lines.iterator(0);
- while (it.next()) |line_token_index| {
- try renderToken(tree, stream, line_token_index.*, indent, start_col, .Newline);
- if (it.peek()) |_| {
+ const kind = tree.token_ids[comment.first_line];
+ try renderToken(tree, stream, comment.first_line, indent, start_col, .Newline);
+ var tok_i = comment.first_line + 1;
+ while (true) : (tok_i += 1) {
+ const tok_id = tree.token_ids[tok_i];
+ if (tok_id == kind) {
try stream.writeByteNTimes(' ', indent);
+ try renderToken(tree, stream, tok_i, indent, start_col, .Newline);
+ } else if (tok_id == .LineComment) {
+ continue;
+ } else {
+ break;
}
}
},
@@ -358,20 +371,20 @@ fn renderExpression(
try renderToken(tree, stream, tree.nextToken(label), indent, start_col, Space.Space);
}
- if (block.statements.len == 0) {
+ if (block.statements_len == 0) {
try renderToken(tree, stream, block.lbrace, indent + indent_delta, start_col, Space.None);
return renderToken(tree, stream, block.rbrace, indent, start_col, space);
} else {
const block_indent = indent + indent_delta;
try renderToken(tree, stream, block.lbrace, block_indent, start_col, Space.Newline);
- var it = block.statements.iterator(0);
- while (it.next()) |statement| {
+ const block_statements = block.statements();
+ for (block_statements) |statement, i| {
try stream.writeByteNTimes(' ', block_indent);
- try renderStatement(allocator, stream, tree, block_indent, start_col, statement.*);
+ try renderStatement(allocator, stream, tree, block_indent, start_col, statement);
- if (it.peek()) |next_statement| {
- try renderExtraNewline(tree, stream, start_col, next_statement.*);
+ if (i + 1 < block_statements.len) {
+ try renderExtraNewline(tree, stream, start_col, block_statements[i + 1]);
}
}
@@ -426,13 +439,13 @@ fn renderExpression(
try renderExpression(allocator, stream, tree, indent, start_col, infix_op_node.lhs, op_space);
const after_op_space = blk: {
- const loc = tree.tokenLocation(tree.tokens.at(infix_op_node.op_token).end, tree.nextToken(infix_op_node.op_token));
+ const loc = tree.tokenLocation(tree.token_locs[infix_op_node.op_token].end, tree.nextToken(infix_op_node.op_token));
break :blk if (loc.line == 0) op_space else Space.Newline;
};
try renderToken(tree, stream, infix_op_node.op_token, indent, start_col, after_op_space);
if (after_op_space == Space.Newline and
- tree.tokens.at(tree.nextToken(infix_op_node.op_token)).id != .MultilineStringLiteralLine)
+ tree.token_ids[tree.nextToken(infix_op_node.op_token)] != .MultilineStringLiteralLine)
{
try stream.writeByteNTimes(' ', indent + indent_delta);
start_col.* = indent + indent_delta;
@@ -453,10 +466,10 @@ fn renderExpression(
switch (prefix_op_node.op) {
.PtrType => |ptr_info| {
- const op_tok_id = tree.tokens.at(prefix_op_node.op_token).id;
+ const op_tok_id = tree.token_ids[prefix_op_node.op_token];
switch (op_tok_id) {
.Asterisk, .AsteriskAsterisk => try stream.writeByte('*'),
- .LBracket => if (tree.tokens.at(prefix_op_node.op_token + 2).id == .Identifier)
+ .LBracket => if (tree.token_ids[prefix_op_node.op_token + 2] == .Identifier)
try stream.writeAll("[*c")
else
try stream.writeAll("[*"),
@@ -568,8 +581,8 @@ fn renderExpression(
try renderToken(tree, stream, lbracket, indent, start_col, Space.None); // [
- const starts_with_comment = tree.tokens.at(lbracket + 1).id == .LineComment;
- const ends_with_comment = tree.tokens.at(rbracket - 1).id == .LineComment;
+ const starts_with_comment = tree.token_ids[lbracket + 1] == .LineComment;
+ const ends_with_comment = tree.token_ids[rbracket - 1] == .LineComment;
const new_indent = if (ends_with_comment) indent + indent_delta else indent;
const new_space = if (ends_with_comment) Space.Newline else Space.None;
try renderExpression(allocator, stream, tree, new_indent, start_col, array_info.len_expr, new_space);
@@ -610,80 +623,371 @@ fn renderExpression(
return renderExpression(allocator, stream, tree, indent, start_col, prefix_op_node.rhs, space);
},
+ .ArrayInitializer, .ArrayInitializerDot => {
+ var rtoken: ast.TokenIndex = undefined;
+ var exprs: []*ast.Node = undefined;
+ const lhs: union(enum) {dot: ast.TokenIndex, node: *ast.Node } = switch (base.id){
+ .ArrayInitializerDot => blk: {
+ const casted = @fieldParentPtr(ast.Node.ArrayInitializerDot, "base", base);
+ rtoken = casted.rtoken;
+ exprs = casted.list();
+ break :blk .{ .dot = casted.dot };
+ },
+ .ArrayInitializer => blk: {
+ const casted = @fieldParentPtr(ast.Node.ArrayInitializer, "base", base);
+ rtoken = casted.rtoken;
+ exprs = casted.list();
+ break :blk .{ .node = casted.lhs };
+ },
+ else => unreachable,
+ };
+
+ const lbrace = switch (lhs) {
+ .dot => |dot| tree.nextToken(dot),
+ .node => |node| tree.nextToken(node.lastToken()),
+ };
+
+ if (exprs.len == 0) {
+ switch (lhs) {
+ .dot => |dot| try renderToken(tree, stream, dot, indent, start_col, Space.None),
+ .node => |node| try renderExpression(allocator, stream, tree, indent, start_col, node, Space.None),
+ }
+ try renderToken(tree, stream, lbrace, indent, start_col, Space.None);
+ return renderToken(tree, stream, rtoken, indent, start_col, space);
+ }
+
+ if (exprs.len == 1 and tree.token_ids[exprs[0].lastToken() + 1] == .RBrace) {
+ const expr = exprs[0];
+ switch (lhs) {
+ .dot => |dot| try renderToken(tree, stream, dot, indent, start_col, Space.None),
+ .node => |node| try renderExpression(allocator, stream, tree, indent, start_col, node, Space.None),
+ }
+ try renderToken(tree, stream, lbrace, indent, start_col, Space.None);
+ try renderExpression(allocator, stream, tree, indent, start_col, expr, Space.None);
+ return renderToken(tree, stream, rtoken, indent, start_col, space);
+ }
+
+ switch (lhs) {
+ .dot => |dot| try renderToken(tree, stream, dot, indent, start_col, Space.None),
+ .node => |node| try renderExpression(allocator, stream, tree, indent, start_col, node, Space.None),
+ }
+
+ // scan to find row size
+ const maybe_row_size: ?usize = blk: {
+ var count: usize = 1;
+ for (exprs) |expr, i| {
+ if (i + 1 < exprs.len) {
+ const expr_last_token = expr.lastToken() + 1;
+ const loc = tree.tokenLocation(tree.token_locs[expr_last_token].end, exprs[i+1].firstToken());
+ if (loc.line != 0) break :blk count;
+ count += 1;
+ } else {
+ const expr_last_token = expr.lastToken();
+ const loc = tree.tokenLocation(tree.token_locs[expr_last_token].end, rtoken);
+ if (loc.line == 0) {
+ // all on one line
+ const src_has_trailing_comma = trailblk: {
+ const maybe_comma = tree.prevToken(rtoken);
+ break :trailblk tree.token_ids[maybe_comma] == .Comma;
+ };
+ if (src_has_trailing_comma) {
+ break :blk 1; // force row size 1
+ } else {
+ break :blk null; // no newlines
+ }
+ }
+ break :blk count;
+ }
+ }
+ unreachable;
+ };
+
+ if (maybe_row_size) |row_size| {
+ // A place to store the width of each expression and its column's maximum
+ var widths = try allocator.alloc(usize, exprs.len + row_size);
+ defer allocator.free(widths);
+ mem.set(usize, widths, 0);
+
+ var expr_widths = widths[0 .. widths.len - row_size];
+ var column_widths = widths[widths.len - row_size ..];
+
+ // Null stream for counting the printed length of each expression
+ var counting_stream = std.io.countingOutStream(std.io.null_out_stream);
+
+ for (exprs) |expr, i| {
+ counting_stream.bytes_written = 0;
+ var dummy_col: usize = 0;
+ try renderExpression(allocator, counting_stream.outStream(), tree, indent, &dummy_col, expr, Space.None);
+ const width = @intCast(usize, counting_stream.bytes_written);
+ const col = i % row_size;
+ column_widths[col] = std.math.max(column_widths[col], width);
+ expr_widths[i] = width;
+ }
+
+ var new_indent = indent + indent_delta;
+
+ if (tree.token_ids[tree.nextToken(lbrace)] != .MultilineStringLiteralLine) {
+ try renderToken(tree, stream, lbrace, new_indent, start_col, Space.Newline);
+ try stream.writeByteNTimes(' ', new_indent);
+ } else {
+ new_indent -= indent_delta;
+ try renderToken(tree, stream, lbrace, new_indent, start_col, Space.None);
+ }
+
+ var col: usize = 1;
+ for (exprs) |expr, i| {
+ if (i + 1 < exprs.len) {
+ const next_expr = exprs[i + 1];
+ try renderExpression(allocator, stream, tree, new_indent, start_col, expr, Space.None);
+
+ const comma = tree.nextToken(expr.lastToken());
+
+ if (col != row_size) {
+ try renderToken(tree, stream, comma, new_indent, start_col, Space.Space); // ,
+
+ const padding = column_widths[i % row_size] - expr_widths[i];
+ try stream.writeByteNTimes(' ', padding);
+
+ col += 1;
+ continue;
+ }
+ col = 1;
+
+ if (tree.token_ids[tree.nextToken(comma)] != .MultilineStringLiteralLine) {
+ try renderToken(tree, stream, comma, new_indent, start_col, Space.Newline); // ,
+ } else {
+ try renderToken(tree, stream, comma, new_indent, start_col, Space.None); // ,
+ }
+
+ try renderExtraNewline(tree, stream, start_col, next_expr);
+ if (next_expr.id != .MultilineStringLiteral) {
+ try stream.writeByteNTimes(' ', new_indent);
+ }
+ } else {
+ try renderExpression(allocator, stream, tree, new_indent, start_col, expr, Space.Comma); // ,
+ }
+ }
+ if (exprs[exprs.len - 1].id != .MultilineStringLiteral) {
+ try stream.writeByteNTimes(' ', indent);
+ }
+ return renderToken(tree, stream, rtoken, indent, start_col, space);
+ } else {
+ try renderToken(tree, stream, lbrace, indent, start_col, Space.Space);
+ for (exprs) |expr, i| {
+ if (i + 1 < exprs.len) {
+ try renderExpression(allocator, stream, tree, indent, start_col, expr, Space.None);
+ const comma = tree.nextToken(expr.lastToken());
+ try renderToken(tree, stream, comma, indent, start_col, Space.Space); // ,
+ } else {
+ try renderExpression(allocator, stream, tree, indent, start_col, expr, Space.Space);
+ }
+ }
+
+ return renderToken(tree, stream, rtoken, indent, start_col, space);
+ }
+ },
+
+ .StructInitializer, .StructInitializerDot => {
+ var rtoken: ast.TokenIndex = undefined;
+ var field_inits: []*ast.Node = undefined;
+ const lhs: union(enum) {dot: ast.TokenIndex, node: *ast.Node } = switch (base.id){
+ .StructInitializerDot => blk: {
+ const casted = @fieldParentPtr(ast.Node.StructInitializerDot, "base", base);
+ rtoken = casted.rtoken;
+ field_inits = casted.list();
+ break :blk .{ .dot = casted.dot };
+ },
+ .StructInitializer => blk: {
+ const casted = @fieldParentPtr(ast.Node.StructInitializer, "base", base);
+ rtoken = casted.rtoken;
+ field_inits = casted.list();
+ break :blk .{ .node = casted.lhs };
+ },
+ else => unreachable,
+ };
+
+ const lbrace = switch (lhs) {
+ .dot => |dot| tree.nextToken(dot),
+ .node => |node| tree.nextToken(node.lastToken()),
+ };
+
+ if (field_inits.len == 0) {
+ switch (lhs) {
+ .dot => |dot| try renderToken(tree, stream, dot, indent, start_col, Space.None),
+ .node => |node| try renderExpression(allocator, stream, tree, indent, start_col, node, Space.None),
+ }
+ try renderToken(tree, stream, lbrace, indent + indent_delta, start_col, Space.None);
+ return renderToken(tree, stream, rtoken, indent, start_col, space);
+ }
+
+ const src_has_trailing_comma = blk: {
+ const maybe_comma = tree.prevToken(rtoken);
+ break :blk tree.token_ids[maybe_comma] == .Comma;
+ };
+
+ const src_same_line = blk: {
+ const loc = tree.tokenLocation(tree.token_locs[lbrace].end, rtoken);
+ break :blk loc.line == 0;
+ };
+
+ const expr_outputs_one_line = blk: {
+ // render field expressions until a LF is found
+ for (field_inits) |field_init| {
+ var find_stream = FindByteOutStream.init('\n');
+ var dummy_col: usize = 0;
+ try renderExpression(allocator, find_stream.outStream(), tree, 0, &dummy_col, field_init, Space.None);
+ if (find_stream.byte_found) break :blk false;
+ }
+ break :blk true;
+ };
+
+ if (field_inits.len == 1) blk: {
+ const field_init = field_inits[0].cast(ast.Node.FieldInitializer).?;
+
+ switch (field_init.expr.id) {
+ .StructInitializer,
+ .StructInitializerDot,
+ => break :blk,
+
+ else => {},
+ }
+
+ // if the expression outputs to multiline, make this struct multiline
+ if (!expr_outputs_one_line or src_has_trailing_comma) {
+ break :blk;
+ }
+
+ switch (lhs) {
+ .dot => |dot| try renderToken(tree, stream, dot, indent, start_col, Space.None),
+ .node => |node| try renderExpression(allocator, stream, tree, indent, start_col, node, Space.None),
+ }
+ try renderToken(tree, stream, lbrace, indent, start_col, Space.Space);
+ try renderExpression(allocator, stream, tree, indent, start_col, &field_init.base, Space.Space);
+ return renderToken(tree, stream, rtoken, indent, start_col, space);
+ }
+
+ if (!src_has_trailing_comma and src_same_line and expr_outputs_one_line) {
+ // render all on one line, no trailing comma
+ switch (lhs) {
+ .dot => |dot| try renderToken(tree, stream, dot, indent, start_col, Space.None),
+ .node => |node| try renderExpression(allocator, stream, tree, indent, start_col, node, Space.None),
+ }
+ try renderToken(tree, stream, lbrace, indent, start_col, Space.Space);
+
+ for (field_inits) |field_init, i| {
+ if (i + 1 < field_inits.len) {
+ try renderExpression(allocator, stream, tree, indent, start_col, field_init, Space.None);
+
+ const comma = tree.nextToken(field_init.lastToken());
+ try renderToken(tree, stream, comma, indent, start_col, Space.Space);
+ } else {
+ try renderExpression(allocator, stream, tree, indent, start_col, field_init, Space.Space);
+ }
+ }
+
+ return renderToken(tree, stream, rtoken, indent, start_col, space);
+ }
+
+ const new_indent = indent + indent_delta;
+
+ switch (lhs) {
+ .dot => |dot| try renderToken(tree, stream, dot, new_indent, start_col, Space.None),
+ .node => |node| try renderExpression(allocator, stream, tree, new_indent, start_col, node, Space.None),
+ }
+ try renderToken(tree, stream, lbrace, new_indent, start_col, Space.Newline);
+
+ for (field_inits) |field_init, i| {
+ try stream.writeByteNTimes(' ', new_indent);
+
+ if (i + 1 < field_inits.len) {
+ try renderExpression(allocator, stream, tree, new_indent, start_col, field_init, Space.None);
+
+ const comma = tree.nextToken(field_init.lastToken());
+ try renderToken(tree, stream, comma, new_indent, start_col, Space.Newline);
+
+ try renderExtraNewline(tree, stream, start_col, field_inits[i + 1]);
+ } else {
+ try renderExpression(allocator, stream, tree, new_indent, start_col, field_init, Space.Comma);
+ }
+ }
+
+ try stream.writeByteNTimes(' ', indent);
+ return renderToken(tree, stream, rtoken, indent, start_col, space);
+ },
+
+ .Call => {
+ const call = @fieldParentPtr(ast.Node.Call, "base", base);
+ if (call.async_token) |async_token| {
+ try renderToken(tree, stream, async_token, indent, start_col, Space.Space);
+ }
+
+ try renderExpression(allocator, stream, tree, indent, start_col, call.lhs, Space.None);
+
+ const lparen = tree.nextToken(call.lhs.lastToken());
+
+ if (call.params_len == 0) {
+ try renderToken(tree, stream, lparen, indent, start_col, Space.None);
+ return renderToken(tree, stream, call.rtoken, indent, start_col, space);
+ }
+
+ const src_has_trailing_comma = blk: {
+ const maybe_comma = tree.prevToken(call.rtoken);
+ break :blk tree.token_ids[maybe_comma] == .Comma;
+ };
+
+ if (src_has_trailing_comma) {
+ const new_indent = indent + indent_delta;
+ try renderToken(tree, stream, lparen, new_indent, start_col, Space.Newline);
+
+ const params = call.params();
+ for (params) |param_node, i| {
+ const param_node_new_indent = if (param_node.id == .MultilineStringLiteral) blk: {
+ break :blk indent;
+ } else blk: {
+ try stream.writeByteNTimes(' ', new_indent);
+ break :blk new_indent;
+ };
+
+ if (i + 1 < params.len) {
+ try renderExpression(allocator, stream, tree, param_node_new_indent, start_col, param_node, Space.None);
+ const comma = tree.nextToken(param_node.lastToken());
+ try renderToken(tree, stream, comma, new_indent, start_col, Space.Newline); // ,
+ try renderExtraNewline(tree, stream, start_col, params[i + 1]);
+ } else {
+ try renderExpression(allocator, stream, tree, param_node_new_indent, start_col, param_node, Space.Comma);
+ try stream.writeByteNTimes(' ', indent);
+ return renderToken(tree, stream, call.rtoken, indent, start_col, space);
+ }
+ }
+ }
+
+ try renderToken(tree, stream, lparen, indent, start_col, Space.None); // (
+
+ const params = call.params();
+ for (params) |param_node, i| {
+ try renderExpression(allocator, stream, tree, indent, start_col, param_node, Space.None);
+
+ if (i + 1 < params.len) {
+ const comma = tree.nextToken(param_node.lastToken());
+ try renderToken(tree, stream, comma, indent, start_col, Space.Space);
+ }
+ }
+ return renderToken(tree, stream, call.rtoken, indent, start_col, space);
+ },
+
.SuffixOp => {
const suffix_op = @fieldParentPtr(ast.Node.SuffixOp, "base", base);
switch (suffix_op.op) {
- .Call => |*call_info| {
- if (call_info.async_token) |async_token| {
- try renderToken(tree, stream, async_token, indent, start_col, Space.Space);
- }
-
- try renderExpression(allocator, stream, tree, indent, start_col, suffix_op.lhs.node, Space.None);
-
- const lparen = tree.nextToken(suffix_op.lhs.node.lastToken());
-
- if (call_info.params.len == 0) {
- try renderToken(tree, stream, lparen, indent, start_col, Space.None);
- return renderToken(tree, stream, suffix_op.rtoken, indent, start_col, space);
- }
-
- const src_has_trailing_comma = blk: {
- const maybe_comma = tree.prevToken(suffix_op.rtoken);
- break :blk tree.tokens.at(maybe_comma).id == .Comma;
- };
-
- if (src_has_trailing_comma) {
- const new_indent = indent + indent_delta;
- try renderToken(tree, stream, lparen, new_indent, start_col, Space.Newline);
-
- var it = call_info.params.iterator(0);
- while (true) {
- const param_node = it.next().?;
-
- const param_node_new_indent = if (param_node.*.id == .MultilineStringLiteral) blk: {
- break :blk indent;
- } else blk: {
- try stream.writeByteNTimes(' ', new_indent);
- break :blk new_indent;
- };
-
- if (it.peek()) |next_node| {
- try renderExpression(allocator, stream, tree, param_node_new_indent, start_col, param_node.*, Space.None);
- const comma = tree.nextToken(param_node.*.lastToken());
- try renderToken(tree, stream, comma, new_indent, start_col, Space.Newline); // ,
- try renderExtraNewline(tree, stream, start_col, next_node.*);
- } else {
- try renderExpression(allocator, stream, tree, param_node_new_indent, start_col, param_node.*, Space.Comma);
- try stream.writeByteNTimes(' ', indent);
- return renderToken(tree, stream, suffix_op.rtoken, indent, start_col, space);
- }
- }
- }
-
- try renderToken(tree, stream, lparen, indent, start_col, Space.None); // (
-
- var it = call_info.params.iterator(0);
- while (it.next()) |param_node| {
- try renderExpression(allocator, stream, tree, indent, start_col, param_node.*, Space.None);
-
- if (it.peek() != null) {
- const comma = tree.nextToken(param_node.*.lastToken());
- try renderToken(tree, stream, comma, indent, start_col, Space.Space);
- }
- }
- return renderToken(tree, stream, suffix_op.rtoken, indent, start_col, space);
- },
-
.ArrayAccess => |index_expr| {
- const lbracket = tree.nextToken(suffix_op.lhs.node.lastToken());
+ const lbracket = tree.nextToken(suffix_op.lhs.lastToken());
const rbracket = tree.nextToken(index_expr.lastToken());
- try renderExpression(allocator, stream, tree, indent, start_col, suffix_op.lhs.node, Space.None);
+ try renderExpression(allocator, stream, tree, indent, start_col, suffix_op.lhs, Space.None);
try renderToken(tree, stream, lbracket, indent, start_col, Space.None); // [
- const starts_with_comment = tree.tokens.at(lbracket + 1).id == .LineComment;
- const ends_with_comment = tree.tokens.at(rbracket - 1).id == .LineComment;
+ const starts_with_comment = tree.token_ids[lbracket + 1] == .LineComment;
+ const ends_with_comment = tree.token_ids[rbracket - 1] == .LineComment;
const new_indent = if (ends_with_comment) indent + indent_delta else indent;
const new_space = if (ends_with_comment) Space.Newline else Space.None;
try renderExpression(allocator, stream, tree, new_indent, start_col, index_expr, new_space);
@@ -697,18 +1001,18 @@ fn renderExpression(
},
.Deref => {
- try renderExpression(allocator, stream, tree, indent, start_col, suffix_op.lhs.node, Space.None);
+ try renderExpression(allocator, stream, tree, indent, start_col, suffix_op.lhs, Space.None);
return renderToken(tree, stream, suffix_op.rtoken, indent, start_col, space); // .*
},
.UnwrapOptional => {
- try renderExpression(allocator, stream, tree, indent, start_col, suffix_op.lhs.node, Space.None);
+ try renderExpression(allocator, stream, tree, indent, start_col, suffix_op.lhs, Space.None);
try renderToken(tree, stream, tree.prevToken(suffix_op.rtoken), indent, start_col, Space.None); // .
return renderToken(tree, stream, suffix_op.rtoken, indent, start_col, space); // ?
},
.Slice => |range| {
- try renderExpression(allocator, stream, tree, indent, start_col, suffix_op.lhs.node, Space.None);
+ try renderExpression(allocator, stream, tree, indent, start_col, suffix_op.lhs, Space.None);
const lbracket = tree.prevToken(range.start.firstToken());
const dotdot = tree.nextToken(range.start.lastToken());
@@ -733,269 +1037,6 @@ fn renderExpression(
return renderToken(tree, stream, suffix_op.rtoken, indent, start_col, space); // ]
},
- .StructInitializer => |*field_inits| {
- const lbrace = switch (suffix_op.lhs) {
- .dot => |dot| tree.nextToken(dot),
- .node => |node| tree.nextToken(node.lastToken()),
- };
-
- if (field_inits.len == 0) {
- switch (suffix_op.lhs) {
- .dot => |dot| try renderToken(tree, stream, dot, indent, start_col, Space.None),
- .node => |node| try renderExpression(allocator, stream, tree, indent, start_col, node, Space.None),
- }
- try renderToken(tree, stream, lbrace, indent + indent_delta, start_col, Space.None);
- return renderToken(tree, stream, suffix_op.rtoken, indent, start_col, space);
- }
-
- const src_has_trailing_comma = blk: {
- const maybe_comma = tree.prevToken(suffix_op.rtoken);
- break :blk tree.tokens.at(maybe_comma).id == .Comma;
- };
-
- const src_same_line = blk: {
- const loc = tree.tokenLocation(tree.tokens.at(lbrace).end, suffix_op.rtoken);
- break :blk loc.line == 0;
- };
-
- const expr_outputs_one_line = blk: {
- // render field expressions until a LF is found
- var it = field_inits.iterator(0);
- while (it.next()) |field_init| {
- var find_stream = FindByteOutStream.init('\n');
- var dummy_col: usize = 0;
- try renderExpression(allocator, find_stream.outStream(), tree, 0, &dummy_col, field_init.*, Space.None);
- if (find_stream.byte_found) break :blk false;
- }
- break :blk true;
- };
-
- if (field_inits.len == 1) blk: {
- const field_init = field_inits.at(0).*.cast(ast.Node.FieldInitializer).?;
-
- if (field_init.expr.cast(ast.Node.SuffixOp)) |nested_suffix_op| {
- if (nested_suffix_op.op == .StructInitializer) {
- break :blk;
- }
- }
-
- // if the expression outputs to multiline, make this struct multiline
- if (!expr_outputs_one_line or src_has_trailing_comma) {
- break :blk;
- }
-
- switch (suffix_op.lhs) {
- .dot => |dot| try renderToken(tree, stream, dot, indent, start_col, Space.None),
- .node => |node| try renderExpression(allocator, stream, tree, indent, start_col, node, Space.None),
- }
- try renderToken(tree, stream, lbrace, indent, start_col, Space.Space);
- try renderExpression(allocator, stream, tree, indent, start_col, &field_init.base, Space.Space);
- return renderToken(tree, stream, suffix_op.rtoken, indent, start_col, space);
- }
-
- if (!src_has_trailing_comma and src_same_line and expr_outputs_one_line) {
- // render all on one line, no trailing comma
- switch (suffix_op.lhs) {
- .dot => |dot| try renderToken(tree, stream, dot, indent, start_col, Space.None),
- .node => |node| try renderExpression(allocator, stream, tree, indent, start_col, node, Space.None),
- }
- try renderToken(tree, stream, lbrace, indent, start_col, Space.Space);
-
- var it = field_inits.iterator(0);
- while (it.next()) |field_init| {
- if (it.peek() != null) {
- try renderExpression(allocator, stream, tree, indent, start_col, field_init.*, Space.None);
-
- const comma = tree.nextToken(field_init.*.lastToken());
- try renderToken(tree, stream, comma, indent, start_col, Space.Space);
- } else {
- try renderExpression(allocator, stream, tree, indent, start_col, field_init.*, Space.Space);
- }
- }
-
- return renderToken(tree, stream, suffix_op.rtoken, indent, start_col, space);
- }
-
- const new_indent = indent + indent_delta;
-
- switch (suffix_op.lhs) {
- .dot => |dot| try renderToken(tree, stream, dot, new_indent, start_col, Space.None),
- .node => |node| try renderExpression(allocator, stream, tree, new_indent, start_col, node, Space.None),
- }
- try renderToken(tree, stream, lbrace, new_indent, start_col, Space.Newline);
-
- var it = field_inits.iterator(0);
- while (it.next()) |field_init| {
- try stream.writeByteNTimes(' ', new_indent);
-
- if (it.peek()) |next_field_init| {
- try renderExpression(allocator, stream, tree, new_indent, start_col, field_init.*, Space.None);
-
- const comma = tree.nextToken(field_init.*.lastToken());
- try renderToken(tree, stream, comma, new_indent, start_col, Space.Newline);
-
- try renderExtraNewline(tree, stream, start_col, next_field_init.*);
- } else {
- try renderExpression(allocator, stream, tree, new_indent, start_col, field_init.*, Space.Comma);
- }
- }
-
- try stream.writeByteNTimes(' ', indent);
- return renderToken(tree, stream, suffix_op.rtoken, indent, start_col, space);
- },
-
- .ArrayInitializer => |*exprs| {
- const lbrace = switch (suffix_op.lhs) {
- .dot => |dot| tree.nextToken(dot),
- .node => |node| tree.nextToken(node.lastToken()),
- };
-
- if (exprs.len == 0) {
- switch (suffix_op.lhs) {
- .dot => |dot| try renderToken(tree, stream, dot, indent, start_col, Space.None),
- .node => |node| try renderExpression(allocator, stream, tree, indent, start_col, node, Space.None),
- }
- try renderToken(tree, stream, lbrace, indent, start_col, Space.None);
- return renderToken(tree, stream, suffix_op.rtoken, indent, start_col, space);
- }
- if (exprs.len == 1 and tree.tokens.at(exprs.at(0).*.lastToken() + 1).id == .RBrace) {
- const expr = exprs.at(0).*;
-
- switch (suffix_op.lhs) {
- .dot => |dot| try renderToken(tree, stream, dot, indent, start_col, Space.None),
- .node => |node| try renderExpression(allocator, stream, tree, indent, start_col, node, Space.None),
- }
- try renderToken(tree, stream, lbrace, indent, start_col, Space.None);
- try renderExpression(allocator, stream, tree, indent, start_col, expr, Space.None);
- return renderToken(tree, stream, suffix_op.rtoken, indent, start_col, space);
- }
-
- switch (suffix_op.lhs) {
- .dot => |dot| try renderToken(tree, stream, dot, indent, start_col, Space.None),
- .node => |node| try renderExpression(allocator, stream, tree, indent, start_col, node, Space.None),
- }
-
- // scan to find row size
- const maybe_row_size: ?usize = blk: {
- var count: usize = 1;
- var it = exprs.iterator(0);
- while (true) {
- const expr = it.next().?.*;
- if (it.peek()) |next_expr| {
- const expr_last_token = expr.*.lastToken() + 1;
- const loc = tree.tokenLocation(tree.tokens.at(expr_last_token).end, next_expr.*.firstToken());
- if (loc.line != 0) break :blk count;
- count += 1;
- } else {
- const expr_last_token = expr.*.lastToken();
- const loc = tree.tokenLocation(tree.tokens.at(expr_last_token).end, suffix_op.rtoken);
- if (loc.line == 0) {
- // all on one line
- const src_has_trailing_comma = trailblk: {
- const maybe_comma = tree.prevToken(suffix_op.rtoken);
- break :trailblk tree.tokens.at(maybe_comma).id == .Comma;
- };
- if (src_has_trailing_comma) {
- break :blk 1; // force row size 1
- } else {
- break :blk null; // no newlines
- }
- }
- break :blk count;
- }
- }
- };
-
- if (maybe_row_size) |row_size| {
- // A place to store the width of each expression and its column's maximum
- var widths = try allocator.alloc(usize, exprs.len + row_size);
- defer allocator.free(widths);
- mem.set(usize, widths, 0);
-
- var expr_widths = widths[0 .. widths.len - row_size];
- var column_widths = widths[widths.len - row_size ..];
-
- // Null stream for counting the printed length of each expression
- var counting_stream = std.io.countingOutStream(std.io.null_out_stream);
-
- var it = exprs.iterator(0);
- var i: usize = 0;
-
- while (it.next()) |expr| : (i += 1) {
- counting_stream.bytes_written = 0;
- var dummy_col: usize = 0;
- try renderExpression(allocator, counting_stream.outStream(), tree, indent, &dummy_col, expr.*, Space.None);
- const width = @intCast(usize, counting_stream.bytes_written);
- const col = i % row_size;
- column_widths[col] = std.math.max(column_widths[col], width);
- expr_widths[i] = width;
- }
-
- var new_indent = indent + indent_delta;
-
- if (tree.tokens.at(tree.nextToken(lbrace)).id != .MultilineStringLiteralLine) {
- try renderToken(tree, stream, lbrace, new_indent, start_col, Space.Newline);
- try stream.writeByteNTimes(' ', new_indent);
- } else {
- new_indent -= indent_delta;
- try renderToken(tree, stream, lbrace, new_indent, start_col, Space.None);
- }
-
- it.set(0);
- i = 0;
- var col: usize = 1;
- while (it.next()) |expr| : (i += 1) {
- if (it.peek()) |next_expr| {
- try renderExpression(allocator, stream, tree, new_indent, start_col, expr.*, Space.None);
-
- const comma = tree.nextToken(expr.*.lastToken());
-
- if (col != row_size) {
- try renderToken(tree, stream, comma, new_indent, start_col, Space.Space); // ,
-
- const padding = column_widths[i % row_size] - expr_widths[i];
- try stream.writeByteNTimes(' ', padding);
-
- col += 1;
- continue;
- }
- col = 1;
-
- if (tree.tokens.at(tree.nextToken(comma)).id != .MultilineStringLiteralLine) {
- try renderToken(tree, stream, comma, new_indent, start_col, Space.Newline); // ,
- } else {
- try renderToken(tree, stream, comma, new_indent, start_col, Space.None); // ,
- }
-
- try renderExtraNewline(tree, stream, start_col, next_expr.*);
- if (next_expr.*.id != .MultilineStringLiteral) {
- try stream.writeByteNTimes(' ', new_indent);
- }
- } else {
- try renderExpression(allocator, stream, tree, new_indent, start_col, expr.*, Space.Comma); // ,
- }
- }
- const last_node = it.prev().?;
- if (last_node.*.id != .MultilineStringLiteral) {
- try stream.writeByteNTimes(' ', indent);
- }
- return renderToken(tree, stream, suffix_op.rtoken, indent, start_col, space);
- } else {
- try renderToken(tree, stream, lbrace, indent, start_col, Space.Space);
- var it = exprs.iterator(0);
- while (it.next()) |expr| {
- if (it.peek()) |next_expr| {
- try renderExpression(allocator, stream, tree, indent, start_col, expr.*, Space.None);
- const comma = tree.nextToken(expr.*.lastToken());
- try renderToken(tree, stream, comma, indent, start_col, Space.Space); // ,
- } else {
- try renderExpression(allocator, stream, tree, indent, start_col, expr.*, Space.Space);
- }
- }
-
- return renderToken(tree, stream, suffix_op.rtoken, indent, start_col, space);
- }
- },
}
},
@@ -1179,7 +1220,7 @@ fn renderExpression(
},
}
- if (container_decl.fields_and_decls.len == 0) {
+ if (container_decl.fields_and_decls_len == 0) {
try renderToken(tree, stream, container_decl.lbrace_token, indent + indent_delta, start_col, Space.None); // {
return renderToken(tree, stream, container_decl.rbrace_token, indent, start_col, space); // }
}
@@ -1188,23 +1229,24 @@ fn renderExpression(
var maybe_comma = tree.prevToken(container_decl.lastToken());
// Doc comments for a field may also appear after the comma, eg.
// field_name: T, // comment attached to field_name
- if (tree.tokens.at(maybe_comma).id == .DocComment)
+ if (tree.token_ids[maybe_comma] == .DocComment)
maybe_comma = tree.prevToken(maybe_comma);
- break :blk tree.tokens.at(maybe_comma).id == .Comma;
+ break :blk tree.token_ids[maybe_comma] == .Comma;
};
+ const fields_and_decls = container_decl.fieldsAndDecls();
+
// Check if the first declaration and the { are on the same line
const src_has_newline = !tree.tokensOnSameLine(
container_decl.lbrace_token,
- container_decl.fields_and_decls.at(0).*.firstToken(),
+ fields_and_decls[0].firstToken(),
);
// We can only print all the elements in-line if all the
// declarations inside are fields
const src_has_only_fields = blk: {
- var it = container_decl.fields_and_decls.iterator(0);
- while (it.next()) |decl| {
- if (decl.*.id != .ContainerField) break :blk false;
+ for (fields_and_decls) |decl| {
+ if (decl.id != .ContainerField) break :blk false;
}
break :blk true;
};
@@ -1214,13 +1256,12 @@ fn renderExpression(
const new_indent = indent + indent_delta;
try renderToken(tree, stream, container_decl.lbrace_token, new_indent, start_col, .Newline); // {
- var it = container_decl.fields_and_decls.iterator(0);
- while (it.next()) |decl| {
+ for (fields_and_decls) |decl, i| {
try stream.writeByteNTimes(' ', new_indent);
- try renderContainerDecl(allocator, stream, tree, new_indent, start_col, decl.*, .Newline);
+ try renderContainerDecl(allocator, stream, tree, new_indent, start_col, decl, .Newline);
- if (it.peek()) |next_decl| {
- try renderExtraNewline(tree, stream, start_col, next_decl.*);
+ if (i + 1 < fields_and_decls.len) {
+ try renderExtraNewline(tree, stream, start_col, fields_and_decls[i + 1]);
}
}
@@ -1233,10 +1274,9 @@ fn renderExpression(
const new_indent = indent + indent_delta;
try stream.writeByteNTimes(' ', new_indent);
- var it = container_decl.fields_and_decls.iterator(0);
- while (it.next()) |decl| {
- const space_after_decl: Space = if (it.peek() == null) .Newline else .Space;
- try renderContainerDecl(allocator, stream, tree, new_indent, start_col, decl.*, space_after_decl);
+ for (fields_and_decls) |decl, i| {
+ const space_after_decl: Space = if (i + 1 >= fields_and_decls.len) .Newline else .Space;
+ try renderContainerDecl(allocator, stream, tree, new_indent, start_col, decl, space_after_decl);
}
try stream.writeByteNTimes(' ', indent);
@@ -1244,9 +1284,8 @@ fn renderExpression(
// All the declarations on the same line
try renderToken(tree, stream, container_decl.lbrace_token, indent, start_col, .Space); // {
- var it = container_decl.fields_and_decls.iterator(0);
- while (it.next()) |decl| {
- try renderContainerDecl(allocator, stream, tree, indent, start_col, decl.*, .Space);
+ for (fields_and_decls) |decl| {
+ try renderContainerDecl(allocator, stream, tree, indent, start_col, decl, .Space);
}
}
@@ -1258,14 +1297,14 @@ fn renderExpression(
const lbrace = tree.nextToken(err_set_decl.error_token);
- if (err_set_decl.decls.len == 0) {
+ if (err_set_decl.decls_len == 0) {
try renderToken(tree, stream, err_set_decl.error_token, indent, start_col, Space.None);
try renderToken(tree, stream, lbrace, indent, start_col, Space.None);
return renderToken(tree, stream, err_set_decl.rbrace_token, indent, start_col, space);
}
- if (err_set_decl.decls.len == 1) blk: {
- const node = err_set_decl.decls.at(0).*;
+ if (err_set_decl.decls_len == 1) blk: {
+ const node = err_set_decl.decls()[0];
// if there are any doc comments or same line comments
// don't try to put it all on one line
@@ -1285,24 +1324,24 @@ fn renderExpression(
const src_has_trailing_comma = blk: {
const maybe_comma = tree.prevToken(err_set_decl.rbrace_token);
- break :blk tree.tokens.at(maybe_comma).id == .Comma;
+ break :blk tree.token_ids[maybe_comma] == .Comma;
};
if (src_has_trailing_comma) {
try renderToken(tree, stream, lbrace, indent, start_col, Space.Newline); // {
const new_indent = indent + indent_delta;
- var it = err_set_decl.decls.iterator(0);
- while (it.next()) |node| {
+ const decls = err_set_decl.decls();
+ for (decls) |node, i| {
try stream.writeByteNTimes(' ', new_indent);
- if (it.peek()) |next_node| {
- try renderExpression(allocator, stream, tree, new_indent, start_col, node.*, Space.None);
- try renderToken(tree, stream, tree.nextToken(node.*.lastToken()), new_indent, start_col, Space.Newline); // ,
+ if (i + 1 < decls.len) {
+ try renderExpression(allocator, stream, tree, new_indent, start_col, node, Space.None);
+ try renderToken(tree, stream, tree.nextToken(node.lastToken()), new_indent, start_col, Space.Newline); // ,
- try renderExtraNewline(tree, stream, start_col, next_node.*);
+ try renderExtraNewline(tree, stream, start_col, decls[i + 1]);
} else {
- try renderExpression(allocator, stream, tree, new_indent, start_col, node.*, Space.Comma);
+ try renderExpression(allocator, stream, tree, new_indent, start_col, node, Space.Comma);
}
}
@@ -1311,17 +1350,17 @@ fn renderExpression(
} else {
try renderToken(tree, stream, lbrace, indent, start_col, Space.Space); // {
- var it = err_set_decl.decls.iterator(0);
- while (it.next()) |node| {
- if (it.peek()) |next_node| {
- try renderExpression(allocator, stream, tree, indent, start_col, node.*, Space.None);
+ const decls = err_set_decl.decls();
+ for (decls) |node, i| {
+ if (i + 1 < decls.len) {
+ try renderExpression(allocator, stream, tree, indent, start_col, node, Space.None);
- const comma_token = tree.nextToken(node.*.lastToken());
- assert(tree.tokens.at(comma_token).id == .Comma);
+ const comma_token = tree.nextToken(node.lastToken());
+ assert(tree.token_ids[comma_token] == .Comma);
try renderToken(tree, stream, comma_token, indent, start_col, Space.Space); // ,
- try renderExtraNewline(tree, stream, start_col, next_node.*);
+ try renderExtraNewline(tree, stream, start_col, decls[i + 1]);
} else {
- try renderExpression(allocator, stream, tree, indent, start_col, node.*, Space.Space);
+ try renderExpression(allocator, stream, tree, indent, start_col, node, Space.Space);
}
}
@@ -1342,14 +1381,12 @@ fn renderExpression(
const multiline_str_literal = @fieldParentPtr(ast.Node.MultilineStringLiteral, "base", base);
var skip_first_indent = true;
- if (tree.tokens.at(multiline_str_literal.firstToken() - 1).id != .LineComment) {
+ if (tree.token_ids[multiline_str_literal.firstToken() - 1] != .LineComment) {
try stream.print("\n", .{});
skip_first_indent = false;
}
- var i: usize = 0;
- while (i < multiline_str_literal.lines.len) : (i += 1) {
- const t = multiline_str_literal.lines.at(i).*;
+ for (multiline_str_literal.lines()) |t| {
if (!skip_first_indent) {
try stream.writeByteNTimes(' ', indent + indent_delta);
}
@@ -1369,10 +1406,10 @@ fn renderExpression(
try renderToken(tree, stream, builtin_call.builtin_token, indent, start_col, Space.None); // @name
const src_params_trailing_comma = blk: {
- if (builtin_call.params.len < 2) break :blk false;
- const last_node = builtin_call.params.at(builtin_call.params.len - 1).*;
+ if (builtin_call.params_len < 2) break :blk false;
+ const last_node = builtin_call.params()[builtin_call.params_len - 1];
const maybe_comma = tree.nextToken(last_node.lastToken());
- break :blk tree.tokens.at(maybe_comma).id == .Comma;
+ break :blk tree.token_ids[maybe_comma] == .Comma;
};
const lparen = tree.nextToken(builtin_call.builtin_token);
@@ -1381,12 +1418,12 @@ fn renderExpression(
try renderToken(tree, stream, lparen, indent, start_col, Space.None); // (
// render all on one line, no trailing comma
- var it = builtin_call.params.iterator(0);
- while (it.next()) |param_node| {
- try renderExpression(allocator, stream, tree, indent, start_col, param_node.*, Space.None);
+ const params = builtin_call.params();
+ for (params) |param_node, i| {
+ try renderExpression(allocator, stream, tree, indent, start_col, param_node, Space.None);
- if (it.peek() != null) {
- const comma_token = tree.nextToken(param_node.*.lastToken());
+ if (i + 1 < params.len) {
+ const comma_token = tree.nextToken(param_node.lastToken());
try renderToken(tree, stream, comma_token, indent, start_col, Space.Space); // ,
}
}
@@ -1395,10 +1432,9 @@ fn renderExpression(
const new_indent = indent + indent_delta;
try renderToken(tree, stream, lparen, new_indent, start_col, Space.Newline); // (
- var it = builtin_call.params.iterator(0);
- while (it.next()) |param_node| {
+ for (builtin_call.params()) |param_node| {
try stream.writeByteNTimes(' ', new_indent);
- try renderExpression(allocator, stream, tree, indent, start_col, param_node.*, Space.Comma);
+ try renderExpression(allocator, stream, tree, indent, start_col, param_node, Space.Comma);
}
try stream.writeByteNTimes(' ', indent);
}
@@ -1410,8 +1446,8 @@ fn renderExpression(
const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", base);
if (fn_proto.visib_token) |visib_token_index| {
- const visib_token = tree.tokens.at(visib_token_index);
- assert(visib_token.id == .Keyword_pub or visib_token.id == .Keyword_export);
+ const visib_token = tree.token_ids[visib_token_index];
+ assert(visib_token == .Keyword_pub or visib_token == .Keyword_export);
try renderToken(tree, stream, visib_token_index, indent, start_col, Space.Space); // pub
}
@@ -1433,7 +1469,7 @@ fn renderExpression(
try renderToken(tree, stream, fn_proto.fn_token, indent, start_col, Space.Space); // fn
break :blk tree.nextToken(fn_proto.fn_token);
};
- assert(tree.tokens.at(lparen).id == .LParen);
+ assert(tree.token_ids[lparen] == .LParen);
const rparen = tree.prevToken(
// the first token for the annotation expressions is the left
@@ -1449,10 +1485,10 @@ fn renderExpression(
.InferErrorSet => |node| tree.prevToken(node.firstToken()),
.Invalid => unreachable,
});
- assert(tree.tokens.at(rparen).id == .RParen);
+ assert(tree.token_ids[rparen] == .RParen);
const src_params_trailing_comma = blk: {
- const maybe_comma = tree.tokens.at(rparen - 1).id;
+ const maybe_comma = tree.token_ids[rparen - 1];
break :blk maybe_comma == .Comma or maybe_comma == .LineComment;
};
@@ -1460,12 +1496,11 @@ fn renderExpression(
try renderToken(tree, stream, lparen, indent, start_col, Space.None); // (
// render all on one line, no trailing comma
- var it = fn_proto.params.iterator(0);
- while (it.next()) |param_decl_node| {
- try renderParamDecl(allocator, stream, tree, indent, start_col, param_decl_node.*, Space.None);
+ for (fn_proto.params()) |param_decl, i| {
+ try renderParamDecl(allocator, stream, tree, indent, start_col, param_decl, Space.None);
- if (it.peek() != null) {
- const comma = tree.nextToken(param_decl_node.*.lastToken());
+ if (i + 1 < fn_proto.params_len) {
+ const comma = tree.nextToken(param_decl.lastToken());
try renderToken(tree, stream, comma, indent, start_col, Space.Space); // ,
}
}
@@ -1474,10 +1509,9 @@ fn renderExpression(
const new_indent = indent + indent_delta;
try renderToken(tree, stream, lparen, new_indent, start_col, Space.Newline); // (
- var it = fn_proto.params.iterator(0);
- while (it.next()) |param_decl_node| {
+ for (fn_proto.params()) |param_decl| {
try stream.writeByteNTimes(' ', new_indent);
- try renderParamDecl(allocator, stream, tree, new_indent, start_col, param_decl_node.*, Space.Comma);
+ try renderParamDecl(allocator, stream, tree, new_indent, start_col, param_decl, Space.Comma);
}
try stream.writeByteNTimes(' ', indent);
}
@@ -1556,7 +1590,7 @@ fn renderExpression(
const rparen = tree.nextToken(switch_node.expr.lastToken());
const lbrace = tree.nextToken(rparen);
- if (switch_node.cases.len == 0) {
+ if (switch_node.cases_len == 0) {
try renderExpression(allocator, stream, tree, indent, start_col, switch_node.expr, Space.None);
try renderToken(tree, stream, rparen, indent, start_col, Space.Space); // )
try renderToken(tree, stream, lbrace, indent, start_col, Space.None); // {
@@ -1570,13 +1604,13 @@ fn renderExpression(
try renderToken(tree, stream, rparen, indent, start_col, Space.Space); // )
try renderToken(tree, stream, lbrace, new_indent, start_col, Space.Newline); // {
- var it = switch_node.cases.iterator(0);
- while (it.next()) |node| {
+ const cases = switch_node.cases();
+ for (cases) |node, i| {
try stream.writeByteNTimes(' ', new_indent);
- try renderExpression(allocator, stream, tree, new_indent, start_col, node.*, Space.Comma);
+ try renderExpression(allocator, stream, tree, new_indent, start_col, node, Space.Comma);
- if (it.peek()) |next_node| {
- try renderExtraNewline(tree, stream, start_col, next_node.*);
+ if (i + 1 < cases.len) {
+ try renderExtraNewline(tree, stream, start_col, cases[i + 1]);
}
}
@@ -1587,38 +1621,38 @@ fn renderExpression(
.SwitchCase => {
const switch_case = @fieldParentPtr(ast.Node.SwitchCase, "base", base);
- assert(switch_case.items.len != 0);
+ assert(switch_case.items_len != 0);
const src_has_trailing_comma = blk: {
- const last_node = switch_case.items.at(switch_case.items.len - 1).*;
+ const last_node = switch_case.items()[switch_case.items_len - 1];
const maybe_comma = tree.nextToken(last_node.lastToken());
- break :blk tree.tokens.at(maybe_comma).id == .Comma;
+ break :blk tree.token_ids[maybe_comma] == .Comma;
};
- if (switch_case.items.len == 1 or !src_has_trailing_comma) {
- var it = switch_case.items.iterator(0);
- while (it.next()) |node| {
- if (it.peek()) |next_node| {
- try renderExpression(allocator, stream, tree, indent, start_col, node.*, Space.None);
+ if (switch_case.items_len == 1 or !src_has_trailing_comma) {
+ const items = switch_case.items();
+ for (items) |node, i| {
+ if (i + 1 < items.len) {
+ try renderExpression(allocator, stream, tree, indent, start_col, node, Space.None);
- const comma_token = tree.nextToken(node.*.lastToken());
+ const comma_token = tree.nextToken(node.lastToken());
try renderToken(tree, stream, comma_token, indent, start_col, Space.Space); // ,
- try renderExtraNewline(tree, stream, start_col, next_node.*);
+ try renderExtraNewline(tree, stream, start_col, items[i + 1]);
} else {
- try renderExpression(allocator, stream, tree, indent, start_col, node.*, Space.Space);
+ try renderExpression(allocator, stream, tree, indent, start_col, node, Space.Space);
}
}
} else {
- var it = switch_case.items.iterator(0);
- while (it.next()) |node| {
- if (it.peek()) |next_node| {
- try renderExpression(allocator, stream, tree, indent, start_col, node.*, Space.None);
+ const items = switch_case.items();
+ for (items) |node, i| {
+ if (i + 1 < items.len) {
+ try renderExpression(allocator, stream, tree, indent, start_col, node, Space.None);
- const comma_token = tree.nextToken(node.*.lastToken());
+ const comma_token = tree.nextToken(node.lastToken());
try renderToken(tree, stream, comma_token, indent, start_col, Space.Newline); // ,
- try renderExtraNewline(tree, stream, start_col, next_node.*);
+ try renderExtraNewline(tree, stream, start_col, items[i + 1]);
try stream.writeByteNTimes(' ', indent);
} else {
- try renderExpression(allocator, stream, tree, indent, start_col, node.*, Space.Comma);
+ try renderExpression(allocator, stream, tree, indent, start_col, node, Space.Comma);
try stream.writeByteNTimes(' ', indent);
}
}
@@ -1918,34 +1952,31 @@ fn renderExpression(
} else blk: {
try renderToken(tree, stream, colon1, indent, start_col, Space.Space); // :
- var it = asm_node.outputs.iterator(0);
- while (true) {
- const asm_output = it.next().?;
- const node = &(asm_output.*).base;
+ for (asm_node.outputs) |*asm_output, i| {
+ if (i + 1 < asm_node.outputs.len) {
+ const next_asm_output = asm_node.outputs[i + 1];
+ try renderAsmOutput(allocator, stream, tree, indent_extra, start_col, asm_output, Space.None);
- if (it.peek()) |next_asm_output| {
- try renderExpression(allocator, stream, tree, indent_extra, start_col, node, Space.None);
- const next_node = &(next_asm_output.*).base;
-
- const comma = tree.prevToken(next_asm_output.*.firstToken());
+ const comma = tree.prevToken(next_asm_output.firstToken());
try renderToken(tree, stream, comma, indent_extra, start_col, Space.Newline); // ,
- try renderExtraNewline(tree, stream, start_col, next_node);
+ try renderExtraNewlineToken(tree, stream, start_col, next_asm_output.firstToken());
try stream.writeByteNTimes(' ', indent_extra);
} else if (asm_node.inputs.len == 0 and asm_node.clobbers.len == 0) {
- try renderExpression(allocator, stream, tree, indent_extra, start_col, node, Space.Newline);
+ try renderAsmOutput(allocator, stream, tree, indent_extra, start_col, asm_output, Space.Newline);
try stream.writeByteNTimes(' ', indent);
return renderToken(tree, stream, asm_node.rparen, indent, start_col, space);
} else {
- try renderExpression(allocator, stream, tree, indent_extra, start_col, node, Space.Newline);
+ try renderAsmOutput(allocator, stream, tree, indent_extra, start_col, asm_output, Space.Newline);
try stream.writeByteNTimes(' ', indent_once);
- const comma_or_colon = tree.nextToken(node.lastToken());
- break :blk switch (tree.tokens.at(comma_or_colon).id) {
+ const comma_or_colon = tree.nextToken(asm_output.lastToken());
+ break :blk switch (tree.token_ids[comma_or_colon]) {
.Comma => tree.nextToken(comma_or_colon),
else => comma_or_colon,
};
}
}
+ unreachable;
};
const colon3 = if (asm_node.inputs.len == 0) blk: {
@@ -1956,43 +1987,37 @@ fn renderExpression(
} else blk: {
try renderToken(tree, stream, colon2, indent, start_col, Space.Space); // :
- var it = asm_node.inputs.iterator(0);
- while (true) {
- const asm_input = it.next().?;
- const node = &(asm_input.*).base;
+ for (asm_node.inputs) |*asm_input, i| {
+ if (i + 1 < asm_node.inputs.len) {
+ const next_asm_input = &asm_node.inputs[i + 1];
+ try renderAsmInput(allocator, stream, tree, indent_extra, start_col, asm_input, Space.None);
- if (it.peek()) |next_asm_input| {
- try renderExpression(allocator, stream, tree, indent_extra, start_col, node, Space.None);
- const next_node = &(next_asm_input.*).base;
-
- const comma = tree.prevToken(next_asm_input.*.firstToken());
+ const comma = tree.prevToken(next_asm_input.firstToken());
try renderToken(tree, stream, comma, indent_extra, start_col, Space.Newline); // ,
- try renderExtraNewline(tree, stream, start_col, next_node);
+ try renderExtraNewlineToken(tree, stream, start_col, next_asm_input.firstToken());
try stream.writeByteNTimes(' ', indent_extra);
} else if (asm_node.clobbers.len == 0) {
- try renderExpression(allocator, stream, tree, indent_extra, start_col, node, Space.Newline);
+ try renderAsmInput(allocator, stream, tree, indent_extra, start_col, asm_input, Space.Newline);
try stream.writeByteNTimes(' ', indent);
return renderToken(tree, stream, asm_node.rparen, indent, start_col, space); // )
} else {
- try renderExpression(allocator, stream, tree, indent_extra, start_col, node, Space.Newline);
+ try renderAsmInput(allocator, stream, tree, indent_extra, start_col, asm_input, Space.Newline);
try stream.writeByteNTimes(' ', indent_once);
- const comma_or_colon = tree.nextToken(node.lastToken());
- break :blk switch (tree.tokens.at(comma_or_colon).id) {
+ const comma_or_colon = tree.nextToken(asm_input.lastToken());
+ break :blk switch (tree.token_ids[comma_or_colon]) {
.Comma => tree.nextToken(comma_or_colon),
else => comma_or_colon,
};
}
}
+ unreachable;
};
try renderToken(tree, stream, colon3, indent, start_col, Space.Space); // :
- var it = asm_node.clobbers.iterator(0);
- while (true) {
- const clobber_node = it.next().?.*;
-
- if (it.peek() == null) {
+ for (asm_node.clobbers) |clobber_node, i| {
+ if (i + 1 >= asm_node.clobbers.len) {
try renderExpression(allocator, stream, tree, indent_extra, start_col, clobber_node, Space.Newline);
try stream.writeByteNTimes(' ', indent);
return renderToken(tree, stream, asm_node.rparen, indent, start_col, space);
@@ -2004,40 +2029,6 @@ fn renderExpression(
}
},
- .AsmInput => {
- const asm_input = @fieldParentPtr(ast.Node.AsmInput, "base", base);
-
- try stream.writeAll("[");
- try renderExpression(allocator, stream, tree, indent, start_col, asm_input.symbolic_name, Space.None);
- try stream.writeAll("] ");
- try renderExpression(allocator, stream, tree, indent, start_col, asm_input.constraint, Space.None);
- try stream.writeAll(" (");
- try renderExpression(allocator, stream, tree, indent, start_col, asm_input.expr, Space.None);
- return renderToken(tree, stream, asm_input.lastToken(), indent, start_col, space); // )
- },
-
- .AsmOutput => {
- const asm_output = @fieldParentPtr(ast.Node.AsmOutput, "base", base);
-
- try stream.writeAll("[");
- try renderExpression(allocator, stream, tree, indent, start_col, asm_output.symbolic_name, Space.None);
- try stream.writeAll("] ");
- try renderExpression(allocator, stream, tree, indent, start_col, asm_output.constraint, Space.None);
- try stream.writeAll(" (");
-
- switch (asm_output.kind) {
- ast.Node.AsmOutput.Kind.Variable => |variable_name| {
- try renderExpression(allocator, stream, tree, indent, start_col, &variable_name.base, Space.None);
- },
- ast.Node.AsmOutput.Kind.Return => |return_type| {
- try stream.writeAll("-> ");
- try renderExpression(allocator, stream, tree, indent, start_col, return_type, Space.None);
- },
- }
-
- return renderToken(tree, stream, asm_output.lastToken(), indent, start_col, space); // )
- },
-
.EnumLiteral => {
const enum_literal = @fieldParentPtr(ast.Node.EnumLiteral, "base", base);
@@ -2050,11 +2041,56 @@ fn renderExpression(
.VarDecl,
.Use,
.TestDecl,
- .ParamDecl,
=> unreachable,
}
}
+fn renderAsmOutput(
+ allocator: *mem.Allocator,
+ stream: var,
+ tree: *ast.Tree,
+ indent: usize,
+ start_col: *usize,
+ asm_output: *const ast.Node.Asm.Output,
+ space: Space,
+) (@TypeOf(stream).Error || Error)!void {
+ try stream.writeAll("[");
+ try renderExpression(allocator, stream, tree, indent, start_col, asm_output.symbolic_name, Space.None);
+ try stream.writeAll("] ");
+ try renderExpression(allocator, stream, tree, indent, start_col, asm_output.constraint, Space.None);
+ try stream.writeAll(" (");
+
+ switch (asm_output.kind) {
+ ast.Node.Asm.Output.Kind.Variable => |variable_name| {
+ try renderExpression(allocator, stream, tree, indent, start_col, &variable_name.base, Space.None);
+ },
+ ast.Node.Asm.Output.Kind.Return => |return_type| {
+ try stream.writeAll("-> ");
+ try renderExpression(allocator, stream, tree, indent, start_col, return_type, Space.None);
+ },
+ }
+
+ return renderToken(tree, stream, asm_output.lastToken(), indent, start_col, space); // )
+}
+
+fn renderAsmInput(
+ allocator: *mem.Allocator,
+ stream: var,
+ tree: *ast.Tree,
+ indent: usize,
+ start_col: *usize,
+ asm_input: *const ast.Node.Asm.Input,
+ space: Space,
+) (@TypeOf(stream).Error || Error)!void {
+ try stream.writeAll("[");
+ try renderExpression(allocator, stream, tree, indent, start_col, asm_input.symbolic_name, Space.None);
+ try stream.writeAll("] ");
+ try renderExpression(allocator, stream, tree, indent, start_col, asm_input.constraint, Space.None);
+ try stream.writeAll(" (");
+ try renderExpression(allocator, stream, tree, indent, start_col, asm_input.expr, Space.None);
+ return renderToken(tree, stream, asm_input.lastToken(), indent, start_col, space); // )
+}
+
fn renderVarDecl(
allocator: *mem.Allocator,
stream: var,
@@ -2133,11 +2169,9 @@ fn renderParamDecl(
tree: *ast.Tree,
indent: usize,
start_col: *usize,
- base: *ast.Node,
+ param_decl: ast.Node.FnProto.ParamDecl,
space: Space,
) (@TypeOf(stream).Error || Error)!void {
- const param_decl = @fieldParentPtr(ast.Node.ParamDecl, "base", base);
-
try renderDocComments(tree, stream, param_decl, indent, start_col);
if (param_decl.comptime_token) |comptime_token| {
@@ -2174,7 +2208,7 @@ fn renderStatement(
try renderExpression(allocator, stream, tree, indent, start_col, base, Space.None);
const semicolon_index = tree.nextToken(base.lastToken());
- assert(tree.tokens.at(semicolon_index).id == .Semicolon);
+ assert(tree.token_ids[semicolon_index] == .Semicolon);
try renderToken(tree, stream, semicolon_index, indent, start_col, Space.Newline);
} else {
try renderExpression(allocator, stream, tree, indent, start_col, base, Space.Newline);
@@ -2212,22 +2246,25 @@ fn renderTokenOffset(
return;
}
- var token = tree.tokens.at(token_index);
- try stream.writeAll(mem.trimRight(u8, tree.tokenSlicePtr(token)[token_skip_bytes..], " "));
+ var token_loc = tree.token_locs[token_index];
+ try stream.writeAll(mem.trimRight(u8, tree.tokenSliceLoc(token_loc)[token_skip_bytes..], " "));
if (space == Space.NoComment)
return;
- var next_token = tree.tokens.at(token_index + 1);
+ var next_token_id = tree.token_ids[token_index + 1];
+ var next_token_loc = tree.token_locs[token_index + 1];
- if (space == Space.Comma) switch (next_token.id) {
+ if (space == Space.Comma) switch (next_token_id) {
.Comma => return renderToken(tree, stream, token_index + 1, indent, start_col, Space.Newline),
.LineComment => {
try stream.writeAll(", ");
return renderToken(tree, stream, token_index + 1, indent, start_col, Space.Newline);
},
else => {
- if (token_index + 2 < tree.tokens.len and tree.tokens.at(token_index + 2).id == .MultilineStringLiteralLine) {
+ if (token_index + 2 < tree.token_ids.len and
+ tree.token_ids[token_index + 2] == .MultilineStringLiteralLine)
+ {
try stream.writeAll(",");
return;
} else {
@@ -2240,19 +2277,20 @@ fn renderTokenOffset(
// Skip over same line doc comments
var offset: usize = 1;
- if (next_token.id == .DocComment) {
- const loc = tree.tokenLocationPtr(token.end, next_token);
+ if (next_token_id == .DocComment) {
+ const loc = tree.tokenLocationLoc(token_loc.end, next_token_loc);
if (loc.line == 0) {
offset += 1;
- next_token = tree.tokens.at(token_index + offset);
+ next_token_id = tree.token_ids[token_index + offset];
+ next_token_loc = tree.token_locs[token_index + offset];
}
}
- if (next_token.id != .LineComment) blk: {
+ if (next_token_id != .LineComment) blk: {
switch (space) {
Space.None, Space.NoNewline => return,
Space.Newline => {
- if (next_token.id == .MultilineStringLiteralLine) {
+ if (next_token_id == .MultilineStringLiteralLine) {
return;
} else {
try stream.writeAll("\n");
@@ -2261,7 +2299,7 @@ fn renderTokenOffset(
}
},
Space.Space, Space.SpaceOrOutdent => {
- if (next_token.id == .MultilineStringLiteralLine)
+ if (next_token_id == .MultilineStringLiteralLine)
return;
try stream.writeByte(' ');
return;
@@ -2271,14 +2309,15 @@ fn renderTokenOffset(
}
while (true) {
- const comment_is_empty = mem.trimRight(u8, tree.tokenSlicePtr(next_token), " ").len == 2;
+ const comment_is_empty = mem.trimRight(u8, tree.tokenSliceLoc(next_token_loc), " ").len == 2;
if (comment_is_empty) {
switch (space) {
Space.Newline => {
offset += 1;
- token = next_token;
- next_token = tree.tokens.at(token_index + offset);
- if (next_token.id != .LineComment) {
+ token_loc = next_token_loc;
+ next_token_id = tree.token_ids[token_index + offset];
+ next_token_loc = tree.token_locs[token_index + offset];
+ if (next_token_id != .LineComment) {
try stream.writeByte('\n');
start_col.* = 0;
return;
@@ -2291,18 +2330,19 @@ fn renderTokenOffset(
}
}
- var loc = tree.tokenLocationPtr(token.end, next_token);
+ var loc = tree.tokenLocationLoc(token_loc.end, next_token_loc);
if (loc.line == 0) {
- try stream.print(" {}", .{mem.trimRight(u8, tree.tokenSlicePtr(next_token), " ")});
+ try stream.print(" {}", .{mem.trimRight(u8, tree.tokenSliceLoc(next_token_loc), " ")});
offset = 2;
- token = next_token;
- next_token = tree.tokens.at(token_index + offset);
- if (next_token.id != .LineComment) {
+ token_loc = next_token_loc;
+ next_token_loc = tree.token_locs[token_index + offset];
+ next_token_id = tree.token_ids[token_index + offset];
+ if (next_token_id != .LineComment) {
switch (space) {
Space.None, Space.Space => {
try stream.writeByte('\n');
- const after_comment_token = tree.tokens.at(token_index + offset);
- const next_line_indent = switch (after_comment_token.id) {
+ const after_comment_token = tree.token_ids[token_index + offset];
+ const next_line_indent = switch (after_comment_token) {
.RParen, .RBrace, .RBracket => indent,
else => indent + indent_delta,
};
@@ -2315,7 +2355,7 @@ fn renderTokenOffset(
start_col.* = indent;
},
Space.Newline => {
- if (next_token.id == .MultilineStringLiteralLine) {
+ if (next_token_id == .MultilineStringLiteralLine) {
return;
} else {
try stream.writeAll("\n");
@@ -2328,7 +2368,7 @@ fn renderTokenOffset(
}
return;
}
- loc = tree.tokenLocationPtr(token.end, next_token);
+ loc = tree.tokenLocationLoc(token_loc.end, next_token_loc);
}
while (true) {
@@ -2338,15 +2378,16 @@ fn renderTokenOffset(
const newline_count = if (loc.line <= 1) @as(u8, 1) else @as(u8, 2);
try stream.writeByteNTimes('\n', newline_count);
try stream.writeByteNTimes(' ', indent);
- try stream.writeAll(mem.trimRight(u8, tree.tokenSlicePtr(next_token), " "));
+ try stream.writeAll(mem.trimRight(u8, tree.tokenSliceLoc(next_token_loc), " "));
offset += 1;
- token = next_token;
- next_token = tree.tokens.at(token_index + offset);
- if (next_token.id != .LineComment) {
+ token_loc = next_token_loc;
+ next_token_loc = tree.token_locs[token_index + offset];
+ next_token_id = tree.token_ids[token_index + offset];
+ if (next_token_id != .LineComment) {
switch (space) {
Space.Newline => {
- if (next_token.id == .MultilineStringLiteralLine) {
+ if (next_token_id == .MultilineStringLiteralLine) {
return;
} else {
try stream.writeAll("\n");
@@ -2357,8 +2398,8 @@ fn renderTokenOffset(
Space.None, Space.Space => {
try stream.writeByte('\n');
- const after_comment_token = tree.tokens.at(token_index + offset);
- const next_line_indent = switch (after_comment_token.id) {
+ const after_comment_token = tree.token_ids[token_index + offset];
+ const next_line_indent = switch (after_comment_token) {
.RParen, .RBrace, .RBracket => blk: {
if (indent > indent_delta) {
break :blk indent - indent_delta;
@@ -2381,7 +2422,7 @@ fn renderTokenOffset(
}
return;
}
- loc = tree.tokenLocationPtr(token.end, next_token);
+ loc = tree.tokenLocationLoc(token_loc.end, next_token_loc);
}
}
@@ -2404,16 +2445,32 @@ fn renderDocComments(
start_col: *usize,
) (@TypeOf(stream).Error || Error)!void {
const comment = node.doc_comments orelse return;
- var it = comment.lines.iterator(0);
- const first_token = node.firstToken();
- while (it.next()) |line_token_index| {
- if (line_token_index.* < first_token) {
- try renderToken(tree, stream, line_token_index.*, indent, start_col, Space.Newline);
- try stream.writeByteNTimes(' ', indent);
- } else {
- try renderToken(tree, stream, line_token_index.*, indent, start_col, Space.NoComment);
- try stream.writeAll("\n");
- try stream.writeByteNTimes(' ', indent);
+ return renderDocCommentsToken(tree, stream, comment, node.firstToken(), indent, start_col);
+}
+
+fn renderDocCommentsToken(
+ tree: *ast.Tree,
+ stream: var,
+ comment: *ast.Node.DocComment,
+ first_token: ast.TokenIndex,
+ indent: usize,
+ start_col: *usize,
+) (@TypeOf(stream).Error || Error)!void {
+ var tok_i = comment.first_line;
+ while (true) : (tok_i += 1) {
+ switch (tree.token_ids[tok_i]) {
+ .DocComment, .ContainerDocComment => {
+ if (comment.first_line < first_token) {
+ try renderToken(tree, stream, tok_i, indent, start_col, Space.Newline);
+ try stream.writeByteNTimes(' ', indent);
+ } else {
+ try renderToken(tree, stream, tok_i, indent, start_col, Space.NoComment);
+ try stream.writeAll("\n");
+ try stream.writeByteNTimes(' ', indent);
+ }
+ },
+ .LineComment => continue,
+ else => break,
}
}
}
diff --git a/lib/std/zig/tokenizer.zig b/lib/std/zig/tokenizer.zig
index 160530f459..3ed57f621f 100644
--- a/lib/std/zig/tokenizer.zig
+++ b/lib/std/zig/tokenizer.zig
@@ -3,8 +3,12 @@ const mem = std.mem;
pub const Token = struct {
id: Id,
- start: usize,
- end: usize,
+ loc: Loc,
+
+ pub const Loc = struct {
+ start: usize,
+ end: usize,
+ };
pub const Keyword = struct {
bytes: []const u8,
@@ -426,8 +430,10 @@ pub const Tokenizer = struct {
var state: State = .start;
var result = Token{
.id = .Eof,
- .start = self.index,
- .end = undefined,
+ .loc = .{
+ .start = self.index,
+ .end = undefined,
+ },
};
var seen_escape_digits: usize = undefined;
var remaining_code_units: usize = undefined;
@@ -436,7 +442,7 @@ pub const Tokenizer = struct {
switch (state) {
.start => switch (c) {
' ', '\n', '\t', '\r' => {
- result.start = self.index + 1;
+ result.loc.start = self.index + 1;
},
'"' => {
state = .string_literal;
@@ -686,7 +692,7 @@ pub const Tokenizer = struct {
.identifier => switch (c) {
'a'...'z', 'A'...'Z', '_', '0'...'9' => {},
else => {
- if (Token.getKeyword(self.buffer[result.start..self.index])) |id| {
+ if (Token.getKeyword(self.buffer[result.loc.start..self.index])) |id| {
result.id = id;
}
break;
@@ -1313,7 +1319,7 @@ pub const Tokenizer = struct {
=> {},
.identifier => {
- if (Token.getKeyword(self.buffer[result.start..self.index])) |id| {
+ if (Token.getKeyword(self.buffer[result.loc.start..self.index])) |id| {
result.id = id;
}
},
@@ -1420,7 +1426,7 @@ pub const Tokenizer = struct {
}
}
- result.end = self.index;
+ result.loc.end = self.index;
return result;
}
@@ -1430,8 +1436,10 @@ pub const Tokenizer = struct {
if (invalid_length == 0) return;
self.pending_invalid_token = .{
.id = .Invalid,
- .start = self.index,
- .end = self.index + invalid_length,
+ .loc = .{
+ .start = self.index,
+ .end = self.index + invalid_length,
+ },
};
}
diff --git a/src-self-hosted/clang.zig b/src-self-hosted/clang.zig
index b3d67aab25..624dbe1dad 100644
--- a/src-self-hosted/clang.zig
+++ b/src-self-hosted/clang.zig
@@ -22,7 +22,7 @@ pub const struct_ZigClangCompoundStmt = @Type(.Opaque);
pub const struct_ZigClangConstantArrayType = @Type(.Opaque);
pub const struct_ZigClangContinueStmt = @Type(.Opaque);
pub const struct_ZigClangDecayedType = @Type(.Opaque);
-pub const struct_ZigClangDecl = @Type(.Opaque);
+pub const ZigClangDecl = @Type(.Opaque);
pub const struct_ZigClangDeclRefExpr = @Type(.Opaque);
pub const struct_ZigClangDeclStmt = @Type(.Opaque);
pub const struct_ZigClangDefaultStmt = @Type(.Opaque);
@@ -63,7 +63,7 @@ pub const struct_ZigClangReturnStmt = @Type(.Opaque);
pub const struct_ZigClangSkipFunctionBodiesScope = @Type(.Opaque);
pub const struct_ZigClangSourceManager = @Type(.Opaque);
pub const struct_ZigClangSourceRange = @Type(.Opaque);
-pub const struct_ZigClangStmt = @Type(.Opaque);
+pub const ZigClangStmt = @Type(.Opaque);
pub const struct_ZigClangStringLiteral = @Type(.Opaque);
pub const struct_ZigClangStringRef = @Type(.Opaque);
pub const struct_ZigClangSwitchStmt = @Type(.Opaque);
@@ -781,7 +781,7 @@ pub extern fn ZigClangSourceManager_getCharacterData(self: ?*const struct_ZigCla
pub extern fn ZigClangASTContext_getPointerType(self: ?*const struct_ZigClangASTContext, T: struct_ZigClangQualType) struct_ZigClangQualType;
pub extern fn ZigClangASTUnit_getASTContext(self: ?*struct_ZigClangASTUnit) ?*struct_ZigClangASTContext;
pub extern fn ZigClangASTUnit_getSourceManager(self: *struct_ZigClangASTUnit) *struct_ZigClangSourceManager;
-pub extern fn ZigClangASTUnit_visitLocalTopLevelDecls(self: *struct_ZigClangASTUnit, context: ?*c_void, Fn: ?fn (?*c_void, *const struct_ZigClangDecl) callconv(.C) bool) bool;
+pub extern fn ZigClangASTUnit_visitLocalTopLevelDecls(self: *struct_ZigClangASTUnit, context: ?*c_void, Fn: ?fn (?*c_void, *const ZigClangDecl) callconv(.C) bool) bool;
pub extern fn ZigClangRecordType_getDecl(record_ty: ?*const struct_ZigClangRecordType) *const struct_ZigClangRecordDecl;
pub extern fn ZigClangTagDecl_isThisDeclarationADefinition(self: *const ZigClangTagDecl) bool;
pub extern fn ZigClangEnumType_getDecl(record_ty: ?*const struct_ZigClangEnumType) *const struct_ZigClangEnumDecl;
@@ -817,7 +817,7 @@ pub extern fn ZigClangEnumDecl_enumerator_end(*const ZigClangEnumDecl) ZigClangE
pub extern fn ZigClangEnumDecl_enumerator_iterator_next(ZigClangEnumDecl_enumerator_iterator) ZigClangEnumDecl_enumerator_iterator;
pub extern fn ZigClangEnumDecl_enumerator_iterator_deref(ZigClangEnumDecl_enumerator_iterator) *const ZigClangEnumConstantDecl;
pub extern fn ZigClangEnumDecl_enumerator_iterator_neq(ZigClangEnumDecl_enumerator_iterator, ZigClangEnumDecl_enumerator_iterator) bool;
-pub extern fn ZigClangDecl_castToNamedDecl(decl: *const struct_ZigClangDecl) ?*const ZigClangNamedDecl;
+pub extern fn ZigClangDecl_castToNamedDecl(decl: *const ZigClangDecl) ?*const ZigClangNamedDecl;
pub extern fn ZigClangNamedDecl_getName_bytes_begin(decl: ?*const struct_ZigClangNamedDecl) [*:0]const u8;
pub extern fn ZigClangSourceLocation_eq(a: struct_ZigClangSourceLocation, b: struct_ZigClangSourceLocation) bool;
pub extern fn ZigClangTypedefType_getDecl(self: ?*const struct_ZigClangTypedefType) *const struct_ZigClangTypedefNameDecl;
@@ -842,9 +842,9 @@ pub extern fn ZigClangType_getTypeClassName(self: *const struct_ZigClangType) [*
pub extern fn ZigClangType_getAsArrayTypeUnsafe(self: *const ZigClangType) *const ZigClangArrayType;
pub extern fn ZigClangType_getAsRecordType(self: *const ZigClangType) ?*const ZigClangRecordType;
pub extern fn ZigClangType_getAsUnionType(self: *const ZigClangType) ?*const ZigClangRecordType;
-pub extern fn ZigClangStmt_getBeginLoc(self: *const struct_ZigClangStmt) struct_ZigClangSourceLocation;
-pub extern fn ZigClangStmt_getStmtClass(self: ?*const struct_ZigClangStmt) ZigClangStmtClass;
-pub extern fn ZigClangStmt_classof_Expr(self: ?*const struct_ZigClangStmt) bool;
+pub extern fn ZigClangStmt_getBeginLoc(self: *const ZigClangStmt) struct_ZigClangSourceLocation;
+pub extern fn ZigClangStmt_getStmtClass(self: ?*const ZigClangStmt) ZigClangStmtClass;
+pub extern fn ZigClangStmt_classof_Expr(self: ?*const ZigClangStmt) bool;
pub extern fn ZigClangExpr_getStmtClass(self: *const struct_ZigClangExpr) ZigClangStmtClass;
pub extern fn ZigClangExpr_getType(self: *const struct_ZigClangExpr) struct_ZigClangQualType;
pub extern fn ZigClangExpr_getBeginLoc(self: *const struct_ZigClangExpr) struct_ZigClangSourceLocation;
@@ -873,7 +873,7 @@ pub extern fn ZigClangFunctionDecl_getLocation(self: *const ZigClangFunctionDecl
pub extern fn ZigClangFunctionDecl_hasBody(self: *const ZigClangFunctionDecl) bool;
pub extern fn ZigClangFunctionDecl_getStorageClass(self: *const ZigClangFunctionDecl) ZigClangStorageClass;
pub extern fn ZigClangFunctionDecl_getParamDecl(self: *const ZigClangFunctionDecl, i: c_uint) *const struct_ZigClangParmVarDecl;
-pub extern fn ZigClangFunctionDecl_getBody(self: *const ZigClangFunctionDecl) *const struct_ZigClangStmt;
+pub extern fn ZigClangFunctionDecl_getBody(self: *const ZigClangFunctionDecl) *const ZigClangStmt;
pub extern fn ZigClangFunctionDecl_doesDeclarationForceExternallyVisibleDefinition(self: *const ZigClangFunctionDecl) bool;
pub extern fn ZigClangFunctionDecl_isThisDeclarationADefinition(self: *const ZigClangFunctionDecl) bool;
pub extern fn ZigClangFunctionDecl_doesThisDeclarationHaveABody(self: *const ZigClangFunctionDecl) bool;
@@ -918,7 +918,6 @@ pub const ZigClangCompoundStmt = struct_ZigClangCompoundStmt;
pub const ZigClangConstantArrayType = struct_ZigClangConstantArrayType;
pub const ZigClangContinueStmt = struct_ZigClangContinueStmt;
pub const ZigClangDecayedType = struct_ZigClangDecayedType;
-pub const ZigClangDecl = struct_ZigClangDecl;
pub const ZigClangDeclRefExpr = struct_ZigClangDeclRefExpr;
pub const ZigClangDeclStmt = struct_ZigClangDeclStmt;
pub const ZigClangDefaultStmt = struct_ZigClangDefaultStmt;
@@ -959,7 +958,6 @@ pub const ZigClangReturnStmt = struct_ZigClangReturnStmt;
pub const ZigClangSkipFunctionBodiesScope = struct_ZigClangSkipFunctionBodiesScope;
pub const ZigClangSourceManager = struct_ZigClangSourceManager;
pub const ZigClangSourceRange = struct_ZigClangSourceRange;
-pub const ZigClangStmt = struct_ZigClangStmt;
pub const ZigClangStringLiteral = struct_ZigClangStringLiteral;
pub const ZigClangStringRef = struct_ZigClangStringRef;
pub const ZigClangSwitchStmt = struct_ZigClangSwitchStmt;
@@ -1016,14 +1014,14 @@ pub extern fn ZigClangLoadFromCommandLine(
) ?*ZigClangASTUnit;
pub extern fn ZigClangDecl_getKind(decl: *const ZigClangDecl) ZigClangDeclKind;
-pub extern fn ZigClangDecl_getDeclKindName(decl: *const struct_ZigClangDecl) [*:0]const u8;
+pub extern fn ZigClangDecl_getDeclKindName(decl: *const ZigClangDecl) [*:0]const u8;
-pub const ZigClangCompoundStmt_const_body_iterator = [*]const *struct_ZigClangStmt;
+pub const ZigClangCompoundStmt_const_body_iterator = [*]const *ZigClangStmt;
pub extern fn ZigClangCompoundStmt_body_begin(self: *const ZigClangCompoundStmt) ZigClangCompoundStmt_const_body_iterator;
pub extern fn ZigClangCompoundStmt_body_end(self: *const ZigClangCompoundStmt) ZigClangCompoundStmt_const_body_iterator;
-pub const ZigClangDeclStmt_const_decl_iterator = [*]const *struct_ZigClangDecl;
+pub const ZigClangDeclStmt_const_decl_iterator = [*]const *ZigClangDecl;
pub extern fn ZigClangDeclStmt_decl_begin(self: *const ZigClangDeclStmt) ZigClangDeclStmt_const_decl_iterator;
pub extern fn ZigClangDeclStmt_decl_end(self: *const ZigClangDeclStmt) ZigClangDeclStmt_const_decl_iterator;
diff --git a/src-self-hosted/main.zig b/src-self-hosted/main.zig
index 4ef4acc24b..40d3068d3f 100644
--- a/src-self-hosted/main.zig
+++ b/src-self-hosted/main.zig
@@ -600,8 +600,7 @@ pub fn cmdFmt(gpa: *Allocator, args: []const []const u8) !void {
};
defer tree.deinit();
- var error_it = tree.errors.iterator(0);
- while (error_it.next()) |parse_error| {
+ for (tree.errors) |parse_error| {
try printErrMsgToFile(gpa, parse_error, tree, "", stderr_file, color);
}
if (tree.errors.len != 0) {
@@ -701,8 +700,7 @@ fn fmtPath(fmt: *Fmt, file_path: []const u8, check_mode: bool) FmtError!void {
};
defer tree.deinit();
- var error_it = tree.errors.iterator(0);
- while (error_it.next()) |parse_error| {
+ for (tree.errors) |parse_error| {
try printErrMsgToFile(fmt.gpa, parse_error, tree, file_path, std.io.getStdErr(), fmt.color);
}
if (tree.errors.len != 0) {
@@ -730,7 +728,7 @@ fn fmtPath(fmt: *Fmt, file_path: []const u8, check_mode: bool) FmtError!void {
fn printErrMsgToFile(
gpa: *mem.Allocator,
- parse_error: *const ast.Error,
+ parse_error: ast.Error,
tree: *ast.Tree,
path: []const u8,
file: fs.File,
@@ -745,15 +743,15 @@ fn printErrMsgToFile(
const span_first = lok_token;
const span_last = lok_token;
- const first_token = tree.tokens.at(span_first);
- const last_token = tree.tokens.at(span_last);
- const start_loc = tree.tokenLocationPtr(0, first_token);
- const end_loc = tree.tokenLocationPtr(first_token.end, last_token);
+ const first_token = tree.token_locs[span_first];
+ const last_token = tree.token_locs[span_last];
+ const start_loc = tree.tokenLocationLoc(0, first_token);
+ const end_loc = tree.tokenLocationLoc(first_token.end, last_token);
var text_buf = std.ArrayList(u8).init(gpa);
defer text_buf.deinit();
const out_stream = text_buf.outStream();
- try parse_error.render(&tree.tokens, out_stream);
+ try parse_error.render(tree.token_ids, out_stream);
const text = text_buf.span();
const stream = file.outStream();
diff --git a/src-self-hosted/translate_c.zig b/src-self-hosted/translate_c.zig
index 8321061924..94122f465e 100644
--- a/src-self-hosted/translate_c.zig
+++ b/src-self-hosted/translate_c.zig
@@ -37,10 +37,10 @@ fn addrEql(a: usize, b: usize) bool {
}
const SymbolTable = std.StringHashMap(*ast.Node);
-const AliasList = std.SegmentedList(struct {
+const AliasList = std.ArrayList(struct {
alias: []const u8,
name: []const u8,
-}, 4);
+});
const Scope = struct {
id: Id,
@@ -54,50 +54,99 @@ const Scope = struct {
Loop,
};
+ /// Represents an in-progress ast.Node.Switch. This struct is stack-allocated.
+ /// When it is deinitialized, it produces an ast.Node.Switch which is allocated
+ /// into the main arena.
const Switch = struct {
base: Scope,
- pending_block: *ast.Node.Block,
- cases: *ast.Node.Switch.CaseList,
+ pending_block: Block,
+ cases: []*ast.Node,
+ case_index: usize,
has_default: bool = false,
};
+ /// Used for the scope of condition expressions, for example `if (cond)`.
+ /// The block is lazily initialised because it is only needed for rare
+ /// cases of comma operators being used.
+ const Condition = struct {
+ base: Scope,
+ block: ?Block = null,
+
+ fn getBlockScope(self: *Condition, c: *Context) !*Block {
+ if (self.block) |*b| return b;
+ self.block = try Block.init(c, &self.base, "blk");
+ return &self.block.?;
+ }
+
+ fn deinit(self: *Condition) void {
+ if (self.block) |*b| b.deinit();
+ }
+ };
+
+ /// Represents an in-progress ast.Node.Block. This struct is stack-allocated.
+ /// When it is deinitialized, it produces an ast.Node.Block which is allocated
+ /// into the main arena.
const Block = struct {
base: Scope,
- block_node: *ast.Node.Block,
+ statements: std.ArrayList(*ast.Node),
variables: AliasList,
- label: ?[]const u8,
+ label: ?ast.TokenIndex,
mangle_count: u32 = 0,
+ lbrace: ast.TokenIndex,
- /// Don't forget to set rbrace token and block_node later
- fn init(c: *Context, parent: *Scope, label: ?[]const u8) !*Block {
- const block = try c.a().create(Block);
- block.* = .{
+ fn init(c: *Context, parent: *Scope, label: ?[]const u8) !Block {
+ return Block{
.base = .{
.id = .Block,
.parent = parent,
},
- .block_node = undefined,
- .variables = AliasList.init(c.a()),
- .label = label,
+ .statements = std.ArrayList(*ast.Node).init(c.gpa),
+ .variables = AliasList.init(c.gpa),
+ .label = if (label) |l| blk: {
+ const ll = try appendIdentifier(c, l);
+ _ = try appendToken(c, .Colon, ":");
+ break :blk ll;
+ } else null,
+ .lbrace = try appendToken(c, .LBrace, "{"),
};
- return block;
+ }
+
+ fn deinit(self: *Block) void {
+ self.statements.deinit();
+ self.variables.deinit();
+ self.* = undefined;
+ }
+
+ fn complete(self: *Block, c: *Context) !*ast.Node.Block {
+ // We reserve 1 extra statement if the parent is a Loop. This is in case of
+ // do while, we want to put `if (cond) break;` at the end.
+ const alloc_len = self.statements.items.len + @boolToInt(self.base.parent.?.id == .Loop);
+ const node = try ast.Node.Block.alloc(c.arena, alloc_len);
+ node.* = .{
+ .statements_len = self.statements.items.len,
+ .lbrace = self.lbrace,
+ .rbrace = try appendToken(c, .RBrace, "}"),
+ .label = self.label,
+ };
+ mem.copy(*ast.Node, node.statements(), self.statements.items);
+ return node;
}
/// Given the desired name, return a name that does not shadow anything from outer scopes.
/// Inserts the returned name into the scope.
fn makeMangledName(scope: *Block, c: *Context, name: []const u8) ![]const u8 {
- var proposed_name = name;
+ const name_copy = try c.arena.dupe(u8, name);
+ var proposed_name = name_copy;
while (scope.contains(proposed_name)) {
scope.mangle_count += 1;
- proposed_name = try std.fmt.allocPrint(c.a(), "{}_{}", .{ name, scope.mangle_count });
+ proposed_name = try std.fmt.allocPrint(c.arena, "{}_{}", .{ name, scope.mangle_count });
}
- try scope.variables.push(.{ .name = name, .alias = proposed_name });
+ try scope.variables.append(.{ .name = name_copy, .alias = proposed_name });
return proposed_name;
}
fn getAlias(scope: *Block, name: []const u8) []const u8 {
- var it = scope.variables.iterator(0);
- while (it.next()) |p| {
+ for (scope.variables.items) |p| {
if (mem.eql(u8, p.name, name))
return p.alias;
}
@@ -105,8 +154,7 @@ const Scope = struct {
}
fn localContains(scope: *Block, name: []const u8) bool {
- var it = scope.variables.iterator(0);
- while (it.next()) |p| {
+ for (scope.variables.items) |p| {
if (mem.eql(u8, p.name, name))
return true;
}
@@ -132,8 +180,8 @@ const Scope = struct {
.id = .Root,
.parent = null,
},
- .sym_table = SymbolTable.init(c.a()),
- .macro_table = SymbolTable.init(c.a()),
+ .sym_table = SymbolTable.init(c.arena),
+ .macro_table = SymbolTable.init(c.arena),
.context = c,
};
}
@@ -158,10 +206,7 @@ const Scope = struct {
switch (scope.id) {
.Root => unreachable,
.Block => return @fieldParentPtr(Block, "base", scope),
- .Condition => {
- // comma operator used
- return try Block.init(c, scope, "blk");
- },
+ .Condition => return @fieldParentPtr(Condition, "base", scope).getBlockScope(c),
else => scope = scope.parent.?,
}
}
@@ -208,7 +253,11 @@ const Scope = struct {
};
pub const Context = struct {
- tree: *ast.Tree,
+ gpa: *mem.Allocator,
+ arena: *mem.Allocator,
+ token_ids: std.ArrayListUnmanaged(Token.Id),
+ token_locs: std.ArrayListUnmanaged(Token.Loc),
+ errors: std.ArrayListUnmanaged(ast.Error),
source_buffer: *std.ArrayList(u8),
err: Error,
source_manager: *ZigClangSourceManager,
@@ -217,6 +266,7 @@ pub const Context = struct {
global_scope: *Scope.Root,
clang_context: *ZigClangASTContext,
mangle_count: u32 = 0,
+ root_decls: std.ArrayListUnmanaged(*ast.Node),
/// This one is different than the root scope's name table. This contains
/// a list of names that we found by visiting all the top level decls without
@@ -229,13 +279,9 @@ pub const Context = struct {
return c.mangle_count;
}
- fn a(c: *Context) *mem.Allocator {
- return &c.tree.arena_allocator.allocator;
- }
-
/// Convert a null-terminated C string to a slice allocated in the arena
fn str(c: *Context, s: [*:0]const u8) ![]u8 {
- return mem.dupe(c.a(), u8, mem.spanZ(s));
+ return mem.dupe(c.arena, u8, mem.spanZ(s));
}
/// Convert a clang source location to a file:line:column string
@@ -246,12 +292,52 @@ pub const Context = struct {
const line = ZigClangSourceManager_getSpellingLineNumber(c.source_manager, spelling_loc);
const column = ZigClangSourceManager_getSpellingColumnNumber(c.source_manager, spelling_loc);
- return std.fmt.allocPrint(c.a(), "{}:{}:{}", .{ filename, line, column });
+ return std.fmt.allocPrint(c.arena, "{}:{}:{}", .{ filename, line, column });
+ }
+
+ fn createCall(c: *Context, fn_expr: *ast.Node, params_len: ast.NodeIndex) !*ast.Node.Call {
+ _ = try appendToken(c, .LParen, "(");
+ const node = try ast.Node.Call.alloc(c.arena, params_len);
+ node.* = .{
+ .lhs = fn_expr,
+ .params_len = params_len,
+ .async_token = null,
+ .rtoken = undefined, // set after appending args
+ };
+ return node;
+ }
+
+ fn createBuiltinCall(c: *Context, name: []const u8, params_len: ast.NodeIndex) !*ast.Node.BuiltinCall {
+ const builtin_token = try appendToken(c, .Builtin, name);
+ _ = try appendToken(c, .LParen, "(");
+ const node = try ast.Node.BuiltinCall.alloc(c.arena, params_len);
+ node.* = .{
+ .builtin_token = builtin_token,
+ .params_len = params_len,
+ .rparen_token = undefined, // set after appending args
+ };
+ return node;
+ }
+
+ fn createBlock(c: *Context, label: ?[]const u8, statements_len: ast.NodeIndex) !*ast.Node.Block {
+ const label_node = if (label) |l| blk: {
+ const ll = try appendIdentifier(c, l);
+ _ = try appendToken(c, .Colon, ":");
+ break :blk ll;
+ } else null;
+ const block_node = try ast.Node.Block.alloc(c.arena, statements_len);
+ block_node.* = .{
+ .label = label_node,
+ .lbrace = try appendToken(c, .LBrace, "{"),
+ .statements_len = statements_len,
+ .rbrace = undefined,
+ };
+ return block_node;
}
};
pub fn translate(
- backing_allocator: *mem.Allocator,
+ gpa: *mem.Allocator,
args_begin: [*]?[*]const u8,
args_end: [*]?[*]const u8,
errors: *[]ClangErrMsg,
@@ -269,47 +355,38 @@ pub fn translate(
};
defer ZigClangASTUnit_delete(ast_unit);
- const tree = blk: {
- var tree_arena = std.heap.ArenaAllocator.init(backing_allocator);
- errdefer tree_arena.deinit();
+ var source_buffer = std.ArrayList(u8).init(gpa);
+ defer source_buffer.deinit();
- const tree = try tree_arena.allocator.create(ast.Tree);
- tree.* = .{
- .source = undefined, // need to use toOwnedSlice later
- .root_node = undefined,
- .arena_allocator = tree_arena,
- .tokens = undefined, // can't reference the allocator yet
- .errors = undefined, // can't reference the allocator yet
- .generated = true,
- };
- break :blk tree;
- };
- const arena = &tree.arena_allocator.allocator; // now we can reference the allocator
- errdefer tree.arena_allocator.deinit();
- tree.tokens = ast.Tree.TokenList.init(arena);
- tree.errors = ast.Tree.ErrorList.init(arena);
-
- tree.root_node = try arena.create(ast.Node.Root);
- tree.root_node.* = .{
- .decls = ast.Node.Root.DeclList.init(arena),
- // initialized with the eof token at the end
- .eof_token = undefined,
- };
-
- var source_buffer = std.ArrayList(u8).init(arena);
+ // For memory that has the same lifetime as the Tree that we return
+ // from this function.
+ var arena = std.heap.ArenaAllocator.init(gpa);
+ errdefer arena.deinit();
var context = Context{
- .tree = tree,
+ .gpa = gpa,
+ .arena = &arena.allocator,
.source_buffer = &source_buffer,
.source_manager = ZigClangASTUnit_getSourceManager(ast_unit),
.err = undefined,
- .decl_table = DeclTable.init(arena),
- .alias_list = AliasList.init(arena),
- .global_scope = try arena.create(Scope.Root),
+ .decl_table = DeclTable.init(gpa),
+ .alias_list = AliasList.init(gpa),
+ .global_scope = try arena.allocator.create(Scope.Root),
.clang_context = ZigClangASTUnit_getASTContext(ast_unit).?,
- .global_names = std.StringHashMap(void).init(arena),
+ .global_names = std.StringHashMap(void).init(gpa),
+ .token_ids = .{},
+ .token_locs = .{},
+ .errors = .{},
+ .root_decls = .{},
};
context.global_scope.* = Scope.Root.init(&context);
+ defer context.decl_table.deinit();
+ defer context.alias_list.deinit();
+ defer context.token_ids.deinit(gpa);
+ defer context.token_locs.deinit(gpa);
+ defer context.errors.deinit(gpa);
+ defer context.global_names.deinit();
+ defer context.root_decls.deinit(gpa);
try prepopulateGlobalNameTable(ast_unit, &context);
@@ -320,23 +397,34 @@ pub fn translate(
try transPreprocessorEntities(&context, ast_unit);
try addMacros(&context);
- var it = context.alias_list.iterator(0);
- while (it.next()) |alias| {
+ for (context.alias_list.items) |alias| {
if (!context.global_scope.sym_table.contains(alias.alias)) {
try createAlias(&context, alias);
}
}
- tree.root_node.eof_token = try appendToken(&context, .Eof, "");
- tree.source = source_buffer.toOwnedSlice();
+ const eof_token = try appendToken(&context, .Eof, "");
+ const root_node = try ast.Node.Root.create(&arena.allocator, context.root_decls.items.len, eof_token);
+ mem.copy(*ast.Node, root_node.decls(), context.root_decls.items);
+
if (false) {
- std.debug.warn("debug source:\n{}\n==EOF==\ntokens:\n", .{tree.source});
- var i: usize = 0;
- while (i < tree.tokens.len) : (i += 1) {
- const token = tree.tokens.at(i);
+ std.debug.warn("debug source:\n{}\n==EOF==\ntokens:\n", .{source_buffer.items});
+ for (context.token_ids.items) |token| {
std.debug.warn("{}\n", .{token});
}
}
+
+ const tree = try arena.allocator.create(ast.Tree);
+ tree.* = .{
+ .gpa = gpa,
+ .source = try arena.allocator.dupe(u8, source_buffer.items),
+ .token_ids = context.token_ids.toOwnedSlice(gpa),
+ .token_locs = context.token_locs.toOwnedSlice(gpa),
+ .errors = context.errors.toOwnedSlice(gpa),
+ .root_node = root_node,
+ .arena = arena.state,
+ .generated = true,
+ };
return tree;
}
@@ -490,42 +578,35 @@ fn visitFnDecl(c: *Context, fn_decl: *const ZigClangFunctionDecl) Error!void {
// actual function definition with body
const body_stmt = ZigClangFunctionDecl_getBody(fn_decl);
- const block_scope = try Scope.Block.init(rp.c, &c.global_scope.base, null);
+ var block_scope = try Scope.Block.init(rp.c, &c.global_scope.base, null);
+ defer block_scope.deinit();
var scope = &block_scope.base;
- const block_node = try transCreateNodeBlock(rp.c, null);
- block_scope.block_node = block_node;
- var it = proto_node.params.iterator(0);
var param_id: c_uint = 0;
- while (it.next()) |p| {
- const param = @fieldParentPtr(ast.Node.ParamDecl, "base", p.*);
+ for (proto_node.params()) |*param, i| {
const param_name = if (param.name_token) |name_tok|
tokenSlice(c, name_tok)
else if (param.param_type == .var_args) {
- assert(it.next() == null);
- _ = proto_node.params.pop();
+ assert(i + 1 == proto_node.params_len);
+ proto_node.params_len -= 1;
break;
} else
return failDecl(c, fn_decl_loc, fn_name, "function {} parameter has no name", .{fn_name});
- const mangled_param_name = try block_scope.makeMangledName(c, param_name);
-
const c_param = ZigClangFunctionDecl_getParamDecl(fn_decl, param_id);
const qual_type = ZigClangParmVarDecl_getOriginalType(c_param);
const is_const = ZigClangQualType_isConstQualified(qual_type);
- const arg_name = blk: {
- const param_prefix = if (is_const) "" else "arg_";
- const bare_arg_name = try std.fmt.allocPrint(c.a(), "{}{}", .{ param_prefix, mangled_param_name });
- break :blk try block_scope.makeMangledName(c, bare_arg_name);
- };
+ const mangled_param_name = try block_scope.makeMangledName(c, param_name);
if (!is_const) {
+ const bare_arg_name = try std.fmt.allocPrint(c.arena, "arg_{}", .{ mangled_param_name });
+ const arg_name = try block_scope.makeMangledName(c, bare_arg_name);
const node = try transCreateNodeVarDecl(c, false, false, mangled_param_name);
node.eq_token = try appendToken(c, .Equal, "=");
node.init_node = try transCreateNodeIdentifier(c, arg_name);
node.semicolon_token = try appendToken(c, .Semicolon, ";");
- try block_node.statements.push(&node.base);
+ try block_scope.statements.append(&node.base);
param.name_token = try appendIdentifier(c, arg_name);
_ = try appendToken(c, .Colon, ":");
}
@@ -533,14 +614,15 @@ fn visitFnDecl(c: *Context, fn_decl: *const ZigClangFunctionDecl) Error!void {
param_id += 1;
}
- transCompoundStmtInline(rp, &block_scope.base, @ptrCast(*const ZigClangCompoundStmt, body_stmt), block_node) catch |err| switch (err) {
+ const casted_body = @ptrCast(*const ZigClangCompoundStmt, body_stmt);
+ transCompoundStmtInline(rp, &block_scope.base, casted_body, &block_scope) catch |err| switch (err) {
error.OutOfMemory => |e| return e,
error.UnsupportedTranslation,
error.UnsupportedType,
=> return failDecl(c, fn_decl_loc, fn_name, "unable to translate function", .{}),
};
- block_node.rbrace = try appendToken(rp.c, .RBrace, "}");
- proto_node.body_node = &block_node.base;
+ const body_node = try block_scope.complete(rp.c);
+ proto_node.body_node = &body_node.base;
return addTopLevelDecl(c, fn_name, &proto_node.base);
}
@@ -560,7 +642,7 @@ fn visitVarDecl(c: *Context, var_decl: *const ZigClangVarDecl) Error!void {
// TODO https://github.com/ziglang/zig/issues/3756
// TODO https://github.com/ziglang/zig/issues/1802
- const checked_name = if (isZigPrimitiveType(var_name)) try std.fmt.allocPrint(c.a(), "{}_{}", .{ var_name, c.getMangle() }) else var_name;
+ const checked_name = if (isZigPrimitiveType(var_name)) try std.fmt.allocPrint(c.arena, "{}_{}", .{ var_name, c.getMangle() }) else var_name;
const var_decl_loc = ZigClangVarDecl_getLocation(var_decl);
const qual_type = ZigClangVarDecl_getTypeSourceInfo_getType(var_decl);
@@ -620,7 +702,7 @@ fn visitVarDecl(c: *Context, var_decl: *const ZigClangVarDecl) Error!void {
_ = try appendToken(rp.c, .LParen, "(");
const expr = try transCreateNodeStringLiteral(
rp.c,
- try std.fmt.allocPrint(rp.c.a(), "\"{}\"", .{str_ptr[0..str_len]}),
+ try std.fmt.allocPrint(rp.c.arena, "\"{}\"", .{str_ptr[0..str_len]}),
);
_ = try appendToken(rp.c, .RParen, ")");
@@ -643,7 +725,7 @@ fn visitVarDecl(c: *Context, var_decl: *const ZigClangVarDecl) Error!void {
break :blk null;
};
- const node = try c.a().create(ast.Node.VarDecl);
+ const node = try c.arena.create(ast.Node.VarDecl);
node.* = .{
.doc_comments = null,
.visib_token = visib_tok,
@@ -702,7 +784,7 @@ fn transTypeDef(c: *Context, typedef_decl: *const ZigClangTypedefNameDecl, top_l
// TODO https://github.com/ziglang/zig/issues/3756
// TODO https://github.com/ziglang/zig/issues/1802
- const checked_name = if (isZigPrimitiveType(typedef_name)) try std.fmt.allocPrint(c.a(), "{}_{}", .{ typedef_name, c.getMangle() }) else typedef_name;
+ const checked_name = if (isZigPrimitiveType(typedef_name)) try std.fmt.allocPrint(c.arena, "{}_{}", .{ typedef_name, c.getMangle() }) else typedef_name;
if (checkForBuiltinTypedef(checked_name)) |builtin| {
return transTypeDefAsBuiltin(c, typedef_decl, builtin);
}
@@ -745,7 +827,7 @@ fn transRecordDecl(c: *Context, record_decl: *const ZigClangRecordDecl) Error!?*
// Record declarations such as `struct {...} x` have no name but they're not
// anonymous hence here isAnonymousStructOrUnion is not needed
if (bare_name.len == 0) {
- bare_name = try std.fmt.allocPrint(c.a(), "unnamed_{}", .{c.getMangle()});
+ bare_name = try std.fmt.allocPrint(c.arena, "unnamed_{}", .{c.getMangle()});
is_unnamed = true;
}
@@ -762,7 +844,7 @@ fn transRecordDecl(c: *Context, record_decl: *const ZigClangRecordDecl) Error!?*
return null;
}
- const name = try std.fmt.allocPrint(c.a(), "{}_{}", .{ container_kind_name, bare_name });
+ const name = try std.fmt.allocPrint(c.arena, "{}_{}", .{ container_kind_name, bare_name });
_ = try c.decl_table.put(@ptrToInt(ZigClangRecordDecl_getCanonicalDecl(record_decl)), name);
const node = try transCreateNodeVarDecl(c, !is_unnamed, true, name);
@@ -785,15 +867,8 @@ fn transRecordDecl(c: *Context, record_decl: *const ZigClangRecordDecl) Error!?*
const container_tok = try appendToken(c, container_kind, container_kind_name);
const lbrace_token = try appendToken(c, .LBrace, "{");
- const container_node = try c.a().create(ast.Node.ContainerDecl);
- container_node.* = .{
- .layout_token = layout_tok,
- .kind_token = container_tok,
- .init_arg_expr = .None,
- .fields_and_decls = ast.Node.ContainerDecl.DeclList.init(c.a()),
- .lbrace_token = lbrace_token,
- .rbrace_token = undefined,
- };
+ var fields_and_decls = std.ArrayList(*ast.Node).init(c.gpa);
+ defer fields_and_decls.deinit();
var unnamed_field_count: u32 = 0;
var it = ZigClangRecordDecl_field_begin(record_def);
@@ -821,7 +896,7 @@ fn transRecordDecl(c: *Context, record_decl: *const ZigClangRecordDecl) Error!?*
var raw_name = try c.str(ZigClangNamedDecl_getName_bytes_begin(@ptrCast(*const ZigClangNamedDecl, field_decl)));
if (ZigClangFieldDecl_isAnonymousStructOrUnion(field_decl) or raw_name.len == 0) {
// Context.getMangle() is not used here because doing so causes unpredictable field names for anonymous fields.
- raw_name = try std.fmt.allocPrint(c.a(), "unnamed_{}", .{unnamed_field_count});
+ raw_name = try std.fmt.allocPrint(c.arena, "unnamed_{}", .{unnamed_field_count});
unnamed_field_count += 1;
is_anon = true;
}
@@ -851,7 +926,7 @@ fn transRecordDecl(c: *Context, record_decl: *const ZigClangRecordDecl) Error!?*
break :blk null;
};
- const field_node = try c.a().create(ast.Node.ContainerField);
+ const field_node = try c.arena.create(ast.Node.ContainerField);
field_node.* = .{
.doc_comments = null,
.comptime_token = null,
@@ -868,10 +943,19 @@ fn transRecordDecl(c: *Context, record_decl: *const ZigClangRecordDecl) Error!?*
);
}
- try container_node.fields_and_decls.push(&field_node.base);
+ try fields_and_decls.append(&field_node.base);
_ = try appendToken(c, .Comma, ",");
}
- container_node.rbrace_token = try appendToken(c, .RBrace, "}");
+ const container_node = try ast.Node.ContainerDecl.alloc(c.arena, fields_and_decls.items.len);
+ container_node.* = .{
+ .layout_token = layout_tok,
+ .kind_token = container_tok,
+ .init_arg_expr = .None,
+ .fields_and_decls_len = fields_and_decls.items.len,
+ .lbrace_token = lbrace_token,
+ .rbrace_token = try appendToken(c, .RBrace, "}"),
+ };
+ mem.copy(*ast.Node, container_node.fieldsAndDecls(), fields_and_decls.items);
semicolon = try appendToken(c, .Semicolon, ";");
break :blk &container_node.base;
};
@@ -879,7 +963,7 @@ fn transRecordDecl(c: *Context, record_decl: *const ZigClangRecordDecl) Error!?*
try addTopLevelDecl(c, name, &node.base);
if (!is_unnamed)
- try c.alias_list.push(.{ .alias = bare_name, .name = name });
+ try c.alias_list.append(.{ .alias = bare_name, .name = name });
return transCreateNodeIdentifier(c, name);
}
@@ -892,11 +976,11 @@ fn transEnumDecl(c: *Context, enum_decl: *const ZigClangEnumDecl) Error!?*ast.No
var bare_name = try c.str(ZigClangNamedDecl_getName_bytes_begin(@ptrCast(*const ZigClangNamedDecl, enum_decl)));
var is_unnamed = false;
if (bare_name.len == 0) {
- bare_name = try std.fmt.allocPrint(c.a(), "unnamed_{}", .{c.getMangle()});
+ bare_name = try std.fmt.allocPrint(c.arena, "unnamed_{}", .{c.getMangle()});
is_unnamed = true;
}
- const name = try std.fmt.allocPrint(c.a(), "enum_{}", .{bare_name});
+ const name = try std.fmt.allocPrint(c.arena, "enum_{}", .{bare_name});
_ = try c.decl_table.put(@ptrToInt(ZigClangEnumDecl_getCanonicalDecl(enum_decl)), name);
const node = try transCreateNodeVarDecl(c, !is_unnamed, true, name);
node.eq_token = try appendToken(c, .Equal, "=");
@@ -916,15 +1000,8 @@ fn transEnumDecl(c: *Context, enum_decl: *const ZigClangEnumDecl) Error!?*ast.No
const extern_tok = try appendToken(c, .Keyword_extern, "extern");
const container_tok = try appendToken(c, .Keyword_enum, "enum");
- const container_node = try c.a().create(ast.Node.ContainerDecl);
- container_node.* = .{
- .layout_token = extern_tok,
- .kind_token = container_tok,
- .init_arg_expr = .None,
- .fields_and_decls = ast.Node.ContainerDecl.DeclList.init(c.a()),
- .lbrace_token = undefined,
- .rbrace_token = undefined,
- };
+ var fields_and_decls = std.ArrayList(*ast.Node).init(c.gpa);
+ defer fields_and_decls.deinit();
const int_type = ZigClangEnumDecl_getIntegerType(enum_decl);
// The underlying type may be null in case of forward-declared enum
@@ -933,7 +1010,7 @@ fn transEnumDecl(c: *Context, enum_decl: *const ZigClangEnumDecl) Error!?*ast.No
// default to c_int since msvc and gcc default to different types
_ = try appendToken(c, .LParen, "(");
- container_node.init_arg_expr = .{
+ const init_arg_expr = ast.Node.ContainerDecl.InitArg{
.Type = if (int_type.ptr != null and
!isCBuiltinType(int_type, .UInt) and
!isCBuiltinType(int_type, .Int))
@@ -949,7 +1026,7 @@ fn transEnumDecl(c: *Context, enum_decl: *const ZigClangEnumDecl) Error!?*ast.No
};
_ = try appendToken(c, .RParen, ")");
- container_node.lbrace_token = try appendToken(c, .LBrace, "{");
+ const lbrace_token = try appendToken(c, .LBrace, "{");
it = ZigClangEnumDecl_enumerator_begin(enum_def);
end_it = ZigClangEnumDecl_enumerator_end(enum_def);
@@ -971,7 +1048,7 @@ fn transEnumDecl(c: *Context, enum_decl: *const ZigClangEnumDecl) Error!?*ast.No
} else
null;
- const field_node = try c.a().create(ast.Node.ContainerField);
+ const field_node = try c.arena.create(ast.Node.ContainerField);
field_node.* = .{
.doc_comments = null,
.comptime_token = null,
@@ -981,32 +1058,32 @@ fn transEnumDecl(c: *Context, enum_decl: *const ZigClangEnumDecl) Error!?*ast.No
.align_expr = null,
};
- try container_node.fields_and_decls.push(&field_node.base);
+ try fields_and_decls.append(&field_node.base);
_ = try appendToken(c, .Comma, ",");
// In C each enum value is in the global namespace. So we put them there too.
// At this point we can rely on the enum emitting successfully.
const tld_node = try transCreateNodeVarDecl(c, true, true, enum_val_name);
tld_node.eq_token = try appendToken(c, .Equal, "=");
- const cast_node = try transCreateNodeBuiltinFnCall(rp.c, "@enumToInt");
+ const cast_node = try rp.c.createBuiltinCall("@enumToInt", 1);
const enum_ident = try transCreateNodeIdentifier(c, name);
const period_tok = try appendToken(c, .Period, ".");
const field_ident = try transCreateNodeIdentifier(c, field_name);
- const field_access_node = try c.a().create(ast.Node.InfixOp);
+ const field_access_node = try c.arena.create(ast.Node.InfixOp);
field_access_node.* = .{
.op_token = period_tok,
.lhs = enum_ident,
.op = .Period,
.rhs = field_ident,
};
- try cast_node.params.push(&field_access_node.base);
+ cast_node.params()[0] = &field_access_node.base;
cast_node.rparen_token = try appendToken(rp.c, .RParen, ")");
tld_node.init_node = &cast_node.base;
tld_node.semicolon_token = try appendToken(c, .Semicolon, ";");
try addTopLevelDecl(c, field_name, &tld_node.base);
}
// make non exhaustive
- const field_node = try c.a().create(ast.Node.ContainerField);
+ const field_node = try c.arena.create(ast.Node.ContainerField);
field_node.* = .{
.doc_comments = null,
.comptime_token = null,
@@ -1016,10 +1093,18 @@ fn transEnumDecl(c: *Context, enum_decl: *const ZigClangEnumDecl) Error!?*ast.No
.align_expr = null,
};
- try container_node.fields_and_decls.push(&field_node.base);
+ try fields_and_decls.append(&field_node.base);
_ = try appendToken(c, .Comma, ",");
- container_node.rbrace_token = try appendToken(c, .RBrace, "}");
-
+ const container_node = try ast.Node.ContainerDecl.alloc(c.arena, fields_and_decls.items.len);
+ container_node.* = .{
+ .layout_token = extern_tok,
+ .kind_token = container_tok,
+ .init_arg_expr = init_arg_expr,
+ .fields_and_decls_len = fields_and_decls.items.len,
+ .lbrace_token = lbrace_token,
+ .rbrace_token = try appendToken(c, .RBrace, "}"),
+ };
+ mem.copy(*ast.Node, container_node.fieldsAndDecls(), fields_and_decls.items);
break :blk &container_node.base;
} else
try transCreateNodeOpaqueType(c);
@@ -1028,7 +1113,7 @@ fn transEnumDecl(c: *Context, enum_decl: *const ZigClangEnumDecl) Error!?*ast.No
try addTopLevelDecl(c, name, &node.base);
if (!is_unnamed)
- try c.alias_list.push(.{ .alias = bare_name, .name = name });
+ try c.alias_list.append(.{ .alias = bare_name, .name = name });
return transCreateNodeIdentifier(c, name);
}
@@ -1071,7 +1156,7 @@ fn transStmt(
.ParenExprClass => {
const expr = try transExpr(rp, scope, ZigClangParenExpr_getSubExpr(@ptrCast(*const ZigClangParenExpr, stmt)), .used, lrvalue);
if (expr.id == .GroupedExpression) return maybeSuppressResult(rp, scope, result_used, expr);
- const node = try rp.c.a().create(ast.Node.GroupedExpression);
+ const node = try rp.c.arena.create(ast.Node.GroupedExpression);
node.* = .{
.lparen = try appendToken(rp.c, .LParen, "("),
.expr = expr,
@@ -1085,7 +1170,7 @@ fn transStmt(
.WhileStmtClass => return transWhileLoop(rp, scope, @ptrCast(*const ZigClangWhileStmt, stmt)),
.DoStmtClass => return transDoWhileLoop(rp, scope, @ptrCast(*const ZigClangDoStmt, stmt)),
.NullStmtClass => {
- const block = try transCreateNodeBlock(rp.c, null);
+ const block = try rp.c.createBlock(null, 0);
block.rbrace = try appendToken(rp.c, .RBrace, "}");
return &block.base;
},
@@ -1116,7 +1201,7 @@ fn transStmt(
const source_expr = ZigClangOpaqueValueExpr_getSourceExpr(@ptrCast(*const ZigClangOpaqueValueExpr, stmt)).?;
const expr = try transExpr(rp, scope, source_expr, .used, lrvalue);
if (expr.id == .GroupedExpression) return maybeSuppressResult(rp, scope, result_used, expr);
- const node = try rp.c.a().create(ast.Node.GroupedExpression);
+ const node = try rp.c.arena.create(ast.Node.GroupedExpression);
node.* = .{
.lparen = try appendToken(rp.c, .LParen, "("),
.expr = expr,
@@ -1147,31 +1232,27 @@ fn transBinaryOperator(
var op_token: ast.TokenIndex = undefined;
var op_id: ast.Node.InfixOp.Op = undefined;
switch (op) {
- .Assign => return transCreateNodeAssign(rp, scope, result_used, ZigClangBinaryOperator_getLHS(stmt), ZigClangBinaryOperator_getRHS(stmt)),
+ .Assign => return try transCreateNodeAssign(rp, scope, result_used, ZigClangBinaryOperator_getLHS(stmt), ZigClangBinaryOperator_getRHS(stmt)),
.Comma => {
const block_scope = try scope.findBlockScope(rp.c);
const expr = block_scope.base.parent == scope;
- const lparen = if (expr) blk: {
- const l = try appendToken(rp.c, .LParen, "(");
- block_scope.block_node = try transCreateNodeBlock(rp.c, block_scope.label);
- break :blk l;
- } else undefined;
+ const lparen = if (expr) try appendToken(rp.c, .LParen, "(") else undefined;
const lhs = try transExpr(rp, &block_scope.base, ZigClangBinaryOperator_getLHS(stmt), .unused, .r_value);
- try block_scope.block_node.statements.push(lhs);
+ try block_scope.statements.append(lhs);
const rhs = try transExpr(rp, &block_scope.base, ZigClangBinaryOperator_getRHS(stmt), .used, .r_value);
if (expr) {
_ = try appendToken(rp.c, .Semicolon, ";");
- const break_node = try transCreateNodeBreak(rp.c, block_scope.label);
+ const break_node = try transCreateNodeBreakToken(rp.c, block_scope.label);
break_node.rhs = rhs;
- try block_scope.block_node.statements.push(&break_node.base);
- block_scope.block_node.rbrace = try appendToken(rp.c, .RBrace, "}");
+ try block_scope.statements.append(&break_node.base);
+ const block_node = try block_scope.complete(rp.c);
const rparen = try appendToken(rp.c, .RParen, ")");
- const grouped_expr = try rp.c.a().create(ast.Node.GroupedExpression);
+ const grouped_expr = try rp.c.arena.create(ast.Node.GroupedExpression);
grouped_expr.* = .{
.lparen = lparen,
- .expr = &block_scope.block_node.base,
+ .expr = &block_node.base,
.rparen = rparen,
};
return maybeSuppressResult(rp, scope, result_used, &grouped_expr.base);
@@ -1182,11 +1263,11 @@ fn transBinaryOperator(
.Div => {
if (cIsSignedInteger(qt)) {
// signed integer division uses @divTrunc
- const div_trunc_node = try transCreateNodeBuiltinFnCall(rp.c, "@divTrunc");
- try div_trunc_node.params.push(try transExpr(rp, scope, ZigClangBinaryOperator_getLHS(stmt), .used, .l_value));
+ const div_trunc_node = try rp.c.createBuiltinCall("@divTrunc", 2);
+ div_trunc_node.params()[0] = try transExpr(rp, scope, ZigClangBinaryOperator_getLHS(stmt), .used, .l_value);
_ = try appendToken(rp.c, .Comma, ",");
const rhs = try transExpr(rp, scope, ZigClangBinaryOperator_getRHS(stmt), .used, .r_value);
- try div_trunc_node.params.push(rhs);
+ div_trunc_node.params()[1] = rhs;
div_trunc_node.rparen_token = try appendToken(rp.c, .RParen, ")");
return maybeSuppressResult(rp, scope, result_used, &div_trunc_node.base);
}
@@ -1194,11 +1275,11 @@ fn transBinaryOperator(
.Rem => {
if (cIsSignedInteger(qt)) {
// signed integer division uses @rem
- const rem_node = try transCreateNodeBuiltinFnCall(rp.c, "@rem");
- try rem_node.params.push(try transExpr(rp, scope, ZigClangBinaryOperator_getLHS(stmt), .used, .l_value));
+ const rem_node = try rp.c.createBuiltinCall("@rem", 2);
+ rem_node.params()[0] = try transExpr(rp, scope, ZigClangBinaryOperator_getLHS(stmt), .used, .l_value);
_ = try appendToken(rp.c, .Comma, ",");
const rhs = try transExpr(rp, scope, ZigClangBinaryOperator_getRHS(stmt), .used, .r_value);
- try rem_node.params.push(rhs);
+ rem_node.params()[1] = rhs;
rem_node.rparen_token = try appendToken(rp.c, .RParen, ")");
return maybeSuppressResult(rp, scope, result_used, &rem_node.base);
}
@@ -1302,15 +1383,15 @@ fn transBinaryOperator(
const rhs_node = try transExpr(rp, scope, ZigClangBinaryOperator_getRHS(stmt), .used, .r_value);
const lhs = if (isBoolRes(lhs_node)) init: {
- const cast_node = try transCreateNodeBuiltinFnCall(rp.c, "@boolToInt");
- try cast_node.params.push(lhs_node);
+ const cast_node = try rp.c.createBuiltinCall("@boolToInt", 1);
+ cast_node.params()[0] = lhs_node;
cast_node.rparen_token = try appendToken(rp.c, .RParen, ")");
break :init &cast_node.base;
} else lhs_node;
const rhs = if (isBoolRes(rhs_node)) init: {
- const cast_node = try transCreateNodeBuiltinFnCall(rp.c, "@boolToInt");
- try cast_node.params.push(rhs_node);
+ const cast_node = try rp.c.createBuiltinCall("@boolToInt", 1);
+ cast_node.params()[0] = rhs_node;
cast_node.rparen_token = try appendToken(rp.c, .RParen, ")");
break :init &cast_node.base;
} else rhs_node;
@@ -1322,23 +1403,22 @@ fn transCompoundStmtInline(
rp: RestorePoint,
parent_scope: *Scope,
stmt: *const ZigClangCompoundStmt,
- block_node: *ast.Node.Block,
+ block: *Scope.Block,
) TransError!void {
var it = ZigClangCompoundStmt_body_begin(stmt);
const end_it = ZigClangCompoundStmt_body_end(stmt);
while (it != end_it) : (it += 1) {
const result = try transStmt(rp, parent_scope, it[0], .unused, .r_value);
- if (result != &block_node.base)
- try block_node.statements.push(result);
+ try block.statements.append(result);
}
}
fn transCompoundStmt(rp: RestorePoint, scope: *Scope, stmt: *const ZigClangCompoundStmt) TransError!*ast.Node {
- const block_scope = try Scope.Block.init(rp.c, scope, null);
- block_scope.block_node = try transCreateNodeBlock(rp.c, null);
- try transCompoundStmtInline(rp, &block_scope.base, stmt, block_scope.block_node);
- block_scope.block_node.rbrace = try appendToken(rp.c, .RBrace, "}");
- return &block_scope.block_node.base;
+ var block_scope = try Scope.Block.init(rp.c, scope, null);
+ defer block_scope.deinit();
+ try transCompoundStmtInline(rp, &block_scope.base, stmt, &block_scope);
+ const node = try block_scope.complete(rp.c);
+ return &node.base;
}
fn transCStyleCastExprClass(
@@ -1360,77 +1440,83 @@ fn transCStyleCastExprClass(
return maybeSuppressResult(rp, scope, result_used, cast_node);
}
-fn transDeclStmt(rp: RestorePoint, scope: *Scope, stmt: *const ZigClangDeclStmt) TransError!*ast.Node {
+fn transDeclStmtOne(rp: RestorePoint, scope: *Scope, decl: *const ZigClangDecl, block_scope: *Scope.Block,) TransError!*ast.Node {
const c = rp.c;
- const block_scope = scope.findBlockScope(c) catch unreachable;
+
+ switch (ZigClangDecl_getKind(decl)) {
+ .Var => {
+ const var_decl = @ptrCast(*const ZigClangVarDecl, decl);
+
+ const thread_local_token = if (ZigClangVarDecl_getTLSKind(var_decl) == .None)
+ null
+ else
+ try appendToken(c, .Keyword_threadlocal, "threadlocal");
+ const qual_type = ZigClangVarDecl_getTypeSourceInfo_getType(var_decl);
+ const name = try c.str(ZigClangNamedDecl_getName_bytes_begin(
+ @ptrCast(*const ZigClangNamedDecl, var_decl),
+ ));
+ const mangled_name = try block_scope.makeMangledName(c, name);
+ const node = try transCreateNodeVarDecl(c, false, ZigClangQualType_isConstQualified(qual_type), mangled_name);
+
+ _ = try appendToken(c, .Colon, ":");
+ const loc = ZigClangDecl_getLocation(decl);
+ node.type_node = try transQualType(rp, qual_type, loc);
+
+ node.eq_token = try appendToken(c, .Equal, "=");
+ var init_node = if (ZigClangVarDecl_getInit(var_decl)) |expr|
+ try transExprCoercing(rp, scope, expr, .used, .r_value)
+ else
+ try transCreateNodeUndefinedLiteral(c);
+ if (!qualTypeIsBoolean(qual_type) and isBoolRes(init_node)) {
+ const builtin_node = try rp.c.createBuiltinCall("@boolToInt", 1);
+ builtin_node.params()[0] = init_node;
+ builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")");
+ init_node = &builtin_node.base;
+ }
+ node.init_node = init_node;
+ node.semicolon_token = try appendToken(c, .Semicolon, ";");
+ return &node.base;
+ },
+ .Typedef => {
+ const typedef_decl = @ptrCast(*const ZigClangTypedefNameDecl, decl);
+ const name = try c.str(ZigClangNamedDecl_getName_bytes_begin(
+ @ptrCast(*const ZigClangNamedDecl, typedef_decl),
+ ));
+
+ const underlying_qual = ZigClangTypedefNameDecl_getUnderlyingType(typedef_decl);
+ const underlying_type = ZigClangQualType_getTypePtr(underlying_qual);
+
+ const mangled_name = try block_scope.makeMangledName(c, name);
+ const node = (try transCreateNodeTypedef(rp, typedef_decl, false, mangled_name)) orelse
+ return error.UnsupportedTranslation;
+ return &node.base;
+ },
+ else => |kind| return revertAndWarn(
+ rp,
+ error.UnsupportedTranslation,
+ ZigClangDecl_getLocation(decl),
+ "TODO implement translation of DeclStmt kind {}",
+ .{@tagName(kind)},
+ ),
+ }
+}
+
+fn transDeclStmt(rp: RestorePoint, scope: *Scope, stmt: *const ZigClangDeclStmt) TransError!*ast.Node {
+ const block_scope = scope.findBlockScope(rp.c) catch unreachable;
var it = ZigClangDeclStmt_decl_begin(stmt);
const end_it = ZigClangDeclStmt_decl_end(stmt);
- while (it != end_it) : (it += 1) {
- switch (ZigClangDecl_getKind(it[0])) {
- .Var => {
- const var_decl = @ptrCast(*const ZigClangVarDecl, it[0]);
+ assert(it != end_it);
+ while (true) : (it += 1) {
+ const node = try transDeclStmtOne(rp, scope, it[0], block_scope);
- const thread_local_token = if (ZigClangVarDecl_getTLSKind(var_decl) == .None)
- null
- else
- try appendToken(c, .Keyword_threadlocal, "threadlocal");
- const qual_type = ZigClangVarDecl_getTypeSourceInfo_getType(var_decl);
- const name = try c.str(ZigClangNamedDecl_getName_bytes_begin(
- @ptrCast(*const ZigClangNamedDecl, var_decl),
- ));
- const mangled_name = try block_scope.makeMangledName(c, name);
- const node = try transCreateNodeVarDecl(c, false, ZigClangQualType_isConstQualified(qual_type), mangled_name);
-
- _ = try appendToken(c, .Colon, ":");
- const loc = ZigClangStmt_getBeginLoc(@ptrCast(*const ZigClangStmt, stmt));
- node.type_node = try transQualType(rp, qual_type, loc);
-
- node.eq_token = try appendToken(c, .Equal, "=");
- var init_node = if (ZigClangVarDecl_getInit(var_decl)) |expr|
- try transExprCoercing(rp, scope, expr, .used, .r_value)
- else
- try transCreateNodeUndefinedLiteral(c);
- if (!qualTypeIsBoolean(qual_type) and isBoolRes(init_node)) {
- const builtin_node = try transCreateNodeBuiltinFnCall(rp.c, "@boolToInt");
- try builtin_node.params.push(init_node);
- builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- init_node = &builtin_node.base;
- }
- node.init_node = init_node;
- node.semicolon_token = try appendToken(c, .Semicolon, ";");
- try block_scope.block_node.statements.push(&node.base);
- },
- .Typedef => {
- const typedef_decl = @ptrCast(*const ZigClangTypedefNameDecl, it[0]);
- const name = try c.str(ZigClangNamedDecl_getName_bytes_begin(
- @ptrCast(*const ZigClangNamedDecl, typedef_decl),
- ));
-
- const underlying_qual = ZigClangTypedefNameDecl_getUnderlyingType(typedef_decl);
- const underlying_type = ZigClangQualType_getTypePtr(underlying_qual);
-
- const mangled_name = try block_scope.makeMangledName(c, name);
- if (checkForBuiltinTypedef(name)) |builtin| {
- try block_scope.variables.push(.{
- .alias = builtin,
- .name = mangled_name,
- });
- } else {
- const node = (try transCreateNodeTypedef(rp, typedef_decl, false, mangled_name)) orelse return error.UnsupportedTranslation;
- try block_scope.block_node.statements.push(&node.base);
- }
- },
- else => |kind| return revertAndWarn(
- rp,
- error.UnsupportedTranslation,
- ZigClangStmt_getBeginLoc(@ptrCast(*const ZigClangStmt, stmt)),
- "TODO implement translation of DeclStmt kind {}",
- .{@tagName(kind)},
- ),
+ if (it + 1 == end_it) {
+ return node;
+ } else {
+ try block_scope.statements.append(node);
}
}
- return &block_scope.block_node.base;
+ unreachable;
}
fn transDeclRefExpr(
@@ -1458,7 +1544,7 @@ fn transImplicitCastExpr(
switch (ZigClangImplicitCastExpr_getCastKind(expr)) {
.BitCast, .FloatingCast, .FloatingToIntegral, .IntegralToFloating, .IntegralCast, .PointerToIntegral, .IntegralToPointer => {
const sub_expr_node = try transExpr(rp, scope, sub_expr, .used, .r_value);
- return transCCast(rp, scope, ZigClangImplicitCastExpr_getBeginLoc(expr), dest_type, src_type, sub_expr_node);
+ return try transCCast(rp, scope, ZigClangImplicitCastExpr_getBeginLoc(expr), dest_type, src_type, sub_expr_node);
},
.LValueToRValue, .NoOp, .FunctionToPointerDecay => {
const sub_expr_node = try transExpr(rp, scope, sub_expr, .used, .r_value);
@@ -1480,8 +1566,8 @@ fn transImplicitCastExpr(
},
.PointerToBoolean => {
// @ptrToInt(val) != 0
- const ptr_to_int = try transCreateNodeBuiltinFnCall(rp.c, "@ptrToInt");
- try ptr_to_int.params.push(try transExpr(rp, scope, sub_expr, .used, .r_value));
+ const ptr_to_int = try rp.c.createBuiltinCall("@ptrToInt", 1);
+ ptr_to_int.params()[0] = try transExpr(rp, scope, sub_expr, .used, .r_value);
ptr_to_int.rparen_token = try appendToken(rp.c, .RParen, ")");
const op_token = try appendToken(rp.c, .BangEqual, "!=");
@@ -1539,7 +1625,7 @@ fn transBoolExpr(
if (grouped) {
const rparen = try appendToken(rp.c, .RParen, ")");
- const grouped_expr = try rp.c.a().create(ast.Node.GroupedExpression);
+ const grouped_expr = try rp.c.arena.create(ast.Node.GroupedExpression);
grouped_expr.* = .{
.lparen = lparen,
.expr = node,
@@ -1705,13 +1791,11 @@ fn transIntegerLiteral(
// @as(T, x)
const expr_base = @ptrCast(*const ZigClangExpr, expr);
- const as_node = try transCreateNodeBuiltinFnCall(rp.c, "@as");
+ const as_node = try rp.c.createBuiltinCall("@as", 2);
const ty_node = try transQualType(rp, ZigClangExpr_getType(expr_base), ZigClangExpr_getBeginLoc(expr_base));
- try as_node.params.push(ty_node);
+ as_node.params()[0] = ty_node;
_ = try appendToken(rp.c, .Comma, ",");
-
- const int_lit_node = try transCreateNodeAPInt(rp.c, ZigClangAPValue_getInt(&eval_result.Val));
- try as_node.params.push(int_lit_node);
+ as_node.params()[1] = try transCreateNodeAPInt(rp.c, ZigClangAPValue_getInt(&eval_result.Val));
as_node.rparen_token = try appendToken(rp.c, .RParen, ")");
return maybeSuppressResult(rp, scope, result_used, &as_node.base);
@@ -1747,13 +1831,13 @@ fn transStringLiteral(
len = 0;
for (str) |c| len += escapeChar(c, &char_buf).len;
- const buf = try rp.c.a().alloc(u8, len + "\"\"".len);
+ const buf = try rp.c.arena.alloc(u8, len + "\"\"".len);
buf[0] = '"';
writeEscapedString(buf[1..], str);
buf[buf.len - 1] = '"';
const token = try appendToken(rp.c, .StringLiteral, buf);
- const node = try rp.c.a().create(ast.Node.StringLiteral);
+ const node = try rp.c.arena.create(ast.Node.StringLiteral);
node.* = .{
.token = token,
};
@@ -1821,35 +1905,35 @@ fn transCCast(
// 2. Bit-cast to correct signed-ness
// @bitCast(dest_type, intermediate_value)
- const cast_node = try transCreateNodeBuiltinFnCall(rp.c, "@bitCast");
- try cast_node.params.push(try transQualType(rp, dst_type, loc));
+ const cast_node = try rp.c.createBuiltinCall("@bitCast", 2);
+ cast_node.params()[0] = try transQualType(rp, dst_type, loc);
_ = try appendToken(rp.c, .Comma, ",");
switch (cIntTypeCmp(dst_type, src_type)) {
.lt => {
// @truncate(SameSignSmallerInt, src_type)
- const trunc_node = try transCreateNodeBuiltinFnCall(rp.c, "@truncate");
+ const trunc_node = try rp.c.createBuiltinCall("@truncate", 2);
const ty_node = try transQualTypeIntWidthOf(rp.c, dst_type, cIsSignedInteger(src_type));
- try trunc_node.params.push(ty_node);
+ trunc_node.params()[0] = ty_node;
_ = try appendToken(rp.c, .Comma, ",");
- try trunc_node.params.push(expr);
+ trunc_node.params()[1] = expr;
trunc_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- try cast_node.params.push(&trunc_node.base);
+ cast_node.params()[1] = &trunc_node.base;
},
.gt => {
// @as(SameSignBiggerInt, src_type)
- const as_node = try transCreateNodeBuiltinFnCall(rp.c, "@as");
+ const as_node = try rp.c.createBuiltinCall("@as", 2);
const ty_node = try transQualTypeIntWidthOf(rp.c, dst_type, cIsSignedInteger(src_type));
- try as_node.params.push(ty_node);
+ as_node.params()[0] = ty_node;
_ = try appendToken(rp.c, .Comma, ",");
- try as_node.params.push(expr);
+ as_node.params()[1] = expr;
as_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- try cast_node.params.push(&as_node.base);
+ cast_node.params()[1] = &as_node.base;
},
.eq => {
- try cast_node.params.push(expr);
+ cast_node.params()[1] = expr;
},
}
cast_node.rparen_token = try appendToken(rp.c, .RParen, ")");
@@ -1857,46 +1941,46 @@ fn transCCast(
}
if (cIsInteger(dst_type) and qualTypeIsPtr(src_type)) {
// @intCast(dest_type, @ptrToInt(val))
- const cast_node = try transCreateNodeBuiltinFnCall(rp.c, "@intCast");
- try cast_node.params.push(try transQualType(rp, dst_type, loc));
+ const cast_node = try rp.c.createBuiltinCall("@intCast", 2);
+ cast_node.params()[0] = try transQualType(rp, dst_type, loc);
_ = try appendToken(rp.c, .Comma, ",");
- const builtin_node = try transCreateNodeBuiltinFnCall(rp.c, "@ptrToInt");
- try builtin_node.params.push(expr);
+ const builtin_node = try rp.c.createBuiltinCall("@ptrToInt", 1);
+ builtin_node.params()[0] = expr;
builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- try cast_node.params.push(&builtin_node.base);
+ cast_node.params()[1] = &builtin_node.base;
cast_node.rparen_token = try appendToken(rp.c, .RParen, ")");
return &cast_node.base;
}
if (cIsInteger(src_type) and qualTypeIsPtr(dst_type)) {
// @intToPtr(dest_type, val)
- const builtin_node = try transCreateNodeBuiltinFnCall(rp.c, "@intToPtr");
- try builtin_node.params.push(try transQualType(rp, dst_type, loc));
+ const builtin_node = try rp.c.createBuiltinCall("@intToPtr", 2);
+ builtin_node.params()[0] = try transQualType(rp, dst_type, loc);
_ = try appendToken(rp.c, .Comma, ",");
- try builtin_node.params.push(expr);
+ builtin_node.params()[1] = expr;
builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")");
return &builtin_node.base;
}
if (cIsFloating(src_type) and cIsFloating(dst_type)) {
- const builtin_node = try transCreateNodeBuiltinFnCall(rp.c, "@floatCast");
- try builtin_node.params.push(try transQualType(rp, dst_type, loc));
+ const builtin_node = try rp.c.createBuiltinCall("@floatCast", 2);
+ builtin_node.params()[0] = try transQualType(rp, dst_type, loc);
_ = try appendToken(rp.c, .Comma, ",");
- try builtin_node.params.push(expr);
+ builtin_node.params()[1] = expr;
builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")");
return &builtin_node.base;
}
if (cIsFloating(src_type) and !cIsFloating(dst_type)) {
- const builtin_node = try transCreateNodeBuiltinFnCall(rp.c, "@floatToInt");
- try builtin_node.params.push(try transQualType(rp, dst_type, loc));
+ const builtin_node = try rp.c.createBuiltinCall("@floatToInt", 2);
+ builtin_node.params()[0] = try transQualType(rp, dst_type, loc);
_ = try appendToken(rp.c, .Comma, ",");
- try builtin_node.params.push(expr);
+ builtin_node.params()[1] = expr;
builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")");
return &builtin_node.base;
}
if (!cIsFloating(src_type) and cIsFloating(dst_type)) {
- const builtin_node = try transCreateNodeBuiltinFnCall(rp.c, "@intToFloat");
- try builtin_node.params.push(try transQualType(rp, dst_type, loc));
+ const builtin_node = try rp.c.createBuiltinCall("@intToFloat", 2);
+ builtin_node.params()[0] = try transQualType(rp, dst_type, loc);
_ = try appendToken(rp.c, .Comma, ",");
- try builtin_node.params.push(expr);
+ builtin_node.params()[1] = expr;
builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")");
return &builtin_node.base;
}
@@ -1904,54 +1988,54 @@ fn transCCast(
!ZigClangType_isBooleanType(qualTypeCanon(dst_type)))
{
// @boolToInt returns either a comptime_int or a u1
- const builtin_node = try transCreateNodeBuiltinFnCall(rp.c, "@boolToInt");
- try builtin_node.params.push(expr);
+ const builtin_node = try rp.c.createBuiltinCall("@boolToInt", 1);
+ builtin_node.params()[0] = expr;
builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- const inner_cast_node = try transCreateNodeBuiltinFnCall(rp.c, "@intCast");
- try inner_cast_node.params.push(try transCreateNodeIdentifier(rp.c, "u1"));
+ const inner_cast_node = try rp.c.createBuiltinCall("@intCast", 2);
+ inner_cast_node.params()[0] = try transCreateNodeIdentifier(rp.c, "u1");
_ = try appendToken(rp.c, .Comma, ",");
- try inner_cast_node.params.push(&builtin_node.base);
+ inner_cast_node.params()[1] = &builtin_node.base;
inner_cast_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- const cast_node = try transCreateNodeBuiltinFnCall(rp.c, "@intCast");
- try cast_node.params.push(try transQualType(rp, dst_type, loc));
+ const cast_node = try rp.c.createBuiltinCall("@intCast", 2);
+ cast_node.params()[0] = try transQualType(rp, dst_type, loc);
_ = try appendToken(rp.c, .Comma, ",");
if (cIsSignedInteger(dst_type)) {
- const bitcast_node = try transCreateNodeBuiltinFnCall(rp.c, "@bitCast");
- try bitcast_node.params.push(try transCreateNodeIdentifier(rp.c, "i1"));
+ const bitcast_node = try rp.c.createBuiltinCall("@bitCast", 2);
+ bitcast_node.params()[0] = try transCreateNodeIdentifier(rp.c, "i1");
_ = try appendToken(rp.c, .Comma, ",");
- try bitcast_node.params.push(&inner_cast_node.base);
+ bitcast_node.params()[1] = &inner_cast_node.base;
bitcast_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- try cast_node.params.push(&bitcast_node.base);
+ cast_node.params()[1] = &bitcast_node.base;
} else {
- try cast_node.params.push(&inner_cast_node.base);
+ cast_node.params()[1] = &inner_cast_node.base;
}
cast_node.rparen_token = try appendToken(rp.c, .RParen, ")");
return &cast_node.base;
}
if (ZigClangQualType_getTypeClass(ZigClangQualType_getCanonicalType(dst_type)) == .Enum) {
- const builtin_node = try transCreateNodeBuiltinFnCall(rp.c, "@intToEnum");
- try builtin_node.params.push(try transQualType(rp, dst_type, loc));
+ const builtin_node = try rp.c.createBuiltinCall("@intToEnum", 2);
+ builtin_node.params()[0] = try transQualType(rp, dst_type, loc);
_ = try appendToken(rp.c, .Comma, ",");
- try builtin_node.params.push(expr);
+ builtin_node.params()[1] = expr;
builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")");
return &builtin_node.base;
}
if (ZigClangQualType_getTypeClass(ZigClangQualType_getCanonicalType(src_type)) == .Enum and
ZigClangQualType_getTypeClass(ZigClangQualType_getCanonicalType(dst_type)) != .Enum)
{
- const builtin_node = try transCreateNodeBuiltinFnCall(rp.c, "@enumToInt");
- try builtin_node.params.push(expr);
+ const builtin_node = try rp.c.createBuiltinCall("@enumToInt", 1);
+ builtin_node.params()[0] = expr;
builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")");
return &builtin_node.base;
}
- const cast_node = try transCreateNodeBuiltinFnCall(rp.c, "@as");
- try cast_node.params.push(try transQualType(rp, dst_type, loc));
+ const cast_node = try rp.c.createBuiltinCall("@as", 2);
+ cast_node.params()[0] = try transQualType(rp, dst_type, loc);
_ = try appendToken(rp.c, .Comma, ",");
- try cast_node.params.push(expr);
+ cast_node.params()[1] = expr;
cast_node.rparen_token = try appendToken(rp.c, .RParen, ")");
return &cast_node.base;
}
@@ -2014,7 +2098,10 @@ fn transInitListExprRecord(
const ty_node = try transType(rp, ty, loc);
const init_count = ZigClangInitListExpr_getNumInits(expr);
- var init_node = try transCreateNodeStructInitializer(rp.c, ty_node);
+ var field_inits = std.ArrayList(*ast.Node).init(rp.c.gpa);
+ defer field_inits.deinit();
+
+ _ = try appendToken(rp.c, .LBrace, "{");
var init_i: c_uint = 0;
var it = ZigClangRecordDecl_field_begin(record_def);
@@ -2038,26 +2125,31 @@ fn transInitListExprRecord(
var raw_name = try rp.c.str(ZigClangNamedDecl_getName_bytes_begin(@ptrCast(*const ZigClangNamedDecl, field_decl)));
if (ZigClangFieldDecl_isAnonymousStructOrUnion(field_decl)) {
const name = rp.c.decl_table.get(@ptrToInt(ZigClangFieldDecl_getCanonicalDecl(field_decl))).?;
- raw_name = try mem.dupe(rp.c.a(), u8, name.value);
+ raw_name = try mem.dupe(rp.c.arena, u8, name.value);
}
const field_name_tok = try appendIdentifier(rp.c, raw_name);
_ = try appendToken(rp.c, .Equal, "=");
- const field_init_node = try rp.c.a().create(ast.Node.FieldInitializer);
+ const field_init_node = try rp.c.arena.create(ast.Node.FieldInitializer);
field_init_node.* = .{
.period_token = period_tok,
.name_token = field_name_tok,
.expr = try transExpr(rp, scope, elem_expr, .used, .r_value),
};
- try init_node.op.StructInitializer.push(&field_init_node.base);
+ try field_inits.append(&field_init_node.base);
_ = try appendToken(rp.c, .Comma, ",");
}
- init_node.rtoken = try appendToken(rp.c, .RBrace, "}");
-
- return &init_node.base;
+ const node = try ast.Node.StructInitializer.alloc(rp.c.arena, field_inits.items.len);
+ node.* = .{
+ .lhs = ty_node,
+ .rtoken = try appendToken(rp.c, .RBrace, "}"),
+ .list_len = field_inits.items.len,
+ };
+ mem.copy(*ast.Node, node.list(), field_inits.items);
+ return &node.base;
}
fn transCreateNodeArrayType(
@@ -2100,7 +2192,7 @@ fn transInitListExprArray(
const all_count = ZigClangAPInt_getLimitedValue(size_ap_int, math.maxInt(usize));
const leftover_count = all_count - init_count;
- var init_node: *ast.Node.SuffixOp = undefined;
+ var init_node: *ast.Node.ArrayInitializer = undefined;
var cat_tok: ast.TokenIndex = undefined;
if (init_count != 0) {
const ty_node = try transCreateNodeArrayType(
@@ -2109,11 +2201,19 @@ fn transInitListExprArray(
ZigClangQualType_getTypePtr(child_qt),
init_count,
);
- init_node = try transCreateNodeArrayInitializer(rp.c, ty_node);
+ _ = try appendToken(rp.c, .LBrace, "{");
+ init_node = try ast.Node.ArrayInitializer.alloc(rp.c.arena, init_count);
+ init_node.* = .{
+ .lhs = ty_node,
+ .rtoken = undefined,
+ .list_len = init_count,
+ };
+ const init_list = init_node.list();
+
var i: c_uint = 0;
while (i < init_count) : (i += 1) {
const elem_expr = ZigClangInitListExpr_getInit(expr, i);
- try init_node.op.ArrayInitializer.push(try transExpr(rp, scope, elem_expr, .used, .r_value));
+ init_list[i] = try transExpr(rp, scope, elem_expr, .used, .r_value);
_ = try appendToken(rp.c, .Comma, ",");
}
init_node.rtoken = try appendToken(rp.c, .RBrace, "}");
@@ -2124,16 +2224,22 @@ fn transInitListExprArray(
}
const ty_node = try transCreateNodeArrayType(rp, loc, ZigClangQualType_getTypePtr(child_qt), 1);
- var filler_init_node = try transCreateNodeArrayInitializer(rp.c, ty_node);
+ _ = try appendToken(rp.c, .LBrace, "{");
+ const filler_init_node = try ast.Node.ArrayInitializer.alloc(rp.c.arena, 1);
+ filler_init_node.* = .{
+ .lhs = ty_node,
+ .rtoken = undefined,
+ .list_len = 1,
+ };
const filler_val_expr = ZigClangInitListExpr_getArrayFiller(expr);
- try filler_init_node.op.ArrayInitializer.push(try transExpr(rp, scope, filler_val_expr, .used, .r_value));
+ filler_init_node.list()[0] = try transExpr(rp, scope, filler_val_expr, .used, .r_value);
filler_init_node.rtoken = try appendToken(rp.c, .RBrace, "}");
const rhs_node = if (leftover_count == 1)
&filler_init_node.base
else blk: {
const mul_tok = try appendToken(rp.c, .AsteriskAsterisk, "**");
- const mul_node = try rp.c.a().create(ast.Node.InfixOp);
+ const mul_node = try rp.c.arena.create(ast.Node.InfixOp);
mul_node.* = .{
.op_token = mul_tok,
.lhs = &filler_init_node.base,
@@ -2147,7 +2253,7 @@ fn transInitListExprArray(
return rhs_node;
}
- const cat_node = try rp.c.a().create(ast.Node.InfixOp);
+ const cat_node = try rp.c.arena.create(ast.Node.InfixOp);
cat_node.* = .{
.op_token = cat_tok,
.lhs = &init_node.base,
@@ -2266,11 +2372,15 @@ fn transIfStmt(
// if (c) t else e
const if_node = try transCreateNodeIf(rp.c);
- var cond_scope = Scope{
- .parent = scope,
- .id = .Condition,
+ var cond_scope = Scope.Condition{
+ .base = .{
+ .parent = scope,
+ .id = .Condition,
+ },
};
- if_node.condition = try transBoolExpr(rp, &cond_scope, @ptrCast(*const ZigClangExpr, ZigClangIfStmt_getCond(stmt)), .used, .r_value, false);
+ defer cond_scope.deinit();
+ const cond_expr = @ptrCast(*const ZigClangExpr, ZigClangIfStmt_getCond(stmt));
+ if_node.condition = try transBoolExpr(rp, &cond_scope.base, cond_expr, .used, .r_value, false);
_ = try appendToken(rp.c, .RParen, ")");
if_node.body = try transStmt(rp, scope, ZigClangIfStmt_getThen(stmt), .unused, .r_value);
@@ -2290,11 +2400,15 @@ fn transWhileLoop(
) TransError!*ast.Node {
const while_node = try transCreateNodeWhile(rp.c);
- var cond_scope = Scope{
- .parent = scope,
- .id = .Condition,
+ var cond_scope = Scope.Condition{
+ .base = .{
+ .parent = scope,
+ .id = .Condition,
+ },
};
- while_node.condition = try transBoolExpr(rp, &cond_scope, @ptrCast(*const ZigClangExpr, ZigClangWhileStmt_getCond(stmt)), .used, .r_value, false);
+ defer cond_scope.deinit();
+ const cond_expr = @ptrCast(*const ZigClangExpr, ZigClangWhileStmt_getCond(stmt));
+ while_node.condition = try transBoolExpr(rp, &cond_scope.base, cond_expr, .used, .r_value, false);
_ = try appendToken(rp.c, .RParen, ")");
var loop_scope = Scope{
@@ -2323,12 +2437,15 @@ fn transDoWhileLoop(
// if (!cond) break;
const if_node = try transCreateNodeIf(rp.c);
- var cond_scope = Scope{
- .parent = scope,
- .id = .Condition,
+ var cond_scope = Scope.Condition{
+ .base = .{
+ .parent = scope,
+ .id = .Condition,
+ },
};
+ defer cond_scope.deinit();
const prefix_op = try transCreateNodePrefixOp(rp.c, .BoolNot, .Bang, "!");
- prefix_op.rhs = try transBoolExpr(rp, &cond_scope, @ptrCast(*const ZigClangExpr, ZigClangDoStmt_getCond(stmt)), .used, .r_value, true);
+ prefix_op.rhs = try transBoolExpr(rp, &cond_scope.base, @ptrCast(*const ZigClangExpr, ZigClangDoStmt_getCond(stmt)), .used, .r_value, true);
_ = try appendToken(rp.c, .RParen, ")");
if_node.condition = &prefix_op.base;
if_node.body = &(try transCreateNodeBreak(rp.c, null)).base;
@@ -2345,7 +2462,8 @@ fn transDoWhileLoop(
// zig: b;
// zig: if (!cond) break;
// zig: }
- break :blk (try transStmt(rp, &loop_scope, ZigClangDoStmt_getBody(stmt), .unused, .r_value)).cast(ast.Node.Block).?;
+ const node = try transStmt(rp, &loop_scope, ZigClangDoStmt_getBody(stmt), .unused, .r_value);
+ break :blk node.cast(ast.Node.Block).?;
} else blk: {
// the C statement is without a block, so we need to create a block to contain it.
// c: do
@@ -2356,12 +2474,15 @@ fn transDoWhileLoop(
// zig: if (!cond) break;
// zig: }
new = true;
- const block = try transCreateNodeBlock(rp.c, null);
- try block.statements.push(try transStmt(rp, &loop_scope, ZigClangDoStmt_getBody(stmt), .unused, .r_value));
+ const block = try rp.c.createBlock(null, 2);
+ block.statements_len = 1; // over-allocated so we can add another below
+ block.statements()[0] = try transStmt(rp, &loop_scope, ZigClangDoStmt_getBody(stmt), .unused, .r_value);
break :blk block;
};
- try body_node.statements.push(&if_node.base);
+ // In both cases above, we reserved 1 extra statement.
+ body_node.statements_len += 1;
+ body_node.statements()[body_node.statements_len - 1] = &if_node.base;
if (new)
body_node.rbrace = try appendToken(rp.c, .RBrace, "}");
while_node.body = &body_node.base;
@@ -2378,24 +2499,26 @@ fn transForLoop(
.id = .Loop,
};
- var block_scope: ?*Scope.Block = null;
+ var block_scope: ?Scope.Block = null;
+ defer if (block_scope) |*bs| bs.deinit();
+
if (ZigClangForStmt_getInit(stmt)) |init| {
block_scope = try Scope.Block.init(rp.c, scope, null);
- const block = try transCreateNodeBlock(rp.c, null);
- block_scope.?.block_node = block;
loop_scope.parent = &block_scope.?.base;
- const result = try transStmt(rp, &block_scope.?.base, init, .unused, .r_value);
- if (result != &block.base)
- try block.statements.push(result);
+ const init_node = try transStmt(rp, &block_scope.?.base, init, .unused, .r_value);
+ try block_scope.?.statements.append(init_node);
}
- var cond_scope = Scope{
- .parent = scope,
- .id = .Condition,
+ var cond_scope = Scope.Condition{
+ .base = .{
+ .parent = scope,
+ .id = .Condition,
+ },
};
+ defer cond_scope.deinit();
const while_node = try transCreateNodeWhile(rp.c);
while_node.condition = if (ZigClangForStmt_getCond(stmt)) |cond|
- try transBoolExpr(rp, &cond_scope, cond, .used, .r_value, false)
+ try transBoolExpr(rp, &cond_scope.base, cond, .used, .r_value, false)
else
try transCreateNodeBoolLiteral(rp.c, true);
_ = try appendToken(rp.c, .RParen, ")");
@@ -2403,61 +2526,92 @@ fn transForLoop(
if (ZigClangForStmt_getInc(stmt)) |incr| {
_ = try appendToken(rp.c, .Colon, ":");
_ = try appendToken(rp.c, .LParen, "(");
- while_node.continue_expr = try transExpr(rp, &cond_scope, incr, .unused, .r_value);
+ while_node.continue_expr = try transExpr(rp, &cond_scope.base, incr, .unused, .r_value);
_ = try appendToken(rp.c, .RParen, ")");
}
while_node.body = try transStmt(rp, &loop_scope, ZigClangForStmt_getBody(stmt), .unused, .r_value);
- if (block_scope != null) {
- try block_scope.?.block_node.statements.push(&while_node.base);
- block_scope.?.block_node.rbrace = try appendToken(rp.c, .RBrace, "}");
- return &block_scope.?.block_node.base;
+ if (block_scope) |*bs| {
+ try bs.statements.append(&while_node.base);
+ const node = try bs.complete(rp.c);
+ return &node.base;
} else {
_ = try appendToken(rp.c, .Semicolon, ";");
return &while_node.base;
}
}
+fn getSwitchCaseCount(stmt: *const ZigClangSwitchStmt) usize {
+ const body = ZigClangSwitchStmt_getBody(stmt);
+ assert(ZigClangStmt_getStmtClass(body) == .CompoundStmtClass);
+ const comp = @ptrCast(*const ZigClangCompoundStmt, body);
+ // TODO https://github.com/ziglang/zig/issues/1738
+ // return ZigClangCompoundStmt_body_end(comp) - ZigClangCompoundStmt_body_begin(comp);
+ const start_addr = @ptrToInt(ZigClangCompoundStmt_body_begin(comp));
+ const end_addr = @ptrToInt(ZigClangCompoundStmt_body_end(comp));
+ return (end_addr - start_addr) / @sizeOf(*ZigClangStmt);
+}
+
fn transSwitch(
rp: RestorePoint,
scope: *Scope,
stmt: *const ZigClangSwitchStmt,
) TransError!*ast.Node {
- const switch_node = try transCreateNodeSwitch(rp.c);
+ const switch_tok = try appendToken(rp.c, .Keyword_switch, "switch");
+ _ = try appendToken(rp.c, .LParen, "(");
+
+ const cases_len = getSwitchCaseCount(stmt);
+
+ var cond_scope = Scope.Condition{
+ .base = .{
+ .parent = scope,
+ .id = .Condition,
+ },
+ };
+ defer cond_scope.deinit();
+ const switch_expr = try transExpr(rp, &cond_scope.base, ZigClangSwitchStmt_getCond(stmt), .used, .r_value);
+ _ = try appendToken(rp.c, .RParen, ")");
+ _ = try appendToken(rp.c, .LBrace, "{");
+ // reserve +1 case in case there is no default case
+ const switch_node = try ast.Node.Switch.alloc(rp.c.arena, cases_len + 1);
+ switch_node.* = .{
+ .switch_token = switch_tok,
+ .expr = switch_expr,
+ .cases_len = cases_len + 1,
+ .rbrace = try appendToken(rp.c, .RBrace, "}"),
+ };
+
var switch_scope = Scope.Switch{
.base = .{
.id = .Switch,
.parent = scope,
},
- .cases = &switch_node.cases,
+ .cases = switch_node.cases(),
+ .case_index = 0,
.pending_block = undefined,
};
- var cond_scope = Scope{
- .parent = scope,
- .id = .Condition,
- };
- switch_node.expr = try transExpr(rp, &cond_scope, ZigClangSwitchStmt_getCond(stmt), .used, .r_value);
- _ = try appendToken(rp.c, .RParen, ")");
- _ = try appendToken(rp.c, .LBrace, "{");
- switch_node.rbrace = try appendToken(rp.c, .RBrace, "}");
-
- const block_scope = try Scope.Block.init(rp.c, &switch_scope.base, null);
// tmp block that all statements will go before being picked up by a case or default
- const block = try transCreateNodeBlock(rp.c, null);
- block_scope.block_node = block;
+ var block_scope = try Scope.Block.init(rp.c, &switch_scope.base, null);
+ defer block_scope.deinit();
- const switch_block = try transCreateNodeBlock(rp.c, null);
- try switch_block.statements.push(&switch_node.base);
- switch_scope.pending_block = switch_block;
+ // Note that we do not defer a deinit here; the switch_scope.pending_block field
+ // has its own memory management. This resource is freed inside `transCase` and
+ // then the final pending_block is freed at the bottom of this function with
+ // pending_block.deinit().
+ switch_scope.pending_block = try Scope.Block.init(rp.c, scope, null);
+ try switch_scope.pending_block.statements.append(&switch_node.base);
const last = try transStmt(rp, &block_scope.base, ZigClangSwitchStmt_getBody(stmt), .unused, .r_value);
_ = try appendToken(rp.c, .Semicolon, ";");
// take all pending statements
- var it = last.cast(ast.Node.Block).?.statements.iterator(0);
- while (it.next()) |n| {
- try switch_scope.pending_block.statements.push(n.*);
+ const last_block_stmts = last.cast(ast.Node.Block).?.statements();
+ try switch_scope.pending_block.statements.ensureCapacity(
+ switch_scope.pending_block.statements.items.len + last_block_stmts.len,
+ );
+ for (last_block_stmts) |n| {
+ switch_scope.pending_block.statements.appendAssumeCapacity(n);
}
switch_scope.pending_block.label = try appendIdentifier(rp.c, "__switch");
@@ -2466,10 +2620,16 @@ fn transSwitch(
const else_prong = try transCreateNodeSwitchCase(rp.c, try transCreateNodeSwitchElse(rp.c));
else_prong.expr = &(try transCreateNodeBreak(rp.c, "__switch")).base;
_ = try appendToken(rp.c, .Comma, ",");
- try switch_node.cases.push(&else_prong.base);
+ switch_scope.cases[switch_scope.case_index] = &else_prong.base;
+ switch_scope.case_index += 1;
}
- switch_scope.pending_block.rbrace = try appendToken(rp.c, .RBrace, "}");
- return &switch_scope.pending_block.base;
+ // We overallocated in case there was no default, so now we correct
+ // the number of cases in the AST node.
+ switch_node.cases_len = switch_scope.case_index;
+
+ const result_node = try switch_scope.pending_block.complete(rp.c);
+ switch_scope.pending_block.deinit();
+ return &result_node.base;
}
fn transCase(
@@ -2479,7 +2639,7 @@ fn transCase(
) TransError!*ast.Node {
const block_scope = scope.findBlockScope(rp.c) catch unreachable;
const switch_scope = scope.getSwitch();
- const label = try std.fmt.allocPrint(rp.c.a(), "__case_{}", .{switch_scope.cases.len - @boolToInt(switch_scope.has_default)});
+ const label = try std.fmt.allocPrint(rp.c.arena, "__case_{}", .{switch_scope.case_index - @boolToInt(switch_scope.has_default)});
_ = try appendToken(rp.c, .Semicolon, ";");
const expr = if (ZigClangCaseStmt_getRHS(stmt)) |rhs| blk: {
@@ -2487,7 +2647,7 @@ fn transCase(
const ellips = try appendToken(rp.c, .Ellipsis3, "...");
const rhs_node = try transExpr(rp, scope, rhs, .used, .r_value);
- const node = try rp.c.a().create(ast.Node.InfixOp);
+ const node = try rp.c.arena.create(ast.Node.InfixOp);
node.* = .{
.op_token = ellips,
.lhs = lhs_node,
@@ -2501,22 +2661,21 @@ fn transCase(
const switch_prong = try transCreateNodeSwitchCase(rp.c, expr);
switch_prong.expr = &(try transCreateNodeBreak(rp.c, label)).base;
_ = try appendToken(rp.c, .Comma, ",");
- try switch_scope.cases.push(&switch_prong.base);
+ switch_scope.cases[switch_scope.case_index] = &switch_prong.base;
+ switch_scope.case_index += 1;
- const block = try transCreateNodeBlock(rp.c, null);
switch_scope.pending_block.label = try appendIdentifier(rp.c, label);
_ = try appendToken(rp.c, .Colon, ":");
- switch_scope.pending_block.rbrace = try appendToken(rp.c, .RBrace, "}");
- try block.statements.push(&switch_scope.pending_block.base);
// take all pending statements
- var it = block_scope.block_node.statements.iterator(0);
- while (it.next()) |n| {
- try switch_scope.pending_block.statements.push(n.*);
- }
- block_scope.block_node.statements.shrink(0);
+ try switch_scope.pending_block.statements.appendSlice(block_scope.statements.items);
+ block_scope.statements.shrink(0);
- switch_scope.pending_block = block;
+ const pending_node = try switch_scope.pending_block.complete(rp.c);
+ switch_scope.pending_block.deinit();
+ switch_scope.pending_block = try Scope.Block.init(rp.c, scope, null);
+
+ try switch_scope.pending_block.statements.append(&pending_node.base);
return transStmt(rp, scope, ZigClangCaseStmt_getSubStmt(stmt), .unused, .r_value);
}
@@ -2535,22 +2694,21 @@ fn transDefault(
const else_prong = try transCreateNodeSwitchCase(rp.c, try transCreateNodeSwitchElse(rp.c));
else_prong.expr = &(try transCreateNodeBreak(rp.c, label)).base;
_ = try appendToken(rp.c, .Comma, ",");
- try switch_scope.cases.push(&else_prong.base);
+ switch_scope.cases[switch_scope.case_index] = &else_prong.base;
+ switch_scope.case_index += 1;
- const block = try transCreateNodeBlock(rp.c, null);
switch_scope.pending_block.label = try appendIdentifier(rp.c, label);
_ = try appendToken(rp.c, .Colon, ":");
- switch_scope.pending_block.rbrace = try appendToken(rp.c, .RBrace, "}");
- try block.statements.push(&switch_scope.pending_block.base);
// take all pending statements
- var it = block_scope.block_node.statements.iterator(0);
- while (it.next()) |n| {
- try switch_scope.pending_block.statements.push(n.*);
- }
- block_scope.block_node.statements.shrink(0);
+ try switch_scope.pending_block.statements.appendSlice(block_scope.statements.items);
+ block_scope.statements.shrink(0);
+
+ const pending_node = try switch_scope.pending_block.complete(rp.c);
+ switch_scope.pending_block.deinit();
+ switch_scope.pending_block = try Scope.Block.init(rp.c, scope, null);
+ try switch_scope.pending_block.statements.append(&pending_node.base);
- switch_scope.pending_block = block;
return transStmt(rp, scope, ZigClangDefaultStmt_getSubStmt(stmt), .unused, .r_value);
}
@@ -2565,13 +2723,13 @@ fn transConstantExpr(rp: RestorePoint, scope: *Scope, expr: *const ZigClangExpr,
// See comment in `transIntegerLiteral` for why this code is here.
// @as(T, x)
const expr_base = @ptrCast(*const ZigClangExpr, expr);
- const as_node = try transCreateNodeBuiltinFnCall(rp.c, "@as");
+ const as_node = try rp.c.createBuiltinCall("@as", 2);
const ty_node = try transQualType(rp, ZigClangExpr_getType(expr_base), ZigClangExpr_getBeginLoc(expr_base));
- try as_node.params.push(ty_node);
+ as_node.params()[0] = ty_node;
_ = try appendToken(rp.c, .Comma, ",");
const int_lit_node = try transCreateNodeAPInt(rp.c, ZigClangAPValue_getInt(&result.Val));
- try as_node.params.push(int_lit_node);
+ as_node.params()[1] = int_lit_node;
as_node.rparen_token = try appendToken(rp.c, .RParen, ")");
@@ -2606,7 +2764,7 @@ fn transCharLiteral(
}
var char_buf: [4]u8 = undefined;
const token = try appendTokenFmt(rp.c, .CharLiteral, "'{}'", .{escapeChar(@intCast(u8, val), &char_buf)});
- const node = try rp.c.a().create(ast.Node.CharLiteral);
+ const node = try rp.c.arena.create(ast.Node.CharLiteral);
node.* = .{
.token = token,
};
@@ -2627,12 +2785,11 @@ fn transCharLiteral(
// See comment in `transIntegerLiteral` for why this code is here.
// @as(T, x)
const expr_base = @ptrCast(*const ZigClangExpr, stmt);
- const as_node = try transCreateNodeBuiltinFnCall(rp.c, "@as");
+ const as_node = try rp.c.createBuiltinCall("@as", 2);
const ty_node = try transQualType(rp, ZigClangExpr_getType(expr_base), ZigClangExpr_getBeginLoc(expr_base));
- try as_node.params.push(ty_node);
+ as_node.params()[0] = ty_node;
_ = try appendToken(rp.c, .Comma, ",");
-
- try as_node.params.push(int_lit_node);
+ as_node.params()[1] = int_lit_node;
as_node.rparen_token = try appendToken(rp.c, .RParen, ")");
return maybeSuppressResult(rp, scope, result_used, &as_node.base);
@@ -2644,27 +2801,25 @@ fn transStmtExpr(rp: RestorePoint, scope: *Scope, stmt: *const ZigClangStmtExpr,
return transCompoundStmt(rp, scope, comp);
}
const lparen = try appendToken(rp.c, .LParen, "(");
- const block_scope = try Scope.Block.init(rp.c, scope, "blk");
- const block = try transCreateNodeBlock(rp.c, "blk");
- block_scope.block_node = block;
+ var block_scope = try Scope.Block.init(rp.c, scope, "blk");
+ defer block_scope.deinit();
var it = ZigClangCompoundStmt_body_begin(comp);
const end_it = ZigClangCompoundStmt_body_end(comp);
while (it != end_it - 1) : (it += 1) {
const result = try transStmt(rp, &block_scope.base, it[0], .unused, .r_value);
- if (result != &block.base)
- try block.statements.push(result);
+ try block_scope.statements.append(result);
}
const break_node = try transCreateNodeBreak(rp.c, "blk");
break_node.rhs = try transStmt(rp, &block_scope.base, it[0], .used, .r_value);
_ = try appendToken(rp.c, .Semicolon, ";");
- try block.statements.push(&break_node.base);
- block.rbrace = try appendToken(rp.c, .RBrace, "}");
+ try block_scope.statements.append(&break_node.base);
+ const block_node = try block_scope.complete(rp.c);
const rparen = try appendToken(rp.c, .RParen, ")");
- const grouped_expr = try rp.c.a().create(ast.Node.GroupedExpression);
+ const grouped_expr = try rp.c.arena.create(ast.Node.GroupedExpression);
grouped_expr.* = .{
.lparen = lparen,
- .expr = &block.base,
+ .expr = &block_node.base,
.rparen = rparen,
};
return maybeSuppressResult(rp, scope, used, &grouped_expr.base);
@@ -2686,7 +2841,7 @@ fn transMemberExpr(rp: RestorePoint, scope: *Scope, stmt: *const ZigClangMemberE
const field_decl = @ptrCast(*const struct_ZigClangFieldDecl, member_decl);
if (ZigClangFieldDecl_isAnonymousStructOrUnion(field_decl)) {
const name = rp.c.decl_table.get(@ptrToInt(ZigClangFieldDecl_getCanonicalDecl(field_decl))).?;
- break :blk try mem.dupe(rp.c.a(), u8, name.value);
+ break :blk try mem.dupe(rp.c.arena, u8, name.value);
}
}
const decl = @ptrCast(*const ZigClangNamedDecl, member_decl);
@@ -2720,12 +2875,12 @@ fn transArrayAccess(rp: RestorePoint, scope: *Scope, stmt: *const ZigClangArrayS
const is_signed = cIsSignedInteger(qt);
if (is_longlong or is_signed) {
- const cast_node = try transCreateNodeBuiltinFnCall(rp.c, "@intCast");
+ const cast_node = try rp.c.createBuiltinCall("@intCast", 2);
// check if long long first so that signed long long doesn't just become unsigned long long
var typeid_node = if (is_longlong) try transCreateNodeIdentifier(rp.c, "usize") else try transQualTypeIntWidthOf(rp.c, qt, false);
- try cast_node.params.push(typeid_node);
+ cast_node.params()[0] = typeid_node;
_ = try appendToken(rp.c, .Comma, ",");
- try cast_node.params.push(try transExpr(rp, scope, subscr_expr, .used, .r_value));
+ cast_node.params()[1] = try transExpr(rp, scope, subscr_expr, .used, .r_value);
cast_node.rparen_token = try appendToken(rp.c, .RParen, ")");
node.rtoken = try appendToken(rp.c, .RBrace, "]");
node.op.ArrayAccess = &cast_node.base;
@@ -2761,17 +2916,18 @@ fn transCallExpr(rp: RestorePoint, scope: *Scope, stmt: *const ZigClangCallExpr,
break :blk try transCreateNodeUnwrapNull(rp.c, raw_fn_expr);
} else
raw_fn_expr;
- const node = try transCreateNodeFnCall(rp.c, fn_expr);
const num_args = ZigClangCallExpr_getNumArgs(stmt);
+ const node = try rp.c.createCall(fn_expr, num_args);
+ const call_params = node.params();
+
const args = ZigClangCallExpr_getArgs(stmt);
var i: usize = 0;
while (i < num_args) : (i += 1) {
if (i != 0) {
_ = try appendToken(rp.c, .Comma, ",");
}
- const arg = try transExpr(rp, scope, args[i], .used, .r_value);
- try node.op.Call.params.push(arg);
+ call_params[i] = try transExpr(rp, scope, args[i], .used, .r_value);
}
node.rtoken = try appendToken(rp.c, .RParen, ")");
@@ -2830,8 +2986,8 @@ fn transUnaryExprOrTypeTraitExpr(
ZigClangUnaryExprOrTypeTraitExpr_getBeginLoc(stmt),
);
- const builtin_node = try transCreateNodeBuiltinFnCall(rp.c, "@sizeOf");
- try builtin_node.params.push(type_node);
+ const builtin_node = try rp.c.createBuiltinCall("@sizeOf", 1);
+ builtin_node.params()[0] = type_node;
builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")");
return maybeSuppressResult(rp, scope, result_used, &builtin_node.base);
}
@@ -2940,8 +3096,8 @@ fn transCreatePreCrement(
// zig: _ref.* += 1;
// zig: break :blk _ref.*
// zig: })
- const block_scope = try Scope.Block.init(rp.c, scope, "blk");
- block_scope.block_node = try transCreateNodeBlock(rp.c, block_scope.label);
+ var block_scope = try Scope.Block.init(rp.c, scope, "blk");
+ defer block_scope.deinit();
const ref = try block_scope.makeMangledName(rp.c, "ref");
const node = try transCreateNodeVarDecl(rp.c, false, true, ref);
@@ -2950,7 +3106,7 @@ fn transCreatePreCrement(
rhs_node.rhs = try transExpr(rp, scope, op_expr, .used, .r_value);
node.init_node = &rhs_node.base;
node.semicolon_token = try appendToken(rp.c, .Semicolon, ";");
- try block_scope.block_node.statements.push(&node.base);
+ try block_scope.statements.append(&node.base);
const lhs_node = try transCreateNodeIdentifier(rp.c, ref);
const ref_node = try transCreateNodePtrDeref(rp.c, lhs_node);
@@ -2959,18 +3115,18 @@ fn transCreatePreCrement(
const one = try transCreateNodeInt(rp.c, 1);
_ = try appendToken(rp.c, .Semicolon, ";");
const assign = try transCreateNodeInfixOp(rp, scope, ref_node, op, token, one, .used, false);
- try block_scope.block_node.statements.push(assign);
+ try block_scope.statements.append(assign);
- const break_node = try transCreateNodeBreak(rp.c, block_scope.label);
+ const break_node = try transCreateNodeBreakToken(rp.c, block_scope.label);
break_node.rhs = ref_node;
- try block_scope.block_node.statements.push(&break_node.base);
- block_scope.block_node.rbrace = try appendToken(rp.c, .RBrace, "}");
+ try block_scope.statements.append(&break_node.base);
+ const block_node = try block_scope.complete(rp.c);
// semicolon must immediately follow rbrace because it is the last token in a block
_ = try appendToken(rp.c, .Semicolon, ";");
- const grouped_expr = try rp.c.a().create(ast.Node.GroupedExpression);
+ const grouped_expr = try rp.c.arena.create(ast.Node.GroupedExpression);
grouped_expr.* = .{
.lparen = try appendToken(rp.c, .LParen, "("),
- .expr = &block_scope.block_node.base,
+ .expr = &block_node.base,
.rparen = try appendToken(rp.c, .RParen, ")"),
};
return &grouped_expr.base;
@@ -3006,8 +3162,8 @@ fn transCreatePostCrement(
// zig: _ref.* += 1;
// zig: break :blk _tmp
// zig: })
- const block_scope = try Scope.Block.init(rp.c, scope, "blk");
- block_scope.block_node = try transCreateNodeBlock(rp.c, block_scope.label);
+ var block_scope = try Scope.Block.init(rp.c, scope, "blk");
+ defer block_scope.deinit();
const ref = try block_scope.makeMangledName(rp.c, "ref");
const node = try transCreateNodeVarDecl(rp.c, false, true, ref);
@@ -3016,7 +3172,7 @@ fn transCreatePostCrement(
rhs_node.rhs = try transExpr(rp, scope, op_expr, .used, .r_value);
node.init_node = &rhs_node.base;
node.semicolon_token = try appendToken(rp.c, .Semicolon, ";");
- try block_scope.block_node.statements.push(&node.base);
+ try block_scope.statements.append(&node.base);
const lhs_node = try transCreateNodeIdentifier(rp.c, ref);
const ref_node = try transCreateNodePtrDeref(rp.c, lhs_node);
@@ -3027,23 +3183,23 @@ fn transCreatePostCrement(
tmp_node.eq_token = try appendToken(rp.c, .Equal, "=");
tmp_node.init_node = ref_node;
tmp_node.semicolon_token = try appendToken(rp.c, .Semicolon, ";");
- try block_scope.block_node.statements.push(&tmp_node.base);
+ try block_scope.statements.append(&tmp_node.base);
const token = try appendToken(rp.c, op_tok_id, bytes);
const one = try transCreateNodeInt(rp.c, 1);
_ = try appendToken(rp.c, .Semicolon, ";");
const assign = try transCreateNodeInfixOp(rp, scope, ref_node, op, token, one, .used, false);
- try block_scope.block_node.statements.push(assign);
+ try block_scope.statements.append(assign);
- const break_node = try transCreateNodeBreak(rp.c, block_scope.label);
+ const break_node = try transCreateNodeBreakToken(rp.c, block_scope.label);
break_node.rhs = try transCreateNodeIdentifier(rp.c, tmp);
- try block_scope.block_node.statements.push(&break_node.base);
+ try block_scope.statements.append(&break_node.base);
_ = try appendToken(rp.c, .Semicolon, ";");
- block_scope.block_node.rbrace = try appendToken(rp.c, .RBrace, "}");
- const grouped_expr = try rp.c.a().create(ast.Node.GroupedExpression);
+ const block_node = try block_scope.complete(rp.c);
+ const grouped_expr = try rp.c.arena.create(ast.Node.GroupedExpression);
grouped_expr.* = .{
.lparen = try appendToken(rp.c, .LParen, "("),
- .expr = &block_scope.block_node.base,
+ .expr = &block_node.base,
.rparen = try appendToken(rp.c, .RParen, ")"),
};
return &grouped_expr.base;
@@ -3106,13 +3262,13 @@ fn transCreateCompoundAssign(
if ((is_mod or is_div) and is_signed) {
const op_token = try appendToken(rp.c, .Equal, "=");
- const op_node = try rp.c.a().create(ast.Node.InfixOp);
+ const op_node = try rp.c.arena.create(ast.Node.InfixOp);
const builtin = if (is_mod) "@rem" else "@divTrunc";
- const builtin_node = try transCreateNodeBuiltinFnCall(rp.c, builtin);
+ const builtin_node = try rp.c.createBuiltinCall(builtin, 2);
const lhs_node = try transExpr(rp, scope, lhs, .used, .l_value);
- try builtin_node.params.push(lhs_node);
+ builtin_node.params()[0] = lhs_node;
_ = try appendToken(rp.c, .Comma, ",");
- try builtin_node.params.push(try transExpr(rp, scope, rhs, .used, .r_value));
+ builtin_node.params()[1] = try transExpr(rp, scope, rhs, .used, .r_value);
builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")");
op_node.* = .{
.op_token = op_token,
@@ -3132,11 +3288,11 @@ fn transCreateCompoundAssign(
try transExpr(rp, scope, rhs, .used, .r_value);
if (is_shift) {
- const cast_node = try transCreateNodeBuiltinFnCall(rp.c, "@intCast");
+ const cast_node = try rp.c.createBuiltinCall("@intCast", 2);
const rhs_type = try qualTypeToLog2IntRef(rp, getExprQualType(rp.c, rhs), loc);
- try cast_node.params.push(rhs_type);
+ cast_node.params()[0] = rhs_type;
_ = try appendToken(rp.c, .Comma, ",");
- try cast_node.params.push(rhs_node);
+ cast_node.params()[1] = rhs_node;
cast_node.rparen_token = try appendToken(rp.c, .RParen, ")");
rhs_node = &cast_node.base;
}
@@ -3151,8 +3307,8 @@ fn transCreateCompoundAssign(
// zig: _ref.* = _ref.* + rhs;
// zig: break :blk _ref.*
// zig: })
- const block_scope = try Scope.Block.init(rp.c, scope, "blk");
- block_scope.block_node = try transCreateNodeBlock(rp.c, block_scope.label);
+ var block_scope = try Scope.Block.init(rp.c, scope, "blk");
+ defer block_scope.deinit();
const ref = try block_scope.makeMangledName(rp.c, "ref");
const node = try transCreateNodeVarDecl(rp.c, false, true, ref);
@@ -3161,7 +3317,7 @@ fn transCreateCompoundAssign(
addr_node.rhs = try transExpr(rp, scope, lhs, .used, .l_value);
node.init_node = &addr_node.base;
node.semicolon_token = try appendToken(rp.c, .Semicolon, ";");
- try block_scope.block_node.statements.push(&node.base);
+ try block_scope.statements.append(&node.base);
const lhs_node = try transCreateNodeIdentifier(rp.c, ref);
const ref_node = try transCreateNodePtrDeref(rp.c, lhs_node);
@@ -3169,12 +3325,12 @@ fn transCreateCompoundAssign(
if ((is_mod or is_div) and is_signed) {
const op_token = try appendToken(rp.c, .Equal, "=");
- const op_node = try rp.c.a().create(ast.Node.InfixOp);
+ const op_node = try rp.c.arena.create(ast.Node.InfixOp);
const builtin = if (is_mod) "@rem" else "@divTrunc";
- const builtin_node = try transCreateNodeBuiltinFnCall(rp.c, builtin);
- try builtin_node.params.push(try transCreateNodePtrDeref(rp.c, lhs_node));
+ const builtin_node = try rp.c.createBuiltinCall(builtin, 2);
+ builtin_node.params()[0] = try transCreateNodePtrDeref(rp.c, lhs_node);
_ = try appendToken(rp.c, .Comma, ",");
- try builtin_node.params.push(try transExpr(rp, scope, rhs, .used, .r_value));
+ builtin_node.params()[1] = try transExpr(rp, scope, rhs, .used, .r_value);
builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")");
_ = try appendToken(rp.c, .Semicolon, ";");
op_node.* = .{
@@ -3184,17 +3340,17 @@ fn transCreateCompoundAssign(
.rhs = &builtin_node.base,
};
_ = try appendToken(rp.c, .Semicolon, ";");
- try block_scope.block_node.statements.push(&op_node.base);
+ try block_scope.statements.append(&op_node.base);
} else {
const bin_token = try appendToken(rp.c, bin_tok_id, bin_bytes);
var rhs_node = try transExpr(rp, scope, rhs, .used, .r_value);
if (is_shift) {
- const cast_node = try transCreateNodeBuiltinFnCall(rp.c, "@intCast");
+ const cast_node = try rp.c.createBuiltinCall("@intCast", 2);
const rhs_type = try qualTypeToLog2IntRef(rp, getExprQualType(rp.c, rhs), loc);
- try cast_node.params.push(rhs_type);
+ cast_node.params()[0] = rhs_type;
_ = try appendToken(rp.c, .Comma, ",");
- try cast_node.params.push(rhs_node);
+ cast_node.params()[1] = rhs_node;
cast_node.rparen_token = try appendToken(rp.c, .RParen, ")");
rhs_node = &cast_node.base;
}
@@ -3204,17 +3360,17 @@ fn transCreateCompoundAssign(
const eq_token = try appendToken(rp.c, .Equal, "=");
const assign = try transCreateNodeInfixOp(rp, scope, ref_node, .Assign, eq_token, rhs_bin, .used, false);
- try block_scope.block_node.statements.push(assign);
+ try block_scope.statements.append(assign);
}
- const break_node = try transCreateNodeBreak(rp.c, block_scope.label);
+ const break_node = try transCreateNodeBreakToken(rp.c, block_scope.label);
break_node.rhs = ref_node;
- try block_scope.block_node.statements.push(&break_node.base);
- block_scope.block_node.rbrace = try appendToken(rp.c, .RBrace, "}");
- const grouped_expr = try rp.c.a().create(ast.Node.GroupedExpression);
+ try block_scope.statements.append(&break_node.base);
+ const block_node = try block_scope.complete(rp.c);
+ const grouped_expr = try rp.c.arena.create(ast.Node.GroupedExpression);
grouped_expr.* = .{
.lparen = try appendToken(rp.c, .LParen, "("),
- .expr = &block_scope.block_node.base,
+ .expr = &block_node.base,
.rparen = try appendToken(rp.c, .RParen, ")"),
};
return &grouped_expr.base;
@@ -3238,43 +3394,43 @@ fn transCPtrCast(
!ZigClangQualType_isVolatileQualified(child_type)))
{
// Casting away const or volatile requires us to use @intToPtr
- const inttoptr_node = try transCreateNodeBuiltinFnCall(rp.c, "@intToPtr");
+ const inttoptr_node = try rp.c.createBuiltinCall("@intToPtr", 2);
const dst_type_node = try transType(rp, ty, loc);
- try inttoptr_node.params.push(dst_type_node);
+ inttoptr_node.params()[0] = dst_type_node;
_ = try appendToken(rp.c, .Comma, ",");
- const ptrtoint_node = try transCreateNodeBuiltinFnCall(rp.c, "@ptrToInt");
- try ptrtoint_node.params.push(expr);
+ const ptrtoint_node = try rp.c.createBuiltinCall("@ptrToInt", 1);
+ ptrtoint_node.params()[0] = expr;
ptrtoint_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- try inttoptr_node.params.push(&ptrtoint_node.base);
+ inttoptr_node.params()[1] = &ptrtoint_node.base;
inttoptr_node.rparen_token = try appendToken(rp.c, .RParen, ")");
return &inttoptr_node.base;
} else {
// Implicit downcasting from higher to lower alignment values is forbidden,
// use @alignCast to side-step this problem
- const ptrcast_node = try transCreateNodeBuiltinFnCall(rp.c, "@ptrCast");
+ const ptrcast_node = try rp.c.createBuiltinCall("@ptrCast", 2);
const dst_type_node = try transType(rp, ty, loc);
- try ptrcast_node.params.push(dst_type_node);
+ ptrcast_node.params()[0] = dst_type_node;
_ = try appendToken(rp.c, .Comma, ",");
if (ZigClangType_isVoidType(qualTypeCanon(child_type))) {
// void has 1-byte alignment, so @alignCast is not needed
- try ptrcast_node.params.push(expr);
+ ptrcast_node.params()[1] = expr;
} else if (typeIsOpaque(rp.c, qualTypeCanon(child_type), loc)) {
// For opaque types a ptrCast is enough
- try ptrcast_node.params.push(expr);
+ ptrcast_node.params()[1] = expr;
} else {
- const aligncast_node = try transCreateNodeBuiltinFnCall(rp.c, "@alignCast");
- const alignof_node = try transCreateNodeBuiltinFnCall(rp.c, "@alignOf");
+ const aligncast_node = try rp.c.createBuiltinCall("@alignCast", 2);
+ const alignof_node = try rp.c.createBuiltinCall("@alignOf", 1);
const child_type_node = try transQualType(rp, child_type, loc);
- try alignof_node.params.push(child_type_node);
+ alignof_node.params()[0] = child_type_node;
alignof_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- try aligncast_node.params.push(&alignof_node.base);
+ aligncast_node.params()[0] = &alignof_node.base;
_ = try appendToken(rp.c, .Comma, ",");
- try aligncast_node.params.push(expr);
+ aligncast_node.params()[1] = expr;
aligncast_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- try ptrcast_node.params.push(&aligncast_node.base);
+ ptrcast_node.params()[1] = &aligncast_node.base;
}
ptrcast_node.rparen_token = try appendToken(rp.c, .RParen, ")");
@@ -3295,7 +3451,7 @@ fn transBreak(rp: RestorePoint, scope: *Scope) TransError!*ast.Node {
fn transFloatingLiteral(rp: RestorePoint, scope: *Scope, stmt: *const ZigClangFloatingLiteral, used: ResultUsed) TransError!*ast.Node {
// TODO use something more accurate
const dbl = ZigClangAPFloat_getValueAsApproximateDouble(stmt);
- const node = try rp.c.a().create(ast.Node.FloatLiteral);
+ const node = try rp.c.arena.create(ast.Node.FloatLiteral);
node.* = .{
.token = try appendTokenFmt(rp.c, .FloatLiteral, "{d}", .{dbl}),
};
@@ -3317,27 +3473,30 @@ fn transBinaryConditionalOperator(rp: RestorePoint, scope: *Scope, stmt: *const
// })
const lparen = try appendToken(rp.c, .LParen, "(");
- const block_scope = try Scope.Block.init(rp.c, scope, "blk");
- block_scope.block_node = try transCreateNodeBlock(rp.c, block_scope.label);
+ var block_scope = try Scope.Block.init(rp.c, scope, "blk");
+ defer block_scope.deinit();
const mangled_name = try block_scope.makeMangledName(rp.c, "cond_temp");
const tmp_var = try transCreateNodeVarDecl(rp.c, false, true, mangled_name);
tmp_var.eq_token = try appendToken(rp.c, .Equal, "=");
tmp_var.init_node = try transExpr(rp, &block_scope.base, cond_expr, .used, .r_value);
tmp_var.semicolon_token = try appendToken(rp.c, .Semicolon, ";");
- try block_scope.block_node.statements.push(&tmp_var.base);
+ try block_scope.statements.append(&tmp_var.base);
- const break_node = try transCreateNodeBreak(rp.c, block_scope.label);
+ const break_node = try transCreateNodeBreakToken(rp.c, block_scope.label);
const if_node = try transCreateNodeIf(rp.c);
- var cond_scope = Scope{
- .parent = &block_scope.base,
- .id = .Condition,
+ var cond_scope = Scope.Condition{
+ .base = .{
+ .parent = &block_scope.base,
+ .id = .Condition,
+ },
};
+ defer cond_scope.deinit();
const tmp_var_node = try transCreateNodeIdentifier(rp.c, mangled_name);
const ty = ZigClangQualType_getTypePtr(getExprQualType(rp.c, cond_expr));
- const cond_node = try finishBoolExpr(rp, &block_scope.base, ZigClangExpr_getBeginLoc(cond_expr), ty, tmp_var_node, used);
+ const cond_node = try finishBoolExpr(rp, &cond_scope.base, ZigClangExpr_getBeginLoc(cond_expr), ty, tmp_var_node, used);
if_node.condition = cond_node;
_ = try appendToken(rp.c, .RParen, ")");
@@ -3348,13 +3507,13 @@ fn transBinaryConditionalOperator(rp: RestorePoint, scope: *Scope, stmt: *const
break_node.rhs = &if_node.base;
_ = try appendToken(rp.c, .Semicolon, ";");
- try block_scope.block_node.statements.push(&break_node.base);
- block_scope.block_node.rbrace = try appendToken(rp.c, .RBrace, "}");
+ try block_scope.statements.append(&break_node.base);
+ const block_node = try block_scope.complete(rp.c);
- const grouped_expr = try rp.c.a().create(ast.Node.GroupedExpression);
+ const grouped_expr = try rp.c.arena.create(ast.Node.GroupedExpression);
grouped_expr.* = .{
.lparen = lparen,
- .expr = &block_scope.block_node.base,
+ .expr = &block_node.base,
.rparen = try appendToken(rp.c, .RParen, ")"),
};
return maybeSuppressResult(rp, scope, used, &grouped_expr.base);
@@ -3364,17 +3523,20 @@ fn transConditionalOperator(rp: RestorePoint, scope: *Scope, stmt: *const ZigCla
const grouped = scope.id == .Condition;
const lparen = if (grouped) try appendToken(rp.c, .LParen, "(") else undefined;
const if_node = try transCreateNodeIf(rp.c);
- var cond_scope = Scope{
- .parent = scope,
- .id = .Condition,
+ var cond_scope = Scope.Condition{
+ .base = .{
+ .parent = scope,
+ .id = .Condition,
+ },
};
+ defer cond_scope.deinit();
const casted_stmt = @ptrCast(*const ZigClangAbstractConditionalOperator, stmt);
const cond_expr = ZigClangAbstractConditionalOperator_getCond(casted_stmt);
const true_expr = ZigClangAbstractConditionalOperator_getTrueExpr(casted_stmt);
const false_expr = ZigClangAbstractConditionalOperator_getFalseExpr(casted_stmt);
- if_node.condition = try transBoolExpr(rp, &cond_scope, cond_expr, .used, .r_value, false);
+ if_node.condition = try transBoolExpr(rp, &cond_scope.base, cond_expr, .used, .r_value, false);
_ = try appendToken(rp.c, .RParen, ")");
if_node.body = try transExpr(rp, scope, true_expr, .used, .r_value);
@@ -3384,7 +3546,7 @@ fn transConditionalOperator(rp: RestorePoint, scope: *Scope, stmt: *const ZigCla
if (grouped) {
const rparen = try appendToken(rp.c, .RParen, ")");
- const grouped_expr = try rp.c.a().create(ast.Node.GroupedExpression);
+ const grouped_expr = try rp.c.arena.create(ast.Node.GroupedExpression);
grouped_expr.* = .{
.lparen = lparen,
.expr = &if_node.base,
@@ -3415,7 +3577,7 @@ fn maybeSuppressResult(
}
const lhs = try transCreateNodeIdentifier(rp.c, "_");
const op_token = try appendToken(rp.c, .Equal, "=");
- const op_node = try rp.c.a().create(ast.Node.InfixOp);
+ const op_node = try rp.c.arena.create(ast.Node.InfixOp);
op_node.* = .{
.op_token = op_token,
.lhs = lhs,
@@ -3426,7 +3588,7 @@ fn maybeSuppressResult(
}
fn addTopLevelDecl(c: *Context, name: []const u8, decl_node: *ast.Node) !void {
- try c.tree.root_node.decls.push(decl_node);
+ try c.root_decls.append(c.gpa, decl_node);
_ = try c.global_scope.sym_table.put(name, decl_node);
}
@@ -3524,7 +3686,7 @@ fn qualTypeToLog2IntRef(rp: RestorePoint, qt: ZigClangQualType, source_loc: ZigC
if (int_bit_width != 0) {
// we can perform the log2 now.
const cast_bit_width = math.log2_int(u64, int_bit_width);
- const node = try rp.c.a().create(ast.Node.IntegerLiteral);
+ const node = try rp.c.arena.create(ast.Node.IntegerLiteral);
node.* = .{
.token = try appendTokenFmt(rp.c, .Identifier, "u{}", .{cast_bit_width}),
};
@@ -3545,19 +3707,19 @@ fn qualTypeToLog2IntRef(rp: RestorePoint, qt: ZigClangQualType, source_loc: ZigC
// Symbol "Log2Int"
// Symbol (var from above)
- const import_fn_call = try transCreateNodeBuiltinFnCall(rp.c, "@import");
+ const import_fn_call = try rp.c.createBuiltinCall("@import", 1);
const std_token = try appendToken(rp.c, .StringLiteral, "\"std\"");
- const std_node = try rp.c.a().create(ast.Node.StringLiteral);
+ const std_node = try rp.c.arena.create(ast.Node.StringLiteral);
std_node.* = .{
.token = std_token,
};
- try import_fn_call.params.push(&std_node.base);
+ import_fn_call.params()[0] = &std_node.base;
import_fn_call.rparen_token = try appendToken(rp.c, .RParen, ")");
const inner_field_access = try transCreateNodeFieldAccess(rp.c, &import_fn_call.base, "math");
const outer_field_access = try transCreateNodeFieldAccess(rp.c, inner_field_access, "Log2Int");
- const log2int_fn_call = try transCreateNodeFnCall(rp.c, outer_field_access);
- try @fieldParentPtr(ast.Node.SuffixOp, "base", &log2int_fn_call.base).op.Call.params.push(zig_type_node);
+ const log2int_fn_call = try rp.c.createCall(outer_field_access, 1);
+ log2int_fn_call.params()[0] = zig_type_node;
log2int_fn_call.rtoken = try appendToken(rp.c, .RParen, ")");
return &log2int_fn_call.base;
@@ -3731,8 +3893,8 @@ fn transCreateNodeAssign(
const eq_token = try appendToken(rp.c, .Equal, "=");
var rhs_node = try transExprCoercing(rp, scope, rhs, .used, .r_value);
if (!exprIsBooleanType(lhs) and isBoolRes(rhs_node)) {
- const builtin_node = try transCreateNodeBuiltinFnCall(rp.c, "@boolToInt");
- try builtin_node.params.push(rhs_node);
+ const builtin_node = try rp.c.createBuiltinCall("@boolToInt", 1);
+ builtin_node.params()[0] = rhs_node;
builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")");
rhs_node = &builtin_node.base;
}
@@ -3748,22 +3910,24 @@ fn transCreateNodeAssign(
// zig: lhs = _tmp;
// zig: break :blk _tmp
// zig: })
- const block_scope = try Scope.Block.init(rp.c, scope, "blk");
- block_scope.block_node = try transCreateNodeBlock(rp.c, block_scope.label);
+ const label_name = "blk";
+ var block_scope = try Scope.Block.init(rp.c, scope, label_name);
+ defer block_scope.deinit();
+
const tmp = try block_scope.makeMangledName(rp.c, "tmp");
const node = try transCreateNodeVarDecl(rp.c, false, true, tmp);
node.eq_token = try appendToken(rp.c, .Equal, "=");
var rhs_node = try transExpr(rp, &block_scope.base, rhs, .used, .r_value);
if (!exprIsBooleanType(lhs) and isBoolRes(rhs_node)) {
- const builtin_node = try transCreateNodeBuiltinFnCall(rp.c, "@boolToInt");
- try builtin_node.params.push(rhs_node);
+ const builtin_node = try rp.c.createBuiltinCall("@boolToInt", 1);
+ builtin_node.params()[0] = rhs_node;
builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")");
rhs_node = &builtin_node.base;
}
node.init_node = rhs_node;
node.semicolon_token = try appendToken(rp.c, .Semicolon, ";");
- try block_scope.block_node.statements.push(&node.base);
+ try block_scope.statements.append(&node.base);
const lhs_node = try transExpr(rp, &block_scope.base, lhs, .used, .l_value);
const eq_token = try appendToken(rp.c, .Equal, "=");
@@ -3771,48 +3935,20 @@ fn transCreateNodeAssign(
_ = try appendToken(rp.c, .Semicolon, ";");
const assign = try transCreateNodeInfixOp(rp, &block_scope.base, lhs_node, .Assign, eq_token, ident, .used, false);
- try block_scope.block_node.statements.push(assign);
+ try block_scope.statements.append(assign);
- const break_node = try transCreateNodeBreak(rp.c, block_scope.label);
+ const break_node = try transCreateNodeBreak(rp.c, label_name);
break_node.rhs = try transCreateNodeIdentifier(rp.c, tmp);
_ = try appendToken(rp.c, .Semicolon, ";");
- try block_scope.block_node.statements.push(&break_node.base);
- block_scope.block_node.rbrace = try appendToken(rp.c, .RBrace, "}");
+ try block_scope.statements.append(&break_node.base);
+ const block_node = try block_scope.complete(rp.c);
// semicolon must immediately follow rbrace because it is the last token in a block
_ = try appendToken(rp.c, .Semicolon, ";");
- return &block_scope.block_node.base;
-}
-
-fn transCreateNodeBuiltinFnCall(c: *Context, name: []const u8) !*ast.Node.BuiltinCall {
- const builtin_token = try appendToken(c, .Builtin, name);
- _ = try appendToken(c, .LParen, "(");
- const node = try c.a().create(ast.Node.BuiltinCall);
- node.* = .{
- .builtin_token = builtin_token,
- .params = ast.Node.BuiltinCall.ParamList.init(c.a()),
- .rparen_token = undefined, // set after appending args
- };
- return node;
-}
-
-fn transCreateNodeFnCall(c: *Context, fn_expr: *ast.Node) !*ast.Node.SuffixOp {
- _ = try appendToken(c, .LParen, "(");
- const node = try c.a().create(ast.Node.SuffixOp);
- node.* = .{
- .lhs = .{ .node = fn_expr },
- .op = .{
- .Call = .{
- .params = ast.Node.SuffixOp.Op.Call.ParamList.init(c.a()),
- .async_token = null,
- },
- },
- .rtoken = undefined, // set after appending args
- };
- return node;
+ return &block_node.base;
}
fn transCreateNodeFieldAccess(c: *Context, container: *ast.Node, field_name: []const u8) !*ast.Node {
- const field_access_node = try c.a().create(ast.Node.InfixOp);
+ const field_access_node = try c.arena.create(ast.Node.InfixOp);
field_access_node.* = .{
.op_token = try appendToken(c, .Period, "."),
.lhs = container,
@@ -3828,7 +3964,7 @@ fn transCreateNodePrefixOp(
op_tok_id: std.zig.Token.Id,
bytes: []const u8,
) !*ast.Node.PrefixOp {
- const node = try c.a().create(ast.Node.PrefixOp);
+ const node = try c.arena.create(ast.Node.PrefixOp);
node.* = .{
.op_token = try appendToken(c, op_tok_id, bytes),
.op = op,
@@ -3851,7 +3987,7 @@ fn transCreateNodeInfixOp(
try appendToken(rp.c, .LParen, "(")
else
null;
- const node = try rp.c.a().create(ast.Node.InfixOp);
+ const node = try rp.c.arena.create(ast.Node.InfixOp);
node.* = .{
.op_token = op_token,
.lhs = lhs_node,
@@ -3860,7 +3996,7 @@ fn transCreateNodeInfixOp(
};
if (!grouped) return maybeSuppressResult(rp, scope, used, &node.base);
const rparen = try appendToken(rp.c, .RParen, ")");
- const grouped_expr = try rp.c.a().create(ast.Node.GroupedExpression);
+ const grouped_expr = try rp.c.arena.create(ast.Node.GroupedExpression);
grouped_expr.* = .{
.lparen = lparen.?,
.expr = &node.base,
@@ -3904,7 +4040,7 @@ fn transCreateNodePtrType(
is_volatile: bool,
op_tok_id: std.zig.Token.Id,
) !*ast.Node.PrefixOp {
- const node = try c.a().create(ast.Node.PrefixOp);
+ const node = try c.arena.create(ast.Node.PrefixOp);
const op_token = switch (op_tok_id) {
.LBracket => blk: {
const lbracket = try appendToken(c, .LBracket, "[");
@@ -3946,8 +4082,8 @@ fn transCreateNodeAPInt(c: *Context, int: *const ZigClangAPSInt) !*ast.Node {
ZigClangAPSInt_free(aps_int);
};
- const limbs = try c.a().alloc(math.big.Limb, num_limbs);
- defer c.a().free(limbs);
+ const limbs = try c.arena.alloc(math.big.Limb, num_limbs);
+ defer c.arena.free(limbs);
const data = ZigClangAPSInt_getRawData(aps_int);
switch (@sizeOf(math.big.Limb)) {
@@ -3972,12 +4108,12 @@ fn transCreateNodeAPInt(c: *Context, int: *const ZigClangAPSInt) !*ast.Node {
}
const big: math.big.int.Const = .{ .limbs = limbs, .positive = !is_negative };
- const str = big.toStringAlloc(c.a(), 10, false) catch |err| switch (err) {
+ const str = big.toStringAlloc(c.arena, 10, false) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
};
- defer c.a().free(str);
+ defer c.arena.free(str);
const token = try appendToken(c, .IntegerLiteral, str);
- const node = try c.a().create(ast.Node.IntegerLiteral);
+ const node = try c.arena.create(ast.Node.IntegerLiteral);
node.* = .{
.token = token,
};
@@ -3986,7 +4122,7 @@ fn transCreateNodeAPInt(c: *Context, int: *const ZigClangAPSInt) !*ast.Node {
fn transCreateNodeReturnExpr(c: *Context) !*ast.Node.ControlFlowExpression {
const ltoken = try appendToken(c, .Keyword_return, "return");
- const node = try c.a().create(ast.Node.ControlFlowExpression);
+ const node = try c.arena.create(ast.Node.ControlFlowExpression);
node.* = .{
.ltoken = ltoken,
.kind = .Return,
@@ -3997,7 +4133,7 @@ fn transCreateNodeReturnExpr(c: *Context) !*ast.Node.ControlFlowExpression {
fn transCreateNodeUndefinedLiteral(c: *Context) !*ast.Node {
const token = try appendToken(c, .Keyword_undefined, "undefined");
- const node = try c.a().create(ast.Node.UndefinedLiteral);
+ const node = try c.arena.create(ast.Node.UndefinedLiteral);
node.* = .{
.token = token,
};
@@ -4006,7 +4142,7 @@ fn transCreateNodeUndefinedLiteral(c: *Context) !*ast.Node {
fn transCreateNodeNullLiteral(c: *Context) !*ast.Node {
const token = try appendToken(c, .Keyword_null, "null");
- const node = try c.a().create(ast.Node.NullLiteral);
+ const node = try c.arena.create(ast.Node.NullLiteral);
node.* = .{
.token = token,
};
@@ -4018,42 +4154,16 @@ fn transCreateNodeBoolLiteral(c: *Context, value: bool) !*ast.Node {
try appendToken(c, .Keyword_true, "true")
else
try appendToken(c, .Keyword_false, "false");
- const node = try c.a().create(ast.Node.BoolLiteral);
+ const node = try c.arena.create(ast.Node.BoolLiteral);
node.* = .{
.token = token,
};
return &node.base;
}
-fn transCreateNodeArrayInitializer(c: *Context, ty: *ast.Node) !*ast.Node.SuffixOp {
- _ = try appendToken(c, .LBrace, "{");
- const node = try c.a().create(ast.Node.SuffixOp);
- node.* = .{
- .lhs = .{ .node = ty },
- .op = .{
- .ArrayInitializer = ast.Node.SuffixOp.Op.InitList.init(c.a()),
- },
- .rtoken = undefined, // set after appending values
- };
- return node;
-}
-
-fn transCreateNodeStructInitializer(c: *Context, ty: *ast.Node) !*ast.Node.SuffixOp {
- _ = try appendToken(c, .LBrace, "{");
- const node = try c.a().create(ast.Node.SuffixOp);
- node.* = .{
- .lhs = .{ .node = ty },
- .op = .{
- .StructInitializer = ast.Node.SuffixOp.Op.InitList.init(c.a()),
- },
- .rtoken = undefined, // set after appending values
- };
- return node;
-}
-
fn transCreateNodeInt(c: *Context, int: var) !*ast.Node {
const token = try appendTokenFmt(c, .IntegerLiteral, "{}", .{int});
- const node = try c.a().create(ast.Node.IntegerLiteral);
+ const node = try c.arena.create(ast.Node.IntegerLiteral);
node.* = .{
.token = token,
};
@@ -4062,7 +4172,7 @@ fn transCreateNodeInt(c: *Context, int: var) !*ast.Node {
fn transCreateNodeFloat(c: *Context, int: var) !*ast.Node {
const token = try appendTokenFmt(c, .FloatLiteral, "{}", .{int});
- const node = try c.a().create(ast.Node.FloatLiteral);
+ const node = try c.arena.create(ast.Node.FloatLiteral);
node.* = .{
.token = token,
};
@@ -4070,8 +4180,8 @@ fn transCreateNodeFloat(c: *Context, int: var) !*ast.Node {
}
fn transCreateNodeOpaqueType(c: *Context) !*ast.Node {
- const call_node = try transCreateNodeBuiltinFnCall(c, "@Type");
- try call_node.params.push(try transCreateNodeEnumLiteral(c, "Opaque"));
+ const call_node = try c.createBuiltinCall("@Type", 1);
+ call_node.params()[0] = try transCreateNodeEnumLiteral(c, "Opaque");
call_node.rparen_token = try appendToken(c, .RParen, ")");
return &call_node.base;
}
@@ -4085,39 +4195,36 @@ fn transCreateNodeMacroFn(c: *Context, name: []const u8, ref: *ast.Node, proto_a
const name_tok = try appendIdentifier(c, name);
_ = try appendToken(c, .LParen, "(");
- var fn_params = ast.Node.FnProto.ParamList.init(c.a());
- var it = proto_alias.params.iterator(0);
- while (it.next()) |pn| {
- if (it.index != 0) {
+ var fn_params = std.ArrayList(ast.Node.FnProto.ParamDecl).init(c.gpa);
+ defer fn_params.deinit();
+
+ for (proto_alias.params()) |param, i| {
+ if (i != 0) {
_ = try appendToken(c, .Comma, ",");
}
- const param = pn.*.cast(ast.Node.ParamDecl).?;
-
const param_name_tok = param.name_token orelse
try appendTokenFmt(c, .Identifier, "arg_{}", .{c.getMangle()});
_ = try appendToken(c, .Colon, ":");
- const param_node = try c.a().create(ast.Node.ParamDecl);
- param_node.* = .{
+ (try fn_params.addOne()).* = .{
.doc_comments = null,
.comptime_token = null,
.noalias_token = param.noalias_token,
.name_token = param_name_tok,
.param_type = param.param_type,
};
- try fn_params.push(¶m_node.base);
}
_ = try appendToken(c, .RParen, ")");
- const fn_proto = try c.a().create(ast.Node.FnProto);
+ const fn_proto = try ast.Node.FnProto.alloc(c.arena, fn_params.items.len);
fn_proto.* = .{
.doc_comments = null,
.visib_token = pub_tok,
.fn_token = fn_tok,
.name_token = name_tok,
- .params = fn_params,
+ .params_len = fn_params.items.len,
.return_type = proto_alias.return_type,
.var_args_token = null,
.extern_export_inline_token = inline_tok,
@@ -4127,26 +4234,35 @@ fn transCreateNodeMacroFn(c: *Context, name: []const u8, ref: *ast.Node, proto_a
.section_expr = null,
.callconv_expr = null,
};
+ mem.copy(ast.Node.FnProto.ParamDecl, fn_proto.params(), fn_params.items);
- const block = try transCreateNodeBlock(c, null);
+ const block_lbrace = try appendToken(c, .LBrace, "{");
const return_expr = try transCreateNodeReturnExpr(c);
const unwrap_expr = try transCreateNodeUnwrapNull(c, ref.cast(ast.Node.VarDecl).?.init_node.?);
- const call_expr = try transCreateNodeFnCall(c, unwrap_expr);
- it = fn_params.iterator(0);
- while (it.next()) |pn| {
- if (it.index != 0) {
+
+ const call_expr = try c.createCall(unwrap_expr, fn_params.items.len);
+ const call_params = call_expr.params();
+
+ for (fn_params.items) |param, i| {
+ if (i != 0) {
_ = try appendToken(c, .Comma, ",");
}
- const param = pn.*.cast(ast.Node.ParamDecl).?;
- try call_expr.op.Call.params.push(try transCreateNodeIdentifier(c, tokenSlice(c, param.name_token.?)));
+ call_params[i] = try transCreateNodeIdentifier(c, tokenSlice(c, param.name_token.?));
}
call_expr.rtoken = try appendToken(c, .RParen, ")");
+
return_expr.rhs = &call_expr.base;
_ = try appendToken(c, .Semicolon, ";");
- block.rbrace = try appendToken(c, .RBrace, "}");
- try block.statements.push(&return_expr.base);
+ const block = try ast.Node.Block.alloc(c.arena, 1);
+ block.* = .{
+ .label = null,
+ .lbrace = block_lbrace,
+ .statements_len = 1,
+ .rbrace = try appendToken(c, .RBrace, "}"),
+ };
+ block.statements()[0] = &return_expr.base;
fn_proto.body_node = &block.base;
return &fn_proto.base;
}
@@ -4154,17 +4270,17 @@ fn transCreateNodeMacroFn(c: *Context, name: []const u8, ref: *ast.Node, proto_a
fn transCreateNodeUnwrapNull(c: *Context, wrapped: *ast.Node) !*ast.Node {
_ = try appendToken(c, .Period, ".");
const qm = try appendToken(c, .QuestionMark, "?");
- const node = try c.a().create(ast.Node.SuffixOp);
+ const node = try c.arena.create(ast.Node.SuffixOp);
node.* = .{
.op = .UnwrapOptional,
- .lhs = .{ .node = wrapped },
+ .lhs = wrapped,
.rtoken = qm,
};
return &node.base;
}
fn transCreateNodeEnumLiteral(c: *Context, name: []const u8) !*ast.Node {
- const node = try c.a().create(ast.Node.EnumLiteral);
+ const node = try c.arena.create(ast.Node.EnumLiteral);
node.* = .{
.dot = try appendToken(c, .Period, "."),
.name = try appendIdentifier(c, name),
@@ -4173,7 +4289,7 @@ fn transCreateNodeEnumLiteral(c: *Context, name: []const u8) !*ast.Node {
}
fn transCreateNodeStringLiteral(c: *Context, str: []const u8) !*ast.Node {
- const node = try c.a().create(ast.Node.StringLiteral);
+ const node = try c.arena.create(ast.Node.StringLiteral);
node.* = .{
.token = try appendToken(c, .StringLiteral, str),
};
@@ -4183,7 +4299,7 @@ fn transCreateNodeStringLiteral(c: *Context, str: []const u8) !*ast.Node {
fn transCreateNodeIf(c: *Context) !*ast.Node.If {
const if_tok = try appendToken(c, .Keyword_if, "if");
_ = try appendToken(c, .LParen, "(");
- const node = try c.a().create(ast.Node.If);
+ const node = try c.arena.create(ast.Node.If);
node.* = .{
.if_token = if_tok,
.condition = undefined,
@@ -4195,7 +4311,7 @@ fn transCreateNodeIf(c: *Context) !*ast.Node.If {
}
fn transCreateNodeElse(c: *Context) !*ast.Node.Else {
- const node = try c.a().create(ast.Node.Else);
+ const node = try c.arena.create(ast.Node.Else);
node.* = .{
.else_token = try appendToken(c, .Keyword_else, "else"),
.payload = null,
@@ -4204,20 +4320,11 @@ fn transCreateNodeElse(c: *Context) !*ast.Node.Else {
return node;
}
-fn transCreateNodeBlock(c: *Context, label: ?[]const u8) !*ast.Node.Block {
- const label_node = if (label) |l| blk: {
- const ll = try appendIdentifier(c, l);
- _ = try appendToken(c, .Colon, ":");
- break :blk ll;
- } else null;
- const block_node = try c.a().create(ast.Node.Block);
- block_node.* = .{
- .label = label_node,
- .lbrace = try appendToken(c, .LBrace, "{"),
- .statements = ast.Node.Block.StatementList.init(c.a()),
- .rbrace = undefined,
- };
- return block_node;
+fn transCreateNodeBreakToken(c: *Context, label: ?ast.TokenIndex) !*ast.Node.ControlFlowExpression {
+ const other_token = label orelse return transCreateNodeBreak(c, null);
+ const loc = c.token_locs.items[other_token];
+ const label_name = c.source_buffer.items[loc.start..loc.end];
+ return transCreateNodeBreak(c, label_name);
}
fn transCreateNodeBreak(c: *Context, label: ?[]const u8) !*ast.Node.ControlFlowExpression {
@@ -4226,7 +4333,7 @@ fn transCreateNodeBreak(c: *Context, label: ?[]const u8) !*ast.Node.ControlFlowE
_ = try appendToken(c, .Colon, ":");
break :blk try transCreateNodeIdentifier(c, l);
} else null;
- const node = try c.a().create(ast.Node.ControlFlowExpression);
+ const node = try c.arena.create(ast.Node.ControlFlowExpression);
node.* = .{
.ltoken = ltoken,
.kind = .{ .Break = label_node },
@@ -4240,7 +4347,7 @@ fn transCreateNodeVarDecl(c: *Context, is_pub: bool, is_const: bool, name: []con
const mut_tok = if (is_const) try appendToken(c, .Keyword_const, "const") else try appendToken(c, .Keyword_var, "var");
const name_tok = try appendIdentifier(c, name);
- const node = try c.a().create(ast.Node.VarDecl);
+ const node = try c.arena.create(ast.Node.VarDecl);
node.* = .{
.doc_comments = null,
.visib_token = visib_tok,
@@ -4264,7 +4371,7 @@ fn transCreateNodeWhile(c: *Context) !*ast.Node.While {
const while_tok = try appendToken(c, .Keyword_while, "while");
_ = try appendToken(c, .LParen, "(");
- const node = try c.a().create(ast.Node.While);
+ const node = try c.arena.create(ast.Node.While);
node.* = .{
.label = null,
.inline_token = null,
@@ -4280,7 +4387,7 @@ fn transCreateNodeWhile(c: *Context) !*ast.Node.While {
fn transCreateNodeContinue(c: *Context) !*ast.Node {
const ltoken = try appendToken(c, .Keyword_continue, "continue");
- const node = try c.a().create(ast.Node.ControlFlowExpression);
+ const node = try c.arena.create(ast.Node.ControlFlowExpression);
node.* = .{
.ltoken = ltoken,
.kind = .{ .Continue = null },
@@ -4290,36 +4397,22 @@ fn transCreateNodeContinue(c: *Context) !*ast.Node {
return &node.base;
}
-fn transCreateNodeSwitch(c: *Context) !*ast.Node.Switch {
- const switch_tok = try appendToken(c, .Keyword_switch, "switch");
- _ = try appendToken(c, .LParen, "(");
-
- const node = try c.a().create(ast.Node.Switch);
- node.* = .{
- .switch_token = switch_tok,
- .expr = undefined,
- .cases = ast.Node.Switch.CaseList.init(c.a()),
- .rbrace = undefined,
- };
- return node;
-}
-
fn transCreateNodeSwitchCase(c: *Context, lhs: *ast.Node) !*ast.Node.SwitchCase {
const arrow_tok = try appendToken(c, .EqualAngleBracketRight, "=>");
- const node = try c.a().create(ast.Node.SwitchCase);
+ const node = try ast.Node.SwitchCase.alloc(c.arena, 1);
node.* = .{
- .items = ast.Node.SwitchCase.ItemList.init(c.a()),
+ .items_len = 1,
.arrow_token = arrow_tok,
.payload = null,
.expr = undefined,
};
- try node.items.push(lhs);
+ node.items()[0] = lhs;
return node;
}
fn transCreateNodeSwitchElse(c: *Context) !*ast.Node {
- const node = try c.a().create(ast.Node.SwitchElse);
+ const node = try c.arena.create(ast.Node.SwitchElse);
node.* = .{
.token = try appendToken(c, .Keyword_else, "else"),
};
@@ -4344,15 +4437,15 @@ fn transCreateNodeShiftOp(
const lhs = try transExpr(rp, scope, lhs_expr, .used, .l_value);
const op_token = try appendToken(rp.c, op_tok_id, bytes);
- const cast_node = try transCreateNodeBuiltinFnCall(rp.c, "@intCast");
+ const cast_node = try rp.c.createBuiltinCall("@intCast", 2);
const rhs_type = try qualTypeToLog2IntRef(rp, ZigClangBinaryOperator_getType(stmt), rhs_location);
- try cast_node.params.push(rhs_type);
+ cast_node.params()[0] = rhs_type;
_ = try appendToken(rp.c, .Comma, ",");
const rhs = try transExprCoercing(rp, scope, rhs_expr, .used, .r_value);
- try cast_node.params.push(rhs);
+ cast_node.params()[1] = rhs;
cast_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- const node = try rp.c.a().create(ast.Node.InfixOp);
+ const node = try rp.c.arena.create(ast.Node.InfixOp);
node.* = .{
.op_token = op_token,
.lhs = lhs,
@@ -4364,9 +4457,9 @@ fn transCreateNodeShiftOp(
}
fn transCreateNodePtrDeref(c: *Context, lhs: *ast.Node) !*ast.Node {
- const node = try c.a().create(ast.Node.SuffixOp);
+ const node = try c.arena.create(ast.Node.SuffixOp);
node.* = .{
- .lhs = .{ .node = lhs },
+ .lhs = lhs,
.op = .Deref,
.rtoken = try appendToken(c, .PeriodAsterisk, ".*"),
};
@@ -4375,9 +4468,9 @@ fn transCreateNodePtrDeref(c: *Context, lhs: *ast.Node) !*ast.Node {
fn transCreateNodeArrayAccess(c: *Context, lhs: *ast.Node) !*ast.Node.SuffixOp {
_ = try appendToken(c, .LBrace, "[");
- const node = try c.a().create(ast.Node.SuffixOp);
+ const node = try c.arena.create(ast.Node.SuffixOp);
node.* = .{
- .lhs = .{ .node = lhs },
+ .lhs = lhs,
.op = .{
.ArrayAccess = undefined,
},
@@ -4392,7 +4485,8 @@ const RestorePoint = struct {
src_buf_index: usize,
fn activate(self: RestorePoint) void {
- self.c.tree.tokens.shrink(self.token_index);
+ self.c.token_ids.shrink(self.c.gpa, self.token_index);
+ self.c.token_locs.shrink(self.c.gpa, self.token_index);
self.c.source_buffer.shrink(self.src_buf_index);
}
};
@@ -4400,7 +4494,7 @@ const RestorePoint = struct {
fn makeRestorePoint(c: *Context) RestorePoint {
return RestorePoint{
.c = c,
- .token_index = c.tree.tokens.len,
+ .token_index = c.token_ids.items.len,
.src_buf_index = c.source_buffer.items.len,
};
}
@@ -4647,8 +4741,10 @@ fn finishTransFnProto(
const name_tok = if (fn_decl_context) |ctx| try appendIdentifier(rp.c, ctx.fn_name) else null;
const lparen_tok = try appendToken(rp.c, .LParen, "(");
- var fn_params = ast.Node.FnProto.ParamList.init(rp.c.a());
+ var fn_params = std.ArrayList(ast.Node.FnProto.ParamDecl).init(rp.c.gpa);
+ defer fn_params.deinit();
const param_count: usize = if (fn_proto_ty != null) ZigClangFunctionProtoType_getNumParams(fn_proto_ty.?) else 0;
+ try fn_params.ensureCapacity(param_count + 1); // +1 for possible var args node
var i: usize = 0;
while (i < param_count) : (i += 1) {
@@ -4672,15 +4768,13 @@ fn finishTransFnProto(
const type_node = try transQualType(rp, param_qt, source_loc);
- const param_node = try rp.c.a().create(ast.Node.ParamDecl);
- param_node.* = .{
+ fn_params.addOneAssumeCapacity().* = .{
.doc_comments = null,
.comptime_token = null,
.noalias_token = noalias_tok,
.name_token = param_name_tok,
.param_type = .{ .type_expr = type_node },
};
- try fn_params.push(¶m_node.base);
if (i + 1 < param_count) {
_ = try appendToken(rp.c, .Comma, ",");
@@ -4692,15 +4786,13 @@ fn finishTransFnProto(
_ = try appendToken(rp.c, .Comma, ",");
}
- const var_arg_node = try rp.c.a().create(ast.Node.ParamDecl);
- var_arg_node.* = .{
+ fn_params.addOneAssumeCapacity().* = .{
.doc_comments = null,
.comptime_token = null,
.noalias_token = null,
.name_token = null,
.param_type = .{ .var_args = try appendToken(rp.c, .Ellipsis3, "...") }
};
- try fn_params.push(&var_arg_node.base);
}
const rparen_tok = try appendToken(rp.c, .RParen, ")");
@@ -4713,7 +4805,7 @@ fn finishTransFnProto(
_ = try appendToken(rp.c, .LParen, "(");
const expr = try transCreateNodeStringLiteral(
rp.c,
- try std.fmt.allocPrint(rp.c.a(), "\"{}\"", .{str_ptr[0..str_len]}),
+ try std.fmt.allocPrint(rp.c.arena, "\"{}\"", .{str_ptr[0..str_len]}),
);
_ = try appendToken(rp.c, .RParen, ")");
@@ -4767,13 +4859,13 @@ fn finishTransFnProto(
}
};
- const fn_proto = try rp.c.a().create(ast.Node.FnProto);
+ const fn_proto = try ast.Node.FnProto.alloc(rp.c.arena, fn_params.items.len);
fn_proto.* = .{
.doc_comments = null,
.visib_token = pub_tok,
.fn_token = fn_tok,
.name_token = name_tok,
- .params = fn_params,
+ .params_len = fn_params.items.len,
.return_type = .{ .Explicit = return_type_node },
.var_args_token = null, // TODO this field is broken in the AST data model
.extern_export_inline_token = extern_export_inline_tok,
@@ -4783,6 +4875,7 @@ fn finishTransFnProto(
.section_expr = linksection_expr,
.callconv_expr = callconv_expr,
};
+ mem.copy(ast.Node.FnProto.ParamDecl, fn_proto.params(), fn_params.items);
return fn_proto;
}
@@ -4816,20 +4909,20 @@ pub fn failDecl(c: *Context, loc: ZigClangSourceLocation, name: []const u8, comp
const semi_tok = try appendToken(c, .Semicolon, ";");
_ = try appendTokenFmt(c, .LineComment, "// {}", .{c.locStr(loc)});
- const msg_node = try c.a().create(ast.Node.StringLiteral);
+ const msg_node = try c.arena.create(ast.Node.StringLiteral);
msg_node.* = .{
.token = msg_tok,
};
- const call_node = try c.a().create(ast.Node.BuiltinCall);
+ const call_node = try ast.Node.BuiltinCall.alloc(c.arena, 1);
call_node.* = .{
.builtin_token = builtin_tok,
- .params = ast.Node.BuiltinCall.ParamList.init(c.a()),
+ .params_len = 1,
.rparen_token = rparen_tok,
};
- try call_node.params.push(&msg_node.base);
+ call_node.params()[0] = &msg_node.base;
- const var_decl_node = try c.a().create(ast.Node.VarDecl);
+ const var_decl_node = try c.arena.create(ast.Node.VarDecl);
var_decl_node.* = .{
.doc_comments = null,
.visib_token = pub_tok,
@@ -4856,23 +4949,20 @@ fn appendToken(c: *Context, token_id: Token.Id, bytes: []const u8) !ast.TokenInd
fn appendTokenFmt(c: *Context, token_id: Token.Id, comptime format: []const u8, args: var) !ast.TokenIndex {
assert(token_id != .Invalid);
+
+ try c.token_ids.ensureCapacity(c.gpa, c.token_ids.items.len + 1);
+ try c.token_locs.ensureCapacity(c.gpa, c.token_locs.items.len + 1);
+
const start_index = c.source_buffer.items.len;
- errdefer c.source_buffer.shrink(start_index);
+ try c.source_buffer.outStream().print(format ++ " ", args);
- try c.source_buffer.outStream().print(format, args);
- const end_index = c.source_buffer.items.len;
- const token_index = c.tree.tokens.len;
- const new_token = try c.tree.tokens.addOne();
- errdefer c.tree.tokens.shrink(token_index);
-
- new_token.* = .{
- .id = token_id,
+ c.token_ids.appendAssumeCapacity(token_id);
+ c.token_locs.appendAssumeCapacity(.{
.start = start_index,
- .end = end_index,
- };
- try c.source_buffer.append(' ');
+ .end = c.source_buffer.items.len - 1, // back up before the space
+ });
- return token_index;
+ return c.token_ids.items.len - 1;
}
// TODO hook up with codegen
@@ -4931,7 +5021,7 @@ fn appendIdentifier(c: *Context, name: []const u8) !ast.TokenIndex {
fn transCreateNodeIdentifier(c: *Context, name: []const u8) !*ast.Node {
const token_index = try appendIdentifier(c, name);
- const identifier = try c.a().create(ast.Node.Identifier);
+ const identifier = try c.arena.create(ast.Node.Identifier);
identifier.* = .{
.token = token_index,
};
@@ -4940,7 +5030,7 @@ fn transCreateNodeIdentifier(c: *Context, name: []const u8) !*ast.Node {
fn transCreateNodeIdentifierUnchecked(c: *Context, name: []const u8) !*ast.Node {
const token_index = try appendTokenFmt(c, .Identifier, "{}", .{name});
- const identifier = try c.a().create(ast.Node.Identifier);
+ const identifier = try c.arena.create(ast.Node.Identifier);
identifier.* = .{
.token = token_index,
};
@@ -4955,7 +5045,7 @@ fn transPreprocessorEntities(c: *Context, unit: *ZigClangASTUnit) Error!void {
// TODO if we see #undef, delete it from the table
var it = ZigClangASTUnit_getLocalPreprocessingEntities_begin(unit);
const it_end = ZigClangASTUnit_getLocalPreprocessingEntities_end(unit);
- var tok_list = CTokenList.init(c.a());
+ var tok_list = CTokenList.init(c.arena);
const scope = c.global_scope;
while (it.I != it_end.I) : (it.I += 1) {
@@ -4970,7 +5060,7 @@ fn transPreprocessorEntities(c: *Context, unit: *ZigClangASTUnit) Error!void {
const name = try c.str(raw_name);
// TODO https://github.com/ziglang/zig/issues/3756
// TODO https://github.com/ziglang/zig/issues/1802
- const mangled_name = if (isZigPrimitiveType(name)) try std.fmt.allocPrint(c.a(), "{}_{}", .{ name, c.getMangle() }) else name;
+ const mangled_name = if (isZigPrimitiveType(name)) try std.fmt.allocPrint(c.arena, "{}_{}", .{ name, c.getMangle() }) else name;
if (scope.containsNow(mangled_name)) {
continue;
}
@@ -5060,7 +5150,8 @@ fn transMacroDefine(c: *Context, it: *CTokenList.Iterator, source: []const u8, n
}
fn transMacroFnDefine(c: *Context, it: *CTokenList.Iterator, source: []const u8, name: []const u8, source_loc: ZigClangSourceLocation) ParseError!void {
- const block_scope = try Scope.Block.init(c, &c.global_scope.base, null);
+ var block_scope = try Scope.Block.init(c, &c.global_scope.base, null);
+ defer block_scope.deinit();
const scope = &block_scope.base;
const pub_tok = try appendToken(c, .Keyword_pub, "pub");
@@ -5078,7 +5169,10 @@ fn transMacroFnDefine(c: *Context, it: *CTokenList.Iterator, source: []const u8,
.{},
);
}
- var fn_params = ast.Node.FnProto.ParamList.init(c.a());
+
+ var fn_params = std.ArrayList(ast.Node.FnProto.ParamDecl).init(c.gpa);
+ defer fn_params.deinit();
+
while (true) {
const param_tok = it.next().?;
if (param_tok.id != .Identifier) {
@@ -5096,20 +5190,18 @@ fn transMacroFnDefine(c: *Context, it: *CTokenList.Iterator, source: []const u8,
_ = try appendToken(c, .Colon, ":");
const token_index = try appendToken(c, .Keyword_var, "var");
- const identifier = try c.a().create(ast.Node.Identifier);
+ const identifier = try c.arena.create(ast.Node.Identifier);
identifier.* = .{
.token = token_index,
};
- const param_node = try c.a().create(ast.Node.ParamDecl);
- param_node.* = .{
+ (try fn_params.addOne()).* = .{
.doc_comments = null,
.comptime_token = null,
.noalias_token = null,
.name_token = param_name_tok,
.param_type = .{ .type_expr = &identifier.base },
};
- try fn_params.push(&param_node.base);
if (it.peek().?.id != .Comma)
break;
@@ -5129,16 +5221,15 @@ fn transMacroFnDefine(c: *Context, it: *CTokenList.Iterator, source: []const u8,
_ = try appendToken(c, .RParen, ")");
- const type_of = try transCreateNodeBuiltinFnCall(c, "@TypeOf");
- type_of.rparen_token = try appendToken(c, .RParen, ")");
+ const type_of = try c.createBuiltinCall("@TypeOf", 1);
- const fn_proto = try c.a().create(ast.Node.FnProto);
+ const fn_proto = try ast.Node.FnProto.alloc(c.arena, fn_params.items.len);
fn_proto.* = .{
.visib_token = pub_tok,
.extern_export_inline_token = inline_tok,
.fn_token = fn_tok,
.name_token = name_tok,
- .params = fn_params,
+ .params_len = fn_params.items.len,
.return_type = .{ .Explicit = &type_of.base },
.doc_comments = null,
.var_args_token = null,
@@ -5148,8 +5239,7 @@ fn transMacroFnDefine(c: *Context, it: *CTokenList.Iterator, source: []const u8,
.section_expr = null,
.callconv_expr = null,
};
-
- const block = try transCreateNodeBlock(c, null);
+ mem.copy(ast.Node.FnProto.ParamDecl, fn_proto.params(), fn_params.items);
const return_expr = try transCreateNodeReturnExpr(c);
const expr = try parseCExpr(c, it, source, source_loc, scope);
@@ -5165,17 +5255,18 @@ fn transMacroFnDefine(c: *Context, it: *CTokenList.Iterator, source: []const u8,
_ = try appendToken(c, .Semicolon, ";");
const type_of_arg = if (expr.id != .Block) expr else blk: {
const blk = @fieldParentPtr(ast.Node.Block, "base", expr);
- const blk_last = blk.statements.at(blk.statements.len - 1).*;
+ const blk_last = blk.statements()[blk.statements_len - 1];
std.debug.assert(blk_last.id == .ControlFlowExpression);
const br = @fieldParentPtr(ast.Node.ControlFlowExpression, "base", blk_last);
break :blk br.rhs.?;
};
- try type_of.params.push(type_of_arg);
+ type_of.params()[0] = type_of_arg;
+ type_of.rparen_token = try appendToken(c, .RParen, ")");
return_expr.rhs = expr;
- block.rbrace = try appendToken(c, .RBrace, "}");
- try block.statements.push(&return_expr.base);
- fn_proto.body_node = &block.base;
+ try block_scope.statements.append(&return_expr.base);
+ const block_node = try block_scope.complete(c);
+ fn_proto.body_node = &block_node.base;
_ = try c.global_scope.macro_table.put(name, &fn_proto.base);
}
@@ -5207,22 +5298,23 @@ fn parseCExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8, source_
},
.Comma => {
_ = try appendToken(c, .Semicolon, ";");
- const block_scope = try Scope.Block.init(c, scope, "blk");
- block_scope.block_node = try transCreateNodeBlock(c, block_scope.label);
+ const label_name = "blk";
+ var block_scope = try Scope.Block.init(c, scope, label_name);
+ defer block_scope.deinit();
var last = node;
while (true) {
// suppress result
const lhs = try transCreateNodeIdentifier(c, "_");
const op_token = try appendToken(c, .Equal, "=");
- const op_node = try c.a().create(ast.Node.InfixOp);
+ const op_node = try c.arena.create(ast.Node.InfixOp);
op_node.* = .{
.op_token = op_token,
.lhs = lhs,
.op = .Assign,
.rhs = last,
};
- try block_scope.block_node.statements.push(&op_node.base);
+ try block_scope.statements.append(&op_node.base);
last = try parseCPrefixOpExpr(c, it, source, source_loc, scope);
_ = try appendToken(c, .Semicolon, ";");
@@ -5232,11 +5324,11 @@ fn parseCExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8, source_
}
}
- const break_node = try transCreateNodeBreak(c, block_scope.label);
+ const break_node = try transCreateNodeBreak(c, label_name);
break_node.rhs = last;
- try block_scope.block_node.statements.push(&break_node.base);
- block_scope.block_node.rbrace = try appendToken(c, .RBrace, "}");
- return &block_scope.block_node.base;
+ try block_scope.statements.append(&break_node.base);
+ const block_node = try block_scope.complete(c);
+ return &block_node.base;
},
else => {
_ = it.prev();
@@ -5253,11 +5345,11 @@ fn parseCNumLit(c: *Context, tok: *CToken, source: []const u8, source_loc: ZigCl
switch (lit_bytes[1]) {
'0'...'7' => {
// Octal
- lit_bytes = try std.fmt.allocPrint(c.a(), "0o{}", .{lit_bytes});
+ lit_bytes = try std.fmt.allocPrint(c.arena, "0o{}", .{lit_bytes});
},
'X' => {
// Hexadecimal with capital X, valid in C but not in Zig
- lit_bytes = try std.fmt.allocPrint(c.a(), "0x{}", .{lit_bytes[2..]});
+ lit_bytes = try std.fmt.allocPrint(c.arena, "0x{}", .{lit_bytes[2..]});
},
else => {},
}
@@ -5267,15 +5359,15 @@ fn parseCNumLit(c: *Context, tok: *CToken, source: []const u8, source_loc: ZigCl
return transCreateNodeInt(c, lit_bytes);
}
- const cast_node = try transCreateNodeBuiltinFnCall(c, "@as");
- try cast_node.params.push(try transCreateNodeIdentifier(c, switch (tok.id.IntegerLiteral) {
+ const cast_node = try c.createBuiltinCall("@as", 2);
+ cast_node.params()[0] = try transCreateNodeIdentifier(c, switch (tok.id.IntegerLiteral) {
.U => "c_uint",
.L => "c_long",
.LU => "c_ulong",
.LL => "c_longlong",
.LLU => "c_ulonglong",
else => unreachable,
- }));
+ });
lit_bytes = lit_bytes[0 .. lit_bytes.len - switch (tok.id.IntegerLiteral) {
.U, .L => @as(u8, 1),
.LU, .LL => 2,
@@ -5283,23 +5375,23 @@ fn parseCNumLit(c: *Context, tok: *CToken, source: []const u8, source_loc: ZigCl
else => unreachable,
}];
_ = try appendToken(c, .Comma, ",");
- try cast_node.params.push(try transCreateNodeInt(c, lit_bytes));
+ cast_node.params()[1] = try transCreateNodeInt(c, lit_bytes);
cast_node.rparen_token = try appendToken(c, .RParen, ")");
return &cast_node.base;
} else if (tok.id == .FloatLiteral) {
if (lit_bytes[0] == '.')
- lit_bytes = try std.fmt.allocPrint(c.a(), "0{}", .{lit_bytes});
+ lit_bytes = try std.fmt.allocPrint(c.arena, "0{}", .{lit_bytes});
if (tok.id.FloatLiteral == .None) {
return transCreateNodeFloat(c, lit_bytes);
}
- const cast_node = try transCreateNodeBuiltinFnCall(c, "@as");
- try cast_node.params.push(try transCreateNodeIdentifier(c, switch (tok.id.FloatLiteral) {
+ const cast_node = try c.createBuiltinCall("@as", 2);
+ cast_node.params()[0] = try transCreateNodeIdentifier(c, switch (tok.id.FloatLiteral) {
.F => "f32",
.L => "c_longdouble",
else => unreachable,
- }));
+ });
_ = try appendToken(c, .Comma, ",");
- try cast_node.params.push(try transCreateNodeFloat(c, lit_bytes[0 .. lit_bytes.len - 1]));
+ cast_node.params()[1] = try transCreateNodeFloat(c, lit_bytes[0 .. lit_bytes.len - 1]);
cast_node.rparen_token = try appendToken(c, .RParen, ")");
return &cast_node.base;
} else unreachable;
@@ -5318,7 +5410,7 @@ fn zigifyEscapeSequences(ctx: *Context, source_bytes: []const u8, name: []const
break;
}
} else return source;
- var bytes = try ctx.a().alloc(u8, source.len * 2);
+ var bytes = try ctx.arena.alloc(u8, source.len * 2);
var state: enum {
Start,
Escape,
@@ -5472,14 +5564,14 @@ fn parseCPrimaryExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8,
const first_tok = it.list.at(0);
if (source[tok.start] != '\'' or source[tok.start + 1] == '\\' or tok.end - tok.start == 3) {
const token = try appendToken(c, .CharLiteral, try zigifyEscapeSequences(c, source[tok.start..tok.end], source[first_tok.start..first_tok.end], source_loc));
- const node = try c.a().create(ast.Node.CharLiteral);
+ const node = try c.arena.create(ast.Node.CharLiteral);
node.* = .{
.token = token,
};
return &node.base;
} else {
const token = try appendTokenFmt(c, .IntegerLiteral, "0x{x}", .{source[tok.start+1..tok.end-1]});
- const node = try c.a().create(ast.Node.IntegerLiteral);
+ const node = try c.arena.create(ast.Node.IntegerLiteral);
node.* = .{
.token = token,
};
@@ -5489,7 +5581,7 @@ fn parseCPrimaryExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8,
.StringLiteral => {
const first_tok = it.list.at(0);
const token = try appendToken(c, .StringLiteral, try zigifyEscapeSequences(c, source[tok.start..tok.end], source[first_tok.start..first_tok.end], source_loc));
- const node = try c.a().create(ast.Node.StringLiteral);
+ const node = try c.arena.create(ast.Node.StringLiteral);
node.* = .{
.token = token,
};
@@ -5569,10 +5661,10 @@ fn parseCPrimaryExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8,
//else
// @as(dest, x) )
const if_node = try transCreateNodeIf(c);
- const type_info_node = try transCreateNodeBuiltinFnCall(c, "@typeInfo");
- try type_info_node.params.push(inner_node);
+ const type_info_node = try c.createBuiltinCall("@typeInfo", 1);
+ type_info_node.params()[0] = inner_node;
type_info_node.rparen_token = try appendToken(c, .LParen, ")");
- const cmp_node = try c.a().create(ast.Node.InfixOp);
+ const cmp_node = try c.arena.create(ast.Node.InfixOp);
cmp_node.* = .{
.op_token = try appendToken(c, .EqualEqual, "=="),
.lhs = &type_info_node.base,
@@ -5582,22 +5674,22 @@ fn parseCPrimaryExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8,
if_node.condition = &cmp_node.base;
_ = try appendToken(c, .RParen, ")");
- const int_to_ptr = try transCreateNodeBuiltinFnCall(c, "@intToPtr");
- try int_to_ptr.params.push(inner_node);
- try int_to_ptr.params.push(node_to_cast);
+ const int_to_ptr = try c.createBuiltinCall("@intToPtr", 2);
+ int_to_ptr.params()[0] = inner_node;
+ int_to_ptr.params()[1] = node_to_cast;
int_to_ptr.rparen_token = try appendToken(c, .RParen, ")");
if_node.body = &int_to_ptr.base;
const else_node = try transCreateNodeElse(c);
if_node.@"else" = else_node;
- const as_node = try transCreateNodeBuiltinFnCall(c, "@as");
- try as_node.params.push(inner_node);
- try as_node.params.push(node_to_cast);
+ const as_node = try c.createBuiltinCall("@as", 2);
+ as_node.params()[0] = inner_node;
+ as_node.params()[1] = node_to_cast;
as_node.rparen_token = try appendToken(c, .RParen, ")");
else_node.body = &as_node.base;
- const group_node = try c.a().create(ast.Node.GroupedExpression);
+ const group_node = try c.arena.create(ast.Node.GroupedExpression);
group_node.* = .{
.lparen = lparen,
.expr = &if_node.base,
@@ -5614,14 +5706,14 @@ fn parseCPrimaryExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8,
// @as(dest, x) )
const if_1 = try transCreateNodeIf(c);
- const type_info_1 = try transCreateNodeBuiltinFnCall(c, "@typeInfo");
- const type_of_1 = try transCreateNodeBuiltinFnCall(c, "@TypeOf");
- try type_info_1.params.push(&type_of_1.base);
- try type_of_1.params.push(node_to_cast);
+ const type_info_1 = try c.createBuiltinCall("@typeInfo", 1);
+ const type_of_1 = try c.createBuiltinCall("@TypeOf", 1);
+ type_info_1.params()[0] = &type_of_1.base;
+ type_of_1.params()[0] = node_to_cast;
type_of_1.rparen_token = try appendToken(c, .RParen, ")");
type_info_1.rparen_token = try appendToken(c, .RParen, ")");
- const cmp_1 = try c.a().create(ast.Node.InfixOp);
+ const cmp_1 = try c.arena.create(ast.Node.InfixOp);
cmp_1.* = .{
.op_token = try appendToken(c, .EqualEqual, "=="),
.lhs = &type_info_1.base,
@@ -5633,7 +5725,7 @@ fn parseCPrimaryExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8,
const period_tok = try appendToken(c, .Period, ".");
const child_ident = try transCreateNodeIdentifier(c, "Child");
- const inner_node_child = try c.a().create(ast.Node.InfixOp);
+ const inner_node_child = try c.arena.create(ast.Node.InfixOp);
inner_node_child.* = .{
.op_token = period_tok,
.lhs = inner_node,
@@ -5641,20 +5733,20 @@ fn parseCPrimaryExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8,
.rhs = child_ident,
};
- const align_of = try transCreateNodeBuiltinFnCall(c, "@alignOf");
- try align_of.params.push(&inner_node_child.base);
+ const align_of = try c.createBuiltinCall("@alignOf", 1);
+ align_of.params()[0] = &inner_node_child.base;
align_of.rparen_token = try appendToken(c, .RParen, ")");
// hack to get zig fmt to render a comma in builtin calls
_ = try appendToken(c, .Comma, ",");
- const align_cast = try transCreateNodeBuiltinFnCall(c, "@alignCast");
- try align_cast.params.push(&align_of.base);
- try align_cast.params.push(node_to_cast);
+ const align_cast = try c.createBuiltinCall("@alignCast", 2);
+ align_cast.params()[0] = &align_of.base;
+ align_cast.params()[1] = node_to_cast;
align_cast.rparen_token = try appendToken(c, .RParen, ")");
- const ptr_cast = try transCreateNodeBuiltinFnCall(c, "@ptrCast");
- try ptr_cast.params.push(inner_node);
- try ptr_cast.params.push(&align_cast.base);
+ const ptr_cast = try c.createBuiltinCall("@ptrCast", 2);
+ ptr_cast.params()[0] = inner_node;
+ ptr_cast.params()[1] = &align_cast.base;
ptr_cast.rparen_token = try appendToken(c, .RParen, ")");
if_1.body = &ptr_cast.base;
@@ -5662,14 +5754,14 @@ fn parseCPrimaryExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8,
if_1.@"else" = else_1;
const if_2 = try transCreateNodeIf(c);
- const type_info_2 = try transCreateNodeBuiltinFnCall(c, "@typeInfo");
- const type_of_2 = try transCreateNodeBuiltinFnCall(c, "@TypeOf");
- try type_info_2.params.push(&type_of_2.base);
- try type_of_2.params.push(node_to_cast);
+ const type_info_2 = try c.createBuiltinCall("@typeInfo", 1);
+ const type_of_2 = try c.createBuiltinCall("@TypeOf", 1);
+ type_info_2.params()[0] = &type_of_2.base;
+ type_of_2.params()[0] = node_to_cast;
type_of_2.rparen_token = try appendToken(c, .RParen, ")");
type_info_2.rparen_token = try appendToken(c, .RParen, ")");
- const cmp_2 = try c.a().create(ast.Node.InfixOp);
+ const cmp_2 = try c.arena.create(ast.Node.InfixOp);
cmp_2.* = .{
.op_token = try appendToken(c, .EqualEqual, "=="),
.lhs = &type_info_2.base,
@@ -5677,17 +5769,17 @@ fn parseCPrimaryExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8,
.rhs = try transCreateNodeEnumLiteral(c, "Int"),
};
if_2.condition = &cmp_2.base;
- const cmp_4 = try c.a().create(ast.Node.InfixOp);
+ const cmp_4 = try c.arena.create(ast.Node.InfixOp);
cmp_4.* = .{
.op_token = try appendToken(c, .Keyword_and, "and"),
.lhs = &cmp_2.base,
.op = .BoolAnd,
.rhs = undefined,
};
- const type_info_3 = try transCreateNodeBuiltinFnCall(c, "@typeInfo");
- try type_info_3.params.push(inner_node);
+ const type_info_3 = try c.createBuiltinCall("@typeInfo", 1);
+ type_info_3.params()[0] = inner_node;
type_info_3.rparen_token = try appendToken(c, .LParen, ")");
- const cmp_3 = try c.a().create(ast.Node.InfixOp);
+ const cmp_3 = try c.arena.create(ast.Node.InfixOp);
cmp_3.* = .{
.op_token = try appendToken(c, .EqualEqual, "=="),
.lhs = &type_info_3.base,
@@ -5699,22 +5791,22 @@ fn parseCPrimaryExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8,
else_1.body = &if_2.base;
_ = try appendToken(c, .RParen, ")");
- const int_to_ptr = try transCreateNodeBuiltinFnCall(c, "@intToPtr");
- try int_to_ptr.params.push(inner_node);
- try int_to_ptr.params.push(node_to_cast);
+ const int_to_ptr = try c.createBuiltinCall("@intToPtr", 2);
+ int_to_ptr.params()[0] = inner_node;
+ int_to_ptr.params()[1] = node_to_cast;
int_to_ptr.rparen_token = try appendToken(c, .RParen, ")");
if_2.body = &int_to_ptr.base;
const else_2 = try transCreateNodeElse(c);
if_2.@"else" = else_2;
- const as = try transCreateNodeBuiltinFnCall(c, "@as");
- try as.params.push(inner_node);
- try as.params.push(node_to_cast);
+ const as = try c.createBuiltinCall("@as", 2);
+ as.params()[0] = inner_node;
+ as.params()[1] = node_to_cast;
as.rparen_token = try appendToken(c, .RParen, ")");
else_2.body = &as.base;
- const group_node = try c.a().create(ast.Node.GroupedExpression);
+ const group_node = try c.arena.create(ast.Node.GroupedExpression);
group_node.* = .{
.lparen = lparen,
.expr = &if_1.base,
@@ -5740,7 +5832,7 @@ fn macroBoolToInt(c: *Context, node: *ast.Node) !*ast.Node {
if (!isBoolRes(node)) {
if (node.id != .InfixOp) return node;
- const group_node = try c.a().create(ast.Node.GroupedExpression);
+ const group_node = try c.arena.create(ast.Node.GroupedExpression);
group_node.* = .{
.lparen = try appendToken(c, .LParen, "("),
.expr = node,
@@ -5749,8 +5841,8 @@ fn macroBoolToInt(c: *Context, node: *ast.Node) !*ast.Node {
return &group_node.base;
}
- const builtin_node = try transCreateNodeBuiltinFnCall(c, "@boolToInt");
- try builtin_node.params.push(node);
+ const builtin_node = try c.createBuiltinCall("@boolToInt", 1);
+ builtin_node.params()[0] = node;
builtin_node.rparen_token = try appendToken(c, .RParen, ")");
return &builtin_node.base;
}
@@ -5759,7 +5851,7 @@ fn macroIntToBool(c: *Context, node: *ast.Node) !*ast.Node {
if (isBoolRes(node)) {
if (node.id != .InfixOp) return node;
- const group_node = try c.a().create(ast.Node.GroupedExpression);
+ const group_node = try c.arena.create(ast.Node.GroupedExpression);
group_node.* = .{
.lparen = try appendToken(c, .LParen, "("),
.expr = node,
@@ -5770,14 +5862,14 @@ fn macroIntToBool(c: *Context, node: *ast.Node) !*ast.Node {
const op_token = try appendToken(c, .BangEqual, "!=");
const zero = try transCreateNodeInt(c, 0);
- const res = try c.a().create(ast.Node.InfixOp);
+ const res = try c.arena.create(ast.Node.InfixOp);
res.* = .{
.op_token = op_token,
.lhs = node,
.op = .BangEqual,
.rhs = zero,
};
- const group_node = try c.a().create(ast.Node.GroupedExpression);
+ const group_node = try c.arena.create(ast.Node.GroupedExpression);
group_node.* = .{
.lparen = try appendToken(c, .LParen, "("),
.expr = &res.base,
@@ -5928,10 +6020,12 @@ fn parseCSuffixOpExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8,
continue;
},
.LParen => {
- const call_node = try transCreateNodeFnCall(c, node);
+ _ = try appendToken(c, .LParen, "(");
+ var call_params = std.ArrayList(*ast.Node).init(c.gpa);
+ defer call_params.deinit();
while (true) {
const arg = try parseCPrefixOpExpr(c, it, source, source_loc, scope);
- try call_node.op.Call.params.push(arg);
+ try call_params.append(arg);
const next = it.next().?;
if (next.id == .Comma)
_ = try appendToken(c, .Comma, ",")
@@ -5949,7 +6043,14 @@ fn parseCSuffixOpExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8,
return error.ParseError;
}
}
- call_node.rtoken = try appendToken(c, .RParen, ")");
+ const call_node = try ast.Node.Call.alloc(c.arena, call_params.items.len);
+ call_node.* = .{
+ .lhs = node,
+ .params_len = call_params.items.len,
+ .async_token = null,
+ .rtoken = try appendToken(c, .RParen, ")"),
+ };
+ mem.copy(*ast.Node, call_node.params(), call_params.items);
node = &call_node.base;
continue;
},
@@ -5989,7 +6090,7 @@ fn parseCSuffixOpExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8,
const cast_fn = if (bool_op) macroIntToBool else macroBoolToInt;
const lhs_node = try cast_fn(c, node);
const rhs_node = try parseCPrefixOpExpr(c, it, source, source_loc, scope);
- const op_node = try c.a().create(ast.Node.InfixOp);
+ const op_node = try c.arena.create(ast.Node.InfixOp);
op_node.* = .{
.op_token = op_token,
.lhs = lhs_node,
@@ -6037,7 +6138,7 @@ fn parseCPrefixOpExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8,
}
fn tokenSlice(c: *Context, token: ast.TokenIndex) []u8 {
- const tok = c.tree.tokens.at(token);
+ const tok = c.token_locs.items[token];
const slice = c.source_buffer.span()[tok.start..tok.end];
return if (mem.startsWith(u8, slice, "@\""))
slice[2 .. slice.len - 1]
@@ -6060,9 +6161,8 @@ fn getContainer(c: *Context, node: *ast.Node) ?*ast.Node {
return null;
if (getContainerTypeOf(c, infix.lhs)) |ty_node| {
if (ty_node.cast(ast.Node.ContainerDecl)) |container| {
- var it = container.fields_and_decls.iterator(0);
- while (it.next()) |field_ref| {
- const field = field_ref.*.cast(ast.Node.ContainerField).?;
+ for (container.fieldsAndDecls()) |field_ref| {
+ const field = field_ref.cast(ast.Node.ContainerField).?;
const ident = infix.rhs.cast(ast.Node.Identifier).?;
if (mem.eql(u8, tokenSlice(c, field.name_token), tokenSlice(c, ident.token))) {
return getContainer(c, field.type_expr.?);
@@ -6087,9 +6187,8 @@ fn getContainerTypeOf(c: *Context, ref: *ast.Node) ?*ast.Node {
return null;
if (getContainerTypeOf(c, infix.lhs)) |ty_node| {
if (ty_node.cast(ast.Node.ContainerDecl)) |container| {
- var it = container.fields_and_decls.iterator(0);
- while (it.next()) |field_ref| {
- const field = field_ref.*.cast(ast.Node.ContainerField).?;
+ for (container.fieldsAndDecls()) |field_ref| {
+ const field = field_ref.cast(ast.Node.ContainerField).?;
const ident = infix.rhs.cast(ast.Node.Identifier).?;
if (mem.eql(u8, tokenSlice(c, field.name_token), tokenSlice(c, ident.token))) {
return getContainer(c, field.type_expr.?);
diff --git a/src/analyze.cpp b/src/analyze.cpp
index 5a2629a0ea..88f967240a 100644
--- a/src/analyze.cpp
+++ b/src/analyze.cpp
@@ -3612,6 +3612,12 @@ static void add_top_level_decl(CodeGen *g, ScopeDecls *decls_scope, Tld *tld) {
auto entry = decls_scope->decl_table.put_unique(tld->name, tld);
if (entry) {
Tld *other_tld = entry->value;
+ if (other_tld->id == TldIdVar) {
+ ZigVar *var = reinterpret_cast<TldVar *>(other_tld)->var;
+ if (var != nullptr && var->var_type != nullptr && type_is_invalid(var->var_type)) {
+ return; // already reported compile error
+ }
+ }
ErrorMsg *msg = add_node_error(g, tld->source_node, buf_sprintf("redefinition of '%s'", buf_ptr(tld->name)));
add_error_note(g, msg, other_tld->source_node, buf_sprintf("previous definition is here"));
return;
@@ -3887,9 +3893,18 @@ ZigVar *add_variable(CodeGen *g, AstNode *source_node, Scope *parent_scope, Buf
if (search_scope != nullptr) {
Tld *tld = find_decl(g, search_scope, name);
if (tld != nullptr && tld != src_tld) {
- ErrorMsg *msg = add_node_error(g, source_node,
- buf_sprintf("redefinition of '%s'", buf_ptr(name)));
- add_error_note(g, msg, tld->source_node, buf_sprintf("previous definition is here"));
+ bool want_err_msg = true;
+ if (tld->id == TldIdVar) {
+ ZigVar *var = reinterpret_cast<TldVar *>(tld)->var;
+ if (var != nullptr && var->var_type != nullptr && type_is_invalid(var->var_type)) {
+ want_err_msg = false;
+ }
+ }
+ if (want_err_msg) {
+ ErrorMsg *msg = add_node_error(g, source_node,
+ buf_sprintf("redefinition of '%s'", buf_ptr(name)));
+ add_error_note(g, msg, tld->source_node, buf_sprintf("previous definition is here"));
+ }
variable_entry->var_type = g->builtin_types.entry_invalid;
}
}
diff --git a/src/ir.cpp b/src/ir.cpp
index 20e83ecb9d..48c4162a77 100644
--- a/src/ir.cpp
+++ b/src/ir.cpp
@@ -5300,9 +5300,18 @@ static ZigVar *create_local_var(CodeGen *codegen, AstNode *node, Scope *parent_s
} else {
Tld *tld = find_decl(codegen, parent_scope, name);
if (tld != nullptr) {
- ErrorMsg *msg = add_node_error(codegen, node,
- buf_sprintf("redefinition of '%s'", buf_ptr(name)));
- add_error_note(codegen, msg, tld->source_node, buf_sprintf("previous definition is here"));
+ bool want_err_msg = true;
+ if (tld->id == TldIdVar) {
+ ZigVar *var = reinterpret_cast<TldVar *>(tld)->var;
+ if (var != nullptr && var->var_type != nullptr && type_is_invalid(var->var_type)) {
+ want_err_msg = false;
+ }
+ }
+ if (want_err_msg) {
+ ErrorMsg *msg = add_node_error(codegen, node,
+ buf_sprintf("redefinition of '%s'", buf_ptr(name)));
+ add_error_note(codegen, msg, tld->source_node, buf_sprintf("previous definition is here"));
+ }
variable_entry->var_type = codegen->builtin_types.entry_invalid;
}
}