From 0808d98e10c5fea27cebf912c6296b760c2b837b Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Sat, 30 Jan 2021 20:12:13 -0700 Subject: [PATCH 001/173] add std.MultiArrayList Also known as "Struct-Of-Arrays" or "SOA". The purpose of this data structure is to provide a similar API to ArrayList but instead of the element type being a struct, the fields of the struct are in N different arrays, all with the same length and capacity. Having this abstraction means we can put them in the same allocation, avoiding overhead with the allocator. It also saves a tiny bit of overhead from the redundant capacity and length fields, since each struct element shares the same value. This is an alternate implementation to #7854. --- lib/std/multi_array_list.zig | 353 +++++++++++++++++++++++++++++++++++ lib/std/std.zig | 1 + 2 files changed, 354 insertions(+) create mode 100644 lib/std/multi_array_list.zig diff --git a/lib/std/multi_array_list.zig b/lib/std/multi_array_list.zig new file mode 100644 index 0000000000..84b39765d3 --- /dev/null +++ b/lib/std/multi_array_list.zig @@ -0,0 +1,353 @@ +// SPDX-License-Identifier: MIT +// Copyright (c) 2015-2021 Zig Contributors +// This file is part of [zig](https://ziglang.org/), which is MIT licensed. +// The MIT license requires this copyright notice to be included in all copies +// and substantial portions of the software. +const std = @import("std.zig"); +const assert = std.debug.assert; +const meta = std.meta; +const mem = std.mem; +const Allocator = mem.Allocator; + +pub fn MultiArrayList(comptime S: type) type { + return struct { + bytes: [*]align(@alignOf(S)) u8 = undefined, + len: usize = 0, + capacity: usize = 0, + + pub const Elem = S; + + pub const Field = meta.FieldEnum(S); + + pub const Slice = struct { + /// The index corresponds to sizes.bytes, not in field order. 
+ ptrs: [fields.len][*]u8, + len: usize, + capacity: usize, + + pub fn items(self: Slice, comptime field: Field) []FieldType(field) { + const byte_ptr = self.ptrs[@enumToInt(field)]; + const F = FieldType(field); + const casted_ptr = @ptrCast([*]F, @alignCast(@alignOf(F), byte_ptr)); + return casted_ptr[0..self.len]; + } + + pub fn toMultiArrayList(self: Slice) Self { + if (self.ptrs.len == 0) { + return .{}; + } + const unaligned_ptr = self.ptrs[sizes.fields[0]]; + const aligned_ptr = @alignCast(@alignOf(S), unaligned_ptr); + const casted_ptr = @ptrCast([*]align(@alignOf(S)) u8, aligned_ptr); + return .{ + .bytes = casted_ptr, + .len = self.len, + .capacity = self.capacity, + }; + } + + pub fn deinit(self: *Slice, gpa: *Allocator) void { + var other = self.toMultiArrayList(); + other.deinit(gpa); + self.* = undefined; + } + }; + + const Self = @This(); + + const fields = meta.fields(S); + /// `sizes.bytes` is an array of @sizeOf each S field. Sorted by alignment, descending. + /// `sizes.indexes` is an array mapping from field to its index in the `sizes.bytes` array. + /// `sizes.fields` is an array with the field indexes of the `sizes.bytes` array. 
+ const sizes = blk: { + const Data = struct { + size: usize, + size_index: usize, + alignment: usize, + }; + var data: [fields.len]Data = undefined; + for (fields) |field_info, i| { + data[i] = .{ + .size = @sizeOf(field_info.field_type), + .size_index = i, + .alignment = field_info.alignment, + }; + } + const Sort = struct { + fn lessThan(trash: *i32, lhs: Data, rhs: Data) bool { + return lhs.alignment >= rhs.alignment; + } + }; + var trash: i32 = undefined; // workaround for stage1 compiler bug + std.sort.sort(Data, &data, &trash, Sort.lessThan); + var sizes_bytes: [fields.len]usize = undefined; + var sizes_indexes: [fields.len]usize = undefined; + var field_indexes: [fields.len]usize = undefined; + for (data) |elem, i| { + sizes_bytes[i] = elem.size; + sizes_indexes[elem.size_index] = i; + field_indexes[i] = elem.size_index; + } + break :blk .{ + .bytes = sizes_bytes, + .indexes = sizes_indexes, + .fields = field_indexes, + }; + }; + + /// Release all allocated memory. + pub fn deinit(self: *Self, gpa: *Allocator) void { + gpa.free(self.allocatedBytes()); + self.* = undefined; + } + + /// The caller owns the returned memory. Empties this MultiArrayList. + pub fn toOwnedSlice(self: *Self) Slice { + const result = self.slice(); + self.* = .{}; + return result; + } + + pub fn slice(self: Self) Slice { + var result: Slice = .{ + .ptrs = undefined, + .len = self.len, + .capacity = self.capacity, + }; + var ptr: [*]u8 = self.bytes; + for (sizes.bytes) |field_size, i| { + result.ptrs[sizes.fields[i]] = ptr; + ptr += field_size * self.capacity; + } + return result; + } + + pub fn items(self: Self, comptime field: Field) []FieldType(field) { + return self.slice().items(field); + } + + /// Overwrite one array element with new data. 
+ pub fn set(self: *Self, index: usize, elem: S) void { + const slices = self.slice(); + inline for (fields) |field_info, i| { + slices.items(@intToEnum(Field, i))[index] = @field(elem, field_info.name); + } + } + + /// Obtain all the data for one array element. + pub fn get(self: *Self, index: usize) S { + const slices = self.slice(); + var result: S = undefined; + inline for (fields) |field_info, i| { + @field(elem, field_info.name) = slices.items(@intToEnum(Field, i))[index]; + } + return result; + } + + /// Extend the list by 1 element. Allocates more memory as necessary. + pub fn append(self: *Self, gpa: *Allocator, elem: S) !void { + try self.ensureCapacity(gpa, self.len + 1); + self.appendAssumeCapacity(elem); + } + + /// Extend the list by 1 element, but asserting `self.capacity` + /// is sufficient to hold an additional item. + pub fn appendAssumeCapacity(self: *Self, elem: S) void { + assert(self.len < self.capacity); + self.len += 1; + self.set(self.len - 1, elem); + } + + /// Adjust the list's length to `new_len`. + /// Does not initialize added items, if any. + pub fn resize(self: *Self, gpa: *Allocator, new_len: usize) !void { + try self.ensureCapacity(gpa, new_len); + self.len = new_len; + } + + /// Attempt to reduce allocated capacity to `new_len`. + /// If `new_len` is greater than zero, this may fail to reduce the capacity, + /// but the data remains intact and the length is updated to new_len. 
+ pub fn shrinkAndFree(self: *Self, gpa: *Allocator, new_len: usize) void { + if (new_len == 0) { + gpa.free(self.allocatedBytes()); + self.* = .{}; + return; + } + assert(new_len <= self.capacity); + assert(new_len <= self.len); + + const other_bytes = gpa.allocAdvanced( + u8, + @alignOf(S), + capacityInBytes(new_len), + .exact, + ) catch { + self.len = new_len; + // TODO memset the invalidated items to undefined + return; + }; + var other = Self{ + .bytes = other_bytes.ptr, + .capacity = new_len, + .len = new_len, + }; + self.len = new_len; + const self_slice = self.slice(); + const other_slice = other.slice(); + inline for (fields) |field_info, i| { + const field = @intToEnum(Field, i); + mem.copy(field_info.field_type, other_slice.items(field), self_slice.items(field)); + } + gpa.free(self.allocatedBytes()); + self.* = other; + } + + /// Reduce length to `new_len`. + /// Invalidates pointers to elements `items[new_len..]`. + /// Keeps capacity the same. + pub fn shrinkRetainingCapacity(self: *Self, new_len: usize) void { + self.len = new_len; + } + + /// Modify the array so that it can hold at least `new_capacity` items. + /// Implements super-linear growth to achieve amortized O(1) append operations. + /// Invalidates pointers if additional memory is needed. + pub fn ensureCapacity(self: *Self, gpa: *Allocator, new_capacity: usize) !void { + var better_capacity = self.capacity; + if (better_capacity >= new_capacity) return; + + while (true) { + better_capacity += better_capacity / 2 + 8; + if (better_capacity >= new_capacity) break; + } + + return self.setCapacity(gpa, better_capacity); + } + + /// Modify the array so that it can hold exactly `new_capacity` items. + /// Invalidates pointers if additional memory is needed. + /// `new_capacity` must be greater or equal to `len`. 
+ pub fn setCapacity(self: *Self, gpa: *Allocator, new_capacity: usize) !void { + assert(new_capacity >= self.len); + const new_bytes = try gpa.allocAdvanced( + u8, + @alignOf(S), + capacityInBytes(new_capacity), + .exact, + ); + if (self.len == 0) { + self.bytes = new_bytes.ptr; + self.capacity = new_capacity; + return; + } + var other = Self{ + .bytes = new_bytes.ptr, + .capacity = new_capacity, + .len = self.len, + }; + const self_slice = self.slice(); + const other_slice = other.slice(); + inline for (fields) |field_info, i| { + const field = @intToEnum(Field, i); + mem.copy(field_info.field_type, other_slice.items(field), self_slice.items(field)); + } + gpa.free(self.allocatedBytes()); + self.* = other; + } + + fn capacityInBytes(capacity: usize) usize { + const sizes_vector: std.meta.Vector(sizes.bytes.len, usize) = sizes.bytes; + const capacity_vector = @splat(sizes.bytes.len, capacity); + return @reduce(.Add, capacity_vector * sizes_vector); + } + + fn allocatedBytes(self: Self) []align(@alignOf(S)) u8 { + return self.bytes[0..capacityInBytes(self.capacity)]; + } + + fn FieldType(field: Field) type { + return meta.fieldInfo(S, field).field_type; + } + }; +} + +test "basic usage" { + const testing = std.testing; + const ally = testing.allocator; + + const Foo = struct { + a: u32, + b: []const u8, + c: u8, + }; + + var list = MultiArrayList(Foo){}; + defer list.deinit(ally); + + try list.ensureCapacity(ally, 2); + + list.appendAssumeCapacity(.{ + .a = 1, + .b = "foobar", + .c = 'a', + }); + + list.appendAssumeCapacity(.{ + .a = 2, + .b = "zigzag", + .c = 'b', + }); + + testing.expectEqualSlices(u32, list.items(.a), &[_]u32{ 1, 2 }); + testing.expectEqualSlices(u8, list.items(.c), &[_]u8{ 'a', 'b' }); + + testing.expectEqual(@as(usize, 2), list.items(.b).len); + testing.expectEqualStrings("foobar", list.items(.b)[0]); + testing.expectEqualStrings("zigzag", list.items(.b)[1]); + + try list.append(ally, .{ + .a = 3, + .b = "fizzbuzz", + .c = 'c', + }); + + 
testing.expectEqualSlices(u32, list.items(.a), &[_]u32{ 1, 2, 3 }); + testing.expectEqualSlices(u8, list.items(.c), &[_]u8{ 'a', 'b', 'c' }); + + testing.expectEqual(@as(usize, 3), list.items(.b).len); + testing.expectEqualStrings("foobar", list.items(.b)[0]); + testing.expectEqualStrings("zigzag", list.items(.b)[1]); + testing.expectEqualStrings("fizzbuzz", list.items(.b)[2]); + + // Add 6 more things to force a capacity increase. + var i: usize = 0; + while (i < 6) : (i += 1) { + try list.append(ally, .{ + .a = @intCast(u32, 4 + i), + .b = "whatever", + .c = @intCast(u8, 'd' + i), + }); + } + + testing.expectEqualSlices( + u32, + &[_]u32{ 1, 2, 3, 4, 5, 6, 7, 8, 9 }, + list.items(.a), + ); + testing.expectEqualSlices( + u8, + &[_]u8{ 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i' }, + list.items(.c), + ); + + list.shrinkAndFree(ally, 3); + + testing.expectEqualSlices(u32, list.items(.a), &[_]u32{ 1, 2, 3 }); + testing.expectEqualSlices(u8, list.items(.c), &[_]u8{ 'a', 'b', 'c' }); + + testing.expectEqual(@as(usize, 3), list.items(.b).len); + testing.expectEqualStrings("foobar", list.items(.b)[0]); + testing.expectEqualStrings("zigzag", list.items(.b)[1]); + testing.expectEqualStrings("fizzbuzz", list.items(.b)[2]); +} diff --git a/lib/std/std.zig b/lib/std/std.zig index 780579debf..4d9a8d98f6 100644 --- a/lib/std/std.zig +++ b/lib/std/std.zig @@ -20,6 +20,7 @@ pub const ComptimeStringMap = @import("comptime_string_map.zig").ComptimeStringM pub const DynLib = @import("dynamic_library.zig").DynLib; pub const HashMap = hash_map.HashMap; pub const HashMapUnmanaged = hash_map.HashMapUnmanaged; +pub const MultiArrayList = @import("multi_array_list.zig").MultiArrayList; pub const PackedIntArray = @import("packed_int_array.zig").PackedIntArray; pub const PackedIntArrayEndian = @import("packed_int_array.zig").PackedIntArrayEndian; pub const PackedIntSlice = @import("packed_int_array.zig").PackedIntSlice; From 766b315b3888f0f9ac1ece69131cdf23f98b2c14 Mon Sep 17 00:00:00 2001 
From: Andrew Kelley Date: Sat, 30 Jan 2021 20:15:26 -0700 Subject: [PATCH 002/173] std.GeneralPurposeAllocator: logging improvements It now uses the log scope "gpa" instead of "std". Additionally, there is a new config option `verbose_log` which enables info log messages for every allocation. Can be useful when debugging. This option is off by default. --- lib/std/heap/general_purpose_allocator.zig | 28 +++++++++++++++++++++- 1 file changed, 27 insertions(+), 1 deletion(-) diff --git a/lib/std/heap/general_purpose_allocator.zig b/lib/std/heap/general_purpose_allocator.zig index fb340edfd3..c731f22d66 100644 --- a/lib/std/heap/general_purpose_allocator.zig +++ b/lib/std/heap/general_purpose_allocator.zig @@ -98,7 +98,7 @@ //! in a `std.HashMap` using the backing allocator. const std = @import("std"); -const log = std.log.scoped(.std); +const log = std.log.scoped(.gpa); const math = std.math; const assert = std.debug.assert; const mem = std.mem; @@ -162,6 +162,9 @@ pub const Config = struct { /// logged error messages with stack trace details. The downside is that every allocation /// will be leaked! never_unmap: bool = false, + + /// Enables emitting info messages with the size and address of every allocation. 
+ verbose_log: bool = false, }; pub fn GeneralPurposeAllocator(comptime config: Config) type { @@ -454,10 +457,19 @@ pub fn GeneralPurposeAllocator(comptime config: Config) type { const result_len = try self.backing_allocator.resizeFn(self.backing_allocator, old_mem, old_align, new_size, len_align, ret_addr); if (result_len == 0) { + if (config.verbose_log) { + log.info("large free {d} bytes at {*}", .{ old_mem.len, old_mem.ptr }); + } + self.large_allocations.removeAssertDiscard(@ptrToInt(old_mem.ptr)); return 0; } + if (config.verbose_log) { + log.info("large resize {d} bytes at {*} to {d}", .{ + old_mem.len, old_mem.ptr, new_size, + }); + } entry.value.bytes = old_mem.ptr[0..result_len]; collectStackTrace(ret_addr, &entry.value.stack_addresses); return result_len; @@ -568,6 +580,9 @@ pub fn GeneralPurposeAllocator(comptime config: Config) type { } else { @memset(old_mem.ptr, undefined, old_mem.len); } + if (config.verbose_log) { + log.info("small free {d} bytes at {*}", .{ old_mem.len, old_mem.ptr }); + } return @as(usize, 0); } const new_aligned_size = math.max(new_size, old_align); @@ -576,6 +591,11 @@ pub fn GeneralPurposeAllocator(comptime config: Config) type { if (old_mem.len > new_size) { @memset(old_mem.ptr + new_size, undefined, old_mem.len - new_size); } + if (config.verbose_log) { + log.info("small resize {d} bytes at {*} to {d}", .{ + old_mem.len, old_mem.ptr, new_size, + }); + } return new_size; } return error.OutOfMemory; @@ -623,6 +643,9 @@ pub fn GeneralPurposeAllocator(comptime config: Config) type { gop.entry.value.bytes = slice; collectStackTrace(ret_addr, &gop.entry.value.stack_addresses); + if (config.verbose_log) { + log.info("large alloc {d} bytes at {*}", .{ slice.len, slice.ptr }); + } return slice; } @@ -632,6 +655,9 @@ pub fn GeneralPurposeAllocator(comptime config: Config) type { const new_size_class = math.ceilPowerOfTwoAssert(usize, new_aligned_size); const ptr = try self.allocSlot(new_size_class, ret_addr); + if 
(config.verbose_log) { + log.info("small alloc {d} bytes at {*}", .{ len, ptr }); + } return ptr[0..len]; } From 4dca99d3f6b732c415d270f0c97def144ed6d3b7 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Sat, 30 Jan 2021 20:16:59 -0700 Subject: [PATCH 003/173] stage2: rework AST memory layout This is a proof-of-concept of switching to a new memory layout for tokens and AST nodes. The goal is threefold: * smaller memory footprint * faster performance for tokenization and parsing * most importantly, a proof-of-concept that can be also applied to ZIR and TZIR to improve the entire compiler pipeline in this way. I had a few key insights here: * Underlying premise: using less memory will make things faster, because of fewer allocations and better cache utilization. Also using less memory is valuable in and of itself. * Using a Struct-Of-Arrays for tokens and AST nodes, saves the bytes of padding between the enum tag (which kind of token is it; which kind of AST node is it) and the next fields in the struct. It also improves cache coherence, since one can peek ahead in the tokens array without having to load the source locations of tokens. * Token memory can be conserved by only having the tag (1 byte) and byte offset (4 bytes) for a total of 5 bytes per token. It is not necessary to store the token ending byte offset because one can always re-tokenize later, but also most tokens the length can be trivially determined from the tag alone, and for ones where it doesn't, string literals for example, one must parse the string literal again later anyway in astgen, making it free to re-tokenize. * AST nodes do not actually need to store more than 1 token index because one can poke left and right in the tokens array very cheaply. So far we are left with one big problem though: how can we put AST nodes into an array, since different AST nodes are different sizes? 
This is where my key observation comes in: one can have a hash table for the extra data for the less common AST nodes! But it gets even better than that: I defined this data that is always present for every AST Node: * tag (1 byte) - which AST node is it * main_token (4 bytes, index into tokens array) - the tag determines which token this points to * struct{lhs: u32, rhs: u32} - enough to store 2 indexes to other AST nodes, the tag determines how to interpret this data You can see how a binary operation, such as `a * b` would fit into this structure perfectly. A unary operation, such as `*a` would also fit, and leave `rhs` unused. So this is a total of 13 bytes per AST node. And again, we don't have to pay for the padding to round up to 16 because we store in struct-of-arrays format. I made a further observation: the only kind of data AST nodes need to store other than the main_token is indexes to sub-expressions. That's it. The only purpose of an AST is to bring a tree structure to a list of tokens. This observation means all the data that nodes store are only sets of u32 indexes to other nodes. The other tokens can be found later by the compiler, by poking around in the tokens array, which again is super fast because it is struct-of-arrays, so you often only need to look at the token tags array, which is an array of bytes, very cache friendly. So for nearly every kind of AST node, you can store it in 13 bytes. For the rarer AST nodes that have 3 or more indexes to other nodes to store, either the lhs or the rhs will be repurposed to be an index into an extra_data array which contains the extra AST node indexes. In other words, no hash table needed, it's just 1 big ArrayList with the extra data for AST Nodes. Final observation, no need to have a canonical tag for a given AST. For example: The expression `foo(bar)` is a function call. Function calls can have any number of parameters. 
However in this example, we can encode the function call into the AST with a tag called `FunctionCallOnlyOneParam`, and use lhs for the function expr and rhs for the only parameter expr. Meanwhile if the code was `foo(bar, baz)` then the AST node would have to be `FunctionCall` with lhs still being the function expr, but rhs being the index into `extra_data`. Then because the tag is `FunctionCall` it means `extra_data[rhs]` is the "start" and `extra_data[rhs+1]` is the "end". Now the range `extra_data[start..end]` describes the list of parameters to the function. Point being, you only have to pay for the extra bytes if the AST actually requires it. There's no limit to the number of different AST tag encodings. Preliminary results: * 15% improvement on cache-misses * 28% improvement on total instructions executed * 26% improvement on total CPU cycles * 22% improvement on wall clock time This is 1/4 items on the checklist before this can actually be merged: * [x] parser * [ ] render (zig fmt) * [ ] astgen * [ ] translate-c --- lib/std/zig/ast.zig | 3568 ++++------------------ lib/std/zig/parse.zig | 5698 ++++++++++++++++++----------------- lib/std/zig/parser_test.zig | 23 +- lib/std/zig/tokenizer.zig | 844 +++--- 4 files changed, 3921 insertions(+), 6212 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index b7975fc0f7..823e0312cd 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -9,48 +9,30 @@ const testing = std.testing; const mem = std.mem; const Token = std.zig.Token; -pub const TokenIndex = usize; -pub const NodeIndex = usize; +pub const TokenIndex = u32; +pub const ByteOffset = u32; + +pub const TokenList = std.MultiArrayList(struct { + tag: Token.Tag, + start: ByteOffset, +}); +pub const NodeList = std.MultiArrayList(struct { + tag: Node.Tag, + main_token: TokenIndex, + data: Node.Data, +}); pub const Tree = struct { /// Reference to externally-owned data. 
source: []const u8, - token_ids: []const Token.Id, - token_locs: []const Token.Loc, + + tokens: TokenList.Slice, + /// The root AST node is assumed to be index 0. Since there can be no + /// references to the root node, this means 0 is available to indicate null. + nodes: NodeList.Slice, + extra_data: []Node.Index, + errors: []const Error, - root_node: *Node.Root, - - arena: std.heap.ArenaAllocator.State, - gpa: *mem.Allocator, - - /// translate-c uses this to avoid having to emit correct newlines - /// TODO get rid of this hack - generated: bool = false, - - pub fn deinit(self: *Tree) void { - self.gpa.free(self.token_ids); - self.gpa.free(self.token_locs); - self.gpa.free(self.errors); - self.arena.promote(self.gpa).deinit(); - } - - pub fn renderError(self: *Tree, parse_error: *const Error, stream: anytype) !void { - return parse_error.render(self.token_ids, stream); - } - - pub fn tokenSlice(self: *Tree, token_index: TokenIndex) []const u8 { - return self.tokenSliceLoc(self.token_locs[token_index]); - } - - pub fn tokenSliceLoc(self: *Tree, token: Token.Loc) []const u8 { - return self.source[token.start..token.end]; - } - - pub fn getNodeSource(self: *const Tree, node: *const Node) []const u8 { - const first_token = self.token_locs[node.firstToken()]; - const last_token = self.token_locs[node.lastToken()]; - return self.source[first_token.start..last_token.end]; - } pub const Location = struct { line: usize, @@ -59,21 +41,28 @@ pub const Tree = struct { line_end: usize, }; - /// Return the Location of the token relative to the offset specified by `start_index`. 
- pub fn tokenLocationLoc(self: *Tree, start_index: usize, token: Token.Loc) Location { + pub fn deinit(tree: *Tree, gpa: *mem.Allocator) void { + tree.tokens.deinit(gpa); + tree.nodes.deinit(gpa); + gpa.free(tree.extra_data); + gpa.free(tree.errors); + tree.* = undefined; + } + + pub fn tokenLocation(self: Tree, start_offset: ByteOffset, token_index: TokenIndex) Location { var loc = Location{ .line = 0, .column = 0, - .line_start = start_index, + .line_start = start_offset, .line_end = self.source.len, }; - if (self.generated) - return loc; - const token_start = token.start; - for (self.source[start_index..]) |c, i| { - if (i + start_index == token_start) { - loc.line_end = i + start_index; - while (loc.line_end < self.source.len and self.source[loc.line_end] != '\n') : (loc.line_end += 1) {} + const token_start = self.tokens.items(.start)[token_index]; + for (self.source[start_offset..]) |c, i| { + if (i + start_offset == token_start) { + loc.line_end = i + start_offset; + while (loc.line_end < self.source.len and self.source[loc.line_end] != '\n') { + loc.line_end += 1; + } return loc; } if (c == '\n') { @@ -87,35 +76,130 @@ pub const Tree = struct { return loc; } - pub fn tokenLocation(self: *Tree, start_index: usize, token_index: TokenIndex) Location { - return self.tokenLocationLoc(start_index, self.token_locs[token_index]); + pub fn renderError(tree: Tree, parse_error: Error, stream: anytype) !void { + const tokens = tree.tokens.items(.tag); + switch (parse_error) { + .InvalidToken => |*x| return x.render(tokens, stream), + .ExpectedContainerMembers => |*x| return x.render(tokens, stream), + .ExpectedStringLiteral => |*x| return x.render(tokens, stream), + .ExpectedIntegerLiteral => |*x| return x.render(tokens, stream), + .ExpectedPubItem => |*x| return x.render(tokens, stream), + .ExpectedIdentifier => |*x| return x.render(tokens, stream), + .ExpectedStatement => |*x| return x.render(tokens, stream), + .ExpectedVarDeclOrFn => |*x| return x.render(tokens, 
stream), + .ExpectedVarDecl => |*x| return x.render(tokens, stream), + .ExpectedFn => |*x| return x.render(tokens, stream), + .ExpectedReturnType => |*x| return x.render(tokens, stream), + .ExpectedAggregateKw => |*x| return x.render(tokens, stream), + .UnattachedDocComment => |*x| return x.render(tokens, stream), + .ExpectedEqOrSemi => |*x| return x.render(tokens, stream), + .ExpectedSemiOrLBrace => |*x| return x.render(tokens, stream), + .ExpectedSemiOrElse => |*x| return x.render(tokens, stream), + .ExpectedLabelOrLBrace => |*x| return x.render(tokens, stream), + .ExpectedLBrace => |*x| return x.render(tokens, stream), + .ExpectedColonOrRParen => |*x| return x.render(tokens, stream), + .ExpectedLabelable => |*x| return x.render(tokens, stream), + .ExpectedInlinable => |*x| return x.render(tokens, stream), + .ExpectedAsmOutputReturnOrType => |*x| return x.render(tokens, stream), + .ExpectedCall => |x| return x.render(tree, stream), + .ExpectedCallOrFnProto => |x| return x.render(tree, stream), + .ExpectedSliceOrRBracket => |*x| return x.render(tokens, stream), + .ExtraAlignQualifier => |*x| return x.render(tokens, stream), + .ExtraConstQualifier => |*x| return x.render(tokens, stream), + .ExtraVolatileQualifier => |*x| return x.render(tokens, stream), + .ExtraAllowZeroQualifier => |*x| return x.render(tokens, stream), + .ExpectedTypeExpr => |*x| return x.render(tokens, stream), + .ExpectedPrimaryTypeExpr => |*x| return x.render(tokens, stream), + .ExpectedParamType => |*x| return x.render(tokens, stream), + .ExpectedExpr => |*x| return x.render(tokens, stream), + .ExpectedPrimaryExpr => |*x| return x.render(tokens, stream), + .ExpectedToken => |*x| return x.render(tokens, stream), + .ExpectedCommaOrEnd => |*x| return x.render(tokens, stream), + .ExpectedParamList => |*x| return x.render(tokens, stream), + .ExpectedPayload => |*x| return x.render(tokens, stream), + .ExpectedBlockOrAssignment => |*x| return x.render(tokens, stream), + .ExpectedBlockOrExpression => 
|*x| return x.render(tokens, stream), + .ExpectedExprOrAssignment => |*x| return x.render(tokens, stream), + .ExpectedPrefixExpr => |*x| return x.render(tokens, stream), + .ExpectedLoopExpr => |*x| return x.render(tokens, stream), + .ExpectedDerefOrUnwrap => |*x| return x.render(tokens, stream), + .ExpectedSuffixOp => |*x| return x.render(tokens, stream), + .ExpectedBlockOrField => |*x| return x.render(tokens, stream), + .DeclBetweenFields => |*x| return x.render(tokens, stream), + .InvalidAnd => |*x| return x.render(tokens, stream), + .AsteriskAfterPointerDereference => |*x| return x.render(tokens, stream), + } } - pub fn tokensOnSameLine(self: *Tree, token1_index: TokenIndex, token2_index: TokenIndex) bool { - return self.tokensOnSameLineLoc(self.token_locs[token1_index], self.token_locs[token2_index]); + pub fn errorToken(tree: Tree, parse_error: Error) TokenIndex { + switch (parse_error) { + .InvalidToken => |x| return x.token, + .ExpectedContainerMembers => |x| return x.token, + .ExpectedStringLiteral => |x| return x.token, + .ExpectedIntegerLiteral => |x| return x.token, + .ExpectedPubItem => |x| return x.token, + .ExpectedIdentifier => |x| return x.token, + .ExpectedStatement => |x| return x.token, + .ExpectedVarDeclOrFn => |x| return x.token, + .ExpectedVarDecl => |x| return x.token, + .ExpectedFn => |x| return x.token, + .ExpectedReturnType => |x| return x.token, + .ExpectedAggregateKw => |x| return x.token, + .UnattachedDocComment => |x| return x.token, + .ExpectedEqOrSemi => |x| return x.token, + .ExpectedSemiOrLBrace => |x| return x.token, + .ExpectedSemiOrElse => |x| return x.token, + .ExpectedLabelOrLBrace => |x| return x.token, + .ExpectedLBrace => |x| return x.token, + .ExpectedColonOrRParen => |x| return x.token, + .ExpectedLabelable => |x| return x.token, + .ExpectedInlinable => |x| return x.token, + .ExpectedAsmOutputReturnOrType => |x| return x.token, + .ExpectedCall => |x| return tree.nodes.items(.main_token)[x.node], + .ExpectedCallOrFnProto 
=> |x| return tree.nodes.items(.main_token)[x.node], + .ExpectedSliceOrRBracket => |x| return x.token, + .ExtraAlignQualifier => |x| return x.token, + .ExtraConstQualifier => |x| return x.token, + .ExtraVolatileQualifier => |x| return x.token, + .ExtraAllowZeroQualifier => |x| return x.token, + .ExpectedTypeExpr => |x| return x.token, + .ExpectedPrimaryTypeExpr => |x| return x.token, + .ExpectedParamType => |x| return x.token, + .ExpectedExpr => |x| return x.token, + .ExpectedPrimaryExpr => |x| return x.token, + .ExpectedToken => |x| return x.token, + .ExpectedCommaOrEnd => |x| return x.token, + .ExpectedParamList => |x| return x.token, + .ExpectedPayload => |x| return x.token, + .ExpectedBlockOrAssignment => |x| return x.token, + .ExpectedBlockOrExpression => |x| return x.token, + .ExpectedExprOrAssignment => |x| return x.token, + .ExpectedPrefixExpr => |x| return x.token, + .ExpectedLoopExpr => |x| return x.token, + .ExpectedDerefOrUnwrap => |x| return x.token, + .ExpectedSuffixOp => |x| return x.token, + .ExpectedBlockOrField => |x| return x.token, + .DeclBetweenFields => |x| return x.token, + .InvalidAnd => |x| return x.token, + .AsteriskAfterPointerDereference => |x| return x.token, + } } - pub fn tokensOnSameLineLoc(self: *Tree, token1: Token.Loc, token2: Token.Loc) bool { - return mem.indexOfScalar(u8, self.source[token1.end..token2.start], '\n') == null; - } - - pub fn dump(self: *Tree) void { - self.root_node.base.dump(0); - } - - /// Skips over comments - pub fn prevToken(self: *Tree, token_index: TokenIndex) TokenIndex { + /// Skips over comments. + pub fn prevToken(self: *const Tree, token_index: TokenIndex) TokenIndex { + const token_tags = self.tokens.items(.tag); var index = token_index - 1; - while (self.token_ids[index] == Token.Id.LineComment) { + while (token_tags[index] == .LineComment) { index -= 1; } return index; } - /// Skips over comments - pub fn nextToken(self: *Tree, token_index: TokenIndex) TokenIndex { + /// Skips over comments. 
+ pub fn nextToken(self: *const Tree, token_index: TokenIndex) TokenIndex { + const token_tags = self.tokens.items(.tag); var index = token_index + 1; - while (self.token_ids[index] == Token.Id.LineComment) { + while (token_tags[index] == .LineComment) { index += 1; } return index; @@ -173,114 +257,6 @@ pub const Error = union(enum) { InvalidAnd: InvalidAnd, AsteriskAfterPointerDereference: AsteriskAfterPointerDereference, - pub fn render(self: *const Error, tokens: []const Token.Id, stream: anytype) !void { - switch (self.*) { - .InvalidToken => |*x| return x.render(tokens, stream), - .ExpectedContainerMembers => |*x| return x.render(tokens, stream), - .ExpectedStringLiteral => |*x| return x.render(tokens, stream), - .ExpectedIntegerLiteral => |*x| return x.render(tokens, stream), - .ExpectedPubItem => |*x| return x.render(tokens, stream), - .ExpectedIdentifier => |*x| return x.render(tokens, stream), - .ExpectedStatement => |*x| return x.render(tokens, stream), - .ExpectedVarDeclOrFn => |*x| return x.render(tokens, stream), - .ExpectedVarDecl => |*x| return x.render(tokens, stream), - .ExpectedFn => |*x| return x.render(tokens, stream), - .ExpectedReturnType => |*x| return x.render(tokens, stream), - .ExpectedAggregateKw => |*x| return x.render(tokens, stream), - .UnattachedDocComment => |*x| return x.render(tokens, stream), - .ExpectedEqOrSemi => |*x| return x.render(tokens, stream), - .ExpectedSemiOrLBrace => |*x| return x.render(tokens, stream), - .ExpectedSemiOrElse => |*x| return x.render(tokens, stream), - .ExpectedLabelOrLBrace => |*x| return x.render(tokens, stream), - .ExpectedLBrace => |*x| return x.render(tokens, stream), - .ExpectedColonOrRParen => |*x| return x.render(tokens, stream), - .ExpectedLabelable => |*x| return x.render(tokens, stream), - .ExpectedInlinable => |*x| return x.render(tokens, stream), - .ExpectedAsmOutputReturnOrType => |*x| return x.render(tokens, stream), - .ExpectedCall => |*x| return x.render(tokens, stream), - 
.ExpectedCallOrFnProto => |*x| return x.render(tokens, stream), - .ExpectedSliceOrRBracket => |*x| return x.render(tokens, stream), - .ExtraAlignQualifier => |*x| return x.render(tokens, stream), - .ExtraConstQualifier => |*x| return x.render(tokens, stream), - .ExtraVolatileQualifier => |*x| return x.render(tokens, stream), - .ExtraAllowZeroQualifier => |*x| return x.render(tokens, stream), - .ExpectedTypeExpr => |*x| return x.render(tokens, stream), - .ExpectedPrimaryTypeExpr => |*x| return x.render(tokens, stream), - .ExpectedParamType => |*x| return x.render(tokens, stream), - .ExpectedExpr => |*x| return x.render(tokens, stream), - .ExpectedPrimaryExpr => |*x| return x.render(tokens, stream), - .ExpectedToken => |*x| return x.render(tokens, stream), - .ExpectedCommaOrEnd => |*x| return x.render(tokens, stream), - .ExpectedParamList => |*x| return x.render(tokens, stream), - .ExpectedPayload => |*x| return x.render(tokens, stream), - .ExpectedBlockOrAssignment => |*x| return x.render(tokens, stream), - .ExpectedBlockOrExpression => |*x| return x.render(tokens, stream), - .ExpectedExprOrAssignment => |*x| return x.render(tokens, stream), - .ExpectedPrefixExpr => |*x| return x.render(tokens, stream), - .ExpectedLoopExpr => |*x| return x.render(tokens, stream), - .ExpectedDerefOrUnwrap => |*x| return x.render(tokens, stream), - .ExpectedSuffixOp => |*x| return x.render(tokens, stream), - .ExpectedBlockOrField => |*x| return x.render(tokens, stream), - .DeclBetweenFields => |*x| return x.render(tokens, stream), - .InvalidAnd => |*x| return x.render(tokens, stream), - .AsteriskAfterPointerDereference => |*x| return x.render(tokens, stream), - } - } - - pub fn loc(self: *const Error) TokenIndex { - switch (self.*) { - .InvalidToken => |x| return x.token, - .ExpectedContainerMembers => |x| return x.token, - .ExpectedStringLiteral => |x| return x.token, - .ExpectedIntegerLiteral => |x| return x.token, - .ExpectedPubItem => |x| return x.token, - .ExpectedIdentifier => 
|x| return x.token, - .ExpectedStatement => |x| return x.token, - .ExpectedVarDeclOrFn => |x| return x.token, - .ExpectedVarDecl => |x| return x.token, - .ExpectedFn => |x| return x.token, - .ExpectedReturnType => |x| return x.token, - .ExpectedAggregateKw => |x| return x.token, - .UnattachedDocComment => |x| return x.token, - .ExpectedEqOrSemi => |x| return x.token, - .ExpectedSemiOrLBrace => |x| return x.token, - .ExpectedSemiOrElse => |x| return x.token, - .ExpectedLabelOrLBrace => |x| return x.token, - .ExpectedLBrace => |x| return x.token, - .ExpectedColonOrRParen => |x| return x.token, - .ExpectedLabelable => |x| return x.token, - .ExpectedInlinable => |x| return x.token, - .ExpectedAsmOutputReturnOrType => |x| return x.token, - .ExpectedCall => |x| return x.node.firstToken(), - .ExpectedCallOrFnProto => |x| return x.node.firstToken(), - .ExpectedSliceOrRBracket => |x| return x.token, - .ExtraAlignQualifier => |x| return x.token, - .ExtraConstQualifier => |x| return x.token, - .ExtraVolatileQualifier => |x| return x.token, - .ExtraAllowZeroQualifier => |x| return x.token, - .ExpectedTypeExpr => |x| return x.token, - .ExpectedPrimaryTypeExpr => |x| return x.token, - .ExpectedParamType => |x| return x.token, - .ExpectedExpr => |x| return x.token, - .ExpectedPrimaryExpr => |x| return x.token, - .ExpectedToken => |x| return x.token, - .ExpectedCommaOrEnd => |x| return x.token, - .ExpectedParamList => |x| return x.token, - .ExpectedPayload => |x| return x.token, - .ExpectedBlockOrAssignment => |x| return x.token, - .ExpectedBlockOrExpression => |x| return x.token, - .ExpectedExprOrAssignment => |x| return x.token, - .ExpectedPrefixExpr => |x| return x.token, - .ExpectedLoopExpr => |x| return x.token, - .ExpectedDerefOrUnwrap => |x| return x.token, - .ExpectedSuffixOp => |x| return x.token, - .ExpectedBlockOrField => |x| return x.token, - .DeclBetweenFields => |x| return x.token, - .InvalidAnd => |x| return x.token, - .AsteriskAfterPointerDereference => |x| return 
x.token, - } - } - pub const InvalidToken = SingleTokenError("Invalid token '{s}'"); pub const ExpectedContainerMembers = SingleTokenError("Expected test, comptime, var decl, or container field, found '{s}'"); pub const ExpectedStringLiteral = SingleTokenError("Expected string literal, found '{s}'"); @@ -291,7 +267,7 @@ pub const Error = union(enum) { pub const ExpectedVarDecl = SingleTokenError("Expected variable declaration, found '{s}'"); pub const ExpectedFn = SingleTokenError("Expected function, found '{s}'"); pub const ExpectedReturnType = SingleTokenError("Expected 'var' or return type expression, found '{s}'"); - pub const ExpectedAggregateKw = SingleTokenError("Expected '" ++ Token.Id.Keyword_struct.symbol() ++ "', '" ++ Token.Id.Keyword_union.symbol() ++ "', '" ++ Token.Id.Keyword_enum.symbol() ++ "', or '" ++ Token.Id.Keyword_opaque.symbol() ++ "', found '{s}'"); + pub const ExpectedAggregateKw = SingleTokenError("Expected '" ++ Token.Tag.Keyword_struct.symbol() ++ "', '" ++ Token.Tag.Keyword_union.symbol() ++ "', '" ++ Token.Tag.Keyword_enum.symbol() ++ "', or '" ++ Token.Tag.Keyword_opaque.symbol() ++ "', found '{s}'"); pub const ExpectedEqOrSemi = SingleTokenError("Expected '=' or ';', found '{s}'"); pub const ExpectedSemiOrLBrace = SingleTokenError("Expected ';' or '{{', found '{s}'"); pub const ExpectedSemiOrElse = SingleTokenError("Expected ';' or 'else', found '{s}'"); @@ -300,7 +276,7 @@ pub const Error = union(enum) { pub const ExpectedColonOrRParen = SingleTokenError("Expected ':' or ')', found '{s}'"); pub const ExpectedLabelable = SingleTokenError("Expected 'while', 'for', 'inline', 'suspend', or '{{', found '{s}'"); pub const ExpectedInlinable = SingleTokenError("Expected 'while' or 'for', found '{s}'"); - pub const ExpectedAsmOutputReturnOrType = SingleTokenError("Expected '->' or '" ++ Token.Id.Identifier.symbol() ++ "', found '{s}'"); + pub const ExpectedAsmOutputReturnOrType = SingleTokenError("Expected '->' or '" ++ 
Token.Tag.Identifier.symbol() ++ "', found '{s}'"); pub const ExpectedSliceOrRBracket = SingleTokenError("Expected ']' or '..', found '{s}'"); pub const ExpectedTypeExpr = SingleTokenError("Expected type expression, found '{s}'"); pub const ExpectedPrimaryTypeExpr = SingleTokenError("Expected primary type expression, found '{s}'"); @@ -329,29 +305,31 @@ pub const Error = union(enum) { pub const AsteriskAfterPointerDereference = SimpleError("`.*` can't be followed by `*`. Are you missing a space?"); pub const ExpectedCall = struct { - node: *Node, + node: Node.Index, - pub fn render(self: *const ExpectedCall, tokens: []const Token.Id, stream: anytype) !void { + pub fn render(self: ExpectedCall, tree: Tree, stream: anytype) !void { + const node_tag = tree.nodes.items(.tag)[self.node]; return stream.print("expected " ++ @tagName(Node.Tag.Call) ++ ", found {s}", .{ - @tagName(self.node.tag), + @tagName(node_tag), }); } }; pub const ExpectedCallOrFnProto = struct { - node: *Node, + node: Node.Index, - pub fn render(self: *const ExpectedCallOrFnProto, tokens: []const Token.Id, stream: anytype) !void { + pub fn render(self: ExpectedCallOrFnProto, tree: Tree, stream: anytype) !void { + const node_tag = tree.nodes.items(.tag)[self.node]; return stream.print("expected " ++ @tagName(Node.Tag.Call) ++ " or " ++ - @tagName(Node.Tag.FnProto) ++ ", found {s}", .{@tagName(self.node.tag)}); + @tagName(Node.Tag.FnProto) ++ ", found {s}", .{@tagName(node_tag)}); } }; pub const ExpectedToken = struct { token: TokenIndex, - expected_id: Token.Id, + expected_id: Token.Tag, - pub fn render(self: *const ExpectedToken, tokens: []const Token.Id, stream: anytype) !void { + pub fn render(self: *const ExpectedToken, tokens: []const Token.Tag, stream: anytype) !void { const found_token = tokens[self.token]; switch (found_token) { .Invalid => { @@ -367,9 +345,9 @@ pub const Error = union(enum) { pub const ExpectedCommaOrEnd = struct { token: TokenIndex, - end_id: Token.Id, + end_id: Token.Tag, - 
pub fn render(self: *const ExpectedCommaOrEnd, tokens: []const Token.Id, stream: anytype) !void { + pub fn render(self: *const ExpectedCommaOrEnd, tokens: []const Token.Tag, stream: anytype) !void { const actual_token = tokens[self.token]; return stream.print("expected ',' or '{s}', found '{s}'", .{ self.end_id.symbol(), @@ -384,7 +362,7 @@ pub const Error = union(enum) { token: TokenIndex, - pub fn render(self: *const ThisError, tokens: []const Token.Id, stream: anytype) !void { + pub fn render(self: *const ThisError, tokens: []const Token.Tag, stream: anytype) !void { const actual_token = tokens[self.token]; return stream.print(msg, .{actual_token.symbol()}); } @@ -397,2886 +375,466 @@ pub const Error = union(enum) { token: TokenIndex, - pub fn render(self: *const ThisError, tokens: []const Token.Id, stream: anytype) !void { + pub fn render(self: *const ThisError, tokens: []const Token.Tag, stream: anytype) !void { return stream.writeAll(msg); } }; } + + pub fn loc(self: Error) TokenIndex { + switch (self) { + .InvalidToken => |x| return x.token, + .ExpectedContainerMembers => |x| return x.token, + .ExpectedStringLiteral => |x| return x.token, + .ExpectedIntegerLiteral => |x| return x.token, + .ExpectedPubItem => |x| return x.token, + .ExpectedIdentifier => |x| return x.token, + .ExpectedStatement => |x| return x.token, + .ExpectedVarDeclOrFn => |x| return x.token, + .ExpectedVarDecl => |x| return x.token, + .ExpectedFn => |x| return x.token, + .ExpectedReturnType => |x| return x.token, + .ExpectedAggregateKw => |x| return x.token, + .UnattachedDocComment => |x| return x.token, + .ExpectedEqOrSemi => |x| return x.token, + .ExpectedSemiOrLBrace => |x| return x.token, + .ExpectedSemiOrElse => |x| return x.token, + .ExpectedLabelOrLBrace => |x| return x.token, + .ExpectedLBrace => |x| return x.token, + .ExpectedColonOrRParen => |x| return x.token, + .ExpectedLabelable => |x| return x.token, + .ExpectedInlinable => |x| return x.token, + .ExpectedAsmOutputReturnOrType 
=> |x| return x.token, + .ExpectedCall => |x| @panic("TODO redo ast errors"), + .ExpectedCallOrFnProto => |x| @panic("TODO redo ast errors"), + .ExpectedSliceOrRBracket => |x| return x.token, + .ExtraAlignQualifier => |x| return x.token, + .ExtraConstQualifier => |x| return x.token, + .ExtraVolatileQualifier => |x| return x.token, + .ExtraAllowZeroQualifier => |x| return x.token, + .ExpectedTypeExpr => |x| return x.token, + .ExpectedPrimaryTypeExpr => |x| return x.token, + .ExpectedParamType => |x| return x.token, + .ExpectedExpr => |x| return x.token, + .ExpectedPrimaryExpr => |x| return x.token, + .ExpectedToken => |x| return x.token, + .ExpectedCommaOrEnd => |x| return x.token, + .ExpectedParamList => |x| return x.token, + .ExpectedPayload => |x| return x.token, + .ExpectedBlockOrAssignment => |x| return x.token, + .ExpectedBlockOrExpression => |x| return x.token, + .ExpectedExprOrAssignment => |x| return x.token, + .ExpectedPrefixExpr => |x| return x.token, + .ExpectedLoopExpr => |x| return x.token, + .ExpectedDerefOrUnwrap => |x| return x.token, + .ExpectedSuffixOp => |x| return x.token, + .ExpectedBlockOrField => |x| return x.token, + .DeclBetweenFields => |x| return x.token, + .InvalidAnd => |x| return x.token, + .AsteriskAfterPointerDereference => |x| return x.token, + } + } }; pub const Node = struct { - tag: Tag, + index: Index, + + pub const Index = u32; + + comptime { + // Goal is to keep this under one byte for efficiency. + assert(@sizeOf(Tag) == 1); + } pub const Tag = enum { - // Top level + /// sub_list[lhs...rhs] Root, - Use, + /// lhs is the sub-expression. rhs is unused. + UsingNamespace, + /// lhs is test name token (must be string literal), if any. + /// rhs is the body node. TestDecl, - - // Statements - VarDecl, + /// lhs is the index into global_var_decl_list. + /// rhs is the initialization expression, if any. + GlobalVarDecl, + /// `var a: x align(y) = rhs` + /// lhs is the index into local_var_decl_list. 
+ LocalVarDecl, + /// `var a: lhs = rhs`. lhs and rhs may be unused. + /// Can be local or global. + SimpleVarDecl, + /// `var a align(lhs) = rhs`. lhs and rhs may be unused. + /// Can be local or global. + AlignedVarDecl, + /// lhs is the identifier token payload if any, + /// rhs is the deferred expression. + ErrDefer, + /// lhs is unused. + /// rhs is the deferred expression. Defer, - - // Infix operators + /// lhs is target expr; rhs is fallback expr. + /// payload is determined by looking at the prev tokens before rhs. Catch, - - // SimpleInfixOp - Add, - AddWrap, - ArrayCat, - ArrayMult, - Assign, - AssignBitAnd, - AssignBitOr, - AssignBitShiftLeft, - AssignBitShiftRight, - AssignBitXor, - AssignDiv, - AssignSub, - AssignSubWrap, - AssignMod, - AssignAdd, - AssignAddWrap, - AssignMul, - AssignMulWrap, - BangEqual, - BitAnd, - BitOr, - BitShiftLeft, - BitShiftRight, - BitXor, - BoolAnd, - BoolOr, - Div, - EqualEqual, - ErrorUnion, - GreaterOrEqual, - GreaterThan, - LessOrEqual, - LessThan, - MergeErrorSets, - Mod, - Mul, - MulWrap, - Period, - Range, - Sub, - SubWrap, - OrElse, - - // SimplePrefixOp - AddressOf, - Await, - BitNot, - BoolNot, - OptionalType, - Negation, - NegationWrap, - Resume, - Try, - - ArrayType, - /// ArrayType but has a sentinel node. - ArrayTypeSentinel, - PtrType, - SliceType, - /// `a[b..c]` - Slice, - /// `a.*` - Deref, - /// `a.?` + /// `lhs.a`. main_token is the dot. rhs is the identifier token index. + FieldAccess, + /// `lhs.?`. main_token is the dot. rhs is the `?` token index. UnwrapOptional, - /// `a[b]` + /// `lhs == rhs`. main_token is op. + EqualEqual, + /// `lhs != rhs`. main_token is op. + BangEqual, + /// `lhs < rhs`. main_token is op. + LessThan, + /// `lhs > rhs`. main_token is op. + GreaterThan, + /// `lhs <= rhs`. main_token is op. + LessOrEqual, + /// `lhs >= rhs`. main_token is op. + GreaterOrEqual, + /// `lhs *= rhs`. main_token is op. + AssignMul, + /// `lhs /= rhs`. main_token is op. 
+ AssignDiv, + /// `lhs *= rhs`. main_token is op. + AssignMod, + /// `lhs += rhs`. main_token is op. + AssignAdd, + /// `lhs -= rhs`. main_token is op. + AssignSub, + /// `lhs <<= rhs`. main_token is op. + AssignBitShiftLeft, + /// `lhs >>= rhs`. main_token is op. + AssignBitShiftRight, + /// `lhs &= rhs`. main_token is op. + AssignBitAnd, + /// `lhs ^= rhs`. main_token is op. + AssignBitXor, + /// `lhs |= rhs`. main_token is op. + AssignBitOr, + /// `lhs *%= rhs`. main_token is op. + AssignMulWrap, + /// `lhs +%= rhs`. main_token is op. + AssignAddWrap, + /// `lhs -%= rhs`. main_token is op. + AssignSubWrap, + /// `lhs = rhs`. main_token is op. + Assign, + /// `lhs || rhs`. main_token is the `||`. + MergeErrorSets, + /// `lhs * rhs`. main_token is the `*`. + Mul, + /// `lhs / rhs`. main_token is the `/`. + Div, + /// `lhs % rhs`. main_token is the `%`. + Mod, + /// `lhs ** rhs`. main_token is the `**`. + ArrayMult, + /// `lhs *% rhs`. main_token is the `*%`. + MulWrap, + /// `lhs + rhs`. main_token is the `+`. + Add, + /// `lhs - rhs`. main_token is the `-`. + Sub, + /// `lhs ++ rhs`. main_token is the `++`. + ArrayCat, + /// `lhs +% rhs`. main_token is the `+%`. + AddWrap, + /// `lhs -% rhs`. main_token is the `-%`. + SubWrap, + /// `lhs << rhs`. main_token is the `<<`. + BitShiftLeft, + /// `lhs >> rhs`. main_token is the `>>`. + BitShiftRight, + /// `lhs & rhs`. main_token is the `&`. + BitAnd, + /// `lhs ^ rhs`. main_token is the `^`. + BitXor, + /// `lhs | rhs`. main_token is the `|`. + BitOr, + /// `lhs orelse rhs`. main_token is the `orelse`. + OrElse, + /// `lhs and rhs`. main_token is the `and`. + BoolAnd, + /// `lhs or rhs`. main_token is the `or`. + BoolOr, + /// `op lhs`. rhs unused. main_token is op. + BoolNot, + /// `op lhs`. rhs unused. main_token is op. + Negation, + /// `op lhs`. rhs unused. main_token is op. + BitNot, + /// `op lhs`. rhs unused. main_token is op. + NegationWrap, + /// `op lhs`. rhs unused. main_token is op. 
+ AddressOf, + /// `op lhs`. rhs unused. main_token is op. + Try, + /// `op lhs`. rhs unused. main_token is op. + Await, + /// `?lhs`. rhs unused. main_token is the `?`. + OptionalType, + /// `[lhs]rhs`. lhs can be omitted to make it a slice. + ArrayType, + /// `[lhs:a]b`. `ArrayTypeSentinel[rhs]`. + ArrayTypeSentinel, + /// `[*]align(lhs) rhs`. lhs can be omitted. + /// `*align(lhs) rhs`. lhs can be omitted. + /// `[]rhs`. + PtrTypeAligned, + /// `[*:lhs]rhs`. lhs can be omitted. + /// `*rhs`. + /// `[:lhs]rhs`. + PtrTypeSentinel, + /// lhs is index into PtrType. rhs is the element type expression. + PtrType, + /// lhs is index into SliceType. rhs is the element type expression. + /// Can be pointer or slice, depending on main_token. + SliceType, + /// `lhs[rhs..]` + /// main_token is the `[`. + SliceOpen, + /// `lhs[b..c :d]`. `slice_list[rhs]`. + /// main_token is the `[`. + Slice, + /// `lhs.*`. rhs is unused. + Deref, + /// `lhs[rhs]`. ArrayAccess, - /// `T{a, b}` - ArrayInitializer, - /// ArrayInitializer but with `.` instead of a left-hand-side operand. - ArrayInitializerDot, - /// `T{.a = b}` - StructInitializer, - /// StructInitializer but with `.` instead of a left-hand-side operand. - StructInitializerDot, - /// `foo()` + /// `lhs{rhs}`. rhs can be omitted. + ArrayInitOne, + /// `.{lhs, rhs}`. lhs and rhs can be omitted. + ArrayInitDotTwo, + /// `.{a, b}`. `sub_list[lhs..rhs]`. + ArrayInitDot, + /// `lhs{a, b}`. `sub_range_list[rhs]`. lhs can be omitted which means `.{a, b}`. + ArrayInit, + /// `lhs{.a = rhs}`. rhs can be omitted making it empty. + StructInitOne, + /// `.{.a = lhs, .b = rhs}`. lhs and rhs can be omitted. + StructInitDotTwo, + /// `.{.a = b, .c = d}`. `sub_list[lhs..rhs]`. + StructInitDot, + /// `lhs{.a = b, .c = d}`. `sub_range_list[rhs]`. + /// lhs can be omitted which means `.{.a = b, .c = d}`. + StructInit, + /// `lhs(rhs)`. rhs can be omitted. + CallOne, + /// `lhs(a, b, c)`. `sub_range_list[rhs]`. + /// main_token is the `(`. 
Call, - - // Control flow + /// `switch(lhs) {}`. `sub_range_list[rhs]`. Switch, + /// `lhs => rhs`. If lhs is omitted it means `else`. + /// main_token is the `=>` + SwitchCaseOne, + /// `a, b, c => rhs`. `sub_range_list[lhs]`. + SwitchCaseMulti, + /// `lhs...rhs`. + SwitchRange, + /// `while (lhs) rhs`. + WhileSimple, + /// `while (lhs) |x| rhs`. + WhileSimpleOptional, + /// `while (lhs) : (a) b`. `WhileCont[rhs]`. + WhileCont, + /// `while (lhs) : (a) b`. `WhileCont[rhs]`. + WhileContOptional, + /// `while (lhs) : (a) b else c`. `While[rhs]`. While, + /// `while (lhs) |x| : (a) b else c`. `While[rhs]`. + WhileOptional, + /// `while (lhs) |x| : (a) b else |y| c`. `While[rhs]`. + WhileError, + /// `for (lhs) rhs`. + ForSimple, + /// `for (lhs) a else b`. `if_list[rhs]`. For, + /// `if (lhs) rhs`. + IfSimple, + /// `if (lhs) |a| rhs`. + IfSimpleOptional, + /// `if (lhs) a else b`. `if_list[rhs]`. If, + /// `if (lhs) |x| a else b`. `if_list[rhs]`. + IfOptional, + /// `if (lhs) |x| a else |y| b`. `if_list[rhs]`. + IfError, + /// `suspend lhs`. lhs can be omitted. rhs is unused. Suspend, + /// `resume lhs`. rhs is unused. + Resume, + /// `continue`. lhs is token index of label if any. rhs is unused. Continue, + /// `break rhs`. rhs can be omitted. lhs is label token index, if any. Break, + /// `return lhs`. lhs can be omitted. rhs is unused. Return, - - // Type expressions - AnyType, - ErrorType, + /// `fn(a: lhs) rhs`. lhs can be omitted. + /// anytype and ... parameters are omitted from the AST tree. + FnProtoSimple, + /// `fn(a: b, c: d) rhs`. `sub_range_list[lhs]`. + /// anytype and ... parameters are omitted from the AST tree. + FnProtoSimpleMulti, + /// `fn(a: b) rhs linksection(e) callconv(f)`. lhs is index into extra_data. + /// zero or one parameters. + /// anytype and ... parameters are omitted from the AST tree. + FnProtoOne, + /// `fn(a: b, c: d) rhs linksection(e) callconv(f)`. `fn_proto_list[lhs]`. + /// anytype and ... 
parameters are omitted from the AST tree. FnProto, + /// lhs is the FnProto, rhs is the function body block. + FnDecl, + /// `anyframe->rhs`. main_token is `anyframe`. `lhs` is arrow token index. AnyFrameType, - - // Primary expressions - IntegerLiteral, - FloatLiteral, - EnumLiteral, - StringLiteral, - MultilineStringLiteral, - CharLiteral, - BoolLiteral, - NullLiteral, - UndefinedLiteral, - Unreachable, + /// Could be integer literal, float literal, char literal, bool literal, + /// null literal, undefined literal, unreachable, depending on the token. + /// Both lhs and rhs unused. + OneToken, + /// Both lhs and rhs unused. + /// Most identifiers will not have explicit AST nodes, however for expressions + /// which could be one of many different kinds of AST nodes, there will be an + /// Identifier AST node for it. Identifier, + /// lhs is the dot token index, rhs unused, main_token is the identifier. + EnumLiteral, + /// Both lhs and rhs unused. + MultilineStringLiteral, + /// `(lhs)`. main_token is the `(`; rhs is the token index of the `)`. GroupedExpression, + /// `@a(lhs, rhs)`. lhs and rhs may be omitted. + BuiltinCallTwo, + /// `@a(b, c)`. `sub_list[lhs..rhs]`. BuiltinCall, + /// `error{a, b}`. + /// lhs and rhs both unused. ErrorSetDecl, + /// `struct {}`, `union {}`, etc. `sub_list[lhs..rhs]`. ContainerDecl, - Asm, - Comptime, - Nosuspend, - Block, - LabeledBlock, - - // Misc - DocComment, - SwitchCase, // TODO make this not a child of AST Node - SwitchElse, // TODO make this not a child of AST Node - Else, // TODO make this not a child of AST Node - Payload, // TODO make this not a child of AST Node - PointerPayload, // TODO make this not a child of AST Node - PointerIndexPayload, // TODO make this not a child of AST Node + /// `union(lhs)` / `enum(lhs)`. `sub_range_list[rhs]`. + ContainerDeclArg, + /// `union(enum) {}`. `sub_list[lhs..rhs]`. + /// Note that tagged unions with explicitly provided enums are represented + /// by `ContainerDeclArg`. 
+ TaggedUnion, + /// `union(enum(lhs)) {}`. `sub_list_range[rhs]`. + TaggedUnionEnumTag, + /// `a: lhs = rhs,`. lhs and rhs can be omitted. + ContainerFieldInit, + /// `a: lhs align(rhs),`. rhs can be omitted. + ContainerFieldAlign, + /// `a: lhs align(c) = d,`. `container_field_list[rhs]`. ContainerField, - ErrorTag, // TODO make this not a child of AST Node - FieldInitializer, // TODO make this not a child of AST Node - - pub fn Type(tag: Tag) type { - return switch (tag) { - .Root => Root, - .Use => Use, - .TestDecl => TestDecl, - .VarDecl => VarDecl, - .Defer => Defer, - .Catch => Catch, - - .Add, - .AddWrap, - .ArrayCat, - .ArrayMult, - .Assign, - .AssignBitAnd, - .AssignBitOr, - .AssignBitShiftLeft, - .AssignBitShiftRight, - .AssignBitXor, - .AssignDiv, - .AssignSub, - .AssignSubWrap, - .AssignMod, - .AssignAdd, - .AssignAddWrap, - .AssignMul, - .AssignMulWrap, - .BangEqual, - .BitAnd, - .BitOr, - .BitShiftLeft, - .BitShiftRight, - .BitXor, - .BoolAnd, - .BoolOr, - .Div, - .EqualEqual, - .ErrorUnion, - .GreaterOrEqual, - .GreaterThan, - .LessOrEqual, - .LessThan, - .MergeErrorSets, - .Mod, - .Mul, - .MulWrap, - .Period, - .Range, - .Sub, - .SubWrap, - .OrElse, - => SimpleInfixOp, - - .AddressOf, - .Await, - .BitNot, - .BoolNot, - .OptionalType, - .Negation, - .NegationWrap, - .Resume, - .Try, - => SimplePrefixOp, - - .Identifier, - .BoolLiteral, - .NullLiteral, - .UndefinedLiteral, - .Unreachable, - .AnyType, - .ErrorType, - .IntegerLiteral, - .FloatLiteral, - .StringLiteral, - .CharLiteral, - => OneToken, - - .Continue, - .Break, - .Return, - => ControlFlowExpression, - - .ArrayType => ArrayType, - .ArrayTypeSentinel => ArrayTypeSentinel, - - .PtrType => PtrType, - .SliceType => SliceType, - .Slice => Slice, - .Deref, .UnwrapOptional => SimpleSuffixOp, - .ArrayAccess => ArrayAccess, - - .ArrayInitializer => ArrayInitializer, - .ArrayInitializerDot => ArrayInitializerDot, - - .StructInitializer => StructInitializer, - .StructInitializerDot => 
StructInitializerDot, - - .Call => Call, - .Switch => Switch, - .While => While, - .For => For, - .If => If, - .Suspend => Suspend, - .FnProto => FnProto, - .AnyFrameType => AnyFrameType, - .EnumLiteral => EnumLiteral, - .MultilineStringLiteral => MultilineStringLiteral, - .GroupedExpression => GroupedExpression, - .BuiltinCall => BuiltinCall, - .ErrorSetDecl => ErrorSetDecl, - .ContainerDecl => ContainerDecl, - .Asm => Asm, - .Comptime => Comptime, - .Nosuspend => Nosuspend, - .Block => Block, - .LabeledBlock => LabeledBlock, - .DocComment => DocComment, - .SwitchCase => SwitchCase, - .SwitchElse => SwitchElse, - .Else => Else, - .Payload => Payload, - .PointerPayload => PointerPayload, - .PointerIndexPayload => PointerIndexPayload, - .ContainerField => ContainerField, - .ErrorTag => ErrorTag, - .FieldInitializer => FieldInitializer, - }; - } - - pub fn isBlock(tag: Tag) bool { - return switch (tag) { - .Block, .LabeledBlock => true, - else => false, - }; - } + /// `anytype`. both lhs and rhs unused. + /// Used by `ContainerField`. + AnyType, + /// `comptime lhs`. rhs unused. + Comptime, + /// `nosuspend lhs`. rhs unused. + Nosuspend, + /// `{}`. `sub_list[lhs..rhs]`. + Block, + /// `asm(lhs)`. rhs unused. + AsmSimple, + /// `asm(lhs, a)`. `sub_range_list[rhs]`. + Asm, + /// `[a] "b" (c)`. lhs is string literal token index, rhs is 0. + /// `[a] "b" (-> rhs)`. lhs is the string literal token index, rhs is type expr. + /// main_token is `a`. + AsmOutput, + /// `[a] "b" (rhs)`. lhs is string literal token index. + /// main_token is `a`. + AsmInput, + /// `error.a`. lhs is token index of `.`. rhs is token index of `a`. + ErrorValue, + /// `lhs!rhs`. main_token is the `!`. + ErrorUnion, }; - /// Prefer `castTag` to this. 
- pub fn cast(base: *Node, comptime T: type) ?*T { - if (std.meta.fieldInfo(T, .base).default_value) |default_base| { - return base.castTag(default_base.tag); - } - inline for (@typeInfo(Tag).Enum.fields) |field| { - const tag = @intToEnum(Tag, field.value); - if (base.tag == tag) { - if (T == tag.Type()) { - return @fieldParentPtr(T, "base", base); - } - return null; - } - } - unreachable; - } - - pub fn castTag(base: *Node, comptime tag: Tag) ?*tag.Type() { - if (base.tag == tag) { - return @fieldParentPtr(tag.Type(), "base", base); - } - return null; - } - - pub fn iterate(base: *Node, index: usize) ?*Node { - inline for (@typeInfo(Tag).Enum.fields) |field| { - const tag = @intToEnum(Tag, field.value); - if (base.tag == tag) { - return @fieldParentPtr(tag.Type(), "base", base).iterate(index); - } - } - unreachable; - } - - pub fn firstToken(base: *const Node) TokenIndex { - inline for (@typeInfo(Tag).Enum.fields) |field| { - const tag = @intToEnum(Tag, field.value); - if (base.tag == tag) { - return @fieldParentPtr(tag.Type(), "base", base).firstToken(); - } - } - unreachable; - } - - pub fn lastToken(base: *const Node) TokenIndex { - inline for (@typeInfo(Tag).Enum.fields) |field| { - const tag = @intToEnum(Tag, field.value); - if (base.tag == tag) { - return @fieldParentPtr(tag.Type(), "base", base).lastToken(); - } - } - unreachable; - } - - pub fn requireSemiColon(base: *const Node) bool { - var n = base; - while (true) { - switch (n.tag) { - .Root, - .ContainerField, - .Block, - .LabeledBlock, - .Payload, - .PointerPayload, - .PointerIndexPayload, - .Switch, - .SwitchCase, - .SwitchElse, - .FieldInitializer, - .DocComment, - .TestDecl, - => return false, - - .While => { - const while_node = @fieldParentPtr(While, "base", n); - if (while_node.@"else") |@"else"| { - n = &@"else".base; - continue; - } - - return !while_node.body.tag.isBlock(); - }, - .For => { - const for_node = @fieldParentPtr(For, "base", n); - if (for_node.@"else") |@"else"| { - n = 
&@"else".base; - continue; - } - - return !for_node.body.tag.isBlock(); - }, - .If => { - const if_node = @fieldParentPtr(If, "base", n); - if (if_node.@"else") |@"else"| { - n = &@"else".base; - continue; - } - - return !if_node.body.tag.isBlock(); - }, - .Else => { - const else_node = @fieldParentPtr(Else, "base", n); - n = else_node.body; - continue; - }, - .Defer => { - const defer_node = @fieldParentPtr(Defer, "base", n); - return !defer_node.expr.tag.isBlock(); - }, - .Comptime => { - const comptime_node = @fieldParentPtr(Comptime, "base", n); - return !comptime_node.expr.tag.isBlock(); - }, - .Suspend => { - const suspend_node = @fieldParentPtr(Suspend, "base", n); - if (suspend_node.body) |body| { - return !body.tag.isBlock(); - } - - return true; - }, - .Nosuspend => { - const nosuspend_node = @fieldParentPtr(Nosuspend, "base", n); - return !nosuspend_node.expr.tag.isBlock(); - }, - else => return true, - } - } - } - - /// Asserts the node is a Block or LabeledBlock and returns the statements slice. - pub fn blockStatements(base: *Node) []*Node { - if (base.castTag(.Block)) |block| { - return block.statements(); - } else if (base.castTag(.LabeledBlock)) |labeled_block| { - return labeled_block.statements(); - } else { - unreachable; - } - } - - pub fn findFirstWithId(self: *Node, id: Id) ?*Node { - if (self.id == id) return self; - var child_i: usize = 0; - while (self.iterate(child_i)) |child| : (child_i += 1) { - if (child.findFirstWithId(id)) |result| return result; - } - return null; - } - - pub fn dump(self: *Node, indent: usize) void { - { - var i: usize = 0; - while (i < indent) : (i += 1) { - std.debug.warn(" ", .{}); - } - } - std.debug.warn("{s}\n", .{@tagName(self.tag)}); - - var child_i: usize = 0; - while (self.iterate(child_i)) |child| : (child_i += 1) { - child.dump(indent + 2); - } - } - - /// The decls data follows this struct in memory as an array of Node pointers. 
- pub const Root = struct { - base: Node = Node{ .tag = .Root }, - eof_token: TokenIndex, - decls_len: NodeIndex, - - /// After this the caller must initialize the decls list. - pub fn create(allocator: *mem.Allocator, decls_len: NodeIndex, eof_token: TokenIndex) !*Root { - const bytes = try allocator.alignedAlloc(u8, @alignOf(Root), sizeInBytes(decls_len)); - const self = @ptrCast(*Root, bytes.ptr); - self.* = .{ - .eof_token = eof_token, - .decls_len = decls_len, - }; - return self; - } - - pub fn destroy(self: *Decl, allocator: *mem.Allocator) void { - const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.decls_len)]; - allocator.free(bytes); - } - - pub fn iterate(self: *const Root, index: usize) ?*Node { - var i = index; - - if (i < self.decls_len) return self.declsConst()[i]; - return null; - } - - pub fn decls(self: *Root) []*Node { - const decls_start = @ptrCast([*]u8, self) + @sizeOf(Root); - return @ptrCast([*]*Node, decls_start)[0..self.decls_len]; - } - - pub fn declsConst(self: *const Root) []const *Node { - const decls_start = @ptrCast([*]const u8, self) + @sizeOf(Root); - return @ptrCast([*]const *Node, decls_start)[0..self.decls_len]; - } - - pub fn firstToken(self: *const Root) TokenIndex { - if (self.decls_len == 0) return self.eof_token; - return self.declsConst()[0].firstToken(); - } - - pub fn lastToken(self: *const Root) TokenIndex { - if (self.decls_len == 0) return self.eof_token; - return self.declsConst()[self.decls_len - 1].lastToken(); - } - - fn sizeInBytes(decls_len: NodeIndex) usize { - return @sizeOf(Root) + @sizeOf(*Node) * @as(usize, decls_len); - } + pub const Data = struct { + lhs: Index, + rhs: Index, }; - /// Trailed in memory by possibly many things, with each optional thing - /// determined by a bit in `trailer_flags`. 
- pub const VarDecl = struct { - base: Node = Node{ .tag = .VarDecl }, - trailer_flags: TrailerFlags, - mut_token: TokenIndex, - name_token: TokenIndex, - semicolon_token: TokenIndex, - - pub const TrailerFlags = std.meta.TrailerFlags(struct { - doc_comments: *DocComment, - visib_token: TokenIndex, - thread_local_token: TokenIndex, - eq_token: TokenIndex, - comptime_token: TokenIndex, - extern_export_token: TokenIndex, - lib_name: *Node, - type_node: *Node, - align_node: *Node, - section_node: *Node, - init_node: *Node, - }); - - pub fn getDocComments(self: *const VarDecl) ?*DocComment { - return self.getTrailer(.doc_comments); - } - - pub fn setDocComments(self: *VarDecl, value: *DocComment) void { - self.setTrailer(.doc_comments, value); - } - - pub fn getVisibToken(self: *const VarDecl) ?TokenIndex { - return self.getTrailer(.visib_token); - } - - pub fn setVisibToken(self: *VarDecl, value: TokenIndex) void { - self.setTrailer(.visib_token, value); - } - - pub fn getThreadLocalToken(self: *const VarDecl) ?TokenIndex { - return self.getTrailer(.thread_local_token); - } - - pub fn setThreadLocalToken(self: *VarDecl, value: TokenIndex) void { - self.setTrailer(.thread_local_token, value); - } - - pub fn getEqToken(self: *const VarDecl) ?TokenIndex { - return self.getTrailer(.eq_token); - } - - pub fn setEqToken(self: *VarDecl, value: TokenIndex) void { - self.setTrailer(.eq_token, value); - } - - pub fn getComptimeToken(self: *const VarDecl) ?TokenIndex { - return self.getTrailer(.comptime_token); - } - - pub fn setComptimeToken(self: *VarDecl, value: TokenIndex) void { - self.setTrailer(.comptime_token, value); - } - - pub fn getExternExportToken(self: *const VarDecl) ?TokenIndex { - return self.getTrailer(.extern_export_token); - } - - pub fn setExternExportToken(self: *VarDecl, value: TokenIndex) void { - self.setTrailer(.extern_export_token, value); - } - - pub fn getLibName(self: *const VarDecl) ?*Node { - return self.getTrailer(.lib_name); - } - - pub fn 
setLibName(self: *VarDecl, value: *Node) void { - self.setTrailer(.lib_name, value); - } - - pub fn getTypeNode(self: *const VarDecl) ?*Node { - return self.getTrailer(.type_node); - } - - pub fn setTypeNode(self: *VarDecl, value: *Node) void { - self.setTrailer(.type_node, value); - } - - pub fn getAlignNode(self: *const VarDecl) ?*Node { - return self.getTrailer(.align_node); - } - - pub fn setAlignNode(self: *VarDecl, value: *Node) void { - self.setTrailer(.align_node, value); - } - - pub fn getSectionNode(self: *const VarDecl) ?*Node { - return self.getTrailer(.section_node); - } - - pub fn setSectionNode(self: *VarDecl, value: *Node) void { - self.setTrailer(.section_node, value); - } - - pub fn getInitNode(self: *const VarDecl) ?*Node { - return self.getTrailer(.init_node); - } - - pub fn setInitNode(self: *VarDecl, value: *Node) void { - self.setTrailer(.init_node, value); - } - - pub const RequiredFields = struct { - mut_token: TokenIndex, - name_token: TokenIndex, - semicolon_token: TokenIndex, - }; - - fn getTrailer(self: *const VarDecl, comptime field: TrailerFlags.FieldEnum) ?TrailerFlags.Field(field) { - const trailers_start = @ptrCast([*]const u8, self) + @sizeOf(VarDecl); - return self.trailer_flags.get(trailers_start, field); - } - - fn setTrailer(self: *VarDecl, comptime field: TrailerFlags.FieldEnum, value: TrailerFlags.Field(field)) void { - const trailers_start = @ptrCast([*]u8, self) + @sizeOf(VarDecl); - self.trailer_flags.set(trailers_start, field, value); - } - - pub fn create(allocator: *mem.Allocator, required: RequiredFields, trailers: TrailerFlags.InitStruct) !*VarDecl { - const trailer_flags = TrailerFlags.init(trailers); - const bytes = try allocator.alignedAlloc(u8, @alignOf(VarDecl), sizeInBytes(trailer_flags)); - const var_decl = @ptrCast(*VarDecl, bytes.ptr); - var_decl.* = .{ - .trailer_flags = trailer_flags, - .mut_token = required.mut_token, - .name_token = required.name_token, - .semicolon_token = required.semicolon_token, - }; 
- const trailers_start = bytes.ptr + @sizeOf(VarDecl); - trailer_flags.setMany(trailers_start, trailers); - return var_decl; - } - - pub fn destroy(self: *VarDecl, allocator: *mem.Allocator) void { - const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.trailer_flags)]; - allocator.free(bytes); - } - - pub fn iterate(self: *const VarDecl, index: usize) ?*Node { - var i = index; - - if (self.getTypeNode()) |type_node| { - if (i < 1) return type_node; - i -= 1; - } - - if (self.getAlignNode()) |align_node| { - if (i < 1) return align_node; - i -= 1; - } - - if (self.getSectionNode()) |section_node| { - if (i < 1) return section_node; - i -= 1; - } - - if (self.getInitNode()) |init_node| { - if (i < 1) return init_node; - i -= 1; - } - - return null; - } - - pub fn firstToken(self: *const VarDecl) TokenIndex { - if (self.getVisibToken()) |visib_token| return visib_token; - if (self.getThreadLocalToken()) |thread_local_token| return thread_local_token; - if (self.getComptimeToken()) |comptime_token| return comptime_token; - if (self.getExternExportToken()) |extern_export_token| return extern_export_token; - assert(self.getLibName() == null); - return self.mut_token; - } - - pub fn lastToken(self: *const VarDecl) TokenIndex { - return self.semicolon_token; - } - - fn sizeInBytes(trailer_flags: TrailerFlags) usize { - return @sizeOf(VarDecl) + trailer_flags.sizeInBytes(); - } - }; - - pub const Use = struct { - base: Node = Node{ .tag = .Use }, - doc_comments: ?*DocComment, - visib_token: ?TokenIndex, - use_token: TokenIndex, - expr: *Node, - semicolon_token: TokenIndex, - - pub fn iterate(self: *const Use, index: usize) ?*Node { - var i = index; - - if (i < 1) return self.expr; - i -= 1; - - return null; - } - - pub fn firstToken(self: *const Use) TokenIndex { - if (self.visib_token) |visib_token| return visib_token; - return self.use_token; - } - - pub fn lastToken(self: *const Use) TokenIndex { - return self.semicolon_token; - } - }; - - pub const ErrorSetDecl = 
struct { - base: Node = Node{ .tag = .ErrorSetDecl }, - error_token: TokenIndex, - rbrace_token: TokenIndex, - decls_len: NodeIndex, - - /// After this the caller must initialize the decls list. - pub fn alloc(allocator: *mem.Allocator, decls_len: NodeIndex) !*ErrorSetDecl { - const bytes = try allocator.alignedAlloc(u8, @alignOf(ErrorSetDecl), sizeInBytes(decls_len)); - return @ptrCast(*ErrorSetDecl, bytes.ptr); - } - - pub fn free(self: *ErrorSetDecl, allocator: *mem.Allocator) void { - const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.decls_len)]; - allocator.free(bytes); - } - - pub fn iterate(self: *const ErrorSetDecl, index: usize) ?*Node { - var i = index; - - if (i < self.decls_len) return self.declsConst()[i]; - i -= self.decls_len; - - return null; - } - - pub fn firstToken(self: *const ErrorSetDecl) TokenIndex { - return self.error_token; - } - - pub fn lastToken(self: *const ErrorSetDecl) TokenIndex { - return self.rbrace_token; - } - - pub fn decls(self: *ErrorSetDecl) []*Node { - const decls_start = @ptrCast([*]u8, self) + @sizeOf(ErrorSetDecl); - return @ptrCast([*]*Node, decls_start)[0..self.decls_len]; - } - - pub fn declsConst(self: *const ErrorSetDecl) []const *Node { - const decls_start = @ptrCast([*]const u8, self) + @sizeOf(ErrorSetDecl); - return @ptrCast([*]const *Node, decls_start)[0..self.decls_len]; - } - - fn sizeInBytes(decls_len: NodeIndex) usize { - return @sizeOf(ErrorSetDecl) + @sizeOf(*Node) * @as(usize, decls_len); - } - }; - - /// The fields and decls Node pointers directly follow this struct in memory. - pub const ContainerDecl = struct { - base: Node = Node{ .tag = .ContainerDecl }, - kind_token: TokenIndex, - layout_token: ?TokenIndex, - lbrace_token: TokenIndex, - rbrace_token: TokenIndex, - fields_and_decls_len: NodeIndex, - init_arg_expr: InitArg, - - pub const InitArg = union(enum) { - None, - Enum: ?*Node, - Type: *Node, - }; - - /// After this the caller must initialize the fields_and_decls list. 
- pub fn alloc(allocator: *mem.Allocator, fields_and_decls_len: NodeIndex) !*ContainerDecl { - const bytes = try allocator.alignedAlloc(u8, @alignOf(ContainerDecl), sizeInBytes(fields_and_decls_len)); - return @ptrCast(*ContainerDecl, bytes.ptr); - } - - pub fn free(self: *ContainerDecl, allocator: *mem.Allocator) void { - const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.fields_and_decls_len)]; - allocator.free(bytes); - } - - pub fn iterate(self: *const ContainerDecl, index: usize) ?*Node { - var i = index; - - switch (self.init_arg_expr) { - .Type => |t| { - if (i < 1) return t; - i -= 1; - }, - .None, .Enum => {}, - } - - if (i < self.fields_and_decls_len) return self.fieldsAndDeclsConst()[i]; - i -= self.fields_and_decls_len; - - return null; - } - - pub fn firstToken(self: *const ContainerDecl) TokenIndex { - if (self.layout_token) |layout_token| { - return layout_token; - } - return self.kind_token; - } - - pub fn lastToken(self: *const ContainerDecl) TokenIndex { - return self.rbrace_token; - } - - pub fn fieldsAndDecls(self: *ContainerDecl) []*Node { - const decls_start = @ptrCast([*]u8, self) + @sizeOf(ContainerDecl); - return @ptrCast([*]*Node, decls_start)[0..self.fields_and_decls_len]; - } - - pub fn fieldsAndDeclsConst(self: *const ContainerDecl) []const *Node { - const decls_start = @ptrCast([*]const u8, self) + @sizeOf(ContainerDecl); - return @ptrCast([*]const *Node, decls_start)[0..self.fields_and_decls_len]; - } - - fn sizeInBytes(fields_and_decls_len: NodeIndex) usize { - return @sizeOf(ContainerDecl) + @sizeOf(*Node) * @as(usize, fields_and_decls_len); - } - }; - - pub const ContainerField = struct { - base: Node = Node{ .tag = .ContainerField }, - doc_comments: ?*DocComment, - comptime_token: ?TokenIndex, - name_token: TokenIndex, - type_expr: ?*Node, - value_expr: ?*Node, - align_expr: ?*Node, - - pub fn iterate(self: *const ContainerField, index: usize) ?*Node { - var i = index; - - if (self.type_expr) |type_expr| { - if (i < 1) return 
type_expr; - i -= 1; - } - - if (self.align_expr) |align_expr| { - if (i < 1) return align_expr; - i -= 1; - } - - if (self.value_expr) |value_expr| { - if (i < 1) return value_expr; - i -= 1; - } - - return null; - } - - pub fn firstToken(self: *const ContainerField) TokenIndex { - return self.comptime_token orelse self.name_token; - } - - pub fn lastToken(self: *const ContainerField) TokenIndex { - if (self.value_expr) |value_expr| { - return value_expr.lastToken(); - } - if (self.align_expr) |align_expr| { - // The expression refers to what's inside the parenthesis, the - // last token is the closing one - return align_expr.lastToken() + 1; - } - if (self.type_expr) |type_expr| { - return type_expr.lastToken(); - } - - return self.name_token; - } - }; - - pub const ErrorTag = struct { - base: Node = Node{ .tag = .ErrorTag }, - doc_comments: ?*DocComment, - name_token: TokenIndex, - - pub fn iterate(self: *const ErrorTag, index: usize) ?*Node { - var i = index; - - if (self.doc_comments) |comments| { - if (i < 1) return &comments.base; - i -= 1; - } - - return null; - } - - pub fn firstToken(self: *const ErrorTag) TokenIndex { - return self.name_token; - } - - pub fn lastToken(self: *const ErrorTag) TokenIndex { - return self.name_token; - } - }; - - pub const OneToken = struct { - base: Node, - token: TokenIndex, - - pub fn iterate(self: *const OneToken, index: usize) ?*Node { - return null; - } - - pub fn firstToken(self: *const OneToken) TokenIndex { - return self.token; - } - - pub fn lastToken(self: *const OneToken) TokenIndex { - return self.token; - } - }; - - /// The params are directly after the FnProto in memory. - /// Next, each optional thing determined by a bit in `trailer_flags`. 
- pub const FnProto = struct { - base: Node = Node{ .tag = .FnProto }, - trailer_flags: TrailerFlags, - fn_token: TokenIndex, - params_len: NodeIndex, - return_type: ReturnType, - - pub const TrailerFlags = std.meta.TrailerFlags(struct { - doc_comments: *DocComment, - body_node: *Node, - lib_name: *Node, // populated if this is an extern declaration - align_expr: *Node, // populated if align(A) is present - section_expr: *Node, // populated if linksection(A) is present - callconv_expr: *Node, // populated if callconv(A) is present - visib_token: TokenIndex, - name_token: TokenIndex, - var_args_token: TokenIndex, - extern_export_inline_token: TokenIndex, - is_extern_prototype: void, // TODO: Remove once extern fn rewriting is - is_async: void, // TODO: remove once async fn rewriting is - }); - - pub const RequiredFields = struct { - fn_token: TokenIndex, - params_len: NodeIndex, - return_type: ReturnType, - }; - - pub const ReturnType = union(enum) { - Explicit: *Node, - InferErrorSet: *Node, - Invalid: TokenIndex, - }; - - pub const ParamDecl = struct { - doc_comments: ?*DocComment, - comptime_token: ?TokenIndex, - noalias_token: ?TokenIndex, - name_token: ?TokenIndex, - param_type: ParamType, - - pub const ParamType = union(enum) { - any_type: *Node, - type_expr: *Node, - }; - - pub fn iterate(self: *const ParamDecl, index: usize) ?*Node { - var i = index; - - if (i < 1) { - switch (self.param_type) { - .any_type, .type_expr => |node| return node, - } - } - i -= 1; - - return null; - } - - pub fn firstToken(self: *const ParamDecl) TokenIndex { - if (self.comptime_token) |comptime_token| return comptime_token; - if (self.noalias_token) |noalias_token| return noalias_token; - if (self.name_token) |name_token| return name_token; - switch (self.param_type) { - .any_type, .type_expr => |node| return node.firstToken(), - } - } - - pub fn lastToken(self: *const ParamDecl) TokenIndex { - switch (self.param_type) { - .any_type, .type_expr => |node| return node.lastToken(), 
- } - } - }; - - /// For debugging purposes. - pub fn dump(self: *const FnProto) void { - const trailers_start = @alignCast( - @alignOf(ParamDecl), - @ptrCast([*]const u8, self) + @sizeOf(FnProto) + @sizeOf(ParamDecl) * self.params_len, - ); - std.debug.print("{*} flags: {b} name_token: {s} {*} params_len: {d}\n", .{ - self, - self.trailer_flags.bits, - self.getNameToken(), - self.trailer_flags.ptrConst(trailers_start, .name_token), - self.params_len, - }); - } - - pub fn getDocComments(self: *const FnProto) ?*DocComment { - return self.getTrailer(.doc_comments); - } - - pub fn setDocComments(self: *FnProto, value: *DocComment) void { - self.setTrailer(.doc_comments, value); - } - - pub fn getBodyNode(self: *const FnProto) ?*Node { - return self.getTrailer(.body_node); - } - - pub fn setBodyNode(self: *FnProto, value: *Node) void { - self.setTrailer(.body_node, value); - } - - pub fn getLibName(self: *const FnProto) ?*Node { - return self.getTrailer(.lib_name); - } - - pub fn setLibName(self: *FnProto, value: *Node) void { - self.setTrailer(.lib_name, value); - } - - pub fn getAlignExpr(self: *const FnProto) ?*Node { - return self.getTrailer(.align_expr); - } - - pub fn setAlignExpr(self: *FnProto, value: *Node) void { - self.setTrailer(.align_expr, value); - } - - pub fn getSectionExpr(self: *const FnProto) ?*Node { - return self.getTrailer(.section_expr); - } - - pub fn setSectionExpr(self: *FnProto, value: *Node) void { - self.setTrailer(.section_expr, value); - } - - pub fn getCallconvExpr(self: *const FnProto) ?*Node { - return self.getTrailer(.callconv_expr); - } - - pub fn setCallconvExpr(self: *FnProto, value: *Node) void { - self.setTrailer(.callconv_expr, value); - } - - pub fn getVisibToken(self: *const FnProto) ?TokenIndex { - return self.getTrailer(.visib_token); - } - - pub fn setVisibToken(self: *FnProto, value: TokenIndex) void { - self.setTrailer(.visib_token, value); - } - - pub fn getNameToken(self: *const FnProto) ?TokenIndex { - return 
self.getTrailer(.name_token); - } - - pub fn setNameToken(self: *FnProto, value: TokenIndex) void { - self.setTrailer(.name_token, value); - } - - pub fn getVarArgsToken(self: *const FnProto) ?TokenIndex { - return self.getTrailer(.var_args_token); - } - - pub fn setVarArgsToken(self: *FnProto, value: TokenIndex) void { - self.setTrailer(.var_args_token, value); - } - - pub fn getExternExportInlineToken(self: *const FnProto) ?TokenIndex { - return self.getTrailer(.extern_export_inline_token); - } - - pub fn setExternExportInlineToken(self: *FnProto, value: TokenIndex) void { - self.setTrailer(.extern_export_inline_token, value); - } - - pub fn getIsExternPrototype(self: *const FnProto) ?void { - return self.getTrailer(.is_extern_prototype); - } - - pub fn setIsExternPrototype(self: *FnProto, value: void) void { - self.setTrailer(.is_extern_prototype, value); - } - - pub fn getIsAsync(self: *const FnProto) ?void { - return self.getTrailer(.is_async); - } - - pub fn setIsAsync(self: *FnProto, value: void) void { - self.setTrailer(.is_async, value); - } - - fn getTrailer(self: *const FnProto, comptime field: TrailerFlags.FieldEnum) ?TrailerFlags.Field(field) { - const trailers_start = @alignCast( - @alignOf(ParamDecl), - @ptrCast([*]const u8, self) + @sizeOf(FnProto) + @sizeOf(ParamDecl) * self.params_len, - ); - return self.trailer_flags.get(trailers_start, field); - } - - fn setTrailer(self: *FnProto, comptime field: TrailerFlags.FieldEnum, value: TrailerFlags.Field(field)) void { - const trailers_start = @alignCast( - @alignOf(ParamDecl), - @ptrCast([*]u8, self) + @sizeOf(FnProto) + @sizeOf(ParamDecl) * self.params_len, - ); - self.trailer_flags.set(trailers_start, field, value); - } - - /// After this the caller must initialize the params list. 
- pub fn create(allocator: *mem.Allocator, required: RequiredFields, trailers: TrailerFlags.InitStruct) !*FnProto { - const trailer_flags = TrailerFlags.init(trailers); - const bytes = try allocator.alignedAlloc(u8, @alignOf(FnProto), sizeInBytes( - required.params_len, - trailer_flags, - )); - const fn_proto = @ptrCast(*FnProto, bytes.ptr); - fn_proto.* = .{ - .trailer_flags = trailer_flags, - .fn_token = required.fn_token, - .params_len = required.params_len, - .return_type = required.return_type, - }; - const trailers_start = @alignCast( - @alignOf(ParamDecl), - bytes.ptr + @sizeOf(FnProto) + @sizeOf(ParamDecl) * required.params_len, - ); - trailer_flags.setMany(trailers_start, trailers); - return fn_proto; - } - - pub fn destroy(self: *FnProto, allocator: *mem.Allocator) void { - const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.params_len, self.trailer_flags)]; - allocator.free(bytes); - } - - pub fn iterate(self: *const FnProto, index: usize) ?*Node { - var i = index; - - if (self.getLibName()) |lib_name| { - if (i < 1) return lib_name; - i -= 1; - } - - const params_len: usize = if (self.params_len == 0) - 0 - else switch (self.paramsConst()[self.params_len - 1].param_type) { - .any_type, .type_expr => self.params_len, - }; - if (i < params_len) { - switch (self.paramsConst()[i].param_type) { - .any_type => |n| return n, - .type_expr => |n| return n, - } - } - i -= params_len; - - if (self.getAlignExpr()) |align_expr| { - if (i < 1) return align_expr; - i -= 1; - } - - if (self.getSectionExpr()) |section_expr| { - if (i < 1) return section_expr; - i -= 1; - } - - switch (self.return_type) { - .Explicit, .InferErrorSet => |node| { - if (i < 1) return node; - i -= 1; - }, - .Invalid => {}, - } - - if (self.getBodyNode()) |body_node| { - if (i < 1) return body_node; - i -= 1; - } - - return null; - } - - pub fn firstToken(self: *const FnProto) TokenIndex { - if (self.getVisibToken()) |visib_token| return visib_token; - if 
(self.getExternExportInlineToken()) |extern_export_inline_token| return extern_export_inline_token; - assert(self.getLibName() == null); - return self.fn_token; - } - - pub fn lastToken(self: *const FnProto) TokenIndex { - if (self.getBodyNode()) |body_node| return body_node.lastToken(); - switch (self.return_type) { - .Explicit, .InferErrorSet => |node| return node.lastToken(), - .Invalid => |tok| return tok, - } - } - - pub fn params(self: *FnProto) []ParamDecl { - const params_start = @ptrCast([*]u8, self) + @sizeOf(FnProto); - return @ptrCast([*]ParamDecl, params_start)[0..self.params_len]; - } - - pub fn paramsConst(self: *const FnProto) []const ParamDecl { - const params_start = @ptrCast([*]const u8, self) + @sizeOf(FnProto); - return @ptrCast([*]const ParamDecl, params_start)[0..self.params_len]; - } - - fn sizeInBytes(params_len: NodeIndex, trailer_flags: TrailerFlags) usize { - return @sizeOf(FnProto) + @sizeOf(ParamDecl) * @as(usize, params_len) + trailer_flags.sizeInBytes(); - } - }; - - pub const AnyFrameType = struct { - base: Node = Node{ .tag = .AnyFrameType }, - anyframe_token: TokenIndex, - result: ?Result, - - pub const Result = struct { - arrow_token: TokenIndex, - return_type: *Node, - }; - - pub fn iterate(self: *const AnyFrameType, index: usize) ?*Node { - var i = index; - - if (self.result) |result| { - if (i < 1) return result.return_type; - i -= 1; - } - - return null; - } - - pub fn firstToken(self: *const AnyFrameType) TokenIndex { - return self.anyframe_token; - } - - pub fn lastToken(self: *const AnyFrameType) TokenIndex { - if (self.result) |result| return result.return_type.lastToken(); - return self.anyframe_token; - } - }; - - /// The statements of the block follow Block directly in memory. - pub const Block = struct { - base: Node = Node{ .tag = .Block }, - statements_len: NodeIndex, - lbrace: TokenIndex, - rbrace: TokenIndex, - - /// After this the caller must initialize the statements list. 
- pub fn alloc(allocator: *mem.Allocator, statements_len: NodeIndex) !*Block { - const bytes = try allocator.alignedAlloc(u8, @alignOf(Block), sizeInBytes(statements_len)); - return @ptrCast(*Block, bytes.ptr); - } - - pub fn free(self: *Block, allocator: *mem.Allocator) void { - const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.statements_len)]; - allocator.free(bytes); - } - - pub fn iterate(self: *const Block, index: usize) ?*Node { - var i = index; - - if (i < self.statements_len) return self.statementsConst()[i]; - i -= self.statements_len; - - return null; - } - - pub fn firstToken(self: *const Block) TokenIndex { - return self.lbrace; - } - - pub fn lastToken(self: *const Block) TokenIndex { - return self.rbrace; - } - - pub fn statements(self: *Block) []*Node { - const decls_start = @ptrCast([*]u8, self) + @sizeOf(Block); - return @ptrCast([*]*Node, decls_start)[0..self.statements_len]; - } - - pub fn statementsConst(self: *const Block) []const *Node { - const decls_start = @ptrCast([*]const u8, self) + @sizeOf(Block); - return @ptrCast([*]const *Node, decls_start)[0..self.statements_len]; - } - - fn sizeInBytes(statements_len: NodeIndex) usize { - return @sizeOf(Block) + @sizeOf(*Node) * @as(usize, statements_len); - } - }; - - /// The statements of the block follow LabeledBlock directly in memory. - pub const LabeledBlock = struct { - base: Node = Node{ .tag = .LabeledBlock }, - statements_len: NodeIndex, - lbrace: TokenIndex, - rbrace: TokenIndex, - label: TokenIndex, - - /// After this the caller must initialize the statements list. 
- pub fn alloc(allocator: *mem.Allocator, statements_len: NodeIndex) !*LabeledBlock { - const bytes = try allocator.alignedAlloc(u8, @alignOf(LabeledBlock), sizeInBytes(statements_len)); - return @ptrCast(*LabeledBlock, bytes.ptr); - } - - pub fn free(self: *LabeledBlock, allocator: *mem.Allocator) void { - const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.statements_len)]; - allocator.free(bytes); - } - - pub fn iterate(self: *const LabeledBlock, index: usize) ?*Node { - var i = index; - - if (i < self.statements_len) return self.statementsConst()[i]; - i -= self.statements_len; - - return null; - } - - pub fn firstToken(self: *const LabeledBlock) TokenIndex { - return self.label; - } - - pub fn lastToken(self: *const LabeledBlock) TokenIndex { - return self.rbrace; - } - - pub fn statements(self: *LabeledBlock) []*Node { - const decls_start = @ptrCast([*]u8, self) + @sizeOf(LabeledBlock); - return @ptrCast([*]*Node, decls_start)[0..self.statements_len]; - } - - pub fn statementsConst(self: *const LabeledBlock) []const *Node { - const decls_start = @ptrCast([*]const u8, self) + @sizeOf(LabeledBlock); - return @ptrCast([*]const *Node, decls_start)[0..self.statements_len]; - } - - fn sizeInBytes(statements_len: NodeIndex) usize { - return @sizeOf(LabeledBlock) + @sizeOf(*Node) * @as(usize, statements_len); - } - }; - - pub const Defer = struct { - base: Node = Node{ .tag = .Defer }, - defer_token: TokenIndex, - payload: ?*Node, - expr: *Node, - - pub fn iterate(self: *const Defer, index: usize) ?*Node { - var i = index; - - if (i < 1) return self.expr; - i -= 1; - - return null; - } - - pub fn firstToken(self: *const Defer) TokenIndex { - return self.defer_token; - } - - pub fn lastToken(self: *const Defer) TokenIndex { - return self.expr.lastToken(); - } - }; - - pub const Comptime = struct { - base: Node = Node{ .tag = .Comptime }, - doc_comments: ?*DocComment, - comptime_token: TokenIndex, - expr: *Node, - - pub fn iterate(self: *const Comptime, index: 
usize) ?*Node { - var i = index; - - if (i < 1) return self.expr; - i -= 1; - - return null; - } - - pub fn firstToken(self: *const Comptime) TokenIndex { - return self.comptime_token; - } - - pub fn lastToken(self: *const Comptime) TokenIndex { - return self.expr.lastToken(); - } - }; - - pub const Nosuspend = struct { - base: Node = Node{ .tag = .Nosuspend }, - nosuspend_token: TokenIndex, - expr: *Node, - - pub fn iterate(self: *const Nosuspend, index: usize) ?*Node { - var i = index; - - if (i < 1) return self.expr; - i -= 1; - - return null; - } - - pub fn firstToken(self: *const Nosuspend) TokenIndex { - return self.nosuspend_token; - } - - pub fn lastToken(self: *const Nosuspend) TokenIndex { - return self.expr.lastToken(); - } - }; - - pub const Payload = struct { - base: Node = Node{ .tag = .Payload }, - lpipe: TokenIndex, - error_symbol: *Node, - rpipe: TokenIndex, - - pub fn iterate(self: *const Payload, index: usize) ?*Node { - var i = index; - - if (i < 1) return self.error_symbol; - i -= 1; - - return null; - } - - pub fn firstToken(self: *const Payload) TokenIndex { - return self.lpipe; - } - - pub fn lastToken(self: *const Payload) TokenIndex { - return self.rpipe; - } - }; - - pub const PointerPayload = struct { - base: Node = Node{ .tag = .PointerPayload }, - lpipe: TokenIndex, - ptr_token: ?TokenIndex, - value_symbol: *Node, - rpipe: TokenIndex, - - pub fn iterate(self: *const PointerPayload, index: usize) ?*Node { - var i = index; - - if (i < 1) return self.value_symbol; - i -= 1; - - return null; - } - - pub fn firstToken(self: *const PointerPayload) TokenIndex { - return self.lpipe; - } - - pub fn lastToken(self: *const PointerPayload) TokenIndex { - return self.rpipe; - } - }; - - pub const PointerIndexPayload = struct { - base: Node = Node{ .tag = .PointerIndexPayload }, - lpipe: TokenIndex, - ptr_token: ?TokenIndex, - value_symbol: *Node, - index_symbol: ?*Node, - rpipe: TokenIndex, - - pub fn iterate(self: *const PointerIndexPayload, 
index: usize) ?*Node { - var i = index; - - if (i < 1) return self.value_symbol; - i -= 1; - - if (self.index_symbol) |index_symbol| { - if (i < 1) return index_symbol; - i -= 1; - } - - return null; - } - - pub fn firstToken(self: *const PointerIndexPayload) TokenIndex { - return self.lpipe; - } - - pub fn lastToken(self: *const PointerIndexPayload) TokenIndex { - return self.rpipe; - } - }; - - pub const Else = struct { - base: Node = Node{ .tag = .Else }, - else_token: TokenIndex, - payload: ?*Node, - body: *Node, - - pub fn iterate(self: *const Else, index: usize) ?*Node { - var i = index; - - if (self.payload) |payload| { - if (i < 1) return payload; - i -= 1; - } - - if (i < 1) return self.body; - i -= 1; - - return null; - } - - pub fn firstToken(self: *const Else) TokenIndex { - return self.else_token; - } - - pub fn lastToken(self: *const Else) TokenIndex { - return self.body.lastToken(); - } - }; - - /// The cases node pointers are found in memory after Switch. - /// They must be SwitchCase or SwitchElse nodes. - pub const Switch = struct { - base: Node = Node{ .tag = .Switch }, - switch_token: TokenIndex, - rbrace: TokenIndex, - cases_len: NodeIndex, - expr: *Node, - - /// After this the caller must initialize the fields_and_decls list. 
- pub fn alloc(allocator: *mem.Allocator, cases_len: NodeIndex) !*Switch { - const bytes = try allocator.alignedAlloc(u8, @alignOf(Switch), sizeInBytes(cases_len)); - return @ptrCast(*Switch, bytes.ptr); - } - - pub fn free(self: *Switch, allocator: *mem.Allocator) void { - const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.cases_len)]; - allocator.free(bytes); - } - - pub fn iterate(self: *const Switch, index: usize) ?*Node { - var i = index; - - if (i < 1) return self.expr; - i -= 1; - - if (i < self.cases_len) return self.casesConst()[i]; - i -= self.cases_len; - - return null; - } - - pub fn firstToken(self: *const Switch) TokenIndex { - return self.switch_token; - } - - pub fn lastToken(self: *const Switch) TokenIndex { - return self.rbrace; - } - - pub fn cases(self: *Switch) []*Node { - const decls_start = @ptrCast([*]u8, self) + @sizeOf(Switch); - return @ptrCast([*]*Node, decls_start)[0..self.cases_len]; - } - - pub fn casesConst(self: *const Switch) []const *Node { - const decls_start = @ptrCast([*]const u8, self) + @sizeOf(Switch); - return @ptrCast([*]const *Node, decls_start)[0..self.cases_len]; - } - - fn sizeInBytes(cases_len: NodeIndex) usize { - return @sizeOf(Switch) + @sizeOf(*Node) * @as(usize, cases_len); - } - }; - - /// Items sub-nodes appear in memory directly following SwitchCase. - pub const SwitchCase = struct { - base: Node = Node{ .tag = .SwitchCase }, - arrow_token: TokenIndex, - payload: ?*Node, - expr: *Node, - items_len: NodeIndex, - - /// After this the caller must initialize the fields_and_decls list. 
- pub fn alloc(allocator: *mem.Allocator, items_len: NodeIndex) !*SwitchCase { - const bytes = try allocator.alignedAlloc(u8, @alignOf(SwitchCase), sizeInBytes(items_len)); - return @ptrCast(*SwitchCase, bytes.ptr); - } - - pub fn free(self: *SwitchCase, allocator: *mem.Allocator) void { - const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.items_len)]; - allocator.free(bytes); - } - - pub fn iterate(self: *const SwitchCase, index: usize) ?*Node { - var i = index; - - if (i < self.items_len) return self.itemsConst()[i]; - i -= self.items_len; - - if (self.payload) |payload| { - if (i < 1) return payload; - i -= 1; - } - - if (i < 1) return self.expr; - i -= 1; - - return null; - } - - pub fn firstToken(self: *const SwitchCase) TokenIndex { - return self.itemsConst()[0].firstToken(); - } - - pub fn lastToken(self: *const SwitchCase) TokenIndex { - return self.expr.lastToken(); - } - - pub fn items(self: *SwitchCase) []*Node { - const decls_start = @ptrCast([*]u8, self) + @sizeOf(SwitchCase); - return @ptrCast([*]*Node, decls_start)[0..self.items_len]; - } - - pub fn itemsConst(self: *const SwitchCase) []const *Node { - const decls_start = @ptrCast([*]const u8, self) + @sizeOf(SwitchCase); - return @ptrCast([*]const *Node, decls_start)[0..self.items_len]; - } - - fn sizeInBytes(items_len: NodeIndex) usize { - return @sizeOf(SwitchCase) + @sizeOf(*Node) * @as(usize, items_len); - } - }; - - pub const SwitchElse = struct { - base: Node = Node{ .tag = .SwitchElse }, - token: TokenIndex, - - pub fn iterate(self: *const SwitchElse, index: usize) ?*Node { - return null; - } - - pub fn firstToken(self: *const SwitchElse) TokenIndex { - return self.token; - } - - pub fn lastToken(self: *const SwitchElse) TokenIndex { - return self.token; - } - }; - - pub const While = struct { - base: Node = Node{ .tag = .While }, - label: ?TokenIndex, - inline_token: ?TokenIndex, - while_token: TokenIndex, - condition: *Node, - payload: ?*Node, - continue_expr: ?*Node, - body: *Node, - 
@"else": ?*Else, - - pub fn iterate(self: *const While, index: usize) ?*Node { - var i = index; - - if (i < 1) return self.condition; - i -= 1; - - if (self.payload) |payload| { - if (i < 1) return payload; - i -= 1; - } - - if (self.continue_expr) |continue_expr| { - if (i < 1) return continue_expr; - i -= 1; - } - - if (i < 1) return self.body; - i -= 1; - - if (self.@"else") |@"else"| { - if (i < 1) return &@"else".base; - i -= 1; - } - - return null; - } - - pub fn firstToken(self: *const While) TokenIndex { - if (self.label) |label| { - return label; - } - - if (self.inline_token) |inline_token| { - return inline_token; - } - - return self.while_token; - } - - pub fn lastToken(self: *const While) TokenIndex { - if (self.@"else") |@"else"| { - return @"else".body.lastToken(); - } - - return self.body.lastToken(); - } - }; - - pub const For = struct { - base: Node = Node{ .tag = .For }, - label: ?TokenIndex, - inline_token: ?TokenIndex, - for_token: TokenIndex, - array_expr: *Node, - payload: *Node, - body: *Node, - @"else": ?*Else, - - pub fn iterate(self: *const For, index: usize) ?*Node { - var i = index; - - if (i < 1) return self.array_expr; - i -= 1; - - if (i < 1) return self.payload; - i -= 1; - - if (i < 1) return self.body; - i -= 1; - - if (self.@"else") |@"else"| { - if (i < 1) return &@"else".base; - i -= 1; - } - - return null; - } - - pub fn firstToken(self: *const For) TokenIndex { - if (self.label) |label| { - return label; - } - - if (self.inline_token) |inline_token| { - return inline_token; - } - - return self.for_token; - } - - pub fn lastToken(self: *const For) TokenIndex { - if (self.@"else") |@"else"| { - return @"else".body.lastToken(); - } - - return self.body.lastToken(); - } - }; - - pub const If = struct { - base: Node = Node{ .tag = .If }, - if_token: TokenIndex, - condition: *Node, - payload: ?*Node, - body: *Node, - @"else": ?*Else, - - pub fn iterate(self: *const If, index: usize) ?*Node { - var i = index; - - if (i < 1) return 
self.condition; - i -= 1; - - if (self.payload) |payload| { - if (i < 1) return payload; - i -= 1; - } - - if (i < 1) return self.body; - i -= 1; - - if (self.@"else") |@"else"| { - if (i < 1) return &@"else".base; - i -= 1; - } - - return null; - } - - pub fn firstToken(self: *const If) TokenIndex { - return self.if_token; - } - - pub fn lastToken(self: *const If) TokenIndex { - if (self.@"else") |@"else"| { - return @"else".body.lastToken(); - } - - return self.body.lastToken(); - } - }; - - pub const Catch = struct { - base: Node = Node{ .tag = .Catch }, - op_token: TokenIndex, - lhs: *Node, - rhs: *Node, - payload: ?*Node, - - pub fn iterate(self: *const Catch, index: usize) ?*Node { - var i = index; - - if (i < 1) return self.lhs; - i -= 1; - - if (self.payload) |payload| { - if (i < 1) return payload; - i -= 1; - } - - if (i < 1) return self.rhs; - i -= 1; - - return null; - } - - pub fn firstToken(self: *const Catch) TokenIndex { - return self.lhs.firstToken(); - } - - pub fn lastToken(self: *const Catch) TokenIndex { - return self.rhs.lastToken(); - } - }; - - pub const SimpleInfixOp = struct { - base: Node, - op_token: TokenIndex, - lhs: *Node, - rhs: *Node, - - pub fn iterate(self: *const SimpleInfixOp, index: usize) ?*Node { - var i = index; - - if (i < 1) return self.lhs; - i -= 1; - - if (i < 1) return self.rhs; - i -= 1; - - return null; - } - - pub fn firstToken(self: *const SimpleInfixOp) TokenIndex { - return self.lhs.firstToken(); - } - - pub fn lastToken(self: *const SimpleInfixOp) TokenIndex { - return self.rhs.lastToken(); - } - }; - - pub const SimplePrefixOp = struct { - base: Node, - op_token: TokenIndex, - rhs: *Node, - - const Self = @This(); - - pub fn iterate(self: *const Self, index: usize) ?*Node { - if (index == 0) return self.rhs; - return null; - } - - pub fn firstToken(self: *const Self) TokenIndex { - return self.op_token; - } - - pub fn lastToken(self: *const Self) TokenIndex { - return self.rhs.lastToken(); - } - }; - - pub 
const ArrayType = struct { - base: Node = Node{ .tag = .ArrayType }, - op_token: TokenIndex, - rhs: *Node, - len_expr: *Node, - - pub fn iterate(self: *const ArrayType, index: usize) ?*Node { - var i = index; - - if (i < 1) return self.len_expr; - i -= 1; - - if (i < 1) return self.rhs; - i -= 1; - - return null; - } - - pub fn firstToken(self: *const ArrayType) TokenIndex { - return self.op_token; - } - - pub fn lastToken(self: *const ArrayType) TokenIndex { - return self.rhs.lastToken(); - } + pub const LocalVarDecl = struct { + type_node: Index, + align_node: Index, }; pub const ArrayTypeSentinel = struct { - base: Node = Node{ .tag = .ArrayTypeSentinel }, - op_token: TokenIndex, - rhs: *Node, - len_expr: *Node, - sentinel: *Node, - - pub fn iterate(self: *const ArrayTypeSentinel, index: usize) ?*Node { - var i = index; - - if (i < 1) return self.len_expr; - i -= 1; - - if (i < 1) return self.sentinel; - i -= 1; - - if (i < 1) return self.rhs; - i -= 1; - - return null; - } - - pub fn firstToken(self: *const ArrayTypeSentinel) TokenIndex { - return self.op_token; - } - - pub fn lastToken(self: *const ArrayTypeSentinel) TokenIndex { - return self.rhs.lastToken(); - } + elem_type: Index, + sentinel: Index, }; pub const PtrType = struct { - base: Node = Node{ .tag = .PtrType }, - op_token: TokenIndex, - rhs: *Node, - /// TODO Add a u8 flags field to Node where it would otherwise be padding, and each bit represents - /// one of these possibly-null things. Then we have them directly follow the PtrType in memory. 
- ptr_info: PtrInfo = .{}, - - pub fn iterate(self: *const PtrType, index: usize) ?*Node { - var i = index; - - if (self.ptr_info.sentinel) |sentinel| { - if (i < 1) return sentinel; - i -= 1; - } - - if (self.ptr_info.align_info) |align_info| { - if (i < 1) return align_info.node; - i -= 1; - } - - if (i < 1) return self.rhs; - i -= 1; - - return null; - } - - pub fn firstToken(self: *const PtrType) TokenIndex { - return self.op_token; - } - - pub fn lastToken(self: *const PtrType) TokenIndex { - return self.rhs.lastToken(); - } + sentinel: Index, + align_node: Index, + bit_range_start: Index, + bit_range_end: Index, }; pub const SliceType = struct { - base: Node = Node{ .tag = .SliceType }, - op_token: TokenIndex, - rhs: *Node, - /// TODO Add a u8 flags field to Node where it would otherwise be padding, and each bit represents - /// one of these possibly-null things. Then we have them directly follow the SliceType in memory. - ptr_info: PtrInfo = .{}, - - pub fn iterate(self: *const SliceType, index: usize) ?*Node { - var i = index; - - if (self.ptr_info.sentinel) |sentinel| { - if (i < 1) return sentinel; - i -= 1; - } - - if (self.ptr_info.align_info) |align_info| { - if (i < 1) return align_info.node; - i -= 1; - } - - if (i < 1) return self.rhs; - i -= 1; - - return null; - } - - pub fn firstToken(self: *const SliceType) TokenIndex { - return self.op_token; - } - - pub fn lastToken(self: *const SliceType) TokenIndex { - return self.rhs.lastToken(); - } + sentinel: Index, + align_node: Index, + }; + pub const SubRange = struct { + /// Index into sub_list. + start: Index, + /// Index into sub_list. 
+ end: Index, }; - pub const FieldInitializer = struct { - base: Node = Node{ .tag = .FieldInitializer }, - period_token: TokenIndex, - name_token: TokenIndex, - expr: *Node, - - pub fn iterate(self: *const FieldInitializer, index: usize) ?*Node { - var i = index; - - if (i < 1) return self.expr; - i -= 1; - - return null; - } - - pub fn firstToken(self: *const FieldInitializer) TokenIndex { - return self.period_token; - } - - pub fn lastToken(self: *const FieldInitializer) TokenIndex { - return self.expr.lastToken(); - } + pub const If = struct { + then_expr: Index, + else_expr: Index, }; - /// Elements occur directly in memory after ArrayInitializer. - pub const ArrayInitializer = struct { - base: Node = Node{ .tag = .ArrayInitializer }, - rtoken: TokenIndex, - list_len: NodeIndex, - lhs: *Node, - - /// After this the caller must initialize the fields_and_decls list. - pub fn alloc(allocator: *mem.Allocator, list_len: NodeIndex) !*ArrayInitializer { - const bytes = try allocator.alignedAlloc(u8, @alignOf(ArrayInitializer), sizeInBytes(list_len)); - return @ptrCast(*ArrayInitializer, bytes.ptr); - } - - pub fn free(self: *ArrayInitializer, allocator: *mem.Allocator) void { - const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.list_len)]; - allocator.free(bytes); - } - - pub fn iterate(self: *const ArrayInitializer, index: usize) ?*Node { - var i = index; - - if (i < 1) return self.lhs; - i -= 1; - - if (i < self.list_len) return self.listConst()[i]; - i -= self.list_len; - - return null; - } - - pub fn firstToken(self: *const ArrayInitializer) TokenIndex { - return self.lhs.firstToken(); - } - - pub fn lastToken(self: *const ArrayInitializer) TokenIndex { - return self.rtoken; - } - - pub fn list(self: *ArrayInitializer) []*Node { - const decls_start = @ptrCast([*]u8, self) + @sizeOf(ArrayInitializer); - return @ptrCast([*]*Node, decls_start)[0..self.list_len]; - } - - pub fn listConst(self: *const ArrayInitializer) []const *Node { - const decls_start = 
@ptrCast([*]const u8, self) + @sizeOf(ArrayInitializer); - return @ptrCast([*]const *Node, decls_start)[0..self.list_len]; - } - - fn sizeInBytes(list_len: NodeIndex) usize { - return @sizeOf(ArrayInitializer) + @sizeOf(*Node) * @as(usize, list_len); - } + pub const ContainerField = struct { + value_expr: Index, + align_expr: Index, }; - /// Elements occur directly in memory after ArrayInitializerDot. - pub const ArrayInitializerDot = struct { - base: Node = Node{ .tag = .ArrayInitializerDot }, - dot: TokenIndex, - rtoken: TokenIndex, - list_len: NodeIndex, - - /// After this the caller must initialize the fields_and_decls list. - pub fn alloc(allocator: *mem.Allocator, list_len: NodeIndex) !*ArrayInitializerDot { - const bytes = try allocator.alignedAlloc(u8, @alignOf(ArrayInitializerDot), sizeInBytes(list_len)); - return @ptrCast(*ArrayInitializerDot, bytes.ptr); - } - - pub fn free(self: *ArrayInitializerDot, allocator: *mem.Allocator) void { - const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.list_len)]; - allocator.free(bytes); - } - - pub fn iterate(self: *const ArrayInitializerDot, index: usize) ?*Node { - var i = index; - - if (i < self.list_len) return self.listConst()[i]; - i -= self.list_len; - - return null; - } - - pub fn firstToken(self: *const ArrayInitializerDot) TokenIndex { - return self.dot; - } - - pub fn lastToken(self: *const ArrayInitializerDot) TokenIndex { - return self.rtoken; - } - - pub fn list(self: *ArrayInitializerDot) []*Node { - const decls_start = @ptrCast([*]u8, self) + @sizeOf(ArrayInitializerDot); - return @ptrCast([*]*Node, decls_start)[0..self.list_len]; - } - - pub fn listConst(self: *const ArrayInitializerDot) []const *Node { - const decls_start = @ptrCast([*]const u8, self) + @sizeOf(ArrayInitializerDot); - return @ptrCast([*]const *Node, decls_start)[0..self.list_len]; - } - - fn sizeInBytes(list_len: NodeIndex) usize { - return @sizeOf(ArrayInitializerDot) + @sizeOf(*Node) * @as(usize, list_len); - } - }; - - /// 
Elements occur directly in memory after StructInitializer. - pub const StructInitializer = struct { - base: Node = Node{ .tag = .StructInitializer }, - rtoken: TokenIndex, - list_len: NodeIndex, - lhs: *Node, - - /// After this the caller must initialize the fields_and_decls list. - pub fn alloc(allocator: *mem.Allocator, list_len: NodeIndex) !*StructInitializer { - const bytes = try allocator.alignedAlloc(u8, @alignOf(StructInitializer), sizeInBytes(list_len)); - return @ptrCast(*StructInitializer, bytes.ptr); - } - - pub fn free(self: *StructInitializer, allocator: *mem.Allocator) void { - const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.list_len)]; - allocator.free(bytes); - } - - pub fn iterate(self: *const StructInitializer, index: usize) ?*Node { - var i = index; - - if (i < 1) return self.lhs; - i -= 1; - - if (i < self.list_len) return self.listConst()[i]; - i -= self.list_len; - - return null; - } - - pub fn firstToken(self: *const StructInitializer) TokenIndex { - return self.lhs.firstToken(); - } - - pub fn lastToken(self: *const StructInitializer) TokenIndex { - return self.rtoken; - } - - pub fn list(self: *StructInitializer) []*Node { - const decls_start = @ptrCast([*]u8, self) + @sizeOf(StructInitializer); - return @ptrCast([*]*Node, decls_start)[0..self.list_len]; - } - - pub fn listConst(self: *const StructInitializer) []const *Node { - const decls_start = @ptrCast([*]const u8, self) + @sizeOf(StructInitializer); - return @ptrCast([*]const *Node, decls_start)[0..self.list_len]; - } - - fn sizeInBytes(list_len: NodeIndex) usize { - return @sizeOf(StructInitializer) + @sizeOf(*Node) * @as(usize, list_len); - } - }; - - /// Elements occur directly in memory after StructInitializerDot. - pub const StructInitializerDot = struct { - base: Node = Node{ .tag = .StructInitializerDot }, - dot: TokenIndex, - rtoken: TokenIndex, - list_len: NodeIndex, - - /// After this the caller must initialize the fields_and_decls list. 
- pub fn alloc(allocator: *mem.Allocator, list_len: NodeIndex) !*StructInitializerDot { - const bytes = try allocator.alignedAlloc(u8, @alignOf(StructInitializerDot), sizeInBytes(list_len)); - return @ptrCast(*StructInitializerDot, bytes.ptr); - } - - pub fn free(self: *StructInitializerDot, allocator: *mem.Allocator) void { - const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.list_len)]; - allocator.free(bytes); - } - - pub fn iterate(self: *const StructInitializerDot, index: usize) ?*Node { - var i = index; - - if (i < self.list_len) return self.listConst()[i]; - i -= self.list_len; - - return null; - } - - pub fn firstToken(self: *const StructInitializerDot) TokenIndex { - return self.dot; - } - - pub fn lastToken(self: *const StructInitializerDot) TokenIndex { - return self.rtoken; - } - - pub fn list(self: *StructInitializerDot) []*Node { - const decls_start = @ptrCast([*]u8, self) + @sizeOf(StructInitializerDot); - return @ptrCast([*]*Node, decls_start)[0..self.list_len]; - } - - pub fn listConst(self: *const StructInitializerDot) []const *Node { - const decls_start = @ptrCast([*]const u8, self) + @sizeOf(StructInitializerDot); - return @ptrCast([*]const *Node, decls_start)[0..self.list_len]; - } - - fn sizeInBytes(list_len: NodeIndex) usize { - return @sizeOf(StructInitializerDot) + @sizeOf(*Node) * @as(usize, list_len); - } - }; - - /// Parameter nodes directly follow Call in memory. - pub const Call = struct { - base: Node = Node{ .tag = .Call }, - rtoken: TokenIndex, - lhs: *Node, - params_len: NodeIndex, - async_token: ?TokenIndex, - - /// After this the caller must initialize the fields_and_decls list. 
- pub fn alloc(allocator: *mem.Allocator, params_len: NodeIndex) !*Call { - const bytes = try allocator.alignedAlloc(u8, @alignOf(Call), sizeInBytes(params_len)); - return @ptrCast(*Call, bytes.ptr); - } - - pub fn free(self: *Call, allocator: *mem.Allocator) void { - const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.params_len)]; - allocator.free(bytes); - } - - pub fn iterate(self: *const Call, index: usize) ?*Node { - var i = index; - - if (i < 1) return self.lhs; - i -= 1; - - if (i < self.params_len) return self.paramsConst()[i]; - i -= self.params_len; - - return null; - } - - pub fn firstToken(self: *const Call) TokenIndex { - if (self.async_token) |async_token| return async_token; - return self.lhs.firstToken(); - } - - pub fn lastToken(self: *const Call) TokenIndex { - return self.rtoken; - } - - pub fn params(self: *Call) []*Node { - const decls_start = @ptrCast([*]u8, self) + @sizeOf(Call); - return @ptrCast([*]*Node, decls_start)[0..self.params_len]; - } - - pub fn paramsConst(self: *const Call) []const *Node { - const decls_start = @ptrCast([*]const u8, self) + @sizeOf(Call); - return @ptrCast([*]const *Node, decls_start)[0..self.params_len]; - } - - fn sizeInBytes(params_len: NodeIndex) usize { - return @sizeOf(Call) + @sizeOf(*Node) * @as(usize, params_len); - } - }; - - pub const ArrayAccess = struct { - base: Node = Node{ .tag = .ArrayAccess }, - rtoken: TokenIndex, - lhs: *Node, - index_expr: *Node, - - pub fn iterate(self: *const ArrayAccess, index: usize) ?*Node { - var i = index; - - if (i < 1) return self.lhs; - i -= 1; - - if (i < 1) return self.index_expr; - i -= 1; - - return null; - } - - pub fn firstToken(self: *const ArrayAccess) TokenIndex { - return self.lhs.firstToken(); - } - - pub fn lastToken(self: *const ArrayAccess) TokenIndex { - return self.rtoken; - } - }; - - pub const SimpleSuffixOp = struct { - base: Node, - rtoken: TokenIndex, - lhs: *Node, - - pub fn iterate(self: *const SimpleSuffixOp, index: usize) ?*Node { - var 
i = index; - - if (i < 1) return self.lhs; - i -= 1; - - return null; - } - - pub fn firstToken(self: *const SimpleSuffixOp) TokenIndex { - return self.lhs.firstToken(); - } - - pub fn lastToken(self: *const SimpleSuffixOp) TokenIndex { - return self.rtoken; - } + pub const GlobalVarDecl = struct { + type_node: Index, + align_node: Index, + section_node: Index, }; pub const Slice = struct { - base: Node = Node{ .tag = .Slice }, - rtoken: TokenIndex, - lhs: *Node, - start: *Node, - end: ?*Node, - sentinel: ?*Node, - - pub fn iterate(self: *const Slice, index: usize) ?*Node { - var i = index; - - if (i < 1) return self.lhs; - i -= 1; - - if (i < 1) return self.start; - i -= 1; - - if (self.end) |end| { - if (i < 1) return end; - i -= 1; - } - if (self.sentinel) |sentinel| { - if (i < 1) return sentinel; - i -= 1; - } - - return null; - } - - pub fn firstToken(self: *const Slice) TokenIndex { - return self.lhs.firstToken(); - } - - pub fn lastToken(self: *const Slice) TokenIndex { - return self.rtoken; - } + start: Index, + end: Index, + sentinel: Index, }; - pub const GroupedExpression = struct { - base: Node = Node{ .tag = .GroupedExpression }, - lparen: TokenIndex, - expr: *Node, - rparen: TokenIndex, - - pub fn iterate(self: *const GroupedExpression, index: usize) ?*Node { - var i = index; - - if (i < 1) return self.expr; - i -= 1; - - return null; - } - - pub fn firstToken(self: *const GroupedExpression) TokenIndex { - return self.lparen; - } - - pub fn lastToken(self: *const GroupedExpression) TokenIndex { - return self.rparen; - } + pub const While = struct { + continue_expr: Index, + then_expr: Index, + else_expr: Index, }; - /// Trailed in memory by possibly many things, with each optional thing - /// determined by a bit in `trailer_flags`. 
- /// Can be: return, break, continue - pub const ControlFlowExpression = struct { - base: Node, - trailer_flags: TrailerFlags, - ltoken: TokenIndex, - - pub const TrailerFlags = std.meta.TrailerFlags(struct { - rhs: *Node, - label: TokenIndex, - }); - - pub const RequiredFields = struct { - tag: Tag, - ltoken: TokenIndex, - }; - - pub fn getRHS(self: *const ControlFlowExpression) ?*Node { - return self.getTrailer(.rhs); - } - - pub fn setRHS(self: *ControlFlowExpression, value: *Node) void { - self.setTrailer(.rhs, value); - } - - pub fn getLabel(self: *const ControlFlowExpression) ?TokenIndex { - return self.getTrailer(.label); - } - - pub fn setLabel(self: *ControlFlowExpression, value: TokenIndex) void { - self.setTrailer(.label, value); - } - - fn getTrailer(self: *const ControlFlowExpression, comptime field: TrailerFlags.FieldEnum) ?TrailerFlags.Field(field) { - const trailers_start = @ptrCast([*]const u8, self) + @sizeOf(ControlFlowExpression); - return self.trailer_flags.get(trailers_start, field); - } - - fn setTrailer(self: *ControlFlowExpression, comptime field: TrailerFlags.FieldEnum, value: TrailerFlags.Field(field)) void { - const trailers_start = @ptrCast([*]u8, self) + @sizeOf(ControlFlowExpression); - self.trailer_flags.set(trailers_start, field, value); - } - - pub fn create(allocator: *mem.Allocator, required: RequiredFields, trailers: TrailerFlags.InitStruct) !*ControlFlowExpression { - const trailer_flags = TrailerFlags.init(trailers); - const bytes = try allocator.alignedAlloc(u8, @alignOf(ControlFlowExpression), sizeInBytes(trailer_flags)); - const ctrl_flow_expr = @ptrCast(*ControlFlowExpression, bytes.ptr); - ctrl_flow_expr.* = .{ - .base = .{ .tag = required.tag }, - .trailer_flags = trailer_flags, - .ltoken = required.ltoken, - }; - const trailers_start = bytes.ptr + @sizeOf(ControlFlowExpression); - trailer_flags.setMany(trailers_start, trailers); - return ctrl_flow_expr; - } - - pub fn destroy(self: *ControlFlowExpression, allocator: 
*mem.Allocator) void { - const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.trailer_flags)]; - allocator.free(bytes); - } - - pub fn iterate(self: *const ControlFlowExpression, index: usize) ?*Node { - var i = index; - - if (self.getRHS()) |rhs| { - if (i < 1) return rhs; - i -= 1; - } - - return null; - } - - pub fn firstToken(self: *const ControlFlowExpression) TokenIndex { - return self.ltoken; - } - - pub fn lastToken(self: *const ControlFlowExpression) TokenIndex { - if (self.getRHS()) |rhs| { - return rhs.lastToken(); - } - - if (self.getLabel()) |label| { - return label; - } - - return self.ltoken; - } - - fn sizeInBytes(trailer_flags: TrailerFlags) usize { - return @sizeOf(ControlFlowExpression) + trailer_flags.sizeInBytes(); - } + pub const WhileCont = struct { + continue_expr: Index, + then_expr: Index, }; - pub const Suspend = struct { - base: Node = Node{ .tag = .Suspend }, - suspend_token: TokenIndex, - body: ?*Node, - - pub fn iterate(self: *const Suspend, index: usize) ?*Node { - var i = index; - - if (self.body) |body| { - if (i < 1) return body; - i -= 1; - } - - return null; - } - - pub fn firstToken(self: *const Suspend) TokenIndex { - return self.suspend_token; - } - - pub fn lastToken(self: *const Suspend) TokenIndex { - if (self.body) |body| { - return body.lastToken(); - } - - return self.suspend_token; - } + pub const FnProtoOne = struct { + /// Populated if there is exactly 1 parameter. Otherwise there are 0 parameters. + param: Index, + /// Populated if align(A) is present. + align_expr: Index, + /// Populated if linksection(A) is present. + section_expr: Index, + /// Populated if callconv(A) is present. 
+ callconv_expr: Index, }; - pub const EnumLiteral = struct { - base: Node = Node{ .tag = .EnumLiteral }, - dot: TokenIndex, - name: TokenIndex, - - pub fn iterate(self: *const EnumLiteral, index: usize) ?*Node { - return null; - } - - pub fn firstToken(self: *const EnumLiteral) TokenIndex { - return self.dot; - } - - pub fn lastToken(self: *const EnumLiteral) TokenIndex { - return self.name; - } - }; - - /// Parameters are in memory following BuiltinCall. - pub const BuiltinCall = struct { - base: Node = Node{ .tag = .BuiltinCall }, - params_len: NodeIndex, - builtin_token: TokenIndex, - rparen_token: TokenIndex, - - /// After this the caller must initialize the fields_and_decls list. - pub fn alloc(allocator: *mem.Allocator, params_len: NodeIndex) !*BuiltinCall { - const bytes = try allocator.alignedAlloc(u8, @alignOf(BuiltinCall), sizeInBytes(params_len)); - return @ptrCast(*BuiltinCall, bytes.ptr); - } - - pub fn free(self: *BuiltinCall, allocator: *mem.Allocator) void { - const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.params_len)]; - allocator.free(bytes); - } - - pub fn iterate(self: *const BuiltinCall, index: usize) ?*Node { - var i = index; - - if (i < self.params_len) return self.paramsConst()[i]; - i -= self.params_len; - - return null; - } - - pub fn firstToken(self: *const BuiltinCall) TokenIndex { - return self.builtin_token; - } - - pub fn lastToken(self: *const BuiltinCall) TokenIndex { - return self.rparen_token; - } - - pub fn params(self: *BuiltinCall) []*Node { - const decls_start = @ptrCast([*]u8, self) + @sizeOf(BuiltinCall); - return @ptrCast([*]*Node, decls_start)[0..self.params_len]; - } - - pub fn paramsConst(self: *const BuiltinCall) []const *Node { - const decls_start = @ptrCast([*]const u8, self) + @sizeOf(BuiltinCall); - return @ptrCast([*]const *Node, decls_start)[0..self.params_len]; - } - - fn sizeInBytes(params_len: NodeIndex) usize { - return @sizeOf(BuiltinCall) + @sizeOf(*Node) * @as(usize, params_len); - } - }; - - /// 
The string literal tokens appear directly in memory after MultilineStringLiteral. - pub const MultilineStringLiteral = struct { - base: Node = Node{ .tag = .MultilineStringLiteral }, - lines_len: TokenIndex, - - /// After this the caller must initialize the lines list. - pub fn alloc(allocator: *mem.Allocator, lines_len: NodeIndex) !*MultilineStringLiteral { - const bytes = try allocator.alignedAlloc(u8, @alignOf(MultilineStringLiteral), sizeInBytes(lines_len)); - return @ptrCast(*MultilineStringLiteral, bytes.ptr); - } - - pub fn free(self: *MultilineStringLiteral, allocator: *mem.Allocator) void { - const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.lines_len)]; - allocator.free(bytes); - } - - pub fn iterate(self: *const MultilineStringLiteral, index: usize) ?*Node { - return null; - } - - pub fn firstToken(self: *const MultilineStringLiteral) TokenIndex { - return self.linesConst()[0]; - } - - pub fn lastToken(self: *const MultilineStringLiteral) TokenIndex { - return self.linesConst()[self.lines_len - 1]; - } - - pub fn lines(self: *MultilineStringLiteral) []TokenIndex { - const decls_start = @ptrCast([*]u8, self) + @sizeOf(MultilineStringLiteral); - return @ptrCast([*]TokenIndex, decls_start)[0..self.lines_len]; - } - - pub fn linesConst(self: *const MultilineStringLiteral) []const TokenIndex { - const decls_start = @ptrCast([*]const u8, self) + @sizeOf(MultilineStringLiteral); - return @ptrCast([*]const TokenIndex, decls_start)[0..self.lines_len]; - } - - fn sizeInBytes(lines_len: NodeIndex) usize { - return @sizeOf(MultilineStringLiteral) + @sizeOf(TokenIndex) * @as(usize, lines_len); - } - }; - - pub const Asm = struct { - base: Node = Node{ .tag = .Asm }, - asm_token: TokenIndex, - rparen: TokenIndex, - volatile_token: ?TokenIndex, - template: *Node, - outputs: []Output, - inputs: []Input, - /// A clobber node must be a StringLiteral or MultilineStringLiteral. 
- clobbers: []*Node, - - pub const Output = struct { - lbracket: TokenIndex, - symbolic_name: *Node, - constraint: *Node, - kind: Kind, - rparen: TokenIndex, - - pub const Kind = union(enum) { - Variable: *OneToken, - Return: *Node, - }; - - pub fn iterate(self: *const Output, index: usize) ?*Node { - var i = index; - - if (i < 1) return self.symbolic_name; - i -= 1; - - if (i < 1) return self.constraint; - i -= 1; - - switch (self.kind) { - .Variable => |variable_name| { - if (i < 1) return &variable_name.base; - i -= 1; - }, - .Return => |return_type| { - if (i < 1) return return_type; - i -= 1; - }, - } - - return null; - } - - pub fn firstToken(self: *const Output) TokenIndex { - return self.lbracket; - } - - pub fn lastToken(self: *const Output) TokenIndex { - return self.rparen; - } - }; - - pub const Input = struct { - lbracket: TokenIndex, - symbolic_name: *Node, - constraint: *Node, - expr: *Node, - rparen: TokenIndex, - - pub fn iterate(self: *const Input, index: usize) ?*Node { - var i = index; - - if (i < 1) return self.symbolic_name; - i -= 1; - - if (i < 1) return self.constraint; - i -= 1; - - if (i < 1) return self.expr; - i -= 1; - - return null; - } - - pub fn firstToken(self: *const Input) TokenIndex { - return self.lbracket; - } - - pub fn lastToken(self: *const Input) TokenIndex { - return self.rparen; - } - }; - - pub fn iterate(self: *const Asm, index: usize) ?*Node { - var i = index; - - if (i < self.outputs.len * 3) switch (i % 3) { - 0 => return self.outputs[i / 3].symbolic_name, - 1 => return self.outputs[i / 3].constraint, - 2 => switch (self.outputs[i / 3].kind) { - .Variable => |variable_name| return &variable_name.base, - .Return => |return_type| return return_type, - }, - else => unreachable, - }; - i -= self.outputs.len * 3; - - if (i < self.inputs.len * 3) switch (i % 3) { - 0 => return self.inputs[i / 3].symbolic_name, - 1 => return self.inputs[i / 3].constraint, - 2 => return self.inputs[i / 3].expr, - else => unreachable, - }; - 
i -= self.inputs.len * 3; - - return null; - } - - pub fn firstToken(self: *const Asm) TokenIndex { - return self.asm_token; - } - - pub fn lastToken(self: *const Asm) TokenIndex { - return self.rparen; - } - }; - - /// TODO remove from the Node base struct - /// TODO actually maybe remove entirely in favor of iterating backward from Node.firstToken() - /// and forwards to find same-line doc comments. - pub const DocComment = struct { - base: Node = Node{ .tag = .DocComment }, - /// Points to the first doc comment token. API users are expected to iterate over the - /// tokens array, looking for more doc comments, ignoring line comments, and stopping - /// at the first other token. - first_line: TokenIndex, - - pub fn iterate(self: *const DocComment, index: usize) ?*Node { - return null; - } - - pub fn firstToken(self: *const DocComment) TokenIndex { - return self.first_line; - } - - /// Returns the first doc comment line. Be careful, this may not be the desired behavior, - /// which would require the tokens array. - pub fn lastToken(self: *const DocComment) TokenIndex { - return self.first_line; - } - }; - - pub const TestDecl = struct { - base: Node = Node{ .tag = .TestDecl }, - doc_comments: ?*DocComment, - test_token: TokenIndex, - name: ?*Node, - body_node: *Node, - - pub fn iterate(self: *const TestDecl, index: usize) ?*Node { - var i = index; - - if (i < 1) return self.body_node; - i -= 1; - - return null; - } - - pub fn firstToken(self: *const TestDecl) TokenIndex { - return self.test_token; - } - - pub fn lastToken(self: *const TestDecl) TokenIndex { - return self.body_node.lastToken(); - } + pub const FnProto = struct { + params_start: Index, + params_end: Index, + /// Populated if align(A) is present. + align_expr: Index, + /// Populated if linksection(A) is present. + section_expr: Index, + /// Populated if callconv(A) is present. 
+ callconv_expr: Index, }; }; - -pub const PtrInfo = struct { - allowzero_token: ?TokenIndex = null, - align_info: ?Align = null, - const_token: ?TokenIndex = null, - volatile_token: ?TokenIndex = null, - sentinel: ?*Node = null, - - pub const Align = struct { - node: *Node, - bit_range: ?BitRange = null, - - pub const BitRange = struct { - start: *Node, - end: *Node, - }; - }; -}; - -test "iterate" { - var root = Node.Root{ - .base = Node{ .tag = Node.Tag.Root }, - .decls_len = 0, - .eof_token = 0, - }; - var base = &root.base; - testing.expect(base.iterate(0) == null); -} diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig index 5dd27cbcb3..e0bef649bb 100644 --- a/lib/std/zig/parse.zig +++ b/lib/std/zig/parse.zig @@ -11,85 +11,138 @@ const Node = ast.Node; const Tree = ast.Tree; const AstError = ast.Error; const TokenIndex = ast.TokenIndex; -const NodeIndex = ast.NodeIndex; const Token = std.zig.Token; pub const Error = error{ParseError} || Allocator.Error; /// Result should be freed with tree.deinit() when there are /// no more references to any of the tokens or nodes. -pub fn parse(gpa: *Allocator, source: []const u8) Allocator.Error!*Tree { - var token_ids = std.ArrayList(Token.Id).init(gpa); - defer token_ids.deinit(); - var token_locs = std.ArrayList(Token.Loc).init(gpa); - defer token_locs.deinit(); +pub fn parse(gpa: *Allocator, source: []const u8) Allocator.Error!Tree { + var tokens = ast.TokenList{}; + defer tokens.deinit(gpa); // Empirically, the zig std lib has an 8:1 ratio of source bytes to token count. 
const estimated_token_count = source.len / 8; - try token_ids.ensureCapacity(estimated_token_count); - try token_locs.ensureCapacity(estimated_token_count); + try tokens.ensureCapacity(gpa, estimated_token_count); var tokenizer = std.zig.Tokenizer.init(source); while (true) { const token = tokenizer.next(); - try token_ids.append(token.id); - try token_locs.append(token.loc); - if (token.id == .Eof) break; + if (token.tag == .LineComment) continue; + try tokens.append(gpa, .{ + .tag = token.tag, + .start = @intCast(u32, token.loc.start), + }); + if (token.tag == .Eof) break; } var parser: Parser = .{ .source = source, - .arena = std.heap.ArenaAllocator.init(gpa), .gpa = gpa, - .token_ids = token_ids.items, - .token_locs = token_locs.items, + .token_tags = tokens.items(.tag), + .token_starts = tokens.items(.start), .errors = .{}, + .nodes = .{}, + .extra_data = .{}, .tok_i = 0, }; defer parser.errors.deinit(gpa); - errdefer parser.arena.deinit(); + defer parser.nodes.deinit(gpa); + defer parser.extra_data.deinit(gpa); - while (token_ids.items[parser.tok_i] == .LineComment) parser.tok_i += 1; + // Empirically, Zig source code has a 2:1 ratio of tokens to AST nodes. + // Make sure at least 1 so we can use appendAssumeCapacity on the root node below. + const estimated_node_count = (tokens.len + 2) / 2; + try parser.nodes.ensureCapacity(gpa, estimated_node_count); - const root_node = try parser.parseRoot(); - - const tree = try parser.arena.allocator.create(Tree); - tree.* = .{ - .gpa = gpa, - .source = source, - .token_ids = token_ids.toOwnedSlice(), - .token_locs = token_locs.toOwnedSlice(), - .errors = parser.errors.toOwnedSlice(gpa), - .root_node = root_node, - .arena = parser.arena.state, + // Root node must be index 0. 
+ // Root <- skip ContainerMembers eof + parser.nodes.appendAssumeCapacity(.{ + .tag = .Root, + .main_token = 0, + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, + }); + const root_decls = try parser.parseContainerMembers(true); + // parseContainerMembers will try to skip as much + // invalid tokens as it can, so we are now at EOF. + assert(parser.token_tags[parser.tok_i] == .Eof); + parser.nodes.items(.data)[0] = .{ + .lhs = root_decls.start, + .rhs = root_decls.end, + }; + + // TODO experiment with compacting the MultiArrayList slices here + return Tree{ + .source = source, + .tokens = tokens.toOwnedSlice(), + .nodes = parser.nodes.toOwnedSlice(), + .extra_data = parser.extra_data.toOwnedSlice(gpa), + .errors = parser.errors.toOwnedSlice(gpa), }; - return tree; } +const null_node: Node.Index = 0; + /// Represents in-progress parsing, will be converted to an ast.Tree after completion. const Parser = struct { - arena: std.heap.ArenaAllocator, gpa: *Allocator, source: []const u8, - token_ids: []const Token.Id, - token_locs: []const Token.Loc, + token_tags: []const Token.Tag, + token_starts: []const ast.ByteOffset, tok_i: TokenIndex, errors: std.ArrayListUnmanaged(AstError), + nodes: ast.NodeList, + extra_data: std.ArrayListUnmanaged(Node.Index), - /// Root <- skip ContainerMembers eof - fn parseRoot(p: *Parser) Allocator.Error!*Node.Root { - const decls = try parseContainerMembers(p, true); - defer p.gpa.free(decls); + const SmallSpan = union(enum) { + zero_or_one: Node.Index, + multi: []Node.Index, - // parseContainerMembers will try to skip as much - // invalid tokens as it can so this can only be the EOF - const eof_token = p.eatToken(.Eof).?; + fn deinit(self: SmallSpan, gpa: *Allocator) void { + switch (self) { + .zero_or_one => {}, + .multi => |list| gpa.free(list), + } + } + }; - const decls_len = @intCast(NodeIndex, decls.len); - const node = try Node.Root.create(&p.arena.allocator, decls_len, eof_token); - std.mem.copy(*Node, node.decls(), decls); 
+ fn listToSpan(p: *Parser, list: []const Node.Index) !Node.SubRange { + try p.extra_data.appendSlice(p.gpa, list); + return Node.SubRange{ + .start = @intCast(Node.Index, p.extra_data.items.len - list.len), + .end = @intCast(Node.Index, p.extra_data.items.len), + }; + } - return node; + fn addNode(p: *Parser, elem: ast.NodeList.Elem) Allocator.Error!Node.Index { + const result = @intCast(Node.Index, p.nodes.len); + try p.nodes.append(p.gpa, elem); + return result; + } + + fn addExtra(p: *Parser, extra: anytype) Allocator.Error!Node.Index { + const fields = std.meta.fields(@TypeOf(extra)); + try p.extra_data.ensureCapacity(p.gpa, p.extra_data.items.len + fields.len); + const result = @intCast(u32, p.extra_data.items.len); + inline for (fields) |field| { + comptime assert(field.field_type == Node.Index); + p.extra_data.appendAssumeCapacity(@field(extra, field.name)); + } + return result; + } + + fn warn(p: *Parser, msg: ast.Error) error{OutOfMemory}!void { + @setCold(true); + try p.errors.append(p.gpa, msg); + } + + fn fail(p: *Parser, msg: ast.Error) error{ ParseError, OutOfMemory } { + @setCold(true); + try p.warn(msg); + return error.ParseError; } /// ContainerMembers @@ -99,8 +152,8 @@ const Parser = struct { /// / ContainerField COMMA ContainerMembers /// / ContainerField /// / - fn parseContainerMembers(p: *Parser, top_level: bool) ![]*Node { - var list = std.ArrayList(*Node).init(p.gpa); + fn parseContainerMembers(p: *Parser, top_level: bool) !Node.SubRange { + var list = std.ArrayList(Node.Index).init(p.gpa); defer list.deinit(); var field_state: union(enum) { @@ -115,103 +168,98 @@ const Parser = struct { err, } = .none; + // Skip container doc comments. 
+ while (p.eatToken(.ContainerDocComment)) |_| {} + while (true) { - if (try p.parseContainerDocComments()) |node| { - try list.append(node); - continue; - } + const doc_comment = p.eatDocComments(); - const doc_comments = try p.parseDocComment(); - - if (p.parseTestDecl() catch |err| switch (err) { + const test_decl_node = p.parseTestDecl() catch |err| switch (err) { error.OutOfMemory => return error.OutOfMemory, error.ParseError => { p.findNextContainerMember(); continue; }, - }) |node| { + }; + if (test_decl_node != 0) { if (field_state == .seen) { - field_state = .{ .end = node.firstToken() }; + field_state = .{ .end = p.nodes.items(.main_token)[test_decl_node] }; } - node.cast(Node.TestDecl).?.doc_comments = doc_comments; - try list.append(node); + try list.append(test_decl_node); continue; } - if (p.parseTopLevelComptime() catch |err| switch (err) { + const comptime_node = p.parseTopLevelComptime() catch |err| switch (err) { error.OutOfMemory => return error.OutOfMemory, error.ParseError => { p.findNextContainerMember(); continue; }, - }) |node| { + }; + if (comptime_node != 0) { if (field_state == .seen) { - field_state = .{ .end = node.firstToken() }; + field_state = .{ .end = p.nodes.items(.main_token)[comptime_node] }; } - node.cast(Node.Comptime).?.doc_comments = doc_comments; - try list.append(node); + try list.append(comptime_node); continue; } const visib_token = p.eatToken(.Keyword_pub); - if (p.parseTopLevelDecl(doc_comments, visib_token) catch |err| switch (err) { + const top_level_decl = p.parseTopLevelDecl() catch |err| switch (err) { error.OutOfMemory => return error.OutOfMemory, error.ParseError => { p.findNextContainerMember(); continue; }, - }) |node| { + }; + if (top_level_decl != 0) { if (field_state == .seen) { - field_state = .{ .end = visib_token orelse node.firstToken() }; + field_state = .{ + .end = visib_token orelse p.nodes.items(.main_token)[top_level_decl], + }; } - try list.append(node); + try list.append(top_level_decl); 
continue; } if (visib_token != null) { - try p.errors.append(p.gpa, .{ - .ExpectedPubItem = .{ .token = p.tok_i }, - }); + try p.warn(.{ .ExpectedPubItem = .{ .token = p.tok_i } }); // ignore this pub continue; } - if (p.parseContainerField() catch |err| switch (err) { + const container_field = p.parseContainerField() catch |err| switch (err) { error.OutOfMemory => return error.OutOfMemory, error.ParseError => { // attempt to recover p.findNextContainerMember(); continue; }, - }) |node| { + }; + if (container_field != 0) { switch (field_state) { .none => field_state = .seen, .err, .seen => {}, .end => |tok| { - try p.errors.append(p.gpa, .{ - .DeclBetweenFields = .{ .token = tok }, - }); + try p.warn(.{ .DeclBetweenFields = .{ .token = tok } }); // continue parsing, error will be reported later field_state = .err; }, } - - const field = node.cast(Node.ContainerField).?; - field.doc_comments = doc_comments; - try list.append(node); + try list.append(container_field); const comma = p.eatToken(.Comma) orelse { // try to continue parsing const index = p.tok_i; p.findNextContainerMember(); - const next = p.token_ids[p.tok_i]; + const next = p.token_tags[p.tok_i]; switch (next) { .Eof => { // no invalid tokens were found if (index == p.tok_i) break; // Invalid tokens, add error and exit - try p.errors.append(p.gpa, .{ + try p.warn(.{ .ExpectedToken = .{ .token = index, .expected_id = .Comma }, }); break; @@ -219,35 +267,33 @@ const Parser = struct { else => { if (next == .RBrace) { if (!top_level) break; - _ = p.nextToken(); + p.tok_i += 1; } // add error and continue - try p.errors.append(p.gpa, .{ + try p.warn(.{ .ExpectedToken = .{ .token = index, .expected_id = .Comma }, }); continue; }, } }; - if (try p.parseAppendedDocComment(comma)) |appended_comment| - field.doc_comments = appended_comment; continue; } // Dangling doc comment - if (doc_comments != null) { - try p.errors.append(p.gpa, .{ - .UnattachedDocComment = .{ .token = doc_comments.?.firstToken() }, + if 
(doc_comment) |tok| { + try p.warn(.{ + .UnattachedDocComment = .{ .token = tok }, }); } - const next = p.token_ids[p.tok_i]; + const next = p.token_tags[p.tok_i]; switch (next) { .Eof => break, .Keyword_comptime => { - _ = p.nextToken(); - try p.errors.append(p.gpa, .{ + p.tok_i += 1; + try p.warn(.{ .ExpectedBlockOrField = .{ .token = p.tok_i }, }); }, @@ -255,20 +301,20 @@ const Parser = struct { const index = p.tok_i; if (next == .RBrace) { if (!top_level) break; - _ = p.nextToken(); + p.tok_i += 1; } // this was likely not supposed to end yet, // try to find the next declaration p.findNextContainerMember(); - try p.errors.append(p.gpa, .{ + try p.warn(.{ .ExpectedContainerMembers = .{ .token = index }, }); }, } } - return list.toOwnedSlice(); + return p.listToSpan(list.items); } /// Attempts to find next container member by searching for certain tokens @@ -276,7 +322,7 @@ const Parser = struct { var level: u32 = 0; while (true) { const tok = p.nextToken(); - switch (p.token_ids[tok]) { + switch (p.token_tags[tok]) { // any of these can start a new top level declaration .Keyword_test, .Keyword_comptime, @@ -293,7 +339,7 @@ const Parser = struct { .Identifier, => { if (level == 0) { - p.putBackToken(tok); + p.tok_i -= 1; return; } }, @@ -310,13 +356,13 @@ const Parser = struct { .RBrace => { if (level == 0) { // end of container, exit - p.putBackToken(tok); + p.tok_i -= 1; return; } level -= 1; }, .Eof => { - p.putBackToken(tok); + p.tok_i -= 1; return; }, else => {}, @@ -329,11 +375,11 @@ const Parser = struct { var level: u32 = 0; while (true) { const tok = p.nextToken(); - switch (p.token_ids[tok]) { + switch (p.token_tags[tok]) { .LBrace => level += 1, .RBrace => { if (level == 0) { - p.putBackToken(tok); + p.tok_i -= 1; return; } level -= 1; @@ -344,7 +390,7 @@ const Parser = struct { } }, .Eof => { - p.putBackToken(tok); + p.tok_i -= 1; return; }, else => {}, @@ -352,328 +398,315 @@ const Parser = struct { } } - /// Eat a multiline container doc comment - 
fn parseContainerDocComments(p: *Parser) !?*Node { - if (p.eatToken(.ContainerDocComment)) |first_line| { - while (p.eatToken(.ContainerDocComment)) |_| {} - const node = try p.arena.allocator.create(Node.DocComment); - node.* = .{ .first_line = first_line }; - return &node.base; - } - return null; - } - - /// TestDecl <- KEYWORD_test STRINGLITERALSINGLE Block - fn parseTestDecl(p: *Parser) !?*Node { - const test_token = p.eatToken(.Keyword_test) orelse return null; - const name_node = try p.parseStringLiteralSingle(); - const block_node = (try p.parseBlock(null)) orelse { - try p.errors.append(p.gpa, .{ .ExpectedLBrace = .{ .token = p.tok_i } }); - return error.ParseError; - }; - - const test_node = try p.arena.allocator.create(Node.TestDecl); - test_node.* = .{ - .doc_comments = null, - .test_token = test_token, - .name = name_node, - .body_node = block_node, - }; - return &test_node.base; + /// TestDecl <- KEYWORD_test STRINGLITERALSINGLE? Block + fn parseTestDecl(p: *Parser) !Node.Index { + const test_token = p.eatToken(.Keyword_test) orelse return null_node; + const name_token = try p.expectToken(.StringLiteral); + const block_node = try p.parseBlock(); + if (block_node == 0) return p.fail(.{ .ExpectedLBrace = .{ .token = p.tok_i } }); + return p.addNode(.{ + .tag = .TestDecl, + .main_token = test_token, + .data = .{ + .lhs = name_token, + .rhs = block_node, + }, + }); } /// TopLevelComptime <- KEYWORD_comptime BlockExpr - fn parseTopLevelComptime(p: *Parser) !?*Node { - const tok = p.eatToken(.Keyword_comptime) orelse return null; - const lbrace = p.eatToken(.LBrace) orelse { - p.putBackToken(tok); - return null; - }; - p.putBackToken(lbrace); - const block_node = try p.expectNode(parseBlockExpr, .{ - .ExpectedLabelOrLBrace = .{ .token = p.tok_i }, - }); - - const comptime_node = try p.arena.allocator.create(Node.Comptime); - comptime_node.* = .{ - .doc_comments = null, - .comptime_token = tok, - .expr = block_node, - }; - return &comptime_node.base; + fn 
parseTopLevelComptime(p: *Parser) !Node.Index { + if (p.token_tags[p.tok_i] == .Keyword_comptime and + p.token_tags[p.tok_i + 1] == .LBrace) + { + return p.addNode(.{ + .tag = .Comptime, + .main_token = p.nextToken(), + .data = .{ + .lhs = try p.parseBlock(), + .rhs = undefined, + }, + }); + } else { + return null_node; + } } /// TopLevelDecl /// <- (KEYWORD_export / KEYWORD_extern STRINGLITERALSINGLE? / (KEYWORD_inline / KEYWORD_noinline))? FnProto (SEMICOLON / Block) /// / (KEYWORD_export / KEYWORD_extern STRINGLITERALSINGLE?)? KEYWORD_threadlocal? VarDecl /// / KEYWORD_usingnamespace Expr SEMICOLON - fn parseTopLevelDecl(p: *Parser, doc_comments: ?*Node.DocComment, visib_token: ?TokenIndex) !?*Node { - var lib_name: ?*Node = null; - const extern_export_inline_token = blk: { - if (p.eatToken(.Keyword_export)) |token| break :blk token; - if (p.eatToken(.Keyword_extern)) |token| { - lib_name = try p.parseStringLiteralSingle(); - break :blk token; - } - if (p.eatToken(.Keyword_inline)) |token| break :blk token; - if (p.eatToken(.Keyword_noinline)) |token| break :blk token; - break :blk null; - }; - - if (try p.parseFnProto(.top_level, .{ - .doc_comments = doc_comments, - .visib_token = visib_token, - .extern_export_inline_token = extern_export_inline_token, - .lib_name = lib_name, - })) |node| { - return node; + fn parseTopLevelDecl(p: *Parser) !Node.Index { + const extern_export_inline_token = p.nextToken(); + var expect_fn: bool = false; + var exported: bool = false; + switch (p.token_tags[extern_export_inline_token]) { + .Keyword_extern => _ = p.eatToken(.StringLiteral), + .Keyword_export => exported = true, + .Keyword_inline, .Keyword_noinline => expect_fn = true, + else => p.tok_i -= 1, } - - if (extern_export_inline_token) |token| { - if (p.token_ids[token] == .Keyword_inline or - p.token_ids[token] == .Keyword_noinline) - { - try p.errors.append(p.gpa, .{ - .ExpectedFn = .{ .token = p.tok_i }, - }); - return error.ParseError; - } - } - - const 
thread_local_token = p.eatToken(.Keyword_threadlocal); - - if (try p.parseVarDecl(.{ - .doc_comments = doc_comments, - .visib_token = visib_token, - .thread_local_token = thread_local_token, - .extern_export_token = extern_export_inline_token, - .lib_name = lib_name, - })) |node| { - return node; - } - - if (thread_local_token != null) { - try p.errors.append(p.gpa, .{ - .ExpectedVarDecl = .{ .token = p.tok_i }, - }); - // ignore this and try again; - return error.ParseError; - } - - if (extern_export_inline_token) |token| { - try p.errors.append(p.gpa, .{ - .ExpectedVarDeclOrFn = .{ .token = p.tok_i }, - }); - // ignore this and try again; - return error.ParseError; - } - - const use_token = p.eatToken(.Keyword_usingnamespace) orelse return null; - const expr = try p.expectNode(parseExpr, .{ - .ExpectedExpr = .{ .token = p.tok_i }, - }); - const semicolon_token = try p.expectToken(.Semicolon); - - const node = try p.arena.allocator.create(Node.Use); - node.* = .{ - .doc_comments = doc_comments orelse try p.parseAppendedDocComment(semicolon_token), - .visib_token = visib_token, - .use_token = use_token, - .expr = expr, - .semicolon_token = semicolon_token, - }; - - return &node.base; - } - - /// FnProto <- KEYWORD_fn IDENTIFIER? LPAREN ParamDeclList RPAREN ByteAlign? LinkSection? CallConv? EXCLAMATIONMARK? 
(Keyword_anytype / TypeExpr) - fn parseFnProto(p: *Parser, level: enum { top_level, as_type }, fields: struct { - doc_comments: ?*Node.DocComment = null, - visib_token: ?TokenIndex = null, - extern_export_inline_token: ?TokenIndex = null, - lib_name: ?*Node = null, - }) !?*Node { - // TODO: Remove once extern/async fn rewriting is - var is_async: ?void = null; - var is_extern_prototype: ?void = null; - const cc_token: ?TokenIndex = blk: { - if (p.eatToken(.Keyword_extern)) |token| { - is_extern_prototype = {}; - break :blk token; - } - if (p.eatToken(.Keyword_async)) |token| { - is_async = {}; - break :blk token; - } - break :blk null; - }; - const fn_token = p.eatToken(.Keyword_fn) orelse { - if (cc_token) |token| - p.putBackToken(token); - return null; - }; - const name_token = p.eatToken(.Identifier); - const lparen = try p.expectToken(.LParen); - const params = try p.parseParamDeclList(); - defer p.gpa.free(params); - const var_args_token = p.eatToken(.Ellipsis3); - const rparen = try p.expectToken(.RParen); - const align_expr = try p.parseByteAlign(); - const section_expr = try p.parseLinkSection(); - const callconv_expr = try p.parseCallconv(); - const exclamation_token = p.eatToken(.Bang); - - const return_type_expr = (try p.parseAnyType()) orelse - try p.expectNodeRecoverable(parseTypeExpr, .{ - // most likely the user forgot to specify the return type. - // Mark return type as invalid and try to continue. - .ExpectedReturnType = .{ .token = p.tok_i }, - }); - - // TODO https://github.com/ziglang/zig/issues/3750 - const R = Node.FnProto.ReturnType; - const return_type = if (return_type_expr == null) - R{ .Invalid = rparen } - else if (exclamation_token != null) - R{ .InferErrorSet = return_type_expr.? } - else - R{ .Explicit = return_type_expr.? 
}; - - const body_node: ?*Node = switch (level) { - .top_level => blk: { - if (p.eatToken(.Semicolon)) |_| { - break :blk null; - } - const body_block = (try p.parseBlock(null)) orelse { + const fn_proto = try p.parseFnProto(); + if (fn_proto != 0) { + switch (p.token_tags[p.tok_i]) { + .Semicolon => { + const semicolon_token = p.nextToken(); + try p.parseAppendedDocComment(semicolon_token); + return fn_proto; + }, + .LBrace => { + const body_block = try p.parseBlock(); + assert(body_block != 0); + return p.addNode(.{ + .tag = .FnDecl, + .main_token = p.nodes.items(.main_token)[fn_proto], + .data = .{ + .lhs = fn_proto, + .rhs = body_block, + }, + }); + }, + else => { // Since parseBlock only return error.ParseError on // a missing '}' we can assume this function was // supposed to end here. - try p.errors.append(p.gpa, .{ .ExpectedSemiOrLBrace = .{ .token = p.tok_i } }); - break :blk null; - }; - break :blk body_block; + try p.warn(.{ .ExpectedSemiOrLBrace = .{ .token = p.tok_i } }); + return null_node; + }, + } + } + if (expect_fn) { + try p.warn(.{ + .ExpectedFn = .{ .token = p.tok_i }, + }); + return error.ParseError; + } + + const thread_local_token = p.eatToken(.Keyword_threadlocal); + const var_decl = try p.parseVarDecl(); + if (var_decl != 0) { + const semicolon_token = try p.expectToken(.Semicolon); + try p.parseAppendedDocComment(semicolon_token); + return var_decl; + } + if (thread_local_token != null) { + return p.fail(.{ .ExpectedVarDecl = .{ .token = p.tok_i } }); + } + + if (exported) { + return p.fail(.{ .ExpectedVarDeclOrFn = .{ .token = p.tok_i } }); + } + + const usingnamespace_token = p.eatToken(.Keyword_usingnamespace) orelse return null_node; + const expr = try p.expectExpr(); + const semicolon_token = try p.expectToken(.Semicolon); + try p.parseAppendedDocComment(semicolon_token); + return p.addNode(.{ + .tag = .UsingNamespace, + .main_token = usingnamespace_token, + .data = .{ + .lhs = expr, + .rhs = undefined, }, - .as_type => null, - }; - 
- const fn_proto_node = try Node.FnProto.create(&p.arena.allocator, .{ - .params_len = params.len, - .fn_token = fn_token, - .return_type = return_type, - }, .{ - .doc_comments = fields.doc_comments, - .visib_token = fields.visib_token, - .name_token = name_token, - .var_args_token = var_args_token, - .extern_export_inline_token = fields.extern_export_inline_token, - .body_node = body_node, - .lib_name = fields.lib_name, - .align_expr = align_expr, - .section_expr = section_expr, - .callconv_expr = callconv_expr, - .is_extern_prototype = is_extern_prototype, - .is_async = is_async, }); - std.mem.copy(Node.FnProto.ParamDecl, fn_proto_node.params(), params); + } - return &fn_proto_node.base; + /// FnProto <- KEYWORD_fn IDENTIFIER? LPAREN ParamDeclList RPAREN ByteAlign? LinkSection? CallConv? EXCLAMATIONMARK? (Keyword_anytype / TypeExpr) + fn parseFnProto(p: *Parser) !Node.Index { + const fn_token = p.eatToken(.Keyword_fn) orelse return null_node; + _ = p.eatToken(.Identifier); + const params = try p.parseParamDeclList(); + defer params.deinit(p.gpa); + const align_expr = try p.parseByteAlign(); + const section_expr = try p.parseLinkSection(); + const callconv_expr = try p.parseCallconv(); + const bang_token = p.eatToken(.Bang); + + const return_type_expr = try p.parseTypeExpr(); + if (return_type_expr == 0) { + // most likely the user forgot to specify the return type. + // Mark return type as invalid and try to continue. 
+ try p.warn(.{ .ExpectedReturnType = .{ .token = p.tok_i } }); + } + + if (align_expr == 0 and section_expr == 0 and callconv_expr == 0) { + switch (params) { + .zero_or_one => |param| return p.addNode(.{ + .tag = .FnProtoSimple, + .main_token = fn_token, + .data = .{ + .lhs = param, + .rhs = return_type_expr, + }, + }), + .multi => |list| { + const span = try p.listToSpan(list); + return p.addNode(.{ + .tag = .FnProtoSimpleMulti, + .main_token = fn_token, + .data = .{ + .lhs = try p.addExtra(Node.SubRange{ + .start = span.start, + .end = span.end, + }), + .rhs = return_type_expr, + }, + }); + }, + } + } + switch (params) { + .zero_or_one => |param| return p.addNode(.{ + .tag = .FnProtoOne, + .main_token = fn_token, + .data = .{ + .lhs = try p.addExtra(Node.FnProtoOne{ + .param = param, + .align_expr = align_expr, + .section_expr = section_expr, + .callconv_expr = callconv_expr, + }), + .rhs = return_type_expr, + }, + }), + .multi => |list| { + const span = try p.listToSpan(list); + return p.addNode(.{ + .tag = .FnProto, + .main_token = fn_token, + .data = .{ + .lhs = try p.addExtra(Node.FnProto{ + .params_start = span.start, + .params_end = span.end, + .align_expr = align_expr, + .section_expr = section_expr, + .callconv_expr = callconv_expr, + }), + .rhs = return_type_expr, + }, + }); + }, + } } /// VarDecl <- (KEYWORD_const / KEYWORD_var) IDENTIFIER (COLON TypeExpr)? ByteAlign? LinkSection? (EQUAL Expr)? 
SEMICOLON - fn parseVarDecl(p: *Parser, fields: struct { - doc_comments: ?*Node.DocComment = null, - visib_token: ?TokenIndex = null, - thread_local_token: ?TokenIndex = null, - extern_export_token: ?TokenIndex = null, - lib_name: ?*Node = null, - comptime_token: ?TokenIndex = null, - }) !?*Node { + fn parseVarDecl(p: *Parser) !Node.Index { const mut_token = p.eatToken(.Keyword_const) orelse p.eatToken(.Keyword_var) orelse - return null; + return null_node; const name_token = try p.expectToken(.Identifier); - const type_node = if (p.eatToken(.Colon) != null) - try p.expectNode(parseTypeExpr, .{ - .ExpectedTypeExpr = .{ .token = p.tok_i }, - }) - else - null; + const type_node: Node.Index = if (p.eatToken(.Colon) == null) 0 else try p.expectTypeExpr(); const align_node = try p.parseByteAlign(); const section_node = try p.parseLinkSection(); - const eq_token = p.eatToken(.Equal); - const init_node = if (eq_token != null) blk: { - break :blk try p.expectNode(parseExpr, .{ - .ExpectedExpr = .{ .token = p.tok_i }, + const init_node: Node.Index = if (p.eatToken(.Equal) == null) 0 else try p.expectExpr(); + if (section_node == 0) { + if (align_node == 0) { + return p.addNode(.{ + .tag = .SimpleVarDecl, + .main_token = mut_token, + .data = .{ + .lhs = type_node, + .rhs = init_node, + }, + }); + } else if (type_node == 0) { + return p.addNode(.{ + .tag = .AlignedVarDecl, + .main_token = mut_token, + .data = .{ + .lhs = align_node, + .rhs = init_node, + }, + }); + } else { + return p.addNode(.{ + .tag = .LocalVarDecl, + .main_token = mut_token, + .data = .{ + .lhs = try p.addExtra(Node.LocalVarDecl{ + .type_node = type_node, + .align_node = align_node, + }), + .rhs = init_node, + }, + }); + } + } else { + return p.addNode(.{ + .tag = .GlobalVarDecl, + .main_token = mut_token, + .data = .{ + .lhs = try p.addExtra(Node.GlobalVarDecl{ + .type_node = type_node, + .align_node = align_node, + .section_node = section_node, + }), + .rhs = init_node, + }, }); - } else null; - const 
semicolon_token = try p.expectToken(.Semicolon); - - const doc_comments = fields.doc_comments orelse try p.parseAppendedDocComment(semicolon_token); - - const node = try Node.VarDecl.create(&p.arena.allocator, .{ - .mut_token = mut_token, - .name_token = name_token, - .semicolon_token = semicolon_token, - }, .{ - .doc_comments = doc_comments, - .visib_token = fields.visib_token, - .thread_local_token = fields.thread_local_token, - .eq_token = eq_token, - .comptime_token = fields.comptime_token, - .extern_export_token = fields.extern_export_token, - .lib_name = fields.lib_name, - .type_node = type_node, - .align_node = align_node, - .section_node = section_node, - .init_node = init_node, - }); - return &node.base; + } } /// ContainerField <- KEYWORD_comptime? IDENTIFIER (COLON TypeExpr ByteAlign?)? (EQUAL Expr)? - fn parseContainerField(p: *Parser) !?*Node { + fn parseContainerField(p: *Parser) !Node.Index { const comptime_token = p.eatToken(.Keyword_comptime); const name_token = p.eatToken(.Identifier) orelse { - if (comptime_token) |t| p.putBackToken(t); - return null; + if (comptime_token) |_| p.tok_i -= 1; + return null_node; }; - var align_expr: ?*Node = null; - var type_expr: ?*Node = null; + var align_expr: Node.Index = 0; + var type_expr: Node.Index = 0; if (p.eatToken(.Colon)) |_| { - if (p.eatToken(.Keyword_anytype) orelse p.eatToken(.Keyword_var)) |anytype_tok| { - const node = try p.arena.allocator.create(Node.OneToken); - node.* = .{ - .base = .{ .tag = .AnyType }, - .token = anytype_tok, - }; - type_expr = &node.base; - } else { - type_expr = try p.expectNode(parseTypeExpr, .{ - .ExpectedTypeExpr = .{ .token = p.tok_i }, + if (p.eatToken(.Keyword_anytype)) |anytype_tok| { + type_expr = try p.addNode(.{ + .tag = .AnyType, + .main_token = anytype_tok, + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, }); + } else { + type_expr = try p.expectTypeExpr(); align_expr = try p.parseByteAlign(); } } - const value_expr = if (p.eatToken(.Equal)) |_| - try 
p.expectNode(parseExpr, .{ - .ExpectedExpr = .{ .token = p.tok_i }, - }) - else - null; + const value_expr: Node.Index = if (p.eatToken(.Equal) == null) 0 else try p.expectExpr(); - const node = try p.arena.allocator.create(Node.ContainerField); - node.* = .{ - .doc_comments = null, - .comptime_token = comptime_token, - .name_token = name_token, - .type_expr = type_expr, - .value_expr = value_expr, - .align_expr = align_expr, - }; - return &node.base; + if (align_expr == 0) { + return p.addNode(.{ + .tag = .ContainerFieldInit, + .main_token = name_token, + .data = .{ + .lhs = type_expr, + .rhs = value_expr, + }, + }); + } else if (value_expr == 0) { + return p.addNode(.{ + .tag = .ContainerFieldAlign, + .main_token = name_token, + .data = .{ + .lhs = type_expr, + .rhs = align_expr, + }, + }); + } else { + return p.addNode(.{ + .tag = .ContainerField, + .main_token = name_token, + .data = .{ + .lhs = type_expr, + .rhs = try p.addExtra(Node.ContainerField{ + .value_expr = value_expr, + .align_expr = align_expr, + }), + }, + }); + } } /// Statement @@ -687,391 +720,1017 @@ const Parser = struct { /// / LabeledStatement /// / SwitchExpr /// / AssignExpr SEMICOLON - fn parseStatement(p: *Parser) Error!?*Node { + fn parseStatement(p: *Parser) Error!Node.Index { const comptime_token = p.eatToken(.Keyword_comptime); - if (try p.parseVarDecl(.{ - .comptime_token = comptime_token, - })) |node| { - return node; + const var_decl = try p.parseVarDecl(); + if (var_decl != 0) { + _ = try p.expectTokenRecoverable(.Semicolon); + return var_decl; } if (comptime_token) |token| { - const block_expr = try p.expectNode(parseBlockExprStatement, .{ - .ExpectedBlockOrAssignment = .{ .token = p.tok_i }, + return p.addNode(.{ + .tag = .Comptime, + .main_token = token, + .data = .{ + .lhs = try p.expectBlockExprStatement(), + .rhs = undefined, + }, }); - - const node = try p.arena.allocator.create(Node.Comptime); - node.* = .{ - .doc_comments = null, - .comptime_token = token, - .expr = 
block_expr, - }; - return &node.base; } - if (p.eatToken(.Keyword_nosuspend)) |nosuspend_token| { - const block_expr = try p.expectNode(parseBlockExprStatement, .{ - .ExpectedBlockOrAssignment = .{ .token = p.tok_i }, - }); - - const node = try p.arena.allocator.create(Node.Nosuspend); - node.* = .{ - .nosuspend_token = nosuspend_token, - .expr = block_expr, - }; - return &node.base; - } - - if (p.eatToken(.Keyword_suspend)) |suspend_token| { - const semicolon = p.eatToken(.Semicolon); - - const body_node = if (semicolon == null) blk: { - break :blk try p.expectNode(parseBlockExprStatement, .{ - .ExpectedBlockOrExpression = .{ .token = p.tok_i }, + const token = p.nextToken(); + switch (p.token_tags[token]) { + .Keyword_nosuspend => { + return p.addNode(.{ + .tag = .Nosuspend, + .main_token = token, + .data = .{ + .lhs = try p.expectBlockExprStatement(), + .rhs = undefined, + }, }); - } else null; - - const node = try p.arena.allocator.create(Node.Suspend); - node.* = .{ - .suspend_token = suspend_token, - .body = body_node, - }; - return &node.base; + }, + .Keyword_suspend => { + const block_expr: Node.Index = if (p.eatToken(.Semicolon) != null) + 0 + else + try p.expectBlockExprStatement(); + return p.addNode(.{ + .tag = .Suspend, + .main_token = token, + .data = .{ + .lhs = block_expr, + .rhs = undefined, + }, + }); + }, + .Keyword_defer => return p.addNode(.{ + .tag = .Defer, + .main_token = token, + .data = .{ + .lhs = undefined, + .rhs = try p.expectBlockExprStatement(), + }, + }), + .Keyword_errdefer => return p.addNode(.{ + .tag = .ErrDefer, + .main_token = token, + .data = .{ + .lhs = try p.parsePayload(), + .rhs = try p.expectBlockExprStatement(), + }, + }), + else => p.tok_i -= 1, } - const defer_token = p.eatToken(.Keyword_defer) orelse p.eatToken(.Keyword_errdefer); - if (defer_token) |token| { - const payload = if (p.token_ids[token] == .Keyword_errdefer) - try p.parsePayload() - else - null; - const expr_node = try 
p.expectNode(parseBlockExprStatement, .{ - .ExpectedBlockOrExpression = .{ .token = p.tok_i }, - }); - const node = try p.arena.allocator.create(Node.Defer); - node.* = .{ - .defer_token = token, - .expr = expr_node, - .payload = payload, - }; - return &node.base; - } + const if_statement = try p.parseIfStatement(); + if (if_statement != 0) return if_statement; - if (try p.parseIfStatement()) |node| return node; - if (try p.parseLabeledStatement()) |node| return node; - if (try p.parseSwitchExpr()) |node| return node; - if (try p.parseAssignExpr()) |node| { + const labeled_statement = try p.parseLabeledStatement(); + if (labeled_statement != 0) return labeled_statement; + + const switch_expr = try p.parseSwitchExpr(); + if (switch_expr != 0) return switch_expr; + + const assign_expr = try p.parseAssignExpr(); + if (assign_expr != 0) { _ = try p.expectTokenRecoverable(.Semicolon); - return node; + return assign_expr; } - return null; + return null_node; + } + + fn expectStatement(p: *Parser) !Node.Index { + const statement = try p.parseStatement(); + if (statement == 0) { + return p.fail(.{ .InvalidToken = .{ .token = p.tok_i } }); + } + return statement; } /// IfStatement /// <- IfPrefix BlockExpr ( KEYWORD_else Payload? Statement )? /// / IfPrefix AssignExpr ( SEMICOLON / KEYWORD_else Payload? 
Statement ) - fn parseIfStatement(p: *Parser) !?*Node { - const if_node = (try p.parseIfPrefix()) orelse return null; - const if_prefix = if_node.cast(Node.If).?; + fn parseIfStatement(p: *Parser) !Node.Index { + const if_token = p.eatToken(.Keyword_if) orelse return null_node; + _ = try p.expectToken(.LParen); + const condition = try p.expectExpr(); + _ = try p.expectToken(.RParen); + const then_payload = try p.parsePtrPayload(); - const block_expr = (try p.parseBlockExpr()); - const assign_expr = if (block_expr == null) - try p.expectNode(parseAssignExpr, .{ - .ExpectedBlockOrAssignment = .{ .token = p.tok_i }, - }) - else - null; - - const semicolon = if (assign_expr != null) p.eatToken(.Semicolon) else null; - - const else_node = if (semicolon == null) blk: { - const else_token = p.eatToken(.Keyword_else) orelse break :blk null; - const payload = try p.parsePayload(); - const else_body = try p.expectNode(parseStatement, .{ - .InvalidToken = .{ .token = p.tok_i }, - }); - - const node = try p.arena.allocator.create(Node.Else); - node.* = .{ - .else_token = else_token, - .payload = payload, - .body = else_body, - }; - - break :blk node; - } else null; - - if (block_expr) |body| { - if_prefix.body = body; - if_prefix.@"else" = else_node; - return if_node; - } - - if (assign_expr) |body| { - if_prefix.body = body; - if (semicolon != null) return if_node; - if (else_node != null) { - if_prefix.@"else" = else_node; - return if_node; + // TODO propose to change the syntax so that semicolons are always required + // inside if statements, even if there is an `else`. 
+ var else_required = false; + const then_expr = blk: { + const block_expr = try p.parseBlockExpr(); + if (block_expr != 0) break :blk block_expr; + const assign_expr = try p.parseAssignExpr(); + if (assign_expr == 0) { + return p.fail(.{ .ExpectedBlockOrAssignment = .{ .token = p.tok_i } }); } - try p.errors.append(p.gpa, .{ - .ExpectedSemiOrElse = .{ .token = p.tok_i }, + if (p.eatToken(.Semicolon)) |_| { + return p.addNode(.{ + .tag = if (then_payload == 0) .IfSimple else .IfSimpleOptional, + .main_token = if_token, + .data = .{ + .lhs = condition, + .rhs = assign_expr, + }, + }); + } + else_required = true; + break :blk assign_expr; + }; + const else_token = p.eatToken(.Keyword_else) orelse { + if (else_required) { + return p.fail(.{ .ExpectedSemiOrElse = .{ .token = p.tok_i } }); + } + return p.addNode(.{ + .tag = if (then_payload == 0) .IfSimple else .IfSimpleOptional, + .main_token = if_token, + .data = .{ + .lhs = condition, + .rhs = then_expr, + }, }); - } - - return if_node; + }; + const else_payload = try p.parsePayload(); + const else_expr = try p.expectStatement(); + const tag = if (else_payload != 0) + Node.Tag.IfError + else if (then_payload != 0) + Node.Tag.IfOptional + else + Node.Tag.If; + return p.addNode(.{ + .tag = tag, + .main_token = if_token, + .data = .{ + .lhs = condition, + .rhs = try p.addExtra(Node.If{ + .then_expr = then_expr, + .else_expr = else_expr, + }), + }, + }); } /// LabeledStatement <- BlockLabel? 
(Block / LoopStatement) - fn parseLabeledStatement(p: *Parser) !?*Node { - var colon: TokenIndex = undefined; - const label_token = p.parseBlockLabel(&colon); + fn parseLabeledStatement(p: *Parser) !Node.Index { + const label_token = p.parseBlockLabel(); + const block = try p.parseBlock(); + if (block != 0) return block; - if (try p.parseBlock(label_token)) |node| return node; + const loop_stmt = try p.parseLoopStatement(); + if (loop_stmt != 0) return loop_stmt; - if (try p.parseLoopStatement()) |node| { - if (node.cast(Node.For)) |for_node| { - for_node.label = label_token; - } else if (node.cast(Node.While)) |while_node| { - while_node.label = label_token; - } else unreachable; - return node; + if (label_token != 0) { + return p.fail(.{ .ExpectedLabelable = .{ .token = p.tok_i } }); } - if (label_token != null) { - try p.errors.append(p.gpa, .{ - .ExpectedLabelable = .{ .token = p.tok_i }, - }); - return error.ParseError; - } - - return null; + return null_node; } /// LoopStatement <- KEYWORD_inline? 
(ForStatement / WhileStatement) - fn parseLoopStatement(p: *Parser) !?*Node { + fn parseLoopStatement(p: *Parser) !Node.Index { const inline_token = p.eatToken(.Keyword_inline); - if (try p.parseForStatement()) |node| { - node.cast(Node.For).?.inline_token = inline_token; - return node; - } + const for_statement = try p.parseForStatement(); + if (for_statement != 0) return for_statement; - if (try p.parseWhileStatement()) |node| { - node.cast(Node.While).?.inline_token = inline_token; - return node; - } - if (inline_token == null) return null; + const while_statement = try p.parseWhileStatement(); + if (while_statement != 0) return while_statement; + + if (inline_token == null) return null_node; // If we've seen "inline", there should have been a "for" or "while" - try p.errors.append(p.gpa, .{ - .ExpectedInlinable = .{ .token = p.tok_i }, - }); - return error.ParseError; + return p.fail(.{ .ExpectedInlinable = .{ .token = p.tok_i } }); } + /// ForPrefix <- KEYWORD_for LPAREN Expr RPAREN PtrIndexPayload /// ForStatement /// <- ForPrefix BlockExpr ( KEYWORD_else Statement )? 
/// / ForPrefix AssignExpr ( SEMICOLON / KEYWORD_else Statement ) - fn parseForStatement(p: *Parser) !?*Node { - const node = (try p.parseForPrefix()) orelse return null; - const for_prefix = node.cast(Node.For).?; + fn parseForStatement(p: *Parser) !Node.Index { + const for_token = p.eatToken(.Keyword_for) orelse return null_node; + _ = try p.expectToken(.LParen); + const array_expr = try p.expectExpr(); + _ = try p.expectToken(.RParen); + _ = try p.parsePtrIndexPayload(); - if (try p.parseBlockExpr()) |block_expr_node| { - for_prefix.body = block_expr_node; - - if (p.eatToken(.Keyword_else)) |else_token| { - const statement_node = try p.expectNode(parseStatement, .{ - .InvalidToken = .{ .token = p.tok_i }, - }); - - const else_node = try p.arena.allocator.create(Node.Else); - else_node.* = .{ - .else_token = else_token, - .payload = null, - .body = statement_node, - }; - for_prefix.@"else" = else_node; - - return node; + // TODO propose to change the syntax so that semicolons are always required + // inside while statements, even if there is an `else`. 
+ var else_required = false; + const then_expr = blk: { + const block_expr = try p.parseBlockExpr(); + if (block_expr != 0) break :blk block_expr; + const assign_expr = try p.parseAssignExpr(); + if (assign_expr == 0) { + return p.fail(.{ .ExpectedBlockOrAssignment = .{ .token = p.tok_i } }); } - - return node; - } - - for_prefix.body = try p.expectNode(parseAssignExpr, .{ - .ExpectedBlockOrAssignment = .{ .token = p.tok_i }, - }); - - if (p.eatToken(.Semicolon) != null) return node; - - if (p.eatToken(.Keyword_else)) |else_token| { - const statement_node = try p.expectNode(parseStatement, .{ - .ExpectedStatement = .{ .token = p.tok_i }, + if (p.eatToken(.Semicolon)) |_| { + return p.addNode(.{ + .tag = .ForSimple, + .main_token = for_token, + .data = .{ + .lhs = array_expr, + .rhs = assign_expr, + }, + }); + } + else_required = true; + break :blk assign_expr; + }; + const else_token = p.eatToken(.Keyword_else) orelse { + if (else_required) { + return p.fail(.{ .ExpectedSemiOrElse = .{ .token = p.tok_i } }); + } + return p.addNode(.{ + .tag = .ForSimple, + .main_token = for_token, + .data = .{ + .lhs = array_expr, + .rhs = then_expr, + }, }); - - const else_node = try p.arena.allocator.create(Node.Else); - else_node.* = .{ - .else_token = else_token, - .payload = null, - .body = statement_node, - }; - for_prefix.@"else" = else_node; - return node; - } - - try p.errors.append(p.gpa, .{ - .ExpectedSemiOrElse = .{ .token = p.tok_i }, + }; + return p.addNode(.{ + .tag = .For, + .main_token = for_token, + .data = .{ + .lhs = array_expr, + .rhs = try p.addExtra(Node.If{ + .then_expr = then_expr, + .else_expr = try p.expectStatement(), + }), + }, }); - - return node; } + /// WhilePrefix <- KEYWORD_while LPAREN Expr RPAREN PtrPayload? WhileContinueExpr? /// WhileStatement /// <- WhilePrefix BlockExpr ( KEYWORD_else Payload? Statement )? /// / WhilePrefix AssignExpr ( SEMICOLON / KEYWORD_else Payload? 
Statement ) - fn parseWhileStatement(p: *Parser) !?*Node { - const node = (try p.parseWhilePrefix()) orelse return null; - const while_prefix = node.cast(Node.While).?; + fn parseWhileStatement(p: *Parser) !Node.Index { + const while_token = p.eatToken(.Keyword_while) orelse return null_node; + _ = try p.expectToken(.LParen); + const condition = try p.expectExpr(); + _ = try p.expectToken(.RParen); + const then_payload = try p.parsePtrPayload(); + const continue_expr = try p.parseWhileContinueExpr(); - if (try p.parseBlockExpr()) |block_expr_node| { - while_prefix.body = block_expr_node; - - if (p.eatToken(.Keyword_else)) |else_token| { - const payload = try p.parsePayload(); - - const statement_node = try p.expectNode(parseStatement, .{ - .InvalidToken = .{ .token = p.tok_i }, - }); - - const else_node = try p.arena.allocator.create(Node.Else); - else_node.* = .{ - .else_token = else_token, - .payload = payload, - .body = statement_node, - }; - while_prefix.@"else" = else_node; - - return node; + // TODO propose to change the syntax so that semicolons are always required + // inside while statements, even if there is an `else`. 
+ var else_required = false; + const then_expr = blk: { + const block_expr = try p.parseBlockExpr(); + if (block_expr != 0) break :blk block_expr; + const assign_expr = try p.parseAssignExpr(); + if (assign_expr == 0) { + return p.fail(.{ .ExpectedBlockOrAssignment = .{ .token = p.tok_i } }); } - - return node; - } - - while_prefix.body = try p.expectNode(parseAssignExpr, .{ - .ExpectedBlockOrAssignment = .{ .token = p.tok_i }, + if (p.eatToken(.Semicolon)) |_| { + if (continue_expr == 0) { + return p.addNode(.{ + .tag = if (then_payload == 0) .WhileSimple else .WhileSimpleOptional, + .main_token = while_token, + .data = .{ + .lhs = condition, + .rhs = assign_expr, + }, + }); + } else { + return p.addNode(.{ + .tag = if (then_payload == 0) .WhileCont else .WhileContOptional, + .main_token = while_token, + .data = .{ + .lhs = condition, + .rhs = try p.addExtra(Node.WhileCont{ + .continue_expr = continue_expr, + .then_expr = assign_expr, + }), + }, + }); + } + } + else_required = true; + break :blk assign_expr; + }; + const else_token = p.eatToken(.Keyword_else) orelse { + if (else_required) { + return p.fail(.{ .ExpectedSemiOrElse = .{ .token = p.tok_i } }); + } + if (continue_expr == 0) { + return p.addNode(.{ + .tag = if (then_payload == 0) .WhileSimple else .WhileSimpleOptional, + .main_token = while_token, + .data = .{ + .lhs = condition, + .rhs = then_expr, + }, + }); + } else { + return p.addNode(.{ + .tag = if (then_payload == 0) .WhileCont else .WhileContOptional, + .main_token = while_token, + .data = .{ + .lhs = condition, + .rhs = try p.addExtra(Node.WhileCont{ + .continue_expr = continue_expr, + .then_expr = then_expr, + }), + }, + }); + } + }; + const else_payload = try p.parsePayload(); + const else_expr = try p.expectStatement(); + const tag = if (else_payload != 0) + Node.Tag.WhileError + else if (then_payload != 0) + Node.Tag.WhileOptional + else + Node.Tag.While; + return p.addNode(.{ + .tag = tag, + .main_token = while_token, + .data = .{ + .lhs = 
condition, + .rhs = try p.addExtra(Node.While{ + .continue_expr = continue_expr, + .then_expr = then_expr, + .else_expr = else_expr, + }), + }, }); - - if (p.eatToken(.Semicolon) != null) return node; - - if (p.eatToken(.Keyword_else)) |else_token| { - const payload = try p.parsePayload(); - - const statement_node = try p.expectNode(parseStatement, .{ - .ExpectedStatement = .{ .token = p.tok_i }, - }); - - const else_node = try p.arena.allocator.create(Node.Else); - else_node.* = .{ - .else_token = else_token, - .payload = payload, - .body = statement_node, - }; - while_prefix.@"else" = else_node; - return node; - } - - try p.errors.append(p.gpa, .{ - .ExpectedSemiOrElse = .{ .token = p.tok_i }, - }); - - return node; } /// BlockExprStatement /// <- BlockExpr /// / AssignExpr SEMICOLON - fn parseBlockExprStatement(p: *Parser) !?*Node { - if (try p.parseBlockExpr()) |node| return node; - if (try p.parseAssignExpr()) |node| { - _ = try p.expectTokenRecoverable(.Semicolon); - return node; + fn parseBlockExprStatement(p: *Parser) !Node.Index { + const block_expr = try p.parseBlockExpr(); + if (block_expr != 0) { + return block_expr; } - return null; + const assign_expr = try p.parseAssignExpr(); + if (assign_expr != 0) { + _ = try p.expectTokenRecoverable(.Semicolon); + return assign_expr; + } + return null_node; + } + + fn expectBlockExprStatement(p: *Parser) !Node.Index { + const node = try p.parseBlockExprStatement(); + if (node == 0) { + return p.fail(.{ .ExpectedBlockOrExpression = .{ .token = p.tok_i } }); + } + return node; } /// BlockExpr <- BlockLabel? 
Block - fn parseBlockExpr(p: *Parser) Error!?*Node { - var colon: TokenIndex = undefined; - const label_token = p.parseBlockLabel(&colon); - const block_node = (try p.parseBlock(label_token)) orelse { - if (label_token) |label| { - p.putBackToken(label + 1); // ":" - p.putBackToken(label); // IDENTIFIER - } - return null; - }; - return block_node; + fn parseBlockExpr(p: *Parser) Error!Node.Index { + switch (p.token_tags[p.tok_i]) { + .Identifier => { + if (p.token_tags[p.tok_i + 1] == .Colon and + p.token_tags[p.tok_i + 2] == .LBrace) + { + p.tok_i += 2; + return p.parseBlock(); + } else { + return null_node; + } + }, + .LBrace => return p.parseBlock(), + else => return null_node, + } } /// AssignExpr <- Expr (AssignOp Expr)? - fn parseAssignExpr(p: *Parser) !?*Node { - return p.parseBinOpExpr(parseAssignOp, parseExpr, .Once); + /// AssignOp + /// <- ASTERISKEQUAL + /// / SLASHEQUAL + /// / PERCENTEQUAL + /// / PLUSEQUAL + /// / MINUSEQUAL + /// / LARROW2EQUAL + /// / RARROW2EQUAL + /// / AMPERSANDEQUAL + /// / CARETEQUAL + /// / PIPEEQUAL + /// / ASTERISKPERCENTEQUAL + /// / PLUSPERCENTEQUAL + /// / MINUSPERCENTEQUAL + /// / EQUAL + fn parseAssignExpr(p: *Parser) !Node.Index { + const expr = try p.parseExpr(); + if (expr == 0) return null_node; + + const tag: Node.Tag = switch (p.token_tags[p.tok_i]) { + .AsteriskEqual => .AssignMul, + .SlashEqual => .AssignDiv, + .PercentEqual => .AssignMod, + .PlusEqual => .AssignAdd, + .MinusEqual => .AssignSub, + .AngleBracketAngleBracketLeftEqual => .AssignBitShiftLeft, + .AngleBracketAngleBracketRightEqual => .AssignBitShiftRight, + .AmpersandEqual => .AssignBitAnd, + .CaretEqual => .AssignBitXor, + .PipeEqual => .AssignBitOr, + .AsteriskPercentEqual => .AssignMulWrap, + .PlusPercentEqual => .AssignAddWrap, + .MinusPercentEqual => .AssignSubWrap, + .Equal => .Assign, + else => return expr, + }; + return p.addNode(.{ + .tag = tag, + .main_token = p.nextToken(), + .data = .{ + .lhs = expr, + .rhs = try p.expectExpr(), + }, + 
}); + } + + fn expectAssignExpr(p: *Parser) !Node.Index { + const expr = try p.parseAssignExpr(); + if (expr == 0) { + return p.fail(.{ .ExpectedExprOrAssignment = .{ .token = p.tok_i } }); + } + return expr; } /// Expr <- BoolOrExpr - fn parseExpr(p: *Parser) Error!?*Node { - return p.parsePrefixOpExpr(parseTry, parseBoolOrExpr); + fn parseExpr(p: *Parser) Error!Node.Index { + return p.parseBoolOrExpr(); + } + + fn expectExpr(p: *Parser) Error!Node.Index { + const node = try p.parseExpr(); + if (node == 0) { + return p.fail(.{ .ExpectedExpr = .{ .token = p.tok_i } }); + } else { + return node; + } } /// BoolOrExpr <- BoolAndExpr (KEYWORD_or BoolAndExpr)* - fn parseBoolOrExpr(p: *Parser) !?*Node { - return p.parseBinOpExpr( - SimpleBinOpParseFn(.Keyword_or, .BoolOr), - parseBoolAndExpr, - .Infinitely, - ); + fn parseBoolOrExpr(p: *Parser) Error!Node.Index { + var res = try p.parseBoolAndExpr(); + if (res == 0) return null_node; + + while (true) { + switch (p.token_tags[p.tok_i]) { + .Keyword_or => { + const or_token = p.nextToken(); + const rhs = try p.parseBoolAndExpr(); + if (rhs == 0) { + return p.fail(.{ .InvalidToken = .{ .token = p.tok_i } }); + } + res = try p.addNode(.{ + .tag = .BoolOr, + .main_token = or_token, + .data = .{ + .lhs = res, + .rhs = rhs, + }, + }); + }, + else => return res, + } + } } /// BoolAndExpr <- CompareExpr (KEYWORD_and CompareExpr)* - fn parseBoolAndExpr(p: *Parser) !?*Node { - return p.parseBinOpExpr( - SimpleBinOpParseFn(.Keyword_and, .BoolAnd), - parseCompareExpr, - .Infinitely, - ); + fn parseBoolAndExpr(p: *Parser) !Node.Index { + var res = try p.parseCompareExpr(); + if (res == 0) return null_node; + + while (true) { + switch (p.token_tags[p.tok_i]) { + .Keyword_and => { + const and_token = p.nextToken(); + const rhs = try p.parseCompareExpr(); + if (rhs == 0) { + return p.fail(.{ .InvalidToken = .{ .token = p.tok_i } }); + } + res = try p.addNode(.{ + .tag = .BoolAnd, + .main_token = and_token, + .data = .{ + .lhs = res, + 
.rhs = rhs, + }, + }); + }, + else => return res, + } + } } /// CompareExpr <- BitwiseExpr (CompareOp BitwiseExpr)? - fn parseCompareExpr(p: *Parser) !?*Node { - return p.parseBinOpExpr(parseCompareOp, parseBitwiseExpr, .Once); + /// CompareOp + /// <- EQUALEQUAL + /// / EXCLAMATIONMARKEQUAL + /// / LARROW + /// / RARROW + /// / LARROWEQUAL + /// / RARROWEQUAL + fn parseCompareExpr(p: *Parser) !Node.Index { + const expr = try p.parseBitwiseExpr(); + if (expr == 0) return null_node; + + const tag: Node.Tag = switch (p.token_tags[p.tok_i]) { + .EqualEqual => .EqualEqual, + .BangEqual => .BangEqual, + .AngleBracketLeft => .LessThan, + .AngleBracketRight => .GreaterThan, + .AngleBracketLeftEqual => .LessOrEqual, + .AngleBracketRightEqual => .GreaterOrEqual, + else => return expr, + }; + return p.addNode(.{ + .tag = tag, + .main_token = p.nextToken(), + .data = .{ + .lhs = expr, + .rhs = try p.expectBitwiseExpr(), + }, + }); } /// BitwiseExpr <- BitShiftExpr (BitwiseOp BitShiftExpr)* - fn parseBitwiseExpr(p: *Parser) !?*Node { - return p.parseBinOpExpr(parseBitwiseOp, parseBitShiftExpr, .Infinitely); + /// BitwiseOp + /// <- AMPERSAND + /// / CARET + /// / PIPE + /// / KEYWORD_orelse + /// / KEYWORD_catch Payload? 
+ fn parseBitwiseExpr(p: *Parser) !Node.Index { + var res = try p.parseBitShiftExpr(); + if (res == 0) return null_node; + + while (true) { + const tag: Node.Tag = switch (p.token_tags[p.tok_i]) { + .Ampersand => .BitAnd, + .Caret => .BitXor, + .Pipe => .BitOr, + .Keyword_orelse => .OrElse, + .Keyword_catch => { + const catch_token = p.nextToken(); + _ = try p.parsePayload(); + const rhs = try p.parseBitShiftExpr(); + if (rhs == 0) { + return p.fail(.{ .InvalidToken = .{ .token = p.tok_i } }); + } + res = try p.addNode(.{ + .tag = .Catch, + .main_token = catch_token, + .data = .{ + .lhs = res, + .rhs = rhs, + }, + }); + continue; + }, + else => return res, + }; + res = try p.addNode(.{ + .tag = tag, + .main_token = p.nextToken(), + .data = .{ + .lhs = res, + .rhs = try p.expectBitShiftExpr(), + }, + }); + } + } + + fn expectBitwiseExpr(p: *Parser) Error!Node.Index { + const node = try p.parseBitwiseExpr(); + if (node == 0) { + return p.fail(.{ .InvalidToken = .{ .token = p.tok_i } }); + } else { + return node; + } } /// BitShiftExpr <- AdditionExpr (BitShiftOp AdditionExpr)* - fn parseBitShiftExpr(p: *Parser) !?*Node { - return p.parseBinOpExpr(parseBitShiftOp, parseAdditionExpr, .Infinitely); + /// BitShiftOp + /// <- LARROW2 + /// / RARROW2 + fn parseBitShiftExpr(p: *Parser) Error!Node.Index { + var res = try p.parseAdditionExpr(); + if (res == 0) return null_node; + + while (true) { + const tag: Node.Tag = switch (p.token_tags[p.tok_i]) { + .AngleBracketAngleBracketLeft => .BitShiftLeft, + .AngleBracketAngleBracketRight => .BitShiftRight, + else => return res, + }; + res = try p.addNode(.{ + .tag = tag, + .main_token = p.nextToken(), + .data = .{ + .lhs = res, + .rhs = try p.expectAdditionExpr(), + }, + }); + } + } + + fn expectBitShiftExpr(p: *Parser) Error!Node.Index { + const node = try p.parseBitShiftExpr(); + if (node == 0) { + return p.fail(.{ .InvalidToken = .{ .token = p.tok_i } }); + } else { + return node; + } } /// AdditionExpr <- MultiplyExpr 
(AdditionOp MultiplyExpr)* - fn parseAdditionExpr(p: *Parser) !?*Node { - return p.parseBinOpExpr(parseAdditionOp, parseMultiplyExpr, .Infinitely); + /// AdditionOp + /// <- PLUS + /// / MINUS + /// / PLUS2 + /// / PLUSPERCENT + /// / MINUSPERCENT + fn parseAdditionExpr(p: *Parser) Error!Node.Index { + var res = try p.parseMultiplyExpr(); + if (res == 0) return null_node; + + while (true) { + const tag: Node.Tag = switch (p.token_tags[p.tok_i]) { + .Plus => .Add, + .Minus => .Sub, + .PlusPlus => .ArrayCat, + .PlusPercent => .AddWrap, + .MinusPercent => .SubWrap, + else => return res, + }; + res = try p.addNode(.{ + .tag = tag, + .main_token = p.nextToken(), + .data = .{ + .lhs = res, + .rhs = try p.expectMultiplyExpr(), + }, + }); + } + } + + fn expectAdditionExpr(p: *Parser) Error!Node.Index { + const node = try p.parseAdditionExpr(); + if (node == 0) { + return p.fail(.{ .InvalidToken = .{ .token = p.tok_i } }); + } + return node; } /// MultiplyExpr <- PrefixExpr (MultiplyOp PrefixExpr)* - fn parseMultiplyExpr(p: *Parser) !?*Node { - return p.parseBinOpExpr(parseMultiplyOp, parsePrefixExpr, .Infinitely); + /// MultiplyOp + /// <- PIPE2 + /// / ASTERISK + /// / SLASH + /// / PERCENT + /// / ASTERISK2 + /// / ASTERISKPERCENT + fn parseMultiplyExpr(p: *Parser) Error!Node.Index { + var res = try p.parsePrefixExpr(); + if (res == 0) return null_node; + + while (true) { + const tag: Node.Tag = switch (p.token_tags[p.tok_i]) { + .PipePipe => .MergeErrorSets, + .Asterisk => .Mul, + .Slash => .Div, + .Percent => .Mod, + .AsteriskAsterisk => .ArrayMult, + .AsteriskPercent => .MulWrap, + else => return res, + }; + res = try p.addNode(.{ + .tag = tag, + .main_token = p.nextToken(), + .data = .{ + .lhs = res, + .rhs = try p.expectPrefixExpr(), + }, + }); + } + } + + fn expectMultiplyExpr(p: *Parser) Error!Node.Index { + const node = try p.parseMultiplyExpr(); + if (node == 0) { + return p.fail(.{ .InvalidToken = .{ .token = p.tok_i } }); + } + return node; } /// PrefixExpr <- 
PrefixOp* PrimaryExpr - fn parsePrefixExpr(p: *Parser) !?*Node { - return p.parsePrefixOpExpr(parsePrefixOp, parsePrimaryExpr); + /// PrefixOp + /// <- EXCLAMATIONMARK + /// / MINUS + /// / TILDE + /// / MINUSPERCENT + /// / AMPERSAND + /// / KEYWORD_try + /// / KEYWORD_await + fn parsePrefixExpr(p: *Parser) Error!Node.Index { + const tag: Node.Tag = switch (p.token_tags[p.tok_i]) { + .Bang => .BoolNot, + .Minus => .Negation, + .Tilde => .BitNot, + .MinusPercent => .NegationWrap, + .Ampersand => .AddressOf, + .Keyword_try => .Try, + .Keyword_await => .Await, + else => return p.parsePrimaryExpr(), + }; + return p.addNode(.{ + .tag = tag, + .main_token = p.nextToken(), + .data = .{ + .lhs = try p.expectPrefixExpr(), + .rhs = undefined, + }, + }); + } + + fn expectPrefixExpr(p: *Parser) Error!Node.Index { + const node = try p.parsePrefixExpr(); + if (node == 0) { + return p.fail(.{ .ExpectedPrefixExpr = .{ .token = p.tok_i } }); + } + return node; + } + + /// TypeExpr <- PrefixTypeOp* ErrorUnionExpr + /// PrefixTypeOp + /// <- QUESTIONMARK + /// / KEYWORD_anyframe MINUSRARROW + /// / ArrayTypeStart (ByteAlign / KEYWORD_const / KEYWORD_volatile / KEYWORD_allowzero)* + /// / PtrTypeStart (KEYWORD_align LPAREN Expr (COLON INTEGER COLON INTEGER)? RPAREN / KEYWORD_const / KEYWORD_volatile / KEYWORD_allowzero)* + /// PtrTypeStart + /// <- ASTERISK + /// / ASTERISK2 + /// / LBRACKET ASTERISK (LETTERC / COLON Expr)? RBRACKET + /// ArrayTypeStart <- LBRACKET Expr? (COLON Expr)? 
RBRACKET + fn parseTypeExpr(p: *Parser) Error!Node.Index { + switch (p.token_tags[p.tok_i]) { + .QuestionMark => return p.addNode(.{ + .tag = .OptionalType, + .main_token = p.nextToken(), + .data = .{ + .lhs = try p.expectTypeExpr(), + .rhs = undefined, + }, + }), + .Keyword_anyframe => switch (p.token_tags[p.tok_i + 1]) { + .Arrow => return p.addNode(.{ + .tag = .AnyFrameType, + .main_token = p.nextToken(), + .data = .{ + .lhs = p.nextToken(), + .rhs = try p.expectTypeExpr(), + }, + }), + else => return p.parseErrorUnionExpr(), + }, + .Asterisk => { + const asterisk = p.nextToken(); + const mods = try p.parsePtrModifiers(); + const elem_type = try p.expectTypeExpr(); + if (mods.bit_range_start == 0) { + return p.addNode(.{ + .tag = .PtrTypeAligned, + .main_token = asterisk, + .data = .{ + .lhs = mods.align_node, + .rhs = elem_type, + }, + }); + } else { + return p.addNode(.{ + .tag = .PtrType, + .main_token = asterisk, + .data = .{ + .lhs = try p.addExtra(Node.PtrType{ + .sentinel = 0, + .align_node = mods.align_node, + .bit_range_start = mods.bit_range_start, + .bit_range_end = mods.bit_range_end, + }), + .rhs = elem_type, + }, + }); + } + }, + .AsteriskAsterisk => { + const asterisk = p.nextToken(); + const mods = try p.parsePtrModifiers(); + const elem_type = try p.expectTypeExpr(); + const inner: Node.Index = inner: { + if (mods.bit_range_start == 0) { + break :inner try p.addNode(.{ + .tag = .PtrTypeAligned, + .main_token = asterisk, + .data = .{ + .lhs = mods.align_node, + .rhs = elem_type, + }, + }); + } else { + break :inner try p.addNode(.{ + .tag = .PtrType, + .main_token = asterisk, + .data = .{ + .lhs = try p.addExtra(Node.PtrType{ + .sentinel = 0, + .align_node = mods.align_node, + .bit_range_start = mods.bit_range_start, + .bit_range_end = mods.bit_range_end, + }), + .rhs = elem_type, + }, + }); + } + }; + return p.addNode(.{ + .tag = .PtrTypeAligned, + .main_token = asterisk, + .data = .{ + .lhs = 0, + .rhs = inner, + }, + }); + }, + .LBracket => 
switch (p.token_tags[p.tok_i + 1]) { + .Asterisk => { + const lbracket = p.nextToken(); + const asterisk = p.nextToken(); + var sentinel: Node.Index = 0; + prefix: { + if (p.eatToken(.Identifier)) |ident| { + const token_slice = p.source[p.token_starts[ident]..][0..2]; + if (!std.mem.eql(u8, token_slice, "c]")) { + p.tok_i -= 1; + } else { + break :prefix; + } + } + if (p.eatToken(.Colon)) |_| { + sentinel = try p.expectExpr(); + } + } + _ = try p.expectToken(.RBracket); + const mods = try p.parsePtrModifiers(); + const elem_type = try p.expectTypeExpr(); + if (mods.bit_range_start == 0) { + if (sentinel == 0) { + return p.addNode(.{ + .tag = .PtrTypeAligned, + .main_token = asterisk, + .data = .{ + .lhs = mods.align_node, + .rhs = elem_type, + }, + }); + } else if (mods.align_node == 0) { + return p.addNode(.{ + .tag = .PtrTypeSentinel, + .main_token = asterisk, + .data = .{ + .lhs = sentinel, + .rhs = elem_type, + }, + }); + } else { + return p.addNode(.{ + .tag = .SliceType, + .main_token = asterisk, + .data = .{ + .lhs = try p.addExtra(.{ + .sentinel = sentinel, + .align_node = mods.align_node, + }), + .rhs = elem_type, + }, + }); + } + } else { + return p.addNode(.{ + .tag = .PtrType, + .main_token = asterisk, + .data = .{ + .lhs = try p.addExtra(.{ + .sentinel = sentinel, + .align_node = mods.align_node, + .bit_range_start = mods.bit_range_start, + .bit_range_end = mods.bit_range_end, + }), + .rhs = elem_type, + }, + }); + } + }, + else => { + const lbracket = p.nextToken(); + const len_expr = try p.parseExpr(); + const sentinel: Node.Index = if (p.eatToken(.Colon)) |_| + try p.expectExpr() + else + 0; + _ = try p.expectToken(.RBracket); + const mods = try p.parsePtrModifiers(); + const elem_type = try p.expectTypeExpr(); + if (mods.bit_range_start != 0) { + @panic("TODO implement this error"); + //try p.warn(.{ + // .BitRangeInvalid = .{ .node = mods.bit_range_start }, + //}); + } + if (len_expr == 0) { + if (sentinel == 0) { + return p.addNode(.{ + .tag = 
.PtrTypeAligned, + .main_token = lbracket, + .data = .{ + .lhs = mods.align_node, + .rhs = elem_type, + }, + }); + } else if (mods.align_node == 0) { + return p.addNode(.{ + .tag = .PtrTypeSentinel, + .main_token = lbracket, + .data = .{ + .lhs = sentinel, + .rhs = elem_type, + }, + }); + } else { + return p.addNode(.{ + .tag = .SliceType, + .main_token = lbracket, + .data = .{ + .lhs = try p.addExtra(.{ + .sentinel = sentinel, + .align_node = mods.align_node, + }), + .rhs = elem_type, + }, + }); + } + } else { + if (mods.align_node != 0) { + @panic("TODO implement this error"); + //try p.warn(.{ + // .AlignInvalid = .{ .node = mods.align_node }, + //}); + } + if (sentinel == 0) { + return p.addNode(.{ + .tag = .ArrayType, + .main_token = lbracket, + .data = .{ + .lhs = len_expr, + .rhs = elem_type, + }, + }); + } else { + return p.addNode(.{ + .tag = .ArrayTypeSentinel, + .main_token = lbracket, + .data = .{ + .lhs = len_expr, + .rhs = try p.addExtra(.{ + .elem_type = elem_type, + .sentinel = sentinel, + }), + }, + }); + } + } + }, + }, + else => return p.parseErrorUnionExpr(), + } + } + + fn expectTypeExpr(p: *Parser) Error!Node.Index { + const node = try p.parseTypeExpr(); + if (node == 0) { + return p.fail(.{ .ExpectedTypeExpr = .{ .token = p.tok_i } }); + } + return node; } /// PrimaryExpr @@ -1086,115 +1745,134 @@ const Parser = struct { /// / BlockLabel? 
LoopExpr /// / Block /// / CurlySuffixExpr - fn parsePrimaryExpr(p: *Parser) !?*Node { - if (try p.parseAsmExpr()) |node| return node; - if (try p.parseIfExpr()) |node| return node; - - if (p.eatToken(.Keyword_break)) |token| { - const label = try p.parseBreakLabel(); - const expr_node = try p.parseExpr(); - const node = try Node.ControlFlowExpression.create(&p.arena.allocator, .{ - .tag = .Break, - .ltoken = token, - }, .{ - .label = label, - .rhs = expr_node, - }); - return &node.base; + fn parsePrimaryExpr(p: *Parser) !Node.Index { + switch (p.token_tags[p.tok_i]) { + .Keyword_asm => return p.parseAsmExpr(), + .Keyword_if => return p.parseIfExpr(), + .Keyword_break => { + p.tok_i += 1; + return p.addNode(.{ + .tag = .Break, + .main_token = p.tok_i - 1, + .data = .{ + .lhs = try p.parseBreakLabel(), + .rhs = try p.parseExpr(), + }, + }); + }, + .Keyword_continue => { + p.tok_i += 1; + return p.addNode(.{ + .tag = .Continue, + .main_token = p.tok_i - 1, + .data = .{ + .lhs = try p.parseBreakLabel(), + .rhs = undefined, + }, + }); + }, + .Keyword_comptime => { + p.tok_i += 1; + return p.addNode(.{ + .tag = .Comptime, + .main_token = p.tok_i - 1, + .data = .{ + .lhs = try p.expectExpr(), + .rhs = undefined, + }, + }); + }, + .Keyword_nosuspend => { + p.tok_i += 1; + return p.addNode(.{ + .tag = .Nosuspend, + .main_token = p.tok_i - 1, + .data = .{ + .lhs = try p.expectExpr(), + .rhs = undefined, + }, + }); + }, + .Keyword_resume => { + p.tok_i += 1; + return p.addNode(.{ + .tag = .Resume, + .main_token = p.tok_i - 1, + .data = .{ + .lhs = try p.expectExpr(), + .rhs = undefined, + }, + }); + }, + .Keyword_return => { + p.tok_i += 1; + return p.addNode(.{ + .tag = .Return, + .main_token = p.tok_i - 1, + .data = .{ + .lhs = try p.parseExpr(), + .rhs = undefined, + }, + }); + }, + .Identifier => { + if (p.token_tags[p.tok_i + 1] == .Colon) { + switch (p.token_tags[p.tok_i + 2]) { + .Keyword_inline => { + p.tok_i += 3; + switch (p.token_tags[p.tok_i]) { + .Keyword_for => 
return p.parseForExpr(), + .Keyword_while => return p.parseWhileExpr(), + else => return p.fail(.{ + .ExpectedInlinable = .{ .token = p.tok_i }, + }), + } + }, + .Keyword_for => { + p.tok_i += 2; + return p.parseForExpr(); + }, + .Keyword_while => { + p.tok_i += 2; + return p.parseWhileExpr(); + }, + .LBrace => { + p.tok_i += 2; + return p.parseBlock(); + }, + else => return p.parseCurlySuffixExpr(), + } + } else { + return p.parseCurlySuffixExpr(); + } + }, + .Keyword_inline => { + p.tok_i += 2; + switch (p.token_tags[p.tok_i]) { + .Keyword_for => return p.parseForExpr(), + .Keyword_while => return p.parseWhileExpr(), + else => return p.fail(.{ + .ExpectedInlinable = .{ .token = p.tok_i }, + }), + } + }, + .Keyword_for => return p.parseForExpr(), + .Keyword_while => return p.parseWhileExpr(), + .LBrace => return p.parseBlock(), + else => return p.parseCurlySuffixExpr(), } - - if (p.eatToken(.Keyword_comptime)) |token| { - const expr_node = try p.expectNode(parseExpr, .{ - .ExpectedExpr = .{ .token = p.tok_i }, - }); - const node = try p.arena.allocator.create(Node.Comptime); - node.* = .{ - .doc_comments = null, - .comptime_token = token, - .expr = expr_node, - }; - return &node.base; - } - - if (p.eatToken(.Keyword_nosuspend)) |token| { - const expr_node = try p.expectNode(parseExpr, .{ - .ExpectedExpr = .{ .token = p.tok_i }, - }); - const node = try p.arena.allocator.create(Node.Nosuspend); - node.* = .{ - .nosuspend_token = token, - .expr = expr_node, - }; - return &node.base; - } - - if (p.eatToken(.Keyword_continue)) |token| { - const label = try p.parseBreakLabel(); - const node = try Node.ControlFlowExpression.create(&p.arena.allocator, .{ - .tag = .Continue, - .ltoken = token, - }, .{ - .label = label, - .rhs = null, - }); - return &node.base; - } - - if (p.eatToken(.Keyword_resume)) |token| { - const expr_node = try p.expectNode(parseExpr, .{ - .ExpectedExpr = .{ .token = p.tok_i }, - }); - const node = try p.arena.allocator.create(Node.SimplePrefixOp); 
- node.* = .{ - .base = .{ .tag = .Resume }, - .op_token = token, - .rhs = expr_node, - }; - return &node.base; - } - - if (p.eatToken(.Keyword_return)) |token| { - const expr_node = try p.parseExpr(); - const node = try Node.ControlFlowExpression.create(&p.arena.allocator, .{ - .tag = .Return, - .ltoken = token, - }, .{ - .rhs = expr_node, - }); - return &node.base; - } - - var colon: TokenIndex = undefined; - const label = p.parseBlockLabel(&colon); - if (try p.parseLoopExpr()) |node| { - if (node.cast(Node.For)) |for_node| { - for_node.label = label; - } else if (node.cast(Node.While)) |while_node| { - while_node.label = label; - } else unreachable; - return node; - } - if (label) |token| { - p.putBackToken(token + 1); // ":" - p.putBackToken(token); // IDENTIFIER - } - - if (try p.parseBlock(null)) |node| return node; - if (try p.parseCurlySuffixExpr()) |node| return node; - - return null; } /// IfExpr <- IfPrefix Expr (KEYWORD_else Payload? Expr)? - fn parseIfExpr(p: *Parser) !?*Node { + fn parseIfExpr(p: *Parser) !Node.Index { return p.parseIf(parseExpr); } /// Block <- LBRACE Statement* RBRACE - fn parseBlock(p: *Parser, label_token: ?TokenIndex) !?*Node { - const lbrace = p.eatToken(.LBrace) orelse return null; + fn parseBlock(p: *Parser) !Node.Index { + const lbrace = p.eatToken(.LBrace) orelse return null_node; - var statements = std.ArrayList(*Node).init(p.gpa); + var statements = std.ArrayList(Node.Index).init(p.gpa); defer statements.deinit(); while (true) { @@ -1205,315 +1883,312 @@ const Parser = struct { p.findNextStmt(); continue; }, - }) orelse break; + }); + if (statement == 0) break; try statements.append(statement); } const rbrace = try p.expectToken(.RBrace); + const statements_span = try p.listToSpan(statements.items); - const statements_len = @intCast(NodeIndex, statements.items.len); - - if (label_token) |label| { - const block_node = try Node.LabeledBlock.alloc(&p.arena.allocator, statements_len); - block_node.* = .{ - .label = label, - 
.lbrace = lbrace, - .statements_len = statements_len, - .rbrace = rbrace, - }; - std.mem.copy(*Node, block_node.statements(), statements.items); - return &block_node.base; - } else { - const block_node = try Node.Block.alloc(&p.arena.allocator, statements_len); - block_node.* = .{ - .lbrace = lbrace, - .statements_len = statements_len, - .rbrace = rbrace, - }; - std.mem.copy(*Node, block_node.statements(), statements.items); - return &block_node.base; - } - } - - /// LoopExpr <- KEYWORD_inline? (ForExpr / WhileExpr) - fn parseLoopExpr(p: *Parser) !?*Node { - const inline_token = p.eatToken(.Keyword_inline); - - if (try p.parseForExpr()) |node| { - node.cast(Node.For).?.inline_token = inline_token; - return node; - } - - if (try p.parseWhileExpr()) |node| { - node.cast(Node.While).?.inline_token = inline_token; - return node; - } - - if (inline_token == null) return null; - - // If we've seen "inline", there should have been a "for" or "while" - try p.errors.append(p.gpa, .{ - .ExpectedInlinable = .{ .token = p.tok_i }, + return p.addNode(.{ + .tag = .Block, + .main_token = lbrace, + .data = .{ + .lhs = statements_span.start, + .rhs = statements_span.end, + }, }); - return error.ParseError; } + /// ForPrefix <- KEYWORD_for LPAREN Expr RPAREN PtrIndexPayload /// ForExpr <- ForPrefix Expr (KEYWORD_else Expr)? 
- fn parseForExpr(p: *Parser) !?*Node { - const node = (try p.parseForPrefix()) orelse return null; - const for_prefix = node.cast(Node.For).?; + fn parseForExpr(p: *Parser) !Node.Index { + const for_token = p.eatToken(.Keyword_for) orelse return null_node; + _ = try p.expectToken(.LParen); + const array_expr = try p.expectExpr(); + _ = try p.expectToken(.RParen); + _ = try p.parsePtrIndexPayload(); - const body_node = try p.expectNode(parseExpr, .{ - .ExpectedExpr = .{ .token = p.tok_i }, - }); - for_prefix.body = body_node; - - if (p.eatToken(.Keyword_else)) |else_token| { - const body = try p.expectNode(parseExpr, .{ - .ExpectedExpr = .{ .token = p.tok_i }, + const then_expr = try p.expectExpr(); + const else_token = p.eatToken(.Keyword_else) orelse { + return p.addNode(.{ + .tag = .ForSimple, + .main_token = for_token, + .data = .{ + .lhs = array_expr, + .rhs = then_expr, + }, }); - - const else_node = try p.arena.allocator.create(Node.Else); - else_node.* = .{ - .else_token = else_token, - .payload = null, - .body = body, - }; - - for_prefix.@"else" = else_node; - } - - return node; + }; + const else_expr = try p.expectExpr(); + return p.addNode(.{ + .tag = .For, + .main_token = for_token, + .data = .{ + .lhs = array_expr, + .rhs = try p.addExtra(Node.If{ + .then_expr = then_expr, + .else_expr = else_expr, + }), + }, + }); } + /// WhilePrefix <- KEYWORD_while LPAREN Expr RPAREN PtrPayload? WhileContinueExpr? /// WhileExpr <- WhilePrefix Expr (KEYWORD_else Payload? Expr)? 
- fn parseWhileExpr(p: *Parser) !?*Node { - const node = (try p.parseWhilePrefix()) orelse return null; - const while_prefix = node.cast(Node.While).?; + fn parseWhileExpr(p: *Parser) !Node.Index { + const while_token = p.eatToken(.Keyword_while) orelse return null_node; + _ = try p.expectToken(.LParen); + const condition = try p.expectExpr(); + _ = try p.expectToken(.RParen); + const then_payload = try p.parsePtrPayload(); + const continue_expr = try p.parseWhileContinueExpr(); - const body_node = try p.expectNode(parseExpr, .{ - .ExpectedExpr = .{ .token = p.tok_i }, + const then_expr = try p.expectExpr(); + const else_token = p.eatToken(.Keyword_else) orelse { + if (continue_expr == 0) { + return p.addNode(.{ + .tag = if (then_payload == 0) .WhileSimple else .WhileSimpleOptional, + .main_token = while_token, + .data = .{ + .lhs = condition, + .rhs = then_expr, + }, + }); + } else { + return p.addNode(.{ + .tag = if (then_payload == 0) .WhileCont else .WhileContOptional, + .main_token = while_token, + .data = .{ + .lhs = condition, + .rhs = try p.addExtra(Node.WhileCont{ + .continue_expr = continue_expr, + .then_expr = then_expr, + }), + }, + }); + } + }; + const else_payload = try p.parsePayload(); + const else_expr = try p.expectExpr(); + const tag = if (else_payload != 0) + Node.Tag.WhileError + else if (then_payload != 0) + Node.Tag.WhileOptional + else + Node.Tag.While; + return p.addNode(.{ + .tag = tag, + .main_token = while_token, + .data = .{ + .lhs = condition, + .rhs = try p.addExtra(Node.While{ + .continue_expr = continue_expr, + .then_expr = then_expr, + .else_expr = else_expr, + }), + }, }); - while_prefix.body = body_node; - - if (p.eatToken(.Keyword_else)) |else_token| { - const payload = try p.parsePayload(); - const body = try p.expectNode(parseExpr, .{ - .ExpectedExpr = .{ .token = p.tok_i }, - }); - - const else_node = try p.arena.allocator.create(Node.Else); - else_node.* = .{ - .else_token = else_token, - .payload = payload, - .body = body, 
- }; - - while_prefix.@"else" = else_node; - } - - return node; } /// CurlySuffixExpr <- TypeExpr InitList? - fn parseCurlySuffixExpr(p: *Parser) !?*Node { - const lhs = (try p.parseTypeExpr()) orelse return null; - const suffix_op = (try p.parseInitList(lhs)) orelse return lhs; - return suffix_op; - } - /// InitList /// <- LBRACE FieldInit (COMMA FieldInit)* COMMA? RBRACE /// / LBRACE Expr (COMMA Expr)* COMMA? RBRACE /// / LBRACE RBRACE - fn parseInitList(p: *Parser, lhs: *Node) !?*Node { - const lbrace = p.eatToken(.LBrace) orelse return null; - var init_list = std.ArrayList(*Node).init(p.gpa); + fn parseCurlySuffixExpr(p: *Parser) !Node.Index { + const lhs = try p.parseTypeExpr(); + if (lhs == 0) return null_node; + const lbrace = p.eatToken(.LBrace) orelse return lhs; + + // If there are 0 or 1 items, we can use ArrayInitOne/StructInitOne; + // otherwise we use the full ArrayInit/StructInit. + + if (p.eatToken(.RBrace)) |_| { + return p.addNode(.{ + .tag = .StructInitOne, + .main_token = lbrace, + .data = .{ + .lhs = lhs, + .rhs = 0, + }, + }); + } + const field_init = try p.parseFieldInit(); + if (field_init != 0) { + const comma_one = p.eatToken(.Comma); + if (p.eatToken(.RBrace)) |_| { + return p.addNode(.{ + .tag = .StructInitOne, + .main_token = lbrace, + .data = .{ + .lhs = lhs, + .rhs = field_init, + }, + }); + } + + var init_list = std.ArrayList(Node.Index).init(p.gpa); + defer init_list.deinit(); + + try init_list.append(field_init); + + while (true) { + const next = try p.expectFieldInit(); + try init_list.append(next); + + switch (p.token_tags[p.nextToken()]) { + .Comma => { + if (p.eatToken(.RBrace)) |_| break; + continue; + }, + .RBrace => break, + .Colon, .RParen, .RBracket => { + p.tok_i -= 1; + return p.fail(.{ + .ExpectedToken = .{ + .token = p.tok_i, + .expected_id = .RBrace, + }, + }); + }, + else => { + // This is likely just a missing comma; + // give an error but continue parsing this list. 
+ p.tok_i -= 1; + try p.warn(.{ + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + }); + }, + } + } + const span = try p.listToSpan(init_list.items); + return p.addNode(.{ + .tag = .StructInit, + .main_token = lbrace, + .data = .{ + .lhs = lhs, + .rhs = try p.addExtra(Node.SubRange{ + .start = span.start, + .end = span.end, + }), + }, + }); + } + + const elem_init = try p.expectExpr(); + if (p.eatToken(.RBrace)) |_| { + return p.addNode(.{ + .tag = .ArrayInitOne, + .main_token = lbrace, + .data = .{ + .lhs = lhs, + .rhs = elem_init, + }, + }); + } + + var init_list = std.ArrayList(Node.Index).init(p.gpa); defer init_list.deinit(); - if (try p.parseFieldInit()) |field_init| { - try init_list.append(field_init); - while (p.eatToken(.Comma)) |_| { - const next = (try p.parseFieldInit()) orelse break; - try init_list.append(next); - } - const node = try Node.StructInitializer.alloc(&p.arena.allocator, init_list.items.len); - node.* = .{ + try init_list.append(elem_init); + + while (p.eatToken(.Comma)) |_| { + const next = try p.parseExpr(); + if (next == 0) break; + try init_list.append(next); + } + _ = try p.expectToken(.RBrace); + const span = try p.listToSpan(init_list.items); + return p.addNode(.{ + .tag = .ArrayInit, + .main_token = lbrace, + .data = .{ .lhs = lhs, - .rtoken = try p.expectToken(.RBrace), - .list_len = init_list.items.len, - }; - std.mem.copy(*Node, node.list(), init_list.items); - return &node.base; - } - - if (try p.parseExpr()) |expr| { - try init_list.append(expr); - while (p.eatToken(.Comma)) |_| { - const next = (try p.parseExpr()) orelse break; - try init_list.append(next); - } - const node = try Node.ArrayInitializer.alloc(&p.arena.allocator, init_list.items.len); - node.* = .{ - .lhs = lhs, - .rtoken = try p.expectToken(.RBrace), - .list_len = init_list.items.len, - }; - std.mem.copy(*Node, node.list(), init_list.items); - return &node.base; - } - - const node = try p.arena.allocator.create(Node.StructInitializer); - node.* 
= .{ - .lhs = lhs, - .rtoken = try p.expectToken(.RBrace), - .list_len = 0, - }; - return &node.base; - } - - /// InitList - /// <- LBRACE FieldInit (COMMA FieldInit)* COMMA? RBRACE - /// / LBRACE Expr (COMMA Expr)* COMMA? RBRACE - /// / LBRACE RBRACE - fn parseAnonInitList(p: *Parser, dot: TokenIndex) !?*Node { - const lbrace = p.eatToken(.LBrace) orelse return null; - var init_list = std.ArrayList(*Node).init(p.gpa); - defer init_list.deinit(); - - if (try p.parseFieldInit()) |field_init| { - try init_list.append(field_init); - while (p.eatToken(.Comma)) |_| { - const next = (try p.parseFieldInit()) orelse break; - try init_list.append(next); - } - const node = try Node.StructInitializerDot.alloc(&p.arena.allocator, init_list.items.len); - node.* = .{ - .dot = dot, - .rtoken = try p.expectToken(.RBrace), - .list_len = init_list.items.len, - }; - std.mem.copy(*Node, node.list(), init_list.items); - return &node.base; - } - - if (try p.parseExpr()) |expr| { - try init_list.append(expr); - while (p.eatToken(.Comma)) |_| { - const next = (try p.parseExpr()) orelse break; - try init_list.append(next); - } - const node = try Node.ArrayInitializerDot.alloc(&p.arena.allocator, init_list.items.len); - node.* = .{ - .dot = dot, - .rtoken = try p.expectToken(.RBrace), - .list_len = init_list.items.len, - }; - std.mem.copy(*Node, node.list(), init_list.items); - return &node.base; - } - - const node = try p.arena.allocator.create(Node.StructInitializerDot); - node.* = .{ - .dot = dot, - .rtoken = try p.expectToken(.RBrace), - .list_len = 0, - }; - return &node.base; - } - - /// TypeExpr <- PrefixTypeOp* ErrorUnionExpr - fn parseTypeExpr(p: *Parser) Error!?*Node { - return p.parsePrefixOpExpr(parsePrefixTypeOp, parseErrorUnionExpr); + .rhs = try p.addExtra(Node.SubRange{ + .start = span.start, + .end = span.end, + }), + }, + }); } /// ErrorUnionExpr <- SuffixExpr (EXCLAMATIONMARK TypeExpr)? 
- fn parseErrorUnionExpr(p: *Parser) !?*Node { - const suffix_expr = (try p.parseSuffixExpr()) orelse return null; - - if (try SimpleBinOpParseFn(.Bang, .ErrorUnion)(p)) |node| { - const error_union = node.castTag(.ErrorUnion).?; - const type_expr = try p.expectNode(parseTypeExpr, .{ - .ExpectedTypeExpr = .{ .token = p.tok_i }, - }); - error_union.lhs = suffix_expr; - error_union.rhs = type_expr; - return node; - } - - return suffix_expr; + fn parseErrorUnionExpr(p: *Parser) !Node.Index { + const suffix_expr = try p.parseSuffixExpr(); + if (suffix_expr == 0) return null_node; + const bang = p.eatToken(.Bang) orelse return suffix_expr; + return p.addNode(.{ + .tag = .ErrorUnion, + .main_token = bang, + .data = .{ + .lhs = suffix_expr, + .rhs = try p.expectTypeExpr(), + }, + }); } /// SuffixExpr /// <- KEYWORD_async PrimaryTypeExpr SuffixOp* FnCallArguments /// / PrimaryTypeExpr (SuffixOp / FnCallArguments)* - fn parseSuffixExpr(p: *Parser) !?*Node { - const maybe_async = p.eatToken(.Keyword_async); - if (maybe_async) |async_token| { - const token_fn = p.eatToken(.Keyword_fn); - if (token_fn != null) { - // TODO: remove this hack when async fn rewriting is - // HACK: If we see the keyword `fn`, then we assume that - // we are parsing an async fn proto, and not a call. - // We therefore put back all tokens consumed by the async - // prefix... 
- p.putBackToken(token_fn.?); - p.putBackToken(async_token); - return p.parsePrimaryTypeExpr(); - } - var res = try p.expectNode(parsePrimaryTypeExpr, .{ - .ExpectedPrimaryTypeExpr = .{ .token = p.tok_i }, - }); - - while (try p.parseSuffixOp(res)) |node| { - res = node; - } - - const params = (try p.parseFnCallArguments()) orelse { - try p.errors.append(p.gpa, .{ - .ExpectedParamList = .{ .token = p.tok_i }, - }); - // ignore this, continue parsing - return res; - }; - defer p.gpa.free(params.list); - const node = try Node.Call.alloc(&p.arena.allocator, params.list.len); - node.* = .{ - .lhs = res, - .params_len = params.list.len, - .async_token = async_token, - .rtoken = params.rparen, - }; - std.mem.copy(*Node, node.params(), params.list); - return &node.base; - } - if (try p.parsePrimaryTypeExpr()) |expr| { - var res = expr; + /// FnCallArguments <- LPAREN ExprList RPAREN + /// ExprList <- (Expr COMMA)* Expr? + /// TODO detect when there is 1 or less parameter to the call and emit + /// CallOne instead of Call. 
+ fn parseSuffixExpr(p: *Parser) !Node.Index { + if (p.eatToken(.Keyword_async)) |async_token| { + var res = try p.expectPrimaryTypeExpr(); while (true) { - if (try p.parseSuffixOp(res)) |node| { - res = node; - continue; - } - if (try p.parseFnCallArguments()) |params| { - defer p.gpa.free(params.list); - const call = try Node.Call.alloc(&p.arena.allocator, params.list.len); - call.* = .{ - .lhs = res, - .params_len = params.list.len, - .async_token = null, - .rtoken = params.rparen, - }; - std.mem.copy(*Node, call.params(), params.list); - res = &call.base; - continue; - } - break; + const node = try p.parseSuffixOp(res); + if (node == 0) break; + res = node; } - return res; - } + const lparen = (try p.expectTokenRecoverable(.LParen)) orelse { + try p.warn(.{ .ExpectedParamList = .{ .token = p.tok_i } }); + return res; + }; + const params = try ListParseFn(parseExpr)(p); + _ = try p.expectToken(.RParen); - return null; + return p.addNode(.{ + .tag = .Call, + .main_token = lparen, + .data = .{ + .lhs = res, + .rhs = try p.addExtra(Node.SubRange{ + .start = params.start, + .end = params.end, + }), + }, + }); + } + var res = try p.parsePrimaryTypeExpr(); + if (res == 0) return res; + + while (true) { + const suffix_op = try p.parseSuffixOp(res); + if (suffix_op != 0) { + res = suffix_op; + continue; + } + const lparen = p.eatToken(.LParen) orelse return res; + const params = try ListParseFn(parseExpr)(p); + _ = try p.expectToken(.RParen); + + res = try p.addNode(.{ + .tag = .Call, + .main_token = lparen, + .data = .{ + .lhs = res, + .rhs = try p.addExtra(Node.SubRange{ + .start = params.start, + .end = params.end, + }), + }, + }); + } } /// PrimaryTypeExpr @@ -1521,6 +2196,7 @@ const Parser = struct { /// / CHAR_LITERAL /// / ContainerDecl /// / DOT IDENTIFIER + /// / DOT InitList /// / ErrorSetDecl /// / FLOAT /// / FnProto @@ -1539,260 +2215,497 @@ const Parser = struct { /// / KEYWORD_unreachable /// / STRINGLITERAL /// / SwitchExpr - fn parsePrimaryTypeExpr(p: 
*Parser) !?*Node { - if (try p.parseBuiltinCall()) |node| return node; - if (p.eatToken(.CharLiteral)) |token| { - const node = try p.arena.allocator.create(Node.OneToken); - node.* = .{ - .base = .{ .tag = .CharLiteral }, - .token = token, - }; - return &node.base; - } - if (try p.parseContainerDecl()) |node| return node; - if (try p.parseAnonLiteral()) |node| return node; - if (try p.parseErrorSetDecl()) |node| return node; - if (try p.parseFloatLiteral()) |node| return node; - if (try p.parseFnProto(.as_type, .{})) |node| return node; - if (try p.parseGroupedExpr()) |node| return node; - if (try p.parseLabeledTypeExpr()) |node| return node; - if (try p.parseIdentifier()) |node| return node; - if (try p.parseIfTypeExpr()) |node| return node; - if (try p.parseIntegerLiteral()) |node| return node; - if (p.eatToken(.Keyword_comptime)) |token| { - const expr = (try p.parseTypeExpr()) orelse return null; - const node = try p.arena.allocator.create(Node.Comptime); - node.* = .{ - .doc_comments = null, - .comptime_token = token, - .expr = expr, - }; - return &node.base; - } - if (p.eatToken(.Keyword_error)) |token| { - const period = try p.expectTokenRecoverable(.Period); - const identifier = try p.expectNodeRecoverable(parseIdentifier, .{ - .ExpectedIdentifier = .{ .token = p.tok_i }, - }); - const global_error_set = try p.createLiteral(.ErrorType, token); - if (period == null or identifier == null) return global_error_set; - - const node = try p.arena.allocator.create(Node.SimpleInfixOp); - node.* = .{ - .base = Node{ .tag = .Period }, - .op_token = period.?, - .lhs = global_error_set, - .rhs = identifier.?, - }; - return &node.base; - } - if (p.eatToken(.Keyword_false)) |token| return p.createLiteral(.BoolLiteral, token); - if (p.eatToken(.Keyword_null)) |token| return p.createLiteral(.NullLiteral, token); - if (p.eatToken(.Keyword_anyframe)) |token| { - const node = try p.arena.allocator.create(Node.AnyFrameType); - node.* = .{ - .anyframe_token = token, - .result = 
null, - }; - return &node.base; - } - if (p.eatToken(.Keyword_true)) |token| return p.createLiteral(.BoolLiteral, token); - if (p.eatToken(.Keyword_undefined)) |token| return p.createLiteral(.UndefinedLiteral, token); - if (p.eatToken(.Keyword_unreachable)) |token| return p.createLiteral(.Unreachable, token); - if (try p.parseStringLiteral()) |node| return node; - if (try p.parseSwitchExpr()) |node| return node; - - return null; - } - /// ContainerDecl <- (KEYWORD_extern / KEYWORD_packed)? ContainerDeclAuto - fn parseContainerDecl(p: *Parser) !?*Node { - const layout_token = p.eatToken(.Keyword_extern) orelse - p.eatToken(.Keyword_packed); - - const node = (try p.parseContainerDeclAuto()) orelse { - if (layout_token) |token| - p.putBackToken(token); - return null; - }; - node.cast(Node.ContainerDecl).?.*.layout_token = layout_token; - return node; - } - + /// ContainerDeclAuto <- ContainerDeclType LBRACE ContainerMembers RBRACE + /// InitList + /// <- LBRACE FieldInit (COMMA FieldInit)* COMMA? RBRACE + /// / LBRACE Expr (COMMA Expr)* COMMA? 
RBRACE + /// / LBRACE RBRACE /// ErrorSetDecl <- KEYWORD_error LBRACE IdentifierList RBRACE - fn parseErrorSetDecl(p: *Parser) !?*Node { - const error_token = p.eatToken(.Keyword_error) orelse return null; - if (p.eatToken(.LBrace) == null) { - // Might parse as `KEYWORD_error DOT IDENTIFIER` later in PrimaryTypeExpr, so don't error - p.putBackToken(error_token); - return null; - } - const decls = try p.parseErrorTagList(); - defer p.gpa.free(decls); - const rbrace = try p.expectToken(.RBrace); - - const node = try Node.ErrorSetDecl.alloc(&p.arena.allocator, decls.len); - node.* = .{ - .error_token = error_token, - .decls_len = decls.len, - .rbrace_token = rbrace, - }; - std.mem.copy(*Node, node.decls(), decls); - return &node.base; - } - /// GroupedExpr <- LPAREN Expr RPAREN - fn parseGroupedExpr(p: *Parser) !?*Node { - const lparen = p.eatToken(.LParen) orelse return null; - const expr = try p.expectNode(parseExpr, .{ - .ExpectedExpr = .{ .token = p.tok_i }, - }); - const rparen = try p.expectToken(.RParen); - - const node = try p.arena.allocator.create(Node.GroupedExpression); - node.* = .{ - .lparen = lparen, - .expr = expr, - .rparen = rparen, - }; - return &node.base; - } - /// IfTypeExpr <- IfPrefix TypeExpr (KEYWORD_else Payload? TypeExpr)? - fn parseIfTypeExpr(p: *Parser) !?*Node { - return p.parseIf(parseTypeExpr); - } - /// LabeledTypeExpr /// <- BlockLabel Block /// / BlockLabel? LoopTypeExpr - fn parseLabeledTypeExpr(p: *Parser) !?*Node { - var colon: TokenIndex = undefined; - const label = p.parseBlockLabel(&colon); - - if (label) |label_token| { - if (try p.parseBlock(label_token)) |node| return node; - } - - if (try p.parseLoopTypeExpr()) |node| { - switch (node.tag) { - .For => node.cast(Node.For).?.label = label, - .While => node.cast(Node.While).?.label = label, - else => unreachable, - } - return node; - } - - if (label) |token| { - p.putBackToken(colon); - p.putBackToken(token); - } - return null; - } - /// LoopTypeExpr <- KEYWORD_inline? 
(ForTypeExpr / WhileTypeExpr) - fn parseLoopTypeExpr(p: *Parser) !?*Node { - const inline_token = p.eatToken(.Keyword_inline); + fn parsePrimaryTypeExpr(p: *Parser) !Node.Index { + switch (p.token_tags[p.tok_i]) { + .CharLiteral, + .IntegerLiteral, + .FloatLiteral, + .StringLiteral, + .Keyword_false, + .Keyword_true, + .Keyword_null, + .Keyword_undefined, + .Keyword_unreachable, + .Keyword_anyframe, + => return p.addNode(.{ + .tag = .OneToken, + .main_token = p.nextToken(), + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, + }), - if (try p.parseForTypeExpr()) |node| { - node.cast(Node.For).?.inline_token = inline_token; - return node; + .Builtin => return p.parseBuiltinCall(), + .Keyword_fn => return p.parseFnProto(), + .Keyword_if => return p.parseIf(parseTypeExpr), + .Keyword_switch => return p.parseSwitchExpr(), + + .Keyword_extern, + .Keyword_packed, + => { + p.tok_i += 1; + return p.parseContainerDeclAuto(); + }, + + .Keyword_struct, + .Keyword_opaque, + .Keyword_enum, + .Keyword_union, + => return p.parseContainerDeclAuto(), + + .Keyword_comptime => return p.addNode(.{ + .tag = .Comptime, + .main_token = p.nextToken(), + .data = .{ + .lhs = try p.expectTypeExpr(), + .rhs = undefined, + }, + }), + .MultilineStringLiteralLine => { + const first_line = p.nextToken(); + while (p.token_tags[p.tok_i] == .MultilineStringLiteralLine) { + p.tok_i += 1; + } + return p.addNode(.{ + .tag = .OneToken, + .main_token = first_line, + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, + }); + }, + .Identifier => switch (p.token_tags[p.tok_i + 1]) { + .Colon => switch (p.token_tags[p.tok_i + 2]) { + .Keyword_inline => { + p.tok_i += 3; + switch (p.token_tags[p.tok_i]) { + .Keyword_for => return p.parseForTypeExpr(), + .Keyword_while => return p.parseWhileTypeExpr(), + else => return p.fail(.{ + .ExpectedInlinable = .{ .token = p.tok_i }, + }), + } + }, + .Keyword_for => { + p.tok_i += 2; + return p.parseForTypeExpr(); + }, + .Keyword_while => { + p.tok_i += 2; + 
return p.parseWhileTypeExpr(); + }, + else => return p.addNode(.{ + .tag = .Identifier, + .main_token = p.nextToken(), + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, + }), + }, + else => return p.addNode(.{ + .tag = .Identifier, + .main_token = p.nextToken(), + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, + }), + }, + .Period => switch (p.token_tags[p.tok_i + 1]) { + .Identifier => return p.addNode(.{ + .tag = .EnumLiteral, + .data = .{ + .lhs = p.nextToken(), // dot + .rhs = undefined, + }, + .main_token = p.nextToken(), // identifier + }), + .LBrace => { + const lbrace = p.tok_i + 1; + p.tok_i = lbrace + 1; + + // If there are 0, 1, or 2 items, we can use ArrayInitDotTwo/StructInitDotTwo; + // otherwise we use the full ArrayInitDot/StructInitDot. + + if (p.eatToken(.RBrace)) |_| { + return p.addNode(.{ + .tag = .StructInitDotTwo, + .main_token = lbrace, + .data = .{ + .lhs = 0, + .rhs = 0, + }, + }); + } + const field_init_one = try p.parseFieldInit(); + if (field_init_one != 0) { + const comma_one = p.eatToken(.Comma); + if (p.eatToken(.RBrace)) |_| { + return p.addNode(.{ + .tag = .StructInitDotTwo, + .main_token = lbrace, + .data = .{ + .lhs = field_init_one, + .rhs = 0, + }, + }); + } + if (comma_one == null) { + try p.warn(.{ + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + }); + } + const field_init_two = try p.expectFieldInit(); + const comma_two = p.eatToken(.Comma); + if (p.eatToken(.RBrace)) |_| { + return p.addNode(.{ + .tag = .StructInitDotTwo, + .main_token = lbrace, + .data = .{ + .lhs = field_init_one, + .rhs = field_init_two, + }, + }); + } + if (comma_two == null) { + try p.warn(.{ + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + }); + } + var init_list = std.ArrayList(Node.Index).init(p.gpa); + defer init_list.deinit(); + + try init_list.appendSlice(&[_]Node.Index{ field_init_one, field_init_two }); + + while (true) { + const next = try p.expectFieldInit(); + if (next == 0) break; + try 
init_list.append(next); + switch (p.token_tags[p.nextToken()]) { + .Comma => { + if (p.eatToken(.RBrace)) |_| break; + continue; + }, + .RBrace => break, + .Colon, .RParen, .RBracket => { + p.tok_i -= 1; + return p.fail(.{ + .ExpectedToken = .{ + .token = p.tok_i, + .expected_id = .RBrace, + }, + }); + }, + else => { + p.tok_i -= 1; + try p.warn(.{ + .ExpectedToken = .{ + .token = p.tok_i, + .expected_id = .Comma, + }, + }); + }, + } + } + const span = try p.listToSpan(init_list.items); + return p.addNode(.{ + .tag = .StructInitDot, + .main_token = lbrace, + .data = .{ + .lhs = span.start, + .rhs = span.end, + }, + }); + } + + const elem_init_one = try p.expectExpr(); + const comma_one = p.eatToken(.Comma); + if (p.eatToken(.RBrace)) |_| { + return p.addNode(.{ + .tag = .ArrayInitDotTwo, + .main_token = lbrace, + .data = .{ + .lhs = elem_init_one, + .rhs = 0, + }, + }); + } + if (comma_one == null) { + try p.warn(.{ + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + }); + } + const elem_init_two = try p.expectExpr(); + const comma_two = p.eatToken(.Comma); + if (p.eatToken(.RBrace)) |_| { + return p.addNode(.{ + .tag = .ArrayInitDotTwo, + .main_token = lbrace, + .data = .{ + .lhs = elem_init_one, + .rhs = elem_init_two, + }, + }); + } + if (comma_two == null) { + try p.warn(.{ + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + }); + } + var init_list = std.ArrayList(Node.Index).init(p.gpa); + defer init_list.deinit(); + + try init_list.appendSlice(&[_]Node.Index{ elem_init_one, elem_init_two }); + + while (true) { + const next = try p.expectExpr(); + if (next == 0) break; + try init_list.append(next); + switch (p.token_tags[p.nextToken()]) { + .Comma => continue, + .RBrace => break, + .Colon, .RParen, .RBracket => { + p.tok_i -= 1; + return p.fail(.{ + .ExpectedToken = .{ + .token = p.tok_i, + .expected_id = .RBrace, + }, + }); + }, + else => { + p.tok_i -= 1; + try p.warn(.{ + .ExpectedToken = .{ + .token = p.tok_i, + .expected_id = 
.Comma, + }, + }); + }, + } + } + const span = try p.listToSpan(init_list.items); + return p.addNode(.{ + .tag = .ArrayInitDot, + .main_token = lbrace, + .data = .{ + .lhs = span.start, + .rhs = span.end, + }, + }); + }, + else => return null_node, + }, + .Keyword_error => switch (p.token_tags[p.tok_i + 1]) { + .LBrace => { + const error_token = p.tok_i; + p.tok_i += 2; + + if (p.eatToken(.RBrace)) |_| { + return p.addNode(.{ + .tag = .ErrorSetDecl, + .main_token = error_token, + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, + }); + } + + while (true) { + const doc_comment = p.eatDocComments(); + const identifier = try p.expectToken(.Identifier); + switch (p.token_tags[p.nextToken()]) { + .Comma => { + if (p.eatToken(.RBrace)) |_| break; + continue; + }, + .RBrace => break, + .Colon, .RParen, .RBracket => { + p.tok_i -= 1; + return p.fail(.{ + .ExpectedToken = .{ + .token = p.tok_i, + .expected_id = .RBrace, + }, + }); + }, + else => { + // This is likely just a missing comma; + // give an error but continue parsing this list. 
+ p.tok_i -= 1; + try p.warn(.{ + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + }); + }, + } + } + return p.addNode(.{ + .tag = .ErrorSetDecl, + .main_token = error_token, + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, + }); + }, + else => return p.addNode(.{ + .tag = .ErrorValue, + .main_token = p.nextToken(), + .data = .{ + .lhs = try p.expectToken(.Period), + .rhs = try p.expectToken(.Identifier), + }, + }), + }, + .LParen => return p.addNode(.{ + .tag = .GroupedExpression, + .main_token = p.nextToken(), + .data = .{ + .lhs = try p.expectExpr(), + .rhs = try p.expectToken(.RParen), + }, + }), + else => return null_node, } - - if (try p.parseWhileTypeExpr()) |node| { - node.cast(Node.While).?.inline_token = inline_token; - return node; - } - - if (inline_token == null) return null; - - // If we've seen "inline", there should have been a "for" or "while" - try p.errors.append(p.gpa, .{ - .ExpectedInlinable = .{ .token = p.tok_i }, - }); - return error.ParseError; } + fn expectPrimaryTypeExpr(p: *Parser) !Node.Index { + const node = try p.parsePrimaryTypeExpr(); + if (node == 0) { + return p.fail(.{ .ExpectedPrimaryTypeExpr = .{ .token = p.tok_i } }); + } + return node; + } + + /// ForPrefix <- KEYWORD_for LPAREN Expr RPAREN PtrIndexPayload /// ForTypeExpr <- ForPrefix TypeExpr (KEYWORD_else TypeExpr)? 
- fn parseForTypeExpr(p: *Parser) !?*Node { - const node = (try p.parseForPrefix()) orelse return null; - const for_prefix = node.cast(Node.For).?; + fn parseForTypeExpr(p: *Parser) !Node.Index { + const for_token = p.eatToken(.Keyword_for) orelse return null_node; + _ = try p.expectToken(.LParen); + const array_expr = try p.expectTypeExpr(); + _ = try p.expectToken(.RParen); + _ = try p.parsePtrIndexPayload(); - const type_expr = try p.expectNode(parseTypeExpr, .{ - .ExpectedTypeExpr = .{ .token = p.tok_i }, - }); - for_prefix.body = type_expr; - - if (p.eatToken(.Keyword_else)) |else_token| { - const else_expr = try p.expectNode(parseTypeExpr, .{ - .ExpectedTypeExpr = .{ .token = p.tok_i }, + const then_expr = try p.expectExpr(); + const else_token = p.eatToken(.Keyword_else) orelse { + return p.addNode(.{ + .tag = .ForSimple, + .main_token = for_token, + .data = .{ + .lhs = array_expr, + .rhs = then_expr, + }, }); - - const else_node = try p.arena.allocator.create(Node.Else); - else_node.* = .{ - .else_token = else_token, - .payload = null, - .body = else_expr, - }; - - for_prefix.@"else" = else_node; - } - - return node; + }; + const else_expr = try p.expectTypeExpr(); + return p.addNode(.{ + .tag = .For, + .main_token = for_token, + .data = .{ + .lhs = array_expr, + .rhs = try p.addExtra(Node.If{ + .then_expr = then_expr, + .else_expr = else_expr, + }), + }, + }); } + /// WhilePrefix <- KEYWORD_while LPAREN Expr RPAREN PtrPayload? WhileContinueExpr? /// WhileTypeExpr <- WhilePrefix TypeExpr (KEYWORD_else Payload? TypeExpr)? 
- fn parseWhileTypeExpr(p: *Parser) !?*Node { - const node = (try p.parseWhilePrefix()) orelse return null; - const while_prefix = node.cast(Node.While).?; + fn parseWhileTypeExpr(p: *Parser) !Node.Index { + const while_token = p.eatToken(.Keyword_while) orelse return null_node; + _ = try p.expectToken(.LParen); + const condition = try p.expectExpr(); + _ = try p.expectToken(.RParen); + const then_payload = try p.parsePtrPayload(); + const continue_expr = try p.parseWhileContinueExpr(); - const type_expr = try p.expectNode(parseTypeExpr, .{ - .ExpectedTypeExpr = .{ .token = p.tok_i }, + const then_expr = try p.expectTypeExpr(); + const else_token = p.eatToken(.Keyword_else) orelse { + if (continue_expr == 0) { + return p.addNode(.{ + .tag = if (then_payload == 0) .WhileSimple else .WhileSimpleOptional, + .main_token = while_token, + .data = .{ + .lhs = condition, + .rhs = then_expr, + }, + }); + } else { + return p.addNode(.{ + .tag = if (then_payload == 0) .WhileCont else .WhileContOptional, + .main_token = while_token, + .data = .{ + .lhs = condition, + .rhs = try p.addExtra(Node.WhileCont{ + .continue_expr = continue_expr, + .then_expr = then_expr, + }), + }, + }); + } + }; + const else_payload = try p.parsePayload(); + const else_expr = try p.expectTypeExpr(); + const tag = if (else_payload != 0) + Node.Tag.WhileError + else if (then_payload != 0) + Node.Tag.WhileOptional + else + Node.Tag.While; + return p.addNode(.{ + .tag = tag, + .main_token = while_token, + .data = .{ + .lhs = condition, + .rhs = try p.addExtra(Node.While{ + .continue_expr = continue_expr, + .then_expr = then_expr, + .else_expr = else_expr, + }), + }, }); - while_prefix.body = type_expr; - - if (p.eatToken(.Keyword_else)) |else_token| { - const payload = try p.parsePayload(); - - const else_expr = try p.expectNode(parseTypeExpr, .{ - .ExpectedTypeExpr = .{ .token = p.tok_i }, - }); - - const else_node = try p.arena.allocator.create(Node.Else); - else_node.* = .{ - .else_token = else_token, 
- .payload = null, - .body = else_expr, - }; - - while_prefix.@"else" = else_node; - } - - return node; } /// SwitchExpr <- KEYWORD_switch LPAREN Expr RPAREN LBRACE SwitchProngList RBRACE - fn parseSwitchExpr(p: *Parser) !?*Node { - const switch_token = p.eatToken(.Keyword_switch) orelse return null; + fn parseSwitchExpr(p: *Parser) !Node.Index { + const switch_token = p.eatToken(.Keyword_switch) orelse return null_node; _ = try p.expectToken(.LParen); - const expr_node = try p.expectNode(parseExpr, .{ - .ExpectedExpr = .{ .token = p.tok_i }, - }); + const expr_node = try p.expectExpr(); _ = try p.expectToken(.RParen); _ = try p.expectToken(.LBrace); const cases = try p.parseSwitchProngList(); - defer p.gpa.free(cases); - const rbrace = try p.expectToken(.RBrace); + _ = try p.expectToken(.RBrace); - const node = try Node.Switch.alloc(&p.arena.allocator, cases.len); - node.* = .{ - .switch_token = switch_token, - .expr = expr_node, - .cases_len = cases.len, - .rbrace = rbrace, - }; - std.mem.copy(*Node, node.cases(), cases); - return &node.base; + return p.addNode(.{ + .tag = .Switch, + .main_token = switch_token, + .data = .{ + .lhs = expr_node, + .rhs = try p.addExtra(Node.SubRange{ + .start = cases.start, + .end = cases.end, + }), + }, + }); } /// AsmExpr <- KEYWORD_asm KEYWORD_volatile? LPAREN Expr AsmOutput? RPAREN @@ -1800,854 +2713,402 @@ const Parser = struct { /// AsmInput <- COLON AsmInputList AsmClobbers? /// AsmClobbers <- COLON StringList /// StringList <- (STRINGLITERAL COMMA)* STRINGLITERAL? - fn parseAsmExpr(p: *Parser) !?*Node { - const asm_token = p.eatToken(.Keyword_asm) orelse return null; - const volatile_token = p.eatToken(.Keyword_volatile); + /// AsmOutputList <- (AsmOutputItem COMMA)* AsmOutputItem? + /// AsmInputList <- (AsmInputItem COMMA)* AsmInputItem? 
+ fn parseAsmExpr(p: *Parser) !Node.Index { + const asm_token = p.assertToken(.Keyword_asm); + _ = p.eatToken(.Keyword_volatile); _ = try p.expectToken(.LParen); - const template = try p.expectNode(parseExpr, .{ - .ExpectedExpr = .{ .token = p.tok_i }, - }); + const template = try p.expectExpr(); - var arena_outputs: []Node.Asm.Output = &[0]Node.Asm.Output{}; - var arena_inputs: []Node.Asm.Input = &[0]Node.Asm.Input{}; - var arena_clobbers: []*Node = &[0]*Node{}; + if (p.eatToken(.RParen)) |_| { + return p.addNode(.{ + .tag = .AsmSimple, + .main_token = asm_token, + .data = .{ + .lhs = template, + .rhs = undefined, + }, + }); + } - if (p.eatToken(.Colon) != null) { - const outputs = try p.parseAsmOutputList(); - defer p.gpa.free(outputs); - arena_outputs = try p.arena.allocator.dupe(Node.Asm.Output, outputs); + _ = try p.expectToken(.Colon); - if (p.eatToken(.Colon) != null) { - const inputs = try p.parseAsmInputList(); - defer p.gpa.free(inputs); - arena_inputs = try p.arena.allocator.dupe(Node.Asm.Input, inputs); + var list = std.ArrayList(Node.Index).init(p.gpa); + defer list.deinit(); - if (p.eatToken(.Colon) != null) { - const clobbers = try ListParseFn(*Node, parseStringLiteral)(p); - defer p.gpa.free(clobbers); - arena_clobbers = try p.arena.allocator.dupe(*Node, clobbers); + while (true) { + const output_item = try p.parseAsmOutputItem(); + if (output_item == 0) break; + try list.append(output_item); + switch (p.token_tags[p.tok_i]) { + .Comma => p.tok_i += 1, + .Colon, .RParen, .RBrace, .RBracket => break, // All possible delimiters. + else => { + // This is likely just a missing comma; + // give an error but continue parsing this list. 
+ try p.warn(.{ + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + }); + }, + } + } + if (p.eatToken(.Colon)) |_| { + while (true) { + const input_item = try p.parseAsmInputItem(); + if (input_item == 0) break; + try list.append(input_item); + switch (p.token_tags[p.tok_i]) { + .Comma => p.tok_i += 1, + .Colon, .RParen, .RBrace, .RBracket => break, // All possible delimiters. + else => { + // This is likely just a missing comma; + // give an error but continue parsing this list. + try p.warn(.{ + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + }); + }, + } + } + if (p.eatToken(.Colon)) |_| { + while (p.eatToken(.StringLiteral)) |_| { + switch (p.token_tags[p.tok_i]) { + .Comma => p.tok_i += 1, + .Colon, .RParen, .RBrace, .RBracket => break, + else => { + // This is likely just a missing comma; + // give an error but continue parsing this list. + try p.warn(.{ + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + }); + }, + } } } } - - const node = try p.arena.allocator.create(Node.Asm); - node.* = .{ - .asm_token = asm_token, - .volatile_token = volatile_token, - .template = template, - .outputs = arena_outputs, - .inputs = arena_inputs, - .clobbers = arena_clobbers, - .rparen = try p.expectToken(.RParen), - }; - - return &node.base; - } - - /// DOT IDENTIFIER - fn parseAnonLiteral(p: *Parser) !?*Node { - const dot = p.eatToken(.Period) orelse return null; - - // anon enum literal - if (p.eatToken(.Identifier)) |name| { - const node = try p.arena.allocator.create(Node.EnumLiteral); - node.* = .{ - .dot = dot, - .name = name, - }; - return &node.base; - } - - if (try p.parseAnonInitList(dot)) |node| { - return node; - } - - p.putBackToken(dot); - return null; + _ = try p.expectToken(.RParen); + const span = try p.listToSpan(list.items); + return p.addNode(.{ + .tag = .Asm, + .main_token = asm_token, + .data = .{ + .lhs = template, + .rhs = try p.addExtra(Node.SubRange{ + .start = span.start, + .end = span.end, + }), + }, 
+ }); } /// AsmOutputItem <- LBRACKET IDENTIFIER RBRACKET STRINGLITERAL LPAREN (MINUSRARROW TypeExpr / IDENTIFIER) RPAREN - fn parseAsmOutputItem(p: *Parser) !?Node.Asm.Output { - const lbracket = p.eatToken(.LBracket) orelse return null; - const name = try p.expectNode(parseIdentifier, .{ - .ExpectedIdentifier = .{ .token = p.tok_i }, - }); + fn parseAsmOutputItem(p: *Parser) !Node.Index { + _ = p.eatToken(.LBracket) orelse return null_node; + const identifier = try p.expectToken(.Identifier); _ = try p.expectToken(.RBracket); - - const constraint = try p.expectNode(parseStringLiteral, .{ - .ExpectedStringLiteral = .{ .token = p.tok_i }, - }); - + const constraint = try p.expectToken(.StringLiteral); _ = try p.expectToken(.LParen); - const kind: Node.Asm.Output.Kind = blk: { - if (p.eatToken(.Arrow) != null) { - const return_ident = try p.expectNode(parseTypeExpr, .{ - .ExpectedTypeExpr = .{ .token = p.tok_i }, - }); - break :blk .{ .Return = return_ident }; - } - const variable = try p.expectNode(parseIdentifier, .{ - .ExpectedIdentifier = .{ .token = p.tok_i }, - }); - break :blk .{ .Variable = variable.castTag(.Identifier).? 
}; - }; - const rparen = try p.expectToken(.RParen); - - return Node.Asm.Output{ - .lbracket = lbracket, - .symbolic_name = name, - .constraint = constraint, - .kind = kind, - .rparen = rparen, - }; + const rhs: Node.Index = if (p.eatToken(.Arrow)) |_| try p.expectTypeExpr() else null_node; + _ = try p.expectToken(.RParen); + return p.addNode(.{ + .tag = .AsmOutput, + .main_token = identifier, + .data = .{ + .lhs = constraint, + .rhs = rhs, + }, + }); } /// AsmInputItem <- LBRACKET IDENTIFIER RBRACKET STRINGLITERAL LPAREN Expr RPAREN - fn parseAsmInputItem(p: *Parser) !?Node.Asm.Input { - const lbracket = p.eatToken(.LBracket) orelse return null; - const name = try p.expectNode(parseIdentifier, .{ - .ExpectedIdentifier = .{ .token = p.tok_i }, - }); + fn parseAsmInputItem(p: *Parser) !Node.Index { + _ = p.eatToken(.LBracket) orelse return null_node; + const identifier = try p.expectToken(.Identifier); _ = try p.expectToken(.RBracket); - - const constraint = try p.expectNode(parseStringLiteral, .{ - .ExpectedStringLiteral = .{ .token = p.tok_i }, - }); - + const constraint = try p.expectToken(.StringLiteral); _ = try p.expectToken(.LParen); - const expr = try p.expectNode(parseExpr, .{ - .ExpectedExpr = .{ .token = p.tok_i }, + const expr = try p.expectExpr(); + _ = try p.expectToken(.RParen); + return p.addNode(.{ + .tag = .AsmInput, + .main_token = identifier, + .data = .{ + .lhs = constraint, + .rhs = expr, + }, }); - const rparen = try p.expectToken(.RParen); - - return Node.Asm.Input{ - .lbracket = lbracket, - .symbolic_name = name, - .constraint = constraint, - .expr = expr, - .rparen = rparen, - }; } /// BreakLabel <- COLON IDENTIFIER - fn parseBreakLabel(p: *Parser) !?TokenIndex { - _ = p.eatToken(.Colon) orelse return null; - const ident = try p.expectToken(.Identifier); - return ident; + fn parseBreakLabel(p: *Parser) !TokenIndex { + _ = p.eatToken(.Colon) orelse return @as(TokenIndex, 0); + return p.expectToken(.Identifier); } /// BlockLabel <- IDENTIFIER 
COLON - fn parseBlockLabel(p: *Parser, colon_token: *TokenIndex) ?TokenIndex { - const identifier = p.eatToken(.Identifier) orelse return null; - if (p.eatToken(.Colon)) |colon| { - colon_token.* = colon; + fn parseBlockLabel(p: *Parser) TokenIndex { + if (p.token_tags[p.tok_i] == .Identifier and + p.token_tags[p.tok_i + 1] == .Colon) + { + const identifier = p.tok_i; + p.tok_i += 2; return identifier; } - p.putBackToken(identifier); - return null; + return 0; } /// FieldInit <- DOT IDENTIFIER EQUAL Expr - fn parseFieldInit(p: *Parser) !?*Node { - const period_token = p.eatToken(.Period) orelse return null; - const name_token = p.eatToken(.Identifier) orelse { - // Because of anon literals `.{` is also valid. - p.putBackToken(period_token); - return null; - }; - const eq_token = p.eatToken(.Equal) orelse { - // `.Name` may also be an enum literal, which is a later rule. - p.putBackToken(name_token); - p.putBackToken(period_token); - return null; - }; - const expr_node = try p.expectNode(parseExpr, .{ - .ExpectedExpr = .{ .token = p.tok_i }, - }); + fn parseFieldInit(p: *Parser) !Node.Index { + if (p.token_tags[p.tok_i + 0] == .Period and + p.token_tags[p.tok_i + 1] == .Identifier and + p.token_tags[p.tok_i + 2] == .Equal) + { + p.tok_i += 3; + return p.expectExpr(); + } else { + return null_node; + } + } - const node = try p.arena.allocator.create(Node.FieldInitializer); - node.* = .{ - .period_token = period_token, - .name_token = name_token, - .expr = expr_node, - }; - return &node.base; + fn expectFieldInit(p: *Parser) !Node.Index { + _ = try p.expectToken(.Period); + _ = try p.expectToken(.Identifier); + _ = try p.expectToken(.Equal); + return p.expectExpr(); } /// WhileContinueExpr <- COLON LPAREN AssignExpr RPAREN - fn parseWhileContinueExpr(p: *Parser) !?*Node { - _ = p.eatToken(.Colon) orelse return null; + fn parseWhileContinueExpr(p: *Parser) !Node.Index { + _ = p.eatToken(.Colon) orelse return null_node; _ = try p.expectToken(.LParen); - const node = try 
p.expectNode(parseAssignExpr, .{ - .ExpectedExprOrAssignment = .{ .token = p.tok_i }, - }); + const node = try p.parseAssignExpr(); + if (node == 0) return p.fail(.{ .ExpectedExprOrAssignment = .{ .token = p.tok_i } }); _ = try p.expectToken(.RParen); return node; } /// LinkSection <- KEYWORD_linksection LPAREN Expr RPAREN - fn parseLinkSection(p: *Parser) !?*Node { - _ = p.eatToken(.Keyword_linksection) orelse return null; + fn parseLinkSection(p: *Parser) !Node.Index { + _ = p.eatToken(.Keyword_linksection) orelse return null_node; _ = try p.expectToken(.LParen); - const expr_node = try p.expectNode(parseExpr, .{ - .ExpectedExpr = .{ .token = p.tok_i }, - }); + const expr_node = try p.expectExpr(); _ = try p.expectToken(.RParen); return expr_node; } /// CallConv <- KEYWORD_callconv LPAREN Expr RPAREN - fn parseCallconv(p: *Parser) !?*Node { - _ = p.eatToken(.Keyword_callconv) orelse return null; + fn parseCallconv(p: *Parser) !Node.Index { + _ = p.eatToken(.Keyword_callconv) orelse return null_node; _ = try p.expectToken(.LParen); - const expr_node = try p.expectNode(parseExpr, .{ - .ExpectedExpr = .{ .token = p.tok_i }, - }); + const expr_node = try p.expectExpr(); _ = try p.expectToken(.RParen); return expr_node; } - /// ParamDecl <- (KEYWORD_noalias / KEYWORD_comptime)? (IDENTIFIER COLON)? 
ParamType - fn parseParamDecl(p: *Parser) !?Node.FnProto.ParamDecl { - const doc_comments = try p.parseDocComment(); - const noalias_token = p.eatToken(.Keyword_noalias); - const comptime_token = if (noalias_token == null) p.eatToken(.Keyword_comptime) else null; - const name_token = blk: { - const identifier = p.eatToken(.Identifier) orelse break :blk null; - if (p.eatToken(.Colon) != null) break :blk identifier; - p.putBackToken(identifier); // ParamType may also be an identifier - break :blk null; - }; - const param_type = (try p.parseParamType()) orelse { - // Only return cleanly if no keyword, identifier, or doc comment was found - if (noalias_token == null and - comptime_token == null and - name_token == null and - doc_comments == null) - { - return null; - } - try p.errors.append(p.gpa, .{ - .ExpectedParamType = .{ .token = p.tok_i }, - }); - return error.ParseError; - }; - - return Node.FnProto.ParamDecl{ - .doc_comments = doc_comments, - .comptime_token = comptime_token, - .noalias_token = noalias_token, - .name_token = name_token, - .param_type = param_type, - }; - } - + /// ParamDecl + /// <- (KEYWORD_noalias / KEYWORD_comptime)? (IDENTIFIER COLON)? ParamType + /// / DOT3 /// ParamType /// <- Keyword_anytype - /// / DOT3 /// / TypeExpr - fn parseParamType(p: *Parser) !?Node.FnProto.ParamDecl.ParamType { - // TODO cast from tuple to error union is broken - const P = Node.FnProto.ParamDecl.ParamType; - if (try p.parseAnyType()) |node| return P{ .any_type = node }; - if (try p.parseTypeExpr()) |node| return P{ .type_expr = node }; - return null; - } - - /// IfPrefix <- KEYWORD_if LPAREN Expr RPAREN PtrPayload? 
- fn parseIfPrefix(p: *Parser) !?*Node { - const if_token = p.eatToken(.Keyword_if) orelse return null; - _ = try p.expectToken(.LParen); - const condition = try p.expectNode(parseExpr, .{ - .ExpectedExpr = .{ .token = p.tok_i }, - }); - _ = try p.expectToken(.RParen); - const payload = try p.parsePtrPayload(); - - const node = try p.arena.allocator.create(Node.If); - node.* = .{ - .if_token = if_token, - .condition = condition, - .payload = payload, - .body = undefined, // set by caller - .@"else" = null, - }; - return &node.base; - } - - /// WhilePrefix <- KEYWORD_while LPAREN Expr RPAREN PtrPayload? WhileContinueExpr? - fn parseWhilePrefix(p: *Parser) !?*Node { - const while_token = p.eatToken(.Keyword_while) orelse return null; - - _ = try p.expectToken(.LParen); - const condition = try p.expectNode(parseExpr, .{ - .ExpectedExpr = .{ .token = p.tok_i }, - }); - _ = try p.expectToken(.RParen); - - const payload = try p.parsePtrPayload(); - const continue_expr = try p.parseWhileContinueExpr(); - - const node = try p.arena.allocator.create(Node.While); - node.* = .{ - .label = null, - .inline_token = null, - .while_token = while_token, - .condition = condition, - .payload = payload, - .continue_expr = continue_expr, - .body = undefined, // set by caller - .@"else" = null, - }; - return &node.base; - } - - /// ForPrefix <- KEYWORD_for LPAREN Expr RPAREN PtrIndexPayload - fn parseForPrefix(p: *Parser) !?*Node { - const for_token = p.eatToken(.Keyword_for) orelse return null; - - _ = try p.expectToken(.LParen); - const array_expr = try p.expectNode(parseExpr, .{ - .ExpectedExpr = .{ .token = p.tok_i }, - }); - _ = try p.expectToken(.RParen); - - const payload = try p.expectNode(parsePtrIndexPayload, .{ - .ExpectedPayload = .{ .token = p.tok_i }, - }); - - const node = try p.arena.allocator.create(Node.For); - node.* = .{ - .label = null, - .inline_token = null, - .for_token = for_token, - .array_expr = array_expr, - .payload = payload, - .body = undefined, // set by 
caller - .@"else" = null, - }; - return &node.base; + /// This function can return null nodes and then still return nodes afterwards, + /// such as in the case of anytype and `...`. Caller must look for rparen to find + /// out when there are no more param decls left. + fn expectParamDecl(p: *Parser) !Node.Index { + _ = p.eatDocComments(); + switch (p.token_tags[p.tok_i]) { + .Keyword_noalias, .Keyword_comptime => p.tok_i += 1, + .Ellipsis3 => { + p.tok_i += 1; + return null_node; + }, + else => {}, + } + if (p.token_tags[p.tok_i] == .Identifier and + p.token_tags[p.tok_i + 1] == .Colon) + { + p.tok_i += 2; + } + switch (p.token_tags[p.tok_i]) { + .Keyword_anytype => { + p.tok_i += 1; + return null_node; + }, + else => return p.expectTypeExpr(), + } } /// Payload <- PIPE IDENTIFIER PIPE - fn parsePayload(p: *Parser) !?*Node { - const lpipe = p.eatToken(.Pipe) orelse return null; - const identifier = try p.expectNode(parseIdentifier, .{ - .ExpectedIdentifier = .{ .token = p.tok_i }, - }); - const rpipe = try p.expectToken(.Pipe); - - const node = try p.arena.allocator.create(Node.Payload); - node.* = .{ - .lpipe = lpipe, - .error_symbol = identifier, - .rpipe = rpipe, - }; - return &node.base; + fn parsePayload(p: *Parser) !TokenIndex { + _ = p.eatToken(.Pipe) orelse return @as(TokenIndex, 0); + const identifier = try p.expectToken(.Identifier); + _ = try p.expectToken(.Pipe); + return identifier; } /// PtrPayload <- PIPE ASTERISK? 
IDENTIFIER PIPE - fn parsePtrPayload(p: *Parser) !?*Node { - const lpipe = p.eatToken(.Pipe) orelse return null; - const asterisk = p.eatToken(.Asterisk); - const identifier = try p.expectNode(parseIdentifier, .{ - .ExpectedIdentifier = .{ .token = p.tok_i }, - }); - const rpipe = try p.expectToken(.Pipe); - - const node = try p.arena.allocator.create(Node.PointerPayload); - node.* = .{ - .lpipe = lpipe, - .ptr_token = asterisk, - .value_symbol = identifier, - .rpipe = rpipe, - }; - return &node.base; + fn parsePtrPayload(p: *Parser) !TokenIndex { + _ = p.eatToken(.Pipe) orelse return @as(TokenIndex, 0); + _ = p.eatToken(.Asterisk); + const identifier = try p.expectToken(.Identifier); + _ = try p.expectToken(.Pipe); + return identifier; } /// PtrIndexPayload <- PIPE ASTERISK? IDENTIFIER (COMMA IDENTIFIER)? PIPE - fn parsePtrIndexPayload(p: *Parser) !?*Node { - const lpipe = p.eatToken(.Pipe) orelse return null; - const asterisk = p.eatToken(.Asterisk); - const identifier = try p.expectNode(parseIdentifier, .{ - .ExpectedIdentifier = .{ .token = p.tok_i }, - }); - - const index = if (p.eatToken(.Comma) == null) - null - else - try p.expectNode(parseIdentifier, .{ - .ExpectedIdentifier = .{ .token = p.tok_i }, - }); - - const rpipe = try p.expectToken(.Pipe); - - const node = try p.arena.allocator.create(Node.PointerIndexPayload); - node.* = .{ - .lpipe = lpipe, - .ptr_token = asterisk, - .value_symbol = identifier, - .index_symbol = index, - .rpipe = rpipe, - }; - return &node.base; + /// Returns the first identifier token, if any. + fn parsePtrIndexPayload(p: *Parser) !TokenIndex { + _ = p.eatToken(.Pipe) orelse return @as(TokenIndex, 0); + _ = p.eatToken(.Asterisk); + const identifier = try p.expectToken(.Identifier); + if (p.eatToken(.Comma) != null) { + _ = try p.expectToken(.Identifier); + } + _ = try p.expectToken(.Pipe); + return identifier; } /// SwitchProng <- SwitchCase EQUALRARROW PtrPayload? 
AssignExpr - fn parseSwitchProng(p: *Parser) !?*Node { - const node = (try p.parseSwitchCase()) orelse return null; - const arrow = try p.expectToken(.EqualAngleBracketRight); - const payload = try p.parsePtrPayload(); - const expr = try p.expectNode(parseAssignExpr, .{ - .ExpectedExprOrAssignment = .{ .token = p.tok_i }, - }); - - const switch_case = node.cast(Node.SwitchCase).?; - switch_case.arrow_token = arrow; - switch_case.payload = payload; - switch_case.expr = expr; - - return node; - } - /// SwitchCase /// <- SwitchItem (COMMA SwitchItem)* COMMA? /// / KEYWORD_else - fn parseSwitchCase(p: *Parser) !?*Node { - var list = std.ArrayList(*Node).init(p.gpa); + fn parseSwitchProng(p: *Parser) !Node.Index { + if (p.eatToken(.Keyword_else)) |_| { + const arrow_token = try p.expectToken(.EqualAngleBracketRight); + _ = try p.parsePtrPayload(); + return p.addNode(.{ + .tag = .SwitchCaseOne, + .main_token = arrow_token, + .data = .{ + .lhs = 0, + .rhs = try p.expectAssignExpr(), + }, + }); + } + const first_item = try p.parseSwitchItem(); + if (first_item == 0) return null_node; + + if (p.token_tags[p.tok_i] == .RBrace) { + const arrow_token = try p.expectToken(.EqualAngleBracketRight); + _ = try p.parsePtrPayload(); + return p.addNode(.{ + .tag = .SwitchCaseOne, + .main_token = arrow_token, + .data = .{ + .lhs = first_item, + .rhs = try p.expectAssignExpr(), + }, + }); + } + + var list = std.ArrayList(Node.Index).init(p.gpa); defer list.deinit(); - if (try p.parseSwitchItem()) |first_item| { - try list.append(first_item); - while (p.eatToken(.Comma) != null) { - const next_item = (try p.parseSwitchItem()) orelse break; - try list.append(next_item); - } - } else if (p.eatToken(.Keyword_else)) |else_token| { - const else_node = try p.arena.allocator.create(Node.SwitchElse); - else_node.* = .{ - .token = else_token, - }; - try list.append(&else_node.base); - } else return null; - - const node = try Node.SwitchCase.alloc(&p.arena.allocator, list.items.len); - node.* = .{ 
- .items_len = list.items.len, - .arrow_token = undefined, // set by caller - .payload = null, - .expr = undefined, // set by caller - }; - std.mem.copy(*Node, node.items(), list.items); - return &node.base; + try list.append(first_item); + while (p.eatToken(.Comma)) |_| { + const next_item = try p.parseSwitchItem(); + if (next_item == 0) break; + try list.append(next_item); + } + const span = try p.listToSpan(list.items); + const arrow_token = try p.expectToken(.EqualAngleBracketRight); + _ = try p.parsePtrPayload(); + return p.addNode(.{ + .tag = .SwitchCaseMulti, + .main_token = arrow_token, + .data = .{ + .lhs = try p.addExtra(Node.SubRange{ + .start = span.start, + .end = span.end, + }), + .rhs = try p.expectAssignExpr(), + }, + }); } /// SwitchItem <- Expr (DOT3 Expr)? - fn parseSwitchItem(p: *Parser) !?*Node { - const expr = (try p.parseExpr()) orelse return null; - if (p.eatToken(.Ellipsis3)) |token| { - const range_end = try p.expectNode(parseExpr, .{ - .ExpectedExpr = .{ .token = p.tok_i }, - }); + fn parseSwitchItem(p: *Parser) !Node.Index { + const expr = try p.parseExpr(); + if (expr == 0) return null_node; - const node = try p.arena.allocator.create(Node.SimpleInfixOp); - node.* = .{ - .base = Node{ .tag = .Range }, - .op_token = token, - .lhs = expr, - .rhs = range_end, - }; - return &node.base; + if (p.eatToken(.Ellipsis3)) |token| { + return p.addNode(.{ + .tag = .SwitchRange, + .main_token = token, + .data = .{ + .lhs = expr, + .rhs = try p.expectExpr(), + }, + }); } return expr; } - /// AssignOp - /// <- ASTERISKEQUAL - /// / SLASHEQUAL - /// / PERCENTEQUAL - /// / PLUSEQUAL - /// / MINUSEQUAL - /// / LARROW2EQUAL - /// / RARROW2EQUAL - /// / AMPERSANDEQUAL - /// / CARETEQUAL - /// / PIPEEQUAL - /// / ASTERISKPERCENTEQUAL - /// / PLUSPERCENTEQUAL - /// / MINUSPERCENTEQUAL - /// / EQUAL - fn parseAssignOp(p: *Parser) !?*Node { - const token = p.nextToken(); - const op: Node.Tag = switch (p.token_ids[token]) { - .AsteriskEqual => .AssignMul, - 
.SlashEqual => .AssignDiv, - .PercentEqual => .AssignMod, - .PlusEqual => .AssignAdd, - .MinusEqual => .AssignSub, - .AngleBracketAngleBracketLeftEqual => .AssignBitShiftLeft, - .AngleBracketAngleBracketRightEqual => .AssignBitShiftRight, - .AmpersandEqual => .AssignBitAnd, - .CaretEqual => .AssignBitXor, - .PipeEqual => .AssignBitOr, - .AsteriskPercentEqual => .AssignMulWrap, - .PlusPercentEqual => .AssignAddWrap, - .MinusPercentEqual => .AssignSubWrap, - .Equal => .Assign, - else => { - p.putBackToken(token); - return null; - }, + const PtrModifiers = struct { + align_node: Node.Index, + bit_range_start: Node.Index, + bit_range_end: Node.Index, + }; + + fn parsePtrModifiers(p: *Parser) !PtrModifiers { + var result: PtrModifiers = .{ + .align_node = 0, + .bit_range_start = 0, + .bit_range_end = 0, }; - - const node = try p.arena.allocator.create(Node.SimpleInfixOp); - node.* = .{ - .base = .{ .tag = op }, - .op_token = token, - .lhs = undefined, // set by caller - .rhs = undefined, // set by caller - }; - return &node.base; - } - - /// CompareOp - /// <- EQUALEQUAL - /// / EXCLAMATIONMARKEQUAL - /// / LARROW - /// / RARROW - /// / LARROWEQUAL - /// / RARROWEQUAL - fn parseCompareOp(p: *Parser) !?*Node { - const token = p.nextToken(); - const op: Node.Tag = switch (p.token_ids[token]) { - .EqualEqual => .EqualEqual, - .BangEqual => .BangEqual, - .AngleBracketLeft => .LessThan, - .AngleBracketRight => .GreaterThan, - .AngleBracketLeftEqual => .LessOrEqual, - .AngleBracketRightEqual => .GreaterOrEqual, - else => { - p.putBackToken(token); - return null; - }, - }; - - return p.createInfixOp(token, op); - } - - /// BitwiseOp - /// <- AMPERSAND - /// / CARET - /// / PIPE - /// / KEYWORD_orelse - /// / KEYWORD_catch Payload? 
- fn parseBitwiseOp(p: *Parser) !?*Node { - const token = p.nextToken(); - const op: Node.Tag = switch (p.token_ids[token]) { - .Ampersand => .BitAnd, - .Caret => .BitXor, - .Pipe => .BitOr, - .Keyword_orelse => .OrElse, - .Keyword_catch => { - const payload = try p.parsePayload(); - const node = try p.arena.allocator.create(Node.Catch); - node.* = .{ - .op_token = token, - .lhs = undefined, // set by caller - .rhs = undefined, // set by caller - .payload = payload, - }; - return &node.base; - }, - else => { - p.putBackToken(token); - return null; - }, - }; - - return p.createInfixOp(token, op); - } - - /// BitShiftOp - /// <- LARROW2 - /// / RARROW2 - fn parseBitShiftOp(p: *Parser) !?*Node { - const token = p.nextToken(); - const op: Node.Tag = switch (p.token_ids[token]) { - .AngleBracketAngleBracketLeft => .BitShiftLeft, - .AngleBracketAngleBracketRight => .BitShiftRight, - else => { - p.putBackToken(token); - return null; - }, - }; - - return p.createInfixOp(token, op); - } - - /// AdditionOp - /// <- PLUS - /// / MINUS - /// / PLUS2 - /// / PLUSPERCENT - /// / MINUSPERCENT - fn parseAdditionOp(p: *Parser) !?*Node { - const token = p.nextToken(); - const op: Node.Tag = switch (p.token_ids[token]) { - .Plus => .Add, - .Minus => .Sub, - .PlusPlus => .ArrayCat, - .PlusPercent => .AddWrap, - .MinusPercent => .SubWrap, - else => { - p.putBackToken(token); - return null; - }, - }; - - return p.createInfixOp(token, op); - } - - /// MultiplyOp - /// <- PIPE2 - /// / ASTERISK - /// / SLASH - /// / PERCENT - /// / ASTERISK2 - /// / ASTERISKPERCENT - fn parseMultiplyOp(p: *Parser) !?*Node { - const token = p.nextToken(); - const op: Node.Tag = switch (p.token_ids[token]) { - .PipePipe => .MergeErrorSets, - .Asterisk => .Mul, - .Slash => .Div, - .Percent => .Mod, - .AsteriskAsterisk => .ArrayMult, - .AsteriskPercent => .MulWrap, - else => { - p.putBackToken(token); - return null; - }, - }; - - return p.createInfixOp(token, op); - } - - /// PrefixOp - /// <- EXCLAMATIONMARK 
- /// / MINUS - /// / TILDE - /// / MINUSPERCENT - /// / AMPERSAND - /// / KEYWORD_try - /// / KEYWORD_await - fn parsePrefixOp(p: *Parser) !?*Node { - const token = p.nextToken(); - switch (p.token_ids[token]) { - .Bang => return p.allocSimplePrefixOp(.BoolNot, token), - .Minus => return p.allocSimplePrefixOp(.Negation, token), - .Tilde => return p.allocSimplePrefixOp(.BitNot, token), - .MinusPercent => return p.allocSimplePrefixOp(.NegationWrap, token), - .Ampersand => return p.allocSimplePrefixOp(.AddressOf, token), - .Keyword_try => return p.allocSimplePrefixOp(.Try, token), - .Keyword_await => return p.allocSimplePrefixOp(.Await, token), - else => { - p.putBackToken(token); - return null; - }, - } - } - - fn allocSimplePrefixOp(p: *Parser, comptime tag: Node.Tag, token: TokenIndex) !?*Node { - const node = try p.arena.allocator.create(Node.SimplePrefixOp); - node.* = .{ - .base = .{ .tag = tag }, - .op_token = token, - .rhs = undefined, // set by caller - }; - return &node.base; - } - - // TODO: ArrayTypeStart is either an array or a slice, but const/allowzero only work on - // pointers. Consider updating this rule: - // ... - // / ArrayTypeStart - // / SliceTypeStart (ByteAlign / KEYWORD_const / KEYWORD_volatile / KEYWORD_allowzero)* - // / PtrTypeStart ... - - /// PrefixTypeOp - /// <- QUESTIONMARK - /// / KEYWORD_anyframe MINUSRARROW - /// / ArrayTypeStart (ByteAlign / KEYWORD_const / KEYWORD_volatile / KEYWORD_allowzero)* - /// / PtrTypeStart (KEYWORD_align LPAREN Expr (COLON INTEGER COLON INTEGER)? 
RPAREN / KEYWORD_const / KEYWORD_volatile / KEYWORD_allowzero)* - fn parsePrefixTypeOp(p: *Parser) !?*Node { - if (p.eatToken(.QuestionMark)) |token| { - const node = try p.arena.allocator.create(Node.SimplePrefixOp); - node.* = .{ - .base = .{ .tag = .OptionalType }, - .op_token = token, - .rhs = undefined, // set by caller - }; - return &node.base; - } - - if (p.eatToken(.Keyword_anyframe)) |token| { - const arrow = p.eatToken(.Arrow) orelse { - p.putBackToken(token); - return null; - }; - const node = try p.arena.allocator.create(Node.AnyFrameType); - node.* = .{ - .anyframe_token = token, - .result = .{ - .arrow_token = arrow, - .return_type = undefined, // set by caller - }, - }; - return &node.base; - } - - if (try p.parsePtrTypeStart()) |node| { - // If the token encountered was **, there will be two nodes instead of one. - // The attributes should be applied to the rightmost operator. - var ptr_info = if (node.cast(Node.PtrType)) |ptr_type| - if (p.token_ids[ptr_type.op_token] == .AsteriskAsterisk) - &ptr_type.rhs.cast(Node.PtrType).?.ptr_info - else - &ptr_type.ptr_info - else if (node.cast(Node.SliceType)) |slice_type| - &slice_type.ptr_info - else - unreachable; - - while (true) { - if (p.eatToken(.Keyword_align)) |align_token| { - const lparen = try p.expectToken(.LParen); - const expr_node = try p.expectNode(parseExpr, .{ - .ExpectedExpr = .{ .token = p.tok_i }, - }); - - // Optional bit range - const bit_range = if (p.eatToken(.Colon)) |_| bit_range_value: { - const range_start = try p.expectNode(parseIntegerLiteral, .{ - .ExpectedIntegerLiteral = .{ .token = p.tok_i }, + var saw_const = false; + var saw_volatile = false; + var saw_allowzero = false; + while (true) { + switch (p.token_tags[p.tok_i]) { + .Keyword_align => { + if (result.align_node != 0) { + try p.warn(.{ + .ExtraAlignQualifier = .{ .token = p.tok_i }, }); + } + p.tok_i += 1; + _ = try p.expectToken(.LParen); + result.align_node = try p.expectExpr(); + + if (p.eatToken(.Colon)) |_| { + 
result.bit_range_start = try p.expectExpr(); _ = try p.expectToken(.Colon); - const range_end = try p.expectNode(parseIntegerLiteral, .{ - .ExpectedIntegerLiteral = .{ .token = p.tok_i }, - }); + result.bit_range_end = try p.expectExpr(); + } - break :bit_range_value ast.PtrInfo.Align.BitRange{ - .start = range_start, - .end = range_end, - }; - } else null; _ = try p.expectToken(.RParen); - - if (ptr_info.align_info != null) { - try p.errors.append(p.gpa, .{ - .ExtraAlignQualifier = .{ .token = p.tok_i - 1 }, + }, + .Keyword_const => { + if (saw_const) { + try p.warn(.{ + .ExtraConstQualifier = .{ .token = p.tok_i }, }); - continue; } - - ptr_info.align_info = ast.PtrInfo.Align{ - .node = expr_node, - .bit_range = bit_range, - }; - - continue; - } - if (p.eatToken(.Keyword_const)) |const_token| { - if (ptr_info.const_token != null) { - try p.errors.append(p.gpa, .{ - .ExtraConstQualifier = .{ .token = p.tok_i - 1 }, + p.tok_i += 1; + saw_const = true; + }, + .Keyword_volatile => { + if (saw_volatile) { + try p.warn(.{ + .ExtraVolatileQualifier = .{ .token = p.tok_i }, }); - continue; } - ptr_info.const_token = const_token; - continue; - } - if (p.eatToken(.Keyword_volatile)) |volatile_token| { - if (ptr_info.volatile_token != null) { - try p.errors.append(p.gpa, .{ - .ExtraVolatileQualifier = .{ .token = p.tok_i - 1 }, + p.tok_i += 1; + saw_volatile = true; + }, + .Keyword_allowzero => { + if (saw_allowzero) { + try p.warn(.{ + .ExtraAllowZeroQualifier = .{ .token = p.tok_i }, }); - continue; } - ptr_info.volatile_token = volatile_token; - continue; - } - if (p.eatToken(.Keyword_allowzero)) |allowzero_token| { - if (ptr_info.allowzero_token != null) { - try p.errors.append(p.gpa, .{ - .ExtraAllowZeroQualifier = .{ .token = p.tok_i - 1 }, - }); - continue; - } - ptr_info.allowzero_token = allowzero_token; - continue; - } - break; + p.tok_i += 1; + saw_allowzero = true; + }, + else => return result, } - - return node; } - - if (try p.parseArrayTypeStart()) |node| { - 
if (node.cast(Node.SliceType)) |slice_type| { - // Collect pointer qualifiers in any order, but disallow duplicates - while (true) { - if (try p.parseByteAlign()) |align_expr| { - if (slice_type.ptr_info.align_info != null) { - try p.errors.append(p.gpa, .{ - .ExtraAlignQualifier = .{ .token = p.tok_i - 1 }, - }); - continue; - } - slice_type.ptr_info.align_info = ast.PtrInfo.Align{ - .node = align_expr, - .bit_range = null, - }; - continue; - } - if (p.eatToken(.Keyword_const)) |const_token| { - if (slice_type.ptr_info.const_token != null) { - try p.errors.append(p.gpa, .{ - .ExtraConstQualifier = .{ .token = p.tok_i - 1 }, - }); - continue; - } - slice_type.ptr_info.const_token = const_token; - continue; - } - if (p.eatToken(.Keyword_volatile)) |volatile_token| { - if (slice_type.ptr_info.volatile_token != null) { - try p.errors.append(p.gpa, .{ - .ExtraVolatileQualifier = .{ .token = p.tok_i - 1 }, - }); - continue; - } - slice_type.ptr_info.volatile_token = volatile_token; - continue; - } - if (p.eatToken(.Keyword_allowzero)) |allowzero_token| { - if (slice_type.ptr_info.allowzero_token != null) { - try p.errors.append(p.gpa, .{ - .ExtraAllowZeroQualifier = .{ .token = p.tok_i - 1 }, - }); - continue; - } - slice_type.ptr_info.allowzero_token = allowzero_token; - continue; - } - break; - } - } - return node; - } - - return null; } /// SuffixOp @@ -2655,841 +3116,536 @@ const Parser = struct { /// / DOT IDENTIFIER /// / DOTASTERISK /// / DOTQUESTIONMARK - fn parseSuffixOp(p: *Parser, lhs: *Node) !?*Node { - if (p.eatToken(.LBracket)) |_| { - const index_expr = try p.expectNode(parseExpr, .{ - .ExpectedExpr = .{ .token = p.tok_i }, - }); + fn parseSuffixOp(p: *Parser, lhs: Node.Index) !Node.Index { + switch (p.token_tags[p.tok_i]) { + .LBracket => { + const lbracket = p.nextToken(); + const index_expr = try p.expectExpr(); - if (p.eatToken(.Ellipsis2) != null) { - const end_expr = try p.parseExpr(); - const sentinel: ?*Node = if (p.eatToken(.Colon) != null) - try 
p.parseExpr() - else - null; - const rtoken = try p.expectToken(.RBracket); - const node = try p.arena.allocator.create(Node.Slice); - node.* = .{ - .lhs = lhs, - .rtoken = rtoken, - .start = index_expr, - .end = end_expr, - .sentinel = sentinel, - }; - return &node.base; - } - - const rtoken = try p.expectToken(.RBracket); - const node = try p.arena.allocator.create(Node.ArrayAccess); - node.* = .{ - .lhs = lhs, - .rtoken = rtoken, - .index_expr = index_expr, - }; - return &node.base; - } - - if (p.eatToken(.PeriodAsterisk)) |period_asterisk| { - const node = try p.arena.allocator.create(Node.SimpleSuffixOp); - node.* = .{ - .base = .{ .tag = .Deref }, - .lhs = lhs, - .rtoken = period_asterisk, - }; - return &node.base; - } - - if (p.eatToken(.Invalid_periodasterisks)) |period_asterisk| { - try p.errors.append(p.gpa, .{ - .AsteriskAfterPointerDereference = .{ .token = period_asterisk }, - }); - const node = try p.arena.allocator.create(Node.SimpleSuffixOp); - node.* = .{ - .base = .{ .tag = .Deref }, - .lhs = lhs, - .rtoken = period_asterisk, - }; - return &node.base; - } - - if (p.eatToken(.Period)) |period| { - if (try p.parseIdentifier()) |identifier| { - const node = try p.arena.allocator.create(Node.SimpleInfixOp); - node.* = .{ - .base = Node{ .tag = .Period }, - .op_token = period, - .lhs = lhs, - .rhs = identifier, - }; - return &node.base; - } - if (p.eatToken(.QuestionMark)) |question_mark| { - const node = try p.arena.allocator.create(Node.SimpleSuffixOp); - node.* = .{ - .base = .{ .tag = .UnwrapOptional }, - .lhs = lhs, - .rtoken = question_mark, - }; - return &node.base; - } - try p.errors.append(p.gpa, .{ - .ExpectedSuffixOp = .{ .token = p.tok_i }, - }); - return null; - } - - return null; - } - - /// FnCallArguments <- LPAREN ExprList RPAREN - /// ExprList <- (Expr COMMA)* Expr? 
- fn parseFnCallArguments(p: *Parser) !?AnnotatedParamList { - if (p.eatToken(.LParen) == null) return null; - const list = try ListParseFn(*Node, parseExpr)(p); - errdefer p.gpa.free(list); - const rparen = try p.expectToken(.RParen); - return AnnotatedParamList{ .list = list, .rparen = rparen }; - } - - const AnnotatedParamList = struct { - list: []*Node, - rparen: TokenIndex, - }; - - /// ArrayTypeStart <- LBRACKET Expr? (COLON Expr)? RBRACKET - fn parseArrayTypeStart(p: *Parser) !?*Node { - const lbracket = p.eatToken(.LBracket) orelse return null; - const expr = try p.parseExpr(); - const sentinel = if (p.eatToken(.Colon)) |_| - try p.expectNode(parseExpr, .{ - .ExpectedExpr = .{ .token = p.tok_i }, - }) - else - null; - const rbracket = try p.expectToken(.RBracket); - - if (expr) |len_expr| { - if (sentinel) |s| { - const node = try p.arena.allocator.create(Node.ArrayTypeSentinel); - node.* = .{ - .op_token = lbracket, - .rhs = undefined, // set by caller - .len_expr = len_expr, - .sentinel = s, - }; - return &node.base; - } else { - const node = try p.arena.allocator.create(Node.ArrayType); - node.* = .{ - .op_token = lbracket, - .rhs = undefined, // set by caller - .len_expr = len_expr, - }; - return &node.base; - } - } - - const node = try p.arena.allocator.create(Node.SliceType); - node.* = .{ - .op_token = lbracket, - .rhs = undefined, // set by caller - .ptr_info = .{ .sentinel = sentinel }, - }; - return &node.base; - } - - /// PtrTypeStart - /// <- ASTERISK - /// / ASTERISK2 - /// / LBRACKET ASTERISK (LETTERC / COLON Expr)? 
RBRACKET - fn parsePtrTypeStart(p: *Parser) !?*Node { - if (p.eatToken(.Asterisk)) |asterisk| { - const sentinel = if (p.eatToken(.Colon)) |_| - try p.expectNode(parseExpr, .{ - .ExpectedExpr = .{ .token = p.tok_i }, - }) - else - null; - const node = try p.arena.allocator.create(Node.PtrType); - node.* = .{ - .op_token = asterisk, - .rhs = undefined, // set by caller - .ptr_info = .{ .sentinel = sentinel }, - }; - return &node.base; - } - - if (p.eatToken(.AsteriskAsterisk)) |double_asterisk| { - const node = try p.arena.allocator.create(Node.PtrType); - node.* = .{ - .op_token = double_asterisk, - .rhs = undefined, // set by caller - }; - - // Special case for **, which is its own token - const child = try p.arena.allocator.create(Node.PtrType); - child.* = .{ - .op_token = double_asterisk, - .rhs = undefined, // set by caller - }; - node.rhs = &child.base; - - return &node.base; - } - if (p.eatToken(.LBracket)) |lbracket| { - const asterisk = p.eatToken(.Asterisk) orelse { - p.putBackToken(lbracket); - return null; - }; - if (p.eatToken(.Identifier)) |ident| { - const token_loc = p.token_locs[ident]; - const token_slice = p.source[token_loc.start..token_loc.end]; - if (!std.mem.eql(u8, token_slice, "c")) { - p.putBackToken(ident); - } else { + if (p.eatToken(.Ellipsis2)) |_| { + const end_expr = try p.parseExpr(); + if (end_expr == 0) { + _ = try p.expectToken(.RBracket); + return p.addNode(.{ + .tag = .SliceOpen, + .main_token = lbracket, + .data = .{ + .lhs = lhs, + .rhs = index_expr, + }, + }); + } + const sentinel: Node.Index = if (p.eatToken(.Colon)) |_| + try p.parseExpr() + else + 0; _ = try p.expectToken(.RBracket); - const node = try p.arena.allocator.create(Node.PtrType); - node.* = .{ - .op_token = lbracket, - .rhs = undefined, // set by caller - }; - return &node.base; + return p.addNode(.{ + .tag = .Slice, + .main_token = lbracket, + .data = .{ + .lhs = lhs, + .rhs = try p.addExtra(.{ + .start = index_expr, + .end = end_expr, + .sentinel = sentinel, 
+ }), + }, + }); } - } - const sentinel = if (p.eatToken(.Colon)) |_| - try p.expectNode(parseExpr, .{ - .ExpectedExpr = .{ .token = p.tok_i }, - }) - else - null; - _ = try p.expectToken(.RBracket); - const node = try p.arena.allocator.create(Node.PtrType); - node.* = .{ - .op_token = lbracket, - .rhs = undefined, // set by caller - .ptr_info = .{ .sentinel = sentinel }, - }; - return &node.base; + _ = try p.expectToken(.RBracket); + return p.addNode(.{ + .tag = .ArrayAccess, + .main_token = lbracket, + .data = .{ + .lhs = lhs, + .rhs = index_expr, + }, + }); + }, + .PeriodAsterisk => return p.addNode(.{ + .tag = .Deref, + .main_token = p.nextToken(), + .data = .{ + .lhs = lhs, + .rhs = undefined, + }, + }), + .Invalid_periodasterisks => { + const period_asterisk = p.nextToken(); + try p.warn(.{ .AsteriskAfterPointerDereference = .{ .token = period_asterisk } }); + return p.addNode(.{ + .tag = .Deref, + .main_token = period_asterisk, + .data = .{ + .lhs = lhs, + .rhs = undefined, + }, + }); + }, + .Period => switch (p.token_tags[p.tok_i + 1]) { + .Identifier => return p.addNode(.{ + .tag = .FieldAccess, + .main_token = p.nextToken(), + .data = .{ + .lhs = lhs, + .rhs = p.nextToken(), + }, + }), + .QuestionMark => return p.addNode(.{ + .tag = .UnwrapOptional, + .main_token = p.nextToken(), + .data = .{ + .lhs = lhs, + .rhs = p.nextToken(), + }, + }), + else => { + p.tok_i += 1; + try p.warn(.{ .ExpectedSuffixOp = .{ .token = p.tok_i } }); + return null_node; + }, + }, + else => return null_node, } - return null; } - /// ContainerDeclAuto <- ContainerDeclType LBRACE ContainerMembers RBRACE - fn parseContainerDeclAuto(p: *Parser) !?*Node { - const container_decl_type = (try p.parseContainerDeclType()) orelse return null; - const lbrace = try p.expectToken(.LBrace); - const members = try p.parseContainerMembers(false); - defer p.gpa.free(members); - const rbrace = try p.expectToken(.RBrace); - - const members_len = @intCast(NodeIndex, members.len); - const node = try 
Node.ContainerDecl.alloc(&p.arena.allocator, members_len); - node.* = .{ - .layout_token = null, - .kind_token = container_decl_type.kind_token, - .init_arg_expr = container_decl_type.init_arg_expr, - .fields_and_decls_len = members_len, - .lbrace_token = lbrace, - .rbrace_token = rbrace, - }; - std.mem.copy(*Node, node.fieldsAndDecls(), members); - return &node.base; - } - - /// Holds temporary data until we are ready to construct the full ContainerDecl AST node. - const ContainerDeclType = struct { - kind_token: TokenIndex, - init_arg_expr: Node.ContainerDecl.InitArg, - }; - + /// Caller must have already verified the first token. /// ContainerDeclType /// <- KEYWORD_struct /// / KEYWORD_enum (LPAREN Expr RPAREN)? /// / KEYWORD_union (LPAREN (KEYWORD_enum (LPAREN Expr RPAREN)? / Expr) RPAREN)? /// / KEYWORD_opaque - fn parseContainerDeclType(p: *Parser) !?ContainerDeclType { - const kind_token = p.nextToken(); - - const init_arg_expr = switch (p.token_ids[kind_token]) { - .Keyword_struct, .Keyword_opaque => Node.ContainerDecl.InitArg{ .None = {} }, + fn parseContainerDeclAuto(p: *Parser) !Node.Index { + const main_token = p.nextToken(); + const arg_expr = switch (p.token_tags[main_token]) { + .Keyword_struct, .Keyword_opaque => null_node, .Keyword_enum => blk: { - if (p.eatToken(.LParen) != null) { - const expr = try p.expectNode(parseExpr, .{ - .ExpectedExpr = .{ .token = p.tok_i }, - }); + if (p.eatToken(.LParen)) |_| { + const expr = try p.expectExpr(); _ = try p.expectToken(.RParen); - break :blk Node.ContainerDecl.InitArg{ .Type = expr }; + break :blk expr; + } else { + break :blk null_node; } - break :blk Node.ContainerDecl.InitArg{ .None = {} }; }, .Keyword_union => blk: { - if (p.eatToken(.LParen) != null) { - if (p.eatToken(.Keyword_enum) != null) { - if (p.eatToken(.LParen) != null) { - const expr = try p.expectNode(parseExpr, .{ - .ExpectedExpr = .{ .token = p.tok_i }, - }); + if (p.eatToken(.LParen)) |_| { + if (p.eatToken(.Keyword_enum)) |_| { + if 
(p.eatToken(.LParen)) |_| { + const enum_tag_expr = try p.expectExpr(); _ = try p.expectToken(.RParen); _ = try p.expectToken(.RParen); - break :blk Node.ContainerDecl.InitArg{ .Enum = expr }; - } - _ = try p.expectToken(.RParen); - break :blk Node.ContainerDecl.InitArg{ .Enum = null }; - } - const expr = try p.expectNode(parseExpr, .{ - .ExpectedExpr = .{ .token = p.tok_i }, - }); - _ = try p.expectToken(.RParen); - break :blk Node.ContainerDecl.InitArg{ .Type = expr }; - } - break :blk Node.ContainerDecl.InitArg{ .None = {} }; - }, - else => { - p.putBackToken(kind_token); - return null; - }, - }; - return ContainerDeclType{ - .kind_token = kind_token, - .init_arg_expr = init_arg_expr, + _ = try p.expectToken(.LBrace); + const members = try p.parseContainerMembers(false); + _ = try p.expectToken(.RBrace); + return p.addNode(.{ + .tag = .TaggedUnionEnumTag, + .main_token = main_token, + .data = .{ + .lhs = enum_tag_expr, + .rhs = try p.addExtra(Node.SubRange{ + .start = members.start, + .end = members.end, + }), + }, + }); + } else { + _ = try p.expectToken(.RParen); + + _ = try p.expectToken(.LBrace); + const members = try p.parseContainerMembers(false); + _ = try p.expectToken(.RBrace); + return p.addNode(.{ + .tag = .TaggedUnion, + .main_token = main_token, + .data = .{ + .lhs = members.start, + .rhs = members.end, + }, + }); + } + } else { + const expr = try p.expectExpr(); + _ = try p.expectToken(.RParen); + break :blk expr; + } + } else { + break :blk null_node; + } + }, + else => unreachable, }; + _ = try p.expectToken(.LBrace); + const members = try p.parseContainerMembers(false); + _ = try p.expectToken(.RBrace); + if (arg_expr == 0) { + return p.addNode(.{ + .tag = .ContainerDecl, + .main_token = main_token, + .data = .{ + .lhs = members.start, + .rhs = members.end, + }, + }); + } else { + return p.addNode(.{ + .tag = .ContainerDeclArg, + .main_token = main_token, + .data = .{ + .lhs = arg_expr, + .rhs = try p.addExtra(Node.SubRange{ + .start = 
members.start, + .end = members.end, + }), + }, + }); + } } + /// Holds temporary data until we are ready to construct the full ContainerDecl AST node. /// ByteAlign <- KEYWORD_align LPAREN Expr RPAREN - fn parseByteAlign(p: *Parser) !?*Node { - _ = p.eatToken(.Keyword_align) orelse return null; + fn parseByteAlign(p: *Parser) !Node.Index { + _ = p.eatToken(.Keyword_align) orelse return null_node; _ = try p.expectToken(.LParen); - const expr = try p.expectNode(parseExpr, .{ - .ExpectedExpr = .{ .token = p.tok_i }, - }); + const expr = try p.expectExpr(); _ = try p.expectToken(.RParen); return expr; } - /// IdentifierList <- (IDENTIFIER COMMA)* IDENTIFIER? - /// Only ErrorSetDecl parses an IdentifierList - fn parseErrorTagList(p: *Parser) ![]*Node { - return ListParseFn(*Node, parseErrorTag)(p); - } - /// SwitchProngList <- (SwitchProng COMMA)* SwitchProng? - fn parseSwitchProngList(p: *Parser) ![]*Node { - return ListParseFn(*Node, parseSwitchProng)(p); - } - - /// AsmOutputList <- (AsmOutputItem COMMA)* AsmOutputItem? - fn parseAsmOutputList(p: *Parser) Error![]Node.Asm.Output { - return ListParseFn(Node.Asm.Output, parseAsmOutputItem)(p); - } - - /// AsmInputList <- (AsmInputItem COMMA)* AsmInputItem? - fn parseAsmInputList(p: *Parser) Error![]Node.Asm.Input { - return ListParseFn(Node.Asm.Input, parseAsmInputItem)(p); + fn parseSwitchProngList(p: *Parser) !Node.SubRange { + return ListParseFn(parseSwitchProng)(p); } /// ParamDeclList <- (ParamDecl COMMA)* ParamDecl? 
- fn parseParamDeclList(p: *Parser) ![]Node.FnProto.ParamDecl { - return ListParseFn(Node.FnProto.ParamDecl, parseParamDecl)(p); + fn parseParamDeclList(p: *Parser) !SmallSpan { + _ = try p.expectToken(.LParen); + if (p.eatToken(.RParen)) |_| { + return SmallSpan{ .zero_or_one = 0 }; + } + const param_one = while (true) { + const param = try p.expectParamDecl(); + if (param != 0) break param; + switch (p.token_tags[p.nextToken()]) { + .Comma => continue, + .RParen => return SmallSpan{ .zero_or_one = 0 }, + else => { + // This is likely just a missing comma; + // give an error but continue parsing this list. + p.tok_i -= 1; + try p.warn(.{ + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + }); + }, + } + } else unreachable; + + const param_two = while (true) { + switch (p.token_tags[p.nextToken()]) { + .Comma => { + if (p.eatToken(.RParen)) |_| { + return SmallSpan{ .zero_or_one = param_one }; + } + const param = try p.expectParamDecl(); + if (param != 0) break param; + continue; + }, + .RParen => return SmallSpan{ .zero_or_one = param_one }, + .Colon, .RBrace, .RBracket => { + p.tok_i -= 1; + return p.fail(.{ + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .RParen }, + }); + }, + else => { + // This is likely just a missing comma; + // give an error but continue parsing this list. 
+ p.tok_i -= 1; + try p.warn(.{ + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + }); + }, + } + } else unreachable; + + var list = std.ArrayList(Node.Index).init(p.gpa); + defer list.deinit(); + + try list.appendSlice(&[_]Node.Index{ param_one, param_two }); + + while (true) { + switch (p.token_tags[p.nextToken()]) { + .Comma => { + if (p.token_tags[p.tok_i] == .RParen) { + p.tok_i += 1; + return SmallSpan{ .multi = list.toOwnedSlice() }; + } + const param = try p.expectParamDecl(); + if (param != 0) { + try list.append(param); + } + continue; + }, + .RParen => return SmallSpan{ .multi = list.toOwnedSlice() }, + .Colon, .RBrace, .RBracket => { + p.tok_i -= 1; + return p.fail(.{ + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .RParen }, + }); + }, + else => { + // This is likely just a missing comma; + // give an error but continue parsing this list. + p.tok_i -= 1; + try p.warn(.{ + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + }); + }, + } + } } - const NodeParseFn = fn (p: *Parser) Error!?*Node; + const NodeParseFn = fn (p: *Parser) Error!Node.Index; - fn ListParseFn(comptime E: type, comptime nodeParseFn: anytype) ParseFn([]E) { + fn ListParseFn(comptime nodeParseFn: anytype) (fn (p: *Parser) Error!Node.SubRange) { return struct { - pub fn parse(p: *Parser) ![]E { - var list = std.ArrayList(E).init(p.gpa); + pub fn parse(p: *Parser) Error!Node.SubRange { + var list = std.ArrayList(Node.Index).init(p.gpa); defer list.deinit(); - while (try nodeParseFn(p)) |item| { + while (true) { + const item = try nodeParseFn(p); + if (item == 0) break; + try list.append(item); - switch (p.token_ids[p.tok_i]) { - .Comma => _ = p.nextToken(), + switch (p.token_tags[p.tok_i]) { + .Comma => p.tok_i += 1, // all possible delimiters .Colon, .RParen, .RBrace, .RBracket => break, else => { - // this is likely just a missing comma, - // continue parsing this list and give an error - try p.errors.append(p.gpa, .{ + // This is likely just a 
missing comma; + // give an error but continue parsing this list. + try p.warn(.{ .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, }); }, } } - return list.toOwnedSlice(); + return p.listToSpan(list.items); } }.parse; } - fn SimpleBinOpParseFn(comptime token: Token.Id, comptime op: Node.Tag) NodeParseFn { - return struct { - pub fn parse(p: *Parser) Error!?*Node { - const op_token = if (token == .Keyword_and) switch (p.token_ids[p.tok_i]) { - .Keyword_and => p.nextToken(), - .Invalid_ampersands => blk: { - try p.errors.append(p.gpa, .{ - .InvalidAnd = .{ .token = p.tok_i }, - }); - break :blk p.nextToken(); - }, - else => return null, - } else p.eatToken(token) orelse return null; + /// FnCallArguments <- LPAREN ExprList RPAREN + /// ExprList <- (Expr COMMA)* Expr? + /// TODO detect when we can emit BuiltinCallTwo instead of BuiltinCall. + fn parseBuiltinCall(p: *Parser) !Node.Index { + const builtin_token = p.eatToken(.Builtin) orelse return null_node; - const node = try p.arena.allocator.create(Node.SimpleInfixOp); - node.* = .{ - .base = .{ .tag = op }, - .op_token = op_token, - .lhs = undefined, // set by caller - .rhs = undefined, // set by caller - }; - return &node.base; - } - }.parse; - } - - // Helper parsers not included in the grammar - - fn parseBuiltinCall(p: *Parser) !?*Node { - const token = p.eatToken(.Builtin) orelse return null; - const params = (try p.parseFnCallArguments()) orelse { - try p.errors.append(p.gpa, .{ + const lparen = (try p.expectTokenRecoverable(.LParen)) orelse { + try p.warn(.{ .ExpectedParamList = .{ .token = p.tok_i }, }); - - // lets pretend this was an identifier so we can continue parsing - const node = try p.arena.allocator.create(Node.OneToken); - node.* = .{ - .base = .{ .tag = .Identifier }, - .token = token, - }; - return &node.base; + // Pretend this was an identifier so we can continue parsing. 
+ return p.addNode(.{ + .tag = .OneToken, + .main_token = builtin_token, + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, + }); }; - defer p.gpa.free(params.list); - - const node = try Node.BuiltinCall.alloc(&p.arena.allocator, params.list.len); - node.* = .{ - .builtin_token = token, - .params_len = params.list.len, - .rparen_token = params.rparen, - }; - std.mem.copy(*Node, node.params(), params.list); - return &node.base; - } - - fn parseErrorTag(p: *Parser) !?*Node { - const doc_comments = try p.parseDocComment(); // no need to rewind on failure - const token = p.eatToken(.Identifier) orelse return null; - - const node = try p.arena.allocator.create(Node.ErrorTag); - node.* = .{ - .doc_comments = doc_comments, - .name_token = token, - }; - return &node.base; - } - - fn parseIdentifier(p: *Parser) !?*Node { - const token = p.eatToken(.Identifier) orelse return null; - const node = try p.arena.allocator.create(Node.OneToken); - node.* = .{ - .base = .{ .tag = .Identifier }, - .token = token, - }; - return &node.base; - } - - fn parseAnyType(p: *Parser) !?*Node { - const token = p.eatToken(.Keyword_anytype) orelse - p.eatToken(.Keyword_var) orelse return null; // TODO remove in next release cycle - const node = try p.arena.allocator.create(Node.OneToken); - node.* = .{ - .base = .{ .tag = .AnyType }, - .token = token, - }; - return &node.base; - } - - fn createLiteral(p: *Parser, tag: ast.Node.Tag, token: TokenIndex) !*Node { - const result = try p.arena.allocator.create(Node.OneToken); - result.* = .{ - .base = .{ .tag = tag }, - .token = token, - }; - return &result.base; - } - - fn parseStringLiteralSingle(p: *Parser) !?*Node { - if (p.eatToken(.StringLiteral)) |token| { - const node = try p.arena.allocator.create(Node.OneToken); - node.* = .{ - .base = .{ .tag = .StringLiteral }, - .token = token, - }; - return &node.base; - } - return null; - } - - // string literal or multiline string literal - fn parseStringLiteral(p: *Parser) !?*Node { - if (try 
p.parseStringLiteralSingle()) |node| return node; - - if (p.eatToken(.MultilineStringLiteralLine)) |first_line| { - const start_tok_i = p.tok_i; - var tok_i = start_tok_i; - var count: usize = 1; // including first_line - while (true) : (tok_i += 1) { - switch (p.token_ids[tok_i]) { - .LineComment => continue, - .MultilineStringLiteralLine => count += 1, - else => break, - } - } - - const node = try Node.MultilineStringLiteral.alloc(&p.arena.allocator, count); - node.* = .{ .lines_len = count }; - const lines = node.lines(); - tok_i = start_tok_i; - lines[0] = first_line; - count = 1; - while (true) : (tok_i += 1) { - switch (p.token_ids[tok_i]) { - .LineComment => continue, - .MultilineStringLiteralLine => { - lines[count] = tok_i; - count += 1; - }, - else => break, - } - } - p.tok_i = tok_i; - return &node.base; - } - - return null; - } - - fn parseIntegerLiteral(p: *Parser) !?*Node { - const token = p.eatToken(.IntegerLiteral) orelse return null; - const node = try p.arena.allocator.create(Node.OneToken); - node.* = .{ - .base = .{ .tag = .IntegerLiteral }, - .token = token, - }; - return &node.base; - } - - fn parseFloatLiteral(p: *Parser) !?*Node { - const token = p.eatToken(.FloatLiteral) orelse return null; - const node = try p.arena.allocator.create(Node.OneToken); - node.* = .{ - .base = .{ .tag = .FloatLiteral }, - .token = token, - }; - return &node.base; - } - - fn parseTry(p: *Parser) !?*Node { - const token = p.eatToken(.Keyword_try) orelse return null; - const node = try p.arena.allocator.create(Node.SimplePrefixOp); - node.* = .{ - .base = .{ .tag = .Try }, - .op_token = token, - .rhs = undefined, // set by caller - }; - return &node.base; - } - - /// IfPrefix Body (KEYWORD_else Payload? Body)? 
- fn parseIf(p: *Parser, bodyParseFn: NodeParseFn) !?*Node { - const node = (try p.parseIfPrefix()) orelse return null; - const if_prefix = node.cast(Node.If).?; - - if_prefix.body = try p.expectNode(bodyParseFn, .{ - .InvalidToken = .{ .token = p.tok_i }, + const params = try ListParseFn(parseExpr)(p); + _ = try p.expectToken(.RParen); + return p.addNode(.{ + .tag = .BuiltinCall, + .main_token = builtin_token, + .data = .{ + .lhs = params.start, + .rhs = params.end, + }, }); + } - const else_token = p.eatToken(.Keyword_else) orelse return node; - const payload = try p.parsePayload(); - const else_expr = try p.expectNode(bodyParseFn, .{ - .InvalidToken = .{ .token = p.tok_i }, + fn parseOneToken(p: *Parser, token_tag: Token.Tag) !Node.Index { + const token = p.eatToken(token_tag) orelse return null_node; + return p.addNode(.{ + .tag = .OneToken, + .main_token = token, + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, }); - const else_node = try p.arena.allocator.create(Node.Else); - else_node.* = .{ - .else_token = else_token, - .payload = payload, - .body = else_expr, - }; - if_prefix.@"else" = else_node; + } + fn expectOneToken(p: *Parser, token_tag: Token.Tag) !Node.Index { + const node = try p.expectOneTokenRecoverable(token_tag); + if (node == 0) return error.ParseError; return node; } - /// Eat a multiline doc comment - fn parseDocComment(p: *Parser) !?*Node.DocComment { + fn expectOneTokenRecoverable(p: *Parser, token_tag: Token.Tag) !Node.Index { + const node = p.parseOneToken(token_tag); + if (node == 0) { + try p.warn(.{ + .ExpectedToken = .{ + .token = p.tok_i, + .expected_id = token_tag, + }, + }); + } + return node; + } + + // string literal or multiline string literal + fn parseStringLiteral(p: *Parser) !Node.Index { + switch (p.token_tags[p.tok_i]) { + .StringLiteral => return p.addNode(.{ + .tag = .OneToken, + .main_token = p.nextToken(), + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, + }), + .MultilineStringLiteralLine => { + 
const first_line = p.nextToken(); + while (p.token_tags[p.tok_i] == .MultilineStringLiteralLine) { + p.tok_i += 1; + } + return p.addNode(.{ + .tag = .OneToken, + .main_token = first_line, + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, + }); + }, + else => return null_node, + } + } + + fn expectStringLiteral(p: *Parser) !Node.Index { + const node = try p.parseStringLiteral(); + if (node == 0) { + return p.fail(.{ .ExpectedStringLiteral = .{ .token = p.tok_i } }); + } + return node; + } + + fn expectIntegerLiteral(p: *Parser) !Node.Index { + const node = p.parseOneToken(.IntegerLiteral); + if (node != 0) { + return p.fail(.{ .ExpectedIntegerLiteral = .{ .token = p.tok_i } }); + } + return node; + } + + /// KEYWORD_if LPAREN Expr RPAREN PtrPayload? Body (KEYWORD_else Payload? Body)? + fn parseIf(p: *Parser, bodyParseFn: NodeParseFn) !Node.Index { + const if_token = p.eatToken(.Keyword_if) orelse return null_node; + _ = try p.expectToken(.LParen); + const condition = try p.expectExpr(); + _ = try p.expectToken(.RParen); + const then_payload = try p.parsePtrPayload(); + + const then_expr = try bodyParseFn(p); + if (then_expr == 0) return p.fail(.{ .InvalidToken = .{ .token = p.tok_i } }); + + const else_token = p.eatToken(.Keyword_else) orelse return p.addNode(.{ + .tag = if (then_payload == 0) .IfSimple else .IfSimpleOptional, + .main_token = if_token, + .data = .{ + .lhs = condition, + .rhs = then_expr, + }, + }); + const else_payload = try p.parsePayload(); + const else_expr = try bodyParseFn(p); + if (else_expr == 0) return p.fail(.{ .InvalidToken = .{ .token = p.tok_i } }); + + const tag = if (else_payload != 0) + Node.Tag.IfError + else if (then_payload != 0) + Node.Tag.IfOptional + else + Node.Tag.If; + return p.addNode(.{ + .tag = tag, + .main_token = if_token, + .data = .{ + .lhs = condition, + .rhs = try p.addExtra(Node.If{ + .then_expr = then_expr, + .else_expr = else_expr, + }), + }, + }); + } + + /// Skips over doc comment tokens. 
Returns the first one, if any. + fn eatDocComments(p: *Parser) ?TokenIndex { if (p.eatToken(.DocComment)) |first_line| { while (p.eatToken(.DocComment)) |_| {} - const node = try p.arena.allocator.create(Node.DocComment); - node.* = .{ .first_line = first_line }; - return node; + return first_line; } return null; } fn tokensOnSameLine(p: *Parser, token1: TokenIndex, token2: TokenIndex) bool { - return std.mem.indexOfScalar(u8, p.source[p.token_locs[token1].end..p.token_locs[token2].start], '\n') == null; + return std.mem.indexOfScalar(u8, p.source[p.token_starts[token1]..p.token_starts[token2]], '\n') == null; } /// Eat a single-line doc comment on the same line as another node - fn parseAppendedDocComment(p: *Parser, after_token: TokenIndex) !?*Node.DocComment { - const comment_token = p.eatToken(.DocComment) orelse return null; - if (p.tokensOnSameLine(after_token, comment_token)) { - const node = try p.arena.allocator.create(Node.DocComment); - node.* = .{ .first_line = comment_token }; - return node; + fn parseAppendedDocComment(p: *Parser, after_token: TokenIndex) !void { + const comment_token = p.eatToken(.DocComment) orelse return; + if (!p.tokensOnSameLine(after_token, comment_token)) { + p.tok_i -= 1; } - p.putBackToken(comment_token); - return null; } - /// Op* Child - fn parsePrefixOpExpr(p: *Parser, comptime opParseFn: NodeParseFn, comptime childParseFn: NodeParseFn) Error!?*Node { - if (try opParseFn(p)) |first_op| { - var rightmost_op = first_op; - while (true) { - switch (rightmost_op.tag) { - .AddressOf, - .Await, - .BitNot, - .BoolNot, - .OptionalType, - .Negation, - .NegationWrap, - .Resume, - .Try, - => { - if (try opParseFn(p)) |rhs| { - rightmost_op.cast(Node.SimplePrefixOp).?.rhs = rhs; - rightmost_op = rhs; - } else break; - }, - .ArrayType => { - if (try opParseFn(p)) |rhs| { - rightmost_op.cast(Node.ArrayType).?.rhs = rhs; - rightmost_op = rhs; - } else break; - }, - .ArrayTypeSentinel => { - if (try opParseFn(p)) |rhs| { - 
rightmost_op.cast(Node.ArrayTypeSentinel).?.rhs = rhs; - rightmost_op = rhs; - } else break; - }, - .SliceType => { - if (try opParseFn(p)) |rhs| { - rightmost_op.cast(Node.SliceType).?.rhs = rhs; - rightmost_op = rhs; - } else break; - }, - .PtrType => { - var ptr_type = rightmost_op.cast(Node.PtrType).?; - // If the token encountered was **, there will be two nodes - if (p.token_ids[ptr_type.op_token] == .AsteriskAsterisk) { - rightmost_op = ptr_type.rhs; - ptr_type = rightmost_op.cast(Node.PtrType).?; - } - if (try opParseFn(p)) |rhs| { - ptr_type.rhs = rhs; - rightmost_op = rhs; - } else break; - }, - .AnyFrameType => { - const prom = rightmost_op.cast(Node.AnyFrameType).?; - if (try opParseFn(p)) |rhs| { - prom.result.?.return_type = rhs; - rightmost_op = rhs; - } else break; - }, - else => unreachable, - } - } - - // If any prefix op existed, a child node on the RHS is required - switch (rightmost_op.tag) { - .AddressOf, - .Await, - .BitNot, - .BoolNot, - .OptionalType, - .Negation, - .NegationWrap, - .Resume, - .Try, - => { - const prefix_op = rightmost_op.cast(Node.SimplePrefixOp).?; - prefix_op.rhs = try p.expectNode(childParseFn, .{ - .InvalidToken = .{ .token = p.tok_i }, - }); - }, - .ArrayType => { - const prefix_op = rightmost_op.cast(Node.ArrayType).?; - prefix_op.rhs = try p.expectNode(childParseFn, .{ - .InvalidToken = .{ .token = p.tok_i }, - }); - }, - .ArrayTypeSentinel => { - const prefix_op = rightmost_op.cast(Node.ArrayTypeSentinel).?; - prefix_op.rhs = try p.expectNode(childParseFn, .{ - .InvalidToken = .{ .token = p.tok_i }, - }); - }, - .PtrType => { - const prefix_op = rightmost_op.cast(Node.PtrType).?; - prefix_op.rhs = try p.expectNode(childParseFn, .{ - .InvalidToken = .{ .token = p.tok_i }, - }); - }, - .SliceType => { - const prefix_op = rightmost_op.cast(Node.SliceType).?; - prefix_op.rhs = try p.expectNode(childParseFn, .{ - .InvalidToken = .{ .token = p.tok_i }, - }); - }, - .AnyFrameType => { - const prom = 
rightmost_op.cast(Node.AnyFrameType).?; - prom.result.?.return_type = try p.expectNode(childParseFn, .{ - .InvalidToken = .{ .token = p.tok_i }, - }); - }, - else => unreachable, - } - - return first_op; - } - - // Otherwise, the child node is optional - return childParseFn(p); + fn eatToken(p: *Parser, tag: Token.Tag) ?TokenIndex { + return if (p.token_tags[p.tok_i] == tag) p.nextToken() else null; } - /// Child (Op Child)* - /// Child (Op Child)? - fn parseBinOpExpr( - p: *Parser, - opParseFn: NodeParseFn, - childParseFn: NodeParseFn, - chain: enum { - Once, - Infinitely, - }, - ) Error!?*Node { - var res = (try childParseFn(p)) orelse return null; - - while (try opParseFn(p)) |node| { - const right = try p.expectNode(childParseFn, .{ - .InvalidToken = .{ .token = p.tok_i }, - }); - const left = res; - res = node; - - if (node.castTag(.Catch)) |op| { - op.lhs = left; - op.rhs = right; - } else if (node.cast(Node.SimpleInfixOp)) |op| { - op.lhs = left; - op.rhs = right; - } - - switch (chain) { - .Once => break, - .Infinitely => continue, - } - } - - return res; - } - - fn createInfixOp(p: *Parser, op_token: TokenIndex, tag: Node.Tag) !*Node { - const node = try p.arena.allocator.create(Node.SimpleInfixOp); - node.* = .{ - .base = Node{ .tag = tag }, - .op_token = op_token, - .lhs = undefined, // set by caller - .rhs = undefined, // set by caller - }; - return &node.base; - } - - fn eatToken(p: *Parser, id: Token.Id) ?TokenIndex { - return if (p.token_ids[p.tok_i] == id) p.nextToken() else null; - } - - fn expectToken(p: *Parser, id: Token.Id) Error!TokenIndex { - return (try p.expectTokenRecoverable(id)) orelse error.ParseError; - } - - fn expectTokenRecoverable(p: *Parser, id: Token.Id) !?TokenIndex { + fn assertToken(p: *Parser, tag: Token.Tag) TokenIndex { const token = p.nextToken(); - if (p.token_ids[token] != id) { - try p.errors.append(p.gpa, .{ - .ExpectedToken = .{ .token = token, .expected_id = id }, - }); - // go back so that we can recover properly - 
p.putBackToken(token); - return null; + assert(p.token_tags[token] == tag); + return token; + } + + fn expectToken(p: *Parser, tag: Token.Tag) Error!TokenIndex { + const token = p.nextToken(); + if (p.token_tags[token] != tag) { + return p.fail(.{ .ExpectedToken = .{ .token = token, .expected_id = tag } }); } return token; } + fn expectTokenRecoverable(p: *Parser, tag: Token.Tag) !?TokenIndex { + if (p.token_tags[p.tok_i] != tag) { + try p.warn(.{ + .ExpectedToken = .{ .token = p.tok_i, .expected_id = tag }, + }); + return null; + } else { + return p.nextToken(); + } + } + fn nextToken(p: *Parser) TokenIndex { const result = p.tok_i; p.tok_i += 1; - assert(p.token_ids[result] != .LineComment); - if (p.tok_i >= p.token_ids.len) return result; - - while (true) { - if (p.token_ids[p.tok_i] != .LineComment) return result; - p.tok_i += 1; - } - } - - fn putBackToken(p: *Parser, putting_back: TokenIndex) void { - while (p.tok_i > 0) { - p.tok_i -= 1; - if (p.token_ids[p.tok_i] == .LineComment) continue; - assert(putting_back == p.tok_i); - return; - } - } - - /// TODO Delete this function. I don't like the inversion of control. - fn expectNode( - p: *Parser, - parseFn: NodeParseFn, - /// if parsing fails - err: AstError, - ) Error!*Node { - return (try p.expectNodeRecoverable(parseFn, err)) orelse return error.ParseError; - } - - /// TODO Delete this function. I don't like the inversion of control. 
- fn expectNodeRecoverable( - p: *Parser, - parseFn: NodeParseFn, - /// if parsing fails - err: AstError, - ) !?*Node { - return (try parseFn(p)) orelse { - try p.errors.append(p.gpa, err); - return null; - }; + return result; } }; -fn ParseFn(comptime T: type) type { - return fn (p: *Parser) Error!T; -} - -test "std.zig.parser" { +test { _ = @import("parser_test.zig"); } diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index d7cc1208a2..279402d71b 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -3736,12 +3736,13 @@ var fixed_buffer_mem: [100 * 1024]u8 = undefined; fn testParse(source: []const u8, allocator: *mem.Allocator, anything_changed: *bool) ![]u8 { const stderr = io.getStdErr().writer(); - const tree = try std.zig.parse(allocator, source); - defer tree.deinit(); + var tree = try std.zig.parse(allocator, source); + defer tree.deinit(allocator); - for (tree.errors) |*parse_error| { - const token = tree.token_locs[parse_error.loc()]; - const loc = tree.tokenLocation(0, parse_error.loc()); + for (tree.errors) |parse_error| { + const error_token = tree.errorToken(parse_error); + const token_start = tree.tokens.items(.start)[error_token]; + const loc = tree.tokenLocation(0, error_token); try stderr.print("(memory buffer):{d}:{d}: error: ", .{ loc.line + 1, loc.column + 1 }); try tree.renderError(parse_error, stderr); try stderr.print("\n{s}\n", .{source[loc.line_start..loc.line_end]}); @@ -3750,13 +3751,7 @@ fn testParse(source: []const u8, allocator: *mem.Allocator, anything_changed: *b while (i < loc.column) : (i += 1) { try stderr.writeAll(" "); } - } - { - const caret_count = token.end - token.start; - var i: usize = 0; - while (i < caret_count) : (i += 1) { - try stderr.writeAll("~"); - } + try stderr.writeAll("^"); } try stderr.writeAll("\n"); } @@ -3825,8 +3820,8 @@ fn testCanonical(source: []const u8) !void { const Error = @TagType(std.zig.ast.Error); fn testError(source: []const u8, expected_errors: 
[]const Error) !void { - const tree = try std.zig.parse(std.testing.allocator, source); - defer tree.deinit(); + var tree = try std.zig.parse(std.testing.allocator, source); + defer tree.deinit(std.testing.allocator); std.testing.expect(tree.errors.len == expected_errors.len); for (expected_errors) |expected, i| { diff --git a/lib/std/zig/tokenizer.zig b/lib/std/zig/tokenizer.zig index dcbf717638..68dfd987a4 100644 --- a/lib/std/zig/tokenizer.zig +++ b/lib/std/zig/tokenizer.zig @@ -7,7 +7,7 @@ const std = @import("../std.zig"); const mem = std.mem; pub const Token = struct { - id: Id, + tag: Tag, loc: Loc, pub const Loc = struct { @@ -15,7 +15,7 @@ pub const Token = struct { end: usize, }; - pub const keywords = std.ComptimeStringMap(Id, .{ + pub const keywords = std.ComptimeStringMap(Tag, .{ .{ "align", .Keyword_align }, .{ "allowzero", .Keyword_allowzero }, .{ "and", .Keyword_and }, @@ -71,11 +71,11 @@ pub const Token = struct { .{ "while", .Keyword_while }, }); - pub fn getKeyword(bytes: []const u8) ?Id { + pub fn getKeyword(bytes: []const u8) ?Tag { return keywords.get(bytes); } - pub const Id = enum { + pub const Tag = enum { Invalid, Invalid_ampersands, Invalid_periodasterisks, @@ -198,8 +198,8 @@ pub const Token = struct { Keyword_volatile, Keyword_while, - pub fn symbol(id: Id) []const u8 { - return switch (id) { + pub fn symbol(tag: Tag) []const u8 { + return switch (tag) { .Invalid => "Invalid", .Invalid_ampersands => "&&", .Invalid_periodasterisks => ".**", @@ -334,7 +334,7 @@ pub const Tokenizer = struct { /// For debugging purposes pub fn dump(self: *Tokenizer, token: *const Token) void { - std.debug.warn("{s} \"{s}\"\n", .{ @tagName(token.id), self.buffer[token.start..token.end] }); + std.debug.warn("{s} \"{s}\"\n", .{ @tagName(token.tag), self.buffer[token.start..token.end] }); } pub fn init(buffer: []const u8) Tokenizer { @@ -421,7 +421,7 @@ pub const Tokenizer = struct { const start_index = self.index; var state: State = .start; var result = Token{ 
- .id = .Eof, + .tag = .Eof, .loc = .{ .start = self.index, .end = undefined, @@ -438,14 +438,14 @@ pub const Tokenizer = struct { }, '"' => { state = .string_literal; - result.id = .StringLiteral; + result.tag = .StringLiteral; }, '\'' => { state = .char_literal; }, 'a'...'z', 'A'...'Z', '_' => { state = .identifier; - result.id = .Identifier; + result.tag = .Identifier; }, '@' => { state = .saw_at_sign; @@ -460,42 +460,42 @@ pub const Tokenizer = struct { state = .pipe; }, '(' => { - result.id = .LParen; + result.tag = .LParen; self.index += 1; break; }, ')' => { - result.id = .RParen; + result.tag = .RParen; self.index += 1; break; }, '[' => { - result.id = .LBracket; + result.tag = .LBracket; self.index += 1; break; }, ']' => { - result.id = .RBracket; + result.tag = .RBracket; self.index += 1; break; }, ';' => { - result.id = .Semicolon; + result.tag = .Semicolon; self.index += 1; break; }, ',' => { - result.id = .Comma; + result.tag = .Comma; self.index += 1; break; }, '?' => { - result.id = .QuestionMark; + result.tag = .QuestionMark; self.index += 1; break; }, ':' => { - result.id = .Colon; + result.tag = .Colon; self.index += 1; break; }, @@ -519,20 +519,20 @@ pub const Tokenizer = struct { }, '\\' => { state = .backslash; - result.id = .MultilineStringLiteralLine; + result.tag = .MultilineStringLiteralLine; }, '{' => { - result.id = .LBrace; + result.tag = .LBrace; self.index += 1; break; }, '}' => { - result.id = .RBrace; + result.tag = .RBrace; self.index += 1; break; }, '~' => { - result.id = .Tilde; + result.tag = .Tilde; self.index += 1; break; }, @@ -550,14 +550,14 @@ pub const Tokenizer = struct { }, '0' => { state = .zero; - result.id = .IntegerLiteral; + result.tag = .IntegerLiteral; }, '1'...'9' => { state = .int_literal_dec; - result.id = .IntegerLiteral; + result.tag = .IntegerLiteral; }, else => { - result.id = .Invalid; + result.tag = .Invalid; self.index += 1; break; }, @@ -565,42 +565,42 @@ pub const Tokenizer = struct { .saw_at_sign => 
switch (c) { '"' => { - result.id = .Identifier; + result.tag = .Identifier; state = .string_literal; }, else => { // reinterpret as a builtin self.index -= 1; state = .builtin; - result.id = .Builtin; + result.tag = .Builtin; }, }, .ampersand => switch (c) { '&' => { - result.id = .Invalid_ampersands; + result.tag = .Invalid_ampersands; self.index += 1; break; }, '=' => { - result.id = .AmpersandEqual; + result.tag = .AmpersandEqual; self.index += 1; break; }, else => { - result.id = .Ampersand; + result.tag = .Ampersand; break; }, }, .asterisk => switch (c) { '=' => { - result.id = .AsteriskEqual; + result.tag = .AsteriskEqual; self.index += 1; break; }, '*' => { - result.id = .AsteriskAsterisk; + result.tag = .AsteriskAsterisk; self.index += 1; break; }, @@ -608,43 +608,43 @@ pub const Tokenizer = struct { state = .asterisk_percent; }, else => { - result.id = .Asterisk; + result.tag = .Asterisk; break; }, }, .asterisk_percent => switch (c) { '=' => { - result.id = .AsteriskPercentEqual; + result.tag = .AsteriskPercentEqual; self.index += 1; break; }, else => { - result.id = .AsteriskPercent; + result.tag = .AsteriskPercent; break; }, }, .percent => switch (c) { '=' => { - result.id = .PercentEqual; + result.tag = .PercentEqual; self.index += 1; break; }, else => { - result.id = .Percent; + result.tag = .Percent; break; }, }, .plus => switch (c) { '=' => { - result.id = .PlusEqual; + result.tag = .PlusEqual; self.index += 1; break; }, '+' => { - result.id = .PlusPlus; + result.tag = .PlusPlus; self.index += 1; break; }, @@ -652,31 +652,31 @@ pub const Tokenizer = struct { state = .plus_percent; }, else => { - result.id = .Plus; + result.tag = .Plus; break; }, }, .plus_percent => switch (c) { '=' => { - result.id = .PlusPercentEqual; + result.tag = .PlusPercentEqual; self.index += 1; break; }, else => { - result.id = .PlusPercent; + result.tag = .PlusPercent; break; }, }, .caret => switch (c) { '=' => { - result.id = .CaretEqual; + result.tag = .CaretEqual; 
self.index += 1; break; }, else => { - result.id = .Caret; + result.tag = .Caret; break; }, }, @@ -684,8 +684,8 @@ pub const Tokenizer = struct { .identifier => switch (c) { 'a'...'z', 'A'...'Z', '_', '0'...'9' => {}, else => { - if (Token.getKeyword(self.buffer[result.loc.start..self.index])) |id| { - result.id = id; + if (Token.getKeyword(self.buffer[result.loc.start..self.index])) |tag| { + result.tag = tag; } break; }, @@ -724,7 +724,7 @@ pub const Tokenizer = struct { state = .char_literal_backslash; }, '\'', 0x80...0xbf, 0xf8...0xff => { - result.id = .Invalid; + result.tag = .Invalid; break; }, 0xc0...0xdf => { // 110xxxxx @@ -746,7 +746,7 @@ pub const Tokenizer = struct { .char_literal_backslash => switch (c) { '\n' => { - result.id = .Invalid; + result.tag = .Invalid; break; }, 'x' => { @@ -769,7 +769,7 @@ pub const Tokenizer = struct { } }, else => { - result.id = .Invalid; + result.tag = .Invalid; break; }, }, @@ -780,7 +780,7 @@ pub const Tokenizer = struct { seen_escape_digits = 0; }, else => { - result.id = .Invalid; + result.tag = .Invalid; state = .char_literal_unicode_invalid; }, }, @@ -791,14 +791,14 @@ pub const Tokenizer = struct { }, '}' => { if (seen_escape_digits == 0) { - result.id = .Invalid; + result.tag = .Invalid; state = .char_literal_unicode_invalid; } else { state = .char_literal_end; } }, else => { - result.id = .Invalid; + result.tag = .Invalid; state = .char_literal_unicode_invalid; }, }, @@ -813,12 +813,12 @@ pub const Tokenizer = struct { .char_literal_end => switch (c) { '\'' => { - result.id = .CharLiteral; + result.tag = .CharLiteral; self.index += 1; break; }, else => { - result.id = .Invalid; + result.tag = .Invalid; break; }, }, @@ -831,7 +831,7 @@ pub const Tokenizer = struct { } }, else => { - result.id = .Invalid; + result.tag = .Invalid; break; }, }, @@ -847,58 +847,58 @@ pub const Tokenizer = struct { .bang => switch (c) { '=' => { - result.id = .BangEqual; + result.tag = .BangEqual; self.index += 1; break; }, else => 
{ - result.id = .Bang; + result.tag = .Bang; break; }, }, .pipe => switch (c) { '=' => { - result.id = .PipeEqual; + result.tag = .PipeEqual; self.index += 1; break; }, '|' => { - result.id = .PipePipe; + result.tag = .PipePipe; self.index += 1; break; }, else => { - result.id = .Pipe; + result.tag = .Pipe; break; }, }, .equal => switch (c) { '=' => { - result.id = .EqualEqual; + result.tag = .EqualEqual; self.index += 1; break; }, '>' => { - result.id = .EqualAngleBracketRight; + result.tag = .EqualAngleBracketRight; self.index += 1; break; }, else => { - result.id = .Equal; + result.tag = .Equal; break; }, }, .minus => switch (c) { '>' => { - result.id = .Arrow; + result.tag = .Arrow; self.index += 1; break; }, '=' => { - result.id = .MinusEqual; + result.tag = .MinusEqual; self.index += 1; break; }, @@ -906,19 +906,19 @@ pub const Tokenizer = struct { state = .minus_percent; }, else => { - result.id = .Minus; + result.tag = .Minus; break; }, }, .minus_percent => switch (c) { '=' => { - result.id = .MinusPercentEqual; + result.tag = .MinusPercentEqual; self.index += 1; break; }, else => { - result.id = .MinusPercent; + result.tag = .MinusPercent; break; }, }, @@ -928,24 +928,24 @@ pub const Tokenizer = struct { state = .angle_bracket_angle_bracket_left; }, '=' => { - result.id = .AngleBracketLeftEqual; + result.tag = .AngleBracketLeftEqual; self.index += 1; break; }, else => { - result.id = .AngleBracketLeft; + result.tag = .AngleBracketLeft; break; }, }, .angle_bracket_angle_bracket_left => switch (c) { '=' => { - result.id = .AngleBracketAngleBracketLeftEqual; + result.tag = .AngleBracketAngleBracketLeftEqual; self.index += 1; break; }, else => { - result.id = .AngleBracketAngleBracketLeft; + result.tag = .AngleBracketAngleBracketLeft; break; }, }, @@ -955,24 +955,24 @@ pub const Tokenizer = struct { state = .angle_bracket_angle_bracket_right; }, '=' => { - result.id = .AngleBracketRightEqual; + result.tag = .AngleBracketRightEqual; self.index += 1; break; }, 
else => { - result.id = .AngleBracketRight; + result.tag = .AngleBracketRight; break; }, }, .angle_bracket_angle_bracket_right => switch (c) { '=' => { - result.id = .AngleBracketAngleBracketRightEqual; + result.tag = .AngleBracketAngleBracketRightEqual; self.index += 1; break; }, else => { - result.id = .AngleBracketAngleBracketRight; + result.tag = .AngleBracketAngleBracketRight; break; }, }, @@ -985,30 +985,30 @@ pub const Tokenizer = struct { state = .period_asterisk; }, else => { - result.id = .Period; + result.tag = .Period; break; }, }, .period_2 => switch (c) { '.' => { - result.id = .Ellipsis3; + result.tag = .Ellipsis3; self.index += 1; break; }, else => { - result.id = .Ellipsis2; + result.tag = .Ellipsis2; break; }, }, .period_asterisk => switch (c) { '*' => { - result.id = .Invalid_periodasterisks; + result.tag = .Invalid_periodasterisks; break; }, else => { - result.id = .PeriodAsterisk; + result.tag = .PeriodAsterisk; break; }, }, @@ -1016,15 +1016,15 @@ pub const Tokenizer = struct { .slash => switch (c) { '/' => { state = .line_comment_start; - result.id = .LineComment; + result.tag = .LineComment; }, '=' => { - result.id = .SlashEqual; + result.tag = .SlashEqual; self.index += 1; break; }, else => { - result.id = .Slash; + result.tag = .Slash; break; }, }, @@ -1033,7 +1033,7 @@ pub const Tokenizer = struct { state = .doc_comment_start; }, '!' 
=> { - result.id = .ContainerDocComment; + result.tag = .ContainerDocComment; state = .container_doc_comment; }, '\n' => break, @@ -1048,16 +1048,16 @@ pub const Tokenizer = struct { state = .line_comment; }, '\n' => { - result.id = .DocComment; + result.tag = .DocComment; break; }, '\t', '\r' => { state = .doc_comment; - result.id = .DocComment; + result.tag = .DocComment; }, else => { state = .doc_comment; - result.id = .DocComment; + result.tag = .DocComment; self.checkLiteralCharacter(); }, }, @@ -1083,7 +1083,7 @@ pub const Tokenizer = struct { }, else => { if (isIdentifierChar(c)) { - result.id = .Invalid; + result.tag = .Invalid; } break; }, @@ -1093,7 +1093,7 @@ pub const Tokenizer = struct { state = .int_literal_bin; }, else => { - result.id = .Invalid; + result.tag = .Invalid; break; }, }, @@ -1104,7 +1104,7 @@ pub const Tokenizer = struct { '0'...'1' => {}, else => { if (isIdentifierChar(c)) { - result.id = .Invalid; + result.tag = .Invalid; } break; }, @@ -1114,7 +1114,7 @@ pub const Tokenizer = struct { state = .int_literal_oct; }, else => { - result.id = .Invalid; + result.tag = .Invalid; break; }, }, @@ -1125,7 +1125,7 @@ pub const Tokenizer = struct { '0'...'7' => {}, else => { if (isIdentifierChar(c)) { - result.id = .Invalid; + result.tag = .Invalid; } break; }, @@ -1135,7 +1135,7 @@ pub const Tokenizer = struct { state = .int_literal_dec; }, else => { - result.id = .Invalid; + result.tag = .Invalid; break; }, }, @@ -1145,16 +1145,16 @@ pub const Tokenizer = struct { }, '.' 
=> { state = .num_dot_dec; - result.id = .FloatLiteral; + result.tag = .FloatLiteral; }, 'e', 'E' => { state = .float_exponent_unsigned; - result.id = .FloatLiteral; + result.tag = .FloatLiteral; }, '0'...'9' => {}, else => { if (isIdentifierChar(c)) { - result.id = .Invalid; + result.tag = .Invalid; } break; }, @@ -1164,7 +1164,7 @@ pub const Tokenizer = struct { state = .int_literal_hex; }, else => { - result.id = .Invalid; + result.tag = .Invalid; break; }, }, @@ -1174,23 +1174,23 @@ pub const Tokenizer = struct { }, '.' => { state = .num_dot_hex; - result.id = .FloatLiteral; + result.tag = .FloatLiteral; }, 'p', 'P' => { state = .float_exponent_unsigned; - result.id = .FloatLiteral; + result.tag = .FloatLiteral; }, '0'...'9', 'a'...'f', 'A'...'F' => {}, else => { if (isIdentifierChar(c)) { - result.id = .Invalid; + result.tag = .Invalid; } break; }, }, .num_dot_dec => switch (c) { '.' => { - result.id = .IntegerLiteral; + result.tag = .IntegerLiteral; self.index -= 1; state = .start; break; @@ -1203,14 +1203,14 @@ pub const Tokenizer = struct { }, else => { if (isIdentifierChar(c)) { - result.id = .Invalid; + result.tag = .Invalid; } break; }, }, .num_dot_hex => switch (c) { '.' 
=> { - result.id = .IntegerLiteral; + result.tag = .IntegerLiteral; self.index -= 1; state = .start; break; @@ -1219,12 +1219,12 @@ pub const Tokenizer = struct { state = .float_exponent_unsigned; }, '0'...'9', 'a'...'f', 'A'...'F' => { - result.id = .FloatLiteral; + result.tag = .FloatLiteral; state = .float_fraction_hex; }, else => { if (isIdentifierChar(c)) { - result.id = .Invalid; + result.tag = .Invalid; } break; }, @@ -1234,7 +1234,7 @@ pub const Tokenizer = struct { state = .float_fraction_dec; }, else => { - result.id = .Invalid; + result.tag = .Invalid; break; }, }, @@ -1248,7 +1248,7 @@ pub const Tokenizer = struct { '0'...'9' => {}, else => { if (isIdentifierChar(c)) { - result.id = .Invalid; + result.tag = .Invalid; } break; }, @@ -1258,7 +1258,7 @@ pub const Tokenizer = struct { state = .float_fraction_hex; }, else => { - result.id = .Invalid; + result.tag = .Invalid; break; }, }, @@ -1272,7 +1272,7 @@ pub const Tokenizer = struct { '0'...'9', 'a'...'f', 'A'...'F' => {}, else => { if (isIdentifierChar(c)) { - result.id = .Invalid; + result.tag = .Invalid; } break; }, @@ -1292,7 +1292,7 @@ pub const Tokenizer = struct { state = .float_exponent_num; }, else => { - result.id = .Invalid; + result.tag = .Invalid; break; }, }, @@ -1303,7 +1303,7 @@ pub const Tokenizer = struct { '0'...'9' => {}, else => { if (isIdentifierChar(c)) { - result.id = .Invalid; + result.tag = .Invalid; } break; }, @@ -1327,18 +1327,18 @@ pub const Tokenizer = struct { => {}, .identifier => { - if (Token.getKeyword(self.buffer[result.loc.start..self.index])) |id| { - result.id = id; + if (Token.getKeyword(self.buffer[result.loc.start..self.index])) |tag| { + result.tag = tag; } }, .line_comment, .line_comment_start => { - result.id = .LineComment; + result.tag = .LineComment; }, .doc_comment, .doc_comment_start => { - result.id = .DocComment; + result.tag = .DocComment; }, .container_doc_comment => { - result.id = .ContainerDocComment; + result.tag = .ContainerDocComment; }, 
.int_literal_dec_no_underscore, @@ -1361,76 +1361,76 @@ pub const Tokenizer = struct { .char_literal_unicode, .string_literal_backslash, => { - result.id = .Invalid; + result.tag = .Invalid; }, .equal => { - result.id = .Equal; + result.tag = .Equal; }, .bang => { - result.id = .Bang; + result.tag = .Bang; }, .minus => { - result.id = .Minus; + result.tag = .Minus; }, .slash => { - result.id = .Slash; + result.tag = .Slash; }, .zero => { - result.id = .IntegerLiteral; + result.tag = .IntegerLiteral; }, .ampersand => { - result.id = .Ampersand; + result.tag = .Ampersand; }, .period => { - result.id = .Period; + result.tag = .Period; }, .period_2 => { - result.id = .Ellipsis2; + result.tag = .Ellipsis2; }, .period_asterisk => { - result.id = .PeriodAsterisk; + result.tag = .PeriodAsterisk; }, .pipe => { - result.id = .Pipe; + result.tag = .Pipe; }, .angle_bracket_angle_bracket_right => { - result.id = .AngleBracketAngleBracketRight; + result.tag = .AngleBracketAngleBracketRight; }, .angle_bracket_right => { - result.id = .AngleBracketRight; + result.tag = .AngleBracketRight; }, .angle_bracket_angle_bracket_left => { - result.id = .AngleBracketAngleBracketLeft; + result.tag = .AngleBracketAngleBracketLeft; }, .angle_bracket_left => { - result.id = .AngleBracketLeft; + result.tag = .AngleBracketLeft; }, .plus_percent => { - result.id = .PlusPercent; + result.tag = .PlusPercent; }, .plus => { - result.id = .Plus; + result.tag = .Plus; }, .percent => { - result.id = .Percent; + result.tag = .Percent; }, .caret => { - result.id = .Caret; + result.tag = .Caret; }, .asterisk_percent => { - result.id = .AsteriskPercent; + result.tag = .AsteriskPercent; }, .asterisk => { - result.id = .Asterisk; + result.tag = .Asterisk; }, .minus_percent => { - result.id = .MinusPercent; + result.tag = .MinusPercent; }, } } - if (result.id == .Eof) { + if (result.tag == .Eof) { if (self.pending_invalid_token) |token| { self.pending_invalid_token = null; return token; @@ -1446,7 +1446,7 @@ 
pub const Tokenizer = struct { const invalid_length = self.getInvalidCharacterLength(); if (invalid_length == 0) return; self.pending_invalid_token = .{ - .id = .Invalid, + .tag = .Invalid, .loc = .{ .start = self.index, .end = self.index + invalid_length, @@ -1493,14 +1493,14 @@ pub const Tokenizer = struct { }; test "tokenizer" { - testTokenize("test", &[_]Token.Id{.Keyword_test}); + testTokenize("test", &[_]Token.Tag{.Keyword_test}); } test "tokenizer - unknown length pointer and then c pointer" { testTokenize( \\[*]u8 \\[*c]u8 - , &[_]Token.Id{ + , &[_]Token.Tag{ .LBracket, .Asterisk, .RBracket, @@ -1516,70 +1516,70 @@ test "tokenizer - unknown length pointer and then c pointer" { test "tokenizer - char literal with hex escape" { testTokenize( \\'\x1b' - , &[_]Token.Id{.CharLiteral}); + , &[_]Token.Tag{.CharLiteral}); testTokenize( \\'\x1' - , &[_]Token.Id{ .Invalid, .Invalid }); + , &[_]Token.Tag{ .Invalid, .Invalid }); } test "tokenizer - char literal with unicode escapes" { // Valid unicode escapes testTokenize( \\'\u{3}' - , &[_]Token.Id{.CharLiteral}); + , &[_]Token.Tag{.CharLiteral}); testTokenize( \\'\u{01}' - , &[_]Token.Id{.CharLiteral}); + , &[_]Token.Tag{.CharLiteral}); testTokenize( \\'\u{2a}' - , &[_]Token.Id{.CharLiteral}); + , &[_]Token.Tag{.CharLiteral}); testTokenize( \\'\u{3f9}' - , &[_]Token.Id{.CharLiteral}); + , &[_]Token.Tag{.CharLiteral}); testTokenize( \\'\u{6E09aBc1523}' - , &[_]Token.Id{.CharLiteral}); + , &[_]Token.Tag{.CharLiteral}); testTokenize( \\"\u{440}" - , &[_]Token.Id{.StringLiteral}); + , &[_]Token.Tag{.StringLiteral}); // Invalid unicode escapes testTokenize( \\'\u' - , &[_]Token.Id{.Invalid}); + , &[_]Token.Tag{.Invalid}); testTokenize( \\'\u{{' - , &[_]Token.Id{ .Invalid, .Invalid }); + , &[_]Token.Tag{ .Invalid, .Invalid }); testTokenize( \\'\u{}' - , &[_]Token.Id{ .Invalid, .Invalid }); + , &[_]Token.Tag{ .Invalid, .Invalid }); testTokenize( \\'\u{s}' - , &[_]Token.Id{ .Invalid, .Invalid }); + , &[_]Token.Tag{ .Invalid, 
.Invalid }); testTokenize( \\'\u{2z}' - , &[_]Token.Id{ .Invalid, .Invalid }); + , &[_]Token.Tag{ .Invalid, .Invalid }); testTokenize( \\'\u{4a' - , &[_]Token.Id{.Invalid}); + , &[_]Token.Tag{.Invalid}); // Test old-style unicode literals testTokenize( \\'\u0333' - , &[_]Token.Id{ .Invalid, .Invalid }); + , &[_]Token.Tag{ .Invalid, .Invalid }); testTokenize( \\'\U0333' - , &[_]Token.Id{ .Invalid, .IntegerLiteral, .Invalid }); + , &[_]Token.Tag{ .Invalid, .IntegerLiteral, .Invalid }); } test "tokenizer - char literal with unicode code point" { testTokenize( \\'💩' - , &[_]Token.Id{.CharLiteral}); + , &[_]Token.Tag{.CharLiteral}); } test "tokenizer - float literal e exponent" { - testTokenize("a = 4.94065645841246544177e-324;\n", &[_]Token.Id{ + testTokenize("a = 4.94065645841246544177e-324;\n", &[_]Token.Tag{ .Identifier, .Equal, .FloatLiteral, @@ -1588,7 +1588,7 @@ test "tokenizer - float literal e exponent" { } test "tokenizer - float literal p exponent" { - testTokenize("a = 0x1.a827999fcef32p+1022;\n", &[_]Token.Id{ + testTokenize("a = 0x1.a827999fcef32p+1022;\n", &[_]Token.Tag{ .Identifier, .Equal, .FloatLiteral, @@ -1597,71 +1597,71 @@ test "tokenizer - float literal p exponent" { } test "tokenizer - chars" { - testTokenize("'c'", &[_]Token.Id{.CharLiteral}); + testTokenize("'c'", &[_]Token.Tag{.CharLiteral}); } test "tokenizer - invalid token characters" { - testTokenize("#", &[_]Token.Id{.Invalid}); - testTokenize("`", &[_]Token.Id{.Invalid}); - testTokenize("'c", &[_]Token.Id{.Invalid}); - testTokenize("'", &[_]Token.Id{.Invalid}); - testTokenize("''", &[_]Token.Id{ .Invalid, .Invalid }); + testTokenize("#", &[_]Token.Tag{.Invalid}); + testTokenize("`", &[_]Token.Tag{.Invalid}); + testTokenize("'c", &[_]Token.Tag{.Invalid}); + testTokenize("'", &[_]Token.Tag{.Invalid}); + testTokenize("''", &[_]Token.Tag{ .Invalid, .Invalid }); } test "tokenizer - invalid literal/comment characters" { - testTokenize("\"\x00\"", &[_]Token.Id{ + testTokenize("\"\x00\"", 
&[_]Token.Tag{ .StringLiteral, .Invalid, }); - testTokenize("//\x00", &[_]Token.Id{ + testTokenize("//\x00", &[_]Token.Tag{ .LineComment, .Invalid, }); - testTokenize("//\x1f", &[_]Token.Id{ + testTokenize("//\x1f", &[_]Token.Tag{ .LineComment, .Invalid, }); - testTokenize("//\x7f", &[_]Token.Id{ + testTokenize("//\x7f", &[_]Token.Tag{ .LineComment, .Invalid, }); } test "tokenizer - utf8" { - testTokenize("//\xc2\x80", &[_]Token.Id{.LineComment}); - testTokenize("//\xf4\x8f\xbf\xbf", &[_]Token.Id{.LineComment}); + testTokenize("//\xc2\x80", &[_]Token.Tag{.LineComment}); + testTokenize("//\xf4\x8f\xbf\xbf", &[_]Token.Tag{.LineComment}); } test "tokenizer - invalid utf8" { - testTokenize("//\x80", &[_]Token.Id{ + testTokenize("//\x80", &[_]Token.Tag{ .LineComment, .Invalid, }); - testTokenize("//\xbf", &[_]Token.Id{ + testTokenize("//\xbf", &[_]Token.Tag{ .LineComment, .Invalid, }); - testTokenize("//\xf8", &[_]Token.Id{ + testTokenize("//\xf8", &[_]Token.Tag{ .LineComment, .Invalid, }); - testTokenize("//\xff", &[_]Token.Id{ + testTokenize("//\xff", &[_]Token.Tag{ .LineComment, .Invalid, }); - testTokenize("//\xc2\xc0", &[_]Token.Id{ + testTokenize("//\xc2\xc0", &[_]Token.Tag{ .LineComment, .Invalid, }); - testTokenize("//\xe0", &[_]Token.Id{ + testTokenize("//\xe0", &[_]Token.Tag{ .LineComment, .Invalid, }); - testTokenize("//\xf0", &[_]Token.Id{ + testTokenize("//\xf0", &[_]Token.Tag{ .LineComment, .Invalid, }); - testTokenize("//\xf0\x90\x80\xc0", &[_]Token.Id{ + testTokenize("//\xf0\x90\x80\xc0", &[_]Token.Tag{ .LineComment, .Invalid, }); @@ -1669,28 +1669,28 @@ test "tokenizer - invalid utf8" { test "tokenizer - illegal unicode codepoints" { // unicode newline characters.U+0085, U+2028, U+2029 - testTokenize("//\xc2\x84", &[_]Token.Id{.LineComment}); - testTokenize("//\xc2\x85", &[_]Token.Id{ + testTokenize("//\xc2\x84", &[_]Token.Tag{.LineComment}); + testTokenize("//\xc2\x85", &[_]Token.Tag{ .LineComment, .Invalid, }); - testTokenize("//\xc2\x86", 
&[_]Token.Id{.LineComment}); - testTokenize("//\xe2\x80\xa7", &[_]Token.Id{.LineComment}); - testTokenize("//\xe2\x80\xa8", &[_]Token.Id{ + testTokenize("//\xc2\x86", &[_]Token.Tag{.LineComment}); + testTokenize("//\xe2\x80\xa7", &[_]Token.Tag{.LineComment}); + testTokenize("//\xe2\x80\xa8", &[_]Token.Tag{ .LineComment, .Invalid, }); - testTokenize("//\xe2\x80\xa9", &[_]Token.Id{ + testTokenize("//\xe2\x80\xa9", &[_]Token.Tag{ .LineComment, .Invalid, }); - testTokenize("//\xe2\x80\xaa", &[_]Token.Id{.LineComment}); + testTokenize("//\xe2\x80\xaa", &[_]Token.Tag{.LineComment}); } test "tokenizer - string identifier and builtin fns" { testTokenize( \\const @"if" = @import("std"); - , &[_]Token.Id{ + , &[_]Token.Tag{ .Keyword_const, .Identifier, .Equal, @@ -1705,7 +1705,7 @@ test "tokenizer - string identifier and builtin fns" { test "tokenizer - multiline string literal with literal tab" { testTokenize( \\\\foo bar - , &[_]Token.Id{ + , &[_]Token.Tag{ .MultilineStringLiteralLine, }); } @@ -1718,7 +1718,7 @@ test "tokenizer - comments with literal tab" { \\// foo \\/// foo \\/// /foo - , &[_]Token.Id{ + , &[_]Token.Tag{ .LineComment, .ContainerDocComment, .DocComment, @@ -1729,21 +1729,21 @@ test "tokenizer - comments with literal tab" { } test "tokenizer - pipe and then invalid" { - testTokenize("||=", &[_]Token.Id{ + testTokenize("||=", &[_]Token.Tag{ .PipePipe, .Equal, }); } test "tokenizer - line comment and doc comment" { - testTokenize("//", &[_]Token.Id{.LineComment}); - testTokenize("// a / b", &[_]Token.Id{.LineComment}); - testTokenize("// /", &[_]Token.Id{.LineComment}); - testTokenize("/// a", &[_]Token.Id{.DocComment}); - testTokenize("///", &[_]Token.Id{.DocComment}); - testTokenize("////", &[_]Token.Id{.LineComment}); - testTokenize("//!", &[_]Token.Id{.ContainerDocComment}); - testTokenize("//!!", &[_]Token.Id{.ContainerDocComment}); + testTokenize("//", &[_]Token.Tag{.LineComment}); + testTokenize("// a / b", &[_]Token.Tag{.LineComment}); + 
testTokenize("// /", &[_]Token.Tag{.LineComment}); + testTokenize("/// a", &[_]Token.Tag{.DocComment}); + testTokenize("///", &[_]Token.Tag{.DocComment}); + testTokenize("////", &[_]Token.Tag{.LineComment}); + testTokenize("//!", &[_]Token.Tag{.ContainerDocComment}); + testTokenize("//!!", &[_]Token.Tag{.ContainerDocComment}); } test "tokenizer - line comment followed by identifier" { @@ -1751,7 +1751,7 @@ test "tokenizer - line comment followed by identifier" { \\ Unexpected, \\ // another \\ Another, - , &[_]Token.Id{ + , &[_]Token.Tag{ .Identifier, .Comma, .LineComment, @@ -1761,14 +1761,14 @@ test "tokenizer - line comment followed by identifier" { } test "tokenizer - UTF-8 BOM is recognized and skipped" { - testTokenize("\xEF\xBB\xBFa;\n", &[_]Token.Id{ + testTokenize("\xEF\xBB\xBFa;\n", &[_]Token.Tag{ .Identifier, .Semicolon, }); } test "correctly parse pointer assignment" { - testTokenize("b.*=3;\n", &[_]Token.Id{ + testTokenize("b.*=3;\n", &[_]Token.Tag{ .Identifier, .PeriodAsterisk, .Equal, @@ -1778,14 +1778,14 @@ test "correctly parse pointer assignment" { } test "correctly parse pointer dereference followed by asterisk" { - testTokenize("\"b\".* ** 10", &[_]Token.Id{ + testTokenize("\"b\".* ** 10", &[_]Token.Tag{ .StringLiteral, .PeriodAsterisk, .AsteriskAsterisk, .IntegerLiteral, }); - testTokenize("(\"b\".*)** 10", &[_]Token.Id{ + testTokenize("(\"b\".*)** 10", &[_]Token.Tag{ .LParen, .StringLiteral, .PeriodAsterisk, @@ -1794,7 +1794,7 @@ test "correctly parse pointer dereference followed by asterisk" { .IntegerLiteral, }); - testTokenize("\"b\".*** 10", &[_]Token.Id{ + testTokenize("\"b\".*** 10", &[_]Token.Tag{ .StringLiteral, .Invalid_periodasterisks, .AsteriskAsterisk, @@ -1803,252 +1803,252 @@ test "correctly parse pointer dereference followed by asterisk" { } test "tokenizer - range literals" { - testTokenize("0...9", &[_]Token.Id{ .IntegerLiteral, .Ellipsis3, .IntegerLiteral }); - testTokenize("'0'...'9'", &[_]Token.Id{ .CharLiteral, .Ellipsis3, 
.CharLiteral }); - testTokenize("0x00...0x09", &[_]Token.Id{ .IntegerLiteral, .Ellipsis3, .IntegerLiteral }); - testTokenize("0b00...0b11", &[_]Token.Id{ .IntegerLiteral, .Ellipsis3, .IntegerLiteral }); - testTokenize("0o00...0o11", &[_]Token.Id{ .IntegerLiteral, .Ellipsis3, .IntegerLiteral }); + testTokenize("0...9", &[_]Token.Tag{ .IntegerLiteral, .Ellipsis3, .IntegerLiteral }); + testTokenize("'0'...'9'", &[_]Token.Tag{ .CharLiteral, .Ellipsis3, .CharLiteral }); + testTokenize("0x00...0x09", &[_]Token.Tag{ .IntegerLiteral, .Ellipsis3, .IntegerLiteral }); + testTokenize("0b00...0b11", &[_]Token.Tag{ .IntegerLiteral, .Ellipsis3, .IntegerLiteral }); + testTokenize("0o00...0o11", &[_]Token.Tag{ .IntegerLiteral, .Ellipsis3, .IntegerLiteral }); } test "tokenizer - number literals decimal" { - testTokenize("0", &[_]Token.Id{.IntegerLiteral}); - testTokenize("1", &[_]Token.Id{.IntegerLiteral}); - testTokenize("2", &[_]Token.Id{.IntegerLiteral}); - testTokenize("3", &[_]Token.Id{.IntegerLiteral}); - testTokenize("4", &[_]Token.Id{.IntegerLiteral}); - testTokenize("5", &[_]Token.Id{.IntegerLiteral}); - testTokenize("6", &[_]Token.Id{.IntegerLiteral}); - testTokenize("7", &[_]Token.Id{.IntegerLiteral}); - testTokenize("8", &[_]Token.Id{.IntegerLiteral}); - testTokenize("9", &[_]Token.Id{.IntegerLiteral}); - testTokenize("1..", &[_]Token.Id{ .IntegerLiteral, .Ellipsis2 }); - testTokenize("0a", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("9b", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("1z", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("1z_1", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("9z3", &[_]Token.Id{ .Invalid, .Identifier }); + testTokenize("0", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("1", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("2", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("3", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("4", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("5", 
&[_]Token.Tag{.IntegerLiteral}); + testTokenize("6", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("7", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("8", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("9", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("1..", &[_]Token.Tag{ .IntegerLiteral, .Ellipsis2 }); + testTokenize("0a", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("9b", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("1z", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("1z_1", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("9z3", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0_0", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0001", &[_]Token.Id{.IntegerLiteral}); - testTokenize("01234567890", &[_]Token.Id{.IntegerLiteral}); - testTokenize("012_345_6789_0", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0_1_2_3_4_5_6_7_8_9_0", &[_]Token.Id{.IntegerLiteral}); + testTokenize("0_0", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0001", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("01234567890", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("012_345_6789_0", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0_1_2_3_4_5_6_7_8_9_0", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("00_", &[_]Token.Id{.Invalid}); - testTokenize("0_0_", &[_]Token.Id{.Invalid}); - testTokenize("0__0", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0_0f", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0_0_f", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0_0_f_00", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("1_,", &[_]Token.Id{ .Invalid, .Comma }); + testTokenize("00_", &[_]Token.Tag{.Invalid}); + testTokenize("0_0_", &[_]Token.Tag{.Invalid}); + testTokenize("0__0", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0_0f", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0_0_f", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0_0_f_00", &[_]Token.Tag{ .Invalid, 
.Identifier }); + testTokenize("1_,", &[_]Token.Tag{ .Invalid, .Comma }); - testTokenize("1.", &[_]Token.Id{.FloatLiteral}); - testTokenize("0.0", &[_]Token.Id{.FloatLiteral}); - testTokenize("1.0", &[_]Token.Id{.FloatLiteral}); - testTokenize("10.0", &[_]Token.Id{.FloatLiteral}); - testTokenize("0e0", &[_]Token.Id{.FloatLiteral}); - testTokenize("1e0", &[_]Token.Id{.FloatLiteral}); - testTokenize("1e100", &[_]Token.Id{.FloatLiteral}); - testTokenize("1.e100", &[_]Token.Id{.FloatLiteral}); - testTokenize("1.0e100", &[_]Token.Id{.FloatLiteral}); - testTokenize("1.0e+100", &[_]Token.Id{.FloatLiteral}); - testTokenize("1.0e-100", &[_]Token.Id{.FloatLiteral}); - testTokenize("1_0_0_0.0_0_0_0_0_1e1_0_0_0", &[_]Token.Id{.FloatLiteral}); - testTokenize("1.+", &[_]Token.Id{ .FloatLiteral, .Plus }); + testTokenize("1.", &[_]Token.Tag{.FloatLiteral}); + testTokenize("0.0", &[_]Token.Tag{.FloatLiteral}); + testTokenize("1.0", &[_]Token.Tag{.FloatLiteral}); + testTokenize("10.0", &[_]Token.Tag{.FloatLiteral}); + testTokenize("0e0", &[_]Token.Tag{.FloatLiteral}); + testTokenize("1e0", &[_]Token.Tag{.FloatLiteral}); + testTokenize("1e100", &[_]Token.Tag{.FloatLiteral}); + testTokenize("1.e100", &[_]Token.Tag{.FloatLiteral}); + testTokenize("1.0e100", &[_]Token.Tag{.FloatLiteral}); + testTokenize("1.0e+100", &[_]Token.Tag{.FloatLiteral}); + testTokenize("1.0e-100", &[_]Token.Tag{.FloatLiteral}); + testTokenize("1_0_0_0.0_0_0_0_0_1e1_0_0_0", &[_]Token.Tag{.FloatLiteral}); + testTokenize("1.+", &[_]Token.Tag{ .FloatLiteral, .Plus }); - testTokenize("1e", &[_]Token.Id{.Invalid}); - testTokenize("1.0e1f0", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("1.0p100", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("1.0p-100", &[_]Token.Id{ .Invalid, .Identifier, .Minus, .IntegerLiteral }); - testTokenize("1.0p1f0", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("1.0_,", &[_]Token.Id{ .Invalid, .Comma }); - testTokenize("1_.0", &[_]Token.Id{ .Invalid, .Period, 
.IntegerLiteral }); - testTokenize("1._", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("1.a", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("1.z", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("1._0", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("1._+", &[_]Token.Id{ .Invalid, .Identifier, .Plus }); - testTokenize("1._e", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("1.0e", &[_]Token.Id{.Invalid}); - testTokenize("1.0e,", &[_]Token.Id{ .Invalid, .Comma }); - testTokenize("1.0e_", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("1.0e+_", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("1.0e-_", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("1.0e0_+", &[_]Token.Id{ .Invalid, .Plus }); + testTokenize("1e", &[_]Token.Tag{.Invalid}); + testTokenize("1.0e1f0", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("1.0p100", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("1.0p-100", &[_]Token.Tag{ .Invalid, .Identifier, .Minus, .IntegerLiteral }); + testTokenize("1.0p1f0", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("1.0_,", &[_]Token.Tag{ .Invalid, .Comma }); + testTokenize("1_.0", &[_]Token.Tag{ .Invalid, .Period, .IntegerLiteral }); + testTokenize("1._", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("1.a", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("1.z", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("1._0", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("1._+", &[_]Token.Tag{ .Invalid, .Identifier, .Plus }); + testTokenize("1._e", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("1.0e", &[_]Token.Tag{.Invalid}); + testTokenize("1.0e,", &[_]Token.Tag{ .Invalid, .Comma }); + testTokenize("1.0e_", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("1.0e+_", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("1.0e-_", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("1.0e0_+", &[_]Token.Tag{ .Invalid, .Plus }); } 
test "tokenizer - number literals binary" { - testTokenize("0b0", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0b1", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0b2", &[_]Token.Id{ .Invalid, .IntegerLiteral }); - testTokenize("0b3", &[_]Token.Id{ .Invalid, .IntegerLiteral }); - testTokenize("0b4", &[_]Token.Id{ .Invalid, .IntegerLiteral }); - testTokenize("0b5", &[_]Token.Id{ .Invalid, .IntegerLiteral }); - testTokenize("0b6", &[_]Token.Id{ .Invalid, .IntegerLiteral }); - testTokenize("0b7", &[_]Token.Id{ .Invalid, .IntegerLiteral }); - testTokenize("0b8", &[_]Token.Id{ .Invalid, .IntegerLiteral }); - testTokenize("0b9", &[_]Token.Id{ .Invalid, .IntegerLiteral }); - testTokenize("0ba", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0bb", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0bc", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0bd", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0be", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0bf", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0bz", &[_]Token.Id{ .Invalid, .Identifier }); + testTokenize("0b0", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0b1", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0b2", &[_]Token.Tag{ .Invalid, .IntegerLiteral }); + testTokenize("0b3", &[_]Token.Tag{ .Invalid, .IntegerLiteral }); + testTokenize("0b4", &[_]Token.Tag{ .Invalid, .IntegerLiteral }); + testTokenize("0b5", &[_]Token.Tag{ .Invalid, .IntegerLiteral }); + testTokenize("0b6", &[_]Token.Tag{ .Invalid, .IntegerLiteral }); + testTokenize("0b7", &[_]Token.Tag{ .Invalid, .IntegerLiteral }); + testTokenize("0b8", &[_]Token.Tag{ .Invalid, .IntegerLiteral }); + testTokenize("0b9", &[_]Token.Tag{ .Invalid, .IntegerLiteral }); + testTokenize("0ba", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0bb", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0bc", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0bd", &[_]Token.Tag{ .Invalid, 
.Identifier }); + testTokenize("0be", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0bf", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0bz", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0b0000_0000", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0b1111_1111", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0b10_10_10_10", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0b0_1_0_1_0_1_0_1", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0b1.", &[_]Token.Id{ .IntegerLiteral, .Period }); - testTokenize("0b1.0", &[_]Token.Id{ .IntegerLiteral, .Period, .IntegerLiteral }); + testTokenize("0b0000_0000", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0b1111_1111", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0b10_10_10_10", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0b0_1_0_1_0_1_0_1", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0b1.", &[_]Token.Tag{ .IntegerLiteral, .Period }); + testTokenize("0b1.0", &[_]Token.Tag{ .IntegerLiteral, .Period, .IntegerLiteral }); - testTokenize("0B0", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0b_", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0b_0", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0b1_", &[_]Token.Id{.Invalid}); - testTokenize("0b0__1", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0b0_1_", &[_]Token.Id{.Invalid}); - testTokenize("0b1e", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0b1p", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0b1e0", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0b1p0", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0b1_,", &[_]Token.Id{ .Invalid, .Comma }); + testTokenize("0B0", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0b_", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0b_0", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0b1_", &[_]Token.Tag{.Invalid}); + testTokenize("0b0__1", &[_]Token.Tag{ .Invalid, .Identifier }); + 
testTokenize("0b0_1_", &[_]Token.Tag{.Invalid}); + testTokenize("0b1e", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0b1p", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0b1e0", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0b1p0", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0b1_,", &[_]Token.Tag{ .Invalid, .Comma }); } test "tokenizer - number literals octal" { - testTokenize("0o0", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0o1", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0o2", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0o3", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0o4", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0o5", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0o6", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0o7", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0o8", &[_]Token.Id{ .Invalid, .IntegerLiteral }); - testTokenize("0o9", &[_]Token.Id{ .Invalid, .IntegerLiteral }); - testTokenize("0oa", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0ob", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0oc", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0od", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0oe", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0of", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0oz", &[_]Token.Id{ .Invalid, .Identifier }); + testTokenize("0o0", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0o1", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0o2", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0o3", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0o4", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0o5", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0o6", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0o7", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0o8", &[_]Token.Tag{ .Invalid, .IntegerLiteral }); + testTokenize("0o9", &[_]Token.Tag{ .Invalid, .IntegerLiteral }); + 
testTokenize("0oa", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0ob", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0oc", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0od", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0oe", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0of", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0oz", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0o01234567", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0o0123_4567", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0o01_23_45_67", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0o0_1_2_3_4_5_6_7", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0o7.", &[_]Token.Id{ .IntegerLiteral, .Period }); - testTokenize("0o7.0", &[_]Token.Id{ .IntegerLiteral, .Period, .IntegerLiteral }); + testTokenize("0o01234567", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0o0123_4567", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0o01_23_45_67", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0o0_1_2_3_4_5_6_7", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0o7.", &[_]Token.Tag{ .IntegerLiteral, .Period }); + testTokenize("0o7.0", &[_]Token.Tag{ .IntegerLiteral, .Period, .IntegerLiteral }); - testTokenize("0O0", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0o_", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0o_0", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0o1_", &[_]Token.Id{.Invalid}); - testTokenize("0o0__1", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0o0_1_", &[_]Token.Id{.Invalid}); - testTokenize("0o1e", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0o1p", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0o1e0", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0o1p0", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0o_,", &[_]Token.Id{ .Invalid, .Identifier, .Comma }); + testTokenize("0O0", &[_]Token.Tag{ .Invalid, .Identifier }); + 
testTokenize("0o_", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0o_0", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0o1_", &[_]Token.Tag{.Invalid}); + testTokenize("0o0__1", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0o0_1_", &[_]Token.Tag{.Invalid}); + testTokenize("0o1e", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0o1p", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0o1e0", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0o1p0", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0o_,", &[_]Token.Tag{ .Invalid, .Identifier, .Comma }); } test "tokenizer - number literals hexadeciaml" { - testTokenize("0x0", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0x1", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0x2", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0x3", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0x4", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0x5", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0x6", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0x7", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0x8", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0x9", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0xa", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0xb", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0xc", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0xd", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0xe", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0xf", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0xA", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0xB", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0xC", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0xD", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0xE", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0xF", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0x0z", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0xz", &[_]Token.Id{ .Invalid, .Identifier }); + 
testTokenize("0x0", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0x1", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0x2", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0x3", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0x4", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0x5", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0x6", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0x7", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0x8", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0x9", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0xa", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0xb", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0xc", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0xd", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0xe", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0xf", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0xA", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0xB", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0xC", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0xD", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0xE", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0xF", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0x0z", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0xz", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0x0123456789ABCDEF", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0x0123_4567_89AB_CDEF", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0x01_23_45_67_89AB_CDE_F", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0x0_1_2_3_4_5_6_7_8_9_A_B_C_D_E_F", &[_]Token.Id{.IntegerLiteral}); + testTokenize("0x0123456789ABCDEF", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0x0123_4567_89AB_CDEF", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0x01_23_45_67_89AB_CDE_F", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0x0_1_2_3_4_5_6_7_8_9_A_B_C_D_E_F", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0X0", &[_]Token.Id{ .Invalid, .Identifier 
}); - testTokenize("0x_", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0x_1", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0x1_", &[_]Token.Id{.Invalid}); - testTokenize("0x0__1", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0x0_1_", &[_]Token.Id{.Invalid}); - testTokenize("0x_,", &[_]Token.Id{ .Invalid, .Identifier, .Comma }); + testTokenize("0X0", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0x_", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0x_1", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0x1_", &[_]Token.Tag{.Invalid}); + testTokenize("0x0__1", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0x0_1_", &[_]Token.Tag{.Invalid}); + testTokenize("0x_,", &[_]Token.Tag{ .Invalid, .Identifier, .Comma }); - testTokenize("0x1.", &[_]Token.Id{.FloatLiteral}); - testTokenize("0x1.0", &[_]Token.Id{.FloatLiteral}); - testTokenize("0xF.", &[_]Token.Id{.FloatLiteral}); - testTokenize("0xF.0", &[_]Token.Id{.FloatLiteral}); - testTokenize("0xF.F", &[_]Token.Id{.FloatLiteral}); - testTokenize("0xF.Fp0", &[_]Token.Id{.FloatLiteral}); - testTokenize("0xF.FP0", &[_]Token.Id{.FloatLiteral}); - testTokenize("0x1p0", &[_]Token.Id{.FloatLiteral}); - testTokenize("0xfp0", &[_]Token.Id{.FloatLiteral}); - testTokenize("0x1.+0xF.", &[_]Token.Id{ .FloatLiteral, .Plus, .FloatLiteral }); + testTokenize("0x1.", &[_]Token.Tag{.FloatLiteral}); + testTokenize("0x1.0", &[_]Token.Tag{.FloatLiteral}); + testTokenize("0xF.", &[_]Token.Tag{.FloatLiteral}); + testTokenize("0xF.0", &[_]Token.Tag{.FloatLiteral}); + testTokenize("0xF.F", &[_]Token.Tag{.FloatLiteral}); + testTokenize("0xF.Fp0", &[_]Token.Tag{.FloatLiteral}); + testTokenize("0xF.FP0", &[_]Token.Tag{.FloatLiteral}); + testTokenize("0x1p0", &[_]Token.Tag{.FloatLiteral}); + testTokenize("0xfp0", &[_]Token.Tag{.FloatLiteral}); + testTokenize("0x1.+0xF.", &[_]Token.Tag{ .FloatLiteral, .Plus, .FloatLiteral }); - testTokenize("0x0123456.789ABCDEF", 
&[_]Token.Id{.FloatLiteral}); - testTokenize("0x0_123_456.789_ABC_DEF", &[_]Token.Id{.FloatLiteral}); - testTokenize("0x0_1_2_3_4_5_6.7_8_9_A_B_C_D_E_F", &[_]Token.Id{.FloatLiteral}); - testTokenize("0x0p0", &[_]Token.Id{.FloatLiteral}); - testTokenize("0x0.0p0", &[_]Token.Id{.FloatLiteral}); - testTokenize("0xff.ffp10", &[_]Token.Id{.FloatLiteral}); - testTokenize("0xff.ffP10", &[_]Token.Id{.FloatLiteral}); - testTokenize("0xff.p10", &[_]Token.Id{.FloatLiteral}); - testTokenize("0xffp10", &[_]Token.Id{.FloatLiteral}); - testTokenize("0xff_ff.ff_ffp1_0_0_0", &[_]Token.Id{.FloatLiteral}); - testTokenize("0xf_f_f_f.f_f_f_fp+1_000", &[_]Token.Id{.FloatLiteral}); - testTokenize("0xf_f_f_f.f_f_f_fp-1_00_0", &[_]Token.Id{.FloatLiteral}); + testTokenize("0x0123456.789ABCDEF", &[_]Token.Tag{.FloatLiteral}); + testTokenize("0x0_123_456.789_ABC_DEF", &[_]Token.Tag{.FloatLiteral}); + testTokenize("0x0_1_2_3_4_5_6.7_8_9_A_B_C_D_E_F", &[_]Token.Tag{.FloatLiteral}); + testTokenize("0x0p0", &[_]Token.Tag{.FloatLiteral}); + testTokenize("0x0.0p0", &[_]Token.Tag{.FloatLiteral}); + testTokenize("0xff.ffp10", &[_]Token.Tag{.FloatLiteral}); + testTokenize("0xff.ffP10", &[_]Token.Tag{.FloatLiteral}); + testTokenize("0xff.p10", &[_]Token.Tag{.FloatLiteral}); + testTokenize("0xffp10", &[_]Token.Tag{.FloatLiteral}); + testTokenize("0xff_ff.ff_ffp1_0_0_0", &[_]Token.Tag{.FloatLiteral}); + testTokenize("0xf_f_f_f.f_f_f_fp+1_000", &[_]Token.Tag{.FloatLiteral}); + testTokenize("0xf_f_f_f.f_f_f_fp-1_00_0", &[_]Token.Tag{.FloatLiteral}); - testTokenize("0x1e", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0x1e0", &[_]Token.Id{.IntegerLiteral}); - testTokenize("0x1p", &[_]Token.Id{.Invalid}); - testTokenize("0xfp0z1", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0xff.ffpff", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0x0.p", &[_]Token.Id{.Invalid}); - testTokenize("0x0.z", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0x0._", &[_]Token.Id{ .Invalid, 
.Identifier }); - testTokenize("0x0_.0", &[_]Token.Id{ .Invalid, .Period, .IntegerLiteral }); - testTokenize("0x0_.0.0", &[_]Token.Id{ .Invalid, .Period, .FloatLiteral }); - testTokenize("0x0._0", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0x0.0_", &[_]Token.Id{.Invalid}); - testTokenize("0x0_p0", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0x0_.p0", &[_]Token.Id{ .Invalid, .Period, .Identifier }); - testTokenize("0x0._p0", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0x0.0_p0", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0x0._0p0", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0x0.0p_0", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0x0.0p+_0", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0x0.0p-_0", &[_]Token.Id{ .Invalid, .Identifier }); - testTokenize("0x0.0p0_", &[_]Token.Id{ .Invalid, .Eof }); + testTokenize("0x1e", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0x1e0", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0x1p", &[_]Token.Tag{.Invalid}); + testTokenize("0xfp0z1", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0xff.ffpff", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0x0.p", &[_]Token.Tag{.Invalid}); + testTokenize("0x0.z", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0x0._", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0x0_.0", &[_]Token.Tag{ .Invalid, .Period, .IntegerLiteral }); + testTokenize("0x0_.0.0", &[_]Token.Tag{ .Invalid, .Period, .FloatLiteral }); + testTokenize("0x0._0", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0x0.0_", &[_]Token.Tag{.Invalid}); + testTokenize("0x0_p0", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0x0_.p0", &[_]Token.Tag{ .Invalid, .Period, .Identifier }); + testTokenize("0x0._p0", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0x0.0_p0", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0x0._0p0", &[_]Token.Tag{ .Invalid, .Identifier }); + 
testTokenize("0x0.0p_0", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0x0.0p+_0", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0x0.0p-_0", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0x0.0p0_", &[_]Token.Tag{ .Invalid, .Eof }); } -fn testTokenize(source: []const u8, expected_tokens: []const Token.Id) void { +fn testTokenize(source: []const u8, expected_tokens: []const Token.Tag) void { var tokenizer = Tokenizer.init(source); for (expected_tokens) |expected_token_id| { const token = tokenizer.next(); - if (token.id != expected_token_id) { - std.debug.panic("expected {s}, found {s}\n", .{ @tagName(expected_token_id), @tagName(token.id) }); + if (token.tag != expected_token_id) { + std.debug.panic("expected {s}, found {s}\n", .{ @tagName(expected_token_id), @tagName(token.tag) }); } } const last_token = tokenizer.next(); - std.testing.expect(last_token.id == .Eof); + std.testing.expect(last_token.tag == .Eof); } From bf8fafc37d4182196d108c773ae36c33a109d703 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Sun, 31 Jan 2021 21:57:48 -0700 Subject: [PATCH 004/173] stage2: tokenizer does not emit line comments anymore only std.zig.render cares about these, and it can find them in the original source easily enough. 
--- lib/std/zig/parse.zig | 1 - lib/std/zig/tokenizer.zig | 57 +++++++++++++-------------------------- 2 files changed, 19 insertions(+), 39 deletions(-) diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig index e0bef649bb..04c44f5c9c 100644 --- a/lib/std/zig/parse.zig +++ b/lib/std/zig/parse.zig @@ -28,7 +28,6 @@ pub fn parse(gpa: *Allocator, source: []const u8) Allocator.Error!Tree { var tokenizer = std.zig.Tokenizer.init(source); while (true) { const token = tokenizer.next(); - if (token.tag == .LineComment) continue; try tokens.append(gpa, .{ .tag = token.tag, .start = @intCast(u32, token.loc.start), diff --git a/lib/std/zig/tokenizer.zig b/lib/std/zig/tokenizer.zig index 68dfd987a4..8692713eb3 100644 --- a/lib/std/zig/tokenizer.zig +++ b/lib/std/zig/tokenizer.zig @@ -43,7 +43,6 @@ pub const Token = struct { .{ "if", .Keyword_if }, .{ "inline", .Keyword_inline }, .{ "noalias", .Keyword_noalias }, - .{ "noasync", .Keyword_nosuspend }, // TODO: remove this .{ "noinline", .Keyword_noinline }, .{ "nosuspend", .Keyword_nosuspend }, .{ "null", .Keyword_null }, @@ -141,10 +140,8 @@ pub const Token = struct { Tilde, IntegerLiteral, FloatLiteral, - LineComment, DocComment, ContainerDocComment, - ShebangLine, Keyword_align, Keyword_allowzero, Keyword_and, @@ -211,10 +208,8 @@ pub const Token = struct { .Builtin => "Builtin", .IntegerLiteral => "IntegerLiteral", .FloatLiteral => "FloatLiteral", - .LineComment => "LineComment", .DocComment => "DocComment", .ContainerDocComment => "ContainerDocComment", - .ShebangLine => "ShebangLine", .Bang => "!", .Pipe => "|", @@ -1016,7 +1011,6 @@ pub const Tokenizer = struct { .slash => switch (c) { '/' => { state = .line_comment_start; - result.tag = .LineComment; }, '=' => { result.tag = .SlashEqual; @@ -1036,7 +1030,7 @@ pub const Tokenizer = struct { result.tag = .ContainerDocComment; state = .container_doc_comment; }, - '\n' => break, + '\n' => state = .start, '\t', '\r' => state = .line_comment, else => { state = 
.line_comment; @@ -1061,7 +1055,12 @@ pub const Tokenizer = struct { self.checkLiteralCharacter(); }, }, - .line_comment, .doc_comment, .container_doc_comment => switch (c) { + .line_comment => switch (c) { + '\n' => state = .start, + '\t', '\r' => {}, + else => self.checkLiteralCharacter(), + }, + .doc_comment, .container_doc_comment => switch (c) { '\n' => break, '\t', '\r' => {}, else => self.checkLiteralCharacter(), @@ -1324,6 +1323,8 @@ pub const Tokenizer = struct { .string_literal, // find this error later .multiline_string_literal_line, .builtin, + .line_comment, + .line_comment_start, => {}, .identifier => { @@ -1331,9 +1332,6 @@ pub const Tokenizer = struct { result.tag = tag; } }, - .line_comment, .line_comment_start => { - result.tag = .LineComment; - }, .doc_comment, .doc_comment_start => { result.tag = .DocComment; }, @@ -1614,77 +1612,63 @@ test "tokenizer - invalid literal/comment characters" { .Invalid, }); testTokenize("//\x00", &[_]Token.Tag{ - .LineComment, .Invalid, }); testTokenize("//\x1f", &[_]Token.Tag{ - .LineComment, .Invalid, }); testTokenize("//\x7f", &[_]Token.Tag{ - .LineComment, .Invalid, }); } test "tokenizer - utf8" { - testTokenize("//\xc2\x80", &[_]Token.Tag{.LineComment}); - testTokenize("//\xf4\x8f\xbf\xbf", &[_]Token.Tag{.LineComment}); + testTokenize("//\xc2\x80", &[_]Token.Tag{}); + testTokenize("//\xf4\x8f\xbf\xbf", &[_]Token.Tag{}); } test "tokenizer - invalid utf8" { testTokenize("//\x80", &[_]Token.Tag{ - .LineComment, .Invalid, }); testTokenize("//\xbf", &[_]Token.Tag{ - .LineComment, .Invalid, }); testTokenize("//\xf8", &[_]Token.Tag{ - .LineComment, .Invalid, }); testTokenize("//\xff", &[_]Token.Tag{ - .LineComment, .Invalid, }); testTokenize("//\xc2\xc0", &[_]Token.Tag{ - .LineComment, .Invalid, }); testTokenize("//\xe0", &[_]Token.Tag{ - .LineComment, .Invalid, }); testTokenize("//\xf0", &[_]Token.Tag{ - .LineComment, .Invalid, }); testTokenize("//\xf0\x90\x80\xc0", &[_]Token.Tag{ - .LineComment, .Invalid, }); } 
test "tokenizer - illegal unicode codepoints" { // unicode newline characters.U+0085, U+2028, U+2029 - testTokenize("//\xc2\x84", &[_]Token.Tag{.LineComment}); + testTokenize("//\xc2\x84", &[_]Token.Tag{}); testTokenize("//\xc2\x85", &[_]Token.Tag{ - .LineComment, .Invalid, }); - testTokenize("//\xc2\x86", &[_]Token.Tag{.LineComment}); - testTokenize("//\xe2\x80\xa7", &[_]Token.Tag{.LineComment}); + testTokenize("//\xc2\x86", &[_]Token.Tag{}); + testTokenize("//\xe2\x80\xa7", &[_]Token.Tag{}); testTokenize("//\xe2\x80\xa8", &[_]Token.Tag{ - .LineComment, .Invalid, }); testTokenize("//\xe2\x80\xa9", &[_]Token.Tag{ - .LineComment, .Invalid, }); - testTokenize("//\xe2\x80\xaa", &[_]Token.Tag{.LineComment}); + testTokenize("//\xe2\x80\xaa", &[_]Token.Tag{}); } test "tokenizer - string identifier and builtin fns" { @@ -1719,10 +1703,8 @@ test "tokenizer - comments with literal tab" { \\/// foo \\/// /foo , &[_]Token.Tag{ - .LineComment, .ContainerDocComment, .DocComment, - .LineComment, .DocComment, .DocComment, }); @@ -1736,12 +1718,12 @@ test "tokenizer - pipe and then invalid" { } test "tokenizer - line comment and doc comment" { - testTokenize("//", &[_]Token.Tag{.LineComment}); - testTokenize("// a / b", &[_]Token.Tag{.LineComment}); - testTokenize("// /", &[_]Token.Tag{.LineComment}); + testTokenize("//", &[_]Token.Tag{}); + testTokenize("// a / b", &[_]Token.Tag{}); + testTokenize("// /", &[_]Token.Tag{}); testTokenize("/// a", &[_]Token.Tag{.DocComment}); testTokenize("///", &[_]Token.Tag{.DocComment}); - testTokenize("////", &[_]Token.Tag{.LineComment}); + testTokenize("////", &[_]Token.Tag{}); testTokenize("//!", &[_]Token.Tag{.ContainerDocComment}); testTokenize("//!!", &[_]Token.Tag{.ContainerDocComment}); } @@ -1754,7 +1736,6 @@ test "tokenizer - line comment followed by identifier" { , &[_]Token.Tag{ .Identifier, .Comma, - .LineComment, .Identifier, .Comma, }); From 20554d32c0a9e8adeb311645797e0d6873f4bbc0 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: 
Mon, 1 Feb 2021 17:23:49 -0700 Subject: [PATCH 005/173] zig fmt: start reworking with new memory layout * start implementation of ast.Tree.firstToken and lastToken * clarify some ast.Node doc comments * reimplement renderToken --- lib/std/zig/ast.zig | 300 +- lib/std/zig/parse.zig | 2 +- lib/std/zig/parser_test.zig | 7425 +++++++++++++++++------------------ lib/std/zig/render.zig | 4443 ++++++++++----------- lib/std/zig/tokenizer.zig | 31 +- 5 files changed, 6121 insertions(+), 6080 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 823e0312cd..3fd34cd03c 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -185,24 +185,293 @@ pub const Tree = struct { } } - /// Skips over comments. - pub fn prevToken(self: *const Tree, token_index: TokenIndex) TokenIndex { - const token_tags = self.tokens.items(.tag); - var index = token_index - 1; - while (token_tags[index] == .LineComment) { - index -= 1; + pub fn firstToken(tree: Tree, node: Node.Index) TokenIndex { + const tags = tree.nodes.items(.tag); + const datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); + switch (tags[node]) { + .Root => return 0, + + .UsingNamespace, + .TestDecl, + .ErrDefer, + .Defer, + .BoolNot, + .Negation, + .BitNot, + .NegationWrap, + .AddressOf, + .Try, + .Await, + .OptionalType, + .ArrayInitDotTwo, + .ArrayInitDot, + .StructInitDotTwo, + .StructInitDot, + .Switch, + .IfSimple, + .IfSimpleOptional, + .If, + .IfOptional, + .IfError, + .Suspend, + .Resume, + .Continue, + .Break, + .Return, + .AnyFrameType, + .OneToken, + .Identifier, + .EnumLiteral, + .MultilineStringLiteral, + .GroupedExpression, + .BuiltinCallTwo, + .BuiltinCall, + .ErrorSetDecl, + .AnyType, + .Comptime, + .Nosuspend, + .Block, + .AsmSimple, + .Asm, + => return main_tokens[node], + + .Catch, + .FieldAccess, + .UnwrapOptional, + .EqualEqual, + .BangEqual, + .LessThan, + .GreaterThan, + .LessOrEqual, + .GreaterOrEqual, + .AssignMul, + .AssignDiv, + .AssignMod, + 
.AssignAdd, + .AssignSub, + .AssignBitShiftLeft, + .AssignBitShiftRight, + .AssignBitAnd, + .AssignBitXor, + .AssignBitOr, + .AssignMulWrap, + .AssignAddWrap, + .AssignSubWrap, + .Assign, + .MergeErrorSets, + .Mul, + .Div, + .Mod, + .ArrayMult, + .MulWrap, + .Add, + .Sub, + .ArrayCat, + .AddWrap, + .SubWrap, + .BitShiftLeft, + .BitShiftRight, + .BitAnd, + .BitXor, + .BitOr, + .OrElse, + .BoolAnd, + .BoolOr, + .SliceOpen, + .Slice, + .Deref, + .ArrayAccess, + .ArrayInitOne, + .ArrayInit, + .StructInitOne, + .CallOne, + .Call, + .SwitchCaseOne, + .SwitchRange, + .FnDecl, + => return tree.firstToken(datas[node].lhs), + + .GlobalVarDecl, + .LocalVarDecl, + .SimpleVarDecl, + .AlignedVarDecl, + .ArrayType, + .ArrayTypeSentinel, + .PtrTypeAligned, + .PtrTypeSentinel, + .PtrType, + .SliceType, + .StructInit, + .SwitchCaseMulti, + .WhileSimple, + .WhileSimpleOptional, + .WhileCont, + .WhileContOptional, + .While, + .WhileOptional, + .WhileError, + .ForSimple, + .For, + .FnProtoSimple, + .FnProtoSimpleMulti, + .FnProtoOne, + .FnProto, + .ContainerDecl, + .ContainerDeclArg, + .TaggedUnion, + .TaggedUnionEnumTag, + .ContainerFieldInit, + .ContainerFieldAlign, + .ContainerField, + .AsmOutput, + .AsmInput, + .ErrorValue, + .ErrorUnion, + => @panic("TODO finish implementing firstToken"), } - return index; } - /// Skips over comments. 
- pub fn nextToken(self: *const Tree, token_index: TokenIndex) TokenIndex { - const token_tags = self.tokens.items(.tag); - var index = token_index + 1; - while (token_tags[index] == .LineComment) { - index += 1; + pub fn lastToken(tree: Tree, node: Node.Index) TokenIndex { + const tags = tree.nodes.items(.tag); + const datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); + switch (tags[node]) { + .Root, + .UsingNamespace, + .TestDecl, + .ErrDefer, + .Defer, + .BoolNot, + .Negation, + .BitNot, + .NegationWrap, + .AddressOf, + .Try, + .Await, + .OptionalType, + .ArrayInitDotTwo, + .ArrayInitDot, + .StructInitDotTwo, + .StructInitDot, + .Switch, + .IfSimple, + .IfSimpleOptional, + .If, + .IfOptional, + .IfError, + .Suspend, + .Resume, + .Continue, + .Break, + .Return, + .AnyFrameType, + .OneToken, + .Identifier, + .EnumLiteral, + .MultilineStringLiteral, + .GroupedExpression, + .BuiltinCallTwo, + .BuiltinCall, + .ErrorSetDecl, + .AnyType, + .Comptime, + .Nosuspend, + .Block, + .AsmSimple, + .Asm, + .Catch, + .FieldAccess, + .UnwrapOptional, + .EqualEqual, + .BangEqual, + .LessThan, + .GreaterThan, + .LessOrEqual, + .GreaterOrEqual, + .AssignMul, + .AssignDiv, + .AssignMod, + .AssignAdd, + .AssignSub, + .AssignBitShiftLeft, + .AssignBitShiftRight, + .AssignBitAnd, + .AssignBitXor, + .AssignBitOr, + .AssignMulWrap, + .AssignAddWrap, + .AssignSubWrap, + .Assign, + .MergeErrorSets, + .Mul, + .Div, + .Mod, + .ArrayMult, + .MulWrap, + .Add, + .Sub, + .ArrayCat, + .AddWrap, + .SubWrap, + .BitShiftLeft, + .BitShiftRight, + .BitAnd, + .BitXor, + .BitOr, + .OrElse, + .BoolAnd, + .BoolOr, + .SliceOpen, + .Slice, + .Deref, + .ArrayAccess, + .ArrayInitOne, + .ArrayInit, + .StructInitOne, + .CallOne, + .Call, + .SwitchCaseOne, + .SwitchRange, + .FnDecl, + .GlobalVarDecl, + .LocalVarDecl, + .SimpleVarDecl, + .AlignedVarDecl, + .ArrayType, + .ArrayTypeSentinel, + .PtrTypeAligned, + .PtrTypeSentinel, + .PtrType, + .SliceType, + .StructInit, + 
.SwitchCaseMulti, + .WhileSimple, + .WhileSimpleOptional, + .WhileCont, + .WhileContOptional, + .While, + .WhileOptional, + .WhileError, + .ForSimple, + .For, + .FnProtoSimple, + .FnProtoSimpleMulti, + .FnProtoOne, + .FnProto, + .ContainerDecl, + .ContainerDeclArg, + .TaggedUnion, + .TaggedUnionEnumTag, + .ContainerFieldInit, + .ContainerFieldAlign, + .ContainerField, + .AsmOutput, + .AsmInput, + .ErrorValue, + .ErrorUnion, + => @panic("TODO finish implementing lastToken"), } - return index; } }; @@ -454,7 +723,7 @@ pub const Node = struct { /// lhs is test name token (must be string literal), if any. /// rhs is the body node. TestDecl, - /// lhs is the index into global_var_decl_list. + /// lhs is the index into extra_data. /// rhs is the initialization expression, if any. GlobalVarDecl, /// `var a: x align(y) = rhs` @@ -732,6 +1001,7 @@ pub const Node = struct { /// `nosuspend lhs`. rhs unused. Nosuspend, /// `{}`. `sub_list[lhs..rhs]`. + /// main_token points at the `{`. Block, /// `asm(lhs)`. rhs unused. AsmSimple, diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig index 04c44f5c9c..05efea7fe1 100644 --- a/lib/std/zig/parse.zig +++ b/lib/std/zig/parse.zig @@ -594,7 +594,7 @@ const Parser = struct { p.eatToken(.Keyword_var) orelse return null_node; - const name_token = try p.expectToken(.Identifier); + _ = try p.expectToken(.Identifier); const type_node: Node.Index = if (p.eatToken(.Colon) == null) 0 else try p.expectTypeExpr(); const align_node = try p.parseByteAlign(); const section_node = try p.parseLinkSection(); diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 279402d71b..d4a01da0d0 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -3,3727 +3,3704 @@ // This file is part of [zig](https://ziglang.org/), which is MIT licensed. // The MIT license requires this copyright notice to be included in all copies // and substantial portions of the software. 
-test "zig fmt: convert var to anytype" { - // TODO remove in next release cycle - try testTransform( - \\pub fn main( - \\ a: var, - \\ bar: var, - \\) void {} - , - \\pub fn main( - \\ a: anytype, - \\ bar: anytype, - \\) void {} - \\ - ); -} - -test "zig fmt: noasync to nosuspend" { - // TODO: remove this - try testTransform( - \\pub fn main() void { - \\ noasync call(); - \\} - , - \\pub fn main() void { - \\ nosuspend call(); - \\} - \\ - ); -} - -test "recovery: top level" { - try testError( - \\test "" {inline} - \\test "" {inline} - , &[_]Error{ - .ExpectedInlinable, - .ExpectedInlinable, - }); -} - -test "recovery: block statements" { - try testError( - \\test "" { - \\ foo + +; - \\ inline; - \\} - , &[_]Error{ - .InvalidToken, - .ExpectedInlinable, - }); -} - -test "recovery: missing comma" { - try testError( - \\test "" { - \\ switch (foo) { - \\ 2 => {} - \\ 3 => {} - \\ else => { - \\ foo && bar +; - \\ } - \\ } - \\} - , &[_]Error{ - .ExpectedToken, - .ExpectedToken, - .InvalidAnd, - .InvalidToken, - }); -} - -test "recovery: extra qualifier" { - try testError( - \\const a: *const const u8; - \\test "" - , &[_]Error{ - .ExtraConstQualifier, - .ExpectedLBrace, - }); -} - -test "recovery: missing return type" { - try testError( - \\fn foo() { - \\ a && b; - \\} - \\test "" - , &[_]Error{ - .ExpectedReturnType, - .InvalidAnd, - .ExpectedLBrace, - }); -} - -test "recovery: continue after invalid decl" { - try testError( - \\fn foo { - \\ inline; - \\} - \\pub test "" { - \\ async a && b; - \\} - , &[_]Error{ - .ExpectedToken, - .ExpectedPubItem, - .ExpectedParamList, - .InvalidAnd, - }); - try testError( - \\threadlocal test "" { - \\ @a && b; - \\} - , &[_]Error{ - .ExpectedVarDecl, - .ExpectedParamList, - .InvalidAnd, - }); -} - -test "recovery: invalid extern/inline" { - try testError( - \\inline test "" { a && b; } - , &[_]Error{ - .ExpectedFn, - .InvalidAnd, - }); - try testError( - \\extern "" test "" { a && b; } - , &[_]Error{ - 
.ExpectedVarDeclOrFn, - .InvalidAnd, - }); -} - -test "recovery: missing semicolon" { - try testError( - \\test "" { - \\ comptime a && b - \\ c && d - \\ @foo - \\} - , &[_]Error{ - .InvalidAnd, - .ExpectedToken, - .InvalidAnd, - .ExpectedToken, - .ExpectedParamList, - .ExpectedToken, - }); -} - -test "recovery: invalid container members" { - try testError( - \\usingnamespace; - \\foo+ - \\bar@, - \\while (a == 2) { test "" {}} - \\test "" { - \\ a && b - \\} - , &[_]Error{ - .ExpectedExpr, - .ExpectedToken, - .ExpectedToken, - .ExpectedContainerMembers, - .InvalidAnd, - .ExpectedToken, - }); -} - -test "recovery: invalid parameter" { - try testError( - \\fn main() void { - \\ a(comptime T: type) - \\} - , &[_]Error{ - .ExpectedToken, - }); -} - -test "recovery: extra '}' at top level" { - try testError( - \\}}} - \\test "" { - \\ a && b; - \\} - , &[_]Error{ - .ExpectedContainerMembers, - .ExpectedContainerMembers, - .ExpectedContainerMembers, - .InvalidAnd, - }); -} - -test "recovery: mismatched bracket at top level" { - try testError( - \\const S = struct { - \\ arr: 128]?G - \\}; - , &[_]Error{ - .ExpectedToken, - }); -} - -test "recovery: invalid global error set access" { - try testError( - \\test "" { - \\ error && foo; - \\} - , &[_]Error{ - .ExpectedToken, - .ExpectedIdentifier, - .InvalidAnd, - }); -} - -test "recovery: invalid asterisk after pointer dereference" { - try testError( - \\test "" { - \\ var sequence = "repeat".*** 10; - \\} - , &[_]Error{ - .AsteriskAfterPointerDereference, - }); - try testError( - \\test "" { - \\ var sequence = "repeat".** 10&&a; - \\} - , &[_]Error{ - .AsteriskAfterPointerDereference, - .InvalidAnd, - }); -} - -test "recovery: missing semicolon after if, for, while stmt" { - try testError( - \\test "" { - \\ if (foo) bar - \\ for (foo) |a| bar - \\ while (foo) bar - \\ a && b; - \\} - , &[_]Error{ - .ExpectedSemiOrElse, - .ExpectedSemiOrElse, - .ExpectedSemiOrElse, - .InvalidAnd, - }); -} - -test "recovery: invalid 
comptime" { - try testError( - \\comptime - , &[_]Error{ - .ExpectedBlockOrField, - }); -} - -test "recovery: missing block after for/while loops" { - try testError( - \\test "" { while (foo) } - , &[_]Error{ - .ExpectedBlockOrAssignment, - }); - try testError( - \\test "" { for (foo) |bar| } - , &[_]Error{ - .ExpectedBlockOrAssignment, - }); -} - -test "zig fmt: respect line breaks after var declarations" { - try testCanonical( - \\const crc = - \\ lookup_tables[0][p[7]] ^ - \\ lookup_tables[1][p[6]] ^ - \\ lookup_tables[2][p[5]] ^ - \\ lookup_tables[3][p[4]] ^ - \\ lookup_tables[4][@truncate(u8, self.crc >> 24)] ^ - \\ lookup_tables[5][@truncate(u8, self.crc >> 16)] ^ - \\ lookup_tables[6][@truncate(u8, self.crc >> 8)] ^ - \\ lookup_tables[7][@truncate(u8, self.crc >> 0)]; - \\ - ); -} - -test "zig fmt: multiline string mixed with comments" { - try testCanonical( - \\const s1 = - \\ //\\one - \\ \\two) - \\ \\three - \\; - \\const s2 = - \\ \\one - \\ \\two) - \\ //\\three - \\; - \\const s3 = - \\ \\one - \\ //\\two) - \\ \\three - \\; - \\const s4 = - \\ \\one - \\ //\\two - \\ \\three - \\ //\\four - \\ \\five - \\; - \\const a = - \\ 1; - \\ - ); -} - -test "zig fmt: empty file" { - try testCanonical( - \\ - ); -} - -test "zig fmt: if statment" { - try testCanonical( - \\test "" { - \\ if (optional()) |some| - \\ bar = some.foo(); - \\} - \\ - ); -} - -test "zig fmt: top-level fields" { - try testCanonical( - \\a: did_you_know, - \\b: all_files_are, - \\structs: ?x, - \\ - ); -} - -test "zig fmt: decl between fields" { - try testError( - \\const S = struct { - \\ const foo = 2; - \\ const bar = 2; - \\ const baz = 2; - \\ a: usize, - \\ const foo1 = 2; - \\ const bar1 = 2; - \\ const baz1 = 2; - \\ b: usize, - \\}; - , &[_]Error{ - .DeclBetweenFields, - }); -} - -test "zig fmt: eof after missing comma" { - try testError( - \\foo() - , &[_]Error{ - .ExpectedToken, - }); -} - -test "zig fmt: errdefer with payload" { - try testCanonical( - \\pub fn main() 
anyerror!void { - \\ errdefer |a| x += 1; - \\ errdefer |a| {} - \\ errdefer |a| { - \\ x += 1; - \\ } - \\} - \\ - ); -} - -test "zig fmt: nosuspend block" { - try testCanonical( - \\pub fn main() anyerror!void { - \\ nosuspend { - \\ var foo: Foo = .{ .bar = 42 }; - \\ } - \\} - \\ - ); -} - -test "zig fmt: nosuspend await" { - try testCanonical( - \\fn foo() void { - \\ x = nosuspend await y; - \\} - \\ - ); -} - -test "zig fmt: trailing comma in container declaration" { - try testCanonical( - \\const X = struct { foo: i32 }; - \\const X = struct { foo: i32, bar: i32 }; - \\const X = struct { foo: i32 = 1, bar: i32 = 2 }; - \\const X = struct { foo: i32 align(4), bar: i32 align(4) }; - \\const X = struct { foo: i32 align(4) = 1, bar: i32 align(4) = 2 }; - \\ - ); - try testCanonical( - \\test "" { - \\ comptime { - \\ const X = struct { - \\ x: i32 - \\ }; - \\ } - \\} - \\ - ); - try testTransform( - \\const X = struct { - \\ foo: i32, bar: i8 }; - , - \\const X = struct { - \\ foo: i32, bar: i8 - \\}; - \\ - ); -} - -test "zig fmt: trailing comma in fn parameter list" { - try testCanonical( - \\pub fn f( - \\ a: i32, - \\ b: i32, - \\) i32 {} - \\pub fn f( - \\ a: i32, - \\ b: i32, - \\) align(8) i32 {} - \\pub fn f( - \\ a: i32, - \\ b: i32, - \\) linksection(".text") i32 {} - \\pub fn f( - \\ a: i32, - \\ b: i32, - \\) callconv(.C) i32 {} - \\pub fn f( - \\ a: i32, - \\ b: i32, - \\) align(8) linksection(".text") i32 {} - \\pub fn f( - \\ a: i32, - \\ b: i32, - \\) align(8) callconv(.C) i32 {} - \\pub fn f( - \\ a: i32, - \\ b: i32, - \\) align(8) linksection(".text") callconv(.C) i32 {} - \\pub fn f( - \\ a: i32, - \\ b: i32, - \\) linksection(".text") callconv(.C) i32 {} - \\ - ); -} - -test "zig fmt: comptime struct field" { - try testCanonical( - \\const Foo = struct { - \\ a: i32, - \\ comptime b: i32 = 1234, - \\}; - \\ - ); -} - -test "zig fmt: c pointer type" { - try testCanonical( - \\pub extern fn repro() [*c]const u8; - \\ - ); -} - -test "zig 
fmt: builtin call with trailing comma" { - try testCanonical( - \\pub fn main() void { - \\ @breakpoint(); - \\ _ = @boolToInt(a); - \\ _ = @call( - \\ a, - \\ b, - \\ c, - \\ ); - \\} - \\ - ); -} - -test "zig fmt: asm expression with comptime content" { - try testCanonical( - \\comptime { - \\ asm ("foo" ++ "bar"); - \\} - \\pub fn main() void { - \\ asm volatile ("foo" ++ "bar"); - \\ asm volatile ("foo" ++ "bar" - \\ : [_] "" (x) - \\ ); - \\ asm volatile ("foo" ++ "bar" - \\ : [_] "" (x) - \\ : [_] "" (y) - \\ ); - \\ asm volatile ("foo" ++ "bar" - \\ : [_] "" (x) - \\ : [_] "" (y) - \\ : "h", "e", "l", "l", "o" - \\ ); - \\} - \\ - ); -} - -test "zig fmt: anytype struct field" { - try testCanonical( - \\pub const Pointer = struct { - \\ sentinel: anytype, - \\}; - \\ - ); -} - -test "zig fmt: sentinel-terminated array type" { - try testCanonical( - \\pub fn cStrToPrefixedFileW(s: [*:0]const u8) ![PATH_MAX_WIDE:0]u16 { - \\ return sliceToPrefixedFileW(mem.toSliceConst(u8, s)); - \\} - \\ - ); -} - -test "zig fmt: sentinel-terminated slice type" { - try testCanonical( - \\pub fn toSlice(self: Buffer) [:0]u8 { - \\ return self.list.toSlice()[0..self.len()]; - \\} - \\ - ); -} - -test "zig fmt: anon literal in array" { - try testCanonical( - \\var arr: [2]Foo = .{ - \\ .{ .a = 2 }, - \\ .{ .b = 3 }, - \\}; - \\ - ); -} - -test "zig fmt: alignment in anonymous literal" { - try testTransform( - \\const a = .{ - \\ "U", "L", "F", - \\ "U'", - \\ "L'", - \\ "F'", - \\}; - \\ - , - \\const a = .{ - \\ "U", "L", "F", - \\ "U'", "L'", "F'", - \\}; - \\ - ); -} - -test "zig fmt: anon struct literal syntax" { - try testCanonical( - \\const x = .{ - \\ .a = b, - \\ .c = d, - \\}; - \\ - ); -} - -test "zig fmt: anon list literal syntax" { - try testCanonical( - \\const x = .{ a, b, c }; - \\ - ); -} - -test "zig fmt: async function" { - try testCanonical( - \\pub const Server = struct { - \\ handleRequestFn: fn (*Server, *const std.net.Address, File) callconv(.Async) void, 
- \\}; - \\test "hi" { - \\ var ptr = @ptrCast(fn (i32) callconv(.Async) void, other); - \\} - \\ - ); -} - -test "zig fmt: whitespace fixes" { - try testTransform("test \"\" {\r\n\tconst hi = x;\r\n}\n// zig fmt: off\ntest \"\"{\r\n\tconst a = b;}\r\n", - \\test "" { - \\ const hi = x; - \\} - \\// zig fmt: off - \\test ""{ - \\ const a = b;} - \\ - ); -} - -test "zig fmt: while else err prong with no block" { - try testCanonical( - \\test "" { - \\ const result = while (returnError()) |value| { - \\ break value; - \\ } else |err| @as(i32, 2); - \\ expect(result == 2); - \\} - \\ - ); -} - -test "zig fmt: tagged union with enum values" { - try testCanonical( - \\const MultipleChoice2 = union(enum(u32)) { - \\ Unspecified1: i32, - \\ A: f32 = 20, - \\ Unspecified2: void, - \\ B: bool = 40, - \\ Unspecified3: i32, - \\ C: i8 = 60, - \\ Unspecified4: void, - \\ D: void = 1000, - \\ Unspecified5: i32, - \\}; - \\ - ); -} - -test "zig fmt: allowzero pointer" { - try testCanonical( - \\const T = [*]allowzero const u8; - \\ - ); -} - -test "zig fmt: enum literal" { - try testCanonical( - \\const x = .hi; - \\ - ); -} - -test "zig fmt: enum literal inside array literal" { - try testCanonical( - \\test "enums in arrays" { - \\ var colors = []Color{.Green}; - \\ colors = []Colors{ .Green, .Cyan }; - \\ colors = []Colors{ - \\ .Grey, - \\ .Green, - \\ .Cyan, - \\ }; - \\} - \\ - ); -} - -test "zig fmt: character literal larger than u8" { - try testCanonical( - \\const x = '\u{01f4a9}'; - \\ - ); -} - -test "zig fmt: infix operator and then multiline string literal" { - try testCanonical( - \\const x = "" ++ - \\ \\ hi - \\; - \\ - ); -} - -test "zig fmt: infix operator and then multiline string literal" { - try testCanonical( - \\const x = "" ++ - \\ \\ hi0 - \\ \\ hi1 - \\ \\ hi2 - \\; - \\ - ); -} - -test "zig fmt: C pointers" { - try testCanonical( - \\const Ptr = [*c]i32; - \\ - ); -} - -test "zig fmt: threadlocal" { - try testCanonical( - \\threadlocal var x: i32 = 
1234; - \\ - ); -} - -test "zig fmt: linksection" { - try testCanonical( - \\export var aoeu: u64 linksection(".text.derp") = 1234; - \\export fn _start() linksection(".text.boot") callconv(.Naked) noreturn {} - \\ - ); -} - -test "zig fmt: correctly move doc comments on struct fields" { - try testTransform( - \\pub const section_64 = extern struct { - \\ sectname: [16]u8, /// name of this section - \\ segname: [16]u8, /// segment this section goes in - \\}; - , - \\pub const section_64 = extern struct { - \\ /// name of this section - \\ sectname: [16]u8, - \\ /// segment this section goes in - \\ segname: [16]u8, - \\}; - \\ - ); -} - -test "zig fmt: correctly space struct fields with doc comments" { - try testTransform( - \\pub const S = struct { - \\ /// A - \\ a: u8, - \\ /// B - \\ /// B (cont) - \\ b: u8, - \\ - \\ - \\ /// C - \\ c: u8, - \\}; - \\ - , - \\pub const S = struct { - \\ /// A - \\ a: u8, - \\ /// B - \\ /// B (cont) - \\ b: u8, - \\ - \\ /// C - \\ c: u8, - \\}; - \\ - ); -} - -test "zig fmt: doc comments on param decl" { - try testCanonical( - \\pub const Allocator = struct { - \\ shrinkFn: fn ( - \\ self: *Allocator, - \\ /// Guaranteed to be the same as what was returned from most recent call to - \\ /// `allocFn`, `reallocFn`, or `shrinkFn`. - \\ old_mem: []u8, - \\ /// Guaranteed to be the same as what was returned from most recent call to - \\ /// `allocFn`, `reallocFn`, or `shrinkFn`. - \\ old_alignment: u29, - \\ /// Guaranteed to be less than or equal to `old_mem.len`. - \\ new_byte_count: usize, - \\ /// Guaranteed to be less than or equal to `old_alignment`. 
- \\ new_alignment: u29, - \\ ) []u8, - \\}; - \\ - ); -} - -test "zig fmt: aligned struct field" { - try testCanonical( - \\pub const S = struct { - \\ f: i32 align(32), - \\}; - \\ - ); - try testCanonical( - \\pub const S = struct { - \\ f: i32 align(32) = 1, - \\}; - \\ - ); -} - -test "zig fmt: comment to disable/enable zig fmt first" { - try testCanonical( - \\// Test trailing comma syntax - \\// zig fmt: off - \\ - \\const struct_trailing_comma = struct { x: i32, y: i32, }; - ); -} - -test "zig fmt: comment to disable/enable zig fmt" { - try testTransform( - \\const a = b; - \\// zig fmt: off - \\const c = d; - \\// zig fmt: on - \\const e = f; - , - \\const a = b; - \\// zig fmt: off - \\const c = d; - \\// zig fmt: on - \\const e = f; - \\ - ); -} - -test "zig fmt: line comment following 'zig fmt: off'" { - try testCanonical( - \\// zig fmt: off - \\// Test - \\const e = f; - ); -} - -test "zig fmt: doc comment following 'zig fmt: off'" { - try testCanonical( - \\// zig fmt: off - \\/// test - \\const e = f; - ); -} - -test "zig fmt: line and doc comment following 'zig fmt: off'" { - try testCanonical( - \\// zig fmt: off - \\// test 1 - \\/// test 2 - \\const e = f; - ); -} - -test "zig fmt: doc and line comment following 'zig fmt: off'" { - try testCanonical( - \\// zig fmt: off - \\/// test 1 - \\// test 2 - \\const e = f; - ); -} - -test "zig fmt: alternating 'zig fmt: off' and 'zig fmt: on'" { - try testCanonical( - \\// zig fmt: off - \\// zig fmt: on - \\// zig fmt: off - \\const e = f; - \\// zig fmt: off - \\// zig fmt: on - \\// zig fmt: off - \\const a = b; - \\// zig fmt: on - \\const c = d; - \\// zig fmt: on - \\ - ); -} - -test "zig fmt: line comment following 'zig fmt: on'" { - try testCanonical( - \\// zig fmt: off - \\const e = f; - \\// zig fmt: on - \\// test - \\const e = f; - \\ - ); -} - -test "zig fmt: doc comment following 'zig fmt: on'" { - try testCanonical( - \\// zig fmt: off - \\const e = f; - \\// zig fmt: on - \\/// test - 
\\const e = f; - \\ - ); -} - -test "zig fmt: line and doc comment following 'zig fmt: on'" { - try testCanonical( - \\// zig fmt: off - \\const e = f; - \\// zig fmt: on - \\// test1 - \\/// test2 - \\const e = f; - \\ - ); -} - -test "zig fmt: doc and line comment following 'zig fmt: on'" { - try testCanonical( - \\// zig fmt: off - \\const e = f; - \\// zig fmt: on - \\/// test1 - \\// test2 - \\const e = f; - \\ - ); -} - -test "zig fmt: pointer of unknown length" { - try testCanonical( - \\fn foo(ptr: [*]u8) void {} - \\ - ); -} - -test "zig fmt: spaces around slice operator" { - try testCanonical( - \\var a = b[c..d]; - \\var a = b[c..d :0]; - \\var a = b[c + 1 .. d]; - \\var a = b[c + 1 ..]; - \\var a = b[c .. d + 1]; - \\var a = b[c .. d + 1 :0]; - \\var a = b[c.a..d.e]; - \\var a = b[c.a..d.e :0]; - \\ - ); -} - -test "zig fmt: async call in if condition" { - try testCanonical( - \\comptime { - \\ if (async b()) { - \\ a(); - \\ } - \\} - \\ - ); -} - -test "zig fmt: 2nd arg multiline string" { - try testCanonical( - \\comptime { - \\ cases.addAsm("hello world linux x86_64", - \\ \\.text - \\ , "Hello, world!\n"); - \\} - \\ - ); -} - -test "zig fmt: 2nd arg multiline string many args" { - try testCanonical( - \\comptime { - \\ cases.addAsm("hello world linux x86_64", - \\ \\.text - \\ , "Hello, world!\n", "Hello, world!\n"); - \\} - \\ - ); -} - -test "zig fmt: final arg multiline string" { - try testCanonical( - \\comptime { - \\ cases.addAsm("hello world linux x86_64", "Hello, world!\n", - \\ \\.text - \\ ); - \\} - \\ - ); -} - -test "zig fmt: if condition wraps" { - try testTransform( - \\comptime { - \\ if (cond and - \\ cond) { - \\ return x; - \\ } - \\ while (cond and - \\ cond) { - \\ return x; - \\ } - \\ if (a == b and - \\ c) { - \\ a = b; - \\ } - \\ while (a == b and - \\ c) { - \\ a = b; - \\ } - \\ if ((cond and - \\ cond)) { - \\ return x; - \\ } - \\ while ((cond and - \\ cond)) { - \\ return x; - \\ } - \\ var a = if (a) |*f| x: { - \\ 
break :x &a.b; - \\ } else |err| err; - \\ var a = if (cond and - \\ cond) |*f| - \\ x: { - \\ break :x &a.b; - \\ } else |err| err; - \\} - , - \\comptime { - \\ if (cond and - \\ cond) - \\ { - \\ return x; - \\ } - \\ while (cond and - \\ cond) - \\ { - \\ return x; - \\ } - \\ if (a == b and - \\ c) - \\ { - \\ a = b; - \\ } - \\ while (a == b and - \\ c) - \\ { - \\ a = b; - \\ } - \\ if ((cond and - \\ cond)) - \\ { - \\ return x; - \\ } - \\ while ((cond and - \\ cond)) - \\ { - \\ return x; - \\ } - \\ var a = if (a) |*f| x: { - \\ break :x &a.b; - \\ } else |err| err; - \\ var a = if (cond and - \\ cond) |*f| - \\ x: { - \\ break :x &a.b; - \\ } else |err| err; - \\} - \\ - ); -} - -test "zig fmt: if condition has line break but must not wrap" { - try testCanonical( - \\comptime { - \\ if (self.user_input_options.put( - \\ name, - \\ UserInputOption{ - \\ .name = name, - \\ .used = false, - \\ }, - \\ ) catch unreachable) |*prev_value| { - \\ foo(); - \\ bar(); - \\ } - \\ if (put( - \\ a, - \\ b, - \\ )) { - \\ foo(); - \\ } - \\} - \\ - ); -} - -test "zig fmt: if condition has line break but must not wrap" { - try testCanonical( - \\comptime { - \\ if (self.user_input_options.put(name, UserInputOption{ - \\ .name = name, - \\ .used = false, - \\ }) catch unreachable) |*prev_value| { - \\ foo(); - \\ bar(); - \\ } - \\ if (put( - \\ a, - \\ b, - \\ )) { - \\ foo(); - \\ } - \\} - \\ - ); -} - -test "zig fmt: function call with multiline argument" { - try testCanonical( - \\comptime { - \\ self.user_input_options.put(name, UserInputOption{ - \\ .name = name, - \\ .used = false, - \\ }); - \\} - \\ - ); -} - -test "zig fmt: same-line doc comment on variable declaration" { - try testTransform( - \\pub const MAP_ANONYMOUS = 0x1000; /// allocated from memory, swap space - \\pub const MAP_FILE = 0x0000; /// map from file (default) - \\ - \\pub const EMEDIUMTYPE = 124; /// Wrong medium type - \\ - \\// nameserver query return codes - \\pub const ENSROK = 0; /// 
DNS server returned answer with no data - , - \\/// allocated from memory, swap space - \\pub const MAP_ANONYMOUS = 0x1000; - \\/// map from file (default) - \\pub const MAP_FILE = 0x0000; - \\ - \\/// Wrong medium type - \\pub const EMEDIUMTYPE = 124; - \\ - \\// nameserver query return codes - \\/// DNS server returned answer with no data - \\pub const ENSROK = 0; - \\ - ); -} - -test "zig fmt: if-else with comment before else" { - try testCanonical( - \\comptime { - \\ // cexp(finite|nan +- i inf|nan) = nan + i nan - \\ if ((hx & 0x7fffffff) != 0x7f800000) { - \\ return Complex(f32).new(y - y, y - y); - \\ } // cexp(-inf +- i inf|nan) = 0 + i0 - \\ else if (hx & 0x80000000 != 0) { - \\ return Complex(f32).new(0, 0); - \\ } // cexp(+inf +- i inf|nan) = inf + i nan - \\ else { - \\ return Complex(f32).new(x, y - y); - \\ } - \\} - \\ - ); -} - -test "zig fmt: if nested" { - try testCanonical( - \\pub fn foo() void { - \\ return if ((aInt & bInt) >= 0) - \\ if (aInt < bInt) - \\ GE_LESS - \\ else if (aInt == bInt) - \\ GE_EQUAL - \\ else - \\ GE_GREATER - \\ else if (aInt > bInt) - \\ GE_LESS - \\ else if (aInt == bInt) - \\ GE_EQUAL - \\ else - \\ GE_GREATER; - \\} - \\ - ); -} - -test "zig fmt: respect line breaks in if-else" { - try testCanonical( - \\comptime { - \\ return if (cond) a else b; - \\ return if (cond) - \\ a - \\ else - \\ b; - \\ return if (cond) - \\ a - \\ else if (cond) - \\ b - \\ else - \\ c; - \\} - \\ - ); -} - -test "zig fmt: respect line breaks after infix operators" { - try testCanonical( - \\comptime { - \\ self.crc = - \\ lookup_tables[0][p[7]] ^ - \\ lookup_tables[1][p[6]] ^ - \\ lookup_tables[2][p[5]] ^ - \\ lookup_tables[3][p[4]] ^ - \\ lookup_tables[4][@truncate(u8, self.crc >> 24)] ^ - \\ lookup_tables[5][@truncate(u8, self.crc >> 16)] ^ - \\ lookup_tables[6][@truncate(u8, self.crc >> 8)] ^ - \\ lookup_tables[7][@truncate(u8, self.crc >> 0)]; - \\} - \\ - ); -} - -test "zig fmt: fn decl with trailing comma" { - try testTransform( 
- \\fn foo(a: i32, b: i32,) void {} - , - \\fn foo( - \\ a: i32, - \\ b: i32, - \\) void {} - \\ - ); -} - -test "zig fmt: enum decl with no trailing comma" { - try testTransform( - \\const StrLitKind = enum {Normal, C}; - , - \\const StrLitKind = enum { Normal, C }; - \\ - ); -} - -test "zig fmt: switch comment before prong" { - try testCanonical( - \\comptime { - \\ switch (a) { - \\ // hi - \\ 0 => {}, - \\ } - \\} - \\ - ); -} - -test "zig fmt: struct literal no trailing comma" { - try testTransform( - \\const a = foo{ .x = 1, .y = 2 }; - \\const a = foo{ .x = 1, - \\ .y = 2 }; - , - \\const a = foo{ .x = 1, .y = 2 }; - \\const a = foo{ - \\ .x = 1, - \\ .y = 2, - \\}; - \\ - ); -} - -test "zig fmt: struct literal containing a multiline expression" { - try testTransform( - \\const a = A{ .x = if (f1()) 10 else 20 }; - \\const a = A{ .x = if (f1()) 10 else 20, }; - \\const a = A{ .x = if (f1()) - \\ 10 else 20 }; - \\const a = A{ .x = if (f1()) 10 else 20, .y = f2() + 100 }; - \\const a = A{ .x = if (f1()) 10 else 20, .y = f2() + 100, }; - \\const a = A{ .x = if (f1()) - \\ 10 else 20}; - \\const a = A{ .x = switch(g) {0 => "ok", else => "no"} }; - \\ - , - \\const a = A{ .x = if (f1()) 10 else 20 }; - \\const a = A{ - \\ .x = if (f1()) 10 else 20, - \\}; - \\const a = A{ - \\ .x = if (f1()) - \\ 10 - \\ else - \\ 20, - \\}; - \\const a = A{ .x = if (f1()) 10 else 20, .y = f2() + 100 }; - \\const a = A{ - \\ .x = if (f1()) 10 else 20, - \\ .y = f2() + 100, - \\}; - \\const a = A{ - \\ .x = if (f1()) - \\ 10 - \\ else - \\ 20, - \\}; - \\const a = A{ - \\ .x = switch (g) { - \\ 0 => "ok", - \\ else => "no", - \\ }, - \\}; - \\ - ); -} - -test "zig fmt: array literal with hint" { - try testTransform( - \\const a = []u8{ - \\ 1, 2, // - \\ 3, - \\ 4, - \\ 5, - \\ 6, - \\ 7 }; - \\const a = []u8{ - \\ 1, 2, // - \\ 3, - \\ 4, - \\ 5, - \\ 6, - \\ 7, 8 }; - \\const a = []u8{ - \\ 1, 2, // - \\ 3, - \\ 4, - \\ 5, - \\ 6, // blah - \\ 7, 8 }; - \\const a = []u8{ - \\ 
1, 2, // - \\ 3, // - \\ 4, - \\ 5, - \\ 6, - \\ 7 }; - \\const a = []u8{ - \\ 1, - \\ 2, - \\ 3, 4, // - \\ 5, 6, // - \\ 7, 8, // - \\}; - , - \\const a = []u8{ - \\ 1, 2, - \\ 3, 4, - \\ 5, 6, - \\ 7, - \\}; - \\const a = []u8{ - \\ 1, 2, - \\ 3, 4, - \\ 5, 6, - \\ 7, 8, - \\}; - \\const a = []u8{ - \\ 1, 2, - \\ 3, 4, - \\ 5, - \\ 6, // blah - \\ 7, - \\ 8, - \\}; - \\const a = []u8{ - \\ 1, 2, - \\ 3, // - \\ 4, - \\ 5, 6, - \\ 7, - \\}; - \\const a = []u8{ - \\ 1, - \\ 2, - \\ 3, - \\ 4, - \\ 5, - \\ 6, - \\ 7, - \\ 8, - \\}; - \\ - ); -} - -test "zig fmt: array literal veritical column alignment" { - try testTransform( - \\const a = []u8{ - \\ 1000, 200, - \\ 30, 4, - \\ 50000, 60 - \\}; - \\const a = []u8{0, 1, 2, 3, 40, - \\ 4,5,600,7, - \\ 80, - \\ 9, 10, 11, 0, 13, 14, 15}; - \\ - , - \\const a = []u8{ - \\ 1000, 200, - \\ 30, 4, - \\ 50000, 60, - \\}; - \\const a = []u8{ - \\ 0, 1, 2, 3, 40, - \\ 4, 5, 600, 7, 80, - \\ 9, 10, 11, 0, 13, - \\ 14, 15, - \\}; - \\ - ); -} - -test "zig fmt: multiline string with backslash at end of line" { - try testCanonical( - \\comptime { - \\ err( - \\ \\\ - \\ ); - \\} - \\ - ); -} - -test "zig fmt: multiline string parameter in fn call with trailing comma" { - try testCanonical( - \\fn foo() void { - \\ try stdout.print( - \\ \\ZIG_CMAKE_BINARY_DIR {} - \\ \\ZIG_C_HEADER_FILES {} - \\ \\ZIG_DIA_GUIDS_LIB {} - \\ \\ - \\ , - \\ std.cstr.toSliceConst(c.ZIG_CMAKE_BINARY_DIR), - \\ std.cstr.toSliceConst(c.ZIG_CXX_COMPILER), - \\ std.cstr.toSliceConst(c.ZIG_DIA_GUIDS_LIB), - \\ ); - \\} - \\ - ); -} - -test "zig fmt: trailing comma on fn call" { - try testCanonical( - \\comptime { - \\ var module = try Module.create( - \\ allocator, - \\ zig_lib_dir, - \\ full_cache_dir, - \\ ); - \\} - \\ - ); -} - -test "zig fmt: multi line arguments without last comma" { - try testTransform( - \\pub fn foo( - \\ a: usize, - \\ b: usize, - \\ c: usize, - \\ d: usize - \\) usize { - \\ return a + b + c + d; - \\} - \\ - , - \\pub fn 
foo(a: usize, b: usize, c: usize, d: usize) usize { - \\ return a + b + c + d; - \\} - \\ - ); -} - -test "zig fmt: empty block with only comment" { - try testCanonical( - \\comptime { - \\ { - \\ // comment - \\ } - \\} - \\ - ); -} - -test "zig fmt: no trailing comma on struct decl" { - try testCanonical( - \\const RoundParam = struct { - \\ k: usize, s: u32, t: u32 - \\}; - \\ - ); -} - -test "zig fmt: extra newlines at the end" { - try testTransform( - \\const a = b; - \\ - \\ - \\ - , - \\const a = b; - \\ - ); -} - -test "zig fmt: simple asm" { - try testTransform( - \\comptime { - \\ asm volatile ( - \\ \\.globl aoeu; - \\ \\.type aoeu, @function; - \\ \\.set aoeu, derp; - \\ ); - \\ - \\ asm ("not real assembly" - \\ :[a] "x" (x),); - \\ asm ("not real assembly" - \\ :[a] "x" (->i32),:[a] "x" (1),); - \\ asm ("still not real assembly" - \\ :::"a","b",); - \\} - , - \\comptime { - \\ asm volatile ( - \\ \\.globl aoeu; - \\ \\.type aoeu, @function; - \\ \\.set aoeu, derp; - \\ ); - \\ - \\ asm ("not real assembly" - \\ : [a] "x" (x) - \\ ); - \\ asm ("not real assembly" - \\ : [a] "x" (-> i32) - \\ : [a] "x" (1) - \\ ); - \\ asm ("still not real assembly" - \\ : - \\ : - \\ : "a", "b" - \\ ); - \\} - \\ - ); -} - -test "zig fmt: nested struct literal with one item" { - try testCanonical( - \\const a = foo{ - \\ .item = bar{ .a = b }, - \\}; - \\ - ); -} - -test "zig fmt: switch cases trailing comma" { - try testTransform( - \\fn switch_cases(x: i32) void { - \\ switch (x) { - \\ 1,2,3 => {}, - \\ 4,5, => {}, - \\ 6... 
8, => {}, - \\ else => {}, - \\ } - \\} - , - \\fn switch_cases(x: i32) void { - \\ switch (x) { - \\ 1, 2, 3 => {}, - \\ 4, - \\ 5, - \\ => {}, - \\ 6...8 => {}, - \\ else => {}, - \\ } - \\} - \\ - ); -} - -test "zig fmt: slice align" { - try testCanonical( - \\const A = struct { - \\ items: []align(A) T, - \\}; - \\ - ); -} - -test "zig fmt: add trailing comma to array literal" { - try testTransform( - \\comptime { - \\ return []u16{'m', 's', 'y', 's', '-' // hi - \\ }; - \\ return []u16{'m', 's', 'y', 's', - \\ '-'}; - \\ return []u16{'m', 's', 'y', 's', '-'}; - \\} - , - \\comptime { - \\ return []u16{ - \\ 'm', 's', 'y', 's', '-', // hi - \\ }; - \\ return []u16{ - \\ 'm', 's', 'y', 's', - \\ '-', - \\ }; - \\ return []u16{ 'm', 's', 'y', 's', '-' }; - \\} - \\ - ); -} - -test "zig fmt: first thing in file is line comment" { - try testCanonical( - \\// Introspection and determination of system libraries needed by zig. - \\ - \\// Introspection and determination of system libraries needed by zig. 
- \\ - \\const std = @import("std"); - \\ - ); -} - -test "zig fmt: line comment after doc comment" { - try testCanonical( - \\/// doc comment - \\// line comment - \\fn foo() void {} - \\ - ); -} - -test "zig fmt: float literal with exponent" { - try testCanonical( - \\test "bit field alignment" { - \\ assert(@TypeOf(&blah.b) == *align(1:3:6) const u3); - \\} - \\ - ); -} - -test "zig fmt: float literal with exponent" { - try testCanonical( - \\test "aoeu" { - \\ switch (state) { - \\ TermState.Start => switch (c) { - \\ '\x1b' => state = TermState.Escape, - \\ else => try out.writeByte(c), - \\ }, - \\ } - \\} - \\ - ); -} -test "zig fmt: float literal with exponent" { - try testCanonical( - \\pub const f64_true_min = 4.94065645841246544177e-324; - \\const threshold = 0x1.a827999fcef32p+1022; - \\ - ); -} - -test "zig fmt: if-else end of comptime" { - try testCanonical( - \\comptime { - \\ if (a) { - \\ b(); - \\ } else { - \\ b(); - \\ } - \\} - \\ - ); -} - -test "zig fmt: nested blocks" { - try testCanonical( - \\comptime { - \\ { - \\ { - \\ { - \\ a(); - \\ } - \\ } - \\ } - \\} - \\ - ); -} - -test "zig fmt: block with same line comment after end brace" { - try testCanonical( - \\comptime { - \\ { - \\ b(); - \\ } // comment - \\} - \\ - ); -} - -test "zig fmt: statements with comment between" { - try testCanonical( - \\comptime { - \\ a = b; - \\ // comment - \\ a = b; - \\} - \\ - ); -} - -test "zig fmt: statements with empty line between" { - try testCanonical( - \\comptime { - \\ a = b; - \\ - \\ a = b; - \\} - \\ - ); -} - -test "zig fmt: ptr deref operator and unwrap optional operator" { - try testCanonical( - \\const a = b.*; - \\const a = b.?; - \\ - ); -} - -test "zig fmt: comment after if before another if" { - try testCanonical( - \\test "aoeu" { - \\ // comment - \\ if (x) { - \\ bar(); - \\ } - \\} - \\ - \\test "aoeu" { - \\ if (x) { - \\ foo(); - \\ } - \\ // comment - \\ if (x) { - \\ bar(); - \\ } - \\} - \\ - ); -} - -test "zig fmt: line 
comment between if block and else keyword" { - try testCanonical( - \\test "aoeu" { - \\ // cexp(finite|nan +- i inf|nan) = nan + i nan - \\ if ((hx & 0x7fffffff) != 0x7f800000) { - \\ return Complex(f32).new(y - y, y - y); - \\ } - \\ // cexp(-inf +- i inf|nan) = 0 + i0 - \\ else if (hx & 0x80000000 != 0) { - \\ return Complex(f32).new(0, 0); - \\ } - \\ // cexp(+inf +- i inf|nan) = inf + i nan - \\ // another comment - \\ else { - \\ return Complex(f32).new(x, y - y); - \\ } - \\} - \\ - ); -} - -test "zig fmt: same line comments in expression" { - try testCanonical( - \\test "aoeu" { - \\ const x = ( // a - \\ 0 // b - \\ ); // c - \\} - \\ - ); -} - -test "zig fmt: add comma on last switch prong" { - try testTransform( - \\test "aoeu" { - \\switch (self.init_arg_expr) { - \\ InitArg.Type => |t| { }, - \\ InitArg.None, - \\ InitArg.Enum => { } - \\} - \\ switch (self.init_arg_expr) { - \\ InitArg.Type => |t| { }, - \\ InitArg.None, - \\ InitArg.Enum => { }//line comment - \\ } - \\} - , - \\test "aoeu" { - \\ switch (self.init_arg_expr) { - \\ InitArg.Type => |t| {}, - \\ InitArg.None, InitArg.Enum => {}, - \\ } - \\ switch (self.init_arg_expr) { - \\ InitArg.Type => |t| {}, - \\ InitArg.None, InitArg.Enum => {}, //line comment - \\ } - \\} - \\ - ); -} - -test "zig fmt: same-line comment after a statement" { - try testCanonical( - \\test "" { - \\ a = b; - \\ debug.assert(H.digest_size <= H.block_size); // HMAC makes this assumption - \\ a = b; - \\} - \\ - ); -} - -test "zig fmt: same-line comment after var decl in struct" { - try testCanonical( - \\pub const vfs_cap_data = extern struct { - \\ const Data = struct {}; // when on disk. 
- \\}; - \\ - ); -} - -test "zig fmt: same-line comment after field decl" { - try testCanonical( - \\pub const dirent = extern struct { - \\ d_name: u8, - \\ d_name: u8, // comment 1 - \\ d_name: u8, - \\ d_name: u8, // comment 2 - \\ d_name: u8, - \\}; - \\ - ); -} - -test "zig fmt: same-line comment after switch prong" { - try testCanonical( - \\test "" { - \\ switch (err) { - \\ error.PathAlreadyExists => {}, // comment 2 - \\ else => return err, // comment 1 - \\ } - \\} - \\ - ); -} - -test "zig fmt: same-line comment after non-block if expression" { - try testCanonical( - \\comptime { - \\ if (sr > n_uword_bits - 1) // d > r - \\ return 0; - \\} - \\ - ); -} - -test "zig fmt: same-line comment on comptime expression" { - try testCanonical( - \\test "" { - \\ comptime assert(@typeInfo(T) == .Int); // must pass an integer to absInt - \\} - \\ - ); -} - -test "zig fmt: switch with empty body" { - try testCanonical( - \\test "" { - \\ foo() catch |err| switch (err) {}; - \\} - \\ - ); -} - -test "zig fmt: line comments in struct initializer" { - try testCanonical( - \\fn foo() void { - \\ return Self{ - \\ .a = b, - \\ - \\ // Initialize these two fields to buffer_size so that - \\ // in `readFn` we treat the state as being able to read - \\ .start_index = buffer_size, - \\ .end_index = buffer_size, - \\ - \\ // middle - \\ - \\ .a = b, - \\ - \\ // end - \\ }; - \\} - \\ - ); -} - -test "zig fmt: first line comment in struct initializer" { - try testCanonical( - \\pub fn acquire(self: *Self) HeldLock { - \\ return HeldLock{ - \\ // guaranteed allocation elision - \\ .held = self.lock.acquire(), - \\ .value = &self.private_data, - \\ }; - \\} - \\ - ); -} - -test "zig fmt: doc comments before struct field" { - try testCanonical( - \\pub const Allocator = struct { - \\ /// Allocate byte_count bytes and return them in a slice, with the - \\ /// slice's pointer aligned at least to alignment bytes. 
- \\ allocFn: fn () void, - \\}; - \\ - ); -} - -test "zig fmt: error set declaration" { - try testCanonical( - \\const E = error{ - \\ A, - \\ B, - \\ - \\ C, - \\}; - \\ - \\const Error = error{ - \\ /// no more memory - \\ OutOfMemory, - \\}; - \\ - \\const Error = error{ - \\ /// no more memory - \\ OutOfMemory, - \\ - \\ /// another - \\ Another, - \\ - \\ // end - \\}; - \\ - \\const Error = error{OutOfMemory}; - \\const Error = error{}; - \\ - \\const Error = error{ OutOfMemory, OutOfTime }; - \\ - ); -} - -test "zig fmt: union(enum(u32)) with assigned enum values" { - try testCanonical( - \\const MultipleChoice = union(enum(u32)) { - \\ A = 20, - \\ B = 40, - \\ C = 60, - \\ D = 1000, - \\}; - \\ - ); -} - -test "zig fmt: resume from suspend block" { - try testCanonical( - \\fn foo() void { - \\ suspend { - \\ resume @frame(); - \\ } - \\} - \\ - ); -} - -test "zig fmt: comments before error set decl" { - try testCanonical( - \\const UnexpectedError = error{ - \\ /// The Operating System returned an undocumented error code. - \\ Unexpected, - \\ // another - \\ Another, - \\ - \\ // in between - \\ - \\ // at end - \\}; - \\ - ); -} - -test "zig fmt: comments before switch prong" { - try testCanonical( - \\test "" { - \\ switch (err) { - \\ error.PathAlreadyExists => continue, - \\ - \\ // comment 1 - \\ - \\ // comment 2 - \\ else => return err, - \\ // at end - \\ } - \\} - \\ - ); -} - -test "zig fmt: comments before var decl in struct" { - try testCanonical( - \\pub const vfs_cap_data = extern struct { - \\ // All of these are mandated as little endian - \\ // when on disk. - \\ const Data = struct { - \\ permitted: u32, - \\ inheritable: u32, - \\ }; - \\ - \\ // in between - \\ - \\ /// All of these are mandated as little endian - \\ /// when on disk. 
- \\ const Data = struct { - \\ permitted: u32, - \\ inheritable: u32, - \\ }; - \\ - \\ // at end - \\}; - \\ - ); -} - -test "zig fmt: array literal with 1 item on 1 line" { - try testCanonical( - \\var s = []const u64{0} ** 25; - \\ - ); -} - -test "zig fmt: comments before global variables" { - try testCanonical( - \\/// Foo copies keys and values before they go into the map, and - \\/// frees them when they get removed. - \\pub const Foo = struct {}; - \\ - ); -} - -test "zig fmt: comments in statements" { - try testCanonical( - \\test "std" { - \\ // statement comment - \\ _ = @import("foo/bar.zig"); - \\ - \\ // middle - \\ // middle2 - \\ - \\ // end - \\} - \\ - ); -} - -test "zig fmt: comments before test decl" { - try testCanonical( - \\/// top level doc comment - \\test "hi" {} - \\ - \\// top level normal comment - \\test "hi" {} - \\ - \\// middle - \\ - \\// end - \\ - ); -} - -test "zig fmt: preserve spacing" { - try testCanonical( - \\const std = @import("std"); - \\ - \\pub fn main() !void { - \\ var stdout_file = std.io.getStdOut; - \\ var stdout_file = std.io.getStdOut; - \\ - \\ var stdout_file = std.io.getStdOut; - \\ var stdout_file = std.io.getStdOut; - \\} - \\ - ); -} - -test "zig fmt: return types" { - try testCanonical( - \\pub fn main() !void {} - \\pub fn main() anytype {} - \\pub fn main() i32 {} - \\ - ); -} - -test "zig fmt: imports" { - try testCanonical( - \\const std = @import("std"); - \\const std = @import(); - \\ - ); -} - -test "zig fmt: global declarations" { - try testCanonical( - \\const a = b; - \\pub const a = b; - \\var a = b; - \\pub var a = b; - \\const a: i32 = b; - \\pub const a: i32 = b; - \\var a: i32 = b; - \\pub var a: i32 = b; - \\extern const a: i32 = b; - \\pub extern const a: i32 = b; - \\extern var a: i32 = b; - \\pub extern var a: i32 = b; - \\extern "a" const a: i32 = b; - \\pub extern "a" const a: i32 = b; - \\extern "a" var a: i32 = b; - \\pub extern "a" var a: i32 = b; - \\ - ); -} - -test "zig fmt: 
extern declaration" { - try testCanonical( - \\extern var foo: c_int; - \\ - ); -} - -test "zig fmt: alignment" { - try testCanonical( - \\var foo: c_int align(1); - \\ - ); -} - -test "zig fmt: C main" { - try testCanonical( - \\fn main(argc: c_int, argv: **u8) c_int { - \\ const a = b; - \\} - \\ - ); -} - -test "zig fmt: return" { - try testCanonical( - \\fn foo(argc: c_int, argv: **u8) c_int { - \\ return 0; - \\} - \\ - \\fn bar() void { - \\ return; - \\} - \\ - ); -} - -test "zig fmt: pointer attributes" { - try testCanonical( - \\extern fn f1(s: *align(*u8) u8) c_int; - \\extern fn f2(s: **align(1) *const *volatile u8) c_int; - \\extern fn f3(s: *align(1) const *align(1) volatile *const volatile u8) c_int; - \\extern fn f4(s: *align(1) const volatile u8) c_int; - \\extern fn f5(s: [*:0]align(1) const volatile u8) c_int; - \\ - ); -} - -test "zig fmt: slice attributes" { - try testCanonical( - \\extern fn f1(s: *align(*u8) u8) c_int; - \\extern fn f2(s: **align(1) *const *volatile u8) c_int; - \\extern fn f3(s: *align(1) const *align(1) volatile *const volatile u8) c_int; - \\extern fn f4(s: *align(1) const volatile u8) c_int; - \\extern fn f5(s: [*:0]align(1) const volatile u8) c_int; - \\ - ); -} - -test "zig fmt: test declaration" { - try testCanonical( - \\test "test name" { - \\ const a = 1; - \\ var b = 1; - \\} - \\ - ); -} - -test "zig fmt: infix operators" { - try testCanonical( - \\test "infix operators" { - \\ var i = undefined; - \\ i = 2; - \\ i *= 2; - \\ i |= 2; - \\ i ^= 2; - \\ i <<= 2; - \\ i >>= 2; - \\ i &= 2; - \\ i *= 2; - \\ i *%= 2; - \\ i -= 2; - \\ i -%= 2; - \\ i += 2; - \\ i +%= 2; - \\ i /= 2; - \\ i %= 2; - \\ _ = i == i; - \\ _ = i != i; - \\ _ = i != i; - \\ _ = i.i; - \\ _ = i || i; - \\ _ = i!i; - \\ _ = i ** i; - \\ _ = i ++ i; - \\ _ = i orelse i; - \\ _ = i % i; - \\ _ = i / i; - \\ _ = i *% i; - \\ _ = i * i; - \\ _ = i -% i; - \\ _ = i - i; - \\ _ = i +% i; - \\ _ = i + i; - \\ _ = i << i; - \\ _ = i >> i; - \\ _ = i & 
i; - \\ _ = i ^ i; - \\ _ = i | i; - \\ _ = i >= i; - \\ _ = i <= i; - \\ _ = i > i; - \\ _ = i < i; - \\ _ = i and i; - \\ _ = i or i; - \\} - \\ - ); -} - -test "zig fmt: precedence" { - try testCanonical( - \\test "precedence" { - \\ a!b(); - \\ (a!b)(); - \\ !a!b; - \\ !(a!b); - \\ !a{}; - \\ !(a{}); - \\ a + b{}; - \\ (a + b){}; - \\ a << b + c; - \\ (a << b) + c; - \\ a & b << c; - \\ (a & b) << c; - \\ a ^ b & c; - \\ (a ^ b) & c; - \\ a | b ^ c; - \\ (a | b) ^ c; - \\ a == b | c; - \\ (a == b) | c; - \\ a and b == c; - \\ (a and b) == c; - \\ a or b and c; - \\ (a or b) and c; - \\ (a or b) and c; - \\} - \\ - ); -} - -test "zig fmt: prefix operators" { - try testCanonical( - \\test "prefix operators" { - \\ try return --%~!&0; - \\} - \\ - ); -} - -test "zig fmt: call expression" { - try testCanonical( - \\test "test calls" { - \\ a(); - \\ a(1); - \\ a(1, 2); - \\ a(1, 2) + a(1, 2); - \\} - \\ - ); -} - -test "zig fmt: anytype type" { - try testCanonical( - \\fn print(args: anytype) anytype {} - \\ - ); -} - -test "zig fmt: functions" { - try testCanonical( - \\extern fn puts(s: *const u8) c_int; - \\extern "c" fn puts(s: *const u8) c_int; - \\export fn puts(s: *const u8) c_int; - \\inline fn puts(s: *const u8) c_int; - \\noinline fn puts(s: *const u8) c_int; - \\pub extern fn puts(s: *const u8) c_int; - \\pub extern "c" fn puts(s: *const u8) c_int; - \\pub export fn puts(s: *const u8) c_int; - \\pub inline fn puts(s: *const u8) c_int; - \\pub noinline fn puts(s: *const u8) c_int; - \\pub extern fn puts(s: *const u8) align(2 + 2) c_int; - \\pub extern "c" fn puts(s: *const u8) align(2 + 2) c_int; - \\pub export fn puts(s: *const u8) align(2 + 2) c_int; - \\pub inline fn puts(s: *const u8) align(2 + 2) c_int; - \\pub noinline fn puts(s: *const u8) align(2 + 2) c_int; - \\ - ); -} - -test "zig fmt: multiline string" { - try testCanonical( - \\test "" { - \\ const s1 = - \\ \\one - \\ \\two) - \\ \\three - \\ ; - \\ const s3 = // hi - \\ \\one - \\ \\two) - 
\\ \\three - \\ ; - \\} - \\ - ); -} - -test "zig fmt: values" { - try testCanonical( - \\test "values" { - \\ 1; - \\ 1.0; - \\ "string"; - \\ 'c'; - \\ true; - \\ false; - \\ null; - \\ undefined; - \\ anyerror; - \\ this; - \\ unreachable; - \\} - \\ - ); -} - -test "zig fmt: indexing" { - try testCanonical( - \\test "test index" { - \\ a[0]; - \\ a[0 + 5]; - \\ a[0..]; - \\ a[0..5]; - \\ a[a[0]]; - \\ a[a[0..]]; - \\ a[a[0..5]]; - \\ a[a[0]..]; - \\ a[a[0..5]..]; - \\ a[a[0]..a[0]]; - \\ a[a[0..5]..a[0]]; - \\ a[a[0..5]..a[0..5]]; - \\} - \\ - ); -} - -test "zig fmt: struct declaration" { - try testCanonical( - \\const S = struct { - \\ const Self = @This(); - \\ f1: u8, - \\ f3: u8, - \\ - \\ f2: u8, - \\ - \\ fn method(self: *Self) Self { - \\ return self.*; - \\ } - \\}; - \\ - \\const Ps = packed struct { - \\ a: u8, - \\ b: u8, - \\ - \\ c: u8, - \\}; - \\ - \\const Es = extern struct { - \\ a: u8, - \\ b: u8, - \\ - \\ c: u8, - \\}; - \\ - ); -} - -test "zig fmt: enum declaration" { - try testCanonical( - \\const E = enum { - \\ Ok, - \\ SomethingElse = 0, - \\}; - \\ - \\const E2 = enum(u8) { - \\ Ok, - \\ SomethingElse = 255, - \\ SomethingThird, - \\}; - \\ - \\const Ee = extern enum { - \\ Ok, - \\ SomethingElse, - \\ SomethingThird, - \\}; - \\ - \\const Ep = packed enum { - \\ Ok, - \\ SomethingElse, - \\ SomethingThird, - \\}; - \\ - ); -} - -test "zig fmt: union declaration" { - try testCanonical( - \\const U = union { - \\ Int: u8, - \\ Float: f32, - \\ None, - \\ Bool: bool, - \\}; - \\ - \\const Ue = union(enum) { - \\ Int: u8, - \\ Float: f32, - \\ None, - \\ Bool: bool, - \\}; - \\ - \\const E = enum { - \\ Int, - \\ Float, - \\ None, - \\ Bool, - \\}; - \\ - \\const Ue2 = union(E) { - \\ Int: u8, - \\ Float: f32, - \\ None, - \\ Bool: bool, - \\}; - \\ - \\const Eu = extern union { - \\ Int: u8, - \\ Float: f32, - \\ None, - \\ Bool: bool, - \\}; - \\ - ); -} - -test "zig fmt: arrays" { - try testCanonical( - \\test "test array" { - \\ const 
a: [2]u8 = [2]u8{ - \\ 1, - \\ 2, - \\ }; - \\ const a: [2]u8 = []u8{ - \\ 1, - \\ 2, - \\ }; - \\ const a: [0]u8 = []u8{}; - \\ const x: [4:0]u8 = undefined; - \\} - \\ - ); -} - -test "zig fmt: container initializers" { - try testCanonical( - \\const a0 = []u8{}; - \\const a1 = []u8{1}; - \\const a2 = []u8{ - \\ 1, - \\ 2, - \\ 3, - \\ 4, - \\}; - \\const s0 = S{}; - \\const s1 = S{ .a = 1 }; - \\const s2 = S{ - \\ .a = 1, - \\ .b = 2, - \\}; - \\ - ); -} - -test "zig fmt: catch" { - try testCanonical( - \\test "catch" { - \\ const a: anyerror!u8 = 0; - \\ _ = a catch return; - \\ _ = a catch |err| return; - \\} - \\ - ); -} - -test "zig fmt: blocks" { - try testCanonical( - \\test "blocks" { - \\ { - \\ const a = 0; - \\ const b = 0; - \\ } - \\ - \\ blk: { - \\ const a = 0; - \\ const b = 0; - \\ } - \\ - \\ const r = blk: { - \\ const a = 0; - \\ const b = 0; - \\ }; - \\} - \\ - ); -} - -test "zig fmt: switch" { - try testCanonical( - \\test "switch" { - \\ switch (0) { - \\ 0 => {}, - \\ 1 => unreachable, - \\ 2, 3 => {}, - \\ 4...7 => {}, - \\ 1 + 4 * 3 + 22 => {}, - \\ else => { - \\ const a = 1; - \\ const b = a; - \\ }, - \\ } - \\ - \\ const res = switch (0) { - \\ 0 => 0, - \\ 1 => 2, - \\ 1 => a = 4, - \\ else => 4, - \\ }; - \\ - \\ const Union = union(enum) { - \\ Int: i64, - \\ Float: f64, - \\ }; - \\ - \\ switch (u) { - \\ Union.Int => |int| {}, - \\ Union.Float => |*float| unreachable, - \\ } - \\} - \\ - ); -} - -test "zig fmt: while" { - try testCanonical( - \\test "while" { - \\ while (10 < 1) unreachable; - \\ - \\ while (10 < 1) unreachable else unreachable; - \\ - \\ while (10 < 1) { - \\ unreachable; - \\ } - \\ - \\ while (10 < 1) - \\ unreachable; - \\ - \\ var i: usize = 0; - \\ while (i < 10) : (i += 1) { - \\ continue; - \\ } - \\ - \\ i = 0; - \\ while (i < 10) : (i += 1) - \\ continue; - \\ - \\ i = 0; - \\ var j: usize = 0; - \\ while (i < 10) : ({ - \\ i += 1; - \\ j += 1; - \\ }) { - \\ continue; - \\ } - \\ - \\ var a: ?u8 = 2; 
- \\ while (a) |v| : (a = null) { - \\ continue; - \\ } - \\ - \\ while (a) |v| : (a = null) - \\ unreachable; - \\ - \\ label: while (10 < 0) { - \\ unreachable; - \\ } - \\ - \\ const res = while (0 < 10) { - \\ break 7; - \\ } else { - \\ unreachable; - \\ }; - \\ - \\ const res = while (0 < 10) - \\ break 7 - \\ else - \\ unreachable; - \\ - \\ var a: anyerror!u8 = 0; - \\ while (a) |v| { - \\ a = error.Err; - \\ } else |err| { - \\ i = 1; - \\ } - \\ - \\ comptime var k: usize = 0; - \\ inline while (i < 10) : (i += 1) - \\ j += 2; - \\} - \\ - ); -} - -test "zig fmt: for" { - try testCanonical( - \\test "for" { - \\ for (a) |v| { - \\ continue; - \\ } - \\ - \\ for (a) |v| continue; - \\ - \\ for (a) |v| continue else return; - \\ - \\ for (a) |v| { - \\ continue; - \\ } else return; - \\ - \\ for (a) |v| continue else { - \\ return; - \\ } - \\ - \\ for (a) |v| - \\ continue - \\ else - \\ return; - \\ - \\ for (a) |v| - \\ continue; - \\ - \\ for (a) |*v| - \\ continue; - \\ - \\ for (a) |v, i| { - \\ continue; - \\ } - \\ - \\ for (a) |v, i| - \\ continue; - \\ - \\ for (a) |b| switch (b) { - \\ c => {}, - \\ d => {}, - \\ }; - \\ - \\ for (a) |b| - \\ switch (b) { - \\ c => {}, - \\ d => {}, - \\ }; - \\ - \\ const res = for (a) |v, i| { - \\ break v; - \\ } else { - \\ unreachable; - \\ }; - \\ - \\ var num: usize = 0; - \\ inline for (a) |v, i| { - \\ num += v; - \\ num += i; - \\ } - \\} - \\ - ); - - try testTransform( - \\test "fix for" { - \\ for (a) |x| - \\ f(x) else continue; - \\} - \\ - , - \\test "fix for" { - \\ for (a) |x| - \\ f(x) - \\ else continue; - \\} - \\ - ); -} - -test "zig fmt: if" { - try testCanonical( - \\test "if" { - \\ if (10 < 0) { - \\ unreachable; - \\ } - \\ - \\ if (10 < 0) unreachable; - \\ - \\ if (10 < 0) { - \\ unreachable; - \\ } else { - \\ const a = 20; - \\ } - \\ - \\ if (10 < 0) { - \\ unreachable; - \\ } else if (5 < 0) { - \\ unreachable; - \\ } else { - \\ const a = 20; - \\ } - \\ - \\ const 
is_world_broken = if (10 < 0) true else false; - \\ const some_number = 1 + if (10 < 0) 2 else 3; - \\ - \\ const a: ?u8 = 10; - \\ const b: ?u8 = null; - \\ if (a) |v| { - \\ const some = v; - \\ } else if (b) |*v| { - \\ unreachable; - \\ } else { - \\ const some = 10; - \\ } - \\ - \\ const non_null_a = if (a) |v| v else 0; - \\ - \\ const a_err: anyerror!u8 = 0; - \\ if (a_err) |v| { - \\ const p = v; - \\ } else |err| { - \\ unreachable; - \\ } - \\} - \\ - ); -} - -test "zig fmt: defer" { - try testCanonical( - \\test "defer" { - \\ var i: usize = 0; - \\ defer i = 1; - \\ defer { - \\ i += 2; - \\ i *= i; - \\ } - \\ - \\ errdefer i += 3; - \\ errdefer { - \\ i += 2; - \\ i /= i; - \\ } - \\} - \\ - ); -} - -test "zig fmt: comptime" { - try testCanonical( - \\fn a() u8 { - \\ return 5; - \\} - \\ - \\fn b(comptime i: u8) u8 { - \\ return i; - \\} - \\ - \\const av = comptime a(); - \\const av2 = comptime blk: { - \\ var res = a(); - \\ res *= b(2); - \\ break :blk res; - \\}; - \\ - \\comptime { - \\ _ = a(); - \\} - \\ - \\test "comptime" { - \\ const av3 = comptime a(); - \\ const av4 = comptime blk: { - \\ var res = a(); - \\ res *= a(); - \\ break :blk res; - \\ }; - \\ - \\ comptime var i = 0; - \\ comptime { - \\ i = a(); - \\ i += b(i); - \\ } - \\} - \\ - ); -} - -test "zig fmt: fn type" { - try testCanonical( - \\fn a(i: u8) u8 { - \\ return i + 1; - \\} - \\ - \\const a: fn (u8) u8 = undefined; - \\const b: fn (u8) callconv(.Naked) u8 = undefined; - \\const ap: fn (u8) u8 = a; - \\ - ); -} - -test "zig fmt: inline asm" { - try testCanonical( - \\pub fn syscall1(number: usize, arg1: usize) usize { - \\ return asm volatile ("syscall" - \\ : [ret] "={rax}" (-> usize) - \\ : [number] "{rax}" (number), - \\ [arg1] "{rdi}" (arg1) - \\ : "rcx", "r11" - \\ ); - \\} - \\ - ); -} - -test "zig fmt: async functions" { - try testCanonical( - \\fn simpleAsyncFn() void { - \\ const a = async a.b(); - \\ x += 1; - \\ suspend; - \\ x += 1; - \\ suspend; - \\ const 
p: anyframe->void = async simpleAsyncFn() catch unreachable; - \\ await p; - \\} - \\ - \\test "suspend, resume, await" { - \\ const p: anyframe = async testAsyncSeq(); - \\ resume p; - \\ await p; - \\} - \\ - ); -} - -test "zig fmt: nosuspend" { - try testCanonical( - \\const a = nosuspend foo(); - \\ - ); -} - -test "zig fmt: Block after if" { - try testCanonical( - \\test "Block after if" { - \\ if (true) { - \\ const a = 0; - \\ } - \\ - \\ { - \\ const a = 0; - \\ } - \\} - \\ - ); -} - -test "zig fmt: use" { - try testCanonical( - \\usingnamespace @import("std"); - \\pub usingnamespace @import("std"); - \\ - ); -} - -test "zig fmt: string identifier" { - try testCanonical( - \\const @"a b" = @"c d".@"e f"; - \\fn @"g h"() void {} - \\ - ); -} - -test "zig fmt: error return" { - try testCanonical( - \\fn err() anyerror { - \\ call(); - \\ return error.InvalidArgs; - \\} - \\ - ); -} - -test "zig fmt: comptime block in container" { - try testCanonical( - \\pub fn container() type { - \\ return struct { - \\ comptime { - \\ if (false) { - \\ unreachable; - \\ } - \\ } - \\ }; - \\} - \\ - ); -} - -test "zig fmt: inline asm parameter alignment" { - try testCanonical( - \\pub fn main() void { - \\ asm volatile ( - \\ \\ foo - \\ \\ bar - \\ ); - \\ asm volatile ( - \\ \\ foo - \\ \\ bar - \\ : [_] "" (-> usize), - \\ [_] "" (-> usize) - \\ ); - \\ asm volatile ( - \\ \\ foo - \\ \\ bar - \\ : - \\ : [_] "" (0), - \\ [_] "" (0) - \\ ); - \\ asm volatile ( - \\ \\ foo - \\ \\ bar - \\ : - \\ : - \\ : "", "" - \\ ); - \\ asm volatile ( - \\ \\ foo - \\ \\ bar - \\ : [_] "" (-> usize), - \\ [_] "" (-> usize) - \\ : [_] "" (0), - \\ [_] "" (0) - \\ : "", "" - \\ ); - \\} - \\ - ); -} - -test "zig fmt: multiline string in array" { - try testCanonical( - \\const Foo = [][]const u8{ - \\ \\aaa - \\ , - \\ \\bbb - \\}; - \\ - \\fn bar() void { - \\ const Foo = [][]const u8{ - \\ \\aaa - \\ , - \\ \\bbb - \\ }; - \\ const Bar = [][]const u8{ // comment here - \\ \\aaa - \\ 
\\ - \\ , // and another comment can go here - \\ \\bbb - \\ }; - \\} - \\ - ); -} - -test "zig fmt: if type expr" { - try testCanonical( - \\const mycond = true; - \\pub fn foo() if (mycond) i32 else void { - \\ if (mycond) { - \\ return 42; - \\ } - \\} - \\ - ); -} -test "zig fmt: file ends with struct field" { - try testCanonical( - \\a: bool - \\ - ); -} - -test "zig fmt: comment after empty comment" { - try testTransform( - \\const x = true; // - \\// - \\// - \\//a - \\ - , - \\const x = true; - \\//a - \\ - ); -} - -test "zig fmt: line comment in array" { - try testTransform( - \\test "a" { - \\ var arr = [_]u32{ - \\ 0 - \\ // 1, - \\ // 2, - \\ }; - \\} - \\ - , - \\test "a" { - \\ var arr = [_]u32{ - \\ 0, // 1, - \\ // 2, - \\ }; - \\} - \\ - ); - try testCanonical( - \\test "a" { - \\ var arr = [_]u32{ - \\ 0, - \\ // 1, - \\ // 2, - \\ }; - \\} - \\ - ); -} - -test "zig fmt: comment after params" { - try testTransform( - \\fn a( - \\ b: u32 - \\ // c: u32, - \\ // d: u32, - \\) void {} - \\ - , - \\fn a( - \\ b: u32, // c: u32, - \\ // d: u32, - \\) void {} - \\ - ); - try testCanonical( - \\fn a( - \\ b: u32, - \\ // c: u32, - \\ // d: u32, - \\) void {} - \\ - ); -} - -test "zig fmt: comment in array initializer/access" { - try testCanonical( - \\test "a" { - \\ var a = x{ //aa - \\ //bb - \\ }; - \\ var a = []x{ //aa - \\ //bb - \\ }; - \\ var b = [ //aa - \\ _ - \\ ]x{ //aa - \\ //bb - \\ 9, - \\ }; - \\ var c = b[ //aa - \\ 0 - \\ ]; - \\ var d = [_ - \\ //aa - \\ ]x{ //aa - \\ //bb - \\ 9, - \\ }; - \\ var e = d[0 - \\ //aa - \\ ]; - \\} - \\ - ); -} - -test "zig fmt: comments at several places in struct init" { - try testTransform( - \\var bar = Bar{ - \\ .x = 10, // test - \\ .y = "test" - \\ // test - \\}; - \\ - , - \\var bar = Bar{ - \\ .x = 10, // test - \\ .y = "test", // test - \\}; - \\ - ); - - try testCanonical( - \\var bar = Bar{ // test - \\ .x = 10, // test - \\ .y = "test", - \\ // test - \\}; - \\ - ); -} - -test "zig fmt: top 
level doc comments" { - try testCanonical( - \\//! tld 1 - \\//! tld 2 - \\//! tld 3 - \\ - \\// comment - \\ - \\/// A doc - \\const A = struct { - \\ //! A tld 1 - \\ //! A tld 2 - \\ //! A tld 3 - \\}; - \\ - \\/// B doc - \\const B = struct { - \\ //! B tld 1 - \\ //! B tld 2 - \\ //! B tld 3 - \\ - \\ /// b doc - \\ b: u32, - \\}; - \\ - \\/// C doc - \\const C = struct { - \\ //! C tld 1 - \\ //! C tld 2 - \\ //! C tld 3 - \\ - \\ /// c1 doc - \\ c1: u32, - \\ - \\ //! C tld 4 - \\ //! C tld 5 - \\ //! C tld 6 - \\ - \\ /// c2 doc - \\ c2: u32, - \\}; - \\ - ); - try testCanonical( - \\//! Top-level documentation. - \\ - \\/// This is A - \\pub const A = usize; - \\ - ); - try testCanonical( - \\//! Nothing here - \\ - ); -} - -test "zig fmt: extern without container keyword returns error" { - try testError( - \\const container = extern {}; - \\ - , &[_]Error{ - .ExpectedExpr, - .ExpectedVarDeclOrFn, - }); -} - -test "zig fmt: integer literals with underscore separators" { - try testTransform( - \\const - \\ x = - \\ 1_234_567 - \\ +(0b0_1-0o7_0+0xff_FF ) + 0_0; - , - \\const x = - \\ 1_234_567 + (0b0_1 - 0o7_0 + 0xff_FF) + 0_0; - \\ - ); -} - -test "zig fmt: hex literals with underscore separators" { - try testTransform( - \\pub fn orMask(a: [ 1_000 ]u64, b: [ 1_000] u64) [1_000]u64 { - \\ var c: [1_000]u64 = [1]u64{ 0xFFFF_FFFF_FFFF_FFFF}**1_000; - \\ for (c [ 0_0 .. 
]) |_, i| { - \\ c[i] = (a[i] | b[i]) & 0xCCAA_CCAA_CCAA_CCAA; - \\ } - \\ return c; - \\} - \\ - \\ - , - \\pub fn orMask(a: [1_000]u64, b: [1_000]u64) [1_000]u64 { - \\ var c: [1_000]u64 = [1]u64{0xFFFF_FFFF_FFFF_FFFF} ** 1_000; - \\ for (c[0_0..]) |_, i| { - \\ c[i] = (a[i] | b[i]) & 0xCCAA_CCAA_CCAA_CCAA; - \\ } - \\ return c; - \\} - \\ - ); -} - -test "zig fmt: decimal float literals with underscore separators" { - try testTransform( - \\pub fn main() void { - \\ const a:f64=(10.0e-0+(10.e+0))+10_00.00_00e-2+00_00.00_10e+4; - \\ const b:f64=010.0--0_10.+0_1_0.0_0+1e2; - \\ std.debug.warn("a: {}, b: {} -> a+b: {}\n", .{ a, b, a + b }); - \\} - , - \\pub fn main() void { - \\ const a: f64 = (10.0e-0 + (10.e+0)) + 10_00.00_00e-2 + 00_00.00_10e+4; - \\ const b: f64 = 010.0 - -0_10. + 0_1_0.0_0 + 1e2; - \\ std.debug.warn("a: {}, b: {} -> a+b: {}\n", .{ a, b, a + b }); - \\} - \\ - ); -} - -test "zig fmt: hexadeciaml float literals with underscore separators" { - try testTransform( - \\pub fn main() void { - \\ const a: f64 = (0x10.0p-0+(0x10.p+0))+0x10_00.00_00p-8+0x00_00.00_10p+16; - \\ const b: f64 = 0x0010.0--0x00_10.+0x10.00+0x1p4; - \\ std.debug.warn("a: {}, b: {} -> a+b: {}\n", .{ a, b, a + b }); - \\} - , - \\pub fn main() void { - \\ const a: f64 = (0x10.0p-0 + (0x10.p+0)) + 0x10_00.00_00p-8 + 0x00_00.00_10p+16; - \\ const b: f64 = 0x0010.0 - -0x00_10. + 0x10.00 + 0x1p4; - \\ std.debug.warn("a: {}, b: {} -> a+b: {}\n", .{ a, b, a + b }); - \\} - \\ - ); -} - -test "zig fmt: convert async fn into callconv(.Async)" { - try testTransform( - \\async fn foo() void {} - , - \\fn foo() callconv(.Async) void {} - \\ - ); -} - -test "zig fmt: convert extern fn proto into callconv(.C)" { - try testTransform( - \\extern fn foo0() void {} - \\const foo1 = extern fn () void; - , - \\extern fn foo0() void {} - \\const foo1 = fn () callconv(.C) void; - \\ - ); -} - -test "zig fmt: C var args" { - try testCanonical( - \\pub extern "c" fn printf(format: [*:0]const u8, ...) 
c_int; - \\ - ); -} - -test "zig fmt: Only indent multiline string literals in function calls" { - try testCanonical( - \\test "zig fmt:" { - \\ try testTransform( - \\ \\const X = struct { - \\ \\ foo: i32, bar: i8 }; - \\ , - \\ \\const X = struct { - \\ \\ foo: i32, bar: i8 - \\ \\}; - \\ \\ - \\ ); - \\} - \\ - ); -} - -test "zig fmt: Don't add extra newline after if" { - try testCanonical( - \\pub fn atomicSymLink(allocator: *Allocator, existing_path: []const u8, new_path: []const u8) !void { - \\ if (cwd().symLink(existing_path, new_path, .{})) { - \\ return; - \\ } - \\} - \\ - ); -} - -test "zig fmt: comments in ternary ifs" { - try testCanonical( - \\const x = if (true) { - \\ 1; - \\} else if (false) - \\ // Comment - \\ 0; - \\const y = if (true) - \\ // Comment - \\ 1 - \\else - \\ 0; - \\ - \\pub extern "c" fn printf(format: [*:0]const u8, ...) c_int; - \\ - ); -} - -test "zig fmt: test comments in field access chain" { - try testCanonical( - \\pub const str = struct { - \\ pub const Thing = more.more // - \\ .more() // - \\ .more().more() // - \\ .more() // - \\ // .more() // - \\ .more() // - \\ .more(); - \\ data: Data, - \\}; - \\ - \\pub const str = struct { - \\ pub const Thing = more.more // - \\ .more() // - \\ // .more() // - \\ // .more() // - \\ // .more() // - \\ .more() // - \\ .more(); - \\ data: Data, - \\}; - \\ - \\pub const str = struct { - \\ pub const Thing = more // - \\ .more // - \\ .more() // - \\ .more(); - \\ data: Data, - \\}; - \\ - ); -} - -test "zig fmt: Indent comma correctly after multiline string literals in arg list (trailing comma)" { - try testCanonical( - \\fn foo() void { - \\ z.display_message_dialog( - \\ *const [323:0]u8, - \\ \\Message Text - \\ \\------------ - \\ \\xxxxxxxxxxxx - \\ \\xxxxxxxxxxxx - \\ , - \\ g.GtkMessageType.GTK_MESSAGE_WARNING, - \\ null, - \\ ); - \\ - \\ z.display_message_dialog(*const [323:0]u8, - \\ \\Message Text - \\ \\------------ - \\ \\xxxxxxxxxxxx - \\ \\xxxxxxxxxxxx - \\ , 
g.GtkMessageType.GTK_MESSAGE_WARNING, null); - \\} - \\ - ); -} - -test "zig fmt: Control flow statement as body of blockless if" { - try testCanonical( - \\pub fn main() void { - \\ const zoom_node = if (focused_node == layout_first) - \\ if (it.next()) { - \\ if (!node.view.pending.float and !node.view.pending.fullscreen) break node; - \\ } else null - \\ else - \\ focused_node; - \\ - \\ const zoom_node = if (focused_node == layout_first) while (it.next()) |node| { - \\ if (!node.view.pending.float and !node.view.pending.fullscreen) break node; - \\ } else null else - \\ focused_node; - \\ - \\ const zoom_node = if (focused_node == layout_first) - \\ if (it.next()) { - \\ if (!node.view.pending.float and !node.view.pending.fullscreen) break node; - \\ } else null; - \\ - \\ const zoom_node = if (focused_node == layout_first) while (it.next()) |node| { - \\ if (!node.view.pending.float and !node.view.pending.fullscreen) break node; - \\ }; - \\ - \\ const zoom_node = if (focused_node == layout_first) for (nodes) |node| { - \\ break node; - \\ }; - \\ - \\ const zoom_node = if (focused_node == layout_first) switch (nodes) { - \\ 0 => 0, - \\ } else - \\ focused_node; - \\} - \\ - ); -} - -test "zig fmt: " { - try testCanonical( - \\pub fn sendViewTags(self: Self) void { - \\ var it = ViewStack(View).iterator(self.output.views.first, std.math.maxInt(u32)); - \\ while (it.next()) |node| - \\ view_tags.append(node.view.current_tags) catch { - \\ c.wl_resource_post_no_memory(self.wl_resource); - \\ log.crit(.river_status, "out of memory", .{}); - \\ return; - \\ }; - \\} - \\ - ); -} - -test "zig fmt: allow trailing line comments to do manual array formatting" { - try testCanonical( - \\fn foo() void { - \\ self.code.appendSliceAssumeCapacity(&[_]u8{ - \\ 0x55, // push rbp - \\ 0x48, 0x89, 0xe5, // mov rbp, rsp - \\ 0x48, 0x81, 0xec, // sub rsp, imm32 (with reloc) - \\ }); - \\ - \\ di_buf.appendAssumeCapacity(&[_]u8{ - \\ 1, DW.TAG_compile_unit, DW.CHILDREN_no, // 
header - \\ DW.AT_stmt_list, DW_FORM_data4, // form value pairs - \\ DW.AT_low_pc, DW_FORM_addr, - \\ DW.AT_high_pc, DW_FORM_addr, - \\ DW.AT_name, DW_FORM_strp, - \\ DW.AT_comp_dir, DW_FORM_strp, - \\ DW.AT_producer, DW_FORM_strp, - \\ DW.AT_language, DW_FORM_data2, - \\ 0, 0, // sentinel - \\ }); - \\ - \\ self.code.appendSliceAssumeCapacity(&[_]u8{ - \\ 0x55, // push rbp - \\ 0x48, 0x89, 0xe5, // mov rbp, rsp - \\ // How do we handle this? - \\ //0x48, 0x81, 0xec, // sub rsp, imm32 (with reloc) - \\ // Here's a blank line, should that be allowed? - \\ - \\ 0x48, 0x89, 0xe5, - \\ 0x33, 0x45, - \\ // Now the comment breaks a single line -- how do we handle this? - \\ 0x88, - \\ }); - \\} - \\ - ); -} - -test "zig fmt: multiline string literals should play nice with array initializers" { - try testCanonical( - \\fn main() void { - \\ var a = .{.{.{.{.{.{.{.{ - \\ 0, - \\ }}}}}}}}; - \\ myFunc(.{ - \\ "aaaaaaa", "bbbbbb", "ccccc", - \\ "dddd", ("eee"), ("fff"), - \\ ("gggg"), - \\ // Line comment - \\ \\Multiline String Literals can be quite long - \\ , - \\ \\Multiline String Literals can be quite long - \\ \\Multiline String Literals can be quite long - \\ , - \\ \\Multiline String Literals can be quite long - \\ \\Multiline String Literals can be quite long - \\ \\Multiline String Literals can be quite long - \\ \\Multiline String Literals can be quite long - \\ , - \\ ( - \\ \\Multiline String Literals can be quite long - \\ ), - \\ .{ - \\ \\xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx - \\ \\xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx - \\ \\xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx - \\ }, - \\ .{( - \\ \\xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx - \\ )}, - \\ .{ - \\ "xxxxxxx", "xxx", - \\ ( - \\ \\ xxx - \\ ), - \\ "xxx", "xxx", - \\ }, - \\ .{ "xxxxxxx", "xxx", "xxx", "xxx" }, .{ "xxxxxxx", "xxx", "xxx", "xxx" }, - \\ "aaaaaaa", "bbbbbb", "ccccc", // - - \\ "dddd", ("eee"), ("fff"), - \\ .{ - \\ "xxx", "xxx", - \\ ( - \\ \\ xxx - \\ ), - \\ "xxxxxxxxxxxxxx", "xxx", 
- \\ }, - \\ .{ - \\ ( - \\ \\xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx - \\ ), - \\ \\xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx - \\ }, - \\ \\xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx - \\ \\xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx - \\ }); - \\} - \\ - ); -} - -test "zig fmt: use of comments and Multiline string literals may force the parameters over multiple lines" { - try testCanonical( - \\pub fn makeMemUndefined(qzz: []u8) i1 { - \\ cases.add( // fixed bug #2032 - \\ "compile diagnostic string for top level decl type", - \\ \\export fn entry() void { - \\ \\ var foo: u32 = @This(){}; - \\ \\} - \\ , &[_][]const u8{ - \\ "tmp.zig:2:27: error: type 'u32' does not support array initialization", - \\ }); - \\ @compileError( - \\ \\ unknown-length pointers and C pointers cannot be hashed deeply. - \\ \\ Consider providing your own hash function. - \\ \\ unknown-length pointers and C pointers cannot be hashed deeply. - \\ \\ Consider providing your own hash function. - \\ ); - \\ return @intCast(i1, doMemCheckClientRequestExpr(0, // default return - \\ .MakeMemUndefined, @ptrToInt(qzz.ptr), qzz.len, 0, 0, 0)); - \\} - \\ - \\// This looks like garbage don't do this - \\const rparen = tree.prevToken( - \\// the first token for the annotation expressions is the left - \\// parenthesis, hence the need for two prevToken - \\ if (fn_proto.getAlignExpr()) |align_expr| - \\ tree.prevToken(tree.prevToken(align_expr.firstToken())) - \\else if (fn_proto.getSectionExpr()) |section_expr| - \\ tree.prevToken(tree.prevToken(section_expr.firstToken())) - \\else if (fn_proto.getCallconvExpr()) |callconv_expr| - \\ tree.prevToken(tree.prevToken(callconv_expr.firstToken())) - \\else switch (fn_proto.return_type) { - \\ .Explicit => |node| node.firstToken(), - \\ .InferErrorSet => |node| tree.prevToken(node.firstToken()), - \\ .Invalid => unreachable, - \\}); - \\ - ); -} - -test "zig fmt: single argument trailing commas in @builtins()" { - try testCanonical( - \\pub fn foo(qzz: []u8) 
i1 { - \\ @panic( - \\ foo, - \\ ); - \\ panic( - \\ foo, - \\ ); - \\ @panic( - \\ foo, - \\ bar, - \\ ); - \\} - \\ - ); -} - -test "zig fmt: trailing comma should force multiline 1 column" { - try testTransform( - \\pub const UUID_NULL: uuid_t = [16]u8{0,0,0,0,}; - \\ - , - \\pub const UUID_NULL: uuid_t = [16]u8{ - \\ 0, - \\ 0, - \\ 0, - \\ 0, - \\}; - \\ - ); -} - -test "zig fmt: function params should align nicely" { - try testCanonical( - \\pub fn foo() void { - \\ cases.addRuntimeSafety("slicing operator with sentinel", - \\ \\const std = @import("std"); - \\ ++ check_panic_msg ++ - \\ \\pub fn main() void { - \\ \\ var buf = [4]u8{'a','b','c',0}; - \\ \\ const slice = buf[0..:0]; - \\ \\} - \\ ); - \\} - \\ - ); -} +test "zig fmt: simple top level comptime block" { + try testCanonical( + \\comptime {} + \\ + ); +} + +//test "recovery: top level" { +// try testError( +// \\test "" {inline} +// \\test "" {inline} +// , &[_]Error{ +// .ExpectedInlinable, +// .ExpectedInlinable, +// }); +//} +// +//test "recovery: block statements" { +// try testError( +// \\test "" { +// \\ foo + +; +// \\ inline; +// \\} +// , &[_]Error{ +// .InvalidToken, +// .ExpectedInlinable, +// }); +//} +// +//test "recovery: missing comma" { +// try testError( +// \\test "" { +// \\ switch (foo) { +// \\ 2 => {} +// \\ 3 => {} +// \\ else => { +// \\ foo && bar +; +// \\ } +// \\ } +// \\} +// , &[_]Error{ +// .ExpectedToken, +// .ExpectedToken, +// .InvalidAnd, +// .InvalidToken, +// }); +//} +// +//test "recovery: extra qualifier" { +// try testError( +// \\const a: *const const u8; +// \\test "" +// , &[_]Error{ +// .ExtraConstQualifier, +// .ExpectedLBrace, +// }); +//} +// +//test "recovery: missing return type" { +// try testError( +// \\fn foo() { +// \\ a && b; +// \\} +// \\test "" +// , &[_]Error{ +// .ExpectedReturnType, +// .InvalidAnd, +// .ExpectedLBrace, +// }); +//} +// +//test "recovery: continue after invalid decl" { +// try testError( +// \\fn foo { +// \\ inline; 
+// \\} +// \\pub test "" { +// \\ async a && b; +// \\} +// , &[_]Error{ +// .ExpectedToken, +// .ExpectedPubItem, +// .ExpectedParamList, +// .InvalidAnd, +// }); +// try testError( +// \\threadlocal test "" { +// \\ @a && b; +// \\} +// , &[_]Error{ +// .ExpectedVarDecl, +// .ExpectedParamList, +// .InvalidAnd, +// }); +//} +// +//test "recovery: invalid extern/inline" { +// try testError( +// \\inline test "" { a && b; } +// , &[_]Error{ +// .ExpectedFn, +// .InvalidAnd, +// }); +// try testError( +// \\extern "" test "" { a && b; } +// , &[_]Error{ +// .ExpectedVarDeclOrFn, +// .InvalidAnd, +// }); +//} +// +//test "recovery: missing semicolon" { +// try testError( +// \\test "" { +// \\ comptime a && b +// \\ c && d +// \\ @foo +// \\} +// , &[_]Error{ +// .InvalidAnd, +// .ExpectedToken, +// .InvalidAnd, +// .ExpectedToken, +// .ExpectedParamList, +// .ExpectedToken, +// }); +//} +// +//test "recovery: invalid container members" { +// try testError( +// \\usingnamespace; +// \\foo+ +// \\bar@, +// \\while (a == 2) { test "" {}} +// \\test "" { +// \\ a && b +// \\} +// , &[_]Error{ +// .ExpectedExpr, +// .ExpectedToken, +// .ExpectedToken, +// .ExpectedContainerMembers, +// .InvalidAnd, +// .ExpectedToken, +// }); +//} +// +//test "recovery: invalid parameter" { +// try testError( +// \\fn main() void { +// \\ a(comptime T: type) +// \\} +// , &[_]Error{ +// .ExpectedToken, +// }); +//} +// +//test "recovery: extra '}' at top level" { +// try testError( +// \\}}} +// \\test "" { +// \\ a && b; +// \\} +// , &[_]Error{ +// .ExpectedContainerMembers, +// .ExpectedContainerMembers, +// .ExpectedContainerMembers, +// .InvalidAnd, +// }); +//} +// +//test "recovery: mismatched bracket at top level" { +// try testError( +// \\const S = struct { +// \\ arr: 128]?G +// \\}; +// , &[_]Error{ +// .ExpectedToken, +// }); +//} +// +//test "recovery: invalid global error set access" { +// try testError( +// \\test "" { +// \\ error && foo; +// \\} +// , &[_]Error{ +// 
.ExpectedToken, +// .ExpectedIdentifier, +// .InvalidAnd, +// }); +//} +// +//test "recovery: invalid asterisk after pointer dereference" { +// try testError( +// \\test "" { +// \\ var sequence = "repeat".*** 10; +// \\} +// , &[_]Error{ +// .AsteriskAfterPointerDereference, +// }); +// try testError( +// \\test "" { +// \\ var sequence = "repeat".** 10&&a; +// \\} +// , &[_]Error{ +// .AsteriskAfterPointerDereference, +// .InvalidAnd, +// }); +//} +// +//test "recovery: missing semicolon after if, for, while stmt" { +// try testError( +// \\test "" { +// \\ if (foo) bar +// \\ for (foo) |a| bar +// \\ while (foo) bar +// \\ a && b; +// \\} +// , &[_]Error{ +// .ExpectedSemiOrElse, +// .ExpectedSemiOrElse, +// .ExpectedSemiOrElse, +// .InvalidAnd, +// }); +//} +// +//test "recovery: invalid comptime" { +// try testError( +// \\comptime +// , &[_]Error{ +// .ExpectedBlockOrField, +// }); +//} +// +//test "recovery: missing block after for/while loops" { +// try testError( +// \\test "" { while (foo) } +// , &[_]Error{ +// .ExpectedBlockOrAssignment, +// }); +// try testError( +// \\test "" { for (foo) |bar| } +// , &[_]Error{ +// .ExpectedBlockOrAssignment, +// }); +//} +// +//test "zig fmt: respect line breaks after var declarations" { +// try testCanonical( +// \\const crc = +// \\ lookup_tables[0][p[7]] ^ +// \\ lookup_tables[1][p[6]] ^ +// \\ lookup_tables[2][p[5]] ^ +// \\ lookup_tables[3][p[4]] ^ +// \\ lookup_tables[4][@truncate(u8, self.crc >> 24)] ^ +// \\ lookup_tables[5][@truncate(u8, self.crc >> 16)] ^ +// \\ lookup_tables[6][@truncate(u8, self.crc >> 8)] ^ +// \\ lookup_tables[7][@truncate(u8, self.crc >> 0)]; +// \\ +// ); +//} +// +//test "zig fmt: multiline string mixed with comments" { +// try testCanonical( +// \\const s1 = +// \\ //\\one +// \\ \\two) +// \\ \\three +// \\; +// \\const s2 = +// \\ \\one +// \\ \\two) +// \\ //\\three +// \\; +// \\const s3 = +// \\ \\one +// \\ //\\two) +// \\ \\three +// \\; +// \\const s4 = +// \\ \\one +// \\ 
//\\two +// \\ \\three +// \\ //\\four +// \\ \\five +// \\; +// \\const a = +// \\ 1; +// \\ +// ); +//} +// +//test "zig fmt: empty file" { +// try testCanonical( +// \\ +// ); +//} +// +//test "zig fmt: if statment" { +// try testCanonical( +// \\test "" { +// \\ if (optional()) |some| +// \\ bar = some.foo(); +// \\} +// \\ +// ); +//} +// +//test "zig fmt: top-level fields" { +// try testCanonical( +// \\a: did_you_know, +// \\b: all_files_are, +// \\structs: ?x, +// \\ +// ); +//} +// +//test "zig fmt: decl between fields" { +// try testError( +// \\const S = struct { +// \\ const foo = 2; +// \\ const bar = 2; +// \\ const baz = 2; +// \\ a: usize, +// \\ const foo1 = 2; +// \\ const bar1 = 2; +// \\ const baz1 = 2; +// \\ b: usize, +// \\}; +// , &[_]Error{ +// .DeclBetweenFields, +// }); +//} +// +//test "zig fmt: eof after missing comma" { +// try testError( +// \\foo() +// , &[_]Error{ +// .ExpectedToken, +// }); +//} +// +//test "zig fmt: errdefer with payload" { +// try testCanonical( +// \\pub fn main() anyerror!void { +// \\ errdefer |a| x += 1; +// \\ errdefer |a| {} +// \\ errdefer |a| { +// \\ x += 1; +// \\ } +// \\} +// \\ +// ); +//} +// +//test "zig fmt: nosuspend block" { +// try testCanonical( +// \\pub fn main() anyerror!void { +// \\ nosuspend { +// \\ var foo: Foo = .{ .bar = 42 }; +// \\ } +// \\} +// \\ +// ); +//} +// +//test "zig fmt: nosuspend await" { +// try testCanonical( +// \\fn foo() void { +// \\ x = nosuspend await y; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: trailing comma in container declaration" { +// try testCanonical( +// \\const X = struct { foo: i32 }; +// \\const X = struct { foo: i32, bar: i32 }; +// \\const X = struct { foo: i32 = 1, bar: i32 = 2 }; +// \\const X = struct { foo: i32 align(4), bar: i32 align(4) }; +// \\const X = struct { foo: i32 align(4) = 1, bar: i32 align(4) = 2 }; +// \\ +// ); +// try testCanonical( +// \\test "" { +// \\ comptime { +// \\ const X = struct { +// \\ x: i32 +// \\ }; +// 
\\ } +// \\} +// \\ +// ); +// try testTransform( +// \\const X = struct { +// \\ foo: i32, bar: i8 }; +// , +// \\const X = struct { +// \\ foo: i32, bar: i8 +// \\}; +// \\ +// ); +//} +// +//test "zig fmt: trailing comma in fn parameter list" { +// try testCanonical( +// \\pub fn f( +// \\ a: i32, +// \\ b: i32, +// \\) i32 {} +// \\pub fn f( +// \\ a: i32, +// \\ b: i32, +// \\) align(8) i32 {} +// \\pub fn f( +// \\ a: i32, +// \\ b: i32, +// \\) linksection(".text") i32 {} +// \\pub fn f( +// \\ a: i32, +// \\ b: i32, +// \\) callconv(.C) i32 {} +// \\pub fn f( +// \\ a: i32, +// \\ b: i32, +// \\) align(8) linksection(".text") i32 {} +// \\pub fn f( +// \\ a: i32, +// \\ b: i32, +// \\) align(8) callconv(.C) i32 {} +// \\pub fn f( +// \\ a: i32, +// \\ b: i32, +// \\) align(8) linksection(".text") callconv(.C) i32 {} +// \\pub fn f( +// \\ a: i32, +// \\ b: i32, +// \\) linksection(".text") callconv(.C) i32 {} +// \\ +// ); +//} +// +//test "zig fmt: comptime struct field" { +// try testCanonical( +// \\const Foo = struct { +// \\ a: i32, +// \\ comptime b: i32 = 1234, +// \\}; +// \\ +// ); +//} +// +//test "zig fmt: c pointer type" { +// try testCanonical( +// \\pub extern fn repro() [*c]const u8; +// \\ +// ); +//} +// +//test "zig fmt: builtin call with trailing comma" { +// try testCanonical( +// \\pub fn main() void { +// \\ @breakpoint(); +// \\ _ = @boolToInt(a); +// \\ _ = @call( +// \\ a, +// \\ b, +// \\ c, +// \\ ); +// \\} +// \\ +// ); +//} +// +//test "zig fmt: asm expression with comptime content" { +// try testCanonical( +// \\comptime { +// \\ asm ("foo" ++ "bar"); +// \\} +// \\pub fn main() void { +// \\ asm volatile ("foo" ++ "bar"); +// \\ asm volatile ("foo" ++ "bar" +// \\ : [_] "" (x) +// \\ ); +// \\ asm volatile ("foo" ++ "bar" +// \\ : [_] "" (x) +// \\ : [_] "" (y) +// \\ ); +// \\ asm volatile ("foo" ++ "bar" +// \\ : [_] "" (x) +// \\ : [_] "" (y) +// \\ : "h", "e", "l", "l", "o" +// \\ ); +// \\} +// \\ +// ); +//} +// +//test 
"zig fmt: anytype struct field" { +// try testCanonical( +// \\pub const Pointer = struct { +// \\ sentinel: anytype, +// \\}; +// \\ +// ); +//} +// +//test "zig fmt: sentinel-terminated array type" { +// try testCanonical( +// \\pub fn cStrToPrefixedFileW(s: [*:0]const u8) ![PATH_MAX_WIDE:0]u16 { +// \\ return sliceToPrefixedFileW(mem.toSliceConst(u8, s)); +// \\} +// \\ +// ); +//} +// +//test "zig fmt: sentinel-terminated slice type" { +// try testCanonical( +// \\pub fn toSlice(self: Buffer) [:0]u8 { +// \\ return self.list.toSlice()[0..self.len()]; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: anon literal in array" { +// try testCanonical( +// \\var arr: [2]Foo = .{ +// \\ .{ .a = 2 }, +// \\ .{ .b = 3 }, +// \\}; +// \\ +// ); +//} +// +//test "zig fmt: alignment in anonymous literal" { +// try testTransform( +// \\const a = .{ +// \\ "U", "L", "F", +// \\ "U'", +// \\ "L'", +// \\ "F'", +// \\}; +// \\ +// , +// \\const a = .{ +// \\ "U", "L", "F", +// \\ "U'", "L'", "F'", +// \\}; +// \\ +// ); +//} +// +//test "zig fmt: anon struct literal syntax" { +// try testCanonical( +// \\const x = .{ +// \\ .a = b, +// \\ .c = d, +// \\}; +// \\ +// ); +//} +// +//test "zig fmt: anon list literal syntax" { +// try testCanonical( +// \\const x = .{ a, b, c }; +// \\ +// ); +//} +// +//test "zig fmt: async function" { +// try testCanonical( +// \\pub const Server = struct { +// \\ handleRequestFn: fn (*Server, *const std.net.Address, File) callconv(.Async) void, +// \\}; +// \\test "hi" { +// \\ var ptr = @ptrCast(fn (i32) callconv(.Async) void, other); +// \\} +// \\ +// ); +//} +// +//test "zig fmt: whitespace fixes" { +// try testTransform("test \"\" {\r\n\tconst hi = x;\r\n}\n// zig fmt: off\ntest \"\"{\r\n\tconst a = b;}\r\n", +// \\test "" { +// \\ const hi = x; +// \\} +// \\// zig fmt: off +// \\test ""{ +// \\ const a = b;} +// \\ +// ); +//} +// +//test "zig fmt: while else err prong with no block" { +// try testCanonical( +// \\test "" { +// \\ const 
result = while (returnError()) |value| { +// \\ break value; +// \\ } else |err| @as(i32, 2); +// \\ expect(result == 2); +// \\} +// \\ +// ); +//} +// +//test "zig fmt: tagged union with enum values" { +// try testCanonical( +// \\const MultipleChoice2 = union(enum(u32)) { +// \\ Unspecified1: i32, +// \\ A: f32 = 20, +// \\ Unspecified2: void, +// \\ B: bool = 40, +// \\ Unspecified3: i32, +// \\ C: i8 = 60, +// \\ Unspecified4: void, +// \\ D: void = 1000, +// \\ Unspecified5: i32, +// \\}; +// \\ +// ); +//} +// +//test "zig fmt: allowzero pointer" { +// try testCanonical( +// \\const T = [*]allowzero const u8; +// \\ +// ); +//} +// +//test "zig fmt: enum literal" { +// try testCanonical( +// \\const x = .hi; +// \\ +// ); +//} +// +//test "zig fmt: enum literal inside array literal" { +// try testCanonical( +// \\test "enums in arrays" { +// \\ var colors = []Color{.Green}; +// \\ colors = []Colors{ .Green, .Cyan }; +// \\ colors = []Colors{ +// \\ .Grey, +// \\ .Green, +// \\ .Cyan, +// \\ }; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: character literal larger than u8" { +// try testCanonical( +// \\const x = '\u{01f4a9}'; +// \\ +// ); +//} +// +//test "zig fmt: infix operator and then multiline string literal" { +// try testCanonical( +// \\const x = "" ++ +// \\ \\ hi +// \\; +// \\ +// ); +//} +// +//test "zig fmt: infix operator and then multiline string literal" { +// try testCanonical( +// \\const x = "" ++ +// \\ \\ hi0 +// \\ \\ hi1 +// \\ \\ hi2 +// \\; +// \\ +// ); +//} +// +//test "zig fmt: C pointers" { +// try testCanonical( +// \\const Ptr = [*c]i32; +// \\ +// ); +//} +// +//test "zig fmt: threadlocal" { +// try testCanonical( +// \\threadlocal var x: i32 = 1234; +// \\ +// ); +//} +// +//test "zig fmt: linksection" { +// try testCanonical( +// \\export var aoeu: u64 linksection(".text.derp") = 1234; +// \\export fn _start() linksection(".text.boot") callconv(.Naked) noreturn {} +// \\ +// ); +//} +// +//test "zig fmt: correctly move 
doc comments on struct fields" { +// try testTransform( +// \\pub const section_64 = extern struct { +// \\ sectname: [16]u8, /// name of this section +// \\ segname: [16]u8, /// segment this section goes in +// \\}; +// , +// \\pub const section_64 = extern struct { +// \\ /// name of this section +// \\ sectname: [16]u8, +// \\ /// segment this section goes in +// \\ segname: [16]u8, +// \\}; +// \\ +// ); +//} +// +//test "zig fmt: correctly space struct fields with doc comments" { +// try testTransform( +// \\pub const S = struct { +// \\ /// A +// \\ a: u8, +// \\ /// B +// \\ /// B (cont) +// \\ b: u8, +// \\ +// \\ +// \\ /// C +// \\ c: u8, +// \\}; +// \\ +// , +// \\pub const S = struct { +// \\ /// A +// \\ a: u8, +// \\ /// B +// \\ /// B (cont) +// \\ b: u8, +// \\ +// \\ /// C +// \\ c: u8, +// \\}; +// \\ +// ); +//} +// +//test "zig fmt: doc comments on param decl" { +// try testCanonical( +// \\pub const Allocator = struct { +// \\ shrinkFn: fn ( +// \\ self: *Allocator, +// \\ /// Guaranteed to be the same as what was returned from most recent call to +// \\ /// `allocFn`, `reallocFn`, or `shrinkFn`. +// \\ old_mem: []u8, +// \\ /// Guaranteed to be the same as what was returned from most recent call to +// \\ /// `allocFn`, `reallocFn`, or `shrinkFn`. +// \\ old_alignment: u29, +// \\ /// Guaranteed to be less than or equal to `old_mem.len`. +// \\ new_byte_count: usize, +// \\ /// Guaranteed to be less than or equal to `old_alignment`. 
+// \\ new_alignment: u29, +// \\ ) []u8, +// \\}; +// \\ +// ); +//} +// +//test "zig fmt: aligned struct field" { +// try testCanonical( +// \\pub const S = struct { +// \\ f: i32 align(32), +// \\}; +// \\ +// ); +// try testCanonical( +// \\pub const S = struct { +// \\ f: i32 align(32) = 1, +// \\}; +// \\ +// ); +//} +// +//test "zig fmt: comment to disable/enable zig fmt first" { +// try testCanonical( +// \\// Test trailing comma syntax +// \\// zig fmt: off +// \\ +// \\const struct_trailing_comma = struct { x: i32, y: i32, }; +// ); +//} +// +//test "zig fmt: comment to disable/enable zig fmt" { +// try testTransform( +// \\const a = b; +// \\// zig fmt: off +// \\const c = d; +// \\// zig fmt: on +// \\const e = f; +// , +// \\const a = b; +// \\// zig fmt: off +// \\const c = d; +// \\// zig fmt: on +// \\const e = f; +// \\ +// ); +//} +// +//test "zig fmt: line comment following 'zig fmt: off'" { +// try testCanonical( +// \\// zig fmt: off +// \\// Test +// \\const e = f; +// ); +//} +// +//test "zig fmt: doc comment following 'zig fmt: off'" { +// try testCanonical( +// \\// zig fmt: off +// \\/// test +// \\const e = f; +// ); +//} +// +//test "zig fmt: line and doc comment following 'zig fmt: off'" { +// try testCanonical( +// \\// zig fmt: off +// \\// test 1 +// \\/// test 2 +// \\const e = f; +// ); +//} +// +//test "zig fmt: doc and line comment following 'zig fmt: off'" { +// try testCanonical( +// \\// zig fmt: off +// \\/// test 1 +// \\// test 2 +// \\const e = f; +// ); +//} +// +//test "zig fmt: alternating 'zig fmt: off' and 'zig fmt: on'" { +// try testCanonical( +// \\// zig fmt: off +// \\// zig fmt: on +// \\// zig fmt: off +// \\const e = f; +// \\// zig fmt: off +// \\// zig fmt: on +// \\// zig fmt: off +// \\const a = b; +// \\// zig fmt: on +// \\const c = d; +// \\// zig fmt: on +// \\ +// ); +//} +// +//test "zig fmt: line comment following 'zig fmt: on'" { +// try testCanonical( +// \\// zig fmt: off +// \\const e = f; +// 
\\// zig fmt: on +// \\// test +// \\const e = f; +// \\ +// ); +//} +// +//test "zig fmt: doc comment following 'zig fmt: on'" { +// try testCanonical( +// \\// zig fmt: off +// \\const e = f; +// \\// zig fmt: on +// \\/// test +// \\const e = f; +// \\ +// ); +//} +// +//test "zig fmt: line and doc comment following 'zig fmt: on'" { +// try testCanonical( +// \\// zig fmt: off +// \\const e = f; +// \\// zig fmt: on +// \\// test1 +// \\/// test2 +// \\const e = f; +// \\ +// ); +//} +// +//test "zig fmt: doc and line comment following 'zig fmt: on'" { +// try testCanonical( +// \\// zig fmt: off +// \\const e = f; +// \\// zig fmt: on +// \\/// test1 +// \\// test2 +// \\const e = f; +// \\ +// ); +//} +// +//test "zig fmt: pointer of unknown length" { +// try testCanonical( +// \\fn foo(ptr: [*]u8) void {} +// \\ +// ); +//} +// +//test "zig fmt: spaces around slice operator" { +// try testCanonical( +// \\var a = b[c..d]; +// \\var a = b[c..d :0]; +// \\var a = b[c + 1 .. d]; +// \\var a = b[c + 1 ..]; +// \\var a = b[c .. d + 1]; +// \\var a = b[c .. 
d + 1 :0]; +// \\var a = b[c.a..d.e]; +// \\var a = b[c.a..d.e :0]; +// \\ +// ); +//} +// +//test "zig fmt: async call in if condition" { +// try testCanonical( +// \\comptime { +// \\ if (async b()) { +// \\ a(); +// \\ } +// \\} +// \\ +// ); +//} +// +//test "zig fmt: 2nd arg multiline string" { +// try testCanonical( +// \\comptime { +// \\ cases.addAsm("hello world linux x86_64", +// \\ \\.text +// \\ , "Hello, world!\n"); +// \\} +// \\ +// ); +//} +// +//test "zig fmt: 2nd arg multiline string many args" { +// try testCanonical( +// \\comptime { +// \\ cases.addAsm("hello world linux x86_64", +// \\ \\.text +// \\ , "Hello, world!\n", "Hello, world!\n"); +// \\} +// \\ +// ); +//} +// +//test "zig fmt: final arg multiline string" { +// try testCanonical( +// \\comptime { +// \\ cases.addAsm("hello world linux x86_64", "Hello, world!\n", +// \\ \\.text +// \\ ); +// \\} +// \\ +// ); +//} +// +//test "zig fmt: if condition wraps" { +// try testTransform( +// \\comptime { +// \\ if (cond and +// \\ cond) { +// \\ return x; +// \\ } +// \\ while (cond and +// \\ cond) { +// \\ return x; +// \\ } +// \\ if (a == b and +// \\ c) { +// \\ a = b; +// \\ } +// \\ while (a == b and +// \\ c) { +// \\ a = b; +// \\ } +// \\ if ((cond and +// \\ cond)) { +// \\ return x; +// \\ } +// \\ while ((cond and +// \\ cond)) { +// \\ return x; +// \\ } +// \\ var a = if (a) |*f| x: { +// \\ break :x &a.b; +// \\ } else |err| err; +// \\ var a = if (cond and +// \\ cond) |*f| +// \\ x: { +// \\ break :x &a.b; +// \\ } else |err| err; +// \\} +// , +// \\comptime { +// \\ if (cond and +// \\ cond) +// \\ { +// \\ return x; +// \\ } +// \\ while (cond and +// \\ cond) +// \\ { +// \\ return x; +// \\ } +// \\ if (a == b and +// \\ c) +// \\ { +// \\ a = b; +// \\ } +// \\ while (a == b and +// \\ c) +// \\ { +// \\ a = b; +// \\ } +// \\ if ((cond and +// \\ cond)) +// \\ { +// \\ return x; +// \\ } +// \\ while ((cond and +// \\ cond)) +// \\ { +// \\ return x; +// \\ } +// \\ 
var a = if (a) |*f| x: { +// \\ break :x &a.b; +// \\ } else |err| err; +// \\ var a = if (cond and +// \\ cond) |*f| +// \\ x: { +// \\ break :x &a.b; +// \\ } else |err| err; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: if condition has line break but must not wrap" { +// try testCanonical( +// \\comptime { +// \\ if (self.user_input_options.put( +// \\ name, +// \\ UserInputOption{ +// \\ .name = name, +// \\ .used = false, +// \\ }, +// \\ ) catch unreachable) |*prev_value| { +// \\ foo(); +// \\ bar(); +// \\ } +// \\ if (put( +// \\ a, +// \\ b, +// \\ )) { +// \\ foo(); +// \\ } +// \\} +// \\ +// ); +//} +// +//test "zig fmt: if condition has line break but must not wrap" { +// try testCanonical( +// \\comptime { +// \\ if (self.user_input_options.put(name, UserInputOption{ +// \\ .name = name, +// \\ .used = false, +// \\ }) catch unreachable) |*prev_value| { +// \\ foo(); +// \\ bar(); +// \\ } +// \\ if (put( +// \\ a, +// \\ b, +// \\ )) { +// \\ foo(); +// \\ } +// \\} +// \\ +// ); +//} +// +//test "zig fmt: function call with multiline argument" { +// try testCanonical( +// \\comptime { +// \\ self.user_input_options.put(name, UserInputOption{ +// \\ .name = name, +// \\ .used = false, +// \\ }); +// \\} +// \\ +// ); +//} +// +//test "zig fmt: same-line doc comment on variable declaration" { +// try testTransform( +// \\pub const MAP_ANONYMOUS = 0x1000; /// allocated from memory, swap space +// \\pub const MAP_FILE = 0x0000; /// map from file (default) +// \\ +// \\pub const EMEDIUMTYPE = 124; /// Wrong medium type +// \\ +// \\// nameserver query return codes +// \\pub const ENSROK = 0; /// DNS server returned answer with no data +// , +// \\/// allocated from memory, swap space +// \\pub const MAP_ANONYMOUS = 0x1000; +// \\/// map from file (default) +// \\pub const MAP_FILE = 0x0000; +// \\ +// \\/// Wrong medium type +// \\pub const EMEDIUMTYPE = 124; +// \\ +// \\// nameserver query return codes +// \\/// DNS server returned answer with no 
data +// \\pub const ENSROK = 0; +// \\ +// ); +//} +// +//test "zig fmt: if-else with comment before else" { +// try testCanonical( +// \\comptime { +// \\ // cexp(finite|nan +- i inf|nan) = nan + i nan +// \\ if ((hx & 0x7fffffff) != 0x7f800000) { +// \\ return Complex(f32).new(y - y, y - y); +// \\ } // cexp(-inf +- i inf|nan) = 0 + i0 +// \\ else if (hx & 0x80000000 != 0) { +// \\ return Complex(f32).new(0, 0); +// \\ } // cexp(+inf +- i inf|nan) = inf + i nan +// \\ else { +// \\ return Complex(f32).new(x, y - y); +// \\ } +// \\} +// \\ +// ); +//} +// +//test "zig fmt: if nested" { +// try testCanonical( +// \\pub fn foo() void { +// \\ return if ((aInt & bInt) >= 0) +// \\ if (aInt < bInt) +// \\ GE_LESS +// \\ else if (aInt == bInt) +// \\ GE_EQUAL +// \\ else +// \\ GE_GREATER +// \\ else if (aInt > bInt) +// \\ GE_LESS +// \\ else if (aInt == bInt) +// \\ GE_EQUAL +// \\ else +// \\ GE_GREATER; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: respect line breaks in if-else" { +// try testCanonical( +// \\comptime { +// \\ return if (cond) a else b; +// \\ return if (cond) +// \\ a +// \\ else +// \\ b; +// \\ return if (cond) +// \\ a +// \\ else if (cond) +// \\ b +// \\ else +// \\ c; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: respect line breaks after infix operators" { +// try testCanonical( +// \\comptime { +// \\ self.crc = +// \\ lookup_tables[0][p[7]] ^ +// \\ lookup_tables[1][p[6]] ^ +// \\ lookup_tables[2][p[5]] ^ +// \\ lookup_tables[3][p[4]] ^ +// \\ lookup_tables[4][@truncate(u8, self.crc >> 24)] ^ +// \\ lookup_tables[5][@truncate(u8, self.crc >> 16)] ^ +// \\ lookup_tables[6][@truncate(u8, self.crc >> 8)] ^ +// \\ lookup_tables[7][@truncate(u8, self.crc >> 0)]; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: fn decl with trailing comma" { +// try testTransform( +// \\fn foo(a: i32, b: i32,) void {} +// , +// \\fn foo( +// \\ a: i32, +// \\ b: i32, +// \\) void {} +// \\ +// ); +//} +// +//test "zig fmt: enum decl with no trailing 
comma" { +// try testTransform( +// \\const StrLitKind = enum {Normal, C}; +// , +// \\const StrLitKind = enum { Normal, C }; +// \\ +// ); +//} +// +//test "zig fmt: switch comment before prong" { +// try testCanonical( +// \\comptime { +// \\ switch (a) { +// \\ // hi +// \\ 0 => {}, +// \\ } +// \\} +// \\ +// ); +//} +// +//test "zig fmt: struct literal no trailing comma" { +// try testTransform( +// \\const a = foo{ .x = 1, .y = 2 }; +// \\const a = foo{ .x = 1, +// \\ .y = 2 }; +// , +// \\const a = foo{ .x = 1, .y = 2 }; +// \\const a = foo{ +// \\ .x = 1, +// \\ .y = 2, +// \\}; +// \\ +// ); +//} +// +//test "zig fmt: struct literal containing a multiline expression" { +// try testTransform( +// \\const a = A{ .x = if (f1()) 10 else 20 }; +// \\const a = A{ .x = if (f1()) 10 else 20, }; +// \\const a = A{ .x = if (f1()) +// \\ 10 else 20 }; +// \\const a = A{ .x = if (f1()) 10 else 20, .y = f2() + 100 }; +// \\const a = A{ .x = if (f1()) 10 else 20, .y = f2() + 100, }; +// \\const a = A{ .x = if (f1()) +// \\ 10 else 20}; +// \\const a = A{ .x = switch(g) {0 => "ok", else => "no"} }; +// \\ +// , +// \\const a = A{ .x = if (f1()) 10 else 20 }; +// \\const a = A{ +// \\ .x = if (f1()) 10 else 20, +// \\}; +// \\const a = A{ +// \\ .x = if (f1()) +// \\ 10 +// \\ else +// \\ 20, +// \\}; +// \\const a = A{ .x = if (f1()) 10 else 20, .y = f2() + 100 }; +// \\const a = A{ +// \\ .x = if (f1()) 10 else 20, +// \\ .y = f2() + 100, +// \\}; +// \\const a = A{ +// \\ .x = if (f1()) +// \\ 10 +// \\ else +// \\ 20, +// \\}; +// \\const a = A{ +// \\ .x = switch (g) { +// \\ 0 => "ok", +// \\ else => "no", +// \\ }, +// \\}; +// \\ +// ); +//} +// +//test "zig fmt: array literal with hint" { +// try testTransform( +// \\const a = []u8{ +// \\ 1, 2, // +// \\ 3, +// \\ 4, +// \\ 5, +// \\ 6, +// \\ 7 }; +// \\const a = []u8{ +// \\ 1, 2, // +// \\ 3, +// \\ 4, +// \\ 5, +// \\ 6, +// \\ 7, 8 }; +// \\const a = []u8{ +// \\ 1, 2, // +// \\ 3, +// \\ 4, +// \\ 5, +// 
\\ 6, // blah +// \\ 7, 8 }; +// \\const a = []u8{ +// \\ 1, 2, // +// \\ 3, // +// \\ 4, +// \\ 5, +// \\ 6, +// \\ 7 }; +// \\const a = []u8{ +// \\ 1, +// \\ 2, +// \\ 3, 4, // +// \\ 5, 6, // +// \\ 7, 8, // +// \\}; +// , +// \\const a = []u8{ +// \\ 1, 2, +// \\ 3, 4, +// \\ 5, 6, +// \\ 7, +// \\}; +// \\const a = []u8{ +// \\ 1, 2, +// \\ 3, 4, +// \\ 5, 6, +// \\ 7, 8, +// \\}; +// \\const a = []u8{ +// \\ 1, 2, +// \\ 3, 4, +// \\ 5, +// \\ 6, // blah +// \\ 7, +// \\ 8, +// \\}; +// \\const a = []u8{ +// \\ 1, 2, +// \\ 3, // +// \\ 4, +// \\ 5, 6, +// \\ 7, +// \\}; +// \\const a = []u8{ +// \\ 1, +// \\ 2, +// \\ 3, +// \\ 4, +// \\ 5, +// \\ 6, +// \\ 7, +// \\ 8, +// \\}; +// \\ +// ); +//} +// +//test "zig fmt: array literal veritical column alignment" { +// try testTransform( +// \\const a = []u8{ +// \\ 1000, 200, +// \\ 30, 4, +// \\ 50000, 60 +// \\}; +// \\const a = []u8{0, 1, 2, 3, 40, +// \\ 4,5,600,7, +// \\ 80, +// \\ 9, 10, 11, 0, 13, 14, 15}; +// \\ +// , +// \\const a = []u8{ +// \\ 1000, 200, +// \\ 30, 4, +// \\ 50000, 60, +// \\}; +// \\const a = []u8{ +// \\ 0, 1, 2, 3, 40, +// \\ 4, 5, 600, 7, 80, +// \\ 9, 10, 11, 0, 13, +// \\ 14, 15, +// \\}; +// \\ +// ); +//} +// +//test "zig fmt: multiline string with backslash at end of line" { +// try testCanonical( +// \\comptime { +// \\ err( +// \\ \\\ +// \\ ); +// \\} +// \\ +// ); +//} +// +//test "zig fmt: multiline string parameter in fn call with trailing comma" { +// try testCanonical( +// \\fn foo() void { +// \\ try stdout.print( +// \\ \\ZIG_CMAKE_BINARY_DIR {} +// \\ \\ZIG_C_HEADER_FILES {} +// \\ \\ZIG_DIA_GUIDS_LIB {} +// \\ \\ +// \\ , +// \\ std.cstr.toSliceConst(c.ZIG_CMAKE_BINARY_DIR), +// \\ std.cstr.toSliceConst(c.ZIG_CXX_COMPILER), +// \\ std.cstr.toSliceConst(c.ZIG_DIA_GUIDS_LIB), +// \\ ); +// \\} +// \\ +// ); +//} +// +//test "zig fmt: trailing comma on fn call" { +// try testCanonical( +// \\comptime { +// \\ var module = try Module.create( +// \\ allocator, +// 
\\ zig_lib_dir, +// \\ full_cache_dir, +// \\ ); +// \\} +// \\ +// ); +//} +// +//test "zig fmt: multi line arguments without last comma" { +// try testTransform( +// \\pub fn foo( +// \\ a: usize, +// \\ b: usize, +// \\ c: usize, +// \\ d: usize +// \\) usize { +// \\ return a + b + c + d; +// \\} +// \\ +// , +// \\pub fn foo(a: usize, b: usize, c: usize, d: usize) usize { +// \\ return a + b + c + d; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: empty block with only comment" { +// try testCanonical( +// \\comptime { +// \\ { +// \\ // comment +// \\ } +// \\} +// \\ +// ); +//} +// +//test "zig fmt: no trailing comma on struct decl" { +// try testCanonical( +// \\const RoundParam = struct { +// \\ k: usize, s: u32, t: u32 +// \\}; +// \\ +// ); +//} +// +//test "zig fmt: extra newlines at the end" { +// try testTransform( +// \\const a = b; +// \\ +// \\ +// \\ +// , +// \\const a = b; +// \\ +// ); +//} +// +//test "zig fmt: simple asm" { +// try testTransform( +// \\comptime { +// \\ asm volatile ( +// \\ \\.globl aoeu; +// \\ \\.type aoeu, @function; +// \\ \\.set aoeu, derp; +// \\ ); +// \\ +// \\ asm ("not real assembly" +// \\ :[a] "x" (x),); +// \\ asm ("not real assembly" +// \\ :[a] "x" (->i32),:[a] "x" (1),); +// \\ asm ("still not real assembly" +// \\ :::"a","b",); +// \\} +// , +// \\comptime { +// \\ asm volatile ( +// \\ \\.globl aoeu; +// \\ \\.type aoeu, @function; +// \\ \\.set aoeu, derp; +// \\ ); +// \\ +// \\ asm ("not real assembly" +// \\ : [a] "x" (x) +// \\ ); +// \\ asm ("not real assembly" +// \\ : [a] "x" (-> i32) +// \\ : [a] "x" (1) +// \\ ); +// \\ asm ("still not real assembly" +// \\ : +// \\ : +// \\ : "a", "b" +// \\ ); +// \\} +// \\ +// ); +//} +// +//test "zig fmt: nested struct literal with one item" { +// try testCanonical( +// \\const a = foo{ +// \\ .item = bar{ .a = b }, +// \\}; +// \\ +// ); +//} +// +//test "zig fmt: switch cases trailing comma" { +// try testTransform( +// \\fn switch_cases(x: i32) void { 
+// \\ switch (x) { +// \\ 1,2,3 => {}, +// \\ 4,5, => {}, +// \\ 6... 8, => {}, +// \\ else => {}, +// \\ } +// \\} +// , +// \\fn switch_cases(x: i32) void { +// \\ switch (x) { +// \\ 1, 2, 3 => {}, +// \\ 4, +// \\ 5, +// \\ => {}, +// \\ 6...8 => {}, +// \\ else => {}, +// \\ } +// \\} +// \\ +// ); +//} +// +//test "zig fmt: slice align" { +// try testCanonical( +// \\const A = struct { +// \\ items: []align(A) T, +// \\}; +// \\ +// ); +//} +// +//test "zig fmt: add trailing comma to array literal" { +// try testTransform( +// \\comptime { +// \\ return []u16{'m', 's', 'y', 's', '-' // hi +// \\ }; +// \\ return []u16{'m', 's', 'y', 's', +// \\ '-'}; +// \\ return []u16{'m', 's', 'y', 's', '-'}; +// \\} +// , +// \\comptime { +// \\ return []u16{ +// \\ 'm', 's', 'y', 's', '-', // hi +// \\ }; +// \\ return []u16{ +// \\ 'm', 's', 'y', 's', +// \\ '-', +// \\ }; +// \\ return []u16{ 'm', 's', 'y', 's', '-' }; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: first thing in file is line comment" { +// try testCanonical( +// \\// Introspection and determination of system libraries needed by zig. +// \\ +// \\// Introspection and determination of system libraries needed by zig. 
+// \\ +// \\const std = @import("std"); +// \\ +// ); +//} +// +//test "zig fmt: line comment after doc comment" { +// try testCanonical( +// \\/// doc comment +// \\// line comment +// \\fn foo() void {} +// \\ +// ); +//} +// +//test "zig fmt: float literal with exponent" { +// try testCanonical( +// \\test "bit field alignment" { +// \\ assert(@TypeOf(&blah.b) == *align(1:3:6) const u3); +// \\} +// \\ +// ); +//} +// +//test "zig fmt: float literal with exponent" { +// try testCanonical( +// \\test "aoeu" { +// \\ switch (state) { +// \\ TermState.Start => switch (c) { +// \\ '\x1b' => state = TermState.Escape, +// \\ else => try out.writeByte(c), +// \\ }, +// \\ } +// \\} +// \\ +// ); +//} +//test "zig fmt: float literal with exponent" { +// try testCanonical( +// \\pub const f64_true_min = 4.94065645841246544177e-324; +// \\const threshold = 0x1.a827999fcef32p+1022; +// \\ +// ); +//} +// +//test "zig fmt: if-else end of comptime" { +// try testCanonical( +// \\comptime { +// \\ if (a) { +// \\ b(); +// \\ } else { +// \\ b(); +// \\ } +// \\} +// \\ +// ); +//} +// +//test "zig fmt: nested blocks" { +// try testCanonical( +// \\comptime { +// \\ { +// \\ { +// \\ { +// \\ a(); +// \\ } +// \\ } +// \\ } +// \\} +// \\ +// ); +//} +// +//test "zig fmt: block with same line comment after end brace" { +// try testCanonical( +// \\comptime { +// \\ { +// \\ b(); +// \\ } // comment +// \\} +// \\ +// ); +//} +// +//test "zig fmt: statements with comment between" { +// try testCanonical( +// \\comptime { +// \\ a = b; +// \\ // comment +// \\ a = b; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: statements with empty line between" { +// try testCanonical( +// \\comptime { +// \\ a = b; +// \\ +// \\ a = b; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: ptr deref operator and unwrap optional operator" { +// try testCanonical( +// \\const a = b.*; +// \\const a = b.?; +// \\ +// ); +//} +// +//test "zig fmt: comment after if before another if" { +// try 
testCanonical( +// \\test "aoeu" { +// \\ // comment +// \\ if (x) { +// \\ bar(); +// \\ } +// \\} +// \\ +// \\test "aoeu" { +// \\ if (x) { +// \\ foo(); +// \\ } +// \\ // comment +// \\ if (x) { +// \\ bar(); +// \\ } +// \\} +// \\ +// ); +//} +// +//test "zig fmt: line comment between if block and else keyword" { +// try testCanonical( +// \\test "aoeu" { +// \\ // cexp(finite|nan +- i inf|nan) = nan + i nan +// \\ if ((hx & 0x7fffffff) != 0x7f800000) { +// \\ return Complex(f32).new(y - y, y - y); +// \\ } +// \\ // cexp(-inf +- i inf|nan) = 0 + i0 +// \\ else if (hx & 0x80000000 != 0) { +// \\ return Complex(f32).new(0, 0); +// \\ } +// \\ // cexp(+inf +- i inf|nan) = inf + i nan +// \\ // another comment +// \\ else { +// \\ return Complex(f32).new(x, y - y); +// \\ } +// \\} +// \\ +// ); +//} +// +//test "zig fmt: same line comments in expression" { +// try testCanonical( +// \\test "aoeu" { +// \\ const x = ( // a +// \\ 0 // b +// \\ ); // c +// \\} +// \\ +// ); +//} +// +//test "zig fmt: add comma on last switch prong" { +// try testTransform( +// \\test "aoeu" { +// \\switch (self.init_arg_expr) { +// \\ InitArg.Type => |t| { }, +// \\ InitArg.None, +// \\ InitArg.Enum => { } +// \\} +// \\ switch (self.init_arg_expr) { +// \\ InitArg.Type => |t| { }, +// \\ InitArg.None, +// \\ InitArg.Enum => { }//line comment +// \\ } +// \\} +// , +// \\test "aoeu" { +// \\ switch (self.init_arg_expr) { +// \\ InitArg.Type => |t| {}, +// \\ InitArg.None, InitArg.Enum => {}, +// \\ } +// \\ switch (self.init_arg_expr) { +// \\ InitArg.Type => |t| {}, +// \\ InitArg.None, InitArg.Enum => {}, //line comment +// \\ } +// \\} +// \\ +// ); +//} +// +//test "zig fmt: same-line comment after a statement" { +// try testCanonical( +// \\test "" { +// \\ a = b; +// \\ debug.assert(H.digest_size <= H.block_size); // HMAC makes this assumption +// \\ a = b; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: same-line comment after var decl in struct" { +// try testCanonical( 
+// \\pub const vfs_cap_data = extern struct { +// \\ const Data = struct {}; // when on disk. +// \\}; +// \\ +// ); +//} +// +//test "zig fmt: same-line comment after field decl" { +// try testCanonical( +// \\pub const dirent = extern struct { +// \\ d_name: u8, +// \\ d_name: u8, // comment 1 +// \\ d_name: u8, +// \\ d_name: u8, // comment 2 +// \\ d_name: u8, +// \\}; +// \\ +// ); +//} +// +//test "zig fmt: same-line comment after switch prong" { +// try testCanonical( +// \\test "" { +// \\ switch (err) { +// \\ error.PathAlreadyExists => {}, // comment 2 +// \\ else => return err, // comment 1 +// \\ } +// \\} +// \\ +// ); +//} +// +//test "zig fmt: same-line comment after non-block if expression" { +// try testCanonical( +// \\comptime { +// \\ if (sr > n_uword_bits - 1) // d > r +// \\ return 0; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: same-line comment on comptime expression" { +// try testCanonical( +// \\test "" { +// \\ comptime assert(@typeInfo(T) == .Int); // must pass an integer to absInt +// \\} +// \\ +// ); +//} +// +//test "zig fmt: switch with empty body" { +// try testCanonical( +// \\test "" { +// \\ foo() catch |err| switch (err) {}; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: line comments in struct initializer" { +// try testCanonical( +// \\fn foo() void { +// \\ return Self{ +// \\ .a = b, +// \\ +// \\ // Initialize these two fields to buffer_size so that +// \\ // in `readFn` we treat the state as being able to read +// \\ .start_index = buffer_size, +// \\ .end_index = buffer_size, +// \\ +// \\ // middle +// \\ +// \\ .a = b, +// \\ +// \\ // end +// \\ }; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: first line comment in struct initializer" { +// try testCanonical( +// \\pub fn acquire(self: *Self) HeldLock { +// \\ return HeldLock{ +// \\ // guaranteed allocation elision +// \\ .held = self.lock.acquire(), +// \\ .value = &self.private_data, +// \\ }; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: doc comments 
before struct field" { +// try testCanonical( +// \\pub const Allocator = struct { +// \\ /// Allocate byte_count bytes and return them in a slice, with the +// \\ /// slice's pointer aligned at least to alignment bytes. +// \\ allocFn: fn () void, +// \\}; +// \\ +// ); +//} +// +//test "zig fmt: error set declaration" { +// try testCanonical( +// \\const E = error{ +// \\ A, +// \\ B, +// \\ +// \\ C, +// \\}; +// \\ +// \\const Error = error{ +// \\ /// no more memory +// \\ OutOfMemory, +// \\}; +// \\ +// \\const Error = error{ +// \\ /// no more memory +// \\ OutOfMemory, +// \\ +// \\ /// another +// \\ Another, +// \\ +// \\ // end +// \\}; +// \\ +// \\const Error = error{OutOfMemory}; +// \\const Error = error{}; +// \\ +// \\const Error = error{ OutOfMemory, OutOfTime }; +// \\ +// ); +//} +// +//test "zig fmt: union(enum(u32)) with assigned enum values" { +// try testCanonical( +// \\const MultipleChoice = union(enum(u32)) { +// \\ A = 20, +// \\ B = 40, +// \\ C = 60, +// \\ D = 1000, +// \\}; +// \\ +// ); +//} +// +//test "zig fmt: resume from suspend block" { +// try testCanonical( +// \\fn foo() void { +// \\ suspend { +// \\ resume @frame(); +// \\ } +// \\} +// \\ +// ); +//} +// +//test "zig fmt: comments before error set decl" { +// try testCanonical( +// \\const UnexpectedError = error{ +// \\ /// The Operating System returned an undocumented error code. 
+// \\ Unexpected, +// \\ // another +// \\ Another, +// \\ +// \\ // in between +// \\ +// \\ // at end +// \\}; +// \\ +// ); +//} +// +//test "zig fmt: comments before switch prong" { +// try testCanonical( +// \\test "" { +// \\ switch (err) { +// \\ error.PathAlreadyExists => continue, +// \\ +// \\ // comment 1 +// \\ +// \\ // comment 2 +// \\ else => return err, +// \\ // at end +// \\ } +// \\} +// \\ +// ); +//} +// +//test "zig fmt: comments before var decl in struct" { +// try testCanonical( +// \\pub const vfs_cap_data = extern struct { +// \\ // All of these are mandated as little endian +// \\ // when on disk. +// \\ const Data = struct { +// \\ permitted: u32, +// \\ inheritable: u32, +// \\ }; +// \\ +// \\ // in between +// \\ +// \\ /// All of these are mandated as little endian +// \\ /// when on disk. +// \\ const Data = struct { +// \\ permitted: u32, +// \\ inheritable: u32, +// \\ }; +// \\ +// \\ // at end +// \\}; +// \\ +// ); +//} +// +//test "zig fmt: array literal with 1 item on 1 line" { +// try testCanonical( +// \\var s = []const u64{0} ** 25; +// \\ +// ); +//} +// +//test "zig fmt: comments before global variables" { +// try testCanonical( +// \\/// Foo copies keys and values before they go into the map, and +// \\/// frees them when they get removed. 
+// \\pub const Foo = struct {}; +// \\ +// ); +//} +// +//test "zig fmt: comments in statements" { +// try testCanonical( +// \\test "std" { +// \\ // statement comment +// \\ _ = @import("foo/bar.zig"); +// \\ +// \\ // middle +// \\ // middle2 +// \\ +// \\ // end +// \\} +// \\ +// ); +//} +// +//test "zig fmt: comments before test decl" { +// try testCanonical( +// \\/// top level doc comment +// \\test "hi" {} +// \\ +// \\// top level normal comment +// \\test "hi" {} +// \\ +// \\// middle +// \\ +// \\// end +// \\ +// ); +//} +// +//test "zig fmt: preserve spacing" { +// try testCanonical( +// \\const std = @import("std"); +// \\ +// \\pub fn main() !void { +// \\ var stdout_file = std.io.getStdOut; +// \\ var stdout_file = std.io.getStdOut; +// \\ +// \\ var stdout_file = std.io.getStdOut; +// \\ var stdout_file = std.io.getStdOut; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: return types" { +// try testCanonical( +// \\pub fn main() !void {} +// \\pub fn main() anytype {} +// \\pub fn main() i32 {} +// \\ +// ); +//} +// +//test "zig fmt: imports" { +// try testCanonical( +// \\const std = @import("std"); +// \\const std = @import(); +// \\ +// ); +//} +// +//test "zig fmt: global declarations" { +// try testCanonical( +// \\const a = b; +// \\pub const a = b; +// \\var a = b; +// \\pub var a = b; +// \\const a: i32 = b; +// \\pub const a: i32 = b; +// \\var a: i32 = b; +// \\pub var a: i32 = b; +// \\extern const a: i32 = b; +// \\pub extern const a: i32 = b; +// \\extern var a: i32 = b; +// \\pub extern var a: i32 = b; +// \\extern "a" const a: i32 = b; +// \\pub extern "a" const a: i32 = b; +// \\extern "a" var a: i32 = b; +// \\pub extern "a" var a: i32 = b; +// \\ +// ); +//} +// +//test "zig fmt: extern declaration" { +// try testCanonical( +// \\extern var foo: c_int; +// \\ +// ); +//} +// +//test "zig fmt: alignment" { +// try testCanonical( +// \\var foo: c_int align(1); +// \\ +// ); +//} +// +//test "zig fmt: C main" { +// try 
testCanonical( +// \\fn main(argc: c_int, argv: **u8) c_int { +// \\ const a = b; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: return" { +// try testCanonical( +// \\fn foo(argc: c_int, argv: **u8) c_int { +// \\ return 0; +// \\} +// \\ +// \\fn bar() void { +// \\ return; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: pointer attributes" { +// try testCanonical( +// \\extern fn f1(s: *align(*u8) u8) c_int; +// \\extern fn f2(s: **align(1) *const *volatile u8) c_int; +// \\extern fn f3(s: *align(1) const *align(1) volatile *const volatile u8) c_int; +// \\extern fn f4(s: *align(1) const volatile u8) c_int; +// \\extern fn f5(s: [*:0]align(1) const volatile u8) c_int; +// \\ +// ); +//} +// +//test "zig fmt: slice attributes" { +// try testCanonical( +// \\extern fn f1(s: *align(*u8) u8) c_int; +// \\extern fn f2(s: **align(1) *const *volatile u8) c_int; +// \\extern fn f3(s: *align(1) const *align(1) volatile *const volatile u8) c_int; +// \\extern fn f4(s: *align(1) const volatile u8) c_int; +// \\extern fn f5(s: [*:0]align(1) const volatile u8) c_int; +// \\ +// ); +//} +// +//test "zig fmt: test declaration" { +// try testCanonical( +// \\test "test name" { +// \\ const a = 1; +// \\ var b = 1; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: infix operators" { +// try testCanonical( +// \\test "infix operators" { +// \\ var i = undefined; +// \\ i = 2; +// \\ i *= 2; +// \\ i |= 2; +// \\ i ^= 2; +// \\ i <<= 2; +// \\ i >>= 2; +// \\ i &= 2; +// \\ i *= 2; +// \\ i *%= 2; +// \\ i -= 2; +// \\ i -%= 2; +// \\ i += 2; +// \\ i +%= 2; +// \\ i /= 2; +// \\ i %= 2; +// \\ _ = i == i; +// \\ _ = i != i; +// \\ _ = i != i; +// \\ _ = i.i; +// \\ _ = i || i; +// \\ _ = i!i; +// \\ _ = i ** i; +// \\ _ = i ++ i; +// \\ _ = i orelse i; +// \\ _ = i % i; +// \\ _ = i / i; +// \\ _ = i *% i; +// \\ _ = i * i; +// \\ _ = i -% i; +// \\ _ = i - i; +// \\ _ = i +% i; +// \\ _ = i + i; +// \\ _ = i << i; +// \\ _ = i >> i; +// \\ _ = i & i; +// \\ _ = i ^ i; +// \\ _ 
= i | i; +// \\ _ = i >= i; +// \\ _ = i <= i; +// \\ _ = i > i; +// \\ _ = i < i; +// \\ _ = i and i; +// \\ _ = i or i; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: precedence" { +// try testCanonical( +// \\test "precedence" { +// \\ a!b(); +// \\ (a!b)(); +// \\ !a!b; +// \\ !(a!b); +// \\ !a{}; +// \\ !(a{}); +// \\ a + b{}; +// \\ (a + b){}; +// \\ a << b + c; +// \\ (a << b) + c; +// \\ a & b << c; +// \\ (a & b) << c; +// \\ a ^ b & c; +// \\ (a ^ b) & c; +// \\ a | b ^ c; +// \\ (a | b) ^ c; +// \\ a == b | c; +// \\ (a == b) | c; +// \\ a and b == c; +// \\ (a and b) == c; +// \\ a or b and c; +// \\ (a or b) and c; +// \\ (a or b) and c; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: prefix operators" { +// try testCanonical( +// \\test "prefix operators" { +// \\ try return --%~!&0; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: call expression" { +// try testCanonical( +// \\test "test calls" { +// \\ a(); +// \\ a(1); +// \\ a(1, 2); +// \\ a(1, 2) + a(1, 2); +// \\} +// \\ +// ); +//} +// +//test "zig fmt: anytype type" { +// try testCanonical( +// \\fn print(args: anytype) anytype {} +// \\ +// ); +//} +// +//test "zig fmt: functions" { +// try testCanonical( +// \\extern fn puts(s: *const u8) c_int; +// \\extern "c" fn puts(s: *const u8) c_int; +// \\export fn puts(s: *const u8) c_int; +// \\inline fn puts(s: *const u8) c_int; +// \\noinline fn puts(s: *const u8) c_int; +// \\pub extern fn puts(s: *const u8) c_int; +// \\pub extern "c" fn puts(s: *const u8) c_int; +// \\pub export fn puts(s: *const u8) c_int; +// \\pub inline fn puts(s: *const u8) c_int; +// \\pub noinline fn puts(s: *const u8) c_int; +// \\pub extern fn puts(s: *const u8) align(2 + 2) c_int; +// \\pub extern "c" fn puts(s: *const u8) align(2 + 2) c_int; +// \\pub export fn puts(s: *const u8) align(2 + 2) c_int; +// \\pub inline fn puts(s: *const u8) align(2 + 2) c_int; +// \\pub noinline fn puts(s: *const u8) align(2 + 2) c_int; +// \\ +// ); +//} +// +//test "zig fmt: 
multiline string" { +// try testCanonical( +// \\test "" { +// \\ const s1 = +// \\ \\one +// \\ \\two) +// \\ \\three +// \\ ; +// \\ const s3 = // hi +// \\ \\one +// \\ \\two) +// \\ \\three +// \\ ; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: values" { +// try testCanonical( +// \\test "values" { +// \\ 1; +// \\ 1.0; +// \\ "string"; +// \\ 'c'; +// \\ true; +// \\ false; +// \\ null; +// \\ undefined; +// \\ anyerror; +// \\ this; +// \\ unreachable; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: indexing" { +// try testCanonical( +// \\test "test index" { +// \\ a[0]; +// \\ a[0 + 5]; +// \\ a[0..]; +// \\ a[0..5]; +// \\ a[a[0]]; +// \\ a[a[0..]]; +// \\ a[a[0..5]]; +// \\ a[a[0]..]; +// \\ a[a[0..5]..]; +// \\ a[a[0]..a[0]]; +// \\ a[a[0..5]..a[0]]; +// \\ a[a[0..5]..a[0..5]]; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: struct declaration" { +// try testCanonical( +// \\const S = struct { +// \\ const Self = @This(); +// \\ f1: u8, +// \\ f3: u8, +// \\ +// \\ f2: u8, +// \\ +// \\ fn method(self: *Self) Self { +// \\ return self.*; +// \\ } +// \\}; +// \\ +// \\const Ps = packed struct { +// \\ a: u8, +// \\ b: u8, +// \\ +// \\ c: u8, +// \\}; +// \\ +// \\const Es = extern struct { +// \\ a: u8, +// \\ b: u8, +// \\ +// \\ c: u8, +// \\}; +// \\ +// ); +//} +// +//test "zig fmt: enum declaration" { +// try testCanonical( +// \\const E = enum { +// \\ Ok, +// \\ SomethingElse = 0, +// \\}; +// \\ +// \\const E2 = enum(u8) { +// \\ Ok, +// \\ SomethingElse = 255, +// \\ SomethingThird, +// \\}; +// \\ +// \\const Ee = extern enum { +// \\ Ok, +// \\ SomethingElse, +// \\ SomethingThird, +// \\}; +// \\ +// \\const Ep = packed enum { +// \\ Ok, +// \\ SomethingElse, +// \\ SomethingThird, +// \\}; +// \\ +// ); +//} +// +//test "zig fmt: union declaration" { +// try testCanonical( +// \\const U = union { +// \\ Int: u8, +// \\ Float: f32, +// \\ None, +// \\ Bool: bool, +// \\}; +// \\ +// \\const Ue = union(enum) { +// \\ Int: u8, +// \\ 
Float: f32, +// \\ None, +// \\ Bool: bool, +// \\}; +// \\ +// \\const E = enum { +// \\ Int, +// \\ Float, +// \\ None, +// \\ Bool, +// \\}; +// \\ +// \\const Ue2 = union(E) { +// \\ Int: u8, +// \\ Float: f32, +// \\ None, +// \\ Bool: bool, +// \\}; +// \\ +// \\const Eu = extern union { +// \\ Int: u8, +// \\ Float: f32, +// \\ None, +// \\ Bool: bool, +// \\}; +// \\ +// ); +//} +// +//test "zig fmt: arrays" { +// try testCanonical( +// \\test "test array" { +// \\ const a: [2]u8 = [2]u8{ +// \\ 1, +// \\ 2, +// \\ }; +// \\ const a: [2]u8 = []u8{ +// \\ 1, +// \\ 2, +// \\ }; +// \\ const a: [0]u8 = []u8{}; +// \\ const x: [4:0]u8 = undefined; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: container initializers" { +// try testCanonical( +// \\const a0 = []u8{}; +// \\const a1 = []u8{1}; +// \\const a2 = []u8{ +// \\ 1, +// \\ 2, +// \\ 3, +// \\ 4, +// \\}; +// \\const s0 = S{}; +// \\const s1 = S{ .a = 1 }; +// \\const s2 = S{ +// \\ .a = 1, +// \\ .b = 2, +// \\}; +// \\ +// ); +//} +// +//test "zig fmt: catch" { +// try testCanonical( +// \\test "catch" { +// \\ const a: anyerror!u8 = 0; +// \\ _ = a catch return; +// \\ _ = a catch |err| return; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: blocks" { +// try testCanonical( +// \\test "blocks" { +// \\ { +// \\ const a = 0; +// \\ const b = 0; +// \\ } +// \\ +// \\ blk: { +// \\ const a = 0; +// \\ const b = 0; +// \\ } +// \\ +// \\ const r = blk: { +// \\ const a = 0; +// \\ const b = 0; +// \\ }; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: switch" { +// try testCanonical( +// \\test "switch" { +// \\ switch (0) { +// \\ 0 => {}, +// \\ 1 => unreachable, +// \\ 2, 3 => {}, +// \\ 4...7 => {}, +// \\ 1 + 4 * 3 + 22 => {}, +// \\ else => { +// \\ const a = 1; +// \\ const b = a; +// \\ }, +// \\ } +// \\ +// \\ const res = switch (0) { +// \\ 0 => 0, +// \\ 1 => 2, +// \\ 1 => a = 4, +// \\ else => 4, +// \\ }; +// \\ +// \\ const Union = union(enum) { +// \\ Int: i64, +// \\ Float: f64, +// 
\\ }; +// \\ +// \\ switch (u) { +// \\ Union.Int => |int| {}, +// \\ Union.Float => |*float| unreachable, +// \\ } +// \\} +// \\ +// ); +//} +// +//test "zig fmt: while" { +// try testCanonical( +// \\test "while" { +// \\ while (10 < 1) unreachable; +// \\ +// \\ while (10 < 1) unreachable else unreachable; +// \\ +// \\ while (10 < 1) { +// \\ unreachable; +// \\ } +// \\ +// \\ while (10 < 1) +// \\ unreachable; +// \\ +// \\ var i: usize = 0; +// \\ while (i < 10) : (i += 1) { +// \\ continue; +// \\ } +// \\ +// \\ i = 0; +// \\ while (i < 10) : (i += 1) +// \\ continue; +// \\ +// \\ i = 0; +// \\ var j: usize = 0; +// \\ while (i < 10) : ({ +// \\ i += 1; +// \\ j += 1; +// \\ }) { +// \\ continue; +// \\ } +// \\ +// \\ var a: ?u8 = 2; +// \\ while (a) |v| : (a = null) { +// \\ continue; +// \\ } +// \\ +// \\ while (a) |v| : (a = null) +// \\ unreachable; +// \\ +// \\ label: while (10 < 0) { +// \\ unreachable; +// \\ } +// \\ +// \\ const res = while (0 < 10) { +// \\ break 7; +// \\ } else { +// \\ unreachable; +// \\ }; +// \\ +// \\ const res = while (0 < 10) +// \\ break 7 +// \\ else +// \\ unreachable; +// \\ +// \\ var a: anyerror!u8 = 0; +// \\ while (a) |v| { +// \\ a = error.Err; +// \\ } else |err| { +// \\ i = 1; +// \\ } +// \\ +// \\ comptime var k: usize = 0; +// \\ inline while (i < 10) : (i += 1) +// \\ j += 2; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: for" { +// try testCanonical( +// \\test "for" { +// \\ for (a) |v| { +// \\ continue; +// \\ } +// \\ +// \\ for (a) |v| continue; +// \\ +// \\ for (a) |v| continue else return; +// \\ +// \\ for (a) |v| { +// \\ continue; +// \\ } else return; +// \\ +// \\ for (a) |v| continue else { +// \\ return; +// \\ } +// \\ +// \\ for (a) |v| +// \\ continue +// \\ else +// \\ return; +// \\ +// \\ for (a) |v| +// \\ continue; +// \\ +// \\ for (a) |*v| +// \\ continue; +// \\ +// \\ for (a) |v, i| { +// \\ continue; +// \\ } +// \\ +// \\ for (a) |v, i| +// \\ continue; +// \\ +// \\ 
for (a) |b| switch (b) { +// \\ c => {}, +// \\ d => {}, +// \\ }; +// \\ +// \\ for (a) |b| +// \\ switch (b) { +// \\ c => {}, +// \\ d => {}, +// \\ }; +// \\ +// \\ const res = for (a) |v, i| { +// \\ break v; +// \\ } else { +// \\ unreachable; +// \\ }; +// \\ +// \\ var num: usize = 0; +// \\ inline for (a) |v, i| { +// \\ num += v; +// \\ num += i; +// \\ } +// \\} +// \\ +// ); +// +// try testTransform( +// \\test "fix for" { +// \\ for (a) |x| +// \\ f(x) else continue; +// \\} +// \\ +// , +// \\test "fix for" { +// \\ for (a) |x| +// \\ f(x) +// \\ else continue; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: if" { +// try testCanonical( +// \\test "if" { +// \\ if (10 < 0) { +// \\ unreachable; +// \\ } +// \\ +// \\ if (10 < 0) unreachable; +// \\ +// \\ if (10 < 0) { +// \\ unreachable; +// \\ } else { +// \\ const a = 20; +// \\ } +// \\ +// \\ if (10 < 0) { +// \\ unreachable; +// \\ } else if (5 < 0) { +// \\ unreachable; +// \\ } else { +// \\ const a = 20; +// \\ } +// \\ +// \\ const is_world_broken = if (10 < 0) true else false; +// \\ const some_number = 1 + if (10 < 0) 2 else 3; +// \\ +// \\ const a: ?u8 = 10; +// \\ const b: ?u8 = null; +// \\ if (a) |v| { +// \\ const some = v; +// \\ } else if (b) |*v| { +// \\ unreachable; +// \\ } else { +// \\ const some = 10; +// \\ } +// \\ +// \\ const non_null_a = if (a) |v| v else 0; +// \\ +// \\ const a_err: anyerror!u8 = 0; +// \\ if (a_err) |v| { +// \\ const p = v; +// \\ } else |err| { +// \\ unreachable; +// \\ } +// \\} +// \\ +// ); +//} +// +//test "zig fmt: defer" { +// try testCanonical( +// \\test "defer" { +// \\ var i: usize = 0; +// \\ defer i = 1; +// \\ defer { +// \\ i += 2; +// \\ i *= i; +// \\ } +// \\ +// \\ errdefer i += 3; +// \\ errdefer { +// \\ i += 2; +// \\ i /= i; +// \\ } +// \\} +// \\ +// ); +//} +// +//test "zig fmt: comptime" { +// try testCanonical( +// \\fn a() u8 { +// \\ return 5; +// \\} +// \\ +// \\fn b(comptime i: u8) u8 { +// \\ return i; +// \\} 
+// \\ +// \\const av = comptime a(); +// \\const av2 = comptime blk: { +// \\ var res = a(); +// \\ res *= b(2); +// \\ break :blk res; +// \\}; +// \\ +// \\comptime { +// \\ _ = a(); +// \\} +// \\ +// \\test "comptime" { +// \\ const av3 = comptime a(); +// \\ const av4 = comptime blk: { +// \\ var res = a(); +// \\ res *= a(); +// \\ break :blk res; +// \\ }; +// \\ +// \\ comptime var i = 0; +// \\ comptime { +// \\ i = a(); +// \\ i += b(i); +// \\ } +// \\} +// \\ +// ); +//} +// +//test "zig fmt: fn type" { +// try testCanonical( +// \\fn a(i: u8) u8 { +// \\ return i + 1; +// \\} +// \\ +// \\const a: fn (u8) u8 = undefined; +// \\const b: fn (u8) callconv(.Naked) u8 = undefined; +// \\const ap: fn (u8) u8 = a; +// \\ +// ); +//} +// +//test "zig fmt: inline asm" { +// try testCanonical( +// \\pub fn syscall1(number: usize, arg1: usize) usize { +// \\ return asm volatile ("syscall" +// \\ : [ret] "={rax}" (-> usize) +// \\ : [number] "{rax}" (number), +// \\ [arg1] "{rdi}" (arg1) +// \\ : "rcx", "r11" +// \\ ); +// \\} +// \\ +// ); +//} +// +//test "zig fmt: async functions" { +// try testCanonical( +// \\fn simpleAsyncFn() void { +// \\ const a = async a.b(); +// \\ x += 1; +// \\ suspend; +// \\ x += 1; +// \\ suspend; +// \\ const p: anyframe->void = async simpleAsyncFn() catch unreachable; +// \\ await p; +// \\} +// \\ +// \\test "suspend, resume, await" { +// \\ const p: anyframe = async testAsyncSeq(); +// \\ resume p; +// \\ await p; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: nosuspend" { +// try testCanonical( +// \\const a = nosuspend foo(); +// \\ +// ); +//} +// +//test "zig fmt: Block after if" { +// try testCanonical( +// \\test "Block after if" { +// \\ if (true) { +// \\ const a = 0; +// \\ } +// \\ +// \\ { +// \\ const a = 0; +// \\ } +// \\} +// \\ +// ); +//} +// +//test "zig fmt: use" { +// try testCanonical( +// \\usingnamespace @import("std"); +// \\pub usingnamespace @import("std"); +// \\ +// ); +//} +// +//test "zig fmt: 
string identifier" { +// try testCanonical( +// \\const @"a b" = @"c d".@"e f"; +// \\fn @"g h"() void {} +// \\ +// ); +//} +// +//test "zig fmt: error return" { +// try testCanonical( +// \\fn err() anyerror { +// \\ call(); +// \\ return error.InvalidArgs; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: comptime block in container" { +// try testCanonical( +// \\pub fn container() type { +// \\ return struct { +// \\ comptime { +// \\ if (false) { +// \\ unreachable; +// \\ } +// \\ } +// \\ }; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: inline asm parameter alignment" { +// try testCanonical( +// \\pub fn main() void { +// \\ asm volatile ( +// \\ \\ foo +// \\ \\ bar +// \\ ); +// \\ asm volatile ( +// \\ \\ foo +// \\ \\ bar +// \\ : [_] "" (-> usize), +// \\ [_] "" (-> usize) +// \\ ); +// \\ asm volatile ( +// \\ \\ foo +// \\ \\ bar +// \\ : +// \\ : [_] "" (0), +// \\ [_] "" (0) +// \\ ); +// \\ asm volatile ( +// \\ \\ foo +// \\ \\ bar +// \\ : +// \\ : +// \\ : "", "" +// \\ ); +// \\ asm volatile ( +// \\ \\ foo +// \\ \\ bar +// \\ : [_] "" (-> usize), +// \\ [_] "" (-> usize) +// \\ : [_] "" (0), +// \\ [_] "" (0) +// \\ : "", "" +// \\ ); +// \\} +// \\ +// ); +//} +// +//test "zig fmt: multiline string in array" { +// try testCanonical( +// \\const Foo = [][]const u8{ +// \\ \\aaa +// \\ , +// \\ \\bbb +// \\}; +// \\ +// \\fn bar() void { +// \\ const Foo = [][]const u8{ +// \\ \\aaa +// \\ , +// \\ \\bbb +// \\ }; +// \\ const Bar = [][]const u8{ // comment here +// \\ \\aaa +// \\ \\ +// \\ , // and another comment can go here +// \\ \\bbb +// \\ }; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: if type expr" { +// try testCanonical( +// \\const mycond = true; +// \\pub fn foo() if (mycond) i32 else void { +// \\ if (mycond) { +// \\ return 42; +// \\ } +// \\} +// \\ +// ); +//} +//test "zig fmt: file ends with struct field" { +// try testCanonical( +// \\a: bool +// \\ +// ); +//} +// +//test "zig fmt: comment after empty comment" { 
+// try testTransform( +// \\const x = true; // +// \\// +// \\// +// \\//a +// \\ +// , +// \\const x = true; +// \\//a +// \\ +// ); +//} +// +//test "zig fmt: line comment in array" { +// try testTransform( +// \\test "a" { +// \\ var arr = [_]u32{ +// \\ 0 +// \\ // 1, +// \\ // 2, +// \\ }; +// \\} +// \\ +// , +// \\test "a" { +// \\ var arr = [_]u32{ +// \\ 0, // 1, +// \\ // 2, +// \\ }; +// \\} +// \\ +// ); +// try testCanonical( +// \\test "a" { +// \\ var arr = [_]u32{ +// \\ 0, +// \\ // 1, +// \\ // 2, +// \\ }; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: comment after params" { +// try testTransform( +// \\fn a( +// \\ b: u32 +// \\ // c: u32, +// \\ // d: u32, +// \\) void {} +// \\ +// , +// \\fn a( +// \\ b: u32, // c: u32, +// \\ // d: u32, +// \\) void {} +// \\ +// ); +// try testCanonical( +// \\fn a( +// \\ b: u32, +// \\ // c: u32, +// \\ // d: u32, +// \\) void {} +// \\ +// ); +//} +// +//test "zig fmt: comment in array initializer/access" { +// try testCanonical( +// \\test "a" { +// \\ var a = x{ //aa +// \\ //bb +// \\ }; +// \\ var a = []x{ //aa +// \\ //bb +// \\ }; +// \\ var b = [ //aa +// \\ _ +// \\ ]x{ //aa +// \\ //bb +// \\ 9, +// \\ }; +// \\ var c = b[ //aa +// \\ 0 +// \\ ]; +// \\ var d = [_ +// \\ //aa +// \\ ]x{ //aa +// \\ //bb +// \\ 9, +// \\ }; +// \\ var e = d[0 +// \\ //aa +// \\ ]; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: comments at several places in struct init" { +// try testTransform( +// \\var bar = Bar{ +// \\ .x = 10, // test +// \\ .y = "test" +// \\ // test +// \\}; +// \\ +// , +// \\var bar = Bar{ +// \\ .x = 10, // test +// \\ .y = "test", // test +// \\}; +// \\ +// ); +// +// try testCanonical( +// \\var bar = Bar{ // test +// \\ .x = 10, // test +// \\ .y = "test", +// \\ // test +// \\}; +// \\ +// ); +//} +// +//test "zig fmt: top level doc comments" { +// try testCanonical( +// \\//! tld 1 +// \\//! tld 2 +// \\//! 
tld 3 +// \\ +// \\// comment +// \\ +// \\/// A doc +// \\const A = struct { +// \\ //! A tld 1 +// \\ //! A tld 2 +// \\ //! A tld 3 +// \\}; +// \\ +// \\/// B doc +// \\const B = struct { +// \\ //! B tld 1 +// \\ //! B tld 2 +// \\ //! B tld 3 +// \\ +// \\ /// b doc +// \\ b: u32, +// \\}; +// \\ +// \\/// C doc +// \\const C = struct { +// \\ //! C tld 1 +// \\ //! C tld 2 +// \\ //! C tld 3 +// \\ +// \\ /// c1 doc +// \\ c1: u32, +// \\ +// \\ //! C tld 4 +// \\ //! C tld 5 +// \\ //! C tld 6 +// \\ +// \\ /// c2 doc +// \\ c2: u32, +// \\}; +// \\ +// ); +// try testCanonical( +// \\//! Top-level documentation. +// \\ +// \\/// This is A +// \\pub const A = usize; +// \\ +// ); +// try testCanonical( +// \\//! Nothing here +// \\ +// ); +//} +// +//test "zig fmt: extern without container keyword returns error" { +// try testError( +// \\const container = extern {}; +// \\ +// , &[_]Error{ +// .ExpectedExpr, +// .ExpectedVarDeclOrFn, +// }); +//} +// +//test "zig fmt: integer literals with underscore separators" { +// try testTransform( +// \\const +// \\ x = +// \\ 1_234_567 +// \\ +(0b0_1-0o7_0+0xff_FF ) + 0_0; +// , +// \\const x = +// \\ 1_234_567 + (0b0_1 - 0o7_0 + 0xff_FF) + 0_0; +// \\ +// ); +//} +// +//test "zig fmt: hex literals with underscore separators" { +// try testTransform( +// \\pub fn orMask(a: [ 1_000 ]u64, b: [ 1_000] u64) [1_000]u64 { +// \\ var c: [1_000]u64 = [1]u64{ 0xFFFF_FFFF_FFFF_FFFF}**1_000; +// \\ for (c [ 0_0 .. 
]) |_, i| { +// \\ c[i] = (a[i] | b[i]) & 0xCCAA_CCAA_CCAA_CCAA; +// \\ } +// \\ return c; +// \\} +// \\ +// \\ +// , +// \\pub fn orMask(a: [1_000]u64, b: [1_000]u64) [1_000]u64 { +// \\ var c: [1_000]u64 = [1]u64{0xFFFF_FFFF_FFFF_FFFF} ** 1_000; +// \\ for (c[0_0..]) |_, i| { +// \\ c[i] = (a[i] | b[i]) & 0xCCAA_CCAA_CCAA_CCAA; +// \\ } +// \\ return c; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: decimal float literals with underscore separators" { +// try testTransform( +// \\pub fn main() void { +// \\ const a:f64=(10.0e-0+(10.e+0))+10_00.00_00e-2+00_00.00_10e+4; +// \\ const b:f64=010.0--0_10.+0_1_0.0_0+1e2; +// \\ std.debug.warn("a: {}, b: {} -> a+b: {}\n", .{ a, b, a + b }); +// \\} +// , +// \\pub fn main() void { +// \\ const a: f64 = (10.0e-0 + (10.e+0)) + 10_00.00_00e-2 + 00_00.00_10e+4; +// \\ const b: f64 = 010.0 - -0_10. + 0_1_0.0_0 + 1e2; +// \\ std.debug.warn("a: {}, b: {} -> a+b: {}\n", .{ a, b, a + b }); +// \\} +// \\ +// ); +//} +// +//test "zig fmt: hexadeciaml float literals with underscore separators" { +// try testTransform( +// \\pub fn main() void { +// \\ const a: f64 = (0x10.0p-0+(0x10.p+0))+0x10_00.00_00p-8+0x00_00.00_10p+16; +// \\ const b: f64 = 0x0010.0--0x00_10.+0x10.00+0x1p4; +// \\ std.debug.warn("a: {}, b: {} -> a+b: {}\n", .{ a, b, a + b }); +// \\} +// , +// \\pub fn main() void { +// \\ const a: f64 = (0x10.0p-0 + (0x10.p+0)) + 0x10_00.00_00p-8 + 0x00_00.00_10p+16; +// \\ const b: f64 = 0x0010.0 - -0x00_10. 
+ 0x10.00 + 0x1p4; +// \\ std.debug.warn("a: {}, b: {} -> a+b: {}\n", .{ a, b, a + b }); +// \\} +// \\ +// ); +//} +// +//test "zig fmt: convert async fn into callconv(.Async)" { +// try testTransform( +// \\async fn foo() void {} +// , +// \\fn foo() callconv(.Async) void {} +// \\ +// ); +//} +// +//test "zig fmt: convert extern fn proto into callconv(.C)" { +// try testTransform( +// \\extern fn foo0() void {} +// \\const foo1 = extern fn () void; +// , +// \\extern fn foo0() void {} +// \\const foo1 = fn () callconv(.C) void; +// \\ +// ); +//} +// +//test "zig fmt: C var args" { +// try testCanonical( +// \\pub extern "c" fn printf(format: [*:0]const u8, ...) c_int; +// \\ +// ); +//} +// +//test "zig fmt: Only indent multiline string literals in function calls" { +// try testCanonical( +// \\test "zig fmt:" { +// \\ try testTransform( +// \\ \\const X = struct { +// \\ \\ foo: i32, bar: i8 }; +// \\ , +// \\ \\const X = struct { +// \\ \\ foo: i32, bar: i8 +// \\ \\}; +// \\ \\ +// \\ ); +// \\} +// \\ +// ); +//} +// +//test "zig fmt: Don't add extra newline after if" { +// try testCanonical( +// \\pub fn atomicSymLink(allocator: *Allocator, existing_path: []const u8, new_path: []const u8) !void { +// \\ if (cwd().symLink(existing_path, new_path, .{})) { +// \\ return; +// \\ } +// \\} +// \\ +// ); +//} +// +//test "zig fmt: comments in ternary ifs" { +// try testCanonical( +// \\const x = if (true) { +// \\ 1; +// \\} else if (false) +// \\ // Comment +// \\ 0; +// \\const y = if (true) +// \\ // Comment +// \\ 1 +// \\else +// \\ 0; +// \\ +// \\pub extern "c" fn printf(format: [*:0]const u8, ...) 
c_int; +// \\ +// ); +//} +// +//test "zig fmt: test comments in field access chain" { +// try testCanonical( +// \\pub const str = struct { +// \\ pub const Thing = more.more // +// \\ .more() // +// \\ .more().more() // +// \\ .more() // +// \\ // .more() // +// \\ .more() // +// \\ .more(); +// \\ data: Data, +// \\}; +// \\ +// \\pub const str = struct { +// \\ pub const Thing = more.more // +// \\ .more() // +// \\ // .more() // +// \\ // .more() // +// \\ // .more() // +// \\ .more() // +// \\ .more(); +// \\ data: Data, +// \\}; +// \\ +// \\pub const str = struct { +// \\ pub const Thing = more // +// \\ .more // +// \\ .more() // +// \\ .more(); +// \\ data: Data, +// \\}; +// \\ +// ); +//} +// +//test "zig fmt: Indent comma correctly after multiline string literals in arg list (trailing comma)" { +// try testCanonical( +// \\fn foo() void { +// \\ z.display_message_dialog( +// \\ *const [323:0]u8, +// \\ \\Message Text +// \\ \\------------ +// \\ \\xxxxxxxxxxxx +// \\ \\xxxxxxxxxxxx +// \\ , +// \\ g.GtkMessageType.GTK_MESSAGE_WARNING, +// \\ null, +// \\ ); +// \\ +// \\ z.display_message_dialog(*const [323:0]u8, +// \\ \\Message Text +// \\ \\------------ +// \\ \\xxxxxxxxxxxx +// \\ \\xxxxxxxxxxxx +// \\ , g.GtkMessageType.GTK_MESSAGE_WARNING, null); +// \\} +// \\ +// ); +//} +// +//test "zig fmt: Control flow statement as body of blockless if" { +// try testCanonical( +// \\pub fn main() void { +// \\ const zoom_node = if (focused_node == layout_first) +// \\ if (it.next()) { +// \\ if (!node.view.pending.float and !node.view.pending.fullscreen) break node; +// \\ } else null +// \\ else +// \\ focused_node; +// \\ +// \\ const zoom_node = if (focused_node == layout_first) while (it.next()) |node| { +// \\ if (!node.view.pending.float and !node.view.pending.fullscreen) break node; +// \\ } else null else +// \\ focused_node; +// \\ +// \\ const zoom_node = if (focused_node == layout_first) +// \\ if (it.next()) { +// \\ if (!node.view.pending.float 
and !node.view.pending.fullscreen) break node; +// \\ } else null; +// \\ +// \\ const zoom_node = if (focused_node == layout_first) while (it.next()) |node| { +// \\ if (!node.view.pending.float and !node.view.pending.fullscreen) break node; +// \\ }; +// \\ +// \\ const zoom_node = if (focused_node == layout_first) for (nodes) |node| { +// \\ break node; +// \\ }; +// \\ +// \\ const zoom_node = if (focused_node == layout_first) switch (nodes) { +// \\ 0 => 0, +// \\ } else +// \\ focused_node; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: " { +// try testCanonical( +// \\pub fn sendViewTags(self: Self) void { +// \\ var it = ViewStack(View).iterator(self.output.views.first, std.math.maxInt(u32)); +// \\ while (it.next()) |node| +// \\ view_tags.append(node.view.current_tags) catch { +// \\ c.wl_resource_post_no_memory(self.wl_resource); +// \\ log.crit(.river_status, "out of memory", .{}); +// \\ return; +// \\ }; +// \\} +// \\ +// ); +//} +// +//test "zig fmt: allow trailing line comments to do manual array formatting" { +// try testCanonical( +// \\fn foo() void { +// \\ self.code.appendSliceAssumeCapacity(&[_]u8{ +// \\ 0x55, // push rbp +// \\ 0x48, 0x89, 0xe5, // mov rbp, rsp +// \\ 0x48, 0x81, 0xec, // sub rsp, imm32 (with reloc) +// \\ }); +// \\ +// \\ di_buf.appendAssumeCapacity(&[_]u8{ +// \\ 1, DW.TAG_compile_unit, DW.CHILDREN_no, // header +// \\ DW.AT_stmt_list, DW_FORM_data4, // form value pairs +// \\ DW.AT_low_pc, DW_FORM_addr, +// \\ DW.AT_high_pc, DW_FORM_addr, +// \\ DW.AT_name, DW_FORM_strp, +// \\ DW.AT_comp_dir, DW_FORM_strp, +// \\ DW.AT_producer, DW_FORM_strp, +// \\ DW.AT_language, DW_FORM_data2, +// \\ 0, 0, // sentinel +// \\ }); +// \\ +// \\ self.code.appendSliceAssumeCapacity(&[_]u8{ +// \\ 0x55, // push rbp +// \\ 0x48, 0x89, 0xe5, // mov rbp, rsp +// \\ // How do we handle this? +// \\ //0x48, 0x81, 0xec, // sub rsp, imm32 (with reloc) +// \\ // Here's a blank line, should that be allowed? 
+// \\ +// \\ 0x48, 0x89, 0xe5, +// \\ 0x33, 0x45, +// \\ // Now the comment breaks a single line -- how do we handle this? +// \\ 0x88, +// \\ }); +// \\} +// \\ +// ); +//} +// +//test "zig fmt: multiline string literals should play nice with array initializers" { +// try testCanonical( +// \\fn main() void { +// \\ var a = .{.{.{.{.{.{.{.{ +// \\ 0, +// \\ }}}}}}}}; +// \\ myFunc(.{ +// \\ "aaaaaaa", "bbbbbb", "ccccc", +// \\ "dddd", ("eee"), ("fff"), +// \\ ("gggg"), +// \\ // Line comment +// \\ \\Multiline String Literals can be quite long +// \\ , +// \\ \\Multiline String Literals can be quite long +// \\ \\Multiline String Literals can be quite long +// \\ , +// \\ \\Multiline String Literals can be quite long +// \\ \\Multiline String Literals can be quite long +// \\ \\Multiline String Literals can be quite long +// \\ \\Multiline String Literals can be quite long +// \\ , +// \\ ( +// \\ \\Multiline String Literals can be quite long +// \\ ), +// \\ .{ +// \\ \\xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx +// \\ \\xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx +// \\ \\xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx +// \\ }, +// \\ .{( +// \\ \\xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx +// \\ )}, +// \\ .{ +// \\ "xxxxxxx", "xxx", +// \\ ( +// \\ \\ xxx +// \\ ), +// \\ "xxx", "xxx", +// \\ }, +// \\ .{ "xxxxxxx", "xxx", "xxx", "xxx" }, .{ "xxxxxxx", "xxx", "xxx", "xxx" }, +// \\ "aaaaaaa", "bbbbbb", "ccccc", // - +// \\ "dddd", ("eee"), ("fff"), +// \\ .{ +// \\ "xxx", "xxx", +// \\ ( +// \\ \\ xxx +// \\ ), +// \\ "xxxxxxxxxxxxxx", "xxx", +// \\ }, +// \\ .{ +// \\ ( +// \\ \\xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx +// \\ ), +// \\ \\xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx +// \\ }, +// \\ \\xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx +// \\ \\xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx +// \\ }); +// \\} +// \\ +// ); +//} +// +//test "zig fmt: use of comments and Multiline string literals may force the parameters over multiple lines" { +// try testCanonical( +// \\pub 
fn makeMemUndefined(qzz: []u8) i1 { +// \\ cases.add( // fixed bug #2032 +// \\ "compile diagnostic string for top level decl type", +// \\ \\export fn entry() void { +// \\ \\ var foo: u32 = @This(){}; +// \\ \\} +// \\ , &[_][]const u8{ +// \\ "tmp.zig:2:27: error: type 'u32' does not support array initialization", +// \\ }); +// \\ @compileError( +// \\ \\ unknown-length pointers and C pointers cannot be hashed deeply. +// \\ \\ Consider providing your own hash function. +// \\ \\ unknown-length pointers and C pointers cannot be hashed deeply. +// \\ \\ Consider providing your own hash function. +// \\ ); +// \\ return @intCast(i1, doMemCheckClientRequestExpr(0, // default return +// \\ .MakeMemUndefined, @ptrToInt(qzz.ptr), qzz.len, 0, 0, 0)); +// \\} +// \\ +// \\// This looks like garbage don't do this +// \\const rparen = tree.prevToken( +// \\// the first token for the annotation expressions is the left +// \\// parenthesis, hence the need for two prevToken +// \\ if (fn_proto.getAlignExpr()) |align_expr| +// \\ tree.prevToken(tree.prevToken(align_expr.firstToken())) +// \\else if (fn_proto.getSectionExpr()) |section_expr| +// \\ tree.prevToken(tree.prevToken(section_expr.firstToken())) +// \\else if (fn_proto.getCallconvExpr()) |callconv_expr| +// \\ tree.prevToken(tree.prevToken(callconv_expr.firstToken())) +// \\else switch (fn_proto.return_type) { +// \\ .Explicit => |node| node.firstToken(), +// \\ .InferErrorSet => |node| tree.prevToken(node.firstToken()), +// \\ .Invalid => unreachable, +// \\}); +// \\ +// ); +//} +// +//test "zig fmt: single argument trailing commas in @builtins()" { +// try testCanonical( +// \\pub fn foo(qzz: []u8) i1 { +// \\ @panic( +// \\ foo, +// \\ ); +// \\ panic( +// \\ foo, +// \\ ); +// \\ @panic( +// \\ foo, +// \\ bar, +// \\ ); +// \\} +// \\ +// ); +//} +// +//test "zig fmt: trailing comma should force multiline 1 column" { +// try testTransform( +// \\pub const UUID_NULL: uuid_t = [16]u8{0,0,0,0,}; +// \\ +// , +// 
\\pub const UUID_NULL: uuid_t = [16]u8{ +// \\ 0, +// \\ 0, +// \\ 0, +// \\ 0, +// \\}; +// \\ +// ); +//} +// +//test "zig fmt: function params should align nicely" { +// try testCanonical( +// \\pub fn foo() void { +// \\ cases.addRuntimeSafety("slicing operator with sentinel", +// \\ \\const std = @import("std"); +// \\ ++ check_panic_msg ++ +// \\ \\pub fn main() void { +// \\ \\ var buf = [4]u8{'a','b','c',0}; +// \\ \\ const slice = buf[0..:0]; +// \\ \\} +// \\ ); +// \\} +// \\ +// ); +//} const std = @import("std"); const mem = std.mem; @@ -3763,8 +3740,10 @@ fn testParse(source: []const u8, allocator: *mem.Allocator, anything_changed: *b errdefer buffer.deinit(); const writer = buffer.writer(); - anything_changed.* = try std.zig.render(allocator, writer, tree); - return buffer.toOwnedSlice(); + try std.zig.render(allocator, writer, tree); + const result = buffer.toOwnedSlice(); + anything_changed.* = !mem.eql(u8, result, source); + return result; } fn testTransform(source: []const u8, expected_source: []const u8) !void { const needed_alloc_count = x: { diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index c8568301ea..17c59776b5 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -14,2167 +14,2072 @@ const indent_delta = 4; const asm_indent_delta = 2; pub const Error = error{ - /// Ran out of memory allocating call stack frames to complete rendering. + /// Ran out of memory allocating call stack frames to complete rendering, or + /// ran out of memory allocating space in the output buffer. 
OutOfMemory, }; -/// Returns whether anything changed -pub fn render(allocator: *mem.Allocator, stream: anytype, tree: *ast.Tree) (@TypeOf(stream).Error || Error)!bool { - // cannot render an invalid tree - std.debug.assert(tree.errors.len == 0); +const Writer = std.ArrayList(u8).Writer; +const Ais = std.io.AutoIndentingStream(Writer); - var change_detection_stream = std.io.changeDetectionStream(tree.source, stream); - var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, change_detection_stream.writer()); - - try renderRoot(allocator, &auto_indenting_stream, tree); - - return change_detection_stream.changeDetected(); +/// Returns whether anything changed. +/// `gpa` is used for allocating extra stack memory if needed, because +/// this function utilizes recursion. +pub fn render(gpa: *mem.Allocator, writer: Writer, tree: ast.Tree) Error!void { + assert(tree.errors.len == 0); // cannot render an invalid tree + var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, writer); + try renderRoot(&auto_indenting_stream, tree); } -fn renderRoot( - allocator: *mem.Allocator, - ais: anytype, - tree: *ast.Tree, -) (@TypeOf(ais.*).Error || Error)!void { - - // render all the line comments at the beginning of the file - for (tree.token_ids) |token_id, i| { - if (token_id != .LineComment) break; - const token_loc = tree.token_locs[i]; - try ais.writer().print("{s}\n", .{mem.trimRight(u8, tree.tokenSliceLoc(token_loc), " ")}); - const next_token = tree.token_locs[i + 1]; - const loc = tree.tokenLocationLoc(token_loc.end, next_token); - if (loc.line >= 2) { - try ais.insertNewline(); - } - } - - var decl_i: ast.NodeIndex = 0; - const root_decls = tree.root_node.decls(); - - if (root_decls.len == 0) return; +/// Assumes there are no tokens in between start and end. 
+fn renderComments(ais: *Ais, tree: ast.Tree, start: usize, end: usize, prefix: []const u8) Error!usize { + var index: usize = start; + var count: usize = 0; while (true) { - var decl = root_decls[decl_i]; - - // This loop does the following: - // - // - Iterates through line/doc comment tokens that precedes the current - // decl. - // - Figures out the first token index (`copy_start_token_index`) which - // hasn't been copied to the output stream yet. - // - Detects `zig fmt: (off|on)` in the line comment tokens, and - // determines whether the current decl should be reformatted or not. - // - var token_index = decl.firstToken(); - var fmt_active = true; - var found_fmt_directive = false; - - var copy_start_token_index = token_index; - - while (token_index != 0) { - token_index -= 1; - const token_id = tree.token_ids[token_index]; - switch (token_id) { - .LineComment => {}, - .DocComment => { - copy_start_token_index = token_index; - continue; - }, - else => break, - } - - const token_loc = tree.token_locs[token_index]; - if (mem.eql(u8, mem.trim(u8, tree.tokenSliceLoc(token_loc)[2..], " "), "zig fmt: off")) { - if (!found_fmt_directive) { - fmt_active = false; - found_fmt_directive = true; - } - } else if (mem.eql(u8, mem.trim(u8, tree.tokenSliceLoc(token_loc)[2..], " "), "zig fmt: on")) { - if (!found_fmt_directive) { - fmt_active = true; - found_fmt_directive = true; - } + // Scan forward to the next line comment, counting newlines. + const comment_start = mem.indexOf(u8, tree.source[index..end], "//") orelse return count; + const newline = mem.indexOfScalar(u8, tree.source[comment_start..end], '\n').?; + const untrimmed_comment = tree.source[comment_start..][0..newline]; + const trimmed_comment = mem.trimRight(u8, untrimmed_comment, " \r\t"); + if (count == 0) { + count += 1; + try ais.writer().writeAll(prefix); + } else { + // If another newline occurs between prev comment and this one + // we honor it, but not any additional ones. 
+ if (mem.indexOfScalar(u8, tree.source[index..comment_start], '\n') != null) { + try ais.insertNewline(); } } - - if (!fmt_active) { - // Reformatting is disabled for the current decl and possibly some - // more decls that follow. - // Find the next `decl` for which reformatting is re-enabled. - token_index = decl.firstToken(); - - while (!fmt_active) { - decl_i += 1; - if (decl_i >= root_decls.len) { - // If there's no next reformatted `decl`, just copy the - // remaining input tokens and bail out. - const start = tree.token_locs[copy_start_token_index].start; - try copyFixingWhitespace(ais, tree.source[start..]); - return; - } - decl = root_decls[decl_i]; - var decl_first_token_index = decl.firstToken(); - - while (token_index < decl_first_token_index) : (token_index += 1) { - const token_id = tree.token_ids[token_index]; - switch (token_id) { - .LineComment => {}, - .Eof => unreachable, - else => continue, - } - const token_loc = tree.token_locs[token_index]; - if (mem.eql(u8, mem.trim(u8, tree.tokenSliceLoc(token_loc)[2..], " "), "zig fmt: on")) { - fmt_active = true; - } else if (mem.eql(u8, mem.trim(u8, tree.tokenSliceLoc(token_loc)[2..], " "), "zig fmt: off")) { - fmt_active = false; - } - } - } - - // Found the next `decl` for which reformatting is enabled. Copy - // the input tokens before the `decl` that haven't been copied yet. 
- var copy_end_token_index = decl.firstToken(); - token_index = copy_end_token_index; - while (token_index != 0) { - token_index -= 1; - const token_id = tree.token_ids[token_index]; - switch (token_id) { - .LineComment => {}, - .DocComment => { - copy_end_token_index = token_index; - continue; - }, - else => break, - } - } - - const start = tree.token_locs[copy_start_token_index].start; - const end = tree.token_locs[copy_end_token_index].start; - try copyFixingWhitespace(ais, tree.source[start..end]); - } - - try renderTopLevelDecl(allocator, ais, tree, decl); - decl_i += 1; - if (decl_i >= root_decls.len) return; - try renderExtraNewline(tree, ais, root_decls[decl_i]); + try ais.writer().print("{s}\n", .{trimmed_comment}); + index += comment_start + newline; } } -fn renderExtraNewline(tree: *ast.Tree, ais: anytype, node: *ast.Node) @TypeOf(ais.*).Error!void { - return renderExtraNewlineToken(tree, ais, node.firstToken()); -} +fn renderRoot(ais: *Ais, tree: ast.Tree) Error!void { + // Render all the line comments at the beginning of the file. + const src_start: usize = if (mem.startsWith(u8, tree.source, "\xEF\xBB\xBF")) 3 else 0; + const comment_end_loc: usize = tree.tokens.items(.start)[0]; + _ = try renderComments(ais, tree, src_start, comment_end_loc, ""); -fn renderExtraNewlineToken( - tree: *ast.Tree, - ais: anytype, - first_token: ast.TokenIndex, -) @TypeOf(ais.*).Error!void { - var prev_token = first_token; - if (prev_token == 0) return; - var newline_threshold: usize = 2; - while (tree.token_ids[prev_token - 1] == .DocComment) { - if (tree.tokenLocation(tree.token_locs[prev_token - 1].end, prev_token).line == 1) { - newline_threshold += 1; - } - prev_token -= 1; - } - const prev_token_end = tree.token_locs[prev_token - 1].end; - const loc = tree.tokenLocation(prev_token_end, first_token); - if (loc.line >= newline_threshold) { - try ais.insertNewline(); + // Root is always index 0. 
+ const nodes_data = tree.nodes.items(.data); + const root_decls = tree.extra_data[nodes_data[0].lhs..nodes_data[0].rhs]; + if (root_decls.len == 0) return; + + for (root_decls) |decl| { + try renderTopLevelDecl(ais, tree, decl); } } -fn renderTopLevelDecl(allocator: *mem.Allocator, ais: anytype, tree: *ast.Tree, decl: *ast.Node) (@TypeOf(ais.*).Error || Error)!void { - try renderContainerDecl(allocator, ais, tree, decl, .Newline); +fn renderExtraNewline(tree: ast.Tree, ais: *Ais, node: ast.Node.Index) Error!void { + return renderExtraNewlineToken(tree, ais, tree.firstToken(node)); } -fn renderContainerDecl(allocator: *mem.Allocator, ais: anytype, tree: *ast.Tree, decl: *ast.Node, space: Space) (@TypeOf(ais.*).Error || Error)!void { - switch (decl.tag) { - .FnProto => { - const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", decl); +fn renderExtraNewlineToken(tree: ast.Tree, ais: *Ais, first_token: ast.TokenIndex) Error!void { + @panic("TODO implement renderExtraNewlineToken"); + //var prev_token = first_token; + //if (prev_token == 0) return; + //const token_tags = tree.tokens.items(.tag); + //var newline_threshold: usize = 2; + //while (token_tags[prev_token - 1] == .DocComment) { + // if (tree.tokenLocation(tree.token_locs[prev_token - 1].end, prev_token).line == 1) { + // newline_threshold += 1; + // } + // prev_token -= 1; + //} + //const prev_token_end = tree.token_locs[prev_token - 1].end; + //const loc = tree.tokenLocation(prev_token_end, first_token); + //if (loc.line >= newline_threshold) { + // try ais.insertNewline(); + //} +} - try renderDocComments(tree, ais, fn_proto, fn_proto.getDocComments()); +fn renderTopLevelDecl(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index) Error!void { + return renderContainerDecl(ais, tree, decl, .Newline); +} - if (fn_proto.getBodyNode()) |body_node| { - try renderExpression(allocator, ais, tree, decl, .Space); - try renderExpression(allocator, ais, tree, body_node, space); - } else { - try 
renderExpression(allocator, ais, tree, decl, .None); - try renderToken(tree, ais, tree.nextToken(decl.lastToken()), space); - } - }, +fn renderContainerDecl(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) Error!void { + switch (tree.nodes.items(.tag)[decl]) { + .UsingNamespace, + .FnProtoSimple, + .FnProtoSimpleMulti, + .FnProtoOne, + .FnProto, + .FnDecl, + .GlobalVarDecl, + .LocalVarDecl, + .SimpleVarDecl, + .AlignedVarDecl, + .TestDecl, + .ContainerFieldInit, + .ContainerFieldAlign, + .ContainerField, + => @panic("TODO implement renderContainerDecl"), - .Use => { - const use_decl = @fieldParentPtr(ast.Node.Use, "base", decl); + .Comptime => return renderExpression(ais, tree, decl, space), - if (use_decl.visib_token) |visib_token| { - try renderToken(tree, ais, visib_token, .Space); // pub - } - try renderToken(tree, ais, use_decl.use_token, .Space); // usingnamespace - try renderExpression(allocator, ais, tree, use_decl.expr, .None); - try renderToken(tree, ais, use_decl.semicolon_token, space); // ; - }, - - .VarDecl => { - const var_decl = @fieldParentPtr(ast.Node.VarDecl, "base", decl); - - try renderDocComments(tree, ais, var_decl, var_decl.getDocComments()); - try renderVarDecl(allocator, ais, tree, var_decl); - }, - - .TestDecl => { - const test_decl = @fieldParentPtr(ast.Node.TestDecl, "base", decl); - - try renderDocComments(tree, ais, test_decl, test_decl.doc_comments); - try renderToken(tree, ais, test_decl.test_token, .Space); - if (test_decl.name) |name| - try renderExpression(allocator, ais, tree, name, .Space); - try renderExpression(allocator, ais, tree, test_decl.body_node, space); - }, - - .ContainerField => { - const field = @fieldParentPtr(ast.Node.ContainerField, "base", decl); - - try renderDocComments(tree, ais, field, field.doc_comments); - if (field.comptime_token) |t| { - try renderToken(tree, ais, t, .Space); // comptime - } - - const src_has_trailing_comma = blk: { - const maybe_comma = tree.nextToken(field.lastToken()); 
- break :blk tree.token_ids[maybe_comma] == .Comma; - }; - - // The trailing comma is emitted at the end, but if it's not present - // we still have to respect the specified `space` parameter - const last_token_space: Space = if (src_has_trailing_comma) .None else space; - - if (field.type_expr == null and field.value_expr == null) { - try renderToken(tree, ais, field.name_token, last_token_space); // name - } else if (field.type_expr != null and field.value_expr == null) { - try renderToken(tree, ais, field.name_token, .None); // name - try renderToken(tree, ais, tree.nextToken(field.name_token), .Space); // : - - if (field.align_expr) |align_value_expr| { - try renderExpression(allocator, ais, tree, field.type_expr.?, .Space); // type - const lparen_token = tree.prevToken(align_value_expr.firstToken()); - const align_kw = tree.prevToken(lparen_token); - const rparen_token = tree.nextToken(align_value_expr.lastToken()); - try renderToken(tree, ais, align_kw, .None); // align - try renderToken(tree, ais, lparen_token, .None); // ( - try renderExpression(allocator, ais, tree, align_value_expr, .None); // alignment - try renderToken(tree, ais, rparen_token, last_token_space); // ) - } else { - try renderExpression(allocator, ais, tree, field.type_expr.?, last_token_space); // type - } - } else if (field.type_expr == null and field.value_expr != null) { - try renderToken(tree, ais, field.name_token, .Space); // name - try renderToken(tree, ais, tree.nextToken(field.name_token), .Space); // = - try renderExpression(allocator, ais, tree, field.value_expr.?, last_token_space); // value - } else { - try renderToken(tree, ais, field.name_token, .None); // name - try renderToken(tree, ais, tree.nextToken(field.name_token), .Space); // : - - if (field.align_expr) |align_value_expr| { - try renderExpression(allocator, ais, tree, field.type_expr.?, .Space); // type - const lparen_token = tree.prevToken(align_value_expr.firstToken()); - const align_kw = 
tree.prevToken(lparen_token); - const rparen_token = tree.nextToken(align_value_expr.lastToken()); - try renderToken(tree, ais, align_kw, .None); // align - try renderToken(tree, ais, lparen_token, .None); // ( - try renderExpression(allocator, ais, tree, align_value_expr, .None); // alignment - try renderToken(tree, ais, rparen_token, .Space); // ) - } else { - try renderExpression(allocator, ais, tree, field.type_expr.?, .Space); // type - } - try renderToken(tree, ais, tree.prevToken(field.value_expr.?.firstToken()), .Space); // = - try renderExpression(allocator, ais, tree, field.value_expr.?, last_token_space); // value - } - - if (src_has_trailing_comma) { - const comma = tree.nextToken(field.lastToken()); - try renderToken(tree, ais, comma, space); - } - }, - - .Comptime => { - assert(!decl.requireSemiColon()); - try renderExpression(allocator, ais, tree, decl, space); - }, - - .DocComment => { - const comment = @fieldParentPtr(ast.Node.DocComment, "base", decl); - const kind = tree.token_ids[comment.first_line]; - try renderToken(tree, ais, comment.first_line, .Newline); - var tok_i = comment.first_line + 1; - while (true) : (tok_i += 1) { - const tok_id = tree.token_ids[tok_i]; - if (tok_id == kind) { - try renderToken(tree, ais, tok_i, .Newline); - } else if (tok_id == .LineComment) { - continue; - } else { - break; - } - } - }, else => unreachable, } + //switch (tag) { + // .FnProto => { + // const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", decl); + + // try renderDocComments(tree, ais, fn_proto, fn_proto.getDocComments()); + + // if (fn_proto.getBodyNode()) |body_node| { + // try renderExpression(allocator, ais, tree, decl, .Space); + // try renderExpression(allocator, ais, tree, body_node, space); + // } else { + // try renderExpression(allocator, ais, tree, decl, .None); + // try renderToken(ais, tree, tree.nextToken(decl.lastToken()), space); + // } + // }, + + // .Use => { + // const use_decl = @fieldParentPtr(ast.Node.Use, "base", decl); 
+ + // if (use_decl.visib_token) |visib_token| { + // try renderToken(ais, tree, visib_token, .Space); // pub + // } + // try renderToken(ais, tree, use_decl.use_token, .Space); // usingnamespace + // try renderExpression(allocator, ais, tree, use_decl.expr, .None); + // try renderToken(ais, tree, use_decl.semicolon_token, space); // ; + // }, + + // .VarDecl => { + // const var_decl = @fieldParentPtr(ast.Node.VarDecl, "base", decl); + + // try renderDocComments(tree, ais, var_decl, var_decl.getDocComments()); + // try renderVarDecl(allocator, ais, tree, var_decl); + // }, + + // .TestDecl => { + // const test_decl = @fieldParentPtr(ast.Node.TestDecl, "base", decl); + + // try renderDocComments(tree, ais, test_decl, test_decl.doc_comments); + // try renderToken(ais, tree, test_decl.test_token, .Space); + // if (test_decl.name) |name| + // try renderExpression(allocator, ais, tree, name, .Space); + // try renderExpression(allocator, ais, tree, test_decl.body_node, space); + // }, + + // .ContainerField => { + // const field = @fieldParentPtr(ast.Node.ContainerField, "base", decl); + + // try renderDocComments(tree, ais, field, field.doc_comments); + // if (field.comptime_token) |t| { + // try renderToken(ais, tree, t, .Space); // comptime + // } + + // const src_has_trailing_comma = blk: { + // const maybe_comma = tree.nextToken(field.lastToken()); + // break :blk tree.token_tags[maybe_comma] == .Comma; + // }; + + // // The trailing comma is emitted at the end, but if it's not present + // // we still have to respect the specified `space` parameter + // const last_token_space: Space = if (src_has_trailing_comma) .None else space; + + // if (field.type_expr == null and field.value_expr == null) { + // try renderToken(ais, tree, field.name_token, last_token_space); // name + // } else if (field.type_expr != null and field.value_expr == null) { + // try renderToken(ais, tree, field.name_token, .None); // name + // try renderToken(ais, tree, 
tree.nextToken(field.name_token), .Space); // : + + // if (field.align_expr) |align_value_expr| { + // try renderExpression(allocator, ais, tree, field.type_expr.?, .Space); // type + // const lparen_token = tree.prevToken(align_value_expr.firstToken()); + // const align_kw = tree.prevToken(lparen_token); + // const rparen_token = tree.nextToken(align_value_expr.lastToken()); + // try renderToken(ais, tree, align_kw, .None); // align + // try renderToken(ais, tree, lparen_token, .None); // ( + // try renderExpression(allocator, ais, tree, align_value_expr, .None); // alignment + // try renderToken(ais, tree, rparen_token, last_token_space); // ) + // } else { + // try renderExpression(allocator, ais, tree, field.type_expr.?, last_token_space); // type + // } + // } else if (field.type_expr == null and field.value_expr != null) { + // try renderToken(ais, tree, field.name_token, .Space); // name + // try renderToken(ais, tree, tree.nextToken(field.name_token), .Space); // = + // try renderExpression(allocator, ais, tree, field.value_expr.?, last_token_space); // value + // } else { + // try renderToken(ais, tree, field.name_token, .None); // name + // try renderToken(ais, tree, tree.nextToken(field.name_token), .Space); // : + + // if (field.align_expr) |align_value_expr| { + // try renderExpression(allocator, ais, tree, field.type_expr.?, .Space); // type + // const lparen_token = tree.prevToken(align_value_expr.firstToken()); + // const align_kw = tree.prevToken(lparen_token); + // const rparen_token = tree.nextToken(align_value_expr.lastToken()); + // try renderToken(ais, tree, align_kw, .None); // align + // try renderToken(ais, tree, lparen_token, .None); // ( + // try renderExpression(allocator, ais, tree, align_value_expr, .None); // alignment + // try renderToken(ais, tree, rparen_token, .Space); // ) + // } else { + // try renderExpression(allocator, ais, tree, field.type_expr.?, .Space); // type + // } + // try renderToken(ais, tree, 
tree.prevToken(field.value_expr.?.firstToken()), .Space); // = + // try renderExpression(allocator, ais, tree, field.value_expr.?, last_token_space); // value + // } + + // if (src_has_trailing_comma) { + // const comma = tree.nextToken(field.lastToken()); + // try renderToken(ais, tree, comma, space); + // } + // }, + + // .DocComment => { + // const comment = @fieldParentPtr(ast.Node.DocComment, "base", decl); + // const kind = tree.token_tags[comment.first_line]; + // try renderToken(ais, tree, comment.first_line, .Newline); + // var tok_i = comment.first_line + 1; + // while (true) : (tok_i += 1) { + // const tok_id = tree.token_tags[tok_i]; + // if (tok_id == kind) { + // try renderToken(ais, tree, tok_i, .Newline); + // } else if (tok_id == .LineComment) { + // continue; + // } else { + // break; + // } + // } + // }, + // else => unreachable, + //} } -fn renderExpression( - allocator: *mem.Allocator, - ais: anytype, - tree: *ast.Tree, - base: *ast.Node, - space: Space, -) (@TypeOf(ais.*).Error || Error)!void { - switch (base.tag) { - .Identifier, - .IntegerLiteral, - .FloatLiteral, - .StringLiteral, - .CharLiteral, - .BoolLiteral, - .NullLiteral, - .Unreachable, - .ErrorType, - .UndefinedLiteral, - => { - const casted_node = base.cast(ast.Node.OneToken).?; - return renderToken(tree, ais, casted_node.token, space); - }, +fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Space) Error!void { + const token_tags = tree.tokens.items(.tag); + const main_tokens = tree.nodes.items(.main_token); + switch (tree.nodes.items(.tag)[node]) { + //.Identifier, + //.IntegerLiteral, + //.FloatLiteral, + //.StringLiteral, + //.CharLiteral, + //.BoolLiteral, + //.NullLiteral, + //.Unreachable, + //.ErrorType, + //.UndefinedLiteral, + //=> { + // const casted_node = base.cast(ast.Node.OneToken).?; + // return renderToken(ais, tree, casted_node.token, space); + //}, - .AnyType => { - const any_type = base.castTag(.AnyType).?; - if (mem.eql(u8, 
tree.tokenSlice(any_type.token), "var")) { - // TODO remove in next release cycle - try ais.writer().writeAll("anytype"); - if (space == .Comma) try ais.writer().writeAll(",\n"); - return; + //.AnyType => { + // const any_type = base.castTag(.AnyType).?; + // if (mem.eql(u8, tree.tokenSlice(any_type.token), "var")) { + // // TODO remove in next release cycle + // try ais.writer().writeAll("anytype"); + // if (space == .Comma) try ais.writer().writeAll(",\n"); + // return; + // } + // return renderToken(ais, tree, any_type.token, space); + //}, + .Block => { + const lbrace = main_tokens[node]; + if (token_tags[lbrace - 1] == .Colon and + token_tags[lbrace - 2] == .Identifier) + { + try renderToken(ais, tree, lbrace - 2, .None); + try renderToken(ais, tree, lbrace - 1, .Space); } - return renderToken(tree, ais, any_type.token, space); - }, + const nodes_data = tree.nodes.items(.data); + const statements = tree.extra_data[nodes_data[node].lhs..nodes_data[node].rhs]; - .Block, .LabeledBlock => { - const block: struct { - label: ?ast.TokenIndex, - statements: []*ast.Node, - lbrace: ast.TokenIndex, - rbrace: ast.TokenIndex, - } = b: { - if (base.castTag(.Block)) |block| { - break :b .{ - .label = null, - .statements = block.statements(), - .lbrace = block.lbrace, - .rbrace = block.rbrace, - }; - } else if (base.castTag(.LabeledBlock)) |block| { - break :b .{ - .label = block.label, - .statements = block.statements(), - .lbrace = block.lbrace, - .rbrace = block.rbrace, - }; - } else { - unreachable; - } - }; - - if (block.label) |label| { - try renderToken(tree, ais, label, Space.None); - try renderToken(tree, ais, tree.nextToken(label), Space.Space); - } - - if (block.statements.len == 0) { + if (statements.len == 0) { ais.pushIndentNextLine(); - defer ais.popIndent(); - try renderToken(tree, ais, block.lbrace, Space.None); + try renderToken(ais, tree, lbrace, .None); + ais.popIndent(); + const rbrace = lbrace + 1; + return renderToken(ais, tree, rbrace, space); } else { 
ais.pushIndentNextLine(); - defer ais.popIndent(); - try renderToken(tree, ais, block.lbrace, Space.Newline); + try renderToken(ais, tree, lbrace, .Newline); - for (block.statements) |statement, i| { - try renderStatement(allocator, ais, tree, statement); + for (statements) |statement, i| { + try renderStatement(ais, tree, statement); - if (i + 1 < block.statements.len) { - try renderExtraNewline(tree, ais, block.statements[i + 1]); + if (i + 1 < statements.len) { + try renderExtraNewline(tree, ais, statements[i + 1]); } } + ais.popIndent(); + const rbrace = tree.lastToken(statements[statements.len - 1]) + 1; + return renderToken(ais, tree, rbrace, space); } - return renderToken(tree, ais, block.rbrace, space); }, - .Defer => { - const defer_node = @fieldParentPtr(ast.Node.Defer, "base", base); + //.Defer => { + // const defer_node = @fieldParentPtr(ast.Node.Defer, "base", base); - try renderToken(tree, ais, defer_node.defer_token, Space.Space); - if (defer_node.payload) |payload| { - try renderExpression(allocator, ais, tree, payload, Space.Space); - } - return renderExpression(allocator, ais, tree, defer_node.expr, space); - }, + // try renderToken(ais, tree, defer_node.defer_token, Space.Space); + // if (defer_node.payload) |payload| { + // try renderExpression(allocator, ais, tree, payload, Space.Space); + // } + // return renderExpression(allocator, ais, tree, defer_node.expr, space); + //}, .Comptime => { - const comptime_node = @fieldParentPtr(ast.Node.Comptime, "base", base); - - try renderToken(tree, ais, comptime_node.comptime_token, Space.Space); - return renderExpression(allocator, ais, tree, comptime_node.expr, space); + const comptime_token = tree.nodes.items(.main_token)[node]; + const block = tree.nodes.items(.data)[node].lhs; + try renderToken(ais, tree, comptime_token, .Space); + return renderExpression(ais, tree, block, space); }, - .Nosuspend => { - const nosuspend_node = @fieldParentPtr(ast.Node.Nosuspend, "base", base); - if (mem.eql(u8, 
tree.tokenSlice(nosuspend_node.nosuspend_token), "noasync")) { - // TODO: remove this - try ais.writer().writeAll("nosuspend "); - } else { - try renderToken(tree, ais, nosuspend_node.nosuspend_token, Space.Space); - } - return renderExpression(allocator, ais, tree, nosuspend_node.expr, space); - }, - - .Suspend => { - const suspend_node = @fieldParentPtr(ast.Node.Suspend, "base", base); - - if (suspend_node.body) |body| { - try renderToken(tree, ais, suspend_node.suspend_token, Space.Space); - return renderExpression(allocator, ais, tree, body, space); - } else { - return renderToken(tree, ais, suspend_node.suspend_token, space); - } - }, - - .Catch => { - const infix_op_node = @fieldParentPtr(ast.Node.Catch, "base", base); - - const op_space = Space.Space; - try renderExpression(allocator, ais, tree, infix_op_node.lhs, op_space); - - const after_op_space = blk: { - const same_line = tree.tokensOnSameLine(infix_op_node.op_token, tree.nextToken(infix_op_node.op_token)); - break :blk if (same_line) op_space else Space.Newline; - }; - - try renderToken(tree, ais, infix_op_node.op_token, after_op_space); - - if (infix_op_node.payload) |payload| { - try renderExpression(allocator, ais, tree, payload, Space.Space); - } - - ais.pushIndentOneShot(); - return renderExpression(allocator, ais, tree, infix_op_node.rhs, space); - }, - - .Add, - .AddWrap, - .ArrayCat, - .ArrayMult, - .Assign, - .AssignBitAnd, - .AssignBitOr, - .AssignBitShiftLeft, - .AssignBitShiftRight, - .AssignBitXor, - .AssignDiv, - .AssignSub, - .AssignSubWrap, - .AssignMod, - .AssignAdd, - .AssignAddWrap, - .AssignMul, - .AssignMulWrap, - .BangEqual, - .BitAnd, - .BitOr, - .BitShiftLeft, - .BitShiftRight, - .BitXor, - .BoolAnd, - .BoolOr, - .Div, - .EqualEqual, - .ErrorUnion, - .GreaterOrEqual, - .GreaterThan, - .LessOrEqual, - .LessThan, - .MergeErrorSets, - .Mod, - .Mul, - .MulWrap, - .Period, - .Range, - .Sub, - .SubWrap, - .OrElse, - => { - const infix_op_node = @fieldParentPtr(ast.Node.SimpleInfixOp, 
"base", base); - - const op_space = switch (base.tag) { - .Period, .ErrorUnion, .Range => Space.None, - else => Space.Space, - }; - try renderExpression(allocator, ais, tree, infix_op_node.lhs, op_space); - - const after_op_space = blk: { - const loc = tree.tokenLocation(tree.token_locs[infix_op_node.op_token].end, tree.nextToken(infix_op_node.op_token)); - break :blk if (loc.line == 0) op_space else Space.Newline; - }; - - { - ais.pushIndent(); - defer ais.popIndent(); - try renderToken(tree, ais, infix_op_node.op_token, after_op_space); - } - ais.pushIndentOneShot(); - return renderExpression(allocator, ais, tree, infix_op_node.rhs, space); - }, - - .BitNot, - .BoolNot, - .Negation, - .NegationWrap, - .OptionalType, - .AddressOf, - => { - const casted_node = @fieldParentPtr(ast.Node.SimplePrefixOp, "base", base); - try renderToken(tree, ais, casted_node.op_token, Space.None); - return renderExpression(allocator, ais, tree, casted_node.rhs, space); - }, - - .Try, - .Resume, - .Await, - => { - const casted_node = @fieldParentPtr(ast.Node.SimplePrefixOp, "base", base); - try renderToken(tree, ais, casted_node.op_token, Space.Space); - return renderExpression(allocator, ais, tree, casted_node.rhs, space); - }, - - .ArrayType => { - const array_type = @fieldParentPtr(ast.Node.ArrayType, "base", base); - return renderArrayType( - allocator, - ais, - tree, - array_type.op_token, - array_type.rhs, - array_type.len_expr, - null, - space, - ); - }, - .ArrayTypeSentinel => { - const array_type = @fieldParentPtr(ast.Node.ArrayTypeSentinel, "base", base); - return renderArrayType( - allocator, - ais, - tree, - array_type.op_token, - array_type.rhs, - array_type.len_expr, - array_type.sentinel, - space, - ); - }, - - .PtrType => { - const ptr_type = @fieldParentPtr(ast.Node.PtrType, "base", base); - const op_tok_id = tree.token_ids[ptr_type.op_token]; - switch (op_tok_id) { - .Asterisk, .AsteriskAsterisk => try ais.writer().writeByte('*'), - .LBracket => if 
(tree.token_ids[ptr_type.op_token + 2] == .Identifier) - try ais.writer().writeAll("[*c") - else - try ais.writer().writeAll("[*"), - else => unreachable, - } - if (ptr_type.ptr_info.sentinel) |sentinel| { - const colon_token = tree.prevToken(sentinel.firstToken()); - try renderToken(tree, ais, colon_token, Space.None); // : - const sentinel_space = switch (op_tok_id) { - .LBracket => Space.None, - else => Space.Space, - }; - try renderExpression(allocator, ais, tree, sentinel, sentinel_space); - } - switch (op_tok_id) { - .Asterisk, .AsteriskAsterisk => {}, - .LBracket => try ais.writer().writeByte(']'), - else => unreachable, - } - if (ptr_type.ptr_info.allowzero_token) |allowzero_token| { - try renderToken(tree, ais, allowzero_token, Space.Space); // allowzero - } - if (ptr_type.ptr_info.align_info) |align_info| { - const lparen_token = tree.prevToken(align_info.node.firstToken()); - const align_token = tree.prevToken(lparen_token); - - try renderToken(tree, ais, align_token, Space.None); // align - try renderToken(tree, ais, lparen_token, Space.None); // ( - - try renderExpression(allocator, ais, tree, align_info.node, Space.None); - - if (align_info.bit_range) |bit_range| { - const colon1 = tree.prevToken(bit_range.start.firstToken()); - const colon2 = tree.prevToken(bit_range.end.firstToken()); - - try renderToken(tree, ais, colon1, Space.None); // : - try renderExpression(allocator, ais, tree, bit_range.start, Space.None); - try renderToken(tree, ais, colon2, Space.None); // : - try renderExpression(allocator, ais, tree, bit_range.end, Space.None); - - const rparen_token = tree.nextToken(bit_range.end.lastToken()); - try renderToken(tree, ais, rparen_token, Space.Space); // ) - } else { - const rparen_token = tree.nextToken(align_info.node.lastToken()); - try renderToken(tree, ais, rparen_token, Space.Space); // ) - } - } - if (ptr_type.ptr_info.const_token) |const_token| { - try renderToken(tree, ais, const_token, Space.Space); // const - } - if 
(ptr_type.ptr_info.volatile_token) |volatile_token| { - try renderToken(tree, ais, volatile_token, Space.Space); // volatile - } - return renderExpression(allocator, ais, tree, ptr_type.rhs, space); - }, - - .SliceType => { - const slice_type = @fieldParentPtr(ast.Node.SliceType, "base", base); - try renderToken(tree, ais, slice_type.op_token, Space.None); // [ - if (slice_type.ptr_info.sentinel) |sentinel| { - const colon_token = tree.prevToken(sentinel.firstToken()); - try renderToken(tree, ais, colon_token, Space.None); // : - try renderExpression(allocator, ais, tree, sentinel, Space.None); - try renderToken(tree, ais, tree.nextToken(sentinel.lastToken()), Space.None); // ] - } else { - try renderToken(tree, ais, tree.nextToken(slice_type.op_token), Space.None); // ] - } - - if (slice_type.ptr_info.allowzero_token) |allowzero_token| { - try renderToken(tree, ais, allowzero_token, Space.Space); // allowzero - } - if (slice_type.ptr_info.align_info) |align_info| { - const lparen_token = tree.prevToken(align_info.node.firstToken()); - const align_token = tree.prevToken(lparen_token); - - try renderToken(tree, ais, align_token, Space.None); // align - try renderToken(tree, ais, lparen_token, Space.None); // ( - - try renderExpression(allocator, ais, tree, align_info.node, Space.None); - - if (align_info.bit_range) |bit_range| { - const colon1 = tree.prevToken(bit_range.start.firstToken()); - const colon2 = tree.prevToken(bit_range.end.firstToken()); - - try renderToken(tree, ais, colon1, Space.None); // : - try renderExpression(allocator, ais, tree, bit_range.start, Space.None); - try renderToken(tree, ais, colon2, Space.None); // : - try renderExpression(allocator, ais, tree, bit_range.end, Space.None); - - const rparen_token = tree.nextToken(bit_range.end.lastToken()); - try renderToken(tree, ais, rparen_token, Space.Space); // ) - } else { - const rparen_token = tree.nextToken(align_info.node.lastToken()); - try renderToken(tree, ais, rparen_token, Space.Space); 
// ) - } - } - if (slice_type.ptr_info.const_token) |const_token| { - try renderToken(tree, ais, const_token, Space.Space); - } - if (slice_type.ptr_info.volatile_token) |volatile_token| { - try renderToken(tree, ais, volatile_token, Space.Space); - } - return renderExpression(allocator, ais, tree, slice_type.rhs, space); - }, - - .ArrayInitializer, .ArrayInitializerDot => { - var rtoken: ast.TokenIndex = undefined; - var exprs: []*ast.Node = undefined; - const lhs: union(enum) { dot: ast.TokenIndex, node: *ast.Node } = switch (base.tag) { - .ArrayInitializerDot => blk: { - const casted = @fieldParentPtr(ast.Node.ArrayInitializerDot, "base", base); - rtoken = casted.rtoken; - exprs = casted.list(); - break :blk .{ .dot = casted.dot }; - }, - .ArrayInitializer => blk: { - const casted = @fieldParentPtr(ast.Node.ArrayInitializer, "base", base); - rtoken = casted.rtoken; - exprs = casted.list(); - break :blk .{ .node = casted.lhs }; - }, - else => unreachable, - }; - - const lbrace = switch (lhs) { - .dot => |dot| tree.nextToken(dot), - .node => |node| tree.nextToken(node.lastToken()), - }; - - switch (lhs) { - .dot => |dot| try renderToken(tree, ais, dot, Space.None), - .node => |node| try renderExpression(allocator, ais, tree, node, Space.None), - } - - if (exprs.len == 0) { - try renderToken(tree, ais, lbrace, Space.None); - return renderToken(tree, ais, rtoken, space); - } - - if (exprs.len == 1 and exprs[0].tag != .MultilineStringLiteral and tree.token_ids[exprs[0].*.lastToken() + 1] == .RBrace) { - const expr = exprs[0]; - - try renderToken(tree, ais, lbrace, Space.None); - try renderExpression(allocator, ais, tree, expr, Space.None); - return renderToken(tree, ais, rtoken, space); - } - - // scan to find row size - if (rowSize(tree, exprs, rtoken) != null) { - { - ais.pushIndentNextLine(); - defer ais.popIndent(); - try renderToken(tree, ais, lbrace, Space.Newline); - - var expr_index: usize = 0; - while (rowSize(tree, exprs[expr_index..], rtoken)) |row_size| { 
- const row_exprs = exprs[expr_index..]; - // A place to store the width of each expression and its column's maximum - var widths = try allocator.alloc(usize, row_exprs.len + row_size); - defer allocator.free(widths); - mem.set(usize, widths, 0); - - var expr_newlines = try allocator.alloc(bool, row_exprs.len); - defer allocator.free(expr_newlines); - mem.set(bool, expr_newlines, false); - - var expr_widths = widths[0 .. widths.len - row_size]; - var column_widths = widths[widths.len - row_size ..]; - - // Find next row with trailing comment (if any) to end the current section - var section_end = sec_end: { - var this_line_first_expr: usize = 0; - var this_line_size = rowSize(tree, row_exprs, rtoken); - for (row_exprs) |expr, i| { - // Ignore comment on first line of this section - if (i == 0 or tree.tokensOnSameLine(row_exprs[0].firstToken(), expr.lastToken())) continue; - // Track start of line containing comment - if (!tree.tokensOnSameLine(row_exprs[this_line_first_expr].firstToken(), expr.lastToken())) { - this_line_first_expr = i; - this_line_size = rowSize(tree, row_exprs[this_line_first_expr..], rtoken); - } - - const maybe_comma = expr.lastToken() + 1; - const maybe_comment = expr.lastToken() + 2; - if (maybe_comment < tree.token_ids.len) { - if (tree.token_ids[maybe_comma] == .Comma and - tree.token_ids[maybe_comment] == .LineComment and - tree.tokensOnSameLine(expr.lastToken(), maybe_comment)) - { - var comment_token_loc = tree.token_locs[maybe_comment]; - const comment_is_empty = mem.trimRight(u8, tree.tokenSliceLoc(comment_token_loc), " ").len == 2; - if (!comment_is_empty) { - // Found row ending in comment - break :sec_end i - this_line_size.? 
+ 1; - } - } - } - } - break :sec_end row_exprs.len; - }; - expr_index += section_end; - - const section_exprs = row_exprs[0..section_end]; - - // Null stream for counting the printed length of each expression - var line_find_stream = std.io.findByteWriter('\n', std.io.null_writer); - var counting_stream = std.io.countingWriter(line_find_stream.writer()); - var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, counting_stream.writer()); - - // Calculate size of columns in current section - var column_counter: usize = 0; - var single_line = true; - for (section_exprs) |expr, i| { - if (i + 1 < section_exprs.len) { - counting_stream.bytes_written = 0; - line_find_stream.byte_found = false; - try renderExpression(allocator, &auto_indenting_stream, tree, expr, Space.None); - const width = @intCast(usize, counting_stream.bytes_written); - expr_widths[i] = width; - expr_newlines[i] = line_find_stream.byte_found; - - if (!line_find_stream.byte_found) { - const column = column_counter % row_size; - column_widths[column] = std.math.max(column_widths[column], width); - - const expr_last_token = expr.*.lastToken() + 1; - const next_expr = section_exprs[i + 1]; - const loc = tree.tokenLocation(tree.token_locs[expr_last_token].start, next_expr.*.firstToken()); - - column_counter += 1; - - if (loc.line != 0) single_line = false; - } else { - single_line = false; - column_counter = 0; - } - } else { - counting_stream.bytes_written = 0; - try renderExpression(allocator, &auto_indenting_stream, tree, expr, Space.None); - const width = @intCast(usize, counting_stream.bytes_written); - expr_widths[i] = width; - expr_newlines[i] = line_find_stream.byte_found; - - if (!line_find_stream.byte_found) { - const column = column_counter % row_size; - column_widths[column] = std.math.max(column_widths[column], width); - } - break; - } - } - - // Render exprs in current section - column_counter = 0; - var last_col_index: usize = row_size - 1; - for (section_exprs) |expr, i| { - 
if (i + 1 < section_exprs.len) { - const next_expr = section_exprs[i + 1]; - try renderExpression(allocator, ais, tree, expr, Space.None); - - const comma = tree.nextToken(expr.*.lastToken()); - - if (column_counter != last_col_index) { - if (!expr_newlines[i] and !expr_newlines[i + 1]) { - // Neither the current or next expression is multiline - try renderToken(tree, ais, comma, Space.Space); // , - assert(column_widths[column_counter % row_size] >= expr_widths[i]); - const padding = column_widths[column_counter % row_size] - expr_widths[i]; - try ais.writer().writeByteNTimes(' ', padding); - - column_counter += 1; - continue; - } - } - if (single_line and row_size != 1) { - try renderToken(tree, ais, comma, Space.Space); // , - continue; - } - - column_counter = 0; - try renderToken(tree, ais, comma, Space.Newline); // , - try renderExtraNewline(tree, ais, next_expr); - } else { - const maybe_comma = tree.nextToken(expr.*.lastToken()); - if (tree.token_ids[maybe_comma] == .Comma) { - try renderExpression(allocator, ais, tree, expr, Space.None); // , - try renderToken(tree, ais, maybe_comma, Space.Newline); // , - } else { - try renderExpression(allocator, ais, tree, expr, Space.Comma); // , - } - } - } - - if (expr_index == exprs.len) { - break; - } - } - } - - return renderToken(tree, ais, rtoken, space); - } - - // Single line - try renderToken(tree, ais, lbrace, Space.Space); - for (exprs) |expr, i| { - if (i + 1 < exprs.len) { - const next_expr = exprs[i + 1]; - try renderExpression(allocator, ais, tree, expr, Space.None); - const comma = tree.nextToken(expr.*.lastToken()); - try renderToken(tree, ais, comma, Space.Space); // , - } else { - try renderExpression(allocator, ais, tree, expr, Space.Space); - } - } - - return renderToken(tree, ais, rtoken, space); - }, - - .StructInitializer, .StructInitializerDot => { - var rtoken: ast.TokenIndex = undefined; - var field_inits: []*ast.Node = undefined; - const lhs: union(enum) { dot: ast.TokenIndex, node: 
*ast.Node } = switch (base.tag) { - .StructInitializerDot => blk: { - const casted = @fieldParentPtr(ast.Node.StructInitializerDot, "base", base); - rtoken = casted.rtoken; - field_inits = casted.list(); - break :blk .{ .dot = casted.dot }; - }, - .StructInitializer => blk: { - const casted = @fieldParentPtr(ast.Node.StructInitializer, "base", base); - rtoken = casted.rtoken; - field_inits = casted.list(); - break :blk .{ .node = casted.lhs }; - }, - else => unreachable, - }; - - const lbrace = switch (lhs) { - .dot => |dot| tree.nextToken(dot), - .node => |node| tree.nextToken(node.lastToken()), - }; - - if (field_inits.len == 0) { - switch (lhs) { - .dot => |dot| try renderToken(tree, ais, dot, Space.None), - .node => |node| try renderExpression(allocator, ais, tree, node, Space.None), - } - - { - ais.pushIndentNextLine(); - defer ais.popIndent(); - try renderToken(tree, ais, lbrace, Space.None); - } - - return renderToken(tree, ais, rtoken, space); - } - - const src_has_trailing_comma = blk: { - const maybe_comma = tree.prevToken(rtoken); - break :blk tree.token_ids[maybe_comma] == .Comma; - }; - - const src_same_line = blk: { - const loc = tree.tokenLocation(tree.token_locs[lbrace].end, rtoken); - break :blk loc.line == 0; - }; - - const expr_outputs_one_line = blk: { - // render field expressions until a LF is found - for (field_inits) |field_init| { - var find_stream = std.io.findByteWriter('\n', std.io.null_writer); - var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, find_stream.writer()); - - try renderExpression(allocator, &auto_indenting_stream, tree, field_init, Space.None); - if (find_stream.byte_found) break :blk false; - } - break :blk true; - }; - - if (field_inits.len == 1) blk: { - if (field_inits[0].cast(ast.Node.FieldInitializer)) |field_init| { - switch (field_init.expr.tag) { - .StructInitializer, - .StructInitializerDot, - => break :blk, - else => {}, - } - } - - // if the expression outputs to multiline, make this struct 
multiline - if (!expr_outputs_one_line or src_has_trailing_comma) { - break :blk; - } - - switch (lhs) { - .dot => |dot| try renderToken(tree, ais, dot, Space.None), - .node => |node| try renderExpression(allocator, ais, tree, node, Space.None), - } - try renderToken(tree, ais, lbrace, Space.Space); - try renderExpression(allocator, ais, tree, field_inits[0], Space.Space); - return renderToken(tree, ais, rtoken, space); - } - - if (!src_has_trailing_comma and src_same_line and expr_outputs_one_line) { - // render all on one line, no trailing comma - switch (lhs) { - .dot => |dot| try renderToken(tree, ais, dot, Space.None), - .node => |node| try renderExpression(allocator, ais, tree, node, Space.None), - } - try renderToken(tree, ais, lbrace, Space.Space); - - for (field_inits) |field_init, i| { - if (i + 1 < field_inits.len) { - try renderExpression(allocator, ais, tree, field_init, Space.None); - - const comma = tree.nextToken(field_init.lastToken()); - try renderToken(tree, ais, comma, Space.Space); - } else { - try renderExpression(allocator, ais, tree, field_init, Space.Space); - } - } - - return renderToken(tree, ais, rtoken, space); - } - - { - switch (lhs) { - .dot => |dot| try renderToken(tree, ais, dot, Space.None), - .node => |node| try renderExpression(allocator, ais, tree, node, Space.None), - } - - ais.pushIndentNextLine(); - defer ais.popIndent(); - - try renderToken(tree, ais, lbrace, Space.Newline); - - for (field_inits) |field_init, i| { - if (i + 1 < field_inits.len) { - const next_field_init = field_inits[i + 1]; - try renderExpression(allocator, ais, tree, field_init, Space.None); - - const comma = tree.nextToken(field_init.lastToken()); - try renderToken(tree, ais, comma, Space.Newline); - - try renderExtraNewline(tree, ais, next_field_init); - } else { - try renderExpression(allocator, ais, tree, field_init, Space.Comma); - } - } - } - - return renderToken(tree, ais, rtoken, space); - }, - - .Call => { - const call = 
@fieldParentPtr(ast.Node.Call, "base", base); - if (call.async_token) |async_token| { - try renderToken(tree, ais, async_token, Space.Space); - } - - try renderExpression(allocator, ais, tree, call.lhs, Space.None); - - const lparen = tree.nextToken(call.lhs.lastToken()); - - if (call.params_len == 0) { - try renderToken(tree, ais, lparen, Space.None); - return renderToken(tree, ais, call.rtoken, space); - } - - const src_has_trailing_comma = blk: { - const maybe_comma = tree.prevToken(call.rtoken); - break :blk tree.token_ids[maybe_comma] == .Comma; - }; - - if (src_has_trailing_comma) { - { - ais.pushIndent(); - defer ais.popIndent(); - - try renderToken(tree, ais, lparen, Space.Newline); // ( - const params = call.params(); - for (params) |param_node, i| { - if (i + 1 < params.len) { - const next_node = params[i + 1]; - try renderExpression(allocator, ais, tree, param_node, Space.None); - - // Unindent the comma for multiline string literals - const maybe_multiline_string = param_node.firstToken(); - const is_multiline_string = tree.token_ids[maybe_multiline_string] == .MultilineStringLiteralLine; - if (is_multiline_string) ais.popIndent(); - defer if (is_multiline_string) ais.pushIndent(); - - const comma = tree.nextToken(param_node.lastToken()); - try renderToken(tree, ais, comma, Space.Newline); // , - try renderExtraNewline(tree, ais, next_node); - } else { - try renderExpression(allocator, ais, tree, param_node, Space.Comma); - } - } - } - return renderToken(tree, ais, call.rtoken, space); - } - - try renderToken(tree, ais, lparen, Space.None); // ( - - const params = call.params(); - for (params) |param_node, i| { - const maybe_comment = param_node.firstToken() - 1; - const maybe_multiline_string = param_node.firstToken(); - if (tree.token_ids[maybe_multiline_string] == .MultilineStringLiteralLine or tree.token_ids[maybe_comment] == .LineComment) { - ais.pushIndentOneShot(); - } - - try renderExpression(allocator, ais, tree, param_node, Space.None); - - if 
(i + 1 < params.len) { - const comma = tree.nextToken(param_node.lastToken()); - try renderToken(tree, ais, comma, Space.Space); - } - } - return renderToken(tree, ais, call.rtoken, space); // ) - }, - - .ArrayAccess => { - const suffix_op = base.castTag(.ArrayAccess).?; - - const lbracket = tree.nextToken(suffix_op.lhs.lastToken()); - const rbracket = tree.nextToken(suffix_op.index_expr.lastToken()); - - try renderExpression(allocator, ais, tree, suffix_op.lhs, Space.None); - try renderToken(tree, ais, lbracket, Space.None); // [ - - const starts_with_comment = tree.token_ids[lbracket + 1] == .LineComment; - const ends_with_comment = tree.token_ids[rbracket - 1] == .LineComment; - { - const new_space = if (ends_with_comment) Space.Newline else Space.None; - - ais.pushIndent(); - defer ais.popIndent(); - try renderExpression(allocator, ais, tree, suffix_op.index_expr, new_space); - } - if (starts_with_comment) try ais.maybeInsertNewline(); - return renderToken(tree, ais, rbracket, space); // ] - }, - - .Slice => { - const suffix_op = base.castTag(.Slice).?; - try renderExpression(allocator, ais, tree, suffix_op.lhs, Space.None); - - const lbracket = tree.prevToken(suffix_op.start.firstToken()); - const dotdot = tree.nextToken(suffix_op.start.lastToken()); - - const after_start_space_bool = nodeCausesSliceOpSpace(suffix_op.start) or - (if (suffix_op.end) |end| nodeCausesSliceOpSpace(end) else false); - const after_start_space = if (after_start_space_bool) Space.Space else Space.None; - const after_op_space = if (suffix_op.end != null) after_start_space else Space.None; - - try renderToken(tree, ais, lbracket, Space.None); // [ - try renderExpression(allocator, ais, tree, suffix_op.start, after_start_space); - try renderToken(tree, ais, dotdot, after_op_space); // .. 
- if (suffix_op.end) |end| { - const after_end_space = if (suffix_op.sentinel != null) Space.Space else Space.None; - try renderExpression(allocator, ais, tree, end, after_end_space); - } - if (suffix_op.sentinel) |sentinel| { - const colon = tree.prevToken(sentinel.firstToken()); - try renderToken(tree, ais, colon, Space.None); // : - try renderExpression(allocator, ais, tree, sentinel, Space.None); - } - return renderToken(tree, ais, suffix_op.rtoken, space); // ] - }, - - .Deref => { - const suffix_op = base.castTag(.Deref).?; - - try renderExpression(allocator, ais, tree, suffix_op.lhs, Space.None); - return renderToken(tree, ais, suffix_op.rtoken, space); // .* - }, - .UnwrapOptional => { - const suffix_op = base.castTag(.UnwrapOptional).?; - - try renderExpression(allocator, ais, tree, suffix_op.lhs, Space.None); - try renderToken(tree, ais, tree.prevToken(suffix_op.rtoken), Space.None); // . - return renderToken(tree, ais, suffix_op.rtoken, space); // ? - }, - - .Break => { - const flow_expr = base.castTag(.Break).?; - const maybe_rhs = flow_expr.getRHS(); - const maybe_label = flow_expr.getLabel(); - - if (maybe_label == null and maybe_rhs == null) { - return renderToken(tree, ais, flow_expr.ltoken, space); // break - } - - try renderToken(tree, ais, flow_expr.ltoken, Space.Space); // break - if (maybe_label) |label| { - const colon = tree.nextToken(flow_expr.ltoken); - try renderToken(tree, ais, colon, Space.None); // : - - if (maybe_rhs == null) { - return renderToken(tree, ais, label, space); // label - } - try renderToken(tree, ais, label, Space.Space); // label - } - return renderExpression(allocator, ais, tree, maybe_rhs.?, space); - }, - - .Continue => { - const flow_expr = base.castTag(.Continue).?; - if (flow_expr.getLabel()) |label| { - try renderToken(tree, ais, flow_expr.ltoken, Space.Space); // continue - const colon = tree.nextToken(flow_expr.ltoken); - try renderToken(tree, ais, colon, Space.None); // : - return renderToken(tree, ais, label, 
space); // label - } else { - return renderToken(tree, ais, flow_expr.ltoken, space); // continue - } - }, - - .Return => { - const flow_expr = base.castTag(.Return).?; - if (flow_expr.getRHS()) |rhs| { - try renderToken(tree, ais, flow_expr.ltoken, Space.Space); - return renderExpression(allocator, ais, tree, rhs, space); - } else { - return renderToken(tree, ais, flow_expr.ltoken, space); - } - }, - - .Payload => { - const payload = @fieldParentPtr(ast.Node.Payload, "base", base); - - try renderToken(tree, ais, payload.lpipe, Space.None); - try renderExpression(allocator, ais, tree, payload.error_symbol, Space.None); - return renderToken(tree, ais, payload.rpipe, space); - }, - - .PointerPayload => { - const payload = @fieldParentPtr(ast.Node.PointerPayload, "base", base); - - try renderToken(tree, ais, payload.lpipe, Space.None); - if (payload.ptr_token) |ptr_token| { - try renderToken(tree, ais, ptr_token, Space.None); - } - try renderExpression(allocator, ais, tree, payload.value_symbol, Space.None); - return renderToken(tree, ais, payload.rpipe, space); - }, - - .PointerIndexPayload => { - const payload = @fieldParentPtr(ast.Node.PointerIndexPayload, "base", base); - - try renderToken(tree, ais, payload.lpipe, Space.None); - if (payload.ptr_token) |ptr_token| { - try renderToken(tree, ais, ptr_token, Space.None); - } - try renderExpression(allocator, ais, tree, payload.value_symbol, Space.None); - - if (payload.index_symbol) |index_symbol| { - const comma = tree.nextToken(payload.value_symbol.lastToken()); - - try renderToken(tree, ais, comma, Space.Space); - try renderExpression(allocator, ais, tree, index_symbol, Space.None); - } - - return renderToken(tree, ais, payload.rpipe, space); - }, - - .GroupedExpression => { - const grouped_expr = @fieldParentPtr(ast.Node.GroupedExpression, "base", base); - - try renderToken(tree, ais, grouped_expr.lparen, Space.None); - { - ais.pushIndentOneShot(); - try renderExpression(allocator, ais, tree, grouped_expr.expr, 
Space.None); - } - return renderToken(tree, ais, grouped_expr.rparen, space); - }, - - .FieldInitializer => { - const field_init = @fieldParentPtr(ast.Node.FieldInitializer, "base", base); - - try renderToken(tree, ais, field_init.period_token, Space.None); // . - try renderToken(tree, ais, field_init.name_token, Space.Space); // name - try renderToken(tree, ais, tree.nextToken(field_init.name_token), Space.Space); // = - return renderExpression(allocator, ais, tree, field_init.expr, space); - }, - - .ContainerDecl => { - const container_decl = @fieldParentPtr(ast.Node.ContainerDecl, "base", base); - - if (container_decl.layout_token) |layout_token| { - try renderToken(tree, ais, layout_token, Space.Space); - } - - switch (container_decl.init_arg_expr) { - .None => { - try renderToken(tree, ais, container_decl.kind_token, Space.Space); // union - }, - .Enum => |enum_tag_type| { - try renderToken(tree, ais, container_decl.kind_token, Space.None); // union - - const lparen = tree.nextToken(container_decl.kind_token); - const enum_token = tree.nextToken(lparen); - - try renderToken(tree, ais, lparen, Space.None); // ( - try renderToken(tree, ais, enum_token, Space.None); // enum - - if (enum_tag_type) |expr| { - try renderToken(tree, ais, tree.nextToken(enum_token), Space.None); // ( - try renderExpression(allocator, ais, tree, expr, Space.None); - - const rparen = tree.nextToken(expr.lastToken()); - try renderToken(tree, ais, rparen, Space.None); // ) - try renderToken(tree, ais, tree.nextToken(rparen), Space.Space); // ) - } else { - try renderToken(tree, ais, tree.nextToken(enum_token), Space.Space); // ) - } - }, - .Type => |type_expr| { - try renderToken(tree, ais, container_decl.kind_token, Space.None); // union - - const lparen = tree.nextToken(container_decl.kind_token); - const rparen = tree.nextToken(type_expr.lastToken()); - - try renderToken(tree, ais, lparen, Space.None); // ( - try renderExpression(allocator, ais, tree, type_expr, Space.None); - try 
renderToken(tree, ais, rparen, Space.Space); // ) - }, - } - - if (container_decl.fields_and_decls_len == 0) { - { - ais.pushIndentNextLine(); - defer ais.popIndent(); - try renderToken(tree, ais, container_decl.lbrace_token, Space.None); // { - } - return renderToken(tree, ais, container_decl.rbrace_token, space); // } - } - - const src_has_trailing_comma = blk: { - var maybe_comma = tree.prevToken(container_decl.lastToken()); - // Doc comments for a field may also appear after the comma, eg. - // field_name: T, // comment attached to field_name - if (tree.token_ids[maybe_comma] == .DocComment) - maybe_comma = tree.prevToken(maybe_comma); - break :blk tree.token_ids[maybe_comma] == .Comma; - }; - - const fields_and_decls = container_decl.fieldsAndDecls(); - - // Check if the first declaration and the { are on the same line - const src_has_newline = !tree.tokensOnSameLine( - container_decl.lbrace_token, - fields_and_decls[0].firstToken(), - ); - - // We can only print all the elements in-line if all the - // declarations inside are fields - const src_has_only_fields = blk: { - for (fields_and_decls) |decl| { - if (decl.tag != .ContainerField) break :blk false; - } - break :blk true; - }; - - if (src_has_trailing_comma or !src_has_only_fields) { - // One declaration per line - ais.pushIndentNextLine(); - defer ais.popIndent(); - try renderToken(tree, ais, container_decl.lbrace_token, .Newline); // { - - for (fields_and_decls) |decl, i| { - try renderContainerDecl(allocator, ais, tree, decl, .Newline); - - if (i + 1 < fields_and_decls.len) { - try renderExtraNewline(tree, ais, fields_and_decls[i + 1]); - } - } - } else if (src_has_newline) { - // All the declarations on the same line, but place the items on - // their own line - try renderToken(tree, ais, container_decl.lbrace_token, .Newline); // { - - ais.pushIndent(); - defer ais.popIndent(); - - for (fields_and_decls) |decl, i| { - const space_after_decl: Space = if (i + 1 >= fields_and_decls.len) .Newline else 
.Space; - try renderContainerDecl(allocator, ais, tree, decl, space_after_decl); - } - } else { - // All the declarations on the same line - try renderToken(tree, ais, container_decl.lbrace_token, .Space); // { - - for (fields_and_decls) |decl| { - try renderContainerDecl(allocator, ais, tree, decl, .Space); - } - } - - return renderToken(tree, ais, container_decl.rbrace_token, space); // } - }, - - .ErrorSetDecl => { - const err_set_decl = @fieldParentPtr(ast.Node.ErrorSetDecl, "base", base); - - const lbrace = tree.nextToken(err_set_decl.error_token); - - if (err_set_decl.decls_len == 0) { - try renderToken(tree, ais, err_set_decl.error_token, Space.None); - try renderToken(tree, ais, lbrace, Space.None); - return renderToken(tree, ais, err_set_decl.rbrace_token, space); - } - - if (err_set_decl.decls_len == 1) blk: { - const node = err_set_decl.decls()[0]; - - // if there are any doc comments or same line comments - // don't try to put it all on one line - if (node.cast(ast.Node.ErrorTag)) |tag| { - if (tag.doc_comments != null) break :blk; - } else { - break :blk; - } - - try renderToken(tree, ais, err_set_decl.error_token, Space.None); // error - try renderToken(tree, ais, lbrace, Space.None); // { - try renderExpression(allocator, ais, tree, node, Space.None); - return renderToken(tree, ais, err_set_decl.rbrace_token, space); // } - } - - try renderToken(tree, ais, err_set_decl.error_token, Space.None); // error - - const src_has_trailing_comma = blk: { - const maybe_comma = tree.prevToken(err_set_decl.rbrace_token); - break :blk tree.token_ids[maybe_comma] == .Comma; - }; - - if (src_has_trailing_comma) { - { - ais.pushIndent(); - defer ais.popIndent(); - - try renderToken(tree, ais, lbrace, Space.Newline); // { - const decls = err_set_decl.decls(); - for (decls) |node, i| { - if (i + 1 < decls.len) { - try renderExpression(allocator, ais, tree, node, Space.None); - try renderToken(tree, ais, tree.nextToken(node.lastToken()), Space.Newline); // , - - try 
renderExtraNewline(tree, ais, decls[i + 1]); - } else { - try renderExpression(allocator, ais, tree, node, Space.Comma); - } - } - } - - return renderToken(tree, ais, err_set_decl.rbrace_token, space); // } - } else { - try renderToken(tree, ais, lbrace, Space.Space); // { - - const decls = err_set_decl.decls(); - for (decls) |node, i| { - if (i + 1 < decls.len) { - try renderExpression(allocator, ais, tree, node, Space.None); - - const comma_token = tree.nextToken(node.lastToken()); - assert(tree.token_ids[comma_token] == .Comma); - try renderToken(tree, ais, comma_token, Space.Space); // , - try renderExtraNewline(tree, ais, decls[i + 1]); - } else { - try renderExpression(allocator, ais, tree, node, Space.Space); - } - } - - return renderToken(tree, ais, err_set_decl.rbrace_token, space); // } - } - }, - - .ErrorTag => { - const tag = @fieldParentPtr(ast.Node.ErrorTag, "base", base); - - try renderDocComments(tree, ais, tag, tag.doc_comments); - return renderToken(tree, ais, tag.name_token, space); // name - }, - - .MultilineStringLiteral => { - const multiline_str_literal = @fieldParentPtr(ast.Node.MultilineStringLiteral, "base", base); - - { - const locked_indents = ais.lockOneShotIndent(); - defer { - var i: u8 = 0; - while (i < locked_indents) : (i += 1) ais.popIndent(); - } - try ais.maybeInsertNewline(); - - for (multiline_str_literal.lines()) |t| try renderToken(tree, ais, t, Space.None); - } - }, - - .BuiltinCall => { - const builtin_call = @fieldParentPtr(ast.Node.BuiltinCall, "base", base); - - // TODO remove after 0.7.0 release - if (mem.eql(u8, tree.tokenSlice(builtin_call.builtin_token), "@OpaqueType")) - return ais.writer().writeAll("opaque {}"); - - // TODO remove after 0.7.0 release - { - const params = builtin_call.paramsConst(); - if (mem.eql(u8, tree.tokenSlice(builtin_call.builtin_token), "@Type") and - params.len == 1) - { - if (params[0].castTag(.EnumLiteral)) |enum_literal| - if (mem.eql(u8, tree.tokenSlice(enum_literal.name), "Opaque")) - 
return ais.writer().writeAll("opaque {}"); - } - } - - try renderToken(tree, ais, builtin_call.builtin_token, Space.None); // @name - - const src_params_trailing_comma = blk: { - if (builtin_call.params_len == 0) break :blk false; - const last_node = builtin_call.params()[builtin_call.params_len - 1]; - const maybe_comma = tree.nextToken(last_node.lastToken()); - break :blk tree.token_ids[maybe_comma] == .Comma; - }; - - const lparen = tree.nextToken(builtin_call.builtin_token); - - if (!src_params_trailing_comma) { - try renderToken(tree, ais, lparen, Space.None); // ( - - // render all on one line, no trailing comma - const params = builtin_call.params(); - for (params) |param_node, i| { - const maybe_comment = param_node.firstToken() - 1; - if (param_node.*.tag == .MultilineStringLiteral or tree.token_ids[maybe_comment] == .LineComment) { - ais.pushIndentOneShot(); - } - try renderExpression(allocator, ais, tree, param_node, Space.None); - - if (i + 1 < params.len) { - const comma_token = tree.nextToken(param_node.lastToken()); - try renderToken(tree, ais, comma_token, Space.Space); // , - } - } - } else { - // one param per line - ais.pushIndent(); - defer ais.popIndent(); - try renderToken(tree, ais, lparen, Space.Newline); // ( - - for (builtin_call.params()) |param_node| { - try renderExpression(allocator, ais, tree, param_node, Space.Comma); - } - } - - return renderToken(tree, ais, builtin_call.rparen_token, space); // ) - }, - - .FnProto => { - const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", base); - - if (fn_proto.getVisibToken()) |visib_token_index| { - const visib_token = tree.token_ids[visib_token_index]; - assert(visib_token == .Keyword_pub or visib_token == .Keyword_export); - - try renderToken(tree, ais, visib_token_index, Space.Space); // pub - } - - if (fn_proto.getExternExportInlineToken()) |extern_export_inline_token| { - if (fn_proto.getIsExternPrototype() == null) - try renderToken(tree, ais, extern_export_inline_token, 
Space.Space); // extern/export/inline - } - - if (fn_proto.getLibName()) |lib_name| { - try renderExpression(allocator, ais, tree, lib_name, Space.Space); - } - - const lparen = if (fn_proto.getNameToken()) |name_token| blk: { - try renderToken(tree, ais, fn_proto.fn_token, Space.Space); // fn - try renderToken(tree, ais, name_token, Space.None); // name - break :blk tree.nextToken(name_token); - } else blk: { - try renderToken(tree, ais, fn_proto.fn_token, Space.Space); // fn - break :blk tree.nextToken(fn_proto.fn_token); - }; - assert(tree.token_ids[lparen] == .LParen); - - const rparen = tree.prevToken( - // the first token for the annotation expressions is the left - // parenthesis, hence the need for two prevToken - if (fn_proto.getAlignExpr()) |align_expr| - tree.prevToken(tree.prevToken(align_expr.firstToken())) - else if (fn_proto.getSectionExpr()) |section_expr| - tree.prevToken(tree.prevToken(section_expr.firstToken())) - else if (fn_proto.getCallconvExpr()) |callconv_expr| - tree.prevToken(tree.prevToken(callconv_expr.firstToken())) - else switch (fn_proto.return_type) { - .Explicit => |node| node.firstToken(), - .InferErrorSet => |node| tree.prevToken(node.firstToken()), - .Invalid => unreachable, - }, - ); - assert(tree.token_ids[rparen] == .RParen); - - const src_params_trailing_comma = blk: { - const maybe_comma = tree.token_ids[rparen - 1]; - break :blk maybe_comma == .Comma or maybe_comma == .LineComment; - }; - - if (!src_params_trailing_comma) { - try renderToken(tree, ais, lparen, Space.None); // ( - - // render all on one line, no trailing comma - for (fn_proto.params()) |param_decl, i| { - try renderParamDecl(allocator, ais, tree, param_decl, Space.None); - - if (i + 1 < fn_proto.params_len or fn_proto.getVarArgsToken() != null) { - const comma = tree.nextToken(param_decl.lastToken()); - try renderToken(tree, ais, comma, Space.Space); // , - } - } - if (fn_proto.getVarArgsToken()) |var_args_token| { - try renderToken(tree, ais, 
var_args_token, Space.None); - } - } else { - // one param per line - ais.pushIndent(); - defer ais.popIndent(); - try renderToken(tree, ais, lparen, Space.Newline); // ( - - for (fn_proto.params()) |param_decl| { - try renderParamDecl(allocator, ais, tree, param_decl, Space.Comma); - } - if (fn_proto.getVarArgsToken()) |var_args_token| { - try renderToken(tree, ais, var_args_token, Space.Comma); - } - } - - try renderToken(tree, ais, rparen, Space.Space); // ) - - if (fn_proto.getAlignExpr()) |align_expr| { - const align_rparen = tree.nextToken(align_expr.lastToken()); - const align_lparen = tree.prevToken(align_expr.firstToken()); - const align_kw = tree.prevToken(align_lparen); - - try renderToken(tree, ais, align_kw, Space.None); // align - try renderToken(tree, ais, align_lparen, Space.None); // ( - try renderExpression(allocator, ais, tree, align_expr, Space.None); - try renderToken(tree, ais, align_rparen, Space.Space); // ) - } - - if (fn_proto.getSectionExpr()) |section_expr| { - const section_rparen = tree.nextToken(section_expr.lastToken()); - const section_lparen = tree.prevToken(section_expr.firstToken()); - const section_kw = tree.prevToken(section_lparen); - - try renderToken(tree, ais, section_kw, Space.None); // section - try renderToken(tree, ais, section_lparen, Space.None); // ( - try renderExpression(allocator, ais, tree, section_expr, Space.None); - try renderToken(tree, ais, section_rparen, Space.Space); // ) - } - - if (fn_proto.getCallconvExpr()) |callconv_expr| { - const callconv_rparen = tree.nextToken(callconv_expr.lastToken()); - const callconv_lparen = tree.prevToken(callconv_expr.firstToken()); - const callconv_kw = tree.prevToken(callconv_lparen); - - try renderToken(tree, ais, callconv_kw, Space.None); // callconv - try renderToken(tree, ais, callconv_lparen, Space.None); // ( - try renderExpression(allocator, ais, tree, callconv_expr, Space.None); - try renderToken(tree, ais, callconv_rparen, Space.Space); // ) - } else if 
(fn_proto.getIsExternPrototype() != null) { - try ais.writer().writeAll("callconv(.C) "); - } else if (fn_proto.getIsAsync() != null) { - try ais.writer().writeAll("callconv(.Async) "); - } - - switch (fn_proto.return_type) { - .Explicit => |node| { - return renderExpression(allocator, ais, tree, node, space); - }, - .InferErrorSet => |node| { - try renderToken(tree, ais, tree.prevToken(node.firstToken()), Space.None); // ! - return renderExpression(allocator, ais, tree, node, space); - }, - .Invalid => unreachable, - } - }, - - .AnyFrameType => { - const anyframe_type = @fieldParentPtr(ast.Node.AnyFrameType, "base", base); - - if (anyframe_type.result) |result| { - try renderToken(tree, ais, anyframe_type.anyframe_token, Space.None); // anyframe - try renderToken(tree, ais, result.arrow_token, Space.None); // -> - return renderExpression(allocator, ais, tree, result.return_type, space); - } else { - return renderToken(tree, ais, anyframe_type.anyframe_token, space); // anyframe - } - }, - - .DocComment => unreachable, // doc comments are attached to nodes - - .Switch => { - const switch_node = @fieldParentPtr(ast.Node.Switch, "base", base); - - try renderToken(tree, ais, switch_node.switch_token, Space.Space); // switch - try renderToken(tree, ais, tree.nextToken(switch_node.switch_token), Space.None); // ( - - const rparen = tree.nextToken(switch_node.expr.lastToken()); - const lbrace = tree.nextToken(rparen); - - if (switch_node.cases_len == 0) { - try renderExpression(allocator, ais, tree, switch_node.expr, Space.None); - try renderToken(tree, ais, rparen, Space.Space); // ) - try renderToken(tree, ais, lbrace, Space.None); // { - return renderToken(tree, ais, switch_node.rbrace, space); // } - } - - try renderExpression(allocator, ais, tree, switch_node.expr, Space.None); - try renderToken(tree, ais, rparen, Space.Space); // ) - - { - ais.pushIndentNextLine(); - defer ais.popIndent(); - try renderToken(tree, ais, lbrace, Space.Newline); // { - - const cases = 
switch_node.cases(); - for (cases) |node, i| { - try renderExpression(allocator, ais, tree, node, Space.Comma); - - if (i + 1 < cases.len) { - try renderExtraNewline(tree, ais, cases[i + 1]); - } - } - } - - return renderToken(tree, ais, switch_node.rbrace, space); // } - }, - - .SwitchCase => { - const switch_case = @fieldParentPtr(ast.Node.SwitchCase, "base", base); - - assert(switch_case.items_len != 0); - const src_has_trailing_comma = blk: { - const last_node = switch_case.items()[switch_case.items_len - 1]; - const maybe_comma = tree.nextToken(last_node.lastToken()); - break :blk tree.token_ids[maybe_comma] == .Comma; - }; - - if (switch_case.items_len == 1 or !src_has_trailing_comma) { - const items = switch_case.items(); - for (items) |node, i| { - if (i + 1 < items.len) { - try renderExpression(allocator, ais, tree, node, Space.None); - - const comma_token = tree.nextToken(node.lastToken()); - try renderToken(tree, ais, comma_token, Space.Space); // , - try renderExtraNewline(tree, ais, items[i + 1]); - } else { - try renderExpression(allocator, ais, tree, node, Space.Space); - } - } - } else { - const items = switch_case.items(); - for (items) |node, i| { - if (i + 1 < items.len) { - try renderExpression(allocator, ais, tree, node, Space.None); - - const comma_token = tree.nextToken(node.lastToken()); - try renderToken(tree, ais, comma_token, Space.Newline); // , - try renderExtraNewline(tree, ais, items[i + 1]); - } else { - try renderExpression(allocator, ais, tree, node, Space.Comma); - } - } - } - - try renderToken(tree, ais, switch_case.arrow_token, Space.Space); // => - - if (switch_case.payload) |payload| { - try renderExpression(allocator, ais, tree, payload, Space.Space); - } - - return renderExpression(allocator, ais, tree, switch_case.expr, space); - }, - .SwitchElse => { - const switch_else = @fieldParentPtr(ast.Node.SwitchElse, "base", base); - return renderToken(tree, ais, switch_else.token, space); - }, - .Else => { - const else_node = 
@fieldParentPtr(ast.Node.Else, "base", base); - - const body_is_block = nodeIsBlock(else_node.body); - const same_line = body_is_block or tree.tokensOnSameLine(else_node.else_token, else_node.body.lastToken()); - - const after_else_space = if (same_line or else_node.payload != null) Space.Space else Space.Newline; - try renderToken(tree, ais, else_node.else_token, after_else_space); - - if (else_node.payload) |payload| { - const payload_space = if (same_line) Space.Space else Space.Newline; - try renderExpression(allocator, ais, tree, payload, payload_space); - } - - if (same_line) { - return renderExpression(allocator, ais, tree, else_node.body, space); - } else { - ais.pushIndent(); - defer ais.popIndent(); - return renderExpression(allocator, ais, tree, else_node.body, space); - } - }, - - .While => { - const while_node = @fieldParentPtr(ast.Node.While, "base", base); - - if (while_node.label) |label| { - try renderToken(tree, ais, label, Space.None); // label - try renderToken(tree, ais, tree.nextToken(label), Space.Space); // : - } - - if (while_node.inline_token) |inline_token| { - try renderToken(tree, ais, inline_token, Space.Space); // inline - } - - try renderToken(tree, ais, while_node.while_token, Space.Space); // while - try renderToken(tree, ais, tree.nextToken(while_node.while_token), Space.None); // ( - try renderExpression(allocator, ais, tree, while_node.condition, Space.None); - - const cond_rparen = tree.nextToken(while_node.condition.lastToken()); - - const body_is_block = nodeIsBlock(while_node.body); - - var block_start_space: Space = undefined; - var after_body_space: Space = undefined; - - if (body_is_block) { - block_start_space = Space.BlockStart; - after_body_space = if (while_node.@"else" == null) space else Space.SpaceOrOutdent; - } else if (tree.tokensOnSameLine(cond_rparen, while_node.body.lastToken())) { - block_start_space = Space.Space; - after_body_space = if (while_node.@"else" == null) space else Space.Space; - } else { - 
block_start_space = Space.Newline; - after_body_space = if (while_node.@"else" == null) space else Space.Newline; - } - - { - const rparen_space = if (while_node.payload != null or while_node.continue_expr != null) Space.Space else block_start_space; - try renderToken(tree, ais, cond_rparen, rparen_space); // ) - } - - if (while_node.payload) |payload| { - const payload_space = if (while_node.continue_expr != null) Space.Space else block_start_space; - try renderExpression(allocator, ais, tree, payload, payload_space); - } - - if (while_node.continue_expr) |continue_expr| { - const rparen = tree.nextToken(continue_expr.lastToken()); - const lparen = tree.prevToken(continue_expr.firstToken()); - const colon = tree.prevToken(lparen); - - try renderToken(tree, ais, colon, Space.Space); // : - try renderToken(tree, ais, lparen, Space.None); // ( - - try renderExpression(allocator, ais, tree, continue_expr, Space.None); - - try renderToken(tree, ais, rparen, block_start_space); // ) - } - - { - if (!body_is_block) ais.pushIndent(); - defer if (!body_is_block) ais.popIndent(); - try renderExpression(allocator, ais, tree, while_node.body, after_body_space); - } - - if (while_node.@"else") |@"else"| { - return renderExpression(allocator, ais, tree, &@"else".base, space); - } - }, - - .For => { - const for_node = @fieldParentPtr(ast.Node.For, "base", base); - - if (for_node.label) |label| { - try renderToken(tree, ais, label, Space.None); // label - try renderToken(tree, ais, tree.nextToken(label), Space.Space); // : - } - - if (for_node.inline_token) |inline_token| { - try renderToken(tree, ais, inline_token, Space.Space); // inline - } - - try renderToken(tree, ais, for_node.for_token, Space.Space); // for - try renderToken(tree, ais, tree.nextToken(for_node.for_token), Space.None); // ( - try renderExpression(allocator, ais, tree, for_node.array_expr, Space.None); - - const rparen = tree.nextToken(for_node.array_expr.lastToken()); - - const body_is_block = 
for_node.body.tag.isBlock(); - const src_one_line_to_body = !body_is_block and tree.tokensOnSameLine(rparen, for_node.body.firstToken()); - const body_on_same_line = body_is_block or src_one_line_to_body; - - try renderToken(tree, ais, rparen, Space.Space); // ) - - const space_after_payload = if (body_on_same_line) Space.Space else Space.Newline; - try renderExpression(allocator, ais, tree, for_node.payload, space_after_payload); // |x| - - const space_after_body = blk: { - if (for_node.@"else") |@"else"| { - const src_one_line_to_else = tree.tokensOnSameLine(rparen, @"else".firstToken()); - if (body_is_block or src_one_line_to_else) { - break :blk Space.Space; - } else { - break :blk Space.Newline; - } - } else { - break :blk space; - } - }; - - { - if (!body_on_same_line) ais.pushIndent(); - defer if (!body_on_same_line) ais.popIndent(); - try renderExpression(allocator, ais, tree, for_node.body, space_after_body); // { body } - } - - if (for_node.@"else") |@"else"| { - return renderExpression(allocator, ais, tree, &@"else".base, space); // else - } - }, - - .If => { - const if_node = @fieldParentPtr(ast.Node.If, "base", base); - - const lparen = tree.nextToken(if_node.if_token); - const rparen = tree.nextToken(if_node.condition.lastToken()); - - try renderToken(tree, ais, if_node.if_token, Space.Space); // if - try renderToken(tree, ais, lparen, Space.None); // ( - - try renderExpression(allocator, ais, tree, if_node.condition, Space.None); // condition - - const body_is_if_block = if_node.body.tag == .If; - const body_is_block = nodeIsBlock(if_node.body); - - if (body_is_if_block) { - try renderExtraNewline(tree, ais, if_node.body); - } else if (body_is_block) { - const after_rparen_space = if (if_node.payload == null) Space.BlockStart else Space.Space; - try renderToken(tree, ais, rparen, after_rparen_space); // ) - - if (if_node.payload) |payload| { - try renderExpression(allocator, ais, tree, payload, Space.BlockStart); // |x| - } - - if (if_node.@"else") 
|@"else"| { - try renderExpression(allocator, ais, tree, if_node.body, Space.SpaceOrOutdent); - return renderExpression(allocator, ais, tree, &@"else".base, space); - } else { - return renderExpression(allocator, ais, tree, if_node.body, space); - } - } - - const src_has_newline = !tree.tokensOnSameLine(rparen, if_node.body.lastToken()); - - if (src_has_newline) { - const after_rparen_space = if (if_node.payload == null) Space.Newline else Space.Space; - - { - ais.pushIndent(); - defer ais.popIndent(); - try renderToken(tree, ais, rparen, after_rparen_space); // ) - } - - if (if_node.payload) |payload| { - try renderExpression(allocator, ais, tree, payload, Space.Newline); - } - - if (if_node.@"else") |@"else"| { - const else_is_block = nodeIsBlock(@"else".body); - - { - ais.pushIndent(); - defer ais.popIndent(); - try renderExpression(allocator, ais, tree, if_node.body, Space.Newline); - } - - if (else_is_block) { - try renderToken(tree, ais, @"else".else_token, Space.Space); // else - - if (@"else".payload) |payload| { - try renderExpression(allocator, ais, tree, payload, Space.Space); - } - - return renderExpression(allocator, ais, tree, @"else".body, space); - } else { - const after_else_space = if (@"else".payload == null) Space.Newline else Space.Space; - try renderToken(tree, ais, @"else".else_token, after_else_space); // else - - if (@"else".payload) |payload| { - try renderExpression(allocator, ais, tree, payload, Space.Newline); - } - - ais.pushIndent(); - defer ais.popIndent(); - return renderExpression(allocator, ais, tree, @"else".body, space); - } - } else { - ais.pushIndent(); - defer ais.popIndent(); - return renderExpression(allocator, ais, tree, if_node.body, space); - } - } - - // Single line if statement - - try renderToken(tree, ais, rparen, Space.Space); // ) - - if (if_node.payload) |payload| { - try renderExpression(allocator, ais, tree, payload, Space.Space); - } - - if (if_node.@"else") |@"else"| { - try renderExpression(allocator, ais, 
tree, if_node.body, Space.Space); - try renderToken(tree, ais, @"else".else_token, Space.Space); - - if (@"else".payload) |payload| { - try renderExpression(allocator, ais, tree, payload, Space.Space); - } - - return renderExpression(allocator, ais, tree, @"else".body, space); - } else { - return renderExpression(allocator, ais, tree, if_node.body, space); - } - }, - - .Asm => { - const asm_node = @fieldParentPtr(ast.Node.Asm, "base", base); - - try renderToken(tree, ais, asm_node.asm_token, Space.Space); // asm - - if (asm_node.volatile_token) |volatile_token| { - try renderToken(tree, ais, volatile_token, Space.Space); // volatile - try renderToken(tree, ais, tree.nextToken(volatile_token), Space.None); // ( - } else { - try renderToken(tree, ais, tree.nextToken(asm_node.asm_token), Space.None); // ( - } - - asmblk: { - ais.pushIndent(); - defer ais.popIndent(); - - if (asm_node.outputs.len == 0 and asm_node.inputs.len == 0 and asm_node.clobbers.len == 0) { - try renderExpression(allocator, ais, tree, asm_node.template, Space.None); - break :asmblk; - } - - try renderExpression(allocator, ais, tree, asm_node.template, Space.Newline); - - ais.setIndentDelta(asm_indent_delta); - defer ais.setIndentDelta(indent_delta); - - const colon1 = tree.nextToken(asm_node.template.lastToken()); - - const colon2 = if (asm_node.outputs.len == 0) blk: { - try renderToken(tree, ais, colon1, Space.Newline); // : - - break :blk tree.nextToken(colon1); - } else blk: { - try renderToken(tree, ais, colon1, Space.Space); // : - - ais.pushIndent(); - defer ais.popIndent(); - - for (asm_node.outputs) |*asm_output, i| { - if (i + 1 < asm_node.outputs.len) { - const next_asm_output = asm_node.outputs[i + 1]; - try renderAsmOutput(allocator, ais, tree, asm_output, Space.None); - - const comma = tree.prevToken(next_asm_output.firstToken()); - try renderToken(tree, ais, comma, Space.Newline); // , - try renderExtraNewlineToken(tree, ais, next_asm_output.firstToken()); - } else if 
(asm_node.inputs.len == 0 and asm_node.clobbers.len == 0) { - try renderAsmOutput(allocator, ais, tree, asm_output, Space.Newline); - break :asmblk; - } else { - try renderAsmOutput(allocator, ais, tree, asm_output, Space.Newline); - const comma_or_colon = tree.nextToken(asm_output.lastToken()); - break :blk switch (tree.token_ids[comma_or_colon]) { - .Comma => tree.nextToken(comma_or_colon), - else => comma_or_colon, - }; - } - } - unreachable; - }; - - const colon3 = if (asm_node.inputs.len == 0) blk: { - try renderToken(tree, ais, colon2, Space.Newline); // : - break :blk tree.nextToken(colon2); - } else blk: { - try renderToken(tree, ais, colon2, Space.Space); // : - ais.pushIndent(); - defer ais.popIndent(); - for (asm_node.inputs) |*asm_input, i| { - if (i + 1 < asm_node.inputs.len) { - const next_asm_input = &asm_node.inputs[i + 1]; - try renderAsmInput(allocator, ais, tree, asm_input, Space.None); - - const comma = tree.prevToken(next_asm_input.firstToken()); - try renderToken(tree, ais, comma, Space.Newline); // , - try renderExtraNewlineToken(tree, ais, next_asm_input.firstToken()); - } else if (asm_node.clobbers.len == 0) { - try renderAsmInput(allocator, ais, tree, asm_input, Space.Newline); - break :asmblk; - } else { - try renderAsmInput(allocator, ais, tree, asm_input, Space.Newline); - const comma_or_colon = tree.nextToken(asm_input.lastToken()); - break :blk switch (tree.token_ids[comma_or_colon]) { - .Comma => tree.nextToken(comma_or_colon), - else => comma_or_colon, - }; - } - } - unreachable; - }; - - try renderToken(tree, ais, colon3, Space.Space); // : - ais.pushIndent(); - defer ais.popIndent(); - for (asm_node.clobbers) |clobber_node, i| { - if (i + 1 >= asm_node.clobbers.len) { - try renderExpression(allocator, ais, tree, clobber_node, Space.Newline); - break :asmblk; - } else { - try renderExpression(allocator, ais, tree, clobber_node, Space.None); - const comma = tree.nextToken(clobber_node.lastToken()); - try renderToken(tree, ais, 
comma, Space.Space); // , - } - } - } - - return renderToken(tree, ais, asm_node.rparen, space); - }, - - .EnumLiteral => { - const enum_literal = @fieldParentPtr(ast.Node.EnumLiteral, "base", base); - - try renderToken(tree, ais, enum_literal.dot, Space.None); // . - return renderToken(tree, ais, enum_literal.name, space); // name - }, - - .ContainerField, - .Root, - .VarDecl, - .Use, - .TestDecl, - => unreachable, + //.Nosuspend => { + // const nosuspend_node = @fieldParentPtr(ast.Node.Nosuspend, "base", base); + // if (mem.eql(u8, tree.tokenSlice(nosuspend_node.nosuspend_token), "noasync")) { + // // TODO: remove this + // try ais.writer().writeAll("nosuspend "); + // } else { + // try renderToken(ais, tree, nosuspend_node.nosuspend_token, Space.Space); + // } + // return renderExpression(allocator, ais, tree, nosuspend_node.expr, space); + //}, + + //.Suspend => { + // const suspend_node = @fieldParentPtr(ast.Node.Suspend, "base", base); + + // if (suspend_node.body) |body| { + // try renderToken(ais, tree, suspend_node.suspend_token, Space.Space); + // return renderExpression(allocator, ais, tree, body, space); + // } else { + // return renderToken(ais, tree, suspend_node.suspend_token, space); + // } + //}, + + //.Catch => { + // const infix_op_node = @fieldParentPtr(ast.Node.Catch, "base", base); + + // const op_space = Space.Space; + // try renderExpression(allocator, ais, tree, infix_op_node.lhs, op_space); + + // const after_op_space = blk: { + // const same_line = tree.tokensOnSameLine(infix_op_node.op_token, tree.nextToken(infix_op_node.op_token)); + // break :blk if (same_line) op_space else Space.Newline; + // }; + + // try renderToken(ais, tree, infix_op_node.op_token, after_op_space); + + // if (infix_op_node.payload) |payload| { + // try renderExpression(allocator, ais, tree, payload, Space.Space); + // } + + // ais.pushIndentOneShot(); + // return renderExpression(allocator, ais, tree, infix_op_node.rhs, space); + //}, + + //.Add, + //.AddWrap, + 
//.ArrayCat, + //.ArrayMult, + //.Assign, + //.AssignBitAnd, + //.AssignBitOr, + //.AssignBitShiftLeft, + //.AssignBitShiftRight, + //.AssignBitXor, + //.AssignDiv, + //.AssignSub, + //.AssignSubWrap, + //.AssignMod, + //.AssignAdd, + //.AssignAddWrap, + //.AssignMul, + //.AssignMulWrap, + //.BangEqual, + //.BitAnd, + //.BitOr, + //.BitShiftLeft, + //.BitShiftRight, + //.BitXor, + //.BoolAnd, + //.BoolOr, + //.Div, + //.EqualEqual, + //.ErrorUnion, + //.GreaterOrEqual, + //.GreaterThan, + //.LessOrEqual, + //.LessThan, + //.MergeErrorSets, + //.Mod, + //.Mul, + //.MulWrap, + //.Period, + //.Range, + //.Sub, + //.SubWrap, + //.OrElse, + //=> { + // const infix_op_node = @fieldParentPtr(ast.Node.SimpleInfixOp, "base", base); + + // const op_space = switch (base.tag) { + // .Period, .ErrorUnion, .Range => Space.None, + // else => Space.Space, + // }; + // try renderExpression(allocator, ais, tree, infix_op_node.lhs, op_space); + + // const after_op_space = blk: { + // const loc = tree.tokenLocation(tree.token_locs[infix_op_node.op_token].end, tree.nextToken(infix_op_node.op_token)); + // break :blk if (loc.line == 0) op_space else Space.Newline; + // }; + + // { + // ais.pushIndent(); + // defer ais.popIndent(); + // try renderToken(ais, tree, infix_op_node.op_token, after_op_space); + // } + // ais.pushIndentOneShot(); + // return renderExpression(allocator, ais, tree, infix_op_node.rhs, space); + //}, + + //.BitNot, + //.BoolNot, + //.Negation, + //.NegationWrap, + //.OptionalType, + //.AddressOf, + //=> { + // const casted_node = @fieldParentPtr(ast.Node.SimplePrefixOp, "base", base); + // try renderToken(ais, tree, casted_node.op_token, Space.None); + // return renderExpression(allocator, ais, tree, casted_node.rhs, space); + //}, + + //.Try, + //.Resume, + //.Await, + //=> { + // const casted_node = @fieldParentPtr(ast.Node.SimplePrefixOp, "base", base); + // try renderToken(ais, tree, casted_node.op_token, Space.Space); + // return renderExpression(allocator, 
ais, tree, casted_node.rhs, space); + //}, + + //.ArrayType => { + // const array_type = @fieldParentPtr(ast.Node.ArrayType, "base", base); + // return renderArrayType( + // allocator, + // ais, + // tree, + // array_type.op_token, + // array_type.rhs, + // array_type.len_expr, + // null, + // space, + // ); + //}, + //.ArrayTypeSentinel => { + // const array_type = @fieldParentPtr(ast.Node.ArrayTypeSentinel, "base", base); + // return renderArrayType( + // allocator, + // ais, + // tree, + // array_type.op_token, + // array_type.rhs, + // array_type.len_expr, + // array_type.sentinel, + // space, + // ); + //}, + + //.PtrType => { + // const ptr_type = @fieldParentPtr(ast.Node.PtrType, "base", base); + // const op_tok_id = tree.token_tags[ptr_type.op_token]; + // switch (op_tok_id) { + // .Asterisk, .AsteriskAsterisk => try ais.writer().writeByte('*'), + // .LBracket => if (tree.token_tags[ptr_type.op_token + 2] == .Identifier) + // try ais.writer().writeAll("[*c") + // else + // try ais.writer().writeAll("[*"), + // else => unreachable, + // } + // if (ptr_type.ptr_info.sentinel) |sentinel| { + // const colon_token = tree.prevToken(sentinel.firstToken()); + // try renderToken(ais, tree, colon_token, Space.None); // : + // const sentinel_space = switch (op_tok_id) { + // .LBracket => Space.None, + // else => Space.Space, + // }; + // try renderExpression(allocator, ais, tree, sentinel, sentinel_space); + // } + // switch (op_tok_id) { + // .Asterisk, .AsteriskAsterisk => {}, + // .LBracket => try ais.writer().writeByte(']'), + // else => unreachable, + // } + // if (ptr_type.ptr_info.allowzero_token) |allowzero_token| { + // try renderToken(ais, tree, allowzero_token, Space.Space); // allowzero + // } + // if (ptr_type.ptr_info.align_info) |align_info| { + // const lparen_token = tree.prevToken(align_info.node.firstToken()); + // const align_token = tree.prevToken(lparen_token); + + // try renderToken(ais, tree, align_token, Space.None); // align + // try 
renderToken(ais, tree, lparen_token, Space.None); // ( + + // try renderExpression(allocator, ais, tree, align_info.node, Space.None); + + // if (align_info.bit_range) |bit_range| { + // const colon1 = tree.prevToken(bit_range.start.firstToken()); + // const colon2 = tree.prevToken(bit_range.end.firstToken()); + + // try renderToken(ais, tree, colon1, Space.None); // : + // try renderExpression(allocator, ais, tree, bit_range.start, Space.None); + // try renderToken(ais, tree, colon2, Space.None); // : + // try renderExpression(allocator, ais, tree, bit_range.end, Space.None); + + // const rparen_token = tree.nextToken(bit_range.end.lastToken()); + // try renderToken(ais, tree, rparen_token, Space.Space); // ) + // } else { + // const rparen_token = tree.nextToken(align_info.node.lastToken()); + // try renderToken(ais, tree, rparen_token, Space.Space); // ) + // } + // } + // if (ptr_type.ptr_info.const_token) |const_token| { + // try renderToken(ais, tree, const_token, Space.Space); // const + // } + // if (ptr_type.ptr_info.volatile_token) |volatile_token| { + // try renderToken(ais, tree, volatile_token, Space.Space); // volatile + // } + // return renderExpression(allocator, ais, tree, ptr_type.rhs, space); + //}, + + //.SliceType => { + // const slice_type = @fieldParentPtr(ast.Node.SliceType, "base", base); + // try renderToken(ais, tree, slice_type.op_token, Space.None); // [ + // if (slice_type.ptr_info.sentinel) |sentinel| { + // const colon_token = tree.prevToken(sentinel.firstToken()); + // try renderToken(ais, tree, colon_token, Space.None); // : + // try renderExpression(allocator, ais, tree, sentinel, Space.None); + // try renderToken(ais, tree, tree.nextToken(sentinel.lastToken()), Space.None); // ] + // } else { + // try renderToken(ais, tree, tree.nextToken(slice_type.op_token), Space.None); // ] + // } + + // if (slice_type.ptr_info.allowzero_token) |allowzero_token| { + // try renderToken(ais, tree, allowzero_token, Space.Space); // allowzero + 
// } + // if (slice_type.ptr_info.align_info) |align_info| { + // const lparen_token = tree.prevToken(align_info.node.firstToken()); + // const align_token = tree.prevToken(lparen_token); + + // try renderToken(ais, tree, align_token, Space.None); // align + // try renderToken(ais, tree, lparen_token, Space.None); // ( + + // try renderExpression(allocator, ais, tree, align_info.node, Space.None); + + // if (align_info.bit_range) |bit_range| { + // const colon1 = tree.prevToken(bit_range.start.firstToken()); + // const colon2 = tree.prevToken(bit_range.end.firstToken()); + + // try renderToken(ais, tree, colon1, Space.None); // : + // try renderExpression(allocator, ais, tree, bit_range.start, Space.None); + // try renderToken(ais, tree, colon2, Space.None); // : + // try renderExpression(allocator, ais, tree, bit_range.end, Space.None); + + // const rparen_token = tree.nextToken(bit_range.end.lastToken()); + // try renderToken(ais, tree, rparen_token, Space.Space); // ) + // } else { + // const rparen_token = tree.nextToken(align_info.node.lastToken()); + // try renderToken(ais, tree, rparen_token, Space.Space); // ) + // } + // } + // if (slice_type.ptr_info.const_token) |const_token| { + // try renderToken(ais, tree, const_token, Space.Space); + // } + // if (slice_type.ptr_info.volatile_token) |volatile_token| { + // try renderToken(ais, tree, volatile_token, Space.Space); + // } + // return renderExpression(allocator, ais, tree, slice_type.rhs, space); + //}, + + //.ArrayInitializer, .ArrayInitializerDot => { + // var rtoken: ast.TokenIndex = undefined; + // var exprs: []ast.Node.Index = undefined; + // const lhs: union(enum) { dot: ast.TokenIndex, node: ast.Node.Index } = switch (base.tag) { + // .ArrayInitializerDot => blk: { + // const casted = @fieldParentPtr(ast.Node.ArrayInitializerDot, "base", base); + // rtoken = casted.rtoken; + // exprs = casted.list(); + // break :blk .{ .dot = casted.dot }; + // }, + // .ArrayInitializer => blk: { + // const casted 
= @fieldParentPtr(ast.Node.ArrayInitializer, "base", base); + // rtoken = casted.rtoken; + // exprs = casted.list(); + // break :blk .{ .node = casted.lhs }; + // }, + // else => unreachable, + // }; + + // const lbrace = switch (lhs) { + // .dot => |dot| tree.nextToken(dot), + // .node => |node| tree.nextToken(node.lastToken()), + // }; + + // switch (lhs) { + // .dot => |dot| try renderToken(ais, tree, dot, Space.None), + // .node => |node| try renderExpression(allocator, ais, tree, node, Space.None), + // } + + // if (exprs.len == 0) { + // try renderToken(ais, tree, lbrace, Space.None); + // return renderToken(ais, tree, rtoken, space); + // } + + // if (exprs.len == 1 and exprs[0].tag != .MultilineStringLiteral and tree.token_tags[exprs[0].*.lastToken() + 1] == .RBrace) { + // const expr = exprs[0]; + + // try renderToken(ais, tree, lbrace, Space.None); + // try renderExpression(allocator, ais, tree, expr, Space.None); + // return renderToken(ais, tree, rtoken, space); + // } + + // // scan to find row size + // if (rowSize(tree, exprs, rtoken) != null) { + // { + // ais.pushIndentNextLine(); + // defer ais.popIndent(); + // try renderToken(ais, tree, lbrace, Space.Newline); + + // var expr_index: usize = 0; + // while (rowSize(tree, exprs[expr_index..], rtoken)) |row_size| { + // const row_exprs = exprs[expr_index..]; + // // A place to store the width of each expression and its column's maximum + // var widths = try allocator.alloc(usize, row_exprs.len + row_size); + // defer allocator.free(widths); + // mem.set(usize, widths, 0); + + // var expr_newlines = try allocator.alloc(bool, row_exprs.len); + // defer allocator.free(expr_newlines); + // mem.set(bool, expr_newlines, false); + + // var expr_widths = widths[0 .. 
widths.len - row_size]; + // var column_widths = widths[widths.len - row_size ..]; + + // // Find next row with trailing comment (if any) to end the current section + // var section_end = sec_end: { + // var this_line_first_expr: usize = 0; + // var this_line_size = rowSize(tree, row_exprs, rtoken); + // for (row_exprs) |expr, i| { + // // Ignore comment on first line of this section + // if (i == 0 or tree.tokensOnSameLine(row_exprs[0].firstToken(), expr.lastToken())) continue; + // // Track start of line containing comment + // if (!tree.tokensOnSameLine(row_exprs[this_line_first_expr].firstToken(), expr.lastToken())) { + // this_line_first_expr = i; + // this_line_size = rowSize(tree, row_exprs[this_line_first_expr..], rtoken); + // } + + // const maybe_comma = expr.lastToken() + 1; + // const maybe_comment = expr.lastToken() + 2; + // if (maybe_comment < tree.token_tags.len) { + // if (tree.token_tags[maybe_comma] == .Comma and + // tree.token_tags[maybe_comment] == .LineComment and + // tree.tokensOnSameLine(expr.lastToken(), maybe_comment)) + // { + // var comment_token_loc = tree.token_locs[maybe_comment]; + // const comment_is_empty = mem.trimRight(u8, tree.tokenSliceLoc(comment_token_loc), " ").len == 2; + // if (!comment_is_empty) { + // // Found row ending in comment + // break :sec_end i - this_line_size.? 
+ 1; + // } + // } + // } + // } + // break :sec_end row_exprs.len; + // }; + // expr_index += section_end; + + // const section_exprs = row_exprs[0..section_end]; + + // // Null stream for counting the printed length of each expression + // var line_find_stream = std.io.findByteWriter('\n', std.io.null_writer); + // var counting_stream = std.io.countingWriter(line_find_stream.writer()); + // var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, counting_stream.writer()); + + // // Calculate size of columns in current section + // var column_counter: usize = 0; + // var single_line = true; + // for (section_exprs) |expr, i| { + // if (i + 1 < section_exprs.len) { + // counting_stream.bytes_written = 0; + // line_find_stream.byte_found = false; + // try renderExpression(allocator, &auto_indenting_stream, tree, expr, Space.None); + // const width = @intCast(usize, counting_stream.bytes_written); + // expr_widths[i] = width; + // expr_newlines[i] = line_find_stream.byte_found; + + // if (!line_find_stream.byte_found) { + // const column = column_counter % row_size; + // column_widths[column] = std.math.max(column_widths[column], width); + + // const expr_last_token = expr.*.lastToken() + 1; + // const next_expr = section_exprs[i + 1]; + // const loc = tree.tokenLocation(tree.token_locs[expr_last_token].start, next_expr.*.firstToken()); + + // column_counter += 1; + + // if (loc.line != 0) single_line = false; + // } else { + // single_line = false; + // column_counter = 0; + // } + // } else { + // counting_stream.bytes_written = 0; + // try renderExpression(allocator, &auto_indenting_stream, tree, expr, Space.None); + // const width = @intCast(usize, counting_stream.bytes_written); + // expr_widths[i] = width; + // expr_newlines[i] = line_find_stream.byte_found; + + // if (!line_find_stream.byte_found) { + // const column = column_counter % row_size; + // column_widths[column] = std.math.max(column_widths[column], width); + // } + // break; + // } + // 
} + + // // Render exprs in current section + // column_counter = 0; + // var last_col_index: usize = row_size - 1; + // for (section_exprs) |expr, i| { + // if (i + 1 < section_exprs.len) { + // const next_expr = section_exprs[i + 1]; + // try renderExpression(allocator, ais, tree, expr, Space.None); + + // const comma = tree.nextToken(expr.*.lastToken()); + + // if (column_counter != last_col_index) { + // if (!expr_newlines[i] and !expr_newlines[i + 1]) { + // // Neither the current or next expression is multiline + // try renderToken(ais, tree, comma, Space.Space); // , + // assert(column_widths[column_counter % row_size] >= expr_widths[i]); + // const padding = column_widths[column_counter % row_size] - expr_widths[i]; + // try ais.writer().writeByteNTimes(' ', padding); + + // column_counter += 1; + // continue; + // } + // } + // if (single_line and row_size != 1) { + // try renderToken(ais, tree, comma, Space.Space); // , + // continue; + // } + + // column_counter = 0; + // try renderToken(ais, tree, comma, Space.Newline); // , + // try renderExtraNewline(tree, ais, next_expr); + // } else { + // const maybe_comma = tree.nextToken(expr.*.lastToken()); + // if (tree.token_tags[maybe_comma] == .Comma) { + // try renderExpression(allocator, ais, tree, expr, Space.None); // , + // try renderToken(ais, tree, maybe_comma, Space.Newline); // , + // } else { + // try renderExpression(allocator, ais, tree, expr, Space.Comma); // , + // } + // } + // } + + // if (expr_index == exprs.len) { + // break; + // } + // } + // } + + // return renderToken(ais, tree, rtoken, space); + // } + + // // Single line + // try renderToken(ais, tree, lbrace, Space.Space); + // for (exprs) |expr, i| { + // if (i + 1 < exprs.len) { + // const next_expr = exprs[i + 1]; + // try renderExpression(allocator, ais, tree, expr, Space.None); + // const comma = tree.nextToken(expr.*.lastToken()); + // try renderToken(ais, tree, comma, Space.Space); // , + // } else { + // try 
renderExpression(allocator, ais, tree, expr, Space.Space); + // } + // } + + // return renderToken(ais, tree, rtoken, space); + //}, + + //.StructInitializer, .StructInitializerDot => { + // var rtoken: ast.TokenIndex = undefined; + // var field_inits: []ast.Node.Index = undefined; + // const lhs: union(enum) { dot: ast.TokenIndex, node: ast.Node.Index } = switch (base.tag) { + // .StructInitializerDot => blk: { + // const casted = @fieldParentPtr(ast.Node.StructInitializerDot, "base", base); + // rtoken = casted.rtoken; + // field_inits = casted.list(); + // break :blk .{ .dot = casted.dot }; + // }, + // .StructInitializer => blk: { + // const casted = @fieldParentPtr(ast.Node.StructInitializer, "base", base); + // rtoken = casted.rtoken; + // field_inits = casted.list(); + // break :blk .{ .node = casted.lhs }; + // }, + // else => unreachable, + // }; + + // const lbrace = switch (lhs) { + // .dot => |dot| tree.nextToken(dot), + // .node => |node| tree.nextToken(node.lastToken()), + // }; + + // if (field_inits.len == 0) { + // switch (lhs) { + // .dot => |dot| try renderToken(ais, tree, dot, Space.None), + // .node => |node| try renderExpression(allocator, ais, tree, node, Space.None), + // } + + // { + // ais.pushIndentNextLine(); + // defer ais.popIndent(); + // try renderToken(ais, tree, lbrace, Space.None); + // } + + // return renderToken(ais, tree, rtoken, space); + // } + + // const src_has_trailing_comma = blk: { + // const maybe_comma = tree.prevToken(rtoken); + // break :blk tree.token_tags[maybe_comma] == .Comma; + // }; + + // const src_same_line = blk: { + // const loc = tree.tokenLocation(tree.token_locs[lbrace].end, rtoken); + // break :blk loc.line == 0; + // }; + + // const expr_outputs_one_line = blk: { + // // render field expressions until a LF is found + // for (field_inits) |field_init| { + // var find_stream = std.io.findByteWriter('\n', std.io.null_writer); + // var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, 
find_stream.writer()); + + // try renderExpression(allocator, &auto_indenting_stream, tree, field_init, Space.None); + // if (find_stream.byte_found) break :blk false; + // } + // break :blk true; + // }; + + // if (field_inits.len == 1) blk: { + // if (field_inits[0].cast(ast.Node.FieldInitializer)) |field_init| { + // switch (field_init.expr.tag) { + // .StructInitializer, + // .StructInitializerDot, + // => break :blk, + // else => {}, + // } + // } + + // // if the expression outputs to multiline, make this struct multiline + // if (!expr_outputs_one_line or src_has_trailing_comma) { + // break :blk; + // } + + // switch (lhs) { + // .dot => |dot| try renderToken(ais, tree, dot, Space.None), + // .node => |node| try renderExpression(allocator, ais, tree, node, Space.None), + // } + // try renderToken(ais, tree, lbrace, Space.Space); + // try renderExpression(allocator, ais, tree, field_inits[0], Space.Space); + // return renderToken(ais, tree, rtoken, space); + // } + + // if (!src_has_trailing_comma and src_same_line and expr_outputs_one_line) { + // // render all on one line, no trailing comma + // switch (lhs) { + // .dot => |dot| try renderToken(ais, tree, dot, Space.None), + // .node => |node| try renderExpression(allocator, ais, tree, node, Space.None), + // } + // try renderToken(ais, tree, lbrace, Space.Space); + + // for (field_inits) |field_init, i| { + // if (i + 1 < field_inits.len) { + // try renderExpression(allocator, ais, tree, field_init, Space.None); + + // const comma = tree.nextToken(field_init.lastToken()); + // try renderToken(ais, tree, comma, Space.Space); + // } else { + // try renderExpression(allocator, ais, tree, field_init, Space.Space); + // } + // } + + // return renderToken(ais, tree, rtoken, space); + // } + + // { + // switch (lhs) { + // .dot => |dot| try renderToken(ais, tree, dot, Space.None), + // .node => |node| try renderExpression(allocator, ais, tree, node, Space.None), + // } + + // ais.pushIndentNextLine(); + // defer 
ais.popIndent(); + + // try renderToken(ais, tree, lbrace, Space.Newline); + + // for (field_inits) |field_init, i| { + // if (i + 1 < field_inits.len) { + // const next_field_init = field_inits[i + 1]; + // try renderExpression(allocator, ais, tree, field_init, Space.None); + + // const comma = tree.nextToken(field_init.lastToken()); + // try renderToken(ais, tree, comma, Space.Newline); + + // try renderExtraNewline(tree, ais, next_field_init); + // } else { + // try renderExpression(allocator, ais, tree, field_init, Space.Comma); + // } + // } + // } + + // return renderToken(ais, tree, rtoken, space); + //}, + + //.Call => { + // const call = @fieldParentPtr(ast.Node.Call, "base", base); + // if (call.async_token) |async_token| { + // try renderToken(ais, tree, async_token, Space.Space); + // } + + // try renderExpression(allocator, ais, tree, call.lhs, Space.None); + + // const lparen = tree.nextToken(call.lhs.lastToken()); + + // if (call.params_len == 0) { + // try renderToken(ais, tree, lparen, Space.None); + // return renderToken(ais, tree, call.rtoken, space); + // } + + // const src_has_trailing_comma = blk: { + // const maybe_comma = tree.prevToken(call.rtoken); + // break :blk tree.token_tags[maybe_comma] == .Comma; + // }; + + // if (src_has_trailing_comma) { + // { + // ais.pushIndent(); + // defer ais.popIndent(); + + // try renderToken(ais, tree, lparen, Space.Newline); // ( + // const params = call.params(); + // for (params) |param_node, i| { + // if (i + 1 < params.len) { + // const next_node = params[i + 1]; + // try renderExpression(allocator, ais, tree, param_node, Space.None); + + // // Unindent the comma for multiline string literals + // const maybe_multiline_string = param_node.firstToken(); + // const is_multiline_string = tree.token_tags[maybe_multiline_string] == .MultilineStringLiteralLine; + // if (is_multiline_string) ais.popIndent(); + // defer if (is_multiline_string) ais.pushIndent(); + + // const comma = 
tree.nextToken(param_node.lastToken()); + // try renderToken(ais, tree, comma, Space.Newline); // , + // try renderExtraNewline(tree, ais, next_node); + // } else { + // try renderExpression(allocator, ais, tree, param_node, Space.Comma); + // } + // } + // } + // return renderToken(ais, tree, call.rtoken, space); + // } + + // try renderToken(ais, tree, lparen, Space.None); // ( + + // const params = call.params(); + // for (params) |param_node, i| { + // const maybe_comment = param_node.firstToken() - 1; + // const maybe_multiline_string = param_node.firstToken(); + // if (tree.token_tags[maybe_multiline_string] == .MultilineStringLiteralLine or tree.token_tags[maybe_comment] == .LineComment) { + // ais.pushIndentOneShot(); + // } + + // try renderExpression(allocator, ais, tree, param_node, Space.None); + + // if (i + 1 < params.len) { + // const comma = tree.nextToken(param_node.lastToken()); + // try renderToken(ais, tree, comma, Space.Space); + // } + // } + // return renderToken(ais, tree, call.rtoken, space); // ) + //}, + + //.ArrayAccess => { + // const suffix_op = base.castTag(.ArrayAccess).?; + + // const lbracket = tree.nextToken(suffix_op.lhs.lastToken()); + // const rbracket = tree.nextToken(suffix_op.index_expr.lastToken()); + + // try renderExpression(allocator, ais, tree, suffix_op.lhs, Space.None); + // try renderToken(ais, tree, lbracket, Space.None); // [ + + // const starts_with_comment = tree.token_tags[lbracket + 1] == .LineComment; + // const ends_with_comment = tree.token_tags[rbracket - 1] == .LineComment; + // { + // const new_space = if (ends_with_comment) Space.Newline else Space.None; + + // ais.pushIndent(); + // defer ais.popIndent(); + // try renderExpression(allocator, ais, tree, suffix_op.index_expr, new_space); + // } + // if (starts_with_comment) try ais.maybeInsertNewline(); + // return renderToken(ais, tree, rbracket, space); // ] + //}, + + //.Slice => { + // const suffix_op = base.castTag(.Slice).?; + // try 
renderExpression(allocator, ais, tree, suffix_op.lhs, Space.None); + + // const lbracket = tree.prevToken(suffix_op.start.firstToken()); + // const dotdot = tree.nextToken(suffix_op.start.lastToken()); + + // const after_start_space_bool = nodeCausesSliceOpSpace(suffix_op.start) or + // (if (suffix_op.end) |end| nodeCausesSliceOpSpace(end) else false); + // const after_start_space = if (after_start_space_bool) Space.Space else Space.None; + // const after_op_space = if (suffix_op.end != null) after_start_space else Space.None; + + // try renderToken(ais, tree, lbracket, Space.None); // [ + // try renderExpression(allocator, ais, tree, suffix_op.start, after_start_space); + // try renderToken(ais, tree, dotdot, after_op_space); // .. + // if (suffix_op.end) |end| { + // const after_end_space = if (suffix_op.sentinel != null) Space.Space else Space.None; + // try renderExpression(allocator, ais, tree, end, after_end_space); + // } + // if (suffix_op.sentinel) |sentinel| { + // const colon = tree.prevToken(sentinel.firstToken()); + // try renderToken(ais, tree, colon, Space.None); // : + // try renderExpression(allocator, ais, tree, sentinel, Space.None); + // } + // return renderToken(ais, tree, suffix_op.rtoken, space); // ] + //}, + + //.Deref => { + // const suffix_op = base.castTag(.Deref).?; + + // try renderExpression(allocator, ais, tree, suffix_op.lhs, Space.None); + // return renderToken(ais, tree, suffix_op.rtoken, space); // .* + //}, + //.UnwrapOptional => { + // const suffix_op = base.castTag(.UnwrapOptional).?; + + // try renderExpression(allocator, ais, tree, suffix_op.lhs, Space.None); + // try renderToken(ais, tree, tree.prevToken(suffix_op.rtoken), Space.None); // . + // return renderToken(ais, tree, suffix_op.rtoken, space); // ? 
+ //}, + + //.Break => { + // const flow_expr = base.castTag(.Break).?; + // const maybe_rhs = flow_expr.getRHS(); + // const maybe_label = flow_expr.getLabel(); + + // if (maybe_label == null and maybe_rhs == null) { + // return renderToken(ais, tree, flow_expr.ltoken, space); // break + // } + + // try renderToken(ais, tree, flow_expr.ltoken, Space.Space); // break + // if (maybe_label) |label| { + // const colon = tree.nextToken(flow_expr.ltoken); + // try renderToken(ais, tree, colon, Space.None); // : + + // if (maybe_rhs == null) { + // return renderToken(ais, tree, label, space); // label + // } + // try renderToken(ais, tree, label, Space.Space); // label + // } + // return renderExpression(allocator, ais, tree, maybe_rhs.?, space); + //}, + + //.Continue => { + // const flow_expr = base.castTag(.Continue).?; + // if (flow_expr.getLabel()) |label| { + // try renderToken(ais, tree, flow_expr.ltoken, Space.Space); // continue + // const colon = tree.nextToken(flow_expr.ltoken); + // try renderToken(ais, tree, colon, Space.None); // : + // return renderToken(ais, tree, label, space); // label + // } else { + // return renderToken(ais, tree, flow_expr.ltoken, space); // continue + // } + //}, + + //.Return => { + // const flow_expr = base.castTag(.Return).?; + // if (flow_expr.getRHS()) |rhs| { + // try renderToken(ais, tree, flow_expr.ltoken, Space.Space); + // return renderExpression(allocator, ais, tree, rhs, space); + // } else { + // return renderToken(ais, tree, flow_expr.ltoken, space); + // } + //}, + + //.Payload => { + // const payload = @fieldParentPtr(ast.Node.Payload, "base", base); + + // try renderToken(ais, tree, payload.lpipe, Space.None); + // try renderExpression(allocator, ais, tree, payload.error_symbol, Space.None); + // return renderToken(ais, tree, payload.rpipe, space); + //}, + + //.PointerPayload => { + // const payload = @fieldParentPtr(ast.Node.PointerPayload, "base", base); + + // try renderToken(ais, tree, payload.lpipe, 
Space.None); + // if (payload.ptr_token) |ptr_token| { + // try renderToken(ais, tree, ptr_token, Space.None); + // } + // try renderExpression(allocator, ais, tree, payload.value_symbol, Space.None); + // return renderToken(ais, tree, payload.rpipe, space); + //}, + + //.PointerIndexPayload => { + // const payload = @fieldParentPtr(ast.Node.PointerIndexPayload, "base", base); + + // try renderToken(ais, tree, payload.lpipe, Space.None); + // if (payload.ptr_token) |ptr_token| { + // try renderToken(ais, tree, ptr_token, Space.None); + // } + // try renderExpression(allocator, ais, tree, payload.value_symbol, Space.None); + + // if (payload.index_symbol) |index_symbol| { + // const comma = tree.nextToken(payload.value_symbol.lastToken()); + + // try renderToken(ais, tree, comma, Space.Space); + // try renderExpression(allocator, ais, tree, index_symbol, Space.None); + // } + + // return renderToken(ais, tree, payload.rpipe, space); + //}, + + //.GroupedExpression => { + // const grouped_expr = @fieldParentPtr(ast.Node.GroupedExpression, "base", base); + + // try renderToken(ais, tree, grouped_expr.lparen, Space.None); + // { + // ais.pushIndentOneShot(); + // try renderExpression(allocator, ais, tree, grouped_expr.expr, Space.None); + // } + // return renderToken(ais, tree, grouped_expr.rparen, space); + //}, + + //.FieldInitializer => { + // const field_init = @fieldParentPtr(ast.Node.FieldInitializer, "base", base); + + // try renderToken(ais, tree, field_init.period_token, Space.None); // . 
+ // try renderToken(ais, tree, field_init.name_token, Space.Space); // name + // try renderToken(ais, tree, tree.nextToken(field_init.name_token), Space.Space); // = + // return renderExpression(allocator, ais, tree, field_init.expr, space); + //}, + + //.ContainerDecl => { + // const container_decl = @fieldParentPtr(ast.Node.ContainerDecl, "base", base); + + // if (container_decl.layout_token) |layout_token| { + // try renderToken(ais, tree, layout_token, Space.Space); + // } + + // switch (container_decl.init_arg_expr) { + // .None => { + // try renderToken(ais, tree, container_decl.kind_token, Space.Space); // union + // }, + // .Enum => |enum_tag_type| { + // try renderToken(ais, tree, container_decl.kind_token, Space.None); // union + + // const lparen = tree.nextToken(container_decl.kind_token); + // const enum_token = tree.nextToken(lparen); + + // try renderToken(ais, tree, lparen, Space.None); // ( + // try renderToken(ais, tree, enum_token, Space.None); // enum + + // if (enum_tag_type) |expr| { + // try renderToken(ais, tree, tree.nextToken(enum_token), Space.None); // ( + // try renderExpression(allocator, ais, tree, expr, Space.None); + + // const rparen = tree.nextToken(expr.lastToken()); + // try renderToken(ais, tree, rparen, Space.None); // ) + // try renderToken(ais, tree, tree.nextToken(rparen), Space.Space); // ) + // } else { + // try renderToken(ais, tree, tree.nextToken(enum_token), Space.Space); // ) + // } + // }, + // .Type => |type_expr| { + // try renderToken(ais, tree, container_decl.kind_token, Space.None); // union + + // const lparen = tree.nextToken(container_decl.kind_token); + // const rparen = tree.nextToken(type_expr.lastToken()); + + // try renderToken(ais, tree, lparen, Space.None); // ( + // try renderExpression(allocator, ais, tree, type_expr, Space.None); + // try renderToken(ais, tree, rparen, Space.Space); // ) + // }, + // } + + // if (container_decl.fields_and_decls_len == 0) { + // { + // ais.pushIndentNextLine(); + 
// defer ais.popIndent(); + // try renderToken(ais, tree, container_decl.lbrace_token, Space.None); // { + // } + // return renderToken(ais, tree, container_decl.rbrace_token, space); // } + // } + + // const src_has_trailing_comma = blk: { + // var maybe_comma = tree.prevToken(container_decl.lastToken()); + // // Doc comments for a field may also appear after the comma, eg. + // // field_name: T, // comment attached to field_name + // if (tree.token_tags[maybe_comma] == .DocComment) + // maybe_comma = tree.prevToken(maybe_comma); + // break :blk tree.token_tags[maybe_comma] == .Comma; + // }; + + // const fields_and_decls = container_decl.fieldsAndDecls(); + + // // Check if the first declaration and the { are on the same line + // const src_has_newline = !tree.tokensOnSameLine( + // container_decl.lbrace_token, + // fields_and_decls[0].firstToken(), + // ); + + // // We can only print all the elements in-line if all the + // // declarations inside are fields + // const src_has_only_fields = blk: { + // for (fields_and_decls) |decl| { + // if (decl.tag != .ContainerField) break :blk false; + // } + // break :blk true; + // }; + + // if (src_has_trailing_comma or !src_has_only_fields) { + // // One declaration per line + // ais.pushIndentNextLine(); + // defer ais.popIndent(); + // try renderToken(ais, tree, container_decl.lbrace_token, .Newline); // { + + // for (fields_and_decls) |decl, i| { + // try renderContainerDecl(allocator, ais, tree, decl, .Newline); + + // if (i + 1 < fields_and_decls.len) { + // try renderExtraNewline(tree, ais, fields_and_decls[i + 1]); + // } + // } + // } else if (src_has_newline) { + // // All the declarations on the same line, but place the items on + // // their own line + // try renderToken(ais, tree, container_decl.lbrace_token, .Newline); // { + + // ais.pushIndent(); + // defer ais.popIndent(); + + // for (fields_and_decls) |decl, i| { + // const space_after_decl: Space = if (i + 1 >= fields_and_decls.len) .Newline else 
.Space; + // try renderContainerDecl(allocator, ais, tree, decl, space_after_decl); + // } + // } else { + // // All the declarations on the same line + // try renderToken(ais, tree, container_decl.lbrace_token, .Space); // { + + // for (fields_and_decls) |decl| { + // try renderContainerDecl(allocator, ais, tree, decl, .Space); + // } + // } + + // return renderToken(ais, tree, container_decl.rbrace_token, space); // } + //}, + + //.ErrorSetDecl => { + // const err_set_decl = @fieldParentPtr(ast.Node.ErrorSetDecl, "base", base); + + // const lbrace = tree.nextToken(err_set_decl.error_token); + + // if (err_set_decl.decls_len == 0) { + // try renderToken(ais, tree, err_set_decl.error_token, Space.None); + // try renderToken(ais, tree, lbrace, Space.None); + // return renderToken(ais, tree, err_set_decl.rbrace_token, space); + // } + + // if (err_set_decl.decls_len == 1) blk: { + // const node = err_set_decl.decls()[0]; + + // // if there are any doc comments or same line comments + // // don't try to put it all on one line + // if (node.cast(ast.Node.ErrorTag)) |tag| { + // if (tag.doc_comments != null) break :blk; + // } else { + // break :blk; + // } + + // try renderToken(ais, tree, err_set_decl.error_token, Space.None); // error + // try renderToken(ais, tree, lbrace, Space.None); // { + // try renderExpression(allocator, ais, tree, node, Space.None); + // return renderToken(ais, tree, err_set_decl.rbrace_token, space); // } + // } + + // try renderToken(ais, tree, err_set_decl.error_token, Space.None); // error + + // const src_has_trailing_comma = blk: { + // const maybe_comma = tree.prevToken(err_set_decl.rbrace_token); + // break :blk tree.token_tags[maybe_comma] == .Comma; + // }; + + // if (src_has_trailing_comma) { + // { + // ais.pushIndent(); + // defer ais.popIndent(); + + // try renderToken(ais, tree, lbrace, Space.Newline); // { + // const decls = err_set_decl.decls(); + // for (decls) |node, i| { + // if (i + 1 < decls.len) { + // try 
renderExpression(allocator, ais, tree, node, Space.None); + // try renderToken(ais, tree, tree.nextToken(node.lastToken()), Space.Newline); // , + + // try renderExtraNewline(tree, ais, decls[i + 1]); + // } else { + // try renderExpression(allocator, ais, tree, node, Space.Comma); + // } + // } + // } + + // return renderToken(ais, tree, err_set_decl.rbrace_token, space); // } + // } else { + // try renderToken(ais, tree, lbrace, Space.Space); // { + + // const decls = err_set_decl.decls(); + // for (decls) |node, i| { + // if (i + 1 < decls.len) { + // try renderExpression(allocator, ais, tree, node, Space.None); + + // const comma_token = tree.nextToken(node.lastToken()); + // assert(tree.token_tags[comma_token] == .Comma); + // try renderToken(ais, tree, comma_token, Space.Space); // , + // try renderExtraNewline(tree, ais, decls[i + 1]); + // } else { + // try renderExpression(allocator, ais, tree, node, Space.Space); + // } + // } + + // return renderToken(ais, tree, err_set_decl.rbrace_token, space); // } + // } + //}, + + //.ErrorTag => { + // const tag = @fieldParentPtr(ast.Node.ErrorTag, "base", base); + + // try renderDocComments(tree, ais, tag, tag.doc_comments); + // return renderToken(ais, tree, tag.name_token, space); // name + //}, + + //.MultilineStringLiteral => { + // const multiline_str_literal = @fieldParentPtr(ast.Node.MultilineStringLiteral, "base", base); + + // { + // const locked_indents = ais.lockOneShotIndent(); + // defer { + // var i: u8 = 0; + // while (i < locked_indents) : (i += 1) ais.popIndent(); + // } + // try ais.maybeInsertNewline(); + + // for (multiline_str_literal.lines()) |t| try renderToken(ais, tree, t, Space.None); + // } + //}, + + //.BuiltinCall => { + // const builtin_call = @fieldParentPtr(ast.Node.BuiltinCall, "base", base); + + // // TODO remove after 0.7.0 release + // if (mem.eql(u8, tree.tokenSlice(builtin_call.builtin_token), "@OpaqueType")) + // return ais.writer().writeAll("opaque {}"); + + // // TODO remove 
after 0.7.0 release + // { + // const params = builtin_call.paramsConst(); + // if (mem.eql(u8, tree.tokenSlice(builtin_call.builtin_token), "@Type") and + // params.len == 1) + // { + // if (params[0].castTag(.EnumLiteral)) |enum_literal| + // if (mem.eql(u8, tree.tokenSlice(enum_literal.name), "Opaque")) + // return ais.writer().writeAll("opaque {}"); + // } + // } + + // try renderToken(ais, tree, builtin_call.builtin_token, Space.None); // @name + + // const src_params_trailing_comma = blk: { + // if (builtin_call.params_len == 0) break :blk false; + // const last_node = builtin_call.params()[builtin_call.params_len - 1]; + // const maybe_comma = tree.nextToken(last_node.lastToken()); + // break :blk tree.token_tags[maybe_comma] == .Comma; + // }; + + // const lparen = tree.nextToken(builtin_call.builtin_token); + + // if (!src_params_trailing_comma) { + // try renderToken(ais, tree, lparen, Space.None); // ( + + // // render all on one line, no trailing comma + // const params = builtin_call.params(); + // for (params) |param_node, i| { + // const maybe_comment = param_node.firstToken() - 1; + // if (param_node.*.tag == .MultilineStringLiteral or tree.token_tags[maybe_comment] == .LineComment) { + // ais.pushIndentOneShot(); + // } + // try renderExpression(allocator, ais, tree, param_node, Space.None); + + // if (i + 1 < params.len) { + // const comma_token = tree.nextToken(param_node.lastToken()); + // try renderToken(ais, tree, comma_token, Space.Space); // , + // } + // } + // } else { + // // one param per line + // ais.pushIndent(); + // defer ais.popIndent(); + // try renderToken(ais, tree, lparen, Space.Newline); // ( + + // for (builtin_call.params()) |param_node| { + // try renderExpression(allocator, ais, tree, param_node, Space.Comma); + // } + // } + + // return renderToken(ais, tree, builtin_call.rparen_token, space); // ) + //}, + + //.FnProto => { + // const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", base); + + // if 
(fn_proto.getVisibToken()) |visib_token_index| { + // const visib_token = tree.token_tags[visib_token_index]; + // assert(visib_token == .Keyword_pub or visib_token == .Keyword_export); + + // try renderToken(ais, tree, visib_token_index, Space.Space); // pub + // } + + // if (fn_proto.getExternExportInlineToken()) |extern_export_inline_token| { + // if (fn_proto.getIsExternPrototype() == null) + // try renderToken(ais, tree, extern_export_inline_token, Space.Space); // extern/export/inline + // } + + // if (fn_proto.getLibName()) |lib_name| { + // try renderExpression(allocator, ais, tree, lib_name, Space.Space); + // } + + // const lparen = if (fn_proto.getNameToken()) |name_token| blk: { + // try renderToken(ais, tree, fn_proto.fn_token, Space.Space); // fn + // try renderToken(ais, tree, name_token, Space.None); // name + // break :blk tree.nextToken(name_token); + // } else blk: { + // try renderToken(ais, tree, fn_proto.fn_token, Space.Space); // fn + // break :blk tree.nextToken(fn_proto.fn_token); + // }; + // assert(tree.token_tags[lparen] == .LParen); + + // const rparen = tree.prevToken( + // // the first token for the annotation expressions is the left + // // parenthesis, hence the need for two prevToken + // if (fn_proto.getAlignExpr()) |align_expr| + // tree.prevToken(tree.prevToken(align_expr.firstToken())) + // else if (fn_proto.getSectionExpr()) |section_expr| + // tree.prevToken(tree.prevToken(section_expr.firstToken())) + // else if (fn_proto.getCallconvExpr()) |callconv_expr| + // tree.prevToken(tree.prevToken(callconv_expr.firstToken())) + // else switch (fn_proto.return_type) { + // .Explicit => |node| node.firstToken(), + // .InferErrorSet => |node| tree.prevToken(node.firstToken()), + // .Invalid => unreachable, + // }, + // ); + // assert(tree.token_tags[rparen] == .RParen); + + // const src_params_trailing_comma = blk: { + // const maybe_comma = tree.token_tags[rparen - 1]; + // break :blk maybe_comma == .Comma or maybe_comma == 
.LineComment; + // }; + + // if (!src_params_trailing_comma) { + // try renderToken(ais, tree, lparen, Space.None); // ( + + // // render all on one line, no trailing comma + // for (fn_proto.params()) |param_decl, i| { + // try renderParamDecl(allocator, ais, tree, param_decl, Space.None); + + // if (i + 1 < fn_proto.params_len or fn_proto.getVarArgsToken() != null) { + // const comma = tree.nextToken(param_decl.lastToken()); + // try renderToken(ais, tree, comma, Space.Space); // , + // } + // } + // if (fn_proto.getVarArgsToken()) |var_args_token| { + // try renderToken(ais, tree, var_args_token, Space.None); + // } + // } else { + // // one param per line + // ais.pushIndent(); + // defer ais.popIndent(); + // try renderToken(ais, tree, lparen, Space.Newline); // ( + + // for (fn_proto.params()) |param_decl| { + // try renderParamDecl(allocator, ais, tree, param_decl, Space.Comma); + // } + // if (fn_proto.getVarArgsToken()) |var_args_token| { + // try renderToken(ais, tree, var_args_token, Space.Comma); + // } + // } + + // try renderToken(ais, tree, rparen, Space.Space); // ) + + // if (fn_proto.getAlignExpr()) |align_expr| { + // const align_rparen = tree.nextToken(align_expr.lastToken()); + // const align_lparen = tree.prevToken(align_expr.firstToken()); + // const align_kw = tree.prevToken(align_lparen); + + // try renderToken(ais, tree, align_kw, Space.None); // align + // try renderToken(ais, tree, align_lparen, Space.None); // ( + // try renderExpression(allocator, ais, tree, align_expr, Space.None); + // try renderToken(ais, tree, align_rparen, Space.Space); // ) + // } + + // if (fn_proto.getSectionExpr()) |section_expr| { + // const section_rparen = tree.nextToken(section_expr.lastToken()); + // const section_lparen = tree.prevToken(section_expr.firstToken()); + // const section_kw = tree.prevToken(section_lparen); + + // try renderToken(ais, tree, section_kw, Space.None); // section + // try renderToken(ais, tree, section_lparen, Space.None); // ( + 
// try renderExpression(allocator, ais, tree, section_expr, Space.None); + // try renderToken(ais, tree, section_rparen, Space.Space); // ) + // } + + // if (fn_proto.getCallconvExpr()) |callconv_expr| { + // const callconv_rparen = tree.nextToken(callconv_expr.lastToken()); + // const callconv_lparen = tree.prevToken(callconv_expr.firstToken()); + // const callconv_kw = tree.prevToken(callconv_lparen); + + // try renderToken(ais, tree, callconv_kw, Space.None); // callconv + // try renderToken(ais, tree, callconv_lparen, Space.None); // ( + // try renderExpression(allocator, ais, tree, callconv_expr, Space.None); + // try renderToken(ais, tree, callconv_rparen, Space.Space); // ) + // } else if (fn_proto.getIsExternPrototype() != null) { + // try ais.writer().writeAll("callconv(.C) "); + // } else if (fn_proto.getIsAsync() != null) { + // try ais.writer().writeAll("callconv(.Async) "); + // } + + // switch (fn_proto.return_type) { + // .Explicit => |node| { + // return renderExpression(allocator, ais, tree, node, space); + // }, + // .InferErrorSet => |node| { + // try renderToken(ais, tree, tree.prevToken(node.firstToken()), Space.None); // ! 
+ // return renderExpression(allocator, ais, tree, node, space); + // }, + // .Invalid => unreachable, + // } + //}, + + //.AnyFrameType => { + // const anyframe_type = @fieldParentPtr(ast.Node.AnyFrameType, "base", base); + + // if (anyframe_type.result) |result| { + // try renderToken(ais, tree, anyframe_type.anyframe_token, Space.None); // anyframe + // try renderToken(ais, tree, result.arrow_token, Space.None); // -> + // return renderExpression(allocator, ais, tree, result.return_type, space); + // } else { + // return renderToken(ais, tree, anyframe_type.anyframe_token, space); // anyframe + // } + //}, + + //.DocComment => unreachable, // doc comments are attached to nodes + + //.Switch => { + // const switch_node = @fieldParentPtr(ast.Node.Switch, "base", base); + + // try renderToken(ais, tree, switch_node.switch_token, Space.Space); // switch + // try renderToken(ais, tree, tree.nextToken(switch_node.switch_token), Space.None); // ( + + // const rparen = tree.nextToken(switch_node.expr.lastToken()); + // const lbrace = tree.nextToken(rparen); + + // if (switch_node.cases_len == 0) { + // try renderExpression(allocator, ais, tree, switch_node.expr, Space.None); + // try renderToken(ais, tree, rparen, Space.Space); // ) + // try renderToken(ais, tree, lbrace, Space.None); // { + // return renderToken(ais, tree, switch_node.rbrace, space); // } + // } + + // try renderExpression(allocator, ais, tree, switch_node.expr, Space.None); + // try renderToken(ais, tree, rparen, Space.Space); // ) + + // { + // ais.pushIndentNextLine(); + // defer ais.popIndent(); + // try renderToken(ais, tree, lbrace, Space.Newline); // { + + // const cases = switch_node.cases(); + // for (cases) |node, i| { + // try renderExpression(allocator, ais, tree, node, Space.Comma); + + // if (i + 1 < cases.len) { + // try renderExtraNewline(tree, ais, cases[i + 1]); + // } + // } + // } + + // return renderToken(ais, tree, switch_node.rbrace, space); // } + //}, + + //.SwitchCase => { + 
// const switch_case = @fieldParentPtr(ast.Node.SwitchCase, "base", base); + + // assert(switch_case.items_len != 0); + // const src_has_trailing_comma = blk: { + // const last_node = switch_case.items()[switch_case.items_len - 1]; + // const maybe_comma = tree.nextToken(last_node.lastToken()); + // break :blk tree.token_tags[maybe_comma] == .Comma; + // }; + + // if (switch_case.items_len == 1 or !src_has_trailing_comma) { + // const items = switch_case.items(); + // for (items) |node, i| { + // if (i + 1 < items.len) { + // try renderExpression(allocator, ais, tree, node, Space.None); + + // const comma_token = tree.nextToken(node.lastToken()); + // try renderToken(ais, tree, comma_token, Space.Space); // , + // try renderExtraNewline(tree, ais, items[i + 1]); + // } else { + // try renderExpression(allocator, ais, tree, node, Space.Space); + // } + // } + // } else { + // const items = switch_case.items(); + // for (items) |node, i| { + // if (i + 1 < items.len) { + // try renderExpression(allocator, ais, tree, node, Space.None); + + // const comma_token = tree.nextToken(node.lastToken()); + // try renderToken(ais, tree, comma_token, Space.Newline); // , + // try renderExtraNewline(tree, ais, items[i + 1]); + // } else { + // try renderExpression(allocator, ais, tree, node, Space.Comma); + // } + // } + // } + + // try renderToken(ais, tree, switch_case.arrow_token, Space.Space); // => + + // if (switch_case.payload) |payload| { + // try renderExpression(allocator, ais, tree, payload, Space.Space); + // } + + // return renderExpression(allocator, ais, tree, switch_case.expr, space); + //}, + //.SwitchElse => { + // const switch_else = @fieldParentPtr(ast.Node.SwitchElse, "base", base); + // return renderToken(ais, tree, switch_else.token, space); + //}, + //.Else => { + // const else_node = @fieldParentPtr(ast.Node.Else, "base", base); + + // const body_is_block = nodeIsBlock(else_node.body); + // const same_line = body_is_block or 
tree.tokensOnSameLine(else_node.else_token, else_node.body.lastToken()); + + // const after_else_space = if (same_line or else_node.payload != null) Space.Space else Space.Newline; + // try renderToken(ais, tree, else_node.else_token, after_else_space); + + // if (else_node.payload) |payload| { + // const payload_space = if (same_line) Space.Space else Space.Newline; + // try renderExpression(allocator, ais, tree, payload, payload_space); + // } + + // if (same_line) { + // return renderExpression(allocator, ais, tree, else_node.body, space); + // } else { + // ais.pushIndent(); + // defer ais.popIndent(); + // return renderExpression(allocator, ais, tree, else_node.body, space); + // } + //}, + + //.While => { + // const while_node = @fieldParentPtr(ast.Node.While, "base", base); + + // if (while_node.label) |label| { + // try renderToken(ais, tree, label, Space.None); // label + // try renderToken(ais, tree, tree.nextToken(label), Space.Space); // : + // } + + // if (while_node.inline_token) |inline_token| { + // try renderToken(ais, tree, inline_token, Space.Space); // inline + // } + + // try renderToken(ais, tree, while_node.while_token, Space.Space); // while + // try renderToken(ais, tree, tree.nextToken(while_node.while_token), Space.None); // ( + // try renderExpression(allocator, ais, tree, while_node.condition, Space.None); + + // const cond_rparen = tree.nextToken(while_node.condition.lastToken()); + + // const body_is_block = nodeIsBlock(while_node.body); + + // var block_start_space: Space = undefined; + // var after_body_space: Space = undefined; + + // if (body_is_block) { + // block_start_space = Space.BlockStart; + // after_body_space = if (while_node.@"else" == null) space else Space.SpaceOrOutdent; + // } else if (tree.tokensOnSameLine(cond_rparen, while_node.body.lastToken())) { + // block_start_space = Space.Space; + // after_body_space = if (while_node.@"else" == null) space else Space.Space; + // } else { + // block_start_space = 
Space.Newline; + // after_body_space = if (while_node.@"else" == null) space else Space.Newline; + // } + + // { + // const rparen_space = if (while_node.payload != null or while_node.continue_expr != null) Space.Space else block_start_space; + // try renderToken(ais, tree, cond_rparen, rparen_space); // ) + // } + + // if (while_node.payload) |payload| { + // const payload_space = if (while_node.continue_expr != null) Space.Space else block_start_space; + // try renderExpression(allocator, ais, tree, payload, payload_space); + // } + + // if (while_node.continue_expr) |continue_expr| { + // const rparen = tree.nextToken(continue_expr.lastToken()); + // const lparen = tree.prevToken(continue_expr.firstToken()); + // const colon = tree.prevToken(lparen); + + // try renderToken(ais, tree, colon, Space.Space); // : + // try renderToken(ais, tree, lparen, Space.None); // ( + + // try renderExpression(allocator, ais, tree, continue_expr, Space.None); + + // try renderToken(ais, tree, rparen, block_start_space); // ) + // } + + // { + // if (!body_is_block) ais.pushIndent(); + // defer if (!body_is_block) ais.popIndent(); + // try renderExpression(allocator, ais, tree, while_node.body, after_body_space); + // } + + // if (while_node.@"else") |@"else"| { + // return renderExpression(allocator, ais, tree, &@"else".base, space); + // } + //}, + + //.For => { + // const for_node = @fieldParentPtr(ast.Node.For, "base", base); + + // if (for_node.label) |label| { + // try renderToken(ais, tree, label, Space.None); // label + // try renderToken(ais, tree, tree.nextToken(label), Space.Space); // : + // } + + // if (for_node.inline_token) |inline_token| { + // try renderToken(ais, tree, inline_token, Space.Space); // inline + // } + + // try renderToken(ais, tree, for_node.for_token, Space.Space); // for + // try renderToken(ais, tree, tree.nextToken(for_node.for_token), Space.None); // ( + // try renderExpression(allocator, ais, tree, for_node.array_expr, Space.None); + + // 
const rparen = tree.nextToken(for_node.array_expr.lastToken()); + + // const body_is_block = for_node.body.tag.isBlock(); + // const src_one_line_to_body = !body_is_block and tree.tokensOnSameLine(rparen, for_node.body.firstToken()); + // const body_on_same_line = body_is_block or src_one_line_to_body; + + // try renderToken(ais, tree, rparen, Space.Space); // ) + + // const space_after_payload = if (body_on_same_line) Space.Space else Space.Newline; + // try renderExpression(allocator, ais, tree, for_node.payload, space_after_payload); // |x| + + // const space_after_body = blk: { + // if (for_node.@"else") |@"else"| { + // const src_one_line_to_else = tree.tokensOnSameLine(rparen, @"else".firstToken()); + // if (body_is_block or src_one_line_to_else) { + // break :blk Space.Space; + // } else { + // break :blk Space.Newline; + // } + // } else { + // break :blk space; + // } + // }; + + // { + // if (!body_on_same_line) ais.pushIndent(); + // defer if (!body_on_same_line) ais.popIndent(); + // try renderExpression(allocator, ais, tree, for_node.body, space_after_body); // { body } + // } + + // if (for_node.@"else") |@"else"| { + // return renderExpression(allocator, ais, tree, &@"else".base, space); // else + // } + //}, + + //.If => { + // const if_node = @fieldParentPtr(ast.Node.If, "base", base); + + // const lparen = tree.nextToken(if_node.if_token); + // const rparen = tree.nextToken(if_node.condition.lastToken()); + + // try renderToken(ais, tree, if_node.if_token, Space.Space); // if + // try renderToken(ais, tree, lparen, Space.None); // ( + + // try renderExpression(allocator, ais, tree, if_node.condition, Space.None); // condition + + // const body_is_if_block = if_node.body.tag == .If; + // const body_is_block = nodeIsBlock(if_node.body); + + // if (body_is_if_block) { + // try renderExtraNewline(tree, ais, if_node.body); + // } else if (body_is_block) { + // const after_rparen_space = if (if_node.payload == null) Space.BlockStart else Space.Space; + 
// try renderToken(ais, tree, rparen, after_rparen_space); // ) + + // if (if_node.payload) |payload| { + // try renderExpression(allocator, ais, tree, payload, Space.BlockStart); // |x| + // } + + // if (if_node.@"else") |@"else"| { + // try renderExpression(allocator, ais, tree, if_node.body, Space.SpaceOrOutdent); + // return renderExpression(allocator, ais, tree, &@"else".base, space); + // } else { + // return renderExpression(allocator, ais, tree, if_node.body, space); + // } + // } + + // const src_has_newline = !tree.tokensOnSameLine(rparen, if_node.body.lastToken()); + + // if (src_has_newline) { + // const after_rparen_space = if (if_node.payload == null) Space.Newline else Space.Space; + + // { + // ais.pushIndent(); + // defer ais.popIndent(); + // try renderToken(ais, tree, rparen, after_rparen_space); // ) + // } + + // if (if_node.payload) |payload| { + // try renderExpression(allocator, ais, tree, payload, Space.Newline); + // } + + // if (if_node.@"else") |@"else"| { + // const else_is_block = nodeIsBlock(@"else".body); + + // { + // ais.pushIndent(); + // defer ais.popIndent(); + // try renderExpression(allocator, ais, tree, if_node.body, Space.Newline); + // } + + // if (else_is_block) { + // try renderToken(ais, tree, @"else".else_token, Space.Space); // else + + // if (@"else".payload) |payload| { + // try renderExpression(allocator, ais, tree, payload, Space.Space); + // } + + // return renderExpression(allocator, ais, tree, @"else".body, space); + // } else { + // const after_else_space = if (@"else".payload == null) Space.Newline else Space.Space; + // try renderToken(ais, tree, @"else".else_token, after_else_space); // else + + // if (@"else".payload) |payload| { + // try renderExpression(allocator, ais, tree, payload, Space.Newline); + // } + + // ais.pushIndent(); + // defer ais.popIndent(); + // return renderExpression(allocator, ais, tree, @"else".body, space); + // } + // } else { + // ais.pushIndent(); + // defer ais.popIndent(); + // 
return renderExpression(allocator, ais, tree, if_node.body, space); + // } + // } + + // // Single line if statement + + // try renderToken(ais, tree, rparen, Space.Space); // ) + + // if (if_node.payload) |payload| { + // try renderExpression(allocator, ais, tree, payload, Space.Space); + // } + + // if (if_node.@"else") |@"else"| { + // try renderExpression(allocator, ais, tree, if_node.body, Space.Space); + // try renderToken(ais, tree, @"else".else_token, Space.Space); + + // if (@"else".payload) |payload| { + // try renderExpression(allocator, ais, tree, payload, Space.Space); + // } + + // return renderExpression(allocator, ais, tree, @"else".body, space); + // } else { + // return renderExpression(allocator, ais, tree, if_node.body, space); + // } + //}, + + //.Asm => { + // const asm_node = @fieldParentPtr(ast.Node.Asm, "base", base); + + // try renderToken(ais, tree, asm_node.asm_token, Space.Space); // asm + + // if (asm_node.volatile_token) |volatile_token| { + // try renderToken(ais, tree, volatile_token, Space.Space); // volatile + // try renderToken(ais, tree, tree.nextToken(volatile_token), Space.None); // ( + // } else { + // try renderToken(ais, tree, tree.nextToken(asm_node.asm_token), Space.None); // ( + // } + + // asmblk: { + // ais.pushIndent(); + // defer ais.popIndent(); + + // if (asm_node.outputs.len == 0 and asm_node.inputs.len == 0 and asm_node.clobbers.len == 0) { + // try renderExpression(allocator, ais, tree, asm_node.template, Space.None); + // break :asmblk; + // } + + // try renderExpression(allocator, ais, tree, asm_node.template, Space.Newline); + + // ais.setIndentDelta(asm_indent_delta); + // defer ais.setIndentDelta(indent_delta); + + // const colon1 = tree.nextToken(asm_node.template.lastToken()); + + // const colon2 = if (asm_node.outputs.len == 0) blk: { + // try renderToken(ais, tree, colon1, Space.Newline); // : + + // break :blk tree.nextToken(colon1); + // } else blk: { + // try renderToken(ais, tree, colon1, 
Space.Space); // : + + // ais.pushIndent(); + // defer ais.popIndent(); + + // for (asm_node.outputs) |*asm_output, i| { + // if (i + 1 < asm_node.outputs.len) { + // const next_asm_output = asm_node.outputs[i + 1]; + // try renderAsmOutput(allocator, ais, tree, asm_output, Space.None); + + // const comma = tree.prevToken(next_asm_output.firstToken()); + // try renderToken(ais, tree, comma, Space.Newline); // , + // try renderExtraNewlineToken(tree, ais, next_asm_output.firstToken()); + // } else if (asm_node.inputs.len == 0 and asm_node.clobbers.len == 0) { + // try renderAsmOutput(allocator, ais, tree, asm_output, Space.Newline); + // break :asmblk; + // } else { + // try renderAsmOutput(allocator, ais, tree, asm_output, Space.Newline); + // const comma_or_colon = tree.nextToken(asm_output.lastToken()); + // break :blk switch (tree.token_tags[comma_or_colon]) { + // .Comma => tree.nextToken(comma_or_colon), + // else => comma_or_colon, + // }; + // } + // } + // unreachable; + // }; + + // const colon3 = if (asm_node.inputs.len == 0) blk: { + // try renderToken(ais, tree, colon2, Space.Newline); // : + // break :blk tree.nextToken(colon2); + // } else blk: { + // try renderToken(ais, tree, colon2, Space.Space); // : + // ais.pushIndent(); + // defer ais.popIndent(); + // for (asm_node.inputs) |*asm_input, i| { + // if (i + 1 < asm_node.inputs.len) { + // const next_asm_input = &asm_node.inputs[i + 1]; + // try renderAsmInput(allocator, ais, tree, asm_input, Space.None); + + // const comma = tree.prevToken(next_asm_input.firstToken()); + // try renderToken(ais, tree, comma, Space.Newline); // , + // try renderExtraNewlineToken(tree, ais, next_asm_input.firstToken()); + // } else if (asm_node.clobbers.len == 0) { + // try renderAsmInput(allocator, ais, tree, asm_input, Space.Newline); + // break :asmblk; + // } else { + // try renderAsmInput(allocator, ais, tree, asm_input, Space.Newline); + // const comma_or_colon = tree.nextToken(asm_input.lastToken()); + // 
break :blk switch (tree.token_tags[comma_or_colon]) { + // .Comma => tree.nextToken(comma_or_colon), + // else => comma_or_colon, + // }; + // } + // } + // unreachable; + // }; + + // try renderToken(ais, tree, colon3, Space.Space); // : + // ais.pushIndent(); + // defer ais.popIndent(); + // for (asm_node.clobbers) |clobber_node, i| { + // if (i + 1 >= asm_node.clobbers.len) { + // try renderExpression(allocator, ais, tree, clobber_node, Space.Newline); + // break :asmblk; + // } else { + // try renderExpression(allocator, ais, tree, clobber_node, Space.None); + // const comma = tree.nextToken(clobber_node.lastToken()); + // try renderToken(ais, tree, comma, Space.Space); // , + // } + // } + // } + + // return renderToken(ais, tree, asm_node.rparen, space); + //}, + + //.EnumLiteral => { + // const enum_literal = @fieldParentPtr(ast.Node.EnumLiteral, "base", base); + + // try renderToken(ais, tree, enum_literal.dot, Space.None); // . + // return renderToken(ais, tree, enum_literal.name, space); // name + //}, + + //.ContainerField, + //.Root, + //.VarDecl, + //.Use, + //.TestDecl, + //=> unreachable, + else => @panic("TODO implement more renderExpression"), } } fn renderArrayType( allocator: *mem.Allocator, - ais: anytype, - tree: *ast.Tree, + ais: *Ais, + tree: ast.Tree, lbracket: ast.TokenIndex, - rhs: *ast.Node, - len_expr: *ast.Node, - opt_sentinel: ?*ast.Node, + rhs: ast.Node.Index, + len_expr: ast.Node.Index, + opt_sentinel: ?ast.Node.Index, space: Space, -) (@TypeOf(ais.*).Error || Error)!void { +) Error!void { const rbracket = tree.nextToken(if (opt_sentinel) |sentinel| sentinel.lastToken() else len_expr.lastToken()); - const starts_with_comment = tree.token_ids[lbracket + 1] == .LineComment; - const ends_with_comment = tree.token_ids[rbracket - 1] == .LineComment; + const starts_with_comment = tree.token_tags[lbracket + 1] == .LineComment; + const ends_with_comment = tree.token_tags[rbracket - 1] == .LineComment; const new_space = if (ends_with_comment) 
Space.Newline else Space.None; { const do_indent = (starts_with_comment or ends_with_comment); if (do_indent) ais.pushIndent(); defer if (do_indent) ais.popIndent(); - try renderToken(tree, ais, lbracket, Space.None); // [ + try renderToken(ais, tree, lbracket, Space.None); // [ try renderExpression(allocator, ais, tree, len_expr, new_space); if (starts_with_comment) { @@ -2182,25 +2087,25 @@ fn renderArrayType( } if (opt_sentinel) |sentinel| { const colon_token = tree.prevToken(sentinel.firstToken()); - try renderToken(tree, ais, colon_token, Space.None); // : + try renderToken(ais, tree, colon_token, Space.None); // : try renderExpression(allocator, ais, tree, sentinel, Space.None); } if (starts_with_comment) { try ais.maybeInsertNewline(); } } - try renderToken(tree, ais, rbracket, Space.None); // ] + try renderToken(ais, tree, rbracket, Space.None); // ] return renderExpression(allocator, ais, tree, rhs, space); } fn renderAsmOutput( allocator: *mem.Allocator, - ais: anytype, - tree: *ast.Tree, + ais: *Ais, + tree: ast.Tree, asm_output: *const ast.Node.Asm.Output, space: Space, -) (@TypeOf(ais.*).Error || Error)!void { +) Error!void { try ais.writer().writeAll("["); try renderExpression(allocator, ais, tree, asm_output.symbolic_name, Space.None); try ais.writer().writeAll("] "); @@ -2217,37 +2122,37 @@ fn renderAsmOutput( }, } - return renderToken(tree, ais, asm_output.lastToken(), space); // ) + return renderToken(ais, tree, asm_output.lastToken(), space); // ) } fn renderAsmInput( allocator: *mem.Allocator, - ais: anytype, - tree: *ast.Tree, + ais: *Ais, + tree: ast.Tree, asm_input: *const ast.Node.Asm.Input, space: Space, -) (@TypeOf(ais.*).Error || Error)!void { +) Error!void { try ais.writer().writeAll("["); try renderExpression(allocator, ais, tree, asm_input.symbolic_name, Space.None); try ais.writer().writeAll("] "); try renderExpression(allocator, ais, tree, asm_input.constraint, Space.None); try ais.writer().writeAll(" ("); try 
renderExpression(allocator, ais, tree, asm_input.expr, Space.None); - return renderToken(tree, ais, asm_input.lastToken(), space); // ) + return renderToken(ais, tree, asm_input.lastToken(), space); // ) } fn renderVarDecl( allocator: *mem.Allocator, - ais: anytype, - tree: *ast.Tree, - var_decl: *ast.Node.VarDecl, -) (@TypeOf(ais.*).Error || Error)!void { + ais: *Ais, + tree: ast.Tree, + var_decl: ast.Node.Index.VarDecl, +) Error!void { if (var_decl.getVisibToken()) |visib_token| { - try renderToken(tree, ais, visib_token, Space.Space); // pub + try renderToken(ais, tree, visib_token, Space.Space); // pub } if (var_decl.getExternExportToken()) |extern_export_token| { - try renderToken(tree, ais, extern_export_token, Space.Space); // extern + try renderToken(ais, tree, extern_export_token, Space.Space); // extern if (var_decl.getLibName()) |lib_name| { try renderExpression(allocator, ais, tree, lib_name, Space.Space); // "lib" @@ -2255,13 +2160,13 @@ fn renderVarDecl( } if (var_decl.getComptimeToken()) |comptime_token| { - try renderToken(tree, ais, comptime_token, Space.Space); // comptime + try renderToken(ais, tree, comptime_token, Space.Space); // comptime } if (var_decl.getThreadLocalToken()) |thread_local_token| { - try renderToken(tree, ais, thread_local_token, Space.Space); // threadlocal + try renderToken(ais, tree, thread_local_token, Space.Space); // threadlocal } - try renderToken(tree, ais, var_decl.mut_token, Space.Space); // var + try renderToken(ais, tree, var_decl.mut_token, Space.Space); // var const name_space = if (var_decl.getTypeNode() == null and (var_decl.getAlignNode() != null or @@ -2270,10 +2175,10 @@ fn renderVarDecl( Space.Space else Space.None; - try renderToken(tree, ais, var_decl.name_token, name_space); + try renderToken(ais, tree, var_decl.name_token, name_space); if (var_decl.getTypeNode()) |type_node| { - try renderToken(tree, ais, tree.nextToken(var_decl.name_token), Space.Space); + try renderToken(ais, tree, 
tree.nextToken(var_decl.name_token), Space.Space); const s = if (var_decl.getAlignNode() != null or var_decl.getSectionNode() != null or var_decl.getInitNode() != null) Space.Space else Space.None; @@ -2284,22 +2189,22 @@ fn renderVarDecl( const lparen = tree.prevToken(align_node.firstToken()); const align_kw = tree.prevToken(lparen); const rparen = tree.nextToken(align_node.lastToken()); - try renderToken(tree, ais, align_kw, Space.None); // align - try renderToken(tree, ais, lparen, Space.None); // ( + try renderToken(ais, tree, align_kw, Space.None); // align + try renderToken(ais, tree, lparen, Space.None); // ( try renderExpression(allocator, ais, tree, align_node, Space.None); const s = if (var_decl.getSectionNode() != null or var_decl.getInitNode() != null) Space.Space else Space.None; - try renderToken(tree, ais, rparen, s); // ) + try renderToken(ais, tree, rparen, s); // ) } if (var_decl.getSectionNode()) |section_node| { const lparen = tree.prevToken(section_node.firstToken()); const section_kw = tree.prevToken(lparen); const rparen = tree.nextToken(section_node.lastToken()); - try renderToken(tree, ais, section_kw, Space.None); // linksection - try renderToken(tree, ais, lparen, Space.None); // ( + try renderToken(ais, tree, section_kw, Space.None); // linksection + try renderToken(ais, tree, lparen, Space.None); // ( try renderExpression(allocator, ais, tree, section_node, Space.None); const s = if (var_decl.getInitNode() != null) Space.Space else Space.None; - try renderToken(tree, ais, rparen, s); // ) + try renderToken(ais, tree, rparen, s); // ) } if (var_decl.getInitNode()) |init_node| { @@ -2312,268 +2217,150 @@ fn renderVarDecl( { ais.pushIndent(); defer ais.popIndent(); - try renderToken(tree, ais, eq_token, eq_space); // = + try renderToken(ais, tree, eq_token, eq_space); // = } ais.pushIndentOneShot(); try renderExpression(allocator, ais, tree, init_node, Space.None); } - try renderToken(tree, ais, var_decl.semicolon_token, Space.Newline); + 
try renderToken(ais, tree, var_decl.semicolon_token, Space.Newline); } fn renderParamDecl( allocator: *mem.Allocator, - ais: anytype, - tree: *ast.Tree, + ais: *Ais, + tree: ast.Tree, param_decl: ast.Node.FnProto.ParamDecl, space: Space, -) (@TypeOf(ais.*).Error || Error)!void { +) Error!void { try renderDocComments(tree, ais, param_decl, param_decl.doc_comments); if (param_decl.comptime_token) |comptime_token| { - try renderToken(tree, ais, comptime_token, Space.Space); + try renderToken(ais, tree, comptime_token, Space.Space); } if (param_decl.noalias_token) |noalias_token| { - try renderToken(tree, ais, noalias_token, Space.Space); + try renderToken(ais, tree, noalias_token, Space.Space); } if (param_decl.name_token) |name_token| { - try renderToken(tree, ais, name_token, Space.None); - try renderToken(tree, ais, tree.nextToken(name_token), Space.Space); // : + try renderToken(ais, tree, name_token, Space.None); + try renderToken(ais, tree, tree.nextToken(name_token), Space.Space); // : } switch (param_decl.param_type) { .any_type, .type_expr => |node| try renderExpression(allocator, ais, tree, node, space), } } -fn renderStatement( - allocator: *mem.Allocator, - ais: anytype, - tree: *ast.Tree, - base: *ast.Node, -) (@TypeOf(ais.*).Error || Error)!void { - switch (base.tag) { - .VarDecl => { - const var_decl = @fieldParentPtr(ast.Node.VarDecl, "base", base); - try renderVarDecl(allocator, ais, tree, var_decl); - }, - else => { - if (base.requireSemiColon()) { - try renderExpression(allocator, ais, tree, base, Space.None); +fn renderStatement(ais: *Ais, tree: ast.Tree, base: ast.Node.Index) Error!void { + @panic("TODO render statement"); + //switch (base.tag) { + // .VarDecl => { + // const var_decl = @fieldParentPtr(ast.Node.VarDecl, "base", base); + // try renderVarDecl(allocator, ais, tree, var_decl); + // }, + // else => { + // if (base.requireSemiColon()) { + // try renderExpression(allocator, ais, tree, base, Space.None); - const semicolon_index = 
tree.nextToken(base.lastToken()); - assert(tree.token_ids[semicolon_index] == .Semicolon); - try renderToken(tree, ais, semicolon_index, Space.Newline); - } else { - try renderExpression(allocator, ais, tree, base, Space.Newline); - } - }, - } + // const semicolon_index = tree.nextToken(base.lastToken()); + // assert(tree.token_tags[semicolon_index] == .Semicolon); + // try renderToken(ais, tree, semicolon_index, Space.Newline); + // } else { + // try renderExpression(allocator, ais, tree, base, Space.Newline); + // } + // }, + //} } const Space = enum { None, Newline, + /// `renderToken` will additionally consume the next token if it is a comma. Comma, Space, SpaceOrOutdent, NoNewline, + /// Skips writing the possible line comment after the token. NoComment, BlockStart, }; -fn renderTokenOffset( - tree: *ast.Tree, - ais: anytype, - token_index: ast.TokenIndex, - space: Space, - token_skip_bytes: usize, -) (@TypeOf(ais.*).Error || Error)!void { +fn renderToken(ais: *Ais, tree: ast.Tree, token_index: ast.TokenIndex, space: Space) Error!void { if (space == Space.BlockStart) { - // If placing the lbrace on the current line would cause an uggly gap then put the lbrace on the next line + // If placing the lbrace on the current line would cause an ugly gap then put the lbrace on the next line. 
const new_space = if (ais.isLineOverIndented()) Space.Newline else Space.Space; - return renderToken(tree, ais, token_index, new_space); + return renderToken(ais, tree, token_index, new_space); } - var token_loc = tree.token_locs[token_index]; - try ais.writer().writeAll(mem.trimRight(u8, tree.tokenSliceLoc(token_loc)[token_skip_bytes..], " ")); + const token_tags = tree.tokens.items(.tag); + const token_starts = tree.tokens.items(.start); - if (space == Space.NoComment) - return; - - var next_token_id = tree.token_ids[token_index + 1]; - var next_token_loc = tree.token_locs[token_index + 1]; - - if (space == Space.Comma) switch (next_token_id) { - .Comma => return renderToken(tree, ais, token_index + 1, Space.Newline), - .LineComment => { - try ais.writer().writeAll(", "); - return renderToken(tree, ais, token_index + 1, Space.Newline); - }, - else => { - if (token_index + 2 < tree.token_ids.len and - tree.token_ids[token_index + 2] == .MultilineStringLiteralLine) - { - try ais.writer().writeAll(","); - return; - } else { - try ais.writer().writeAll(","); - try ais.insertNewline(); - return; - } - }, + const token_start = token_starts[token_index]; + const token_tag = token_tags[token_index]; + const lexeme = token_tag.lexeme() orelse lexeme: { + var tokenizer: std.zig.Tokenizer = .{ + .buffer = tree.source, + .index = token_start, + .pending_invalid_token = null, + }; + const token = tokenizer.next(); + assert(token.tag == token_tag); + break :lexeme tree.source[token.loc.start..token.loc.end]; }; + try ais.writer().writeAll(lexeme); - // Skip over same line doc comments - var offset: usize = 1; - if (next_token_id == .DocComment) { - const loc = tree.tokenLocationLoc(token_loc.end, next_token_loc); - if (loc.line == 0) { - offset += 1; - next_token_id = tree.token_ids[token_index + offset]; - next_token_loc = tree.token_locs[token_index + offset]; - } - } - - if (next_token_id != .LineComment) { - switch (space) { - Space.None, Space.NoNewline => return, - 
Space.Newline => { - if (next_token_id == .MultilineStringLiteralLine) { - return; - } else { - try ais.insertNewline(); - return; - } - }, - Space.Space, Space.SpaceOrOutdent => { - if (next_token_id == .MultilineStringLiteralLine) - return; - try ais.writer().writeByte(' '); - return; - }, - Space.NoComment, Space.Comma, Space.BlockStart => unreachable, - } - } - - while (true) { - const comment_is_empty = mem.trimRight(u8, tree.tokenSliceLoc(next_token_loc), " ").len == 2; - if (comment_is_empty) { - switch (space) { - Space.Newline => { - offset += 1; - token_loc = next_token_loc; - next_token_id = tree.token_ids[token_index + offset]; - next_token_loc = tree.token_locs[token_index + offset]; - if (next_token_id != .LineComment) { - try ais.insertNewline(); - return; - } - }, - else => break, + switch (space) { + .NoComment => {}, + .NoNewline => {}, + .None => {}, + .Comma => { + const count = try renderComments(ais, tree, token_start + lexeme.len, token_starts[token_index + 1], ", "); + if (count == 0 and token_tags[token_index + 1] == .Comma) { + return renderToken(ais, tree, token_index + 1, Space.Newline); } - } else { - break; - } - } + try ais.writer().writeAll(","); - var loc = tree.tokenLocationLoc(token_loc.end, next_token_loc); - if (loc.line == 0) { - if (tree.token_ids[token_index] != .MultilineStringLiteralLine) { + if (token_tags[token_index + 2] != .MultilineStringLiteralLine) { + try ais.insertNewline(); + } + }, + .SpaceOrOutdent => @panic("what does this even do"), + .Space => { + _ = try renderComments(ais, tree, token_start + lexeme.len, token_starts[token_index + 1], ""); try ais.writer().writeByte(' '); - } - try ais.writer().writeAll(mem.trimRight(u8, tree.tokenSliceLoc(next_token_loc), " ")); - offset = 2; - token_loc = next_token_loc; - next_token_loc = tree.token_locs[token_index + offset]; - next_token_id = tree.token_ids[token_index + offset]; - if (next_token_id != .LineComment) { - switch (space) { - .None, .Space, .SpaceOrOutdent 
=> { - try ais.insertNewline(); - }, - .Newline => { - if (next_token_id == .MultilineStringLiteralLine) { - return; - } else { - try ais.insertNewline(); - return; - } - }, - .NoNewline => {}, - .NoComment, .Comma, .BlockStart => unreachable, + }, + .Newline => { + if (token_tags[token_index + 1] != .MultilineStringLiteralLine) { + try ais.insertNewline(); } - return; - } - loc = tree.tokenLocationLoc(token_loc.end, next_token_loc); + }, + .BlockStart => unreachable, } - - while (true) { - // translate-c doesn't generate correct newlines - // in generated code (loc.line == 0) so treat that case - // as though there was meant to be a newline between the tokens - var newline_count = if (loc.line <= 1) @as(u8, 1) else @as(u8, 2); - while (newline_count > 0) : (newline_count -= 1) try ais.insertNewline(); - try ais.writer().writeAll(mem.trimRight(u8, tree.tokenSliceLoc(next_token_loc), " ")); - - offset += 1; - token_loc = next_token_loc; - next_token_loc = tree.token_locs[token_index + offset]; - next_token_id = tree.token_ids[token_index + offset]; - if (next_token_id != .LineComment) { - switch (space) { - .Newline => { - if (next_token_id == .MultilineStringLiteralLine) { - return; - } else { - try ais.insertNewline(); - return; - } - }, - .None, .Space, .SpaceOrOutdent => { - try ais.insertNewline(); - }, - .NoNewline => {}, - .NoComment, .Comma, .BlockStart => unreachable, - } - return; - } - loc = tree.tokenLocationLoc(token_loc.end, next_token_loc); - } -} - -fn renderToken( - tree: *ast.Tree, - ais: anytype, - token_index: ast.TokenIndex, - space: Space, -) (@TypeOf(ais.*).Error || Error)!void { - return renderTokenOffset(tree, ais, token_index, space, 0); } fn renderDocComments( - tree: *ast.Tree, - ais: anytype, + tree: ast.Tree, + ais: *Ais, node: anytype, - doc_comments: ?*ast.Node.DocComment, -) (@TypeOf(ais.*).Error || Error)!void { + doc_comments: ?ast.Node.Index.DocComment, +) Error!void { const comment = doc_comments orelse return; return 
renderDocCommentsToken(tree, ais, comment, node.firstToken()); } fn renderDocCommentsToken( - tree: *ast.Tree, - ais: anytype, - comment: *ast.Node.DocComment, + tree: ast.Tree, + ais: *Ais, + comment: ast.Node.Index.DocComment, first_token: ast.TokenIndex, -) (@TypeOf(ais.*).Error || Error)!void { +) Error!void { var tok_i = comment.first_line; while (true) : (tok_i += 1) { - switch (tree.token_ids[tok_i]) { + switch (tree.token_tags[tok_i]) { .DocComment, .ContainerDocComment => { if (comment.first_line < first_token) { - try renderToken(tree, ais, tok_i, Space.Newline); + try renderToken(ais, tree, tok_i, Space.Newline); } else { - try renderToken(tree, ais, tok_i, Space.NoComment); + try renderToken(ais, tree, tok_i, Space.NoComment); try ais.insertNewline(); } }, @@ -2596,7 +2383,7 @@ fn nodeIsBlock(base: *const ast.Node) bool { }; } -fn nodeCausesSliceOpSpace(base: *ast.Node) bool { +fn nodeCausesSliceOpSpace(base: ast.Node.Index) bool { return switch (base.tag) { .Catch, .Add, @@ -2646,7 +2433,7 @@ fn nodeCausesSliceOpSpace(base: *ast.Node) bool { }; } -fn copyFixingWhitespace(ais: anytype, slice: []const u8) @TypeOf(ais.*).Error!void { +fn copyFixingWhitespace(ais: *Ais, slice: []const u8) @TypeOf(ais.*).Error!void { for (slice) |byte| switch (byte) { '\t' => try ais.writer().writeAll(" "), '\r' => {}, @@ -2656,12 +2443,12 @@ fn copyFixingWhitespace(ais: anytype, slice: []const u8) @TypeOf(ais.*).Error!vo // Returns the number of nodes in `expr` that are on the same line as `rtoken`, // or null if they all are on the same line. 
-fn rowSize(tree: *ast.Tree, exprs: []*ast.Node, rtoken: ast.TokenIndex) ?usize { +fn rowSize(tree: ast.Tree, exprs: []ast.Node.Index, rtoken: ast.TokenIndex) ?usize { const first_token = exprs[0].firstToken(); const first_loc = tree.tokenLocation(tree.token_locs[first_token].start, rtoken); if (first_loc.line == 0) { const maybe_comma = tree.prevToken(rtoken); - if (tree.token_ids[maybe_comma] == .Comma) + if (tree.token_tags[maybe_comma] == .Comma) return 1; return null; // no newlines } diff --git a/lib/std/zig/tokenizer.zig b/lib/std/zig/tokenizer.zig index 8692713eb3..642c09e477 100644 --- a/lib/std/zig/tokenizer.zig +++ b/lib/std/zig/tokenizer.zig @@ -195,22 +195,23 @@ pub const Token = struct { Keyword_volatile, Keyword_while, - pub fn symbol(tag: Tag) []const u8 { + pub fn lexeme(tag: Tag) ?[]const u8 { return switch (tag) { - .Invalid => "Invalid", + .Invalid, + .Identifier, + .StringLiteral, + .MultilineStringLiteralLine, + .CharLiteral, + .Eof, + .Builtin, + .IntegerLiteral, + .FloatLiteral, + .DocComment, + .ContainerDocComment, + => null, + .Invalid_ampersands => "&&", .Invalid_periodasterisks => ".**", - .Identifier => "Identifier", - .StringLiteral => "StringLiteral", - .MultilineStringLiteralLine => "MultilineStringLiteralLine", - .CharLiteral => "CharLiteral", - .Eof => "Eof", - .Builtin => "Builtin", - .IntegerLiteral => "IntegerLiteral", - .FloatLiteral => "FloatLiteral", - .DocComment => "DocComment", - .ContainerDocComment => "ContainerDocComment", - .Bang => "!", .Pipe => "|", .PipePipe => "||", @@ -319,6 +320,10 @@ pub const Token = struct { .Keyword_while => "while", }; } + + pub fn symbol(tag: Tag) []const u8 { + return tag.lexeme() orelse @tagName(tag); + } }; }; From 272a0ab359ee504f147ccf1ce5adb78c676a8305 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Mon, 1 Feb 2021 20:11:55 -0700 Subject: [PATCH 006/173] zig fmt: implement "line comment followed by top-level comptime" --- lib/std/zig/parser_test.zig | 11 +++++++++++ 
lib/std/zig/render.zig | 11 ++++++----- lib/std/zig/tokenizer.zig | 22 ++++++++++++++++++++-- 3 files changed, 37 insertions(+), 7 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index d4a01da0d0..bc6e9f933b 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -5,6 +5,17 @@ // and substantial portions of the software. test "zig fmt: simple top level comptime block" { try testCanonical( + \\// line comment + \\comptime {} + \\ + ); +} + +test "zig fmt: two spaced line comments before decl" { + try testCanonical( + \\// line comment + \\ + \\// another \\comptime {} \\ ); diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 17c59776b5..f1521b86cc 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -36,10 +36,11 @@ fn renderComments(ais: *Ais, tree: ast.Tree, start: usize, end: usize, prefix: [ var index: usize = start; var count: usize = 0; while (true) { - // Scan forward to the next line comment, counting newlines. 
- const comment_start = mem.indexOf(u8, tree.source[index..end], "//") orelse return count; - const newline = mem.indexOfScalar(u8, tree.source[comment_start..end], '\n').?; - const untrimmed_comment = tree.source[comment_start..][0..newline]; + const comment_start = index + + (mem.indexOf(u8, tree.source[index..end], "//") orelse return count); + const newline = comment_start + + mem.indexOfScalar(u8, tree.source[comment_start..end], '\n').?; + const untrimmed_comment = tree.source[comment_start..newline]; const trimmed_comment = mem.trimRight(u8, untrimmed_comment, " \r\t"); if (count == 0) { count += 1; @@ -52,7 +53,7 @@ fn renderComments(ais: *Ais, tree: ast.Tree, start: usize, end: usize, prefix: [ } } try ais.writer().print("{s}\n", .{trimmed_comment}); - index += comment_start + newline; + index = newline + 1; } } diff --git a/lib/std/zig/tokenizer.zig b/lib/std/zig/tokenizer.zig index 642c09e477..8e5ecc7010 100644 --- a/lib/std/zig/tokenizer.zig +++ b/lib/std/zig/tokenizer.zig @@ -1035,7 +1035,10 @@ pub const Tokenizer = struct { result.tag = .ContainerDocComment; state = .container_doc_comment; }, - '\n' => state = .start, + '\n' => { + state = .start; + result.loc.start = self.index + 1; + }, '\t', '\r' => state = .line_comment, else => { state = .line_comment; @@ -1061,7 +1064,10 @@ pub const Tokenizer = struct { }, }, .line_comment => switch (c) { - '\n' => state = .start, + '\n' => { + state = .start; + result.loc.start = self.index + 1; + }, '\t', '\r' => {}, else => self.checkLiteralCharacter(), }, @@ -1499,6 +1505,18 @@ test "tokenizer" { testTokenize("test", &[_]Token.Tag{.Keyword_test}); } +test "line comment followed by top-level comptime" { + testTokenize( + \\// line comment + \\comptime {} + \\ + , &[_]Token.Tag{ + .Keyword_comptime, + .LBrace, + .RBrace, + }); +} + test "tokenizer - unknown length pointer and then c pointer" { testTokenize( \\[*]u8 From 0c6b98b82568edd982d8f7b7820a0a1173c5c5d2 Mon Sep 17 00:00:00 2001 From: Andrew Kelley 
Date: Mon, 1 Feb 2021 21:31:41 -0700 Subject: [PATCH 007/173] zig fmt: implement simple test with doc comments --- lib/std/zig/parser_test.zig | 505 ++++++++++++++++---------------- lib/std/zig/render.zig | 566 ++++++++++++++++++------------------ 2 files changed, 538 insertions(+), 533 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index bc6e9f933b..8d9c4b5cc8 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -21,247 +21,6 @@ test "zig fmt: two spaced line comments before decl" { ); } -//test "recovery: top level" { -// try testError( -// \\test "" {inline} -// \\test "" {inline} -// , &[_]Error{ -// .ExpectedInlinable, -// .ExpectedInlinable, -// }); -//} -// -//test "recovery: block statements" { -// try testError( -// \\test "" { -// \\ foo + +; -// \\ inline; -// \\} -// , &[_]Error{ -// .InvalidToken, -// .ExpectedInlinable, -// }); -//} -// -//test "recovery: missing comma" { -// try testError( -// \\test "" { -// \\ switch (foo) { -// \\ 2 => {} -// \\ 3 => {} -// \\ else => { -// \\ foo && bar +; -// \\ } -// \\ } -// \\} -// , &[_]Error{ -// .ExpectedToken, -// .ExpectedToken, -// .InvalidAnd, -// .InvalidToken, -// }); -//} -// -//test "recovery: extra qualifier" { -// try testError( -// \\const a: *const const u8; -// \\test "" -// , &[_]Error{ -// .ExtraConstQualifier, -// .ExpectedLBrace, -// }); -//} -// -//test "recovery: missing return type" { -// try testError( -// \\fn foo() { -// \\ a && b; -// \\} -// \\test "" -// , &[_]Error{ -// .ExpectedReturnType, -// .InvalidAnd, -// .ExpectedLBrace, -// }); -//} -// -//test "recovery: continue after invalid decl" { -// try testError( -// \\fn foo { -// \\ inline; -// \\} -// \\pub test "" { -// \\ async a && b; -// \\} -// , &[_]Error{ -// .ExpectedToken, -// .ExpectedPubItem, -// .ExpectedParamList, -// .InvalidAnd, -// }); -// try testError( -// \\threadlocal test "" { -// \\ @a && b; -// \\} -// , &[_]Error{ -// .ExpectedVarDecl, -// 
.ExpectedParamList, -// .InvalidAnd, -// }); -//} -// -//test "recovery: invalid extern/inline" { -// try testError( -// \\inline test "" { a && b; } -// , &[_]Error{ -// .ExpectedFn, -// .InvalidAnd, -// }); -// try testError( -// \\extern "" test "" { a && b; } -// , &[_]Error{ -// .ExpectedVarDeclOrFn, -// .InvalidAnd, -// }); -//} -// -//test "recovery: missing semicolon" { -// try testError( -// \\test "" { -// \\ comptime a && b -// \\ c && d -// \\ @foo -// \\} -// , &[_]Error{ -// .InvalidAnd, -// .ExpectedToken, -// .InvalidAnd, -// .ExpectedToken, -// .ExpectedParamList, -// .ExpectedToken, -// }); -//} -// -//test "recovery: invalid container members" { -// try testError( -// \\usingnamespace; -// \\foo+ -// \\bar@, -// \\while (a == 2) { test "" {}} -// \\test "" { -// \\ a && b -// \\} -// , &[_]Error{ -// .ExpectedExpr, -// .ExpectedToken, -// .ExpectedToken, -// .ExpectedContainerMembers, -// .InvalidAnd, -// .ExpectedToken, -// }); -//} -// -//test "recovery: invalid parameter" { -// try testError( -// \\fn main() void { -// \\ a(comptime T: type) -// \\} -// , &[_]Error{ -// .ExpectedToken, -// }); -//} -// -//test "recovery: extra '}' at top level" { -// try testError( -// \\}}} -// \\test "" { -// \\ a && b; -// \\} -// , &[_]Error{ -// .ExpectedContainerMembers, -// .ExpectedContainerMembers, -// .ExpectedContainerMembers, -// .InvalidAnd, -// }); -//} -// -//test "recovery: mismatched bracket at top level" { -// try testError( -// \\const S = struct { -// \\ arr: 128]?G -// \\}; -// , &[_]Error{ -// .ExpectedToken, -// }); -//} -// -//test "recovery: invalid global error set access" { -// try testError( -// \\test "" { -// \\ error && foo; -// \\} -// , &[_]Error{ -// .ExpectedToken, -// .ExpectedIdentifier, -// .InvalidAnd, -// }); -//} -// -//test "recovery: invalid asterisk after pointer dereference" { -// try testError( -// \\test "" { -// \\ var sequence = "repeat".*** 10; -// \\} -// , &[_]Error{ -// .AsteriskAfterPointerDereference, -// 
}); -// try testError( -// \\test "" { -// \\ var sequence = "repeat".** 10&&a; -// \\} -// , &[_]Error{ -// .AsteriskAfterPointerDereference, -// .InvalidAnd, -// }); -//} -// -//test "recovery: missing semicolon after if, for, while stmt" { -// try testError( -// \\test "" { -// \\ if (foo) bar -// \\ for (foo) |a| bar -// \\ while (foo) bar -// \\ a && b; -// \\} -// , &[_]Error{ -// .ExpectedSemiOrElse, -// .ExpectedSemiOrElse, -// .ExpectedSemiOrElse, -// .InvalidAnd, -// }); -//} -// -//test "recovery: invalid comptime" { -// try testError( -// \\comptime -// , &[_]Error{ -// .ExpectedBlockOrField, -// }); -//} -// -//test "recovery: missing block after for/while loops" { -// try testError( -// \\test "" { while (foo) } -// , &[_]Error{ -// .ExpectedBlockOrAssignment, -// }); -// try testError( -// \\test "" { for (foo) |bar| } -// , &[_]Error{ -// .ExpectedBlockOrAssignment, -// }); -//} -// //test "zig fmt: respect line breaks after var declarations" { // try testCanonical( // \\const crc = @@ -306,13 +65,22 @@ test "zig fmt: two spaced line comments before decl" { // \\ // ); //} -// -//test "zig fmt: empty file" { -// try testCanonical( -// \\ -// ); -//} -// + +test "zig fmt: empty file" { + try testCanonical( + \\ + ); +} + +test "zig fmt: doc comments on test" { + try testCanonical( + \\/// hello + \\/// world + \\test "" {} + \\ + ); +} + //test "zig fmt: if statment" { // try testCanonical( // \\test "" { @@ -3712,6 +3480,247 @@ test "zig fmt: two spaced line comments before decl" { // \\ // ); //} +// +//test "recovery: top level" { +// try testError( +// \\test "" {inline} +// \\test "" {inline} +// , &[_]Error{ +// .ExpectedInlinable, +// .ExpectedInlinable, +// }); +//} +// +//test "recovery: block statements" { +// try testError( +// \\test "" { +// \\ foo + +; +// \\ inline; +// \\} +// , &[_]Error{ +// .InvalidToken, +// .ExpectedInlinable, +// }); +//} +// +//test "recovery: missing comma" { +// try testError( +// \\test "" { +// \\ switch 
(foo) { +// \\ 2 => {} +// \\ 3 => {} +// \\ else => { +// \\ foo && bar +; +// \\ } +// \\ } +// \\} +// , &[_]Error{ +// .ExpectedToken, +// .ExpectedToken, +// .InvalidAnd, +// .InvalidToken, +// }); +//} +// +//test "recovery: extra qualifier" { +// try testError( +// \\const a: *const const u8; +// \\test "" +// , &[_]Error{ +// .ExtraConstQualifier, +// .ExpectedLBrace, +// }); +//} +// +//test "recovery: missing return type" { +// try testError( +// \\fn foo() { +// \\ a && b; +// \\} +// \\test "" +// , &[_]Error{ +// .ExpectedReturnType, +// .InvalidAnd, +// .ExpectedLBrace, +// }); +//} +// +//test "recovery: continue after invalid decl" { +// try testError( +// \\fn foo { +// \\ inline; +// \\} +// \\pub test "" { +// \\ async a && b; +// \\} +// , &[_]Error{ +// .ExpectedToken, +// .ExpectedPubItem, +// .ExpectedParamList, +// .InvalidAnd, +// }); +// try testError( +// \\threadlocal test "" { +// \\ @a && b; +// \\} +// , &[_]Error{ +// .ExpectedVarDecl, +// .ExpectedParamList, +// .InvalidAnd, +// }); +//} +// +//test "recovery: invalid extern/inline" { +// try testError( +// \\inline test "" { a && b; } +// , &[_]Error{ +// .ExpectedFn, +// .InvalidAnd, +// }); +// try testError( +// \\extern "" test "" { a && b; } +// , &[_]Error{ +// .ExpectedVarDeclOrFn, +// .InvalidAnd, +// }); +//} +// +//test "recovery: missing semicolon" { +// try testError( +// \\test "" { +// \\ comptime a && b +// \\ c && d +// \\ @foo +// \\} +// , &[_]Error{ +// .InvalidAnd, +// .ExpectedToken, +// .InvalidAnd, +// .ExpectedToken, +// .ExpectedParamList, +// .ExpectedToken, +// }); +//} +// +//test "recovery: invalid container members" { +// try testError( +// \\usingnamespace; +// \\foo+ +// \\bar@, +// \\while (a == 2) { test "" {}} +// \\test "" { +// \\ a && b +// \\} +// , &[_]Error{ +// .ExpectedExpr, +// .ExpectedToken, +// .ExpectedToken, +// .ExpectedContainerMembers, +// .InvalidAnd, +// .ExpectedToken, +// }); +//} +// +//test "recovery: invalid parameter" { 
+// try testError( +// \\fn main() void { +// \\ a(comptime T: type) +// \\} +// , &[_]Error{ +// .ExpectedToken, +// }); +//} +// +//test "recovery: extra '}' at top level" { +// try testError( +// \\}}} +// \\test "" { +// \\ a && b; +// \\} +// , &[_]Error{ +// .ExpectedContainerMembers, +// .ExpectedContainerMembers, +// .ExpectedContainerMembers, +// .InvalidAnd, +// }); +//} +// +//test "recovery: mismatched bracket at top level" { +// try testError( +// \\const S = struct { +// \\ arr: 128]?G +// \\}; +// , &[_]Error{ +// .ExpectedToken, +// }); +//} +// +//test "recovery: invalid global error set access" { +// try testError( +// \\test "" { +// \\ error && foo; +// \\} +// , &[_]Error{ +// .ExpectedToken, +// .ExpectedIdentifier, +// .InvalidAnd, +// }); +//} +// +//test "recovery: invalid asterisk after pointer dereference" { +// try testError( +// \\test "" { +// \\ var sequence = "repeat".*** 10; +// \\} +// , &[_]Error{ +// .AsteriskAfterPointerDereference, +// }); +// try testError( +// \\test "" { +// \\ var sequence = "repeat".** 10&&a; +// \\} +// , &[_]Error{ +// .AsteriskAfterPointerDereference, +// .InvalidAnd, +// }); +//} +// +//test "recovery: missing semicolon after if, for, while stmt" { +// try testError( +// \\test "" { +// \\ if (foo) bar +// \\ for (foo) |a| bar +// \\ while (foo) bar +// \\ a && b; +// \\} +// , &[_]Error{ +// .ExpectedSemiOrElse, +// .ExpectedSemiOrElse, +// .ExpectedSemiOrElse, +// .InvalidAnd, +// }); +//} +// +//test "recovery: invalid comptime" { +// try testError( +// \\comptime +// , &[_]Error{ +// .ExpectedBlockOrField, +// }); +//} +// +//test "recovery: missing block after for/while loops" { +// try testError( +// \\test "" { while (foo) } +// , &[_]Error{ +// .ExpectedBlockOrAssignment, +// }); +// try testError( +// \\test "" { for (foo) |bar| } +// , &[_]Error{ +// .ExpectedBlockOrAssignment, +// }); +//} const std = @import("std"); const mem = std.mem; diff --git a/lib/std/zig/render.zig 
b/lib/std/zig/render.zig index f1521b86cc..4799f39dc4 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -101,6 +101,9 @@ fn renderTopLevelDecl(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index) Error!voi } fn renderContainerDecl(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) Error!void { + const token_tags = tree.tokens.items(.tag); + const main_tokens = tree.nodes.items(.main_token); + const datas = tree.nodes.items(.data); switch (tree.nodes.items(.tag)[decl]) { .UsingNamespace, .FnProtoSimple, @@ -112,142 +115,137 @@ fn renderContainerDecl(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: S .LocalVarDecl, .SimpleVarDecl, .AlignedVarDecl, - .TestDecl, .ContainerFieldInit, .ContainerFieldAlign, .ContainerField, => @panic("TODO implement renderContainerDecl"), + // .FnProto => { + // const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", decl); + + // try renderDocComments(ais, tree, fn_proto, fn_proto.getDocComments()); + + // if (fn_proto.getBodyNode()) |body_node| { + // try renderExpression(ais, tree, decl, .Space); + // try renderExpression(ais, tree, body_node, space); + // } else { + // try renderExpression(ais, tree, decl, .None); + // try renderToken(ais, tree, tree.nextToken(decl.lastToken()), space); + // } + // }, + + // .Use => { + // const use_decl = @fieldParentPtr(ast.Node.Use, "base", decl); + + // if (use_decl.visib_token) |visib_token| { + // try renderToken(ais, tree, visib_token, .Space); // pub + // } + // try renderToken(ais, tree, use_decl.use_token, .Space); // usingnamespace + // try renderExpression(ais, tree, use_decl.expr, .None); + // try renderToken(ais, tree, use_decl.semicolon_token, space); // ; + // }, + + // .VarDecl => { + // const var_decl = @fieldParentPtr(ast.Node.VarDecl, "base", decl); + + // try renderDocComments(ais, tree, var_decl, var_decl.getDocComments()); + // try renderVarDecl(allocator, ais, tree, var_decl); + // }, + + .TestDecl => { + const test_token = main_tokens[decl]; 
+ try renderDocComments(ais, tree, test_token); + try renderToken(ais, tree, test_token, .Space); + if (token_tags[test_token + 1] == .StringLiteral) { + try renderToken(ais, tree, test_token + 1, .Space); + } + try renderExpression(ais, tree, datas[decl].rhs, space); + }, + + // .ContainerField => { + // const field = @fieldParentPtr(ast.Node.ContainerField, "base", decl); + + // try renderDocComments(ais, tree, field, field.doc_comments); + // if (field.comptime_token) |t| { + // try renderToken(ais, tree, t, .Space); // comptime + // } + + // const src_has_trailing_comma = blk: { + // const maybe_comma = tree.nextToken(field.lastToken()); + // break :blk tree.token_tags[maybe_comma] == .Comma; + // }; + + // // The trailing comma is emitted at the end, but if it's not present + // // we still have to respect the specified `space` parameter + // const last_token_space: Space = if (src_has_trailing_comma) .None else space; + + // if (field.type_expr == null and field.value_expr == null) { + // try renderToken(ais, tree, field.name_token, last_token_space); // name + // } else if (field.type_expr != null and field.value_expr == null) { + // try renderToken(ais, tree, field.name_token, .None); // name + // try renderToken(ais, tree, tree.nextToken(field.name_token), .Space); // : + + // if (field.align_expr) |align_value_expr| { + // try renderExpression(ais, tree, field.type_expr.?, .Space); // type + // const lparen_token = tree.prevToken(align_value_expr.firstToken()); + // const align_kw = tree.prevToken(lparen_token); + // const rparen_token = tree.nextToken(align_value_expr.lastToken()); + // try renderToken(ais, tree, align_kw, .None); // align + // try renderToken(ais, tree, lparen_token, .None); // ( + // try renderExpression(ais, tree, align_value_expr, .None); // alignment + // try renderToken(ais, tree, rparen_token, last_token_space); // ) + // } else { + // try renderExpression(ais, tree, field.type_expr.?, last_token_space); // type + // } + // } else 
if (field.type_expr == null and field.value_expr != null) { + // try renderToken(ais, tree, field.name_token, .Space); // name + // try renderToken(ais, tree, tree.nextToken(field.name_token), .Space); // = + // try renderExpression(ais, tree, field.value_expr.?, last_token_space); // value + // } else { + // try renderToken(ais, tree, field.name_token, .None); // name + // try renderToken(ais, tree, tree.nextToken(field.name_token), .Space); // : + + // if (field.align_expr) |align_value_expr| { + // try renderExpression(ais, tree, field.type_expr.?, .Space); // type + // const lparen_token = tree.prevToken(align_value_expr.firstToken()); + // const align_kw = tree.prevToken(lparen_token); + // const rparen_token = tree.nextToken(align_value_expr.lastToken()); + // try renderToken(ais, tree, align_kw, .None); // align + // try renderToken(ais, tree, lparen_token, .None); // ( + // try renderExpression(ais, tree, align_value_expr, .None); // alignment + // try renderToken(ais, tree, rparen_token, .Space); // ) + // } else { + // try renderExpression(ais, tree, field.type_expr.?, .Space); // type + // } + // try renderToken(ais, tree, tree.prevToken(field.value_expr.?.firstToken()), .Space); // = + // try renderExpression(ais, tree, field.value_expr.?, last_token_space); // value + // } + + // if (src_has_trailing_comma) { + // const comma = tree.nextToken(field.lastToken()); + // try renderToken(ais, tree, comma, space); + // } + // }, .Comptime => return renderExpression(ais, tree, decl, space), + // .DocComment => { + // const comment = @fieldParentPtr(ast.Node.DocComment, "base", decl); + // const kind = tree.token_tags[comment.first_line]; + // try renderToken(ais, tree, comment.first_line, .Newline); + // var tok_i = comment.first_line + 1; + // while (true) : (tok_i += 1) { + // const tok_id = tree.token_tags[tok_i]; + // if (tok_id == kind) { + // try renderToken(ais, tree, tok_i, .Newline); + // } else if (tok_id == .LineComment) { + // continue; + // } 
else { + // break; + // } + // } + // }, else => unreachable, } - //switch (tag) { - // .FnProto => { - // const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", decl); - - // try renderDocComments(tree, ais, fn_proto, fn_proto.getDocComments()); - - // if (fn_proto.getBodyNode()) |body_node| { - // try renderExpression(allocator, ais, tree, decl, .Space); - // try renderExpression(allocator, ais, tree, body_node, space); - // } else { - // try renderExpression(allocator, ais, tree, decl, .None); - // try renderToken(ais, tree, tree.nextToken(decl.lastToken()), space); - // } - // }, - - // .Use => { - // const use_decl = @fieldParentPtr(ast.Node.Use, "base", decl); - - // if (use_decl.visib_token) |visib_token| { - // try renderToken(ais, tree, visib_token, .Space); // pub - // } - // try renderToken(ais, tree, use_decl.use_token, .Space); // usingnamespace - // try renderExpression(allocator, ais, tree, use_decl.expr, .None); - // try renderToken(ais, tree, use_decl.semicolon_token, space); // ; - // }, - - // .VarDecl => { - // const var_decl = @fieldParentPtr(ast.Node.VarDecl, "base", decl); - - // try renderDocComments(tree, ais, var_decl, var_decl.getDocComments()); - // try renderVarDecl(allocator, ais, tree, var_decl); - // }, - - // .TestDecl => { - // const test_decl = @fieldParentPtr(ast.Node.TestDecl, "base", decl); - - // try renderDocComments(tree, ais, test_decl, test_decl.doc_comments); - // try renderToken(ais, tree, test_decl.test_token, .Space); - // if (test_decl.name) |name| - // try renderExpression(allocator, ais, tree, name, .Space); - // try renderExpression(allocator, ais, tree, test_decl.body_node, space); - // }, - - // .ContainerField => { - // const field = @fieldParentPtr(ast.Node.ContainerField, "base", decl); - - // try renderDocComments(tree, ais, field, field.doc_comments); - // if (field.comptime_token) |t| { - // try renderToken(ais, tree, t, .Space); // comptime - // } - - // const src_has_trailing_comma = blk: { - // const 
maybe_comma = tree.nextToken(field.lastToken()); - // break :blk tree.token_tags[maybe_comma] == .Comma; - // }; - - // // The trailing comma is emitted at the end, but if it's not present - // // we still have to respect the specified `space` parameter - // const last_token_space: Space = if (src_has_trailing_comma) .None else space; - - // if (field.type_expr == null and field.value_expr == null) { - // try renderToken(ais, tree, field.name_token, last_token_space); // name - // } else if (field.type_expr != null and field.value_expr == null) { - // try renderToken(ais, tree, field.name_token, .None); // name - // try renderToken(ais, tree, tree.nextToken(field.name_token), .Space); // : - - // if (field.align_expr) |align_value_expr| { - // try renderExpression(allocator, ais, tree, field.type_expr.?, .Space); // type - // const lparen_token = tree.prevToken(align_value_expr.firstToken()); - // const align_kw = tree.prevToken(lparen_token); - // const rparen_token = tree.nextToken(align_value_expr.lastToken()); - // try renderToken(ais, tree, align_kw, .None); // align - // try renderToken(ais, tree, lparen_token, .None); // ( - // try renderExpression(allocator, ais, tree, align_value_expr, .None); // alignment - // try renderToken(ais, tree, rparen_token, last_token_space); // ) - // } else { - // try renderExpression(allocator, ais, tree, field.type_expr.?, last_token_space); // type - // } - // } else if (field.type_expr == null and field.value_expr != null) { - // try renderToken(ais, tree, field.name_token, .Space); // name - // try renderToken(ais, tree, tree.nextToken(field.name_token), .Space); // = - // try renderExpression(allocator, ais, tree, field.value_expr.?, last_token_space); // value - // } else { - // try renderToken(ais, tree, field.name_token, .None); // name - // try renderToken(ais, tree, tree.nextToken(field.name_token), .Space); // : - - // if (field.align_expr) |align_value_expr| { - // try renderExpression(allocator, ais, tree, 
field.type_expr.?, .Space); // type - // const lparen_token = tree.prevToken(align_value_expr.firstToken()); - // const align_kw = tree.prevToken(lparen_token); - // const rparen_token = tree.nextToken(align_value_expr.lastToken()); - // try renderToken(ais, tree, align_kw, .None); // align - // try renderToken(ais, tree, lparen_token, .None); // ( - // try renderExpression(allocator, ais, tree, align_value_expr, .None); // alignment - // try renderToken(ais, tree, rparen_token, .Space); // ) - // } else { - // try renderExpression(allocator, ais, tree, field.type_expr.?, .Space); // type - // } - // try renderToken(ais, tree, tree.prevToken(field.value_expr.?.firstToken()), .Space); // = - // try renderExpression(allocator, ais, tree, field.value_expr.?, last_token_space); // value - // } - - // if (src_has_trailing_comma) { - // const comma = tree.nextToken(field.lastToken()); - // try renderToken(ais, tree, comma, space); - // } - // }, - - // .DocComment => { - // const comment = @fieldParentPtr(ast.Node.DocComment, "base", decl); - // const kind = tree.token_tags[comment.first_line]; - // try renderToken(ais, tree, comment.first_line, .Newline); - // var tok_i = comment.first_line + 1; - // while (true) : (tok_i += 1) { - // const tok_id = tree.token_tags[tok_i]; - // if (tok_id == kind) { - // try renderToken(ais, tree, tok_i, .Newline); - // } else if (tok_id == .LineComment) { - // continue; - // } else { - // break; - // } - // } - // }, - // else => unreachable, - //} } fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Space) Error!void { @@ -319,9 +317,9 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // try renderToken(ais, tree, defer_node.defer_token, Space.Space); // if (defer_node.payload) |payload| { - // try renderExpression(allocator, ais, tree, payload, Space.Space); + // try renderExpression(ais, tree, payload, Space.Space); // } - // return renderExpression(allocator, ais, tree, 
defer_node.expr, space); + // return renderExpression(ais, tree, defer_node.expr, space); //}, .Comptime => { const comptime_token = tree.nodes.items(.main_token)[node]; @@ -337,7 +335,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // } else { // try renderToken(ais, tree, nosuspend_node.nosuspend_token, Space.Space); // } - // return renderExpression(allocator, ais, tree, nosuspend_node.expr, space); + // return renderExpression(ais, tree, nosuspend_node.expr, space); //}, //.Suspend => { @@ -345,7 +343,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // if (suspend_node.body) |body| { // try renderToken(ais, tree, suspend_node.suspend_token, Space.Space); - // return renderExpression(allocator, ais, tree, body, space); + // return renderExpression(ais, tree, body, space); // } else { // return renderToken(ais, tree, suspend_node.suspend_token, space); // } @@ -355,7 +353,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // const infix_op_node = @fieldParentPtr(ast.Node.Catch, "base", base); // const op_space = Space.Space; - // try renderExpression(allocator, ais, tree, infix_op_node.lhs, op_space); + // try renderExpression(ais, tree, infix_op_node.lhs, op_space); // const after_op_space = blk: { // const same_line = tree.tokensOnSameLine(infix_op_node.op_token, tree.nextToken(infix_op_node.op_token)); @@ -365,11 +363,11 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // try renderToken(ais, tree, infix_op_node.op_token, after_op_space); // if (infix_op_node.payload) |payload| { - // try renderExpression(allocator, ais, tree, payload, Space.Space); + // try renderExpression(ais, tree, payload, Space.Space); // } // ais.pushIndentOneShot(); - // return renderExpression(allocator, ais, tree, infix_op_node.rhs, space); + // return renderExpression(ais, tree, infix_op_node.rhs, space); //}, //.Add, @@ -421,7 +419,7 @@ fn 
renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // .Period, .ErrorUnion, .Range => Space.None, // else => Space.Space, // }; - // try renderExpression(allocator, ais, tree, infix_op_node.lhs, op_space); + // try renderExpression(ais, tree, infix_op_node.lhs, op_space); // const after_op_space = blk: { // const loc = tree.tokenLocation(tree.token_locs[infix_op_node.op_token].end, tree.nextToken(infix_op_node.op_token)); @@ -434,7 +432,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // try renderToken(ais, tree, infix_op_node.op_token, after_op_space); // } // ais.pushIndentOneShot(); - // return renderExpression(allocator, ais, tree, infix_op_node.rhs, space); + // return renderExpression(ais, tree, infix_op_node.rhs, space); //}, //.BitNot, @@ -446,7 +444,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac //=> { // const casted_node = @fieldParentPtr(ast.Node.SimplePrefixOp, "base", base); // try renderToken(ais, tree, casted_node.op_token, Space.None); - // return renderExpression(allocator, ais, tree, casted_node.rhs, space); + // return renderExpression(ais, tree, casted_node.rhs, space); //}, //.Try, @@ -455,7 +453,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac //=> { // const casted_node = @fieldParentPtr(ast.Node.SimplePrefixOp, "base", base); // try renderToken(ais, tree, casted_node.op_token, Space.Space); - // return renderExpression(allocator, ais, tree, casted_node.rhs, space); + // return renderExpression(ais, tree, casted_node.rhs, space); //}, //.ArrayType => { @@ -503,7 +501,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // .LBracket => Space.None, // else => Space.Space, // }; - // try renderExpression(allocator, ais, tree, sentinel, sentinel_space); + // try renderExpression(ais, tree, sentinel, sentinel_space); // } // switch (op_tok_id) { // .Asterisk, 
.AsteriskAsterisk => {}, @@ -520,16 +518,16 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // try renderToken(ais, tree, align_token, Space.None); // align // try renderToken(ais, tree, lparen_token, Space.None); // ( - // try renderExpression(allocator, ais, tree, align_info.node, Space.None); + // try renderExpression(ais, tree, align_info.node, Space.None); // if (align_info.bit_range) |bit_range| { // const colon1 = tree.prevToken(bit_range.start.firstToken()); // const colon2 = tree.prevToken(bit_range.end.firstToken()); // try renderToken(ais, tree, colon1, Space.None); // : - // try renderExpression(allocator, ais, tree, bit_range.start, Space.None); + // try renderExpression(ais, tree, bit_range.start, Space.None); // try renderToken(ais, tree, colon2, Space.None); // : - // try renderExpression(allocator, ais, tree, bit_range.end, Space.None); + // try renderExpression(ais, tree, bit_range.end, Space.None); // const rparen_token = tree.nextToken(bit_range.end.lastToken()); // try renderToken(ais, tree, rparen_token, Space.Space); // ) @@ -544,7 +542,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // if (ptr_type.ptr_info.volatile_token) |volatile_token| { // try renderToken(ais, tree, volatile_token, Space.Space); // volatile // } - // return renderExpression(allocator, ais, tree, ptr_type.rhs, space); + // return renderExpression(ais, tree, ptr_type.rhs, space); //}, //.SliceType => { @@ -553,7 +551,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // if (slice_type.ptr_info.sentinel) |sentinel| { // const colon_token = tree.prevToken(sentinel.firstToken()); // try renderToken(ais, tree, colon_token, Space.None); // : - // try renderExpression(allocator, ais, tree, sentinel, Space.None); + // try renderExpression(ais, tree, sentinel, Space.None); // try renderToken(ais, tree, tree.nextToken(sentinel.lastToken()), Space.None); // ] // } else { // 
try renderToken(ais, tree, tree.nextToken(slice_type.op_token), Space.None); // ] @@ -569,16 +567,16 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // try renderToken(ais, tree, align_token, Space.None); // align // try renderToken(ais, tree, lparen_token, Space.None); // ( - // try renderExpression(allocator, ais, tree, align_info.node, Space.None); + // try renderExpression(ais, tree, align_info.node, Space.None); // if (align_info.bit_range) |bit_range| { // const colon1 = tree.prevToken(bit_range.start.firstToken()); // const colon2 = tree.prevToken(bit_range.end.firstToken()); // try renderToken(ais, tree, colon1, Space.None); // : - // try renderExpression(allocator, ais, tree, bit_range.start, Space.None); + // try renderExpression(ais, tree, bit_range.start, Space.None); // try renderToken(ais, tree, colon2, Space.None); // : - // try renderExpression(allocator, ais, tree, bit_range.end, Space.None); + // try renderExpression(ais, tree, bit_range.end, Space.None); // const rparen_token = tree.nextToken(bit_range.end.lastToken()); // try renderToken(ais, tree, rparen_token, Space.Space); // ) @@ -593,7 +591,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // if (slice_type.ptr_info.volatile_token) |volatile_token| { // try renderToken(ais, tree, volatile_token, Space.Space); // } - // return renderExpression(allocator, ais, tree, slice_type.rhs, space); + // return renderExpression(ais, tree, slice_type.rhs, space); //}, //.ArrayInitializer, .ArrayInitializerDot => { @@ -622,7 +620,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // switch (lhs) { // .dot => |dot| try renderToken(ais, tree, dot, Space.None), - // .node => |node| try renderExpression(allocator, ais, tree, node, Space.None), + // .node => |node| try renderExpression(ais, tree, node, Space.None), // } // if (exprs.len == 0) { @@ -634,7 +632,7 @@ fn renderExpression(ais: *Ais, tree: 
ast.Tree, node: ast.Node.Index, space: Spac // const expr = exprs[0]; // try renderToken(ais, tree, lbrace, Space.None); - // try renderExpression(allocator, ais, tree, expr, Space.None); + // try renderExpression(ais, tree, expr, Space.None); // return renderToken(ais, tree, rtoken, space); // } @@ -748,7 +746,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // for (section_exprs) |expr, i| { // if (i + 1 < section_exprs.len) { // const next_expr = section_exprs[i + 1]; - // try renderExpression(allocator, ais, tree, expr, Space.None); + // try renderExpression(ais, tree, expr, Space.None); // const comma = tree.nextToken(expr.*.lastToken()); @@ -775,10 +773,10 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // } else { // const maybe_comma = tree.nextToken(expr.*.lastToken()); // if (tree.token_tags[maybe_comma] == .Comma) { - // try renderExpression(allocator, ais, tree, expr, Space.None); // , + // try renderExpression(ais, tree, expr, Space.None); // , // try renderToken(ais, tree, maybe_comma, Space.Newline); // , // } else { - // try renderExpression(allocator, ais, tree, expr, Space.Comma); // , + // try renderExpression(ais, tree, expr, Space.Comma); // , // } // } // } @@ -797,11 +795,11 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // for (exprs) |expr, i| { // if (i + 1 < exprs.len) { // const next_expr = exprs[i + 1]; - // try renderExpression(allocator, ais, tree, expr, Space.None); + // try renderExpression(ais, tree, expr, Space.None); // const comma = tree.nextToken(expr.*.lastToken()); // try renderToken(ais, tree, comma, Space.Space); // , // } else { - // try renderExpression(allocator, ais, tree, expr, Space.Space); + // try renderExpression(ais, tree, expr, Space.Space); // } // } @@ -835,7 +833,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // if (field_inits.len == 0) { // switch (lhs) { // 
.dot => |dot| try renderToken(ais, tree, dot, Space.None), - // .node => |node| try renderExpression(allocator, ais, tree, node, Space.None), + // .node => |node| try renderExpression(ais, tree, node, Space.None), // } // { @@ -886,10 +884,10 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // switch (lhs) { // .dot => |dot| try renderToken(ais, tree, dot, Space.None), - // .node => |node| try renderExpression(allocator, ais, tree, node, Space.None), + // .node => |node| try renderExpression(ais, tree, node, Space.None), // } // try renderToken(ais, tree, lbrace, Space.Space); - // try renderExpression(allocator, ais, tree, field_inits[0], Space.Space); + // try renderExpression(ais, tree, field_inits[0], Space.Space); // return renderToken(ais, tree, rtoken, space); // } @@ -897,18 +895,18 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // // render all on one line, no trailing comma // switch (lhs) { // .dot => |dot| try renderToken(ais, tree, dot, Space.None), - // .node => |node| try renderExpression(allocator, ais, tree, node, Space.None), + // .node => |node| try renderExpression(ais, tree, node, Space.None), // } // try renderToken(ais, tree, lbrace, Space.Space); // for (field_inits) |field_init, i| { // if (i + 1 < field_inits.len) { - // try renderExpression(allocator, ais, tree, field_init, Space.None); + // try renderExpression(ais, tree, field_init, Space.None); // const comma = tree.nextToken(field_init.lastToken()); // try renderToken(ais, tree, comma, Space.Space); // } else { - // try renderExpression(allocator, ais, tree, field_init, Space.Space); + // try renderExpression(ais, tree, field_init, Space.Space); // } // } @@ -918,7 +916,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // { // switch (lhs) { // .dot => |dot| try renderToken(ais, tree, dot, Space.None), - // .node => |node| try renderExpression(allocator, ais, tree, node, 
Space.None), + // .node => |node| try renderExpression(ais, tree, node, Space.None), // } // ais.pushIndentNextLine(); @@ -929,14 +927,14 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // for (field_inits) |field_init, i| { // if (i + 1 < field_inits.len) { // const next_field_init = field_inits[i + 1]; - // try renderExpression(allocator, ais, tree, field_init, Space.None); + // try renderExpression(ais, tree, field_init, Space.None); // const comma = tree.nextToken(field_init.lastToken()); // try renderToken(ais, tree, comma, Space.Newline); // try renderExtraNewline(tree, ais, next_field_init); // } else { - // try renderExpression(allocator, ais, tree, field_init, Space.Comma); + // try renderExpression(ais, tree, field_init, Space.Comma); // } // } // } @@ -950,7 +948,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // try renderToken(ais, tree, async_token, Space.Space); // } - // try renderExpression(allocator, ais, tree, call.lhs, Space.None); + // try renderExpression(ais, tree, call.lhs, Space.None); // const lparen = tree.nextToken(call.lhs.lastToken()); @@ -974,7 +972,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // for (params) |param_node, i| { // if (i + 1 < params.len) { // const next_node = params[i + 1]; - // try renderExpression(allocator, ais, tree, param_node, Space.None); + // try renderExpression(ais, tree, param_node, Space.None); // // Unindent the comma for multiline string literals // const maybe_multiline_string = param_node.firstToken(); @@ -986,7 +984,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // try renderToken(ais, tree, comma, Space.Newline); // , // try renderExtraNewline(tree, ais, next_node); // } else { - // try renderExpression(allocator, ais, tree, param_node, Space.Comma); + // try renderExpression(ais, tree, param_node, Space.Comma); // } // } // } @@ -1003,7 +1001,7 @@ 
fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // ais.pushIndentOneShot(); // } - // try renderExpression(allocator, ais, tree, param_node, Space.None); + // try renderExpression(ais, tree, param_node, Space.None); // if (i + 1 < params.len) { // const comma = tree.nextToken(param_node.lastToken()); @@ -1019,7 +1017,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // const lbracket = tree.nextToken(suffix_op.lhs.lastToken()); // const rbracket = tree.nextToken(suffix_op.index_expr.lastToken()); - // try renderExpression(allocator, ais, tree, suffix_op.lhs, Space.None); + // try renderExpression(ais, tree, suffix_op.lhs, Space.None); // try renderToken(ais, tree, lbracket, Space.None); // [ // const starts_with_comment = tree.token_tags[lbracket + 1] == .LineComment; @@ -1029,7 +1027,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // ais.pushIndent(); // defer ais.popIndent(); - // try renderExpression(allocator, ais, tree, suffix_op.index_expr, new_space); + // try renderExpression(ais, tree, suffix_op.index_expr, new_space); // } // if (starts_with_comment) try ais.maybeInsertNewline(); // return renderToken(ais, tree, rbracket, space); // ] @@ -1037,7 +1035,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac //.Slice => { // const suffix_op = base.castTag(.Slice).?; - // try renderExpression(allocator, ais, tree, suffix_op.lhs, Space.None); + // try renderExpression(ais, tree, suffix_op.lhs, Space.None); // const lbracket = tree.prevToken(suffix_op.start.firstToken()); // const dotdot = tree.nextToken(suffix_op.start.lastToken()); @@ -1048,16 +1046,16 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // const after_op_space = if (suffix_op.end != null) after_start_space else Space.None; // try renderToken(ais, tree, lbracket, Space.None); // [ - // try renderExpression(allocator, ais, 
tree, suffix_op.start, after_start_space); + // try renderExpression(ais, tree, suffix_op.start, after_start_space); // try renderToken(ais, tree, dotdot, after_op_space); // .. // if (suffix_op.end) |end| { // const after_end_space = if (suffix_op.sentinel != null) Space.Space else Space.None; - // try renderExpression(allocator, ais, tree, end, after_end_space); + // try renderExpression(ais, tree, end, after_end_space); // } // if (suffix_op.sentinel) |sentinel| { // const colon = tree.prevToken(sentinel.firstToken()); // try renderToken(ais, tree, colon, Space.None); // : - // try renderExpression(allocator, ais, tree, sentinel, Space.None); + // try renderExpression(ais, tree, sentinel, Space.None); // } // return renderToken(ais, tree, suffix_op.rtoken, space); // ] //}, @@ -1065,13 +1063,13 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac //.Deref => { // const suffix_op = base.castTag(.Deref).?; - // try renderExpression(allocator, ais, tree, suffix_op.lhs, Space.None); + // try renderExpression(ais, tree, suffix_op.lhs, Space.None); // return renderToken(ais, tree, suffix_op.rtoken, space); // .* //}, //.UnwrapOptional => { // const suffix_op = base.castTag(.UnwrapOptional).?; - // try renderExpression(allocator, ais, tree, suffix_op.lhs, Space.None); + // try renderExpression(ais, tree, suffix_op.lhs, Space.None); // try renderToken(ais, tree, tree.prevToken(suffix_op.rtoken), Space.None); // . // return renderToken(ais, tree, suffix_op.rtoken, space); // ? 
//}, @@ -1095,7 +1093,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // } // try renderToken(ais, tree, label, Space.Space); // label // } - // return renderExpression(allocator, ais, tree, maybe_rhs.?, space); + // return renderExpression(ais, tree, maybe_rhs.?, space); //}, //.Continue => { @@ -1114,7 +1112,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // const flow_expr = base.castTag(.Return).?; // if (flow_expr.getRHS()) |rhs| { // try renderToken(ais, tree, flow_expr.ltoken, Space.Space); - // return renderExpression(allocator, ais, tree, rhs, space); + // return renderExpression(ais, tree, rhs, space); // } else { // return renderToken(ais, tree, flow_expr.ltoken, space); // } @@ -1124,7 +1122,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // const payload = @fieldParentPtr(ast.Node.Payload, "base", base); // try renderToken(ais, tree, payload.lpipe, Space.None); - // try renderExpression(allocator, ais, tree, payload.error_symbol, Space.None); + // try renderExpression(ais, tree, payload.error_symbol, Space.None); // return renderToken(ais, tree, payload.rpipe, space); //}, @@ -1135,7 +1133,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // if (payload.ptr_token) |ptr_token| { // try renderToken(ais, tree, ptr_token, Space.None); // } - // try renderExpression(allocator, ais, tree, payload.value_symbol, Space.None); + // try renderExpression(ais, tree, payload.value_symbol, Space.None); // return renderToken(ais, tree, payload.rpipe, space); //}, @@ -1146,13 +1144,13 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // if (payload.ptr_token) |ptr_token| { // try renderToken(ais, tree, ptr_token, Space.None); // } - // try renderExpression(allocator, ais, tree, payload.value_symbol, Space.None); + // try renderExpression(ais, tree, payload.value_symbol, Space.None); // if 
(payload.index_symbol) |index_symbol| { // const comma = tree.nextToken(payload.value_symbol.lastToken()); // try renderToken(ais, tree, comma, Space.Space); - // try renderExpression(allocator, ais, tree, index_symbol, Space.None); + // try renderExpression(ais, tree, index_symbol, Space.None); // } // return renderToken(ais, tree, payload.rpipe, space); @@ -1164,7 +1162,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // try renderToken(ais, tree, grouped_expr.lparen, Space.None); // { // ais.pushIndentOneShot(); - // try renderExpression(allocator, ais, tree, grouped_expr.expr, Space.None); + // try renderExpression(ais, tree, grouped_expr.expr, Space.None); // } // return renderToken(ais, tree, grouped_expr.rparen, space); //}, @@ -1175,7 +1173,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // try renderToken(ais, tree, field_init.period_token, Space.None); // . // try renderToken(ais, tree, field_init.name_token, Space.Space); // name // try renderToken(ais, tree, tree.nextToken(field_init.name_token), Space.Space); // = - // return renderExpression(allocator, ais, tree, field_init.expr, space); + // return renderExpression(ais, tree, field_init.expr, space); //}, //.ContainerDecl => { @@ -1200,7 +1198,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // if (enum_tag_type) |expr| { // try renderToken(ais, tree, tree.nextToken(enum_token), Space.None); // ( - // try renderExpression(allocator, ais, tree, expr, Space.None); + // try renderExpression(ais, tree, expr, Space.None); // const rparen = tree.nextToken(expr.lastToken()); // try renderToken(ais, tree, rparen, Space.None); // ) @@ -1216,7 +1214,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // const rparen = tree.nextToken(type_expr.lastToken()); // try renderToken(ais, tree, lparen, Space.None); // ( - // try renderExpression(allocator, ais, tree, 
type_expr, Space.None); + // try renderExpression(ais, tree, type_expr, Space.None); // try renderToken(ais, tree, rparen, Space.Space); // ) // }, // } @@ -1317,7 +1315,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // try renderToken(ais, tree, err_set_decl.error_token, Space.None); // error // try renderToken(ais, tree, lbrace, Space.None); // { - // try renderExpression(allocator, ais, tree, node, Space.None); + // try renderExpression(ais, tree, node, Space.None); // return renderToken(ais, tree, err_set_decl.rbrace_token, space); // } // } @@ -1337,12 +1335,12 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // const decls = err_set_decl.decls(); // for (decls) |node, i| { // if (i + 1 < decls.len) { - // try renderExpression(allocator, ais, tree, node, Space.None); + // try renderExpression(ais, tree, node, Space.None); // try renderToken(ais, tree, tree.nextToken(node.lastToken()), Space.Newline); // , // try renderExtraNewline(tree, ais, decls[i + 1]); // } else { - // try renderExpression(allocator, ais, tree, node, Space.Comma); + // try renderExpression(ais, tree, node, Space.Comma); // } // } // } @@ -1354,14 +1352,14 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // const decls = err_set_decl.decls(); // for (decls) |node, i| { // if (i + 1 < decls.len) { - // try renderExpression(allocator, ais, tree, node, Space.None); + // try renderExpression(ais, tree, node, Space.None); // const comma_token = tree.nextToken(node.lastToken()); // assert(tree.token_tags[comma_token] == .Comma); // try renderToken(ais, tree, comma_token, Space.Space); // , // try renderExtraNewline(tree, ais, decls[i + 1]); // } else { - // try renderExpression(allocator, ais, tree, node, Space.Space); + // try renderExpression(ais, tree, node, Space.Space); // } // } @@ -1372,7 +1370,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac 
//.ErrorTag => { // const tag = @fieldParentPtr(ast.Node.ErrorTag, "base", base); - // try renderDocComments(tree, ais, tag, tag.doc_comments); + // try renderDocComments(ais, tree, tag, tag.doc_comments); // return renderToken(ais, tree, tag.name_token, space); // name //}, @@ -1431,7 +1429,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // if (param_node.*.tag == .MultilineStringLiteral or tree.token_tags[maybe_comment] == .LineComment) { // ais.pushIndentOneShot(); // } - // try renderExpression(allocator, ais, tree, param_node, Space.None); + // try renderExpression(ais, tree, param_node, Space.None); // if (i + 1 < params.len) { // const comma_token = tree.nextToken(param_node.lastToken()); @@ -1445,7 +1443,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // try renderToken(ais, tree, lparen, Space.Newline); // ( // for (builtin_call.params()) |param_node| { - // try renderExpression(allocator, ais, tree, param_node, Space.Comma); + // try renderExpression(ais, tree, param_node, Space.Comma); // } // } @@ -1468,7 +1466,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // } // if (fn_proto.getLibName()) |lib_name| { - // try renderExpression(allocator, ais, tree, lib_name, Space.Space); + // try renderExpression(ais, tree, lib_name, Space.Space); // } // const lparen = if (fn_proto.getNameToken()) |name_token| blk: { @@ -1541,7 +1539,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // try renderToken(ais, tree, align_kw, Space.None); // align // try renderToken(ais, tree, align_lparen, Space.None); // ( - // try renderExpression(allocator, ais, tree, align_expr, Space.None); + // try renderExpression(ais, tree, align_expr, Space.None); // try renderToken(ais, tree, align_rparen, Space.Space); // ) // } @@ -1552,7 +1550,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // try 
renderToken(ais, tree, section_kw, Space.None); // section // try renderToken(ais, tree, section_lparen, Space.None); // ( - // try renderExpression(allocator, ais, tree, section_expr, Space.None); + // try renderExpression(ais, tree, section_expr, Space.None); // try renderToken(ais, tree, section_rparen, Space.Space); // ) // } @@ -1563,7 +1561,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // try renderToken(ais, tree, callconv_kw, Space.None); // callconv // try renderToken(ais, tree, callconv_lparen, Space.None); // ( - // try renderExpression(allocator, ais, tree, callconv_expr, Space.None); + // try renderExpression(ais, tree, callconv_expr, Space.None); // try renderToken(ais, tree, callconv_rparen, Space.Space); // ) // } else if (fn_proto.getIsExternPrototype() != null) { // try ais.writer().writeAll("callconv(.C) "); @@ -1573,11 +1571,11 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // switch (fn_proto.return_type) { // .Explicit => |node| { - // return renderExpression(allocator, ais, tree, node, space); + // return renderExpression(ais, tree, node, space); // }, // .InferErrorSet => |node| { // try renderToken(ais, tree, tree.prevToken(node.firstToken()), Space.None); // ! 
- // return renderExpression(allocator, ais, tree, node, space); + // return renderExpression(ais, tree, node, space); // }, // .Invalid => unreachable, // } @@ -1589,7 +1587,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // if (anyframe_type.result) |result| { // try renderToken(ais, tree, anyframe_type.anyframe_token, Space.None); // anyframe // try renderToken(ais, tree, result.arrow_token, Space.None); // -> - // return renderExpression(allocator, ais, tree, result.return_type, space); + // return renderExpression(ais, tree, result.return_type, space); // } else { // return renderToken(ais, tree, anyframe_type.anyframe_token, space); // anyframe // } @@ -1607,13 +1605,13 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // const lbrace = tree.nextToken(rparen); // if (switch_node.cases_len == 0) { - // try renderExpression(allocator, ais, tree, switch_node.expr, Space.None); + // try renderExpression(ais, tree, switch_node.expr, Space.None); // try renderToken(ais, tree, rparen, Space.Space); // ) // try renderToken(ais, tree, lbrace, Space.None); // { // return renderToken(ais, tree, switch_node.rbrace, space); // } // } - // try renderExpression(allocator, ais, tree, switch_node.expr, Space.None); + // try renderExpression(ais, tree, switch_node.expr, Space.None); // try renderToken(ais, tree, rparen, Space.Space); // ) // { @@ -1623,7 +1621,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // const cases = switch_node.cases(); // for (cases) |node, i| { - // try renderExpression(allocator, ais, tree, node, Space.Comma); + // try renderExpression(ais, tree, node, Space.Comma); // if (i + 1 < cases.len) { // try renderExtraNewline(tree, ais, cases[i + 1]); @@ -1648,26 +1646,26 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // const items = switch_case.items(); // for (items) |node, i| { // if (i + 1 < items.len) { - 
// try renderExpression(allocator, ais, tree, node, Space.None); + // try renderExpression(ais, tree, node, Space.None); // const comma_token = tree.nextToken(node.lastToken()); // try renderToken(ais, tree, comma_token, Space.Space); // , // try renderExtraNewline(tree, ais, items[i + 1]); // } else { - // try renderExpression(allocator, ais, tree, node, Space.Space); + // try renderExpression(ais, tree, node, Space.Space); // } // } // } else { // const items = switch_case.items(); // for (items) |node, i| { // if (i + 1 < items.len) { - // try renderExpression(allocator, ais, tree, node, Space.None); + // try renderExpression(ais, tree, node, Space.None); // const comma_token = tree.nextToken(node.lastToken()); // try renderToken(ais, tree, comma_token, Space.Newline); // , // try renderExtraNewline(tree, ais, items[i + 1]); // } else { - // try renderExpression(allocator, ais, tree, node, Space.Comma); + // try renderExpression(ais, tree, node, Space.Comma); // } // } // } @@ -1675,10 +1673,10 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // try renderToken(ais, tree, switch_case.arrow_token, Space.Space); // => // if (switch_case.payload) |payload| { - // try renderExpression(allocator, ais, tree, payload, Space.Space); + // try renderExpression(ais, tree, payload, Space.Space); // } - // return renderExpression(allocator, ais, tree, switch_case.expr, space); + // return renderExpression(ais, tree, switch_case.expr, space); //}, //.SwitchElse => { // const switch_else = @fieldParentPtr(ast.Node.SwitchElse, "base", base); @@ -1695,15 +1693,15 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // if (else_node.payload) |payload| { // const payload_space = if (same_line) Space.Space else Space.Newline; - // try renderExpression(allocator, ais, tree, payload, payload_space); + // try renderExpression(ais, tree, payload, payload_space); // } // if (same_line) { - // return 
renderExpression(allocator, ais, tree, else_node.body, space); + // return renderExpression(ais, tree, else_node.body, space); // } else { // ais.pushIndent(); // defer ais.popIndent(); - // return renderExpression(allocator, ais, tree, else_node.body, space); + // return renderExpression(ais, tree, else_node.body, space); // } //}, @@ -1721,7 +1719,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // try renderToken(ais, tree, while_node.while_token, Space.Space); // while // try renderToken(ais, tree, tree.nextToken(while_node.while_token), Space.None); // ( - // try renderExpression(allocator, ais, tree, while_node.condition, Space.None); + // try renderExpression(ais, tree, while_node.condition, Space.None); // const cond_rparen = tree.nextToken(while_node.condition.lastToken()); @@ -1748,7 +1746,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // if (while_node.payload) |payload| { // const payload_space = if (while_node.continue_expr != null) Space.Space else block_start_space; - // try renderExpression(allocator, ais, tree, payload, payload_space); + // try renderExpression(ais, tree, payload, payload_space); // } // if (while_node.continue_expr) |continue_expr| { @@ -1759,7 +1757,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // try renderToken(ais, tree, colon, Space.Space); // : // try renderToken(ais, tree, lparen, Space.None); // ( - // try renderExpression(allocator, ais, tree, continue_expr, Space.None); + // try renderExpression(ais, tree, continue_expr, Space.None); // try renderToken(ais, tree, rparen, block_start_space); // ) // } @@ -1767,11 +1765,11 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // { // if (!body_is_block) ais.pushIndent(); // defer if (!body_is_block) ais.popIndent(); - // try renderExpression(allocator, ais, tree, while_node.body, after_body_space); + // try renderExpression(ais, 
tree, while_node.body, after_body_space); // } // if (while_node.@"else") |@"else"| { - // return renderExpression(allocator, ais, tree, &@"else".base, space); + // return renderExpression(ais, tree, &@"else".base, space); // } //}, @@ -1789,7 +1787,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // try renderToken(ais, tree, for_node.for_token, Space.Space); // for // try renderToken(ais, tree, tree.nextToken(for_node.for_token), Space.None); // ( - // try renderExpression(allocator, ais, tree, for_node.array_expr, Space.None); + // try renderExpression(ais, tree, for_node.array_expr, Space.None); // const rparen = tree.nextToken(for_node.array_expr.lastToken()); @@ -1800,7 +1798,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // try renderToken(ais, tree, rparen, Space.Space); // ) // const space_after_payload = if (body_on_same_line) Space.Space else Space.Newline; - // try renderExpression(allocator, ais, tree, for_node.payload, space_after_payload); // |x| + // try renderExpression(ais, tree, for_node.payload, space_after_payload); // |x| // const space_after_body = blk: { // if (for_node.@"else") |@"else"| { @@ -1818,11 +1816,11 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // { // if (!body_on_same_line) ais.pushIndent(); // defer if (!body_on_same_line) ais.popIndent(); - // try renderExpression(allocator, ais, tree, for_node.body, space_after_body); // { body } + // try renderExpression(ais, tree, for_node.body, space_after_body); // { body } // } // if (for_node.@"else") |@"else"| { - // return renderExpression(allocator, ais, tree, &@"else".base, space); // else + // return renderExpression(ais, tree, &@"else".base, space); // else // } //}, @@ -1835,7 +1833,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // try renderToken(ais, tree, if_node.if_token, Space.Space); // if // try renderToken(ais, tree, 
lparen, Space.None); // ( - // try renderExpression(allocator, ais, tree, if_node.condition, Space.None); // condition + // try renderExpression(ais, tree, if_node.condition, Space.None); // condition // const body_is_if_block = if_node.body.tag == .If; // const body_is_block = nodeIsBlock(if_node.body); @@ -1847,14 +1845,14 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // try renderToken(ais, tree, rparen, after_rparen_space); // ) // if (if_node.payload) |payload| { - // try renderExpression(allocator, ais, tree, payload, Space.BlockStart); // |x| + // try renderExpression(ais, tree, payload, Space.BlockStart); // |x| // } // if (if_node.@"else") |@"else"| { - // try renderExpression(allocator, ais, tree, if_node.body, Space.SpaceOrOutdent); - // return renderExpression(allocator, ais, tree, &@"else".base, space); + // try renderExpression(ais, tree, if_node.body, Space.SpaceOrOutdent); + // return renderExpression(ais, tree, &@"else".base, space); // } else { - // return renderExpression(allocator, ais, tree, if_node.body, space); + // return renderExpression(ais, tree, if_node.body, space); // } // } @@ -1870,7 +1868,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // } // if (if_node.payload) |payload| { - // try renderExpression(allocator, ais, tree, payload, Space.Newline); + // try renderExpression(ais, tree, payload, Space.Newline); // } // if (if_node.@"else") |@"else"| { @@ -1879,33 +1877,33 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // { // ais.pushIndent(); // defer ais.popIndent(); - // try renderExpression(allocator, ais, tree, if_node.body, Space.Newline); + // try renderExpression(ais, tree, if_node.body, Space.Newline); // } // if (else_is_block) { // try renderToken(ais, tree, @"else".else_token, Space.Space); // else // if (@"else".payload) |payload| { - // try renderExpression(allocator, ais, tree, payload, Space.Space); + // try 
renderExpression(ais, tree, payload, Space.Space); // } - // return renderExpression(allocator, ais, tree, @"else".body, space); + // return renderExpression(ais, tree, @"else".body, space); // } else { // const after_else_space = if (@"else".payload == null) Space.Newline else Space.Space; // try renderToken(ais, tree, @"else".else_token, after_else_space); // else // if (@"else".payload) |payload| { - // try renderExpression(allocator, ais, tree, payload, Space.Newline); + // try renderExpression(ais, tree, payload, Space.Newline); // } // ais.pushIndent(); // defer ais.popIndent(); - // return renderExpression(allocator, ais, tree, @"else".body, space); + // return renderExpression(ais, tree, @"else".body, space); // } // } else { // ais.pushIndent(); // defer ais.popIndent(); - // return renderExpression(allocator, ais, tree, if_node.body, space); + // return renderExpression(ais, tree, if_node.body, space); // } // } @@ -1914,20 +1912,20 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // try renderToken(ais, tree, rparen, Space.Space); // ) // if (if_node.payload) |payload| { - // try renderExpression(allocator, ais, tree, payload, Space.Space); + // try renderExpression(ais, tree, payload, Space.Space); // } // if (if_node.@"else") |@"else"| { - // try renderExpression(allocator, ais, tree, if_node.body, Space.Space); + // try renderExpression(ais, tree, if_node.body, Space.Space); // try renderToken(ais, tree, @"else".else_token, Space.Space); // if (@"else".payload) |payload| { - // try renderExpression(allocator, ais, tree, payload, Space.Space); + // try renderExpression(ais, tree, payload, Space.Space); // } - // return renderExpression(allocator, ais, tree, @"else".body, space); + // return renderExpression(ais, tree, @"else".body, space); // } else { - // return renderExpression(allocator, ais, tree, if_node.body, space); + // return renderExpression(ais, tree, if_node.body, space); // } //}, @@ -1948,11 +1946,11 @@ 
fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // defer ais.popIndent(); // if (asm_node.outputs.len == 0 and asm_node.inputs.len == 0 and asm_node.clobbers.len == 0) { - // try renderExpression(allocator, ais, tree, asm_node.template, Space.None); + // try renderExpression(ais, tree, asm_node.template, Space.None); // break :asmblk; // } - // try renderExpression(allocator, ais, tree, asm_node.template, Space.Newline); + // try renderExpression(ais, tree, asm_node.template, Space.Newline); // ais.setIndentDelta(asm_indent_delta); // defer ais.setIndentDelta(indent_delta); @@ -2027,10 +2025,10 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // defer ais.popIndent(); // for (asm_node.clobbers) |clobber_node, i| { // if (i + 1 >= asm_node.clobbers.len) { - // try renderExpression(allocator, ais, tree, clobber_node, Space.Newline); + // try renderExpression(ais, tree, clobber_node, Space.Newline); // break :asmblk; // } else { - // try renderExpression(allocator, ais, tree, clobber_node, Space.None); + // try renderExpression(ais, tree, clobber_node, Space.None); // const comma = tree.nextToken(clobber_node.lastToken()); // try renderToken(ais, tree, comma, Space.Space); // , // } @@ -2081,7 +2079,7 @@ fn renderArrayType( defer if (do_indent) ais.popIndent(); try renderToken(ais, tree, lbracket, Space.None); // [ - try renderExpression(allocator, ais, tree, len_expr, new_space); + try renderExpression(ais, tree, len_expr, new_space); if (starts_with_comment) { try ais.maybeInsertNewline(); @@ -2089,7 +2087,7 @@ fn renderArrayType( if (opt_sentinel) |sentinel| { const colon_token = tree.prevToken(sentinel.firstToken()); try renderToken(ais, tree, colon_token, Space.None); // : - try renderExpression(allocator, ais, tree, sentinel, Space.None); + try renderExpression(ais, tree, sentinel, Space.None); } if (starts_with_comment) { try ais.maybeInsertNewline(); @@ -2097,7 +2095,7 @@ fn renderArrayType( } 
try renderToken(ais, tree, rbracket, Space.None); // ] - return renderExpression(allocator, ais, tree, rhs, space); + return renderExpression(ais, tree, rhs, space); } fn renderAsmOutput( @@ -2108,18 +2106,18 @@ fn renderAsmOutput( space: Space, ) Error!void { try ais.writer().writeAll("["); - try renderExpression(allocator, ais, tree, asm_output.symbolic_name, Space.None); + try renderExpression(ais, tree, asm_output.symbolic_name, Space.None); try ais.writer().writeAll("] "); - try renderExpression(allocator, ais, tree, asm_output.constraint, Space.None); + try renderExpression(ais, tree, asm_output.constraint, Space.None); try ais.writer().writeAll(" ("); switch (asm_output.kind) { .Variable => |variable_name| { - try renderExpression(allocator, ais, tree, &variable_name.base, Space.None); + try renderExpression(ais, tree, &variable_name.base, Space.None); }, .Return => |return_type| { try ais.writer().writeAll("-> "); - try renderExpression(allocator, ais, tree, return_type, Space.None); + try renderExpression(ais, tree, return_type, Space.None); }, } @@ -2134,11 +2132,11 @@ fn renderAsmInput( space: Space, ) Error!void { try ais.writer().writeAll("["); - try renderExpression(allocator, ais, tree, asm_input.symbolic_name, Space.None); + try renderExpression(ais, tree, asm_input.symbolic_name, Space.None); try ais.writer().writeAll("] "); - try renderExpression(allocator, ais, tree, asm_input.constraint, Space.None); + try renderExpression(ais, tree, asm_input.constraint, Space.None); try ais.writer().writeAll(" ("); - try renderExpression(allocator, ais, tree, asm_input.expr, Space.None); + try renderExpression(ais, tree, asm_input.expr, Space.None); return renderToken(ais, tree, asm_input.lastToken(), space); // ) } @@ -2156,7 +2154,7 @@ fn renderVarDecl( try renderToken(ais, tree, extern_export_token, Space.Space); // extern if (var_decl.getLibName()) |lib_name| { - try renderExpression(allocator, ais, tree, lib_name, Space.Space); // "lib" + try 
renderExpression(ais, tree, lib_name, Space.Space); // "lib" } } @@ -2183,7 +2181,7 @@ fn renderVarDecl( const s = if (var_decl.getAlignNode() != null or var_decl.getSectionNode() != null or var_decl.getInitNode() != null) Space.Space else Space.None; - try renderExpression(allocator, ais, tree, type_node, s); + try renderExpression(ais, tree, type_node, s); } if (var_decl.getAlignNode()) |align_node| { @@ -2192,7 +2190,7 @@ fn renderVarDecl( const rparen = tree.nextToken(align_node.lastToken()); try renderToken(ais, tree, align_kw, Space.None); // align try renderToken(ais, tree, lparen, Space.None); // ( - try renderExpression(allocator, ais, tree, align_node, Space.None); + try renderExpression(ais, tree, align_node, Space.None); const s = if (var_decl.getSectionNode() != null or var_decl.getInitNode() != null) Space.Space else Space.None; try renderToken(ais, tree, rparen, s); // ) } @@ -2203,7 +2201,7 @@ fn renderVarDecl( const rparen = tree.nextToken(section_node.lastToken()); try renderToken(ais, tree, section_kw, Space.None); // linksection try renderToken(ais, tree, lparen, Space.None); // ( - try renderExpression(allocator, ais, tree, section_node, Space.None); + try renderExpression(ais, tree, section_node, Space.None); const s = if (var_decl.getInitNode() != null) Space.Space else Space.None; try renderToken(ais, tree, rparen, s); // ) } @@ -2221,7 +2219,7 @@ fn renderVarDecl( try renderToken(ais, tree, eq_token, eq_space); // = } ais.pushIndentOneShot(); - try renderExpression(allocator, ais, tree, init_node, Space.None); + try renderExpression(ais, tree, init_node, Space.None); } try renderToken(ais, tree, var_decl.semicolon_token, Space.Newline); @@ -2234,7 +2232,7 @@ fn renderParamDecl( param_decl: ast.Node.FnProto.ParamDecl, space: Space, ) Error!void { - try renderDocComments(tree, ais, param_decl, param_decl.doc_comments); + try renderDocComments(ais, tree, param_decl, param_decl.doc_comments); if (param_decl.comptime_token) |comptime_token| { 
try renderToken(ais, tree, comptime_token, Space.Space); @@ -2247,7 +2245,7 @@ fn renderParamDecl( try renderToken(ais, tree, tree.nextToken(name_token), Space.Space); // : } switch (param_decl.param_type) { - .any_type, .type_expr => |node| try renderExpression(allocator, ais, tree, node, space), + .any_type, .type_expr => |node| try renderExpression(ais, tree, node, space), } } @@ -2260,13 +2258,13 @@ fn renderStatement(ais: *Ais, tree: ast.Tree, base: ast.Node.Index) Error!void { // }, // else => { // if (base.requireSemiColon()) { - // try renderExpression(allocator, ais, tree, base, Space.None); + // try renderExpression(ais, tree, base, Space.None); // const semicolon_index = tree.nextToken(base.lastToken()); // assert(tree.token_tags[semicolon_index] == .Semicolon); // try renderToken(ais, tree, semicolon_index, Space.Newline); // } else { - // try renderExpression(allocator, ais, tree, base, Space.Newline); + // try renderExpression(ais, tree, base, Space.Newline); // } // }, //} @@ -2338,34 +2336,32 @@ fn renderToken(ais: *Ais, tree: ast.Tree, token_index: ast.TokenIndex, space: Sp } } -fn renderDocComments( - tree: ast.Tree, - ais: *Ais, - node: anytype, - doc_comments: ?ast.Node.Index.DocComment, -) Error!void { - const comment = doc_comments orelse return; - return renderDocCommentsToken(tree, ais, comment, node.firstToken()); -} +/// end_token is the token one past the last doc comment token. This function +/// searches backwards from there. +fn renderDocComments(ais: *Ais, tree: ast.Tree, end_token: ast.TokenIndex) Error!void { + // Search backwards for the first doc comment. 
+ const token_tags = tree.tokens.items(.tag); + if (end_token == 0) return; + var tok = end_token - 1; + while (token_tags[tok] == .DocComment) { + if (tok == 0) break; + tok -= 1; + } else { + tok += 1; + } + const first_tok = tok; + if (tok == end_token) return; -fn renderDocCommentsToken( - tree: ast.Tree, - ais: *Ais, - comment: ast.Node.Index.DocComment, - first_token: ast.TokenIndex, -) Error!void { - var tok_i = comment.first_line; - while (true) : (tok_i += 1) { - switch (tree.token_tags[tok_i]) { - .DocComment, .ContainerDocComment => { - if (comment.first_line < first_token) { - try renderToken(ais, tree, tok_i, Space.Newline); + while (true) : (tok += 1) { + switch (token_tags[tok]) { + .DocComment => { + if (first_tok < end_token) { + try renderToken(ais, tree, tok, .Newline); } else { - try renderToken(ais, tree, tok_i, Space.NoComment); + try renderToken(ais, tree, tok, .NoComment); try ais.insertNewline(); } }, - .LineComment => continue, else => break, } } From 1a83b29bea486050544acc7935c0c438da2c2654 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Tue, 2 Feb 2021 21:05:53 -0700 Subject: [PATCH 008/173] zig fmt: implement if, call, field access, assignment --- lib/std/zig/ast.zig | 377 ++++++++++++++---- lib/std/zig/parse.zig | 228 +++++------ lib/std/zig/parser_test.zig | 20 +- lib/std/zig/render.zig | 760 ++++++++++++++++++------------------ 4 files changed, 801 insertions(+), 584 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 3fd34cd03c..7040028cf2 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -16,11 +16,7 @@ pub const TokenList = std.MultiArrayList(struct { tag: Token.Tag, start: ByteOffset, }); -pub const NodeList = std.MultiArrayList(struct { - tag: Node.Tag, - main_token: TokenIndex, - data: Node.Data, -}); +pub const NodeList = std.MultiArrayList(Node); pub const Tree = struct { /// Reference to externally-owned data. 
@@ -76,6 +72,16 @@ pub const Tree = struct { return loc; } + pub fn extraData(tree: Tree, index: usize, comptime T: type) T { + const fields = std.meta.fields(T); + var result: T = undefined; + inline for (fields) |field, i| { + comptime assert(field.field_type == Node.Index); + @field(result, field.name) = tree.extra_data[index + i]; + } + return result; + } + pub fn renderError(tree: Tree, parse_error: Error, stream: anytype) !void { const tokens = tree.tokens.items(.tag); switch (parse_error) { @@ -189,7 +195,8 @@ pub const Tree = struct { const tags = tree.nodes.items(.tag); const datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - switch (tags[node]) { + var n = node; + while (true) switch (tags[n]) { .Root => return 0, .UsingNamespace, @@ -210,20 +217,25 @@ pub const Tree = struct { .StructInitDot, .Switch, .IfSimple, - .IfSimpleOptional, .If, - .IfOptional, - .IfError, .Suspend, .Resume, .Continue, .Break, .Return, .AnyFrameType, - .OneToken, .Identifier, + .AnyFrameLiteral, + .CharLiteral, + .IntegerLiteral, + .FloatLiteral, + .FalseLiteral, + .TrueLiteral, + .NullLiteral, + .UndefinedLiteral, + .UnreachableLiteral, .EnumLiteral, - .MultilineStringLiteral, + .StringLiteral, .GroupedExpression, .BuiltinCallTwo, .BuiltinCall, @@ -234,7 +246,7 @@ pub const Tree = struct { .Block, .AsmSimple, .Asm, - => return main_tokens[node], + => return main_tokens[n], .Catch, .FieldAccess, @@ -290,7 +302,7 @@ pub const Tree = struct { .SwitchCaseOne, .SwitchRange, .FnDecl, - => return tree.firstToken(datas[node].lhs), + => n = datas[n].lhs, .GlobalVarDecl, .LocalVarDecl, @@ -305,12 +317,8 @@ pub const Tree = struct { .StructInit, .SwitchCaseMulti, .WhileSimple, - .WhileSimpleOptional, .WhileCont, - .WhileContOptional, .While, - .WhileOptional, - .WhileError, .ForSimple, .For, .FnProtoSimple, @@ -329,19 +337,19 @@ pub const Tree = struct { .ErrorValue, .ErrorUnion, => @panic("TODO finish implementing firstToken"), - } + }; } pub fn 
lastToken(tree: Tree, node: Node.Index) TokenIndex { const tags = tree.nodes.items(.tag); const datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - switch (tags[node]) { - .Root, + var n = node; + var end_offset: TokenIndex = 0; + while (true) switch (tags[n]) { + .Root => return @intCast(TokenIndex, tree.tokens.len - 1), + .UsingNamespace, - .TestDecl, - .ErrDefer, - .Defer, .BoolNot, .Negation, .BitNot, @@ -350,39 +358,16 @@ pub const Tree = struct { .Try, .Await, .OptionalType, - .ArrayInitDotTwo, - .ArrayInitDot, - .StructInitDotTwo, - .StructInitDot, - .Switch, - .IfSimple, - .IfSimpleOptional, - .If, - .IfOptional, - .IfError, .Suspend, .Resume, - .Continue, .Break, .Return, - .AnyFrameType, - .OneToken, - .Identifier, - .EnumLiteral, - .MultilineStringLiteral, - .GroupedExpression, - .BuiltinCallTwo, - .BuiltinCall, - .ErrorSetDecl, - .AnyType, - .Comptime, - .Nosuspend, - .Block, - .AsmSimple, - .Asm, + => n = datas[n].lhs, + + .TestDecl, + .ErrDefer, + .Defer, .Catch, - .FieldAccess, - .UnwrapOptional, .EqualEqual, .BangEqual, .LessThan, @@ -422,6 +407,63 @@ pub const Tree = struct { .OrElse, .BoolAnd, .BoolOr, + .AnyFrameType, + .ErrorUnion, + .Comptime, + .Nosuspend, + .IfSimple, + .WhileSimple, + => n = datas[n].rhs, + + .FieldAccess, + .UnwrapOptional, + .GroupedExpression, + .StringLiteral, + => return datas[n].rhs + end_offset, + + .AnyType, + .AnyFrameLiteral, + .CharLiteral, + .IntegerLiteral, + .FloatLiteral, + .FalseLiteral, + .TrueLiteral, + .NullLiteral, + .UndefinedLiteral, + .UnreachableLiteral, + => return main_tokens[n] + end_offset, + + .Call => { + end_offset += 1; // for the `)` + const params = tree.extraData(datas[n].rhs, Node.SubRange); + if (params.end - params.start == 0) { + return main_tokens[n] + end_offset; + } + n = tree.extra_data[params.end - 1]; // last parameter + }, + .CallOne => { + end_offset += 1; // for the `)` + if (datas[n].rhs == 0) { + return main_tokens[n] + end_offset; + } + n = 
datas[n].rhs; + }, + + .ArrayInitDotTwo, + .ArrayInitDot, + .StructInitDotTwo, + .StructInitDot, + .Switch, + .If, + .Continue, + .Identifier, + .EnumLiteral, + .BuiltinCallTwo, + .BuiltinCall, + .ErrorSetDecl, + .Block, + .AsmSimple, + .Asm, .SliceOpen, .Slice, .Deref, @@ -429,8 +471,6 @@ pub const Tree = struct { .ArrayInitOne, .ArrayInit, .StructInitOne, - .CallOne, - .Call, .SwitchCaseOne, .SwitchRange, .FnDecl, @@ -446,13 +486,8 @@ pub const Tree = struct { .SliceType, .StructInit, .SwitchCaseMulti, - .WhileSimple, - .WhileSimpleOptional, .WhileCont, - .WhileContOptional, .While, - .WhileOptional, - .WhileError, .ForSimple, .For, .FnProtoSimple, @@ -469,10 +504,176 @@ pub const Tree = struct { .AsmOutput, .AsmInput, .ErrorValue, - .ErrorUnion, => @panic("TODO finish implementing lastToken"), - } + }; } + + pub fn tokensOnSameLine(tree: Tree, token1: TokenIndex, token2: TokenIndex) bool { + const token_starts = tree.tokens.items(.start); + const source = tree.source[token_starts[token1]..token_starts[token2]]; + return mem.indexOfScalar(u8, source, '\n') == null; + } + + pub fn globalVarDecl(tree: Tree, node: Node.Index) Full.VarDecl { + assert(tree.nodes.items(.tag)[node] == .GlobalVarDecl); + const data = tree.nodes.items(.data)[node]; + const extra = tree.extraData(data.lhs, Node.GlobalVarDecl); + return tree.fullVarDecl(.{ + .type_node = extra.type_node, + .align_node = extra.align_node, + .section_node = extra.section_node, + .init_node = data.rhs, + .mut_token = tree.nodes.items(.main_token)[node], + }); + } + + pub fn localVarDecl(tree: Tree, node: Node.Index) Full.VarDecl { + assert(tree.nodes.items(.tag)[node] == .LocalVarDecl); + const data = tree.nodes.items(.data)[node]; + const extra = tree.extraData(data.lhs, Node.LocalVarDecl); + return tree.fullVarDecl(.{ + .type_node = extra.type_node, + .align_node = extra.align_node, + .section_node = 0, + .init_node = data.rhs, + .mut_token = tree.nodes.items(.main_token)[node], + }); + } + + pub fn 
simpleVarDecl(tree: Tree, node: Node.Index) Full.VarDecl { + assert(tree.nodes.items(.tag)[node] == .SimpleVarDecl); + const data = tree.nodes.items(.data)[node]; + return tree.fullVarDecl(.{ + .type_node = data.lhs, + .align_node = 0, + .section_node = 0, + .init_node = data.rhs, + .mut_token = tree.nodes.items(.main_token)[node], + }); + } + + pub fn alignedVarDecl(tree: Tree, node: Node.Index) Full.VarDecl { + assert(tree.nodes.items(.tag)[node] == .AlignedVarDecl); + const data = tree.nodes.items(.data)[node]; + return tree.fullVarDecl(.{ + .type_node = 0, + .align_node = data.lhs, + .section_node = 0, + .init_node = data.rhs, + .mut_token = tree.nodes.items(.main_token)[node], + }); + } + + pub fn ifSimple(tree: Tree, node: Node.Index) Full.If { + assert(tree.nodes.items(.tag)[node] == .IfSimple); + const data = tree.nodes.items(.data)[node]; + return tree.fullIf(.{ + .cond_expr = data.lhs, + .then_expr = data.rhs, + .else_expr = 0, + .if_token = tree.nodes.items(.main_token)[node], + }); + } + + pub fn ifFull(tree: Tree, node: Node.Index) Full.If { + assert(tree.nodes.items(.tag)[node] == .If); + const data = tree.nodes.items(.data)[node]; + const extra = tree.extraData(data.rhs, Node.If); + return tree.fullIf(.{ + .cond_expr = data.lhs, + .then_expr = extra.then_expr, + .else_expr = extra.else_expr, + .if_token = tree.nodes.items(.main_token)[node], + }); + } + + fn fullVarDecl(tree: Tree, info: Full.VarDecl.Ast) Full.VarDecl { + const token_tags = tree.tokens.items(.tag); + var result: Full.VarDecl = .{ + .ast = info, + .visib_token = null, + .extern_export_token = null, + .lib_name = null, + .threadlocal_token = null, + .comptime_token = null, + }; + var i = info.mut_token; + while (i > 0) { + i -= 1; + switch (token_tags[i]) { + .Keyword_extern, .Keyword_export => result.extern_export_token = i, + .Keyword_comptime => result.comptime_token = i, + .Keyword_pub => result.visib_token = i, + .Keyword_threadlocal => result.threadlocal_token = i, + 
.StringLiteral => result.lib_name = i, + else => break, + } + } + return result; + } + + fn fullIf(tree: Tree, info: Full.If.Ast) Full.If { + const token_tags = tree.tokens.items(.tag); + var result: Full.If = .{ + .ast = info, + .payload_token = null, + .error_token = null, + .else_token = undefined, + }; + // if (cond_expr) |x| + // ^ ^ + const payload_pipe = tree.lastToken(info.cond_expr) + 2; + if (token_tags[payload_pipe] == .Pipe) { + result.payload_token = payload_pipe + 1; + } + if (info.else_expr != 0) { + // then_expr else |x| + // ^ ^ + result.else_token = tree.lastToken(info.then_expr) + 1; + if (token_tags[result.else_token + 1] == .Pipe) { + result.error_token = result.else_token + 2; + } + } + return result; + } +}; + +/// Fully assembled AST node information. +pub const Full = struct { + pub const VarDecl = struct { + visib_token: ?TokenIndex, + extern_export_token: ?TokenIndex, + lib_name: ?TokenIndex, + threadlocal_token: ?TokenIndex, + comptime_token: ?TokenIndex, + ast: Ast, + + pub const Ast = struct { + mut_token: TokenIndex, + type_node: Node.Index, + align_node: Node.Index, + section_node: Node.Index, + init_node: Node.Index, + }; + }; + + pub const If = struct { + // Points to the first token after the `|`. Will either be an identifier or + // a `*` (with an identifier immediately after it). + payload_token: ?TokenIndex, + // Points to the identifier after the `|`. + error_token: ?TokenIndex, + // Populated only if else_expr != 0. + else_token: TokenIndex, + ast: Ast, + + pub const Ast = struct { + if_token: TokenIndex, + cond_expr: Node.Index, + then_expr: Node.Index, + else_expr: Node.Index, + }; + }; }; pub const Error = union(enum) { @@ -706,7 +907,9 @@ pub const Error = union(enum) { }; pub const Node = struct { - index: Index, + tag: Tag, + main_token: TokenIndex, + data: Data, pub const Index = u32; @@ -718,22 +921,26 @@ pub const Node = struct { pub const Tag = enum { /// sub_list[lhs...rhs] Root, - /// lhs is the sub-expression. 
rhs is unused. + /// `usingnamespace lhs;`. rhs unused. main_token is `usingnamespace`. UsingNamespace, /// lhs is test name token (must be string literal), if any. /// rhs is the body node. TestDecl, /// lhs is the index into extra_data. /// rhs is the initialization expression, if any. + /// main_token is `var` or `const`. GlobalVarDecl, /// `var a: x align(y) = rhs` - /// lhs is the index into local_var_decl_list. + /// lhs is the index into extra_data. + /// main_token is `var` or `const`. LocalVarDecl, /// `var a: lhs = rhs`. lhs and rhs may be unused. /// Can be local or global. + /// main_token is `var` or `const`. SimpleVarDecl, /// `var a align(lhs) = rhs`. lhs and rhs may be unused. /// Can be local or global. + /// main_token is `var` or `const`. AlignedVarDecl, /// lhs is the identifier token payload if any, /// rhs is the deferred expression. @@ -901,33 +1108,26 @@ pub const Node = struct { /// `lhs...rhs`. SwitchRange, /// `while (lhs) rhs`. - WhileSimple, /// `while (lhs) |x| rhs`. - WhileSimpleOptional, + WhileSimple, + /// `while (lhs) : (a) b`. `WhileCont[rhs]`. /// `while (lhs) : (a) b`. `WhileCont[rhs]`. WhileCont, - /// `while (lhs) : (a) b`. `WhileCont[rhs]`. - WhileContOptional, /// `while (lhs) : (a) b else c`. `While[rhs]`. - While, /// `while (lhs) |x| : (a) b else c`. `While[rhs]`. - WhileOptional, /// `while (lhs) |x| : (a) b else |y| c`. `While[rhs]`. - WhileError, + While, /// `for (lhs) rhs`. ForSimple, /// `for (lhs) a else b`. `if_list[rhs]`. For, /// `if (lhs) rhs`. - IfSimple, /// `if (lhs) |a| rhs`. - IfSimpleOptional, + IfSimple, /// `if (lhs) a else b`. `if_list[rhs]`. - If, /// `if (lhs) |x| a else b`. `if_list[rhs]`. - IfOptional, /// `if (lhs) |x| a else |y| b`. `if_list[rhs]`. - IfError, + If, /// `suspend lhs`. lhs can be omitted. rhs is unused. Suspend, /// `resume lhs`. rhs is unused. @@ -955,10 +1155,24 @@ pub const Node = struct { FnDecl, /// `anyframe->rhs`. main_token is `anyframe`. `lhs` is arrow token index. 
AnyFrameType, - /// Could be integer literal, float literal, char literal, bool literal, - /// null literal, undefined literal, unreachable, depending on the token. /// Both lhs and rhs unused. - OneToken, + AnyFrameLiteral, + /// Both lhs and rhs unused. + CharLiteral, + /// Both lhs and rhs unused. + IntegerLiteral, + /// Both lhs and rhs unused. + FloatLiteral, + /// Both lhs and rhs unused. + FalseLiteral, + /// Both lhs and rhs unused. + TrueLiteral, + /// Both lhs and rhs unused. + NullLiteral, + /// Both lhs and rhs unused. + UndefinedLiteral, + /// Both lhs and rhs unused. + UnreachableLiteral, /// Both lhs and rhs unused. /// Most identifiers will not have explicit AST nodes, however for expressions /// which could be one of many different kinds of AST nodes, there will be an @@ -966,8 +1180,11 @@ pub const Node = struct { Identifier, /// lhs is the dot token index, rhs unused, main_token is the identifier. EnumLiteral, - /// Both lhs and rhs unused. - MultilineStringLiteral, + /// main_token is the first token index (redundant with lhs) + /// lhs is the first token index; rhs is the last token index. + /// Could be a series of MultilineStringLiteralLine tokens, or a single + /// StringLiteral token. + StringLiteral, /// `(lhs)`. main_token is the `(`; rhs is the token index of the `)`. GroupedExpression, /// `@a(lhs, rhs)`. lhs and rhs may be omitted. 
diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig index 05efea7fe1..e5cac445c1 100644 --- a/lib/std/zig/parse.zig +++ b/lib/std/zig/parse.zig @@ -832,7 +832,7 @@ const Parser = struct { } if (p.eatToken(.Semicolon)) |_| { return p.addNode(.{ - .tag = if (then_payload == 0) .IfSimple else .IfSimpleOptional, + .tag = .IfSimple, .main_token = if_token, .data = .{ .lhs = condition, @@ -848,7 +848,7 @@ const Parser = struct { return p.fail(.{ .ExpectedSemiOrElse = .{ .token = p.tok_i } }); } return p.addNode(.{ - .tag = if (then_payload == 0) .IfSimple else .IfSimpleOptional, + .tag = .IfSimple, .main_token = if_token, .data = .{ .lhs = condition, @@ -858,14 +858,8 @@ const Parser = struct { }; const else_payload = try p.parsePayload(); const else_expr = try p.expectStatement(); - const tag = if (else_payload != 0) - Node.Tag.IfError - else if (then_payload != 0) - Node.Tag.IfOptional - else - Node.Tag.If; return p.addNode(.{ - .tag = tag, + .tag = .If, .main_token = if_token, .data = .{ .lhs = condition, @@ -994,7 +988,7 @@ const Parser = struct { if (p.eatToken(.Semicolon)) |_| { if (continue_expr == 0) { return p.addNode(.{ - .tag = if (then_payload == 0) .WhileSimple else .WhileSimpleOptional, + .tag = .WhileSimple, .main_token = while_token, .data = .{ .lhs = condition, @@ -1003,7 +997,7 @@ const Parser = struct { }); } else { return p.addNode(.{ - .tag = if (then_payload == 0) .WhileCont else .WhileContOptional, + .tag = .WhileCont, .main_token = while_token, .data = .{ .lhs = condition, @@ -1024,7 +1018,7 @@ const Parser = struct { } if (continue_expr == 0) { return p.addNode(.{ - .tag = if (then_payload == 0) .WhileSimple else .WhileSimpleOptional, + .tag = .WhileSimple, .main_token = while_token, .data = .{ .lhs = condition, @@ -1033,7 +1027,7 @@ const Parser = struct { }); } else { return p.addNode(.{ - .tag = if (then_payload == 0) .WhileCont else .WhileContOptional, + .tag = .WhileCont, .main_token = while_token, .data = .{ .lhs = condition, @@ 
-1047,14 +1041,8 @@ const Parser = struct { }; const else_payload = try p.parsePayload(); const else_expr = try p.expectStatement(); - const tag = if (else_payload != 0) - Node.Tag.WhileError - else if (then_payload != 0) - Node.Tag.WhileOptional - else - Node.Tag.While; return p.addNode(.{ - .tag = tag, + .tag = .While, .main_token = while_token, .data = .{ .lhs = condition, @@ -1948,7 +1936,7 @@ const Parser = struct { const else_token = p.eatToken(.Keyword_else) orelse { if (continue_expr == 0) { return p.addNode(.{ - .tag = if (then_payload == 0) .WhileSimple else .WhileSimpleOptional, + .tag = .WhileSimple, .main_token = while_token, .data = .{ .lhs = condition, @@ -1957,7 +1945,7 @@ const Parser = struct { }); } else { return p.addNode(.{ - .tag = if (then_payload == 0) .WhileCont else .WhileContOptional, + .tag = .WhileCont, .main_token = while_token, .data = .{ .lhs = condition, @@ -1971,14 +1959,8 @@ const Parser = struct { }; const else_payload = try p.parsePayload(); const else_expr = try p.expectExpr(); - const tag = if (else_payload != 0) - Node.Tag.WhileError - else if (then_payload != 0) - Node.Tag.WhileOptional - else - Node.Tag.While; return p.addNode(.{ - .tag = tag, + .tag = .While, .main_token = while_token, .data = .{ .lhs = condition, @@ -2229,24 +2211,89 @@ const Parser = struct { /// LoopTypeExpr <- KEYWORD_inline? 
(ForTypeExpr / WhileTypeExpr) fn parsePrimaryTypeExpr(p: *Parser) !Node.Index { switch (p.token_tags[p.tok_i]) { - .CharLiteral, - .IntegerLiteral, - .FloatLiteral, - .StringLiteral, - .Keyword_false, - .Keyword_true, - .Keyword_null, - .Keyword_undefined, - .Keyword_unreachable, - .Keyword_anyframe, - => return p.addNode(.{ - .tag = .OneToken, + .CharLiteral => return p.addNode(.{ + .tag = .CharLiteral, .main_token = p.nextToken(), .data = .{ .lhs = undefined, .rhs = undefined, }, }), + .IntegerLiteral => return p.addNode(.{ + .tag = .IntegerLiteral, + .main_token = p.nextToken(), + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, + }), + .FloatLiteral => return p.addNode(.{ + .tag = .FloatLiteral, + .main_token = p.nextToken(), + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, + }), + .Keyword_false => return p.addNode(.{ + .tag = .FalseLiteral, + .main_token = p.nextToken(), + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, + }), + .Keyword_true => return p.addNode(.{ + .tag = .TrueLiteral, + .main_token = p.nextToken(), + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, + }), + .Keyword_null => return p.addNode(.{ + .tag = .NullLiteral, + .main_token = p.nextToken(), + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, + }), + .Keyword_undefined => return p.addNode(.{ + .tag = .UndefinedLiteral, + .main_token = p.nextToken(), + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, + }), + .Keyword_unreachable => return p.addNode(.{ + .tag = .UnreachableLiteral, + .main_token = p.nextToken(), + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, + }), + .Keyword_anyframe => return p.addNode(.{ + .tag = .AnyFrameLiteral, + .main_token = p.nextToken(), + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, + }), + .StringLiteral => { + const main_token = p.nextToken(); + return p.addNode(.{ + .tag = .StringLiteral, + .main_token = main_token, + .data = .{ + .lhs = main_token, + .rhs = main_token, + }, + }); + }, .Builtin 
=> return p.parseBuiltinCall(), .Keyword_fn => return p.parseFnProto(), @@ -2280,11 +2327,11 @@ const Parser = struct { p.tok_i += 1; } return p.addNode(.{ - .tag = .OneToken, + .tag = .StringLiteral, .main_token = first_line, .data = .{ - .lhs = undefined, - .rhs = undefined, + .lhs = first_line, + .rhs = p.tok_i - 1, }, }); }, @@ -2641,7 +2688,7 @@ const Parser = struct { const else_token = p.eatToken(.Keyword_else) orelse { if (continue_expr == 0) { return p.addNode(.{ - .tag = if (then_payload == 0) .WhileSimple else .WhileSimpleOptional, + .tag = .WhileSimple, .main_token = while_token, .data = .{ .lhs = condition, @@ -2650,7 +2697,7 @@ const Parser = struct { }); } else { return p.addNode(.{ - .tag = if (then_payload == 0) .WhileCont else .WhileContOptional, + .tag = .WhileCont, .main_token = while_token, .data = .{ .lhs = condition, @@ -2664,14 +2711,8 @@ const Parser = struct { }; const else_payload = try p.parsePayload(); const else_expr = try p.expectTypeExpr(); - const tag = if (else_payload != 0) - Node.Tag.WhileError - else if (then_payload != 0) - Node.Tag.WhileOptional - else - Node.Tag.While; return p.addNode(.{ - .tag = tag, + .tag = .While, .main_token = while_token, .data = .{ .lhs = condition, @@ -3450,7 +3491,7 @@ const Parser = struct { }); // Pretend this was an identifier so we can continue parsing. 
return p.addNode(.{ - .tag = .OneToken, + .tag = .Identifier, .main_token = builtin_token, .data = .{ .lhs = undefined, @@ -3470,59 +3511,31 @@ const Parser = struct { }); } - fn parseOneToken(p: *Parser, token_tag: Token.Tag) !Node.Index { - const token = p.eatToken(token_tag) orelse return null_node; - return p.addNode(.{ - .tag = .OneToken, - .main_token = token, - .data = .{ - .lhs = undefined, - .rhs = undefined, - }, - }); - } - - fn expectOneToken(p: *Parser, token_tag: Token.Tag) !Node.Index { - const node = try p.expectOneTokenRecoverable(token_tag); - if (node == 0) return error.ParseError; - return node; - } - - fn expectOneTokenRecoverable(p: *Parser, token_tag: Token.Tag) !Node.Index { - const node = p.parseOneToken(token_tag); - if (node == 0) { - try p.warn(.{ - .ExpectedToken = .{ - .token = p.tok_i, - .expected_id = token_tag, - }, - }); - } - return node; - } - // string literal or multiline string literal fn parseStringLiteral(p: *Parser) !Node.Index { switch (p.token_tags[p.tok_i]) { - .StringLiteral => return p.addNode(.{ - .tag = .OneToken, - .main_token = p.nextToken(), - .data = .{ - .lhs = undefined, - .rhs = undefined, - }, - }), + .StringLiteral => { + const main_token = p.nextToken(); + return p.addNode(.{ + .tag = .StringLiteral, + .main_token = main_token, + .data = .{ + .lhs = main_token, + .rhs = main_token, + }, + }); + }, .MultilineStringLiteralLine => { const first_line = p.nextToken(); while (p.token_tags[p.tok_i] == .MultilineStringLiteralLine) { p.tok_i += 1; } return p.addNode(.{ - .tag = .OneToken, + .tag = .StringLiteral, .main_token = first_line, .data = .{ - .lhs = undefined, - .rhs = undefined, + .lhs = first_line, + .rhs = p.tok_i - 1, }, }); }, @@ -3539,11 +3552,14 @@ const Parser = struct { } fn expectIntegerLiteral(p: *Parser) !Node.Index { - const node = p.parseOneToken(.IntegerLiteral); - if (node != 0) { - return p.fail(.{ .ExpectedIntegerLiteral = .{ .token = p.tok_i } }); - } - return node; + return p.addNode(.{ 
+ .tag = .IntegerLiteral, + .main_token = try p.expectToken(.IntegerLiteral), + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, + }); } /// KEYWORD_if LPAREN Expr RPAREN PtrPayload? Body (KEYWORD_else Payload? Body)? @@ -3558,7 +3574,7 @@ const Parser = struct { if (then_expr == 0) return p.fail(.{ .InvalidToken = .{ .token = p.tok_i } }); const else_token = p.eatToken(.Keyword_else) orelse return p.addNode(.{ - .tag = if (then_payload == 0) .IfSimple else .IfSimpleOptional, + .tag = .IfSimple, .main_token = if_token, .data = .{ .lhs = condition, @@ -3569,14 +3585,8 @@ const Parser = struct { const else_expr = try bodyParseFn(p); if (else_expr == 0) return p.fail(.{ .InvalidToken = .{ .token = p.tok_i } }); - const tag = if (else_payload != 0) - Node.Tag.IfError - else if (then_payload != 0) - Node.Tag.IfOptional - else - Node.Tag.If; return p.addNode(.{ - .tag = tag, + .tag = .If, .main_token = if_token, .data = .{ .lhs = condition, diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 8d9c4b5cc8..7a22b9a6a1 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -81,16 +81,16 @@ test "zig fmt: doc comments on test" { ); } -//test "zig fmt: if statment" { -// try testCanonical( -// \\test "" { -// \\ if (optional()) |some| -// \\ bar = some.foo(); -// \\} -// \\ -// ); -//} -// +test "zig fmt: if statment" { + try testCanonical( + \\test "" { + \\ if (optional()) |some| + \\ bar = some.foo(); + \\} + \\ + ); +} + //test "zig fmt: top-level fields" { // try testCanonical( // \\a: did_you_know, diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 4799f39dc4..e481fd7f10 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -69,15 +69,15 @@ fn renderRoot(ais: *Ais, tree: ast.Tree) Error!void { if (root_decls.len == 0) return; for (root_decls) |decl| { - try renderTopLevelDecl(ais, tree, decl); + return renderContainerDecl(ais, tree, decl, .Newline); } } -fn renderExtraNewline(tree: 
ast.Tree, ais: *Ais, node: ast.Node.Index) Error!void { - return renderExtraNewlineToken(tree, ais, tree.firstToken(node)); +fn renderExtraNewline(ais: *Ais, tree: ast.Tree, node: ast.Node.Index) Error!void { + return renderExtraNewlineToken(ais, tree, tree.firstToken(node)); } -fn renderExtraNewlineToken(tree: ast.Tree, ais: *Ais, first_token: ast.TokenIndex) Error!void { +fn renderExtraNewlineToken(ais: *Ais, tree: ast.Tree, first_token: ast.TokenIndex) Error!void { @panic("TODO implement renderExtraNewlineToken"); //var prev_token = first_token; //if (prev_token == 0) return; @@ -96,14 +96,11 @@ fn renderExtraNewlineToken(tree: ast.Tree, ais: *Ais, first_token: ast.TokenInde //} } -fn renderTopLevelDecl(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index) Error!void { - return renderContainerDecl(ais, tree, decl, .Newline); -} - fn renderContainerDecl(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) Error!void { const token_tags = tree.tokens.items(.tag); const main_tokens = tree.nodes.items(.main_token); const datas = tree.nodes.items(.data); + try renderDocComments(ais, tree, tree.firstToken(decl)); switch (tree.nodes.items(.tag)[decl]) { .UsingNamespace, .FnProtoSimple, @@ -111,10 +108,6 @@ fn renderContainerDecl(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: S .FnProtoOne, .FnProto, .FnDecl, - .GlobalVarDecl, - .LocalVarDecl, - .SimpleVarDecl, - .AlignedVarDecl, .ContainerFieldInit, .ContainerFieldAlign, .ContainerField, @@ -145,16 +138,13 @@ fn renderContainerDecl(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: S // try renderToken(ais, tree, use_decl.semicolon_token, space); // ; // }, - // .VarDecl => { - // const var_decl = @fieldParentPtr(ast.Node.VarDecl, "base", decl); - - // try renderDocComments(ais, tree, var_decl, var_decl.getDocComments()); - // try renderVarDecl(allocator, ais, tree, var_decl); - // }, + .GlobalVarDecl => return renderVarDecl(ais, tree, tree.globalVarDecl(decl)), + .LocalVarDecl => return 
renderVarDecl(ais, tree, tree.localVarDecl(decl)), + .SimpleVarDecl => return renderVarDecl(ais, tree, tree.simpleVarDecl(decl)), + .AlignedVarDecl => return renderVarDecl(ais, tree, tree.alignedVarDecl(decl)), .TestDecl => { const test_token = main_tokens[decl]; - try renderDocComments(ais, tree, test_token); try renderToken(ais, tree, test_token, .Space); if (token_tags[test_token + 1] == .StringLiteral) { try renderToken(ais, tree, test_token + 1, .Space); @@ -251,21 +241,20 @@ fn renderContainerDecl(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: S fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Space) Error!void { const token_tags = tree.tokens.items(.tag); const main_tokens = tree.nodes.items(.main_token); - switch (tree.nodes.items(.tag)[node]) { - //.Identifier, - //.IntegerLiteral, - //.FloatLiteral, - //.StringLiteral, - //.CharLiteral, - //.BoolLiteral, - //.NullLiteral, - //.Unreachable, - //.ErrorType, - //.UndefinedLiteral, - //=> { - // const casted_node = base.cast(ast.Node.OneToken).?; - // return renderToken(ais, tree, casted_node.token, space); - //}, + const node_tags = tree.nodes.items(.tag); + const datas = tree.nodes.items(.data); + switch (node_tags[node]) { + .Identifier, + .IntegerLiteral, + .FloatLiteral, + .StringLiteral, + .CharLiteral, + .TrueLiteral, + .FalseLiteral, + .NullLiteral, + .UnreachableLiteral, + .UndefinedLiteral, + => return renderToken(ais, tree, main_tokens[node], space), //.AnyType => { // const any_type = base.castTag(.AnyType).?; @@ -299,16 +288,37 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac try renderToken(ais, tree, lbrace, .Newline); - for (statements) |statement, i| { - try renderStatement(ais, tree, statement); + for (statements) |stmt, i| { + switch (node_tags[stmt]) { + .GlobalVarDecl => try renderVarDecl(ais, tree, tree.globalVarDecl(stmt)), + .LocalVarDecl => try renderVarDecl(ais, tree, tree.localVarDecl(stmt)), + .SimpleVarDecl => 
try renderVarDecl(ais, tree, tree.simpleVarDecl(stmt)), + .AlignedVarDecl => try renderVarDecl(ais, tree, tree.alignedVarDecl(stmt)), + else => { + const semicolon = tree.lastToken(stmt) + 1; + if (token_tags[semicolon] == .Semicolon) { + try renderExpression(ais, tree, stmt, .None); + try renderToken(ais, tree, semicolon, .Newline); + } else { + try renderExpression(ais, tree, stmt, .Newline); + } + }, + } if (i + 1 < statements.len) { - try renderExtraNewline(tree, ais, statements[i + 1]); + try renderExtraNewline(ais, tree, statements[i + 1]); } } ais.popIndent(); - const rbrace = tree.lastToken(statements[statements.len - 1]) + 1; - return renderToken(ais, tree, rbrace, space); + // The rbrace could be +1 or +2 from the last token of the last + // statement in the block because lastToken() does not count semicolons. + const maybe_rbrace = tree.lastToken(statements[statements.len - 1]) + 1; + if (token_tags[maybe_rbrace] == .RBrace) { + return renderToken(ais, tree, maybe_rbrace, space); + } else { + assert(token_tags[maybe_rbrace + 1] == .RBrace); + return renderToken(ais, tree, maybe_rbrace + 1, space); + } } }, @@ -322,8 +332,8 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // return renderExpression(ais, tree, defer_node.expr, space); //}, .Comptime => { - const comptime_token = tree.nodes.items(.main_token)[node]; - const block = tree.nodes.items(.data)[node].lhs; + const comptime_token = main_tokens[node]; + const block = datas[node].lhs; try renderToken(ais, tree, comptime_token, .Space); return renderExpression(ais, tree, block, space); }, @@ -369,71 +379,78 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // ais.pushIndentOneShot(); // return renderExpression(ais, tree, infix_op_node.rhs, space); //}, + .FieldAccess => { + const field_access = datas[node]; + try renderExpression(ais, tree, field_access.lhs, .None); + try renderToken(ais, tree, main_tokens[node], .None); + return 
renderToken(ais, tree, field_access.rhs, .None); + }, - //.Add, - //.AddWrap, - //.ArrayCat, - //.ArrayMult, - //.Assign, - //.AssignBitAnd, - //.AssignBitOr, - //.AssignBitShiftLeft, - //.AssignBitShiftRight, - //.AssignBitXor, - //.AssignDiv, - //.AssignSub, - //.AssignSubWrap, - //.AssignMod, - //.AssignAdd, - //.AssignAddWrap, - //.AssignMul, - //.AssignMulWrap, - //.BangEqual, - //.BitAnd, - //.BitOr, - //.BitShiftLeft, - //.BitShiftRight, - //.BitXor, - //.BoolAnd, - //.BoolOr, - //.Div, - //.EqualEqual, - //.ErrorUnion, - //.GreaterOrEqual, - //.GreaterThan, - //.LessOrEqual, - //.LessThan, - //.MergeErrorSets, - //.Mod, - //.Mul, - //.MulWrap, - //.Period, - //.Range, - //.Sub, - //.SubWrap, - //.OrElse, - //=> { - // const infix_op_node = @fieldParentPtr(ast.Node.SimpleInfixOp, "base", base); + .ErrorUnion, + .SwitchRange, + => { + const infix = datas[node]; + try renderExpression(ais, tree, infix.lhs, .None); + try renderToken(ais, tree, main_tokens[node], .None); + return renderExpression(ais, tree, infix.rhs, space); + }, - // const op_space = switch (base.tag) { - // .Period, .ErrorUnion, .Range => Space.None, - // else => Space.Space, - // }; - // try renderExpression(ais, tree, infix_op_node.lhs, op_space); + .Add, + .AddWrap, + .ArrayCat, + .ArrayMult, + .Assign, + .AssignBitAnd, + .AssignBitOr, + .AssignBitShiftLeft, + .AssignBitShiftRight, + .AssignBitXor, + .AssignDiv, + .AssignSub, + .AssignSubWrap, + .AssignMod, + .AssignAdd, + .AssignAddWrap, + .AssignMul, + .AssignMulWrap, + .BangEqual, + .BitAnd, + .BitOr, + .BitShiftLeft, + .BitShiftRight, + .BitXor, + .BoolAnd, + .BoolOr, + .Div, + .EqualEqual, + .GreaterOrEqual, + .GreaterThan, + .LessOrEqual, + .LessThan, + .MergeErrorSets, + .Mod, + .Mul, + .MulWrap, + .Sub, + .SubWrap, + .OrElse, + => { + const infix = datas[node]; + try renderExpression(ais, tree, infix.lhs, .Space); - // const after_op_space = blk: { - // const loc = tree.tokenLocation(tree.token_locs[infix_op_node.op_token].end, 
tree.nextToken(infix_op_node.op_token)); - // break :blk if (loc.line == 0) op_space else Space.Newline; - // }; - - // { - // ais.pushIndent(); - // defer ais.popIndent(); - // try renderToken(ais, tree, infix_op_node.op_token, after_op_space); - // } - // ais.pushIndentOneShot(); - // return renderExpression(ais, tree, infix_op_node.rhs, space); - //}, + const op_token = main_tokens[node]; + const after_op_space: Space = if (tree.tokensOnSameLine(op_token, op_token + 1)) + .Space + else + .Newline; + { + ais.pushIndent(); + try renderToken(ais, tree, op_token, after_op_space); + ais.popIndent(); + } + ais.pushIndentOneShot(); + return renderExpression(ais, tree, infix.rhs, space); + }, //.BitNot, //.BoolNot, @@ -769,7 +786,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // column_counter = 0; // try renderToken(ais, tree, comma, Space.Newline); // , - // try renderExtraNewline(tree, ais, next_expr); + // try renderExtraNewline(ais, tree, next_expr); // } else { // const maybe_comma = tree.nextToken(expr.*.lastToken()); // if (tree.token_tags[maybe_comma] == .Comma) { @@ -932,7 +949,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // const comma = tree.nextToken(field_init.lastToken()); // try renderToken(ais, tree, comma, Space.Newline); - // try renderExtraNewline(tree, ais, next_field_init); + // try renderExtraNewline(ais, tree, next_field_init); // } else { // try renderExpression(ais, tree, field_init, Space.Comma); // } @@ -942,74 +959,63 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // return renderToken(ais, tree, rtoken, space); //}, - //.Call => { - // const call = @fieldParentPtr(ast.Node.Call, "base", base); - // if (call.async_token) |async_token| { - // try renderToken(ais, tree, async_token, Space.Space); - // } + .Call => { + const call = datas[node]; + const params_range = tree.extraData(call.rhs, ast.Node.SubRange); + const params = 
tree.extra_data[params_range.start..params_range.end]; + const async_token = tree.firstToken(call.lhs) - 1; + if (token_tags[async_token] == .Keyword_async) { + try renderToken(ais, tree, async_token, .Space); + } + try renderExpression(ais, tree, call.lhs, .None); - // try renderExpression(ais, tree, call.lhs, Space.None); + const lparen = main_tokens[node]; - // const lparen = tree.nextToken(call.lhs.lastToken()); + if (params.len == 0) { + try renderToken(ais, tree, lparen, .None); + return renderToken(ais, tree, lparen + 1, space); // ) + } - // if (call.params_len == 0) { - // try renderToken(ais, tree, lparen, Space.None); - // return renderToken(ais, tree, call.rtoken, space); - // } + const last_param = params[params.len - 1]; + const after_last_param_tok = tree.lastToken(last_param) + 1; + if (token_tags[after_last_param_tok] == .Comma) { + ais.pushIndent(); + try renderToken(ais, tree, lparen, Space.Newline); // ( + for (params) |param_node, i| { + if (i + 1 < params.len) { + try renderExpression(ais, tree, param_node, Space.None); - // const src_has_trailing_comma = blk: { - // const maybe_comma = tree.prevToken(call.rtoken); - // break :blk tree.token_tags[maybe_comma] == .Comma; - // }; + // Unindent the comma for multiline string literals + const is_multiline_string = node_tags[param_node] == .StringLiteral and + token_tags[main_tokens[param_node]] == .MultilineStringLiteralLine; + if (is_multiline_string) ais.popIndent(); - // if (src_has_trailing_comma) { - // { - // ais.pushIndent(); - // defer ais.popIndent(); + const comma = tree.lastToken(param_node) + 1; + try renderToken(ais, tree, comma, Space.Newline); // , - // try renderToken(ais, tree, lparen, Space.Newline); // ( - // const params = call.params(); - // for (params) |param_node, i| { - // if (i + 1 < params.len) { - // const next_node = params[i + 1]; - // try renderExpression(ais, tree, param_node, Space.None); + if (is_multiline_string) ais.pushIndent(); - // // Unindent the comma for 
multiline string literals - // const maybe_multiline_string = param_node.firstToken(); - // const is_multiline_string = tree.token_tags[maybe_multiline_string] == .MultilineStringLiteralLine; - // if (is_multiline_string) ais.popIndent(); - // defer if (is_multiline_string) ais.pushIndent(); + try renderExtraNewline(ais, tree, params[i + 1]); + } else { + try renderExpression(ais, tree, param_node, Space.Comma); + } + } + ais.popIndent(); + return renderToken(ais, tree, after_last_param_tok + 1, space); // ) + } - // const comma = tree.nextToken(param_node.lastToken()); - // try renderToken(ais, tree, comma, Space.Newline); // , - // try renderExtraNewline(tree, ais, next_node); - // } else { - // try renderExpression(ais, tree, param_node, Space.Comma); - // } - // } - // } - // return renderToken(ais, tree, call.rtoken, space); - // } + try renderToken(ais, tree, lparen, Space.None); // ( - // try renderToken(ais, tree, lparen, Space.None); // ( + for (params) |param_node, i| { + try renderExpression(ais, tree, param_node, Space.None); - // const params = call.params(); - // for (params) |param_node, i| { - // const maybe_comment = param_node.firstToken() - 1; - // const maybe_multiline_string = param_node.firstToken(); - // if (tree.token_tags[maybe_multiline_string] == .MultilineStringLiteralLine or tree.token_tags[maybe_comment] == .LineComment) { - // ais.pushIndentOneShot(); - // } - - // try renderExpression(ais, tree, param_node, Space.None); - - // if (i + 1 < params.len) { - // const comma = tree.nextToken(param_node.lastToken()); - // try renderToken(ais, tree, comma, Space.Space); - // } - // } - // return renderToken(ais, tree, call.rtoken, space); // ) - //}, + if (i + 1 < params.len) { + const comma = tree.lastToken(param_node) + 1; + try renderToken(ais, tree, comma, Space.Space); + } + } + return renderToken(ais, tree, after_last_param_tok, space); // ) + }, //.ArrayAccess => { // const suffix_op = base.castTag(.ArrayAccess).?; @@ -1118,14 +1124,6 
@@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // } //}, - //.Payload => { - // const payload = @fieldParentPtr(ast.Node.Payload, "base", base); - - // try renderToken(ais, tree, payload.lpipe, Space.None); - // try renderExpression(ais, tree, payload.error_symbol, Space.None); - // return renderToken(ais, tree, payload.rpipe, space); - //}, - //.PointerPayload => { // const payload = @fieldParentPtr(ast.Node.PointerPayload, "base", base); @@ -1264,7 +1262,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // try renderContainerDecl(allocator, ais, tree, decl, .Newline); // if (i + 1 < fields_and_decls.len) { - // try renderExtraNewline(tree, ais, fields_and_decls[i + 1]); + // try renderExtraNewline(ais, tree, fields_and_decls[i + 1]); // } // } // } else if (src_has_newline) { @@ -1338,7 +1336,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // try renderExpression(ais, tree, node, Space.None); // try renderToken(ais, tree, tree.nextToken(node.lastToken()), Space.Newline); // , - // try renderExtraNewline(tree, ais, decls[i + 1]); + // try renderExtraNewline(ais, tree, decls[i + 1]); // } else { // try renderExpression(ais, tree, node, Space.Comma); // } @@ -1357,7 +1355,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // const comma_token = tree.nextToken(node.lastToken()); // assert(tree.token_tags[comma_token] == .Comma); // try renderToken(ais, tree, comma_token, Space.Space); // , - // try renderExtraNewline(tree, ais, decls[i + 1]); + // try renderExtraNewline(ais, tree, decls[i + 1]); // } else { // try renderExpression(ais, tree, node, Space.Space); // } @@ -1624,7 +1622,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // try renderExpression(ais, tree, node, Space.Comma); // if (i + 1 < cases.len) { - // try renderExtraNewline(tree, ais, cases[i + 1]); + // try 
renderExtraNewline(ais, tree, cases[i + 1]); // } // } // } @@ -1650,7 +1648,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // const comma_token = tree.nextToken(node.lastToken()); // try renderToken(ais, tree, comma_token, Space.Space); // , - // try renderExtraNewline(tree, ais, items[i + 1]); + // try renderExtraNewline(ais, tree, items[i + 1]); // } else { // try renderExpression(ais, tree, node, Space.Space); // } @@ -1663,7 +1661,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // const comma_token = tree.nextToken(node.lastToken()); // try renderToken(ais, tree, comma_token, Space.Newline); // , - // try renderExtraNewline(tree, ais, items[i + 1]); + // try renderExtraNewline(ais, tree, items[i + 1]); // } else { // try renderExpression(ais, tree, node, Space.Comma); // } @@ -1682,28 +1680,6 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // const switch_else = @fieldParentPtr(ast.Node.SwitchElse, "base", base); // return renderToken(ais, tree, switch_else.token, space); //}, - //.Else => { - // const else_node = @fieldParentPtr(ast.Node.Else, "base", base); - - // const body_is_block = nodeIsBlock(else_node.body); - // const same_line = body_is_block or tree.tokensOnSameLine(else_node.else_token, else_node.body.lastToken()); - - // const after_else_space = if (same_line or else_node.payload != null) Space.Space else Space.Newline; - // try renderToken(ais, tree, else_node.else_token, after_else_space); - - // if (else_node.payload) |payload| { - // const payload_space = if (same_line) Space.Space else Space.Newline; - // try renderExpression(ais, tree, payload, payload_space); - // } - - // if (same_line) { - // return renderExpression(ais, tree, else_node.body, space); - // } else { - // ais.pushIndent(); - // defer ais.popIndent(); - // return renderExpression(ais, tree, else_node.body, space); - // } - //}, //.While => { // const while_node = 
@fieldParentPtr(ast.Node.While, "base", base); @@ -1824,111 +1800,8 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // } //}, - //.If => { - // const if_node = @fieldParentPtr(ast.Node.If, "base", base); - - // const lparen = tree.nextToken(if_node.if_token); - // const rparen = tree.nextToken(if_node.condition.lastToken()); - - // try renderToken(ais, tree, if_node.if_token, Space.Space); // if - // try renderToken(ais, tree, lparen, Space.None); // ( - - // try renderExpression(ais, tree, if_node.condition, Space.None); // condition - - // const body_is_if_block = if_node.body.tag == .If; - // const body_is_block = nodeIsBlock(if_node.body); - - // if (body_is_if_block) { - // try renderExtraNewline(tree, ais, if_node.body); - // } else if (body_is_block) { - // const after_rparen_space = if (if_node.payload == null) Space.BlockStart else Space.Space; - // try renderToken(ais, tree, rparen, after_rparen_space); // ) - - // if (if_node.payload) |payload| { - // try renderExpression(ais, tree, payload, Space.BlockStart); // |x| - // } - - // if (if_node.@"else") |@"else"| { - // try renderExpression(ais, tree, if_node.body, Space.SpaceOrOutdent); - // return renderExpression(ais, tree, &@"else".base, space); - // } else { - // return renderExpression(ais, tree, if_node.body, space); - // } - // } - - // const src_has_newline = !tree.tokensOnSameLine(rparen, if_node.body.lastToken()); - - // if (src_has_newline) { - // const after_rparen_space = if (if_node.payload == null) Space.Newline else Space.Space; - - // { - // ais.pushIndent(); - // defer ais.popIndent(); - // try renderToken(ais, tree, rparen, after_rparen_space); // ) - // } - - // if (if_node.payload) |payload| { - // try renderExpression(ais, tree, payload, Space.Newline); - // } - - // if (if_node.@"else") |@"else"| { - // const else_is_block = nodeIsBlock(@"else".body); - - // { - // ais.pushIndent(); - // defer ais.popIndent(); - // try renderExpression(ais, tree, 
if_node.body, Space.Newline); - // } - - // if (else_is_block) { - // try renderToken(ais, tree, @"else".else_token, Space.Space); // else - - // if (@"else".payload) |payload| { - // try renderExpression(ais, tree, payload, Space.Space); - // } - - // return renderExpression(ais, tree, @"else".body, space); - // } else { - // const after_else_space = if (@"else".payload == null) Space.Newline else Space.Space; - // try renderToken(ais, tree, @"else".else_token, after_else_space); // else - - // if (@"else".payload) |payload| { - // try renderExpression(ais, tree, payload, Space.Newline); - // } - - // ais.pushIndent(); - // defer ais.popIndent(); - // return renderExpression(ais, tree, @"else".body, space); - // } - // } else { - // ais.pushIndent(); - // defer ais.popIndent(); - // return renderExpression(ais, tree, if_node.body, space); - // } - // } - - // // Single line if statement - - // try renderToken(ais, tree, rparen, Space.Space); // ) - - // if (if_node.payload) |payload| { - // try renderExpression(ais, tree, payload, Space.Space); - // } - - // if (if_node.@"else") |@"else"| { - // try renderExpression(ais, tree, if_node.body, Space.Space); - // try renderToken(ais, tree, @"else".else_token, Space.Space); - - // if (@"else".payload) |payload| { - // try renderExpression(ais, tree, payload, Space.Space); - // } - - // return renderExpression(ais, tree, @"else".body, space); - // } else { - // return renderExpression(ais, tree, if_node.body, space); - // } - //}, - + .IfSimple => return renderIf(ais, tree, tree.ifSimple(node), space), + .If => return renderIf(ais, tree, tree.ifFull(node), space), //.Asm => { // const asm_node = @fieldParentPtr(ast.Node.Asm, "base", base); @@ -1974,7 +1847,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // const comma = tree.prevToken(next_asm_output.firstToken()); // try renderToken(ais, tree, comma, Space.Newline); // , - // try renderExtraNewlineToken(tree, ais, 
next_asm_output.firstToken()); + // try renderExtraNewlineToken(ais, tree, next_asm_output.firstToken()); // } else if (asm_node.inputs.len == 0 and asm_node.clobbers.len == 0) { // try renderAsmOutput(allocator, ais, tree, asm_output, Space.Newline); // break :asmblk; @@ -2004,7 +1877,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // const comma = tree.prevToken(next_asm_input.firstToken()); // try renderToken(ais, tree, comma, Space.Newline); // , - // try renderExtraNewlineToken(tree, ais, next_asm_input.firstToken()); + // try renderExtraNewlineToken(ais, tree, next_asm_input.firstToken()); // } else if (asm_node.clobbers.len == 0) { // try renderAsmInput(allocator, ais, tree, asm_input, Space.Newline); // break :asmblk; @@ -2140,89 +2013,225 @@ fn renderAsmInput( return renderToken(ais, tree, asm_input.lastToken(), space); // ) } -fn renderVarDecl( - allocator: *mem.Allocator, - ais: *Ais, - tree: ast.Tree, - var_decl: ast.Node.Index.VarDecl, -) Error!void { - if (var_decl.getVisibToken()) |visib_token| { +fn renderVarDecl(ais: *Ais, tree: ast.Tree, var_decl: ast.Full.VarDecl) Error!void { + if (var_decl.visib_token) |visib_token| { try renderToken(ais, tree, visib_token, Space.Space); // pub } - if (var_decl.getExternExportToken()) |extern_export_token| { + if (var_decl.extern_export_token) |extern_export_token| { try renderToken(ais, tree, extern_export_token, Space.Space); // extern - if (var_decl.getLibName()) |lib_name| { + if (var_decl.lib_name) |lib_name| { try renderExpression(ais, tree, lib_name, Space.Space); // "lib" } } - if (var_decl.getComptimeToken()) |comptime_token| { + if (var_decl.threadlocal_token) |thread_local_token| { + try renderToken(ais, tree, thread_local_token, Space.Space); // threadlocal + } + + if (var_decl.comptime_token) |comptime_token| { try renderToken(ais, tree, comptime_token, Space.Space); // comptime } - if (var_decl.getThreadLocalToken()) |thread_local_token| { - try 
renderToken(ais, tree, thread_local_token, Space.Space); // threadlocal - } - try renderToken(ais, tree, var_decl.mut_token, Space.Space); // var + try renderToken(ais, tree, var_decl.ast.mut_token, .Space); // var - const name_space = if (var_decl.getTypeNode() == null and - (var_decl.getAlignNode() != null or - var_decl.getSectionNode() != null or - var_decl.getInitNode() != null)) + const name_space = if (var_decl.ast.type_node == 0 and + (var_decl.ast.align_node != 0 or + var_decl.ast.section_node != 0 or + var_decl.ast.init_node != 0)) Space.Space else Space.None; - try renderToken(ais, tree, var_decl.name_token, name_space); + try renderToken(ais, tree, var_decl.ast.mut_token + 1, name_space); // name - if (var_decl.getTypeNode()) |type_node| { - try renderToken(ais, tree, tree.nextToken(var_decl.name_token), Space.Space); - const s = if (var_decl.getAlignNode() != null or - var_decl.getSectionNode() != null or - var_decl.getInitNode() != null) Space.Space else Space.None; - try renderExpression(ais, tree, type_node, s); + if (var_decl.ast.type_node != 0) { + try renderToken(ais, tree, var_decl.ast.mut_token + 2, Space.Space); // : + if (var_decl.ast.align_node != 0 or var_decl.ast.section_node != 0 or + var_decl.ast.init_node != 0) + { + try renderExpression(ais, tree, var_decl.ast.type_node, .Space); + } else { + try renderExpression(ais, tree, var_decl.ast.type_node, .None); + const semicolon = tree.lastToken(var_decl.ast.type_node) + 1; + return renderToken(ais, tree, semicolon, Space.Newline); // ; + } } - if (var_decl.getAlignNode()) |align_node| { - const lparen = tree.prevToken(align_node.firstToken()); - const align_kw = tree.prevToken(lparen); - const rparen = tree.nextToken(align_node.lastToken()); + if (var_decl.ast.align_node != 0) { + const lparen = tree.firstToken(var_decl.ast.align_node) - 1; + const align_kw = lparen - 1; + const rparen = tree.lastToken(var_decl.ast.align_node) + 1; try renderToken(ais, tree, align_kw, Space.None); // align 
try renderToken(ais, tree, lparen, Space.None); // ( - try renderExpression(ais, tree, align_node, Space.None); - const s = if (var_decl.getSectionNode() != null or var_decl.getInitNode() != null) Space.Space else Space.None; - try renderToken(ais, tree, rparen, s); // ) + try renderExpression(ais, tree, var_decl.ast.align_node, Space.None); + if (var_decl.ast.section_node != 0 or var_decl.ast.init_node != 0) { + try renderToken(ais, tree, rparen, .Space); // ) + } else { + try renderToken(ais, tree, rparen, .None); // ) + return renderToken(ais, tree, rparen + 1, Space.Newline); // ; + } } - if (var_decl.getSectionNode()) |section_node| { - const lparen = tree.prevToken(section_node.firstToken()); - const section_kw = tree.prevToken(lparen); - const rparen = tree.nextToken(section_node.lastToken()); + if (var_decl.ast.section_node != 0) { + const lparen = tree.firstToken(var_decl.ast.section_node) - 1; + const section_kw = lparen - 1; + const rparen = tree.lastToken(var_decl.ast.section_node) + 1; try renderToken(ais, tree, section_kw, Space.None); // linksection try renderToken(ais, tree, lparen, Space.None); // ( - try renderExpression(ais, tree, section_node, Space.None); - const s = if (var_decl.getInitNode() != null) Space.Space else Space.None; - try renderToken(ais, tree, rparen, s); // ) - } - - if (var_decl.getInitNode()) |init_node| { - const eq_token = var_decl.getEqToken().?; - const eq_space = blk: { - const loc = tree.tokenLocation(tree.token_locs[eq_token].end, tree.nextToken(eq_token)); - break :blk if (loc.line == 0) Space.Space else Space.Newline; - }; - - { - ais.pushIndent(); - defer ais.popIndent(); - try renderToken(ais, tree, eq_token, eq_space); // = + try renderExpression(ais, tree, var_decl.ast.section_node, Space.None); + if (var_decl.ast.init_node != 0) { + try renderToken(ais, tree, rparen, .Space); // ) + } else { + try renderToken(ais, tree, rparen, .None); // ) + return renderToken(ais, tree, rparen + 1, Space.Newline); // ; } - 
ais.pushIndentOneShot(); - try renderExpression(ais, tree, init_node, Space.None); } - try renderToken(ais, tree, var_decl.semicolon_token, Space.Newline); + assert(var_decl.ast.init_node != 0); + const eq_token = tree.firstToken(var_decl.ast.init_node) - 1; + const eq_space: Space = if (tree.tokensOnSameLine(eq_token, eq_token + 1)) .Space else .Newline; + { + ais.pushIndent(); + try renderToken(ais, tree, eq_token, eq_space); // = + ais.popIndent(); + } + ais.pushIndentOneShot(); + try renderExpression(ais, tree, var_decl.ast.init_node, Space.None); + + const semicolon = tree.lastToken(var_decl.ast.init_node) + 1; + return renderToken(ais, tree, semicolon, Space.Newline); +} + +fn renderIf(ais: *Ais, tree: ast.Tree, if_node: ast.Full.If, space: Space) Error!void { + const node_tags = tree.nodes.items(.tag); + const token_tags = tree.tokens.items(.tag); + + try renderToken(ais, tree, if_node.ast.if_token, .Space); // if + + const lparen = if_node.ast.if_token + 1; + + try renderToken(ais, tree, lparen, .None); // ( + try renderExpression(ais, tree, if_node.ast.cond_expr, .None); // condition + + switch (node_tags[if_node.ast.then_expr]) { + .If, .IfSimple => { + try renderExtraNewline(ais, tree, if_node.ast.then_expr); + }, + .Block, .For, .ForSimple, .While, .WhileSimple, .Switch => { + if (if_node.payload_token) |payload_token| { + try renderToken(ais, tree, payload_token - 2, .Space); // ) + try renderToken(ais, tree, payload_token - 1, .None); // | + if (token_tags[payload_token] == .Asterisk) { + try renderToken(ais, tree, payload_token, .None); // * + try renderToken(ais, tree, payload_token + 1, .None); // identifier + try renderToken(ais, tree, payload_token + 2, .BlockStart); // | + } else { + try renderToken(ais, tree, payload_token, .None); // identifier + try renderToken(ais, tree, payload_token + 1, .BlockStart); // | + } + } else { + const rparen = tree.lastToken(if_node.ast.cond_expr) + 1; + try renderToken(ais, tree, rparen, .BlockStart); // ) + } 
+ if (if_node.ast.else_expr != 0) { + try renderExpression(ais, tree, if_node.ast.then_expr, Space.SpaceOrOutdent); + try renderToken(ais, tree, if_node.else_token, .Space); // else + if (if_node.error_token) |error_token| { + try renderToken(ais, tree, error_token - 1, .None); // | + try renderToken(ais, tree, error_token, .None); // identifier + try renderToken(ais, tree, error_token + 1, .Space); // | + } + return renderExpression(ais, tree, if_node.ast.else_expr, space); + } else { + return renderExpression(ais, tree, if_node.ast.then_expr, space); + } + }, + else => {}, + } + + const rparen = tree.lastToken(if_node.ast.cond_expr) + 1; + const last_then_token = tree.lastToken(if_node.ast.then_expr); + const src_has_newline = !tree.tokensOnSameLine(rparen, last_then_token); + + if (src_has_newline) { + if (if_node.payload_token) |payload_token| { + try renderToken(ais, tree, payload_token - 2, .Space); // ) + try renderToken(ais, tree, payload_token - 1, .None); // | + try renderToken(ais, tree, payload_token, .None); // identifier + try renderToken(ais, tree, payload_token + 1, .Newline); // | + } else { + ais.pushIndent(); + try renderToken(ais, tree, rparen, .Newline); // ) + ais.popIndent(); + } + if (if_node.ast.else_expr != 0) { + ais.pushIndent(); + try renderExpression(ais, tree, if_node.ast.then_expr, Space.Newline); + ais.popIndent(); + const else_is_block = nodeIsBlock(node_tags[if_node.ast.else_expr]); + if (else_is_block) { + try renderToken(ais, tree, if_node.else_token, .Space); // else + if (if_node.error_token) |error_token| { + try renderToken(ais, tree, error_token - 1, .None); // | + try renderToken(ais, tree, error_token, .None); // identifier + try renderToken(ais, tree, error_token + 1, .Space); // | + } + return renderExpression(ais, tree, if_node.ast.else_expr, space); + } else { + if (if_node.error_token) |error_token| { + try renderToken(ais, tree, if_node.else_token, .Space); // else + try renderToken(ais, tree, error_token - 1, 
.None); // | + try renderToken(ais, tree, error_token, .None); // identifier + try renderToken(ais, tree, error_token + 1, .Space); // | + } else { + try renderToken(ais, tree, if_node.else_token, .Newline); // else + } + ais.pushIndent(); + try renderExpression(ais, tree, if_node.ast.else_expr, space); + ais.popIndent(); + return; + } + } else { + ais.pushIndent(); + try renderExpression(ais, tree, if_node.ast.then_expr, space); + ais.popIndent(); + return; + } + } + + // Single line if statement. + + if (if_node.payload_token) |payload_token| { + assert(payload_token - 2 == rparen); + try renderToken(ais, tree, payload_token - 2, .Space); // ) + try renderToken(ais, tree, payload_token - 1, .None); // | + if (token_tags[payload_token] == .Asterisk) { + try renderToken(ais, tree, payload_token, .None); // * + try renderToken(ais, tree, payload_token + 1, .None); // identifier + try renderToken(ais, tree, payload_token + 2, .Space); // | + } else { + try renderToken(ais, tree, payload_token, .None); // identifier + try renderToken(ais, tree, payload_token + 1, .Space); // | + } + } else { + try renderToken(ais, tree, rparen, .Space); // ) + } + + if (if_node.ast.else_expr != 0) { + try renderExpression(ais, tree, if_node.ast.then_expr, .Space); + try renderToken(ais, tree, if_node.else_token, .Space); // else + + if (if_node.error_token) |error_token| { + try renderToken(ais, tree, error_token - 1, .None); // | + try renderToken(ais, tree, error_token, .None); // identifier + try renderToken(ais, tree, error_token + 1, .Space); // | + } + + return renderExpression(ais, tree, if_node.ast.else_expr, space); + } else { + return renderExpression(ais, tree, if_node.ast.then_expr, space); + } } fn renderParamDecl( @@ -2249,27 +2258,6 @@ fn renderParamDecl( } } -fn renderStatement(ais: *Ais, tree: ast.Tree, base: ast.Node.Index) Error!void { - @panic("TODO render statement"); - //switch (base.tag) { - // .VarDecl => { - // const var_decl = 
@fieldParentPtr(ast.Node.VarDecl, "base", base); - // try renderVarDecl(allocator, ais, tree, var_decl); - // }, - // else => { - // if (base.requireSemiColon()) { - // try renderExpression(ais, tree, base, Space.None); - - // const semicolon_index = tree.nextToken(base.lastToken()); - // assert(tree.token_tags[semicolon_index] == .Semicolon); - // try renderToken(ais, tree, semicolon_index, Space.Newline); - // } else { - // try renderExpression(ais, tree, base, Space.Newline); - // } - // }, - //} -} - const Space = enum { None, Newline, @@ -2367,13 +2355,15 @@ fn renderDocComments(ais: *Ais, tree: ast.Tree, end_token: ast.TokenIndex) Error } } -fn nodeIsBlock(base: *const ast.Node) bool { - return switch (base.tag) { +fn nodeIsBlock(tag: ast.Node.Tag) bool { + return switch (tag) { .Block, - .LabeledBlock, .If, + .IfSimple, .For, + .ForSimple, .While, + .WhileSimple, .Switch, => true, else => false, From f5279cbada9199023049abe02dbf65a894ba317c Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Wed, 3 Feb 2021 17:02:12 -0700 Subject: [PATCH 009/173] zig fmt: implement top-level fields --- lib/std/zig/ast.zig | 243 +++++++++++++++--------- lib/std/zig/parser_test.zig | 18 +- lib/std/zig/render.zig | 362 +++++++++++++++++------------------- 3 files changed, 340 insertions(+), 283 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 7040028cf2..64b3fa7887 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -195,6 +195,7 @@ pub const Tree = struct { const tags = tree.nodes.items(.tag); const datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); + const token_tags = tree.tokens.items(.tag); var n = node; while (true) switch (tags[n]) { .Root => return 0, @@ -304,39 +305,49 @@ pub const Tree = struct { .FnDecl, => n = datas[n].lhs, - .GlobalVarDecl, - .LocalVarDecl, - .SimpleVarDecl, - .AlignedVarDecl, - .ArrayType, - .ArrayTypeSentinel, - .PtrTypeAligned, - .PtrTypeSentinel, - .PtrType, - .SliceType, - 
.StructInit, - .SwitchCaseMulti, - .WhileSimple, - .WhileCont, - .While, - .ForSimple, - .For, - .FnProtoSimple, - .FnProtoSimpleMulti, - .FnProtoOne, - .FnProto, - .ContainerDecl, - .ContainerDeclArg, - .TaggedUnion, - .TaggedUnionEnumTag, .ContainerFieldInit, .ContainerFieldAlign, .ContainerField, - .AsmOutput, - .AsmInput, - .ErrorValue, - .ErrorUnion, - => @panic("TODO finish implementing firstToken"), + => { + const name_token = main_tokens[n]; + if (name_token > 0 and + token_tags[name_token - 1] == .Keyword_comptime) + { + return name_token - 1; + } else { + return name_token; + } + }, + + .GlobalVarDecl => unreachable, + .LocalVarDecl => unreachable, + .SimpleVarDecl => unreachable, + .AlignedVarDecl => unreachable, + .ArrayType => unreachable, + .ArrayTypeSentinel => unreachable, + .PtrTypeAligned => unreachable, + .PtrTypeSentinel => unreachable, + .PtrType => unreachable, + .SliceType => unreachable, + .StructInit => unreachable, + .SwitchCaseMulti => unreachable, + .WhileSimple => unreachable, + .WhileCont => unreachable, + .While => unreachable, + .ForSimple => unreachable, + .For => unreachable, + .FnProtoSimple => unreachable, + .FnProtoSimpleMulti => unreachable, + .FnProtoOne => unreachable, + .FnProto => unreachable, + .ContainerDecl => unreachable, + .ContainerDeclArg => unreachable, + .TaggedUnion => unreachable, + .TaggedUnionEnumTag => unreachable, + .AsmOutput => unreachable, + .AsmInput => unreachable, + .ErrorValue => unreachable, + .ErrorUnion => unreachable, }; } @@ -431,6 +442,7 @@ pub const Tree = struct { .NullLiteral, .UndefinedLiteral, .UnreachableLiteral, + .Identifier, => return main_tokens[n] + end_offset, .Call => { @@ -449,62 +461,61 @@ pub const Tree = struct { n = datas[n].rhs; }, - .ArrayInitDotTwo, - .ArrayInitDot, - .StructInitDotTwo, - .StructInitDot, - .Switch, - .If, - .Continue, - .Identifier, - .EnumLiteral, - .BuiltinCallTwo, - .BuiltinCall, - .ErrorSetDecl, - .Block, - .AsmSimple, - .Asm, - .SliceOpen, - .Slice, - 
.Deref, - .ArrayAccess, - .ArrayInitOne, - .ArrayInit, - .StructInitOne, - .SwitchCaseOne, - .SwitchRange, - .FnDecl, - .GlobalVarDecl, - .LocalVarDecl, - .SimpleVarDecl, - .AlignedVarDecl, - .ArrayType, - .ArrayTypeSentinel, - .PtrTypeAligned, - .PtrTypeSentinel, - .PtrType, - .SliceType, - .StructInit, - .SwitchCaseMulti, - .WhileCont, - .While, - .ForSimple, - .For, - .FnProtoSimple, - .FnProtoSimpleMulti, - .FnProtoOne, - .FnProto, - .ContainerDecl, - .ContainerDeclArg, - .TaggedUnion, - .TaggedUnionEnumTag, - .ContainerFieldInit, - .ContainerFieldAlign, - .ContainerField, - .AsmOutput, - .AsmInput, - .ErrorValue, - => @panic("TODO finish implementing lastToken"), + .ContainerFieldInit => unreachable, + .ContainerFieldAlign => unreachable, + .ContainerField => unreachable, + + .ArrayInitDotTwo => unreachable, + .ArrayInitDot => unreachable, + .StructInitDotTwo => unreachable, + .StructInitDot => unreachable, + .Switch => unreachable, + .If => unreachable, + .Continue => unreachable, + .EnumLiteral => unreachable, + .BuiltinCallTwo => unreachable, + .BuiltinCall => unreachable, + .ErrorSetDecl => unreachable, + .Block => unreachable, + .AsmSimple => unreachable, + .Asm => unreachable, + .SliceOpen => unreachable, + .Slice => unreachable, + .Deref => unreachable, + .ArrayAccess => unreachable, + .ArrayInitOne => unreachable, + .ArrayInit => unreachable, + .StructInitOne => unreachable, + .SwitchCaseOne => unreachable, + .SwitchRange => unreachable, + .FnDecl => unreachable, + .GlobalVarDecl => unreachable, + .LocalVarDecl => unreachable, + .SimpleVarDecl => unreachable, + .AlignedVarDecl => unreachable, + .ArrayType => unreachable, + .ArrayTypeSentinel => unreachable, + .PtrTypeAligned => unreachable, + .PtrTypeSentinel => unreachable, + .PtrType => unreachable, + .SliceType => unreachable, + .StructInit => unreachable, + .SwitchCaseMulti => unreachable, + .WhileCont => unreachable, + .While => unreachable, + .ForSimple => unreachable, + .For => unreachable, + 
.FnProtoSimple => unreachable, + .FnProtoSimpleMulti => unreachable, + .FnProtoOne => unreachable, + .FnProto => unreachable, + .ContainerDecl => unreachable, + .ContainerDeclArg => unreachable, + .TaggedUnion => unreachable, + .TaggedUnionEnumTag => unreachable, + .AsmOutput => unreachable, + .AsmInput => unreachable, + .ErrorValue => unreachable, }; } @@ -587,6 +598,40 @@ pub const Tree = struct { }); } + pub fn containerField(tree: Tree, node: Node.Index) Full.ContainerField { + assert(tree.nodes.items(.tag)[node] == .ContainerField); + const data = tree.nodes.items(.data)[node]; + const extra = tree.extraData(data.rhs, Node.ContainerField); + return tree.fullContainerField(.{ + .name_token = tree.nodes.items(.main_token)[node], + .type_expr = data.lhs, + .value_expr = extra.value_expr, + .align_expr = extra.align_expr, + }); + } + + pub fn containerFieldInit(tree: Tree, node: Node.Index) Full.ContainerField { + assert(tree.nodes.items(.tag)[node] == .ContainerFieldInit); + const data = tree.nodes.items(.data)[node]; + return tree.fullContainerField(.{ + .name_token = tree.nodes.items(.main_token)[node], + .type_expr = data.lhs, + .value_expr = data.rhs, + .align_expr = 0, + }); + } + + pub fn containerFieldAlign(tree: Tree, node: Node.Index) Full.ContainerField { + assert(tree.nodes.items(.tag)[node] == .ContainerFieldAlign); + const data = tree.nodes.items(.data)[node]; + return tree.fullContainerField(.{ + .name_token = tree.nodes.items(.main_token)[node], + .type_expr = data.lhs, + .value_expr = 0, + .align_expr = data.rhs, + }); + } + fn fullVarDecl(tree: Tree, info: Full.VarDecl.Ast) Full.VarDecl { const token_tags = tree.tokens.items(.tag); var result: Full.VarDecl = .{ @@ -636,6 +681,20 @@ pub const Tree = struct { } return result; } + + fn fullContainerField(tree: Tree, info: Full.ContainerField.Ast) Full.ContainerField { + const token_tags = tree.tokens.items(.tag); + var result: Full.ContainerField = .{ + .ast = info, + .comptime_token = null, + }; + 
// comptime name: type = init, + // ^ + if (info.name_token > 0 and token_tags[info.name_token - 1] == .Keyword_comptime) { + result.comptime_token = info.name_token - 1; + } + return result; + } }; /// Fully assembled AST node information. @@ -674,6 +733,18 @@ pub const Full = struct { else_expr: Node.Index, }; }; + + pub const ContainerField = struct { + comptime_token: ?TokenIndex, + ast: Ast, + + pub const Ast = struct { + name_token: TokenIndex, + type_expr: Node.Index, + value_expr: Node.Index, + align_expr: Node.Index, + }; + }; }; pub const Error = union(enum) { diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 7a22b9a6a1..988eb9c233 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -91,15 +91,15 @@ test "zig fmt: if statment" { ); } -//test "zig fmt: top-level fields" { -// try testCanonical( -// \\a: did_you_know, -// \\b: all_files_are, -// \\structs: ?x, -// \\ -// ); -//} -// +test "zig fmt: top-level fields" { + try testCanonical( + \\a: did_you_know, + \\b: all_files_are, + \\structs: ?x, + \\ + ); +} + //test "zig fmt: decl between fields" { // try testError( // \\const S = struct { diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index e481fd7f10..7c901da458 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -28,7 +28,7 @@ const Ais = std.io.AutoIndentingStream(Writer); pub fn render(gpa: *mem.Allocator, writer: Writer, tree: ast.Tree) Error!void { assert(tree.errors.len == 0); // cannot render an invalid tree var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, writer); - try renderRoot(&auto_indenting_stream, tree); + return renderRoot(&auto_indenting_stream, tree); } /// Assumes there are no tokens in between start and end. @@ -66,10 +66,9 @@ fn renderRoot(ais: *Ais, tree: ast.Tree) Error!void { // Root is always index 0. 
const nodes_data = tree.nodes.items(.data); const root_decls = tree.extra_data[nodes_data[0].lhs..nodes_data[0].rhs]; - if (root_decls.len == 0) return; for (root_decls) |decl| { - return renderContainerDecl(ais, tree, decl, .Newline); + try renderContainerDecl(ais, tree, decl, .Newline); } } @@ -102,17 +101,11 @@ fn renderContainerDecl(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: S const datas = tree.nodes.items(.data); try renderDocComments(ais, tree, tree.firstToken(decl)); switch (tree.nodes.items(.tag)[decl]) { - .UsingNamespace, - .FnProtoSimple, - .FnProtoSimpleMulti, - .FnProtoOne, - .FnProto, - .FnDecl, - .ContainerFieldInit, - .ContainerFieldAlign, - .ContainerField, - => @panic("TODO implement renderContainerDecl"), - + .FnProtoSimple => unreachable, // TODO + .FnProtoSimpleMulti => unreachable, // TODO + .FnProtoOne => unreachable, // TODO + .FnDecl => unreachable, // TODO + .FnProto => unreachable, // TODO // .FnProto => { // const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", decl); @@ -127,6 +120,7 @@ fn renderContainerDecl(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: S // } // }, + .UsingNamespace => unreachable, // TODO // .Use => { // const use_decl = @fieldParentPtr(ast.Node.Use, "base", decl); @@ -152,88 +146,10 @@ fn renderContainerDecl(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: S try renderExpression(ais, tree, datas[decl].rhs, space); }, - // .ContainerField => { - // const field = @fieldParentPtr(ast.Node.ContainerField, "base", decl); - - // try renderDocComments(ais, tree, field, field.doc_comments); - // if (field.comptime_token) |t| { - // try renderToken(ais, tree, t, .Space); // comptime - // } - - // const src_has_trailing_comma = blk: { - // const maybe_comma = tree.nextToken(field.lastToken()); - // break :blk tree.token_tags[maybe_comma] == .Comma; - // }; - - // // The trailing comma is emitted at the end, but if it's not present - // // we still have to respect the specified `space` 
parameter - // const last_token_space: Space = if (src_has_trailing_comma) .None else space; - - // if (field.type_expr == null and field.value_expr == null) { - // try renderToken(ais, tree, field.name_token, last_token_space); // name - // } else if (field.type_expr != null and field.value_expr == null) { - // try renderToken(ais, tree, field.name_token, .None); // name - // try renderToken(ais, tree, tree.nextToken(field.name_token), .Space); // : - - // if (field.align_expr) |align_value_expr| { - // try renderExpression(ais, tree, field.type_expr.?, .Space); // type - // const lparen_token = tree.prevToken(align_value_expr.firstToken()); - // const align_kw = tree.prevToken(lparen_token); - // const rparen_token = tree.nextToken(align_value_expr.lastToken()); - // try renderToken(ais, tree, align_kw, .None); // align - // try renderToken(ais, tree, lparen_token, .None); // ( - // try renderExpression(ais, tree, align_value_expr, .None); // alignment - // try renderToken(ais, tree, rparen_token, last_token_space); // ) - // } else { - // try renderExpression(ais, tree, field.type_expr.?, last_token_space); // type - // } - // } else if (field.type_expr == null and field.value_expr != null) { - // try renderToken(ais, tree, field.name_token, .Space); // name - // try renderToken(ais, tree, tree.nextToken(field.name_token), .Space); // = - // try renderExpression(ais, tree, field.value_expr.?, last_token_space); // value - // } else { - // try renderToken(ais, tree, field.name_token, .None); // name - // try renderToken(ais, tree, tree.nextToken(field.name_token), .Space); // : - - // if (field.align_expr) |align_value_expr| { - // try renderExpression(ais, tree, field.type_expr.?, .Space); // type - // const lparen_token = tree.prevToken(align_value_expr.firstToken()); - // const align_kw = tree.prevToken(lparen_token); - // const rparen_token = tree.nextToken(align_value_expr.lastToken()); - // try renderToken(ais, tree, align_kw, .None); // align - // try 
renderToken(ais, tree, lparen_token, .None); // ( - // try renderExpression(ais, tree, align_value_expr, .None); // alignment - // try renderToken(ais, tree, rparen_token, .Space); // ) - // } else { - // try renderExpression(ais, tree, field.type_expr.?, .Space); // type - // } - // try renderToken(ais, tree, tree.prevToken(field.value_expr.?.firstToken()), .Space); // = - // try renderExpression(ais, tree, field.value_expr.?, last_token_space); // value - // } - - // if (src_has_trailing_comma) { - // const comma = tree.nextToken(field.lastToken()); - // try renderToken(ais, tree, comma, space); - // } - // }, + .ContainerFieldInit => return renderContainerField(ais, tree, tree.containerFieldInit(decl), space), + .ContainerFieldAlign => return renderContainerField(ais, tree, tree.containerFieldAlign(decl), space), + .ContainerField => return renderContainerField(ais, tree, tree.containerField(decl), space), .Comptime => return renderExpression(ais, tree, decl, space), - - // .DocComment => { - // const comment = @fieldParentPtr(ast.Node.DocComment, "base", decl); - // const kind = tree.token_tags[comment.first_line]; - // try renderToken(ais, tree, comment.first_line, .Newline); - // var tok_i = comment.first_line + 1; - // while (true) : (tok_i += 1) { - // const tok_id = tree.token_tags[tok_i]; - // if (tok_id == kind) { - // try renderToken(ais, tree, tok_i, .Newline); - // } else if (tok_id == .LineComment) { - // continue; - // } else { - // break; - // } - // } - // }, else => unreachable, } } @@ -254,8 +170,12 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac .NullLiteral, .UnreachableLiteral, .UndefinedLiteral, + .AnyFrameLiteral, => return renderToken(ais, tree, main_tokens[node], space), + .ErrorValue => unreachable, // TODO + + .AnyType => unreachable, // TODO //.AnyType => { // const any_type = base.castTag(.AnyType).?; // if (mem.eql(u8, tree.tokenSlice(any_type.token), "var")) { @@ -322,6 +242,8 @@ fn 
renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac } }, + .Defer => unreachable, // TODO + .ErrDefer => unreachable, // TODO //.Defer => { // const defer_node = @fieldParentPtr(ast.Node.Defer, "base", base); @@ -337,6 +259,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac try renderToken(ais, tree, comptime_token, .Space); return renderExpression(ais, tree, block, space); }, + .Nosuspend => unreachable, // TODO //.Nosuspend => { // const nosuspend_node = @fieldParentPtr(ast.Node.Nosuspend, "base", base); // if (mem.eql(u8, tree.tokenSlice(nosuspend_node.nosuspend_token), "noasync")) { @@ -348,6 +271,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // return renderExpression(ais, tree, nosuspend_node.expr, space); //}, + .Suspend => unreachable, // TODO //.Suspend => { // const suspend_node = @fieldParentPtr(ast.Node.Suspend, "base", base); @@ -359,6 +283,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // } //}, + .Catch => unreachable, // TODO //.Catch => { // const infix_op_node = @fieldParentPtr(ast.Node.Catch, "base", base); @@ -452,27 +377,26 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac return renderExpression(ais, tree, infix.rhs, space); }, - //.BitNot, - //.BoolNot, - //.Negation, - //.NegationWrap, - //.OptionalType, - //.AddressOf, - //=> { - // const casted_node = @fieldParentPtr(ast.Node.SimplePrefixOp, "base", base); - // try renderToken(ais, tree, casted_node.op_token, Space.None); - // return renderExpression(ais, tree, casted_node.rhs, space); - //}, + .BitNot, + .BoolNot, + .Negation, + .NegationWrap, + .OptionalType, + .AddressOf, + => { + try renderToken(ais, tree, main_tokens[node], .None); + return renderExpression(ais, tree, datas[node].lhs, space); + }, - //.Try, - //.Resume, - //.Await, - //=> { - // const casted_node = @fieldParentPtr(ast.Node.SimplePrefixOp, "base", 
base); - // try renderToken(ais, tree, casted_node.op_token, Space.Space); - // return renderExpression(ais, tree, casted_node.rhs, space); - //}, + .Try, + .Resume, + .Await, + => { + try renderToken(ais, tree, main_tokens[node], .Space); + return renderExpression(ais, tree, datas[node].lhs, space); + }, + .ArrayType => unreachable, // TODO //.ArrayType => { // const array_type = @fieldParentPtr(ast.Node.ArrayType, "base", base); // return renderArrayType( @@ -486,6 +410,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // space, // ); //}, + .ArrayTypeSentinel => unreachable, // TODO //.ArrayTypeSentinel => { // const array_type = @fieldParentPtr(ast.Node.ArrayTypeSentinel, "base", base); // return renderArrayType( @@ -500,6 +425,9 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // ); //}, + .PtrType => unreachable, // TODO + .PtrTypeAligned => unreachable, // TODO + .PtrTypeSentinel => unreachable, // TODO //.PtrType => { // const ptr_type = @fieldParentPtr(ast.Node.PtrType, "base", base); // const op_tok_id = tree.token_tags[ptr_type.op_token]; @@ -562,6 +490,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // return renderExpression(ais, tree, ptr_type.rhs, space); //}, + .SliceType => unreachable, // TODO //.SliceType => { // const slice_type = @fieldParentPtr(ast.Node.SliceType, "base", base); // try renderToken(ais, tree, slice_type.op_token, Space.None); // [ @@ -611,6 +540,10 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // return renderExpression(ais, tree, slice_type.rhs, space); //}, + .ArrayInitOne => unreachable, // TODO + .ArrayInitDotTwo => unreachable, // TODO + .ArrayInitDot => unreachable, // TODO + .ArrayInit => unreachable, // TODO //.ArrayInitializer, .ArrayInitializerDot => { // var rtoken: ast.TokenIndex = undefined; // var exprs: []ast.Node.Index = undefined; @@ -823,6 +756,10 @@ fn 
renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // return renderToken(ais, tree, rtoken, space); //}, + .StructInitOne => unreachable, // TODO + .StructInitDotTwo => unreachable, // TODO + .StructInitDot => unreachable, // TODO + .StructInit => unreachable, // TODO //.StructInitializer, .StructInitializerDot => { // var rtoken: ast.TokenIndex = undefined; // var field_inits: []ast.Node.Index = undefined; @@ -959,6 +896,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // return renderToken(ais, tree, rtoken, space); //}, + .CallOne => unreachable, // TODO .Call => { const call = datas[node]; const params_range = tree.extraData(call.rhs, ast.Node.SubRange); @@ -1017,6 +955,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac return renderToken(ais, tree, after_last_param_tok, space); // ) }, + .ArrayAccess => unreachable, // TODO //.ArrayAccess => { // const suffix_op = base.castTag(.ArrayAccess).?; @@ -1039,6 +978,8 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // return renderToken(ais, tree, rbracket, space); // ] //}, + .Slice => unreachable, // TODO + .SliceOpen => unreachable, // TODO //.Slice => { // const suffix_op = base.castTag(.Slice).?; // try renderExpression(ais, tree, suffix_op.lhs, Space.None); @@ -1066,12 +1007,14 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // return renderToken(ais, tree, suffix_op.rtoken, space); // ] //}, + .Deref => unreachable, // TODO //.Deref => { // const suffix_op = base.castTag(.Deref).?; // try renderExpression(ais, tree, suffix_op.lhs, Space.None); // return renderToken(ais, tree, suffix_op.rtoken, space); // .* //}, + .UnwrapOptional => unreachable, // TODO //.UnwrapOptional => { // const suffix_op = base.castTag(.UnwrapOptional).?; @@ -1080,6 +1023,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // return 
renderToken(ais, tree, suffix_op.rtoken, space); // ? //}, + .Break => unreachable, // TODO //.Break => { // const flow_expr = base.castTag(.Break).?; // const maybe_rhs = flow_expr.getRHS(); @@ -1102,6 +1046,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // return renderExpression(ais, tree, maybe_rhs.?, space); //}, + .Continue => unreachable, // TODO //.Continue => { // const flow_expr = base.castTag(.Continue).?; // if (flow_expr.getLabel()) |label| { @@ -1114,6 +1059,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // } //}, + .Return => unreachable, // TODO //.Return => { // const flow_expr = base.castTag(.Return).?; // if (flow_expr.getRHS()) |rhs| { @@ -1124,36 +1070,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // } //}, - //.PointerPayload => { - // const payload = @fieldParentPtr(ast.Node.PointerPayload, "base", base); - - // try renderToken(ais, tree, payload.lpipe, Space.None); - // if (payload.ptr_token) |ptr_token| { - // try renderToken(ais, tree, ptr_token, Space.None); - // } - // try renderExpression(ais, tree, payload.value_symbol, Space.None); - // return renderToken(ais, tree, payload.rpipe, space); - //}, - - //.PointerIndexPayload => { - // const payload = @fieldParentPtr(ast.Node.PointerIndexPayload, "base", base); - - // try renderToken(ais, tree, payload.lpipe, Space.None); - // if (payload.ptr_token) |ptr_token| { - // try renderToken(ais, tree, ptr_token, Space.None); - // } - // try renderExpression(ais, tree, payload.value_symbol, Space.None); - - // if (payload.index_symbol) |index_symbol| { - // const comma = tree.nextToken(payload.value_symbol.lastToken()); - - // try renderToken(ais, tree, comma, Space.Space); - // try renderExpression(ais, tree, index_symbol, Space.None); - // } - - // return renderToken(ais, tree, payload.rpipe, space); - //}, - + .GroupedExpression => unreachable, // TODO //.GroupedExpression => { 
// const grouped_expr = @fieldParentPtr(ast.Node.GroupedExpression, "base", base); @@ -1165,15 +1082,10 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // return renderToken(ais, tree, grouped_expr.rparen, space); //}, - //.FieldInitializer => { - // const field_init = @fieldParentPtr(ast.Node.FieldInitializer, "base", base); - - // try renderToken(ais, tree, field_init.period_token, Space.None); // . - // try renderToken(ais, tree, field_init.name_token, Space.Space); // name - // try renderToken(ais, tree, tree.nextToken(field_init.name_token), Space.Space); // = - // return renderExpression(ais, tree, field_init.expr, space); - //}, - + .ContainerDecl => unreachable, // TODO + .ContainerDeclArg => unreachable, // TODO + .TaggedUnion => unreachable, // TODO + .TaggedUnionEnumTag => unreachable, // TODO //.ContainerDecl => { // const container_decl = @fieldParentPtr(ast.Node.ContainerDecl, "base", base); @@ -1289,6 +1201,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // return renderToken(ais, tree, container_decl.rbrace_token, space); // } //}, + .ErrorSetDecl => unreachable, // TODO //.ErrorSetDecl => { // const err_set_decl = @fieldParentPtr(ast.Node.ErrorSetDecl, "base", base); @@ -1365,28 +1278,8 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // } //}, - //.ErrorTag => { - // const tag = @fieldParentPtr(ast.Node.ErrorTag, "base", base); - - // try renderDocComments(ais, tree, tag, tag.doc_comments); - // return renderToken(ais, tree, tag.name_token, space); // name - //}, - - //.MultilineStringLiteral => { - // const multiline_str_literal = @fieldParentPtr(ast.Node.MultilineStringLiteral, "base", base); - - // { - // const locked_indents = ais.lockOneShotIndent(); - // defer { - // var i: u8 = 0; - // while (i < locked_indents) : (i += 1) ais.popIndent(); - // } - // try ais.maybeInsertNewline(); - - // for (multiline_str_literal.lines()) |t| try 
renderToken(ais, tree, t, Space.None); - // } - //}, - + .BuiltinCall => unreachable, // TODO + .BuiltinCallTwo => unreachable, // TODO //.BuiltinCall => { // const builtin_call = @fieldParentPtr(ast.Node.BuiltinCall, "base", base); @@ -1448,6 +1341,10 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // return renderToken(ais, tree, builtin_call.rparen_token, space); // ) //}, + .FnProtoSimple => unreachable, // TODO + .FnProtoSimpleMulti => unreachable, // TODO + .FnProtoOne => unreachable, // TODO + .FnProto => unreachable, // TODO //.FnProto => { // const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", base); @@ -1579,6 +1476,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // } //}, + .AnyFrameType => unreachable, // TODO //.AnyFrameType => { // const anyframe_type = @fieldParentPtr(ast.Node.AnyFrameType, "base", base); @@ -1591,8 +1489,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // } //}, - //.DocComment => unreachable, // doc comments are attached to nodes - + .Switch => unreachable, // TODO //.Switch => { // const switch_node = @fieldParentPtr(ast.Node.Switch, "base", base); @@ -1630,6 +1527,8 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // return renderToken(ais, tree, switch_node.rbrace, space); // } //}, + .SwitchCaseOne => unreachable, // TODO + .SwitchCaseMulti => unreachable, // TODO //.SwitchCase => { // const switch_case = @fieldParentPtr(ast.Node.SwitchCase, "base", base); @@ -1676,11 +1575,10 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // return renderExpression(ais, tree, switch_case.expr, space); //}, - //.SwitchElse => { - // const switch_else = @fieldParentPtr(ast.Node.SwitchElse, "base", base); - // return renderToken(ais, tree, switch_else.token, space); - //}, + .WhileSimple => unreachable, // TODO + .WhileCont => unreachable, // TODO + .While => 
unreachable, // TODO //.While => { // const while_node = @fieldParentPtr(ast.Node.While, "base", base); @@ -1749,6 +1647,8 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // } //}, + .ForSimple => unreachable, // TODO + .For => unreachable, // TODO //.For => { // const for_node = @fieldParentPtr(ast.Node.For, "base", base); @@ -1802,6 +1702,11 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac .IfSimple => return renderIf(ais, tree, tree.ifSimple(node), space), .If => return renderIf(ais, tree, tree.ifFull(node), space), + + .Asm => unreachable, // TODO + .AsmSimple => unreachable, // TODO + .AsmOutput => unreachable, // TODO + .AsmInput => unreachable, // TODO //.Asm => { // const asm_node = @fieldParentPtr(ast.Node.Asm, "base", base); @@ -1911,6 +1816,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // return renderToken(ais, tree, asm_node.rparen, space); //}, + .EnumLiteral => unreachable, // TODO //.EnumLiteral => { // const enum_literal = @fieldParentPtr(ast.Node.EnumLiteral, "base", base); @@ -1918,13 +1824,17 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // return renderToken(ais, tree, enum_literal.name, space); // name //}, - //.ContainerField, - //.Root, - //.VarDecl, - //.Use, - //.TestDecl, - //=> unreachable, - else => @panic("TODO implement more renderExpression"), + .FnDecl => unreachable, + .ContainerField => unreachable, + .ContainerFieldInit => unreachable, + .ContainerFieldAlign => unreachable, + .Root => unreachable, + .GlobalVarDecl => unreachable, + .LocalVarDecl => unreachable, + .SimpleVarDecl => unreachable, + .AlignedVarDecl => unreachable, + .UsingNamespace => unreachable, + .TestDecl => unreachable, } } @@ -2234,6 +2144,59 @@ fn renderIf(ais: *Ais, tree: ast.Tree, if_node: ast.Full.If, space: Space) Error } } +fn renderContainerField( + ais: *Ais, + tree: ast.Tree, + field: 
ast.Full.ContainerField, + space: Space, +) Error!void { + const main_tokens = tree.nodes.items(.main_token); + if (field.comptime_token) |t| { + try renderToken(ais, tree, t, .Space); // comptime + } + if (field.ast.type_expr == 0 and field.ast.value_expr == 0) { + return renderTokenComma(ais, tree, field.ast.name_token, space); // name + } + if (field.ast.type_expr != 0 and field.ast.value_expr == 0) { + try renderToken(ais, tree, field.ast.name_token, .None); // name + try renderToken(ais, tree, field.ast.name_token + 1, .Space); // : + + if (field.ast.align_expr != 0) { + try renderExpression(ais, tree, field.ast.type_expr, .Space); // type + const align_token = tree.firstToken(field.ast.align_expr) - 2; + try renderToken(ais, tree, align_token, .None); // align + try renderToken(ais, tree, align_token + 1, .None); // ( + try renderExpression(ais, tree, field.ast.align_expr, .None); // alignment + const rparen = tree.lastToken(field.ast.align_expr) + 1; + return renderTokenComma(ais, tree, rparen, space); // ) + } else { + return renderExpressionComma(ais, tree, field.ast.type_expr, space); // type + } + } + if (field.ast.type_expr == 0 and field.ast.value_expr != 0) { + try renderToken(ais, tree, field.ast.name_token, .Space); // name + try renderToken(ais, tree, field.ast.name_token + 1, .Space); // = + return renderExpressionComma(ais, tree, field.ast.value_expr, space); // value + } + + try renderToken(ais, tree, field.ast.name_token, .None); // name + try renderToken(ais, tree, field.ast.name_token + 1, .Space); // : + try renderExpression(ais, tree, field.ast.type_expr, .Space); // type + + if (field.ast.align_expr != 0) { + const lparen_token = tree.firstToken(field.ast.align_expr) - 1; + const align_kw = lparen_token - 1; + const rparen_token = tree.lastToken(field.ast.align_expr) + 1; + try renderToken(ais, tree, align_kw, .None); // align + try renderToken(ais, tree, lparen_token, .None); // ( + try renderExpression(ais, tree, field.ast.align_expr, 
.None); // alignment + try renderToken(ais, tree, rparen_token, .Space); // ) + } + const eq_token = tree.firstToken(field.ast.value_expr) - 1; + try renderToken(ais, tree, eq_token, .Space); // = + return renderExpressionComma(ais, tree, field.ast.value_expr, space); // value +} + fn renderParamDecl( allocator: *mem.Allocator, ais: *Ais, @@ -2258,6 +2221,29 @@ fn renderParamDecl( } } +/// Render an expression, and the comma that follows it, if it is present in the source. +fn renderExpressionComma(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Space) Error!void { + const token_tags = tree.tokens.items(.tag); + const maybe_comma = tree.lastToken(node) + 1; + if (token_tags[maybe_comma] == .Comma) { + try renderExpression(ais, tree, node, .None); + return renderToken(ais, tree, maybe_comma, space); + } else { + return renderExpression(ais, tree, node, space); + } +} + +fn renderTokenComma(ais: *Ais, tree: ast.Tree, token: ast.TokenIndex, space: Space) Error!void { + const token_tags = tree.tokens.items(.tag); + const maybe_comma = token + 1; + if (token_tags[maybe_comma] == .Comma) { + try renderToken(ais, tree, token, .None); + return renderToken(ais, tree, maybe_comma, space); + } else { + return renderToken(ais, tree, token, space); + } +} + const Space = enum { None, Newline, From 725adf833289e0a1b0826c6a774cea5283cec744 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Wed, 3 Feb 2021 22:12:11 -0700 Subject: [PATCH 010/173] zig fmt: builtin calls and array access --- lib/std/zig/ast.zig | 199 +++++++++++++++++++++--------------- lib/std/zig/parse.zig | 105 ++++++++++++++++++- lib/std/zig/parser_test.zig | 30 +++--- lib/std/zig/render.zig | 170 ++++++++++++++---------------- 4 files changed, 308 insertions(+), 196 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 64b3fa7887..3e3416e9b5 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -319,35 +319,54 @@ pub const Tree = struct { } }, - .GlobalVarDecl => unreachable, 
- .LocalVarDecl => unreachable, - .SimpleVarDecl => unreachable, - .AlignedVarDecl => unreachable, - .ArrayType => unreachable, - .ArrayTypeSentinel => unreachable, - .PtrTypeAligned => unreachable, - .PtrTypeSentinel => unreachable, - .PtrType => unreachable, - .SliceType => unreachable, - .StructInit => unreachable, - .SwitchCaseMulti => unreachable, - .WhileSimple => unreachable, - .WhileCont => unreachable, - .While => unreachable, - .ForSimple => unreachable, - .For => unreachable, - .FnProtoSimple => unreachable, - .FnProtoSimpleMulti => unreachable, - .FnProtoOne => unreachable, - .FnProto => unreachable, - .ContainerDecl => unreachable, - .ContainerDeclArg => unreachable, - .TaggedUnion => unreachable, - .TaggedUnionEnumTag => unreachable, - .AsmOutput => unreachable, - .AsmInput => unreachable, - .ErrorValue => unreachable, - .ErrorUnion => unreachable, + .GlobalVarDecl, + .LocalVarDecl, + .SimpleVarDecl, + .AlignedVarDecl, + => { + var i = main_tokens[n]; // mut token + while (i > 0) { + i -= 1; + switch (token_tags[i]) { + .Keyword_extern, + .Keyword_export, + .Keyword_comptime, + .Keyword_pub, + .Keyword_threadlocal, + .StringLiteral, + => continue, + + else => return i + 1, + } + } + return i; + }, + + .ArrayType => unreachable, // TODO + .ArrayTypeSentinel => unreachable, // TODO + .PtrTypeAligned => unreachable, // TODO + .PtrTypeSentinel => unreachable, // TODO + .PtrType => unreachable, // TODO + .SliceType => unreachable, // TODO + .StructInit => unreachable, // TODO + .SwitchCaseMulti => unreachable, // TODO + .WhileSimple => unreachable, // TODO + .WhileCont => unreachable, // TODO + .While => unreachable, // TODO + .ForSimple => unreachable, // TODO + .For => unreachable, // TODO + .FnProtoSimple => unreachable, // TODO + .FnProtoSimpleMulti => unreachable, // TODO + .FnProtoOne => unreachable, // TODO + .FnProto => unreachable, // TODO + .ContainerDecl => unreachable, // TODO + .ContainerDeclArg => unreachable, // TODO + .TaggedUnion => 
unreachable, // TODO + .TaggedUnionEnumTag => unreachable, // TODO + .AsmOutput => unreachable, // TODO + .AsmInput => unreachable, // TODO + .ErrorValue => unreachable, // TODO + .ErrorUnion => unreachable, // TODO }; } @@ -445,7 +464,9 @@ pub const Tree = struct { .Identifier, => return main_tokens[n] + end_offset, - .Call => { + .Call, + .BuiltinCall, + => { end_offset += 1; // for the `)` const params = tree.extraData(datas[n].rhs, Node.SubRange); if (params.end - params.start == 0) { @@ -453,69 +474,81 @@ pub const Tree = struct { } n = tree.extra_data[params.end - 1]; // last parameter }, - .CallOne => { - end_offset += 1; // for the `)` + .CallOne, + .ArrayAccess, + => { + end_offset += 1; // for the rparen/rbracket if (datas[n].rhs == 0) { return main_tokens[n] + end_offset; } n = datas[n].rhs; }, + .BuiltinCallTwo => { + end_offset += 1; // for the rparen + if (datas[n].rhs == 0) { + if (datas[n].lhs == 0) { + return main_tokens[n] + end_offset; + } else { + n = datas[n].lhs; + } + } else { + n = datas[n].rhs; + } + }, + .ContainerFieldInit => unreachable, .ContainerFieldAlign => unreachable, .ContainerField => unreachable, - .ArrayInitDotTwo => unreachable, - .ArrayInitDot => unreachable, - .StructInitDotTwo => unreachable, - .StructInitDot => unreachable, - .Switch => unreachable, - .If => unreachable, - .Continue => unreachable, - .EnumLiteral => unreachable, - .BuiltinCallTwo => unreachable, - .BuiltinCall => unreachable, - .ErrorSetDecl => unreachable, - .Block => unreachable, - .AsmSimple => unreachable, - .Asm => unreachable, - .SliceOpen => unreachable, - .Slice => unreachable, - .Deref => unreachable, - .ArrayAccess => unreachable, - .ArrayInitOne => unreachable, - .ArrayInit => unreachable, - .StructInitOne => unreachable, - .SwitchCaseOne => unreachable, - .SwitchRange => unreachable, - .FnDecl => unreachable, - .GlobalVarDecl => unreachable, - .LocalVarDecl => unreachable, - .SimpleVarDecl => unreachable, - .AlignedVarDecl => unreachable, - 
.ArrayType => unreachable, - .ArrayTypeSentinel => unreachable, - .PtrTypeAligned => unreachable, - .PtrTypeSentinel => unreachable, - .PtrType => unreachable, - .SliceType => unreachable, - .StructInit => unreachable, - .SwitchCaseMulti => unreachable, - .WhileCont => unreachable, - .While => unreachable, - .ForSimple => unreachable, - .For => unreachable, - .FnProtoSimple => unreachable, - .FnProtoSimpleMulti => unreachable, - .FnProtoOne => unreachable, - .FnProto => unreachable, - .ContainerDecl => unreachable, - .ContainerDeclArg => unreachable, - .TaggedUnion => unreachable, - .TaggedUnionEnumTag => unreachable, - .AsmOutput => unreachable, - .AsmInput => unreachable, - .ErrorValue => unreachable, + .ArrayInitDotTwo => unreachable, // TODO + .ArrayInitDot => unreachable, // TODO + .StructInitDotTwo => unreachable, // TODO + .StructInitDot => unreachable, // TODO + .Switch => unreachable, // TODO + .If => unreachable, // TODO + .Continue => unreachable, // TODO + .EnumLiteral => unreachable, // TODO + .ErrorSetDecl => unreachable, // TODO + .Block => unreachable, // TODO + .AsmSimple => unreachable, // TODO + .Asm => unreachable, // TODO + .SliceOpen => unreachable, // TODO + .Slice => unreachable, // TODO + .Deref => unreachable, // TODO + .ArrayInitOne => unreachable, // TODO + .ArrayInit => unreachable, // TODO + .StructInitOne => unreachable, // TODO + .SwitchCaseOne => unreachable, // TODO + .SwitchRange => unreachable, // TODO + .FnDecl => unreachable, // TODO + .GlobalVarDecl => unreachable, // TODO + .LocalVarDecl => unreachable, // TODO + .SimpleVarDecl => unreachable, // TODO + .AlignedVarDecl => unreachable, // TODO + .ArrayType => unreachable, // TODO + .ArrayTypeSentinel => unreachable, // TODO + .PtrTypeAligned => unreachable, // TODO + .PtrTypeSentinel => unreachable, // TODO + .PtrType => unreachable, // TODO + .SliceType => unreachable, // TODO + .StructInit => unreachable, // TODO + .SwitchCaseMulti => unreachable, // TODO + .WhileCont => 
unreachable, // TODO + .While => unreachable, // TODO + .ForSimple => unreachable, // TODO + .For => unreachable, // TODO + .FnProtoSimple => unreachable, // TODO + .FnProtoSimpleMulti => unreachable, // TODO + .FnProtoOne => unreachable, // TODO + .FnProto => unreachable, // TODO + .ContainerDecl => unreachable, // TODO + .ContainerDeclArg => unreachable, // TODO + .TaggedUnion => unreachable, // TODO + .TaggedUnionEnumTag => unreachable, // TODO + .AsmOutput => unreachable, // TODO + .AsmInput => unreachable, // TODO + .ErrorValue => unreachable, // TODO }; } diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig index e5cac445c1..02f4dc49a3 100644 --- a/lib/std/zig/parse.zig +++ b/lib/std/zig/parse.zig @@ -3483,9 +3483,8 @@ const Parser = struct { /// ExprList <- (Expr COMMA)* Expr? /// TODO detect when we can emit BuiltinCallTwo instead of BuiltinCall. fn parseBuiltinCall(p: *Parser) !Node.Index { - const builtin_token = p.eatToken(.Builtin) orelse return null_node; - - const lparen = (try p.expectTokenRecoverable(.LParen)) orelse { + const builtin_token = p.assertToken(.Builtin); + _ = (try p.expectTokenRecoverable(.LParen)) orelse { try p.warn(.{ .ExpectedParamList = .{ .token = p.tok_i }, }); @@ -3499,8 +3498,104 @@ const Parser = struct { }, }); }; - const params = try ListParseFn(parseExpr)(p); - _ = try p.expectToken(.RParen); + if (p.eatToken(.RParen)) |_| { + return p.addNode(.{ + .tag = .BuiltinCallTwo, + .main_token = builtin_token, + .data = .{ + .lhs = 0, + .rhs = 0, + }, + }); + } + const param_one = try p.expectExpr(); + switch (p.token_tags[p.nextToken()]) { + .Comma => { + if (p.eatToken(.RParen)) |_| { + return p.addNode(.{ + .tag = .BuiltinCallTwo, + .main_token = builtin_token, + .data = .{ + .lhs = param_one, + .rhs = 0, + }, + }); + } + }, + .RParen => return p.addNode(.{ + .tag = .BuiltinCallTwo, + .main_token = builtin_token, + .data = .{ + .lhs = param_one, + .rhs = 0, + }, + }), + else => { + // This is likely just a missing comma; 
+ // give an error but continue parsing this list. + p.tok_i -= 1; + try p.warn(.{ + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + }); + }, + } + const param_two = try p.expectExpr(); + switch (p.token_tags[p.nextToken()]) { + .Comma => { + if (p.eatToken(.RParen)) |_| { + return p.addNode(.{ + .tag = .BuiltinCallTwo, + .main_token = builtin_token, + .data = .{ + .lhs = param_one, + .rhs = param_two, + }, + }); + } + }, + .RParen => return p.addNode(.{ + .tag = .BuiltinCallTwo, + .main_token = builtin_token, + .data = .{ + .lhs = param_one, + .rhs = param_two, + }, + }), + else => { + // This is likely just a missing comma; + // give an error but continue parsing this list. + p.tok_i -= 1; + try p.warn(.{ + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + }); + }, + } + + var list = std.ArrayList(Node.Index).init(p.gpa); + defer list.deinit(); + + try list.appendSlice(&[_]Node.Index{ param_one, param_two }); + + while (true) { + const param = try p.expectExpr(); + try list.append(param); + switch (p.token_tags[p.nextToken()]) { + .Comma => { + if (p.eatToken(.RParen)) |_| break; + continue; + }, + .RParen => break, + else => { + // This is likely just a missing comma; + // give an error but continue parsing this list. 
+ p.tok_i -= 1; + try p.warn(.{ + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + }); + }, + } + } + const params = try p.listToSpan(list.items); return p.addNode(.{ .tag = .BuiltinCall, .main_token = builtin_token, diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 988eb9c233..877e1f42a5 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -21,21 +21,21 @@ test "zig fmt: two spaced line comments before decl" { ); } -//test "zig fmt: respect line breaks after var declarations" { -// try testCanonical( -// \\const crc = -// \\ lookup_tables[0][p[7]] ^ -// \\ lookup_tables[1][p[6]] ^ -// \\ lookup_tables[2][p[5]] ^ -// \\ lookup_tables[3][p[4]] ^ -// \\ lookup_tables[4][@truncate(u8, self.crc >> 24)] ^ -// \\ lookup_tables[5][@truncate(u8, self.crc >> 16)] ^ -// \\ lookup_tables[6][@truncate(u8, self.crc >> 8)] ^ -// \\ lookup_tables[7][@truncate(u8, self.crc >> 0)]; -// \\ -// ); -//} -// +test "zig fmt: respect line breaks after var declarations" { + try testCanonical( + \\const crc = + \\ lookup_tables[0][p[7]] ^ + \\ lookup_tables[1][p[6]] ^ + \\ lookup_tables[2][p[5]] ^ + \\ lookup_tables[3][p[4]] ^ + \\ lookup_tables[4][@truncate(u8, self.crc >> 24)] ^ + \\ lookup_tables[5][@truncate(u8, self.crc >> 16)] ^ + \\ lookup_tables[6][@truncate(u8, self.crc >> 8)] ^ + \\ lookup_tables[7][@truncate(u8, self.crc >> 0)]; + \\ + ); +} + //test "zig fmt: multiline string mixed with comments" { // try testCanonical( // \\const s1 = diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 7c901da458..f55b31a53a 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -308,7 +308,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac const field_access = datas[node]; try renderExpression(ais, tree, field_access.lhs, .None); try renderToken(ais, tree, main_tokens[node], .None); - return renderToken(ais, tree, field_access.rhs, .None); + return 
renderToken(ais, tree, field_access.rhs, space); }, .ErrorUnion, @@ -362,18 +362,15 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac => { const infix = datas[node]; try renderExpression(ais, tree, infix.lhs, .Space); - const op_token = main_tokens[node]; - const after_op_space: Space = if (tree.tokensOnSameLine(op_token, op_token + 1)) - .Space - else - .Newline; - { + if (tree.tokensOnSameLine(op_token, op_token + 1)) { + try renderToken(ais, tree, op_token, .Space); + } else { ais.pushIndent(); - try renderToken(ais, tree, op_token, after_op_space); + try renderToken(ais, tree, op_token, .Newline); ais.popIndent(); + ais.pushIndentOneShot(); } - ais.pushIndentOneShot(); return renderExpression(ais, tree, infix.rhs, space); }, @@ -955,28 +952,15 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac return renderToken(ais, tree, after_last_param_tok, space); // ) }, - .ArrayAccess => unreachable, // TODO - //.ArrayAccess => { - // const suffix_op = base.castTag(.ArrayAccess).?; - - // const lbracket = tree.nextToken(suffix_op.lhs.lastToken()); - // const rbracket = tree.nextToken(suffix_op.index_expr.lastToken()); - - // try renderExpression(ais, tree, suffix_op.lhs, Space.None); - // try renderToken(ais, tree, lbracket, Space.None); // [ - - // const starts_with_comment = tree.token_tags[lbracket + 1] == .LineComment; - // const ends_with_comment = tree.token_tags[rbracket - 1] == .LineComment; - // { - // const new_space = if (ends_with_comment) Space.Newline else Space.None; - - // ais.pushIndent(); - // defer ais.popIndent(); - // try renderExpression(ais, tree, suffix_op.index_expr, new_space); - // } - // if (starts_with_comment) try ais.maybeInsertNewline(); - // return renderToken(ais, tree, rbracket, space); // ] - //}, + .ArrayAccess => { + const suffix = datas[node]; + const lbracket = tree.firstToken(suffix.rhs) - 1; + const rbracket = tree.lastToken(suffix.rhs) + 1; + try 
renderExpression(ais, tree, suffix.lhs, .None); + try renderToken(ais, tree, lbracket, .None); // [ + try renderExpression(ais, tree, suffix.rhs, .None); + return renderToken(ais, tree, rbracket, space); // ] + }, .Slice => unreachable, // TODO .SliceOpen => unreachable, // TODO @@ -1278,68 +1262,22 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // } //}, - .BuiltinCall => unreachable, // TODO - .BuiltinCallTwo => unreachable, // TODO - //.BuiltinCall => { - // const builtin_call = @fieldParentPtr(ast.Node.BuiltinCall, "base", base); - - // // TODO remove after 0.7.0 release - // if (mem.eql(u8, tree.tokenSlice(builtin_call.builtin_token), "@OpaqueType")) - // return ais.writer().writeAll("opaque {}"); - - // // TODO remove after 0.7.0 release - // { - // const params = builtin_call.paramsConst(); - // if (mem.eql(u8, tree.tokenSlice(builtin_call.builtin_token), "@Type") and - // params.len == 1) - // { - // if (params[0].castTag(.EnumLiteral)) |enum_literal| - // if (mem.eql(u8, tree.tokenSlice(enum_literal.name), "Opaque")) - // return ais.writer().writeAll("opaque {}"); - // } - // } - - // try renderToken(ais, tree, builtin_call.builtin_token, Space.None); // @name - - // const src_params_trailing_comma = blk: { - // if (builtin_call.params_len == 0) break :blk false; - // const last_node = builtin_call.params()[builtin_call.params_len - 1]; - // const maybe_comma = tree.nextToken(last_node.lastToken()); - // break :blk tree.token_tags[maybe_comma] == .Comma; - // }; - - // const lparen = tree.nextToken(builtin_call.builtin_token); - - // if (!src_params_trailing_comma) { - // try renderToken(ais, tree, lparen, Space.None); // ( - - // // render all on one line, no trailing comma - // const params = builtin_call.params(); - // for (params) |param_node, i| { - // const maybe_comment = param_node.firstToken() - 1; - // if (param_node.*.tag == .MultilineStringLiteral or tree.token_tags[maybe_comment] == .LineComment) { - // 
ais.pushIndentOneShot(); - // } - // try renderExpression(ais, tree, param_node, Space.None); - - // if (i + 1 < params.len) { - // const comma_token = tree.nextToken(param_node.lastToken()); - // try renderToken(ais, tree, comma_token, Space.Space); // , - // } - // } - // } else { - // // one param per line - // ais.pushIndent(); - // defer ais.popIndent(); - // try renderToken(ais, tree, lparen, Space.Newline); // ( - - // for (builtin_call.params()) |param_node| { - // try renderExpression(ais, tree, param_node, Space.Comma); - // } - // } - - // return renderToken(ais, tree, builtin_call.rparen_token, space); // ) - //}, + .BuiltinCallTwo => { + if (datas[node].lhs == 0) { + const params = [_]ast.Node.Index{}; + return renderBuiltinCall(ais, tree, main_tokens[node], ¶ms, space); + } else if (datas[node].rhs == 0) { + const params = [_]ast.Node.Index{datas[node].lhs}; + return renderBuiltinCall(ais, tree, main_tokens[node], ¶ms, space); + } else { + const params = [_]ast.Node.Index{ datas[node].lhs, datas[node].rhs }; + return renderBuiltinCall(ais, tree, main_tokens[node], ¶ms, space); + } + }, + .BuiltinCall => { + const params = tree.extra_data[datas[node].lhs..datas[node].rhs]; + return renderBuiltinCall(ais, tree, main_tokens[node], params, space); + }, .FnProtoSimple => unreachable, // TODO .FnProtoSimpleMulti => unreachable, // TODO @@ -2221,6 +2159,52 @@ fn renderParamDecl( } } +fn renderBuiltinCall( + ais: *Ais, + tree: ast.Tree, + builtin_token: ast.TokenIndex, + params: []const ast.Node.Index, + space: Space, +) Error!void { + const token_tags = tree.tokens.items(.tag); + + try renderToken(ais, tree, builtin_token, .None); // @name + + if (params.len == 0) { + try renderToken(ais, tree, builtin_token + 1, .None); // ( + return renderToken(ais, tree, builtin_token + 2, space); // ) + } + + const last_param = params[params.len - 1]; + const after_last_param_token = tree.lastToken(last_param) + 1; + + if (token_tags[after_last_param_token] != .Comma) { 
+ // Render all on one line, no trailing comma. + try renderToken(ais, tree, builtin_token + 1, .None); // ( + + for (params) |param_node, i| { + try renderExpression(ais, tree, param_node, .None); + + if (i + 1 < params.len) { + const comma_token = tree.lastToken(param_node) + 1; + try renderToken(ais, tree, comma_token, .Space); // , + } + } + return renderToken(ais, tree, after_last_param_token, space); // ) + } else { + // Render one param per line. + ais.pushIndent(); + try renderToken(ais, tree, builtin_token + 1, Space.Newline); // ( + + for (params) |param_node| { + try renderExpression(ais, tree, param_node, .Comma); + } + ais.popIndent(); + + return renderToken(ais, tree, after_last_param_token + 1, space); // ) + } +} + /// Render an expression, and the comma that follows it, if it is present in the source. fn renderExpressionComma(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Space) Error!void { const token_tags = tree.tokens.items(.tag); From 4428acf0f7b4b8da2709bffe1c304df435d89eed Mon Sep 17 00:00:00 2001 From: Asherah Connor Date: Thu, 4 Feb 2021 21:33:41 +1100 Subject: [PATCH 011/173] zig fmt: deref, unwrap optional --- lib/std/zig/ast.zig | 2 +- lib/std/zig/parser_test.zig | 18 +++++++++--------- lib/std/zig/render.zig | 23 +++++++++-------------- 3 files changed, 19 insertions(+), 24 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 3e3416e9b5..d4e1392350 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -462,6 +462,7 @@ pub const Tree = struct { .UndefinedLiteral, .UnreachableLiteral, .Identifier, + .Deref, => return main_tokens[n] + end_offset, .Call, @@ -515,7 +516,6 @@ pub const Tree = struct { .Asm => unreachable, // TODO .SliceOpen => unreachable, // TODO .Slice => unreachable, // TODO - .Deref => unreachable, // TODO .ArrayInitOne => unreachable, // TODO .ArrayInit => unreachable, // TODO .StructInitOne => unreachable, // TODO diff --git a/lib/std/zig/parser_test.zig 
b/lib/std/zig/parser_test.zig index 877e1f42a5..ab84c69fdf 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -1503,15 +1503,15 @@ test "zig fmt: top-level fields" { // \\ // ); //} -// -//test "zig fmt: ptr deref operator and unwrap optional operator" { -// try testCanonical( -// \\const a = b.*; -// \\const a = b.?; -// \\ -// ); -//} -// + +test "zig fmt: ptr deref operator and unwrap optional operator" { + try testCanonical( + \\const a = b.*; + \\const a = b.?; + \\ + ); +} + //test "zig fmt: comment after if before another if" { // try testCanonical( // \\test "aoeu" { diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index f55b31a53a..af0c179896 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -991,21 +991,16 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // return renderToken(ais, tree, suffix_op.rtoken, space); // ] //}, - .Deref => unreachable, // TODO - //.Deref => { - // const suffix_op = base.castTag(.Deref).?; + .Deref => { + try renderExpression(ais, tree, datas[node].lhs, .None); + return renderToken(ais, tree, main_tokens[node], space); + }, - // try renderExpression(ais, tree, suffix_op.lhs, Space.None); - // return renderToken(ais, tree, suffix_op.rtoken, space); // .* - //}, - .UnwrapOptional => unreachable, // TODO - //.UnwrapOptional => { - // const suffix_op = base.castTag(.UnwrapOptional).?; - - // try renderExpression(ais, tree, suffix_op.lhs, Space.None); - // try renderToken(ais, tree, tree.prevToken(suffix_op.rtoken), Space.None); // . - // return renderToken(ais, tree, suffix_op.rtoken, space); // ? 
- //}, + .UnwrapOptional => { + try renderExpression(ais, tree, datas[node].lhs, .None); + try renderToken(ais, tree, main_tokens[node], .None); + return renderToken(ais, tree, datas[node].rhs, space); + }, .Break => unreachable, // TODO //.Break => { From 8e46d06650cf26f6769817f75ced486e9ee5c0dd Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Thu, 4 Feb 2021 16:38:29 -0700 Subject: [PATCH 012/173] zig fmt: implement fn protos and defers --- lib/std/zig/ast.zig | 126 +++++++- lib/std/zig/parse.zig | 68 ++++- lib/std/zig/parser_test.zig | 76 ++--- lib/std/zig/render.zig | 581 ++++++++++++++++++++---------------- 4 files changed, 524 insertions(+), 327 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index d4e1392350..3a7f2cf2ac 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -244,9 +244,12 @@ pub const Tree = struct { .AnyType, .Comptime, .Nosuspend, - .Block, .AsmSimple, .Asm, + .FnProtoSimple, + .FnProtoMulti, + .FnProtoOne, + .FnProto, => return main_tokens[n], .Catch, @@ -303,6 +306,7 @@ pub const Tree = struct { .SwitchCaseOne, .SwitchRange, .FnDecl, + .ErrorUnion, => n = datas[n].lhs, .ContainerFieldInit, @@ -342,6 +346,18 @@ pub const Tree = struct { return i; }, + .Block, + .BlockTwo, + => { + // Look for a label. 
+ const lbrace = main_tokens[n]; + if (token_tags[lbrace - 1] == .Colon) { + return lbrace - 2; + } else { + return lbrace; + } + }, + .ArrayType => unreachable, // TODO .ArrayTypeSentinel => unreachable, // TODO .PtrTypeAligned => unreachable, // TODO @@ -355,10 +371,6 @@ pub const Tree = struct { .While => unreachable, // TODO .ForSimple => unreachable, // TODO .For => unreachable, // TODO - .FnProtoSimple => unreachable, // TODO - .FnProtoSimpleMulti => unreachable, // TODO - .FnProtoOne => unreachable, // TODO - .FnProto => unreachable, // TODO .ContainerDecl => unreachable, // TODO .ContainerDeclArg => unreachable, // TODO .TaggedUnion => unreachable, // TODO @@ -366,7 +378,6 @@ pub const Tree = struct { .AsmOutput => unreachable, // TODO .AsmInput => unreachable, // TODO .ErrorValue => unreachable, // TODO - .ErrorUnion => unreachable, // TODO }; } @@ -468,13 +479,20 @@ pub const Tree = struct { .Call, .BuiltinCall, => { - end_offset += 1; // for the `)` + end_offset += 1; // for the rparen const params = tree.extraData(datas[n].rhs, Node.SubRange); if (params.end - params.start == 0) { return main_tokens[n] + end_offset; } n = tree.extra_data[params.end - 1]; // last parameter }, + .Block => { + end_offset += 1; // for the rbrace + if (datas[n].rhs - datas[n].lhs == 0) { + return main_tokens[n] + end_offset; + } + n = tree.extra_data[datas[n].rhs - 1]; // last statement + }, .CallOne, .ArrayAccess, => { @@ -485,8 +503,8 @@ pub const Tree = struct { n = datas[n].rhs; }, - .BuiltinCallTwo => { - end_offset += 1; // for the rparen + .BuiltinCallTwo, .BlockTwo => { + end_offset += 1; // for the rparen/rbrace if (datas[n].rhs == 0) { if (datas[n].lhs == 0) { return main_tokens[n] + end_offset; @@ -511,7 +529,6 @@ pub const Tree = struct { .Continue => unreachable, // TODO .EnumLiteral => unreachable, // TODO .ErrorSetDecl => unreachable, // TODO - .Block => unreachable, // TODO .AsmSimple => unreachable, // TODO .Asm => unreachable, // TODO .SliceOpen => 
unreachable, // TODO @@ -539,7 +556,7 @@ pub const Tree = struct { .ForSimple => unreachable, // TODO .For => unreachable, // TODO .FnProtoSimple => unreachable, // TODO - .FnProtoSimpleMulti => unreachable, // TODO + .FnProtoMulti => unreachable, // TODO .FnProtoOne => unreachable, // TODO .FnProto => unreachable, // TODO .ContainerDecl => unreachable, // TODO @@ -665,6 +682,67 @@ pub const Tree = struct { }); } + pub fn fnProtoSimple(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) Full.FnProto { + assert(tree.nodes.items(.tag)[node] == .FnProtoSimple); + const data = tree.nodes.items(.data)[node]; + buffer[0] = data.lhs; + const params = if (data.lhs == 0) buffer[0..0] else buffer[0..1]; + return tree.fullFnProto(.{ + .fn_token = tree.nodes.items(.main_token)[node], + .return_type = data.rhs, + .params = params, + .align_expr = 0, + .section_expr = 0, + .callconv_expr = 0, + }); + } + + pub fn fnProtoMulti(tree: Tree, node: Node.Index) Full.FnProto { + assert(tree.nodes.items(.tag)[node] == .FnProtoMulti); + const data = tree.nodes.items(.data)[node]; + const params_range = tree.extraData(data.lhs, Node.SubRange); + const params = tree.extra_data[params_range.start..params_range.end]; + return tree.fullFnProto(.{ + .fn_token = tree.nodes.items(.main_token)[node], + .return_type = data.rhs, + .params = params, + .align_expr = 0, + .section_expr = 0, + .callconv_expr = 0, + }); + } + + pub fn fnProtoOne(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) Full.FnProto { + assert(tree.nodes.items(.tag)[node] == .FnProtoOne); + const data = tree.nodes.items(.data)[node]; + const extra = tree.extraData(data.lhs, Node.FnProtoOne); + buffer[0] = extra.param; + const params = if (extra.param == 0) buffer[0..0] else buffer[0..1]; + return tree.fullFnProto(.{ + .fn_token = tree.nodes.items(.main_token)[node], + .return_type = data.rhs, + .params = params, + .align_expr = extra.align_expr, + .section_expr = extra.section_expr, + .callconv_expr = extra.callconv_expr, 
+ }); + } + + pub fn fnProto(tree: Tree, node: Node.Index) Full.FnProto { + assert(tree.nodes.items(.tag)[node] == .FnProto); + const data = tree.nodes.items(.data)[node]; + const extra = tree.extraData(data.lhs, Node.FnProto); + const params = tree.extra_data[extra.params_start..extra.params_end]; + return tree.fullFnProto(.{ + .fn_token = tree.nodes.items(.main_token)[node], + .return_type = data.rhs, + .params = params, + .align_expr = extra.align_expr, + .section_expr = extra.section_expr, + .callconv_expr = extra.callconv_expr, + }); + } + fn fullVarDecl(tree: Tree, info: Full.VarDecl.Ast) Full.VarDecl { const token_tags = tree.tokens.items(.tag); var result: Full.VarDecl = .{ @@ -728,6 +806,14 @@ pub const Tree = struct { } return result; } + + fn fullFnProto(tree: Tree, info: Full.FnProto.Ast) Full.FnProto { + const token_tags = tree.tokens.items(.tag); + var result: Full.FnProto = .{ + .ast = info, + }; + return result; + } }; /// Fully assembled AST node information. @@ -778,6 +864,19 @@ pub const Full = struct { align_expr: Node.Index, }; }; + + pub const FnProto = struct { + ast: Ast, + + pub const Ast = struct { + fn_token: TokenIndex, + return_type: Node.Index, + params: []const Node.Index, + align_expr: Node.Index, + section_expr: Node.Index, + callconv_expr: Node.Index, + }; + }; }; pub const Error = union(enum) { @@ -1247,7 +1346,7 @@ pub const Node = struct { FnProtoSimple, /// `fn(a: b, c: d) rhs`. `sub_range_list[lhs]`. /// anytype and ... parameters are omitted from the AST tree. - FnProtoSimpleMulti, + FnProtoMulti, /// `fn(a: b) rhs linksection(e) callconv(f)`. lhs is index into extra_data. /// zero or one parameters. /// anytype and ... parameters are omitted from the AST tree. @@ -1321,6 +1420,9 @@ pub const Node = struct { Comptime, /// `nosuspend lhs`. rhs unused. Nosuspend, + /// `{lhs; rhs;}`. rhs or lhs can be omitted. + /// main_token points at the `{`. + BlockTwo, /// `{}`. `sub_list[lhs..rhs]`. /// main_token points at the `{`. 
Block, diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig index 02f4dc49a3..b11fdf25ea 100644 --- a/lib/std/zig/parse.zig +++ b/lib/std/zig/parse.zig @@ -541,7 +541,7 @@ const Parser = struct { .multi => |list| { const span = try p.listToSpan(list); return p.addNode(.{ - .tag = .FnProtoSimpleMulti, + .tag = .FnProtoMulti, .main_token = fn_token, .data = .{ .lhs = try p.addExtra(Node.SubRange{ @@ -810,6 +810,22 @@ const Parser = struct { return statement; } + /// If a parse error occurs, reports an error, but then finds the next statement + /// and returns that one instead. If a parse error occurs but there is no following + /// statement, returns 0. + fn expectStatementRecoverable(p: *Parser) error{OutOfMemory}!Node.Index { + while (true) { + return p.expectStatement() catch |err| switch (err) { + error.OutOfMemory => return error.OutOfMemory, + error.ParseError => { + p.findNextStmt(); // Try to skip to the next statement. + if (p.token_tags[p.tok_i] == .RBrace) return null_node; + continue; + }, + }; + } + } + /// IfStatement /// <- IfPrefix BlockExpr ( KEYWORD_else Payload? Statement )? /// / IfPrefix AssignExpr ( SEMICOLON / KEYWORD_else Payload? 
Statement ) @@ -1859,25 +1875,53 @@ const Parser = struct { fn parseBlock(p: *Parser) !Node.Index { const lbrace = p.eatToken(.LBrace) orelse return null_node; + if (p.eatToken(.RBrace)) |_| { + return p.addNode(.{ + .tag = .BlockTwo, + .main_token = lbrace, + .data = .{ + .lhs = 0, + .rhs = 0, + }, + }); + } + + const stmt_one = try p.expectStatementRecoverable(); + if (p.eatToken(.RBrace)) |_| { + return p.addNode(.{ + .tag = .BlockTwo, + .main_token = lbrace, + .data = .{ + .lhs = stmt_one, + .rhs = 0, + }, + }); + } + const stmt_two = try p.expectStatementRecoverable(); + if (p.eatToken(.RBrace)) |_| { + return p.addNode(.{ + .tag = .BlockTwo, + .main_token = lbrace, + .data = .{ + .lhs = stmt_one, + .rhs = stmt_two, + }, + }); + } + var statements = std.ArrayList(Node.Index).init(p.gpa); defer statements.deinit(); + try statements.appendSlice(&[_]Node.Index{ stmt_one, stmt_two }); + while (true) { - const statement = (p.parseStatement() catch |err| switch (err) { - error.OutOfMemory => return error.OutOfMemory, - error.ParseError => { - // try to skip to the next statement - p.findNextStmt(); - continue; - }, - }); + const statement = try p.expectStatementRecoverable(); if (statement == 0) break; try statements.append(statement); + if (p.token_tags[p.tok_i] == .RBrace) break; } - - const rbrace = try p.expectToken(.RBrace); + _ = try p.expectToken(.RBrace); const statements_span = try p.listToSpan(statements.items); - return p.addNode(.{ .tag = .Block, .main_token = lbrace, diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index ab84c69fdf..26b720766c 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -100,44 +100,44 @@ test "zig fmt: top-level fields" { ); } -//test "zig fmt: decl between fields" { -// try testError( -// \\const S = struct { -// \\ const foo = 2; -// \\ const bar = 2; -// \\ const baz = 2; -// \\ a: usize, -// \\ const foo1 = 2; -// \\ const bar1 = 2; -// \\ const baz1 = 2; -// \\ b: usize, -// 
\\}; -// , &[_]Error{ -// .DeclBetweenFields, -// }); -//} -// -//test "zig fmt: eof after missing comma" { -// try testError( -// \\foo() -// , &[_]Error{ -// .ExpectedToken, -// }); -//} -// -//test "zig fmt: errdefer with payload" { -// try testCanonical( -// \\pub fn main() anyerror!void { -// \\ errdefer |a| x += 1; -// \\ errdefer |a| {} -// \\ errdefer |a| { -// \\ x += 1; -// \\ } -// \\} -// \\ -// ); -//} -// +test "zig fmt: decl between fields" { + try testError( + \\const S = struct { + \\ const foo = 2; + \\ const bar = 2; + \\ const baz = 2; + \\ a: usize, + \\ const foo1 = 2; + \\ const bar1 = 2; + \\ const baz1 = 2; + \\ b: usize, + \\}; + , &[_]Error{ + .DeclBetweenFields, + }); +} + +test "zig fmt: eof after missing comma" { + try testError( + \\foo() + , &[_]Error{ + .ExpectedToken, + }); +} + +test "zig fmt: errdefer with payload" { + try testCanonical( + \\pub fn main() anyerror!void { + \\ errdefer |a| x += 1; + \\ errdefer |a| {} + \\ errdefer |a| { + \\ x += 1; + \\ } + \\} + \\ + ); +} + //test "zig fmt: nosuspend block" { // try testCanonical( // \\pub fn main() anyerror!void { diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index af0c179896..288d3e8b97 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -77,22 +77,11 @@ fn renderExtraNewline(ais: *Ais, tree: ast.Tree, node: ast.Node.Index) Error!voi } fn renderExtraNewlineToken(ais: *Ais, tree: ast.Tree, first_token: ast.TokenIndex) Error!void { - @panic("TODO implement renderExtraNewlineToken"); - //var prev_token = first_token; - //if (prev_token == 0) return; - //const token_tags = tree.tokens.items(.tag); - //var newline_threshold: usize = 2; - //while (token_tags[prev_token - 1] == .DocComment) { - // if (tree.tokenLocation(tree.token_locs[prev_token - 1].end, prev_token).line == 1) { - // newline_threshold += 1; - // } - // prev_token -= 1; - //} - //const prev_token_end = tree.token_locs[prev_token - 1].end; - //const loc = 
tree.tokenLocation(prev_token_end, first_token); - //if (loc.line >= newline_threshold) { - // try ais.insertNewline(); - //} + if (first_token == 0) return; + const token_starts = tree.tokens.items(.start); + if (tree.tokenLocation(token_starts[first_token - 1], first_token).line >= 2) { + return ais.insertNewline(); + } } fn renderContainerDecl(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) Error!void { @@ -101,24 +90,43 @@ fn renderContainerDecl(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: S const datas = tree.nodes.items(.data); try renderDocComments(ais, tree, tree.firstToken(decl)); switch (tree.nodes.items(.tag)[decl]) { - .FnProtoSimple => unreachable, // TODO - .FnProtoSimpleMulti => unreachable, // TODO - .FnProtoOne => unreachable, // TODO - .FnDecl => unreachable, // TODO - .FnProto => unreachable, // TODO - // .FnProto => { - // const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", decl); + .FnDecl => { + // Some examples: + // pub extern "foo" fn ... + // export fn ... + const fn_proto = datas[decl].lhs; + const fn_token = main_tokens[fn_proto]; + // Go back to the first token we should render here. 
+ var i = fn_token; + while (i > 0) { + i -= 1; + switch (token_tags[i]) { + .Keyword_extern, + .Keyword_export, + .Keyword_pub, + .StringLiteral, + => continue, - // try renderDocComments(ais, tree, fn_proto, fn_proto.getDocComments()); - - // if (fn_proto.getBodyNode()) |body_node| { - // try renderExpression(ais, tree, decl, .Space); - // try renderExpression(ais, tree, body_node, space); - // } else { - // try renderExpression(ais, tree, decl, .None); - // try renderToken(ais, tree, tree.nextToken(decl.lastToken()), space); - // } - // }, + else => { + i += 1; + break; + }, + } + } + while (i < fn_token) : (i += 1) { + try renderToken(ais, tree, i, .Space); + } + try renderExpression(ais, tree, fn_proto, .Space); + return renderExpression(ais, tree, datas[decl].rhs, space); + }, + .FnProtoSimple, + .FnProtoMulti, + .FnProtoOne, + .FnProto, + => { + try renderExpression(ais, tree, decl, .None); + try renderToken(ais, tree, tree.lastToken(decl) + 1, space); // semicolon + }, .UsingNamespace => unreachable, // TODO // .Use => { @@ -186,90 +194,48 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // } // return renderToken(ais, tree, any_type.token, space); //}, - .Block => { - const lbrace = main_tokens[node]; - if (token_tags[lbrace - 1] == .Colon and - token_tags[lbrace - 2] == .Identifier) - { - try renderToken(ais, tree, lbrace - 2, .None); - try renderToken(ais, tree, lbrace - 1, .Space); - } - const nodes_data = tree.nodes.items(.data); - const statements = tree.extra_data[nodes_data[node].lhs..nodes_data[node].rhs]; - - if (statements.len == 0) { - ais.pushIndentNextLine(); - try renderToken(ais, tree, lbrace, .None); - ais.popIndent(); - const rbrace = lbrace + 1; - return renderToken(ais, tree, rbrace, space); + .BlockTwo => { + var statements = [2]ast.Node.Index{ datas[node].lhs, datas[node].rhs }; + if (datas[node].lhs == 0) { + return renderBlock(ais, tree, main_tokens[node], statements[0..0], space); + } else if 
(datas[node].rhs == 0) { + return renderBlock(ais, tree, main_tokens[node], statements[0..1], space); } else { - ais.pushIndentNextLine(); - - try renderToken(ais, tree, lbrace, .Newline); - - for (statements) |stmt, i| { - switch (node_tags[stmt]) { - .GlobalVarDecl => try renderVarDecl(ais, tree, tree.globalVarDecl(stmt)), - .LocalVarDecl => try renderVarDecl(ais, tree, tree.localVarDecl(stmt)), - .SimpleVarDecl => try renderVarDecl(ais, tree, tree.simpleVarDecl(stmt)), - .AlignedVarDecl => try renderVarDecl(ais, tree, tree.alignedVarDecl(stmt)), - else => { - const semicolon = tree.lastToken(stmt) + 1; - if (token_tags[semicolon] == .Semicolon) { - try renderExpression(ais, tree, stmt, .None); - try renderToken(ais, tree, semicolon, .Newline); - } else { - try renderExpression(ais, tree, stmt, .Newline); - } - }, - } - - if (i + 1 < statements.len) { - try renderExtraNewline(ais, tree, statements[i + 1]); - } - } - ais.popIndent(); - // The rbrace could be +1 or +2 from the last token of the last - // statement in the block because lastToken() does not count semicolons. 
- const maybe_rbrace = tree.lastToken(statements[statements.len - 1]) + 1; - if (token_tags[maybe_rbrace] == .RBrace) { - return renderToken(ais, tree, maybe_rbrace, space); - } else { - assert(token_tags[maybe_rbrace + 1] == .RBrace); - return renderToken(ais, tree, maybe_rbrace + 1, space); - } + return renderBlock(ais, tree, main_tokens[node], statements[0..2], space); } }, + .Block => { + const lbrace = main_tokens[node]; + const statements = tree.extra_data[datas[node].lhs..datas[node].rhs]; + return renderBlock(ais, tree, main_tokens[node], statements, space); + }, - .Defer => unreachable, // TODO - .ErrDefer => unreachable, // TODO - //.Defer => { - // const defer_node = @fieldParentPtr(ast.Node.Defer, "base", base); + .ErrDefer => { + const defer_token = main_tokens[node]; + const payload_token = datas[node].lhs; + const expr = datas[node].rhs; - // try renderToken(ais, tree, defer_node.defer_token, Space.Space); - // if (defer_node.payload) |payload| { - // try renderExpression(ais, tree, payload, Space.Space); - // } - // return renderExpression(ais, tree, defer_node.expr, space); - //}, - .Comptime => { + try renderToken(ais, tree, defer_token, .Space); + if (payload_token != 0) { + try renderToken(ais, tree, payload_token - 1, .None); // | + try renderToken(ais, tree, payload_token, .None); // identifier + try renderToken(ais, tree, payload_token + 1, .Space); // | + } + return renderExpression(ais, tree, expr, space); + }, + + .Defer => { + const defer_token = main_tokens[node]; + const expr = datas[node].rhs; + try renderToken(ais, tree, defer_token, .Space); + return renderExpression(ais, tree, expr, space); + }, + .Comptime, .Nosuspend => { const comptime_token = main_tokens[node]; const block = datas[node].lhs; try renderToken(ais, tree, comptime_token, .Space); return renderExpression(ais, tree, block, space); }, - .Nosuspend => unreachable, // TODO - //.Nosuspend => { - // const nosuspend_node = @fieldParentPtr(ast.Node.Nosuspend, "base", base); 
- // if (mem.eql(u8, tree.tokenSlice(nosuspend_node.nosuspend_token), "noasync")) { - // // TODO: remove this - // try ais.writer().writeAll("nosuspend "); - // } else { - // try renderToken(ais, tree, nosuspend_node.nosuspend_token, Space.Space); - // } - // return renderExpression(ais, tree, nosuspend_node.expr, space); - //}, .Suspend => unreachable, // TODO //.Suspend => { @@ -1274,140 +1240,16 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac return renderBuiltinCall(ais, tree, main_tokens[node], params, space); }, - .FnProtoSimple => unreachable, // TODO - .FnProtoSimpleMulti => unreachable, // TODO - .FnProtoOne => unreachable, // TODO - .FnProto => unreachable, // TODO - //.FnProto => { - // const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", base); - - // if (fn_proto.getVisibToken()) |visib_token_index| { - // const visib_token = tree.token_tags[visib_token_index]; - // assert(visib_token == .Keyword_pub or visib_token == .Keyword_export); - - // try renderToken(ais, tree, visib_token_index, Space.Space); // pub - // } - - // if (fn_proto.getExternExportInlineToken()) |extern_export_inline_token| { - // if (fn_proto.getIsExternPrototype() == null) - // try renderToken(ais, tree, extern_export_inline_token, Space.Space); // extern/export/inline - // } - - // if (fn_proto.getLibName()) |lib_name| { - // try renderExpression(ais, tree, lib_name, Space.Space); - // } - - // const lparen = if (fn_proto.getNameToken()) |name_token| blk: { - // try renderToken(ais, tree, fn_proto.fn_token, Space.Space); // fn - // try renderToken(ais, tree, name_token, Space.None); // name - // break :blk tree.nextToken(name_token); - // } else blk: { - // try renderToken(ais, tree, fn_proto.fn_token, Space.Space); // fn - // break :blk tree.nextToken(fn_proto.fn_token); - // }; - // assert(tree.token_tags[lparen] == .LParen); - - // const rparen = tree.prevToken( - // // the first token for the annotation expressions is the left - // // 
parenthesis, hence the need for two prevToken - // if (fn_proto.getAlignExpr()) |align_expr| - // tree.prevToken(tree.prevToken(align_expr.firstToken())) - // else if (fn_proto.getSectionExpr()) |section_expr| - // tree.prevToken(tree.prevToken(section_expr.firstToken())) - // else if (fn_proto.getCallconvExpr()) |callconv_expr| - // tree.prevToken(tree.prevToken(callconv_expr.firstToken())) - // else switch (fn_proto.return_type) { - // .Explicit => |node| node.firstToken(), - // .InferErrorSet => |node| tree.prevToken(node.firstToken()), - // .Invalid => unreachable, - // }, - // ); - // assert(tree.token_tags[rparen] == .RParen); - - // const src_params_trailing_comma = blk: { - // const maybe_comma = tree.token_tags[rparen - 1]; - // break :blk maybe_comma == .Comma or maybe_comma == .LineComment; - // }; - - // if (!src_params_trailing_comma) { - // try renderToken(ais, tree, lparen, Space.None); // ( - - // // render all on one line, no trailing comma - // for (fn_proto.params()) |param_decl, i| { - // try renderParamDecl(allocator, ais, tree, param_decl, Space.None); - - // if (i + 1 < fn_proto.params_len or fn_proto.getVarArgsToken() != null) { - // const comma = tree.nextToken(param_decl.lastToken()); - // try renderToken(ais, tree, comma, Space.Space); // , - // } - // } - // if (fn_proto.getVarArgsToken()) |var_args_token| { - // try renderToken(ais, tree, var_args_token, Space.None); - // } - // } else { - // // one param per line - // ais.pushIndent(); - // defer ais.popIndent(); - // try renderToken(ais, tree, lparen, Space.Newline); // ( - - // for (fn_proto.params()) |param_decl| { - // try renderParamDecl(allocator, ais, tree, param_decl, Space.Comma); - // } - // if (fn_proto.getVarArgsToken()) |var_args_token| { - // try renderToken(ais, tree, var_args_token, Space.Comma); - // } - // } - - // try renderToken(ais, tree, rparen, Space.Space); // ) - - // if (fn_proto.getAlignExpr()) |align_expr| { - // const align_rparen = 
tree.nextToken(align_expr.lastToken()); - // const align_lparen = tree.prevToken(align_expr.firstToken()); - // const align_kw = tree.prevToken(align_lparen); - - // try renderToken(ais, tree, align_kw, Space.None); // align - // try renderToken(ais, tree, align_lparen, Space.None); // ( - // try renderExpression(ais, tree, align_expr, Space.None); - // try renderToken(ais, tree, align_rparen, Space.Space); // ) - // } - - // if (fn_proto.getSectionExpr()) |section_expr| { - // const section_rparen = tree.nextToken(section_expr.lastToken()); - // const section_lparen = tree.prevToken(section_expr.firstToken()); - // const section_kw = tree.prevToken(section_lparen); - - // try renderToken(ais, tree, section_kw, Space.None); // section - // try renderToken(ais, tree, section_lparen, Space.None); // ( - // try renderExpression(ais, tree, section_expr, Space.None); - // try renderToken(ais, tree, section_rparen, Space.Space); // ) - // } - - // if (fn_proto.getCallconvExpr()) |callconv_expr| { - // const callconv_rparen = tree.nextToken(callconv_expr.lastToken()); - // const callconv_lparen = tree.prevToken(callconv_expr.firstToken()); - // const callconv_kw = tree.prevToken(callconv_lparen); - - // try renderToken(ais, tree, callconv_kw, Space.None); // callconv - // try renderToken(ais, tree, callconv_lparen, Space.None); // ( - // try renderExpression(ais, tree, callconv_expr, Space.None); - // try renderToken(ais, tree, callconv_rparen, Space.Space); // ) - // } else if (fn_proto.getIsExternPrototype() != null) { - // try ais.writer().writeAll("callconv(.C) "); - // } else if (fn_proto.getIsAsync() != null) { - // try ais.writer().writeAll("callconv(.Async) "); - // } - - // switch (fn_proto.return_type) { - // .Explicit => |node| { - // return renderExpression(ais, tree, node, space); - // }, - // .InferErrorSet => |node| { - // try renderToken(ais, tree, tree.prevToken(node.firstToken()), Space.None); // ! 
- //             return renderExpression(ais, tree, node, space); - //         }, - //         .Invalid => unreachable, - //     } - //}, + .FnProtoSimple => { + var params: [1]ast.Node.Index = undefined; + return renderFnProto(ais, tree, tree.fnProtoSimple(&params, node), space); + }, + .FnProtoMulti => return renderFnProto(ais, tree, tree.fnProtoMulti(node), space), + .FnProtoOne => { + var params: [1]ast.Node.Index = undefined; + return renderFnProto(ais, tree, tree.fnProtoOne(&params, node), space); + }, + .FnProto => return renderFnProto(ais, tree, tree.fnProto(node), space), .AnyFrameType => unreachable, // TODO //.AnyFrameType => { @@ -2130,30 +1972,6 @@ fn renderContainerField( return renderExpressionComma(ais, tree, field.ast.value_expr, space); // value } -fn renderParamDecl( - allocator: *mem.Allocator, - ais: *Ais, - tree: ast.Tree, - param_decl: ast.Node.FnProto.ParamDecl, - space: Space, -) Error!void { - try renderDocComments(ais, tree, param_decl, param_decl.doc_comments); - - if (param_decl.comptime_token) |comptime_token| { - try renderToken(ais, tree, comptime_token, Space.Space); - } - if (param_decl.noalias_token) |noalias_token| { - try renderToken(ais, tree, noalias_token, Space.Space); - } - if (param_decl.name_token) |name_token| { - try renderToken(ais, tree, name_token, Space.None); - try renderToken(ais, tree, tree.nextToken(name_token), Space.Space); // : - } - switch (param_decl.param_type) { - .any_type, .type_expr => |node| try renderExpression(ais, tree, node, space), - } -} - fn renderBuiltinCall( ais: *Ais, tree: ast.Tree, @@ -2200,6 +2018,239 @@ fn renderBuiltinCall( } } +fn renderFnProto(ais: *Ais, tree: ast.Tree, fn_proto: ast.Full.FnProto, space: Space) Error!void { + const token_tags = tree.tokens.items(.tag); + + const after_fn_token = fn_proto.ast.fn_token + 1; + const lparen = if (token_tags[after_fn_token] == .Identifier) blk: { + try renderToken(ais, tree, fn_proto.ast.fn_token, .Space); // fn + try renderToken(ais, tree, after_fn_token, .None); // name + 
break :blk after_fn_token + 1; + } else blk: { + try renderToken(ais, tree, fn_proto.ast.fn_token, .Space); // fn + break :blk fn_proto.ast.fn_token + 1; + }; + assert(token_tags[lparen] == .LParen); + + const maybe_bang = tree.firstToken(fn_proto.ast.return_type) - 1; + const rparen = blk: { + // The first token for the annotation expressions is the left + // parenthesis, hence the need for two previous tokens. + if (fn_proto.ast.align_expr != 0) { + break :blk tree.firstToken(fn_proto.ast.align_expr) - 3; + } + if (fn_proto.ast.section_expr != 0) { + break :blk tree.firstToken(fn_proto.ast.section_expr) - 3; + } + if (fn_proto.ast.callconv_expr != 0) { + break :blk tree.firstToken(fn_proto.ast.callconv_expr) - 3; + } + if (token_tags[maybe_bang] == .Bang) { + break :blk maybe_bang - 1; + } + break :blk maybe_bang; + }; + assert(token_tags[rparen] == .RParen); + + // The params list is a sparse set that does *not* include anytype or ... parameters. + + if (token_tags[rparen - 1] != .Comma) { + // Render all on one line, no trailing comma. + try renderToken(ais, tree, lparen, .None); // ( + + var param_i: usize = 0; + var last_param_token = lparen; + while (true) { + last_param_token += 1; + switch (token_tags[last_param_token]) { + .DocComment => { + try renderToken(ais, tree, last_param_token, .Newline); + continue; + }, + .Ellipsis3 => { + try renderToken(ais, tree, last_param_token, .None); // ... 
+ break; + }, + .Keyword_noalias, .Keyword_comptime => { + try renderToken(ais, tree, last_param_token, .Space); + last_param_token += 1; + }, + .Identifier => {}, + .Keyword_anytype => { + try renderToken(ais, tree, last_param_token, .None); // anytype + continue; + }, + .RParen => break, + .Comma => { + try renderToken(ais, tree, last_param_token, .Space); // , + last_param_token += 1; + }, + else => unreachable, + } + if (token_tags[last_param_token] == .Identifier) { + try renderToken(ais, tree, last_param_token, .None); // name + last_param_token += 1; + try renderToken(ais, tree, last_param_token, .Space); // : + last_param_token += 1; + } + if (token_tags[last_param_token] == .Keyword_anytype) { + try renderToken(ais, tree, last_param_token, .None); // anytype + continue; + } + const param = fn_proto.ast.params[param_i]; + param_i += 1; + try renderExpression(ais, tree, param, .None); + last_param_token = tree.lastToken(param) + 1; + } + } else { + // One param per line. + ais.pushIndent(); + try renderToken(ais, tree, lparen, .Newline); // ( + + var param_i: usize = 0; + var last_param_token = lparen; + while (true) { + last_param_token += 1; + switch (token_tags[last_param_token]) { + .DocComment => { + try renderToken(ais, tree, last_param_token, .Newline); + continue; + }, + .Ellipsis3 => { + try renderToken(ais, tree, last_param_token, .Comma); // ... 
+ break; + }, + .Keyword_noalias, .Keyword_comptime => { + try renderToken(ais, tree, last_param_token, .Space); + last_param_token += 1; + }, + .Identifier => {}, + .Keyword_anytype => { + try renderToken(ais, tree, last_param_token, .Comma); // anytype + continue; + }, + .RParen => break, + else => unreachable, + } + if (token_tags[last_param_token] == .Identifier) { + try renderToken(ais, tree, last_param_token, .None); // name + last_param_token += 1; + try renderToken(ais, tree, last_param_token, .Space); // : + last_param_token += 1; + } + if (token_tags[last_param_token] == .Keyword_anytype) { + try renderToken(ais, tree, last_param_token, .Comma); // anytype + continue; + } + const param = fn_proto.ast.params[param_i]; + param_i += 1; + try renderExpression(ais, tree, param, .Comma); + last_param_token = tree.lastToken(param) + 2; + } + ais.popIndent(); + } + + try renderToken(ais, tree, rparen, .Space); // ) + + if (fn_proto.ast.align_expr != 0) { + const align_lparen = tree.firstToken(fn_proto.ast.align_expr) - 1; + const align_rparen = tree.lastToken(fn_proto.ast.align_expr) + 1; + + try renderToken(ais, tree, align_lparen - 1, .None); // align + try renderToken(ais, tree, align_lparen, .None); // ( + try renderExpression(ais, tree, fn_proto.ast.align_expr, .None); + try renderToken(ais, tree, align_rparen, .Space); // ) + } + + if (fn_proto.ast.section_expr != 0) { + const section_lparen = tree.firstToken(fn_proto.ast.section_expr) - 1; + const section_rparen = tree.lastToken(fn_proto.ast.section_expr) + 1; + + try renderToken(ais, tree, section_lparen - 1, .None); // section + try renderToken(ais, tree, section_lparen, .None); // ( + try renderExpression(ais, tree, fn_proto.ast.section_expr, .None); + try renderToken(ais, tree, section_rparen, .Space); // ) + } + + if (fn_proto.ast.callconv_expr != 0) { + const callconv_lparen = tree.firstToken(fn_proto.ast.callconv_expr) - 1; + const callconv_rparen = tree.lastToken(fn_proto.ast.callconv_expr) + 1; + 
+ try renderToken(ais, tree, callconv_lparen - 1, .None); // callconv + try renderToken(ais, tree, callconv_lparen, .None); // ( + try renderExpression(ais, tree, fn_proto.ast.callconv_expr, .None); + try renderToken(ais, tree, callconv_rparen, .Space); // ) + } + + if (token_tags[maybe_bang] == .Bang) { + try renderToken(ais, tree, maybe_bang, .None); // ! + } + return renderExpression(ais, tree, fn_proto.ast.return_type, space); +} + +fn renderBlock( + ais: *Ais, + tree: ast.Tree, + lbrace: ast.TokenIndex, + statements: []const ast.Node.Index, + space: Space, +) Error!void { + const token_tags = tree.tokens.items(.tag); + const node_tags = tree.nodes.items(.tag); + + if (token_tags[lbrace - 1] == .Colon and + token_tags[lbrace - 2] == .Identifier) + { + try renderToken(ais, tree, lbrace - 2, .None); + try renderToken(ais, tree, lbrace - 1, .Space); + } + const nodes_data = tree.nodes.items(.data); + + if (statements.len == 0) { + ais.pushIndentNextLine(); + try renderToken(ais, tree, lbrace, .None); + ais.popIndent(); + const rbrace = lbrace + 1; + return renderToken(ais, tree, rbrace, space); + } else { + ais.pushIndentNextLine(); + + try renderToken(ais, tree, lbrace, .Newline); + + for (statements) |stmt, i| { + switch (node_tags[stmt]) { + .GlobalVarDecl => try renderVarDecl(ais, tree, tree.globalVarDecl(stmt)), + .LocalVarDecl => try renderVarDecl(ais, tree, tree.localVarDecl(stmt)), + .SimpleVarDecl => try renderVarDecl(ais, tree, tree.simpleVarDecl(stmt)), + .AlignedVarDecl => try renderVarDecl(ais, tree, tree.alignedVarDecl(stmt)), + else => { + const semicolon = tree.lastToken(stmt) + 1; + if (token_tags[semicolon] == .Semicolon) { + try renderExpression(ais, tree, stmt, .None); + try renderToken(ais, tree, semicolon, .Newline); + } else { + try renderExpression(ais, tree, stmt, .Newline); + } + }, + } + + if (i + 1 < statements.len) { + try renderExtraNewline(ais, tree, statements[i + 1]); + } + } + ais.popIndent(); + // The rbrace could be +1 or +2 
from the last token of the last + // statement in the block because lastToken() does not count semicolons. + const maybe_rbrace = tree.lastToken(statements[statements.len - 1]) + 1; + if (token_tags[maybe_rbrace] == .RBrace) { + return renderToken(ais, tree, maybe_rbrace, space); + } else { + assert(token_tags[maybe_rbrace + 1] == .RBrace); + return renderToken(ais, tree, maybe_rbrace + 1, space); + } + } +} + /// Render an expression, and the comma that follows it, if it is present in the source. fn renderExpressionComma(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Space) Error!void { const token_tags = tree.tokens.items(.tag); From 7069459a76ad507bb1ce56b2d8de923b93385def Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Thu, 4 Feb 2021 19:59:06 -0700 Subject: [PATCH 013/173] zig fmt: implement struct init --- lib/std/zig/ast.zig | 183 ++++++++++++++++-- lib/std/zig/parse.zig | 12 +- lib/std/zig/parser_test.zig | 32 ++-- lib/std/zig/render.zig | 362 +++++++++++++++--------------------- 4 files changed, 345 insertions(+), 244 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 3a7f2cf2ac..c6d477cc5f 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -214,8 +214,6 @@ pub const Tree = struct { .OptionalType, .ArrayInitDotTwo, .ArrayInitDot, - .StructInitDotTwo, - .StructInitDot, .Switch, .IfSimple, .If, @@ -252,6 +250,11 @@ pub const Tree = struct { .FnProto, => return main_tokens[n], + .StructInitDotTwo, + .StructInitDotTwoComma, + .StructInitDot, + => return main_tokens[n] - 1, + .Catch, .FieldAccess, .UnwrapOptional, @@ -403,6 +406,7 @@ pub const Tree = struct { .Resume, .Break, .Return, + .Nosuspend, => n = datas[n].lhs, .TestDecl, @@ -451,7 +455,6 @@ pub const Tree = struct { .AnyFrameType, .ErrorUnion, .Comptime, - .Nosuspend, .IfSimple, .WhileSimple, => n = datas[n].rhs, @@ -503,27 +506,97 @@ pub const Tree = struct { n = datas[n].rhs; }, - .BuiltinCallTwo, .BlockTwo => { + .BuiltinCallTwo, + .BlockTwo, + 
.StructInitDotTwo, + => { end_offset += 1; // for the rparen/rbrace - if (datas[n].rhs == 0) { - if (datas[n].lhs == 0) { - return main_tokens[n] + end_offset; - } else { - n = datas[n].lhs; - } - } else { + if (datas[n].rhs != 0) { n = datas[n].rhs; + } else if (datas[n].lhs != 0) { + n = datas[n].lhs; + } else { + return main_tokens[n] + end_offset; + } + }, + .StructInitDotTwoComma => { + end_offset += 2; // for the comma + rbrace + if (datas[n].rhs != 0) { + n = datas[n].rhs; + } else if (datas[n].lhs != 0) { + n = datas[n].lhs; + } else { + unreachable; + } + }, + .SimpleVarDecl => { + if (datas[n].rhs != 0) { + n = datas[n].rhs; + } else if (datas[n].lhs != 0) { + n = datas[n].lhs; + } else { + end_offset += 1; // from mut token to name + return main_tokens[n] + end_offset; + } + }, + .AlignedVarDecl => { + if (datas[n].rhs != 0) { + n = datas[n].rhs; + } else if (datas[n].lhs != 0) { + end_offset += 1; // for the rparen + n = datas[n].lhs; + } else { + end_offset += 1; // from mut token to name + return main_tokens[n] + end_offset; + } + }, + .GlobalVarDecl => { + if (datas[n].rhs != 0) { + n = datas[n].rhs; + } else { + const extra = tree.extraData(datas[n].lhs, Node.GlobalVarDecl); + if (extra.section_node != 0) { + end_offset += 1; // for the rparen + n = extra.section_node; + } else if (extra.align_node != 0) { + end_offset += 1; // for the rparen + n = extra.align_node; + } else if (extra.type_node != 0) { + n = extra.type_node; + } else { + end_offset += 1; // from mut token to name + return main_tokens[n] + end_offset; + } + } + }, + .LocalVarDecl => { + if (datas[n].rhs != 0) { + n = datas[n].rhs; + } else { + const extra = tree.extraData(datas[n].lhs, Node.LocalVarDecl); + if (extra.align_node != 0) { + end_offset += 1; // for the rparen + n = extra.align_node; + } else if (extra.type_node != 0) { + n = extra.type_node; + } else { + end_offset += 1; // from mut token to name + return main_tokens[n] + end_offset; + } } }, + // These are not supported 
by lastToken() because implementation would + // require recursion due to the optional comma followed by rbrace. + // TODO follow the pattern set by StructInitDotTwoComma which will allow + // lastToken to work for all of these. + .StructInitDot => unreachable, .ContainerFieldInit => unreachable, .ContainerFieldAlign => unreachable, .ContainerField => unreachable, .ArrayInitDotTwo => unreachable, // TODO .ArrayInitDot => unreachable, // TODO - .StructInitDotTwo => unreachable, // TODO - .StructInitDot => unreachable, // TODO .Switch => unreachable, // TODO .If => unreachable, // TODO .Continue => unreachable, // TODO @@ -539,10 +612,6 @@ pub const Tree = struct { .SwitchCaseOne => unreachable, // TODO .SwitchRange => unreachable, // TODO .FnDecl => unreachable, // TODO - .GlobalVarDecl => unreachable, // TODO - .LocalVarDecl => unreachable, // TODO - .SimpleVarDecl => unreachable, // TODO - .AlignedVarDecl => unreachable, // TODO .ArrayType => unreachable, // TODO .ArrayTypeSentinel => unreachable, // TODO .PtrTypeAligned => unreachable, // TODO @@ -743,6 +812,57 @@ pub const Tree = struct { }); } + pub fn structInitOne(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) Full.StructInit { + assert(tree.nodes.items(.tag)[node] == .StructInitOne); + const data = tree.nodes.items(.data)[node]; + buffer[0] = data.rhs; + const fields = if (data.rhs == 0) buffer[0..0] else buffer[0..1]; + return tree.fullStructInit(.{ + .lbrace = tree.nodes.items(.main_token)[node], + .fields = fields, + .type_expr = data.lhs, + }); + } + + pub fn structInitDotTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) Full.StructInit { + assert(tree.nodes.items(.tag)[node] == .StructInitDotTwo or + tree.nodes.items(.tag)[node] == .StructInitDotTwoComma); + const data = tree.nodes.items(.data)[node]; + buffer.* = .{ data.lhs, data.rhs }; + const fields = if (data.rhs != 0) + buffer[0..2] + else if (data.lhs != 0) + buffer[0..1] + else + buffer[0..0]; + return tree.fullStructInit(.{ + 
.lbrace = tree.nodes.items(.main_token)[node], + .fields = fields, + .type_expr = 0, + }); + } + + pub fn structInitDot(tree: Tree, node: Node.Index) Full.StructInit { + assert(tree.nodes.items(.tag)[node] == .StructInitDot); + const data = tree.nodes.items(.data)[node]; + return tree.fullStructInit(.{ + .lbrace = tree.nodes.items(.main_token)[node], + .fields = tree.extra_data[data.lhs..data.rhs], + .type_expr = 0, + }); + } + + pub fn structInit(tree: Tree, node: Node.Index) Full.StructInit { + assert(tree.nodes.items(.tag)[node] == .StructInit); + const data = tree.nodes.items(.data)[node]; + const fields_range = tree.extraData(data.rhs, Node.SubRange); + return tree.fullStructInit(.{ + .lbrace = tree.nodes.items(.main_token)[node], + .fields = tree.extra_data[fields_range.start..fields_range.end], + .type_expr = data.lhs, + }); + } + fn fullVarDecl(tree: Tree, info: Full.VarDecl.Ast) Full.VarDecl { const token_tags = tree.tokens.items(.tag); var result: Full.VarDecl = .{ @@ -814,6 +934,14 @@ pub const Tree = struct { }; return result; } + + fn fullStructInit(tree: Tree, info: Full.StructInit.Ast) Full.StructInit { + const token_tags = tree.tokens.items(.tag); + var result: Full.StructInit = .{ + .ast = info, + }; + return result; + } }; /// Fully assembled AST node information. @@ -877,6 +1005,16 @@ pub const Full = struct { callconv_expr: Node.Index, }; }; + + pub const StructInit = struct { + ast: Ast, + + pub const Ast = struct { + lbrace: TokenIndex, + fields: []const Node.Index, + type_expr: Node.Index, + }; + }; }; pub const Error = union(enum) { @@ -1288,13 +1426,20 @@ pub const Node = struct { /// `lhs{a, b}`. `sub_range_list[rhs]`. lhs can be omitted which means `.{a, b}`. ArrayInit, /// `lhs{.a = rhs}`. rhs can be omitted making it empty. + /// main_token is the lbrace. StructInitOne, /// `.{.a = lhs, .b = rhs}`. lhs and rhs can be omitted. + /// main_token is the lbrace. 
StructInitDotTwo, + /// Same as `StructInitDotTwo` except there is known to be a trailing comma + /// before the final rbrace. + StructInitDotTwoComma, /// `.{.a = b, .c = d}`. `sub_list[lhs..rhs]`. + /// main_token is the lbrace. StructInitDot, /// `lhs{.a = b, .c = d}`. `sub_range_list[rhs]`. /// lhs can be omitted which means `.{.a = b, .c = d}`. + /// main_token is the lbrace. StructInit, /// `lhs(rhs)`. rhs can be omitted. CallOne, @@ -1421,10 +1566,10 @@ pub const Node = struct { /// `nosuspend lhs`. rhs unused. Nosuspend, /// `{lhs; rhs;}`. rhs or lhs can be omitted. - /// main_token points at the `{`. + /// main_token points at the lbrace. BlockTwo, /// `{}`. `sub_list[lhs..rhs]`. - /// main_token points at the `{`. + /// main_token points at the lbrace. Block, /// `asm(lhs)`. rhs unused. AsmSimple, diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig index b11fdf25ea..deff7fdf98 100644 --- a/lib/std/zig/parse.zig +++ b/lib/std/zig/parse.zig @@ -2447,8 +2447,12 @@ const Parser = struct { if (field_init_one != 0) { const comma_one = p.eatToken(.Comma); if (p.eatToken(.RBrace)) |_| { + const tag: Node.Tag = if (comma_one != null) + .StructInitDotTwoComma + else + .StructInitDotTwo; return p.addNode(.{ - .tag = .StructInitDotTwo, + .tag = tag, .main_token = lbrace, .data = .{ .lhs = field_init_one, @@ -2464,8 +2468,12 @@ const Parser = struct { const field_init_two = try p.expectFieldInit(); const comma_two = p.eatToken(.Comma); if (p.eatToken(.RBrace)) |_| { + const tag: Node.Tag = if (comma_two != null) + .StructInitDotTwoComma + else + .StructInitDotTwo; return p.addNode(.{ - .tag = .StructInitDotTwo, + .tag = tag, .main_token = lbrace, .data = .{ .lhs = field_init_one, diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 26b720766c..9ebf218550 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -138,17 +138,17 @@ test "zig fmt: errdefer with payload" { ); } -//test "zig fmt: nosuspend block" { -// try 
testCanonical( -// \\pub fn main() anyerror!void { -// \\ nosuspend { -// \\ var foo: Foo = .{ .bar = 42 }; -// \\ } -// \\} -// \\ -// ); -//} -// +test "zig fmt: nosuspend block" { + try testCanonical( + \\pub fn main() anyerror!void { + \\ nosuspend { + \\ var foo: Foo = .{ .bar = 42 }; + \\ } + \\} + \\ + ); +} + //test "zig fmt: nosuspend await" { // try testCanonical( // \\fn foo() void { @@ -1505,11 +1505,11 @@ test "zig fmt: errdefer with payload" { //} test "zig fmt: ptr deref operator and unwrap optional operator" { - try testCanonical( - \\const a = b.*; - \\const a = b.?; - \\ - ); + try testCanonical( + \\const a = b.*; + \\const a = b.?; + \\ + ); } //test "zig fmt: comment after if before another if" { diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 288d3e8b97..5ebed640e7 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -719,145 +719,16 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // return renderToken(ais, tree, rtoken, space); //}, - .StructInitOne => unreachable, // TODO - .StructInitDotTwo => unreachable, // TODO - .StructInitDot => unreachable, // TODO - .StructInit => unreachable, // TODO - //.StructInitializer, .StructInitializerDot => { - // var rtoken: ast.TokenIndex = undefined; - // var field_inits: []ast.Node.Index = undefined; - // const lhs: union(enum) { dot: ast.TokenIndex, node: ast.Node.Index } = switch (base.tag) { - // .StructInitializerDot => blk: { - // const casted = @fieldParentPtr(ast.Node.StructInitializerDot, "base", base); - // rtoken = casted.rtoken; - // field_inits = casted.list(); - // break :blk .{ .dot = casted.dot }; - // }, - // .StructInitializer => blk: { - // const casted = @fieldParentPtr(ast.Node.StructInitializer, "base", base); - // rtoken = casted.rtoken; - // field_inits = casted.list(); - // break :blk .{ .node = casted.lhs }; - // }, - // else => unreachable, - // }; - - // const lbrace = switch (lhs) { - // .dot => |dot| 
tree.nextToken(dot), - // .node => |node| tree.nextToken(node.lastToken()), - // }; - - // if (field_inits.len == 0) { - // switch (lhs) { - // .dot => |dot| try renderToken(ais, tree, dot, Space.None), - // .node => |node| try renderExpression(ais, tree, node, Space.None), - // } - - // { - // ais.pushIndentNextLine(); - // defer ais.popIndent(); - // try renderToken(ais, tree, lbrace, Space.None); - // } - - // return renderToken(ais, tree, rtoken, space); - // } - - // const src_has_trailing_comma = blk: { - // const maybe_comma = tree.prevToken(rtoken); - // break :blk tree.token_tags[maybe_comma] == .Comma; - // }; - - // const src_same_line = blk: { - // const loc = tree.tokenLocation(tree.token_locs[lbrace].end, rtoken); - // break :blk loc.line == 0; - // }; - - // const expr_outputs_one_line = blk: { - // // render field expressions until a LF is found - // for (field_inits) |field_init| { - // var find_stream = std.io.findByteWriter('\n', std.io.null_writer); - // var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, find_stream.writer()); - - // try renderExpression(allocator, &auto_indenting_stream, tree, field_init, Space.None); - // if (find_stream.byte_found) break :blk false; - // } - // break :blk true; - // }; - - // if (field_inits.len == 1) blk: { - // if (field_inits[0].cast(ast.Node.FieldInitializer)) |field_init| { - // switch (field_init.expr.tag) { - // .StructInitializer, - // .StructInitializerDot, - // => break :blk, - // else => {}, - // } - // } - - // // if the expression outputs to multiline, make this struct multiline - // if (!expr_outputs_one_line or src_has_trailing_comma) { - // break :blk; - // } - - // switch (lhs) { - // .dot => |dot| try renderToken(ais, tree, dot, Space.None), - // .node => |node| try renderExpression(ais, tree, node, Space.None), - // } - // try renderToken(ais, tree, lbrace, Space.Space); - // try renderExpression(ais, tree, field_inits[0], Space.Space); - // return renderToken(ais, tree, 
rtoken, space); - // } - - // if (!src_has_trailing_comma and src_same_line and expr_outputs_one_line) { - // // render all on one line, no trailing comma - // switch (lhs) { - // .dot => |dot| try renderToken(ais, tree, dot, Space.None), - // .node => |node| try renderExpression(ais, tree, node, Space.None), - // } - // try renderToken(ais, tree, lbrace, Space.Space); - - // for (field_inits) |field_init, i| { - // if (i + 1 < field_inits.len) { - // try renderExpression(ais, tree, field_init, Space.None); - - // const comma = tree.nextToken(field_init.lastToken()); - // try renderToken(ais, tree, comma, Space.Space); - // } else { - // try renderExpression(ais, tree, field_init, Space.Space); - // } - // } - - // return renderToken(ais, tree, rtoken, space); - // } - - // { - // switch (lhs) { - // .dot => |dot| try renderToken(ais, tree, dot, Space.None), - // .node => |node| try renderExpression(ais, tree, node, Space.None), - // } - - // ais.pushIndentNextLine(); - // defer ais.popIndent(); - - // try renderToken(ais, tree, lbrace, Space.Newline); - - // for (field_inits) |field_init, i| { - // if (i + 1 < field_inits.len) { - // const next_field_init = field_inits[i + 1]; - // try renderExpression(ais, tree, field_init, Space.None); - - // const comma = tree.nextToken(field_init.lastToken()); - // try renderToken(ais, tree, comma, Space.Newline); - - // try renderExtraNewline(ais, tree, next_field_init); - // } else { - // try renderExpression(ais, tree, field_init, Space.Comma); - // } - // } - // } - - // return renderToken(ais, tree, rtoken, space); - //}, + .StructInitOne => { + var fields: [1]ast.Node.Index = undefined; + return renderStructInit(ais, tree, tree.structInitOne(&fields, node), space); + }, + .StructInitDotTwo, .StructInitDotTwoComma => { + var fields: [2]ast.Node.Index = undefined; + return renderStructInit(ais, tree, tree.structInitDotTwo(&fields, node), space); + }, + .StructInitDot => return renderStructInit(ais, tree, 
tree.structInitDot(node), space), + .StructInit => return renderStructInit(ais, tree, tree.structInit(node), space), .CallOne => unreachable, // TODO .Call => { @@ -1078,9 +949,9 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // { // ais.pushIndentNextLine(); // defer ais.popIndent(); - // try renderToken(ais, tree, container_decl.lbrace_token, Space.None); // { + // try renderToken(ais, tree, container_decl.lbrace_token, Space.None); // lbrace // } - // return renderToken(ais, tree, container_decl.rbrace_token, space); // } + // return renderToken(ais, tree, container_decl.rbrace_token, space); // rbrace // } // const src_has_trailing_comma = blk: { @@ -1113,7 +984,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // // One declaration per line // ais.pushIndentNextLine(); // defer ais.popIndent(); - // try renderToken(ais, tree, container_decl.lbrace_token, .Newline); // { + // try renderToken(ais, tree, container_decl.lbrace_token, .Newline); // lbrace // for (fields_and_decls) |decl, i| { // try renderContainerDecl(allocator, ais, tree, decl, .Newline); @@ -1125,7 +996,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // } else if (src_has_newline) { // // All the declarations on the same line, but place the items on // // their own line - // try renderToken(ais, tree, container_decl.lbrace_token, .Newline); // { + // try renderToken(ais, tree, container_decl.lbrace_token, .Newline); // lbrace // ais.pushIndent(); // defer ais.popIndent(); @@ -1136,14 +1007,14 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // } // } else { // // All the declarations on the same line - // try renderToken(ais, tree, container_decl.lbrace_token, .Space); // { + // try renderToken(ais, tree, container_decl.lbrace_token, .Space); // lbrace // for (fields_and_decls) |decl| { // try renderContainerDecl(allocator, ais, tree, decl, .Space); // 
} // } - // return renderToken(ais, tree, container_decl.rbrace_token, space); // } + // return renderToken(ais, tree, container_decl.rbrace_token, space); // rbrace //}, .ErrorSetDecl => unreachable, // TODO @@ -1170,9 +1041,9 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // } // try renderToken(ais, tree, err_set_decl.error_token, Space.None); // error - // try renderToken(ais, tree, lbrace, Space.None); // { + // try renderToken(ais, tree, lbrace, Space.None); // lbrace // try renderExpression(ais, tree, node, Space.None); - // return renderToken(ais, tree, err_set_decl.rbrace_token, space); // } + // return renderToken(ais, tree, err_set_decl.rbrace_token, space); // rbrace // } // try renderToken(ais, tree, err_set_decl.error_token, Space.None); // error @@ -1187,7 +1058,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // ais.pushIndent(); // defer ais.popIndent(); - // try renderToken(ais, tree, lbrace, Space.Newline); // { + // try renderToken(ais, tree, lbrace, Space.Newline); // lbrace // const decls = err_set_decl.decls(); // for (decls) |node, i| { // if (i + 1 < decls.len) { @@ -1201,9 +1072,9 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // } // } - // return renderToken(ais, tree, err_set_decl.rbrace_token, space); // } + // return renderToken(ais, tree, err_set_decl.rbrace_token, space); // rbrace // } else { - // try renderToken(ais, tree, lbrace, Space.Space); // { + // try renderToken(ais, tree, lbrace, Space.Space); // lbrace // const decls = err_set_decl.decls(); // for (decls) |node, i| { @@ -1219,7 +1090,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // } // } - // return renderToken(ais, tree, err_set_decl.rbrace_token, space); // } + // return renderToken(ais, tree, err_set_decl.rbrace_token, space); // rbrace // } //}, @@ -1277,8 +1148,8 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, 
node: ast.Node.Index, space: Spac // if (switch_node.cases_len == 0) { // try renderExpression(ais, tree, switch_node.expr, Space.None); // try renderToken(ais, tree, rparen, Space.Space); // ) - // try renderToken(ais, tree, lbrace, Space.None); // { - // return renderToken(ais, tree, switch_node.rbrace, space); // } + // try renderToken(ais, tree, lbrace, Space.None); // lbrace + // return renderToken(ais, tree, switch_node.rbrace, space); // rbrace // } // try renderExpression(ais, tree, switch_node.expr, Space.None); @@ -1287,7 +1158,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // { // ais.pushIndentNextLine(); // defer ais.popIndent(); - // try renderToken(ais, tree, lbrace, Space.Newline); // { + // try renderToken(ais, tree, lbrace, Space.Newline); // lbrace // const cases = switch_node.cases(); // for (cases) |node, i| { @@ -1299,7 +1170,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // } // } - // return renderToken(ais, tree, switch_node.rbrace, space); // } + // return renderToken(ais, tree, switch_node.rbrace, space); // rbrace //}, .SwitchCaseOne => unreachable, // TODO @@ -1379,7 +1250,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // if (body_is_block) { // block_start_space = Space.BlockStart; - // after_body_space = if (while_node.@"else" == null) space else Space.SpaceOrOutdent; + // after_body_space = if (while_node.@"else" == null) space else Space.Space; // } else if (tree.tokensOnSameLine(cond_rparen, while_node.body.lastToken())) { // block_start_space = Space.Space; // after_body_space = if (while_node.@"else" == null) space else Space.Space; @@ -1782,10 +1653,7 @@ fn renderVarDecl(ais: *Ais, tree: ast.Tree, var_decl: ast.Full.VarDecl) Error!vo ais.popIndent(); } ais.pushIndentOneShot(); - try renderExpression(ais, tree, var_decl.ast.init_node, Space.None); - - const semicolon = tree.lastToken(var_decl.ast.init_node) + 1; - 
return renderToken(ais, tree, semicolon, Space.Newline); + try renderExpression(ais, tree, var_decl.ast.init_node, .Semicolon); } fn renderIf(ais: *Ais, tree: ast.Tree, if_node: ast.Full.If, space: Space) Error!void { @@ -1820,7 +1688,7 @@ fn renderIf(ais: *Ais, tree: ast.Tree, if_node: ast.Full.If, space: Space) Error try renderToken(ais, tree, rparen, .BlockStart); // ) } if (if_node.ast.else_expr != 0) { - try renderExpression(ais, tree, if_node.ast.then_expr, Space.SpaceOrOutdent); + try renderExpression(ais, tree, if_node.ast.then_expr, Space.Space); try renderToken(ais, tree, if_node.else_token, .Space); // else if (if_node.error_token) |error_token| { try renderToken(ais, tree, error_token - 1, .None); // | @@ -2197,6 +2065,7 @@ fn renderBlock( ) Error!void { const token_tags = tree.tokens.items(.tag); const node_tags = tree.nodes.items(.tag); + const nodes_data = tree.nodes.items(.data); if (token_tags[lbrace - 1] == .Colon and token_tags[lbrace - 2] == .Identifier) @@ -2204,50 +2073,88 @@ fn renderBlock( try renderToken(ais, tree, lbrace - 2, .None); try renderToken(ais, tree, lbrace - 1, .Space); } - const nodes_data = tree.nodes.items(.data); if (statements.len == 0) { - ais.pushIndentNextLine(); try renderToken(ais, tree, lbrace, .None); - ais.popIndent(); - const rbrace = lbrace + 1; - return renderToken(ais, tree, rbrace, space); + return renderToken(ais, tree, lbrace + 1, space); // rbrace + } + + ais.pushIndent(); + try renderToken(ais, tree, lbrace, .Newline); + for (statements) |stmt, i| { + switch (node_tags[stmt]) { + .GlobalVarDecl => try renderVarDecl(ais, tree, tree.globalVarDecl(stmt)), + .LocalVarDecl => try renderVarDecl(ais, tree, tree.localVarDecl(stmt)), + .SimpleVarDecl => try renderVarDecl(ais, tree, tree.simpleVarDecl(stmt)), + .AlignedVarDecl => try renderVarDecl(ais, tree, tree.alignedVarDecl(stmt)), + else => try renderExpression(ais, tree, stmt, .Semicolon), + } + if (i + 1 < statements.len) { + try renderExtraNewline(ais, tree, 
statements[i + 1]); + } + } + ais.popIndent(); + // The rbrace could be +1 or +2 from the last token of the last + // statement in the block because lastToken() does not count semicolons. + const maybe_rbrace = tree.lastToken(statements[statements.len - 1]) + 1; + if (token_tags[maybe_rbrace] == .RBrace) { + return renderToken(ais, tree, maybe_rbrace, space); } else { - ais.pushIndentNextLine(); + assert(token_tags[maybe_rbrace + 1] == .RBrace); + return renderToken(ais, tree, maybe_rbrace + 1, space); + } +} - try renderToken(ais, tree, lbrace, .Newline); +fn renderStructInit( + ais: *Ais, + tree: ast.Tree, + struct_init: ast.Full.StructInit, + space: Space, +) Error!void { + const token_tags = tree.tokens.items(.tag); + if (struct_init.ast.type_expr == 0) { + try renderToken(ais, tree, struct_init.ast.lbrace - 1, .None); // . + } else { + try renderExpression(ais, tree, struct_init.ast.type_expr, .None); // T + } + if (struct_init.ast.fields.len == 0) { + try renderToken(ais, tree, struct_init.ast.lbrace, .None); // lbrace + return renderToken(ais, tree, struct_init.ast.lbrace + 1, space); // rbrace + } + const last_field = struct_init.ast.fields[struct_init.ast.fields.len - 1]; + const last_field_token = tree.lastToken(last_field); + if (token_tags[last_field_token + 1] == .Comma) { + // Render one field init per line. 
+ ais.pushIndent(); + try renderToken(ais, tree, struct_init.ast.lbrace, .Newline); - for (statements) |stmt, i| { - switch (node_tags[stmt]) { - .GlobalVarDecl => try renderVarDecl(ais, tree, tree.globalVarDecl(stmt)), - .LocalVarDecl => try renderVarDecl(ais, tree, tree.localVarDecl(stmt)), - .SimpleVarDecl => try renderVarDecl(ais, tree, tree.simpleVarDecl(stmt)), - .AlignedVarDecl => try renderVarDecl(ais, tree, tree.alignedVarDecl(stmt)), - else => { - const semicolon = tree.lastToken(stmt) + 1; - if (token_tags[semicolon] == .Semicolon) { - try renderExpression(ais, tree, stmt, .None); - try renderToken(ais, tree, semicolon, .Newline); - } else { - try renderExpression(ais, tree, stmt, .Newline); - } - }, - } + try renderToken(ais, tree, struct_init.ast.lbrace + 1, .None); // . + try renderToken(ais, tree, struct_init.ast.lbrace + 2, .Space); // name + try renderToken(ais, tree, struct_init.ast.lbrace + 3, .Space); // = + try renderExpression(ais, tree, struct_init.ast.fields[0], .Comma); - if (i + 1 < statements.len) { - try renderExtraNewline(ais, tree, statements[i + 1]); - } + for (struct_init.ast.fields[1..]) |field_init| { + const init_token = tree.firstToken(field_init); + try renderToken(ais, tree, init_token - 3, .None); // . + try renderToken(ais, tree, init_token - 2, .Space); // name + try renderToken(ais, tree, init_token - 1, .Space); // = + try renderExpressionNewlined(ais, tree, field_init, .Comma); } ais.popIndent(); - // The rbrace could be +1 or +2 from the last token of the last - // statement in the block because lastToken() does not count semicolons. 
- const maybe_rbrace = tree.lastToken(statements[statements.len - 1]) + 1; - if (token_tags[maybe_rbrace] == .RBrace) { - return renderToken(ais, tree, maybe_rbrace, space); - } else { - assert(token_tags[maybe_rbrace + 1] == .RBrace); - return renderToken(ais, tree, maybe_rbrace + 1, space); + return renderToken(ais, tree, last_field_token + 2, space); // rbrace + } else { + // Render all on one line, no trailing comma. + try renderToken(ais, tree, struct_init.ast.lbrace, .Space); + + for (struct_init.ast.fields) |field_init| { + const init_token = tree.firstToken(field_init); + try renderToken(ais, tree, init_token - 3, .None); // . + try renderToken(ais, tree, init_token - 2, .Space); // name + try renderToken(ais, tree, init_token - 1, .Space); // = + try renderExpression(ais, tree, field_init, .CommaSpace); } + + return renderToken(ais, tree, last_field_token + 1, space); // rbrace } } @@ -2263,6 +2170,22 @@ fn renderExpressionComma(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: } } +/// Render an expression, but first insert an extra newline if the previous token is 2 or +/// more lines away. +fn renderExpressionNewlined( + ais: *Ais, + tree: ast.Tree, + node: ast.Node.Index, + space: Space, +) Error!void { + const token_starts = tree.tokens.items(.start); + const first_token = tree.firstToken(node); + if (tree.tokenLocation(token_starts[first_token - 1], first_token).line >= 2) { + try ais.insertNewline(); + } + return renderExpression(ais, tree, node, space); +} + fn renderTokenComma(ais: *Ais, tree: ast.Tree, token: ast.TokenIndex, space: Space) Error!void { const token_tags = tree.tokens.items(.tag); const maybe_comma = token + 1; @@ -2275,22 +2198,32 @@ fn renderTokenComma(ais: *Ais, tree: ast.Tree, token: ast.TokenIndex, space: Spa } const Space = enum { + /// Output the token lexeme only. None, - Newline, - /// `renderToken` will additionally consume the next token if it is a comma. 
- Comma, + /// Output the token lexeme followed by a single space. Space, - SpaceOrOutdent, - NoNewline, + /// Output the token lexeme followed by a newline. + Newline, + /// Additionally consume the next token if it is a comma. + /// In either case, a newline will be inserted afterwards. + Comma, + /// Additionally consume the next token if it is a comma. + /// In either case, a space will be inserted afterwards. + CommaSpace, + /// Additionally consume the next token if it is a semicolon. + /// In either case, a newline will be inserted afterwards. + Semicolon, /// Skips writing the possible line comment after the token. NoComment, + /// Intended when rendering lbrace tokens. Depending on whether the line is + /// "over indented", will output a newline or a single space afterwards. + /// See `std.io.AutoIndentingStream` for the definition of "over indented". BlockStart, }; fn renderToken(ais: *Ais, tree: ast.Tree, token_index: ast.TokenIndex, space: Space) Error!void { if (space == Space.BlockStart) { - // If placing the lbrace on the current line would cause an ugly gap then put the lbrace on the next line. 
- const new_space = if (ais.isLineOverIndented()) Space.Newline else Space.Space; + const new_space: Space = if (ais.isLineOverIndented()) .Newline else .Space; return renderToken(ais, tree, token_index, new_space); } @@ -2313,7 +2246,6 @@ fn renderToken(ais: *Ais, tree: ast.Tree, token_index: ast.TokenIndex, space: Sp switch (space) { .NoComment => {}, - .NoNewline => {}, .None => {}, .Comma => { const count = try renderComments(ais, tree, token_start + lexeme.len, token_starts[token_index + 1], ", "); @@ -2326,10 +2258,25 @@ fn renderToken(ais: *Ais, tree: ast.Tree, token_index: ast.TokenIndex, space: Sp try ais.insertNewline(); } }, - .SpaceOrOutdent => @panic("what does this even do"), + .CommaSpace => { + _ = try renderComments(ais, tree, token_start + lexeme.len, token_starts[token_index + 1], ""); + if (token_tags[token_index + 1] == .Comma) { + return renderToken(ais, tree, token_index + 1, .Space); + } else { + return ais.writer().writeByte(' '); + } + }, + .Semicolon => { + _ = try renderComments(ais, tree, token_start + lexeme.len, token_starts[token_index + 1], ""); + if (token_tags[token_index + 1] == .Semicolon) { + return renderToken(ais, tree, token_index + 1, .Newline); + } else { + return ais.insertNewline(); + } + }, .Space => { _ = try renderComments(ais, tree, token_start + lexeme.len, token_starts[token_index + 1], ""); - try ais.writer().writeByte(' '); + return ais.writer().writeByte(' '); }, .Newline => { if (token_tags[token_index + 1] != .MultilineStringLiteralLine) { @@ -2437,10 +2384,11 @@ fn nodeCausesSliceOpSpace(base: ast.Node.Index) bool { } fn copyFixingWhitespace(ais: *Ais, slice: []const u8) @TypeOf(ais.*).Error!void { + const writer = ais.writer(); for (slice) |byte| switch (byte) { - '\t' => try ais.writer().writeAll(" "), + '\t' => try writer.writeAll(" "), '\r' => {}, - else => try ais.writer().writeByte(byte), + else => try writer.writeByte(byte), }; } From 0b4bb9b84fddd8fe03fe15e25bd82babb455f2a6 Mon Sep 17 00:00:00 2001 
From: Isaac Freund Date: Fri, 5 Feb 2021 11:40:22 +0100 Subject: [PATCH 014/173] std.MultiArrayList: implement review comments --- lib/std/multi_array_list.zig | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/lib/std/multi_array_list.zig b/lib/std/multi_array_list.zig index 84b39765d3..aaf105bb57 100644 --- a/lib/std/multi_array_list.zig +++ b/lib/std/multi_array_list.zig @@ -20,7 +20,8 @@ pub fn MultiArrayList(comptime S: type) type { pub const Field = meta.FieldEnum(S); pub const Slice = struct { - /// The index corresponds to sizes.bytes, not in field order. + /// This array is indexed by the field index which can be obtained + /// by using @enumToInt() on the Field enum ptrs: [fields.len][*]u8, len: usize, capacity: usize, @@ -57,8 +58,7 @@ pub fn MultiArrayList(comptime S: type) type { const fields = meta.fields(S); /// `sizes.bytes` is an array of @sizeOf each S field. Sorted by alignment, descending. - /// `sizes.indexes` is an array mapping from field to its index in the `sizes.bytes` array. - /// `sizes.fields` is an array with the field indexes of the `sizes.bytes` array. + /// `sizes.fields` is an array mapping from `sizes.bytes` array index to field index. 
const sizes = blk: { const Data = struct { size: usize, @@ -81,16 +81,13 @@ pub fn MultiArrayList(comptime S: type) type { var trash: i32 = undefined; // workaround for stage1 compiler bug std.sort.sort(Data, &data, &trash, Sort.lessThan); var sizes_bytes: [fields.len]usize = undefined; - var sizes_indexes: [fields.len]usize = undefined; var field_indexes: [fields.len]usize = undefined; for (data) |elem, i| { sizes_bytes[i] = elem.size; - sizes_indexes[elem.size_index] = i; field_indexes[i] = elem.size_index; } break :blk .{ .bytes = sizes_bytes, - .indexes = sizes_indexes, .fields = field_indexes, }; }; @@ -183,8 +180,11 @@ pub fn MultiArrayList(comptime S: type) type { capacityInBytes(new_len), .exact, ) catch { + inline for (fields) |field_info, i| { + const field = @intToEnum(Field, i); + mem.set(field_info.field_type, self.slice().items(field)[new_len..], undefined); + } self.len = new_len; - // TODO memset the invalidated items to undefined return; }; var other = Self{ From 3e960cfffea8083b1948ba295d1a85964b5afa25 Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Fri, 5 Feb 2021 11:46:43 +0100 Subject: [PATCH 015/173] zig fmt: float literal with exponent --- lib/std/zig/parser_test.zig | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 9ebf218550..9174e3b1bd 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -1435,14 +1435,15 @@ test "zig fmt: nosuspend block" { // \\ // ); //} -//test "zig fmt: float literal with exponent" { -// try testCanonical( -// \\pub const f64_true_min = 4.94065645841246544177e-324; -// \\const threshold = 0x1.a827999fcef32p+1022; -// \\ -// ); -//} -// + +test "zig fmt: float literal with exponent" { + try testCanonical( + \\pub const f64_true_min = 4.94065645841246544177e-324; + \\const threshold = 0x1.a827999fcef32p+1022; + \\ + ); +} + //test "zig fmt: if-else end of comptime" { // try testCanonical( // 
\\comptime { From 6f3b93e2e8e5cc8c98d473bef4804c61abc4a196 Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Fri, 5 Feb 2021 13:15:42 +0100 Subject: [PATCH 016/173] zig fmt: struct and anon array initialization --- lib/std/zig/ast.zig | 97 +++++++++++-- lib/std/zig/parse.zig | 9 +- lib/std/zig/parser_test.zig | 172 ++++++++++++++++++++--- lib/std/zig/render.zig | 271 ++++++++---------------------------- 4 files changed, 302 insertions(+), 247 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index c6d477cc5f..96a60614bf 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -212,8 +212,6 @@ pub const Tree = struct { .Try, .Await, .OptionalType, - .ArrayInitDotTwo, - .ArrayInitDot, .Switch, .IfSimple, .If, @@ -250,9 +248,12 @@ pub const Tree = struct { .FnProto, => return main_tokens[n], + .ArrayInitDot, + .ArrayInitDotTwo, + .ArrayInitDotTwoComma, + .StructInitDot, .StructInitDotTwo, .StructInitDotTwoComma, - .StructInitDot, => return main_tokens[n] - 1, .Catch, @@ -304,6 +305,7 @@ pub const Tree = struct { .ArrayInitOne, .ArrayInit, .StructInitOne, + .StructInit, .CallOne, .Call, .SwitchCaseOne, @@ -367,7 +369,6 @@ pub const Tree = struct { .PtrTypeSentinel => unreachable, // TODO .PtrType => unreachable, // TODO .SliceType => unreachable, // TODO - .StructInit => unreachable, // TODO .SwitchCaseMulti => unreachable, // TODO .WhileSimple => unreachable, // TODO .WhileCont => unreachable, // TODO @@ -506,6 +507,7 @@ pub const Tree = struct { n = datas[n].rhs; }, + .ArrayInitDotTwo, .BuiltinCallTwo, .BlockTwo, .StructInitDotTwo, @@ -519,7 +521,9 @@ pub const Tree = struct { return main_tokens[n] + end_offset; } }, - .StructInitDotTwoComma => { + .ArrayInitDotTwoComma, + .StructInitDotTwoComma, + => { end_offset += 2; // for the comma + rbrace if (datas[n].rhs != 0) { n = datas[n].rhs; @@ -590,13 +594,16 @@ pub const Tree = struct { // require recursion due to the optional comma followed by rbrace. 
// TODO follow the pattern set by StructInitDotTwoComma which will allow // lastToken to work for all of these. + .ArrayInit => unreachable, + .ArrayInitOne => unreachable, + .ArrayInitDot => unreachable, + .StructInit => unreachable, + .StructInitOne => unreachable, .StructInitDot => unreachable, .ContainerFieldInit => unreachable, .ContainerFieldAlign => unreachable, .ContainerField => unreachable, - .ArrayInitDotTwo => unreachable, // TODO - .ArrayInitDot => unreachable, // TODO .Switch => unreachable, // TODO .If => unreachable, // TODO .Continue => unreachable, // TODO @@ -606,9 +613,6 @@ pub const Tree = struct { .Asm => unreachable, // TODO .SliceOpen => unreachable, // TODO .Slice => unreachable, // TODO - .ArrayInitOne => unreachable, // TODO - .ArrayInit => unreachable, // TODO - .StructInitOne => unreachable, // TODO .SwitchCaseOne => unreachable, // TODO .SwitchRange => unreachable, // TODO .FnDecl => unreachable, // TODO @@ -618,7 +622,6 @@ pub const Tree = struct { .PtrTypeSentinel => unreachable, // TODO .PtrType => unreachable, // TODO .SliceType => unreachable, // TODO - .StructInit => unreachable, // TODO .SwitchCaseMulti => unreachable, // TODO .WhileCont => unreachable, // TODO .While => unreachable, // TODO @@ -863,6 +866,65 @@ pub const Tree = struct { }); } + pub fn arrayInitOne(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) Full.ArrayInit { + assert(tree.nodes.items(.tag)[node] == .ArrayInitOne); + const data = tree.nodes.items(.data)[node]; + buffer[0] = data.rhs; + const elements = if (data.rhs == 0) buffer[0..0] else buffer[0..1]; + return .{ + .ast = .{ + .lbrace = tree.nodes.items(.main_token)[node], + .elements = elements, + .type_expr = data.lhs, + }, + }; + } + + pub fn arrayInitDotTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) Full.ArrayInit { + assert(tree.nodes.items(.tag)[node] == .ArrayInitDotTwo or + tree.nodes.items(.tag)[node] == .ArrayInitDotTwoComma); + const data = tree.nodes.items(.data)[node]; + 
buffer.* = .{ data.lhs, data.rhs }; + const elements = if (data.rhs != 0) + buffer[0..2] + else if (data.lhs != 0) + buffer[0..1] + else + buffer[0..0]; + return .{ + .ast = .{ + .lbrace = tree.nodes.items(.main_token)[node], + .elements = elements, + .type_expr = 0, + }, + }; + } + + pub fn arrayInitDot(tree: Tree, node: Node.Index) Full.ArrayInit { + assert(tree.nodes.items(.tag)[node] == .ArrayInitDot); + const data = tree.nodes.items(.data)[node]; + return .{ + .ast = .{ + .lbrace = tree.nodes.items(.main_token)[node], + .elements = tree.extra_data[data.lhs..data.rhs], + .type_expr = 0, + }, + }; + } + + pub fn arrayInit(tree: Tree, node: Node.Index) Full.ArrayInit { + assert(tree.nodes.items(.tag)[node] == .ArrayInit); + const data = tree.nodes.items(.data)[node]; + const elem_range = tree.extraData(data.rhs, Node.SubRange); + return .{ + .ast = .{ + .lbrace = tree.nodes.items(.main_token)[node], + .elements = tree.extra_data[elem_range.start..elem_range.end], + .type_expr = data.lhs, + }, + }; + } + fn fullVarDecl(tree: Tree, info: Full.VarDecl.Ast) Full.VarDecl { const token_tags = tree.tokens.items(.tag); var result: Full.VarDecl = .{ @@ -1015,6 +1077,16 @@ pub const Full = struct { type_expr: Node.Index, }; }; + + pub const ArrayInit = struct { + ast: Ast, + + pub const Ast = struct { + lbrace: TokenIndex, + elements: []const Node.Index, + type_expr: Node.Index, + }; + }; }; pub const Error = union(enum) { @@ -1421,6 +1493,9 @@ pub const Node = struct { ArrayInitOne, /// `.{lhs, rhs}`. lhs and rhs can be omitted. ArrayInitDotTwo, + /// Same as `ArrayInitDotTwo` except there is known to be a trailing comma + /// before the final rbrace. + ArrayInitDotTwoComma, /// `.{a, b}`. `sub_list[lhs..rhs]`. ArrayInitDot, /// `lhs{a, b}`. `sub_range_list[rhs]`. lhs can be omitted which means `.{a, b}`. 
diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig index deff7fdf98..394c36fcb5 100644 --- a/lib/std/zig/parse.zig +++ b/lib/std/zig/parse.zig @@ -2536,7 +2536,7 @@ const Parser = struct { const comma_one = p.eatToken(.Comma); if (p.eatToken(.RBrace)) |_| { return p.addNode(.{ - .tag = .ArrayInitDotTwo, + .tag = if (comma_one != null) .ArrayInitDotTwoComma else .ArrayInitDotTwo, .main_token = lbrace, .data = .{ .lhs = elem_init_one, @@ -2553,7 +2553,7 @@ const Parser = struct { const comma_two = p.eatToken(.Comma); if (p.eatToken(.RBrace)) |_| { return p.addNode(.{ - .tag = .ArrayInitDotTwo, + .tag = if (comma_one != null) .ArrayInitDotTwoComma else .ArrayInitDotTwo, .main_token = lbrace, .data = .{ .lhs = elem_init_one, @@ -2576,7 +2576,10 @@ const Parser = struct { if (next == 0) break; try init_list.append(next); switch (p.token_tags[p.nextToken()]) { - .Comma => continue, + .Comma => { + if (p.eatToken(.RBrace)) |_| break; + continue; + }, .RBrace => break, .Colon, .RParen, .RBracket => { p.tok_i -= 1; diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 9174e3b1bd..d155c2ee5e 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -336,24 +336,160 @@ test "zig fmt: nosuspend block" { // \\ // ); //} -// -//test "zig fmt: anon struct literal syntax" { -// try testCanonical( -// \\const x = .{ -// \\ .a = b, -// \\ .c = d, -// \\}; -// \\ -// ); -//} -// -//test "zig fmt: anon list literal syntax" { -// try testCanonical( -// \\const x = .{ a, b, c }; -// \\ -// ); -//} -// + +test "zig fmt: anon struct literal 1 element" { + try testCanonical( + \\const x = .{ .a = b }; + \\ + ); +} + +test "zig fmt: anon struct literal 1 element comma" { + try testCanonical( + \\const x = .{ + \\ .a = b, + \\}; + \\ + ); +} + +test "zig fmt: anon struct literal 2 element" { + try testCanonical( + \\const x = .{ .a = b, .c = d }; + \\ + ); +} + +test "zig fmt: anon struct literal 2 element comma" { + try testCanonical( + 
\\const x = .{ + \\ .a = b, + \\ .c = d, + \\}; + \\ + ); +} + +test "zig fmt: anon struct literal 3 element" { + try testCanonical( + \\const x = .{ .a = b, .c = d, .e = f }; + \\ + ); +} + +test "zig fmt: anon struct literal 3 element comma" { + try testCanonical( + \\const x = .{ + \\ .a = b, + \\ .c = d, + \\ .e = f, + \\}; + \\ + ); +} + +test "zig fmt: struct literal 1 element" { + try testCanonical( + \\const x = X{ .a = b }; + \\ + ); +} + +test "zig fmt: struct literal 1 element comma" { + try testCanonical( + \\const x = X{ + \\ .a = b, + \\}; + \\ + ); +} + +test "zig fmt: struct literal 2 element" { + try testCanonical( + \\const x = X{ .a = b, .c = d }; + \\ + ); +} + +test "zig fmt: struct literal 2 element comma" { + try testCanonical( + \\const x = X{ + \\ .a = b, + \\ .c = d, + \\}; + \\ + ); +} + +test "zig fmt: struct literal 3 element" { + try testCanonical( + \\const x = X{ .a = b, .c = d, .e = f }; + \\ + ); +} + +test "zig fmt: struct literal 3 element comma" { + try testCanonical( + \\const x = X{ + \\ .a = b, + \\ .c = d, + \\ .e = f, + \\}; + \\ + ); +} + +test "zig fmt: anon list literal 1 element" { + try testCanonical( + \\const x = .{a}; + \\ + ); +} + +test "zig fmt: anon list literal 1 element comma" { + try testCanonical( + \\const x = .{ + \\ a, + \\}; + \\ + ); +} + +test "zig fmt: anon list literal 2 element" { + try testCanonical( + \\const x = .{ a, b }; + \\ + ); +} + +test "zig fmt: anon list literal 2 element comma" { + try testCanonical( + \\const x = .{ + \\ a, + \\ b, + \\}; + \\ + ); +} + +test "zig fmt: anon list literal 3 element" { + try testCanonical( + \\const x = .{ a, b, c }; + \\ + ); +} + +test "zig fmt: anon list literal 3 element comma" { + try testCanonical( + \\const x = .{ + \\ a, + \\ b, + \\ c, + \\}; + \\ + ); +} + //test "zig fmt: async function" { // try testCanonical( // \\pub const Server = struct { diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 5ebed640e7..deeae58e7f 100644 --- 
a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -503,221 +503,16 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // return renderExpression(ais, tree, slice_type.rhs, space); //}, - .ArrayInitOne => unreachable, // TODO - .ArrayInitDotTwo => unreachable, // TODO - .ArrayInitDot => unreachable, // TODO - .ArrayInit => unreachable, // TODO - //.ArrayInitializer, .ArrayInitializerDot => { - // var rtoken: ast.TokenIndex = undefined; - // var exprs: []ast.Node.Index = undefined; - // const lhs: union(enum) { dot: ast.TokenIndex, node: ast.Node.Index } = switch (base.tag) { - // .ArrayInitializerDot => blk: { - // const casted = @fieldParentPtr(ast.Node.ArrayInitializerDot, "base", base); - // rtoken = casted.rtoken; - // exprs = casted.list(); - // break :blk .{ .dot = casted.dot }; - // }, - // .ArrayInitializer => blk: { - // const casted = @fieldParentPtr(ast.Node.ArrayInitializer, "base", base); - // rtoken = casted.rtoken; - // exprs = casted.list(); - // break :blk .{ .node = casted.lhs }; - // }, - // else => unreachable, - // }; - - // const lbrace = switch (lhs) { - // .dot => |dot| tree.nextToken(dot), - // .node => |node| tree.nextToken(node.lastToken()), - // }; - - // switch (lhs) { - // .dot => |dot| try renderToken(ais, tree, dot, Space.None), - // .node => |node| try renderExpression(ais, tree, node, Space.None), - // } - - // if (exprs.len == 0) { - // try renderToken(ais, tree, lbrace, Space.None); - // return renderToken(ais, tree, rtoken, space); - // } - - // if (exprs.len == 1 and exprs[0].tag != .MultilineStringLiteral and tree.token_tags[exprs[0].*.lastToken() + 1] == .RBrace) { - // const expr = exprs[0]; - - // try renderToken(ais, tree, lbrace, Space.None); - // try renderExpression(ais, tree, expr, Space.None); - // return renderToken(ais, tree, rtoken, space); - // } - - // // scan to find row size - // if (rowSize(tree, exprs, rtoken) != null) { - // { - // ais.pushIndentNextLine(); - // defer 
ais.popIndent(); - // try renderToken(ais, tree, lbrace, Space.Newline); - - // var expr_index: usize = 0; - // while (rowSize(tree, exprs[expr_index..], rtoken)) |row_size| { - // const row_exprs = exprs[expr_index..]; - // // A place to store the width of each expression and its column's maximum - // var widths = try allocator.alloc(usize, row_exprs.len + row_size); - // defer allocator.free(widths); - // mem.set(usize, widths, 0); - - // var expr_newlines = try allocator.alloc(bool, row_exprs.len); - // defer allocator.free(expr_newlines); - // mem.set(bool, expr_newlines, false); - - // var expr_widths = widths[0 .. widths.len - row_size]; - // var column_widths = widths[widths.len - row_size ..]; - - // // Find next row with trailing comment (if any) to end the current section - // var section_end = sec_end: { - // var this_line_first_expr: usize = 0; - // var this_line_size = rowSize(tree, row_exprs, rtoken); - // for (row_exprs) |expr, i| { - // // Ignore comment on first line of this section - // if (i == 0 or tree.tokensOnSameLine(row_exprs[0].firstToken(), expr.lastToken())) continue; - // // Track start of line containing comment - // if (!tree.tokensOnSameLine(row_exprs[this_line_first_expr].firstToken(), expr.lastToken())) { - // this_line_first_expr = i; - // this_line_size = rowSize(tree, row_exprs[this_line_first_expr..], rtoken); - // } - - // const maybe_comma = expr.lastToken() + 1; - // const maybe_comment = expr.lastToken() + 2; - // if (maybe_comment < tree.token_tags.len) { - // if (tree.token_tags[maybe_comma] == .Comma and - // tree.token_tags[maybe_comment] == .LineComment and - // tree.tokensOnSameLine(expr.lastToken(), maybe_comment)) - // { - // var comment_token_loc = tree.token_locs[maybe_comment]; - // const comment_is_empty = mem.trimRight(u8, tree.tokenSliceLoc(comment_token_loc), " ").len == 2; - // if (!comment_is_empty) { - // // Found row ending in comment - // break :sec_end i - this_line_size.? 
+ 1; - // } - // } - // } - // } - // break :sec_end row_exprs.len; - // }; - // expr_index += section_end; - - // const section_exprs = row_exprs[0..section_end]; - - // // Null stream for counting the printed length of each expression - // var line_find_stream = std.io.findByteWriter('\n', std.io.null_writer); - // var counting_stream = std.io.countingWriter(line_find_stream.writer()); - // var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, counting_stream.writer()); - - // // Calculate size of columns in current section - // var column_counter: usize = 0; - // var single_line = true; - // for (section_exprs) |expr, i| { - // if (i + 1 < section_exprs.len) { - // counting_stream.bytes_written = 0; - // line_find_stream.byte_found = false; - // try renderExpression(allocator, &auto_indenting_stream, tree, expr, Space.None); - // const width = @intCast(usize, counting_stream.bytes_written); - // expr_widths[i] = width; - // expr_newlines[i] = line_find_stream.byte_found; - - // if (!line_find_stream.byte_found) { - // const column = column_counter % row_size; - // column_widths[column] = std.math.max(column_widths[column], width); - - // const expr_last_token = expr.*.lastToken() + 1; - // const next_expr = section_exprs[i + 1]; - // const loc = tree.tokenLocation(tree.token_locs[expr_last_token].start, next_expr.*.firstToken()); - - // column_counter += 1; - - // if (loc.line != 0) single_line = false; - // } else { - // single_line = false; - // column_counter = 0; - // } - // } else { - // counting_stream.bytes_written = 0; - // try renderExpression(allocator, &auto_indenting_stream, tree, expr, Space.None); - // const width = @intCast(usize, counting_stream.bytes_written); - // expr_widths[i] = width; - // expr_newlines[i] = line_find_stream.byte_found; - - // if (!line_find_stream.byte_found) { - // const column = column_counter % row_size; - // column_widths[column] = std.math.max(column_widths[column], width); - // } - // break; - // } - // 
} - - // // Render exprs in current section - // column_counter = 0; - // var last_col_index: usize = row_size - 1; - // for (section_exprs) |expr, i| { - // if (i + 1 < section_exprs.len) { - // const next_expr = section_exprs[i + 1]; - // try renderExpression(ais, tree, expr, Space.None); - - // const comma = tree.nextToken(expr.*.lastToken()); - - // if (column_counter != last_col_index) { - // if (!expr_newlines[i] and !expr_newlines[i + 1]) { - // // Neither the current or next expression is multiline - // try renderToken(ais, tree, comma, Space.Space); // , - // assert(column_widths[column_counter % row_size] >= expr_widths[i]); - // const padding = column_widths[column_counter % row_size] - expr_widths[i]; - // try ais.writer().writeByteNTimes(' ', padding); - - // column_counter += 1; - // continue; - // } - // } - // if (single_line and row_size != 1) { - // try renderToken(ais, tree, comma, Space.Space); // , - // continue; - // } - - // column_counter = 0; - // try renderToken(ais, tree, comma, Space.Newline); // , - // try renderExtraNewline(ais, tree, next_expr); - // } else { - // const maybe_comma = tree.nextToken(expr.*.lastToken()); - // if (tree.token_tags[maybe_comma] == .Comma) { - // try renderExpression(ais, tree, expr, Space.None); // , - // try renderToken(ais, tree, maybe_comma, Space.Newline); // , - // } else { - // try renderExpression(ais, tree, expr, Space.Comma); // , - // } - // } - // } - - // if (expr_index == exprs.len) { - // break; - // } - // } - // } - - // return renderToken(ais, tree, rtoken, space); - // } - - // // Single line - // try renderToken(ais, tree, lbrace, Space.Space); - // for (exprs) |expr, i| { - // if (i + 1 < exprs.len) { - // const next_expr = exprs[i + 1]; - // try renderExpression(ais, tree, expr, Space.None); - // const comma = tree.nextToken(expr.*.lastToken()); - // try renderToken(ais, tree, comma, Space.Space); // , - // } else { - // try renderExpression(ais, tree, expr, Space.Space); - // } - // } 
- - // return renderToken(ais, tree, rtoken, space); - //}, + .ArrayInitOne => { + var elements: [1]ast.Node.Index = undefined; + return renderArrayInit(ais, tree, tree.arrayInitOne(&elements, node), space); + }, + .ArrayInitDotTwo, .ArrayInitDotTwoComma => { + var elements: [2]ast.Node.Index = undefined; + return renderArrayInit(ais, tree, tree.arrayInitDotTwo(&elements, node), space); + }, + .ArrayInitDot => return renderArrayInit(ais, tree, tree.arrayInitDot(node), space), + .ArrayInit => return renderArrayInit(ais, tree, tree.arrayInit(node), space), .StructInitOne => { var fields: [1]ast.Node.Index = undefined; @@ -2158,6 +1953,52 @@ fn renderStructInit( } } +fn renderArrayInit( + ais: *Ais, + tree: ast.Tree, + array_init: ast.Full.ArrayInit, + space: Space, +) Error!void { + const token_tags = tree.tokens.items(.tag); + if (array_init.ast.type_expr == 0) { + try renderToken(ais, tree, array_init.ast.lbrace - 1, .None); // . + } else { + try renderExpression(ais, tree, array_init.ast.type_expr, .None); // T + } + if (array_init.ast.elements.len == 0) { + try renderToken(ais, tree, array_init.ast.lbrace, .None); // lbrace + return renderToken(ais, tree, array_init.ast.lbrace + 1, space); // rbrace + } + const last_elem = array_init.ast.elements[array_init.ast.elements.len - 1]; + const last_elem_token = tree.lastToken(last_elem); + if (token_tags[last_elem_token + 1] == .Comma) { + // Render one element per line. + ais.pushIndent(); + try renderToken(ais, tree, array_init.ast.lbrace, .Newline); + + try renderExpression(ais, tree, array_init.ast.elements[0], .Comma); + for (array_init.ast.elements[1..]) |elem| { + try renderExpressionNewlined(ais, tree, elem, .Comma); + } + + ais.popIndent(); + return renderToken(ais, tree, last_elem_token + 2, space); // rbrace + } else { + // Render all on one line, no trailing comma. 
+ if (array_init.ast.elements.len == 1) { + // If there is only one element, we don't use spaces + try renderToken(ais, tree, array_init.ast.lbrace, .None); + try renderExpression(ais, tree, array_init.ast.elements[0], .None); + } else { + try renderToken(ais, tree, array_init.ast.lbrace, .Space); + for (array_init.ast.elements) |elem| { + try renderExpression(ais, tree, elem, .CommaSpace); + } + } + return renderToken(ais, tree, last_elem_token + 1, space); // rbrace + } +} + /// Render an expression, and the comma that follows it, if it is present in the source. fn renderExpressionComma(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Space) Error!void { const token_tags = tree.tokens.items(.tag); From 0f3fa4d6540af42552571bf46609ab5546c16b72 Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Fri, 5 Feb 2021 19:10:14 +0100 Subject: [PATCH 017/173] zig fmt: array types --- lib/std/zig/ast.zig | 42 +++++++++++++++++++- lib/std/zig/parser_test.zig | 58 ++++++++++++++++++++++++++++ lib/std/zig/render.zig | 76 +++++++------------------------------ 3 files changed, 111 insertions(+), 65 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 96a60614bf..08ca0e934f 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -246,6 +246,8 @@ pub const Tree = struct { .FnProtoMulti, .FnProtoOne, .FnProto, + .ArrayType, + .ArrayTypeSentinel, => return main_tokens[n], .ArrayInitDot, @@ -363,8 +365,6 @@ pub const Tree = struct { } }, - .ArrayType => unreachable, // TODO - .ArrayTypeSentinel => unreachable, // TODO .PtrTypeAligned => unreachable, // TODO .PtrTypeSentinel => unreachable, // TODO .PtrType => unreachable, // TODO @@ -925,6 +925,33 @@ pub const Tree = struct { }; } + pub fn arrayType(tree: Tree, node: Node.Index) Full.ArrayType { + assert(tree.nodes.items(.tag)[node] == .ArrayType); + const data = tree.nodes.items(.data)[node]; + return .{ + .ast = .{ + .lbracket = tree.nodes.items(.main_token)[node], + .elem_count = data.lhs, + 
.sentinel = null, + .elem_type = data.rhs, + }, + }; + } + + pub fn arrayTypeSentinel(tree: Tree, node: Node.Index) Full.ArrayType { + assert(tree.nodes.items(.tag)[node] == .ArrayTypeSentinel); + const data = tree.nodes.items(.data)[node]; + const extra = tree.extraData(data.rhs, Node.ArrayTypeSentinel); + return .{ + .ast = .{ + .lbracket = tree.nodes.items(.main_token)[node], + .elem_count = data.lhs, + .sentinel = extra.sentinel, + .elem_type = extra.elem_type, + }, + }; + } + fn fullVarDecl(tree: Tree, info: Full.VarDecl.Ast) Full.VarDecl { const token_tags = tree.tokens.items(.tag); var result: Full.VarDecl = .{ @@ -1087,6 +1114,17 @@ pub const Full = struct { type_expr: Node.Index, }; }; + + pub const ArrayType = struct { + ast: Ast, + + pub const Ast = struct { + lbracket: TokenIndex, + elem_count: Node.Index, + sentinel: ?Node.Index, + elem_type: Node.Index, + }; + }; }; pub const Error = union(enum) { diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index d155c2ee5e..8086525fef 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -490,6 +490,64 @@ test "zig fmt: anon list literal 3 element comma" { ); } +test "zig fmt: array literal 1 element" { + try testCanonical( + \\const x = [_]u32{a}; + \\ + ); +} + +test "zig fmt: array literal 1 element comma" { + try testCanonical( + \\const x = [1]u32{ + \\ a, + \\}; + \\ + ); +} + +test "zig fmt: array literal 2 element" { + try testCanonical( + \\const x = [_]u32{ a, b }; + \\ + ); +} + +test "zig fmt: array literal 2 element comma" { + try testCanonical( + \\const x = [2]u32{ + \\ a, + \\ b, + \\}; + \\ + ); +} + +test "zig fmt: array literal 3 element" { + try testCanonical( + \\const x = [_]u32{ a, b, c }; + \\ + ); +} + +test "zig fmt: array literal 3 element comma" { + try testCanonical( + \\const x = [3]u32{ + \\ a, + \\ b, + \\ c, + \\}; + \\ + ); +} + +test "zig fmt: sentinel array literal 1 element" { + try testCanonical( + \\const x = [_:9000]u32{a}; + 
\\ + ); +} + //test "zig fmt: async function" { // try testCanonical( // \\pub const Server = struct { diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index deeae58e7f..e8d8e2e54d 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -359,34 +359,8 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac return renderExpression(ais, tree, datas[node].lhs, space); }, - .ArrayType => unreachable, // TODO - //.ArrayType => { - // const array_type = @fieldParentPtr(ast.Node.ArrayType, "base", base); - // return renderArrayType( - // allocator, - // ais, - // tree, - // array_type.op_token, - // array_type.rhs, - // array_type.len_expr, - // null, - // space, - // ); - //}, - .ArrayTypeSentinel => unreachable, // TODO - //.ArrayTypeSentinel => { - // const array_type = @fieldParentPtr(ast.Node.ArrayTypeSentinel, "base", base); - // return renderArrayType( - // allocator, - // ais, - // tree, - // array_type.op_token, - // array_type.rhs, - // array_type.len_expr, - // array_type.sentinel, - // space, - // ); - //}, + .ArrayType => return renderArrayType(ais, tree, tree.arrayType(node), space), + .ArrayTypeSentinel => return renderArrayType(ais, tree, tree.arrayTypeSentinel(node), space), .PtrType => unreachable, // TODO .PtrTypeAligned => unreachable, // TODO @@ -1279,47 +1253,21 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac } } +// TODO: handle comments inside the brackets fn renderArrayType( - allocator: *mem.Allocator, ais: *Ais, tree: ast.Tree, - lbracket: ast.TokenIndex, - rhs: ast.Node.Index, - len_expr: ast.Node.Index, - opt_sentinel: ?ast.Node.Index, + array_type: ast.Full.ArrayType, space: Space, ) Error!void { - const rbracket = tree.nextToken(if (opt_sentinel) |sentinel| - sentinel.lastToken() - else - len_expr.lastToken()); - - const starts_with_comment = tree.token_tags[lbracket + 1] == .LineComment; - const ends_with_comment = tree.token_tags[rbracket - 1] == 
.LineComment; - const new_space = if (ends_with_comment) Space.Newline else Space.None; - { - const do_indent = (starts_with_comment or ends_with_comment); - if (do_indent) ais.pushIndent(); - defer if (do_indent) ais.popIndent(); - - try renderToken(ais, tree, lbracket, Space.None); // [ - try renderExpression(ais, tree, len_expr, new_space); - - if (starts_with_comment) { - try ais.maybeInsertNewline(); - } - if (opt_sentinel) |sentinel| { - const colon_token = tree.prevToken(sentinel.firstToken()); - try renderToken(ais, tree, colon_token, Space.None); // : - try renderExpression(ais, tree, sentinel, Space.None); - } - if (starts_with_comment) { - try ais.maybeInsertNewline(); - } + try renderToken(ais, tree, array_type.ast.lbracket, .None); // lbracket + try renderExpression(ais, tree, array_type.ast.elem_count, .None); + if (array_type.ast.sentinel) |sentinel| { + try renderToken(ais, tree, tree.firstToken(sentinel) - 1, .None); // colon + try renderExpression(ais, tree, sentinel, .None); } - try renderToken(ais, tree, rbracket, Space.None); // ] - - return renderExpression(ais, tree, rhs, space); + try renderToken(ais, tree, tree.firstToken(array_type.ast.elem_type) - 1, .None); // rbracket + return renderExpression(ais, tree, array_type.ast.elem_type, space); } fn renderAsmOutput( @@ -1900,6 +1848,7 @@ fn renderBlock( } } +// TODO: handle comments between fields fn renderStructInit( ais: *Ais, tree: ast.Tree, @@ -1953,6 +1902,7 @@ fn renderStructInit( } } +// TODO: handle comments between elements fn renderArrayInit( ais: *Ais, tree: ast.Tree, From cf42ae178deae475c4fdc2d927f91b4980ec8be5 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Fri, 5 Feb 2021 15:45:33 -0700 Subject: [PATCH 018/173] std.MultiArrayList: use `@memset` builtin for undefined See comment for more details --- lib/std/multi_array_list.zig | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/lib/std/multi_array_list.zig b/lib/std/multi_array_list.zig index 
aaf105bb57..24269c2316 100644 --- a/lib/std/multi_array_list.zig +++ b/lib/std/multi_array_list.zig @@ -180,9 +180,15 @@ pub fn MultiArrayList(comptime S: type) type { capacityInBytes(new_len), .exact, ) catch { + const self_slice = self.slice(); inline for (fields) |field_info, i| { const field = @intToEnum(Field, i); - mem.set(field_info.field_type, self.slice().items(field)[new_len..], undefined); + const dest_slice = self_slice.items(field)[new_len..]; + const byte_count = dest_slice.len * @sizeOf(field_info.field_type); + // We use memset here for more efficient codegen in safety-checked, + // valgrind-enabled builds. Otherwise the valgrind client request + // will be repeated for every element. + @memset(@ptrCast([*]u8, dest_slice.ptr), undefined, byte_count); } self.len = new_len; return; From 16a2562c3f12a5a4fe9875644b593bd571c2734c Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Fri, 5 Feb 2021 15:47:18 -0700 Subject: [PATCH 019/173] zig fmt: implement container decls --- lib/std/zig/ast.zig | 260 ++++++++++++++++-- lib/std/zig/parse.zig | 528 +++++++++++++++++++++++------------- lib/std/zig/parser_test.zig | 115 +++++--- lib/std/zig/render.zig | 231 ++++++++-------- 4 files changed, 766 insertions(+), 368 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 08ca0e934f..9db1bc3b19 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -365,6 +365,26 @@ pub const Tree = struct { } }, + .ContainerDecl, + .ContainerDeclComma, + .ContainerDeclTwo, + .ContainerDeclTwoComma, + .ContainerDeclArg, + .ContainerDeclArgComma, + .TaggedUnion, + .TaggedUnionComma, + .TaggedUnionTwo, + .TaggedUnionTwoComma, + .TaggedUnionEnumTag, + .TaggedUnionEnumTagComma, + => { + const main_token = main_tokens[n]; + switch (token_tags[main_token - 1]) { + .Keyword_packed, .Keyword_extern => return main_token - 1, + else => return main_token, + } + }, + .PtrTypeAligned => unreachable, // TODO .PtrTypeSentinel => unreachable, // TODO .PtrType => 
unreachable, // TODO @@ -375,10 +395,6 @@ pub const Tree = struct { .While => unreachable, // TODO .ForSimple => unreachable, // TODO .For => unreachable, // TODO - .ContainerDecl => unreachable, // TODO - .ContainerDeclArg => unreachable, // TODO - .TaggedUnion => unreachable, // TODO - .TaggedUnionEnumTag => unreachable, // TODO .AsmOutput => unreachable, // TODO .AsmInput => unreachable, // TODO .ErrorValue => unreachable, // TODO @@ -408,6 +424,7 @@ pub const Tree = struct { .Break, .Return, .Nosuspend, + .Comptime, => n = datas[n].lhs, .TestDecl, @@ -455,7 +472,6 @@ pub const Tree = struct { .BoolOr, .AnyFrameType, .ErrorUnion, - .Comptime, .IfSimple, .WhileSimple, => n = datas[n].rhs, @@ -490,13 +506,37 @@ pub const Tree = struct { } n = tree.extra_data[params.end - 1]; // last parameter }, - .Block => { + .ContainerDeclArg => { + const members = tree.extraData(datas[n].rhs, Node.SubRange); + if (members.end - members.start == 0) { + end_offset += 1; // for the rparen + n = datas[n].lhs; + } else { + end_offset += 1; // for the rbrace + n = tree.extra_data[members.end - 1]; // last parameter + } + }, + .ContainerDeclArgComma => { + const members = tree.extraData(datas[n].rhs, Node.SubRange); + assert(members.end - members.start > 0); + end_offset += 2; // for the comma + rbrace + n = tree.extra_data[members.end - 1]; // last parameter + }, + .Block, + .ContainerDecl, + .TaggedUnion, + => { end_offset += 1; // for the rbrace if (datas[n].rhs - datas[n].lhs == 0) { return main_tokens[n] + end_offset; } n = tree.extra_data[datas[n].rhs - 1]; // last statement }, + .ContainerDeclComma, .TaggedUnionComma => { + assert(datas[n].rhs - datas[n].lhs > 0); + end_offset += 2; // for the comma + rbrace + n = tree.extra_data[datas[n].rhs - 1]; // last member + }, .CallOne, .ArrayAccess, => { @@ -511,6 +551,8 @@ pub const Tree = struct { .BuiltinCallTwo, .BlockTwo, .StructInitDotTwo, + .ContainerDeclTwo, + .TaggedUnionTwo, => { end_offset += 1; // for the rparen/rbrace if 
(datas[n].rhs != 0) { @@ -523,6 +565,8 @@ pub const Tree = struct { }, .ArrayInitDotTwoComma, .StructInitDotTwoComma, + .ContainerDeclTwoComma, + .TaggedUnionTwoComma, => { end_offset += 2; // for the comma + rbrace if (datas[n].rhs != 0) { @@ -589,6 +633,38 @@ pub const Tree = struct { } } }, + .ContainerFieldInit => { + if (datas[n].rhs != 0) { + n = datas[n].rhs; + } else if (datas[n].lhs != 0) { + n = datas[n].lhs; + } else { + return main_tokens[n] + end_offset; + } + }, + .ContainerFieldAlign => { + if (datas[n].rhs != 0) { + end_offset += 1; // for the rparen + n = datas[n].rhs; + } else if (datas[n].lhs != 0) { + n = datas[n].lhs; + } else { + return main_tokens[n] + end_offset; + } + }, + .ContainerField => { + const extra = tree.extraData(datas[n].rhs, Node.ContainerField); + if (extra.value_expr != 0) { + n = extra.value_expr; + } else if (extra.align_expr != 0) { + end_offset += 1; // for the rparen + n = extra.align_expr; + } else if (datas[n].lhs != 0) { + n = datas[n].lhs; + } else { + return main_tokens[n] + end_offset; + } + }, // These are not supported by lastToken() because implementation would // require recursion due to the optional comma followed by rbrace. 
@@ -600,10 +676,9 @@ pub const Tree = struct { .StructInit => unreachable, .StructInitOne => unreachable, .StructInitDot => unreachable, - .ContainerFieldInit => unreachable, - .ContainerFieldAlign => unreachable, - .ContainerField => unreachable, + .TaggedUnionEnumTag => unreachable, // TODO + .TaggedUnionEnumTagComma => unreachable, // TODO .Switch => unreachable, // TODO .If => unreachable, // TODO .Continue => unreachable, // TODO @@ -631,10 +706,6 @@ pub const Tree = struct { .FnProtoMulti => unreachable, // TODO .FnProtoOne => unreachable, // TODO .FnProto => unreachable, // TODO - .ContainerDecl => unreachable, // TODO - .ContainerDeclArg => unreachable, // TODO - .TaggedUnion => unreachable, // TODO - .TaggedUnionEnumTag => unreachable, // TODO .AsmOutput => unreachable, // TODO .AsmInput => unreachable, // TODO .ErrorValue => unreachable, // TODO @@ -952,6 +1023,93 @@ pub const Tree = struct { }; } + pub fn containerDeclTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) Full.ContainerDecl { + assert(tree.nodes.items(.tag)[node] == .ContainerDeclTwo or + tree.nodes.items(.tag)[node] == .ContainerDeclTwoComma); + const data = tree.nodes.items(.data)[node]; + buffer.* = .{ data.lhs, data.rhs }; + const members = if (data.rhs != 0) + buffer[0..2] + else if (data.lhs != 0) + buffer[0..1] + else + buffer[0..0]; + return tree.fullContainerDecl(.{ + .main_token = tree.nodes.items(.main_token)[node], + .enum_token = null, + .members = members, + .arg = 0, + }); + } + + pub fn containerDecl(tree: Tree, node: Node.Index) Full.ContainerDecl { + assert(tree.nodes.items(.tag)[node] == .ContainerDecl or + tree.nodes.items(.tag)[node] == .ContainerDeclComma); + const data = tree.nodes.items(.data)[node]; + return tree.fullContainerDecl(.{ + .main_token = tree.nodes.items(.main_token)[node], + .enum_token = null, + .members = tree.extra_data[data.lhs..data.rhs], + .arg = 0, + }); + } + + pub fn containerDeclArg(tree: Tree, node: Node.Index) Full.ContainerDecl { + 
assert(tree.nodes.items(.tag)[node] == .ContainerDeclArg); + const data = tree.nodes.items(.data)[node]; + const members_range = tree.extraData(data.rhs, Node.SubRange); + return tree.fullContainerDecl(.{ + .main_token = tree.nodes.items(.main_token)[node], + .enum_token = null, + .members = tree.extra_data[members_range.start..members_range.end], + .arg = data.lhs, + }); + } + + pub fn taggedUnionTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) Full.ContainerDecl { + assert(tree.nodes.items(.tag)[node] == .TaggedUnionTwo); + const data = tree.nodes.items(.data)[node]; + buffer.* = .{ data.lhs, data.rhs }; + const members = if (data.rhs != 0) + buffer[0..2] + else if (data.lhs != 0) + buffer[0..1] + else + buffer[0..0]; + const main_token = tree.nodes.items(.main_token)[node]; + return tree.fullContainerDecl(.{ + .main_token = main_token, + .enum_token = main_token + 2, // union lparen enum + .members = members, + .arg = 0, + }); + } + + pub fn taggedUnion(tree: Tree, node: Node.Index) Full.ContainerDecl { + assert(tree.nodes.items(.tag)[node] == .TaggedUnion); + const data = tree.nodes.items(.data)[node]; + const main_token = tree.nodes.items(.main_token)[node]; + return tree.fullContainerDecl(.{ + .main_token = main_token, + .enum_token = main_token + 2, // union lparen enum + .members = tree.extra_data[data.lhs..data.rhs], + .arg = 0, + }); + } + + pub fn taggedUnionEnumTag(tree: Tree, node: Node.Index) Full.ContainerDecl { + assert(tree.nodes.items(.tag)[node] == .TaggedUnionEnumTag); + const data = tree.nodes.items(.data)[node]; + const members_range = tree.extraData(data.rhs, Node.SubRange); + const main_token = tree.nodes.items(.main_token)[node]; + return tree.fullContainerDecl(.{ + .main_token = main_token, + .enum_token = main_token + 2, // union lparen enum + .members = tree.extra_data[data.lhs..data.rhs], + .arg = data.lhs, + }); + } + fn fullVarDecl(tree: Tree, info: Full.VarDecl.Ast) Full.VarDecl { const token_tags = tree.tokens.items(.tag); 
var result: Full.VarDecl = .{ @@ -1031,6 +1189,19 @@ pub const Tree = struct { }; return result; } + + fn fullContainerDecl(tree: Tree, info: Full.ContainerDecl.Ast) Full.ContainerDecl { + const token_tags = tree.tokens.items(.tag); + var result: Full.ContainerDecl = .{ + .ast = info, + .layout_token = null, + }; + switch (token_tags[info.main_token - 1]) { + .Keyword_extern, .Keyword_packed => result.layout_token = info.main_token - 1, + else => {}, + } + return result; + } }; /// Fully assembled AST node information. @@ -1125,6 +1296,19 @@ pub const Full = struct { elem_type: Node.Index, }; }; + + pub const ContainerDecl = struct { + layout_token: ?TokenIndex, + ast: Ast, + + pub const Ast = struct { + main_token: TokenIndex, + /// Populated when main_token is Keyword_union. + enum_token: ?TokenIndex, + members: []const Node.Index, + arg: Node.Index, + }; + }; }; pub const Error = union(enum) { @@ -1543,9 +1727,11 @@ pub const Node = struct { StructInitOne, /// `.{.a = lhs, .b = rhs}`. lhs and rhs can be omitted. /// main_token is the lbrace. + /// No trailing comma before the rbrace. StructInitDotTwo, /// Same as `StructInitDotTwo` except there is known to be a trailing comma - /// before the final rbrace. + /// before the final rbrace. This tag exists to facilitate lastToken() implemented + /// without recursion. StructInitDotTwoComma, /// `.{.a = b, .c = d}`. `sub_list[lhs..rhs]`. /// main_token is the lbrace. @@ -1655,21 +1841,50 @@ pub const Node = struct { /// `error{a, b}`. /// lhs and rhs both unused. ErrorSetDecl, - /// `struct {}`, `union {}`, etc. `sub_list[lhs..rhs]`. + /// `struct {}`, `union {}`, `opaque {}`, `enum {}`. `extra_data[lhs..rhs]`. + /// main_token is `struct`, `union`, `opaque`, `enum` keyword. ContainerDecl, - /// `union(lhs)` / `enum(lhs)`. `sub_range_list[rhs]`. + /// Same as ContainerDecl but there is known to be a trailing comma before the rbrace. 
+ ContainerDeclComma, + /// `struct {lhs, rhs}`, `union {lhs, rhs}`, `opaque {lhs, rhs}`, `enum {lhs, rhs}`. + /// lhs or rhs can be omitted. + /// main_token is `struct`, `union`, `opaque`, `enum` keyword. + ContainerDeclTwo, + /// Same as ContainerDeclTwo except there is known to be a trailing comma + /// before the rbrace. + ContainerDeclTwoComma, + /// `union(lhs)` / `enum(lhs)`. `SubRange[rhs]`. ContainerDeclArg, + /// Same as ContainerDeclArg but there is known to be a trailing comma before the rbrace. + ContainerDeclArgComma, /// `union(enum) {}`. `sub_list[lhs..rhs]`. /// Note that tagged unions with explicitly provided enums are represented /// by `ContainerDeclArg`. TaggedUnion, - /// `union(enum(lhs)) {}`. `sub_list_range[rhs]`. + /// Same as TaggedUnion but there is known to be a trailing comma before the rbrace. + TaggedUnionComma, + /// `union(enum) {lhs, rhs}`. lhs or rhs may be omitted. + /// Note that tagged unions with explicitly provided enums are represented + /// by `ContainerDeclArg`. + TaggedUnionTwo, + /// Same as TaggedUnionTwo but there is known to be a trailing comma before the rbrace. + TaggedUnionTwoComma, + /// `union(enum(lhs)) {}`. `SubRange[rhs]`. TaggedUnionEnumTag, + /// Same as TaggedUnionEnumTag but there is known to be a trailing comma + /// before the rbrace. + TaggedUnionEnumTagComma, /// `a: lhs = rhs,`. lhs and rhs can be omitted. + /// main_token is the field name identifier. + /// lastToken() does not include the possible trailing comma. ContainerFieldInit, /// `a: lhs align(rhs),`. rhs can be omitted. + /// main_token is the field name identifier. + /// lastToken() does not include the possible trailing comma. ContainerFieldAlign, /// `a: lhs align(c) = d,`. `container_field_list[rhs]`. + /// main_token is the field name identifier. + /// lastToken() does not include the possible trailing comma. ContainerField, /// `anytype`. both lhs and rhs unused. /// Used by `ContainerField`. 
@@ -1699,6 +1914,17 @@ pub const Node = struct { ErrorValue, /// `lhs!rhs`. main_token is the `!`. ErrorUnion, + + pub fn isContainerField(tag: Tag) bool { + return switch (tag) { + .ContainerFieldInit, + .ContainerFieldAlign, + .ContainerField, + => true, + + else => false, + }; + } }; pub const Data = struct { diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig index 394c36fcb5..35b5083562 100644 --- a/lib/std/zig/parse.zig +++ b/lib/std/zig/parse.zig @@ -64,9 +64,10 @@ pub fn parse(gpa: *Allocator, source: []const u8) Allocator.Error!Tree { .rhs = undefined, }, }); - const root_decls = try parser.parseContainerMembers(true); - // parseContainerMembers will try to skip as much - // invalid tokens as it can, so we are now at EOF. + const root_members = try parser.parseContainerMembers(); + const root_decls = try root_members.toSpan(&parser); + // parseContainerMembers will try to skip as much invalid tokens as + // it can, so we are now at EOF. assert(parser.token_tags[parser.tok_i] == .Eof); parser.nodes.items(.data)[0] = .{ .lhs = root_decls.start, @@ -108,6 +109,22 @@ const Parser = struct { } }; + const Members = struct { + len: usize, + lhs: Node.Index, + rhs: Node.Index, + trailing_comma: bool, + + fn toSpan(self: Members, p: *Parser) !Node.SubRange { + if (self.len <= 2) { + const nodes = [2]Node.Index{ self.lhs, self.rhs }; + return p.listToSpan(nodes[0..self.len]); + } else { + return Node.SubRange{ .start = self.lhs, .end = self.rhs }; + } + } + }; + fn listToSpan(p: *Parser, list: []const Node.Index) !Node.SubRange { try p.extra_data.appendSlice(p.gpa, list); return Node.SubRange{ @@ -151,169 +168,225 @@ const Parser = struct { /// / ContainerField COMMA ContainerMembers /// / ContainerField /// / - fn parseContainerMembers(p: *Parser, top_level: bool) !Node.SubRange { + /// TopLevelComptime <- KEYWORD_comptime BlockExpr + fn parseContainerMembers(p: *Parser) !Members { var list = std.ArrayList(Node.Index).init(p.gpa); defer list.deinit(); var 
field_state: union(enum) { - /// no fields have been seen + /// No fields have been seen. none, - /// currently parsing fields + /// Currently parsing fields. seen, - /// saw fields and then a declaration after them. - /// payload is first token of previous declaration. - end: TokenIndex, - /// ther was a declaration between fields, don't report more errors + /// Saw fields and then a declaration after them. + /// Payload is first token of previous declaration. + end: Node.Index, + /// There was a declaration between fields, don't report more errors. err, } = .none; // Skip container doc comments. while (p.eatToken(.ContainerDocComment)) |_| {} + var trailing_comma = false; while (true) { const doc_comment = p.eatDocComments(); - const test_decl_node = p.parseTestDecl() catch |err| switch (err) { - error.OutOfMemory => return error.OutOfMemory, - error.ParseError => { - p.findNextContainerMember(); - continue; - }, - }; - if (test_decl_node != 0) { - if (field_state == .seen) { - field_state = .{ .end = p.nodes.items(.main_token)[test_decl_node] }; - } - try list.append(test_decl_node); - continue; - } - - const comptime_node = p.parseTopLevelComptime() catch |err| switch (err) { - error.OutOfMemory => return error.OutOfMemory, - error.ParseError => { - p.findNextContainerMember(); - continue; - }, - }; - if (comptime_node != 0) { - if (field_state == .seen) { - field_state = .{ .end = p.nodes.items(.main_token)[comptime_node] }; - } - try list.append(comptime_node); - continue; - } - - const visib_token = p.eatToken(.Keyword_pub); - - const top_level_decl = p.parseTopLevelDecl() catch |err| switch (err) { - error.OutOfMemory => return error.OutOfMemory, - error.ParseError => { - p.findNextContainerMember(); - continue; - }, - }; - if (top_level_decl != 0) { - if (field_state == .seen) { - field_state = .{ - .end = visib_token orelse p.nodes.items(.main_token)[top_level_decl], - }; - } - try list.append(top_level_decl); - continue; - } - - if (visib_token != null) 
{ - try p.warn(.{ .ExpectedPubItem = .{ .token = p.tok_i } }); - // ignore this pub - continue; - } - - const container_field = p.parseContainerField() catch |err| switch (err) { - error.OutOfMemory => return error.OutOfMemory, - error.ParseError => { - // attempt to recover - p.findNextContainerMember(); - continue; - }, - }; - if (container_field != 0) { - switch (field_state) { - .none => field_state = .seen, - .err, .seen => {}, - .end => |tok| { - try p.warn(.{ .DeclBetweenFields = .{ .token = tok } }); - // continue parsing, error will be reported later - field_state = .err; - }, - } - try list.append(container_field); - const comma = p.eatToken(.Comma) orelse { - // try to continue parsing - const index = p.tok_i; - p.findNextContainerMember(); - const next = p.token_tags[p.tok_i]; - switch (next) { - .Eof => { - // no invalid tokens were found - if (index == p.tok_i) break; - - // Invalid tokens, add error and exit - try p.warn(.{ - .ExpectedToken = .{ .token = index, .expected_id = .Comma }, - }); - break; - }, - else => { - if (next == .RBrace) { - if (!top_level) break; - p.tok_i += 1; - } - - // add error and continue - try p.warn(.{ - .ExpectedToken = .{ .token = index, .expected_id = .Comma }, - }); - continue; - }, + switch (p.token_tags[p.tok_i]) { + .Keyword_test => { + const test_decl_node = try p.expectTestDeclRecoverable(); + if (test_decl_node != 0) { + if (field_state == .seen) { + field_state = .{ .end = test_decl_node }; + } + try list.append(test_decl_node); } - }; - continue; - } - - // Dangling doc comment - if (doc_comment) |tok| { - try p.warn(.{ - .UnattachedDocComment = .{ .token = tok }, - }); - } - - const next = p.token_tags[p.tok_i]; - switch (next) { - .Eof => break, - .Keyword_comptime => { + trailing_comma = false; + }, + .Keyword_comptime => switch (p.token_tags[p.tok_i + 1]) { + .Identifier => { + p.tok_i += 1; + const container_field = try p.expectContainerFieldRecoverable(); + if (container_field != 0) { + switch 
(field_state) { + .none => field_state = .seen, + .err, .seen => {}, + .end => |node| { + try p.warn(.{ + .DeclBetweenFields = .{ .token = p.nodes.items(.main_token)[node] }, + }); + // Continue parsing; error will be reported later. + field_state = .err; + }, + } + try list.append(container_field); + switch (p.token_tags[p.tok_i]) { + .Comma => { + p.tok_i += 1; + trailing_comma = true; + continue; + }, + .RBrace, .Eof => { + trailing_comma = false; + break; + }, + else => {}, + } + // There is not allowed to be a decl after a field with no comma. + // Report error but recover parser. + try p.warn(.{ + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + }); + p.findNextContainerMember(); + } + }, + .LBrace => { + const comptime_token = p.nextToken(); + const block = p.parseBlock() catch |err| switch (err) { + error.OutOfMemory => return error.OutOfMemory, + error.ParseError => blk: { + p.findNextContainerMember(); + break :blk null_node; + }, + }; + if (block != 0) { + const comptime_node = try p.addNode(.{ + .tag = .Comptime, + .main_token = comptime_token, + .data = .{ + .lhs = block, + .rhs = undefined, + }, + }); + if (field_state == .seen) { + field_state = .{ .end = comptime_node }; + } + try list.append(comptime_node); + } + trailing_comma = false; + }, + else => { + p.tok_i += 1; + try p.warn(.{ .ExpectedBlockOrField = .{ .token = p.tok_i } }); + }, + }, + .Keyword_pub => { p.tok_i += 1; - try p.warn(.{ - .ExpectedBlockOrField = .{ .token = p.tok_i }, - }); + const top_level_decl = try p.expectTopLevelDeclRecoverable(); + if (top_level_decl != 0) { + if (field_state == .seen) { + field_state = .{ .end = top_level_decl }; + } + try list.append(top_level_decl); + } + trailing_comma = false; + }, + .Keyword_usingnamespace => { + const node = try p.expectUsingNamespaceRecoverable(); + if (node != 0) { + if (field_state == .seen) { + field_state = .{ .end = node }; + } + try list.append(node); + } + trailing_comma = false; + }, + .Keyword_const, + 
.Keyword_var, + .Keyword_threadlocal, + .Keyword_export, + .Keyword_extern, + .Keyword_inline, + .Keyword_noinline, + .Keyword_fn, + => { + const top_level_decl = try p.expectTopLevelDeclRecoverable(); + if (top_level_decl != 0) { + if (field_state == .seen) { + field_state = .{ .end = top_level_decl }; + } + try list.append(top_level_decl); + } + trailing_comma = false; + }, + .Identifier => { + const container_field = try p.expectContainerFieldRecoverable(); + if (container_field != 0) { + switch (field_state) { + .none => field_state = .seen, + .err, .seen => {}, + .end => |node| { + try p.warn(.{ + .DeclBetweenFields = .{ .token = p.nodes.items(.main_token)[node] }, + }); + // Continue parsing; error will be reported later. + field_state = .err; + }, + } + try list.append(container_field); + switch (p.token_tags[p.tok_i]) { + .Comma => { + p.tok_i += 1; + trailing_comma = true; + continue; + }, + .RBrace, .Eof => { + trailing_comma = false; + break; + }, + else => {}, + } + // There is not allowed to be a decl after a field with no comma. + // Report error but recover parser. + try p.warn(.{ + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + }); + p.findNextContainerMember(); + } + }, + .Eof, .RBrace => { + if (doc_comment) |tok| { + try p.warn(.{ .UnattachedDocComment = .{ .token = tok } }); + } + break; }, else => { - const index = p.tok_i; - if (next == .RBrace) { - if (!top_level) break; - p.tok_i += 1; - } - - // this was likely not supposed to end yet, - // try to find the next declaration + try p.warn(.{ .ExpectedContainerMembers = .{ .token = p.tok_i } }); + // This was likely not supposed to end yet; try to find the next declaration. 
p.findNextContainerMember(); - try p.warn(.{ - .ExpectedContainerMembers = .{ .token = index }, - }); }, } } - return p.listToSpan(list.items); + switch (list.items.len) { + 0 => return Members{ + .len = 0, + .lhs = 0, + .rhs = 0, + .trailing_comma = trailing_comma, + }, + 1 => return Members{ + .len = 1, + .lhs = list.items[0], + .rhs = 0, + .trailing_comma = trailing_comma, + }, + 2 => return Members{ + .len = 2, + .lhs = list.items[0], + .rhs = list.items[1], + .trailing_comma = trailing_comma, + }, + else => { + const span = try p.listToSpan(list.items); + return Members{ + .len = list.items.len, + .lhs = span.start, + .rhs = span.end, + .trailing_comma = trailing_comma, + }; + }, + } } /// Attempts to find next container member by searching for certain tokens @@ -398,44 +471,36 @@ const Parser = struct { } /// TestDecl <- KEYWORD_test STRINGLITERALSINGLE? Block - fn parseTestDecl(p: *Parser) !Node.Index { - const test_token = p.eatToken(.Keyword_test) orelse return null_node; - const name_token = try p.expectToken(.StringLiteral); + fn expectTestDecl(p: *Parser) !Node.Index { + const test_token = try p.expectToken(.Keyword_test); + const name_token = p.eatToken(.StringLiteral); const block_node = try p.parseBlock(); if (block_node == 0) return p.fail(.{ .ExpectedLBrace = .{ .token = p.tok_i } }); return p.addNode(.{ .tag = .TestDecl, .main_token = test_token, .data = .{ - .lhs = name_token, + .lhs = name_token orelse 0, .rhs = block_node, }, }); } - /// TopLevelComptime <- KEYWORD_comptime BlockExpr - fn parseTopLevelComptime(p: *Parser) !Node.Index { - if (p.token_tags[p.tok_i] == .Keyword_comptime and - p.token_tags[p.tok_i + 1] == .LBrace) - { - return p.addNode(.{ - .tag = .Comptime, - .main_token = p.nextToken(), - .data = .{ - .lhs = try p.parseBlock(), - .rhs = undefined, - }, - }); - } else { - return null_node; - } + fn expectTestDeclRecoverable(p: *Parser) error{OutOfMemory}!Node.Index { + return p.expectTestDecl() catch |err| switch (err) { + 
error.OutOfMemory => return error.OutOfMemory, + error.ParseError => { + p.findNextContainerMember(); + return null_node; + }, + }; } /// TopLevelDecl /// <- (KEYWORD_export / KEYWORD_extern STRINGLITERALSINGLE? / (KEYWORD_inline / KEYWORD_noinline))? FnProto (SEMICOLON / Block) /// / (KEYWORD_export / KEYWORD_extern STRINGLITERALSINGLE?)? KEYWORD_threadlocal? VarDecl /// / KEYWORD_usingnamespace Expr SEMICOLON - fn parseTopLevelDecl(p: *Parser) !Node.Index { + fn expectTopLevelDecl(p: *Parser) !Node.Index { const extern_export_inline_token = p.nextToken(); var expect_fn: bool = false; var exported: bool = false; @@ -496,7 +561,21 @@ const Parser = struct { return p.fail(.{ .ExpectedVarDeclOrFn = .{ .token = p.tok_i } }); } - const usingnamespace_token = p.eatToken(.Keyword_usingnamespace) orelse return null_node; + return p.expectUsingNamespace(); + } + + fn expectTopLevelDeclRecoverable(p: *Parser) error{OutOfMemory}!Node.Index { + return p.expectTopLevelDecl() catch |err| switch (err) { + error.OutOfMemory => return error.OutOfMemory, + error.ParseError => { + p.findNextContainerMember(); + return null_node; + }, + }; + } + + fn expectUsingNamespace(p: *Parser) !Node.Index { + const usingnamespace_token = try p.expectToken(.Keyword_usingnamespace); const expr = try p.expectExpr(); const semicolon_token = try p.expectToken(.Semicolon); try p.parseAppendedDocComment(semicolon_token); @@ -510,6 +589,16 @@ const Parser = struct { }); } + fn expectUsingNamespaceRecoverable(p: *Parser) error{OutOfMemory}!Node.Index { + return p.expectUsingNamespace() catch |err| switch (err) { + error.OutOfMemory => return error.OutOfMemory, + error.ParseError => { + p.findNextContainerMember(); + return null_node; + }, + }; + } + /// FnProto <- KEYWORD_fn IDENTIFIER? LPAREN ParamDeclList RPAREN ByteAlign? LinkSection? CallConv? EXCLAMATIONMARK? 
(Keyword_anytype / TypeExpr) fn parseFnProto(p: *Parser) !Node.Index { const fn_token = p.eatToken(.Keyword_fn) orelse return null_node; @@ -648,12 +737,9 @@ const Parser = struct { } /// ContainerField <- KEYWORD_comptime? IDENTIFIER (COLON TypeExpr ByteAlign?)? (EQUAL Expr)? - fn parseContainerField(p: *Parser) !Node.Index { + fn expectContainerField(p: *Parser) !Node.Index { const comptime_token = p.eatToken(.Keyword_comptime); - const name_token = p.eatToken(.Identifier) orelse { - if (comptime_token) |_| p.tok_i -= 1; - return null_node; - }; + const name_token = try p.expectToken(.Identifier); var align_expr: Node.Index = 0; var type_expr: Node.Index = 0; @@ -708,6 +794,16 @@ const Parser = struct { } } + fn expectContainerFieldRecoverable(p: *Parser) error{OutOfMemory}!Node.Index { + return p.expectContainerField() catch |err| switch (err) { + error.OutOfMemory => return error.OutOfMemory, + error.ParseError => { + p.findNextContainerMember(); + return null_node; + }, + }; + } + /// Statement /// <- KEYWORD_comptime? 
VarDecl /// / KEYWORD_comptime BlockExprStatement @@ -3333,16 +3429,20 @@ const Parser = struct { _ = try p.expectToken(.RParen); _ = try p.expectToken(.LBrace); - const members = try p.parseContainerMembers(false); + const members = try p.parseContainerMembers(); + const members_span = try members.toSpan(p); _ = try p.expectToken(.RBrace); return p.addNode(.{ - .tag = .TaggedUnionEnumTag, + .tag = switch (members.trailing_comma) { + true => .TaggedUnionEnumTagComma, + false => .TaggedUnionEnumTag, + }, .main_token = main_token, .data = .{ .lhs = enum_tag_expr, .rhs = try p.addExtra(Node.SubRange{ - .start = members.start, - .end = members.end, + .start = members_span.start, + .end = members_span.end, }), }, }); @@ -3350,16 +3450,34 @@ const Parser = struct { _ = try p.expectToken(.RParen); _ = try p.expectToken(.LBrace); - const members = try p.parseContainerMembers(false); + const members = try p.parseContainerMembers(); _ = try p.expectToken(.RBrace); - return p.addNode(.{ - .tag = .TaggedUnion, - .main_token = main_token, - .data = .{ - .lhs = members.start, - .rhs = members.end, - }, - }); + if (members.len <= 2) { + return p.addNode(.{ + .tag = switch (members.trailing_comma) { + true => .TaggedUnionTwoComma, + false => .TaggedUnionTwo, + }, + .main_token = main_token, + .data = .{ + .lhs = members.lhs, + .rhs = members.rhs, + }, + }); + } else { + const span = try members.toSpan(p); + return p.addNode(.{ + .tag = switch (members.trailing_comma) { + true => .TaggedUnionComma, + false => .TaggedUnion, + }, + .main_token = main_token, + .data = .{ + .lhs = span.start, + .rhs = span.end, + }, + }); + } } } else { const expr = try p.expectExpr(); @@ -3373,26 +3491,48 @@ const Parser = struct { else => unreachable, }; _ = try p.expectToken(.LBrace); - const members = try p.parseContainerMembers(false); + const members = try p.parseContainerMembers(); _ = try p.expectToken(.RBrace); if (arg_expr == 0) { - return p.addNode(.{ - .tag = .ContainerDecl, - .main_token = 
main_token, - .data = .{ - .lhs = members.start, - .rhs = members.end, - }, - }); + if (members.len <= 2) { + return p.addNode(.{ + .tag = switch (members.trailing_comma) { + true => .ContainerDeclTwoComma, + false => .ContainerDeclTwo, + }, + .main_token = main_token, + .data = .{ + .lhs = members.lhs, + .rhs = members.rhs, + }, + }); + } else { + const span = try members.toSpan(p); + return p.addNode(.{ + .tag = switch (members.trailing_comma) { + true => .ContainerDeclComma, + false => .ContainerDecl, + }, + .main_token = main_token, + .data = .{ + .lhs = span.start, + .rhs = span.end, + }, + }); + } } else { + const span = try members.toSpan(p); return p.addNode(.{ - .tag = .ContainerDeclArg, + .tag = switch (members.trailing_comma) { + true => .ContainerDeclArgComma, + false => .ContainerDeclArg, + }, .main_token = main_token, .data = .{ .lhs = arg_expr, .rhs = try p.addExtra(Node.SubRange{ - .start = members.start, - .end = members.end, + .start = span.start, + .end = span.end, }), }, }); diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 8086525fef..345bce2654 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -149,45 +149,82 @@ test "zig fmt: nosuspend block" { ); } -//test "zig fmt: nosuspend await" { -// try testCanonical( -// \\fn foo() void { -// \\ x = nosuspend await y; -// \\} -// \\ -// ); -//} -// -//test "zig fmt: trailing comma in container declaration" { -// try testCanonical( -// \\const X = struct { foo: i32 }; -// \\const X = struct { foo: i32, bar: i32 }; -// \\const X = struct { foo: i32 = 1, bar: i32 = 2 }; -// \\const X = struct { foo: i32 align(4), bar: i32 align(4) }; -// \\const X = struct { foo: i32 align(4) = 1, bar: i32 align(4) = 2 }; -// \\ -// ); -// try testCanonical( -// \\test "" { -// \\ comptime { -// \\ const X = struct { -// \\ x: i32 -// \\ }; -// \\ } -// \\} -// \\ -// ); -// try testTransform( -// \\const X = struct { -// \\ foo: i32, bar: i8 }; -// , -// \\const X = 
struct { -// \\ foo: i32, bar: i8 -// \\}; -// \\ -// ); -//} -// +test "zig fmt: nosuspend await" { + try testCanonical( + \\fn foo() void { + \\ x = nosuspend await y; + \\} + \\ + ); +} + +test "zig fmt: container declaration, single line" { + try testCanonical( + \\const X = struct { foo: i32 }; + \\const X = struct { foo: i32, bar: i32 }; + \\const X = struct { foo: i32 = 1, bar: i32 = 2 }; + \\const X = struct { foo: i32 align(4), bar: i32 align(4) }; + \\const X = struct { foo: i32 align(4) = 1, bar: i32 align(4) = 2 }; + \\ + ); +} + +test "zig fmt: container declaration, one item, multi line trailing comma" { + try testCanonical( + \\test "" { + \\ comptime { + \\ const X = struct { + \\ x: i32, + \\ }; + \\ } + \\} + \\ + ); +} + +test "zig fmt: container declaration, no trailing comma on separate line" { + try testTransform( + \\test "" { + \\ comptime { + \\ const X = struct { + \\ x: i32 + \\ }; + \\ } + \\} + \\ + , + \\test "" { + \\ comptime { + \\ const X = struct { x: i32 }; + \\ } + \\} + \\ + ); +} + +test "zig fmt: container declaration, line break, no trailing comma" { + try testTransform( + \\const X = struct { + \\ foo: i32, bar: i8 }; + , + \\const X = struct { foo: i32, bar: i8 }; + \\ + ); +} + +test "zig fmt: container declaration, transform trailing comma" { + try testTransform( + \\const X = struct { + \\ foo: i32, bar: i8, }; + , + \\const X = struct { + \\ foo: i32, + \\ bar: i8, + \\}; + \\ + ); +} + //test "zig fmt: trailing comma in fn parameter list" { // try testCanonical( // \\pub fn f( diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index e8d8e2e54d..cc3cf855c3 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -68,7 +68,7 @@ fn renderRoot(ais: *Ais, tree: ast.Tree) Error!void { const root_decls = tree.extra_data[nodes_data[0].lhs..nodes_data[0].rhs]; for (root_decls) |decl| { - try renderContainerDecl(ais, tree, decl, .Newline); + try renderMember(ais, tree, decl, .Newline); } } @@ -84,7 +84,7 @@ 
fn renderExtraNewlineToken(ais: *Ais, tree: ast.Tree, first_token: ast.TokenInde } } -fn renderContainerDecl(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) Error!void { +fn renderMember(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) Error!void { const token_tags = tree.tokens.items(.tag); const main_tokens = tree.nodes.items(.main_token); const datas = tree.nodes.items(.data); @@ -158,6 +158,8 @@ fn renderContainerDecl(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: S .ContainerFieldAlign => return renderContainerField(ais, tree, tree.containerFieldAlign(decl), space), .ContainerField => return renderContainerField(ais, tree, tree.containerField(decl), space), .Comptime => return renderExpression(ais, tree, decl, space), + + .Root => unreachable, else => unreachable, } } @@ -195,7 +197,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // return renderToken(ais, tree, any_type.token, space); //}, .BlockTwo => { - var statements = [2]ast.Node.Index{ datas[node].lhs, datas[node].rhs }; + const statements = [2]ast.Node.Index{ datas[node].lhs, datas[node].rhs }; if (datas[node].lhs == 0) { return renderBlock(ais, tree, main_tokens[node], statements[0..0], space); } else if (datas[node].rhs == 0) { @@ -667,124 +669,29 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // return renderToken(ais, tree, grouped_expr.rparen, space); //}, - .ContainerDecl => unreachable, // TODO - .ContainerDeclArg => unreachable, // TODO - .TaggedUnion => unreachable, // TODO - .TaggedUnionEnumTag => unreachable, // TODO - //.ContainerDecl => { - // const container_decl = @fieldParentPtr(ast.Node.ContainerDecl, "base", base); + .ContainerDecl, + .ContainerDeclComma, + => return renderContainerDecl(ais, tree, tree.containerDecl(node), space), - // if (container_decl.layout_token) |layout_token| { - // try renderToken(ais, tree, layout_token, Space.Space); - // } + .ContainerDeclTwo, 
.ContainerDeclTwoComma => { + var buffer: [2]ast.Node.Index = undefined; + return renderContainerDecl(ais, tree, tree.containerDeclTwo(&buffer, node), space); + }, + .ContainerDeclArg, + .ContainerDeclArgComma, + => return renderContainerDecl(ais, tree, tree.containerDeclArg(node), space), - // switch (container_decl.init_arg_expr) { - // .None => { - // try renderToken(ais, tree, container_decl.kind_token, Space.Space); // union - // }, - // .Enum => |enum_tag_type| { - // try renderToken(ais, tree, container_decl.kind_token, Space.None); // union + .TaggedUnion, + .TaggedUnionComma, + => return renderContainerDecl(ais, tree, tree.taggedUnion(node), space), - // const lparen = tree.nextToken(container_decl.kind_token); - // const enum_token = tree.nextToken(lparen); - - // try renderToken(ais, tree, lparen, Space.None); // ( - // try renderToken(ais, tree, enum_token, Space.None); // enum - - // if (enum_tag_type) |expr| { - // try renderToken(ais, tree, tree.nextToken(enum_token), Space.None); // ( - // try renderExpression(ais, tree, expr, Space.None); - - // const rparen = tree.nextToken(expr.lastToken()); - // try renderToken(ais, tree, rparen, Space.None); // ) - // try renderToken(ais, tree, tree.nextToken(rparen), Space.Space); // ) - // } else { - // try renderToken(ais, tree, tree.nextToken(enum_token), Space.Space); // ) - // } - // }, - // .Type => |type_expr| { - // try renderToken(ais, tree, container_decl.kind_token, Space.None); // union - - // const lparen = tree.nextToken(container_decl.kind_token); - // const rparen = tree.nextToken(type_expr.lastToken()); - - // try renderToken(ais, tree, lparen, Space.None); // ( - // try renderExpression(ais, tree, type_expr, Space.None); - // try renderToken(ais, tree, rparen, Space.Space); // ) - // }, - // } - - // if (container_decl.fields_and_decls_len == 0) { - // { - // ais.pushIndentNextLine(); - // defer ais.popIndent(); - // try renderToken(ais, tree, container_decl.lbrace_token, Space.None); // 
lbrace - // } - // return renderToken(ais, tree, container_decl.rbrace_token, space); // rbrace - // } - - // const src_has_trailing_comma = blk: { - // var maybe_comma = tree.prevToken(container_decl.lastToken()); - // // Doc comments for a field may also appear after the comma, eg. - // // field_name: T, // comment attached to field_name - // if (tree.token_tags[maybe_comma] == .DocComment) - // maybe_comma = tree.prevToken(maybe_comma); - // break :blk tree.token_tags[maybe_comma] == .Comma; - // }; - - // const fields_and_decls = container_decl.fieldsAndDecls(); - - // // Check if the first declaration and the { are on the same line - // const src_has_newline = !tree.tokensOnSameLine( - // container_decl.lbrace_token, - // fields_and_decls[0].firstToken(), - // ); - - // // We can only print all the elements in-line if all the - // // declarations inside are fields - // const src_has_only_fields = blk: { - // for (fields_and_decls) |decl| { - // if (decl.tag != .ContainerField) break :blk false; - // } - // break :blk true; - // }; - - // if (src_has_trailing_comma or !src_has_only_fields) { - // // One declaration per line - // ais.pushIndentNextLine(); - // defer ais.popIndent(); - // try renderToken(ais, tree, container_decl.lbrace_token, .Newline); // lbrace - - // for (fields_and_decls) |decl, i| { - // try renderContainerDecl(allocator, ais, tree, decl, .Newline); - - // if (i + 1 < fields_and_decls.len) { - // try renderExtraNewline(ais, tree, fields_and_decls[i + 1]); - // } - // } - // } else if (src_has_newline) { - // // All the declarations on the same line, but place the items on - // // their own line - // try renderToken(ais, tree, container_decl.lbrace_token, .Newline); // lbrace - - // ais.pushIndent(); - // defer ais.popIndent(); - - // for (fields_and_decls) |decl, i| { - // const space_after_decl: Space = if (i + 1 >= fields_and_decls.len) .Newline else .Space; - // try renderContainerDecl(allocator, ais, tree, decl, space_after_decl); - // 
} - // } else { - // // All the declarations on the same line - // try renderToken(ais, tree, container_decl.lbrace_token, .Space); // lbrace - - // for (fields_and_decls) |decl| { - // try renderContainerDecl(allocator, ais, tree, decl, .Space); - // } - // } - - // return renderToken(ais, tree, container_decl.rbrace_token, space); // rbrace - //}, + .TaggedUnionTwo, .TaggedUnionTwoComma => { + var buffer: [2]ast.Node.Index = undefined; + return renderContainerDecl(ais, tree, tree.taggedUnionTwo(&buffer, node), space); + }, + .TaggedUnionEnumTag, + .TaggedUnionEnumTagComma, + => return renderContainerDecl(ais, tree, tree.taggedUnionEnumTag(node), space), .ErrorSetDecl => unreachable, // TODO //.ErrorSetDecl => { @@ -1949,6 +1856,94 @@ fn renderArrayInit( } } +fn renderContainerDecl( + ais: *Ais, + tree: ast.Tree, + container_decl: ast.Full.ContainerDecl, + space: Space, +) Error!void { + const token_tags = tree.tokens.items(.tag); + const node_tags = tree.nodes.items(.tag); + + if (container_decl.layout_token) |layout_token| { + try renderToken(ais, tree, layout_token, .Space); + } + + var lbrace: ast.TokenIndex = undefined; + if (container_decl.ast.enum_token) |enum_token| { + try renderToken(ais, tree, container_decl.ast.main_token, .None); // union + try renderToken(ais, tree, enum_token - 1, .None); // lparen + try renderToken(ais, tree, enum_token, .None); // enum + if (container_decl.ast.arg != 0) { + try renderToken(ais, tree, enum_token + 1, .None); // lparen + try renderExpression(ais, tree, container_decl.ast.arg, .None); + const rparen = tree.lastToken(container_decl.ast.arg) + 1; + try renderToken(ais, tree, rparen, .None); // rparen + try renderToken(ais, tree, rparen + 1, .Space); // rparen + lbrace = rparen + 2; + } else { + try renderToken(ais, tree, enum_token + 1, .Space); // rparen + lbrace = enum_token + 2; + } + } else if (container_decl.ast.arg != 0) { + try renderToken(ais, tree, container_decl.ast.main_token, .None); // union + try 
renderToken(ais, tree, container_decl.ast.main_token + 1, .None); // lparen + try renderExpression(ais, tree, container_decl.ast.arg, .None); + const rparen = tree.lastToken(container_decl.ast.arg) + 1; + try renderToken(ais, tree, rparen, .Space); // rparen + lbrace = rparen + 1; + } else { + try renderToken(ais, tree, container_decl.ast.main_token, .Space); // union + lbrace = container_decl.ast.main_token + 1; + } + + if (container_decl.ast.members.len == 0) { + try renderToken(ais, tree, lbrace, Space.None); // lbrace + return renderToken(ais, tree, lbrace + 1, space); // rbrace + } + + const last_member = container_decl.ast.members[container_decl.ast.members.len - 1]; + const last_member_token = tree.lastToken(last_member); + const rbrace = switch (token_tags[last_member_token + 1]) { + .DocComment => last_member_token + 2, + .Comma => switch (token_tags[last_member_token + 2]) { + .DocComment => last_member_token + 3, + .RBrace => last_member_token + 2, + else => unreachable, + }, + .RBrace => last_member_token + 1, + else => unreachable, + }; + const src_has_trailing_comma = token_tags[last_member_token + 1] == .Comma; + + if (!src_has_trailing_comma) one_line: { + // We can only print all the members in-line if all the members are fields. + for (container_decl.ast.members) |member| { + if (!node_tags[member].isContainerField()) break :one_line; + } + // All the declarations on the same line. + try renderToken(ais, tree, lbrace, .Space); // lbrace + for (container_decl.ast.members) |member| { + try renderMember(ais, tree, member, .Space); + } + return renderToken(ais, tree, rbrace, space); // rbrace + } + + // One member per line. 
+ ais.pushIndent(); + try renderToken(ais, tree, lbrace, .Newline); // lbrace + for (container_decl.ast.members) |member, i| { + try renderMember(ais, tree, member, .Newline); + + if (i + 1 < container_decl.ast.members.len) { + try renderExtraNewline(ais, tree, container_decl.ast.members[i + 1]); + } + } + ais.popIndent(); + + return renderToken(ais, tree, rbrace, space); // rbrace +} + /// Render an expression, and the comma that follows it, if it is present in the source. fn renderExpressionComma(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Space) Error!void { const token_tags = tree.tokens.items(.tag); From 409ca8882939418b3d4cbd4be7a18daf1d4833aa Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Fri, 5 Feb 2021 17:46:15 -0700 Subject: [PATCH 020/173] zig fmt: trailing comma in fn parameter list --- lib/std/zig/ast.zig | 4 +- lib/std/zig/parser_test.zig | 76 ++++++++++++++++++------------------- lib/std/zig/render.zig | 46 +++++++++++++--------- 3 files changed, 69 insertions(+), 57 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 9db1bc3b19..f875c5a93d 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -231,7 +231,6 @@ pub const Tree = struct { .NullLiteral, .UndefinedLiteral, .UnreachableLiteral, - .EnumLiteral, .StringLiteral, .GroupedExpression, .BuiltinCallTwo, @@ -256,6 +255,7 @@ pub const Tree = struct { .StructInitDot, .StructInitDotTwo, .StructInitDotTwoComma, + .EnumLiteral, => return main_tokens[n] - 1, .Catch, @@ -494,6 +494,7 @@ pub const Tree = struct { .UnreachableLiteral, .Identifier, .Deref, + .EnumLiteral, => return main_tokens[n] + end_offset, .Call, @@ -682,7 +683,6 @@ pub const Tree = struct { .Switch => unreachable, // TODO .If => unreachable, // TODO .Continue => unreachable, // TODO - .EnumLiteral => unreachable, // TODO .ErrorSetDecl => unreachable, // TODO .AsmSimple => unreachable, // TODO .Asm => unreachable, // TODO diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig 
index 345bce2654..e46cbf09d9 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -225,44 +225,44 @@ test "zig fmt: container declaration, transform trailing comma" { ); } -//test "zig fmt: trailing comma in fn parameter list" { -// try testCanonical( -// \\pub fn f( -// \\ a: i32, -// \\ b: i32, -// \\) i32 {} -// \\pub fn f( -// \\ a: i32, -// \\ b: i32, -// \\) align(8) i32 {} -// \\pub fn f( -// \\ a: i32, -// \\ b: i32, -// \\) linksection(".text") i32 {} -// \\pub fn f( -// \\ a: i32, -// \\ b: i32, -// \\) callconv(.C) i32 {} -// \\pub fn f( -// \\ a: i32, -// \\ b: i32, -// \\) align(8) linksection(".text") i32 {} -// \\pub fn f( -// \\ a: i32, -// \\ b: i32, -// \\) align(8) callconv(.C) i32 {} -// \\pub fn f( -// \\ a: i32, -// \\ b: i32, -// \\) align(8) linksection(".text") callconv(.C) i32 {} -// \\pub fn f( -// \\ a: i32, -// \\ b: i32, -// \\) linksection(".text") callconv(.C) i32 {} -// \\ -// ); -//} -// +test "zig fmt: trailing comma in fn parameter list" { + try testCanonical( + \\pub fn f( + \\ a: i32, + \\ b: i32, + \\) i32 {} + \\pub fn f( + \\ a: i32, + \\ b: i32, + \\) align(8) i32 {} + \\pub fn f( + \\ a: i32, + \\ b: i32, + \\) linksection(".text") i32 {} + \\pub fn f( + \\ a: i32, + \\ b: i32, + \\) callconv(.C) i32 {} + \\pub fn f( + \\ a: i32, + \\ b: i32, + \\) align(8) linksection(".text") i32 {} + \\pub fn f( + \\ a: i32, + \\ b: i32, + \\) align(8) callconv(.C) i32 {} + \\pub fn f( + \\ a: i32, + \\ b: i32, + \\) align(8) linksection(".text") callconv(.C) i32 {} + \\pub fn f( + \\ a: i32, + \\ b: i32, + \\) linksection(".text") callconv(.C) i32 {} + \\ + ); +} + //test "zig fmt: comptime struct field" { // try testCanonical( // \\const Foo = struct { diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index cc3cf855c3..274e181e0e 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -1138,13 +1138,10 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // 
return renderToken(ais, tree, asm_node.rparen, space); //}, - .EnumLiteral => unreachable, // TODO - //.EnumLiteral => { - // const enum_literal = @fieldParentPtr(ast.Node.EnumLiteral, "base", base); - - // try renderToken(ais, tree, enum_literal.dot, Space.None); // . - // return renderToken(ais, tree, enum_literal.name, space); // name - //}, + .EnumLiteral => { + try renderToken(ais, tree, main_tokens[node] - 1, .None); // . + return renderToken(ais, tree, main_tokens[node], space); // name + }, .FnDecl => unreachable, .ContainerField => unreachable, @@ -1538,6 +1535,7 @@ fn renderBuiltinCall( fn renderFnProto(ais: *Ais, tree: ast.Tree, fn_proto: ast.Full.FnProto, space: Space) Error!void { const token_tags = tree.tokens.items(.tag); + const token_starts = tree.tokens.items(.start); const after_fn_token = fn_proto.ast.fn_token + 1; const lparen = if (token_tags[after_fn_token] == .Identifier) blk: { @@ -1552,21 +1550,35 @@ fn renderFnProto(ais: *Ais, tree: ast.Tree, fn_proto: ast.Full.FnProto, space: S const maybe_bang = tree.firstToken(fn_proto.ast.return_type) - 1; const rparen = blk: { - // The first token for the annotation expressions is the left - // parenthesis, hence the need for two previous tokens. + // These may appear in any order, so we have to check the token_starts array + // to find out which is first. 
+ var rparen: ast.TokenIndex = maybe_bang; + var smallest_start = token_starts[maybe_bang]; if (fn_proto.ast.align_expr != 0) { - break :blk tree.firstToken(fn_proto.ast.align_expr) - 3; + const tok = tree.firstToken(fn_proto.ast.align_expr) - 3; + const start = token_starts[tok]; + if (start < smallest_start) { + rparen = tok; + smallest_start = start; + } } if (fn_proto.ast.section_expr != 0) { - break :blk tree.firstToken(fn_proto.ast.section_expr) - 3; + const tok = tree.firstToken(fn_proto.ast.section_expr) - 3; + const start = token_starts[tok]; + if (start < smallest_start) { + rparen = tok; + smallest_start = start; + } } if (fn_proto.ast.callconv_expr != 0) { - break :blk tree.firstToken(fn_proto.ast.callconv_expr) - 3; + const tok = tree.firstToken(fn_proto.ast.callconv_expr) - 3; + const start = token_starts[tok]; + if (start < smallest_start) { + rparen = tok; + smallest_start = start; + } } - if (token_tags[maybe_bang] == .Bang) { - break :blk maybe_bang - 1; - } - break :blk maybe_bang; + break :blk rparen; }; assert(token_tags[rparen] == .RParen); @@ -1663,7 +1675,7 @@ fn renderFnProto(ais: *Ais, tree: ast.Tree, fn_proto: ast.Full.FnProto, space: S const param = fn_proto.ast.params[param_i]; param_i += 1; try renderExpression(ais, tree, param, .Comma); - last_param_token = tree.lastToken(param) + 2; + last_param_token = tree.lastToken(param) + 1; } ais.popIndent(); } From d898945786b527b09ef056099e923e946425e146 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Fri, 5 Feb 2021 20:38:30 -0700 Subject: [PATCH 021/173] zig fmt: builtin call with trailing comma --- lib/std/zig/ast.zig | 23 +++++++++----- lib/std/zig/parse.zig | 38 +++++++++++++--------- lib/std/zig/parser_test.zig | 63 +++++++++++++++++-------------------- lib/std/zig/render.zig | 24 ++++++++------ 4 files changed, 84 insertions(+), 64 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index f875c5a93d..5d65fc5ae6 100644 --- a/lib/std/zig/ast.zig +++ 
b/lib/std/zig/ast.zig @@ -234,7 +234,9 @@ pub const Tree = struct { .StringLiteral, .GroupedExpression, .BuiltinCallTwo, + .BuiltinCallTwoComma, .BuiltinCall, + .BuiltinCallComma, .ErrorSetDecl, .AnyType, .Comptime, @@ -474,6 +476,7 @@ pub const Tree = struct { .ErrorUnion, .IfSimple, .WhileSimple, + .FnDecl, => n = datas[n].rhs, .FieldAccess, @@ -497,9 +500,7 @@ pub const Tree = struct { .EnumLiteral, => return main_tokens[n] + end_offset, - .Call, - .BuiltinCall, - => { + .Call => { end_offset += 1; // for the rparen const params = tree.extraData(datas[n].rhs, Node.SubRange); if (params.end - params.start == 0) { @@ -526,6 +527,7 @@ pub const Tree = struct { .Block, .ContainerDecl, .TaggedUnion, + .BuiltinCall, => { end_offset += 1; // for the rbrace if (datas[n].rhs - datas[n].lhs == 0) { @@ -533,9 +535,12 @@ pub const Tree = struct { } n = tree.extra_data[datas[n].rhs - 1]; // last statement }, - .ContainerDeclComma, .TaggedUnionComma => { + .ContainerDeclComma, + .TaggedUnionComma, + .BuiltinCallComma, + => { assert(datas[n].rhs - datas[n].lhs > 0); - end_offset += 2; // for the comma + rbrace + end_offset += 2; // for the comma + rbrace/rparen n = tree.extra_data[datas[n].rhs - 1]; // last member }, .CallOne, @@ -565,11 +570,12 @@ pub const Tree = struct { } }, .ArrayInitDotTwoComma, + .BuiltinCallTwoComma, .StructInitDotTwoComma, .ContainerDeclTwoComma, .TaggedUnionTwoComma, => { - end_offset += 2; // for the comma + rbrace + end_offset += 2; // for the comma + rbrace/rparen if (datas[n].rhs != 0) { n = datas[n].rhs; } else if (datas[n].lhs != 0) { @@ -690,7 +696,6 @@ pub const Tree = struct { .Slice => unreachable, // TODO .SwitchCaseOne => unreachable, // TODO .SwitchRange => unreachable, // TODO - .FnDecl => unreachable, // TODO .ArrayType => unreachable, // TODO .ArrayTypeSentinel => unreachable, // TODO .PtrTypeAligned => unreachable, // TODO @@ -1836,8 +1841,12 @@ pub const Node = struct { GroupedExpression, /// `@a(lhs, rhs)`. 
lhs and rhs may be omitted. BuiltinCallTwo, + /// Same as BuiltinCallTwo but there is known to be a trailing comma before the rparen. + BuiltinCallTwoComma, /// `@a(b, c)`. `sub_list[lhs..rhs]`. BuiltinCall, + /// Same as BuiltinCall but there is known to be a trailing comma before the rparen. + BuiltinCallComma, /// `error{a, b}`. /// lhs and rhs both unused. ErrorSetDecl, diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig index 35b5083562..1143c9f9c0 100644 --- a/lib/std/zig/parse.zig +++ b/lib/std/zig/parse.zig @@ -3676,7 +3676,6 @@ const Parser = struct { /// FnCallArguments <- LPAREN ExprList RPAREN /// ExprList <- (Expr COMMA)* Expr? - /// TODO detect when we can emit BuiltinCallTwo instead of BuiltinCall. fn parseBuiltinCall(p: *Parser) !Node.Index { const builtin_token = p.assertToken(.Builtin); _ = (try p.expectTokenRecoverable(.LParen)) orelse { @@ -3708,7 +3707,7 @@ const Parser = struct { .Comma => { if (p.eatToken(.RParen)) |_| { return p.addNode(.{ - .tag = .BuiltinCallTwo, + .tag = .BuiltinCallTwoComma, .main_token = builtin_token, .data = .{ .lhs = param_one, @@ -3739,7 +3738,7 @@ const Parser = struct { .Comma => { if (p.eatToken(.RParen)) |_| { return p.addNode(.{ - .tag = .BuiltinCallTwo, + .tag = .BuiltinCallTwoComma, .main_token = builtin_token, .data = .{ .lhs = param_one, @@ -3776,10 +3775,30 @@ const Parser = struct { try list.append(param); switch (p.token_tags[p.nextToken()]) { .Comma => { - if (p.eatToken(.RParen)) |_| break; + if (p.eatToken(.RParen)) |_| { + const params = try p.listToSpan(list.items); + return p.addNode(.{ + .tag = .BuiltinCallComma, + .main_token = builtin_token, + .data = .{ + .lhs = params.start, + .rhs = params.end, + }, + }); + } continue; }, - .RParen => break, + .RParen => { + const params = try p.listToSpan(list.items); + return p.addNode(.{ + .tag = .BuiltinCall, + .main_token = builtin_token, + .data = .{ + .lhs = params.start, + .rhs = params.end, + }, + }); + }, else => { // This is likely just a 
missing comma; // give an error but continue parsing this list. @@ -3790,15 +3809,6 @@ const Parser = struct { }, } } - const params = try p.listToSpan(list.items); - return p.addNode(.{ - .tag = .BuiltinCall, - .main_token = builtin_token, - .data = .{ - .lhs = params.start, - .rhs = params.end, - }, - }); } // string literal or multiline string literal diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index e46cbf09d9..65843f9b5f 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -263,38 +263,38 @@ test "zig fmt: trailing comma in fn parameter list" { ); } -//test "zig fmt: comptime struct field" { -// try testCanonical( -// \\const Foo = struct { -// \\ a: i32, -// \\ comptime b: i32 = 1234, -// \\}; -// \\ -// ); -//} -// +test "zig fmt: comptime struct field" { + try testCanonical( + \\const Foo = struct { + \\ a: i32, + \\ comptime b: i32 = 1234, + \\}; + \\ + ); +} + //test "zig fmt: c pointer type" { // try testCanonical( // \\pub extern fn repro() [*c]const u8; // \\ // ); //} -// -//test "zig fmt: builtin call with trailing comma" { -// try testCanonical( -// \\pub fn main() void { -// \\ @breakpoint(); -// \\ _ = @boolToInt(a); -// \\ _ = @call( -// \\ a, -// \\ b, -// \\ c, -// \\ ); -// \\} -// \\ -// ); -//} -// + +test "zig fmt: builtin call with trailing comma" { + try testCanonical( + \\pub fn main() void { + \\ @breakpoint(); + \\ _ = @boolToInt(a); + \\ _ = @call( + \\ a, + \\ b, + \\ c, + \\ ); + \\} + \\ + ); +} + //test "zig fmt: asm expression with comptime content" { // try testCanonical( // \\comptime { @@ -3988,14 +3988,9 @@ fn testParse(source: []const u8, allocator: *mem.Allocator, anything_changed: *b return error.ParseError; } - var buffer = std.ArrayList(u8).init(allocator); - errdefer buffer.deinit(); - - const writer = buffer.writer(); - try std.zig.render(allocator, writer, tree); - const result = buffer.toOwnedSlice(); - anything_changed.* = !mem.eql(u8, result, source); - return 
result; + const formatted = try std.zig.render(allocator, tree); + anything_changed.* = !mem.eql(u8, formatted, source); + return formatted; } fn testTransform(source: []const u8, expected_source: []const u8) !void { const needed_alloc_count = x: { diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 274e181e0e..d873676d53 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -22,13 +22,19 @@ pub const Error = error{ const Writer = std.ArrayList(u8).Writer; const Ais = std.io.AutoIndentingStream(Writer); -/// Returns whether anything changed. -/// `gpa` is used for allocating extra stack memory if needed, because -/// this function utilizes recursion. -pub fn render(gpa: *mem.Allocator, writer: Writer, tree: ast.Tree) Error!void { - assert(tree.errors.len == 0); // cannot render an invalid tree - var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, writer); - return renderRoot(&auto_indenting_stream, tree); +/// `gpa` is used both for allocating the resulting formatted source code, but also +/// for allocating extra stack memory if needed, because this function utilizes recursion. +/// Note: that's not actually true yet, see https://github.com/ziglang/zig/issues/1006. +/// Caller owns the returned slice of bytes, allocated with `gpa`. +pub fn render(gpa: *mem.Allocator, tree: ast.Tree) Error![]u8 { + assert(tree.errors.len == 0); // Cannot render an invalid tree. + + var buffer = std.ArrayList(u8).init(gpa); + defer buffer.deinit(); + + var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, buffer.writer()); + try renderRoot(&auto_indenting_stream, tree); + return buffer.toOwnedSlice(); } /// Assumes there are no tokens in between start and end. 
@@ -770,7 +776,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // } //}, - .BuiltinCallTwo => { + .BuiltinCallTwo, .BuiltinCallTwoComma => { if (datas[node].lhs == 0) { const params = [_]ast.Node.Index{}; return renderBuiltinCall(ais, tree, main_tokens[node], ¶ms, space); @@ -782,7 +788,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac return renderBuiltinCall(ais, tree, main_tokens[node], ¶ms, space); } }, - .BuiltinCall => { + .BuiltinCall, .BuiltinCallComma => { const params = tree.extra_data[datas[node].lhs..datas[node].rhs]; return renderBuiltinCall(ais, tree, main_tokens[node], params, space); }, From 33915cb1ed88417d3495c160ffd15c93f9197e5b Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Sat, 6 Feb 2021 22:55:29 +0100 Subject: [PATCH 022/173] zig fmt: implement pointer types rename PtrType => PtrTypeBitRange, SliceType => PtrType This rename was done as the current SliceType is used for non-bitrange pointers as well as slices and because PtrTypeSentinel/PtrTypeAligned are also used for slices. Therefore using the same Ptr prefix for all these pointer/slice nodes is an improvement. 
--- lib/std/zig/ast.zig | 184 +++++++++++++++++++++++++++++++--- lib/std/zig/parse.zig | 20 ++-- lib/std/zig/parser_test.zig | 54 +++++++++- lib/std/zig/render.zig | 190 +++++++++++++++--------------------- 4 files changed, 308 insertions(+), 140 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 5d65fc5ae6..0fdcf0a4d6 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -387,10 +387,22 @@ pub const Tree = struct { } }, - .PtrTypeAligned => unreachable, // TODO - .PtrTypeSentinel => unreachable, // TODO - .PtrType => unreachable, // TODO - .SliceType => unreachable, // TODO + .PtrTypeAligned, + .PtrTypeSentinel, + .PtrType, + .PtrTypeBitRange, + => { + const main_token = main_tokens[n]; + return switch (token_tags[main_token]) { + .Asterisk => switch (token_tags[main_token - 1]) { + .LBrace => main_token - 1, + else => main_token, + }, + .LBrace => main_token, + else => unreachable, + }; + }, + .SwitchCaseMulti => unreachable, // TODO .WhileSimple => unreachable, // TODO .WhileCont => unreachable, // TODO @@ -477,6 +489,10 @@ pub const Tree = struct { .IfSimple, .WhileSimple, .FnDecl, + .PtrTypeAligned, + .PtrTypeSentinel, + .PtrType, + .PtrTypeBitRange, => n = datas[n].rhs, .FieldAccess, @@ -698,10 +714,6 @@ pub const Tree = struct { .SwitchRange => unreachable, // TODO .ArrayType => unreachable, // TODO .ArrayTypeSentinel => unreachable, // TODO - .PtrTypeAligned => unreachable, // TODO - .PtrTypeSentinel => unreachable, // TODO - .PtrType => unreachable, // TODO - .SliceType => unreachable, // TODO .SwitchCaseMulti => unreachable, // TODO .WhileCont => unreachable, // TODO .While => unreachable, // TODO @@ -1028,6 +1040,60 @@ pub const Tree = struct { }; } + pub fn ptrTypeAligned(tree: Tree, node: Node.Index) Full.PtrType { + assert(tree.nodes.items(.tag)[node] == .PtrTypeAligned); + const data = tree.nodes.items(.data)[node]; + return tree.fullPtrType(.{ + .main_token = tree.nodes.items(.main_token)[node], + .align_node = 
data.lhs, + .sentinel = 0, + .bit_range_start = 0, + .bit_range_end = 0, + .child_type = data.rhs, + }); + } + + pub fn ptrTypeSentinel(tree: Tree, node: Node.Index) Full.PtrType { + assert(tree.nodes.items(.tag)[node] == .PtrTypeSentinel); + const data = tree.nodes.items(.data)[node]; + return tree.fullPtrType(.{ + .main_token = tree.nodes.items(.main_token)[node], + .align_node = 0, + .sentinel = data.lhs, + .bit_range_start = 0, + .bit_range_end = 0, + .child_type = data.rhs, + }); + } + + pub fn ptrType(tree: Tree, node: Node.Index) Full.PtrType { + assert(tree.nodes.items(.tag)[node] == .PtrType); + const data = tree.nodes.items(.data)[node]; + const extra = tree.extraData(data.lhs, Node.PtrType); + return tree.fullPtrType(.{ + .main_token = tree.nodes.items(.main_token)[node], + .align_node = extra.align_node, + .sentinel = extra.sentinel, + .bit_range_start = 0, + .bit_range_end = 0, + .child_type = data.rhs, + }); + } + + pub fn ptrTypeBitRange(tree: Tree, node: Node.Index) Full.PtrType { + assert(tree.nodes.items(.tag)[node] == .PtrTypeBitRange); + const data = tree.nodes.items(.data)[node]; + const extra = tree.extraData(data.lhs, Node.PtrTypeBitRange); + return tree.fullPtrType(.{ + .main_token = tree.nodes.items(.main_token)[node], + .align_node = extra.align_node, + .sentinel = extra.sentinel, + .bit_range_start = extra.bit_range_start, + .bit_range_end = extra.bit_range_end, + .child_type = data.rhs, + }); + } + pub fn containerDeclTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) Full.ContainerDecl { assert(tree.nodes.items(.tag)[node] == .ContainerDeclTwo or tree.nodes.items(.tag)[node] == .ContainerDeclTwoComma); @@ -1195,6 +1261,64 @@ pub const Tree = struct { return result; } + fn fullPtrType(tree: Tree, info: Full.PtrType.Ast) Full.PtrType { + const token_tags = tree.tokens.items(.tag); + // TODO: looks like stage1 isn't quite smart enough to handle enum + // literals in some places here + const Kind = Full.PtrType.Kind; + const kind: 
Kind = switch (token_tags[info.main_token]) { + .Asterisk => switch (token_tags[info.main_token + 1]) { + .RBracket => .many, + .Colon => .sentinel, + .Identifier => if (token_tags[info.main_token - 1] == .LBracket) Kind.c else .one, + else => .one, + }, + .LBracket => switch (token_tags[info.main_token + 1]) { + .RBracket => Kind.slice, + .Colon => .slice_sentinel, + else => unreachable, + }, + else => unreachable, + }; + var result: Full.PtrType = .{ + .kind = kind, + .allowzero_token = null, + .const_token = null, + .volatile_token = null, + .ast = info, + }; + // We need to be careful that we don't iterate over any sub-expressions + // here while looking for modifiers as that could result in false + // positives. Therefore, start after a sentinel if there is one and + // skip over any align node and bit range nodes. + var i = if (kind == .sentinel or kind == .slice_sentinel) blk: { + assert(info.sentinel != 0); + break :blk tree.lastToken(info.sentinel) + 1; + } else blk: { + assert(info.sentinel == 0); + break :blk info.main_token; + }; + const end = tree.firstToken(info.child_type); + while (i < end) : (i += 1) { + switch (token_tags[i]) { + .Keyword_allowzero => result.allowzero_token = i, + .Keyword_const => result.const_token = i, + .Keyword_volatile => result.volatile_token = i, + .Keyword_align => { + assert(info.align_node != 0); + if (info.bit_range_end != 0) { + assert(info.bit_range_start != 0); + i = tree.lastToken(info.bit_range_end) + 1; + } else { + i = tree.lastToken(info.align_node) + 1; + } + }, + else => {}, + } + } + return result; + } + fn fullContainerDecl(tree: Tree, info: Full.ContainerDecl.Ast) Full.ContainerDecl { const token_tags = tree.tokens.items(.tag); var result: Full.ContainerDecl = .{ @@ -1302,6 +1426,32 @@ pub const Full = struct { }; }; + pub const PtrType = struct { + kind: Kind, + allowzero_token: ?TokenIndex, + const_token: ?TokenIndex, + volatile_token: ?TokenIndex, + ast: Ast, + + pub const Kind = enum { + one, + many, + 
sentinel, + c, + slice, + slice_sentinel, + }; + + pub const Ast = struct { + main_token: TokenIndex, + align_node: Node.Index, + sentinel: Node.Index, + bit_range_start: Node.Index, + bit_range_end: Node.Index, + child_type: Node.Index, + }; + }; + pub const ContainerDecl = struct { layout_token: ?TokenIndex, ast: Ast, @@ -1696,16 +1846,19 @@ pub const Node = struct { /// `[*]align(lhs) rhs`. lhs can be omitted. /// `*align(lhs) rhs`. lhs can be omitted. /// `[]rhs`. + /// main_token is the asterisk if a pointer or the lbrace if a slice PtrTypeAligned, /// `[*:lhs]rhs`. lhs can be omitted. /// `*rhs`. /// `[:lhs]rhs`. + /// main_token is the asterisk if a pointer or the lbrace if a slice PtrTypeSentinel, /// lhs is index into PtrType. rhs is the element type expression. + /// main_token is the asterisk if a pointer or the lbrace if a slice PtrType, - /// lhs is index into SliceType. rhs is the element type expression. - /// Can be pointer or slice, depending on main_token. - SliceType, + /// lhs is index into PtrTypeBitRange. rhs is the element type expression. + /// main_token is the asterisk if a pointer or the lbrace if a slice + PtrTypeBitRange, /// `lhs[rhs..]` /// main_token is the `[`. SliceOpen, @@ -1954,14 +2107,15 @@ pub const Node = struct { pub const PtrType = struct { sentinel: Index, align_node: Index, + }; + + pub const PtrTypeBitRange = struct { + sentinel: Index, + align_node: Index, bit_range_start: Index, bit_range_end: Index, }; - pub const SliceType = struct { - sentinel: Index, - align_node: Index, - }; pub const SubRange = struct { /// Index into sub_list. 
start: Index, diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig index 1143c9f9c0..c6cec8a195 100644 --- a/lib/std/zig/parse.zig +++ b/lib/std/zig/parse.zig @@ -1618,10 +1618,10 @@ const Parser = struct { }); } else { return p.addNode(.{ - .tag = .PtrType, + .tag = .PtrTypeBitRange, .main_token = asterisk, .data = .{ - .lhs = try p.addExtra(Node.PtrType{ + .lhs = try p.addExtra(Node.PtrTypeBitRange{ .sentinel = 0, .align_node = mods.align_node, .bit_range_start = mods.bit_range_start, @@ -1648,10 +1648,10 @@ const Parser = struct { }); } else { break :inner try p.addNode(.{ - .tag = .PtrType, + .tag = .PtrTypeBitRange, .main_token = asterisk, .data = .{ - .lhs = try p.addExtra(Node.PtrType{ + .lhs = try p.addExtra(Node.PtrTypeBitRange{ .sentinel = 0, .align_node = mods.align_node, .bit_range_start = mods.bit_range_start, @@ -1713,10 +1713,10 @@ const Parser = struct { }); } else { return p.addNode(.{ - .tag = .SliceType, + .tag = .PtrType, .main_token = asterisk, .data = .{ - .lhs = try p.addExtra(.{ + .lhs = try p.addExtra(Node.PtrType{ .sentinel = sentinel, .align_node = mods.align_node, }), @@ -1726,10 +1726,10 @@ const Parser = struct { } } else { return p.addNode(.{ - .tag = .PtrType, + .tag = .PtrTypeBitRange, .main_token = asterisk, .data = .{ - .lhs = try p.addExtra(.{ + .lhs = try p.addExtra(Node.PtrTypeBitRange{ .sentinel = sentinel, .align_node = mods.align_node, .bit_range_start = mods.bit_range_start, @@ -1777,10 +1777,10 @@ const Parser = struct { }); } else { return p.addNode(.{ - .tag = .SliceType, + .tag = .PtrType, .main_token = lbracket, .data = .{ - .lhs = try p.addExtra(.{ + .lhs = try p.addExtra(Node.PtrType{ .sentinel = sentinel, .align_node = mods.align_node, }), diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 65843f9b5f..95299b8d94 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -345,7 +345,59 @@ test "zig fmt: builtin call with trailing comma" { // \\ // ); //} -// + +test 
"zig fmt: pointer-to-one with modifiers" { + try testCanonical( + \\const x: *u32 = undefined; + \\const y: *allowzero align(8) const volatile u32 = undefined; + \\const z: *allowzero align(8:4:2) const volatile u32 = undefined; + \\ + ); +} + +test "zig fmt: pointer-to-many with modifiers" { + try testCanonical( + \\const x: [*]u32 = undefined; + \\const y: [*]allowzero align(8) const volatile u32 = undefined; + \\const z: [*]allowzero align(8:4:2) const volatile u32 = undefined; + \\ + ); +} + +test "zig fmt: sentinel pointer with modifiers" { + try testCanonical( + \\const x: [*:42]u32 = undefined; + \\const y: [*:42]allowzero align(8) const volatile u32 = undefined; + \\const y: [*:42]allowzero align(8:4:2) const volatile u32 = undefined; + \\ + ); +} + +test "zig fmt: c pointer with modifiers" { + try testCanonical( + \\const x: [*c]u32 = undefined; + \\const y: [*c]allowzero align(8) const volatile u32 = undefined; + \\const z: [*c]allowzero align(8:4:2) const volatile u32 = undefined; + \\ + ); +} + +test "zig fmt: slice with modifiers" { + try testCanonical( + \\const x: []u32 = undefined; + \\const y: []allowzero align(8) const volatile u32 = undefined; + \\ + ); +} + +test "zig fmt: sentinel slice with modifiers" { + try testCanonical( + \\const x: [:42]u32 = undefined; + \\const y: [:42]allowzero align(8) const volatile u32 = undefined; + \\ + ); +} + //test "zig fmt: anon literal in array" { // try testCanonical( // \\var arr: [2]Foo = .{ diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index d873676d53..ba2158ff4e 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -370,120 +370,10 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac .ArrayType => return renderArrayType(ais, tree, tree.arrayType(node), space), .ArrayTypeSentinel => return renderArrayType(ais, tree, tree.arrayTypeSentinel(node), space), - .PtrType => unreachable, // TODO - .PtrTypeAligned => unreachable, // TODO - 
.PtrTypeSentinel => unreachable, // TODO - //.PtrType => { - // const ptr_type = @fieldParentPtr(ast.Node.PtrType, "base", base); - // const op_tok_id = tree.token_tags[ptr_type.op_token]; - // switch (op_tok_id) { - // .Asterisk, .AsteriskAsterisk => try ais.writer().writeByte('*'), - // .LBracket => if (tree.token_tags[ptr_type.op_token + 2] == .Identifier) - // try ais.writer().writeAll("[*c") - // else - // try ais.writer().writeAll("[*"), - // else => unreachable, - // } - // if (ptr_type.ptr_info.sentinel) |sentinel| { - // const colon_token = tree.prevToken(sentinel.firstToken()); - // try renderToken(ais, tree, colon_token, Space.None); // : - // const sentinel_space = switch (op_tok_id) { - // .LBracket => Space.None, - // else => Space.Space, - // }; - // try renderExpression(ais, tree, sentinel, sentinel_space); - // } - // switch (op_tok_id) { - // .Asterisk, .AsteriskAsterisk => {}, - // .LBracket => try ais.writer().writeByte(']'), - // else => unreachable, - // } - // if (ptr_type.ptr_info.allowzero_token) |allowzero_token| { - // try renderToken(ais, tree, allowzero_token, Space.Space); // allowzero - // } - // if (ptr_type.ptr_info.align_info) |align_info| { - // const lparen_token = tree.prevToken(align_info.node.firstToken()); - // const align_token = tree.prevToken(lparen_token); - - // try renderToken(ais, tree, align_token, Space.None); // align - // try renderToken(ais, tree, lparen_token, Space.None); // ( - - // try renderExpression(ais, tree, align_info.node, Space.None); - - // if (align_info.bit_range) |bit_range| { - // const colon1 = tree.prevToken(bit_range.start.firstToken()); - // const colon2 = tree.prevToken(bit_range.end.firstToken()); - - // try renderToken(ais, tree, colon1, Space.None); // : - // try renderExpression(ais, tree, bit_range.start, Space.None); - // try renderToken(ais, tree, colon2, Space.None); // : - // try renderExpression(ais, tree, bit_range.end, Space.None); - - // const rparen_token = 
tree.nextToken(bit_range.end.lastToken()); - // try renderToken(ais, tree, rparen_token, Space.Space); // ) - // } else { - // const rparen_token = tree.nextToken(align_info.node.lastToken()); - // try renderToken(ais, tree, rparen_token, Space.Space); // ) - // } - // } - // if (ptr_type.ptr_info.const_token) |const_token| { - // try renderToken(ais, tree, const_token, Space.Space); // const - // } - // if (ptr_type.ptr_info.volatile_token) |volatile_token| { - // try renderToken(ais, tree, volatile_token, Space.Space); // volatile - // } - // return renderExpression(ais, tree, ptr_type.rhs, space); - //}, - - .SliceType => unreachable, // TODO - //.SliceType => { - // const slice_type = @fieldParentPtr(ast.Node.SliceType, "base", base); - // try renderToken(ais, tree, slice_type.op_token, Space.None); // [ - // if (slice_type.ptr_info.sentinel) |sentinel| { - // const colon_token = tree.prevToken(sentinel.firstToken()); - // try renderToken(ais, tree, colon_token, Space.None); // : - // try renderExpression(ais, tree, sentinel, Space.None); - // try renderToken(ais, tree, tree.nextToken(sentinel.lastToken()), Space.None); // ] - // } else { - // try renderToken(ais, tree, tree.nextToken(slice_type.op_token), Space.None); // ] - // } - - // if (slice_type.ptr_info.allowzero_token) |allowzero_token| { - // try renderToken(ais, tree, allowzero_token, Space.Space); // allowzero - // } - // if (slice_type.ptr_info.align_info) |align_info| { - // const lparen_token = tree.prevToken(align_info.node.firstToken()); - // const align_token = tree.prevToken(lparen_token); - - // try renderToken(ais, tree, align_token, Space.None); // align - // try renderToken(ais, tree, lparen_token, Space.None); // ( - - // try renderExpression(ais, tree, align_info.node, Space.None); - - // if (align_info.bit_range) |bit_range| { - // const colon1 = tree.prevToken(bit_range.start.firstToken()); - // const colon2 = tree.prevToken(bit_range.end.firstToken()); - - // try renderToken(ais, 
tree, colon1, Space.None); // : - // try renderExpression(ais, tree, bit_range.start, Space.None); - // try renderToken(ais, tree, colon2, Space.None); // : - // try renderExpression(ais, tree, bit_range.end, Space.None); - - // const rparen_token = tree.nextToken(bit_range.end.lastToken()); - // try renderToken(ais, tree, rparen_token, Space.Space); // ) - // } else { - // const rparen_token = tree.nextToken(align_info.node.lastToken()); - // try renderToken(ais, tree, rparen_token, Space.Space); // ) - // } - // } - // if (slice_type.ptr_info.const_token) |const_token| { - // try renderToken(ais, tree, const_token, Space.Space); - // } - // if (slice_type.ptr_info.volatile_token) |volatile_token| { - // try renderToken(ais, tree, volatile_token, Space.Space); - // } - // return renderExpression(ais, tree, slice_type.rhs, space); - //}, + .PtrTypeAligned => return renderPtrType(ais, tree, tree.ptrTypeAligned(node), space), + .PtrTypeSentinel => return renderPtrType(ais, tree, tree.ptrTypeSentinel(node), space), + .PtrType => return renderPtrType(ais, tree, tree.ptrType(node), space), + .PtrTypeBitRange => return renderPtrType(ais, tree, tree.ptrTypeBitRange(node), space), .ArrayInitOne => { var elements: [1]ast.Node.Index = undefined; @@ -1180,6 +1070,78 @@ fn renderArrayType( return renderExpression(ais, tree, array_type.ast.elem_type, space); } +fn renderPtrType( + ais: *Ais, + tree: ast.Tree, + ptr_type: ast.Full.PtrType, + space: Space, +) Error!void { + switch (ptr_type.kind) { + .one => { + try renderToken(ais, tree, ptr_type.ast.main_token, .None); // asterisk + }, + .many => { + try renderToken(ais, tree, ptr_type.ast.main_token - 1, .None); // lbracket + try renderToken(ais, tree, ptr_type.ast.main_token, .None); // asterisk + try renderToken(ais, tree, ptr_type.ast.main_token + 1, .None); // rbracket + }, + .sentinel => { + try renderToken(ais, tree, ptr_type.ast.main_token - 1, .None); // lbracket + try renderToken(ais, tree, ptr_type.ast.main_token, 
.None); // asterisk + try renderToken(ais, tree, ptr_type.ast.main_token + 1, .None); // colon + try renderExpression(ais, tree, ptr_type.ast.sentinel, .None); + try renderToken(ais, tree, tree.lastToken(ptr_type.ast.sentinel) + 1, .None); // rbracket + }, + .c => { + try renderToken(ais, tree, ptr_type.ast.main_token - 1, .None); // lbracket + try renderToken(ais, tree, ptr_type.ast.main_token, .None); // asterisk + try renderToken(ais, tree, ptr_type.ast.main_token + 1, .None); // c + try renderToken(ais, tree, ptr_type.ast.main_token + 2, .None); // rbracket + }, + .slice => { + try renderToken(ais, tree, ptr_type.ast.main_token, .None); // lbracket + try renderToken(ais, tree, ptr_type.ast.main_token + 1, .None); // rbracket + }, + .slice_sentinel => { + try renderToken(ais, tree, ptr_type.ast.main_token, .None); // lbracket + try renderToken(ais, tree, ptr_type.ast.main_token + 1, .None); // colon + try renderExpression(ais, tree, ptr_type.ast.sentinel, .None); + try renderToken(ais, tree, tree.lastToken(ptr_type.ast.sentinel) + 1, .None); // rbracket + }, + } + + if (ptr_type.allowzero_token) |allowzero_token| { + try renderToken(ais, tree, allowzero_token, .Space); + } + + if (ptr_type.ast.align_node != 0) { + const align_first = tree.firstToken(ptr_type.ast.align_node); + try renderToken(ais, tree, align_first - 2, .None); // align + try renderToken(ais, tree, align_first - 1, .None); // lparen + try renderExpression(ais, tree, ptr_type.ast.align_node, .None); + if (ptr_type.ast.bit_range_start != 0) { + assert(ptr_type.ast.bit_range_end != 0); + try renderToken(ais, tree, tree.firstToken(ptr_type.ast.bit_range_start) - 1, .None); // colon + try renderExpression(ais, tree, ptr_type.ast.bit_range_start, .None); + try renderToken(ais, tree, tree.firstToken(ptr_type.ast.bit_range_end) - 1, .None); // colon + try renderExpression(ais, tree, ptr_type.ast.bit_range_end, .None); + try renderToken(ais, tree, tree.lastToken(ptr_type.ast.bit_range_end) + 1, .Space); 
// rparen + } else { + try renderToken(ais, tree, tree.lastToken(ptr_type.ast.align_node) + 1, .Space); // rparen + } + } + + if (ptr_type.const_token) |const_token| { + try renderToken(ais, tree, const_token, .Space); + } + + if (ptr_type.volatile_token) |volatile_token| { + try renderToken(ais, tree, volatile_token, .Space); + } + + try renderExpression(ais, tree, ptr_type.ast.child_type, space); +} + fn renderAsmOutput( allocator: *mem.Allocator, ais: *Ais, From 0929fcbc34a2aa8cd69492c9437622d29a9a46b9 Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Sun, 7 Feb 2021 12:43:53 +0100 Subject: [PATCH 023/173] zig fmt: implement slices --- lib/std/zig/ast.zig | 63 ++++++++++++++++++++++++++++++--- lib/std/zig/parser_test.zig | 18 ++++++++++ lib/std/zig/render.zig | 69 ++++++++++++++++++++----------------- 3 files changed, 114 insertions(+), 36 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 0fdcf0a4d6..a4d698ecaa 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -688,6 +688,20 @@ pub const Tree = struct { return main_tokens[n] + end_offset; } }, + .SliceOpen => { + end_offset += 2; // ellipsis2 and rbracket + n = datas[n].rhs; + }, + .Slice => { + const extra = tree.extraData(datas[n].rhs, Node.Slice); + if (extra.sentinel != 0) { + n = extra.sentinel; + } else { + assert(extra.end != 0); // should have used SliceOpen if end and sentinel are 0 + n = extra.end; + } + end_offset += 1; // rbracket + }, // These are not supported by lastToken() because implementation would // require recursion due to the optional comma followed by rbrace. 
@@ -708,8 +722,6 @@ pub const Tree = struct { .ErrorSetDecl => unreachable, // TODO .AsmSimple => unreachable, // TODO .Asm => unreachable, // TODO - .SliceOpen => unreachable, // TODO - .Slice => unreachable, // TODO .SwitchCaseOne => unreachable, // TODO .SwitchRange => unreachable, // TODO .ArrayType => unreachable, // TODO @@ -1094,6 +1106,35 @@ pub const Tree = struct { }); } + pub fn sliceOpen(tree: Tree, node: Node.Index) Full.Slice { + assert(tree.nodes.items(.tag)[node] == .SliceOpen); + const data = tree.nodes.items(.data)[node]; + return .{ + .ast = .{ + .sliced = data.lhs, + .lbracket = tree.nodes.items(.main_token)[node], + .start = data.rhs, + .end = 0, + .sentinel = 0, + }, + }; + } + + pub fn slice(tree: Tree, node: Node.Index) Full.Slice { + assert(tree.nodes.items(.tag)[node] == .Slice); + const data = tree.nodes.items(.data)[node]; + const extra = tree.extraData(data.rhs, Node.Slice); + return .{ + .ast = .{ + .sliced = data.lhs, + .lbracket = tree.nodes.items(.main_token)[node], + .start = extra.start, + .end = extra.end, + .sentinel = extra.sentinel, + }, + }; + } + pub fn containerDeclTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) Full.ContainerDecl { assert(tree.nodes.items(.tag)[node] == .ContainerDeclTwo or tree.nodes.items(.tag)[node] == .ContainerDeclTwoComma); @@ -1452,6 +1493,18 @@ pub const Full = struct { }; }; + pub const Slice = struct { + ast: Ast, + + pub const Ast = struct { + sliced: Node.Index, + lbracket: TokenIndex, + start: Node.Index, + end: Node.Index, + sentinel: Node.Index, + }; + }; + pub const ContainerDecl = struct { layout_token: ?TokenIndex, ast: Ast, @@ -1860,10 +1913,10 @@ pub const Node = struct { /// main_token is the asterisk if a pointer or the lbrace if a slice PtrTypeBitRange, /// `lhs[rhs..]` - /// main_token is the `[`. + /// main_token is the lbracket. SliceOpen, - /// `lhs[b..c :d]`. `slice_list[rhs]`. - /// main_token is the `[`. + /// `lhs[b..c :d]`. 
rhs is index into Slice + /// main_token is the lbracket. Slice, /// `lhs.*`. rhs is unused. Deref, diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 95299b8d94..f09c3f8ade 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -637,6 +637,24 @@ test "zig fmt: sentinel array literal 1 element" { ); } +test "zig fmt: slices" { + try testCanonical( + \\const a = b[0..]; + \\const c = d[0..1]; + \\const e = f[0..1 :0]; + \\ + ); +} + +test "zig fmt: slices with spaces in bounds" { + try testCanonical( + \\const a = b[0 + 0 ..]; + \\const c = d[0 + 0 .. 1]; + \\const e = f[0 .. 1 + 1 :0]; + \\ + ); +} + //test "zig fmt: async function" { // try testCanonical( // \\pub const Server = struct { diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index ba2158ff4e..26bf4d0b72 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -466,34 +466,8 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac return renderToken(ais, tree, rbracket, space); // ] }, - .Slice => unreachable, // TODO - .SliceOpen => unreachable, // TODO - //.Slice => { - // const suffix_op = base.castTag(.Slice).?; - // try renderExpression(ais, tree, suffix_op.lhs, Space.None); - - // const lbracket = tree.prevToken(suffix_op.start.firstToken()); - // const dotdot = tree.nextToken(suffix_op.start.lastToken()); - - // const after_start_space_bool = nodeCausesSliceOpSpace(suffix_op.start) or - // (if (suffix_op.end) |end| nodeCausesSliceOpSpace(end) else false); - // const after_start_space = if (after_start_space_bool) Space.Space else Space.None; - // const after_op_space = if (suffix_op.end != null) after_start_space else Space.None; - - // try renderToken(ais, tree, lbracket, Space.None); // [ - // try renderExpression(ais, tree, suffix_op.start, after_start_space); - // try renderToken(ais, tree, dotdot, after_op_space); // .. 
- // if (suffix_op.end) |end| { - // const after_end_space = if (suffix_op.sentinel != null) Space.Space else Space.None; - // try renderExpression(ais, tree, end, after_end_space); - // } - // if (suffix_op.sentinel) |sentinel| { - // const colon = tree.prevToken(sentinel.firstToken()); - // try renderToken(ais, tree, colon, Space.None); // : - // try renderExpression(ais, tree, sentinel, Space.None); - // } - // return renderToken(ais, tree, suffix_op.rtoken, space); // ] - //}, + .SliceOpen => try renderSlice(ais, tree, tree.sliceOpen(node), space), + .Slice => try renderSlice(ais, tree, tree.slice(node), space), .Deref => { try renderExpression(ais, tree, datas[node].lhs, .None); @@ -1142,6 +1116,40 @@ fn renderPtrType( try renderExpression(ais, tree, ptr_type.ast.child_type, space); } +fn renderSlice( + ais: *Ais, + tree: ast.Tree, + slice: ast.Full.Slice, + space: Space, +) Error!void { + const node_tags = tree.nodes.items(.tag); + const after_start_space_bool = nodeCausesSliceOpSpace(node_tags[slice.ast.start]) or + if (slice.ast.end != 0) nodeCausesSliceOpSpace(node_tags[slice.ast.end]) else false; + const after_start_space = if (after_start_space_bool) Space.Space else Space.None; + const after_dots_space = if (slice.ast.end != 0) after_start_space else Space.None; + + try renderExpression(ais, tree, slice.ast.sliced, .None); + try renderToken(ais, tree, slice.ast.lbracket, .None); // lbracket + + const start_last = tree.lastToken(slice.ast.start); + try renderExpression(ais, tree, slice.ast.start, after_start_space); + try renderToken(ais, tree, start_last + 1, after_dots_space); // ellipsis2 ("..") + if (slice.ast.end == 0) { + return renderToken(ais, tree, start_last + 2, space); // rbracket + } + + const end_last = tree.lastToken(slice.ast.end); + const after_end_space = if (slice.ast.sentinel != 0) Space.Space else Space.None; + try renderExpression(ais, tree, slice.ast.end, after_end_space); + if (slice.ast.sentinel == 0) { + return renderToken(ais, 
tree, end_last + 1, space); // rbracket + } + + try renderToken(ais, tree, end_last + 1, .None); // colon + try renderExpression(ais, tree, slice.ast.sentinel, .None); + try renderToken(ais, tree, tree.lastToken(slice.ast.sentinel) + 1, space); // rbracket +} + fn renderAsmOutput( allocator: *mem.Allocator, ais: *Ais, @@ -2099,8 +2107,8 @@ fn nodeIsBlock(tag: ast.Node.Tag) bool { }; } -fn nodeCausesSliceOpSpace(base: ast.Node.Index) bool { - return switch (base.tag) { +fn nodeCausesSliceOpSpace(tag: ast.Node.Tag) bool { + return switch (tag) { .Catch, .Add, .AddWrap, @@ -2139,7 +2147,6 @@ fn nodeCausesSliceOpSpace(base: ast.Node.Index) bool { .Mod, .Mul, .MulWrap, - .Range, .Sub, .SubWrap, .OrElse, From bb7b5ee2acb81a69290b2eaafecd6095f3adfe6a Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Sun, 7 Feb 2021 14:37:12 +0100 Subject: [PATCH 024/173] zig fmt: implement catch and return --- lib/std/zig/ast.zig | 11 ++++++-- lib/std/zig/parser_test.zig | 28 +++++++++++--------- lib/std/zig/render.zig | 52 ++++++++++++++++++------------------- 3 files changed, 51 insertions(+), 40 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index a4d698ecaa..4ecd82acb4 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -436,7 +436,6 @@ pub const Tree = struct { .Suspend, .Resume, .Break, - .Return, .Nosuspend, .Comptime, => n = datas[n].lhs, @@ -516,6 +515,12 @@ pub const Tree = struct { .EnumLiteral, => return main_tokens[n] + end_offset, + .Return => if (datas[n].lhs != 0) { + n = datas[n].lhs; + } else { + return main_tokens[n] + end_offset; + }, + .Call => { end_offset += 1; // for the rparen const params = tree.extraData(datas[n].rhs, Node.SubRange); @@ -1791,7 +1796,9 @@ pub const Node = struct { /// lhs is unused. /// rhs is the deferred expression. Defer, - /// lhs is target expr; rhs is fallback expr. + /// lhs catch rhs + /// lhs catch |err| rhs + /// main_token is the catch /// payload is determined by looking at the prev tokens before rhs. 
Catch, /// `lhs.a`. main_token is the dot. rhs is the identifier token index. diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index f09c3f8ade..d8af653028 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -2622,18 +2622,22 @@ test "zig fmt: ptr deref operator and unwrap optional operator" { // \\ // ); //} -// -//test "zig fmt: catch" { -// try testCanonical( -// \\test "catch" { -// \\ const a: anyerror!u8 = 0; -// \\ _ = a catch return; -// \\ _ = a catch |err| return; -// \\} -// \\ -// ); -//} -// + +test "zig fmt: catch" { + try testCanonical( + \\test "catch" { + \\ const a: anyerror!u8 = 0; + \\ _ = a catch return; + \\ _ = a catch + \\ return; + \\ _ = a catch |err| return; + \\ _ = a catch |err| + \\ return; + \\} + \\ + ); +} + //test "zig fmt: blocks" { // try testCanonical( // \\test "blocks" { diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 26bf4d0b72..3d2c6e2f03 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -257,27 +257,29 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // } //}, - .Catch => unreachable, // TODO - //.Catch => { - // const infix_op_node = @fieldParentPtr(ast.Node.Catch, "base", base); + .Catch => { + const main_token = main_tokens[node]; + const fallback_first = tree.firstToken(datas[node].rhs); - // const op_space = Space.Space; - // try renderExpression(ais, tree, infix_op_node.lhs, op_space); + const same_line = tree.tokensOnSameLine(main_token, fallback_first); + const after_op_space = if (same_line) Space.Space else Space.Newline; - // const after_op_space = blk: { - // const same_line = tree.tokensOnSameLine(infix_op_node.op_token, tree.nextToken(infix_op_node.op_token)); - // break :blk if (same_line) op_space else Space.Newline; - // }; + try renderExpression(ais, tree, datas[node].lhs, .Space); // target - // try renderToken(ais, tree, infix_op_node.op_token, after_op_space); + if 
(token_tags[fallback_first - 1] == .Pipe) { + try renderToken(ais, tree, main_token, .Space); // catch keyword + try renderToken(ais, tree, main_token + 1, .None); // pipe + try renderToken(ais, tree, main_token + 2, .None); // payload identifier + try renderToken(ais, tree, main_token + 3, after_op_space); // pipe + } else { + assert(token_tags[fallback_first - 1] == .Keyword_catch); + try renderToken(ais, tree, main_token, after_op_space); // catch keyword + } - // if (infix_op_node.payload) |payload| { - // try renderExpression(ais, tree, payload, Space.Space); - // } + ais.pushIndentOneShot(); + try renderExpression(ais, tree, datas[node].rhs, space); // fallback + }, - // ais.pushIndentOneShot(); - // return renderExpression(ais, tree, infix_op_node.rhs, space); - //}, .FieldAccess => { const field_access = datas[node]; try renderExpression(ais, tree, field_access.lhs, .None); @@ -516,16 +518,14 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // } //}, - .Return => unreachable, // TODO - //.Return => { - // const flow_expr = base.castTag(.Return).?; - // if (flow_expr.getRHS()) |rhs| { - // try renderToken(ais, tree, flow_expr.ltoken, Space.Space); - // return renderExpression(ais, tree, rhs, space); - // } else { - // return renderToken(ais, tree, flow_expr.ltoken, space); - // } - //}, + .Return => { + if (datas[node].lhs != 0) { + try renderToken(ais, tree, main_tokens[node], .Space); + try renderExpression(ais, tree, datas[node].lhs, space); + } else { + try renderToken(ais, tree, main_tokens[node], space); + } + }, .GroupedExpression => unreachable, // TODO //.GroupedExpression => { From 1d71b19c0d025aeeede229e714679f4b4fb7880d Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Sun, 7 Feb 2021 22:16:23 +0100 Subject: [PATCH 025/173] zig fmt: implement error set decls --- lib/std/zig/ast.zig | 4 +-- lib/std/zig/parse.zig | 6 ++--- lib/std/zig/parser_test.zig | 31 ++++++++++++++++++++++- lib/std/zig/render.zig | 50 
++++++++++++++++++++++++++++++++++++- 4 files changed, 84 insertions(+), 7 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 4ecd82acb4..bebacd0b2d 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -498,6 +498,7 @@ pub const Tree = struct { .UnwrapOptional, .GroupedExpression, .StringLiteral, + .ErrorSetDecl, => return datas[n].rhs + end_offset, .AnyType, @@ -724,7 +725,6 @@ pub const Tree = struct { .Switch => unreachable, // TODO .If => unreachable, // TODO .Continue => unreachable, // TODO - .ErrorSetDecl => unreachable, // TODO .AsmSimple => unreachable, // TODO .Asm => unreachable, // TODO .SwitchCaseOne => unreachable, // TODO @@ -2061,7 +2061,7 @@ pub const Node = struct { /// Same as BuiltinCall but there is known to be a trailing comma before the rparen. BuiltinCallComma, /// `error{a, b}`. - /// lhs and rhs both unused. + /// rhs is the rbrace, lhs is unused. ErrorSetDecl, /// `struct {}`, `union {}`, `opaque {}`, `enum {}`. `extra_data[lhs..rhs]`. /// main_token is `struct`, `union`, `opaque`, `enum` keyword. 
diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig index c6cec8a195..fc4847249a 100644 --- a/lib/std/zig/parse.zig +++ b/lib/std/zig/parse.zig @@ -2714,13 +2714,13 @@ const Parser = struct { const error_token = p.tok_i; p.tok_i += 2; - if (p.eatToken(.RBrace)) |_| { + if (p.eatToken(.RBrace)) |rbrace| { return p.addNode(.{ .tag = .ErrorSetDecl, .main_token = error_token, .data = .{ .lhs = undefined, - .rhs = undefined, + .rhs = rbrace, }, }); } @@ -2758,7 +2758,7 @@ const Parser = struct { .main_token = error_token, .data = .{ .lhs = undefined, - .rhs = undefined, + .rhs = p.tok_i - 1, // rbrace }, }); }, diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index d8af653028..dec7b09127 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -2015,7 +2015,36 @@ test "zig fmt: ptr deref operator and unwrap optional operator" { // \\ // ); //} -// + +// TODO: replace this with the next test case when possible +test "zig fmt: error set declaration" { + try testCanonical( + \\const E = error{ + \\ A, + \\ B, + \\ + \\ C, + \\}; + \\const Error = error{ + \\ /// no more memory + \\ OutOfMemory, + \\}; + \\const Error = error{ + \\ /// no more memory + \\ OutOfMemory, + \\ + \\ /// another + \\ Another, + \\ /// and one more + \\ Another, + \\}; + \\const Error = error{OutOfMemory}; + \\const Error = error{}; + \\const Error = error{ OutOfMemory, OutOfTime }; + \\ + ); +} + //test "zig fmt: error set declaration" { // try testCanonical( // \\const E = error{ diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 3d2c6e2f03..cbaf462166 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -563,7 +563,55 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac .TaggedUnionEnumTagComma, => return renderContainerDecl(ais, tree, tree.taggedUnionEnumTag(node), space), - .ErrorSetDecl => unreachable, // TODO + // TODO: handle comments properly + .ErrorSetDecl => { + const 
error_token = main_tokens[node]; + const lbrace = error_token + 1; + const rbrace = datas[node].rhs; + + try renderToken(ais, tree, error_token, .None); + + if (lbrace + 1 == rbrace) { + // There is nothing between the braces so render condensed: `error{}` + try renderToken(ais, tree, lbrace, .None); + try renderToken(ais, tree, rbrace, space); + } else if (lbrace + 2 == rbrace and token_tags[lbrace + 1] == .Identifier) { + // There is exactly one member and no trailing comma or + // comments, so render without surrounding spaces: `error{Foo}` + try renderToken(ais, tree, lbrace, .None); + try renderToken(ais, tree, lbrace + 1, .None); // identifier + try renderToken(ais, tree, rbrace, space); + } else if (token_tags[rbrace - 1] == .Comma) { + // There is a trailing comma so render each member on a new line. + try renderToken(ais, tree, lbrace, .Newline); + ais.pushIndent(); + var i = lbrace + 1; + while (i < rbrace) : (i += 1) { + try renderExtraNewlineToken(ais, tree, i); + switch (token_tags[i]) { + .DocComment => try renderToken(ais, tree, i, .Newline), + .Identifier => try renderToken(ais, tree, i, .Comma), + .Comma => {}, + else => unreachable, + } + } + ais.popIndent(); + try renderToken(ais, tree, rbrace, space); + } else { + // There is no trailing comma so render everything on one line. 
+ try renderToken(ais, tree, lbrace, .Space); + var i = lbrace + 1; + while (i < rbrace) : (i += 1) { + switch (token_tags[i]) { + .DocComment => unreachable, // TODO + .Identifier => try renderToken(ais, tree, i, .CommaSpace), + .Comma => {}, + else => unreachable, + } + } + try renderToken(ais, tree, rbrace, space); + } + }, //.ErrorSetDecl => { // const err_set_decl = @fieldParentPtr(ast.Node.ErrorSetDecl, "base", base); From 0e38362d244f118565f447f8ee2c6b8a700d05bf Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Sun, 7 Feb 2021 22:26:43 +0100 Subject: [PATCH 026/173] zig fmt: split Slice and SliceSentinel This saves 4 whole bytes in the common case where there is no sentinel. --- lib/std/zig/ast.zig | 41 ++++++++++++++++++++++++++++++------- lib/std/zig/parse.zig | 46 ++++++++++++++++++++++++++---------------- lib/std/zig/render.zig | 1 + 3 files changed, 64 insertions(+), 24 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index bebacd0b2d..815d783234 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -304,6 +304,7 @@ pub const Tree = struct { .BoolOr, .SliceOpen, .Slice, + .SliceSentinel, .Deref, .ArrayAccess, .ArrayInitOne, @@ -694,19 +695,22 @@ pub const Tree = struct { return main_tokens[n] + end_offset; } }, + .SliceOpen => { end_offset += 2; // ellipsis2 and rbracket n = datas[n].rhs; }, .Slice => { const extra = tree.extraData(datas[n].rhs, Node.Slice); - if (extra.sentinel != 0) { - n = extra.sentinel; - } else { - assert(extra.end != 0); // should have used SliceOpen if end and sentinel are 0 - n = extra.end; - } + assert(extra.end != 0); // should have used SliceOpen end_offset += 1; // rbracket + n = extra.end; + }, + .SliceSentinel => { + const extra = tree.extraData(datas[n].rhs, Node.SliceSentinel); + assert(extra.sentinel != 0); // should have used Slice + end_offset += 1; // rbracket + n = extra.sentinel; }, // These are not supported by lastToken() because implementation would @@ -1129,6 +1133,21 @@ pub const 
Tree = struct { assert(tree.nodes.items(.tag)[node] == .Slice); const data = tree.nodes.items(.data)[node]; const extra = tree.extraData(data.rhs, Node.Slice); + return .{ + .ast = .{ + .sliced = data.lhs, + .lbracket = tree.nodes.items(.main_token)[node], + .start = extra.start, + .end = extra.end, + .sentinel = 0, + }, + }; + } + + pub fn sliceSentinel(tree: Tree, node: Node.Index) Full.Slice { + assert(tree.nodes.items(.tag)[node] == .SliceSentinel); + const data = tree.nodes.items(.data)[node]; + const extra = tree.extraData(data.rhs, Node.SliceSentinel); return .{ .ast = .{ .sliced = data.lhs, @@ -1922,9 +1941,12 @@ pub const Node = struct { /// `lhs[rhs..]` /// main_token is the lbracket. SliceOpen, - /// `lhs[b..c :d]`. rhs is index into Slice + /// `lhs[b..c]`. rhs is index into Slice /// main_token is the lbracket. Slice, + /// `lhs[b..c :d]`. rhs is index into SliceSentinel + /// main_token is the lbracket. + SliceSentinel, /// `lhs.*`. rhs is unused. Deref, /// `lhs[rhs]`. @@ -2202,6 +2224,11 @@ pub const Node = struct { pub const Slice = struct { start: Index, end: Index, + }; + + pub const SliceSentinel = struct { + start: Index, + end: Index, sentinel: Index, }; diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig index fc4847249a..31182c4952 100644 --- a/lib/std/zig/parse.zig +++ b/lib/std/zig/parse.zig @@ -3326,23 +3326,35 @@ const Parser = struct { }, }); } - const sentinel: Node.Index = if (p.eatToken(.Colon)) |_| - try p.parseExpr() - else - 0; - _ = try p.expectToken(.RBracket); - return p.addNode(.{ - .tag = .Slice, - .main_token = lbracket, - .data = .{ - .lhs = lhs, - .rhs = try p.addExtra(.{ - .start = index_expr, - .end = end_expr, - .sentinel = sentinel, - }), - }, - }); + if (p.eatToken(.Colon)) |_| { + const sentinel = try p.parseExpr(); + _ = try p.expectToken(.RBracket); + return p.addNode(.{ + .tag = .SliceSentinel, + .main_token = lbracket, + .data = .{ + .lhs = lhs, + .rhs = try p.addExtra(Node.SliceSentinel{ + .start = 
index_expr, + .end = end_expr, + .sentinel = sentinel, + }), + }, + }); + } else { + _ = try p.expectToken(.RBracket); + return p.addNode(.{ + .tag = .Slice, + .main_token = lbracket, + .data = .{ + .lhs = lhs, + .rhs = try p.addExtra(Node.Slice{ + .start = index_expr, + .end = end_expr, + }), + }, + }); + } } _ = try p.expectToken(.RBracket); return p.addNode(.{ diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index cbaf462166..1c18d15219 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -470,6 +470,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac .SliceOpen => try renderSlice(ais, tree, tree.sliceOpen(node), space), .Slice => try renderSlice(ais, tree, tree.slice(node), space), + .SliceSentinel => try renderSlice(ais, tree, tree.sliceSentinel(node), space), .Deref => { try renderExpression(ais, tree, datas[node].lhs, .None); From 57cec38e6144754fcd15266100974a7cf0059570 Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Sun, 7 Feb 2021 23:14:33 +0100 Subject: [PATCH 027/173] std/zig/ast: fix Tree.lastToken() for blocks The fact that blocks may end in a semicolon but this semicolon is not counted by recursive lastToken() evaluation on the sub expression causes off-by-one errors for lastToken() on blocks currently. To fix this, introduce BlockSemicolon and BlockTwoSemicolon following the pattern used for trailing commas in e.g. builtin function arguments. --- lib/std/zig/ast.zig | 18 ++++++++++++------ lib/std/zig/parse.zig | 9 ++++++--- lib/std/zig/parser_test.zig | 18 ++++++++++++++++++ lib/std/zig/render.zig | 8 ++++++-- 4 files changed, 42 insertions(+), 11 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 815d783234..d5857c9d53 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -357,7 +357,9 @@ pub const Tree = struct { }, .Block, + .BlockSemicolon, .BlockTwo, + .BlockTwoSemicolon, => { // Look for a label. 
const lbrace = main_tokens[n]; @@ -552,18 +554,17 @@ pub const Tree = struct { .TaggedUnion, .BuiltinCall, => { + assert(datas[n].rhs - datas[n].lhs > 0); end_offset += 1; // for the rbrace - if (datas[n].rhs - datas[n].lhs == 0) { - return main_tokens[n] + end_offset; - } n = tree.extra_data[datas[n].rhs - 1]; // last statement }, + .BlockSemicolon, .ContainerDeclComma, .TaggedUnionComma, .BuiltinCallComma, => { assert(datas[n].rhs - datas[n].lhs > 0); - end_offset += 2; // for the comma + rbrace/rparen + end_offset += 2; // for the comma/semicolon + rbrace/rparen n = tree.extra_data[datas[n].rhs - 1]; // last member }, .CallOne, @@ -594,11 +595,12 @@ }, .ArrayInitDotTwoComma, .BuiltinCallTwoComma, + .BlockTwoSemicolon, .StructInitDotTwoComma, .ContainerDeclTwoComma, .TaggedUnionTwoComma, => { - end_offset += 2; // for the comma + rbrace/rparen + end_offset += 2; // for the comma/semicolon + rbrace/rparen if (datas[n].rhs != 0) { n = datas[n].rhs; } else if (datas[n].lhs != 0) { @@ -2137,12 +2139,16 @@ pub const Node = struct { Comptime, /// `nosuspend lhs`. rhs unused. Nosuspend, - /// `{lhs; rhs;}`. rhs or lhs can be omitted. + /// `{lhs rhs}`. rhs or lhs can be omitted. /// main_token points at the lbrace. BlockTwo, + /// Same as BlockTwo but there is known to be a semicolon before the rbrace. + BlockTwoSemicolon, /// `{}`. `sub_list[lhs..rhs]`. /// main_token points at the lbrace. Block, + /// Same as Block but there is known to be a semicolon before the rbrace. + BlockSemicolon, /// `asm(lhs)`. rhs unused. AsmSimple, /// `asm(lhs, a)`. `sub_range_list[rhs]`. 
diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig index 31182c4952..96d9a0dea6 100644 --- a/lib/std/zig/parse.zig +++ b/lib/std/zig/parse.zig @@ -1984,8 +1984,9 @@ const Parser = struct { const stmt_one = try p.expectStatementRecoverable(); if (p.eatToken(.RBrace)) |_| { + const semicolon = p.token_tags[p.tok_i - 2] == .Semicolon; return p.addNode(.{ - .tag = .BlockTwo, + .tag = if (semicolon) .BlockTwoSemicolon else .BlockTwo, .main_token = lbrace, .data = .{ .lhs = stmt_one, @@ -1995,8 +1996,9 @@ const Parser = struct { } const stmt_two = try p.expectStatementRecoverable(); if (p.eatToken(.RBrace)) |_| { + const semicolon = p.token_tags[p.tok_i - 2] == .Semicolon; return p.addNode(.{ - .tag = .BlockTwo, + .tag = if (semicolon) .BlockTwoSemicolon else .BlockTwo, .main_token = lbrace, .data = .{ .lhs = stmt_one, @@ -2017,9 +2019,10 @@ const Parser = struct { if (p.token_tags[p.tok_i] == .RBrace) break; } _ = try p.expectToken(.RBrace); + const semicolon = p.token_tags[p.tok_i - 2] == .Semicolon; const statements_span = try p.listToSpan(statements.items); return p.addNode(.{ - .tag = .Block, + .tag = if (semicolon) .BlockSemicolon else .Block, .main_token = lbrace, .data = .{ .lhs = statements_span.start, diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index dec7b09127..5d95a2b12f 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -655,6 +655,24 @@ test "zig fmt: slices with spaces in bounds" { ); } +test "zig fmt: block in slice expression" { + try testCanonical( + \\const a = b[{ + \\ _ = x; + \\}..]; + \\const c = d[0..{ + \\ _ = x; + \\ _ = y; + \\}]; + \\const e = f[0..1 :{ + \\ _ = x; + \\ _ = y; + \\ _ = z; + \\}]; + \\ + ); +} + //test "zig fmt: async function" { // try testCanonical( // \\pub const Server = struct { diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 1c18d15219..ce3ce7055a 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -202,7 +202,9 @@ fn 
renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // } // return renderToken(ais, tree, any_type.token, space); //}, - .BlockTwo => { + .BlockTwo, + .BlockTwoSemicolon, + => { const statements = [2]ast.Node.Index{ datas[node].lhs, datas[node].rhs }; if (datas[node].lhs == 0) { return renderBlock(ais, tree, main_tokens[node], statements[0..0], space); @@ -212,7 +214,9 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac return renderBlock(ais, tree, main_tokens[node], statements[0..2], space); } }, - .Block => { + .Block, + .BlockSemicolon, + => { const lbrace = main_tokens[node]; const statements = tree.extra_data[datas[node].lhs..datas[node].rhs]; return renderBlock(ais, tree, main_tokens[node], statements, space); From aaf13a2bb369aa5b35d4b5ee36ddd8c6029f7473 Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Mon, 8 Feb 2021 00:24:58 +0100 Subject: [PATCH 028/173] zig fmt: implement break --- lib/std/zig/ast.zig | 14 +++++++++++-- lib/std/zig/parser_test.zig | 18 ++++++++++++++++ lib/std/zig/render.zig | 42 ++++++++++++++++++------------------- 3 files changed, 50 insertions(+), 24 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index d5857c9d53..2d27de575a 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -438,7 +438,6 @@ pub const Tree = struct { .OptionalType, .Suspend, .Resume, - .Break, .Nosuspend, .Comptime, => n = datas[n].lhs, @@ -715,6 +714,16 @@ pub const Tree = struct { n = extra.sentinel; }, + .Break => { + if (datas[n].rhs != 0) { + n = datas[n].rhs; + } else if (datas[n].lhs != 0) { + return datas[n].lhs + end_offset; + } else { + return main_tokens[n] + end_offset; + } + }, + // These are not supported by lastToken() because implementation would // require recursion due to the optional comma followed by rbrace. // TODO follow the pattern set by StructInitDotTwoComma which will allow @@ -2023,7 +2032,8 @@ pub const Node = struct { Resume, /// `continue`. 
lhs is token index of label if any. rhs is unused. Continue, - /// `break rhs`. rhs can be omitted. lhs is label token index, if any. + /// `break :lhs rhs` + /// both lhs and rhs may be omitted. Break, /// `return lhs`. lhs can be omitted. rhs is unused. Return, diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 5d95a2b12f..b38e4c6ea9 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -273,6 +273,24 @@ test "zig fmt: comptime struct field" { ); } +test "zig fmt: break from block" { + try testCanonical( + \\const a = blk: { + \\ break :blk 42; + \\}; + \\const b = blk: { + \\ break :blk; + \\}; + \\const c = { + \\ break 42; + \\}; + \\const d = { + \\ break; + \\}; + \\ + ); +} + //test "zig fmt: c pointer type" { // try testCanonical( // \\pub extern fn repro() [*c]const u8; diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index ce3ce7055a..7daf1b7a62 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -487,28 +487,26 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac return renderToken(ais, tree, datas[node].rhs, space); }, - .Break => unreachable, // TODO - //.Break => { - // const flow_expr = base.castTag(.Break).?; - // const maybe_rhs = flow_expr.getRHS(); - // const maybe_label = flow_expr.getLabel(); - - // if (maybe_label == null and maybe_rhs == null) { - // return renderToken(ais, tree, flow_expr.ltoken, space); // break - // } - - // try renderToken(ais, tree, flow_expr.ltoken, Space.Space); // break - // if (maybe_label) |label| { - // const colon = tree.nextToken(flow_expr.ltoken); - // try renderToken(ais, tree, colon, Space.None); // : - - // if (maybe_rhs == null) { - // return renderToken(ais, tree, label, space); // label - // } - // try renderToken(ais, tree, label, Space.Space); // label - // } - // return renderExpression(ais, tree, maybe_rhs.?, space); - //}, + .Break => { + const main_token = main_tokens[node]; + const 
label_token = datas[node].lhs; + const target = datas[node].rhs; + if (label_token == 0 and target == 0) { + try renderToken(ais, tree, main_token, space); // break keyword + } else if (label_token == 0 and target != 0) { + try renderToken(ais, tree, main_token, .Space); // break keyword + try renderExpression(ais, tree, target, space); + } else if (label_token != 0 and target == 0) { + try renderToken(ais, tree, main_token, .Space); // break keyword + try renderToken(ais, tree, label_token - 1, .None); // colon + try renderToken(ais, tree, label_token, space); // identifier + } else if (label_token != 0 and target != 0) { + try renderToken(ais, tree, main_token, .Space); // break keyword + try renderToken(ais, tree, label_token - 1, .None); // colon + try renderToken(ais, tree, label_token, .Space); // identifier + try renderExpression(ais, tree, target, space); + } + }, .Continue => unreachable, // TODO //.Continue => { From 837cd748a8607a6ac9f1a5bfc6d625512bacb906 Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Mon, 8 Feb 2021 13:42:02 +0100 Subject: [PATCH 029/173] zig fmt: fix block lbrace indent --- lib/std/zig/parser_test.zig | 44 ++++++++++++++++++------------------- lib/std/zig/render.zig | 2 +- 2 files changed, 23 insertions(+), 23 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index b38e4c6ea9..7338122afc 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -2703,28 +2703,28 @@ test "zig fmt: catch" { ); } -//test "zig fmt: blocks" { -// try testCanonical( -// \\test "blocks" { -// \\ { -// \\ const a = 0; -// \\ const b = 0; -// \\ } -// \\ -// \\ blk: { -// \\ const a = 0; -// \\ const b = 0; -// \\ } -// \\ -// \\ const r = blk: { -// \\ const a = 0; -// \\ const b = 0; -// \\ }; -// \\} -// \\ -// ); -//} -// +test "zig fmt: blocks" { + try testCanonical( + \\test "blocks" { + \\ { + \\ const a = 0; + \\ const b = 0; + \\ } + \\ + \\ blk: { + \\ const a = 0; + \\ const b = 0; + \\ } + \\ 
+ \\ const r = blk: { + \\ const a = 0; + \\ const b = 0; + \\ }; + \\} + \\ + ); +} + //test "zig fmt: switch" { // try testCanonical( // \\test "switch" { diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 7daf1b7a62..f67c95d355 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -1768,8 +1768,8 @@ fn renderBlock( return renderToken(ais, tree, lbrace + 1, space); // rbrace } - ais.pushIndent(); try renderToken(ais, tree, lbrace, .Newline); + ais.pushIndent(); for (statements) |stmt, i| { switch (node_tags[stmt]) { .GlobalVarDecl => try renderVarDecl(ais, tree, tree.globalVarDecl(stmt)), From d869133a9fb96fb27a07c1e42c12c5270eae5940 Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Mon, 8 Feb 2021 13:38:24 +0100 Subject: [PATCH 030/173] zig fmt: implement switches --- lib/std/zig/ast.zig | 101 ++++++++++++-- lib/std/zig/parse.zig | 5 +- lib/std/zig/parser_test.zig | 144 ++++++++++---------- lib/std/zig/render.zig | 257 ++++++++++++------------------------ 4 files changed, 247 insertions(+), 260 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 2d27de575a..9b2d8f9d54 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -213,6 +213,7 @@ pub const Tree = struct { .Await, .OptionalType, .Switch, + .SwitchComma, .IfSimple, .If, .Suspend, @@ -313,7 +314,6 @@ pub const Tree = struct { .StructInit, .CallOne, .Call, - .SwitchCaseOne, .SwitchRange, .FnDecl, .ErrorUnion, @@ -406,7 +406,19 @@ pub const Tree = struct { }; }, - .SwitchCaseMulti => unreachable, // TODO + .SwitchCaseOne => { + if (datas[n].lhs == 0) { + return main_tokens[n] - 1; // else token + } else { + n = datas[n].lhs; + } + }, + .SwitchCase => { + const extra = tree.extraData(datas[n].lhs, Node.SubRange); + assert(extra.end - extra.start > 0); + n = extra.start; + }, + .WhileSimple => unreachable, // TODO .WhileCont => unreachable, // TODO .While => unreachable, // TODO @@ -494,6 +506,8 @@ pub const Tree = struct { .PtrTypeSentinel, .PtrType, 
.PtrTypeBitRange, + .SwitchCaseOne, + .SwitchCase, => n = datas[n].rhs, .FieldAccess, @@ -532,6 +546,16 @@ pub const Tree = struct { } n = tree.extra_data[params.end - 1]; // last parameter }, + .Switch => { + const cases = tree.extraData(datas[n].rhs, Node.SubRange); + if (cases.end - cases.start == 0) { + end_offset += 3; // rparen, lbrace, rbrace + n = datas[n].lhs; // condition expression + } else { + end_offset += 1; // for the rbrace + n = tree.extra_data[cases.end - 1]; // last case + } + }, .ContainerDeclArg => { const members = tree.extraData(datas[n].rhs, Node.SubRange); if (members.end - members.start == 0) { @@ -542,7 +566,9 @@ pub const Tree = struct { n = tree.extra_data[members.end - 1]; // last parameter } }, - .ContainerDeclArgComma => { + .ContainerDeclArgComma, + .SwitchComma, + => { const members = tree.extraData(datas[n].rhs, Node.SubRange); assert(members.end - members.start > 0); end_offset += 2; // for the comma + rbrace @@ -737,16 +763,13 @@ pub const Tree = struct { .TaggedUnionEnumTag => unreachable, // TODO .TaggedUnionEnumTagComma => unreachable, // TODO - .Switch => unreachable, // TODO .If => unreachable, // TODO .Continue => unreachable, // TODO .AsmSimple => unreachable, // TODO .Asm => unreachable, // TODO - .SwitchCaseOne => unreachable, // TODO .SwitchRange => unreachable, // TODO .ArrayType => unreachable, // TODO .ArrayTypeSentinel => unreachable, // TODO - .SwitchCaseMulti => unreachable, // TODO .WhileCont => unreachable, // TODO .While => unreachable, // TODO .ForSimple => unreachable, // TODO @@ -1202,7 +1225,8 @@ pub const Tree = struct { } pub fn containerDeclArg(tree: Tree, node: Node.Index) Full.ContainerDecl { - assert(tree.nodes.items(.tag)[node] == .ContainerDeclArg); + assert(tree.nodes.items(.tag)[node] == .ContainerDeclArg or + tree.nodes.items(.tag)[node] == .ContainerDeclArgComma); const data = tree.nodes.items(.data)[node]; const members_range = tree.extraData(data.rhs, Node.SubRange); return 
tree.fullContainerDecl(.{ @@ -1214,7 +1238,8 @@ pub const Tree = struct { } pub fn taggedUnionTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) Full.ContainerDecl { - assert(tree.nodes.items(.tag)[node] == .TaggedUnionTwo); + assert(tree.nodes.items(.tag)[node] == .TaggedUnionTwo or + tree.nodes.items(.tag)[node] == .TaggedUnionTwoComma); const data = tree.nodes.items(.data)[node]; buffer.* = .{ data.lhs, data.rhs }; const members = if (data.rhs != 0) @@ -1233,7 +1258,8 @@ pub const Tree = struct { } pub fn taggedUnion(tree: Tree, node: Node.Index) Full.ContainerDecl { - assert(tree.nodes.items(.tag)[node] == .TaggedUnion); + assert(tree.nodes.items(.tag)[node] == .TaggedUnion or + tree.nodes.items(.tag)[node] == .TaggedUnionComma); const data = tree.nodes.items(.data)[node]; const main_token = tree.nodes.items(.main_token)[node]; return tree.fullContainerDecl(.{ @@ -1245,7 +1271,8 @@ pub const Tree = struct { } pub fn taggedUnionEnumTag(tree: Tree, node: Node.Index) Full.ContainerDecl { - assert(tree.nodes.items(.tag)[node] == .TaggedUnionEnumTag); + assert(tree.nodes.items(.tag)[node] == .TaggedUnionEnumTag or + tree.nodes.items(.tag)[node] == .TaggedUnionEnumTagComma); const data = tree.nodes.items(.data)[node]; const members_range = tree.extraData(data.rhs, Node.SubRange); const main_token = tree.nodes.items(.main_token)[node]; @@ -1257,6 +1284,25 @@ pub const Tree = struct { }); } + pub fn switchCaseOne(tree: Tree, node: Node.Index) Full.SwitchCase { + const data = &tree.nodes.items(.data)[node]; + return tree.fullSwitchCase(.{ + .values = if (data.lhs == 0) &.{} else @ptrCast([*]Node.Index, &data.lhs)[0..1], + .arrow_token = tree.nodes.items(.main_token)[node], + .target_expr = data.rhs, + }); + } + + pub fn switchCase(tree: Tree, node: Node.Index) Full.SwitchCase { + const data = tree.nodes.items(.data)[node]; + const extra = tree.extraData(data.lhs, Node.SubRange); + return tree.fullSwitchCase(.{ + .values = tree.extra_data[extra.start..extra.end], 
+ .arrow_token = tree.nodes.items(.main_token)[node], + .target_expr = data.rhs, + }); + } + fn fullVarDecl(tree: Tree, info: Full.VarDecl.Ast) Full.VarDecl { const token_tags = tree.tokens.items(.tag); var result: Full.VarDecl = .{ @@ -1407,6 +1453,18 @@ pub const Tree = struct { } return result; } + + fn fullSwitchCase(tree: Tree, info: Full.SwitchCase.Ast) Full.SwitchCase { + const token_tags = tree.tokens.items(.tag); + var result: Full.SwitchCase = .{ + .ast = info, + .payload_token = null, + }; + if (token_tags[info.arrow_token + 1] == .Pipe) { + result.payload_token = info.arrow_token + 2; + } + return result; + } }; /// Fully assembled AST node information. @@ -1552,6 +1610,20 @@ pub const Full = struct { arg: Node.Index, }; }; + + pub const SwitchCase = struct { + /// Points to the first token after the `|`. Will either be an identifier or + /// a `*` (with an identifier immediately after it). + payload_token: ?TokenIndex, + ast: Ast, + + pub const Ast = struct { + /// If empty, this is an else case + values: []const Node.Index, + arrow_token: TokenIndex, + target_expr: Node.Index, + }; + }; }; pub const Error = union(enum) { @@ -1996,13 +2068,16 @@ pub const Node = struct { /// `lhs(a, b, c)`. `sub_range_list[rhs]`. /// main_token is the `(`. Call, - /// `switch(lhs) {}`. `sub_range_list[rhs]`. + /// `switch(lhs) {}`. `SubRange[rhs]`. Switch, + /// Same as Switch except there is known to be a trailing comma + /// before the final rbrace + SwitchComma, /// `lhs => rhs`. If lhs is omitted it means `else`. /// main_token is the `=>` SwitchCaseOne, - /// `a, b, c => rhs`. `sub_range_list[lhs]`. - SwitchCaseMulti, + /// `a, b, c => rhs`. `SubRange[lhs]`. + SwitchCase, /// `lhs...rhs`. SwitchRange, /// `while (lhs) rhs`. 
diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig index 96d9a0dea6..aa70634c47 100644 --- a/lib/std/zig/parse.zig +++ b/lib/std/zig/parse.zig @@ -2887,10 +2887,11 @@ const Parser = struct { _ = try p.expectToken(.RParen); _ = try p.expectToken(.LBrace); const cases = try p.parseSwitchProngList(); + const trailing_comma = p.token_tags[p.tok_i - 1] == .Comma; _ = try p.expectToken(.RBrace); return p.addNode(.{ - .tag = .Switch, + .tag = if (trailing_comma) .SwitchComma else .Switch, .main_token = switch_token, .data = .{ .lhs = expr_node, @@ -3208,7 +3209,7 @@ const Parser = struct { const arrow_token = try p.expectToken(.EqualAngleBracketRight); _ = try p.parsePtrPayload(); return p.addNode(.{ - .tag = .SwitchCaseMulti, + .tag = .SwitchCase, .main_token = arrow_token, .data = .{ .lhs = try p.addExtra(Node.SubRange{ diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 7338122afc..983190b152 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -1671,32 +1671,32 @@ test "zig fmt: block in slice expression" { // \\ // ); //} -// -//test "zig fmt: switch cases trailing comma" { -// try testTransform( -// \\fn switch_cases(x: i32) void { -// \\ switch (x) { -// \\ 1,2,3 => {}, -// \\ 4,5, => {}, -// \\ 6... 8, => {}, -// \\ else => {}, -// \\ } -// \\} -// , -// \\fn switch_cases(x: i32) void { -// \\ switch (x) { -// \\ 1, 2, 3 => {}, -// \\ 4, -// \\ 5, -// \\ => {}, -// \\ 6...8 => {}, -// \\ else => {}, -// \\ } -// \\} -// \\ -// ); -//} -// + +test "zig fmt: switch cases trailing comma" { + try testTransform( + \\test "switch cases trailing comma"{ + \\ switch (x) { + \\ 1,2,3 => {}, + \\ 4,5, => {}, + \\ 6... 
8, => {}, + \\ else => {}, + \\ } + \\} + , + \\test "switch cases trailing comma" { + \\ switch (x) { + \\ 1, 2, 3 => {}, + \\ 4, + \\ 5, + \\ => {}, + \\ 6...8 => {}, + \\ else => {}, + \\ } + \\} + \\ + ); +} + //test "zig fmt: slice align" { // try testCanonical( // \\const A = struct { @@ -1996,16 +1996,16 @@ test "zig fmt: ptr deref operator and unwrap optional operator" { // \\ // ); //} -// -//test "zig fmt: switch with empty body" { -// try testCanonical( -// \\test "" { -// \\ foo() catch |err| switch (err) {}; -// \\} -// \\ -// ); -//} -// + +test "zig fmt: switch with empty body" { + try testCanonical( + \\test "" { + \\ foo() catch |err| switch (err) {}; + \\} + \\ + ); +} + //test "zig fmt: line comments in struct initializer" { // try testCanonical( // \\fn foo() void { @@ -2725,42 +2725,42 @@ test "zig fmt: blocks" { ); } -//test "zig fmt: switch" { -// try testCanonical( -// \\test "switch" { -// \\ switch (0) { -// \\ 0 => {}, -// \\ 1 => unreachable, -// \\ 2, 3 => {}, -// \\ 4...7 => {}, -// \\ 1 + 4 * 3 + 22 => {}, -// \\ else => { -// \\ const a = 1; -// \\ const b = a; -// \\ }, -// \\ } -// \\ -// \\ const res = switch (0) { -// \\ 0 => 0, -// \\ 1 => 2, -// \\ 1 => a = 4, -// \\ else => 4, -// \\ }; -// \\ -// \\ const Union = union(enum) { -// \\ Int: i64, -// \\ Float: f64, -// \\ }; -// \\ -// \\ switch (u) { -// \\ Union.Int => |int| {}, -// \\ Union.Float => |*float| unreachable, -// \\ } -// \\} -// \\ -// ); -//} -// +test "zig fmt: switch" { + try testCanonical( + \\test "switch" { + \\ switch (0) { + \\ 0 => {}, + \\ 1 => unreachable, + \\ 2, 3 => {}, + \\ 4...7 => {}, + \\ 1 + 4 * 3 + 22 => {}, + \\ else => { + \\ const a = 1; + \\ const b = a; + \\ }, + \\ } + \\ + \\ const res = switch (0) { + \\ 0 => 0, + \\ 1 => 2, + \\ 1 => a = 4, + \\ else => 4, + \\ }; + \\ + \\ const Union = union(enum) { + \\ Int: i64, + \\ Float: f64, + \\ }; + \\ + \\ switch (u) { + \\ Union.Int => |int| {}, + \\ Union.Float => |*float| unreachable, + 
\\ } + \\} + \\ + ); +} + //test "zig fmt: while" { // try testCanonical( // \\test "while" { diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index f67c95d355..bd9b73e55d 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -207,19 +207,18 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac => { const statements = [2]ast.Node.Index{ datas[node].lhs, datas[node].rhs }; if (datas[node].lhs == 0) { - return renderBlock(ais, tree, main_tokens[node], statements[0..0], space); + return renderBlock(ais, tree, node, statements[0..0], space); } else if (datas[node].rhs == 0) { - return renderBlock(ais, tree, main_tokens[node], statements[0..1], space); + return renderBlock(ais, tree, node, statements[0..1], space); } else { - return renderBlock(ais, tree, main_tokens[node], statements[0..2], space); + return renderBlock(ais, tree, node, statements[0..2], space); } }, .Block, .BlockSemicolon, => { - const lbrace = main_tokens[node]; const statements = tree.extra_data[datas[node].lhs..datas[node].rhs]; - return renderBlock(ais, tree, main_tokens[node], statements, space); + return renderBlock(ais, tree, node, statements, space); }, .ErrDefer => { @@ -615,81 +614,6 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac try renderToken(ais, tree, rbrace, space); } }, - //.ErrorSetDecl => { - // const err_set_decl = @fieldParentPtr(ast.Node.ErrorSetDecl, "base", base); - - // const lbrace = tree.nextToken(err_set_decl.error_token); - - // if (err_set_decl.decls_len == 0) { - // try renderToken(ais, tree, err_set_decl.error_token, Space.None); - // try renderToken(ais, tree, lbrace, Space.None); - // return renderToken(ais, tree, err_set_decl.rbrace_token, space); - // } - - // if (err_set_decl.decls_len == 1) blk: { - // const node = err_set_decl.decls()[0]; - - // // if there are any doc comments or same line comments - // // don't try to put it all on one line - // if 
(node.cast(ast.Node.ErrorTag)) |tag| { - // if (tag.doc_comments != null) break :blk; - // } else { - // break :blk; - // } - - // try renderToken(ais, tree, err_set_decl.error_token, Space.None); // error - // try renderToken(ais, tree, lbrace, Space.None); // lbrace - // try renderExpression(ais, tree, node, Space.None); - // return renderToken(ais, tree, err_set_decl.rbrace_token, space); // rbrace - // } - - // try renderToken(ais, tree, err_set_decl.error_token, Space.None); // error - - // const src_has_trailing_comma = blk: { - // const maybe_comma = tree.prevToken(err_set_decl.rbrace_token); - // break :blk tree.token_tags[maybe_comma] == .Comma; - // }; - - // if (src_has_trailing_comma) { - // { - // ais.pushIndent(); - // defer ais.popIndent(); - - // try renderToken(ais, tree, lbrace, Space.Newline); // lbrace - // const decls = err_set_decl.decls(); - // for (decls) |node, i| { - // if (i + 1 < decls.len) { - // try renderExpression(ais, tree, node, Space.None); - // try renderToken(ais, tree, tree.nextToken(node.lastToken()), Space.Newline); // , - - // try renderExtraNewline(ais, tree, decls[i + 1]); - // } else { - // try renderExpression(ais, tree, node, Space.Comma); - // } - // } - // } - - // return renderToken(ais, tree, err_set_decl.rbrace_token, space); // rbrace - // } else { - // try renderToken(ais, tree, lbrace, Space.Space); // lbrace - - // const decls = err_set_decl.decls(); - // for (decls) |node, i| { - // if (i + 1 < decls.len) { - // try renderExpression(ais, tree, node, Space.None); - - // const comma_token = tree.nextToken(node.lastToken()); - // assert(tree.token_tags[comma_token] == .Comma); - // try renderToken(ais, tree, comma_token, Space.Space); // , - // try renderExtraNewline(ais, tree, decls[i + 1]); - // } else { - // try renderExpression(ais, tree, node, Space.Space); - // } - // } - - // return renderToken(ais, tree, err_set_decl.rbrace_token, space); // rbrace - // } - //}, .BuiltinCallTwo, .BuiltinCallTwoComma => { 
if (datas[node].lhs == 0) { @@ -732,92 +656,38 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // } //}, - .Switch => unreachable, // TODO - //.Switch => { - // const switch_node = @fieldParentPtr(ast.Node.Switch, "base", base); + .Switch, + .SwitchComma, + => { + const switch_token = main_tokens[node]; + const condition = datas[node].lhs; + const extra = tree.extraData(datas[node].rhs, ast.Node.SubRange); + const cases = tree.extra_data[extra.start..extra.end]; + const rparen = tree.lastToken(condition) + 1; - // try renderToken(ais, tree, switch_node.switch_token, Space.Space); // switch - // try renderToken(ais, tree, tree.nextToken(switch_node.switch_token), Space.None); // ( + try renderToken(ais, tree, switch_token, .Space); // switch keyword + try renderToken(ais, tree, switch_token + 1, .None); // lparen + try renderExpression(ais, tree, condition, .None); // condtion expression + try renderToken(ais, tree, rparen, .Space); // rparen - // const rparen = tree.nextToken(switch_node.expr.lastToken()); - // const lbrace = tree.nextToken(rparen); + if (cases.len == 0) { + try renderToken(ais, tree, rparen + 1, .None); // lbrace + try renderToken(ais, tree, rparen + 2, space); // rbrace + } else { + try renderToken(ais, tree, rparen + 1, .Newline); // lbrace + ais.pushIndent(); + try renderExpression(ais, tree, cases[0], .Comma); + for (cases[1..]) |case| { + try renderExtraNewline(ais, tree, case); + try renderExpression(ais, tree, case, .Comma); + } + ais.popIndent(); + try renderToken(ais, tree, tree.lastToken(node), space); // rbrace + } + }, - // if (switch_node.cases_len == 0) { - // try renderExpression(ais, tree, switch_node.expr, Space.None); - // try renderToken(ais, tree, rparen, Space.Space); // ) - // try renderToken(ais, tree, lbrace, Space.None); // lbrace - // return renderToken(ais, tree, switch_node.rbrace, space); // rbrace - // } - - // try renderExpression(ais, tree, switch_node.expr, Space.None); - // try 
renderToken(ais, tree, rparen, Space.Space); // ) - - // { - // ais.pushIndentNextLine(); - // defer ais.popIndent(); - // try renderToken(ais, tree, lbrace, Space.Newline); // lbrace - - // const cases = switch_node.cases(); - // for (cases) |node, i| { - // try renderExpression(ais, tree, node, Space.Comma); - - // if (i + 1 < cases.len) { - // try renderExtraNewline(ais, tree, cases[i + 1]); - // } - // } - // } - - // return renderToken(ais, tree, switch_node.rbrace, space); // rbrace - //}, - - .SwitchCaseOne => unreachable, // TODO - .SwitchCaseMulti => unreachable, // TODO - //.SwitchCase => { - // const switch_case = @fieldParentPtr(ast.Node.SwitchCase, "base", base); - - // assert(switch_case.items_len != 0); - // const src_has_trailing_comma = blk: { - // const last_node = switch_case.items()[switch_case.items_len - 1]; - // const maybe_comma = tree.nextToken(last_node.lastToken()); - // break :blk tree.token_tags[maybe_comma] == .Comma; - // }; - - // if (switch_case.items_len == 1 or !src_has_trailing_comma) { - // const items = switch_case.items(); - // for (items) |node, i| { - // if (i + 1 < items.len) { - // try renderExpression(ais, tree, node, Space.None); - - // const comma_token = tree.nextToken(node.lastToken()); - // try renderToken(ais, tree, comma_token, Space.Space); // , - // try renderExtraNewline(ais, tree, items[i + 1]); - // } else { - // try renderExpression(ais, tree, node, Space.Space); - // } - // } - // } else { - // const items = switch_case.items(); - // for (items) |node, i| { - // if (i + 1 < items.len) { - // try renderExpression(ais, tree, node, Space.None); - - // const comma_token = tree.nextToken(node.lastToken()); - // try renderToken(ais, tree, comma_token, Space.Newline); // , - // try renderExtraNewline(ais, tree, items[i + 1]); - // } else { - // try renderExpression(ais, tree, node, Space.Comma); - // } - // } - // } - - // try renderToken(ais, tree, switch_case.arrow_token, Space.Space); // => - - // if 
(switch_case.payload) |payload| { - // try renderExpression(ais, tree, payload, Space.Space); - // } - - // return renderExpression(ais, tree, switch_case.expr, space); - //}, + .SwitchCaseOne => try renderSwitchCase(ais, tree, tree.switchCaseOne(node), space), + .SwitchCase => try renderSwitchCase(ais, tree, tree.switchCase(node), space), .WhileSimple => unreachable, // TODO .WhileCont => unreachable, // TODO @@ -1745,16 +1615,64 @@ fn renderFnProto(ais: *Ais, tree: ast.Tree, fn_proto: ast.Full.FnProto, space: S return renderExpression(ais, tree, fn_proto.ast.return_type, space); } +fn renderSwitchCase( + ais: *Ais, + tree: ast.Tree, + switch_case: ast.Full.SwitchCase, + space: Space, +) Error!void { + const token_tags = tree.tokens.items(.tag); + const trailing_comma = token_tags[switch_case.ast.arrow_token - 1] == .Comma; + + // Render everything before the arrow + if (switch_case.ast.values.len == 0) { + try renderToken(ais, tree, switch_case.ast.arrow_token - 1, .Space); // else keyword + } else if (switch_case.ast.values.len == 1) { + // render on one line and drop the trailing comma if any + try renderExpression(ais, tree, switch_case.ast.values[0], .Space); + } else if (trailing_comma) { + // Render each value on a new line + try renderExpression(ais, tree, switch_case.ast.values[0], .Comma); + for (switch_case.ast.values[1..]) |value_expr| { + try renderExtraNewline(ais, tree, value_expr); + try renderExpression(ais, tree, value_expr, .Comma); + } + } else { + // Render on one line + for (switch_case.ast.values) |value_expr| { + try renderExpression(ais, tree, value_expr, .CommaSpace); + } + } + + // Render the arrow and everything after it + try renderToken(ais, tree, switch_case.ast.arrow_token, .Space); + + if (switch_case.payload_token) |payload_token| { + try renderToken(ais, tree, payload_token - 1, .None); // pipe + if (token_tags[payload_token] == .Asterisk) { + try renderToken(ais, tree, payload_token, .None); // asterisk + try renderToken(ais, 
tree, payload_token + 1, .None); // identifier + try renderToken(ais, tree, payload_token + 2, .Space); // pipe + } else { + try renderToken(ais, tree, payload_token, .None); // identifier + try renderToken(ais, tree, payload_token + 1, .Space); // pipe + } + } + + try renderExpression(ais, tree, switch_case.ast.target_expr, space); +} + fn renderBlock( ais: *Ais, tree: ast.Tree, - lbrace: ast.TokenIndex, + block_node: ast.Node.Index, statements: []const ast.Node.Index, space: Space, ) Error!void { const token_tags = tree.tokens.items(.tag); const node_tags = tree.nodes.items(.tag); const nodes_data = tree.nodes.items(.data); + const lbrace = tree.nodes.items(.main_token)[block_node]; if (token_tags[lbrace - 1] == .Colon and token_tags[lbrace - 2] == .Identifier) @@ -1783,15 +1701,8 @@ fn renderBlock( } } ais.popIndent(); - // The rbrace could be +1 or +2 from the last token of the last - // statement in the block because lastToken() does not count semicolons. - const maybe_rbrace = tree.lastToken(statements[statements.len - 1]) + 1; - if (token_tags[maybe_rbrace] == .RBrace) { - return renderToken(ais, tree, maybe_rbrace, space); - } else { - assert(token_tags[maybe_rbrace + 1] == .RBrace); - return renderToken(ais, tree, maybe_rbrace + 1, space); - } + + try renderToken(ais, tree, tree.lastToken(block_node), space); // rbrace } // TODO: handle comments between fields From b1d8a0a5a6680383bad09b904dba231204a430bd Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Mon, 8 Feb 2021 22:03:23 -0700 Subject: [PATCH 031/173] zig fmt: asm expressions --- lib/std/zig/ast.zig | 240 ++++++++++++++++++-------- lib/std/zig/parse.zig | 46 ++--- lib/std/zig/parser_test.zig | 48 +++--- lib/std/zig/render.zig | 327 ++++++++++++++++++++---------------- 4 files changed, 402 insertions(+), 259 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 9b2d8f9d54..169c6cbb67 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -419,13 +419,16 @@ pub const Tree 
= struct { n = extra.start; }, + .AsmOutput, .AsmInput => { + assert(token_tags[main_tokens[n] - 1] == .LBracket); + return main_tokens[n] - 1; + }, + .WhileSimple => unreachable, // TODO .WhileCont => unreachable, // TODO .While => unreachable, // TODO .ForSimple => unreachable, // TODO .For => unreachable, // TODO - .AsmOutput => unreachable, // TODO - .AsmInput => unreachable, // TODO .ErrorValue => unreachable, // TODO }; } @@ -515,6 +518,9 @@ pub const Tree = struct { .GroupedExpression, .StringLiteral, .ErrorSetDecl, + .AsmSimple, + .AsmOutput, + .AsmInput, => return datas[n].rhs + end_offset, .AnyType, @@ -566,6 +572,10 @@ pub const Tree = struct { n = tree.extra_data[members.end - 1]; // last parameter } }, + .Asm => { + const extra = tree.extraData(datas[n].rhs, Node.Asm); + return extra.rparen + end_offset; + }, .ContainerDeclArgComma, .SwitchComma, => { @@ -765,8 +775,6 @@ pub const Tree = struct { .TaggedUnionEnumTagComma => unreachable, // TODO .If => unreachable, // TODO .Continue => unreachable, // TODO - .AsmSimple => unreachable, // TODO - .Asm => unreachable, // TODO .SwitchRange => unreachable, // TODO .ArrayType => unreachable, // TODO .ArrayTypeSentinel => unreachable, // TODO @@ -778,8 +786,6 @@ pub const Tree = struct { .FnProtoMulti => unreachable, // TODO .FnProtoOne => unreachable, // TODO .FnProto => unreachable, // TODO - .AsmOutput => unreachable, // TODO - .AsmInput => unreachable, // TODO .ErrorValue => unreachable, // TODO }; } @@ -790,7 +796,7 @@ pub const Tree = struct { return mem.indexOfScalar(u8, source, '\n') == null; } - pub fn globalVarDecl(tree: Tree, node: Node.Index) Full.VarDecl { + pub fn globalVarDecl(tree: Tree, node: Node.Index) full.VarDecl { assert(tree.nodes.items(.tag)[node] == .GlobalVarDecl); const data = tree.nodes.items(.data)[node]; const extra = tree.extraData(data.lhs, Node.GlobalVarDecl); @@ -803,7 +809,7 @@ pub const Tree = struct { }); } - pub fn localVarDecl(tree: Tree, node: Node.Index) Full.VarDecl { 
+ pub fn localVarDecl(tree: Tree, node: Node.Index) full.VarDecl { assert(tree.nodes.items(.tag)[node] == .LocalVarDecl); const data = tree.nodes.items(.data)[node]; const extra = tree.extraData(data.lhs, Node.LocalVarDecl); @@ -816,7 +822,7 @@ pub const Tree = struct { }); } - pub fn simpleVarDecl(tree: Tree, node: Node.Index) Full.VarDecl { + pub fn simpleVarDecl(tree: Tree, node: Node.Index) full.VarDecl { assert(tree.nodes.items(.tag)[node] == .SimpleVarDecl); const data = tree.nodes.items(.data)[node]; return tree.fullVarDecl(.{ @@ -828,7 +834,7 @@ pub const Tree = struct { }); } - pub fn alignedVarDecl(tree: Tree, node: Node.Index) Full.VarDecl { + pub fn alignedVarDecl(tree: Tree, node: Node.Index) full.VarDecl { assert(tree.nodes.items(.tag)[node] == .AlignedVarDecl); const data = tree.nodes.items(.data)[node]; return tree.fullVarDecl(.{ @@ -840,7 +846,7 @@ pub const Tree = struct { }); } - pub fn ifSimple(tree: Tree, node: Node.Index) Full.If { + pub fn ifSimple(tree: Tree, node: Node.Index) full.If { assert(tree.nodes.items(.tag)[node] == .IfSimple); const data = tree.nodes.items(.data)[node]; return tree.fullIf(.{ @@ -851,7 +857,7 @@ pub const Tree = struct { }); } - pub fn ifFull(tree: Tree, node: Node.Index) Full.If { + pub fn ifFull(tree: Tree, node: Node.Index) full.If { assert(tree.nodes.items(.tag)[node] == .If); const data = tree.nodes.items(.data)[node]; const extra = tree.extraData(data.rhs, Node.If); @@ -863,7 +869,7 @@ pub const Tree = struct { }); } - pub fn containerField(tree: Tree, node: Node.Index) Full.ContainerField { + pub fn containerField(tree: Tree, node: Node.Index) full.ContainerField { assert(tree.nodes.items(.tag)[node] == .ContainerField); const data = tree.nodes.items(.data)[node]; const extra = tree.extraData(data.rhs, Node.ContainerField); @@ -875,7 +881,7 @@ pub const Tree = struct { }); } - pub fn containerFieldInit(tree: Tree, node: Node.Index) Full.ContainerField { + pub fn containerFieldInit(tree: Tree, node: 
Node.Index) full.ContainerField { assert(tree.nodes.items(.tag)[node] == .ContainerFieldInit); const data = tree.nodes.items(.data)[node]; return tree.fullContainerField(.{ @@ -886,7 +892,7 @@ pub const Tree = struct { }); } - pub fn containerFieldAlign(tree: Tree, node: Node.Index) Full.ContainerField { + pub fn containerFieldAlign(tree: Tree, node: Node.Index) full.ContainerField { assert(tree.nodes.items(.tag)[node] == .ContainerFieldAlign); const data = tree.nodes.items(.data)[node]; return tree.fullContainerField(.{ @@ -897,7 +903,7 @@ pub const Tree = struct { }); } - pub fn fnProtoSimple(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) Full.FnProto { + pub fn fnProtoSimple(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) full.FnProto { assert(tree.nodes.items(.tag)[node] == .FnProtoSimple); const data = tree.nodes.items(.data)[node]; buffer[0] = data.lhs; @@ -912,7 +918,7 @@ pub const Tree = struct { }); } - pub fn fnProtoMulti(tree: Tree, node: Node.Index) Full.FnProto { + pub fn fnProtoMulti(tree: Tree, node: Node.Index) full.FnProto { assert(tree.nodes.items(.tag)[node] == .FnProtoMulti); const data = tree.nodes.items(.data)[node]; const params_range = tree.extraData(data.lhs, Node.SubRange); @@ -927,7 +933,7 @@ pub const Tree = struct { }); } - pub fn fnProtoOne(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) Full.FnProto { + pub fn fnProtoOne(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) full.FnProto { assert(tree.nodes.items(.tag)[node] == .FnProtoOne); const data = tree.nodes.items(.data)[node]; const extra = tree.extraData(data.lhs, Node.FnProtoOne); @@ -943,7 +949,7 @@ pub const Tree = struct { }); } - pub fn fnProto(tree: Tree, node: Node.Index) Full.FnProto { + pub fn fnProto(tree: Tree, node: Node.Index) full.FnProto { assert(tree.nodes.items(.tag)[node] == .FnProto); const data = tree.nodes.items(.data)[node]; const extra = tree.extraData(data.lhs, Node.FnProto); @@ -958,7 +964,7 @@ pub const Tree = struct { }); } - pub 
fn structInitOne(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) Full.StructInit { + pub fn structInitOne(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) full.StructInit { assert(tree.nodes.items(.tag)[node] == .StructInitOne); const data = tree.nodes.items(.data)[node]; buffer[0] = data.rhs; @@ -970,7 +976,7 @@ pub const Tree = struct { }); } - pub fn structInitDotTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) Full.StructInit { + pub fn structInitDotTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) full.StructInit { assert(tree.nodes.items(.tag)[node] == .StructInitDotTwo or tree.nodes.items(.tag)[node] == .StructInitDotTwoComma); const data = tree.nodes.items(.data)[node]; @@ -988,7 +994,7 @@ pub const Tree = struct { }); } - pub fn structInitDot(tree: Tree, node: Node.Index) Full.StructInit { + pub fn structInitDot(tree: Tree, node: Node.Index) full.StructInit { assert(tree.nodes.items(.tag)[node] == .StructInitDot); const data = tree.nodes.items(.data)[node]; return tree.fullStructInit(.{ @@ -998,7 +1004,7 @@ pub const Tree = struct { }); } - pub fn structInit(tree: Tree, node: Node.Index) Full.StructInit { + pub fn structInit(tree: Tree, node: Node.Index) full.StructInit { assert(tree.nodes.items(.tag)[node] == .StructInit); const data = tree.nodes.items(.data)[node]; const fields_range = tree.extraData(data.rhs, Node.SubRange); @@ -1009,7 +1015,7 @@ pub const Tree = struct { }); } - pub fn arrayInitOne(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) Full.ArrayInit { + pub fn arrayInitOne(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) full.ArrayInit { assert(tree.nodes.items(.tag)[node] == .ArrayInitOne); const data = tree.nodes.items(.data)[node]; buffer[0] = data.rhs; @@ -1023,7 +1029,7 @@ pub const Tree = struct { }; } - pub fn arrayInitDotTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) Full.ArrayInit { + pub fn arrayInitDotTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) full.ArrayInit { 
assert(tree.nodes.items(.tag)[node] == .ArrayInitDotTwo or tree.nodes.items(.tag)[node] == .ArrayInitDotTwoComma); const data = tree.nodes.items(.data)[node]; @@ -1043,7 +1049,7 @@ pub const Tree = struct { }; } - pub fn arrayInitDot(tree: Tree, node: Node.Index) Full.ArrayInit { + pub fn arrayInitDot(tree: Tree, node: Node.Index) full.ArrayInit { assert(tree.nodes.items(.tag)[node] == .ArrayInitDot); const data = tree.nodes.items(.data)[node]; return .{ @@ -1055,7 +1061,7 @@ pub const Tree = struct { }; } - pub fn arrayInit(tree: Tree, node: Node.Index) Full.ArrayInit { + pub fn arrayInit(tree: Tree, node: Node.Index) full.ArrayInit { assert(tree.nodes.items(.tag)[node] == .ArrayInit); const data = tree.nodes.items(.data)[node]; const elem_range = tree.extraData(data.rhs, Node.SubRange); @@ -1068,7 +1074,7 @@ pub const Tree = struct { }; } - pub fn arrayType(tree: Tree, node: Node.Index) Full.ArrayType { + pub fn arrayType(tree: Tree, node: Node.Index) full.ArrayType { assert(tree.nodes.items(.tag)[node] == .ArrayType); const data = tree.nodes.items(.data)[node]; return .{ @@ -1081,7 +1087,7 @@ pub const Tree = struct { }; } - pub fn arrayTypeSentinel(tree: Tree, node: Node.Index) Full.ArrayType { + pub fn arrayTypeSentinel(tree: Tree, node: Node.Index) full.ArrayType { assert(tree.nodes.items(.tag)[node] == .ArrayTypeSentinel); const data = tree.nodes.items(.data)[node]; const extra = tree.extraData(data.rhs, Node.ArrayTypeSentinel); @@ -1095,7 +1101,7 @@ pub const Tree = struct { }; } - pub fn ptrTypeAligned(tree: Tree, node: Node.Index) Full.PtrType { + pub fn ptrTypeAligned(tree: Tree, node: Node.Index) full.PtrType { assert(tree.nodes.items(.tag)[node] == .PtrTypeAligned); const data = tree.nodes.items(.data)[node]; return tree.fullPtrType(.{ @@ -1108,7 +1114,7 @@ pub const Tree = struct { }); } - pub fn ptrTypeSentinel(tree: Tree, node: Node.Index) Full.PtrType { + pub fn ptrTypeSentinel(tree: Tree, node: Node.Index) full.PtrType { 
assert(tree.nodes.items(.tag)[node] == .PtrTypeSentinel); const data = tree.nodes.items(.data)[node]; return tree.fullPtrType(.{ @@ -1121,7 +1127,7 @@ pub const Tree = struct { }); } - pub fn ptrType(tree: Tree, node: Node.Index) Full.PtrType { + pub fn ptrType(tree: Tree, node: Node.Index) full.PtrType { assert(tree.nodes.items(.tag)[node] == .PtrType); const data = tree.nodes.items(.data)[node]; const extra = tree.extraData(data.lhs, Node.PtrType); @@ -1135,7 +1141,7 @@ pub const Tree = struct { }); } - pub fn ptrTypeBitRange(tree: Tree, node: Node.Index) Full.PtrType { + pub fn ptrTypeBitRange(tree: Tree, node: Node.Index) full.PtrType { assert(tree.nodes.items(.tag)[node] == .PtrTypeBitRange); const data = tree.nodes.items(.data)[node]; const extra = tree.extraData(data.lhs, Node.PtrTypeBitRange); @@ -1149,7 +1155,7 @@ pub const Tree = struct { }); } - pub fn sliceOpen(tree: Tree, node: Node.Index) Full.Slice { + pub fn sliceOpen(tree: Tree, node: Node.Index) full.Slice { assert(tree.nodes.items(.tag)[node] == .SliceOpen); const data = tree.nodes.items(.data)[node]; return .{ @@ -1163,7 +1169,7 @@ pub const Tree = struct { }; } - pub fn slice(tree: Tree, node: Node.Index) Full.Slice { + pub fn slice(tree: Tree, node: Node.Index) full.Slice { assert(tree.nodes.items(.tag)[node] == .Slice); const data = tree.nodes.items(.data)[node]; const extra = tree.extraData(data.rhs, Node.Slice); @@ -1178,7 +1184,7 @@ pub const Tree = struct { }; } - pub fn sliceSentinel(tree: Tree, node: Node.Index) Full.Slice { + pub fn sliceSentinel(tree: Tree, node: Node.Index) full.Slice { assert(tree.nodes.items(.tag)[node] == .SliceSentinel); const data = tree.nodes.items(.data)[node]; const extra = tree.extraData(data.rhs, Node.SliceSentinel); @@ -1193,7 +1199,7 @@ pub const Tree = struct { }; } - pub fn containerDeclTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) Full.ContainerDecl { + pub fn containerDeclTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) 
full.ContainerDecl { assert(tree.nodes.items(.tag)[node] == .ContainerDeclTwo or tree.nodes.items(.tag)[node] == .ContainerDeclTwoComma); const data = tree.nodes.items(.data)[node]; @@ -1212,7 +1218,7 @@ pub const Tree = struct { }); } - pub fn containerDecl(tree: Tree, node: Node.Index) Full.ContainerDecl { + pub fn containerDecl(tree: Tree, node: Node.Index) full.ContainerDecl { assert(tree.nodes.items(.tag)[node] == .ContainerDecl or tree.nodes.items(.tag)[node] == .ContainerDeclComma); const data = tree.nodes.items(.data)[node]; @@ -1224,7 +1230,7 @@ pub const Tree = struct { }); } - pub fn containerDeclArg(tree: Tree, node: Node.Index) Full.ContainerDecl { + pub fn containerDeclArg(tree: Tree, node: Node.Index) full.ContainerDecl { assert(tree.nodes.items(.tag)[node] == .ContainerDeclArg or tree.nodes.items(.tag)[node] == .ContainerDeclArgComma); const data = tree.nodes.items(.data)[node]; @@ -1237,7 +1243,7 @@ pub const Tree = struct { }); } - pub fn taggedUnionTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) Full.ContainerDecl { + pub fn taggedUnionTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) full.ContainerDecl { assert(tree.nodes.items(.tag)[node] == .TaggedUnionTwo or tree.nodes.items(.tag)[node] == .TaggedUnionTwoComma); const data = tree.nodes.items(.data)[node]; @@ -1257,7 +1263,7 @@ pub const Tree = struct { }); } - pub fn taggedUnion(tree: Tree, node: Node.Index) Full.ContainerDecl { + pub fn taggedUnion(tree: Tree, node: Node.Index) full.ContainerDecl { assert(tree.nodes.items(.tag)[node] == .TaggedUnion or tree.nodes.items(.tag)[node] == .TaggedUnionComma); const data = tree.nodes.items(.data)[node]; @@ -1270,7 +1276,7 @@ pub const Tree = struct { }); } - pub fn taggedUnionEnumTag(tree: Tree, node: Node.Index) Full.ContainerDecl { + pub fn taggedUnionEnumTag(tree: Tree, node: Node.Index) full.ContainerDecl { assert(tree.nodes.items(.tag)[node] == .TaggedUnionEnumTag or tree.nodes.items(.tag)[node] == .TaggedUnionEnumTagComma); 
const data = tree.nodes.items(.data)[node]; @@ -1284,7 +1290,7 @@ pub const Tree = struct { }); } - pub fn switchCaseOne(tree: Tree, node: Node.Index) Full.SwitchCase { + pub fn switchCaseOne(tree: Tree, node: Node.Index) full.SwitchCase { const data = &tree.nodes.items(.data)[node]; return tree.fullSwitchCase(.{ .values = if (data.lhs == 0) &.{} else @ptrCast([*]Node.Index, &data.lhs)[0..1], @@ -1293,7 +1299,7 @@ pub const Tree = struct { }); } - pub fn switchCase(tree: Tree, node: Node.Index) Full.SwitchCase { + pub fn switchCase(tree: Tree, node: Node.Index) full.SwitchCase { const data = tree.nodes.items(.data)[node]; const extra = tree.extraData(data.lhs, Node.SubRange); return tree.fullSwitchCase(.{ @@ -1303,9 +1309,30 @@ pub const Tree = struct { }); } - fn fullVarDecl(tree: Tree, info: Full.VarDecl.Ast) Full.VarDecl { + pub fn asmSimple(tree: Tree, node: Node.Index) full.Asm { + const data = tree.nodes.items(.data)[node]; + return tree.fullAsm(.{ + .asm_token = tree.nodes.items(.main_token)[node], + .template = data.lhs, + .items = &.{}, + .rparen = data.rhs, + }); + } + + pub fn asmFull(tree: Tree, node: Node.Index) full.Asm { + const data = tree.nodes.items(.data)[node]; + const extra = tree.extraData(data.rhs, Node.Asm); + return tree.fullAsm(.{ + .asm_token = tree.nodes.items(.main_token)[node], + .template = data.lhs, + .items = tree.extra_data[extra.items_start..extra.items_end], + .rparen = extra.rparen, + }); + } + + fn fullVarDecl(tree: Tree, info: full.VarDecl.Ast) full.VarDecl { const token_tags = tree.tokens.items(.tag); - var result: Full.VarDecl = .{ + var result: full.VarDecl = .{ .ast = info, .visib_token = null, .extern_export_token = null, @@ -1328,9 +1355,9 @@ pub const Tree = struct { return result; } - fn fullIf(tree: Tree, info: Full.If.Ast) Full.If { + fn fullIf(tree: Tree, info: full.If.Ast) full.If { const token_tags = tree.tokens.items(.tag); - var result: Full.If = .{ + var result: full.If = .{ .ast = info, .payload_token = null, 
.error_token = null, @@ -1353,9 +1380,9 @@ pub const Tree = struct { return result; } - fn fullContainerField(tree: Tree, info: Full.ContainerField.Ast) Full.ContainerField { + fn fullContainerField(tree: Tree, info: full.ContainerField.Ast) full.ContainerField { const token_tags = tree.tokens.items(.tag); - var result: Full.ContainerField = .{ + var result: full.ContainerField = .{ .ast = info, .comptime_token = null, }; @@ -1367,27 +1394,27 @@ pub const Tree = struct { return result; } - fn fullFnProto(tree: Tree, info: Full.FnProto.Ast) Full.FnProto { + fn fullFnProto(tree: Tree, info: full.FnProto.Ast) full.FnProto { const token_tags = tree.tokens.items(.tag); - var result: Full.FnProto = .{ + var result: full.FnProto = .{ .ast = info, }; return result; } - fn fullStructInit(tree: Tree, info: Full.StructInit.Ast) Full.StructInit { + fn fullStructInit(tree: Tree, info: full.StructInit.Ast) full.StructInit { const token_tags = tree.tokens.items(.tag); - var result: Full.StructInit = .{ + var result: full.StructInit = .{ .ast = info, }; return result; } - fn fullPtrType(tree: Tree, info: Full.PtrType.Ast) Full.PtrType { + fn fullPtrType(tree: Tree, info: full.PtrType.Ast) full.PtrType { const token_tags = tree.tokens.items(.tag); // TODO: looks like stage1 isn't quite smart enough to handle enum // literals in some places here - const Kind = Full.PtrType.Kind; + const Kind = full.PtrType.Kind; const kind: Kind = switch (token_tags[info.main_token]) { .Asterisk => switch (token_tags[info.main_token + 1]) { .RBracket => .many, @@ -1402,7 +1429,7 @@ pub const Tree = struct { }, else => unreachable, }; - var result: Full.PtrType = .{ + var result: full.PtrType = .{ .kind = kind, .allowzero_token = null, .const_token = null, @@ -1441,9 +1468,9 @@ pub const Tree = struct { return result; } - fn fullContainerDecl(tree: Tree, info: Full.ContainerDecl.Ast) Full.ContainerDecl { + fn fullContainerDecl(tree: Tree, info: full.ContainerDecl.Ast) full.ContainerDecl { const 
token_tags = tree.tokens.items(.tag); - var result: Full.ContainerDecl = .{ + var result: full.ContainerDecl = .{ .ast = info, .layout_token = null, }; @@ -1454,9 +1481,9 @@ pub const Tree = struct { return result; } - fn fullSwitchCase(tree: Tree, info: Full.SwitchCase.Ast) Full.SwitchCase { + fn fullSwitchCase(tree: Tree, info: full.SwitchCase.Ast) full.SwitchCase { const token_tags = tree.tokens.items(.tag); - var result: Full.SwitchCase = .{ + var result: full.SwitchCase = .{ .ast = info, .payload_token = null, }; @@ -1465,10 +1492,67 @@ pub const Tree = struct { } return result; } + + fn fullAsm(tree: Tree, info: full.Asm.Ast) full.Asm { + const token_tags = tree.tokens.items(.tag); + const node_tags = tree.nodes.items(.tag); + var result: full.Asm = .{ + .ast = info, + .volatile_token = null, + .inputs = &.{}, + .outputs = &.{}, + .first_clobber = null, + }; + if (token_tags[info.asm_token + 1] == .Keyword_volatile) { + result.volatile_token = info.asm_token + 1; + } + const outputs_end: usize = for (info.items) |item, i| { + switch (node_tags[item]) { + .AsmOutput => continue, + else => break i, + } + } else info.items.len; + + result.outputs = info.items[0..outputs_end]; + result.inputs = info.items[outputs_end..]; + + if (info.items.len == 0) { + // asm ("foo" ::: "a", "b"); + const template_token = tree.lastToken(info.template); + if (token_tags[template_token + 1] == .Colon and + token_tags[template_token + 2] == .Colon and + token_tags[template_token + 3] == .Colon and + token_tags[template_token + 4] == .StringLiteral) + { + result.first_clobber = template_token + 4; + } + } else if (result.inputs.len != 0) { + // asm ("foo" :: [_] "" (y) : "a", "b"); + const last_input = result.inputs[result.inputs.len - 1]; + const rparen = tree.lastToken(last_input); + if (token_tags[rparen + 1] == .Colon and + token_tags[rparen + 2] == .StringLiteral) + { + result.first_clobber = rparen + 2; + } + } else { + // asm ("foo" : [_] "" (x) :: "a", "b"); + const 
last_output = result.outputs[result.outputs.len - 1]; + const rparen = tree.lastToken(last_output); + if (token_tags[rparen + 1] == .Colon and + token_tags[rparen + 2] == .Colon and + token_tags[rparen + 3] == .StringLiteral) + { + result.first_clobber = rparen + 3; + } + } + + return result; + } }; /// Fully assembled AST node information. -pub const Full = struct { +pub const full = struct { pub const VarDecl = struct { visib_token: ?TokenIndex, extern_export_token: ?TokenIndex, @@ -1624,6 +1708,21 @@ pub const Full = struct { target_expr: Node.Index, }; }; + + pub const Asm = struct { + ast: Ast, + volatile_token: ?TokenIndex, + first_clobber: ?TokenIndex, + outputs: []const Node.Index, + inputs: []const Node.Index, + + pub const Ast = struct { + asm_token: TokenIndex, + template: Node.Index, + items: []const Node.Index, + rparen: TokenIndex, + }; + }; }; pub const Error = union(enum) { @@ -2234,15 +2333,15 @@ pub const Node = struct { Block, /// Same as BlockTwo but there is known to be a semicolon before the rbrace. BlockSemicolon, - /// `asm(lhs)`. rhs unused. + /// `asm(lhs)`. rhs is the token index of the rparen. AsmSimple, - /// `asm(lhs, a)`. `sub_range_list[rhs]`. + /// `asm(lhs, a)`. `Asm[rhs]`. Asm, - /// `[a] "b" (c)`. lhs is string literal token index, rhs is 0. - /// `[a] "b" (-> rhs)`. lhs is the string literal token index, rhs is type expr. + /// `[a] "b" (c)`. lhs is 0, rhs is token index of the rparen. + /// `[a] "b" (-> lhs)`. rhs is token index of the rparen. /// main_token is `a`. AsmOutput, - /// `[a] "b" (rhs)`. lhs is string literal token index. + /// `[a] "b" (lhs)`. rhs is token index of the rparen. /// main_token is `a`. AsmInput, /// `error.a`. lhs is token index of `.`. rhs is token index of `a`. @@ -2355,4 +2454,11 @@ pub const Node = struct { /// Populated if callconv(A) is present. callconv_expr: Index, }; + + pub const Asm = struct { + items_start: Index, + items_end: Index, + /// Needed to make lastToken() work. 
+ rparen: TokenIndex, + }; }; diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig index aa70634c47..3ca5bb0049 100644 --- a/lib/std/zig/parse.zig +++ b/lib/std/zig/parse.zig @@ -472,7 +472,7 @@ const Parser = struct { /// TestDecl <- KEYWORD_test STRINGLITERALSINGLE? Block fn expectTestDecl(p: *Parser) !Node.Index { - const test_token = try p.expectToken(.Keyword_test); + const test_token = p.assertToken(.Keyword_test); const name_token = p.eatToken(.StringLiteral); const block_node = try p.parseBlock(); if (block_node == 0) return p.fail(.{ .ExpectedLBrace = .{ .token = p.tok_i } }); @@ -739,7 +739,7 @@ const Parser = struct { /// ContainerField <- KEYWORD_comptime? IDENTIFIER (COLON TypeExpr ByteAlign?)? (EQUAL Expr)? fn expectContainerField(p: *Parser) !Node.Index { const comptime_token = p.eatToken(.Keyword_comptime); - const name_token = try p.expectToken(.Identifier); + const name_token = p.assertToken(.Identifier); var align_expr: Node.Index = 0; var type_expr: Node.Index = 0; @@ -1846,7 +1846,7 @@ const Parser = struct { /// / CurlySuffixExpr fn parsePrimaryExpr(p: *Parser) !Node.Index { switch (p.token_tags[p.tok_i]) { - .Keyword_asm => return p.parseAsmExpr(), + .Keyword_asm => return p.expectAsmExpr(), .Keyword_if => return p.parseIfExpr(), .Keyword_break => { p.tok_i += 1; @@ -2910,19 +2910,19 @@ const Parser = struct { /// StringList <- (STRINGLITERAL COMMA)* STRINGLITERAL? /// AsmOutputList <- (AsmOutputItem COMMA)* AsmOutputItem? /// AsmInputList <- (AsmInputItem COMMA)* AsmInputItem? 
- fn parseAsmExpr(p: *Parser) !Node.Index { + fn expectAsmExpr(p: *Parser) !Node.Index { const asm_token = p.assertToken(.Keyword_asm); _ = p.eatToken(.Keyword_volatile); _ = try p.expectToken(.LParen); const template = try p.expectExpr(); - if (p.eatToken(.RParen)) |_| { + if (p.eatToken(.RParen)) |rparen| { return p.addNode(.{ .tag = .AsmSimple, .main_token = asm_token, .data = .{ .lhs = template, - .rhs = undefined, + .rhs = rparen, }, }); } @@ -2981,16 +2981,17 @@ const Parser = struct { } } } - _ = try p.expectToken(.RParen); + const rparen = try p.expectToken(.RParen); const span = try p.listToSpan(list.items); return p.addNode(.{ .tag = .Asm, .main_token = asm_token, .data = .{ .lhs = template, - .rhs = try p.addExtra(Node.SubRange{ - .start = span.start, - .end = span.end, + .rhs = try p.addExtra(Node.Asm{ + .items_start = span.start, + .items_end = span.end, + .rparen = rparen, }), }, }); @@ -3001,16 +3002,23 @@ const Parser = struct { _ = p.eatToken(.LBracket) orelse return null_node; const identifier = try p.expectToken(.Identifier); _ = try p.expectToken(.RBracket); - const constraint = try p.expectToken(.StringLiteral); + _ = try p.expectToken(.StringLiteral); _ = try p.expectToken(.LParen); - const rhs: Node.Index = if (p.eatToken(.Arrow)) |_| try p.expectTypeExpr() else null_node; - _ = try p.expectToken(.RParen); + const type_expr: Node.Index = blk: { + if (p.eatToken(.Arrow)) |_| { + break :blk try p.expectTypeExpr(); + } else { + _ = try p.expectToken(.Identifier); + break :blk null_node; + } + }; + const rparen = try p.expectToken(.RParen); return p.addNode(.{ .tag = .AsmOutput, .main_token = identifier, .data = .{ - .lhs = constraint, - .rhs = rhs, + .lhs = type_expr, + .rhs = rparen, }, }); } @@ -3020,16 +3028,16 @@ const Parser = struct { _ = p.eatToken(.LBracket) orelse return null_node; const identifier = try p.expectToken(.Identifier); _ = try p.expectToken(.RBracket); - const constraint = try p.expectToken(.StringLiteral); + _ = try 
p.expectToken(.StringLiteral); _ = try p.expectToken(.LParen); const expr = try p.expectExpr(); - _ = try p.expectToken(.RParen); + const rparen = try p.expectToken(.RParen); return p.addNode(.{ .tag = .AsmInput, .main_token = identifier, .data = .{ - .lhs = constraint, - .rhs = expr, + .lhs = expr, + .rhs = rparen, }, }); } diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 983190b152..5ae8f37b87 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -313,30 +313,30 @@ test "zig fmt: builtin call with trailing comma" { ); } -//test "zig fmt: asm expression with comptime content" { -// try testCanonical( -// \\comptime { -// \\ asm ("foo" ++ "bar"); -// \\} -// \\pub fn main() void { -// \\ asm volatile ("foo" ++ "bar"); -// \\ asm volatile ("foo" ++ "bar" -// \\ : [_] "" (x) -// \\ ); -// \\ asm volatile ("foo" ++ "bar" -// \\ : [_] "" (x) -// \\ : [_] "" (y) -// \\ ); -// \\ asm volatile ("foo" ++ "bar" -// \\ : [_] "" (x) -// \\ : [_] "" (y) -// \\ : "h", "e", "l", "l", "o" -// \\ ); -// \\} -// \\ -// ); -//} -// +test "zig fmt: asm expression with comptime content" { + try testCanonical( + \\comptime { + \\ asm ("foo" ++ "bar"); + \\} + \\pub fn main() void { + \\ asm volatile ("foo" ++ "bar"); + \\ asm volatile ("foo" ++ "bar" + \\ : [_] "" (x) + \\ ); + \\ asm volatile ("foo" ++ "bar" + \\ : [_] "" (x) + \\ : [_] "" (y) + \\ ); + \\ asm volatile ("foo" ++ "bar" + \\ : [_] "" (x) + \\ : [_] "" (y) + \\ : "h", "e", "l", "l", "o" + \\ ); + \\} + \\ + ); +} + //test "zig fmt: anytype struct field" { // try testCanonical( // \\pub const Pointer = struct { diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index bd9b73e55d..212e4c85d1 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -816,118 +816,8 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac .IfSimple => return renderIf(ais, tree, tree.ifSimple(node), space), .If => return renderIf(ais, tree, 
tree.ifFull(node), space), - .Asm => unreachable, // TODO - .AsmSimple => unreachable, // TODO - .AsmOutput => unreachable, // TODO - .AsmInput => unreachable, // TODO - //.Asm => { - // const asm_node = @fieldParentPtr(ast.Node.Asm, "base", base); - - // try renderToken(ais, tree, asm_node.asm_token, Space.Space); // asm - - // if (asm_node.volatile_token) |volatile_token| { - // try renderToken(ais, tree, volatile_token, Space.Space); // volatile - // try renderToken(ais, tree, tree.nextToken(volatile_token), Space.None); // ( - // } else { - // try renderToken(ais, tree, tree.nextToken(asm_node.asm_token), Space.None); // ( - // } - - // asmblk: { - // ais.pushIndent(); - // defer ais.popIndent(); - - // if (asm_node.outputs.len == 0 and asm_node.inputs.len == 0 and asm_node.clobbers.len == 0) { - // try renderExpression(ais, tree, asm_node.template, Space.None); - // break :asmblk; - // } - - // try renderExpression(ais, tree, asm_node.template, Space.Newline); - - // ais.setIndentDelta(asm_indent_delta); - // defer ais.setIndentDelta(indent_delta); - - // const colon1 = tree.nextToken(asm_node.template.lastToken()); - - // const colon2 = if (asm_node.outputs.len == 0) blk: { - // try renderToken(ais, tree, colon1, Space.Newline); // : - - // break :blk tree.nextToken(colon1); - // } else blk: { - // try renderToken(ais, tree, colon1, Space.Space); // : - - // ais.pushIndent(); - // defer ais.popIndent(); - - // for (asm_node.outputs) |*asm_output, i| { - // if (i + 1 < asm_node.outputs.len) { - // const next_asm_output = asm_node.outputs[i + 1]; - // try renderAsmOutput(allocator, ais, tree, asm_output, Space.None); - - // const comma = tree.prevToken(next_asm_output.firstToken()); - // try renderToken(ais, tree, comma, Space.Newline); // , - // try renderExtraNewlineToken(ais, tree, next_asm_output.firstToken()); - // } else if (asm_node.inputs.len == 0 and asm_node.clobbers.len == 0) { - // try renderAsmOutput(allocator, ais, tree, asm_output, 
Space.Newline); - // break :asmblk; - // } else { - // try renderAsmOutput(allocator, ais, tree, asm_output, Space.Newline); - // const comma_or_colon = tree.nextToken(asm_output.lastToken()); - // break :blk switch (tree.token_tags[comma_or_colon]) { - // .Comma => tree.nextToken(comma_or_colon), - // else => comma_or_colon, - // }; - // } - // } - // unreachable; - // }; - - // const colon3 = if (asm_node.inputs.len == 0) blk: { - // try renderToken(ais, tree, colon2, Space.Newline); // : - // break :blk tree.nextToken(colon2); - // } else blk: { - // try renderToken(ais, tree, colon2, Space.Space); // : - // ais.pushIndent(); - // defer ais.popIndent(); - // for (asm_node.inputs) |*asm_input, i| { - // if (i + 1 < asm_node.inputs.len) { - // const next_asm_input = &asm_node.inputs[i + 1]; - // try renderAsmInput(allocator, ais, tree, asm_input, Space.None); - - // const comma = tree.prevToken(next_asm_input.firstToken()); - // try renderToken(ais, tree, comma, Space.Newline); // , - // try renderExtraNewlineToken(ais, tree, next_asm_input.firstToken()); - // } else if (asm_node.clobbers.len == 0) { - // try renderAsmInput(allocator, ais, tree, asm_input, Space.Newline); - // break :asmblk; - // } else { - // try renderAsmInput(allocator, ais, tree, asm_input, Space.Newline); - // const comma_or_colon = tree.nextToken(asm_input.lastToken()); - // break :blk switch (tree.token_tags[comma_or_colon]) { - // .Comma => tree.nextToken(comma_or_colon), - // else => comma_or_colon, - // }; - // } - // } - // unreachable; - // }; - - // try renderToken(ais, tree, colon3, Space.Space); // : - // ais.pushIndent(); - // defer ais.popIndent(); - // for (asm_node.clobbers) |clobber_node, i| { - // if (i + 1 >= asm_node.clobbers.len) { - // try renderExpression(ais, tree, clobber_node, Space.Newline); - // break :asmblk; - // } else { - // try renderExpression(ais, tree, clobber_node, Space.None); - // const comma = tree.nextToken(clobber_node.lastToken()); - // try 
renderToken(ais, tree, comma, Space.Space); // , - // } - // } - // } - - // return renderToken(ais, tree, asm_node.rparen, space); - //}, + .AsmSimple => return renderAsm(ais, tree, tree.asmSimple(node), space), + .Asm => return renderAsm(ais, tree, tree.asmFull(node), space), .EnumLiteral => { try renderToken(ais, tree, main_tokens[node] - 1, .None); // . @@ -945,6 +835,8 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac .AlignedVarDecl => unreachable, .UsingNamespace => unreachable, .TestDecl => unreachable, + .AsmOutput => unreachable, + .AsmInput => unreachable, } } @@ -952,7 +844,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac fn renderArrayType( ais: *Ais, tree: ast.Tree, - array_type: ast.Full.ArrayType, + array_type: ast.full.ArrayType, space: Space, ) Error!void { try renderToken(ais, tree, array_type.ast.lbracket, .None); // lbracket @@ -968,7 +860,7 @@ fn renderArrayType( fn renderPtrType( ais: *Ais, tree: ast.Tree, - ptr_type: ast.Full.PtrType, + ptr_type: ast.full.PtrType, space: Space, ) Error!void { switch (ptr_type.kind) { @@ -1040,7 +932,7 @@ fn renderPtrType( fn renderSlice( ais: *Ais, tree: ast.Tree, - slice: ast.Full.Slice, + slice: ast.full.Slice, space: Space, ) Error!void { const node_tags = tree.nodes.items(.tag); @@ -1072,48 +964,56 @@ fn renderSlice( } fn renderAsmOutput( - allocator: *mem.Allocator, ais: *Ais, tree: ast.Tree, - asm_output: *const ast.Node.Asm.Output, + asm_output: ast.Node.Index, space: Space, ) Error!void { - try ais.writer().writeAll("["); - try renderExpression(ais, tree, asm_output.symbolic_name, Space.None); - try ais.writer().writeAll("] "); - try renderExpression(ais, tree, asm_output.constraint, Space.None); - try ais.writer().writeAll(" ("); + const token_tags = tree.tokens.items(.tag); + const node_tags = tree.nodes.items(.tag); + const main_tokens = tree.nodes.items(.main_token); + const datas = tree.nodes.items(.data); + 
assert(node_tags[asm_output] == .AsmOutput); + const symbolic_name = main_tokens[asm_output]; - switch (asm_output.kind) { - .Variable => |variable_name| { - try renderExpression(ais, tree, &variable_name.base, Space.None); - }, - .Return => |return_type| { - try ais.writer().writeAll("-> "); - try renderExpression(ais, tree, return_type, Space.None); - }, + try renderToken(ais, tree, symbolic_name - 1, .None); // lbracket + try renderToken(ais, tree, symbolic_name, .None); // ident + try renderToken(ais, tree, symbolic_name + 1, .Space); // rbracket + try renderToken(ais, tree, symbolic_name + 2, .Space); // "constraint" + try renderToken(ais, tree, symbolic_name + 3, .None); // lparen + + if (token_tags[symbolic_name + 4] == .Arrow) { + try renderToken(ais, tree, symbolic_name + 4, .Space); // -> + try renderExpression(ais, tree, datas[asm_output].lhs, Space.None); + return renderToken(ais, tree, datas[asm_output].rhs, space); // rparen + } else { + try renderToken(ais, tree, symbolic_name + 4, .None); // ident + return renderToken(ais, tree, symbolic_name + 5, space); // rparen } - - return renderToken(ais, tree, asm_output.lastToken(), space); // ) } fn renderAsmInput( - allocator: *mem.Allocator, ais: *Ais, tree: ast.Tree, - asm_input: *const ast.Node.Asm.Input, + asm_input: ast.Node.Index, space: Space, ) Error!void { - try ais.writer().writeAll("["); - try renderExpression(ais, tree, asm_input.symbolic_name, Space.None); - try ais.writer().writeAll("] "); - try renderExpression(ais, tree, asm_input.constraint, Space.None); - try ais.writer().writeAll(" ("); - try renderExpression(ais, tree, asm_input.expr, Space.None); - return renderToken(ais, tree, asm_input.lastToken(), space); // ) + const node_tags = tree.nodes.items(.tag); + const main_tokens = tree.nodes.items(.main_token); + const datas = tree.nodes.items(.data); + assert(node_tags[asm_input] == .AsmInput); + const symbolic_name = main_tokens[asm_input]; + + try renderToken(ais, tree, symbolic_name - 
1, .None); // lbracket + try renderToken(ais, tree, symbolic_name, .None); // ident + try renderToken(ais, tree, symbolic_name + 1, .Space); // rbracket + try renderToken(ais, tree, symbolic_name + 2, .Space); // "constraint" + try renderToken(ais, tree, symbolic_name + 3, .None); // lparen + try renderExpression(ais, tree, datas[asm_input].lhs, Space.None); + return renderToken(ais, tree, datas[asm_input].rhs, space); // rparen } -fn renderVarDecl(ais: *Ais, tree: ast.Tree, var_decl: ast.Full.VarDecl) Error!void { +fn renderVarDecl(ais: *Ais, tree: ast.Tree, var_decl: ast.full.VarDecl) Error!void { if (var_decl.visib_token) |visib_token| { try renderToken(ais, tree, visib_token, Space.Space); // pub } @@ -1200,7 +1100,7 @@ fn renderVarDecl(ais: *Ais, tree: ast.Tree, var_decl: ast.Full.VarDecl) Error!vo try renderExpression(ais, tree, var_decl.ast.init_node, .Semicolon); } -fn renderIf(ais: *Ais, tree: ast.Tree, if_node: ast.Full.If, space: Space) Error!void { +fn renderIf(ais: *Ais, tree: ast.Tree, if_node: ast.full.If, space: Space) Error!void { const node_tags = tree.nodes.items(.tag); const token_tags = tree.tokens.items(.tag); @@ -1334,7 +1234,7 @@ fn renderIf(ais: *Ais, tree: ast.Tree, if_node: ast.Full.If, space: Space) Error fn renderContainerField( ais: *Ais, tree: ast.Tree, - field: ast.Full.ContainerField, + field: ast.full.ContainerField, space: Space, ) Error!void { const main_tokens = tree.nodes.items(.main_token); @@ -1430,7 +1330,7 @@ fn renderBuiltinCall( } } -fn renderFnProto(ais: *Ais, tree: ast.Tree, fn_proto: ast.Full.FnProto, space: Space) Error!void { +fn renderFnProto(ais: *Ais, tree: ast.Tree, fn_proto: ast.full.FnProto, space: Space) Error!void { const token_tags = tree.tokens.items(.tag); const token_starts = tree.tokens.items(.start); @@ -1618,7 +1518,7 @@ fn renderFnProto(ais: *Ais, tree: ast.Tree, fn_proto: ast.Full.FnProto, space: S fn renderSwitchCase( ais: *Ais, tree: ast.Tree, - switch_case: ast.Full.SwitchCase, + switch_case: 
ast.full.SwitchCase, space: Space, ) Error!void { const token_tags = tree.tokens.items(.tag); @@ -1709,7 +1609,7 @@ fn renderBlock( fn renderStructInit( ais: *Ais, tree: ast.Tree, - struct_init: ast.Full.StructInit, + struct_init: ast.full.StructInit, space: Space, ) Error!void { const token_tags = tree.tokens.items(.tag); @@ -1763,7 +1663,7 @@ fn renderStructInit( fn renderArrayInit( ais: *Ais, tree: ast.Tree, - array_init: ast.Full.ArrayInit, + array_init: ast.full.ArrayInit, space: Space, ) Error!void { const token_tags = tree.tokens.items(.tag); @@ -1809,7 +1709,7 @@ fn renderArrayInit( fn renderContainerDecl( ais: *Ais, tree: ast.Tree, - container_decl: ast.Full.ContainerDecl, + container_decl: ast.full.ContainerDecl, space: Space, ) Error!void { const token_tags = tree.tokens.items(.tag); @@ -1894,6 +1794,135 @@ fn renderContainerDecl( return renderToken(ais, tree, rbrace, space); // rbrace } +fn renderAsm( + ais: *Ais, + tree: ast.Tree, + asm_node: ast.full.Asm, + space: Space, +) Error!void { + const token_tags = tree.tokens.items(.tag); + + try renderToken(ais, tree, asm_node.ast.asm_token, .Space); // asm + + if (asm_node.volatile_token) |volatile_token| { + try renderToken(ais, tree, volatile_token, .Space); // volatile + try renderToken(ais, tree, volatile_token + 1, .None); // lparen + } else { + try renderToken(ais, tree, asm_node.ast.asm_token + 1, .None); // lparen + } + + if (asm_node.ast.items.len == 0) { + try renderExpression(ais, tree, asm_node.ast.template, .None); + if (asm_node.first_clobber) |first_clobber| { + // asm ("foo" ::: "a", "b") + var tok_i = first_clobber; + while (true) : (tok_i += 1) { + try renderToken(ais, tree, tok_i, .None); + tok_i += 1; + switch (token_tags[tok_i]) { + .RParen => return renderToken(ais, tree, tok_i, space), + .Comma => try renderToken(ais, tree, tok_i, .Space), + else => unreachable, + } + } + } else { + // asm ("foo") + return renderToken(ais, tree, asm_node.ast.rparen, space); // rparen + } + } + + 
ais.pushIndent(); + try renderExpression(ais, tree, asm_node.ast.template, .Newline); + ais.setIndentDelta(asm_indent_delta); + const colon1 = tree.lastToken(asm_node.ast.template) + 1; + + const colon2 = if (asm_node.outputs.len == 0) colon2: { + try renderToken(ais, tree, colon1, .Newline); // : + break :colon2 colon1 + 1; + } else colon2: { + try renderToken(ais, tree, colon1, .Space); // : + + ais.pushIndent(); + for (asm_node.outputs) |asm_output, i| { + if (i + 1 < asm_node.outputs.len) { + const next_asm_output = asm_node.outputs[i + 1]; + try renderAsmOutput(ais, tree, asm_output, .None); + + const comma = tree.firstToken(next_asm_output) - 1; + try renderToken(ais, tree, comma, .Newline); // , + try renderExtraNewlineToken(ais, tree, tree.firstToken(next_asm_output)); + } else if (asm_node.inputs.len == 0 and asm_node.first_clobber == null) { + try renderAsmOutput(ais, tree, asm_output, .Newline); + ais.popIndent(); + ais.setIndentDelta(indent_delta); + ais.popIndent(); + return renderToken(ais, tree, asm_node.ast.rparen, space); // rparen + } else { + try renderAsmOutput(ais, tree, asm_output, .Newline); + const comma_or_colon = tree.lastToken(asm_output) + 1; + ais.popIndent(); + break :colon2 switch (token_tags[comma_or_colon]) { + .Comma => comma_or_colon + 1, + else => comma_or_colon, + }; + } + } else unreachable; + }; + + const colon3 = if (asm_node.inputs.len == 0) colon3: { + try renderToken(ais, tree, colon2, .Newline); // : + break :colon3 colon2 + 1; + } else colon3: { + try renderToken(ais, tree, colon2, .Space); // : + ais.pushIndent(); + for (asm_node.inputs) |asm_input, i| { + if (i + 1 < asm_node.inputs.len) { + const next_asm_input = asm_node.inputs[i + 1]; + try renderAsmInput(ais, tree, asm_input, .None); + + const first_token = tree.firstToken(next_asm_input); + try renderToken(ais, tree, first_token - 1, .Newline); // , + try renderExtraNewlineToken(ais, tree, first_token); + } else if (asm_node.first_clobber == null) { + try 
renderAsmInput(ais, tree, asm_input, .Newline); + ais.popIndent(); + ais.setIndentDelta(indent_delta); + ais.popIndent(); + return renderToken(ais, tree, asm_node.ast.rparen, space); // rparen + } else { + try renderAsmInput(ais, tree, asm_input, .Newline); + const comma_or_colon = tree.lastToken(asm_input) + 1; + ais.popIndent(); + break :colon3 switch (token_tags[comma_or_colon]) { + .Comma => comma_or_colon + 1, + else => comma_or_colon, + }; + } + } + unreachable; + }; + + try renderToken(ais, tree, colon3, .Space); // : + const first_clobber = asm_node.first_clobber.?; + var tok_i = first_clobber; + while (true) { + switch (token_tags[tok_i + 1]) { + .RParen => { + ais.setIndentDelta(indent_delta); + ais.popIndent(); + try renderToken(ais, tree, tok_i, .Newline); + return renderToken(ais, tree, tok_i + 1, space); + }, + .Comma => { + try renderToken(ais, tree, tok_i, .None); + try renderToken(ais, tree, tok_i + 1, .Space); + tok_i += 2; + }, + else => unreachable, + } + } else unreachable; // TODO shouldn't need this on while(true) +} + /// Render an expression, and the comma that follows it, if it is present in the source. 
fn renderExpressionComma(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Space) Error!void { const token_tags = tree.tokens.items(.tag); From bcafc51e58fcd6a4b12e43a780c0e9eef43a8d28 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Tue, 9 Feb 2021 14:41:50 -0700 Subject: [PATCH 032/173] zig fmt: fn protos and anytype --- lib/std/zig/ast.zig | 98 ++++++++++++++++++++++++++++++++----- lib/std/zig/parser_test.zig | 42 ++++++++-------- lib/std/zig/render.zig | 23 +++------ 3 files changed, 115 insertions(+), 48 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 169c6cbb67..32c7e0ac7f 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -250,6 +250,7 @@ pub const Tree = struct { .FnProto, .ArrayType, .ArrayTypeSentinel, + .ErrorValue, => return main_tokens[n], .ArrayInitDot, @@ -424,12 +425,18 @@ pub const Tree = struct { return main_tokens[n] - 1; }, - .WhileSimple => unreachable, // TODO - .WhileCont => unreachable, // TODO - .While => unreachable, // TODO - .ForSimple => unreachable, // TODO - .For => unreachable, // TODO - .ErrorValue => unreachable, // TODO + .WhileSimple, + .WhileCont, + .While, + .ForSimple, + .For, + => { + const main_token = main_tokens[n]; + return switch (token_tags[main_token - 1]) { + .Keyword_inline => main_token - 1, + else => main_token, + }; + }, }; } @@ -437,6 +444,7 @@ pub const Tree = struct { const tags = tree.nodes.items(.tag); const datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); + const token_starts = tree.tokens.items(.start); var n = node; var end_offset: TokenIndex = 0; while (true) switch (tags[n]) { @@ -521,6 +529,8 @@ pub const Tree = struct { .AsmSimple, .AsmOutput, .AsmInput, + .FnProtoSimple, + .FnProtoMulti, => return datas[n].rhs + end_offset, .AnyType, @@ -759,6 +769,74 @@ pub const Tree = struct { return main_tokens[n] + end_offset; } }, + .FnProtoOne => { + const extra = tree.extraData(datas[n].lhs, Node.FnProtoOne); + // linksection, 
callconv, align can appear in any order, so we + // find the last one here. + var max_node: Node.Index = datas[n].rhs; + var max_start = token_starts[main_tokens[max_node]]; + var max_offset: TokenIndex = 0; + if (extra.align_expr != 0) { + const start = token_starts[main_tokens[extra.align_expr]]; + if (start > max_start) { + max_node = extra.align_expr; + max_start = start; + max_offset = 1; // for the rparen + } + } + if (extra.section_expr != 0) { + const start = token_starts[main_tokens[extra.section_expr]]; + if (start > max_start) { + max_node = extra.section_expr; + max_start = start; + max_offset = 1; // for the rparen + } + } + if (extra.callconv_expr != 0) { + const start = token_starts[main_tokens[extra.callconv_expr]]; + if (start > max_start) { + max_node = extra.callconv_expr; + max_start = start; + max_offset = 1; // for the rparen + } + } + n = max_node; + end_offset += max_offset; + }, + .FnProto => { + const extra = tree.extraData(datas[n].lhs, Node.FnProto); + // linksection, callconv, align can appear in any order, so we + // find the last one here. 
+ var max_node: Node.Index = datas[n].rhs; + var max_start = token_starts[main_tokens[max_node]]; + var max_offset: TokenIndex = 0; + if (extra.align_expr != 0) { + const start = token_starts[main_tokens[extra.align_expr]]; + if (start > max_start) { + max_node = extra.align_expr; + max_start = start; + max_offset = 1; // for the rparen + } + } + if (extra.section_expr != 0) { + const start = token_starts[main_tokens[extra.section_expr]]; + if (start > max_start) { + max_node = extra.section_expr; + max_start = start; + max_offset = 1; // for the rparen + } + } + if (extra.callconv_expr != 0) { + const start = token_starts[main_tokens[extra.callconv_expr]]; + if (start > max_start) { + max_node = extra.callconv_expr; + max_start = start; + max_offset = 1; // for the rparen + } + } + n = max_node; + end_offset += max_offset; + }, // These are not supported by lastToken() because implementation would // require recursion due to the optional comma followed by rbrace. @@ -782,10 +860,6 @@ pub const Tree = struct { .While => unreachable, // TODO .ForSimple => unreachable, // TODO .For => unreachable, // TODO - .FnProtoSimple => unreachable, // TODO - .FnProtoMulti => unreachable, // TODO - .FnProtoOne => unreachable, // TODO - .FnProto => unreachable, // TODO .ErrorValue => unreachable, // TODO }; } @@ -2217,11 +2291,11 @@ pub const Node = struct { /// `fn(a: b, c: d) rhs`. `sub_range_list[lhs]`. /// anytype and ... parameters are omitted from the AST tree. FnProtoMulti, - /// `fn(a: b) rhs linksection(e) callconv(f)`. lhs is index into extra_data. + /// `fn(a: b) rhs linksection(e) callconv(f)`. `FnProtoOne[lhs]`. /// zero or one parameters. /// anytype and ... parameters are omitted from the AST tree. FnProtoOne, - /// `fn(a: b, c: d) rhs linksection(e) callconv(f)`. `fn_proto_list[lhs]`. + /// `fn(a: b, c: d) rhs linksection(e) callconv(f)`. `FnProto[lhs]`. /// anytype and ... parameters are omitted from the AST tree. 
FnProto, /// lhs is the FnProto, rhs is the function body block. diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 5ae8f37b87..da6671177a 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -337,15 +337,15 @@ test "zig fmt: asm expression with comptime content" { ); } -//test "zig fmt: anytype struct field" { -// try testCanonical( -// \\pub const Pointer = struct { -// \\ sentinel: anytype, -// \\}; -// \\ -// ); -//} -// +test "zig fmt: anytype struct field" { + try testCanonical( + \\pub const Pointer = struct { + \\ sentinel: anytype, + \\}; + \\ + ); +} + //test "zig fmt: sentinel-terminated array type" { // try testCanonical( // \\pub fn cStrToPrefixedFileW(s: [*:0]const u8) ![PATH_MAX_WIDE:0]u16 { @@ -691,18 +691,18 @@ test "zig fmt: block in slice expression" { ); } -//test "zig fmt: async function" { -// try testCanonical( -// \\pub const Server = struct { -// \\ handleRequestFn: fn (*Server, *const std.net.Address, File) callconv(.Async) void, -// \\}; -// \\test "hi" { -// \\ var ptr = @ptrCast(fn (i32) callconv(.Async) void, other); -// \\} -// \\ -// ); -//} -// +test "zig fmt: async function" { + try testCanonical( + \\pub const Server = struct { + \\ handleRequestFn: fn (*Server, *const std.net.Address, File) callconv(.Async) void, + \\}; + \\test "hi" { + \\ var ptr = @ptrCast(fn (i32) callconv(.Async) void, other); + \\} + \\ + ); +} + //test "zig fmt: whitespace fixes" { // try testTransform("test \"\" {\r\n\tconst hi = x;\r\n}\n// zig fmt: off\ntest \"\"{\r\n\tconst a = b;}\r\n", // \\test "" { diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 212e4c85d1..df17b313f4 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -22,7 +22,7 @@ pub const Error = error{ const Writer = std.ArrayList(u8).Writer; const Ais = std.io.AutoIndentingStream(Writer); -/// `gpa` is used both for allocating the resulting formatted source code, but also +/// `gpa` is used for allocating 
the resulting formatted source code, as well as /// for allocating extra stack memory if needed, because this function utilizes recursion. /// Note: that's not actually true yet, see https://github.com/ziglang/zig/issues/1006. /// Caller owns the returned slice of bytes, allocated with `gpa`. @@ -191,17 +191,8 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac .ErrorValue => unreachable, // TODO - .AnyType => unreachable, // TODO - //.AnyType => { - // const any_type = base.castTag(.AnyType).?; - // if (mem.eql(u8, tree.tokenSlice(any_type.token), "var")) { - // // TODO remove in next release cycle - // try ais.writer().writeAll("anytype"); - // if (space == .Comma) try ais.writer().writeAll(",\n"); - // return; - // } - // return renderToken(ais, tree, any_type.token, space); - //}, + .AnyType => return renderToken(ais, tree, main_tokens[node], space), + .BlockTwo, .BlockTwoSemicolon, => { @@ -1412,9 +1403,11 @@ fn renderFnProto(ais: *Ais, tree: ast.Tree, fn_proto: ast.full.FnProto, space: S try renderToken(ais, tree, last_param_token, .Space); // , last_param_token += 1; }, - else => unreachable, + else => {}, // Parameter type without a name. } - if (token_tags[last_param_token] == .Identifier) { + if (token_tags[last_param_token] == .Identifier and + token_tags[last_param_token + 1] == .Colon) + { try renderToken(ais, tree, last_param_token, .None); // name last_param_token += 1; try renderToken(ais, tree, last_param_token, .Space); // : @@ -1427,7 +1420,7 @@ fn renderFnProto(ais: *Ais, tree: ast.Tree, fn_proto: ast.full.FnProto, space: S const param = fn_proto.ast.params[param_i]; param_i += 1; try renderExpression(ais, tree, param, .None); - last_param_token = tree.lastToken(param) + 1; + last_param_token = tree.lastToken(param); } } else { // One param per line. 
From 1c79eea125c7e21b00a7abeffdc3b64548c54257 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Tue, 9 Feb 2021 17:23:57 -0700 Subject: [PATCH 033/173] zig fmt: while loops --- lib/std/zig/ast.zig | 132 ++++++++++++++++-- lib/std/zig/parse.zig | 28 ++-- lib/std/zig/parser_test.zig | 26 ++-- lib/std/zig/render.zig | 269 +++++++++++++++++------------------- 4 files changed, 272 insertions(+), 183 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 32c7e0ac7f..9bad013dea 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -760,6 +760,13 @@ pub const Tree = struct { n = extra.sentinel; }, + .Continue => { + if (datas[n].lhs != 0) { + return datas[n].lhs + end_offset; + } else { + return main_tokens[n] + end_offset; + } + }, .Break => { if (datas[n].rhs != 0) { n = datas[n].rhs; @@ -837,6 +844,21 @@ pub const Tree = struct { n = max_node; end_offset += max_offset; }, + .WhileCont => { + const extra = tree.extraData(datas[n].rhs, Node.WhileCont); + assert(extra.then_expr != 0); + n = extra.then_expr; + }, + .While => { + const extra = tree.extraData(datas[n].rhs, Node.While); + assert(extra.else_expr != 0); + n = extra.else_expr; + }, + .If => { + const extra = tree.extraData(datas[n].rhs, Node.If); + assert(extra.else_expr != 0); + n = extra.else_expr; + }, // These are not supported by lastToken() because implementation would // require recursion due to the optional comma followed by rbrace. 
@@ -851,13 +873,9 @@ pub const Tree = struct { .TaggedUnionEnumTag => unreachable, // TODO .TaggedUnionEnumTagComma => unreachable, // TODO - .If => unreachable, // TODO - .Continue => unreachable, // TODO .SwitchRange => unreachable, // TODO .ArrayType => unreachable, // TODO .ArrayTypeSentinel => unreachable, // TODO - .WhileCont => unreachable, // TODO - .While => unreachable, // TODO .ForSimple => unreachable, // TODO .For => unreachable, // TODO .ErrorValue => unreachable, // TODO @@ -1404,6 +1422,41 @@ pub const Tree = struct { }); } + pub fn whileSimple(tree: Tree, node: Node.Index) full.While { + const data = tree.nodes.items(.data)[node]; + return tree.fullWhile(.{ + .while_token = tree.nodes.items(.main_token)[node], + .cond_expr = data.lhs, + .cont_expr = 0, + .then_expr = data.rhs, + .else_expr = 0, + }); + } + + pub fn whileCont(tree: Tree, node: Node.Index) full.While { + const data = tree.nodes.items(.data)[node]; + const extra = tree.extraData(data.rhs, Node.WhileCont); + return tree.fullWhile(.{ + .while_token = tree.nodes.items(.main_token)[node], + .cond_expr = data.lhs, + .cont_expr = extra.cont_expr, + .then_expr = extra.then_expr, + .else_expr = 0, + }); + } + + pub fn whileFull(tree: Tree, node: Node.Index) full.While { + const data = tree.nodes.items(.data)[node]; + const extra = tree.extraData(data.rhs, Node.While); + return tree.fullWhile(.{ + .while_token = tree.nodes.items(.main_token)[node], + .cond_expr = data.lhs, + .cont_expr = extra.cont_expr, + .then_expr = extra.then_expr, + .else_expr = extra.else_expr, + }); + } + fn fullVarDecl(tree: Tree, info: full.VarDecl.Ast) full.VarDecl { const token_tags = tree.tokens.items(.tag); var result: full.VarDecl = .{ @@ -1623,6 +1676,41 @@ pub const Tree = struct { return result; } + + fn fullWhile(tree: Tree, info: full.While.Ast) full.While { + const token_tags = tree.tokens.items(.tag); + var result: full.While = .{ + .ast = info, + .inline_token = null, + .label_token = null, + 
.payload_token = null, + .else_token = undefined, + .error_token = null, + }; + var tok_i = info.while_token - 1; + if (token_tags[tok_i] == .Keyword_inline) { + result.inline_token = tok_i; + tok_i -= 1; + } + if (token_tags[tok_i] == .Colon and + token_tags[tok_i - 1] == .Identifier) + { + result.label_token = tok_i - 1; + } + const last_cond_token = tree.lastToken(info.cond_expr); + if (token_tags[last_cond_token + 2] == .Pipe) { + result.payload_token = last_cond_token + 3; + } + if (info.else_expr != 0) { + // then_expr else |x| + // ^ ^ + result.else_token = tree.lastToken(info.then_expr) + 1; + if (token_tags[result.else_token + 1] == .Pipe) { + result.error_token = result.else_token + 2; + } + } + return result; + } }; /// Fully assembled AST node information. @@ -1645,12 +1733,12 @@ pub const full = struct { }; pub const If = struct { - // Points to the first token after the `|`. Will either be an identifier or - // a `*` (with an identifier immediately after it). + /// Points to the first token after the `|`. Will either be an identifier or + /// a `*` (with an identifier immediately after it). payload_token: ?TokenIndex, - // Points to the identifier after the `|`. + /// Points to the identifier after the `|`. error_token: ?TokenIndex, - // Populated only if else_expr != 0. + /// Populated only if else_expr != 0. else_token: TokenIndex, ast: Ast, @@ -1662,6 +1750,24 @@ pub const full = struct { }; }; + pub const While = struct { + ast: Ast, + inline_token: ?TokenIndex, + label_token: ?TokenIndex, + payload_token: ?TokenIndex, + error_token: ?TokenIndex, + /// Populated only if else_expr != 0. + else_token: TokenIndex, + + pub const Ast = struct { + while_token: TokenIndex, + cond_expr: Node.Index, + cont_expr: Node.Index, + then_expr: Node.Index, + else_expr: Node.Index, + }; + }; + pub const ContainerField = struct { comptime_token: ?TokenIndex, ast: Ast, @@ -2270,9 +2376,9 @@ pub const Node = struct { /// `if (lhs) rhs`. /// `if (lhs) |a| rhs`. 
IfSimple, - /// `if (lhs) a else b`. `if_list[rhs]`. - /// `if (lhs) |x| a else b`. `if_list[rhs]`. - /// `if (lhs) |x| a else |y| b`. `if_list[rhs]`. + /// `if (lhs) a else b`. `If[rhs]`. + /// `if (lhs) |x| a else b`. `If[rhs]`. + /// `if (lhs) |x| a else |y| b`. `If[rhs]`. If, /// `suspend lhs`. lhs can be omitted. rhs is unused. Suspend, @@ -2497,13 +2603,13 @@ pub const Node = struct { }; pub const While = struct { - continue_expr: Index, + cont_expr: Index, then_expr: Index, else_expr: Index, }; pub const WhileCont = struct { - continue_expr: Index, + cont_expr: Index, then_expr: Index, }; diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig index 3ca5bb0049..7e2da72d00 100644 --- a/lib/std/zig/parse.zig +++ b/lib/std/zig/parse.zig @@ -1085,7 +1085,7 @@ const Parser = struct { const condition = try p.expectExpr(); _ = try p.expectToken(.RParen); const then_payload = try p.parsePtrPayload(); - const continue_expr = try p.parseWhileContinueExpr(); + const cont_expr = try p.parseWhileContinueExpr(); // TODO propose to change the syntax so that semicolons are always required // inside while statements, even if there is an `else`. 
@@ -1098,7 +1098,7 @@ const Parser = struct { return p.fail(.{ .ExpectedBlockOrAssignment = .{ .token = p.tok_i } }); } if (p.eatToken(.Semicolon)) |_| { - if (continue_expr == 0) { + if (cont_expr == 0) { return p.addNode(.{ .tag = .WhileSimple, .main_token = while_token, @@ -1114,7 +1114,7 @@ const Parser = struct { .data = .{ .lhs = condition, .rhs = try p.addExtra(Node.WhileCont{ - .continue_expr = continue_expr, + .cont_expr = cont_expr, .then_expr = assign_expr, }), }, @@ -1128,7 +1128,7 @@ const Parser = struct { if (else_required) { return p.fail(.{ .ExpectedSemiOrElse = .{ .token = p.tok_i } }); } - if (continue_expr == 0) { + if (cont_expr == 0) { return p.addNode(.{ .tag = .WhileSimple, .main_token = while_token, @@ -1144,7 +1144,7 @@ const Parser = struct { .data = .{ .lhs = condition, .rhs = try p.addExtra(Node.WhileCont{ - .continue_expr = continue_expr, + .cont_expr = cont_expr, .then_expr = then_expr, }), }, @@ -1159,7 +1159,7 @@ const Parser = struct { .data = .{ .lhs = condition, .rhs = try p.addExtra(Node.While{ - .continue_expr = continue_expr, + .cont_expr = cont_expr, .then_expr = then_expr, .else_expr = else_expr, }), @@ -2073,11 +2073,11 @@ const Parser = struct { const condition = try p.expectExpr(); _ = try p.expectToken(.RParen); const then_payload = try p.parsePtrPayload(); - const continue_expr = try p.parseWhileContinueExpr(); + const cont_expr = try p.parseWhileContinueExpr(); const then_expr = try p.expectExpr(); const else_token = p.eatToken(.Keyword_else) orelse { - if (continue_expr == 0) { + if (cont_expr == 0) { return p.addNode(.{ .tag = .WhileSimple, .main_token = while_token, @@ -2093,7 +2093,7 @@ const Parser = struct { .data = .{ .lhs = condition, .rhs = try p.addExtra(Node.WhileCont{ - .continue_expr = continue_expr, + .cont_expr = cont_expr, .then_expr = then_expr, }), }, @@ -2108,7 +2108,7 @@ const Parser = struct { .data = .{ .lhs = condition, .rhs = try p.addExtra(Node.While{ - .continue_expr = continue_expr, + 
.cont_expr = cont_expr, .then_expr = then_expr, .else_expr = else_expr, }), @@ -2836,11 +2836,11 @@ const Parser = struct { const condition = try p.expectExpr(); _ = try p.expectToken(.RParen); const then_payload = try p.parsePtrPayload(); - const continue_expr = try p.parseWhileContinueExpr(); + const cont_expr = try p.parseWhileContinueExpr(); const then_expr = try p.expectTypeExpr(); const else_token = p.eatToken(.Keyword_else) orelse { - if (continue_expr == 0) { + if (cont_expr == 0) { return p.addNode(.{ .tag = .WhileSimple, .main_token = while_token, @@ -2856,7 +2856,7 @@ const Parser = struct { .data = .{ .lhs = condition, .rhs = try p.addExtra(Node.WhileCont{ - .continue_expr = continue_expr, + .cont_expr = cont_expr, .then_expr = then_expr, }), }, @@ -2871,7 +2871,7 @@ const Parser = struct { .data = .{ .lhs = condition, .rhs = try p.addExtra(Node.While{ - .continue_expr = continue_expr, + .cont_expr = cont_expr, .then_expr = then_expr, .else_expr = else_expr, }), diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index da6671177a..0d5775fd61 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -714,19 +714,19 @@ test "zig fmt: async function" { // \\ // ); //} -// -//test "zig fmt: while else err prong with no block" { -// try testCanonical( -// \\test "" { -// \\ const result = while (returnError()) |value| { -// \\ break value; -// \\ } else |err| @as(i32, 2); -// \\ expect(result == 2); -// \\} -// \\ -// ); -//} -// + +test "zig fmt: while else err prong with no block" { + try testCanonical( + \\test "" { + \\ const result = while (returnError()) |value| { + \\ break value; + \\ } else |err| @as(i32, 2); + \\ expect(result == 2); + \\} + \\ + ); +} + //test "zig fmt: tagged union with enum values" { // try testCanonical( // \\const MultipleChoice2 = union(enum(u32)) { diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index df17b313f4..8ce7d8bb7b 100644 --- a/lib/std/zig/render.zig +++ 
b/lib/std/zig/render.zig @@ -567,13 +567,13 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac if (lbrace + 1 == rbrace) { // There is nothing between the braces so render condensed: `error{}` try renderToken(ais, tree, lbrace, .None); - try renderToken(ais, tree, rbrace, space); + return renderToken(ais, tree, rbrace, space); } else if (lbrace + 2 == rbrace and token_tags[lbrace + 1] == .Identifier) { // There is exactly one member and no trailing comma or // comments, so render without surrounding spaces: `error{Foo}` try renderToken(ais, tree, lbrace, .None); try renderToken(ais, tree, lbrace + 1, .None); // identifier - try renderToken(ais, tree, rbrace, space); + return renderToken(ais, tree, rbrace, space); } else if (token_tags[rbrace - 1] == .Comma) { // There is a trailing comma so render each member on a new line. try renderToken(ais, tree, lbrace, .Newline); @@ -589,7 +589,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac } } ais.popIndent(); - try renderToken(ais, tree, rbrace, space); + return renderToken(ais, tree, rbrace, space); } else { // There is no trailing comma so render everything on one line. 
try renderToken(ais, tree, lbrace, .Space); @@ -602,7 +602,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac else => unreachable, } } - try renderToken(ais, tree, rbrace, space); + return renderToken(ais, tree, rbrace, space); } }, @@ -663,7 +663,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac if (cases.len == 0) { try renderToken(ais, tree, rparen + 1, .None); // lbrace - try renderToken(ais, tree, rparen + 2, space); // rbrace + return renderToken(ais, tree, rparen + 2, space); // rbrace } else { try renderToken(ais, tree, rparen + 1, .Newline); // lbrace ais.pushIndent(); @@ -673,83 +673,16 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac try renderExpression(ais, tree, case, .Comma); } ais.popIndent(); - try renderToken(ais, tree, tree.lastToken(node), space); // rbrace + return renderToken(ais, tree, tree.lastToken(node), space); // rbrace } }, - .SwitchCaseOne => try renderSwitchCase(ais, tree, tree.switchCaseOne(node), space), - .SwitchCase => try renderSwitchCase(ais, tree, tree.switchCase(node), space), + .SwitchCaseOne => return renderSwitchCase(ais, tree, tree.switchCaseOne(node), space), + .SwitchCase => return renderSwitchCase(ais, tree, tree.switchCase(node), space), - .WhileSimple => unreachable, // TODO - .WhileCont => unreachable, // TODO - .While => unreachable, // TODO - //.While => { - // const while_node = @fieldParentPtr(ast.Node.While, "base", base); - - // if (while_node.label) |label| { - // try renderToken(ais, tree, label, Space.None); // label - // try renderToken(ais, tree, tree.nextToken(label), Space.Space); // : - // } - - // if (while_node.inline_token) |inline_token| { - // try renderToken(ais, tree, inline_token, Space.Space); // inline - // } - - // try renderToken(ais, tree, while_node.while_token, Space.Space); // while - // try renderToken(ais, tree, tree.nextToken(while_node.while_token), Space.None); // ( - // try 
renderExpression(ais, tree, while_node.condition, Space.None); - - // const cond_rparen = tree.nextToken(while_node.condition.lastToken()); - - // const body_is_block = nodeIsBlock(while_node.body); - - // var block_start_space: Space = undefined; - // var after_body_space: Space = undefined; - - // if (body_is_block) { - // block_start_space = Space.BlockStart; - // after_body_space = if (while_node.@"else" == null) space else Space.Space; - // } else if (tree.tokensOnSameLine(cond_rparen, while_node.body.lastToken())) { - // block_start_space = Space.Space; - // after_body_space = if (while_node.@"else" == null) space else Space.Space; - // } else { - // block_start_space = Space.Newline; - // after_body_space = if (while_node.@"else" == null) space else Space.Newline; - // } - - // { - // const rparen_space = if (while_node.payload != null or while_node.continue_expr != null) Space.Space else block_start_space; - // try renderToken(ais, tree, cond_rparen, rparen_space); // ) - // } - - // if (while_node.payload) |payload| { - // const payload_space = if (while_node.continue_expr != null) Space.Space else block_start_space; - // try renderExpression(ais, tree, payload, payload_space); - // } - - // if (while_node.continue_expr) |continue_expr| { - // const rparen = tree.nextToken(continue_expr.lastToken()); - // const lparen = tree.prevToken(continue_expr.firstToken()); - // const colon = tree.prevToken(lparen); - - // try renderToken(ais, tree, colon, Space.Space); // : - // try renderToken(ais, tree, lparen, Space.None); // ( - - // try renderExpression(ais, tree, continue_expr, Space.None); - - // try renderToken(ais, tree, rparen, block_start_space); // ) - // } - - // { - // if (!body_is_block) ais.pushIndent(); - // defer if (!body_is_block) ais.popIndent(); - // try renderExpression(ais, tree, while_node.body, after_body_space); - // } - - // if (while_node.@"else") |@"else"| { - // return renderExpression(ais, tree, &@"else".base, space); - // } - //}, + 
.WhileSimple => return renderWhile(ais, tree, tree.whileSimple(node), space), + .WhileCont => return renderWhile(ais, tree, tree.whileCont(node), space), + .While => return renderWhile(ais, tree, tree.whileFull(node), space), .ForSimple => unreachable, // TODO .For => unreachable, // TODO @@ -1092,105 +1025,142 @@ fn renderVarDecl(ais: *Ais, tree: ast.Tree, var_decl: ast.full.VarDecl) Error!vo } fn renderIf(ais: *Ais, tree: ast.Tree, if_node: ast.full.If, space: Space) Error!void { + return renderWhile(ais, tree, .{ + .ast = .{ + .while_token = if_node.ast.if_token, + .cond_expr = if_node.ast.cond_expr, + .cont_expr = 0, + .then_expr = if_node.ast.then_expr, + .else_expr = if_node.ast.else_expr, + }, + .inline_token = null, + .label_token = null, + .payload_token = if_node.payload_token, + .else_token = if_node.else_token, + .error_token = if_node.error_token, + }, space); +} + +/// Note that this function is additionally used to render if expressions, with +/// respective values set to null. 
+fn renderWhile(ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Space) Error!void { const node_tags = tree.nodes.items(.tag); const token_tags = tree.tokens.items(.tag); - try renderToken(ais, tree, if_node.ast.if_token, .Space); // if - - const lparen = if_node.ast.if_token + 1; - - try renderToken(ais, tree, lparen, .None); // ( - try renderExpression(ais, tree, if_node.ast.cond_expr, .None); // condition - - switch (node_tags[if_node.ast.then_expr]) { - .If, .IfSimple => { - try renderExtraNewline(ais, tree, if_node.ast.then_expr); - }, - .Block, .For, .ForSimple, .While, .WhileSimple, .Switch => { - if (if_node.payload_token) |payload_token| { - try renderToken(ais, tree, payload_token - 2, .Space); // ) - try renderToken(ais, tree, payload_token - 1, .None); // | - if (token_tags[payload_token] == .Asterisk) { - try renderToken(ais, tree, payload_token, .None); // * - try renderToken(ais, tree, payload_token + 1, .None); // identifier - try renderToken(ais, tree, payload_token + 2, .BlockStart); // | - } else { - try renderToken(ais, tree, payload_token, .None); // identifier - try renderToken(ais, tree, payload_token + 1, .BlockStart); // | - } - } else { - const rparen = tree.lastToken(if_node.ast.cond_expr) + 1; - try renderToken(ais, tree, rparen, .BlockStart); // ) - } - if (if_node.ast.else_expr != 0) { - try renderExpression(ais, tree, if_node.ast.then_expr, Space.Space); - try renderToken(ais, tree, if_node.else_token, .Space); // else - if (if_node.error_token) |error_token| { - try renderToken(ais, tree, error_token - 1, .None); // | - try renderToken(ais, tree, error_token, .None); // identifier - try renderToken(ais, tree, error_token + 1, .Space); // | - } - return renderExpression(ais, tree, if_node.ast.else_expr, space); - } else { - return renderExpression(ais, tree, if_node.ast.then_expr, space); - } - }, - else => {}, + if (while_node.label_token) |label| { + try renderToken(ais, tree, label, .None); // label + try 
renderToken(ais, tree, label + 1, .Space); // : } - const rparen = tree.lastToken(if_node.ast.cond_expr) + 1; - const last_then_token = tree.lastToken(if_node.ast.then_expr); + if (while_node.inline_token) |inline_token| { + try renderToken(ais, tree, inline_token, .Space); // inline + } + + try renderToken(ais, tree, while_node.ast.while_token, .Space); // if + try renderToken(ais, tree, while_node.ast.while_token + 1, .None); // ( + try renderExpression(ais, tree, while_node.ast.cond_expr, .None); // condition + + if (nodeIsBlock(node_tags[while_node.ast.then_expr])) { + const payload_space: Space = if (while_node.ast.cont_expr != 0) .Space else .BlockStart; + if (while_node.payload_token) |payload_token| { + try renderToken(ais, tree, payload_token - 2, .Space); // ) + try renderToken(ais, tree, payload_token - 1, .None); // | + if (token_tags[payload_token] == .Asterisk) { + try renderToken(ais, tree, payload_token, .None); // * + try renderToken(ais, tree, payload_token + 1, .None); // identifier + try renderToken(ais, tree, payload_token + 2, payload_space); // | + } else { + try renderToken(ais, tree, payload_token, .None); // identifier + try renderToken(ais, tree, payload_token + 1, payload_space); // | + } + } else { + const rparen = tree.lastToken(while_node.ast.cond_expr) + 1; + try renderToken(ais, tree, rparen, payload_space); // ) + } + if (while_node.ast.cont_expr != 0) { + const rparen = tree.lastToken(while_node.ast.cont_expr) + 1; + const lparen = tree.firstToken(while_node.ast.cont_expr) - 1; + try renderToken(ais, tree, lparen - 1, .Space); // : + try renderToken(ais, tree, lparen, .None); // lparen + try renderExpression(ais, tree, while_node.ast.cont_expr, .None); + try renderToken(ais, tree, rparen, .BlockStart); // rparen + } + if (while_node.ast.else_expr != 0) { + try renderExpression(ais, tree, while_node.ast.then_expr, Space.Space); + try renderToken(ais, tree, while_node.else_token, .Space); // else + if (while_node.error_token) 
|error_token| { + try renderToken(ais, tree, error_token - 1, .None); // | + try renderToken(ais, tree, error_token, .None); // identifier + try renderToken(ais, tree, error_token + 1, .Space); // | + } + return renderExpression(ais, tree, while_node.ast.else_expr, space); + } else { + return renderExpression(ais, tree, while_node.ast.then_expr, space); + } + } + + const rparen = tree.lastToken(while_node.ast.cond_expr) + 1; + const last_then_token = tree.lastToken(while_node.ast.then_expr); const src_has_newline = !tree.tokensOnSameLine(rparen, last_then_token); if (src_has_newline) { - if (if_node.payload_token) |payload_token| { + const payload_space: Space = if (while_node.ast.cont_expr != 0) .Space else .Newline; + if (while_node.payload_token) |payload_token| { try renderToken(ais, tree, payload_token - 2, .Space); // ) try renderToken(ais, tree, payload_token - 1, .None); // | try renderToken(ais, tree, payload_token, .None); // identifier - try renderToken(ais, tree, payload_token + 1, .Newline); // | + try renderToken(ais, tree, payload_token + 1, payload_space); // | } else { ais.pushIndent(); - try renderToken(ais, tree, rparen, .Newline); // ) + try renderToken(ais, tree, rparen, payload_space); // ) ais.popIndent(); } - if (if_node.ast.else_expr != 0) { + if (while_node.ast.cont_expr != 0) { + const cont_rparen = tree.lastToken(while_node.ast.cont_expr) + 1; + const cont_lparen = tree.firstToken(while_node.ast.cont_expr) - 1; + try renderToken(ais, tree, cont_lparen - 1, .Space); // : + try renderToken(ais, tree, cont_lparen, .None); // lparen + try renderExpression(ais, tree, while_node.ast.cont_expr, .None); + try renderToken(ais, tree, cont_rparen, .Newline); // rparen + } + if (while_node.ast.else_expr != 0) { ais.pushIndent(); - try renderExpression(ais, tree, if_node.ast.then_expr, Space.Newline); + try renderExpression(ais, tree, while_node.ast.then_expr, Space.Newline); ais.popIndent(); - const else_is_block = 
nodeIsBlock(node_tags[if_node.ast.else_expr]); + const else_is_block = nodeIsBlock(node_tags[while_node.ast.else_expr]); if (else_is_block) { - try renderToken(ais, tree, if_node.else_token, .Space); // else - if (if_node.error_token) |error_token| { + try renderToken(ais, tree, while_node.else_token, .Space); // else + if (while_node.error_token) |error_token| { try renderToken(ais, tree, error_token - 1, .None); // | try renderToken(ais, tree, error_token, .None); // identifier try renderToken(ais, tree, error_token + 1, .Space); // | } - return renderExpression(ais, tree, if_node.ast.else_expr, space); + return renderExpression(ais, tree, while_node.ast.else_expr, space); } else { - if (if_node.error_token) |error_token| { - try renderToken(ais, tree, if_node.else_token, .Space); // else + if (while_node.error_token) |error_token| { + try renderToken(ais, tree, while_node.else_token, .Space); // else try renderToken(ais, tree, error_token - 1, .None); // | try renderToken(ais, tree, error_token, .None); // identifier try renderToken(ais, tree, error_token + 1, .Space); // | } else { - try renderToken(ais, tree, if_node.else_token, .Newline); // else + try renderToken(ais, tree, while_node.else_token, .Newline); // else } ais.pushIndent(); - try renderExpression(ais, tree, if_node.ast.else_expr, space); + try renderExpression(ais, tree, while_node.ast.else_expr, space); ais.popIndent(); return; } } else { ais.pushIndent(); - try renderExpression(ais, tree, if_node.ast.then_expr, space); + try renderExpression(ais, tree, while_node.ast.then_expr, space); ais.popIndent(); return; } } - // Single line if statement. + // Render everything on a single line. 
- if (if_node.payload_token) |payload_token| { + if (while_node.payload_token) |payload_token| { assert(payload_token - 2 == rparen); try renderToken(ais, tree, payload_token - 2, .Space); // ) try renderToken(ais, tree, payload_token - 1, .None); // | @@ -1206,19 +1176,28 @@ fn renderIf(ais: *Ais, tree: ast.Tree, if_node: ast.full.If, space: Space) Error try renderToken(ais, tree, rparen, .Space); // ) } - if (if_node.ast.else_expr != 0) { - try renderExpression(ais, tree, if_node.ast.then_expr, .Space); - try renderToken(ais, tree, if_node.else_token, .Space); // else + if (while_node.ast.cont_expr != 0) { + const cont_rparen = tree.lastToken(while_node.ast.cont_expr) + 1; + const cont_lparen = tree.firstToken(while_node.ast.cont_expr) - 1; + try renderToken(ais, tree, cont_lparen - 1, .Space); // : + try renderToken(ais, tree, cont_lparen, .None); // lparen + try renderExpression(ais, tree, while_node.ast.cont_expr, .None); + try renderToken(ais, tree, cont_rparen, .Space); // rparen + } - if (if_node.error_token) |error_token| { + if (while_node.ast.else_expr != 0) { + try renderExpression(ais, tree, while_node.ast.then_expr, .Space); + try renderToken(ais, tree, while_node.else_token, .Space); // else + + if (while_node.error_token) |error_token| { try renderToken(ais, tree, error_token - 1, .None); // | try renderToken(ais, tree, error_token, .None); // identifier try renderToken(ais, tree, error_token + 1, .Space); // | } - return renderExpression(ais, tree, if_node.ast.else_expr, space); + return renderExpression(ais, tree, while_node.ast.else_expr, space); } else { - return renderExpression(ais, tree, if_node.ast.then_expr, space); + return renderExpression(ais, tree, while_node.ast.then_expr, space); } } @@ -2079,12 +2058,16 @@ fn renderDocComments(ais: *Ais, tree: ast.Tree, end_token: ast.TokenIndex) Error fn nodeIsBlock(tag: ast.Node.Tag) bool { return switch (tag) { .Block, + .BlockSemicolon, + .BlockTwo, + .BlockTwoSemicolon, .If, .IfSimple, .For, 
.ForSimple, .While, .WhileSimple, + .WhileCont, .Switch, => true, else => false, From 39acc4c02004f4e106b7c280f3f6bb070df88178 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Tue, 9 Feb 2021 20:08:40 -0700 Subject: [PATCH 034/173] zig fmt: for loops --- lib/std/zig/ast.zig | 31 +++++- lib/std/zig/parse.zig | 33 +++--- lib/std/zig/parser_test.zig | 155 ++++++++++++++--------------- lib/std/zig/render.zig | 193 ++++++++++++++++-------------------- 4 files changed, 200 insertions(+), 212 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 9bad013dea..aa6962c247 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -512,6 +512,7 @@ pub const Tree = struct { .ErrorUnion, .IfSimple, .WhileSimple, + .ForSimple, .FnDecl, .PtrTypeAligned, .PtrTypeSentinel, @@ -531,6 +532,7 @@ pub const Tree = struct { .AsmInput, .FnProtoSimple, .FnProtoMulti, + .ErrorValue, => return datas[n].rhs + end_offset, .AnyType, @@ -854,7 +856,7 @@ pub const Tree = struct { assert(extra.else_expr != 0); n = extra.else_expr; }, - .If => { + .If, .For => { const extra = tree.extraData(datas[n].rhs, Node.If); assert(extra.else_expr != 0); n = extra.else_expr; @@ -876,9 +878,6 @@ pub const Tree = struct { .SwitchRange => unreachable, // TODO .ArrayType => unreachable, // TODO .ArrayTypeSentinel => unreachable, // TODO - .ForSimple => unreachable, // TODO - .For => unreachable, // TODO - .ErrorValue => unreachable, // TODO }; } @@ -1457,6 +1456,29 @@ pub const Tree = struct { }); } + pub fn forSimple(tree: Tree, node: Node.Index) full.While { + const data = tree.nodes.items(.data)[node]; + return tree.fullWhile(.{ + .while_token = tree.nodes.items(.main_token)[node], + .cond_expr = data.lhs, + .cont_expr = 0, + .then_expr = data.rhs, + .else_expr = 0, + }); + } + + pub fn forFull(tree: Tree, node: Node.Index) full.While { + const data = tree.nodes.items(.data)[node]; + const extra = tree.extraData(data.rhs, Node.If); + return tree.fullWhile(.{ + .while_token = 
tree.nodes.items(.main_token)[node], + .cond_expr = data.lhs, + .cont_expr = 0, + .then_expr = extra.then_expr, + .else_expr = extra.else_expr, + }); + } + fn fullVarDecl(tree: Tree, info: full.VarDecl.Ast) full.VarDecl { const token_tags = tree.tokens.items(.tag); var result: full.VarDecl = .{ @@ -2356,6 +2378,7 @@ pub const Node = struct { /// main_token is the `=>` SwitchCaseOne, /// `a, b, c => rhs`. `SubRange[lhs]`. + /// main_token is the `=>` SwitchCase, /// `lhs...rhs`. SwitchRange, diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig index 7e2da72d00..eb15a29650 100644 --- a/lib/std/zig/parse.zig +++ b/lib/std/zig/parse.zig @@ -835,12 +835,11 @@ const Parser = struct { }); } - const token = p.nextToken(); - switch (p.token_tags[token]) { + switch (p.token_tags[p.tok_i]) { .Keyword_nosuspend => { return p.addNode(.{ .tag = .Nosuspend, - .main_token = token, + .main_token = p.nextToken(), .data = .{ .lhs = try p.expectBlockExprStatement(), .rhs = undefined, @@ -848,6 +847,7 @@ const Parser = struct { }); }, .Keyword_suspend => { + const token = p.nextToken(); const block_expr: Node.Index = if (p.eatToken(.Semicolon) != null) 0 else @@ -863,7 +863,7 @@ const Parser = struct { }, .Keyword_defer => return p.addNode(.{ .tag = .Defer, - .main_token = token, + .main_token = p.nextToken(), .data = .{ .lhs = undefined, .rhs = try p.expectBlockExprStatement(), @@ -871,24 +871,20 @@ const Parser = struct { }), .Keyword_errdefer => return p.addNode(.{ .tag = .ErrDefer, - .main_token = token, + .main_token = p.nextToken(), .data = .{ .lhs = try p.parsePayload(), .rhs = try p.expectBlockExprStatement(), }, }), - else => p.tok_i -= 1, + .Keyword_switch => return p.expectSwitchExpr(), + .Keyword_if => return p.expectIfStatement(), + else => {}, } - const if_statement = try p.parseIfStatement(); - if (if_statement != 0) return if_statement; - const labeled_statement = try p.parseLabeledStatement(); if (labeled_statement != 0) return labeled_statement; - const 
switch_expr = try p.parseSwitchExpr(); - if (switch_expr != 0) return switch_expr; - const assign_expr = try p.parseAssignExpr(); if (assign_expr != 0) { _ = try p.expectTokenRecoverable(.Semicolon); @@ -925,8 +921,8 @@ const Parser = struct { /// IfStatement /// <- IfPrefix BlockExpr ( KEYWORD_else Payload? Statement )? /// / IfPrefix AssignExpr ( SEMICOLON / KEYWORD_else Payload? Statement ) - fn parseIfStatement(p: *Parser) !Node.Index { - const if_token = p.eatToken(.Keyword_if) orelse return null_node; + fn expectIfStatement(p: *Parser) !Node.Index { + const if_token = p.assertToken(.Keyword_if); _ = try p.expectToken(.LParen); const condition = try p.expectExpr(); _ = try p.expectToken(.RParen); @@ -2441,7 +2437,7 @@ const Parser = struct { .Builtin => return p.parseBuiltinCall(), .Keyword_fn => return p.parseFnProto(), .Keyword_if => return p.parseIf(parseTypeExpr), - .Keyword_switch => return p.parseSwitchExpr(), + .Keyword_switch => return p.expectSwitchExpr(), .Keyword_extern, .Keyword_packed, @@ -2880,8 +2876,8 @@ const Parser = struct { } /// SwitchExpr <- KEYWORD_switch LPAREN Expr RPAREN LBRACE SwitchProngList RBRACE - fn parseSwitchExpr(p: *Parser) !Node.Index { - const switch_token = p.eatToken(.Keyword_switch) orelse return null_node; + fn expectSwitchExpr(p: *Parser) !Node.Index { + const switch_token = p.assertToken(.Keyword_switch); _ = try p.expectToken(.LParen); const expr_node = try p.expectExpr(); _ = try p.expectToken(.RParen); @@ -3191,8 +3187,7 @@ const Parser = struct { const first_item = try p.parseSwitchItem(); if (first_item == 0) return null_node; - if (p.token_tags[p.tok_i] == .RBrace) { - const arrow_token = try p.expectToken(.EqualAngleBracketRight); + if (p.eatToken(.EqualAngleBracketRight)) |arrow_token| { _ = try p.parsePtrPayload(); return p.addNode(.{ .tag = .SwitchCaseOne, diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 0d5775fd61..31d0a821d8 100644 --- a/lib/std/zig/parser_test.zig +++ 
b/lib/std/zig/parser_test.zig @@ -2830,86 +2830,81 @@ test "zig fmt: switch" { // \\ // ); //} -// -//test "zig fmt: for" { -// try testCanonical( -// \\test "for" { -// \\ for (a) |v| { -// \\ continue; -// \\ } -// \\ -// \\ for (a) |v| continue; -// \\ -// \\ for (a) |v| continue else return; -// \\ -// \\ for (a) |v| { -// \\ continue; -// \\ } else return; -// \\ -// \\ for (a) |v| continue else { -// \\ return; -// \\ } -// \\ -// \\ for (a) |v| -// \\ continue -// \\ else -// \\ return; -// \\ -// \\ for (a) |v| -// \\ continue; -// \\ -// \\ for (a) |*v| -// \\ continue; -// \\ -// \\ for (a) |v, i| { -// \\ continue; -// \\ } -// \\ -// \\ for (a) |v, i| -// \\ continue; -// \\ -// \\ for (a) |b| switch (b) { -// \\ c => {}, -// \\ d => {}, -// \\ }; -// \\ -// \\ for (a) |b| -// \\ switch (b) { -// \\ c => {}, -// \\ d => {}, -// \\ }; -// \\ -// \\ const res = for (a) |v, i| { -// \\ break v; -// \\ } else { -// \\ unreachable; -// \\ }; -// \\ -// \\ var num: usize = 0; -// \\ inline for (a) |v, i| { -// \\ num += v; -// \\ num += i; -// \\ } -// \\} -// \\ -// ); -// -// try testTransform( -// \\test "fix for" { -// \\ for (a) |x| -// \\ f(x) else continue; -// \\} -// \\ -// , -// \\test "fix for" { -// \\ for (a) |x| -// \\ f(x) -// \\ else continue; -// \\} -// \\ -// ); -//} -// + +test "zig fmt: for" { + try testCanonical( + \\test "for" { + \\ for (a) |v| { + \\ continue; + \\ } + \\ + \\ for (a) |v| continue; + \\ + \\ for (a) |v| continue else return; + \\ + \\ for (a) |v| { + \\ continue; + \\ } else return; + \\ + \\ for (a) |v| continue else { + \\ return; + \\ } + \\ + \\ for (a) |v| + \\ continue + \\ else + \\ return; + \\ + \\ for (a) |v| + \\ continue; + \\ + \\ for (a) |*v| + \\ continue; + \\ + \\ for (a) |v, i| { + \\ continue; + \\ } + \\ + \\ for (a) |v, i| + \\ continue; + \\ + \\ for (a) |b| switch (b) { + \\ c => {}, + \\ d => {}, + \\ }; + \\ + \\ const res = for (a) |v, i| { + \\ break v; + \\ } else { + \\ unreachable; + \\ 
}; + \\ + \\ var num: usize = 0; + \\ inline for (a) |v, i| { + \\ num += v; + \\ num += i; + \\ } + \\} + \\ + ); + + try testTransform( + \\test "fix for" { + \\ for (a) |x| + \\ f(x) else continue; + \\} + \\ + , + \\test "fix for" { + \\ for (a) |x| + \\ f(x) + \\ else + \\ continue; + \\} + \\ + ); +} + //test "zig fmt: if" { // try testCanonical( // \\test "if" { diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 8ce7d8bb7b..17e0b0b297 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -498,18 +498,17 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac } }, - .Continue => unreachable, // TODO - //.Continue => { - // const flow_expr = base.castTag(.Continue).?; - // if (flow_expr.getLabel()) |label| { - // try renderToken(ais, tree, flow_expr.ltoken, Space.Space); // continue - // const colon = tree.nextToken(flow_expr.ltoken); - // try renderToken(ais, tree, colon, Space.None); // : - // return renderToken(ais, tree, label, space); // label - // } else { - // return renderToken(ais, tree, flow_expr.ltoken, space); // continue - // } - //}, + .Continue => { + const main_token = main_tokens[node]; + const label = datas[node].lhs; + if (label != 0) { + try renderToken(ais, tree, main_token, .Space); // continue + try renderToken(ais, tree, label - 1, .None); // : + return renderToken(ais, tree, label, space); // label + } else { + return renderToken(ais, tree, main_token, space); // continue + } + }, .Return => { if (datas[node].lhs != 0) { @@ -664,17 +663,16 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac if (cases.len == 0) { try renderToken(ais, tree, rparen + 1, .None); // lbrace return renderToken(ais, tree, rparen + 2, space); // rbrace - } else { - try renderToken(ais, tree, rparen + 1, .Newline); // lbrace - ais.pushIndent(); - try renderExpression(ais, tree, cases[0], .Comma); - for (cases[1..]) |case| { - try renderExtraNewline(ais, tree, case); - try 
renderExpression(ais, tree, case, .Comma); - } - ais.popIndent(); - return renderToken(ais, tree, tree.lastToken(node), space); // rbrace } + try renderToken(ais, tree, rparen + 1, .Newline); // lbrace + ais.pushIndent(); + try renderExpression(ais, tree, cases[0], .Comma); + for (cases[1..]) |case| { + try renderExtraNewline(ais, tree, case); + try renderExpression(ais, tree, case, .Comma); + } + ais.popIndent(); + return renderToken(ais, tree, tree.lastToken(node), space); // rbrace }, .SwitchCaseOne => return renderSwitchCase(ais, tree, tree.switchCaseOne(node), space), @@ -683,59 +681,8 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac .WhileSimple => return renderWhile(ais, tree, tree.whileSimple(node), space), .WhileCont => return renderWhile(ais, tree, tree.whileCont(node), space), .While => return renderWhile(ais, tree, tree.whileFull(node), space), - - .ForSimple => unreachable, // TODO - .For => unreachable, // TODO - //.For => { - // const for_node = @fieldParentPtr(ast.Node.For, "base", base); - - // if (for_node.label) |label| { - // try renderToken(ais, tree, label, Space.None); // label - // try renderToken(ais, tree, tree.nextToken(label), Space.Space); // : - // } - - // if (for_node.inline_token) |inline_token| { - // try renderToken(ais, tree, inline_token, Space.Space); // inline - // } - - // try renderToken(ais, tree, for_node.for_token, Space.Space); // for - // try renderToken(ais, tree, tree.nextToken(for_node.for_token), Space.None); // ( - // try renderExpression(ais, tree, for_node.array_expr, Space.None); - - // const rparen = tree.nextToken(for_node.array_expr.lastToken()); - - // const body_is_block = for_node.body.tag.isBlock(); - // const src_one_line_to_body = !body_is_block and tree.tokensOnSameLine(rparen, for_node.body.firstToken()); - // const body_on_same_line = body_is_block or src_one_line_to_body; - - // try renderToken(ais, tree, rparen, Space.Space); // ) - - // const space_after_payload 
= if (body_on_same_line) Space.Space else Space.Newline; - // try renderExpression(ais, tree, for_node.payload, space_after_payload); // |x| - - // const space_after_body = blk: { - // if (for_node.@"else") |@"else"| { - // const src_one_line_to_else = tree.tokensOnSameLine(rparen, @"else".firstToken()); - // if (body_is_block or src_one_line_to_else) { - // break :blk Space.Space; - // } else { - // break :blk Space.Newline; - // } - // } else { - // break :blk space; - // } - // }; - - // { - // if (!body_on_same_line) ais.pushIndent(); - // defer if (!body_on_same_line) ais.popIndent(); - // try renderExpression(ais, tree, for_node.body, space_after_body); // { body } - // } - - // if (for_node.@"else") |@"else"| { - // return renderExpression(ais, tree, &@"else".base, space); // else - // } - //}, + .ForSimple => return renderWhile(ais, tree, tree.forSimple(node), space), + .For => return renderWhile(ais, tree, tree.forFull(node), space), .IfSimple => return renderIf(ais, tree, tree.ifSimple(node), space), .If => return renderIf(ais, tree, tree.ifFull(node), space), @@ -1041,7 +988,7 @@ fn renderIf(ais: *Ais, tree: ast.Tree, if_node: ast.full.If, space: Space) Error }, space); } -/// Note that this function is additionally used to render if expressions, with +/// Note that this function is additionally used to render if and for expressions, with /// respective values set to null. 
fn renderWhile(ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Space) Error!void { const node_tags = tree.nodes.items(.tag); @@ -1061,21 +1008,31 @@ fn renderWhile(ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Spa try renderExpression(ais, tree, while_node.ast.cond_expr, .None); // condition if (nodeIsBlock(node_tags[while_node.ast.then_expr])) { - const payload_space: Space = if (while_node.ast.cont_expr != 0) .Space else .BlockStart; if (while_node.payload_token) |payload_token| { try renderToken(ais, tree, payload_token - 2, .Space); // ) try renderToken(ais, tree, payload_token - 1, .None); // | - if (token_tags[payload_token] == .Asterisk) { - try renderToken(ais, tree, payload_token, .None); // * - try renderToken(ais, tree, payload_token + 1, .None); // identifier - try renderToken(ais, tree, payload_token + 2, payload_space); // | - } else { - try renderToken(ais, tree, payload_token, .None); // identifier - try renderToken(ais, tree, payload_token + 1, payload_space); // | - } + const ident = blk: { + if (token_tags[payload_token] == .Asterisk) { + try renderToken(ais, tree, payload_token, .None); // * + break :blk payload_token + 1; + } else { + break :blk payload_token; + } + }; + try renderToken(ais, tree, ident, .None); // identifier + const pipe = blk: { + if (token_tags[ident + 1] == .Comma) { + try renderToken(ais, tree, ident + 1, .Space); // , + try renderToken(ais, tree, ident + 2, .None); // index + break :blk payload_token + 3; + } else { + break :blk ident + 1; + } + }; + try renderToken(ais, tree, pipe, .Space); // | } else { const rparen = tree.lastToken(while_node.ast.cond_expr) + 1; - try renderToken(ais, tree, rparen, payload_space); // ) + try renderToken(ais, tree, rparen, .Space); // ) } if (while_node.ast.cont_expr != 0) { const rparen = tree.lastToken(while_node.ast.cont_expr) + 1; @@ -1083,7 +1040,7 @@ fn renderWhile(ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Spa try renderToken(ais, 
tree, lparen - 1, .Space); // : try renderToken(ais, tree, lparen, .None); // lparen try renderExpression(ais, tree, while_node.ast.cont_expr, .None); - try renderToken(ais, tree, rparen, .BlockStart); // rparen + try renderToken(ais, tree, rparen, .Space); // rparen } if (while_node.ast.else_expr != 0) { try renderExpression(ais, tree, while_node.ast.then_expr, Space.Space); @@ -1104,15 +1061,31 @@ fn renderWhile(ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Spa const src_has_newline = !tree.tokensOnSameLine(rparen, last_then_token); if (src_has_newline) { - const payload_space: Space = if (while_node.ast.cont_expr != 0) .Space else .Newline; if (while_node.payload_token) |payload_token| { try renderToken(ais, tree, payload_token - 2, .Space); // ) try renderToken(ais, tree, payload_token - 1, .None); // | - try renderToken(ais, tree, payload_token, .None); // identifier - try renderToken(ais, tree, payload_token + 1, payload_space); // | + const ident = blk: { + if (token_tags[payload_token] == .Asterisk) { + try renderToken(ais, tree, payload_token, .None); // * + break :blk payload_token + 1; + } else { + break :blk payload_token; + } + }; + try renderToken(ais, tree, ident, .None); // identifier + const pipe = blk: { + if (token_tags[ident + 1] == .Comma) { + try renderToken(ais, tree, ident + 1, .Space); // , + try renderToken(ais, tree, ident + 2, .None); // index + break :blk payload_token + 3; + } else { + break :blk ident + 1; + } + }; + try renderToken(ais, tree, pipe, .Newline); // | } else { ais.pushIndent(); - try renderToken(ais, tree, rparen, payload_space); // ) + try renderToken(ais, tree, rparen, .Newline); // ) ais.popIndent(); } if (while_node.ast.cont_expr != 0) { @@ -1164,14 +1137,25 @@ fn renderWhile(ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Spa assert(payload_token - 2 == rparen); try renderToken(ais, tree, payload_token - 2, .Space); // ) try renderToken(ais, tree, payload_token - 1, .None); // | - if 
(token_tags[payload_token] == .Asterisk) { - try renderToken(ais, tree, payload_token, .None); // * - try renderToken(ais, tree, payload_token + 1, .None); // identifier - try renderToken(ais, tree, payload_token + 2, .Space); // | - } else { - try renderToken(ais, tree, payload_token, .None); // identifier - try renderToken(ais, tree, payload_token + 1, .Space); // | - } + const ident = blk: { + if (token_tags[payload_token] == .Asterisk) { + try renderToken(ais, tree, payload_token, .None); // * + break :blk payload_token + 1; + } else { + break :blk payload_token; + } + }; + try renderToken(ais, tree, ident, .None); // identifier + const pipe = blk: { + if (token_tags[ident + 1] == .Comma) { + try renderToken(ais, tree, ident + 1, .Space); // , + try renderToken(ais, tree, ident + 2, .None); // index + break :blk payload_token + 3; + } else { + break :blk ident + 1; + } + }; + try renderToken(ais, tree, pipe, .Space); // | } else { try renderToken(ais, tree, rparen, .Space); // ) } @@ -1952,18 +1936,9 @@ const Space = enum { Semicolon, /// Skips writing the possible line comment after the token. NoComment, - /// Intended when rendering lbrace tokens. Depending on whether the line is - /// "over indented", will output a newline or a single space afterwards. - /// See `std.io.AutoIndentingStream` for the definition of "over indented". 
- BlockStart, }; fn renderToken(ais: *Ais, tree: ast.Tree, token_index: ast.TokenIndex, space: Space) Error!void { - if (space == Space.BlockStart) { - const new_space: Space = if (ais.isLineOverIndented()) .Newline else .Space; - return renderToken(ais, tree, token_index, new_space); - } - const token_tags = tree.tokens.items(.tag); const token_starts = tree.tokens.items(.start); @@ -2020,7 +1995,6 @@ fn renderToken(ais: *Ais, tree: ast.Tree, token_index: ast.TokenIndex, space: Sp try ais.insertNewline(); } }, - .BlockStart => unreachable, } } @@ -2069,6 +2043,7 @@ fn nodeIsBlock(tag: ast.Node.Tag) bool { .WhileSimple, .WhileCont, .Switch, + .SwitchComma, => true, else => false, }; From ebf04c56e12c8dd7cf503f042e698984704b14b2 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Tue, 9 Feb 2021 20:17:47 -0700 Subject: [PATCH 035/173] zig fmt: usingnamespace --- lib/std/zig/parser_test.zig | 18 +++++++++--------- lib/std/zig/render.zig | 23 +++++++++++------------ 2 files changed, 20 insertions(+), 21 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 31d0a821d8..0adcf68568 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -3082,15 +3082,15 @@ test "zig fmt: for" { // \\ // ); //} -// -//test "zig fmt: use" { -// try testCanonical( -// \\usingnamespace @import("std"); -// \\pub usingnamespace @import("std"); -// \\ -// ); -//} -// + +test "zig fmt: usingnamespace" { + try testCanonical( + \\usingnamespace @import("std"); + \\pub usingnamespace @import("std"); + \\ + ); +} + //test "zig fmt: string identifier" { // try testCanonical( // \\const @"a b" = @"c d".@"e f"; diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 17e0b0b297..32ae6f4876 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -131,20 +131,19 @@ fn renderMember(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) E .FnProto, => { try renderExpression(ais, tree, decl, .None); - try 
renderToken(ais, tree, tree.lastToken(decl) + 1, space); // semicolon + return renderToken(ais, tree, tree.lastToken(decl) + 1, space); // semicolon }, - .UsingNamespace => unreachable, // TODO - // .Use => { - // const use_decl = @fieldParentPtr(ast.Node.Use, "base", decl); - - // if (use_decl.visib_token) |visib_token| { - // try renderToken(ais, tree, visib_token, .Space); // pub - // } - // try renderToken(ais, tree, use_decl.use_token, .Space); // usingnamespace - // try renderExpression(ais, tree, use_decl.expr, .None); - // try renderToken(ais, tree, use_decl.semicolon_token, space); // ; - // }, + .UsingNamespace => { + const main_token = main_tokens[decl]; + const expr = datas[decl].lhs; + if (main_token > 0 and token_tags[main_token - 1] == .Keyword_pub) { + try renderToken(ais, tree, main_token - 1, .Space); // pub + } + try renderToken(ais, tree, main_token, .Space); // usingnamespace + try renderExpression(ais, tree, expr, .None); + return renderToken(ais, tree, tree.lastToken(expr) + 1, space); // ; + }, .GlobalVarDecl => return renderVarDecl(ais, tree, tree.globalVarDecl(decl)), .LocalVarDecl => return renderVarDecl(ais, tree, tree.localVarDecl(decl)), From 7295d4b8079b5cf3fcdbb81dd9e129f3f0f139aa Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Tue, 9 Feb 2021 20:35:43 -0700 Subject: [PATCH 036/173] zig fmt: suspend blocks --- lib/std/zig/ast.zig | 21 +++++++++++++++++++-- lib/std/zig/parser_test.zig | 24 ++++++++++++------------ lib/std/zig/render.zig | 21 ++++++++++----------- 3 files changed, 41 insertions(+), 25 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index aa6962c247..8caafa73c8 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -459,7 +459,6 @@ pub const Tree = struct { .Try, .Await, .OptionalType, - .Suspend, .Resume, .Nosuspend, .Comptime, @@ -625,7 +624,6 @@ pub const Tree = struct { }, .ArrayInitDotTwo, - .BuiltinCallTwo, .BlockTwo, .StructInitDotTwo, .ContainerDeclTwo, @@ -640,6 +638,18 @@ pub 
const Tree = struct { return main_tokens[n] + end_offset; } }, + .BuiltinCallTwo => { + if (datas[n].rhs != 0) { + end_offset += 1; // for the rparen/rbrace + n = datas[n].rhs; + } else if (datas[n].lhs != 0) { + end_offset += 1; // for the rparen/rbrace + n = datas[n].lhs; + } else { + end_offset += 2; // for the lparen and rparen + return main_tokens[n] + end_offset; + } + }, .ArrayInitDotTwoComma, .BuiltinCallTwoComma, .BlockTwoSemicolon, @@ -861,6 +871,13 @@ pub const Tree = struct { assert(extra.else_expr != 0); n = extra.else_expr; }, + .Suspend => { + if (datas[n].lhs != 0) { + n = datas[n].lhs; + } else { + return main_tokens[n] + end_offset; + } + }, // These are not supported by lastToken() because implementation would // require recursion due to the optional comma followed by rbrace. diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 0adcf68568..7c7ecd2499 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -2124,18 +2124,18 @@ test "zig fmt: error set declaration" { // \\ // ); //} -// -//test "zig fmt: resume from suspend block" { -// try testCanonical( -// \\fn foo() void { -// \\ suspend { -// \\ resume @frame(); -// \\ } -// \\} -// \\ -// ); -//} -// + +test "zig fmt: resume from suspend block" { + try testCanonical( + \\fn foo() void { + \\ suspend { + \\ resume @frame(); + \\ } + \\} + \\ + ); +} + //test "zig fmt: comments before error set decl" { // try testCanonical( // \\const UnexpectedError = error{ diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 32ae6f4876..dd1c6d62f8 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -238,17 +238,16 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac return renderExpression(ais, tree, block, space); }, - .Suspend => unreachable, // TODO - //.Suspend => { - // const suspend_node = @fieldParentPtr(ast.Node.Suspend, "base", base); - - // if (suspend_node.body) |body| { - // try 
renderToken(ais, tree, suspend_node.suspend_token, Space.Space); - // return renderExpression(ais, tree, body, space); - // } else { - // return renderToken(ais, tree, suspend_node.suspend_token, space); - // } - //}, + .Suspend => { + const suspend_token = main_tokens[node]; + const body = datas[node].lhs; + if (body != 0) { + try renderToken(ais, tree, suspend_token, .Space); + return renderExpression(ais, tree, body, space); + } else { + return renderToken(ais, tree, suspend_token, space); + } + }, .Catch => { const main_token = main_tokens[node]; From 25bcf4eb99d367965b0f699faff5d3fd1d991941 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Tue, 9 Feb 2021 21:17:43 -0700 Subject: [PATCH 037/173] zig fmt: grouped expressions --- lib/std/zig/parser_test.zig | 7 +++++++ lib/std/zig/render.zig | 16 +++++----------- 2 files changed, 12 insertions(+), 11 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 7c7ecd2499..73c4458352 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -291,6 +291,13 @@ test "zig fmt: break from block" { ); } +test "zig fmt: grouped expressions (parentheses)" { + try testCanonical( + \\const r = (x + y) * (a + b); + \\ + ); +} + //test "zig fmt: c pointer type" { // try testCanonical( // \\pub extern fn repro() [*c]const u8; diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index dd1c6d62f8..b1ff7ff64b 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -517,17 +517,11 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac } }, - .GroupedExpression => unreachable, // TODO - //.GroupedExpression => { - // const grouped_expr = @fieldParentPtr(ast.Node.GroupedExpression, "base", base); - - // try renderToken(ais, tree, grouped_expr.lparen, Space.None); - // { - // ais.pushIndentOneShot(); - // try renderExpression(ais, tree, grouped_expr.expr, Space.None); - // } - // return renderToken(ais, tree, grouped_expr.rparen, 
space); - //}, + .GroupedExpression => { + try renderToken(ais, tree, main_tokens[node], .None); + try renderExpression(ais, tree, datas[node].lhs, .None); + return renderToken(ais, tree, datas[node].rhs, space); + }, .ContainerDecl, .ContainerDeclComma, From 36eee7bc6cbe6fcc388ebdc38fd3c5f06c9e9043 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Tue, 9 Feb 2021 22:25:49 -0700 Subject: [PATCH 038/173] zig fmt: anytype, fn calls with one param, trailing commas and extra newlines between top level declarations --- lib/std/zig/ast.zig | 124 ++++++++++++++++----- lib/std/zig/parse.zig | 210 ++++++++++++++++++++++++++++++------ lib/std/zig/parser_test.zig | 58 +++++----- lib/std/zig/render.zig | 173 +++++++++++++++-------------- 4 files changed, 404 insertions(+), 161 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 8caafa73c8..35b10c6e2c 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -196,6 +196,7 @@ pub const Tree = struct { const datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); const token_tags = tree.tokens.items(.tag); + var end_offset: TokenIndex = 0; var n = node; while (true) switch (tags[n]) { .Root => return 0, @@ -251,7 +252,7 @@ pub const Tree = struct { .ArrayType, .ArrayTypeSentinel, .ErrorValue, - => return main_tokens[n], + => return main_tokens[n] - end_offset, .ArrayInitDot, .ArrayInitDotTwo, @@ -260,7 +261,7 @@ pub const Tree = struct { .StructInitDotTwo, .StructInitDotTwoComma, .EnumLiteral, - => return main_tokens[n] - 1, + => return main_tokens[n] - 1 - end_offset, .Catch, .FieldAccess, @@ -314,24 +315,32 @@ pub const Tree = struct { .StructInitOne, .StructInit, .CallOne, + .CallOneComma, .Call, + .CallComma, .SwitchRange, .FnDecl, .ErrorUnion, => n = datas[n].lhs, + .AsyncCallOne, + .AsyncCallOneComma, + .AsyncCall, + .AsyncCallComma, + => { + end_offset += 1; // async token + n = datas[n].lhs; + }, + .ContainerFieldInit, .ContainerFieldAlign, .ContainerField, => { const 
name_token = main_tokens[n]; - if (name_token > 0 and - token_tags[name_token - 1] == .Keyword_comptime) - { - return name_token - 1; - } else { - return name_token; + if (name_token > 0 and token_tags[name_token - 1] == .Keyword_comptime) { + end_offset += 1; } + return name_token - end_offset; }, .GlobalVarDecl, @@ -351,10 +360,10 @@ pub const Tree = struct { .StringLiteral, => continue, - else => return i + 1, + else => return i + 1 - end_offset, } } - return i; + return i - end_offset; }, .Block, @@ -365,10 +374,9 @@ pub const Tree = struct { // Look for a label. const lbrace = main_tokens[n]; if (token_tags[lbrace - 1] == .Colon) { - return lbrace - 2; - } else { - return lbrace; + end_offset += 2; } + return lbrace - end_offset; }, .ContainerDecl, @@ -386,9 +394,10 @@ pub const Tree = struct { => { const main_token = main_tokens[n]; switch (token_tags[main_token - 1]) { - .Keyword_packed, .Keyword_extern => return main_token - 1, - else => return main_token, + .Keyword_packed, .Keyword_extern => end_offset += 1, + else => {}, } + return main_token - end_offset; }, .PtrTypeAligned, @@ -404,12 +413,12 @@ pub const Tree = struct { }, .LBrace => main_token, else => unreachable, - }; + } - end_offset; }, .SwitchCaseOne => { if (datas[n].lhs == 0) { - return main_tokens[n] - 1; // else token + return main_tokens[n] - 1 - end_offset; // else token } else { n = datas[n].lhs; } @@ -422,7 +431,7 @@ pub const Tree = struct { .AsmOutput, .AsmInput => { assert(token_tags[main_tokens[n] - 1] == .LBracket); - return main_tokens[n] - 1; + return main_tokens[n] - 1 - end_offset; }, .WhileSimple, @@ -435,7 +444,7 @@ pub const Tree = struct { return switch (token_tags[main_token - 1]) { .Keyword_inline => main_token - 1, else => main_token, - }; + } - end_offset; }, }; } @@ -555,7 +564,7 @@ pub const Tree = struct { return main_tokens[n] + end_offset; }, - .Call => { + .Call, .AsyncCall => { end_offset += 1; // for the rparen const params = tree.extraData(datas[n].rhs, 
Node.SubRange); if (params.end - params.start == 0) { @@ -563,6 +572,12 @@ pub const Tree = struct { } n = tree.extra_data[params.end - 1]; // last parameter }, + .CallComma, .AsyncCallComma => { + end_offset += 2; // for the comma+rparen + const params = tree.extraData(datas[n].rhs, Node.SubRange); + assert(params.end > params.start); + n = tree.extra_data[params.end - 1]; // last parameter + }, .Switch => { const cases = tree.extraData(datas[n].rhs, Node.SubRange); if (cases.end - cases.start == 0) { @@ -614,6 +629,7 @@ pub const Tree = struct { n = tree.extra_data[datas[n].rhs - 1]; // last member }, .CallOne, + .AsyncCallOne, .ArrayAccess, => { end_offset += 1; // for the rparen/rbracket @@ -622,7 +638,6 @@ pub const Tree = struct { } n = datas[n].rhs; }, - .ArrayInitDotTwo, .BlockTwo, .StructInitDotTwo, @@ -755,9 +770,10 @@ pub const Tree = struct { } }, - .SliceOpen => { - end_offset += 2; // ellipsis2 and rbracket + .SliceOpen, .CallOneComma, .AsyncCallOneComma => { + end_offset += 2; // ellipsis2 + rbracket, or comma + rparen n = datas[n].rhs; + assert(n != 0); }, .Slice => { const extra = tree.extraData(datas[n].rhs, Node.Slice); @@ -1496,6 +1512,27 @@ pub const Tree = struct { }); } + pub fn callOne(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) full.Call { + const data = tree.nodes.items(.data)[node]; + buffer.* = .{data.rhs}; + const params = if (data.rhs != 0) buffer[0..1] else buffer[0..0]; + return tree.fullCall(.{ + .lparen = tree.nodes.items(.main_token)[node], + .fn_expr = data.lhs, + .params = params, + }); + } + + pub fn callFull(tree: Tree, node: Node.Index) full.Call { + const data = tree.nodes.items(.data)[node]; + const extra = tree.extraData(data.rhs, Node.SubRange); + return tree.fullCall(.{ + .lparen = tree.nodes.items(.main_token)[node], + .fn_expr = data.lhs, + .params = tree.extra_data[extra.start..extra.end], + }); + } + fn fullVarDecl(tree: Tree, info: full.VarDecl.Ast) full.VarDecl { const token_tags = 
tree.tokens.items(.tag); var result: full.VarDecl = .{ @@ -1750,6 +1787,19 @@ pub const Tree = struct { } return result; } + + fn fullCall(tree: Tree, info: full.Call.Ast) full.Call { + const token_tags = tree.tokens.items(.tag); + var result: full.Call = .{ + .ast = info, + .async_token = null, + }; + const maybe_async_token = tree.firstToken(info.fn_expr) - 1; + if (token_tags[maybe_async_token] == .Keyword_async) { + result.async_token = maybe_async_token; + } + return result; + } }; /// Fully assembled AST node information. @@ -1942,6 +1992,17 @@ pub const full = struct { rparen: TokenIndex, }; }; + + pub const Call = struct { + ast: Ast, + async_token: ?TokenIndex, + + pub const Ast = struct { + lparen: TokenIndex, + fn_expr: Node.Index, + params: []const Node.Index, + }; + }; }; pub const Error = union(enum) { @@ -2383,9 +2444,24 @@ pub const Node = struct { StructInit, /// `lhs(rhs)`. rhs can be omitted. CallOne, - /// `lhs(a, b, c)`. `sub_range_list[rhs]`. + /// `lhs(rhs,)`. rhs can be omitted. + CallOneComma, + /// `async lhs(rhs)`. rhs can be omitted. + AsyncCallOne, + /// `async lhs(rhs,)`. + AsyncCallOneComma, + /// `lhs(a, b, c)`. `SubRange[rhs]`. /// main_token is the `(`. Call, + /// `lhs(a, b, c,)`. `SubRange[rhs]`. + /// main_token is the `(`. + CallComma, + /// `async lhs(a, b, c)`. `SubRange[rhs]`. + /// main_token is the `(`. + AsyncCall, + /// `async lhs(a, b, c,)`. `SubRange[rhs]`. + /// main_token is the `(`. + AsyncCallComma, /// `switch(lhs) {}`. `SubRange[rhs]`. Switch, /// Same as Switch except there is known to be a trailing comma diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig index eb15a29650..da45090ffd 100644 --- a/lib/std/zig/parse.zig +++ b/lib/std/zig/parse.zig @@ -2254,8 +2254,6 @@ const Parser = struct { /// / PrimaryTypeExpr (SuffixOp / FnCallArguments)* /// FnCallArguments <- LPAREN ExprList RPAREN /// ExprList <- (Expr COMMA)* Expr? 
- /// TODO detect when there is 1 or less parameter to the call and emit - /// CallOne instead of Call. fn parseSuffixExpr(p: *Parser) !Node.Index { if (p.eatToken(.Keyword_async)) |async_token| { var res = try p.expectPrimaryTypeExpr(); @@ -2269,20 +2267,95 @@ const Parser = struct { try p.warn(.{ .ExpectedParamList = .{ .token = p.tok_i } }); return res; }; - const params = try ListParseFn(parseExpr)(p); - _ = try p.expectToken(.RParen); + if (p.eatToken(.RParen)) |_| { + return p.addNode(.{ + .tag = .AsyncCallOne, + .main_token = lparen, + .data = .{ + .lhs = res, + .rhs = 0, + }, + }); + } + const param_one = try p.expectExpr(); + const comma_one = p.eatToken(.Comma); + if (p.eatToken(.RParen)) |_| { + return p.addNode(.{ + .tag = if (comma_one == null) .AsyncCallOne else .AsyncCallOneComma, + .main_token = lparen, + .data = .{ + .lhs = res, + .rhs = param_one, + }, + }); + } + if (comma_one == null) { + try p.warn(.{ + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + }); + } - return p.addNode(.{ - .tag = .Call, - .main_token = lparen, - .data = .{ - .lhs = res, - .rhs = try p.addExtra(Node.SubRange{ - .start = params.start, - .end = params.end, - }), - }, - }); + var param_list = std.ArrayList(Node.Index).init(p.gpa); + defer param_list.deinit(); + + try param_list.append(param_one); + + while (true) { + const next = try p.expectExpr(); + try param_list.append(next); + switch (p.token_tags[p.nextToken()]) { + .Comma => { + if (p.eatToken(.RParen)) |_| { + const span = try p.listToSpan(param_list.items); + return p.addNode(.{ + .tag = .AsyncCallComma, + .main_token = lparen, + .data = .{ + .lhs = res, + .rhs = try p.addExtra(Node.SubRange{ + .start = span.start, + .end = span.end, + }), + }, + }); + } else { + continue; + } + }, + .RParen => { + const span = try p.listToSpan(param_list.items); + return p.addNode(.{ + .tag = .AsyncCall, + .main_token = lparen, + .data = .{ + .lhs = res, + .rhs = try p.addExtra(Node.SubRange{ + .start = 
span.start, + .end = span.end, + }), + }, + }); + }, + .Colon, .RBrace, .RBracket => { + p.tok_i -= 1; + return p.fail(.{ + .ExpectedToken = .{ + .token = p.tok_i, + .expected_id = .RParen, + }, + }); + }, + else => { + p.tok_i -= 1; + try p.warn(.{ + .ExpectedToken = .{ + .token = p.tok_i, + .expected_id = .Comma, + }, + }); + }, + } + } } var res = try p.parsePrimaryTypeExpr(); if (res == 0) return res; @@ -2293,21 +2366,98 @@ const Parser = struct { res = suffix_op; continue; } - const lparen = p.eatToken(.LParen) orelse return res; - const params = try ListParseFn(parseExpr)(p); - _ = try p.expectToken(.RParen); + res = res: { + const lparen = p.eatToken(.LParen) orelse return res; + if (p.eatToken(.RParen)) |_| { + break :res try p.addNode(.{ + .tag = .CallOne, + .main_token = lparen, + .data = .{ + .lhs = res, + .rhs = 0, + }, + }); + } + const param_one = try p.expectExpr(); + const comma_one = p.eatToken(.Comma); + if (p.eatToken(.RParen)) |_| { + break :res try p.addNode(.{ + .tag = if (comma_one == null) .CallOne else .CallOneComma, + .main_token = lparen, + .data = .{ + .lhs = res, + .rhs = param_one, + }, + }); + } + if (comma_one == null) { + try p.warn(.{ + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + }); + } - res = try p.addNode(.{ - .tag = .Call, - .main_token = lparen, - .data = .{ - .lhs = res, - .rhs = try p.addExtra(Node.SubRange{ - .start = params.start, - .end = params.end, - }), - }, - }); + var param_list = std.ArrayList(Node.Index).init(p.gpa); + defer param_list.deinit(); + + try param_list.append(param_one); + + while (true) { + const next = try p.expectExpr(); + try param_list.append(next); + switch (p.token_tags[p.nextToken()]) { + .Comma => { + if (p.eatToken(.RParen)) |_| { + const span = try p.listToSpan(param_list.items); + break :res try p.addNode(.{ + .tag = .CallComma, + .main_token = lparen, + .data = .{ + .lhs = res, + .rhs = try p.addExtra(Node.SubRange{ + .start = span.start, + .end = span.end, + }), + }, 
+ }); + } else { + continue; + } + }, + .RParen => { + const span = try p.listToSpan(param_list.items); + break :res try p.addNode(.{ + .tag = .Call, + .main_token = lparen, + .data = .{ + .lhs = res, + .rhs = try p.addExtra(Node.SubRange{ + .start = span.start, + .end = span.end, + }), + }, + }); + }, + .Colon, .RBrace, .RBracket => { + p.tok_i -= 1; + return p.fail(.{ + .ExpectedToken = .{ + .token = p.tok_i, + .expected_id = .RParen, + }, + }); + }, + else => { + p.tok_i -= 1; + try p.warn(.{ + .ExpectedToken = .{ + .token = p.tok_i, + .expected_id = .Comma, + }, + }); + }, + } + } + }; } } @@ -2588,7 +2738,7 @@ const Parser = struct { while (true) { const next = try p.expectFieldInit(); - if (next == 0) break; + assert(next != 0); try init_list.append(next); switch (p.token_tags[p.nextToken()]) { .Comma => { diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 73c4458352..6b9f39a728 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -3046,35 +3046,35 @@ test "zig fmt: for" { // \\ // ); //} -// -//test "zig fmt: async functions" { -// try testCanonical( -// \\fn simpleAsyncFn() void { -// \\ const a = async a.b(); -// \\ x += 1; -// \\ suspend; -// \\ x += 1; -// \\ suspend; -// \\ const p: anyframe->void = async simpleAsyncFn() catch unreachable; -// \\ await p; -// \\} -// \\ -// \\test "suspend, resume, await" { -// \\ const p: anyframe = async testAsyncSeq(); -// \\ resume p; -// \\ await p; -// \\} -// \\ -// ); -//} -// -//test "zig fmt: nosuspend" { -// try testCanonical( -// \\const a = nosuspend foo(); -// \\ -// ); -//} -// + +test "zig fmt: async functions" { + try testCanonical( + \\fn simpleAsyncFn() void { + \\ const a = async a.b(); + \\ x += 1; + \\ suspend; + \\ x += 1; + \\ suspend; + \\ const p: anyframe->void = async simpleAsyncFn() catch unreachable; + \\ await p; + \\} + \\ + \\test "suspend, resume, await" { + \\ const p: anyframe = async testAsyncSeq(); + \\ resume p; + \\ await p; + \\} 
+ \\ + ); +} + +test "zig fmt: nosuspend" { + try testCanonical( + \\const a = nosuspend foo(); + \\ + ); +} + //test "zig fmt: Block after if" { // try testCanonical( // \\test "Block after if" { diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index b1ff7ff64b..0f922ceb0a 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -73,8 +73,18 @@ fn renderRoot(ais: *Ais, tree: ast.Tree) Error!void { const nodes_data = tree.nodes.items(.data); const root_decls = tree.extra_data[nodes_data[0].lhs..nodes_data[0].rhs]; - for (root_decls) |decl| { - try renderMember(ais, tree, decl, .Newline); + return renderAllMembers(ais, tree, root_decls); +} + +fn renderAllMembers(ais: *Ais, tree: ast.Tree, members: []const ast.Node.Index) Error!void { + if (members.len == 0) return; + + const first_member = members[0]; + try renderMember(ais, tree, first_member, .Newline); + + for (members[1..]) |member| { + try renderExtraNewline(ais, tree, member); + try renderMember(ais, tree, member, .Newline); } } @@ -391,65 +401,17 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac .StructInitDot => return renderStructInit(ais, tree, tree.structInitDot(node), space), .StructInit => return renderStructInit(ais, tree, tree.structInit(node), space), - .CallOne => unreachable, // TODO - .Call => { - const call = datas[node]; - const params_range = tree.extraData(call.rhs, ast.Node.SubRange); - const params = tree.extra_data[params_range.start..params_range.end]; - const async_token = tree.firstToken(call.lhs) - 1; - if (token_tags[async_token] == .Keyword_async) { - try renderToken(ais, tree, async_token, .Space); - } - try renderExpression(ais, tree, call.lhs, .None); - - const lparen = main_tokens[node]; - - if (params.len == 0) { - try renderToken(ais, tree, lparen, .None); - return renderToken(ais, tree, lparen + 1, space); // ) - } - - const last_param = params[params.len - 1]; - const after_last_param_tok = tree.lastToken(last_param) + 
1; - if (token_tags[after_last_param_tok] == .Comma) { - ais.pushIndent(); - try renderToken(ais, tree, lparen, Space.Newline); // ( - for (params) |param_node, i| { - if (i + 1 < params.len) { - try renderExpression(ais, tree, param_node, Space.None); - - // Unindent the comma for multiline string literals - const is_multiline_string = node_tags[param_node] == .StringLiteral and - token_tags[main_tokens[param_node]] == .MultilineStringLiteralLine; - if (is_multiline_string) ais.popIndent(); - - const comma = tree.lastToken(param_node) + 1; - try renderToken(ais, tree, comma, Space.Newline); // , - - if (is_multiline_string) ais.pushIndent(); - - try renderExtraNewline(ais, tree, params[i + 1]); - } else { - try renderExpression(ais, tree, param_node, Space.Comma); - } - } - ais.popIndent(); - return renderToken(ais, tree, after_last_param_tok + 1, space); // ) - } - - try renderToken(ais, tree, lparen, Space.None); // ( - - for (params) |param_node, i| { - try renderExpression(ais, tree, param_node, Space.None); - - if (i + 1 < params.len) { - const comma = tree.lastToken(param_node) + 1; - try renderToken(ais, tree, comma, Space.Space); - } - } - return renderToken(ais, tree, after_last_param_tok, space); // ) + .CallOne, .CallOneComma, .AsyncCallOne, .AsyncCallOneComma => { + var params: [1]ast.Node.Index = undefined; + return renderCall(ais, tree, tree.callOne(¶ms, node), space); }, + .Call, + .CallComma, + .AsyncCall, + .AsyncCallComma, + => return renderCall(ais, tree, tree.callFull(node), space), + .ArrayAccess => { const suffix = datas[node]; const lbracket = tree.firstToken(suffix.rhs) - 1; @@ -625,18 +587,16 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac }, .FnProto => return renderFnProto(ais, tree, tree.fnProto(node), space), - .AnyFrameType => unreachable, // TODO - //.AnyFrameType => { - // const anyframe_type = @fieldParentPtr(ast.Node.AnyFrameType, "base", base); - - // if (anyframe_type.result) |result| { - // 
try renderToken(ais, tree, anyframe_type.anyframe_token, Space.None); // anyframe - // try renderToken(ais, tree, result.arrow_token, Space.None); // -> - // return renderExpression(ais, tree, result.return_type, space); - // } else { - // return renderToken(ais, tree, anyframe_type.anyframe_token, space); // anyframe - // } - //}, + .AnyFrameType => { + const main_token = main_tokens[node]; + if (datas[node].rhs != 0) { + try renderToken(ais, tree, main_token, .None); // anyframe + try renderToken(ais, tree, main_token + 1, .None); // -> + return renderExpression(ais, tree, datas[node].rhs, space); + } else { + return renderToken(ais, tree, main_token, space); // anyframe + } + }, .Switch, .SwitchComma, @@ -1730,13 +1690,7 @@ fn renderContainerDecl( // One member per line. ais.pushIndent(); try renderToken(ais, tree, lbrace, .Newline); // lbrace - for (container_decl.ast.members) |member, i| { - try renderMember(ais, tree, member, .Newline); - - if (i + 1 < container_decl.ast.members.len) { - try renderExtraNewline(ais, tree, container_decl.ast.members[i + 1]); - } - } + try renderAllMembers(ais, tree, container_decl.ast.members); ais.popIndent(); return renderToken(ais, tree, rbrace, space); // rbrace @@ -1871,6 +1825,69 @@ fn renderAsm( } else unreachable; // TODO shouldn't need this on while(true) } +fn renderCall( + ais: *Ais, + tree: ast.Tree, + call: ast.full.Call, + space: Space, +) Error!void { + const token_tags = tree.tokens.items(.tag); + const node_tags = tree.nodes.items(.tag); + const main_tokens = tree.nodes.items(.main_token); + + if (call.async_token) |async_token| { + try renderToken(ais, tree, async_token, .Space); + } + try renderExpression(ais, tree, call.ast.fn_expr, .None); + + const lparen = call.ast.lparen; + const params = call.ast.params; + if (params.len == 0) { + try renderToken(ais, tree, lparen, .None); + return renderToken(ais, tree, lparen + 1, space); // ) + } + + const last_param = params[params.len - 1]; + const 
after_last_param_tok = tree.lastToken(last_param) + 1; + if (token_tags[after_last_param_tok] == .Comma) { + ais.pushIndent(); + try renderToken(ais, tree, lparen, Space.Newline); // ( + for (params) |param_node, i| { + if (i + 1 < params.len) { + try renderExpression(ais, tree, param_node, Space.None); + + // Unindent the comma for multiline string literals + const is_multiline_string = node_tags[param_node] == .StringLiteral and + token_tags[main_tokens[param_node]] == .MultilineStringLiteralLine; + if (is_multiline_string) ais.popIndent(); + + const comma = tree.lastToken(param_node) + 1; + try renderToken(ais, tree, comma, Space.Newline); // , + + if (is_multiline_string) ais.pushIndent(); + + try renderExtraNewline(ais, tree, params[i + 1]); + } else { + try renderExpression(ais, tree, param_node, Space.Comma); + } + } + ais.popIndent(); + return renderToken(ais, tree, after_last_param_tok + 1, space); // ) + } + + try renderToken(ais, tree, lparen, Space.None); // ( + + for (params) |param_node, i| { + try renderExpression(ais, tree, param_node, Space.None); + + if (i + 1 < params.len) { + const comma = tree.lastToken(param_node) + 1; + try renderToken(ais, tree, comma, Space.Space); + } + } + return renderToken(ais, tree, after_last_param_tok, space); // ) +} + /// Render an expression, and the comma that follows it, if it is present in the source. fn renderExpressionComma(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Space) Error!void { const token_tags = tree.tokens.items(.tag); From 9d87e6aeb8dd9c46b10483c31a392d7dac8a7dc4 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Tue, 9 Feb 2021 22:28:55 -0700 Subject: [PATCH 039/173] zig fmt: remove dead code likely these will be resurrected to make array literal cases pass. 
--- lib/std/zig/render.zig | 35 ----------------------------------- 1 file changed, 35 deletions(-) diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 0f922ceb0a..0e9032560c 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -2106,38 +2106,3 @@ fn nodeCausesSliceOpSpace(tag: ast.Node.Tag) bool { else => false, }; } - -fn copyFixingWhitespace(ais: *Ais, slice: []const u8) @TypeOf(ais.*).Error!void { - const writer = ais.writer(); - for (slice) |byte| switch (byte) { - '\t' => try writer.writeAll(" "), - '\r' => {}, - else => try writer.writeByte(byte), - }; -} - -// Returns the number of nodes in `expr` that are on the same line as `rtoken`, -// or null if they all are on the same line. -fn rowSize(tree: ast.Tree, exprs: []ast.Node.Index, rtoken: ast.TokenIndex) ?usize { - const first_token = exprs[0].firstToken(); - const first_loc = tree.tokenLocation(tree.token_locs[first_token].start, rtoken); - if (first_loc.line == 0) { - const maybe_comma = tree.prevToken(rtoken); - if (tree.token_tags[maybe_comma] == .Comma) - return 1; - return null; // no newlines - } - - var count: usize = 1; - for (exprs) |expr, i| { - if (i + 1 < exprs.len) { - const expr_last_token = expr.lastToken() + 1; - const loc = tree.tokenLocation(tree.token_locs[expr_last_token].start, exprs[i + 1].firstToken()); - if (loc.line != 0) return count; - count += 1; - } else { - return count; - } - } - unreachable; -} From fa5fcdd7343b8e759971ea14adead830b1f7c616 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Tue, 9 Feb 2021 22:42:00 -0700 Subject: [PATCH 040/173] zig fmt: fix regression with many container members --- lib/std/zig/ast.zig | 2 +- lib/std/zig/parse.zig | 5 +---- lib/std/zig/parser_test.zig | 34 +++++++++++++++++----------------- 3 files changed, 19 insertions(+), 22 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 35b10c6e2c..ea01a63096 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -1409,7 +1409,7 @@ pub 
const Tree = struct { return tree.fullContainerDecl(.{ .main_token = main_token, .enum_token = main_token + 2, // union lparen enum - .members = tree.extra_data[data.lhs..data.rhs], + .members = tree.extra_data[members_range.start..members_range.end], .arg = data.lhs, }); } diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig index da45090ffd..9bc3385105 100644 --- a/lib/std/zig/parse.zig +++ b/lib/std/zig/parse.zig @@ -3609,10 +3609,7 @@ const Parser = struct { .main_token = main_token, .data = .{ .lhs = enum_tag_expr, - .rhs = try p.addExtra(Node.SubRange{ - .start = members_span.start, - .end = members_span.end, - }), + .rhs = try p.addExtra(members_span), }, }); } else { diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 6b9f39a728..bbca2741f2 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -734,23 +734,23 @@ test "zig fmt: while else err prong with no block" { ); } -//test "zig fmt: tagged union with enum values" { -// try testCanonical( -// \\const MultipleChoice2 = union(enum(u32)) { -// \\ Unspecified1: i32, -// \\ A: f32 = 20, -// \\ Unspecified2: void, -// \\ B: bool = 40, -// \\ Unspecified3: i32, -// \\ C: i8 = 60, -// \\ Unspecified4: void, -// \\ D: void = 1000, -// \\ Unspecified5: i32, -// \\}; -// \\ -// ); -//} -// +test "zig fmt: tagged union with enum values" { + try testCanonical( + \\const MultipleChoice2 = union(enum(u32)) { + \\ Unspecified1: i32, + \\ A: f32 = 20, + \\ Unspecified2: void, + \\ B: bool = 40, + \\ Unspecified3: i32, + \\ C: i8 = 60, + \\ Unspecified4: void, + \\ D: void = 1000, + \\ Unspecified5: i32, + \\}; + \\ + ); +} + //test "zig fmt: allowzero pointer" { // try testCanonical( // \\const T = [*]allowzero const u8; From 58db3d27753709074fba7c88923754dd1c1a0ed9 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Tue, 9 Feb 2021 23:07:29 -0700 Subject: [PATCH 041/173] zig fmt: re-enable now-passing test cases --- lib/std/zig/ast.zig | 14 +- 
lib/std/zig/parser_test.zig | 1833 +++++++++++++++++------------------ lib/std/zig/render.zig | 6 +- 3 files changed, 920 insertions(+), 933 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index ea01a63096..1b78db9f3e 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -899,12 +899,12 @@ pub const Tree = struct { // require recursion due to the optional comma followed by rbrace. // TODO follow the pattern set by StructInitDotTwoComma which will allow // lastToken to work for all of these. - .ArrayInit => unreachable, - .ArrayInitOne => unreachable, - .ArrayInitDot => unreachable, - .StructInit => unreachable, - .StructInitOne => unreachable, - .StructInitDot => unreachable, + .ArrayInit => unreachable, // TODO + .ArrayInitOne => unreachable, // TODO + .ArrayInitDot => unreachable, // TODO + .StructInit => unreachable, // TODO + .StructInitOne => unreachable, // TODO + .StructInitDot => unreachable, // TODO .TaggedUnionEnumTag => unreachable, // TODO .TaggedUnionEnumTagComma => unreachable, // TODO @@ -2065,7 +2065,7 @@ pub const Error = union(enum) { pub const ExpectedVarDeclOrFn = SingleTokenError("Expected variable declaration or function, found '{s}'"); pub const ExpectedVarDecl = SingleTokenError("Expected variable declaration, found '{s}'"); pub const ExpectedFn = SingleTokenError("Expected function, found '{s}'"); - pub const ExpectedReturnType = SingleTokenError("Expected 'var' or return type expression, found '{s}'"); + pub const ExpectedReturnType = SingleTokenError("Expected return type expression, found '{s}'"); pub const ExpectedAggregateKw = SingleTokenError("Expected '" ++ Token.Tag.Keyword_struct.symbol() ++ "', '" ++ Token.Tag.Keyword_union.symbol() ++ "', '" ++ Token.Tag.Keyword_enum.symbol() ++ "', or '" ++ Token.Tag.Keyword_opaque.symbol() ++ "', found '{s}'"); pub const ExpectedEqOrSemi = SingleTokenError("Expected '=' or ';', found '{s}'"); pub const ExpectedSemiOrLBrace = SingleTokenError("Expected ';' or '{{', 
found '{s}'"); diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index bbca2741f2..cc27b49f41 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -757,14 +757,14 @@ test "zig fmt: tagged union with enum values" { // \\ // ); //} -// -//test "zig fmt: enum literal" { -// try testCanonical( -// \\const x = .hi; -// \\ -// ); -//} -// + +test "zig fmt: enum literal" { + try testCanonical( + \\const x = .hi; + \\ + ); +} + //test "zig fmt: enum literal inside array literal" { // try testCanonical( // \\test "enums in arrays" { @@ -779,14 +779,14 @@ test "zig fmt: tagged union with enum values" { // \\ // ); //} -// -//test "zig fmt: character literal larger than u8" { -// try testCanonical( -// \\const x = '\u{01f4a9}'; -// \\ -// ); -//} -// + +test "zig fmt: character literal larger than u8" { + try testCanonical( + \\const x = '\u{01f4a9}'; + \\ + ); +} + //test "zig fmt: infix operator and then multiline string literal" { // try testCanonical( // \\const x = "" ++ @@ -813,22 +813,22 @@ test "zig fmt: tagged union with enum values" { // \\ // ); //} -// -//test "zig fmt: threadlocal" { -// try testCanonical( -// \\threadlocal var x: i32 = 1234; -// \\ -// ); -//} -// -//test "zig fmt: linksection" { -// try testCanonical( -// \\export var aoeu: u64 linksection(".text.derp") = 1234; -// \\export fn _start() linksection(".text.boot") callconv(.Naked) noreturn {} -// \\ -// ); -//} -// + +test "zig fmt: threadlocal" { + try testCanonical( + \\threadlocal var x: i32 = 1234; + \\ + ); +} + +test "zig fmt: linksection" { + try testCanonical( + \\export var aoeu: u64 linksection(".text.derp") = 1234; + \\export fn _start() linksection(".text.boot") callconv(.Naked) noreturn {} + \\ + ); +} + //test "zig fmt: correctly move doc comments on struct fields" { // try testTransform( // \\pub const section_64 = extern struct { @@ -895,22 +895,22 @@ test "zig fmt: tagged union with enum values" { // \\ // ); //} -// -//test "zig fmt: 
aligned struct field" { -// try testCanonical( -// \\pub const S = struct { -// \\ f: i32 align(32), -// \\}; -// \\ -// ); -// try testCanonical( -// \\pub const S = struct { -// \\ f: i32 align(32) = 1, -// \\}; -// \\ -// ); -//} -// + +test "zig fmt: aligned struct field" { + try testCanonical( + \\pub const S = struct { + \\ f: i32 align(32), + \\}; + \\ + ); + try testCanonical( + \\pub const S = struct { + \\ f: i32 align(32) = 1, + \\}; + \\ + ); +} + //test "zig fmt: comment to disable/enable zig fmt first" { // try testCanonical( // \\// Test trailing comma syntax @@ -1040,32 +1040,32 @@ test "zig fmt: tagged union with enum values" { // \\ // ); //} -// -//test "zig fmt: spaces around slice operator" { -// try testCanonical( -// \\var a = b[c..d]; -// \\var a = b[c..d :0]; -// \\var a = b[c + 1 .. d]; -// \\var a = b[c + 1 ..]; -// \\var a = b[c .. d + 1]; -// \\var a = b[c .. d + 1 :0]; -// \\var a = b[c.a..d.e]; -// \\var a = b[c.a..d.e :0]; -// \\ -// ); -//} -// -//test "zig fmt: async call in if condition" { -// try testCanonical( -// \\comptime { -// \\ if (async b()) { -// \\ a(); -// \\ } -// \\} -// \\ -// ); -//} -// + +test "zig fmt: spaces around slice operator" { + try testCanonical( + \\var a = b[c..d]; + \\var a = b[c..d :0]; + \\var a = b[c + 1 .. d]; + \\var a = b[c + 1 ..]; + \\var a = b[c .. d + 1]; + \\var a = b[c .. 
d + 1 :0]; + \\var a = b[c.a..d.e]; + \\var a = b[c.a..d.e :0]; + \\ + ); +} + +test "zig fmt: async call in if condition" { + try testCanonical( + \\comptime { + \\ if (async b()) { + \\ a(); + \\ } + \\} + \\ + ); +} + //test "zig fmt: 2nd arg multiline string" { // try testCanonical( // \\comptime { @@ -1300,64 +1300,64 @@ test "zig fmt: tagged union with enum values" { // \\ // ); //} -// -//test "zig fmt: respect line breaks in if-else" { -// try testCanonical( -// \\comptime { -// \\ return if (cond) a else b; -// \\ return if (cond) -// \\ a -// \\ else -// \\ b; -// \\ return if (cond) -// \\ a -// \\ else if (cond) -// \\ b -// \\ else -// \\ c; -// \\} -// \\ -// ); -//} -// -//test "zig fmt: respect line breaks after infix operators" { -// try testCanonical( -// \\comptime { -// \\ self.crc = -// \\ lookup_tables[0][p[7]] ^ -// \\ lookup_tables[1][p[6]] ^ -// \\ lookup_tables[2][p[5]] ^ -// \\ lookup_tables[3][p[4]] ^ -// \\ lookup_tables[4][@truncate(u8, self.crc >> 24)] ^ -// \\ lookup_tables[5][@truncate(u8, self.crc >> 16)] ^ -// \\ lookup_tables[6][@truncate(u8, self.crc >> 8)] ^ -// \\ lookup_tables[7][@truncate(u8, self.crc >> 0)]; -// \\} -// \\ -// ); -//} -// -//test "zig fmt: fn decl with trailing comma" { -// try testTransform( -// \\fn foo(a: i32, b: i32,) void {} -// , -// \\fn foo( -// \\ a: i32, -// \\ b: i32, -// \\) void {} -// \\ -// ); -//} -// -//test "zig fmt: enum decl with no trailing comma" { -// try testTransform( -// \\const StrLitKind = enum {Normal, C}; -// , -// \\const StrLitKind = enum { Normal, C }; -// \\ -// ); -//} -// + +test "zig fmt: respect line breaks in if-else" { + try testCanonical( + \\comptime { + \\ return if (cond) a else b; + \\ return if (cond) + \\ a + \\ else + \\ b; + \\ return if (cond) + \\ a + \\ else if (cond) + \\ b + \\ else + \\ c; + \\} + \\ + ); +} + +test "zig fmt: respect line breaks after infix operators" { + try testCanonical( + \\comptime { + \\ self.crc = + \\ lookup_tables[0][p[7]] ^ + 
\\ lookup_tables[1][p[6]] ^ + \\ lookup_tables[2][p[5]] ^ + \\ lookup_tables[3][p[4]] ^ + \\ lookup_tables[4][@truncate(u8, self.crc >> 24)] ^ + \\ lookup_tables[5][@truncate(u8, self.crc >> 16)] ^ + \\ lookup_tables[6][@truncate(u8, self.crc >> 8)] ^ + \\ lookup_tables[7][@truncate(u8, self.crc >> 0)]; + \\} + \\ + ); +} + +test "zig fmt: fn decl with trailing comma" { + try testTransform( + \\fn foo(a: i32, b: i32,) void {} + , + \\fn foo( + \\ a: i32, + \\ b: i32, + \\) void {} + \\ + ); +} + +test "zig fmt: enum decl with no trailing comma" { + try testTransform( + \\const StrLitKind = enum {Normal, C}; + , + \\const StrLitKind = enum { Normal, C }; + \\ + ); +} + //test "zig fmt: switch comment before prong" { // try testCanonical( // \\comptime { @@ -1369,22 +1369,25 @@ test "zig fmt: tagged union with enum values" { // \\ // ); //} -// -//test "zig fmt: struct literal no trailing comma" { -// try testTransform( -// \\const a = foo{ .x = 1, .y = 2 }; -// \\const a = foo{ .x = 1, -// \\ .y = 2 }; -// , -// \\const a = foo{ .x = 1, .y = 2 }; -// \\const a = foo{ -// \\ .x = 1, -// \\ .y = 2, -// \\}; -// \\ -// ); -//} -// + +test "zig fmt: struct literal no trailing comma" { + try testTransform( + \\const a = foo{ .x = 1, .y = 2 }; + \\const a = foo{ .x = 1, + \\ .y = 2 }; + \\const a = foo{ .x = 1, + \\ .y = 2, }; + , + \\const a = foo{ .x = 1, .y = 2 }; + \\const a = foo{ .x = 1, .y = 2 }; + \\const a = foo{ + \\ .x = 1, + \\ .y = 2, + \\}; + \\ + ); +} + //test "zig fmt: struct literal containing a multiline expression" { // try testTransform( // \\const a = A{ .x = if (f1()) 10 else 20 }; @@ -1564,39 +1567,39 @@ test "zig fmt: tagged union with enum values" { // \\ // ); //} -// -//test "zig fmt: trailing comma on fn call" { -// try testCanonical( -// \\comptime { -// \\ var module = try Module.create( -// \\ allocator, -// \\ zig_lib_dir, -// \\ full_cache_dir, -// \\ ); -// \\} -// \\ -// ); -//} -// -//test "zig fmt: multi line arguments without last 
comma" { -// try testTransform( -// \\pub fn foo( -// \\ a: usize, -// \\ b: usize, -// \\ c: usize, -// \\ d: usize -// \\) usize { -// \\ return a + b + c + d; -// \\} -// \\ -// , -// \\pub fn foo(a: usize, b: usize, c: usize, d: usize) usize { -// \\ return a + b + c + d; -// \\} -// \\ -// ); -//} -// + +test "zig fmt: trailing comma on fn call" { + try testCanonical( + \\comptime { + \\ var module = try Module.create( + \\ allocator, + \\ zig_lib_dir, + \\ full_cache_dir, + \\ ); + \\} + \\ + ); +} + +test "zig fmt: multi line arguments without last comma" { + try testTransform( + \\pub fn foo( + \\ a: usize, + \\ b: usize, + \\ c: usize, + \\ d: usize + \\) usize { + \\ return a + b + c + d; + \\} + \\ + , + \\pub fn foo(a: usize, b: usize, c: usize, d: usize) usize { + \\ return a + b + c + d; + \\} + \\ + ); +} + //test "zig fmt: empty block with only comment" { // try testCanonical( // \\comptime { @@ -1616,19 +1619,19 @@ test "zig fmt: tagged union with enum values" { // \\ // ); //} -// -//test "zig fmt: extra newlines at the end" { -// try testTransform( -// \\const a = b; -// \\ -// \\ -// \\ -// , -// \\const a = b; -// \\ -// ); -//} -// + +test "zig fmt: extra newlines at the end" { + try testTransform( + \\const a = b; + \\ + \\ + \\ + , + \\const a = b; + \\ + ); +} + //test "zig fmt: simple asm" { // try testTransform( // \\comptime { @@ -1704,15 +1707,15 @@ test "zig fmt: switch cases trailing comma" { ); } -//test "zig fmt: slice align" { -// try testCanonical( -// \\const A = struct { -// \\ items: []align(A) T, -// \\}; -// \\ -// ); -//} -// +test "zig fmt: slice align" { + try testCanonical( + \\const A = struct { + \\ items: []align(A) T, + \\}; + \\ + ); +} + //test "zig fmt: add trailing comma to array literal" { // try testTransform( // \\comptime { @@ -1756,29 +1759,29 @@ test "zig fmt: switch cases trailing comma" { // \\ // ); //} -// -//test "zig fmt: float literal with exponent" { -// try testCanonical( -// \\test "bit field 
alignment" { -// \\ assert(@TypeOf(&blah.b) == *align(1:3:6) const u3); -// \\} -// \\ -// ); -//} -// -//test "zig fmt: float literal with exponent" { -// try testCanonical( -// \\test "aoeu" { -// \\ switch (state) { -// \\ TermState.Start => switch (c) { -// \\ '\x1b' => state = TermState.Escape, -// \\ else => try out.writeByte(c), -// \\ }, -// \\ } -// \\} -// \\ -// ); -//} + +test "zig fmt: bit field alignment" { + try testCanonical( + \\test { + \\ assert(@TypeOf(&blah.b) == *align(1:3:6) const u3); + \\} + \\ + ); +} + +test "zig fmt: nested switch" { + try testCanonical( + \\test { + \\ switch (state) { + \\ TermState.Start => switch (c) { + \\ '\x1b' => state = TermState.Escape, + \\ else => try out.writeByte(c), + \\ }, + \\ } + \\} + \\ + ); +} test "zig fmt: float literal with exponent" { try testCanonical( @@ -1788,34 +1791,34 @@ test "zig fmt: float literal with exponent" { ); } -//test "zig fmt: if-else end of comptime" { -// try testCanonical( -// \\comptime { -// \\ if (a) { -// \\ b(); -// \\ } else { -// \\ b(); -// \\ } -// \\} -// \\ -// ); -//} -// -//test "zig fmt: nested blocks" { -// try testCanonical( -// \\comptime { -// \\ { -// \\ { -// \\ { -// \\ a(); -// \\ } -// \\ } -// \\ } -// \\} -// \\ -// ); -//} -// +test "zig fmt: if-else end of comptime" { + try testCanonical( + \\comptime { + \\ if (a) { + \\ b(); + \\ } else { + \\ b(); + \\ } + \\} + \\ + ); +} + +test "zig fmt: nested blocks" { + try testCanonical( + \\comptime { + \\ { + \\ { + \\ { + \\ a(); + \\ } + \\ } + \\ } + \\} + \\ + ); +} + //test "zig fmt: block with same line comment after end brace" { // try testCanonical( // \\comptime { @@ -1837,17 +1840,17 @@ test "zig fmt: float literal with exponent" { // \\ // ); //} -// -//test "zig fmt: statements with empty line between" { -// try testCanonical( -// \\comptime { -// \\ a = b; -// \\ -// \\ a = b; -// \\} -// \\ -// ); -//} + +test "zig fmt: statements with empty line between" { + try testCanonical( + \\comptime 
{ + \\ a = b; + \\ + \\ a = b; + \\} + \\ + ); +} test "zig fmt: ptr deref operator and unwrap optional operator" { try testCanonical( @@ -2119,18 +2122,18 @@ test "zig fmt: error set declaration" { // \\ // ); //} -// -//test "zig fmt: union(enum(u32)) with assigned enum values" { -// try testCanonical( -// \\const MultipleChoice = union(enum(u32)) { -// \\ A = 20, -// \\ B = 40, -// \\ C = 60, -// \\ D = 1000, -// \\}; -// \\ -// ); -//} + +test "zig fmt: union(enum(u32)) with assigned enum values" { + try testCanonical( + \\const MultipleChoice = union(enum(u32)) { + \\ A = 20, + \\ B = 40, + \\ C = 60, + \\ D = 1000, + \\}; + \\ + ); +} test "zig fmt: resume from suspend block" { try testCanonical( @@ -2270,15 +2273,15 @@ test "zig fmt: resume from suspend block" { // \\ // ); //} -// -//test "zig fmt: imports" { -// try testCanonical( -// \\const std = @import("std"); -// \\const std = @import(); -// \\ -// ); -//} -// + +test "zig fmt: imports" { + try testCanonical( + \\const std = @import("std"); + \\const std = @import(); + \\ + ); +} + //test "zig fmt: global declarations" { // try testCanonical( // \\const a = b; @@ -2300,21 +2303,21 @@ test "zig fmt: resume from suspend block" { // \\ // ); //} -// -//test "zig fmt: extern declaration" { -// try testCanonical( -// \\extern var foo: c_int; -// \\ -// ); -//} -// -//test "zig fmt: alignment" { -// try testCanonical( -// \\var foo: c_int align(1); -// \\ -// ); -//} -// + +test "zig fmt: extern declaration" { + try testCanonical( + \\extern var foo: c_int; + \\ + ); +} + +test "zig fmt: alignment" { + try testCanonical( + \\var foo: c_int align(1); + \\ + ); +} + //test "zig fmt: C main" { // try testCanonical( // \\fn main(argc: c_int, argv: **u8) c_int { @@ -2358,128 +2361,128 @@ test "zig fmt: resume from suspend block" { // \\ // ); //} -// -//test "zig fmt: test declaration" { -// try testCanonical( -// \\test "test name" { -// \\ const a = 1; -// \\ var b = 1; -// \\} -// \\ -// ); -//} -// -//test 
"zig fmt: infix operators" { -// try testCanonical( -// \\test "infix operators" { -// \\ var i = undefined; -// \\ i = 2; -// \\ i *= 2; -// \\ i |= 2; -// \\ i ^= 2; -// \\ i <<= 2; -// \\ i >>= 2; -// \\ i &= 2; -// \\ i *= 2; -// \\ i *%= 2; -// \\ i -= 2; -// \\ i -%= 2; -// \\ i += 2; -// \\ i +%= 2; -// \\ i /= 2; -// \\ i %= 2; -// \\ _ = i == i; -// \\ _ = i != i; -// \\ _ = i != i; -// \\ _ = i.i; -// \\ _ = i || i; -// \\ _ = i!i; -// \\ _ = i ** i; -// \\ _ = i ++ i; -// \\ _ = i orelse i; -// \\ _ = i % i; -// \\ _ = i / i; -// \\ _ = i *% i; -// \\ _ = i * i; -// \\ _ = i -% i; -// \\ _ = i - i; -// \\ _ = i +% i; -// \\ _ = i + i; -// \\ _ = i << i; -// \\ _ = i >> i; -// \\ _ = i & i; -// \\ _ = i ^ i; -// \\ _ = i | i; -// \\ _ = i >= i; -// \\ _ = i <= i; -// \\ _ = i > i; -// \\ _ = i < i; -// \\ _ = i and i; -// \\ _ = i or i; -// \\} -// \\ -// ); -//} -// -//test "zig fmt: precedence" { -// try testCanonical( -// \\test "precedence" { -// \\ a!b(); -// \\ (a!b)(); -// \\ !a!b; -// \\ !(a!b); -// \\ !a{}; -// \\ !(a{}); -// \\ a + b{}; -// \\ (a + b){}; -// \\ a << b + c; -// \\ (a << b) + c; -// \\ a & b << c; -// \\ (a & b) << c; -// \\ a ^ b & c; -// \\ (a ^ b) & c; -// \\ a | b ^ c; -// \\ (a | b) ^ c; -// \\ a == b | c; -// \\ (a == b) | c; -// \\ a and b == c; -// \\ (a and b) == c; -// \\ a or b and c; -// \\ (a or b) and c; -// \\ (a or b) and c; -// \\} -// \\ -// ); -//} -// -//test "zig fmt: prefix operators" { -// try testCanonical( -// \\test "prefix operators" { -// \\ try return --%~!&0; -// \\} -// \\ -// ); -//} -// -//test "zig fmt: call expression" { -// try testCanonical( -// \\test "test calls" { -// \\ a(); -// \\ a(1); -// \\ a(1, 2); -// \\ a(1, 2) + a(1, 2); -// \\} -// \\ -// ); -//} -// + +test "zig fmt: test declaration" { + try testCanonical( + \\test "test name" { + \\ const a = 1; + \\ var b = 1; + \\} + \\ + ); +} + +test "zig fmt: infix operators" { + try testCanonical( + \\test { + \\ var i = undefined; + \\ i 
= 2; + \\ i *= 2; + \\ i |= 2; + \\ i ^= 2; + \\ i <<= 2; + \\ i >>= 2; + \\ i &= 2; + \\ i *= 2; + \\ i *%= 2; + \\ i -= 2; + \\ i -%= 2; + \\ i += 2; + \\ i +%= 2; + \\ i /= 2; + \\ i %= 2; + \\ _ = i == i; + \\ _ = i != i; + \\ _ = i != i; + \\ _ = i.i; + \\ _ = i || i; + \\ _ = i!i; + \\ _ = i ** i; + \\ _ = i ++ i; + \\ _ = i orelse i; + \\ _ = i % i; + \\ _ = i / i; + \\ _ = i *% i; + \\ _ = i * i; + \\ _ = i -% i; + \\ _ = i - i; + \\ _ = i +% i; + \\ _ = i + i; + \\ _ = i << i; + \\ _ = i >> i; + \\ _ = i & i; + \\ _ = i ^ i; + \\ _ = i | i; + \\ _ = i >= i; + \\ _ = i <= i; + \\ _ = i > i; + \\ _ = i < i; + \\ _ = i and i; + \\ _ = i or i; + \\} + \\ + ); +} + +test "zig fmt: precedence" { + try testCanonical( + \\test "precedence" { + \\ a!b(); + \\ (a!b)(); + \\ !a!b; + \\ !(a!b); + \\ !a{}; + \\ !(a{}); + \\ a + b{}; + \\ (a + b){}; + \\ a << b + c; + \\ (a << b) + c; + \\ a & b << c; + \\ (a & b) << c; + \\ a ^ b & c; + \\ (a ^ b) & c; + \\ a | b ^ c; + \\ (a | b) ^ c; + \\ a == b | c; + \\ (a == b) | c; + \\ a and b == c; + \\ (a and b) == c; + \\ a or b and c; + \\ (a or b) and c; + \\ (a or b) and c; + \\} + \\ + ); +} + +test "zig fmt: prefix operators" { + try testCanonical( + \\test "prefix operators" { + \\ try return --%~!&0; + \\} + \\ + ); +} + +test "zig fmt: call expression" { + try testCanonical( + \\test "test calls" { + \\ a(); + \\ a(1); + \\ a(1, 2); + \\ a(1, 2) + a(1, 2); + \\} + \\ + ); +} + //test "zig fmt: anytype type" { // try testCanonical( // \\fn print(args: anytype) anytype {} // \\ // ); //} -// + //test "zig fmt: functions" { // try testCanonical( // \\extern fn puts(s: *const u8) c_int; @@ -2500,7 +2503,7 @@ test "zig fmt: resume from suspend block" { // \\ // ); //} -// + //test "zig fmt: multiline string" { // try testCanonical( // \\test "" { @@ -2518,145 +2521,145 @@ test "zig fmt: resume from suspend block" { // \\ // ); //} -// -//test "zig fmt: values" { -// try testCanonical( -// \\test "values" { -// \\ 1; -// \\ 
1.0; -// \\ "string"; -// \\ 'c'; -// \\ true; -// \\ false; -// \\ null; -// \\ undefined; -// \\ anyerror; -// \\ this; -// \\ unreachable; -// \\} -// \\ -// ); -//} -// -//test "zig fmt: indexing" { -// try testCanonical( -// \\test "test index" { -// \\ a[0]; -// \\ a[0 + 5]; -// \\ a[0..]; -// \\ a[0..5]; -// \\ a[a[0]]; -// \\ a[a[0..]]; -// \\ a[a[0..5]]; -// \\ a[a[0]..]; -// \\ a[a[0..5]..]; -// \\ a[a[0]..a[0]]; -// \\ a[a[0..5]..a[0]]; -// \\ a[a[0..5]..a[0..5]]; -// \\} -// \\ -// ); -//} -// -//test "zig fmt: struct declaration" { -// try testCanonical( -// \\const S = struct { -// \\ const Self = @This(); -// \\ f1: u8, -// \\ f3: u8, -// \\ -// \\ f2: u8, -// \\ -// \\ fn method(self: *Self) Self { -// \\ return self.*; -// \\ } -// \\}; -// \\ -// \\const Ps = packed struct { -// \\ a: u8, -// \\ b: u8, -// \\ -// \\ c: u8, -// \\}; -// \\ -// \\const Es = extern struct { -// \\ a: u8, -// \\ b: u8, -// \\ -// \\ c: u8, -// \\}; -// \\ -// ); -//} -// -//test "zig fmt: enum declaration" { -// try testCanonical( -// \\const E = enum { -// \\ Ok, -// \\ SomethingElse = 0, -// \\}; -// \\ -// \\const E2 = enum(u8) { -// \\ Ok, -// \\ SomethingElse = 255, -// \\ SomethingThird, -// \\}; -// \\ -// \\const Ee = extern enum { -// \\ Ok, -// \\ SomethingElse, -// \\ SomethingThird, -// \\}; -// \\ -// \\const Ep = packed enum { -// \\ Ok, -// \\ SomethingElse, -// \\ SomethingThird, -// \\}; -// \\ -// ); -//} -// -//test "zig fmt: union declaration" { -// try testCanonical( -// \\const U = union { -// \\ Int: u8, -// \\ Float: f32, -// \\ None, -// \\ Bool: bool, -// \\}; -// \\ -// \\const Ue = union(enum) { -// \\ Int: u8, -// \\ Float: f32, -// \\ None, -// \\ Bool: bool, -// \\}; -// \\ -// \\const E = enum { -// \\ Int, -// \\ Float, -// \\ None, -// \\ Bool, -// \\}; -// \\ -// \\const Ue2 = union(E) { -// \\ Int: u8, -// \\ Float: f32, -// \\ None, -// \\ Bool: bool, -// \\}; -// \\ -// \\const Eu = extern union { -// \\ Int: u8, -// \\ Float: 
f32, -// \\ None, -// \\ Bool: bool, -// \\}; -// \\ -// ); -//} -// + +test "zig fmt: values" { + try testCanonical( + \\test "values" { + \\ 1; + \\ 1.0; + \\ "string"; + \\ 'c'; + \\ true; + \\ false; + \\ null; + \\ undefined; + \\ anyerror; + \\ this; + \\ unreachable; + \\} + \\ + ); +} + +test "zig fmt: indexing" { + try testCanonical( + \\test "test index" { + \\ a[0]; + \\ a[0 + 5]; + \\ a[0..]; + \\ a[0..5]; + \\ a[a[0]]; + \\ a[a[0..]]; + \\ a[a[0..5]]; + \\ a[a[0]..]; + \\ a[a[0..5]..]; + \\ a[a[0]..a[0]]; + \\ a[a[0..5]..a[0]]; + \\ a[a[0..5]..a[0..5]]; + \\} + \\ + ); +} + +test "zig fmt: struct declaration" { + try testCanonical( + \\const S = struct { + \\ const Self = @This(); + \\ f1: u8, + \\ f3: u8, + \\ + \\ f2: u8, + \\ + \\ fn method(self: *Self) Self { + \\ return self.*; + \\ } + \\}; + \\ + \\const Ps = packed struct { + \\ a: u8, + \\ b: u8, + \\ + \\ c: u8, + \\}; + \\ + \\const Es = extern struct { + \\ a: u8, + \\ b: u8, + \\ + \\ c: u8, + \\}; + \\ + ); +} + +test "zig fmt: enum declaration" { + try testCanonical( + \\const E = enum { + \\ Ok, + \\ SomethingElse = 0, + \\}; + \\ + \\const E2 = enum(u8) { + \\ Ok, + \\ SomethingElse = 255, + \\ SomethingThird, + \\}; + \\ + \\const Ee = extern enum { + \\ Ok, + \\ SomethingElse, + \\ SomethingThird, + \\}; + \\ + \\const Ep = packed enum { + \\ Ok, + \\ SomethingElse, + \\ SomethingThird, + \\}; + \\ + ); +} + +test "zig fmt: union declaration" { + try testCanonical( + \\const U = union { + \\ Int: u8, + \\ Float: f32, + \\ None, + \\ Bool: bool, + \\}; + \\ + \\const Ue = union(enum) { + \\ Int: u8, + \\ Float: f32, + \\ None, + \\ Bool: bool, + \\}; + \\ + \\const E = enum { + \\ Int, + \\ Float, + \\ None, + \\ Bool, + \\}; + \\ + \\const Ue2 = union(E) { + \\ Int: u8, + \\ Float: f32, + \\ None, + \\ Bool: bool, + \\}; + \\ + \\const Eu = extern union { + \\ Int: u8, + \\ Float: f32, + \\ None, + \\ Bool: bool, + \\}; + \\ + ); +} + //test "zig fmt: arrays" { // try testCanonical( 
// \\test "test array" { @@ -2674,7 +2677,7 @@ test "zig fmt: resume from suspend block" { // \\ // ); //} -// + //test "zig fmt: container initializers" { // try testCanonical( // \\const a0 = []u8{}; @@ -2912,140 +2915,140 @@ test "zig fmt: for" { ); } -//test "zig fmt: if" { -// try testCanonical( -// \\test "if" { -// \\ if (10 < 0) { -// \\ unreachable; -// \\ } -// \\ -// \\ if (10 < 0) unreachable; -// \\ -// \\ if (10 < 0) { -// \\ unreachable; -// \\ } else { -// \\ const a = 20; -// \\ } -// \\ -// \\ if (10 < 0) { -// \\ unreachable; -// \\ } else if (5 < 0) { -// \\ unreachable; -// \\ } else { -// \\ const a = 20; -// \\ } -// \\ -// \\ const is_world_broken = if (10 < 0) true else false; -// \\ const some_number = 1 + if (10 < 0) 2 else 3; -// \\ -// \\ const a: ?u8 = 10; -// \\ const b: ?u8 = null; -// \\ if (a) |v| { -// \\ const some = v; -// \\ } else if (b) |*v| { -// \\ unreachable; -// \\ } else { -// \\ const some = 10; -// \\ } -// \\ -// \\ const non_null_a = if (a) |v| v else 0; -// \\ -// \\ const a_err: anyerror!u8 = 0; -// \\ if (a_err) |v| { -// \\ const p = v; -// \\ } else |err| { -// \\ unreachable; -// \\ } -// \\} -// \\ -// ); -//} -// -//test "zig fmt: defer" { -// try testCanonical( -// \\test "defer" { -// \\ var i: usize = 0; -// \\ defer i = 1; -// \\ defer { -// \\ i += 2; -// \\ i *= i; -// \\ } -// \\ -// \\ errdefer i += 3; -// \\ errdefer { -// \\ i += 2; -// \\ i /= i; -// \\ } -// \\} -// \\ -// ); -//} -// -//test "zig fmt: comptime" { -// try testCanonical( -// \\fn a() u8 { -// \\ return 5; -// \\} -// \\ -// \\fn b(comptime i: u8) u8 { -// \\ return i; -// \\} -// \\ -// \\const av = comptime a(); -// \\const av2 = comptime blk: { -// \\ var res = a(); -// \\ res *= b(2); -// \\ break :blk res; -// \\}; -// \\ -// \\comptime { -// \\ _ = a(); -// \\} -// \\ -// \\test "comptime" { -// \\ const av3 = comptime a(); -// \\ const av4 = comptime blk: { -// \\ var res = a(); -// \\ res *= a(); -// \\ break :blk res; -// 
\\ }; -// \\ -// \\ comptime var i = 0; -// \\ comptime { -// \\ i = a(); -// \\ i += b(i); -// \\ } -// \\} -// \\ -// ); -//} -// -//test "zig fmt: fn type" { -// try testCanonical( -// \\fn a(i: u8) u8 { -// \\ return i + 1; -// \\} -// \\ -// \\const a: fn (u8) u8 = undefined; -// \\const b: fn (u8) callconv(.Naked) u8 = undefined; -// \\const ap: fn (u8) u8 = a; -// \\ -// ); -//} -// -//test "zig fmt: inline asm" { -// try testCanonical( -// \\pub fn syscall1(number: usize, arg1: usize) usize { -// \\ return asm volatile ("syscall" -// \\ : [ret] "={rax}" (-> usize) -// \\ : [number] "{rax}" (number), -// \\ [arg1] "{rdi}" (arg1) -// \\ : "rcx", "r11" -// \\ ); -// \\} -// \\ -// ); -//} +test "zig fmt: if" { + try testCanonical( + \\test "if" { + \\ if (10 < 0) { + \\ unreachable; + \\ } + \\ + \\ if (10 < 0) unreachable; + \\ + \\ if (10 < 0) { + \\ unreachable; + \\ } else { + \\ const a = 20; + \\ } + \\ + \\ if (10 < 0) { + \\ unreachable; + \\ } else if (5 < 0) { + \\ unreachable; + \\ } else { + \\ const a = 20; + \\ } + \\ + \\ const is_world_broken = if (10 < 0) true else false; + \\ const some_number = 1 + if (10 < 0) 2 else 3; + \\ + \\ const a: ?u8 = 10; + \\ const b: ?u8 = null; + \\ if (a) |v| { + \\ const some = v; + \\ } else if (b) |*v| { + \\ unreachable; + \\ } else { + \\ const some = 10; + \\ } + \\ + \\ const non_null_a = if (a) |v| v else 0; + \\ + \\ const a_err: anyerror!u8 = 0; + \\ if (a_err) |v| { + \\ const p = v; + \\ } else |err| { + \\ unreachable; + \\ } + \\} + \\ + ); +} + +test "zig fmt: defer" { + try testCanonical( + \\test "defer" { + \\ var i: usize = 0; + \\ defer i = 1; + \\ defer { + \\ i += 2; + \\ i *= i; + \\ } + \\ + \\ errdefer i += 3; + \\ errdefer { + \\ i += 2; + \\ i /= i; + \\ } + \\} + \\ + ); +} + +test "zig fmt: comptime" { + try testCanonical( + \\fn a() u8 { + \\ return 5; + \\} + \\ + \\fn b(comptime i: u8) u8 { + \\ return i; + \\} + \\ + \\const av = comptime a(); + \\const av2 = comptime blk: { + 
\\ var res = a(); + \\ res *= b(2); + \\ break :blk res; + \\}; + \\ + \\comptime { + \\ _ = a(); + \\} + \\ + \\test "comptime" { + \\ const av3 = comptime a(); + \\ const av4 = comptime blk: { + \\ var res = a(); + \\ res *= a(); + \\ break :blk res; + \\ }; + \\ + \\ comptime var i = 0; + \\ comptime { + \\ i = a(); + \\ i += b(i); + \\ } + \\} + \\ + ); +} + +test "zig fmt: fn type" { + try testCanonical( + \\fn a(i: u8) u8 { + \\ return i + 1; + \\} + \\ + \\const a: fn (u8) u8 = undefined; + \\const b: fn (u8) callconv(.Naked) u8 = undefined; + \\const ap: fn (u8) u8 = a; + \\ + ); +} + +test "zig fmt: inline asm" { + try testCanonical( + \\pub fn syscall1(number: usize, arg1: usize) usize { + \\ return asm volatile ("syscall" + \\ : [ret] "={rax}" (-> usize) + \\ : [number] "{rax}" (number), + \\ [arg1] "{rdi}" (arg1) + \\ : "rcx", "r11" + \\ ); + \\} + \\ + ); +} test "zig fmt: async functions" { try testCanonical( @@ -3075,20 +3078,20 @@ test "zig fmt: nosuspend" { ); } -//test "zig fmt: Block after if" { -// try testCanonical( -// \\test "Block after if" { -// \\ if (true) { -// \\ const a = 0; -// \\ } -// \\ -// \\ { -// \\ const a = 0; -// \\ } -// \\} -// \\ -// ); -//} +test "zig fmt: Block after if" { + try testCanonical( + \\test { + \\ if (true) { + \\ const a = 0; + \\ } + \\ + \\ { + \\ const a = 0; + \\ } + \\} + \\ + ); +} test "zig fmt: usingnamespace" { try testCanonical( @@ -3098,39 +3101,39 @@ test "zig fmt: usingnamespace" { ); } -//test "zig fmt: string identifier" { -// try testCanonical( -// \\const @"a b" = @"c d".@"e f"; -// \\fn @"g h"() void {} -// \\ -// ); -//} -// -//test "zig fmt: error return" { -// try testCanonical( -// \\fn err() anyerror { -// \\ call(); -// \\ return error.InvalidArgs; -// \\} -// \\ -// ); -//} -// -//test "zig fmt: comptime block in container" { -// try testCanonical( -// \\pub fn container() type { -// \\ return struct { -// \\ comptime { -// \\ if (false) { -// \\ unreachable; -// \\ } -// \\ } -// \\ 
}; -// \\} -// \\ -// ); -//} -// +test "zig fmt: string identifier" { + try testCanonical( + \\const @"a b" = @"c d".@"e f"; + \\fn @"g h"() void {} + \\ + ); +} + +test "zig fmt: error return" { + try testCanonical( + \\fn err() anyerror { + \\ call(); + \\ return error.InvalidArgs; + \\} + \\ + ); +} + +test "zig fmt: comptime block in container" { + try testCanonical( + \\pub fn container() type { + \\ return struct { + \\ comptime { + \\ if (false) { + \\ unreachable; + \\ } + \\ } + \\ }; + \\} + \\ + ); +} + //test "zig fmt: inline asm parameter alignment" { // try testCanonical( // \\pub fn main() void { @@ -3171,7 +3174,7 @@ test "zig fmt: usingnamespace" { // \\ // ); //} -// + //test "zig fmt: multiline string in array" { // try testCanonical( // \\const Foo = [][]const u8{ @@ -3196,25 +3199,25 @@ test "zig fmt: usingnamespace" { // \\ // ); //} -// -//test "zig fmt: if type expr" { -// try testCanonical( -// \\const mycond = true; -// \\pub fn foo() if (mycond) i32 else void { -// \\ if (mycond) { -// \\ return 42; -// \\ } -// \\} -// \\ -// ); -//} -//test "zig fmt: file ends with struct field" { -// try testCanonical( -// \\a: bool -// \\ -// ); -//} -// + +test "zig fmt: if type expr" { + try testCanonical( + \\const mycond = true; + \\pub fn foo() if (mycond) i32 else void { + \\ if (mycond) { + \\ return 42; + \\ } + \\} + \\ + ); +} +test "zig fmt: file ends with struct field" { + try testCanonical( + \\a: bool + \\ + ); +} + //test "zig fmt: comment after empty comment" { // try testTransform( // \\const x = true; // @@ -3342,7 +3345,7 @@ test "zig fmt: usingnamespace" { // \\ // ); //} -// + //test "zig fmt: top level doc comments" { // try testCanonical( // \\//! 
tld 1 @@ -3398,7 +3401,7 @@ test "zig fmt: usingnamespace" { // \\ // ); //} -// + //test "zig fmt: extern without container keyword returns error" { // try testError( // \\const container = extern {}; @@ -3408,20 +3411,20 @@ test "zig fmt: usingnamespace" { // .ExpectedVarDeclOrFn, // }); //} -// -//test "zig fmt: integer literals with underscore separators" { -// try testTransform( -// \\const -// \\ x = -// \\ 1_234_567 -// \\ +(0b0_1-0o7_0+0xff_FF ) + 0_0; -// , -// \\const x = -// \\ 1_234_567 + (0b0_1 - 0o7_0 + 0xff_FF) + 0_0; -// \\ -// ); -//} -// + +test "zig fmt: integer literals with underscore separators" { + try testTransform( + \\const + \\ x = + \\ 1_234_567 + \\ +(0b0_1-0o7_0+0xff_FF ) + 0_0; + , + \\const x = + \\ 1_234_567 + (0b0_1 - 0o7_0 + 0xff_FF) + 0_0; + \\ + ); +} + //test "zig fmt: hex literals with underscore separators" { // try testTransform( // \\pub fn orMask(a: [ 1_000 ]u64, b: [ 1_000] u64) [1_000]u64 { @@ -3444,7 +3447,7 @@ test "zig fmt: usingnamespace" { // \\ // ); //} -// + //test "zig fmt: decimal float literals with underscore separators" { // try testTransform( // \\pub fn main() void { @@ -3461,7 +3464,7 @@ test "zig fmt: usingnamespace" { // \\ // ); //} -// + //test "zig fmt: hexadeciaml float literals with underscore separators" { // try testTransform( // \\pub fn main() void { @@ -3478,34 +3481,14 @@ test "zig fmt: usingnamespace" { // \\ // ); //} -// -//test "zig fmt: convert async fn into callconv(.Async)" { -// try testTransform( -// \\async fn foo() void {} -// , -// \\fn foo() callconv(.Async) void {} -// \\ -// ); -//} -// -//test "zig fmt: convert extern fn proto into callconv(.C)" { -// try testTransform( -// \\extern fn foo0() void {} -// \\const foo1 = extern fn () void; -// , -// \\extern fn foo0() void {} -// \\const foo1 = fn () callconv(.C) void; -// \\ -// ); -//} -// + //test "zig fmt: C var args" { // try testCanonical( // \\pub extern "c" fn printf(format: [*:0]const u8, ...) 
c_int; // \\ // ); //} -// + //test "zig fmt: Only indent multiline string literals in function calls" { // try testCanonical( // \\test "zig fmt:" { @@ -3522,7 +3505,7 @@ test "zig fmt: usingnamespace" { // \\ // ); //} -// + //test "zig fmt: Don't add extra newline after if" { // try testCanonical( // \\pub fn atomicSymLink(allocator: *Allocator, existing_path: []const u8, new_path: []const u8) !void { @@ -3533,7 +3516,7 @@ test "zig fmt: usingnamespace" { // \\ // ); //} -// + //test "zig fmt: comments in ternary ifs" { // try testCanonical( // \\const x = if (true) { @@ -3611,7 +3594,7 @@ test "zig fmt: usingnamespace" { // \\ // ); //} -// + //test "zig fmt: Control flow statement as body of blockless if" { // try testCanonical( // \\pub fn main() void { @@ -3648,22 +3631,22 @@ test "zig fmt: usingnamespace" { // \\ // ); //} -// -//test "zig fmt: " { -// try testCanonical( -// \\pub fn sendViewTags(self: Self) void { -// \\ var it = ViewStack(View).iterator(self.output.views.first, std.math.maxInt(u32)); -// \\ while (it.next()) |node| -// \\ view_tags.append(node.view.current_tags) catch { -// \\ c.wl_resource_post_no_memory(self.wl_resource); -// \\ log.crit(.river_status, "out of memory", .{}); -// \\ return; -// \\ }; -// \\} -// \\ -// ); -//} -// + +test "zig fmt: regression test for #5722" { + try testCanonical( + \\pub fn sendViewTags(self: Self) void { + \\ var it = ViewStack(View).iterator(self.output.views.first, std.math.maxInt(u32)); + \\ while (it.next()) |node| + \\ view_tags.append(node.view.current_tags) catch { + \\ c.wl_resource_post_no_memory(self.wl_resource); + \\ log.crit(.river_status, "out of memory", .{}); + \\ return; + \\ }; + \\} + \\ + ); +} + //test "zig fmt: allow trailing line comments to do manual array formatting" { // try testCanonical( // \\fn foo() void { @@ -3804,40 +3787,40 @@ test "zig fmt: usingnamespace" { // \\ // ); //} -// -//test "zig fmt: single argument trailing commas in @builtins()" { -// try testCanonical( 
-// \\pub fn foo(qzz: []u8) i1 { -// \\ @panic( -// \\ foo, -// \\ ); -// \\ panic( -// \\ foo, -// \\ ); -// \\ @panic( -// \\ foo, -// \\ bar, -// \\ ); -// \\} -// \\ -// ); -//} -// -//test "zig fmt: trailing comma should force multiline 1 column" { -// try testTransform( -// \\pub const UUID_NULL: uuid_t = [16]u8{0,0,0,0,}; -// \\ -// , -// \\pub const UUID_NULL: uuid_t = [16]u8{ -// \\ 0, -// \\ 0, -// \\ 0, -// \\ 0, -// \\}; -// \\ -// ); -//} -// + +test "zig fmt: single argument trailing commas in @builtins()" { + try testCanonical( + \\pub fn foo(qzz: []u8) i1 { + \\ @panic( + \\ foo, + \\ ); + \\ panic( + \\ foo, + \\ ); + \\ @panic( + \\ foo, + \\ bar, + \\ ); + \\} + \\ + ); +} + +test "zig fmt: trailing comma should force multiline 1 column" { + try testTransform( + \\pub const UUID_NULL: uuid_t = [16]u8{0,0,0,0,}; + \\ + , + \\pub const UUID_NULL: uuid_t = [16]u8{ + \\ 0, + \\ 0, + \\ 0, + \\ 0, + \\}; + \\ + ); +} + //test "zig fmt: function params should align nicely" { // try testCanonical( // \\pub fn foo() void { @@ -3853,29 +3836,29 @@ test "zig fmt: usingnamespace" { // \\ // ); //} -// -//test "recovery: top level" { -// try testError( -// \\test "" {inline} -// \\test "" {inline} -// , &[_]Error{ -// .ExpectedInlinable, -// .ExpectedInlinable, -// }); -//} -// -//test "recovery: block statements" { -// try testError( -// \\test "" { -// \\ foo + +; -// \\ inline; -// \\} -// , &[_]Error{ -// .InvalidToken, -// .ExpectedInlinable, -// }); -//} -// + +test "recovery: top level" { + try testError( + \\test "" {inline} + \\test "" {inline} + , &[_]Error{ + .ExpectedInlinable, + .ExpectedInlinable, + }); +} + +test "recovery: block statements" { + try testError( + \\test "" { + \\ foo + +; + \\ inline; + \\} + , &[_]Error{ + .InvalidToken, + .ExpectedInlinable, + }); +} + //test "recovery: missing comma" { // try testError( // \\test "" { @@ -3894,17 +3877,17 @@ test "zig fmt: usingnamespace" { // .InvalidToken, // }); //} -// -//test "recovery: 
extra qualifier" { -// try testError( -// \\const a: *const const u8; -// \\test "" -// , &[_]Error{ -// .ExtraConstQualifier, -// .ExpectedLBrace, -// }); -//} -// + +test "recovery: extra qualifier" { + try testError( + \\const a: *const const u8; + \\test "" + , &[_]Error{ + .ExtraConstQualifier, + .ExpectedLBrace, + }); +} + //test "recovery: missing return type" { // try testError( // \\fn foo() { @@ -3917,7 +3900,7 @@ test "zig fmt: usingnamespace" { // .ExpectedLBrace, // }); //} -// + //test "recovery: continue after invalid decl" { // try testError( // \\fn foo { @@ -3942,7 +3925,7 @@ test "zig fmt: usingnamespace" { // .InvalidAnd, // }); //} -// + //test "recovery: invalid extern/inline" { // try testError( // \\inline test "" { a && b; } @@ -3957,7 +3940,7 @@ test "zig fmt: usingnamespace" { // .InvalidAnd, // }); //} -// + //test "recovery: missing semicolon" { // try testError( // \\test "" { @@ -3974,7 +3957,7 @@ test "zig fmt: usingnamespace" { // .ExpectedToken, // }); //} -// + //test "recovery: invalid container members" { // try testError( // \\usingnamespace; @@ -3993,7 +3976,7 @@ test "zig fmt: usingnamespace" { // .ExpectedToken, // }); //} -// + //test "recovery: invalid parameter" { // try testError( // \\fn main() void { @@ -4003,7 +3986,7 @@ test "zig fmt: usingnamespace" { // .ExpectedToken, // }); //} -// + //test "recovery: extra '}' at top level" { // try testError( // \\}}} @@ -4018,16 +4001,16 @@ test "zig fmt: usingnamespace" { // }); //} // -//test "recovery: mismatched bracket at top level" { -// try testError( -// \\const S = struct { -// \\ arr: 128]?G -// \\}; -// , &[_]Error{ -// .ExpectedToken, -// }); -//} -// +test "recovery: mismatched bracket at top level" { + try testError( + \\const S = struct { + \\ arr: 128]?G + \\}; + , &[_]Error{ + .ExpectedToken, + }); +} + //test "recovery: invalid global error set access" { // try testError( // \\test "" { @@ -4039,7 +4022,7 @@ test "zig fmt: usingnamespace" { // .InvalidAnd, // 
}); //} -// + //test "recovery: invalid asterisk after pointer dereference" { // try testError( // \\test "" { @@ -4057,7 +4040,7 @@ test "zig fmt: usingnamespace" { // .InvalidAnd, // }); //} -// + //test "recovery: missing semicolon after if, for, while stmt" { // try testError( // \\test "" { @@ -4073,27 +4056,27 @@ test "zig fmt: usingnamespace" { // .InvalidAnd, // }); //} -// -//test "recovery: invalid comptime" { -// try testError( -// \\comptime -// , &[_]Error{ -// .ExpectedBlockOrField, -// }); -//} -// -//test "recovery: missing block after for/while loops" { -// try testError( -// \\test "" { while (foo) } -// , &[_]Error{ -// .ExpectedBlockOrAssignment, -// }); -// try testError( -// \\test "" { for (foo) |bar| } -// , &[_]Error{ -// .ExpectedBlockOrAssignment, -// }); -//} + +test "recovery: invalid comptime" { + try testError( + \\comptime + , &[_]Error{ + .ExpectedBlockOrField, + }); +} + +test "recovery: missing block after for/while loops" { + try testError( + \\test "" { while (foo) } + , &[_]Error{ + .ExpectedBlockOrAssignment, + }); + try testError( + \\test "" { for (foo) |bar| } + , &[_]Error{ + .ExpectedBlockOrAssignment, + }); +} const std = @import("std"); const mem = std.mem; diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 0e9032560c..ef1ba0a1b4 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -198,7 +198,11 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac .AnyFrameLiteral, => return renderToken(ais, tree, main_tokens[node], space), - .ErrorValue => unreachable, // TODO + .ErrorValue => { + try renderToken(ais, tree, main_tokens[node], .None); + try renderToken(ais, tree, main_tokens[node] + 1, .None); + return renderToken(ais, tree, main_tokens[node] + 2, space); + }, .AnyType => return renderToken(ais, tree, main_tokens[node], space), From 80b719d967d7241182e237b42ade1cd88494c8e8 Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Wed, 10 Feb 2021 13:02:37 +0100 
Subject: [PATCH 042/173] zig fmt: fix typo in firstToken() for pointer types --- lib/std/zig/ast.zig | 4 ++-- lib/std/zig/parser_test.zig | 46 ++++++++++++++++++------------------- 2 files changed, 25 insertions(+), 25 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 1b78db9f3e..cbc12122c6 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -408,10 +408,10 @@ pub const Tree = struct { const main_token = main_tokens[n]; return switch (token_tags[main_token]) { .Asterisk => switch (token_tags[main_token - 1]) { - .LBrace => main_token - 1, + .LBracket => main_token - 1, else => main_token, }, - .LBrace => main_token, + .LBracket => main_token, else => unreachable, } - end_offset; }, diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index cc27b49f41..a0114260db 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -423,16 +423,16 @@ test "zig fmt: sentinel slice with modifiers" { ); } -//test "zig fmt: anon literal in array" { -// try testCanonical( -// \\var arr: [2]Foo = .{ -// \\ .{ .a = 2 }, -// \\ .{ .b = 3 }, -// \\}; -// \\ -// ); -//} -// +test "zig fmt: anon literal in array" { + try testCanonical( + \\var arr: [2]Foo = .{ + \\ .{ .a = 2 }, + \\ .{ .b = 3 }, + \\}; + \\ + ); +} + //test "zig fmt: alignment in anonymous literal" { // try testTransform( // \\const a = .{ @@ -751,12 +751,12 @@ test "zig fmt: tagged union with enum values" { ); } -//test "zig fmt: allowzero pointer" { -// try testCanonical( -// \\const T = [*]allowzero const u8; -// \\ -// ); -//} +test "zig fmt: allowzero pointer" { + try testCanonical( + \\const T = [*]allowzero const u8; + \\ + ); +} test "zig fmt: enum literal" { try testCanonical( @@ -806,13 +806,13 @@ test "zig fmt: character literal larger than u8" { // \\ // ); //} -// -//test "zig fmt: C pointers" { -// try testCanonical( -// \\const Ptr = [*c]i32; -// \\ -// ); -//} + +test "zig fmt: C pointers" { + try testCanonical( + \\const Ptr = 
[*c]i32; + \\ + ); +} test "zig fmt: threadlocal" { try testCanonical( From a524e57090f3e3412292dbe6b3e4fe4fb7bad1ea Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Wed, 10 Feb 2021 15:19:30 +0100 Subject: [PATCH 043/173] zig fmt: support bodyless function decls extern function declarations do not have a body, so allow setting the rhs for FnDecl to 0 to indicate this is the case. --- lib/std/zig/ast.zig | 16 ++++++++++++---- lib/std/zig/parse.zig | 9 ++++++++- lib/std/zig/parser_test.zig | 30 +++++++++++++++++++++++------- lib/std/zig/render.zig | 11 +++++++++-- 4 files changed, 52 insertions(+), 14 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index cbc12122c6..621b79b80f 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -521,7 +521,8 @@ pub const Tree = struct { .IfSimple, .WhileSimple, .ForSimple, - .FnDecl, + .FnProtoSimple, + .FnProtoMulti, .PtrTypeAligned, .PtrTypeSentinel, .PtrType, @@ -538,8 +539,6 @@ pub const Tree = struct { .AsmSimple, .AsmOutput, .AsmInput, - .FnProtoSimple, - .FnProtoMulti, .ErrorValue, => return datas[n].rhs + end_offset, @@ -804,6 +803,13 @@ pub const Tree = struct { return main_tokens[n] + end_offset; } }, + .FnDecl => { + if (datas[n].rhs != 0) { + n = datas[n].rhs; + } else { + n = datas[n].lhs; + } + }, .FnProtoOne => { const extra = tree.extraData(datas[n].lhs, Node.FnProtoOne); // linksection, callconv, align can appear in any order, so we @@ -2520,7 +2526,9 @@ pub const Node = struct { /// `fn(a: b, c: d) rhs linksection(e) callconv(f)`. `FnProto[lhs]`. /// anytype and ... parameters are omitted from the AST tree. FnProto, - /// lhs is the FnProto, rhs is the function body block. + /// lhs is the FnProto. + /// rhs is the function body block if non-zero. + /// if rhs is zero, the funtion decl has no body (e.g. an extern function) FnDecl, /// `anyframe->rhs`. main_token is `anyframe`. `lhs` is arrow token index. 
AnyFrameType, diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig index 9bc3385105..4571ece0e8 100644 --- a/lib/std/zig/parse.zig +++ b/lib/std/zig/parse.zig @@ -516,7 +516,14 @@ const Parser = struct { .Semicolon => { const semicolon_token = p.nextToken(); try p.parseAppendedDocComment(semicolon_token); - return fn_proto; + return p.addNode(.{ + .tag = .FnDecl, + .main_token = p.nodes.items(.main_token)[fn_proto], + .data = .{ + .lhs = fn_proto, + .rhs = 0, + }, + }); }, .LBrace => { const body_block = try p.parseBlock(); diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index a0114260db..98478d68b7 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -298,12 +298,12 @@ test "zig fmt: grouped expressions (parentheses)" { ); } -//test "zig fmt: c pointer type" { -// try testCanonical( -// \\pub extern fn repro() [*c]const u8; -// \\ -// ); -//} +test "zig fmt: c pointer type" { + try testCanonical( + \\pub extern fn repro() [*c]const u8; + \\ + ); +} test "zig fmt: builtin call with trailing comma" { try testCanonical( @@ -2339,7 +2339,23 @@ test "zig fmt: alignment" { // \\ // ); //} -// + +test "zig fmt: function attributes" { + try testCanonical( + \\export fn foo() void {} + \\pub export fn foo() void {} + \\extern fn foo() void; + \\pub extern fn foo() void; + \\extern "c" fn foo() void; + \\pub extern "c" fn foo() void; + \\inline fn foo() void {} + \\pub inline fn foo() void {} + \\noinline fn foo() void {} + \\pub noinline fn foo() void {} + \\ + ); +} + //test "zig fmt: pointer attributes" { // try testCanonical( // \\extern fn f1(s: *align(*u8) u8) c_int; diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index ef1ba0a1b4..c27cce795a 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -121,6 +121,8 @@ fn renderMember(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) E .Keyword_export, .Keyword_pub, .StringLiteral, + .Keyword_inline, + .Keyword_noinline, => 
continue, else => { @@ -132,8 +134,13 @@ fn renderMember(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) E while (i < fn_token) : (i += 1) { try renderToken(ais, tree, i, .Space); } - try renderExpression(ais, tree, fn_proto, .Space); - return renderExpression(ais, tree, datas[decl].rhs, space); + if (datas[decl].rhs != 0) { + try renderExpression(ais, tree, fn_proto, .Space); + return renderExpression(ais, tree, datas[decl].rhs, space); + } else { + try renderExpression(ais, tree, fn_proto, .None); + return renderToken(ais, tree, tree.lastToken(fn_proto) + 1, space); // semicolon + } }, .FnProtoSimple, .FnProtoMulti, From 8c4f3e5a319b2c700a57d833e1aaf01e769f8d4a Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Wed, 10 Feb 2021 16:00:54 +0100 Subject: [PATCH 044/173] zig fmt: fix render of pointers with ** tokens --- lib/std/zig/ast.zig | 24 ++++++--- lib/std/zig/parser_test.zig | 97 +++++++++++++++++++++---------------- lib/std/zig/render.zig | 10 ++++ 3 files changed, 83 insertions(+), 48 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 621b79b80f..8df99a640f 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -407,7 +407,9 @@ pub const Tree = struct { => { const main_token = main_tokens[n]; return switch (token_tags[main_token]) { - .Asterisk => switch (token_tags[main_token - 1]) { + .Asterisk, + .AsteriskAsterisk, + => switch (token_tags[main_token - 1]) { .LBracket => main_token - 1, else => main_token, }, @@ -1625,7 +1627,9 @@ pub const Tree = struct { // literals in some places here const Kind = full.PtrType.Kind; const kind: Kind = switch (token_tags[info.main_token]) { - .Asterisk => switch (token_tags[info.main_token + 1]) { + .Asterisk, + .AsteriskAsterisk, + => switch (token_tags[info.main_token + 1]) { .RBracket => .many, .Colon => .sentinel, .Identifier => if (token_tags[info.main_token - 1] == .LBracket) Kind.c else .one, @@ -2393,18 +2397,26 @@ pub const Node = struct { /// `[*]align(lhs) rhs`. 
lhs can be omitted. /// `*align(lhs) rhs`. lhs can be omitted. /// `[]rhs`. - /// main_token is the asterisk if a pointer or the lbrace if a slice + /// main_token is the asterisk if a pointer or the lbracket if a slice + /// main_token might be a ** token, which is shared with a parent/child + /// pointer type and may require special handling. PtrTypeAligned, /// `[*:lhs]rhs`. lhs can be omitted. /// `*rhs`. /// `[:lhs]rhs`. - /// main_token is the asterisk if a pointer or the lbrace if a slice + /// main_token is the asterisk if a pointer or the lbracket if a slice + /// main_token might be a ** token, which is shared with a parent/child + /// pointer type and may require special handling. PtrTypeSentinel, /// lhs is index into PtrType. rhs is the element type expression. - /// main_token is the asterisk if a pointer or the lbrace if a slice + /// main_token is the asterisk if a pointer or the lbracket if a slice + /// main_token might be a ** token, which is shared with a parent/child + /// pointer type and may require special handling. PtrType, /// lhs is index into PtrTypeBitRange. rhs is the element type expression. - /// main_token is the asterisk if a pointer or the lbrace if a slice + /// main_token is the asterisk if a pointer or the lbracket if a slice + /// main_token might be a ** token, which is shared with a parent/child + /// pointer type and may require special handling. PtrTypeBitRange, /// `lhs[rhs..]` /// main_token is the lbracket. 
diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 98478d68b7..e8e9c1a2e2 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -2318,27 +2318,27 @@ test "zig fmt: alignment" { ); } -//test "zig fmt: C main" { -// try testCanonical( -// \\fn main(argc: c_int, argv: **u8) c_int { -// \\ const a = b; -// \\} -// \\ -// ); -//} -// -//test "zig fmt: return" { -// try testCanonical( -// \\fn foo(argc: c_int, argv: **u8) c_int { -// \\ return 0; -// \\} -// \\ -// \\fn bar() void { -// \\ return; -// \\} -// \\ -// ); -//} +test "zig fmt: C main" { + try testCanonical( + \\fn main(argc: c_int, argv: **u8) c_int { + \\ const a = b; + \\} + \\ + ); +} + +test "zig fmt: return" { + try testCanonical( + \\fn foo(argc: c_int, argv: **u8) c_int { + \\ return 0; + \\} + \\ + \\fn bar() void { + \\ return; + \\} + \\ + ); +} test "zig fmt: function attributes" { try testCanonical( @@ -2356,27 +2356,40 @@ test "zig fmt: function attributes" { ); } -//test "zig fmt: pointer attributes" { -// try testCanonical( -// \\extern fn f1(s: *align(*u8) u8) c_int; -// \\extern fn f2(s: **align(1) *const *volatile u8) c_int; -// \\extern fn f3(s: *align(1) const *align(1) volatile *const volatile u8) c_int; -// \\extern fn f4(s: *align(1) const volatile u8) c_int; -// \\extern fn f5(s: [*:0]align(1) const volatile u8) c_int; -// \\ -// ); -//} -// -//test "zig fmt: slice attributes" { -// try testCanonical( -// \\extern fn f1(s: *align(*u8) u8) c_int; -// \\extern fn f2(s: **align(1) *const *volatile u8) c_int; -// \\extern fn f3(s: *align(1) const *align(1) volatile *const volatile u8) c_int; -// \\extern fn f4(s: *align(1) const volatile u8) c_int; -// \\extern fn f5(s: [*:0]align(1) const volatile u8) c_int; -// \\ -// ); -//} +test "zig fmt: nested pointers with ** tokens" { + try testCanonical( + \\const x: *u32 = undefined; + \\const x: **u32 = undefined; + \\const x: ***u32 = undefined; + \\const x: ****u32 = undefined; + \\const x: 
*****u32 = undefined; + \\const x: ******u32 = undefined; + \\const x: *******u32 = undefined; + \\ + ); +} + +test "zig fmt: pointer attributes" { + try testCanonical( + \\extern fn f1(s: *align(*u8) u8) c_int; + \\extern fn f2(s: **align(1) *const *volatile u8) c_int; + \\extern fn f3(s: *align(1) const *align(1) volatile *const volatile u8) c_int; + \\extern fn f4(s: *align(1) const volatile u8) c_int; + \\extern fn f5(s: [*:0]align(1) const volatile u8) c_int; + \\ + ); +} + +test "zig fmt: slice attributes" { + try testCanonical( + \\extern fn f1(s: []align(*u8) u8) c_int; + \\extern fn f2(s: []align(1) []const []volatile u8) c_int; + \\extern fn f3(s: []align(1) const [:0]align(1) volatile []const volatile u8) c_int; + \\extern fn f4(s: []align(1) const volatile u8) c_int; + \\extern fn f5(s: [:0]align(1) const volatile u8) c_int; + \\ + ); +} test "zig fmt: test declaration" { try testCanonical( diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index c27cce795a..1be114a6a1 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -699,6 +699,16 @@ fn renderPtrType( ) Error!void { switch (ptr_type.kind) { .one => { + // Since ** tokens exist and the same token is shared by two + // nested pointer types, we check to see if we are the parent + // in such a relationship. If so, skip rendering anything for + // this pointer type and rely on the child to render our asterisk + // as well when it renders the ** token. 
+ if (tree.tokens.items(.tag)[ptr_type.ast.main_token] == .AsteriskAsterisk and + ptr_type.ast.main_token == tree.nodes.items(.main_token)[ptr_type.ast.child_type]) + { + return renderExpression(ais, tree, ptr_type.ast.child_type, space); + } try renderToken(ais, tree, ptr_type.ast.main_token, .None); // asterisk }, .many => { From 3110a73486223aba9152946fa0184536fc3c4b76 Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Wed, 10 Feb 2021 16:35:37 +0100 Subject: [PATCH 045/173] zig fmt: implement Tree.lastToken() for array types --- lib/std/zig/ast.zig | 7 +++++-- lib/std/zig/parser_test.zig | 13 +++++++++++++ 2 files changed, 18 insertions(+), 2 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 8df99a640f..483ae26956 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -529,6 +529,7 @@ pub const Tree = struct { .PtrTypeSentinel, .PtrType, .PtrTypeBitRange, + .ArrayType, .SwitchCaseOne, .SwitchCase, => n = datas[n].rhs, @@ -902,6 +903,10 @@ pub const Tree = struct { return main_tokens[n] + end_offset; } }, + .ArrayTypeSentinel => { + const extra = tree.extraData(datas[n].rhs, Node.ArrayTypeSentinel); + n = extra.elem_type; + }, // These are not supported by lastToken() because implementation would // require recursion due to the optional comma followed by rbrace. 
@@ -917,8 +922,6 @@ pub const Tree = struct { .TaggedUnionEnumTag => unreachable, // TODO .TaggedUnionEnumTagComma => unreachable, // TODO .SwitchRange => unreachable, // TODO - .ArrayType => unreachable, // TODO - .ArrayTypeSentinel => unreachable, // TODO }; } diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index e8e9c1a2e2..9eb7ecadba 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -353,6 +353,19 @@ test "zig fmt: anytype struct field" { ); } +test "zig fmt: array types last token" { + try testCanonical( + \\test { + \\ const x = [40]u32; + \\} + \\ + \\test { + \\ const x = [40:0]u32; + \\} + \\ + ); +} + //test "zig fmt: sentinel-terminated array type" { // try testCanonical( // \\pub fn cStrToPrefixedFileW(s: [*:0]const u8) ![PATH_MAX_WIDE:0]u16 { From 928f6f48a62b4b356a0031ba54e247dbbcde4256 Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Wed, 10 Feb 2021 18:17:50 +0100 Subject: [PATCH 046/173] zig fmt: implement Tree.lastToken() for array init --- lib/std/zig/ast.zig | 43 +++++++++++++++--- lib/std/zig/parse.zig | 24 +++++++--- lib/std/zig/parser_test.zig | 88 ++++++++++++++++++++++++------------- lib/std/zig/render.zig | 10 +++-- 4 files changed, 117 insertions(+), 48 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 483ae26956..1780cc4f8d 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -255,6 +255,7 @@ pub const Tree = struct { => return main_tokens[n] - end_offset, .ArrayInitDot, + .ArrayInitDotComma, .ArrayInitDotTwo, .ArrayInitDotTwoComma, .StructInitDot, @@ -311,7 +312,9 @@ pub const Tree = struct { .Deref, .ArrayAccess, .ArrayInitOne, + .ArrayInitOneComma, .ArrayInit, + .ArrayInitComma, .StructInitOne, .StructInit, .CallOne, @@ -604,6 +607,13 @@ pub const Tree = struct { const extra = tree.extraData(datas[n].rhs, Node.Asm); return extra.rparen + end_offset; }, + .ArrayInit => { + const elements = tree.extraData(datas[n].rhs, Node.SubRange); + 
assert(elements.end - elements.start > 0); + end_offset += 1; // for the rbrace + n = tree.extra_data[elements.end - 1]; // last element + }, + .ArrayInitComma, .ContainerDeclArgComma, .SwitchComma, => { @@ -612,6 +622,7 @@ pub const Tree = struct { end_offset += 2; // for the comma + rbrace n = tree.extra_data[members.end - 1]; // last parameter }, + .ArrayInitDot, .Block, .ContainerDecl, .TaggedUnion, @@ -621,6 +632,7 @@ pub const Tree = struct { end_offset += 1; // for the rbrace n = tree.extra_data[datas[n].rhs - 1]; // last statement }, + .ArrayInitDotComma, .BlockSemicolon, .ContainerDeclComma, .TaggedUnionComma, @@ -772,7 +784,16 @@ pub const Tree = struct { } }, - .SliceOpen, .CallOneComma, .AsyncCallOneComma => { + .ArrayInitOne => { + end_offset += 1; // rbrace + n = datas[n].rhs; + assert(n != 0); + }, + .SliceOpen, + .CallOneComma, + .AsyncCallOneComma, + .ArrayInitOneComma, + => { end_offset += 2; // ellipsis2 + rbracket, or comma + rparen n = datas[n].rhs; assert(n != 0); @@ -912,9 +933,6 @@ pub const Tree = struct { // require recursion due to the optional comma followed by rbrace. // TODO follow the pattern set by StructInitDotTwoComma which will allow // lastToken to work for all of these. 
- .ArrayInit => unreachable, // TODO - .ArrayInitOne => unreachable, // TODO - .ArrayInitDot => unreachable, // TODO .StructInit => unreachable, // TODO .StructInitOne => unreachable, // TODO .StructInitDot => unreachable, // TODO @@ -1151,7 +1169,8 @@ pub const Tree = struct { } pub fn arrayInitOne(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) full.ArrayInit { - assert(tree.nodes.items(.tag)[node] == .ArrayInitOne); + assert(tree.nodes.items(.tag)[node] == .ArrayInitOne or + tree.nodes.items(.tag)[node] == .ArrayInitOneComma); const data = tree.nodes.items(.data)[node]; buffer[0] = data.rhs; const elements = if (data.rhs == 0) buffer[0..0] else buffer[0..1]; @@ -1185,7 +1204,8 @@ pub const Tree = struct { } pub fn arrayInitDot(tree: Tree, node: Node.Index) full.ArrayInit { - assert(tree.nodes.items(.tag)[node] == .ArrayInitDot); + assert(tree.nodes.items(.tag)[node] == .ArrayInitDot or + tree.nodes.items(.tag)[node] == .ArrayInitDotComma); const data = tree.nodes.items(.data)[node]; return .{ .ast = .{ @@ -1197,7 +1217,8 @@ pub const Tree = struct { } pub fn arrayInit(tree: Tree, node: Node.Index) full.ArrayInit { - assert(tree.nodes.items(.tag)[node] == .ArrayInit); + assert(tree.nodes.items(.tag)[node] == .ArrayInit or + tree.nodes.items(.tag)[node] == .ArrayInitComma); const data = tree.nodes.items(.data)[node]; const elem_range = tree.extraData(data.rhs, Node.SubRange); return .{ @@ -2436,6 +2457,8 @@ pub const Node = struct { ArrayAccess, /// `lhs{rhs}`. rhs can be omitted. ArrayInitOne, + /// `lhs{rhs,}`. rhs can *not* be omitted + ArrayInitOneComma, /// `.{lhs, rhs}`. lhs and rhs can be omitted. ArrayInitDotTwo, /// Same as `ArrayInitDotTwo` except there is known to be a trailing comma @@ -2443,8 +2466,14 @@ pub const Node = struct { ArrayInitDotTwoComma, /// `.{a, b}`. `sub_list[lhs..rhs]`. ArrayInitDot, + /// Same as `ArrayInitDot` except there is known to be a trailing comma + /// before the final rbrace. + ArrayInitDotComma, /// `lhs{a, b}`. 
`sub_range_list[rhs]`. lhs can be omitted which means `.{a, b}`. ArrayInit, + /// Same as `ArrayInit` except there is known to be a trailing comma + /// before the final rbrace. + ArrayInitComma, /// `lhs{.a = rhs}`. rhs can be omitted making it empty. /// main_token is the lbrace. StructInitOne, diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig index 4571ece0e8..9c07ad320a 100644 --- a/lib/std/zig/parse.zig +++ b/lib/std/zig/parse.zig @@ -2205,9 +2205,10 @@ const Parser = struct { } const elem_init = try p.expectExpr(); + const comma_one = p.eatToken(.Comma); if (p.eatToken(.RBrace)) |_| { return p.addNode(.{ - .tag = .ArrayInitOne, + .tag = if (comma_one != null) .ArrayInitOneComma else .ArrayInitOne, .main_token = lbrace, .data = .{ .lhs = lhs, @@ -2215,21 +2216,30 @@ const Parser = struct { }, }); } + if (comma_one == null) { + try p.warn(.{ + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + }); + } var init_list = std.ArrayList(Node.Index).init(p.gpa); defer init_list.deinit(); try init_list.append(elem_init); - while (p.eatToken(.Comma)) |_| { - const next = try p.parseExpr(); - if (next == 0) break; + var trailing_comma = true; + var next = try p.parseExpr(); + while (next != 0) : (next = try p.parseExpr()) { try init_list.append(next); + if (p.eatToken(.Comma) == null) { + trailing_comma = false; + break; + } } _ = try p.expectToken(.RBrace); const span = try p.listToSpan(init_list.items); return p.addNode(.{ - .tag = .ArrayInit, + .tag = if (trailing_comma) .ArrayInitComma else .ArrayInit, .main_token = lbrace, .data = .{ .lhs = lhs, @@ -2805,7 +2815,7 @@ const Parser = struct { const comma_two = p.eatToken(.Comma); if (p.eatToken(.RBrace)) |_| { return p.addNode(.{ - .tag = if (comma_one != null) .ArrayInitDotTwoComma else .ArrayInitDotTwo, + .tag = if (comma_two != null) .ArrayInitDotTwoComma else .ArrayInitDotTwo, .main_token = lbrace, .data = .{ .lhs = elem_init_one, @@ -2855,7 +2865,7 @@ const Parser = struct { } const span = 
try p.listToSpan(init_list.items); return p.addNode(.{ - .tag = .ArrayInitDot, + .tag = if (p.token_tags[p.tok_i - 2] == .Comma) .ArrayInitDotComma else .ArrayInitDot, .main_token = lbrace, .data = .{ .lhs = span.start, diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 9eb7ecadba..3b0afff9a7 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -568,109 +568,135 @@ test "zig fmt: struct literal 3 element comma" { test "zig fmt: anon list literal 1 element" { try testCanonical( - \\const x = .{a}; + \\test { + \\ const x = .{a}; + \\} \\ ); } test "zig fmt: anon list literal 1 element comma" { try testCanonical( - \\const x = .{ - \\ a, - \\}; + \\test { + \\ const x = .{ + \\ a, + \\ }; + \\} \\ ); } test "zig fmt: anon list literal 2 element" { try testCanonical( - \\const x = .{ a, b }; + \\test { + \\ const x = .{ a, b }; + \\} \\ ); } test "zig fmt: anon list literal 2 element comma" { try testCanonical( - \\const x = .{ - \\ a, - \\ b, - \\}; + \\test { + \\ const x = .{ + \\ a, + \\ b, + \\ }; + \\} \\ ); } test "zig fmt: anon list literal 3 element" { try testCanonical( - \\const x = .{ a, b, c }; + \\test { + \\ const x = .{ a, b, c }; + \\} \\ ); } test "zig fmt: anon list literal 3 element comma" { try testCanonical( - \\const x = .{ - \\ a, - \\ b, - \\ c, - \\}; + \\test { + \\ const x = .{ + \\ a, + \\ b, + \\ c, + \\ }; + \\} \\ ); } test "zig fmt: array literal 1 element" { try testCanonical( - \\const x = [_]u32{a}; + \\test { + \\ const x = [_]u32{a}; + \\} \\ ); } test "zig fmt: array literal 1 element comma" { try testCanonical( - \\const x = [1]u32{ - \\ a, - \\}; + \\test { + \\ const x = [1]u32{ + \\ a, + \\ }; + \\} \\ ); } test "zig fmt: array literal 2 element" { try testCanonical( - \\const x = [_]u32{ a, b }; + \\test { + \\ const x = [_]u32{ a, b }; + \\} \\ ); } test "zig fmt: array literal 2 element comma" { try testCanonical( - \\const x = [2]u32{ - \\ a, - \\ b, - \\}; + \\test { + 
\\ const x = [2]u32{ + \\ a, + \\ b, + \\ }; + \\} \\ ); } test "zig fmt: array literal 3 element" { try testCanonical( - \\const x = [_]u32{ a, b, c }; + \\test { + \\ const x = [_]u32{ a, b, c }; + \\} \\ ); } test "zig fmt: array literal 3 element comma" { try testCanonical( - \\const x = [3]u32{ - \\ a, - \\ b, - \\ c, - \\}; + \\test { + \\ const x = [3]u32{ + \\ a, + \\ b, + \\ c, + \\ }; + \\} \\ ); } test "zig fmt: sentinel array literal 1 element" { try testCanonical( - \\const x = [_:9000]u32{a}; + \\test { + \\ const x = [_:9000]u32{a}; + \\} \\ ); } diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 1be114a6a1..cc80b04d38 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -390,7 +390,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac .PtrType => return renderPtrType(ais, tree, tree.ptrType(node), space), .PtrTypeBitRange => return renderPtrType(ais, tree, tree.ptrTypeBitRange(node), space), - .ArrayInitOne => { + .ArrayInitOne, .ArrayInitOneComma => { var elements: [1]ast.Node.Index = undefined; return renderArrayInit(ais, tree, tree.arrayInitOne(&elements, node), space); }, @@ -398,8 +398,12 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac var elements: [2]ast.Node.Index = undefined; return renderArrayInit(ais, tree, tree.arrayInitDotTwo(&elements, node), space); }, - .ArrayInitDot => return renderArrayInit(ais, tree, tree.arrayInitDot(node), space), - .ArrayInit => return renderArrayInit(ais, tree, tree.arrayInit(node), space), + .ArrayInitDot, + .ArrayInitDotComma, + => return renderArrayInit(ais, tree, tree.arrayInitDot(node), space), + .ArrayInit, + .ArrayInitComma, + => return renderArrayInit(ais, tree, tree.arrayInit(node), space), .StructInitOne => { var fields: [1]ast.Node.Index = undefined; From 5df7fc36c6e5d7caf7b5b5437bf40fec77a2b971 Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Wed, 10 Feb 2021 19:16:25 +0100 Subject: [PATCH 047/173] 
zig fmt: implement Tree.lastToken() for struct init --- lib/std/zig/ast.zig | 46 +++++++++++++------- lib/std/zig/parse.zig | 19 +++------ lib/std/zig/parser_test.zig | 84 ++++++++++++++++++++++++------------- lib/std/zig/render.zig | 10 +++-- 4 files changed, 98 insertions(+), 61 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 1780cc4f8d..527c358103 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -259,6 +259,7 @@ pub const Tree = struct { .ArrayInitDotTwo, .ArrayInitDotTwoComma, .StructInitDot, + .StructInitDotComma, .StructInitDotTwo, .StructInitDotTwoComma, .EnumLiteral, @@ -316,7 +317,9 @@ pub const Tree = struct { .ArrayInit, .ArrayInitComma, .StructInitOne, + .StructInitOneComma, .StructInit, + .StructInitComma, .CallOne, .CallOneComma, .Call, @@ -607,13 +610,16 @@ pub const Tree = struct { const extra = tree.extraData(datas[n].rhs, Node.Asm); return extra.rparen + end_offset; }, - .ArrayInit => { + .ArrayInit, + .StructInit, + => { const elements = tree.extraData(datas[n].rhs, Node.SubRange); assert(elements.end - elements.start > 0); end_offset += 1; // for the rbrace n = tree.extra_data[elements.end - 1]; // last element }, .ArrayInitComma, + .StructInitComma, .ContainerDeclArgComma, .SwitchComma, => { @@ -623,6 +629,7 @@ pub const Tree = struct { n = tree.extra_data[members.end - 1]; // last parameter }, .ArrayInitDot, + .StructInitDot, .Block, .ContainerDecl, .TaggedUnion, @@ -633,6 +640,7 @@ pub const Tree = struct { n = tree.extra_data[datas[n].rhs - 1]; // last statement }, .ArrayInitDotComma, + .StructInitDotComma, .BlockSemicolon, .ContainerDeclComma, .TaggedUnionComma, @@ -784,7 +792,9 @@ pub const Tree = struct { } }, - .ArrayInitOne => { + .ArrayInitOne, + .StructInitOne, + => { end_offset += 1; // rbrace n = datas[n].rhs; assert(n != 0); @@ -793,6 +803,7 @@ pub const Tree = struct { .CallOneComma, .AsyncCallOneComma, .ArrayInitOneComma, + .StructInitOneComma, => { end_offset += 2; // ellipsis2 + rbracket, 
or comma + rparen n = datas[n].rhs; @@ -929,14 +940,6 @@ pub const Tree = struct { n = extra.elem_type; }, - // These are not supported by lastToken() because implementation would - // require recursion due to the optional comma followed by rbrace. - // TODO follow the pattern set by StructInitDotTwoComma which will allow - // lastToken to work for all of these. - .StructInit => unreachable, // TODO - .StructInitOne => unreachable, // TODO - .StructInitDot => unreachable, // TODO - .TaggedUnionEnumTag => unreachable, // TODO .TaggedUnionEnumTagComma => unreachable, // TODO .SwitchRange => unreachable, // TODO @@ -1118,7 +1121,8 @@ pub const Tree = struct { } pub fn structInitOne(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) full.StructInit { - assert(tree.nodes.items(.tag)[node] == .StructInitOne); + assert(tree.nodes.items(.tag)[node] == .StructInitOne or + tree.nodes.items(.tag)[node] == .StructInitOneComma); const data = tree.nodes.items(.data)[node]; buffer[0] = data.rhs; const fields = if (data.rhs == 0) buffer[0..0] else buffer[0..1]; @@ -1148,7 +1152,8 @@ pub const Tree = struct { } pub fn structInitDot(tree: Tree, node: Node.Index) full.StructInit { - assert(tree.nodes.items(.tag)[node] == .StructInitDot); + assert(tree.nodes.items(.tag)[node] == .StructInitDot or + tree.nodes.items(.tag)[node] == .StructInitDotComma); const data = tree.nodes.items(.data)[node]; return tree.fullStructInit(.{ .lbrace = tree.nodes.items(.main_token)[node], @@ -1158,7 +1163,8 @@ pub const Tree = struct { } pub fn structInit(tree: Tree, node: Node.Index) full.StructInit { - assert(tree.nodes.items(.tag)[node] == .StructInit); + assert(tree.nodes.items(.tag)[node] == .StructInit or + tree.nodes.items(.tag)[node] == .StructInitComma); const data = tree.nodes.items(.data)[node]; const fields_range = tree.extraData(data.rhs, Node.SubRange); return tree.fullStructInit(.{ @@ -2281,6 +2287,8 @@ pub const Node = struct { assert(@sizeOf(Tag) == 1); } + /// Note: The 
FooComma/FooSemicolon variants exist to ease the implementation of + /// Tree.lastToken() pub const Tag = enum { /// sub_list[lhs...rhs] Root, @@ -2477,21 +2485,29 @@ pub const Node = struct { /// `lhs{.a = rhs}`. rhs can be omitted making it empty. /// main_token is the lbrace. StructInitOne, + /// `lhs{.a = rhs,}`. rhs can *not* be omitted. + /// main_token is the lbrace. + StructInitOneComma, /// `.{.a = lhs, .b = rhs}`. lhs and rhs can be omitted. /// main_token is the lbrace. /// No trailing comma before the rbrace. StructInitDotTwo, /// Same as `StructInitDotTwo` except there is known to be a trailing comma - /// before the final rbrace. This tag exists to facilitate lastToken() implemented - /// without recursion. + /// before the final rbrace. StructInitDotTwoComma, /// `.{.a = b, .c = d}`. `sub_list[lhs..rhs]`. /// main_token is the lbrace. StructInitDot, + /// Same as `StructInitDot` except there is known to be a trailing comma + /// before the final rbrace. + StructInitDotComma, /// `lhs{.a = b, .c = d}`. `sub_range_list[rhs]`. /// lhs can be omitted which means `.{.a = b, .c = d}`. /// main_token is the lbrace. StructInit, + /// Same as `StructInit` except there is known to be a trailing comma + /// before the final rbrace. + StructInitComma, /// `lhs(rhs)`. rhs can be omitted. CallOne, /// `lhs(rhs,)`. rhs can be omitted. 
diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig index 9c07ad320a..21ebe43582 100644 --- a/lib/std/zig/parse.zig +++ b/lib/std/zig/parse.zig @@ -2147,7 +2147,7 @@ const Parser = struct { const comma_one = p.eatToken(.Comma); if (p.eatToken(.RBrace)) |_| { return p.addNode(.{ - .tag = .StructInitOne, + .tag = if (comma_one != null) .StructInitOneComma else .StructInitOne, .main_token = lbrace, .data = .{ .lhs = lhs, @@ -2192,7 +2192,7 @@ const Parser = struct { } const span = try p.listToSpan(init_list.items); return p.addNode(.{ - .tag = .StructInit, + .tag = if (p.token_tags[p.tok_i - 2] == .Comma) .StructInitComma else .StructInit, .main_token = lbrace, .data = .{ .lhs = lhs, @@ -2709,12 +2709,8 @@ const Parser = struct { if (field_init_one != 0) { const comma_one = p.eatToken(.Comma); if (p.eatToken(.RBrace)) |_| { - const tag: Node.Tag = if (comma_one != null) - .StructInitDotTwoComma - else - .StructInitDotTwo; return p.addNode(.{ - .tag = tag, + .tag = if (comma_one != null) .StructInitDotTwoComma else .StructInitDotTwo, .main_token = lbrace, .data = .{ .lhs = field_init_one, @@ -2730,12 +2726,8 @@ const Parser = struct { const field_init_two = try p.expectFieldInit(); const comma_two = p.eatToken(.Comma); if (p.eatToken(.RBrace)) |_| { - const tag: Node.Tag = if (comma_two != null) - .StructInitDotTwoComma - else - .StructInitDotTwo; return p.addNode(.{ - .tag = tag, + .tag = if (comma_two != null) .StructInitDotTwoComma else .StructInitDotTwo, .main_token = lbrace, .data = .{ .lhs = field_init_one, @@ -2784,8 +2776,9 @@ const Parser = struct { } } const span = try p.listToSpan(init_list.items); + const trailing_comma = p.token_tags[p.tok_i - 2] == .Comma; return p.addNode(.{ - .tag = .StructInitDot, + .tag = if (trailing_comma) .StructInitDotComma else .StructInitDot, .main_token = lbrace, .data = .{ .lhs = span.start, diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 3b0afff9a7..944722c296 100644 --- 
a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -466,102 +466,126 @@ test "zig fmt: anon literal in array" { test "zig fmt: anon struct literal 1 element" { try testCanonical( - \\const x = .{ .a = b }; + \\test { + \\ const x = .{ .a = b }; + \\} \\ ); } test "zig fmt: anon struct literal 1 element comma" { try testCanonical( - \\const x = .{ - \\ .a = b, - \\}; + \\test { + \\ const x = .{ + \\ .a = b, + \\ }; + \\} \\ ); } test "zig fmt: anon struct literal 2 element" { try testCanonical( - \\const x = .{ .a = b, .c = d }; + \\test { + \\ const x = .{ .a = b, .c = d }; + \\} \\ ); } test "zig fmt: anon struct literal 2 element comma" { try testCanonical( - \\const x = .{ - \\ .a = b, - \\ .c = d, - \\}; + \\test { + \\ const x = .{ + \\ .a = b, + \\ .c = d, + \\ }; + \\} \\ ); } test "zig fmt: anon struct literal 3 element" { try testCanonical( - \\const x = .{ .a = b, .c = d, .e = f }; + \\test { + \\ const x = .{ .a = b, .c = d, .e = f }; + \\} \\ ); } test "zig fmt: anon struct literal 3 element comma" { try testCanonical( - \\const x = .{ - \\ .a = b, - \\ .c = d, - \\ .e = f, - \\}; + \\test { + \\ const x = .{ + \\ .a = b, + \\ .c = d, + \\ .e = f, + \\ }; + \\} \\ ); } test "zig fmt: struct literal 1 element" { try testCanonical( - \\const x = X{ .a = b }; + \\test { + \\ const x = X{ .a = b }; + \\} \\ ); } test "zig fmt: struct literal 1 element comma" { try testCanonical( - \\const x = X{ - \\ .a = b, - \\}; + \\test { + \\ const x = X{ + \\ .a = b, + \\ }; + \\} \\ ); } test "zig fmt: struct literal 2 element" { try testCanonical( - \\const x = X{ .a = b, .c = d }; + \\test { + \\ const x = X{ .a = b, .c = d }; + \\} \\ ); } test "zig fmt: struct literal 2 element comma" { try testCanonical( - \\const x = X{ - \\ .a = b, - \\ .c = d, - \\}; + \\test { + \\ const x = X{ + \\ .a = b, + \\ .c = d, + \\ }; + \\} \\ ); } test "zig fmt: struct literal 3 element" { try testCanonical( - \\const x = X{ .a = b, .c = d, .e = f }; + \\test { + 
\\ const x = X{ .a = b, .c = d, .e = f }; + \\} \\ ); } test "zig fmt: struct literal 3 element comma" { try testCanonical( - \\const x = X{ - \\ .a = b, - \\ .c = d, - \\ .e = f, - \\}; + \\test { + \\ const x = X{ + \\ .a = b, + \\ .c = d, + \\ .e = f, + \\ }; + \\} \\ ); } diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index cc80b04d38..051b3f46b1 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -405,7 +405,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac .ArrayInitComma, => return renderArrayInit(ais, tree, tree.arrayInit(node), space), - .StructInitOne => { + .StructInitOne, .StructInitOneComma => { var fields: [1]ast.Node.Index = undefined; return renderStructInit(ais, tree, tree.structInitOne(&fields, node), space); }, @@ -413,8 +413,12 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac var fields: [2]ast.Node.Index = undefined; return renderStructInit(ais, tree, tree.structInitDotTwo(&fields, node), space); }, - .StructInitDot => return renderStructInit(ais, tree, tree.structInitDot(node), space), - .StructInit => return renderStructInit(ais, tree, tree.structInit(node), space), + .StructInitDot, + .StructInitDotComma, + => return renderStructInit(ais, tree, tree.structInitDot(node), space), + .StructInit, + .StructInitComma, + => return renderStructInit(ais, tree, tree.structInit(node), space), .CallOne, .CallOneComma, .AsyncCallOne, .AsyncCallOneComma => { var params: [1]ast.Node.Index = undefined; From 515d4920e79ca3c631f243f6a1d7fb6b48aa91cd Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Wed, 10 Feb 2021 19:19:58 +0100 Subject: [PATCH 048/173] zig fmt: fix 0 element struct and array init --- lib/std/zig/ast.zig | 7 +++++-- lib/std/zig/parser_test.zig | 27 +++++++++++++++++++++++++++ 2 files changed, 32 insertions(+), 2 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 527c358103..358c40b28a 100644 --- a/lib/std/zig/ast.zig 
+++ b/lib/std/zig/ast.zig @@ -796,8 +796,11 @@ pub const Tree = struct { .StructInitOne, => { end_offset += 1; // rbrace - n = datas[n].rhs; - assert(n != 0); + if (datas[n].rhs == 0) { + return main_tokens[n] + end_offset; + } else { + n = datas[n].rhs; + } }, .SliceOpen, .CallOneComma, diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 944722c296..bc1a36d7e3 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -464,6 +464,15 @@ test "zig fmt: anon literal in array" { // ); //} +test "zig fmt: anon struct literal 0 element" { + try testCanonical( + \\test { + \\ const x = .{}; + \\} + \\ + ); +} + test "zig fmt: anon struct literal 1 element" { try testCanonical( \\test { @@ -527,6 +536,15 @@ test "zig fmt: anon struct literal 3 element comma" { ); } +test "zig fmt: struct literal 0 element" { + try testCanonical( + \\test { + \\ const x = X{}; + \\} + \\ + ); +} + test "zig fmt: struct literal 1 element" { try testCanonical( \\test { @@ -653,6 +671,15 @@ test "zig fmt: anon list literal 3 element comma" { ); } +test "zig fmt: array literal 0 element" { + try testCanonical( + \\test { + \\ const x = [_]u32{}; + \\} + \\ + ); +} + test "zig fmt: array literal 1 element" { try testCanonical( \\test { From 4c8caf33437d8ce55450a51dfbf89db69c65680e Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Wed, 10 Feb 2021 19:40:19 +0100 Subject: [PATCH 049/173] zig fmt: implement Tree.lastToken() for all nodes --- lib/std/zig/ast.zig | 22 ++++++++++++++++------ lib/std/zig/parser_test.zig | 19 +++++++++++++++++++ 2 files changed, 35 insertions(+), 6 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 358c40b28a..783a578e01 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -538,6 +538,7 @@ pub const Tree = struct { .ArrayType, .SwitchCaseOne, .SwitchCase, + .SwitchRange, => n = datas[n].rhs, .FieldAccess, @@ -580,8 +581,21 @@ pub const Tree = struct { } n = tree.extra_data[params.end - 1]; // 
last parameter }, - .CallComma, .AsyncCallComma => { - end_offset += 2; // for the comma+rparen + .TaggedUnionEnumTag => { + const members = tree.extraData(datas[n].rhs, Node.SubRange); + if (members.end - members.start == 0) { + end_offset += 4; // for the rparen + rparen + lbrace + rbrace + n = datas[n].lhs; + } else { + end_offset += 1; // for the rbrace + n = tree.extra_data[members.end - 1]; // last parameter + } + }, + .CallComma, + .AsyncCallComma, + .TaggedUnionEnumTagComma, + => { + end_offset += 2; // for the comma + rparen/rbrace const params = tree.extraData(datas[n].rhs, Node.SubRange); assert(params.end > params.start); n = tree.extra_data[params.end - 1]; // last parameter @@ -942,10 +956,6 @@ pub const Tree = struct { const extra = tree.extraData(datas[n].rhs, Node.ArrayTypeSentinel); n = extra.elem_type; }, - - .TaggedUnionEnumTag => unreachable, // TODO - .TaggedUnionEnumTagComma => unreachable, // TODO - .SwitchRange => unreachable, // TODO }; } diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index bc1a36d7e3..c6ee4f6bd9 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -841,6 +841,25 @@ test "zig fmt: tagged union with enum values" { ); } +test "zig fmt: tagged union enum tag last token" { + try testCanonical( + \\test { + \\ const U = union(enum(u32)) {}; + \\} + \\ + \\test { + \\ const U = union(enum(u32)) { foo }; + \\} + \\ + \\test { + \\ const U = union(enum(u32)) { + \\ foo, + \\ }; + \\} + \\ + ); +} + test "zig fmt: allowzero pointer" { try testCanonical( \\const T = [*]allowzero const u8; From b6448a533d4465c226b7b8613806ff0673261d6a Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Thu, 11 Feb 2021 22:31:44 +0100 Subject: [PATCH 050/173] zig fmt: fix Tree.lastToken() for empty *Two nodes --- lib/std/zig/ast.zig | 25 +++++++++++++------------ 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 783a578e01..ad96baf3f3 100644 --- 
a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -676,20 +676,11 @@ pub const Tree = struct { }, .ArrayInitDotTwo, .BlockTwo, + .BuiltinCallTwo, .StructInitDotTwo, .ContainerDeclTwo, .TaggedUnionTwo, => { - end_offset += 1; // for the rparen/rbrace - if (datas[n].rhs != 0) { - n = datas[n].rhs; - } else if (datas[n].lhs != 0) { - n = datas[n].lhs; - } else { - return main_tokens[n] + end_offset; - } - }, - .BuiltinCallTwo => { if (datas[n].rhs != 0) { end_offset += 1; // for the rparen/rbrace n = datas[n].rhs; @@ -697,7 +688,17 @@ pub const Tree = struct { end_offset += 1; // for the rparen/rbrace n = datas[n].lhs; } else { - end_offset += 2; // for the lparen and rparen + switch (tags[n]) { + .ArrayInitDotTwo, + .BlockTwo, + .StructInitDotTwo, + => end_offset += 1, // rbrace + .BuiltinCallTwo, + .ContainerDeclTwo, + => end_offset += 2, // lparen/lbrace + rparen/rbrace + .TaggedUnionTwo => end_offset += 5, // (enum) {} + else => unreachable, + } return main_tokens[n] + end_offset; } }, @@ -2708,7 +2709,7 @@ pub const Node = struct { /// `{}`. `sub_list[lhs..rhs]`. /// main_token points at the lbrace. Block, - /// Same as BlockTwo but there is known to be a semicolon before the rbrace. + /// Same as Block but there is known to be a semicolon before the rbrace. BlockSemicolon, /// `asm(lhs)`. rhs is the token index of the rparen. 
AsmSimple, From faa3fa65ac89b774b26bdd0ea4ac70861b29d0b4 Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Thu, 11 Feb 2021 22:38:59 +0100 Subject: [PATCH 051/173] zig fmt: implement line comments --- lib/std/zig/parser_test.zig | 1083 +++++++++++++++++------------------ lib/std/zig/render.zig | 166 +++--- 2 files changed, 592 insertions(+), 657 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index c6ee4f6bd9..aad9ad3ed6 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -663,7 +663,9 @@ test "zig fmt: anon list literal 3 element comma" { \\test { \\ const x = .{ \\ a, + \\ // foo \\ b, + \\ \\ c, \\ }; \\} @@ -874,20 +876,20 @@ test "zig fmt: enum literal" { ); } -//test "zig fmt: enum literal inside array literal" { -// try testCanonical( -// \\test "enums in arrays" { -// \\ var colors = []Color{.Green}; -// \\ colors = []Colors{ .Green, .Cyan }; -// \\ colors = []Colors{ -// \\ .Grey, -// \\ .Green, -// \\ .Cyan, -// \\ }; -// \\} -// \\ -// ); -//} +test "zig fmt: enum literal inside array literal" { + try testCanonical( + \\test "enums in arrays" { + \\ var colors = []Color{.Green}; + \\ colors = []Colors{ .Green, .Cyan }; + \\ colors = []Colors{ + \\ .Grey, + \\ .Green, + \\ .Cyan, + \\ }; + \\} + \\ + ); +} test "zig fmt: character literal larger than u8" { try testCanonical( @@ -954,56 +956,56 @@ test "zig fmt: linksection" { // \\ // ); //} -// -//test "zig fmt: correctly space struct fields with doc comments" { -// try testTransform( -// \\pub const S = struct { -// \\ /// A -// \\ a: u8, -// \\ /// B -// \\ /// B (cont) -// \\ b: u8, -// \\ -// \\ -// \\ /// C -// \\ c: u8, -// \\}; -// \\ -// , -// \\pub const S = struct { -// \\ /// A -// \\ a: u8, -// \\ /// B -// \\ /// B (cont) -// \\ b: u8, -// \\ -// \\ /// C -// \\ c: u8, -// \\}; -// \\ -// ); -//} -// -//test "zig fmt: doc comments on param decl" { -// try testCanonical( -// \\pub const Allocator = struct { -// \\ shrinkFn: fn ( 
-// \\ self: *Allocator, -// \\ /// Guaranteed to be the same as what was returned from most recent call to -// \\ /// `allocFn`, `reallocFn`, or `shrinkFn`. -// \\ old_mem: []u8, -// \\ /// Guaranteed to be the same as what was returned from most recent call to -// \\ /// `allocFn`, `reallocFn`, or `shrinkFn`. -// \\ old_alignment: u29, -// \\ /// Guaranteed to be less than or equal to `old_mem.len`. -// \\ new_byte_count: usize, -// \\ /// Guaranteed to be less than or equal to `old_alignment`. -// \\ new_alignment: u29, -// \\ ) []u8, -// \\}; -// \\ -// ); -//} + +test "zig fmt: correctly space struct fields with doc comments" { + try testTransform( + \\pub const S = struct { + \\ /// A + \\ a: u8, + \\ /// B + \\ /// B (cont) + \\ b: u8, + \\ + \\ + \\ /// C + \\ c: u8, + \\}; + \\ + , + \\pub const S = struct { + \\ /// A + \\ a: u8, + \\ /// B + \\ /// B (cont) + \\ b: u8, + \\ + \\ /// C + \\ c: u8, + \\}; + \\ + ); +} + +test "zig fmt: doc comments on param decl" { + try testCanonical( + \\pub const Allocator = struct { + \\ shrinkFn: fn ( + \\ self: *Allocator, + \\ /// Guaranteed to be the same as what was returned from most recent call to + \\ /// `allocFn`, `reallocFn`, or `shrinkFn`. + \\ old_mem: []u8, + \\ /// Guaranteed to be the same as what was returned from most recent call to + \\ /// `allocFn`, `reallocFn`, or `shrinkFn`. + \\ old_alignment: u29, + \\ /// Guaranteed to be less than or equal to `old_mem.len`. + \\ new_byte_count: usize, + \\ /// Guaranteed to be less than or equal to `old_alignment`. 
+ \\ new_alignment: u29, + \\ ) []u8, + \\}; + \\ + ); +} test "zig fmt: aligned struct field" { try testCanonical( @@ -1142,13 +1144,13 @@ test "zig fmt: aligned struct field" { // \\ // ); //} -// -//test "zig fmt: pointer of unknown length" { -// try testCanonical( -// \\fn foo(ptr: [*]u8) void {} -// \\ -// ); -//} + +test "zig fmt: pointer of unknown length" { + try testCanonical( + \\fn foo(ptr: [*]u8) void {} + \\ + ); +} test "zig fmt: spaces around slice operator" { try testCanonical( @@ -1370,25 +1372,25 @@ test "zig fmt: async call in if condition" { // \\ // ); //} -// -//test "zig fmt: if-else with comment before else" { -// try testCanonical( -// \\comptime { -// \\ // cexp(finite|nan +- i inf|nan) = nan + i nan -// \\ if ((hx & 0x7fffffff) != 0x7f800000) { -// \\ return Complex(f32).new(y - y, y - y); -// \\ } // cexp(-inf +- i inf|nan) = 0 + i0 -// \\ else if (hx & 0x80000000 != 0) { -// \\ return Complex(f32).new(0, 0); -// \\ } // cexp(+inf +- i inf|nan) = inf + i nan -// \\ else { -// \\ return Complex(f32).new(x, y - y); -// \\ } -// \\} -// \\ -// ); -//} -// + +test "zig fmt: if-else with comment before else" { + try testCanonical( + \\comptime { + \\ // cexp(finite|nan +- i inf|nan) = nan + i nan + \\ if ((hx & 0x7fffffff) != 0x7f800000) { + \\ return Complex(f32).new(y - y, y - y); + \\ } // cexp(-inf +- i inf|nan) = 0 + i0 + \\ else if (hx & 0x80000000 != 0) { + \\ return Complex(f32).new(0, 0); + \\ } // cexp(+inf +- i inf|nan) = inf + i nan + \\ else { + \\ return Complex(f32).new(x, y - y); + \\ } + \\} + \\ + ); +} + //test "zig fmt: if nested" { // try testCanonical( // \\pub fn foo() void { @@ -1467,17 +1469,17 @@ test "zig fmt: enum decl with no trailing comma" { ); } -//test "zig fmt: switch comment before prong" { -// try testCanonical( -// \\comptime { -// \\ switch (a) { -// \\ // hi -// \\ 0 => {}, -// \\ } -// \\} -// \\ -// ); -//} +test "zig fmt: switch comment before prong" { + try testCanonical( + \\comptime { + \\ switch 
(a) { + \\ // hi + \\ 0 => {}, + \\ } + \\} + \\ + ); +} test "zig fmt: struct literal no trailing comma" { try testTransform( @@ -1709,17 +1711,17 @@ test "zig fmt: multi line arguments without last comma" { ); } -//test "zig fmt: empty block with only comment" { -// try testCanonical( -// \\comptime { -// \\ { -// \\ // comment -// \\ } -// \\} -// \\ -// ); -//} -// +test "zig fmt: empty block with only comment" { + try testCanonical( + \\comptime { + \\ { + \\ // comment + \\ } + \\} + \\ + ); +} + //test "zig fmt: no trailing comma on struct decl" { // try testCanonical( // \\const RoundParam = struct { @@ -1781,15 +1783,15 @@ test "zig fmt: extra newlines at the end" { // \\ // ); //} -// -//test "zig fmt: nested struct literal with one item" { -// try testCanonical( -// \\const a = foo{ -// \\ .item = bar{ .a = b }, -// \\}; -// \\ -// ); -//} + +test "zig fmt: nested struct literal with one item" { + try testCanonical( + \\const a = foo{ + \\ .item = bar{ .a = b }, + \\}; + \\ + ); +} test "zig fmt: switch cases trailing comma" { try testTransform( @@ -1848,26 +1850,26 @@ test "zig fmt: slice align" { // \\ // ); //} -// -//test "zig fmt: first thing in file is line comment" { -// try testCanonical( -// \\// Introspection and determination of system libraries needed by zig. -// \\ -// \\// Introspection and determination of system libraries needed by zig. -// \\ -// \\const std = @import("std"); -// \\ -// ); -//} -// -//test "zig fmt: line comment after doc comment" { -// try testCanonical( -// \\/// doc comment -// \\// line comment -// \\fn foo() void {} -// \\ -// ); -//} + +test "zig fmt: first thing in file is line comment" { + try testCanonical( + \\// Introspection and determination of system libraries needed by zig. + \\ + \\// Introspection and determination of system libraries needed by zig. 
+ \\ + \\const std = @import("std"); + \\ + ); +} + +test "zig fmt: line comment after doc comment" { + try testCanonical( + \\/// doc comment + \\// line comment + \\fn foo() void {} + \\ + ); +} test "zig fmt: bit field alignment" { try testCanonical( @@ -1928,27 +1930,27 @@ test "zig fmt: nested blocks" { ); } -//test "zig fmt: block with same line comment after end brace" { -// try testCanonical( -// \\comptime { -// \\ { -// \\ b(); -// \\ } // comment -// \\} -// \\ -// ); -//} -// -//test "zig fmt: statements with comment between" { -// try testCanonical( -// \\comptime { -// \\ a = b; -// \\ // comment -// \\ a = b; -// \\} -// \\ -// ); -//} +test "zig fmt: block with same line comment after end brace" { + try testCanonical( + \\comptime { + \\ { + \\ b(); + \\ } // comment + \\} + \\ + ); +} + +test "zig fmt: statements with comment between" { + try testCanonical( + \\comptime { + \\ a = b; + \\ // comment + \\ a = b; + \\} + \\ + ); +} test "zig fmt: statements with empty line between" { try testCanonical( @@ -1969,60 +1971,60 @@ test "zig fmt: ptr deref operator and unwrap optional operator" { ); } -//test "zig fmt: comment after if before another if" { -// try testCanonical( -// \\test "aoeu" { -// \\ // comment -// \\ if (x) { -// \\ bar(); -// \\ } -// \\} -// \\ -// \\test "aoeu" { -// \\ if (x) { -// \\ foo(); -// \\ } -// \\ // comment -// \\ if (x) { -// \\ bar(); -// \\ } -// \\} -// \\ -// ); -//} -// -//test "zig fmt: line comment between if block and else keyword" { -// try testCanonical( -// \\test "aoeu" { -// \\ // cexp(finite|nan +- i inf|nan) = nan + i nan -// \\ if ((hx & 0x7fffffff) != 0x7f800000) { -// \\ return Complex(f32).new(y - y, y - y); -// \\ } -// \\ // cexp(-inf +- i inf|nan) = 0 + i0 -// \\ else if (hx & 0x80000000 != 0) { -// \\ return Complex(f32).new(0, 0); -// \\ } -// \\ // cexp(+inf +- i inf|nan) = inf + i nan -// \\ // another comment -// \\ else { -// \\ return Complex(f32).new(x, y - y); -// \\ } -// \\} -// \\ -// 
); -//} -// -//test "zig fmt: same line comments in expression" { -// try testCanonical( -// \\test "aoeu" { -// \\ const x = ( // a -// \\ 0 // b -// \\ ); // c -// \\} -// \\ -// ); -//} -// +test "zig fmt: comment after if before another if" { + try testCanonical( + \\test "aoeu" { + \\ // comment + \\ if (x) { + \\ bar(); + \\ } + \\} + \\ + \\test "aoeu" { + \\ if (x) { + \\ foo(); + \\ } + \\ // comment + \\ if (x) { + \\ bar(); + \\ } + \\} + \\ + ); +} + +test "zig fmt: line comment between if block and else keyword" { + try testCanonical( + \\test "aoeu" { + \\ // cexp(finite|nan +- i inf|nan) = nan + i nan + \\ if ((hx & 0x7fffffff) != 0x7f800000) { + \\ return Complex(f32).new(y - y, y - y); + \\ } + \\ // cexp(-inf +- i inf|nan) = 0 + i0 + \\ else if (hx & 0x80000000 != 0) { + \\ return Complex(f32).new(0, 0); + \\ } + \\ // cexp(+inf +- i inf|nan) = inf + i nan + \\ // another comment + \\ else { + \\ return Complex(f32).new(x, y - y); + \\ } + \\} + \\ + ); +} + +test "zig fmt: same line comments in expression" { + try testCanonical( + \\test "aoeu" { + \\ const x = ( // a + \\ 0 // b + \\ ); // c + \\} + \\ + ); +} + //test "zig fmt: add comma on last switch prong" { // try testTransform( // \\test "aoeu" { @@ -2051,70 +2053,70 @@ test "zig fmt: ptr deref operator and unwrap optional operator" { // \\ // ); //} -// -//test "zig fmt: same-line comment after a statement" { -// try testCanonical( -// \\test "" { -// \\ a = b; -// \\ debug.assert(H.digest_size <= H.block_size); // HMAC makes this assumption -// \\ a = b; -// \\} -// \\ -// ); -//} -// -//test "zig fmt: same-line comment after var decl in struct" { -// try testCanonical( -// \\pub const vfs_cap_data = extern struct { -// \\ const Data = struct {}; // when on disk. 
-// \\}; -// \\ -// ); -//} -// -//test "zig fmt: same-line comment after field decl" { -// try testCanonical( -// \\pub const dirent = extern struct { -// \\ d_name: u8, -// \\ d_name: u8, // comment 1 -// \\ d_name: u8, -// \\ d_name: u8, // comment 2 -// \\ d_name: u8, -// \\}; -// \\ -// ); -//} -// -//test "zig fmt: same-line comment after switch prong" { -// try testCanonical( -// \\test "" { -// \\ switch (err) { -// \\ error.PathAlreadyExists => {}, // comment 2 -// \\ else => return err, // comment 1 -// \\ } -// \\} -// \\ -// ); -//} -// -//test "zig fmt: same-line comment after non-block if expression" { -// try testCanonical( -// \\comptime { -// \\ if (sr > n_uword_bits - 1) // d > r -// \\ return 0; -// \\} -// \\ -// ); -//} -// -//test "zig fmt: same-line comment on comptime expression" { -// try testCanonical( -// \\test "" { -// \\ comptime assert(@typeInfo(T) == .Int); // must pass an integer to absInt -// \\} -// \\ -// ); -//} + +test "zig fmt: same-line comment after a statement" { + try testCanonical( + \\test "" { + \\ a = b; + \\ debug.assert(H.digest_size <= H.block_size); // HMAC makes this assumption + \\ a = b; + \\} + \\ + ); +} + +test "zig fmt: same-line comment after var decl in struct" { + try testCanonical( + \\pub const vfs_cap_data = extern struct { + \\ const Data = struct {}; // when on disk. 
+ \\}; + \\ + ); +} + +test "zig fmt: same-line comment after field decl" { + try testCanonical( + \\pub const dirent = extern struct { + \\ d_name: u8, + \\ d_name: u8, // comment 1 + \\ d_name: u8, + \\ d_name: u8, // comment 2 + \\ d_name: u8, + \\}; + \\ + ); +} + +test "zig fmt: same-line comment after switch prong" { + try testCanonical( + \\test "" { + \\ switch (err) { + \\ error.PathAlreadyExists => {}, // comment 2 + \\ else => return err, // comment 1 + \\ } + \\} + \\ + ); +} + +test "zig fmt: same-line comment after non-block if expression" { + try testCanonical( + \\comptime { + \\ if (sr > n_uword_bits - 1) // d > r + \\ return 0; + \\} + \\ + ); +} + +test "zig fmt: same-line comment on comptime expression" { + try testCanonical( + \\test "" { + \\ comptime assert(@typeInfo(T) == .Int); // must pass an integer to absInt + \\} + \\ + ); +} test "zig fmt: switch with empty body" { try testCanonical( @@ -2125,53 +2127,52 @@ test "zig fmt: switch with empty body" { ); } -//test "zig fmt: line comments in struct initializer" { -// try testCanonical( -// \\fn foo() void { -// \\ return Self{ -// \\ .a = b, -// \\ -// \\ // Initialize these two fields to buffer_size so that -// \\ // in `readFn` we treat the state as being able to read -// \\ .start_index = buffer_size, -// \\ .end_index = buffer_size, -// \\ -// \\ // middle -// \\ -// \\ .a = b, -// \\ -// \\ // end -// \\ }; -// \\} -// \\ -// ); -//} -// -//test "zig fmt: first line comment in struct initializer" { -// try testCanonical( -// \\pub fn acquire(self: *Self) HeldLock { -// \\ return HeldLock{ -// \\ // guaranteed allocation elision -// \\ .held = self.lock.acquire(), -// \\ .value = &self.private_data, -// \\ }; -// \\} -// \\ -// ); -//} -// -//test "zig fmt: doc comments before struct field" { -// try testCanonical( -// \\pub const Allocator = struct { -// \\ /// Allocate byte_count bytes and return them in a slice, with the -// \\ /// slice's pointer aligned at least to alignment bytes. 
-// \\ allocFn: fn () void, -// \\}; -// \\ -// ); -//} +test "zig fmt: line comments in struct initializer" { + try testCanonical( + \\fn foo() void { + \\ return Self{ + \\ .a = b, + \\ + \\ // Initialize these two fields to buffer_size so that + \\ // in `readFn` we treat the state as being able to read + \\ .start_index = buffer_size, + \\ .end_index = buffer_size, + \\ + \\ // middle + \\ + \\ .a = b, + \\ + \\ // end + \\ }; + \\} + \\ + ); +} + +test "zig fmt: first line comment in struct initializer" { + try testCanonical( + \\pub fn acquire(self: *Self) HeldLock { + \\ return HeldLock{ + \\ // guaranteed allocation elision + \\ .held = self.lock.acquire(), + \\ .value = &self.private_data, + \\ }; + \\} + \\ + ); +} + +test "zig fmt: doc comments before struct field" { + try testCanonical( + \\pub const Allocator = struct { + \\ /// Allocate byte_count bytes and return them in a slice, with the + \\ /// slice's pointer aligned at least to alignment bytes. + \\ allocFn: fn () void, + \\}; + \\ + ); +} -// TODO: replace this with the next test case when possible test "zig fmt: error set declaration" { try testCanonical( \\const E = error{ @@ -2180,58 +2181,30 @@ test "zig fmt: error set declaration" { \\ \\ C, \\}; + \\ \\const Error = error{ \\ /// no more memory \\ OutOfMemory, \\}; + \\ \\const Error = error{ \\ /// no more memory \\ OutOfMemory, \\ \\ /// another \\ Another, - \\ /// and one more - \\ Another, + \\ + \\ // end \\}; + \\ \\const Error = error{OutOfMemory}; \\const Error = error{}; + \\ \\const Error = error{ OutOfMemory, OutOfTime }; \\ ); } -//test "zig fmt: error set declaration" { -// try testCanonical( -// \\const E = error{ -// \\ A, -// \\ B, -// \\ -// \\ C, -// \\}; -// \\ -// \\const Error = error{ -// \\ /// no more memory -// \\ OutOfMemory, -// \\}; -// \\ -// \\const Error = error{ -// \\ /// no more memory -// \\ OutOfMemory, -// \\ -// \\ /// another -// \\ Another, -// \\ -// \\ // end -// \\}; -// \\ -// \\const Error = 
error{OutOfMemory}; -// \\const Error = error{}; -// \\ -// \\const Error = error{ OutOfMemory, OutOfTime }; -// \\ -// ); -//} - test "zig fmt: union(enum(u32)) with assigned enum values" { try testCanonical( \\const MultipleChoice = union(enum(u32)) { @@ -2255,110 +2228,110 @@ test "zig fmt: resume from suspend block" { ); } -//test "zig fmt: comments before error set decl" { -// try testCanonical( -// \\const UnexpectedError = error{ -// \\ /// The Operating System returned an undocumented error code. -// \\ Unexpected, -// \\ // another -// \\ Another, -// \\ -// \\ // in between -// \\ -// \\ // at end -// \\}; -// \\ -// ); -//} -// -//test "zig fmt: comments before switch prong" { -// try testCanonical( -// \\test "" { -// \\ switch (err) { -// \\ error.PathAlreadyExists => continue, -// \\ -// \\ // comment 1 -// \\ -// \\ // comment 2 -// \\ else => return err, -// \\ // at end -// \\ } -// \\} -// \\ -// ); -//} -// -//test "zig fmt: comments before var decl in struct" { -// try testCanonical( -// \\pub const vfs_cap_data = extern struct { -// \\ // All of these are mandated as little endian -// \\ // when on disk. -// \\ const Data = struct { -// \\ permitted: u32, -// \\ inheritable: u32, -// \\ }; -// \\ -// \\ // in between -// \\ -// \\ /// All of these are mandated as little endian -// \\ /// when on disk. -// \\ const Data = struct { -// \\ permitted: u32, -// \\ inheritable: u32, -// \\ }; -// \\ -// \\ // at end -// \\}; -// \\ -// ); -//} -// -//test "zig fmt: array literal with 1 item on 1 line" { -// try testCanonical( -// \\var s = []const u64{0} ** 25; -// \\ -// ); -//} -// -//test "zig fmt: comments before global variables" { -// try testCanonical( -// \\/// Foo copies keys and values before they go into the map, and -// \\/// frees them when they get removed. 
-// \\pub const Foo = struct {}; -// \\ -// ); -//} -// -//test "zig fmt: comments in statements" { -// try testCanonical( -// \\test "std" { -// \\ // statement comment -// \\ _ = @import("foo/bar.zig"); -// \\ -// \\ // middle -// \\ // middle2 -// \\ -// \\ // end -// \\} -// \\ -// ); -//} -// -//test "zig fmt: comments before test decl" { -// try testCanonical( -// \\/// top level doc comment -// \\test "hi" {} -// \\ -// \\// top level normal comment -// \\test "hi" {} -// \\ -// \\// middle -// \\ -// \\// end -// \\ -// ); -//} -// +test "zig fmt: comments before error set decl" { + try testCanonical( + \\const UnexpectedError = error{ + \\ /// The Operating System returned an undocumented error code. + \\ Unexpected, + \\ // another + \\ Another, + \\ + \\ // in between + \\ + \\ // at end + \\}; + \\ + ); +} + +test "zig fmt: comments before switch prong" { + try testCanonical( + \\test "" { + \\ switch (err) { + \\ error.PathAlreadyExists => continue, + \\ + \\ // comment 1 + \\ + \\ // comment 2 + \\ else => return err, + \\ // at end + \\ } + \\} + \\ + ); +} + +test "zig fmt: comments before var decl in struct" { + try testCanonical( + \\pub const vfs_cap_data = extern struct { + \\ // All of these are mandated as little endian + \\ // when on disk. + \\ const Data = struct { + \\ permitted: u32, + \\ inheritable: u32, + \\ }; + \\ + \\ // in between + \\ + \\ /// All of these are mandated as little endian + \\ /// when on disk. + \\ const Data = struct { + \\ permitted: u32, + \\ inheritable: u32, + \\ }; + \\ + \\ // at end + \\}; + \\ + ); +} + +test "zig fmt: array literal with 1 item on 1 line" { + try testCanonical( + \\var s = []const u64{0} ** 25; + \\ + ); +} + +test "zig fmt: comments before global variables" { + try testCanonical( + \\/// Foo copies keys and values before they go into the map, and + \\/// frees them when they get removed. 
+ \\pub const Foo = struct {}; + \\ + ); +} + +test "zig fmt: comments in statements" { + try testCanonical( + \\test "std" { + \\ // statement comment + \\ _ = @import("foo/bar.zig"); + \\ + \\ // middle + \\ // middle2 + \\ + \\ // end + \\} + \\ + ); +} + +test "zig fmt: comments before test decl" { + try testCanonical( + \\/// top level doc comment + \\test "hi" {} + \\ + \\// top level normal comment + \\test "hi" {} + \\ + \\// middle + \\ + \\// end + \\ + ); +} + //test "zig fmt: preserve spacing" { // try testCanonical( // \\const std = @import("std"); @@ -2373,7 +2346,7 @@ test "zig fmt: resume from suspend block" { // \\ // ); //} -// + //test "zig fmt: return types" { // try testCanonical( // \\pub fn main() !void {} @@ -2798,43 +2771,43 @@ test "zig fmt: union declaration" { ); } -//test "zig fmt: arrays" { -// try testCanonical( -// \\test "test array" { -// \\ const a: [2]u8 = [2]u8{ -// \\ 1, -// \\ 2, -// \\ }; -// \\ const a: [2]u8 = []u8{ -// \\ 1, -// \\ 2, -// \\ }; -// \\ const a: [0]u8 = []u8{}; -// \\ const x: [4:0]u8 = undefined; -// \\} -// \\ -// ); -//} +test "zig fmt: arrays" { + try testCanonical( + \\test "test array" { + \\ const a: [2]u8 = [2]u8{ + \\ 1, + \\ 2, + \\ }; + \\ const a: [2]u8 = []u8{ + \\ 1, + \\ 2, + \\ }; + \\ const a: [0]u8 = []u8{}; + \\ const x: [4:0]u8 = undefined; + \\} + \\ + ); +} -//test "zig fmt: container initializers" { -// try testCanonical( -// \\const a0 = []u8{}; -// \\const a1 = []u8{1}; -// \\const a2 = []u8{ -// \\ 1, -// \\ 2, -// \\ 3, -// \\ 4, -// \\}; -// \\const s0 = S{}; -// \\const s1 = S{ .a = 1 }; -// \\const s2 = S{ -// \\ .a = 1, -// \\ .b = 2, -// \\}; -// \\ -// ); -//} +test "zig fmt: container initializers" { + try testCanonical( + \\const a0 = []u8{}; + \\const a1 = []u8{1}; + \\const a2 = []u8{ + \\ 1, + \\ 2, + \\ 3, + \\ 4, + \\}; + \\const s0 = S{}; + \\const s1 = S{ .a = 1 }; + \\const s2 = S{ + \\ .a = 1, + \\ .b = 2, + \\}; + \\ + ); +} test "zig fmt: catch" { try 
testCanonical( @@ -3563,62 +3536,62 @@ test "zig fmt: integer literals with underscore separators" { ); } -//test "zig fmt: hex literals with underscore separators" { -// try testTransform( -// \\pub fn orMask(a: [ 1_000 ]u64, b: [ 1_000] u64) [1_000]u64 { -// \\ var c: [1_000]u64 = [1]u64{ 0xFFFF_FFFF_FFFF_FFFF}**1_000; -// \\ for (c [ 0_0 .. ]) |_, i| { -// \\ c[i] = (a[i] | b[i]) & 0xCCAA_CCAA_CCAA_CCAA; -// \\ } -// \\ return c; -// \\} -// \\ -// \\ -// , -// \\pub fn orMask(a: [1_000]u64, b: [1_000]u64) [1_000]u64 { -// \\ var c: [1_000]u64 = [1]u64{0xFFFF_FFFF_FFFF_FFFF} ** 1_000; -// \\ for (c[0_0..]) |_, i| { -// \\ c[i] = (a[i] | b[i]) & 0xCCAA_CCAA_CCAA_CCAA; -// \\ } -// \\ return c; -// \\} -// \\ -// ); -//} +test "zig fmt: hex literals with underscore separators" { + try testTransform( + \\pub fn orMask(a: [ 1_000 ]u64, b: [ 1_000] u64) [1_000]u64 { + \\ var c: [1_000]u64 = [1]u64{ 0xFFFF_FFFF_FFFF_FFFF}**1_000; + \\ for (c [ 0_0 .. ]) |_, i| { + \\ c[i] = (a[i] | b[i]) & 0xCCAA_CCAA_CCAA_CCAA; + \\ } + \\ return c; + \\} + \\ + \\ + , + \\pub fn orMask(a: [1_000]u64, b: [1_000]u64) [1_000]u64 { + \\ var c: [1_000]u64 = [1]u64{0xFFFF_FFFF_FFFF_FFFF} ** 1_000; + \\ for (c[0_0..]) |_, i| { + \\ c[i] = (a[i] | b[i]) & 0xCCAA_CCAA_CCAA_CCAA; + \\ } + \\ return c; + \\} + \\ + ); +} -//test "zig fmt: decimal float literals with underscore separators" { -// try testTransform( -// \\pub fn main() void { -// \\ const a:f64=(10.0e-0+(10.e+0))+10_00.00_00e-2+00_00.00_10e+4; -// \\ const b:f64=010.0--0_10.+0_1_0.0_0+1e2; -// \\ std.debug.warn("a: {}, b: {} -> a+b: {}\n", .{ a, b, a + b }); -// \\} -// , -// \\pub fn main() void { -// \\ const a: f64 = (10.0e-0 + (10.e+0)) + 10_00.00_00e-2 + 00_00.00_10e+4; -// \\ const b: f64 = 010.0 - -0_10. 
+ 0_1_0.0_0 + 1e2; -// \\ std.debug.warn("a: {}, b: {} -> a+b: {}\n", .{ a, b, a + b }); -// \\} -// \\ -// ); -//} +test "zig fmt: decimal float literals with underscore separators" { + try testTransform( + \\pub fn main() void { + \\ const a:f64=(10.0e-0+(10.e+0))+10_00.00_00e-2+00_00.00_10e+4; + \\ const b:f64=010.0--0_10.+0_1_0.0_0+1e2; + \\ std.debug.warn("a: {}, b: {} -> a+b: {}\n", .{ a, b, a + b }); + \\} + , + \\pub fn main() void { + \\ const a: f64 = (10.0e-0 + (10.e+0)) + 10_00.00_00e-2 + 00_00.00_10e+4; + \\ const b: f64 = 010.0 - -0_10. + 0_1_0.0_0 + 1e2; + \\ std.debug.warn("a: {}, b: {} -> a+b: {}\n", .{ a, b, a + b }); + \\} + \\ + ); +} -//test "zig fmt: hexadeciaml float literals with underscore separators" { -// try testTransform( -// \\pub fn main() void { -// \\ const a: f64 = (0x10.0p-0+(0x10.p+0))+0x10_00.00_00p-8+0x00_00.00_10p+16; -// \\ const b: f64 = 0x0010.0--0x00_10.+0x10.00+0x1p4; -// \\ std.debug.warn("a: {}, b: {} -> a+b: {}\n", .{ a, b, a + b }); -// \\} -// , -// \\pub fn main() void { -// \\ const a: f64 = (0x10.0p-0 + (0x10.p+0)) + 0x10_00.00_00p-8 + 0x00_00.00_10p+16; -// \\ const b: f64 = 0x0010.0 - -0x00_10. + 0x10.00 + 0x1p4; -// \\ std.debug.warn("a: {}, b: {} -> a+b: {}\n", .{ a, b, a + b }); -// \\} -// \\ -// ); -//} +test "zig fmt: hexadeciaml float literals with underscore separators" { + try testTransform( + \\pub fn main() void { + \\ const a: f64 = (0x10.0p-0+(0x10.p+0))+0x10_00.00_00p-8+0x00_00.00_10p+16; + \\ const b: f64 = 0x0010.0--0x00_10.+0x10.00+0x1p4; + \\ std.debug.warn("a: {}, b: {} -> a+b: {}\n", .{ a, b, a + b }); + \\} + , + \\pub fn main() void { + \\ const a: f64 = (0x10.0p-0 + (0x10.p+0)) + 0x10_00.00_00p-8 + 0x00_00.00_10p+16; + \\ const b: f64 = 0x0010.0 - -0x00_10. 
+ 0x10.00 + 0x1p4; + \\ std.debug.warn("a: {}, b: {} -> a+b: {}\n", .{ a, b, a + b }); + \\} + \\ + ); +} //test "zig fmt: C var args" { // try testCanonical( diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 051b3f46b1..4ffbaeff19 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -37,66 +37,56 @@ pub fn render(gpa: *mem.Allocator, tree: ast.Tree) Error![]u8 { return buffer.toOwnedSlice(); } -/// Assumes there are no tokens in between start and end. -fn renderComments(ais: *Ais, tree: ast.Tree, start: usize, end: usize, prefix: []const u8) Error!usize { +/// Assumes that start is the first byte past the previous token and +/// that end is the last byte before the next token. +fn renderCommentsAndNewlines(ais: *Ais, tree: ast.Tree, start: usize, end: usize) Error!bool { var index: usize = start; - var count: usize = 0; - while (true) { - const comment_start = index + - (mem.indexOf(u8, tree.source[index..end], "//") orelse return count); + while (mem.indexOf(u8, tree.source[index..end], "//")) |offset| { + const comment_start = index + offset; const newline = comment_start + mem.indexOfScalar(u8, tree.source[comment_start..end], '\n').?; const untrimmed_comment = tree.source[comment_start..newline]; const trimmed_comment = mem.trimRight(u8, untrimmed_comment, " \r\t"); - if (count == 0) { - count += 1; - try ais.writer().writeAll(prefix); - } else { - // If another newline occurs between prev comment and this one - // we honor it, but not any additional ones. - if (mem.indexOfScalar(u8, tree.source[index..comment_start], '\n') != null) { - try ais.insertNewline(); - } + + // Leave up to one empty line before the comment + if (index == start and mem.containsAtLeast(u8, tree.source[index..comment_start], 2, "\n")) { + try ais.insertNewline(); + try ais.insertNewline(); + } else if (mem.indexOfScalar(u8, tree.source[index..comment_start], '\n') != null) { + // Respect the newline directly before the comment. 
This allows an + // empty line between comments + try ais.insertNewline(); + } else if (index == start and start != 0) { + // If the comment is on the same line as the token before it, + // prefix it with a single space + try ais.writer().writeByte(' '); } + try ais.writer().print("{s}\n", .{trimmed_comment}); index = newline + 1; } + + // Leave up to one empty line if present in the source + if (index > start) index -= 1; + if (end != tree.source.len and mem.containsAtLeast(u8, tree.source[index..end], 2, "\n")) { + try ais.insertNewline(); + } + + return index != start; } fn renderRoot(ais: *Ais, tree: ast.Tree) Error!void { // Render all the line comments at the beginning of the file. const src_start: usize = if (mem.startsWith(u8, tree.source, "\xEF\xBB\xBF")) 3 else 0; const comment_end_loc: usize = tree.tokens.items(.start)[0]; - _ = try renderComments(ais, tree, src_start, comment_end_loc, ""); + _ = try renderCommentsAndNewlines(ais, tree, src_start, comment_end_loc); // Root is always index 0. 
const nodes_data = tree.nodes.items(.data); const root_decls = tree.extra_data[nodes_data[0].lhs..nodes_data[0].rhs]; - return renderAllMembers(ais, tree, root_decls); -} - -fn renderAllMembers(ais: *Ais, tree: ast.Tree, members: []const ast.Node.Index) Error!void { - if (members.len == 0) return; - - const first_member = members[0]; - try renderMember(ais, tree, first_member, .Newline); - - for (members[1..]) |member| { - try renderExtraNewline(ais, tree, member); - try renderMember(ais, tree, member, .Newline); - } -} - -fn renderExtraNewline(ais: *Ais, tree: ast.Tree, node: ast.Node.Index) Error!void { - return renderExtraNewlineToken(ais, tree, tree.firstToken(node)); -} - -fn renderExtraNewlineToken(ais: *Ais, tree: ast.Tree, first_token: ast.TokenIndex) Error!void { - if (first_token == 0) return; - const token_starts = tree.tokens.items(.start); - if (tree.tokenLocation(token_starts[first_token - 1], first_token).line >= 2) { - return ais.insertNewline(); + for (root_decls) |decl| { + try renderMember(ais, tree, decl, .Newline); } } @@ -499,9 +489,11 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac }, .GroupedExpression => { - try renderToken(ais, tree, main_tokens[node], .None); + ais.pushIndentNextLine(); + try renderToken(ais, tree, main_tokens[node], .None); // lparen try renderExpression(ais, tree, datas[node].lhs, .None); - return renderToken(ais, tree, datas[node].rhs, space); + ais.popIndent(); + return renderToken(ais, tree, datas[node].rhs, space); // rparen }, .ContainerDecl, @@ -552,7 +544,6 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac ais.pushIndent(); var i = lbrace + 1; while (i < rbrace) : (i += 1) { - try renderExtraNewlineToken(ais, tree, i); switch (token_tags[i]) { .DocComment => try renderToken(ais, tree, i, .Newline), .Identifier => try renderToken(ais, tree, i, .Comma), @@ -635,11 +626,9 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, 
space: Spac try renderToken(ais, tree, rparen + 1, .None); // lbrace return renderToken(ais, tree, rparen + 2, space); // rbrace } + ais.pushIndentNextLine(); try renderToken(ais, tree, rparen + 1, .Newline); // lbrace - ais.pushIndent(); - try renderExpression(ais, tree, cases[0], .Comma); - for (cases[1..]) |case| { - try renderExtraNewline(ais, tree, case); + for (cases) |case| { try renderExpression(ais, tree, case, .Comma); } ais.popIndent(); @@ -1469,9 +1458,7 @@ fn renderSwitchCase( try renderExpression(ais, tree, switch_case.ast.values[0], .Space); } else if (trailing_comma) { // Render each value on a new line - try renderExpression(ais, tree, switch_case.ast.values[0], .Comma); - for (switch_case.ast.values[1..]) |value_expr| { - try renderExtraNewline(ais, tree, value_expr); + for (switch_case.ast.values) |value_expr| { try renderExpression(ais, tree, value_expr, .Comma); } } else { @@ -1519,12 +1506,14 @@ fn renderBlock( } if (statements.len == 0) { + ais.pushIndentNextLine(); try renderToken(ais, tree, lbrace, .None); + ais.popIndent(); return renderToken(ais, tree, lbrace + 1, space); // rbrace } + ais.pushIndentNextLine(); try renderToken(ais, tree, lbrace, .Newline); - ais.pushIndent(); for (statements) |stmt, i| { switch (node_tags[stmt]) { .GlobalVarDecl => try renderVarDecl(ais, tree, tree.globalVarDecl(stmt)), @@ -1533,9 +1522,6 @@ fn renderBlock( .AlignedVarDecl => try renderVarDecl(ais, tree, tree.alignedVarDecl(stmt)), else => try renderExpression(ais, tree, stmt, .Semicolon), } - if (i + 1 < statements.len) { - try renderExtraNewline(ais, tree, statements[i + 1]); - } } ais.popIndent(); @@ -1566,18 +1552,14 @@ fn renderStructInit( ais.pushIndent(); try renderToken(ais, tree, struct_init.ast.lbrace, .Newline); - try renderToken(ais, tree, struct_init.ast.lbrace + 1, .None); // . 
- try renderToken(ais, tree, struct_init.ast.lbrace + 2, .Space); // name - try renderToken(ais, tree, struct_init.ast.lbrace + 3, .Space); // = - try renderExpression(ais, tree, struct_init.ast.fields[0], .Comma); - - for (struct_init.ast.fields[1..]) |field_init| { + for (struct_init.ast.fields) |field_init| { const init_token = tree.firstToken(field_init); try renderToken(ais, tree, init_token - 3, .None); // . try renderToken(ais, tree, init_token - 2, .Space); // name try renderToken(ais, tree, init_token - 1, .Space); // = - try renderExpressionNewlined(ais, tree, field_init, .Comma); + try renderExpression(ais, tree, field_init, .Comma); } + ais.popIndent(); return renderToken(ais, tree, last_field_token + 2, space); // rbrace } else { @@ -1620,9 +1602,8 @@ fn renderArrayInit( ais.pushIndent(); try renderToken(ais, tree, array_init.ast.lbrace, .Newline); - try renderExpression(ais, tree, array_init.ast.elements[0], .Comma); - for (array_init.ast.elements[1..]) |elem| { - try renderExpressionNewlined(ais, tree, elem, .Comma); + for (array_init.ast.elements) |elem| { + try renderExpression(ais, tree, elem, .Comma); } ais.popIndent(); @@ -1693,7 +1674,7 @@ fn renderContainerDecl( const last_member_token = tree.lastToken(last_member); const rbrace = switch (token_tags[last_member_token + 1]) { .DocComment => last_member_token + 2, - .Comma => switch (token_tags[last_member_token + 2]) { + .Comma, .Semicolon => switch (token_tags[last_member_token + 2]) { .DocComment => last_member_token + 3, .RBrace => last_member_token + 2, else => unreachable, @@ -1719,7 +1700,9 @@ fn renderContainerDecl( // One member per line. 
ais.pushIndent(); try renderToken(ais, tree, lbrace, .Newline); // lbrace - try renderAllMembers(ais, tree, container_decl.ast.members); + for (container_decl.ast.members) |member| { + try renderMember(ais, tree, member, .Newline); + } ais.popIndent(); return renderToken(ais, tree, rbrace, space); // rbrace @@ -1781,7 +1764,6 @@ fn renderAsm( const comma = tree.firstToken(next_asm_output) - 1; try renderToken(ais, tree, comma, .Newline); // , - try renderExtraNewlineToken(ais, tree, tree.firstToken(next_asm_output)); } else if (asm_node.inputs.len == 0 and asm_node.first_clobber == null) { try renderAsmOutput(ais, tree, asm_output, .Newline); ais.popIndent(); @@ -1813,7 +1795,6 @@ fn renderAsm( const first_token = tree.firstToken(next_asm_input); try renderToken(ais, tree, first_token - 1, .Newline); // , - try renderExtraNewlineToken(ais, tree, first_token); } else if (asm_node.first_clobber == null) { try renderAsmInput(ais, tree, asm_input, .Newline); ais.popIndent(); @@ -1894,8 +1875,6 @@ fn renderCall( try renderToken(ais, tree, comma, Space.Newline); // , if (is_multiline_string) ais.pushIndent(); - - try renderExtraNewline(ais, tree, params[i + 1]); } else { try renderExpression(ais, tree, param_node, Space.Comma); } @@ -1929,22 +1908,6 @@ fn renderExpressionComma(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: } } -/// Render an expression, but first insert an extra newline if the previous token is 2 or -/// more lines away. 
-fn renderExpressionNewlined( - ais: *Ais, - tree: ast.Tree, - node: ast.Node.Index, - space: Space, -) Error!void { - const token_starts = tree.tokens.items(.start); - const first_token = tree.firstToken(node); - if (tree.tokenLocation(token_starts[first_token - 1], first_token).line >= 2) { - try ais.insertNewline(); - } - return renderExpression(ais, tree, node, space); -} - fn renderTokenComma(ais: *Ais, tree: ast.Tree, token: ast.TokenIndex, space: Space) Error!void { const token_tags = tree.tokens.items(.tag); const maybe_comma = token + 1; @@ -1996,40 +1959,39 @@ fn renderToken(ais: *Ais, tree: ast.Tree, token_index: ast.TokenIndex, space: Sp switch (space) { .NoComment => {}, - .None => {}, + .None => _ = try renderCommentsAndNewlines(ais, tree, token_start + lexeme.len, token_starts[token_index + 1]), .Comma => { - const count = try renderComments(ais, tree, token_start + lexeme.len, token_starts[token_index + 1], ", "); - if (count == 0 and token_tags[token_index + 1] == .Comma) { - return renderToken(ais, tree, token_index + 1, Space.Newline); - } - try ais.writer().writeAll(","); - - if (token_tags[token_index + 2] != .MultilineStringLiteralLine) { - try ais.insertNewline(); + const comment = try renderCommentsAndNewlines(ais, tree, token_start + lexeme.len, token_starts[token_index + 1]); + if (token_tags[token_index + 1] == .Comma) { + return renderToken(ais, tree, token_index + 1, .Newline); + } else if (!comment) { + return ais.insertNewline(); } }, .CommaSpace => { - _ = try renderComments(ais, tree, token_start + lexeme.len, token_starts[token_index + 1], ""); + const comment = try renderCommentsAndNewlines(ais, tree, token_start + lexeme.len, token_starts[token_index + 1]); if (token_tags[token_index + 1] == .Comma) { return renderToken(ais, tree, token_index + 1, .Space); - } else { + } else if (!comment) { return ais.writer().writeByte(' '); } }, .Semicolon => { - _ = try renderComments(ais, tree, token_start + lexeme.len, 
token_starts[token_index + 1], ""); + const comment = try renderCommentsAndNewlines(ais, tree, token_start + lexeme.len, token_starts[token_index + 1]); if (token_tags[token_index + 1] == .Semicolon) { return renderToken(ais, tree, token_index + 1, .Newline); - } else { + } else if (!comment) { return ais.insertNewline(); } }, .Space => { - _ = try renderComments(ais, tree, token_start + lexeme.len, token_starts[token_index + 1], ""); - return ais.writer().writeByte(' '); + const comment = try renderCommentsAndNewlines(ais, tree, token_start + lexeme.len, token_starts[token_index + 1]); + if (!comment) { + return ais.writer().writeByte(' '); + } }, .Newline => { - if (token_tags[token_index + 1] != .MultilineStringLiteralLine) { + if (!try renderCommentsAndNewlines(ais, tree, token_start + lexeme.len, token_starts[token_index + 1])) { try ais.insertNewline(); } }, From e2289961c6b53451e5bdcdfb6eec27e27f6553ca Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Fri, 12 Feb 2021 01:14:04 +0200 Subject: [PATCH 052/173] snake_case Token.Tag --- lib/std/zig/ast.zig | 112 +-- lib/std/zig/parse.zig | 992 ++++++++++++------------ lib/std/zig/render.zig | 154 ++-- lib/std/zig/tokenizer.zig | 1546 ++++++++++++++++++------------------- 4 files changed, 1402 insertions(+), 1402 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index ad96baf3f3..6a56c0e242 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -343,7 +343,7 @@ pub const Tree = struct { .ContainerField, => { const name_token = main_tokens[n]; - if (name_token > 0 and token_tags[name_token - 1] == .Keyword_comptime) { + if (name_token > 0 and token_tags[name_token - 1] == .keyword_comptime) { end_offset += 1; } return name_token - end_offset; @@ -358,12 +358,12 @@ pub const Tree = struct { while (i > 0) { i -= 1; switch (token_tags[i]) { - .Keyword_extern, - .Keyword_export, - .Keyword_comptime, - .Keyword_pub, - .Keyword_threadlocal, - .StringLiteral, + .keyword_extern, + 
.keyword_export, + .keyword_comptime, + .keyword_pub, + .keyword_threadlocal, + .string_literal, => continue, else => return i + 1 - end_offset, @@ -379,7 +379,7 @@ pub const Tree = struct { => { // Look for a label. const lbrace = main_tokens[n]; - if (token_tags[lbrace - 1] == .Colon) { + if (token_tags[lbrace - 1] == .colon) { end_offset += 2; } return lbrace - end_offset; @@ -400,7 +400,7 @@ pub const Tree = struct { => { const main_token = main_tokens[n]; switch (token_tags[main_token - 1]) { - .Keyword_packed, .Keyword_extern => end_offset += 1, + .keyword_packed, .keyword_extern => end_offset += 1, else => {}, } return main_token - end_offset; @@ -413,13 +413,13 @@ pub const Tree = struct { => { const main_token = main_tokens[n]; return switch (token_tags[main_token]) { - .Asterisk, - .AsteriskAsterisk, + .asterisk, + .asterisk_asterisk, => switch (token_tags[main_token - 1]) { - .LBracket => main_token - 1, + .l_bracket => main_token - 1, else => main_token, }, - .LBracket => main_token, + .l_bracket => main_token, else => unreachable, } - end_offset; }, @@ -438,7 +438,7 @@ pub const Tree = struct { }, .AsmOutput, .AsmInput => { - assert(token_tags[main_tokens[n] - 1] == .LBracket); + assert(token_tags[main_tokens[n] - 1] == .l_bracket); return main_tokens[n] - 1 - end_offset; }, @@ -450,7 +450,7 @@ pub const Tree = struct { => { const main_token = main_tokens[n]; return switch (token_tags[main_token - 1]) { - .Keyword_inline => main_token - 1, + .keyword_inline => main_token - 1, else => main_token, } - end_offset; }, @@ -1599,11 +1599,11 @@ pub const Tree = struct { while (i > 0) { i -= 1; switch (token_tags[i]) { - .Keyword_extern, .Keyword_export => result.extern_export_token = i, - .Keyword_comptime => result.comptime_token = i, - .Keyword_pub => result.visib_token = i, - .Keyword_threadlocal => result.threadlocal_token = i, - .StringLiteral => result.lib_name = i, + .keyword_extern, .keyword_export => result.extern_export_token = i, + 
.keyword_comptime => result.comptime_token = i, + .keyword_pub => result.visib_token = i, + .keyword_threadlocal => result.threadlocal_token = i, + .string_literal => result.lib_name = i, else => break, } } @@ -1621,14 +1621,14 @@ pub const Tree = struct { // if (cond_expr) |x| // ^ ^ const payload_pipe = tree.lastToken(info.cond_expr) + 2; - if (token_tags[payload_pipe] == .Pipe) { + if (token_tags[payload_pipe] == .pipe) { result.payload_token = payload_pipe + 1; } if (info.else_expr != 0) { // then_expr else |x| // ^ ^ result.else_token = tree.lastToken(info.then_expr) + 1; - if (token_tags[result.else_token + 1] == .Pipe) { + if (token_tags[result.else_token + 1] == .pipe) { result.error_token = result.else_token + 2; } } @@ -1643,7 +1643,7 @@ pub const Tree = struct { }; // comptime name: type = init, // ^ - if (info.name_token > 0 and token_tags[info.name_token - 1] == .Keyword_comptime) { + if (info.name_token > 0 and token_tags[info.name_token - 1] == .keyword_comptime) { result.comptime_token = info.name_token - 1; } return result; @@ -1671,17 +1671,17 @@ pub const Tree = struct { // literals in some places here const Kind = full.PtrType.Kind; const kind: Kind = switch (token_tags[info.main_token]) { - .Asterisk, - .AsteriskAsterisk, + .asterisk, + .asterisk_asterisk, => switch (token_tags[info.main_token + 1]) { - .RBracket => .many, - .Colon => .sentinel, - .Identifier => if (token_tags[info.main_token - 1] == .LBracket) Kind.c else .one, + .r_bracket => .many, + .colon => .sentinel, + .identifier => if (token_tags[info.main_token - 1] == .l_bracket) Kind.c else .one, else => .one, }, - .LBracket => switch (token_tags[info.main_token + 1]) { - .RBracket => Kind.slice, - .Colon => .slice_sentinel, + .l_bracket => switch (token_tags[info.main_token + 1]) { + .r_bracket => Kind.slice, + .colon => .slice_sentinel, else => unreachable, }, else => unreachable, @@ -1707,10 +1707,10 @@ pub const Tree = struct { const end = tree.firstToken(info.child_type); while 
(i < end) : (i += 1) { switch (token_tags[i]) { - .Keyword_allowzero => result.allowzero_token = i, - .Keyword_const => result.const_token = i, - .Keyword_volatile => result.volatile_token = i, - .Keyword_align => { + .keyword_allowzero => result.allowzero_token = i, + .keyword_const => result.const_token = i, + .keyword_volatile => result.volatile_token = i, + .keyword_align => { assert(info.align_node != 0); if (info.bit_range_end != 0) { assert(info.bit_range_start != 0); @@ -1732,7 +1732,7 @@ pub const Tree = struct { .layout_token = null, }; switch (token_tags[info.main_token - 1]) { - .Keyword_extern, .Keyword_packed => result.layout_token = info.main_token - 1, + .keyword_extern, .keyword_packed => result.layout_token = info.main_token - 1, else => {}, } return result; @@ -1744,7 +1744,7 @@ pub const Tree = struct { .ast = info, .payload_token = null, }; - if (token_tags[info.arrow_token + 1] == .Pipe) { + if (token_tags[info.arrow_token + 1] == .pipe) { result.payload_token = info.arrow_token + 2; } return result; @@ -1760,7 +1760,7 @@ pub const Tree = struct { .outputs = &.{}, .first_clobber = null, }; - if (token_tags[info.asm_token + 1] == .Keyword_volatile) { + if (token_tags[info.asm_token + 1] == .keyword_volatile) { result.volatile_token = info.asm_token + 1; } const outputs_end: usize = for (info.items) |item, i| { @@ -1776,10 +1776,10 @@ pub const Tree = struct { if (info.items.len == 0) { // asm ("foo" ::: "a", "b"); const template_token = tree.lastToken(info.template); - if (token_tags[template_token + 1] == .Colon and - token_tags[template_token + 2] == .Colon and - token_tags[template_token + 3] == .Colon and - token_tags[template_token + 4] == .StringLiteral) + if (token_tags[template_token + 1] == .colon and + token_tags[template_token + 2] == .colon and + token_tags[template_token + 3] == .colon and + token_tags[template_token + 4] == .string_literal) { result.first_clobber = template_token + 4; } @@ -1787,8 +1787,8 @@ pub const Tree = 
struct { // asm ("foo" :: [_] "" (y) : "a", "b"); const last_input = result.inputs[result.inputs.len - 1]; const rparen = tree.lastToken(last_input); - if (token_tags[rparen + 1] == .Colon and - token_tags[rparen + 2] == .StringLiteral) + if (token_tags[rparen + 1] == .colon and + token_tags[rparen + 2] == .string_literal) { result.first_clobber = rparen + 2; } @@ -1796,9 +1796,9 @@ pub const Tree = struct { // asm ("foo" : [_] "" (x) :: "a", "b"); const last_output = result.outputs[result.outputs.len - 1]; const rparen = tree.lastToken(last_output); - if (token_tags[rparen + 1] == .Colon and - token_tags[rparen + 2] == .Colon and - token_tags[rparen + 3] == .StringLiteral) + if (token_tags[rparen + 1] == .colon and + token_tags[rparen + 2] == .colon and + token_tags[rparen + 3] == .string_literal) { result.first_clobber = rparen + 3; } @@ -1818,24 +1818,24 @@ pub const Tree = struct { .error_token = null, }; var tok_i = info.while_token - 1; - if (token_tags[tok_i] == .Keyword_inline) { + if (token_tags[tok_i] == .keyword_inline) { result.inline_token = tok_i; tok_i -= 1; } - if (token_tags[tok_i] == .Colon and - token_tags[tok_i - 1] == .Identifier) + if (token_tags[tok_i] == .colon and + token_tags[tok_i - 1] == .identifier) { result.label_token = tok_i - 1; } const last_cond_token = tree.lastToken(info.cond_expr); - if (token_tags[last_cond_token + 2] == .Pipe) { + if (token_tags[last_cond_token + 2] == .pipe) { result.payload_token = last_cond_token + 3; } if (info.else_expr != 0) { // then_expr else |x| // ^ ^ result.else_token = tree.lastToken(info.then_expr) + 1; - if (token_tags[result.else_token + 1] == .Pipe) { + if (token_tags[result.else_token + 1] == .pipe) { result.error_token = result.else_token + 2; } } @@ -1849,7 +1849,7 @@ pub const Tree = struct { .async_token = null, }; const maybe_async_token = tree.firstToken(info.fn_expr) - 1; - if (token_tags[maybe_async_token] == .Keyword_async) { + if (token_tags[maybe_async_token] == .keyword_async) { 
result.async_token = maybe_async_token; } return result; @@ -2120,7 +2120,7 @@ pub const Error = union(enum) { pub const ExpectedVarDecl = SingleTokenError("Expected variable declaration, found '{s}'"); pub const ExpectedFn = SingleTokenError("Expected function, found '{s}'"); pub const ExpectedReturnType = SingleTokenError("Expected return type expression, found '{s}'"); - pub const ExpectedAggregateKw = SingleTokenError("Expected '" ++ Token.Tag.Keyword_struct.symbol() ++ "', '" ++ Token.Tag.Keyword_union.symbol() ++ "', '" ++ Token.Tag.Keyword_enum.symbol() ++ "', or '" ++ Token.Tag.Keyword_opaque.symbol() ++ "', found '{s}'"); + pub const ExpectedAggregateKw = SingleTokenError("Expected '" ++ Token.Tag.keyword_struct.symbol() ++ "', '" ++ Token.Tag.keyword_union.symbol() ++ "', '" ++ Token.Tag.keyword_enum.symbol() ++ "', or '" ++ Token.Tag.keyword_opaque.symbol() ++ "', found '{s}'"); pub const ExpectedEqOrSemi = SingleTokenError("Expected '=' or ';', found '{s}'"); pub const ExpectedSemiOrLBrace = SingleTokenError("Expected ';' or '{{', found '{s}'"); pub const ExpectedSemiOrElse = SingleTokenError("Expected ';' or 'else', found '{s}'"); @@ -2129,7 +2129,7 @@ pub const Error = union(enum) { pub const ExpectedColonOrRParen = SingleTokenError("Expected ':' or ')', found '{s}'"); pub const ExpectedLabelable = SingleTokenError("Expected 'while', 'for', 'inline', 'suspend', or '{{', found '{s}'"); pub const ExpectedInlinable = SingleTokenError("Expected 'while' or 'for', found '{s}'"); - pub const ExpectedAsmOutputReturnOrType = SingleTokenError("Expected '->' or '" ++ Token.Tag.Identifier.symbol() ++ "', found '{s}'"); + pub const ExpectedAsmOutputReturnOrType = SingleTokenError("Expected '->' or '" ++ Token.Tag.identifier.symbol() ++ "', found '{s}'"); pub const ExpectedSliceOrRBracket = SingleTokenError("Expected ']' or '..', found '{s}'"); pub const ExpectedTypeExpr = SingleTokenError("Expected type expression, found '{s}'"); pub const ExpectedPrimaryTypeExpr 
= SingleTokenError("Expected primary type expression, found '{s}'"); @@ -2185,7 +2185,7 @@ pub const Error = union(enum) { pub fn render(self: *const ExpectedToken, tokens: []const Token.Tag, stream: anytype) !void { const found_token = tokens[self.token]; switch (found_token) { - .Invalid => { + .invalid => { return stream.print("expected '{s}', found invalid bytes", .{self.expected_id.symbol()}); }, else => { diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig index 21ebe43582..855f889794 100644 --- a/lib/std/zig/parse.zig +++ b/lib/std/zig/parse.zig @@ -32,7 +32,7 @@ pub fn parse(gpa: *Allocator, source: []const u8) Allocator.Error!Tree { .tag = token.tag, .start = @intCast(u32, token.loc.start), }); - if (token.tag == .Eof) break; + if (token.tag == .eof) break; } var parser: Parser = .{ @@ -68,7 +68,7 @@ pub fn parse(gpa: *Allocator, source: []const u8) Allocator.Error!Tree { const root_decls = try root_members.toSpan(&parser); // parseContainerMembers will try to skip as much invalid tokens as // it can, so we are now at EOF. - assert(parser.token_tags[parser.tok_i] == .Eof); + assert(parser.token_tags[parser.tok_i] == .eof); parser.nodes.items(.data)[0] = .{ .lhs = root_decls.start, .rhs = root_decls.end, @@ -186,14 +186,14 @@ const Parser = struct { } = .none; // Skip container doc comments. 
- while (p.eatToken(.ContainerDocComment)) |_| {} + while (p.eatToken(.container_doc_comment)) |_| {} var trailing_comma = false; while (true) { const doc_comment = p.eatDocComments(); switch (p.token_tags[p.tok_i]) { - .Keyword_test => { + .keyword_test => { const test_decl_node = try p.expectTestDeclRecoverable(); if (test_decl_node != 0) { if (field_state == .seen) { @@ -203,8 +203,8 @@ const Parser = struct { } trailing_comma = false; }, - .Keyword_comptime => switch (p.token_tags[p.tok_i + 1]) { - .Identifier => { + .keyword_comptime => switch (p.token_tags[p.tok_i + 1]) { + .identifier => { p.tok_i += 1; const container_field = try p.expectContainerFieldRecoverable(); if (container_field != 0) { @@ -221,12 +221,12 @@ const Parser = struct { } try list.append(container_field); switch (p.token_tags[p.tok_i]) { - .Comma => { + .comma => { p.tok_i += 1; trailing_comma = true; continue; }, - .RBrace, .Eof => { + .r_brace, .eof => { trailing_comma = false; break; }, @@ -235,12 +235,12 @@ const Parser = struct { // There is not allowed to be a decl after a field with no comma. // Report error but recover parser. 
try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, }); p.findNextContainerMember(); } }, - .LBrace => { + .l_brace => { const comptime_token = p.nextToken(); const block = p.parseBlock() catch |err| switch (err) { error.OutOfMemory => return error.OutOfMemory, @@ -270,7 +270,7 @@ const Parser = struct { try p.warn(.{ .ExpectedBlockOrField = .{ .token = p.tok_i } }); }, }, - .Keyword_pub => { + .keyword_pub => { p.tok_i += 1; const top_level_decl = try p.expectTopLevelDeclRecoverable(); if (top_level_decl != 0) { @@ -281,7 +281,7 @@ const Parser = struct { } trailing_comma = false; }, - .Keyword_usingnamespace => { + .keyword_usingnamespace => { const node = try p.expectUsingNamespaceRecoverable(); if (node != 0) { if (field_state == .seen) { @@ -291,14 +291,14 @@ const Parser = struct { } trailing_comma = false; }, - .Keyword_const, - .Keyword_var, - .Keyword_threadlocal, - .Keyword_export, - .Keyword_extern, - .Keyword_inline, - .Keyword_noinline, - .Keyword_fn, + .keyword_const, + .keyword_var, + .keyword_threadlocal, + .keyword_export, + .keyword_extern, + .keyword_inline, + .keyword_noinline, + .keyword_fn, => { const top_level_decl = try p.expectTopLevelDeclRecoverable(); if (top_level_decl != 0) { @@ -309,7 +309,7 @@ const Parser = struct { } trailing_comma = false; }, - .Identifier => { + .identifier => { const container_field = try p.expectContainerFieldRecoverable(); if (container_field != 0) { switch (field_state) { @@ -325,12 +325,12 @@ const Parser = struct { } try list.append(container_field); switch (p.token_tags[p.tok_i]) { - .Comma => { + .comma => { p.tok_i += 1; trailing_comma = true; continue; }, - .RBrace, .Eof => { + .r_brace, .eof => { trailing_comma = false; break; }, @@ -339,12 +339,12 @@ const Parser = struct { // There is not allowed to be a decl after a field with no comma. // Report error but recover parser. 
try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, }); p.findNextContainerMember(); } }, - .Eof, .RBrace => { + .eof, .r_brace => { if (doc_comment) |tok| { try p.warn(.{ .UnattachedDocComment = .{ .token = tok } }); } @@ -396,36 +396,36 @@ const Parser = struct { const tok = p.nextToken(); switch (p.token_tags[tok]) { // any of these can start a new top level declaration - .Keyword_test, - .Keyword_comptime, - .Keyword_pub, - .Keyword_export, - .Keyword_extern, - .Keyword_inline, - .Keyword_noinline, - .Keyword_usingnamespace, - .Keyword_threadlocal, - .Keyword_const, - .Keyword_var, - .Keyword_fn, - .Identifier, + .keyword_test, + .keyword_comptime, + .keyword_pub, + .keyword_export, + .keyword_extern, + .keyword_inline, + .keyword_noinline, + .keyword_usingnamespace, + .keyword_threadlocal, + .keyword_const, + .keyword_var, + .keyword_fn, + .identifier, => { if (level == 0) { p.tok_i -= 1; return; } }, - .Comma, .Semicolon => { + .comma, .semicolon => { // this decl was likely meant to end here if (level == 0) { return; } }, - .LParen, .LBracket, .LBrace => level += 1, - .RParen, .RBracket => { + .l_paren, .l_bracket, .l_brace => level += 1, + .r_paren, .r_bracket => { if (level != 0) level -= 1; }, - .RBrace => { + .r_brace => { if (level == 0) { // end of container, exit p.tok_i -= 1; @@ -433,7 +433,7 @@ const Parser = struct { } level -= 1; }, - .Eof => { + .eof => { p.tok_i -= 1; return; }, @@ -448,20 +448,20 @@ const Parser = struct { while (true) { const tok = p.nextToken(); switch (p.token_tags[tok]) { - .LBrace => level += 1, - .RBrace => { + .l_brace => level += 1, + .r_brace => { if (level == 0) { p.tok_i -= 1; return; } level -= 1; }, - .Semicolon => { + .semicolon => { if (level == 0) { return; } }, - .Eof => { + .eof => { p.tok_i -= 1; return; }, @@ -472,8 +472,8 @@ const Parser = struct { /// TestDecl <- KEYWORD_test STRINGLITERALSINGLE? 
Block fn expectTestDecl(p: *Parser) !Node.Index { - const test_token = p.assertToken(.Keyword_test); - const name_token = p.eatToken(.StringLiteral); + const test_token = p.assertToken(.keyword_test); + const name_token = p.eatToken(.string_literal); const block_node = try p.parseBlock(); if (block_node == 0) return p.fail(.{ .ExpectedLBrace = .{ .token = p.tok_i } }); return p.addNode(.{ @@ -505,15 +505,15 @@ const Parser = struct { var expect_fn: bool = false; var exported: bool = false; switch (p.token_tags[extern_export_inline_token]) { - .Keyword_extern => _ = p.eatToken(.StringLiteral), - .Keyword_export => exported = true, - .Keyword_inline, .Keyword_noinline => expect_fn = true, + .keyword_extern => _ = p.eatToken(.string_literal), + .keyword_export => exported = true, + .keyword_inline, .keyword_noinline => expect_fn = true, else => p.tok_i -= 1, } const fn_proto = try p.parseFnProto(); if (fn_proto != 0) { switch (p.token_tags[p.tok_i]) { - .Semicolon => { + .semicolon => { const semicolon_token = p.nextToken(); try p.parseAppendedDocComment(semicolon_token); return p.addNode(.{ @@ -525,7 +525,7 @@ const Parser = struct { }, }); }, - .LBrace => { + .l_brace => { const body_block = try p.parseBlock(); assert(body_block != 0); return p.addNode(.{ @@ -553,10 +553,10 @@ const Parser = struct { return error.ParseError; } - const thread_local_token = p.eatToken(.Keyword_threadlocal); + const thread_local_token = p.eatToken(.keyword_threadlocal); const var_decl = try p.parseVarDecl(); if (var_decl != 0) { - const semicolon_token = try p.expectToken(.Semicolon); + const semicolon_token = try p.expectToken(.semicolon); try p.parseAppendedDocComment(semicolon_token); return var_decl; } @@ -582,9 +582,9 @@ const Parser = struct { } fn expectUsingNamespace(p: *Parser) !Node.Index { - const usingnamespace_token = try p.expectToken(.Keyword_usingnamespace); + const usingnamespace_token = try p.expectToken(.keyword_usingnamespace); const expr = try p.expectExpr(); - 
const semicolon_token = try p.expectToken(.Semicolon); + const semicolon_token = try p.expectToken(.semicolon); try p.parseAppendedDocComment(semicolon_token); return p.addNode(.{ .tag = .UsingNamespace, @@ -608,14 +608,14 @@ const Parser = struct { /// FnProto <- KEYWORD_fn IDENTIFIER? LPAREN ParamDeclList RPAREN ByteAlign? LinkSection? CallConv? EXCLAMATIONMARK? (Keyword_anytype / TypeExpr) fn parseFnProto(p: *Parser) !Node.Index { - const fn_token = p.eatToken(.Keyword_fn) orelse return null_node; - _ = p.eatToken(.Identifier); + const fn_token = p.eatToken(.keyword_fn) orelse return null_node; + _ = p.eatToken(.identifier); const params = try p.parseParamDeclList(); defer params.deinit(p.gpa); const align_expr = try p.parseByteAlign(); const section_expr = try p.parseLinkSection(); const callconv_expr = try p.parseCallconv(); - const bang_token = p.eatToken(.Bang); + const bang_token = p.eatToken(.bang); const return_type_expr = try p.parseTypeExpr(); if (return_type_expr == 0) { @@ -686,15 +686,15 @@ const Parser = struct { /// VarDecl <- (KEYWORD_const / KEYWORD_var) IDENTIFIER (COLON TypeExpr)? ByteAlign? LinkSection? (EQUAL Expr)? 
SEMICOLON fn parseVarDecl(p: *Parser) !Node.Index { - const mut_token = p.eatToken(.Keyword_const) orelse - p.eatToken(.Keyword_var) orelse + const mut_token = p.eatToken(.keyword_const) orelse + p.eatToken(.keyword_var) orelse return null_node; - _ = try p.expectToken(.Identifier); - const type_node: Node.Index = if (p.eatToken(.Colon) == null) 0 else try p.expectTypeExpr(); + _ = try p.expectToken(.identifier); + const type_node: Node.Index = if (p.eatToken(.colon) == null) 0 else try p.expectTypeExpr(); const align_node = try p.parseByteAlign(); const section_node = try p.parseLinkSection(); - const init_node: Node.Index = if (p.eatToken(.Equal) == null) 0 else try p.expectExpr(); + const init_node: Node.Index = if (p.eatToken(.equal) == null) 0 else try p.expectExpr(); if (section_node == 0) { if (align_node == 0) { return p.addNode(.{ @@ -745,13 +745,13 @@ const Parser = struct { /// ContainerField <- KEYWORD_comptime? IDENTIFIER (COLON TypeExpr ByteAlign?)? (EQUAL Expr)? fn expectContainerField(p: *Parser) !Node.Index { - const comptime_token = p.eatToken(.Keyword_comptime); - const name_token = p.assertToken(.Identifier); + const comptime_token = p.eatToken(.keyword_comptime); + const name_token = p.assertToken(.identifier); var align_expr: Node.Index = 0; var type_expr: Node.Index = 0; - if (p.eatToken(.Colon)) |_| { - if (p.eatToken(.Keyword_anytype)) |anytype_tok| { + if (p.eatToken(.colon)) |_| { + if (p.eatToken(.keyword_anytype)) |anytype_tok| { type_expr = try p.addNode(.{ .tag = .AnyType, .main_token = anytype_tok, @@ -766,7 +766,7 @@ const Parser = struct { } } - const value_expr: Node.Index = if (p.eatToken(.Equal) == null) 0 else try p.expectExpr(); + const value_expr: Node.Index = if (p.eatToken(.equal) == null) 0 else try p.expectExpr(); if (align_expr == 0) { return p.addNode(.{ @@ -823,11 +823,11 @@ const Parser = struct { /// / SwitchExpr /// / AssignExpr SEMICOLON fn parseStatement(p: *Parser) Error!Node.Index { - const comptime_token = 
p.eatToken(.Keyword_comptime); + const comptime_token = p.eatToken(.keyword_comptime); const var_decl = try p.parseVarDecl(); if (var_decl != 0) { - _ = try p.expectTokenRecoverable(.Semicolon); + _ = try p.expectTokenRecoverable(.semicolon); return var_decl; } @@ -843,7 +843,7 @@ const Parser = struct { } switch (p.token_tags[p.tok_i]) { - .Keyword_nosuspend => { + .keyword_nosuspend => { return p.addNode(.{ .tag = .Nosuspend, .main_token = p.nextToken(), @@ -853,9 +853,9 @@ const Parser = struct { }, }); }, - .Keyword_suspend => { + .keyword_suspend => { const token = p.nextToken(); - const block_expr: Node.Index = if (p.eatToken(.Semicolon) != null) + const block_expr: Node.Index = if (p.eatToken(.semicolon) != null) 0 else try p.expectBlockExprStatement(); @@ -868,7 +868,7 @@ const Parser = struct { }, }); }, - .Keyword_defer => return p.addNode(.{ + .keyword_defer => return p.addNode(.{ .tag = .Defer, .main_token = p.nextToken(), .data = .{ @@ -876,7 +876,7 @@ const Parser = struct { .rhs = try p.expectBlockExprStatement(), }, }), - .Keyword_errdefer => return p.addNode(.{ + .keyword_errdefer => return p.addNode(.{ .tag = .ErrDefer, .main_token = p.nextToken(), .data = .{ @@ -884,8 +884,8 @@ const Parser = struct { .rhs = try p.expectBlockExprStatement(), }, }), - .Keyword_switch => return p.expectSwitchExpr(), - .Keyword_if => return p.expectIfStatement(), + .keyword_switch => return p.expectSwitchExpr(), + .keyword_if => return p.expectIfStatement(), else => {}, } @@ -894,7 +894,7 @@ const Parser = struct { const assign_expr = try p.parseAssignExpr(); if (assign_expr != 0) { - _ = try p.expectTokenRecoverable(.Semicolon); + _ = try p.expectTokenRecoverable(.semicolon); return assign_expr; } @@ -918,7 +918,7 @@ const Parser = struct { error.OutOfMemory => return error.OutOfMemory, error.ParseError => { p.findNextStmt(); // Try to skip to the next statement. 
- if (p.token_tags[p.tok_i] == .RBrace) return null_node; + if (p.token_tags[p.tok_i] == .r_brace) return null_node; continue; }, }; @@ -929,10 +929,10 @@ const Parser = struct { /// <- IfPrefix BlockExpr ( KEYWORD_else Payload? Statement )? /// / IfPrefix AssignExpr ( SEMICOLON / KEYWORD_else Payload? Statement ) fn expectIfStatement(p: *Parser) !Node.Index { - const if_token = p.assertToken(.Keyword_if); - _ = try p.expectToken(.LParen); + const if_token = p.assertToken(.keyword_if); + _ = try p.expectToken(.l_paren); const condition = try p.expectExpr(); - _ = try p.expectToken(.RParen); + _ = try p.expectToken(.r_paren); const then_payload = try p.parsePtrPayload(); // TODO propose to change the syntax so that semicolons are always required @@ -945,7 +945,7 @@ const Parser = struct { if (assign_expr == 0) { return p.fail(.{ .ExpectedBlockOrAssignment = .{ .token = p.tok_i } }); } - if (p.eatToken(.Semicolon)) |_| { + if (p.eatToken(.semicolon)) |_| { return p.addNode(.{ .tag = .IfSimple, .main_token = if_token, @@ -958,7 +958,7 @@ const Parser = struct { else_required = true; break :blk assign_expr; }; - const else_token = p.eatToken(.Keyword_else) orelse { + const else_token = p.eatToken(.keyword_else) orelse { if (else_required) { return p.fail(.{ .ExpectedSemiOrElse = .{ .token = p.tok_i } }); } @@ -1004,7 +1004,7 @@ const Parser = struct { /// LoopStatement <- KEYWORD_inline? (ForStatement / WhileStatement) fn parseLoopStatement(p: *Parser) !Node.Index { - const inline_token = p.eatToken(.Keyword_inline); + const inline_token = p.eatToken(.keyword_inline); const for_statement = try p.parseForStatement(); if (for_statement != 0) return for_statement; @@ -1023,10 +1023,10 @@ const Parser = struct { /// <- ForPrefix BlockExpr ( KEYWORD_else Statement )? 
/// / ForPrefix AssignExpr ( SEMICOLON / KEYWORD_else Statement ) fn parseForStatement(p: *Parser) !Node.Index { - const for_token = p.eatToken(.Keyword_for) orelse return null_node; - _ = try p.expectToken(.LParen); + const for_token = p.eatToken(.keyword_for) orelse return null_node; + _ = try p.expectToken(.l_paren); const array_expr = try p.expectExpr(); - _ = try p.expectToken(.RParen); + _ = try p.expectToken(.r_paren); _ = try p.parsePtrIndexPayload(); // TODO propose to change the syntax so that semicolons are always required @@ -1039,7 +1039,7 @@ const Parser = struct { if (assign_expr == 0) { return p.fail(.{ .ExpectedBlockOrAssignment = .{ .token = p.tok_i } }); } - if (p.eatToken(.Semicolon)) |_| { + if (p.eatToken(.semicolon)) |_| { return p.addNode(.{ .tag = .ForSimple, .main_token = for_token, @@ -1052,7 +1052,7 @@ const Parser = struct { else_required = true; break :blk assign_expr; }; - const else_token = p.eatToken(.Keyword_else) orelse { + const else_token = p.eatToken(.keyword_else) orelse { if (else_required) { return p.fail(.{ .ExpectedSemiOrElse = .{ .token = p.tok_i } }); } @@ -1083,10 +1083,10 @@ const Parser = struct { /// <- WhilePrefix BlockExpr ( KEYWORD_else Payload? Statement )? /// / WhilePrefix AssignExpr ( SEMICOLON / KEYWORD_else Payload? 
Statement ) fn parseWhileStatement(p: *Parser) !Node.Index { - const while_token = p.eatToken(.Keyword_while) orelse return null_node; - _ = try p.expectToken(.LParen); + const while_token = p.eatToken(.keyword_while) orelse return null_node; + _ = try p.expectToken(.l_paren); const condition = try p.expectExpr(); - _ = try p.expectToken(.RParen); + _ = try p.expectToken(.r_paren); const then_payload = try p.parsePtrPayload(); const cont_expr = try p.parseWhileContinueExpr(); @@ -1100,7 +1100,7 @@ const Parser = struct { if (assign_expr == 0) { return p.fail(.{ .ExpectedBlockOrAssignment = .{ .token = p.tok_i } }); } - if (p.eatToken(.Semicolon)) |_| { + if (p.eatToken(.semicolon)) |_| { if (cont_expr == 0) { return p.addNode(.{ .tag = .WhileSimple, @@ -1127,7 +1127,7 @@ const Parser = struct { else_required = true; break :blk assign_expr; }; - const else_token = p.eatToken(.Keyword_else) orelse { + const else_token = p.eatToken(.keyword_else) orelse { if (else_required) { return p.fail(.{ .ExpectedSemiOrElse = .{ .token = p.tok_i } }); } @@ -1180,7 +1180,7 @@ const Parser = struct { } const assign_expr = try p.parseAssignExpr(); if (assign_expr != 0) { - _ = try p.expectTokenRecoverable(.Semicolon); + _ = try p.expectTokenRecoverable(.semicolon); return assign_expr; } return null_node; @@ -1197,9 +1197,9 @@ const Parser = struct { /// BlockExpr <- BlockLabel? 
Block fn parseBlockExpr(p: *Parser) Error!Node.Index { switch (p.token_tags[p.tok_i]) { - .Identifier => { - if (p.token_tags[p.tok_i + 1] == .Colon and - p.token_tags[p.tok_i + 2] == .LBrace) + .identifier => { + if (p.token_tags[p.tok_i + 1] == .colon and + p.token_tags[p.tok_i + 2] == .l_brace) { p.tok_i += 2; return p.parseBlock(); @@ -1207,7 +1207,7 @@ const Parser = struct { return null_node; } }, - .LBrace => return p.parseBlock(), + .l_brace => return p.parseBlock(), else => return null_node, } } @@ -1233,20 +1233,20 @@ const Parser = struct { if (expr == 0) return null_node; const tag: Node.Tag = switch (p.token_tags[p.tok_i]) { - .AsteriskEqual => .AssignMul, - .SlashEqual => .AssignDiv, - .PercentEqual => .AssignMod, - .PlusEqual => .AssignAdd, - .MinusEqual => .AssignSub, - .AngleBracketAngleBracketLeftEqual => .AssignBitShiftLeft, - .AngleBracketAngleBracketRightEqual => .AssignBitShiftRight, - .AmpersandEqual => .AssignBitAnd, - .CaretEqual => .AssignBitXor, - .PipeEqual => .AssignBitOr, - .AsteriskPercentEqual => .AssignMulWrap, - .PlusPercentEqual => .AssignAddWrap, - .MinusPercentEqual => .AssignSubWrap, - .Equal => .Assign, + .asterisk_equal => .AssignMul, + .slash_equal => .AssignDiv, + .percent_equal => .AssignMod, + .plus_equal => .AssignAdd, + .minus_equal => .AssignSub, + .angle_bracket_angle_bracket_left_equal => .AssignBitShiftLeft, + .angle_bracket_angle_bracket_right_equal => .AssignBitShiftRight, + .ampersand_equal => .AssignBitAnd, + .caret_equal => .AssignBitXor, + .pipe_equal => .AssignBitOr, + .asterisk_percent_equal => .AssignMulWrap, + .plus_percent_equal => .AssignAddWrap, + .minus_percent_equal => .AssignSubWrap, + .equal => .Assign, else => return expr, }; return p.addNode(.{ @@ -1288,7 +1288,7 @@ const Parser = struct { while (true) { switch (p.token_tags[p.tok_i]) { - .Keyword_or => { + .keyword_or => { const or_token = p.nextToken(); const rhs = try p.parseBoolAndExpr(); if (rhs == 0) { @@ -1315,7 +1315,7 @@ const Parser = 
struct { while (true) { switch (p.token_tags[p.tok_i]) { - .Keyword_and => { + .keyword_and => { const and_token = p.nextToken(); const rhs = try p.parseCompareExpr(); if (rhs == 0) { @@ -1348,12 +1348,12 @@ const Parser = struct { if (expr == 0) return null_node; const tag: Node.Tag = switch (p.token_tags[p.tok_i]) { - .EqualEqual => .EqualEqual, - .BangEqual => .BangEqual, - .AngleBracketLeft => .LessThan, - .AngleBracketRight => .GreaterThan, - .AngleBracketLeftEqual => .LessOrEqual, - .AngleBracketRightEqual => .GreaterOrEqual, + .equal_equal => .EqualEqual, + .bang_equal => .BangEqual, + .angle_bracket_left => .LessThan, + .angle_bracket_right => .GreaterThan, + .angle_bracket_left_equal => .LessOrEqual, + .angle_bracket_right_equal => .GreaterOrEqual, else => return expr, }; return p.addNode(.{ @@ -1379,11 +1379,11 @@ const Parser = struct { while (true) { const tag: Node.Tag = switch (p.token_tags[p.tok_i]) { - .Ampersand => .BitAnd, - .Caret => .BitXor, - .Pipe => .BitOr, - .Keyword_orelse => .OrElse, - .Keyword_catch => { + .ampersand => .BitAnd, + .caret => .BitXor, + .pipe => .BitOr, + .keyword_orelse => .OrElse, + .keyword_catch => { const catch_token = p.nextToken(); _ = try p.parsePayload(); const rhs = try p.parseBitShiftExpr(); @@ -1432,8 +1432,8 @@ const Parser = struct { while (true) { const tag: Node.Tag = switch (p.token_tags[p.tok_i]) { - .AngleBracketAngleBracketLeft => .BitShiftLeft, - .AngleBracketAngleBracketRight => .BitShiftRight, + .angle_bracket_angle_bracket_left => .BitShiftLeft, + .angle_bracket_angle_bracket_right => .BitShiftRight, else => return res, }; res = try p.addNode(.{ @@ -1469,11 +1469,11 @@ const Parser = struct { while (true) { const tag: Node.Tag = switch (p.token_tags[p.tok_i]) { - .Plus => .Add, - .Minus => .Sub, - .PlusPlus => .ArrayCat, - .PlusPercent => .AddWrap, - .MinusPercent => .SubWrap, + .plus => .Add, + .minus => .Sub, + .plus_plus => .ArrayCat, + .plus_percent => .AddWrap, + .minus_percent => .SubWrap, else 
=> return res, }; res = try p.addNode(.{ @@ -1509,12 +1509,12 @@ const Parser = struct { while (true) { const tag: Node.Tag = switch (p.token_tags[p.tok_i]) { - .PipePipe => .MergeErrorSets, - .Asterisk => .Mul, - .Slash => .Div, - .Percent => .Mod, - .AsteriskAsterisk => .ArrayMult, - .AsteriskPercent => .MulWrap, + .pipe_pipe => .MergeErrorSets, + .asterisk => .Mul, + .slash => .Div, + .percent => .Mod, + .asterisk_asterisk => .ArrayMult, + .asterisk_percent => .MulWrap, else => return res, }; res = try p.addNode(.{ @@ -1547,13 +1547,13 @@ const Parser = struct { /// / KEYWORD_await fn parsePrefixExpr(p: *Parser) Error!Node.Index { const tag: Node.Tag = switch (p.token_tags[p.tok_i]) { - .Bang => .BoolNot, - .Minus => .Negation, - .Tilde => .BitNot, - .MinusPercent => .NegationWrap, - .Ampersand => .AddressOf, - .Keyword_try => .Try, - .Keyword_await => .Await, + .bang => .BoolNot, + .minus => .Negation, + .tilde => .BitNot, + .minus_percent => .NegationWrap, + .ampersand => .AddressOf, + .keyword_try => .Try, + .keyword_await => .Await, else => return p.parsePrimaryExpr(), }; return p.addNode(.{ @@ -1587,7 +1587,7 @@ const Parser = struct { /// ArrayTypeStart <- LBRACKET Expr? (COLON Expr)? 
RBRACKET fn parseTypeExpr(p: *Parser) Error!Node.Index { switch (p.token_tags[p.tok_i]) { - .QuestionMark => return p.addNode(.{ + .question_mark => return p.addNode(.{ .tag = .OptionalType, .main_token = p.nextToken(), .data = .{ @@ -1595,8 +1595,8 @@ const Parser = struct { .rhs = undefined, }, }), - .Keyword_anyframe => switch (p.token_tags[p.tok_i + 1]) { - .Arrow => return p.addNode(.{ + .keyword_anyframe => switch (p.token_tags[p.tok_i + 1]) { + .arrow => return p.addNode(.{ .tag = .AnyFrameType, .main_token = p.nextToken(), .data = .{ @@ -1606,7 +1606,7 @@ const Parser = struct { }), else => return p.parseErrorUnionExpr(), }, - .Asterisk => { + .asterisk => { const asterisk = p.nextToken(); const mods = try p.parsePtrModifiers(); const elem_type = try p.expectTypeExpr(); @@ -1635,7 +1635,7 @@ const Parser = struct { }); } }, - .AsteriskAsterisk => { + .asterisk_asterisk => { const asterisk = p.nextToken(); const mods = try p.parsePtrModifiers(); const elem_type = try p.expectTypeExpr(); @@ -1674,13 +1674,13 @@ const Parser = struct { }, }); }, - .LBracket => switch (p.token_tags[p.tok_i + 1]) { - .Asterisk => { + .l_bracket => switch (p.token_tags[p.tok_i + 1]) { + .asterisk => { const lbracket = p.nextToken(); const asterisk = p.nextToken(); var sentinel: Node.Index = 0; prefix: { - if (p.eatToken(.Identifier)) |ident| { + if (p.eatToken(.identifier)) |ident| { const token_slice = p.source[p.token_starts[ident]..][0..2]; if (!std.mem.eql(u8, token_slice, "c]")) { p.tok_i -= 1; @@ -1688,11 +1688,11 @@ const Parser = struct { break :prefix; } } - if (p.eatToken(.Colon)) |_| { + if (p.eatToken(.colon)) |_| { sentinel = try p.expectExpr(); } } - _ = try p.expectToken(.RBracket); + _ = try p.expectToken(.r_bracket); const mods = try p.parsePtrModifiers(); const elem_type = try p.expectTypeExpr(); if (mods.bit_range_start == 0) { @@ -1746,11 +1746,11 @@ const Parser = struct { else => { const lbracket = p.nextToken(); const len_expr = try p.parseExpr(); - const 
sentinel: Node.Index = if (p.eatToken(.Colon)) |_| + const sentinel: Node.Index = if (p.eatToken(.colon)) |_| try p.expectExpr() else 0; - _ = try p.expectToken(.RBracket); + _ = try p.expectToken(.r_bracket); const mods = try p.parsePtrModifiers(); const elem_type = try p.expectTypeExpr(); if (mods.bit_range_start != 0) { @@ -1849,9 +1849,9 @@ const Parser = struct { /// / CurlySuffixExpr fn parsePrimaryExpr(p: *Parser) !Node.Index { switch (p.token_tags[p.tok_i]) { - .Keyword_asm => return p.expectAsmExpr(), - .Keyword_if => return p.parseIfExpr(), - .Keyword_break => { + .keyword_asm => return p.expectAsmExpr(), + .keyword_if => return p.parseIfExpr(), + .keyword_break => { p.tok_i += 1; return p.addNode(.{ .tag = .Break, @@ -1862,7 +1862,7 @@ const Parser = struct { }, }); }, - .Keyword_continue => { + .keyword_continue => { p.tok_i += 1; return p.addNode(.{ .tag = .Continue, @@ -1873,7 +1873,7 @@ const Parser = struct { }, }); }, - .Keyword_comptime => { + .keyword_comptime => { p.tok_i += 1; return p.addNode(.{ .tag = .Comptime, @@ -1884,7 +1884,7 @@ const Parser = struct { }, }); }, - .Keyword_nosuspend => { + .keyword_nosuspend => { p.tok_i += 1; return p.addNode(.{ .tag = .Nosuspend, @@ -1895,7 +1895,7 @@ const Parser = struct { }, }); }, - .Keyword_resume => { + .keyword_resume => { p.tok_i += 1; return p.addNode(.{ .tag = .Resume, @@ -1906,7 +1906,7 @@ const Parser = struct { }, }); }, - .Keyword_return => { + .keyword_return => { p.tok_i += 1; return p.addNode(.{ .tag = .Return, @@ -1917,28 +1917,28 @@ const Parser = struct { }, }); }, - .Identifier => { - if (p.token_tags[p.tok_i + 1] == .Colon) { + .identifier => { + if (p.token_tags[p.tok_i + 1] == .colon) { switch (p.token_tags[p.tok_i + 2]) { - .Keyword_inline => { + .keyword_inline => { p.tok_i += 3; switch (p.token_tags[p.tok_i]) { - .Keyword_for => return p.parseForExpr(), - .Keyword_while => return p.parseWhileExpr(), + .keyword_for => return p.parseForExpr(), + .keyword_while => return 
p.parseWhileExpr(), else => return p.fail(.{ .ExpectedInlinable = .{ .token = p.tok_i }, }), } }, - .Keyword_for => { + .keyword_for => { p.tok_i += 2; return p.parseForExpr(); }, - .Keyword_while => { + .keyword_while => { p.tok_i += 2; return p.parseWhileExpr(); }, - .LBrace => { + .l_brace => { p.tok_i += 2; return p.parseBlock(); }, @@ -1948,19 +1948,19 @@ const Parser = struct { return p.parseCurlySuffixExpr(); } }, - .Keyword_inline => { + .keyword_inline => { p.tok_i += 2; switch (p.token_tags[p.tok_i]) { - .Keyword_for => return p.parseForExpr(), - .Keyword_while => return p.parseWhileExpr(), + .keyword_for => return p.parseForExpr(), + .keyword_while => return p.parseWhileExpr(), else => return p.fail(.{ .ExpectedInlinable = .{ .token = p.tok_i }, }), } }, - .Keyword_for => return p.parseForExpr(), - .Keyword_while => return p.parseWhileExpr(), - .LBrace => return p.parseBlock(), + .keyword_for => return p.parseForExpr(), + .keyword_while => return p.parseWhileExpr(), + .l_brace => return p.parseBlock(), else => return p.parseCurlySuffixExpr(), } } @@ -1972,9 +1972,9 @@ const Parser = struct { /// Block <- LBRACE Statement* RBRACE fn parseBlock(p: *Parser) !Node.Index { - const lbrace = p.eatToken(.LBrace) orelse return null_node; + const lbrace = p.eatToken(.l_brace) orelse return null_node; - if (p.eatToken(.RBrace)) |_| { + if (p.eatToken(.r_brace)) |_| { return p.addNode(.{ .tag = .BlockTwo, .main_token = lbrace, @@ -1986,8 +1986,8 @@ const Parser = struct { } const stmt_one = try p.expectStatementRecoverable(); - if (p.eatToken(.RBrace)) |_| { - const semicolon = p.token_tags[p.tok_i - 2] == .Semicolon; + if (p.eatToken(.r_brace)) |_| { + const semicolon = p.token_tags[p.tok_i - 2] == .semicolon; return p.addNode(.{ .tag = if (semicolon) .BlockTwoSemicolon else .BlockTwo, .main_token = lbrace, @@ -1998,8 +1998,8 @@ const Parser = struct { }); } const stmt_two = try p.expectStatementRecoverable(); - if (p.eatToken(.RBrace)) |_| { - const semicolon = 
p.token_tags[p.tok_i - 2] == .Semicolon; + if (p.eatToken(.r_brace)) |_| { + const semicolon = p.token_tags[p.tok_i - 2] == .semicolon; return p.addNode(.{ .tag = if (semicolon) .BlockTwoSemicolon else .BlockTwo, .main_token = lbrace, @@ -2013,16 +2013,16 @@ const Parser = struct { var statements = std.ArrayList(Node.Index).init(p.gpa); defer statements.deinit(); - try statements.appendSlice(&[_]Node.Index{ stmt_one, stmt_two }); + try statements.appendSlice(&.{ stmt_one, stmt_two }); while (true) { const statement = try p.expectStatementRecoverable(); if (statement == 0) break; try statements.append(statement); - if (p.token_tags[p.tok_i] == .RBrace) break; + if (p.token_tags[p.tok_i] == .r_brace) break; } - _ = try p.expectToken(.RBrace); - const semicolon = p.token_tags[p.tok_i - 2] == .Semicolon; + _ = try p.expectToken(.r_brace); + const semicolon = p.token_tags[p.tok_i - 2] == .semicolon; const statements_span = try p.listToSpan(statements.items); return p.addNode(.{ .tag = if (semicolon) .BlockSemicolon else .Block, @@ -2037,14 +2037,14 @@ const Parser = struct { /// ForPrefix <- KEYWORD_for LPAREN Expr RPAREN PtrIndexPayload /// ForExpr <- ForPrefix Expr (KEYWORD_else Expr)? fn parseForExpr(p: *Parser) !Node.Index { - const for_token = p.eatToken(.Keyword_for) orelse return null_node; - _ = try p.expectToken(.LParen); + const for_token = p.eatToken(.keyword_for) orelse return null_node; + _ = try p.expectToken(.l_paren); const array_expr = try p.expectExpr(); - _ = try p.expectToken(.RParen); + _ = try p.expectToken(.r_paren); _ = try p.parsePtrIndexPayload(); const then_expr = try p.expectExpr(); - const else_token = p.eatToken(.Keyword_else) orelse { + const else_token = p.eatToken(.keyword_else) orelse { return p.addNode(.{ .tag = .ForSimple, .main_token = for_token, @@ -2071,15 +2071,15 @@ const Parser = struct { /// WhilePrefix <- KEYWORD_while LPAREN Expr RPAREN PtrPayload? WhileContinueExpr? /// WhileExpr <- WhilePrefix Expr (KEYWORD_else Payload? 
Expr)? fn parseWhileExpr(p: *Parser) !Node.Index { - const while_token = p.eatToken(.Keyword_while) orelse return null_node; - _ = try p.expectToken(.LParen); + const while_token = p.eatToken(.keyword_while) orelse return null_node; + _ = try p.expectToken(.l_paren); const condition = try p.expectExpr(); - _ = try p.expectToken(.RParen); + _ = try p.expectToken(.r_paren); const then_payload = try p.parsePtrPayload(); const cont_expr = try p.parseWhileContinueExpr(); const then_expr = try p.expectExpr(); - const else_token = p.eatToken(.Keyword_else) orelse { + const else_token = p.eatToken(.keyword_else) orelse { if (cont_expr == 0) { return p.addNode(.{ .tag = .WhileSimple, @@ -2127,12 +2127,12 @@ const Parser = struct { fn parseCurlySuffixExpr(p: *Parser) !Node.Index { const lhs = try p.parseTypeExpr(); if (lhs == 0) return null_node; - const lbrace = p.eatToken(.LBrace) orelse return lhs; + const lbrace = p.eatToken(.l_brace) orelse return lhs; // If there are 0 or 1 items, we can use ArrayInitOne/StructInitOne; // otherwise we use the full ArrayInit/StructInit. 
- if (p.eatToken(.RBrace)) |_| { + if (p.eatToken(.r_brace)) |_| { return p.addNode(.{ .tag = .StructInitOne, .main_token = lbrace, @@ -2144,8 +2144,8 @@ const Parser = struct { } const field_init = try p.parseFieldInit(); if (field_init != 0) { - const comma_one = p.eatToken(.Comma); - if (p.eatToken(.RBrace)) |_| { + const comma_one = p.eatToken(.comma); + if (p.eatToken(.r_brace)) |_| { return p.addNode(.{ .tag = if (comma_one != null) .StructInitOneComma else .StructInitOne, .main_token = lbrace, @@ -2166,17 +2166,17 @@ const Parser = struct { try init_list.append(next); switch (p.token_tags[p.nextToken()]) { - .Comma => { - if (p.eatToken(.RBrace)) |_| break; + .comma => { + if (p.eatToken(.r_brace)) |_| break; continue; }, - .RBrace => break, - .Colon, .RParen, .RBracket => { + .r_brace => break, + .colon, .r_paren, .r_bracket => { p.tok_i -= 1; return p.fail(.{ .ExpectedToken = .{ .token = p.tok_i, - .expected_id = .RBrace, + .expected_id = .r_brace, }, }); }, @@ -2185,14 +2185,14 @@ const Parser = struct { // give an error but continue parsing this list. 
p.tok_i -= 1; try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, }); }, } } const span = try p.listToSpan(init_list.items); return p.addNode(.{ - .tag = if (p.token_tags[p.tok_i - 2] == .Comma) .StructInitComma else .StructInit, + .tag = if (p.token_tags[p.tok_i - 2] == .comma) .StructInitComma else .StructInit, .main_token = lbrace, .data = .{ .lhs = lhs, @@ -2205,8 +2205,8 @@ const Parser = struct { } const elem_init = try p.expectExpr(); - const comma_one = p.eatToken(.Comma); - if (p.eatToken(.RBrace)) |_| { + const comma_one = p.eatToken(.comma); + if (p.eatToken(.r_brace)) |_| { return p.addNode(.{ .tag = if (comma_one != null) .ArrayInitOneComma else .ArrayInitOne, .main_token = lbrace, @@ -2218,7 +2218,7 @@ const Parser = struct { } if (comma_one == null) { try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, }); } @@ -2231,12 +2231,12 @@ const Parser = struct { var next = try p.parseExpr(); while (next != 0) : (next = try p.parseExpr()) { try init_list.append(next); - if (p.eatToken(.Comma) == null) { + if (p.eatToken(.comma) == null) { trailing_comma = false; break; } } - _ = try p.expectToken(.RBrace); + _ = try p.expectToken(.r_brace); const span = try p.listToSpan(init_list.items); return p.addNode(.{ .tag = if (trailing_comma) .ArrayInitComma else .ArrayInit, @@ -2255,7 +2255,7 @@ const Parser = struct { fn parseErrorUnionExpr(p: *Parser) !Node.Index { const suffix_expr = try p.parseSuffixExpr(); if (suffix_expr == 0) return null_node; - const bang = p.eatToken(.Bang) orelse return suffix_expr; + const bang = p.eatToken(.bang) orelse return suffix_expr; return p.addNode(.{ .tag = .ErrorUnion, .main_token = bang, @@ -2272,7 +2272,7 @@ const Parser = struct { /// FnCallArguments <- LPAREN ExprList RPAREN /// ExprList <- (Expr COMMA)* Expr? 
fn parseSuffixExpr(p: *Parser) !Node.Index { - if (p.eatToken(.Keyword_async)) |async_token| { + if (p.eatToken(.keyword_async)) |async_token| { var res = try p.expectPrimaryTypeExpr(); while (true) { @@ -2280,11 +2280,11 @@ const Parser = struct { if (node == 0) break; res = node; } - const lparen = (try p.expectTokenRecoverable(.LParen)) orelse { + const lparen = (try p.expectTokenRecoverable(.l_paren)) orelse { try p.warn(.{ .ExpectedParamList = .{ .token = p.tok_i } }); return res; }; - if (p.eatToken(.RParen)) |_| { + if (p.eatToken(.r_paren)) |_| { return p.addNode(.{ .tag = .AsyncCallOne, .main_token = lparen, @@ -2295,8 +2295,8 @@ const Parser = struct { }); } const param_one = try p.expectExpr(); - const comma_one = p.eatToken(.Comma); - if (p.eatToken(.RParen)) |_| { + const comma_one = p.eatToken(.comma); + if (p.eatToken(.r_paren)) |_| { return p.addNode(.{ .tag = if (comma_one == null) .AsyncCallOne else .AsyncCallOneComma, .main_token = lparen, @@ -2308,7 +2308,7 @@ const Parser = struct { } if (comma_one == null) { try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, }); } @@ -2321,8 +2321,8 @@ const Parser = struct { const next = try p.expectExpr(); try param_list.append(next); switch (p.token_tags[p.nextToken()]) { - .Comma => { - if (p.eatToken(.RParen)) |_| { + .comma => { + if (p.eatToken(.r_paren)) |_| { const span = try p.listToSpan(param_list.items); return p.addNode(.{ .tag = .AsyncCallComma, @@ -2339,7 +2339,7 @@ const Parser = struct { continue; } }, - .RParen => { + .r_paren => { const span = try p.listToSpan(param_list.items); return p.addNode(.{ .tag = .AsyncCall, @@ -2353,12 +2353,12 @@ const Parser = struct { }, }); }, - .Colon, .RBrace, .RBracket => { + .colon, .r_brace, .r_bracket => { p.tok_i -= 1; return p.fail(.{ .ExpectedToken = .{ .token = p.tok_i, - .expected_id = .RParen, + .expected_id = .r_paren, }, }); }, @@ -2367,7 +2367,7 @@ const 
Parser = struct { try p.warn(.{ .ExpectedToken = .{ .token = p.tok_i, - .expected_id = .Comma, + .expected_id = .comma, }, }); }, @@ -2384,8 +2384,8 @@ const Parser = struct { continue; } res = res: { - const lparen = p.eatToken(.LParen) orelse return res; - if (p.eatToken(.RParen)) |_| { + const lparen = p.eatToken(.l_paren) orelse return res; + if (p.eatToken(.r_paren)) |_| { break :res try p.addNode(.{ .tag = .CallOne, .main_token = lparen, @@ -2396,8 +2396,8 @@ const Parser = struct { }); } const param_one = try p.expectExpr(); - const comma_one = p.eatToken(.Comma); - if (p.eatToken(.RParen)) |_| { + const comma_one = p.eatToken(.comma); + if (p.eatToken(.r_paren)) |_| { break :res try p.addNode(.{ .tag = if (comma_one == null) .CallOne else .CallOneComma, .main_token = lparen, @@ -2409,7 +2409,7 @@ const Parser = struct { } if (comma_one == null) { try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, }); } @@ -2422,8 +2422,8 @@ const Parser = struct { const next = try p.expectExpr(); try param_list.append(next); switch (p.token_tags[p.nextToken()]) { - .Comma => { - if (p.eatToken(.RParen)) |_| { + .comma => { + if (p.eatToken(.r_paren)) |_| { const span = try p.listToSpan(param_list.items); break :res try p.addNode(.{ .tag = .CallComma, @@ -2440,7 +2440,7 @@ const Parser = struct { continue; } }, - .RParen => { + .r_paren => { const span = try p.listToSpan(param_list.items); break :res try p.addNode(.{ .tag = .Call, @@ -2454,12 +2454,12 @@ const Parser = struct { }, }); }, - .Colon, .RBrace, .RBracket => { + .colon, .r_brace, .r_bracket => { p.tok_i -= 1; return p.fail(.{ .ExpectedToken = .{ .token = p.tok_i, - .expected_id = .RParen, + .expected_id = .r_paren, }, }); }, @@ -2468,7 +2468,7 @@ const Parser = struct { try p.warn(.{ .ExpectedToken = .{ .token = p.tok_i, - .expected_id = .Comma, + .expected_id = .comma, }, }); }, @@ -2517,7 +2517,7 @@ const Parser = struct 
{ /// LoopTypeExpr <- KEYWORD_inline? (ForTypeExpr / WhileTypeExpr) fn parsePrimaryTypeExpr(p: *Parser) !Node.Index { switch (p.token_tags[p.tok_i]) { - .CharLiteral => return p.addNode(.{ + .char_literal => return p.addNode(.{ .tag = .CharLiteral, .main_token = p.nextToken(), .data = .{ @@ -2525,7 +2525,7 @@ const Parser = struct { .rhs = undefined, }, }), - .IntegerLiteral => return p.addNode(.{ + .integer_literal => return p.addNode(.{ .tag = .IntegerLiteral, .main_token = p.nextToken(), .data = .{ @@ -2533,7 +2533,7 @@ const Parser = struct { .rhs = undefined, }, }), - .FloatLiteral => return p.addNode(.{ + .float_literal => return p.addNode(.{ .tag = .FloatLiteral, .main_token = p.nextToken(), .data = .{ @@ -2541,7 +2541,7 @@ const Parser = struct { .rhs = undefined, }, }), - .Keyword_false => return p.addNode(.{ + .keyword_false => return p.addNode(.{ .tag = .FalseLiteral, .main_token = p.nextToken(), .data = .{ @@ -2549,7 +2549,7 @@ const Parser = struct { .rhs = undefined, }, }), - .Keyword_true => return p.addNode(.{ + .keyword_true => return p.addNode(.{ .tag = .TrueLiteral, .main_token = p.nextToken(), .data = .{ @@ -2557,7 +2557,7 @@ const Parser = struct { .rhs = undefined, }, }), - .Keyword_null => return p.addNode(.{ + .keyword_null => return p.addNode(.{ .tag = .NullLiteral, .main_token = p.nextToken(), .data = .{ @@ -2565,7 +2565,7 @@ const Parser = struct { .rhs = undefined, }, }), - .Keyword_undefined => return p.addNode(.{ + .keyword_undefined => return p.addNode(.{ .tag = .UndefinedLiteral, .main_token = p.nextToken(), .data = .{ @@ -2573,7 +2573,7 @@ const Parser = struct { .rhs = undefined, }, }), - .Keyword_unreachable => return p.addNode(.{ + .keyword_unreachable => return p.addNode(.{ .tag = .UnreachableLiteral, .main_token = p.nextToken(), .data = .{ @@ -2581,7 +2581,7 @@ const Parser = struct { .rhs = undefined, }, }), - .Keyword_anyframe => return p.addNode(.{ + .keyword_anyframe => return p.addNode(.{ .tag = .AnyFrameLiteral, 
.main_token = p.nextToken(), .data = .{ @@ -2589,7 +2589,7 @@ const Parser = struct { .rhs = undefined, }, }), - .StringLiteral => { + .string_literal => { const main_token = p.nextToken(); return p.addNode(.{ .tag = .StringLiteral, @@ -2601,25 +2601,25 @@ const Parser = struct { }); }, - .Builtin => return p.parseBuiltinCall(), - .Keyword_fn => return p.parseFnProto(), - .Keyword_if => return p.parseIf(parseTypeExpr), - .Keyword_switch => return p.expectSwitchExpr(), + .builtin => return p.parseBuiltinCall(), + .keyword_fn => return p.parseFnProto(), + .keyword_if => return p.parseIf(parseTypeExpr), + .keyword_switch => return p.expectSwitchExpr(), - .Keyword_extern, - .Keyword_packed, + .keyword_extern, + .keyword_packed, => { p.tok_i += 1; return p.parseContainerDeclAuto(); }, - .Keyword_struct, - .Keyword_opaque, - .Keyword_enum, - .Keyword_union, + .keyword_struct, + .keyword_opaque, + .keyword_enum, + .keyword_union, => return p.parseContainerDeclAuto(), - .Keyword_comptime => return p.addNode(.{ + .keyword_comptime => return p.addNode(.{ .tag = .Comptime, .main_token = p.nextToken(), .data = .{ @@ -2627,9 +2627,9 @@ const Parser = struct { .rhs = undefined, }, }), - .MultilineStringLiteralLine => { + .multiline_string_literal_line => { const first_line = p.nextToken(); - while (p.token_tags[p.tok_i] == .MultilineStringLiteralLine) { + while (p.token_tags[p.tok_i] == .multiline_string_literal_line) { p.tok_i += 1; } return p.addNode(.{ @@ -2641,23 +2641,23 @@ const Parser = struct { }, }); }, - .Identifier => switch (p.token_tags[p.tok_i + 1]) { - .Colon => switch (p.token_tags[p.tok_i + 2]) { - .Keyword_inline => { + .identifier => switch (p.token_tags[p.tok_i + 1]) { + .colon => switch (p.token_tags[p.tok_i + 2]) { + .keyword_inline => { p.tok_i += 3; switch (p.token_tags[p.tok_i]) { - .Keyword_for => return p.parseForTypeExpr(), - .Keyword_while => return p.parseWhileTypeExpr(), + .keyword_for => return p.parseForTypeExpr(), + .keyword_while => return 
p.parseWhileTypeExpr(), else => return p.fail(.{ .ExpectedInlinable = .{ .token = p.tok_i }, }), } }, - .Keyword_for => { + .keyword_for => { p.tok_i += 2; return p.parseForTypeExpr(); }, - .Keyword_while => { + .keyword_while => { p.tok_i += 2; return p.parseWhileTypeExpr(); }, @@ -2679,8 +2679,8 @@ const Parser = struct { }, }), }, - .Period => switch (p.token_tags[p.tok_i + 1]) { - .Identifier => return p.addNode(.{ + .period => switch (p.token_tags[p.tok_i + 1]) { + .identifier => return p.addNode(.{ .tag = .EnumLiteral, .data = .{ .lhs = p.nextToken(), // dot @@ -2688,14 +2688,14 @@ const Parser = struct { }, .main_token = p.nextToken(), // identifier }), - .LBrace => { + .l_brace => { const lbrace = p.tok_i + 1; p.tok_i = lbrace + 1; // If there are 0, 1, or 2 items, we can use ArrayInitDotTwo/StructInitDotTwo; // otherwise we use the full ArrayInitDot/StructInitDot. - if (p.eatToken(.RBrace)) |_| { + if (p.eatToken(.r_brace)) |_| { return p.addNode(.{ .tag = .StructInitDotTwo, .main_token = lbrace, @@ -2707,8 +2707,8 @@ const Parser = struct { } const field_init_one = try p.parseFieldInit(); if (field_init_one != 0) { - const comma_one = p.eatToken(.Comma); - if (p.eatToken(.RBrace)) |_| { + const comma_one = p.eatToken(.comma); + if (p.eatToken(.r_brace)) |_| { return p.addNode(.{ .tag = if (comma_one != null) .StructInitDotTwoComma else .StructInitDotTwo, .main_token = lbrace, @@ -2720,12 +2720,12 @@ const Parser = struct { } if (comma_one == null) { try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, }); } const field_init_two = try p.expectFieldInit(); - const comma_two = p.eatToken(.Comma); - if (p.eatToken(.RBrace)) |_| { + const comma_two = p.eatToken(.comma); + if (p.eatToken(.r_brace)) |_| { return p.addNode(.{ .tag = if (comma_two != null) .StructInitDotTwoComma else .StructInitDotTwo, .main_token = lbrace, @@ -2737,30 +2737,30 @@ const Parser = struct { } 
if (comma_two == null) { try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, }); } var init_list = std.ArrayList(Node.Index).init(p.gpa); defer init_list.deinit(); - try init_list.appendSlice(&[_]Node.Index{ field_init_one, field_init_two }); + try init_list.appendSlice(&.{ field_init_one, field_init_two }); while (true) { const next = try p.expectFieldInit(); assert(next != 0); try init_list.append(next); switch (p.token_tags[p.nextToken()]) { - .Comma => { - if (p.eatToken(.RBrace)) |_| break; + .comma => { + if (p.eatToken(.r_brace)) |_| break; continue; }, - .RBrace => break, - .Colon, .RParen, .RBracket => { + .r_brace => break, + .colon, .r_paren, .r_bracket => { p.tok_i -= 1; return p.fail(.{ .ExpectedToken = .{ .token = p.tok_i, - .expected_id = .RBrace, + .expected_id = .r_brace, }, }); }, @@ -2769,14 +2769,14 @@ const Parser = struct { try p.warn(.{ .ExpectedToken = .{ .token = p.tok_i, - .expected_id = .Comma, + .expected_id = .comma, }, }); }, } } const span = try p.listToSpan(init_list.items); - const trailing_comma = p.token_tags[p.tok_i - 2] == .Comma; + const trailing_comma = p.token_tags[p.tok_i - 2] == .comma; return p.addNode(.{ .tag = if (trailing_comma) .StructInitDotComma else .StructInitDot, .main_token = lbrace, @@ -2788,8 +2788,8 @@ const Parser = struct { } const elem_init_one = try p.expectExpr(); - const comma_one = p.eatToken(.Comma); - if (p.eatToken(.RBrace)) |_| { + const comma_one = p.eatToken(.comma); + if (p.eatToken(.r_brace)) |_| { return p.addNode(.{ .tag = if (comma_one != null) .ArrayInitDotTwoComma else .ArrayInitDotTwo, .main_token = lbrace, @@ -2801,12 +2801,12 @@ const Parser = struct { } if (comma_one == null) { try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, }); } const elem_init_two = try p.expectExpr(); - const comma_two = 
p.eatToken(.Comma); - if (p.eatToken(.RBrace)) |_| { + const comma_two = p.eatToken(.comma); + if (p.eatToken(.r_brace)) |_| { return p.addNode(.{ .tag = if (comma_two != null) .ArrayInitDotTwoComma else .ArrayInitDotTwo, .main_token = lbrace, @@ -2818,30 +2818,30 @@ const Parser = struct { } if (comma_two == null) { try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, }); } var init_list = std.ArrayList(Node.Index).init(p.gpa); defer init_list.deinit(); - try init_list.appendSlice(&[_]Node.Index{ elem_init_one, elem_init_two }); + try init_list.appendSlice(&.{ elem_init_one, elem_init_two }); while (true) { const next = try p.expectExpr(); if (next == 0) break; try init_list.append(next); switch (p.token_tags[p.nextToken()]) { - .Comma => { - if (p.eatToken(.RBrace)) |_| break; + .comma => { + if (p.eatToken(.r_brace)) |_| break; continue; }, - .RBrace => break, - .Colon, .RParen, .RBracket => { + .r_brace => break, + .colon, .r_paren, .r_bracket => { p.tok_i -= 1; return p.fail(.{ .ExpectedToken = .{ .token = p.tok_i, - .expected_id = .RBrace, + .expected_id = .r_brace, }, }); }, @@ -2850,7 +2850,7 @@ const Parser = struct { try p.warn(.{ .ExpectedToken = .{ .token = p.tok_i, - .expected_id = .Comma, + .expected_id = .comma, }, }); }, @@ -2858,7 +2858,7 @@ const Parser = struct { } const span = try p.listToSpan(init_list.items); return p.addNode(.{ - .tag = if (p.token_tags[p.tok_i - 2] == .Comma) .ArrayInitDotComma else .ArrayInitDot, + .tag = if (p.token_tags[p.tok_i - 2] == .comma) .ArrayInitDotComma else .ArrayInitDot, .main_token = lbrace, .data = .{ .lhs = span.start, @@ -2868,12 +2868,12 @@ const Parser = struct { }, else => return null_node, }, - .Keyword_error => switch (p.token_tags[p.tok_i + 1]) { - .LBrace => { + .keyword_error => switch (p.token_tags[p.tok_i + 1]) { + .l_brace => { const error_token = p.tok_i; p.tok_i += 2; - if (p.eatToken(.RBrace)) |rbrace| { 
+ if (p.eatToken(.r_brace)) |rbrace| { return p.addNode(.{ .tag = .ErrorSetDecl, .main_token = error_token, @@ -2886,19 +2886,19 @@ const Parser = struct { while (true) { const doc_comment = p.eatDocComments(); - const identifier = try p.expectToken(.Identifier); + const identifier = try p.expectToken(.identifier); switch (p.token_tags[p.nextToken()]) { - .Comma => { - if (p.eatToken(.RBrace)) |_| break; + .comma => { + if (p.eatToken(.r_brace)) |_| break; continue; }, - .RBrace => break, - .Colon, .RParen, .RBracket => { + .r_brace => break, + .colon, .r_paren, .r_bracket => { p.tok_i -= 1; return p.fail(.{ .ExpectedToken = .{ .token = p.tok_i, - .expected_id = .RBrace, + .expected_id = .r_brace, }, }); }, @@ -2907,7 +2907,7 @@ const Parser = struct { // give an error but continue parsing this list. p.tok_i -= 1; try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, }); }, } @@ -2925,17 +2925,17 @@ const Parser = struct { .tag = .ErrorValue, .main_token = p.nextToken(), .data = .{ - .lhs = try p.expectToken(.Period), - .rhs = try p.expectToken(.Identifier), + .lhs = try p.expectToken(.period), + .rhs = try p.expectToken(.identifier), }, }), }, - .LParen => return p.addNode(.{ + .l_paren => return p.addNode(.{ .tag = .GroupedExpression, .main_token = p.nextToken(), .data = .{ .lhs = try p.expectExpr(), - .rhs = try p.expectToken(.RParen), + .rhs = try p.expectToken(.r_paren), }, }), else => return null_node, @@ -2953,14 +2953,14 @@ const Parser = struct { /// ForPrefix <- KEYWORD_for LPAREN Expr RPAREN PtrIndexPayload /// ForTypeExpr <- ForPrefix TypeExpr (KEYWORD_else TypeExpr)? 
fn parseForTypeExpr(p: *Parser) !Node.Index { - const for_token = p.eatToken(.Keyword_for) orelse return null_node; - _ = try p.expectToken(.LParen); + const for_token = p.eatToken(.keyword_for) orelse return null_node; + _ = try p.expectToken(.l_paren); const array_expr = try p.expectTypeExpr(); - _ = try p.expectToken(.RParen); + _ = try p.expectToken(.r_paren); _ = try p.parsePtrIndexPayload(); const then_expr = try p.expectExpr(); - const else_token = p.eatToken(.Keyword_else) orelse { + const else_token = p.eatToken(.keyword_else) orelse { return p.addNode(.{ .tag = .ForSimple, .main_token = for_token, @@ -2987,15 +2987,15 @@ const Parser = struct { /// WhilePrefix <- KEYWORD_while LPAREN Expr RPAREN PtrPayload? WhileContinueExpr? /// WhileTypeExpr <- WhilePrefix TypeExpr (KEYWORD_else Payload? TypeExpr)? fn parseWhileTypeExpr(p: *Parser) !Node.Index { - const while_token = p.eatToken(.Keyword_while) orelse return null_node; - _ = try p.expectToken(.LParen); + const while_token = p.eatToken(.keyword_while) orelse return null_node; + _ = try p.expectToken(.l_paren); const condition = try p.expectExpr(); - _ = try p.expectToken(.RParen); + _ = try p.expectToken(.r_paren); const then_payload = try p.parsePtrPayload(); const cont_expr = try p.parseWhileContinueExpr(); const then_expr = try p.expectTypeExpr(); - const else_token = p.eatToken(.Keyword_else) orelse { + const else_token = p.eatToken(.keyword_else) orelse { if (cont_expr == 0) { return p.addNode(.{ .tag = .WhileSimple, @@ -3037,14 +3037,14 @@ const Parser = struct { /// SwitchExpr <- KEYWORD_switch LPAREN Expr RPAREN LBRACE SwitchProngList RBRACE fn expectSwitchExpr(p: *Parser) !Node.Index { - const switch_token = p.assertToken(.Keyword_switch); - _ = try p.expectToken(.LParen); + const switch_token = p.assertToken(.keyword_switch); + _ = try p.expectToken(.l_paren); const expr_node = try p.expectExpr(); - _ = try p.expectToken(.RParen); - _ = try p.expectToken(.LBrace); + _ = try 
p.expectToken(.r_paren); + _ = try p.expectToken(.l_brace); const cases = try p.parseSwitchProngList(); - const trailing_comma = p.token_tags[p.tok_i - 1] == .Comma; - _ = try p.expectToken(.RBrace); + const trailing_comma = p.token_tags[p.tok_i - 1] == .comma; + _ = try p.expectToken(.r_brace); return p.addNode(.{ .tag = if (trailing_comma) .SwitchComma else .Switch, @@ -3067,12 +3067,12 @@ const Parser = struct { /// AsmOutputList <- (AsmOutputItem COMMA)* AsmOutputItem? /// AsmInputList <- (AsmInputItem COMMA)* AsmInputItem? fn expectAsmExpr(p: *Parser) !Node.Index { - const asm_token = p.assertToken(.Keyword_asm); - _ = p.eatToken(.Keyword_volatile); - _ = try p.expectToken(.LParen); + const asm_token = p.assertToken(.keyword_asm); + _ = p.eatToken(.keyword_volatile); + _ = try p.expectToken(.l_paren); const template = try p.expectExpr(); - if (p.eatToken(.RParen)) |rparen| { + if (p.eatToken(.r_paren)) |rparen| { return p.addNode(.{ .tag = .AsmSimple, .main_token = asm_token, @@ -3083,7 +3083,7 @@ const Parser = struct { }); } - _ = try p.expectToken(.Colon); + _ = try p.expectToken(.colon); var list = std.ArrayList(Node.Index).init(p.gpa); defer list.deinit(); @@ -3093,51 +3093,51 @@ const Parser = struct { if (output_item == 0) break; try list.append(output_item); switch (p.token_tags[p.tok_i]) { - .Comma => p.tok_i += 1, - .Colon, .RParen, .RBrace, .RBracket => break, // All possible delimiters. + .comma => p.tok_i += 1, + .colon, .r_paren, .r_brace, .r_bracket => break, // All possible delimiters. else => { // This is likely just a missing comma; // give an error but continue parsing this list. 
try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, }); }, } } - if (p.eatToken(.Colon)) |_| { + if (p.eatToken(.colon)) |_| { while (true) { const input_item = try p.parseAsmInputItem(); if (input_item == 0) break; try list.append(input_item); switch (p.token_tags[p.tok_i]) { - .Comma => p.tok_i += 1, - .Colon, .RParen, .RBrace, .RBracket => break, // All possible delimiters. + .comma => p.tok_i += 1, + .colon, .r_paren, .r_brace, .r_bracket => break, // All possible delimiters. else => { // This is likely just a missing comma; // give an error but continue parsing this list. try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, }); }, } } - if (p.eatToken(.Colon)) |_| { - while (p.eatToken(.StringLiteral)) |_| { + if (p.eatToken(.colon)) |_| { + while (p.eatToken(.string_literal)) |_| { switch (p.token_tags[p.tok_i]) { - .Comma => p.tok_i += 1, - .Colon, .RParen, .RBrace, .RBracket => break, + .comma => p.tok_i += 1, + .colon, .r_paren, .r_brace, .r_bracket => break, else => { // This is likely just a missing comma; // give an error but continue parsing this list. 
try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, }); }, } } } } - const rparen = try p.expectToken(.RParen); + const rparen = try p.expectToken(.r_paren); const span = try p.listToSpan(list.items); return p.addNode(.{ .tag = .Asm, @@ -3155,20 +3155,20 @@ const Parser = struct { /// AsmOutputItem <- LBRACKET IDENTIFIER RBRACKET STRINGLITERAL LPAREN (MINUSRARROW TypeExpr / IDENTIFIER) RPAREN fn parseAsmOutputItem(p: *Parser) !Node.Index { - _ = p.eatToken(.LBracket) orelse return null_node; - const identifier = try p.expectToken(.Identifier); - _ = try p.expectToken(.RBracket); - _ = try p.expectToken(.StringLiteral); - _ = try p.expectToken(.LParen); + _ = p.eatToken(.l_bracket) orelse return null_node; + const identifier = try p.expectToken(.identifier); + _ = try p.expectToken(.r_bracket); + _ = try p.expectToken(.string_literal); + _ = try p.expectToken(.l_paren); const type_expr: Node.Index = blk: { - if (p.eatToken(.Arrow)) |_| { + if (p.eatToken(.arrow)) |_| { break :blk try p.expectTypeExpr(); } else { - _ = try p.expectToken(.Identifier); + _ = try p.expectToken(.identifier); break :blk null_node; } }; - const rparen = try p.expectToken(.RParen); + const rparen = try p.expectToken(.r_paren); return p.addNode(.{ .tag = .AsmOutput, .main_token = identifier, @@ -3181,13 +3181,13 @@ const Parser = struct { /// AsmInputItem <- LBRACKET IDENTIFIER RBRACKET STRINGLITERAL LPAREN Expr RPAREN fn parseAsmInputItem(p: *Parser) !Node.Index { - _ = p.eatToken(.LBracket) orelse return null_node; - const identifier = try p.expectToken(.Identifier); - _ = try p.expectToken(.RBracket); - _ = try p.expectToken(.StringLiteral); - _ = try p.expectToken(.LParen); + _ = p.eatToken(.l_bracket) orelse return null_node; + const identifier = try p.expectToken(.identifier); + _ = try p.expectToken(.r_bracket); + _ = try p.expectToken(.string_literal); + _ = try p.expectToken(.l_paren); const 
expr = try p.expectExpr(); - const rparen = try p.expectToken(.RParen); + const rparen = try p.expectToken(.r_paren); return p.addNode(.{ .tag = .AsmInput, .main_token = identifier, @@ -3200,14 +3200,14 @@ const Parser = struct { /// BreakLabel <- COLON IDENTIFIER fn parseBreakLabel(p: *Parser) !TokenIndex { - _ = p.eatToken(.Colon) orelse return @as(TokenIndex, 0); - return p.expectToken(.Identifier); + _ = p.eatToken(.colon) orelse return @as(TokenIndex, 0); + return p.expectToken(.identifier); } /// BlockLabel <- IDENTIFIER COLON fn parseBlockLabel(p: *Parser) TokenIndex { - if (p.token_tags[p.tok_i] == .Identifier and - p.token_tags[p.tok_i + 1] == .Colon) + if (p.token_tags[p.tok_i] == .identifier and + p.token_tags[p.tok_i + 1] == .colon) { const identifier = p.tok_i; p.tok_i += 2; @@ -3218,9 +3218,9 @@ const Parser = struct { /// FieldInit <- DOT IDENTIFIER EQUAL Expr fn parseFieldInit(p: *Parser) !Node.Index { - if (p.token_tags[p.tok_i + 0] == .Period and - p.token_tags[p.tok_i + 1] == .Identifier and - p.token_tags[p.tok_i + 2] == .Equal) + if (p.token_tags[p.tok_i + 0] == .period and + p.token_tags[p.tok_i + 1] == .identifier and + p.token_tags[p.tok_i + 2] == .equal) { p.tok_i += 3; return p.expectExpr(); @@ -3230,37 +3230,37 @@ const Parser = struct { } fn expectFieldInit(p: *Parser) !Node.Index { - _ = try p.expectToken(.Period); - _ = try p.expectToken(.Identifier); - _ = try p.expectToken(.Equal); + _ = try p.expectToken(.period); + _ = try p.expectToken(.identifier); + _ = try p.expectToken(.equal); return p.expectExpr(); } /// WhileContinueExpr <- COLON LPAREN AssignExpr RPAREN fn parseWhileContinueExpr(p: *Parser) !Node.Index { - _ = p.eatToken(.Colon) orelse return null_node; - _ = try p.expectToken(.LParen); + _ = p.eatToken(.colon) orelse return null_node; + _ = try p.expectToken(.l_paren); const node = try p.parseAssignExpr(); if (node == 0) return p.fail(.{ .ExpectedExprOrAssignment = .{ .token = p.tok_i } }); - _ = try 
p.expectToken(.RParen); + _ = try p.expectToken(.r_paren); return node; } /// LinkSection <- KEYWORD_linksection LPAREN Expr RPAREN fn parseLinkSection(p: *Parser) !Node.Index { - _ = p.eatToken(.Keyword_linksection) orelse return null_node; - _ = try p.expectToken(.LParen); + _ = p.eatToken(.keyword_linksection) orelse return null_node; + _ = try p.expectToken(.l_paren); const expr_node = try p.expectExpr(); - _ = try p.expectToken(.RParen); + _ = try p.expectToken(.r_paren); return expr_node; } /// CallConv <- KEYWORD_callconv LPAREN Expr RPAREN fn parseCallconv(p: *Parser) !Node.Index { - _ = p.eatToken(.Keyword_callconv) orelse return null_node; - _ = try p.expectToken(.LParen); + _ = p.eatToken(.keyword_callconv) orelse return null_node; + _ = try p.expectToken(.l_paren); const expr_node = try p.expectExpr(); - _ = try p.expectToken(.RParen); + _ = try p.expectToken(.r_paren); return expr_node; } @@ -3276,20 +3276,20 @@ const Parser = struct { fn expectParamDecl(p: *Parser) !Node.Index { _ = p.eatDocComments(); switch (p.token_tags[p.tok_i]) { - .Keyword_noalias, .Keyword_comptime => p.tok_i += 1, - .Ellipsis3 => { + .keyword_noalias, .keyword_comptime => p.tok_i += 1, + .ellipsis3 => { p.tok_i += 1; return null_node; }, else => {}, } - if (p.token_tags[p.tok_i] == .Identifier and - p.token_tags[p.tok_i + 1] == .Colon) + if (p.token_tags[p.tok_i] == .identifier and + p.token_tags[p.tok_i + 1] == .colon) { p.tok_i += 2; } switch (p.token_tags[p.tok_i]) { - .Keyword_anytype => { + .keyword_anytype => { p.tok_i += 1; return null_node; }, @@ -3299,31 +3299,31 @@ const Parser = struct { /// Payload <- PIPE IDENTIFIER PIPE fn parsePayload(p: *Parser) !TokenIndex { - _ = p.eatToken(.Pipe) orelse return @as(TokenIndex, 0); - const identifier = try p.expectToken(.Identifier); - _ = try p.expectToken(.Pipe); + _ = p.eatToken(.pipe) orelse return @as(TokenIndex, 0); + const identifier = try p.expectToken(.identifier); + _ = try p.expectToken(.pipe); return identifier; } 
/// PtrPayload <- PIPE ASTERISK? IDENTIFIER PIPE fn parsePtrPayload(p: *Parser) !TokenIndex { - _ = p.eatToken(.Pipe) orelse return @as(TokenIndex, 0); - _ = p.eatToken(.Asterisk); - const identifier = try p.expectToken(.Identifier); - _ = try p.expectToken(.Pipe); + _ = p.eatToken(.pipe) orelse return @as(TokenIndex, 0); + _ = p.eatToken(.asterisk); + const identifier = try p.expectToken(.identifier); + _ = try p.expectToken(.pipe); return identifier; } /// PtrIndexPayload <- PIPE ASTERISK? IDENTIFIER (COMMA IDENTIFIER)? PIPE /// Returns the first identifier token, if any. fn parsePtrIndexPayload(p: *Parser) !TokenIndex { - _ = p.eatToken(.Pipe) orelse return @as(TokenIndex, 0); - _ = p.eatToken(.Asterisk); - const identifier = try p.expectToken(.Identifier); - if (p.eatToken(.Comma) != null) { - _ = try p.expectToken(.Identifier); + _ = p.eatToken(.pipe) orelse return @as(TokenIndex, 0); + _ = p.eatToken(.asterisk); + const identifier = try p.expectToken(.identifier); + if (p.eatToken(.comma) != null) { + _ = try p.expectToken(.identifier); } - _ = try p.expectToken(.Pipe); + _ = try p.expectToken(.pipe); return identifier; } @@ -3332,8 +3332,8 @@ const Parser = struct { /// <- SwitchItem (COMMA SwitchItem)* COMMA? 
/// / KEYWORD_else fn parseSwitchProng(p: *Parser) !Node.Index { - if (p.eatToken(.Keyword_else)) |_| { - const arrow_token = try p.expectToken(.EqualAngleBracketRight); + if (p.eatToken(.keyword_else)) |_| { + const arrow_token = try p.expectToken(.equal_angle_bracket_right); _ = try p.parsePtrPayload(); return p.addNode(.{ .tag = .SwitchCaseOne, @@ -3347,7 +3347,7 @@ const Parser = struct { const first_item = try p.parseSwitchItem(); if (first_item == 0) return null_node; - if (p.eatToken(.EqualAngleBracketRight)) |arrow_token| { + if (p.eatToken(.equal_angle_bracket_right)) |arrow_token| { _ = try p.parsePtrPayload(); return p.addNode(.{ .tag = .SwitchCaseOne, @@ -3363,13 +3363,13 @@ const Parser = struct { defer list.deinit(); try list.append(first_item); - while (p.eatToken(.Comma)) |_| { + while (p.eatToken(.comma)) |_| { const next_item = try p.parseSwitchItem(); if (next_item == 0) break; try list.append(next_item); } const span = try p.listToSpan(list.items); - const arrow_token = try p.expectToken(.EqualAngleBracketRight); + const arrow_token = try p.expectToken(.equal_angle_bracket_right); _ = try p.parsePtrPayload(); return p.addNode(.{ .tag = .SwitchCase, @@ -3389,7 +3389,7 @@ const Parser = struct { const expr = try p.parseExpr(); if (expr == 0) return null_node; - if (p.eatToken(.Ellipsis3)) |token| { + if (p.eatToken(.ellipsis3)) |token| { return p.addNode(.{ .tag = .SwitchRange, .main_token = token, @@ -3419,25 +3419,25 @@ const Parser = struct { var saw_allowzero = false; while (true) { switch (p.token_tags[p.tok_i]) { - .Keyword_align => { + .keyword_align => { if (result.align_node != 0) { try p.warn(.{ .ExtraAlignQualifier = .{ .token = p.tok_i }, }); } p.tok_i += 1; - _ = try p.expectToken(.LParen); + _ = try p.expectToken(.l_paren); result.align_node = try p.expectExpr(); - if (p.eatToken(.Colon)) |_| { + if (p.eatToken(.colon)) |_| { result.bit_range_start = try p.expectExpr(); - _ = try p.expectToken(.Colon); + _ = try 
p.expectToken(.colon); result.bit_range_end = try p.expectExpr(); } - _ = try p.expectToken(.RParen); + _ = try p.expectToken(.r_paren); }, - .Keyword_const => { + .keyword_const => { if (saw_const) { try p.warn(.{ .ExtraConstQualifier = .{ .token = p.tok_i }, @@ -3446,7 +3446,7 @@ const Parser = struct { p.tok_i += 1; saw_const = true; }, - .Keyword_volatile => { + .keyword_volatile => { if (saw_volatile) { try p.warn(.{ .ExtraVolatileQualifier = .{ .token = p.tok_i }, @@ -3455,7 +3455,7 @@ const Parser = struct { p.tok_i += 1; saw_volatile = true; }, - .Keyword_allowzero => { + .keyword_allowzero => { if (saw_allowzero) { try p.warn(.{ .ExtraAllowZeroQualifier = .{ .token = p.tok_i }, @@ -3476,14 +3476,14 @@ const Parser = struct { /// / DOTQUESTIONMARK fn parseSuffixOp(p: *Parser, lhs: Node.Index) !Node.Index { switch (p.token_tags[p.tok_i]) { - .LBracket => { + .l_bracket => { const lbracket = p.nextToken(); const index_expr = try p.expectExpr(); - if (p.eatToken(.Ellipsis2)) |_| { + if (p.eatToken(.ellipsis2)) |_| { const end_expr = try p.parseExpr(); if (end_expr == 0) { - _ = try p.expectToken(.RBracket); + _ = try p.expectToken(.r_bracket); return p.addNode(.{ .tag = .SliceOpen, .main_token = lbracket, @@ -3493,9 +3493,9 @@ const Parser = struct { }, }); } - if (p.eatToken(.Colon)) |_| { + if (p.eatToken(.colon)) |_| { const sentinel = try p.parseExpr(); - _ = try p.expectToken(.RBracket); + _ = try p.expectToken(.r_bracket); return p.addNode(.{ .tag = .SliceSentinel, .main_token = lbracket, @@ -3509,7 +3509,7 @@ const Parser = struct { }, }); } else { - _ = try p.expectToken(.RBracket); + _ = try p.expectToken(.r_bracket); return p.addNode(.{ .tag = .Slice, .main_token = lbracket, @@ -3523,7 +3523,7 @@ const Parser = struct { }); } } - _ = try p.expectToken(.RBracket); + _ = try p.expectToken(.r_bracket); return p.addNode(.{ .tag = .ArrayAccess, .main_token = lbracket, @@ -3533,7 +3533,7 @@ const Parser = struct { }, }); }, - .PeriodAsterisk => return 
p.addNode(.{ + .period_asterisk => return p.addNode(.{ .tag = .Deref, .main_token = p.nextToken(), .data = .{ @@ -3541,7 +3541,7 @@ const Parser = struct { .rhs = undefined, }, }), - .Invalid_periodasterisks => { + .invalid_periodasterisks => { const period_asterisk = p.nextToken(); try p.warn(.{ .AsteriskAfterPointerDereference = .{ .token = period_asterisk } }); return p.addNode(.{ @@ -3553,8 +3553,8 @@ const Parser = struct { }, }); }, - .Period => switch (p.token_tags[p.tok_i + 1]) { - .Identifier => return p.addNode(.{ + .period => switch (p.token_tags[p.tok_i + 1]) { + .identifier => return p.addNode(.{ .tag = .FieldAccess, .main_token = p.nextToken(), .data = .{ @@ -3562,7 +3562,7 @@ const Parser = struct { .rhs = p.nextToken(), }, }), - .QuestionMark => return p.addNode(.{ + .question_mark => return p.addNode(.{ .tag = .UnwrapOptional, .main_token = p.nextToken(), .data = .{ @@ -3589,28 +3589,28 @@ const Parser = struct { fn parseContainerDeclAuto(p: *Parser) !Node.Index { const main_token = p.nextToken(); const arg_expr = switch (p.token_tags[main_token]) { - .Keyword_struct, .Keyword_opaque => null_node, - .Keyword_enum => blk: { - if (p.eatToken(.LParen)) |_| { + .keyword_struct, .keyword_opaque => null_node, + .keyword_enum => blk: { + if (p.eatToken(.l_paren)) |_| { const expr = try p.expectExpr(); - _ = try p.expectToken(.RParen); + _ = try p.expectToken(.r_paren); break :blk expr; } else { break :blk null_node; } }, - .Keyword_union => blk: { - if (p.eatToken(.LParen)) |_| { - if (p.eatToken(.Keyword_enum)) |_| { - if (p.eatToken(.LParen)) |_| { + .keyword_union => blk: { + if (p.eatToken(.l_paren)) |_| { + if (p.eatToken(.keyword_enum)) |_| { + if (p.eatToken(.l_paren)) |_| { const enum_tag_expr = try p.expectExpr(); - _ = try p.expectToken(.RParen); - _ = try p.expectToken(.RParen); + _ = try p.expectToken(.r_paren); + _ = try p.expectToken(.r_paren); - _ = try p.expectToken(.LBrace); + _ = try p.expectToken(.l_brace); const members = try 
p.parseContainerMembers(); const members_span = try members.toSpan(p); - _ = try p.expectToken(.RBrace); + _ = try p.expectToken(.r_brace); return p.addNode(.{ .tag = switch (members.trailing_comma) { true => .TaggedUnionEnumTagComma, @@ -3623,11 +3623,11 @@ const Parser = struct { }, }); } else { - _ = try p.expectToken(.RParen); + _ = try p.expectToken(.r_paren); - _ = try p.expectToken(.LBrace); + _ = try p.expectToken(.l_brace); const members = try p.parseContainerMembers(); - _ = try p.expectToken(.RBrace); + _ = try p.expectToken(.r_brace); if (members.len <= 2) { return p.addNode(.{ .tag = switch (members.trailing_comma) { @@ -3657,7 +3657,7 @@ const Parser = struct { } } else { const expr = try p.expectExpr(); - _ = try p.expectToken(.RParen); + _ = try p.expectToken(.r_paren); break :blk expr; } } else { @@ -3666,9 +3666,9 @@ const Parser = struct { }, else => unreachable, }; - _ = try p.expectToken(.LBrace); + _ = try p.expectToken(.l_brace); const members = try p.parseContainerMembers(); - _ = try p.expectToken(.RBrace); + _ = try p.expectToken(.r_brace); if (arg_expr == 0) { if (members.len <= 2) { return p.addNode(.{ @@ -3718,10 +3718,10 @@ const Parser = struct { /// Holds temporary data until we are ready to construct the full ContainerDecl AST node. /// ByteAlign <- KEYWORD_align LPAREN Expr RPAREN fn parseByteAlign(p: *Parser) !Node.Index { - _ = p.eatToken(.Keyword_align) orelse return null_node; - _ = try p.expectToken(.LParen); + _ = p.eatToken(.keyword_align) orelse return null_node; + _ = try p.expectToken(.l_paren); const expr = try p.expectExpr(); - _ = try p.expectToken(.RParen); + _ = try p.expectToken(.r_paren); return expr; } @@ -3732,22 +3732,22 @@ const Parser = struct { /// ParamDeclList <- (ParamDecl COMMA)* ParamDecl? 
fn parseParamDeclList(p: *Parser) !SmallSpan { - _ = try p.expectToken(.LParen); - if (p.eatToken(.RParen)) |_| { + _ = try p.expectToken(.l_paren); + if (p.eatToken(.r_paren)) |_| { return SmallSpan{ .zero_or_one = 0 }; } const param_one = while (true) { const param = try p.expectParamDecl(); if (param != 0) break param; switch (p.token_tags[p.nextToken()]) { - .Comma => continue, - .RParen => return SmallSpan{ .zero_or_one = 0 }, + .comma => continue, + .r_paren => return SmallSpan{ .zero_or_one = 0 }, else => { // This is likely just a missing comma; // give an error but continue parsing this list. p.tok_i -= 1; try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, }); }, } @@ -3755,19 +3755,19 @@ const Parser = struct { const param_two = while (true) { switch (p.token_tags[p.nextToken()]) { - .Comma => { - if (p.eatToken(.RParen)) |_| { + .comma => { + if (p.eatToken(.r_paren)) |_| { return SmallSpan{ .zero_or_one = param_one }; } const param = try p.expectParamDecl(); if (param != 0) break param; continue; }, - .RParen => return SmallSpan{ .zero_or_one = param_one }, - .Colon, .RBrace, .RBracket => { + .r_paren => return SmallSpan{ .zero_or_one = param_one }, + .colon, .r_brace, .r_bracket => { p.tok_i -= 1; return p.fail(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .RParen }, + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .r_paren }, }); }, else => { @@ -3775,7 +3775,7 @@ const Parser = struct { // give an error but continue parsing this list. 
p.tok_i -= 1; try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, }); }, } @@ -3784,12 +3784,12 @@ const Parser = struct { var list = std.ArrayList(Node.Index).init(p.gpa); defer list.deinit(); - try list.appendSlice(&[_]Node.Index{ param_one, param_two }); + try list.appendSlice(&.{ param_one, param_two }); while (true) { switch (p.token_tags[p.nextToken()]) { - .Comma => { - if (p.token_tags[p.tok_i] == .RParen) { + .comma => { + if (p.token_tags[p.tok_i] == .r_paren) { p.tok_i += 1; return SmallSpan{ .multi = list.toOwnedSlice() }; } @@ -3799,11 +3799,11 @@ const Parser = struct { } continue; }, - .RParen => return SmallSpan{ .multi = list.toOwnedSlice() }, - .Colon, .RBrace, .RBracket => { + .r_paren => return SmallSpan{ .multi = list.toOwnedSlice() }, + .colon, .r_brace, .r_bracket => { p.tok_i -= 1; return p.fail(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .RParen }, + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .r_paren }, }); }, else => { @@ -3811,7 +3811,7 @@ const Parser = struct { // give an error but continue parsing this list. p.tok_i -= 1; try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, }); }, } @@ -3833,14 +3833,14 @@ const Parser = struct { try list.append(item); switch (p.token_tags[p.tok_i]) { - .Comma => p.tok_i += 1, + .comma => p.tok_i += 1, // all possible delimiters - .Colon, .RParen, .RBrace, .RBracket => break, + .colon, .r_paren, .r_brace, .r_bracket => break, else => { // This is likely just a missing comma; // give an error but continue parsing this list. try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, }); }, } @@ -3853,8 +3853,8 @@ const Parser = struct { /// FnCallArguments <- LPAREN ExprList RPAREN /// ExprList <- (Expr COMMA)* Expr? 
fn parseBuiltinCall(p: *Parser) !Node.Index { - const builtin_token = p.assertToken(.Builtin); - _ = (try p.expectTokenRecoverable(.LParen)) orelse { + const builtin_token = p.assertToken(.builtin); + _ = (try p.expectTokenRecoverable(.l_paren)) orelse { try p.warn(.{ .ExpectedParamList = .{ .token = p.tok_i }, }); @@ -3868,7 +3868,7 @@ const Parser = struct { }, }); }; - if (p.eatToken(.RParen)) |_| { + if (p.eatToken(.r_paren)) |_| { return p.addNode(.{ .tag = .BuiltinCallTwo, .main_token = builtin_token, @@ -3880,8 +3880,8 @@ const Parser = struct { } const param_one = try p.expectExpr(); switch (p.token_tags[p.nextToken()]) { - .Comma => { - if (p.eatToken(.RParen)) |_| { + .comma => { + if (p.eatToken(.r_paren)) |_| { return p.addNode(.{ .tag = .BuiltinCallTwoComma, .main_token = builtin_token, @@ -3892,7 +3892,7 @@ const Parser = struct { }); } }, - .RParen => return p.addNode(.{ + .r_paren => return p.addNode(.{ .tag = .BuiltinCallTwo, .main_token = builtin_token, .data = .{ @@ -3905,14 +3905,14 @@ const Parser = struct { // give an error but continue parsing this list. p.tok_i -= 1; try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, }); }, } const param_two = try p.expectExpr(); switch (p.token_tags[p.nextToken()]) { - .Comma => { - if (p.eatToken(.RParen)) |_| { + .comma => { + if (p.eatToken(.r_paren)) |_| { return p.addNode(.{ .tag = .BuiltinCallTwoComma, .main_token = builtin_token, @@ -3923,7 +3923,7 @@ const Parser = struct { }); } }, - .RParen => return p.addNode(.{ + .r_paren => return p.addNode(.{ .tag = .BuiltinCallTwo, .main_token = builtin_token, .data = .{ @@ -3936,7 +3936,7 @@ const Parser = struct { // give an error but continue parsing this list. 
p.tok_i -= 1; try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, }); }, } @@ -3944,14 +3944,14 @@ const Parser = struct { var list = std.ArrayList(Node.Index).init(p.gpa); defer list.deinit(); - try list.appendSlice(&[_]Node.Index{ param_one, param_two }); + try list.appendSlice(&.{ param_one, param_two }); while (true) { const param = try p.expectExpr(); try list.append(param); switch (p.token_tags[p.nextToken()]) { - .Comma => { - if (p.eatToken(.RParen)) |_| { + .comma => { + if (p.eatToken(.r_paren)) |_| { const params = try p.listToSpan(list.items); return p.addNode(.{ .tag = .BuiltinCallComma, @@ -3964,7 +3964,7 @@ const Parser = struct { } continue; }, - .RParen => { + .r_paren => { const params = try p.listToSpan(list.items); return p.addNode(.{ .tag = .BuiltinCall, @@ -3980,7 +3980,7 @@ const Parser = struct { // give an error but continue parsing this list. p.tok_i -= 1; try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma }, + .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, }); }, } @@ -3990,7 +3990,7 @@ const Parser = struct { // string literal or multiline string literal fn parseStringLiteral(p: *Parser) !Node.Index { switch (p.token_tags[p.tok_i]) { - .StringLiteral => { + .string_literal => { const main_token = p.nextToken(); return p.addNode(.{ .tag = .StringLiteral, @@ -4001,9 +4001,9 @@ const Parser = struct { }, }); }, - .MultilineStringLiteralLine => { + .multiline_string_literal_line => { const first_line = p.nextToken(); - while (p.token_tags[p.tok_i] == .MultilineStringLiteralLine) { + while (p.token_tags[p.tok_i] == .multiline_string_literal_line) { p.tok_i += 1; } return p.addNode(.{ @@ -4030,7 +4030,7 @@ const Parser = struct { fn expectIntegerLiteral(p: *Parser) !Node.Index { return p.addNode(.{ .tag = .IntegerLiteral, - .main_token = try p.expectToken(.IntegerLiteral), + .main_token = try 
p.expectToken(.integer_literal), .data = .{ .lhs = undefined, .rhs = undefined, @@ -4040,16 +4040,16 @@ const Parser = struct { /// KEYWORD_if LPAREN Expr RPAREN PtrPayload? Body (KEYWORD_else Payload? Body)? fn parseIf(p: *Parser, bodyParseFn: NodeParseFn) !Node.Index { - const if_token = p.eatToken(.Keyword_if) orelse return null_node; - _ = try p.expectToken(.LParen); + const if_token = p.eatToken(.keyword_if) orelse return null_node; + _ = try p.expectToken(.l_paren); const condition = try p.expectExpr(); - _ = try p.expectToken(.RParen); + _ = try p.expectToken(.r_paren); const then_payload = try p.parsePtrPayload(); const then_expr = try bodyParseFn(p); if (then_expr == 0) return p.fail(.{ .InvalidToken = .{ .token = p.tok_i } }); - const else_token = p.eatToken(.Keyword_else) orelse return p.addNode(.{ + const else_token = p.eatToken(.keyword_else) orelse return p.addNode(.{ .tag = .IfSimple, .main_token = if_token, .data = .{ @@ -4076,8 +4076,8 @@ const Parser = struct { /// Skips over doc comment tokens. Returns the first one, if any. 
fn eatDocComments(p: *Parser) ?TokenIndex { - if (p.eatToken(.DocComment)) |first_line| { - while (p.eatToken(.DocComment)) |_| {} + if (p.eatToken(.doc_comment)) |first_line| { + while (p.eatToken(.doc_comment)) |_| {} return first_line; } return null; @@ -4089,7 +4089,7 @@ const Parser = struct { /// Eat a single-line doc comment on the same line as another node fn parseAppendedDocComment(p: *Parser, after_token: TokenIndex) !void { - const comment_token = p.eatToken(.DocComment) orelse return; + const comment_token = p.eatToken(.doc_comment) orelse return; if (!p.tokensOnSameLine(after_token, comment_token)) { p.tok_i -= 1; } diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 4ffbaeff19..0f14856fdf 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -107,12 +107,12 @@ fn renderMember(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) E while (i > 0) { i -= 1; switch (token_tags[i]) { - .Keyword_extern, - .Keyword_export, - .Keyword_pub, - .StringLiteral, - .Keyword_inline, - .Keyword_noinline, + .keyword_extern, + .keyword_export, + .keyword_pub, + .string_literal, + .keyword_inline, + .keyword_noinline, => continue, else => { @@ -144,7 +144,7 @@ fn renderMember(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) E .UsingNamespace => { const main_token = main_tokens[decl]; const expr = datas[decl].lhs; - if (main_token > 0 and token_tags[main_token - 1] == .Keyword_pub) { + if (main_token > 0 and token_tags[main_token - 1] == .keyword_pub) { try renderToken(ais, tree, main_token - 1, .Space); // pub } try renderToken(ais, tree, main_token, .Space); // usingnamespace @@ -160,7 +160,7 @@ fn renderMember(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) E .TestDecl => { const test_token = main_tokens[decl]; try renderToken(ais, tree, test_token, .Space); - if (token_tags[test_token + 1] == .StringLiteral) { + if (token_tags[test_token + 1] == .string_literal) { try renderToken(ais, tree, test_token 
+ 1, .Space); } try renderExpression(ais, tree, datas[decl].rhs, space); @@ -269,13 +269,13 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac try renderExpression(ais, tree, datas[node].lhs, .Space); // target - if (token_tags[fallback_first - 1] == .Pipe) { + if (token_tags[fallback_first - 1] == .pipe) { try renderToken(ais, tree, main_token, .Space); // catch keyword try renderToken(ais, tree, main_token + 1, .None); // pipe try renderToken(ais, tree, main_token + 2, .None); // payload identifier try renderToken(ais, tree, main_token + 3, after_op_space); // pipe } else { - assert(token_tags[fallback_first - 1] == .Keyword_catch); + assert(token_tags[fallback_first - 1] == .keyword_catch); try renderToken(ais, tree, main_token, after_op_space); // catch keyword } @@ -532,22 +532,22 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // There is nothing between the braces so render condensed: `error{}` try renderToken(ais, tree, lbrace, .None); return renderToken(ais, tree, rbrace, space); - } else if (lbrace + 2 == rbrace and token_tags[lbrace + 1] == .Identifier) { + } else if (lbrace + 2 == rbrace and token_tags[lbrace + 1] == .identifier) { // There is exactly one member and no trailing comma or // comments, so render without surrounding spaces: `error{Foo}` try renderToken(ais, tree, lbrace, .None); try renderToken(ais, tree, lbrace + 1, .None); // identifier return renderToken(ais, tree, rbrace, space); - } else if (token_tags[rbrace - 1] == .Comma) { + } else if (token_tags[rbrace - 1] == .comma) { // There is a trailing comma so render each member on a new line. 
try renderToken(ais, tree, lbrace, .Newline); ais.pushIndent(); var i = lbrace + 1; while (i < rbrace) : (i += 1) { switch (token_tags[i]) { - .DocComment => try renderToken(ais, tree, i, .Newline), - .Identifier => try renderToken(ais, tree, i, .Comma), - .Comma => {}, + .doc_comment => try renderToken(ais, tree, i, .Newline), + .identifier => try renderToken(ais, tree, i, .Comma), + .comma => {}, else => unreachable, } } @@ -559,9 +559,9 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac var i = lbrace + 1; while (i < rbrace) : (i += 1) { switch (token_tags[i]) { - .DocComment => unreachable, // TODO - .Identifier => try renderToken(ais, tree, i, .CommaSpace), - .Comma => {}, + .doc_comment => unreachable, // TODO + .identifier => try renderToken(ais, tree, i, .CommaSpace), + .comma => {}, else => unreachable, } } @@ -701,7 +701,7 @@ fn renderPtrType( // in such a relationship. If so, skip rendering anything for // this pointer type and rely on the child to render our asterisk // as well when it renders the ** token. 
- if (tree.tokens.items(.tag)[ptr_type.ast.main_token] == .AsteriskAsterisk and + if (tree.tokens.items(.tag)[ptr_type.ast.main_token] == .asterisk_asterisk and ptr_type.ast.main_token == tree.nodes.items(.main_token)[ptr_type.ast.child_type]) { return renderExpression(ais, tree, ptr_type.ast.child_type, space); @@ -823,7 +823,7 @@ fn renderAsmOutput( try renderToken(ais, tree, symbolic_name + 2, .Space); // "constraint" try renderToken(ais, tree, symbolic_name + 3, .None); // lparen - if (token_tags[symbolic_name + 4] == .Arrow) { + if (token_tags[symbolic_name + 4] == .arrow) { try renderToken(ais, tree, symbolic_name + 4, .Space); // -> try renderExpression(ais, tree, datas[asm_output].lhs, Space.None); return renderToken(ais, tree, datas[asm_output].rhs, space); // rparen @@ -982,7 +982,7 @@ fn renderWhile(ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Spa try renderToken(ais, tree, payload_token - 2, .Space); // ) try renderToken(ais, tree, payload_token - 1, .None); // | const ident = blk: { - if (token_tags[payload_token] == .Asterisk) { + if (token_tags[payload_token] == .asterisk) { try renderToken(ais, tree, payload_token, .None); // * break :blk payload_token + 1; } else { @@ -991,7 +991,7 @@ fn renderWhile(ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Spa }; try renderToken(ais, tree, ident, .None); // identifier const pipe = blk: { - if (token_tags[ident + 1] == .Comma) { + if (token_tags[ident + 1] == .comma) { try renderToken(ais, tree, ident + 1, .Space); // , try renderToken(ais, tree, ident + 2, .None); // index break :blk payload_token + 3; @@ -1035,7 +1035,7 @@ fn renderWhile(ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Spa try renderToken(ais, tree, payload_token - 2, .Space); // ) try renderToken(ais, tree, payload_token - 1, .None); // | const ident = blk: { - if (token_tags[payload_token] == .Asterisk) { + if (token_tags[payload_token] == .asterisk) { try renderToken(ais, tree, payload_token, 
.None); // * break :blk payload_token + 1; } else { @@ -1044,7 +1044,7 @@ fn renderWhile(ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Spa }; try renderToken(ais, tree, ident, .None); // identifier const pipe = blk: { - if (token_tags[ident + 1] == .Comma) { + if (token_tags[ident + 1] == .comma) { try renderToken(ais, tree, ident + 1, .Space); // , try renderToken(ais, tree, ident + 2, .None); // index break :blk payload_token + 3; @@ -1108,7 +1108,7 @@ fn renderWhile(ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Spa try renderToken(ais, tree, payload_token - 2, .Space); // ) try renderToken(ais, tree, payload_token - 1, .None); // | const ident = blk: { - if (token_tags[payload_token] == .Asterisk) { + if (token_tags[payload_token] == .asterisk) { try renderToken(ais, tree, payload_token, .None); // * break :blk payload_token + 1; } else { @@ -1117,7 +1117,7 @@ fn renderWhile(ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Spa }; try renderToken(ais, tree, ident, .None); // identifier const pipe = blk: { - if (token_tags[ident + 1] == .Comma) { + if (token_tags[ident + 1] == .comma) { try renderToken(ais, tree, ident + 1, .Space); // , try renderToken(ais, tree, ident + 2, .None); // index break :blk payload_token + 3; @@ -1227,7 +1227,7 @@ fn renderBuiltinCall( const last_param = params[params.len - 1]; const after_last_param_token = tree.lastToken(last_param) + 1; - if (token_tags[after_last_param_token] != .Comma) { + if (token_tags[after_last_param_token] != .comma) { // Render all on one line, no trailing comma. 
try renderToken(ais, tree, builtin_token + 1, .None); // ( @@ -1259,7 +1259,7 @@ fn renderFnProto(ais: *Ais, tree: ast.Tree, fn_proto: ast.full.FnProto, space: S const token_starts = tree.tokens.items(.start); const after_fn_token = fn_proto.ast.fn_token + 1; - const lparen = if (token_tags[after_fn_token] == .Identifier) blk: { + const lparen = if (token_tags[after_fn_token] == .identifier) blk: { try renderToken(ais, tree, fn_proto.ast.fn_token, .Space); // fn try renderToken(ais, tree, after_fn_token, .None); // name break :blk after_fn_token + 1; @@ -1267,7 +1267,7 @@ fn renderFnProto(ais: *Ais, tree: ast.Tree, fn_proto: ast.full.FnProto, space: S try renderToken(ais, tree, fn_proto.ast.fn_token, .Space); // fn break :blk fn_proto.ast.fn_token + 1; }; - assert(token_tags[lparen] == .LParen); + assert(token_tags[lparen] == .l_paren); const maybe_bang = tree.firstToken(fn_proto.ast.return_type) - 1; const rparen = blk: { @@ -1301,11 +1301,11 @@ fn renderFnProto(ais: *Ais, tree: ast.Tree, fn_proto: ast.full.FnProto, space: S } break :blk rparen; }; - assert(token_tags[rparen] == .RParen); + assert(token_tags[rparen] == .r_paren); // The params list is a sparse set that does *not* include anytype or ... parameters. - if (token_tags[rparen - 1] != .Comma) { + if (token_tags[rparen - 1] != .comma) { // Render all on one line, no trailing comma. try renderToken(ais, tree, lparen, .None); // ( @@ -1314,39 +1314,39 @@ fn renderFnProto(ais: *Ais, tree: ast.Tree, fn_proto: ast.full.FnProto, space: S while (true) { last_param_token += 1; switch (token_tags[last_param_token]) { - .DocComment => { + .doc_comment => { try renderToken(ais, tree, last_param_token, .Newline); continue; }, - .Ellipsis3 => { + .ellipsis3 => { try renderToken(ais, tree, last_param_token, .None); // ... 
break; }, - .Keyword_noalias, .Keyword_comptime => { + .keyword_noalias, .keyword_comptime => { try renderToken(ais, tree, last_param_token, .Space); last_param_token += 1; }, - .Identifier => {}, - .Keyword_anytype => { + .identifier => {}, + .keyword_anytype => { try renderToken(ais, tree, last_param_token, .None); // anytype continue; }, - .RParen => break, - .Comma => { + .r_paren => break, + .comma => { try renderToken(ais, tree, last_param_token, .Space); // , last_param_token += 1; }, else => {}, // Parameter type without a name. } - if (token_tags[last_param_token] == .Identifier and - token_tags[last_param_token + 1] == .Colon) + if (token_tags[last_param_token] == .identifier and + token_tags[last_param_token + 1] == .colon) { try renderToken(ais, tree, last_param_token, .None); // name last_param_token += 1; try renderToken(ais, tree, last_param_token, .Space); // : last_param_token += 1; } - if (token_tags[last_param_token] == .Keyword_anytype) { + if (token_tags[last_param_token] == .keyword_anytype) { try renderToken(ais, tree, last_param_token, .None); // anytype continue; } @@ -1365,33 +1365,33 @@ fn renderFnProto(ais: *Ais, tree: ast.Tree, fn_proto: ast.full.FnProto, space: S while (true) { last_param_token += 1; switch (token_tags[last_param_token]) { - .DocComment => { + .doc_comment => { try renderToken(ais, tree, last_param_token, .Newline); continue; }, - .Ellipsis3 => { + .ellipsis3 => { try renderToken(ais, tree, last_param_token, .Comma); // ... 
break; }, - .Keyword_noalias, .Keyword_comptime => { + .keyword_noalias, .keyword_comptime => { try renderToken(ais, tree, last_param_token, .Space); last_param_token += 1; }, - .Identifier => {}, - .Keyword_anytype => { + .identifier => {}, + .keyword_anytype => { try renderToken(ais, tree, last_param_token, .Comma); // anytype continue; }, - .RParen => break, + .r_paren => break, else => unreachable, } - if (token_tags[last_param_token] == .Identifier) { + if (token_tags[last_param_token] == .identifier) { try renderToken(ais, tree, last_param_token, .None); // name last_param_token += 1; try renderToken(ais, tree, last_param_token, .Space); // : last_param_token += 1; } - if (token_tags[last_param_token] == .Keyword_anytype) { + if (token_tags[last_param_token] == .keyword_anytype) { try renderToken(ais, tree, last_param_token, .Comma); // anytype continue; } @@ -1435,7 +1435,7 @@ fn renderFnProto(ais: *Ais, tree: ast.Tree, fn_proto: ast.full.FnProto, space: S try renderToken(ais, tree, callconv_rparen, .Space); // ) } - if (token_tags[maybe_bang] == .Bang) { + if (token_tags[maybe_bang] == .bang) { try renderToken(ais, tree, maybe_bang, .None); // ! 
} return renderExpression(ais, tree, fn_proto.ast.return_type, space); @@ -1448,7 +1448,7 @@ fn renderSwitchCase( space: Space, ) Error!void { const token_tags = tree.tokens.items(.tag); - const trailing_comma = token_tags[switch_case.ast.arrow_token - 1] == .Comma; + const trailing_comma = token_tags[switch_case.ast.arrow_token - 1] == .comma; // Render everything before the arrow if (switch_case.ast.values.len == 0) { @@ -1473,7 +1473,7 @@ fn renderSwitchCase( if (switch_case.payload_token) |payload_token| { try renderToken(ais, tree, payload_token - 1, .None); // pipe - if (token_tags[payload_token] == .Asterisk) { + if (token_tags[payload_token] == .asterisk) { try renderToken(ais, tree, payload_token, .None); // asterisk try renderToken(ais, tree, payload_token + 1, .None); // identifier try renderToken(ais, tree, payload_token + 2, .Space); // pipe @@ -1498,8 +1498,8 @@ fn renderBlock( const nodes_data = tree.nodes.items(.data); const lbrace = tree.nodes.items(.main_token)[block_node]; - if (token_tags[lbrace - 1] == .Colon and - token_tags[lbrace - 2] == .Identifier) + if (token_tags[lbrace - 1] == .colon and + token_tags[lbrace - 2] == .identifier) { try renderToken(ais, tree, lbrace - 2, .None); try renderToken(ais, tree, lbrace - 1, .Space); @@ -1547,7 +1547,7 @@ fn renderStructInit( } const last_field = struct_init.ast.fields[struct_init.ast.fields.len - 1]; const last_field_token = tree.lastToken(last_field); - if (token_tags[last_field_token + 1] == .Comma) { + if (token_tags[last_field_token + 1] == .comma) { // Render one field init per line. ais.pushIndent(); try renderToken(ais, tree, struct_init.ast.lbrace, .Newline); @@ -1597,7 +1597,7 @@ fn renderArrayInit( } const last_elem = array_init.ast.elements[array_init.ast.elements.len - 1]; const last_elem_token = tree.lastToken(last_elem); - if (token_tags[last_elem_token + 1] == .Comma) { + if (token_tags[last_elem_token + 1] == .comma) { // Render one element per line. 
ais.pushIndent(); try renderToken(ais, tree, array_init.ast.lbrace, .Newline); @@ -1673,16 +1673,16 @@ fn renderContainerDecl( const last_member = container_decl.ast.members[container_decl.ast.members.len - 1]; const last_member_token = tree.lastToken(last_member); const rbrace = switch (token_tags[last_member_token + 1]) { - .DocComment => last_member_token + 2, - .Comma, .Semicolon => switch (token_tags[last_member_token + 2]) { - .DocComment => last_member_token + 3, - .RBrace => last_member_token + 2, + .doc_comment => last_member_token + 2, + .comma, .semicolon => switch (token_tags[last_member_token + 2]) { + .doc_comment => last_member_token + 3, + .r_brace => last_member_token + 2, else => unreachable, }, - .RBrace => last_member_token + 1, + .r_brace => last_member_token + 1, else => unreachable, }; - const src_has_trailing_comma = token_tags[last_member_token + 1] == .Comma; + const src_has_trailing_comma = token_tags[last_member_token + 1] == .comma; if (!src_has_trailing_comma) one_line: { // We can only print all the members in-line if all the members are fields. 
@@ -1734,8 +1734,8 @@ fn renderAsm( try renderToken(ais, tree, tok_i, .None); tok_i += 1; switch (token_tags[tok_i]) { - .RParen => return renderToken(ais, tree, tok_i, space), - .Comma => try renderToken(ais, tree, tok_i, .Space), + .r_paren => return renderToken(ais, tree, tok_i, space), + .comma => try renderToken(ais, tree, tok_i, .Space), else => unreachable, } } @@ -1775,7 +1775,7 @@ fn renderAsm( const comma_or_colon = tree.lastToken(asm_output) + 1; ais.popIndent(); break :colon2 switch (token_tags[comma_or_colon]) { - .Comma => comma_or_colon + 1, + .comma => comma_or_colon + 1, else => comma_or_colon, }; } @@ -1806,7 +1806,7 @@ fn renderAsm( const comma_or_colon = tree.lastToken(asm_input) + 1; ais.popIndent(); break :colon3 switch (token_tags[comma_or_colon]) { - .Comma => comma_or_colon + 1, + .comma => comma_or_colon + 1, else => comma_or_colon, }; } @@ -1819,13 +1819,13 @@ fn renderAsm( var tok_i = first_clobber; while (true) { switch (token_tags[tok_i + 1]) { - .RParen => { + .r_paren => { ais.setIndentDelta(indent_delta); ais.popIndent(); try renderToken(ais, tree, tok_i, .Newline); return renderToken(ais, tree, tok_i + 1, space); }, - .Comma => { + .comma => { try renderToken(ais, tree, tok_i, .None); try renderToken(ais, tree, tok_i + 1, .Space); tok_i += 2; @@ -1859,7 +1859,7 @@ fn renderCall( const last_param = params[params.len - 1]; const after_last_param_tok = tree.lastToken(last_param) + 1; - if (token_tags[after_last_param_tok] == .Comma) { + if (token_tags[after_last_param_tok] == .comma) { ais.pushIndent(); try renderToken(ais, tree, lparen, Space.Newline); // ( for (params) |param_node, i| { @@ -1868,7 +1868,7 @@ fn renderCall( // Unindent the comma for multiline string literals const is_multiline_string = node_tags[param_node] == .StringLiteral and - token_tags[main_tokens[param_node]] == .MultilineStringLiteralLine; + token_tags[main_tokens[param_node]] == .multiline_string_literal_line; if (is_multiline_string) ais.popIndent(); const 
comma = tree.lastToken(param_node) + 1; @@ -1900,7 +1900,7 @@ fn renderCall( fn renderExpressionComma(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Space) Error!void { const token_tags = tree.tokens.items(.tag); const maybe_comma = tree.lastToken(node) + 1; - if (token_tags[maybe_comma] == .Comma) { + if (token_tags[maybe_comma] == .comma) { try renderExpression(ais, tree, node, .None); return renderToken(ais, tree, maybe_comma, space); } else { @@ -1911,7 +1911,7 @@ fn renderExpressionComma(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: fn renderTokenComma(ais: *Ais, tree: ast.Tree, token: ast.TokenIndex, space: Space) Error!void { const token_tags = tree.tokens.items(.tag); const maybe_comma = token + 1; - if (token_tags[maybe_comma] == .Comma) { + if (token_tags[maybe_comma] == .comma) { try renderToken(ais, tree, token, .None); return renderToken(ais, tree, maybe_comma, space); } else { @@ -1962,7 +1962,7 @@ fn renderToken(ais: *Ais, tree: ast.Tree, token_index: ast.TokenIndex, space: Sp .None => _ = try renderCommentsAndNewlines(ais, tree, token_start + lexeme.len, token_starts[token_index + 1]), .Comma => { const comment = try renderCommentsAndNewlines(ais, tree, token_start + lexeme.len, token_starts[token_index + 1]); - if (token_tags[token_index + 1] == .Comma) { + if (token_tags[token_index + 1] == .comma) { return renderToken(ais, tree, token_index + 1, .Newline); } else if (!comment) { return ais.insertNewline(); @@ -1970,7 +1970,7 @@ fn renderToken(ais: *Ais, tree: ast.Tree, token_index: ast.TokenIndex, space: Sp }, .CommaSpace => { const comment = try renderCommentsAndNewlines(ais, tree, token_start + lexeme.len, token_starts[token_index + 1]); - if (token_tags[token_index + 1] == .Comma) { + if (token_tags[token_index + 1] == .comma) { return renderToken(ais, tree, token_index + 1, .Space); } else if (!comment) { return ais.writer().writeByte(' '); @@ -1978,7 +1978,7 @@ fn renderToken(ais: *Ais, tree: ast.Tree, token_index: 
ast.TokenIndex, space: Sp }, .Semicolon => { const comment = try renderCommentsAndNewlines(ais, tree, token_start + lexeme.len, token_starts[token_index + 1]); - if (token_tags[token_index + 1] == .Semicolon) { + if (token_tags[token_index + 1] == .semicolon) { return renderToken(ais, tree, token_index + 1, .Newline); } else if (!comment) { return ais.insertNewline(); @@ -2005,7 +2005,7 @@ fn renderDocComments(ais: *Ais, tree: ast.Tree, end_token: ast.TokenIndex) Error const token_tags = tree.tokens.items(.tag); if (end_token == 0) return; var tok = end_token - 1; - while (token_tags[tok] == .DocComment) { + while (token_tags[tok] == .doc_comment) { if (tok == 0) break; tok -= 1; } else { @@ -2016,7 +2016,7 @@ fn renderDocComments(ais: *Ais, tree: ast.Tree, end_token: ast.TokenIndex) Error while (true) : (tok += 1) { switch (token_tags[tok]) { - .DocComment => { + .doc_comment => { if (first_tok < end_token) { try renderToken(ais, tree, tok, .Newline); } else { diff --git a/lib/std/zig/tokenizer.zig b/lib/std/zig/tokenizer.zig index 8e5ecc7010..dd18025efb 100644 --- a/lib/std/zig/tokenizer.zig +++ b/lib/std/zig/tokenizer.zig @@ -16,58 +16,58 @@ pub const Token = struct { }; pub const keywords = std.ComptimeStringMap(Tag, .{ - .{ "align", .Keyword_align }, - .{ "allowzero", .Keyword_allowzero }, - .{ "and", .Keyword_and }, - .{ "anyframe", .Keyword_anyframe }, - .{ "anytype", .Keyword_anytype }, - .{ "asm", .Keyword_asm }, - .{ "async", .Keyword_async }, - .{ "await", .Keyword_await }, - .{ "break", .Keyword_break }, - .{ "callconv", .Keyword_callconv }, - .{ "catch", .Keyword_catch }, - .{ "comptime", .Keyword_comptime }, - .{ "const", .Keyword_const }, - .{ "continue", .Keyword_continue }, - .{ "defer", .Keyword_defer }, - .{ "else", .Keyword_else }, - .{ "enum", .Keyword_enum }, - .{ "errdefer", .Keyword_errdefer }, - .{ "error", .Keyword_error }, - .{ "export", .Keyword_export }, - .{ "extern", .Keyword_extern }, - .{ "false", .Keyword_false }, - .{ "fn", 
.Keyword_fn }, - .{ "for", .Keyword_for }, - .{ "if", .Keyword_if }, - .{ "inline", .Keyword_inline }, - .{ "noalias", .Keyword_noalias }, - .{ "noinline", .Keyword_noinline }, - .{ "nosuspend", .Keyword_nosuspend }, - .{ "null", .Keyword_null }, - .{ "opaque", .Keyword_opaque }, - .{ "or", .Keyword_or }, - .{ "orelse", .Keyword_orelse }, - .{ "packed", .Keyword_packed }, - .{ "pub", .Keyword_pub }, - .{ "resume", .Keyword_resume }, - .{ "return", .Keyword_return }, - .{ "linksection", .Keyword_linksection }, - .{ "struct", .Keyword_struct }, - .{ "suspend", .Keyword_suspend }, - .{ "switch", .Keyword_switch }, - .{ "test", .Keyword_test }, - .{ "threadlocal", .Keyword_threadlocal }, - .{ "true", .Keyword_true }, - .{ "try", .Keyword_try }, - .{ "undefined", .Keyword_undefined }, - .{ "union", .Keyword_union }, - .{ "unreachable", .Keyword_unreachable }, - .{ "usingnamespace", .Keyword_usingnamespace }, - .{ "var", .Keyword_var }, - .{ "volatile", .Keyword_volatile }, - .{ "while", .Keyword_while }, + .{ "align", .keyword_align }, + .{ "allowzero", .keyword_allowzero }, + .{ "and", .keyword_and }, + .{ "anyframe", .keyword_anyframe }, + .{ "anytype", .keyword_anytype }, + .{ "asm", .keyword_asm }, + .{ "async", .keyword_async }, + .{ "await", .keyword_await }, + .{ "break", .keyword_break }, + .{ "callconv", .keyword_callconv }, + .{ "catch", .keyword_catch }, + .{ "comptime", .keyword_comptime }, + .{ "const", .keyword_const }, + .{ "continue", .keyword_continue }, + .{ "defer", .keyword_defer }, + .{ "else", .keyword_else }, + .{ "enum", .keyword_enum }, + .{ "errdefer", .keyword_errdefer }, + .{ "error", .keyword_error }, + .{ "export", .keyword_export }, + .{ "extern", .keyword_extern }, + .{ "false", .keyword_false }, + .{ "fn", .keyword_fn }, + .{ "for", .keyword_for }, + .{ "if", .keyword_if }, + .{ "inline", .keyword_inline }, + .{ "noalias", .keyword_noalias }, + .{ "noinline", .keyword_noinline }, + .{ "nosuspend", .keyword_nosuspend }, + .{ "null", 
.keyword_null }, + .{ "opaque", .keyword_opaque }, + .{ "or", .keyword_or }, + .{ "orelse", .keyword_orelse }, + .{ "packed", .keyword_packed }, + .{ "pub", .keyword_pub }, + .{ "resume", .keyword_resume }, + .{ "return", .keyword_return }, + .{ "linksection", .keyword_linksection }, + .{ "struct", .keyword_struct }, + .{ "suspend", .keyword_suspend }, + .{ "switch", .keyword_switch }, + .{ "test", .keyword_test }, + .{ "threadlocal", .keyword_threadlocal }, + .{ "true", .keyword_true }, + .{ "try", .keyword_try }, + .{ "undefined", .keyword_undefined }, + .{ "union", .keyword_union }, + .{ "unreachable", .keyword_unreachable }, + .{ "usingnamespace", .keyword_usingnamespace }, + .{ "var", .keyword_var }, + .{ "volatile", .keyword_volatile }, + .{ "while", .keyword_while }, }); pub fn getKeyword(bytes: []const u8) ?Tag { @@ -75,249 +75,249 @@ pub const Token = struct { } pub const Tag = enum { - Invalid, - Invalid_ampersands, - Invalid_periodasterisks, - Identifier, - StringLiteral, - MultilineStringLiteralLine, - CharLiteral, - Eof, - Builtin, - Bang, - Pipe, - PipePipe, - PipeEqual, - Equal, - EqualEqual, - EqualAngleBracketRight, - BangEqual, - LParen, - RParen, - Semicolon, - Percent, - PercentEqual, - LBrace, - RBrace, - LBracket, - RBracket, - Period, - PeriodAsterisk, - Ellipsis2, - Ellipsis3, - Caret, - CaretEqual, - Plus, - PlusPlus, - PlusEqual, - PlusPercent, - PlusPercentEqual, - Minus, - MinusEqual, - MinusPercent, - MinusPercentEqual, - Asterisk, - AsteriskEqual, - AsteriskAsterisk, - AsteriskPercent, - AsteriskPercentEqual, - Arrow, - Colon, - Slash, - SlashEqual, - Comma, - Ampersand, - AmpersandEqual, - QuestionMark, - AngleBracketLeft, - AngleBracketLeftEqual, - AngleBracketAngleBracketLeft, - AngleBracketAngleBracketLeftEqual, - AngleBracketRight, - AngleBracketRightEqual, - AngleBracketAngleBracketRight, - AngleBracketAngleBracketRightEqual, - Tilde, - IntegerLiteral, - FloatLiteral, - DocComment, - ContainerDocComment, - Keyword_align, - 
Keyword_allowzero, - Keyword_and, - Keyword_anyframe, - Keyword_anytype, - Keyword_asm, - Keyword_async, - Keyword_await, - Keyword_break, - Keyword_callconv, - Keyword_catch, - Keyword_comptime, - Keyword_const, - Keyword_continue, - Keyword_defer, - Keyword_else, - Keyword_enum, - Keyword_errdefer, - Keyword_error, - Keyword_export, - Keyword_extern, - Keyword_false, - Keyword_fn, - Keyword_for, - Keyword_if, - Keyword_inline, - Keyword_noalias, - Keyword_noinline, - Keyword_nosuspend, - Keyword_null, - Keyword_opaque, - Keyword_or, - Keyword_orelse, - Keyword_packed, - Keyword_pub, - Keyword_resume, - Keyword_return, - Keyword_linksection, - Keyword_struct, - Keyword_suspend, - Keyword_switch, - Keyword_test, - Keyword_threadlocal, - Keyword_true, - Keyword_try, - Keyword_undefined, - Keyword_union, - Keyword_unreachable, - Keyword_usingnamespace, - Keyword_var, - Keyword_volatile, - Keyword_while, + invalid, + invalid_ampersands, + invalid_periodasterisks, + identifier, + string_literal, + multiline_string_literal_line, + char_literal, + eof, + builtin, + bang, + pipe, + pipe_pipe, + pipe_equal, + equal, + equal_equal, + equal_angle_bracket_right, + bang_equal, + l_paren, + r_paren, + semicolon, + percent, + percent_equal, + l_brace, + r_brace, + l_bracket, + r_bracket, + period, + period_asterisk, + ellipsis2, + ellipsis3, + caret, + caret_equal, + plus, + plus_plus, + plus_equal, + plus_percent, + plus_percent_equal, + minus, + minus_equal, + minus_percent, + minus_percent_equal, + asterisk, + asterisk_equal, + asterisk_asterisk, + asterisk_percent, + asterisk_percent_equal, + arrow, + colon, + slash, + slash_equal, + comma, + ampersand, + ampersand_equal, + question_mark, + angle_bracket_left, + angle_bracket_left_equal, + angle_bracket_angle_bracket_left, + angle_bracket_angle_bracket_left_equal, + angle_bracket_right, + angle_bracket_right_equal, + angle_bracket_angle_bracket_right, + angle_bracket_angle_bracket_right_equal, + tilde, + integer_literal, + 
float_literal, + doc_comment, + container_doc_comment, + keyword_align, + keyword_allowzero, + keyword_and, + keyword_anyframe, + keyword_anytype, + keyword_asm, + keyword_async, + keyword_await, + keyword_break, + keyword_callconv, + keyword_catch, + keyword_comptime, + keyword_const, + keyword_continue, + keyword_defer, + keyword_else, + keyword_enum, + keyword_errdefer, + keyword_error, + keyword_export, + keyword_extern, + keyword_false, + keyword_fn, + keyword_for, + keyword_if, + keyword_inline, + keyword_noalias, + keyword_noinline, + keyword_nosuspend, + keyword_null, + keyword_opaque, + keyword_or, + keyword_orelse, + keyword_packed, + keyword_pub, + keyword_resume, + keyword_return, + keyword_linksection, + keyword_struct, + keyword_suspend, + keyword_switch, + keyword_test, + keyword_threadlocal, + keyword_true, + keyword_try, + keyword_undefined, + keyword_union, + keyword_unreachable, + keyword_usingnamespace, + keyword_var, + keyword_volatile, + keyword_while, pub fn lexeme(tag: Tag) ?[]const u8 { return switch (tag) { - .Invalid, - .Identifier, - .StringLiteral, - .MultilineStringLiteralLine, - .CharLiteral, - .Eof, - .Builtin, - .IntegerLiteral, - .FloatLiteral, - .DocComment, - .ContainerDocComment, + .invalid, + .identifier, + .string_literal, + .multiline_string_literal_line, + .char_literal, + .eof, + .builtin, + .integer_literal, + .float_literal, + .doc_comment, + .container_doc_comment, => null, - .Invalid_ampersands => "&&", - .Invalid_periodasterisks => ".**", - .Bang => "!", - .Pipe => "|", - .PipePipe => "||", - .PipeEqual => "|=", - .Equal => "=", - .EqualEqual => "==", - .EqualAngleBracketRight => "=>", - .BangEqual => "!=", - .LParen => "(", - .RParen => ")", - .Semicolon => ";", - .Percent => "%", - .PercentEqual => "%=", - .LBrace => "{", - .RBrace => "}", - .LBracket => "[", - .RBracket => "]", - .Period => ".", - .PeriodAsterisk => ".*", - .Ellipsis2 => "..", - .Ellipsis3 => "...", - .Caret => "^", - .CaretEqual => "^=", - .Plus => 
"+", - .PlusPlus => "++", - .PlusEqual => "+=", - .PlusPercent => "+%", - .PlusPercentEqual => "+%=", - .Minus => "-", - .MinusEqual => "-=", - .MinusPercent => "-%", - .MinusPercentEqual => "-%=", - .Asterisk => "*", - .AsteriskEqual => "*=", - .AsteriskAsterisk => "**", - .AsteriskPercent => "*%", - .AsteriskPercentEqual => "*%=", - .Arrow => "->", - .Colon => ":", - .Slash => "/", - .SlashEqual => "/=", - .Comma => ",", - .Ampersand => "&", - .AmpersandEqual => "&=", - .QuestionMark => "?", - .AngleBracketLeft => "<", - .AngleBracketLeftEqual => "<=", - .AngleBracketAngleBracketLeft => "<<", - .AngleBracketAngleBracketLeftEqual => "<<=", - .AngleBracketRight => ">", - .AngleBracketRightEqual => ">=", - .AngleBracketAngleBracketRight => ">>", - .AngleBracketAngleBracketRightEqual => ">>=", - .Tilde => "~", - .Keyword_align => "align", - .Keyword_allowzero => "allowzero", - .Keyword_and => "and", - .Keyword_anyframe => "anyframe", - .Keyword_anytype => "anytype", - .Keyword_asm => "asm", - .Keyword_async => "async", - .Keyword_await => "await", - .Keyword_break => "break", - .Keyword_callconv => "callconv", - .Keyword_catch => "catch", - .Keyword_comptime => "comptime", - .Keyword_const => "const", - .Keyword_continue => "continue", - .Keyword_defer => "defer", - .Keyword_else => "else", - .Keyword_enum => "enum", - .Keyword_errdefer => "errdefer", - .Keyword_error => "error", - .Keyword_export => "export", - .Keyword_extern => "extern", - .Keyword_false => "false", - .Keyword_fn => "fn", - .Keyword_for => "for", - .Keyword_if => "if", - .Keyword_inline => "inline", - .Keyword_noalias => "noalias", - .Keyword_noinline => "noinline", - .Keyword_nosuspend => "nosuspend", - .Keyword_null => "null", - .Keyword_opaque => "opaque", - .Keyword_or => "or", - .Keyword_orelse => "orelse", - .Keyword_packed => "packed", - .Keyword_pub => "pub", - .Keyword_resume => "resume", - .Keyword_return => "return", - .Keyword_linksection => "linksection", - .Keyword_struct => 
"struct", - .Keyword_suspend => "suspend", - .Keyword_switch => "switch", - .Keyword_test => "test", - .Keyword_threadlocal => "threadlocal", - .Keyword_true => "true", - .Keyword_try => "try", - .Keyword_undefined => "undefined", - .Keyword_union => "union", - .Keyword_unreachable => "unreachable", - .Keyword_usingnamespace => "usingnamespace", - .Keyword_var => "var", - .Keyword_volatile => "volatile", - .Keyword_while => "while", + .invalid_ampersands => "&&", + .invalid_periodasterisks => ".**", + .bang => "!", + .pipe => "|", + .pipe_pipe => "||", + .pipe_equal => "|=", + .equal => "=", + .equal_equal => "==", + .equal_angle_bracket_right => "=>", + .bang_equal => "!=", + .l_paren => "(", + .r_paren => ")", + .semicolon => ";", + .percent => "%", + .percent_equal => "%=", + .l_brace => "{", + .r_brace => "}", + .l_bracket => "[", + .r_bracket => "]", + .period => ".", + .period_asterisk => ".*", + .ellipsis2 => "..", + .ellipsis3 => "...", + .caret => "^", + .caret_equal => "^=", + .plus => "+", + .plus_plus => "++", + .plus_equal => "+=", + .plus_percent => "+%", + .plus_percent_equal => "+%=", + .minus => "-", + .minus_equal => "-=", + .minus_percent => "-%", + .minus_percent_equal => "-%=", + .asterisk => "*", + .asterisk_equal => "*=", + .asterisk_asterisk => "**", + .asterisk_percent => "*%", + .asterisk_percent_equal => "*%=", + .arrow => "->", + .colon => ":", + .slash => "/", + .slash_equal => "/=", + .comma => ",", + .ampersand => "&", + .ampersand_equal => "&=", + .question_mark => "?", + .angle_bracket_left => "<", + .angle_bracket_left_equal => "<=", + .angle_bracket_angle_bracket_left => "<<", + .angle_bracket_angle_bracket_left_equal => "<<=", + .angle_bracket_right => ">", + .angle_bracket_right_equal => ">=", + .angle_bracket_angle_bracket_right => ">>", + .angle_bracket_angle_bracket_right_equal => ">>=", + .tilde => "~", + .keyword_align => "align", + .keyword_allowzero => "allowzero", + .keyword_and => "and", + .keyword_anyframe => 
"anyframe", + .keyword_anytype => "anytype", + .keyword_asm => "asm", + .keyword_async => "async", + .keyword_await => "await", + .keyword_break => "break", + .keyword_callconv => "callconv", + .keyword_catch => "catch", + .keyword_comptime => "comptime", + .keyword_const => "const", + .keyword_continue => "continue", + .keyword_defer => "defer", + .keyword_else => "else", + .keyword_enum => "enum", + .keyword_errdefer => "errdefer", + .keyword_error => "error", + .keyword_export => "export", + .keyword_extern => "extern", + .keyword_false => "false", + .keyword_fn => "fn", + .keyword_for => "for", + .keyword_if => "if", + .keyword_inline => "inline", + .keyword_noalias => "noalias", + .keyword_noinline => "noinline", + .keyword_nosuspend => "nosuspend", + .keyword_null => "null", + .keyword_opaque => "opaque", + .keyword_or => "or", + .keyword_orelse => "orelse", + .keyword_packed => "packed", + .keyword_pub => "pub", + .keyword_resume => "resume", + .keyword_return => "return", + .keyword_linksection => "linksection", + .keyword_struct => "struct", + .keyword_suspend => "suspend", + .keyword_switch => "switch", + .keyword_test => "test", + .keyword_threadlocal => "threadlocal", + .keyword_true => "true", + .keyword_try => "try", + .keyword_undefined => "undefined", + .keyword_union => "union", + .keyword_unreachable => "unreachable", + .keyword_usingnamespace => "usingnamespace", + .keyword_var => "var", + .keyword_volatile => "volatile", + .keyword_while => "while", }; } @@ -421,7 +421,7 @@ pub const Tokenizer = struct { const start_index = self.index; var state: State = .start; var result = Token{ - .tag = .Eof, + .tag = .eof, .loc = .{ .start = self.index, .end = undefined, @@ -438,14 +438,14 @@ pub const Tokenizer = struct { }, '"' => { state = .string_literal; - result.tag = .StringLiteral; + result.tag = .string_literal; }, '\'' => { state = .char_literal; }, 'a'...'z', 'A'...'Z', '_' => { state = .identifier; - result.tag = .Identifier; + result.tag = 
.identifier; }, '@' => { state = .saw_at_sign; @@ -460,42 +460,42 @@ pub const Tokenizer = struct { state = .pipe; }, '(' => { - result.tag = .LParen; + result.tag = .l_paren; self.index += 1; break; }, ')' => { - result.tag = .RParen; + result.tag = .r_paren; self.index += 1; break; }, '[' => { - result.tag = .LBracket; + result.tag = .l_bracket; self.index += 1; break; }, ']' => { - result.tag = .RBracket; + result.tag = .r_bracket; self.index += 1; break; }, ';' => { - result.tag = .Semicolon; + result.tag = .semicolon; self.index += 1; break; }, ',' => { - result.tag = .Comma; + result.tag = .comma; self.index += 1; break; }, '?' => { - result.tag = .QuestionMark; + result.tag = .question_mark; self.index += 1; break; }, ':' => { - result.tag = .Colon; + result.tag = .colon; self.index += 1; break; }, @@ -519,20 +519,20 @@ pub const Tokenizer = struct { }, '\\' => { state = .backslash; - result.tag = .MultilineStringLiteralLine; + result.tag = .multiline_string_literal_line; }, '{' => { - result.tag = .LBrace; + result.tag = .l_brace; self.index += 1; break; }, '}' => { - result.tag = .RBrace; + result.tag = .r_brace; self.index += 1; break; }, '~' => { - result.tag = .Tilde; + result.tag = .tilde; self.index += 1; break; }, @@ -550,14 +550,14 @@ pub const Tokenizer = struct { }, '0' => { state = .zero; - result.tag = .IntegerLiteral; + result.tag = .integer_literal; }, '1'...'9' => { state = .int_literal_dec; - result.tag = .IntegerLiteral; + result.tag = .integer_literal; }, else => { - result.tag = .Invalid; + result.tag = .invalid; self.index += 1; break; }, @@ -565,42 +565,42 @@ pub const Tokenizer = struct { .saw_at_sign => switch (c) { '"' => { - result.tag = .Identifier; + result.tag = .identifier; state = .string_literal; }, else => { // reinterpret as a builtin self.index -= 1; state = .builtin; - result.tag = .Builtin; + result.tag = .builtin; }, }, .ampersand => switch (c) { '&' => { - result.tag = .Invalid_ampersands; + result.tag = 
.invalid_ampersands; self.index += 1; break; }, '=' => { - result.tag = .AmpersandEqual; + result.tag = .ampersand_equal; self.index += 1; break; }, else => { - result.tag = .Ampersand; + result.tag = .ampersand; break; }, }, .asterisk => switch (c) { '=' => { - result.tag = .AsteriskEqual; + result.tag = .asterisk_equal; self.index += 1; break; }, '*' => { - result.tag = .AsteriskAsterisk; + result.tag = .asterisk_asterisk; self.index += 1; break; }, @@ -608,43 +608,43 @@ pub const Tokenizer = struct { state = .asterisk_percent; }, else => { - result.tag = .Asterisk; + result.tag = .asterisk; break; }, }, .asterisk_percent => switch (c) { '=' => { - result.tag = .AsteriskPercentEqual; + result.tag = .asterisk_percent_equal; self.index += 1; break; }, else => { - result.tag = .AsteriskPercent; + result.tag = .asterisk_percent; break; }, }, .percent => switch (c) { '=' => { - result.tag = .PercentEqual; + result.tag = .percent_equal; self.index += 1; break; }, else => { - result.tag = .Percent; + result.tag = .percent; break; }, }, .plus => switch (c) { '=' => { - result.tag = .PlusEqual; + result.tag = .plus_equal; self.index += 1; break; }, '+' => { - result.tag = .PlusPlus; + result.tag = .plus_plus; self.index += 1; break; }, @@ -652,31 +652,31 @@ pub const Tokenizer = struct { state = .plus_percent; }, else => { - result.tag = .Plus; + result.tag = .plus; break; }, }, .plus_percent => switch (c) { '=' => { - result.tag = .PlusPercentEqual; + result.tag = .plus_percent_equal; self.index += 1; break; }, else => { - result.tag = .PlusPercent; + result.tag = .plus_percent; break; }, }, .caret => switch (c) { '=' => { - result.tag = .CaretEqual; + result.tag = .caret_equal; self.index += 1; break; }, else => { - result.tag = .Caret; + result.tag = .caret; break; }, }, @@ -724,7 +724,7 @@ pub const Tokenizer = struct { state = .char_literal_backslash; }, '\'', 0x80...0xbf, 0xf8...0xff => { - result.tag = .Invalid; + result.tag = .invalid; break; }, 0xc0...0xdf => { 
// 110xxxxx @@ -746,7 +746,7 @@ pub const Tokenizer = struct { .char_literal_backslash => switch (c) { '\n' => { - result.tag = .Invalid; + result.tag = .invalid; break; }, 'x' => { @@ -769,7 +769,7 @@ pub const Tokenizer = struct { } }, else => { - result.tag = .Invalid; + result.tag = .invalid; break; }, }, @@ -780,7 +780,7 @@ pub const Tokenizer = struct { seen_escape_digits = 0; }, else => { - result.tag = .Invalid; + result.tag = .invalid; state = .char_literal_unicode_invalid; }, }, @@ -791,14 +791,14 @@ pub const Tokenizer = struct { }, '}' => { if (seen_escape_digits == 0) { - result.tag = .Invalid; + result.tag = .invalid; state = .char_literal_unicode_invalid; } else { state = .char_literal_end; } }, else => { - result.tag = .Invalid; + result.tag = .invalid; state = .char_literal_unicode_invalid; }, }, @@ -813,12 +813,12 @@ pub const Tokenizer = struct { .char_literal_end => switch (c) { '\'' => { - result.tag = .CharLiteral; + result.tag = .char_literal; self.index += 1; break; }, else => { - result.tag = .Invalid; + result.tag = .invalid; break; }, }, @@ -831,7 +831,7 @@ pub const Tokenizer = struct { } }, else => { - result.tag = .Invalid; + result.tag = .invalid; break; }, }, @@ -847,58 +847,58 @@ pub const Tokenizer = struct { .bang => switch (c) { '=' => { - result.tag = .BangEqual; + result.tag = .bang_equal; self.index += 1; break; }, else => { - result.tag = .Bang; + result.tag = .bang; break; }, }, .pipe => switch (c) { '=' => { - result.tag = .PipeEqual; + result.tag = .pipe_equal; self.index += 1; break; }, '|' => { - result.tag = .PipePipe; + result.tag = .pipe_pipe; self.index += 1; break; }, else => { - result.tag = .Pipe; + result.tag = .pipe; break; }, }, .equal => switch (c) { '=' => { - result.tag = .EqualEqual; + result.tag = .equal_equal; self.index += 1; break; }, '>' => { - result.tag = .EqualAngleBracketRight; + result.tag = .equal_angle_bracket_right; self.index += 1; break; }, else => { - result.tag = .Equal; + result.tag = 
.equal; break; }, }, .minus => switch (c) { '>' => { - result.tag = .Arrow; + result.tag = .arrow; self.index += 1; break; }, '=' => { - result.tag = .MinusEqual; + result.tag = .minus_equal; self.index += 1; break; }, @@ -906,19 +906,19 @@ pub const Tokenizer = struct { state = .minus_percent; }, else => { - result.tag = .Minus; + result.tag = .minus; break; }, }, .minus_percent => switch (c) { '=' => { - result.tag = .MinusPercentEqual; + result.tag = .minus_percent_equal; self.index += 1; break; }, else => { - result.tag = .MinusPercent; + result.tag = .minus_percent; break; }, }, @@ -928,24 +928,24 @@ pub const Tokenizer = struct { state = .angle_bracket_angle_bracket_left; }, '=' => { - result.tag = .AngleBracketLeftEqual; + result.tag = .angle_bracket_left_equal; self.index += 1; break; }, else => { - result.tag = .AngleBracketLeft; + result.tag = .angle_bracket_left; break; }, }, .angle_bracket_angle_bracket_left => switch (c) { '=' => { - result.tag = .AngleBracketAngleBracketLeftEqual; + result.tag = .angle_bracket_angle_bracket_left_equal; self.index += 1; break; }, else => { - result.tag = .AngleBracketAngleBracketLeft; + result.tag = .angle_bracket_angle_bracket_left; break; }, }, @@ -955,24 +955,24 @@ pub const Tokenizer = struct { state = .angle_bracket_angle_bracket_right; }, '=' => { - result.tag = .AngleBracketRightEqual; + result.tag = .angle_bracket_right_equal; self.index += 1; break; }, else => { - result.tag = .AngleBracketRight; + result.tag = .angle_bracket_right; break; }, }, .angle_bracket_angle_bracket_right => switch (c) { '=' => { - result.tag = .AngleBracketAngleBracketRightEqual; + result.tag = .angle_bracket_angle_bracket_right_equal; self.index += 1; break; }, else => { - result.tag = .AngleBracketAngleBracketRight; + result.tag = .angle_bracket_angle_bracket_right; break; }, }, @@ -985,30 +985,30 @@ pub const Tokenizer = struct { state = .period_asterisk; }, else => { - result.tag = .Period; + result.tag = .period; break; }, }, 
.period_2 => switch (c) { '.' => { - result.tag = .Ellipsis3; + result.tag = .ellipsis3; self.index += 1; break; }, else => { - result.tag = .Ellipsis2; + result.tag = .ellipsis2; break; }, }, .period_asterisk => switch (c) { '*' => { - result.tag = .Invalid_periodasterisks; + result.tag = .invalid_periodasterisks; break; }, else => { - result.tag = .PeriodAsterisk; + result.tag = .period_asterisk; break; }, }, @@ -1018,12 +1018,12 @@ pub const Tokenizer = struct { state = .line_comment_start; }, '=' => { - result.tag = .SlashEqual; + result.tag = .slash_equal; self.index += 1; break; }, else => { - result.tag = .Slash; + result.tag = .slash; break; }, }, @@ -1032,7 +1032,7 @@ pub const Tokenizer = struct { state = .doc_comment_start; }, '!' => { - result.tag = .ContainerDocComment; + result.tag = .container_doc_comment; state = .container_doc_comment; }, '\n' => { @@ -1050,16 +1050,16 @@ pub const Tokenizer = struct { state = .line_comment; }, '\n' => { - result.tag = .DocComment; + result.tag = .doc_comment; break; }, '\t', '\r' => { state = .doc_comment; - result.tag = .DocComment; + result.tag = .doc_comment; }, else => { state = .doc_comment; - result.tag = .DocComment; + result.tag = .doc_comment; self.checkLiteralCharacter(); }, }, @@ -1093,7 +1093,7 @@ pub const Tokenizer = struct { }, else => { if (isIdentifierChar(c)) { - result.tag = .Invalid; + result.tag = .invalid; } break; }, @@ -1103,7 +1103,7 @@ pub const Tokenizer = struct { state = .int_literal_bin; }, else => { - result.tag = .Invalid; + result.tag = .invalid; break; }, }, @@ -1114,7 +1114,7 @@ pub const Tokenizer = struct { '0'...'1' => {}, else => { if (isIdentifierChar(c)) { - result.tag = .Invalid; + result.tag = .invalid; } break; }, @@ -1124,7 +1124,7 @@ pub const Tokenizer = struct { state = .int_literal_oct; }, else => { - result.tag = .Invalid; + result.tag = .invalid; break; }, }, @@ -1135,7 +1135,7 @@ pub const Tokenizer = struct { '0'...'7' => {}, else => { if (isIdentifierChar(c)) { 
- result.tag = .Invalid; + result.tag = .invalid; } break; }, @@ -1145,7 +1145,7 @@ pub const Tokenizer = struct { state = .int_literal_dec; }, else => { - result.tag = .Invalid; + result.tag = .invalid; break; }, }, @@ -1155,16 +1155,16 @@ pub const Tokenizer = struct { }, '.' => { state = .num_dot_dec; - result.tag = .FloatLiteral; + result.tag = .float_literal; }, 'e', 'E' => { state = .float_exponent_unsigned; - result.tag = .FloatLiteral; + result.tag = .float_literal; }, '0'...'9' => {}, else => { if (isIdentifierChar(c)) { - result.tag = .Invalid; + result.tag = .invalid; } break; }, @@ -1174,7 +1174,7 @@ pub const Tokenizer = struct { state = .int_literal_hex; }, else => { - result.tag = .Invalid; + result.tag = .invalid; break; }, }, @@ -1184,23 +1184,23 @@ pub const Tokenizer = struct { }, '.' => { state = .num_dot_hex; - result.tag = .FloatLiteral; + result.tag = .float_literal; }, 'p', 'P' => { state = .float_exponent_unsigned; - result.tag = .FloatLiteral; + result.tag = .float_literal; }, '0'...'9', 'a'...'f', 'A'...'F' => {}, else => { if (isIdentifierChar(c)) { - result.tag = .Invalid; + result.tag = .invalid; } break; }, }, .num_dot_dec => switch (c) { '.' => { - result.tag = .IntegerLiteral; + result.tag = .integer_literal; self.index -= 1; state = .start; break; @@ -1213,14 +1213,14 @@ pub const Tokenizer = struct { }, else => { if (isIdentifierChar(c)) { - result.tag = .Invalid; + result.tag = .invalid; } break; }, }, .num_dot_hex => switch (c) { '.' 
=> { - result.tag = .IntegerLiteral; + result.tag = .integer_literal; self.index -= 1; state = .start; break; @@ -1229,12 +1229,12 @@ pub const Tokenizer = struct { state = .float_exponent_unsigned; }, '0'...'9', 'a'...'f', 'A'...'F' => { - result.tag = .FloatLiteral; + result.tag = .float_literal; state = .float_fraction_hex; }, else => { if (isIdentifierChar(c)) { - result.tag = .Invalid; + result.tag = .invalid; } break; }, @@ -1244,7 +1244,7 @@ pub const Tokenizer = struct { state = .float_fraction_dec; }, else => { - result.tag = .Invalid; + result.tag = .invalid; break; }, }, @@ -1258,7 +1258,7 @@ pub const Tokenizer = struct { '0'...'9' => {}, else => { if (isIdentifierChar(c)) { - result.tag = .Invalid; + result.tag = .invalid; } break; }, @@ -1268,7 +1268,7 @@ pub const Tokenizer = struct { state = .float_fraction_hex; }, else => { - result.tag = .Invalid; + result.tag = .invalid; break; }, }, @@ -1282,7 +1282,7 @@ pub const Tokenizer = struct { '0'...'9', 'a'...'f', 'A'...'F' => {}, else => { if (isIdentifierChar(c)) { - result.tag = .Invalid; + result.tag = .invalid; } break; }, @@ -1302,7 +1302,7 @@ pub const Tokenizer = struct { state = .float_exponent_num; }, else => { - result.tag = .Invalid; + result.tag = .invalid; break; }, }, @@ -1313,7 +1313,7 @@ pub const Tokenizer = struct { '0'...'9' => {}, else => { if (isIdentifierChar(c)) { - result.tag = .Invalid; + result.tag = .invalid; } break; }, @@ -1344,10 +1344,10 @@ pub const Tokenizer = struct { } }, .doc_comment, .doc_comment_start => { - result.tag = .DocComment; + result.tag = .doc_comment; }, .container_doc_comment => { - result.tag = .ContainerDocComment; + result.tag = .container_doc_comment; }, .int_literal_dec_no_underscore, @@ -1370,76 +1370,76 @@ pub const Tokenizer = struct { .char_literal_unicode, .string_literal_backslash, => { - result.tag = .Invalid; + result.tag = .invalid; }, .equal => { - result.tag = .Equal; + result.tag = .equal; }, .bang => { - result.tag = .Bang; + 
result.tag = .bang; }, .minus => { - result.tag = .Minus; + result.tag = .minus; }, .slash => { - result.tag = .Slash; + result.tag = .slash; }, .zero => { - result.tag = .IntegerLiteral; + result.tag = .integer_literal; }, .ampersand => { - result.tag = .Ampersand; + result.tag = .ampersand; }, .period => { - result.tag = .Period; + result.tag = .period; }, .period_2 => { - result.tag = .Ellipsis2; + result.tag = .ellipsis2; }, .period_asterisk => { - result.tag = .PeriodAsterisk; + result.tag = .period_asterisk; }, .pipe => { - result.tag = .Pipe; + result.tag = .pipe; }, .angle_bracket_angle_bracket_right => { - result.tag = .AngleBracketAngleBracketRight; + result.tag = .angle_bracket_angle_bracket_right; }, .angle_bracket_right => { - result.tag = .AngleBracketRight; + result.tag = .angle_bracket_right; }, .angle_bracket_angle_bracket_left => { - result.tag = .AngleBracketAngleBracketLeft; + result.tag = .angle_bracket_angle_bracket_left; }, .angle_bracket_left => { - result.tag = .AngleBracketLeft; + result.tag = .angle_bracket_left; }, .plus_percent => { - result.tag = .PlusPercent; + result.tag = .plus_percent; }, .plus => { - result.tag = .Plus; + result.tag = .plus; }, .percent => { - result.tag = .Percent; + result.tag = .percent; }, .caret => { - result.tag = .Caret; + result.tag = .caret; }, .asterisk_percent => { - result.tag = .AsteriskPercent; + result.tag = .asterisk_percent; }, .asterisk => { - result.tag = .Asterisk; + result.tag = .asterisk; }, .minus_percent => { - result.tag = .MinusPercent; + result.tag = .minus_percent; }, } } - if (result.tag == .Eof) { + if (result.tag == .eof) { if (self.pending_invalid_token) |token| { self.pending_invalid_token = null; return token; @@ -1455,7 +1455,7 @@ pub const Tokenizer = struct { const invalid_length = self.getInvalidCharacterLength(); if (invalid_length == 0) return; self.pending_invalid_token = .{ - .tag = .Invalid, + .tag = .invalid, .loc = .{ .start = self.index, .end = self.index + 
invalid_length, @@ -1502,7 +1502,7 @@ pub const Tokenizer = struct { }; test "tokenizer" { - testTokenize("test", &[_]Token.Tag{.Keyword_test}); + testTokenize("test", &.{.keyword_test}); } test "line comment followed by top-level comptime" { @@ -1510,10 +1510,10 @@ test "line comment followed by top-level comptime" { \\// line comment \\comptime {} \\ - , &[_]Token.Tag{ - .Keyword_comptime, - .LBrace, - .RBrace, + , &.{ + .keyword_comptime, + .l_brace, + .r_brace, }); } @@ -1521,199 +1521,199 @@ test "tokenizer - unknown length pointer and then c pointer" { testTokenize( \\[*]u8 \\[*c]u8 - , &[_]Token.Tag{ - .LBracket, - .Asterisk, - .RBracket, - .Identifier, - .LBracket, - .Asterisk, - .Identifier, - .RBracket, - .Identifier, + , &.{ + .l_bracket, + .asterisk, + .r_bracket, + .identifier, + .l_bracket, + .asterisk, + .identifier, + .r_bracket, + .identifier, }); } test "tokenizer - char literal with hex escape" { testTokenize( \\'\x1b' - , &[_]Token.Tag{.CharLiteral}); + , &.{.char_literal}); testTokenize( \\'\x1' - , &[_]Token.Tag{ .Invalid, .Invalid }); + , &.{ .invalid, .invalid }); } test "tokenizer - char literal with unicode escapes" { // Valid unicode escapes testTokenize( \\'\u{3}' - , &[_]Token.Tag{.CharLiteral}); + , &.{.char_literal}); testTokenize( \\'\u{01}' - , &[_]Token.Tag{.CharLiteral}); + , &.{.char_literal}); testTokenize( \\'\u{2a}' - , &[_]Token.Tag{.CharLiteral}); + , &.{.char_literal}); testTokenize( \\'\u{3f9}' - , &[_]Token.Tag{.CharLiteral}); + , &.{.char_literal}); testTokenize( \\'\u{6E09aBc1523}' - , &[_]Token.Tag{.CharLiteral}); + , &.{.char_literal}); testTokenize( \\"\u{440}" - , &[_]Token.Tag{.StringLiteral}); + , &.{.string_literal}); // Invalid unicode escapes testTokenize( \\'\u' - , &[_]Token.Tag{.Invalid}); + , &.{.invalid}); testTokenize( \\'\u{{' - , &[_]Token.Tag{ .Invalid, .Invalid }); + , &.{ .invalid, .invalid }); testTokenize( \\'\u{}' - , &[_]Token.Tag{ .Invalid, .Invalid }); + , &.{ .invalid, .invalid }); 
testTokenize( \\'\u{s}' - , &[_]Token.Tag{ .Invalid, .Invalid }); + , &.{ .invalid, .invalid }); testTokenize( \\'\u{2z}' - , &[_]Token.Tag{ .Invalid, .Invalid }); + , &.{ .invalid, .invalid }); testTokenize( \\'\u{4a' - , &[_]Token.Tag{.Invalid}); + , &.{.invalid}); // Test old-style unicode literals testTokenize( \\'\u0333' - , &[_]Token.Tag{ .Invalid, .Invalid }); + , &.{ .invalid, .invalid }); testTokenize( \\'\U0333' - , &[_]Token.Tag{ .Invalid, .IntegerLiteral, .Invalid }); + , &.{ .invalid, .integer_literal, .invalid }); } test "tokenizer - char literal with unicode code point" { testTokenize( \\'💩' - , &[_]Token.Tag{.CharLiteral}); + , &.{.char_literal}); } test "tokenizer - float literal e exponent" { - testTokenize("a = 4.94065645841246544177e-324;\n", &[_]Token.Tag{ - .Identifier, - .Equal, - .FloatLiteral, - .Semicolon, + testTokenize("a = 4.94065645841246544177e-324;\n", &.{ + .identifier, + .equal, + .float_literal, + .semicolon, }); } test "tokenizer - float literal p exponent" { - testTokenize("a = 0x1.a827999fcef32p+1022;\n", &[_]Token.Tag{ - .Identifier, - .Equal, - .FloatLiteral, - .Semicolon, + testTokenize("a = 0x1.a827999fcef32p+1022;\n", &.{ + .identifier, + .equal, + .float_literal, + .semicolon, }); } test "tokenizer - chars" { - testTokenize("'c'", &[_]Token.Tag{.CharLiteral}); + testTokenize("'c'", &.{.char_literal}); } test "tokenizer - invalid token characters" { - testTokenize("#", &[_]Token.Tag{.Invalid}); - testTokenize("`", &[_]Token.Tag{.Invalid}); - testTokenize("'c", &[_]Token.Tag{.Invalid}); - testTokenize("'", &[_]Token.Tag{.Invalid}); - testTokenize("''", &[_]Token.Tag{ .Invalid, .Invalid }); + testTokenize("#", &.{.invalid}); + testTokenize("`", &.{.invalid}); + testTokenize("'c", &.{.invalid}); + testTokenize("'", &.{.invalid}); + testTokenize("''", &.{ .invalid, .invalid }); } test "tokenizer - invalid literal/comment characters" { - testTokenize("\"\x00\"", &[_]Token.Tag{ - .StringLiteral, - .Invalid, + 
testTokenize("\"\x00\"", &.{ + .string_literal, + .invalid, }); - testTokenize("//\x00", &[_]Token.Tag{ - .Invalid, + testTokenize("//\x00", &.{ + .invalid, }); - testTokenize("//\x1f", &[_]Token.Tag{ - .Invalid, + testTokenize("//\x1f", &.{ + .invalid, }); - testTokenize("//\x7f", &[_]Token.Tag{ - .Invalid, + testTokenize("//\x7f", &.{ + .invalid, }); } test "tokenizer - utf8" { - testTokenize("//\xc2\x80", &[_]Token.Tag{}); - testTokenize("//\xf4\x8f\xbf\xbf", &[_]Token.Tag{}); + testTokenize("//\xc2\x80", &.{}); + testTokenize("//\xf4\x8f\xbf\xbf", &.{}); } test "tokenizer - invalid utf8" { - testTokenize("//\x80", &[_]Token.Tag{ - .Invalid, + testTokenize("//\x80", &.{ + .invalid, }); - testTokenize("//\xbf", &[_]Token.Tag{ - .Invalid, + testTokenize("//\xbf", &.{ + .invalid, }); - testTokenize("//\xf8", &[_]Token.Tag{ - .Invalid, + testTokenize("//\xf8", &.{ + .invalid, }); - testTokenize("//\xff", &[_]Token.Tag{ - .Invalid, + testTokenize("//\xff", &.{ + .invalid, }); - testTokenize("//\xc2\xc0", &[_]Token.Tag{ - .Invalid, + testTokenize("//\xc2\xc0", &.{ + .invalid, }); - testTokenize("//\xe0", &[_]Token.Tag{ - .Invalid, + testTokenize("//\xe0", &.{ + .invalid, }); - testTokenize("//\xf0", &[_]Token.Tag{ - .Invalid, + testTokenize("//\xf0", &.{ + .invalid, }); - testTokenize("//\xf0\x90\x80\xc0", &[_]Token.Tag{ - .Invalid, + testTokenize("//\xf0\x90\x80\xc0", &.{ + .invalid, }); } test "tokenizer - illegal unicode codepoints" { // unicode newline characters.U+0085, U+2028, U+2029 - testTokenize("//\xc2\x84", &[_]Token.Tag{}); - testTokenize("//\xc2\x85", &[_]Token.Tag{ - .Invalid, + testTokenize("//\xc2\x84", &.{}); + testTokenize("//\xc2\x85", &.{ + .invalid, }); - testTokenize("//\xc2\x86", &[_]Token.Tag{}); - testTokenize("//\xe2\x80\xa7", &[_]Token.Tag{}); - testTokenize("//\xe2\x80\xa8", &[_]Token.Tag{ - .Invalid, + testTokenize("//\xc2\x86", &.{}); + testTokenize("//\xe2\x80\xa7", &.{}); + testTokenize("//\xe2\x80\xa8", &.{ + .invalid, }); - 
testTokenize("//\xe2\x80\xa9", &[_]Token.Tag{ - .Invalid, + testTokenize("//\xe2\x80\xa9", &.{ + .invalid, }); - testTokenize("//\xe2\x80\xaa", &[_]Token.Tag{}); + testTokenize("//\xe2\x80\xaa", &.{}); } test "tokenizer - string identifier and builtin fns" { testTokenize( \\const @"if" = @import("std"); - , &[_]Token.Tag{ - .Keyword_const, - .Identifier, - .Equal, - .Builtin, - .LParen, - .StringLiteral, - .RParen, - .Semicolon, + , &.{ + .keyword_const, + .identifier, + .equal, + .builtin, + .l_paren, + .string_literal, + .r_paren, + .semicolon, }); } test "tokenizer - multiline string literal with literal tab" { testTokenize( \\\\foo bar - , &[_]Token.Tag{ - .MultilineStringLiteralLine, + , &.{ + .multiline_string_literal_line, }); } @@ -1725,30 +1725,30 @@ test "tokenizer - comments with literal tab" { \\// foo \\/// foo \\/// /foo - , &[_]Token.Tag{ - .ContainerDocComment, - .DocComment, - .DocComment, - .DocComment, + , &.{ + .container_doc_comment, + .doc_comment, + .doc_comment, + .doc_comment, }); } test "tokenizer - pipe and then invalid" { - testTokenize("||=", &[_]Token.Tag{ - .PipePipe, - .Equal, + testTokenize("||=", &.{ + .pipe_pipe, + .equal, }); } test "tokenizer - line comment and doc comment" { - testTokenize("//", &[_]Token.Tag{}); - testTokenize("// a / b", &[_]Token.Tag{}); - testTokenize("// /", &[_]Token.Tag{}); - testTokenize("/// a", &[_]Token.Tag{.DocComment}); - testTokenize("///", &[_]Token.Tag{.DocComment}); - testTokenize("////", &[_]Token.Tag{}); - testTokenize("//!", &[_]Token.Tag{.ContainerDocComment}); - testTokenize("//!!", &[_]Token.Tag{.ContainerDocComment}); + testTokenize("//", &.{}); + testTokenize("// a / b", &.{}); + testTokenize("// /", &.{}); + testTokenize("/// a", &.{.doc_comment}); + testTokenize("///", &.{.doc_comment}); + testTokenize("////", &.{}); + testTokenize("//!", &.{.container_doc_comment}); + testTokenize("//!!", &.{.container_doc_comment}); } test "tokenizer - line comment followed by identifier" { @@ 
-1756,293 +1756,293 @@ test "tokenizer - line comment followed by identifier" { \\ Unexpected, \\ // another \\ Another, - , &[_]Token.Tag{ - .Identifier, - .Comma, - .Identifier, - .Comma, + , &.{ + .identifier, + .comma, + .identifier, + .comma, }); } test "tokenizer - UTF-8 BOM is recognized and skipped" { - testTokenize("\xEF\xBB\xBFa;\n", &[_]Token.Tag{ - .Identifier, - .Semicolon, + testTokenize("\xEF\xBB\xBFa;\n", &.{ + .identifier, + .semicolon, }); } test "correctly parse pointer assignment" { - testTokenize("b.*=3;\n", &[_]Token.Tag{ - .Identifier, - .PeriodAsterisk, - .Equal, - .IntegerLiteral, - .Semicolon, + testTokenize("b.*=3;\n", &.{ + .identifier, + .period_asterisk, + .equal, + .integer_literal, + .semicolon, }); } test "correctly parse pointer dereference followed by asterisk" { - testTokenize("\"b\".* ** 10", &[_]Token.Tag{ - .StringLiteral, - .PeriodAsterisk, - .AsteriskAsterisk, - .IntegerLiteral, + testTokenize("\"b\".* ** 10", &.{ + .string_literal, + .period_asterisk, + .asterisk_asterisk, + .integer_literal, }); - testTokenize("(\"b\".*)** 10", &[_]Token.Tag{ - .LParen, - .StringLiteral, - .PeriodAsterisk, - .RParen, - .AsteriskAsterisk, - .IntegerLiteral, + testTokenize("(\"b\".*)** 10", &.{ + .l_paren, + .string_literal, + .period_asterisk, + .r_paren, + .asterisk_asterisk, + .integer_literal, }); - testTokenize("\"b\".*** 10", &[_]Token.Tag{ - .StringLiteral, - .Invalid_periodasterisks, - .AsteriskAsterisk, - .IntegerLiteral, + testTokenize("\"b\".*** 10", &.{ + .string_literal, + .invalid_periodasterisks, + .asterisk_asterisk, + .integer_literal, }); } test "tokenizer - range literals" { - testTokenize("0...9", &[_]Token.Tag{ .IntegerLiteral, .Ellipsis3, .IntegerLiteral }); - testTokenize("'0'...'9'", &[_]Token.Tag{ .CharLiteral, .Ellipsis3, .CharLiteral }); - testTokenize("0x00...0x09", &[_]Token.Tag{ .IntegerLiteral, .Ellipsis3, .IntegerLiteral }); - testTokenize("0b00...0b11", &[_]Token.Tag{ .IntegerLiteral, .Ellipsis3, 
.IntegerLiteral }); - testTokenize("0o00...0o11", &[_]Token.Tag{ .IntegerLiteral, .Ellipsis3, .IntegerLiteral }); + testTokenize("0...9", &.{ .integer_literal, .ellipsis3, .integer_literal }); + testTokenize("'0'...'9'", &.{ .char_literal, .ellipsis3, .char_literal }); + testTokenize("0x00...0x09", &.{ .integer_literal, .ellipsis3, .integer_literal }); + testTokenize("0b00...0b11", &.{ .integer_literal, .ellipsis3, .integer_literal }); + testTokenize("0o00...0o11", &.{ .integer_literal, .ellipsis3, .integer_literal }); } test "tokenizer - number literals decimal" { - testTokenize("0", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("1", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("2", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("3", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("4", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("5", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("6", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("7", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("8", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("9", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("1..", &[_]Token.Tag{ .IntegerLiteral, .Ellipsis2 }); - testTokenize("0a", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("9b", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("1z", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("1z_1", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("9z3", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0", &.{.integer_literal}); + testTokenize("1", &.{.integer_literal}); + testTokenize("2", &.{.integer_literal}); + testTokenize("3", &.{.integer_literal}); + testTokenize("4", &.{.integer_literal}); + testTokenize("5", &.{.integer_literal}); + testTokenize("6", &.{.integer_literal}); + testTokenize("7", &.{.integer_literal}); + testTokenize("8", &.{.integer_literal}); + testTokenize("9", &.{.integer_literal}); + testTokenize("1..", &.{ .integer_literal, .ellipsis2 }); + testTokenize("0a", &.{ 
.invalid, .identifier }); + testTokenize("9b", &.{ .invalid, .identifier }); + testTokenize("1z", &.{ .invalid, .identifier }); + testTokenize("1z_1", &.{ .invalid, .identifier }); + testTokenize("9z3", &.{ .invalid, .identifier }); - testTokenize("0_0", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0001", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("01234567890", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("012_345_6789_0", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0_1_2_3_4_5_6_7_8_9_0", &[_]Token.Tag{.IntegerLiteral}); + testTokenize("0_0", &.{.integer_literal}); + testTokenize("0001", &.{.integer_literal}); + testTokenize("01234567890", &.{.integer_literal}); + testTokenize("012_345_6789_0", &.{.integer_literal}); + testTokenize("0_1_2_3_4_5_6_7_8_9_0", &.{.integer_literal}); - testTokenize("00_", &[_]Token.Tag{.Invalid}); - testTokenize("0_0_", &[_]Token.Tag{.Invalid}); - testTokenize("0__0", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0_0f", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0_0_f", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0_0_f_00", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("1_,", &[_]Token.Tag{ .Invalid, .Comma }); + testTokenize("00_", &.{.invalid}); + testTokenize("0_0_", &.{.invalid}); + testTokenize("0__0", &.{ .invalid, .identifier }); + testTokenize("0_0f", &.{ .invalid, .identifier }); + testTokenize("0_0_f", &.{ .invalid, .identifier }); + testTokenize("0_0_f_00", &.{ .invalid, .identifier }); + testTokenize("1_,", &.{ .invalid, .comma }); - testTokenize("1.", &[_]Token.Tag{.FloatLiteral}); - testTokenize("0.0", &[_]Token.Tag{.FloatLiteral}); - testTokenize("1.0", &[_]Token.Tag{.FloatLiteral}); - testTokenize("10.0", &[_]Token.Tag{.FloatLiteral}); - testTokenize("0e0", &[_]Token.Tag{.FloatLiteral}); - testTokenize("1e0", &[_]Token.Tag{.FloatLiteral}); - testTokenize("1e100", &[_]Token.Tag{.FloatLiteral}); - testTokenize("1.e100", &[_]Token.Tag{.FloatLiteral}); - 
testTokenize("1.0e100", &[_]Token.Tag{.FloatLiteral}); - testTokenize("1.0e+100", &[_]Token.Tag{.FloatLiteral}); - testTokenize("1.0e-100", &[_]Token.Tag{.FloatLiteral}); - testTokenize("1_0_0_0.0_0_0_0_0_1e1_0_0_0", &[_]Token.Tag{.FloatLiteral}); - testTokenize("1.+", &[_]Token.Tag{ .FloatLiteral, .Plus }); + testTokenize("1.", &.{.float_literal}); + testTokenize("0.0", &.{.float_literal}); + testTokenize("1.0", &.{.float_literal}); + testTokenize("10.0", &.{.float_literal}); + testTokenize("0e0", &.{.float_literal}); + testTokenize("1e0", &.{.float_literal}); + testTokenize("1e100", &.{.float_literal}); + testTokenize("1.e100", &.{.float_literal}); + testTokenize("1.0e100", &.{.float_literal}); + testTokenize("1.0e+100", &.{.float_literal}); + testTokenize("1.0e-100", &.{.float_literal}); + testTokenize("1_0_0_0.0_0_0_0_0_1e1_0_0_0", &.{.float_literal}); + testTokenize("1.+", &.{ .float_literal, .plus }); - testTokenize("1e", &[_]Token.Tag{.Invalid}); - testTokenize("1.0e1f0", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("1.0p100", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("1.0p-100", &[_]Token.Tag{ .Invalid, .Identifier, .Minus, .IntegerLiteral }); - testTokenize("1.0p1f0", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("1.0_,", &[_]Token.Tag{ .Invalid, .Comma }); - testTokenize("1_.0", &[_]Token.Tag{ .Invalid, .Period, .IntegerLiteral }); - testTokenize("1._", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("1.a", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("1.z", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("1._0", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("1._+", &[_]Token.Tag{ .Invalid, .Identifier, .Plus }); - testTokenize("1._e", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("1.0e", &[_]Token.Tag{.Invalid}); - testTokenize("1.0e,", &[_]Token.Tag{ .Invalid, .Comma }); - testTokenize("1.0e_", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("1.0e+_", 
&[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("1.0e-_", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("1.0e0_+", &[_]Token.Tag{ .Invalid, .Plus }); + testTokenize("1e", &.{.invalid}); + testTokenize("1.0e1f0", &.{ .invalid, .identifier }); + testTokenize("1.0p100", &.{ .invalid, .identifier }); + testTokenize("1.0p-100", &.{ .invalid, .identifier, .minus, .integer_literal }); + testTokenize("1.0p1f0", &.{ .invalid, .identifier }); + testTokenize("1.0_,", &.{ .invalid, .comma }); + testTokenize("1_.0", &.{ .invalid, .period, .integer_literal }); + testTokenize("1._", &.{ .invalid, .identifier }); + testTokenize("1.a", &.{ .invalid, .identifier }); + testTokenize("1.z", &.{ .invalid, .identifier }); + testTokenize("1._0", &.{ .invalid, .identifier }); + testTokenize("1._+", &.{ .invalid, .identifier, .plus }); + testTokenize("1._e", &.{ .invalid, .identifier }); + testTokenize("1.0e", &.{.invalid}); + testTokenize("1.0e,", &.{ .invalid, .comma }); + testTokenize("1.0e_", &.{ .invalid, .identifier }); + testTokenize("1.0e+_", &.{ .invalid, .identifier }); + testTokenize("1.0e-_", &.{ .invalid, .identifier }); + testTokenize("1.0e0_+", &.{ .invalid, .plus }); } test "tokenizer - number literals binary" { - testTokenize("0b0", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0b1", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0b2", &[_]Token.Tag{ .Invalid, .IntegerLiteral }); - testTokenize("0b3", &[_]Token.Tag{ .Invalid, .IntegerLiteral }); - testTokenize("0b4", &[_]Token.Tag{ .Invalid, .IntegerLiteral }); - testTokenize("0b5", &[_]Token.Tag{ .Invalid, .IntegerLiteral }); - testTokenize("0b6", &[_]Token.Tag{ .Invalid, .IntegerLiteral }); - testTokenize("0b7", &[_]Token.Tag{ .Invalid, .IntegerLiteral }); - testTokenize("0b8", &[_]Token.Tag{ .Invalid, .IntegerLiteral }); - testTokenize("0b9", &[_]Token.Tag{ .Invalid, .IntegerLiteral }); - testTokenize("0ba", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0bb", &[_]Token.Tag{ .Invalid, 
.Identifier }); - testTokenize("0bc", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0bd", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0be", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0bf", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0bz", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0b0", &.{.integer_literal}); + testTokenize("0b1", &.{.integer_literal}); + testTokenize("0b2", &.{ .invalid, .integer_literal }); + testTokenize("0b3", &.{ .invalid, .integer_literal }); + testTokenize("0b4", &.{ .invalid, .integer_literal }); + testTokenize("0b5", &.{ .invalid, .integer_literal }); + testTokenize("0b6", &.{ .invalid, .integer_literal }); + testTokenize("0b7", &.{ .invalid, .integer_literal }); + testTokenize("0b8", &.{ .invalid, .integer_literal }); + testTokenize("0b9", &.{ .invalid, .integer_literal }); + testTokenize("0ba", &.{ .invalid, .identifier }); + testTokenize("0bb", &.{ .invalid, .identifier }); + testTokenize("0bc", &.{ .invalid, .identifier }); + testTokenize("0bd", &.{ .invalid, .identifier }); + testTokenize("0be", &.{ .invalid, .identifier }); + testTokenize("0bf", &.{ .invalid, .identifier }); + testTokenize("0bz", &.{ .invalid, .identifier }); - testTokenize("0b0000_0000", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0b1111_1111", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0b10_10_10_10", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0b0_1_0_1_0_1_0_1", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0b1.", &[_]Token.Tag{ .IntegerLiteral, .Period }); - testTokenize("0b1.0", &[_]Token.Tag{ .IntegerLiteral, .Period, .IntegerLiteral }); + testTokenize("0b0000_0000", &.{.integer_literal}); + testTokenize("0b1111_1111", &.{.integer_literal}); + testTokenize("0b10_10_10_10", &.{.integer_literal}); + testTokenize("0b0_1_0_1_0_1_0_1", &.{.integer_literal}); + testTokenize("0b1.", &.{ .integer_literal, .period }); + testTokenize("0b1.0", &.{ .integer_literal, .period, 
.integer_literal }); - testTokenize("0B0", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0b_", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0b_0", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0b1_", &[_]Token.Tag{.Invalid}); - testTokenize("0b0__1", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0b0_1_", &[_]Token.Tag{.Invalid}); - testTokenize("0b1e", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0b1p", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0b1e0", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0b1p0", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0b1_,", &[_]Token.Tag{ .Invalid, .Comma }); + testTokenize("0B0", &.{ .invalid, .identifier }); + testTokenize("0b_", &.{ .invalid, .identifier }); + testTokenize("0b_0", &.{ .invalid, .identifier }); + testTokenize("0b1_", &.{.invalid}); + testTokenize("0b0__1", &.{ .invalid, .identifier }); + testTokenize("0b0_1_", &.{.invalid}); + testTokenize("0b1e", &.{ .invalid, .identifier }); + testTokenize("0b1p", &.{ .invalid, .identifier }); + testTokenize("0b1e0", &.{ .invalid, .identifier }); + testTokenize("0b1p0", &.{ .invalid, .identifier }); + testTokenize("0b1_,", &.{ .invalid, .comma }); } test "tokenizer - number literals octal" { - testTokenize("0o0", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0o1", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0o2", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0o3", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0o4", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0o5", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0o6", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0o7", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0o8", &[_]Token.Tag{ .Invalid, .IntegerLiteral }); - testTokenize("0o9", &[_]Token.Tag{ .Invalid, .IntegerLiteral }); - testTokenize("0oa", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0ob", &[_]Token.Tag{ .Invalid, .Identifier }); - 
testTokenize("0oc", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0od", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0oe", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0of", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0oz", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0o0", &.{.integer_literal}); + testTokenize("0o1", &.{.integer_literal}); + testTokenize("0o2", &.{.integer_literal}); + testTokenize("0o3", &.{.integer_literal}); + testTokenize("0o4", &.{.integer_literal}); + testTokenize("0o5", &.{.integer_literal}); + testTokenize("0o6", &.{.integer_literal}); + testTokenize("0o7", &.{.integer_literal}); + testTokenize("0o8", &.{ .invalid, .integer_literal }); + testTokenize("0o9", &.{ .invalid, .integer_literal }); + testTokenize("0oa", &.{ .invalid, .identifier }); + testTokenize("0ob", &.{ .invalid, .identifier }); + testTokenize("0oc", &.{ .invalid, .identifier }); + testTokenize("0od", &.{ .invalid, .identifier }); + testTokenize("0oe", &.{ .invalid, .identifier }); + testTokenize("0of", &.{ .invalid, .identifier }); + testTokenize("0oz", &.{ .invalid, .identifier }); - testTokenize("0o01234567", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0o0123_4567", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0o01_23_45_67", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0o0_1_2_3_4_5_6_7", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0o7.", &[_]Token.Tag{ .IntegerLiteral, .Period }); - testTokenize("0o7.0", &[_]Token.Tag{ .IntegerLiteral, .Period, .IntegerLiteral }); + testTokenize("0o01234567", &.{.integer_literal}); + testTokenize("0o0123_4567", &.{.integer_literal}); + testTokenize("0o01_23_45_67", &.{.integer_literal}); + testTokenize("0o0_1_2_3_4_5_6_7", &.{.integer_literal}); + testTokenize("0o7.", &.{ .integer_literal, .period }); + testTokenize("0o7.0", &.{ .integer_literal, .period, .integer_literal }); - testTokenize("0O0", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0o_", 
&[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0o_0", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0o1_", &[_]Token.Tag{.Invalid}); - testTokenize("0o0__1", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0o0_1_", &[_]Token.Tag{.Invalid}); - testTokenize("0o1e", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0o1p", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0o1e0", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0o1p0", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0o_,", &[_]Token.Tag{ .Invalid, .Identifier, .Comma }); + testTokenize("0O0", &.{ .invalid, .identifier }); + testTokenize("0o_", &.{ .invalid, .identifier }); + testTokenize("0o_0", &.{ .invalid, .identifier }); + testTokenize("0o1_", &.{.invalid}); + testTokenize("0o0__1", &.{ .invalid, .identifier }); + testTokenize("0o0_1_", &.{.invalid}); + testTokenize("0o1e", &.{ .invalid, .identifier }); + testTokenize("0o1p", &.{ .invalid, .identifier }); + testTokenize("0o1e0", &.{ .invalid, .identifier }); + testTokenize("0o1p0", &.{ .invalid, .identifier }); + testTokenize("0o_,", &.{ .invalid, .identifier, .comma }); } test "tokenizer - number literals hexadeciaml" { - testTokenize("0x0", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0x1", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0x2", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0x3", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0x4", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0x5", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0x6", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0x7", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0x8", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0x9", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0xa", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0xb", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0xc", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0xd", &[_]Token.Tag{.IntegerLiteral}); - 
testTokenize("0xe", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0xf", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0xA", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0xB", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0xC", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0xD", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0xE", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0xF", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0x0z", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0xz", &[_]Token.Tag{ .Invalid, .Identifier }); + testTokenize("0x0", &.{.integer_literal}); + testTokenize("0x1", &.{.integer_literal}); + testTokenize("0x2", &.{.integer_literal}); + testTokenize("0x3", &.{.integer_literal}); + testTokenize("0x4", &.{.integer_literal}); + testTokenize("0x5", &.{.integer_literal}); + testTokenize("0x6", &.{.integer_literal}); + testTokenize("0x7", &.{.integer_literal}); + testTokenize("0x8", &.{.integer_literal}); + testTokenize("0x9", &.{.integer_literal}); + testTokenize("0xa", &.{.integer_literal}); + testTokenize("0xb", &.{.integer_literal}); + testTokenize("0xc", &.{.integer_literal}); + testTokenize("0xd", &.{.integer_literal}); + testTokenize("0xe", &.{.integer_literal}); + testTokenize("0xf", &.{.integer_literal}); + testTokenize("0xA", &.{.integer_literal}); + testTokenize("0xB", &.{.integer_literal}); + testTokenize("0xC", &.{.integer_literal}); + testTokenize("0xD", &.{.integer_literal}); + testTokenize("0xE", &.{.integer_literal}); + testTokenize("0xF", &.{.integer_literal}); + testTokenize("0x0z", &.{ .invalid, .identifier }); + testTokenize("0xz", &.{ .invalid, .identifier }); - testTokenize("0x0123456789ABCDEF", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0x0123_4567_89AB_CDEF", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0x01_23_45_67_89AB_CDE_F", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0x0_1_2_3_4_5_6_7_8_9_A_B_C_D_E_F", &[_]Token.Tag{.IntegerLiteral}); + 
testTokenize("0x0123456789ABCDEF", &.{.integer_literal}); + testTokenize("0x0123_4567_89AB_CDEF", &.{.integer_literal}); + testTokenize("0x01_23_45_67_89AB_CDE_F", &.{.integer_literal}); + testTokenize("0x0_1_2_3_4_5_6_7_8_9_A_B_C_D_E_F", &.{.integer_literal}); - testTokenize("0X0", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0x_", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0x_1", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0x1_", &[_]Token.Tag{.Invalid}); - testTokenize("0x0__1", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0x0_1_", &[_]Token.Tag{.Invalid}); - testTokenize("0x_,", &[_]Token.Tag{ .Invalid, .Identifier, .Comma }); + testTokenize("0X0", &.{ .invalid, .identifier }); + testTokenize("0x_", &.{ .invalid, .identifier }); + testTokenize("0x_1", &.{ .invalid, .identifier }); + testTokenize("0x1_", &.{.invalid}); + testTokenize("0x0__1", &.{ .invalid, .identifier }); + testTokenize("0x0_1_", &.{.invalid}); + testTokenize("0x_,", &.{ .invalid, .identifier, .comma }); - testTokenize("0x1.", &[_]Token.Tag{.FloatLiteral}); - testTokenize("0x1.0", &[_]Token.Tag{.FloatLiteral}); - testTokenize("0xF.", &[_]Token.Tag{.FloatLiteral}); - testTokenize("0xF.0", &[_]Token.Tag{.FloatLiteral}); - testTokenize("0xF.F", &[_]Token.Tag{.FloatLiteral}); - testTokenize("0xF.Fp0", &[_]Token.Tag{.FloatLiteral}); - testTokenize("0xF.FP0", &[_]Token.Tag{.FloatLiteral}); - testTokenize("0x1p0", &[_]Token.Tag{.FloatLiteral}); - testTokenize("0xfp0", &[_]Token.Tag{.FloatLiteral}); - testTokenize("0x1.+0xF.", &[_]Token.Tag{ .FloatLiteral, .Plus, .FloatLiteral }); + testTokenize("0x1.", &.{.float_literal}); + testTokenize("0x1.0", &.{.float_literal}); + testTokenize("0xF.", &.{.float_literal}); + testTokenize("0xF.0", &.{.float_literal}); + testTokenize("0xF.F", &.{.float_literal}); + testTokenize("0xF.Fp0", &.{.float_literal}); + testTokenize("0xF.FP0", &.{.float_literal}); + testTokenize("0x1p0", &.{.float_literal}); + 
testTokenize("0xfp0", &.{.float_literal}); + testTokenize("0x1.+0xF.", &.{ .float_literal, .plus, .float_literal }); - testTokenize("0x0123456.789ABCDEF", &[_]Token.Tag{.FloatLiteral}); - testTokenize("0x0_123_456.789_ABC_DEF", &[_]Token.Tag{.FloatLiteral}); - testTokenize("0x0_1_2_3_4_5_6.7_8_9_A_B_C_D_E_F", &[_]Token.Tag{.FloatLiteral}); - testTokenize("0x0p0", &[_]Token.Tag{.FloatLiteral}); - testTokenize("0x0.0p0", &[_]Token.Tag{.FloatLiteral}); - testTokenize("0xff.ffp10", &[_]Token.Tag{.FloatLiteral}); - testTokenize("0xff.ffP10", &[_]Token.Tag{.FloatLiteral}); - testTokenize("0xff.p10", &[_]Token.Tag{.FloatLiteral}); - testTokenize("0xffp10", &[_]Token.Tag{.FloatLiteral}); - testTokenize("0xff_ff.ff_ffp1_0_0_0", &[_]Token.Tag{.FloatLiteral}); - testTokenize("0xf_f_f_f.f_f_f_fp+1_000", &[_]Token.Tag{.FloatLiteral}); - testTokenize("0xf_f_f_f.f_f_f_fp-1_00_0", &[_]Token.Tag{.FloatLiteral}); + testTokenize("0x0123456.789ABCDEF", &.{.float_literal}); + testTokenize("0x0_123_456.789_ABC_DEF", &.{.float_literal}); + testTokenize("0x0_1_2_3_4_5_6.7_8_9_A_B_C_D_E_F", &.{.float_literal}); + testTokenize("0x0p0", &.{.float_literal}); + testTokenize("0x0.0p0", &.{.float_literal}); + testTokenize("0xff.ffp10", &.{.float_literal}); + testTokenize("0xff.ffP10", &.{.float_literal}); + testTokenize("0xff.p10", &.{.float_literal}); + testTokenize("0xffp10", &.{.float_literal}); + testTokenize("0xff_ff.ff_ffp1_0_0_0", &.{.float_literal}); + testTokenize("0xf_f_f_f.f_f_f_fp+1_000", &.{.float_literal}); + testTokenize("0xf_f_f_f.f_f_f_fp-1_00_0", &.{.float_literal}); - testTokenize("0x1e", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0x1e0", &[_]Token.Tag{.IntegerLiteral}); - testTokenize("0x1p", &[_]Token.Tag{.Invalid}); - testTokenize("0xfp0z1", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0xff.ffpff", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0x0.p", &[_]Token.Tag{.Invalid}); - testTokenize("0x0.z", &[_]Token.Tag{ .Invalid, .Identifier }); - 
testTokenize("0x0._", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0x0_.0", &[_]Token.Tag{ .Invalid, .Period, .IntegerLiteral }); - testTokenize("0x0_.0.0", &[_]Token.Tag{ .Invalid, .Period, .FloatLiteral }); - testTokenize("0x0._0", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0x0.0_", &[_]Token.Tag{.Invalid}); - testTokenize("0x0_p0", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0x0_.p0", &[_]Token.Tag{ .Invalid, .Period, .Identifier }); - testTokenize("0x0._p0", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0x0.0_p0", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0x0._0p0", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0x0.0p_0", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0x0.0p+_0", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0x0.0p-_0", &[_]Token.Tag{ .Invalid, .Identifier }); - testTokenize("0x0.0p0_", &[_]Token.Tag{ .Invalid, .Eof }); + testTokenize("0x1e", &.{.integer_literal}); + testTokenize("0x1e0", &.{.integer_literal}); + testTokenize("0x1p", &.{.invalid}); + testTokenize("0xfp0z1", &.{ .invalid, .identifier }); + testTokenize("0xff.ffpff", &.{ .invalid, .identifier }); + testTokenize("0x0.p", &.{.invalid}); + testTokenize("0x0.z", &.{ .invalid, .identifier }); + testTokenize("0x0._", &.{ .invalid, .identifier }); + testTokenize("0x0_.0", &.{ .invalid, .period, .integer_literal }); + testTokenize("0x0_.0.0", &.{ .invalid, .period, .float_literal }); + testTokenize("0x0._0", &.{ .invalid, .identifier }); + testTokenize("0x0.0_", &.{.invalid}); + testTokenize("0x0_p0", &.{ .invalid, .identifier }); + testTokenize("0x0_.p0", &.{ .invalid, .period, .identifier }); + testTokenize("0x0._p0", &.{ .invalid, .identifier }); + testTokenize("0x0.0_p0", &.{ .invalid, .identifier }); + testTokenize("0x0._0p0", &.{ .invalid, .identifier }); + testTokenize("0x0.0p_0", &.{ .invalid, .identifier }); + testTokenize("0x0.0p+_0", &.{ .invalid, .identifier }); + 
testTokenize("0x0.0p-_0", &.{ .invalid, .identifier }); + testTokenize("0x0.0p0_", &.{ .invalid, .eof }); } fn testTokenize(source: []const u8, expected_tokens: []const Token.Tag) void { @@ -2054,5 +2054,5 @@ fn testTokenize(source: []const u8, expected_tokens: []const Token.Tag) void { } } const last_token = tokenizer.next(); - std.testing.expect(last_token.tag == .Eof); + std.testing.expect(last_token.tag == .eof); } From bb22490fcc8ed7c96d665242cd18934f9d547555 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Fri, 12 Feb 2021 01:38:10 +0200 Subject: [PATCH 053/173] snake_case Node.Tag --- lib/std/zig/ast.zig | 1148 ++++++++++++++++++++-------------------- lib/std/zig/parse.zig | 398 +++++++------- lib/std/zig/render.zig | 456 ++++++++-------- 3 files changed, 1001 insertions(+), 1001 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 6a56c0e242..0b6133e789 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -199,148 +199,148 @@ pub const Tree = struct { var end_offset: TokenIndex = 0; var n = node; while (true) switch (tags[n]) { - .Root => return 0, + .root => return 0, - .UsingNamespace, - .TestDecl, - .ErrDefer, - .Defer, - .BoolNot, - .Negation, - .BitNot, - .NegationWrap, - .AddressOf, - .Try, - .Await, - .OptionalType, - .Switch, - .SwitchComma, - .IfSimple, - .If, - .Suspend, - .Resume, - .Continue, - .Break, - .Return, - .AnyFrameType, - .Identifier, - .AnyFrameLiteral, - .CharLiteral, - .IntegerLiteral, - .FloatLiteral, - .FalseLiteral, - .TrueLiteral, - .NullLiteral, - .UndefinedLiteral, - .UnreachableLiteral, - .StringLiteral, - .GroupedExpression, - .BuiltinCallTwo, - .BuiltinCallTwoComma, - .BuiltinCall, - .BuiltinCallComma, - .ErrorSetDecl, - .AnyType, - .Comptime, - .Nosuspend, - .AsmSimple, - .Asm, - .FnProtoSimple, - .FnProtoMulti, - .FnProtoOne, - .FnProto, - .ArrayType, - .ArrayTypeSentinel, - .ErrorValue, + .@"usingnamespace", + .test_decl, + .@"errdefer", + .@"defer", + .bool_not, + .negation, + .bit_not, 
+ .negation_wrap, + .address_of, + .@"try", + .@"await", + .optional_type, + .@"switch", + .switch_comma, + .if_simple, + .@"if", + .@"suspend", + .@"resume", + .@"continue", + .@"break", + .@"return", + .anyframe_type, + .identifier, + .anyframe_literal, + .char_literal, + .integer_literal, + .float_literal, + .false_literal, + .true_literal, + .null_literal, + .undefined_literal, + .unreachable_literal, + .string_literal, + .grouped_expression, + .builtin_call_two, + .builtin_call_two_comma, + .builtin_call, + .builtin_call_comma, + .error_set_decl, + .@"anytype", + .@"comptime", + .@"nosuspend", + .asm_simple, + .@"asm", + .fn_proto_simple, + .fn_proto_multi, + .fn_proto_one, + .fn_proto, + .array_type, + .array_type_sentinel, + .error_value, => return main_tokens[n] - end_offset, - .ArrayInitDot, - .ArrayInitDotComma, - .ArrayInitDotTwo, - .ArrayInitDotTwoComma, - .StructInitDot, - .StructInitDotComma, - .StructInitDotTwo, - .StructInitDotTwoComma, - .EnumLiteral, + .array_init_dot, + .array_init_dot_comma, + .array_init_dot_two, + .array_init_dot_two_comma, + .struct_init_dot, + .struct_init_dot_comma, + .struct_init_dot_two, + .struct_init_dot_two_comma, + .enum_literal, => return main_tokens[n] - 1 - end_offset, - .Catch, - .FieldAccess, - .UnwrapOptional, - .EqualEqual, - .BangEqual, - .LessThan, - .GreaterThan, - .LessOrEqual, - .GreaterOrEqual, - .AssignMul, - .AssignDiv, - .AssignMod, - .AssignAdd, - .AssignSub, - .AssignBitShiftLeft, - .AssignBitShiftRight, - .AssignBitAnd, - .AssignBitXor, - .AssignBitOr, - .AssignMulWrap, - .AssignAddWrap, - .AssignSubWrap, - .Assign, - .MergeErrorSets, - .Mul, - .Div, - .Mod, - .ArrayMult, - .MulWrap, - .Add, - .Sub, - .ArrayCat, - .AddWrap, - .SubWrap, - .BitShiftLeft, - .BitShiftRight, - .BitAnd, - .BitXor, - .BitOr, - .OrElse, - .BoolAnd, - .BoolOr, - .SliceOpen, - .Slice, - .SliceSentinel, - .Deref, - .ArrayAccess, - .ArrayInitOne, - .ArrayInitOneComma, - .ArrayInit, - .ArrayInitComma, - .StructInitOne, - 
.StructInitOneComma, - .StructInit, - .StructInitComma, - .CallOne, - .CallOneComma, - .Call, - .CallComma, - .SwitchRange, - .FnDecl, - .ErrorUnion, + .@"catch", + .field_access, + .unwrap_optional, + .equal_equal, + .bang_equal, + .less_than, + .greater_than, + .less_or_equal, + .greater_or_equal, + .assign_mul, + .assign_div, + .assign_mod, + .assign_add, + .assign_sub, + .assign_bit_shift_left, + .assign_bit_shift_right, + .assign_bit_and, + .assign_bit_xor, + .assign_bit_or, + .assign_mul_wrap, + .assign_add_wrap, + .assign_sub_wrap, + .assign, + .merge_error_sets, + .mul, + .div, + .mod, + .array_mult, + .mul_wrap, + .add, + .sub, + .array_cat, + .add_wrap, + .sub_wrap, + .bit_shift_left, + .bit_shift_right, + .bit_and, + .bit_xor, + .bit_or, + .@"orelse", + .bool_and, + .bool_or, + .slice_open, + .slice, + .slice_sentinel, + .deref, + .array_access, + .array_init_one, + .array_init_one_comma, + .array_init, + .array_init_comma, + .struct_init_one, + .struct_init_one_comma, + .struct_init, + .struct_init_comma, + .call_one, + .call_one_comma, + .call, + .call_comma, + .switch_range, + .fn_decl, + .error_union, => n = datas[n].lhs, - .AsyncCallOne, - .AsyncCallOneComma, - .AsyncCall, - .AsyncCallComma, + .async_call_one, + .async_call_one_comma, + .async_call, + .async_call_comma, => { end_offset += 1; // async token n = datas[n].lhs; }, - .ContainerFieldInit, - .ContainerFieldAlign, - .ContainerField, + .container_field_init, + .container_field_align, + .container_field, => { const name_token = main_tokens[n]; if (name_token > 0 and token_tags[name_token - 1] == .keyword_comptime) { @@ -349,10 +349,10 @@ pub const Tree = struct { return name_token - end_offset; }, - .GlobalVarDecl, - .LocalVarDecl, - .SimpleVarDecl, - .AlignedVarDecl, + .global_var_decl, + .local_var_decl, + .simple_var_decl, + .aligned_var_decl, => { var i = main_tokens[n]; // mut token while (i > 0) { @@ -372,10 +372,10 @@ pub const Tree = struct { return i - end_offset; }, - .Block, - 
.BlockSemicolon, - .BlockTwo, - .BlockTwoSemicolon, + .block, + .block_semicolon, + .block_two, + .block_two_semicolon, => { // Look for a label. const lbrace = main_tokens[n]; @@ -385,18 +385,18 @@ pub const Tree = struct { return lbrace - end_offset; }, - .ContainerDecl, - .ContainerDeclComma, - .ContainerDeclTwo, - .ContainerDeclTwoComma, - .ContainerDeclArg, - .ContainerDeclArgComma, - .TaggedUnion, - .TaggedUnionComma, - .TaggedUnionTwo, - .TaggedUnionTwoComma, - .TaggedUnionEnumTag, - .TaggedUnionEnumTagComma, + .container_decl, + .container_decl_comma, + .container_decl_two, + .container_decl_two_comma, + .container_decl_arg, + .container_decl_arg_comma, + .tagged_union, + .tagged_union_comma, + .tagged_union_two, + .tagged_union_two_comma, + .tagged_union_enum_tag, + .tagged_union_enum_tag_comma, => { const main_token = main_tokens[n]; switch (token_tags[main_token - 1]) { @@ -406,10 +406,10 @@ pub const Tree = struct { return main_token - end_offset; }, - .PtrTypeAligned, - .PtrTypeSentinel, - .PtrType, - .PtrTypeBitRange, + .ptr_type_aligned, + .ptr_type_sentinel, + .ptr_type, + .ptr_type_bit_range, => { const main_token = main_tokens[n]; return switch (token_tags[main_token]) { @@ -424,29 +424,29 @@ pub const Tree = struct { } - end_offset; }, - .SwitchCaseOne => { + .switch_case_one => { if (datas[n].lhs == 0) { return main_tokens[n] - 1 - end_offset; // else token } else { n = datas[n].lhs; } }, - .SwitchCase => { + .switch_case => { const extra = tree.extraData(datas[n].lhs, Node.SubRange); assert(extra.end - extra.start > 0); n = extra.start; }, - .AsmOutput, .AsmInput => { + .asm_output, .asm_input => { assert(token_tags[main_tokens[n] - 1] == .l_bracket); return main_tokens[n] - 1 - end_offset; }, - .WhileSimple, - .WhileCont, - .While, - .ForSimple, - .For, + .while_simple, + .while_cont, + .@"while", + .for_simple, + .@"for", => { const main_token = main_tokens[n]; return switch (token_tags[main_token - 1]) { @@ -465,115 +465,115 @@ pub const 
Tree = struct { var n = node; var end_offset: TokenIndex = 0; while (true) switch (tags[n]) { - .Root => return @intCast(TokenIndex, tree.tokens.len - 1), + .root => return @intCast(TokenIndex, tree.tokens.len - 1), - .UsingNamespace, - .BoolNot, - .Negation, - .BitNot, - .NegationWrap, - .AddressOf, - .Try, - .Await, - .OptionalType, - .Resume, - .Nosuspend, - .Comptime, + .@"usingnamespace", + .bool_not, + .negation, + .bit_not, + .negation_wrap, + .address_of, + .@"try", + .@"await", + .optional_type, + .@"resume", + .@"nosuspend", + .@"comptime", => n = datas[n].lhs, - .TestDecl, - .ErrDefer, - .Defer, - .Catch, - .EqualEqual, - .BangEqual, - .LessThan, - .GreaterThan, - .LessOrEqual, - .GreaterOrEqual, - .AssignMul, - .AssignDiv, - .AssignMod, - .AssignAdd, - .AssignSub, - .AssignBitShiftLeft, - .AssignBitShiftRight, - .AssignBitAnd, - .AssignBitXor, - .AssignBitOr, - .AssignMulWrap, - .AssignAddWrap, - .AssignSubWrap, - .Assign, - .MergeErrorSets, - .Mul, - .Div, - .Mod, - .ArrayMult, - .MulWrap, - .Add, - .Sub, - .ArrayCat, - .AddWrap, - .SubWrap, - .BitShiftLeft, - .BitShiftRight, - .BitAnd, - .BitXor, - .BitOr, - .OrElse, - .BoolAnd, - .BoolOr, - .AnyFrameType, - .ErrorUnion, - .IfSimple, - .WhileSimple, - .ForSimple, - .FnProtoSimple, - .FnProtoMulti, - .PtrTypeAligned, - .PtrTypeSentinel, - .PtrType, - .PtrTypeBitRange, - .ArrayType, - .SwitchCaseOne, - .SwitchCase, - .SwitchRange, + .test_decl, + .@"errdefer", + .@"defer", + .@"catch", + .equal_equal, + .bang_equal, + .less_than, + .greater_than, + .less_or_equal, + .greater_or_equal, + .assign_mul, + .assign_div, + .assign_mod, + .assign_add, + .assign_sub, + .assign_bit_shift_left, + .assign_bit_shift_right, + .assign_bit_and, + .assign_bit_xor, + .assign_bit_or, + .assign_mul_wrap, + .assign_add_wrap, + .assign_sub_wrap, + .assign, + .merge_error_sets, + .mul, + .div, + .mod, + .array_mult, + .mul_wrap, + .add, + .sub, + .array_cat, + .add_wrap, + .sub_wrap, + .bit_shift_left, + .bit_shift_right, + 
.bit_and, + .bit_xor, + .bit_or, + .@"orelse", + .bool_and, + .bool_or, + .anyframe_type, + .error_union, + .if_simple, + .while_simple, + .for_simple, + .fn_proto_simple, + .fn_proto_multi, + .ptr_type_aligned, + .ptr_type_sentinel, + .ptr_type, + .ptr_type_bit_range, + .array_type, + .switch_case_one, + .switch_case, + .switch_range, => n = datas[n].rhs, - .FieldAccess, - .UnwrapOptional, - .GroupedExpression, - .StringLiteral, - .ErrorSetDecl, - .AsmSimple, - .AsmOutput, - .AsmInput, - .ErrorValue, + .field_access, + .unwrap_optional, + .grouped_expression, + .string_literal, + .error_set_decl, + .asm_simple, + .asm_output, + .asm_input, + .error_value, => return datas[n].rhs + end_offset, - .AnyType, - .AnyFrameLiteral, - .CharLiteral, - .IntegerLiteral, - .FloatLiteral, - .FalseLiteral, - .TrueLiteral, - .NullLiteral, - .UndefinedLiteral, - .UnreachableLiteral, - .Identifier, - .Deref, - .EnumLiteral, + .@"anytype", + .anyframe_literal, + .char_literal, + .integer_literal, + .float_literal, + .false_literal, + .true_literal, + .null_literal, + .undefined_literal, + .unreachable_literal, + .identifier, + .deref, + .enum_literal, => return main_tokens[n] + end_offset, - .Return => if (datas[n].lhs != 0) { + .@"return" => if (datas[n].lhs != 0) { n = datas[n].lhs; } else { return main_tokens[n] + end_offset; }, - .Call, .AsyncCall => { + .call, .async_call => { end_offset += 1; // for the rparen const params = tree.extraData(datas[n].rhs, Node.SubRange); if (params.end - params.start == 0) { @@ -581,7 +581,7 @@ pub const Tree = struct { } n = tree.extra_data[params.end - 1]; // last parameter }, - .TaggedUnionEnumTag => { + .tagged_union_enum_tag => { const members = tree.extraData(datas[n].rhs, Node.SubRange); if (members.end - members.start == 0) { end_offset += 4; // for the rparen + rparen + lbrace + rbrace @@ -591,16 +591,16 @@ pub const Tree = struct { n = tree.extra_data[members.end - 1]; // last parameter } }, - .CallComma, - .AsyncCallComma, - 
.TaggedUnionEnumTagComma, + .call_comma, + .async_call_comma, + .tagged_union_enum_tag_comma, => { end_offset += 2; // for the comma + rparen/rbrace const params = tree.extraData(datas[n].rhs, Node.SubRange); assert(params.end > params.start); n = tree.extra_data[params.end - 1]; // last parameter }, - .Switch => { + .@"switch" => { const cases = tree.extraData(datas[n].rhs, Node.SubRange); if (cases.end - cases.start == 0) { end_offset += 3; // rparen, lbrace, rbrace @@ -610,7 +610,7 @@ pub const Tree = struct { n = tree.extra_data[cases.end - 1]; // last case } }, - .ContainerDeclArg => { + .container_decl_arg => { const members = tree.extraData(datas[n].rhs, Node.SubRange); if (members.end - members.start == 0) { end_offset += 1; // for the rparen @@ -620,53 +620,53 @@ pub const Tree = struct { n = tree.extra_data[members.end - 1]; // last parameter } }, - .Asm => { + .@"asm" => { const extra = tree.extraData(datas[n].rhs, Node.Asm); return extra.rparen + end_offset; }, - .ArrayInit, - .StructInit, + .array_init, + .struct_init, => { const elements = tree.extraData(datas[n].rhs, Node.SubRange); assert(elements.end - elements.start > 0); end_offset += 1; // for the rbrace n = tree.extra_data[elements.end - 1]; // last element }, - .ArrayInitComma, - .StructInitComma, - .ContainerDeclArgComma, - .SwitchComma, + .array_init_comma, + .struct_init_comma, + .container_decl_arg_comma, + .switch_comma, => { const members = tree.extraData(datas[n].rhs, Node.SubRange); assert(members.end - members.start > 0); end_offset += 2; // for the comma + rbrace n = tree.extra_data[members.end - 1]; // last parameter }, - .ArrayInitDot, - .StructInitDot, - .Block, - .ContainerDecl, - .TaggedUnion, - .BuiltinCall, + .array_init_dot, + .struct_init_dot, + .block, + .container_decl, + .tagged_union, + .builtin_call, => { assert(datas[n].rhs - datas[n].lhs > 0); end_offset += 1; // for the rbrace n = tree.extra_data[datas[n].rhs - 1]; // last statement }, - .ArrayInitDotComma, - 
.StructInitDotComma, - .BlockSemicolon, - .ContainerDeclComma, - .TaggedUnionComma, - .BuiltinCallComma, + .array_init_dot_comma, + .struct_init_dot_comma, + .block_semicolon, + .container_decl_comma, + .tagged_union_comma, + .builtin_call_comma, => { assert(datas[n].rhs - datas[n].lhs > 0); end_offset += 2; // for the comma/semicolon + rbrace/rparen n = tree.extra_data[datas[n].rhs - 1]; // last member }, - .CallOne, - .AsyncCallOne, - .ArrayAccess, + .call_one, + .async_call_one, + .array_access, => { end_offset += 1; // for the rparen/rbracket if (datas[n].rhs == 0) { @@ -674,12 +674,12 @@ pub const Tree = struct { } n = datas[n].rhs; }, - .ArrayInitDotTwo, - .BlockTwo, - .BuiltinCallTwo, - .StructInitDotTwo, - .ContainerDeclTwo, - .TaggedUnionTwo, + .array_init_dot_two, + .block_two, + .builtin_call_two, + .struct_init_dot_two, + .container_decl_two, + .tagged_union_two, => { if (datas[n].rhs != 0) { end_offset += 1; // for the rparen/rbrace @@ -689,25 +689,25 @@ pub const Tree = struct { n = datas[n].lhs; } else { switch (tags[n]) { - .ArrayInitDotTwo, - .BlockTwo, - .StructInitDotTwo, + .array_init_dot_two, + .block_two, + .struct_init_dot_two, => end_offset += 1, // rbrace - .BuiltinCallTwo, - .ContainerDeclTwo, + .builtin_call_two, + .container_decl_two, => end_offset += 2, // lparen/lbrace + rparen/rbrace - .TaggedUnionTwo => end_offset += 5, // (enum) {} + .tagged_union_two => end_offset += 5, // (enum) {} else => unreachable, } return main_tokens[n] + end_offset; } }, - .ArrayInitDotTwoComma, - .BuiltinCallTwoComma, - .BlockTwoSemicolon, - .StructInitDotTwoComma, - .ContainerDeclTwoComma, - .TaggedUnionTwoComma, + .array_init_dot_two_comma, + .builtin_call_two_comma, + .block_two_semicolon, + .struct_init_dot_two_comma, + .container_decl_two_comma, + .tagged_union_two_comma, => { end_offset += 2; // for the comma/semicolon + rbrace/rparen if (datas[n].rhs != 0) { @@ -718,7 +718,7 @@ pub const Tree = struct { unreachable; } }, - .SimpleVarDecl => { + 
.simple_var_decl => { if (datas[n].rhs != 0) { n = datas[n].rhs; } else if (datas[n].lhs != 0) { @@ -728,7 +728,7 @@ pub const Tree = struct { return main_tokens[n] + end_offset; } }, - .AlignedVarDecl => { + .aligned_var_decl => { if (datas[n].rhs != 0) { n = datas[n].rhs; } else if (datas[n].lhs != 0) { @@ -739,7 +739,7 @@ pub const Tree = struct { return main_tokens[n] + end_offset; } }, - .GlobalVarDecl => { + .global_var_decl => { if (datas[n].rhs != 0) { n = datas[n].rhs; } else { @@ -758,7 +758,7 @@ pub const Tree = struct { } } }, - .LocalVarDecl => { + .local_var_decl => { if (datas[n].rhs != 0) { n = datas[n].rhs; } else { @@ -774,7 +774,7 @@ pub const Tree = struct { } } }, - .ContainerFieldInit => { + .container_field_init => { if (datas[n].rhs != 0) { n = datas[n].rhs; } else if (datas[n].lhs != 0) { @@ -783,7 +783,7 @@ pub const Tree = struct { return main_tokens[n] + end_offset; } }, - .ContainerFieldAlign => { + .container_field_align => { if (datas[n].rhs != 0) { end_offset += 1; // for the rparen n = datas[n].rhs; @@ -793,7 +793,7 @@ pub const Tree = struct { return main_tokens[n] + end_offset; } }, - .ContainerField => { + .container_field => { const extra = tree.extraData(datas[n].rhs, Node.ContainerField); if (extra.value_expr != 0) { n = extra.value_expr; @@ -807,8 +807,8 @@ pub const Tree = struct { } }, - .ArrayInitOne, - .StructInitOne, + .array_init_one, + .struct_init_one, => { end_offset += 1; // rbrace if (datas[n].rhs == 0) { @@ -817,37 +817,37 @@ pub const Tree = struct { n = datas[n].rhs; } }, - .SliceOpen, - .CallOneComma, - .AsyncCallOneComma, - .ArrayInitOneComma, - .StructInitOneComma, + .slice_open, + .call_one_comma, + .async_call_one_comma, + .array_init_one_comma, + .struct_init_one_comma, => { end_offset += 2; // ellipsis2 + rbracket, or comma + rparen n = datas[n].rhs; assert(n != 0); }, - .Slice => { + .slice => { const extra = tree.extraData(datas[n].rhs, Node.Slice); assert(extra.end != 0); // should have used SliceOpen 
end_offset += 1; // rbracket n = extra.end; }, - .SliceSentinel => { + .slice_sentinel => { const extra = tree.extraData(datas[n].rhs, Node.SliceSentinel); assert(extra.sentinel != 0); // should have used Slice end_offset += 1; // rbracket n = extra.sentinel; }, - .Continue => { + .@"continue" => { if (datas[n].lhs != 0) { return datas[n].lhs + end_offset; } else { return main_tokens[n] + end_offset; } }, - .Break => { + .@"break" => { if (datas[n].rhs != 0) { n = datas[n].rhs; } else if (datas[n].lhs != 0) { @@ -856,14 +856,14 @@ pub const Tree = struct { return main_tokens[n] + end_offset; } }, - .FnDecl => { + .fn_decl => { if (datas[n].rhs != 0) { n = datas[n].rhs; } else { n = datas[n].lhs; } }, - .FnProtoOne => { + .fn_proto_one => { const extra = tree.extraData(datas[n].lhs, Node.FnProtoOne); // linksection, callconv, align can appear in any order, so we // find the last one here. @@ -897,7 +897,7 @@ pub const Tree = struct { n = max_node; end_offset += max_offset; }, - .FnProto => { + .fn_proto => { const extra = tree.extraData(datas[n].lhs, Node.FnProto); // linksection, callconv, align can appear in any order, so we // find the last one here. 
@@ -931,29 +931,29 @@ pub const Tree = struct { n = max_node; end_offset += max_offset; }, - .WhileCont => { + .while_cont => { const extra = tree.extraData(datas[n].rhs, Node.WhileCont); assert(extra.then_expr != 0); n = extra.then_expr; }, - .While => { + .@"while" => { const extra = tree.extraData(datas[n].rhs, Node.While); assert(extra.else_expr != 0); n = extra.else_expr; }, - .If, .For => { + .@"if", .@"for" => { const extra = tree.extraData(datas[n].rhs, Node.If); assert(extra.else_expr != 0); n = extra.else_expr; }, - .Suspend => { + .@"suspend" => { if (datas[n].lhs != 0) { n = datas[n].lhs; } else { return main_tokens[n] + end_offset; } }, - .ArrayTypeSentinel => { + .array_type_sentinel => { const extra = tree.extraData(datas[n].rhs, Node.ArrayTypeSentinel); n = extra.elem_type; }, @@ -967,7 +967,7 @@ pub const Tree = struct { } pub fn globalVarDecl(tree: Tree, node: Node.Index) full.VarDecl { - assert(tree.nodes.items(.tag)[node] == .GlobalVarDecl); + assert(tree.nodes.items(.tag)[node] == .global_var_decl); const data = tree.nodes.items(.data)[node]; const extra = tree.extraData(data.lhs, Node.GlobalVarDecl); return tree.fullVarDecl(.{ @@ -980,7 +980,7 @@ pub const Tree = struct { } pub fn localVarDecl(tree: Tree, node: Node.Index) full.VarDecl { - assert(tree.nodes.items(.tag)[node] == .LocalVarDecl); + assert(tree.nodes.items(.tag)[node] == .local_var_decl); const data = tree.nodes.items(.data)[node]; const extra = tree.extraData(data.lhs, Node.LocalVarDecl); return tree.fullVarDecl(.{ @@ -993,7 +993,7 @@ pub const Tree = struct { } pub fn simpleVarDecl(tree: Tree, node: Node.Index) full.VarDecl { - assert(tree.nodes.items(.tag)[node] == .SimpleVarDecl); + assert(tree.nodes.items(.tag)[node] == .simple_var_decl); const data = tree.nodes.items(.data)[node]; return tree.fullVarDecl(.{ .type_node = data.lhs, @@ -1005,7 +1005,7 @@ pub const Tree = struct { } pub fn alignedVarDecl(tree: Tree, node: Node.Index) full.VarDecl { - 
assert(tree.nodes.items(.tag)[node] == .AlignedVarDecl); + assert(tree.nodes.items(.tag)[node] == .aligned_var_decl); const data = tree.nodes.items(.data)[node]; return tree.fullVarDecl(.{ .type_node = 0, @@ -1017,7 +1017,7 @@ pub const Tree = struct { } pub fn ifSimple(tree: Tree, node: Node.Index) full.If { - assert(tree.nodes.items(.tag)[node] == .IfSimple); + assert(tree.nodes.items(.tag)[node] == .if_simple); const data = tree.nodes.items(.data)[node]; return tree.fullIf(.{ .cond_expr = data.lhs, @@ -1028,7 +1028,7 @@ pub const Tree = struct { } pub fn ifFull(tree: Tree, node: Node.Index) full.If { - assert(tree.nodes.items(.tag)[node] == .If); + assert(tree.nodes.items(.tag)[node] == .@"if"); const data = tree.nodes.items(.data)[node]; const extra = tree.extraData(data.rhs, Node.If); return tree.fullIf(.{ @@ -1040,7 +1040,7 @@ pub const Tree = struct { } pub fn containerField(tree: Tree, node: Node.Index) full.ContainerField { - assert(tree.nodes.items(.tag)[node] == .ContainerField); + assert(tree.nodes.items(.tag)[node] == .container_field); const data = tree.nodes.items(.data)[node]; const extra = tree.extraData(data.rhs, Node.ContainerField); return tree.fullContainerField(.{ @@ -1052,7 +1052,7 @@ pub const Tree = struct { } pub fn containerFieldInit(tree: Tree, node: Node.Index) full.ContainerField { - assert(tree.nodes.items(.tag)[node] == .ContainerFieldInit); + assert(tree.nodes.items(.tag)[node] == .container_field_init); const data = tree.nodes.items(.data)[node]; return tree.fullContainerField(.{ .name_token = tree.nodes.items(.main_token)[node], @@ -1063,7 +1063,7 @@ pub const Tree = struct { } pub fn containerFieldAlign(tree: Tree, node: Node.Index) full.ContainerField { - assert(tree.nodes.items(.tag)[node] == .ContainerFieldAlign); + assert(tree.nodes.items(.tag)[node] == .container_field_align); const data = tree.nodes.items(.data)[node]; return tree.fullContainerField(.{ .name_token = tree.nodes.items(.main_token)[node], @@ -1074,7 +1074,7 @@ 
pub const Tree = struct { } pub fn fnProtoSimple(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) full.FnProto { - assert(tree.nodes.items(.tag)[node] == .FnProtoSimple); + assert(tree.nodes.items(.tag)[node] == .fn_proto_simple); const data = tree.nodes.items(.data)[node]; buffer[0] = data.lhs; const params = if (data.lhs == 0) buffer[0..0] else buffer[0..1]; @@ -1089,7 +1089,7 @@ pub const Tree = struct { } pub fn fnProtoMulti(tree: Tree, node: Node.Index) full.FnProto { - assert(tree.nodes.items(.tag)[node] == .FnProtoMulti); + assert(tree.nodes.items(.tag)[node] == .fn_proto_multi); const data = tree.nodes.items(.data)[node]; const params_range = tree.extraData(data.lhs, Node.SubRange); const params = tree.extra_data[params_range.start..params_range.end]; @@ -1104,7 +1104,7 @@ pub const Tree = struct { } pub fn fnProtoOne(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) full.FnProto { - assert(tree.nodes.items(.tag)[node] == .FnProtoOne); + assert(tree.nodes.items(.tag)[node] == .fn_proto_one); const data = tree.nodes.items(.data)[node]; const extra = tree.extraData(data.lhs, Node.FnProtoOne); buffer[0] = extra.param; @@ -1120,7 +1120,7 @@ pub const Tree = struct { } pub fn fnProto(tree: Tree, node: Node.Index) full.FnProto { - assert(tree.nodes.items(.tag)[node] == .FnProto); + assert(tree.nodes.items(.tag)[node] == .fn_proto); const data = tree.nodes.items(.data)[node]; const extra = tree.extraData(data.lhs, Node.FnProto); const params = tree.extra_data[extra.params_start..extra.params_end]; @@ -1135,8 +1135,8 @@ pub const Tree = struct { } pub fn structInitOne(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) full.StructInit { - assert(tree.nodes.items(.tag)[node] == .StructInitOne or - tree.nodes.items(.tag)[node] == .StructInitOneComma); + assert(tree.nodes.items(.tag)[node] == .struct_init_one or + tree.nodes.items(.tag)[node] == .struct_init_one_comma); const data = tree.nodes.items(.data)[node]; buffer[0] = data.rhs; const fields = if 
(data.rhs == 0) buffer[0..0] else buffer[0..1]; @@ -1148,8 +1148,8 @@ pub const Tree = struct { } pub fn structInitDotTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) full.StructInit { - assert(tree.nodes.items(.tag)[node] == .StructInitDotTwo or - tree.nodes.items(.tag)[node] == .StructInitDotTwoComma); + assert(tree.nodes.items(.tag)[node] == .struct_init_dot_two or + tree.nodes.items(.tag)[node] == .struct_init_dot_two_comma); const data = tree.nodes.items(.data)[node]; buffer.* = .{ data.lhs, data.rhs }; const fields = if (data.rhs != 0) @@ -1166,8 +1166,8 @@ pub const Tree = struct { } pub fn structInitDot(tree: Tree, node: Node.Index) full.StructInit { - assert(tree.nodes.items(.tag)[node] == .StructInitDot or - tree.nodes.items(.tag)[node] == .StructInitDotComma); + assert(tree.nodes.items(.tag)[node] == .struct_init_dot or + tree.nodes.items(.tag)[node] == .struct_init_dot_comma); const data = tree.nodes.items(.data)[node]; return tree.fullStructInit(.{ .lbrace = tree.nodes.items(.main_token)[node], @@ -1177,8 +1177,8 @@ pub const Tree = struct { } pub fn structInit(tree: Tree, node: Node.Index) full.StructInit { - assert(tree.nodes.items(.tag)[node] == .StructInit or - tree.nodes.items(.tag)[node] == .StructInitComma); + assert(tree.nodes.items(.tag)[node] == .struct_init or + tree.nodes.items(.tag)[node] == .struct_init_comma); const data = tree.nodes.items(.data)[node]; const fields_range = tree.extraData(data.rhs, Node.SubRange); return tree.fullStructInit(.{ @@ -1189,8 +1189,8 @@ pub const Tree = struct { } pub fn arrayInitOne(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) full.ArrayInit { - assert(tree.nodes.items(.tag)[node] == .ArrayInitOne or - tree.nodes.items(.tag)[node] == .ArrayInitOneComma); + assert(tree.nodes.items(.tag)[node] == .array_init_one or + tree.nodes.items(.tag)[node] == .array_init_one_comma); const data = tree.nodes.items(.data)[node]; buffer[0] = data.rhs; const elements = if (data.rhs == 0) buffer[0..0] else 
buffer[0..1]; @@ -1204,8 +1204,8 @@ pub const Tree = struct { } pub fn arrayInitDotTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) full.ArrayInit { - assert(tree.nodes.items(.tag)[node] == .ArrayInitDotTwo or - tree.nodes.items(.tag)[node] == .ArrayInitDotTwoComma); + assert(tree.nodes.items(.tag)[node] == .array_init_dot_two or + tree.nodes.items(.tag)[node] == .array_init_dot_two_comma); const data = tree.nodes.items(.data)[node]; buffer.* = .{ data.lhs, data.rhs }; const elements = if (data.rhs != 0) @@ -1224,8 +1224,8 @@ pub const Tree = struct { } pub fn arrayInitDot(tree: Tree, node: Node.Index) full.ArrayInit { - assert(tree.nodes.items(.tag)[node] == .ArrayInitDot or - tree.nodes.items(.tag)[node] == .ArrayInitDotComma); + assert(tree.nodes.items(.tag)[node] == .array_init_dot or + tree.nodes.items(.tag)[node] == .array_init_dot_comma); const data = tree.nodes.items(.data)[node]; return .{ .ast = .{ @@ -1237,8 +1237,8 @@ pub const Tree = struct { } pub fn arrayInit(tree: Tree, node: Node.Index) full.ArrayInit { - assert(tree.nodes.items(.tag)[node] == .ArrayInit or - tree.nodes.items(.tag)[node] == .ArrayInitComma); + assert(tree.nodes.items(.tag)[node] == .array_init or + tree.nodes.items(.tag)[node] == .array_init_comma); const data = tree.nodes.items(.data)[node]; const elem_range = tree.extraData(data.rhs, Node.SubRange); return .{ @@ -1251,7 +1251,7 @@ pub const Tree = struct { } pub fn arrayType(tree: Tree, node: Node.Index) full.ArrayType { - assert(tree.nodes.items(.tag)[node] == .ArrayType); + assert(tree.nodes.items(.tag)[node] == .array_type); const data = tree.nodes.items(.data)[node]; return .{ .ast = .{ @@ -1264,7 +1264,7 @@ pub const Tree = struct { } pub fn arrayTypeSentinel(tree: Tree, node: Node.Index) full.ArrayType { - assert(tree.nodes.items(.tag)[node] == .ArrayTypeSentinel); + assert(tree.nodes.items(.tag)[node] == .array_type_sentinel); const data = tree.nodes.items(.data)[node]; const extra = tree.extraData(data.rhs, 
Node.ArrayTypeSentinel); return .{ @@ -1278,7 +1278,7 @@ pub const Tree = struct { } pub fn ptrTypeAligned(tree: Tree, node: Node.Index) full.PtrType { - assert(tree.nodes.items(.tag)[node] == .PtrTypeAligned); + assert(tree.nodes.items(.tag)[node] == .ptr_type_aligned); const data = tree.nodes.items(.data)[node]; return tree.fullPtrType(.{ .main_token = tree.nodes.items(.main_token)[node], @@ -1291,7 +1291,7 @@ pub const Tree = struct { } pub fn ptrTypeSentinel(tree: Tree, node: Node.Index) full.PtrType { - assert(tree.nodes.items(.tag)[node] == .PtrTypeSentinel); + assert(tree.nodes.items(.tag)[node] == .ptr_type_sentinel); const data = tree.nodes.items(.data)[node]; return tree.fullPtrType(.{ .main_token = tree.nodes.items(.main_token)[node], @@ -1304,7 +1304,7 @@ pub const Tree = struct { } pub fn ptrType(tree: Tree, node: Node.Index) full.PtrType { - assert(tree.nodes.items(.tag)[node] == .PtrType); + assert(tree.nodes.items(.tag)[node] == .ptr_type); const data = tree.nodes.items(.data)[node]; const extra = tree.extraData(data.lhs, Node.PtrType); return tree.fullPtrType(.{ @@ -1318,7 +1318,7 @@ pub const Tree = struct { } pub fn ptrTypeBitRange(tree: Tree, node: Node.Index) full.PtrType { - assert(tree.nodes.items(.tag)[node] == .PtrTypeBitRange); + assert(tree.nodes.items(.tag)[node] == .ptr_type_bit_range); const data = tree.nodes.items(.data)[node]; const extra = tree.extraData(data.lhs, Node.PtrTypeBitRange); return tree.fullPtrType(.{ @@ -1332,7 +1332,7 @@ pub const Tree = struct { } pub fn sliceOpen(tree: Tree, node: Node.Index) full.Slice { - assert(tree.nodes.items(.tag)[node] == .SliceOpen); + assert(tree.nodes.items(.tag)[node] == .slice_open); const data = tree.nodes.items(.data)[node]; return .{ .ast = .{ @@ -1346,7 +1346,7 @@ pub const Tree = struct { } pub fn slice(tree: Tree, node: Node.Index) full.Slice { - assert(tree.nodes.items(.tag)[node] == .Slice); + assert(tree.nodes.items(.tag)[node] == .slice); const data = 
tree.nodes.items(.data)[node]; const extra = tree.extraData(data.rhs, Node.Slice); return .{ @@ -1361,7 +1361,7 @@ pub const Tree = struct { } pub fn sliceSentinel(tree: Tree, node: Node.Index) full.Slice { - assert(tree.nodes.items(.tag)[node] == .SliceSentinel); + assert(tree.nodes.items(.tag)[node] == .slice_sentinel); const data = tree.nodes.items(.data)[node]; const extra = tree.extraData(data.rhs, Node.SliceSentinel); return .{ @@ -1376,8 +1376,8 @@ pub const Tree = struct { } pub fn containerDeclTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) full.ContainerDecl { - assert(tree.nodes.items(.tag)[node] == .ContainerDeclTwo or - tree.nodes.items(.tag)[node] == .ContainerDeclTwoComma); + assert(tree.nodes.items(.tag)[node] == .container_decl_two or + tree.nodes.items(.tag)[node] == .container_decl_two_comma); const data = tree.nodes.items(.data)[node]; buffer.* = .{ data.lhs, data.rhs }; const members = if (data.rhs != 0) @@ -1395,8 +1395,8 @@ pub const Tree = struct { } pub fn containerDecl(tree: Tree, node: Node.Index) full.ContainerDecl { - assert(tree.nodes.items(.tag)[node] == .ContainerDecl or - tree.nodes.items(.tag)[node] == .ContainerDeclComma); + assert(tree.nodes.items(.tag)[node] == .container_decl or + tree.nodes.items(.tag)[node] == .container_decl_comma); const data = tree.nodes.items(.data)[node]; return tree.fullContainerDecl(.{ .main_token = tree.nodes.items(.main_token)[node], @@ -1407,8 +1407,8 @@ pub const Tree = struct { } pub fn containerDeclArg(tree: Tree, node: Node.Index) full.ContainerDecl { - assert(tree.nodes.items(.tag)[node] == .ContainerDeclArg or - tree.nodes.items(.tag)[node] == .ContainerDeclArgComma); + assert(tree.nodes.items(.tag)[node] == .container_decl_arg or + tree.nodes.items(.tag)[node] == .container_decl_arg_comma); const data = tree.nodes.items(.data)[node]; const members_range = tree.extraData(data.rhs, Node.SubRange); return tree.fullContainerDecl(.{ @@ -1420,8 +1420,8 @@ pub const Tree = struct { } pub 
fn taggedUnionTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) full.ContainerDecl { - assert(tree.nodes.items(.tag)[node] == .TaggedUnionTwo or - tree.nodes.items(.tag)[node] == .TaggedUnionTwoComma); + assert(tree.nodes.items(.tag)[node] == .tagged_union_two or + tree.nodes.items(.tag)[node] == .tagged_union_two_comma); const data = tree.nodes.items(.data)[node]; buffer.* = .{ data.lhs, data.rhs }; const members = if (data.rhs != 0) @@ -1440,8 +1440,8 @@ pub const Tree = struct { } pub fn taggedUnion(tree: Tree, node: Node.Index) full.ContainerDecl { - assert(tree.nodes.items(.tag)[node] == .TaggedUnion or - tree.nodes.items(.tag)[node] == .TaggedUnionComma); + assert(tree.nodes.items(.tag)[node] == .tagged_union or + tree.nodes.items(.tag)[node] == .tagged_union_comma); const data = tree.nodes.items(.data)[node]; const main_token = tree.nodes.items(.main_token)[node]; return tree.fullContainerDecl(.{ @@ -1453,8 +1453,8 @@ pub const Tree = struct { } pub fn taggedUnionEnumTag(tree: Tree, node: Node.Index) full.ContainerDecl { - assert(tree.nodes.items(.tag)[node] == .TaggedUnionEnumTag or - tree.nodes.items(.tag)[node] == .TaggedUnionEnumTagComma); + assert(tree.nodes.items(.tag)[node] == .tagged_union_enum_tag or + tree.nodes.items(.tag)[node] == .tagged_union_enum_tag_comma); const data = tree.nodes.items(.data)[node]; const members_range = tree.extraData(data.rhs, Node.SubRange); const main_token = tree.nodes.items(.main_token)[node]; @@ -1765,7 +1765,7 @@ pub const Tree = struct { } const outputs_end: usize = for (info.items) |item, i| { switch (node_tags[item]) { - .AsmOutput => continue, + .asm_output => continue, else => break i, } } else info.items.len; @@ -2162,7 +2162,7 @@ pub const Error = union(enum) { pub fn render(self: ExpectedCall, tree: Tree, stream: anytype) !void { const node_tag = tree.nodes.items(.tag)[self.node]; - return stream.print("expected " ++ @tagName(Node.Tag.Call) ++ ", found {s}", .{ + return stream.print("expected " ++ 
@tagName(Node.Tag.call) ++ ", found {s}", .{ @tagName(node_tag), }); } @@ -2173,8 +2173,8 @@ pub const Error = union(enum) { pub fn render(self: ExpectedCallOrFnProto, tree: Tree, stream: anytype) !void { const node_tag = tree.nodes.items(.tag)[self.node]; - return stream.print("expected " ++ @tagName(Node.Tag.Call) ++ " or " ++ - @tagName(Node.Tag.FnProto) ++ ", found {s}", .{@tagName(node_tag)}); + return stream.print("expected " ++ @tagName(Node.Tag.call) ++ " or " ++ + @tagName(Node.Tag.fn_proto) ++ ", found {s}", .{@tagName(node_tag)}); } }; @@ -2305,433 +2305,433 @@ pub const Node = struct { /// Tree.lastToken() pub const Tag = enum { /// sub_list[lhs...rhs] - Root, + root, /// `usingnamespace lhs;`. rhs unused. main_token is `usingnamespace`. - UsingNamespace, + @"usingnamespace", /// lhs is test name token (must be string literal), if any. /// rhs is the body node. - TestDecl, + test_decl, /// lhs is the index into extra_data. /// rhs is the initialization expression, if any. /// main_token is `var` or `const`. - GlobalVarDecl, + global_var_decl, /// `var a: x align(y) = rhs` /// lhs is the index into extra_data. /// main_token is `var` or `const`. - LocalVarDecl, + local_var_decl, /// `var a: lhs = rhs`. lhs and rhs may be unused. /// Can be local or global. /// main_token is `var` or `const`. - SimpleVarDecl, + simple_var_decl, /// `var a align(lhs) = rhs`. lhs and rhs may be unused. /// Can be local or global. /// main_token is `var` or `const`. - AlignedVarDecl, + aligned_var_decl, /// lhs is the identifier token payload if any, /// rhs is the deferred expression. - ErrDefer, + @"errdefer", /// lhs is unused. /// rhs is the deferred expression. - Defer, + @"defer", /// lhs catch rhs /// lhs catch |err| rhs /// main_token is the catch /// payload is determined by looking at the prev tokens before rhs. - Catch, + @"catch", /// `lhs.a`. main_token is the dot. rhs is the identifier token index. - FieldAccess, + field_access, /// `lhs.?`. 
main_token is the dot. rhs is the `?` token index. - UnwrapOptional, + unwrap_optional, /// `lhs == rhs`. main_token is op. - EqualEqual, + equal_equal, /// `lhs != rhs`. main_token is op. - BangEqual, + bang_equal, /// `lhs < rhs`. main_token is op. - LessThan, + less_than, /// `lhs > rhs`. main_token is op. - GreaterThan, + greater_than, /// `lhs <= rhs`. main_token is op. - LessOrEqual, + less_or_equal, /// `lhs >= rhs`. main_token is op. - GreaterOrEqual, + greater_or_equal, /// `lhs *= rhs`. main_token is op. - AssignMul, + assign_mul, /// `lhs /= rhs`. main_token is op. - AssignDiv, + assign_div, /// `lhs *= rhs`. main_token is op. - AssignMod, + assign_mod, /// `lhs += rhs`. main_token is op. - AssignAdd, + assign_add, /// `lhs -= rhs`. main_token is op. - AssignSub, + assign_sub, /// `lhs <<= rhs`. main_token is op. - AssignBitShiftLeft, + assign_bit_shift_left, /// `lhs >>= rhs`. main_token is op. - AssignBitShiftRight, + assign_bit_shift_right, /// `lhs &= rhs`. main_token is op. - AssignBitAnd, + assign_bit_and, /// `lhs ^= rhs`. main_token is op. - AssignBitXor, + assign_bit_xor, /// `lhs |= rhs`. main_token is op. - AssignBitOr, + assign_bit_or, /// `lhs *%= rhs`. main_token is op. - AssignMulWrap, + assign_mul_wrap, /// `lhs +%= rhs`. main_token is op. - AssignAddWrap, + assign_add_wrap, /// `lhs -%= rhs`. main_token is op. - AssignSubWrap, + assign_sub_wrap, /// `lhs = rhs`. main_token is op. - Assign, + assign, /// `lhs || rhs`. main_token is the `||`. - MergeErrorSets, + merge_error_sets, /// `lhs * rhs`. main_token is the `*`. - Mul, + mul, /// `lhs / rhs`. main_token is the `/`. - Div, + div, /// `lhs % rhs`. main_token is the `%`. - Mod, + mod, /// `lhs ** rhs`. main_token is the `**`. - ArrayMult, + array_mult, /// `lhs *% rhs`. main_token is the `*%`. - MulWrap, + mul_wrap, /// `lhs + rhs`. main_token is the `+`. - Add, + add, /// `lhs - rhs`. main_token is the `-`. - Sub, + sub, /// `lhs ++ rhs`. main_token is the `++`. 
- ArrayCat, + array_cat, /// `lhs +% rhs`. main_token is the `+%`. - AddWrap, + add_wrap, /// `lhs -% rhs`. main_token is the `-%`. - SubWrap, + sub_wrap, /// `lhs << rhs`. main_token is the `<<`. - BitShiftLeft, + bit_shift_left, /// `lhs >> rhs`. main_token is the `>>`. - BitShiftRight, + bit_shift_right, /// `lhs & rhs`. main_token is the `&`. - BitAnd, + bit_and, /// `lhs ^ rhs`. main_token is the `^`. - BitXor, + bit_xor, /// `lhs | rhs`. main_token is the `|`. - BitOr, + bit_or, /// `lhs orelse rhs`. main_token is the `orelse`. - OrElse, + @"orelse", /// `lhs and rhs`. main_token is the `and`. - BoolAnd, + bool_and, /// `lhs or rhs`. main_token is the `or`. - BoolOr, + bool_or, /// `op lhs`. rhs unused. main_token is op. - BoolNot, + bool_not, /// `op lhs`. rhs unused. main_token is op. - Negation, + negation, /// `op lhs`. rhs unused. main_token is op. - BitNot, + bit_not, /// `op lhs`. rhs unused. main_token is op. - NegationWrap, + negation_wrap, /// `op lhs`. rhs unused. main_token is op. - AddressOf, + address_of, /// `op lhs`. rhs unused. main_token is op. - Try, + @"try", /// `op lhs`. rhs unused. main_token is op. - Await, + @"await", /// `?lhs`. rhs unused. main_token is the `?`. - OptionalType, + optional_type, /// `[lhs]rhs`. lhs can be omitted to make it a slice. - ArrayType, - /// `[lhs:a]b`. `ArrayTypeSentinel[rhs]`. - ArrayTypeSentinel, + array_type, + /// `[lhs:a]b`. `array_type_sentinel[rhs]`. + array_type_sentinel, /// `[*]align(lhs) rhs`. lhs can be omitted. /// `*align(lhs) rhs`. lhs can be omitted. /// `[]rhs`. /// main_token is the asterisk if a pointer or the lbracket if a slice /// main_token might be a ** token, which is shared with a parent/child /// pointer type and may require special handling. - PtrTypeAligned, + ptr_type_aligned, /// `[*:lhs]rhs`. lhs can be omitted. /// `*rhs`. /// `[:lhs]rhs`. 
/// main_token is the asterisk if a pointer or the lbracket if a slice /// main_token might be a ** token, which is shared with a parent/child /// pointer type and may require special handling. - PtrTypeSentinel, - /// lhs is index into PtrType. rhs is the element type expression. + ptr_type_sentinel, + /// lhs is index into ptr_type. rhs is the element type expression. /// main_token is the asterisk if a pointer or the lbracket if a slice /// main_token might be a ** token, which is shared with a parent/child /// pointer type and may require special handling. - PtrType, - /// lhs is index into PtrTypeBitRange. rhs is the element type expression. + ptr_type, + /// lhs is index into ptr_type_bit_range. rhs is the element type expression. /// main_token is the asterisk if a pointer or the lbracket if a slice /// main_token might be a ** token, which is shared with a parent/child /// pointer type and may require special handling. - PtrTypeBitRange, + ptr_type_bit_range, /// `lhs[rhs..]` /// main_token is the lbracket. - SliceOpen, + slice_open, /// `lhs[b..c]`. rhs is index into Slice /// main_token is the lbracket. - Slice, + slice, /// `lhs[b..c :d]`. rhs is index into SliceSentinel /// main_token is the lbracket. - SliceSentinel, + slice_sentinel, /// `lhs.*`. rhs is unused. - Deref, + deref, /// `lhs[rhs]`. - ArrayAccess, + array_access, /// `lhs{rhs}`. rhs can be omitted. - ArrayInitOne, + array_init_one, /// `lhs{rhs,}`. rhs can *not* be omitted - ArrayInitOneComma, + array_init_one_comma, /// `.{lhs, rhs}`. lhs and rhs can be omitted. - ArrayInitDotTwo, - /// Same as `ArrayInitDotTwo` except there is known to be a trailing comma + array_init_dot_two, + /// Same as `array_init_dot_two` except there is known to be a trailing comma /// before the final rbrace. - ArrayInitDotTwoComma, + array_init_dot_two_comma, /// `.{a, b}`. `sub_list[lhs..rhs]`. 
- ArrayInitDot, - /// Same as `ArrayInitDot` except there is known to be a trailing comma + array_init_dot, + /// Same as `array_init_dot` except there is known to be a trailing comma /// before the final rbrace. - ArrayInitDotComma, + array_init_dot_comma, /// `lhs{a, b}`. `sub_range_list[rhs]`. lhs can be omitted which means `.{a, b}`. - ArrayInit, - /// Same as `ArrayInit` except there is known to be a trailing comma + array_init, + /// Same as `array_init` except there is known to be a trailing comma /// before the final rbrace. - ArrayInitComma, + array_init_comma, /// `lhs{.a = rhs}`. rhs can be omitted making it empty. /// main_token is the lbrace. - StructInitOne, + struct_init_one, /// `lhs{.a = rhs,}`. rhs can *not* be omitted. /// main_token is the lbrace. - StructInitOneComma, + struct_init_one_comma, /// `.{.a = lhs, .b = rhs}`. lhs and rhs can be omitted. /// main_token is the lbrace. /// No trailing comma before the rbrace. - StructInitDotTwo, - /// Same as `StructInitDotTwo` except there is known to be a trailing comma + struct_init_dot_two, + /// Same as `struct_init_dot_two` except there is known to be a trailing comma /// before the final rbrace. - StructInitDotTwoComma, + struct_init_dot_two_comma, /// `.{.a = b, .c = d}`. `sub_list[lhs..rhs]`. /// main_token is the lbrace. - StructInitDot, - /// Same as `StructInitDot` except there is known to be a trailing comma + struct_init_dot, + /// Same as `struct_init_dot` except there is known to be a trailing comma /// before the final rbrace. - StructInitDotComma, + struct_init_dot_comma, /// `lhs{.a = b, .c = d}`. `sub_range_list[rhs]`. /// lhs can be omitted which means `.{.a = b, .c = d}`. /// main_token is the lbrace. - StructInit, - /// Same as `StructInit` except there is known to be a trailing comma + struct_init, + /// Same as `struct_init` except there is known to be a trailing comma /// before the final rbrace. - StructInitComma, + struct_init_comma, /// `lhs(rhs)`. rhs can be omitted. 
- CallOne, + call_one, /// `lhs(rhs,)`. rhs can be omitted. - CallOneComma, + call_one_comma, /// `async lhs(rhs)`. rhs can be omitted. - AsyncCallOne, + async_call_one, /// `async lhs(rhs,)`. - AsyncCallOneComma, + async_call_one_comma, /// `lhs(a, b, c)`. `SubRange[rhs]`. /// main_token is the `(`. - Call, + call, /// `lhs(a, b, c,)`. `SubRange[rhs]`. /// main_token is the `(`. - CallComma, + call_comma, /// `async lhs(a, b, c)`. `SubRange[rhs]`. /// main_token is the `(`. - AsyncCall, + async_call, /// `async lhs(a, b, c,)`. `SubRange[rhs]`. /// main_token is the `(`. - AsyncCallComma, + async_call_comma, /// `switch(lhs) {}`. `SubRange[rhs]`. - Switch, - /// Same as Switch except there is known to be a trailing comma + @"switch", + /// Same as switch except there is known to be a trailing comma /// before the final rbrace - SwitchComma, + switch_comma, /// `lhs => rhs`. If lhs is omitted it means `else`. /// main_token is the `=>` - SwitchCaseOne, + switch_case_one, /// `a, b, c => rhs`. `SubRange[lhs]`. /// main_token is the `=>` - SwitchCase, + switch_case, /// `lhs...rhs`. - SwitchRange, + switch_range, /// `while (lhs) rhs`. /// `while (lhs) |x| rhs`. - WhileSimple, + while_simple, /// `while (lhs) : (a) b`. `WhileCont[rhs]`. /// `while (lhs) : (a) b`. `WhileCont[rhs]`. - WhileCont, + while_cont, /// `while (lhs) : (a) b else c`. `While[rhs]`. /// `while (lhs) |x| : (a) b else c`. `While[rhs]`. /// `while (lhs) |x| : (a) b else |y| c`. `While[rhs]`. - While, + @"while", /// `for (lhs) rhs`. - ForSimple, + for_simple, /// `for (lhs) a else b`. `if_list[rhs]`. - For, + @"for", /// `if (lhs) rhs`. /// `if (lhs) |a| rhs`. - IfSimple, + if_simple, /// `if (lhs) a else b`. `If[rhs]`. /// `if (lhs) |x| a else b`. `If[rhs]`. /// `if (lhs) |x| a else |y| b`. `If[rhs]`. - If, + @"if", /// `suspend lhs`. lhs can be omitted. rhs is unused. - Suspend, + @"suspend", /// `resume lhs`. rhs is unused. - Resume, + @"resume", /// `continue`. 
lhs is token index of label if any. rhs is unused. - Continue, + @"continue", /// `break :lhs rhs` /// both lhs and rhs may be omitted. - Break, + @"break", /// `return lhs`. lhs can be omitted. rhs is unused. - Return, + @"return", /// `fn(a: lhs) rhs`. lhs can be omitted. /// anytype and ... parameters are omitted from the AST tree. - FnProtoSimple, + fn_proto_simple, /// `fn(a: b, c: d) rhs`. `sub_range_list[lhs]`. /// anytype and ... parameters are omitted from the AST tree. - FnProtoMulti, + fn_proto_multi, /// `fn(a: b) rhs linksection(e) callconv(f)`. `FnProtoOne[lhs]`. /// zero or one parameters. /// anytype and ... parameters are omitted from the AST tree. - FnProtoOne, + fn_proto_one, /// `fn(a: b, c: d) rhs linksection(e) callconv(f)`. `FnProto[lhs]`. /// anytype and ... parameters are omitted from the AST tree. - FnProto, - /// lhs is the FnProto. + fn_proto, + /// lhs is the fn_proto. /// rhs is the function body block if non-zero. - /// if rhs is zero, the funtion decl has no body (e.g. an extern function) - FnDecl, + /// if rhs is zero, the function decl has no body (e.g. an extern function) + fn_decl, /// `anyframe->rhs`. main_token is `anyframe`. `lhs` is arrow token index. - AnyFrameType, + anyframe_type, /// Both lhs and rhs unused. - AnyFrameLiteral, + anyframe_literal, /// Both lhs and rhs unused. - CharLiteral, + char_literal, /// Both lhs and rhs unused. - IntegerLiteral, + integer_literal, /// Both lhs and rhs unused. - FloatLiteral, + float_literal, /// Both lhs and rhs unused. - FalseLiteral, + false_literal, /// Both lhs and rhs unused. - TrueLiteral, + true_literal, /// Both lhs and rhs unused. - NullLiteral, + null_literal, /// Both lhs and rhs unused. - UndefinedLiteral, + undefined_literal, /// Both lhs and rhs unused. - UnreachableLiteral, + unreachable_literal, /// Both lhs and rhs unused. 
/// Most identifiers will not have explicit AST nodes, however for expressions /// which could be one of many different kinds of AST nodes, there will be an - /// Identifier AST node for it. - Identifier, + /// identifier AST node for it. + identifier, /// lhs is the dot token index, rhs unused, main_token is the identifier. - EnumLiteral, + enum_literal, /// main_token is the first token index (redundant with lhs) /// lhs is the first token index; rhs is the last token index. - /// Could be a series of MultilineStringLiteralLine tokens, or a single - /// StringLiteral token. - StringLiteral, + /// Could be a series of multiline_string_literal_line tokens, or a single + /// string_literal token. + string_literal, /// `(lhs)`. main_token is the `(`; rhs is the token index of the `)`. - GroupedExpression, + grouped_expression, /// `@a(lhs, rhs)`. lhs and rhs may be omitted. - BuiltinCallTwo, - /// Same as BuiltinCallTwo but there is known to be a trailing comma before the rparen. - BuiltinCallTwoComma, + builtin_call_two, + /// Same as builtin_call_two but there is known to be a trailing comma before the rparen. + builtin_call_two_comma, /// `@a(b, c)`. `sub_list[lhs..rhs]`. - BuiltinCall, - /// Same as BuiltinCall but there is known to be a trailing comma before the rparen. - BuiltinCallComma, + builtin_call, + /// Same as builtin_call but there is known to be a trailing comma before the rparen. + builtin_call_comma, /// `error{a, b}`. /// rhs is the rbrace, lhs is unused. - ErrorSetDecl, + error_set_decl, /// `struct {}`, `union {}`, `opaque {}`, `enum {}`. `extra_data[lhs..rhs]`. /// main_token is `struct`, `union`, `opaque`, `enum` keyword. - ContainerDecl, + container_decl, /// Same as ContainerDecl but there is known to be a trailing comma before the rbrace. - ContainerDeclComma, + container_decl_comma, /// `struct {lhs, rhs}`, `union {lhs, rhs}`, `opaque {lhs, rhs}`, `enum {lhs, rhs}`. /// lhs or rhs can be omitted. 
/// main_token is `struct`, `union`, `opaque`, `enum` keyword. - ContainerDeclTwo, + container_decl_two, /// Same as ContainerDeclTwo except there is known to be a trailing comma /// before the rbrace. - ContainerDeclTwoComma, + container_decl_two_comma, /// `union(lhs)` / `enum(lhs)`. `SubRange[rhs]`. - ContainerDeclArg, - /// Same as ContainerDeclArg but there is known to be a trailing comma before the rbrace. - ContainerDeclArgComma, + container_decl_arg, + /// Same as container_decl_arg but there is known to be a trailing comma before the rbrace. + container_decl_arg_comma, /// `union(enum) {}`. `sub_list[lhs..rhs]`. /// Note that tagged unions with explicitly provided enums are represented - /// by `ContainerDeclArg`. - TaggedUnion, - /// Same as TaggedUnion but there is known to be a trailing comma before the rbrace. - TaggedUnionComma, + /// by `container_decl_arg`. + tagged_union, + /// Same as tagged_union but there is known to be a trailing comma before the rbrace. + tagged_union_comma, /// `union(enum) {lhs, rhs}`. lhs or rhs may be omitted. /// Note that tagged unions with explicitly provided enums are represented - /// by `ContainerDeclArg`. - TaggedUnionTwo, - /// Same as TaggedUnionTwo but there is known to be a trailing comma before the rbrace. - TaggedUnionTwoComma, + /// by `container_decl_arg`. + tagged_union_two, + /// Same as tagged_union_two but there is known to be a trailing comma before the rbrace. + tagged_union_two_comma, /// `union(enum(lhs)) {}`. `SubRange[rhs]`. - TaggedUnionEnumTag, - /// Same as TaggedUnionEnumTag but there is known to be a trailing comma + tagged_union_enum_tag, + /// Same as tagged_union_enum_tag but there is known to be a trailing comma /// before the rbrace. - TaggedUnionEnumTagComma, + tagged_union_enum_tag_comma, /// `a: lhs = rhs,`. lhs and rhs can be omitted. /// main_token is the field name identifier. /// lastToken() does not include the possible trailing comma. 
- ContainerFieldInit, + container_field_init, /// `a: lhs align(rhs),`. rhs can be omitted. /// main_token is the field name identifier. /// lastToken() does not include the possible trailing comma. - ContainerFieldAlign, + container_field_align, /// `a: lhs align(c) = d,`. `container_field_list[rhs]`. /// main_token is the field name identifier. /// lastToken() does not include the possible trailing comma. - ContainerField, + container_field, /// `anytype`. both lhs and rhs unused. /// Used by `ContainerField`. - AnyType, + @"anytype", /// `comptime lhs`. rhs unused. - Comptime, + @"comptime", /// `nosuspend lhs`. rhs unused. - Nosuspend, + @"nosuspend", /// `{lhs rhs}`. rhs or lhs can be omitted. /// main_token points at the lbrace. - BlockTwo, - /// Same as BlockTwo but there is known to be a semicolon before the rbrace. - BlockTwoSemicolon, + block_two, + /// Same as block_two but there is known to be a semicolon before the rbrace. + block_two_semicolon, /// `{}`. `sub_list[lhs..rhs]`. /// main_token points at the lbrace. - Block, - /// Same as Block but there is known to be a semicolon before the rbrace. - BlockSemicolon, + block, + /// Same as block but there is known to be a semicolon before the rbrace. + block_semicolon, /// `asm(lhs)`. rhs is the token index of the rparen. - AsmSimple, + asm_simple, /// `asm(lhs, a)`. `Asm[rhs]`. - Asm, + @"asm", /// `[a] "b" (c)`. lhs is 0, rhs is token index of the rparen. /// `[a] "b" (-> lhs)`. rhs is token index of the rparen. /// main_token is `a`. - AsmOutput, + asm_output, /// `[a] "b" (lhs)`. rhs is token index of the rparen. /// main_token is `a`. - AsmInput, + asm_input, /// `error.a`. lhs is token index of `.`. rhs is token index of `a`. - ErrorValue, + error_value, /// `lhs!rhs`. main_token is the `!`. 
- ErrorUnion, + error_union, pub fn isContainerField(tag: Tag) bool { return switch (tag) { - .ContainerFieldInit, - .ContainerFieldAlign, - .ContainerField, + .container_field_init, + .container_field_align, + .container_field, => true, else => false, diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig index 855f889794..6eb617910c 100644 --- a/lib/std/zig/parse.zig +++ b/lib/std/zig/parse.zig @@ -57,7 +57,7 @@ pub fn parse(gpa: *Allocator, source: []const u8) Allocator.Error!Tree { // Root node must be index 0. // Root <- skip ContainerMembers eof parser.nodes.appendAssumeCapacity(.{ - .tag = .Root, + .tag = .root, .main_token = 0, .data = .{ .lhs = undefined, @@ -251,7 +251,7 @@ const Parser = struct { }; if (block != 0) { const comptime_node = try p.addNode(.{ - .tag = .Comptime, + .tag = .@"comptime", .main_token = comptime_token, .data = .{ .lhs = block, @@ -477,7 +477,7 @@ const Parser = struct { const block_node = try p.parseBlock(); if (block_node == 0) return p.fail(.{ .ExpectedLBrace = .{ .token = p.tok_i } }); return p.addNode(.{ - .tag = .TestDecl, + .tag = .test_decl, .main_token = test_token, .data = .{ .lhs = name_token orelse 0, @@ -517,7 +517,7 @@ const Parser = struct { const semicolon_token = p.nextToken(); try p.parseAppendedDocComment(semicolon_token); return p.addNode(.{ - .tag = .FnDecl, + .tag = .fn_decl, .main_token = p.nodes.items(.main_token)[fn_proto], .data = .{ .lhs = fn_proto, @@ -529,7 +529,7 @@ const Parser = struct { const body_block = try p.parseBlock(); assert(body_block != 0); return p.addNode(.{ - .tag = .FnDecl, + .tag = .fn_decl, .main_token = p.nodes.items(.main_token)[fn_proto], .data = .{ .lhs = fn_proto, @@ -587,7 +587,7 @@ const Parser = struct { const semicolon_token = try p.expectToken(.semicolon); try p.parseAppendedDocComment(semicolon_token); return p.addNode(.{ - .tag = .UsingNamespace, + .tag = .@"usingnamespace", .main_token = usingnamespace_token, .data = .{ .lhs = expr, @@ -627,7 +627,7 @@ const Parser = 
struct { if (align_expr == 0 and section_expr == 0 and callconv_expr == 0) { switch (params) { .zero_or_one => |param| return p.addNode(.{ - .tag = .FnProtoSimple, + .tag = .fn_proto_simple, .main_token = fn_token, .data = .{ .lhs = param, @@ -637,7 +637,7 @@ const Parser = struct { .multi => |list| { const span = try p.listToSpan(list); return p.addNode(.{ - .tag = .FnProtoMulti, + .tag = .fn_proto_multi, .main_token = fn_token, .data = .{ .lhs = try p.addExtra(Node.SubRange{ @@ -652,7 +652,7 @@ const Parser = struct { } switch (params) { .zero_or_one => |param| return p.addNode(.{ - .tag = .FnProtoOne, + .tag = .fn_proto_one, .main_token = fn_token, .data = .{ .lhs = try p.addExtra(Node.FnProtoOne{ @@ -667,7 +667,7 @@ const Parser = struct { .multi => |list| { const span = try p.listToSpan(list); return p.addNode(.{ - .tag = .FnProto, + .tag = .fn_proto, .main_token = fn_token, .data = .{ .lhs = try p.addExtra(Node.FnProto{ @@ -698,7 +698,7 @@ const Parser = struct { if (section_node == 0) { if (align_node == 0) { return p.addNode(.{ - .tag = .SimpleVarDecl, + .tag = .simple_var_decl, .main_token = mut_token, .data = .{ .lhs = type_node, @@ -707,7 +707,7 @@ const Parser = struct { }); } else if (type_node == 0) { return p.addNode(.{ - .tag = .AlignedVarDecl, + .tag = .aligned_var_decl, .main_token = mut_token, .data = .{ .lhs = align_node, @@ -716,7 +716,7 @@ const Parser = struct { }); } else { return p.addNode(.{ - .tag = .LocalVarDecl, + .tag = .local_var_decl, .main_token = mut_token, .data = .{ .lhs = try p.addExtra(Node.LocalVarDecl{ @@ -729,7 +729,7 @@ const Parser = struct { } } else { return p.addNode(.{ - .tag = .GlobalVarDecl, + .tag = .global_var_decl, .main_token = mut_token, .data = .{ .lhs = try p.addExtra(Node.GlobalVarDecl{ @@ -753,7 +753,7 @@ const Parser = struct { if (p.eatToken(.colon)) |_| { if (p.eatToken(.keyword_anytype)) |anytype_tok| { type_expr = try p.addNode(.{ - .tag = .AnyType, + .tag = .@"anytype", .main_token = anytype_tok, .data 
= .{ .lhs = undefined, @@ -770,7 +770,7 @@ const Parser = struct { if (align_expr == 0) { return p.addNode(.{ - .tag = .ContainerFieldInit, + .tag = .container_field_init, .main_token = name_token, .data = .{ .lhs = type_expr, @@ -779,7 +779,7 @@ const Parser = struct { }); } else if (value_expr == 0) { return p.addNode(.{ - .tag = .ContainerFieldAlign, + .tag = .container_field_align, .main_token = name_token, .data = .{ .lhs = type_expr, @@ -788,7 +788,7 @@ const Parser = struct { }); } else { return p.addNode(.{ - .tag = .ContainerField, + .tag = .container_field, .main_token = name_token, .data = .{ .lhs = type_expr, @@ -833,7 +833,7 @@ const Parser = struct { if (comptime_token) |token| { return p.addNode(.{ - .tag = .Comptime, + .tag = .@"comptime", .main_token = token, .data = .{ .lhs = try p.expectBlockExprStatement(), @@ -845,7 +845,7 @@ const Parser = struct { switch (p.token_tags[p.tok_i]) { .keyword_nosuspend => { return p.addNode(.{ - .tag = .Nosuspend, + .tag = .@"nosuspend", .main_token = p.nextToken(), .data = .{ .lhs = try p.expectBlockExprStatement(), @@ -860,7 +860,7 @@ const Parser = struct { else try p.expectBlockExprStatement(); return p.addNode(.{ - .tag = .Suspend, + .tag = .@"suspend", .main_token = token, .data = .{ .lhs = block_expr, @@ -869,7 +869,7 @@ const Parser = struct { }); }, .keyword_defer => return p.addNode(.{ - .tag = .Defer, + .tag = .@"defer", .main_token = p.nextToken(), .data = .{ .lhs = undefined, @@ -877,7 +877,7 @@ const Parser = struct { }, }), .keyword_errdefer => return p.addNode(.{ - .tag = .ErrDefer, + .tag = .@"errdefer", .main_token = p.nextToken(), .data = .{ .lhs = try p.parsePayload(), @@ -947,7 +947,7 @@ const Parser = struct { } if (p.eatToken(.semicolon)) |_| { return p.addNode(.{ - .tag = .IfSimple, + .tag = .if_simple, .main_token = if_token, .data = .{ .lhs = condition, @@ -963,7 +963,7 @@ const Parser = struct { return p.fail(.{ .ExpectedSemiOrElse = .{ .token = p.tok_i } }); } return p.addNode(.{ - 
.tag = .IfSimple, + .tag = .if_simple, .main_token = if_token, .data = .{ .lhs = condition, @@ -974,7 +974,7 @@ const Parser = struct { const else_payload = try p.parsePayload(); const else_expr = try p.expectStatement(); return p.addNode(.{ - .tag = .If, + .tag = .@"if", .main_token = if_token, .data = .{ .lhs = condition, @@ -1041,7 +1041,7 @@ const Parser = struct { } if (p.eatToken(.semicolon)) |_| { return p.addNode(.{ - .tag = .ForSimple, + .tag = .for_simple, .main_token = for_token, .data = .{ .lhs = array_expr, @@ -1057,7 +1057,7 @@ const Parser = struct { return p.fail(.{ .ExpectedSemiOrElse = .{ .token = p.tok_i } }); } return p.addNode(.{ - .tag = .ForSimple, + .tag = .for_simple, .main_token = for_token, .data = .{ .lhs = array_expr, @@ -1066,7 +1066,7 @@ const Parser = struct { }); }; return p.addNode(.{ - .tag = .For, + .tag = .@"for", .main_token = for_token, .data = .{ .lhs = array_expr, @@ -1103,7 +1103,7 @@ const Parser = struct { if (p.eatToken(.semicolon)) |_| { if (cont_expr == 0) { return p.addNode(.{ - .tag = .WhileSimple, + .tag = .while_simple, .main_token = while_token, .data = .{ .lhs = condition, @@ -1112,7 +1112,7 @@ const Parser = struct { }); } else { return p.addNode(.{ - .tag = .WhileCont, + .tag = .while_cont, .main_token = while_token, .data = .{ .lhs = condition, @@ -1133,7 +1133,7 @@ const Parser = struct { } if (cont_expr == 0) { return p.addNode(.{ - .tag = .WhileSimple, + .tag = .while_simple, .main_token = while_token, .data = .{ .lhs = condition, @@ -1142,7 +1142,7 @@ const Parser = struct { }); } else { return p.addNode(.{ - .tag = .WhileCont, + .tag = .while_cont, .main_token = while_token, .data = .{ .lhs = condition, @@ -1157,7 +1157,7 @@ const Parser = struct { const else_payload = try p.parsePayload(); const else_expr = try p.expectStatement(); return p.addNode(.{ - .tag = .While, + .tag = .@"while", .main_token = while_token, .data = .{ .lhs = condition, @@ -1233,20 +1233,20 @@ const Parser = struct { if (expr == 0) 
return null_node; const tag: Node.Tag = switch (p.token_tags[p.tok_i]) { - .asterisk_equal => .AssignMul, - .slash_equal => .AssignDiv, - .percent_equal => .AssignMod, - .plus_equal => .AssignAdd, - .minus_equal => .AssignSub, - .angle_bracket_angle_bracket_left_equal => .AssignBitShiftLeft, - .angle_bracket_angle_bracket_right_equal => .AssignBitShiftRight, - .ampersand_equal => .AssignBitAnd, - .caret_equal => .AssignBitXor, - .pipe_equal => .AssignBitOr, - .asterisk_percent_equal => .AssignMulWrap, - .plus_percent_equal => .AssignAddWrap, - .minus_percent_equal => .AssignSubWrap, - .equal => .Assign, + .asterisk_equal => .assign_mul, + .slash_equal => .assign_div, + .percent_equal => .assign_mod, + .plus_equal => .assign_add, + .minus_equal => .assign_sub, + .angle_bracket_angle_bracket_left_equal => .assign_bit_shift_left, + .angle_bracket_angle_bracket_right_equal => .assign_bit_shift_right, + .ampersand_equal => .assign_bit_and, + .caret_equal => .assign_bit_xor, + .pipe_equal => .assign_bit_or, + .asterisk_percent_equal => .assign_mul_wrap, + .plus_percent_equal => .assign_add_wrap, + .minus_percent_equal => .assign_sub_wrap, + .equal => .assign, else => return expr, }; return p.addNode(.{ @@ -1295,7 +1295,7 @@ const Parser = struct { return p.fail(.{ .InvalidToken = .{ .token = p.tok_i } }); } res = try p.addNode(.{ - .tag = .BoolOr, + .tag = .bool_or, .main_token = or_token, .data = .{ .lhs = res, @@ -1322,7 +1322,7 @@ const Parser = struct { return p.fail(.{ .InvalidToken = .{ .token = p.tok_i } }); } res = try p.addNode(.{ - .tag = .BoolAnd, + .tag = .bool_and, .main_token = and_token, .data = .{ .lhs = res, @@ -1348,12 +1348,12 @@ const Parser = struct { if (expr == 0) return null_node; const tag: Node.Tag = switch (p.token_tags[p.tok_i]) { - .equal_equal => .EqualEqual, - .bang_equal => .BangEqual, - .angle_bracket_left => .LessThan, - .angle_bracket_right => .GreaterThan, - .angle_bracket_left_equal => .LessOrEqual, - .angle_bracket_right_equal => 
.GreaterOrEqual, + .equal_equal => .equal_equal, + .bang_equal => .bang_equal, + .angle_bracket_left => .less_than, + .angle_bracket_right => .greater_than, + .angle_bracket_left_equal => .less_or_equal, + .angle_bracket_right_equal => .greater_or_equal, else => return expr, }; return p.addNode(.{ @@ -1379,10 +1379,10 @@ const Parser = struct { while (true) { const tag: Node.Tag = switch (p.token_tags[p.tok_i]) { - .ampersand => .BitAnd, - .caret => .BitXor, - .pipe => .BitOr, - .keyword_orelse => .OrElse, + .ampersand => .bit_and, + .caret => .bit_xor, + .pipe => .bit_or, + .keyword_orelse => .@"orelse", .keyword_catch => { const catch_token = p.nextToken(); _ = try p.parsePayload(); @@ -1391,7 +1391,7 @@ const Parser = struct { return p.fail(.{ .InvalidToken = .{ .token = p.tok_i } }); } res = try p.addNode(.{ - .tag = .Catch, + .tag = .@"catch", .main_token = catch_token, .data = .{ .lhs = res, @@ -1432,8 +1432,8 @@ const Parser = struct { while (true) { const tag: Node.Tag = switch (p.token_tags[p.tok_i]) { - .angle_bracket_angle_bracket_left => .BitShiftLeft, - .angle_bracket_angle_bracket_right => .BitShiftRight, + .angle_bracket_angle_bracket_left => .bit_shift_left, + .angle_bracket_angle_bracket_right => .bit_shift_right, else => return res, }; res = try p.addNode(.{ @@ -1469,11 +1469,11 @@ const Parser = struct { while (true) { const tag: Node.Tag = switch (p.token_tags[p.tok_i]) { - .plus => .Add, - .minus => .Sub, - .plus_plus => .ArrayCat, - .plus_percent => .AddWrap, - .minus_percent => .SubWrap, + .plus => .add, + .minus => .sub, + .plus_plus => .array_cat, + .plus_percent => .add_wrap, + .minus_percent => .sub_wrap, else => return res, }; res = try p.addNode(.{ @@ -1509,12 +1509,12 @@ const Parser = struct { while (true) { const tag: Node.Tag = switch (p.token_tags[p.tok_i]) { - .pipe_pipe => .MergeErrorSets, - .asterisk => .Mul, - .slash => .Div, - .percent => .Mod, - .asterisk_asterisk => .ArrayMult, - .asterisk_percent => .MulWrap, + .pipe_pipe 
=> .merge_error_sets, + .asterisk => .mul, + .slash => .div, + .percent => .mod, + .asterisk_asterisk => .array_mult, + .asterisk_percent => .mul_wrap, else => return res, }; res = try p.addNode(.{ @@ -1547,13 +1547,13 @@ const Parser = struct { /// / KEYWORD_await fn parsePrefixExpr(p: *Parser) Error!Node.Index { const tag: Node.Tag = switch (p.token_tags[p.tok_i]) { - .bang => .BoolNot, - .minus => .Negation, - .tilde => .BitNot, - .minus_percent => .NegationWrap, - .ampersand => .AddressOf, - .keyword_try => .Try, - .keyword_await => .Await, + .bang => .bool_not, + .minus => .negation, + .tilde => .bit_not, + .minus_percent => .negation_wrap, + .ampersand => .address_of, + .keyword_try => .@"try", + .keyword_await => .@"await", else => return p.parsePrimaryExpr(), }; return p.addNode(.{ @@ -1588,7 +1588,7 @@ const Parser = struct { fn parseTypeExpr(p: *Parser) Error!Node.Index { switch (p.token_tags[p.tok_i]) { .question_mark => return p.addNode(.{ - .tag = .OptionalType, + .tag = .optional_type, .main_token = p.nextToken(), .data = .{ .lhs = try p.expectTypeExpr(), @@ -1597,7 +1597,7 @@ const Parser = struct { }), .keyword_anyframe => switch (p.token_tags[p.tok_i + 1]) { .arrow => return p.addNode(.{ - .tag = .AnyFrameType, + .tag = .anyframe_type, .main_token = p.nextToken(), .data = .{ .lhs = p.nextToken(), @@ -1612,7 +1612,7 @@ const Parser = struct { const elem_type = try p.expectTypeExpr(); if (mods.bit_range_start == 0) { return p.addNode(.{ - .tag = .PtrTypeAligned, + .tag = .ptr_type_aligned, .main_token = asterisk, .data = .{ .lhs = mods.align_node, @@ -1621,7 +1621,7 @@ const Parser = struct { }); } else { return p.addNode(.{ - .tag = .PtrTypeBitRange, + .tag = .ptr_type_bit_range, .main_token = asterisk, .data = .{ .lhs = try p.addExtra(Node.PtrTypeBitRange{ @@ -1642,7 +1642,7 @@ const Parser = struct { const inner: Node.Index = inner: { if (mods.bit_range_start == 0) { break :inner try p.addNode(.{ - .tag = .PtrTypeAligned, + .tag = 
.ptr_type_aligned, .main_token = asterisk, .data = .{ .lhs = mods.align_node, @@ -1651,7 +1651,7 @@ const Parser = struct { }); } else { break :inner try p.addNode(.{ - .tag = .PtrTypeBitRange, + .tag = .ptr_type_bit_range, .main_token = asterisk, .data = .{ .lhs = try p.addExtra(Node.PtrTypeBitRange{ @@ -1666,7 +1666,7 @@ const Parser = struct { } }; return p.addNode(.{ - .tag = .PtrTypeAligned, + .tag = .ptr_type_aligned, .main_token = asterisk, .data = .{ .lhs = 0, @@ -1698,7 +1698,7 @@ const Parser = struct { if (mods.bit_range_start == 0) { if (sentinel == 0) { return p.addNode(.{ - .tag = .PtrTypeAligned, + .tag = .ptr_type_aligned, .main_token = asterisk, .data = .{ .lhs = mods.align_node, @@ -1707,7 +1707,7 @@ const Parser = struct { }); } else if (mods.align_node == 0) { return p.addNode(.{ - .tag = .PtrTypeSentinel, + .tag = .ptr_type_sentinel, .main_token = asterisk, .data = .{ .lhs = sentinel, @@ -1716,7 +1716,7 @@ const Parser = struct { }); } else { return p.addNode(.{ - .tag = .PtrType, + .tag = .ptr_type, .main_token = asterisk, .data = .{ .lhs = try p.addExtra(Node.PtrType{ @@ -1729,7 +1729,7 @@ const Parser = struct { } } else { return p.addNode(.{ - .tag = .PtrTypeBitRange, + .tag = .ptr_type_bit_range, .main_token = asterisk, .data = .{ .lhs = try p.addExtra(Node.PtrTypeBitRange{ @@ -1762,7 +1762,7 @@ const Parser = struct { if (len_expr == 0) { if (sentinel == 0) { return p.addNode(.{ - .tag = .PtrTypeAligned, + .tag = .ptr_type_aligned, .main_token = lbracket, .data = .{ .lhs = mods.align_node, @@ -1771,7 +1771,7 @@ const Parser = struct { }); } else if (mods.align_node == 0) { return p.addNode(.{ - .tag = .PtrTypeSentinel, + .tag = .ptr_type_sentinel, .main_token = lbracket, .data = .{ .lhs = sentinel, @@ -1780,7 +1780,7 @@ const Parser = struct { }); } else { return p.addNode(.{ - .tag = .PtrType, + .tag = .ptr_type, .main_token = lbracket, .data = .{ .lhs = try p.addExtra(Node.PtrType{ @@ -1800,7 +1800,7 @@ const Parser = struct { } if 
(sentinel == 0) { return p.addNode(.{ - .tag = .ArrayType, + .tag = .array_type, .main_token = lbracket, .data = .{ .lhs = len_expr, @@ -1809,7 +1809,7 @@ const Parser = struct { }); } else { return p.addNode(.{ - .tag = .ArrayTypeSentinel, + .tag = .array_type_sentinel, .main_token = lbracket, .data = .{ .lhs = len_expr, @@ -1854,7 +1854,7 @@ const Parser = struct { .keyword_break => { p.tok_i += 1; return p.addNode(.{ - .tag = .Break, + .tag = .@"break", .main_token = p.tok_i - 1, .data = .{ .lhs = try p.parseBreakLabel(), @@ -1865,7 +1865,7 @@ const Parser = struct { .keyword_continue => { p.tok_i += 1; return p.addNode(.{ - .tag = .Continue, + .tag = .@"continue", .main_token = p.tok_i - 1, .data = .{ .lhs = try p.parseBreakLabel(), @@ -1876,7 +1876,7 @@ const Parser = struct { .keyword_comptime => { p.tok_i += 1; return p.addNode(.{ - .tag = .Comptime, + .tag = .@"comptime", .main_token = p.tok_i - 1, .data = .{ .lhs = try p.expectExpr(), @@ -1887,7 +1887,7 @@ const Parser = struct { .keyword_nosuspend => { p.tok_i += 1; return p.addNode(.{ - .tag = .Nosuspend, + .tag = .@"nosuspend", .main_token = p.tok_i - 1, .data = .{ .lhs = try p.expectExpr(), @@ -1898,7 +1898,7 @@ const Parser = struct { .keyword_resume => { p.tok_i += 1; return p.addNode(.{ - .tag = .Resume, + .tag = .@"resume", .main_token = p.tok_i - 1, .data = .{ .lhs = try p.expectExpr(), @@ -1909,7 +1909,7 @@ const Parser = struct { .keyword_return => { p.tok_i += 1; return p.addNode(.{ - .tag = .Return, + .tag = .@"return", .main_token = p.tok_i - 1, .data = .{ .lhs = try p.parseExpr(), @@ -1976,7 +1976,7 @@ const Parser = struct { if (p.eatToken(.r_brace)) |_| { return p.addNode(.{ - .tag = .BlockTwo, + .tag = .block_two, .main_token = lbrace, .data = .{ .lhs = 0, @@ -1989,7 +1989,7 @@ const Parser = struct { if (p.eatToken(.r_brace)) |_| { const semicolon = p.token_tags[p.tok_i - 2] == .semicolon; return p.addNode(.{ - .tag = if (semicolon) .BlockTwoSemicolon else .BlockTwo, + .tag = if 
(semicolon) .block_two_semicolon else .block_two, .main_token = lbrace, .data = .{ .lhs = stmt_one, @@ -2001,7 +2001,7 @@ const Parser = struct { if (p.eatToken(.r_brace)) |_| { const semicolon = p.token_tags[p.tok_i - 2] == .semicolon; return p.addNode(.{ - .tag = if (semicolon) .BlockTwoSemicolon else .BlockTwo, + .tag = if (semicolon) .block_two_semicolon else .block_two, .main_token = lbrace, .data = .{ .lhs = stmt_one, @@ -2025,7 +2025,7 @@ const Parser = struct { const semicolon = p.token_tags[p.tok_i - 2] == .semicolon; const statements_span = try p.listToSpan(statements.items); return p.addNode(.{ - .tag = if (semicolon) .BlockSemicolon else .Block, + .tag = if (semicolon) .block_semicolon else .block, .main_token = lbrace, .data = .{ .lhs = statements_span.start, @@ -2046,7 +2046,7 @@ const Parser = struct { const then_expr = try p.expectExpr(); const else_token = p.eatToken(.keyword_else) orelse { return p.addNode(.{ - .tag = .ForSimple, + .tag = .for_simple, .main_token = for_token, .data = .{ .lhs = array_expr, @@ -2056,7 +2056,7 @@ const Parser = struct { }; const else_expr = try p.expectExpr(); return p.addNode(.{ - .tag = .For, + .tag = .@"for", .main_token = for_token, .data = .{ .lhs = array_expr, @@ -2082,7 +2082,7 @@ const Parser = struct { const else_token = p.eatToken(.keyword_else) orelse { if (cont_expr == 0) { return p.addNode(.{ - .tag = .WhileSimple, + .tag = .while_simple, .main_token = while_token, .data = .{ .lhs = condition, @@ -2091,7 +2091,7 @@ const Parser = struct { }); } else { return p.addNode(.{ - .tag = .WhileCont, + .tag = .while_cont, .main_token = while_token, .data = .{ .lhs = condition, @@ -2106,7 +2106,7 @@ const Parser = struct { const else_payload = try p.parsePayload(); const else_expr = try p.expectExpr(); return p.addNode(.{ - .tag = .While, + .tag = .@"while", .main_token = while_token, .data = .{ .lhs = condition, @@ -2134,7 +2134,7 @@ const Parser = struct { if (p.eatToken(.r_brace)) |_| { return p.addNode(.{ - 
.tag = .StructInitOne, + .tag = .struct_init_one, .main_token = lbrace, .data = .{ .lhs = lhs, @@ -2147,7 +2147,7 @@ const Parser = struct { const comma_one = p.eatToken(.comma); if (p.eatToken(.r_brace)) |_| { return p.addNode(.{ - .tag = if (comma_one != null) .StructInitOneComma else .StructInitOne, + .tag = if (comma_one != null) .struct_init_one_comma else .struct_init_one, .main_token = lbrace, .data = .{ .lhs = lhs, @@ -2192,7 +2192,7 @@ const Parser = struct { } const span = try p.listToSpan(init_list.items); return p.addNode(.{ - .tag = if (p.token_tags[p.tok_i - 2] == .comma) .StructInitComma else .StructInit, + .tag = if (p.token_tags[p.tok_i - 2] == .comma) .struct_init_comma else .struct_init, .main_token = lbrace, .data = .{ .lhs = lhs, @@ -2208,7 +2208,7 @@ const Parser = struct { const comma_one = p.eatToken(.comma); if (p.eatToken(.r_brace)) |_| { return p.addNode(.{ - .tag = if (comma_one != null) .ArrayInitOneComma else .ArrayInitOne, + .tag = if (comma_one != null) .array_init_one_comma else .array_init_one, .main_token = lbrace, .data = .{ .lhs = lhs, @@ -2239,7 +2239,7 @@ const Parser = struct { _ = try p.expectToken(.r_brace); const span = try p.listToSpan(init_list.items); return p.addNode(.{ - .tag = if (trailing_comma) .ArrayInitComma else .ArrayInit, + .tag = if (trailing_comma) .array_init_comma else .array_init, .main_token = lbrace, .data = .{ .lhs = lhs, @@ -2257,7 +2257,7 @@ const Parser = struct { if (suffix_expr == 0) return null_node; const bang = p.eatToken(.bang) orelse return suffix_expr; return p.addNode(.{ - .tag = .ErrorUnion, + .tag = .error_union, .main_token = bang, .data = .{ .lhs = suffix_expr, @@ -2286,7 +2286,7 @@ const Parser = struct { }; if (p.eatToken(.r_paren)) |_| { return p.addNode(.{ - .tag = .AsyncCallOne, + .tag = .async_call_one, .main_token = lparen, .data = .{ .lhs = res, @@ -2298,7 +2298,7 @@ const Parser = struct { const comma_one = p.eatToken(.comma); if (p.eatToken(.r_paren)) |_| { return p.addNode(.{ 
- .tag = if (comma_one == null) .AsyncCallOne else .AsyncCallOneComma, + .tag = if (comma_one == null) .async_call_one else .async_call_one_comma, .main_token = lparen, .data = .{ .lhs = res, @@ -2325,7 +2325,7 @@ const Parser = struct { if (p.eatToken(.r_paren)) |_| { const span = try p.listToSpan(param_list.items); return p.addNode(.{ - .tag = .AsyncCallComma, + .tag = .async_call_comma, .main_token = lparen, .data = .{ .lhs = res, @@ -2342,7 +2342,7 @@ const Parser = struct { .r_paren => { const span = try p.listToSpan(param_list.items); return p.addNode(.{ - .tag = .AsyncCall, + .tag = .async_call, .main_token = lparen, .data = .{ .lhs = res, @@ -2387,7 +2387,7 @@ const Parser = struct { const lparen = p.eatToken(.l_paren) orelse return res; if (p.eatToken(.r_paren)) |_| { break :res try p.addNode(.{ - .tag = .CallOne, + .tag = .call_one, .main_token = lparen, .data = .{ .lhs = res, @@ -2399,7 +2399,7 @@ const Parser = struct { const comma_one = p.eatToken(.comma); if (p.eatToken(.r_paren)) |_| { break :res try p.addNode(.{ - .tag = if (comma_one == null) .CallOne else .CallOneComma, + .tag = if (comma_one == null) .call_one else .call_one_comma, .main_token = lparen, .data = .{ .lhs = res, @@ -2426,7 +2426,7 @@ const Parser = struct { if (p.eatToken(.r_paren)) |_| { const span = try p.listToSpan(param_list.items); break :res try p.addNode(.{ - .tag = .CallComma, + .tag = .call_comma, .main_token = lparen, .data = .{ .lhs = res, @@ -2443,7 +2443,7 @@ const Parser = struct { .r_paren => { const span = try p.listToSpan(param_list.items); break :res try p.addNode(.{ - .tag = .Call, + .tag = .call, .main_token = lparen, .data = .{ .lhs = res, @@ -2518,7 +2518,7 @@ const Parser = struct { fn parsePrimaryTypeExpr(p: *Parser) !Node.Index { switch (p.token_tags[p.tok_i]) { .char_literal => return p.addNode(.{ - .tag = .CharLiteral, + .tag = .char_literal, .main_token = p.nextToken(), .data = .{ .lhs = undefined, @@ -2526,7 +2526,7 @@ const Parser = struct { }, }), 
.integer_literal => return p.addNode(.{ - .tag = .IntegerLiteral, + .tag = .integer_literal, .main_token = p.nextToken(), .data = .{ .lhs = undefined, @@ -2534,7 +2534,7 @@ const Parser = struct { }, }), .float_literal => return p.addNode(.{ - .tag = .FloatLiteral, + .tag = .float_literal, .main_token = p.nextToken(), .data = .{ .lhs = undefined, @@ -2542,7 +2542,7 @@ const Parser = struct { }, }), .keyword_false => return p.addNode(.{ - .tag = .FalseLiteral, + .tag = .false_literal, .main_token = p.nextToken(), .data = .{ .lhs = undefined, @@ -2550,7 +2550,7 @@ const Parser = struct { }, }), .keyword_true => return p.addNode(.{ - .tag = .TrueLiteral, + .tag = .true_literal, .main_token = p.nextToken(), .data = .{ .lhs = undefined, @@ -2558,7 +2558,7 @@ const Parser = struct { }, }), .keyword_null => return p.addNode(.{ - .tag = .NullLiteral, + .tag = .null_literal, .main_token = p.nextToken(), .data = .{ .lhs = undefined, @@ -2566,7 +2566,7 @@ const Parser = struct { }, }), .keyword_undefined => return p.addNode(.{ - .tag = .UndefinedLiteral, + .tag = .undefined_literal, .main_token = p.nextToken(), .data = .{ .lhs = undefined, @@ -2574,7 +2574,7 @@ const Parser = struct { }, }), .keyword_unreachable => return p.addNode(.{ - .tag = .UnreachableLiteral, + .tag = .unreachable_literal, .main_token = p.nextToken(), .data = .{ .lhs = undefined, @@ -2582,7 +2582,7 @@ const Parser = struct { }, }), .keyword_anyframe => return p.addNode(.{ - .tag = .AnyFrameLiteral, + .tag = .anyframe_literal, .main_token = p.nextToken(), .data = .{ .lhs = undefined, @@ -2592,7 +2592,7 @@ const Parser = struct { .string_literal => { const main_token = p.nextToken(); return p.addNode(.{ - .tag = .StringLiteral, + .tag = .string_literal, .main_token = main_token, .data = .{ .lhs = main_token, @@ -2620,7 +2620,7 @@ const Parser = struct { => return p.parseContainerDeclAuto(), .keyword_comptime => return p.addNode(.{ - .tag = .Comptime, + .tag = .@"comptime", .main_token = p.nextToken(), 
.data = .{ .lhs = try p.expectTypeExpr(), @@ -2633,7 +2633,7 @@ const Parser = struct { p.tok_i += 1; } return p.addNode(.{ - .tag = .StringLiteral, + .tag = .string_literal, .main_token = first_line, .data = .{ .lhs = first_line, @@ -2662,7 +2662,7 @@ const Parser = struct { return p.parseWhileTypeExpr(); }, else => return p.addNode(.{ - .tag = .Identifier, + .tag = .identifier, .main_token = p.nextToken(), .data = .{ .lhs = undefined, @@ -2671,7 +2671,7 @@ const Parser = struct { }), }, else => return p.addNode(.{ - .tag = .Identifier, + .tag = .identifier, .main_token = p.nextToken(), .data = .{ .lhs = undefined, @@ -2681,7 +2681,7 @@ const Parser = struct { }, .period => switch (p.token_tags[p.tok_i + 1]) { .identifier => return p.addNode(.{ - .tag = .EnumLiteral, + .tag = .enum_literal, .data = .{ .lhs = p.nextToken(), // dot .rhs = undefined, @@ -2697,7 +2697,7 @@ const Parser = struct { if (p.eatToken(.r_brace)) |_| { return p.addNode(.{ - .tag = .StructInitDotTwo, + .tag = .struct_init_dot_two, .main_token = lbrace, .data = .{ .lhs = 0, @@ -2710,7 +2710,7 @@ const Parser = struct { const comma_one = p.eatToken(.comma); if (p.eatToken(.r_brace)) |_| { return p.addNode(.{ - .tag = if (comma_one != null) .StructInitDotTwoComma else .StructInitDotTwo, + .tag = if (comma_one != null) .struct_init_dot_two_comma else .struct_init_dot_two, .main_token = lbrace, .data = .{ .lhs = field_init_one, @@ -2727,7 +2727,7 @@ const Parser = struct { const comma_two = p.eatToken(.comma); if (p.eatToken(.r_brace)) |_| { return p.addNode(.{ - .tag = if (comma_two != null) .StructInitDotTwoComma else .StructInitDotTwo, + .tag = if (comma_two != null) .struct_init_dot_two_comma else .struct_init_dot_two, .main_token = lbrace, .data = .{ .lhs = field_init_one, @@ -2778,7 +2778,7 @@ const Parser = struct { const span = try p.listToSpan(init_list.items); const trailing_comma = p.token_tags[p.tok_i - 2] == .comma; return p.addNode(.{ - .tag = if (trailing_comma) .StructInitDotComma 
else .StructInitDot, + .tag = if (trailing_comma) .struct_init_dot_comma else .struct_init_dot, .main_token = lbrace, .data = .{ .lhs = span.start, @@ -2791,7 +2791,7 @@ const Parser = struct { const comma_one = p.eatToken(.comma); if (p.eatToken(.r_brace)) |_| { return p.addNode(.{ - .tag = if (comma_one != null) .ArrayInitDotTwoComma else .ArrayInitDotTwo, + .tag = if (comma_one != null) .array_init_dot_two_comma else .array_init_dot_two, .main_token = lbrace, .data = .{ .lhs = elem_init_one, @@ -2808,7 +2808,7 @@ const Parser = struct { const comma_two = p.eatToken(.comma); if (p.eatToken(.r_brace)) |_| { return p.addNode(.{ - .tag = if (comma_two != null) .ArrayInitDotTwoComma else .ArrayInitDotTwo, + .tag = if (comma_two != null) .array_init_dot_two_comma else .array_init_dot_two, .main_token = lbrace, .data = .{ .lhs = elem_init_one, @@ -2858,7 +2858,7 @@ const Parser = struct { } const span = try p.listToSpan(init_list.items); return p.addNode(.{ - .tag = if (p.token_tags[p.tok_i - 2] == .comma) .ArrayInitDotComma else .ArrayInitDot, + .tag = if (p.token_tags[p.tok_i - 2] == .comma) .array_init_dot_comma else .array_init_dot, .main_token = lbrace, .data = .{ .lhs = span.start, @@ -2875,7 +2875,7 @@ const Parser = struct { if (p.eatToken(.r_brace)) |rbrace| { return p.addNode(.{ - .tag = .ErrorSetDecl, + .tag = .error_set_decl, .main_token = error_token, .data = .{ .lhs = undefined, @@ -2913,7 +2913,7 @@ const Parser = struct { } } return p.addNode(.{ - .tag = .ErrorSetDecl, + .tag = .error_set_decl, .main_token = error_token, .data = .{ .lhs = undefined, @@ -2922,7 +2922,7 @@ const Parser = struct { }); }, else => return p.addNode(.{ - .tag = .ErrorValue, + .tag = .error_value, .main_token = p.nextToken(), .data = .{ .lhs = try p.expectToken(.period), @@ -2931,7 +2931,7 @@ const Parser = struct { }), }, .l_paren => return p.addNode(.{ - .tag = .GroupedExpression, + .tag = .grouped_expression, .main_token = p.nextToken(), .data = .{ .lhs = try p.expectExpr(), 
@@ -2962,7 +2962,7 @@ const Parser = struct { const then_expr = try p.expectExpr(); const else_token = p.eatToken(.keyword_else) orelse { return p.addNode(.{ - .tag = .ForSimple, + .tag = .for_simple, .main_token = for_token, .data = .{ .lhs = array_expr, @@ -2972,7 +2972,7 @@ const Parser = struct { }; const else_expr = try p.expectTypeExpr(); return p.addNode(.{ - .tag = .For, + .tag = .@"for", .main_token = for_token, .data = .{ .lhs = array_expr, @@ -2998,7 +2998,7 @@ const Parser = struct { const else_token = p.eatToken(.keyword_else) orelse { if (cont_expr == 0) { return p.addNode(.{ - .tag = .WhileSimple, + .tag = .while_simple, .main_token = while_token, .data = .{ .lhs = condition, @@ -3007,7 +3007,7 @@ const Parser = struct { }); } else { return p.addNode(.{ - .tag = .WhileCont, + .tag = .while_cont, .main_token = while_token, .data = .{ .lhs = condition, @@ -3022,7 +3022,7 @@ const Parser = struct { const else_payload = try p.parsePayload(); const else_expr = try p.expectTypeExpr(); return p.addNode(.{ - .tag = .While, + .tag = .@"while", .main_token = while_token, .data = .{ .lhs = condition, @@ -3047,7 +3047,7 @@ const Parser = struct { _ = try p.expectToken(.r_brace); return p.addNode(.{ - .tag = if (trailing_comma) .SwitchComma else .Switch, + .tag = if (trailing_comma) .switch_comma else .@"switch", .main_token = switch_token, .data = .{ .lhs = expr_node, @@ -3074,7 +3074,7 @@ const Parser = struct { if (p.eatToken(.r_paren)) |rparen| { return p.addNode(.{ - .tag = .AsmSimple, + .tag = .asm_simple, .main_token = asm_token, .data = .{ .lhs = template, @@ -3140,7 +3140,7 @@ const Parser = struct { const rparen = try p.expectToken(.r_paren); const span = try p.listToSpan(list.items); return p.addNode(.{ - .tag = .Asm, + .tag = .@"asm", .main_token = asm_token, .data = .{ .lhs = template, @@ -3170,7 +3170,7 @@ const Parser = struct { }; const rparen = try p.expectToken(.r_paren); return p.addNode(.{ - .tag = .AsmOutput, + .tag = .asm_output, .main_token 
= identifier, .data = .{ .lhs = type_expr, @@ -3189,7 +3189,7 @@ const Parser = struct { const expr = try p.expectExpr(); const rparen = try p.expectToken(.r_paren); return p.addNode(.{ - .tag = .AsmInput, + .tag = .asm_input, .main_token = identifier, .data = .{ .lhs = expr, @@ -3336,7 +3336,7 @@ const Parser = struct { const arrow_token = try p.expectToken(.equal_angle_bracket_right); _ = try p.parsePtrPayload(); return p.addNode(.{ - .tag = .SwitchCaseOne, + .tag = .switch_case_one, .main_token = arrow_token, .data = .{ .lhs = 0, @@ -3350,7 +3350,7 @@ const Parser = struct { if (p.eatToken(.equal_angle_bracket_right)) |arrow_token| { _ = try p.parsePtrPayload(); return p.addNode(.{ - .tag = .SwitchCaseOne, + .tag = .switch_case_one, .main_token = arrow_token, .data = .{ .lhs = first_item, @@ -3372,7 +3372,7 @@ const Parser = struct { const arrow_token = try p.expectToken(.equal_angle_bracket_right); _ = try p.parsePtrPayload(); return p.addNode(.{ - .tag = .SwitchCase, + .tag = .switch_case, .main_token = arrow_token, .data = .{ .lhs = try p.addExtra(Node.SubRange{ @@ -3391,7 +3391,7 @@ const Parser = struct { if (p.eatToken(.ellipsis3)) |token| { return p.addNode(.{ - .tag = .SwitchRange, + .tag = .switch_range, .main_token = token, .data = .{ .lhs = expr, @@ -3485,7 +3485,7 @@ const Parser = struct { if (end_expr == 0) { _ = try p.expectToken(.r_bracket); return p.addNode(.{ - .tag = .SliceOpen, + .tag = .slice_open, .main_token = lbracket, .data = .{ .lhs = lhs, @@ -3497,7 +3497,7 @@ const Parser = struct { const sentinel = try p.parseExpr(); _ = try p.expectToken(.r_bracket); return p.addNode(.{ - .tag = .SliceSentinel, + .tag = .slice_sentinel, .main_token = lbracket, .data = .{ .lhs = lhs, @@ -3511,7 +3511,7 @@ const Parser = struct { } else { _ = try p.expectToken(.r_bracket); return p.addNode(.{ - .tag = .Slice, + .tag = .slice, .main_token = lbracket, .data = .{ .lhs = lhs, @@ -3525,7 +3525,7 @@ const Parser = struct { } _ = try 
p.expectToken(.r_bracket); return p.addNode(.{ - .tag = .ArrayAccess, + .tag = .array_access, .main_token = lbracket, .data = .{ .lhs = lhs, @@ -3534,7 +3534,7 @@ const Parser = struct { }); }, .period_asterisk => return p.addNode(.{ - .tag = .Deref, + .tag = .deref, .main_token = p.nextToken(), .data = .{ .lhs = lhs, @@ -3545,7 +3545,7 @@ const Parser = struct { const period_asterisk = p.nextToken(); try p.warn(.{ .AsteriskAfterPointerDereference = .{ .token = period_asterisk } }); return p.addNode(.{ - .tag = .Deref, + .tag = .deref, .main_token = period_asterisk, .data = .{ .lhs = lhs, @@ -3555,7 +3555,7 @@ const Parser = struct { }, .period => switch (p.token_tags[p.tok_i + 1]) { .identifier => return p.addNode(.{ - .tag = .FieldAccess, + .tag = .field_access, .main_token = p.nextToken(), .data = .{ .lhs = lhs, @@ -3563,7 +3563,7 @@ const Parser = struct { }, }), .question_mark => return p.addNode(.{ - .tag = .UnwrapOptional, + .tag = .unwrap_optional, .main_token = p.nextToken(), .data = .{ .lhs = lhs, @@ -3613,8 +3613,8 @@ const Parser = struct { _ = try p.expectToken(.r_brace); return p.addNode(.{ .tag = switch (members.trailing_comma) { - true => .TaggedUnionEnumTagComma, - false => .TaggedUnionEnumTag, + true => .tagged_union_enum_tag_comma, + false => .tagged_union_enum_tag, }, .main_token = main_token, .data = .{ @@ -3631,8 +3631,8 @@ const Parser = struct { if (members.len <= 2) { return p.addNode(.{ .tag = switch (members.trailing_comma) { - true => .TaggedUnionTwoComma, - false => .TaggedUnionTwo, + true => .tagged_union_two_comma, + false => .tagged_union_two, }, .main_token = main_token, .data = .{ @@ -3644,8 +3644,8 @@ const Parser = struct { const span = try members.toSpan(p); return p.addNode(.{ .tag = switch (members.trailing_comma) { - true => .TaggedUnionComma, - false => .TaggedUnion, + true => .tagged_union_comma, + false => .tagged_union, }, .main_token = main_token, .data = .{ @@ -3673,8 +3673,8 @@ const Parser = struct { if (members.len 
<= 2) { return p.addNode(.{ .tag = switch (members.trailing_comma) { - true => .ContainerDeclTwoComma, - false => .ContainerDeclTwo, + true => .container_decl_two_comma, + false => .container_decl_two, }, .main_token = main_token, .data = .{ @@ -3686,8 +3686,8 @@ const Parser = struct { const span = try members.toSpan(p); return p.addNode(.{ .tag = switch (members.trailing_comma) { - true => .ContainerDeclComma, - false => .ContainerDecl, + true => .container_decl_comma, + false => .container_decl, }, .main_token = main_token, .data = .{ @@ -3700,8 +3700,8 @@ const Parser = struct { const span = try members.toSpan(p); return p.addNode(.{ .tag = switch (members.trailing_comma) { - true => .ContainerDeclArgComma, - false => .ContainerDeclArg, + true => .container_decl_arg_comma, + false => .container_decl_arg, }, .main_token = main_token, .data = .{ @@ -3860,7 +3860,7 @@ const Parser = struct { }); // Pretend this was an identifier so we can continue parsing. return p.addNode(.{ - .tag = .Identifier, + .tag = .identifier, .main_token = builtin_token, .data = .{ .lhs = undefined, @@ -3870,7 +3870,7 @@ const Parser = struct { }; if (p.eatToken(.r_paren)) |_| { return p.addNode(.{ - .tag = .BuiltinCallTwo, + .tag = .builtin_call_two, .main_token = builtin_token, .data = .{ .lhs = 0, @@ -3883,7 +3883,7 @@ const Parser = struct { .comma => { if (p.eatToken(.r_paren)) |_| { return p.addNode(.{ - .tag = .BuiltinCallTwoComma, + .tag = .builtin_call_two_comma, .main_token = builtin_token, .data = .{ .lhs = param_one, @@ -3893,7 +3893,7 @@ const Parser = struct { } }, .r_paren => return p.addNode(.{ - .tag = .BuiltinCallTwo, + .tag = .builtin_call_two, .main_token = builtin_token, .data = .{ .lhs = param_one, @@ -3914,7 +3914,7 @@ const Parser = struct { .comma => { if (p.eatToken(.r_paren)) |_| { return p.addNode(.{ - .tag = .BuiltinCallTwoComma, + .tag = .builtin_call_two_comma, .main_token = builtin_token, .data = .{ .lhs = param_one, @@ -3924,7 +3924,7 @@ const Parser = 
struct { } }, .r_paren => return p.addNode(.{ - .tag = .BuiltinCallTwo, + .tag = .builtin_call_two, .main_token = builtin_token, .data = .{ .lhs = param_one, @@ -3954,7 +3954,7 @@ const Parser = struct { if (p.eatToken(.r_paren)) |_| { const params = try p.listToSpan(list.items); return p.addNode(.{ - .tag = .BuiltinCallComma, + .tag = .builtin_call_comma, .main_token = builtin_token, .data = .{ .lhs = params.start, @@ -3967,7 +3967,7 @@ const Parser = struct { .r_paren => { const params = try p.listToSpan(list.items); return p.addNode(.{ - .tag = .BuiltinCall, + .tag = .builtin_call, .main_token = builtin_token, .data = .{ .lhs = params.start, @@ -3993,7 +3993,7 @@ const Parser = struct { .string_literal => { const main_token = p.nextToken(); return p.addNode(.{ - .tag = .StringLiteral, + .tag = .string_literal, .main_token = main_token, .data = .{ .lhs = main_token, @@ -4007,7 +4007,7 @@ const Parser = struct { p.tok_i += 1; } return p.addNode(.{ - .tag = .StringLiteral, + .tag = .string_literal, .main_token = first_line, .data = .{ .lhs = first_line, @@ -4029,7 +4029,7 @@ const Parser = struct { fn expectIntegerLiteral(p: *Parser) !Node.Index { return p.addNode(.{ - .tag = .IntegerLiteral, + .tag = .integer_literal, .main_token = try p.expectToken(.integer_literal), .data = .{ .lhs = undefined, @@ -4050,7 +4050,7 @@ const Parser = struct { if (then_expr == 0) return p.fail(.{ .InvalidToken = .{ .token = p.tok_i } }); const else_token = p.eatToken(.keyword_else) orelse return p.addNode(.{ - .tag = .IfSimple, + .tag = .if_simple, .main_token = if_token, .data = .{ .lhs = condition, @@ -4062,7 +4062,7 @@ const Parser = struct { if (else_expr == 0) return p.fail(.{ .InvalidToken = .{ .token = p.tok_i } }); return p.addNode(.{ - .tag = .If, + .tag = .@"if", .main_token = if_token, .data = .{ .lhs = condition, diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 0f14856fdf..1510409ae1 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ 
-96,7 +96,7 @@ fn renderMember(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) E const datas = tree.nodes.items(.data); try renderDocComments(ais, tree, tree.firstToken(decl)); switch (tree.nodes.items(.tag)[decl]) { - .FnDecl => { + .fn_decl => { // Some examples: // pub extern "foo" fn ... // export fn ... @@ -132,16 +132,16 @@ fn renderMember(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) E return renderToken(ais, tree, tree.lastToken(fn_proto) + 1, space); // semicolon } }, - .FnProtoSimple, - .FnProtoMulti, - .FnProtoOne, - .FnProto, + .fn_proto_simple, + .fn_proto_multi, + .fn_proto_one, + .fn_proto, => { try renderExpression(ais, tree, decl, .None); return renderToken(ais, tree, tree.lastToken(decl) + 1, space); // semicolon }, - .UsingNamespace => { + .@"usingnamespace" => { const main_token = main_tokens[decl]; const expr = datas[decl].lhs; if (main_token > 0 and token_tags[main_token - 1] == .keyword_pub) { @@ -152,12 +152,12 @@ fn renderMember(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) E return renderToken(ais, tree, tree.lastToken(expr) + 1, space); // ; }, - .GlobalVarDecl => return renderVarDecl(ais, tree, tree.globalVarDecl(decl)), - .LocalVarDecl => return renderVarDecl(ais, tree, tree.localVarDecl(decl)), - .SimpleVarDecl => return renderVarDecl(ais, tree, tree.simpleVarDecl(decl)), - .AlignedVarDecl => return renderVarDecl(ais, tree, tree.alignedVarDecl(decl)), + .global_var_decl => return renderVarDecl(ais, tree, tree.globalVarDecl(decl)), + .local_var_decl => return renderVarDecl(ais, tree, tree.localVarDecl(decl)), + .simple_var_decl => return renderVarDecl(ais, tree, tree.simpleVarDecl(decl)), + .aligned_var_decl => return renderVarDecl(ais, tree, tree.alignedVarDecl(decl)), - .TestDecl => { + .test_decl => { const test_token = main_tokens[decl]; try renderToken(ais, tree, test_token, .Space); if (token_tags[test_token + 1] == .string_literal) { @@ -166,12 +166,12 @@ fn renderMember(ais: *Ais, 
tree: ast.Tree, decl: ast.Node.Index, space: Space) E try renderExpression(ais, tree, datas[decl].rhs, space); }, - .ContainerFieldInit => return renderContainerField(ais, tree, tree.containerFieldInit(decl), space), - .ContainerFieldAlign => return renderContainerField(ais, tree, tree.containerFieldAlign(decl), space), - .ContainerField => return renderContainerField(ais, tree, tree.containerField(decl), space), - .Comptime => return renderExpression(ais, tree, decl, space), + .container_field_init => return renderContainerField(ais, tree, tree.containerFieldInit(decl), space), + .container_field_align => return renderContainerField(ais, tree, tree.containerFieldAlign(decl), space), + .container_field => return renderContainerField(ais, tree, tree.containerField(decl), space), + .@"comptime" => return renderExpression(ais, tree, decl, space), - .Root => unreachable, + .root => unreachable, else => unreachable, } } @@ -182,29 +182,29 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac const node_tags = tree.nodes.items(.tag); const datas = tree.nodes.items(.data); switch (node_tags[node]) { - .Identifier, - .IntegerLiteral, - .FloatLiteral, - .StringLiteral, - .CharLiteral, - .TrueLiteral, - .FalseLiteral, - .NullLiteral, - .UnreachableLiteral, - .UndefinedLiteral, - .AnyFrameLiteral, + .identifier, + .integer_literal, + .float_literal, + .string_literal, + .char_literal, + .true_literal, + .false_literal, + .null_literal, + .unreachable_literal, + .undefined_literal, + .anyframe_literal, => return renderToken(ais, tree, main_tokens[node], space), - .ErrorValue => { + .error_value => { try renderToken(ais, tree, main_tokens[node], .None); try renderToken(ais, tree, main_tokens[node] + 1, .None); return renderToken(ais, tree, main_tokens[node] + 2, space); }, - .AnyType => return renderToken(ais, tree, main_tokens[node], space), + .@"anytype" => return renderToken(ais, tree, main_tokens[node], space), - .BlockTwo, - 
.BlockTwoSemicolon, + .block_two, + .block_two_semicolon, => { const statements = [2]ast.Node.Index{ datas[node].lhs, datas[node].rhs }; if (datas[node].lhs == 0) { @@ -215,14 +215,14 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac return renderBlock(ais, tree, node, statements[0..2], space); } }, - .Block, - .BlockSemicolon, + .block, + .block_semicolon, => { const statements = tree.extra_data[datas[node].lhs..datas[node].rhs]; return renderBlock(ais, tree, node, statements, space); }, - .ErrDefer => { + .@"errdefer" => { const defer_token = main_tokens[node]; const payload_token = datas[node].lhs; const expr = datas[node].rhs; @@ -236,20 +236,20 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac return renderExpression(ais, tree, expr, space); }, - .Defer => { + .@"defer" => { const defer_token = main_tokens[node]; const expr = datas[node].rhs; try renderToken(ais, tree, defer_token, .Space); return renderExpression(ais, tree, expr, space); }, - .Comptime, .Nosuspend => { + .@"comptime", .@"nosuspend" => { const comptime_token = main_tokens[node]; const block = datas[node].lhs; try renderToken(ais, tree, comptime_token, .Space); return renderExpression(ais, tree, block, space); }, - .Suspend => { + .@"suspend" => { const suspend_token = main_tokens[node]; const body = datas[node].lhs; if (body != 0) { @@ -260,7 +260,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac } }, - .Catch => { + .@"catch" => { const main_token = main_tokens[node]; const fallback_first = tree.firstToken(datas[node].rhs); @@ -283,15 +283,15 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac try renderExpression(ais, tree, datas[node].rhs, space); // fallback }, - .FieldAccess => { + .field_access => { const field_access = datas[node]; try renderExpression(ais, tree, field_access.lhs, .None); try renderToken(ais, tree, main_tokens[node], .None); return 
renderToken(ais, tree, field_access.rhs, space); }, - .ErrorUnion, - .SwitchRange, + .error_union, + .switch_range, => { const infix = datas[node]; try renderExpression(ais, tree, infix.lhs, .None); @@ -299,45 +299,45 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac return renderExpression(ais, tree, infix.rhs, space); }, - .Add, - .AddWrap, - .ArrayCat, - .ArrayMult, - .Assign, - .AssignBitAnd, - .AssignBitOr, - .AssignBitShiftLeft, - .AssignBitShiftRight, - .AssignBitXor, - .AssignDiv, - .AssignSub, - .AssignSubWrap, - .AssignMod, - .AssignAdd, - .AssignAddWrap, - .AssignMul, - .AssignMulWrap, - .BangEqual, - .BitAnd, - .BitOr, - .BitShiftLeft, - .BitShiftRight, - .BitXor, - .BoolAnd, - .BoolOr, - .Div, - .EqualEqual, - .GreaterOrEqual, - .GreaterThan, - .LessOrEqual, - .LessThan, - .MergeErrorSets, - .Mod, - .Mul, - .MulWrap, - .Sub, - .SubWrap, - .OrElse, + .add, + .add_wrap, + .array_cat, + .array_mult, + .assign, + .assign_bit_and, + .assign_bit_or, + .assign_bit_shift_left, + .assign_bit_shift_right, + .assign_bit_xor, + .assign_div, + .assign_sub, + .assign_sub_wrap, + .assign_mod, + .assign_add, + .assign_add_wrap, + .assign_mul, + .assign_mul_wrap, + .bang_equal, + .bit_and, + .bit_or, + .bit_shift_left, + .bit_shift_right, + .bit_xor, + .bool_and, + .bool_or, + .div, + .equal_equal, + .greater_or_equal, + .greater_than, + .less_or_equal, + .less_than, + .merge_error_sets, + .mod, + .mul, + .mul_wrap, + .sub, + .sub_wrap, + .@"orelse", => { const infix = datas[node]; try renderExpression(ais, tree, infix.lhs, .Space); @@ -353,75 +353,75 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac return renderExpression(ais, tree, infix.rhs, space); }, - .BitNot, - .BoolNot, - .Negation, - .NegationWrap, - .OptionalType, - .AddressOf, + .bit_not, + .bool_not, + .negation, + .negation_wrap, + .optional_type, + .address_of, => { try renderToken(ais, tree, main_tokens[node], .None); return 
renderExpression(ais, tree, datas[node].lhs, space); }, - .Try, - .Resume, - .Await, + .@"try", + .@"resume", + .@"await", => { try renderToken(ais, tree, main_tokens[node], .Space); return renderExpression(ais, tree, datas[node].lhs, space); }, - .ArrayType => return renderArrayType(ais, tree, tree.arrayType(node), space), - .ArrayTypeSentinel => return renderArrayType(ais, tree, tree.arrayTypeSentinel(node), space), + .array_type => return renderArrayType(ais, tree, tree.arrayType(node), space), + .array_type_sentinel => return renderArrayType(ais, tree, tree.arrayTypeSentinel(node), space), - .PtrTypeAligned => return renderPtrType(ais, tree, tree.ptrTypeAligned(node), space), - .PtrTypeSentinel => return renderPtrType(ais, tree, tree.ptrTypeSentinel(node), space), - .PtrType => return renderPtrType(ais, tree, tree.ptrType(node), space), - .PtrTypeBitRange => return renderPtrType(ais, tree, tree.ptrTypeBitRange(node), space), + .ptr_type_aligned => return renderPtrType(ais, tree, tree.ptrTypeAligned(node), space), + .ptr_type_sentinel => return renderPtrType(ais, tree, tree.ptrTypeSentinel(node), space), + .ptr_type => return renderPtrType(ais, tree, tree.ptrType(node), space), + .ptr_type_bit_range => return renderPtrType(ais, tree, tree.ptrTypeBitRange(node), space), - .ArrayInitOne, .ArrayInitOneComma => { + .array_init_one, .array_init_one_comma => { var elements: [1]ast.Node.Index = undefined; return renderArrayInit(ais, tree, tree.arrayInitOne(&elements, node), space); }, - .ArrayInitDotTwo, .ArrayInitDotTwoComma => { + .array_init_dot_two, .array_init_dot_two_comma => { var elements: [2]ast.Node.Index = undefined; return renderArrayInit(ais, tree, tree.arrayInitDotTwo(&elements, node), space); }, - .ArrayInitDot, - .ArrayInitDotComma, + .array_init_dot, + .array_init_dot_comma, => return renderArrayInit(ais, tree, tree.arrayInitDot(node), space), - .ArrayInit, - .ArrayInitComma, + .array_init, + .array_init_comma, => return renderArrayInit(ais, tree, 
tree.arrayInit(node), space), - .StructInitOne, .StructInitOneComma => { + .struct_init_one, .struct_init_one_comma => { var fields: [1]ast.Node.Index = undefined; return renderStructInit(ais, tree, tree.structInitOne(&fields, node), space); }, - .StructInitDotTwo, .StructInitDotTwoComma => { + .struct_init_dot_two, .struct_init_dot_two_comma => { var fields: [2]ast.Node.Index = undefined; return renderStructInit(ais, tree, tree.structInitDotTwo(&fields, node), space); }, - .StructInitDot, - .StructInitDotComma, + .struct_init_dot, + .struct_init_dot_comma, => return renderStructInit(ais, tree, tree.structInitDot(node), space), - .StructInit, - .StructInitComma, + .struct_init, + .struct_init_comma, => return renderStructInit(ais, tree, tree.structInit(node), space), - .CallOne, .CallOneComma, .AsyncCallOne, .AsyncCallOneComma => { + .call_one, .call_one_comma, .async_call_one, .async_call_one_comma => { var params: [1]ast.Node.Index = undefined; return renderCall(ais, tree, tree.callOne(¶ms, node), space); }, - .Call, - .CallComma, - .AsyncCall, - .AsyncCallComma, + .call, + .call_comma, + .async_call, + .async_call_comma, => return renderCall(ais, tree, tree.callFull(node), space), - .ArrayAccess => { + .array_access => { const suffix = datas[node]; const lbracket = tree.firstToken(suffix.rhs) - 1; const rbracket = tree.lastToken(suffix.rhs) + 1; @@ -431,22 +431,22 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac return renderToken(ais, tree, rbracket, space); // ] }, - .SliceOpen => try renderSlice(ais, tree, tree.sliceOpen(node), space), - .Slice => try renderSlice(ais, tree, tree.slice(node), space), - .SliceSentinel => try renderSlice(ais, tree, tree.sliceSentinel(node), space), + .slice_open => try renderSlice(ais, tree, tree.sliceOpen(node), space), + .slice => try renderSlice(ais, tree, tree.slice(node), space), + .slice_sentinel => try renderSlice(ais, tree, tree.sliceSentinel(node), space), - .Deref => { + .deref => { 
try renderExpression(ais, tree, datas[node].lhs, .None); return renderToken(ais, tree, main_tokens[node], space); }, - .UnwrapOptional => { + .unwrap_optional => { try renderExpression(ais, tree, datas[node].lhs, .None); try renderToken(ais, tree, main_tokens[node], .None); return renderToken(ais, tree, datas[node].rhs, space); }, - .Break => { + .@"break" => { const main_token = main_tokens[node]; const label_token = datas[node].lhs; const target = datas[node].rhs; @@ -467,7 +467,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac } }, - .Continue => { + .@"continue" => { const main_token = main_tokens[node]; const label = datas[node].lhs; if (label != 0) { @@ -479,7 +479,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac } }, - .Return => { + .@"return" => { if (datas[node].lhs != 0) { try renderToken(ais, tree, main_tokens[node], .Space); try renderExpression(ais, tree, datas[node].lhs, space); @@ -488,7 +488,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac } }, - .GroupedExpression => { + .grouped_expression => { ais.pushIndentNextLine(); try renderToken(ais, tree, main_tokens[node], .None); // lparen try renderExpression(ais, tree, datas[node].lhs, .None); @@ -496,32 +496,32 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac return renderToken(ais, tree, datas[node].rhs, space); // rparen }, - .ContainerDecl, - .ContainerDeclComma, + .container_decl, + .container_decl_comma, => return renderContainerDecl(ais, tree, tree.containerDecl(node), space), - .ContainerDeclTwo, .ContainerDeclTwoComma => { + .container_decl_two, .container_decl_two_comma => { var buffer: [2]ast.Node.Index = undefined; return renderContainerDecl(ais, tree, tree.containerDeclTwo(&buffer, node), space); }, - .ContainerDeclArg, - .ContainerDeclArgComma, + .container_decl_arg, + .container_decl_arg_comma, => return renderContainerDecl(ais, tree, 
tree.containerDeclArg(node), space), - .TaggedUnion, - .TaggedUnionComma, + .tagged_union, + .tagged_union_comma, => return renderContainerDecl(ais, tree, tree.taggedUnion(node), space), - .TaggedUnionTwo, .TaggedUnionTwoComma => { + .tagged_union_two, .tagged_union_two_comma => { var buffer: [2]ast.Node.Index = undefined; return renderContainerDecl(ais, tree, tree.taggedUnionTwo(&buffer, node), space); }, - .TaggedUnionEnumTag, - .TaggedUnionEnumTagComma, + .tagged_union_enum_tag, + .tagged_union_enum_tag_comma, => return renderContainerDecl(ais, tree, tree.taggedUnionEnumTag(node), space), // TODO: handle comments properly - .ErrorSetDecl => { + .error_set_decl => { const error_token = main_tokens[node]; const lbrace = error_token + 1; const rbrace = datas[node].rhs; @@ -569,7 +569,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac } }, - .BuiltinCallTwo, .BuiltinCallTwoComma => { + .builtin_call_two, .builtin_call_two_comma => { if (datas[node].lhs == 0) { const params = [_]ast.Node.Index{}; return renderBuiltinCall(ais, tree, main_tokens[node], ¶ms, space); @@ -581,23 +581,23 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac return renderBuiltinCall(ais, tree, main_tokens[node], ¶ms, space); } }, - .BuiltinCall, .BuiltinCallComma => { + .builtin_call, .builtin_call_comma => { const params = tree.extra_data[datas[node].lhs..datas[node].rhs]; return renderBuiltinCall(ais, tree, main_tokens[node], params, space); }, - .FnProtoSimple => { + .fn_proto_simple => { var params: [1]ast.Node.Index = undefined; return renderFnProto(ais, tree, tree.fnProtoSimple(¶ms, node), space); }, - .FnProtoMulti => return renderFnProto(ais, tree, tree.fnProtoMulti(node), space), - .FnProtoOne => { + .fn_proto_multi => return renderFnProto(ais, tree, tree.fnProtoMulti(node), space), + .fn_proto_one => { var params: [1]ast.Node.Index = undefined; return renderFnProto(ais, tree, tree.fnProtoOne(¶ms, node), space); }, - 
.FnProto => return renderFnProto(ais, tree, tree.fnProto(node), space), + .fn_proto => return renderFnProto(ais, tree, tree.fnProto(node), space), - .AnyFrameType => { + .anyframe_type => { const main_token = main_tokens[node]; if (datas[node].rhs != 0) { try renderToken(ais, tree, main_token, .None); // anyframe @@ -608,8 +608,8 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac } }, - .Switch, - .SwitchComma, + .@"switch", + .switch_comma, => { const switch_token = main_tokens[node]; const condition = datas[node].lhs; @@ -635,39 +635,39 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac return renderToken(ais, tree, tree.lastToken(node), space); // rbrace }, - .SwitchCaseOne => return renderSwitchCase(ais, tree, tree.switchCaseOne(node), space), - .SwitchCase => return renderSwitchCase(ais, tree, tree.switchCase(node), space), + .switch_case_one => return renderSwitchCase(ais, tree, tree.switchCaseOne(node), space), + .switch_case => return renderSwitchCase(ais, tree, tree.switchCase(node), space), - .WhileSimple => return renderWhile(ais, tree, tree.whileSimple(node), space), - .WhileCont => return renderWhile(ais, tree, tree.whileCont(node), space), - .While => return renderWhile(ais, tree, tree.whileFull(node), space), - .ForSimple => return renderWhile(ais, tree, tree.forSimple(node), space), - .For => return renderWhile(ais, tree, tree.forFull(node), space), + .while_simple => return renderWhile(ais, tree, tree.whileSimple(node), space), + .while_cont => return renderWhile(ais, tree, tree.whileCont(node), space), + .@"while" => return renderWhile(ais, tree, tree.whileFull(node), space), + .for_simple => return renderWhile(ais, tree, tree.forSimple(node), space), + .@"for" => return renderWhile(ais, tree, tree.forFull(node), space), - .IfSimple => return renderIf(ais, tree, tree.ifSimple(node), space), - .If => return renderIf(ais, tree, tree.ifFull(node), space), + .if_simple => return 
renderIf(ais, tree, tree.ifSimple(node), space), + .@"if" => return renderIf(ais, tree, tree.ifFull(node), space), - .AsmSimple => return renderAsm(ais, tree, tree.asmSimple(node), space), - .Asm => return renderAsm(ais, tree, tree.asmFull(node), space), + .asm_simple => return renderAsm(ais, tree, tree.asmSimple(node), space), + .@"asm" => return renderAsm(ais, tree, tree.asmFull(node), space), - .EnumLiteral => { + .enum_literal => { try renderToken(ais, tree, main_tokens[node] - 1, .None); // . return renderToken(ais, tree, main_tokens[node], space); // name }, - .FnDecl => unreachable, - .ContainerField => unreachable, - .ContainerFieldInit => unreachable, - .ContainerFieldAlign => unreachable, - .Root => unreachable, - .GlobalVarDecl => unreachable, - .LocalVarDecl => unreachable, - .SimpleVarDecl => unreachable, - .AlignedVarDecl => unreachable, - .UsingNamespace => unreachable, - .TestDecl => unreachable, - .AsmOutput => unreachable, - .AsmInput => unreachable, + .fn_decl => unreachable, + .container_field => unreachable, + .container_field_init => unreachable, + .container_field_align => unreachable, + .root => unreachable, + .global_var_decl => unreachable, + .local_var_decl => unreachable, + .simple_var_decl => unreachable, + .aligned_var_decl => unreachable, + .@"usingnamespace" => unreachable, + .test_decl => unreachable, + .asm_output => unreachable, + .asm_input => unreachable, } } @@ -814,7 +814,7 @@ fn renderAsmOutput( const node_tags = tree.nodes.items(.tag); const main_tokens = tree.nodes.items(.main_token); const datas = tree.nodes.items(.data); - assert(node_tags[asm_output] == .AsmOutput); + assert(node_tags[asm_output] == .asm_output); const symbolic_name = main_tokens[asm_output]; try renderToken(ais, tree, symbolic_name - 1, .None); // lbracket @@ -842,7 +842,7 @@ fn renderAsmInput( const node_tags = tree.nodes.items(.tag); const main_tokens = tree.nodes.items(.main_token); const datas = tree.nodes.items(.data); - assert(node_tags[asm_input] 
== .AsmInput); + assert(node_tags[asm_input] == .asm_input); const symbolic_name = main_tokens[asm_input]; try renderToken(ais, tree, symbolic_name - 1, .None); // lbracket @@ -1516,10 +1516,10 @@ fn renderBlock( try renderToken(ais, tree, lbrace, .Newline); for (statements) |stmt, i| { switch (node_tags[stmt]) { - .GlobalVarDecl => try renderVarDecl(ais, tree, tree.globalVarDecl(stmt)), - .LocalVarDecl => try renderVarDecl(ais, tree, tree.localVarDecl(stmt)), - .SimpleVarDecl => try renderVarDecl(ais, tree, tree.simpleVarDecl(stmt)), - .AlignedVarDecl => try renderVarDecl(ais, tree, tree.alignedVarDecl(stmt)), + .global_var_decl => try renderVarDecl(ais, tree, tree.globalVarDecl(stmt)), + .local_var_decl => try renderVarDecl(ais, tree, tree.localVarDecl(stmt)), + .simple_var_decl => try renderVarDecl(ais, tree, tree.simpleVarDecl(stmt)), + .aligned_var_decl => try renderVarDecl(ais, tree, tree.alignedVarDecl(stmt)), else => try renderExpression(ais, tree, stmt, .Semicolon), } } @@ -1867,7 +1867,7 @@ fn renderCall( try renderExpression(ais, tree, param_node, Space.None); // Unindent the comma for multiline string literals - const is_multiline_string = node_tags[param_node] == .StringLiteral and + const is_multiline_string = node_tags[param_node] == .string_literal and token_tags[main_tokens[param_node]] == .multiline_string_literal_line; if (is_multiline_string) ais.popIndent(); @@ -2031,19 +2031,19 @@ fn renderDocComments(ais: *Ais, tree: ast.Tree, end_token: ast.TokenIndex) Error fn nodeIsBlock(tag: ast.Node.Tag) bool { return switch (tag) { - .Block, - .BlockSemicolon, - .BlockTwo, - .BlockTwoSemicolon, - .If, - .IfSimple, - .For, - .ForSimple, - .While, - .WhileSimple, - .WhileCont, - .Switch, - .SwitchComma, + .block, + .block_semicolon, + .block_two, + .block_two_semicolon, + .@"if", + .if_simple, + .@"for", + .for_simple, + .@"while", + .while_simple, + .while_cont, + .@"switch", + .switch_comma, => true, else => false, }; @@ -2051,47 +2051,47 @@ fn 
nodeIsBlock(tag: ast.Node.Tag) bool { fn nodeCausesSliceOpSpace(tag: ast.Node.Tag) bool { return switch (tag) { - .Catch, - .Add, - .AddWrap, - .ArrayCat, - .ArrayMult, - .Assign, - .AssignBitAnd, - .AssignBitOr, - .AssignBitShiftLeft, - .AssignBitShiftRight, - .AssignBitXor, - .AssignDiv, - .AssignSub, - .AssignSubWrap, - .AssignMod, - .AssignAdd, - .AssignAddWrap, - .AssignMul, - .AssignMulWrap, - .BangEqual, - .BitAnd, - .BitOr, - .BitShiftLeft, - .BitShiftRight, - .BitXor, - .BoolAnd, - .BoolOr, - .Div, - .EqualEqual, - .ErrorUnion, - .GreaterOrEqual, - .GreaterThan, - .LessOrEqual, - .LessThan, - .MergeErrorSets, - .Mod, - .Mul, - .MulWrap, - .Sub, - .SubWrap, - .OrElse, + .@"catch", + .add, + .add_wrap, + .array_cat, + .array_mult, + .assign, + .assign_bit_and, + .assign_bit_or, + .assign_bit_shift_left, + .assign_bit_shift_right, + .assign_bit_xor, + .assign_div, + .assign_sub, + .assign_sub_wrap, + .assign_mod, + .assign_add, + .assign_add_wrap, + .assign_mul, + .assign_mul_wrap, + .bang_equal, + .bit_and, + .bit_or, + .bit_shift_left, + .bit_shift_right, + .bit_xor, + .bool_and, + .bool_or, + .div, + .equal_equal, + .error_union, + .greater_or_equal, + .greater_than, + .less_or_equal, + .less_than, + .merge_error_sets, + .mod, + .mul, + .mul_wrap, + .sub, + .sub_wrap, + .@"orelse", => true, else => false, From 288e18059815f8c23f850dcc5cb3b2880f029ae9 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Fri, 12 Feb 2021 01:40:13 +0200 Subject: [PATCH 054/173] snake_case render.Space --- lib/std/zig/render.zig | 772 ++++++++++++++++++++--------------------- 1 file changed, 386 insertions(+), 386 deletions(-) diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 1510409ae1..8f98acc20c 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -86,7 +86,7 @@ fn renderRoot(ais: *Ais, tree: ast.Tree) Error!void { const root_decls = tree.extra_data[nodes_data[0].lhs..nodes_data[0].rhs]; for (root_decls) |decl| { - try renderMember(ais, tree, 
decl, .Newline); + try renderMember(ais, tree, decl, .newline); } } @@ -122,13 +122,13 @@ fn renderMember(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) E } } while (i < fn_token) : (i += 1) { - try renderToken(ais, tree, i, .Space); + try renderToken(ais, tree, i, .space); } if (datas[decl].rhs != 0) { - try renderExpression(ais, tree, fn_proto, .Space); + try renderExpression(ais, tree, fn_proto, .space); return renderExpression(ais, tree, datas[decl].rhs, space); } else { - try renderExpression(ais, tree, fn_proto, .None); + try renderExpression(ais, tree, fn_proto, .none); return renderToken(ais, tree, tree.lastToken(fn_proto) + 1, space); // semicolon } }, @@ -137,7 +137,7 @@ fn renderMember(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) E .fn_proto_one, .fn_proto, => { - try renderExpression(ais, tree, decl, .None); + try renderExpression(ais, tree, decl, .none); return renderToken(ais, tree, tree.lastToken(decl) + 1, space); // semicolon }, @@ -145,10 +145,10 @@ fn renderMember(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) E const main_token = main_tokens[decl]; const expr = datas[decl].lhs; if (main_token > 0 and token_tags[main_token - 1] == .keyword_pub) { - try renderToken(ais, tree, main_token - 1, .Space); // pub + try renderToken(ais, tree, main_token - 1, .space); // pub } - try renderToken(ais, tree, main_token, .Space); // usingnamespace - try renderExpression(ais, tree, expr, .None); + try renderToken(ais, tree, main_token, .space); // usingnamespace + try renderExpression(ais, tree, expr, .none); return renderToken(ais, tree, tree.lastToken(expr) + 1, space); // ; }, @@ -159,9 +159,9 @@ fn renderMember(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) E .test_decl => { const test_token = main_tokens[decl]; - try renderToken(ais, tree, test_token, .Space); + try renderToken(ais, tree, test_token, .space); if (token_tags[test_token + 1] == .string_literal) { - try renderToken(ais, tree, 
test_token + 1, .Space); + try renderToken(ais, tree, test_token + 1, .space); } try renderExpression(ais, tree, datas[decl].rhs, space); }, @@ -196,8 +196,8 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac => return renderToken(ais, tree, main_tokens[node], space), .error_value => { - try renderToken(ais, tree, main_tokens[node], .None); - try renderToken(ais, tree, main_tokens[node] + 1, .None); + try renderToken(ais, tree, main_tokens[node], .none); + try renderToken(ais, tree, main_tokens[node] + 1, .none); return renderToken(ais, tree, main_tokens[node] + 2, space); }, @@ -227,11 +227,11 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac const payload_token = datas[node].lhs; const expr = datas[node].rhs; - try renderToken(ais, tree, defer_token, .Space); + try renderToken(ais, tree, defer_token, .space); if (payload_token != 0) { - try renderToken(ais, tree, payload_token - 1, .None); // | - try renderToken(ais, tree, payload_token, .None); // identifier - try renderToken(ais, tree, payload_token + 1, .Space); // | + try renderToken(ais, tree, payload_token - 1, .none); // | + try renderToken(ais, tree, payload_token, .none); // identifier + try renderToken(ais, tree, payload_token + 1, .space); // | } return renderExpression(ais, tree, expr, space); }, @@ -239,13 +239,13 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac .@"defer" => { const defer_token = main_tokens[node]; const expr = datas[node].rhs; - try renderToken(ais, tree, defer_token, .Space); + try renderToken(ais, tree, defer_token, .space); return renderExpression(ais, tree, expr, space); }, .@"comptime", .@"nosuspend" => { const comptime_token = main_tokens[node]; const block = datas[node].lhs; - try renderToken(ais, tree, comptime_token, .Space); + try renderToken(ais, tree, comptime_token, .space); return renderExpression(ais, tree, block, space); }, @@ -253,7 +253,7 @@ fn 
renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac const suspend_token = main_tokens[node]; const body = datas[node].lhs; if (body != 0) { - try renderToken(ais, tree, suspend_token, .Space); + try renderToken(ais, tree, suspend_token, .space); return renderExpression(ais, tree, body, space); } else { return renderToken(ais, tree, suspend_token, space); @@ -265,14 +265,14 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac const fallback_first = tree.firstToken(datas[node].rhs); const same_line = tree.tokensOnSameLine(main_token, fallback_first); - const after_op_space = if (same_line) Space.Space else Space.Newline; + const after_op_space = if (same_line) Space.space else Space.newline; - try renderExpression(ais, tree, datas[node].lhs, .Space); // target + try renderExpression(ais, tree, datas[node].lhs, .space); // target if (token_tags[fallback_first - 1] == .pipe) { - try renderToken(ais, tree, main_token, .Space); // catch keyword - try renderToken(ais, tree, main_token + 1, .None); // pipe - try renderToken(ais, tree, main_token + 2, .None); // payload identifier + try renderToken(ais, tree, main_token, .space); // catch keyword + try renderToken(ais, tree, main_token + 1, .none); // pipe + try renderToken(ais, tree, main_token + 2, .none); // payload identifier try renderToken(ais, tree, main_token + 3, after_op_space); // pipe } else { assert(token_tags[fallback_first - 1] == .keyword_catch); @@ -285,8 +285,8 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac .field_access => { const field_access = datas[node]; - try renderExpression(ais, tree, field_access.lhs, .None); - try renderToken(ais, tree, main_tokens[node], .None); + try renderExpression(ais, tree, field_access.lhs, .none); + try renderToken(ais, tree, main_tokens[node], .none); return renderToken(ais, tree, field_access.rhs, space); }, @@ -294,8 +294,8 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, 
node: ast.Node.Index, space: Spac .switch_range, => { const infix = datas[node]; - try renderExpression(ais, tree, infix.lhs, .None); - try renderToken(ais, tree, main_tokens[node], .None); + try renderExpression(ais, tree, infix.lhs, .none); + try renderToken(ais, tree, main_tokens[node], .none); return renderExpression(ais, tree, infix.rhs, space); }, @@ -340,13 +340,13 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac .@"orelse", => { const infix = datas[node]; - try renderExpression(ais, tree, infix.lhs, .Space); + try renderExpression(ais, tree, infix.lhs, .space); const op_token = main_tokens[node]; if (tree.tokensOnSameLine(op_token, op_token + 1)) { - try renderToken(ais, tree, op_token, .Space); + try renderToken(ais, tree, op_token, .space); } else { ais.pushIndent(); - try renderToken(ais, tree, op_token, .Newline); + try renderToken(ais, tree, op_token, .newline); ais.popIndent(); ais.pushIndentOneShot(); } @@ -360,7 +360,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac .optional_type, .address_of, => { - try renderToken(ais, tree, main_tokens[node], .None); + try renderToken(ais, tree, main_tokens[node], .none); return renderExpression(ais, tree, datas[node].lhs, space); }, @@ -368,7 +368,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac .@"resume", .@"await", => { - try renderToken(ais, tree, main_tokens[node], .Space); + try renderToken(ais, tree, main_tokens[node], .space); return renderExpression(ais, tree, datas[node].lhs, space); }, @@ -425,9 +425,9 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac const suffix = datas[node]; const lbracket = tree.firstToken(suffix.rhs) - 1; const rbracket = tree.lastToken(suffix.rhs) + 1; - try renderExpression(ais, tree, suffix.lhs, .None); - try renderToken(ais, tree, lbracket, .None); // [ - try renderExpression(ais, tree, suffix.rhs, .None); + try renderExpression(ais, 
tree, suffix.lhs, .none); + try renderToken(ais, tree, lbracket, .none); // [ + try renderExpression(ais, tree, suffix.rhs, .none); return renderToken(ais, tree, rbracket, space); // ] }, @@ -436,13 +436,13 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac .slice_sentinel => try renderSlice(ais, tree, tree.sliceSentinel(node), space), .deref => { - try renderExpression(ais, tree, datas[node].lhs, .None); + try renderExpression(ais, tree, datas[node].lhs, .none); return renderToken(ais, tree, main_tokens[node], space); }, .unwrap_optional => { - try renderExpression(ais, tree, datas[node].lhs, .None); - try renderToken(ais, tree, main_tokens[node], .None); + try renderExpression(ais, tree, datas[node].lhs, .none); + try renderToken(ais, tree, main_tokens[node], .none); return renderToken(ais, tree, datas[node].rhs, space); }, @@ -453,16 +453,16 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac if (label_token == 0 and target == 0) { try renderToken(ais, tree, main_token, space); // break keyword } else if (label_token == 0 and target != 0) { - try renderToken(ais, tree, main_token, .Space); // break keyword + try renderToken(ais, tree, main_token, .space); // break keyword try renderExpression(ais, tree, target, space); } else if (label_token != 0 and target == 0) { - try renderToken(ais, tree, main_token, .Space); // break keyword - try renderToken(ais, tree, label_token - 1, .None); // colon + try renderToken(ais, tree, main_token, .space); // break keyword + try renderToken(ais, tree, label_token - 1, .none); // colon try renderToken(ais, tree, label_token, space); // identifier } else if (label_token != 0 and target != 0) { - try renderToken(ais, tree, main_token, .Space); // break keyword - try renderToken(ais, tree, label_token - 1, .None); // colon - try renderToken(ais, tree, label_token, .Space); // identifier + try renderToken(ais, tree, main_token, .space); // break keyword + try 
renderToken(ais, tree, label_token - 1, .none); // colon + try renderToken(ais, tree, label_token, .space); // identifier try renderExpression(ais, tree, target, space); } }, @@ -471,8 +471,8 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac const main_token = main_tokens[node]; const label = datas[node].lhs; if (label != 0) { - try renderToken(ais, tree, main_token, .Space); // continue - try renderToken(ais, tree, label - 1, .None); // : + try renderToken(ais, tree, main_token, .space); // continue + try renderToken(ais, tree, label - 1, .none); // : return renderToken(ais, tree, label, space); // label } else { return renderToken(ais, tree, main_token, space); // continue @@ -481,7 +481,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac .@"return" => { if (datas[node].lhs != 0) { - try renderToken(ais, tree, main_tokens[node], .Space); + try renderToken(ais, tree, main_tokens[node], .space); try renderExpression(ais, tree, datas[node].lhs, space); } else { try renderToken(ais, tree, main_tokens[node], space); @@ -490,8 +490,8 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac .grouped_expression => { ais.pushIndentNextLine(); - try renderToken(ais, tree, main_tokens[node], .None); // lparen - try renderExpression(ais, tree, datas[node].lhs, .None); + try renderToken(ais, tree, main_tokens[node], .none); // lparen + try renderExpression(ais, tree, datas[node].lhs, .none); ais.popIndent(); return renderToken(ais, tree, datas[node].rhs, space); // rparen }, @@ -526,27 +526,27 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac const lbrace = error_token + 1; const rbrace = datas[node].rhs; - try renderToken(ais, tree, error_token, .None); + try renderToken(ais, tree, error_token, .none); if (lbrace + 1 == rbrace) { // There is nothing between the braces so render condensed: `error{}` - try renderToken(ais, tree, lbrace, .None); + try 
renderToken(ais, tree, lbrace, .none); return renderToken(ais, tree, rbrace, space); } else if (lbrace + 2 == rbrace and token_tags[lbrace + 1] == .identifier) { // There is exactly one member and no trailing comma or // comments, so render without surrounding spaces: `error{Foo}` - try renderToken(ais, tree, lbrace, .None); - try renderToken(ais, tree, lbrace + 1, .None); // identifier + try renderToken(ais, tree, lbrace, .none); + try renderToken(ais, tree, lbrace + 1, .none); // identifier return renderToken(ais, tree, rbrace, space); } else if (token_tags[rbrace - 1] == .comma) { // There is a trailing comma so render each member on a new line. - try renderToken(ais, tree, lbrace, .Newline); + try renderToken(ais, tree, lbrace, .newline); ais.pushIndent(); var i = lbrace + 1; while (i < rbrace) : (i += 1) { switch (token_tags[i]) { - .doc_comment => try renderToken(ais, tree, i, .Newline), - .identifier => try renderToken(ais, tree, i, .Comma), + .doc_comment => try renderToken(ais, tree, i, .newline), + .identifier => try renderToken(ais, tree, i, .comma), .comma => {}, else => unreachable, } @@ -555,12 +555,12 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac return renderToken(ais, tree, rbrace, space); } else { // There is no trailing comma so render everything on one line. 
- try renderToken(ais, tree, lbrace, .Space); + try renderToken(ais, tree, lbrace, .space); var i = lbrace + 1; while (i < rbrace) : (i += 1) { switch (token_tags[i]) { .doc_comment => unreachable, // TODO - .identifier => try renderToken(ais, tree, i, .CommaSpace), + .identifier => try renderToken(ais, tree, i, .comma_space), .comma => {}, else => unreachable, } @@ -600,8 +600,8 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac .anyframe_type => { const main_token = main_tokens[node]; if (datas[node].rhs != 0) { - try renderToken(ais, tree, main_token, .None); // anyframe - try renderToken(ais, tree, main_token + 1, .None); // -> + try renderToken(ais, tree, main_token, .none); // anyframe + try renderToken(ais, tree, main_token + 1, .none); // -> return renderExpression(ais, tree, datas[node].rhs, space); } else { return renderToken(ais, tree, main_token, space); // anyframe @@ -617,19 +617,19 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac const cases = tree.extra_data[extra.start..extra.end]; const rparen = tree.lastToken(condition) + 1; - try renderToken(ais, tree, switch_token, .Space); // switch keyword - try renderToken(ais, tree, switch_token + 1, .None); // lparen - try renderExpression(ais, tree, condition, .None); // condtion expression - try renderToken(ais, tree, rparen, .Space); // rparen + try renderToken(ais, tree, switch_token, .space); // switch keyword + try renderToken(ais, tree, switch_token + 1, .none); // lparen + try renderExpression(ais, tree, condition, .none); // condtion expression + try renderToken(ais, tree, rparen, .space); // rparen if (cases.len == 0) { - try renderToken(ais, tree, rparen + 1, .None); // lbrace + try renderToken(ais, tree, rparen + 1, .none); // lbrace return renderToken(ais, tree, rparen + 2, space); // rbrace } ais.pushIndentNextLine(); - try renderToken(ais, tree, rparen + 1, .Newline); // lbrace + try renderToken(ais, tree, rparen + 1, 
.newline); // lbrace for (cases) |case| { - try renderExpression(ais, tree, case, .Comma); + try renderExpression(ais, tree, case, .comma); } ais.popIndent(); return renderToken(ais, tree, tree.lastToken(node), space); // rbrace @@ -651,7 +651,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac .@"asm" => return renderAsm(ais, tree, tree.asmFull(node), space), .enum_literal => { - try renderToken(ais, tree, main_tokens[node] - 1, .None); // . + try renderToken(ais, tree, main_tokens[node] - 1, .none); // . return renderToken(ais, tree, main_tokens[node], space); // name }, @@ -678,13 +678,13 @@ fn renderArrayType( array_type: ast.full.ArrayType, space: Space, ) Error!void { - try renderToken(ais, tree, array_type.ast.lbracket, .None); // lbracket - try renderExpression(ais, tree, array_type.ast.elem_count, .None); + try renderToken(ais, tree, array_type.ast.lbracket, .none); // lbracket + try renderExpression(ais, tree, array_type.ast.elem_count, .none); if (array_type.ast.sentinel) |sentinel| { - try renderToken(ais, tree, tree.firstToken(sentinel) - 1, .None); // colon - try renderExpression(ais, tree, sentinel, .None); + try renderToken(ais, tree, tree.firstToken(sentinel) - 1, .none); // colon + try renderExpression(ais, tree, sentinel, .none); } - try renderToken(ais, tree, tree.firstToken(array_type.ast.elem_type) - 1, .None); // rbracket + try renderToken(ais, tree, tree.firstToken(array_type.ast.elem_type) - 1, .none); // rbracket return renderExpression(ais, tree, array_type.ast.elem_type, space); } @@ -706,65 +706,65 @@ fn renderPtrType( { return renderExpression(ais, tree, ptr_type.ast.child_type, space); } - try renderToken(ais, tree, ptr_type.ast.main_token, .None); // asterisk + try renderToken(ais, tree, ptr_type.ast.main_token, .none); // asterisk }, .many => { - try renderToken(ais, tree, ptr_type.ast.main_token - 1, .None); // lbracket - try renderToken(ais, tree, ptr_type.ast.main_token, .None); // asterisk - try 
renderToken(ais, tree, ptr_type.ast.main_token + 1, .None); // rbracket + try renderToken(ais, tree, ptr_type.ast.main_token - 1, .none); // lbracket + try renderToken(ais, tree, ptr_type.ast.main_token, .none); // asterisk + try renderToken(ais, tree, ptr_type.ast.main_token + 1, .none); // rbracket }, .sentinel => { - try renderToken(ais, tree, ptr_type.ast.main_token - 1, .None); // lbracket - try renderToken(ais, tree, ptr_type.ast.main_token, .None); // asterisk - try renderToken(ais, tree, ptr_type.ast.main_token + 1, .None); // colon - try renderExpression(ais, tree, ptr_type.ast.sentinel, .None); - try renderToken(ais, tree, tree.lastToken(ptr_type.ast.sentinel) + 1, .None); // rbracket + try renderToken(ais, tree, ptr_type.ast.main_token - 1, .none); // lbracket + try renderToken(ais, tree, ptr_type.ast.main_token, .none); // asterisk + try renderToken(ais, tree, ptr_type.ast.main_token + 1, .none); // colon + try renderExpression(ais, tree, ptr_type.ast.sentinel, .none); + try renderToken(ais, tree, tree.lastToken(ptr_type.ast.sentinel) + 1, .none); // rbracket }, .c => { - try renderToken(ais, tree, ptr_type.ast.main_token - 1, .None); // lbracket - try renderToken(ais, tree, ptr_type.ast.main_token, .None); // asterisk - try renderToken(ais, tree, ptr_type.ast.main_token + 1, .None); // c - try renderToken(ais, tree, ptr_type.ast.main_token + 2, .None); // rbracket + try renderToken(ais, tree, ptr_type.ast.main_token - 1, .none); // lbracket + try renderToken(ais, tree, ptr_type.ast.main_token, .none); // asterisk + try renderToken(ais, tree, ptr_type.ast.main_token + 1, .none); // c + try renderToken(ais, tree, ptr_type.ast.main_token + 2, .none); // rbracket }, .slice => { - try renderToken(ais, tree, ptr_type.ast.main_token, .None); // lbracket - try renderToken(ais, tree, ptr_type.ast.main_token + 1, .None); // rbracket + try renderToken(ais, tree, ptr_type.ast.main_token, .none); // lbracket + try renderToken(ais, tree, ptr_type.ast.main_token + 1, 
.none); // rbracket }, .slice_sentinel => { - try renderToken(ais, tree, ptr_type.ast.main_token, .None); // lbracket - try renderToken(ais, tree, ptr_type.ast.main_token + 1, .None); // colon - try renderExpression(ais, tree, ptr_type.ast.sentinel, .None); - try renderToken(ais, tree, tree.lastToken(ptr_type.ast.sentinel) + 1, .None); // rbracket + try renderToken(ais, tree, ptr_type.ast.main_token, .none); // lbracket + try renderToken(ais, tree, ptr_type.ast.main_token + 1, .none); // colon + try renderExpression(ais, tree, ptr_type.ast.sentinel, .none); + try renderToken(ais, tree, tree.lastToken(ptr_type.ast.sentinel) + 1, .none); // rbracket }, } if (ptr_type.allowzero_token) |allowzero_token| { - try renderToken(ais, tree, allowzero_token, .Space); + try renderToken(ais, tree, allowzero_token, .space); } if (ptr_type.ast.align_node != 0) { const align_first = tree.firstToken(ptr_type.ast.align_node); - try renderToken(ais, tree, align_first - 2, .None); // align - try renderToken(ais, tree, align_first - 1, .None); // lparen - try renderExpression(ais, tree, ptr_type.ast.align_node, .None); + try renderToken(ais, tree, align_first - 2, .none); // align + try renderToken(ais, tree, align_first - 1, .none); // lparen + try renderExpression(ais, tree, ptr_type.ast.align_node, .none); if (ptr_type.ast.bit_range_start != 0) { assert(ptr_type.ast.bit_range_end != 0); - try renderToken(ais, tree, tree.firstToken(ptr_type.ast.bit_range_start) - 1, .None); // colon - try renderExpression(ais, tree, ptr_type.ast.bit_range_start, .None); - try renderToken(ais, tree, tree.firstToken(ptr_type.ast.bit_range_end) - 1, .None); // colon - try renderExpression(ais, tree, ptr_type.ast.bit_range_end, .None); - try renderToken(ais, tree, tree.lastToken(ptr_type.ast.bit_range_end) + 1, .Space); // rparen + try renderToken(ais, tree, tree.firstToken(ptr_type.ast.bit_range_start) - 1, .none); // colon + try renderExpression(ais, tree, ptr_type.ast.bit_range_start, .none); + try 
renderToken(ais, tree, tree.firstToken(ptr_type.ast.bit_range_end) - 1, .none); // colon + try renderExpression(ais, tree, ptr_type.ast.bit_range_end, .none); + try renderToken(ais, tree, tree.lastToken(ptr_type.ast.bit_range_end) + 1, .space); // rparen } else { - try renderToken(ais, tree, tree.lastToken(ptr_type.ast.align_node) + 1, .Space); // rparen + try renderToken(ais, tree, tree.lastToken(ptr_type.ast.align_node) + 1, .space); // rparen } } if (ptr_type.const_token) |const_token| { - try renderToken(ais, tree, const_token, .Space); + try renderToken(ais, tree, const_token, .space); } if (ptr_type.volatile_token) |volatile_token| { - try renderToken(ais, tree, volatile_token, .Space); + try renderToken(ais, tree, volatile_token, .space); } try renderExpression(ais, tree, ptr_type.ast.child_type, space); @@ -779,11 +779,11 @@ fn renderSlice( const node_tags = tree.nodes.items(.tag); const after_start_space_bool = nodeCausesSliceOpSpace(node_tags[slice.ast.start]) or if (slice.ast.end != 0) nodeCausesSliceOpSpace(node_tags[slice.ast.end]) else false; - const after_start_space = if (after_start_space_bool) Space.Space else Space.None; - const after_dots_space = if (slice.ast.end != 0) after_start_space else Space.None; + const after_start_space = if (after_start_space_bool) Space.space else Space.none; + const after_dots_space = if (slice.ast.end != 0) after_start_space else Space.none; - try renderExpression(ais, tree, slice.ast.sliced, .None); - try renderToken(ais, tree, slice.ast.lbracket, .None); // lbracket + try renderExpression(ais, tree, slice.ast.sliced, .none); + try renderToken(ais, tree, slice.ast.lbracket, .none); // lbracket const start_last = tree.lastToken(slice.ast.start); try renderExpression(ais, tree, slice.ast.start, after_start_space); @@ -793,14 +793,14 @@ fn renderSlice( } const end_last = tree.lastToken(slice.ast.end); - const after_end_space = if (slice.ast.sentinel != 0) Space.Space else Space.None; + const after_end_space = if 
(slice.ast.sentinel != 0) Space.space else Space.none; try renderExpression(ais, tree, slice.ast.end, after_end_space); if (slice.ast.sentinel == 0) { return renderToken(ais, tree, end_last + 1, space); // rbracket } - try renderToken(ais, tree, end_last + 1, .None); // colon - try renderExpression(ais, tree, slice.ast.sentinel, .None); + try renderToken(ais, tree, end_last + 1, .none); // colon + try renderExpression(ais, tree, slice.ast.sentinel, .none); try renderToken(ais, tree, tree.lastToken(slice.ast.sentinel) + 1, space); // rbracket } @@ -817,18 +817,18 @@ fn renderAsmOutput( assert(node_tags[asm_output] == .asm_output); const symbolic_name = main_tokens[asm_output]; - try renderToken(ais, tree, symbolic_name - 1, .None); // lbracket - try renderToken(ais, tree, symbolic_name, .None); // ident - try renderToken(ais, tree, symbolic_name + 1, .Space); // rbracket - try renderToken(ais, tree, symbolic_name + 2, .Space); // "constraint" - try renderToken(ais, tree, symbolic_name + 3, .None); // lparen + try renderToken(ais, tree, symbolic_name - 1, .none); // lbracket + try renderToken(ais, tree, symbolic_name, .none); // ident + try renderToken(ais, tree, symbolic_name + 1, .space); // rbracket + try renderToken(ais, tree, symbolic_name + 2, .space); // "constraint" + try renderToken(ais, tree, symbolic_name + 3, .none); // lparen if (token_tags[symbolic_name + 4] == .arrow) { - try renderToken(ais, tree, symbolic_name + 4, .Space); // -> - try renderExpression(ais, tree, datas[asm_output].lhs, Space.None); + try renderToken(ais, tree, symbolic_name + 4, .space); // -> + try renderExpression(ais, tree, datas[asm_output].lhs, Space.none); return renderToken(ais, tree, datas[asm_output].rhs, space); // rparen } else { - try renderToken(ais, tree, symbolic_name + 4, .None); // ident + try renderToken(ais, tree, symbolic_name + 4, .none); // ident return renderToken(ais, tree, symbolic_name + 5, space); // rparen } } @@ -845,57 +845,57 @@ fn renderAsmInput( 
assert(node_tags[asm_input] == .asm_input); const symbolic_name = main_tokens[asm_input]; - try renderToken(ais, tree, symbolic_name - 1, .None); // lbracket - try renderToken(ais, tree, symbolic_name, .None); // ident - try renderToken(ais, tree, symbolic_name + 1, .Space); // rbracket - try renderToken(ais, tree, symbolic_name + 2, .Space); // "constraint" - try renderToken(ais, tree, symbolic_name + 3, .None); // lparen - try renderExpression(ais, tree, datas[asm_input].lhs, Space.None); + try renderToken(ais, tree, symbolic_name - 1, .none); // lbracket + try renderToken(ais, tree, symbolic_name, .none); // ident + try renderToken(ais, tree, symbolic_name + 1, .space); // rbracket + try renderToken(ais, tree, symbolic_name + 2, .space); // "constraint" + try renderToken(ais, tree, symbolic_name + 3, .none); // lparen + try renderExpression(ais, tree, datas[asm_input].lhs, Space.none); return renderToken(ais, tree, datas[asm_input].rhs, space); // rparen } fn renderVarDecl(ais: *Ais, tree: ast.Tree, var_decl: ast.full.VarDecl) Error!void { if (var_decl.visib_token) |visib_token| { - try renderToken(ais, tree, visib_token, Space.Space); // pub + try renderToken(ais, tree, visib_token, Space.space); // pub } if (var_decl.extern_export_token) |extern_export_token| { - try renderToken(ais, tree, extern_export_token, Space.Space); // extern + try renderToken(ais, tree, extern_export_token, Space.space); // extern if (var_decl.lib_name) |lib_name| { - try renderExpression(ais, tree, lib_name, Space.Space); // "lib" + try renderExpression(ais, tree, lib_name, Space.space); // "lib" } } if (var_decl.threadlocal_token) |thread_local_token| { - try renderToken(ais, tree, thread_local_token, Space.Space); // threadlocal + try renderToken(ais, tree, thread_local_token, Space.space); // threadlocal } if (var_decl.comptime_token) |comptime_token| { - try renderToken(ais, tree, comptime_token, Space.Space); // comptime + try renderToken(ais, tree, comptime_token, Space.space); 
// comptime } - try renderToken(ais, tree, var_decl.ast.mut_token, .Space); // var + try renderToken(ais, tree, var_decl.ast.mut_token, .space); // var const name_space = if (var_decl.ast.type_node == 0 and (var_decl.ast.align_node != 0 or var_decl.ast.section_node != 0 or var_decl.ast.init_node != 0)) - Space.Space + Space.space else - Space.None; + Space.none; try renderToken(ais, tree, var_decl.ast.mut_token + 1, name_space); // name if (var_decl.ast.type_node != 0) { - try renderToken(ais, tree, var_decl.ast.mut_token + 2, Space.Space); // : + try renderToken(ais, tree, var_decl.ast.mut_token + 2, Space.space); // : if (var_decl.ast.align_node != 0 or var_decl.ast.section_node != 0 or var_decl.ast.init_node != 0) { - try renderExpression(ais, tree, var_decl.ast.type_node, .Space); + try renderExpression(ais, tree, var_decl.ast.type_node, .space); } else { - try renderExpression(ais, tree, var_decl.ast.type_node, .None); + try renderExpression(ais, tree, var_decl.ast.type_node, .none); const semicolon = tree.lastToken(var_decl.ast.type_node) + 1; - return renderToken(ais, tree, semicolon, Space.Newline); // ; + return renderToken(ais, tree, semicolon, Space.newline); // ; } } @@ -903,14 +903,14 @@ fn renderVarDecl(ais: *Ais, tree: ast.Tree, var_decl: ast.full.VarDecl) Error!vo const lparen = tree.firstToken(var_decl.ast.align_node) - 1; const align_kw = lparen - 1; const rparen = tree.lastToken(var_decl.ast.align_node) + 1; - try renderToken(ais, tree, align_kw, Space.None); // align - try renderToken(ais, tree, lparen, Space.None); // ( - try renderExpression(ais, tree, var_decl.ast.align_node, Space.None); + try renderToken(ais, tree, align_kw, Space.none); // align + try renderToken(ais, tree, lparen, Space.none); // ( + try renderExpression(ais, tree, var_decl.ast.align_node, Space.none); if (var_decl.ast.section_node != 0 or var_decl.ast.init_node != 0) { - try renderToken(ais, tree, rparen, .Space); // ) + try renderToken(ais, tree, rparen, .space); // ) } 
else { - try renderToken(ais, tree, rparen, .None); // ) - return renderToken(ais, tree, rparen + 1, Space.Newline); // ; + try renderToken(ais, tree, rparen, .none); // ) + return renderToken(ais, tree, rparen + 1, Space.newline); // ; } } @@ -918,27 +918,27 @@ fn renderVarDecl(ais: *Ais, tree: ast.Tree, var_decl: ast.full.VarDecl) Error!vo const lparen = tree.firstToken(var_decl.ast.section_node) - 1; const section_kw = lparen - 1; const rparen = tree.lastToken(var_decl.ast.section_node) + 1; - try renderToken(ais, tree, section_kw, Space.None); // linksection - try renderToken(ais, tree, lparen, Space.None); // ( - try renderExpression(ais, tree, var_decl.ast.section_node, Space.None); + try renderToken(ais, tree, section_kw, Space.none); // linksection + try renderToken(ais, tree, lparen, Space.none); // ( + try renderExpression(ais, tree, var_decl.ast.section_node, Space.none); if (var_decl.ast.init_node != 0) { - try renderToken(ais, tree, rparen, .Space); // ) + try renderToken(ais, tree, rparen, .space); // ) } else { - try renderToken(ais, tree, rparen, .None); // ) - return renderToken(ais, tree, rparen + 1, Space.Newline); // ; + try renderToken(ais, tree, rparen, .none); // ) + return renderToken(ais, tree, rparen + 1, Space.newline); // ; } } assert(var_decl.ast.init_node != 0); const eq_token = tree.firstToken(var_decl.ast.init_node) - 1; - const eq_space: Space = if (tree.tokensOnSameLine(eq_token, eq_token + 1)) .Space else .Newline; + const eq_space: Space = if (tree.tokensOnSameLine(eq_token, eq_token + 1)) .space else .newline; { ais.pushIndent(); try renderToken(ais, tree, eq_token, eq_space); // = ais.popIndent(); } ais.pushIndentOneShot(); - try renderExpression(ais, tree, var_decl.ast.init_node, .Semicolon); + try renderExpression(ais, tree, var_decl.ast.init_node, .semicolon); } fn renderIf(ais: *Ais, tree: ast.Tree, if_node: ast.full.If, space: Space) Error!void { @@ -965,60 +965,60 @@ fn renderWhile(ais: *Ais, tree: ast.Tree, while_node: 
ast.full.While, space: Spa const token_tags = tree.tokens.items(.tag); if (while_node.label_token) |label| { - try renderToken(ais, tree, label, .None); // label - try renderToken(ais, tree, label + 1, .Space); // : + try renderToken(ais, tree, label, .none); // label + try renderToken(ais, tree, label + 1, .space); // : } if (while_node.inline_token) |inline_token| { - try renderToken(ais, tree, inline_token, .Space); // inline + try renderToken(ais, tree, inline_token, .space); // inline } - try renderToken(ais, tree, while_node.ast.while_token, .Space); // if - try renderToken(ais, tree, while_node.ast.while_token + 1, .None); // ( - try renderExpression(ais, tree, while_node.ast.cond_expr, .None); // condition + try renderToken(ais, tree, while_node.ast.while_token, .space); // if + try renderToken(ais, tree, while_node.ast.while_token + 1, .none); // ( + try renderExpression(ais, tree, while_node.ast.cond_expr, .none); // condition if (nodeIsBlock(node_tags[while_node.ast.then_expr])) { if (while_node.payload_token) |payload_token| { - try renderToken(ais, tree, payload_token - 2, .Space); // ) - try renderToken(ais, tree, payload_token - 1, .None); // | + try renderToken(ais, tree, payload_token - 2, .space); // ) + try renderToken(ais, tree, payload_token - 1, .none); // | const ident = blk: { if (token_tags[payload_token] == .asterisk) { - try renderToken(ais, tree, payload_token, .None); // * + try renderToken(ais, tree, payload_token, .none); // * break :blk payload_token + 1; } else { break :blk payload_token; } }; - try renderToken(ais, tree, ident, .None); // identifier + try renderToken(ais, tree, ident, .none); // identifier const pipe = blk: { if (token_tags[ident + 1] == .comma) { - try renderToken(ais, tree, ident + 1, .Space); // , - try renderToken(ais, tree, ident + 2, .None); // index + try renderToken(ais, tree, ident + 1, .space); // , + try renderToken(ais, tree, ident + 2, .none); // index break :blk payload_token + 3; } else { break :blk 
ident + 1; } }; - try renderToken(ais, tree, pipe, .Space); // | + try renderToken(ais, tree, pipe, .space); // | } else { const rparen = tree.lastToken(while_node.ast.cond_expr) + 1; - try renderToken(ais, tree, rparen, .Space); // ) + try renderToken(ais, tree, rparen, .space); // ) } if (while_node.ast.cont_expr != 0) { const rparen = tree.lastToken(while_node.ast.cont_expr) + 1; const lparen = tree.firstToken(while_node.ast.cont_expr) - 1; - try renderToken(ais, tree, lparen - 1, .Space); // : - try renderToken(ais, tree, lparen, .None); // lparen - try renderExpression(ais, tree, while_node.ast.cont_expr, .None); - try renderToken(ais, tree, rparen, .Space); // rparen + try renderToken(ais, tree, lparen - 1, .space); // : + try renderToken(ais, tree, lparen, .none); // lparen + try renderExpression(ais, tree, while_node.ast.cont_expr, .none); + try renderToken(ais, tree, rparen, .space); // rparen } if (while_node.ast.else_expr != 0) { - try renderExpression(ais, tree, while_node.ast.then_expr, Space.Space); - try renderToken(ais, tree, while_node.else_token, .Space); // else + try renderExpression(ais, tree, while_node.ast.then_expr, Space.space); + try renderToken(ais, tree, while_node.else_token, .space); // else if (while_node.error_token) |error_token| { - try renderToken(ais, tree, error_token - 1, .None); // | - try renderToken(ais, tree, error_token, .None); // identifier - try renderToken(ais, tree, error_token + 1, .Space); // | + try renderToken(ais, tree, error_token - 1, .none); // | + try renderToken(ais, tree, error_token, .none); // identifier + try renderToken(ais, tree, error_token + 1, .space); // | } return renderExpression(ais, tree, while_node.ast.else_expr, space); } else { @@ -1032,61 +1032,61 @@ fn renderWhile(ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Spa if (src_has_newline) { if (while_node.payload_token) |payload_token| { - try renderToken(ais, tree, payload_token - 2, .Space); // ) - try renderToken(ais, tree, 
payload_token - 1, .None); // | + try renderToken(ais, tree, payload_token - 2, .space); // ) + try renderToken(ais, tree, payload_token - 1, .none); // | const ident = blk: { if (token_tags[payload_token] == .asterisk) { - try renderToken(ais, tree, payload_token, .None); // * + try renderToken(ais, tree, payload_token, .none); // * break :blk payload_token + 1; } else { break :blk payload_token; } }; - try renderToken(ais, tree, ident, .None); // identifier + try renderToken(ais, tree, ident, .none); // identifier const pipe = blk: { if (token_tags[ident + 1] == .comma) { - try renderToken(ais, tree, ident + 1, .Space); // , - try renderToken(ais, tree, ident + 2, .None); // index + try renderToken(ais, tree, ident + 1, .space); // , + try renderToken(ais, tree, ident + 2, .none); // index break :blk payload_token + 3; } else { break :blk ident + 1; } }; - try renderToken(ais, tree, pipe, .Newline); // | + try renderToken(ais, tree, pipe, .newline); // | } else { ais.pushIndent(); - try renderToken(ais, tree, rparen, .Newline); // ) + try renderToken(ais, tree, rparen, .newline); // ) ais.popIndent(); } if (while_node.ast.cont_expr != 0) { const cont_rparen = tree.lastToken(while_node.ast.cont_expr) + 1; const cont_lparen = tree.firstToken(while_node.ast.cont_expr) - 1; - try renderToken(ais, tree, cont_lparen - 1, .Space); // : - try renderToken(ais, tree, cont_lparen, .None); // lparen - try renderExpression(ais, tree, while_node.ast.cont_expr, .None); - try renderToken(ais, tree, cont_rparen, .Newline); // rparen + try renderToken(ais, tree, cont_lparen - 1, .space); // : + try renderToken(ais, tree, cont_lparen, .none); // lparen + try renderExpression(ais, tree, while_node.ast.cont_expr, .none); + try renderToken(ais, tree, cont_rparen, .newline); // rparen } if (while_node.ast.else_expr != 0) { ais.pushIndent(); - try renderExpression(ais, tree, while_node.ast.then_expr, Space.Newline); + try renderExpression(ais, tree, while_node.ast.then_expr, 
Space.newline); ais.popIndent(); const else_is_block = nodeIsBlock(node_tags[while_node.ast.else_expr]); if (else_is_block) { - try renderToken(ais, tree, while_node.else_token, .Space); // else + try renderToken(ais, tree, while_node.else_token, .space); // else if (while_node.error_token) |error_token| { - try renderToken(ais, tree, error_token - 1, .None); // | - try renderToken(ais, tree, error_token, .None); // identifier - try renderToken(ais, tree, error_token + 1, .Space); // | + try renderToken(ais, tree, error_token - 1, .none); // | + try renderToken(ais, tree, error_token, .none); // identifier + try renderToken(ais, tree, error_token + 1, .space); // | } return renderExpression(ais, tree, while_node.ast.else_expr, space); } else { if (while_node.error_token) |error_token| { - try renderToken(ais, tree, while_node.else_token, .Space); // else - try renderToken(ais, tree, error_token - 1, .None); // | - try renderToken(ais, tree, error_token, .None); // identifier - try renderToken(ais, tree, error_token + 1, .Space); // | + try renderToken(ais, tree, while_node.else_token, .space); // else + try renderToken(ais, tree, error_token - 1, .none); // | + try renderToken(ais, tree, error_token, .none); // identifier + try renderToken(ais, tree, error_token + 1, .space); // | } else { - try renderToken(ais, tree, while_node.else_token, .Newline); // else + try renderToken(ais, tree, while_node.else_token, .newline); // else } ais.pushIndent(); try renderExpression(ais, tree, while_node.ast.else_expr, space); @@ -1105,48 +1105,48 @@ fn renderWhile(ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Spa if (while_node.payload_token) |payload_token| { assert(payload_token - 2 == rparen); - try renderToken(ais, tree, payload_token - 2, .Space); // ) - try renderToken(ais, tree, payload_token - 1, .None); // | + try renderToken(ais, tree, payload_token - 2, .space); // ) + try renderToken(ais, tree, payload_token - 1, .none); // | const ident = blk: { if 
(token_tags[payload_token] == .asterisk) { - try renderToken(ais, tree, payload_token, .None); // * + try renderToken(ais, tree, payload_token, .none); // * break :blk payload_token + 1; } else { break :blk payload_token; } }; - try renderToken(ais, tree, ident, .None); // identifier + try renderToken(ais, tree, ident, .none); // identifier const pipe = blk: { if (token_tags[ident + 1] == .comma) { - try renderToken(ais, tree, ident + 1, .Space); // , - try renderToken(ais, tree, ident + 2, .None); // index + try renderToken(ais, tree, ident + 1, .space); // , + try renderToken(ais, tree, ident + 2, .none); // index break :blk payload_token + 3; } else { break :blk ident + 1; } }; - try renderToken(ais, tree, pipe, .Space); // | + try renderToken(ais, tree, pipe, .space); // | } else { - try renderToken(ais, tree, rparen, .Space); // ) + try renderToken(ais, tree, rparen, .space); // ) } if (while_node.ast.cont_expr != 0) { const cont_rparen = tree.lastToken(while_node.ast.cont_expr) + 1; const cont_lparen = tree.firstToken(while_node.ast.cont_expr) - 1; - try renderToken(ais, tree, cont_lparen - 1, .Space); // : - try renderToken(ais, tree, cont_lparen, .None); // lparen - try renderExpression(ais, tree, while_node.ast.cont_expr, .None); - try renderToken(ais, tree, cont_rparen, .Space); // rparen + try renderToken(ais, tree, cont_lparen - 1, .space); // : + try renderToken(ais, tree, cont_lparen, .none); // lparen + try renderExpression(ais, tree, while_node.ast.cont_expr, .none); + try renderToken(ais, tree, cont_rparen, .space); // rparen } if (while_node.ast.else_expr != 0) { - try renderExpression(ais, tree, while_node.ast.then_expr, .Space); - try renderToken(ais, tree, while_node.else_token, .Space); // else + try renderExpression(ais, tree, while_node.ast.then_expr, .space); + try renderToken(ais, tree, while_node.else_token, .space); // else if (while_node.error_token) |error_token| { - try renderToken(ais, tree, error_token - 1, .None); // | - try 
renderToken(ais, tree, error_token, .None); // identifier - try renderToken(ais, tree, error_token + 1, .Space); // | + try renderToken(ais, tree, error_token - 1, .none); // | + try renderToken(ais, tree, error_token, .none); // identifier + try renderToken(ais, tree, error_token + 1, .space); // | } return renderExpression(ais, tree, while_node.ast.else_expr, space); @@ -1163,21 +1163,21 @@ fn renderContainerField( ) Error!void { const main_tokens = tree.nodes.items(.main_token); if (field.comptime_token) |t| { - try renderToken(ais, tree, t, .Space); // comptime + try renderToken(ais, tree, t, .space); // comptime } if (field.ast.type_expr == 0 and field.ast.value_expr == 0) { return renderTokenComma(ais, tree, field.ast.name_token, space); // name } if (field.ast.type_expr != 0 and field.ast.value_expr == 0) { - try renderToken(ais, tree, field.ast.name_token, .None); // name - try renderToken(ais, tree, field.ast.name_token + 1, .Space); // : + try renderToken(ais, tree, field.ast.name_token, .none); // name + try renderToken(ais, tree, field.ast.name_token + 1, .space); // : if (field.ast.align_expr != 0) { - try renderExpression(ais, tree, field.ast.type_expr, .Space); // type + try renderExpression(ais, tree, field.ast.type_expr, .space); // type const align_token = tree.firstToken(field.ast.align_expr) - 2; - try renderToken(ais, tree, align_token, .None); // align - try renderToken(ais, tree, align_token + 1, .None); // ( - try renderExpression(ais, tree, field.ast.align_expr, .None); // alignment + try renderToken(ais, tree, align_token, .none); // align + try renderToken(ais, tree, align_token + 1, .none); // ( + try renderExpression(ais, tree, field.ast.align_expr, .none); // alignment const rparen = tree.lastToken(field.ast.align_expr) + 1; return renderTokenComma(ais, tree, rparen, space); // ) } else { @@ -1185,26 +1185,26 @@ fn renderContainerField( } } if (field.ast.type_expr == 0 and field.ast.value_expr != 0) { - try renderToken(ais, tree, 
field.ast.name_token, .Space); // name - try renderToken(ais, tree, field.ast.name_token + 1, .Space); // = + try renderToken(ais, tree, field.ast.name_token, .space); // name + try renderToken(ais, tree, field.ast.name_token + 1, .space); // = return renderExpressionComma(ais, tree, field.ast.value_expr, space); // value } - try renderToken(ais, tree, field.ast.name_token, .None); // name - try renderToken(ais, tree, field.ast.name_token + 1, .Space); // : - try renderExpression(ais, tree, field.ast.type_expr, .Space); // type + try renderToken(ais, tree, field.ast.name_token, .none); // name + try renderToken(ais, tree, field.ast.name_token + 1, .space); // : + try renderExpression(ais, tree, field.ast.type_expr, .space); // type if (field.ast.align_expr != 0) { const lparen_token = tree.firstToken(field.ast.align_expr) - 1; const align_kw = lparen_token - 1; const rparen_token = tree.lastToken(field.ast.align_expr) + 1; - try renderToken(ais, tree, align_kw, .None); // align - try renderToken(ais, tree, lparen_token, .None); // ( - try renderExpression(ais, tree, field.ast.align_expr, .None); // alignment - try renderToken(ais, tree, rparen_token, .Space); // ) + try renderToken(ais, tree, align_kw, .none); // align + try renderToken(ais, tree, lparen_token, .none); // ( + try renderExpression(ais, tree, field.ast.align_expr, .none); // alignment + try renderToken(ais, tree, rparen_token, .space); // ) } const eq_token = tree.firstToken(field.ast.value_expr) - 1; - try renderToken(ais, tree, eq_token, .Space); // = + try renderToken(ais, tree, eq_token, .space); // = return renderExpressionComma(ais, tree, field.ast.value_expr, space); // value } @@ -1217,10 +1217,10 @@ fn renderBuiltinCall( ) Error!void { const token_tags = tree.tokens.items(.tag); - try renderToken(ais, tree, builtin_token, .None); // @name + try renderToken(ais, tree, builtin_token, .none); // @name if (params.len == 0) { - try renderToken(ais, tree, builtin_token + 1, .None); // ( + try 
renderToken(ais, tree, builtin_token + 1, .none); // ( return renderToken(ais, tree, builtin_token + 2, space); // ) } @@ -1229,24 +1229,24 @@ fn renderBuiltinCall( if (token_tags[after_last_param_token] != .comma) { // Render all on one line, no trailing comma. - try renderToken(ais, tree, builtin_token + 1, .None); // ( + try renderToken(ais, tree, builtin_token + 1, .none); // ( for (params) |param_node, i| { - try renderExpression(ais, tree, param_node, .None); + try renderExpression(ais, tree, param_node, .none); if (i + 1 < params.len) { const comma_token = tree.lastToken(param_node) + 1; - try renderToken(ais, tree, comma_token, .Space); // , + try renderToken(ais, tree, comma_token, .space); // , } } return renderToken(ais, tree, after_last_param_token, space); // ) } else { // Render one param per line. ais.pushIndent(); - try renderToken(ais, tree, builtin_token + 1, Space.Newline); // ( + try renderToken(ais, tree, builtin_token + 1, Space.newline); // ( for (params) |param_node| { - try renderExpression(ais, tree, param_node, .Comma); + try renderExpression(ais, tree, param_node, .comma); } ais.popIndent(); @@ -1260,11 +1260,11 @@ fn renderFnProto(ais: *Ais, tree: ast.Tree, fn_proto: ast.full.FnProto, space: S const after_fn_token = fn_proto.ast.fn_token + 1; const lparen = if (token_tags[after_fn_token] == .identifier) blk: { - try renderToken(ais, tree, fn_proto.ast.fn_token, .Space); // fn - try renderToken(ais, tree, after_fn_token, .None); // name + try renderToken(ais, tree, fn_proto.ast.fn_token, .space); // fn + try renderToken(ais, tree, after_fn_token, .none); // name break :blk after_fn_token + 1; } else blk: { - try renderToken(ais, tree, fn_proto.ast.fn_token, .Space); // fn + try renderToken(ais, tree, fn_proto.ast.fn_token, .space); // fn break :blk fn_proto.ast.fn_token + 1; }; assert(token_tags[lparen] == .l_paren); @@ -1307,7 +1307,7 @@ fn renderFnProto(ais: *Ais, tree: ast.Tree, fn_proto: ast.full.FnProto, space: S if 
(token_tags[rparen - 1] != .comma) { // Render all on one line, no trailing comma. - try renderToken(ais, tree, lparen, .None); // ( + try renderToken(ais, tree, lparen, .none); // ( var param_i: usize = 0; var last_param_token = lparen; @@ -1315,25 +1315,25 @@ fn renderFnProto(ais: *Ais, tree: ast.Tree, fn_proto: ast.full.FnProto, space: S last_param_token += 1; switch (token_tags[last_param_token]) { .doc_comment => { - try renderToken(ais, tree, last_param_token, .Newline); + try renderToken(ais, tree, last_param_token, .newline); continue; }, .ellipsis3 => { - try renderToken(ais, tree, last_param_token, .None); // ... + try renderToken(ais, tree, last_param_token, .none); // ... break; }, .keyword_noalias, .keyword_comptime => { - try renderToken(ais, tree, last_param_token, .Space); + try renderToken(ais, tree, last_param_token, .space); last_param_token += 1; }, .identifier => {}, .keyword_anytype => { - try renderToken(ais, tree, last_param_token, .None); // anytype + try renderToken(ais, tree, last_param_token, .none); // anytype continue; }, .r_paren => break, .comma => { - try renderToken(ais, tree, last_param_token, .Space); // , + try renderToken(ais, tree, last_param_token, .space); // , last_param_token += 1; }, else => {}, // Parameter type without a name. 
@@ -1341,24 +1341,24 @@ fn renderFnProto(ais: *Ais, tree: ast.Tree, fn_proto: ast.full.FnProto, space: S if (token_tags[last_param_token] == .identifier and token_tags[last_param_token + 1] == .colon) { - try renderToken(ais, tree, last_param_token, .None); // name + try renderToken(ais, tree, last_param_token, .none); // name last_param_token += 1; - try renderToken(ais, tree, last_param_token, .Space); // : + try renderToken(ais, tree, last_param_token, .space); // : last_param_token += 1; } if (token_tags[last_param_token] == .keyword_anytype) { - try renderToken(ais, tree, last_param_token, .None); // anytype + try renderToken(ais, tree, last_param_token, .none); // anytype continue; } const param = fn_proto.ast.params[param_i]; param_i += 1; - try renderExpression(ais, tree, param, .None); + try renderExpression(ais, tree, param, .none); last_param_token = tree.lastToken(param); } } else { // One param per line. ais.pushIndent(); - try renderToken(ais, tree, lparen, .Newline); // ( + try renderToken(ais, tree, lparen, .newline); // ( var param_i: usize = 0; var last_param_token = lparen; @@ -1366,77 +1366,77 @@ fn renderFnProto(ais: *Ais, tree: ast.Tree, fn_proto: ast.full.FnProto, space: S last_param_token += 1; switch (token_tags[last_param_token]) { .doc_comment => { - try renderToken(ais, tree, last_param_token, .Newline); + try renderToken(ais, tree, last_param_token, .newline); continue; }, .ellipsis3 => { - try renderToken(ais, tree, last_param_token, .Comma); // ... + try renderToken(ais, tree, last_param_token, .comma); // ... 
break; }, .keyword_noalias, .keyword_comptime => { - try renderToken(ais, tree, last_param_token, .Space); + try renderToken(ais, tree, last_param_token, .space); last_param_token += 1; }, .identifier => {}, .keyword_anytype => { - try renderToken(ais, tree, last_param_token, .Comma); // anytype + try renderToken(ais, tree, last_param_token, .comma); // anytype continue; }, .r_paren => break, else => unreachable, } if (token_tags[last_param_token] == .identifier) { - try renderToken(ais, tree, last_param_token, .None); // name + try renderToken(ais, tree, last_param_token, .none); // name last_param_token += 1; - try renderToken(ais, tree, last_param_token, .Space); // : + try renderToken(ais, tree, last_param_token, .space); // : last_param_token += 1; } if (token_tags[last_param_token] == .keyword_anytype) { - try renderToken(ais, tree, last_param_token, .Comma); // anytype + try renderToken(ais, tree, last_param_token, .comma); // anytype continue; } const param = fn_proto.ast.params[param_i]; param_i += 1; - try renderExpression(ais, tree, param, .Comma); + try renderExpression(ais, tree, param, .comma); last_param_token = tree.lastToken(param) + 1; } ais.popIndent(); } - try renderToken(ais, tree, rparen, .Space); // ) + try renderToken(ais, tree, rparen, .space); // ) if (fn_proto.ast.align_expr != 0) { const align_lparen = tree.firstToken(fn_proto.ast.align_expr) - 1; const align_rparen = tree.lastToken(fn_proto.ast.align_expr) + 1; - try renderToken(ais, tree, align_lparen - 1, .None); // align - try renderToken(ais, tree, align_lparen, .None); // ( - try renderExpression(ais, tree, fn_proto.ast.align_expr, .None); - try renderToken(ais, tree, align_rparen, .Space); // ) + try renderToken(ais, tree, align_lparen - 1, .none); // align + try renderToken(ais, tree, align_lparen, .none); // ( + try renderExpression(ais, tree, fn_proto.ast.align_expr, .none); + try renderToken(ais, tree, align_rparen, .space); // ) } if (fn_proto.ast.section_expr != 0) { const 
section_lparen = tree.firstToken(fn_proto.ast.section_expr) - 1; const section_rparen = tree.lastToken(fn_proto.ast.section_expr) + 1; - try renderToken(ais, tree, section_lparen - 1, .None); // section - try renderToken(ais, tree, section_lparen, .None); // ( - try renderExpression(ais, tree, fn_proto.ast.section_expr, .None); - try renderToken(ais, tree, section_rparen, .Space); // ) + try renderToken(ais, tree, section_lparen - 1, .none); // section + try renderToken(ais, tree, section_lparen, .none); // ( + try renderExpression(ais, tree, fn_proto.ast.section_expr, .none); + try renderToken(ais, tree, section_rparen, .space); // ) } if (fn_proto.ast.callconv_expr != 0) { const callconv_lparen = tree.firstToken(fn_proto.ast.callconv_expr) - 1; const callconv_rparen = tree.lastToken(fn_proto.ast.callconv_expr) + 1; - try renderToken(ais, tree, callconv_lparen - 1, .None); // callconv - try renderToken(ais, tree, callconv_lparen, .None); // ( - try renderExpression(ais, tree, fn_proto.ast.callconv_expr, .None); - try renderToken(ais, tree, callconv_rparen, .Space); // ) + try renderToken(ais, tree, callconv_lparen - 1, .none); // callconv + try renderToken(ais, tree, callconv_lparen, .none); // ( + try renderExpression(ais, tree, fn_proto.ast.callconv_expr, .none); + try renderToken(ais, tree, callconv_rparen, .space); // ) } if (token_tags[maybe_bang] == .bang) { - try renderToken(ais, tree, maybe_bang, .None); // ! + try renderToken(ais, tree, maybe_bang, .none); // ! 
} return renderExpression(ais, tree, fn_proto.ast.return_type, space); } @@ -1452,34 +1452,34 @@ fn renderSwitchCase( // Render everything before the arrow if (switch_case.ast.values.len == 0) { - try renderToken(ais, tree, switch_case.ast.arrow_token - 1, .Space); // else keyword + try renderToken(ais, tree, switch_case.ast.arrow_token - 1, .space); // else keyword } else if (switch_case.ast.values.len == 1) { // render on one line and drop the trailing comma if any - try renderExpression(ais, tree, switch_case.ast.values[0], .Space); + try renderExpression(ais, tree, switch_case.ast.values[0], .space); } else if (trailing_comma) { // Render each value on a new line for (switch_case.ast.values) |value_expr| { - try renderExpression(ais, tree, value_expr, .Comma); + try renderExpression(ais, tree, value_expr, .comma); } } else { // Render on one line for (switch_case.ast.values) |value_expr| { - try renderExpression(ais, tree, value_expr, .CommaSpace); + try renderExpression(ais, tree, value_expr, .comma_space); } } // Render the arrow and everything after it - try renderToken(ais, tree, switch_case.ast.arrow_token, .Space); + try renderToken(ais, tree, switch_case.ast.arrow_token, .space); if (switch_case.payload_token) |payload_token| { - try renderToken(ais, tree, payload_token - 1, .None); // pipe + try renderToken(ais, tree, payload_token - 1, .none); // pipe if (token_tags[payload_token] == .asterisk) { - try renderToken(ais, tree, payload_token, .None); // asterisk - try renderToken(ais, tree, payload_token + 1, .None); // identifier - try renderToken(ais, tree, payload_token + 2, .Space); // pipe + try renderToken(ais, tree, payload_token, .none); // asterisk + try renderToken(ais, tree, payload_token + 1, .none); // identifier + try renderToken(ais, tree, payload_token + 2, .space); // pipe } else { - try renderToken(ais, tree, payload_token, .None); // identifier - try renderToken(ais, tree, payload_token + 1, .Space); // pipe + try renderToken(ais, tree, 
payload_token, .none); // identifier + try renderToken(ais, tree, payload_token + 1, .space); // pipe } } @@ -1501,26 +1501,26 @@ fn renderBlock( if (token_tags[lbrace - 1] == .colon and token_tags[lbrace - 2] == .identifier) { - try renderToken(ais, tree, lbrace - 2, .None); - try renderToken(ais, tree, lbrace - 1, .Space); + try renderToken(ais, tree, lbrace - 2, .none); + try renderToken(ais, tree, lbrace - 1, .space); } if (statements.len == 0) { ais.pushIndentNextLine(); - try renderToken(ais, tree, lbrace, .None); + try renderToken(ais, tree, lbrace, .none); ais.popIndent(); return renderToken(ais, tree, lbrace + 1, space); // rbrace } ais.pushIndentNextLine(); - try renderToken(ais, tree, lbrace, .Newline); + try renderToken(ais, tree, lbrace, .newline); for (statements) |stmt, i| { switch (node_tags[stmt]) { .global_var_decl => try renderVarDecl(ais, tree, tree.globalVarDecl(stmt)), .local_var_decl => try renderVarDecl(ais, tree, tree.localVarDecl(stmt)), .simple_var_decl => try renderVarDecl(ais, tree, tree.simpleVarDecl(stmt)), .aligned_var_decl => try renderVarDecl(ais, tree, tree.alignedVarDecl(stmt)), - else => try renderExpression(ais, tree, stmt, .Semicolon), + else => try renderExpression(ais, tree, stmt, .semicolon), } } ais.popIndent(); @@ -1537,12 +1537,12 @@ fn renderStructInit( ) Error!void { const token_tags = tree.tokens.items(.tag); if (struct_init.ast.type_expr == 0) { - try renderToken(ais, tree, struct_init.ast.lbrace - 1, .None); // . + try renderToken(ais, tree, struct_init.ast.lbrace - 1, .none); // . 
} else { - try renderExpression(ais, tree, struct_init.ast.type_expr, .None); // T + try renderExpression(ais, tree, struct_init.ast.type_expr, .none); // T } if (struct_init.ast.fields.len == 0) { - try renderToken(ais, tree, struct_init.ast.lbrace, .None); // lbrace + try renderToken(ais, tree, struct_init.ast.lbrace, .none); // lbrace return renderToken(ais, tree, struct_init.ast.lbrace + 1, space); // rbrace } const last_field = struct_init.ast.fields[struct_init.ast.fields.len - 1]; @@ -1550,28 +1550,28 @@ fn renderStructInit( if (token_tags[last_field_token + 1] == .comma) { // Render one field init per line. ais.pushIndent(); - try renderToken(ais, tree, struct_init.ast.lbrace, .Newline); + try renderToken(ais, tree, struct_init.ast.lbrace, .newline); for (struct_init.ast.fields) |field_init| { const init_token = tree.firstToken(field_init); - try renderToken(ais, tree, init_token - 3, .None); // . - try renderToken(ais, tree, init_token - 2, .Space); // name - try renderToken(ais, tree, init_token - 1, .Space); // = - try renderExpression(ais, tree, field_init, .Comma); + try renderToken(ais, tree, init_token - 3, .none); // . + try renderToken(ais, tree, init_token - 2, .space); // name + try renderToken(ais, tree, init_token - 1, .space); // = + try renderExpression(ais, tree, field_init, .comma); } ais.popIndent(); return renderToken(ais, tree, last_field_token + 2, space); // rbrace } else { // Render all on one line, no trailing comma. - try renderToken(ais, tree, struct_init.ast.lbrace, .Space); + try renderToken(ais, tree, struct_init.ast.lbrace, .space); for (struct_init.ast.fields) |field_init| { const init_token = tree.firstToken(field_init); - try renderToken(ais, tree, init_token - 3, .None); // . - try renderToken(ais, tree, init_token - 2, .Space); // name - try renderToken(ais, tree, init_token - 1, .Space); // = - try renderExpression(ais, tree, field_init, .CommaSpace); + try renderToken(ais, tree, init_token - 3, .none); // . 
+ try renderToken(ais, tree, init_token - 2, .space); // name + try renderToken(ais, tree, init_token - 1, .space); // = + try renderExpression(ais, tree, field_init, .comma_space); } return renderToken(ais, tree, last_field_token + 1, space); // rbrace @@ -1587,12 +1587,12 @@ fn renderArrayInit( ) Error!void { const token_tags = tree.tokens.items(.tag); if (array_init.ast.type_expr == 0) { - try renderToken(ais, tree, array_init.ast.lbrace - 1, .None); // . + try renderToken(ais, tree, array_init.ast.lbrace - 1, .none); // . } else { - try renderExpression(ais, tree, array_init.ast.type_expr, .None); // T + try renderExpression(ais, tree, array_init.ast.type_expr, .none); // T } if (array_init.ast.elements.len == 0) { - try renderToken(ais, tree, array_init.ast.lbrace, .None); // lbrace + try renderToken(ais, tree, array_init.ast.lbrace, .none); // lbrace return renderToken(ais, tree, array_init.ast.lbrace + 1, space); // rbrace } const last_elem = array_init.ast.elements[array_init.ast.elements.len - 1]; @@ -1600,10 +1600,10 @@ fn renderArrayInit( if (token_tags[last_elem_token + 1] == .comma) { // Render one element per line. ais.pushIndent(); - try renderToken(ais, tree, array_init.ast.lbrace, .Newline); + try renderToken(ais, tree, array_init.ast.lbrace, .newline); for (array_init.ast.elements) |elem| { - try renderExpression(ais, tree, elem, .Comma); + try renderExpression(ais, tree, elem, .comma); } ais.popIndent(); @@ -1612,12 +1612,12 @@ fn renderArrayInit( // Render all on one line, no trailing comma. 
if (array_init.ast.elements.len == 1) { // If there is only one element, we don't use spaces - try renderToken(ais, tree, array_init.ast.lbrace, .None); - try renderExpression(ais, tree, array_init.ast.elements[0], .None); + try renderToken(ais, tree, array_init.ast.lbrace, .none); + try renderExpression(ais, tree, array_init.ast.elements[0], .none); } else { - try renderToken(ais, tree, array_init.ast.lbrace, .Space); + try renderToken(ais, tree, array_init.ast.lbrace, .space); for (array_init.ast.elements) |elem| { - try renderExpression(ais, tree, elem, .CommaSpace); + try renderExpression(ais, tree, elem, .comma_space); } } return renderToken(ais, tree, last_elem_token + 1, space); // rbrace @@ -1634,39 +1634,39 @@ fn renderContainerDecl( const node_tags = tree.nodes.items(.tag); if (container_decl.layout_token) |layout_token| { - try renderToken(ais, tree, layout_token, .Space); + try renderToken(ais, tree, layout_token, .space); } var lbrace: ast.TokenIndex = undefined; if (container_decl.ast.enum_token) |enum_token| { - try renderToken(ais, tree, container_decl.ast.main_token, .None); // union - try renderToken(ais, tree, enum_token - 1, .None); // lparen - try renderToken(ais, tree, enum_token, .None); // enum + try renderToken(ais, tree, container_decl.ast.main_token, .none); // union + try renderToken(ais, tree, enum_token - 1, .none); // lparen + try renderToken(ais, tree, enum_token, .none); // enum if (container_decl.ast.arg != 0) { - try renderToken(ais, tree, enum_token + 1, .None); // lparen - try renderExpression(ais, tree, container_decl.ast.arg, .None); + try renderToken(ais, tree, enum_token + 1, .none); // lparen + try renderExpression(ais, tree, container_decl.ast.arg, .none); const rparen = tree.lastToken(container_decl.ast.arg) + 1; - try renderToken(ais, tree, rparen, .None); // rparen - try renderToken(ais, tree, rparen + 1, .Space); // rparen + try renderToken(ais, tree, rparen, .none); // rparen + try renderToken(ais, tree, rparen + 1, 
.space); // rparen lbrace = rparen + 2; } else { - try renderToken(ais, tree, enum_token + 1, .Space); // rparen + try renderToken(ais, tree, enum_token + 1, .space); // rparen lbrace = enum_token + 2; } } else if (container_decl.ast.arg != 0) { - try renderToken(ais, tree, container_decl.ast.main_token, .None); // union - try renderToken(ais, tree, container_decl.ast.main_token + 1, .None); // lparen - try renderExpression(ais, tree, container_decl.ast.arg, .None); + try renderToken(ais, tree, container_decl.ast.main_token, .none); // union + try renderToken(ais, tree, container_decl.ast.main_token + 1, .none); // lparen + try renderExpression(ais, tree, container_decl.ast.arg, .none); const rparen = tree.lastToken(container_decl.ast.arg) + 1; - try renderToken(ais, tree, rparen, .Space); // rparen + try renderToken(ais, tree, rparen, .space); // rparen lbrace = rparen + 1; } else { - try renderToken(ais, tree, container_decl.ast.main_token, .Space); // union + try renderToken(ais, tree, container_decl.ast.main_token, .space); // union lbrace = container_decl.ast.main_token + 1; } if (container_decl.ast.members.len == 0) { - try renderToken(ais, tree, lbrace, Space.None); // lbrace + try renderToken(ais, tree, lbrace, Space.none); // lbrace return renderToken(ais, tree, lbrace + 1, space); // rbrace } @@ -1690,18 +1690,18 @@ fn renderContainerDecl( if (!node_tags[member].isContainerField()) break :one_line; } // All the declarations on the same line. - try renderToken(ais, tree, lbrace, .Space); // lbrace + try renderToken(ais, tree, lbrace, .space); // lbrace for (container_decl.ast.members) |member| { - try renderMember(ais, tree, member, .Space); + try renderMember(ais, tree, member, .space); } return renderToken(ais, tree, rbrace, space); // rbrace } // One member per line. 
ais.pushIndent(); - try renderToken(ais, tree, lbrace, .Newline); // lbrace + try renderToken(ais, tree, lbrace, .newline); // lbrace for (container_decl.ast.members) |member| { - try renderMember(ais, tree, member, .Newline); + try renderMember(ais, tree, member, .newline); } ais.popIndent(); @@ -1716,26 +1716,26 @@ fn renderAsm( ) Error!void { const token_tags = tree.tokens.items(.tag); - try renderToken(ais, tree, asm_node.ast.asm_token, .Space); // asm + try renderToken(ais, tree, asm_node.ast.asm_token, .space); // asm if (asm_node.volatile_token) |volatile_token| { - try renderToken(ais, tree, volatile_token, .Space); // volatile - try renderToken(ais, tree, volatile_token + 1, .None); // lparen + try renderToken(ais, tree, volatile_token, .space); // volatile + try renderToken(ais, tree, volatile_token + 1, .none); // lparen } else { - try renderToken(ais, tree, asm_node.ast.asm_token + 1, .None); // lparen + try renderToken(ais, tree, asm_node.ast.asm_token + 1, .none); // lparen } if (asm_node.ast.items.len == 0) { - try renderExpression(ais, tree, asm_node.ast.template, .None); + try renderExpression(ais, tree, asm_node.ast.template, .none); if (asm_node.first_clobber) |first_clobber| { // asm ("foo" ::: "a", "b") var tok_i = first_clobber; while (true) : (tok_i += 1) { - try renderToken(ais, tree, tok_i, .None); + try renderToken(ais, tree, tok_i, .none); tok_i += 1; switch (token_tags[tok_i]) { .r_paren => return renderToken(ais, tree, tok_i, space), - .comma => try renderToken(ais, tree, tok_i, .Space), + .comma => try renderToken(ais, tree, tok_i, .space), else => unreachable, } } @@ -1746,32 +1746,32 @@ fn renderAsm( } ais.pushIndent(); - try renderExpression(ais, tree, asm_node.ast.template, .Newline); + try renderExpression(ais, tree, asm_node.ast.template, .newline); ais.setIndentDelta(asm_indent_delta); const colon1 = tree.lastToken(asm_node.ast.template) + 1; const colon2 = if (asm_node.outputs.len == 0) colon2: { - try renderToken(ais, tree, 
colon1, .Newline); // : + try renderToken(ais, tree, colon1, .newline); // : break :colon2 colon1 + 1; } else colon2: { - try renderToken(ais, tree, colon1, .Space); // : + try renderToken(ais, tree, colon1, .space); // : ais.pushIndent(); for (asm_node.outputs) |asm_output, i| { if (i + 1 < asm_node.outputs.len) { const next_asm_output = asm_node.outputs[i + 1]; - try renderAsmOutput(ais, tree, asm_output, .None); + try renderAsmOutput(ais, tree, asm_output, .none); const comma = tree.firstToken(next_asm_output) - 1; - try renderToken(ais, tree, comma, .Newline); // , + try renderToken(ais, tree, comma, .newline); // , } else if (asm_node.inputs.len == 0 and asm_node.first_clobber == null) { - try renderAsmOutput(ais, tree, asm_output, .Newline); + try renderAsmOutput(ais, tree, asm_output, .newline); ais.popIndent(); ais.setIndentDelta(indent_delta); ais.popIndent(); return renderToken(ais, tree, asm_node.ast.rparen, space); // rparen } else { - try renderAsmOutput(ais, tree, asm_output, .Newline); + try renderAsmOutput(ais, tree, asm_output, .newline); const comma_or_colon = tree.lastToken(asm_output) + 1; ais.popIndent(); break :colon2 switch (token_tags[comma_or_colon]) { @@ -1783,26 +1783,26 @@ fn renderAsm( }; const colon3 = if (asm_node.inputs.len == 0) colon3: { - try renderToken(ais, tree, colon2, .Newline); // : + try renderToken(ais, tree, colon2, .newline); // : break :colon3 colon2 + 1; } else colon3: { - try renderToken(ais, tree, colon2, .Space); // : + try renderToken(ais, tree, colon2, .space); // : ais.pushIndent(); for (asm_node.inputs) |asm_input, i| { if (i + 1 < asm_node.inputs.len) { const next_asm_input = asm_node.inputs[i + 1]; - try renderAsmInput(ais, tree, asm_input, .None); + try renderAsmInput(ais, tree, asm_input, .none); const first_token = tree.firstToken(next_asm_input); - try renderToken(ais, tree, first_token - 1, .Newline); // , + try renderToken(ais, tree, first_token - 1, .newline); // , } else if (asm_node.first_clobber == 
null) { - try renderAsmInput(ais, tree, asm_input, .Newline); + try renderAsmInput(ais, tree, asm_input, .newline); ais.popIndent(); ais.setIndentDelta(indent_delta); ais.popIndent(); return renderToken(ais, tree, asm_node.ast.rparen, space); // rparen } else { - try renderAsmInput(ais, tree, asm_input, .Newline); + try renderAsmInput(ais, tree, asm_input, .newline); const comma_or_colon = tree.lastToken(asm_input) + 1; ais.popIndent(); break :colon3 switch (token_tags[comma_or_colon]) { @@ -1814,7 +1814,7 @@ fn renderAsm( unreachable; }; - try renderToken(ais, tree, colon3, .Space); // : + try renderToken(ais, tree, colon3, .space); // : const first_clobber = asm_node.first_clobber.?; var tok_i = first_clobber; while (true) { @@ -1822,12 +1822,12 @@ fn renderAsm( .r_paren => { ais.setIndentDelta(indent_delta); ais.popIndent(); - try renderToken(ais, tree, tok_i, .Newline); + try renderToken(ais, tree, tok_i, .newline); return renderToken(ais, tree, tok_i + 1, space); }, .comma => { - try renderToken(ais, tree, tok_i, .None); - try renderToken(ais, tree, tok_i + 1, .Space); + try renderToken(ais, tree, tok_i, .none); + try renderToken(ais, tree, tok_i + 1, .space); tok_i += 2; }, else => unreachable, @@ -1846,14 +1846,14 @@ fn renderCall( const main_tokens = tree.nodes.items(.main_token); if (call.async_token) |async_token| { - try renderToken(ais, tree, async_token, .Space); + try renderToken(ais, tree, async_token, .space); } - try renderExpression(ais, tree, call.ast.fn_expr, .None); + try renderExpression(ais, tree, call.ast.fn_expr, .none); const lparen = call.ast.lparen; const params = call.ast.params; if (params.len == 0) { - try renderToken(ais, tree, lparen, .None); + try renderToken(ais, tree, lparen, .none); return renderToken(ais, tree, lparen + 1, space); // ) } @@ -1861,10 +1861,10 @@ fn renderCall( const after_last_param_tok = tree.lastToken(last_param) + 1; if (token_tags[after_last_param_tok] == .comma) { ais.pushIndent(); - try renderToken(ais, 
tree, lparen, Space.Newline); // ( + try renderToken(ais, tree, lparen, Space.newline); // ( for (params) |param_node, i| { if (i + 1 < params.len) { - try renderExpression(ais, tree, param_node, Space.None); + try renderExpression(ais, tree, param_node, Space.none); // Unindent the comma for multiline string literals const is_multiline_string = node_tags[param_node] == .string_literal and @@ -1872,25 +1872,25 @@ fn renderCall( if (is_multiline_string) ais.popIndent(); const comma = tree.lastToken(param_node) + 1; - try renderToken(ais, tree, comma, Space.Newline); // , + try renderToken(ais, tree, comma, Space.newline); // , if (is_multiline_string) ais.pushIndent(); } else { - try renderExpression(ais, tree, param_node, Space.Comma); + try renderExpression(ais, tree, param_node, Space.comma); } } ais.popIndent(); return renderToken(ais, tree, after_last_param_tok + 1, space); // ) } - try renderToken(ais, tree, lparen, Space.None); // ( + try renderToken(ais, tree, lparen, Space.none); // ( for (params) |param_node, i| { - try renderExpression(ais, tree, param_node, Space.None); + try renderExpression(ais, tree, param_node, Space.none); if (i + 1 < params.len) { const comma = tree.lastToken(param_node) + 1; - try renderToken(ais, tree, comma, Space.Space); + try renderToken(ais, tree, comma, Space.space); } } return renderToken(ais, tree, after_last_param_tok, space); // ) @@ -1901,7 +1901,7 @@ fn renderExpressionComma(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: const token_tags = tree.tokens.items(.tag); const maybe_comma = tree.lastToken(node) + 1; if (token_tags[maybe_comma] == .comma) { - try renderExpression(ais, tree, node, .None); + try renderExpression(ais, tree, node, .none); return renderToken(ais, tree, maybe_comma, space); } else { return renderExpression(ais, tree, node, space); @@ -1912,7 +1912,7 @@ fn renderTokenComma(ais: *Ais, tree: ast.Tree, token: ast.TokenIndex, space: Spa const token_tags = tree.tokens.items(.tag); const 
maybe_comma = token + 1; if (token_tags[maybe_comma] == .comma) { - try renderToken(ais, tree, token, .None); + try renderToken(ais, tree, token, .none); return renderToken(ais, tree, maybe_comma, space); } else { return renderToken(ais, tree, token, space); @@ -1921,22 +1921,22 @@ fn renderTokenComma(ais: *Ais, tree: ast.Tree, token: ast.TokenIndex, space: Spa const Space = enum { /// Output the token lexeme only. - None, + none, /// Output the token lexeme followed by a single space. - Space, + space, /// Output the token lexeme followed by a newline. - Newline, + newline, /// Additionally consume the next token if it is a comma. /// In either case, a newline will be inserted afterwards. - Comma, + comma, /// Additionally consume the next token if it is a comma. /// In either case, a space will be inserted afterwards. - CommaSpace, + comma_space, /// Additionally consume the next token if it is a semicolon. /// In either case, a newline will be inserted afterwards. - Semicolon, + semicolon, /// Skips writing the possible line comment after the token. 
- NoComment, + no_comment, }; fn renderToken(ais: *Ais, tree: ast.Tree, token_index: ast.TokenIndex, space: Space) Error!void { @@ -1958,39 +1958,39 @@ fn renderToken(ais: *Ais, tree: ast.Tree, token_index: ast.TokenIndex, space: Sp try ais.writer().writeAll(lexeme); switch (space) { - .NoComment => {}, - .None => _ = try renderCommentsAndNewlines(ais, tree, token_start + lexeme.len, token_starts[token_index + 1]), - .Comma => { + .no_comment => {}, + .none => _ = try renderCommentsAndNewlines(ais, tree, token_start + lexeme.len, token_starts[token_index + 1]), + .comma => { const comment = try renderCommentsAndNewlines(ais, tree, token_start + lexeme.len, token_starts[token_index + 1]); if (token_tags[token_index + 1] == .comma) { - return renderToken(ais, tree, token_index + 1, .Newline); + return renderToken(ais, tree, token_index + 1, .newline); } else if (!comment) { return ais.insertNewline(); } }, - .CommaSpace => { + .comma_space => { const comment = try renderCommentsAndNewlines(ais, tree, token_start + lexeme.len, token_starts[token_index + 1]); if (token_tags[token_index + 1] == .comma) { - return renderToken(ais, tree, token_index + 1, .Space); + return renderToken(ais, tree, token_index + 1, .space); } else if (!comment) { return ais.writer().writeByte(' '); } }, - .Semicolon => { + .semicolon => { const comment = try renderCommentsAndNewlines(ais, tree, token_start + lexeme.len, token_starts[token_index + 1]); if (token_tags[token_index + 1] == .semicolon) { - return renderToken(ais, tree, token_index + 1, .Newline); + return renderToken(ais, tree, token_index + 1, .newline); } else if (!comment) { return ais.insertNewline(); } }, - .Space => { + .space => { const comment = try renderCommentsAndNewlines(ais, tree, token_start + lexeme.len, token_starts[token_index + 1]); if (!comment) { return ais.writer().writeByte(' '); } }, - .Newline => { + .newline => { if (!try renderCommentsAndNewlines(ais, tree, token_start + lexeme.len, 
token_starts[token_index + 1])) { try ais.insertNewline(); } @@ -2018,9 +2018,9 @@ fn renderDocComments(ais: *Ais, tree: ast.Tree, end_token: ast.TokenIndex) Error switch (token_tags[tok]) { .doc_comment => { if (first_tok < end_token) { - try renderToken(ais, tree, tok, .Newline); + try renderToken(ais, tree, tok, .newline); } else { - try renderToken(ais, tree, tok, .NoComment); + try renderToken(ais, tree, tok, .no_comment); try ais.insertNewline(); } }, From 3d0f4b90305bc1815ccc86613cb3da715e9b62c0 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Thu, 11 Feb 2021 23:29:55 -0700 Subject: [PATCH 055/173] stage2: start reworking Module/astgen for memory layout changes This commit does not reach any particular milestone, it is work-in-progress towards getting things to build. There's a `@panic("TODO")` in translate-c that should be removed when working on translate-c stuff. --- lib/std/zig.zig | 1 - lib/std/zig/ast.zig | 195 +++++ lib/std/zig/parser_test.zig | 2 +- lib/std/zig/render.zig | 55 +- src/Compilation.zig | 13 +- src/Module.zig | 1542 ++++++++++++++++++++--------------- src/main.zig | 86 +- src/translate_c.zig | 10 +- src/zir.zig | 15 +- 9 files changed, 1177 insertions(+), 742 deletions(-) diff --git a/lib/std/zig.zig b/lib/std/zig.zig index 5b564864ad..7f23405150 100644 --- a/lib/std/zig.zig +++ b/lib/std/zig.zig @@ -12,7 +12,6 @@ pub const fmtId = @import("zig/fmt.zig").fmtId; pub const fmtEscapes = @import("zig/fmt.zig").fmtEscapes; pub const parse = @import("zig/parse.zig").parse; pub const parseStringLiteral = @import("zig/string_literal.zig").parse; -pub const render = @import("zig/render.zig").render; pub const ast = @import("zig/ast.zig"); pub const system = @import("zig/system.zig"); pub const CrossTarget = @import("zig/cross_target.zig").CrossTarget; diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 0b6133e789..ab81c3415e 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -45,6 +45,28 @@ pub const Tree = struct { tree.* 
= undefined; } + pub const RenderError = error{ + /// Ran out of memory allocating call stack frames to complete rendering, or + /// ran out of memory allocating space in the output buffer. + OutOfMemory, + }; + + /// `gpa` is used for allocating the resulting formatted source code, as well as + /// for allocating extra stack memory if needed, because this function utilizes recursion. + /// Note: that's not actually true yet, see https://github.com/ziglang/zig/issues/1006. + /// Caller owns the returned slice of bytes, allocated with `gpa`. + pub fn render(tree: Tree, gpa: *mem.Allocator) RenderError![]u8 { + var buffer = std.ArrayList(u8).init(gpa); + defer buffer.deinit(); + + try tree.renderToArrayList(&buffer); + return buffer.toOwnedSlice(); + } + + pub fn renderToArrayList(tree: Tree, buffer: *std.ArrayList(u8)) RenderError!void { + return @import("./render.zig").renderTree(buffer, tree); + } + pub fn tokenLocation(self: Tree, start_offset: ByteOffset, token_index: TokenIndex) Location { var loc = Location{ .line = 0, @@ -72,6 +94,27 @@ pub const Tree = struct { return loc; } + pub fn tokenSlice(tree: Tree, token_index: TokenIndex) []const u8 { + const token_starts = tree.tokens.items(.start); + const token_tags = tree.tokens.items(.tag); + const token_tag = token_tags[token_index]; + + // Many tokens can be determined entirely by their tag. + if (token_tag.lexeme()) |lexeme| { + return lexeme; + } + + // For some tokens, re-tokenization is needed to find the end. 
+ var tokenizer: std.zig.Tokenizer = .{ + .buffer = tree.source, + .index = token_starts[token_index], + .pending_invalid_token = null, + }; + const token = tokenizer.next(); + assert(token.tag == token_tag); + return tree.source[token.loc.start..token.loc.end]; + } + pub fn extraData(tree: Tree, index: usize, comptime T: type) T { const fields = std.meta.fields(T); var result: T = undefined; @@ -82,6 +125,12 @@ pub const Tree = struct { return result; } + pub fn rootDecls(tree: Tree) []const Node.Index { + // Root is always index 0. + const nodes_data = tree.nodes.items(.data); + return tree.extra_data[nodes_data[0].lhs..nodes_data[0].rhs]; + } + pub fn renderError(tree: Tree, parse_error: Error, stream: anytype) !void { const tokens = tree.tokens.items(.tag); switch (parse_error) { @@ -966,6 +1015,15 @@ pub const Tree = struct { return mem.indexOfScalar(u8, source, '\n') == null; } + pub fn getNodeSource(tree: Tree, node: Node.Index) []const u8 { + const token_starts = tree.tokens.items(.start); + const first_token = tree.firstToken(node); + const last_token = tree.lastToken(node); + const start = token_starts[first_token]; + const len = tree.tokenSlice(last_token).len; + return tree.source[start..][0..len]; + } + pub fn globalVarDecl(tree: Tree, node: Node.Index) full.VarDecl { assert(tree.nodes.items(.tag)[node] == .global_var_decl); const data = tree.nodes.items(.data)[node]; @@ -1653,7 +1711,31 @@ pub const Tree = struct { const token_tags = tree.tokens.items(.tag); var result: full.FnProto = .{ .ast = info, + .visib_token = null, + .extern_export_token = null, + .lib_name = null, + .name_token = null, + .lparen = undefined, }; + var i = info.fn_token; + while (i > 0) { + i -= 1; + switch (token_tags[i]) { + .keyword_extern, .keyword_export => result.extern_export_token = i, + .keyword_pub => result.visib_token = i, + .string_literal => result.lib_name = i, + else => break, + } + } + const after_fn_token = info.fn_token + 1; + if (token_tags[after_fn_token] 
== .identifier) { + result.name_token = after_fn_token; + result.lparen = after_fn_token + 1; + } else { + result.lparen = after_fn_token; + } + assert(token_tags[result.lparen] == .l_paren); + return result; } @@ -1924,6 +2006,11 @@ pub const full = struct { }; pub const FnProto = struct { + visib_token: ?TokenIndex, + extern_export_token: ?TokenIndex, + lib_name: ?TokenIndex, + name_token: ?TokenIndex, + lparen: TokenIndex, ast: Ast, pub const Ast = struct { @@ -1934,6 +2021,114 @@ pub const full = struct { section_expr: Node.Index, callconv_expr: Node.Index, }; + + pub const Param = struct { + first_doc_comment: ?TokenIndex, + name_token: ?TokenIndex, + comptime_noalias: ?TokenIndex, + anytype_ellipsis3: ?TokenIndex, + type_expr: Node.Index, + }; + + /// Abstracts over the fact that anytype and ... are not included + /// in the params slice, since they are simple identifiers and + /// not sub-expressions. + pub const Iterator = struct { + tree: *const Tree, + fn_proto: *const FnProto, + param_i: usize, + tok_i: TokenIndex, + tok_flag: bool, + + pub fn next(it: *Iterator) ?Param { + const token_tags = it.tree.tokens.items(.tag); + while (true) { + var first_doc_comment: ?TokenIndex = null; + var comptime_noalias: ?TokenIndex = null; + var name_token: ?TokenIndex = null; + if (!it.tok_flag) { + if (it.param_i >= it.fn_proto.ast.params.len) { + return null; + } + const param_type = it.fn_proto.ast.params[it.param_i]; + var tok_i = tree.firstToken(param_type) - 1; + while (true) : (tok_i -= 1) switch (token_tags[tok_i]) { + .colon => continue, + .identifier => name_token = tok_i, + .doc_comment => first_doc_comment = tok_i, + .keyword_comptime, .keyword_noalias => comptime_noalias = tok_i, + else => break, + }; + it.param_i += 1; + it.tok_i = tree.lastToken(param_type) + 1; + it.tok_flag = true; + return Param{ + .first_doc_comment = first_doc_comment, + .comptime_noalias = comptime_noalias, + .name_token = name_token, + .anytype_ellipsis3 = null, + .type_expr = 
param_type, + }; + } + // Look for anytype and ... params afterwards. + if (token_tags[it.tok_i] == .comma) { + it.tok_i += 1; + } else { + return null; + } + if (token_tags[it.tok_i] == .doc_comment) { + first_doc_comment = it.tok_i; + while (token_tags[it.tok_i] == .doc_comment) { + it.tok_i += 1; + } + } + switch (token_tags[it.tok_i]) { + .ellipsis3 => { + it.tok_flag = false; // Next iteration should return null. + return Param{ + .first_doc_comment = first_doc_comment, + .comptime_noalias = null, + .name_token = null, + .anytype_ellipsis3 = it.tok_i, + .type_expr = 0, + }; + }, + .keyword_noalias, .keyword_comptime => { + comptime_noalias = it.tok_i; + it.tok_i += 1; + }, + else => {}, + } + if (token_tags[it.tok_i] == .identifier and + token_tags[it.tok_i + 1] == .colon) + { + name_token = it.tok_i; + it.tok_i += 2; + } + if (token_tags[it.tok_i] == .keyword_anytype) { + it.tok_i += 1; + return Param{ + .first_doc_comment = first_doc_comment, + .comptime_noalias = comptime_noalias, + .name_token = name_token, + .anytype_ellipsis3 = it.tok_i - 1, + .type_expr = param_type, + }; + } + it.tok_flag = false; + } + } + }; + + pub fn iterate(fn_proto: FnProto, tree: Tree) Iterator { + return .{ + .tree = &tree, + .fn_proto = &fn_proto, + .param_i = 0, + .tok_i = undefined, + .tok_flag = false, + }; + } }; pub const StructInit = struct { diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index aad9ad3ed6..9046feda85 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -4223,7 +4223,7 @@ fn testParse(source: []const u8, allocator: *mem.Allocator, anything_changed: *b return error.ParseError; } - const formatted = try std.zig.render(allocator, tree); + const formatted = try tree.render(allocator); anything_changed.* = !mem.eql(u8, formatted, source); return formatted; } diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 8f98acc20c..1410ef5c13 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig 
@@ -13,28 +13,24 @@ const Token = std.zig.Token; const indent_delta = 4; const asm_indent_delta = 2; -pub const Error = error{ - /// Ran out of memory allocating call stack frames to complete rendering, or - /// ran out of memory allocating space in the output buffer. - OutOfMemory, -}; +pub const Error = ast.Tree.RenderError; const Writer = std.ArrayList(u8).Writer; const Ais = std.io.AutoIndentingStream(Writer); -/// `gpa` is used for allocating the resulting formatted source code, as well as -/// for allocating extra stack memory if needed, because this function utilizes recursion. -/// Note: that's not actually true yet, see https://github.com/ziglang/zig/issues/1006. -/// Caller owns the returned slice of bytes, allocated with `gpa`. -pub fn render(gpa: *mem.Allocator, tree: ast.Tree) Error![]u8 { +pub fn renderTree(buffer: *std.ArrayList(u8), tree: ast.Tree) Error!void { assert(tree.errors.len == 0); // Cannot render an invalid tree. - - var buffer = std.ArrayList(u8).init(gpa); - defer buffer.deinit(); - var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, buffer.writer()); - try renderRoot(&auto_indenting_stream, tree); - return buffer.toOwnedSlice(); + const ais = &auto_indenting_stream; + + // Render all the line comments at the beginning of the file. + const src_start: usize = if (mem.startsWith(u8, tree.source, "\xEF\xBB\xBF")) 3 else 0; + const comment_end_loc: usize = tree.tokens.items(.start)[0]; + _ = try renderCommentsAndNewlines(ais, tree, src_start, comment_end_loc); + + for (tree.rootDecls()) |decl| { + try renderMember(ais, tree, decl, .newline); + } } /// Assumes that start is the first byte past the previous token and @@ -75,21 +71,6 @@ fn renderCommentsAndNewlines(ais: *Ais, tree: ast.Tree, start: usize, end: usize return index != start; } -fn renderRoot(ais: *Ais, tree: ast.Tree) Error!void { - // Render all the line comments at the beginning of the file. 
- const src_start: usize = if (mem.startsWith(u8, tree.source, "\xEF\xBB\xBF")) 3 else 0; - const comment_end_loc: usize = tree.tokens.items(.start)[0]; - _ = try renderCommentsAndNewlines(ais, tree, src_start, comment_end_loc); - - // Root is always index 0. - const nodes_data = tree.nodes.items(.data); - const root_decls = tree.extra_data[nodes_data[0].lhs..nodes_data[0].rhs]; - - for (root_decls) |decl| { - try renderMember(ais, tree, decl, .newline); - } -} - fn renderMember(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) Error!void { const token_tags = tree.tokens.items(.tag); const main_tokens = tree.nodes.items(.main_token); @@ -1944,17 +1925,7 @@ fn renderToken(ais: *Ais, tree: ast.Tree, token_index: ast.TokenIndex, space: Sp const token_starts = tree.tokens.items(.start); const token_start = token_starts[token_index]; - const token_tag = token_tags[token_index]; - const lexeme = token_tag.lexeme() orelse lexeme: { - var tokenizer: std.zig.Tokenizer = .{ - .buffer = tree.source, - .index = token_start, - .pending_invalid_token = null, - }; - const token = tokenizer.next(); - assert(token.tag == token_tag); - break :lexeme tree.source[token.loc.start..token.loc.end]; - }; + const lexeme = tree.tokenSlice(token_index); try ais.writer().writeAll(lexeme); switch (space) { diff --git a/src/Compilation.zig b/src/Compilation.zig index c7bb260aa7..89c047065c 100644 --- a/src/Compilation.zig +++ b/src/Compilation.zig @@ -921,7 +921,7 @@ pub fn create(gpa: *Allocator, options: InitOptions) !*Compilation { // TODO this is duped so it can be freed in Container.deinit .sub_file_path = try gpa.dupe(u8, root_pkg.root_src_path), .source = .{ .unloaded = {} }, - .contents = .{ .not_available = {} }, + .tree = undefined, .status = .never_loaded, .pkg = root_pkg, .root_container = .{ @@ -1882,7 +1882,7 @@ pub fn cImport(comp: *Compilation, c_src: []const u8) !CImportResult { const c_headers_dir_path = try comp.zig_lib_directory.join(arena, &[_][]const 
u8{"include"}); const c_headers_dir_path_z = try arena.dupeZ(u8, c_headers_dir_path); var clang_errors: []translate_c.ClangErrMsg = &[0]translate_c.ClangErrMsg{}; - const tree = translate_c.translate( + var tree = translate_c.translate( comp.gpa, new_argv.ptr, new_argv.ptr + new_argv.len, @@ -1901,7 +1901,7 @@ pub fn cImport(comp: *Compilation, c_src: []const u8) !CImportResult { }; }, }; - defer tree.deinit(); + defer tree.deinit(comp.gpa); if (comp.verbose_cimport) { log.info("C import .d file: {s}", .{out_dep_path}); @@ -1919,9 +1919,10 @@ pub fn cImport(comp: *Compilation, c_src: []const u8) !CImportResult { var out_zig_file = try o_dir.createFile(cimport_zig_basename, .{}); defer out_zig_file.close(); - var bos = std.io.bufferedWriter(out_zig_file.writer()); - _ = try std.zig.render(comp.gpa, bos.writer(), tree); - try bos.flush(); + const formatted = try tree.render(comp.gpa); + defer comp.gpa.free(formatted); + + try out_zig_file.writeAll(formatted); man.writeManifest() catch |err| { log.warn("failed to write cache manifest for C import: {s}", .{@errorName(err)}); diff --git a/src/Module.zig b/src/Module.zig index fa9722814e..bd47332d15 100644 --- a/src/Module.zig +++ b/src/Module.zig @@ -244,9 +244,9 @@ pub const Decl = struct { } pub fn src(self: Decl) usize { - const tree = self.container.file_scope.contents.tree; - const decl_node = tree.root_node.decls()[self.src_index]; - return tree.token_locs[decl_node.firstToken()].start; + const tree = &self.container.file_scope.tree; + const decl_node = tree.rootDecls()[self.src_index]; + return tree.tokens.items(.start)[tree.firstToken(decl_node)]; } pub fn fullyQualifiedNameHash(self: Decl) Scope.NameHash { @@ -536,6 +536,12 @@ pub const Scope = struct { pub const File = struct { pub const base_tag: Tag = .file; base: Scope = Scope{ .tag = base_tag }, + status: enum { + never_loaded, + unloaded_success, + unloaded_parse_failure, + loaded_success, + }, /// Relative to the owning package's root_src_dir. 
/// Reference to external memory, not owned by File. @@ -544,16 +550,8 @@ pub const Scope = struct { unloaded: void, bytes: [:0]const u8, }, - contents: union { - not_available: void, - tree: *ast.Tree, - }, - status: enum { - never_loaded, - unloaded_success, - unloaded_parse_failure, - loaded_success, - }, + /// Whether this is populated or not depends on `status`. + tree: ast.Tree, /// Package that this file is a part of, managed externally. pkg: *Package, @@ -567,7 +565,7 @@ pub const Scope = struct { => {}, .loaded_success => { - self.contents.tree.deinit(); + self.tree.deinit(gpa); self.status = .unloaded_success; }, } @@ -905,7 +903,7 @@ pub fn ensureDeclAnalyzed(mod: *Module, decl: *Decl) InnerError!void { .unreferenced => false, }; - const type_changed = mod.astGenAndAnalyzeDecl(decl) catch |err| switch (err) { + const type_changed = mod.astgenAndSemaDecl(decl) catch |err| switch (err) { error.OutOfMemory => return error.OutOfMemory, error.AnalysisFail => return error.AnalysisFail, else => { @@ -947,514 +945,52 @@ pub fn ensureDeclAnalyzed(mod: *Module, decl: *Decl) InnerError!void { } } -fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool { +/// Returns `true` if the Decl type changed. +/// Returns `true` if this is the first time analyzing the Decl. +/// Returns `false` otherwise. +fn astgenAndSemaDecl(mod: *Module, decl: *Decl) !bool { const tracy = trace(@src()); defer tracy.end(); - const tree = try self.getAstTree(decl.container.file_scope); - const ast_node = tree.root_node.decls()[decl.src_index]; - switch (ast_node.tag) { - .FnProto => { - const fn_proto = ast_node.castTag(.FnProto).?; - - decl.analysis = .in_progress; - - // This arena allocator's memory is discarded at the end of this function. It is used - // to determine the type of the function, and hence the type of the decl, which is needed - // to complete the Decl analysis. 
- var fn_type_scope_arena = std.heap.ArenaAllocator.init(self.gpa); - defer fn_type_scope_arena.deinit(); - var fn_type_scope: Scope.GenZIR = .{ - .decl = decl, - .arena = &fn_type_scope_arena.allocator, - .parent = &decl.container.base, - }; - defer fn_type_scope.instructions.deinit(self.gpa); - - decl.is_pub = fn_proto.getVisibToken() != null; - - const param_decls = fn_proto.params(); - const param_types = try fn_type_scope.arena.alloc(*zir.Inst, param_decls.len); - - const fn_src = tree.token_locs[fn_proto.fn_token].start; - const type_type = try astgen.addZIRInstConst(self, &fn_type_scope.base, fn_src, .{ - .ty = Type.initTag(.type), - .val = Value.initTag(.type_type), - }); - const type_type_rl: astgen.ResultLoc = .{ .ty = type_type }; - for (param_decls) |param_decl, i| { - const param_type_node = switch (param_decl.param_type) { - .any_type => |node| return self.failNode(&fn_type_scope.base, node, "TODO implement anytype parameter", .{}), - .type_expr => |node| node, - }; - param_types[i] = try astgen.expr(self, &fn_type_scope.base, type_type_rl, param_type_node); - } - if (fn_proto.getVarArgsToken()) |var_args_token| { - return self.failTok(&fn_type_scope.base, var_args_token, "TODO implement var args", .{}); - } - if (fn_proto.getLibName()) |lib_name| blk: { - const lib_name_str = mem.trim(u8, tree.tokenSlice(lib_name.firstToken()), "\""); // TODO: call identifierTokenString - log.debug("extern fn symbol expected in lib '{s}'", .{lib_name_str}); - const target = self.comp.getTarget(); - if (target_util.is_libc_lib_name(target, lib_name_str)) { - if (!self.comp.bin_file.options.link_libc) { - return self.failNode( - &fn_type_scope.base, - lib_name, - "dependency on libc must be explicitly specified in the build command", - .{}, - ); - } - break :blk; - } - if (target_util.is_libcpp_lib_name(target, lib_name_str)) { - if (!self.comp.bin_file.options.link_libcpp) { - return self.failNode( - &fn_type_scope.base, - lib_name, - "dependency on libc++ must be 
explicitly specified in the build command", - .{}, - ); - } - break :blk; - } - if (!target.isWasm() and !self.comp.bin_file.options.pic) { - return self.failNode( - &fn_type_scope.base, - lib_name, - "dependency on dynamic library '{s}' requires enabling Position Independent Code. Fixed by `-l{s}` or `-fPIC`.", - .{ lib_name, lib_name }, - ); - } - self.comp.stage1AddLinkLib(lib_name_str) catch |err| { - return self.failNode( - &fn_type_scope.base, - lib_name, - "unable to add link lib '{s}': {s}", - .{ lib_name, @errorName(err) }, - ); - }; - } - if (fn_proto.getAlignExpr()) |align_expr| { - return self.failNode(&fn_type_scope.base, align_expr, "TODO implement function align expression", .{}); - } - if (fn_proto.getSectionExpr()) |sect_expr| { - return self.failNode(&fn_type_scope.base, sect_expr, "TODO implement function section expression", .{}); - } - if (fn_proto.getCallconvExpr()) |callconv_expr| { - return self.failNode( - &fn_type_scope.base, - callconv_expr, - "TODO implement function calling convention expression", - .{}, - ); - } - const return_type_expr = switch (fn_proto.return_type) { - .Explicit => |node| node, - .InferErrorSet => |node| return self.failNode(&fn_type_scope.base, node, "TODO implement inferred error sets", .{}), - .Invalid => |tok| return self.failTok(&fn_type_scope.base, tok, "unable to parse return type", .{}), - }; - - const return_type_inst = try astgen.expr(self, &fn_type_scope.base, type_type_rl, return_type_expr); - const fn_type_inst = try astgen.addZIRInst(self, &fn_type_scope.base, fn_src, zir.Inst.FnType, .{ - .return_type = return_type_inst, - .param_types = param_types, - }, .{}); - - if (std.builtin.mode == .Debug and self.comp.verbose_ir) { - zir.dumpZir(self.gpa, "fn_type", decl.name, fn_type_scope.instructions.items) catch {}; - } - - // We need the memory for the Type to go into the arena for the Decl - var decl_arena = std.heap.ArenaAllocator.init(self.gpa); - errdefer decl_arena.deinit(); - const decl_arena_state 
= try decl_arena.allocator.create(std.heap.ArenaAllocator.State); - - var inst_table = Scope.Block.InstTable.init(self.gpa); - defer inst_table.deinit(); - - var branch_quota: u32 = default_eval_branch_quota; - - var block_scope: Scope.Block = .{ - .parent = null, - .inst_table = &inst_table, - .func = null, - .owner_decl = decl, - .src_decl = decl, - .instructions = .{}, - .arena = &decl_arena.allocator, - .inlining = null, - .is_comptime = false, - .branch_quota = &branch_quota, - }; - defer block_scope.instructions.deinit(self.gpa); - - const fn_type = try zir_sema.analyzeBodyValueAsType(self, &block_scope, fn_type_inst, .{ - .instructions = fn_type_scope.instructions.items, - }); - const body_node = fn_proto.getBodyNode() orelse { - // Extern function. - var type_changed = true; - if (decl.typedValueManaged()) |tvm| { - type_changed = !tvm.typed_value.ty.eql(fn_type); - - tvm.deinit(self.gpa); - } - const fn_val = try Value.Tag.extern_fn.create(&decl_arena.allocator, decl); - - decl_arena_state.* = decl_arena.state; - decl.typed_value = .{ - .most_recent = .{ - .typed_value = .{ .ty = fn_type, .val = fn_val }, - .arena = decl_arena_state, - }, - }; - decl.analysis = .complete; - decl.generation = self.generation; - - try self.comp.bin_file.allocateDeclIndexes(decl); - try self.comp.work_queue.writeItem(.{ .codegen_decl = decl }); - - if (type_changed and self.emit_h != null) { - try self.comp.work_queue.writeItem(.{ .emit_h_decl = decl }); - } - - return type_changed; - }; - - const new_func = try decl_arena.allocator.create(Fn); - const fn_payload = try decl_arena.allocator.create(Value.Payload.Function); - - const fn_zir: zir.Body = blk: { - // We put the ZIR inside the Decl arena. - var gen_scope: Scope.GenZIR = .{ - .decl = decl, - .arena = &decl_arena.allocator, - .parent = &decl.container.base, - }; - defer gen_scope.instructions.deinit(self.gpa); - - // We need an instruction for each parameter, and they must be first in the body. 
- try gen_scope.instructions.resize(self.gpa, fn_proto.params_len); - var params_scope = &gen_scope.base; - for (fn_proto.params()) |param, i| { - const name_token = param.name_token.?; - const src = tree.token_locs[name_token].start; - const param_name = try self.identifierTokenString(&gen_scope.base, name_token); - const arg = try decl_arena.allocator.create(zir.Inst.Arg); - arg.* = .{ - .base = .{ - .tag = .arg, - .src = src, - }, - .positionals = .{ - .name = param_name, - }, - .kw_args = .{}, - }; - gen_scope.instructions.items[i] = &arg.base; - const sub_scope = try decl_arena.allocator.create(Scope.LocalVal); - sub_scope.* = .{ - .parent = params_scope, - .gen_zir = &gen_scope, - .name = param_name, - .inst = &arg.base, - }; - params_scope = &sub_scope.base; - } - - const body_block = body_node.cast(ast.Node.Block).?; - - try astgen.blockExpr(self, params_scope, body_block); - - if (gen_scope.instructions.items.len == 0 or - !gen_scope.instructions.items[gen_scope.instructions.items.len - 1].tag.isNoReturn()) - { - const src = tree.token_locs[body_block.rbrace].start; - _ = try astgen.addZIRNoOp(self, &gen_scope.base, src, .returnvoid); - } - - if (std.builtin.mode == .Debug and self.comp.verbose_ir) { - zir.dumpZir(self.gpa, "fn_body", decl.name, gen_scope.instructions.items) catch {}; - } - - break :blk .{ - .instructions = try gen_scope.arena.dupe(*zir.Inst, gen_scope.instructions.items), - }; - }; - - const is_inline = blk: { - if (fn_proto.getExternExportInlineToken()) |maybe_inline_token| { - if (tree.token_ids[maybe_inline_token] == .Keyword_inline) { - break :blk true; - } - } - break :blk false; - }; - const anal_state = ([2]Fn.Analysis{ .queued, .inline_only })[@boolToInt(is_inline)]; - - new_func.* = .{ - .state = anal_state, - .zir = fn_zir, - .body = undefined, - .owner_decl = decl, - }; - fn_payload.* = .{ - .base = .{ .tag = .function }, - .data = new_func, - }; - - var prev_type_has_bits = false; - var prev_is_inline = false; - var 
type_changed = true; - - if (decl.typedValueManaged()) |tvm| { - prev_type_has_bits = tvm.typed_value.ty.hasCodeGenBits(); - type_changed = !tvm.typed_value.ty.eql(fn_type); - if (tvm.typed_value.val.castTag(.function)) |payload| { - const prev_func = payload.data; - prev_is_inline = prev_func.state == .inline_only; - } - - tvm.deinit(self.gpa); - } - - decl_arena_state.* = decl_arena.state; - decl.typed_value = .{ - .most_recent = .{ - .typed_value = .{ - .ty = fn_type, - .val = Value.initPayload(&fn_payload.base), - }, - .arena = decl_arena_state, + const tree = try mod.getAstTree(decl.container.file_scope); + const node_tags = tree.nodes.items(.tag); + const node_datas = tree.nodes.items(.data); + const decl_node = tree.rootDecls()[decl.src_index]; + switch (node_tags[decl_node]) { + .fn_decl => { + const fn_proto = node_datas[decl_node].lhs; + const body = node_datas[decl_node].rhs; + switch (node_tags[fn_proto]) { + .fn_proto_simple => { + var params: [1]ast.Node.Index = undefined; + return mod.astgenAndSemaFn(decl, tree, body, tree.fnProtoSimple(¶ms, fn_proto)); }, - }; - decl.analysis = .complete; - decl.generation = self.generation; - - if (!is_inline and fn_type.hasCodeGenBits()) { - // We don't fully codegen the decl until later, but we do need to reserve a global - // offset table index for it. This allows us to codegen decls out of dependency order, - // increasing how many computations can be done in parallel. 
- try self.comp.bin_file.allocateDeclIndexes(decl); - try self.comp.work_queue.writeItem(.{ .codegen_decl = decl }); - if (type_changed and self.emit_h != null) { - try self.comp.work_queue.writeItem(.{ .emit_h_decl = decl }); - } - } else if (!prev_is_inline and prev_type_has_bits) { - self.comp.bin_file.freeDecl(decl); - } - - if (fn_proto.getExternExportInlineToken()) |maybe_export_token| { - if (tree.token_ids[maybe_export_token] == .Keyword_export) { - if (is_inline) { - return self.failTok( - &block_scope.base, - maybe_export_token, - "export of inline function", - .{}, - ); - } - const export_src = tree.token_locs[maybe_export_token].start; - const name_loc = tree.token_locs[fn_proto.getNameToken().?]; - const name = tree.tokenSliceLoc(name_loc); - // The scope needs to have the decl in it. - try self.analyzeExport(&block_scope.base, export_src, name, decl); - } - } - return type_changed or is_inline != prev_is_inline; - }, - .VarDecl => { - const var_decl = @fieldParentPtr(ast.Node.VarDecl, "base", ast_node); - - decl.analysis = .in_progress; - - // We need the memory for the Type to go into the arena for the Decl - var decl_arena = std.heap.ArenaAllocator.init(self.gpa); - errdefer decl_arena.deinit(); - const decl_arena_state = try decl_arena.allocator.create(std.heap.ArenaAllocator.State); - - var decl_inst_table = Scope.Block.InstTable.init(self.gpa); - defer decl_inst_table.deinit(); - - var branch_quota: u32 = default_eval_branch_quota; - - var block_scope: Scope.Block = .{ - .parent = null, - .inst_table = &decl_inst_table, - .func = null, - .owner_decl = decl, - .src_decl = decl, - .instructions = .{}, - .arena = &decl_arena.allocator, - .inlining = null, - .is_comptime = true, - .branch_quota = &branch_quota, - }; - defer block_scope.instructions.deinit(self.gpa); - - decl.is_pub = var_decl.getVisibToken() != null; - const is_extern = blk: { - const maybe_extern_token = var_decl.getExternExportToken() orelse - break :blk false; - if 
(tree.token_ids[maybe_extern_token] != .Keyword_extern) break :blk false; - if (var_decl.getInitNode()) |some| { - return self.failNode(&block_scope.base, some, "extern variables have no initializers", .{}); - } - break :blk true; - }; - if (var_decl.getLibName()) |lib_name| { - assert(is_extern); - return self.failNode(&block_scope.base, lib_name, "TODO implement function library name", .{}); - } - const is_mutable = tree.token_ids[var_decl.mut_token] == .Keyword_var; - const is_threadlocal = if (var_decl.getThreadLocalToken()) |some| blk: { - if (!is_mutable) { - return self.failTok(&block_scope.base, some, "threadlocal variable cannot be constant", .{}); - } - break :blk true; - } else false; - assert(var_decl.getComptimeToken() == null); - if (var_decl.getAlignNode()) |align_expr| { - return self.failNode(&block_scope.base, align_expr, "TODO implement function align expression", .{}); - } - if (var_decl.getSectionNode()) |sect_expr| { - return self.failNode(&block_scope.base, sect_expr, "TODO implement function section expression", .{}); - } - - const var_info: struct { ty: Type, val: ?Value } = if (var_decl.getInitNode()) |init_node| vi: { - var gen_scope_arena = std.heap.ArenaAllocator.init(self.gpa); - defer gen_scope_arena.deinit(); - var gen_scope: Scope.GenZIR = .{ - .decl = decl, - .arena = &gen_scope_arena.allocator, - .parent = &decl.container.base, - }; - defer gen_scope.instructions.deinit(self.gpa); - - const init_result_loc: astgen.ResultLoc = if (var_decl.getTypeNode()) |type_node| rl: { - const src = tree.token_locs[type_node.firstToken()].start; - const type_type = try astgen.addZIRInstConst(self, &gen_scope.base, src, .{ - .ty = Type.initTag(.type), - .val = Value.initTag(.type_type), - }); - const var_type = try astgen.expr(self, &gen_scope.base, .{ .ty = type_type }, type_node); - break :rl .{ .ty = var_type }; - } else .none; - - const init_inst = try astgen.comptimeExpr(self, &gen_scope.base, init_result_loc, init_node); - if 
(std.builtin.mode == .Debug and self.comp.verbose_ir) { - zir.dumpZir(self.gpa, "var_init", decl.name, gen_scope.instructions.items) catch {}; - } - - var var_inst_table = Scope.Block.InstTable.init(self.gpa); - defer var_inst_table.deinit(); - - var branch_quota_vi: u32 = default_eval_branch_quota; - var inner_block: Scope.Block = .{ - .parent = null, - .inst_table = &var_inst_table, - .func = null, - .owner_decl = decl, - .src_decl = decl, - .instructions = .{}, - .arena = &gen_scope_arena.allocator, - .inlining = null, - .is_comptime = true, - .branch_quota = &branch_quota_vi, - }; - defer inner_block.instructions.deinit(self.gpa); - try zir_sema.analyzeBody(self, &inner_block, .{ - .instructions = gen_scope.instructions.items, - }); - - // The result location guarantees the type coercion. - const analyzed_init_inst = var_inst_table.get(init_inst).?; - // The is_comptime in the Scope.Block guarantees the result is comptime-known. - const val = analyzed_init_inst.value().?; - - const ty = try analyzed_init_inst.ty.copy(block_scope.arena); - break :vi .{ - .ty = ty, - .val = try val.copy(block_scope.arena), - }; - } else if (!is_extern) { - return self.failTok(&block_scope.base, var_decl.firstToken(), "variables must be initialized", .{}); - } else if (var_decl.getTypeNode()) |type_node| vi: { - // Temporary arena for the zir instructions. 
- var type_scope_arena = std.heap.ArenaAllocator.init(self.gpa); - defer type_scope_arena.deinit(); - var type_scope: Scope.GenZIR = .{ - .decl = decl, - .arena = &type_scope_arena.allocator, - .parent = &decl.container.base, - }; - defer type_scope.instructions.deinit(self.gpa); - - const var_type = try astgen.typeExpr(self, &type_scope.base, type_node); - if (std.builtin.mode == .Debug and self.comp.verbose_ir) { - zir.dumpZir(self.gpa, "var_type", decl.name, type_scope.instructions.items) catch {}; - } - - const ty = try zir_sema.analyzeBodyValueAsType(self, &block_scope, var_type, .{ - .instructions = type_scope.instructions.items, - }); - break :vi .{ - .ty = ty, - .val = null, - }; - } else { - return self.failTok(&block_scope.base, var_decl.firstToken(), "unable to infer variable type", .{}); - }; - - if (is_mutable and !var_info.ty.isValidVarType(is_extern)) { - return self.failTok(&block_scope.base, var_decl.firstToken(), "variable of type '{}' must be const", .{var_info.ty}); - } - - var type_changed = true; - if (decl.typedValueManaged()) |tvm| { - type_changed = !tvm.typed_value.ty.eql(var_info.ty); - - tvm.deinit(self.gpa); - } - - const new_variable = try decl_arena.allocator.create(Var); - new_variable.* = .{ - .owner_decl = decl, - .init = var_info.val orelse undefined, - .is_extern = is_extern, - .is_mutable = is_mutable, - .is_threadlocal = is_threadlocal, - }; - const var_val = try Value.Tag.variable.create(&decl_arena.allocator, new_variable); - - decl_arena_state.* = decl_arena.state; - decl.typed_value = .{ - .most_recent = .{ - .typed_value = .{ - .ty = var_info.ty, - .val = var_val, - }, - .arena = decl_arena_state, + .fn_proto_multi => return mod.astgenAndSemaFn(decl, tree, body, tree.fnProtoMulti(fn_proto)), + .fn_proto_one => { + var params: [1]ast.Node.Index = undefined; + return mod.astgenAndSemaFn(decl, tree, body, tree.fnProtoOne(¶ms, fn_proto)); }, - }; - decl.analysis = .complete; - decl.generation = self.generation; - - if 
(var_decl.getExternExportToken()) |maybe_export_token| { - if (tree.token_ids[maybe_export_token] == .Keyword_export) { - const export_src = tree.token_locs[maybe_export_token].start; - const name_loc = tree.token_locs[var_decl.name_token]; - const name = tree.tokenSliceLoc(name_loc); - // The scope needs to have the decl in it. - try self.analyzeExport(&block_scope.base, export_src, name, decl); - } + .fn_proto => return mod.astgenAndSemaFn(decl, tree, body, tree.fnProto(fn_proto)), + else => unreachable, } - return type_changed; }, - .Comptime => { - const comptime_decl = @fieldParentPtr(ast.Node.Comptime, "base", ast_node); + .fn_proto_simple => { + var params: [1]ast.Node.Index = undefined; + return mod.astgenAndSemaFn(decl, tree, null, tree.fnProtoSimple(¶ms, decl_node)); + }, + .fn_proto_multi => return mod.astgenAndSemaFn(decl, tree, null, tree.fnProtoMulti(decl_node)), + .fn_proto_one => { + var params: [1]ast.Node.Index = undefined; + return mod.astgenAndSemaFn(decl, tree, null, tree.fnProtoOne(¶ms, decl_node)); + }, + .fn_proto => return mod.astgenAndSemaFn(decl, tree, null, tree.fnProto(decl_node)), + .global_var_decl => return mod.astgenAndSemaVarDecl(decl, tree, tree.globalVarDecl(decl_node)), + .local_var_decl => return mod.astgenAndSemaVarDecl(decl, tree, tree.localVarDecl(decl_node)), + .simple_var_decl => return mod.astgenAndSemaVarDecl(decl, tree, tree.simpleVarDecl(decl_node)), + .aligned_var_decl => return mod.astgenAndSemaVarDecl(decl, tree, tree.alignedVarDecl(decl_node)), + + .@"comptime" => { decl.analysis = .in_progress; // A comptime decl does not store any value so we can just deinit this arena after analysis is done. 
@@ -1499,11 +1035,548 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool { decl.generation = self.generation; return true; }, - .Use => @panic("TODO usingnamespace decl"), + .UsingNamespace => @panic("TODO usingnamespace decl"), else => unreachable, } } +fn astgenAndSemaFn( + mod: *Module, + decl: *Decl, + tree: ast.Tree, + body_node: ast.Node.Index, + fn_proto: ast.full.FnProto, +) !bool { + const tracy = trace(@src()); + defer tracy.end(); + + decl.analysis = .in_progress; + + const token_starts = tree.tokens.items(.start); + + // This arena allocator's memory is discarded at the end of this function. It is used + // to determine the type of the function, and hence the type of the decl, which is needed + // to complete the Decl analysis. + var fn_type_scope_arena = std.heap.ArenaAllocator.init(self.gpa); + defer fn_type_scope_arena.deinit(); + var fn_type_scope: Scope.GenZIR = .{ + .decl = decl, + .arena = &fn_type_scope_arena.allocator, + .parent = &decl.container.base, + }; + defer fn_type_scope.instructions.deinit(self.gpa); + + decl.is_pub = fn_proto.visib_token != null; + + // The AST params array does not contain anytype and ... parameters. + // We must iterate to count how many param types to allocate. 
+ const param_count = blk: { + var count: usize = 0; + var it = fn_proto.iterate(tree); + while (it.next()) |_| { + count += 1; + } + break :blk count; + }; + const param_types = try fn_type_scope.arena.alloc(*zir.Inst, param_count); + const fn_src = token_starts[fn_proto.ast.fn_token]; + const type_type = try astgen.addZIRInstConst(self, &fn_type_scope.base, fn_src, .{ + .ty = Type.initTag(.type), + .val = Value.initTag(.type_type), + }); + const type_type_rl: astgen.ResultLoc = .{ .ty = type_type }; + + { + var param_type_i: usize = 0; + var it = fn_proto.iterate(tree); + while (it.next()) |param| : (param_type_i += 1) { + if (param.anytype_ellipsis3) |token| { + switch (token_tags[token]) { + .keyword_anytype => return self.failTok( + &fn_type_scope.base, + tok_i, + "TODO implement anytype parameter", + .{}, + ), + .ellipsis3 => return self.failTok( + &fn_type_scope.base, + token, + "TODO implement var args", + .{}, + ), + else => unreachable, + } + } + const param_type_node = param.type_expr; + assert(param_type_node != 0); + param_types[param_type_i] = + try astgen.expr(self, &fn_type_scope.base, type_type_rl, param_type_node); + } + assert(param_type_i == param_count); + } + if (fn_proto.lib_name) |lib_name| blk: { + // TODO call std.zig.parseStringLiteral + const lib_name_str = mem.trim(u8, tree.tokenSlice(lib_name), "\""); + log.debug("extern fn symbol expected in lib '{s}'", .{lib_name_str}); + const target = self.comp.getTarget(); + if (target_util.is_libc_lib_name(target, lib_name_str)) { + if (!self.comp.bin_file.options.link_libc) { + return self.failTok( + &fn_type_scope.base, + lib_name, + "dependency on libc must be explicitly specified in the build command", + .{}, + ); + } + break :blk; + } + if (target_util.is_libcpp_lib_name(target, lib_name_str)) { + if (!self.comp.bin_file.options.link_libcpp) { + return self.failTok( + &fn_type_scope.base, + lib_name, + "dependency on libc++ must be explicitly specified in the build command", + .{}, + ); + } 
+ break :blk; + } + if (!target.isWasm() and !self.comp.bin_file.options.pic) { + return self.failTok( + &fn_type_scope.base, + lib_name, + "dependency on dynamic library '{s}' requires enabling Position Independent Code. Fixed by `-l{s}` or `-fPIC`.", + .{ lib_name, lib_name }, + ); + } + self.comp.stage1AddLinkLib(lib_name_str) catch |err| { + return self.failTok( + &fn_type_scope.base, + lib_name, + "unable to add link lib '{s}': {s}", + .{ lib_name, @errorName(err) }, + ); + }; + } + if (fn_proto.ast.align_expr) |align_expr| { + return self.failNode(&fn_type_scope.base, align_expr, "TODO implement function align expression", .{}); + } + if (fn_proto.ast.section_expr) |sect_expr| { + return self.failNode(&fn_type_scope.base, sect_expr, "TODO implement function section expression", .{}); + } + if (fn_proto.ast.callconv_expr) |callconv_expr| { + return self.failNode( + &fn_type_scope.base, + callconv_expr, + "TODO implement function calling convention expression", + .{}, + ); + } + const maybe_bang = tree.firstToken(fn_proto.ast.return_type) - 1; + if (token_tags[maybe_bang] == .bang) { + return self.failTok(&fn_type_scope.base, maybe_bang, "TODO implement inferred error sets", .{}); + } + const return_type_inst = try astgen.expr( + self, + &fn_type_scope.base, + type_type_rl, + fn_proto.ast.return_type, + ); + const fn_type_inst = try astgen.addZIRInst(self, &fn_type_scope.base, fn_src, zir.Inst.FnType, .{ + .return_type = return_type_inst, + .param_types = param_types, + }, .{}); + + if (std.builtin.mode == .Debug and self.comp.verbose_ir) { + zir.dumpZir(self.gpa, "fn_type", decl.name, fn_type_scope.instructions.items) catch {}; + } + + // We need the memory for the Type to go into the arena for the Decl + var decl_arena = std.heap.ArenaAllocator.init(self.gpa); + errdefer decl_arena.deinit(); + const decl_arena_state = try decl_arena.allocator.create(std.heap.ArenaAllocator.State); + + var inst_table = Scope.Block.InstTable.init(self.gpa); + defer 
inst_table.deinit(); + + var branch_quota: u32 = default_eval_branch_quota; + + var block_scope: Scope.Block = .{ + .parent = null, + .inst_table = &inst_table, + .func = null, + .owner_decl = decl, + .src_decl = decl, + .instructions = .{}, + .arena = &decl_arena.allocator, + .inlining = null, + .is_comptime = false, + .branch_quota = &branch_quota, + }; + defer block_scope.instructions.deinit(self.gpa); + + const fn_type = try zir_sema.analyzeBodyValueAsType(self, &block_scope, fn_type_inst, .{ + .instructions = fn_type_scope.instructions.items, + }); + if (body_node == 0) { + // Extern function. + var type_changed = true; + if (decl.typedValueManaged()) |tvm| { + type_changed = !tvm.typed_value.ty.eql(fn_type); + + tvm.deinit(self.gpa); + } + const fn_val = try Value.Tag.extern_fn.create(&decl_arena.allocator, decl); + + decl_arena_state.* = decl_arena.state; + decl.typed_value = .{ + .most_recent = .{ + .typed_value = .{ .ty = fn_type, .val = fn_val }, + .arena = decl_arena_state, + }, + }; + decl.analysis = .complete; + decl.generation = self.generation; + + try self.comp.bin_file.allocateDeclIndexes(decl); + try self.comp.work_queue.writeItem(.{ .codegen_decl = decl }); + + if (type_changed and self.emit_h != null) { + try self.comp.work_queue.writeItem(.{ .emit_h_decl = decl }); + } + + return type_changed; + } + + const new_func = try decl_arena.allocator.create(Fn); + const fn_payload = try decl_arena.allocator.create(Value.Payload.Function); + + const fn_zir: zir.Body = blk: { + // We put the ZIR inside the Decl arena. + var gen_scope: Scope.GenZIR = .{ + .decl = decl, + .arena = &decl_arena.allocator, + .parent = &decl.container.base, + }; + defer gen_scope.instructions.deinit(self.gpa); + + // We need an instruction for each parameter, and they must be first in the body. 
+ try gen_scope.instructions.resize(self.gpa, param_count); + var params_scope = &gen_scope.base; + var i: usize = 0; + var it = fn_proto.iterate(tree); + while (it.next()) |param| : (i += 1) { + const name_token = param.name_token.?; + const src = token_starts[name_token]; + const param_name = try self.identifierTokenString(&gen_scope.base, name_token); + const arg = try decl_arena.allocator.create(zir.Inst.NoOp); + arg.* = .{ + .base = .{ + .tag = .arg, + .src = src, + }, + .positionals = .{}, + .kw_args = .{}, + }; + gen_scope.instructions.items[i] = &arg.base; + const sub_scope = try decl_arena.allocator.create(Scope.LocalVal); + sub_scope.* = .{ + .parent = params_scope, + .gen_zir = &gen_scope, + .name = param_name, + .inst = &arg.base, + }; + params_scope = &sub_scope.base; + } + + try astgen.blockExpr(self, params_scope, body_node); + + if (gen_scope.instructions.items.len == 0 or + !gen_scope.instructions.items[gen_scope.instructions.items.len - 1].tag.isNoReturn()) + { + const src = token_starts[tree.lastToken(body_node)]; + _ = try astgen.addZIRNoOp(self, &gen_scope.base, src, .returnvoid); + } + + if (std.builtin.mode == .Debug and self.comp.verbose_ir) { + zir.dumpZir(self.gpa, "fn_body", decl.name, gen_scope.instructions.items) catch {}; + } + + break :blk .{ + .instructions = try gen_scope.arena.dupe(*zir.Inst, gen_scope.instructions.items), + }; + }; + + const is_inline = fn_type.fnCallingConvention() == .Inline; + const anal_state: Fn.Analysis = if (is_inline) .inline_only else .queued; + + new_func.* = .{ + .state = anal_state, + .zir = fn_zir, + .body = undefined, + .owner_decl = decl, + }; + fn_payload.* = .{ + .base = .{ .tag = .function }, + .data = new_func, + }; + + var prev_type_has_bits = false; + var prev_is_inline = false; + var type_changed = true; + + if (decl.typedValueManaged()) |tvm| { + prev_type_has_bits = tvm.typed_value.ty.hasCodeGenBits(); + type_changed = !tvm.typed_value.ty.eql(fn_type); + if 
(tvm.typed_value.val.castTag(.function)) |payload| { + const prev_func = payload.data; + prev_is_inline = prev_func.state == .inline_only; + } + + tvm.deinit(self.gpa); + } + + decl_arena_state.* = decl_arena.state; + decl.typed_value = .{ + .most_recent = .{ + .typed_value = .{ + .ty = fn_type, + .val = Value.initPayload(&fn_payload.base), + }, + .arena = decl_arena_state, + }, + }; + decl.analysis = .complete; + decl.generation = self.generation; + + if (!is_inline and fn_type.hasCodeGenBits()) { + // We don't fully codegen the decl until later, but we do need to reserve a global + // offset table index for it. This allows us to codegen decls out of dependency order, + // increasing how many computations can be done in parallel. + try self.comp.bin_file.allocateDeclIndexes(decl); + try self.comp.work_queue.writeItem(.{ .codegen_decl = decl }); + if (type_changed and self.emit_h != null) { + try self.comp.work_queue.writeItem(.{ .emit_h_decl = decl }); + } + } else if (!prev_is_inline and prev_type_has_bits) { + self.comp.bin_file.freeDecl(decl); + } + + if (fn_proto.extern_export_token) |maybe_export_token| { + if (token_tags[maybe_export_token] == .Keyword_export) { + if (is_inline) { + return self.failTok( + &block_scope.base, + maybe_export_token, + "export of inline function", + .{}, + ); + } + const export_src = token_starts[maybe_export_token]; + const name = tree.tokenSlice(fn_proto.name_token.?); // TODO identifierTokenString + // The scope needs to have the decl in it. 
+ try self.analyzeExport(&block_scope.base, export_src, name, decl); + } + } + return type_changed or is_inline != prev_is_inline; +} + +fn astgenAndSemaVarDecl( + mod: *Module, + decl: *Decl, + tree: ast.Tree, + var_decl: ast.full.VarDecl, +) !bool { + const tracy = trace(@src()); + defer tracy.end(); + + decl.analysis = .in_progress; + + const token_starts = tree.tokens.items(.start); + + // We need the memory for the Type to go into the arena for the Decl + var decl_arena = std.heap.ArenaAllocator.init(self.gpa); + errdefer decl_arena.deinit(); + const decl_arena_state = try decl_arena.allocator.create(std.heap.ArenaAllocator.State); + + var decl_inst_table = Scope.Block.InstTable.init(self.gpa); + defer decl_inst_table.deinit(); + + var branch_quota: u32 = default_eval_branch_quota; + + var block_scope: Scope.Block = .{ + .parent = null, + .inst_table = &decl_inst_table, + .func = null, + .owner_decl = decl, + .src_decl = decl, + .instructions = .{}, + .arena = &decl_arena.allocator, + .inlining = null, + .is_comptime = true, + .branch_quota = &branch_quota, + }; + defer block_scope.instructions.deinit(self.gpa); + + decl.is_pub = var_decl.getVisibToken() != null; + const is_extern = blk: { + const maybe_extern_token = var_decl.getExternExportToken() orelse + break :blk false; + if (tree.token_ids[maybe_extern_token] != .Keyword_extern) break :blk false; + if (var_decl.getInitNode()) |some| { + return self.failNode(&block_scope.base, some, "extern variables have no initializers", .{}); + } + break :blk true; + }; + if (var_decl.getLibName()) |lib_name| { + assert(is_extern); + return self.failNode(&block_scope.base, lib_name, "TODO implement function library name", .{}); + } + const is_mutable = tree.token_ids[var_decl.mut_token] == .Keyword_var; + const is_threadlocal = if (var_decl.getThreadLocalToken()) |some| blk: { + if (!is_mutable) { + return self.failTok(&block_scope.base, some, "threadlocal variable cannot be constant", .{}); + } + break :blk true; + } 
else false; + assert(var_decl.getComptimeToken() == null); + if (var_decl.getAlignNode()) |align_expr| { + return self.failNode(&block_scope.base, align_expr, "TODO implement function align expression", .{}); + } + if (var_decl.getSectionNode()) |sect_expr| { + return self.failNode(&block_scope.base, sect_expr, "TODO implement function section expression", .{}); + } + + const var_info: struct { ty: Type, val: ?Value } = if (var_decl.getInitNode()) |init_node| vi: { + var gen_scope_arena = std.heap.ArenaAllocator.init(self.gpa); + defer gen_scope_arena.deinit(); + var gen_scope: Scope.GenZIR = .{ + .decl = decl, + .arena = &gen_scope_arena.allocator, + .parent = &decl.container.base, + }; + defer gen_scope.instructions.deinit(self.gpa); + + const init_result_loc: astgen.ResultLoc = if (var_decl.getTypeNode()) |type_node| rl: { + const src = token_starts[type_node.firstToken()]; + const type_type = try astgen.addZIRInstConst(self, &gen_scope.base, src, .{ + .ty = Type.initTag(.type), + .val = Value.initTag(.type_type), + }); + const var_type = try astgen.expr(self, &gen_scope.base, .{ .ty = type_type }, type_node); + break :rl .{ .ty = var_type }; + } else .none; + + const init_inst = try astgen.comptimeExpr(self, &gen_scope.base, init_result_loc, init_node); + if (std.builtin.mode == .Debug and self.comp.verbose_ir) { + zir.dumpZir(self.gpa, "var_init", decl.name, gen_scope.instructions.items) catch {}; + } + + var var_inst_table = Scope.Block.InstTable.init(self.gpa); + defer var_inst_table.deinit(); + + var branch_quota_vi: u32 = default_eval_branch_quota; + var inner_block: Scope.Block = .{ + .parent = null, + .inst_table = &var_inst_table, + .func = null, + .owner_decl = decl, + .src_decl = decl, + .instructions = .{}, + .arena = &gen_scope_arena.allocator, + .inlining = null, + .is_comptime = true, + .branch_quota = &branch_quota_vi, + }; + defer inner_block.instructions.deinit(self.gpa); + try zir_sema.analyzeBody(self, &inner_block, .{ + .instructions = 
gen_scope.instructions.items, + }); + + // The result location guarantees the type coercion. + const analyzed_init_inst = var_inst_table.get(init_inst).?; + // The is_comptime in the Scope.Block guarantees the result is comptime-known. + const val = analyzed_init_inst.value().?; + + const ty = try analyzed_init_inst.ty.copy(block_scope.arena); + break :vi .{ + .ty = ty, + .val = try val.copy(block_scope.arena), + }; + } else if (!is_extern) { + return self.failTok(&block_scope.base, var_decl.firstToken(), "variables must be initialized", .{}); + } else if (var_decl.getTypeNode()) |type_node| vi: { + // Temporary arena for the zir instructions. + var type_scope_arena = std.heap.ArenaAllocator.init(self.gpa); + defer type_scope_arena.deinit(); + var type_scope: Scope.GenZIR = .{ + .decl = decl, + .arena = &type_scope_arena.allocator, + .parent = &decl.container.base, + }; + defer type_scope.instructions.deinit(self.gpa); + + const var_type = try astgen.typeExpr(self, &type_scope.base, type_node); + if (std.builtin.mode == .Debug and self.comp.verbose_ir) { + zir.dumpZir(self.gpa, "var_type", decl.name, type_scope.instructions.items) catch {}; + } + + const ty = try zir_sema.analyzeBodyValueAsType(self, &block_scope, var_type, .{ + .instructions = type_scope.instructions.items, + }); + break :vi .{ + .ty = ty, + .val = null, + }; + } else { + return self.failTok(&block_scope.base, var_decl.firstToken(), "unable to infer variable type", .{}); + }; + + if (is_mutable and !var_info.ty.isValidVarType(is_extern)) { + return self.failTok(&block_scope.base, var_decl.firstToken(), "variable of type '{}' must be const", .{var_info.ty}); + } + + var type_changed = true; + if (decl.typedValueManaged()) |tvm| { + type_changed = !tvm.typed_value.ty.eql(var_info.ty); + + tvm.deinit(self.gpa); + } + + const new_variable = try decl_arena.allocator.create(Var); + new_variable.* = .{ + .owner_decl = decl, + .init = var_info.val orelse undefined, + .is_extern = is_extern, + .is_mutable 
= is_mutable, + .is_threadlocal = is_threadlocal, + }; + const var_val = try Value.Tag.variable.create(&decl_arena.allocator, new_variable); + + decl_arena_state.* = decl_arena.state; + decl.typed_value = .{ + .most_recent = .{ + .typed_value = .{ + .ty = var_info.ty, + .val = var_val, + }, + .arena = decl_arena_state, + }, + }; + decl.analysis = .complete; + decl.generation = self.generation; + + if (var_decl.getExternExportToken()) |maybe_export_token| { + if (tree.token_ids[maybe_export_token] == .Keyword_export) { + const export_src = token_starts[maybe_export_token]; + const name = tree.tokenSlice(var_decl.name_token); // TODO identifierTokenString + // The scope needs to have the decl in it. + try self.analyzeExport(&block_scope.base, export_src, name, decl); + } + } + return type_changed; +} + fn declareDeclDependency(self: *Module, depender: *Decl, dependee: *Decl) !void { try depender.dependencies.ensureCapacity(self.gpa, depender.dependencies.items().len + 1); try dependee.dependants.ensureCapacity(self.gpa, dependee.dependants.items().len + 1); @@ -1512,7 +1585,7 @@ fn declareDeclDependency(self: *Module, depender: *Decl, dependee: *Decl) !void dependee.dependants.putAssumeCapacity(depender, {}); } -pub fn getAstTree(self: *Module, root_scope: *Scope.File) !*ast.Tree { +pub fn getAstTree(self: *Module, root_scope: *Scope.File) !*const ast.Tree { const tracy = trace(@src()); defer tracy.end(); @@ -1523,8 +1596,10 @@ pub fn getAstTree(self: *Module, root_scope: *Scope.File) !*ast.Tree { const source = try root_scope.getSource(self); var keep_tree = false; - const tree = try std.zig.parse(self.gpa, source); - defer if (!keep_tree) tree.deinit(); + root_scope.tree = try std.zig.parse(self.gpa, source); + defer if (!keep_tree) root_scope.tree.deinit(self.gpa); + + const tree = &root_scope.tree; if (tree.errors.len != 0) { const parse_err = tree.errors[0]; @@ -1532,12 +1607,12 @@ pub fn getAstTree(self: *Module, root_scope: *Scope.File) !*ast.Tree { var msg = 
std.ArrayList(u8).init(self.gpa); defer msg.deinit(); - try parse_err.render(tree.token_ids, msg.writer()); + try tree.renderError(parse_err, msg.writer()); const err_msg = try self.gpa.create(ErrorMsg); err_msg.* = .{ .src_loc = .{ .file_scope = root_scope, - .byte_offset = tree.token_locs[parse_err.loc()].start, + .byte_offset = tree.tokens.items(.start)[parse_err.loc()], }, .msg = msg.toOwnedSlice(), }; @@ -1548,7 +1623,6 @@ pub fn getAstTree(self: *Module, root_scope: *Scope.File) !*ast.Tree { } root_scope.status = .loaded_success; - root_scope.contents = .{ .tree = tree }; keep_tree = true; return tree; @@ -1556,144 +1630,336 @@ pub fn getAstTree(self: *Module, root_scope: *Scope.File) !*ast.Tree { .unloaded_parse_failure => return error.AnalysisFail, - .loaded_success => return root_scope.contents.tree, + .loaded_success => return &root_scope.tree, } } -pub fn analyzeContainer(self: *Module, container_scope: *Scope.Container) !void { +pub fn analyzeContainer(mod: *Module, container_scope: *Scope.Container) !void { const tracy = trace(@src()); defer tracy.end(); // We may be analyzing it for the first time, or this may be // an incremental update. This code handles both cases. - const tree = try self.getAstTree(container_scope.file_scope); - const decls = tree.root_node.decls(); + const tree = try mod.getAstTree(container_scope.file_scope); + const node_tags = tree.nodes.items(.tag); + const node_datas = tree.nodes.items(.data); + const decls = tree.rootDecls(); - try self.comp.work_queue.ensureUnusedCapacity(decls.len); - try container_scope.decls.ensureCapacity(self.gpa, decls.len); + try mod.comp.work_queue.ensureUnusedCapacity(decls.len); + try container_scope.decls.ensureCapacity(mod.gpa, decls.len); // Keep track of the decls that we expect to see in this file so that // we know which ones have been deleted. 
- var deleted_decls = std.AutoArrayHashMap(*Decl, void).init(self.gpa); + var deleted_decls = std.AutoArrayHashMap(*Decl, void).init(mod.gpa); defer deleted_decls.deinit(); try deleted_decls.ensureCapacity(container_scope.decls.items().len); for (container_scope.decls.items()) |entry| { deleted_decls.putAssumeCapacityNoClobber(entry.key, {}); } - for (decls) |src_decl, decl_i| { - if (src_decl.cast(ast.Node.FnProto)) |fn_proto| { - // We will create a Decl for it regardless of analysis status. - const name_tok = fn_proto.getNameToken() orelse { - @panic("TODO missing function name"); - }; - - const name_loc = tree.token_locs[name_tok]; - const name = tree.tokenSliceLoc(name_loc); - const name_hash = container_scope.fullyQualifiedNameHash(name); - const contents_hash = std.zig.hashSrc(tree.getNodeSource(src_decl)); - if (self.decl_table.get(name_hash)) |decl| { - // Update the AST Node index of the decl, even if its contents are unchanged, it may - // have been re-ordered. - decl.src_index = decl_i; - if (deleted_decls.swapRemove(decl) == null) { - decl.analysis = .sema_failure; - const msg = try ErrorMsg.create(self.gpa, .{ - .file_scope = container_scope.file_scope, - .byte_offset = tree.token_locs[name_tok].start, - }, "redefinition of '{s}'", .{decl.name}); - errdefer msg.destroy(self.gpa); - try self.failed_decls.putNoClobber(self.gpa, decl, msg); - } else { - if (!srcHashEql(decl.contents_hash, contents_hash)) { - try self.markOutdatedDecl(decl); - decl.contents_hash = contents_hash; - } else switch (self.comp.bin_file.tag) { - .coff => { - // TODO Implement for COFF - }, - .elf => if (decl.fn_link.elf.len != 0) { - // TODO Look into detecting when this would be unnecessary by storing enough state - // in `Decl` to notice that the line number did not change. 
- self.comp.work_queue.writeItemAssumeCapacity(.{ .update_line_number = decl }); - }, - .macho => if (decl.fn_link.macho.len != 0) { - // TODO Look into detecting when this would be unnecessary by storing enough state - // in `Decl` to notice that the line number did not change. - self.comp.work_queue.writeItemAssumeCapacity(.{ .update_line_number = decl }); - }, - .c, .wasm => {}, - } - } - } else { - const new_decl = try self.createNewDecl(&container_scope.base, name, decl_i, name_hash, contents_hash); - container_scope.decls.putAssumeCapacity(new_decl, {}); - if (fn_proto.getExternExportInlineToken()) |maybe_export_token| { - if (tree.token_ids[maybe_export_token] == .Keyword_export) { - self.comp.work_queue.writeItemAssumeCapacity(.{ .analyze_decl = new_decl }); - } - } + for (decls) |decl_node, decl_i| switch (node_tags[decl_node]) { + .fn_decl => { + const fn_proto = node_datas[decl_node].lhs; + const body = node_datas[decl_node].rhs; + switch (node_tags[fn_proto]) { + .fn_proto_simple => { + var params: [1]ast.Node.Index = undefined; + try mod.semaContainerFn( + container_scope, + &deleted_decls, + decl_node, + decl_i, + tree.*, + body, + tree.fnProtoSimple(¶ms, fn_proto), + ); + }, + .fn_proto_multi => try mod.semaContainerFn( + container_scope, + &deleted_decls, + decl_node, + decl_i, + tree.*, + body, + tree.fnProtoMulti(fn_proto), + ), + .fn_proto_one => { + var params: [1]ast.Node.Index = undefined; + try mod.semaContainerFn( + container_scope, + &deleted_decls, + decl_node, + decl_i, + tree.*, + body, + tree.fnProtoOne(¶ms, fn_proto), + ); + }, + .fn_proto => try mod.semaContainerFn( + container_scope, + &deleted_decls, + decl_node, + decl_i, + tree.*, + body, + tree.fnProto(fn_proto), + ), + else => unreachable, } - } else if (src_decl.castTag(.VarDecl)) |var_decl| { - const name_loc = tree.token_locs[var_decl.name_token]; - const name = tree.tokenSliceLoc(name_loc); - const name_hash = container_scope.fullyQualifiedNameHash(name); - const 
contents_hash = std.zig.hashSrc(tree.getNodeSource(src_decl)); - if (self.decl_table.get(name_hash)) |decl| { - // Update the AST Node index of the decl, even if its contents are unchanged, it may - // have been re-ordered. - decl.src_index = decl_i; - if (deleted_decls.swapRemove(decl) == null) { - decl.analysis = .sema_failure; - const err_msg = try ErrorMsg.create(self.gpa, .{ - .file_scope = container_scope.file_scope, - .byte_offset = name_loc.start, - }, "redefinition of '{s}'", .{decl.name}); - errdefer err_msg.destroy(self.gpa); - try self.failed_decls.putNoClobber(self.gpa, decl, err_msg); - } else if (!srcHashEql(decl.contents_hash, contents_hash)) { - try self.markOutdatedDecl(decl); - decl.contents_hash = contents_hash; - } - } else { - const new_decl = try self.createNewDecl(&container_scope.base, name, decl_i, name_hash, contents_hash); - container_scope.decls.putAssumeCapacity(new_decl, {}); - if (var_decl.getExternExportToken()) |maybe_export_token| { - if (tree.token_ids[maybe_export_token] == .Keyword_export) { - self.comp.work_queue.writeItemAssumeCapacity(.{ .analyze_decl = new_decl }); - } - } - } - } else if (src_decl.castTag(.Comptime)) |comptime_node| { - const name_index = self.getNextAnonNameIndex(); - const name = try std.fmt.allocPrint(self.gpa, "__comptime_{d}", .{name_index}); - defer self.gpa.free(name); + }, + .fn_proto_simple => { + var params: [1]ast.Node.Index = undefined; + try mod.semaContainerFn( + container_scope, + &deleted_decls, + decl_node, + decl_i, + tree.*, + null, + tree.fnProtoSimple(¶ms, decl_node), + ); + }, + .fn_proto_multi => try mod.semaContainerFn( + container_scope, + &deleted_decls, + decl_node, + decl_i, + tree.*, + null, + tree.fnProtoMulti(decl_node), + ), + .fn_proto_one => { + var params: [1]ast.Node.Index = undefined; + try mod.semaContainerFn( + container_scope, + &deleted_decls, + decl_node, + decl_i, + tree.*, + null, + tree.fnProtoOne(¶ms, decl_node), + ); + }, + .fn_proto => try 
mod.semaContainerFn(
+ container_scope,
+ &deleted_decls,
+ decl_node,
+ decl_i,
+ tree.*,
+ null,
+ tree.fnProto(decl_node),
+ ),
+
+ .global_var_decl => try mod.semaContainerVar(
+ container_scope,
+ &deleted_decls,
+ decl_node,
+ decl_i,
+ tree.*,
+ tree.globalVarDecl(decl_node),
+ ),
+ .local_var_decl => try mod.semaContainerVar(
+ container_scope,
+ &deleted_decls,
+ decl_node,
+ decl_i,
+ tree.*,
+ tree.localVarDecl(decl_node),
+ ),
+ .simple_var_decl => try mod.semaContainerVar(
+ container_scope,
+ &deleted_decls,
+ decl_node,
+ decl_i,
+ tree.*,
+ tree.simpleVarDecl(decl_node),
+ ),
+ .aligned_var_decl => try mod.semaContainerVar(
+ container_scope,
+ &deleted_decls,
+ decl_node,
+ decl_i,
+ tree.*,
+ tree.alignedVarDecl(decl_node),
+ ),
+
+ .@"comptime" => {
+ const name_index = mod.getNextAnonNameIndex();
+ const name = try std.fmt.allocPrint(mod.gpa, "__comptime_{d}", .{name_index});
+ defer mod.gpa.free(name);
 const name_hash = container_scope.fullyQualifiedNameHash(name);
- const contents_hash = std.zig.hashSrc(tree.getNodeSource(src_decl));
+ const contents_hash = std.zig.hashSrc(tree.getNodeSource(decl_node));
- const new_decl = try self.createNewDecl(&container_scope.base, name, decl_i, name_hash, contents_hash);
+ const new_decl = try mod.createNewDecl(&container_scope.base, name, decl_i, name_hash, contents_hash);
 container_scope.decls.putAssumeCapacity(new_decl, {});
- self.comp.work_queue.writeItemAssumeCapacity(.{ .analyze_decl = new_decl });
- } else if (src_decl.castTag(.ContainerField)) |container_field| {
- log.err("TODO: analyze container field", .{});
- } else if (src_decl.castTag(.TestDecl)) |test_decl| {
+ mod.comp.work_queue.writeItemAssumeCapacity(.{ .analyze_decl = new_decl });
+ },
+
+ .container_field_init => try mod.semaContainerField(
+ container_scope,
+ &deleted_decls,
+ decl_node,
+ decl_i,
+ tree.*,
+ tree.containerFieldInit(decl_node),
+ ),
+ .container_field_align => try mod.semaContainerField(
+ container_scope,
+ 
&deleted_decls,
+ decl_node,
+ decl_i,
+ tree.*,
+ tree.containerFieldAlign(decl_node),
+ ),
+ .container_field => try mod.semaContainerField(
+ container_scope,
+ &deleted_decls,
+ decl_node,
+ decl_i,
+ tree.*,
+ tree.containerField(decl_node),
+ ),
+
+ .test_decl => {
 log.err("TODO: analyze test decl", .{});
- } else if (src_decl.castTag(.Use)) |use_decl| {
+ },
+ .@"usingnamespace" => {
 log.err("TODO: analyze usingnamespace decl", .{});
- } else {
- unreachable;
- }
- }
+ },
+ else => unreachable,
+ };
 
 // Handle explicitly deleted decls from the source code. Not to be confused
 // with when we delete decls because they are no longer referenced.
 for (deleted_decls.items()) |entry| {
 log.debug("noticed '{s}' deleted from source\n", .{entry.key.name});
- try self.deleteDecl(entry.key);
+ try mod.deleteDecl(entry.key);
 }
 }
 
+fn semaContainerFn(
+ mod: *Module,
+ container_scope: *Scope.Container,
+ deleted_decls: *std.AutoArrayHashMap(*Decl, void),
+ decl_node: ast.Node.Index,
+ decl_i: usize,
+ tree: ast.Tree,
+ body_node: ast.Node.Index,
+ fn_proto: ast.full.FnProto,
+) !void {
+ const tracy = trace(@src());
+ defer tracy.end();
+
+ const token_starts = tree.tokens.items(.start);
+ const token_tags = tree.tokens.items(.tag);
+
+ // We will create a Decl for it regardless of analysis status.
+ const name_tok = fn_proto.name_token orelse {
+ @panic("TODO missing function name");
+ };
+ const name = tree.tokenSlice(name_tok); // TODO use identifierTokenString
+ const name_hash = container_scope.fullyQualifiedNameHash(name);
+ const contents_hash = std.zig.hashSrc(tree.getNodeSource(decl_node));
+ if (mod.decl_table.get(name_hash)) |decl| {
+ // Update the AST Node index of the decl, even if its contents are unchanged, it may
+ // have been re-ordered. 
+ decl.src_index = decl_i; + if (deleted_decls.swapRemove(decl) == null) { + decl.analysis = .sema_failure; + const msg = try ErrorMsg.create(mod.gpa, .{ + .file_scope = container_scope.file_scope, + .byte_offset = token_starts[name_tok], + }, "redefinition of '{s}'", .{decl.name}); + errdefer msg.destroy(mod.gpa); + try mod.failed_decls.putNoClobber(mod.gpa, decl, msg); + } else { + if (!srcHashEql(decl.contents_hash, contents_hash)) { + try mod.markOutdatedDecl(decl); + decl.contents_hash = contents_hash; + } else switch (mod.comp.bin_file.tag) { + .coff => { + // TODO Implement for COFF + }, + .elf => if (decl.fn_link.elf.len != 0) { + // TODO Look into detecting when this would be unnecessary by storing enough state + // in `Decl` to notice that the line number did not change. + mod.comp.work_queue.writeItemAssumeCapacity(.{ .update_line_number = decl }); + }, + .macho => if (decl.fn_link.macho.len != 0) { + // TODO Look into detecting when this would be unnecessary by storing enough state + // in `Decl` to notice that the line number did not change. 
+ mod.comp.work_queue.writeItemAssumeCapacity(.{ .update_line_number = decl });
+ },
+ .c, .wasm => {},
+ }
+ }
+ } else {
+ const new_decl = try mod.createNewDecl(&container_scope.base, name, decl_i, name_hash, contents_hash);
+ container_scope.decls.putAssumeCapacity(new_decl, {});
+ if (fn_proto.extern_export_inline_token) |maybe_export_token| {
+ if (token_tags[maybe_export_token] == .keyword_export) {
+ mod.comp.work_queue.writeItemAssumeCapacity(.{ .analyze_decl = new_decl });
+ }
+ }
+ }
+}
+
+fn semaContainerVar(
+ mod: *Module,
+ container_scope: *Scope.Container,
+ deleted_decls: *std.AutoArrayHashMap(*Decl, void),
+ decl_node: ast.Node.Index,
+ decl_i: usize,
+ tree: ast.Tree,
+ var_decl: ast.full.VarDecl,
+) !void {
+ const tracy = trace(@src());
+ defer tracy.end();
+
+ const token_starts = tree.tokens.items(.start);
+
+ const name_src = token_starts[var_decl.name_token];
+ const name = tree.tokenSlice(var_decl.name_token); // TODO identifierTokenString
+ const name_hash = container_scope.fullyQualifiedNameHash(name);
+ const contents_hash = std.zig.hashSrc(tree.getNodeSource(decl_node));
+ if (mod.decl_table.get(name_hash)) |decl| {
+ // Update the AST Node index of the decl, even if its contents are unchanged, it may
+ // have been re-ordered. 
+ decl.src_index = decl_i;
+ if (deleted_decls.swapRemove(decl) == null) {
+ decl.analysis = .sema_failure;
+ const err_msg = try ErrorMsg.create(mod.gpa, .{
+ .file_scope = container_scope.file_scope,
+ .byte_offset = name_src,
+ }, "redefinition of '{s}'", .{decl.name});
+ errdefer err_msg.destroy(mod.gpa);
+ try mod.failed_decls.putNoClobber(mod.gpa, decl, err_msg);
+ } else if (!srcHashEql(decl.contents_hash, contents_hash)) {
+ try mod.markOutdatedDecl(decl);
+ decl.contents_hash = contents_hash;
+ }
+ } else {
+ const new_decl = try mod.createNewDecl(&container_scope.base, name, decl_i, name_hash, contents_hash);
+ container_scope.decls.putAssumeCapacity(new_decl, {});
+ if (var_decl.extern_export_token) |maybe_export_token| {
+ if (tree.tokens.items(.tag)[maybe_export_token] == .keyword_export) {
+ mod.comp.work_queue.writeItemAssumeCapacity(.{ .analyze_decl = new_decl });
+ }
+ }
+ }
+}
+
+fn semaContainerField(mod: *Module, container_scope: *Scope.Container, deleted_decls: *std.AutoArrayHashMap(*Decl, void), decl_node: ast.Node.Index, decl_i: usize, tree: ast.Tree, field: ast.full.ContainerField) !void {
+ const tracy = trace(@src());
+ defer tracy.end();
+
+ log.err("TODO: analyze container field", .{});
+}
+
 pub fn deleteDecl(self: *Module, decl: *Decl) !void {
+ const tracy = trace(@src());
+ defer tracy.end();
+
 try self.deletion_set.ensureCapacity(self.gpa, self.deletion_set.items.len + decl.dependencies.items().len);
 
 // Remove from the namespace it resides in. 
In the case of an anonymous Decl it will @@ -2338,15 +2604,16 @@ pub fn createContainerDecl( fn getAnonTypeName(self: *Module, scope: *Scope, base_token: std.zig.ast.TokenIndex) ![]u8 { // TODO add namespaces, generic function signatrues const tree = scope.tree(); - const base_name = switch (tree.token_ids[base_token]) { - .Keyword_struct => "struct", - .Keyword_enum => "enum", - .Keyword_union => "union", - .Keyword_opaque => "opaque", + const token_tags = tree.tokens.items(.tag); + const base_name = switch (token_tags[base_token]) { + .keyword_struct => "struct", + .keyword_enum => "enum", + .keyword_union => "union", + .keyword_opaque => "opaque", else => unreachable, }; - const loc = tree.tokenLocationLoc(0, tree.token_locs[base_token]); - return std.fmt.allocPrint(self.gpa, "{}:{}:{}", .{ base_name, loc.line, loc.column }); + const loc = tree.tokenLocation(0, base_token); + return std.fmt.allocPrint(self.gpa, "{s}:{d}:{d}", .{ base_name, loc.line, loc.column }); } fn getNextAnonNameIndex(self: *Module) usize { @@ -3092,7 +3359,7 @@ pub fn failTok( comptime format: []const u8, args: anytype, ) InnerError { - const src = scope.tree().token_locs[token_index].start; + const src = scope.tree().tokens.items(.start)[token_index]; return self.fail(scope, src, format, args); } @@ -3103,7 +3370,7 @@ pub fn failNode( comptime format: []const u8, args: anytype, ) InnerError { - const src = scope.tree().token_locs[ast_node.firstToken()].start; + const src = scope.tree().tokens.items(.start)[ast_node.firstToken()]; return self.fail(scope, src, format, args); } @@ -3537,6 +3804,7 @@ pub fn validateVarType(mod: *Module, scope: *Scope, src: usize, ty: Type) !void /// Identifier token -> String (allocated in scope.arena()) pub fn identifierTokenString(mod: *Module, scope: *Scope, token: ast.TokenIndex) InnerError![]const u8 { const tree = scope.tree(); + const token_starts = tree.tokens.items(.start); const ident_name = tree.tokenSlice(token); if (mem.startsWith(u8, ident_name, 
"@")) { @@ -3545,7 +3813,7 @@ pub fn identifierTokenString(mod: *Module, scope: *Scope, token: ast.TokenIndex) return std.zig.parseStringLiteral(scope.arena(), raw_string, &bad_index) catch |err| switch (err) { error.InvalidCharacter => { const bad_byte = raw_string[bad_index]; - const src = tree.token_locs[token].start; + const src = token_starts[token]; return mod.fail(scope, src + 1 + bad_index, "invalid string literal character: '{c}'\n", .{bad_byte}); }, else => |e| return e, diff --git a/src/main.zig b/src/main.zig index 26e222016a..2ae87fa56c 100644 --- a/src/main.zig +++ b/src/main.zig @@ -2153,7 +2153,7 @@ fn cmdTranslateC(comp: *Compilation, arena: *Allocator, enable_cache: bool) !voi const c_headers_dir_path = try comp.zig_lib_directory.join(arena, &[_][]const u8{"include"}); const c_headers_dir_path_z = try arena.dupeZ(u8, c_headers_dir_path); var clang_errors: []translate_c.ClangErrMsg = &[0]translate_c.ClangErrMsg{}; - const tree = translate_c.translate( + var tree = translate_c.translate( comp.gpa, new_argv.ptr, new_argv.ptr + new_argv.len, @@ -2174,7 +2174,7 @@ fn cmdTranslateC(comp: *Compilation, arena: *Allocator, enable_cache: bool) !voi process.exit(1); }, }; - defer tree.deinit(); + defer tree.deinit(comp.gpa); if (out_dep_path) |dep_file_path| { const dep_basename = std.fs.path.basename(dep_file_path); @@ -2188,16 +2188,21 @@ fn cmdTranslateC(comp: *Compilation, arena: *Allocator, enable_cache: bool) !voi const digest = man.final(); const o_sub_path = try fs.path.join(arena, &[_][]const u8{ "o", &digest }); + var o_dir = try comp.local_cache_directory.handle.makeOpenPath(o_sub_path, .{}); defer o_dir.close(); + var zig_file = try o_dir.createFile(translated_zig_basename, .{}); defer zig_file.close(); - var bw = io.bufferedWriter(zig_file.writer()); - _ = try std.zig.render(comp.gpa, bw.writer(), tree); - try bw.flush(); + const formatted = try tree.render(comp.gpa); + defer comp.gpa.free(formatted); - man.writeManifest() catch |err| 
warn("failed to write cache manifest: {s}", .{@errorName(err)});
+ try zig_file.writeAll(formatted);
+
+ man.writeManifest() catch |err| warn("failed to write cache manifest: {s}", .{
+ @errorName(err),
+ });
 break :digest digest;
 };
@@ -2684,10 +2689,10 @@ pub fn cmdFmt(gpa: *Allocator, args: []const []const u8) !void {
 const source_code = try stdin.readAllAlloc(gpa, max_src_size);
 defer gpa.free(source_code);
- const tree = std.zig.parse(gpa, source_code) catch |err| {
+ var tree = std.zig.parse(gpa, source_code) catch |err| {
 fatal("error parsing stdin: {s}", .{err});
 };
- defer tree.deinit();
+ defer tree.deinit(gpa);
 for (tree.errors) |parse_error| {
 try printErrMsgToFile(gpa, parse_error, tree, "", stderr_file, color);
@@ -2695,16 +2700,15 @@
 if (tree.errors.len != 0) {
 process.exit(1);
 }
+ const formatted = try tree.render(gpa);
+ defer gpa.free(formatted);
+
 if (check_flag) {
- const anything_changed = try std.zig.render(gpa, io.null_writer, tree);
- const code = if (anything_changed) @as(u8, 1) else @as(u8, 0);
+ const code: u8 = @boolToInt(!mem.eql(u8, formatted, source_code));
 process.exit(code);
 }
- var bw = io.bufferedWriter(io.getStdOut().writer());
- _ = try std.zig.render(gpa, bw.writer(), tree);
- try bw.flush();
- return;
+ return io.getStdOut().writeAll(formatted);
 }
 if (input_files.items.len == 0) {
@@ -2841,8 +2845,8 @@ fn fmtPathFile(
 // Add to set after no longer possible to get error.IsDir. 
if (try fmt.seen.fetchPut(stat.inode, {})) |_| return; - const tree = try std.zig.parse(fmt.gpa, source_code); - defer tree.deinit(); + var tree = try std.zig.parse(fmt.gpa, source_code); + defer tree.deinit(fmt.gpa); for (tree.errors) |parse_error| { try printErrMsgToFile(fmt.gpa, parse_error, tree, file_path, std.io.getStdErr(), fmt.color); @@ -2852,22 +2856,20 @@ fn fmtPathFile( return; } - if (check_mode) { - const anything_changed = try std.zig.render(fmt.gpa, io.null_writer, tree); - if (anything_changed) { - const stdout = io.getStdOut().writer(); - try stdout.print("{s}\n", .{file_path}); - fmt.any_error = true; - } - } else { - // As a heuristic, we make enough capacity for the same as the input source. - try fmt.out_buffer.ensureCapacity(source_code.len); - fmt.out_buffer.items.len = 0; - const writer = fmt.out_buffer.writer(); - const anything_changed = try std.zig.render(fmt.gpa, writer, tree); - if (!anything_changed) - return; // Good thing we didn't waste any file system access on this. + // As a heuristic, we make enough capacity for the same as the input source. 
+ fmt.out_buffer.shrinkRetainingCapacity(0);
+ try fmt.out_buffer.ensureCapacity(source_code.len);
+ try tree.renderToArrayList(&fmt.out_buffer);
+ const anything_changed = !mem.eql(u8, fmt.out_buffer.items, source_code);
+ if (!anything_changed)
+ return;
+
+ if (check_mode) {
+ const stdout = io.getStdOut().writer();
+ try stdout.print("{s}\n", .{file_path});
+ fmt.any_error = true;
+ } else {
 var af = try dir.atomicFile(sub_path, .{ .mode = stat.mode });
 defer af.deinit();
@@ -2881,7 +2883,7 @@ fn fmtPathFile(
 fn printErrMsgToFile(
 gpa: *mem.Allocator,
 parse_error: ast.Error,
- tree: *ast.Tree,
+ tree: ast.Tree,
 path: []const u8,
 file: fs.File,
 color: Color,
@@ -2892,18 +2894,16 @@ fn printErrMsgToFile(
 .off => false,
 };
 const lok_token = parse_error.loc();
- const span_first = lok_token;
- const span_last = lok_token;
- const first_token = tree.token_locs[span_first];
- const last_token = tree.token_locs[span_last];
- const start_loc = tree.tokenLocationLoc(0, first_token);
- const end_loc = tree.tokenLocationLoc(first_token.end, last_token);
+ const token_starts = tree.tokens.items(.start);
+ const token_tags = tree.tokens.items(.tag);
+ const first_token_start = token_starts[lok_token];
+ const start_loc = tree.tokenLocation(0, lok_token);
 var text_buf = std.ArrayList(u8).init(gpa);
 defer text_buf.deinit();
 const writer = text_buf.writer();
- try parse_error.render(tree.token_ids, writer);
+ try tree.renderError(parse_error, writer);
 const text = text_buf.items;
 const stream = file.writer();
@@ -2920,8 +2920,12 @@
 }
 try stream.writeByte('\n');
 try stream.writeByteNTimes(' ', start_loc.column);
- try stream.writeByteNTimes('~', last_token.end - first_token.start);
- try stream.writeByte('\n');
+ if (token_tags[lok_token].lexeme()) |lexeme| {
+ try stream.writeByteNTimes('~', lexeme.len);
+ try stream.writeByte('\n');
+ } else {
+ try stream.writeAll("^\n");
+ }
 }
 
 pub const info_zen =
diff --git a/src/translate_c.zig b/src/translate_c.zig 
index bca9ff3a20..c37355298a 100644 --- a/src/translate_c.zig +++ b/src/translate_c.zig @@ -375,7 +375,7 @@ pub fn translate( args_end: [*]?[*]const u8, errors: *[]ClangErrMsg, resources_path: [*:0]const u8, -) !*ast.Tree { +) !ast.Tree { const ast_unit = clang.LoadFromCommandLine( args_begin, args_end, @@ -396,6 +396,14 @@ pub fn translate( var arena = std.heap.ArenaAllocator.init(gpa); errdefer arena.deinit(); + if (true) { + var x = false; + if (x) { + return error.OutOfMemory; + } + @panic("TODO update translate-c"); + } + var context = Context{ .gpa = gpa, .arena = &arena.allocator, diff --git a/src/zir.zig b/src/zir.zig index 9e5830e79a..5b683c7fbf 100644 --- a/src/zir.zig +++ b/src/zir.zig @@ -357,6 +357,7 @@ pub const Inst = struct { .ret_type, .unreach_nocheck, .@"unreachable", + .arg, => NoOp, .alloc, @@ -449,7 +450,6 @@ pub const Inst = struct { .block_comptime_flat, => Block, - .arg => Arg, .array_type_sentinel => ArrayTypeSentinel, .@"break" => Break, .breakvoid => BreakVoid, @@ -685,16 +685,6 @@ pub const Inst = struct { kw_args: struct {}, }; - pub const Arg = struct { - pub const base_tag = Tag.arg; - base: Inst, - - positionals: struct { - name: []const u8, - }, - kw_args: struct {}, - }; - pub const Block = struct { pub const base_tag = Tag.block; base: Inst, @@ -1608,6 +1598,7 @@ const DumpTzir = struct { .unreach, .breakpoint, .dbg_stmt, + .arg, => {}, .ref, @@ -1652,8 +1643,6 @@ const DumpTzir = struct { try dtz.findConst(bin_op.rhs); }, - .arg => {}, - .br => { const br = inst.castTag(.br).?; try dtz.findConst(&br.block.base); From a9db40e8704bd4f87b0770e2d72ba05b94afad1e Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Thu, 11 Feb 2021 23:53:04 -0700 Subject: [PATCH 056/173] zig fmt: add failing test case for inline/callconv conversion --- lib/std/zig/parser_test.zig | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index fd587c10ed..d20f7aa5bf 
100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -5,15 +5,17 @@ // and substantial portions of the software. // TODO Remove this after zig 0.8.0 is released. -test "zig fmt: rewrite inline functions as callconv(.Inline)" { - try testTransform( - \\inline fn foo() void {} - \\ - , - \\fn foo() callconv(.Inline) void {} - \\ - ); -} +// TODO need to add the logic to make this test pass. it was added in master +// but was not added in the ast-memory-layout branch yet. +//test "zig fmt: rewrite inline functions as callconv(.Inline)" { +// try testTransform( +// \\inline fn foo() void {} +// \\ +// , +// \\fn foo() callconv(.Inline) void {} +// \\ +// ); +//} test "zig fmt: simple top level comptime block" { try testCanonical( From 7630a5c566b106b6325a55f29eb1ed9e584d0949 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Fri, 12 Feb 2021 23:47:17 -0700 Subject: [PATCH 057/173] stage2: more progress towards Module/astgen building with new mem layout --- lib/std/zig/ast.zig | 2 + src/Module.zig | 366 +++++++------ src/astgen.zig | 905 +++++++++++++++++--------------- src/codegen.zig | 15 +- src/ir.zig | 1 + src/link/Elf.zig | 32 +- src/link/MachO/DebugSymbols.zig | 32 +- src/zir.zig | 21 +- 8 files changed, 785 insertions(+), 589 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index ab81c3415e..40541ea7c1 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -2834,10 +2834,12 @@ pub const Node = struct { /// `(lhs)`. main_token is the `(`; rhs is the token index of the `)`. grouped_expression, /// `@a(lhs, rhs)`. lhs and rhs may be omitted. + /// main_token is the builtin token. builtin_call_two, /// Same as builtin_call_two but there is known to be a trailing comma before the rparen. builtin_call_two_comma, /// `@a(b, c)`. `sub_list[lhs..rhs]`. + /// main_token is the builtin token. builtin_call, /// Same as builtin_call but there is known to be a trailing comma before the rparen. 
builtin_call_comma, diff --git a/src/Module.zig b/src/Module.zig index a1c2822732..2071ff671c 100644 --- a/src/Module.zig +++ b/src/Module.zig @@ -428,14 +428,14 @@ pub const Scope = struct { } /// Asserts the scope is a child of a File and has an AST tree and returns the tree. - pub fn tree(self: *Scope) *ast.Tree { + pub fn tree(self: *Scope) *const ast.Tree { switch (self.tag) { - .file => return self.cast(File).?.contents.tree, - .block => return self.cast(Block).?.src_decl.container.file_scope.contents.tree, - .gen_zir => return self.cast(GenZIR).?.decl.container.file_scope.contents.tree, - .local_val => return self.cast(LocalVal).?.gen_zir.decl.container.file_scope.contents.tree, - .local_ptr => return self.cast(LocalPtr).?.gen_zir.decl.container.file_scope.contents.tree, - .container => return self.cast(Container).?.file_scope.contents.tree, + .file => return self.cast(File).?.tree, + .block => return self.cast(Block).?.src_decl.container.file_scope.tree, + .gen_zir => return self.cast(GenZIR).?.decl.container.file_scope.tree, + .local_val => return self.cast(LocalVal).?.gen_zir.decl.container.file_scope.tree, + .local_ptr => return self.cast(LocalPtr).?.gen_zir.decl.container.file_scope.tree, + .container => return self.cast(Container).?.file_scope.tree, } } @@ -1008,38 +1008,38 @@ fn astgenAndSemaDecl(mod: *Module, decl: *Decl) !bool { switch (node_tags[fn_proto]) { .fn_proto_simple => { var params: [1]ast.Node.Index = undefined; - return mod.astgenAndSemaFn(decl, tree, body, tree.fnProtoSimple(¶ms, fn_proto)); + return mod.astgenAndSemaFn(decl, tree.*, body, tree.fnProtoSimple(¶ms, fn_proto)); }, - .fn_proto_multi => return mod.astgenAndSemaFn(decl, tree, body, tree.fnProtoMulti(fn_proto)), + .fn_proto_multi => return mod.astgenAndSemaFn(decl, tree.*, body, tree.fnProtoMulti(fn_proto)), .fn_proto_one => { var params: [1]ast.Node.Index = undefined; - return mod.astgenAndSemaFn(decl, tree, body, tree.fnProtoOne(¶ms, fn_proto)); + return 
mod.astgenAndSemaFn(decl, tree.*, body, tree.fnProtoOne(¶ms, fn_proto)); }, - .fn_proto => return mod.astgenAndSemaFn(decl, tree, body, tree.fnProto(fn_proto)), + .fn_proto => return mod.astgenAndSemaFn(decl, tree.*, body, tree.fnProto(fn_proto)), else => unreachable, } }, .fn_proto_simple => { var params: [1]ast.Node.Index = undefined; - return mod.astgenAndSemaFn(decl, tree, null, tree.fnProtoSimple(¶ms, decl_node)); + return mod.astgenAndSemaFn(decl, tree.*, 0, tree.fnProtoSimple(¶ms, decl_node)); }, - .fn_proto_multi => return mod.astgenAndSemaFn(decl, tree, null, tree.fnProtoMulti(decl_node)), + .fn_proto_multi => return mod.astgenAndSemaFn(decl, tree.*, 0, tree.fnProtoMulti(decl_node)), .fn_proto_one => { var params: [1]ast.Node.Index = undefined; - return mod.astgenAndSemaFn(decl, tree, null, tree.fnProtoOne(¶ms, decl_node)); + return mod.astgenAndSemaFn(decl, tree.*, 0, tree.fnProtoOne(¶ms, decl_node)); }, - .fn_proto => return mod.astgenAndSemaFn(decl, tree, null, tree.fnProto(decl_node)), + .fn_proto => return mod.astgenAndSemaFn(decl, tree.*, 0, tree.fnProto(decl_node)), - .global_var_decl => return mod.astgenAndSemaVarDecl(decl, tree, tree.globalVarDecl(decl_node)), - .local_var_decl => return mod.astgenAndSemaVarDecl(decl, tree, tree.localVarDecl(decl_node)), - .simple_var_decl => return mod.astgenAndSemaVarDecl(decl, tree, tree.simpleVarDecl(decl_node)), - .aligned_var_decl => return mod.astgenAndSemaVarDecl(decl, tree, tree.alignedVarDecl(decl_node)), + .global_var_decl => return mod.astgenAndSemaVarDecl(decl, tree.*, tree.globalVarDecl(decl_node)), + .local_var_decl => return mod.astgenAndSemaVarDecl(decl, tree.*, tree.localVarDecl(decl_node)), + .simple_var_decl => return mod.astgenAndSemaVarDecl(decl, tree.*, tree.simpleVarDecl(decl_node)), + .aligned_var_decl => return mod.astgenAndSemaVarDecl(decl, tree.*, tree.alignedVarDecl(decl_node)), .@"comptime" => { decl.analysis = .in_progress; // A comptime decl does not store any value so we can just 
deinit this arena after analysis is done. - var analysis_arena = std.heap.ArenaAllocator.init(self.gpa); + var analysis_arena = std.heap.ArenaAllocator.init(mod.gpa); defer analysis_arena.deinit(); var gen_scope: Scope.GenZIR = .{ .decl = decl, @@ -1047,14 +1047,15 @@ fn astgenAndSemaDecl(mod: *Module, decl: *Decl) !bool { .parent = &decl.container.base, .force_comptime = true, }; - defer gen_scope.instructions.deinit(self.gpa); + defer gen_scope.instructions.deinit(mod.gpa); - _ = try astgen.comptimeExpr(self, &gen_scope.base, .none, comptime_decl.expr); - if (std.builtin.mode == .Debug and self.comp.verbose_ir) { - zir.dumpZir(self.gpa, "comptime_block", decl.name, gen_scope.instructions.items) catch {}; + const block_expr = node_datas[decl_node].lhs; + _ = try astgen.comptimeExpr(mod, &gen_scope.base, .none, block_expr); + if (std.builtin.mode == .Debug and mod.comp.verbose_ir) { + zir.dumpZir(mod.gpa, "comptime_block", decl.name, gen_scope.instructions.items) catch {}; } - var inst_table = Scope.Block.InstTable.init(self.gpa); + var inst_table = Scope.Block.InstTable.init(mod.gpa); defer inst_table.deinit(); var branch_quota: u32 = default_eval_branch_quota; @@ -1071,17 +1072,17 @@ fn astgenAndSemaDecl(mod: *Module, decl: *Decl) !bool { .is_comptime = true, .branch_quota = &branch_quota, }; - defer block_scope.instructions.deinit(self.gpa); + defer block_scope.instructions.deinit(mod.gpa); - _ = try zir_sema.analyzeBody(self, &block_scope, .{ + _ = try zir_sema.analyzeBody(mod, &block_scope, .{ .instructions = gen_scope.instructions.items, }); decl.analysis = .complete; - decl.generation = self.generation; + decl.generation = mod.generation; return true; }, - .UsingNamespace => @panic("TODO usingnamespace decl"), + .@"usingnamespace" => @panic("TODO usingnamespace decl"), else => unreachable, } } @@ -1099,18 +1100,20 @@ fn astgenAndSemaFn( decl.analysis = .in_progress; const token_starts = tree.tokens.items(.start); + const token_tags = tree.tokens.items(.tag); 
// This arena allocator's memory is discarded at the end of this function. It is used // to determine the type of the function, and hence the type of the decl, which is needed // to complete the Decl analysis. - var fn_type_scope_arena = std.heap.ArenaAllocator.init(self.gpa); + var fn_type_scope_arena = std.heap.ArenaAllocator.init(mod.gpa); defer fn_type_scope_arena.deinit(); var fn_type_scope: Scope.GenZIR = .{ .decl = decl, .arena = &fn_type_scope_arena.allocator, .parent = &decl.container.base, + .force_comptime = true, }; - defer fn_type_scope.instructions.deinit(self.gpa); + defer fn_type_scope.instructions.deinit(mod.gpa); decl.is_pub = fn_proto.visib_token != null; @@ -1126,7 +1129,7 @@ fn astgenAndSemaFn( }; const param_types = try fn_type_scope.arena.alloc(*zir.Inst, param_count); const fn_src = token_starts[fn_proto.ast.fn_token]; - const type_type = try astgen.addZIRInstConst(self, &fn_type_scope.base, fn_src, .{ + const type_type = try astgen.addZIRInstConst(mod, &fn_type_scope.base, fn_src, .{ .ty = Type.initTag(.type), .val = Value.initTag(.type_type), }); @@ -1138,13 +1141,13 @@ fn astgenAndSemaFn( while (it.next()) |param| : (param_type_i += 1) { if (param.anytype_ellipsis3) |token| { switch (token_tags[token]) { - .keyword_anytype => return self.failTok( + .keyword_anytype => return mod.failTok( &fn_type_scope.base, - tok_i, + token, "TODO implement anytype parameter", .{}, ), - .ellipsis3 => return self.failTok( + .ellipsis3 => return mod.failTok( &fn_type_scope.base, token, "TODO implement var args", @@ -1156,7 +1159,7 @@ fn astgenAndSemaFn( const param_type_node = param.type_expr; assert(param_type_node != 0); param_types[param_type_i] = - try astgen.expr(self, &fn_type_scope.base, type_type_rl, param_type_node); + try astgen.expr(mod, &fn_type_scope.base, type_type_rl, param_type_node); } assert(param_type_i == param_count); } @@ -1164,10 +1167,10 @@ fn astgenAndSemaFn( // TODO call std.zig.parseStringLiteral const lib_name_str = mem.trim(u8, 
tree.tokenSlice(lib_name), "\""); log.debug("extern fn symbol expected in lib '{s}'", .{lib_name_str}); - const target = self.comp.getTarget(); + const target = mod.comp.getTarget(); if (target_util.is_libc_lib_name(target, lib_name_str)) { - if (!self.comp.bin_file.options.link_libc) { - return self.failTok( + if (!mod.comp.bin_file.options.link_libc) { + return mod.failTok( &fn_type_scope.base, lib_name, "dependency on libc must be explicitly specified in the build command", @@ -1177,8 +1180,8 @@ fn astgenAndSemaFn( break :blk; } if (target_util.is_libcpp_lib_name(target, lib_name_str)) { - if (!self.comp.bin_file.options.link_libcpp) { - return self.failTok( + if (!mod.comp.bin_file.options.link_libcpp) { + return mod.failTok( &fn_type_scope.base, lib_name, "dependency on libc++ must be explicitly specified in the build command", @@ -1187,16 +1190,16 @@ fn astgenAndSemaFn( } break :blk; } - if (!target.isWasm() and !self.comp.bin_file.options.pic) { - return self.failTok( + if (!target.isWasm() and !mod.comp.bin_file.options.pic) { + return mod.failTok( &fn_type_scope.base, lib_name, "dependency on dynamic library '{s}' requires enabling Position Independent Code. 
Fixed by `-l{s}` or `-fPIC`.", .{ lib_name, lib_name }, ); } - self.comp.stage1AddLinkLib(lib_name_str) catch |err| { - return self.failTok( + mod.comp.stage1AddLinkLib(lib_name_str) catch |err| { + return mod.failTok( &fn_type_scope.base, lib_name, "unable to add link lib '{s}': {s}", @@ -1204,45 +1207,55 @@ fn astgenAndSemaFn( ); }; } - if (fn_proto.ast.align_expr) |align_expr| { - return self.failNode(&fn_type_scope.base, align_expr, "TODO implement function align expression", .{}); - } - if (fn_proto.ast.section_expr) |sect_expr| { - return self.failNode(&fn_type_scope.base, sect_expr, "TODO implement function section expression", .{}); - } - if (fn_proto.ast.callconv_expr) |callconv_expr| { - return self.failNode( + if (fn_proto.ast.align_expr != 0) { + return mod.failNode( &fn_type_scope.base, - callconv_expr, + fn_proto.ast.align_expr, + "TODO implement function align expression", + .{}, + ); + } + if (fn_proto.ast.section_expr != 0) { + return mod.failNode( + &fn_type_scope.base, + fn_proto.ast.section_expr, + "TODO implement function section expression", + .{}, + ); + } + if (fn_proto.ast.callconv_expr != 0) { + return mod.failNode( + &fn_type_scope.base, + fn_proto.ast.callconv_expr, "TODO implement function calling convention expression", .{}, ); } const maybe_bang = tree.firstToken(fn_proto.ast.return_type) - 1; if (token_tags[maybe_bang] == .bang) { - return self.failTok(&fn_type_scope.base, maybe_bang, "TODO implement inferred error sets", .{}); + return mod.failTok(&fn_type_scope.base, maybe_bang, "TODO implement inferred error sets", .{}); } const return_type_inst = try astgen.expr( - self, + mod, &fn_type_scope.base, type_type_rl, fn_proto.ast.return_type, ); - const fn_type_inst = try astgen.addZIRInst(self, &fn_type_scope.base, fn_src, zir.Inst.FnType, .{ + const fn_type_inst = try astgen.addZIRInst(mod, &fn_type_scope.base, fn_src, zir.Inst.FnType, .{ .return_type = return_type_inst, .param_types = param_types, }, .{}); - if (std.builtin.mode == 
.Debug and self.comp.verbose_ir) { - zir.dumpZir(self.gpa, "fn_type", decl.name, fn_type_scope.instructions.items) catch {}; + if (std.builtin.mode == .Debug and mod.comp.verbose_ir) { + zir.dumpZir(mod.gpa, "fn_type", decl.name, fn_type_scope.instructions.items) catch {}; } // We need the memory for the Type to go into the arena for the Decl - var decl_arena = std.heap.ArenaAllocator.init(self.gpa); + var decl_arena = std.heap.ArenaAllocator.init(mod.gpa); errdefer decl_arena.deinit(); const decl_arena_state = try decl_arena.allocator.create(std.heap.ArenaAllocator.State); - var inst_table = Scope.Block.InstTable.init(self.gpa); + var inst_table = Scope.Block.InstTable.init(mod.gpa); defer inst_table.deinit(); var branch_quota: u32 = default_eval_branch_quota; @@ -1259,9 +1272,9 @@ fn astgenAndSemaFn( .is_comptime = false, .branch_quota = &branch_quota, }; - defer block_scope.instructions.deinit(self.gpa); + defer block_scope.instructions.deinit(mod.gpa); - const fn_type = try zir_sema.analyzeBodyValueAsType(self, &block_scope, fn_type_inst, .{ + const fn_type = try zir_sema.analyzeBodyValueAsType(mod, &block_scope, fn_type_inst, .{ .instructions = fn_type_scope.instructions.items, }); if (body_node == 0) { @@ -1270,7 +1283,7 @@ fn astgenAndSemaFn( if (decl.typedValueManaged()) |tvm| { type_changed = !tvm.typed_value.ty.eql(fn_type); - tvm.deinit(self.gpa); + tvm.deinit(mod.gpa); } const fn_val = try Value.Tag.extern_fn.create(&decl_arena.allocator, decl); @@ -1282,13 +1295,13 @@ fn astgenAndSemaFn( }, }; decl.analysis = .complete; - decl.generation = self.generation; + decl.generation = mod.generation; - try self.comp.bin_file.allocateDeclIndexes(decl); - try self.comp.work_queue.writeItem(.{ .codegen_decl = decl }); + try mod.comp.bin_file.allocateDeclIndexes(decl); + try mod.comp.work_queue.writeItem(.{ .codegen_decl = decl }); - if (type_changed and self.emit_h != null) { - try self.comp.work_queue.writeItem(.{ .emit_h_decl = decl }); + if (type_changed and 
mod.emit_h != null) { + try mod.comp.work_queue.writeItem(.{ .emit_h_decl = decl }); } return type_changed; @@ -1304,17 +1317,17 @@ fn astgenAndSemaFn( .arena = &decl_arena.allocator, .parent = &decl.container.base, }; - defer gen_scope.instructions.deinit(self.gpa); + defer gen_scope.instructions.deinit(mod.gpa); // We need an instruction for each parameter, and they must be first in the body. - try gen_scope.instructions.resize(self.gpa, param_count); + try gen_scope.instructions.resize(mod.gpa, param_count); var params_scope = &gen_scope.base; var i: usize = 0; var it = fn_proto.iterate(tree); while (it.next()) |param| : (i += 1) { const name_token = param.name_token.?; const src = token_starts[name_token]; - const param_name = try self.identifierTokenString(&gen_scope.base, name_token); + const param_name = try mod.identifierTokenString(&gen_scope.base, name_token); const arg = try decl_arena.allocator.create(zir.Inst.NoOp); arg.* = .{ .base = .{ @@ -1335,17 +1348,17 @@ fn astgenAndSemaFn( params_scope = &sub_scope.base; } - try astgen.blockExpr(self, params_scope, body_node); + try astgen.blockExpr(mod, params_scope, body_node); if (gen_scope.instructions.items.len == 0 or !gen_scope.instructions.items[gen_scope.instructions.items.len - 1].tag.isNoReturn()) { const src = token_starts[tree.lastToken(body_node)]; - _ = try astgen.addZIRNoOp(self, &gen_scope.base, src, .returnvoid); + _ = try astgen.addZIRNoOp(mod, &gen_scope.base, src, .returnvoid); } - if (std.builtin.mode == .Debug and self.comp.verbose_ir) { - zir.dumpZir(self.gpa, "fn_body", decl.name, gen_scope.instructions.items) catch {}; + if (std.builtin.mode == .Debug and mod.comp.verbose_ir) { + zir.dumpZir(mod.gpa, "fn_body", decl.name, gen_scope.instructions.items) catch {}; } break :blk .{ @@ -1379,7 +1392,7 @@ fn astgenAndSemaFn( prev_is_inline = prev_func.state == .inline_only; } - tvm.deinit(self.gpa); + tvm.deinit(mod.gpa); } decl_arena_state.* = decl_arena.state; @@ -1393,25 +1406,25 @@ fn 
astgenAndSemaFn( }, }; decl.analysis = .complete; - decl.generation = self.generation; + decl.generation = mod.generation; if (!is_inline and fn_type.hasCodeGenBits()) { // We don't fully codegen the decl until later, but we do need to reserve a global // offset table index for it. This allows us to codegen decls out of dependency order, // increasing how many computations can be done in parallel. - try self.comp.bin_file.allocateDeclIndexes(decl); - try self.comp.work_queue.writeItem(.{ .codegen_decl = decl }); - if (type_changed and self.emit_h != null) { - try self.comp.work_queue.writeItem(.{ .emit_h_decl = decl }); + try mod.comp.bin_file.allocateDeclIndexes(decl); + try mod.comp.work_queue.writeItem(.{ .codegen_decl = decl }); + if (type_changed and mod.emit_h != null) { + try mod.comp.work_queue.writeItem(.{ .emit_h_decl = decl }); } } else if (!prev_is_inline and prev_type_has_bits) { - self.comp.bin_file.freeDecl(decl); + mod.comp.bin_file.freeDecl(decl); } if (fn_proto.extern_export_token) |maybe_export_token| { - if (token_tags[maybe_export_token] == .Keyword_export) { + if (token_tags[maybe_export_token] == .keyword_export) { if (is_inline) { - return self.failTok( + return mod.failTok( &block_scope.base, maybe_export_token, "export of inline function", @@ -1421,7 +1434,7 @@ fn astgenAndSemaFn( const export_src = token_starts[maybe_export_token]; const name = tree.tokenSlice(fn_proto.name_token.?); // TODO identifierTokenString // The scope needs to have the decl in it. 
- try self.analyzeExport(&block_scope.base, export_src, name, decl); + try mod.analyzeExport(&block_scope.base, export_src, name, decl); } } return type_changed or is_inline != prev_is_inline; @@ -1439,13 +1452,14 @@ fn astgenAndSemaVarDecl( decl.analysis = .in_progress; const token_starts = tree.tokens.items(.start); + const token_tags = tree.tokens.items(.tag); // We need the memory for the Type to go into the arena for the Decl - var decl_arena = std.heap.ArenaAllocator.init(self.gpa); + var decl_arena = std.heap.ArenaAllocator.init(mod.gpa); errdefer decl_arena.deinit(); const decl_arena_state = try decl_arena.allocator.create(std.heap.ArenaAllocator.State); - var decl_inst_table = Scope.Block.InstTable.init(self.gpa); + var decl_inst_table = Scope.Block.InstTable.init(mod.gpa); defer decl_inst_table.deinit(); var branch_quota: u32 = default_eval_branch_quota; @@ -1462,63 +1476,83 @@ fn astgenAndSemaVarDecl( .is_comptime = true, .branch_quota = &branch_quota, }; - defer block_scope.instructions.deinit(self.gpa); + defer block_scope.instructions.deinit(mod.gpa); - decl.is_pub = var_decl.getVisibToken() != null; + decl.is_pub = var_decl.visib_token != null; const is_extern = blk: { - const maybe_extern_token = var_decl.getExternExportToken() orelse - break :blk false; - if (tree.token_ids[maybe_extern_token] != .Keyword_extern) break :blk false; - if (var_decl.getInitNode()) |some| { - return self.failNode(&block_scope.base, some, "extern variables have no initializers", .{}); + const maybe_extern_token = var_decl.extern_export_token orelse break :blk false; + if (token_tags[maybe_extern_token] != .keyword_extern) break :blk false; + if (var_decl.ast.init_node != 0) { + return mod.failNode( + &block_scope.base, + var_decl.ast.init_node, + "extern variables have no initializers", + .{}, + ); } break :blk true; }; - if (var_decl.getLibName()) |lib_name| { + if (var_decl.lib_name) |lib_name| { assert(is_extern); - return self.failNode(&block_scope.base, lib_name, 
"TODO implement function library name", .{}); + return mod.failTok(&block_scope.base, lib_name, "TODO implement function library name", .{}); } - const is_mutable = tree.token_ids[var_decl.mut_token] == .Keyword_var; - const is_threadlocal = if (var_decl.getThreadLocalToken()) |some| blk: { + const is_mutable = token_tags[var_decl.mut_token] == .keyword_var; + const is_threadlocal = if (var_decl.threadlocal_token) |some| blk: { if (!is_mutable) { - return self.failTok(&block_scope.base, some, "threadlocal variable cannot be constant", .{}); + return mod.failTok(&block_scope.base, some, "threadlocal variable cannot be constant", .{}); } break :blk true; } else false; - assert(var_decl.getComptimeToken() == null); - if (var_decl.getAlignNode()) |align_expr| { - return self.failNode(&block_scope.base, align_expr, "TODO implement function align expression", .{}); + assert(var_decl.comptime_token == null); + if (var_decl.ast.align_node != 0) { + return mod.failNode( + &block_scope.base, + var_decl.ast.align_node, + "TODO implement function align expression", + .{}, + ); } - if (var_decl.getSectionNode()) |sect_expr| { - return self.failNode(&block_scope.base, sect_expr, "TODO implement function section expression", .{}); + if (var_decl.ast.section_node != 0) { + return mod.failNode( + &block_scope.base, + var_decl.ast.section_node, + "TODO implement function section expression", + .{}, + ); } - const var_info: struct { ty: Type, val: ?Value } = if (var_decl.getInitNode()) |init_node| vi: { - var gen_scope_arena = std.heap.ArenaAllocator.init(self.gpa); + const var_info: struct { ty: Type, val: ?Value } = if (var_decl.ast.init_node != 0) vi: { + var gen_scope_arena = std.heap.ArenaAllocator.init(mod.gpa); defer gen_scope_arena.deinit(); var gen_scope: Scope.GenZIR = .{ .decl = decl, .arena = &gen_scope_arena.allocator, .parent = &decl.container.base, }; - defer gen_scope.instructions.deinit(self.gpa); + defer gen_scope.instructions.deinit(mod.gpa); - const 
init_result_loc: astgen.ResultLoc = if (var_decl.getTypeNode()) |type_node| rl: { - const src = token_starts[type_node.firstToken()]; - const type_type = try astgen.addZIRInstConst(self, &gen_scope.base, src, .{ + const init_result_loc: astgen.ResultLoc = if (var_decl.ast.type_node != 0) rl: { + const type_node = var_decl.ast.type_node; + const src = token_starts[tree.firstToken(type_node)]; + const type_type = try astgen.addZIRInstConst(mod, &gen_scope.base, src, .{ .ty = Type.initTag(.type), .val = Value.initTag(.type_type), }); - const var_type = try astgen.expr(self, &gen_scope.base, .{ .ty = type_type }, type_node); + const var_type = try astgen.expr(mod, &gen_scope.base, .{ .ty = type_type }, type_node); break :rl .{ .ty = var_type }; } else .none; - const init_inst = try astgen.comptimeExpr(self, &gen_scope.base, init_result_loc, init_node); - if (std.builtin.mode == .Debug and self.comp.verbose_ir) { - zir.dumpZir(self.gpa, "var_init", decl.name, gen_scope.instructions.items) catch {}; + const init_inst = try astgen.comptimeExpr( + mod, + &gen_scope.base, + init_result_loc, + var_decl.ast.init_node, + ); + if (std.builtin.mode == .Debug and mod.comp.verbose_ir) { + zir.dumpZir(mod.gpa, "var_init", decl.name, gen_scope.instructions.items) catch {}; } - var var_inst_table = Scope.Block.InstTable.init(self.gpa); + var var_inst_table = Scope.Block.InstTable.init(mod.gpa); defer var_inst_table.deinit(); var branch_quota_vi: u32 = default_eval_branch_quota; @@ -1534,8 +1568,8 @@ fn astgenAndSemaVarDecl( .is_comptime = true, .branch_quota = &branch_quota_vi, }; - defer inner_block.instructions.deinit(self.gpa); - try zir_sema.analyzeBody(self, &inner_block, .{ + defer inner_block.instructions.deinit(mod.gpa); + try zir_sema.analyzeBody(mod, &inner_block, .{ .instructions = gen_scope.instructions.items, }); @@ -1550,24 +1584,30 @@ fn astgenAndSemaVarDecl( .val = try val.copy(block_scope.arena), }; } else if (!is_extern) { - return self.failTok(&block_scope.base, 
var_decl.firstToken(), "variables must be initialized", .{}); - } else if (var_decl.getTypeNode()) |type_node| vi: { + return mod.failTok( + &block_scope.base, + tree.firstToken(var_decl), + "variables must be initialized", + .{}, + ); + } else if (var_decl.ast.type_node != 0) vi: { + const type_node = var_decl.ast.type_node; // Temporary arena for the zir instructions. - var type_scope_arena = std.heap.ArenaAllocator.init(self.gpa); + var type_scope_arena = std.heap.ArenaAllocator.init(mod.gpa); defer type_scope_arena.deinit(); var type_scope: Scope.GenZIR = .{ .decl = decl, .arena = &type_scope_arena.allocator, .parent = &decl.container.base, }; - defer type_scope.instructions.deinit(self.gpa); + defer type_scope.instructions.deinit(mod.gpa); - const var_type = try astgen.typeExpr(self, &type_scope.base, type_node); - if (std.builtin.mode == .Debug and self.comp.verbose_ir) { - zir.dumpZir(self.gpa, "var_type", decl.name, type_scope.instructions.items) catch {}; + const var_type = try astgen.typeExpr(mod, &type_scope.base, type_node); + if (std.builtin.mode == .Debug and mod.comp.verbose_ir) { + zir.dumpZir(mod.gpa, "var_type", decl.name, type_scope.instructions.items) catch {}; } - const ty = try zir_sema.analyzeBodyValueAsType(self, &block_scope, var_type, .{ + const ty = try zir_sema.analyzeBodyValueAsType(mod, &block_scope, var_type, .{ .instructions = type_scope.instructions.items, }); break :vi .{ @@ -1575,18 +1615,28 @@ fn astgenAndSemaVarDecl( .val = null, }; } else { - return self.failTok(&block_scope.base, var_decl.firstToken(), "unable to infer variable type", .{}); + return mod.failTok( + &block_scope.base, + tree.firstToken(var_decl), + "unable to infer variable type", + .{}, + ); }; if (is_mutable and !var_info.ty.isValidVarType(is_extern)) { - return self.failTok(&block_scope.base, var_decl.firstToken(), "variable of type '{}' must be const", .{var_info.ty}); + return mod.failTok( + &block_scope.base, + tree.firstToken(var_decl), + "variable of 
type '{}' must be const", + .{var_info.ty}, + ); } var type_changed = true; if (decl.typedValueManaged()) |tvm| { type_changed = !tvm.typed_value.ty.eql(var_info.ty); - tvm.deinit(self.gpa); + tvm.deinit(mod.gpa); } const new_variable = try decl_arena.allocator.create(Var); @@ -1610,14 +1660,15 @@ fn astgenAndSemaVarDecl( }, }; decl.analysis = .complete; - decl.generation = self.generation; + decl.generation = mod.generation; - if (var_decl.getExternExportToken()) |maybe_export_token| { - if (tree.token_ids[maybe_export_token] == .Keyword_export) { + if (var_decl.extern_export_token) |maybe_export_token| { + if (token_tags[maybe_export_token] == .keyword_export) { const export_src = token_starts[maybe_export_token]; - const name = tree.tokenSlice(var_decl.name_token); // TODO identifierTokenString + const name_token = var_decl.ast.mut_token + 1; + const name = tree.tokenSlice(name_token); // TODO identifierTokenString // The scope needs to have the decl in it. - try self.analyzeExport(&block_scope.base, export_src, name, decl); + try mod.analyzeExport(&block_scope.base, export_src, name, decl); } } return type_changed; @@ -1761,7 +1812,7 @@ pub fn analyzeContainer(mod: *Module, container_scope: *Scope.Container) !void { decl_node, decl_i, tree.*, - null, + 0, tree.fnProtoSimple(¶ms, decl_node), ); }, @@ -1771,7 +1822,7 @@ pub fn analyzeContainer(mod: *Module, container_scope: *Scope.Container) !void { decl_node, decl_i, tree.*, - null, + 0, tree.fnProtoMulti(decl_node), ), .fn_proto_one => { @@ -1782,7 +1833,7 @@ pub fn analyzeContainer(mod: *Module, container_scope: *Scope.Container) !void { decl_node, decl_i, tree.*, - null, + 0, tree.fnProtoOne(¶ms, decl_node), ); }, @@ -1792,7 +1843,7 @@ pub fn analyzeContainer(mod: *Module, container_scope: *Scope.Container) !void { decl_node, decl_i, tree.*, - null, + 0, tree.fnProto(decl_node), ), @@ -1848,7 +1899,7 @@ pub fn analyzeContainer(mod: *Module, container_scope: *Scope.Container) !void { decl_node, decl_i, tree.*, 
- tree.containerFieldInit(decl), + tree.containerFieldInit(decl_node), ), .container_field_align => try mod.semaContainerField( container_scope, @@ -1856,7 +1907,7 @@ pub fn analyzeContainer(mod: *Module, container_scope: *Scope.Container) !void { decl_node, decl_i, tree.*, - tree.containerFieldAlign(decl), + tree.containerFieldAlign(decl_node), ), .container_field => try mod.semaContainerField( container_scope, @@ -1864,7 +1915,7 @@ pub fn analyzeContainer(mod: *Module, container_scope: *Scope.Container) !void { decl_node, decl_i, tree.*, - tree.containerField(decl), + tree.containerField(decl_node), ), .test_decl => { @@ -1936,14 +1987,14 @@ fn semaContainerFn( // in `Decl` to notice that the line number did not change. mod.comp.work_queue.writeItemAssumeCapacity(.{ .update_line_number = decl }); }, - .c, .wasm => {}, + .c, .wasm, .spirv => {}, } } } else { const new_decl = try mod.createNewDecl(&container_scope.base, name, decl_i, name_hash, contents_hash); container_scope.decls.putAssumeCapacity(new_decl, {}); - if (fn_proto.getExternExportInlineToken()) |maybe_export_token| { - if (tree.token_ids[maybe_export_token] == .Keyword_export) { + if (fn_proto.extern_export_token) |maybe_export_token| { + if (token_tags[maybe_export_token] == .keyword_export) { mod.comp.work_queue.writeItemAssumeCapacity(.{ .analyze_decl = new_decl }); } } @@ -1963,9 +2014,11 @@ fn semaContainerVar( defer tracy.end(); const token_starts = tree.tokens.items(.start); + const token_tags = tree.tokens.items(.tag); - const name_src = token_starts[var_decl.name_token]; - const name = tree.tokenSlice(var_decl.name_token); // TODO identifierTokenString + const name_token = var_decl.ast.mut_token + 1; + const name_src = token_starts[name_token]; + const name = tree.tokenSlice(name_token); // TODO identifierTokenString const name_hash = container_scope.fullyQualifiedNameHash(name); const contents_hash = std.zig.hashSrc(tree.getNodeSource(decl_node)); if (mod.decl_table.get(name_hash)) |decl| { 
@@ -1987,15 +2040,23 @@ fn semaContainerVar( } else { const new_decl = try mod.createNewDecl(&container_scope.base, name, decl_i, name_hash, contents_hash); container_scope.decls.putAssumeCapacity(new_decl, {}); - if (var_decl.getExternExportToken()) |maybe_export_token| { - if (tree.token_ids[maybe_export_token] == .Keyword_export) { + if (var_decl.extern_export_token) |maybe_export_token| { + if (token_tags[maybe_export_token] == .keyword_export) { mod.comp.work_queue.writeItemAssumeCapacity(.{ .analyze_decl = new_decl }); } } } } -fn semaContainerField() void { +fn semaContainerField( + mod: *Module, + container_scope: *Scope.Container, + deleted_decls: *std.AutoArrayHashMap(*Decl, void), + decl_node: ast.Node.Index, + decl_i: usize, + tree: ast.Tree, + field: ast.full.ContainerField, +) !void { const tracy = trace(@src()); defer tracy.end(); @@ -2898,7 +2959,7 @@ pub fn analyzeImport(self: *Module, scope: *Scope, src: usize, target_string: [] file_scope.* = .{ .sub_file_path = resolved_path, .source = .{ .unloaded = {} }, - .contents = .{ .not_available = {} }, + .tree = undefined, .status = .never_loaded, .pkg = found_pkg orelse cur_pkg, .root_container = .{ @@ -3415,11 +3476,12 @@ pub fn failTok( pub fn failNode( self: *Module, scope: *Scope, - ast_node: *ast.Node, + ast_node: ast.Node.Index, comptime format: []const u8, args: anytype, ) InnerError { - const src = scope.tree().tokens.items(.start)[ast_node.firstToken()]; + const tree = scope.tree(); + const src = tree.tokens.items(.start)[tree.firstToken(ast_node)]; return self.fail(scope, src, format, args); } diff --git a/src/astgen.zig b/src/astgen.zig index ece16d70da..dcc2ea9ad2 100644 --- a/src/astgen.zig +++ b/src/astgen.zig @@ -55,7 +55,7 @@ pub const ResultLoc = union(enum) { }; }; -pub fn typeExpr(mod: *Module, scope: *Scope, type_node: *ast.Node) InnerError!*zir.Inst { +pub fn typeExpr(mod: *Module, scope: *Scope, type_node: ast.Node.Index) InnerError!*zir.Inst { const type_src = 
scope.tree().token_locs[type_node.firstToken()].start; const type_type = try addZIRInstConst(mod, scope, type_src, .{ .ty = Type.initTag(.type), @@ -65,134 +65,133 @@ pub fn typeExpr(mod: *Module, scope: *Scope, type_node: *ast.Node) InnerError!*z return expr(mod, scope, type_rl, type_node); } -fn lvalExpr(mod: *Module, scope: *Scope, node: *ast.Node) InnerError!*zir.Inst { - switch (node.tag) { - .Root => unreachable, - .Use => unreachable, - .TestDecl => unreachable, - .DocComment => unreachable, - .VarDecl => unreachable, - .SwitchCase => unreachable, - .SwitchElse => unreachable, - .Else => unreachable, - .Payload => unreachable, - .PointerPayload => unreachable, - .PointerIndexPayload => unreachable, - .ErrorTag => unreachable, - .FieldInitializer => unreachable, - .ContainerField => unreachable, +fn lvalExpr(mod: *Module, scope: *Scope, node: ast.Node.Index) InnerError!*zir.Inst { + const tree = scope.tree(); + const node_tags = tree.nodes.items(.tag); + const main_tokens = tree.nodes.items(.main_token); + switch (node_tags[node]) { + .root => unreachable, + .@"usingnamespace" => unreachable, + .test_decl => unreachable, + .doc_comment => unreachable, + .var_decl => unreachable, + .switch_case => unreachable, + .switch_else => unreachable, + .container_field_init => unreachable, + .container_field_align => unreachable, + .container_field => unreachable, - .Assign, - .AssignBitAnd, - .AssignBitOr, - .AssignBitShiftLeft, - .AssignBitShiftRight, - .AssignBitXor, - .AssignDiv, - .AssignSub, - .AssignSubWrap, - .AssignMod, - .AssignAdd, - .AssignAddWrap, - .AssignMul, - .AssignMulWrap, - .Add, - .AddWrap, - .Sub, - .SubWrap, - .Mul, - .MulWrap, - .Div, - .Mod, - .BitAnd, - .BitOr, - .BitShiftLeft, - .BitShiftRight, - .BitXor, - .BangEqual, - .EqualEqual, - .GreaterThan, - .GreaterOrEqual, - .LessThan, - .LessOrEqual, - .ArrayCat, - .ArrayMult, - .BoolAnd, - .BoolOr, - .Asm, - .StringLiteral, - .IntegerLiteral, - .Call, - .Unreachable, - .Return, - .If, - .While, - 
.BoolNot, - .AddressOf, - .FloatLiteral, - .UndefinedLiteral, - .BoolLiteral, - .NullLiteral, - .OptionalType, - .Block, - .LabeledBlock, - .Break, + .assign, + .assign_bit_and, + .assign_bit_or, + .assign_bit_shift_left, + .assign_bit_shift_right, + .assign_bit_xor, + .assign_div, + .assign_sub, + .assign_sub_wrap, + .assign_mod, + .assign_add, + .assign_add_wrap, + .assign_mul, + .assign_mul_wrap, + .add, + .add_wrap, + .sub, + .sub_wrap, + .mul, + .mul_wrap, + .div, + .mod, + .bit_and, + .bit_or, + .bit_shift_left, + .bit_shift_right, + .bit_xor, + .bang_equal, + .equal_equal, + .greater_than, + .greater_or_equal, + .less_than, + .less_or_equal, + .array_cat, + .array_mult, + .bool_and, + .bool_or, + .@"asm", + .string_literal, + .integer_literal, + .call, + .@"unreachable", + .@"return", + .@"if", + .@"while", + .bool_not, + .address_of, + .float_literal, + .undefined_literal, + .bool_literal, + .null_literal, + .optional_type, + .block, + .labeled_block, + .@"break", .PtrType, - .ArrayType, - .ArrayTypeSentinel, - .EnumLiteral, + .array_type, + .array_type_sentinel, + .enum_literal, .MultilineStringLiteral, - .CharLiteral, - .Defer, - .Catch, - .ErrorUnion, - .MergeErrorSets, - .Range, - .Await, - .BitNot, - .Negation, - .NegationWrap, - .Resume, - .Try, - .SliceType, - .Slice, + .char_literal, + .@"defer", + .@"catch", + .error_union, + .merge_error_sets, + .range, + .@"await", + .bit_not, + .negation, + .negation_wrap, + .@"resume", + .@"try", + .slice_type, + .slice, .ArrayInitializer, .ArrayInitializerDot, .StructInitializer, .StructInitializerDot, - .Switch, - .For, - .Suspend, - .Continue, - .AnyType, - .ErrorType, + .@"switch", + .@"for", + .@"suspend", + .@"continue", + .@"anytype", + .error_type, .FnProto, - .AnyFrameType, - .ErrorSetDecl, + .anyframe_type, + .error_set_decl, .ContainerDecl, - .Comptime, - .Nosuspend, + .@"comptime", + .@"nosuspend", + .builtin_call, + .builtin_call_comma, => return mod.failNode(scope, node, "invalid left-hand side to 
assignment", .{}), - // @field can be assigned to - .BuiltinCall => { - const call = node.castTag(.BuiltinCall).?; - const tree = scope.tree(); - const builtin_name = tree.tokenSlice(call.builtin_token); - + // `@field` can be assigned to. + .builtin_call_two, .builtin_call_two_comma => { + const builtin_token = main_tokens[node]; + const builtin_name = tree.tokenSlice(builtin_token); if (!mem.eql(u8, builtin_name, "@field")) { return mod.failNode(scope, node, "invalid left-hand side to assignment", .{}); } }, // can be assigned to - .UnwrapOptional, - .Deref, - .Period, - .ArrayAccess, - .Identifier, - .GroupedExpression, - .OrElse, + .unwrap_optional, + .deref, + .period, + .array_access, + .identifier, + .grouped_expression, + .@"orelse", => {}, } return expr(mod, scope, .ref, node); @@ -202,16 +201,16 @@ fn lvalExpr(mod: *Module, scope: *Scope, node: *ast.Node) InnerError!*zir.Inst { /// When `rl` is discard, ptr, inferred_ptr, bitcasted_ptr, or inferred_ptr, the /// result instruction can be used to inspect whether it is isNoReturn() but that is it, /// it must otherwise not be used. -pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerError!*zir.Inst { +pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) InnerError!*zir.Inst { switch (node.tag) { - .Root => unreachable, // Top-level declaration. - .Use => unreachable, // Top-level declaration. - .TestDecl => unreachable, // Top-level declaration. - .DocComment => unreachable, // Top-level declaration. - .VarDecl => unreachable, // Handled in `blockExpr`. - .SwitchCase => unreachable, // Handled in `switchExpr`. - .SwitchElse => unreachable, // Handled in `switchExpr`. - .Range => unreachable, // Handled in `switchExpr`. + .root => unreachable, // Top-level declaration. + .@"usingnamespace" => unreachable, // Top-level declaration. + .test_decl => unreachable, // Top-level declaration. + .doc_comment => unreachable, // Top-level declaration. 
+ .var_decl => unreachable, // Handled in `blockExpr`. + .switch_case => unreachable, // Handled in `switchExpr`. + .switch_else => unreachable, // Handled in `switchExpr`. + .range => unreachable, // Handled in `switchExpr`. .Else => unreachable, // Handled explicitly the control flow expression functions. .Payload => unreachable, // Handled explicitly. .PointerPayload => unreachable, // Handled explicitly. @@ -220,114 +219,113 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerEr .FieldInitializer => unreachable, // Handled explicitly. .ContainerField => unreachable, // Handled explicitly. - .Assign => return rvalueVoid(mod, scope, rl, node, try assign(mod, scope, node.castTag(.Assign).?)), - .AssignBitAnd => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignBitAnd).?, .bit_and)), - .AssignBitOr => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignBitOr).?, .bit_or)), - .AssignBitShiftLeft => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignBitShiftLeft).?, .shl)), - .AssignBitShiftRight => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignBitShiftRight).?, .shr)), - .AssignBitXor => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignBitXor).?, .xor)), - .AssignDiv => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignDiv).?, .div)), - .AssignSub => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignSub).?, .sub)), - .AssignSubWrap => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignSubWrap).?, .subwrap)), - .AssignMod => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignMod).?, .mod_rem)), - .AssignAdd => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignAdd).?, .add)), - .AssignAddWrap => 
return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignAddWrap).?, .addwrap)), - .AssignMul => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignMul).?, .mul)), - .AssignMulWrap => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignMulWrap).?, .mulwrap)), + .assign => return rvalueVoid(mod, scope, rl, node, try assign(mod, scope, node)), + .assign_bit_and => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .bit_and)), + .assign_bit_or => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .bit_or)), + .assign_bit_shift_left => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .shl)), + .assign_bit_shift_right => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .shr)), + .assign_bit_xor => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .xor)), + .assign_div => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .div)), + .assign_sub => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .sub)), + .assign_sub_wrap => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .subwrap)), + .assign_mod => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .mod_rem)), + .assign_add => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .add)), + .assign_add_wrap => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .addwrap)), + .assign_mul => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .mul)), + .assign_mul_wrap => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .mulwrap)), - .Add => return simpleBinOp(mod, scope, rl, node.castTag(.Add).?, .add), - .AddWrap => return simpleBinOp(mod, scope, rl, node.castTag(.AddWrap).?, .addwrap), - .Sub => return simpleBinOp(mod, scope, rl, node.castTag(.Sub).?, .sub), - 
.SubWrap => return simpleBinOp(mod, scope, rl, node.castTag(.SubWrap).?, .subwrap), - .Mul => return simpleBinOp(mod, scope, rl, node.castTag(.Mul).?, .mul), - .MulWrap => return simpleBinOp(mod, scope, rl, node.castTag(.MulWrap).?, .mulwrap), - .Div => return simpleBinOp(mod, scope, rl, node.castTag(.Div).?, .div), - .Mod => return simpleBinOp(mod, scope, rl, node.castTag(.Mod).?, .mod_rem), - .BitAnd => return simpleBinOp(mod, scope, rl, node.castTag(.BitAnd).?, .bit_and), - .BitOr => return simpleBinOp(mod, scope, rl, node.castTag(.BitOr).?, .bit_or), - .BitShiftLeft => return simpleBinOp(mod, scope, rl, node.castTag(.BitShiftLeft).?, .shl), - .BitShiftRight => return simpleBinOp(mod, scope, rl, node.castTag(.BitShiftRight).?, .shr), - .BitXor => return simpleBinOp(mod, scope, rl, node.castTag(.BitXor).?, .xor), + .add => return simpleBinOp(mod, scope, rl, node, .add), + .add_wrap => return simpleBinOp(mod, scope, rl, node, .addwrap), + .sub => return simpleBinOp(mod, scope, rl, node, .sub), + .sub_wrap => return simpleBinOp(mod, scope, rl, node, .subwrap), + .mul => return simpleBinOp(mod, scope, rl, node, .mul), + .mul_wrap => return simpleBinOp(mod, scope, rl, node, .mulwrap), + .div => return simpleBinOp(mod, scope, rl, node, .div), + .mod => return simpleBinOp(mod, scope, rl, node, .mod_rem), + .bit_and => return simpleBinOp(mod, scope, rl, node, .bit_and), + .bit_or => return simpleBinOp(mod, scope, rl, node, .bit_or), + .bit_shift_left => return simpleBinOp(mod, scope, rl, node, .shl), + .bit_shift_right => return simpleBinOp(mod, scope, rl, node, .shr), + .bit_xor => return simpleBinOp(mod, scope, rl, node, .xor), - .BangEqual => return simpleBinOp(mod, scope, rl, node.castTag(.BangEqual).?, .cmp_neq), - .EqualEqual => return simpleBinOp(mod, scope, rl, node.castTag(.EqualEqual).?, .cmp_eq), - .GreaterThan => return simpleBinOp(mod, scope, rl, node.castTag(.GreaterThan).?, .cmp_gt), - .GreaterOrEqual => return simpleBinOp(mod, scope, rl, 
node.castTag(.GreaterOrEqual).?, .cmp_gte), - .LessThan => return simpleBinOp(mod, scope, rl, node.castTag(.LessThan).?, .cmp_lt), - .LessOrEqual => return simpleBinOp(mod, scope, rl, node.castTag(.LessOrEqual).?, .cmp_lte), + .bang_equal => return simpleBinOp(mod, scope, rl, node, .cmp_neq), + .equal_equal => return simpleBinOp(mod, scope, rl, node, .cmp_eq), + .greater_than => return simpleBinOp(mod, scope, rl, node, .cmp_gt), + .greater_or_equal => return simpleBinOp(mod, scope, rl, node, .cmp_gte), + .less_than => return simpleBinOp(mod, scope, rl, node, .cmp_lt), + .less_or_equal => return simpleBinOp(mod, scope, rl, node, .cmp_lte), - .ArrayCat => return simpleBinOp(mod, scope, rl, node.castTag(.ArrayCat).?, .array_cat), - .ArrayMult => return simpleBinOp(mod, scope, rl, node.castTag(.ArrayMult).?, .array_mul), + .array_cat => return simpleBinOp(mod, scope, rl, node, .array_cat), + .array_mult => return simpleBinOp(mod, scope, rl, node, .array_mul), - .BoolAnd => return boolBinOp(mod, scope, rl, node.castTag(.BoolAnd).?), - .BoolOr => return boolBinOp(mod, scope, rl, node.castTag(.BoolOr).?), + .bool_and => return boolBinOp(mod, scope, rl, node), + .bool_or => return boolBinOp(mod, scope, rl, node), - .BoolNot => return rvalue(mod, scope, rl, try boolNot(mod, scope, node.castTag(.BoolNot).?)), - .BitNot => return rvalue(mod, scope, rl, try bitNot(mod, scope, node.castTag(.BitNot).?)), - .Negation => return rvalue(mod, scope, rl, try negation(mod, scope, node.castTag(.Negation).?, .sub)), - .NegationWrap => return rvalue(mod, scope, rl, try negation(mod, scope, node.castTag(.NegationWrap).?, .subwrap)), + .bool_not => return rvalue(mod, scope, rl, try boolNot(mod, scope, node)), + .bit_not => return rvalue(mod, scope, rl, try bitNot(mod, scope, node)), + .negation => return rvalue(mod, scope, rl, try negation(mod, scope, node, .sub)), + .negation_wrap => return rvalue(mod, scope, rl, try negation(mod, scope, node, .subwrap)), - .Identifier => return try 
identifier(mod, scope, rl, node.castTag(.Identifier).?), - .Asm => return rvalue(mod, scope, rl, try assembly(mod, scope, node.castTag(.Asm).?)), - .StringLiteral => return rvalue(mod, scope, rl, try stringLiteral(mod, scope, node.castTag(.StringLiteral).?)), - .IntegerLiteral => return rvalue(mod, scope, rl, try integerLiteral(mod, scope, node.castTag(.IntegerLiteral).?)), - .BuiltinCall => return builtinCall(mod, scope, rl, node.castTag(.BuiltinCall).?), - .Call => return callExpr(mod, scope, rl, node.castTag(.Call).?), - .Unreachable => return unreach(mod, scope, node.castTag(.Unreachable).?), - .Return => return ret(mod, scope, node.castTag(.Return).?), - .If => return ifExpr(mod, scope, rl, node.castTag(.If).?), - .While => return whileExpr(mod, scope, rl, node.castTag(.While).?), - .Period => return field(mod, scope, rl, node.castTag(.Period).?), - .Deref => return rvalue(mod, scope, rl, try deref(mod, scope, node.castTag(.Deref).?)), - .AddressOf => return rvalue(mod, scope, rl, try addressOf(mod, scope, node.castTag(.AddressOf).?)), - .FloatLiteral => return rvalue(mod, scope, rl, try floatLiteral(mod, scope, node.castTag(.FloatLiteral).?)), - .UndefinedLiteral => return rvalue(mod, scope, rl, try undefLiteral(mod, scope, node.castTag(.UndefinedLiteral).?)), - .BoolLiteral => return rvalue(mod, scope, rl, try boolLiteral(mod, scope, node.castTag(.BoolLiteral).?)), - .NullLiteral => return rvalue(mod, scope, rl, try nullLiteral(mod, scope, node.castTag(.NullLiteral).?)), - .OptionalType => return rvalue(mod, scope, rl, try optionalType(mod, scope, node.castTag(.OptionalType).?)), - .UnwrapOptional => return unwrapOptional(mod, scope, rl, node.castTag(.UnwrapOptional).?), - .Block => return rvalueVoid(mod, scope, rl, node, try blockExpr(mod, scope, node.castTag(.Block).?)), - .LabeledBlock => return labeledBlockExpr(mod, scope, rl, node.castTag(.LabeledBlock).?, .block), - .Break => return rvalue(mod, scope, rl, try breakExpr(mod, scope, 
node.castTag(.Break).?)), - .Continue => return rvalue(mod, scope, rl, try continueExpr(mod, scope, node.castTag(.Continue).?)), - .PtrType => return rvalue(mod, scope, rl, try ptrType(mod, scope, node.castTag(.PtrType).?)), - .GroupedExpression => return expr(mod, scope, rl, node.castTag(.GroupedExpression).?.expr), - .ArrayType => return rvalue(mod, scope, rl, try arrayType(mod, scope, node.castTag(.ArrayType).?)), - .ArrayTypeSentinel => return rvalue(mod, scope, rl, try arrayTypeSentinel(mod, scope, node.castTag(.ArrayTypeSentinel).?)), - .EnumLiteral => return rvalue(mod, scope, rl, try enumLiteral(mod, scope, node.castTag(.EnumLiteral).?)), - .MultilineStringLiteral => return rvalue(mod, scope, rl, try multilineStrLiteral(mod, scope, node.castTag(.MultilineStringLiteral).?)), - .CharLiteral => return rvalue(mod, scope, rl, try charLiteral(mod, scope, node.castTag(.CharLiteral).?)), - .SliceType => return rvalue(mod, scope, rl, try sliceType(mod, scope, node.castTag(.SliceType).?)), - .ErrorUnion => return rvalue(mod, scope, rl, try typeInixOp(mod, scope, node.castTag(.ErrorUnion).?, .error_union_type)), - .MergeErrorSets => return rvalue(mod, scope, rl, try typeInixOp(mod, scope, node.castTag(.MergeErrorSets).?, .merge_error_sets)), - .AnyFrameType => return rvalue(mod, scope, rl, try anyFrameType(mod, scope, node.castTag(.AnyFrameType).?)), - .ErrorSetDecl => return rvalue(mod, scope, rl, try errorSetDecl(mod, scope, node.castTag(.ErrorSetDecl).?)), - .ErrorType => return rvalue(mod, scope, rl, try errorType(mod, scope, node.castTag(.ErrorType).?)), - .For => return forExpr(mod, scope, rl, node.castTag(.For).?), - .ArrayAccess => return arrayAccess(mod, scope, rl, node.castTag(.ArrayAccess).?), - .Slice => return rvalue(mod, scope, rl, try sliceExpr(mod, scope, node.castTag(.Slice).?)), - .Catch => return catchExpr(mod, scope, rl, node.castTag(.Catch).?), - .Comptime => return comptimeKeyword(mod, scope, rl, node.castTag(.Comptime).?), - .OrElse => return 
orelseExpr(mod, scope, rl, node.castTag(.OrElse).?), - .Switch => return switchExpr(mod, scope, rl, node.castTag(.Switch).?), - .ContainerDecl => return containerDecl(mod, scope, rl, node.castTag(.ContainerDecl).?), + .identifier => return try identifier(mod, scope, rl, node), + .@"asm" => return rvalue(mod, scope, rl, try assembly(mod, scope, node)), + .string_literal => return rvalue(mod, scope, rl, try stringLiteral(mod, scope, node)), + .integer_literal => return rvalue(mod, scope, rl, try integerLiteral(mod, scope, node)), + .builtin_call => return builtinCall(mod, scope, rl, node), + .call => return callExpr(mod, scope, rl, node), + .@"unreachable" => return unreach(mod, scope, node), + .@"return" => return ret(mod, scope, node), + .@"if" => return ifExpr(mod, scope, rl, node), + .@"while" => return whileExpr(mod, scope, rl, node), + .period => return field(mod, scope, rl, node), + .deref => return rvalue(mod, scope, rl, try deref(mod, scope, node)), + .address_of => return rvalue(mod, scope, rl, try addressOf(mod, scope, node)), + .float_literal => return rvalue(mod, scope, rl, try floatLiteral(mod, scope, node)), + .undefined_literal => return rvalue(mod, scope, rl, try undefLiteral(mod, scope, node)), + .bool_literal => return rvalue(mod, scope, rl, try boolLiteral(mod, scope, node)), + .null_literal => return rvalue(mod, scope, rl, try nullLiteral(mod, scope, node)), + .optional_type => return rvalue(mod, scope, rl, try optionalType(mod, scope, node)), + .unwrap_optional => return unwrapOptional(mod, scope, rl, node), + .block => return rvalueVoid(mod, scope, rl, node, try blockExpr(mod, scope, node)), + .labeled_block => return labeledBlockExpr(mod, scope, rl, node, .block), + .@"break" => return rvalue(mod, scope, rl, try breakExpr(mod, scope, node)), + .@"continue" => return rvalue(mod, scope, rl, try continueExpr(mod, scope, node)), + .grouped_expression => return expr(mod, scope, rl, node.expr), + .array_type => return rvalue(mod, scope, rl, try 
arrayType(mod, scope, node)), + .array_type_sentinel => return rvalue(mod, scope, rl, try arrayTypeSentinel(mod, scope, node)), + .enum_literal => return rvalue(mod, scope, rl, try enumLiteral(mod, scope, node)), + .MultilineStringLiteral => return rvalue(mod, scope, rl, try multilineStrLiteral(mod, scope, node)), + .char_literal => return rvalue(mod, scope, rl, try charLiteral(mod, scope, node)), + .slice_type => return rvalue(mod, scope, rl, try sliceType(mod, scope, node)), + .error_union => return rvalue(mod, scope, rl, try typeInixOp(mod, scope, node, .error_union_type)), + .merge_error_sets => return rvalue(mod, scope, rl, try typeInixOp(mod, scope, node, .merge_error_sets)), + .anyframe_type => return rvalue(mod, scope, rl, try anyFrameType(mod, scope, node)), + .error_set_decl => return rvalue(mod, scope, rl, try errorSetDecl(mod, scope, node)), + .error_type => return rvalue(mod, scope, rl, try errorType(mod, scope, node)), + .@"for" => return forExpr(mod, scope, rl, node), + .array_access => return arrayAccess(mod, scope, rl, node), + .slice => return rvalue(mod, scope, rl, try sliceExpr(mod, scope, node)), + .@"catch" => return catchExpr(mod, scope, rl, node), + .@"comptime" => return comptimeKeyword(mod, scope, rl, node), + .@"orelse" => return orelseExpr(mod, scope, rl, node), + .@"switch" => return switchExpr(mod, scope, rl, node), + .ContainerDecl => return containerDecl(mod, scope, rl, node), - .Defer => return mod.failNode(scope, node, "TODO implement astgen.expr for .Defer", .{}), - .Await => return mod.failNode(scope, node, "TODO implement astgen.expr for .Await", .{}), - .Resume => return mod.failNode(scope, node, "TODO implement astgen.expr for .Resume", .{}), - .Try => return mod.failNode(scope, node, "TODO implement astgen.expr for .Try", .{}), + .@"defer" => return mod.failNode(scope, node, "TODO implement astgen.expr for .defer", .{}), + .@"await" => return mod.failNode(scope, node, "TODO implement astgen.expr for .await", .{}), + 
.@"resume" => return mod.failNode(scope, node, "TODO implement astgen.expr for .resume", .{}), + .@"try" => return mod.failNode(scope, node, "TODO implement astgen.expr for .Try", .{}), .ArrayInitializer => return mod.failNode(scope, node, "TODO implement astgen.expr for .ArrayInitializer", .{}), .ArrayInitializerDot => return mod.failNode(scope, node, "TODO implement astgen.expr for .ArrayInitializerDot", .{}), .StructInitializer => return mod.failNode(scope, node, "TODO implement astgen.expr for .StructInitializer", .{}), .StructInitializerDot => return mod.failNode(scope, node, "TODO implement astgen.expr for .StructInitializerDot", .{}), - .Suspend => return mod.failNode(scope, node, "TODO implement astgen.expr for .Suspend", .{}), - .AnyType => return mod.failNode(scope, node, "TODO implement astgen.expr for .AnyType", .{}), + .@"suspend" => return mod.failNode(scope, node, "TODO implement astgen.expr for .suspend", .{}), + .@"anytype" => return mod.failNode(scope, node, "TODO implement astgen.expr for .anytype", .{}), .FnProto => return mod.failNode(scope, node, "TODO implement astgen.expr for .FnProto", .{}), - .Nosuspend => return mod.failNode(scope, node, "TODO implement astgen.expr for .Nosuspend", .{}), + .@"nosuspend" => return mod.failNode(scope, node, "TODO implement astgen.expr for .nosuspend", .{}), } } -fn comptimeKeyword(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Comptime) InnerError!*zir.Inst { +fn comptimeKeyword(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.@"comptime") InnerError!*zir.Inst { const tracy = trace(@src()); defer tracy.end(); @@ -338,7 +336,7 @@ pub fn comptimeExpr( mod: *Module, parent_scope: *Scope, rl: ResultLoc, - node: *ast.Node, + node: ast.Node.Index, ) InnerError!*zir.Inst { // If we are already in a comptime scope, no need to make another one. 
if (parent_scope.isComptime()) { @@ -347,7 +345,7 @@ pub fn comptimeExpr( // Optimization for labeled blocks: don't need to have 2 layers of blocks, // we can reuse the existing one. - if (node.castTag(.LabeledBlock)) |block_node| { + if (node.castTag(.labeled_block)) |block_node| { return labeledBlockExpr(mod, parent_scope, rl, block_node, .block_comptime); } @@ -366,6 +364,8 @@ pub fn comptimeExpr( _ = try expr(mod, &block_scope.base, rl, node); const tree = parent_scope.tree(); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); const src = tree.token_locs[node.firstToken()].start; const block = try addZIRInstBlock(mod, parent_scope, src, .block_comptime_flat, .{ @@ -381,6 +381,8 @@ fn breakExpr( node: *ast.Node.ControlFlowExpression, ) InnerError!*zir.Inst { const tree = parent_scope.tree(); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); const src = tree.token_locs[node.ltoken].start; // Look for the label in the scope. @@ -445,6 +447,8 @@ fn breakExpr( fn continueExpr(mod: *Module, parent_scope: *Scope, node: *ast.Node.ControlFlowExpression) InnerError!*zir.Inst { const tree = parent_scope.tree(); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); const src = tree.token_locs[node.ltoken].start; // Look for the label in the scope. 
@@ -485,7 +489,7 @@ fn continueExpr(mod: *Module, parent_scope: *Scope, node: *ast.Node.ControlFlowE } } -pub fn blockExpr(mod: *Module, parent_scope: *Scope, block_node: *ast.Node.Block) InnerError!void { +pub fn blockExpr(mod: *Module, parent_scope: *Scope, block_node: *ast.Node.block) InnerError!void { const tracy = trace(@src()); defer tracy.end(); @@ -502,6 +506,8 @@ fn checkLabelRedefinition(mod: *Module, parent_scope: *Scope, label: ast.TokenIn if (gen_zir.label) |prev_label| { if (try tokenIdentEql(mod, parent_scope, label, prev_label.token)) { const tree = parent_scope.tree(); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); const label_src = tree.token_locs[label].start; const prev_label_src = tree.token_locs[prev_label.token].start; @@ -539,7 +545,7 @@ fn labeledBlockExpr( mod: *Module, parent_scope: *Scope, rl: ResultLoc, - block_node: *ast.Node.LabeledBlock, + block_node: *ast.Node.labeled_block, zir_tag: zir.Inst.Tag, ) InnerError!*zir.Inst { const tracy = trace(@src()); @@ -548,6 +554,8 @@ fn labeledBlockExpr( assert(zir_tag == .block or zir_tag == .block_comptime); const tree = parent_scope.tree(); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); const src = tree.token_locs[block_node.lbrace].start; try checkLabelRedefinition(mod, parent_scope, block_node.label); @@ -627,10 +635,12 @@ fn labeledBlockExpr( fn blockExprStmts( mod: *Module, parent_scope: *Scope, - node: *ast.Node, - statements: []*ast.Node, + node: ast.Node.Index, + statements: []const ast.Node.Index, ) !void { const tree = parent_scope.tree(); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); var block_arena = std.heap.ArenaAllocator.init(mod.gpa); defer block_arena.deinit(); @@ -640,24 +650,24 @@ fn blockExprStmts( const src = tree.token_locs[statement.firstToken()].start; _ = try addZIRNoOp(mod, scope, src, .dbg_stmt); switch 
(statement.tag) { - .VarDecl => { - const var_decl_node = statement.castTag(.VarDecl).?; + .var_decl => { + const var_decl_node = statement.castTag(.var_decl).?; scope = try varDecl(mod, scope, var_decl_node, &block_arena.allocator); }, - .Assign => try assign(mod, scope, statement.castTag(.Assign).?), - .AssignBitAnd => try assignOp(mod, scope, statement.castTag(.AssignBitAnd).?, .bit_and), - .AssignBitOr => try assignOp(mod, scope, statement.castTag(.AssignBitOr).?, .bit_or), - .AssignBitShiftLeft => try assignOp(mod, scope, statement.castTag(.AssignBitShiftLeft).?, .shl), - .AssignBitShiftRight => try assignOp(mod, scope, statement.castTag(.AssignBitShiftRight).?, .shr), - .AssignBitXor => try assignOp(mod, scope, statement.castTag(.AssignBitXor).?, .xor), - .AssignDiv => try assignOp(mod, scope, statement.castTag(.AssignDiv).?, .div), - .AssignSub => try assignOp(mod, scope, statement.castTag(.AssignSub).?, .sub), - .AssignSubWrap => try assignOp(mod, scope, statement.castTag(.AssignSubWrap).?, .subwrap), - .AssignMod => try assignOp(mod, scope, statement.castTag(.AssignMod).?, .mod_rem), - .AssignAdd => try assignOp(mod, scope, statement.castTag(.AssignAdd).?, .add), - .AssignAddWrap => try assignOp(mod, scope, statement.castTag(.AssignAddWrap).?, .addwrap), - .AssignMul => try assignOp(mod, scope, statement.castTag(.AssignMul).?, .mul), - .AssignMulWrap => try assignOp(mod, scope, statement.castTag(.AssignMulWrap).?, .mulwrap), + .assign => try assign(mod, scope, statement), + .assign_bit_and => try assignOp(mod, scope, statement, .bit_and), + .assign_bit_or => try assignOp(mod, scope, statement, .bit_or), + .assign_bit_shift_left => try assignOp(mod, scope, statement, .shl), + .assign_bit_shift_right => try assignOp(mod, scope, statement, .shr), + .assign_bit_xor => try assignOp(mod, scope, statement, .xor), + .assign_div => try assignOp(mod, scope, statement, .div), + .assign_sub => try assignOp(mod, scope, statement, .sub), + .assign_sub_wrap => try 
assignOp(mod, scope, statement, .subwrap), + .assign_mod => try assignOp(mod, scope, statement, .mod_rem), + .assign_add => try assignOp(mod, scope, statement, .add), + .assign_add_wrap => try assignOp(mod, scope, statement, .addwrap), + .assign_mul => try assignOp(mod, scope, statement, .mul), + .assign_mul_wrap => try assignOp(mod, scope, statement, .mulwrap), else => { const possibly_unused_result = try expr(mod, scope, .none, statement); @@ -672,7 +682,7 @@ fn blockExprStmts( fn varDecl( mod: *Module, scope: *Scope, - node: *ast.Node.VarDecl, + node: *ast.Node.var_decl, block_arena: *Allocator, ) InnerError!*Scope { if (node.getComptimeToken()) |comptime_token| { @@ -682,6 +692,8 @@ fn varDecl( return mod.failNode(scope, align_node, "TODO implement alignment on locals", .{}); } const tree = scope.tree(); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); const name_src = tree.token_locs[node.name_token].start; const ident_name = try mod.identifierTokenString(scope, node.name_token); @@ -733,7 +745,7 @@ fn varDecl( return mod.fail(scope, name_src, "variables must be initialized", .{}); switch (tree.token_ids[node.mut_token]) { - .Keyword_const => { + .keyword_const => { // Depending on the type of AST the initialization expression is, we may need an lvalue // or an rvalue as a result location. If it is an rvalue, we can use the instruction as // the variable, no memory location needed. 
@@ -834,7 +846,7 @@ fn varDecl( }; return &sub_scope.base; }, - .Keyword_var => { + .keyword_var => { var resolve_inferred_alloc: ?*zir.Inst = null; const var_data: struct { result_loc: ResultLoc, alloc: *zir.Inst } = if (node.getTypeNode()) |type_node| a: { const type_inst = try typeExpr(mod, scope, type_node); @@ -862,33 +874,39 @@ fn varDecl( } } -fn assign(mod: *Module, scope: *Scope, infix_node: *ast.Node.SimpleInfixOp) InnerError!void { - if (infix_node.lhs.castTag(.Identifier)) |ident| { - // This intentionally does not support @"_" syntax. - const ident_name = scope.tree().tokenSlice(ident.token); +fn assign(mod: *Module, scope: *Scope, infix_node: ast.Node.Index) InnerError!void { + const tree = scope.tree(); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); + const lhs = node_datas[infix_node].lhs; + const rhs = node_datas[infix_node].rhs; + if (node_tags[lhs] == .identifier) { + // This intentionally does not support `@"_"` syntax. 
+ const ident_name = tree.tokenSlice(main_tokens[lhs]); if (mem.eql(u8, ident_name, "_")) { _ = try expr(mod, scope, .discard, infix_node.rhs); return; } } - const lvalue = try lvalExpr(mod, scope, infix_node.lhs); - _ = try expr(mod, scope, .{ .ptr = lvalue }, infix_node.rhs); + const lvalue = try lvalExpr(mod, scope, lhs); + _ = try expr(mod, scope, .{ .ptr = lvalue }, rhs); } fn assignOp( mod: *Module, scope: *Scope, - infix_node: *ast.Node.SimpleInfixOp, + infix_node: ast.Node.Index, op_inst_tag: zir.Inst.Tag, ) InnerError!void { - const lhs_ptr = try lvalExpr(mod, scope, infix_node.lhs); + const tree = scope.tree(); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); + + const lhs_ptr = try lvalExpr(mod, scope, node_datas[infix_node].lhs); const lhs = try addZIRUnOp(mod, scope, lhs_ptr.src, .deref, lhs_ptr); const lhs_type = try addZIRUnOp(mod, scope, lhs_ptr.src, .typeof, lhs); - const rhs = try expr(mod, scope, .{ .ty = lhs_type }, infix_node.rhs); - - const tree = scope.tree(); - const src = tree.token_locs[infix_node.op_token].start; - + const rhs = try expr(mod, scope, .{ .ty = lhs_type }, node_datas[infix_node].rhs); + const src = token_starts[main_tokens[infix_node]]; const result = try addZIRBinOp(mod, scope, src, op_inst_tag, lhs, rhs); _ = try addZIRBinOp(mod, scope, src, .store, lhs_ptr, result); } @@ -935,7 +953,7 @@ fn optionalType(mod: *Module, scope: *Scope, node: *ast.Node.SimplePrefixOp) Inn return addZIRUnOp(mod, scope, src, .optional_type, operand); } -fn sliceType(mod: *Module, scope: *Scope, node: *ast.Node.SliceType) InnerError!*zir.Inst { +fn sliceType(mod: *Module, scope: *Scope, node: *ast.Node.slice_type) InnerError!*zir.Inst { const tree = scope.tree(); const src = tree.token_locs[node.op_token].start; return ptrSliceType(mod, scope, src, &node.ptr_info, node.rhs, .Slice); @@ -948,7 +966,7 @@ fn ptrType(mod: *Module, scope: *Scope, node: *ast.Node.PtrType) InnerError!*zir .Asterisk, 
.AsteriskAsterisk => .One, // TODO stage1 type inference bug .LBracket => @as(std.builtin.TypeInfo.Pointer.Size, switch (tree.token_ids[node.op_token + 2]) { - .Identifier => .C, + .identifier => .C, else => .Many, }), else => unreachable, @@ -998,7 +1016,7 @@ fn ptrSliceType(mod: *Module, scope: *Scope, src: usize, ptr_info: *ast.PtrInfo, return addZIRInst(mod, scope, src, zir.Inst.PtrType, .{ .child_type = child_type }, kw_args); } -fn arrayType(mod: *Module, scope: *Scope, node: *ast.Node.ArrayType) !*zir.Inst { +fn arrayType(mod: *Module, scope: *Scope, node: *ast.Node.array_type) !*zir.Inst { const tree = scope.tree(); const src = tree.token_locs[node.op_token].start; const usize_type = try addZIRInstConst(mod, scope, src, .{ @@ -1013,7 +1031,7 @@ fn arrayType(mod: *Module, scope: *Scope, node: *ast.Node.ArrayType) !*zir.Inst return addZIRBinOp(mod, scope, src, .array_type, len, elem_type); } -fn arrayTypeSentinel(mod: *Module, scope: *Scope, node: *ast.Node.ArrayTypeSentinel) !*zir.Inst { +fn arrayTypeSentinel(mod: *Module, scope: *Scope, node: *ast.Node.array_type_sentinel) !*zir.Inst { const tree = scope.tree(); const src = tree.token_locs[node.op_token].start; const usize_type = try addZIRInstConst(mod, scope, src, .{ @@ -1034,7 +1052,7 @@ fn arrayTypeSentinel(mod: *Module, scope: *Scope, node: *ast.Node.ArrayTypeSenti }, .{}); } -fn anyFrameType(mod: *Module, scope: *Scope, node: *ast.Node.AnyFrameType) InnerError!*zir.Inst { +fn anyFrameType(mod: *Module, scope: *Scope, node: *ast.Node.anyframe_type) InnerError!*zir.Inst { const tree = scope.tree(); const src = tree.token_locs[node.anyframe_token].start; if (node.result) |some| { @@ -1056,7 +1074,7 @@ fn typeInixOp(mod: *Module, scope: *Scope, node: *ast.Node.SimpleInfixOp, op_ins return addZIRBinOp(mod, scope, src, op_inst_tag, error_set, payload); } -fn enumLiteral(mod: *Module, scope: *Scope, node: *ast.Node.EnumLiteral) !*zir.Inst { +fn enumLiteral(mod: *Module, scope: *Scope, node: 
*ast.Node.enum_literal) !*zir.Inst { const tree = scope.tree(); const src = tree.token_locs[node.name].start; const name = try mod.identifierTokenString(scope, node.name); @@ -1141,13 +1159,13 @@ fn containerDecl(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Con var layout: std.builtin.TypeInfo.ContainerLayout = .Auto; if (node.layout_token) |some| switch (tree.token_ids[some]) { - .Keyword_extern => layout = .Extern, - .Keyword_packed => layout = .Packed, + .keyword_extern => layout = .Extern, + .keyword_packed => layout = .Packed, else => unreachable, }; const container_type = switch (tree.token_ids[node.kind_token]) { - .Keyword_enum => blk: { + .keyword_enum => blk: { const tag_type: ?*zir.Inst = switch (node.init_arg_expr) { .Type => |t| try typeExpr(mod, &gen_scope.base, t), .None => null, @@ -1174,7 +1192,7 @@ fn containerDecl(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Con }; break :blk Type.initPayload(&enum_type.base); }, - .Keyword_struct => blk: { + .keyword_struct => blk: { assert(node.init_arg_expr == .None); const inst = try addZIRInst(mod, &gen_scope.base, src, zir.Inst.StructType, .{ .fields = try arena.dupe(*zir.Inst, fields.items), @@ -1196,7 +1214,7 @@ fn containerDecl(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Con }; break :blk Type.initPayload(&struct_type.base); }, - .Keyword_union => blk: { + .keyword_union => blk: { const init_inst = switch (node.init_arg_expr) { .Enum => |e| if (e) |t| try typeExpr(mod, &gen_scope.base, t) else null, .None => null, @@ -1229,7 +1247,7 @@ fn containerDecl(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Con }; break :blk Type.initPayload(&union_type.base); }, - .Keyword_opaque => blk: { + .keyword_opaque => blk: { if (fields.items.len > 0) { return mod.fail(scope, fields.items[0].src, "opaque types cannot have fields", .{}); } @@ -1258,7 +1276,7 @@ fn containerDecl(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Con } } -fn 
errorSetDecl(mod: *Module, scope: *Scope, node: *ast.Node.ErrorSetDecl) InnerError!*zir.Inst { +fn errorSetDecl(mod: *Module, scope: *Scope, node: *ast.Node.error_set_decl) InnerError!*zir.Inst { const tree = scope.tree(); const src = tree.token_locs[node.error_token].start; const decls = node.decls(); @@ -1281,7 +1299,7 @@ fn errorType(mod: *Module, scope: *Scope, node: *ast.Node.OneToken) InnerError!* }); } -fn catchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Catch) InnerError!*zir.Inst { +fn catchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.@"catch") InnerError!*zir.Inst { switch (rl) { .ref => return orelseCatchExpr( mod, @@ -1528,7 +1546,7 @@ pub fn field(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.SimpleI const tree = scope.tree(); const src = tree.token_locs[node.op_token].start; // TODO custom AST node for field access so that we don't have to go through a node cast here - const field_name = try mod.identifierTokenString(scope, node.rhs.castTag(.Identifier).?.token); + const field_name = try mod.identifierTokenString(scope, node.rhs.castTag(.identifier).?.token); if (rl == .ref) { return addZirInstTag(mod, scope, src, .field_ptr, .{ .object = try expr(mod, scope, .ref, node.lhs), @@ -1545,7 +1563,7 @@ fn namedField( mod: *Module, scope: *Scope, rl: ResultLoc, - call: *ast.Node.BuiltinCall, + call: *ast.Node.builtin_call, ) InnerError!*zir.Inst { try ensureBuiltinParamCount(mod, scope, call, 2); @@ -1571,7 +1589,7 @@ fn namedField( })); } -fn arrayAccess(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.ArrayAccess) InnerError!*zir.Inst { +fn arrayAccess(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.array_access) InnerError!*zir.Inst { const tree = scope.tree(); const src = tree.token_locs[node.rtoken].start; const usize_type = try addZIRInstConst(mod, scope, src, .{ @@ -1592,7 +1610,7 @@ fn arrayAccess(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Array })); } -fn 
sliceExpr(mod: *Module, scope: *Scope, node: *ast.Node.Slice) InnerError!*zir.Inst { +fn sliceExpr(mod: *Module, scope: *Scope, node: *ast.Node.slice) InnerError!*zir.Inst { const tree = scope.tree(); const src = tree.token_locs[node.rtoken].start; @@ -1633,15 +1651,16 @@ fn simpleBinOp( mod: *Module, scope: *Scope, rl: ResultLoc, - infix_node: *ast.Node.SimpleInfixOp, + infix_node: ast.Node.Index, op_inst_tag: zir.Inst.Tag, ) InnerError!*zir.Inst { const tree = scope.tree(); - const src = tree.token_locs[infix_node.op_token].start; - - const lhs = try expr(mod, scope, .none, infix_node.lhs); - const rhs = try expr(mod, scope, .none, infix_node.rhs); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); + const lhs = try expr(mod, scope, .none, node_datas[infix_node].lhs); + const rhs = try expr(mod, scope, .none, node_datas[infix_node].rhs); + const src = token_starts[main_tokens[infix_node]]; const result = try addZIRBinOp(mod, scope, src, op_inst_tag, lhs, rhs); return rvalue(mod, scope, rl, result); } @@ -1653,6 +1672,9 @@ fn boolBinOp( infix_node: *ast.Node.SimpleInfixOp, ) InnerError!*zir.Inst { const tree = scope.tree(); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); + const src = tree.token_locs[infix_node.op_token].start; const bool_type = try addZIRInstConst(mod, scope, src, .{ .ty = Type.initTag(.type), @@ -1703,7 +1725,7 @@ fn boolBinOp( }; defer const_scope.instructions.deinit(mod.gpa); - const is_bool_and = infix_node.base.tag == .BoolAnd; + const is_bool_and = infix_node.base.tag == .bool_and; _ = try addZIRInst(mod, &const_scope.base, src, zir.Inst.Break, .{ .block = block, .operand = try addZIRInstConst(mod, &const_scope.base, src, .{ @@ -1769,7 +1791,7 @@ const CondKind = union(enum) { return &then_scope.base; }; const is_ptr = payload.ptr_token != null; - const ident_node = payload.value_symbol.castTag(.Identifier).?; + const ident_node = 
payload.value_symbol.castTag(.identifier).?; // This intentionally does not support @"_" syntax. const ident_name = then_scope.base.tree().tokenSlice(ident_node.token); @@ -1788,7 +1810,7 @@ const CondKind = union(enum) { const payload_ptr = try addZIRUnOp(mod, &else_scope.base, src, .err_union_payload_unsafe_ptr, self.err_union.?); const payload = payload_node.?.castTag(.Payload).?; - const ident_node = payload.error_symbol.castTag(.Identifier).?; + const ident_node = payload.error_symbol.castTag(.identifier).?; // This intentionally does not support @"_" syntax. const ident_name = else_scope.base.tree().tokenSlice(ident_node.token); @@ -1800,7 +1822,7 @@ const CondKind = union(enum) { } }; -fn ifExpr(mod: *Module, scope: *Scope, rl: ResultLoc, if_node: *ast.Node.If) InnerError!*zir.Inst { +fn ifExpr(mod: *Module, scope: *Scope, rl: ResultLoc, if_node: *ast.Node.@"if") InnerError!*zir.Inst { var cond_kind: CondKind = .bool; if (if_node.payload) |_| cond_kind = .{ .optional = null }; if (if_node.@"else") |else_node| { @@ -1819,6 +1841,8 @@ fn ifExpr(mod: *Module, scope: *Scope, rl: ResultLoc, if_node: *ast.Node.If) Inn defer block_scope.instructions.deinit(mod.gpa); const tree = scope.tree(); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); const if_src = tree.token_locs[if_node.if_token].start; const cond = try cond_kind.cond(mod, &block_scope, if_src, if_node.condition); @@ -1918,7 +1942,7 @@ fn whileExpr( mod: *Module, scope: *Scope, rl: ResultLoc, - while_node: *ast.Node.While, + while_node: *ast.Node.@"while", ) InnerError!*zir.Inst { var cond_kind: CondKind = .bool; if (while_node.payload) |_| cond_kind = .{ .optional = null }; @@ -1955,6 +1979,8 @@ fn whileExpr( defer continue_scope.instructions.deinit(mod.gpa); const tree = scope.tree(); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); const while_src = tree.token_locs[while_node.while_token].start; const 
void_type = try addZIRInstConst(mod, scope, while_src, .{ .ty = Type.initTag(.type), @@ -2066,7 +2092,7 @@ fn forExpr( mod: *Module, scope: *Scope, rl: ResultLoc, - for_node: *ast.Node.For, + for_node: *ast.Node.@"for", ) InnerError!*zir.Inst { if (for_node.label) |label| { try checkLabelRedefinition(mod, scope, label); @@ -2077,6 +2103,8 @@ fn forExpr( // setup variables and constants const tree = scope.tree(); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); const for_src = tree.token_locs[for_node.for_token].start; const index_ptr = blk: { const usize_type = try addZIRInstConst(mod, scope, for_src, .{ @@ -2246,9 +2274,9 @@ fn forExpr( ); } -fn switchCaseUsesRef(node: *ast.Node.Switch) bool { +fn switchCaseUsesRef(node: *ast.Node.@"switch") bool { for (node.cases()) |uncasted_case| { - const case = uncasted_case.castTag(.SwitchCase).?; + const case = uncasted_case.castTag(.switch_case).?; const uncasted_payload = case.payload orelse continue; const payload = uncasted_payload.castTag(.PointerPayload).?; if (payload.ptr_token) |_| return true; @@ -2260,15 +2288,17 @@ fn getRangeNode(node: *ast.Node) ?*ast.Node.SimpleInfixOp { var cur = node; while (true) { switch (cur.tag) { - .Range => return @fieldParentPtr(ast.Node.SimpleInfixOp, "base", cur), - .GroupedExpression => cur = @fieldParentPtr(ast.Node.GroupedExpression, "base", cur).expr, + .range => return @fieldParentPtr(ast.Node.SimpleInfixOp, "base", cur), + .grouped_expression => cur = @fieldParentPtr(ast.Node.grouped_expression, "base", cur).expr, else => return null, } } } -fn switchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, switch_node: *ast.Node.Switch) InnerError!*zir.Inst { +fn switchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, switch_node: *ast.Node.@"switch") InnerError!*zir.Inst { const tree = scope.tree(); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); const switch_src = 
tree.token_locs[switch_node.switch_token].start; const use_ref = switchCaseUsesRef(switch_node); @@ -2291,12 +2321,12 @@ fn switchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, switch_node: *ast.Node var first_range: ?*zir.Inst = null; var simple_case_count: usize = 0; for (switch_node.cases()) |uncasted_case| { - const case = uncasted_case.castTag(.SwitchCase).?; + const case = uncasted_case.castTag(.switch_case).?; const case_src = tree.token_locs[case.firstToken()].start; assert(case.items_len != 0); // Check for else/_ prong, those are handled last. - if (case.items_len == 1 and case.items()[0].tag == .SwitchElse) { + if (case.items_len == 1 and case.items()[0].tag == .switch_else) { if (else_src) |src| { const msg = msg: { const msg = try mod.errMsg( @@ -2313,7 +2343,7 @@ fn switchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, switch_node: *ast.Node } else_src = case_src; continue; - } else if (case.items_len == 1 and case.items()[0].tag == .Identifier and + } else if (case.items_len == 1 and case.items()[0].tag == .identifier and mem.eql(u8, tree.tokenSlice(case.items()[0].firstToken()), "_")) { if (underscore_src) |src| { @@ -2412,20 +2442,20 @@ fn switchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, switch_node: *ast.Node defer else_scope.instructions.deinit(mod.gpa); // Now generate all but the special cases - var special_case: ?*ast.Node.SwitchCase = null; + var special_case: ?*ast.Node.switch_case = null; var items_index: usize = 0; var case_index: usize = 0; for (switch_node.cases()) |uncasted_case| { - const case = uncasted_case.castTag(.SwitchCase).?; + const case = uncasted_case.castTag(.switch_case).?; const case_src = tree.token_locs[case.firstToken()].start; // reset without freeing to reduce allocations. case_scope.instructions.items.len = 0; // Check for else/_ prong, those are handled last. 
- if (case.items_len == 1 and case.items()[0].tag == .SwitchElse) { + if (case.items_len == 1 and case.items()[0].tag == .switch_else) { special_case = case; continue; - } else if (case.items_len == 1 and case.items()[0].tag == .Identifier and + } else if (case.items_len == 1 and case.items()[0].tag == .identifier and mem.eql(u8, tree.tokenSlice(case.items()[0].firstToken()), "_")) { special_case = case; @@ -2528,11 +2558,13 @@ fn switchCaseExpr( scope: *Scope, rl: ResultLoc, block: *zir.Inst.Block, - case: *ast.Node.SwitchCase, + case: *ast.Node.switch_case, target: *zir.Inst, target_ptr: ?*zir.Inst, ) !void { const tree = scope.tree(); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); const case_src = tree.token_locs[case.firstToken()].start; const sub_scope = blk: { const uncasted_payload = case.payload orelse break :blk scope; @@ -2559,6 +2591,8 @@ fn switchCaseExpr( fn ret(mod: *Module, scope: *Scope, cfe: *ast.Node.ControlFlowExpression) InnerError!*zir.Inst { const tree = scope.tree(); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); const src = tree.token_locs[cfe.ltoken].start; if (cfe.getRHS()) |rhs_node| { if (nodeMayNeedMemoryLocation(rhs_node, scope)) { @@ -2580,6 +2614,8 @@ fn identifier(mod: *Module, scope: *Scope, rl: ResultLoc, ident: *ast.Node.OneTo defer tracy.end(); const tree = scope.tree(); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); const ident_name = try mod.identifierTokenString(scope, ident.token); const src = tree.token_locs[ident.token].start; if (mem.eql(u8, ident_name, "_")) { @@ -2667,6 +2703,8 @@ fn identifier(mod: *Module, scope: *Scope, rl: ResultLoc, ident: *ast.Node.OneTo fn stringLiteral(mod: *Module, scope: *Scope, str_lit: *ast.Node.OneToken) InnerError!*zir.Inst { const tree = scope.tree(); + const node_datas = tree.nodes.items(.data); + const main_tokens = 
tree.nodes.items(.main_token); const unparsed_bytes = tree.tokenSlice(str_lit.token); const arena = scope.arena(); @@ -2686,6 +2724,8 @@ fn stringLiteral(mod: *Module, scope: *Scope, str_lit: *ast.Node.OneToken) Inner fn multilineStrLiteral(mod: *Module, scope: *Scope, node: *ast.Node.MultilineStringLiteral) !*zir.Inst { const tree = scope.tree(); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); const lines = node.linesConst(); const src = tree.token_locs[lines[0]].start; @@ -2713,6 +2753,8 @@ fn multilineStrLiteral(mod: *Module, scope: *Scope, node: *ast.Node.MultilineStr fn charLiteral(mod: *Module, scope: *Scope, node: *ast.Node.OneToken) !*zir.Inst { const tree = scope.tree(); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); const src = tree.token_locs[node.token].start; const slice = tree.tokenSlice(node.token); @@ -2733,6 +2775,8 @@ fn charLiteral(mod: *Module, scope: *Scope, node: *ast.Node.OneToken) !*zir.Inst fn integerLiteral(mod: *Module, scope: *Scope, int_lit: *ast.Node.OneToken) InnerError!*zir.Inst { const arena = scope.arena(); const tree = scope.tree(); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); const prefixed_bytes = tree.tokenSlice(int_lit.token); const base = if (mem.startsWith(u8, prefixed_bytes, "0x")) 16 @@ -2762,6 +2806,8 @@ fn integerLiteral(mod: *Module, scope: *Scope, int_lit: *ast.Node.OneToken) Inne fn floatLiteral(mod: *Module, scope: *Scope, float_lit: *ast.Node.OneToken) InnerError!*zir.Inst { const arena = scope.arena(); const tree = scope.tree(); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); const bytes = tree.tokenSlice(float_lit.token); if (bytes.len > 2 and bytes[1] == 'x') { return mod.failTok(scope, float_lit.token, "TODO hex floats", .{}); @@ -2780,6 +2826,8 @@ fn floatLiteral(mod: *Module, scope: *Scope, float_lit: 
*ast.Node.OneToken) Inne fn undefLiteral(mod: *Module, scope: *Scope, node: *ast.Node.OneToken) InnerError!*zir.Inst { const arena = scope.arena(); const tree = scope.tree(); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); const src = tree.token_locs[node.token].start; return addZIRInstConst(mod, scope, src, .{ .ty = Type.initTag(.@"undefined"), @@ -2790,12 +2838,14 @@ fn undefLiteral(mod: *Module, scope: *Scope, node: *ast.Node.OneToken) InnerErro fn boolLiteral(mod: *Module, scope: *Scope, node: *ast.Node.OneToken) InnerError!*zir.Inst { const arena = scope.arena(); const tree = scope.tree(); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); const src = tree.token_locs[node.token].start; return addZIRInstConst(mod, scope, src, .{ .ty = Type.initTag(.bool), .val = switch (tree.token_ids[node.token]) { - .Keyword_true => Value.initTag(.bool_true), - .Keyword_false => Value.initTag(.bool_false), + .keyword_true => Value.initTag(.bool_true), + .keyword_false => Value.initTag(.bool_false), else => unreachable, }, }); @@ -2804,6 +2854,8 @@ fn boolLiteral(mod: *Module, scope: *Scope, node: *ast.Node.OneToken) InnerError fn nullLiteral(mod: *Module, scope: *Scope, node: *ast.Node.OneToken) InnerError!*zir.Inst { const arena = scope.arena(); const tree = scope.tree(); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); const src = tree.token_locs[node.token].start; return addZIRInstConst(mod, scope, src, .{ .ty = Type.initTag(.@"null"), @@ -2811,12 +2863,14 @@ fn nullLiteral(mod: *Module, scope: *Scope, node: *ast.Node.OneToken) InnerError }); } -fn assembly(mod: *Module, scope: *Scope, asm_node: *ast.Node.Asm) InnerError!*zir.Inst { +fn assembly(mod: *Module, scope: *Scope, asm_node: *ast.Node.@"asm") InnerError!*zir.Inst { if (asm_node.outputs.len != 0) { return mod.failNode(scope, &asm_node.base, "TODO implement asm with an 
output", .{}); } const arena = scope.arena(); const tree = scope.tree(); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); const inputs = try arena.alloc(*zir.Inst, asm_node.inputs.len); const args = try arena.alloc(*zir.Inst, asm_node.inputs.len); @@ -2839,7 +2893,7 @@ fn assembly(mod: *Module, scope: *Scope, asm_node: *ast.Node.Asm) InnerError!*zi .ty = Type.initTag(.type), .val = Value.initTag(.void_type), }); - const asm_inst = try addZIRInst(mod, scope, src, zir.Inst.Asm, .{ + const asm_inst = try addZIRInst(mod, scope, src, zir.Inst.@"asm", .{ .asm_source = try expr(mod, scope, str_type_rl, asm_node.template), .return_type = return_type, }, .{ @@ -2851,7 +2905,7 @@ fn assembly(mod: *Module, scope: *Scope, asm_node: *ast.Node.Asm) InnerError!*zi return asm_inst; } -fn ensureBuiltinParamCount(mod: *Module, scope: *Scope, call: *ast.Node.BuiltinCall, count: u32) !void { +fn ensureBuiltinParamCount(mod: *Module, scope: *Scope, call: *ast.Node.builtin_call, count: u32) !void { if (call.params_len == count) return; @@ -2863,11 +2917,13 @@ fn simpleCast( mod: *Module, scope: *Scope, rl: ResultLoc, - call: *ast.Node.BuiltinCall, + call: *ast.Node.builtin_call, inst_tag: zir.Inst.Tag, ) InnerError!*zir.Inst { try ensureBuiltinParamCount(mod, scope, call, 2); const tree = scope.tree(); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); const src = tree.token_locs[call.builtin_token].start; const params = call.params(); const dest_type = try typeExpr(mod, scope, params[0]); @@ -2876,10 +2932,12 @@ fn simpleCast( return rvalue(mod, scope, rl, result); } -fn ptrToInt(mod: *Module, scope: *Scope, call: *ast.Node.BuiltinCall) InnerError!*zir.Inst { +fn ptrToInt(mod: *Module, scope: *Scope, call: *ast.Node.builtin_call) InnerError!*zir.Inst { try ensureBuiltinParamCount(mod, scope, call, 1); const operand = try expr(mod, scope, .none, call.params()[0]); const tree = scope.tree(); 
+ const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); const src = tree.token_locs[call.builtin_token].start; return addZIRUnOp(mod, scope, src, .ptrtoint, operand); } @@ -2888,10 +2946,12 @@ fn as( mod: *Module, scope: *Scope, rl: ResultLoc, - call: *ast.Node.BuiltinCall, + call: *ast.Node.builtin_call, ) InnerError!*zir.Inst { try ensureBuiltinParamCount(mod, scope, call, 2); const tree = scope.tree(); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); const src = tree.token_locs[call.builtin_token].start; const params = call.params(); const dest_type = try typeExpr(mod, scope, params[0]); @@ -2963,9 +3023,11 @@ fn asRlPtr( } } -fn bitCast(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.BuiltinCall) InnerError!*zir.Inst { +fn bitCast(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.builtin_call) InnerError!*zir.Inst { try ensureBuiltinParamCount(mod, scope, call, 2); const tree = scope.tree(); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); const src = tree.token_locs[call.builtin_token].start; const params = call.params(); const dest_type = try typeExpr(mod, scope, params[0]); @@ -3007,27 +3069,33 @@ fn bitCast(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.BuiltinCa } } -fn import(mod: *Module, scope: *Scope, call: *ast.Node.BuiltinCall) InnerError!*zir.Inst { +fn import(mod: *Module, scope: *Scope, call: *ast.Node.builtin_call) InnerError!*zir.Inst { try ensureBuiltinParamCount(mod, scope, call, 1); const tree = scope.tree(); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); const src = tree.token_locs[call.builtin_token].start; const params = call.params(); const target = try expr(mod, scope, .none, params[0]); return addZIRUnOp(mod, scope, src, .import, target); } -fn compileError(mod: *Module, scope: *Scope, call: 
*ast.Node.BuiltinCall) InnerError!*zir.Inst { +fn compileError(mod: *Module, scope: *Scope, call: *ast.Node.builtin_call) InnerError!*zir.Inst { try ensureBuiltinParamCount(mod, scope, call, 1); const tree = scope.tree(); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); const src = tree.token_locs[call.builtin_token].start; const params = call.params(); const target = try expr(mod, scope, .none, params[0]); return addZIRUnOp(mod, scope, src, .compile_error, target); } -fn setEvalBranchQuota(mod: *Module, scope: *Scope, call: *ast.Node.BuiltinCall) InnerError!*zir.Inst { +fn setEvalBranchQuota(mod: *Module, scope: *Scope, call: *ast.Node.builtin_call) InnerError!*zir.Inst { try ensureBuiltinParamCount(mod, scope, call, 1); const tree = scope.tree(); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); const src = tree.token_locs[call.builtin_token].start; const params = call.params(); const u32_type = try addZIRInstConst(mod, scope, src, .{ @@ -3038,8 +3106,10 @@ fn setEvalBranchQuota(mod: *Module, scope: *Scope, call: *ast.Node.BuiltinCall) return addZIRUnOp(mod, scope, src, .set_eval_branch_quota, quota); } -fn typeOf(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.BuiltinCall) InnerError!*zir.Inst { +fn typeOf(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.builtin_call) InnerError!*zir.Inst { const tree = scope.tree(); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); const arena = scope.arena(); const src = tree.token_locs[call.builtin_token].start; const params = call.params(); @@ -3054,8 +3124,10 @@ fn typeOf(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.BuiltinCal items[param_i] = try expr(mod, scope, .none, param); return rvalue(mod, scope, rl, try addZIRInst(mod, scope, src, zir.Inst.TypeOfPeer, .{ .items = items }, .{})); } -fn compileLog(mod: *Module, scope: *Scope, call: 
*ast.Node.BuiltinCall) InnerError!*zir.Inst { +fn compileLog(mod: *Module, scope: *Scope, call: *ast.Node.builtin_call) InnerError!*zir.Inst { const tree = scope.tree(); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); const arena = scope.arena(); const src = tree.token_locs[call.builtin_token].start; const params = call.params(); @@ -3065,8 +3137,10 @@ fn compileLog(mod: *Module, scope: *Scope, call: *ast.Node.BuiltinCall) InnerErr return addZIRInst(mod, scope, src, zir.Inst.CompileLog, .{ .to_log = targets }, .{}); } -fn builtinCall(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.BuiltinCall) InnerError!*zir.Inst { +fn builtinCall(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.builtin_call) InnerError!*zir.Inst { const tree = scope.tree(); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); const builtin_name = tree.tokenSlice(call.builtin_token); // We handle the different builtins manually because they have different semantics depending @@ -3104,8 +3178,10 @@ fn builtinCall(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.Built } } -fn callExpr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Call) InnerError!*zir.Inst { +fn callExpr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.call) InnerError!*zir.Inst { const tree = scope.tree(); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); const lhs = try expr(mod, scope, .none, node.lhs); const param_nodes = node.params(); @@ -3130,6 +3206,8 @@ fn callExpr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Call) In fn unreach(mod: *Module, scope: *Scope, unreach_node: *ast.Node.OneToken) InnerError!*zir.Inst { const tree = scope.tree(); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); const src = tree.token_locs[unreach_node.token].start; return 
addZIRNoOp(mod, scope, src, .unreachable_safe); } @@ -3176,11 +3254,11 @@ fn nodeMayNeedMemoryLocation(start_node: *ast.Node, scope: *Scope) bool { while (true) { switch (node.tag) { .Root, - .Use, - .TestDecl, - .DocComment, - .SwitchCase, - .SwitchElse, + .@"usingnamespace", + .test_decl, + .doc_comment, + .switch_case, + .switch_else, .Else, .Payload, .PointerPayload, @@ -3190,97 +3268,97 @@ fn nodeMayNeedMemoryLocation(start_node: *ast.Node, scope: *Scope) bool { .FieldInitializer, => unreachable, - .Return, - .Break, - .Continue, - .BitNot, - .BoolNot, - .VarDecl, - .Defer, - .AddressOf, - .OptionalType, - .Negation, - .NegationWrap, - .Resume, - .ArrayType, - .ArrayTypeSentinel, + .@"return", + .@"break", + .@"continue", + .bit_not, + .bool_not, + .var_decl, + .@"defer", + .address_of, + .optional_type, + .negation, + .negation_wrap, + .@"resume", + .array_type, + .array_type_sentinel, .PtrType, - .SliceType, - .Suspend, - .AnyType, - .ErrorType, + .slice_type, + .@"suspend", + .@"anytype", + .error_type, .FnProto, - .AnyFrameType, - .IntegerLiteral, - .FloatLiteral, - .EnumLiteral, - .StringLiteral, + .anyframe_type, + .integer_literal, + .float_literal, + .enum_literal, + .string_literal, .MultilineStringLiteral, - .CharLiteral, - .BoolLiteral, - .NullLiteral, - .UndefinedLiteral, - .Unreachable, - .Identifier, - .ErrorSetDecl, + .char_literal, + .bool_literal, + .null_literal, + .undefined_literal, + .@"unreachable", + .identifier, + .error_set_decl, .ContainerDecl, - .Asm, - .Add, - .AddWrap, - .ArrayCat, - .ArrayMult, - .Assign, - .AssignBitAnd, - .AssignBitOr, - .AssignBitShiftLeft, - .AssignBitShiftRight, - .AssignBitXor, - .AssignDiv, - .AssignSub, - .AssignSubWrap, - .AssignMod, - .AssignAdd, - .AssignAddWrap, - .AssignMul, - .AssignMulWrap, - .BangEqual, - .BitAnd, - .BitOr, - .BitShiftLeft, - .BitShiftRight, - .BitXor, - .BoolAnd, - .BoolOr, - .Div, - .EqualEqual, - .ErrorUnion, - .GreaterOrEqual, - .GreaterThan, - .LessOrEqual, - .LessThan, - 
.MergeErrorSets, - .Mod, - .Mul, - .MulWrap, - .Range, - .Period, - .Sub, - .SubWrap, - .Slice, - .Deref, - .ArrayAccess, - .Block, + .@"asm", + .add, + .add_wrap, + .array_cat, + .array_mult, + .assign, + .assign_bit_and, + .assign_bit_or, + .assign_bit_shift_left, + .assign_bit_shift_right, + .assign_bit_xor, + .assign_div, + .assign_sub, + .assign_sub_wrap, + .assign_mod, + .assign_add, + .assign_add_wrap, + .assign_mul, + .assign_mul_wrap, + .bang_equal, + .bit_and, + .bit_or, + .bit_shift_left, + .bit_shift_right, + .bit_xor, + .bool_and, + .bool_or, + .div, + .equal_equal, + .error_union, + .greater_or_equal, + .greater_than, + .less_or_equal, + .less_than, + .merge_error_sets, + .mod, + .mul, + .mul_wrap, + .range, + .period, + .sub, + .sub_wrap, + .slice, + .deref, + .array_access, + .block, => return false, // Forward the question to a sub-expression. - .GroupedExpression => node = node.castTag(.GroupedExpression).?.expr, - .Try => node = node.castTag(.Try).?.rhs, - .Await => node = node.castTag(.Await).?.rhs, - .Catch => node = node.castTag(.Catch).?.rhs, - .OrElse => node = node.castTag(.OrElse).?.rhs, - .Comptime => node = node.castTag(.Comptime).?.expr, - .Nosuspend => node = node.castTag(.Nosuspend).?.expr, - .UnwrapOptional => node = node.castTag(.UnwrapOptional).?.lhs, + .grouped_expression => node = node.castTag(.grouped_expression).?.expr, + .@"try" => node = node.castTag(.@"try").?.rhs, + .@"await" => node = node.castTag(.@"await").?.rhs, + .@"catch" => node = node.castTag(.@"catch").?.rhs, + .@"orelse" => node = node.castTag(.@"orelse").?.rhs, + .@"comptime" => node = node.castTag(.@"comptime").?.expr, + .@"nosuspend" => node = node.castTag(.@"nosuspend").?.expr, + .unwrap_optional => node = node.castTag(.unwrap_optional).?.lhs, // True because these are exactly the expressions we need memory locations for. 
.ArrayInitializer, @@ -3291,14 +3369,14 @@ fn nodeMayNeedMemoryLocation(start_node: *ast.Node, scope: *Scope) bool { // True because depending on comptime conditions, sub-expressions // may be the kind that need memory locations. - .While, - .For, - .Switch, - .Call, - .LabeledBlock, + .@"while", + .@"for", + .@"switch", + .call, + .labeled_block, => return true, - .BuiltinCall => { + .builtin_call => { @setEvalBranchQuota(5000); const builtin_needs_mem_loc = std.ComptimeStringMap(bool, .{ .{ "@addWithOverflow", false }, @@ -3404,12 +3482,12 @@ fn nodeMayNeedMemoryLocation(start_node: *ast.Node, scope: *Scope) bool { .{ "@TypeOf", false }, .{ "@unionInit", true }, }); - const name = scope.tree().tokenSlice(node.castTag(.BuiltinCall).?.builtin_token); + const name = scope.tree().tokenSlice(node.castTag(.builtin_call).?.builtin_token); return builtin_needs_mem_loc.get(name).?; }, // Depending on AST properties, they may need memory locations. - .If => return node.castTag(.If).?.@"else" != null, + .@"if" => return node.castTag(.@"if").?.@"else" != null, } } } @@ -3450,8 +3528,17 @@ fn rvalue(mod: *Module, scope: *Scope, rl: ResultLoc, result: *zir.Inst) InnerEr } } -fn rvalueVoid(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node, result: void) InnerError!*zir.Inst { - const src = scope.tree().token_locs[node.firstToken()].start; +fn rvalueVoid( + mod: *Module, + scope: *Scope, + rl: ResultLoc, + node: ast.Node.Index, + result: void, +) InnerError!*zir.Inst { + const tree = scope.tree(); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); + const src = tree.tokens.items(.start)[tree.firstToken(node)]; const void_inst = try addZIRInstConst(mod, scope, src, .{ .ty = Type.initTag(.void), .val = Value.initTag(.void_value), diff --git a/src/codegen.zig b/src/codegen.zig index 9771386403..095bb123ba 100644 --- a/src/codegen.zig +++ b/src/codegen.zig @@ -451,11 +451,16 @@ fn Function(comptime arch: std.Target.Cpu.Arch) 
type { const src_data: struct { lbrace_src: usize, rbrace_src: usize, source: []const u8 } = blk: { const container_scope = module_fn.owner_decl.container; - const tree = container_scope.file_scope.contents.tree; - const fn_proto = tree.root_node.decls()[module_fn.owner_decl.src_index].castTag(.FnProto).?; - const block = fn_proto.getBodyNode().?.castTag(.Block).?; - const lbrace_src = tree.token_locs[block.lbrace].start; - const rbrace_src = tree.token_locs[block.rbrace].start; + const tree = container_scope.file_scope.tree; + const node_tags = tree.nodes.items(.tag); + const node_datas = tree.nodes.items(.data); + const token_starts = tree.tokens.items(.start); + + const fn_decl = tree.rootDecls()[module_fn.owner_decl.src_index]; + assert(node_tags[fn_decl] == .fn_decl); + const block = node_datas[fn_decl].rhs; + const lbrace_src = token_starts[tree.firstToken(block)]; + const rbrace_src = token_starts[tree.lastToken(block)]; break :blk .{ .lbrace_src = lbrace_src, .rbrace_src = rbrace_src, diff --git a/src/ir.zig b/src/ir.zig index 0e83dbfd56..a0b33fba73 100644 --- a/src/ir.zig +++ b/src/ir.zig @@ -317,6 +317,7 @@ pub const Inst = struct { pub const base_tag = Tag.arg; base: Inst, + /// This exists to be emitted into debug info. 
name: [*:0]const u8, pub fn operandCount(self: *const Arg) usize { diff --git a/src/link/Elf.zig b/src/link/Elf.zig index 18f3f57712..f92c585cd5 100644 --- a/src/link/Elf.zig +++ b/src/link/Elf.zig @@ -2223,13 +2223,19 @@ pub fn updateDecl(self: *Elf, module: *Module, decl: *Module.Decl) !void { try dbg_line_buffer.ensureCapacity(26); const line_off: u28 = blk: { - const tree = decl.container.file_scope.contents.tree; - const file_ast_decls = tree.root_node.decls(); + const tree = decl.container.file_scope.tree; + const node_tags = tree.nodes.items(.tag); + const node_datas = tree.nodes.items(.data); + const token_starts = tree.tokens.items(.start); + + const file_ast_decls = tree.rootDecls(); // TODO Look into improving the performance here by adding a token-index-to-line // lookup table. Currently this involves scanning over the source code for newlines. - const fn_proto = file_ast_decls[decl.src_index].castTag(.FnProto).?; - const block = fn_proto.getBodyNode().?.castTag(.Block).?; - const line_delta = std.zig.lineDelta(tree.source, 0, tree.token_locs[block.lbrace].start); + const fn_decl = file_ast_decls[decl.src_index]; + assert(node_tags[fn_decl] == .fn_decl); + const block = node_datas[fn_decl].rhs; + const lbrace = tree.firstToken(block); + const line_delta = std.zig.lineDelta(tree.source, 0, token_starts[lbrace]); break :blk @intCast(u28, line_delta); }; @@ -2744,13 +2750,19 @@ pub fn updateDeclLineNumber(self: *Elf, module: *Module, decl: *const Module.Dec if (self.llvm_ir_module) |_| return; - const tree = decl.container.file_scope.contents.tree; - const file_ast_decls = tree.root_node.decls(); + const tree = decl.container.file_scope.tree; + const node_tags = tree.nodes.items(.tag); + const node_datas = tree.nodes.items(.data); + const token_starts = tree.tokens.items(.start); + + const file_ast_decls = tree.rootDecls(); // TODO Look into improving the performance here by adding a token-index-to-line // lookup table. 
Currently this involves scanning over the source code for newlines. - const fn_proto = file_ast_decls[decl.src_index].castTag(.FnProto).?; - const block = fn_proto.getBodyNode().?.castTag(.Block).?; - const line_delta = std.zig.lineDelta(tree.source, 0, tree.token_locs[block.lbrace].start); + const fn_decl = file_ast_decls[decl.src_index]; + assert(node_tags[fn_decl] == .fn_decl); + const block = node_datas[fn_decl].rhs; + const lbrace = tree.firstToken(block); + const line_delta = std.zig.lineDelta(tree.source, 0, token_starts[lbrace]); const casted_line_off = @intCast(u28, line_delta); const shdr = &self.sections.items[self.debug_line_section_index.?]; diff --git a/src/link/MachO/DebugSymbols.zig b/src/link/MachO/DebugSymbols.zig index 15aa86be51..645e17068b 100644 --- a/src/link/MachO/DebugSymbols.zig +++ b/src/link/MachO/DebugSymbols.zig @@ -904,13 +904,19 @@ pub fn updateDeclLineNumber(self: *DebugSymbols, module: *Module, decl: *const M const tracy = trace(@src()); defer tracy.end(); - const tree = decl.container.file_scope.contents.tree; - const file_ast_decls = tree.root_node.decls(); + const tree = decl.container.file_scope.tree; + const node_tags = tree.nodes.items(.tag); + const node_datas = tree.nodes.items(.data); + const token_starts = tree.tokens.items(.start); + + const file_ast_decls = tree.rootDecls(); // TODO Look into improving the performance here by adding a token-index-to-line // lookup table. Currently this involves scanning over the source code for newlines. 
- const fn_proto = file_ast_decls[decl.src_index].castTag(.FnProto).?; - const block = fn_proto.getBodyNode().?.castTag(.Block).?; - const line_delta = std.zig.lineDelta(tree.source, 0, tree.token_locs[block.lbrace].start); + const fn_decl = file_ast_decls[decl.src_index]; + assert(node_tags[fn_decl] == .fn_decl); + const block = node_datas[fn_decl].rhs; + const lbrace = tree.firstToken(block); + const line_delta = std.zig.lineDelta(tree.source, 0, token_starts[lbrace]); const casted_line_off = @intCast(u28, line_delta); const dwarf_segment = &self.load_commands.items[self.dwarf_segment_cmd_index.?].Segment; @@ -948,13 +954,19 @@ pub fn initDeclDebugBuffers( try dbg_line_buffer.ensureCapacity(26); const line_off: u28 = blk: { - const tree = decl.container.file_scope.contents.tree; - const file_ast_decls = tree.root_node.decls(); + const tree = decl.container.file_scope.tree; + const node_tags = tree.nodes.items(.tag); + const node_datas = tree.nodes.items(.data); + const token_starts = tree.tokens.items(.start); + + const file_ast_decls = tree.rootDecls(); // TODO Look into improving the performance here by adding a token-index-to-line // lookup table. Currently this involves scanning over the source code for newlines. - const fn_proto = file_ast_decls[decl.src_index].castTag(.FnProto).?; - const block = fn_proto.getBodyNode().?.castTag(.Block).?; - const line_delta = std.zig.lineDelta(tree.source, 0, tree.token_locs[block.lbrace].start); + const fn_decl = file_ast_decls[decl.src_index]; + assert(node_tags[fn_decl] == .fn_decl); + const block = node_datas[fn_decl].rhs; + const lbrace = tree.firstToken(block); + const line_delta = std.zig.lineDelta(tree.source, 0, token_starts[lbrace]); break :blk @intCast(u28, line_delta); }; diff --git a/src/zir.zig b/src/zir.zig index d8ac023562..fc68aee216 100644 --- a/src/zir.zig +++ b/src/zir.zig @@ -53,6 +53,9 @@ pub const Inst = struct { indexable_ptr_len, /// Function parameter value. 
These must be first in a function's main block, /// in respective order with the parameters. + /// TODO make this instruction implicit; after we transition to having ZIR + /// instructions be same sized and referenced by index, the first N indexes + /// will implicitly be references to the parameters of the function. arg, /// Type coercion. as, @@ -354,9 +357,8 @@ pub const Inst = struct { .return_void, .ret_ptr, .ret_type, - .unreach_nocheck, - .@"unreachable", - .arg, + .unreachable_unsafe, + .unreachable_safe, .void_value, => NoOp, @@ -451,6 +453,7 @@ pub const Inst = struct { .block_comptime_flat, => Block, + .arg => Arg, .array_type_sentinel => ArrayTypeSentinel, .@"break" => Break, .break_void => BreakVoid, @@ -684,6 +687,18 @@ pub const Inst = struct { kw_args: struct {}, }; + pub const Arg = struct { + pub const base_tag = Tag.arg; + base: Inst, + + positionals: struct { + /// This exists to be passed to the arg TZIR instruction, which + /// needs it for debug info. + name: []const u8, + }, + kw_args: struct {}, + }; + pub const Block = struct { pub const base_tag = Tag.block; base: Inst, From 75ba8d8db6d3c067fd8f9c8d32e6fea7c3b9343b Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Fri, 12 Feb 2021 17:41:12 +0100 Subject: [PATCH 058/173] zig fmt: remove empty lines at start/end of block --- lib/std/zig/parser_test.zig | 62 ++++++++ lib/std/zig/render.zig | 278 ++++++++++++++++++++---------------- 2 files changed, 218 insertions(+), 122 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index d20f7aa5bf..db8d028131 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -239,6 +239,68 @@ test "zig fmt: container declaration, transform trailing comma" { ); } +test "zig fmt: remove empty lines at start/end of container decl" { + try testTransform( + \\const X = struct { + \\ + \\ foo: i32, + \\ + \\ bar: i8, + \\ + \\}; + \\ + , + \\const X = struct { + \\ foo: i32, + \\ + \\ bar: i8, + \\}; + \\ + ); +} 
+ +test "zig fmt: remove empty lines at start/end of block" { + try testTransform( + \\test { + \\ + \\ if (foo) { + \\ foo(); + \\ } + \\ + \\} + \\ + , + \\test { + \\ if (foo) { + \\ foo(); + \\ } + \\} + \\ + ); +} + +test "zig fmt: allow empty line before commment at start of block" { + try testCanonical( + \\test { + \\ + \\ // foo + \\ const x = 42; + \\} + \\ + ); +} + +test "zig fmt: allow empty line before commment at start of block" { + try testCanonical( + \\test { + \\ + \\ // foo + \\ const x = 42; + \\} + \\ + ); +} + test "zig fmt: trailing comma in fn parameter list" { try testCanonical( \\pub fn f( diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 1410ef5c13..42649779b7 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -24,51 +24,21 @@ pub fn renderTree(buffer: *std.ArrayList(u8), tree: ast.Tree) Error!void { const ais = &auto_indenting_stream; // Render all the line comments at the beginning of the file. - const src_start: usize = if (mem.startsWith(u8, tree.source, "\xEF\xBB\xBF")) 3 else 0; const comment_end_loc: usize = tree.tokens.items(.start)[0]; - _ = try renderCommentsAndNewlines(ais, tree, src_start, comment_end_loc); + _ = try renderComments(ais, tree, 0, comment_end_loc); - for (tree.rootDecls()) |decl| { - try renderMember(ais, tree, decl, .newline); - } + try renderMembers(ais, tree, tree.rootDecls()); } -/// Assumes that start is the first byte past the previous token and -/// that end is the last byte before the next token. 
-fn renderCommentsAndNewlines(ais: *Ais, tree: ast.Tree, start: usize, end: usize) Error!bool { - var index: usize = start; - while (mem.indexOf(u8, tree.source[index..end], "//")) |offset| { - const comment_start = index + offset; - const newline = comment_start + - mem.indexOfScalar(u8, tree.source[comment_start..end], '\n').?; - const untrimmed_comment = tree.source[comment_start..newline]; - const trimmed_comment = mem.trimRight(u8, untrimmed_comment, " \r\t"); - - // Leave up to one empty line before the comment - if (index == start and mem.containsAtLeast(u8, tree.source[index..comment_start], 2, "\n")) { - try ais.insertNewline(); - try ais.insertNewline(); - } else if (mem.indexOfScalar(u8, tree.source[index..comment_start], '\n') != null) { - // Respect the newline directly before the comment. This allows an - // empty line between comments - try ais.insertNewline(); - } else if (index == start and start != 0) { - // If the comment is on the same line as the token before it, - // prefix it with a single space - try ais.writer().writeByte(' '); - } - - try ais.writer().print("{s}\n", .{trimmed_comment}); - index = newline + 1; +/// Render all members in the given slice, keeping empty lines where appropriate +fn renderMembers(ais: *Ais, tree: ast.Tree, members: []const ast.Node.Index) Error!void { + if (members.len == 0) return; + //try renderExtraNewline(ais, tree, members[0]); + try renderMember(ais, tree, members[0], .newline); + for (members[1..]) |member| { + try renderExtraNewline(ais, tree, member); + try renderMember(ais, tree, member, .newline); } - - // Leave up to one empty line if present in the source - if (index > start) index -= 1; - if (end != tree.source.len and mem.containsAtLeast(u8, tree.source[index..end], 2, "\n")) { - try ais.insertNewline(); - } - - return index != start; } fn renderMember(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) Error!void { @@ -157,6 +127,16 @@ fn renderMember(ais: *Ais, tree: ast.Tree, decl: 
ast.Node.Index, space: Space) E } } +/// Render all expressions in the slice, keeping empty lines where appropriate +fn renderExpressions(ais: *Ais, tree: ast.Tree, expressions: []const ast.Node.Index, space: Space) Error!void { + if (expressions.len == 0) return; + try renderExpression(ais, tree, expressions[0], space); + for (expressions[1..]) |expression| { + try renderExtraNewline(ais, tree, expression); + try renderExpression(ais, tree, expression, space); + } +} + fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Space) Error!void { const token_tags = tree.tokens.items(.tag); const main_tokens = tree.nodes.items(.main_token); @@ -501,7 +481,6 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac .tagged_union_enum_tag_comma, => return renderContainerDecl(ais, tree, tree.taggedUnionEnumTag(node), space), - // TODO: handle comments properly .error_set_decl => { const error_token = main_tokens[node]; const lbrace = error_token + 1; @@ -521,10 +500,11 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac return renderToken(ais, tree, rbrace, space); } else if (token_tags[rbrace - 1] == .comma) { // There is a trailing comma so render each member on a new line. 
+ ais.pushIndentNextLine(); try renderToken(ais, tree, lbrace, .newline); - ais.pushIndent(); var i = lbrace + 1; while (i < rbrace) : (i += 1) { + if (i > lbrace + 1) try renderExtraNewlineToken(ais, tree, i); switch (token_tags[i]) { .doc_comment => try renderToken(ais, tree, i, .newline), .identifier => try renderToken(ais, tree, i, .comma), @@ -603,14 +583,12 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac try renderExpression(ais, tree, condition, .none); // condtion expression try renderToken(ais, tree, rparen, .space); // rparen + ais.pushIndentNextLine(); if (cases.len == 0) { try renderToken(ais, tree, rparen + 1, .none); // lbrace - return renderToken(ais, tree, rparen + 2, space); // rbrace - } - ais.pushIndentNextLine(); - try renderToken(ais, tree, rparen + 1, .newline); // lbrace - for (cases) |case| { - try renderExpression(ais, tree, case, .comma); + } else { + try renderToken(ais, tree, rparen + 1, .newline); // lbrace + try renderExpressions(ais, tree, cases, .comma); } ais.popIndent(); return renderToken(ais, tree, tree.lastToken(node), space); // rbrace @@ -1439,9 +1417,7 @@ fn renderSwitchCase( try renderExpression(ais, tree, switch_case.ast.values[0], .space); } else if (trailing_comma) { // Render each value on a new line - for (switch_case.ast.values) |value_expr| { - try renderExpression(ais, tree, value_expr, .comma); - } + try renderExpressions(ais, tree, switch_case.ast.values, .comma); } else { // Render on one line for (switch_case.ast.values) |value_expr| { @@ -1486,22 +1462,20 @@ fn renderBlock( try renderToken(ais, tree, lbrace - 1, .space); } - if (statements.len == 0) { - ais.pushIndentNextLine(); - try renderToken(ais, tree, lbrace, .none); - ais.popIndent(); - return renderToken(ais, tree, lbrace + 1, space); // rbrace - } - ais.pushIndentNextLine(); - try renderToken(ais, tree, lbrace, .newline); - for (statements) |stmt, i| { - switch (node_tags[stmt]) { - .global_var_decl => try 
renderVarDecl(ais, tree, tree.globalVarDecl(stmt)), - .local_var_decl => try renderVarDecl(ais, tree, tree.localVarDecl(stmt)), - .simple_var_decl => try renderVarDecl(ais, tree, tree.simpleVarDecl(stmt)), - .aligned_var_decl => try renderVarDecl(ais, tree, tree.alignedVarDecl(stmt)), - else => try renderExpression(ais, tree, stmt, .semicolon), + if (statements.len == 0) { + try renderToken(ais, tree, lbrace, .none); + } else { + try renderToken(ais, tree, lbrace, .newline); + for (statements) |stmt, i| { + if (i != 0) try renderExtraNewline(ais, tree, stmt); + switch (node_tags[stmt]) { + .global_var_decl => try renderVarDecl(ais, tree, tree.globalVarDecl(stmt)), + .local_var_decl => try renderVarDecl(ais, tree, tree.localVarDecl(stmt)), + .simple_var_decl => try renderVarDecl(ais, tree, tree.simpleVarDecl(stmt)), + .aligned_var_decl => try renderVarDecl(ais, tree, tree.alignedVarDecl(stmt)), + else => try renderExpression(ais, tree, stmt, .semicolon), + } } } ais.popIndent(); @@ -1530,11 +1504,17 @@ fn renderStructInit( const last_field_token = tree.lastToken(last_field); if (token_tags[last_field_token + 1] == .comma) { // Render one field init per line. - ais.pushIndent(); + ais.pushIndentNextLine(); try renderToken(ais, tree, struct_init.ast.lbrace, .newline); - for (struct_init.ast.fields) |field_init| { + try renderToken(ais, tree, struct_init.ast.lbrace + 1, .none); // . + try renderToken(ais, tree, struct_init.ast.lbrace + 2, .space); // name + try renderToken(ais, tree, struct_init.ast.lbrace + 3, .space); // = + try renderExpression(ais, tree, struct_init.ast.fields[0], .comma); + + for (struct_init.ast.fields[1..]) |field_init| { const init_token = tree.firstToken(field_init); + try renderExtraNewlineToken(ais, tree, init_token - 3); try renderToken(ais, tree, init_token - 3, .none); // . 
try renderToken(ais, tree, init_token - 2, .space); // name try renderToken(ais, tree, init_token - 1, .space); // = @@ -1573,20 +1553,18 @@ fn renderArrayInit( try renderExpression(ais, tree, array_init.ast.type_expr, .none); // T } if (array_init.ast.elements.len == 0) { + ais.pushIndentNextLine(); try renderToken(ais, tree, array_init.ast.lbrace, .none); // lbrace + ais.popIndent(); return renderToken(ais, tree, array_init.ast.lbrace + 1, space); // rbrace } const last_elem = array_init.ast.elements[array_init.ast.elements.len - 1]; const last_elem_token = tree.lastToken(last_elem); if (token_tags[last_elem_token + 1] == .comma) { // Render one element per line. - ais.pushIndent(); + ais.pushIndentNextLine(); try renderToken(ais, tree, array_init.ast.lbrace, .newline); - - for (array_init.ast.elements) |elem| { - try renderExpression(ais, tree, elem, .comma); - } - + try renderExpressions(ais, tree, array_init.ast.elements, .comma); ais.popIndent(); return renderToken(ais, tree, last_elem_token + 2, space); // rbrace } else { @@ -1679,11 +1657,9 @@ fn renderContainerDecl( } // One member per line. 
- ais.pushIndent(); + ais.pushIndentNextLine(); try renderToken(ais, tree, lbrace, .newline); // lbrace - for (container_decl.ast.members) |member| { - try renderMember(ais, tree, member, .newline); - } + try renderMembers(ais, tree, container_decl.ast.members); ais.popIndent(); return renderToken(ais, tree, rbrace, space); // rbrace @@ -1745,6 +1721,7 @@ fn renderAsm( const comma = tree.firstToken(next_asm_output) - 1; try renderToken(ais, tree, comma, .newline); // , + try renderExtraNewlineToken(ais, tree, tree.firstToken(next_asm_output)); } else if (asm_node.inputs.len == 0 and asm_node.first_clobber == null) { try renderAsmOutput(ais, tree, asm_output, .newline); ais.popIndent(); @@ -1776,6 +1753,7 @@ fn renderAsm( const first_token = tree.firstToken(next_asm_input); try renderToken(ais, tree, first_token - 1, .newline); // , + try renderExtraNewlineToken(ais, tree, first_token); } else if (asm_node.first_clobber == null) { try renderAsmInput(ais, tree, asm_input, .newline); ais.popIndent(); @@ -1834,7 +1812,9 @@ fn renderCall( const lparen = call.ast.lparen; const params = call.ast.params; if (params.len == 0) { + ais.pushIndentNextLine(); try renderToken(ais, tree, lparen, .none); + ais.popIndent(); return renderToken(ais, tree, lparen + 1, space); // ) } @@ -1856,6 +1836,8 @@ fn renderCall( try renderToken(ais, tree, comma, Space.newline); // , if (is_multiline_string) ais.pushIndent(); + + try renderExtraNewline(ais, tree, params[i + 1]); } else { try renderExpression(ais, tree, param_node, Space.comma); } @@ -1928,44 +1910,100 @@ fn renderToken(ais: *Ais, tree: ast.Tree, token_index: ast.TokenIndex, space: Sp const lexeme = tree.tokenSlice(token_index); try ais.writer().writeAll(lexeme); + if (space == .no_comment) return; + + const comment = try renderComments(ais, tree, token_start + lexeme.len, token_starts[token_index + 1]); switch (space) { - .no_comment => {}, - .none => _ = try renderCommentsAndNewlines(ais, tree, token_start + lexeme.len, 
token_starts[token_index + 1]), - .comma => { - const comment = try renderCommentsAndNewlines(ais, tree, token_start + lexeme.len, token_starts[token_index + 1]); - if (token_tags[token_index + 1] == .comma) { - return renderToken(ais, tree, token_index + 1, .newline); - } else if (!comment) { - return ais.insertNewline(); - } + .none => {}, + .space => if (!comment) try ais.writer().writeByte(' '), + .newline => if (!comment) try ais.insertNewline(), + + .comma => if (token_tags[token_index + 1] == .comma) { + try renderToken(ais, tree, token_index + 1, .newline); + } else if (!comment) { + try ais.insertNewline(); }, - .comma_space => { - const comment = try renderCommentsAndNewlines(ais, tree, token_start + lexeme.len, token_starts[token_index + 1]); - if (token_tags[token_index + 1] == .comma) { - return renderToken(ais, tree, token_index + 1, .space); - } else if (!comment) { - return ais.writer().writeByte(' '); - } + + .comma_space => if (token_tags[token_index + 1] == .comma) { + try renderToken(ais, tree, token_index + 1, .space); + } else if (!comment) { + try ais.writer().writeByte(' '); }, - .semicolon => { - const comment = try renderCommentsAndNewlines(ais, tree, token_start + lexeme.len, token_starts[token_index + 1]); - if (token_tags[token_index + 1] == .semicolon) { - return renderToken(ais, tree, token_index + 1, .newline); - } else if (!comment) { - return ais.insertNewline(); - } + + .semicolon => if (token_tags[token_index + 1] == .semicolon) { + try renderToken(ais, tree, token_index + 1, .newline); + } else if (!comment) { + try ais.insertNewline(); }, - .space => { - const comment = try renderCommentsAndNewlines(ais, tree, token_start + lexeme.len, token_starts[token_index + 1]); - if (!comment) { - return ais.writer().writeByte(' '); - } - }, - .newline => { - if (!try renderCommentsAndNewlines(ais, tree, token_start + lexeme.len, token_starts[token_index + 1])) { + + .no_comment => unreachable, + } +} + +/// Assumes that start is the 
first byte past the previous token and +/// that end is the last byte before the next token. +fn renderComments(ais: *Ais, tree: ast.Tree, start: usize, end: usize) Error!bool { + var index: usize = start; + while (mem.indexOf(u8, tree.source[index..end], "//")) |offset| { + const comment_start = index + offset; + const newline = comment_start + + mem.indexOfScalar(u8, tree.source[comment_start..end], '\n').?; + const untrimmed_comment = tree.source[comment_start..newline]; + const trimmed_comment = mem.trimRight(u8, untrimmed_comment, &std.ascii.spaces); + + // Don't leave any whitespace at the start of the file + if (index != 0) { + if (index == start and mem.containsAtLeast(u8, tree.source[index..comment_start], 2, "\n")) { + // Leave up to one empty line before the first comment try ais.insertNewline(); + try ais.insertNewline(); + } else if (mem.indexOfScalar(u8, tree.source[index..comment_start], '\n') != null) { + // Respect the newline directly before the comment. + // Note: This allows an empty line between comments + try ais.insertNewline(); + } else if (index == start) { + // Otherwise if the first comment is on the same line as + // the token before it, prefix it with a single space. + try ais.writer().writeByte(' '); } - }, + } + + try ais.writer().print("{s}\n", .{trimmed_comment}); + index = newline + 1; + } + + if (index != start and mem.containsAtLeast(u8, tree.source[index - 1 .. end], 2, "\n")) { + try ais.insertNewline(); + } + + return index != start; +} + +fn renderExtraNewline(ais: *Ais, tree: ast.Tree, node: ast.Node.Index) Error!void { + return renderExtraNewlineToken(ais, tree, tree.firstToken(node)); +} + +/// Check if there is an empty line immediately before the given token. If so, render it. 
+fn renderExtraNewlineToken(ais: *Ais, tree: ast.Tree, token_index: ast.TokenIndex) Error!void { + const token_starts = tree.tokens.items(.start); + const token_start = token_starts[token_index]; + if (token_start == 0) return; + const prev_token_end = if (token_index == 0) + 0 + else + token_starts[token_index - 1] + tree.tokenSlice(token_index - 1).len; + + // If there is a comment present, it will handle the empty line + if (mem.indexOf(u8, tree.source[prev_token_end..token_start], "//") != null) return; + + // Iterate backwards to the end of the previous token, stopping if a + // non-whitespace character is encountered or two newlines have been found. + var i = token_start - 1; + var newlines: u2 = 0; + while (std.ascii.isSpace(tree.source[i])) : (i -= 1) { + if (tree.source[i] == '\n') newlines += 1; + if (newlines == 2) return ais.insertNewline(); + if (i == prev_token_end) break; } } @@ -1983,19 +2021,15 @@ fn renderDocComments(ais: *Ais, tree: ast.Tree, end_token: ast.TokenIndex) Error tok += 1; } const first_tok = tok; - if (tok == end_token) return; + if (first_tok == end_token) return; + try renderExtraNewlineToken(ais, tree, first_tok); - while (true) : (tok += 1) { - switch (token_tags[tok]) { - .doc_comment => { - if (first_tok < end_token) { - try renderToken(ais, tree, tok, .newline); - } else { - try renderToken(ais, tree, tok, .no_comment); - try ais.insertNewline(); - } - }, - else => break, + while (token_tags[tok] == .doc_comment) : (tok += 1) { + if (first_tok < end_token) { + try renderToken(ais, tree, tok, .newline); + } else { + try renderToken(ais, tree, tok, .no_comment); + try ais.insertNewline(); } } } From 24798b84ad818a4c6d6a6ee3e215ccbc49293b81 Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Sat, 13 Feb 2021 15:23:24 +0100 Subject: [PATCH 059/173] zig fmt: implement multiline string literals --- lib/std/zig/parser_test.zig | 154 ++++++++++++++++++------------------ lib/std/zig/render.zig | 43 +++++++++- 2 files changed, 116 
insertions(+), 81 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index db8d028131..11bb4d8ceb 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -50,35 +50,35 @@ test "zig fmt: respect line breaks after var declarations" { ); } -//test "zig fmt: multiline string mixed with comments" { -// try testCanonical( -// \\const s1 = -// \\ //\\one -// \\ \\two) -// \\ \\three -// \\; -// \\const s2 = -// \\ \\one -// \\ \\two) -// \\ //\\three -// \\; -// \\const s3 = -// \\ \\one -// \\ //\\two) -// \\ \\three -// \\; -// \\const s4 = -// \\ \\one -// \\ //\\two -// \\ \\three -// \\ //\\four -// \\ \\five -// \\; -// \\const a = -// \\ 1; -// \\ -// ); -//} +test "zig fmt: multiline string mixed with comments" { + try testCanonical( + \\const s1 = + \\ //\\one + \\ \\two) + \\ \\three + \\; + \\const s2 = + \\ \\one + \\ \\two) + \\ //\\three + \\; + \\const s3 = + \\ \\one + \\ //\\two) + \\ \\three + \\; + \\const s4 = + \\ \\one + \\ //\\two + \\ \\three + \\ //\\four + \\ \\five + \\; + \\const a = + \\ 1; + \\ + ); +} test "zig fmt: empty file" { try testCanonical( @@ -974,25 +974,25 @@ test "zig fmt: character literal larger than u8" { ); } -//test "zig fmt: infix operator and then multiline string literal" { -// try testCanonical( -// \\const x = "" ++ -// \\ \\ hi -// \\; -// \\ -// ); -//} -// -//test "zig fmt: infix operator and then multiline string literal" { -// try testCanonical( -// \\const x = "" ++ -// \\ \\ hi0 -// \\ \\ hi1 -// \\ \\ hi2 -// \\; -// \\ -// ); -//} +test "zig fmt: infix operator and then multiline string literal" { + try testCanonical( + \\const x = "" ++ + \\ \\ hi + \\; + \\ + ); +} + +test "zig fmt: infix operator and then multiline string literal" { + try testCanonical( + \\const x = "" ++ + \\ \\ hi0 + \\ \\ hi1 + \\ \\ hi2 + \\; + \\ + ); +} test "zig fmt: C pointers" { try testCanonical( @@ -1725,35 +1725,35 @@ test "zig fmt: struct literal no trailing comma" { // \\ 
// ); //} -// -//test "zig fmt: multiline string with backslash at end of line" { -// try testCanonical( -// \\comptime { -// \\ err( -// \\ \\\ -// \\ ); -// \\} -// \\ -// ); -//} -// -//test "zig fmt: multiline string parameter in fn call with trailing comma" { -// try testCanonical( -// \\fn foo() void { -// \\ try stdout.print( -// \\ \\ZIG_CMAKE_BINARY_DIR {} -// \\ \\ZIG_C_HEADER_FILES {} -// \\ \\ZIG_DIA_GUIDS_LIB {} -// \\ \\ -// \\ , -// \\ std.cstr.toSliceConst(c.ZIG_CMAKE_BINARY_DIR), -// \\ std.cstr.toSliceConst(c.ZIG_CXX_COMPILER), -// \\ std.cstr.toSliceConst(c.ZIG_DIA_GUIDS_LIB), -// \\ ); -// \\} -// \\ -// ); -//} + +test "zig fmt: multiline string with backslash at end of line" { + try testCanonical( + \\comptime { + \\ err( + \\ \\\ + \\ ); + \\} + \\ + ); +} + +test "zig fmt: multiline string parameter in fn call with trailing comma" { + try testCanonical( + \\fn foo() void { + \\ try stdout.print( + \\ \\ZIG_CMAKE_BINARY_DIR {} + \\ \\ZIG_C_HEADER_FILES {} + \\ \\ZIG_DIA_GUIDS_LIB {} + \\ \\ + \\ , + \\ std.cstr.toSliceConst(c.ZIG_CMAKE_BINARY_DIR), + \\ std.cstr.toSliceConst(c.ZIG_CXX_COMPILER), + \\ std.cstr.toSliceConst(c.ZIG_DIA_GUIDS_LIB), + \\ ); + \\} + \\ + ); +} test "zig fmt: trailing comma on fn call" { try testCanonical( diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 42649779b7..33584fa2c0 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -146,7 +146,6 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac .identifier, .integer_literal, .float_literal, - .string_literal, .char_literal, .true_literal, .false_literal, @@ -156,6 +155,29 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac .anyframe_literal, => return renderToken(ais, tree, main_tokens[node], space), + .string_literal => switch (token_tags[main_tokens[node]]) { + .string_literal => try renderToken(ais, tree, main_tokens[node], space), + + .multiline_string_literal_line => 
{ + var locked_indents = ais.lockOneShotIndent(); + try ais.maybeInsertNewline(); + + var i = datas[node].lhs; + while (i <= datas[node].rhs) : (i += 1) try renderToken(ais, tree, i, .newline); + + while (locked_indents > 0) : (locked_indents -= 1) ais.popIndent(); + + switch (space) { + .none => {}, + .semicolon => if (token_tags[i] == .semicolon) try renderToken(ais, tree, i, .newline), + .comma => if (token_tags[i] == .comma) try renderToken(ais, tree, i, .newline), + .comma_space => if (token_tags[i] == .comma) try renderToken(ais, tree, i, .space), + else => unreachable, + } + }, + else => unreachable, + }, + .error_value => { try renderToken(ais, tree, main_tokens[node], .none); try renderToken(ais, tree, main_tokens[node] + 1, .none); @@ -1821,7 +1843,7 @@ fn renderCall( const last_param = params[params.len - 1]; const after_last_param_tok = tree.lastToken(last_param) + 1; if (token_tags[after_last_param_tok] == .comma) { - ais.pushIndent(); + ais.pushIndentNextLine(); try renderToken(ais, tree, lparen, Space.newline); // ( for (params) |param_node, i| { if (i + 1 < params.len) { @@ -1846,6 +1868,7 @@ fn renderCall( return renderToken(ais, tree, after_last_param_tok + 1, space); // ) } + ais.pushIndentNextLine(); try renderToken(ais, tree, lparen, Space.none); // ( for (params) |param_node, i| { @@ -1856,6 +1879,8 @@ fn renderCall( try renderToken(ais, tree, comma, Space.space); } } + + ais.popIndent(); return renderToken(ais, tree, after_last_param_tok, space); // ) } @@ -1907,7 +1932,8 @@ fn renderToken(ais: *Ais, tree: ast.Tree, token_index: ast.TokenIndex, space: Sp const token_starts = tree.tokens.items(.start); const token_start = token_starts[token_index]; - const lexeme = tree.tokenSlice(token_index); + const lexeme = tokenSliceForRender(tree, token_index); + try ais.writer().writeAll(lexeme); if (space == .no_comment) return; @@ -1991,7 +2017,7 @@ fn renderExtraNewlineToken(ais: *Ais, tree: ast.Tree, token_index: ast.TokenInde const prev_token_end = 
if (token_index == 0) 0 else - token_starts[token_index - 1] + tree.tokenSlice(token_index - 1).len; + token_starts[token_index - 1] + tokenSliceForRender(tree, token_index - 1).len; // If there is a comment present, it will handle the empty line if (mem.indexOf(u8, tree.source[prev_token_end..token_start], "//") != null) return; @@ -2034,6 +2060,15 @@ fn renderDocComments(ais: *Ais, tree: ast.Tree, end_token: ast.TokenIndex) Error } } +fn tokenSliceForRender(tree: ast.Tree, token_index: ast.TokenIndex) []const u8 { + var ret = tree.tokenSlice(token_index); + if (tree.tokens.items(.tag)[token_index] == .multiline_string_literal_line) { + assert(ret[ret.len - 1] == '\n'); + ret.len -= 1; + } + return ret; +} + fn nodeIsBlock(tag: ast.Node.Tag) bool { return switch (tag) { .block, From c2b4d51749c365b1f007df782e06a85c69456937 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Sat, 13 Feb 2021 21:40:12 -0700 Subject: [PATCH 060/173] astgen: update a handful of expression types to new mem layout break, continue, blocks, bit_not, negation, identifiers, string literals, integer literals, inline assembly also gave multiline string literals a different node tag from regular string literals, for code clarity and to avoid an unnecessary load from token_tags array. 
--- lib/std/zig/ast.zig | 9 +- lib/std/zig/parse.zig | 12 +- lib/std/zig/render.zig | 35 ++-- src/astgen.zig | 408 +++++++++++++++++++++++++---------------- 4 files changed, 274 insertions(+), 190 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 40541ea7c1..18743c19d9 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -283,6 +283,7 @@ pub const Tree = struct { .undefined_literal, .unreachable_literal, .string_literal, + .multiline_string_literal, .grouped_expression, .builtin_call_two, .builtin_call_two_comma, @@ -593,7 +594,7 @@ pub const Tree = struct { .field_access, .unwrap_optional, .grouped_expression, - .string_literal, + .multiline_string_literal, .error_set_decl, .asm_simple, .asm_output, @@ -614,6 +615,7 @@ pub const Tree = struct { .identifier, .deref, .enum_literal, + .string_literal, => return main_tokens[n] + end_offset, .@"return" => if (datas[n].lhs != 0) { @@ -2826,11 +2828,14 @@ pub const Node = struct { identifier, /// lhs is the dot token index, rhs unused, main_token is the identifier. enum_literal, + /// main_token is the string literal token + /// Both lhs and rhs unused. + string_literal, /// main_token is the first token index (redundant with lhs) /// lhs is the first token index; rhs is the last token index. /// Could be a series of multiline_string_literal_line tokens, or a single /// string_literal token. - string_literal, + multiline_string_literal, /// `(lhs)`. main_token is the `(`; rhs is the token index of the `)`. grouped_expression, /// `@a(lhs, rhs)`. lhs and rhs may be omitted. 
diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig index 6eb617910c..a2043071d1 100644 --- a/lib/std/zig/parse.zig +++ b/lib/std/zig/parse.zig @@ -2595,8 +2595,8 @@ const Parser = struct { .tag = .string_literal, .main_token = main_token, .data = .{ - .lhs = main_token, - .rhs = main_token, + .lhs = undefined, + .rhs = undefined, }, }); }, @@ -2633,7 +2633,7 @@ const Parser = struct { p.tok_i += 1; } return p.addNode(.{ - .tag = .string_literal, + .tag = .multiline_string_literal, .main_token = first_line, .data = .{ .lhs = first_line, @@ -3996,8 +3996,8 @@ const Parser = struct { .tag = .string_literal, .main_token = main_token, .data = .{ - .lhs = main_token, - .rhs = main_token, + .lhs = undefined, + .rhs = undefined, }, }); }, @@ -4007,7 +4007,7 @@ const Parser = struct { p.tok_i += 1; } return p.addNode(.{ - .tag = .string_literal, + .tag = .multiline_string_literal, .main_token = first_line, .data = .{ .lhs = first_line, diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 33584fa2c0..4aea213d2a 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -153,29 +153,25 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac .unreachable_literal, .undefined_literal, .anyframe_literal, + .string_literal, => return renderToken(ais, tree, main_tokens[node], space), - .string_literal => switch (token_tags[main_tokens[node]]) { - .string_literal => try renderToken(ais, tree, main_tokens[node], space), + .multiline_string_literal => { + var locked_indents = ais.lockOneShotIndent(); + try ais.maybeInsertNewline(); - .multiline_string_literal_line => { - var locked_indents = ais.lockOneShotIndent(); - try ais.maybeInsertNewline(); + var i = datas[node].lhs; + while (i <= datas[node].rhs) : (i += 1) try renderToken(ais, tree, i, .newline); - var i = datas[node].lhs; - while (i <= datas[node].rhs) : (i += 1) try renderToken(ais, tree, i, .newline); + while (locked_indents > 0) : (locked_indents -= 1) 
ais.popIndent(); - while (locked_indents > 0) : (locked_indents -= 1) ais.popIndent(); - - switch (space) { - .none => {}, - .semicolon => if (token_tags[i] == .semicolon) try renderToken(ais, tree, i, .newline), - .comma => if (token_tags[i] == .comma) try renderToken(ais, tree, i, .newline), - .comma_space => if (token_tags[i] == .comma) try renderToken(ais, tree, i, .space), - else => unreachable, - } - }, - else => unreachable, + switch (space) { + .none => {}, + .semicolon => if (token_tags[i] == .semicolon) try renderToken(ais, tree, i, .newline), + .comma => if (token_tags[i] == .comma) try renderToken(ais, tree, i, .newline), + .comma_space => if (token_tags[i] == .comma) try renderToken(ais, tree, i, .space), + else => unreachable, + } }, .error_value => { @@ -1850,8 +1846,7 @@ fn renderCall( try renderExpression(ais, tree, param_node, Space.none); // Unindent the comma for multiline string literals - const is_multiline_string = node_tags[param_node] == .string_literal and - token_tags[main_tokens[param_node]] == .multiline_string_literal_line; + const is_multiline_string = node_tags[param_node] == .multiline_string_literal; if (is_multiline_string) ais.popIndent(); const comma = tree.lastToken(param_node) + 1; diff --git a/src/astgen.zig b/src/astgen.zig index dcc2ea9ad2..da0c93bb8e 100644 --- a/src/astgen.zig +++ b/src/astgen.zig @@ -56,7 +56,8 @@ pub const ResultLoc = union(enum) { }; pub fn typeExpr(mod: *Module, scope: *Scope, type_node: ast.Node.Index) InnerError!*zir.Inst { - const type_src = scope.tree().token_locs[type_node.firstToken()].start; + const tree = scope.tree(); + const type_src = token_starts[tree.firstToken(type_node)]; const type_type = try addZIRInstConst(mod, scope, type_src, .{ .ty = Type.initTag(.type), .val = Value.initTag(.type_type), @@ -258,18 +259,23 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) In .array_cat => return simpleBinOp(mod, scope, rl, node, .array_cat), .array_mult => return 
simpleBinOp(mod, scope, rl, node, .array_mul), - .bool_and => return boolBinOp(mod, scope, rl, node), - .bool_or => return boolBinOp(mod, scope, rl, node), + .bool_and => return boolBinOp(mod, scope, rl, node, true), + .bool_or => return boolBinOp(mod, scope, rl, node, false), .bool_not => return rvalue(mod, scope, rl, try boolNot(mod, scope, node)), .bit_not => return rvalue(mod, scope, rl, try bitNot(mod, scope, node)), .negation => return rvalue(mod, scope, rl, try negation(mod, scope, node, .sub)), .negation_wrap => return rvalue(mod, scope, rl, try negation(mod, scope, node, .subwrap)), - .identifier => return try identifier(mod, scope, rl, node), - .@"asm" => return rvalue(mod, scope, rl, try assembly(mod, scope, node)), - .string_literal => return rvalue(mod, scope, rl, try stringLiteral(mod, scope, node)), - .integer_literal => return rvalue(mod, scope, rl, try integerLiteral(mod, scope, node)), + .identifier => return identifier(mod, scope, rl, node), + + .asm_simple => return assembly(mod, scope, rl, tree.asmSimple(node)), + .@"asm" => return assembly(mod, scope, rl, tree.asmFull(node)), + + .string_literal => return stringLiteral(mod, scope, rl, node), + .multiline_string_literal => return multilineStringLiteral(mod, scope, rl, node), + + .integer_literal => return integerLiteral(mod, scope, rl, node), .builtin_call => return builtinCall(mod, scope, rl, node), .call => return callExpr(mod, scope, rl, node), .@"unreachable" => return unreach(mod, scope, node), @@ -285,7 +291,22 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) In .null_literal => return rvalue(mod, scope, rl, try nullLiteral(mod, scope, node)), .optional_type => return rvalue(mod, scope, rl, try optionalType(mod, scope, node)), .unwrap_optional => return unwrapOptional(mod, scope, rl, node), - .block => return rvalueVoid(mod, scope, rl, node, try blockExpr(mod, scope, node)), + + .block_two, .block_two_semicolon => { + const statements = [2]ast.Node.Index{ 
node_datas[node].lhs, node_datas[node].rhs }; + if (node_datas[node].lhs == 0) { + return blockExpr(mod, scope, rl, node, statements[0..0]); + } else if (node_datas[node].rhs == 0) { + return blockExpr(mod, scope, rl, node, statements[0..1]); + } else { + return blockExpr(mod, scope, rl, node, statements[0..2]); + } + }, + .block, .block_semicolon => { + const statements = tree.extra_data[node_datas[node].lhs..node_datas[node].rhs]; + return blockExpr(mod, scope, rl, node, statements); + }, + .labeled_block => return labeledBlockExpr(mod, scope, rl, node, .block), .@"break" => return rvalue(mod, scope, rl, try breakExpr(mod, scope, node)), .@"continue" => return rvalue(mod, scope, rl, try continueExpr(mod, scope, node)), @@ -293,7 +314,6 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) In .array_type => return rvalue(mod, scope, rl, try arrayType(mod, scope, node)), .array_type_sentinel => return rvalue(mod, scope, rl, try arrayTypeSentinel(mod, scope, node)), .enum_literal => return rvalue(mod, scope, rl, try enumLiteral(mod, scope, node)), - .MultilineStringLiteral => return rvalue(mod, scope, rl, try multilineStrLiteral(mod, scope, node)), .char_literal => return rvalue(mod, scope, rl, try charLiteral(mod, scope, node)), .slice_type => return rvalue(mod, scope, rl, try sliceType(mod, scope, node)), .error_union => return rvalue(mod, scope, rl, try typeInixOp(mod, scope, node, .error_union_type)), @@ -343,10 +363,17 @@ pub fn comptimeExpr( return expr(mod, parent_scope, rl, node); } + const tree = parent_scope.tree(); + const main_tokens = tree.nodes.items(.main_token); + const token_tags = tree.tokens.items(.tag); + // Optimization for labeled blocks: don't need to have 2 layers of blocks, // we can reuse the existing one. 
- if (node.castTag(.labeled_block)) |block_node| { - return labeledBlockExpr(mod, parent_scope, rl, block_node, .block_comptime); + const lbrace = main_tokens[node]; + if (token_tags[lbrace - 1] == .colon and + token_tags[lbrace - 2] == .identifier) + { + return labeledBlockExpr(mod, parent_scope, rl, node, .block_comptime); } // Make a scope to collect generated instructions in the sub-expression. @@ -363,11 +390,7 @@ pub fn comptimeExpr( // instruction is the block's result value. _ = try expr(mod, &block_scope.base, rl, node); - const tree = parent_scope.tree(); - const node_datas = tree.nodes.items(.data); - const main_tokens = tree.nodes.items(.main_token); - const src = tree.token_locs[node.firstToken()].start; - + const src = token_starts[tree.firstToken(node)]; const block = try addZIRInstBlock(mod, parent_scope, src, .block_comptime_flat, .{ .instructions = try block_scope.arena.dupe(*zir.Inst, block_scope.instructions.items), }); @@ -375,15 +398,13 @@ pub fn comptimeExpr( return &block.base; } -fn breakExpr( - mod: *Module, - parent_scope: *Scope, - node: *ast.Node.ControlFlowExpression, -) InnerError!*zir.Inst { +fn breakExpr(mod: *Module, parent_scope: *Scope, node: ast.Node.Index) InnerError!*zir.Inst { const tree = parent_scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - const src = tree.token_locs[node.ltoken].start; + const src = token_starts[main_tokens[node]]; + const break_label = node_datas[node].lhs; + const rhs = node_datas[node].rhs; // Look for the label in the scope. 
var scope = parent_scope; @@ -393,7 +414,7 @@ fn breakExpr( const gen_zir = scope.cast(Scope.GenZIR).?; const block_inst = blk: { - if (node.getLabel()) |break_label| { + if (break_label != 0) { if (gen_zir.label) |*label| { if (try tokenIdentEql(mod, parent_scope, label.token, break_label)) { label.used = true; @@ -407,11 +428,11 @@ fn breakExpr( continue; }; - const rhs = node.getRHS() orelse { + if (rhs == 0) { return addZirInstTag(mod, parent_scope, src, .break_void, .{ .block = block_inst, }); - }; + } gen_zir.break_count += 1; const prev_rvalue_rl_count = gen_zir.rvalue_rl_count; const operand = try expr(mod, parent_scope, gen_zir.break_result_loc, rhs); @@ -435,7 +456,7 @@ fn breakExpr( }, .local_val => scope = scope.cast(Scope.LocalVal).?.parent, .local_ptr => scope = scope.cast(Scope.LocalPtr).?.parent, - else => if (node.getLabel()) |break_label| { + else => if (break_label != 0) { const label_name = try mod.identifierTokenString(parent_scope, break_label); return mod.failTok(parent_scope, break_label, "label not found: '{s}'", .{label_name}); } else { @@ -445,11 +466,12 @@ fn breakExpr( } } -fn continueExpr(mod: *Module, parent_scope: *Scope, node: *ast.Node.ControlFlowExpression) InnerError!*zir.Inst { +fn continueExpr(mod: *Module, parent_scope: *Scope, node: ast.Node.Index) InnerError!*zir.Inst { const tree = parent_scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - const src = tree.token_locs[node.ltoken].start; + const src = token_starts[main_tokens[node]]; + const break_label = node_datas[node].lhs; // Look for the label in the scope. 
var scope = parent_scope; @@ -461,7 +483,7 @@ fn continueExpr(mod: *Module, parent_scope: *Scope, node: *ast.Node.ControlFlowE scope = gen_zir.parent; continue; }; - if (node.getLabel()) |break_label| blk: { + if (break_label != 0) blk: { if (gen_zir.label) |*label| { if (try tokenIdentEql(mod, parent_scope, label.token, break_label)) { label.used = true; @@ -479,7 +501,7 @@ fn continueExpr(mod: *Module, parent_scope: *Scope, node: *ast.Node.ControlFlowE }, .local_val => scope = scope.cast(Scope.LocalVal).?.parent, .local_ptr => scope = scope.cast(Scope.LocalPtr).?.parent, - else => if (node.getLabel()) |break_label| { + else => if (break_label != 0) { const label_name = try mod.identifierTokenString(parent_scope, break_label); return mod.failTok(parent_scope, break_label, "label not found: '{s}'", .{label_name}); } else { @@ -489,11 +511,18 @@ fn continueExpr(mod: *Module, parent_scope: *Scope, node: *ast.Node.ControlFlowE } } -pub fn blockExpr(mod: *Module, parent_scope: *Scope, block_node: *ast.Node.block) InnerError!void { +pub fn blockExpr( + mod: *Module, + scope: *Scope, + rl: ResultLoc, + block_node: ast.Node.Index, + statements: []const ast.Node.Index, +) InnerError!void { const tracy = trace(@src()); defer tracy.end(); - try blockExprStmts(mod, parent_scope, &block_node.base, block_node.statements()); + try blockExprStmts(mod, scope, &block_node.base, statements); + return rvalueVoid(mod, scope, rl, block_node, {}); } fn checkLabelRedefinition(mod: *Module, parent_scope: *Scope, label: ast.TokenIndex) !void { @@ -508,8 +537,8 @@ fn checkLabelRedefinition(mod: *Module, parent_scope: *Scope, label: ast.TokenIn const tree = parent_scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - const label_src = tree.token_locs[label].start; - const prev_label_src = tree.token_locs[prev_label.token].start; + const label_src = token_starts[label]; + const prev_label_src = token_starts[prev_label.token]; const 
label_name = try mod.identifierTokenString(parent_scope, label); const msg = msg: { @@ -556,7 +585,7 @@ fn labeledBlockExpr( const tree = parent_scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - const src = tree.token_locs[block_node.lbrace].start; + const src = token_starts[block_node.lbrace]; try checkLabelRedefinition(mod, parent_scope, block_node.label); @@ -595,7 +624,7 @@ fn labeledBlockExpr( try blockExprStmts(mod, &block_scope.base, &block_node.base, block_node.statements()); if (!block_scope.label.?.used) { - return mod.fail(parent_scope, tree.token_locs[block_node.label].start, "unused block label", .{}); + return mod.fail(parent_scope, token_starts[block_node.label], "unused block label", .{}); } try gen_zir.instructions.append(mod.gpa, &block_inst.base); @@ -647,7 +676,7 @@ fn blockExprStmts( var scope = parent_scope; for (statements) |statement| { - const src = tree.token_locs[statement.firstToken()].start; + const src = token_starts[statement.firstToken()]; _ = try addZIRNoOp(mod, scope, src, .dbg_stmt); switch (statement.tag) { .var_decl => { @@ -694,7 +723,7 @@ fn varDecl( const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - const name_src = tree.token_locs[node.name_token].start; + const name_src = token_starts[node.name_token]; const ident_name = try mod.identifierTokenString(scope, node.name_token); // Local variables shadowing detection, including function parameters. 
@@ -911,34 +940,46 @@ fn assignOp( _ = try addZIRBinOp(mod, scope, src, .store, lhs_ptr, result); } -fn boolNot(mod: *Module, scope: *Scope, node: *ast.Node.SimplePrefixOp) InnerError!*zir.Inst { +fn boolNot(mod: *Module, scope: *Scope, node: ast.Node.Index) InnerError!*zir.Inst { const tree = scope.tree(); - const src = tree.token_locs[node.op_token].start; + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); + + const src = token_starts[main_tokens[node]]; const bool_type = try addZIRInstConst(mod, scope, src, .{ .ty = Type.initTag(.type), .val = Value.initTag(.bool_type), }); - const operand = try expr(mod, scope, .{ .ty = bool_type }, node.rhs); + const operand = try expr(mod, scope, .{ .ty = bool_type }, node_datas[node].lhs); return addZIRUnOp(mod, scope, src, .bool_not, operand); } -fn bitNot(mod: *Module, scope: *Scope, node: *ast.Node.SimplePrefixOp) InnerError!*zir.Inst { +fn bitNot(mod: *Module, scope: *Scope, node: ast.Node.Index) InnerError!*zir.Inst { const tree = scope.tree(); - const src = tree.token_locs[node.op_token].start; - const operand = try expr(mod, scope, .none, node.rhs); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); + + const src = token_starts[main_tokens[node]]; + const operand = try expr(mod, scope, .none, node_datas[node].lhs); return addZIRUnOp(mod, scope, src, .bit_not, operand); } -fn negation(mod: *Module, scope: *Scope, node: *ast.Node.SimplePrefixOp, op_inst_tag: zir.Inst.Tag) InnerError!*zir.Inst { +fn negation( + mod: *Module, + scope: *Scope, + node: ast.Node.Index, + op_inst_tag: zir.Inst.Tag, +) InnerError!*zir.Inst { const tree = scope.tree(); - const src = tree.token_locs[node.op_token].start; + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); + const src = token_starts[main_tokens[node]]; const lhs = try addZIRInstConst(mod, scope, src, .{ .ty = Type.initTag(.comptime_int), 
.val = Value.initTag(.zero), }); - const rhs = try expr(mod, scope, .none, node.rhs); - + const rhs = try expr(mod, scope, .none, node_datas[node].lhs); return addZIRBinOp(mod, scope, src, op_inst_tag, lhs, rhs); } @@ -948,20 +989,20 @@ fn addressOf(mod: *Module, scope: *Scope, node: *ast.Node.SimplePrefixOp) InnerE fn optionalType(mod: *Module, scope: *Scope, node: *ast.Node.SimplePrefixOp) InnerError!*zir.Inst { const tree = scope.tree(); - const src = tree.token_locs[node.op_token].start; + const src = token_starts[node.op_token]; const operand = try typeExpr(mod, scope, node.rhs); return addZIRUnOp(mod, scope, src, .optional_type, operand); } fn sliceType(mod: *Module, scope: *Scope, node: *ast.Node.slice_type) InnerError!*zir.Inst { const tree = scope.tree(); - const src = tree.token_locs[node.op_token].start; + const src = token_starts[node.op_token]; return ptrSliceType(mod, scope, src, &node.ptr_info, node.rhs, .Slice); } fn ptrType(mod: *Module, scope: *Scope, node: *ast.Node.PtrType) InnerError!*zir.Inst { const tree = scope.tree(); - const src = tree.token_locs[node.op_token].start; + const src = token_starts[node.op_token]; return ptrSliceType(mod, scope, src, &node.ptr_info, node.rhs, switch (tree.token_ids[node.op_token]) { .Asterisk, .AsteriskAsterisk => .One, // TODO stage1 type inference bug @@ -1018,7 +1059,7 @@ fn ptrSliceType(mod: *Module, scope: *Scope, src: usize, ptr_info: *ast.PtrInfo, fn arrayType(mod: *Module, scope: *Scope, node: *ast.Node.array_type) !*zir.Inst { const tree = scope.tree(); - const src = tree.token_locs[node.op_token].start; + const src = token_starts[node.op_token]; const usize_type = try addZIRInstConst(mod, scope, src, .{ .ty = Type.initTag(.type), .val = Value.initTag(.usize_type), @@ -1033,7 +1074,7 @@ fn arrayType(mod: *Module, scope: *Scope, node: *ast.Node.array_type) !*zir.Inst fn arrayTypeSentinel(mod: *Module, scope: *Scope, node: *ast.Node.array_type_sentinel) !*zir.Inst { const tree = scope.tree(); - const 
src = tree.token_locs[node.op_token].start; + const src = token_starts[node.op_token]; const usize_type = try addZIRInstConst(mod, scope, src, .{ .ty = Type.initTag(.type), .val = Value.initTag(.usize_type), @@ -1054,7 +1095,7 @@ fn arrayTypeSentinel(mod: *Module, scope: *Scope, node: *ast.Node.array_type_sen fn anyFrameType(mod: *Module, scope: *Scope, node: *ast.Node.anyframe_type) InnerError!*zir.Inst { const tree = scope.tree(); - const src = tree.token_locs[node.anyframe_token].start; + const src = token_starts[node.anyframe_token]; if (node.result) |some| { const return_type = try typeExpr(mod, scope, some.return_type); return addZIRUnOp(mod, scope, src, .anyframe_type, return_type); @@ -1068,7 +1109,7 @@ fn anyFrameType(mod: *Module, scope: *Scope, node: *ast.Node.anyframe_type) Inne fn typeInixOp(mod: *Module, scope: *Scope, node: *ast.Node.SimpleInfixOp, op_inst_tag: zir.Inst.Tag) InnerError!*zir.Inst { const tree = scope.tree(); - const src = tree.token_locs[node.op_token].start; + const src = token_starts[node.op_token]; const error_set = try typeExpr(mod, scope, node.lhs); const payload = try typeExpr(mod, scope, node.rhs); return addZIRBinOp(mod, scope, src, op_inst_tag, error_set, payload); @@ -1076,7 +1117,7 @@ fn typeInixOp(mod: *Module, scope: *Scope, node: *ast.Node.SimpleInfixOp, op_ins fn enumLiteral(mod: *Module, scope: *Scope, node: *ast.Node.enum_literal) !*zir.Inst { const tree = scope.tree(); - const src = tree.token_locs[node.name].start; + const src = token_starts[node.name]; const name = try mod.identifierTokenString(scope, node.name); return addZIRInst(mod, scope, src, zir.Inst.EnumLiteral, .{ .name = name }, .{}); @@ -1084,7 +1125,7 @@ fn enumLiteral(mod: *Module, scope: *Scope, node: *ast.Node.enum_literal) !*zir. 
fn unwrapOptional(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.SimpleSuffixOp) InnerError!*zir.Inst { const tree = scope.tree(); - const src = tree.token_locs[node.rtoken].start; + const src = token_starts[node.rtoken]; const operand = try expr(mod, scope, rl, node.lhs); const op: zir.Inst.Tag = switch (rl) { @@ -1100,7 +1141,7 @@ fn containerField( node: *ast.Node.ContainerField, ) InnerError!*zir.Inst { const tree = scope.tree(); - const src = tree.token_locs[node.firstToken()].start; + const src = token_starts[tree.firstToken(node)]; const name = try mod.identifierTokenString(scope, node.name_token); if (node.comptime_token == null and node.value_expr == null and node.align_expr == null) { @@ -1133,7 +1174,7 @@ fn containerField( fn containerDecl(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.ContainerDecl) InnerError!*zir.Inst { const tree = scope.tree(); - const src = tree.token_locs[node.kind_token].start; + const src = token_starts[node.kind_token]; var gen_scope: Scope.GenZIR = .{ .parent = scope, @@ -1278,7 +1319,7 @@ fn containerDecl(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Con fn errorSetDecl(mod: *Module, scope: *Scope, node: *ast.Node.error_set_decl) InnerError!*zir.Inst { const tree = scope.tree(); - const src = tree.token_locs[node.error_token].start; + const src = token_starts[node.error_token]; const decls = node.decls(); const fields = try scope.arena().alloc([]const u8, decls.len); @@ -1292,7 +1333,7 @@ fn errorSetDecl(mod: *Module, scope: *Scope, node: *ast.Node.error_set_decl) Inn fn errorType(mod: *Module, scope: *Scope, node: *ast.Node.OneToken) InnerError!*zir.Inst { const tree = scope.tree(); - const src = tree.token_locs[node.token].start; + const src = token_starts[node.token]; return addZIRInstConst(mod, scope, src, .{ .ty = Type.initTag(.type), .val = Value.initTag(.anyerror_type), @@ -1370,7 +1411,7 @@ fn orelseCatchExpr( payload_node: ?*ast.Node, ) InnerError!*zir.Inst { const tree = 
scope.tree(); - const src = tree.token_locs[op_token].start; + const src = token_starts[op_token]; var block_scope: Scope.GenZIR = .{ .parent = scope, @@ -1544,7 +1585,7 @@ fn tokenIdentEql(mod: *Module, scope: *Scope, token1: ast.TokenIndex, token2: as pub fn field(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.SimpleInfixOp) InnerError!*zir.Inst { const tree = scope.tree(); - const src = tree.token_locs[node.op_token].start; + const src = token_starts[node.op_token]; // TODO custom AST node for field access so that we don't have to go through a node cast here const field_name = try mod.identifierTokenString(scope, node.rhs.castTag(.identifier).?.token); if (rl == .ref) { @@ -1568,7 +1609,7 @@ fn namedField( try ensureBuiltinParamCount(mod, scope, call, 2); const tree = scope.tree(); - const src = tree.token_locs[call.builtin_token].start; + const src = token_starts[call.builtin_token]; const params = call.params(); const string_type = try addZIRInstConst(mod, scope, src, .{ @@ -1591,7 +1632,7 @@ fn namedField( fn arrayAccess(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.array_access) InnerError!*zir.Inst { const tree = scope.tree(); - const src = tree.token_locs[node.rtoken].start; + const src = token_starts[node.rtoken]; const usize_type = try addZIRInstConst(mod, scope, src, .{ .ty = Type.initTag(.type), .val = Value.initTag(.usize_type), @@ -1612,7 +1653,7 @@ fn arrayAccess(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.array fn sliceExpr(mod: *Module, scope: *Scope, node: *ast.Node.slice) InnerError!*zir.Inst { const tree = scope.tree(); - const src = tree.token_locs[node.rtoken].start; + const src = token_starts[node.rtoken]; const usize_type = try addZIRInstConst(mod, scope, src, .{ .ty = Type.initTag(.type), @@ -1642,7 +1683,7 @@ fn sliceExpr(mod: *Module, scope: *Scope, node: *ast.Node.slice) InnerError!*zir fn deref(mod: *Module, scope: *Scope, node: *ast.Node.SimpleSuffixOp) InnerError!*zir.Inst { const tree = 
scope.tree(); - const src = tree.token_locs[node.rtoken].start; + const src = token_starts[node.rtoken]; const lhs = try expr(mod, scope, .none, node.lhs); return addZIRUnOp(mod, scope, src, .deref, lhs); } @@ -1669,13 +1710,14 @@ fn boolBinOp( mod: *Module, scope: *Scope, rl: ResultLoc, - infix_node: *ast.Node.SimpleInfixOp, + infix_node: ast.Node.Index, + is_bool_and: bool, ) InnerError!*zir.Inst { const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - const src = tree.token_locs[infix_node.op_token].start; + const src = token_starts[main_tokens[infix_node]]; const bool_type = try addZIRInstConst(mod, scope, src, .{ .ty = Type.initTag(.type), .val = Value.initTag(.bool_type), @@ -1690,7 +1732,7 @@ fn boolBinOp( }; defer block_scope.instructions.deinit(mod.gpa); - const lhs = try expr(mod, scope, .{ .ty = bool_type }, infix_node.lhs); + const lhs = try expr(mod, scope, .{ .ty = bool_type }, node_datas[infix_node].lhs); const condbr = try addZIRInstSpecial(mod, &block_scope.base, src, zir.Inst.CondBr, .{ .condition = lhs, .then_body = undefined, // populated below @@ -1710,7 +1752,7 @@ fn boolBinOp( }; defer rhs_scope.instructions.deinit(mod.gpa); - const rhs = try expr(mod, &rhs_scope.base, .{ .ty = bool_type }, infix_node.rhs); + const rhs = try expr(mod, &rhs_scope.base, .{ .ty = bool_type }, node_datas[infix_node].rhs); _ = try addZIRInst(mod, &rhs_scope.base, src, zir.Inst.Break, .{ .block = block, .operand = rhs, @@ -1725,7 +1767,6 @@ fn boolBinOp( }; defer const_scope.instructions.deinit(mod.gpa); - const is_bool_and = infix_node.base.tag == .bool_and; _ = try addZIRInst(mod, &const_scope.base, src, zir.Inst.Break, .{ .block = block, .operand = try addZIRInstConst(mod, &const_scope.base, src, .{ @@ -1843,7 +1884,7 @@ fn ifExpr(mod: *Module, scope: *Scope, rl: ResultLoc, if_node: *ast.Node.@"if") const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = 
tree.nodes.items(.main_token); - const if_src = tree.token_locs[if_node.if_token].start; + const if_src = token_starts[if_node.if_token]; const cond = try cond_kind.cond(mod, &block_scope, if_src, if_node.condition); const condbr = try addZIRInstSpecial(mod, &block_scope.base, if_src, zir.Inst.CondBr, .{ @@ -1856,7 +1897,7 @@ fn ifExpr(mod: *Module, scope: *Scope, rl: ResultLoc, if_node: *ast.Node.@"if") .instructions = try block_scope.arena.dupe(*zir.Inst, block_scope.instructions.items), }); - const then_src = tree.token_locs[if_node.body.lastToken()].start; + const then_src = token_starts[if_node.body.lastToken()]; var then_scope: Scope.GenZIR = .{ .parent = scope, .decl = block_scope.decl, @@ -1887,14 +1928,14 @@ fn ifExpr(mod: *Module, scope: *Scope, rl: ResultLoc, if_node: *ast.Node.@"if") var else_src: usize = undefined; var else_sub_scope: *Module.Scope = undefined; const else_result: ?*zir.Inst = if (if_node.@"else") |else_node| blk: { - else_src = tree.token_locs[else_node.body.lastToken()].start; + else_src = token_starts[else_node.body.lastToken()]; // declare payload to the then_scope else_sub_scope = try cond_kind.elseSubScope(mod, &else_scope, else_src, else_node.payload); block_scope.break_count += 1; break :blk try expr(mod, else_sub_scope, block_scope.break_result_loc, else_node.body); } else blk: { - else_src = tree.token_locs[if_node.lastToken()].start; + else_src = token_starts[if_node.lastToken()]; else_sub_scope = &else_scope.base; break :blk null; }; @@ -1981,7 +2022,7 @@ fn whileExpr( const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - const while_src = tree.token_locs[while_node.while_token].start; + const while_src = token_starts[while_node.while_token]; const void_type = try addZIRInstConst(mod, scope, while_src, .{ .ty = Type.initTag(.type), .val = Value.initTag(.void_type), @@ -2028,7 +2069,7 @@ fn whileExpr( }); } - const then_src = 
tree.token_locs[while_node.body.lastToken()].start; + const then_src = token_starts[while_node.body.lastToken()]; var then_scope: Scope.GenZIR = .{ .parent = &continue_scope.base, .decl = continue_scope.decl, @@ -2055,19 +2096,19 @@ fn whileExpr( var else_src: usize = undefined; const else_result: ?*zir.Inst = if (while_node.@"else") |else_node| blk: { - else_src = tree.token_locs[else_node.body.lastToken()].start; + else_src = token_starts[else_node.body.lastToken()]; // declare payload to the then_scope const else_sub_scope = try cond_kind.elseSubScope(mod, &else_scope, else_src, else_node.payload); loop_scope.break_count += 1; break :blk try expr(mod, else_sub_scope, loop_scope.break_result_loc, else_node.body); } else blk: { - else_src = tree.token_locs[while_node.lastToken()].start; + else_src = token_starts[while_node.lastToken()]; break :blk null; }; if (loop_scope.label) |some| { if (!some.used) { - return mod.fail(scope, tree.token_locs[some.token].start, "unused while label", .{}); + return mod.fail(scope, token_starts[some.token], "unused while label", .{}); } } return finishThenElseBlock( @@ -2105,7 +2146,7 @@ fn forExpr( const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - const for_src = tree.token_locs[for_node.for_token].start; + const for_src = token_starts[for_node.for_token]; const index_ptr = blk: { const usize_type = try addZIRInstConst(mod, scope, for_src, .{ .ty = Type.initTag(.type), @@ -2121,7 +2162,7 @@ fn forExpr( break :blk index_ptr; }; const array_ptr = try expr(mod, scope, .ref, for_node.array_expr); - const cond_src = tree.token_locs[for_node.array_expr.firstToken()].start; + const cond_src = token_starts[for_node.array_expr.firstToken()]; const len = try addZIRUnOp(mod, scope, cond_src, .indexable_ptr_len, array_ptr); var loop_scope: Scope.GenZIR = .{ @@ -2191,7 +2232,7 @@ fn forExpr( } // while body - const then_src = 
tree.token_locs[for_node.body.lastToken()].start; + const then_src = token_starts[for_node.body.lastToken()]; var then_scope: Scope.GenZIR = .{ .parent = &cond_scope.base, .decl = cond_scope.decl, @@ -2215,7 +2256,7 @@ fn forExpr( const index_symbol_node = payload.index_symbol orelse break :blk &then_scope.base; - const index_name = tree.tokenSlice(index_symbol_node.firstToken()); + const index_name = tree.tokenSlice(tree.firstToken(index_symbol_node)); if (mem.eql(u8, index_name, "_")) { break :blk &then_scope.base; } @@ -2244,16 +2285,16 @@ fn forExpr( var else_src: usize = undefined; const else_result: ?*zir.Inst = if (for_node.@"else") |else_node| blk: { - else_src = tree.token_locs[else_node.body.lastToken()].start; + else_src = token_starts[else_node.body.lastToken()]; loop_scope.break_count += 1; break :blk try expr(mod, &else_scope.base, loop_scope.break_result_loc, else_node.body); } else blk: { - else_src = tree.token_locs[for_node.lastToken()].start; + else_src = token_starts[for_node.lastToken()]; break :blk null; }; if (loop_scope.label) |some| { if (!some.used) { - return mod.fail(scope, tree.token_locs[some.token].start, "unused for label", .{}); + return mod.fail(scope, token_starts[some.token], "unused for label", .{}); } } return finishThenElseBlock( @@ -2299,7 +2340,7 @@ fn switchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, switch_node: *ast.Node const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - const switch_src = tree.token_locs[switch_node.switch_token].start; + const switch_src = token_starts[switch_node.switch_token]; const use_ref = switchCaseUsesRef(switch_node); var block_scope: Scope.GenZIR = .{ @@ -2322,7 +2363,7 @@ fn switchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, switch_node: *ast.Node var simple_case_count: usize = 0; for (switch_node.cases()) |uncasted_case| { const case = uncasted_case.castTag(.switch_case).?; - const case_src = 
tree.token_locs[case.firstToken()].start; + const case_src = token_starts[case.firstToken()]; assert(case.items_len != 0); // Check for else/_ prong, those are handled last. @@ -2389,7 +2430,7 @@ fn switchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, switch_node: *ast.Node if (getRangeNode(item)) |range| { const start = try comptimeExpr(mod, &block_scope.base, .none, range.lhs); const end = try comptimeExpr(mod, &block_scope.base, .none, range.rhs); - const range_src = tree.token_locs[range.op_token].start; + const range_src = token_starts[range.op_token]; const range_inst = try addZIRBinOp(mod, &block_scope.base, range_src, .switch_range, start, end); try items.append(range_inst); } else { @@ -2447,7 +2488,7 @@ fn switchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, switch_node: *ast.Node var case_index: usize = 0; for (switch_node.cases()) |uncasted_case| { const case = uncasted_case.castTag(.switch_case).?; - const case_src = tree.token_locs[case.firstToken()].start; + const case_src = token_starts[case.firstToken()]; // reset without freeing to reduce allocations. 
case_scope.instructions.items.len = 0; @@ -2485,7 +2526,7 @@ fn switchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, switch_node: *ast.Node var any_ok: ?*zir.Inst = null; for (case.items()) |item| { if (getRangeNode(item)) |range| { - const range_src = tree.token_locs[range.op_token].start; + const range_src = token_starts[range.op_token]; const range_inst = items.items[items_index].castTag(.switch_range).?; items_index += 1; @@ -2565,7 +2606,7 @@ fn switchCaseExpr( const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - const case_src = tree.token_locs[case.firstToken()].start; + const case_src = token_starts[case.firstToken()]; const sub_scope = blk: { const uncasted_payload = case.payload orelse break :blk scope; const payload = uncasted_payload.castTag(.PointerPayload).?; @@ -2593,7 +2634,7 @@ fn ret(mod: *Module, scope: *Scope, cfe: *ast.Node.ControlFlowExpression) InnerE const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - const src = tree.token_locs[cfe.ltoken].start; + const src = token_starts[cfe.ltoken]; if (cfe.getRHS()) |rhs_node| { if (nodeMayNeedMemoryLocation(rhs_node, scope)) { const ret_ptr = try addZIRNoOp(mod, scope, src, .ret_ptr); @@ -2609,17 +2650,24 @@ fn ret(mod: *Module, scope: *Scope, cfe: *ast.Node.ControlFlowExpression) InnerE } } -fn identifier(mod: *Module, scope: *Scope, rl: ResultLoc, ident: *ast.Node.OneToken) InnerError!*zir.Inst { +fn identifier( + mod: *Module, + scope: *Scope, + rl: ResultLoc, + ident: ast.Node.Index, +) InnerError!*zir.Inst { const tracy = trace(@src()); defer tracy.end(); const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - const ident_name = try mod.identifierTokenString(scope, ident.token); - const src = tree.token_locs[ident.token].start; + + const ident_token = main_tokens[ident]; + const 
ident_name = try mod.identifierTokenString(scope, ident_token); + const src = token_starts[ident_token]; if (mem.eql(u8, ident_name, "_")) { - return mod.failNode(scope, &ident.base, "TODO implement '_' identifier", .{}); + return mod.failNode(scope, ident, "TODO implement '_' identifier", .{}); } if (getSimplePrimitiveValue(ident_name)) |typed_value| { @@ -2634,7 +2682,7 @@ fn identifier(mod: *Module, scope: *Scope, rl: ResultLoc, ident: *ast.Node.OneTo const bit_count = std.fmt.parseInt(u16, ident_name[1..], 10) catch |err| switch (err) { error.Overflow => return mod.failNode( scope, - &ident.base, + ident, "primitive integer type '{s}' exceeds maximum bit width of 65535", .{ident_name}, ), @@ -2698,64 +2746,91 @@ fn identifier(mod: *Module, scope: *Scope, rl: ResultLoc, ident: *ast.Node.OneTo } } - return mod.failNode(scope, &ident.base, "use of undeclared identifier '{s}'", .{ident_name}); + return mod.failNode(scope, ident, "use of undeclared identifier '{s}'", .{ident_name}); } -fn stringLiteral(mod: *Module, scope: *Scope, str_lit: *ast.Node.OneToken) InnerError!*zir.Inst { +fn stringLiteral( + mod: *Module, + scope: *Scope, + rl: ResultLoc, + str_lit: ast.Node.Index, +) InnerError!*zir.Inst { const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - const unparsed_bytes = tree.tokenSlice(str_lit.token); + + const str_lit_token = main_tokens[str_lit]; + const unparsed_bytes = tree.tokenSlice(str_lit_token); const arena = scope.arena(); var bad_index: usize = undefined; const bytes = std.zig.parseStringLiteral(arena, unparsed_bytes, &bad_index) catch |err| switch (err) { error.InvalidCharacter => { const bad_byte = unparsed_bytes[bad_index]; - const src = tree.token_locs[str_lit.token].start; + const src = token_starts[str_lit_token]; return mod.fail(scope, src + bad_index, "invalid string literal character: '{c}'\n", .{bad_byte}); }, else => |e| return e, }; - const src = 
tree.token_locs[str_lit.token].start; - return addZIRInst(mod, scope, src, zir.Inst.Str, .{ .bytes = bytes }, .{}); + const src = token_starts[str_lit_token]; + const str_inst = try addZIRInst(mod, scope, src, zir.Inst.Str, .{ .bytes = bytes }, .{}); + return rvalue(mod, scope, rl, str_inst); } -fn multilineStrLiteral(mod: *Module, scope: *Scope, node: *ast.Node.MultilineStringLiteral) !*zir.Inst { +fn multilineStringLiteral( + mod: *Module, + scope: *Scope, + rl: ResultLoc, + str_lit: ast.Node.Index, +) InnerError!*zir.Inst { const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - const lines = node.linesConst(); - const src = tree.token_locs[lines[0]].start; - // line lengths and new lines - var len = lines.len - 1; - for (lines) |line| { - // 2 for the '//' + 1 for '\n' - len += tree.tokenSlice(line).len - 3; - } + const start = node_datas[node].lhs; + const end = node_datas[node].rhs; - const bytes = try scope.arena().alloc(u8, len); - var i: usize = 0; - for (lines) |line, line_i| { - if (line_i != 0) { - bytes[i] = '\n'; - i += 1; + // Count the number of bytes to allocate. + const len: usize = len: { + var tok_i = start; + var len: usize = 0; + while (tok_i <= end) : (tok_i += 1) { + // 2 for the '//' + 1 for '\n' + len += tree.tokenSlice(tok_i).len - 3; } - const slice = tree.tokenSlice(line); - mem.copy(u8, bytes[i..], slice[2 .. slice.len - 1]); - i += slice.len - 3; + break :len len; + }; + const bytes = try scope.arena().alloc(u8, len); + // First line: do not append a newline. + var byte_i: usize = 0; + var tok_i = start; + { + const slice = tree.tokenSlice(tok_i); + const line_bytes = slice[2 .. slice.len - 1]; + mem.copy(u8, bytes[byte_i..], line_bytes); + byte_i += line_bytes.len; + tok_i += 1; } - - return addZIRInst(mod, scope, src, zir.Inst.Str, .{ .bytes = bytes }, .{}); + // Following lines: each line prepends a newline. 
+ while (tok_i <= end) : (tok_i += 1) { + bytes[byte_i] = '\n'; + byte_i += 1; + const slice = tree.tokenSlice(tok_i); + const line_bytes = slice[2 .. slice.len - 1]; + mem.copy(u8, bytes[byte_i..], line_bytes); + byte_i += line_bytes.len; + } + const src = token_starts[start]; + const str_inst = try addZIRInst(mod, scope, src, zir.Inst.Str, .{ .bytes = bytes }, .{}); + return rvalue(mod, scope, rl, str_inst); } fn charLiteral(mod: *Module, scope: *Scope, node: *ast.Node.OneToken) !*zir.Inst { const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - const src = tree.token_locs[node.token].start; + const src = token_starts[node.token]; const slice = tree.tokenSlice(node.token); var bad_index: usize = undefined; @@ -2772,13 +2847,19 @@ fn charLiteral(mod: *Module, scope: *Scope, node: *ast.Node.OneToken) !*zir.Inst }); } -fn integerLiteral(mod: *Module, scope: *Scope, int_lit: *ast.Node.OneToken) InnerError!*zir.Inst { +fn integerLiteral( + mod: *Module, + scope: *Scope, + rl: ResultLoc, + int_lit: ast.Node.Index, +) InnerError!*zir.Inst { const arena = scope.arena(); const tree = scope.tree(); - const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - const prefixed_bytes = tree.tokenSlice(int_lit.token); - const base = if (mem.startsWith(u8, prefixed_bytes, "0x")) + + const int_token = main_tokens[int_lit]; + const prefixed_bytes = tree.tokenSlice(int_token); + const base: u8 = if (mem.startsWith(u8, prefixed_bytes, "0x")) 16 else if (mem.startsWith(u8, prefixed_bytes, "0o")) 8 @@ -2793,13 +2874,14 @@ fn integerLiteral(mod: *Module, scope: *Scope, int_lit: *ast.Node.OneToken) Inne prefixed_bytes[2..]; if (std.fmt.parseInt(u64, bytes, base)) |small_int| { - const src = tree.token_locs[int_lit.token].start; - return addZIRInstConst(mod, scope, src, .{ + const src = token_starts[int_token]; + const result = try addZIRInstConst(mod, scope, src, .{ .ty = 
Type.initTag(.comptime_int), .val = try Value.Tag.int_u64.create(arena, small_int), }); + return rvalue(mod, scope, rl, result); } else |err| { - return mod.failTok(scope, int_lit.token, "TODO implement int literals that don't fit in a u64", .{}); + return mod.failTok(scope, int_token, "TODO implement int literals that don't fit in a u64", .{}); } } @@ -2816,7 +2898,7 @@ fn floatLiteral(mod: *Module, scope: *Scope, float_lit: *ast.Node.OneToken) Inne const float_number = std.fmt.parseFloat(f128, bytes) catch |e| switch (e) { error.InvalidCharacter => unreachable, // validated by tokenizer }; - const src = tree.token_locs[float_lit.token].start; + const src = token_starts[float_lit.token]; return addZIRInstConst(mod, scope, src, .{ .ty = Type.initTag(.comptime_float), .val = try Value.Tag.float_128.create(arena, float_number), @@ -2828,7 +2910,7 @@ fn undefLiteral(mod: *Module, scope: *Scope, node: *ast.Node.OneToken) InnerErro const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - const src = tree.token_locs[node.token].start; + const src = token_starts[node.token]; return addZIRInstConst(mod, scope, src, .{ .ty = Type.initTag(.@"undefined"), .val = Value.initTag(.undef), @@ -2840,7 +2922,7 @@ fn boolLiteral(mod: *Module, scope: *Scope, node: *ast.Node.OneToken) InnerError const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - const src = tree.token_locs[node.token].start; + const src = token_starts[node.token]; return addZIRInstConst(mod, scope, src, .{ .ty = Type.initTag(.bool), .val = switch (tree.token_ids[node.token]) { @@ -2856,34 +2938,34 @@ fn nullLiteral(mod: *Module, scope: *Scope, node: *ast.Node.OneToken) InnerError const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - const src = tree.token_locs[node.token].start; + const src = 
token_starts[node.token]; return addZIRInstConst(mod, scope, src, .{ .ty = Type.initTag(.@"null"), .val = Value.initTag(.null_value), }); } -fn assembly(mod: *Module, scope: *Scope, asm_node: *ast.Node.@"asm") InnerError!*zir.Inst { - if (asm_node.outputs.len != 0) { - return mod.failNode(scope, &asm_node.base, "TODO implement asm with an output", .{}); - } +fn assembly(mod: *Module, scope: *Scope, rl: ResultLoc, full: ast.full.Asm) InnerError!*zir.Inst { const arena = scope.arena(); const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - const inputs = try arena.alloc(*zir.Inst, asm_node.inputs.len); - const args = try arena.alloc(*zir.Inst, asm_node.inputs.len); + if (full.outputs.len != 0) { + return mod.failTok(scope, full.ast.asm_token, "TODO implement asm with an output", .{}); + } - const src = tree.token_locs[asm_node.asm_token].start; + const inputs = try arena.alloc(*zir.Inst, full.inputs.len); + const args = try arena.alloc(*zir.Inst, full.inputs.len); + const src = token_starts[full.ast.asm_token]; const str_type = try addZIRInstConst(mod, scope, src, .{ .ty = Type.initTag(.type), .val = Value.initTag(.const_slice_u8_type), }); const str_type_rl: ResultLoc = .{ .ty = str_type }; - for (asm_node.inputs) |input, i| { + for (full.inputs) |input, i| { // TODO semantically analyze constraints inputs[i] = try expr(mod, scope, str_type_rl, input.constraint); args[i] = try expr(mod, scope, .none, input.expr); @@ -2894,15 +2976,15 @@ fn assembly(mod: *Module, scope: *Scope, asm_node: *ast.Node.@"asm") InnerError! 
.val = Value.initTag(.void_type), }); const asm_inst = try addZIRInst(mod, scope, src, zir.Inst.@"asm", .{ - .asm_source = try expr(mod, scope, str_type_rl, asm_node.template), + .asm_source = try expr(mod, scope, str_type_rl, full.ast.template), .return_type = return_type, }, .{ - .@"volatile" = asm_node.volatile_token != null, + .@"volatile" = full.volatile_token != null, //.clobbers = TODO handle clobbers .inputs = inputs, .args = args, }); - return asm_inst; + return rvalue(mod, scope, rl, asm_inst); } fn ensureBuiltinParamCount(mod: *Module, scope: *Scope, call: *ast.Node.builtin_call, count: u32) !void { @@ -2924,7 +3006,7 @@ fn simpleCast( const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - const src = tree.token_locs[call.builtin_token].start; + const src = token_starts[call.builtin_token]; const params = call.params(); const dest_type = try typeExpr(mod, scope, params[0]); const rhs = try expr(mod, scope, .none, params[1]); @@ -2938,7 +3020,7 @@ fn ptrToInt(mod: *Module, scope: *Scope, call: *ast.Node.builtin_call) InnerErro const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - const src = tree.token_locs[call.builtin_token].start; + const src = token_starts[call.builtin_token]; return addZIRUnOp(mod, scope, src, .ptrtoint, operand); } @@ -2952,7 +3034,7 @@ fn as( const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - const src = tree.token_locs[call.builtin_token].start; + const src = token_starts[call.builtin_token]; const params = call.params(); const dest_type = try typeExpr(mod, scope, params[0]); switch (rl) { @@ -3028,7 +3110,7 @@ fn bitCast(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.builtin_c const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - const src 
= tree.token_locs[call.builtin_token].start; + const src = token_starts[call.builtin_token]; const params = call.params(); const dest_type = try typeExpr(mod, scope, params[0]); switch (rl) { @@ -3074,7 +3156,7 @@ fn import(mod: *Module, scope: *Scope, call: *ast.Node.builtin_call) InnerError! const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - const src = tree.token_locs[call.builtin_token].start; + const src = token_starts[call.builtin_token]; const params = call.params(); const target = try expr(mod, scope, .none, params[0]); return addZIRUnOp(mod, scope, src, .import, target); @@ -3085,7 +3167,7 @@ fn compileError(mod: *Module, scope: *Scope, call: *ast.Node.builtin_call) Inner const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - const src = tree.token_locs[call.builtin_token].start; + const src = token_starts[call.builtin_token]; const params = call.params(); const target = try expr(mod, scope, .none, params[0]); return addZIRUnOp(mod, scope, src, .compile_error, target); @@ -3096,7 +3178,7 @@ fn setEvalBranchQuota(mod: *Module, scope: *Scope, call: *ast.Node.builtin_call) const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - const src = tree.token_locs[call.builtin_token].start; + const src = token_starts[call.builtin_token]; const params = call.params(); const u32_type = try addZIRInstConst(mod, scope, src, .{ .ty = Type.initTag(.type), @@ -3111,7 +3193,7 @@ fn typeOf(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.builtin_ca const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); const arena = scope.arena(); - const src = tree.token_locs[call.builtin_token].start; + const src = token_starts[call.builtin_token]; const params = call.params(); if (params.len < 1) { return mod.failTok(scope, 
call.builtin_token, "expected at least 1 argument, found 0", .{}); @@ -3129,7 +3211,7 @@ fn compileLog(mod: *Module, scope: *Scope, call: *ast.Node.builtin_call) InnerEr const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); const arena = scope.arena(); - const src = tree.token_locs[call.builtin_token].start; + const src = token_starts[call.builtin_token]; const params = call.params(); var targets = try arena.alloc(*zir.Inst, params.len); for (params) |param, param_i| @@ -3161,7 +3243,7 @@ fn builtinCall(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.built } else if (mem.eql(u8, builtin_name, "@TypeOf")) { return typeOf(mod, scope, rl, call); } else if (mem.eql(u8, builtin_name, "@breakpoint")) { - const src = tree.token_locs[call.builtin_token].start; + const src = token_starts[call.builtin_token]; return rvalue(mod, scope, rl, try addZIRNoOp(mod, scope, src, .breakpoint)); } else if (mem.eql(u8, builtin_name, "@import")) { return rvalue(mod, scope, rl, try import(mod, scope, call)); @@ -3187,7 +3269,7 @@ fn callExpr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.call) In const param_nodes = node.params(); const args = try scope.getGenZIR().arena.alloc(*zir.Inst, param_nodes.len); for (param_nodes) |param_node, i| { - const param_src = tree.token_locs[param_node.firstToken()].start; + const param_src = token_starts[tree.firstToken(param_node)]; const param_type = try addZIRInst(mod, scope, param_src, zir.Inst.ParamType, .{ .func = lhs, .arg_index = i, @@ -3195,7 +3277,7 @@ fn callExpr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.call) In args[i] = try expr(mod, scope, .{ .ty = param_type }, param_node); } - const src = tree.token_locs[node.lhs.firstToken()].start; + const src = token_starts[node.lhs.firstToken()]; const result = try addZIRInst(mod, scope, src, zir.Inst.Call, .{ .func = lhs, .args = args, @@ -3208,7 +3290,7 @@ fn unreach(mod: *Module, scope: *Scope, unreach_node: 
*ast.Node.OneToken) InnerE const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - const src = tree.token_locs[unreach_node.token].start; + const src = token_starts[unreach_node.token]; return addZIRNoOp(mod, scope, src, .unreachable_safe); } @@ -3528,6 +3610,8 @@ fn rvalue(mod: *Module, scope: *Scope, rl: ResultLoc, result: *zir.Inst) InnerEr } } +/// TODO when reworking ZIR memory layout, make the void value correspond to a hard coded +/// index; that way this does not actually need to allocate anything. fn rvalueVoid( mod: *Module, scope: *Scope, From bbf5a4d7c5726baf933e303e6c61c6bba38b694b Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Mon, 15 Feb 2021 13:54:48 +0100 Subject: [PATCH 061/173] zig fmt: fix inferred error union return types --- lib/std/zig/parser_test.zig | 78 ++++++++++++++++++------------------- lib/std/zig/render.zig | 2 +- 2 files changed, 40 insertions(+), 40 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 11bb4d8ceb..e4778b35e1 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -442,23 +442,23 @@ test "zig fmt: array types last token" { ); } -//test "zig fmt: sentinel-terminated array type" { -// try testCanonical( -// \\pub fn cStrToPrefixedFileW(s: [*:0]const u8) ![PATH_MAX_WIDE:0]u16 { -// \\ return sliceToPrefixedFileW(mem.toSliceConst(u8, s)); -// \\} -// \\ -// ); -//} -// -//test "zig fmt: sentinel-terminated slice type" { -// try testCanonical( -// \\pub fn toSlice(self: Buffer) [:0]u8 { -// \\ return self.list.toSlice()[0..self.len()]; -// \\} -// \\ -// ); -//} +test "zig fmt: sentinel-terminated array type" { + try testCanonical( + \\pub fn cStrToPrefixedFileW(s: [*:0]const u8) ![PATH_MAX_WIDE:0]u16 { + \\ return sliceToPrefixedFileW(mem.toSliceConst(u8, s)); + \\} + \\ + ); +} + +test "zig fmt: sentinel-terminated slice type" { + try testCanonical( + \\pub fn toSlice(self: Buffer) [:0]u8 { + 
\\ return self.list.toSlice()[0..self.len()]; + \\} + \\ + ); +} test "zig fmt: pointer-to-one with modifiers" { try testCanonical( @@ -2669,28 +2669,28 @@ test "zig fmt: call expression" { // \\ // ); //} -// -//test "zig fmt: functions" { -// try testCanonical( -// \\extern fn puts(s: *const u8) c_int; -// \\extern "c" fn puts(s: *const u8) c_int; -// \\export fn puts(s: *const u8) c_int; -// \\fn puts(s: *const u8) callconv(.Inline) c_int; -// \\noinline fn puts(s: *const u8) c_int; -// \\pub extern fn puts(s: *const u8) c_int; -// \\pub extern "c" fn puts(s: *const u8) c_int; -// \\pub export fn puts(s: *const u8) c_int; -// \\pub fn puts(s: *const u8) callconv(.Inline) c_int; -// \\pub noinline fn puts(s: *const u8) c_int; -// \\pub extern fn puts(s: *const u8) align(2 + 2) c_int; -// \\pub extern "c" fn puts(s: *const u8) align(2 + 2) c_int; -// \\pub export fn puts(s: *const u8) align(2 + 2) c_int; -// \\pub fn puts(s: *const u8) align(2 + 2) callconv(.Inline) c_int; -// \\pub noinline fn puts(s: *const u8) align(2 + 2) c_int; -// \\ -// ); -//} -// + +test "zig fmt: functions" { + try testCanonical( + \\extern fn puts(s: *const u8) c_int; + \\extern "c" fn puts(s: *const u8) c_int; + \\export fn puts(s: *const u8) c_int; + \\fn puts(s: *const u8) callconv(.Inline) c_int; + \\noinline fn puts(s: *const u8) c_int; + \\pub extern fn puts(s: *const u8) c_int; + \\pub extern "c" fn puts(s: *const u8) c_int; + \\pub export fn puts(s: *const u8) c_int; + \\pub fn puts(s: *const u8) callconv(.Inline) c_int; + \\pub noinline fn puts(s: *const u8) c_int; + \\pub extern fn puts(s: *const u8) align(2 + 2) c_int; + \\pub extern "c" fn puts(s: *const u8) align(2 + 2) c_int; + \\pub export fn puts(s: *const u8) align(2 + 2) c_int; + \\pub fn puts(s: *const u8) align(2 + 2) callconv(.Inline) c_int; + \\pub noinline fn puts(s: *const u8) align(2 + 2) c_int; + \\ + ); +} + //test "zig fmt: multiline string" { // try testCanonical( // \\test "" { diff --git 
a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 4aea213d2a..2d9c2ae9a9 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -1250,7 +1250,7 @@ fn renderFnProto(ais: *Ais, tree: ast.Tree, fn_proto: ast.full.FnProto, space: S const rparen = blk: { // These may appear in any order, so we have to check the token_starts array // to find out which is first. - var rparen: ast.TokenIndex = maybe_bang; + var rparen = if (token_tags[maybe_bang] == .bang) maybe_bang - 1 else maybe_bang; var smallest_start = token_starts[maybe_bang]; if (fn_proto.ast.align_expr != 0) { const tok = tree.firstToken(fn_proto.ast.align_expr) - 3; From 4006a3afb31f89be28721bdcd50fa64de63d6cbb Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Mon, 15 Feb 2021 22:35:50 -0700 Subject: [PATCH 062/173] astgen: update more expression types to new mem layout additionally introduce a new file to centralize all the data about builtin functions that we have, including: * enum tag identifying the builtin function * number of parameters. * whether the expression may need a memory location. * whether the expression allows an lvalue (currently only true for `@field`). Now there is only one ComptimeStringMap that has this data as the value, and we dispatch on the enum tag in order to asgen the builtin function. In particular this simplifies the logic for checking the number of parameters. This removes some untested code paths from if and while, which need to be restored with #7929 in mind. After this there are only a handful left of expression types to rework to the new memory layout, and then it will be only compile errors left to solve. 
--- src/BuiltinFn.zig | 841 +++++++++++++++++++++++++++++++++++ src/astgen.zig | 1081 ++++++++++++++++++++++----------------------- 2 files changed, 1360 insertions(+), 562 deletions(-) create mode 100644 src/BuiltinFn.zig diff --git a/src/BuiltinFn.zig b/src/BuiltinFn.zig new file mode 100644 index 0000000000..9776edfef3 --- /dev/null +++ b/src/BuiltinFn.zig @@ -0,0 +1,841 @@ +const std = @import("std"); + +pub const Tag = enum { + add_with_overflow, + align_cast, + align_of, + as, + async_call, + atomic_load, + atomic_rmw, + atomic_store, + bit_cast, + bit_offset_of, + bool_to_int, + bit_size_of, + breakpoint, + mul_add, + byte_swap, + bit_reverse, + byte_offset_of, + call, + c_define, + c_import, + c_include, + clz, + cmpxchg_strong, + cmpxchg_weak, + compile_error, + compile_log, + ctz, + c_undef, + div_exact, + div_floor, + div_trunc, + embed_file, + enum_to_int, + error_name, + error_return_trace, + error_to_int, + err_set_cast, + @"export", + fence, + field, + field_parent_ptr, + float_cast, + float_to_int, + frame, + Frame, + frame_address, + frame_size, + has_decl, + has_field, + import, + int_cast, + int_to_enum, + int_to_error, + int_to_float, + int_to_ptr, + memcpy, + memset, + wasm_memory_size, + wasm_memory_grow, + mod, + mul_with_overflow, + panic, + pop_count, + ptr_cast, + ptr_to_int, + rem, + return_address, + set_align_stack, + set_cold, + set_eval_branch_quota, + set_float_mode, + set_runtime_safety, + shl_exact, + shl_with_overflow, + shr_exact, + shuffle, + size_of, + splat, + reduce, + src, + sqrt, + sin, + cos, + exp, + exp2, + log, + log2, + log10, + fabs, + floor, + ceil, + trunc, + round, + sub_with_overflow, + tag_name, + This, + truncate, + Type, + type_info, + type_name, + TypeOf, + union_init, +}; + +tag: Tag, + +/// `true` if the builtin call can take advantage of a result location pointer. +needs_mem_loc: bool = false, +/// `true` if the builtin call can be the left-hand side of an expression (assigned to). 
+allows_lvalue: bool = false, +/// The number of parameters to this builtin function. `null` means variable number +/// of parameters. +param_count: ?u8, + +pub const list = std.ComptimeStringMap(@This(), .{ + .{ + "@addWithOverflow", + .{ + .tag = .add_with_overflow, + .param_count = 4, + }, + }, + .{ + "@alignCast", + .{ + .tag = align_cast, + .param_count = 1, + }, + }, + .{ + "@alignOf", + .{ + .tag = .align_of, + .param_count = 1, + }, + }, + .{ + "@as", + .{ + .tag = .as, + .needs_mem_loc = true, + .param_count = 2, + }, + }, + .{ + "@asyncCall", + .{ + .tag = .async_call, + .param_count = null, + }, + }, + .{ + "@atomicLoad", + .{ + .tag = .atomic_load, + .param_count = 3, + }, + }, + .{ + "@atomicRmw", + .{ + .tag = .atomic_rmw, + .param_count = 5, + }, + }, + .{ + "@atomicStore", + .{ + .tag = .atomic_store, + .param_count = 4, + }, + }, + .{ + "@bitCast", + .{ + .tag = .bit_cast, + .needs_mem_loc = true, + .param_count = 2, + }, + }, + .{ + "@bitOffsetOf", + .{ + .tag = .bit_offset_of, + .param_count = 2, + }, + }, + .{ + "@boolToInt", + .{ + .tag = .bool_to_int, + .param_count = 1, + }, + }, + .{ + "@bitSizeOf", + .{ + .tag = .bit_size_of, + .param_count = 1, + }, + }, + .{ + "@breakpoint", + .{ + .tag = .breakpoint, + .param_count = 0, + }, + }, + .{ + "@mulAdd", + .{ + .tag = .mul_add, + .param_count = 4, + }, + }, + .{ + "@byteSwap", + .{ + .tag = .byte_swap, + .param_count = 2, + }, + }, + .{ + "@bitReverse", + .{ + .tag = .bit_reverse, + .param_count = 2, + }, + }, + .{ + "@byteOffsetOf", + .{ + .tag = .byte_offset_of, + .param_count = 2, + }, + }, + .{ + "@call", + .{ + .tag = .call, + .needs_mem_loc = true, + .param_count = 3, + }, + }, + .{ + "@cDefine", + .{ + .tag = .c_define, + .param_count = 2, + }, + }, + .{ + "@cImport", + .{ + .tag = .c_import, + .param_count = 1, + }, + }, + .{ + "@cInclude", + .{ + .tag = .c_include, + .param_count = 1, + }, + }, + .{ + "@clz", + .{ + .tag = .clz, + .param_count = 2, + }, + }, + .{ + "@cmpxchgStrong", + 
.{ + .tag = .cmpxchg_strong, + .param_count = 6, + }, + }, + .{ + "@cmpxchgWeak", + .{ + .tag = .cmpxchg_weak, + .param_count = 6, + }, + }, + .{ + "@compileError", + .{ + .tag = .compile_error, + .param_count = 1, + }, + }, + .{ + "@compileLog", + .{ + .tag = .compile_log, + .param_count = null, + }, + }, + .{ + "@ctz", + .{ + .tag = .ctz, + .param_count = 2, + }, + }, + .{ + "@cUndef", + .{ + .tag = .c_undef, + .param_count = 1, + }, + }, + .{ + "@divExact", + .{ + .tag = .div_exact, + .param_count = 2, + }, + }, + .{ + "@divFloor", + .{ + .tag = .div_floor, + .param_count = 2, + }, + }, + .{ + "@divTrunc", + .{ + .tag = .div_trunc, + .param_count = 2, + }, + }, + .{ + "@embedFile", + .{ + .tag = .embed_file, + .param_count = 1, + }, + }, + .{ + "@enumToInt", + .{ + .tag = .enum_to_int, + .param_count = 1, + }, + }, + .{ + "@errorName", + .{ + .tag = .error_name, + .param_count = 1, + }, + }, + .{ + "@errorReturnTrace", + .{ + .tag = .error_return_trace, + .param_count = 0, + }, + }, + .{ + "@errorToInt", + .{ + .tag = .error_to_int, + .param_count = 1, + }, + }, + .{ + "@errSetCast", + .{ + .tag = .err_set_cast, + .param_count = 2, + }, + }, + .{ + "@export", + .{ + .tag = .@"export", + .param_count = 2, + }, + }, + .{ + "@fence", + .{ + .tag = .fence, + .param_count = 0, + }, + }, + .{ + "@field", + .{ + .tag = .field, + .needs_mem_loc = true, + .param_count = 2, + .allows_lvalue = true, + }, + }, + .{ + "@fieldParentPtr", + .{ + .tag = .field_parent_ptr, + .param_count = 3, + }, + }, + .{ + "@floatCast", + .{ + .tag = .float_cast, + .param_count = 1, + }, + }, + .{ + "@floatToInt", + .{ + .tag = .float_to_int, + .param_count = 1, + }, + }, + .{ + "@frame", + .{ + .tag = .frame, + .param_count = 0, + }, + }, + .{ + "@Frame", + .{ + .tag = .Frame, + .param_count = 1, + }, + }, + .{ + "@frameAddress", + .{ + .tag = .frame_address, + .param_count = 0, + }, + }, + .{ + "@frameSize", + .{ + .tag = .frame_size, + .param_count = 1, + }, + }, + .{ + "@hasDecl", + .{ + 
.tag = .has_decl, + .param_count = 2, + }, + }, + .{ + "@hasField", + .{ + .tag = .has_field, + .param_count = 2, + }, + }, + .{ + "@import", + .{ + .tag = .import, + .param_count = 1, + }, + }, + .{ + "@intCast", + .{ + .tag = .int_cast, + .param_count = 1, + }, + }, + .{ + "@intToEnum", + .{ + .tag = .int_to_enum, + .param_count = 1, + }, + }, + .{ + "@intToError", + .{ + .tag = .int_to_error, + .param_count = 1, + }, + }, + .{ + "@intToFloat", + .{ + .tag = .int_to_float, + .param_count = 1, + }, + }, + .{ + "@intToPtr", + .{ + .tag = .int_to_ptr, + .param_count = 2, + }, + }, + .{ + "@memcpy", + .{ + .tag = .memcpy, + .param_count = 3, + }, + }, + .{ + "@memset", + .{ + .tag = .memset, + .param_count = 3, + }, + }, + .{ + "@wasmMemorySize", + .{ + .tag = .wasm_memory_size, + .param_count = 1, + }, + }, + .{ + "@wasmMemoryGrow", + .{ + .tag = .wasm_memory_grow, + .param_count = 2, + }, + }, + .{ + "@mod", + .{ + .tag = .mod, + .param_count = 2, + }, + }, + .{ + "@mulWithOverflow", + .{ + .tag = .mul_with_overflow, + .param_count = 4, + }, + }, + .{ + "@panic", + .{ + .tag = .panic, + .param_count = 1, + }, + }, + .{ + "@popCount", + .{ + .tag = .pop_count, + .param_count = 2, + }, + }, + .{ + "@ptrCast", + .{ + .tag = .ptr_cast, + .param_count = 2, + }, + }, + .{ + "@ptrToInt", + .{ + .tag = .ptr_to_int, + .param_count = 1, + }, + }, + .{ + "@rem", + .{ + .tag = .rem, + .param_count = 2, + }, + }, + .{ + "@returnAddress", + .{ + .tag = .return_address, + .param_count = 0, + }, + }, + .{ + "@setAlignStack", + .{ + .tag = .set_align_stack, + .param_count = 1, + }, + }, + .{ + "@setCold", + .{ + .tag = .set_cold, + .param_count = 1, + }, + }, + .{ + "@setEvalBranchQuota", + .{ + .tag = .set_eval_branch_quota, + .param_count = 1, + }, + }, + .{ + "@setFloatMode", + .{ + .tag = .set_float_mode, + .param_count = 1, + }, + }, + .{ + "@setRuntimeSafety", + .{ + .tag = .set_runtime_safety, + .param_count = 1, + }, + }, + .{ + "@shlExact", + .{ + .tag = .shl_exact, + 
.param_count = 2, + }, + }, + .{ + "@shlWithOverflow", + .{ + .tag = .shl_with_overflow, + .param_count = 4, + }, + }, + .{ + "@shrExact", + .{ + .tag = .shr_exact, + .param_count = 2, + }, + }, + .{ + "@shuffle", + .{ + .tag = .shuffle, + .param_count = 4, + }, + }, + .{ + "@sizeOf", + .{ + .tag = .size_of, + .param_count = 1, + }, + }, + .{ + "@splat", + .{ + .tag = .splat, + .needs_mem_loc = true, + .param_count = 2, + }, + }, + .{ + "@reduce", + .{ + .tag = .reduce, + .param_count = 2, + }, + }, + .{ + "@src", + .{ + .tag = .src, + .needs_mem_loc = true, + .param_count = 0, + }, + }, + .{ + "@sqrt", + .{ + .tag = .sqrt, + .param_count = 1, + }, + }, + .{ + "@sin", + .{ + .tag = .sin, + .param_count = 1, + }, + }, + .{ + "@cos", + .{ + .tag = .cos, + .param_count = 1, + }, + }, + .{ + "@exp", + .{ + .tag = .exp, + .param_count = 1, + }, + }, + .{ + "@exp2", + .{ + .tag = .exp2, + .param_count = 1, + }, + }, + .{ + "@log", + .{ + .tag = .log, + .param_count = 1, + }, + }, + .{ + "@log2", + .{ + .tag = .log2, + .param_count = 1, + }, + }, + .{ + "@log10", + .{ + .tag = .log10, + .param_count = 1, + }, + }, + .{ + "@fabs", + .{ + .tag = .fabs, + .param_count = 1, + }, + }, + .{ + "@floor", + .{ + .tag = .floor, + .param_count = 1, + }, + }, + .{ + "@ceil", + .{ + .tag = .ceil, + .param_count = 1, + }, + }, + .{ + "@trunc", + .{ + .tag = .trunc, + .param_count = 1, + }, + }, + .{ + "@round", + .{ + .tag = .round, + .param_count = 1, + }, + }, + .{ + "@subWithOverflow", + .{ + .tag = .sub_with_overflow, + .param_count = 4, + }, + }, + .{ + "@tagName", + .{ + .tag = .tag_name, + .param_count = 1, + }, + }, + .{ + "@This", + .{ + .tag = .This, + .param_count = 0, + }, + }, + .{ + "@truncate", + .{ + .tag = .truncate, + .param_count = 2, + }, + }, + .{ + "@Type", + .{ + .tag = .Type, + .param_count = 1, + }, + }, + .{ + "@typeInfo", + .{ + .tag = .type_info, + .param_count = 1, + }, + }, + .{ + "@typeName", + .{ + .tag = .type_name, + .param_count = 1, + }, + }, + .{ + 
"@TypeOf", + .{ + .tag = .TypeOf, + .param_count = null, + }, + }, + .{ + "@unionInit", + .{ + .tag = .union_init, + .needs_mem_loc = true, + .param_count = 3, + }, + }, +}); diff --git a/src/astgen.zig b/src/astgen.zig index da0c93bb8e..56d1497f63 100644 --- a/src/astgen.zig +++ b/src/astgen.zig @@ -1,16 +1,18 @@ const std = @import("std"); const mem = std.mem; const Allocator = std.mem.Allocator; +const assert = std.debug.assert; + const Value = @import("value.zig").Value; const Type = @import("type.zig").Type; const TypedValue = @import("TypedValue.zig"); -const assert = std.debug.assert; const zir = @import("zir.zig"); const Module = @import("Module.zig"); const ast = std.zig.ast; const trace = @import("tracy.zig").trace; const Scope = Module.Scope; const InnerError = Module.InnerError; +const BuiltinFn = @import("BuiltinFn.zig"); pub const ResultLoc = union(enum) { /// The expression is the right-hand side of assignment to `_`. Only the side-effects of the @@ -172,16 +174,21 @@ fn lvalExpr(mod: *Module, scope: *Scope, node: ast.Node.Index) InnerError!*zir.I .ContainerDecl, .@"comptime", .@"nosuspend", - .builtin_call, - .builtin_call_comma, => return mod.failNode(scope, node, "invalid left-hand side to assignment", .{}), - // `@field` can be assigned to. - .builtin_call_two, .builtin_call_two_comma => { + .builtin_call, + .builtin_call_comma, + .builtin_call_two, + .builtin_call_two_comma, + => { const builtin_token = main_tokens[node]; const builtin_name = tree.tokenSlice(builtin_token); - if (!mem.eql(u8, builtin_name, "@field")) { - return mod.failNode(scope, node, "invalid left-hand side to assignment", .{}); + // If the builtin is an invalid name, we don't cause an error here; instead + // let it pass, and the error will be "invalid builtin function" later. 
+ if (BuiltinFn.list.get(builtin_name)) |info| { + if (!info.allows_lvalue) { + return mod.failNode(scope, node, "invalid left-hand side to assignment", .{}); + } } }, @@ -276,22 +283,111 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) In .multiline_string_literal => return multilineStringLiteral(mod, scope, rl, node), .integer_literal => return integerLiteral(mod, scope, rl, node), - .builtin_call => return builtinCall(mod, scope, rl, node), - .call => return callExpr(mod, scope, rl, node), - .@"unreachable" => return unreach(mod, scope, node), - .@"return" => return ret(mod, scope, node), - .@"if" => return ifExpr(mod, scope, rl, node), - .@"while" => return whileExpr(mod, scope, rl, node), - .period => return field(mod, scope, rl, node), - .deref => return rvalue(mod, scope, rl, try deref(mod, scope, node)), - .address_of => return rvalue(mod, scope, rl, try addressOf(mod, scope, node)), - .float_literal => return rvalue(mod, scope, rl, try floatLiteral(mod, scope, node)), - .undefined_literal => return rvalue(mod, scope, rl, try undefLiteral(mod, scope, node)), - .bool_literal => return rvalue(mod, scope, rl, try boolLiteral(mod, scope, node)), - .null_literal => return rvalue(mod, scope, rl, try nullLiteral(mod, scope, node)), - .optional_type => return rvalue(mod, scope, rl, try optionalType(mod, scope, node)), - .unwrap_optional => return unwrapOptional(mod, scope, rl, node), + .builtin_call => return builtinCall(mod, scope, rl, node), + + .builtin_call_two, .builtin_call_two_comma => { + if (datas[node].lhs == 0) { + const params = [_]ast.Node.Index{}; + return builtinCall(mod, scope, rl, node, ¶ms); + } else if (datas[node].rhs == 0) { + const params = [_]ast.Node.Index{datas[node].lhs}; + return builtinCall(mod, scope, rl, node, ¶ms); + } else { + const params = [_]ast.Node.Index{ datas[node].lhs, datas[node].rhs }; + return builtinCall(mod, scope, rl, node, ¶ms); + } + }, + .builtin_call, .builtin_call_comma => { + const 
params = tree.extra_data[datas[node].lhs..datas[node].rhs]; + return builtinCall(mod, scope, rl, node, params); + }, + + .call_one, .call_one_comma, .async_call_one, .async_call_one_comma => { + var params: [1]ast.Node.Index = undefined; + return callExpr(mod, scope, rl, tree.callOne(¶ms, node)); + }, + .call, .call_comma, .async_call, .async_call_comma => { + return callExpr(mod, scope, rl, tree.callFull(node)); + }, + + .@"unreachable" => { + const main_token = main_tokens[node]; + const src = token_starts[main_token]; + return addZIRNoOp(mod, scope, src, .unreachable_safe); + }, + .@"return" => return ret(mod, scope, node), + .period => return field(mod, scope, rl, node), + .float_literal => return floatLiteral(mod, scope, rl, node), + + .if_simple => return ifExpr(mod, scope, rl, tree.ifSimple(node)), + .@"if" => return ifExpr(mode, scope, rl, tree.ifFull(node)), + + .while_simple => return whileExpr(mod, scope, rl, tree.whileSimple(node)), + .while_cont => return whileExpr(mod, scope, tree.whileCont(node)), + .@"while" => return whileExpr(mod, scope, rl, tree.whileFull(node)), + + .deref => { + const lhs = try expr(mod, scope, .none, node_datas[node].lhs); + const src = token_starts[main_tokens[node]]; + const result = try addZIRUnOp(mod, scope, src, .deref, lhs); + return rvalue(mod, scope, rl, result); + }, + .address_of => { + const result = try expr(mod, scope, .ref, node_datas[node].lhs); + return rvalue(mod, scope, rl, result); + }, + .undefined_literal => { + const main_token = main_tokens[node]; + const src = token_starts[main_token]; + const result = try addZIRInstConst(mod, scope, src, .{ + .ty = Type.initTag(.@"undefined"), + .val = Value.initTag(.undef), + }); + return rvalue(mod, scope, rl, result); + }, + .true_literal => { + const main_token = main_tokens[node]; + const src = token_starts[main_token]; + const result = try addZIRInstConst(mod, scope, src, .{ + .ty = Type.initTag(.bool), + .val = Value.initTag(.bool_true), + }); + return 
rvalue(mod, scope, rl, result); + }, + .false_literal => { + const main_token = main_tokens[node]; + const src = token_starts[main_token]; + const result = try addZIRInstConst(mod, scope, src, .{ + .ty = Type.initTag(.bool), + .val = Value.initTag(.bool_false), + }); + return rvalue(mod, scope, rl, result); + }, + .null_literal => { + const main_token = main_tokens[node]; + const src = token_starts[main_token]; + const result = try addZIRInstConst(mod, scope, src, .{ + .ty = Type.initTag(.@"null"), + .val = Value.initTag(.null_value), + }); + return rvalue(mod, scope, rl, result); + }, + .optional_type => { + const src = token_starts[main_tokens[node]]; + const operand = try typeExpr(mod, scope, node_datas[node].lhs); + const result = try addZIRUnOp(mod, scope, src, .optional_type, operand); + return rvalue(mod, scope, rl, result); + }, + .unwrap_optional => { + const operand = try expr(mod, scope, rl, node.lhs); + const op: zir.Inst.Tag = switch (rl) { + .ref => .optional_payload_safe_ptr, + else => .optional_payload_safe, + }; + const src = token_starts[main_tokens[node]]; + return addZIRUnOp(mod, scope, src, op, operand); + }, .block_two, .block_two_semicolon => { const statements = [2]ast.Node.Index{ node_datas[node].lhs, node_datas[node].rhs }; if (node_datas[node].lhs == 0) { @@ -307,7 +403,6 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) In return blockExpr(mod, scope, rl, node, statements); }, - .labeled_block => return labeledBlockExpr(mod, scope, rl, node, .block), .@"break" => return rvalue(mod, scope, rl, try breakExpr(mod, scope, node)), .@"continue" => return rvalue(mod, scope, rl, try continueExpr(mod, scope, node)), .grouped_expression => return expr(mod, scope, rl, node.expr), @@ -521,7 +616,12 @@ pub fn blockExpr( const tracy = trace(@src()); defer tracy.end(); - try blockExprStmts(mod, scope, &block_node.base, statements); + const lbrace = main_tokens[node]; + if (token_tags[lbrace - 1] == .colon) { + return 
labeledBlockExpr(mod, scope, rl, block_node, .block); + } + + try blockExprStmts(mod, scope, block_node, statements); return rvalueVoid(mod, scope, rl, block_node, {}); } @@ -983,17 +1083,6 @@ fn negation( return addZIRBinOp(mod, scope, src, op_inst_tag, lhs, rhs); } -fn addressOf(mod: *Module, scope: *Scope, node: *ast.Node.SimplePrefixOp) InnerError!*zir.Inst { - return expr(mod, scope, .ref, node.rhs); -} - -fn optionalType(mod: *Module, scope: *Scope, node: *ast.Node.SimplePrefixOp) InnerError!*zir.Inst { - const tree = scope.tree(); - const src = token_starts[node.op_token]; - const operand = try typeExpr(mod, scope, node.rhs); - return addZIRUnOp(mod, scope, src, .optional_type, operand); -} - fn sliceType(mod: *Module, scope: *Scope, node: *ast.Node.slice_type) InnerError!*zir.Inst { const tree = scope.tree(); const src = token_starts[node.op_token]; @@ -1123,18 +1212,6 @@ fn enumLiteral(mod: *Module, scope: *Scope, node: *ast.Node.enum_literal) !*zir. return addZIRInst(mod, scope, src, zir.Inst.EnumLiteral, .{ .name = name }, .{}); } -fn unwrapOptional(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.SimpleSuffixOp) InnerError!*zir.Inst { - const tree = scope.tree(); - const src = token_starts[node.rtoken]; - - const operand = try expr(mod, scope, rl, node.lhs); - const op: zir.Inst.Tag = switch (rl) { - .ref => .optional_payload_safe_ptr, - else => .optional_payload_safe, - }; - return addZIRUnOp(mod, scope, src, op, operand); -} - fn containerField( mod: *Module, scope: *Scope, @@ -1583,51 +1660,25 @@ fn tokenIdentEql(mod: *Module, scope: *Scope, token1: ast.TokenIndex, token2: as return mem.eql(u8, ident_name_1, ident_name_2); } -pub fn field(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.SimpleInfixOp) InnerError!*zir.Inst { +pub fn field(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) InnerError!*zir.Inst { const tree = scope.tree(); - const src = token_starts[node.op_token]; - // TODO custom AST node for field 
access so that we don't have to go through a node cast here - const field_name = try mod.identifierTokenString(scope, node.rhs.castTag(.identifier).?.token); + const token_starts = tree.tokens.items(.start); + const main_tokens = tree.nodes.items(.main_token); + const dot_token = main_tokens[node]; + const src = token_starts[dot_token]; + const field_ident = dot_token + 1; + const field_name = try mod.identifierTokenString(scope, field_ident); if (rl == .ref) { return addZirInstTag(mod, scope, src, .field_ptr, .{ .object = try expr(mod, scope, .ref, node.lhs), .field_name = field_name, }); + } else { + return rvalue(mod, scope, rl, try addZirInstTag(mod, scope, src, .field_val, .{ + .object = try expr(mod, scope, .none, node.lhs), + .field_name = field_name, + })); } - return rvalue(mod, scope, rl, try addZirInstTag(mod, scope, src, .field_val, .{ - .object = try expr(mod, scope, .none, node.lhs), - .field_name = field_name, - })); -} - -fn namedField( - mod: *Module, - scope: *Scope, - rl: ResultLoc, - call: *ast.Node.builtin_call, -) InnerError!*zir.Inst { - try ensureBuiltinParamCount(mod, scope, call, 2); - - const tree = scope.tree(); - const src = token_starts[call.builtin_token]; - const params = call.params(); - - const string_type = try addZIRInstConst(mod, scope, src, .{ - .ty = Type.initTag(.type), - .val = Value.initTag(.const_slice_u8_type), - }); - const string_rl: ResultLoc = .{ .ty = string_type }; - - if (rl == .ref) { - return addZirInstTag(mod, scope, src, .field_ptr_named, .{ - .object = try expr(mod, scope, .ref, params[0]), - .field_name = try comptimeExpr(mod, scope, string_rl, params[1]), - }); - } - return rvalue(mod, scope, rl, try addZirInstTag(mod, scope, src, .field_val_named, .{ - .object = try expr(mod, scope, .none, params[0]), - .field_name = try comptimeExpr(mod, scope, string_rl, params[1]), - })); } fn arrayAccess(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.array_access) InnerError!*zir.Inst { @@ -1681,13 +1732,6 
@@ fn sliceExpr(mod: *Module, scope: *Scope, node: *ast.Node.slice) InnerError!*zir ); } -fn deref(mod: *Module, scope: *Scope, node: *ast.Node.SimpleSuffixOp) InnerError!*zir.Inst { - const tree = scope.tree(); - const src = token_starts[node.rtoken]; - const lhs = try expr(mod, scope, .none, node.lhs); - return addZIRUnOp(mod, scope, src, .deref, lhs); -} - fn simpleBinOp( mod: *Module, scope: *Scope, @@ -1794,83 +1838,12 @@ fn boolBinOp( return rvalue(mod, scope, rl, &block.base); } -const CondKind = union(enum) { - bool, - optional: ?*zir.Inst, - err_union: ?*zir.Inst, - - fn cond(self: *CondKind, mod: *Module, block_scope: *Scope.GenZIR, src: usize, cond_node: *ast.Node) !*zir.Inst { - switch (self.*) { - .bool => { - const bool_type = try addZIRInstConst(mod, &block_scope.base, src, .{ - .ty = Type.initTag(.type), - .val = Value.initTag(.bool_type), - }); - return try expr(mod, &block_scope.base, .{ .ty = bool_type }, cond_node); - }, - .optional => { - const cond_ptr = try expr(mod, &block_scope.base, .ref, cond_node); - self.* = .{ .optional = cond_ptr }; - const result = try addZIRUnOp(mod, &block_scope.base, src, .deref, cond_ptr); - return try addZIRUnOp(mod, &block_scope.base, src, .is_non_null, result); - }, - .err_union => { - const err_ptr = try expr(mod, &block_scope.base, .ref, cond_node); - self.* = .{ .err_union = err_ptr }; - const result = try addZIRUnOp(mod, &block_scope.base, src, .deref, err_ptr); - return try addZIRUnOp(mod, &block_scope.base, src, .is_err, result); - }, - } - } - - fn thenSubScope(self: CondKind, mod: *Module, then_scope: *Scope.GenZIR, src: usize, payload_node: ?*ast.Node) !*Scope { - if (self == .bool) return &then_scope.base; - - const payload = payload_node.?.castTag(.PointerPayload) orelse { - // condition is error union and payload is not explicitly ignored - _ = try addZIRUnOp(mod, &then_scope.base, src, .ensure_err_payload_void, self.err_union.?); - return &then_scope.base; - }; - const is_ptr = payload.ptr_token 
!= null; - const ident_node = payload.value_symbol.castTag(.identifier).?; - - // This intentionally does not support @"_" syntax. - const ident_name = then_scope.base.tree().tokenSlice(ident_node.token); - if (mem.eql(u8, ident_name, "_")) { - if (is_ptr) - return mod.failTok(&then_scope.base, payload.ptr_token.?, "pointer modifier invalid on discard", .{}); - return &then_scope.base; - } - - return mod.failNode(&then_scope.base, payload.value_symbol, "TODO implement payload symbols", .{}); - } - - fn elseSubScope(self: CondKind, mod: *Module, else_scope: *Scope.GenZIR, src: usize, payload_node: ?*ast.Node) !*Scope { - if (self != .err_union) return &else_scope.base; - - const payload_ptr = try addZIRUnOp(mod, &else_scope.base, src, .err_union_payload_unsafe_ptr, self.err_union.?); - - const payload = payload_node.?.castTag(.Payload).?; - const ident_node = payload.error_symbol.castTag(.identifier).?; - - // This intentionally does not support @"_" syntax. - const ident_name = else_scope.base.tree().tokenSlice(ident_node.token); - if (mem.eql(u8, ident_name, "_")) { - return &else_scope.base; - } - - return mod.failNode(&else_scope.base, payload.error_symbol, "TODO implement payload symbols", .{}); - } -}; - -fn ifExpr(mod: *Module, scope: *Scope, rl: ResultLoc, if_node: *ast.Node.@"if") InnerError!*zir.Inst { - var cond_kind: CondKind = .bool; - if (if_node.payload) |_| cond_kind = .{ .optional = null }; - if (if_node.@"else") |else_node| { - if (else_node.payload) |payload| { - cond_kind = .{ .err_union = null }; - } - } +fn ifExpr( + mod: *Module, + scope: *Scope, + rl: ResultLoc, + if_full: ast.full.If, +) InnerError!*zir.Inst { var block_scope: Scope.GenZIR = .{ .parent = scope, .decl = scope.ownerDecl().?, @@ -1884,8 +1857,22 @@ fn ifExpr(mod: *Module, scope: *Scope, rl: ResultLoc, if_node: *ast.Node.@"if") const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - const if_src = 
token_starts[if_node.if_token]; - const cond = try cond_kind.cond(mod, &block_scope, if_src, if_node.condition); + const if_src = token_starts[if_full.ast.if_token]; + + const cond = c: { + // TODO https://github.com/ziglang/zig/issues/7929 + if (if_full.ast.error_token) |error_token| { + return mod.failTok(scope, error_token, "TODO implement if error union", .{}); + } else if (if_full.payload_token) |payload_token| { + return mod.failTok(scope, payload_token, "TODO implement if optional", .{}); + } else { + const bool_type = try addZIRInstConst(mod, &block_scope.base, if_src, .{ + .ty = Type.initTag(.type), + .val = Value.initTag(.bool_type), + }); + break :c try expr(mod, &block_scope.base, .{ .ty = bool_type }, if_full.ast.cond_expr); + } + }; const condbr = try addZIRInstSpecial(mod, &block_scope.base, if_src, zir.Inst.CondBr, .{ .condition = cond, @@ -1897,7 +1884,7 @@ fn ifExpr(mod: *Module, scope: *Scope, rl: ResultLoc, if_node: *ast.Node.@"if") .instructions = try block_scope.arena.dupe(*zir.Inst, block_scope.instructions.items), }); - const then_src = token_starts[if_node.body.lastToken()]; + const then_src = token_starts[tree.lastToken(if_full.ast.then_expr)]; var then_scope: Scope.GenZIR = .{ .parent = scope, .decl = block_scope.decl, @@ -1908,10 +1895,10 @@ fn ifExpr(mod: *Module, scope: *Scope, rl: ResultLoc, if_node: *ast.Node.@"if") defer then_scope.instructions.deinit(mod.gpa); // declare payload to the then_scope - const then_sub_scope = try cond_kind.thenSubScope(mod, &then_scope, then_src, if_node.payload); + const then_sub_scope = &then_scope.base; block_scope.break_count += 1; - const then_result = try expr(mod, then_sub_scope, block_scope.break_result_loc, if_node.body); + const then_result = try expr(mod, then_sub_scope, block_scope.break_result_loc, if_full.ast.then_expr); // We hold off on the break instructions as well as copying the then/else // instructions into place until we know whether to keep store_to_block_ptr // instructions or 
not. @@ -1925,20 +1912,19 @@ fn ifExpr(mod: *Module, scope: *Scope, rl: ResultLoc, if_node: *ast.Node.@"if") }; defer else_scope.instructions.deinit(mod.gpa); - var else_src: usize = undefined; - var else_sub_scope: *Module.Scope = undefined; - const else_result: ?*zir.Inst = if (if_node.@"else") |else_node| blk: { - else_src = token_starts[else_node.body.lastToken()]; - // declare payload to the then_scope - else_sub_scope = try cond_kind.elseSubScope(mod, &else_scope, else_src, else_node.payload); - + const else_node = if_full.ast.else_expr; + const else_info: struct { src: usize, result: ?*zir.Inst } = if (else_node != 0) blk: { block_scope.break_count += 1; - break :blk try expr(mod, else_sub_scope, block_scope.break_result_loc, else_node.body); - } else blk: { - else_src = token_starts[if_node.lastToken()]; - else_sub_scope = &else_scope.base; - break :blk null; - }; + const sub_scope = &else_scope.base; + break :blk .{ + .src = token_starts[tree.lastToken(else_node)], + .result = try expr(mod, sub_scope, block_scope.break_result_loc, else_node), + }; + } else + .{ + .src = token_starts[tree.lastToken(if_full.then_expr)], + .result = null, + }; return finishThenElseBlock( mod, @@ -1950,9 +1936,9 @@ fn ifExpr(mod: *Module, scope: *Scope, rl: ResultLoc, if_node: *ast.Node.@"if") &condbr.positionals.then_body, &condbr.positionals.else_body, then_src, - else_src, + else_info.src, then_result, - else_result, + else_info.result, block, block, ); @@ -1983,23 +1969,15 @@ fn whileExpr( mod: *Module, scope: *Scope, rl: ResultLoc, - while_node: *ast.Node.@"while", + while_full: ast.full.While, ) InnerError!*zir.Inst { - var cond_kind: CondKind = .bool; - if (while_node.payload) |_| cond_kind = .{ .optional = null }; - if (while_node.@"else") |else_node| { - if (else_node.payload) |payload| { - cond_kind = .{ .err_union = null }; - } + if (while_full.label_token) |label_token| { + try checkLabelRedefinition(mod, scope, label_token); } - - if (while_node.label) |label| { - 
try checkLabelRedefinition(mod, scope, label); + if (while_full.inline_token) |inline_token| { + return mod.failTok(scope, inline_token, "TODO inline while", .{}); } - if (while_node.inline_token) |tok| - return mod.failTok(scope, tok, "TODO inline while", .{}); - var loop_scope: Scope.GenZIR = .{ .parent = scope, .decl = scope.ownerDecl().?, @@ -2022,12 +2000,25 @@ fn whileExpr( const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - const while_src = token_starts[while_node.while_token]; + const while_src = token_starts[while_full.ast.while_token]; const void_type = try addZIRInstConst(mod, scope, while_src, .{ .ty = Type.initTag(.type), .val = Value.initTag(.void_type), }); - const cond = try cond_kind.cond(mod, &continue_scope, while_src, while_node.condition); + const cond = c: { + // TODO https://github.com/ziglang/zig/issues/7929 + if (while_full.ast.error_token) |error_token| { + return mod.failTok(scope, error_token, "TODO implement while error union", .{}); + } else if (while_full.payload_token) |payload_token| { + return mod.failTok(scope, payload_token, "TODO implement while optional", .{}); + } else { + const bool_type = try addZIRInstConst(mod, &block_scope.base, while_src, .{ + .ty = Type.initTag(.type), + .val = Value.initTag(.bool_type), + }); + break :c try expr(mod, &block_scope.base, .{ .ty = bool_type }, while_full.ast.cond_expr); + } + }; const condbr = try addZIRInstSpecial(mod, &continue_scope.base, while_src, zir.Inst.CondBr, .{ .condition = cond, @@ -2041,8 +2032,8 @@ fn whileExpr( // are no jumps to it. This happens when the last statement of a while body is noreturn // and there are no `continue` statements. // The "repeat" at the end of a loop body is implied. 
- if (while_node.continue_expr) |cont_expr| { - _ = try expr(mod, &loop_scope.base, .{ .ty = void_type }, cont_expr); + if (while_full.ast.cont_expr != 0) { + _ = try expr(mod, &loop_scope.base, .{ .ty = void_type }, while_full.ast.cont_expr); } const loop = try scope.arena().create(zir.Inst.Loop); loop.* = .{ @@ -2062,14 +2053,14 @@ fn whileExpr( }); loop_scope.break_block = while_block; loop_scope.continue_block = cond_block; - if (while_node.label) |some| { + if (while_full.label_token) |label_token| { loop_scope.label = @as(?Scope.GenZIR.Label, Scope.GenZIR.Label{ - .token = some, + .token = label_token, .block_inst = while_block, }); } - const then_src = token_starts[while_node.body.lastToken()]; + const then_src = token_starts[tree.lastToken(while_full.ast.then_expr)]; var then_scope: Scope.GenZIR = .{ .parent = &continue_scope.base, .decl = continue_scope.decl, @@ -2080,10 +2071,10 @@ fn whileExpr( defer then_scope.instructions.deinit(mod.gpa); // declare payload to the then_scope - const then_sub_scope = try cond_kind.thenSubScope(mod, &then_scope, then_src, while_node.payload); + const then_sub_scope = &then_scope.base; loop_scope.break_count += 1; - const then_result = try expr(mod, then_sub_scope, loop_scope.break_result_loc, while_node.body); + const then_result = try expr(mod, then_sub_scope, loop_scope.break_result_loc, while_full.ast.then_expr); var else_scope: Scope.GenZIR = .{ .parent = &continue_scope.base, @@ -2094,18 +2085,20 @@ fn whileExpr( }; defer else_scope.instructions.deinit(mod.gpa); - var else_src: usize = undefined; - const else_result: ?*zir.Inst = if (while_node.@"else") |else_node| blk: { - else_src = token_starts[else_node.body.lastToken()]; - // declare payload to the then_scope - const else_sub_scope = try cond_kind.elseSubScope(mod, &else_scope, else_src, else_node.payload); - + const else_node = if_full.ast.else_expr; + const else_info: struct { src: usize, result: ?*zir.Inst } = if (else_node != 0) blk: { 
loop_scope.break_count += 1; - break :blk try expr(mod, else_sub_scope, loop_scope.break_result_loc, else_node.body); - } else blk: { - else_src = token_starts[while_node.lastToken()]; - break :blk null; - }; + const sub_scope = &else_scope.base; + break :blk .{ + .src = token_starts[tree.lastToken(else_node)], + .result = try expr(mod, sub_scope, loop_scope.break_result_loc, else_node), + }; + } else + .{ + .src = token_starts[tree.lastToken(then_node)], + .result = null, + }; + if (loop_scope.label) |some| { if (!some.used) { return mod.fail(scope, token_starts[some.token], "unused while label", .{}); @@ -2121,9 +2114,9 @@ fn whileExpr( &condbr.positionals.then_body, &condbr.positionals.else_body, then_src, - else_src, + else_info.src, then_result, - else_result, + else_info.result, while_block, cond_block, ); @@ -2630,12 +2623,13 @@ fn switchCaseExpr( } } -fn ret(mod: *Module, scope: *Scope, cfe: *ast.Node.ControlFlowExpression) InnerError!*zir.Inst { +fn ret(mod: *Module, scope: *Scope, node: ast.Node.Index) InnerError!*zir.Inst { const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - const src = token_starts[cfe.ltoken]; - if (cfe.getRHS()) |rhs_node| { + const src = token_starts[main_tokens[node]]; + const rhs_node = node_datas[node].lhs; + if (rhs_node != 0) { if (nodeMayNeedMemoryLocation(rhs_node, scope)) { const ret_ptr = try addZIRNoOp(mod, scope, src, .ret_ptr); const operand = try expr(mod, scope, .{ .ptr = ret_ptr }, rhs_node); @@ -2885,64 +2879,29 @@ fn integerLiteral( } } -fn floatLiteral(mod: *Module, scope: *Scope, float_lit: *ast.Node.OneToken) InnerError!*zir.Inst { +fn floatLiteral( + mod: *Module, + scope: *Scope, + rl: ResultLoc, + float_lit: ast.Node.Index, +) InnerError!*zir.Inst { const arena = scope.arena(); const tree = scope.tree(); - const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - const bytes = 
tree.tokenSlice(float_lit.token); + const main_token = main_tokens[float_lit]; + const bytes = tree.tokenSlice(main_token); if (bytes.len > 2 and bytes[1] == 'x') { - return mod.failTok(scope, float_lit.token, "TODO hex floats", .{}); + return mod.failTok(scope, main_token, "TODO implement hex floats", .{}); } - const float_number = std.fmt.parseFloat(f128, bytes) catch |e| switch (e) { error.InvalidCharacter => unreachable, // validated by tokenizer }; - const src = token_starts[float_lit.token]; - return addZIRInstConst(mod, scope, src, .{ + const src = token_starts[main_token]; + const result = try addZIRInstConst(mod, scope, src, .{ .ty = Type.initTag(.comptime_float), .val = try Value.Tag.float_128.create(arena, float_number), }); -} - -fn undefLiteral(mod: *Module, scope: *Scope, node: *ast.Node.OneToken) InnerError!*zir.Inst { - const arena = scope.arena(); - const tree = scope.tree(); - const node_datas = tree.nodes.items(.data); - const main_tokens = tree.nodes.items(.main_token); - const src = token_starts[node.token]; - return addZIRInstConst(mod, scope, src, .{ - .ty = Type.initTag(.@"undefined"), - .val = Value.initTag(.undef), - }); -} - -fn boolLiteral(mod: *Module, scope: *Scope, node: *ast.Node.OneToken) InnerError!*zir.Inst { - const arena = scope.arena(); - const tree = scope.tree(); - const node_datas = tree.nodes.items(.data); - const main_tokens = tree.nodes.items(.main_token); - const src = token_starts[node.token]; - return addZIRInstConst(mod, scope, src, .{ - .ty = Type.initTag(.bool), - .val = switch (tree.token_ids[node.token]) { - .keyword_true => Value.initTag(.bool_true), - .keyword_false => Value.initTag(.bool_false), - else => unreachable, - }, - }); -} - -fn nullLiteral(mod: *Module, scope: *Scope, node: *ast.Node.OneToken) InnerError!*zir.Inst { - const arena = scope.arena(); - const tree = scope.tree(); - const node_datas = tree.nodes.items(.data); - const main_tokens = tree.nodes.items(.main_token); - const src = 
token_starts[node.token]; - return addZIRInstConst(mod, scope, src, .{ - .ty = Type.initTag(.@"null"), - .val = Value.initTag(.null_value), - }); + return rvalue(mod, scope, rl, result); } fn assembly(mod: *Module, scope: *Scope, rl: ResultLoc, full: ast.full.Asm) InnerError!*zir.Inst { @@ -2987,76 +2946,36 @@ fn assembly(mod: *Module, scope: *Scope, rl: ResultLoc, full: ast.full.Asm) Inne return rvalue(mod, scope, rl, asm_inst); } -fn ensureBuiltinParamCount(mod: *Module, scope: *Scope, call: *ast.Node.builtin_call, count: u32) !void { - if (call.params_len == count) - return; - - const s = if (count == 1) "" else "s"; - return mod.failTok(scope, call.builtin_token, "expected {d} parameter{s}, found {d}", .{ count, s, call.params_len }); -} - -fn simpleCast( - mod: *Module, - scope: *Scope, - rl: ResultLoc, - call: *ast.Node.builtin_call, - inst_tag: zir.Inst.Tag, -) InnerError!*zir.Inst { - try ensureBuiltinParamCount(mod, scope, call, 2); - const tree = scope.tree(); - const node_datas = tree.nodes.items(.data); - const main_tokens = tree.nodes.items(.main_token); - const src = token_starts[call.builtin_token]; - const params = call.params(); - const dest_type = try typeExpr(mod, scope, params[0]); - const rhs = try expr(mod, scope, .none, params[1]); - const result = try addZIRBinOp(mod, scope, src, inst_tag, dest_type, rhs); - return rvalue(mod, scope, rl, result); -} - -fn ptrToInt(mod: *Module, scope: *Scope, call: *ast.Node.builtin_call) InnerError!*zir.Inst { - try ensureBuiltinParamCount(mod, scope, call, 1); - const operand = try expr(mod, scope, .none, call.params()[0]); - const tree = scope.tree(); - const node_datas = tree.nodes.items(.data); - const main_tokens = tree.nodes.items(.main_token); - const src = token_starts[call.builtin_token]; - return addZIRUnOp(mod, scope, src, .ptrtoint, operand); -} - fn as( mod: *Module, scope: *Scope, rl: ResultLoc, - call: *ast.Node.builtin_call, + builtin_token: ast.TokenIndex, + src: usize, + lhs: 
ast.Node.Index, + rhs: ast.Node.Index, ) InnerError!*zir.Inst { - try ensureBuiltinParamCount(mod, scope, call, 2); - const tree = scope.tree(); - const node_datas = tree.nodes.items(.data); - const main_tokens = tree.nodes.items(.main_token); - const src = token_starts[call.builtin_token]; - const params = call.params(); - const dest_type = try typeExpr(mod, scope, params[0]); + const dest_type = try typeExpr(mod, scope, lhs); switch (rl) { .none, .discard, .ref, .ty => { - const result = try expr(mod, scope, .{ .ty = dest_type }, params[1]); + const result = try expr(mod, scope, .{ .ty = dest_type }, rhs); return rvalue(mod, scope, rl, result); }, .ptr => |result_ptr| { - return asRlPtr(mod, scope, rl, src, result_ptr, params[1], dest_type); + return asRlPtr(mod, scope, rl, src, result_ptr, rhs, dest_type); }, .block_ptr => |block_scope| { - return asRlPtr(mod, scope, rl, src, block_scope.rl_ptr.?, params[1], dest_type); + return asRlPtr(mod, scope, rl, src, block_scope.rl_ptr.?, rhs, dest_type); }, .bitcasted_ptr => |bitcasted_ptr| { // TODO here we should be able to resolve the inference; we now have a type for the result. - return mod.failTok(scope, call.builtin_token, "TODO implement @as with result location @bitCast", .{}); + return mod.failTok(scope, builtin_token, "TODO implement @as with result location @bitCast", .{}); }, .inferred_ptr => |result_alloc| { // TODO here we should be able to resolve the inference; we now have a type for the result. 
- return mod.failTok(scope, call.builtin_token, "TODO implement @as with inferred-type result location pointer", .{}); + return mod.failTok(scope, builtin_token, "TODO implement @as with inferred-type result location pointer", .{}); }, } } @@ -3105,170 +3024,290 @@ fn asRlPtr( } } -fn bitCast(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.builtin_call) InnerError!*zir.Inst { - try ensureBuiltinParamCount(mod, scope, call, 2); - const tree = scope.tree(); - const node_datas = tree.nodes.items(.data); - const main_tokens = tree.nodes.items(.main_token); - const src = token_starts[call.builtin_token]; - const params = call.params(); - const dest_type = try typeExpr(mod, scope, params[0]); +fn bitCast( + mod: *Module, + scope: *Scope, + rl: ResultLoc, + builtin_token: ast.TokenIndex, + src: usize, + lhs: ast.Node.Index, + rhs: ast.Node.Index, +) InnerError!*zir.Inst { + const dest_type = try typeExpr(mod, scope, lhs); switch (rl) { .none => { - const operand = try expr(mod, scope, .none, params[1]); + const operand = try expr(mod, scope, .none, rhs); return addZIRBinOp(mod, scope, src, .bitcast, dest_type, operand); }, .discard => { - const operand = try expr(mod, scope, .none, params[1]); + const operand = try expr(mod, scope, .none, rhs); const result = try addZIRBinOp(mod, scope, src, .bitcast, dest_type, operand); _ = try addZIRUnOp(mod, scope, result.src, .ensure_result_non_error, result); return result; }, .ref => { - const operand = try expr(mod, scope, .ref, params[1]); + const operand = try expr(mod, scope, .ref, rhs); const result = try addZIRBinOp(mod, scope, src, .bitcast_ref, dest_type, operand); return result; }, .ty => |result_ty| { - const result = try expr(mod, scope, .none, params[1]); + const result = try expr(mod, scope, .none, rhs); const bitcasted = try addZIRBinOp(mod, scope, src, .bitcast, dest_type, result); return addZIRBinOp(mod, scope, src, .as, result_ty, bitcasted); }, .ptr => |result_ptr| { const casted_result_ptr = try 
addZIRUnOp(mod, scope, src, .bitcast_result_ptr, result_ptr); - return expr(mod, scope, .{ .bitcasted_ptr = casted_result_ptr.castTag(.bitcast_result_ptr).? }, params[1]); + return expr(mod, scope, .{ .bitcasted_ptr = casted_result_ptr.castTag(.bitcast_result_ptr).? }, rhs); }, .bitcasted_ptr => |bitcasted_ptr| { - return mod.failTok(scope, call.builtin_token, "TODO implement @bitCast with result location another @bitCast", .{}); + return mod.failTok(scope, builtin_token, "TODO implement @bitCast with result location another @bitCast", .{}); }, .block_ptr => |block_ptr| { - return mod.failTok(scope, call.builtin_token, "TODO implement @bitCast with result location inferred peer types", .{}); + return mod.failTok(scope, builtin_token, "TODO implement @bitCast with result location inferred peer types", .{}); }, .inferred_ptr => |result_alloc| { // TODO here we should be able to resolve the inference; we now have a type for the result. - return mod.failTok(scope, call.builtin_token, "TODO implement @bitCast with inferred-type result location pointer", .{}); + return mod.failTok(scope, builtin_token, "TODO implement @bitCast with inferred-type result location pointer", .{}); }, } } -fn import(mod: *Module, scope: *Scope, call: *ast.Node.builtin_call) InnerError!*zir.Inst { - try ensureBuiltinParamCount(mod, scope, call, 1); - const tree = scope.tree(); - const node_datas = tree.nodes.items(.data); - const main_tokens = tree.nodes.items(.main_token); - const src = token_starts[call.builtin_token]; - const params = call.params(); - const target = try expr(mod, scope, .none, params[0]); - return addZIRUnOp(mod, scope, src, .import, target); -} - -fn compileError(mod: *Module, scope: *Scope, call: *ast.Node.builtin_call) InnerError!*zir.Inst { - try ensureBuiltinParamCount(mod, scope, call, 1); - const tree = scope.tree(); - const node_datas = tree.nodes.items(.data); - const main_tokens = tree.nodes.items(.main_token); - const src = token_starts[call.builtin_token]; - 
const params = call.params(); - const target = try expr(mod, scope, .none, params[0]); - return addZIRUnOp(mod, scope, src, .compile_error, target); -} - -fn setEvalBranchQuota(mod: *Module, scope: *Scope, call: *ast.Node.builtin_call) InnerError!*zir.Inst { - try ensureBuiltinParamCount(mod, scope, call, 1); - const tree = scope.tree(); - const node_datas = tree.nodes.items(.data); - const main_tokens = tree.nodes.items(.main_token); - const src = token_starts[call.builtin_token]; - const params = call.params(); - const u32_type = try addZIRInstConst(mod, scope, src, .{ - .ty = Type.initTag(.type), - .val = Value.initTag(.u32_type), - }); - const quota = try expr(mod, scope, .{ .ty = u32_type }, params[0]); - return addZIRUnOp(mod, scope, src, .set_eval_branch_quota, quota); -} - -fn typeOf(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.builtin_call) InnerError!*zir.Inst { - const tree = scope.tree(); - const node_datas = tree.nodes.items(.data); - const main_tokens = tree.nodes.items(.main_token); - const arena = scope.arena(); - const src = token_starts[call.builtin_token]; - const params = call.params(); +fn typeOf( + mod: *Module, + scope: *Scope, + rl: ResultLoc, + builtin_token: ast.TokenIndex, + src: usize, + params: []const ast.Node.Index, +) InnerError!*zir.Inst { if (params.len < 1) { - return mod.failTok(scope, call.builtin_token, "expected at least 1 argument, found 0", .{}); + return mod.failTok(scope, builtin_token, "expected at least 1 argument, found 0", .{}); } if (params.len == 1) { return rvalue(mod, scope, rl, try addZIRUnOp(mod, scope, src, .typeof, try expr(mod, scope, .none, params[0]))); } + const arena = scope.arena(); var items = try arena.alloc(*zir.Inst, params.len); for (params) |param, param_i| items[param_i] = try expr(mod, scope, .none, param); return rvalue(mod, scope, rl, try addZIRInst(mod, scope, src, zir.Inst.TypeOfPeer, .{ .items = items }, .{})); } -fn compileLog(mod: *Module, scope: *Scope, call: 
*ast.Node.builtin_call) InnerError!*zir.Inst { - const tree = scope.tree(); - const node_datas = tree.nodes.items(.data); - const main_tokens = tree.nodes.items(.main_token); - const arena = scope.arena(); - const src = token_starts[call.builtin_token]; - const params = call.params(); - var targets = try arena.alloc(*zir.Inst, params.len); - for (params) |param, param_i| - targets[param_i] = try expr(mod, scope, .none, param); - return addZIRInst(mod, scope, src, zir.Inst.CompileLog, .{ .to_log = targets }, .{}); -} -fn builtinCall(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.builtin_call) InnerError!*zir.Inst { +fn builtinCall( + mod: *Module, + scope: *Scope, + rl: ResultLoc, + call: ast.Node.Index, + params: []const ast.Node.Index, +) InnerError!*zir.Inst { const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - const builtin_name = tree.tokenSlice(call.builtin_token); + const builtin_token = main_tokens[call]; + const builtin_name = tree.tokenSlice(builtin_token); // We handle the different builtins manually because they have different semantics depending // on the function. For example, `@as` and others participate in result location semantics, // and `@cImport` creates a special scope that collects a .c source code text buffer. // Also, some builtins have a variable number of parameters. 
- if (mem.eql(u8, builtin_name, "@ptrToInt")) { - return rvalue(mod, scope, rl, try ptrToInt(mod, scope, call)); - } else if (mem.eql(u8, builtin_name, "@as")) { - return as(mod, scope, rl, call); - } else if (mem.eql(u8, builtin_name, "@floatCast")) { - return simpleCast(mod, scope, rl, call, .floatcast); - } else if (mem.eql(u8, builtin_name, "@intCast")) { - return simpleCast(mod, scope, rl, call, .intcast); - } else if (mem.eql(u8, builtin_name, "@bitCast")) { - return bitCast(mod, scope, rl, call); - } else if (mem.eql(u8, builtin_name, "@TypeOf")) { - return typeOf(mod, scope, rl, call); - } else if (mem.eql(u8, builtin_name, "@breakpoint")) { - const src = token_starts[call.builtin_token]; - return rvalue(mod, scope, rl, try addZIRNoOp(mod, scope, src, .breakpoint)); - } else if (mem.eql(u8, builtin_name, "@import")) { - return rvalue(mod, scope, rl, try import(mod, scope, call)); - } else if (mem.eql(u8, builtin_name, "@compileError")) { - return compileError(mod, scope, call); - } else if (mem.eql(u8, builtin_name, "@setEvalBranchQuota")) { - return setEvalBranchQuota(mod, scope, call); - } else if (mem.eql(u8, builtin_name, "@compileLog")) { - return compileLog(mod, scope, call); - } else if (mem.eql(u8, builtin_name, "@field")) { - return namedField(mod, scope, rl, call); - } else { - return mod.failTok(scope, call.builtin_token, "invalid builtin function: '{s}'", .{builtin_name}); + const info = BuiltinFn.list.get(builtin_name) orelse { + return mod.failTok(scope, builtin_token, "invalid builtin function: '{s}'", .{ + builtin_name, + }); + }; + if (info.param_count != params.len) { + const s = if (params.len == 1) "" else "s"; + return mod.failTok(scope, builtin_token, "expected {d} parameter{s}, found {d}", .{ + expected, s, found, + }); + } + const src = token_starts[builtin_token]; + + switch (info.tag) { + .ptr_to_int => { + const operand = try expr(mod, scope, .none, params[0]); + const result = try addZIRUnOp(mod, scope, src, .ptrtoint, operand); 
+ return rvalue(mod, scope, rl, result); + }, + .float_cast => { + const dest_type = try typeExpr(mod, scope, params[0]); + const rhs = try expr(mod, scope, .none, params[1]); + const result = try addZIRBinOp(mod, scope, src, .floatcast, dest_type, rhs); + return rvalue(mod, scope, rl, result); + }, + .int_cast => { + const dest_type = try typeExpr(mod, scope, params[0]); + const rhs = try expr(mod, scope, .none, params[1]); + const result = try addZIRBinOp(mod, scope, src, .intcast, dest_type, rhs); + return rvalue(mod, scope, rl, result); + }, + .breakpoint => { + const result = try addZIRNoOp(mod, scope, src, .breakpoint); + return rvalue(mod, scope, rl, result); + }, + .import => { + const target = try expr(mod, scope, .none, params[0]); + const result = try addZIRUnOp(mod, scope, src, .import, target); + return rvalue(mod, scope, rl, result); + }, + .compile_error => { + const target = try expr(mod, scope, .none, params[0]); + const result = addZIRUnOp(mod, scope, src, .compile_error, target); + return rvalue(mod, scope, rl, result); + }, + .set_eval_branch_quota => { + const u32_type = try addZIRInstConst(mod, scope, src, .{ + .ty = Type.initTag(.type), + .val = Value.initTag(.u32_type), + }); + const quota = try expr(mod, scope, .{ .ty = u32_type }, params[0]); + const result = try addZIRUnOp(mod, scope, src, .set_eval_branch_quota, quota); + return rvalue(mod, scope, rl, result); + }, + .compile_log => { + const arena = scope.arena(); + var targets = try arena.alloc(*zir.Inst, params.len); + for (params) |param, param_i| + targets[param_i] = try expr(mod, scope, .none, param); + const result = try addZIRInst(mod, scope, src, zir.Inst.CompileLog, .{ .to_log = targets }, .{}); + return rvalue(mod, scope, rl, result); + }, + .field => { + const string_type = try addZIRInstConst(mod, scope, src, .{ + .ty = Type.initTag(.type), + .val = Value.initTag(.const_slice_u8_type), + }); + const string_rl: ResultLoc = .{ .ty = string_type }; + + if (rl == .ref) { + 
return addZirInstTag(mod, scope, src, .field_ptr_named, .{ + .object = try expr(mod, scope, .ref, params[0]), + .field_name = try comptimeExpr(mod, scope, string_rl, params[1]), + }); + } + return rvalue(mod, scope, rl, try addZirInstTag(mod, scope, src, .field_val_named, .{ + .object = try expr(mod, scope, .none, params[0]), + .field_name = try comptimeExpr(mod, scope, string_rl, params[1]), + })); + }, + .as => return as(mod, scope, rl, builtin_token, src, params[0], params[1]), + .bit_cast => return bitCast(mod, scope, rl, builtin_token, src, params[0], params[1]), + .TypeOf => return typeOf(mod, scope, rl, builtin_token, src, params), + + .add_with_overflow, + .align_cast, + .align_of, + .async_call, + .atomic_load, + .atomic_rmw, + .atomic_store, + .bit_offset_of, + .bool_to_int, + .bit_size_of, + .mul_add, + .byte_swap, + .bit_reverse, + .byte_offset_of, + .call, + .c_define, + .c_import, + .c_include, + .clz, + .cmpxchg_strong, + .cmpxchg_weak, + .ctz, + .c_undef, + .div_exact, + .div_floor, + .div_trunc, + .embed_file, + .enum_to_int, + .error_name, + .error_return_trace, + .error_to_int, + .err_set_cast, + .@"export", + .fence, + .field_parent_ptr, + .float_to_int, + .frame, + .Frame, + .frame_address, + .frame_size, + .has_decl, + .has_field, + .int_to_enum, + .int_to_error, + .int_to_float, + .int_to_ptr, + .memcpy, + .memset, + .wasm_memory_size, + .wasm_memory_grow, + .mod, + .mul_with_overflow, + .panic, + .pop_count, + .ptr_cast, + .rem, + .return_address, + .set_align_stack, + .set_cold, + .set_float_mode, + .set_runtime_safety, + .shl_exact, + .shl_with_overflow, + .shr_exact, + .shuffle, + .size_of, + .splat, + .reduce, + .src, + .sqrt, + .sin, + .cos, + .exp, + .exp2, + .log, + .log2, + .log10, + .fabs, + .floor, + .ceil, + .trunc, + .round, + .sub_with_overflow, + .tag_name, + .This, + .truncate, + .Type, + .type_info, + .type_name, + .union_init, + => return mod.failTok(scope, builtin_token, "TODO: implement builtin function {s}", .{ + 
builtin_name, + }), } } -fn callExpr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.call) InnerError!*zir.Inst { +fn callExpr( + mod: *Module, + scope: *Scope, + rl: ResultLoc, + call: ast.full.Call, +) InnerError!*zir.Inst { + if (call.async_token) |async_token| { + return mod.failTok(scope, async_token, "TODO implement async fn call", .{}); + } + const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - const lhs = try expr(mod, scope, .none, node.lhs); + const lhs = try expr(mod, scope, .none, call.ast.fn_expr); - const param_nodes = node.params(); - const args = try scope.getGenZIR().arena.alloc(*zir.Inst, param_nodes.len); - for (param_nodes) |param_node, i| { + const args = try scope.getGenZIR().arena.alloc(*zir.Inst, call.ast.params.len); + for (call.ast.params) |param_node, i| { const param_src = token_starts[tree.firstToken(param_node)]; const param_type = try addZIRInst(mod, scope, param_src, zir.Inst.ParamType, .{ .func = lhs, @@ -3277,7 +3316,7 @@ fn callExpr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.call) In args[i] = try expr(mod, scope, .{ .ty = param_type }, param_node); } - const src = token_starts[node.lhs.firstToken()]; + const src = token_starts[call.ast.lparen]; const result = try addZIRInst(mod, scope, src, zir.Inst.Call, .{ .func = lhs, .args = args, @@ -3286,14 +3325,6 @@ fn callExpr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.call) In return rvalue(mod, scope, rl, result); } -fn unreach(mod: *Module, scope: *Scope, unreach_node: *ast.Node.OneToken) InnerError!*zir.Inst { - const tree = scope.tree(); - const node_datas = tree.nodes.items(.data); - const main_tokens = tree.nodes.items(.main_token); - const src = token_starts[unreach_node.token]; - return addZIRNoOp(mod, scope, src, .unreachable_safe); -} - fn getSimplePrimitiveValue(name: []const u8) ?TypedValue { const simple_types = std.ComptimeStringMap(Value.Tag, .{ .{ "u8", 
.u8_type }, @@ -3430,17 +3461,25 @@ fn nodeMayNeedMemoryLocation(start_node: *ast.Node, scope: *Scope) bool { .deref, .array_access, .block, + .while_simple, // This variant cannot have an else expression. + .while_cont, // This variant cannot have an else expression. + .for_simple, // This variant cannot have an else expression. + .if_simple, // This variant cannot have an else expression. => return false, - // Forward the question to a sub-expression. - .grouped_expression => node = node.castTag(.grouped_expression).?.expr, - .@"try" => node = node.castTag(.@"try").?.rhs, - .@"await" => node = node.castTag(.@"await").?.rhs, - .@"catch" => node = node.castTag(.@"catch").?.rhs, - .@"orelse" => node = node.castTag(.@"orelse").?.rhs, - .@"comptime" => node = node.castTag(.@"comptime").?.expr, - .@"nosuspend" => node = node.castTag(.@"nosuspend").?.expr, - .unwrap_optional => node = node.castTag(.unwrap_optional).?.lhs, + // Forward the question to the LHS sub-expression. + .grouped_expression, + .@"try", + .@"await", + .@"comptime", + .@"nosuspend", + .unwrap_optional, + => node = datas[node].lhs, + + // Forward the question to the RHS sub-expression. + .@"catch", + .@"orelse", + => node = datas[node].rhs, // True because these are exactly the expressions we need memory locations for. .ArrayInitializer, @@ -3451,125 +3490,43 @@ fn nodeMayNeedMemoryLocation(start_node: *ast.Node, scope: *Scope) bool { // True because depending on comptime conditions, sub-expressions // may be the kind that need memory locations. - .@"while", - .@"for", + .@"while", // This variant always has an else expression. + .@"if", // This variant always has an else expression. + .@"for", // This variant always has an else expression. 
.@"switch", + .call_one, + .call_one_comma, + .async_call_one, + .async_call_one_comma, .call, - .labeled_block, + .call_comma, + .async_call, + .async_call_comma, => return true, - .builtin_call => { - @setEvalBranchQuota(5000); - const builtin_needs_mem_loc = std.ComptimeStringMap(bool, .{ - .{ "@addWithOverflow", false }, - .{ "@alignCast", false }, - .{ "@alignOf", false }, - .{ "@as", true }, - .{ "@asyncCall", false }, - .{ "@atomicLoad", false }, - .{ "@atomicRmw", false }, - .{ "@atomicStore", false }, - .{ "@bitCast", true }, - .{ "@bitOffsetOf", false }, - .{ "@boolToInt", false }, - .{ "@bitSizeOf", false }, - .{ "@breakpoint", false }, - .{ "@mulAdd", false }, - .{ "@byteSwap", false }, - .{ "@bitReverse", false }, - .{ "@byteOffsetOf", false }, - .{ "@call", true }, - .{ "@cDefine", false }, - .{ "@cImport", false }, - .{ "@cInclude", false }, - .{ "@clz", false }, - .{ "@cmpxchgStrong", false }, - .{ "@cmpxchgWeak", false }, - .{ "@compileError", false }, - .{ "@compileLog", false }, - .{ "@ctz", false }, - .{ "@cUndef", false }, - .{ "@divExact", false }, - .{ "@divFloor", false }, - .{ "@divTrunc", false }, - .{ "@embedFile", false }, - .{ "@enumToInt", false }, - .{ "@errorName", false }, - .{ "@errorReturnTrace", false }, - .{ "@errorToInt", false }, - .{ "@errSetCast", false }, - .{ "@export", false }, - .{ "@fence", false }, - .{ "@field", true }, - .{ "@fieldParentPtr", false }, - .{ "@floatCast", false }, - .{ "@floatToInt", false }, - .{ "@frame", false }, - .{ "@Frame", false }, - .{ "@frameAddress", false }, - .{ "@frameSize", false }, - .{ "@hasDecl", false }, - .{ "@hasField", false }, - .{ "@import", false }, - .{ "@intCast", false }, - .{ "@intToEnum", false }, - .{ "@intToError", false }, - .{ "@intToFloat", false }, - .{ "@intToPtr", false }, - .{ "@memcpy", false }, - .{ "@memset", false }, - .{ "@wasmMemorySize", false }, - .{ "@wasmMemoryGrow", false }, - .{ "@mod", false }, - .{ "@mulWithOverflow", false }, - .{ "@panic", false }, 
- .{ "@popCount", false }, - .{ "@ptrCast", false }, - .{ "@ptrToInt", false }, - .{ "@rem", false }, - .{ "@returnAddress", false }, - .{ "@setAlignStack", false }, - .{ "@setCold", false }, - .{ "@setEvalBranchQuota", false }, - .{ "@setFloatMode", false }, - .{ "@setRuntimeSafety", false }, - .{ "@shlExact", false }, - .{ "@shlWithOverflow", false }, - .{ "@shrExact", false }, - .{ "@shuffle", false }, - .{ "@sizeOf", false }, - .{ "@splat", true }, - .{ "@reduce", false }, - .{ "@src", true }, - .{ "@sqrt", false }, - .{ "@sin", false }, - .{ "@cos", false }, - .{ "@exp", false }, - .{ "@exp2", false }, - .{ "@log", false }, - .{ "@log2", false }, - .{ "@log10", false }, - .{ "@fabs", false }, - .{ "@floor", false }, - .{ "@ceil", false }, - .{ "@trunc", false }, - .{ "@round", false }, - .{ "@subWithOverflow", false }, - .{ "@tagName", false }, - .{ "@This", false }, - .{ "@truncate", false }, - .{ "@Type", false }, - .{ "@typeInfo", false }, - .{ "@typeName", false }, - .{ "@TypeOf", false }, - .{ "@unionInit", true }, - }); - const name = scope.tree().tokenSlice(node.castTag(.builtin_call).?.builtin_token); - return builtin_needs_mem_loc.get(name).?; + block_two, + block_two_semicolon, + block, + block_semicolon, + => { + const lbrace = main_tokens[node]; + if (token_tags[lbrace - 1] == .colon) { + // Labeled blocks may need a memory location to forward + // to their break statements. + return true; + } else { + return false; + } }, - // Depending on AST properties, they may need memory locations. - .@"if" => return node.castTag(.@"if").?.@"else" != null, + .builtin_call => { + const builtin_token = main_tokens[node]; + const builtin_name = tree.tokenSlice(builtin_token); + // If the builtin is an invalid name, we don't cause an error here; instead + // let it pass, and the error will be "invalid builtin function" later. 
+ const builtin_info = BuiltinFn.list.get(builtin_name) orelse return false; + return builtin_info.needs_mem_loc; + }, } } } From 7051ef32bf8e1a16cfd73f2bfa09fdbdf39ffc54 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Fri, 18 Dec 2020 01:10:31 +0200 Subject: [PATCH 063/173] translate-c: start creating intermediate AST --- src/translate_c/ast.zig | 207 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 207 insertions(+) create mode 100644 src/translate_c/ast.zig diff --git a/src/translate_c/ast.zig b/src/translate_c/ast.zig new file mode 100644 index 0000000000..4418971269 --- /dev/null +++ b/src/translate_c/ast.zig @@ -0,0 +1,207 @@ +const std = @import("std"); +const Type = @import("../type.zig").Type; + +pub const Node = struct { + tag: Tag, + // type: Type = Type.initTag(.noreturn), + + pub const Tag = enum { + null_literal, + undefined_literal, + opaque_literal, + bool_literal, + int, + float, + string, + char, + identifier, + @"if", + @"while", + @"switch", + @"continue", + @"break", + @"return", + field_access, + field_access_arrow, + array_access, + call, + std_mem_zeroes, + var_decl, + func, + warning, + failed_decl, + @"enum", + @"struct", + @"union", + array_init, + container_init, + std_meta_cast, + discard, + block, + + pub fn Type(tag: Tag) ?type { + return switch (tag) { + .null_literal => null, + .undefined_literal => null, + .opaque_literal => null, + .bool_literal, + .int, + .float, + .string, + .char, + .identifier, + .field_access, + .field_access_arrow, + .warning, + .failed_decl, + => Value, + .@"if" => If, + .@"while" => While, + .@"switch" => Switch, + .@"break" => Break, + .call => Call, + .array_access, + .std_mem_zeroes, + .@"return", + .discard, + => SingleArg, + .var_decl => VarDecl, + .func => Func, + .@"enum" => Enum, + .@"struct", .@"union" => Record, + .array_init => ArrayInit, + .container_init => ContainerInit, + .std_meta_cast => Infix, + .block => Block, + }; + } + }; + + pub const Infix = struct { + base: Node, + 
lhs: *Node, + rhs: *Node, + }; + + pub const Value = struct { + base: Node, + val: []const u8, + }; + + pub const SingleArg = struct { + base: Node, + index: *Node, + }; + + pub const If = struct { + base: Node = .{ .tag = .@"if" }, + cond: *Node, + then: *Node, + @"else": ?*Node, + }; + + pub const While = struct { + base: Node = .{ .tag = .@"while" }, + cond: *Node, + body: *Node, + }; + + pub const Switch = struct { + base: Node = .{ .tag = .@"switch" }, + cond: *Node, + cases: []Prong, + default: ?[]const u8, + + pub const Prong = struct { + lhs: *Node, + rhs: ?*Node, + label: []const u8, + }; + }; + + pub const Break = struct { + base: Node = .{ .tag = .@"break" }, + label: ?[]const u8, + rhs: ?*Node, + }; + + pub const Call = struct { + base: Node = .{.call}, + lhs: *Node, + args: []*Node, + }; + + pub const VarDecl = struct { + base: Node = .{ .tag = .var_decl }, + @"pub": bool, + @"const": bool, + @"extern": bool, + @"export": bool, + name: []const u8, + type: Type, + init: *Node, + }; + + pub const Func = struct { + base: Node = .{.func}, + @"pub": bool, + @"extern": bool, + @"export": bool, + name: []const u8, + cc: std.builtin.CallingConvention, + params: []Param, + return_type: Type, + body: ?*Node, + + pub const Param = struct { + @"noalias": bool, + name: ?[]const u8, + type: Type, + }; + }; + + pub const Enum = struct { + base: Node = .{ .tag = .@"enum" }, + name: ?[]const u8, + fields: []Field, + + pub const Field = struct { + name: []const u8, + value: ?[]const u8, + }; + }; + + pub const Record = struct { + base: Node, + name: ?[]const u8, + @"packed": bool, + fields: []Field, + + pub const Field = struct { + name: []const u8, + type: Type, + alignment: c_uint, + }; + }; + + pub const ArrayInit = struct { + base: Node = .{ .tag = .array_init }, + values: []*Node, + }; + + pub const ContainerInit = struct { + base: Node = .{ .tag = .container_init }, + values: []Initializer, + + pub const Initializer = struct { + name: []const u8, + value: *Node, + 
}; + }; + + pub const Block = struct { + base: Node = .{ .tag = .block }, + label: ?[]const u8, + stmts: []*Node, + }; +}; From 6ecec4c8b761c9f8f272602ccb2abdfd9656c71c Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Sun, 31 Jan 2021 12:19:09 +0200 Subject: [PATCH 064/173] translate-c: translate C types to stage2 types --- src/translate_c.zig | 182 ++++++++++++++++++++++++++++++++++++++++++++ src/type.zig | 56 ++++++++++++++ 2 files changed, 238 insertions(+) diff --git a/src/translate_c.zig b/src/translate_c.zig index a7d79e655a..e6629592ed 100644 --- a/src/translate_c.zig +++ b/src/translate_c.zig @@ -10,6 +10,7 @@ const ctok = std.c.tokenizer; const CToken = std.c.Token; const mem = std.mem; const math = std.math; +const Type = @import("type.zig").Type; const CallingConvention = std.builtin.CallingConvention; @@ -5178,6 +5179,176 @@ fn transType(rp: RestorePoint, ty: *const clang.Type, source_loc: clang.SourceLo } } +fn transType1(c: *Context, ty: *const clang.Type, source_loc: clang.SourceLocation) TypeError!Type { + switch (ty.getTypeClass()) { + .Builtin => { + const builtin_ty = @ptrCast(*const clang.BuiltinType, ty); + return Type.initTag(switch (builtin_ty.getKind()) { + .Void => .c_void, + .Bool => .bool, + .Char_U, .UChar, .Char_S, .Char8 => .u8, + .SChar => .i8, + .UShort => .c_ushort, + .UInt => .c_uint, + .ULong => .c_ulong, + .ULongLong => .c_ulonglong, + .Short => .c_short, + .Int => .c_int, + .Long => .c_long, + .LongLong => .c_longlong, + .UInt128 => .u128, + .Int128 => .i128, + .Float => .f32, + .Double => .f64, + .Float128 => .f128, + .Float16 => .f16, + .LongDouble => .c_longdouble, + else => return fail(c, error.UnsupportedType, source_loc, "unsupported builtin type", .{}), + }); + }, + .FunctionProto => { + const fn_proto_ty = @ptrCast(*const clang.FunctionProtoType, ty); + return transFnProto(c, null, fn_proto_ty, source_loc, null, false); + }, + .FunctionNoProto => { + const fn_no_proto_ty = @ptrCast(*const clang.FunctionType, ty); + 
return transFnNoProto(c, fn_no_proto_ty, source_loc, null, false); + }, + .Paren => { + const paren_ty = @ptrCast(*const clang.ParenType, ty); + return transQualType(c, paren_ty.getInnerType(), source_loc); + }, + .Pointer => { + const child_qt = ty.getPointeeType(); + if (qualTypeChildIsFnProto(child_qt)) { + return Type.optional_single_mut_pointer.create(c.arena, try transQualType(c, child_qt, source_loc)); + } + const is_const = child_qt.isConstQualified(); + const is_volatile = child_qt.isVolatileQualified(); + const elem_type = try transQualType(c, child_qt, source_loc); + if (elem_type.zigTypeTag() == .Opaque) { + if (!is_volatile) { + if (is_const) { + return Type.optional_single_const_pointer.create(c.arena, elem_type); + } else { + return Type.optional_single_mut_pointer.create(c.arena, elem_type); + } + } + + return Type.pointer.create(c.arena, .{ + .pointee_type = elem_type, + .sentinel = null, + .@"align" = 0, + .bit_offset = 0, + .host_size = 0, + .@"allowzero" = false, + .mutable = !is_const, + .@"volatile" = true, + .size = .Single, + }); + } + + if (!is_volatile) { + if (is_const) { + return Type.c_const_pointer.create(c.arena, elem_type); + } else { + return Type.c_mut_pointer.create(c.arena, elem_type); + } + } + + return Type.pointer.create(c.arena, .{ + .pointee_type = elem_type, + .sentinel = null, + .@"align" = 0, + .bit_offset = 0, + .host_size = 0, + .@"allowzero" = false, + .mutable = !is_const, + .@"volatile" = true, + .size = .C, + }); + }, + .ConstantArray => { + const const_arr_ty = @ptrCast(*const clang.ConstantArrayType, ty); + + const size_ap_int = const_arr_ty.getSize(); + const size = size_ap_int.getLimitedValue(math.maxInt(usize)); + const elem_type = try transType1(c, const_arr_ty.getElementType().getTypePtr(), source_loc); + + return Type.array.create(c.arena, .{ .len = size, .elem_type = elem_type }); + }, + .IncompleteArray => { + const incomplete_array_ty = @ptrCast(*const clang.IncompleteArrayType, ty); + + const child_qt = 
incomplete_array_ty.getElementType(); + const is_const = child_qt.isConstQualified(); + const is_volatile = child_qt.isVolatileQualified(); + const elem_type = try transQualType(c, child_qt, source_loc); + + if (!is_volatile) { + if (is_const) { + return Type.c_const_pointer.create(c.arena, elem_type); + } else { + return Type.c_mut_pointer.create(c.arena, elem_type); + } + } + + return Type.pointer.create(c.arena, .{ + .pointee_type = elem_type, + .sentinel = null, + .@"align" = 0, + .bit_offset = 0, + .host_size = 0, + .@"allowzero" = false, + .mutable = !is_const, + .@"volatile" = true, + .size = .C, + }); + }, + .Typedef => { + const typedef_ty = @ptrCast(*const clang.TypedefType, ty); + + const typedef_decl = typedef_ty.getDecl(); + return (try transTypeDef(c, typedef_decl, false)) orelse + fail(c, error.UnsupportedType, source_loc, "unable to translate typedef declaration", .{}); + }, + .Record => { + const record_ty = @ptrCast(*const clang.RecordType, ty); + + const record_decl = record_ty.getDecl(); + return (try transRecordDecl(c, record_decl)) orelse + fail(c, error.UnsupportedType, source_loc, "unable to resolve record declaration", .{}); + }, + .Enum => { + const enum_ty = @ptrCast(*const clang.EnumType, ty); + + const enum_decl = enum_ty.getDecl(); + return (try transEnumDecl(c, enum_decl)) orelse + fail(c, error.UnsupportedType, source_loc, "unable to translate enum declaration", .{}); + }, + .Elaborated => { + const elaborated_ty = @ptrCast(*const clang.ElaboratedType, ty); + return transQualType(c, elaborated_ty.getNamedType(), source_loc); + }, + .Decayed => { + const decayed_ty = @ptrCast(*const clang.DecayedType, ty); + return transQualType(c, decayed_ty.getDecayedType(), source_loc); + }, + .Attributed => { + const attributed_ty = @ptrCast(*const clang.AttributedType, ty); + return transQualType(c, attributed_ty.getEquivalentType(), source_loc); + }, + .MacroQualified => { + const macroqualified_ty = @ptrCast(*const clang.MacroQualifiedType, 
ty); + return transQualType(c, macroqualified_ty.getModifiedType(), source_loc); + }, + else => { + const type_name = c.str(ty.getTypeClassName()); + return fail(c, error.UnsupportedType, source_loc, "unsupported type: '{}'", .{type_name}); + }, + } +} + fn qualTypeWasDemotedToOpaque(c: *Context, qt: clang.QualType) bool { const ty = qt.getTypePtr(); switch (qt.getTypeClass()) { @@ -5474,6 +5645,17 @@ fn emitWarning(c: *Context, loc: clang.SourceLocation, comptime format: []const _ = try appendTokenFmt(c, .LineComment, "// {s}: warning: " ++ format, args_prefix ++ args); } +fn fail( + rp: RestorePoint, + err: anytype, + source_loc: clang.SourceLocation, + comptime format: []const u8, + args: anytype, +) (@TypeOf(err) || error{OutOfMemory}) { + try emitWarning(c, source_loc, format, args); + return err; +} + pub fn failDecl(c: *Context, loc: clang.SourceLocation, name: []const u8, comptime format: []const u8, args: anytype) !void { // pub const name = @compileError(msg); const pub_tok = try appendToken(c, .Keyword_pub, "pub"); diff --git a/src/type.zig b/src/type.zig index e1006e554c..7d3308e403 100644 --- a/src/type.zig +++ b/src/type.zig @@ -28,6 +28,8 @@ pub const Type = extern union { .i32, .u64, .i64, + .u128, + .i128, .usize, .isize, .c_short, @@ -357,6 +359,8 @@ pub const Type = extern union { .i32, .u64, .i64, + .u128, + .i128, .usize, .isize, .c_short, @@ -506,6 +510,8 @@ pub const Type = extern union { .i32, .u64, .i64, + .u128, + .i128, .usize, .isize, .c_short, @@ -772,6 +778,8 @@ pub const Type = extern union { .i32, .u64, .i64, + .u128, + .i128, .usize, .isize, .c_short, @@ -868,6 +876,7 @@ pub const Type = extern union { .i16, .u16 => return 2, .i32, .u32 => return 4, .i64, .u64 => return 8, + .u128, .i128 => return 16, .isize, .usize, @@ -1010,6 +1019,7 @@ pub const Type = extern union { .i16, .u16 => return 2, .i32, .u32 => return 4, .i64, .u64 => return 8, + .u128, .i128 => return 16, .@"anyframe", .anyframe_T, .isize, .usize => return 
@divExact(target.cpu.arch.ptrBitWidth(), 8), @@ -1109,6 +1119,8 @@ pub const Type = extern union { .i32, .u64, .i64, + .u128, + .i128, .usize, .isize, .c_short, @@ -1191,6 +1203,8 @@ pub const Type = extern union { .i32, .u64, .i64, + .u128, + .i128, .usize, .isize, .c_short, @@ -1278,6 +1292,8 @@ pub const Type = extern union { .i32, .u64, .i64, + .u128, + .i128, .usize, .isize, .c_short, @@ -1359,6 +1375,8 @@ pub const Type = extern union { .i32, .u64, .i64, + .u128, + .i128, .usize, .isize, .c_short, @@ -1440,6 +1458,8 @@ pub const Type = extern union { .i32, .u64, .i64, + .u128, + .i128, .usize, .isize, .c_short, @@ -1522,6 +1542,8 @@ pub const Type = extern union { .i32, .u64, .i64, + .u128, + .i128, .usize, .isize, .c_short, @@ -1776,6 +1798,8 @@ pub const Type = extern union { .i32, .u64, .i64, + .u128, + .i128, .usize, .isize, .c_short, @@ -1856,6 +1880,8 @@ pub const Type = extern union { .i32, .u64, .i64, + .u128, + .i128, .usize, .isize, .c_short, @@ -2009,6 +2035,8 @@ pub const Type = extern union { .i16, .i32, .i64, + .u128, + .i128, => true, }; } @@ -2061,6 +2089,8 @@ pub const Type = extern union { .i16, .i32, .i64, + .u128, + .i128, .optional, .optional_single_mut_pointer, .optional_single_const_pointer, @@ -2227,6 +2257,8 @@ pub const Type = extern union { .i32, .u64, .i64, + .u128, + .i128, .optional, .optional_single_mut_pointer, .optional_single_const_pointer, @@ -2333,6 +2365,8 @@ pub const Type = extern union { .i32, .u64, .i64, + .u128, + .i128, .usize, .isize, .c_short, @@ -2417,6 +2451,8 @@ pub const Type = extern union { .i32, .u64, .i64, + .u128, + .i128, .usize, .isize, .c_short, @@ -2500,6 +2536,8 @@ pub const Type = extern union { .i32, .u64, .i64, + .u128, + .i128, .usize, .isize, .c_short, @@ -2583,6 +2621,8 @@ pub const Type = extern union { .i32, .u64, .i64, + .u128, + .i128, .usize, .isize, .c_short, @@ -2663,6 +2703,8 @@ pub const Type = extern union { .i32, .u64, .i64, + .u128, + .i128, .usize, .isize, .c_short, @@ -2743,6 
+2785,8 @@ pub const Type = extern union { .i32, .u64, .i64, + .u128, + .i128, .usize, .isize, .c_short, @@ -2793,6 +2837,8 @@ pub const Type = extern union { .i32, .u64, .i64, + .u128, + .i128, .usize, .isize, .c_short, @@ -2874,6 +2920,8 @@ pub const Type = extern union { .i32, .u64, .i64, + .u128, + .i128, .usize, .isize, .c_short, @@ -2971,6 +3019,8 @@ pub const Type = extern union { .i32, .u64, .i64, + .u128, + .i128, .usize, .isize, .c_short, @@ -3060,6 +3110,8 @@ pub const Type = extern union { .i32, .u64, .i64, + .u128, + .i128, .usize, .isize, .c_short, @@ -3193,6 +3245,8 @@ pub const Type = extern union { i32, u64, i64, + u128, + i128, usize, isize, c_short, @@ -3277,6 +3331,8 @@ pub const Type = extern union { .i32, .u64, .i64, + .u128, + .i128, .usize, .isize, .c_short, From d835f5cce5fc3b296f55e208905d9ff4d368e497 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Sun, 31 Jan 2021 12:55:33 +0200 Subject: [PATCH 065/173] translate-c: make Node more like Type --- src/translate_c/ast.zig | 222 +++++++++++++++++++++++++--------------- src/type.zig | 5 + 2 files changed, 146 insertions(+), 81 deletions(-) diff --git a/src/translate_c/ast.zig b/src/translate_c/ast.zig index 4418971269..0585c26d18 100644 --- a/src/translate_c/ast.zig +++ b/src/translate_c/ast.zig @@ -2,14 +2,19 @@ const std = @import("std"); const Type = @import("../type.zig").Type; pub const Node = struct { - tag: Tag, - // type: Type = Type.initTag(.noreturn), + /// If the tag value is less than Tag.no_payload_count, then no pointer + /// dereference is needed. + tag_if_small_enough: usize, + ptr_otherwise: *Payload, pub const Tag = enum { null_literal, undefined_literal, opaque_literal, - bool_literal, + true_literal, + false_literal, + // After this, the tag requires a payload. 
+ int, float, string, @@ -39,12 +44,18 @@ pub const Node = struct { discard, block, + pub const last_no_payload_tag = Tag.false_literal; + pub const no_payload_count = @enumToInt(last_no_payload_tag) + 1; + pub fn Type(tag: Tag) ?type { return switch (tag) { - .null_literal => null, - .undefined_literal => null, - .opaque_literal => null, - .bool_literal, + .null_literal, + .undefined_literal, + .opaque_literal, + .true_literal, + .false_litral, + => @compileError("Type Tag " ++ @tagName(t) ++ " has no payload"), + .int, .float, .string, @@ -54,144 +65,186 @@ pub const Node = struct { .field_access_arrow, .warning, .failed_decl, - => Value, - .@"if" => If, - .@"while" => While, - .@"switch" => Switch, - .@"break" => Break, - .call => Call, + => Payload.Value, + .@"if" => Payload.If, + .@"while" => Payload.While, + .@"switch" => Payload.Switch, + .@"break" => Payload.Break, + .call => Payload.Call, .array_access, .std_mem_zeroes, .@"return", .discard, - => SingleArg, - .var_decl => VarDecl, - .func => Func, - .@"enum" => Enum, - .@"struct", .@"union" => Record, - .array_init => ArrayInit, - .container_init => ContainerInit, - .std_meta_cast => Infix, - .block => Block, + => Payload.SingleArg, + .var_decl => Payload.VarDecl, + .func => Payload.Func, + .@"enum" => Payload.Enum, + .@"struct", .@"union" => Payload.Record, + .array_init => Payload.ArrayInit, + .container_init => Payload.ContainerInit, + .std_meta_cast => Payload.Infix, + .block => Payload.Block, }; } + + pub fn init(comptime t: Tag) Node { + comptime std.debug.assert(@enumToInt(t) < Tag.no_payload_count); + return .{ .tag_if_small_enough = @enumToInt(t) }; + } + + pub fn create(comptime t: Tag, ally: *Allocator, data: Data(t)) error{OutOfMemory}!Node { + const ptr = try ally.create(t.Type()); + ptr.* = .{ + .base = .{ .tag = t }, + .data = data, + }; + return Node{ .ptr_otherwise = &ptr.base }; + } + + pub fn Data(comptime t: Tag) type { + return std.meta.fieldInfo(t.Type(), .data).field_type; + } }; +}; 
+ +pub const Payload = struct { + tag: Tag, pub const Infix = struct { base: Node, - lhs: *Node, - rhs: *Node, + data: struct { + lhs: *Node, + rhs: *Node, + }, }; pub const Value = struct { base: Node, - val: []const u8, + data: []const u8, }; pub const SingleArg = struct { base: Node, - index: *Node, + data: *Node, }; pub const If = struct { base: Node = .{ .tag = .@"if" }, - cond: *Node, - then: *Node, - @"else": ?*Node, + data: struct { + cond: *Node, + then: *Node, + @"else": ?*Node, + }, }; pub const While = struct { base: Node = .{ .tag = .@"while" }, - cond: *Node, - body: *Node, + data: struct { + cond: *Node, + body: *Node, + }, }; pub const Switch = struct { base: Node = .{ .tag = .@"switch" }, - cond: *Node, - cases: []Prong, - default: ?[]const u8, + data: struct { + cond: *Node, + cases: []Prong, + default: ?[]const u8, - pub const Prong = struct { - lhs: *Node, - rhs: ?*Node, - label: []const u8, - }; + pub const Prong = struct { + lhs: *Node, + rhs: ?*Node, + label: []const u8, + }; + }, }; pub const Break = struct { base: Node = .{ .tag = .@"break" }, - label: ?[]const u8, - rhs: ?*Node, + data: struct { + label: ?[]const u8, + rhs: ?*Node, + }, }; pub const Call = struct { base: Node = .{.call}, - lhs: *Node, - args: []*Node, + data: struct { + lhs: *Node, + args: []*Node, + }, }; pub const VarDecl = struct { base: Node = .{ .tag = .var_decl }, - @"pub": bool, - @"const": bool, - @"extern": bool, - @"export": bool, - name: []const u8, - type: Type, - init: *Node, + data: struct { + @"pub": bool, + @"const": bool, + @"extern": bool, + @"export": bool, + name: []const u8, + type: Type, + init: *Node, + }, }; pub const Func = struct { base: Node = .{.func}, - @"pub": bool, - @"extern": bool, - @"export": bool, - name: []const u8, - cc: std.builtin.CallingConvention, - params: []Param, - return_type: Type, - body: ?*Node, + data: struct { + @"pub": bool, + @"extern": bool, + @"export": bool, + name: []const u8, + cc: std.builtin.CallingConvention, + 
params: []Param, + return_type: Type, + body: ?*Node, - pub const Param = struct { - @"noalias": bool, - name: ?[]const u8, - type: Type, - }; + pub const Param = struct { + @"noalias": bool, + name: ?[]const u8, + type: Type, + }; + }, }; pub const Enum = struct { base: Node = .{ .tag = .@"enum" }, - name: ?[]const u8, - fields: []Field, + data: struct { + name: ?[]const u8, + fields: []Field, - pub const Field = struct { - name: []const u8, - value: ?[]const u8, - }; + pub const Field = struct { + name: []const u8, + value: ?[]const u8, + }; + }, }; pub const Record = struct { base: Node, - name: ?[]const u8, - @"packed": bool, - fields: []Field, + data: struct { + name: ?[]const u8, + @"packed": bool, + fields: []Field, - pub const Field = struct { - name: []const u8, - type: Type, - alignment: c_uint, - }; + pub const Field = struct { + name: []const u8, + type: Type, + alignment: c_uint, + }; + }, }; pub const ArrayInit = struct { base: Node = .{ .tag = .array_init }, - values: []*Node, + data: []*Node, }; pub const ContainerInit = struct { base: Node = .{ .tag = .container_init }, - values: []Initializer, + data: []Initializer, pub const Initializer = struct { name: []const u8, @@ -201,7 +254,14 @@ pub const Node = struct { pub const Block = struct { base: Node = .{ .tag = .block }, - label: ?[]const u8, - stmts: []*Node, + data: struct { + label: ?[]const u8, + stmts: []*Node, + }, }; }; + +/// Converts the nodes into a Zig ast and then renders it. 
+pub fn render(allocator: *Allocator, nodes: []const Node) !void { + @panic("TODO"); +} diff --git a/src/type.zig b/src/type.zig index 7d3308e403..8fcaba6fad 100644 --- a/src/type.zig +++ b/src/type.zig @@ -3408,6 +3408,11 @@ pub const Type = extern union { }; } + pub fn init(comptime t: Tag) Type { + comptime std.debug.assert(@enumToInt(t) < Tag.no_payload_count); + return .{ .tag_if_small_enough = @enumToInt(t) }; + } + pub fn create(comptime t: Tag, ally: *Allocator, data: Data(t)) error{OutOfMemory}!Type { const ptr = try ally.create(t.Type()); ptr.* = .{ From 4c0c9b07555bb69d05142dfe038a7cad79068ba9 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Sat, 6 Feb 2021 18:45:45 +0200 Subject: [PATCH 066/173] translate-c: convert transBinaryOperator --- src/translate_c.zig | 653 ++++++++-------------------------------- src/translate_c/ast.zig | 151 +++++++++- 2 files changed, 269 insertions(+), 535 deletions(-) diff --git a/src/translate_c.zig b/src/translate_c.zig index e6629592ed..f885ea98b9 100644 --- a/src/translate_c.zig +++ b/src/translate_c.zig @@ -3,14 +3,14 @@ const std = @import("std"); const assert = std.debug.assert; -const ast = std.zig.ast; -const Token = std.zig.Token; const clang = @import("clang.zig"); const ctok = std.c.tokenizer; const CToken = std.c.Token; const mem = std.mem; const math = std.math; const Type = @import("type.zig").Type; +const ast = @import("translate_c/ast.zig"); +const Node = ast.Node; const CallingConvention = std.builtin.CallingConvention; @@ -348,47 +348,6 @@ pub const Context = struct { } }; -fn addCBuiltinsNamespace(c: *Context) Error!void { - // pub usingnamespace @import("std").c.builtins; - const pub_tok = try appendToken(c, .Keyword_pub, "pub"); - const use_tok = try appendToken(c, .Keyword_usingnamespace, "usingnamespace"); - const import_tok = try appendToken(c, .Builtin, "@import"); - const lparen_tok = try appendToken(c, .LParen, "("); - const std_tok = try appendToken(c, .StringLiteral, "\"std\""); - const 
rparen_tok = try appendToken(c, .RParen, ")"); - - const std_node = try c.arena.create(ast.Node.OneToken); - std_node.* = .{ - .base = .{ .tag = .StringLiteral }, - .token = std_tok, - }; - - const call_node = try ast.Node.BuiltinCall.alloc(c.arena, 1); - call_node.* = .{ - .builtin_token = import_tok, - .params_len = 1, - .rparen_token = rparen_tok, - }; - call_node.params()[0] = &std_node.base; - - var access_chain = &call_node.base; - access_chain = try transCreateNodeFieldAccess(c, access_chain, "c"); - access_chain = try transCreateNodeFieldAccess(c, access_chain, "builtins"); - - const semi_tok = try appendToken(c, .Semicolon, ";"); - - const bytes = try c.gpa.alignedAlloc(u8, @alignOf(ast.Node.Use), @sizeOf(ast.Node.Use)); - const using_node = @ptrCast(*ast.Node.Use, bytes.ptr); - using_node.* = .{ - .doc_comments = null, - .visib_token = pub_tok, - .use_token = use_tok, - .expr = access_chain, - .semicolon_token = semi_tok, - }; - try c.root_decls.append(c.gpa, &using_node.base); -} - pub fn translate( gpa: *mem.Allocator, args_begin: [*]?[*]const u8, @@ -446,7 +405,7 @@ pub fn translate( context.opaque_demotes.deinit(gpa); } - try addCBuiltinsNamespace(&context); + _ = try Node.usingnamespace_builtins.init(); try prepopulateGlobalNameTable(ast_unit, &context); @@ -1318,26 +1277,6 @@ fn transEnumDecl(c: *Context, enum_decl: *const clang.EnumDecl) Error!?*ast.Node return transCreateNodeIdentifier(c, name); } -fn createAlias(c: *Context, alias: anytype) !void { - const visib_tok = try appendToken(c, .Keyword_pub, "pub"); - const mut_tok = try appendToken(c, .Keyword_const, "const"); - const name_tok = try appendIdentifier(c, alias.alias); - const eq_token = try appendToken(c, .Equal, "="); - const init_node = try transCreateNodeIdentifier(c, alias.name); - const semicolon_token = try appendToken(c, .Semicolon, ";"); - - const node = try ast.Node.VarDecl.create(c.arena, .{ - .name_token = name_tok, - .mut_token = mut_tok, - .semicolon_token = semicolon_token, 
- }, .{ - .visib_token = visib_tok, - .eq_token = eq_token, - .init_node = init_node, - }); - return addTopLevelDecl(c, alias.alias, &node.base); -} - const ResultUsed = enum { used, unused, @@ -1349,78 +1288,63 @@ const LRValue = enum { }; fn transStmt( - rp: RestorePoint, + c: *Context, scope: *Scope, stmt: *const clang.Stmt, result_used: ResultUsed, lrvalue: LRValue, -) TransError!*ast.Node { +) TransError!Node { const sc = stmt.getStmtClass(); switch (sc) { - .BinaryOperatorClass => return transBinaryOperator(rp, scope, @ptrCast(*const clang.BinaryOperator, stmt), result_used), - .CompoundStmtClass => return transCompoundStmt(rp, scope, @ptrCast(*const clang.CompoundStmt, stmt)), - .CStyleCastExprClass => return transCStyleCastExprClass(rp, scope, @ptrCast(*const clang.CStyleCastExpr, stmt), result_used, lrvalue), - .DeclStmtClass => return transDeclStmt(rp, scope, @ptrCast(*const clang.DeclStmt, stmt)), - .DeclRefExprClass => return transDeclRefExpr(rp, scope, @ptrCast(*const clang.DeclRefExpr, stmt), lrvalue), - .ImplicitCastExprClass => return transImplicitCastExpr(rp, scope, @ptrCast(*const clang.ImplicitCastExpr, stmt), result_used), - .IntegerLiteralClass => return transIntegerLiteral(rp, scope, @ptrCast(*const clang.IntegerLiteral, stmt), result_used, .with_as), - .ReturnStmtClass => return transReturnStmt(rp, scope, @ptrCast(*const clang.ReturnStmt, stmt)), - .StringLiteralClass => return transStringLiteral(rp, scope, @ptrCast(*const clang.StringLiteral, stmt), result_used), + .BinaryOperatorClass => return transBinaryOperator(c, scope, @ptrCast(*const clang.BinaryOperator, stmt), result_used), + .CompoundStmtClass => return transCompoundStmt(c, scope, @ptrCast(*const clang.CompoundStmt, stmt)), + .CStyleCastExprClass => return transCStyleCastExprClass(c, scope, @ptrCast(*const clang.CStyleCastExpr, stmt), result_used, lrvalue), + .DeclStmtClass => return transDeclStmt(c, scope, @ptrCast(*const clang.DeclStmt, stmt)), + .DeclRefExprClass => return 
transDeclRefExpr(c, scope, @ptrCast(*const clang.DeclRefExpr, stmt), lrvalue), + .ImplicitCastExprClass => return transImplicitCastExpr(c, scope, @ptrCast(*const clang.ImplicitCastExpr, stmt), result_used), + .IntegerLiteralClass => return transIntegerLiteral(c, scope, @ptrCast(*const clang.IntegerLiteral, stmt), result_used, .with_as), + .ReturnStmtClass => return transReturnStmt(c, scope, @ptrCast(*const clang.ReturnStmt, stmt)), + .StringLiteralClass => return transStringLiteral(c, scope, @ptrCast(*const clang.StringLiteral, stmt), result_used), .ParenExprClass => { - const expr = try transExpr(rp, scope, @ptrCast(*const clang.ParenExpr, stmt).getSubExpr(), .used, lrvalue); - if (expr.tag == .GroupedExpression) return maybeSuppressResult(rp, scope, result_used, expr); - const node = try rp.c.arena.create(ast.Node.GroupedExpression); - node.* = .{ - .lparen = try appendToken(rp.c, .LParen, "("), - .expr = expr, - .rparen = try appendToken(rp.c, .RParen, ")"), - }; - return maybeSuppressResult(rp, scope, result_used, &node.base); + const expr = try transExpr(c, scope, @ptrCast(*const clang.ParenExpr, stmt).getSubExpr(), .used, lrvalue); + return maybeSuppressResult(c, scope, result_used, expr); }, - .InitListExprClass => return transInitListExpr(rp, scope, @ptrCast(*const clang.InitListExpr, stmt), result_used), - .ImplicitValueInitExprClass => return transImplicitValueInitExpr(rp, scope, @ptrCast(*const clang.Expr, stmt), result_used), - .IfStmtClass => return transIfStmt(rp, scope, @ptrCast(*const clang.IfStmt, stmt)), - .WhileStmtClass => return transWhileLoop(rp, scope, @ptrCast(*const clang.WhileStmt, stmt)), - .DoStmtClass => return transDoWhileLoop(rp, scope, @ptrCast(*const clang.DoStmt, stmt)), + .InitListExprClass => return transInitListExpr(c, scope, @ptrCast(*const clang.InitListExpr, stmt), result_used), + .ImplicitValueInitExprClass => return transImplicitValueInitExpr(c, scope, @ptrCast(*const clang.Expr, stmt), result_used), + .IfStmtClass => 
return transIfStmt(c, scope, @ptrCast(*const clang.IfStmt, stmt)), + .WhileStmtClass => return transWhileLoop(c, scope, @ptrCast(*const clang.WhileStmt, stmt)), + .DoStmtClass => return transDoWhileLoop(c, scope, @ptrCast(*const clang.DoStmt, stmt)), .NullStmtClass => { - const block = try rp.c.createBlock(0); - block.rbrace = try appendToken(rp.c, .RBrace, "}"); - return &block.base; + return Node.empty_block.init(); }, - .ContinueStmtClass => return try transCreateNodeContinue(rp.c), - .BreakStmtClass => return transBreak(rp, scope), - .ForStmtClass => return transForLoop(rp, scope, @ptrCast(*const clang.ForStmt, stmt)), - .FloatingLiteralClass => return transFloatingLiteral(rp, scope, @ptrCast(*const clang.FloatingLiteral, stmt), result_used), + .ContinueStmtClass => return try transCreateNodeContinue(c), + .BreakStmtClass => return transBreak(c, scope), + .ForStmtClass => return transForLoop(c, scope, @ptrCast(*const clang.ForStmt, stmt)), + .FloatingLiteralClass => return transFloatingLiteral(c, scope, @ptrCast(*const clang.FloatingLiteral, stmt), result_used), .ConditionalOperatorClass => { - return transConditionalOperator(rp, scope, @ptrCast(*const clang.ConditionalOperator, stmt), result_used); + return transConditionalOperator(c, scope, @ptrCast(*const clang.ConditionalOperator, stmt), result_used); }, .BinaryConditionalOperatorClass => { - return transBinaryConditionalOperator(rp, scope, @ptrCast(*const clang.BinaryConditionalOperator, stmt), result_used); + return transBinaryConditionalOperator(c, scope, @ptrCast(*const clang.BinaryConditionalOperator, stmt), result_used); }, - .SwitchStmtClass => return transSwitch(rp, scope, @ptrCast(*const clang.SwitchStmt, stmt)), - .CaseStmtClass => return transCase(rp, scope, @ptrCast(*const clang.CaseStmt, stmt)), - .DefaultStmtClass => return transDefault(rp, scope, @ptrCast(*const clang.DefaultStmt, stmt)), - .ConstantExprClass => return transConstantExpr(rp, scope, @ptrCast(*const clang.Expr, stmt), 
result_used), - .PredefinedExprClass => return transPredefinedExpr(rp, scope, @ptrCast(*const clang.PredefinedExpr, stmt), result_used), - .CharacterLiteralClass => return transCharLiteral(rp, scope, @ptrCast(*const clang.CharacterLiteral, stmt), result_used, .with_as), - .StmtExprClass => return transStmtExpr(rp, scope, @ptrCast(*const clang.StmtExpr, stmt), result_used), - .MemberExprClass => return transMemberExpr(rp, scope, @ptrCast(*const clang.MemberExpr, stmt), result_used), - .ArraySubscriptExprClass => return transArrayAccess(rp, scope, @ptrCast(*const clang.ArraySubscriptExpr, stmt), result_used), - .CallExprClass => return transCallExpr(rp, scope, @ptrCast(*const clang.CallExpr, stmt), result_used), - .UnaryExprOrTypeTraitExprClass => return transUnaryExprOrTypeTraitExpr(rp, scope, @ptrCast(*const clang.UnaryExprOrTypeTraitExpr, stmt), result_used), - .UnaryOperatorClass => return transUnaryOperator(rp, scope, @ptrCast(*const clang.UnaryOperator, stmt), result_used), - .CompoundAssignOperatorClass => return transCompoundAssignOperator(rp, scope, @ptrCast(*const clang.CompoundAssignOperator, stmt), result_used), + .SwitchStmtClass => return transSwitch(c, scope, @ptrCast(*const clang.SwitchStmt, stmt)), + .CaseStmtClass => return transCase(c, scope, @ptrCast(*const clang.CaseStmt, stmt)), + .DefaultStmtClass => return transDefault(c, scope, @ptrCast(*const clang.DefaultStmt, stmt)), + .ConstantExprClass => return transConstantExpr(c, scope, @ptrCast(*const clang.Expr, stmt), result_used), + .PredefinedExprClass => return transPredefinedExpr(c, scope, @ptrCast(*const clang.PredefinedExpr, stmt), result_used), + .CharacterLiteralClass => return transCharLiteral(c, scope, @ptrCast(*const clang.CharacterLiteral, stmt), result_used, .with_as), + .StmtExprClass => return transStmtExpr(c, scope, @ptrCast(*const clang.StmtExpr, stmt), result_used), + .MemberExprClass => return transMemberExpr(c, scope, @ptrCast(*const clang.MemberExpr, stmt), result_used), + 
.ArraySubscriptExprClass => return transArrayAccess(c, scope, @ptrCast(*const clang.ArraySubscriptExpr, stmt), result_used), + .CallExprClass => return transCallExpr(c, scope, @ptrCast(*const clang.CallExpr, stmt), result_used), + .UnaryExprOrTypeTraitExprClass => return transUnaryExprOrTypeTraitExpr(c, scope, @ptrCast(*const clang.UnaryExprOrTypeTraitExpr, stmt), result_used), + .UnaryOperatorClass => return transUnaryOperator(c, scope, @ptrCast(*const clang.UnaryOperator, stmt), result_used), + .CompoundAssignOperatorClass => return transCompoundAssignOperator(c, scope, @ptrCast(*const clang.CompoundAssignOperator, stmt), result_used), .OpaqueValueExprClass => { const source_expr = @ptrCast(*const clang.OpaqueValueExpr, stmt).getSourceExpr().?; - const expr = try transExpr(rp, scope, source_expr, .used, lrvalue); - if (expr.tag == .GroupedExpression) return maybeSuppressResult(rp, scope, result_used, expr); - const node = try rp.c.arena.create(ast.Node.GroupedExpression); - node.* = .{ - .lparen = try appendToken(rp.c, .LParen, "("), - .expr = expr, - .rparen = try appendToken(rp.c, .RParen, ")"), - }; - return maybeSuppressResult(rp, scope, result_used, &node.base); + const expr = try transExpr(c, scope, source_expr, .used, lrvalue); + return maybeSuppressResult(c, scope, result_used, expr); + const node = try c.arena.create(Node.GroupedExpression); }, else => { return revertAndWarn( @@ -1435,175 +1359,139 @@ fn transStmt( } fn transBinaryOperator( - rp: RestorePoint, + c: *Context, scope: *Scope, stmt: *const clang.BinaryOperator, result_used: ResultUsed, -) TransError!*ast.Node { +) TransError!Node { const op = stmt.getOpcode(); const qt = stmt.getType(); - var op_token: ast.TokenIndex = undefined; - var op_id: ast.Node.Tag = undefined; switch (op) { - .Assign => return try transCreateNodeAssign(rp, scope, result_used, stmt.getLHS(), stmt.getRHS()), + .Assign => return try transCreateNodeAssign(c, scope, result_used, stmt.getLHS(), stmt.getRHS()), .Comma => { 
var block_scope = try Scope.Block.init(rp.c, scope, true); - const lparen = try appendToken(rp.c, .LParen, "("); + defer block_scope.deinit(); - const lhs = try transExpr(rp, &block_scope.base, stmt.getLHS(), .unused, .r_value); + + const lhs = try transExpr(c, &block_scope.base, stmt.getLHS(), .unused, .r_value); try block_scope.statements.append(lhs); const rhs = try transExpr(rp, &block_scope.base, stmt.getRHS(), .used, .r_value); - _ = try appendToken(rp.c, .Semicolon, ";"); - const break_node = try transCreateNodeBreak(rp.c, block_scope.label, rhs); - try block_scope.statements.append(&break_node.base); + const break_node = try Node.break_val.create(c.arena, .{ + .label = block_scope.label, + .val = rhs, + }); + try block_scope.statements.append(break_node); const block_node = try block_scope.complete(rp.c); - const rparen = try appendToken(rp.c, .RParen, ")"); - const grouped_expr = try rp.c.arena.create(ast.Node.GroupedExpression); - grouped_expr.* = .{ - .lparen = lparen, - .expr = block_node, - .rparen = rparen, - }; - return maybeSuppressResult(rp, scope, result_used, &grouped_expr.base); + return maybeSuppressResult(rp, scope, result_used, block_node); }, .Div => { if (cIsSignedInteger(qt)) { // signed integer division uses @divTrunc - const div_trunc_node = try rp.c.createBuiltinCall("@divTrunc", 2); - div_trunc_node.params()[0] = try transExpr(rp, scope, stmt.getLHS(), .used, .l_value); - _ = try appendToken(rp.c, .Comma, ","); - const rhs = try transExpr(rp, scope, stmt.getRHS(), .used, .r_value); - div_trunc_node.params()[1] = rhs; - div_trunc_node.rparen_token = try appendToken(rp.c, .RParen, ")"); - return maybeSuppressResult(rp, scope, result_used, &div_trunc_node.base); + const lhs = try transExpr(c, scope, stmt.getLHS(), .used, .l_value); + const rhs = try transExpr(c, scope, stmt.getRHS(), .used, .r_value); + const div_trunc = try Node.div_trunc.create(c.arena, .{ .lhs = lhs, .rhs = rhs}); + return maybeSuppressResult(c, scope, result_used, 
div_trunc); } }, .Rem => { if (cIsSignedInteger(qt)) { // signed integer division uses @rem - const rem_node = try rp.c.createBuiltinCall("@rem", 2); - rem_node.params()[0] = try transExpr(rp, scope, stmt.getLHS(), .used, .l_value); - _ = try appendToken(rp.c, .Comma, ","); - const rhs = try transExpr(rp, scope, stmt.getRHS(), .used, .r_value); - rem_node.params()[1] = rhs; - rem_node.rparen_token = try appendToken(rp.c, .RParen, ")"); - return maybeSuppressResult(rp, scope, result_used, &rem_node.base); + const lhs = try transExpr(c, scope, stmt.getLHS(), .used, .l_value); + const rhs = try transExpr(c, scope, stmt.getRHS(), .used, .r_value); + const rem = try Node.rem.create(c.arena, .{ .lhs = lhs, .rhs = rhs}); + return maybeSuppressResult(c, scope, result_used, rem); } }, .Shl => { - const node = try transCreateNodeShiftOp(rp, scope, stmt, .BitShiftLeft, .AngleBracketAngleBracketLeft, "<<"); - return maybeSuppressResult(rp, scope, result_used, node); + const node = try transCreateNodeShiftOp(c, scope, stmt, .shl); + return maybeSuppressResult(c, scope, result_used, node); }, .Shr => { - const node = try transCreateNodeShiftOp(rp, scope, stmt, .BitShiftRight, .AngleBracketAngleBracketRight, ">>"); - return maybeSuppressResult(rp, scope, result_used, node); - }, - .LAnd => { - const node = try transCreateNodeBoolInfixOp(rp, scope, stmt, .BoolAnd, result_used, true); - return maybeSuppressResult(rp, scope, result_used, node); - }, - .LOr => { - const node = try transCreateNodeBoolInfixOp(rp, scope, stmt, .BoolOr, result_used, true); - return maybeSuppressResult(rp, scope, result_used, node); + const node = try transCreateNodeShiftOp(c, scope, stmt, .shr); + return maybeSuppressResult(c, scope, result_used, node); }, else => {}, } - const lhs_node = try transExpr(rp, scope, stmt.getLHS(), .used, .l_value); + var op_id: Node.Tag = undefined; switch (op) { .Add => { if (cIsUnsignedInteger(qt)) { - op_token = try appendToken(rp.c, .PlusPercent, "+%"); - op_id = 
.AddWrap; + op_id = .add_wrap; } else { - op_token = try appendToken(rp.c, .Plus, "+"); - op_id = .Add; + op_id = .add; } }, .Sub => { if (cIsUnsignedInteger(qt)) { - op_token = try appendToken(rp.c, .MinusPercent, "-%"); - op_id = .SubWrap; + op_id = .sub_wrap; } else { - op_token = try appendToken(rp.c, .Minus, "-"); - op_id = .Sub; + op_id = .sub; } }, .Mul => { if (cIsUnsignedInteger(qt)) { - op_token = try appendToken(rp.c, .AsteriskPercent, "*%"); - op_id = .MulWrap; + op_id = .mul_wrap; } else { - op_token = try appendToken(rp.c, .Asterisk, "*"); - op_id = .Mul; + op_id = .mul; } }, .Div => { // unsigned/float division uses the operator - op_id = .Div; - op_token = try appendToken(rp.c, .Slash, "/"); + op_id = .div; }, .Rem => { // unsigned/float division uses the operator - op_id = .Mod; - op_token = try appendToken(rp.c, .Percent, "%"); + op_id = .mod; }, .LT => { - op_id = .LessThan; - op_token = try appendToken(rp.c, .AngleBracketLeft, "<"); + op_id = .less_than; }, .GT => { - op_id = .GreaterThan; - op_token = try appendToken(rp.c, .AngleBracketRight, ">"); + op_id = .greater_than; }, .LE => { - op_id = .LessOrEqual; - op_token = try appendToken(rp.c, .AngleBracketLeftEqual, "<="); + op_id = .less_than_equal; }, .GE => { - op_id = .GreaterOrEqual; - op_token = try appendToken(rp.c, .AngleBracketRightEqual, ">="); + op_id = .greater_than_equal; }, .EQ => { - op_id = .EqualEqual; - op_token = try appendToken(rp.c, .EqualEqual, "=="); + op_id = .equal; }, .NE => { - op_id = .BangEqual; - op_token = try appendToken(rp.c, .BangEqual, "!="); + op_id = .not_equal; }, .And => { - op_id = .BitAnd; - op_token = try appendToken(rp.c, .Ampersand, "&"); + op_id = .bit_and; }, .Xor => { - op_id = .BitXor; - op_token = try appendToken(rp.c, .Caret, "^"); + op_id = .bit_xor; }, .Or => { - op_id = .BitOr; - op_token = try appendToken(rp.c, .Pipe, "|"); + op_id = .bit_or; + }, + .LAnd => { + op_id = .@"and"; + }, + .LOr => { + op_id = .@"or"; }, else => unreachable, } - 
const rhs_node = try transExpr(rp, scope, stmt.getRHS(), .used, .r_value); + const lhs = try transExpr(c, scope, stmt.getLHS(), .used, .l_value); + const rhs = try transExpr(c, scope, stmt.getRHS(), .used, .r_value); - const lhs = if (isBoolRes(lhs_node)) init: { - const cast_node = try rp.c.createBuiltinCall("@boolToInt", 1); - cast_node.params()[0] = lhs_node; - cast_node.rparen_token = try appendToken(rp.c, .RParen, ")"); - break :init &cast_node.base; - } else lhs_node; - - const rhs = if (isBoolRes(rhs_node)) init: { - const cast_node = try rp.c.createBuiltinCall("@boolToInt", 1); - cast_node.params()[0] = rhs_node; - cast_node.rparen_token = try appendToken(rp.c, .RParen, ")"); - break :init &cast_node.base; - } else rhs_node; - - return transCreateNodeInfixOp(rp, scope, lhs, op_id, op_token, rhs, result_used, true); + const payload = try c.arena.create(ast.Payload.BinOp); + payload.* = .{ + .base = .{ .tag = op_id }, + .data = .{ + .lhs = lhs, + .rhs = rhs, + }, + }; + return maybeSuppressResult(c, scope, used, &payload.base); } fn transCompoundStmtInline( @@ -2365,40 +2253,13 @@ fn transEnumToInt(c: *Context, enum_expr: *ast.Node) TypeError!*ast.Node { } fn transExpr( - rp: RestorePoint, + c: *Context, scope: *Scope, expr: *const clang.Expr, used: ResultUsed, lrvalue: LRValue, -) TransError!*ast.Node { - return transStmt(rp, scope, @ptrCast(*const clang.Stmt, expr), used, lrvalue); -} - -/// Same as `transExpr` but with the knowledge that the operand will be type coerced, and therefore -/// an `@as` would be redundant. This is used to prevent redundant `@as` in integer literals. 
-fn transExprCoercing( - rp: RestorePoint, - scope: *Scope, - expr: *const clang.Expr, - used: ResultUsed, - lrvalue: LRValue, -) TransError!*ast.Node { - switch (@ptrCast(*const clang.Stmt, expr).getStmtClass()) { - .IntegerLiteralClass => { - return transIntegerLiteral(rp, scope, @ptrCast(*const clang.IntegerLiteral, expr), .used, .no_as); - }, - .CharacterLiteralClass => { - return transCharLiteral(rp, scope, @ptrCast(*const clang.CharacterLiteral, expr), .used, .no_as); - }, - .UnaryOperatorClass => { - const un_expr = @ptrCast(*const clang.UnaryOperator, expr); - if (un_expr.getOpcode() == .Extension) { - return transExprCoercing(rp, scope, un_expr.getSubExpr(), used, lrvalue); - } - }, - else => {}, - } - return transExpr(rp, scope, expr, .used, .r_value); +) TransError!Node { + return transStmt(c, scope, @ptrCast(*const clang.Stmt, expr), used, lrvalue); } fn transInitListExprRecord( @@ -4150,7 +4011,7 @@ fn qualTypeIsBoolean(qt: clang.QualType) bool { return qualTypeCanon(qt).isBooleanType(); } -fn qualTypeIntBitWidth(rp: RestorePoint, qt: clang.QualType, source_loc: clang.SourceLocation) !u32 { +fn qualTypeIntBitWidth(c: *Context, qt: clang.QualType, source_loc: clang.SourceLocation) !u32 { const ty = qt.getTypePtr(); switch (ty.getTypeClass()) { @@ -4174,7 +4035,7 @@ fn qualTypeIntBitWidth(rp: RestorePoint, qt: clang.QualType, source_loc: clang.S .Typedef => { const typedef_ty = @ptrCast(*const clang.TypedefType, ty); const typedef_decl = typedef_ty.getDecl(); - const type_name = try rp.c.str(@ptrCast(*const clang.NamedDecl, typedef_decl).getName_bytes_begin()); + const type_name = try c.str(@ptrCast(*const clang.NamedDecl, typedef_decl).getName_bytes_begin()); if (mem.eql(u8, type_name, "uint8_t") or mem.eql(u8, type_name, "int8_t")) { return 8; @@ -4194,51 +4055,17 @@ fn qualTypeIntBitWidth(rp: RestorePoint, qt: clang.QualType, source_loc: clang.S unreachable; } -fn qualTypeToLog2IntRef(rp: RestorePoint, qt: clang.QualType, source_loc: 
clang.SourceLocation) !*ast.Node { - const int_bit_width = try qualTypeIntBitWidth(rp, qt, source_loc); +fn qualTypeToLog2IntRef(c: *Context, qt: clang.QualType, source_loc: clang.SourceLocation) !Node { + const int_bit_width = try qualTypeIntBitWidth(c, qt, source_loc); if (int_bit_width != 0) { // we can perform the log2 now. const cast_bit_width = math.log2_int(u64, int_bit_width); - const node = try rp.c.arena.create(ast.Node.OneToken); - node.* = .{ - .base = .{ .tag = .IntegerLiteral }, - .token = try appendTokenFmt(rp.c, .Identifier, "u{d}", .{cast_bit_width}), - }; - return &node.base; + return Node.uint_type.create(c.arena, cast_bit_width); } - const zig_type_node = try transQualType(rp, qt, source_loc); - - // @import("std").math.Log2Int(c_long); - // - // FnCall - // FieldAccess - // FieldAccess - // FnCall (.builtin = true) - // Symbol "import" - // StringLiteral "std" - // Symbol "math" - // Symbol "Log2Int" - // Symbol (var from above) - - const import_fn_call = try rp.c.createBuiltinCall("@import", 1); - const std_token = try appendToken(rp.c, .StringLiteral, "\"std\""); - const std_node = try rp.c.arena.create(ast.Node.OneToken); - std_node.* = .{ - .base = .{ .tag = .StringLiteral }, - .token = std_token, - }; - import_fn_call.params()[0] = &std_node.base; - import_fn_call.rparen_token = try appendToken(rp.c, .RParen, ")"); - - const inner_field_access = try transCreateNodeFieldAccess(rp.c, &import_fn_call.base, "math"); - const outer_field_access = try transCreateNodeFieldAccess(rp.c, inner_field_access, "Log2Int"); - const log2int_fn_call = try rp.c.createCall(outer_field_access, 1); - log2int_fn_call.params()[0] = zig_type_node; - log2int_fn_call.rtoken = try appendToken(rp.c, .RParen, ")"); - - return &log2int_fn_call.base; + const zig_type = try transQualType(c, qt, source_loc); + return Node.std_math_Log2Int.create(c.arena, zig_type); } fn qualTypeChildIsFnProto(qt: clang.QualType) bool { @@ -4506,67 +4333,6 @@ fn 
transCreateNodeSimplePrefixOp( return node; } -fn transCreateNodeInfixOp( - rp: RestorePoint, - scope: *Scope, - lhs_node: *ast.Node, - op: ast.Node.Tag, - op_token: ast.TokenIndex, - rhs_node: *ast.Node, - used: ResultUsed, - grouped: bool, -) !*ast.Node { - var lparen = if (grouped) - try appendToken(rp.c, .LParen, "(") - else - null; - const node = try rp.c.arena.create(ast.Node.SimpleInfixOp); - node.* = .{ - .base = .{ .tag = op }, - .op_token = op_token, - .lhs = lhs_node, - .rhs = rhs_node, - }; - if (!grouped) return maybeSuppressResult(rp, scope, used, &node.base); - const rparen = try appendToken(rp.c, .RParen, ")"); - const grouped_expr = try rp.c.arena.create(ast.Node.GroupedExpression); - grouped_expr.* = .{ - .lparen = lparen.?, - .expr = &node.base, - .rparen = rparen, - }; - return maybeSuppressResult(rp, scope, used, &grouped_expr.base); -} - -fn transCreateNodeBoolInfixOp( - rp: RestorePoint, - scope: *Scope, - stmt: *const clang.BinaryOperator, - op: ast.Node.Tag, - used: ResultUsed, - grouped: bool, -) !*ast.Node { - std.debug.assert(op == .BoolAnd or op == .BoolOr); - - const lhs_hode = try transBoolExpr(rp, scope, stmt.getLHS(), .used, .l_value, true); - const op_token = if (op == .BoolAnd) - try appendToken(rp.c, .Keyword_and, "and") - else - try appendToken(rp.c, .Keyword_or, "or"); - const rhs = try transBoolExpr(rp, scope, stmt.getRHS(), .used, .r_value, true); - - return transCreateNodeInfixOp( - rp, - scope, - lhs_hode, - op, - op_token, - rhs, - used, - grouped, - ); -} - fn transCreateNodePtrType( c: *Context, is_const: bool, @@ -4968,40 +4734,33 @@ fn transCreateNodeSwitchElse(c: *Context) !*ast.Node { } fn transCreateNodeShiftOp( - rp: RestorePoint, + c: *Context, scope: *Scope, stmt: *const clang.BinaryOperator, - op: ast.Node.Tag, - op_tok_id: std.zig.Token.Id, - bytes: []const u8, -) !*ast.Node { - std.debug.assert(op == .BitShiftLeft or op == .BitShiftRight); + op: Node.Tag, +) !Node { + std.debug.assert(op == .shl or op == 
.shr); const lhs_expr = stmt.getLHS(); const rhs_expr = stmt.getRHS(); const rhs_location = rhs_expr.getBeginLoc(); // lhs >> @as(u5, rh) - const lhs = try transExpr(rp, scope, lhs_expr, .used, .l_value); - const op_token = try appendToken(rp.c, op_tok_id, bytes); + const lhs = try transExpr(c, scope, lhs_expr, .used, .l_value); - const cast_node = try rp.c.createBuiltinCall("@intCast", 2); - const rhs_type = try qualTypeToLog2IntRef(rp, stmt.getType(), rhs_location); - cast_node.params()[0] = rhs_type; - _ = try appendToken(rp.c, .Comma, ","); - const rhs = try transExprCoercing(rp, scope, rhs_expr, .used, .r_value); - cast_node.params()[1] = rhs; - cast_node.rparen_token = try appendToken(rp.c, .RParen, ")"); + const rhs_type = try qualTypeToLog2IntRef(c, stmt.getType(), rhs_location); + const rhs = try transExpr(c, scope, rhs_expr, .used, .r_value); + const rhs_casted = try Node.int_cast.create(c.arena, .{ .lhs = rhs_type, .rhs = rhs_type }); - const node = try rp.c.arena.create(ast.Node.SimpleInfixOp); - node.* = .{ + const payload = try c.arena.create(ast.Payload.BinOp); + payload.* = .{ .base = .{ .tag = op }, - .op_token = op_token, - .lhs = lhs, - .rhs = &cast_node.base, + .data = .{ + .lhs = lhs, + .rhs = rhs_casted, + } }; - - return &node.base; + return &payload.base; } fn transCreateNodePtrDeref(c: *Context, lhs: *ast.Node) !*ast.Node { @@ -5025,161 +4784,7 @@ fn transCreateNodeArrayAccess(c: *Context, lhs: *ast.Node) !*ast.Node.ArrayAcces return node; } -const RestorePoint = struct { - c: *Context, - token_index: ast.TokenIndex, - src_buf_index: usize, - - fn activate(self: RestorePoint) void { - self.c.token_ids.shrinkAndFree(self.c.gpa, self.token_index); - self.c.token_locs.shrinkAndFree(self.c.gpa, self.token_index); - self.c.source_buffer.shrinkAndFree(self.src_buf_index); - } -}; - -fn makeRestorePoint(c: *Context) RestorePoint { - return RestorePoint{ - .c = c, - .token_index = c.token_ids.items.len, - .src_buf_index = c.source_buffer.items.len, 
- }; -} - -fn transType(rp: RestorePoint, ty: *const clang.Type, source_loc: clang.SourceLocation) TypeError!*ast.Node { - switch (ty.getTypeClass()) { - .Builtin => { - const builtin_ty = @ptrCast(*const clang.BuiltinType, ty); - return transCreateNodeIdentifier(rp.c, switch (builtin_ty.getKind()) { - .Void => "c_void", - .Bool => "bool", - .Char_U, .UChar, .Char_S, .Char8 => "u8", - .SChar => "i8", - .UShort => "c_ushort", - .UInt => "c_uint", - .ULong => "c_ulong", - .ULongLong => "c_ulonglong", - .Short => "c_short", - .Int => "c_int", - .Long => "c_long", - .LongLong => "c_longlong", - .UInt128 => "u128", - .Int128 => "i128", - .Float => "f32", - .Double => "f64", - .Float128 => "f128", - .Float16 => "f16", - .LongDouble => "c_longdouble", - else => return revertAndWarn(rp, error.UnsupportedType, source_loc, "unsupported builtin type", .{}), - }); - }, - .FunctionProto => { - const fn_proto_ty = @ptrCast(*const clang.FunctionProtoType, ty); - const fn_proto = try transFnProto(rp, null, fn_proto_ty, source_loc, null, false); - return &fn_proto.base; - }, - .FunctionNoProto => { - const fn_no_proto_ty = @ptrCast(*const clang.FunctionType, ty); - const fn_proto = try transFnNoProto(rp, fn_no_proto_ty, source_loc, null, false); - return &fn_proto.base; - }, - .Paren => { - const paren_ty = @ptrCast(*const clang.ParenType, ty); - return transQualType(rp, paren_ty.getInnerType(), source_loc); - }, - .Pointer => { - const child_qt = ty.getPointeeType(); - if (qualTypeChildIsFnProto(child_qt)) { - const optional_node = try transCreateNodeSimplePrefixOp(rp.c, .OptionalType, .QuestionMark, "?"); - optional_node.rhs = try transQualType(rp, child_qt, source_loc); - return &optional_node.base; - } - if (typeIsOpaque(rp.c, child_qt.getTypePtr(), source_loc) or qualTypeWasDemotedToOpaque(rp.c, child_qt)) { - const optional_node = try transCreateNodeSimplePrefixOp(rp.c, .OptionalType, .QuestionMark, "?"); - const pointer_node = try transCreateNodePtrType( - rp.c, - 
child_qt.isConstQualified(), - child_qt.isVolatileQualified(), - .Asterisk, - ); - optional_node.rhs = &pointer_node.base; - pointer_node.rhs = try transQualType(rp, child_qt, source_loc); - return &optional_node.base; - } - const pointer_node = try transCreateNodePtrType( - rp.c, - child_qt.isConstQualified(), - child_qt.isVolatileQualified(), - .Identifier, - ); - pointer_node.rhs = try transQualType(rp, child_qt, source_loc); - return &pointer_node.base; - }, - .ConstantArray => { - const const_arr_ty = @ptrCast(*const clang.ConstantArrayType, ty); - - const size_ap_int = const_arr_ty.getSize(); - const size = size_ap_int.getLimitedValue(math.maxInt(usize)); - const elem_ty = const_arr_ty.getElementType().getTypePtr(); - return try transCreateNodeArrayType(rp, source_loc, elem_ty, size); - }, - .IncompleteArray => { - const incomplete_array_ty = @ptrCast(*const clang.IncompleteArrayType, ty); - - const child_qt = incomplete_array_ty.getElementType(); - var node = try transCreateNodePtrType( - rp.c, - child_qt.isConstQualified(), - child_qt.isVolatileQualified(), - .Identifier, - ); - node.rhs = try transQualType(rp, child_qt, source_loc); - return &node.base; - }, - .Typedef => { - const typedef_ty = @ptrCast(*const clang.TypedefType, ty); - - const typedef_decl = typedef_ty.getDecl(); - return (try transTypeDef(rp.c, typedef_decl, false)) orelse - revertAndWarn(rp, error.UnsupportedType, source_loc, "unable to translate typedef declaration", .{}); - }, - .Record => { - const record_ty = @ptrCast(*const clang.RecordType, ty); - - const record_decl = record_ty.getDecl(); - return (try transRecordDecl(rp.c, record_decl)) orelse - revertAndWarn(rp, error.UnsupportedType, source_loc, "unable to resolve record declaration", .{}); - }, - .Enum => { - const enum_ty = @ptrCast(*const clang.EnumType, ty); - - const enum_decl = enum_ty.getDecl(); - return (try transEnumDecl(rp.c, enum_decl)) orelse - revertAndWarn(rp, error.UnsupportedType, source_loc, "unable to 
translate enum declaration", .{}); - }, - .Elaborated => { - const elaborated_ty = @ptrCast(*const clang.ElaboratedType, ty); - return transQualType(rp, elaborated_ty.getNamedType(), source_loc); - }, - .Decayed => { - const decayed_ty = @ptrCast(*const clang.DecayedType, ty); - return transQualType(rp, decayed_ty.getDecayedType(), source_loc); - }, - .Attributed => { - const attributed_ty = @ptrCast(*const clang.AttributedType, ty); - return transQualType(rp, attributed_ty.getEquivalentType(), source_loc); - }, - .MacroQualified => { - const macroqualified_ty = @ptrCast(*const clang.MacroQualifiedType, ty); - return transQualType(rp, macroqualified_ty.getModifiedType(), source_loc); - }, - else => { - const type_name = rp.c.str(ty.getTypeClassName()); - return revertAndWarn(rp, error.UnsupportedType, source_loc, "unsupported type: '{s}'", .{type_name}); - }, - } -} - -fn transType1(c: *Context, ty: *const clang.Type, source_loc: clang.SourceLocation) TypeError!Type { +fn transType(c: *Context, ty: *const clang.Type, source_loc: clang.SourceLocation) TypeError!Type { switch (ty.getTypeClass()) { .Builtin => { const builtin_ty = @ptrCast(*const clang.BuiltinType, ty); diff --git a/src/translate_c/ast.zig b/src/translate_c/ast.zig index 0585c26d18..3ceccf16b8 100644 --- a/src/translate_c/ast.zig +++ b/src/translate_c/ast.zig @@ -1,7 +1,7 @@ const std = @import("std"); const Type = @import("../type.zig").Type; -pub const Node = struct { +pub const Node = extern union { /// If the tag value is less than Tag.no_payload_count, then no pointer /// dereference is needed. tag_if_small_enough: usize, @@ -13,12 +13,15 @@ pub const Node = struct { opaque_literal, true_literal, false_literal, + empty_block, + /// pub usingnamespace @import("std").c.builtins; + usingnamespace_builtins, // After this, the tag requires a payload. 
- int, - float, - string, - char, + int_literal, + float_literal, + string_literal, + char_literal, identifier, @"if", @"while", @@ -44,6 +47,67 @@ pub const Node = struct { discard, block, + // a + b + add, + // a = b + add_assign, + // c = (a = b) + add_assign_value, + add_wrap, + add_wrap_assign, + add_wrap_assign_value, + sub, + sub_assign, + sub_assign_value, + sub_wrap, + sub_wrap_assign, + sub_wrap_assign_value, + mul, + mul_assign, + mul_assign_value, + mul_wrap, + mul_wrap_assign, + mul_wrap_assign_value, + div, + div_assign, + div_assign_value, + shl, + shl_assign, + shl_assign_value, + shr, + shr_assign, + shr_assign_value, + mod, + mod_assign, + mod_assign_value, + @"and", + and_assign, + and_assign_value, + @"or", + or_assign, + or_assign_value, + xor, + xor_assign, + xor_assign_value, + less_than, + less_than_equal, + greater_than, + greater_than_equal, + equal, + not_equal, + bit_and, + bit_or, + bit_xor, + + /// @import("std").math.Log2Int(operand) + std_math_Log2Int, + /// @intCast(lhs, rhs) + int_cast, + /// @rem(lhs, rhs) + rem, + /// @divTrunc(lhs, rhs) + div_trunc, + pub const last_no_payload_tag = Tag.false_literal; pub const no_payload_count = @enumToInt(last_no_payload_tag) + 1; @@ -54,8 +118,70 @@ pub const Node = struct { .opaque_literal, .true_literal, .false_litral, + .empty_block, + .usingnamespace_builtins, => @compileError("Type Tag " ++ @tagName(t) ++ " has no payload"), + .array_access, + .std_mem_zeroes, + .@"return", + .discard, + .std_math_Log2Int, + => Payload.UnOp, + + .add, + .add_assign, + .add_assign_value, + .add_wrap, + .add_wrap_assign, + .add_wrap_assign_value, + .sub, + .sub_assign, + .sub_assign_value, + .sub_wrap, + .sub_wrap_assign, + .sub_wrap_assign_value, + .mul, + .mul_assign, + .mul_assign_value, + .mul_wrap, + .mul_wrap_assign, + .mul_wrap_assign_value, + .div, + .div_assign, + .div_assign_value, + .shl, + .shl_assign, + .shl_assign_value, + .shr, + .shr_assign, + .shr_assign_value, + .mod, + .mod_assign, + 
.mod_assign_value, + .@"and", + .and_assign, + .and_assign_value, + .@"or", + .or_assign, + .or_assign_value, + .xor, + .xor_assign, + .xor_assign_value, + .less_than, + .less_than_equal, + .greater_than, + .greater_than_equal, + .equal, + .not_equal, + .bit_and, + .bit_or, + .bit_xor, + .div_trunc, + .rem, + .int_cast, + => Payload.BinOp, + .int, .float, .string, @@ -71,11 +197,6 @@ pub const Node = struct { .@"switch" => Payload.Switch, .@"break" => Payload.Break, .call => Payload.Call, - .array_access, - .std_mem_zeroes, - .@"return", - .discard, - => Payload.SingleArg, .var_decl => Payload.VarDecl, .func => Payload.Func, .@"enum" => Payload.Enum, @@ -123,11 +244,19 @@ pub const Payload = struct { data: []const u8, }; - pub const SingleArg = struct { + pub const UnOp = struct { base: Node, data: *Node, }; + pub const BinOp = struct { + base: Node, + data: struct { + lhs: *Node, + rhs: *Node, + }, + }; + pub const If = struct { base: Node = .{ .tag = .@"if" }, data: struct { From f5041caa2e9d1e891bd93aa2721a4a283123f0d9 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Sun, 7 Feb 2021 22:02:42 +0200 Subject: [PATCH 067/173] translate-c: more binaryoperator chagnes, blocks and unary type expressions --- src/translate_c.zig | 273 +++++++++++++++------------------------- src/translate_c/ast.zig | 80 ++++++++---- 2 files changed, 156 insertions(+), 197 deletions(-) diff --git a/src/translate_c.zig b/src/translate_c.zig index f885ea98b9..6570766988 100644 --- a/src/translate_c.zig +++ b/src/translate_c.zig @@ -68,16 +68,15 @@ const Scope = struct { } }; - /// Represents an in-progress ast.Node.Block. This struct is stack-allocated. - /// When it is deinitialized, it produces an ast.Node.Block which is allocated + /// Represents an in-progress Node.Block. This struct is stack-allocated. + /// When it is deinitialized, it produces an Node.Block which is allocated /// into the main arena. 
const Block = struct { base: Scope, - statements: std.ArrayList(*ast.Node), + statements: std.ArrayList(Node), variables: AliasList, - label: ?ast.TokenIndex, mangle_count: u32 = 0, - lbrace: ast.TokenIndex, + label: ?[]const u8 = null, /// When the block corresponds to a function, keep track of the return type /// so that the return expression can be cast, if necessary @@ -89,14 +88,11 @@ const Scope = struct { .id = .Block, .parent = parent, }, - .statements = std.ArrayList(*ast.Node).init(c.gpa), + .statements = std.ArrayList(Node).init(c.gpa), .variables = AliasList.init(c.gpa), - .label = null, - .lbrace = try appendToken(c, .LBrace, "{"), }; if (labeled) { - blk.label = try appendIdentifier(c, try blk.makeMangledName(c, "blk")); - _ = try appendToken(c, .Colon, ":"); + blk.label = try blk.makeMangledName(c, "blk"); } return blk; } @@ -107,31 +103,16 @@ const Scope = struct { self.* = undefined; } - fn complete(self: *Block, c: *Context) !*ast.Node { + fn complete(self: *Block, c: *Context) !Node { // We reserve 1 extra statement if the parent is a Loop. This is in case of // do while, we want to put `if (cond) break;` at the end. 
const alloc_len = self.statements.items.len + @boolToInt(self.base.parent.?.id == .Loop); - const rbrace = try appendToken(c, .RBrace, "}"); - if (self.label) |label| { - const node = try ast.Node.LabeledBlock.alloc(c.arena, alloc_len); - node.* = .{ - .statements_len = self.statements.items.len, - .lbrace = self.lbrace, - .rbrace = rbrace, - .label = label, - }; - mem.copy(*ast.Node, node.statements(), self.statements.items); - return &node.base; - } else { - const node = try ast.Node.Block.alloc(c.arena, alloc_len); - node.* = .{ - .statements_len = self.statements.items.len, - .lbrace = self.lbrace, - .rbrace = rbrace, - }; - mem.copy(*ast.Node, node.statements(), self.statements.items); - return &node.base; - } + const stmts = try c.arena.alloc(Node, alloc_len); + mem.copy(Node, stmts, self.statements.items); + return Node.block.create(c.arena, .{ + .lable = self.label, + .stmts = stmts, + }); } /// Given the desired name, return a name that does not shadow anything from outer scopes. 
@@ -1390,7 +1371,7 @@ fn transBinaryOperator( // signed integer division uses @divTrunc const lhs = try transExpr(c, scope, stmt.getLHS(), .used, .l_value); const rhs = try transExpr(c, scope, stmt.getRHS(), .used, .r_value); - const div_trunc = try Node.div_trunc.create(c.arena, .{ .lhs = lhs, .rhs = rhs}); + const div_trunc = try Node.div_trunc.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); return maybeSuppressResult(c, scope, result_used, div_trunc); } }, @@ -1399,7 +1380,7 @@ fn transBinaryOperator( // signed integer division uses @rem const lhs = try transExpr(c, scope, stmt.getLHS(), .used, .l_value); const rhs = try transExpr(c, scope, stmt.getRHS(), .used, .r_value); - const rem = try Node.rem.create(c.arena, .{ .lhs = lhs, .rhs = rhs}); + const rem = try Node.rem.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); return maybeSuppressResult(c, scope, result_used, rem); } }, @@ -1411,6 +1392,12 @@ fn transBinaryOperator( const node = try transCreateNodeShiftOp(c, scope, stmt, .shr); return maybeSuppressResult(c, scope, result_used, node); }, + .LAnd => { + return transCreateNodeBoolInfixOp(c, scope, stmt, .bool_and, result_used, true); + }, + .LOr => { + return transCreateNodeBoolInfixOp(c, scope, stmt, .bool_or, result_used, true); + }, else => {}, } var op_id: Node.Tag = undefined; @@ -1471,17 +1458,19 @@ fn transBinaryOperator( .Or => { op_id = .bit_or; }, - .LAnd => { - op_id = .@"and"; - }, - .LOr => { - op_id = .@"or"; - }, else => unreachable, } - const lhs = try transExpr(c, scope, stmt.getLHS(), .used, .l_value); - const rhs = try transExpr(c, scope, stmt.getRHS(), .used, .r_value); + const lhs_uncasted = try transExpr(c, scope, stmt.getLHS(), .used, .l_value); + const rhs_uncasted = try transExpr(c, scope, stmt.getRHS(), .used, .r_value); + + const lhs = if (isBoolRes(lhs_uncasted)) + try Node.bool_to_int.create(c.arena, lhs_uncasted) + else lhs_uncasted; + + const rhs = if (isBoolRes(rhs_uncasted)) + try Node.bool_to_int.create(c.arena, rhs_uncasted) + 
else rhs_uncasted; const payload = try c.arena.create(ast.Payload.BinOp); payload.* = .{ @@ -1495,7 +1484,7 @@ fn transBinaryOperator( } fn transCompoundStmtInline( - rp: RestorePoint, + c: *Context, parent_scope: *Scope, stmt: *const clang.CompoundStmt, block: *Scope.Block, @@ -1503,16 +1492,16 @@ fn transCompoundStmtInline( var it = stmt.body_begin(); const end_it = stmt.body_end(); while (it != end_it) : (it += 1) { - const result = try transStmt(rp, parent_scope, it[0], .unused, .r_value); + const result = try transStmt(c, parent_scope, it[0], .unused, .r_value); try block.statements.append(result); } } -fn transCompoundStmt(rp: RestorePoint, scope: *Scope, stmt: *const clang.CompoundStmt) TransError!*ast.Node { - var block_scope = try Scope.Block.init(rp.c, scope, false); +fn transCompoundStmt(c: *Context, scope: *Scope, stmt: *const clang.CompoundStmt) TransError!Node { + var block_scope = try Scope.Block.init(c, scope, false); defer block_scope.deinit(); - try transCompoundStmtInline(rp, &block_scope.base, stmt, &block_scope); - return try block_scope.complete(rp.c); + try transCompoundStmtInline(c, &block_scope.base, stmt, &block_scope); + return try block_scope.complete(c); } fn transCStyleCastExprClass( @@ -3233,22 +3222,18 @@ fn qualTypeGetFnProto(qt: clang.QualType, is_ptr: *bool) ?ClangFunctionType { } fn transUnaryExprOrTypeTraitExpr( - rp: RestorePoint, + c: *Context, scope: *Scope, stmt: *const clang.UnaryExprOrTypeTraitExpr, result_used: ResultUsed, -) TransError!*ast.Node { +) TransError!Node { const loc = stmt.getBeginLoc(); - const type_node = try transQualType( - rp, - stmt.getTypeOfArgument(), - loc, - ); + const type_node = try transQualType(rp, stmt.getTypeOfArgument(), loc); const kind = stmt.getKind(); - const kind_str = switch (kind) { - .SizeOf => "@sizeOf", - .AlignOf => "@alignOf", + switch (kind) { + .SizeOf => return Node.sizeof.create(c.arena, type_node), + .AlignOf => return Node.alignof.create(c.arena, type_node), 
.PreferredAlignOf, .VecStep, .OpenMPRequiredSimdAlign, @@ -3259,12 +3244,7 @@ fn transUnaryExprOrTypeTraitExpr( "Unsupported type trait kind {}", .{kind}, ), - }; - - const builtin_node = try rp.c.createBuiltinCall(kind_str, 1); - builtin_node.params()[0] = type_node; - builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")"); - return maybeSuppressResult(rp, scope, result_used, &builtin_node.base); + } } fn qualTypeHasWrappingOverflow(qt: clang.QualType) bool { @@ -3967,8 +3947,8 @@ fn transQualTypeInitialized( return transQualType(rp, qt, source_loc); } -fn transQualType(rp: RestorePoint, qt: clang.QualType, source_loc: clang.SourceLocation) TypeError!*ast.Node { - return transType(rp, qt.getTypePtr(), source_loc); +fn transQualType(c: *Context, qt: clang.QualType, source_loc: clang.SourceLocation) TypeError!Node { + return transType(c, qt.getTypePtr(), source_loc); } /// Produces a Zig AST node by translating a Clang QualType, respecting the width, but modifying the signed-ness. 
@@ -4318,19 +4298,27 @@ fn transCreateNodeFieldAccess(c: *Context, container: *ast.Node, field_name: []c return &field_access_node.base; } -fn transCreateNodeSimplePrefixOp( +fn transCreateNodeBoolInfixOp( c: *Context, - comptime tag: ast.Node.Tag, - op_tok_id: std.zig.Token.Id, - bytes: []const u8, -) !*ast.Node.SimplePrefixOp { - const node = try c.arena.create(ast.Node.SimplePrefixOp); - node.* = .{ - .base = .{ .tag = tag }, - .op_token = try appendToken(c, op_tok_id, bytes), - .rhs = undefined, // translate and set afterward + scope: *Scope, + stmt: *const clang.BinaryOperator, + op: ast.Node.Tag, + used: ResultUsed, +) !Node { + std.debug.assert(op == .bool_and or op == .bool_or); + + const lhs = try transBoolExpr(rp, scope, stmt.getLHS(), .used, .l_value, true); + const rhs = try transBoolExpr(rp, scope, stmt.getRHS(), .used, .r_value, true); + + const payload = try c.arena.create(ast.Payload.BinOp); + payload.* = .{ + .base = .{ .tag = op }, + .data = .{ + .lhs = lhs, + .rhs = rhs, + }, }; - return node; + return maybeSuppressResult(c, scope, used, &payload.base); } fn transCreateNodePtrType( @@ -4784,30 +4772,30 @@ fn transCreateNodeArrayAccess(c: *Context, lhs: *ast.Node) !*ast.Node.ArrayAcces return node; } -fn transType(c: *Context, ty: *const clang.Type, source_loc: clang.SourceLocation) TypeError!Type { +fn transType(c: *Context, ty: *const clang.Type, source_loc: clang.SourceLocation) TypeError!Node { switch (ty.getTypeClass()) { .Builtin => { const builtin_ty = @ptrCast(*const clang.BuiltinType, ty); - return Type.initTag(switch (builtin_ty.getKind()) { - .Void => .c_void, - .Bool => .bool, - .Char_U, .UChar, .Char_S, .Char8 => .u8, - .SChar => .i8, - .UShort => .c_ushort, - .UInt => .c_uint, - .ULong => .c_ulong, - .ULongLong => .c_ulonglong, - .Short => .c_short, - .Int => .c_int, - .Long => .c_long, - .LongLong => .c_longlong, - .UInt128 => .u128, - .Int128 => .i128, - .Float => .f32, - .Double => .f64, - .Float128 => .f128, - .Float16 => .f16, - 
.LongDouble => .c_longdouble, + return Node.type.create(c.arena, switch (builtin_ty.getKind()) { + .Void => "c_void", + .Bool => "bool", + .Char_U, .UChar, .Char_S, .Char8 => "u8", + .SChar => "i8", + .UShort => "c_ushort", + .UInt => "c_uint", + .ULong => "c_ulong", + .ULongLong => "c_ulonglong", + .Short => "c_short", + .Int => "c_int", + .Long => "c_long", + .LongLong => "c_longlong", + .UInt128 => "u128", + .Int128 => "i128", + .Float => "f32", + .Double => "f64", + .Float128 => "f128", + .Float16 => "f16", + .LongDouble => "c_longdouble", else => return fail(c, error.UnsupportedType, source_loc, "unsupported builtin type", .{}), }); }, @@ -4826,61 +4814,25 @@ fn transType(c: *Context, ty: *const clang.Type, source_loc: clang.SourceLocatio .Pointer => { const child_qt = ty.getPointeeType(); if (qualTypeChildIsFnProto(child_qt)) { - return Type.optional_single_mut_pointer.create(c.arena, try transQualType(c, child_qt, source_loc)); + return Node.optional_type.create(c.arena, try transQualType(c, child_qt, source_loc)); } const is_const = child_qt.isConstQualified(); const is_volatile = child_qt.isVolatileQualified(); const elem_type = try transQualType(c, child_qt, source_loc); - if (elem_type.zigTypeTag() == .Opaque) { - if (!is_volatile) { - if (is_const) { - return Type.optional_single_const_pointer.create(c.arena, elem_type); - } else { - return Type.optional_single_mut_pointer.create(c.arena, elem_type); - } - } - - return Type.pointer.create(c.arena, .{ - .pointee_type = elem_type, - .sentinel = null, - .@"align" = 0, - .bit_offset = 0, - .host_size = 0, - .@"allowzero" = false, - .mutable = !is_const, - .@"volatile" = true, - .size = .Single, - }); + if (typeIsOpaque(rp.c, child_qt.getTypePtr(), source_loc) or qualTypeWasDemotedToOpaque(rp.c, child_qt)) { + return Node.single_pointer.create(c.arena, .{ .is_const = is_const, .is_volatile = is_volatile, .elem_type = elem_type }); } - if (!is_volatile) { - if (is_const) { - return 
Type.c_const_pointer.create(c.arena, elem_type); - } else { - return Type.c_mut_pointer.create(c.arena, elem_type); - } - } - - return Type.pointer.create(c.arena, .{ - .pointee_type = elem_type, - .sentinel = null, - .@"align" = 0, - .bit_offset = 0, - .host_size = 0, - .@"allowzero" = false, - .mutable = !is_const, - .@"volatile" = true, - .size = .C, - }); + return Node.c_pointer.create(c.arena, .{ .is_const = is_const, .is_volatile = is_volatile, .elem_type = elem_type }); }, .ConstantArray => { const const_arr_ty = @ptrCast(*const clang.ConstantArrayType, ty); const size_ap_int = const_arr_ty.getSize(); const size = size_ap_int.getLimitedValue(math.maxInt(usize)); - const elem_type = try transType1(c, const_arr_ty.getElementType().getTypePtr(), source_loc); - - return Type.array.create(c.arena, .{ .len = size, .elem_type = elem_type }); + const elem_type = try transType(c, const_arr_ty.getElementType().getTypePtr(), source_loc); + + return Node.array_type.create(c.arena, .{ .len = size, .elem_type = elem_type }); }, .IncompleteArray => { const incomplete_array_ty = @ptrCast(*const clang.IncompleteArrayType, ty); @@ -4890,25 +4842,7 @@ fn transType(c: *Context, ty: *const clang.Type, source_loc: clang.SourceLocatio const is_volatile = child_qt.isVolatileQualified(); const elem_type = try transQualType(c, child_qt, source_loc); - if (!is_volatile) { - if (is_const) { - return Type.c_const_pointer.create(c.arena, elem_type); - } else { - return Type.c_mut_pointer.create(c.arena, elem_type); - } - } - - return Type.pointer.create(c.arena, .{ - .pointee_type = elem_type, - .sentinel = null, - .@"align" = 0, - .bit_offset = 0, - .host_size = 0, - .@"allowzero" = false, - .mutable = !is_const, - .@"volatile" = true, - .size = .C, - }); + return Node.c_pointer.create(c.arena, .{ .is_const = is_const, .is_volatile = is_volatile, .elem_type = elem_type }); }, .Typedef => { const typedef_ty = @ptrCast(*const clang.TypedefType, ty); @@ -5233,25 +5167,14 @@ fn 
finishTransFnProto( return fn_proto; } -fn revertAndWarn( - rp: RestorePoint, - err: anytype, - source_loc: clang.SourceLocation, - comptime format: []const u8, - args: anytype, -) (@TypeOf(err) || error{OutOfMemory}) { - rp.activate(); - try emitWarning(rp.c, source_loc, format, args); - return err; -} - -fn emitWarning(c: *Context, loc: clang.SourceLocation, comptime format: []const u8, args: anytype) !void { +fn warn(c: *Context, scope: *Scope, loc: clang.SourceLocation, comptime format: []const u8, args: anytype) !void { const args_prefix = .{c.locStr(loc)}; - _ = try appendTokenFmt(c, .LineComment, "// {s}: warning: " ++ format, args_prefix ++ args); + const value = std.fmt.allocPrint(c.arena, "// {s}: warning: " ++ format, args_prefix ++ args); + try scope.appendNode(c.gpa, try Node.warning.create(c.arena, value)); } fn fail( - rp: RestorePoint, + c: *Context, err: anytype, source_loc: clang.SourceLocation, comptime format: []const u8, diff --git a/src/translate_c/ast.zig b/src/translate_c/ast.zig index 3ceccf16b8..638d4cefa0 100644 --- a/src/translate_c/ast.zig +++ b/src/translate_c/ast.zig @@ -107,6 +107,25 @@ pub const Node = extern union { rem, /// @divTrunc(lhs, rhs) div_trunc, + /// @boolToInt(lhs, rhs) + bool_to_int, + + negate, + negate_wrap, + bit_not, + not, + + block, + @"break", + + sizeof, + alignof, + type, + + optional_type, + c_pointer, + single_pointer, + array_type, pub const last_no_payload_tag = Tag.false_literal; pub const no_payload_count = @enumToInt(last_no_payload_tag) + 1; @@ -127,6 +146,14 @@ pub const Node = extern union { .@"return", .discard, .std_math_Log2Int, + .negate, + .negate_wrap, + .bit_not, + .not, + .optional_type, + .c_pointer, + .single_pointer, + .array_type, => Payload.UnOp, .add, @@ -180,6 +207,7 @@ pub const Node = extern union { .div_trunc, .rem, .int_cast, + .bool_to_int, => Payload.BinOp, .int, @@ -191,6 +219,9 @@ pub const Node = extern union { .field_access_arrow, .warning, .failed_decl, + .sizeof, + 
.alignof, + .type, => Payload.Value, .@"if" => Payload.If, .@"while" => Payload.While, @@ -234,8 +265,8 @@ pub const Payload = struct { pub const Infix = struct { base: Node, data: struct { - lhs: *Node, - rhs: *Node, + lhs: Node, + rhs: Node, }, }; @@ -246,44 +277,44 @@ pub const Payload = struct { pub const UnOp = struct { base: Node, - data: *Node, + data: Node, }; pub const BinOp = struct { base: Node, data: struct { - lhs: *Node, - rhs: *Node, + lhs: Node, + rhs: Node, }, }; pub const If = struct { base: Node = .{ .tag = .@"if" }, data: struct { - cond: *Node, - then: *Node, - @"else": ?*Node, + cond: Node, + then: Node, + @"else": ?Node, }, }; pub const While = struct { base: Node = .{ .tag = .@"while" }, data: struct { - cond: *Node, - body: *Node, + cond: Node, + body: Node, }, }; pub const Switch = struct { base: Node = .{ .tag = .@"switch" }, data: struct { - cond: *Node, + cond: Node, cases: []Prong, default: ?[]const u8, pub const Prong = struct { - lhs: *Node, - rhs: ?*Node, + lhs: Node, + rhs: ?Node, label: []const u8, }; }, @@ -293,15 +324,15 @@ pub const Payload = struct { base: Node = .{ .tag = .@"break" }, data: struct { label: ?[]const u8, - rhs: ?*Node, + rhs: ?Node, }, }; pub const Call = struct { base: Node = .{.call}, data: struct { - lhs: *Node, - args: []*Node, + lhs: Node, + args: []Node, }, }; @@ -314,7 +345,7 @@ pub const Payload = struct { @"export": bool, name: []const u8, type: Type, - init: *Node, + init: Node, }, }; @@ -328,7 +359,7 @@ pub const Payload = struct { cc: std.builtin.CallingConvention, params: []Param, return_type: Type, - body: ?*Node, + body: ?Node, pub const Param = struct { @"noalias": bool, @@ -368,7 +399,7 @@ pub const Payload = struct { pub const ArrayInit = struct { base: Node = .{ .tag = .array_init }, - data: []*Node, + data: []Node, }; pub const ContainerInit = struct { @@ -377,17 +408,22 @@ pub const Payload = struct { pub const Initializer = struct { name: []const u8, - value: *Node, + value: Node, }; }; 
pub const Block = struct { - base: Node = .{ .tag = .block }, + base: Node, data: struct { label: ?[]const u8, - stmts: []*Node, + stmts: []Node }, }; + + pub const Break = struct { + base: Node = .{ .tag = .@"break" }, + data: *Block + }; }; /// Converts the nodes into a Zig ast and then renders it. From 7514c0ad0d327d2427009c26e5930540297e396e Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Sun, 7 Feb 2021 22:28:41 +0200 Subject: [PATCH 068/173] translate-c: unary operator, integers and misc --- src/translate_c.zig | 279 ++++++++++++++++------------------------ src/translate_c/ast.zig | 35 ++++- 2 files changed, 138 insertions(+), 176 deletions(-) diff --git a/src/translate_c.zig b/src/translate_c.zig index 6570766988..a8285e3036 100644 --- a/src/translate_c.zig +++ b/src/translate_c.zig @@ -8,7 +8,6 @@ const ctok = std.c.tokenizer; const CToken = std.c.Token; const mem = std.mem; const math = std.math; -const Type = @import("type.zig").Type; const ast = @import("translate_c/ast.zig"); const Node = ast.Node; @@ -20,7 +19,7 @@ pub const Error = error{OutOfMemory}; const TypeError = Error || error{UnsupportedType}; const TransError = TypeError || error{UnsupportedTranslation}; -const SymbolTable = std.StringArrayHashMap(*ast.Node); +const SymbolTable = std.StringArrayHashMap(Node); const AliasList = std.ArrayList(struct { alias: []const u8, name: []const u8, @@ -38,13 +37,13 @@ const Scope = struct { Loop, }; - /// Represents an in-progress ast.Node.Switch. This struct is stack-allocated. - /// When it is deinitialized, it produces an ast.Node.Switch which is allocated + /// Represents an in-progress Node.Switch. This struct is stack-allocated. + /// When it is deinitialized, it produces an Node.Switch which is allocated /// into the main arena. 
const Switch = struct { base: Scope, pending_block: Block, - cases: []*ast.Node, + cases: []Node, case_index: usize, switch_label: ?[]const u8, default_label: ?[]const u8, @@ -156,6 +155,7 @@ const Scope = struct { sym_table: SymbolTable, macro_table: SymbolTable, context: *Context, + nodes: std.ArrayList(Node), fn init(c: *Context) Root { return .{ @@ -163,12 +163,19 @@ const Scope = struct { .id = .Root, .parent = null, }, - .sym_table = SymbolTable.init(c.arena), - .macro_table = SymbolTable.init(c.arena), + .sym_table = SymbolTable.init(c.gpa), + .macro_table = SymbolTable.init(c.gpa), .context = c, + .nodes = std.ArrayList(Node).init(c.gpa), }; } + fn deinit(scope: *Root) void { + scope.sym_table.deinit(); + scope.macro_table.deinit(); + scope.nodes.deinit(); + } + /// Check if the global scope contains this name, without looking into the "future", e.g. /// ignore the preprocessed decl and macro names. fn containsNow(scope: *Root, name: []const u8) bool { @@ -195,11 +202,11 @@ const Scope = struct { } } - fn findBlockReturnType(inner: *Scope, c: *Context) ?clang.QualType { + fn findBlockReturnType(inner: *Scope, c: *Context) clang.QualType { var scope = inner; while (true) { switch (scope.id) { - .Root => return null, + .Root => unreachable, .Block => { const block = @fieldParentPtr(Block, "base", scope); if (block.return_type) |qt| return qt; @@ -248,23 +255,35 @@ const Scope = struct { } } } + + /// Appends a node to the first block scope if inside a function, or to the root tree if not. 
+ fn appendNode(inner: *Scope, node: Node) !void { + var scope = inner; + while (true) { + switch (scope.id) { + .Root => { + const root = @fieldParentPtr(Root, "base", scope); + return root.nodes.append(node); + }, + .Block => { + const block = @fieldParentPtr(Block, "base", scope); + return block.statements.append(node); + }, + else => scope = scope.parent.?, + } + } + } }; pub const Context = struct { gpa: *mem.Allocator, arena: *mem.Allocator, - token_ids: std.ArrayListUnmanaged(Token.Id) = .{}, - token_locs: std.ArrayListUnmanaged(Token.Loc) = .{}, - errors: std.ArrayListUnmanaged(ast.Error) = .{}, - source_buffer: *std.ArrayList(u8), - err: Error, source_manager: *clang.SourceManager, decl_table: std.AutoArrayHashMapUnmanaged(usize, []const u8) = .{}, alias_list: AliasList, global_scope: *Scope.Root, clang_context: *clang.ASTContext, mangle_count: u32 = 0, - root_decls: std.ArrayListUnmanaged(*ast.Node) = .{}, opaque_demotes: std.AutoHashMapUnmanaged(usize, void) = .{}, /// This one is different than the root scope's name table. 
This contains @@ -293,40 +312,6 @@ pub const Context = struct { const column = c.source_manager.getSpellingColumnNumber(spelling_loc); return std.fmt.allocPrint(c.arena, "{s}:{d}:{d}", .{ filename, line, column }); } - - fn createCall(c: *Context, fn_expr: *ast.Node, params_len: ast.NodeIndex) !*ast.Node.Call { - _ = try appendToken(c, .LParen, "("); - const node = try ast.Node.Call.alloc(c.arena, params_len); - node.* = .{ - .lhs = fn_expr, - .params_len = params_len, - .async_token = null, - .rtoken = undefined, // set after appending args - }; - return node; - } - - fn createBuiltinCall(c: *Context, name: []const u8, params_len: ast.NodeIndex) !*ast.Node.BuiltinCall { - const builtin_token = try appendToken(c, .Builtin, name); - _ = try appendToken(c, .LParen, "("); - const node = try ast.Node.BuiltinCall.alloc(c.arena, params_len); - node.* = .{ - .builtin_token = builtin_token, - .params_len = params_len, - .rparen_token = undefined, // set after appending args - }; - return node; - } - - fn createBlock(c: *Context, statements_len: ast.NodeIndex) !*ast.Node.Block { - const block_node = try ast.Node.Block.alloc(c.arena, statements_len); - block_node.* = .{ - .lbrace = try appendToken(c, .LBrace, "{"), - .statements_len = statements_len, - .rbrace = undefined, - }; - return block_node; - } }; pub fn translate( @@ -348,9 +333,6 @@ pub fn translate( }; defer ast_unit.delete(); - var source_buffer = std.ArrayList(u8).init(gpa); - defer source_buffer.deinit(); - // For memory that has the same lifetime as the Tree that we return // from this function. 
 var arena = std.heap.ArenaAllocator.init(gpa); @@ -367,9 +349,7 @@ pub fn translate( var context = Context{ .gpa = gpa, .arena = &arena.allocator, - .source_buffer = &source_buffer, .source_manager = ast_unit.getSourceManager(), - .err = undefined, .alias_list = AliasList.init(gpa), .global_scope = try arena.allocator.create(Scope.Root), .clang_context = ast_unit.getASTContext(), @@ -378,15 +358,12 @@ fn translate( defer { context.decl_table.deinit(gpa); context.alias_list.deinit(); - context.token_ids.deinit(gpa); - context.token_locs.deinit(gpa); - context.errors.deinit(gpa); context.global_names.deinit(gpa); - context.root_decls.deinit(gpa); context.opaque_demotes.deinit(gpa); + context.global_scope.deinit(); } - _ = try Node.usingnamespace_builtins.init(); + try context.global_scope.nodes.append(try Node.usingnamespace_builtins.init()); try prepopulateGlobalNameTable(ast_unit, &context); @@ -403,29 +380,7 @@ pub fn translate( } } - const eof_token = try appendToken(&context, .Eof, ""); - const root_node = try ast.Node.Root.create(&arena.allocator, context.root_decls.items.len, eof_token); - mem.copy(*ast.Node, root_node.decls(), context.root_decls.items); - - if (false) { - std.debug.warn("debug source:\n{s}\n==EOF==\ntokens:\n", .{source_buffer.items}); - for (context.token_ids.items) |token| { - std.debug.warn("{}\n", .{token}); - } - } - - const tree = try arena.allocator.create(ast.Tree); - tree.* = .{ - .gpa = gpa, - .source = try arena.allocator.dupe(u8, source_buffer.items), - .token_ids = context.token_ids.toOwnedSlice(gpa), - .token_locs = context.token_locs.toOwnedSlice(gpa), - .errors = context.errors.toOwnedSlice(gpa), - .root_node = root_node, - .arena = arena.state, - .generated = true, - }; - return tree; + return ast.render(context.global_scope.nodes.items); } fn prepopulateGlobalNameTable(ast_unit: *clang.ASTUnit, c: *Context) !void { @@ -498,7 +453,7 @@ fn declVisitor(c: *Context, decl: *const clang.Decl) Error!void { }, else => { const 
decl_name = try c.str(decl.getDeclKindName()); - try emitWarning(c, decl.getLocation(), "ignoring {s} declaration", .{decl_name}); + try warn(c, decl.getLocation(), "ignoring {s} declaration", .{decl_name}); }, } } @@ -1880,21 +1835,21 @@ const SuppressCast = enum { no_as, }; fn transIntegerLiteral( - rp: RestorePoint, + c: *Context, scope: *Scope, expr: *const clang.IntegerLiteral, result_used: ResultUsed, suppress_as: SuppressCast, -) TransError!*ast.Node { +) TransError!Node { var eval_result: clang.ExprEvalResult = undefined; - if (!expr.EvaluateAsInt(&eval_result, rp.c.clang_context)) { + if (!expr.EvaluateAsInt(&eval_result, c.clang_context)) { const loc = expr.getBeginLoc(); - return revertAndWarn(rp, error.UnsupportedTranslation, loc, "invalid integer literal", .{}); + return revertAndWarn(c, error.UnsupportedTranslation, loc, "invalid integer literal", .{}); } if (suppress_as == .no_as) { - const int_lit_node = try transCreateNodeAPInt(rp.c, eval_result.Val.getInt()); - return maybeSuppressResult(rp, scope, result_used, int_lit_node); + const int_lit_node = try transCreateNodeAPInt(c, eval_result.Val.getInt()); + return maybeSuppressResult(c, scope, result_used, int_lit_node); } // Integer literals in C have types, and this can matter for several reasons. @@ -1908,51 +1863,26 @@ fn transIntegerLiteral( // But the first step is to be correct, and the next step is to make the output more elegant. 
// @as(T, x) - const expr_base = @ptrCast(*const clang.Expr, expr); - const as_node = try rp.c.createBuiltinCall("@as", 2); - const ty_node = try transQualType(rp, expr_base.getType(), expr_base.getBeginLoc()); - as_node.params()[0] = ty_node; - _ = try appendToken(rp.c, .Comma, ","); - as_node.params()[1] = try transCreateNodeAPInt(rp.c, eval_result.Val.getInt()); - - as_node.rparen_token = try appendToken(rp.c, .RParen, ")"); - return maybeSuppressResult(rp, scope, result_used, &as_node.base); -} - -/// In C if a function has return type `int` and the return value is a boolean -/// expression, there is no implicit cast. So the translated Zig will need to -/// call @boolToInt -fn zigShouldCastBooleanReturnToInt(node: ?*ast.Node, qt: ?clang.QualType) bool { - if (node == null or qt == null) return false; - return isBoolRes(node.?) and cIsNativeInt(qt.?); + const ty_node = try transQualType(c, expr_base.getType(), expr_base.getBeginLoc()); + const rhs = try transCreateNodeAPInt(c, eval_result.Val.getInt()); + const as = try Node.as.create(c.arena, .{ .lhs = ty_node, .rhs = rhs }); + return maybeSuppressResult(c, scope, result_used, as); } fn transReturnStmt( - rp: RestorePoint, + c: *Context, scope: *Scope, expr: *const clang.ReturnStmt, ) TransError!*ast.Node { - const return_kw = try appendToken(rp.c, .Keyword_return, "return"); - var rhs: ?*ast.Node = if (expr.getRetValue()) |val_expr| - try transExprCoercing(rp, scope, val_expr, .used, .r_value) - else - null; - const return_qt = scope.findBlockReturnType(rp.c); - if (zigShouldCastBooleanReturnToInt(rhs, return_qt)) { - const bool_to_int_node = try rp.c.createBuiltinCall("@boolToInt", 1); - bool_to_int_node.params()[0] = rhs.?; - bool_to_int_node.rparen_token = try appendToken(rp.c, .RParen, ")"); + const val_expr = expr.getRetValue() orelse + return Node.return_void.init(); - rhs = &bool_to_int_node.base; + var rhs = try transExprCoercing(c, scope, val_expr, .used, .r_value); + const return_qt = 
scope.findBlockReturnType(c); + if (isBoolRes(rhs) and !qualTypeIsBoolean(return_qt)) { + rhs = try Node.bool_to_int.create(c.arena, rhs); } - const return_expr = try ast.Node.ControlFlowExpression.create(rp.c.arena, .{ - .ltoken = return_kw, - .tag = .Return, - }, .{ - .rhs = rhs, - }); - _ = try appendToken(rp.c, .Semicolon, ";"); - return &return_expr.base; + return Node.@"return".create(c.arena, rhs); } fn transStringLiteral( @@ -2251,6 +2181,33 @@ fn transExpr( return transStmt(c, scope, @ptrCast(*const clang.Stmt, expr), used, lrvalue); } +/// Same as `transExpr` but with the knowledge that the operand will be type coerced, and therefore +/// an `@as` would be redundant. This is used to prevent redundant `@as` in integer literals. +fn transExprCoercing( + c: *Context, + scope: *Scope, + expr: *const clang.Expr, + used: ResultUsed, + lrvalue: LRValue, +) TransError!Node { + switch (@ptrCast(*const clang.Stmt, expr).getStmtClass()) { + .IntegerLiteralClass => { + return transIntegerLiteral(c, scope, @ptrCast(*const clang.IntegerLiteral, expr), .used, .no_as); + }, + .CharacterLiteralClass => { + return transCharLiteral(c, scope, @ptrCast(*const clang.CharacterLiteral, expr), .used, .no_as); + }, + .UnaryOperatorClass => { + const un_expr = @ptrCast(*const clang.UnaryOperator, expr); + if (un_expr.getOpcode() == .Extension) { + return transExprCoercing(c, scope, un_expr.getSubExpr(), used, lrvalue); + } + }, + else => {}, + } + return transExpr(c, scope, expr, .used, .r_value); +} + fn transInitListExprRecord( rp: RestorePoint, scope: *Scope, @@ -3257,71 +3214,59 @@ fn qualTypeHasWrappingOverflow(qt: clang.QualType) bool { } } -fn transUnaryOperator(rp: RestorePoint, scope: *Scope, stmt: *const clang.UnaryOperator, used: ResultUsed) TransError!*ast.Node { +fn transUnaryOperator(c: *Context, scope: *Scope, stmt: *const clang.UnaryOperator, used: ResultUsed) TransError!Node { const op_expr = stmt.getSubExpr(); switch (stmt.getOpcode()) { .PostInc => if 
(qualTypeHasWrappingOverflow(stmt.getType())) - return transCreatePostCrement(rp, scope, stmt, .AssignAddWrap, .PlusPercentEqual, "+%=", used) + return transCreatePostCrement(c, scope, stmt, .AssignAddWrap, .PlusPercentEqual, "+%=", used) else - return transCreatePostCrement(rp, scope, stmt, .AssignAdd, .PlusEqual, "+=", used), + return transCreatePostCrement(c, scope, stmt, .AssignAdd, .PlusEqual, "+=", used), .PostDec => if (qualTypeHasWrappingOverflow(stmt.getType())) - return transCreatePostCrement(rp, scope, stmt, .AssignSubWrap, .MinusPercentEqual, "-%=", used) + return transCreatePostCrement(c, scope, stmt, .AssignSubWrap, .MinusPercentEqual, "-%=", used) else - return transCreatePostCrement(rp, scope, stmt, .AssignSub, .MinusEqual, "-=", used), + return transCreatePostCrement(c, scope, stmt, .AssignSub, .MinusEqual, "-=", used), .PreInc => if (qualTypeHasWrappingOverflow(stmt.getType())) - return transCreatePreCrement(rp, scope, stmt, .AssignAddWrap, .PlusPercentEqual, "+%=", used) + return transCreatePreCrement(c, scope, stmt, .AssignAddWrap, .PlusPercentEqual, "+%=", used) else - return transCreatePreCrement(rp, scope, stmt, .AssignAdd, .PlusEqual, "+=", used), + return transCreatePreCrement(c, scope, stmt, .AssignAdd, .PlusEqual, "+=", used), .PreDec => if (qualTypeHasWrappingOverflow(stmt.getType())) - return transCreatePreCrement(rp, scope, stmt, .AssignSubWrap, .MinusPercentEqual, "-%=", used) + return transCreatePreCrement(c, scope, stmt, .AssignSubWrap, .MinusPercentEqual, "-%=", used) else - return transCreatePreCrement(rp, scope, stmt, .AssignSub, .MinusEqual, "-=", used), + return transCreatePreCrement(c, scope, stmt, .AssignSub, .MinusEqual, "-=", used), .AddrOf => { if (cIsFunctionDeclRef(op_expr)) { return transExpr(rp, scope, op_expr, used, .r_value); } - const op_node = try transCreateNodeSimplePrefixOp(rp.c, .AddressOf, .Ampersand, "&"); - op_node.rhs = try transExpr(rp, scope, op_expr, used, .r_value); - return &op_node.base; + return 
Node.address_of.create(c.arena, try transExpr(c, scope, op_expr, used, .r_value)); }, .Deref => { - const value_node = try transExpr(rp, scope, op_expr, used, .r_value); + const node = try transExpr(c, scope, op_expr, used, .r_value); var is_ptr = false; const fn_ty = qualTypeGetFnProto(op_expr.getType(), &is_ptr); if (fn_ty != null and is_ptr) - return value_node; - const unwrapped = try transCreateNodeUnwrapNull(rp.c, value_node); - return transCreateNodePtrDeref(rp.c, unwrapped); + return node; + return Node.unwrap_deref.create(c.arena, node); }, - .Plus => return transExpr(rp, scope, op_expr, used, .r_value), + .Plus => return transExpr(c, scope, op_expr, used, .r_value), .Minus => { if (!qualTypeHasWrappingOverflow(op_expr.getType())) { - const op_node = try transCreateNodeSimplePrefixOp(rp.c, .Negation, .Minus, "-"); - op_node.rhs = try transExpr(rp, scope, op_expr, .used, .r_value); - return &op_node.base; + return Node.negate.create(c.arena, try transExpr(c, scope, op_expr, .used, .r_value)); } else if (cIsUnsignedInteger(op_expr.getType())) { - // we gotta emit 0 -% x - const zero = try transCreateNodeInt(rp.c, 0); - const token = try appendToken(rp.c, .MinusPercent, "-%"); - const expr = try transExpr(rp, scope, op_expr, .used, .r_value); - return transCreateNodeInfixOp(rp, scope, zero, .SubWrap, token, expr, used, true); + // use -% x for unsigned integers + return Node.negate_wrap.create(c.arena, try transExpr(c, scope, op_expr, .used, .r_value)); } else - return revertAndWarn(rp, error.UnsupportedTranslation, stmt.getBeginLoc(), "C negation with non float non integer", .{}); + return revertAndWarn(c, error.UnsupportedTranslation, stmt.getBeginLoc(), "C negation with non float non integer", .{}); }, .Not => { - const op_node = try transCreateNodeSimplePrefixOp(rp.c, .BitNot, .Tilde, "~"); - op_node.rhs = try transExpr(rp, scope, op_expr, .used, .r_value); - return &op_node.base; + return Node.bit_not.create(c.arena, try transExpr(c, scope, op_expr, 
.used, .r_value)); }, .LNot => { - const op_node = try transCreateNodeSimplePrefixOp(rp.c, .BoolNot, .Bang, "!"); - op_node.rhs = try transBoolExpr(rp, scope, op_expr, .used, .r_value, true); - return &op_node.base; + return Node.not.create(c.arena, try transExpr(c, scope, op_expr, .used, .r_value)); }, .Extension => { - return transExpr(rp, scope, stmt.getSubExpr(), used, .l_value); + return transExpr(c, scope, stmt.getSubExpr(), used, .l_value); }, - else => return revertAndWarn(rp, error.UnsupportedTranslation, stmt.getBeginLoc(), "unsupported C translation {}", .{stmt.getOpcode()}), + else => return revertAndWarn(c, error.UnsupportedTranslation, stmt.getBeginLoc(), "unsupported C translation {}", .{stmt.getOpcode()}), } } @@ -3910,8 +3855,7 @@ fn maybeSuppressResult( return &op_node.base; } -fn addTopLevelDecl(c: *Context, name: []const u8, decl_node: *ast.Node) !void { - try c.root_decls.append(c.gpa, decl_node); +fn addTopLevelDecl(c: *Context, name: []const u8, decl_node: Node) !void { _ = try c.global_scope.sym_table.put(name, decl_node); } @@ -4356,7 +4300,7 @@ fn transCreateNodePtrType( return node; } -fn transCreateNodeAPInt(c: *Context, int: *const clang.APSInt) !*ast.Node { +fn transCreateNodeAPInt(c: *Context, int: *const clang.APSInt) !Node { const num_limbs = math.cast(usize, int.getNumWords()) catch |err| switch (err) { error.Overflow => return error.OutOfMemory, }; @@ -4396,14 +4340,7 @@ fn transCreateNodeAPInt(c: *Context, int: *const clang.APSInt) !*ast.Node { const str = big.toStringAlloc(c.arena, 10, false) catch |err| switch (err) { error.OutOfMemory => return error.OutOfMemory, }; - defer c.arena.free(str); - const token = try appendToken(c, .IntegerLiteral, str); - const node = try c.arena.create(ast.Node.OneToken); - node.* = .{ - .base = .{ .tag = .IntegerLiteral }, - .token = token, - }; - return &node.base; + return Node.int_literal.create(c.arena, str); } fn transCreateNodeUndefinedLiteral(c: *Context) !*ast.Node { diff --git 
a/src/translate_c/ast.zig b/src/translate_c/ast.zig index 638d4cefa0..cc5e8dd7ce 100644 --- a/src/translate_c/ast.zig +++ b/src/translate_c/ast.zig @@ -109,11 +109,16 @@ pub const Node = extern union { div_trunc, /// @boolToInt(lhs, rhs) bool_to_int, + /// @as(lhs, rhs) + as, negate, negate_wrap, bit_not, not, + address_of, + // operand.?.* + unwrap_deref, block, @"break", @@ -151,9 +156,8 @@ pub const Node = extern union { .bit_not, .not, .optional_type, - .c_pointer, - .single_pointer, - .array_type, + .address_of, + .unwrap_deref, => Payload.UnOp, .add, @@ -208,6 +212,7 @@ pub const Node = extern union { .rem, .int_cast, .bool_to_int, + .as, => Payload.BinOp, .int, @@ -236,6 +241,9 @@ pub const Node = extern union { .container_init => Payload.ContainerInit, .std_meta_cast => Payload.Infix, .block => Payload.Block, + .c_pointer => Payload.Pointer, + .single_pointer => Payload.Pointer, + .array_type => Payload.Array, }; } @@ -424,9 +432,26 @@ pub const Payload = struct { base: Node = .{ .tag = .@"break" }, data: *Block }; + + pub const Array = struct { + base: Node, + data: struct { + elem_type: Node, + len: Node, + }, + }; + + pub const Pointer = struct { + base: Node, + data: struct { + elem_type: Node, + is_const: bool, + is_volatile: bool, + }, + }; }; -/// Converts the nodes into a Zig ast and then renders it. -pub fn render(allocator: *Allocator, nodes: []const Node) !void { +/// Converts the nodes into a Zig ast. 
+pub fn render(allocator: *Allocator, nodes: []const Node) !*ast.Tree { @panic("TODO"); } From f36849fed24b54476ecadacb52d9a1b55ae14274 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Sun, 7 Feb 2021 23:13:40 +0200 Subject: [PATCH 069/173] translate-c: convert function translation --- src/translate_c.zig | 378 ++++++++++------------------------------ src/translate_c/ast.zig | 68 +++++++- 2 files changed, 153 insertions(+), 293 deletions(-) diff --git a/src/translate_c.zig b/src/translate_c.zig index a8285e3036..2bbfe0a562 100644 --- a/src/translate_c.zig +++ b/src/translate_c.zig @@ -469,7 +469,6 @@ fn visitFnDecl(c: *Context, fn_decl: *const clang.FunctionDecl) Error!void { return visitFnDecl(c, def); } - const rp = makeRestorePoint(c); const fn_decl_loc = fn_decl.getLocation(); const has_body = fn_decl.hasBody(); const storage_class = fn_decl.getStorageClass(); @@ -513,9 +512,9 @@ fn visitFnDecl(c: *Context, fn_decl: *const clang.FunctionDecl) Error!void { decl_ctx.has_body = false; decl_ctx.storage_class = .Extern; decl_ctx.is_export = false; - try emitWarning(c, fn_decl_loc, "TODO unable to translate variadic function, demoted to declaration", .{}); + try warn(c, fn_decl_loc, "TODO unable to translate variadic function, demoted to declaration", .{}); } - break :blk transFnProto(rp, fn_decl, fn_proto_type, fn_decl_loc, decl_ctx, true) catch |err| switch (err) { + break :blk transFnProto(c, fn_decl, fn_proto_type, fn_decl_loc, decl_ctx, true) catch |err| switch (err) { error.UnsupportedType => { return failDecl(c, fn_decl_loc, fn_name, "unable to resolve prototype of function", .{}); }, @@ -524,7 +523,7 @@ fn visitFnDecl(c: *Context, fn_decl: *const clang.FunctionDecl) Error!void { }, .FunctionNoProto => blk: { const fn_no_proto_type = @ptrCast(*const clang.FunctionType, fn_type); - break :blk transFnNoProto(rp, fn_no_proto_type, fn_decl_loc, decl_ctx, true) catch |err| switch (err) { + break :blk transFnNoProto(c, fn_no_proto_type, fn_decl_loc, decl_ctx, 
true) catch |err| switch (err) { error.UnsupportedType => { return failDecl(c, fn_decl_loc, fn_name, "unable to resolve prototype of function", .{}); }, @@ -535,13 +534,12 @@ fn visitFnDecl(c: *Context, fn_decl: *const clang.FunctionDecl) Error!void { }; if (!decl_ctx.has_body) { - const semi_tok = try appendToken(c, .Semicolon, ";"); return addTopLevelDecl(c, fn_name, &proto_node.base); } // actual function definition with body const body_stmt = fn_decl.getBody(); - var block_scope = try Scope.Block.init(rp.c, &c.global_scope.base, false); + var block_scope = try Scope.Block.init(c, &c.global_scope.base, false); block_scope.return_type = return_qt; defer block_scope.deinit(); @@ -559,34 +557,22 @@ fn visitFnDecl(c: *Context, fn_decl: *const clang.FunctionDecl) Error!void { const is_const = qual_type.isConstQualified(); const mangled_param_name = try block_scope.makeMangledName(c, param_name); + param.name = mangled_param_name; if (!is_const) { const bare_arg_name = try std.fmt.allocPrint(c.arena, "arg_{s}", .{mangled_param_name}); const arg_name = try block_scope.makeMangledName(c, bare_arg_name); + param.name = arg_name; - const mut_tok = try appendToken(c, .Keyword_var, "var"); - const name_tok = try appendIdentifier(c, mangled_param_name); - const eq_token = try appendToken(c, .Equal, "="); - const init_node = try transCreateNodeIdentifier(c, arg_name); - const semicolon_token = try appendToken(c, .Semicolon, ";"); - const node = try ast.Node.VarDecl.create(c.arena, .{ - .mut_token = mut_tok, - .name_token = name_tok, - .semicolon_token = semicolon_token, - }, .{ - .eq_token = eq_token, - .init_node = init_node, - }); - try block_scope.statements.append(&node.base); - param.name_token = try appendIdentifier(c, arg_name); - _ = try appendToken(c, .Colon, ":"); + const redecl_node = try Node.arg_redecl.create(c.arena, .{ .actual = mangled_param_name, .mangled = arg_name }); + try block_scope.statements.append(redecl_node); } param_id += 1; } const casted_body = 
@ptrCast(*const clang.CompoundStmt, body_stmt); - transCompoundStmtInline(rp, &block_scope.base, casted_body, &block_scope) catch |err| switch (err) { + transCompoundStmtInline(c, &block_scope.base, casted_body, &block_scope) catch |err| switch (err) { error.OutOfMemory => |e| return e, error.UnsupportedTranslation, error.UnsupportedType, @@ -600,37 +586,31 @@ fn visitFnDecl(c: *Context, fn_decl: *const clang.FunctionDecl) Error!void { if (block_scope.statements.items.len > 0) { var last = block_scope.statements.items[block_scope.statements.items.len - 1]; while (true) { - switch (last.tag) { - .Block, .LabeledBlock => { - const stmts = last.blockStatements(); - if (stmts.len == 0) break; + switch (last.tag()) { + .block => { + const block = last.castTag(.block).?; + if (block.data.stmts.len == 0) break; - last = stmts[stmts.len - 1]; + last = block.data.stmts[block.data.stmts.len - 1]; }, // no extra return needed - .Return => break :blk, + .@"return", .return_void => break :blk, else => break, } } } - const return_expr = try ast.Node.ControlFlowExpression.create(rp.c.arena, .{ - .ltoken = try appendToken(rp.c, .Keyword_return, "return"), - .tag = .Return, - }, .{ - .rhs = transZeroInitExpr(rp, scope, fn_decl_loc, return_qt.getTypePtr()) catch |err| switch (err) { - error.OutOfMemory => |e| return e, - error.UnsupportedTranslation, - error.UnsupportedType, - => return failDecl(c, fn_decl_loc, fn_name, "unable to create a return value for function", .{}), - }, - }); - _ = try appendToken(rp.c, .Semicolon, ";"); - try block_scope.statements.append(&return_expr.base); + const rhs = transZeroInitExpr(c, scope, fn_decl_loc, return_qt.getTypePtr()) catch |err| switch (err) { + error.OutOfMemory => |e| return e, + error.UnsupportedTranslation, + error.UnsupportedType, + => return failDecl(c, fn_decl_loc, fn_name, "unable to create a return value for function", .{}), + }; + const ret = try Node.@"return".create(c.arena, rhs); + try block_scope.statements.append(ret); } - 
const body_node = try block_scope.complete(rp.c); - proto_node.setBodyNode(body_node); + proto_node.body = try block_scope.complete(c); return addTopLevelDecl(c, fn_name, &proto_node.base); } @@ -2440,16 +2420,16 @@ fn transInitListExpr( } fn transZeroInitExpr( - rp: RestorePoint, + c: *Context, scope: *Scope, source_loc: clang.SourceLocation, ty: *const clang.Type, -) TransError!*ast.Node { +) TransError!Node { switch (ty.getTypeClass()) { .Builtin => { const builtin_ty = @ptrCast(*const clang.BuiltinType, ty); switch (builtin_ty.getKind()) { - .Bool => return try transCreateNodeBoolLiteral(rp.c, false), + .Bool => return Node.false_literal.init(), .Char_U, .UChar, .Char_S, @@ -2470,16 +2450,16 @@ fn transZeroInitExpr( .Float128, .Float16, .LongDouble, - => return transCreateNodeInt(rp.c, 0), - else => return revertAndWarn(rp, error.UnsupportedType, source_loc, "unsupported builtin type", .{}), + => return Node.zero_literal.init(), + else => return fail(c, error.UnsupportedType, source_loc, "unsupported builtin type", .{}), } }, - .Pointer => return transCreateNodeNullLiteral(rp.c), + .Pointer => return Node.null_literal.init(), .Typedef => { const typedef_ty = @ptrCast(*const clang.TypedefType, ty); const typedef_decl = typedef_ty.getDecl(); return transZeroInitExpr( - rp, + c, scope, source_loc, typedef_decl.getUnderlyingType().getTypePtr(), @@ -2488,7 +2468,7 @@ fn transZeroInitExpr( else => {}, } - return revertAndWarn(rp, error.UnsupportedType, source_loc, "type does not have an implicit init value", .{}); + return fail(c, error.UnsupportedType, source_loc, "type does not have an implicit init value", .{}); } fn transImplicitValueInitExpr( @@ -3985,7 +3965,7 @@ fn qualTypeToLog2IntRef(c: *Context, qt: clang.QualType, source_loc: clang.Sourc if (int_bit_width != 0) { // we can perform the log2 now. 
const cast_bit_width = math.log2_int(u64, int_bit_width); - return Node.uint_type.create(c.arena, cast_bit_width); + return Node.log2_int_type.create(c.arena, cast_bit_width); } const zig_type = try transQualType(c, qt, source_loc); @@ -4886,7 +4866,7 @@ const FnDeclContext = struct { }; fn transCC( - rp: RestorePoint, + c: *Context, fn_ty: *const clang.FunctionType, source_loc: clang.SourceLocation, ) !CallingConvention { @@ -4899,7 +4879,7 @@ fn transCC( .X86ThisCall => return CallingConvention.Thiscall, .AAPCS => return CallingConvention.AAPCS, .AAPCS_VFP => return CallingConvention.AAPCSVFP, - else => return revertAndWarn( + else => return fail( rp, error.UnsupportedType, source_loc, @@ -4910,33 +4890,33 @@ fn transCC( } fn transFnProto( - rp: RestorePoint, + c: *Context, fn_decl: ?*const clang.FunctionDecl, fn_proto_ty: *const clang.FunctionProtoType, source_loc: clang.SourceLocation, fn_decl_context: ?FnDeclContext, is_pub: bool, -) !*ast.Node.FnProto { +) !Node.FnProto { const fn_ty = @ptrCast(*const clang.FunctionType, fn_proto_ty); - const cc = try transCC(rp, fn_ty, source_loc); + const cc = try transCC(c, fn_ty, source_loc); const is_var_args = fn_proto_ty.isVariadic(); - return finishTransFnProto(rp, fn_decl, fn_proto_ty, fn_ty, source_loc, fn_decl_context, is_var_args, cc, is_pub); + return finishTransFnProto(c, fn_decl, fn_proto_ty, fn_ty, source_loc, fn_decl_context, is_var_args, cc, is_pub); } fn transFnNoProto( - rp: RestorePoint, + c: *Context, fn_ty: *const clang.FunctionType, source_loc: clang.SourceLocation, fn_decl_context: ?FnDeclContext, is_pub: bool, -) !*ast.Node.FnProto { - const cc = try transCC(rp, fn_ty, source_loc); +) !Node.FnProto { + const cc = try transCC(c, fn_ty, source_loc); const is_var_args = if (fn_decl_context) |ctx| (!ctx.is_export and ctx.storage_class != .Static) else true; - return finishTransFnProto(rp, null, null, fn_ty, source_loc, fn_decl_context, is_var_args, cc, is_pub); + return finishTransFnProto(c, null, null, 
fn_ty, source_loc, fn_decl_context, is_var_args, cc, is_pub); } fn finishTransFnProto( - rp: RestorePoint, + c: *Context, fn_decl: ?*const clang.FunctionDecl, fn_proto_ty: ?*const clang.FunctionProtoType, fn_ty: *const clang.FunctionType, @@ -4945,128 +4925,77 @@ fn finishTransFnProto( is_var_args: bool, cc: CallingConvention, is_pub: bool, -) !*ast.Node.FnProto { +) !*ast.Payload.Func { const is_export = if (fn_decl_context) |ctx| ctx.is_export else false; const is_extern = if (fn_decl_context) |ctx| !ctx.has_body else false; // TODO check for always_inline attribute // TODO check for align attribute - // pub extern fn name(...) T - const pub_tok = if (is_pub) try appendToken(rp.c, .Keyword_pub, "pub") else null; - const extern_export_inline_tok = if (is_export) - try appendToken(rp.c, .Keyword_export, "export") - else if (is_extern) - try appendToken(rp.c, .Keyword_extern, "extern") - else - null; - const fn_tok = try appendToken(rp.c, .Keyword_fn, "fn"); - const name_tok = if (fn_decl_context) |ctx| try appendIdentifier(rp.c, ctx.fn_name) else null; - const lparen_tok = try appendToken(rp.c, .LParen, "("); - - var fn_params = std.ArrayList(ast.Node.FnProto.ParamDecl).init(rp.c.gpa); + var fn_params = std.ArrayList(ast.Payload.Func.Param).init(c.gpa); defer fn_params.deinit(); const param_count: usize = if (fn_proto_ty != null) fn_proto_ty.?.getNumParams() else 0; - try fn_params.ensureCapacity(param_count + 1); // +1 for possible var args node + try fn_params.ensureCapacity(param_count); var i: usize = 0; while (i < param_count) : (i += 1) { const param_qt = fn_proto_ty.?.getParamType(@intCast(c_uint, i)); + const is_noalias = param_qt.isRestrictQualified(); - const noalias_tok = if (param_qt.isRestrictQualified()) try appendToken(rp.c, .Keyword_noalias, "noalias") else null; + const param_name: ?[]const u8 = + if (fn_decl) |decl| + blk: { + const param = decl.getParamDecl(@intCast(c_uint, i)); + const param_name: []const u8 = try c.str(@ptrCast(*const 
clang.NamedDecl, param).getName_bytes_begin()); + if (param_name.len < 1) + break :blk null; - const param_name_tok: ?ast.TokenIndex = blk: { - if (fn_decl) |decl| { - const param = decl.getParamDecl(@intCast(c_uint, i)); - const param_name: []const u8 = try rp.c.str(@ptrCast(*const clang.NamedDecl, param).getName_bytes_begin()); - if (param_name.len < 1) - break :blk null; - - const result = try appendIdentifier(rp.c, param_name); - _ = try appendToken(rp.c, .Colon, ":"); - break :blk result; - } - break :blk null; - }; - - const type_node = try transQualType(rp, param_qt, source_loc); + break :blk param_name; + } else null; + const type_node = try transQualType(c, param_qt, source_loc); fn_params.addOneAssumeCapacity().* = .{ - .doc_comments = null, - .comptime_token = null, - .noalias_token = noalias_tok, - .name_token = param_name_tok, - .param_type = .{ .type_expr = type_node }, + .is_noalias = is_noalias, + .name = param_name, + .type = type_node, }; - - if (i + 1 < param_count) { - _ = try appendToken(rp.c, .Comma, ","); - } } - const var_args_token: ?ast.TokenIndex = if (is_var_args) blk: { - if (param_count > 0) { - _ = try appendToken(rp.c, .Comma, ","); - } - break :blk try appendToken(rp.c, .Ellipsis3, "..."); - } else null; - - const rparen_tok = try appendToken(rp.c, .RParen, ")"); - - const linksection_expr = blk: { + const link_section_string: ?[]const u8 = blk: { if (fn_decl) |decl| { var str_len: usize = undefined; if (decl.getSectionAttribute(&str_len)) |str_ptr| { - _ = try appendToken(rp.c, .Keyword_linksection, "linksection"); - _ = try appendToken(rp.c, .LParen, "("); - const expr = try transCreateNodeStringLiteral( - rp.c, - try std.fmt.allocPrint(rp.c.arena, "\"{s}\"", .{str_ptr[0..str_len]}), - ); - _ = try appendToken(rp.c, .RParen, ")"); - - break :blk expr; + break :blk str_ptr[0..str_len]; } } break :blk null; }; - const align_expr = blk: { + const alignment: c_uint = blk: { if (fn_decl) |decl| { - const alignment = 
decl.getAlignedAttribute(rp.c.clang_context); + const alignment = decl.getAlignedAttribute(c.clang_context); if (alignment != 0) { - _ = try appendToken(rp.c, .Keyword_align, "align"); - _ = try appendToken(rp.c, .LParen, "("); // Clang reports the alignment in bits - const expr = try transCreateNodeInt(rp.c, alignment / 8); - _ = try appendToken(rp.c, .RParen, ")"); - - break :blk expr; + break :blk alignment / 8; } } break :blk null; }; - const callconv_expr = if ((is_export or is_extern) and cc == .C) null else blk: { - _ = try appendToken(rp.c, .Keyword_callconv, "callconv"); - _ = try appendToken(rp.c, .LParen, "("); - const expr = try transCreateNodeEnumLiteral(rp.c, @tagName(cc)); - _ = try appendToken(rp.c, .RParen, ")"); - break :blk expr; - }; + const explicit_callconv = if ((is_export or is_extern) and cc == .C) null else cc; const return_type_node = blk: { if (fn_ty.getNoReturnAttr()) { - break :blk try transCreateNodeIdentifier(rp.c, "noreturn"); + break :blk Node.noreturn_type.init(); } else { const return_qt = fn_ty.getReturnType(); if (isCVoid(return_qt)) { // convert primitive c_void to actual void (only for return type) - break :blk try transCreateNodeIdentifier(rp.c, "void"); + break :blk Node.void_type.init(); } else { - break :blk transQualType(rp, return_qt, source_loc) catch |err| switch (err) { + break :blk transQualType(c, return_qt, source_loc) catch |err| switch (err) { error.UnsupportedType => { - try emitWarning(rp.c, source_loc, "unsupported function proto return type", .{}); + try warn(c, source_loc, "unsupported function proto return type", .{}); return err; }, error.OutOfMemory => |e| return e, @@ -5075,32 +5004,23 @@ fn finishTransFnProto( } }; - // We need to reserve an undefined (but non-null) body node to set later. - var body_node: ?*ast.Node = null; - if (fn_decl_context) |ctx| { - if (ctx.has_body) { - // TODO: we should be able to use undefined here but - // it causes a bug. 
This is undefined without zig language - // being aware of it. - body_node = @intToPtr(*ast.Node, 0x08); - } - } - - const fn_proto = try ast.Node.FnProto.create(rp.c.arena, .{ - .params_len = fn_params.items.len, - .return_type = .{ .Explicit = return_type_node }, - .fn_token = fn_tok, - }, .{ - .visib_token = pub_tok, - .name_token = name_tok, - .extern_export_inline_token = extern_export_inline_tok, - .align_expr = align_expr, - .section_expr = linksection_expr, - .callconv_expr = callconv_expr, - .body_node = body_node, - .var_args_token = var_args_token, - }); - mem.copy(ast.Node.FnProto.ParamDecl, fn_proto.params(), fn_params.items); + const fn_proto = try c.arena.create(ast.Payload.Func); + fn_proto.* = .{ + .base = .{ .tag = .func }, + .data = .{ + .is_pub = is_pub, + .is_extern = is_extern, + .is_export = is_export, + .is_var_args = is_var_args, + .name = name, + .link_section_string = link_section_string, + .explicit_callconv = explicit_callconv, + .params = c.arena.dupe(ast.Payload.Func.Param, fn_params.items), + .return_type = return_node, + .body = null, + .alignment = alignment, + }, + }; return fn_proto; } @@ -5122,124 +5042,12 @@ fn fail( } pub fn failDecl(c: *Context, loc: clang.SourceLocation, name: []const u8, comptime format: []const u8, args: anytype) !void { + // location // pub const name = @compileError(msg); - const pub_tok = try appendToken(c, .Keyword_pub, "pub"); - const const_tok = try appendToken(c, .Keyword_const, "const"); - const name_tok = try appendIdentifier(c, name); - const eq_tok = try appendToken(c, .Equal, "="); - const builtin_tok = try appendToken(c, .Builtin, "@compileError"); - const lparen_tok = try appendToken(c, .LParen, "("); - const msg_tok = try appendTokenFmt(c, .StringLiteral, "\"" ++ format ++ "\"", args); - const rparen_tok = try appendToken(c, .RParen, ")"); - const semi_tok = try appendToken(c, .Semicolon, ";"); - _ = try appendTokenFmt(c, .LineComment, "// {s}", .{c.locStr(loc)}); - - const msg_node = try 
c.arena.create(ast.Node.OneToken); - msg_node.* = .{ - .base = .{ .tag = .StringLiteral }, - .token = msg_tok, - }; - - const call_node = try ast.Node.BuiltinCall.alloc(c.arena, 1); - call_node.* = .{ - .builtin_token = builtin_tok, - .params_len = 1, - .rparen_token = rparen_tok, - }; - call_node.params()[0] = &msg_node.base; - - const var_decl_node = try ast.Node.VarDecl.create(c.arena, .{ - .name_token = name_tok, - .mut_token = const_tok, - .semicolon_token = semi_tok, - }, .{ - .visib_token = pub_tok, - .eq_token = eq_tok, - .init_node = &call_node.base, - }); - try addTopLevelDecl(c, name, &var_decl_node.base); -} - -fn appendToken(c: *Context, token_id: Token.Id, bytes: []const u8) !ast.TokenIndex { - std.debug.assert(token_id != .Identifier); // use appendIdentifier - return appendTokenFmt(c, token_id, "{s}", .{bytes}); -} - -fn appendTokenFmt(c: *Context, token_id: Token.Id, comptime format: []const u8, args: anytype) !ast.TokenIndex { - assert(token_id != .Invalid); - - try c.token_ids.ensureCapacity(c.gpa, c.token_ids.items.len + 1); - try c.token_locs.ensureCapacity(c.gpa, c.token_locs.items.len + 1); - - const start_index = c.source_buffer.items.len; - try c.source_buffer.writer().print(format ++ " ", args); - - c.token_ids.appendAssumeCapacity(token_id); - c.token_locs.appendAssumeCapacity(.{ - .start = start_index, - .end = c.source_buffer.items.len - 1, // back up before the space - }); - - return c.token_ids.items.len - 1; -} - -// TODO hook up with codegen -fn isZigPrimitiveType(name: []const u8) bool { - if (name.len > 1 and (name[0] == 'u' or name[0] == 'i')) { - for (name[1..]) |c| { - switch (c) { - '0'...'9' => {}, - else => return false, - } - } - return true; - } - // void is invalid in c so it doesn't need to be checked. 
- return mem.eql(u8, name, "comptime_float") or - mem.eql(u8, name, "comptime_int") or - mem.eql(u8, name, "bool") or - mem.eql(u8, name, "isize") or - mem.eql(u8, name, "usize") or - mem.eql(u8, name, "f16") or - mem.eql(u8, name, "f32") or - mem.eql(u8, name, "f64") or - mem.eql(u8, name, "f128") or - mem.eql(u8, name, "c_longdouble") or - mem.eql(u8, name, "noreturn") or - mem.eql(u8, name, "type") or - mem.eql(u8, name, "anyerror") or - mem.eql(u8, name, "c_short") or - mem.eql(u8, name, "c_ushort") or - mem.eql(u8, name, "c_int") or - mem.eql(u8, name, "c_uint") or - mem.eql(u8, name, "c_long") or - mem.eql(u8, name, "c_ulong") or - mem.eql(u8, name, "c_longlong") or - mem.eql(u8, name, "c_ulonglong"); -} - -fn appendIdentifier(c: *Context, name: []const u8) !ast.TokenIndex { - return appendTokenFmt(c, .Identifier, "{}", .{std.zig.fmtId(name)}); -} - -fn transCreateNodeIdentifier(c: *Context, name: []const u8) !*ast.Node { - const token_index = try appendIdentifier(c, name); - const identifier = try c.arena.create(ast.Node.OneToken); - identifier.* = .{ - .base = .{ .tag = .Identifier }, - .token = token_index, - }; - return &identifier.base; -} - -fn transCreateNodeIdentifierUnchecked(c: *Context, name: []const u8) !*ast.Node { - const token_index = try appendTokenFmt(c, .Identifier, "{s}", .{name}); - const identifier = try c.arena.create(ast.Node.OneToken); - identifier.* = .{ - .base = .{ .tag = .Identifier }, - .token = token_index, - }; - return &identifier.base; + const location_comment = std.fmt.allocPrint(c.arena, "// {s}", .{c.locStr(loc)}); + try c.global_scope.nodes.append(try Node.warning.create(c.arena, location_comment)); + const fail_msg = std.fmt.allocPrint(c.arena, format, args); + try c.global_scope.nodes.append(try Node.fail_decl.create(c.arena, fail_msg)); } pub fn freeErrors(errors: []ClangErrMsg) void { diff --git a/src/translate_c/ast.zig b/src/translate_c/ast.zig index cc5e8dd7ce..825d36add1 100644 --- a/src/translate_c/ast.zig +++ 
b/src/translate_c/ast.zig @@ -14,6 +14,10 @@ pub const Node = extern union { true_literal, false_literal, empty_block, + return_void, + zero_literal, + void_type, + noreturn_type, /// pub usingnamespace @import("std").c.builtins; usingnamespace_builtins, // After this, the tag requires a payload. @@ -99,6 +103,7 @@ pub const Node = extern union { bit_or, bit_xor, + log2_int_type, /// @import("std").math.Log2Int(operand) std_math_Log2Int, /// @intCast(lhs, rhs) @@ -132,7 +137,13 @@ pub const Node = extern union { single_pointer, array_type, - pub const last_no_payload_tag = Tag.false_literal; + + // pub const name = @compileError(msg); + fail_decl, + // var actual = mangled; + arg_redecl, + + pub const last_no_payload_tag = Tag.usingnamespace_builtins; pub const no_payload_count = @enumToInt(last_no_payload_tag) + 1; pub fn Type(tag: Tag) ?type { @@ -144,6 +155,10 @@ pub const Node = extern union { .false_litral, .empty_block, .usingnamespace_builtins, + .return_void, + .zero_literal, + .void_type, + .noreturn_type, => @compileError("Type Tag " ++ @tagName(t) ++ " has no payload"), .array_access, @@ -227,6 +242,7 @@ pub const Node = extern union { .sizeof, .alignof, .type, + .fail_decl, => Payload.Value, .@"if" => Payload.If, .@"while" => Payload.While, @@ -244,6 +260,8 @@ pub const Node = extern union { .c_pointer => Payload.Pointer, .single_pointer => Payload.Pointer, .array_type => Payload.Array, + .arg_redecl => Payload.ArgRedecl, + .log2_int_type => Payload.Log2IntType, }; } @@ -265,6 +283,24 @@ pub const Node = extern union { return std.meta.fieldInfo(t.Type(), .data).field_type; } }; + + pub fn tag(self: Node) Tag { + if (self.tag_if_small_enough < Tag.no_payload_count) { + return @intToEnum(Tag, @intCast(@TagType(Tag), self.tag_if_small_enough)); + } else { + return self.ptr_otherwise.tag; + } + } + + pub fn castTag(self: Node, comptime t: Tag) ?*t.Type() { + if (self.tag_if_small_enough < Tag.no_payload_count) + return null; + + if (self.ptr_otherwise.tag 
== t) + return @fieldParentPtr(t.Type(), "base", self.ptr_otherwise); + + return null; + } }; pub const Payload = struct { @@ -360,19 +396,22 @@ pub const Payload = struct { pub const Func = struct { base: Node = .{.func}, data: struct { - @"pub": bool, - @"extern": bool, - @"export": bool, + is_pub: bool, + is_extern: bool, + is_export: bool, + is_var_args: bool, name: []const u8, - cc: std.builtin.CallingConvention, + link_section_string: ?[]const u8, + explicit_callconv: ?std.builtin.CallingConvention, params: []Param, - return_type: Type, + return_type: Node, body: ?Node, + alignment: c_uint, pub const Param = struct { - @"noalias": bool, + is_noalias: bool, name: ?[]const u8, - type: Type, + type: Node, }; }, }; @@ -449,6 +488,19 @@ pub const Payload = struct { is_volatile: bool, }, }; + + pub const ArgRedecl = struct { + base: Node, + data: struct { + actual: []const u8, + mangled: []const u8, + }, + }; + + pub const Log2IntType = struct { + base: Node, + data: std.math.Log2Int(u64), + }; }; /// Converts the nodes into a Zig ast. 
From bb867b071a9e2fa69f39a45532dffa0f51ffdbfd Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Mon, 8 Feb 2021 11:07:44 +0200 Subject: [PATCH 070/173] translate-c: convert vardecl and typedef --- src/astgen.zig | 70 +++---- src/translate_c.zig | 425 ++++++++++++++++------------------------ src/translate_c/ast.zig | 42 ++-- 3 files changed, 231 insertions(+), 306 deletions(-) diff --git a/src/astgen.zig b/src/astgen.zig index 56d1497f63..dd600e2840 100644 --- a/src/astgen.zig +++ b/src/astgen.zig @@ -2664,8 +2664,11 @@ fn identifier( return mod.failNode(scope, ident, "TODO implement '_' identifier", .{}); } - if (getSimplePrimitiveValue(ident_name)) |typed_value| { - const result = try addZIRInstConst(mod, scope, src, typed_value); + if (simple_types.get(ident_name)) |val_tag| { + const result = try addZIRInstConst(mod, scope, src, TypedValue{ + .ty = Type.initTag(.type), + .val = Value.initTag(val_tag), + }); return rvalue(mod, scope, rl, result); } @@ -3325,42 +3328,33 @@ fn callExpr( return rvalue(mod, scope, rl, result); } -fn getSimplePrimitiveValue(name: []const u8) ?TypedValue { - const simple_types = std.ComptimeStringMap(Value.Tag, .{ - .{ "u8", .u8_type }, - .{ "i8", .i8_type }, - .{ "isize", .isize_type }, - .{ "usize", .usize_type }, - .{ "c_short", .c_short_type }, - .{ "c_ushort", .c_ushort_type }, - .{ "c_int", .c_int_type }, - .{ "c_uint", .c_uint_type }, - .{ "c_long", .c_long_type }, - .{ "c_ulong", .c_ulong_type }, - .{ "c_longlong", .c_longlong_type }, - .{ "c_ulonglong", .c_ulonglong_type }, - .{ "c_longdouble", .c_longdouble_type }, - .{ "f16", .f16_type }, - .{ "f32", .f32_type }, - .{ "f64", .f64_type }, - .{ "f128", .f128_type }, - .{ "c_void", .c_void_type }, - .{ "bool", .bool_type }, - .{ "void", .void_type }, - .{ "type", .type_type }, - .{ "anyerror", .anyerror_type }, - .{ "comptime_int", .comptime_int_type }, - .{ "comptime_float", .comptime_float_type }, - .{ "noreturn", .noreturn_type }, - }); - if (simple_types.get(name)) |tag| 
{ - return TypedValue{ - .ty = Type.initTag(.type), - .val = Value.initTag(tag), - }; - } - return null; -} +pub const simple_types = std.ComptimeStringMap(Value.Tag, .{ + .{ "u8", .u8_type }, + .{ "i8", .i8_type }, + .{ "isize", .isize_type }, + .{ "usize", .usize_type }, + .{ "c_short", .c_short_type }, + .{ "c_ushort", .c_ushort_type }, + .{ "c_int", .c_int_type }, + .{ "c_uint", .c_uint_type }, + .{ "c_long", .c_long_type }, + .{ "c_ulong", .c_ulong_type }, + .{ "c_longlong", .c_longlong_type }, + .{ "c_ulonglong", .c_ulonglong_type }, + .{ "c_longdouble", .c_longdouble_type }, + .{ "f16", .f16_type }, + .{ "f32", .f32_type }, + .{ "f64", .f64_type }, + .{ "f128", .f128_type }, + .{ "c_void", .c_void_type }, + .{ "bool", .bool_type }, + .{ "void", .void_type }, + .{ "type", .type_type }, + .{ "anyerror", .anyerror_type }, + .{ "comptime_int", .comptime_int_type }, + .{ "comptime_float", .comptime_float_type }, + .{ "noreturn", .noreturn_type }, +}); fn nodeMayNeedMemoryLocation(start_node: *ast.Node, scope: *Scope) bool { var node = start_node; diff --git a/src/translate_c.zig b/src/translate_c.zig index 2bbfe0a562..bcefc41971 100644 --- a/src/translate_c.zig +++ b/src/translate_c.zig @@ -614,25 +614,21 @@ fn visitFnDecl(c: *Context, fn_decl: *const clang.FunctionDecl) Error!void { return addTopLevelDecl(c, fn_name, &proto_node.base); } -fn transQualTypeMaybeInitialized(rp: RestorePoint, qt: clang.QualType, decl_init: ?*const clang.Expr, loc: clang.SourceLocation) TransError!*ast.Node { +fn transQualTypeMaybeInitialized(c: *Context, qt: clang.QualType, decl_init: ?*const clang.Expr, loc: clang.SourceLocation) TransError!Node { return if (decl_init) |init_expr| - transQualTypeInitialized(rp, qt, init_expr, loc) + transQualTypeInitialized(c, qt, init_expr, loc) else - transQualType(rp, qt, loc); + transQualType(c, qt, loc); } + /// if mangled_name is not null, this var decl was declared in a block scope. 
fn visitVarDecl(c: *Context, var_decl: *const clang.VarDecl, mangled_name: ?[]const u8) Error!void { const var_name = mangled_name orelse try c.str(@ptrCast(*const clang.NamedDecl, var_decl).getName_bytes_begin()); if (c.global_scope.sym_table.contains(var_name)) return; // Avoid processing this decl twice - const rp = makeRestorePoint(c); - const visib_tok = if (mangled_name) |_| null else try appendToken(c, .Keyword_pub, "pub"); - - const thread_local_token = if (var_decl.getTLSKind() == .None) - null - else - try appendToken(c, .Keyword_threadlocal, "threadlocal"); + const is_pub = mangled_name == null; + const is_thread_local = var_decl.getTLSKind() != .None; const scope = &c.global_scope.base; // TODO https://github.com/ziglang/zig/issues/3756 @@ -651,42 +647,27 @@ fn visitVarDecl(c: *Context, var_decl: *const clang.VarDecl, mangled_name: ?[]co // does the same as: // extern int foo; // int foo = 2; - const extern_tok = if (storage_class == .Extern and !has_init) - try appendToken(c, .Keyword_extern, "extern") - else if (storage_class != .Static) - try appendToken(c, .Keyword_export, "export") - else - null; + const is_extern = storage_class == .Extern and !has_init; + const is_export = !is_extern and storage_class != .Static; - const mut_tok = if (is_const) - try appendToken(c, .Keyword_const, "const") - else - try appendToken(c, .Keyword_var, "var"); - - const name_tok = try appendIdentifier(c, checked_name); - - _ = try appendToken(c, .Colon, ":"); - - const type_node = transQualTypeMaybeInitialized(rp, qual_type, decl_init, var_decl_loc) catch |err| switch (err) { + const type_node = transQualTypeMaybeInitialized(c, qual_type, decl_init, var_decl_loc) catch |err| switch (err) { error.UnsupportedTranslation, error.UnsupportedType => { return failDecl(c, var_decl_loc, checked_name, "unable to resolve variable type", .{}); }, error.OutOfMemory => |e| return e, }; - var eq_tok: ast.TokenIndex = undefined; - var init_node: ?*ast.Node = null; + var init_node: 
?Node = null; // If the initialization expression is not present, initialize with undefined. // If it is an integer literal, we can skip the @as since it will be redundant // with the variable type. if (has_init) { - eq_tok = try appendToken(c, .Equal, "="); if (decl_init) |expr| { const node_or_error = if (expr.getStmtClass() == .StringLiteralClass) - transStringLiteralAsArray(rp, &c.global_scope.base, @ptrCast(*const clang.StringLiteral, expr), zigArraySize(rp.c, type_node) catch 0) + transStringLiteralAsArray(c, scope, @ptrCast(*const clang.StringLiteral, expr), zigArraySize(c, type_node) catch 0) else - transExprCoercing(rp, scope, expr, .used, .r_value); + transExprCoercing(c, scope, expr, .used, .r_value); init_node = node_or_error catch |err| switch (err) { error.UnsupportedTranslation, error.UnsupportedType, @@ -695,118 +676,83 @@ fn visitVarDecl(c: *Context, var_decl: *const clang.VarDecl, mangled_name: ?[]co }, error.OutOfMemory => |e| return e, }; + if (!qualTypeIsBoolean(qual_type) and isBoolRes(init_node)) { + init_node = try Node.bool_to_int.create(c.arena, init_node); + } } else { - init_node = try transCreateNodeUndefinedLiteral(c); + init_node = Node.undefined_literal.init(); } } else if (storage_class != .Extern) { - eq_tok = try appendToken(c, .Equal, "="); // The C language specification states that variables with static or threadlocal // storage without an initializer are initialized to a zero value. 
// @import("std").mem.zeroes(T) - const import_fn_call = try c.createBuiltinCall("@import", 1); - const std_node = try transCreateNodeStringLiteral(c, "\"std\""); - import_fn_call.params()[0] = std_node; - import_fn_call.rparen_token = try appendToken(c, .RParen, ")"); - const inner_field_access = try transCreateNodeFieldAccess(c, &import_fn_call.base, "mem"); - const outer_field_access = try transCreateNodeFieldAccess(c, inner_field_access, "zeroes"); - - const zero_init_call = try c.createCall(outer_field_access, 1); - zero_init_call.params()[0] = type_node; - zero_init_call.rtoken = try appendToken(c, .RParen, ")"); - - init_node = &zero_init_call.base; + init_node = try Node.std_mem_zeroes.create(c.arena, type_node); } - const linksection_expr = blk: { + const linksection_string = blk: { var str_len: usize = undefined; if (var_decl.getSectionAttribute(&str_len)) |str_ptr| { - _ = try appendToken(rp.c, .Keyword_linksection, "linksection"); - _ = try appendToken(rp.c, .LParen, "("); - const expr = try transCreateNodeStringLiteral( - rp.c, - try std.fmt.allocPrint(rp.c.arena, "\"{s}\"", .{str_ptr[0..str_len]}), - ); - _ = try appendToken(rp.c, .RParen, ")"); - - break :blk expr; + break :blk str_ptr[0..str_len]; } break :blk null; }; - const align_expr = blk: { - const alignment = var_decl.getAlignedAttribute(rp.c.clang_context); + const alignment = blk: { + const alignment = var_decl.getAlignedAttribute(c.clang_context); if (alignment != 0) { - _ = try appendToken(rp.c, .Keyword_align, "align"); - _ = try appendToken(rp.c, .LParen, "("); // Clang reports the alignment in bits - const expr = try transCreateNodeInt(rp.c, alignment / 8); - _ = try appendToken(rp.c, .RParen, ")"); - - break :blk expr; + break :blk alignment / 8; } break :blk null; }; - const node = try ast.Node.VarDecl.create(c.arena, .{ - .name_token = name_tok, - .mut_token = mut_tok, - .semicolon_token = try appendToken(c, .Semicolon, ";"), - }, .{ - .visib_token = visib_tok, - .thread_local_token 
= thread_local_token, - .eq_token = eq_tok, - .extern_export_token = extern_tok, - .type_node = type_node, - .align_node = align_expr, - .section_node = linksection_expr, - .init_node = init_node, + const node = try Node.var_decl.create(c.arena, .{ + .is_pub = is_pub, + .is_const = is_const, + .is_extern = is_extern, + .is_export = is_export, + .linksection_string = linksection_string, + .alignment = alignment, + .name = checked_name, + .type = type_node, + .init = init_node, }); return addTopLevelDecl(c, checked_name, &node.base); } -fn transTypeDefAsBuiltin(c: *Context, typedef_decl: *const clang.TypedefNameDecl, builtin_name: []const u8) !*ast.Node { +fn transTypeDefAsBuiltin(c: *Context, typedef_decl: *const clang.TypedefNameDecl, builtin_name: []const u8) !Node { _ = try c.decl_table.put(c.gpa, @ptrToInt(typedef_decl.getCanonicalDecl()), builtin_name); - return transCreateNodeIdentifier(c, builtin_name); + return Node.identifier.create(c.arena, builtin_name); } -fn checkForBuiltinTypedef(checked_name: []const u8) ?[]const u8 { - const table = [_][2][]const u8{ - .{ "uint8_t", "u8" }, - .{ "int8_t", "i8" }, - .{ "uint16_t", "u16" }, - .{ "int16_t", "i16" }, - .{ "uint32_t", "u32" }, - .{ "int32_t", "i32" }, - .{ "uint64_t", "u64" }, - .{ "int64_t", "i64" }, - .{ "intptr_t", "isize" }, - .{ "uintptr_t", "usize" }, - .{ "ssize_t", "isize" }, - .{ "size_t", "usize" }, - }; +const builtin_typedef_map = std.ComptimeStringMap([]const u8, .{ + .{ "uint8_t", "u8" }, + .{ "int8_t", "i8" }, + .{ "uint16_t", "u16" }, + .{ "int16_t", "i16" }, + .{ "uint32_t", "u32" }, + .{ "int32_t", "i32" }, + .{ "uint64_t", "u64" }, + .{ "int64_t", "i64" }, + .{ "intptr_t", "isize" }, + .{ "uintptr_t", "usize" }, + .{ "ssize_t", "isize" }, + .{ "size_t", "usize" }, +}); - for (table) |entry| { - if (mem.eql(u8, checked_name, entry[0])) { - return entry[1]; - } - } - - return null; -} - -fn transTypeDef(c: *Context, typedef_decl: *const clang.TypedefNameDecl, top_level_visit: bool) 
Error!?*ast.Node { +fn transTypeDef(c: *Context, typedef_decl: *const clang.TypedefNameDecl, top_level_visit: bool) Error!?Node { if (c.decl_table.get(@ptrToInt(typedef_decl.getCanonicalDecl()))) |name| return transCreateNodeIdentifier(c, name); // Avoid processing this decl twice - const rp = makeRestorePoint(c); const typedef_name = try c.str(@ptrCast(*const clang.NamedDecl, typedef_decl).getName_bytes_begin()); // TODO https://github.com/ziglang/zig/issues/3756 // TODO https://github.com/ziglang/zig/issues/1802 const checked_name = if (isZigPrimitiveType(typedef_name)) try std.fmt.allocPrint(c.arena, "{s}_{d}", .{ typedef_name, c.getMangle() }) else typedef_name; - if (checkForBuiltinTypedef(checked_name)) |builtin| { - return transTypeDefAsBuiltin(c, typedef_decl, builtin); + if (builtin_typedef_map.get(checked_name)) |builtin| { + _ = try c.decl_table.put(c.gpa, @ptrToInt(typedef_decl.getCanonicalDecl()), builtin); + return Node.identifier.create(c.arena, builtin); } if (!top_level_visit) { @@ -814,42 +760,36 @@ fn transTypeDef(c: *Context, typedef_decl: *const clang.TypedefNameDecl, top_lev } _ = try c.decl_table.put(c.gpa, @ptrToInt(typedef_decl.getCanonicalDecl()), checked_name); - const node = (try transCreateNodeTypedef(rp, typedef_decl, true, checked_name)) orelse return null; + const node = (try transCreateNodeTypedef(c, typedef_decl, true, checked_name)) orelse return null; try addTopLevelDecl(c, checked_name, node); return transCreateNodeIdentifier(c, checked_name); } fn transCreateNodeTypedef( - rp: RestorePoint, + c: *Context, typedef_decl: *const clang.TypedefNameDecl, toplevel: bool, checked_name: []const u8, -) Error!?*ast.Node { - const visib_tok = if (toplevel) try appendToken(rp.c, .Keyword_pub, "pub") else null; - const mut_tok = try appendToken(rp.c, .Keyword_const, "const"); - const name_tok = try appendIdentifier(rp.c, checked_name); - const eq_token = try appendToken(rp.c, .Equal, "="); +) Error!?Node { const child_qt = 
typedef_decl.getUnderlyingType(); const typedef_loc = typedef_decl.getLocation(); - const init_node = transQualType(rp, child_qt, typedef_loc) catch |err| switch (err) { + const init_node = transQualType(c, child_qt, typedef_loc) catch |err| switch (err) { error.UnsupportedType => { - try failDecl(rp.c, typedef_loc, checked_name, "unable to resolve typedef child type", .{}); + try failDecl(c, typedef_loc, checked_name, "unable to resolve typedef child type", .{}); return null; }, error.OutOfMemory => |e| return e, }; - const semicolon_token = try appendToken(rp.c, .Semicolon, ";"); - const node = try ast.Node.VarDecl.create(rp.c.arena, .{ - .name_token = name_tok, - .mut_token = mut_tok, - .semicolon_token = semicolon_token, - }, .{ - .visib_token = visib_tok, - .eq_token = eq_token, - .init_node = init_node, - }); - return &node.base; + const payload = try c.arena.create(ast.Payload.Typedef); + payload.* = .{ + .base = .{ .tag = ([2]ast.Node.Tag{ .typedef, .pub_typedef })[toplevel] }, + .data = .{ + .name = checked_name, + .init = init_node, + }, + }; + return Node.initPayload(&payload.base); } fn transRecordDecl(c: *Context, record_decl: *const clang.RecordDecl) Error!?*ast.Node { @@ -1399,13 +1339,15 @@ fn transBinaryOperator( const lhs_uncasted = try transExpr(c, scope, stmt.getLHS(), .used, .l_value); const rhs_uncasted = try transExpr(c, scope, stmt.getRHS(), .used, .r_value); - const lhs = if (isBoolRes(lhs_uncasted)) + const lhs = if (isBoolRes(lhs_uncasted)) try Node.bool_to_int.create(c.arena, lhs_uncasted) - else lhs_uncasted; + else + lhs_uncasted; - const rhs = if (isBoolRes(rhs_uncasted)) + const rhs = if (isBoolRes(rhs_uncasted)) try Node.bool_to_int.create(c.arena, rhs_uncasted) - else rhs_uncasted; + else + rhs_uncasted; const payload = try c.arena.create(ast.Payload.BinOp); payload.* = .{ @@ -1415,7 +1357,7 @@ fn transBinaryOperator( .rhs = rhs, }, }; - return maybeSuppressResult(c, scope, used, &payload.base); + return maybeSuppressResult(c, 
scope, used, Node.initPayload(&payload.base)); } fn transCompoundStmtInline( @@ -1459,13 +1401,11 @@ fn transCStyleCastExprClass( } fn transDeclStmtOne( - rp: RestorePoint, + c: *Context, scope: *Scope, decl: *const clang.Decl, block_scope: *Scope.Block, -) TransError!*ast.Node { - const c = rp.c; - +) TransError!Node { switch (decl.getKind()) { .Var => { const var_decl = @ptrCast(*const clang.VarDecl, decl); @@ -1479,47 +1419,38 @@ fn transDeclStmtOne( .Extern, .Static => { // This is actually a global variable, put it in the global scope and reference it. // `_ = mangled_name;` - try visitVarDecl(rp.c, var_decl, mangled_name); - return try maybeSuppressResult(rp, scope, .unused, try transCreateNodeIdentifier(rp.c, mangled_name)); + try visitVarDecl(c, var_decl, mangled_name); + return try maybeSuppressResult(c, scope, .unused, try Node.identifier.create(c.arena, mangled_name)); }, else => {}, } - const mut_tok = if (qual_type.isConstQualified()) - try appendToken(c, .Keyword_const, "const") - else - try appendToken(c, .Keyword_var, "var"); - const name_tok = try appendIdentifier(c, mangled_name); + const is_const = qual_type.isConstQualified(); - _ = try appendToken(c, .Colon, ":"); const loc = decl.getLocation(); - const type_node = try transQualTypeMaybeInitialized(rp, qual_type, decl_init, loc); + const type_node = try transQualTypeMaybeInitialized(c, qual_type, decl_init, loc); - const eq_token = try appendToken(c, .Equal, "="); var init_node = if (decl_init) |expr| if (expr.getStmtClass() == .StringLiteralClass) - try transStringLiteralAsArray(rp, scope, @ptrCast(*const clang.StringLiteral, expr), try zigArraySize(rp.c, type_node)) + try transStringLiteralAsArray(c, scope, @ptrCast(*const clang.StringLiteral, expr), try zigArraySize(c, type_node)) else - try transExprCoercing(rp, scope, expr, .used, .r_value) + try transExprCoercing(c, scope, expr, .used, .r_value) else try transCreateNodeUndefinedLiteral(c); if (!qualTypeIsBoolean(qual_type) and 
isBoolRes(init_node)) { - const builtin_node = try rp.c.createBuiltinCall("@boolToInt", 1); - builtin_node.params()[0] = init_node; - builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")"); - init_node = &builtin_node.base; + init_node = try Node.bool_to_int.create(c.arena, init_node); } - const semicolon_token = try appendToken(c, .Semicolon, ";"); - const node = try ast.Node.VarDecl.create(c.arena, .{ - .name_token = name_tok, - .mut_token = mut_tok, - .semicolon_token = semicolon_token, - }, .{ - .eq_token = eq_token, - .type_node = type_node, - .init_node = init_node, + return Node.var_decl.create(c.arena, .{ + .is_pub = false, + .is_const = is_const, + .is_extern = false, + .is_export = false, + .linksection_string = null, + .alignment = null, + .name = mangled_name, + .type = type_node, + .init = init_node, }); - return &node.base; }, .Typedef => { const typedef_decl = @ptrCast(*const clang.TypedefNameDecl, decl); @@ -1529,7 +1460,7 @@ fn transDeclStmtOne( const underlying_type = underlying_qual.getTypePtr(); const mangled_name = try block_scope.makeMangledName(c, name); - const node = (try transCreateNodeTypedef(rp, typedef_decl, false, mangled_name)) orelse + const node = (try transCreateNodeTypedef(c, typedef_decl, false, mangled_name)) orelse return error.UnsupportedTranslation; return node; }, @@ -1543,14 +1474,14 @@ fn transDeclStmtOne( } } -fn transDeclStmt(rp: RestorePoint, scope: *Scope, stmt: *const clang.DeclStmt) TransError!*ast.Node { - const block_scope = scope.findBlockScope(rp.c) catch unreachable; +fn transDeclStmt(c: *Context, scope: *Scope, stmt: *const clang.DeclStmt) TransError!Node { + const block_scope = scope.findBlockScope(c) catch unreachable; var it = stmt.decl_begin(); const end_it = stmt.decl_end(); assert(it != end_it); while (true) : (it += 1) { - const node = try transDeclStmtOne(rp, scope, it[0], block_scope); + const node = try transDeclStmtOne(c, scope, it[0], block_scope); if (it + 1 == end_it) { return node; @@ 
-1562,15 +1493,15 @@ fn transDeclStmt(rp: RestorePoint, scope: *Scope, stmt: *const clang.DeclStmt) T } fn transDeclRefExpr( - rp: RestorePoint, + c: *Context, scope: *Scope, expr: *const clang.DeclRefExpr, lrvalue: LRValue, -) TransError!*ast.Node { +) TransError!Node { const value_decl = expr.getDecl(); - const name = try rp.c.str(@ptrCast(*const clang.NamedDecl, value_decl).getName_bytes_begin()); + const name = try c.str(@ptrCast(*const clang.NamedDecl, value_decl).getName_bytes_begin()); const mangled_name = scope.getAlias(name); - return transCreateNodeIdentifier(rp.c, mangled_name); + return Node.identifier.create(c.arena, mangled_name); } fn transImplicitCastExpr( @@ -1642,52 +1573,29 @@ fn transImplicitCastExpr( } fn transBoolExpr( - rp: RestorePoint, + c: *Context, scope: *Scope, expr: *const clang.Expr, used: ResultUsed, lrvalue: LRValue, - grouped: bool, -) TransError!*ast.Node { +) TransError!Node { if (@ptrCast(*const clang.Stmt, expr).getStmtClass() == .IntegerLiteralClass) { var is_zero: bool = undefined; - if (!(@ptrCast(*const clang.IntegerLiteral, expr).isZero(&is_zero, rp.c.clang_context))) { - return revertAndWarn(rp, error.UnsupportedTranslation, expr.getBeginLoc(), "invalid integer literal", .{}); + if (!(@ptrCast(*const clang.IntegerLiteral, expr).isZero(&is_zero, c.clang_context))) { + return revertAndWarn(c, error.UnsupportedTranslation, expr.getBeginLoc(), "invalid integer literal", .{}); } - return try transCreateNodeBoolLiteral(rp.c, !is_zero); + return Node{ .tag = ([2]ast.Node.Tag{ .true_literal, .false_literal })[is_zero] }; } - const lparen = if (grouped) - try appendToken(rp.c, .LParen, "(") - else - undefined; - var res = try transExpr(rp, scope, expr, used, lrvalue); - + var res = try transExpr(c, scope, expr, used, lrvalue); if (isBoolRes(res)) { - if (!grouped and res.tag == .GroupedExpression) { - const group = @fieldParentPtr(ast.Node.GroupedExpression, "base", res); - res = group.expr; - // get zig fmt to work properly - 
tokenSlice(rp.c, group.lparen)[0] = ')'; - } return res; } - const ty = getExprQualType(rp.c, expr).getTypePtr(); - const node = try finishBoolExpr(rp, scope, expr.getBeginLoc(), ty, res, used); + const ty = getExprQualType(c, expr).getTypePtr(); + const node = try finishBoolExpr(c, scope, expr.getBeginLoc(), ty, res, used); - if (grouped) { - const rparen = try appendToken(rp.c, .RParen, ")"); - const grouped_expr = try rp.c.arena.create(ast.Node.GroupedExpression); - grouped_expr.* = .{ - .lparen = lparen, - .expr = node, - .rparen = rparen, - }; - return maybeSuppressResult(rp, scope, used, &grouped_expr.base); - } else { - return maybeSuppressResult(rp, scope, used, node); - } + return maybeSuppressResult(c, scope, used, node); } fn exprIsBooleanType(expr: *const clang.Expr) bool { @@ -1713,34 +1621,32 @@ fn exprIsNarrowStringLiteral(expr: *const clang.Expr) bool { } } -fn isBoolRes(res: *ast.Node) bool { - switch (res.tag) { - .BoolOr, - .BoolAnd, - .EqualEqual, - .BangEqual, - .LessThan, - .GreaterThan, - .LessOrEqual, - .GreaterOrEqual, - .BoolNot, - .BoolLiteral, +fn isBoolRes(res: Node) bool { + switch (res.tag()) { + .@"or", + .@"and", + .equal, + .note_equal, + .less_than, + .less_than_equal, + .greater_than, + .greater_than_equal, + .not, + .false_literal, + .true_literal, => return true, - - .GroupedExpression => return isBoolRes(@fieldParentPtr(ast.Node.GroupedExpression, "base", res).expr), - else => return false, } } fn finishBoolExpr( - rp: RestorePoint, + c: *Context, scope: *Scope, loc: clang.SourceLocation, ty: *const clang.Type, - node: *ast.Node, + node: Node, used: ResultUsed, -) TransError!*ast.Node { +) TransError!Node { switch (ty.getTypeClass()) { .Builtin => { const builtin_ty = @ptrCast(*const clang.BuiltinType, ty); @@ -1772,42 +1678,39 @@ fn finishBoolExpr( .WChar_S, .Float16, => { - const op_token = try appendToken(rp.c, .BangEqual, "!="); - const rhs_node = try transCreateNodeInt(rp.c, 0); - return transCreateNodeInfixOp(rp, scope, 
node, .BangEqual, op_token, rhs_node, used, false); + // node != 0 + return Node.not_equal.create(c.arena, .{ .lhs = node, .rhs = Node.zero_literal.init()}); }, .NullPtr => { - const op_token = try appendToken(rp.c, .EqualEqual, "=="); - const rhs_node = try transCreateNodeNullLiteral(rp.c); - return transCreateNodeInfixOp(rp, scope, node, .EqualEqual, op_token, rhs_node, used, false); + // node == null + return Node.equal.create(c.arena, .{ .lhs = node, .rhs = Node.null_literal.init()}); }, else => {}, } }, .Pointer => { - const op_token = try appendToken(rp.c, .BangEqual, "!="); - const rhs_node = try transCreateNodeNullLiteral(rp.c); - return transCreateNodeInfixOp(rp, scope, node, .BangEqual, op_token, rhs_node, used, false); + // node != null + return Node.not_equal.create(c.arena, .{ .lhs = node, .rhs = Node.null_literal.init()}); }, .Typedef => { const typedef_ty = @ptrCast(*const clang.TypedefType, ty); const typedef_decl = typedef_ty.getDecl(); const underlying_type = typedef_decl.getUnderlyingType(); - return finishBoolExpr(rp, scope, loc, underlying_type.getTypePtr(), node, used); + return finishBoolExpr(c, scope, loc, underlying_type.getTypePtr(), node, used); }, .Enum => { - const op_token = try appendToken(rp.c, .BangEqual, "!="); - const rhs_node = try transCreateNodeInt(rp.c, 0); - return transCreateNodeInfixOp(rp, scope, node, .BangEqual, op_token, rhs_node, used, false); + // node != 0 + return Node.not_equal.create(c.arena, .{ .lhs = node, .rhs = Node.zero_literal.init()}); }, .Elaborated => { const elaborated_ty = @ptrCast(*const clang.ElaboratedType, ty); const named_type = elaborated_ty.getNamedType(); - return finishBoolExpr(rp, scope, loc, named_type.getTypePtr(), node, used); + return finishBoolExpr(c, scope, loc, named_type.getTypePtr(), node, used); }, else => {}, } - return revertAndWarn(rp, error.UnsupportedType, loc, "unsupported bool expression type", .{}); + return fail(c, 
error.UnsupportedType, loc, "unsupported bool expression type", .{}); } const SuppressCast = enum { @@ -4242,7 +4145,7 @@ fn transCreateNodeBoolInfixOp( .rhs = rhs, }, }; - return maybeSuppressResult(c, scope, used, &payload.base); + return maybeSuppressResult(c, scope, used, Node.initPayload(&payload.base)); } fn transCreateNodePtrType( @@ -4654,7 +4557,7 @@ fn transCreateNodeShiftOp( const lhs = try transExpr(c, scope, lhs_expr, .used, .l_value); const rhs_type = try qualTypeToLog2IntRef(c, stmt.getType(), rhs_location); - const rhs = try transExpr(c, scope, rhs_expr, .used, .r_value); + const rhs = try transExprCoercing(c, scope, rhs_expr, .used, .r_value); - const rhs_casted = try Node.int_cast.create(c.arena, .{ .lhs = rhs_type, .rhs = rhs_type }); + const rhs_casted = try Node.int_cast.create(c.arena, .{ .lhs = rhs_type, .rhs = rhs }); const payload = try c.arena.create(ast.Payload.BinOp); @@ -4663,9 +4566,9 @@ fn transCreateNodeShiftOp( .data = .{ .lhs = lhs, .rhs = rhs_casted, - } + }, }; - return &payload.base; + return Node.initPayload(&payload.base); } fn transCreateNodePtrDeref(c: *Context, lhs: *ast.Node) !*ast.Node { @@ -4961,7 +4864,7 @@ fn finishTransFnProto( }; } - const link_section_string: ?[]const u8 = blk: { + const linksection_string = blk: { if (fn_decl) |decl| { var str_len: usize = undefined; if (decl.getSectionAttribute(&str_len)) |str_ptr| { @@ -5004,24 +4907,19 @@ fn finishTransFnProto( } }; - const fn_proto = try c.arena.create(ast.Payload.Func); - fn_proto.* = .{ - .base = .{ .tag = .func }, - .data = .{ - .is_pub = is_pub, - .is_extern = is_extern, - .is_export = is_export, - .is_var_args = is_var_args, - .name = name, - .link_section_string = link_section_string, - .explicit_callconv = explicit_callconv, - .params = c.arena.dupe(ast.Payload.Func.Param, fn_params.items), - .return_type = return_node, - .body = null, - .alignment = alignment, - }, - }; - return fn_proto; + return Node.func.create(c.arena, .{ + .is_pub = is_pub, + .is_extern = is_extern, + .is_export = is_export, + .is_var_args = is_var_args, + .name = name, + 
.linksection_string = linksection_string, + .explicit_callconv = explicit_callconv, + .params = try c.arena.dupe(ast.Payload.Func.Param, fn_params.items), + .return_type = return_node, + .body = null, + .alignment = alignment, + }); } fn warn(c: *Context, scope: *Scope, loc: clang.SourceLocation, comptime format: []const u8, args: anytype) !void { @@ -5054,6 +4952,19 @@ pub fn freeErrors(errors: []ClangErrMsg) void { errors.ptr.delete(errors.len); } +fn isZigPrimitiveType(name: []const u8) bool { + if (name.len > 1 and (name[0] == 'u' or name[0] == 'i')) { + for (name[1..]) |c| { + switch (c) { + '0'...'9' => {}, + else => return false, + } + } + return true; + } + return @import("astgen.zig").simple_types.has(name); +} + const MacroCtx = struct { source: []const u8, list: []const CToken, diff --git a/src/translate_c/ast.zig b/src/translate_c/ast.zig index 825d36add1..9abfe215e6 100644 --- a/src/translate_c/ast.zig +++ b/src/translate_c/ast.zig @@ -137,11 +137,16 @@ pub const Node = extern union { single_pointer, array_type, - + /// @import("std").mem.zeroes(T) + std_mem_zeroes, // pub const name = @compileError(msg); fail_decl, // var actual = mangled; arg_redecl, + /// const name = init; + typedef, + /// pub const name = init; + pub_typedef, pub const last_no_payload_tag = Tag.usingnamespace_builtins; pub const no_payload_count = @enumToInt(last_no_payload_tag) + 1; @@ -257,11 +262,11 @@ pub const Node = extern union { .container_init => Payload.ContainerInit, .std_meta_cast => Payload.Infix, .block => Payload.Block, - .c_pointer => Payload.Pointer, - .single_pointer => Payload.Pointer, + .c_pointer, .single_pointer => Payload.Pointer, .array_type => Payload.Array, .arg_redecl => Payload.ArgRedecl, .log2_int_type => Payload.Log2IntType, + .typedef, .pub_typedef => Payload.Typedef, }; } @@ -301,6 +306,11 @@ pub const Node = extern union { return null; } + + pub fn initPayload(payload: *Payload) Node { + assert(@enumToInt(payload.tag) >= Tag.no_payload_count); + 
return .{ .ptr_otherwise = payload }; + } }; pub const Payload = struct { @@ -383,13 +393,15 @@ pub const Payload = struct { pub const VarDecl = struct { base: Node = .{ .tag = .var_decl }, data: struct { - @"pub": bool, - @"const": bool, - @"extern": bool, - @"export": bool, + is_pub: bool, + is_const: bool, + is_extern: bool, + is_export: bool, + alignment: ?c_uint, + linksection_string: ?[]const u8, name: []const u8, - type: Type, - init: Node, + type: Node, + init: ?Node, }, }; @@ -401,12 +413,12 @@ pub const Payload = struct { is_export: bool, is_var_args: bool, name: []const u8, - link_section_string: ?[]const u8, + linksection_string: ?[]const u8, explicit_callconv: ?std.builtin.CallingConvention, params: []Param, return_type: Node, body: ?Node, - alignment: c_uint, + alignment: ?c_uint, pub const Param = struct { is_noalias: bool, @@ -501,6 +513,14 @@ pub const Payload = struct { base: Node, data: std.math.Log2Int(u64), }; + + pub const Typedef = struct { + base: Node, + data: struct { + name: []const u8, + init: Node, + }, + }; }; /// Converts the nodes into a Zig ast. 
From 5dac3683c9e8819d6ceefb869577e3537d863c41 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Mon, 8 Feb 2021 11:52:21 +0200 Subject: [PATCH 071/173] translate-c: convert record and enum decls --- src/translate_c.zig | 296 ++++++++++++---------------------------- src/translate_c/ast.zig | 58 ++++---- 2 files changed, 125 insertions(+), 229 deletions(-) diff --git a/src/translate_c.zig b/src/translate_c.zig index bcefc41971..3181f05f78 100644 --- a/src/translate_c.zig +++ b/src/translate_c.zig @@ -783,7 +783,7 @@ fn transCreateNodeTypedef( const payload = try c.arena.create(ast.Payload.Typedef); payload.* = .{ - .base = .{ .tag = ([2]ast.Node.Tag{ .typedef, .pub_typedef })[toplevel] }, + .base = .{ .tag = ([2]ast.Node.Tag{ .typedef, .pub_typedef })[@boolToInt(toplevel)] }, .data = .{ .name = checked_name, .init = init_node, @@ -792,7 +792,7 @@ fn transCreateNodeTypedef( return Node.initPayload(&payload.base); } -fn transRecordDecl(c: *Context, record_decl: *const clang.RecordDecl) Error!?*ast.Node { +fn transRecordDecl(c: *Context, record_decl: *const clang.RecordDecl) Error!?Node { if (c.decl_table.get(@ptrToInt(record_decl.getCanonicalDecl()))) |name| return try transCreateNodeIdentifier(c, name); // Avoid processing this decl twice const record_loc = record_decl.getLocation(); @@ -807,46 +807,30 @@ fn transRecordDecl(c: *Context, record_decl: *const clang.RecordDecl) Error!?*as } var container_kind_name: []const u8 = undefined; - var container_kind: std.zig.Token.Id = undefined; + var is_union = false; if (record_decl.isUnion()) { container_kind_name = "union"; - container_kind = .Keyword_union; + is_union = true; } else if (record_decl.isStruct()) { container_kind_name = "struct"; - container_kind = .Keyword_struct; } else { - try emitWarning(c, record_loc, "record {s} is not a struct or union", .{bare_name}); + try warn(c, record_loc, "record {s} is not a struct or union", .{bare_name}); return null; } const name = try std.fmt.allocPrint(c.arena, 
"{s}_{s}", .{ container_kind_name, bare_name }); _ = try c.decl_table.put(c.gpa, @ptrToInt(record_decl.getCanonicalDecl()), name); - const visib_tok = if (!is_unnamed) try appendToken(c, .Keyword_pub, "pub") else null; - const mut_tok = try appendToken(c, .Keyword_const, "const"); - const name_tok = try appendIdentifier(c, name); - - const eq_token = try appendToken(c, .Equal, "="); - - var semicolon: ast.TokenIndex = undefined; + const is_pub = !is_unnamed; const init_node = blk: { - const rp = makeRestorePoint(c); const record_def = record_decl.getDefinition() orelse { _ = try c.opaque_demotes.put(c.gpa, @ptrToInt(record_decl.getCanonicalDecl()), {}); - const opaque_type = try transCreateNodeOpaqueType(c); - semicolon = try appendToken(c, .Semicolon, ";"); - break :blk opaque_type; + break :blk Node.opaque_literal.init(); }; - const layout_tok = try if (record_decl.getPackedAttribute()) - appendToken(c, .Keyword_packed, "packed") - else - appendToken(c, .Keyword_extern, "extern"); - const container_tok = try appendToken(c, container_kind, container_kind_name); - const lbrace_token = try appendToken(c, .LBrace, "{"); - - var fields_and_decls = std.ArrayList(*ast.Node).init(c.gpa); - defer fields_and_decls.deinit(); + const is_packed = record_decl.getPackedAttribute(); + var fields = std.ArrayList(ast.Payload.Record.Field).init(c.gpa); + defer fields.deinit(); var unnamed_field_count: u32 = 0; var it = record_def.field_begin(); @@ -858,110 +842,82 @@ fn transRecordDecl(c: *Context, record_decl: *const clang.RecordDecl) Error!?*as if (field_decl.isBitField()) { _ = try c.opaque_demotes.put(c.gpa, @ptrToInt(record_decl.getCanonicalDecl()), {}); - const opaque_type = try transCreateNodeOpaqueType(c); - semicolon = try appendToken(c, .Semicolon, ";"); - try emitWarning(c, field_loc, "{s} demoted to opaque type - has bitfield", .{container_kind_name}); - break :blk opaque_type; + try warn(c, field_loc, "{s} demoted to opaque type - has bitfield", 
.{container_kind_name}); + break :blk Node.opaque_literal.init(); } if (qualTypeCanon(field_qt).isIncompleteOrZeroLengthArrayType(c.clang_context)) { _ = try c.opaque_demotes.put(c.gpa, @ptrToInt(record_decl.getCanonicalDecl()), {}); - const opaque_type = try transCreateNodeOpaqueType(c); - semicolon = try appendToken(c, .Semicolon, ";"); - try emitWarning(c, field_loc, "{s} demoted to opaque type - has variable length array", .{container_kind_name}); - break :blk opaque_type; + try warn(c, field_loc, "{s} demoted to opaque type - has variable length array", .{container_kind_name}); + break :blk Node.opaque_literal.init(); } var is_anon = false; - var raw_name = try c.str(@ptrCast(*const clang.NamedDecl, field_decl).getName_bytes_begin()); - if (field_decl.isAnonymousStructOrUnion() or raw_name.len == 0) { + var field_name = try c.str(@ptrCast(*const clang.NamedDecl, field_decl).getName_bytes_begin()); + if (field_decl.isAnonymousStructOrUnion() or field_name.len == 0) { // Context.getMangle() is not used here because doing so causes unpredictable field names for anonymous fields. 
- raw_name = try std.fmt.allocPrint(c.arena, "unnamed_{d}", .{unnamed_field_count}); + field_name = try std.fmt.allocPrint(c.arena, "unnamed_{d}", .{unnamed_field_count}); unnamed_field_count += 1; is_anon = true; } - const field_name = try appendIdentifier(c, raw_name); - _ = try appendToken(c, .Colon, ":"); - const field_type = transQualType(rp, field_qt, field_loc) catch |err| switch (err) { + const field_type = transQualType(c, field_qt, field_loc) catch |err| switch (err) { error.UnsupportedType => { _ = try c.opaque_demotes.put(c.gpa, @ptrToInt(record_decl.getCanonicalDecl()), {}); - const opaque_type = try transCreateNodeOpaqueType(c); - semicolon = try appendToken(c, .Semicolon, ";"); - try emitWarning(c, record_loc, "{s} demoted to opaque type - unable to translate type of field {s}", .{ container_kind_name, raw_name }); - break :blk opaque_type; + try warn(c, record_loc, "{s} demoted to opaque type - unable to translate type of field {s}", .{ container_kind_name, field_name }); + break :blk Node.opaque_literal.init(); }, else => |e| return e, }; - const align_expr = blk_2: { + const alignment = blk_2: { const alignment = field_decl.getAlignedAttribute(c.clang_context); if (alignment != 0) { - _ = try appendToken(c, .Keyword_align, "align"); - _ = try appendToken(c, .LParen, "("); // Clang reports the alignment in bits - const expr = try transCreateNodeInt(c, alignment / 8); - _ = try appendToken(c, .RParen, ")"); - - break :blk_2 expr; + break :blk_2 alignment / 8; } break :blk_2 null; }; - const field_node = try c.arena.create(ast.Node.ContainerField); - field_node.* = .{ - .doc_comments = null, - .comptime_token = null, - .name_token = field_name, - .type_expr = field_type, - .value_expr = null, - .align_expr = align_expr, - }; - if (is_anon) { - _ = try c.decl_table.put( - c.gpa, - @ptrToInt(field_decl.getCanonicalDecl()), - raw_name, - ); + _ = try c.decl_table.put(c.gpa, @ptrToInt(field_decl.getCanonicalDecl()), field_name); } - try 
fields_and_decls.append(&field_node.base); - _ = try appendToken(c, .Comma, ","); + try fields.append(.{ + .name = field_name, + .type = field_type, + .alignment = alignment, + }); } - const container_node = try ast.Node.ContainerDecl.alloc(c.arena, fields_and_decls.items.len); + + const payload = try c.arena.create(ast.Payload.Record); - container_node.* = .{ + payload.* = .{ - .layout_token = layout_tok, - .kind_token = container_tok, - .init_arg_expr = .None, - .fields_and_decls_len = fields_and_decls.items.len, - .lbrace_token = lbrace_token, - .rbrace_token = try appendToken(c, .RBrace, "}"), + .base = .{ .tag = ([2]ast.Node.Tag{ .@"struct", .@"union" })[@boolToInt(is_union)] }, + .data = .{ + .is_packed = is_packed, + .fields = try c.arena.dupe(ast.Payload.Record.Field, fields.items), + }, }; - mem.copy(*ast.Node, container_node.fieldsAndDecls(), fields_and_decls.items); - semicolon = try appendToken(c, .Semicolon, ";"); - break :blk &container_node.base; + break :blk Node.initPayload(&payload.base); }; - const node = try ast.Node.VarDecl.create(c.arena, .{ - .name_token = name_tok, - .mut_token = mut_tok, - .semicolon_token = semicolon, - }, .{ - .visib_token = visib_tok, - .eq_token = eq_token, - .init_node = init_node, - }); + const payload = try c.arena.create(ast.Payload.SimpleVarDecl); + payload.* = .{ + .base = .{ .tag = ([2]ast.Node.Tag{ .var_simple, .pub_var_simple })[@boolToInt(is_pub)] }, + .data = .{ + .name = name, + .init = init_node, + }, + }; - try addTopLevelDecl(c, name, &node.base); + try addTopLevelDecl(c, name, Node.initPayload(&payload.base)); if (!is_unnamed) try c.alias_list.append(.{ .alias = bare_name, .name = name }); - return transCreateNodeIdentifier(c, name); + return Node.identifier.create(c.arena, name); } -fn transEnumDecl(c: *Context, enum_decl: *const clang.EnumDecl) Error!?*ast.Node { +fn transEnumDecl(c: *Context, enum_decl: *const clang.EnumDecl) Error!?Node { if (c.decl_table.get(@ptrToInt(enum_decl.getCanonicalDecl()))) |name| 
return try transCreateNodeIdentifier(c, name); // Avoid processing this decl twice - const rp = makeRestorePoint(c); const enum_loc = enum_decl.getLocation(); var bare_name = try c.str(@ptrCast(*const clang.NamedDecl, enum_decl).getName_bytes_begin()); @@ -974,10 +930,7 @@ fn transEnumDecl(c: *Context, enum_decl: *const clang.EnumDecl) Error!?*ast.Node const name = try std.fmt.allocPrint(c.arena, "enum_{s}", .{bare_name}); _ = try c.decl_table.put(c.gpa, @ptrToInt(enum_decl.getCanonicalDecl()), name); - const visib_tok = if (!is_unnamed) try appendToken(c, .Keyword_pub, "pub") else null; - const mut_tok = try appendToken(c, .Keyword_const, "const"); - const name_tok = try appendIdentifier(c, name); - const eq_token = try appendToken(c, .Equal, "="); + const is_pub = !is_unnamed; const init_node = if (enum_decl.getDefinition()) |enum_def| blk: { var pure_enum = true; @@ -991,11 +944,8 @@ fn transEnumDecl(c: *Context, enum_decl: *const clang.EnumDecl) Error!?*ast.Node } } - const extern_tok = try appendToken(c, .Keyword_extern, "extern"); - const container_tok = try appendToken(c, .Keyword_enum, "enum"); - - var fields_and_decls = std.ArrayList(*ast.Node).init(c.gpa); - defer fields_and_decls.deinit(); + var fields = std.ArrayList(ast.Payload.Enum.Field).init(c.gpa); + defer fields.deinit(); const int_type = enum_decl.getIntegerType(); // The underlying type may be null in case of forward-declared enum @@ -1003,30 +953,23 @@ fn transEnumDecl(c: *Context, enum_decl: *const clang.EnumDecl) Error!?*ast.Node // default to the usual integer type used for all the enums. 
// default to c_int since msvc and gcc default to different types - _ = try appendToken(c, .LParen, "("); - const init_arg_expr = ast.Node.ContainerDecl.InitArg{ - .Type = if (int_type.ptr != null and - !isCBuiltinType(int_type, .UInt) and - !isCBuiltinType(int_type, .Int)) - transQualType(rp, int_type, enum_loc) catch |err| switch (err) { - error.UnsupportedType => { - try failDecl(c, enum_loc, name, "unable to translate enum tag type", .{}); - return null; - }, - else => |e| return e, - } - else - try transCreateNodeIdentifier(c, "c_int"), - }; - _ = try appendToken(c, .RParen, ")"); - - const lbrace_token = try appendToken(c, .LBrace, "{"); + const init_arg_expr = if (int_type.ptr != null and + !isCBuiltinType(int_type, .UInt) and + !isCBuiltinType(int_type, .Int)) + transQualType(c, int_type, enum_loc) catch |err| switch (err) { + error.UnsupportedType => { + try failDecl(c, enum_loc, name, "unable to translate enum tag type", .{}); + return null; + }, + else => |e| return e, + } + else + try Node.type.create(c.arena, "c_int"); it = enum_def.enumerator_begin(); end_it = enum_def.enumerator_end(); while (it.neq(end_it)) : (it = it.next()) { const enum_const = it.deref(); - const enum_val_name = try c.str(@ptrCast(*const clang.NamedDecl, enum_const).getName_bytes_begin()); const field_name = if (!is_unnamed and mem.startsWith(u8, enum_val_name, bare_name)) @@ -1034,100 +977,41 @@ fn transEnumDecl(c: *Context, enum_decl: *const clang.EnumDecl) Error!?*ast.Node else enum_val_name; - const field_name_tok = try appendIdentifier(c, field_name); - - const int_node = if (!pure_enum) blk_2: { - _ = try appendToken(c, .Colon, "="); - break :blk_2 try transCreateNodeAPInt(c, enum_const.getInitVal()); - } else + const int_node = if (!pure_enum) + try transCreateNodeAPInt(c, enum_const.getInitVal()) + else null; - const field_node = try c.arena.create(ast.Node.ContainerField); - field_node.* = .{ - .doc_comments = null, - .comptime_token = null, - .name_token = 
field_name_tok, - .type_expr = null, - .value_expr = int_node, - .align_expr = null, - }; - - try fields_and_decls.append(&field_node.base); - _ = try appendToken(c, .Comma, ","); + try fields_and_decls.append(.{ + .name = field_name, + .value = int_node, + }); // In C each enum value is in the global namespace. So we put them there too. // At this point we can rely on the enum emitting successfully. - const tld_visib_tok = try appendToken(c, .Keyword_pub, "pub"); - const tld_mut_tok = try appendToken(c, .Keyword_const, "const"); - const tld_name_tok = try appendIdentifier(c, enum_val_name); - const tld_eq_token = try appendToken(c, .Equal, "="); - const cast_node = try rp.c.createBuiltinCall("@enumToInt", 1); - const enum_ident = try transCreateNodeIdentifier(c, name); - const period_tok = try appendToken(c, .Period, "."); - const field_ident = try transCreateNodeIdentifier(c, field_name); - const field_access_node = try c.arena.create(ast.Node.SimpleInfixOp); - field_access_node.* = .{ - .base = .{ .tag = .Period }, - .op_token = period_tok, - .lhs = enum_ident, - .rhs = field_ident, - }; - cast_node.params()[0] = &field_access_node.base; - cast_node.rparen_token = try appendToken(rp.c, .RParen, ")"); - const tld_init_node = &cast_node.base; - const tld_semicolon_token = try appendToken(c, .Semicolon, ";"); - const tld_node = try ast.Node.VarDecl.create(c.arena, .{ - .name_token = tld_name_tok, - .mut_token = tld_mut_tok, - .semicolon_token = tld_semicolon_token, - }, .{ - .visib_token = tld_visib_tok, - .eq_token = tld_eq_token, - .init_node = tld_init_node, - }); - try addTopLevelDecl(c, field_name, &tld_node.base); + try addTopLevelDecl(c, field_name, try Node.enum_redecl.create(c.arena, .{ + .enum_val_name = enum_val_name, + .field_name = field_name, + .enum_name = name, + })); } - // make non exhaustive - const field_node = try c.arena.create(ast.Node.ContainerField); - field_node.* = .{ - .doc_comments = null, - .comptime_token = null, - .name_token = try 
appendIdentifier(c, "_"), - .type_expr = null, - .value_expr = null, - .align_expr = null, - }; - try fields_and_decls.append(&field_node.base); - _ = try appendToken(c, .Comma, ","); - const container_node = try ast.Node.ContainerDecl.alloc(c.arena, fields_and_decls.items.len); - container_node.* = .{ - .layout_token = extern_tok, - .kind_token = container_tok, - .init_arg_expr = init_arg_expr, - .fields_and_decls_len = fields_and_decls.items.len, - .lbrace_token = lbrace_token, - .rbrace_token = try appendToken(c, .RBrace, "}"), - }; - mem.copy(*ast.Node, container_node.fieldsAndDecls(), fields_and_decls.items); - break :blk &container_node.base; + break :blk try Node.@"enum".create(c.arena, try c.arena.dupe(ast.Payload.Enum.Field, fields.items)); } else blk: { _ = try c.opaque_demotes.put(c.gpa, @ptrToInt(enum_decl.getCanonicalDecl()), {}); - break :blk try transCreateNodeOpaqueType(c); + break :blk Node.opaque_literal.init(); }; - const semicolon_token = try appendToken(c, .Semicolon, ";"); - const node = try ast.Node.VarDecl.create(c.arena, .{ - .name_token = name_tok, - .mut_token = mut_tok, - .semicolon_token = semicolon_token, - }, .{ - .visib_token = visib_tok, - .eq_token = eq_token, - .init_node = init_node, - }); + const payload = try c.arena.create(ast.Payload.SimpleVarDecl); + payload.* = .{ + .base = .{ .tag = ([2]ast.Node.Tag{ .var_simple, .pub_var_simple })[@boolToInt(is_pub)] }, + .data = .{ + .name = name, + .init = init_node, + }, + }; - try addTopLevelDecl(c, name, &node.base); + try addTopLevelDecl(c, name, Node.initPayload(&payload.base)); if (!is_unnamed) try c.alias_list.append(.{ .alias = bare_name, .name = name }); return transCreateNodeIdentifier(c, name); @@ -1382,22 +1266,22 @@ fn transCompoundStmt(c: *Context, scope: *Scope, stmt: *const clang.CompoundStmt } fn transCStyleCastExprClass( - rp: RestorePoint, + c: *Context, scope: *Scope, stmt: *const clang.CStyleCastExpr, result_used: ResultUsed, lrvalue: LRValue, -) 
TransError!*ast.Node { +) TransError!Node { const sub_expr = stmt.getSubExpr(); const cast_node = (try transCCast( - rp, + c, scope, stmt.getBeginLoc(), stmt.getType(), sub_expr.getType(), - try transExpr(rp, scope, sub_expr, .used, lrvalue), + try transExpr(c, scope, sub_expr, .used, lrvalue), )); - return maybeSuppressResult(rp, scope, result_used, cast_node); + return maybeSuppressResult(c, scope, result_used, cast_node); } fn transDeclStmtOne( diff --git a/src/translate_c/ast.zig b/src/translate_c/ast.zig index 9abfe215e6..b926efdbef 100644 --- a/src/translate_c/ast.zig +++ b/src/translate_c/ast.zig @@ -10,6 +10,7 @@ pub const Node = extern union { pub const Tag = enum { null_literal, undefined_literal, + /// opaque {} opaque_literal, true_literal, false_literal, @@ -42,6 +43,7 @@ pub const Node = extern union { func, warning, failed_decl, + /// All enums are non-exhaustive @"enum", @"struct", @"union", @@ -145,8 +147,12 @@ pub const Node = extern union { arg_redecl, /// const name = init; typedef, + var_simple, /// pub const name = init; pub_typedef, + pub_var_simple, + /// pub const enum_field_name = @enumToInt(enum_name.field_name); + enum_redecl, pub const last_no_payload_tag = Tag.usingnamespace_builtins; pub const no_payload_count = @enumToInt(last_no_payload_tag) + 1; @@ -266,7 +272,8 @@ pub const Node = extern union { .array_type => Payload.Array, .arg_redecl => Payload.ArgRedecl, .log2_int_type => Payload.Log2IntType, - .typedef, .pub_typedef => Payload.Typedef, + .typedef, .pub_typedef, .pub_var_simple => Payload.SimpleVarDecl, + .enum_redecl => Payload.EnumRedecl, }; } @@ -419,41 +426,37 @@ pub const Payload = struct { return_type: Node, body: ?Node, alignment: ?c_uint, - - pub const Param = struct { - is_noalias: bool, - name: ?[]const u8, - type: Node, - }; }, + + pub const Param = struct { + is_noalias: bool, + name: ?[]const u8, + type: Node, + }; }; pub const Enum = struct { base: Node = .{ .tag = .@"enum" }, - data: struct { - name: ?[]const 
u8, - fields: []Field, + data: []Field, - pub const Field = struct { - name: []const u8, - value: ?[]const u8, - }; - }, + pub const Field = struct { + name: []const u8, + value: ?Node, + }; }; pub const Record = struct { base: Node, data: struct { - name: ?[]const u8, @"packed": bool, fields: []Field, - - pub const Field = struct { - name: []const u8, - type: Type, - alignment: c_uint, - }; }, + + pub const Field = struct { + name: []const u8, + type: Node, + alignment: ?c_uint, + }; }; pub const ArrayInit = struct { @@ -514,13 +517,22 @@ pub const Payload = struct { data: std.math.Log2Int(u64), }; - pub const Typedef = struct { + pub const SimpleVarDecl = struct { base: Node, data: struct { name: []const u8, init: Node, }, }; + + pub const EnumRedecl = struct { + base: Node, + data: struct { + enum_val_name: []const u8, + field_name: []const u8, + enum_name: []const u8, + }, + }; }; /// Converts the nodes into a Zig ast. From d8b9fca0b1c8929be99b6dff8f961afbe2fe2e3c Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Wed, 10 Feb 2021 22:04:19 +0200 Subject: [PATCH 072/173] translate-c: convert casts and string/array init --- src/translate_c.zig | 471 +++++++++++----------------------------- src/translate_c/ast.zig | 45 ++++ 2 files changed, 174 insertions(+), 342 deletions(-) diff --git a/src/translate_c.zig b/src/translate_c.zig index 3181f05f78..0bef2cb843 100644 --- a/src/translate_c.zig +++ b/src/translate_c.zig @@ -1084,7 +1084,6 @@ fn transStmt( const source_expr = @ptrCast(*const clang.OpaqueValueExpr, stmt).getSourceExpr().?; const expr = try transExpr(c, scope, source_expr, .used, lrvalue); return maybeSuppressResult(c, scope, result_used, expr); - const node = try c.arena.create(Node.GroupedExpression); }, else => { return revertAndWarn( @@ -1389,62 +1388,56 @@ fn transDeclRefExpr( } fn transImplicitCastExpr( - rp: RestorePoint, + c: *Context, scope: *Scope, expr: *const clang.ImplicitCastExpr, result_used: ResultUsed, -) TransError!*ast.Node { - const 
c = rp.c; +) TransError!Node { const sub_expr = expr.getSubExpr(); const dest_type = getExprQualType(c, @ptrCast(*const clang.Expr, expr)); const src_type = getExprQualType(c, sub_expr); switch (expr.getCastKind()) { .BitCast, .FloatingCast, .FloatingToIntegral, .IntegralToFloating, .IntegralCast, .PointerToIntegral, .IntegralToPointer => { - const sub_expr_node = try transExpr(rp, scope, sub_expr, .used, .r_value); - return try transCCast(rp, scope, expr.getBeginLoc(), dest_type, src_type, sub_expr_node); + const sub_expr_node = try transExpr(c, scope, sub_expr, .used, .r_value); + const casted = try transCCast(c, scope, expr.getBeginLoc(), dest_type, src_type, sub_expr_node); + return maybeSuppressResult(c, scope, result_used, casted); }, .LValueToRValue, .NoOp, .FunctionToPointerDecay => { - const sub_expr_node = try transExpr(rp, scope, sub_expr, .used, .r_value); - return maybeSuppressResult(rp, scope, result_used, sub_expr_node); + const sub_expr_node = try transExpr(c, scope, sub_expr, .used, .r_value); + return maybeSuppressResult(c, scope, result_used, sub_expr_node); }, .ArrayToPointerDecay => { if (exprIsNarrowStringLiteral(sub_expr)) { - const sub_expr_node = try transExpr(rp, scope, sub_expr, .used, .r_value); - return maybeSuppressResult(rp, scope, result_used, sub_expr_node); + const sub_expr_node = try transExpr(c, scope, sub_expr, .used, .r_value); + return maybeSuppressResult(c, scope, result_used, sub_expr_node); } - const prefix_op = try transCreateNodeSimplePrefixOp(rp.c, .AddressOf, .Ampersand, "&"); - prefix_op.rhs = try transExpr(rp, scope, sub_expr, .used, .r_value); - - return maybeSuppressResult(rp, scope, result_used, &prefix_op.base); + const addr = try Node.address_of.create(c.arena, try transExpr(c, scope, sub_expr, .used, .r_value)); + return maybeSuppressResult(c, scope, result_used, addr); }, .NullToPointer => { - return try transCreateNodeNullLiteral(rp.c); + return Node.null_literal.init(); }, .PointerToBoolean => { // 
@ptrToInt(val) != 0 - const ptr_to_int = try rp.c.createBuiltinCall("@ptrToInt", 1); - ptr_to_int.params()[0] = try transExpr(rp, scope, sub_expr, .used, .r_value); - ptr_to_int.rparen_token = try appendToken(rp.c, .RParen, ")"); + const ptr_to_int = try Node.ptr_to_int.create(c.arena, try transExpr(c, scope, sub_expr, .used, .r_value)); - const op_token = try appendToken(rp.c, .BangEqual, "!="); - const rhs_node = try transCreateNodeInt(rp.c, 0); - return transCreateNodeInfixOp(rp, scope, &ptr_to_int.base, .BangEqual, op_token, rhs_node, result_used, false); + const ne = try Node.not_equal.create(c.arena, .{ .lhs = ptr_to_int, .rhs = Node.zero_literal.init() }); + return maybeSuppressResult(c, scope, result_used, ne); }, .IntegralToBoolean => { - const sub_expr_node = try transExpr(rp, scope, sub_expr, .used, .r_value); + const sub_expr_node = try transExpr(c, scope, sub_expr, .used, .r_value); // The expression is already a boolean one, return it as-is if (isBoolRes(sub_expr_node)) - return sub_expr_node; + return maybeSuppressResult(c, scope, result_used, sub_expr_node); // val != 0 - const op_token = try appendToken(rp.c, .BangEqual, "!="); - const rhs_node = try transCreateNodeInt(rp.c, 0); - return transCreateNodeInfixOp(rp, scope, sub_expr_node, .BangEqual, op_token, rhs_node, result_used, false); + const ne = try Node.not_equal.create(c.arena, .{ .lhs = sub_expr_node, .rhs = Node.zero_literal.init() }); + return maybeSuppressResult(c, scope, result_used, ne); }, .BuiltinFnToFnPtr => { - return transExpr(rp, scope, sub_expr, .used, .r_value); + return transExpr(rp, scope, sub_expr, result_used, .r_value); }, else => |kind| return revertAndWarn( rp, @@ -1468,12 +1461,12 @@ fn transBoolExpr( if (!(@ptrCast(*const clang.IntegerLiteral, expr).isZero(&is_zero, c.clang_context))) { return revertAndWarn(c, error.UnsupportedTranslation, expr.getBeginLoc(), "invalid integer literal", .{}); } - return Node{ .tag = ([2]ast.Node.Tag{ .true_literal, .false_literal 
})[is_zero] }; + return Node{ .tag = ([2]ast.Node.Tag{ .true_literal, .false_literal })[@boolToInt(is_zero)] }; } var res = try transExpr(c, scope, expr, used, lrvalue); if (isBoolRes(res)) { - return res; + return maybeSuppressResult(c, scope, used, res); } const ty = getExprQualType(c, expr).getTypePtr(); @@ -1563,18 +1556,18 @@ fn finishBoolExpr( .Float16, => { // node != 0 - return Node.not_equal.create(c.arena, .{ .lhs = node, .rhs = Node.zero_literal.init()}); + return Node.not_equal.create(c.arena, .{ .lhs = node, .rhs = Node.zero_literal.init() }); }, .NullPtr => { // node == null - return Node.equal.create(c.arena, .{ .lhs = node, .rhs = Node.null_literal.init()}); + return Node.equal.create(c.arena, .{ .lhs = node, .rhs = Node.null_literal.init() }); }, else => {}, } }, .Pointer => { // node == null - return Node.equal.create(c.arena, .{ .lhs = node, .rhs = Node.null_literal.init()}); + return Node.equal.create(c.arena, .{ .lhs = node, .rhs = Node.null_literal.init() }); }, .Typedef => { const typedef_ty = @ptrCast(*const clang.TypedefType, ty); @@ -1584,7 +1577,7 @@ fn finishBoolExpr( }, .Enum => { // node != 0 - return Node.not_equal.create(c.arena, .{ .lhs = node, .rhs = Node.zero_literal.init()}); + return Node.not_equal.create(c.arena, .{ .lhs = node, .rhs = Node.zero_literal.init() }); const op_token = try appendToken(c, .BangEqual, "!="); }, .Elaborated => { @@ -1653,11 +1646,11 @@ fn transReturnStmt( } fn transStringLiteral( - rp: RestorePoint, + c: *Context, scope: *Scope, stmt: *const clang.StringLiteral, result_used: ResultUsed, -) TransError!*ast.Node { +) TransError!Node { const kind = stmt.getKind(); switch (kind) { .Ascii, .UTF8 => { @@ -1665,55 +1658,28 @@ fn transStringLiteral( const bytes_ptr = stmt.getString_bytes_begin_size(&len); const str = bytes_ptr[0..len]; - const token = try appendTokenFmt(rp.c, .StringLiteral, "\"{}\"", .{std.zig.fmtEscapes(str)}); - const node = try rp.c.arena.create(ast.Node.OneToken); - node.* = .{ - .base = 
.{ .tag = .StringLiteral }, - .token = token, - }; - return maybeSuppressResult(rp, scope, result_used, &node.base); + const str = try std.fmt.allocPrint(c.arena, "\"{}\"", .{std.zig.fmtEscapes(str)}); + const node = try Node.string_literal.create(c.arena, str); + return maybeSuppressResult(c, scope, result_used, node); }, .UTF16, .UTF32, .Wide => { - const node = try transWideStringLiteral(rp, scope, stmt); - return maybeSuppressResult(rp, scope, result_used, node); + const str_type = @tagName(stmt.getKind()); + const name = try std.fmt.allocPrint(c.arena, "zig.{s}_string_{d}", .{ str_type, c.getMangle() }); + const lit_array = try transStringLiteralAsArray(c, scope, stmt, stmt.getLength() + 1); + + const decl = try Node.var_simple.create(c.arena, .{ .name = name, .init = lit_array }); + try scope.appendNode(name, decl); + const node = try Node.identifier.create(c.arena, name); + return maybeSuppressResult(c, scope, result_used, node); }, } } -/// Translates a wide string literal as a global "anonymous" array of the relevant-sized -/// integer type + null terminator, and returns an identifier node for it -fn transWideStringLiteral(rp: RestorePoint, scope: *Scope, stmt: *const clang.StringLiteral) TransError!*ast.Node { - const str_type = @tagName(stmt.getKind()); - const mangle = rp.c.getMangle(); - const name = try std.fmt.allocPrint(rp.c.arena, "zig.{s}_string_{d}", .{ str_type, mangle }); - - const const_tok = try appendToken(rp.c, .Keyword_const, "const"); - const name_tok = try appendIdentifier(rp.c, name); - const eq_tok = try appendToken(rp.c, .Equal, "="); - var semi_tok: ast.TokenIndex = undefined; - - const lit_array = try transStringLiteralAsArray(rp, scope, stmt, stmt.getLength() + 1); - - semi_tok = try appendToken(rp.c, .Semicolon, ";"); - const var_decl_node = try ast.Node.VarDecl.create(rp.c.arena, .{ - .name_token = name_tok, - .mut_token = const_tok, - .semicolon_token = semi_tok, - }, .{ - .visib_token = null, - .eq_token = eq_tok, - .init_node 
= lit_array, - }); - try addTopLevelDecl(rp.c, name, &var_decl_node.base); - return transCreateNodeIdentifier(rp.c, name); -} - /// Parse the size of an array back out from an ast Node. -fn zigArraySize(c: *Context, node: *ast.Node) TransError!usize { - if (node.castTag(.ArrayType)) |array| { - if (array.len_expr.castTag(.IntegerLiteral)) |int_lit| { - const tok = tokenSlice(c, int_lit.token); - return std.fmt.parseUnsigned(usize, tok, 10) catch error.UnsupportedTranslation; +fn zigArraySize(c: *Context, node: Node) TransError!usize { + if (node.castTag(.array_type)) |array| { + if (array.data.len.castTag(.int_literal)) |int_lit| { + return std.fmt.parseUnsigned(usize, int_lit.data, 10) catch error.UnsupportedTranslation; } } return error.UnsupportedTranslation; @@ -1725,11 +1691,11 @@ fn zigArraySize(c: *Context, node: *ast.Node) TransError!usize { /// than the array, truncate the string. If the array is larger than the /// string literal, pad the array with 0's fn transStringLiteralAsArray( - rp: RestorePoint, + c: *Context, scope: *Scope, stmt: *const clang.StringLiteral, array_size: usize, -) TransError!*ast.Node { +) TransError!Node { if (array_size == 0) return error.UnsupportedType; const str_length = stmt.getLength(); @@ -1738,40 +1704,21 @@ fn transStringLiteralAsArray( const ty = expr_base.getType().getTypePtr(); const const_arr_ty = @ptrCast(*const clang.ConstantArrayType, ty); - const ty_node = try rp.c.arena.create(ast.Node.ArrayType); - const op_token = try appendToken(rp.c, .LBracket, "["); - const len_expr = try transCreateNodeInt(rp.c, array_size); - _ = try appendToken(rp.c, .RBracket, "]"); - - ty_node.* = .{ - .op_token = op_token, - .rhs = try transQualType(rp, const_arr_ty.getElementType(), expr_base.getBeginLoc()), - .len_expr = len_expr, - }; - _ = try appendToken(rp.c, .LBrace, "{"); - var init_node = try ast.Node.ArrayInitializer.alloc(rp.c.arena, array_size); - init_node.* = .{ - .lhs = &ty_node.base, - .rtoken = undefined, - .list_len = 
array_size, - }; - const init_list = init_node.list(); + const arr_type = try transQualType(c, const_arr_ty.getElementType(), expr_base.getBeginLoc()); + const init_list = try c.arena.alloc(Node, array_size); var i: c_uint = 0; const kind = stmt.getKind(); const narrow = kind == .Ascii or kind == .UTF8; while (i < str_length and i < array_size) : (i += 1) { const code_unit = stmt.getCodeUnit(i); - init_list[i] = try transCreateCharLitNode(rp.c, narrow, code_unit); - _ = try appendToken(rp.c, .Comma, ","); + init_list[i] = try transCreateCharLitNode(c, narrow, code_unit); } while (i < array_size) : (i += 1) { - init_list[i] = try transCreateNodeInt(rp.c, 0); - _ = try appendToken(rp.c, .Comma, ","); + init_list[i] = try transCreateNodeInt(c, 0); } - init_node.rtoken = try appendToken(rp.c, .RBrace, "}"); - return &init_node.base; + return Node.array_init.create(c.arena, init_list); } fn cIsEnum(qt: clang.QualType) bool { @@ -1790,152 +1737,87 @@ fn cIntTypeForEnum(enum_qt: clang.QualType) clang.QualType { } fn transCCast( - rp: RestorePoint, + c: *Context, scope: *Scope, loc: clang.SourceLocation, dst_type: clang.QualType, src_type: clang.QualType, - expr: *ast.Node, -) !*ast.Node { + expr: Node, +) !Node { if (qualTypeCanon(dst_type).isVoidType()) return expr; if (dst_type.eq(src_type)) return expr; if (qualTypeIsPtr(dst_type) and qualTypeIsPtr(src_type)) - return transCPtrCast(rp, loc, dst_type, src_type, expr); + return transCPtrCast(c, loc, dst_type, src_type, expr); + + const dst_node = try transQualType(c, dst_type, loc); if (cIsInteger(dst_type) and (cIsInteger(src_type) or cIsEnum(src_type))) { // 1. If src_type is an enum, determine the underlying signed int type // 2. Extend or truncate without changing signed-ness. // 3. 
Bit-cast to correct signed-ness const src_type_is_signed = cIsSignedInteger(src_type) or cIsEnum(src_type); const src_int_type = if (cIsInteger(src_type)) src_type else cIntTypeForEnum(src_type); - var src_int_expr = if (cIsInteger(src_type)) expr else try transEnumToInt(rp.c, expr); - - // @bitCast(dest_type, intermediate_value) - const cast_node = try rp.c.createBuiltinCall("@bitCast", 2); - cast_node.params()[0] = try transQualType(rp, dst_type, loc); - _ = try appendToken(rp.c, .Comma, ","); + var src_int_expr = if (cIsInteger(src_type)) expr else Node.enum_to_int.create(c.arena, expr); if (isBoolRes(src_int_expr)) { - const bool_to_int_node = try rp.c.createBuiltinCall("@boolToInt", 1); - bool_to_int_node.params()[0] = src_int_expr; - bool_to_int_node.rparen_token = try appendToken(rp.c, .RParen, ")"); - src_int_expr = &bool_to_int_node.base; + src_int_expr = try Node.bool_to_int.create(c.arena, src_int_expr); } switch (cIntTypeCmp(dst_type, src_int_type)) { .lt => { // @truncate(SameSignSmallerInt, src_int_expr) - const trunc_node = try rp.c.createBuiltinCall("@truncate", 2); - const ty_node = try transQualTypeIntWidthOf(rp.c, dst_type, src_type_is_signed); - trunc_node.params()[0] = ty_node; - _ = try appendToken(rp.c, .Comma, ","); - trunc_node.params()[1] = src_int_expr; - trunc_node.rparen_token = try appendToken(rp.c, .RParen, ")"); - - cast_node.params()[1] = &trunc_node.base; + const ty_node = try transQualTypeIntWidthOf(c, dst_type, src_type_is_signed); + src_int_expr = try Node.truncate.create(c.arena, .{ .lhs = ty_node, .rhs = src_int_expr }); }, .gt => { // @as(SameSignBiggerInt, src_int_expr) - const as_node = try rp.c.createBuiltinCall("@as", 2); - const ty_node = try transQualTypeIntWidthOf(rp.c, dst_type, src_type_is_signed); - as_node.params()[0] = ty_node; - _ = try appendToken(rp.c, .Comma, ","); - as_node.params()[1] = src_int_expr; - as_node.rparen_token = try appendToken(rp.c, .RParen, ")"); - - cast_node.params()[1] = &as_node.base; + 
const ty_node = try transQualTypeIntWidthOf(c, dst_type, src_type_is_signed); + src_int_expr = try Node.as.create(c.arena, .{ .lhs = ty_node, .rhs = src_int_expr }); }, .eq => { - cast_node.params()[1] = src_int_expr; + // src_int_expr = src_int_expr }, } - cast_node.rparen_token = try appendToken(rp.c, .RParen, ")"); - return &cast_node.base; + // @bitCast(dest_type, intermediate_value) + return Node.bit_cast.create(c.arena, .{ .lhs = dst_node, .rhs = src_int_expr }); } if (cIsInteger(dst_type) and qualTypeIsPtr(src_type)) { // @intCast(dest_type, @ptrToInt(val)) - const cast_node = try rp.c.createBuiltinCall("@intCast", 2); - cast_node.params()[0] = try transQualType(rp, dst_type, loc); - _ = try appendToken(rp.c, .Comma, ","); - const builtin_node = try rp.c.createBuiltinCall("@ptrToInt", 1); - builtin_node.params()[0] = expr; - builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")"); - cast_node.params()[1] = &builtin_node.base; - cast_node.rparen_token = try appendToken(rp.c, .RParen, ")"); - return &cast_node.base; + const ptr_to_int = try Node.ptr_to_int.create(c.arena, expr); + return Node.int_cast.create(c.arena, .{ .lhs = dst_node, .rhs = ptr_to_int }); } if (cIsInteger(src_type) and qualTypeIsPtr(dst_type)) { // @intToPtr(dest_type, val) - const builtin_node = try rp.c.createBuiltinCall("@intToPtr", 2); - builtin_node.params()[0] = try transQualType(rp, dst_type, loc); - _ = try appendToken(rp.c, .Comma, ","); - builtin_node.params()[1] = expr; - builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")"); - return &builtin_node.base; + return Node.int_to_ptr.create(c.arena, .{ .lhs = dst_node, .rhs = expr }); } if (cIsFloating(src_type) and cIsFloating(dst_type)) { - const builtin_node = try rp.c.createBuiltinCall("@floatCast", 2); - builtin_node.params()[0] = try transQualType(rp, dst_type, loc); - _ = try appendToken(rp.c, .Comma, ","); - builtin_node.params()[1] = expr; - builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")"); - 
return &builtin_node.base; + // @floatCast(dest_type, val) + return Node.float_cast.create(c.arena, .{ .lhs = dst_node, .rhs = expr }); } if (cIsFloating(src_type) and !cIsFloating(dst_type)) { - const builtin_node = try rp.c.createBuiltinCall("@floatToInt", 2); - builtin_node.params()[0] = try transQualType(rp, dst_type, loc); - _ = try appendToken(rp.c, .Comma, ","); - builtin_node.params()[1] = expr; - builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")"); - return &builtin_node.base; + // @floatToInt(dest_type, val) + return Node.float_to_int.create(c.arena, .{ .lhs = dst_node, .rhs = expr }); } if (!cIsFloating(src_type) and cIsFloating(dst_type)) { - const builtin_node = try rp.c.createBuiltinCall("@intToFloat", 2); - builtin_node.params()[0] = try transQualType(rp, dst_type, loc); - _ = try appendToken(rp.c, .Comma, ","); - builtin_node.params()[1] = expr; - builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")"); - return &builtin_node.base; + // @intToFloat(dest_type, val) + return Node.int_to_float.create(c.arena, .{ .lhs = dst_node, .rhs = expr }); } if (qualTypeIsBoolean(src_type) and !qualTypeIsBoolean(dst_type)) { // @boolToInt returns either a comptime_int or a u1 // TODO: if dst_type is 1 bit & signed (bitfield) we need @bitCast // instead of @as - const builtin_node = try rp.c.createBuiltinCall("@boolToInt", 1); - builtin_node.params()[0] = expr; - builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")"); - - const as_node = try rp.c.createBuiltinCall("@as", 2); - as_node.params()[0] = try transQualType(rp, dst_type, loc); - _ = try appendToken(rp.c, .Comma, ","); - as_node.params()[1] = &builtin_node.base; - as_node.rparen_token = try appendToken(rp.c, .RParen, ")"); - - return &as_node.base; + const bool_to_int = Node.bool_to_int.create(c.arena, expr); + return Node.as.create(c.arena, .{ .lhs = dst_node, .rhs = bool_to_int }); } if (cIsEnum(dst_type)) { - const builtin_node = try rp.c.createBuiltinCall("@intToEnum", 2); - 
builtin_node.params()[0] = try transQualType(rp, dst_type, loc); - _ = try appendToken(rp.c, .Comma, ","); - builtin_node.params()[1] = expr; - builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")"); - return &builtin_node.base; + // @intToEnum(dest_type, val) + return Node.int_to_enum.create(c.arena, .{ .lhs = dst_node, .rhs = expr }); } if (cIsEnum(src_type) and !cIsEnum(dst_type)) { - return transEnumToInt(rp.c, expr); + // @enumToInt(val) + return Node.enum_to_int.create(c.arena, expr); } - const cast_node = try rp.c.createBuiltinCall("@as", 2); - cast_node.params()[0] = try transQualType(rp, dst_type, loc); - _ = try appendToken(rp.c, .Comma, ","); - cast_node.params()[1] = expr; - cast_node.rparen_token = try appendToken(rp.c, .RParen, ")"); - return &cast_node.base; -} - -fn transEnumToInt(c: *Context, enum_expr: *ast.Node) TypeError!*ast.Node { - const builtin_node = try c.createBuiltinCall("@enumToInt", 1); - builtin_node.params()[0] = enum_expr; - builtin_node.rparen_token = try appendToken(c, .RParen, ")"); - return &builtin_node.base; + // @as(dest_type, val) + return Node.as.create(c.arena, .{ .lhs = dst_node, .rhs = expr }); } fn transExpr( @@ -1976,13 +1858,12 @@ fn transExprCoercing( } fn transInitListExprRecord( - rp: RestorePoint, + c: *Context, scope: *Scope, loc: clang.SourceLocation, expr: *const clang.InitListExpr, ty: *const clang.Type, - used: ResultUsed, -) TransError!*ast.Node { +) TransError!Node { var is_union_type = false; // Unions and Structs are both represented as RecordDecl const record_ty = ty.getAsRecordType() orelse @@ -1994,13 +1875,11 @@ fn transInitListExprRecord( const record_def = record_decl.getDefinition() orelse unreachable; - const ty_node = try transType(rp, ty, loc); + const ty_node = try transType(c, ty, loc); const init_count = expr.getNumInits(); - var field_inits = std.ArrayList(*ast.Node).init(rp.c.gpa); + var field_inits = std.ArrayList(ast.Payload.ContainerInit.Initializer).init(c.gpa); defer 
field_inits.deinit(); - _ = try appendToken(rp.c, .LBrace, "{"); - var init_i: c_uint = 0; var it = record_def.field_begin(); const end_it = record_def.field_end(); @@ -2018,76 +1897,28 @@ fn transInitListExprRecord( // Generate the field assignment expression: // .field_name = expr - const period_tok = try appendToken(rp.c, .Period, "."); - - var raw_name = try rp.c.str(@ptrCast(*const clang.NamedDecl, field_decl).getName_bytes_begin()); + var raw_name = try c.str(@ptrCast(*const clang.NamedDecl, field_decl).getName_bytes_begin()); if (field_decl.isAnonymousStructOrUnion()) { - const name = rp.c.decl_table.get(@ptrToInt(field_decl.getCanonicalDecl())).?; - raw_name = try mem.dupe(rp.c.arena, u8, name); + const name = c.decl_table.get(@ptrToInt(field_decl.getCanonicalDecl())).?; + raw_name = try mem.dupe(c.arena, u8, name); } - const field_name_tok = try appendIdentifier(rp.c, raw_name); - _ = try appendToken(rp.c, .Equal, "="); - - const field_init_node = try rp.c.arena.create(ast.Node.FieldInitializer); - field_init_node.* = .{ - .period_token = period_tok, - .name_token = field_name_tok, - .expr = try transExpr(rp, scope, elem_expr, .used, .r_value), - }; - - try field_inits.append(&field_init_node.base); - _ = try appendToken(rp.c, .Comma, ","); + try field_inits.append(.{ + .name = raw_name, + .value = try transExpr(c, scope, elem_expr, .used, .r_value), + }); } - const node = try ast.Node.StructInitializer.alloc(rp.c.arena, field_inits.items.len); - node.* = .{ - .lhs = ty_node, - .rtoken = try appendToken(rp.c, .RBrace, "}"), - .list_len = field_inits.items.len, - }; - mem.copy(*ast.Node, node.list(), field_inits.items); - return &node.base; -} - -fn transCreateNodeArrayType( - rp: RestorePoint, - source_loc: clang.SourceLocation, - ty: *const clang.Type, - len: anytype, -) !*ast.Node { - const node = try rp.c.arena.create(ast.Node.ArrayType); - const op_token = try appendToken(rp.c, .LBracket, "["); - const len_expr = try transCreateNodeInt(rp.c, len); - _ 
= try appendToken(rp.c, .RBracket, "]"); - node.* = .{ - .op_token = op_token, - .rhs = try transType(rp, ty, source_loc), - .len_expr = len_expr, - }; - return &node.base; -} - -fn transCreateEmptyArray(rp: RestorePoint, loc: clang.SourceLocation, ty: *const clang.Type) TransError!*ast.Node { - const ty_node = try transCreateNodeArrayType(rp, loc, ty, 0); - _ = try appendToken(rp.c, .LBrace, "{"); - const filler_init_node = try ast.Node.ArrayInitializer.alloc(rp.c.arena, 0); - filler_init_node.* = .{ - .lhs = ty_node, - .rtoken = try appendToken(rp.c, .RBrace, "}"), - .list_len = 0, - }; - return &filler_init_node.base; + return Node.container_init.create(c.arena, try c.arena.dupe(ast.Payload.ContainerInit.Initializer, field_inits.items)); } fn transInitListExprArray( - rp: RestorePoint, + c: *Context, scope: *Scope, loc: clang.SourceLocation, expr: *const clang.InitListExpr, ty: *const clang.Type, - used: ResultUsed, -) TransError!*ast.Node { +) TransError!Node { const arr_type = ty.getAsArrayTypeUnsafe(); const child_qt = arr_type.getElementType(); const init_count = expr.getNumInits(); @@ -2098,111 +1929,67 @@ fn transInitListExprArray( const leftover_count = all_count - init_count; if (all_count == 0) { - return transCreateEmptyArray(rp, loc, child_qt.getTypePtr()); + return Node.empty_array.create(c.arena, try transQualType(c, child_qt, source_loc)); } + + const ty_node = try transType(ty); + const init_node = if (init_count != 0) blk: { + const init_list = try c.arena.alloc(Node, init_count); - var init_node: *ast.Node.ArrayInitializer = undefined; - var cat_tok: ast.TokenIndex = undefined; - if (init_count != 0) { - const ty_node = try transCreateNodeArrayType( - rp, - loc, - child_qt.getTypePtr(), - init_count, - ); - _ = try appendToken(rp.c, .LBrace, "{"); - init_node = try ast.Node.ArrayInitializer.alloc(rp.c.arena, init_count); - init_node.* = .{ - .lhs = ty_node, - .rtoken = undefined, - .list_len = init_count, - }; - const init_list = 
init_node.list(); - - var i: c_uint = 0; - while (i < init_count) : (i += 1) { + for (init_list) |*init, i| { const elem_expr = expr.getInit(i); - init_list[i] = try transExpr(rp, scope, elem_expr, .used, .r_value); - _ = try appendToken(rp.c, .Comma, ","); + init.* = try transExpr(c, scope, elem_expr, .used, .r_value); } - init_node.rtoken = try appendToken(rp.c, .RBrace, "}"); + const init_node = try Node.array_init.create(c.arena, init_list); if (leftover_count == 0) { - return &init_node.base; + return init_node; } - cat_tok = try appendToken(rp.c, .PlusPlus, "++"); - } + break :blk init_node; + } else null; - const ty_node = try transCreateNodeArrayType(rp, loc, child_qt.getTypePtr(), 1); - _ = try appendToken(rp.c, .LBrace, "{"); - const filler_init_node = try ast.Node.ArrayInitializer.alloc(rp.c.arena, 1); - filler_init_node.* = .{ - .lhs = ty_node, - .rtoken = undefined, - .list_len = 1, - }; const filler_val_expr = expr.getArrayFiller(); - filler_init_node.list()[0] = try transExpr(rp, scope, filler_val_expr, .used, .r_value); - filler_init_node.rtoken = try appendToken(rp.c, .RBrace, "}"); + const filler_node = try Node.array_filler.create(c.arena, .{ + .type = ty_node, + .filler = try transExpr(c, scope, filler_val_expr, .used, .r_value), + .count = leftover_count, + }); - const rhs_node = if (leftover_count == 1) - &filler_init_node.base - else blk: { - const mul_tok = try appendToken(rp.c, .AsteriskAsterisk, "**"); - const mul_node = try rp.c.arena.create(ast.Node.SimpleInfixOp); - mul_node.* = .{ - .base = .{ .tag = .ArrayMult }, - .op_token = mul_tok, - .lhs = &filler_init_node.base, - .rhs = try transCreateNodeInt(rp.c, leftover_count), - }; - break :blk &mul_node.base; - }; - - if (init_count == 0) { - return rhs_node; + if (init_node) |some| { + return Node.array_cat.create(c.arena, some, filler_node); + } else { + return filler_node; } - - const cat_node = try rp.c.arena.create(ast.Node.SimpleInfixOp); - cat_node.* = .{ - .base = .{ .tag = 
.ArrayCat }, - .op_token = cat_tok, - .lhs = &init_node.base, - .rhs = rhs_node, - }; - return &cat_node.base; } fn transInitListExpr( - rp: RestorePoint, + c: *Context, scope: *Scope, expr: *const clang.InitListExpr, used: ResultUsed, -) TransError!*ast.Node { - const qt = getExprQualType(rp.c, @ptrCast(*const clang.Expr, expr)); +) TransError!Node { + const qt = getExprQualType(c, @ptrCast(*const clang.Expr, expr)); var qual_type = qt.getTypePtr(); const source_loc = @ptrCast(*const clang.Expr, expr).getBeginLoc(); if (qual_type.isRecordType()) { - return transInitListExprRecord( + return maybeSuppressResult(c, scope, used, try transInitListExprRecord( rp, scope, source_loc, expr, qual_type, - used, - ); + )); } else if (qual_type.isArrayType()) { - return transInitListExprArray( + return maybeSuppressResult(c, scope, used, try transInitListExprArray( rp, scope, source_loc, expr, qual_type, - used, - ); + )); } else { - const type_name = rp.c.str(qual_type.getTypeClassName()); - return revertAndWarn(rp, error.UnsupportedType, source_loc, "unsupported initlist type: '{s}'", .{type_name}); + const type_name = c.str(qual_type.getTypeClassName()); + return fail(c, error.UnsupportedType, source_loc, "unsupported initlist type: '{s}'", .{type_name}); } } @@ -2259,15 +2046,15 @@ fn transZeroInitExpr( } fn transImplicitValueInitExpr( - rp: RestorePoint, + c: *Context, scope: *Scope, expr: *const clang.Expr, used: ResultUsed, -) TransError!*ast.Node { +) TransError!Node { const source_loc = expr.getBeginLoc(); - const qt = getExprQualType(rp.c, expr); + const qt = getExprQualType(c, expr); const ty = qt.getTypePtr(); - return transZeroInitExpr(rp, scope, source_loc, ty); + return transZeroInitExpr(c, scope, source_loc, ty); } fn transIfStmt( diff --git a/src/translate_c/ast.zig b/src/translate_c/ast.zig index b926efdbef..8cf6cf74d5 100644 --- a/src/translate_c/ast.zig +++ b/src/translate_c/ast.zig @@ -104,6 +104,7 @@ pub const Node = extern union { bit_and, bit_or, 
bit_xor, + array_cat, log2_int_type, /// @import("std").math.Log2Int(operand) @@ -118,6 +119,24 @@ pub const Node = extern union { bool_to_int, /// @as(lhs, rhs) as, + /// @truncate(lhs, rhs) + truncate, + /// @bitCast(lhs, rhs) + bit_cast, + /// @floatCast(lhs, rhs) + float_cast, + /// @floatToInt(lhs, rhs) + float_to_int, + /// @intToFloat(lhs, rhs) + int_to_float, + /// @intToEnum(lhs, rhs) + int_to_enum, + /// @enumToInt(operand) + enum_to_int, + /// @intToPtr(lhs, rhs) + int_to_ptr, + /// @ptrToInt(operand) + ptr_to_int, negate, negate_wrap, @@ -154,6 +173,11 @@ pub const Node = extern union { /// pub const enum_field_name = @enumToInt(enum_name.field_name); enum_redecl, + /// [0]type{} + empty_array, + /// [1]type{val} ** count + array_filler, + pub const last_no_payload_tag = Tag.usingnamespace_builtins; pub const no_payload_count = @enumToInt(last_no_payload_tag) + 1; @@ -184,6 +208,9 @@ pub const Node = extern union { .optional_type, .address_of, .unwrap_deref, + .ptr_to_int, + .enum_to_int, + .empty_array, => Payload.UnOp, .add, @@ -239,6 +266,14 @@ pub const Node = extern union { .int_cast, .bool_to_int, .as, + .truncate, + .bit_cast, + .float_cast, + .float_to_int, + .int_to_float, + .int_to_enum, + .int_to_ptr, + .array_cat, => Payload.BinOp, .int, @@ -274,6 +309,7 @@ pub const Node = extern union { .log2_int_type => Payload.Log2IntType, .typedef, .pub_typedef, .pub_var_simple => Payload.SimpleVarDecl, .enum_redecl => Payload.EnumRedecl, + .array_filler => Payload.ArrayFiller, }; } @@ -533,6 +569,15 @@ pub const Payload = struct { enum_name: []const u8, }, }; + + pub const ArrayFiller = struct { + base: Node, + data: struct { + type: Node, + filler: Node, + count: usize, + }, + }; }; /// Converts the nodes into a Zig ast. 
From 66dd64ec15d787bb43cb0ab1169e8bac6cfe762e Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Wed, 10 Feb 2021 22:30:19 +0200 Subject: [PATCH 073/173] translate-c: convert most control flow --- src/translate_c.zig | 123 ++++++++++++++++------------------------ src/translate_c/ast.zig | 7 +++ 2 files changed, 56 insertions(+), 74 deletions(-) diff --git a/src/translate_c.zig b/src/translate_c.zig index 0bef2cb843..e256d9813f 100644 --- a/src/translate_c.zig +++ b/src/translate_c.zig @@ -107,6 +107,7 @@ const Scope = struct { // do while, we want to put `if (cond) break;` at the end. const alloc_len = self.statements.items.len + @boolToInt(self.base.parent.?.id == .Loop); const stmts = try c.arena.alloc(Node, alloc_len); + stmts.len -= 1; mem.copy(Node, stmts, self.statements.items); return Node.block.create(c.arena, .{ .lable = self.label, @@ -1748,7 +1749,7 @@ fn transCCast( if (dst_type.eq(src_type)) return expr; if (qualTypeIsPtr(dst_type) and qualTypeIsPtr(src_type)) return transCPtrCast(c, loc, dst_type, src_type, expr); - + const dst_node = try transQualType(c, dst_type, loc); if (cIsInteger(dst_type) and (cIsInteger(src_type) or cIsEnum(src_type))) { // 1. 
If src_type is an enum, determine the underlying signed int type @@ -1931,7 +1932,7 @@ fn transInitListExprArray( if (all_count == 0) { return Node.empty_array.create(c.arena, try transQualType(c, child_qt, source_loc)); } - + const ty_node = try transType(ty); const init_node = if (init_count != 0) blk: { const init_list = try c.arena.alloc(Node, init_count); @@ -2058,14 +2059,12 @@ fn transImplicitValueInitExpr( } fn transIfStmt( - rp: RestorePoint, + c: *Context, scope: *Scope, stmt: *const clang.IfStmt, -) TransError!*ast.Node { +) TransError!Node { // if (c) t // if (c) t else e - const if_node = try transCreateNodeIf(rp.c); - var cond_scope = Scope.Condition{ .base = .{ .parent = scope, @@ -2074,26 +2073,21 @@ fn transIfStmt( }; defer cond_scope.deinit(); const cond_expr = @ptrCast(*const clang.Expr, stmt.getCond()); - if_node.condition = try transBoolExpr(rp, &cond_scope.base, cond_expr, .used, .r_value, false); - _ = try appendToken(rp.c, .RParen, ")"); + const cond = try transBoolExpr(c, &cond_scope.base, cond_expr, .used, .r_value); - if_node.body = try transStmt(rp, scope, stmt.getThen(), .unused, .r_value); - - if (stmt.getElse()) |expr| { - if_node.@"else" = try transCreateNodeElse(rp.c); - if_node.@"else".?.body = try transStmt(rp, scope, expr, .unused, .r_value); - } - _ = try appendToken(rp.c, .Semicolon, ";"); - return &if_node.base; + const then_body = try transStmt(c, scope, stmt.getThen(), .unused, .r_value); + const else_body = if (stmt.getElse()) |expr| + try transStmt(c, scope, expr, .unused, .r_value) + else + null; + return Node.@"if".create(c.arena, .{ .cond = cond, .then = then_body, .@"else" = else_body }); } fn transWhileLoop( - rp: RestorePoint, + c: *Context, scope: *Scope, stmt: *const clang.WhileStmt, -) TransError!*ast.Node { - const while_node = try transCreateNodeWhile(rp.c); - +) TransError!Node { var cond_scope = Scope.Condition{ .base = .{ .parent = scope, @@ -2102,35 +2096,28 @@ fn transWhileLoop( }; defer 
cond_scope.deinit(); const cond_expr = @ptrCast(*const clang.Expr, stmt.getCond()); - while_node.condition = try transBoolExpr(rp, &cond_scope.base, cond_expr, .used, .r_value, false); - _ = try appendToken(rp.c, .RParen, ")"); + const cond = try transBoolExpr(c, &cond_scope.base, cond_expr, .used, .r_value); var loop_scope = Scope{ .parent = scope, .id = .Loop, }; - while_node.body = try transStmt(rp, &loop_scope, stmt.getBody(), .unused, .r_value); - _ = try appendToken(rp.c, .Semicolon, ";"); - return &while_node.base; + const body = try transStmt(c, &loop_scope, stmt.getBody(), .unused, .r_value); + return Node.@"while".create(c.arena, .{ .cond = cond, .body = body, .cont_expr = null }); } fn transDoWhileLoop( - rp: RestorePoint, + c: *Context, scope: *Scope, stmt: *const clang.DoStmt, -) TransError!*ast.Node { - const while_node = try transCreateNodeWhile(rp.c); - - while_node.condition = try transCreateNodeBoolLiteral(rp.c, true); - _ = try appendToken(rp.c, .RParen, ")"); - var new = false; +) TransError!Node { var loop_scope = Scope{ .parent = scope, .id = .Loop, }; // if (!cond) break; - const if_node = try transCreateNodeIf(rp.c); + const if_node = try transCreateNodeIf(c); var cond_scope = Scope.Condition{ .base = .{ .parent = scope, @@ -2138,12 +2125,8 @@ fn transDoWhileLoop( }, }; defer cond_scope.deinit(); - const prefix_op = try transCreateNodeSimplePrefixOp(rp.c, .BoolNot, .Bang, "!"); - prefix_op.rhs = try transBoolExpr(rp, &cond_scope.base, @ptrCast(*const clang.Expr, stmt.getCond()), .used, .r_value, true); - _ = try appendToken(rp.c, .RParen, ")"); - if_node.condition = &prefix_op.base; - if_node.body = &(try transCreateNodeBreak(rp.c, null, null)).base; - _ = try appendToken(rp.c, .Semicolon, ";"); + const cond = try transBoolExpr(c, &cond_scope.base, @ptrCast(*const clang.Expr, stmt.getCond()), .used, .r_value); + const if_not_break = try Node.if_not_break.create(c.arena, cond); const body_node = if (stmt.getBody().getStmtClass() == 
.CompoundStmtClass) blk: { // there's already a block in C, so we'll append our condition to it. @@ -2156,8 +2139,12 @@ fn transDoWhileLoop( // zig: b; // zig: if (!cond) break; // zig: } - const node = try transStmt(rp, &loop_scope, stmt.getBody(), .unused, .r_value); - break :blk node.castTag(.Block).?; + const node = try transStmt(c, &loop_scope, stmt.getBody(), .unused, .r_value); + const block = node.castTag(.block); + block.data.stmts.len += 1; // This is safe since we reserve one extra space in Scope.Block.complete. + block.data.stmts[block.data.stmts.len - 1] = if_not_break; + break :blk node; + } else blk: { // the C statement is without a block, so we need to create a block to contain it. // c: do @@ -2167,27 +2154,19 @@ fn transDoWhileLoop( // zig: a; // zig: if (!cond) break; // zig: } - new = true; - const block = try rp.c.createBlock(2); - block.statements_len = 1; // over-allocated so we can add another below - block.statements()[0] = try transStmt(rp, &loop_scope, stmt.getBody(), .unused, .r_value); - break :blk block; + const statements = try c.arena.create(Node, 2); + statements[0] = try transStmt(c, &loop_scope, stmt.getBody(), .unused, .r_value); + statements[1] = if_not_break; + break :blk try Node.block.create(c.arena, .{ .label = null, .stmts = statements }); }; - - // In both cases above, we reserved 1 extra statement. 
- body_node.statements_len += 1; - body_node.statements()[body_node.statements_len - 1] = &if_node.base; - if (new) - body_node.rbrace = try appendToken(rp.c, .RBrace, "}"); - while_node.body = &body_node.base; - return &while_node.base; + return Node.while_true.create(c.arena, body_node); } fn transForLoop( - rp: RestorePoint, + c: *Context, scope: *Scope, stmt: *const clang.ForStmt, -) TransError!*ast.Node { +) TransError!Node { var loop_scope = Scope{ .parent = scope, .id = .Loop, @@ -2197,9 +2176,9 @@ fn transForLoop( defer if (block_scope) |*bs| bs.deinit(); if (stmt.getInit()) |init| { - block_scope = try Scope.Block.init(rp.c, scope, false); + block_scope = try Scope.Block.init(c, scope, false); loop_scope.parent = &block_scope.?.base; - const init_node = try transStmt(rp, &block_scope.?.base, init, .unused, .r_value); + const init_node = try transStmt(c, &block_scope.?.base, init, .unused, .r_value); try block_scope.?.statements.append(init_node); } var cond_scope = Scope.Condition{ @@ -2210,27 +2189,23 @@ fn transForLoop( }; defer cond_scope.deinit(); - const while_node = try transCreateNodeWhile(rp.c); - while_node.condition = if (stmt.getCond()) |cond| - try transBoolExpr(rp, &cond_scope.base, cond, .used, .r_value, false) + const cond = if (stmt.getCond()) |cond| + try transBoolExpr(c, &cond_scope.base, cond, .used, .r_value) else - try transCreateNodeBoolLiteral(rp.c, true); - _ = try appendToken(rp.c, .RParen, ")"); + Node.true_literal.init(); - if (stmt.getInc()) |incr| { - _ = try appendToken(rp.c, .Colon, ":"); - _ = try appendToken(rp.c, .LParen, "("); - while_node.continue_expr = try transExpr(rp, &cond_scope.base, incr, .unused, .r_value); - _ = try appendToken(rp.c, .RParen, ")"); - } + const cont_expr = if (stmt.getInc()) |incr| + try transExpr(c, &cond_scope.base, incr, .unused, .r_value) + else + null; - while_node.body = try transStmt(rp, &loop_scope, stmt.getBody(), .unused, .r_value); + const body = try transStmt(c, &loop_scope, 
stmt.getBody(), .unused, .r_value); + const while_node = try Node.@"while".create(c.arena, .{ .cond = cond, .body = body, .cont_expr = cont_expr }); if (block_scope) |*bs| { - try bs.statements.append(&while_node.base); - return try bs.complete(rp.c); + try bs.statements.append(while_node); + return try bs.complete(c); } else { - _ = try appendToken(rp.c, .Semicolon, ";"); - return &while_node.base; + return while_node; } } diff --git a/src/translate_c/ast.zig b/src/translate_c/ast.zig index 8cf6cf74d5..c59bcd09e4 100644 --- a/src/translate_c/ast.zig +++ b/src/translate_c/ast.zig @@ -29,7 +29,11 @@ pub const Node = extern union { char_literal, identifier, @"if", + /// if (!operand) break; + if_not_break, @"while", + /// while (true) operand + while_true, @"switch", @"continue", @"break", @@ -211,6 +215,8 @@ pub const Node = extern union { .ptr_to_int, .enum_to_int, .empty_array, + .while_true, + .if_not_break, => Payload.UnOp, .add, @@ -399,6 +405,7 @@ pub const Payload = struct { data: struct { cond: Node, body: Node, + cont_expr: ?Node }, }; From cadd4483be80f73f192621f79eea41117183129c Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Wed, 10 Feb 2021 23:02:38 +0200 Subject: [PATCH 074/173] translate-c: convert switch --- src/translate_c.zig | 280 ++++++++++++++-------------------------- src/translate_c/ast.zig | 31 +++-- 2 files changed, 117 insertions(+), 194 deletions(-) diff --git a/src/translate_c.zig b/src/translate_c.zig index e256d9813f..e5c755c1a4 100644 --- a/src/translate_c.zig +++ b/src/translate_c.zig @@ -43,7 +43,7 @@ const Scope = struct { const Switch = struct { base: Scope, pending_block: Block, - cases: []Node, + cases: std.ArrayList(Node), case_index: usize, switch_label: ?[]const u8, default_label: ?[]const u8, @@ -1087,7 +1087,7 @@ fn transStmt( return maybeSuppressResult(c, scope, result_used, expr); }, else => { - return revertAndWarn( + return fail( rp, error.UnsupportedTranslation, stmt.getBeginLoc(), @@ -1348,7 +1348,7 @@ fn 
transDeclStmtOne( return error.UnsupportedTranslation; return node; }, - else => |kind| return revertAndWarn( + else => |kind| return fail( rp, error.UnsupportedTranslation, decl.getLocation(), @@ -1440,7 +1440,7 @@ fn transImplicitCastExpr( .BuiltinFnToFnPtr => { return transExpr(rp, scope, sub_expr, result_used, .r_value); }, - else => |kind| return revertAndWarn( + else => |kind| return fail( rp, error.UnsupportedTranslation, @ptrCast(*const clang.Stmt, expr).getBeginLoc(), @@ -1460,7 +1460,7 @@ fn transBoolExpr( if (@ptrCast(*const clang.Stmt, expr).getStmtClass() == .IntegerLiteralClass) { var is_zero: bool = undefined; if (!(@ptrCast(*const clang.IntegerLiteral, expr).isZero(&is_zero, c.clang_context))) { - return revertAndWarn(c, error.UnsupportedTranslation, expr.getBeginLoc(), "invalid integer literal", .{}); + return fail(c, error.UnsupportedTranslation, expr.getBeginLoc(), "invalid integer literal", .{}); } return Node{ .tag = ([2]ast.Node.Tag{ .true_literal, .false_literal })[@boolToInt(is_zero)] }; } @@ -1605,7 +1605,7 @@ fn transIntegerLiteral( var eval_result: clang.ExprEvalResult = undefined; if (!expr.EvaluateAsInt(&eval_result, c.clang_context)) { const loc = expr.getBeginLoc(); - return revertAndWarn(c, error.UnsupportedTranslation, loc, "invalid integer literal", .{}); + return fail(c, error.UnsupportedTranslation, loc, "invalid integer literal", .{}); } if (suppress_as == .no_as) { @@ -2144,7 +2144,6 @@ fn transDoWhileLoop( block.data.stmts.len += 1; // This is safe since we reserve one extra space in Scope.Block.complete. block.data.stmts[block.data.stmts.len - 1] = if_not_break; break :blk node; - } else blk: { // the C statement is without a block, so we need to create a block to contain it. 
// c: do @@ -2209,27 +2208,11 @@ fn transForLoop( } } -fn getSwitchCaseCount(stmt: *const clang.SwitchStmt) usize { - const body = stmt.getBody(); - assert(body.getStmtClass() == .CompoundStmtClass); - const comp = @ptrCast(*const clang.CompoundStmt, body); - // TODO https://github.com/ziglang/zig/issues/1738 - // return comp.body_end() - comp.body_begin(); - const start_addr = @ptrToInt(comp.body_begin()); - const end_addr = @ptrToInt(comp.body_end()); - return (end_addr - start_addr) / @sizeOf(*clang.Stmt); -} - fn transSwitch( - rp: RestorePoint, + c: *Context, scope: *Scope, stmt: *const clang.SwitchStmt, -) TransError!*ast.Node { - const switch_tok = try appendToken(rp.c, .Keyword_switch, "switch"); - _ = try appendToken(rp.c, .LParen, "("); - - const cases_len = getSwitchCaseCount(stmt); - +) TransError!Node { var cond_scope = Scope.Condition{ .base = .{ .parent = scope, @@ -2237,16 +2220,13 @@ fn transSwitch( }, }; defer cond_scope.deinit(); - const switch_expr = try transExpr(rp, &cond_scope.base, stmt.getCond(), .used, .r_value); - _ = try appendToken(rp.c, .RParen, ")"); - _ = try appendToken(rp.c, .LBrace, "{"); - // reserve +1 case in case there is no default case - const switch_node = try ast.Node.Switch.alloc(rp.c.arena, cases_len + 1); + const switch_expr = try transExpr(c, &cond_scope.base, stmt.getCond(), .used, .r_value); + const switch_node = try c.arena.create(ast.Payload.Switch); switch_node.* = .{ - .switch_token = switch_tok, - .expr = switch_expr, - .cases_len = cases_len + 1, - .rbrace = try appendToken(rp.c, .RBrace, "}"), + .data = .{ + .cond = switch_expr, + .cases = undefined, // set later + }, }; var switch_scope = Scope.Switch{ @@ -2254,29 +2234,32 @@ fn transSwitch( .id = .Switch, .parent = scope, }, - .cases = switch_node.cases(), - .case_index = 0, + .cases = std.ArrayList(Node).init(c.gpa), .pending_block = undefined, .default_label = null, .switch_label = null, }; + defer { + switch_node.data.cases = try c.arena.dupe(Node, 
switch_scope.cases.items); + switch_node.data.default = switch_scope.switch_label; + switch_scope.cases.deinit(); + } // tmp block that all statements will go before being picked up by a case or default - var block_scope = try Scope.Block.init(rp.c, &switch_scope.base, false); + var block_scope = try Scope.Block.init(c, &switch_scope.base, false); defer block_scope.deinit(); // Note that we do not defer a deinit here; the switch_scope.pending_block field // has its own memory management. This resource is freed inside `transCase` and // then the final pending_block is freed at the bottom of this function with // pending_block.deinit(). - switch_scope.pending_block = try Scope.Block.init(rp.c, scope, false); - try switch_scope.pending_block.statements.append(&switch_node.base); + switch_scope.pending_block = try Scope.Block.init(c, scope, false); + try switch_scope.pending_block.statements.append(Node.initPayload(&switch_node.base)); - const last = try transStmt(rp, &block_scope.base, stmt.getBody(), .unused, .r_value); - _ = try appendToken(rp.c, .Semicolon, ";"); + const last = try transStmt(c, &block_scope.base, stmt.getBody(), .unused, .r_value); // take all pending statements - const last_block_stmts = last.cast(ast.Node.Block).?.statements(); + const last_block_stmts = last.castTag(.block).?.data.stmts; try switch_scope.pending_block.statements.ensureCapacity( switch_scope.pending_block.statements.items.len + last_block_stmts.len, ); @@ -2285,213 +2268,159 @@ fn transSwitch( } if (switch_scope.default_label == null) { - switch_scope.switch_label = try block_scope.makeMangledName(rp.c, "switch"); + switch_scope.switch_label = try block_scope.makeMangledName(c, "switch"); } if (switch_scope.switch_label) |l| { - switch_scope.pending_block.label = try appendIdentifier(rp.c, l); - _ = try appendToken(rp.c, .Colon, ":"); + switch_scope.pending_block.label = l; } if (switch_scope.default_label == null) { - const else_prong = try transCreateNodeSwitchCase(rp.c, try 
transCreateNodeSwitchElse(rp.c)); - else_prong.expr = blk: { - var br = try CtrlFlow.init(rp.c, .Break, switch_scope.switch_label.?); - break :blk &(try br.finish(null)).base; - }; - _ = try appendToken(rp.c, .Comma, ","); - - if (switch_scope.case_index >= switch_scope.cases.len) - return revertAndWarn(rp, error.UnsupportedTranslation, @ptrCast(*const clang.Stmt, stmt).getBeginLoc(), "TODO complex switch cases", .{}); - switch_scope.cases[switch_scope.case_index] = &else_prong.base; - switch_scope.case_index += 1; + const else_prong = try Node.switch_else.create( + c.arena, + try Node.@"break".create(c.arena, switch_scope.switch_label.?), + ); + switch_scope.cases.append(else_prong); } - // We overallocated in case there was no default, so now we correct - // the number of cases in the AST node. - switch_node.cases_len = switch_scope.case_index; - const result_node = try switch_scope.pending_block.complete(rp.c); + const result_node = try switch_scope.pending_block.complete(c); switch_scope.pending_block.deinit(); return result_node; } fn transCase( - rp: RestorePoint, + c: *Context, scope: *Scope, stmt: *const clang.CaseStmt, -) TransError!*ast.Node { - const block_scope = scope.findBlockScope(rp.c) catch unreachable; +) TransError!Node { + const block_scope = scope.findBlockScope(c) catch unreachable; const switch_scope = scope.getSwitch(); - const label = try block_scope.makeMangledName(rp.c, "case"); - _ = try appendToken(rp.c, .Semicolon, ";"); + const label = try block_scope.makeMangledName(c, "case"); const expr = if (stmt.getRHS()) |rhs| blk: { - const lhs_node = try transExpr(rp, scope, stmt.getLHS(), .used, .r_value); - const ellips = try appendToken(rp.c, .Ellipsis3, "..."); - const rhs_node = try transExpr(rp, scope, rhs, .used, .r_value); + const lhs_node = try transExpr(c, scope, stmt.getLHS(), .used, .r_value); + const rhs_node = try transExpr(c, scope, rhs, .used, .r_value); - const node = try rp.c.arena.create(ast.Node.SimpleInfixOp); - node.* = 
.{ - .base = .{ .tag = .Range }, - .op_token = ellips, - .lhs = lhs_node, - .rhs = rhs_node, - }; - break :blk &node.base; + break :blk Node.ellipsis3.create(c.arena, .{ .lhs = lhs_node, .rhs = rhs_node }); } else - try transExpr(rp, scope, stmt.getLHS(), .used, .r_value); + try transExpr(c, scope, stmt.getLHS(), .used, .r_value); - const switch_prong = try transCreateNodeSwitchCase(rp.c, expr); - switch_prong.expr = blk: { - var br = try CtrlFlow.init(rp.c, .Break, label); - break :blk &(try br.finish(null)).base; - }; - _ = try appendToken(rp.c, .Comma, ","); + const switch_prong = try Node.switch_prong.create( + c.arena, + try Node.@"break".create(c.arena, label), + ); + switch_scope.cases.append(switch_prong); - if (switch_scope.case_index >= switch_scope.cases.len) - return revertAndWarn(rp, error.UnsupportedTranslation, @ptrCast(*const clang.Stmt, stmt).getBeginLoc(), "TODO complex switch cases", .{}); - switch_scope.cases[switch_scope.case_index] = &switch_prong.base; - switch_scope.case_index += 1; - - switch_scope.pending_block.label = try appendIdentifier(rp.c, label); - _ = try appendToken(rp.c, .Colon, ":"); + switch_scope.pending_block.label = label; // take all pending statements try switch_scope.pending_block.statements.appendSlice(block_scope.statements.items); block_scope.statements.shrinkAndFree(0); - const pending_node = try switch_scope.pending_block.complete(rp.c); + const pending_node = try switch_scope.pending_block.complete(c); switch_scope.pending_block.deinit(); - switch_scope.pending_block = try Scope.Block.init(rp.c, scope, false); + switch_scope.pending_block = try Scope.Block.init(c, scope, false); try switch_scope.pending_block.statements.append(pending_node); - return transStmt(rp, scope, stmt.getSubStmt(), .unused, .r_value); + return transStmt(c, scope, stmt.getSubStmt(), .unused, .r_value); } fn transDefault( - rp: RestorePoint, + c: *Context, scope: *Scope, stmt: *const clang.DefaultStmt, -) TransError!*ast.Node { - const 
block_scope = scope.findBlockScope(rp.c) catch unreachable; +) TransError!Node { + const block_scope = scope.findBlockScope(c) catch unreachable; const switch_scope = scope.getSwitch(); - switch_scope.default_label = try block_scope.makeMangledName(rp.c, "default"); - _ = try appendToken(rp.c, .Semicolon, ";"); + switch_scope.default_label = try block_scope.makeMangledName(c, "default"); - const else_prong = try transCreateNodeSwitchCase(rp.c, try transCreateNodeSwitchElse(rp.c)); - else_prong.expr = blk: { - var br = try CtrlFlow.init(rp.c, .Break, switch_scope.default_label.?); - break :blk &(try br.finish(null)).base; - }; - _ = try appendToken(rp.c, .Comma, ","); - - if (switch_scope.case_index >= switch_scope.cases.len) - return revertAndWarn(rp, error.UnsupportedTranslation, @ptrCast(*const clang.Stmt, stmt).getBeginLoc(), "TODO complex switch cases", .{}); - switch_scope.cases[switch_scope.case_index] = &else_prong.base; - switch_scope.case_index += 1; - - switch_scope.pending_block.label = try appendIdentifier(rp.c, switch_scope.default_label.?); - _ = try appendToken(rp.c, .Colon, ":"); + const else_prong = try Node.switch_else.create( + c.arena, + try Node.@"break".create(c.arena, switch_scope.default_label.?), + ); + switch_scope.cases.append(else_prong); + switch_scope.pending_block.label = try appendIdentifier(c, switch_scope.default_label.?); // take all pending statements try switch_scope.pending_block.statements.appendSlice(block_scope.statements.items); block_scope.statements.shrinkAndFree(0); - const pending_node = try switch_scope.pending_block.complete(rp.c); + const pending_node = try switch_scope.pending_block.complete(c); switch_scope.pending_block.deinit(); - switch_scope.pending_block = try Scope.Block.init(rp.c, scope, false); + switch_scope.pending_block = try Scope.Block.init(c, scope, false); try switch_scope.pending_block.statements.append(pending_node); - return transStmt(rp, scope, stmt.getSubStmt(), .unused, .r_value); + return 
transStmt(c, scope, stmt.getSubStmt(), .unused, .r_value); } -fn transConstantExpr(rp: RestorePoint, scope: *Scope, expr: *const clang.Expr, used: ResultUsed) TransError!*ast.Node { +fn transConstantExpr(c: *Context, scope: *Scope, expr: *const clang.Expr, used: ResultUsed) TransError!Node { var result: clang.ExprEvalResult = undefined; - if (!expr.EvaluateAsConstantExpr(&result, .EvaluateForCodeGen, rp.c.clang_context)) - return revertAndWarn(rp, error.UnsupportedTranslation, expr.getBeginLoc(), "invalid constant expression", .{}); + if (!expr.EvaluateAsConstantExpr(&result, .EvaluateForCodeGen, c.clang_context)) + return fail(c, error.UnsupportedTranslation, expr.getBeginLoc(), "invalid constant expression", .{}); - var val_node: ?*ast.Node = null; switch (result.Val.getKind()) { .Int => { // See comment in `transIntegerLiteral` for why this code is here. // @as(T, x) const expr_base = @ptrCast(*const clang.Expr, expr); - const as_node = try rp.c.createBuiltinCall("@as", 2); - const ty_node = try transQualType(rp, expr_base.getType(), expr_base.getBeginLoc()); - as_node.params()[0] = ty_node; - _ = try appendToken(rp.c, .Comma, ","); - - const int_lit_node = try transCreateNodeAPInt(rp.c, result.Val.getInt()); - as_node.params()[1] = int_lit_node; - - as_node.rparen_token = try appendToken(rp.c, .RParen, ")"); - - return maybeSuppressResult(rp, scope, used, &as_node.base); + const as_node = try Node.as.create(c.arena, .{ + .lhs = try transQualType(c, expr_base.getType(), expr_base.getBeginLoc()), + .rhs = try transCreateNodeAPInt(c, result.Val.getInt()), + }); + return maybeSuppressResult(c, scope, used, as_node); }, else => { - return revertAndWarn(rp, error.UnsupportedTranslation, expr.getBeginLoc(), "unsupported constant expression kind", .{}); + return fail(c, error.UnsupportedTranslation, expr.getBeginLoc(), "unsupported constant expression kind", .{}); }, } } -fn transPredefinedExpr(rp: RestorePoint, scope: *Scope, expr: *const clang.PredefinedExpr, used: 
ResultUsed) TransError!*ast.Node { - return transStringLiteral(rp, scope, expr.getFunctionName(), used); +fn transPredefinedExpr(c: *Context, scope: *Scope, expr: *const clang.PredefinedExpr, used: ResultUsed) TransError!Node { + return transStringLiteral(c, scope, expr.getFunctionName(), used); } -fn transCreateCharLitNode(c: *Context, narrow: bool, val: u32) TransError!*ast.Node { - const node = try c.arena.create(ast.Node.OneToken); - node.* = .{ - .base = .{ .tag = .CharLiteral }, - .token = undefined, - }; - if (narrow) { - const val_array = [_]u8{@intCast(u8, val)}; - node.token = try appendTokenFmt(c, .CharLiteral, "'{}'", .{std.zig.fmtEscapes(&val_array)}); - } else { - node.token = try appendTokenFmt(c, .CharLiteral, "'\\u{{{x}}}'", .{val}); - } - return &node.base; +fn transCreateCharLitNode(c: *Context, narrow: bool, val: u32) TransError!Node { + return Node.char_literal.create(c.arena, if (narrow) + try std.fmt.bufPrint(c.arena, "'{}'", .{std.zig.fmtEscapes(&.{@intCast(u8, val)})}) + else + try std.fmt.bufPrint(c.arena, "'\\u{{{x}}}'", .{val})); } fn transCharLiteral( - rp: RestorePoint, + c: *Context, scope: *Scope, stmt: *const clang.CharacterLiteral, result_used: ResultUsed, suppress_as: SuppressCast, -) TransError!*ast.Node { +) TransError!Node { const kind = stmt.getKind(); const val = stmt.getValue(); const narrow = kind == .Ascii or kind == .UTF8; // C has a somewhat obscure feature called multi-character character constant // e.g. 'abcd' const int_lit_node = if (kind == .Ascii and val > 255) - try transCreateNodeInt(rp.c, val) + try transCreateNodeInt(c, val) else - try transCreateCharLitNode(rp.c, narrow, val); + try transCreateCharLitNode(c, narrow, val); if (suppress_as == .no_as) { - return maybeSuppressResult(rp, scope, result_used, int_lit_node); + return maybeSuppressResult(c, scope, result_used, int_lit_node); } // See comment in `transIntegerLiteral` for why this code is here. 
// @as(T, x) const expr_base = @ptrCast(*const clang.Expr, stmt); - const as_node = try rp.c.createBuiltinCall("@as", 2); - const ty_node = try transQualType(rp, expr_base.getType(), expr_base.getBeginLoc()); - as_node.params()[0] = ty_node; - _ = try appendToken(rp.c, .Comma, ","); - as_node.params()[1] = int_lit_node; - - as_node.rparen_token = try appendToken(rp.c, .RParen, ")"); - return maybeSuppressResult(rp, scope, result_used, &as_node.base); + const as_node = Node.as.create(c.arena, .{ + .lhs = try transQualType(c, expr_base.getType(), expr_base.getBeginLoc()), + .rhs = int_lit_node, + }); + return maybeSuppressResult(c, scope, result_used, as_node); } -fn transStmtExpr(rp: RestorePoint, scope: *Scope, stmt: *const clang.StmtExpr, used: ResultUsed) TransError!*ast.Node { +fn transStmtExpr(c: *Context, scope: *Scope, stmt: *const clang.StmtExpr, used: ResultUsed) TransError!Node { const comp = stmt.getSubStmt(); if (used == .unused) { - return transCompoundStmt(rp, scope, comp); + return transCompoundStmt(c, scope, comp); } - const lparen = try appendToken(rp.c, .LParen, "("); - var block_scope = try Scope.Block.init(rp.c, scope, true); + var block_scope = try Scope.Block.init(c, scope, true); defer block_scope.deinit(); var it = comp.body_begin(); @@ -2500,22 +2429,13 @@ fn transStmtExpr(rp: RestorePoint, scope: *Scope, stmt: *const clang.StmtExpr, u const result = try transStmt(rp, &block_scope.base, it[0], .unused, .r_value); try block_scope.statements.append(result); } - const break_node = blk: { - var tmp = try CtrlFlow.init(rp.c, .Break, "blk"); - const rhs = try transStmt(rp, &block_scope.base, it[0], .used, .r_value); - break :blk try tmp.finish(rhs); - }; - _ = try appendToken(rp.c, .Semicolon, ";"); - try block_scope.statements.append(&break_node.base); - const block_node = try block_scope.complete(rp.c); - const rparen = try appendToken(rp.c, .RParen, ")"); - const grouped_expr = try rp.c.arena.create(ast.Node.GroupedExpression); - grouped_expr.* 
= .{ - .lparen = lparen, - .expr = block_node, - .rparen = rparen, - }; - return maybeSuppressResult(rp, scope, used, &grouped_expr.base); + const break_node = try Node.break_val.create(c.arena, .{ + .label = block_scope.label, + .val = try transStmt(c, &block_scope.base, it[0], .used, .r_value), + }); + try block_scope.statements.append(break_node); + + return block_scope.complete(c); } fn transMemberExpr(rp: RestorePoint, scope: *Scope, stmt: *const clang.MemberExpr, result_used: ResultUsed) TransError!*ast.Node { @@ -4581,7 +4501,7 @@ fn fail( comptime format: []const u8, args: anytype, ) (@TypeOf(err) || error{OutOfMemory}) { - try emitWarning(c, source_loc, format, args); + try warn(c, source_loc, format, args); return err; } diff --git a/src/translate_c/ast.zig b/src/translate_c/ast.zig index c59bcd09e4..a163950d2b 100644 --- a/src/translate_c/ast.zig +++ b/src/translate_c/ast.zig @@ -35,8 +35,13 @@ pub const Node = extern union { /// while (true) operand while_true, @"switch", + /// else => operand, + switch_else, + /// lhs => rhs, + switch_prong, @"continue", @"break", + break_val, @"return", field_access, field_access_arrow, @@ -109,6 +114,7 @@ pub const Node = extern union { bit_or, bit_xor, array_cat, + ellipsis3, log2_int_type, /// @import("std").math.Log2Int(operand) @@ -217,6 +223,7 @@ pub const Node = extern union { .empty_array, .while_true, .if_not_break, + .switch_else, => Payload.UnOp, .add, @@ -280,6 +287,8 @@ pub const Node = extern union { .int_to_enum, .int_to_ptr, .array_cat, + .ellipsis3, + .switch_prong, => Payload.BinOp, .int, @@ -300,6 +309,7 @@ pub const Node = extern union { .@"while" => Payload.While, .@"switch" => Payload.Switch, .@"break" => Payload.Break, + .break_val => Payload.BreakVal, .call => Payload.Call, .var_decl => Payload.VarDecl, .func => Payload.Func, @@ -413,22 +423,20 @@ pub const Payload = struct { base: Node = .{ .tag = .@"switch" }, data: struct { cond: Node, - cases: []Prong, - default: ?[]const u8, - - pub const 
Prong = struct { - lhs: Node, - rhs: ?Node, - label: []const u8, - }; + cases: []Node, }, }; pub const Break = struct { base: Node = .{ .tag = .@"break" }, + data: ?[]const u8, + }; + + pub const BreakVal = struct { + base: Node = .{ .tag = .break_val }, data: struct { label: ?[]const u8, - rhs: ?Node, + val: Node, }, }; @@ -525,11 +533,6 @@ pub const Payload = struct { }, }; - pub const Break = struct { - base: Node = .{ .tag = .@"break" }, - data: *Block - }; - pub const Array = struct { base: Node, data: struct { From 450b718b9ec21b328966469c851983ee8969e577 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Wed, 10 Feb 2021 23:38:45 +0200 Subject: [PATCH 075/173] translate-c: convert field/array access, call, pre/postcrement --- src/translate_c.zig | 479 ++++++++++------------------------------ src/translate_c/ast.zig | 22 +- 2 files changed, 129 insertions(+), 372 deletions(-) diff --git a/src/translate_c.zig b/src/translate_c.zig index e5c755c1a4..1a98979317 100644 --- a/src/translate_c.zig +++ b/src/translate_c.zig @@ -2438,11 +2438,11 @@ fn transStmtExpr(c: *Context, scope: *Scope, stmt: *const clang.StmtExpr, used: return block_scope.complete(c); } -fn transMemberExpr(rp: RestorePoint, scope: *Scope, stmt: *const clang.MemberExpr, result_used: ResultUsed) TransError!*ast.Node { - var container_node = try transExpr(rp, scope, stmt.getBase(), .used, .r_value); +fn transMemberExpr(c: *Context, scope: *Scope, stmt: *const clang.MemberExpr, result_used: ResultUsed) TransError!Node { + var container_node = try transExpr(c, scope, stmt.getBase(), .used, .r_value); if (stmt.isArrow()) { - container_node = try transCreateNodePtrDeref(rp.c, container_node); + container_node = try Node.deref.create(c.arena, container_node); } const member_decl = stmt.getMemberDecl(); @@ -2453,19 +2453,19 @@ fn transMemberExpr(rp: RestorePoint, scope: *Scope, stmt: *const clang.MemberExp if (decl_kind == .Field) { const field_decl = @ptrCast(*const clang.FieldDecl, member_decl); if 
(field_decl.isAnonymousStructOrUnion()) { - const name = rp.c.decl_table.get(@ptrToInt(field_decl.getCanonicalDecl())).?; - break :blk try mem.dupe(rp.c.arena, u8, name); + const name = c.decl_table.get(@ptrToInt(field_decl.getCanonicalDecl())).?; + break :blk try mem.dupe(c.arena, u8, name); } } const decl = @ptrCast(*const clang.NamedDecl, member_decl); - break :blk try rp.c.str(decl.getName_bytes_begin()); + break :blk try c.str(decl.getName_bytes_begin()); }; - const node = try transCreateNodeFieldAccess(rp.c, container_node, name); - return maybeSuppressResult(rp, scope, result_used, node); + const node = try Node.field_access.create(c.arena, .{ .container = container_node, .name = name}); + return maybeSuppressResult(c, scope, result_used, node); } -fn transArrayAccess(rp: RestorePoint, scope: *Scope, stmt: *const clang.ArraySubscriptExpr, result_used: ResultUsed) TransError!*ast.Node { +fn transArrayAccess(c: *Context, scope: *Scope, stmt: *const clang.ArraySubscriptExpr, result_used: ResultUsed) TransError!Node { var base_stmt = stmt.getBase(); // Unwrap the base statement if it's an array decayed to a bare pointer type @@ -2478,30 +2478,23 @@ fn transArrayAccess(rp: RestorePoint, scope: *Scope, stmt: *const clang.ArraySub } } - const container_node = try transExpr(rp, scope, base_stmt, .used, .r_value); - const node = try transCreateNodeArrayAccess(rp.c, container_node); + const container_node = try transExpr(c, scope, base_stmt, .used, .r_value); // cast if the index is long long or signed const subscr_expr = stmt.getIdx(); - const qt = getExprQualType(rp.c, subscr_expr); + const qt = getExprQualType(c, subscr_expr); const is_longlong = cIsLongLongInteger(qt); const is_signed = cIsSignedInteger(qt); - if (is_longlong or is_signed) { - const cast_node = try rp.c.createBuiltinCall("@intCast", 2); + + const node = try Node.array_access.create(c.arena, .{ .lhs = container_node, .rhs = if (is_longlong or is_signed) blk: { + const cast_node = try 
c.createBuiltinCall("@intCast", 2); // check if long long first so that signed long long doesn't just become unsigned long long - var typeid_node = if (is_longlong) try transCreateNodeIdentifier(rp.c, "usize") else try transQualTypeIntWidthOf(rp.c, qt, false); - cast_node.params()[0] = typeid_node; - _ = try appendToken(rp.c, .Comma, ","); - cast_node.params()[1] = try transExpr(rp, scope, subscr_expr, .used, .r_value); - cast_node.rparen_token = try appendToken(rp.c, .RParen, ")"); - node.rtoken = try appendToken(rp.c, .RBrace, "]"); - node.index_expr = &cast_node.base; - } else { - node.index_expr = try transExpr(rp, scope, subscr_expr, .used, .r_value); - node.rtoken = try appendToken(rp.c, .RBrace, "]"); - } - return maybeSuppressResult(rp, scope, result_used, &node.base); + var typeid_node = if (is_longlong) try transCreateNodeIdentifier(c, "usize") else try transQualTypeIntWidthOf(c, qt, false); + break :blk try Node.int_cast.create(c.arena, .{ .lhs = typeid_node, .rhs = try transExpr(c, scope, subscr_expr, .used, .r_value)}); + } else + try transExpr(c, scope, subscr_expr, .used, .r_value)}); + return maybeSuppressResult(c, scope, result_used, node); } /// Check if an expression is ultimately a reference to a function declaration @@ -2536,9 +2529,9 @@ fn cIsFunctionDeclRef(expr: *const clang.Expr) bool { } } -fn transCallExpr(rp: RestorePoint, scope: *Scope, stmt: *const clang.CallExpr, result_used: ResultUsed) TransError!*ast.Node { +fn transCallExpr(c: *Context, scope: *Scope, stmt: *const clang.CallExpr, result_used: ResultUsed) TransError!Node { const callee = stmt.getCallee(); - var raw_fn_expr = try transExpr(rp, scope, callee, .used, .r_value); + var raw_fn_expr = try transExpr(c, scope, callee, .used, .r_value); var is_ptr = false; const fn_ty = qualTypeGetFnProto(callee.getType(), &is_ptr); @@ -2549,16 +2542,12 @@ fn transCallExpr(rp: RestorePoint, scope: *Scope, stmt: *const clang.CallExpr, r raw_fn_expr; const num_args = stmt.getNumArgs(); - const 
node = try rp.c.createCall(fn_expr, num_args); - const call_params = node.params(); + const call_params = try c.arena.alloc(Node, num_args); const args = stmt.getArgs(); var i: usize = 0; while (i < num_args) : (i += 1) { - if (i != 0) { - _ = try appendToken(rp.c, .Comma, ","); - } - var call_param = try transExpr(rp, scope, args[i], .used, .r_value); + var call_param = try transExpr(c, scope, args[i], .used, .r_value); // In C the result type of a boolean expression is int. If this result is passed as // an argument to a function whose parameter is also int, there is no cast. Therefore @@ -2570,10 +2559,7 @@ fn transCallExpr(rp: RestorePoint, scope: *Scope, stmt: *const clang.CallExpr, r if (i < param_count) { const param_qt = fn_proto.getParamType(@intCast(c_uint, i)); if (isBoolRes(call_param) and cIsNativeInt(param_qt)) { - const builtin_node = try rp.c.createBuiltinCall("@boolToInt", 1); - builtin_node.params()[0] = call_param; - builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")"); - call_param = &builtin_node.base; + call_param = try Node.bool_to_int.create(c.arena, call_param); } } }, @@ -2582,18 +2568,16 @@ fn transCallExpr(rp: RestorePoint, scope: *Scope, stmt: *const clang.CallExpr, r } call_params[i] = call_param; } - node.rtoken = try appendToken(rp.c, .RParen, ")"); - + const node = try Node.call.create(c.arena, .{ .lhs = fn_expr, .args = call_params }); if (fn_ty) |ty| { const canon = ty.getReturnType().getCanonicalType(); const ret_ty = canon.getTypePtr(); if (ret_ty.isVoidType()) { - _ = try appendToken(rp.c, .Semicolon, ";"); - return &node.base; + return node; } } - return maybeSuppressResult(rp, scope, result_used, &node.base); + return maybeSuppressResult(c, scope, result_used, node); } const ClangFunctionType = union(enum) { @@ -2667,21 +2651,21 @@ fn transUnaryOperator(c: *Context, scope: *Scope, stmt: *const clang.UnaryOperat const op_expr = stmt.getSubExpr(); switch (stmt.getOpcode()) { .PostInc => if 
(qualTypeHasWrappingOverflow(stmt.getType())) - return transCreatePostCrement(c, scope, stmt, .AssignAddWrap, .PlusPercentEqual, "+%=", used) + return transCreatePostCrement(c, scope, stmt, .assign_add_wrap, used) else - return transCreatePostCrement(c, scope, stmt, .AssignAdd, .PlusEqual, "+=", used), + return transCreatePostCrement(c, scope, stmt, .assign_add, used), .PostDec => if (qualTypeHasWrappingOverflow(stmt.getType())) - return transCreatePostCrement(c, scope, stmt, .AssignSubWrap, .MinusPercentEqual, "-%=", used) + return transCreatePostCrement(c, scope, stmt, .assign_sub_wrap, used) else - return transCreatePostCrement(c, scope, stmt, .AssignSub, .MinusEqual, "-=", used), + return transCreatePostCrement(c, scope, stmt, .assign_sub, used), .PreInc => if (qualTypeHasWrappingOverflow(stmt.getType())) - return transCreatePreCrement(c, scope, stmt, .AssignAddWrap, .PlusPercentEqual, "+%=", used) + return transCreatePreCrement(c, scope, stmt, .assign_add_wrap, used) else - return transCreatePreCrement(c, scope, stmt, .AssignAdd, .PlusEqual, "+=", used), + return transCreatePreCrement(c, scope, stmt, .assign_add, used), .PreDec => if (qualTypeHasWrappingOverflow(stmt.getType())) - return transCreatePreCrement(c, scope, stmt, .AssignSubWrap, .MinusPercentEqual, "-%=", used) + return transCreatePreCrement(c, scope, stmt, .assign_sub_wrap, used) else - return transCreatePreCrement(c, scope, stmt, .AssignSub, .MinusEqual, "-=", used), + return transCreatePreCrement(c, scope, stmt, .assign_sub, used), .AddrOf => { if (cIsFunctionDeclRef(op_expr)) { return transExpr(rp, scope, op_expr, used, .r_value); @@ -2704,7 +2688,7 @@ fn transUnaryOperator(c: *Context, scope: *Scope, stmt: *const clang.UnaryOperat // use -% x for unsigned integers return Node.negate_wrap.create(c.arena, try transExpr(c, scope, op_expr, .used, .r_value)); } else - return revertAndWarn(c, error.UnsupportedTranslation, stmt.getBeginLoc(), "C negation with non float non integer", .{}); + return 
fail(c, error.UnsupportedTranslation, stmt.getBeginLoc(), "C negation with non float non integer", .{}); }, .Not => { return Node.bit_not.create(c.arena, try transExpr(c, scope, op_expr, .used, .r_value)); @@ -2715,31 +2699,32 @@ fn transUnaryOperator(c: *Context, scope: *Scope, stmt: *const clang.UnaryOperat .Extension => { return transExpr(c, scope, stmt.getSubExpr(), used, .l_value); }, - else => return revertAndWarn(c, error.UnsupportedTranslation, stmt.getBeginLoc(), "unsupported C translation {}", .{stmt.getOpcode()}), + else => return fail(c, error.UnsupportedTranslation, stmt.getBeginLoc(), "unsupported C translation {}", .{stmt.getOpcode()}), } } fn transCreatePreCrement( - rp: RestorePoint, + c: *Context, scope: *Scope, stmt: *const clang.UnaryOperator, - op: ast.Node.Tag, - op_tok_id: std.zig.Token.Id, - bytes: []const u8, + op: Node.Tag, used: ResultUsed, -) TransError!*ast.Node { +) TransError!Node { const op_expr = stmt.getSubExpr(); if (used == .unused) { // common case // c: ++expr // zig: expr += 1 - const expr = try transExpr(rp, scope, op_expr, .used, .r_value); - const token = try appendToken(rp.c, op_tok_id, bytes); - const one = try transCreateNodeInt(rp.c, 1); - if (scope.id != .Condition) - _ = try appendToken(rp.c, .Semicolon, ";"); - return transCreateNodeInfixOp(rp, scope, expr, op, token, one, .used, false); + const payload = try c.arena.create(ast.Payload.BinOp); + payload.* = .{ + .base = .{ .tag = op }, + .data = .{ + .lhs = try transExpr(c, scope, op_expr, .used, .r_value), + .rhs = Node.one_literal.init(), + } + }; + return Node.initPayload(&payload.base); } // worst case // c: ++expr @@ -2748,71 +2733,55 @@ fn transCreatePreCrement( // zig: _ref.* += 1; // zig: break :blk _ref.* // zig: }) - var block_scope = try Scope.Block.init(rp.c, scope, true); + var block_scope = try Scope.Block.init(c, scope, true); defer block_scope.deinit(); - const ref = try block_scope.makeMangledName(rp.c, "ref"); + const ref = try 
block_scope.makeMangledName(c, "ref"); - const mut_tok = try appendToken(rp.c, .Keyword_const, "const"); - const name_tok = try appendIdentifier(rp.c, ref); - const eq_token = try appendToken(rp.c, .Equal, "="); - const rhs_node = try transCreateNodeSimplePrefixOp(rp.c, .AddressOf, .Ampersand, "&"); - rhs_node.rhs = try transExpr(rp, scope, op_expr, .used, .r_value); - const init_node = &rhs_node.base; - const semicolon_token = try appendToken(rp.c, .Semicolon, ";"); - const node = try ast.Node.VarDecl.create(rp.c.arena, .{ - .name_token = name_tok, - .mut_token = mut_tok, - .semicolon_token = semicolon_token, - }, .{ - .eq_token = eq_token, - .init_node = init_node, - }); - try block_scope.statements.append(&node.base); + const expr = try transExpr(c, scope, op_expr, .used, .r_value); + const addr_of = try Node.address_of.create(c.arena, expr); + const ref_decl = try Node.var_simple.create(c.arena, .{ .name = ref, .init = addr_of}); + try block_scope.statements.append(ref_decl); - const lhs_node = try transCreateNodeIdentifier(rp.c, ref); - const ref_node = try transCreateNodePtrDeref(rp.c, lhs_node); - _ = try appendToken(rp.c, .Semicolon, ";"); - const token = try appendToken(rp.c, op_tok_id, bytes); - const one = try transCreateNodeInt(rp.c, 1); - _ = try appendToken(rp.c, .Semicolon, ";"); - const assign = try transCreateNodeInfixOp(rp, scope, ref_node, op, token, one, .used, false); - try block_scope.statements.append(assign); - - const break_node = try transCreateNodeBreak(rp.c, block_scope.label, ref_node); - try block_scope.statements.append(&break_node.base); - const block_node = try block_scope.complete(rp.c); - // semicolon must immediately follow rbrace because it is the last token in a block - _ = try appendToken(rp.c, .Semicolon, ";"); - const grouped_expr = try rp.c.arena.create(ast.Node.GroupedExpression); - grouped_expr.* = .{ - .lparen = try appendToken(rp.c, .LParen, "("), - .expr = block_node, - .rparen = try appendToken(rp.c, .RParen, ")"), + 
const lhs_node = try Node.identifier.create(c.arena, ref); + const ref_node = try Node.deref.create(c.arena, lhs_node); + const payload = try c.arena.create(ast.Payload.BinOp); + payload.* = .{ + .base = .{ .tag = op }, + .data = .{ + .lhs = ref_node, + .rhs = Node.one_literal.init(), + } }; - return &grouped_expr.base; + try block_scope.statements.append(Node.initPayload(&payload.base)); + + return Node.break_val.create(c.arena, .{ + .label = block_scope.label, + .val = ref_node, + }); } fn transCreatePostCrement( - rp: RestorePoint, + c: *Context, scope: *Scope, stmt: *const clang.UnaryOperator, - op: ast.Node.Tag, - op_tok_id: std.zig.Token.Id, - bytes: []const u8, + op: Node.Tag, used: ResultUsed, -) TransError!*ast.Node { +) TransError!Node { const op_expr = stmt.getSubExpr(); if (used == .unused) { // common case // c: ++expr // zig: expr += 1 - const expr = try transExpr(rp, scope, op_expr, .used, .r_value); - const token = try appendToken(rp.c, op_tok_id, bytes); - const one = try transCreateNodeInt(rp.c, 1); - if (scope.id != .Condition) - _ = try appendToken(rp.c, .Semicolon, ";"); - return transCreateNodeInfixOp(rp, scope, expr, op, token, one, .used, false); + const payload = try c.arena.create(ast.Payload.BinOp); + payload.* = .{ + .base = .{ .tag = op }, + .data = .{ + .lhs = try transExpr(c, scope, op_expr, .used, .r_value), + .rhs = Node.one_literal.init(), + } + }; + return Node.initPayload(&payload.base); } // worst case // c: expr++ @@ -2822,68 +2791,36 @@ fn transCreatePostCrement( // zig: _ref.* += 1; // zig: break :blk _tmp // zig: }) - var block_scope = try Scope.Block.init(rp.c, scope, true); + var block_scope = try Scope.Block.init(c, scope, true); defer block_scope.deinit(); - const ref = try block_scope.makeMangledName(rp.c, "ref"); + const ref = try block_scope.makeMangledName(c, "ref"); - const mut_tok = try appendToken(rp.c, .Keyword_const, "const"); - const name_tok = try appendIdentifier(rp.c, ref); - const eq_token = try 
appendToken(rp.c, .Equal, "="); - const rhs_node = try transCreateNodeSimplePrefixOp(rp.c, .AddressOf, .Ampersand, "&"); - rhs_node.rhs = try transExpr(rp, scope, op_expr, .used, .r_value); - const init_node = &rhs_node.base; - const semicolon_token = try appendToken(rp.c, .Semicolon, ";"); - const node = try ast.Node.VarDecl.create(rp.c.arena, .{ - .name_token = name_tok, - .mut_token = mut_tok, - .semicolon_token = semicolon_token, - }, .{ - .eq_token = eq_token, - .init_node = init_node, - }); - try block_scope.statements.append(&node.base); + const expr = try transExpr(c, scope, op_expr, .used, .r_value); + const addr_of = try Node.address_of.create(c.arena, expr); + const ref_decl = try Node.var_simple.create(c.arena, .{ .name = ref, .init = addr_of}); + try block_scope.statements.append(ref_decl); - const lhs_node = try transCreateNodeIdentifier(rp.c, ref); - const ref_node = try transCreateNodePtrDeref(rp.c, lhs_node); - _ = try appendToken(rp.c, .Semicolon, ";"); + const lhs_node = try Node.identifier.create(c.arena, ref); + const ref_node = try Node.deref.create(c.arena, lhs_node); - const tmp = try block_scope.makeMangledName(rp.c, "tmp"); - const tmp_mut_tok = try appendToken(rp.c, .Keyword_const, "const"); - const tmp_name_tok = try appendIdentifier(rp.c, tmp); - const tmp_eq_token = try appendToken(rp.c, .Equal, "="); - const tmp_init_node = ref_node; - const tmp_semicolon_token = try appendToken(rp.c, .Semicolon, ";"); - const tmp_node = try ast.Node.VarDecl.create(rp.c.arena, .{ - .name_token = tmp_name_tok, - .mut_token = tmp_mut_tok, - .semicolon_token = semicolon_token, - }, .{ - .eq_token = tmp_eq_token, - .init_node = tmp_init_node, - }); - try block_scope.statements.append(&tmp_node.base); + const tmp = try block_scope.makeMangledName(c, "tmp"); + const tmp_decl = try Node.var_simple.create(c.arena, .{ .name = tmp, .init = ref_node}); + try block_scope.statements.append(tmp_decl); - const token = try appendToken(rp.c, op_tok_id, bytes); - const 
one = try transCreateNodeInt(rp.c, 1); - _ = try appendToken(rp.c, .Semicolon, ";"); - const assign = try transCreateNodeInfixOp(rp, scope, ref_node, op, token, one, .used, false); - try block_scope.statements.append(assign); - - const break_node = blk: { - var tmp_ctrl_flow = try CtrlFlow.initToken(rp.c, .Break, block_scope.label); - const rhs = try transCreateNodeIdentifier(rp.c, tmp); - break :blk try tmp_ctrl_flow.finish(rhs); + const payload = try c.arena.create(ast.Payload.BinOp); + payload.* = .{ + .base = .{ .tag = op }, + .data = .{ + .lhs = ref_node, + .rhs = Node.one_literal.init(), + } }; - try block_scope.statements.append(&break_node.base); - _ = try appendToken(rp.c, .Semicolon, ";"); - const block_node = try block_scope.complete(rp.c); - const grouped_expr = try rp.c.arena.create(ast.Node.GroupedExpression); - grouped_expr.* = .{ - .lparen = try appendToken(rp.c, .LParen, "("), - .expr = block_node, - .rparen = try appendToken(rp.c, .RParen, ")"), - }; - return &grouped_expr.base; + try block_scope.statements.append(Node.initPayload(&payload.base)); + + return Node.break_val.create(c.arena, .{ + .label = block_scope.label, + .val = try Node.identifier.create(c.arena, tmp), + }); } fn transCompoundAssignOperator(rp: RestorePoint, scope: *Scope, stmt: *const clang.CompoundAssignOperator, used: ResultUsed) TransError!*ast.Node { @@ -3139,31 +3076,24 @@ fn transCPtrCast( } } -fn transBreak(rp: RestorePoint, scope: *Scope) TransError!*ast.Node { +fn transBreak(c: *Context, scope: *Scope) TransError!Node { const break_scope = scope.getBreakableScope(); const label_text: ?[]const u8 = if (break_scope.id == .Switch) blk: { const swtch = @fieldParentPtr(Scope.Switch, "base", break_scope); - const block_scope = try scope.findBlockScope(rp.c); - swtch.switch_label = try block_scope.makeMangledName(rp.c, "switch"); + const block_scope = try scope.findBlockScope(c); + swtch.switch_label = try block_scope.makeMangledName(c, "switch"); break :blk 
swtch.switch_label; } else null; - var cf = try CtrlFlow.init(rp.c, .Break, label_text); - const br = try cf.finish(null); - _ = try appendToken(rp.c, .Semicolon, ";"); - return &br.base; + return Node.@"break".create(c.arena, label_text); } -fn transFloatingLiteral(rp: RestorePoint, scope: *Scope, stmt: *const clang.FloatingLiteral, used: ResultUsed) TransError!*ast.Node { +fn transFloatingLiteral(c: *Context, scope: *Scope, stmt: *const clang.FloatingLiteral, used: ResultUsed) TransError!Node { // TODO use something more accurate const dbl = stmt.getValueAsApproximateDouble(); - const node = try rp.c.arena.create(ast.Node.OneToken); - node.* = .{ - .base = .{ .tag = .FloatLiteral }, - .token = try appendTokenFmt(rp.c, .FloatLiteral, "{d}", .{dbl}), - }; - return maybeSuppressResult(rp, scope, used, &node.base); + const node = try Node.float_literal.create(c.arena, try std.fmt.allocPrint(c.arena, "{d}", .{dbl})); + return maybeSuppressResult(c, scope, used, &node.base); } fn transBinaryConditionalOperator(rp: RestorePoint, scope: *Scope, stmt: *const clang.BinaryConditionalOperator, used: ResultUsed) TransError!*ast.Node { @@ -3943,169 +3873,7 @@ fn transCreateNodeMacroFn(c: *Context, name: []const u8, ref: *ast.Node, proto_a return &fn_proto.base; } -fn transCreateNodeUnwrapNull(c: *Context, wrapped: *ast.Node) !*ast.Node { - _ = try appendToken(c, .Period, "."); - const qm = try appendToken(c, .QuestionMark, "?"); - const node = try c.arena.create(ast.Node.SimpleSuffixOp); - node.* = .{ - .base = .{ .tag = .UnwrapOptional }, - .lhs = wrapped, - .rtoken = qm, - }; - return &node.base; -} -fn transCreateNodeEnumLiteral(c: *Context, name: []const u8) !*ast.Node { - const node = try c.arena.create(ast.Node.EnumLiteral); - node.* = .{ - .dot = try appendToken(c, .Period, "."), - .name = try appendIdentifier(c, name), - }; - return &node.base; -} - -fn transCreateNodeStringLiteral(c: *Context, str: []const u8) !*ast.Node { - const node = try 
c.arena.create(ast.Node.OneToken); - node.* = .{ - .base = .{ .tag = .StringLiteral }, - .token = try appendToken(c, .StringLiteral, str), - }; - return &node.base; -} - -fn transCreateNodeIf(c: *Context) !*ast.Node.If { - const if_tok = try appendToken(c, .Keyword_if, "if"); - _ = try appendToken(c, .LParen, "("); - const node = try c.arena.create(ast.Node.If); - node.* = .{ - .if_token = if_tok, - .condition = undefined, - .payload = null, - .body = undefined, - .@"else" = null, - }; - return node; -} - -fn transCreateNodeElse(c: *Context) !*ast.Node.Else { - const node = try c.arena.create(ast.Node.Else); - node.* = .{ - .else_token = try appendToken(c, .Keyword_else, "else"), - .payload = null, - .body = undefined, - }; - return node; -} - -fn transCreateNodeBreak( - c: *Context, - label: ?ast.TokenIndex, - rhs: ?*ast.Node, -) !*ast.Node.ControlFlowExpression { - var ctrl_flow = try CtrlFlow.init(c, .Break, if (label) |l| tokenSlice(c, l) else null); - return ctrl_flow.finish(rhs); -} - -const CtrlFlow = struct { - c: *Context, - ltoken: ast.TokenIndex, - label_token: ?ast.TokenIndex, - tag: ast.Node.Tag, - - /// Does everything except the RHS. 
- fn init(c: *Context, tag: ast.Node.Tag, label: ?[]const u8) !CtrlFlow { - const kw: Token.Id = switch (tag) { - .Break => .Keyword_break, - .Continue => .Keyword_continue, - .Return => .Keyword_return, - else => unreachable, - }; - const kw_text = switch (tag) { - .Break => "break", - .Continue => "continue", - .Return => "return", - else => unreachable, - }; - const ltoken = try appendToken(c, kw, kw_text); - const label_token = if (label) |l| blk: { - _ = try appendToken(c, .Colon, ":"); - break :blk try appendIdentifier(c, l); - } else null; - return CtrlFlow{ - .c = c, - .ltoken = ltoken, - .label_token = label_token, - .tag = tag, - }; - } - - fn initToken(c: *Context, tag: ast.Node.Tag, label: ?ast.TokenIndex) !CtrlFlow { - const other_token = label orelse return init(c, tag, null); - const loc = c.token_locs.items[other_token]; - const label_name = c.source_buffer.items[loc.start..loc.end]; - return init(c, tag, label_name); - } - - fn finish(self: *CtrlFlow, rhs: ?*ast.Node) !*ast.Node.ControlFlowExpression { - return ast.Node.ControlFlowExpression.create(self.c.arena, .{ - .ltoken = self.ltoken, - .tag = self.tag, - }, .{ - .label = self.label_token, - .rhs = rhs, - }); - } -}; - -fn transCreateNodeWhile(c: *Context) !*ast.Node.While { - const while_tok = try appendToken(c, .Keyword_while, "while"); - _ = try appendToken(c, .LParen, "("); - - const node = try c.arena.create(ast.Node.While); - node.* = .{ - .label = null, - .inline_token = null, - .while_token = while_tok, - .condition = undefined, - .payload = null, - .continue_expr = null, - .body = undefined, - .@"else" = null, - }; - return node; -} - -fn transCreateNodeContinue(c: *Context) !*ast.Node { - const ltoken = try appendToken(c, .Keyword_continue, "continue"); - const node = try ast.Node.ControlFlowExpression.create(c.arena, .{ - .ltoken = ltoken, - .tag = .Continue, - }, .{}); - _ = try appendToken(c, .Semicolon, ";"); - return &node.base; -} - -fn transCreateNodeSwitchCase(c: *Context, 
lhs: *ast.Node) !*ast.Node.SwitchCase { - const arrow_tok = try appendToken(c, .EqualAngleBracketRight, "=>"); - - const node = try ast.Node.SwitchCase.alloc(c.arena, 1); - node.* = .{ - .items_len = 1, - .arrow_token = arrow_tok, - .payload = null, - .expr = undefined, - }; - node.items()[0] = lhs; - return node; -} - -fn transCreateNodeSwitchElse(c: *Context) !*ast.Node { - const node = try c.arena.create(ast.Node.SwitchElse); - node.* = .{ - .token = try appendToken(c, .Keyword_else, "else"), - }; - return &node.base; -} fn transCreateNodeShiftOp( c: *Context, @@ -4137,27 +3905,6 @@ fn transCreateNodeShiftOp( return Node.initPayload(&payload.base); } -fn transCreateNodePtrDeref(c: *Context, lhs: *ast.Node) !*ast.Node { - const node = try c.arena.create(ast.Node.SimpleSuffixOp); - node.* = .{ - .base = .{ .tag = .Deref }, - .lhs = lhs, - .rtoken = try appendToken(c, .PeriodAsterisk, ".*"), - }; - return &node.base; -} - -fn transCreateNodeArrayAccess(c: *Context, lhs: *ast.Node) !*ast.Node.ArrayAccess { - _ = try appendToken(c, .LBrace, "["); - const node = try c.arena.create(ast.Node.ArrayAccess); - node.* = .{ - .lhs = lhs, - .index_expr = undefined, - .rtoken = undefined, - }; - return node; -} - fn transType(c: *Context, ty: *const clang.Type, source_loc: clang.SourceLocation) TypeError!Node { switch (ty.getTypeClass()) { .Builtin => { diff --git a/src/translate_c/ast.zig b/src/translate_c/ast.zig index a163950d2b..3a7c02fd74 100644 --- a/src/translate_c/ast.zig +++ b/src/translate_c/ast.zig @@ -17,6 +17,7 @@ pub const Node = extern union { empty_block, return_void, zero_literal, + one_literal, void_type, noreturn_type, /// pub usingnamespace @import("std").c.builtins; @@ -44,7 +45,6 @@ pub const Node = extern union { break_val, @"return", field_access, - field_access_arrow, array_access, call, std_mem_zeroes, @@ -153,8 +153,10 @@ pub const Node = extern union { bit_not, not, address_of, - // operand.?.* + /// operand.?.* unwrap_deref, + /// .* + deref, 
block, @"break", @@ -202,11 +204,11 @@ pub const Node = extern union { .usingnamespace_builtins, .return_void, .zero_literal, + .one_literal, .void_type, .noreturn_type, => @compileError("Type Tag " ++ @tagName(t) ++ " has no payload"), - .array_access, .std_mem_zeroes, .@"return", .discard, @@ -218,6 +220,7 @@ pub const Node = extern union { .optional_type, .address_of, .unwrap_deref, + .deref, .ptr_to_int, .enum_to_int, .empty_array, @@ -296,8 +299,6 @@ pub const Node = extern union { .string, .char, .identifier, - .field_access, - .field_access_arrow, .warning, .failed_decl, .sizeof, @@ -323,9 +324,10 @@ pub const Node = extern union { .array_type => Payload.Array, .arg_redecl => Payload.ArgRedecl, .log2_int_type => Payload.Log2IntType, - .typedef, .pub_typedef, .pub_var_simple => Payload.SimpleVarDecl, + .typedef, .pub_typedef, .var_simple, .pub_var_simple => Payload.SimpleVarDecl, .enum_redecl => Payload.EnumRedecl, .array_filler => Payload.ArrayFiller, + .field_access => Payload.FieldAccess, }; } @@ -588,6 +590,14 @@ pub const Payload = struct { count: usize, }, }; + + pub const FieldAccess = struct { + base: Node, + data: struct { + container: Node, + name: []const u8, + }, + }; }; /// Converts the nodes into a Zig ast. 
From c30c2f7c133861a8749a9ffbc2bf6a49d9c6283e Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Thu, 11 Feb 2021 12:31:48 +0200 Subject: [PATCH 076/173] translate-c: convert assignment and conditional exprs --- src/translate_c.zig | 728 ++++++++++++---------------------------- src/translate_c/ast.zig | 30 +- 2 files changed, 235 insertions(+), 523 deletions(-) diff --git a/src/translate_c.zig b/src/translate_c.zig index 1a98979317..0a20d5e6d5 100644 --- a/src/translate_c.zig +++ b/src/translate_c.zig @@ -1144,18 +1144,16 @@ fn transBinaryOperator( } }, .Shl => { - const node = try transCreateNodeShiftOp(c, scope, stmt, .shl); - return maybeSuppressResult(c, scope, result_used, node); + return transCreateNodeShiftOp(c, scope, stmt, .shl, result_used); }, .Shr => { - const node = try transCreateNodeShiftOp(c, scope, stmt, .shr); - return maybeSuppressResult(c, scope, result_used, node); + return transCreateNodeShiftOp(c, scope, stmt, .shr, result_used); }, .LAnd => { - return transCreateNodeBoolInfixOp(c, scope, stmt, .bool_and, result_used, true); + return transCreateNodeBoolInfixOp(c, scope, stmt, .bool_and, result_used); }, .LOr => { - return transCreateNodeBoolInfixOp(c, scope, stmt, .bool_or, result_used, true); + return transCreateNodeBoolInfixOp(c, scope, stmt, .bool_or, result_used); }, else => {}, } @@ -1233,15 +1231,7 @@ fn transBinaryOperator( else rhs_uncasted; - const payload = try c.arena.create(ast.Payload.BinOp); - payload.* = .{ - .base = .{ .tag = op_id }, - .data = .{ - .lhs = lhs, - .rhs = rhs, - }, - }; - return maybeSuppressResult(c, scope, used, Node.initPayload(&payload.base)); + return transCreateNodeInfixOp(c, scope, op_id, lhs, rhs, used); } fn transCompoundStmtInline( @@ -1716,7 +1706,7 @@ fn transStringLiteralAsArray( init_list[i] = try transCreateCharLitNode(c, narrow, code_unit); } while (i < array_size) : (i += 1) { - init_list[i] = try transCreateNodeInt(c, 0); + init_list[i] = try transCreateNodeNumber(c, 0); } return 
Node.array_init.create(c.arena, init_list); @@ -2398,7 +2388,7 @@ fn transCharLiteral( // C has a somewhat obscure feature called multi-character character constant // e.g. 'abcd' const int_lit_node = if (kind == .Ascii and val > 255) - try transCreateNodeInt(c, val) + try transCreateNodeNumber(c, val) else try transCreateCharLitNode(c, narrow, val); @@ -2434,8 +2424,8 @@ fn transStmtExpr(c: *Context, scope: *Scope, stmt: *const clang.StmtExpr, used: .val = try transStmt(c, &block_scope.base, it[0], .used, .r_value), }); try block_scope.statements.append(break_node); - - return block_scope.complete(c); + const res = try block_scope.complete(c); + return maybeSuppressResult(c, scope, used, res); } fn transMemberExpr(c: *Context, scope: *Scope, stmt: *const clang.MemberExpr, result_used: ResultUsed) TransError!Node { @@ -2460,8 +2450,9 @@ fn transMemberExpr(c: *Context, scope: *Scope, stmt: *const clang.MemberExpr, re const decl = @ptrCast(*const clang.NamedDecl, member_decl); break :blk try c.str(decl.getName_bytes_begin()); }; + const ident = try Node.identifier.create(c.arena, name); - const node = try Node.field_access.create(c.arena, .{ .container = container_node, .name = name}); + const node = try Node.field_access.create(c.arena, .{ .lhs = container_node, .rhs = ident}); return maybeSuppressResult(c, scope, result_used, node); } @@ -2716,15 +2707,9 @@ fn transCreatePreCrement( // common case // c: ++expr // zig: expr += 1 - const payload = try c.arena.create(ast.Payload.BinOp); - payload.* = .{ - .base = .{ .tag = op }, - .data = .{ - .lhs = try transExpr(c, scope, op_expr, .used, .r_value), - .rhs = Node.one_literal.init(), - } - }; - return Node.initPayload(&payload.base); + const lhs = try transExpr(c, scope, op_expr, .used, .r_value); + const rhs = Node.one_literal.init(); + return transCreateNodeInfixOp(c, scope, op, lhs, rhs, .used); } // worst case // c: ++expr @@ -2744,20 +2729,15 @@ fn transCreatePreCrement( const lhs_node = try 
Node.identifier.create(c.arena, ref); const ref_node = try Node.deref.create(c.arena, lhs_node); - const payload = try c.arena.create(ast.Payload.BinOp); - payload.* = .{ - .base = .{ .tag = op }, - .data = .{ - .lhs = ref_node, - .rhs = Node.one_literal.init(), - } - }; - try block_scope.statements.append(Node.initPayload(&payload.base)); + const node = try transCreateNodeInfixOp(c, scope, op, ref_node, Node.one_literal.init(), .used); + try block_scope.statements.append(node); - return Node.break_val.create(c.arena, .{ + const break_node = try Node.break_val.create(c.arena, .{ .label = block_scope.label, .val = ref_node, }); + try block_scope.statements.append(break_node); + return block_scope.complete(c); } fn transCreatePostCrement( @@ -2771,17 +2751,11 @@ fn transCreatePostCrement( if (used == .unused) { // common case - // c: ++expr + // c: expr++ // zig: expr += 1 - const payload = try c.arena.create(ast.Payload.BinOp); - payload.* = .{ - .base = .{ .tag = op }, - .data = .{ - .lhs = try transExpr(c, scope, op_expr, .used, .r_value), - .rhs = Node.one_literal.init(), - } - }; - return Node.initPayload(&payload.base); + const lhs = try transExpr(c, scope, op_expr, .used, .r_value); + const rhs = Node.one_literal.init(); + return transCreateNodeInfixOp(c, scope, op, lhs, rhs, .used); } // worst case // c: expr++ @@ -2807,44 +2781,39 @@ fn transCreatePostCrement( const tmp_decl = try Node.var_simple.create(c.arena, .{ .name = tmp, .init = ref_node}); try block_scope.statements.append(tmp_decl); - const payload = try c.arena.create(ast.Payload.BinOp); - payload.* = .{ - .base = .{ .tag = op }, - .data = .{ - .lhs = ref_node, - .rhs = Node.one_literal.init(), - } - }; - try block_scope.statements.append(Node.initPayload(&payload.base)); + const node = try transCreateNodeInfixOp(c, scope, op, ref_node, Node.one_literal.init(), .used); + try block_scope.statements.append(node); - return Node.break_val.create(c.arena, .{ + const break_node = try 
Node.break_val.create(c.arena, .{ .label = block_scope.label, .val = try Node.identifier.create(c.arena, tmp), }); + try block_scope.statements.append(break_node); + return block_scope.complete(c); } -fn transCompoundAssignOperator(rp: RestorePoint, scope: *Scope, stmt: *const clang.CompoundAssignOperator, used: ResultUsed) TransError!*ast.Node { +fn transCompoundAssignOperator(c: *Context, scope: *Scope, stmt: *const clang.CompoundAssignOperator, used: ResultUsed) TransError!Node { switch (stmt.getOpcode()) { .MulAssign => if (qualTypeHasWrappingOverflow(stmt.getType())) - return transCreateCompoundAssign(rp, scope, stmt, .AssignMulWrap, .AsteriskPercentEqual, "*%=", .MulWrap, .AsteriskPercent, "*%", used) + return transCreateCompoundAssign(c, scope, stmt, .assign_mul_wrap, used) else - return transCreateCompoundAssign(rp, scope, stmt, .AssignMul, .AsteriskEqual, "*=", .Mul, .Asterisk, "*", used), + return transCreateCompoundAssign(c, scope, stmt, .assign_mul, used), .AddAssign => if (qualTypeHasWrappingOverflow(stmt.getType())) - return transCreateCompoundAssign(rp, scope, stmt, .AssignAddWrap, .PlusPercentEqual, "+%=", .AddWrap, .PlusPercent, "+%", used) + return transCreateCompoundAssign(c, scope, stmt, .assign_add_wrap, used) else - return transCreateCompoundAssign(rp, scope, stmt, .AssignAdd, .PlusEqual, "+=", .Add, .Plus, "+", used), + return transCreateCompoundAssign(c, scope, stmt, .assign_add, used), .SubAssign => if (qualTypeHasWrappingOverflow(stmt.getType())) - return transCreateCompoundAssign(rp, scope, stmt, .AssignSubWrap, .MinusPercentEqual, "-%=", .SubWrap, .MinusPercent, "-%", used) + return transCreateCompoundAssign(c, scope, stmt, .assign_sub_wrap, used) else - return transCreateCompoundAssign(rp, scope, stmt, .AssignSub, .MinusPercentEqual, "-=", .Sub, .Minus, "-", used), - .DivAssign => return transCreateCompoundAssign(rp, scope, stmt, .AssignDiv, .SlashEqual, "/=", .Div, .Slash, "/", used), - .RemAssign => return 
transCreateCompoundAssign(rp, scope, stmt, .AssignMod, .PercentEqual, "%=", .Mod, .Percent, "%", used), - .ShlAssign => return transCreateCompoundAssign(rp, scope, stmt, .AssignBitShiftLeft, .AngleBracketAngleBracketLeftEqual, "<<=", .BitShiftLeft, .AngleBracketAngleBracketLeft, "<<", used), - .ShrAssign => return transCreateCompoundAssign(rp, scope, stmt, .AssignBitShiftRight, .AngleBracketAngleBracketRightEqual, ">>=", .BitShiftRight, .AngleBracketAngleBracketRight, ">>", used), - .AndAssign => return transCreateCompoundAssign(rp, scope, stmt, .AssignBitAnd, .AmpersandEqual, "&=", .BitAnd, .Ampersand, "&", used), - .XorAssign => return transCreateCompoundAssign(rp, scope, stmt, .AssignBitXor, .CaretEqual, "^=", .BitXor, .Caret, "^", used), - .OrAssign => return transCreateCompoundAssign(rp, scope, stmt, .AssignBitOr, .PipeEqual, "|=", .BitOr, .Pipe, "|", used), - else => return revertAndWarn( + return transCreateCompoundAssign(c, scope, stmt, .assign_sub, used), + .DivAssign => return transCreateCompoundAssign(c, scope, stmt, .assign_div, used), + .RemAssign => return transCreateCompoundAssign(c, scope, stmt, .assign_mod, used), + .ShlAssign => return transCreateCompoundAssign(c, scope, stmt, .assign_shl, used), + .ShrAssign => return transCreateCompoundAssign(c, scope, stmt, .assign_shr, used), + .AndAssign => return transCreateCompoundAssign(c, scope, stmt, .assign_bit_and, used), + .XorAssign => return transCreateCompoundAssign(c, scope, stmt, .assign_bit_xor, used), + .OrAssign => return transCreateCompoundAssign(c, scope, stmt, .assign_bit_or, used), + else => return fail( rp, error.UnsupportedTranslation, stmt.getBeginLoc(), @@ -2855,25 +2824,20 @@ fn transCompoundAssignOperator(rp: RestorePoint, scope: *Scope, stmt: *const cla } fn transCreateCompoundAssign( - rp: RestorePoint, + c: *Context, scope: *Scope, stmt: *const clang.CompoundAssignOperator, - assign_op: ast.Node.Tag, - assign_tok_id: std.zig.Token.Id, - assign_bytes: []const u8, - bin_op: 
ast.Node.Tag, - bin_tok_id: std.zig.Token.Id, - bin_bytes: []const u8, + op: Node.Tag, used: ResultUsed, -) TransError!*ast.Node { - const is_shift = bin_op == .BitShiftLeft or bin_op == .BitShiftRight; - const is_div = bin_op == .Div; - const is_mod = bin_op == .Mod; +) TransError!Node { + const is_shift = op == .assign_shl or op == .assign_shr; + const is_div = op == .assign_div; + const is_mod = op == .assign_mod; const lhs = stmt.getLHS(); const rhs = stmt.getRHS(); const loc = stmt.getBeginLoc(); - const lhs_qt = getExprQualType(rp.c, lhs); - const rhs_qt = getExprQualType(rp.c, rhs); + const lhs_qt = getExprQualType(c, lhs); + const rhs_qt = getExprQualType(c, rhs); const is_signed = cIsSignedInteger(lhs_qt); const requires_int_cast = blk: { const are_integers = cIsInteger(lhs_qt) and cIsInteger(rhs_qt); @@ -2885,146 +2849,99 @@ fn transCreateCompoundAssign( // c: lhs += rhs // zig: lhs += rhs if ((is_mod or is_div) and is_signed) { - const op_token = try appendToken(rp.c, .Equal, "="); - const op_node = try rp.c.arena.create(ast.Node.SimpleInfixOp); - const builtin = if (is_mod) "@rem" else "@divTrunc"; - const builtin_node = try rp.c.createBuiltinCall(builtin, 2); - const lhs_node = try transExpr(rp, scope, lhs, .used, .l_value); - builtin_node.params()[0] = lhs_node; - _ = try appendToken(rp.c, .Comma, ","); - builtin_node.params()[1] = try transExpr(rp, scope, rhs, .used, .r_value); - builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")"); - op_node.* = .{ - .base = .{ .tag = .Assign }, - .op_token = op_token, - .lhs = lhs_node, - .rhs = &builtin_node.base, - }; - _ = try appendToken(rp.c, .Semicolon, ";"); - return &op_node.base; + const lhs_node = try transExpr(c, scope, lhs, .used, .l_value); + const rhs_node = try transExpr(c, scope, rhs, .used, .r_value); + const builtin = if (is_mod) + try Node.rem.create(c.arena, .{ .lhs = lhs_node, .rhs = rhs_node }) + else + try Node.divTrunc.create(c.arena, .{ .lhs = lhs_node, .rhs = rhs_node }); + + 
return transCreateNodeInfixOp(c, scope, .assign, lhs_node, builtin, .used); } - const lhs_node = try transExpr(rp, scope, lhs, .used, .l_value); - const eq_token = try appendToken(rp.c, assign_tok_id, assign_bytes); + const lhs_node = try transExpr(c, scope, lhs, .used, .l_value); var rhs_node = if (is_shift or requires_int_cast) - try transExprCoercing(rp, scope, rhs, .used, .r_value) + try transExprCoercing(c, scope, rhs, .used, .r_value) else - try transExpr(rp, scope, rhs, .used, .r_value); + try transExpr(c, scope, rhs, .used, .r_value); if (is_shift or requires_int_cast) { - const cast_node = try rp.c.createBuiltinCall("@intCast", 2); + // @intCast(rhs) const cast_to_type = if (is_shift) - try qualTypeToLog2IntRef(rp, getExprQualType(rp.c, rhs), loc) + try qualTypeToLog2IntRef(c, getExprQualType(c, rhs), loc) else - try transQualType(rp, getExprQualType(rp.c, lhs), loc); - cast_node.params()[0] = cast_to_type; - _ = try appendToken(rp.c, .Comma, ","); - cast_node.params()[1] = rhs_node; - cast_node.rparen_token = try appendToken(rp.c, .RParen, ")"); - rhs_node = &cast_node.base; + try transQualType(c, getExprQualType(c, lhs), loc); + + rhs_node = try Node.int_cast.create(c.arena, .{ .lhs = cast_to_type, .rhs = rhs_node }); } - if (scope.id != .Condition) - _ = try appendToken(rp.c, .Semicolon, ";"); - return transCreateNodeInfixOp(rp, scope, lhs_node, assign_op, eq_token, rhs_node, .used, false); + + return transCreateNodeInfixOp(c, scope, assign_op, lhs_node, rhs_node, .used); } // worst case // c: lhs += rhs // zig: (blk: { // zig: const _ref = &lhs; - // zig: _ref.* = _ref.* + rhs; + // zig: _ref.* += rhs; // zig: break :blk _ref.* // zig: }) - var block_scope = try Scope.Block.init(rp.c, scope, true); + var block_scope = try Scope.Block.init(c, scope, true); defer block_scope.deinit(); - const ref = try block_scope.makeMangledName(rp.c, "ref"); + const ref = try block_scope.makeMangledName(c, "ref"); - const mut_tok = try appendToken(rp.c, .Keyword_const, 
"const"); - const name_tok = try appendIdentifier(rp.c, ref); - const eq_token = try appendToken(rp.c, .Equal, "="); - const addr_node = try transCreateNodeSimplePrefixOp(rp.c, .AddressOf, .Ampersand, "&"); - addr_node.rhs = try transExpr(rp, scope, lhs, .used, .l_value); - const init_node = &addr_node.base; - const semicolon_token = try appendToken(rp.c, .Semicolon, ";"); - const node = try ast.Node.VarDecl.create(rp.c.arena, .{ - .name_token = name_tok, - .mut_token = mut_tok, - .semicolon_token = semicolon_token, - }, .{ - .eq_token = eq_token, - .init_node = init_node, - }); - try block_scope.statements.append(&node.base); + const expr = try transExpr(c, scope, op_expr, .used, .r_value); + const addr_of = try Node.address_of.create(c.arena, expr); + const ref_decl = try Node.var_simple.create(c.arena, .{ .name = ref, .init = addr_of}); + try block_scope.statements.append(ref_decl); - const lhs_node = try transCreateNodeIdentifier(rp.c, ref); - const ref_node = try transCreateNodePtrDeref(rp.c, lhs_node); - _ = try appendToken(rp.c, .Semicolon, ";"); + const lhs_node = try Node.identifier.create(c.arena, ref); + const ref_node = try Node.deref.create(c.arena, lhs_node); if ((is_mod or is_div) and is_signed) { - const op_token = try appendToken(rp.c, .Equal, "="); - const op_node = try rp.c.arena.create(ast.Node.SimpleInfixOp); - const builtin = if (is_mod) "@rem" else "@divTrunc"; - const builtin_node = try rp.c.createBuiltinCall(builtin, 2); - builtin_node.params()[0] = try transCreateNodePtrDeref(rp.c, lhs_node); - _ = try appendToken(rp.c, .Comma, ","); - builtin_node.params()[1] = try transExpr(rp, scope, rhs, .used, .r_value); - builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")"); - _ = try appendToken(rp.c, .Semicolon, ";"); - op_node.* = .{ - .base = .{ .tag = .Assign }, - .op_token = op_token, - .lhs = ref_node, - .rhs = &builtin_node.base, - }; - _ = try appendToken(rp.c, .Semicolon, ";"); - try block_scope.statements.append(&op_node.base); 
+ const rhs_node = try transExpr(c, scope, rhs, .used, .r_value); + const builtin = if (is_mod) + try Node.rem.create(c.arena, .{ .lhs = lhs_node, .rhs = rhs_node }) + else + try Node.divTrunc.create(c.arena, .{ .lhs = lhs_node, .rhs = rhs_node }); + + const assign = try transCreateNodeInfixOp(c, scope, .assign, lhs_node, builtin, .used); + try block_scope.statements.append(assign); } else { - const bin_token = try appendToken(rp.c, bin_tok_id, bin_bytes); - var rhs_node = try transExpr(rp, scope, rhs, .used, .r_value); + var rhs_node = try transExpr(c, scope, rhs, .used, .r_value); if (is_shift or requires_int_cast) { - const cast_node = try rp.c.createBuiltinCall("@intCast", 2); + // @intCast(rhs) const cast_to_type = if (is_shift) - try qualTypeToLog2IntRef(rp, getExprQualType(rp.c, rhs), loc) + try qualTypeToLog2IntRef(c, getExprQualType(c, rhs), loc) else - try transQualType(rp, getExprQualType(rp.c, lhs), loc); - cast_node.params()[0] = cast_to_type; - _ = try appendToken(rp.c, .Comma, ","); - cast_node.params()[1] = rhs_node; - cast_node.rparen_token = try appendToken(rp.c, .RParen, ")"); - rhs_node = &cast_node.base; + try transQualType(c, getExprQualType(c, lhs), loc); + + rhs_node = try Node.int_cast.create(c.arena, .{ .lhs = cast_to_type, .rhs = rhs_node }); } - const rhs_bin = try transCreateNodeInfixOp(rp, scope, ref_node, bin_op, bin_token, rhs_node, .used, false); - _ = try appendToken(rp.c, .Semicolon, ";"); - - const ass_eq_token = try appendToken(rp.c, .Equal, "="); - const assign = try transCreateNodeInfixOp(rp, scope, ref_node, .Assign, ass_eq_token, rhs_bin, .used, false); + const assign = try transCreateNodeInfixOp(c, scope, op, ref_node, rhs_node, .used); try block_scope.statements.append(assign); } - const break_node = try transCreateNodeBreak(rp.c, block_scope.label, ref_node); - try block_scope.statements.append(&break_node.base); - const block_node = try block_scope.complete(rp.c); - const grouped_expr = try 
rp.c.arena.create(ast.Node.GroupedExpression); - grouped_expr.* = .{ - .lparen = try appendToken(rp.c, .LParen, "("), - .expr = block_node, - .rparen = try appendToken(rp.c, .RParen, ")"), - }; - return &grouped_expr.base; + const break_node = try Node.break_val.create(c.arena, .{ + .label = block_scope.label, + .val = ref_node, + }); + try block_scope.statements.append(break_node); + return block_scope.complete(c); } fn transCPtrCast( - rp: RestorePoint, + c: *Context, loc: clang.SourceLocation, dst_type: clang.QualType, src_type: clang.QualType, - expr: *ast.Node, -) !*ast.Node { + expr: Node, +) !Node { const ty = dst_type.getTypePtr(); const child_type = ty.getPointeeType(); const src_ty = src_type.getTypePtr(); const src_child_type = src_ty.getPointeeType(); + const dst_type = try transType(c, ty, loc); if ((src_child_type.isConstQualified() and !child_type.isConstQualified()) or @@ -3032,47 +2949,25 @@ fn transCPtrCast( !child_type.isVolatileQualified())) { // Casting away const or volatile requires us to use @intToPtr - const inttoptr_node = try rp.c.createBuiltinCall("@intToPtr", 2); - const dst_type_node = try transType(rp, ty, loc); - inttoptr_node.params()[0] = dst_type_node; - _ = try appendToken(rp.c, .Comma, ","); - - const ptrtoint_node = try rp.c.createBuiltinCall("@ptrToInt", 1); - ptrtoint_node.params()[0] = expr; - ptrtoint_node.rparen_token = try appendToken(rp.c, .RParen, ")"); - - inttoptr_node.params()[1] = &ptrtoint_node.base; - inttoptr_node.rparen_token = try appendToken(rp.c, .RParen, ")"); - return &inttoptr_node.base; + const ptr_to_int = try Node.ptr_to_int.create(c.arena, expr); + const int_to_ptr = try Node.int_to_ptr.create(c.arena, .{ .lhs = dst_type, .rhs = ptr_to_int }); + return int_to_ptr; } else { // Implicit downcasting from higher to lower alignment values is forbidden, // use @alignCast to side-step this problem - const ptrcast_node = try rp.c.createBuiltinCall("@ptrCast", 2); - const dst_type_node = try transType(rp, ty, 
loc); - ptrcast_node.params()[0] = dst_type_node; - _ = try appendToken(rp.c, .Comma, ","); - - if (qualTypeCanon(child_type).isVoidType()) { + const rhs = if (qualTypeCanon(child_type).isVoidType()) // void has 1-byte alignment, so @alignCast is not needed - ptrcast_node.params()[1] = expr; - } else if (typeIsOpaque(rp.c, qualTypeCanon(child_type), loc)) { + expr + else if (typeIsOpaque(c, qualTypeCanon(child_type), loc)) // For opaque types a ptrCast is enough - ptrcast_node.params()[1] = expr; - } else { - const aligncast_node = try rp.c.createBuiltinCall("@alignCast", 2); - const alignof_node = try rp.c.createBuiltinCall("@alignOf", 1); - const child_type_node = try transQualType(rp, child_type, loc); - alignof_node.params()[0] = child_type_node; - alignof_node.rparen_token = try appendToken(rp.c, .RParen, ")"); - aligncast_node.params()[0] = &alignof_node.base; - _ = try appendToken(rp.c, .Comma, ","); - aligncast_node.params()[1] = expr; - aligncast_node.rparen_token = try appendToken(rp.c, .RParen, ")"); - ptrcast_node.params()[1] = &aligncast_node.base; - } - ptrcast_node.rparen_token = try appendToken(rp.c, .RParen, ")"); - - return &ptrcast_node.base; + expr + else blk: { + const child_type_node = try transQualType(c, child_type, loc); + const alignof = try Node.alignof.create(c.arena, child_type_node); + const align_cast = try Node.align_cast.create(c.arena, .{ .lhs = alignof, .rhs = expr }); + break :blk align_cast; + }; + return Node.ptr_cast.create(c.arena, .{ .lhs = dst_type, .rhs = rhs }); } } @@ -3092,13 +2987,15 @@ fn transBreak(c: *Context, scope: *Scope) TransError!Node { fn transFloatingLiteral(c: *Context, scope: *Scope, stmt: *const clang.FloatingLiteral, used: ResultUsed) TransError!Node { // TODO use something more accurate const dbl = stmt.getValueAsApproximateDouble(); - const node = try Node.float_literal.create(c.arena, try std.fmt.allocPrint(c.arena, "{d}", .{dbl})); + const node = try transCreateNodeNumber(c, dbl); return 
maybeSuppressResult(c, scope, used, &node.base); } -fn transBinaryConditionalOperator(rp: RestorePoint, scope: *Scope, stmt: *const clang.BinaryConditionalOperator, used: ResultUsed) TransError!*ast.Node { +fn transBinaryConditionalOperator(c: *Context, scope: *Scope, stmt: *const clang.BinaryConditionalOperator, used: ResultUsed) TransError!Node { // GNU extension of the ternary operator where the middle expression is // omitted, the conditition itself is returned if it evaluates to true + const qt = @ptrCast(*const clang.Stmt, stmt).getType(); + const res_is_bool = qualTypeIsBoolean(qt); const casted_stmt = @ptrCast(*const clang.AbstractConditionalOperator, stmt); const cond_expr = casted_stmt.getCond(); const true_expr = casted_stmt.getTrueExpr(); @@ -3109,67 +3006,39 @@ fn transBinaryConditionalOperator(rp: RestorePoint, scope: *Scope, stmt: *const // const _cond_temp = (cond_expr); // break :blk if (_cond_temp) _cond_temp else (false_expr); // }) - const lparen = try appendToken(rp.c, .LParen, "("); - - var block_scope = try Scope.Block.init(rp.c, scope, true); + var block_scope = try Scope.Block.init(c, scope, true); defer block_scope.deinit(); - const mangled_name = try block_scope.makeMangledName(rp.c, "cond_temp"); - const mut_tok = try appendToken(rp.c, .Keyword_const, "const"); - const name_tok = try appendIdentifier(rp.c, mangled_name); - const eq_token = try appendToken(rp.c, .Equal, "="); - const init_node = try transExpr(rp, &block_scope.base, cond_expr, .used, .r_value); - const semicolon_token = try appendToken(rp.c, .Semicolon, ";"); - const tmp_var = try ast.Node.VarDecl.create(rp.c.arena, .{ - .name_token = name_tok, - .mut_token = mut_tok, - .semicolon_token = semicolon_token, - }, .{ - .eq_token = eq_token, - .init_node = init_node, + const mangled_name = try block_scope.makeMangledName(c, "cond_temp"); + const init_node = try transExpr(c, &block_scope.base, cond_expr, .used, .r_value); + const ref_decl = try Node.var_simple.create(c.arena, .{ 
.name = mangled_name, .init = init_node}); + try block_scope.statements.append(ref_decl); + + const cond_node = try transBoolExpr(c, &cond_scope.base, cond_expr, .used); + var then_body = try Node.identifier.create(c.arena, mangled_name); + if (!res_is_bool and isBoolRes(init_node)) { + then_body = try Node.bool_to_int.create(c.arena, then_body); + } + + var else_body = try transExpr(c, &block_scope.base, false_expr, .used, .r_value); + if (!res_is_bool and isBoolRes(else_body)) { + else_body = try Node.bool_to_int.create(c.arena, else_body); + } + const if_node = try Node.@"if".create(c.arena, .{ + .cond = cond, + .then = then_body, + .@"else" = else_body, }); - try block_scope.statements.append(&tmp_var.base); - - var break_node_tmp = try CtrlFlow.initToken(rp.c, .Break, block_scope.label); - - const if_node = try transCreateNodeIf(rp.c); - var cond_scope = Scope.Condition{ - .base = .{ - .parent = &block_scope.base, - .id = .Condition, - }, - }; - defer cond_scope.deinit(); - const tmp_var_node = try transCreateNodeIdentifier(rp.c, mangled_name); - - const ty = getExprQualType(rp.c, cond_expr).getTypePtr(); - const cond_node = try finishBoolExpr(rp, &cond_scope.base, cond_expr.getBeginLoc(), ty, tmp_var_node, used); - if_node.condition = cond_node; - _ = try appendToken(rp.c, .RParen, ")"); - - if_node.body = try transCreateNodeIdentifier(rp.c, mangled_name); - if_node.@"else" = try transCreateNodeElse(rp.c); - if_node.@"else".?.body = try transExpr(rp, &block_scope.base, false_expr, .used, .r_value); - _ = try appendToken(rp.c, .Semicolon, ";"); - - const break_node = try break_node_tmp.finish(&if_node.base); - _ = try appendToken(rp.c, .Semicolon, ";"); - try block_scope.statements.append(&break_node.base); - const block_node = try block_scope.complete(rp.c); - - const grouped_expr = try rp.c.arena.create(ast.Node.GroupedExpression); - grouped_expr.* = .{ - .lparen = lparen, - .expr = block_node, - .rparen = try appendToken(rp.c, .RParen, ")"), - }; - return 
maybeSuppressResult(rp, scope, used, &grouped_expr.base); + const break_node = try Node.break_val.create(c.arena, .{ + .label = block_scope.label, + .val = if_node, + }); + try block_scope.statements.append(break_node); + const res = try block_scope.complete(c); + return maybeSuppressResult(c, scope, used, res); } -fn transConditionalOperator(rp: RestorePoint, scope: *Scope, stmt: *const clang.ConditionalOperator, used: ResultUsed) TransError!*ast.Node { - const grouped = scope.id == .Condition; - const lparen = if (grouped) try appendToken(rp.c, .LParen, "(") else undefined; - const if_node = try transCreateNodeIf(rp.c); +fn transConditionalOperator(c: *Context, scope: *Scope, stmt: *const clang.ConditionalOperator, used: ResultUsed) TransError!Node { var cond_scope = Scope.Condition{ .base = .{ .parent = scope, @@ -3178,60 +3047,41 @@ fn transConditionalOperator(rp: RestorePoint, scope: *Scope, stmt: *const clang. }; defer cond_scope.deinit(); + const qt = @ptrCast(*const clang.Stmt, stmt).getType(); + const res_is_bool = qualTypeIsBoolean(qt); const casted_stmt = @ptrCast(*const clang.AbstractConditionalOperator, stmt); const cond_expr = casted_stmt.getCond(); const true_expr = casted_stmt.getTrueExpr(); const false_expr = casted_stmt.getFalseExpr(); - if_node.condition = try transBoolExpr(rp, &cond_scope.base, cond_expr, .used, .r_value, false); - _ = try appendToken(rp.c, .RParen, ")"); + const cond = try transBoolExpr(c, &cond_scope.base, cond_expr, .used, .r_value); - if_node.body = try transExpr(rp, scope, true_expr, .used, .r_value); - - if_node.@"else" = try transCreateNodeElse(rp.c); - if_node.@"else".?.body = try transExpr(rp, scope, false_expr, .used, .r_value); - - if (grouped) { - const rparen = try appendToken(rp.c, .RParen, ")"); - const grouped_expr = try rp.c.arena.create(ast.Node.GroupedExpression); - grouped_expr.* = .{ - .lparen = lparen, - .expr = &if_node.base, - .rparen = rparen, - }; - return maybeSuppressResult(rp, scope, used, 
&grouped_expr.base); - } else { - return maybeSuppressResult(rp, scope, used, &if_node.base); + var then_body = try transExpr(c, scope, true_expr, .used, .r_value); + if (!res_is_bool and isBoolRes(then_body)) { + then_body = try Node.bool_to_int.create(c.arena, then_body); } + + var else_body = try transExpr(c, scope, false_expr, .used, .r_value); + if (!res_is_bool and isBoolRes(else_body)) { + else_body = try Node.bool_to_int.create(c.arena, else_body); + } + + const if_node = try Node.@"if".create(c.arena, .{ + .cond = cond, + .then = then_body, + .@"else" = else_body, + }); + return maybeSuppressResult(c, scope, used, if_node); } fn maybeSuppressResult( - rp: RestorePoint, + c: *Context, scope: *Scope, used: ResultUsed, - result: *ast.Node, -) TransError!*ast.Node { + result: Node, +) TransError!Node { if (used == .used) return result; - if (scope.id != .Condition) { - // NOTE: This is backwards, but the semicolon must immediately follow the node. - _ = try appendToken(rp.c, .Semicolon, ";"); - } else { // TODO is there a way to avoid this hack? 
- // this parenthesis must come immediately following the node - _ = try appendToken(rp.c, .RParen, ")"); - // these need to come before _ - _ = try appendToken(rp.c, .Colon, ":"); - _ = try appendToken(rp.c, .LParen, "("); - } - const lhs = try transCreateNodeIdentifier(rp.c, "_"); - const op_token = try appendToken(rp.c, .Equal, "="); - const op_node = try rp.c.arena.create(ast.Node.SimpleInfixOp); - op_node.* = .{ - .base = .{ .tag = .Assign }, - .op_token = op_token, - .lhs = lhs, - .rhs = result, - }; - return &op_node.base; + return Node.ignore.create(c.arena, result); } fn addTopLevelDecl(c: *Context, name: []const u8, decl_node: Node) !void { @@ -3242,11 +3092,11 @@ fn addTopLevelDecl(c: *Context, name: []const u8, decl_node: Node) !void { /// only matters for incomplete arrays, since the size of the array is determined /// by the size of the initializer fn transQualTypeInitialized( - rp: RestorePoint, + c: *Context, qt: clang.QualType, decl_init: *const clang.Expr, source_loc: clang.SourceLocation, -) TypeError!*ast.Node { +) TypeError!Node { const ty = qt.getTypePtr(); if (ty.getTypeClass() == .IncompleteArray) { const incomplete_array_ty = @ptrCast(*const clang.IncompleteArrayType, ty); @@ -3257,17 +3107,17 @@ fn transQualTypeInitialized( const string_lit = @ptrCast(*const clang.StringLiteral, decl_init); const string_lit_size = string_lit.getLength() + 1; // +1 for null terminator const array_size = @intCast(usize, string_lit_size); - return transCreateNodeArrayType(rp, source_loc, elem_ty, array_size); + return Node.array_type.create(c.arena, .{ .len = array_size, .elem_type = elem_ty }); }, .InitListExprClass => { const init_expr = @ptrCast(*const clang.InitListExpr, decl_init); const size = init_expr.getNumInits(); - return transCreateNodeArrayType(rp, source_loc, elem_ty, size); + return Node.array_type.create(c.arena, .{ .len = size, .elem_type = elem_ty }); }, else => {}, } } - return transQualType(rp, qt, source_loc); + return transQualType(c, 
qt, source_loc); } fn transQualType(c: *Context, qt: clang.QualType, source_loc: clang.SourceLocation) TypeError!Node { @@ -3276,16 +3126,16 @@ fn transQualType(c: *Context, qt: clang.QualType, source_loc: clang.SourceLocati /// Produces a Zig AST node by translating a Clang QualType, respecting the width, but modifying the signed-ness. /// Asserts the type is an integer. -fn transQualTypeIntWidthOf(c: *Context, ty: clang.QualType, is_signed: bool) TypeError!*ast.Node { +fn transQualTypeIntWidthOf(c: *Context, ty: clang.QualType, is_signed: bool) TypeError!Node { return transTypeIntWidthOf(c, qualTypeCanon(ty), is_signed); } /// Produces a Zig AST node by translating a Clang Type, respecting the width, but modifying the signed-ness. /// Asserts the type is an integer. -fn transTypeIntWidthOf(c: *Context, ty: *const clang.Type, is_signed: bool) TypeError!*ast.Node { +fn transTypeIntWidthOf(c: *Context, ty: *const clang.Type, is_signed: bool) TypeError!Node { assert(ty.getTypeClass() == .Builtin); const builtin_ty = @ptrCast(*const clang.BuiltinType, ty); - return transCreateNodeIdentifier(c, switch (builtin_ty.getKind()) { + return Node.type.create(c.arena, switch (builtin_ty.getKind()) { .Char_U, .Char_S, .UChar, .SChar, .Char8 => if (is_signed) "i8" else "u8", .UShort, .Short => if (is_signed) "c_short" else "c_ushort", .UInt, .Int => if (is_signed) "c_int" else "c_uint", @@ -3532,28 +3382,22 @@ fn cIsLongLongInteger(qt: clang.QualType) bool { }; } fn transCreateNodeAssign( - rp: RestorePoint, + c: *Context, scope: *Scope, result_used: ResultUsed, lhs: *const clang.Expr, rhs: *const clang.Expr, -) !*ast.Node { +) !Node { // common case // c: lhs = rhs // zig: lhs = rhs if (result_used == .unused) { - const lhs_node = try transExpr(rp, scope, lhs, .used, .l_value); - const eq_token = try appendToken(rp.c, .Equal, "="); - var rhs_node = try transExprCoercing(rp, scope, rhs, .used, .r_value); + const lhs_node = try transExpr(c, scope, lhs, .used, .l_value); + var 
rhs_node = try transExprCoercing(c, scope, rhs, .used, .r_value); if (!exprIsBooleanType(lhs) and isBoolRes(rhs_node)) { - const builtin_node = try rp.c.createBuiltinCall("@boolToInt", 1); - builtin_node.params()[0] = rhs_node; - builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")"); - rhs_node = &builtin_node.base; + rhs_node = try Node.bool_to_int.create(c.arena, rhs_node); } - if (scope.id != .Condition) - _ = try appendToken(rp.c, .Semicolon, ";"); - return transCreateNodeInfixOp(rp, scope, lhs_node, .Assign, eq_token, rhs_node, .used, false); + return transCreateNodeInfixOp(c, scope, .assign, lhs_node, rhs_node, .used); } // worst case @@ -3563,76 +3407,36 @@ fn transCreateNodeAssign( // zig: lhs = _tmp; // zig: break :blk _tmp // zig: }) - var block_scope = try Scope.Block.init(rp.c, scope, true); + var block_scope = try Scope.Block.init(c, scope, true); defer block_scope.deinit(); - const tmp = try block_scope.makeMangledName(rp.c, "tmp"); - const mut_tok = try appendToken(rp.c, .Keyword_const, "const"); - const name_tok = try appendIdentifier(rp.c, tmp); - const eq_token = try appendToken(rp.c, .Equal, "="); - var rhs_node = try transExpr(rp, &block_scope.base, rhs, .used, .r_value); - if (!exprIsBooleanType(lhs) and isBoolRes(rhs_node)) { - const builtin_node = try rp.c.createBuiltinCall("@boolToInt", 1); - builtin_node.params()[0] = rhs_node; - builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")"); - rhs_node = &builtin_node.base; - } - const init_node = rhs_node; - const semicolon_token = try appendToken(rp.c, .Semicolon, ";"); - const node = try ast.Node.VarDecl.create(rp.c.arena, .{ - .name_token = name_tok, - .mut_token = mut_tok, - .semicolon_token = semicolon_token, - }, .{ - .eq_token = eq_token, - .init_node = init_node, - }); - try block_scope.statements.append(&node.base); + const tmp = try block_scope.makeMangledName(c, "tmp"); + const rhs = try transExpr(c, scope, op_expr, .used, .r_value); + const tmp_decl = try 
Node.var_simple.create(c.arena, .{ .name = tmp, .init = rhs}); + try block_scope.statements.append(tmp_decl); - const lhs_node = try transExpr(rp, &block_scope.base, lhs, .used, .l_value); - const lhs_eq_token = try appendToken(rp.c, .Equal, "="); - const ident = try transCreateNodeIdentifier(rp.c, tmp); - _ = try appendToken(rp.c, .Semicolon, ";"); - const assign = try transCreateNodeInfixOp(rp, &block_scope.base, lhs_node, .Assign, lhs_eq_token, ident, .used, false); + const lhs = try transExpr(c, &block_scope.base, lhs, .used, .l_value); + const tmp_ident = try Node.identifier.create(c.arena, tmp); + const assign = try transCreateNodeInfixOp(c, &block_scope.base, .assign, lhs, tmp_iden, .used); try block_scope.statements.append(assign); - const break_node = blk: { - var tmp_ctrl_flow = try CtrlFlow.init(rp.c, .Break, tokenSlice(rp.c, block_scope.label.?)); - const rhs_expr = try transCreateNodeIdentifier(rp.c, tmp); - break :blk try tmp_ctrl_flow.finish(rhs_expr); - }; - _ = try appendToken(rp.c, .Semicolon, ";"); - try block_scope.statements.append(&break_node.base); - const block_node = try block_scope.complete(rp.c); - // semicolon must immediately follow rbrace because it is the last token in a block - _ = try appendToken(rp.c, .Semicolon, ";"); - return block_node; + const break_node = try Node.break_val.create(c.arena, .{ + .label = block_scope.label, + .val = tmp_ident, + }); + try block_scope.statements.append(break_node); + return block_scope.complete(c); } -fn transCreateNodeFieldAccess(c: *Context, container: *ast.Node, field_name: []const u8) !*ast.Node { - const field_access_node = try c.arena.create(ast.Node.SimpleInfixOp); - field_access_node.* = .{ - .base = .{ .tag = .Period }, - .op_token = try appendToken(c, .Period, "."), - .lhs = container, - .rhs = try transCreateNodeIdentifier(c, field_name), - }; - return &field_access_node.base; -} - -fn transCreateNodeBoolInfixOp( +fn transCreateNodeInfixOp( c: *Context, scope: *Scope, - stmt: *const 
clang.BinaryOperator, op: ast.Node.Tag, + lhs: Node, + rhs: Node, used: ResultUsed, ) !Node { - std.debug.assert(op == .bool_and or op == .bool_or); - - const lhs = try transBoolExpr(rp, scope, stmt.getLHS(), .used, .l_value, true); - const rhs = try transBoolExpr(rp, scope, stmt.getRHS(), .used, .r_value, true); - const payload = try c.arena.create(ast.Payload.BinOp); payload.* = .{ .base = .{ .tag = op }, @@ -3644,39 +3448,19 @@ fn transCreateNodeBoolInfixOp( return maybeSuppressResult(c, scope, used, Node.initPayload(&payload.base)); } -fn transCreateNodePtrType( +fn transCreateNodeBoolInfixOp( c: *Context, - is_const: bool, - is_volatile: bool, - op_tok_id: std.zig.Token.Id, -) !*ast.Node.PtrType { - const node = try c.arena.create(ast.Node.PtrType); - const op_token = switch (op_tok_id) { - .LBracket => blk: { - const lbracket = try appendToken(c, .LBracket, "["); - _ = try appendToken(c, .Asterisk, "*"); - _ = try appendToken(c, .RBracket, "]"); - break :blk lbracket; - }, - .Identifier => blk: { - const lbracket = try appendToken(c, .LBracket, "["); // Rendering checks if this token + 2 == .Identifier, so needs to return this token - _ = try appendToken(c, .Asterisk, "*"); - _ = try appendIdentifier(c, "c"); - _ = try appendToken(c, .RBracket, "]"); - break :blk lbracket; - }, - .Asterisk => try appendToken(c, .Asterisk, "*"), - else => unreachable, - }; - node.* = .{ - .op_token = op_token, - .ptr_info = .{ - .const_token = if (is_const) try appendToken(c, .Keyword_const, "const") else null, - .volatile_token = if (is_volatile) try appendToken(c, .Keyword_volatile, "volatile") else null, - }, - .rhs = undefined, // translate and set afterward - }; - return node; + scope: *Scope, + stmt: *const clang.BinaryOperator, + op: ast.Node.Tag, + used: ResultUsed, +) !Node { + std.debug.assert(op == .bool_and or op == .bool_or); + + const lhs = try transBoolExpr(rp, scope, stmt.getLHS(), .used, .l_value); + const rhs = try transBoolExpr(rp, scope, stmt.getRHS(), 
.used, .r_value); + + return transCreateNodeInfixOp(c, scope, op, lhs, rhs, used); } fn transCreateNodeAPInt(c: *Context, int: *const clang.APSInt) !Node { @@ -3722,73 +3506,10 @@ fn transCreateNodeAPInt(c: *Context, int: *const clang.APSInt) !Node { return Node.int_literal.create(c.arena, str); } -fn transCreateNodeUndefinedLiteral(c: *Context) !*ast.Node { - const token = try appendToken(c, .Keyword_undefined, "undefined"); - const node = try c.arena.create(ast.Node.OneToken); - node.* = .{ - .base = .{ .tag = .UndefinedLiteral }, - .token = token, - }; - return &node.base; -} - -fn transCreateNodeNullLiteral(c: *Context) !*ast.Node { - const token = try appendToken(c, .Keyword_null, "null"); - const node = try c.arena.create(ast.Node.OneToken); - node.* = .{ - .base = .{ .tag = .NullLiteral }, - .token = token, - }; - return &node.base; -} - -fn transCreateNodeBoolLiteral(c: *Context, value: bool) !*ast.Node { - const token = if (value) - try appendToken(c, .Keyword_true, "true") - else - try appendToken(c, .Keyword_false, "false"); - const node = try c.arena.create(ast.Node.OneToken); - node.* = .{ - .base = .{ .tag = .BoolLiteral }, - .token = token, - }; - return &node.base; -} - -fn transCreateNodeInt(c: *Context, int: anytype) !*ast.Node { - const fmt_s = if (comptime std.meta.trait.isIntegerNumber(@TypeOf(int))) "{d}" else "{s}"; - const token = try appendTokenFmt(c, .IntegerLiteral, fmt_s, .{int}); - const node = try c.arena.create(ast.Node.OneToken); - node.* = .{ - .base = .{ .tag = .IntegerLiteral }, - .token = token, - }; - return &node.base; -} - -fn transCreateNodeFloat(c: *Context, str: []const u8) !*ast.Node { - const token = try appendTokenFmt(c, .FloatLiteral, "{s}", .{str}); - const node = try c.arena.create(ast.Node.OneToken); - node.* = .{ - .base = .{ .tag = .FloatLiteral }, - .token = token, - }; - return &node.base; -} - -fn transCreateNodeOpaqueType(c: *Context) !*ast.Node { - const container_tok = try appendToken(c, .Keyword_opaque, 
"opaque"); - const lbrace_token = try appendToken(c, .LBrace, "{"); - const container_node = try ast.Node.ContainerDecl.alloc(c.arena, 0); - container_node.* = .{ - .kind_token = container_tok, - .layout_token = null, - .lbrace_token = lbrace_token, - .rbrace_token = try appendToken(c, .RBrace, "}"), - .fields_and_decls_len = 0, - .init_arg_expr = .None, - }; - return &container_node.base; +fn transCreateNodeNumber(c: *Context, int: anytype) !Node { + const fmt_s = if (comptime std.meta.trait.isNumber(@TypeOf(int))) "{d}" else "{s}"; + const str = try std.fmt.allocPrint(c.arena, fmt_s, .{int}); + return Node.int_literal.create(c.arena, str); } fn transCreateNodeMacroFn(c: *Context, name: []const u8, ref: *ast.Node, proto_alias: *ast.Node.FnProto) !*ast.Node { @@ -3873,13 +3594,12 @@ fn transCreateNodeMacroFn(c: *Context, name: []const u8, ref: *ast.Node, proto_a return &fn_proto.base; } - - fn transCreateNodeShiftOp( c: *Context, scope: *Scope, stmt: *const clang.BinaryOperator, op: Node.Tag, + used: ResultUsed, ) !Node { std.debug.assert(op == .shl or op == .shr); @@ -3894,15 +3614,7 @@ fn transCreateNodeShiftOp( const rhs = try transExprCoercing(c, scope, rhs_expr, .used, .r_value); const rhs_casted = try Node.int_cast.create(c.arena, .{ .lhs = rhs_type, .rhs = rhs_type }); - const payload = try c.arena.create(ast.Payload.BinOp); - payload.* = .{ - .base = .{ .tag = op }, - .data = .{ - .lhs = lhs, - .rhs = rhs_casted, - }, - }; - return Node.initPayload(&payload.base); + return transCreateNodeInfixOp(c, scope, op, lhs, rhs_casted, used); } fn transType(c: *Context, ty: *const clang.Type, source_loc: clang.SourceLocation) TypeError!Node { diff --git a/src/translate_c/ast.zig b/src/translate_c/ast.zig index 3a7c02fd74..d62e83ea6a 100644 --- a/src/translate_c/ast.zig +++ b/src/translate_c/ast.zig @@ -24,8 +24,8 @@ pub const Node = extern union { usingnamespace_builtins, // After this, the tag requires a payload. 
- int_literal, - float_literal, + // int or float, doesn't really matter + number_literal, string_literal, char_literal, identifier, @@ -115,6 +115,7 @@ pub const Node = extern union { bit_xor, array_cat, ellipsis3, + assign, log2_int_type, /// @import("std").math.Log2Int(operand) @@ -147,6 +148,8 @@ pub const Node = extern union { int_to_ptr, /// @ptrToInt(operand) ptr_to_int, + /// @alignCast(lhs, rhs) + align_cast, negate, negate_wrap, @@ -190,6 +193,9 @@ pub const Node = extern union { /// [1]type{val} ** count array_filler, + /// _ = operand; + ignore, + pub const last_no_payload_tag = Tag.usingnamespace_builtins; pub const no_payload_count = @enumToInt(last_no_payload_tag) + 1; @@ -227,6 +233,7 @@ pub const Node = extern union { .while_true, .if_not_break, .switch_else, + .ignore, => Payload.UnOp, .add, @@ -292,12 +299,14 @@ pub const Node = extern union { .array_cat, .ellipsis3, .switch_prong, + .field_access, + .assign, + .align_cast, => Payload.BinOp, - .int, - .float, - .string, - .char, + .number_literal, + .string_literal, + .char_literal, .identifier, .warning, .failed_decl, @@ -327,7 +336,6 @@ pub const Node = extern union { .typedef, .pub_typedef, .var_simple, .pub_var_simple => Payload.SimpleVarDecl, .enum_redecl => Payload.EnumRedecl, .array_filler => Payload.ArrayFiller, - .field_access => Payload.FieldAccess, }; } @@ -590,14 +598,6 @@ pub const Payload = struct { count: usize, }, }; - - pub const FieldAccess = struct { - base: Node, - data: struct { - container: Node, - name: []const u8, - }, - }; }; /// Converts the nodes into a Zig ast. 
From 66bbd75a8346b8a292f0d95d4b60a5d7d11b73b2 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Thu, 11 Feb 2021 17:44:04 +0200 Subject: [PATCH 077/173] translate-c: convert macro translation --- src/translate_c.zig | 960 ++++++++++++---------------------------- src/translate_c/ast.zig | 41 +- 2 files changed, 307 insertions(+), 694 deletions(-) diff --git a/src/translate_c.zig b/src/translate_c.zig index 0a20d5e6d5..9ef4788ba7 100644 --- a/src/translate_c.zig +++ b/src/translate_c.zig @@ -3512,86 +3512,48 @@ fn transCreateNodeNumber(c: *Context, int: anytype) !Node { return Node.int_literal.create(c.arena, str); } -fn transCreateNodeMacroFn(c: *Context, name: []const u8, ref: *ast.Node, proto_alias: *ast.Node.FnProto) !*ast.Node { +fn transCreateNodeMacroFn(c: *Context, name: []const u8, ref: Node, proto_alias: *ast.Payload.Func) !Node { const scope = &c.global_scope.base; - const pub_tok = try appendToken(c, .Keyword_pub, "pub"); - const fn_tok = try appendToken(c, .Keyword_fn, "fn"); - const name_tok = try appendIdentifier(c, name); - _ = try appendToken(c, .LParen, "("); - - var fn_params = std.ArrayList(ast.Node.FnProto.ParamDecl).init(c.gpa); + var fn_params = std.ArrayList(Node).init(c.gpa); defer fn_params.deinit(); for (proto_alias.params()) |param, i| { - if (i != 0) { - _ = try appendToken(c, .Comma, ","); - } - const param_name_tok = param.name_token orelse - try appendTokenFmt(c, .Identifier, "arg_{d}", .{c.getMangle()}); + const param_name = param.name orelse + try std.fmt.allocPrint(c.arena, "arg_{d}", .{c.getMangle()}); - _ = try appendToken(c, .Colon, ":"); - - (try fn_params.addOne()).* = .{ - .doc_comments = null, - .comptime_token = null, - .noalias_token = param.noalias_token, - .name_token = param_name_tok, - .param_type = param.param_type, - }; + try fn_params.append(.{ + .name = param_name, + .type = param.type, + .is_noalias = param.is_noalias, + }); } - _ = try appendToken(c, .RParen, ")"); - - _ = try appendToken(c, .Keyword_callconv, 
"callconv"); - _ = try appendToken(c, .LParen, "("); - const callconv_expr = try transCreateNodeEnumLiteral(c, "Inline"); - _ = try appendToken(c, .RParen, ")"); - - const block_lbrace = try appendToken(c, .LBrace, "{"); - - const return_kw = try appendToken(c, .Keyword_return, "return"); - const unwrap_expr = try transCreateNodeUnwrapNull(c, ref.cast(ast.Node.VarDecl).?.getInitNode().?); - - const call_expr = try c.createCall(unwrap_expr, fn_params.items.len); - const call_params = call_expr.params(); + const init = if (value.castTag(.var_decl)) |v| + v.data.init + else if (value.castTag(.var_simple) orelse value.castTag(.pub_var_simple)) |v| + v.data.init + else + unreachable; + const unwrap_expr = try Node.unwrap.create(c.arena, init); + const call_params = try c.arena.alloc(Node, fn_params.items.len); for (fn_params.items) |param, i| { - if (i != 0) { - _ = try appendToken(c, .Comma, ","); - } - call_params[i] = try transCreateNodeIdentifier(c, tokenSlice(c, param.name_token.?)); + call_params[i] = try Node.identifier.create(c.arena, param.name); } - call_expr.rtoken = try appendToken(c, .RParen, ")"); - - const return_expr = try ast.Node.ControlFlowExpression.create(c.arena, .{ - .ltoken = return_kw, - .tag = .Return, - }, .{ - .rhs = &call_expr.base, + const call_expr = try Node.call.create(c.arean, .{ + .lhs = unwrap_expr, + .args = call_params, }); - _ = try appendToken(c, .Semicolon, ";"); + const return_expr = try Node.@"return".create(c.arean, call_expr); + const block = try Node.block_single.create(c.arean, return_expr); - const block = try ast.Node.Block.alloc(c.arena, 1); - block.* = .{ - .lbrace = block_lbrace, - .statements_len = 1, - .rbrace = try appendToken(c, .RBrace, "}"), - }; - block.statements()[0] = &return_expr.base; - - const fn_proto = try ast.Node.FnProto.create(c.arena, .{ - .params_len = fn_params.items.len, - .fn_token = fn_tok, + return Node.pub_inline_fn.create(c.arena, .{ + .name = name, + .params = try 
c.arena.dupe(ast.Node.Param, fn_params.items), .return_type = proto_alias.return_type, - }, .{ - .visib_token = pub_tok, - .name_token = name_tok, - .body_node = &block.base, - .callconv_expr = callconv_expr, + .body = block, }); - mem.copy(ast.Node.FnProto.ParamDecl, fn_proto.params(), fn_params.items); - return &fn_proto.base; } fn transCreateNodeShiftOp( @@ -4108,27 +4070,13 @@ fn transPreprocessorEntities(c: *Context, unit: *clang.ASTUnit) Error!void { fn transMacroDefine(c: *Context, m: *MacroCtx) ParseError!void { const scope = &c.global_scope.base; - const visib_tok = try appendToken(c, .Keyword_pub, "pub"); - const mut_tok = try appendToken(c, .Keyword_const, "const"); - const name_tok = try appendIdentifier(c, m.name); - const eq_token = try appendToken(c, .Equal, "="); - const init_node = try parseCExpr(c, m, scope); const last = m.next().?; if (last != .Eof and last != .Nl) return m.fail(c, "unable to translate C expr: unexpected token .{s}", .{@tagName(last)}); - const semicolon_token = try appendToken(c, .Semicolon, ";"); - const node = try ast.Node.VarDecl.create(c.arena, .{ - .name_token = name_tok, - .mut_token = mut_tok, - .semicolon_token = semicolon_token, - }, .{ - .visib_token = visib_tok, - .eq_token = eq_token, - .init_node = init_node, - }); - _ = try c.global_scope.macro_table.put(m.name, &node.base); + const var_decl = try Node.pub_var_simple.create(c.arena, .{ .name = m.name, .init = init_node }); + _ = try c.global_scope.macro_table.put(m.name, var_decl); } fn transMacroFnDefine(c: *Context, m: *MacroCtx) ParseError!void { @@ -4136,16 +4084,11 @@ fn transMacroFnDefine(c: *Context, m: *MacroCtx) ParseError!void { defer block_scope.deinit(); const scope = &block_scope.base; - const pub_tok = try appendToken(c, .Keyword_pub, "pub"); - const fn_tok = try appendToken(c, .Keyword_fn, "fn"); - const name_tok = try appendIdentifier(c, m.name); - _ = try appendToken(c, .LParen, "("); - if (m.next().? 
!= .LParen) { return m.fail(c, "unable to translate C expr: expected '('", .{}); } - var fn_params = std.ArrayList(ast.Node.FnProto.ParamDecl).init(c.gpa); + var fn_params = std.ArrayList(ast.Payload.Param).init(c.gpa); defer fn_params.deinit(); while (true) { @@ -4153,120 +4096,78 @@ fn transMacroFnDefine(c: *Context, m: *MacroCtx) ParseError!void { _ = m.next(); const mangled_name = try block_scope.makeMangledName(c, m.slice()); - const param_name_tok = try appendIdentifier(c, mangled_name); - _ = try appendToken(c, .Colon, ":"); - - const any_type = try c.arena.create(ast.Node.OneToken); - any_type.* = .{ - .base = .{ .tag = .AnyType }, - .token = try appendToken(c, .Keyword_anytype, "anytype"), - }; - - (try fn_params.addOne()).* = .{ - .doc_comments = null, - .comptime_token = null, - .noalias_token = null, - .name_token = param_name_tok, - .param_type = .{ .any_type = &any_type.base }, - }; + try fn_params.append(.{ + .is_noalias = false, + .name = mangled_name, + .type = Node.@"anytype".init(), + }); if (m.peek().? != .Comma) break; _ = m.next(); - _ = try appendToken(c, .Comma, ","); } if (m.next().? 
!= .RParen) { return m.fail(c, "unable to translate C expr: expected ')'", .{}); } - _ = try appendToken(c, .RParen, ")"); - - _ = try appendToken(c, .Keyword_callconv, "callconv"); - _ = try appendToken(c, .LParen, "("); - const callconv_expr = try transCreateNodeEnumLiteral(c, "Inline"); - _ = try appendToken(c, .RParen, ")"); - - const type_of = try c.createBuiltinCall("@TypeOf", 1); - - const return_kw = try appendToken(c, .Keyword_return, "return"); const expr = try parseCExpr(c, m, scope); const last = m.next().?; if (last != .Eof and last != .Nl) return m.fail(c, "unable to translate C expr: unexpected token .{s}", .{@tagName(last)}); - _ = try appendToken(c, .Semicolon, ";"); - const type_of_arg = if (!expr.tag.isBlock()) expr else blk: { - const stmts = expr.blockStatements(); + + const typeof_arg = if (expr.castTag(.block)) |some| blk: { + const stmts = some.data.stmts; const blk_last = stmts[stmts.len - 1]; - const br = blk_last.cast(ast.Node.ControlFlowExpression).?; - break :blk br.getRHS().?; - }; - type_of.params()[0] = type_of_arg; - type_of.rparen_token = try appendToken(c, .RParen, ")"); - const return_expr = try ast.Node.ControlFlowExpression.create(c.arena, .{ - .ltoken = return_kw, - .tag = .Return, - }, .{ - .rhs = expr, - }); - + const br = blk_last.castTag(.break_val).?; + break :blk br.data; + } else expr; + const typeof = try Node.typeof.create(c.arean, typeof_arg); + const return_expr = try Node.@"return".create(c.arena, expr); try block_scope.statements.append(&return_expr.base); - const block_node = try block_scope.complete(c); - const fn_proto = try ast.Node.FnProto.create(c.arena, .{ - .fn_token = fn_tok, - .params_len = fn_params.items.len, - .return_type = .{ .Explicit = &type_of.base }, - }, .{ - .visib_token = pub_tok, - .name_token = name_tok, - .body_node = block_node, - .callconv_expr = callconv_expr, + + const fn_decl = try Node.pub_inline_fn.create(c.arena, .{ + .name = m.name, + .params = try c.arena.dupe(ast.Payload.Param, 
fn_params.items), + .return_type = typeof, + .body = try block_scope.complete(c), }); - mem.copy(ast.Node.FnProto.ParamDecl, fn_proto.params(), fn_params.items); - _ = try c.global_scope.macro_table.put(m.name, &fn_proto.base); } const ParseError = Error || error{ParseError}; -fn parseCExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Node { +fn parseCExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { // TODO parseCAssignExpr here const node = try parseCCondExpr(c, m, scope); if (m.next().? != .Comma) { m.i -= 1; return node; } - _ = try appendToken(c, .Semicolon, ";"); var block_scope = try Scope.Block.init(c, scope, true); defer block_scope.deinit(); var last = node; while (true) { // suppress result - const lhs = try transCreateNodeIdentifier(c, "_"); - const op_token = try appendToken(c, .Equal, "="); - const op_node = try c.arena.create(ast.Node.SimpleInfixOp); - op_node.* = .{ - .base = .{ .tag = .Assign }, - .op_token = op_token, - .lhs = lhs, - .rhs = last, - }; - try block_scope.statements.append(&op_node.base); + const ignore = try Node.ignore.create(c.arena, last); + try block_scope.statements.append(ignore); last = try parseCCondExpr(c, m, scope); - _ = try appendToken(c, .Semicolon, ";"); if (m.next().? 
!= .Comma) { m.i -= 1; break; } } - const break_node = try transCreateNodeBreak(c, block_scope.label, last); - try block_scope.statements.append(&break_node.base); + const break_node = try Node.break_val.create(c.arena, .{ + .label = block_scope.label, + .val = last, + }); + try block_scope.statements.append(break_node); return try block_scope.complete(c); } -fn parseCNumLit(c: *Context, m: *MacroCtx) ParseError!*ast.Node { +fn parseCNumLit(c: *Context, m: *MacroCtx) ParseError!Node { var lit_bytes = m.slice(); switch (m.list[m.i].id) { @@ -4286,11 +4187,10 @@ fn parseCNumLit(c: *Context, m: *MacroCtx) ParseError!*ast.Node { } if (suffix == .none) { - return transCreateNodeInt(c, lit_bytes); + return transCreateNodeNumber(c, lit_bytes); } - const cast_node = try c.createBuiltinCall("@as", 2); - cast_node.params()[0] = try transCreateNodeIdentifier(c, switch (suffix) { + const type_node = try Node.type.create(c.arena, switch (suffix) { .u => "c_uint", .l => "c_long", .lu => "c_ulong", @@ -4304,27 +4204,22 @@ fn parseCNumLit(c: *Context, m: *MacroCtx) ParseError!*ast.Node { .llu => 3, else => unreachable, }]; - _ = try appendToken(c, .Comma, ","); - cast_node.params()[1] = try transCreateNodeInt(c, lit_bytes); - cast_node.rparen_token = try appendToken(c, .RParen, ")"); - return &cast_node.base; + const rhs = try transCreateNodeNumber(c, lit_bytes); + return Node.as.create(c.arena, .{ .lhs = type_node, .rhs = rhs }); }, .FloatLiteral => |suffix| { if (lit_bytes[0] == '.') lit_bytes = try std.fmt.allocPrint(c.arena, "0{s}", .{lit_bytes}); if (suffix == .none) { - return transCreateNodeFloat(c, lit_bytes); + return transCreateNodeNumber(c, lit_bytes); } - const cast_node = try c.createBuiltinCall("@as", 2); - cast_node.params()[0] = try transCreateNodeIdentifier(c, switch (suffix) { + const type_node = try Node.type.create(c.arena, switch (suffix) { .f => "f32", .l => "c_longdouble", else => unreachable, }); - _ = try appendToken(c, .Comma, ","); - 
cast_node.params()[1] = try transCreateNodeFloat(c, lit_bytes[0 .. lit_bytes.len - 1]); - cast_node.rparen_token = try appendToken(c, .RParen, ")"); - return &cast_node.base; + const rhs = try transCreateNodeNumber(c, lit_bytes[0 .. lit_bytes.len - 1]); + return Node.as.create(c.arena, .{ .lhs = type_node, .rhs = rhs }); }, else => unreachable, } @@ -4490,79 +4385,62 @@ fn zigifyEscapeSequences(ctx: *Context, m: *MacroCtx) ![]const u8 { return bytes[0..i]; } -fn parseCPrimaryExprInner(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Node { +fn parseCPrimaryExprInner(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { const tok = m.next().?; const slice = m.slice(); switch (tok) { .CharLiteral => { if (slice[0] != '\'' or slice[1] == '\\' or slice.len == 3) { - const token = try appendToken(c, .CharLiteral, try zigifyEscapeSequences(c, m)); - const node = try c.arena.create(ast.Node.OneToken); - node.* = .{ - .base = .{ .tag = .CharLiteral }, - .token = token, - }; - return &node.base; + return Node.char_literal.create(c.arena, try zigifyEscapeSequences(c, m)); } else { - const token = try appendTokenFmt(c, .IntegerLiteral, "0x{x}", .{slice[1 .. slice.len - 1]}); - const node = try c.arena.create(ast.Node.OneToken); - node.* = .{ - .base = .{ .tag = .IntegerLiteral }, - .token = token, - }; - return &node.base; + const str = try std.fmt.allocPrint(c.arena, "0x{x}", .{slice[1 .. 
slice.len - 1]}); + return Node.int_literal.create(c.arena, str); } }, .StringLiteral => { - const token = try appendToken(c, .StringLiteral, try zigifyEscapeSequences(c, m)); - const node = try c.arena.create(ast.Node.OneToken); - node.* = .{ - .base = .{ .tag = .StringLiteral }, - .token = token, - }; - return &node.base; + return Node.string_literal.create(c.arena, try zigifyEscapeSequences(c, m)); }, .IntegerLiteral, .FloatLiteral => { return parseCNumLit(c, m); }, // eventually this will be replaced by std.c.parse which will handle these correctly - .Keyword_void => return transCreateNodeIdentifierUnchecked(c, "c_void"), - .Keyword_bool => return transCreateNodeIdentifierUnchecked(c, "bool"), - .Keyword_double => return transCreateNodeIdentifierUnchecked(c, "f64"), - .Keyword_long => return transCreateNodeIdentifierUnchecked(c, "c_long"), - .Keyword_int => return transCreateNodeIdentifierUnchecked(c, "c_int"), - .Keyword_float => return transCreateNodeIdentifierUnchecked(c, "f32"), - .Keyword_short => return transCreateNodeIdentifierUnchecked(c, "c_short"), - .Keyword_char => return transCreateNodeIdentifierUnchecked(c, "u8"), + .Keyword_void => return Node.type.create(c.arena, "c_void"), + .Keyword_bool => return Node.type.create(c.arena, "bool"), + .Keyword_double => return Node.type.create(c.arena, "f64"), + .Keyword_long => return Node.type.create(c.arena, "c_long"), + .Keyword_int => return Node.type.create(c.arena, "c_int"), + .Keyword_float => return Node.type.create(c.arena, "f32"), + .Keyword_short => return Node.type.create(c.arena, "c_short"), + .Keyword_char => return Node.type.create(c.arena, "u8"), .Keyword_unsigned => if (m.next()) |t| switch (t) { - .Keyword_char => return transCreateNodeIdentifierUnchecked(c, "u8"), - .Keyword_short => return transCreateNodeIdentifierUnchecked(c, "c_ushort"), - .Keyword_int => return transCreateNodeIdentifierUnchecked(c, "c_uint"), + .Keyword_char => return Node.type.create(c.arena, "u8"), + .Keyword_short => 
return Node.type.create(c.arena, "c_ushort"), + .Keyword_int => return Node.type.create(c.arena, "c_uint"), .Keyword_long => if (m.peek() != null and m.peek().? == .Keyword_long) { _ = m.next(); - return transCreateNodeIdentifierUnchecked(c, "c_ulonglong"); - } else return transCreateNodeIdentifierUnchecked(c, "c_ulong"), + return Node.type.create(c.arena, "c_ulonglong"); + } else return Node.type.create(c.arena, "c_ulong"), else => { m.i -= 1; - return transCreateNodeIdentifierUnchecked(c, "c_uint"); + return Node.type.create(c.arena, "c_uint"); }, } else { - return transCreateNodeIdentifierUnchecked(c, "c_uint"); + return Node.type.create(c.arena, "c_uint"); }, .Keyword_signed => if (m.next()) |t| switch (t) { - .Keyword_char => return transCreateNodeIdentifierUnchecked(c, "i8"), - .Keyword_short => return transCreateNodeIdentifierUnchecked(c, "c_short"), - .Keyword_int => return transCreateNodeIdentifierUnchecked(c, "c_int"), + .Keyword_char => return Node.type.create(c.arena, "i8"), + .Keyword_short => return Node.type.create(c.arena, "c_short"), + .Keyword_int => return Node.type.create(c.arena, "c_int"), .Keyword_long => if (m.peek() != null and m.peek().? 
== .Keyword_long) { _ = m.next(); - return transCreateNodeIdentifierUnchecked(c, "c_longlong"); - } else return transCreateNodeIdentifierUnchecked(c, "c_long"), + return Node.type.create(c.arena, "c_longlong"); + } else return Node.type.create(c.arena, "c_long"), else => { m.i -= 1; - return transCreateNodeIdentifierUnchecked(c, "c_int"); + return Node.type.create(c.arena, "c_int"); }, } else { - return transCreateNodeIdentifierUnchecked(c, "c_int"); + return Node.type.create(c.arena, "c_int"); }, .Keyword_enum, .Keyword_struct, .Keyword_union => { // struct Foo will be declared as struct_Foo by transRecordDecl @@ -4572,17 +4450,12 @@ fn parseCPrimaryExprInner(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!* return error.ParseError; } - const ident_token = try appendTokenFmt(c, .Identifier, "{s}_{s}", .{ slice, m.slice() }); - const identifier = try c.arena.create(ast.Node.OneToken); - identifier.* = .{ - .base = .{ .tag = .Identifier }, - .token = ident_token, - }; - return &identifier.base; + const name = try std.fmt.allocPrint(c.arena, "{s}_{s}", .{ slice, m.slice() }); + return Node.identifier.create(c.arena, name); }, .Identifier => { const mangled_name = scope.getAlias(slice); - return transCreateNodeIdentifier(c, checkForBuiltinTypedef(mangled_name) orelse mangled_name); + return Node.identifier.create(c.arena, builtin_typedef_map.get(mangled_name) orelse mangled_name); }, .LParen => { const inner_node = try parseCExpr(c, m, scope); @@ -4612,10 +4485,6 @@ fn parseCPrimaryExprInner(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!* }, else => return inner_node, } - - // hack to get zig fmt to render a comma in builtin calls - _ = try appendToken(c, .Comma, ","); - const node_to_cast = try parseCExpr(c, m, scope); if (saw_l_paren and m.next().? 
!= .RParen) { @@ -4623,28 +4492,7 @@ fn parseCPrimaryExprInner(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!* return error.ParseError; } - const lparen = try appendToken(c, .LParen, "("); - - //(@import("std").meta.cast(dest, x)) - const import_fn_call = try c.createBuiltinCall("@import", 1); - const std_node = try transCreateNodeStringLiteral(c, "\"std\""); - import_fn_call.params()[0] = std_node; - import_fn_call.rparen_token = try appendToken(c, .RParen, ")"); - const inner_field_access = try transCreateNodeFieldAccess(c, &import_fn_call.base, "meta"); - const outer_field_access = try transCreateNodeFieldAccess(c, inner_field_access, "cast"); - - const cast_fn_call = try c.createCall(outer_field_access, 2); - cast_fn_call.params()[0] = inner_node; - cast_fn_call.params()[1] = node_to_cast; - cast_fn_call.rtoken = try appendToken(c, .RParen, ")"); - - const group_node = try c.arena.create(ast.Node.GroupedExpression); - group_node.* = .{ - .lparen = lparen, - .expr = &cast_fn_call.base, - .rparen = try appendToken(c, .RParen, ")"), - }; - return &group_node.base; + return Node.std_meta_cast.create(c.arena, .{ .lhs = inner_node, .rhs = node_to_cast }); }, else => { try m.fail(c, "unable to translate C expr: unexpected token .{s}", .{@tagName(tok)}); @@ -4653,447 +4501,255 @@ fn parseCPrimaryExprInner(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!* } } -fn parseCPrimaryExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Node { +fn parseCPrimaryExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { var node = try parseCPrimaryExprInner(c, m, scope); // In C the preprocessor would handle concatting strings while expanding macros. // This should do approximately the same by concatting any strings and identifiers // after a primary expression. while (true) { - var op_token: ast.TokenIndex = undefined; - var op_id: ast.Node.Tag = undefined; switch (m.peek().?) 
{ .StringLiteral, .Identifier => {}, else => break, } - const op_node = try c.arena.create(ast.Node.SimpleInfixOp); - op_node.* = .{ - .base = .{ .tag = .ArrayCat }, - .op_token = try appendToken(c, .PlusPlus, "++"), - .lhs = node, - .rhs = try parseCPrimaryExprInner(c, m, scope), - }; - node = &op_node.base; + node = try Node.array_cat.create(c.arena, .{ .lhs = node, .rhs = try parseCPrimaryExprInner(c, m, scope) }); } return node; } -fn nodeIsInfixOp(tag: ast.Node.Tag) bool { - return switch (tag) { - .Add, - .AddWrap, - .ArrayCat, - .ArrayMult, - .Assign, - .AssignBitAnd, - .AssignBitOr, - .AssignBitShiftLeft, - .AssignBitShiftRight, - .AssignBitXor, - .AssignDiv, - .AssignSub, - .AssignSubWrap, - .AssignMod, - .AssignAdd, - .AssignAddWrap, - .AssignMul, - .AssignMulWrap, - .BangEqual, - .BitAnd, - .BitOr, - .BitShiftLeft, - .BitShiftRight, - .BitXor, - .BoolAnd, - .BoolOr, - .Div, - .EqualEqual, - .ErrorUnion, - .GreaterOrEqual, - .GreaterThan, - .LessOrEqual, - .LessThan, - .MergeErrorSets, - .Mod, - .Mul, - .MulWrap, - .Period, - .Range, - .Sub, - .SubWrap, - .UnwrapOptional, - .Catch, - => true, - - else => false, - }; -} - -fn macroBoolToInt(c: *Context, node: *ast.Node) !*ast.Node { +fn macroBoolToInt(c: *Context, node: Node) !Node { if (!isBoolRes(node)) { - if (!nodeIsInfixOp(node.tag)) return node; - - const group_node = try c.arena.create(ast.Node.GroupedExpression); - group_node.* = .{ - .lparen = try appendToken(c, .LParen, "("), - .expr = node, - .rparen = try appendToken(c, .RParen, ")"), - }; - return &group_node.base; + return node; } - const builtin_node = try c.createBuiltinCall("@boolToInt", 1); - builtin_node.params()[0] = node; - builtin_node.rparen_token = try appendToken(c, .RParen, ")"); - return &builtin_node.base; + return Node.bool_to_int.create(c.arena, node); } -fn macroIntToBool(c: *Context, node: *ast.Node) !*ast.Node { +fn macroIntToBool(c: *Context, node: Node) !Node { if (isBoolRes(node)) { - if (!nodeIsInfixOp(node.tag)) return 
node; - - const group_node = try c.arena.create(ast.Node.GroupedExpression); - group_node.* = .{ - .lparen = try appendToken(c, .LParen, "("), - .expr = node, - .rparen = try appendToken(c, .RParen, ")"), - }; - return &group_node.base; + return node; } - const op_token = try appendToken(c, .BangEqual, "!="); - const zero = try transCreateNodeInt(c, 0); - const res = try c.arena.create(ast.Node.SimpleInfixOp); - res.* = .{ - .base = .{ .tag = .BangEqual }, - .op_token = op_token, - .lhs = node, - .rhs = zero, - }; - const group_node = try c.arena.create(ast.Node.GroupedExpression); - group_node.* = .{ - .lparen = try appendToken(c, .LParen, "("), - .expr = &res.base, - .rparen = try appendToken(c, .RParen, ")"), - }; - return &group_node.base; + return Node.not_equal.create(c.arena, .{ .lhs = node, .rhs = Node.zero_literal.init() }); } -fn macroGroup(c: *Context, node: *ast.Node) !*ast.Node { - if (!nodeIsInfixOp(node.tag)) return node; - - const group_node = try c.arena.create(ast.Node.GroupedExpression); - group_node.* = .{ - .lparen = try appendToken(c, .LParen, "("), - .expr = node, - .rparen = try appendToken(c, .RParen, ")"), - }; - return &group_node.base; -} - -fn parseCCondExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Node { +fn parseCCondExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { const node = try parseCOrExpr(c, m, scope); if (m.peek().? != .QuestionMark) { return node; } _ = m.next(); - // must come immediately after expr - _ = try appendToken(c, .RParen, ")"); - const if_node = try transCreateNodeIf(c); - if_node.condition = node; - if_node.body = try parseCOrExpr(c, m, scope); + const then_body = try parseCOrExpr(c, m, scope); if (m.next().? 
!= .Colon) { try m.fail(c, "unable to translate C expr: expected ':'", .{}); return error.ParseError; } - if_node.@"else" = try transCreateNodeElse(c); - if_node.@"else".?.body = try parseCCondExpr(c, m, scope); - return &if_node.base; + const else_body = try parseCCondExpr(c, m, scope); + return Node.@"if".create(c.arena, .{ .cond = node, .then = then_body, .@"else" = else_body }); } -fn parseCOrExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Node { +fn parseCOrExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { var node = try parseCAndExpr(c, m, scope); while (m.next().? == .PipePipe) { - const lhs_node = try macroIntToBool(c, node); - const op_token = try appendToken(c, .Keyword_or, "or"); - const rhs_node = try parseCAndExpr(c, m, scope); - const op_node = try c.arena.create(ast.Node.SimpleInfixOp); - op_node.* = .{ - .base = .{ .tag = .BoolOr }, - .op_token = op_token, - .lhs = lhs_node, - .rhs = try macroIntToBool(c, rhs_node), - }; - node = &op_node.base; + const lhs = try macroIntToBool(c, node); + const rhs = try macroIntToBool(c, try parseCAndExpr(c, m, scope)); + node = try Node.@"or".create(c.arena, .{ .lhs = lhs, .rhs = rhs }); } m.i -= 1; return node; } -fn parseCAndExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Node { +fn parseCAndExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { var node = try parseCBitOrExpr(c, m, scope); while (m.next().? 
== .AmpersandAmpersand) { - const lhs_node = try macroIntToBool(c, node); - const op_token = try appendToken(c, .Keyword_and, "and"); - const rhs_node = try parseCBitOrExpr(c, m, scope); - const op_node = try c.arena.create(ast.Node.SimpleInfixOp); - op_node.* = .{ - .base = .{ .tag = .BoolAnd }, - .op_token = op_token, - .lhs = lhs_node, - .rhs = try macroIntToBool(c, rhs_node), - }; - node = &op_node.base; + const lhs = try macroIntToBool(c, node); + const rhs = try macroIntToBool(c, try parseCBitOrExpr(c, m, scope)); + node = try Node.@"and".create(c.arena, .{ .lhs = lhs, .rhs = rhs }); } m.i -= 1; return node; } -fn parseCBitOrExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Node { +fn parseCBitOrExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { var node = try parseCBitXorExpr(c, m, scope); while (m.next().? == .Pipe) { - const lhs_node = try macroBoolToInt(c, node); - const op_token = try appendToken(c, .Pipe, "|"); - const rhs_node = try parseCBitXorExpr(c, m, scope); - const op_node = try c.arena.create(ast.Node.SimpleInfixOp); - op_node.* = .{ - .base = .{ .tag = .BitOr }, - .op_token = op_token, - .lhs = lhs_node, - .rhs = try macroBoolToInt(c, rhs_node), - }; - node = &op_node.base; + const lhs = try macroBoolToInt(c, node); + const rhs = try macroBoolToInt(c, try parseCBitXorExpr(c, m, scope)); + node = try Node.bit_or.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); } m.i -= 1; return node; } -fn parseCBitXorExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Node { +fn parseCBitXorExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { var node = try parseCBitAndExpr(c, m, scope); while (m.next().? 
== .Caret) { - const lhs_node = try macroBoolToInt(c, node); - const op_token = try appendToken(c, .Caret, "^"); - const rhs_node = try parseCBitAndExpr(c, m, scope); - const op_node = try c.arena.create(ast.Node.SimpleInfixOp); - op_node.* = .{ - .base = .{ .tag = .BitXor }, - .op_token = op_token, - .lhs = lhs_node, - .rhs = try macroBoolToInt(c, rhs_node), - }; - node = &op_node.base; + const lhs = try macroBoolToInt(c, node); + const rhs = try macroBoolToInt(c, try parseCBitAndExpr(c, m, scope)); + node = try Node.bit_xor.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); } m.i -= 1; return node; } -fn parseCBitAndExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Node { +fn parseCBitAndExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { var node = try parseCEqExpr(c, m, scope); while (m.next().? == .Ampersand) { - const lhs_node = try macroBoolToInt(c, node); - const op_token = try appendToken(c, .Ampersand, "&"); - const rhs_node = try parseCEqExpr(c, m, scope); - const op_node = try c.arena.create(ast.Node.SimpleInfixOp); - op_node.* = .{ - .base = .{ .tag = .BitAnd }, - .op_token = op_token, - .lhs = lhs_node, - .rhs = try macroBoolToInt(c, rhs_node), - }; - node = &op_node.base; + const lhs = try macroBoolToInt(c, node); + const rhs = try macroBoolToInt(c, try parseCEqExpr(c, m, scope)); + node = try Node.bit_and.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); } m.i -= 1; return node; } -fn parseCEqExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Node { +fn parseCEqExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { var node = try parseCRelExpr(c, m, scope); while (true) { - var op_token: ast.TokenIndex = undefined; - var op_id: ast.Node.Tag = undefined; switch (m.peek().?) 
{ .BangEqual => { - op_token = try appendToken(c, .BangEqual, "!="); - op_id = .BangEqual; + _ = m.next(); + const lhs = try macroBoolToInt(c, node); + const rhs = try macroBoolToInt(c, try parseCRelExpr(c, m, scope)); + node = try Node.not_equal.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); }, .EqualEqual => { - op_token = try appendToken(c, .EqualEqual, "=="); - op_id = .EqualEqual; + _ = m.next(); + const lhs = try macroBoolToInt(c, node); + const rhs = try macroBoolToInt(c, try parseCRelExpr(c, m, scope)); + node = try Node.equal.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); }, else => return node, } - _ = m.next(); - const lhs_node = try macroBoolToInt(c, node); - const rhs_node = try parseCRelExpr(c, m, scope); - const op_node = try c.arena.create(ast.Node.SimpleInfixOp); - op_node.* = .{ - .base = .{ .tag = op_id }, - .op_token = op_token, - .lhs = lhs_node, - .rhs = try macroBoolToInt(c, rhs_node), - }; - node = &op_node.base; } } -fn parseCRelExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Node { +fn parseCRelExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { var node = try parseCShiftExpr(c, m, scope); while (true) { - var op_token: ast.TokenIndex = undefined; - var op_id: ast.Node.Tag = undefined; switch (m.peek().?) 
{ .AngleBracketRight => { - op_token = try appendToken(c, .AngleBracketRight, ">"); - op_id = .GreaterThan; + _ = m.next(); + const lhs = try macroBoolToInt(c, node); + const rhs = try macroBoolToInt(c, try parseCShiftExpr(c, m, scope)); + node = try Node.greater_than.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); }, .AngleBracketRightEqual => { - op_token = try appendToken(c, .AngleBracketRightEqual, ">="); - op_id = .GreaterOrEqual; + _ = m.next(); + const lhs = try macroBoolToInt(c, node); + const rhs = try macroBoolToInt(c, try parseCShiftExpr(c, m, scope)); + node = try Node.greater_than_equal.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); }, .AngleBracketLeft => { - op_token = try appendToken(c, .AngleBracketLeft, "<"); - op_id = .LessThan; + _ = m.next(); + const lhs = try macroBoolToInt(c, node); + const rhs = try macroBoolToInt(c, try parseCShiftExpr(c, m, scope)); + node = try Node.less_than.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); }, .AngleBracketLeftEqual => { - op_token = try appendToken(c, .AngleBracketLeftEqual, "<="); - op_id = .LessOrEqual; + _ = m.next(); + const lhs = try macroBoolToInt(c, node); + const rhs = try macroBoolToInt(c, try parseCShiftExpr(c, m, scope)); + node = try Node.less_than_equal.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); }, else => return node, } - _ = m.next(); - const lhs_node = try macroBoolToInt(c, node); - const rhs_node = try parseCShiftExpr(c, m, scope); - const op_node = try c.arena.create(ast.Node.SimpleInfixOp); - op_node.* = .{ - .base = .{ .tag = op_id }, - .op_token = op_token, - .lhs = lhs_node, - .rhs = try macroBoolToInt(c, rhs_node), - }; - node = &op_node.base; } } -fn parseCShiftExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Node { +fn parseCShiftExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { var node = try parseCAddSubExpr(c, m, scope); while (true) { - var op_token: ast.TokenIndex = undefined; - var op_id: ast.Node.Tag = undefined; switch (m.peek().?) 
{ .AngleBracketAngleBracketLeft => { - op_token = try appendToken(c, .AngleBracketAngleBracketLeft, "<<"); - op_id = .BitShiftLeft; + _ = m.next(); + const lhs = try macroBoolToInt(c, node); + const rhs = try macroBoolToInt(c, try parseCAddSubExpr(c, m, scope)); + node = try Node.shl.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); }, .AngleBracketAngleBracketRight => { - op_token = try appendToken(c, .AngleBracketAngleBracketRight, ">>"); - op_id = .BitShiftRight; + _ = m.next(); + const lhs = try macroBoolToInt(c, node); + const rhs = try macroBoolToInt(c, try parseCAddSubExpr(c, m, scope)); + node = try Node.shr.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); }, else => return node, } - _ = m.next(); - const lhs_node = try macroBoolToInt(c, node); - const rhs_node = try parseCAddSubExpr(c, m, scope); - const op_node = try c.arena.create(ast.Node.SimpleInfixOp); - op_node.* = .{ - .base = .{ .tag = op_id }, - .op_token = op_token, - .lhs = lhs_node, - .rhs = try macroBoolToInt(c, rhs_node), - }; - node = &op_node.base; } } -fn parseCAddSubExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Node { +fn parseCAddSubExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { var node = try parseCMulExpr(c, m, scope); while (true) { - var op_token: ast.TokenIndex = undefined; - var op_id: ast.Node.Tag = undefined; switch (m.peek().?) 
{ .Plus => { - op_token = try appendToken(c, .Plus, "+"); - op_id = .Add; + _ = m.next(); + const lhs = try macroBoolToInt(c, node); + const rhs = try macroBoolToInt(c, try parseCMulExpr(c, m, scope)); + node = try Node.add.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); }, .Minus => { - op_token = try appendToken(c, .Minus, "-"); - op_id = .Sub; + _ = m.next(); + const lhs = try macroBoolToInt(c, node); + const rhs = try macroBoolToInt(c, try parseCMulExpr(c, m, scope)); + node = try Node.sub.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); }, else => return node, } - _ = m.next(); - const lhs_node = try macroBoolToInt(c, node); - const rhs_node = try parseCMulExpr(c, m, scope); - const op_node = try c.arena.create(ast.Node.SimpleInfixOp); - op_node.* = .{ - .base = .{ .tag = op_id }, - .op_token = op_token, - .lhs = lhs_node, - .rhs = try macroBoolToInt(c, rhs_node), - }; - node = &op_node.base; } } -fn parseCMulExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Node { +fn parseCMulExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { var node = try parseCUnaryExpr(c, m, scope); while (true) { - var op_token: ast.TokenIndex = undefined; - var op_id: ast.Node.Tag = undefined; switch (m.next().?) { .Asterisk => { if (m.peek().? 
== .RParen) { // type *) - // hack to get zig fmt to render a comma in builtin calls - _ = try appendToken(c, .Comma, ","); - // last token of `node` const prev_id = m.list[m.i - 1].id; if (prev_id == .Keyword_void) { - const ptr = try transCreateNodePtrType(c, false, false, .Asterisk); - ptr.rhs = node; - const optional_node = try transCreateNodeSimplePrefixOp(c, .OptionalType, .QuestionMark, "?"); - optional_node.rhs = &ptr.base; - return &optional_node.base; + const ptr = try Node.single_pointer.create(c.arena, .{ + .is_const = false, + .is_volatile = false, + .elem_type = node, + }); + return Node.optional_type.create(c.arena, ptr); } else { - const ptr = try transCreateNodePtrType(c, false, false, Token.Id.Identifier); - ptr.rhs = node; - return &ptr.base; + return Node.c_pointer.create(c.arena, .{ + .is_const = false, + .is_volatile = false, + .elem_type = node, + }); } } else { // expr * expr - op_token = try appendToken(c, .Asterisk, "*"); - op_id = .BitShiftLeft; + const lhs = try macroBoolToInt(c, node); + const rhs = try macroBoolToInt(c, try parseCUnaryExpr(c, m, scope)); + node = try Node.mul.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); } }, .Slash => { - op_id = .Div; - op_token = try appendToken(c, .Slash, "/"); + const lhs = try macroBoolToInt(c, node); + const rhs = try macroBoolToInt(c, try parseCUnaryExpr(c, m, scope)); + node = try Node.div.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); }, .Percent => { - op_id = .Mod; - op_token = try appendToken(c, .Percent, "%"); + const lhs = try macroBoolToInt(c, node); + const rhs = try macroBoolToInt(c, try parseCUnaryExpr(c, m, scope)); + node = try Node.mod.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); }, else => { m.i -= 1; return node; }, } - const lhs_node = try macroBoolToInt(c, node); - const rhs_node = try parseCUnaryExpr(c, m, scope); - const op_node = try c.arena.create(ast.Node.SimpleInfixOp); - op_node.* = .{ - .base = .{ .tag = op_id }, - .op_token = op_token, - .lhs = lhs_node, - .rhs = try 
macroBoolToInt(c, rhs_node), - }; - node = &op_node.base; } } -fn parseCPostfixExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Node { +fn parseCPostfixExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { var node = try parseCPrimaryExpr(c, m, scope); while (true) { switch (m.next().?) { @@ -5103,38 +4759,31 @@ fn parseCPostfixExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.N return error.ParseError; } - node = try transCreateNodeFieldAccess(c, node, m.slice()); - continue; + const ident = try Node.identifier.create(c.arena, m.slice()); + node = try Node.field_access.create(c.arena, .{ .lhs = node, .rhs = ident }); }, .Arrow => { if (m.next().? != .Identifier) { try m.fail(c, "unable to translate C expr: expected identifier", .{}); return error.ParseError; } - const deref = try transCreateNodePtrDeref(c, node); - node = try transCreateNodeFieldAccess(c, deref, m.slice()); - continue; + + const deref = try Node.deref.create(c.arena, node); + const ident = try Node.identifier.create(c.arena, m.slice()); + node = try Node.field_access.create(c.arena, .{ .lhs = deref, .rhs = ident }); }, .LBracket => { - const arr_node = try transCreateNodeArrayAccess(c, node); - arr_node.index_expr = try parseCExpr(c, m, scope); - arr_node.rtoken = try appendToken(c, .RBracket, "]"); - node = &arr_node.base; - if (m.next().? != .RBracket) { - try m.fail(c, "unable to translate C expr: expected ']'", .{}); - return error.ParseError; - } - continue; + const index = try macroBoolToInt(c, try parseCExpr(c, m, scope)); + node = try Node.array_access.create(c.arena, .{ .lhs = node, .rhs = index }); }, .LParen => { - _ = try appendToken(c, .LParen, "("); - var call_params = std.ArrayList(*ast.Node).init(c.gpa); + var call_params = std.ArrayList(Node).init(c.gpa); defer call_params.deinit(); while (true) { const arg = try parseCCondExpr(c, m, scope); try call_params.append(arg); switch (m.next().?) 
{ - .Comma => _ = try appendToken(c, .Comma, ","), + .Comma => {}, .RParen => break, else => { try m.fail(c, "unable to translate C expr: expected ',' or ')'", .{}); @@ -5142,32 +4791,17 @@ fn parseCPostfixExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.N }, } } - const call_node = try ast.Node.Call.alloc(c.arena, call_params.items.len); - call_node.* = .{ - .lhs = node, - .params_len = call_params.items.len, - .async_token = null, - .rtoken = try appendToken(c, .RParen, ")"), - }; - mem.copy(*ast.Node, call_node.params(), call_params.items); - node = &call_node.base; - continue; + node = try Node.call.create(c.arena, .{ .lhs = node, .rhs = try c.arena.dupe(Node, call_params.items) }); }, .LBrace => { - // must come immediately after `node` - _ = try appendToken(c, .Comma, ","); - - const dot = try appendToken(c, .Period, "."); - _ = try appendToken(c, .LBrace, "{"); - - var init_vals = std.ArrayList(*ast.Node).init(c.gpa); + var init_vals = std.ArrayList(Node).init(c.gpa); defer init_vals.deinit(); while (true) { const val = try parseCCondExpr(c, m, scope); try init_vals.append(val); switch (m.next().?) 
{ - .Comma => _ = try appendToken(c, .Comma, ","), + .Comma => {}, .RBrace => break, else => { try m.fail(c, "unable to translate C expr: expected ',' or '}}'", .{}); @@ -5175,29 +4809,8 @@ fn parseCPostfixExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.N }, } } - const tuple_node = try ast.Node.StructInitializerDot.alloc(c.arena, init_vals.items.len); - tuple_node.* = .{ - .dot = dot, - .list_len = init_vals.items.len, - .rtoken = try appendToken(c, .RBrace, "}"), - }; - mem.copy(*ast.Node, tuple_node.list(), init_vals.items); - - //(@import("std").mem.zeroInit(T, .{x})) - const import_fn_call = try c.createBuiltinCall("@import", 1); - const std_node = try transCreateNodeStringLiteral(c, "\"std\""); - import_fn_call.params()[0] = std_node; - import_fn_call.rparen_token = try appendToken(c, .RParen, ")"); - const inner_field_access = try transCreateNodeFieldAccess(c, &import_fn_call.base, "mem"); - const outer_field_access = try transCreateNodeFieldAccess(c, inner_field_access, "zeroInit"); - - const zero_init_call = try c.createCall(outer_field_access, 2); - zero_init_call.params()[0] = node; - zero_init_call.params()[1] = &tuple_node.base; - zero_init_call.rtoken = try appendToken(c, .RParen, ")"); - - node = &zero_init_call.base; - continue; + const tuple_node = try Node.tuple.create(c.arena, try c.arena.dupe(Node, init_vals.items)); + node = try Node.std_mem_zeroinit.create(c.arena, .{ .lhs = node, .rhs = tuple_node }); }, .PlusPlus, .MinusMinus => { try m.fail(c, "TODO postfix inc/dec expr", .{}); @@ -5211,35 +4824,31 @@ fn parseCPostfixExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.N } } -fn parseCUnaryExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Node { +fn parseCUnaryExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { switch (m.next().?) 
{ .Bang => { - const node = try transCreateNodeSimplePrefixOp(c, .BoolNot, .Bang, "!"); - node.rhs = try macroIntToBool(c, try parseCUnaryExpr(c, m, scope)); - return &node.base; + const operand = try macroIntToBool(c, try parseCUnaryExpr(c, m, scope)); + return Node.not.create(c.arena, operand); }, .Minus => { - const node = try transCreateNodeSimplePrefixOp(c, .Negation, .Minus, "-"); - node.rhs = try macroBoolToInt(c, try parseCUnaryExpr(c, m, scope)); - return &node.base; + const operand = try macroBoolToInt(c, try parseCUnaryExpr(c, m, scope)); + return Node.negate.create(c.arena, operand); }, .Plus => return try parseCUnaryExpr(c, m, scope), .Tilde => { - const node = try transCreateNodeSimplePrefixOp(c, .BitNot, .Tilde, "~"); - node.rhs = try macroBoolToInt(c, try parseCUnaryExpr(c, m, scope)); - return &node.base; + const operand = try macroBoolToInt(c, try parseCUnaryExpr(c, m, scope)); + return Node.bit_not.create(c.arena, operand); }, .Asterisk => { - const node = try macroGroup(c, try parseCUnaryExpr(c, m, scope)); - return try transCreateNodePtrDeref(c, node); + const operand = try parseCUnaryExpr(c, m, scope); + return Node.deref.create(c.arena, operand); }, .Ampersand => { - const node = try transCreateNodeSimplePrefixOp(c, .AddressOf, .Ampersand, "&"); - node.rhs = try macroGroup(c, try parseCUnaryExpr(c, m, scope)); - return &node.base; + const operand = try parseCUnaryExpr(c, m, scope); + return Node.address_of.create(c.arena, operand); }, .Keyword_sizeof => { - const inner = if (m.peek().? == .LParen) blk: { + const operand = if (m.peek().? == .LParen) blk: { _ = m.next(); // C grammar says this should be 'type-name' but we have to // use parseCMulExpr to correctly handle pointer types. 
@@ -5251,18 +4860,7 @@ fn parseCUnaryExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Nod break :blk inner; } else try parseCUnaryExpr(c, m, scope); - //(@import("std").meta.sizeof(dest, x)) - const import_fn_call = try c.createBuiltinCall("@import", 1); - const std_node = try transCreateNodeStringLiteral(c, "\"std\""); - import_fn_call.params()[0] = std_node; - import_fn_call.rparen_token = try appendToken(c, .RParen, ")"); - const inner_field_access = try transCreateNodeFieldAccess(c, &import_fn_call.base, "meta"); - const outer_field_access = try transCreateNodeFieldAccess(c, inner_field_access, "sizeof"); - - const sizeof_call = try c.createCall(outer_field_access, 1); - sizeof_call.params()[0] = inner; - sizeof_call.rtoken = try appendToken(c, .RParen, ")"); - return &sizeof_call.base; + return Node.std_meta_sizeof.create(c.arena, operand); }, .Keyword_alignof => { // TODO this won't work if using 's @@ -5273,16 +4871,13 @@ fn parseCUnaryExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Nod } // C grammar says this should be 'type-name' but we have to // use parseCMulExpr to correctly handle pointer types. - const inner = try parseCMulExpr(c, m, scope); + const operand = try parseCMulExpr(c, m, scope); if (m.next().? 
!= .RParen) { try m.fail(c, "unable to translate C expr: expected ')'", .{}); return error.ParseError; } - const builtin_call = try c.createBuiltinCall("@alignOf", 1); - builtin_call.params()[0] = inner; - builtin_call.rparen_token = try appendToken(c, .RParen, ")"); - return &builtin_call.base; + return Node.alignof.create(c.arena, operand); }, .PlusPlus, .MinusMinus => { try m.fail(c, "TODO unary inc/dec expr", .{}); @@ -5295,50 +4890,40 @@ fn parseCUnaryExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Nod } } -fn tokenSlice(c: *Context, token: ast.TokenIndex) []u8 { - const tok = c.token_locs.items[token]; - const slice = c.source_buffer.items[tok.start..tok.end]; - return if (mem.startsWith(u8, slice, "@\"")) - slice[2 .. slice.len - 1] - else - slice; -} - -fn getContainer(c: *Context, node: *ast.Node) ?*ast.Node { - switch (node.tag) { - .ContainerDecl, - .AddressOf, - .Await, - .BitNot, - .BoolNot, - .OptionalType, - .Negation, - .NegationWrap, - .Resume, - .Try, - .ArrayType, - .ArrayTypeSentinel, - .PtrType, - .SliceType, +fn getContainer(c: *Context, node: Node) ?Node { + switch (node.tag()) { + .@"union", + .@"struct", + .@"enum", + .address_of, + .bit_not, + .not, + .optional_type, + .negate, + .negate_wrap, + .array_type, + .c_pointer, + .single_pointer, => return node, - .Identifier => { - const ident = node.castTag(.Identifier).?; - if (c.global_scope.sym_table.get(tokenSlice(c, ident.token))) |value| { - if (value.cast(ast.Node.VarDecl)) |var_decl| - return getContainer(c, var_decl.getInitNode().?); + .identifier => { + const ident = node.castTag(.identifier).?; + if (c.global_scope.sym_table.get(ident.data)) |value| { + if (value.castTag(.var_decl)) |var_decl| + return getContainer(c, var_decl.data.init); + if (value.castTag(.var_simple) orelse value.castTag(.pub_var_simple)) |var_decl| + return getContainer(c, var_decl.data.init); } }, - .Period => { - const infix = node.castTag(.Period).?; + .field_access => { + const infix = 
node.castTag(.field_access).?; - if (getContainerTypeOf(c, infix.lhs)) |ty_node| { - if (ty_node.cast(ast.Node.ContainerDecl)) |container| { - for (container.fieldsAndDecls()) |field_ref| { - const field = field_ref.cast(ast.Node.ContainerField).?; - const ident = infix.rhs.castTag(.Identifier).?; - if (mem.eql(u8, tokenSlice(c, field.name_token), tokenSlice(c, ident.token))) { + if (getContainerTypeOf(c, infix.data.lhs)) |ty_node| { + if (ty_node.castTag(.@"struct") orelse ty_node.castTag(.@"union")) |container| { + for (container.data.fields) |field| { + const ident = infix.data.rhs.castTag(.identifier).?; + if (mem.eql(u8, field.data.name, ident.data)) { + return getContainer(c, field.type_expr.?); + } } } @@ -5351,22 +4936,20 @@ fn getContainer(c: *Context, node: *ast.Node) ?*ast.Node { return null; } -fn getContainerTypeOf(c: *Context, ref: *ast.Node) ?*ast.Node { - if (ref.castTag(.Identifier)) |ident| { - if (c.global_scope.sym_table.get(tokenSlice(c, ident.token))) |value| { - if (value.cast(ast.Node.VarDecl)) |var_decl| { - if (var_decl.getTypeNode()) |ty| - return getContainer(c, ty); +fn getContainerTypeOf(c: *Context, ref: Node) ?Node { + if (ref.castTag(.identifier)) |ident| { + if (c.global_scope.sym_table.get(ident.data)) |value| { + if (value.castTag(.var_decl)) |var_decl| { + return getContainer(c, var_decl.data.type); } } - } else if (ref.castTag(.Period)) |infix| { - if (getContainerTypeOf(c, infix.lhs)) |ty_node| { - if (ty_node.cast(ast.Node.ContainerDecl)) |container| { - for (container.fieldsAndDecls()) |field_ref| { - const field = field_ref.cast(ast.Node.ContainerField).?; - const ident = infix.rhs.castTag(.Identifier).?; - if (mem.eql(u8, tokenSlice(c, field.name_token), tokenSlice(c, ident.token))) { - return getContainer(c, field.type_expr.?); + } else if (ref.castTag(.field_access)) |infix| { + if (getContainerTypeOf(c, infix.data.lhs)) |ty_node| { + if (ty_node.castTag(.@"struct") orelse ty_node.castTag(.@"union")) |container| { + for 
(container.data.fields) |field| { + const ident = infix.data.rhs.castTag(.identifier).?; + if (mem.eql(u8, field.name, ident.data)) { + return getContainer(c, field.type); + } + } + } else @@ -5376,11 +4959,16 @@ fn getContainerTypeOf(c: *Context, ref: *ast.Node) ?*ast.Node { return null; } -fn getFnProto(c: *Context, ref: *ast.Node) ?*ast.Node.FnProto { - const init = if (ref.cast(ast.Node.VarDecl)) |v| v.getInitNode().? else return null; +fn getFnProto(c: *Context, ref: Node) ?*ast.Payload.Func { + const init = if (ref.castTag(.var_decl)) |v| + v.data.init + else if (ref.castTag(.var_simple) orelse ref.castTag(.pub_var_simple)) |v| + v.data.init + else + return null; if (getContainerTypeOf(c, init)) |ty_node| { - if (ty_node.castTag(.OptionalType)) |prefix| { - if (prefix.rhs.cast(ast.Node.FnProto)) |fn_proto| { + if (ty_node.castTag(.optional_type)) |prefix| { + if (prefix.data.castTag(.func)) |fn_proto| { return fn_proto; } } diff --git a/src/translate_c/ast.zig b/src/translate_c/ast.zig index d62e83ea6a..1cf014e5dc 100644 --- a/src/translate_c/ast.zig +++ b/src/translate_c/ast.zig @@ -47,7 +47,6 @@ pub const Node = extern union { field_access, array_access, call, - std_mem_zeroes, var_decl, func, warning, @@ -57,6 +56,7 @@ pub const Node = extern union { @"struct", @"union", array_init, + tuple, container_init, std_meta_cast, discard, @@ -162,6 +162,8 @@ pub const Node = extern union { deref, block, + /// { operand } + block_single, @"break", sizeof, @@ -173,8 +175,12 @@ pub const Node = extern union { single_pointer, array_type, - /// @import("std").mem.zeroes(T) + /// @import("std").meta.sizeof(operand) + std_meta_sizeof, + /// @import("std").mem.zeroes(operand) std_mem_zeroes, + /// @import("std").mem.zeroInit(lhs, rhs) + std_mem_zeroinit, // pub const name = @compileError(msg); fail_decl, // var actual = mangled; @@ -188,6 +194,9 @@ pub const Node = extern union { /// pub const enum_field_name = @enumToInt(enum_name.field_name); enum_redecl, + /// pub 
inline fn name(params) return_type body + pub_inline_fn, + /// [0]type{} empty_array, /// [1]type{val} ** count @@ -195,6 +204,7 @@ pub const Node = extern union { /// _ = operand; ignore, + @"anytype", pub const last_no_payload_tag = Tag.usingnamespace_builtins; pub const no_payload_count = @enumToInt(last_no_payload_tag) + 1; @@ -213,6 +223,7 @@ pub const Node = extern union { .one_literal, .void_type, .noreturn_type, + .@"anytype", => @compileError("Type Tag " ++ @tagName(t) ++ " has no payload"), .std_mem_zeroes, @@ -234,6 +245,7 @@ pub const Node = extern union { .if_not_break, .switch_else, .ignore, + .block_single, => Payload.UnOp, .add, @@ -302,6 +314,8 @@ pub const Node = extern union { .field_access, .assign, .align_cast, + .array_access, + .std_mem_zeroinit, => Payload.BinOp, .number_literal, @@ -325,7 +339,7 @@ pub const Node = extern union { .func => Payload.Func, .@"enum" => Payload.Enum, .@"struct", .@"union" => Payload.Record, - .array_init => Payload.ArrayInit, + .array_init, .tuple => Payload.ArrayInit, .container_init => Payload.ContainerInit, .std_meta_cast => Payload.Infix, .block => Payload.Block, @@ -336,6 +350,7 @@ pub const Node = extern union { .typedef, .pub_typedef, .var_simple, .pub_var_simple => Payload.SimpleVarDecl, .enum_redecl => Payload.EnumRedecl, .array_filler => Payload.ArrayFiller, + .pub_inline_fn => Payload.PubInlineFn, }; } @@ -488,12 +503,12 @@ pub const Payload = struct { body: ?Node, alignment: ?c_uint, }, + }; - pub const Param = struct { - is_noalias: bool, - name: ?[]const u8, - type: Node, - }; + pub const Param = struct { + is_noalias: bool, + name: ?[]const u8, + type: Node, }; pub const Enum = struct { @@ -598,6 +613,16 @@ pub const Payload = struct { count: usize, }, }; + + pub const PubInlineFn = struct { + base: Node, + data: struct { + name: []const u8, + params: []Param, + return_type: Node, + body: Node, + }, + }; }; /// Converts the nodes into a Zig ast. 
From 2a74a1ebaace8b5de1796b1756f65e421eb479a4 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Fri, 12 Feb 2021 11:23:15 +0200 Subject: [PATCH 078/173] translate-c: bunch of small fixes to get it compiling --- src/translate_c.zig | 1190 +++++++++++++++++++-------------------- src/translate_c/ast.zig | 108 ++-- src/type.zig | 4 + 3 files changed, 647 insertions(+), 655 deletions(-) diff --git a/src/translate_c.zig b/src/translate_c.zig index 9ef4788ba7..c7a30ff919 100644 --- a/src/translate_c.zig +++ b/src/translate_c.zig @@ -10,12 +10,13 @@ const mem = std.mem; const math = std.math; const ast = @import("translate_c/ast.zig"); const Node = ast.Node; +const Tag = Node.Tag; const CallingConvention = std.builtin.CallingConvention; pub const ClangErrMsg = clang.Stage2ErrorMsg; -pub const Error = error{OutOfMemory}; +pub const Error = std.mem.Allocator.Error; const TypeError = Error || error{UnsupportedType}; const TransError = TypeError || error{UnsupportedTranslation}; @@ -30,11 +31,11 @@ const Scope = struct { parent: ?*Scope, const Id = enum { - Switch, - Block, - Root, - Condition, - Loop, + @"switch", + block, + root, + condition, + loop, }; /// Represents an in-progress Node.Switch. This struct is stack-allocated. @@ -44,7 +45,6 @@ const Scope = struct { base: Scope, pending_block: Block, cases: std.ArrayList(Node), - case_index: usize, switch_label: ?[]const u8, default_label: ?[]const u8, }; @@ -84,7 +84,7 @@ const Scope = struct { fn init(c: *Context, parent: *Scope, labeled: bool) !Block { var blk = Block{ .base = .{ - .id = .Block, + .id = .block, .parent = parent, }, .statements = std.ArrayList(Node).init(c.gpa), @@ -105,12 +105,12 @@ const Scope = struct { fn complete(self: *Block, c: *Context) !Node { // We reserve 1 extra statement if the parent is a Loop. This is in case of // do while, we want to put `if (cond) break;` at the end. 
- const alloc_len = self.statements.items.len + @boolToInt(self.base.parent.?.id == .Loop); - const stmts = try c.arena.alloc(Node, alloc_len); + const alloc_len = self.statements.items.len + @boolToInt(self.base.parent.?.id == .loop); + var stmts = try c.arena.alloc(Node, alloc_len); stmts.len -= 1; mem.copy(Node, stmts, self.statements.items); - return Node.block.create(c.arena, .{ - .lable = self.label, + return Tag.block.create(c.arena, .{ + .label = self.label, .stmts = stmts, }); } @@ -161,7 +161,7 @@ const Scope = struct { fn init(c: *Context) Root { return .{ .base = .{ - .id = .Root, + .id = .root, .parent = null, }, .sym_table = SymbolTable.init(c.gpa), @@ -195,9 +195,9 @@ const Scope = struct { var scope = inner; while (true) { switch (scope.id) { - .Root => unreachable, - .Block => return @fieldParentPtr(Block, "base", scope), - .Condition => return @fieldParentPtr(Condition, "base", scope).getBlockScope(c), + .root => unreachable, + .block => return @fieldParentPtr(Block, "base", scope), + .condition => return @fieldParentPtr(Condition, "base", scope).getBlockScope(c), else => scope = scope.parent.?, } } @@ -207,8 +207,8 @@ const Scope = struct { var scope = inner; while (true) { switch (scope.id) { - .Root => unreachable, - .Block => { + .root => unreachable, + .block => { const block = @fieldParentPtr(Block, "base", scope); if (block.return_type) |qt| return qt; scope = scope.parent.?; @@ -220,17 +220,17 @@ const Scope = struct { fn getAlias(scope: *Scope, name: []const u8) []const u8 { return switch (scope.id) { - .Root => return name, - .Block => @fieldParentPtr(Block, "base", scope).getAlias(name), - .Switch, .Loop, .Condition => scope.parent.?.getAlias(name), + .root => return name, + .block => @fieldParentPtr(Block, "base", scope).getAlias(name), + .@"switch", .loop, .condition => scope.parent.?.getAlias(name), }; } fn contains(scope: *Scope, name: []const u8) bool { return switch (scope.id) { - .Root => @fieldParentPtr(Root, "base", 
scope).contains(name), - .Block => @fieldParentPtr(Block, "base", scope).contains(name), - .Switch, .Loop, .Condition => scope.parent.?.contains(name), + .root => @fieldParentPtr(Root, "base", scope).contains(name), + .block => @fieldParentPtr(Block, "base", scope).contains(name), + .@"switch", .loop, .condition => scope.parent.?.contains(name), }; } @@ -238,9 +238,9 @@ const Scope = struct { var scope = inner; while (true) { switch (scope.id) { - .Root => unreachable, - .Switch => return scope, - .Loop => return scope, + .root => unreachable, + .@"switch" => return scope, + .loop => return scope, else => scope = scope.parent.?, } } @@ -250,24 +250,24 @@ const Scope = struct { var scope = inner; while (true) { switch (scope.id) { - .Root => unreachable, - .Switch => return @fieldParentPtr(Switch, "base", scope), + .root => unreachable, + .@"switch" => return @fieldParentPtr(Switch, "base", scope), else => scope = scope.parent.?, } } } /// Appends a node to the first block scope if inside a function, or to the root tree if not. 
- fn appendNode(scope: *Scope, node: Node) !void { + fn appendNode(inner: *Scope, node: Node) !void { var scope = inner; while (true) { switch (scope.id) { - .Root => { - const root = @fieldParentPtr(Root, "base", scope).contains(name); + .root => { + const root = @fieldParentPtr(Root, "base", scope); return root.nodes.append(node); }, - .Block => { - const block = @fieldParentPtr(Block, "base", scope).contains(name); + .block => { + const block = @fieldParentPtr(Block, "base", scope); return block.statements.append(node); }, else => scope = scope.parent.?, @@ -321,7 +321,7 @@ pub fn translate( args_end: [*]?[*]const u8, errors: *[]ClangErrMsg, resources_path: [*:0]const u8, -) !ast.Tree { +) !std.zig.ast.Tree { const ast_unit = clang.LoadFromCommandLine( args_begin, args_end, @@ -339,14 +339,6 @@ pub fn translate( var arena = std.heap.ArenaAllocator.init(gpa); errdefer arena.deinit(); - if (true) { - var x = false; - if (x) { - return error.OutOfMemory; - } - @panic("TODO update translate-c"); - } - var context = Context{ .gpa = gpa, .arena = &arena.allocator, @@ -361,15 +353,15 @@ pub fn translate( context.alias_list.deinit(); context.global_names.deinit(gpa); context.opaque_demotes.deinit(gpa); - context.global_scope.deini(); + context.global_scope.deinit(); } - try context.global_scope.nodes.append(try Node.usingnamespace_builtins.init()); + try context.global_scope.nodes.append(Tag.usingnamespace_builtins.init()); try prepopulateGlobalNameTable(ast_unit, &context); if (!ast_unit.visitLocalTopLevelDecls(&context, declVisitorC)) { - return context.err; + return error.OutOfMemory; } try transPreprocessorEntities(&context, ast_unit); @@ -377,16 +369,17 @@ pub fn translate( try addMacros(&context); for (context.alias_list.items) |alias| { if (!context.global_scope.sym_table.contains(alias.alias)) { - try createAlias(&context, alias); + const node = try Tag.alias.create(context.arena, .{ .actual = alias.alias, .mangled = alias.name }); + try 
addTopLevelDecl(&context, alias.alias, node); } } - return ast.render(context.global_scope.nodes.items); + return ast.render(gpa, context.global_scope.nodes.items); } fn prepopulateGlobalNameTable(ast_unit: *clang.ASTUnit, c: *Context) !void { if (!ast_unit.visitLocalTopLevelDecls(c, declVisitorNamesOnlyC)) { - return c.err; + return error.OutOfMemory; } // TODO if we see #undef, delete it from the table @@ -409,19 +402,13 @@ fn prepopulateGlobalNameTable(ast_unit: *clang.ASTUnit, c: *Context) !void { fn declVisitorNamesOnlyC(context: ?*c_void, decl: *const clang.Decl) callconv(.C) bool { const c = @ptrCast(*Context, @alignCast(@alignOf(Context), context)); - declVisitorNamesOnly(c, decl) catch |err| { - c.err = err; - return false; - }; + declVisitorNamesOnly(c, decl) catch return false; return true; } fn declVisitorC(context: ?*c_void, decl: *const clang.Decl) callconv(.C) bool { const c = @ptrCast(*Context, @alignCast(@alignOf(Context), context)); - declVisitor(c, decl) catch |err| { - c.err = err; - return false; - }; + declVisitor(c, decl) catch return false; return true; } @@ -454,7 +441,7 @@ fn declVisitor(c: *Context, decl: *const clang.Decl) Error!void { }, else => { const decl_name = try c.str(decl.getDeclKindName()); - try warn(c, decl.getLocation(), "ignoring {s} declaration", .{decl_name}); + try warn(c, &c.global_scope.base, decl.getLocation(), "ignoring {s} declaration", .{decl_name}); }, } } @@ -513,7 +500,7 @@ fn visitFnDecl(c: *Context, fn_decl: *const clang.FunctionDecl) Error!void { decl_ctx.has_body = false; decl_ctx.storage_class = .Extern; decl_ctx.is_export = false; - try warn(c, fn_decl_loc, "TODO unable to translate variadic function, demoted to declaration", .{}); + try warn(c, &c.global_scope.base, fn_decl_loc, "TODO unable to translate variadic function, demoted to declaration", .{}); } break :blk transFnProto(c, fn_decl, fn_proto_type, fn_decl_loc, decl_ctx, true) catch |err| switch (err) { error.UnsupportedType => { @@ -535,7 +522,7 
@@ fn visitFnDecl(c: *Context, fn_decl: *const clang.FunctionDecl) Error!void { }; if (!decl_ctx.has_body) { - return addTopLevelDecl(c, fn_name, &proto_node.base); + return addTopLevelDecl(c, fn_name, Node.initPayload(&proto_node.base)); } // actual function definition with body @@ -547,10 +534,8 @@ fn visitFnDecl(c: *Context, fn_decl: *const clang.FunctionDecl) Error!void { var scope = &block_scope.base; var param_id: c_uint = 0; - for (proto_node.params()) |*param, i| { - const param_name = if (param.name_token) |name_tok| - tokenSlice(c, name_tok) - else + for (proto_node.data.params) |*param, i| { + const param_name = param.name orelse return failDecl(c, fn_decl_loc, fn_name, "function {s} parameter has no name", .{fn_name}); const c_param = fn_decl.getParamDecl(param_id); @@ -565,7 +550,7 @@ fn visitFnDecl(c: *Context, fn_decl: *const clang.FunctionDecl) Error!void { const arg_name = try block_scope.makeMangledName(c, bare_arg_name); param.name = arg_name; - const redecl_node = try Node.arg_redecl.create(c.arena, .{ .actual = mangled_param_name, .mangled = arg_name }); + const redecl_node = try Tag.arg_redecl.create(c.arena, .{ .actual = mangled_param_name, .mangled = arg_name }); try block_scope.statements.append(redecl_node); } @@ -607,12 +592,12 @@ fn visitFnDecl(c: *Context, fn_decl: *const clang.FunctionDecl) Error!void { error.UnsupportedType, => return failDecl(c, fn_decl_loc, fn_name, "unable to create a return value for function", .{}), }; - const ret = try Node.@"return".create(c.arena, rhs); + const ret = try Tag.@"return".create(c.arena, rhs); try block_scope.statements.append(ret); } - proto_node.body = try block_scope.complete(c); - return addTopLevelDecl(c, fn_name, &proto_node.base); + proto_node.data.body = try block_scope.complete(c); + return addTopLevelDecl(c, fn_name, Node.initPayload(&proto_node.base)); } fn transQualTypeMaybeInitialized(c: *Context, qt: clang.QualType, decl_init: ?*const clang.Expr, loc: clang.SourceLocation) 
TransError!Node { @@ -668,7 +653,7 @@ fn visitVarDecl(c: *Context, var_decl: *const clang.VarDecl, mangled_name: ?[]co const node_or_error = if (expr.getStmtClass() == .StringLiteralClass) transStringLiteralAsArray(c, scope, @ptrCast(*const clang.StringLiteral, expr), zigArraySize(c, type_node) catch 0) else - transExprCoercing(c, scope, expr, .used, .r_value); + transExprCoercing(c, scope, expr, .used); init_node = node_or_error catch |err| switch (err) { error.UnsupportedTranslation, error.UnsupportedType, @@ -677,18 +662,18 @@ fn visitVarDecl(c: *Context, var_decl: *const clang.VarDecl, mangled_name: ?[]co }, error.OutOfMemory => |e| return e, }; - if (!qualTypeIsBoolean(qual_type) and isBoolRes(init_node)) { - init_node = try Node.bool_to_int.create(c.arena, init_node); + if (!qualTypeIsBoolean(qual_type) and isBoolRes(init_node.?)) { + init_node = try Tag.bool_to_int.create(c.arena, init_node.?); } } else { - init_node = Node.undefined_literal.init(); + init_node = Tag.undefined_literal.init(); } } else if (storage_class != .Extern) { // The C language specification states that variables with static or threadlocal // storage without an initializer are initialized to a zero value. 
// @import("std").mem.zeroes(T) - init_node = try Node.std_mem_zeroes.create(c.arena, type_node); + init_node = try Tag.std_mem_zeroes.create(c.arena, type_node); } const linksection_string = blk: { @@ -708,7 +693,7 @@ fn visitVarDecl(c: *Context, var_decl: *const clang.VarDecl, mangled_name: ?[]co break :blk null; }; - const node = try Node.var_decl.create(c.arena, .{ + const node = try Tag.var_decl.create(c.arena, .{ .is_pub = is_pub, .is_const = is_const, .is_extern = is_extern, @@ -719,12 +704,12 @@ fn visitVarDecl(c: *Context, var_decl: *const clang.VarDecl, mangled_name: ?[]co .type = type_node, .init = init_node, }); - return addTopLevelDecl(c, checked_name, &node.base); + return addTopLevelDecl(c, checked_name, node); } fn transTypeDefAsBuiltin(c: *Context, typedef_decl: *const clang.TypedefNameDecl, builtin_name: []const u8) !Node { _ = try c.decl_table.put(c.gpa, @ptrToInt(typedef_decl.getCanonicalDecl()), builtin_name); - return Node.identifier.create(c.arena, builtin_name); + return Tag.identifier.create(c.arena, builtin_name); } const builtin_typedef_map = std.ComptimeStringMap([]const u8, .{ @@ -744,7 +729,7 @@ const builtin_typedef_map = std.ComptimeStringMap([]const u8, .{ fn transTypeDef(c: *Context, typedef_decl: *const clang.TypedefNameDecl, top_level_visit: bool) Error!?Node { if (c.decl_table.get(@ptrToInt(typedef_decl.getCanonicalDecl()))) |name| - return transCreateNodeIdentifier(c, name); // Avoid processing this decl twice + return try Tag.identifier.create(c.arena, name); // Avoid processing this decl twice const typedef_name = try c.str(@ptrCast(*const clang.NamedDecl, typedef_decl).getName_bytes_begin()); @@ -753,17 +738,17 @@ fn transTypeDef(c: *Context, typedef_decl: *const clang.TypedefNameDecl, top_lev const checked_name = if (isZigPrimitiveType(typedef_name)) try std.fmt.allocPrint(c.arena, "{s}_{d}", .{ typedef_name, c.getMangle() }) else typedef_name; if (builtin_typedef_map.get(checked_name)) |builtin| { _ = try 
c.decl_table.put(c.gpa, @ptrToInt(typedef_decl.getCanonicalDecl()), builtin); - return Node.identifier.create(c.arena, builtin); + return try Tag.identifier.create(c.arena, builtin); } if (!top_level_visit) { - return transCreateNodeIdentifier(c, checked_name); + return try Tag.identifier.create(c.arena, checked_name); } _ = try c.decl_table.put(c.gpa, @ptrToInt(typedef_decl.getCanonicalDecl()), checked_name); const node = (try transCreateNodeTypedef(c, typedef_decl, true, checked_name)) orelse return null; try addTopLevelDecl(c, checked_name, node); - return transCreateNodeIdentifier(c, checked_name); + return try Tag.identifier.create(c.arena, checked_name); } fn transCreateNodeTypedef( @@ -782,9 +767,9 @@ fn transCreateNodeTypedef( error.OutOfMemory => |e| return e, }; - const payload = try c.arena.create(ast.Payload.Typedef); + const payload = try c.arena.create(ast.Payload.SimpleVarDecl); payload.* = .{ - .base = .{ .tag = ([2]ast.Node.Tag{ .typedef, .pub_typedef })[@boolToInt(toplevel)] }, + .base = .{ .tag = ([2]Tag{ .typedef, .pub_typedef })[@boolToInt(toplevel)] }, .data = .{ .name = checked_name, .init = init_node, @@ -795,7 +780,7 @@ fn transCreateNodeTypedef( fn transRecordDecl(c: *Context, record_decl: *const clang.RecordDecl) Error!?Node { if (c.decl_table.get(@ptrToInt(record_decl.getCanonicalDecl()))) |name| - return try transCreateNodeIdentifier(c, name); // Avoid processing this decl twice + return try Tag.identifier.create(c.arena, name); // Avoid processing this decl twice const record_loc = record_decl.getLocation(); var bare_name = try c.str(@ptrCast(*const clang.NamedDecl, record_decl).getName_bytes_begin()); @@ -815,7 +800,7 @@ fn transRecordDecl(c: *Context, record_decl: *const clang.RecordDecl) Error!?Nod } else if (record_decl.isStruct()) { container_kind_name = "struct"; } else { - try warn(c, record_loc, "record {s} is not a struct or union", .{bare_name}); + try warn(c, &c.global_scope.base, record_loc, "record {s} is not a struct or 
union", .{bare_name}); return null; } @@ -826,7 +811,7 @@ fn transRecordDecl(c: *Context, record_decl: *const clang.RecordDecl) Error!?Nod const init_node = blk: { const record_def = record_decl.getDefinition() orelse { _ = try c.opaque_demotes.put(c.gpa, @ptrToInt(record_decl.getCanonicalDecl()), {}); - break :blk Node.opaque_literal.init(); + break :blk Tag.opaque_literal.init(); }; const is_packed = record_decl.getPackedAttribute(); @@ -843,14 +828,14 @@ fn transRecordDecl(c: *Context, record_decl: *const clang.RecordDecl) Error!?Nod if (field_decl.isBitField()) { _ = try c.opaque_demotes.put(c.gpa, @ptrToInt(record_decl.getCanonicalDecl()), {}); - try warn(c, field_loc, "{s} demoted to opaque type - has bitfield", .{container_kind_name}); - break :blk Node.opaque_literal.init(); + try warn(c, &c.global_scope.base, field_loc, "{s} demoted to opaque type - has bitfield", .{container_kind_name}); + break :blk Tag.opaque_literal.init(); } if (qualTypeCanon(field_qt).isIncompleteOrZeroLengthArrayType(c.clang_context)) { _ = try c.opaque_demotes.put(c.gpa, @ptrToInt(record_decl.getCanonicalDecl()), {}); - try warn(c, field_loc, "{s} demoted to opaque type - has variable length array", .{container_kind_name}); - break :blk Node.opaque_literal.init(); + try warn(c, &c.global_scope.base, field_loc, "{s} demoted to opaque type - has variable length array", .{container_kind_name}); + break :blk Tag.opaque_literal.init(); } var is_anon = false; @@ -864,8 +849,8 @@ fn transRecordDecl(c: *Context, record_decl: *const clang.RecordDecl) Error!?Nod const field_type = transQualType(c, field_qt, field_loc) catch |err| switch (err) { error.UnsupportedType => { _ = try c.opaque_demotes.put(c.gpa, @ptrToInt(record_decl.getCanonicalDecl()), {}); - try warn(c, record_loc, "{s} demoted to opaque type - unable to translate type of field {s}", .{ container_kind_name, raw_name }); - break :blk Node.opaque_literal.init(); + try warn(c, &c.global_scope.base, record_loc, "{s} demoted to 
opaque type - unable to translate type of field {s}", .{ container_kind_name, field_name }); + break :blk Tag.opaque_literal.init(); }, else => |e| return e, }; @@ -890,20 +875,20 @@ fn transRecordDecl(c: *Context, record_decl: *const clang.RecordDecl) Error!?Nod }); } - const payload = try c.arena.create(ast.Payload.Record); - container_node.* = .{ - .base = .{ .tag = ([2]ast.Node.Tag{ .@"struct", .@"union" })[@boolToInt(is_union)] }, + const record_payload = try c.arena.create(ast.Payload.Record); + record_payload.* = .{ + .base = .{ .tag = ([2]Tag{ .@"struct", .@"union" })[@boolToInt(is_union)] }, .data = .{ .is_packed = is_packed, .fields = try c.arena.dupe(ast.Payload.Record.Field, fields.items), }, }; - break :blk Node.initPayload(&container_node.base); + break :blk Node.initPayload(&record_payload.base); }; const payload = try c.arena.create(ast.Payload.SimpleVarDecl); payload.* = .{ - .base = .{ .tag = ([2]ast.Node.Tag{ .var_simple, .pub_var_simple })[@boolToInt(is_pub)] }, + .base = .{ .tag = ([2]Tag{ .var_simple, .pub_var_simple })[@boolToInt(is_pub)] }, .data = .{ .name = name, .init = init_node, @@ -913,12 +898,12 @@ fn transRecordDecl(c: *Context, record_decl: *const clang.RecordDecl) Error!?Nod try addTopLevelDecl(c, name, Node.initPayload(&payload.base)); if (!is_unnamed) try c.alias_list.append(.{ .alias = bare_name, .name = name }); - return Node.identifier.create(c.arena, name); + return try Tag.identifier.create(c.arena, name); } fn transEnumDecl(c: *Context, enum_decl: *const clang.EnumDecl) Error!?Node { if (c.decl_table.get(@ptrToInt(enum_decl.getCanonicalDecl()))) |name| - return try transCreateNodeIdentifier(c, name); // Avoid processing this decl twice + return try Tag.identifier.create(c.arena, name); // Avoid processing this decl twice const enum_loc = enum_decl.getLocation(); var bare_name = try c.str(@ptrCast(*const clang.NamedDecl, enum_decl).getName_bytes_begin()); @@ -965,7 +950,7 @@ fn transEnumDecl(c: *Context, enum_decl: *const 
clang.EnumDecl) Error!?Node { else => |e| return e, } else - try Node.type.create(c.arena, "c_int"); + try Tag.type.create(c.arena, "c_int"); it = enum_def.enumerator_begin(); end_it = enum_def.enumerator_end(); @@ -983,29 +968,29 @@ fn transEnumDecl(c: *Context, enum_decl: *const clang.EnumDecl) Error!?Node { else null; - try fields_and_decls.append(.{ + try fields.append(.{ .name = field_name, .value = int_node, }); // In C each enum value is in the global namespace. So we put them there too. // At this point we can rely on the enum emitting successfully. - try addTopLevelDecl(c, field_name, try Node.enum_redecl.create(c.arena, .{ + try addTopLevelDecl(c, field_name, try Tag.enum_redecl.create(c.arena, .{ .enum_val_name = enum_val_name, .field_name = field_name, .enum_name = name, })); } - break :blk try Node.@"enum".create(c.arena, try c.arena.dupe(ast.Payload.Enum.Field, fields.items)); + break :blk try Tag.@"enum".create(c.arena, try c.arena.dupe(ast.Payload.Enum.Field, fields.items)); } else blk: { _ = try c.opaque_demotes.put(c.gpa, @ptrToInt(enum_decl.getCanonicalDecl()), {}); - break :blk Node.opaque_literal.init(); + break :blk Tag.opaque_literal.init(); }; const payload = try c.arena.create(ast.Payload.SimpleVarDecl); payload.* = .{ - .base = .{ .tag = ([2]ast.Node.Tag{ .var_simple, .pub_var_simple })[@boolToInt(is_pub)] }, + .base = .{ .tag = ([2]Tag{ .var_simple, .pub_var_simple })[@boolToInt(is_pub)] }, .data = .{ .name = name, .init = init_node, @@ -1015,7 +1000,7 @@ fn transEnumDecl(c: *Context, enum_decl: *const clang.EnumDecl) Error!?Node { try addTopLevelDecl(c, name, Node.initPayload(&payload.base)); if (!is_unnamed) try c.alias_list.append(.{ .alias = bare_name, .name = name }); - return transCreateNodeIdentifier(c, name); + return try Tag.identifier.create(c.arena, name); } const ResultUsed = enum { @@ -1023,31 +1008,25 @@ const ResultUsed = enum { unused, }; -const LRValue = enum { - l_value, - r_value, -}; - fn transStmt( c: *Context, scope: 
*Scope, stmt: *const clang.Stmt, result_used: ResultUsed, - lrvalue: LRValue, ) TransError!Node { const sc = stmt.getStmtClass(); switch (sc) { .BinaryOperatorClass => return transBinaryOperator(c, scope, @ptrCast(*const clang.BinaryOperator, stmt), result_used), .CompoundStmtClass => return transCompoundStmt(c, scope, @ptrCast(*const clang.CompoundStmt, stmt)), - .CStyleCastExprClass => return transCStyleCastExprClass(c, scope, @ptrCast(*const clang.CStyleCastExpr, stmt), result_used, lrvalue), + .CStyleCastExprClass => return transCStyleCastExprClass(c, scope, @ptrCast(*const clang.CStyleCastExpr, stmt), result_used), .DeclStmtClass => return transDeclStmt(c, scope, @ptrCast(*const clang.DeclStmt, stmt)), - .DeclRefExprClass => return transDeclRefExpr(c, scope, @ptrCast(*const clang.DeclRefExpr, stmt), lrvalue), + .DeclRefExprClass => return transDeclRefExpr(c, scope, @ptrCast(*const clang.DeclRefExpr, stmt)), .ImplicitCastExprClass => return transImplicitCastExpr(c, scope, @ptrCast(*const clang.ImplicitCastExpr, stmt), result_used), .IntegerLiteralClass => return transIntegerLiteral(c, scope, @ptrCast(*const clang.IntegerLiteral, stmt), result_used, .with_as), .ReturnStmtClass => return transReturnStmt(c, scope, @ptrCast(*const clang.ReturnStmt, stmt)), .StringLiteralClass => return transStringLiteral(c, scope, @ptrCast(*const clang.StringLiteral, stmt), result_used), .ParenExprClass => { - const expr = try transExpr(c, scope, @ptrCast(*const clang.ParenExpr, stmt).getSubExpr(), .used, lrvalue); + const expr = try transExpr(c, scope, @ptrCast(*const clang.ParenExpr, stmt).getSubExpr(), .used); return maybeSuppressResult(c, scope, result_used, expr); }, .InitListExprClass => return transInitListExpr(c, scope, @ptrCast(*const clang.InitListExpr, stmt), result_used), @@ -1056,9 +1035,9 @@ fn transStmt( .WhileStmtClass => return transWhileLoop(c, scope, @ptrCast(*const clang.WhileStmt, stmt)), .DoStmtClass => return transDoWhileLoop(c, scope, @ptrCast(*const 
clang.DoStmt, stmt)), .NullStmtClass => { - return Node.empty_block.init(); + return Tag.empty_block.init(); }, - .ContinueStmtClass => return try transCreateNodeContinue(c), + .ContinueStmtClass => return Tag.@"continue".init(), .BreakStmtClass => return transBreak(c, scope), .ForStmtClass => return transForLoop(c, scope, @ptrCast(*const clang.ForStmt, stmt)), .FloatingLiteralClass => return transFloatingLiteral(c, scope, @ptrCast(*const clang.FloatingLiteral, stmt), result_used), @@ -1083,12 +1062,12 @@ fn transStmt( .CompoundAssignOperatorClass => return transCompoundAssignOperator(c, scope, @ptrCast(*const clang.CompoundAssignOperator, stmt), result_used), .OpaqueValueExprClass => { const source_expr = @ptrCast(*const clang.OpaqueValueExpr, stmt).getSourceExpr().?; - const expr = try transExpr(c, scope, source_expr, .used, lrvalue); + const expr = try transExpr(c, scope, source_expr, .used); return maybeSuppressResult(c, scope, result_used, expr); }, else => { return fail( - rp, + c, error.UnsupportedTranslation, stmt.getBeginLoc(), "TODO implement translation of stmt class {s}", @@ -1109,37 +1088,36 @@ fn transBinaryOperator( switch (op) { .Assign => return try transCreateNodeAssign(c, scope, result_used, stmt.getLHS(), stmt.getRHS()), .Comma => { - var block_scope = try Scope.Block.init(rp.c, scope, true); + var block_scope = try Scope.Block.init(c, scope, true); defer block_scope.deinit(); - - const lhs = try transExpr(c, &block_scope.base, stmt.getLHS(), .unused, .r_value); + const lhs = try transExpr(c, &block_scope.base, stmt.getLHS(), .unused); try block_scope.statements.append(lhs); - const rhs = try transExpr(rp, &block_scope.base, stmt.getRHS(), .used, .r_value); - const break_node = try Node.break_val.create(c.arena, .{ + const rhs = try transExpr(c, &block_scope.base, stmt.getRHS(), .used); + const break_node = try Tag.break_val.create(c.arena, .{ .label = block_scope.label, .val = rhs, }); try block_scope.statements.append(break_node); - const 
block_node = try block_scope.complete(rp.c); - return maybeSuppressResult(rp, scope, result_used, block_node); + const block_node = try block_scope.complete(c); + return maybeSuppressResult(c, scope, result_used, block_node); }, .Div => { if (cIsSignedInteger(qt)) { // signed integer division uses @divTrunc - const lhs = try transExpr(c, scope, stmt.getLHS(), .used, .l_value); - const rhs = try transExpr(c, scope, stmt.getRHS(), .used, .r_value); - const div_trunc = try Node.div_trunc.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); + const lhs = try transExpr(c, scope, stmt.getLHS(), .used); + const rhs = try transExpr(c, scope, stmt.getRHS(), .used); + const div_trunc = try Tag.div_trunc.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); return maybeSuppressResult(c, scope, result_used, div_trunc); } }, .Rem => { if (cIsSignedInteger(qt)) { // signed integer division uses @rem - const lhs = try transExpr(c, scope, stmt.getLHS(), .used, .l_value); - const rhs = try transExpr(c, scope, stmt.getRHS(), .used, .r_value); - const rem = try Node.rem.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); + const lhs = try transExpr(c, scope, stmt.getLHS(), .used); + const rhs = try transExpr(c, scope, stmt.getRHS(), .used); + const rem = try Tag.rem.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); return maybeSuppressResult(c, scope, result_used, rem); } }, @@ -1150,14 +1128,14 @@ fn transBinaryOperator( return transCreateNodeShiftOp(c, scope, stmt, .shr, result_used); }, .LAnd => { - return transCreateNodeBoolInfixOp(c, scope, stmt, .bool_and, result_used); + return transCreateNodeBoolInfixOp(c, scope, stmt, .@"and", result_used); }, .LOr => { - return transCreateNodeBoolInfixOp(c, scope, stmt, .bool_or, result_used); + return transCreateNodeBoolInfixOp(c, scope, stmt, .@"or", result_used); }, else => {}, } - var op_id: Node.Tag = undefined; + var op_id: Tag = undefined; switch (op) { .Add => { if (cIsUnsignedInteger(qt)) { @@ -1218,20 +1196,20 @@ fn transBinaryOperator( else => 
unreachable, } - const lhs_uncasted = try transExpr(c, scope, stmt.getLHS(), .used, .l_value); - const rhs_uncasted = try transExpr(c, scope, stmt.getRHS(), .used, .r_value); + const lhs_uncasted = try transExpr(c, scope, stmt.getLHS(), .used); + const rhs_uncasted = try transExpr(c, scope, stmt.getRHS(), .used); const lhs = if (isBoolRes(lhs_uncasted)) - try Node.bool_to_int.create(c.arena, lhs_uncasted) + try Tag.bool_to_int.create(c.arena, lhs_uncasted) else lhs_uncasted; const rhs = if (isBoolRes(rhs_uncasted)) - try Node.bool_to_int.create(c.arena, rhs_uncasted) + try Tag.bool_to_int.create(c.arena, rhs_uncasted) else rhs_uncasted; - return transCreateNodeInfixOp(c, scope, op_id, lhs, rhs, used); + return transCreateNodeInfixOp(c, scope, op_id, lhs, rhs, result_used); } fn transCompoundStmtInline( @@ -1243,7 +1221,7 @@ fn transCompoundStmtInline( var it = stmt.body_begin(); const end_it = stmt.body_end(); while (it != end_it) : (it += 1) { - const result = try transStmt(c, parent_scope, it[0], .unused, .r_value); + const result = try transStmt(c, parent_scope, it[0], .unused); try block.statements.append(result); } } @@ -1260,7 +1238,6 @@ fn transCStyleCastExprClass( scope: *Scope, stmt: *const clang.CStyleCastExpr, result_used: ResultUsed, - lrvalue: LRValue, ) TransError!Node { const sub_expr = stmt.getSubExpr(); const cast_node = (try transCCast( @@ -1269,7 +1246,7 @@ fn transCStyleCastExprClass( stmt.getBeginLoc(), stmt.getType(), sub_expr.getType(), - try transExpr(c, scope, sub_expr, .used, lrvalue), + try transExpr(c, scope, sub_expr, .used), )); return maybeSuppressResult(c, scope, result_used, cast_node); } @@ -1294,7 +1271,7 @@ fn transDeclStmtOne( // This is actually a global variable, put it in the global scope and reference it. 
// `_ = mangled_name;` try visitVarDecl(c, var_decl, mangled_name); - return try maybeSuppressResult(c, scope, .unused, try Node.identifier.create(c.arena, mangled_name)); + return try maybeSuppressResult(c, scope, .unused, try Tag.identifier.create(c.arena, mangled_name)); }, else => {}, } @@ -1308,13 +1285,13 @@ fn transDeclStmtOne( if (expr.getStmtClass() == .StringLiteralClass) try transStringLiteralAsArray(c, scope, @ptrCast(*const clang.StringLiteral, expr), try zigArraySize(c, type_node)) else - try transExprCoercing(c, scope, expr, .used, .r_value) + try transExprCoercing(c, scope, expr, .used) else - try transCreateNodeUndefinedLiteral(c); + Tag.undefined_literal.init(); if (!qualTypeIsBoolean(qual_type) and isBoolRes(init_node)) { - init_node = try Node.bool_to_int.create(c.arena, init_node); + init_node = try Tag.bool_to_int.create(c.arena, init_node); } - return Node.var_decl.create(c.arena, .{ + return Tag.var_decl.create(c.arena, .{ .is_pub = false, .is_const = is_const, .is_extern = false, @@ -1339,7 +1316,7 @@ fn transDeclStmtOne( return node; }, else => |kind| return fail( - rp, + c, error.UnsupportedTranslation, decl.getLocation(), "TODO implement translation of DeclStmt kind {s}", @@ -1370,12 +1347,11 @@ fn transDeclRefExpr( c: *Context, scope: *Scope, expr: *const clang.DeclRefExpr, - lrvalue: LRValue, ) TransError!Node { const value_decl = expr.getDecl(); const name = try c.str(@ptrCast(*const clang.NamedDecl, value_decl).getName_bytes_begin()); const mangled_name = scope.getAlias(name); - return Node.identifier.create(c.arena, mangled_name); + return Tag.identifier.create(c.arena, mangled_name); } fn transImplicitCastExpr( @@ -1389,49 +1365,49 @@ fn transImplicitCastExpr( const src_type = getExprQualType(c, sub_expr); switch (expr.getCastKind()) { .BitCast, .FloatingCast, .FloatingToIntegral, .IntegralToFloating, .IntegralCast, .PointerToIntegral, .IntegralToPointer => { - const sub_expr_node = try transExpr(c, scope, sub_expr, .used, 
.r_value); + const sub_expr_node = try transExpr(c, scope, sub_expr, .used); const casted = try transCCast(c, scope, expr.getBeginLoc(), dest_type, src_type, sub_expr_node); return maybeSuppressResult(c, scope, result_used, casted); }, .LValueToRValue, .NoOp, .FunctionToPointerDecay => { - const sub_expr_node = try transExpr(c, scope, sub_expr, .used, .r_value); + const sub_expr_node = try transExpr(c, scope, sub_expr, .used); return maybeSuppressResult(c, scope, result_used, sub_expr_node); }, .ArrayToPointerDecay => { if (exprIsNarrowStringLiteral(sub_expr)) { - const sub_expr_node = try transExpr(c, scope, sub_expr, .used, .r_value); + const sub_expr_node = try transExpr(c, scope, sub_expr, .used); return maybeSuppressResult(c, scope, result_used, sub_expr_node); } - const addr = try Node.address_of.create(c.arena, try transExpr(c, scope, sub_expr, .used, .r_value)); + const addr = try Tag.address_of.create(c.arena, try transExpr(c, scope, sub_expr, .used)); return maybeSuppressResult(c, scope, result_used, addr); }, .NullToPointer => { - return Node.null_literal.init(); + return Tag.null_literal.init(); }, .PointerToBoolean => { // @ptrToInt(val) != 0 - const ptr_to_int = try Node.ptr_to_int.create(c.arena, try transExpr(c, scope, sub_expr, .used, .r_value)); + const ptr_to_int = try Tag.ptr_to_int.create(c.arena, try transExpr(c, scope, sub_expr, .used)); - const ne = try Node.not_equal.create(c.arena, .{ .lhs = ptr_to_int, .rhs = Node.zero_literal.init() }); + const ne = try Tag.not_equal.create(c.arena, .{ .lhs = ptr_to_int, .rhs = Tag.zero_literal.init() }); return maybeSuppressResult(c, scope, result_used, ne); }, .IntegralToBoolean => { - const sub_expr_node = try transExpr(c, scope, sub_expr, .used, .r_value); + const sub_expr_node = try transExpr(c, scope, sub_expr, .used); // The expression is already a boolean one, return it as-is if (isBoolRes(sub_expr_node)) return maybeSuppressResult(c, scope, result_used, sub_expr_node); // val != 0 - const ne = 
try Node.not_equal.create(c.arena, .{ .lhs = sub_expr_node, .rhs = Node.zero_literal.init() }); + const ne = try Tag.not_equal.create(c.arena, .{ .lhs = sub_expr_node, .rhs = Tag.zero_literal.init() }); return maybeSuppressResult(c, scope, result_used, ne); }, .BuiltinFnToFnPtr => { - return transExpr(rp, scope, sub_expr, result_used, .r_value); + return transExpr(c, scope, sub_expr, result_used); }, else => |kind| return fail( - rp, + c, error.UnsupportedTranslation, @ptrCast(*const clang.Stmt, expr).getBeginLoc(), "TODO implement translation of CastKind {s}", @@ -1445,17 +1421,16 @@ fn transBoolExpr( scope: *Scope, expr: *const clang.Expr, used: ResultUsed, - lrvalue: LRValue, ) TransError!Node { if (@ptrCast(*const clang.Stmt, expr).getStmtClass() == .IntegerLiteralClass) { var is_zero: bool = undefined; if (!(@ptrCast(*const clang.IntegerLiteral, expr).isZero(&is_zero, c.clang_context))) { return fail(c, error.UnsupportedTranslation, expr.getBeginLoc(), "invalid integer literal", .{}); } - return Node{ .tag = ([2]ast.Node.Tag{ .true_literal, .false_literal })[@boolToInt(is_zero)] }; + return Node{ .tag_if_small_enough = @enumToInt(([2]Tag{ .true_literal, .false_literal })[@boolToInt(is_zero)]) }; } - var res = try transExpr(c, scope, expr, used, lrvalue); + var res = try transExpr(c, scope, expr, used); if (isBoolRes(res)) { return maybeSuppressResult(c, scope, used, res); } @@ -1494,7 +1469,7 @@ fn isBoolRes(res: Node) bool { .@"or", .@"and", .equal, - .note_equal, + .not_equal, .less_than, .less_than_equal, .greater_than, @@ -1547,18 +1522,18 @@ fn finishBoolExpr( .Float16, => { // node != 0 - return Node.not_equal.create(c.arena, .{ .lhs = node, .rhs = Node.zero_literal.init() }); + return Tag.not_equal.create(c.arena, .{ .lhs = node, .rhs = Tag.zero_literal.init() }); }, .NullPtr => { // node == null - return Node.equal.create(c.arena, .{ .lhs = node, .rhs = Node.null_literal.init() }); + return Tag.equal.create(c.arena, .{ .lhs = node, .rhs = 
Tag.null_literal.init() }); }, else => {}, } }, .Pointer => { // node == null - return Node.equal.create(c.arena, .{ .lhs = node, .rhs = Node.null_literal.init() }); + return Tag.equal.create(c.arena, .{ .lhs = node, .rhs = Tag.null_literal.init() }); }, .Typedef => { const typedef_ty = @ptrCast(*const clang.TypedefType, ty); @@ -1568,8 +1543,7 @@ fn finishBoolExpr( }, .Enum => { // node != 0 - return Node.not_equal.create(c.arena, .{ .lhs = node, .rhs = Node.zero_literal.init() }); - const op_token = try appendToken(c, .BangEqual, "!="); + return Tag.not_equal.create(c.arena, .{ .lhs = node, .rhs = Tag.zero_literal.init() }); }, .Elaborated => { const elaborated_ty = @ptrCast(*const clang.ElaboratedType, ty); @@ -1614,9 +1588,10 @@ fn transIntegerLiteral( // But the first step is to be correct, and the next step is to make the output more elegant. // @as(T, x) + const expr_base = @ptrCast(*const clang.Expr, expr); const ty_node = try transQualType(c, expr_base.getType(), expr_base.getBeginLoc()); const rhs = try transCreateNodeAPInt(c, eval_result.Val.getInt()); - const as = try Node.as.create(c.arena, .{ .lhs = ty_node, .rhs = rhs }); + const as = try Tag.as.create(c.arena, .{ .lhs = ty_node, .rhs = rhs }); return maybeSuppressResult(c, scope, result_used, as); } @@ -1624,16 +1599,16 @@ fn transReturnStmt( c: *Context, scope: *Scope, expr: *const clang.ReturnStmt, -) TransError!*ast.Node { +) TransError!Node { const val_expr = expr.getRetValue() orelse - return Node.return_void.init(); + return Tag.return_void.init(); - var rhs = try transExprCoercing(c, scope, val_expr, .used, .r_value); + var rhs = try transExprCoercing(c, scope, val_expr, .used); const return_qt = scope.findBlockReturnType(c); if (isBoolRes(rhs) and !qualTypeIsBoolean(return_qt)) { - rhs = try Node.bool_to_int.create(c.arena, rhs); + rhs = try Tag.bool_to_int.create(c.arena, rhs); } - return Node.@"return".create(c.arena, rhs); + return Tag.@"return".create(c.arena, rhs); } fn 
transStringLiteral( @@ -1647,10 +1622,9 @@ fn transStringLiteral( .Ascii, .UTF8 => { var len: usize = undefined; const bytes_ptr = stmt.getString_bytes_begin_size(&len); - const str = bytes_ptr[0..len]; - const str = try std.fmt.allocPrint(c.arena, "\"{}\"", .{std.zig.fmtEscapes(str)}); - const node = try Node.string_literal.create(c.arena, str); + const str = try std.fmt.allocPrint(c.arena, "\"{}\"", .{std.zig.fmtEscapes(bytes_ptr[0..len])}); + const node = try Tag.string_literal.create(c.arena, str); return maybeSuppressResult(c, scope, result_used, node); }, .UTF16, .UTF32, .Wide => { @@ -1658,9 +1632,9 @@ fn transStringLiteral( const name = try std.fmt.allocPrint(c.arena, "zig.{s}_string_{d}", .{ str_type, c.getMangle() }); const lit_array = try transStringLiteralAsArray(c, scope, stmt, stmt.getLength() + 1); - const decl = try Node.var_simple.create(c.arena, .{ .name = name, .init = lit_array }); - try scope.appendNode(name, decl); - const node = try Node.identifier.create(c.arena, name); + const decl = try Tag.var_simple.create(c.arena, .{ .name = name, .init = lit_array }); + try scope.appendNode(decl); + const node = try Tag.identifier.create(c.arena, name); return maybeSuppressResult(c, scope, result_used, node); }, } @@ -1669,9 +1643,7 @@ fn transStringLiteral( /// Parse the size of an array back out from an ast Node. fn zigArraySize(c: *Context, node: Node) TransError!usize { if (node.castTag(.array_type)) |array| { - if (array.data.len.castTag(.int_literal)) |int_lit| { - return std.fmt.parseUnsigned(usize, int_lit.data, 10) catch error.UnsupportedTranslation; - } + return array.data.len; } return error.UnsupportedTranslation; } @@ -1709,7 +1681,7 @@ fn transStringLiteralAsArray( init_list[i] = try transCreateNodeNumber(c, 0); } - return Node.array_init.create(c.arena, init_list); + return Tag.array_init.create(c.arena, init_list); } fn cIsEnum(qt: clang.QualType) bool { @@ -1747,89 +1719,77 @@ fn transCCast( // 3. 
Bit-cast to correct signed-ness const src_type_is_signed = cIsSignedInteger(src_type) or cIsEnum(src_type); const src_int_type = if (cIsInteger(src_type)) src_type else cIntTypeForEnum(src_type); - var src_int_expr = if (cIsInteger(src_type)) expr else Node.enum_to_int.create(c.arena, expr); + var src_int_expr = if (cIsInteger(src_type)) expr else try Tag.enum_to_int.create(c.arena, expr); if (isBoolRes(src_int_expr)) { - src_int_expr = try Node.bool_to_int.create(c.arena, src_int_expr); + src_int_expr = try Tag.bool_to_int.create(c.arena, src_int_expr); } switch (cIntTypeCmp(dst_type, src_int_type)) { .lt => { // @truncate(SameSignSmallerInt, src_int_expr) const ty_node = try transQualTypeIntWidthOf(c, dst_type, src_type_is_signed); - src_int_expr = try Node.truncate.create(c.arena, .{ .lhs = ty_node, .rhs = src_int_expr }); + src_int_expr = try Tag.truncate.create(c.arena, .{ .lhs = ty_node, .rhs = src_int_expr }); }, .gt => { // @as(SameSignBiggerInt, src_int_expr) const ty_node = try transQualTypeIntWidthOf(c, dst_type, src_type_is_signed); - src_int_expr = try Node.as.create(c.arena, .{ .lhs = ty_node, .rhs = src_int_expr }); + src_int_expr = try Tag.as.create(c.arena, .{ .lhs = ty_node, .rhs = src_int_expr }); }, .eq => { // src_int_expr = src_int_expr }, } // @bitCast(dest_type, intermediate_value) - return Node.bit_cast.create(c.arena, .{ .lhs = dst_node, .rhs = src_int_expr }); + return Tag.bit_cast.create(c.arena, .{ .lhs = dst_node, .rhs = src_int_expr }); } if (cIsInteger(dst_type) and qualTypeIsPtr(src_type)) { // @intCast(dest_type, @ptrToInt(val)) - const ptr_to_int = try Node.ptr_to_int.create(c.arena, expr); - return Node.int_cast.create(c.arena, .{ .lhs = dst_node, .rhs = ptr_to_int }); + const ptr_to_int = try Tag.ptr_to_int.create(c.arena, expr); + return Tag.int_cast.create(c.arena, .{ .lhs = dst_node, .rhs = ptr_to_int }); } if (cIsInteger(src_type) and qualTypeIsPtr(dst_type)) { // @intToPtr(dest_type, val) - return 
Node.int_to_ptr.create(c.arena, .{ .lhs = dst_node, .rhs = expr }); + return Tag.int_to_ptr.create(c.arena, .{ .lhs = dst_node, .rhs = expr }); } if (cIsFloating(src_type) and cIsFloating(dst_type)) { // @floatCast(dest_type, val) - return Node.float_cast.create(c.arena, .{ .lhs = dst_node, .rhs = expr }); + return Tag.float_cast.create(c.arena, .{ .lhs = dst_node, .rhs = expr }); } if (cIsFloating(src_type) and !cIsFloating(dst_type)) { // @floatToInt(dest_type, val) - return Node.float_to_int.create(c.arena, .{ .lhs = dst_node, .rhs = expr }); + return Tag.float_to_int.create(c.arena, .{ .lhs = dst_node, .rhs = expr }); } if (!cIsFloating(src_type) and cIsFloating(dst_type)) { // @intToFloat(dest_type, val) - return Node.int_to_float.create(c.arena, .{ .lhs = dst_node, .rhs = expr }); + return Tag.int_to_float.create(c.arena, .{ .lhs = dst_node, .rhs = expr }); } if (qualTypeIsBoolean(src_type) and !qualTypeIsBoolean(dst_type)) { // @boolToInt returns either a comptime_int or a u1 // TODO: if dst_type is 1 bit & signed (bitfield) we need @bitCast // instead of @as - const bool_to_int = Node.bool_to_int.create(c.arena, expr); - return Node.as.create(c.arena, .{ .lhs = dst_node, .rhs = bool_to_int }); + const bool_to_int = try Tag.bool_to_int.create(c.arena, expr); + return Tag.as.create(c.arena, .{ .lhs = dst_node, .rhs = bool_to_int }); } if (cIsEnum(dst_type)) { // @intToEnum(dest_type, val) - return Node.int_to_enum.create(c.arena, .{ .lhs = dst_node, .rhs = expr }); + return Tag.int_to_enum.create(c.arena, .{ .lhs = dst_node, .rhs = expr }); } if (cIsEnum(src_type) and !cIsEnum(dst_type)) { // @enumToInt(val) - return Node.enum_to_int.create(c.arena, expr); + return Tag.enum_to_int.create(c.arena, expr); } // @as(dest_type, val) - return Node.as.create(c.arena, .{ .lhs = dst_node, .rhs = expr }); + return Tag.as.create(c.arena, .{ .lhs = dst_node, .rhs = expr }); } -fn transExpr( - c: *Context, - scope: *Scope, - expr: *const clang.Expr, - used: ResultUsed, - 
lrvalue: LRValue, -) TransError!Node { - return transStmt(c, scope, @ptrCast(*const clang.Stmt, expr), used, lrvalue); +fn transExpr(c: *Context, scope: *Scope, expr: *const clang.Expr, used: ResultUsed) TransError!Node { + return transStmt(c, scope, @ptrCast(*const clang.Stmt, expr), used); } /// Same as `transExpr` but with the knowledge that the operand will be type coerced, and therefore /// an `@as` would be redundant. This is used to prevent redundant `@as` in integer literals. -fn transExprCoercing( - c: *Context, - scope: *Scope, - expr: *const clang.Expr, - used: ResultUsed, - lrvalue: LRValue, -) TransError!Node { +fn transExprCoercing(c: *Context, scope: *Scope, expr: *const clang.Expr, used: ResultUsed) TransError!Node { switch (@ptrCast(*const clang.Stmt, expr).getStmtClass()) { .IntegerLiteralClass => { return transIntegerLiteral(c, scope, @ptrCast(*const clang.IntegerLiteral, expr), .used, .no_as); @@ -1840,12 +1800,12 @@ fn transExprCoercing( .UnaryOperatorClass => { const un_expr = @ptrCast(*const clang.UnaryOperator, expr); if (un_expr.getOpcode() == .Extension) { - return transExprCoercing(c, scope, un_expr.getSubExpr(), used, lrvalue); + return transExprCoercing(c, scope, un_expr.getSubExpr(), used); } }, else => {}, } - return transExpr(c, scope, expr, .used, .r_value); + return transExpr(c, scope, expr, .used); } fn transInitListExprRecord( @@ -1896,11 +1856,11 @@ fn transInitListExprRecord( try field_inits.append(.{ .name = raw_name, - .value = try transExpr(c, scope, elem_expr, .used, .r_value), + .value = try transExpr(c, scope, elem_expr, .used), }); } - return Node.container_init.create(c.arena, try c.arena.dupe(ast.Payload.ContainerInit.Initializer, field_inits.items)); + return Tag.container_init.create(c.arena, try c.arena.dupe(ast.Payload.ContainerInit.Initializer, field_inits.items)); } fn transInitListExprArray( @@ -1920,18 +1880,18 @@ fn transInitListExprArray( const leftover_count = all_count - init_count; if (all_count == 0) { - 
return Node.empty_array.create(c.arena, try transQualType(c, child_qt, source_loc)); + return Tag.empty_array.create(c.arena, try transQualType(c, child_qt, loc)); } - const ty_node = try transType(ty); + const ty_node = try transType(c, ty, loc); const init_node = if (init_count != 0) blk: { const init_list = try c.arena.alloc(Node, init_count); for (init_list) |*init, i| { - const elem_expr = expr.getInit(i); - init.* = try transExpr(c, scope, elem_expr, .used, .r_value); + const elem_expr = expr.getInit(@intCast(c_uint, i)); + init.* = try transExpr(c, scope, elem_expr, .used); } - const init_node = try Node.array_init.create(c.arena, init_list); + const init_node = try Tag.array_init.create(c.arena, init_list); if (leftover_count == 0) { return init_node; } @@ -1939,14 +1899,14 @@ fn transInitListExprArray( } else null; const filler_val_expr = expr.getArrayFiller(); - const filler_node = try Node.array_filler.create(c.arena, .{ + const filler_node = try Tag.array_filler.create(c.arena, .{ .type = ty_node, - .filler = try transExpr(c, scope, filler_val_expr, .used, .r_value), + .filler = try transExpr(c, scope, filler_val_expr, .used), .count = leftover_count, }); if (init_node) |some| { - return Node.array_cat.create(c.arena, some, filler_node); + return Tag.array_cat.create(c.arena, .{ .lhs = some, .rhs = filler_node }); } else { return filler_node; } @@ -1964,7 +1924,7 @@ fn transInitListExpr( if (qual_type.isRecordType()) { return maybeSuppressResult(c, scope, used, try transInitListExprRecord( - rp, + c, scope, source_loc, expr, @@ -1972,7 +1932,7 @@ fn transInitListExpr( )); } else if (qual_type.isArrayType()) { return maybeSuppressResult(c, scope, used, try transInitListExprArray( - rp, + c, scope, source_loc, expr, @@ -1994,7 +1954,7 @@ fn transZeroInitExpr( .Builtin => { const builtin_ty = @ptrCast(*const clang.BuiltinType, ty); switch (builtin_ty.getKind()) { - .Bool => return Node.false_literal.init(), + .Bool => return Tag.false_literal.init(), 
.Char_U, .UChar, .Char_S, @@ -2015,11 +1975,11 @@ fn transZeroInitExpr( .Float128, .Float16, .LongDouble, - => return Node.zero_literal.init(), + => return Tag.zero_literal.init(), else => return fail(c, error.UnsupportedType, source_loc, "unsupported builtin type", .{}), } }, - .Pointer => return Node.null_literal.init(), + .Pointer => return Tag.null_literal.init(), .Typedef => { const typedef_ty = @ptrCast(*const clang.TypedefType, ty); const typedef_decl = typedef_ty.getDecl(); @@ -2058,19 +2018,19 @@ fn transIfStmt( var cond_scope = Scope.Condition{ .base = .{ .parent = scope, - .id = .Condition, + .id = .condition, }, }; defer cond_scope.deinit(); const cond_expr = @ptrCast(*const clang.Expr, stmt.getCond()); - const cond = try transBoolExpr(c, &cond_scope.base, cond_expr, .used, .r_value); + const cond = try transBoolExpr(c, &cond_scope.base, cond_expr, .used); - const then_body = try transStmt(c, scope, stmt.getThen(), .unused, .r_value); + const then_body = try transStmt(c, scope, stmt.getThen(), .unused); const else_body = if (stmt.getElse()) |expr| - try transStmt(c, scope, expr, .unused, .r_value) + try transStmt(c, scope, expr, .unused) else null; - return Node.@"if".create(c.arena, .{ .cond = cond, .then = then_body, .@"else" = else_body }); + return Tag.@"if".create(c.arena, .{ .cond = cond, .then = then_body, .@"else" = else_body }); } fn transWhileLoop( @@ -2081,19 +2041,19 @@ fn transWhileLoop( var cond_scope = Scope.Condition{ .base = .{ .parent = scope, - .id = .Condition, + .id = .condition, }, }; defer cond_scope.deinit(); const cond_expr = @ptrCast(*const clang.Expr, stmt.getCond()); - const cond = try transBoolExpr(c, &cond_scope.base, cond_expr, .used, .r_value); + const cond = try transBoolExpr(c, &cond_scope.base, cond_expr, .used); var loop_scope = Scope{ .parent = scope, - .id = .Loop, + .id = .loop, }; - const body = try transStmt(c, &loop_scope, stmt.getBody(), .unused, .r_value); - return Node.@"while".create(c.arena, .{ .cond = 
cond, .body = body, .cont_expr = null }); + const body = try transStmt(c, &loop_scope, stmt.getBody(), .unused); + return Tag.@"while".create(c.arena, .{ .cond = cond, .body = body, .cont_expr = null }); } fn transDoWhileLoop( @@ -2103,20 +2063,19 @@ fn transDoWhileLoop( ) TransError!Node { var loop_scope = Scope{ .parent = scope, - .id = .Loop, + .id = .loop, }; // if (!cond) break; - const if_node = try transCreateNodeIf(c); var cond_scope = Scope.Condition{ .base = .{ .parent = scope, - .id = .Condition, + .id = .condition, }, }; defer cond_scope.deinit(); - const cond = try transBoolExpr(c, &cond_scope.base, @ptrCast(*const clang.Expr, stmt.getCond()), .used, .r_value); - const if_not_break = try Node.if_not_break.create(c.arena, cond); + const cond = try transBoolExpr(c, &cond_scope.base, @ptrCast(*const clang.Expr, stmt.getCond()), .used); + const if_not_break = try Tag.if_not_break.create(c.arena, cond); const body_node = if (stmt.getBody().getStmtClass() == .CompoundStmtClass) blk: { // there's already a block in C, so we'll append our condition to it. @@ -2129,8 +2088,8 @@ fn transDoWhileLoop( // zig: b; // zig: if (!cond) break; // zig: } - const node = try transStmt(c, &loop_scope, stmt.getBody(), .unused, .r_value); - const block = node.castTag(.block); + const node = try transStmt(c, &loop_scope, stmt.getBody(), .unused); + const block = node.castTag(.block).?; block.data.stmts.len += 1; // This is safe since we reserve one extra space in Scope.Block.complete. 
block.data.stmts[block.data.stmts.len - 1] = if_not_break; break :blk node; @@ -2143,12 +2102,12 @@ fn transDoWhileLoop( // zig: a; // zig: if (!cond) break; // zig: } - const statements = try c.arena.create(Node, 2); - statements[0] = try transStmt(c, &loop_scope, stmt.getBody(), .unused, .r_value); + const statements = try c.arena.alloc(Node, 2); + statements[0] = try transStmt(c, &loop_scope, stmt.getBody(), .unused); statements[1] = if_not_break; - break :blk try Node.block.create(c.arena, .{ .label = null, .stmts = statements }); + break :blk try Tag.block.create(c.arena, .{ .label = null, .stmts = statements }); }; - return Node.while_true.create(c.arena, body_node); + return Tag.while_true.create(c.arena, body_node); } fn transForLoop( @@ -2158,7 +2117,7 @@ fn transForLoop( ) TransError!Node { var loop_scope = Scope{ .parent = scope, - .id = .Loop, + .id = .loop, }; var block_scope: ?Scope.Block = null; @@ -2167,29 +2126,29 @@ fn transForLoop( if (stmt.getInit()) |init| { block_scope = try Scope.Block.init(c, scope, false); loop_scope.parent = &block_scope.?.base; - const init_node = try transStmt(c, &block_scope.?.base, init, .unused, .r_value); + const init_node = try transStmt(c, &block_scope.?.base, init, .unused); try block_scope.?.statements.append(init_node); } var cond_scope = Scope.Condition{ .base = .{ .parent = &loop_scope, - .id = .Condition, + .id = .condition, }, }; defer cond_scope.deinit(); const cond = if (stmt.getCond()) |cond| - try transBoolExpr(c, &cond_scope.base, cond, .used, .r_value) + try transBoolExpr(c, &cond_scope.base, cond, .used) else - Node.true_literal.init(); + Tag.true_literal.init(); const cont_expr = if (stmt.getInc()) |incr| - try transExpr(c, &cond_scope.base, incr, .unused, .r_value) + try transExpr(c, &cond_scope.base, incr, .unused) else null; - const body = try transStmt(c, &loop_scope, stmt.getBody(), .unused, .r_value); - const while_node = try Node.@"while".create(c.arena, .{ .cond = cond, .body = body, 
.cont_expr = cont_expr }); + const body = try transStmt(c, &loop_scope, stmt.getBody(), .unused); + const while_node = try Tag.@"while".create(c.arena, .{ .cond = cond, .body = body, .cont_expr = cont_expr }); if (block_scope) |*bs| { try bs.statements.append(while_node); return try bs.complete(c); @@ -2206,13 +2165,14 @@ fn transSwitch( var cond_scope = Scope.Condition{ .base = .{ .parent = scope, - .id = .Condition, + .id = .condition, }, }; defer cond_scope.deinit(); - const switch_expr = try transExpr(c, &cond_scope.base, stmt.getCond(), .used, .r_value); + const switch_expr = try transExpr(c, &cond_scope.base, stmt.getCond(), .used); const switch_node = try c.arena.create(ast.Payload.Switch); switch_node.* = .{ + .base = .{ .tag = .@"switch" }, .data = .{ .cond = switch_expr, .cases = undefined, // set later @@ -2221,7 +2181,7 @@ fn transSwitch( var switch_scope = Scope.Switch{ .base = .{ - .id = .Switch, + .id = .@"switch", .parent = scope, }, .cases = std.ArrayList(Node).init(c.gpa), @@ -2229,11 +2189,7 @@ fn transSwitch( .default_label = null, .switch_label = null, }; - defer { - switch_node.data.cases = try c.arena.dupe(Node, switch_scope.cases.items); - switch_node.data.default = switch_scope.switch_label; - switch_scope.cases.deinit(); - } + defer switch_scope.cases.deinit(); // tmp block that all statements will go before being picked up by a case or default var block_scope = try Scope.Block.init(c, &switch_scope.base, false); @@ -2246,7 +2202,7 @@ fn transSwitch( switch_scope.pending_block = try Scope.Block.init(c, scope, false); try switch_scope.pending_block.statements.append(Node.initPayload(&switch_node.base)); - const last = try transStmt(c, &block_scope.base, stmt.getBody(), .unused, .r_value); + const last = try transStmt(c, &block_scope.base, stmt.getBody(), .unused); // take all pending statements const last_block_stmts = last.castTag(.block).?.data.stmts; @@ -2264,13 +2220,14 @@ fn transSwitch( switch_scope.pending_block.label = l; } if 
(switch_scope.default_label == null) { - const else_prong = try Node.switch_else.create( + const else_prong = try Tag.switch_else.create( c.arena, - try Node.@"break".create(c.arena, switch_scope.switch_label.?), + try Tag.@"break".create(c.arena, switch_scope.switch_label.?), ); - switch_scope.cases.append(else_prong); + try switch_scope.cases.append(else_prong); } + switch_node.data.cases = try c.arena.dupe(Node, switch_scope.cases.items); const result_node = try switch_scope.pending_block.complete(c); switch_scope.pending_block.deinit(); return result_node; @@ -2286,18 +2243,18 @@ fn transCase( const label = try block_scope.makeMangledName(c, "case"); const expr = if (stmt.getRHS()) |rhs| blk: { - const lhs_node = try transExpr(c, scope, stmt.getLHS(), .used, .r_value); - const rhs_node = try transExpr(c, scope, rhs, .used, .r_value); + const lhs_node = try transExpr(c, scope, stmt.getLHS(), .used); + const rhs_node = try transExpr(c, scope, rhs, .used); - break :blk Node.ellipsis3.create(c.arena, .{ .lhs = lhs_node, .rhs = rhs_node }); + break :blk try Tag.ellipsis3.create(c.arena, .{ .lhs = lhs_node, .rhs = rhs_node }); } else - try transExpr(c, scope, stmt.getLHS(), .used, .r_value); + try transExpr(c, scope, stmt.getLHS(), .used); - const switch_prong = try Node.switch_prong.create( - c.arena, - try Node.@"break".create(c.arena, label), - ); - switch_scope.cases.append(switch_prong); + const switch_prong = try Tag.switch_prong.create(c.arena, .{ + .lhs = expr, + .rhs = try Tag.@"break".create(c.arena, label), + }); + try switch_scope.cases.append(switch_prong); switch_scope.pending_block.label = label; @@ -2311,7 +2268,7 @@ fn transCase( try switch_scope.pending_block.statements.append(pending_node); - return transStmt(c, scope, stmt.getSubStmt(), .unused, .r_value); + return transStmt(c, scope, stmt.getSubStmt(), .unused); } fn transDefault( @@ -2323,12 +2280,12 @@ fn transDefault( const switch_scope = scope.getSwitch(); switch_scope.default_label = try 
block_scope.makeMangledName(c, "default"); - const else_prong = try Node.switch_else.create( + const else_prong = try Tag.switch_else.create( c.arena, - try Node.@"break".create(c.arena, switch_scope.default_label.?), + try Tag.@"break".create(c.arena, switch_scope.default_label.?), ); - switch_scope.cases.append(else_prong); - switch_scope.pending_block.label = try appendIdentifier(c, switch_scope.default_label.?); + try switch_scope.cases.append(else_prong); + switch_scope.pending_block.label = switch_scope.default_label.?; // take all pending statements try switch_scope.pending_block.statements.appendSlice(block_scope.statements.items); @@ -2339,7 +2296,7 @@ fn transDefault( switch_scope.pending_block = try Scope.Block.init(c, scope, false); try switch_scope.pending_block.statements.append(pending_node); - return transStmt(c, scope, stmt.getSubStmt(), .unused, .r_value); + return transStmt(c, scope, stmt.getSubStmt(), .unused); } fn transConstantExpr(c: *Context, scope: *Scope, expr: *const clang.Expr, used: ResultUsed) TransError!Node { @@ -2352,7 +2309,7 @@ fn transConstantExpr(c: *Context, scope: *Scope, expr: *const clang.Expr, used: // See comment in `transIntegerLiteral` for why this code is here. 
// @as(T, x) const expr_base = @ptrCast(*const clang.Expr, expr); - const as_node = try Node.as.create(c.arena, .{ + const as_node = try Tag.as.create(c.arena, .{ .lhs = try transQualType(c, expr_base.getType(), expr_base.getBeginLoc()), .rhs = try transCreateNodeAPInt(c, result.Val.getInt()), }); @@ -2369,10 +2326,10 @@ fn transPredefinedExpr(c: *Context, scope: *Scope, expr: *const clang.Predefined } fn transCreateCharLitNode(c: *Context, narrow: bool, val: u32) TransError!Node { - return Node.char_literal.create(c.arena, if (narrow) - try std.fmt.bufPrint(c.arena, "'{}'", .{std.zig.fmtEscapes(&.{@intCast(u8, val)})}) + return Tag.char_literal.create(c.arena, if (narrow) + try std.fmt.allocPrint(c.arena, "'{s}'", .{std.zig.fmtEscapes(&.{@intCast(u8, val)})}) else - try std.fmt.bufPrint(c.arena, "'\\u{{{x}}}'", .{val})); + try std.fmt.allocPrint(c.arena, "'\\u{{{x}}}'", .{val})); } fn transCharLiteral( @@ -2398,7 +2355,7 @@ fn transCharLiteral( // See comment in `transIntegerLiteral` for why this code is here. 
// @as(T, x) const expr_base = @ptrCast(*const clang.Expr, stmt); - const as_node = Node.as.create(c.arena, .{ + const as_node = try Tag.as.create(c.arena, .{ .lhs = try transQualType(c, expr_base.getType(), expr_base.getBeginLoc()), .rhs = int_lit_node, }); @@ -2416,12 +2373,12 @@ fn transStmtExpr(c: *Context, scope: *Scope, stmt: *const clang.StmtExpr, used: var it = comp.body_begin(); const end_it = comp.body_end(); while (it != end_it - 1) : (it += 1) { - const result = try transStmt(rp, &block_scope.base, it[0], .unused, .r_value); + const result = try transStmt(c, &block_scope.base, it[0], .unused); try block_scope.statements.append(result); } - const break_node = try Node.break_val.create(c.arena, .{ - .label = block_scope.label, - .val = try transStmt(c, &block_scope.base, it[0], .used, .r_value), + const break_node = try Tag.break_val.create(c.arena, .{ + .label = block_scope.label, + .val = try transStmt(c, &block_scope.base, it[0], .used), }); try block_scope.statements.append(break_node); const res = try block_scope.complete(c); @@ -2429,10 +2386,10 @@ fn transStmtExpr(c: *Context, scope: *Scope, stmt: *const clang.StmtExpr, used: } fn transMemberExpr(c: *Context, scope: *Scope, stmt: *const clang.MemberExpr, result_used: ResultUsed) TransError!Node { - var container_node = try transExpr(c, scope, stmt.getBase(), .used, .r_value); + var container_node = try transExpr(c, scope, stmt.getBase(), .used); if (stmt.isArrow()) { - container_node = try Node.deref.create(c.arena, container_node); + container_node = try Tag.deref.create(c.arena, container_node); } const member_decl = stmt.getMemberDecl(); @@ -2450,9 +2407,9 @@ fn transMemberExpr(c: *Context, scope: *Scope, stmt: *const clang.MemberExpr, re const decl = @ptrCast(*const clang.NamedDecl, member_decl); break :blk try c.str(decl.getName_bytes_begin()); }; - const ident = try Node.identifier.create(c.arena, name); + const ident = try Tag.identifier.create(c.arena, name); - const node = try 
Node.field_access.create(c.arena, .{ .lhs = container_node, .rhs = ident}); + const node = try Tag.field_access.create(c.arena, .{ .lhs = container_node, .rhs = ident }); return maybeSuppressResult(c, scope, result_used, node); } @@ -2469,7 +2426,7 @@ fn transArrayAccess(c: *Context, scope: *Scope, stmt: *const clang.ArraySubscrip } } - const container_node = try transExpr(c, scope, base_stmt, .used, .r_value); + const container_node = try transExpr(c, scope, base_stmt, .used); // cast if the index is long long or signed const subscr_expr = stmt.getIdx(); @@ -2477,14 +2434,17 @@ fn transArrayAccess(c: *Context, scope: *Scope, stmt: *const clang.ArraySubscrip const is_longlong = cIsLongLongInteger(qt); const is_signed = cIsSignedInteger(qt); - - const node = try Node.array_access.create(c.arena, .{ .lhs = container_node, .rhs = if (is_longlong or is_signed) blk: { - const cast_node = try c.createBuiltinCall("@intCast", 2); + const rhs = if (is_longlong or is_signed) blk: { // check if long long first so that signed long long doesn't just become unsigned long long - var typeid_node = if (is_longlong) try transCreateNodeIdentifier(c, "usize") else try transQualTypeIntWidthOf(c, qt, false); - break :blk try Node.int_cast.create(c.arena, .{ .lhs = typeid_node, .rhs = try transExpr(c, scope, subscr_expr, .used, .r_value)}); + var typeid_node = if (is_longlong) try Tag.identifier.create(c.arena, "usize") else try transQualTypeIntWidthOf(c, qt, false); + break :blk try Tag.int_cast.create(c.arena, .{ .lhs = typeid_node, .rhs = try transExpr(c, scope, subscr_expr, .used) }); } else - try transExpr(c, scope, subscr_expr, .used, .r_value)}); + try transExpr(c, scope, subscr_expr, .used); + + const node = try Tag.array_access.create(c.arena, .{ + .lhs = container_node, + .rhs = rhs, + }); return maybeSuppressResult(c, scope, result_used, node); } @@ -2522,23 +2482,23 @@ fn cIsFunctionDeclRef(expr: *const clang.Expr) bool { fn transCallExpr(c: *Context, scope: *Scope, stmt: 
*const clang.CallExpr, result_used: ResultUsed) TransError!Node { const callee = stmt.getCallee(); - var raw_fn_expr = try transExpr(c, scope, callee, .used, .r_value); + var raw_fn_expr = try transExpr(c, scope, callee, .used); var is_ptr = false; const fn_ty = qualTypeGetFnProto(callee.getType(), &is_ptr); const fn_expr = if (is_ptr and fn_ty != null and !cIsFunctionDeclRef(callee)) - try transCreateNodeUnwrapNull(rp.c, raw_fn_expr) + try Tag.unwrap.create(c.arena, raw_fn_expr) else raw_fn_expr; const num_args = stmt.getNumArgs(); - const call_params = try c.arena.alloc(Node, num_args); + const args = try c.arena.alloc(Node, num_args); - const args = stmt.getArgs(); + const c_args = stmt.getArgs(); var i: usize = 0; while (i < num_args) : (i += 1) { - var call_param = try transExpr(c, scope, args[i], .used, .r_value); + var arg = try transExpr(c, scope, c_args[i], .used); // In C the result type of a boolean expression is int. If this result is passed as // an argument to a function whose parameter is also int, there is no cast. 
Therefore @@ -2549,17 +2509,17 @@ fn transCallExpr(c: *Context, scope: *Scope, stmt: *const clang.CallExpr, result const param_count = fn_proto.getNumParams(); if (i < param_count) { const param_qt = fn_proto.getParamType(@intCast(c_uint, i)); - if (isBoolRes(call_param) and cIsNativeInt(param_qt)) { - call_param = try Node.bool_to_int.create(c.arena, call_param); + if (isBoolRes(arg) and cIsNativeInt(param_qt)) { + arg = try Tag.bool_to_int.create(c.arena, arg); } } }, else => {}, } } - call_params[i] = call_param; + args[i] = arg; } - const node = try Node.call.create(c.arena, .{ .lhs = fn_expr, .args = call_params }); + const node = try Tag.call.create(c.arena, .{ .lhs = fn_expr, .args = args }); if (fn_ty) |ty| { const canon = ty.getReturnType().getCanonicalType(); const ret_ty = canon.getTypePtr(); @@ -2609,17 +2569,17 @@ fn transUnaryExprOrTypeTraitExpr( result_used: ResultUsed, ) TransError!Node { const loc = stmt.getBeginLoc(); - const type_node = try transQualType(rp, stmt.getTypeOfArgument(), loc); + const type_node = try transQualType(c, stmt.getTypeOfArgument(), loc); const kind = stmt.getKind(); switch (kind) { - .SizeOf => return Node.sizeof.create(c.arena, type_node), - .AlignOf => return Node.alignof.create(c.arena, type_node), + .SizeOf => return Tag.sizeof.create(c.arena, type_node), + .AlignOf => return Tag.alignof.create(c.arena, type_node), .PreferredAlignOf, .VecStep, .OpenMPRequiredSimdAlign, - => return revertAndWarn( - rp, + => return fail( + c, error.UnsupportedTranslation, loc, "Unsupported type trait kind {}", @@ -2642,53 +2602,54 @@ fn transUnaryOperator(c: *Context, scope: *Scope, stmt: *const clang.UnaryOperat const op_expr = stmt.getSubExpr(); switch (stmt.getOpcode()) { .PostInc => if (qualTypeHasWrappingOverflow(stmt.getType())) - return transCreatePostCrement(c, scope, stmt, .assign_add_wrap, used) + return transCreatePostCrement(c, scope, stmt, .add_wrap_assign, used) else - return transCreatePostCrement(c, scope, stmt, 
.assign_add, used), + return transCreatePostCrement(c, scope, stmt, .add_assign, used), .PostDec => if (qualTypeHasWrappingOverflow(stmt.getType())) - return transCreatePostCrement(c, scope, stmt, .assign_sub_wrap, used) + return transCreatePostCrement(c, scope, stmt, .sub_wrap_assign, used) else - return transCreatePostCrement(c, scope, stmt, .assign_sub, used), + return transCreatePostCrement(c, scope, stmt, .sub_assign, used), .PreInc => if (qualTypeHasWrappingOverflow(stmt.getType())) - return transCreatePreCrement(c, scope, stmt, .assign_add_wrap, used) + return transCreatePreCrement(c, scope, stmt, .add_wrap_assign, used) else - return transCreatePreCrement(c, scope, stmt, .assign_add, used), + return transCreatePreCrement(c, scope, stmt, .add_assign, used), .PreDec => if (qualTypeHasWrappingOverflow(stmt.getType())) - return transCreatePreCrement(c, scope, stmt, .assign_sub_wrap, used) + return transCreatePreCrement(c, scope, stmt, .sub_wrap_assign, used) else - return transCreatePreCrement(c, scope, stmt, .assign_sub, used), + return transCreatePreCrement(c, scope, stmt, .sub_assign, used), .AddrOf => { if (cIsFunctionDeclRef(op_expr)) { - return transExpr(rp, scope, op_expr, used, .r_value); + return transExpr(c, scope, op_expr, used); } - return Node.address_of.create(c.arena, try transExpr(c, scope, op_expr, used, .r_value)); + return Tag.address_of.create(c.arena, try transExpr(c, scope, op_expr, used)); }, .Deref => { - const node = try transExpr(c, scope, op_expr, used, .r_value); + const node = try transExpr(c, scope, op_expr, used); var is_ptr = false; const fn_ty = qualTypeGetFnProto(op_expr.getType(), &is_ptr); if (fn_ty != null and is_ptr) return node; - return Node.unwrap_deref.create(c.arena, node); + const unwrapped = try Tag.unwrap.create(c.arena, node); + return Tag.deref.create(c.arena, unwrapped); }, - .Plus => return transExpr(c, scope, op_expr, used, .r_value), + .Plus => return transExpr(c, scope, op_expr, used), .Minus => { if 
(!qualTypeHasWrappingOverflow(op_expr.getType())) { - return Node.negate.create(c.arena, try transExpr(c, scope, op_expr, .used, .r_value)); + return Tag.negate.create(c.arena, try transExpr(c, scope, op_expr, .used)); } else if (cIsUnsignedInteger(op_expr.getType())) { // use -% x for unsigned integers - return Node.negate_wrap.create(c.arena, try transExpr(c, scope, op_expr, .used, .r_value)); + return Tag.negate_wrap.create(c.arena, try transExpr(c, scope, op_expr, .used)); } else return fail(c, error.UnsupportedTranslation, stmt.getBeginLoc(), "C negation with non float non integer", .{}); }, .Not => { - return Node.bit_not.create(c.arena, try transExpr(c, scope, op_expr, .used, .r_value)); + return Tag.bit_not.create(c.arena, try transExpr(c, scope, op_expr, .used)); }, .LNot => { - return Node.not.create(c.arena, try transExpr(c, scope, op_expr, .used, .r_value)); + return Tag.not.create(c.arena, try transExpr(c, scope, op_expr, .used)); }, .Extension => { - return transExpr(c, scope, stmt.getSubExpr(), used, .l_value); + return transExpr(c, scope, stmt.getSubExpr(), used); }, else => return fail(c, error.UnsupportedTranslation, stmt.getBeginLoc(), "unsupported C translation {}", .{stmt.getOpcode()}), } @@ -2698,7 +2659,7 @@ fn transCreatePreCrement( c: *Context, scope: *Scope, stmt: *const clang.UnaryOperator, - op: Node.Tag, + op: Tag, used: ResultUsed, ) TransError!Node { const op_expr = stmt.getSubExpr(); @@ -2707,8 +2668,8 @@ fn transCreatePreCrement( // common case // c: ++expr // zig: expr += 1 - const lhs = try transExpr(c, scope, op_expr, .used, .r_value); - const rhs = Node.one_literal.init(); + const lhs = try transExpr(c, scope, op_expr, .used); + const rhs = Tag.one_literal.init(); return transCreateNodeInfixOp(c, scope, op, lhs, rhs, .used); } // worst case @@ -2722,17 +2683,17 @@ fn transCreatePreCrement( defer block_scope.deinit(); const ref = try block_scope.makeMangledName(c, "ref"); - const expr = try transExpr(c, scope, op_expr, .used, 
.r_value); - const addr_of = try Node.address_of.create(c.arena, expr); - const ref_decl = try Node.var_simple.create(c.arena, .{ .name = ref, .init = addr_of}); + const expr = try transExpr(c, scope, op_expr, .used); + const addr_of = try Tag.address_of.create(c.arena, expr); + const ref_decl = try Tag.var_simple.create(c.arena, .{ .name = ref, .init = addr_of }); try block_scope.statements.append(ref_decl); - const lhs_node = try Node.identifier.create(c.arena, ref); - const ref_node = try Node.deref.create(c.arena, lhs_node); - const node = try transCreateNodeInfixOp(c, scope, op, ref_node, Node.one_literal.init(), .used); + const lhs_node = try Tag.identifier.create(c.arena, ref); + const ref_node = try Tag.deref.create(c.arena, lhs_node); + const node = try transCreateNodeInfixOp(c, scope, op, ref_node, Tag.one_literal.init(), .used); try block_scope.statements.append(node); - const break_node = try Node.break_val.create(c.arena, .{ + const break_node = try Tag.break_val.create(c.arena, .{ .label = block_scope.label, .val = ref_node, }); @@ -2744,7 +2705,7 @@ fn transCreatePostCrement( c: *Context, scope: *Scope, stmt: *const clang.UnaryOperator, - op: Node.Tag, + op: Tag, used: ResultUsed, ) TransError!Node { const op_expr = stmt.getSubExpr(); @@ -2753,8 +2714,8 @@ fn transCreatePostCrement( // common case // c: expr++ // zig: expr += 1 - const lhs = try transExpr(c, scope, op_expr, .used, .r_value); - const rhs = Node.one_literal.init(); + const lhs = try transExpr(c, scope, op_expr, .used); + const rhs = Tag.one_literal.init(); return transCreateNodeInfixOp(c, scope, op, lhs, rhs, .used); } // worst case @@ -2769,24 +2730,24 @@ fn transCreatePostCrement( defer block_scope.deinit(); const ref = try block_scope.makeMangledName(c, "ref"); - const expr = try transExpr(c, scope, op_expr, .used, .r_value); - const addr_of = try Node.address_of.create(c.arena, expr); - const ref_decl = try Node.var_simple.create(c.arena, .{ .name = ref, .init = addr_of}); + const 
expr = try transExpr(c, scope, op_expr, .used); + const addr_of = try Tag.address_of.create(c.arena, expr); + const ref_decl = try Tag.var_simple.create(c.arena, .{ .name = ref, .init = addr_of }); try block_scope.statements.append(ref_decl); - const lhs_node = try Node.identifier.create(c.arena, ref); - const ref_node = try Node.deref.create(c.arena, lhs_node); + const lhs_node = try Tag.identifier.create(c.arena, ref); + const ref_node = try Tag.deref.create(c.arena, lhs_node); const tmp = try block_scope.makeMangledName(c, "tmp"); - const tmp_decl = try Node.var_simple.create(c.arena, .{ .name = tmp, .init = ref_node}); + const tmp_decl = try Tag.var_simple.create(c.arena, .{ .name = tmp, .init = ref_node }); try block_scope.statements.append(tmp_decl); - const node = try transCreateNodeInfixOp(c, scope, op, ref_node, Node.one_literal.init(), .used); + const node = try transCreateNodeInfixOp(c, scope, op, ref_node, Tag.one_literal.init(), .used); try block_scope.statements.append(node); - const break_node = try Node.break_val.create(c.arena, .{ + const break_node = try Tag.break_val.create(c.arena, .{ .label = block_scope.label, - .val = try Node.identifier.create(c.arena, tmp), + .val = try Tag.identifier.create(c.arena, tmp), }); try block_scope.statements.append(break_node); return block_scope.complete(c); @@ -2795,26 +2756,26 @@ fn transCreatePostCrement( fn transCompoundAssignOperator(c: *Context, scope: *Scope, stmt: *const clang.CompoundAssignOperator, used: ResultUsed) TransError!Node { switch (stmt.getOpcode()) { .MulAssign => if (qualTypeHasWrappingOverflow(stmt.getType())) - return transCreateCompoundAssign(c, scope, stmt, .assign_mul_wrap, used) + return transCreateCompoundAssign(c, scope, stmt, .mul_wrap_assign, used) else - return transCreateCompoundAssign(c, scope, stmt, .assign_mul, used), + return transCreateCompoundAssign(c, scope, stmt, .mul_assign, used), .AddAssign => if (qualTypeHasWrappingOverflow(stmt.getType())) - return 
transCreateCompoundAssign(c, scope, stmt, .assign_add_wrap, used) + return transCreateCompoundAssign(c, scope, stmt, .add_wrap_assign, used) else - return transCreateCompoundAssign(c, scope, stmt, .assign_add, used), + return transCreateCompoundAssign(c, scope, stmt, .add_assign, used), .SubAssign => if (qualTypeHasWrappingOverflow(stmt.getType())) - return transCreateCompoundAssign(c, scope, stmt, .assign_sub_wrap, used) + return transCreateCompoundAssign(c, scope, stmt, .sub_wrap_assign, used) else - return transCreateCompoundAssign(c, scope, stmt, .assign_sub, used), - .DivAssign => return transCreateCompoundAssign(c, scope, stmt, .assign_div, used), - .RemAssign => return transCreateCompoundAssign(c, scope, stmt, .assign_mod, used), - .ShlAssign => return transCreateCompoundAssign(c, scope, stmt, .assign_shl, used), - .ShrAssign => return transCreateCompoundAssign(c, scope, stmt, .assign_shr, used), - .AndAssign => return transCreateCompoundAssign(c, scope, stmt, .assign_bit_and, used), - .XorAssign => return transCreateCompoundAssign(c, scope, stmt, .assign_bit_xor, used), - .OrAssign => return transCreateCompoundAssign(c, scope, stmt, .assign_bit_or, used), + return transCreateCompoundAssign(c, scope, stmt, .sub_assign, used), + .DivAssign => return transCreateCompoundAssign(c, scope, stmt, .div_assign, used), + .RemAssign => return transCreateCompoundAssign(c, scope, stmt, .mod_assign, used), + .ShlAssign => return transCreateCompoundAssign(c, scope, stmt, .shl_assign, used), + .ShrAssign => return transCreateCompoundAssign(c, scope, stmt, .shr_assign, used), + .AndAssign => return transCreateCompoundAssign(c, scope, stmt, .bit_and_assign, used), + .XorAssign => return transCreateCompoundAssign(c, scope, stmt, .bit_xor_assign, used), + .OrAssign => return transCreateCompoundAssign(c, scope, stmt, .bit_or_assign, used), else => return fail( - rp, + c, error.UnsupportedTranslation, stmt.getBeginLoc(), "unsupported C translation {}", @@ -2827,12 +2788,12 @@ fn 
transCreateCompoundAssign( c: *Context, scope: *Scope, stmt: *const clang.CompoundAssignOperator, - op: Node.Tag, + op: Tag, used: ResultUsed, ) TransError!Node { - const is_shift = op == .assign_shl or op == .assign_shr; - const is_div = op == .assign_div; - const is_mod = op == .assign_mod; + const is_shift = op == .shl_assign or op == .shr_assign; + const is_div = op == .div_assign; + const is_mod = op == .mod_assign; const lhs = stmt.getLHS(); const rhs = stmt.getRHS(); const loc = stmt.getBeginLoc(); @@ -2849,21 +2810,21 @@ fn transCreateCompoundAssign( // c: lhs += rhs // zig: lhs += rhs if ((is_mod or is_div) and is_signed) { - const lhs_node = try transExpr(c, scope, lhs, .used, .l_value); - const rhs_node = try transExpr(c, scope, rhs, .used, .r_value); + const lhs_node = try transExpr(c, scope, lhs, .used); + const rhs_node = try transExpr(c, scope, rhs, .used); const builtin = if (is_mod) - try Node.rem.create(c.arena, .{ .lhs = lhs_node, .rhs = rhs_node }) + try Tag.rem.create(c.arena, .{ .lhs = lhs_node, .rhs = rhs_node }) else - try Node.divTrunc.create(c.arena, .{ .lhs = lhs_node, .rhs = rhs_node }); + try Tag.div_trunc.create(c.arena, .{ .lhs = lhs_node, .rhs = rhs_node }); return transCreateNodeInfixOp(c, scope, .assign, lhs_node, builtin, .used); } - const lhs_node = try transExpr(c, scope, lhs, .used, .l_value); + const lhs_node = try transExpr(c, scope, lhs, .used); var rhs_node = if (is_shift or requires_int_cast) - try transExprCoercing(c, scope, rhs, .used, .r_value) + try transExprCoercing(c, scope, rhs, .used) else - try transExpr(c, scope, rhs, .used, .r_value); + try transExpr(c, scope, rhs, .used); if (is_shift or requires_int_cast) { // @intCast(rhs) @@ -2871,11 +2832,11 @@ fn transCreateCompoundAssign( try qualTypeToLog2IntRef(c, getExprQualType(c, rhs), loc) else try transQualType(c, getExprQualType(c, lhs), loc); - - rhs_node = try Node.int_cast.create(c.arena, .{ .lhs = cast_to_type, .rhs = rhs_node }); + + rhs_node = try 
Tag.int_cast.create(c.arena, .{ .lhs = cast_to_type, .rhs = rhs_node }); } - return transCreateNodeInfixOp(c, scope, assign_op, lhs_node, rhs_node, .used); + return transCreateNodeInfixOp(c, scope, op, lhs_node, rhs_node, .used); } // worst case // c: lhs += rhs @@ -2888,25 +2849,25 @@ fn transCreateCompoundAssign( defer block_scope.deinit(); const ref = try block_scope.makeMangledName(c, "ref"); - const expr = try transExpr(c, scope, op_expr, .used, .r_value); - const addr_of = try Node.address_of.create(c.arena, expr); - const ref_decl = try Node.var_simple.create(c.arena, .{ .name = ref, .init = addr_of}); + const expr = try transExpr(c, scope, lhs, .used); + const addr_of = try Tag.address_of.create(c.arena, expr); + const ref_decl = try Tag.var_simple.create(c.arena, .{ .name = ref, .init = addr_of }); try block_scope.statements.append(ref_decl); - const lhs_node = try Node.identifier.create(c.arena, ref); - const ref_node = try Node.deref.create(c.arena, lhs_node); + const lhs_node = try Tag.identifier.create(c.arena, ref); + const ref_node = try Tag.deref.create(c.arena, lhs_node); if ((is_mod or is_div) and is_signed) { - const rhs_node = try transExpr(c, scope, rhs, .used, .r_value); + const rhs_node = try transExpr(c, scope, rhs, .used); const builtin = if (is_mod) - try Node.rem.create(c.arena, .{ .lhs = lhs_node, .rhs = rhs_node }) + try Tag.rem.create(c.arena, .{ .lhs = lhs_node, .rhs = rhs_node }) else - try Node.divTrunc.create(c.arena, .{ .lhs = lhs_node, .rhs = rhs_node }); + try Tag.div_trunc.create(c.arena, .{ .lhs = lhs_node, .rhs = rhs_node }); const assign = try transCreateNodeInfixOp(c, scope, .assign, lhs_node, builtin, .used); try block_scope.statements.append(assign); } else { - var rhs_node = try transExpr(c, scope, rhs, .used, .r_value); + var rhs_node = try transExpr(c, scope, rhs, .used); if (is_shift or requires_int_cast) { // @intCast(rhs) @@ -2914,15 +2875,15 @@ fn transCreateCompoundAssign( try qualTypeToLog2IntRef(c, 
getExprQualType(c, rhs), loc) else try transQualType(c, getExprQualType(c, lhs), loc); - - rhs_node = try Node.int_cast.create(c.arena, .{ .lhs = cast_to_type, .rhs = rhs_node }); + + rhs_node = try Tag.int_cast.create(c.arena, .{ .lhs = cast_to_type, .rhs = rhs_node }); } const assign = try transCreateNodeInfixOp(c, scope, op, ref_node, rhs_node, .used); try block_scope.statements.append(assign); } - const break_node = try Node.break_val.create(c.arena, .{ + const break_node = try Tag.break_val.create(c.arena, .{ .label = block_scope.label, .val = ref_node, }); @@ -2941,7 +2902,7 @@ fn transCPtrCast( const child_type = ty.getPointeeType(); const src_ty = src_type.getTypePtr(); const src_child_type = src_ty.getPointeeType(); - const dst_type = try transType(c, ty, loc); + const dst_type_node = try transType(c, ty, loc); if ((src_child_type.isConstQualified() and !child_type.isConstQualified()) or @@ -2949,8 +2910,8 @@ fn transCPtrCast( !child_type.isVolatileQualified())) { // Casting away const or volatile requires us to use @intToPtr - const ptr_to_int = try Node.ptr_to_int.create(c.arena, expr); - const int_to_ptr = try Node.int_to_ptr.create(c.arena, .{ .lhs = dst_type, .rhs = ptr_to_int }); + const ptr_to_int = try Tag.ptr_to_int.create(c.arena, expr); + const int_to_ptr = try Tag.int_to_ptr.create(c.arena, .{ .lhs = dst_type_node, .rhs = ptr_to_int }); return int_to_ptr; } else { // Implicit downcasting from higher to lower alignment values is forbidden, @@ -2963,17 +2924,17 @@ fn transCPtrCast( expr else blk: { const child_type_node = try transQualType(c, child_type, loc); - const alignof = try Node.alignof.create(c.arena, child_type_node); - const align_cast = try Node.align_cast.create(c.arena, .{ .lhs = alignof, .rhs = expr }); + const alignof = try Tag.alignof.create(c.arena, child_type_node); + const align_cast = try Tag.align_cast.create(c.arena, .{ .lhs = alignof, .rhs = expr }); break :blk align_cast; }; - return Node.ptr_cast.create(c.arena, .{ .lhs 
= dst_type, .rhs = rhs }); + return Tag.ptr_cast.create(c.arena, .{ .lhs = dst_type_node, .rhs = rhs }); } } fn transBreak(c: *Context, scope: *Scope) TransError!Node { const break_scope = scope.getBreakableScope(); - const label_text: ?[]const u8 = if (break_scope.id == .Switch) blk: { + const label_text: ?[]const u8 = if (break_scope.id == .@"switch") blk: { const swtch = @fieldParentPtr(Scope.Switch, "base", break_scope); const block_scope = try scope.findBlockScope(c); swtch.switch_label = try block_scope.makeMangledName(c, "switch"); @@ -2981,20 +2942,20 @@ fn transBreak(c: *Context, scope: *Scope) TransError!Node { } else null; - return Node.@"break".create(c.arena, label_text); + return Tag.@"break".create(c.arena, label_text); } fn transFloatingLiteral(c: *Context, scope: *Scope, stmt: *const clang.FloatingLiteral, used: ResultUsed) TransError!Node { // TODO use something more accurate const dbl = stmt.getValueAsApproximateDouble(); const node = try transCreateNodeNumber(c, dbl); - return maybeSuppressResult(c, scope, used, &node.base); + return maybeSuppressResult(c, scope, used, node); } fn transBinaryConditionalOperator(c: *Context, scope: *Scope, stmt: *const clang.BinaryConditionalOperator, used: ResultUsed) TransError!Node { // GNU extension of the ternary operator where the middle expression is // omitted, the conditition itself is returned if it evaluates to true - const qt = @ptrCast(*const clang.Stmt, stmt).getType(); + const qt = @ptrCast(*const clang.Expr, stmt).getType(); const res_is_bool = qualTypeIsBoolean(qt); const casted_stmt = @ptrCast(*const clang.AbstractConditionalOperator, stmt); const cond_expr = casted_stmt.getCond(); @@ -3010,26 +2971,33 @@ fn transBinaryConditionalOperator(c: *Context, scope: *Scope, stmt: *const clang defer block_scope.deinit(); const mangled_name = try block_scope.makeMangledName(c, "cond_temp"); - const init_node = try transExpr(c, &block_scope.base, cond_expr, .used, .r_value); - const ref_decl = try 
Node.var_simple.create(c.arena, .{ .name = mangled_name, .init = init_node}); + const init_node = try transExpr(c, &block_scope.base, cond_expr, .used); + const ref_decl = try Tag.var_simple.create(c.arena, .{ .name = mangled_name, .init = init_node }); try block_scope.statements.append(ref_decl); + var cond_scope = Scope.Condition{ + .base = .{ + .parent = &block_scope.base, + .id = .condition, + }, + }; + defer cond_scope.deinit(); const cond_node = try transBoolExpr(c, &cond_scope.base, cond_expr, .used); - var then_body = try Node.identifier.create(c.arena, mangled_name); + var then_body = try Tag.identifier.create(c.arena, mangled_name); if (!res_is_bool and isBoolRes(init_node)) { - then_body = try Node.bool_to_int.create(c.arena, then_body); + then_body = try Tag.bool_to_int.create(c.arena, then_body); } - var else_body = try transExpr(c, &block_scope.base, false_expr, .used, .r_value); + var else_body = try transExpr(c, &block_scope.base, false_expr, .used); if (!res_is_bool and isBoolRes(else_body)) { - else_body = try Node.bool_to_int.create(c.arena, else_body); + else_body = try Tag.bool_to_int.create(c.arena, else_body); } - const if_node = try Node.@"if".create(c.arena, .{ - .cond = cond, + const if_node = try Tag.@"if".create(c.arena, .{ + .cond = cond_node, .then = then_body, .@"else" = else_body, }); - const break_node = try Node.break_val.create(c.arena, .{ + const break_node = try Tag.break_val.create(c.arena, .{ .label = block_scope.label, .val = if_node, }); @@ -3042,31 +3010,31 @@ fn transConditionalOperator(c: *Context, scope: *Scope, stmt: *const clang.Condi var cond_scope = Scope.Condition{ .base = .{ .parent = scope, - .id = .Condition, + .id = .condition, }, }; defer cond_scope.deinit(); - const qt = @ptrCast(*const clang.Stmt, stmt).getType(); + const qt = @ptrCast(*const clang.Expr, stmt).getType(); const res_is_bool = qualTypeIsBoolean(qt); const casted_stmt = @ptrCast(*const clang.AbstractConditionalOperator, stmt); const cond_expr = 
casted_stmt.getCond(); const true_expr = casted_stmt.getTrueExpr(); const false_expr = casted_stmt.getFalseExpr(); - const cond = try transBoolExpr(c, &cond_scope.base, cond_expr, .used, .r_value); + const cond = try transBoolExpr(c, &cond_scope.base, cond_expr, .used); - var then_body = try transExpr(c, scope, true_expr, .used, .r_value); + var then_body = try transExpr(c, scope, true_expr, .used); if (!res_is_bool and isBoolRes(then_body)) { - then_body = try Node.bool_to_int.create(c.arena, then_body); + then_body = try Tag.bool_to_int.create(c.arena, then_body); } - var else_body = try transExpr(c, scope, false_expr, .used, .r_value); + var else_body = try transExpr(c, scope, false_expr, .used); if (!res_is_bool and isBoolRes(else_body)) { - else_body = try Node.bool_to_int.create(c.arena, else_body); + else_body = try Tag.bool_to_int.create(c.arena, else_body); } - const if_node = try Node.@"if".create(c.arena, .{ + const if_node = try Tag.@"if".create(c.arena, .{ .cond = cond, .then = then_body, .@"else" = else_body, @@ -3081,7 +3049,7 @@ fn maybeSuppressResult( result: Node, ) TransError!Node { if (used == .used) return result; - return Node.ignore.create(c.arena, result); + return Tag.ignore.create(c.arena, result); } fn addTopLevelDecl(c: *Context, name: []const u8, decl_node: Node) !void { @@ -3100,19 +3068,19 @@ fn transQualTypeInitialized( const ty = qt.getTypePtr(); if (ty.getTypeClass() == .IncompleteArray) { const incomplete_array_ty = @ptrCast(*const clang.IncompleteArrayType, ty); - const elem_ty = incomplete_array_ty.getElementType().getTypePtr(); + const elem_ty = try transType(c, incomplete_array_ty.getElementType().getTypePtr(), source_loc); switch (decl_init.getStmtClass()) { .StringLiteralClass => { const string_lit = @ptrCast(*const clang.StringLiteral, decl_init); const string_lit_size = string_lit.getLength() + 1; // +1 for null terminator const array_size = @intCast(usize, string_lit_size); - return Node.array_type.create(c.arena, .{ .len 
= array_size, .elem_type = elem_ty }); + return Tag.array_type.create(c.arena, .{ .len = array_size, .elem_type = elem_ty }); }, .InitListExprClass => { const init_expr = @ptrCast(*const clang.InitListExpr, decl_init); const size = init_expr.getNumInits(); - return Node.array_type.create(c.arena, .{ .len = size, .elem_type = elem_ty }); + return Tag.array_type.create(c.arena, .{ .len = size, .elem_type = elem_ty }); }, else => {}, } @@ -3135,7 +3103,7 @@ fn transQualTypeIntWidthOf(c: *Context, ty: clang.QualType, is_signed: bool) Typ fn transTypeIntWidthOf(c: *Context, ty: *const clang.Type, is_signed: bool) TypeError!Node { assert(ty.getTypeClass() == .Builtin); const builtin_ty = @ptrCast(*const clang.BuiltinType, ty); - return Node.type.create(c.arena, switch (builtin_ty.getKind()) { + return Tag.type.create(c.arena, switch (builtin_ty.getKind()) { .Char_U, .Char_S, .UChar, .SChar, .Char8 => if (is_signed) "i8" else "u8", .UShort, .Short => if (is_signed) "c_short" else "c_ushort", .UInt, .Int => if (is_signed) "c_int" else "c_uint", @@ -3214,11 +3182,11 @@ fn qualTypeToLog2IntRef(c: *Context, qt: clang.QualType, source_loc: clang.Sourc if (int_bit_width != 0) { // we can perform the log2 now. 
const cast_bit_width = math.log2_int(u64, int_bit_width); - return Node.log2_int_type.create(c.arena, cast_bit_width); + return Tag.log2_int_type.create(c.arena, cast_bit_width); } const zig_type = try transQualType(c, qt, source_loc); - return Node.std_math_Log2Int.create(c.arena, zig_type); + return Tag.std_math_Log2Int.create(c.arena, zig_type); } fn qualTypeChildIsFnProto(qt: clang.QualType) bool { @@ -3392,10 +3360,10 @@ fn transCreateNodeAssign( // c: lhs = rhs // zig: lhs = rhs if (result_used == .unused) { - const lhs_node = try transExpr(c, scope, lhs, .used, .l_value); - var rhs_node = try transExprCoercing(c, scope, rhs, .used, .r_value); + const lhs_node = try transExpr(c, scope, lhs, .used); + var rhs_node = try transExprCoercing(c, scope, rhs, .used); if (!exprIsBooleanType(lhs) and isBoolRes(rhs_node)) { - rhs_node = try Node.bool_to_int.create(c.arena, rhs_node); + rhs_node = try Tag.bool_to_int.create(c.arena, rhs_node); } return transCreateNodeInfixOp(c, scope, .assign, lhs_node, rhs_node, .used); } @@ -3411,17 +3379,16 @@ fn transCreateNodeAssign( defer block_scope.deinit(); const tmp = try block_scope.makeMangledName(c, "tmp"); - const rhs = try transExpr(c, scope, op_expr, .used, .r_value); - const tmp_decl = try Node.var_simple.create(c.arena, .{ .name = tmp, .init = rhs}); + const rhs_node = try transExpr(c, scope, rhs, .used); + const tmp_decl = try Tag.var_simple.create(c.arena, .{ .name = tmp, .init = rhs_node }); try block_scope.statements.append(tmp_decl); - - const lhs = try transExpr(c, &block_scope.base, lhs, .used, .l_value); - const tmp_ident = try Node.identifier.create(c.arena, tmp); - const assign = try transCreateNodeInfixOp(c, &block_scope.base, .assign, lhs, tmp_iden, .used); + const lhs_node = try transExpr(c, &block_scope.base, lhs, .used); + const tmp_ident = try Tag.identifier.create(c.arena, tmp); + const assign = try transCreateNodeInfixOp(c, &block_scope.base, .assign, lhs_node, tmp_ident, .used); try 
block_scope.statements.append(assign); - const break_node = try Node.break_val.create(c.arena, .{ + const break_node = try Tag.break_val.create(c.arena, .{ .label = block_scope.label, .val = tmp_ident, }); @@ -3432,7 +3399,7 @@ fn transCreateNodeAssign( fn transCreateNodeInfixOp( c: *Context, scope: *Scope, - op: ast.Node.Tag, + op: Tag, lhs: Node, rhs: Node, used: ResultUsed, @@ -3452,13 +3419,13 @@ fn transCreateNodeBoolInfixOp( c: *Context, scope: *Scope, stmt: *const clang.BinaryOperator, - op: ast.Node.Tag, + op: Tag, used: ResultUsed, ) !Node { - std.debug.assert(op == .bool_and or op == .bool_or); + std.debug.assert(op == .@"and" or op == .@"or"); - const lhs = try transBoolExpr(rp, scope, stmt.getLHS(), .used, .l_value); - const rhs = try transBoolExpr(rp, scope, stmt.getRHS(), .used, .r_value); + const lhs = try transBoolExpr(c, scope, stmt.getLHS(), .used); + const rhs = try transBoolExpr(c, scope, stmt.getRHS(), .used); return transCreateNodeInfixOp(c, scope, op, lhs, rhs, used); } @@ -3503,22 +3470,22 @@ fn transCreateNodeAPInt(c: *Context, int: *const clang.APSInt) !Node { const str = big.toStringAlloc(c.arena, 10, false) catch |err| switch (err) { error.OutOfMemory => return error.OutOfMemory, }; - return Node.int_literal.create(c.arena, str); + return Tag.number_literal.create(c.arena, str); } fn transCreateNodeNumber(c: *Context, int: anytype) !Node { const fmt_s = if (comptime std.meta.trait.isNumber(@TypeOf(int))) "{d}" else "{s}"; const str = try std.fmt.allocPrint(c.arena, fmt_s, .{int}); - return Node.int_literal.create(c.arena, str); + return Tag.number_literal.create(c.arena, str); } fn transCreateNodeMacroFn(c: *Context, name: []const u8, ref: Node, proto_alias: *ast.Payload.Func) !Node { const scope = &c.global_scope.base; - var fn_params = std.ArrayList(Node).init(c.gpa); + var fn_params = std.ArrayList(ast.Payload.Param).init(c.gpa); defer fn_params.deinit(); - for (proto_alias.params()) |param, i| { + for (proto_alias.data.params) 
|param, i| { const param_name = param.name orelse try std.fmt.allocPrint(c.arena, "arg_{d}", .{c.getMangle()}); @@ -3529,29 +3496,29 @@ fn transCreateNodeMacroFn(c: *Context, name: []const u8, ref: Node, proto_alias: }); } - const init = if (value.castTag(.var_decl)) |v| - v.data.init - else if (value.castTag(.var_simple) orelse value.castTag(.pub_var_simple)) |v| + const init = if (ref.castTag(.var_decl)) |v| + v.data.init.? + else if (ref.castTag(.var_simple) orelse ref.castTag(.pub_var_simple)) |v| v.data.init else unreachable; - const unwrap_expr = try Node.unwrap.create(c.arena, init); - const call_params = try c.arena.alloc(Node, fn_params.items.len); + const unwrap_expr = try Tag.unwrap.create(c.arena, init); + const args = try c.arena.alloc(Node, fn_params.items.len); for (fn_params.items) |param, i| { - call_params[i] = try Node.identifier.create(c.arena, param.name); + args[i] = try Tag.identifier.create(c.arena, param.name.?); } - const call_expr = try Node.call.create(c.arean, .{ + const call_expr = try Tag.call.create(c.arena, .{ .lhs = unwrap_expr, - .args = call_params, + .args = args, }); - const return_expr = try Node.@"return".create(c.arean, call_expr); - const block = try Node.block_single.create(c.arean, return_expr); + const return_expr = try Tag.@"return".create(c.arena, call_expr); + const block = try Tag.block_single.create(c.arena, return_expr); - return Node.pub_inline_fn.create(c.arena, .{ + return Tag.pub_inline_fn.create(c.arena, .{ .name = name, - .params = try c.arena.dupe(ast.Node.Param, fn_params.items), - .return_type = proto_alias.return_type, + .params = try c.arena.dupe(ast.Payload.Param, fn_params.items), + .return_type = proto_alias.data.return_type, .body = block, }); } @@ -3560,7 +3527,7 @@ fn transCreateNodeShiftOp( c: *Context, scope: *Scope, stmt: *const clang.BinaryOperator, - op: Node.Tag, + op: Tag, used: ResultUsed, ) !Node { std.debug.assert(op == .shl or op == .shr); @@ -3570,11 +3537,11 @@ fn 
transCreateNodeShiftOp( const rhs_location = rhs_expr.getBeginLoc(); // lhs >> @as(u5, rh) - const lhs = try transExpr(c, scope, lhs_expr, .used, .l_value); + const lhs = try transExpr(c, scope, lhs_expr, .used); const rhs_type = try qualTypeToLog2IntRef(c, stmt.getType(), rhs_location); - const rhs = try transExprCoercing(c, scope, rhs_expr, .used, .r_value); - const rhs_casted = try Node.int_cast.create(c.arena, .{ .lhs = rhs_type, .rhs = rhs_type }); + const rhs = try transExprCoercing(c, scope, rhs_expr, .used); + const rhs_casted = try Tag.int_cast.create(c.arena, .{ .lhs = rhs_type, .rhs = rhs_type }); return transCreateNodeInfixOp(c, scope, op, lhs, rhs_casted, used); } @@ -3583,7 +3550,7 @@ fn transType(c: *Context, ty: *const clang.Type, source_loc: clang.SourceLocatio switch (ty.getTypeClass()) { .Builtin => { const builtin_ty = @ptrCast(*const clang.BuiltinType, ty); - return Node.type.create(c.arena, switch (builtin_ty.getKind()) { + return Tag.type.create(c.arena, switch (builtin_ty.getKind()) { .Void => "c_void", .Bool => "bool", .Char_U, .UChar, .Char_S, .Char8 => "u8", @@ -3608,11 +3575,13 @@ fn transType(c: *Context, ty: *const clang.Type, source_loc: clang.SourceLocatio }, .FunctionProto => { const fn_proto_ty = @ptrCast(*const clang.FunctionProtoType, ty); - return transFnProto(c, null, fn_proto_ty, source_loc, null, false); + const fn_proto = try transFnProto(c, null, fn_proto_ty, source_loc, null, false); + return Node.initPayload(&fn_proto.base); }, .FunctionNoProto => { const fn_no_proto_ty = @ptrCast(*const clang.FunctionType, ty); - return transFnNoProto(c, fn_no_proto_ty, source_loc, null, false); + const fn_proto = try transFnNoProto(c, fn_no_proto_ty, source_loc, null, false); + return Node.initPayload(&fn_proto.base); }, .Paren => { const paren_ty = @ptrCast(*const clang.ParenType, ty); @@ -3621,16 +3590,16 @@ fn transType(c: *Context, ty: *const clang.Type, source_loc: clang.SourceLocatio .Pointer => { const child_qt = 
ty.getPointeeType(); if (qualTypeChildIsFnProto(child_qt)) { - return Node.optional_type.create(c.arena, try transQualType(c, child_qt, source_loc)); + return Tag.optional_type.create(c.arena, try transQualType(c, child_qt, source_loc)); } const is_const = child_qt.isConstQualified(); const is_volatile = child_qt.isVolatileQualified(); const elem_type = try transQualType(c, child_qt, source_loc); - if (typeIsOpaque(rp.c, child_qt.getTypePtr(), source_loc) or qualTypeWasDemotedToOpaque(rp.c, child_qt)) { - return Node.single_pointer.create(c.arena, .{ .is_const = is_const, .is_volatile = is_volatile, .elem_type = elem_type }); + if (typeIsOpaque(c, child_qt.getTypePtr(), source_loc) or qualTypeWasDemotedToOpaque(c, child_qt)) { + return Tag.single_pointer.create(c.arena, .{ .is_const = is_const, .is_volatile = is_volatile, .elem_type = elem_type }); } - return Node.c_pointer.create(c.arena, .{ .is_const = is_const, .is_volatile = is_volatile, .elem_type = elem_type }); + return Tag.c_pointer.create(c.arena, .{ .is_const = is_const, .is_volatile = is_volatile, .elem_type = elem_type }); }, .ConstantArray => { const const_arr_ty = @ptrCast(*const clang.ConstantArrayType, ty); @@ -3639,7 +3608,7 @@ fn transType(c: *Context, ty: *const clang.Type, source_loc: clang.SourceLocatio const size = size_ap_int.getLimitedValue(math.maxInt(usize)); const elem_type = try transType(c, const_arr_ty.getElementType().getTypePtr(), source_loc); - return Node.array_type.create(c.arena, .{ .len = size, .elem_type = elem_type }); + return Tag.array_type.create(c.arena, .{ .len = size, .elem_type = elem_type }); }, .IncompleteArray => { const incomplete_array_ty = @ptrCast(*const clang.IncompleteArrayType, ty); @@ -3649,7 +3618,7 @@ fn transType(c: *Context, ty: *const clang.Type, source_loc: clang.SourceLocatio const is_volatile = child_qt.isVolatileQualified(); const elem_type = try transQualType(c, child_qt, source_loc); - return Node.c_pointer.create(c.arena, .{ .is_const = is_const, 
.is_volatile = is_volatile, .elem_type = elem_type }); + return Tag.c_pointer.create(c.arena, .{ .is_const = is_const, .is_volatile = is_volatile, .elem_type = elem_type }); }, .Typedef => { const typedef_ty = @ptrCast(*const clang.TypedefType, ty); @@ -3690,7 +3659,7 @@ fn transType(c: *Context, ty: *const clang.Type, source_loc: clang.SourceLocatio }, else => { const type_name = c.str(ty.getTypeClassName()); - return fail(c, error.UnsupportedType, source_loc, "unsupported type: '{}'", .{type_name}); + return fail(c, error.UnsupportedType, source_loc, "unsupported type: '{s}'", .{type_name}); }, } } @@ -3770,7 +3739,7 @@ fn transCC( .AAPCS => return CallingConvention.AAPCS, .AAPCS_VFP => return CallingConvention.AAPCSVFP, else => return fail( - rp, + c, error.UnsupportedType, source_loc, "unsupported calling convention: {s}", @@ -3786,7 +3755,7 @@ fn transFnProto( source_loc: clang.SourceLocation, fn_decl_context: ?FnDeclContext, is_pub: bool, -) !Node.FnProto { +) !*ast.Payload.Func { const fn_ty = @ptrCast(*const clang.FunctionType, fn_proto_ty); const cc = try transCC(c, fn_ty, source_loc); const is_var_args = fn_proto_ty.isVariadic(); @@ -3799,7 +3768,7 @@ fn transFnNoProto( source_loc: clang.SourceLocation, fn_decl_context: ?FnDeclContext, is_pub: bool, -) !Node.FnProto { +) !*ast.Payload.Func { const cc = try transCC(c, fn_ty, source_loc); const is_var_args = if (fn_decl_context) |ctx| (!ctx.is_export and ctx.storage_class != .Static) else true; return finishTransFnProto(c, null, null, fn_ty, source_loc, fn_decl_context, is_var_args, cc, is_pub); @@ -3822,7 +3791,7 @@ fn finishTransFnProto( // TODO check for always_inline attribute // TODO check for align attribute - var fn_params = std.ArrayList(ast.Payload.Func.Param).init(c.gpa); + var fn_params = std.ArrayList(ast.Payload.Param).init(c.gpa); defer fn_params.deinit(); const param_count: usize = if (fn_proto_ty != null) fn_proto_ty.?.getNumParams() else 0; try fn_params.ensureCapacity(param_count); @@ 
-3861,7 +3830,7 @@ fn finishTransFnProto( break :blk null; }; - const alignment: c_uint = blk: { + const alignment = blk: { if (fn_decl) |decl| { const alignment = decl.getAlignedAttribute(c.clang_context); if (alignment != 0) { @@ -3876,16 +3845,16 @@ fn finishTransFnProto( const return_type_node = blk: { if (fn_ty.getNoReturnAttr()) { - break :blk Node.noreturn_type.init(); + break :blk Tag.noreturn_type.init(); } else { const return_qt = fn_ty.getReturnType(); if (isCVoid(return_qt)) { // convert primitive c_void to actual void (only for return type) - break :blk Node.void_type.init(); + break :blk Tag.void_type.init(); } else { break :blk transQualType(c, return_qt, source_loc) catch |err| switch (err) { error.UnsupportedType => { - try warn(c, source_loc, "unsupported function proto return type", .{}); + try warn(c, &c.global_scope.base, source_loc, "unsupported function proto return type", .{}); return err; }, error.OutOfMemory => |e| return e, @@ -3893,26 +3862,31 @@ fn finishTransFnProto( } } }; - - return Node.func.create(c.arena, .{ - .is_pub = is_pub, - .is_extern = is_extern, - .is_export = is_export, - .is_var_args = is_var_args, - .name = name, - .linksection_string = linksection_string, - .explicit_callconv = explicit_callconv, - .params = try c.arena.dupe(ast.Payload.Func.Param, fn_params.items), - .return_type = return_node, - .body = null, - .alignment = alignment, - }); + const name: ?[]const u8 = if (fn_decl_context) |ctx| ctx.fn_name else null; + const payload = try c.arena.create(ast.Payload.Func); + payload.* = .{ + .base = .{ .tag = .func }, + .data = .{ + .is_pub = is_pub, + .is_extern = is_extern, + .is_export = is_export, + .is_var_args = is_var_args, + .name = name, + .linksection_string = linksection_string, + .explicit_callconv = explicit_callconv, + .params = try c.arena.dupe(ast.Payload.Param, fn_params.items), + .return_type = return_type_node, + .body = null, + .alignment = alignment, + }, + }; + return payload; } fn warn(c: 
*Context, scope: *Scope, loc: clang.SourceLocation, comptime format: []const u8, args: anytype) !void { const args_prefix = .{c.locStr(loc)}; - const value = std.fmt.allocPrint(c.arena, "// {s}: warning: " ++ format, args_prefix ++ args); - try scope.appendNode(c.gpa, try Node.warning.create(c.arena, value)); + const value = try std.fmt.allocPrint(c.arena, "// {s}: warning: " ++ format, args_prefix ++ args); + try scope.appendNode(try Tag.warning.create(c.arena, value)); } fn fail( @@ -3922,17 +3896,17 @@ fn fail( comptime format: []const u8, args: anytype, ) (@TypeOf(err) || error{OutOfMemory}) { - try warn(c, source_loc, format, args); + try warn(c, &c.global_scope.base, source_loc, format, args); return err; } -pub fn failDecl(c: *Context, loc: clang.SourceLocation, name: []const u8, comptime format: []const u8, args: anytype) !void { +pub fn failDecl(c: *Context, loc: clang.SourceLocation, name: []const u8, comptime format: []const u8, args: anytype) Error!void { // location // pub const name = @compileError(msg); - const location_comment = std.fmt.allocPrint(c.arena, "// {s}", .{c.locStr(loc)}); - try c.global_scope.nodes.append(try Node.warning.create(c.arena, location_comment)); - const fail_msg = std.fmt.allocPrint(c.arena, format, args); - try c.global_scope.nodes.append(try Node.fail_decl.create(c.arena, fail_msg)); + const location_comment = try std.fmt.allocPrint(c.arena, "// {s}", .{c.locStr(loc)}); + try c.global_scope.nodes.append(try Tag.warning.create(c.arena, location_comment)); + const fail_msg = try std.fmt.allocPrint(c.arena, format, args); + try c.global_scope.nodes.append(try Tag.fail_decl.create(c.arena, fail_msg)); } pub fn freeErrors(errors: []ClangErrMsg) void { @@ -4075,7 +4049,7 @@ fn transMacroDefine(c: *Context, m: *MacroCtx) ParseError!void { if (last != .Eof and last != .Nl) return m.fail(c, "unable to translate C expr: unexpected token .{s}", .{@tagName(last)}); - const var_decl = try Node.pub_var_simple.create(c.arena, .{ .name = 
m.name, .init = init_node }); + const var_decl = try Tag.pub_var_simple.create(c.arena, .{ .name = m.name, .init = init_node }); _ = try c.global_scope.macro_table.put(m.name, var_decl); } @@ -4099,7 +4073,7 @@ fn transMacroFnDefine(c: *Context, m: *MacroCtx) ParseError!void { try fn_params.append(.{ .is_noalias = false, .name = mangled_name, - .type = Node.@"anytype".init(), + .type = Tag.@"anytype".init(), }); if (m.peek().? != .Comma) break; @@ -4119,19 +4093,19 @@ fn transMacroFnDefine(c: *Context, m: *MacroCtx) ParseError!void { const stmts = some.data.stmts; const blk_last = stmts[stmts.len - 1]; const br = blk_last.castTag(.break_val).?; - break :blk br.data; + break :blk br.data.val; } else expr; - const typeof = try Node.typeof.create(c.arean, typeof_arg); - const return_expr = try Node.@"return".create(c.arena, expr); - try block_scope.statements.append(&return_expr.base); - - const fn_decl = try Node.pub_inline_fn.create(c.arena, .{ + const typeof = try Tag.typeof.create(c.arena, typeof_arg); + const return_expr = try Tag.@"return".create(c.arena, expr); + try block_scope.statements.append(return_expr); + + const fn_decl = try Tag.pub_inline_fn.create(c.arena, .{ .name = m.name, .params = try c.arena.dupe(ast.Payload.Param, fn_params.items), .return_type = typeof, .body = try block_scope.complete(c), }); - _ = try c.global_scope.macro_table.put(m.name, &fn_proto.base); + _ = try c.global_scope.macro_table.put(m.name, fn_decl); } const ParseError = Error || error{ParseError}; @@ -4149,7 +4123,7 @@ fn parseCExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { var last = node; while (true) { // suppress result - const ignore = try Node.ignore.create(c.arena, last); + const ignore = try Tag.ignore.create(c.arena, last); try block_scope.statements.append(ignore); last = try parseCCondExpr(c, m, scope); @@ -4159,7 +4133,7 @@ fn parseCExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { } } - const break_node = try 
Node.break_val.create(c.arena, .{ + const break_node = try Tag.break_val.create(c.arena, .{ .label = block_scope.label, .val = last, }); @@ -4190,7 +4164,7 @@ fn parseCNumLit(c: *Context, m: *MacroCtx) ParseError!Node { return transCreateNodeNumber(c, lit_bytes); } - const type_node = try Node.type.create(c.arena, switch (suffix) { + const type_node = try Tag.type.create(c.arena, switch (suffix) { .u => "c_uint", .l => "c_long", .lu => "c_ulong", @@ -4205,7 +4179,7 @@ fn parseCNumLit(c: *Context, m: *MacroCtx) ParseError!Node { else => unreachable, }]; const rhs = try transCreateNodeNumber(c, lit_bytes); - return Node.as.create(c.arena, .{ .lhs = type_node, .rhs = rhs }); + return Tag.as.create(c.arena, .{ .lhs = type_node, .rhs = rhs }); }, .FloatLiteral => |suffix| { if (lit_bytes[0] == '.') @@ -4213,13 +4187,13 @@ fn parseCNumLit(c: *Context, m: *MacroCtx) ParseError!Node { if (suffix == .none) { return transCreateNodeNumber(c, lit_bytes); } - const type_node = try Node.type.create(c.arena, switch (suffix) { + const type_node = try Tag.type.create(c.arena, switch (suffix) { .f => "f32", .l => "c_longdouble", else => unreachable, }); const rhs = try transCreateNodeNumber(c, lit_bytes[0 .. lit_bytes.len - 1]); - return Node.as.create(c.arena, .{ .lhs = type_node, .rhs = rhs }); + return Tag.as.create(c.arena, .{ .lhs = type_node, .rhs = rhs }); }, else => unreachable, } @@ -4391,56 +4365,56 @@ fn parseCPrimaryExprInner(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!N switch (tok) { .CharLiteral => { if (slice[0] != '\'' or slice[1] == '\\' or slice.len == 3) { - return Node.char_literal.create(c.arena, try zigifyEscapeSequences(c, m)); + return Tag.char_literal.create(c.arena, try zigifyEscapeSequences(c, m)); } else { const str = try std.fmt.allocPrint(c.arena, "0x{x}", .{slice[1 .. 
slice.len - 1]}); - return Node.int_literal.create(c.arena, str); + return Tag.number_literal.create(c.arena, str); } }, .StringLiteral => { - return Node.string_literal.create(c.arena, try zigifyEscapeSequences(c, m)); + return Tag.string_literal.create(c.arena, try zigifyEscapeSequences(c, m)); }, .IntegerLiteral, .FloatLiteral => { return parseCNumLit(c, m); }, // eventually this will be replaced by std.c.parse which will handle these correctly - .Keyword_void => return Node.type.create(c.arena, "c_void"), - .Keyword_bool => return Node.type.create(c.arena, "bool"), - .Keyword_double => return Node.type.create(c.arena, "f64"), - .Keyword_long => return Node.type.create(c.arena, "c_long"), - .Keyword_int => return Node.type.create(c.arena, "c_int"), - .Keyword_float => return Node.type.create(c.arena, "f32"), - .Keyword_short => return Node.type.create(c.arena, "c_short"), - .Keyword_char => return Node.type.create(c.arena, "u8"), + .Keyword_void => return Tag.type.create(c.arena, "c_void"), + .Keyword_bool => return Tag.type.create(c.arena, "bool"), + .Keyword_double => return Tag.type.create(c.arena, "f64"), + .Keyword_long => return Tag.type.create(c.arena, "c_long"), + .Keyword_int => return Tag.type.create(c.arena, "c_int"), + .Keyword_float => return Tag.type.create(c.arena, "f32"), + .Keyword_short => return Tag.type.create(c.arena, "c_short"), + .Keyword_char => return Tag.type.create(c.arena, "u8"), .Keyword_unsigned => if (m.next()) |t| switch (t) { - .Keyword_char => return Node.type.create(c.arena, "u8"), - .Keyword_short => return Node.type.create(c.arena, "c_ushort"), - .Keyword_int => return Node.type.create(c.arena, "c_uint"), + .Keyword_char => return Tag.type.create(c.arena, "u8"), + .Keyword_short => return Tag.type.create(c.arena, "c_ushort"), + .Keyword_int => return Tag.type.create(c.arena, "c_uint"), .Keyword_long => if (m.peek() != null and m.peek().? 
== .Keyword_long) { _ = m.next(); - return Node.type.create(c.arena, "c_ulonglong"); - } else return Node.type.create(c.arena, "c_ulong"), + return Tag.type.create(c.arena, "c_ulonglong"); + } else return Tag.type.create(c.arena, "c_ulong"), else => { m.i -= 1; - return Node.type.create(c.arena, "c_uint"); + return Tag.type.create(c.arena, "c_uint"); }, } else { - return Node.type.create(c.arena, "c_uint"); + return Tag.type.create(c.arena, "c_uint"); }, .Keyword_signed => if (m.next()) |t| switch (t) { - .Keyword_char => return Node.type.create(c.arena, "i8"), - .Keyword_short => return Node.type.create(c.arena, "c_short"), - .Keyword_int => return Node.type.create(c.arena, "c_int"), + .Keyword_char => return Tag.type.create(c.arena, "i8"), + .Keyword_short => return Tag.type.create(c.arena, "c_short"), + .Keyword_int => return Tag.type.create(c.arena, "c_int"), .Keyword_long => if (m.peek() != null and m.peek().? == .Keyword_long) { _ = m.next(); - return Node.type.create(c.arena, "c_longlong"); - } else return Node.type.create(c.arena, "c_long"), + return Tag.type.create(c.arena, "c_longlong"); + } else return Tag.type.create(c.arena, "c_long"), else => { m.i -= 1; - return Node.type.create(c.arena, "c_int"); + return Tag.type.create(c.arena, "c_int"); }, } else { - return Node.type.create(c.arena, "c_int"); + return Tag.type.create(c.arena, "c_int"); }, .Keyword_enum, .Keyword_struct, .Keyword_union => { // struct Foo will be declared as struct_Foo by transRecordDecl @@ -4451,11 +4425,11 @@ fn parseCPrimaryExprInner(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!N } const name = try std.fmt.allocPrint(c.arena, "{s}_{s}", .{ slice, m.slice() }); - return Node.identifier.create(c.arena, name); + return Tag.identifier.create(c.arena, name); }, .Identifier => { const mangled_name = scope.getAlias(slice); - return Node.identifier.create(c.arena, builtin_typedef_map.get(mangled_name) orelse mangled_name); + return Tag.identifier.create(c.arena, 
builtin_typedef_map.get(mangled_name) orelse mangled_name); }, .LParen => { const inner_node = try parseCExpr(c, m, scope); @@ -4492,7 +4466,7 @@ fn parseCPrimaryExprInner(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!N return error.ParseError; } - return Node.std_meta_cast.create(c.arena, .{ .lhs = inner_node, .rhs = node_to_cast }); + return Tag.std_meta_cast.create(c.arena, .{ .lhs = inner_node, .rhs = node_to_cast }); }, else => { try m.fail(c, "unable to translate C expr: unexpected token .{s}", .{@tagName(tok)}); @@ -4511,7 +4485,7 @@ fn parseCPrimaryExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { .StringLiteral, .Identifier => {}, else => break, } - node = try Node.array_cat.create(c.arena, .{ .lhs = node, .rhs = try parseCPrimaryExprInner(c, m, scope) }); + node = try Tag.array_cat.create(c.arena, .{ .lhs = node, .rhs = try parseCPrimaryExprInner(c, m, scope) }); } return node; } @@ -4521,7 +4495,7 @@ fn macroBoolToInt(c: *Context, node: Node) !Node { return node; } - return Node.bool_to_int.create(c.arena, node); + return Tag.bool_to_int.create(c.arena, node); } fn macroIntToBool(c: *Context, node: Node) !Node { @@ -4529,7 +4503,7 @@ fn macroIntToBool(c: *Context, node: Node) !Node { return node; } - return Node.not_equal.create(c.arena, .{ .lhs = node, .rhs = Node.zero_literal.init() }); + return Tag.not_equal.create(c.arena, .{ .lhs = node, .rhs = Tag.zero_literal.init() }); } fn parseCCondExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { @@ -4545,7 +4519,7 @@ fn parseCCondExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { return error.ParseError; } const else_body = try parseCCondExpr(c, m, scope); - return Node.@"if".create(c.arena, .{ .cond = node, .then = then_body, .@"else" = else_body }); + return Tag.@"if".create(c.arena, .{ .cond = node, .then = then_body, .@"else" = else_body }); } fn parseCOrExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { @@ -4553,7 +4527,7 @@ fn 
parseCOrExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { while (m.next().? == .PipePipe) { const lhs = try macroIntToBool(c, node); const rhs = try macroIntToBool(c, try parseCAndExpr(c, m, scope)); - node = try Node.@"or".create(c.arena, .{ .lhs = lhs, .rhs = rhs }); + node = try Tag.@"or".create(c.arena, .{ .lhs = lhs, .rhs = rhs }); } m.i -= 1; return node; @@ -4564,7 +4538,7 @@ fn parseCAndExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { while (m.next().? == .AmpersandAmpersand) { const lhs = try macroIntToBool(c, node); const rhs = try macroIntToBool(c, try parseCBitOrExpr(c, m, scope)); - node = try Node.@"and".create(c.arena, .{ .lhs = lhs, .rhs = rhs }); + node = try Tag.@"and".create(c.arena, .{ .lhs = lhs, .rhs = rhs }); } m.i -= 1; return node; @@ -4575,7 +4549,7 @@ fn parseCBitOrExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { while (m.next().? == .Pipe) { const lhs = try macroBoolToInt(c, node); const rhs = try macroBoolToInt(c, try parseCBitXorExpr(c, m, scope)); - node = try Node.bit_or.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); + node = try Tag.bit_or.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); } m.i -= 1; return node; @@ -4586,7 +4560,7 @@ fn parseCBitXorExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { while (m.next().? == .Caret) { const lhs = try macroBoolToInt(c, node); const rhs = try macroBoolToInt(c, try parseCBitAndExpr(c, m, scope)); - node = try Node.bit_xor.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); + node = try Tag.bit_xor.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); } m.i -= 1; return node; @@ -4597,7 +4571,7 @@ fn parseCBitAndExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { while (m.next().? 
== .Ampersand) { const lhs = try macroBoolToInt(c, node); const rhs = try macroBoolToInt(c, try parseCEqExpr(c, m, scope)); - node = try Node.bit_and.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); + node = try Tag.bit_and.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); } m.i -= 1; return node; @@ -4611,13 +4585,13 @@ fn parseCEqExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { _ = m.next(); const lhs = try macroBoolToInt(c, node); const rhs = try macroBoolToInt(c, try parseCRelExpr(c, m, scope)); - node = try Node.not_equal.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); + node = try Tag.not_equal.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); }, .EqualEqual => { _ = m.next(); const lhs = try macroBoolToInt(c, node); const rhs = try macroBoolToInt(c, try parseCRelExpr(c, m, scope)); - node = try Node.equal.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); + node = try Tag.equal.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); }, else => return node, } @@ -4632,25 +4606,25 @@ fn parseCRelExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { _ = m.next(); const lhs = try macroBoolToInt(c, node); const rhs = try macroBoolToInt(c, try parseCShiftExpr(c, m, scope)); - node = try Node.greater_than.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); + node = try Tag.greater_than.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); }, .AngleBracketRightEqual => { _ = m.next(); const lhs = try macroBoolToInt(c, node); const rhs = try macroBoolToInt(c, try parseCShiftExpr(c, m, scope)); - node = try Node.greater_than_equal.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); + node = try Tag.greater_than_equal.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); }, .AngleBracketLeft => { _ = m.next(); const lhs = try macroBoolToInt(c, node); const rhs = try macroBoolToInt(c, try parseCShiftExpr(c, m, scope)); - node = try Node.less_than.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); + node = try Tag.less_than.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); }, .AngleBracketLeftEqual => { _ = 
m.next(); const lhs = try macroBoolToInt(c, node); const rhs = try macroBoolToInt(c, try parseCShiftExpr(c, m, scope)); - node = try Node.less_than_equal.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); + node = try Tag.less_than_equal.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); }, else => return node, } @@ -4665,13 +4639,13 @@ fn parseCShiftExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { _ = m.next(); const lhs = try macroBoolToInt(c, node); const rhs = try macroBoolToInt(c, try parseCAddSubExpr(c, m, scope)); - node = try Node.shl.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); + node = try Tag.shl.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); }, .AngleBracketAngleBracketRight => { _ = m.next(); const lhs = try macroBoolToInt(c, node); const rhs = try macroBoolToInt(c, try parseCAddSubExpr(c, m, scope)); - node = try Node.shr.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); + node = try Tag.shr.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); }, else => return node, } @@ -4686,13 +4660,13 @@ fn parseCAddSubExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { _ = m.next(); const lhs = try macroBoolToInt(c, node); const rhs = try macroBoolToInt(c, try parseCMulExpr(c, m, scope)); - node = try Node.add.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); + node = try Tag.add.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); }, .Minus => { _ = m.next(); const lhs = try macroBoolToInt(c, node); const rhs = try macroBoolToInt(c, try parseCMulExpr(c, m, scope)); - node = try Node.sub.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); + node = try Tag.sub.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); }, else => return node, } @@ -4711,14 +4685,14 @@ fn parseCMulExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { const prev_id = m.list[m.i - 1].id; if (prev_id == .Keyword_void) { - const ptr = try Node.single_pointer.create(c.arena, .{ + const ptr = try Tag.single_pointer.create(c.arena, .{ .is_const = false, .is_volatile = false, .elem_type = node, }); - 
return Node.optional_type.create(c.arena, ptr); + return Tag.optional_type.create(c.arena, ptr); } else { - return Node.c_pointer.create(c.arena, .{ + return Tag.c_pointer.create(c.arena, .{ .is_const = false, .is_volatile = false, .elem_type = node, @@ -4728,18 +4702,18 @@ fn parseCMulExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { // expr * expr const lhs = try macroBoolToInt(c, node); const rhs = try macroBoolToInt(c, try parseCUnaryExpr(c, m, scope)); - node = try Node.mul.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); + node = try Tag.mul.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); } }, .Slash => { const lhs = try macroBoolToInt(c, node); const rhs = try macroBoolToInt(c, try parseCUnaryExpr(c, m, scope)); - node = try Node.div.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); + node = try Tag.div.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); }, .Percent => { const lhs = try macroBoolToInt(c, node); const rhs = try macroBoolToInt(c, try parseCUnaryExpr(c, m, scope)); - node = try Node.mod.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); + node = try Tag.mod.create(c.arena, .{ .lhs = lhs, .rhs = rhs }); }, else => { m.i -= 1; @@ -4759,8 +4733,8 @@ fn parseCPostfixExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { return error.ParseError; } - const ident = try Node.identifier.create(c.arena, m.slice()); - node = try Node.field_access.create(c.arena, .{ .lhs = node, .rhs = ident }); + const ident = try Tag.identifier.create(c.arena, m.slice()); + node = try Tag.field_access.create(c.arena, .{ .lhs = node, .rhs = ident }); }, .Arrow => { if (m.next().? 
!= .Identifier) { @@ -4768,20 +4742,20 @@ fn parseCPostfixExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { return error.ParseError; } - const deref = try Node.deref.create(c.arena, node); - const ident = try Node.identifier.create(c.arena, m.slice()); - node = try Node.field_access.create(c.arena, .{ .lhs = deref, .rhs = ident }); + const deref = try Tag.deref.create(c.arena, node); + const ident = try Tag.identifier.create(c.arena, m.slice()); + node = try Tag.field_access.create(c.arena, .{ .lhs = deref, .rhs = ident }); }, .LBracket => { const index = try macroBoolToInt(c, try parseCExpr(c, m, scope)); - node = try Node.array_access.create(c.arena, .{ .lhs = node, .rhs = index }); + node = try Tag.array_access.create(c.arena, .{ .lhs = node, .rhs = index }); }, .LParen => { - var call_params = std.ArrayList(Node).init(c.gpa); - defer call_params.deinit(); + var args = std.ArrayList(Node).init(c.gpa); + defer args.deinit(); while (true) { const arg = try parseCCondExpr(c, m, scope); - try call_params.append(arg); + try args.append(arg); switch (m.next().?) 
{ .Comma => {}, .RParen => break, @@ -4791,7 +4765,7 @@ fn parseCPostfixExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { }, } } - node = try Node.call.create(c.arena, .{ .lhs = node, .rhs = try c.arena.dupe(Node, call_params.items) }); + node = try Tag.call.create(c.arena, .{ .lhs = node, .args = try c.arena.dupe(Node, args.items) }); }, .LBrace => { var init_vals = std.ArrayList(Node).init(c.gpa); @@ -4809,8 +4783,8 @@ fn parseCPostfixExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { }, } } - const tuple_node = try Node.tuple.create(c.arena, try c.arena.dupe(Node, init_vals.items)); - node = try Node.std_mem_zeroinit.create(c.arena, .{ .lhs = node, .rhs = tuple_node }); + const tuple_node = try Tag.tuple.create(c.arena, try c.arena.dupe(Node, init_vals.items)); + node = try Tag.std_mem_zeroinit.create(c.arena, .{ .lhs = node, .rhs = tuple_node }); }, .PlusPlus, .MinusMinus => { try m.fail(c, "TODO postfix inc/dec expr", .{}); @@ -4828,24 +4802,24 @@ fn parseCUnaryExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { switch (m.next().?) 
{ .Bang => { const operand = try macroIntToBool(c, try parseCUnaryExpr(c, m, scope)); - return Node.not.create(c.arena, operand); + return Tag.not.create(c.arena, operand); }, .Minus => { const operand = try macroBoolToInt(c, try parseCUnaryExpr(c, m, scope)); - return Node.negate.create(c.arena, operand); + return Tag.negate.create(c.arena, operand); }, .Plus => return try parseCUnaryExpr(c, m, scope), .Tilde => { const operand = try macroBoolToInt(c, try parseCUnaryExpr(c, m, scope)); - return Node.bit_not.create(c.arena, operand); + return Tag.bit_not.create(c.arena, operand); }, .Asterisk => { const operand = try parseCUnaryExpr(c, m, scope); - return Node.deref.create(c.arena, operand); + return Tag.deref.create(c.arena, operand); }, .Ampersand => { const operand = try parseCUnaryExpr(c, m, scope); - return Node.address_of.create(c.arena, operand); + return Tag.address_of.create(c.arena, operand); }, .Keyword_sizeof => { const operand = if (m.peek().? == .LParen) blk: { @@ -4860,7 +4834,7 @@ fn parseCUnaryExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { break :blk inner; } else try parseCUnaryExpr(c, m, scope); - return Node.std_meta_sizeof.create(c.arena, operand); + return Tag.std_meta_sizeof.create(c.arena, operand); }, .Keyword_alignof => { // TODO this won't work if using 's @@ -4877,7 +4851,7 @@ fn parseCUnaryExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { return error.ParseError; } - return Node.alignof.create(c.arena, operand); + return Tag.alignof.create(c.arena, operand); }, .PlusPlus, .MinusMinus => { try m.fail(c, "TODO unary inc/dec expr", .{}); @@ -4902,7 +4876,7 @@ fn getContainer(c: *Context, node: Node) ?Node { .negate, .negate_wrap, .array_type, - .c_pointer, + .c_pointer, .single_pointer, => return node, @@ -4910,7 +4884,7 @@ fn getContainer(c: *Context, node: Node) ?Node { const ident = node.castTag(.identifier).?; if (c.global_scope.sym_table.get(ident.data)) |value| { if (value.castTag(.var_decl)) 
|var_decl| - return getContainer(c, var_decl.data.init); + return getContainer(c, var_decl.data.init.?); if (value.castTag(.var_simple) orelse value.castTag(.pub_var_simple)) |var_decl| return getContainer(c, var_decl.data.init); } @@ -4923,8 +4897,8 @@ fn getContainer(c: *Context, node: Node) ?Node { if (ty_node.castTag(.@"struct") orelse ty_node.castTag(.@"union")) |container| { for (container.data.fields) |field| { const ident = infix.data.rhs.castTag(.identifier).?; - if (mem.eql(u8, field.data.name, field.data)) { - return getContainer(c, field.type_expr.?); + if (mem.eql(u8, field.name, ident.data)) { + return getContainer(c, field.type); } } } @@ -4960,9 +4934,9 @@ fn getContainerTypeOf(c: *Context, ref: Node) ?Node { } fn getFnProto(c: *Context, ref: Node) ?*ast.Payload.Func { - const init = if (value.castTag(.var_decl)) |v| - v.data.init - else if (value.castTag(.var_simple) orelse value.castTag(.pub_var_simple)) |v| + const init = if (ref.castTag(.var_decl)) |v| + v.data.init orelse return null + else if (ref.castTag(.var_simple) orelse ref.castTag(.pub_var_simple)) |v| v.data.init else return null; diff --git a/src/translate_c/ast.zig b/src/translate_c/ast.zig index 1cf014e5dc..24689c89db 100644 --- a/src/translate_c/ast.zig +++ b/src/translate_c/ast.zig @@ -1,5 +1,6 @@ const std = @import("std"); const Type = @import("../type.zig").Type; +const Allocator = std.mem.Allocator; pub const Node = extern union { /// If the tag value is less than Tag.no_payload_count, then no pointer @@ -20,6 +21,8 @@ pub const Node = extern union { one_literal, void_type, noreturn_type, + @"anytype", + @"continue", /// pub usingnamespace @import("std").c.builtins; usingnamespace_builtins, // After this, the tag requires a payload. 
@@ -40,7 +43,6 @@ pub const Node = extern union { switch_else, /// lhs => rhs, switch_prong, - @"continue", @"break", break_val, @"return", @@ -60,7 +62,6 @@ pub const Node = extern union { container_init, std_meta_cast, discard, - block, // a + b add, @@ -111,8 +112,11 @@ pub const Node = extern union { equal, not_equal, bit_and, + bit_and_assign, bit_or, + bit_or_assign, bit_xor, + bit_xor_assign, array_cat, ellipsis3, assign, @@ -126,7 +130,7 @@ pub const Node = extern union { rem, /// @divTrunc(lhs, rhs) div_trunc, - /// @boolToInt(lhs, rhs) + /// @boolToInt(operand) bool_to_int, /// @as(lhs, rhs) as, @@ -150,24 +154,26 @@ pub const Node = extern union { ptr_to_int, /// @alignCast(lhs, rhs) align_cast, + /// @ptrCast(lhs, rhs) + ptr_cast, negate, negate_wrap, bit_not, not, address_of, - /// operand.?.* - unwrap_deref, + /// .? + unwrap, /// .* deref, block, /// { operand } block_single, - @"break", sizeof, alignof, + typeof, type, optional_type, @@ -185,6 +191,8 @@ pub const Node = extern union { fail_decl, // var actual = mangled; arg_redecl, + /// pub const alias = actual; + alias, /// const name = init; typedef, var_simple, @@ -204,18 +212,17 @@ pub const Node = extern union { /// _ = operand; ignore, - @"anytype", pub const last_no_payload_tag = Tag.usingnamespace_builtins; pub const no_payload_count = @enumToInt(last_no_payload_tag) + 1; - pub fn Type(tag: Tag) ?type { - return switch (tag) { + pub fn Type(comptime t: Tag) type { + return switch (t) { .null_literal, .undefined_literal, .opaque_literal, .true_literal, - .false_litral, + .false_literal, .empty_block, .usingnamespace_builtins, .return_void, @@ -224,6 +231,7 @@ pub const Node = extern union { .void_type, .noreturn_type, .@"anytype", + .@"continue", => @compileError("Type Tag " ++ @tagName(t) ++ " has no payload"), .std_mem_zeroes, @@ -236,7 +244,7 @@ pub const Node = extern union { .not, .optional_type, .address_of, - .unwrap_deref, + .unwrap, .deref, .ptr_to_int, .enum_to_int, @@ -246,6 
+254,11 @@ pub const Node = extern union { .switch_else, .ignore, .block_single, + .std_meta_sizeof, + .bool_to_int, + .sizeof, + .alignof, + .typeof, => Payload.UnOp, .add, @@ -294,12 +307,14 @@ pub const Node = extern union { .equal, .not_equal, .bit_and, + .bit_and_assign, .bit_or, + .bit_or_assign, .bit_xor, + .bit_xor_assign, .div_trunc, .rem, .int_cast, - .bool_to_int, .as, .truncate, .bit_cast, @@ -316,6 +331,7 @@ pub const Node = extern union { .align_cast, .array_access, .std_mem_zeroinit, + .ptr_cast, => Payload.BinOp, .number_literal, @@ -324,8 +340,6 @@ pub const Node = extern union { .identifier, .warning, .failed_decl, - .sizeof, - .alignof, .type, .fail_decl, => Payload.Value, @@ -345,7 +359,7 @@ pub const Node = extern union { .block => Payload.Block, .c_pointer, .single_pointer => Payload.Pointer, .array_type => Payload.Array, - .arg_redecl => Payload.ArgRedecl, + .arg_redecl, .alias => Payload.ArgRedecl, .log2_int_type => Payload.Log2IntType, .typedef, .pub_typedef, .var_simple, .pub_var_simple => Payload.SimpleVarDecl, .enum_redecl => Payload.EnumRedecl, @@ -375,7 +389,7 @@ pub const Node = extern union { pub fn tag(self: Node) Tag { if (self.tag_if_small_enough < Tag.no_payload_count) { - return @intToEnum(Tag, @intCast(@TagType(Tag), self.tag_if_small_enough)); + return @intToEnum(Tag, @intCast(std.meta.Tag(Tag), self.tag_if_small_enough)); } else { return self.ptr_otherwise.tag; } @@ -392,16 +406,16 @@ pub const Node = extern union { } pub fn initPayload(payload: *Payload) Node { - assert(@enumToInt(payload.tag) >= Tag.no_payload_count); + std.debug.assert(@enumToInt(payload.tag) >= Tag.no_payload_count); return .{ .ptr_otherwise = payload }; } }; pub const Payload = struct { - tag: Tag, + tag: Node.Tag, pub const Infix = struct { - base: Node, + base: Payload, data: struct { lhs: Node, rhs: Node, @@ -409,17 +423,17 @@ pub const Payload = struct { }; pub const Value = struct { - base: Node, + base: Payload, data: []const u8, }; pub const UnOp 
= struct { - base: Node, + base: Payload, data: Node, }; pub const BinOp = struct { - base: Node, + base: Payload, data: struct { lhs: Node, rhs: Node, @@ -427,7 +441,7 @@ pub const Payload = struct { }; pub const If = struct { - base: Node = .{ .tag = .@"if" }, + base: Payload, data: struct { cond: Node, then: Node, @@ -436,7 +450,7 @@ pub const Payload = struct { }; pub const While = struct { - base: Node = .{ .tag = .@"while" }, + base: Payload, data: struct { cond: Node, body: Node, @@ -445,7 +459,7 @@ pub const Payload = struct { }; pub const Switch = struct { - base: Node = .{ .tag = .@"switch" }, + base: Payload, data: struct { cond: Node, cases: []Node, @@ -453,12 +467,12 @@ pub const Payload = struct { }; pub const Break = struct { - base: Node = .{ .tag = .@"break" }, + base: Payload, data: ?[]const u8, }; pub const BreakVal = struct { - base: Node = .{ .tag = .break_val }, + base: Payload, data: struct { label: ?[]const u8, val: Node, @@ -466,7 +480,7 @@ pub const Payload = struct { }; pub const Call = struct { - base: Node = .{.call}, + base: Payload, data: struct { lhs: Node, args: []Node, @@ -474,7 +488,7 @@ pub const Payload = struct { }; pub const VarDecl = struct { - base: Node = .{ .tag = .var_decl }, + base: Payload, data: struct { is_pub: bool, is_const: bool, @@ -489,13 +503,13 @@ pub const Payload = struct { }; pub const Func = struct { - base: Node = .{.func}, + base: Payload, data: struct { is_pub: bool, is_extern: bool, is_export: bool, is_var_args: bool, - name: []const u8, + name: ?[]const u8, linksection_string: ?[]const u8, explicit_callconv: ?std.builtin.CallingConvention, params: []Param, @@ -512,7 +526,7 @@ pub const Payload = struct { }; pub const Enum = struct { - base: Node = .{ .tag = .@"enum" }, + base: Payload, data: []Field, pub const Field = struct { @@ -522,9 +536,9 @@ pub const Payload = struct { }; pub const Record = struct { - base: Node, + base: Payload, data: struct { - @"packed": bool, + is_packed: bool, fields: 
[]Field, }, @@ -536,12 +550,12 @@ pub const Payload = struct { }; pub const ArrayInit = struct { - base: Node = .{ .tag = .array_init }, + base: Payload, data: []Node, }; pub const ContainerInit = struct { - base: Node = .{ .tag = .container_init }, + base: Payload, data: []Initializer, pub const Initializer = struct { @@ -551,7 +565,7 @@ pub const Payload = struct { }; pub const Block = struct { - base: Node, + base: Payload, data: struct { label: ?[]const u8, stmts: []Node @@ -559,15 +573,15 @@ pub const Payload = struct { }; pub const Array = struct { - base: Node, + base: Payload, data: struct { elem_type: Node, - len: Node, + len: usize, }, }; pub const Pointer = struct { - base: Node, + base: Payload, data: struct { elem_type: Node, is_const: bool, @@ -576,7 +590,7 @@ pub const Payload = struct { }; pub const ArgRedecl = struct { - base: Node, + base: Payload, data: struct { actual: []const u8, mangled: []const u8, @@ -584,12 +598,12 @@ pub const Payload = struct { }; pub const Log2IntType = struct { - base: Node, + base: Payload, data: std.math.Log2Int(u64), }; pub const SimpleVarDecl = struct { - base: Node, + base: Payload, data: struct { name: []const u8, init: Node, @@ -597,7 +611,7 @@ pub const Payload = struct { }; pub const EnumRedecl = struct { - base: Node, + base: Payload, data: struct { enum_val_name: []const u8, field_name: []const u8, @@ -606,7 +620,7 @@ pub const Payload = struct { }; pub const ArrayFiller = struct { - base: Node, + base: Payload, data: struct { type: Node, filler: Node, @@ -615,7 +629,7 @@ pub const Payload = struct { }; pub const PubInlineFn = struct { - base: Node, + base: Payload, data: struct { name: []const u8, params: []Param, @@ -626,6 +640,6 @@ pub const Payload = struct { }; /// Converts the nodes into a Zig ast. 
-pub fn render(allocator: *Allocator, nodes: []const Node) !*ast.Tree { +pub fn render(allocator: *Allocator, nodes: []const Node) !std.zig.ast.Tree { @panic("TODO"); } diff --git a/src/type.zig b/src/type.zig index 8fcaba6fad..38fe6dd3e6 100644 --- a/src/type.zig +++ b/src/type.zig @@ -1682,6 +1682,8 @@ pub const Type = extern union { .i32 => unreachable, .u64 => unreachable, .i64 => unreachable, + .u128 => unreachable, + .i128 => unreachable, .usize => unreachable, .isize => unreachable, .c_short => unreachable, @@ -2197,6 +2199,8 @@ pub const Type = extern union { .i32 => .{ .signedness = .signed, .bits = 32 }, .u64 => .{ .signedness = .unsigned, .bits = 64 }, .i64 => .{ .signedness = .signed, .bits = 64 }, + .u128 => .{ .signedness = .unsigned, .bits = 128 }, + .i128 => .{ .signedness = .signed, .bits = 128 }, .usize => .{ .signedness = .unsigned, .bits = target.cpu.arch.ptrBitWidth() }, .isize => .{ .signedness = .signed, .bits = target.cpu.arch.ptrBitWidth() }, .c_short => .{ .signedness = .signed, .bits = CType.short.sizeInBits(target) }, From 13a9db208566449cd6bcfa5fd77f2707f7c9f394 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Fri, 12 Feb 2021 13:53:04 +0200 Subject: [PATCH 079/173] translate-c: begin implementing ast.render --- CMakeLists.txt | 1 + src/translate_c/ast.zig | 184 +++++++++++++++++++++++++++++++++++++++- 2 files changed, 181 insertions(+), 4 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index a0c3ae84fa..71dd2c0cee 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -578,6 +578,7 @@ set(ZIG_STAGE2_SOURCES "${CMAKE_SOURCE_DIR}/src/windows_sdk.zig" "${CMAKE_SOURCE_DIR}/src/zir.zig" "${CMAKE_SOURCE_DIR}/src/zir_sema.zig" + "${CMAKE_SOURCE_DIR}/src/translate_c/ast.zig" ) if(MSVC) diff --git a/src/translate_c/ast.zig b/src/translate_c/ast.zig index 24689c89db..8e18b55c04 100644 --- a/src/translate_c/ast.zig +++ b/src/translate_c/ast.zig @@ -454,7 +454,7 @@ pub const Payload = struct { data: struct { cond: Node, body: Node, - 
cont_expr: ?Node + cont_expr: ?Node, }, }; @@ -568,7 +568,7 @@ pub const Payload = struct { base: Payload, data: struct { label: ?[]const u8, - stmts: []Node + stmts: []Node, }, }; @@ -640,6 +640,182 @@ pub const Payload = struct { }; /// Converts the nodes into a Zig ast. -pub fn render(allocator: *Allocator, nodes: []const Node) !std.zig.ast.Tree { - @panic("TODO"); +/// Caller must free the source slice. +pub fn render(gpa: *Allocator, nodes: []const Node) !std.zig.ast.Tree { + var ctx = Context{ + .gpa = gpa, + .buf = std.ArrayList(u8).init(gpa), + }; + defer ctx.buf.deinit(); + defer ctx.nodes.deinit(gpa); + defer ctx.extra_data.deinit(gpa); + defer ctx.tokens.deinit(gpa); + + // Estimate that each top level node has 25 child nodes. + const estimated_node_count = nodes.len * 25; + try ctx.nodes.ensureCapacity(gpa, estimated_node_count); + + ctx.nodes.appendAssumeCapacity(.{ + .tag = .root, + .main_token = 0, + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, + }); + const root_members = try renderNodes(&ctx, nodes); + ctx.nodes.items(.data)[0] = .{ + .lhs = root_members.start, + .rhs = root_members.end, + }; + + try ctx.tokens.append(gpa, .{ + .tag = .eof, + .start = @intCast(u32, ctx.buf.items.len), + }); + + return std.zig.ast.Tree{ + .source = ctx.buf.toOwnedSlice(), + .tokens = ctx.tokens.toOwnedSlice(), + .nodes = ctx.nodes.toOwnedSlice(), + .extra_data = ctx.extra_data.toOwnedSlice(gpa), + .errors = &.{}, + }; +} + +const NodeIndex = std.zig.ast.Node.Index; +const NodeSubRange = std.zig.ast.Node.SubRange; +const TokenIndex = std.zig.ast.TokenIndex; +const TokenTag = std.zig.Token.Tag; + +const Context = struct { + gpa: *Allocator, + buf: std.ArrayList(u8) = .{}, + nodes: std.zig.ast.NodeList = .{}, + extra_data: std.ArrayListUnmanaged(std.zig.ast.Node.Index) = .{}, + tokens: std.zig.ast.TokenList = .{}, + + fn appendTokenFmt(c: *Context, tag: TokenTag, comptime format: []const u8, args: anytype) Allocator.Error!TokenIndex { + const start_index = 
c.buf.items.len; + try c.buf.writer().print(format ++ " ", args); + + try c.tokens.append(c.gpa, .{ + .tag = tag, + .start = @intCast(u32, start_index), + }); + + return @intCast(u32, c.tokens.len - 1); + } + + fn appendToken(c: *Context, tag: TokenTag, bytes: []const u8) Allocator.Error!TokenIndex { + std.debug.assert(tag != .identifier); // use appendIdentifier + return appendTokenFmt(c, tag, "{s}", .{bytes}); + } + + fn appendIdentifier(c: *Context, bytes: []const u8) Allocator.Error!TokenIndex { + return appendTokenFmt(c, .identifier, "{s}", .{std.zig.fmtId(bytes)}); + } + + fn listToSpan(c: *Context, list: []const NodeIndex) Allocator.Error!NodeSubRange { + try c.extra_data.appendSlice(c.gpa, list); + return NodeSubRange{ + .start = @intCast(NodeIndex, c.extra_data.items.len - list.len), + .end = @intCast(NodeIndex, c.extra_data.items.len), + }; + } + + fn appendNode(c: *Context, elem: std.zig.ast.NodeList.Elem) Allocator.Error!NodeIndex { + const result = @intCast(NodeIndex, c.nodes.len); + try c.nodes.append(c.gpa, elem); + return result; + } +}; + +fn renderNodes(c: *Context, nodes: []const Node) !NodeSubRange { + var result = std.ArrayList(NodeIndex).init(c.gpa); + defer result.deinit(); + + for (nodes) |node| { + const res = try renderNode(c, node); + if (res == 0) continue; + try result.append(res); + } + + return try c.listToSpan(result.items); +} + +fn renderNode(c: *Context, node: Node) !NodeIndex { + switch (node.tag()) { + .warning => { + const payload = node.castTag(.warning).?; + try c.buf.appendSlice(payload.data); + try c.buf.append('\n'); + return 0; + }, + .usingnamespace_builtins => { + // pub usingnamespace @import("std").c.builtins; + _ = try c.appendToken(.keyword_pub, "pub"); + const usingnamespace_token = try c.appendToken(.keyword_usingnamespace, "usingnamespace"); + const import_node = try renderStdImport(c, "c", "builtins"); + _ = try c.appendToken(.semicolon, ";"); + + return c.appendNode(.{ + .tag = .@"usingnamespace", + .main_token 
= usingnamespace_token, + .data = .{ + .lhs = import_node, + .rhs = undefined, + }, + }); + }, + else => { + try c.buf.writer().print("// TODO renderNode {}\n", .{node.tag()}); + return @as(u32, 0); // error: integer value 0 cannot be coerced to type 'std.mem.Allocator.Error!u32' + }, + } +} + +fn renderStdImport(c: *Context, first: []const u8, second: []const u8) !NodeIndex { + const import_tok = try c.appendToken(.builtin, "@import"); + _ = try c.appendToken(.l_paren, "("); + + const std_tok = try c.appendToken(.string_literal, "\"std\""); + const std_node = try c.appendNode(.{ + .tag = .string_literal, + .main_token = std_tok, + .data = .{ + .lhs = std_tok, + .rhs = std_tok, + }, + }); + + _ = try c.appendToken(.r_paren, ")"); + + const import_node = try c.appendNode(.{ + .tag = .builtin_call_two, + .main_token = import_tok, + .data = .{ + .lhs = std_node, + .rhs = 0, + }, + }); + + var access_chain = import_node; + access_chain = try c.appendNode(.{ + .tag = .field_access, + .main_token = try c.appendToken(.period, "."), + .data = .{ + .lhs = access_chain, + .rhs = try c.appendIdentifier(first), + }, + }); + access_chain = try c.appendNode(.{ + .tag = .field_access, + .main_token = try c.appendToken(.period, "."), + .data = .{ + .lhs = access_chain, + .rhs = try c.appendIdentifier(second), + }, + }); + return access_chain; } From d7460db044ef6649486a27f2b9ebb1de9e2ce2b0 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Sat, 13 Feb 2021 17:57:52 +0200 Subject: [PATCH 080/173] translate-c: render a bunch of simple nodes and calls --- lib/std/zig/render.zig | 9 +- src/translate_c.zig | 5 +- src/translate_c/ast.zig | 297 +++++++++++++++++++++++++++++++++++----- 3 files changed, 270 insertions(+), 41 deletions(-) diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 2d9c2ae9a9..6fd091d32c 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -550,14 +550,11 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: 
Spac .builtin_call_two, .builtin_call_two_comma => { if (datas[node].lhs == 0) { - const params = [_]ast.Node.Index{}; - return renderBuiltinCall(ais, tree, main_tokens[node], ¶ms, space); + return renderBuiltinCall(ais, tree, main_tokens[node], &.{}, space); } else if (datas[node].rhs == 0) { - const params = [_]ast.Node.Index{datas[node].lhs}; - return renderBuiltinCall(ais, tree, main_tokens[node], ¶ms, space); + return renderBuiltinCall(ais, tree, main_tokens[node], &.{datas[node].lhs}, space); } else { - const params = [_]ast.Node.Index{ datas[node].lhs, datas[node].rhs }; - return renderBuiltinCall(ais, tree, main_tokens[node], ¶ms, space); + return renderBuiltinCall(ais, tree, main_tokens[node], &.{ datas[node].lhs, datas[node].rhs }, space); } }, .builtin_call, .builtin_call_comma => { diff --git a/src/translate_c.zig b/src/translate_c.zig index c7a30ff919..9b80582ab5 100644 --- a/src/translate_c.zig +++ b/src/translate_c.zig @@ -3054,6 +3054,7 @@ fn maybeSuppressResult( fn addTopLevelDecl(c: *Context, name: []const u8, decl_node: Node) !void { _ = try c.global_scope.sym_table.put(name, decl_node); + try c.global_scope.nodes.append(decl_node); } /// Translate a qual type for a variable with an initializer. 
The initializer @@ -3903,10 +3904,10 @@ fn fail( pub fn failDecl(c: *Context, loc: clang.SourceLocation, name: []const u8, comptime format: []const u8, args: anytype) Error!void { // location // pub const name = @compileError(msg); + const fail_msg = try std.fmt.allocPrint(c.arena, format, args); + try c.global_scope.nodes.append(try Tag.fail_decl.create(c.arena, .{ .actual = name, .mangled = fail_msg })); const location_comment = try std.fmt.allocPrint(c.arena, "// {s}", .{c.locStr(loc)}); try c.global_scope.nodes.append(try Tag.warning.create(c.arena, location_comment)); - const fail_msg = try std.fmt.allocPrint(c.arena, format, args); - try c.global_scope.nodes.append(try Tag.fail_decl.create(c.arena, fail_msg)); } pub fn freeErrors(errors: []ClangErrMsg) void { diff --git a/src/translate_c/ast.zig b/src/translate_c/ast.zig index 8e18b55c04..4e91f13757 100644 --- a/src/translate_c/ast.zig +++ b/src/translate_c/ast.zig @@ -52,7 +52,6 @@ pub const Node = extern union { var_decl, func, warning, - failed_decl, /// All enums are non-exhaustive @"enum", @"struct", @@ -339,9 +338,7 @@ pub const Node = extern union { .char_literal, .identifier, .warning, - .failed_decl, .type, - .fail_decl, => Payload.Value, .@"if" => Payload.If, .@"while" => Payload.While, @@ -359,7 +356,7 @@ pub const Node = extern union { .block => Payload.Block, .c_pointer, .single_pointer => Payload.Pointer, .array_type => Payload.Array, - .arg_redecl, .alias => Payload.ArgRedecl, + .arg_redecl, .alias, .fail_decl => Payload.ArgRedecl, .log2_int_type => Payload.Log2IntType, .typedef, .pub_typedef, .var_simple, .pub_var_simple => Payload.SimpleVarDecl, .enum_redecl => Payload.EnumRedecl, @@ -695,7 +692,7 @@ const Context = struct { extra_data: std.ArrayListUnmanaged(std.zig.ast.Node.Index) = .{}, tokens: std.zig.ast.TokenList = .{}, - fn appendTokenFmt(c: *Context, tag: TokenTag, comptime format: []const u8, args: anytype) Allocator.Error!TokenIndex { + fn addTokenFmt(c: *Context, tag: TokenTag, 
comptime format: []const u8, args: anytype) Allocator.Error!TokenIndex { const start_index = c.buf.items.len; try c.buf.writer().print(format ++ " ", args); @@ -707,13 +704,13 @@ const Context = struct { return @intCast(u32, c.tokens.len - 1); } - fn appendToken(c: *Context, tag: TokenTag, bytes: []const u8) Allocator.Error!TokenIndex { - std.debug.assert(tag != .identifier); // use appendIdentifier - return appendTokenFmt(c, tag, "{s}", .{bytes}); + fn addToken(c: *Context, tag: TokenTag, bytes: []const u8) Allocator.Error!TokenIndex { + std.debug.assert(tag != .identifier); // use addIdentifier + return addTokenFmt(c, tag, "{s}", .{bytes}); } - fn appendIdentifier(c: *Context, bytes: []const u8) Allocator.Error!TokenIndex { - return appendTokenFmt(c, .identifier, "{s}", .{std.zig.fmtId(bytes)}); + fn addIdentifier(c: *Context, bytes: []const u8) Allocator.Error!TokenIndex { + return addTokenFmt(c, .identifier, "{s}", .{std.zig.fmtId(bytes)}); } fn listToSpan(c: *Context, list: []const NodeIndex) Allocator.Error!NodeSubRange { @@ -724,14 +721,14 @@ const Context = struct { }; } - fn appendNode(c: *Context, elem: std.zig.ast.NodeList.Elem) Allocator.Error!NodeIndex { + fn addNode(c: *Context, elem: std.zig.ast.NodeList.Elem) Allocator.Error!NodeIndex { const result = @intCast(NodeIndex, c.nodes.len); try c.nodes.append(c.gpa, elem); return result; } }; -fn renderNodes(c: *Context, nodes: []const Node) !NodeSubRange { +fn renderNodes(c: *Context, nodes: []const Node) Allocator.Error!NodeSubRange { var result = std.ArrayList(NodeIndex).init(c.gpa); defer result.deinit(); @@ -744,7 +741,7 @@ fn renderNodes(c: *Context, nodes: []const Node) !NodeSubRange { return try c.listToSpan(result.items); } -fn renderNode(c: *Context, node: Node) !NodeIndex { +fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { switch (node.tag()) { .warning => { const payload = node.castTag(.warning).?; @@ -754,12 +751,12 @@ fn renderNode(c: *Context, node: Node) !NodeIndex { }, 
.usingnamespace_builtins => { // pub usingnamespace @import("std").c.builtins; - _ = try c.appendToken(.keyword_pub, "pub"); - const usingnamespace_token = try c.appendToken(.keyword_usingnamespace, "usingnamespace"); + _ = try c.addToken(.keyword_pub, "pub"); + const usingnamespace_token = try c.addToken(.keyword_usingnamespace, "usingnamespace"); const import_node = try renderStdImport(c, "c", "builtins"); - _ = try c.appendToken(.semicolon, ";"); + _ = try c.addToken(.semicolon, ";"); - return c.appendNode(.{ + return c.addNode(.{ .tag = .@"usingnamespace", .main_token = usingnamespace_token, .data = .{ @@ -768,6 +765,196 @@ fn renderNode(c: *Context, node: Node) !NodeIndex { }, }); }, + .std_math_Log2Int => { + const payload = node.castTag(.std_math_Log2Int).?; + const import_node = try renderStdImport(c, "math", "Log2Int"); + return renderCall(c, import_node, &.{payload.data}); + }, + .std_meta_cast => { + const payload = node.castTag(.std_meta_cast).?; + const import_node = try renderStdImport(c, "meta", "cast"); + return renderCall(c, import_node, &.{ payload.data.lhs, payload.data.rhs }); + }, + .std_meta_sizeof => { + const payload = node.castTag(.std_meta_sizeof).?; + const import_node = try renderStdImport(c, "meta", "sizeof"); + return renderCall(c, import_node, &.{payload.data}); + }, + .std_mem_zeroes => { + const payload = node.castTag(.std_mem_zeroes).?; + const import_node = try renderStdImport(c, "mem", "zeroes"); + return renderCall(c, import_node, &.{payload.data}); + }, + .std_mem_zeroinit => { + const payload = node.castTag(.std_mem_zeroinit).?; + const import_node = try renderStdImport(c, "mem", "zeroInit"); + return renderCall(c, import_node, &.{ payload.data.lhs, payload.data.rhs }); + }, + .call => { + const payload = node.castTag(.call).?; + const lhs = try renderNode(c, payload.data.lhs); + return renderCall(c, lhs, payload.data.args); + }, + .null_literal => return c.addNode(.{ + .tag = .null_literal, + .main_token = try 
c.addToken(.keyword_null, "null"), + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, + }), + .undefined_literal => return c.addNode(.{ + .tag = .undefined_literal, + .main_token = try c.addToken(.keyword_undefined, "undefined"), + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, + }), + .true_literal => return c.addNode(.{ + .tag = .true_literal, + .main_token = try c.addToken(.keyword_true, "true"), + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, + }), + .false_literal => return c.addNode(.{ + .tag = .false_literal, + .main_token = try c.addToken(.keyword_false, "false"), + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, + }), + .zero_literal => return c.addNode(.{ + .tag = .integer_literal, + .main_token = try c.addToken(.integer_literal, "0"), + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, + }), + .one_literal => return c.addNode(.{ + .tag = .integer_literal, + .main_token = try c.addToken(.integer_literal, "1"), + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, + }), + .void_type => return c.addNode(.{ + .tag = .identifier, + .main_token = try c.addToken(.identifier, "void"), + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, + }), + .@"anytype" => return try c.addNode(.{ + .tag = .@"anytype", + .main_token = try c.addToken(.keyword_anytype, "anytype"), + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, + }), + .type => { + const payload = node.castTag(.type).?; + return c.addNode(.{ + .tag = .identifier, + .main_token = try c.addToken(.identifier, payload.data), + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, + }); + }, + .identifier => { + const payload = node.castTag(.identifier).?; + return c.addNode(.{ + .tag = .identifier, + .main_token = try c.addIdentifier(payload.data), + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, + }); + }, + .number_literal => { + const payload = node.castTag(.number_literal).?; + return c.addNode(.{ + .tag = .identifier, + // might be integer or 
float, but it doesn't matter for rendering + .main_token = try c.addToken(.integer_literal, payload.data), + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, + }); + }, + .string_literal => { + const payload = node.castTag(.string_literal).?; + return c.addNode(.{ + .tag = .identifier, + .main_token = try c.addToken(.char_literal, payload.data), + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, + }); + }, + .char_literal => { + const payload = node.castTag(.char_literal).?; + return c.addNode(.{ + .tag = .identifier, + .main_token = try c.addToken(.string_literal, payload.data), + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, + }); + }, + .fail_decl => { + const payload = node.castTag(.fail_decl).?; + // pub const name = @compileError(msg); + _ = try c.addToken(.keyword_pub, "pub"); + const const_kw = try c.addToken(.keyword_const, "const"); + _ = try c.addIdentifier(payload.data.actual); + _ = try c.addToken(.equal, "="); + + + const compile_error_tok = try c.addToken(.builtin, "@compileError"); + _ = try c.addToken(.l_paren, "("); + const err_msg_tok = try c.addTokenFmt(.string_literal, "\"{s}\"", .{std.zig.fmtEscapes(payload.data.mangled)}); + const err_msg = try c.addNode(.{ + .tag = .string_literal, + .main_token = err_msg_tok, + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, + }); + _ = try c.addToken(.r_paren, ")"); + const compile_error = try c.addNode(.{ + .tag = .builtin_call_two, + .main_token = compile_error_tok, + .data = .{ + .lhs = err_msg, + .rhs = 0, + }, + }); + _ = try c.addToken(.semicolon, ";"); + + return c.addNode(.{ + .tag = .simple_var_decl, + .main_token = const_kw, + .data = .{ + .lhs = 0, + .rhs = compile_error, + } + }); + }, else => { try c.buf.writer().print("// TODO renderNode {}\n", .{node.tag()}); return @as(u32, 0); // error: integer value 0 cannot be coerced to type 'std.mem.Allocator.Error!u32' @@ -776,22 +963,20 @@ fn renderNode(c: *Context, node: Node) !NodeIndex { } fn renderStdImport(c: 
*Context, first: []const u8, second: []const u8) !NodeIndex { - const import_tok = try c.appendToken(.builtin, "@import"); - _ = try c.appendToken(.l_paren, "("); - - const std_tok = try c.appendToken(.string_literal, "\"std\""); - const std_node = try c.appendNode(.{ + const import_tok = try c.addToken(.builtin, "@import"); + _ = try c.addToken(.l_paren, "("); + const std_tok = try c.addToken(.string_literal, "\"std\""); + const std_node = try c.addNode(.{ .tag = .string_literal, .main_token = std_tok, .data = .{ - .lhs = std_tok, - .rhs = std_tok, + .lhs = undefined, + .rhs = undefined, }, }); + _ = try c.addToken(.r_paren, ")"); - _ = try c.appendToken(.r_paren, ")"); - - const import_node = try c.appendNode(.{ + const import_node = try c.addNode(.{ .tag = .builtin_call_two, .main_token = import_tok, .data = .{ @@ -801,21 +986,67 @@ fn renderStdImport(c: *Context, first: []const u8, second: []const u8) !NodeInde }); var access_chain = import_node; - access_chain = try c.appendNode(.{ + access_chain = try c.addNode(.{ .tag = .field_access, - .main_token = try c.appendToken(.period, "."), + .main_token = try c.addToken(.period, "."), .data = .{ .lhs = access_chain, - .rhs = try c.appendIdentifier(first), + .rhs = try c.addIdentifier(first), }, }); - access_chain = try c.appendNode(.{ + access_chain = try c.addNode(.{ .tag = .field_access, - .main_token = try c.appendToken(.period, "."), + .main_token = try c.addToken(.period, "."), .data = .{ .lhs = access_chain, - .rhs = try c.appendIdentifier(second), + .rhs = try c.addIdentifier(second), }, }); return access_chain; } + +fn renderCall(c: *Context, lhs: NodeIndex, args: []const Node) !NodeIndex { + const lparen = try c.addToken(.l_paren, "("); + const res = switch (args.len) { + 0 => try c.addNode(.{ + .tag = .call_one, + .main_token = lparen, + .data = .{ + .lhs = lhs, + .rhs = 0, + }, + }), + 1 => blk: { + const arg = try renderNode(c, args[0]); + break :blk try c.addNode(.{ + .tag = .call_one, + .main_token = 
lparen, + .data = .{ + .lhs = lhs, + .rhs = arg, + }, + }); + }, + else => blk: { + const start = @intCast(u32, c.extra_data.items.len); + const end = @intCast(u32, start + args.len); + try c.extra_data.ensureCapacity(c.gpa, end + 2); // + 2 for span start + end + for (args) |arg, i| { + if (i != 0) _ = try c.addToken(.comma, ","); + c.extra_data.appendAssumeCapacity(try renderNode(c, arg)); + } + c.extra_data.appendAssumeCapacity(start); + c.extra_data.appendAssumeCapacity(end); + break :blk try c.addNode(.{ + .tag = .call_comma, + .main_token = lparen, + .data = .{ + .lhs = lhs, + .rhs = end + 2, + }, + }); + }, + }; + _ = try c.addToken(.r_paren, ")"); + return res; +} From 1147ecc5fda6d46c5e4d02eeda18238f34ae56dd Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Sat, 13 Feb 2021 20:50:39 +0200 Subject: [PATCH 081/173] translate-c: render variables and builtin calls --- src/translate_c.zig | 31 ++-- src/translate_c/ast.zig | 312 ++++++++++++++++++++++++++++++++++------ 2 files changed, 282 insertions(+), 61 deletions(-) diff --git a/src/translate_c.zig b/src/translate_c.zig index 9b80582ab5..821f3c5b74 100644 --- a/src/translate_c.zig +++ b/src/translate_c.zig @@ -769,7 +769,7 @@ fn transCreateNodeTypedef( const payload = try c.arena.create(ast.Payload.SimpleVarDecl); payload.* = .{ - .base = .{ .tag = ([2]Tag{ .typedef, .pub_typedef })[@boolToInt(toplevel)] }, + .base = .{ .tag = ([2]Tag{ .var_simple, .pub_var_simple })[@boolToInt(toplevel)] }, .data = .{ .name = checked_name, .init = init_node, @@ -1678,7 +1678,7 @@ fn transStringLiteralAsArray( init_list[i] = try transCreateCharLitNode(c, narrow, code_unit); } while (i < array_size) : (i += 1) { - init_list[i] = try transCreateNodeNumber(c, 0); + init_list[i] = try transCreateNodeNumber(c, 0, .int); } return Tag.array_init.create(c.arena, init_list); @@ -2345,7 +2345,7 @@ fn transCharLiteral( // C has a somewhat obscure feature called multi-character character constant // e.g. 
'abcd' const int_lit_node = if (kind == .Ascii and val > 255) - try transCreateNodeNumber(c, val) + try transCreateNodeNumber(c, val, .int) else try transCreateCharLitNode(c, narrow, val); @@ -2948,7 +2948,7 @@ fn transBreak(c: *Context, scope: *Scope) TransError!Node { fn transFloatingLiteral(c: *Context, scope: *Scope, stmt: *const clang.FloatingLiteral, used: ResultUsed) TransError!Node { // TODO use something more accurate const dbl = stmt.getValueAsApproximateDouble(); - const node = try transCreateNodeNumber(c, dbl); + const node = try transCreateNodeNumber(c, dbl, .float); return maybeSuppressResult(c, scope, used, node); } @@ -3471,13 +3471,16 @@ fn transCreateNodeAPInt(c: *Context, int: *const clang.APSInt) !Node { const str = big.toStringAlloc(c.arena, 10, false) catch |err| switch (err) { error.OutOfMemory => return error.OutOfMemory, }; - return Tag.number_literal.create(c.arena, str); + return Tag.integer_literal.create(c.arena, str); } -fn transCreateNodeNumber(c: *Context, int: anytype) !Node { - const fmt_s = if (comptime std.meta.trait.isNumber(@TypeOf(int))) "{d}" else "{s}"; - const str = try std.fmt.allocPrint(c.arena, fmt_s, .{int}); - return Tag.number_literal.create(c.arena, str); +fn transCreateNodeNumber(c: *Context, num: anytype, num_kind: enum { int, float }) !Node { + const fmt_s = if (comptime std.meta.trait.isNumber(@TypeOf(num))) "{d}" else "{s}"; + const str = try std.fmt.allocPrint(c.arena, fmt_s, .{num}); + if (num_kind == .float) + return Tag.float_literal.create(c.arena, str) + else + return Tag.integer_literal.create(c.arena, str); } fn transCreateNodeMacroFn(c: *Context, name: []const u8, ref: Node, proto_alias: *ast.Payload.Func) !Node { @@ -4162,7 +4165,7 @@ fn parseCNumLit(c: *Context, m: *MacroCtx) ParseError!Node { } if (suffix == .none) { - return transCreateNodeNumber(c, lit_bytes); + return transCreateNodeNumber(c, lit_bytes, .int); } const type_node = try Tag.type.create(c.arena, switch (suffix) { @@ -4179,21 +4182,21 
@@ fn parseCNumLit(c: *Context, m: *MacroCtx) ParseError!Node { .llu => 3, else => unreachable, }]; - const rhs = try transCreateNodeNumber(c, lit_bytes); + const rhs = try transCreateNodeNumber(c, lit_bytes, .int); return Tag.as.create(c.arena, .{ .lhs = type_node, .rhs = rhs }); }, .FloatLiteral => |suffix| { if (lit_bytes[0] == '.') lit_bytes = try std.fmt.allocPrint(c.arena, "0{s}", .{lit_bytes}); if (suffix == .none) { - return transCreateNodeNumber(c, lit_bytes); + return transCreateNodeNumber(c, lit_bytes, .float); } const type_node = try Tag.type.create(c.arena, switch (suffix) { .f => "f32", .l => "c_longdouble", else => unreachable, }); - const rhs = try transCreateNodeNumber(c, lit_bytes[0 .. lit_bytes.len - 1]); + const rhs = try transCreateNodeNumber(c, lit_bytes[0 .. lit_bytes.len - 1], .float); return Tag.as.create(c.arena, .{ .lhs = type_node, .rhs = rhs }); }, else => unreachable, @@ -4369,7 +4372,7 @@ fn parseCPrimaryExprInner(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!N return Tag.char_literal.create(c.arena, try zigifyEscapeSequences(c, m)); } else { const str = try std.fmt.allocPrint(c.arena, "0x{x}", .{slice[1 .. slice.len - 1]}); - return Tag.number_literal.create(c.arena, str); + return Tag.integer_literal.create(c.arena, str); } }, .StringLiteral => { diff --git a/src/translate_c/ast.zig b/src/translate_c/ast.zig index 4e91f13757..282e645f2e 100644 --- a/src/translate_c/ast.zig +++ b/src/translate_c/ast.zig @@ -27,8 +27,8 @@ pub const Node = extern union { usingnamespace_builtins, // After this, the tag requires a payload. 
- // int or float, doesn't really matter - number_literal, + integer_literal, + float_literal, string_literal, char_literal, identifier, @@ -193,10 +193,8 @@ pub const Node = extern union { /// pub const alias = actual; alias, /// const name = init; - typedef, var_simple, /// pub const name = init; - pub_typedef, pub_var_simple, /// pub const enum_field_name = @enumToInt(enum_name.field_name); enum_redecl, @@ -333,7 +331,8 @@ pub const Node = extern union { .ptr_cast, => Payload.BinOp, - .number_literal, + .integer_literal, + .float_literal, .string_literal, .char_literal, .identifier, @@ -358,7 +357,7 @@ pub const Node = extern union { .array_type => Payload.Array, .arg_redecl, .alias, .fail_decl => Payload.ArgRedecl, .log2_int_type => Payload.Log2IntType, - .typedef, .pub_typedef, .var_simple, .pub_var_simple => Payload.SimpleVarDecl, + .var_simple, .pub_var_simple => Payload.SimpleVarDecl, .enum_redecl => Payload.EnumRedecl, .array_filler => Payload.ArrayFiller, .pub_inline_fn => Payload.PubInlineFn, @@ -705,7 +704,6 @@ const Context = struct { } fn addToken(c: *Context, tag: TokenTag, bytes: []const u8) Allocator.Error!TokenIndex { - std.debug.assert(tag != .identifier); // use addIdentifier return addTokenFmt(c, tag, "{s}", .{bytes}); } @@ -726,6 +724,17 @@ const Context = struct { try c.nodes.append(c.gpa, elem); return result; } + + fn addExtra(c: *Context, extra: anytype) Allocator.Error!NodeIndex { + const fields = std.meta.fields(@TypeOf(extra)); + try c.extra_data.ensureCapacity(c.gpa, c.extra_data.items.len + fields.len); + const result = @intCast(u32, c.extra_data.items.len); + inline for (fields) |field| { + comptime std.debug.assert(field.field_type == NodeIndex); + c.extra_data.appendAssumeCapacity(@field(extra, field.name)); + } + return result; + } }; fn renderNodes(c: *Context, nodes: []const Node) Allocator.Error!NodeSubRange { @@ -734,7 +743,8 @@ fn renderNodes(c: *Context, nodes: []const Node) Allocator.Error!NodeSubRange { for (nodes) |node| 
{ const res = try renderNode(c, node); - if (res == 0) continue; + if (node.tag() == .warning) continue; + if (c.nodes.items(.tag)[res] == .identifier) continue; // TODO remove try result.append(res); } @@ -744,10 +754,10 @@ fn renderNodes(c: *Context, nodes: []const Node) Allocator.Error!NodeSubRange { fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { switch (node.tag()) { .warning => { - const payload = node.castTag(.warning).?; - try c.buf.appendSlice(payload.data); + const payload = node.castTag(.warning).?.data; + try c.buf.appendSlice(payload); try c.buf.append('\n'); - return 0; + return @as(NodeIndex, 0); // error: integer value 0 cannot be coerced to type 'std.mem.Allocator.Error!u32' }, .usingnamespace_builtins => { // pub usingnamespace @import("std").c.builtins; @@ -766,34 +776,34 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { }); }, .std_math_Log2Int => { - const payload = node.castTag(.std_math_Log2Int).?; + const payload = node.castTag(.std_math_Log2Int).?.data; const import_node = try renderStdImport(c, "math", "Log2Int"); - return renderCall(c, import_node, &.{payload.data}); + return renderCall(c, import_node, &.{payload}); }, .std_meta_cast => { - const payload = node.castTag(.std_meta_cast).?; + const payload = node.castTag(.std_meta_cast).?.data; const import_node = try renderStdImport(c, "meta", "cast"); - return renderCall(c, import_node, &.{ payload.data.lhs, payload.data.rhs }); + return renderCall(c, import_node, &.{ payload.lhs, payload.rhs }); }, .std_meta_sizeof => { - const payload = node.castTag(.std_meta_sizeof).?; + const payload = node.castTag(.std_meta_sizeof).?.data; const import_node = try renderStdImport(c, "meta", "sizeof"); - return renderCall(c, import_node, &.{payload.data}); + return renderCall(c, import_node, &.{payload}); }, .std_mem_zeroes => { - const payload = node.castTag(.std_mem_zeroes).?; + const payload = node.castTag(.std_mem_zeroes).?.data; const import_node = try 
renderStdImport(c, "mem", "zeroes"); - return renderCall(c, import_node, &.{payload.data}); + return renderCall(c, import_node, &.{payload}); }, .std_mem_zeroinit => { - const payload = node.castTag(.std_mem_zeroinit).?; + const payload = node.castTag(.std_mem_zeroinit).?.data; const import_node = try renderStdImport(c, "mem", "zeroInit"); - return renderCall(c, import_node, &.{ payload.data.lhs, payload.data.rhs }); + return renderCall(c, import_node, &.{ payload.lhs, payload.rhs }); }, .call => { - const payload = node.castTag(.call).?; - const lhs = try renderNode(c, payload.data.lhs); - return renderCall(c, lhs, payload.data.args); + const payload = node.castTag(.call).?.data; + const lhs = try renderNode(c, payload.lhs); + return renderCall(c, lhs, payload.args); }, .null_literal => return c.addNode(.{ .tag = .null_literal, @@ -860,10 +870,10 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { }, }), .type => { - const payload = node.castTag(.type).?; + const payload = node.castTag(.type).?.data; return c.addNode(.{ .tag = .identifier, - .main_token = try c.addToken(.identifier, payload.data), + .main_token = try c.addToken(.identifier, payload), .data = .{ .lhs = undefined, .rhs = undefined, @@ -871,22 +881,32 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { }); }, .identifier => { - const payload = node.castTag(.identifier).?; + const payload = node.castTag(.identifier).?.data; return c.addNode(.{ .tag = .identifier, - .main_token = try c.addIdentifier(payload.data), + .main_token = try c.addIdentifier(payload), .data = .{ .lhs = undefined, .rhs = undefined, }, }); }, - .number_literal => { - const payload = node.castTag(.number_literal).?; + .float_literal => { + const payload = node.castTag(.float_literal).?.data; return c.addNode(.{ - .tag = .identifier, - // might be integer or float, but it doesn't matter for rendering - .main_token = try c.addToken(.integer_literal, payload.data), + .tag = .float_literal, + 
.main_token = try c.addToken(.float_literal, payload), + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, + }); + }, + .integer_literal => { + const payload = node.castTag(.integer_literal).?.data; + return c.addNode(.{ + .tag = .integer_literal, + .main_token = try c.addToken(.integer_literal, payload), .data = .{ .lhs = undefined, .rhs = undefined, @@ -894,10 +914,10 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { }); }, .string_literal => { - const payload = node.castTag(.string_literal).?; + const payload = node.castTag(.string_literal).?.data; return c.addNode(.{ .tag = .identifier, - .main_token = try c.addToken(.char_literal, payload.data), + .main_token = try c.addToken(.string_literal, payload), .data = .{ .lhs = undefined, .rhs = undefined, @@ -905,10 +925,10 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { }); }, .char_literal => { - const payload = node.castTag(.char_literal).?; + const payload = node.castTag(.char_literal).?.data; return c.addNode(.{ .tag = .identifier, - .main_token = try c.addToken(.string_literal, payload.data), + .main_token = try c.addToken(.string_literal, payload), .data = .{ .lhs = undefined, .rhs = undefined, @@ -916,17 +936,16 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { }); }, .fail_decl => { - const payload = node.castTag(.fail_decl).?; + const payload = node.castTag(.fail_decl).?.data; // pub const name = @compileError(msg); _ = try c.addToken(.keyword_pub, "pub"); - const const_kw = try c.addToken(.keyword_const, "const"); - _ = try c.addIdentifier(payload.data.actual); + const const_tok = try c.addToken(.keyword_const, "const"); + _ = try c.addIdentifier(payload.actual); _ = try c.addToken(.equal, "="); - const compile_error_tok = try c.addToken(.builtin, "@compileError"); _ = try c.addToken(.l_paren, "("); - const err_msg_tok = try c.addTokenFmt(.string_literal, "\"{s}\"", .{std.zig.fmtEscapes(payload.data.mangled)}); + const err_msg_tok = try 
c.addTokenFmt(.string_literal, "\"{s}\"", .{std.zig.fmtEscapes(payload.mangled)}); const err_msg = try c.addNode(.{ .tag = .string_literal, .main_token = err_msg_tok, @@ -948,17 +967,105 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { return c.addNode(.{ .tag = .simple_var_decl, - .main_token = const_kw, + .main_token = const_tok, .data = .{ .lhs = 0, .rhs = compile_error, - } + }, }); }, - else => { - try c.buf.writer().print("// TODO renderNode {}\n", .{node.tag()}); - return @as(u32, 0); // error: integer value 0 cannot be coerced to type 'std.mem.Allocator.Error!u32' + .pub_var_simple, .var_simple => { + const payload = @fieldParentPtr(Payload.SimpleVarDecl, "base", node.ptr_otherwise).data; + if (node.tag() == .pub_var_simple) _ = try c.addToken(.keyword_pub, "pub"); + const const_tok = try c.addToken(.keyword_const, "const"); + _ = try c.addIdentifier(payload.name); + _ = try c.addToken(.equal, "="); + + const init = try renderNode(c, payload.init); + _ = try c.addToken(.semicolon, ";"); + + return c.addNode(.{ + .tag = .simple_var_decl, + .main_token = const_tok, + .data = .{ + .lhs = 0, + .rhs = init, + }, + }); }, + .var_decl => return renderVar(c, node), + .int_cast => { + const payload = node.castTag(.int_cast).?.data; + return renderBuiltinCall(c, "@intCast", &.{ payload.lhs, payload.rhs }); + }, + .rem => { + const payload = node.castTag(.rem).?.data; + return renderBuiltinCall(c, "@rem", &.{ payload.lhs, payload.rhs }); + }, + .div_trunc => { + const payload = node.castTag(.div_trunc).?.data; + return renderBuiltinCall(c, "@divTrunc", &.{ payload.lhs, payload.rhs }); + }, + .bool_to_int => { + const payload = node.castTag(.bool_to_int).?.data; + return renderBuiltinCall(c, "@boolToInt", &.{payload}); + }, + .as => { + const payload = node.castTag(.as).?.data; + return renderBuiltinCall(c, "@as", &.{ payload.lhs, payload.rhs }); + }, + .truncate => { + const payload = node.castTag(.truncate).?.data; + return renderBuiltinCall(c, 
"@truncate", &.{ payload.lhs, payload.rhs }); + }, + .bit_cast => { + const payload = node.castTag(.bit_cast).?.data; + return renderBuiltinCall(c, "@bitCast", &.{ payload.lhs, payload.rhs }); + }, + .float_cast => { + const payload = node.castTag(.float_cast).?.data; + return renderBuiltinCall(c, "@floatCast", &.{ payload.lhs, payload.rhs }); + }, + .float_to_int => { + const payload = node.castTag(.float_to_int).?.data; + return renderBuiltinCall(c, "@floatToInt", &.{ payload.lhs, payload.rhs }); + }, + .int_to_float => { + const payload = node.castTag(.int_to_float).?.data; + return renderBuiltinCall(c, "@intToFloat", &.{ payload.lhs, payload.rhs }); + }, + .int_to_enum => { + const payload = node.castTag(.int_to_enum).?.data; + return renderBuiltinCall(c, "@intToEnum", &.{ payload.lhs, payload.rhs }); + }, + .enum_to_int => { + const payload = node.castTag(.enum_to_int).?.data; + return renderBuiltinCall(c, "@enumToInt", &.{payload}); + }, + .int_to_ptr => { + const payload = node.castTag(.int_to_ptr).?.data; + return renderBuiltinCall(c, "@intToPtr", &.{ payload.lhs, payload.rhs }); + }, + .ptr_to_int => { + const payload = node.castTag(.ptr_to_int).?.data; + return renderBuiltinCall(c, "@ptrToInt", &.{payload}); + }, + .align_cast => { + const payload = node.castTag(.align_cast).?.data; + return renderBuiltinCall(c, "@alignCast", &.{ payload.lhs, payload.rhs }); + }, + .ptr_cast => { + const payload = node.castTag(.ptr_cast).?.data; + return renderBuiltinCall(c, "@ptrCast", &.{ payload.lhs, payload.rhs }); + }, + else => return c.addNode(.{ + .tag = .identifier, + .main_token = try c.addTokenFmt(.identifier, "@\"TODO {}\"", .{node.tag()}), + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, + }), } } @@ -1050,3 +1157,114 @@ fn renderCall(c: *Context, lhs: NodeIndex, args: []const Node) !NodeIndex { _ = try c.addToken(.r_paren, ")"); return res; } + +fn renderBuiltinCall(c: *Context, builtin: []const u8, args: []const Node) !NodeIndex { + const 
builtin_tok = try c.addToken(.builtin, builtin); + _ = try c.addToken(.l_paren, "("); + var arg_1: NodeIndex = 0; + var arg_2: NodeIndex = 0; + switch (args.len) { + 0 => {}, + 1 => { + arg_1 = try renderNode(c, args[0]); + }, + 2 => { + arg_1 = try renderNode(c, args[0]); + _ = try c.addToken(.comma, ","); + arg_2 = try renderNode(c, args[1]); + }, + else => unreachable, // expand this function as needed. + } + + _ = try c.addToken(.r_paren, ")"); + return c.addNode(.{ + .tag = .builtin_call_two, + .main_token = builtin_tok, + .data = .{ + .lhs = arg_1, + .rhs = arg_2, + }, + }); +} + +fn renderVar(c: *Context, node: Node) !NodeIndex { + const payload = node.castTag(.var_decl).?.data; + if (payload.is_pub) _ = try c.addToken(.keyword_pub, "pub"); + if (payload.is_extern) _ = try c.addToken(.keyword_extern, "extern"); + if (payload.is_export) _ = try c.addToken(.keyword_export, "export"); + const mut_tok = if (payload.is_const) + try c.addToken(.keyword_const, "const") + else + try c.addToken(.keyword_var, "var"); + _ = try c.addIdentifier(payload.name); + _ = try c.addToken(.colon, ":"); + const type_node = try renderNode(c, payload.type); + + const align_node = if (payload.alignment) |some| blk: { + _ = try c.addToken(.keyword_align, "align"); + _ = try c.addToken(.l_paren, "("); + const res = try c.addNode(.{ + .tag = .integer_literal, + .main_token = try c.addTokenFmt(.integer_literal, "{d}", .{some}), + .data = .{ .lhs = undefined, .rhs = undefined }, + }); + _ = try c.addToken(.r_paren, ")"); + break :blk res; + } else 0; + + const section_node = if (payload.linksection_string) |some| blk: { + _ = try c.addToken(.keyword_linksection, "linksection"); + _ = try c.addToken(.l_paren, "("); + const res = try c.addNode(.{ + .tag = .string_literal, + .main_token = try c.addTokenFmt(.string_literal, "\"{s}\"", .{std.zig.fmtEscapes(some)}), + .data = .{ .lhs = undefined, .rhs = undefined }, + }); + _ = try c.addToken(.r_paren, ")"); + break :blk res; + } else 0; + + 
const init_node = if (payload.init) |some| blk: { + _ = try c.addToken(.equal, "="); + break :blk try renderNode(c, some); + } else 0; + _ = try c.addToken(.semicolon, ";"); + + if (section_node == 0) { + if (align_node == 0) { + return c.addNode(.{ + .tag = .simple_var_decl, + .main_token = mut_tok, + .data = .{ + .lhs = type_node, + .rhs = init_node, + }, + }); + } else { + return c.addNode(.{ + .tag = .local_var_decl, + .main_token = mut_tok, + .data = .{ + .lhs = try c.addExtra(std.zig.ast.Node.LocalVarDecl{ + .type_node = type_node, + .align_node = align_node, + }), + .rhs = init_node, + }, + }); + } + } else { + return c.addNode(.{ + .tag = .global_var_decl, + .main_token = mut_tok, + .data = .{ + .lhs = try c.addExtra(std.zig.ast.Node.GlobalVarDecl{ + .type_node = type_node, + .align_node = align_node, + .section_node = section_node, + }), + .rhs = init_node, + }, + }); + } +} From 685778c5a73bf08487ebfb4b1632aee089a89e79 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Sat, 13 Feb 2021 21:24:40 +0200 Subject: [PATCH 082/173] translate-c: render unary ops --- src/translate_c.zig | 4 +- src/translate_c/ast.zig | 262 +++++++++++++++++++++++++++++++++++----- 2 files changed, 234 insertions(+), 32 deletions(-) diff --git a/src/translate_c.zig b/src/translate_c.zig index 821f3c5b74..03695186ec 100644 --- a/src/translate_c.zig +++ b/src/translate_c.zig @@ -3049,7 +3049,7 @@ fn maybeSuppressResult( result: Node, ) TransError!Node { if (used == .used) return result; - return Tag.ignore.create(c.arena, result); + return Tag.discard.create(c.arena, result); } fn addTopLevelDecl(c: *Context, name: []const u8, decl_node: Node) !void { @@ -4127,7 +4127,7 @@ fn parseCExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { var last = node; while (true) { // suppress result - const ignore = try Tag.ignore.create(c.arena, last); + const ignore = try Tag.discard.create(c.arena, last); try block_scope.statements.append(ignore); last = try parseCCondExpr(c, m, 
scope); diff --git a/src/translate_c/ast.zig b/src/translate_c/ast.zig index 282e645f2e..3bc1dc986f 100644 --- a/src/translate_c/ast.zig +++ b/src/translate_c/ast.zig @@ -60,6 +60,7 @@ pub const Node = extern union { tuple, container_init, std_meta_cast, + /// _ = operand; discard, // a + b @@ -67,43 +68,30 @@ pub const Node = extern union { // a = b add_assign, // c = (a = b) - add_assign_value, add_wrap, add_wrap_assign, - add_wrap_assign_value, sub, sub_assign, - sub_assign_value, sub_wrap, sub_wrap_assign, - sub_wrap_assign_value, mul, mul_assign, - mul_assign_value, mul_wrap, mul_wrap_assign, - mul_wrap_assign_value, div, div_assign, - div_assign_value, shl, shl_assign, - shl_assign_value, shr, shr_assign, - shr_assign_value, mod, mod_assign, - mod_assign_value, @"and", and_assign, - and_assign_value, @"or", or_assign, - or_assign_value, xor, xor_assign, - xor_assign_value, less_than, less_than_equal, greater_than, @@ -207,9 +195,6 @@ pub const Node = extern union { /// [1]type{val} ** count array_filler, - /// _ = operand; - ignore, - pub const last_no_payload_tag = Tag.usingnamespace_builtins; pub const no_payload_count = @enumToInt(last_no_payload_tag) + 1; @@ -249,7 +234,6 @@ pub const Node = extern union { .while_true, .if_not_break, .switch_else, - .ignore, .block_single, .std_meta_sizeof, .bool_to_int, @@ -260,43 +244,30 @@ pub const Node = extern union { .add, .add_assign, - .add_assign_value, .add_wrap, .add_wrap_assign, - .add_wrap_assign_value, .sub, .sub_assign, - .sub_assign_value, .sub_wrap, .sub_wrap_assign, - .sub_wrap_assign_value, .mul, .mul_assign, - .mul_assign_value, .mul_wrap, .mul_wrap_assign, - .mul_wrap_assign_value, .div, .div_assign, - .div_assign_value, .shl, .shl_assign, - .shl_assign_value, .shr, .shr_assign, - .shr_assign_value, .mod, .mod_assign, - .mod_assign_value, .@"and", .and_assign, - .and_assign_value, .@"or", .or_assign, - .or_assign_value, .xor, .xor_assign, - .xor_assign_value, .less_than, .less_than_equal, 
.greater_than, @@ -869,6 +840,14 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { .rhs = undefined, }, }), + .noreturn_type => return try c.addNode(.{ + .tag = .identifier, + .main_token = try c.addToken(.identifier, "noreturn"), + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, + }), .type => { const payload = node.castTag(.type).?.data; return c.addNode(.{ @@ -880,6 +859,17 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { }, }); }, + .log2_int_type => { + const payload = node.castTag(.log2_int_type).?.data; + return c.addNode(.{ + .tag = .identifier, + .main_token = try c.addTokenFmt(.identifier, "u{d}", .{payload}), + .data = .{ + .lhs = undefined, + .rhs = undefined, + }, + }); + }, .identifier => { const payload = node.castTag(.identifier).?.data; return c.addNode(.{ @@ -1058,6 +1048,51 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { const payload = node.castTag(.ptr_cast).?.data; return renderBuiltinCall(c, "@ptrCast", &.{ payload.lhs, payload.rhs }); }, + .sizeof => { + const payload = node.castTag(.sizeof).?.data; + return renderBuiltinCall(c, "@sizeOf", &.{payload}); + }, + .alignof => { + const payload = node.castTag(.alignof).?.data; + return renderBuiltinCall(c, "@alignOf", &.{payload}); + }, + .typeof => { + const payload = node.castTag(.typeof).?.data; + return renderBuiltinCall(c, "@TypeOf", &.{payload}); + }, + .negate => return renderPrefixOp(c, node, .negation, .minus, "-"), + .negate_wrap => return renderPrefixOp(c, node, .negation_wrap, .minus_percent, "-%"), + .bit_not => return renderPrefixOp(c, node, .bit_not, .tilde, "~"), + .not => return renderPrefixOp(c, node, .bool_not, .bang, "!"), + .optional_type => return renderPrefixOp(c, node, .optional_type, .question_mark, "?"), + .address_of => return renderPrefixOp(c, node, .address_of, .ampersand, "&"), + .deref => { + const payload = node.castTag(.deref).?.data; + const operand = try renderNodeGrouped(c, payload); + const 
deref_tok = try c.addToken(.period_asterisk, ".*"); + return c.addNode(.{ + .tag = .deref, + .main_token = deref_tok, + .data = .{ + .lhs = operand, + .rhs = undefined, + }, + }); + }, + .unwrap => { + const payload = node.castTag(.unwrap).?.data; + const operand = try renderNodeGrouped(c, payload); + const period = try c.addToken(.period, "."); + const question_mark = try c.addToken(.question_mark, "?"); + return c.addNode(.{ + .tag = .unwrap_optional, + .main_token = period, + .data = .{ + .lhs = operand, + .rhs = question_mark, + }, + }); + }, else => return c.addNode(.{ .tag = .identifier, .main_token = try c.addTokenFmt(.identifier, "@\"TODO {}\"", .{node.tag()}), @@ -1069,6 +1104,173 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { } } +fn renderNodeGrouped(c: *Context, node: Node) !NodeIndex { + switch (node.tag()) { + .null_literal, + .undefined_literal, + .true_literal, + .false_literal, + .return_void, + .zero_literal, + .one_literal, + .void_type, + .noreturn_type, + .@"anytype", + .div_trunc, + .rem, + .int_cast, + .as, + .truncate, + .bit_cast, + .float_cast, + .float_to_int, + .int_to_float, + .int_to_enum, + .int_to_ptr, + .std_mem_zeroes, + .std_math_Log2Int, + .log2_int_type, + .ptr_to_int, + .enum_to_int, + .sizeof, + .alignof, + .typeof, + .std_meta_sizeof, + .std_meta_cast, + .std_mem_zeroinit, + .integer_literal, + .float_literal, + .string_literal, + .char_literal, + .identifier, + .field_access, + .ptr_cast, + .type, + .array_access, + .align_cast, + => { + // no grouping needed + return renderNode(c, node); + }, + + .negate, + .negate_wrap, + .bit_not, + .opaque_literal, + .not, + .optional_type, + .address_of, + .unwrap, + .deref, + .empty_array, + .block_single, + .bool_to_int, + .add, + .add_wrap, + .sub, + .sub_wrap, + .mul, + .mul_wrap, + .div, + .shl, + .shr, + .mod, + .@"and", + .@"or", + .xor, + .less_than, + .less_than_equal, + .greater_than, + .greater_than_equal, + .equal, + .not_equal, + .bit_and, + .bit_or, 
+ .bit_xor, + .empty_block, + .array_cat, + .array_filler, + .@"if", + .call, + .@"enum", + .@"struct", + .@"union", + .array_init, + .tuple, + .container_init, + .block, + .c_pointer, + .single_pointer, + .array_type, + => return c.addNode(.{ + .tag = .grouped_expression, + .main_token = try c.addToken(.l_paren, "("), + .data = .{ + .lhs = try renderNode(c, node), + .rhs = try c.addToken(.r_paren, ")"), + }, + }), + .ellipsis3, + .switch_prong, + .warning, + .var_decl, + .func, + .fail_decl, + .arg_redecl, + .alias, + .var_simple, + .pub_var_simple, + .enum_redecl, + .@"while", + .@"switch", + .@"break", + .break_val, + .pub_inline_fn, + .discard, + .@"continue", + .@"return", + .usingnamespace_builtins, + .while_true, + .if_not_break, + .switch_else, + .add_assign, + .add_wrap_assign, + .sub_assign, + .sub_wrap_assign, + .mul_assign, + .mul_wrap_assign, + .div_assign, + .shl_assign, + .shr_assign, + .mod_assign, + .and_assign, + .or_assign, + .xor_assign, + .bit_and_assign, + .bit_or_assign, + .bit_xor_assign, + .assign, + => { + // these should never appear in places where grouping might be needed. 
+ unreachable; + }, + } +} + +fn renderPrefixOp(c: *Context, node: Node, tag: std.zig.ast.Node.Tag, tok_tag: TokenTag, bytes: []const u8) !NodeIndex { + const payload = @fieldParentPtr(Payload.UnOp, "base", node.ptr_otherwise).data; + const tok = try c.addToken(tok_tag, bytes); + const operand = try renderNodeGrouped(c, payload); + return c.addNode(.{ + .tag = tag, + .main_token = tok, + .data = .{ + .lhs = operand, + .rhs = undefined, + }, + }); +} + fn renderStdImport(c: *Context, first: []const u8, second: []const u8) !NodeIndex { const import_tok = try c.addToken(.builtin, "@import"); _ = try c.addToken(.l_paren, "("); From 227f167958ab37fe1df1b1dea5e3da42651b49c9 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Sat, 13 Feb 2021 21:45:25 +0200 Subject: [PATCH 083/173] translate-c: render binops and break/continue --- src/translate_c/ast.zig | 113 +++++++++++++++++++++++++++++++++++----- 1 file changed, 99 insertions(+), 14 deletions(-) diff --git a/src/translate_c/ast.zig b/src/translate_c/ast.zig index 3bc1dc986f..11a8fe4544 100644 --- a/src/translate_c/ast.zig +++ b/src/translate_c/ast.zig @@ -87,11 +87,7 @@ pub const Node = extern union { mod, mod_assign, @"and", - and_assign, @"or", - or_assign, - xor, - xor_assign, less_than, less_than_equal, greater_than, @@ -263,11 +259,7 @@ pub const Node = extern union { .mod, .mod_assign, .@"and", - .and_assign, .@"or", - .or_assign, - .xor, - .xor_assign, .less_than, .less_than_equal, .greater_than, @@ -832,7 +824,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { .rhs = undefined, }, }), - .@"anytype" => return try c.addNode(.{ + .@"anytype" => return c.addNode(.{ .tag = .@"anytype", .main_token = try c.addToken(.keyword_anytype, "anytype"), .data = .{ @@ -840,7 +832,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { .rhs = undefined, }, }), - .noreturn_type => return try c.addNode(.{ + .noreturn_type => return c.addNode(.{ .tag = .identifier, .main_token = try 
c.addToken(.identifier, "noreturn"), .data = .{ @@ -848,6 +840,53 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { .rhs = undefined, }, }), + .@"continue" => { + const tok = try c.addToken(.keyword_continue, "continue"); + _ = try c.addToken(.semicolon, ";"); + return c.addNode(.{ + .tag = .@"continue", + .main_token = tok, + .data = .{ + .lhs = 0, + .rhs = undefined, + }, + }); + }, + .@"break" => { + const payload = node.castTag(.@"break").?.data; + const tok = try c.addToken(.keyword_break, "break"); + const break_label = if (payload) |some| blk: { + _ = try c.addToken(.colon, ":"); + break :blk try c.addIdentifier(some); + } else 0; + _ = try c.addToken(.semicolon, ";"); + return c.addNode(.{ + .tag = .identifier, + .main_token = try c.addToken(.keyword_break, "break"), + .data = .{ + .lhs = break_label, + .rhs = 0, + }, + }); + }, + .break_val => { + const payload = node.castTag(.break_val).?.data; + const tok = try c.addToken(.keyword_break, "break"); + const break_label = if (payload.label) |some| blk: { + _ = try c.addToken(.colon, ":"); + break :blk try c.addIdentifier(some); + } else 0; + const val = try renderNode(c, payload.val); + _ = try c.addToken(.semicolon, ";"); + return c.addNode(.{ + .tag = .identifier, + .main_token = try c.addToken(.keyword_break, "break"), + .data = .{ + .lhs = break_label, + .rhs = val, + }, + }); + }, .type => { const payload = node.castTag(.type).?.data; return c.addNode(.{ @@ -1093,6 +1132,43 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { }, }); }, + .add => return renderBinOp(c, node, .add, .plus, "+"), + .add_assign => return renderBinOp(c, node, .assign_add, .plus_equal, "+="), + .add_wrap => return renderBinOp(c, node, .add_wrap, .plus_percent, "+%"), + .add_wrap_assign => return renderBinOp(c, node, .assign_add_wrap, .plus_percent_equal, "+%="), + .sub => return renderBinOp(c, node, .sub, .minus, "-"), + .sub_assign => return renderBinOp(c, node, .assign_sub, .minus_equal, 
"-="), + .sub_wrap => return renderBinOp(c, node, .sub_wrap, .minus_percent, "-%"), + .sub_wrap_assign => return renderBinOp(c, node, .assign_sub_wrap, .minus_percent_equal, "-%="), + .mul => return renderBinOp(c, node, .mul, .asterisk, "*"), + .mul_assign => return renderBinOp(c, node, .assign_mul, .asterisk_equal, "*="), + .mul_wrap => return renderBinOp(c, node, .mul_wrap, .asterisk_percent, "*="), + .mul_wrap_assign => return renderBinOp(c, node, .assign_mul_wrap, .asterisk_percent_equal, "*%="), + .div => return renderBinOp(c, node, .div, .slash, "/"), + .div_assign => return renderBinOp(c, node, .assign_div, .slash_equal, "/="), + .shl => return renderBinOp(c, node, .bit_shift_left, .angle_bracket_angle_bracket_left, "<<"), + .shl_assign => return renderBinOp(c, node, .assign_bit_shift_left, .angle_bracket_angle_bracket_left_equal, "<<="), + .shr => return renderBinOp(c, node, .bit_shift_right, .angle_bracket_angle_bracket_right, ">>"), + .shr_assign => return renderBinOp(c, node, .assign_bit_shift_right, .angle_bracket_angle_bracket_right_equal, ">>="), + .mod => return renderBinOp(c, node, .mod, .percent, "%"), + .mod_assign => return renderBinOp(c, node, .assign_mod, .percent_equal, "%="), + .@"and" => return renderBinOp(c, node, .bool_and, .keyword_and, "and"), + .@"or" => return renderBinOp(c, node, .bool_or, .keyword_or, "or"), + .less_than => return renderBinOp(c, node, .less_than, .angle_bracket_left, "<"), + .less_than_equal => return renderBinOp(c, node, .less_or_equal, .angle_bracket_left_equal, "<="), + .greater_than => return renderBinOp(c, node, .greater_than, .angle_bracket_right, ">="), + .greater_than_equal => return renderBinOp(c, node, .greater_or_equal, .angle_bracket_right_equal, ">="), + .equal => return renderBinOp(c, node, .equal_equal, .equal_equal, "=="), + .not_equal => return renderBinOp(c, node, .bang_equal, .bang_equal, "!="), + .bit_and => return renderBinOp(c, node, .bit_and, .ampersand, "&"), + .bit_and_assign => return 
renderBinOp(c, node, .assign_bit_and, .ampersand_equal, "&="), + .bit_or => return renderBinOp(c, node, .bit_or, .pipe, "|"), + .bit_or_assign => return renderBinOp(c, node, .assign_bit_or, .pipe_equal, "|="), + .bit_xor => return renderBinOp(c, node, .bit_xor, .caret, "^"), + .bit_xor_assign => return renderBinOp(c, node, .assign_bit_xor, .caret_equal, "^="), + .array_cat => return renderBinOp(c, node, .array_cat, .plus_plus, "++"), + .ellipsis3 => return renderBinOp(c, node, .switch_range, .ellipsis3, "..."), + .assign => return renderBinOp(c, node, .assign, .equal, "="), else => return c.addNode(.{ .tag = .identifier, .main_token = try c.addTokenFmt(.identifier, "@\"TODO {}\"", .{node.tag()}), @@ -1177,7 +1253,6 @@ fn renderNodeGrouped(c: *Context, node: Node) !NodeIndex { .mod, .@"and", .@"or", - .xor, .less_than, .less_than_equal, .greater_than, @@ -1243,9 +1318,6 @@ fn renderNodeGrouped(c: *Context, node: Node) !NodeIndex { .shl_assign, .shr_assign, .mod_assign, - .and_assign, - .or_assign, - .xor_assign, .bit_and_assign, .bit_or_assign, .bit_xor_assign, @@ -1271,6 +1343,19 @@ fn renderPrefixOp(c: *Context, node: Node, tag: std.zig.ast.Node.Tag, tok_tag: T }); } +fn renderBinOp(c: *Context, node: Node, tag: std.zig.ast.Node.Tag, tok_tag: TokenTag, bytes: []const u8) !NodeIndex { + const payload = @fieldParentPtr(Payload.BinOp, "base", node.ptr_otherwise).data; + const lhs = try renderNodeGrouped(c, payload.lhs); + return c.addNode(.{ + .tag = tag, + .main_token = try c.addToken(tok_tag, bytes), + .data = .{ + .lhs = lhs, + .rhs = try renderNodeGrouped(c, payload.rhs), + }, + }); +} + fn renderStdImport(c: *Context, first: []const u8, second: []const u8) !NodeIndex { const import_tok = try c.addToken(.builtin, "@import"); _ = try c.addToken(.l_paren, "("); From f191251ddb424eba7384d31fa2d50e50586f3993 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Sat, 13 Feb 2021 22:53:24 +0200 Subject: [PATCH 084/173] translate-c: render functions --- 
src/translate_c.zig | 2 +- src/translate_c/ast.zig | 261 +++++++++++++++++++++++++++++++++++++--- 2 files changed, 246 insertions(+), 17 deletions(-) diff --git a/src/translate_c.zig b/src/translate_c.zig index 03695186ec..abb4fff7f4 100644 --- a/src/translate_c.zig +++ b/src/translate_c.zig @@ -107,7 +107,7 @@ const Scope = struct { // do while, we want to put `if (cond) break;` at the end. const alloc_len = self.statements.items.len + @boolToInt(self.base.parent.?.id == .loop); var stmts = try c.arena.alloc(Node, alloc_len); - stmts.len -= 1; + stmts.len = self.statements.items.len; mem.copy(Node, stmts, self.statements.items); return Tag.block.create(c.arena, .{ .label = self.label, diff --git a/src/translate_c/ast.zig b/src/translate_c/ast.zig index 11a8fe4544..6d08688de4 100644 --- a/src/translate_c/ast.zig +++ b/src/translate_c/ast.zig @@ -1,3 +1,8 @@ +// SPDX-License-Identifier: MIT +// Copyright (c) 2021 Zig Contributors +// This file is part of [zig](https://ziglang.org/), which is MIT licensed. +// The MIT license requires this copyright notice to be included in all copies +// and substantial portions of the software. 
const std = @import("std"); const Type = @import("../type.zig").Type; const Allocator = std.mem.Allocator; @@ -840,18 +845,14 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { .rhs = undefined, }, }), - .@"continue" => { - const tok = try c.addToken(.keyword_continue, "continue"); - _ = try c.addToken(.semicolon, ";"); - return c.addNode(.{ - .tag = .@"continue", - .main_token = tok, - .data = .{ - .lhs = 0, - .rhs = undefined, - }, - }); - }, + .@"continue" => return c.addNode(.{ + .tag = .@"continue", + .main_token = try c.addToken(.keyword_continue, "continue"), + .data = .{ + .lhs = 0, + .rhs = undefined, + }, + }), .@"break" => { const payload = node.castTag(.@"break").?.data; const tok = try c.addToken(.keyword_break, "break"); @@ -859,7 +860,6 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { _ = try c.addToken(.colon, ":"); break :blk try c.addIdentifier(some); } else 0; - _ = try c.addToken(.semicolon, ";"); return c.addNode(.{ .tag = .identifier, .main_token = try c.addToken(.keyword_break, "break"), @@ -876,14 +876,12 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { _ = try c.addToken(.colon, ":"); break :blk try c.addIdentifier(some); } else 0; - const val = try renderNode(c, payload.val); - _ = try c.addToken(.semicolon, ";"); return c.addNode(.{ .tag = .identifier, .main_token = try c.addToken(.keyword_break, "break"), .data = .{ .lhs = break_label, - .rhs = val, + .rhs = try renderNode(c, payload.val), }, }); }, @@ -1169,6 +1167,84 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { .array_cat => return renderBinOp(c, node, .array_cat, .plus_plus, "++"), .ellipsis3 => return renderBinOp(c, node, .switch_range, .ellipsis3, "..."), .assign => return renderBinOp(c, node, .assign, .equal, "="), + .empty_block => { + const l_brace = try c.addToken(.l_brace, "{"); + _ = try c.addToken(.r_brace, "}"); + return c.addNode(.{ + .tag = .block_two, + .main_token = l_brace, + .data = 
.{ + .lhs = 0, + .rhs = 0, + }, + }); + }, + .block_single => { + const payload = node.castTag(.block_single).?.data; + const l_brace = try c.addToken(.l_brace, "{"); + + const stmt = try renderNode(c, payload); + _ = try c.addToken(.semicolon, ";"); + + _ = try c.addToken(.r_brace, "}"); + return c.addNode(.{ + .tag = .block_two, + .main_token = l_brace, + .data = .{ + .lhs = stmt, + .rhs = 0, + }, + }); + }, + .block => { + const payload = node.castTag(.block).?.data; + if (payload.label) |some| { + _ = try c.addIdentifier(some); + _ = try c.addToken(.colon, ":"); + } + const l_brace = try c.addToken(.l_brace, "{"); + + var stmts = std.ArrayList(NodeIndex).init(c.gpa); + defer stmts.deinit(); + for (payload.stmts) |stmt| { + const res = try renderNode(c, stmt); + switch (stmt.tag()) { + .warning => continue, + .var_decl, .var_simple => {}, + else => _ = try c.addToken(.semicolon, ";"), + } + try stmts.append(res); + } + const span = try c.listToSpan(stmts.items); + _ = try c.addToken(.r_brace, "}"); + + const semicolon = c.tokens.items(.tag)[c.tokens.len - 2] == .semicolon; + return c.addNode(.{ + .tag = if (semicolon) .block_semicolon else .block, + .main_token = l_brace, + .data = .{ + .lhs = span.start, + .rhs = span.end, + }, + }); + }, + .func => return renderFunc(c, node), + .discard => { + const payload = node.castTag(.discard).?.data; + const lhs = try c.addNode(.{ + .tag = .identifier, + .main_token = try c.addToken(.identifier, "_"), + .data = .{ .lhs = undefined, .rhs = undefined }, + }); + return c.addNode(.{ + .tag = .assign, + .main_token = try c.addToken(.equal, "="), + .data = .{ + .lhs = lhs, + .rhs = try renderNode(c, payload), + }, + }); + }, else => return c.addNode(.{ .tag = .identifier, .main_token = try c.addTokenFmt(.identifier, "@\"TODO {}\"", .{node.tag()}), @@ -1555,3 +1631,156 @@ fn renderVar(c: *Context, node: Node) !NodeIndex { }); } } + +fn renderFunc(c: *Context, node: Node) !NodeIndex { + const payload = 
node.castTag(.func).?.data; + if (payload.is_pub) _ = try c.addToken(.keyword_pub, "pub"); + if (payload.is_extern) _ = try c.addToken(.keyword_extern, "extern"); + if (payload.is_export) _ = try c.addToken(.keyword_export, "export"); + const fn_token = try c.addToken(.keyword_fn, "fn"); + if (payload.name) |some| _ = try c.addIdentifier(some); + + _ = try c.addToken(.l_paren, "("); + const first = if (payload.params.len != 0) blk: { + const param = payload.params[0]; + if (param.is_noalias) _ = try c.addToken(.keyword_noalias, "noalias"); + if (param.name) |some| { + _ = try c.addIdentifier(some); + _ = try c.addToken(.colon, ":"); + } + break :blk try renderNode(c, param.type); + } else 0; + + var span: NodeSubRange = undefined; + if (payload.params.len > 1) { + var params = std.ArrayList(NodeIndex).init(c.gpa); + defer params.deinit(); + + try params.append(first); + for (payload.params[1..]) |param| { + _ = try c.addToken(.comma, ","); + if (param.is_noalias) _ = try c.addToken(.keyword_noalias, "noalias"); + if (param.name) |some| { + _ = try c.addIdentifier(some); + _ = try c.addToken(.colon, ":"); + } + try params.append(try renderNode(c, param.type)); + } + span = try c.listToSpan(params.items); + } + if (payload.is_var_args) { + if (payload.params.len != 0) _ = try c.addToken(.comma, ","); + _ = try c.addToken(.ellipsis3, "..."); + } + _ = try c.addToken(.r_paren, ")"); + + const return_type_expr = try renderNode(c, payload.return_type); + + const align_expr = if (payload.alignment) |some| blk: { + _ = try c.addToken(.keyword_align, "align"); + _ = try c.addToken(.l_paren, "("); + const res = try c.addNode(.{ + .tag = .integer_literal, + .main_token = try c.addTokenFmt(.integer_literal, "{d}", .{some}), + .data = .{ .lhs = undefined, .rhs = undefined }, + }); + _ = try c.addToken(.r_paren, ")"); + break :blk res; + } else 0; + + const section_expr = if (payload.linksection_string) |some| blk: { + _ = try c.addToken(.keyword_linksection, "linksection"); + _ 
= try c.addToken(.l_paren, "("); + const res = try c.addNode(.{ + .tag = .string_literal, + .main_token = try c.addTokenFmt(.string_literal, "\"{s}\"", .{std.zig.fmtEscapes(some)}), + .data = .{ .lhs = undefined, .rhs = undefined }, + }); + _ = try c.addToken(.r_paren, ")"); + break :blk res; + } else 0; + + const callconv_expr = if (payload.explicit_callconv) |some| blk: { + _ = try c.addToken(.keyword_linksection, "callconv"); + _ = try c.addToken(.l_paren, "("); + _ = try c.addToken(.period, "."); + const res = try c.addNode(.{ + .tag = .enum_literal, + .main_token = try c.addTokenFmt(.string_literal, "{}", .{some}), + .data = .{ .lhs = undefined, .rhs = undefined }, + }); + _ = try c.addToken(.r_paren, ")"); + break :blk res; + } else 0; + + const fn_proto = try blk: { + if (align_expr == 0 and section_expr == 0 and callconv_expr == 0) { + if (payload.params.len < 2) + break :blk c.addNode(.{ + .tag = .fn_proto_simple, + .main_token = fn_token, + .data = .{ + .lhs = first, + .rhs = return_type_expr, + }, + }) + else + break :blk c.addNode(.{ + .tag = .fn_proto_multi, + .main_token = fn_token, + .data = .{ + .lhs = try c.addExtra(std.zig.ast.Node.SubRange{ + .start = span.start, + .end = span.end, + }), + .rhs = return_type_expr, + }, + }); + } + if (payload.params.len < 2) + break :blk c.addNode(.{ + .tag = .fn_proto_one, + .main_token = fn_token, + .data = .{ + .lhs = try c.addExtra(std.zig.ast.Node.FnProtoOne{ + .param = first, + .align_expr = align_expr, + .section_expr = section_expr, + .callconv_expr = callconv_expr, + }), + .rhs = return_type_expr, + }, + }) + else + break :blk c.addNode(.{ + .tag = .fn_proto, + .main_token = fn_token, + .data = .{ + .lhs = try c.addExtra(std.zig.ast.Node.FnProto{ + .params_start = span.start, + .params_end = span.end, + .align_expr = align_expr, + .section_expr = section_expr, + .callconv_expr = callconv_expr, + }), + .rhs = return_type_expr, + }, + }); + }; + + const body = if (payload.body) |some| + try renderNode(c, 
some) + else blk: { + _ = try c.addToken(.semicolon, ";"); + break :blk 0; + }; + + return c.addNode(.{ + .tag = .fn_decl, + .main_token = fn_token, + .data = .{ + .lhs = fn_proto, + .rhs = body, + }, + }); +} From c4dfabf4dcfc29f48356e3c5f762f1ca2c21f088 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Sun, 14 Feb 2021 15:35:51 +0200 Subject: [PATCH 085/173] translate-c: render macro functions, use cast type as return type Closes #8004 --- src/translate_c.zig | 8 +- src/translate_c/ast.zig | 280 ++++++++++++++++++++++++++-------------- 2 files changed, 191 insertions(+), 97 deletions(-) diff --git a/src/translate_c.zig b/src/translate_c.zig index abb4fff7f4..6fa3110118 100644 --- a/src/translate_c.zig +++ b/src/translate_c.zig @@ -4099,14 +4099,18 @@ fn transMacroFnDefine(c: *Context, m: *MacroCtx) ParseError!void { const br = blk_last.castTag(.break_val).?; break :blk br.data.val; } else expr; - const typeof = try Tag.typeof.create(c.arena, typeof_arg); + const return_type = if (typeof_arg.castTag(.std_meta_cast)) |some| + some.data.lhs + else + try Tag.typeof.create(c.arena, typeof_arg); + const return_expr = try Tag.@"return".create(c.arena, expr); try block_scope.statements.append(return_expr); const fn_decl = try Tag.pub_inline_fn.create(c.arena, .{ .name = m.name, .params = try c.arena.dupe(ast.Payload.Param, fn_params.items), - .return_type = typeof, + .return_type = return_type, .body = try block_scope.complete(c), }); _ = try c.global_scope.macro_table.put(m.name, fn_decl); diff --git a/src/translate_c/ast.zig b/src/translate_c/ast.zig index 6d08688de4..2c3b9f29fe 100644 --- a/src/translate_c/ast.zig +++ b/src/translate_c/ast.zig @@ -615,9 +615,15 @@ pub fn render(gpa: *Allocator, nodes: []const Node) !std.zig.ast.Tree { defer ctx.extra_data.deinit(gpa); defer ctx.tokens.deinit(gpa); - // Estimate that each top level node has 25 child nodes. - const estimated_node_count = nodes.len * 25; + // Estimate that each top level node has 10 child nodes. 
+ const estimated_node_count = nodes.len * 10; try ctx.nodes.ensureCapacity(gpa, estimated_node_count); + // Estimate that each each node has 2 tokens. + const estimated_tokens_count = estimated_node_count * 2; + try ctx.tokens.ensureCapacity(gpa, estimated_tokens_count); + // Estimate that each each token is 3 bytes long. + const estimated_buf_len = estimated_tokens_count * 3; + try ctx.buf.ensureCapacity(estimated_buf_len); ctx.nodes.appendAssumeCapacity(.{ .tag = .root, @@ -776,74 +782,47 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { .null_literal => return c.addNode(.{ .tag = .null_literal, .main_token = try c.addToken(.keyword_null, "null"), - .data = .{ - .lhs = undefined, - .rhs = undefined, - }, + .data = undefined, }), .undefined_literal => return c.addNode(.{ .tag = .undefined_literal, .main_token = try c.addToken(.keyword_undefined, "undefined"), - .data = .{ - .lhs = undefined, - .rhs = undefined, - }, + .data = undefined, }), .true_literal => return c.addNode(.{ .tag = .true_literal, .main_token = try c.addToken(.keyword_true, "true"), - .data = .{ - .lhs = undefined, - .rhs = undefined, - }, + .data = undefined, }), .false_literal => return c.addNode(.{ .tag = .false_literal, .main_token = try c.addToken(.keyword_false, "false"), - .data = .{ - .lhs = undefined, - .rhs = undefined, - }, + .data = undefined, }), .zero_literal => return c.addNode(.{ .tag = .integer_literal, .main_token = try c.addToken(.integer_literal, "0"), - .data = .{ - .lhs = undefined, - .rhs = undefined, - }, + .data = undefined, }), .one_literal => return c.addNode(.{ .tag = .integer_literal, .main_token = try c.addToken(.integer_literal, "1"), - .data = .{ - .lhs = undefined, - .rhs = undefined, - }, + .data = undefined, }), .void_type => return c.addNode(.{ .tag = .identifier, .main_token = try c.addToken(.identifier, "void"), - .data = .{ - .lhs = undefined, - .rhs = undefined, - }, + .data = undefined, }), .@"anytype" => return c.addNode(.{ .tag = 
.@"anytype", .main_token = try c.addToken(.keyword_anytype, "anytype"), - .data = .{ - .lhs = undefined, - .rhs = undefined, - }, + .data = undefined, }), .noreturn_type => return c.addNode(.{ .tag = .identifier, .main_token = try c.addToken(.identifier, "noreturn"), - .data = .{ - .lhs = undefined, - .rhs = undefined, - }, + .data = undefined, }), .@"continue" => return c.addNode(.{ .tag = .@"continue", @@ -853,6 +832,14 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { .rhs = undefined, }, }), + .return_void => return c.addNode(.{ + .tag = .@"return", + .main_token = try c.addToken(.keyword_return, "return"), + .data = .{ + .lhs = 0, + .rhs = undefined, + }, + }), .@"break" => { const payload = node.castTag(.@"break").?.data; const tok = try c.addToken(.keyword_break, "break"); @@ -885,15 +872,23 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { }, }); }, + .@"return" => { + const payload = node.castTag(.@"return").?.data; + return c.addNode(.{ + .tag = .@"return", + .main_token = try c.addToken(.keyword_return, "return"), + .data = .{ + .lhs = try renderNode(c, payload), + .rhs = undefined, + }, + }); + }, .type => { const payload = node.castTag(.type).?.data; return c.addNode(.{ .tag = .identifier, .main_token = try c.addToken(.identifier, payload), - .data = .{ - .lhs = undefined, - .rhs = undefined, - }, + .data = undefined, }); }, .log2_int_type => { @@ -901,10 +896,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { return c.addNode(.{ .tag = .identifier, .main_token = try c.addTokenFmt(.identifier, "u{d}", .{payload}), - .data = .{ - .lhs = undefined, - .rhs = undefined, - }, + .data = undefined, }); }, .identifier => { @@ -912,10 +904,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { return c.addNode(.{ .tag = .identifier, .main_token = try c.addIdentifier(payload), - .data = .{ - .lhs = undefined, - .rhs = undefined, - }, + .data = undefined, }); }, .float_literal => { @@ 
-923,10 +912,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { return c.addNode(.{ .tag = .float_literal, .main_token = try c.addToken(.float_literal, payload), - .data = .{ - .lhs = undefined, - .rhs = undefined, - }, + .data = undefined, }); }, .integer_literal => { @@ -934,10 +920,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { return c.addNode(.{ .tag = .integer_literal, .main_token = try c.addToken(.integer_literal, payload), - .data = .{ - .lhs = undefined, - .rhs = undefined, - }, + .data = undefined, }); }, .string_literal => { @@ -945,10 +928,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { return c.addNode(.{ .tag = .identifier, .main_token = try c.addToken(.string_literal, payload), - .data = .{ - .lhs = undefined, - .rhs = undefined, - }, + .data = undefined, }); }, .char_literal => { @@ -956,10 +936,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { return c.addNode(.{ .tag = .identifier, .main_token = try c.addToken(.string_literal, payload), - .data = .{ - .lhs = undefined, - .rhs = undefined, - }, + .data = undefined, }); }, .fail_decl => { @@ -976,10 +953,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { const err_msg = try c.addNode(.{ .tag = .string_literal, .main_token = err_msg_tok, - .data = .{ - .lhs = undefined, - .rhs = undefined, - }, + .data = undefined, }); _ = try c.addToken(.r_paren, ")"); const compile_error = try c.addNode(.{ @@ -1130,6 +1104,31 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { }, }); }, + .c_pointer, .single_pointer => { + const payload = @fieldParentPtr(Payload.Pointer, "base", node.ptr_otherwise).data; + + const asterisk = if (node.tag() == .single_pointer) + try c.addToken(.asterisk, "*") + else blk: { + _ = try c.addToken(.l_bracket, "["); + const res = try c.addToken(.asterisk, "*"); + _ = try c.addIdentifier("c"); + _ = try c.addToken(.r_bracket, "]"); + break :blk res; + }; + if 
(payload.is_const) _ = try c.addToken(.keyword_const, "const"); + if (payload.is_volatile) _ = try c.addToken(.keyword_volatile, "volatile"); + const elem_type = try renderNode(c, payload.elem_type); + + return c.addNode(.{ + .tag = .ptr_type_aligned, + .main_token = asterisk, + .data = .{ + .lhs = 0, + .rhs = elem_type, + }, + }); + }, .add => return renderBinOp(c, node, .add, .plus, "+"), .add_assign => return renderBinOp(c, node, .assign_add, .plus_equal, "+="), .add_wrap => return renderBinOp(c, node, .add_wrap, .plus_percent, "+%"), @@ -1229,6 +1228,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { }); }, .func => return renderFunc(c, node), + .pub_inline_fn => return renderMacroFunc(c, node), .discard => { const payload = node.castTag(.discard).?.data; const lhs = try c.addNode(.{ @@ -1300,6 +1300,9 @@ fn renderNodeGrouped(c: *Context, node: Node) !NodeIndex { .type, .array_access, .align_cast, + .optional_type, + .c_pointer, + .single_pointer, => { // no grouping needed return renderNode(c, node); @@ -1310,7 +1313,6 @@ fn renderNodeGrouped(c: *Context, node: Node) !NodeIndex { .bit_not, .opaque_literal, .not, - .optional_type, .address_of, .unwrap, .deref, @@ -1350,8 +1352,6 @@ fn renderNodeGrouped(c: *Context, node: Node) !NodeIndex { .tuple, .container_init, .block, - .c_pointer, - .single_pointer, .array_type, => return c.addNode(.{ .tag = .grouped_expression, @@ -1439,10 +1439,7 @@ fn renderStdImport(c: *Context, first: []const u8, second: []const u8) !NodeInde const std_node = try c.addNode(.{ .tag = .string_literal, .main_token = std_tok, - .data = .{ - .lhs = undefined, - .rhs = undefined, - }, + .data = undefined, }); _ = try c.addToken(.r_paren, ")"); @@ -1498,21 +1495,23 @@ fn renderCall(c: *Context, lhs: NodeIndex, args: []const Node) !NodeIndex { }); }, else => blk: { - const start = @intCast(u32, c.extra_data.items.len); - const end = @intCast(u32, start + args.len); - try c.extra_data.ensureCapacity(c.gpa, end + 2); // + 2 
for span start + end + var rendered = try c.gpa.alloc(NodeIndex, args.len); + defer c.gpa.free(rendered); + for (args) |arg, i| { if (i != 0) _ = try c.addToken(.comma, ","); - c.extra_data.appendAssumeCapacity(try renderNode(c, arg)); + rendered[i] = try renderNode(c, arg); } - c.extra_data.appendAssumeCapacity(start); - c.extra_data.appendAssumeCapacity(end); + const span = try c.listToSpan(rendered); break :blk try c.addNode(.{ - .tag = .call_comma, + .tag = .call, .main_token = lparen, .data = .{ .lhs = lhs, - .rhs = end + 2, + .rhs = try c.addExtra(std.zig.ast.Node.SubRange{ + .start = span.start, + .end = span.end, + }), }, }); }, @@ -1653,20 +1652,20 @@ fn renderFunc(c: *Context, node: Node) !NodeIndex { var span: NodeSubRange = undefined; if (payload.params.len > 1) { - var params = std.ArrayList(NodeIndex).init(c.gpa); - defer params.deinit(); + var params = try c.gpa.alloc(NodeIndex, payload.params.len); + defer c.gpa.free(params); - try params.append(first); - for (payload.params[1..]) |param| { + params[0] = first; + for (payload.params[1..]) |param, i| { _ = try c.addToken(.comma, ","); if (param.is_noalias) _ = try c.addToken(.keyword_noalias, "noalias"); if (param.name) |some| { _ = try c.addIdentifier(some); _ = try c.addToken(.colon, ":"); } - try params.append(try renderNode(c, param.type)); + params[i + 1] = try renderNode(c, param.type); } - span = try c.listToSpan(params.items); + span = try c.listToSpan(params); } if (payload.is_var_args) { if (payload.params.len != 0) _ = try c.addToken(.comma, ","); @@ -1674,8 +1673,6 @@ fn renderFunc(c: *Context, node: Node) !NodeIndex { } _ = try c.addToken(.r_paren, ")"); - const return_type_expr = try renderNode(c, payload.return_type); - const align_expr = if (payload.alignment) |some| blk: { _ = try c.addToken(.keyword_align, "align"); _ = try c.addToken(.l_paren, "("); @@ -1701,18 +1698,20 @@ fn renderFunc(c: *Context, node: Node) !NodeIndex { } else 0; const callconv_expr = if 
(payload.explicit_callconv) |some| blk: { - _ = try c.addToken(.keyword_linksection, "callconv"); + _ = try c.addToken(.keyword_callconv, "callconv"); _ = try c.addToken(.l_paren, "("); _ = try c.addToken(.period, "."); const res = try c.addNode(.{ .tag = .enum_literal, - .main_token = try c.addTokenFmt(.string_literal, "{}", .{some}), + .main_token = try c.addTokenFmt(.identifier, "{}", .{some}), .data = .{ .lhs = undefined, .rhs = undefined }, }); _ = try c.addToken(.r_paren, ")"); break :blk res; } else 0; + const return_type_expr = try renderNode(c, payload.return_type); + const fn_proto = try blk: { if (align_expr == 0 and section_expr == 0 and callconv_expr == 0) { if (payload.params.len < 2) @@ -1784,3 +1783,94 @@ fn renderFunc(c: *Context, node: Node) !NodeIndex { }, }); } + +fn renderMacroFunc(c: *Context, node: Node) !NodeIndex { + const payload = node.castTag(.pub_inline_fn).?.data; + _ = try c.addToken(.keyword_pub, "pub"); + const fn_token = try c.addToken(.keyword_fn, "fn"); + _ = try c.addIdentifier(payload.name); + + _ = try c.addToken(.l_paren, "("); + const first = if (payload.params.len != 0) blk: { + const param = payload.params[0]; + if (param.is_noalias) _ = try c.addToken(.keyword_noalias, "noalias"); + if (param.name) |some| { + _ = try c.addIdentifier(some); + _ = try c.addToken(.colon, ":"); + } + break :blk try renderNode(c, param.type); + } else 0; + + var span: NodeSubRange = undefined; + if (payload.params.len > 1) { + var params = try c.gpa.alloc(NodeIndex, payload.params.len); + defer c.gpa.free(params); + + params[0] = first; + for (payload.params[1..]) |param, i| { + _ = try c.addToken(.comma, ","); + if (param.is_noalias) _ = try c.addToken(.keyword_noalias, "noalias"); + if (param.name) |some| { + _ = try c.addIdentifier(some); + _ = try c.addToken(.colon, ":"); + } + params[i + 1] = try renderNode(c, param.type); + } + span = try c.listToSpan(params); + } + _ = try c.addToken(.r_paren, ")"); + + const callconv_expr = blk: { + _ 
= try c.addToken(.keyword_callconv, "callconv"); + _ = try c.addToken(.l_paren, "("); + _ = try c.addToken(.period, "."); + const res = try c.addNode(.{ + .tag = .enum_literal, + .main_token = try c.addToken(.identifier, "Inline"), + .data = .{ .lhs = undefined, .rhs = undefined }, + }); + _ = try c.addToken(.r_paren, ")"); + break :blk res; + }; + const return_type_expr = try renderNode(c, payload.return_type); + + const fn_proto = try blk: { + if (payload.params.len < 2) + break :blk c.addNode(.{ + .tag = .fn_proto_one, + .main_token = fn_token, + .data = .{ + .lhs = try c.addExtra(std.zig.ast.Node.FnProtoOne{ + .param = first, + .align_expr = 0, + .section_expr = 0, + .callconv_expr = callconv_expr, + }), + .rhs = return_type_expr, + }, + }) + else + break :blk c.addNode(.{ + .tag = .fn_proto, + .main_token = fn_token, + .data = .{ + .lhs = try c.addExtra(std.zig.ast.Node.FnProto{ + .params_start = span.start, + .params_end = span.end, + .align_expr = 0, + .section_expr = 0, + .callconv_expr = callconv_expr, + }), + .rhs = return_type_expr, + }, + }); + }; + return c.addNode(.{ + .tag = .fn_decl, + .main_token = fn_token, + .data = .{ + .lhs = fn_proto, + .rhs = try renderNode(c, payload.body), + }, + }); +} From 62162a07171ccbdb753650205880c3745a599366 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Sun, 14 Feb 2021 16:32:31 +0200 Subject: [PATCH 086/173] translate-c: render control flow --- src/translate_c.zig | 41 ++++--- src/translate_c/ast.zig | 251 ++++++++++++++++++++++++++++++++++++---- 2 files changed, 256 insertions(+), 36 deletions(-) diff --git a/src/translate_c.zig b/src/translate_c.zig index 6fa3110118..894502ecc5 100644 --- a/src/translate_c.zig +++ b/src/translate_c.zig @@ -36,6 +36,7 @@ const Scope = struct { root, condition, loop, + do_loop, }; /// Represents an in-progress Node.Switch. This struct is stack-allocated. 
@@ -103,15 +104,22 @@ const Scope = struct { } fn complete(self: *Block, c: *Context) !Node { - // We reserve 1 extra statement if the parent is a Loop. This is in case of - // do while, we want to put `if (cond) break;` at the end. - const alloc_len = self.statements.items.len + @boolToInt(self.base.parent.?.id == .loop); - var stmts = try c.arena.alloc(Node, alloc_len); - stmts.len = self.statements.items.len; - mem.copy(Node, stmts, self.statements.items); + if (self.base.parent.?.id == .do_loop) { + // We reserve 1 extra statement if the parent is a do_loop. This is in case of + // do while, we want to put `if (cond) break;` at the end. + const alloc_len = self.statements.items.len + @boolToInt(self.base.parent.?.id == .do_loop); + var stmts = try c.arena.alloc(Node, alloc_len); + stmts.len = self.statements.items.len; + mem.copy(Node, stmts, self.statements.items); + return Tag.block.create(c.arena, .{ + .label = self.label, + .stmts = stmts, + }); + } + if (self.statements.items.len == 0) return Tag.empty_block.init(); return Tag.block.create(c.arena, .{ .label = self.label, - .stmts = stmts, + .stmts = try c.arena.dupe(Node, self.statements.items), }); } @@ -222,7 +230,7 @@ const Scope = struct { return switch (scope.id) { .root => return name, .block => @fieldParentPtr(Block, "base", scope).getAlias(name), - .@"switch", .loop, .condition => scope.parent.?.getAlias(name), + .@"switch", .loop, .do_loop, .condition => scope.parent.?.getAlias(name), }; } @@ -230,7 +238,7 @@ const Scope = struct { return switch (scope.id) { .root => @fieldParentPtr(Root, "base", scope).contains(name), .block => @fieldParentPtr(Block, "base", scope).contains(name), - .@"switch", .loop, .condition => scope.parent.?.contains(name), + .@"switch", .loop, .do_loop, .condition => scope.parent.?.contains(name), }; } @@ -240,7 +248,7 @@ const Scope = struct { switch (scope.id) { .root => unreachable, .@"switch" => return scope, - .loop => return scope, + .loop, .do_loop => return scope, 
else => scope = scope.parent.?, } } @@ -2063,7 +2071,7 @@ fn transDoWhileLoop( ) TransError!Node { var loop_scope = Scope{ .parent = scope, - .id = .loop, + .id = .do_loop, }; // if (!cond) break; @@ -2075,7 +2083,14 @@ fn transDoWhileLoop( }; defer cond_scope.deinit(); const cond = try transBoolExpr(c, &cond_scope.base, @ptrCast(*const clang.Expr, stmt.getCond()), .used); - const if_not_break = try Tag.if_not_break.create(c.arena, cond); + const if_not_break = switch (cond.tag()) { + .false_literal => try Tag.@"break".create(c.arena, null), + .true_literal => { + const body_node = try transStmt(c, scope, stmt.getBody(), .unused); + return Tag.while_true.create(c.arena, body_node); + }, + else => try Tag.if_not_break.create(c.arena, cond), + }; const body_node = if (stmt.getBody().getStmtClass() == .CompoundStmtClass) blk: { // there's already a block in C, so we'll append our condition to it. @@ -4099,7 +4114,7 @@ fn transMacroFnDefine(c: *Context, m: *MacroCtx) ParseError!void { const br = blk_last.castTag(.break_val).?; break :blk br.data.val; } else expr; - const return_type = if (typeof_arg.castTag(.std_meta_cast)) |some| + const return_type = if (typeof_arg.castTag(.std_meta_cast)) |some| some.data.lhs else try Tag.typeof.create(c.arena, typeof_arg); diff --git a/src/translate_c/ast.zig b/src/translate_c/ast.zig index 2c3b9f29fe..d108a2852e 100644 --- a/src/translate_c/ast.zig +++ b/src/translate_c/ast.zig @@ -848,8 +848,8 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { break :blk try c.addIdentifier(some); } else 0; return c.addNode(.{ - .tag = .identifier, - .main_token = try c.addToken(.keyword_break, "break"), + .tag = .@"break", + .main_token = tok, .data = .{ .lhs = break_label, .rhs = 0, @@ -864,8 +864,8 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { break :blk try c.addIdentifier(some); } else 0; return c.addNode(.{ - .tag = .identifier, - .main_token = try c.addToken(.keyword_break, "break"), + .tag = 
.@"break", + .main_token = tok, .data = .{ .lhs = break_label, .rhs = try renderNode(c, payload.val), @@ -1183,7 +1183,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { const l_brace = try c.addToken(.l_brace, "{"); const stmt = try renderNode(c, payload); - _ = try c.addToken(.semicolon, ";"); + try addSemicolonIfNeeded(c, payload); _ = try c.addToken(.r_brace, "}"); return c.addNode(.{ @@ -1207,11 +1207,8 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { defer stmts.deinit(); for (payload.stmts) |stmt| { const res = try renderNode(c, stmt); - switch (stmt.tag()) { - .warning => continue, - .var_decl, .var_simple => {}, - else => _ = try c.addToken(.semicolon, ";"), - } + if (res == 0) continue; + try addSemicolonIfNeeded(c, stmt); try stmts.append(res); } const span = try c.listToSpan(stmts.items); @@ -1245,6 +1242,194 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { }, }); }, + .@"while" => { + const payload = node.castTag(.@"while").?.data; + const while_tok = try c.addToken(.keyword_while, "while"); + _ = try c.addToken(.l_paren, "("); + const cond = try renderNode(c, payload.cond); + _ = try c.addToken(.r_paren, ")"); + + const cont_expr = if (payload.cont_expr) |some| blk: { + _ = try c.addToken(.colon, ":"); + _ = try c.addToken(.l_paren, "("); + const res = try renderNode(c, some); + _ = try c.addToken(.r_paren, ")"); + break :blk res; + } else 0; + const body = try renderNode(c, payload.body); + + if (cont_expr == 0) { + return c.addNode(.{ + .tag = .while_simple, + .main_token = while_tok, + .data = .{ + .lhs = cond, + .rhs = body, + }, + }); + } else { + return c.addNode(.{ + .tag = .while_cont, + .main_token = while_tok, + .data = .{ + .lhs = cond, + .rhs = try c.addExtra(std.zig.ast.Node.WhileCont{ + .cont_expr = cont_expr, + .then_expr = body, + }), + }, + }); + } + }, + .while_true => { + const payload = node.castTag(.while_true).?.data; + const while_tok = try c.addToken(.keyword_while, 
"while"); + _ = try c.addToken(.l_paren, "("); + const cond = try c.addNode(.{ + .tag = .true_literal, + .main_token = try c.addToken(.keyword_true, "true"), + .data = undefined, + }); + _ = try c.addToken(.r_paren, ")"); + const body = try renderNode(c, payload); + + return c.addNode(.{ + .tag = .while_simple, + .main_token = while_tok, + .data = .{ + .lhs = cond, + .rhs = body, + }, + }); + }, + .@"if" => { + const payload = node.castTag(.@"if").?.data; + const if_tok = try c.addToken(.keyword_if, "if"); + _ = try c.addToken(.l_paren, "("); + const cond = try renderNode(c, payload.cond); + _ = try c.addToken(.r_paren, ")"); + + const then_expr = try renderNode(c, payload.then); + const else_node = payload.@"else" orelse return c.addNode(.{ + .tag = .if_simple, + .main_token = if_tok, + .data = .{ + .lhs = cond, + .rhs = then_expr, + }, + }); + _ = try c.addToken(.keyword_else, "else"); + const else_expr = try renderNode(c, else_node); + + return c.addNode(.{ + .tag = .@"if", + .main_token = if_tok, + .data = .{ + .lhs = cond, + .rhs = try c.addExtra(std.zig.ast.Node.If{ + .then_expr = then_expr, + .else_expr = else_expr, + }), + }, + }); + }, + .if_not_break => { + const payload = node.castTag(.if_not_break).?.data; + const if_tok = try c.addToken(.keyword_if, "if"); + _ = try c.addToken(.l_paren, "("); + const cond = try c.addNode(.{ + .tag = .bool_not, + .main_token = try c.addToken(.bang, "!"), + .data = .{ + .lhs = try renderNodeGrouped(c, payload), + .rhs = undefined, + }, + }); + _ = try c.addToken(.r_paren, ")"); + const then_expr = try c.addNode(.{ + .tag = .@"break", + .main_token = try c.addToken(.keyword_break, "break"), + .data = .{ + .lhs = 0, + .rhs = 0, + }, + }); + + return c.addNode(.{ + .tag = .if_simple, + .main_token = if_tok, + .data = .{ + .lhs = cond, + .rhs = then_expr, + }, + }); + }, + .@"switch" => { + const payload = node.castTag(.@"switch").?.data; + const switch_tok = try c.addToken(.keyword_switch, "switch"); + _ = try 
c.addToken(.l_paren, "("); + const cond = try renderNode(c, payload.cond); + _ = try c.addToken(.r_paren, ")"); + + _ = try c.addToken(.l_brace, "{"); + var cases = try c.gpa.alloc(NodeIndex, payload.cases.len); + defer c.gpa.free(cases); + for (payload.cases) |case, i| { + if (i != 0) _ = try c.addToken(.comma, ","); + cases[i] = try renderNode(c, case); + } + const span = try c.listToSpan(cases); + _ = try c.addToken(.r_brace, "}"); + return c.addNode(.{ + .tag = .@"switch", + .main_token = switch_tok, + .data = .{ + .lhs = cond, + .rhs = try c.addExtra(NodeSubRange{ + .start = span.start, + .end = span.end, + }), + }, + }); + }, + .switch_else => { + const payload = node.castTag(.switch_else).?.data; + _ = try c.addToken(.keyword_else, "else"); + return c.addNode(.{ + .tag = .switch_case_one, + .main_token = try c.addToken(.equal_angle_bracket_right, "=>"), + .data = .{ + .lhs = 0, + .rhs = try renderNode(c, payload), + }, + }); + }, + .switch_prong => { + const payload = node.castTag(.switch_prong).?.data; + const item = try renderNode(c, payload.lhs); + return c.addNode(.{ + .tag = .switch_case_one, + .main_token = try c.addToken(.equal_angle_bracket_right, "=>"), + .data = .{ + .lhs = item, + .rhs = try renderNode(c, payload.rhs), + }, + }); + }, + .opaque_literal => { + const opaque_tok = try c.addToken(.keyword_opaque, "opaque"); + _ = try c.addToken(.l_brace, "{"); + _ = try c.addToken(.r_brace, "}"); + + return c.addNode(.{ + .tag = .container_decl_two, + .main_token = opaque_tok, + .data = .{ + .lhs = 0, + .rhs = 0, + }, + }); + }, else => return c.addNode(.{ .tag = .identifier, .main_token = try c.addTokenFmt(.identifier, "@\"TODO {}\"", .{node.tag()}), @@ -1256,6 +1441,28 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { } } +fn addSemicolonIfNeeded(c: *Context, node: Node) !void { + switch (node.tag()) { + .warning => unreachable, + .var_decl, .var_simple, .block, .empty_block, .@"switch" => {}, + .while_true => { + const payload = 
node.castTag(.while_true).?.data; + return addSemicolonIfNeeded(c, payload); + }, + .@"while" => { + const payload = node.castTag(.@"while").?.data; + return addSemicolonIfNeeded(c, payload.body); + }, + .@"if" => { + const payload = node.castTag(.@"if").?.data; + if (payload.@"else") |some| + return addSemicolonIfNeeded(c, some); + return addSemicolonIfNeeded(c, payload.then); + }, + else => _ = try c.addToken(.semicolon, ";"), + } +} + fn renderNodeGrouped(c: *Context, node: Node) !NodeIndex { switch (node.tag()) { .null_literal, @@ -1303,19 +1510,19 @@ fn renderNodeGrouped(c: *Context, node: Node) !NodeIndex { .optional_type, .c_pointer, .single_pointer, + .unwrap, + .deref, + .address_of, + .not, + .negate, + .negate_wrap, + .bit_not, => { // no grouping needed return renderNode(c, node); }, - .negate, - .negate_wrap, - .bit_not, .opaque_literal, - .not, - .address_of, - .unwrap, - .deref, .empty_array, .block_single, .bool_to_int, @@ -1407,13 +1614,11 @@ fn renderNodeGrouped(c: *Context, node: Node) !NodeIndex { fn renderPrefixOp(c: *Context, node: Node, tag: std.zig.ast.Node.Tag, tok_tag: TokenTag, bytes: []const u8) !NodeIndex { const payload = @fieldParentPtr(Payload.UnOp, "base", node.ptr_otherwise).data; - const tok = try c.addToken(tok_tag, bytes); - const operand = try renderNodeGrouped(c, payload); return c.addNode(.{ .tag = tag, - .main_token = tok, + .main_token = try c.addToken(tok_tag, bytes), .data = .{ - .lhs = operand, + .lhs = try renderNodeGrouped(c, payload), .rhs = undefined, }, }); @@ -1508,7 +1713,7 @@ fn renderCall(c: *Context, lhs: NodeIndex, args: []const Node) !NodeIndex { .main_token = lparen, .data = .{ .lhs = lhs, - .rhs = try c.addExtra(std.zig.ast.Node.SubRange{ + .rhs = try c.addExtra(NodeSubRange{ .start = span.start, .end = span.end, }), @@ -1728,7 +1933,7 @@ fn renderFunc(c: *Context, node: Node) !NodeIndex { .tag = .fn_proto_multi, .main_token = fn_token, .data = .{ - .lhs = try c.addExtra(std.zig.ast.Node.SubRange{ + .lhs = 
try c.addExtra(NodeSubRange{ .start = span.start, .end = span.end, }), From c0540967e974b59bf85f71be9f085b5852d20ef6 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Sun, 14 Feb 2021 18:06:15 +0200 Subject: [PATCH 087/173] translate-c: render array stuff --- src/Compilation.zig | 2 +- src/translate_c.zig | 64 ++++++------ src/translate_c/ast.zig | 220 +++++++++++++++++++++++++++++++++++----- 3 files changed, 227 insertions(+), 59 deletions(-) diff --git a/src/Compilation.zig b/src/Compilation.zig index 89c047065c..b3ee73f03f 100644 --- a/src/Compilation.zig +++ b/src/Compilation.zig @@ -1334,7 +1334,7 @@ pub fn update(self: *Compilation) !void { self.c_object_work_queue.writeItemAssumeCapacity(entry.key); } - const use_stage1 = build_options.is_stage1 and self.bin_file.options.use_llvm; + const use_stage1 = build_options.omit_stage2 or build_options.is_stage1 and self.bin_file.options.use_llvm; if (!use_stage1) { if (self.bin_file.options.module) |module| { module.compile_log_text.shrinkAndFree(module.gpa, 0); diff --git a/src/translate_c.zig b/src/translate_c.zig index 894502ecc5..ce8fea7a71 100644 --- a/src/translate_c.zig +++ b/src/translate_c.zig @@ -1689,7 +1689,10 @@ fn transStringLiteralAsArray( init_list[i] = try transCreateNodeNumber(c, 0, .int); } - return Tag.array_init.create(c.arena, init_list); + return Tag.array_init.create(c.arena, .{ + .cond = arr_type, + .cases = init_list, + }); } fn cIsEnum(qt: clang.QualType) bool { @@ -1880,6 +1883,7 @@ fn transInitListExprArray( ) TransError!Node { const arr_type = ty.getAsArrayTypeUnsafe(); const child_qt = arr_type.getElementType(); + const child_type = try transQualType(c, child_qt, loc); const init_count = expr.getNumInits(); assert(@ptrCast(*const clang.Type, arr_type).isConstantArrayType()); const const_arr_ty = @ptrCast(*const clang.ConstantArrayType, arr_type); @@ -1888,18 +1892,20 @@ fn transInitListExprArray( const leftover_count = all_count - init_count; if (all_count == 0) { - return 
Tag.empty_array.create(c.arena, try transQualType(c, child_qt, loc)); + return Tag.empty_array.create(c.arena, child_type); } - const ty_node = try transType(c, ty, loc); const init_node = if (init_count != 0) blk: { const init_list = try c.arena.alloc(Node, init_count); for (init_list) |*init, i| { const elem_expr = expr.getInit(@intCast(c_uint, i)); - init.* = try transExpr(c, scope, elem_expr, .used); + init.* = try transExprCoercing(c, scope, elem_expr, .used); } - const init_node = try Tag.array_init.create(c.arena, init_list); + const init_node = try Tag.array_init.create(c.arena, .{ + .cond = try Tag.array_type.create(c.arena, .{ .len = init_count, .elem_type = child_type }), + .cases = init_list, + }); if (leftover_count == 0) { return init_node; } @@ -1908,8 +1914,8 @@ fn transInitListExprArray( const filler_val_expr = expr.getArrayFiller(); const filler_node = try Tag.array_filler.create(c.arena, .{ - .type = ty_node, - .filler = try transExpr(c, scope, filler_val_expr, .used), + .type = child_type, + .filler = try transExprCoercing(c, scope, filler_val_expr, .used), .count = leftover_count, }); @@ -2422,9 +2428,7 @@ fn transMemberExpr(c: *Context, scope: *Scope, stmt: *const clang.MemberExpr, re const decl = @ptrCast(*const clang.NamedDecl, member_decl); break :blk try c.str(decl.getName_bytes_begin()); }; - const ident = try Tag.identifier.create(c.arena, name); - - const node = try Tag.field_access.create(c.arena, .{ .lhs = container_node, .rhs = ident }); + const node = try Tag.field_access.create(c.arena, .{ .lhs = container_node, .field_name = name }); return maybeSuppressResult(c, scope, result_used, node); } @@ -2698,14 +2702,14 @@ fn transCreatePreCrement( defer block_scope.deinit(); const ref = try block_scope.makeMangledName(c, "ref"); - const expr = try transExpr(c, scope, op_expr, .used); + const expr = try transExpr(c, &block_scope.base, op_expr, .used); const addr_of = try Tag.address_of.create(c.arena, expr); const ref_decl = try 
Tag.var_simple.create(c.arena, .{ .name = ref, .init = addr_of }); try block_scope.statements.append(ref_decl); const lhs_node = try Tag.identifier.create(c.arena, ref); const ref_node = try Tag.deref.create(c.arena, lhs_node); - const node = try transCreateNodeInfixOp(c, scope, op, ref_node, Tag.one_literal.init(), .used); + const node = try transCreateNodeInfixOp(c, &block_scope.base, op, ref_node, Tag.one_literal.init(), .used); try block_scope.statements.append(node); const break_node = try Tag.break_val.create(c.arena, .{ @@ -2745,7 +2749,7 @@ fn transCreatePostCrement( defer block_scope.deinit(); const ref = try block_scope.makeMangledName(c, "ref"); - const expr = try transExpr(c, scope, op_expr, .used); + const expr = try transExpr(c, &block_scope.base, op_expr, .used); const addr_of = try Tag.address_of.create(c.arena, expr); const ref_decl = try Tag.var_simple.create(c.arena, .{ .name = ref, .init = addr_of }); try block_scope.statements.append(ref_decl); @@ -2757,7 +2761,7 @@ fn transCreatePostCrement( const tmp_decl = try Tag.var_simple.create(c.arena, .{ .name = tmp, .init = ref_node }); try block_scope.statements.append(tmp_decl); - const node = try transCreateNodeInfixOp(c, scope, op, ref_node, Tag.one_literal.init(), .used); + const node = try transCreateNodeInfixOp(c, &block_scope.base, op, ref_node, Tag.one_literal.init(), .used); try block_scope.statements.append(node); const break_node = try Tag.break_val.create(c.arena, .{ @@ -2864,7 +2868,7 @@ fn transCreateCompoundAssign( defer block_scope.deinit(); const ref = try block_scope.makeMangledName(c, "ref"); - const expr = try transExpr(c, scope, lhs, .used); + const expr = try transExpr(c, &block_scope.base, lhs, .used); const addr_of = try Tag.address_of.create(c.arena, expr); const ref_decl = try Tag.var_simple.create(c.arena, .{ .name = ref, .init = addr_of }); try block_scope.statements.append(ref_decl); @@ -2873,16 +2877,16 @@ fn transCreateCompoundAssign( const ref_node = try 
Tag.deref.create(c.arena, lhs_node); if ((is_mod or is_div) and is_signed) { - const rhs_node = try transExpr(c, scope, rhs, .used); + const rhs_node = try transExpr(c, &block_scope.base, rhs, .used); const builtin = if (is_mod) try Tag.rem.create(c.arena, .{ .lhs = lhs_node, .rhs = rhs_node }) else try Tag.div_trunc.create(c.arena, .{ .lhs = lhs_node, .rhs = rhs_node }); - const assign = try transCreateNodeInfixOp(c, scope, .assign, lhs_node, builtin, .used); + const assign = try transCreateNodeInfixOp(c, &block_scope.base, .assign, lhs_node, builtin, .used); try block_scope.statements.append(assign); } else { - var rhs_node = try transExpr(c, scope, rhs, .used); + var rhs_node = try transExpr(c, &block_scope.base, rhs, .used); if (is_shift or requires_int_cast) { // @intCast(rhs) @@ -2894,7 +2898,7 @@ fn transCreateCompoundAssign( rhs_node = try Tag.int_cast.create(c.arena, .{ .lhs = cast_to_type, .rhs = rhs_node }); } - const assign = try transCreateNodeInfixOp(c, scope, op, ref_node, rhs_node, .used); + const assign = try transCreateNodeInfixOp(c, &block_scope.base, op, ref_node, rhs_node, .used); try block_scope.statements.append(assign); } @@ -3395,7 +3399,7 @@ fn transCreateNodeAssign( defer block_scope.deinit(); const tmp = try block_scope.makeMangledName(c, "tmp"); - const rhs_node = try transExpr(c, scope, rhs, .used); + const rhs_node = try transExpr(c, &block_scope.base, rhs, .used); const tmp_decl = try Tag.var_simple.create(c.arena, .{ .name = tmp, .init = rhs_node }); try block_scope.statements.append(tmp_decl); @@ -4756,8 +4760,7 @@ fn parseCPostfixExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { return error.ParseError; } - const ident = try Tag.identifier.create(c.arena, m.slice()); - node = try Tag.field_access.create(c.arena, .{ .lhs = node, .rhs = ident }); + node = try Tag.field_access.create(c.arena, .{ .lhs = node, .field_name = m.slice() }); }, .Arrow => { if (m.next().? 
!= .Identifier) { @@ -4766,8 +4769,7 @@ fn parseCPostfixExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { } const deref = try Tag.deref.create(c.arena, node); - const ident = try Tag.identifier.create(c.arena, m.slice()); - node = try Tag.field_access.create(c.arena, .{ .lhs = deref, .rhs = ident }); + node = try Tag.field_access.create(c.arena, .{ .lhs = deref, .field_name = m.slice() }); }, .LBracket => { const index = try macroBoolToInt(c, try parseCExpr(c, m, scope)); @@ -4914,13 +4916,12 @@ fn getContainer(c: *Context, node: Node) ?Node { }, .field_access => { - const infix = node.castTag(.field_access).?; + const field_access = node.castTag(.field_access).?; - if (getContainerTypeOf(c, infix.data.lhs)) |ty_node| { + if (getContainerTypeOf(c, field_access.data.lhs)) |ty_node| { if (ty_node.castTag(.@"struct") orelse ty_node.castTag(.@"union")) |container| { for (container.data.fields) |field| { - const ident = infix.data.rhs.castTag(.identifier).?; - if (mem.eql(u8, field.name, ident.data)) { + if (mem.eql(u8, field.name, field_access.data.field_name)) { return getContainer(c, field.type); } } @@ -4940,12 +4941,11 @@ fn getContainerTypeOf(c: *Context, ref: Node) ?Node { return getContainer(c, var_decl.data.type); } } - } else if (ref.castTag(.field_access)) |infix| { - if (getContainerTypeOf(c, infix.data.lhs)) |ty_node| { + } else if (ref.castTag(.field_access)) |field_access| { + if (getContainerTypeOf(c, field_access.data.lhs)) |ty_node| { if (ty_node.castTag(.@"struct") orelse ty_node.castTag(.@"union")) |container| { for (container.data.fields) |field| { - const ident = infix.data.rhs.castTag(.identifier).?; - if (mem.eql(u8, field.name, ident.data)) { + if (mem.eql(u8, field.name, field_access.data.field_name)) { return getContainer(c, field.type); } } diff --git a/src/translate_c/ast.zig b/src/translate_c/ast.zig index d108a2852e..014236e002 100644 --- a/src/translate_c/ast.zig +++ b/src/translate_c/ast.zig @@ -291,7 +291,6 @@ pub const 
Node = extern union { .array_cat, .ellipsis3, .switch_prong, - .field_access, .assign, .align_cast, .array_access, @@ -309,7 +308,7 @@ pub const Node = extern union { => Payload.Value, .@"if" => Payload.If, .@"while" => Payload.While, - .@"switch" => Payload.Switch, + .@"switch", .array_init => Payload.Switch, .@"break" => Payload.Break, .break_val => Payload.BreakVal, .call => Payload.Call, @@ -317,7 +316,7 @@ pub const Node = extern union { .func => Payload.Func, .@"enum" => Payload.Enum, .@"struct", .@"union" => Payload.Record, - .array_init, .tuple => Payload.ArrayInit, + .tuple => Payload.TupleInit, .container_init => Payload.ContainerInit, .std_meta_cast => Payload.Infix, .block => Payload.Block, @@ -329,6 +328,7 @@ pub const Node = extern union { .enum_redecl => Payload.EnumRedecl, .array_filler => Payload.ArrayFiller, .pub_inline_fn => Payload.PubInlineFn, + .field_access => Payload.FieldAccess, }; } @@ -513,7 +513,7 @@ pub const Payload = struct { }; }; - pub const ArrayInit = struct { + pub const TupleInit = struct { base: Payload, data: []Node, }; @@ -601,6 +601,14 @@ pub const Payload = struct { body: Node, }, }; + + pub const FieldAccess = struct { + base: Payload, + data: struct { + lhs: Node, + field_name: []const u8, + }, + }; }; /// Converts the nodes into a Zig ast. 
@@ -995,6 +1003,32 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { }); }, .var_decl => return renderVar(c, node), + .arg_redecl, .alias => { + const payload = @fieldParentPtr(Payload.ArgRedecl, "base", node.ptr_otherwise).data; + if (node.tag() == .alias) _ = try c.addToken(.keyword_pub, "pub"); + const mut_tok = if (node.tag() == .alias) + try c.addToken(.keyword_const, "const") + else + try c.addToken(.keyword_var, "var"); + _ = try c.addIdentifier(payload.actual); + _ = try c.addToken(.equal, "="); + + const init = try c.addNode(.{ + .tag = .identifier, + .main_token = try c.addIdentifier(payload.mangled), + .data = undefined, + }); + _ = try c.addToken(.semicolon, ";"); + + return c.addNode(.{ + .tag = .simple_var_decl, + .main_token = mut_tok, + .data = .{ + .lhs = 0, + .rhs = init, + }, + }); + }, .int_cast => { const payload = node.castTag(.int_cast).?.data; return renderBuiltinCall(c, "@intCast", &.{ payload.lhs, payload.rhs }); @@ -1339,7 +1373,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { _ = try c.addToken(.l_paren, "("); const cond = try c.addNode(.{ .tag = .bool_not, - .main_token = try c.addToken(.bang, "!"), + .main_token = try c.addToken(.bang, "!"), .data = .{ .lhs = try renderNodeGrouped(c, payload), .rhs = undefined, @@ -1430,7 +1464,77 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { }, }); }, - else => return c.addNode(.{ + .array_access => { + const payload = node.castTag(.array_access).?.data; + const lhs = try renderNode(c, payload.lhs); + const l_bracket = try c.addToken(.l_bracket, "["); + const index_expr = try renderNode(c, payload.rhs); + _ = try c.addToken(.r_bracket, "]"); + return c.addNode(.{ + .tag = .array_access, + .main_token = l_bracket, + .data = .{ + .lhs = lhs, + .rhs = index_expr, + }, + }); + }, + .array_type => { + const payload = node.castTag(.array_type).?.data; + return renderArrayType(c, payload.len, payload.elem_type); + }, + .array_filler => { + 
const payload = node.castTag(.array_filler).?.data; + + const type_expr = try renderArrayType(c, 1, payload.type); + const l_brace = try c.addToken(.l_brace, "{"); + const val = try renderNode(c, payload.filler); + _ = try c.addToken(.r_brace, "}"); + + const init = try c.addNode(.{ + .tag = .array_init_one, + .main_token = l_brace, + .data = .{ + .lhs = type_expr, + .rhs = val, + }, + }); + return c.addNode(.{ + .tag = .array_cat, + .main_token = try c.addToken(.asterisk_asterisk, "**"), + .data = .{ + .lhs = init, + .rhs = try c.addNode(.{ + .tag = .integer_literal, + .main_token = try c.addTokenFmt(.integer_literal, "{d}", .{payload.count}), + .data = .{ .lhs = undefined, .rhs = undefined }, + }), + }, + }); + }, + .empty_array => { + const payload = node.castTag(.empty_array).?.data; + + const type_expr = try renderArrayType(c, 0, payload); + return renderArrayInit(c, 0, &.{}); + }, + .array_init => { + const payload = node.castTag(.array_init).?.data; + const type_expr = try renderNode(c, payload.cond); + return renderArrayInit(c, type_expr, payload.cases); + }, + .field_access => { + const payload = node.castTag(.field_access).?.data; + const lhs = try renderNode(c, payload.lhs); + return renderFieldAccess(c, lhs, payload.field_name); + }, + .tuple, + .@"enum", + .@"struct", + .@"union", + .container_init, + .enum_redecl, + => return c.addNode(.{ .tag = .identifier, .main_token = try c.addTokenFmt(.identifier, "@\"TODO {}\"", .{node.tag()}), .data = .{ @@ -1441,10 +1545,88 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { } } +fn renderFieldAccess(c: *Context, lhs: NodeIndex, field_name: []const u8) !NodeIndex { + return c.addNode(.{ + .tag = .field_access, + .main_token = try c.addToken(.period, "."), + .data = .{ + .lhs = lhs, + .rhs = try c.addIdentifier(field_name), + }, + }); +} + +fn renderArrayInit(c: *Context, lhs: NodeIndex, inits: []const Node) !NodeIndex { + const l_brace = try c.addToken(.l_brace, "{"); + const res = switch 
(inits.len) { + 0 => try c.addNode(.{ + .tag = .struct_init_one, + .main_token = l_brace, + .data = .{ + .lhs = lhs, + .rhs = 0, + }, + }), + 1 => blk: { + const init = try renderNode(c, inits[0]); + break :blk try c.addNode(.{ + .tag = .array_init_one, + .main_token = l_brace, + .data = .{ + .lhs = lhs, + .rhs = init, + }, + }); + }, + else => blk: { + var rendered = try c.gpa.alloc(NodeIndex, inits.len); + defer c.gpa.free(rendered); + + for (inits) |init, i| { + if (i != 0) _ = try c.addToken(.comma, ","); + rendered[i] = try renderNode(c, init); + } + const span = try c.listToSpan(rendered); + break :blk try c.addNode(.{ + .tag = .array_init, + .main_token = l_brace, + .data = .{ + .lhs = lhs, + .rhs = try c.addExtra(NodeSubRange{ + .start = span.start, + .end = span.end, + }), + }, + }); + }, + }; + _ = try c.addToken(.r_brace, "}"); + return res; +} + +fn renderArrayType(c: *Context, len: usize, elem_type: Node) !NodeIndex { + const l_bracket = try c.addToken(.l_bracket, "["); + const len_expr = try c.addNode(.{ + .tag = .integer_literal, + .main_token = try c.addTokenFmt(.integer_literal, "{d}", .{len}), + .data = .{ .lhs = undefined, .rhs = undefined }, + }); + _ = try c.addToken(.r_bracket, "]"); + const elem_type_expr = try renderNode(c, elem_type); + return c.addNode(.{ + .tag = .array_type, + .main_token = l_bracket, + .data = .{ + .lhs = len_expr, + .rhs = elem_type_expr, + }, + }); +} + fn addSemicolonIfNeeded(c: *Context, node: Node) !void { switch (node.tag()) { .warning => unreachable, - .var_decl, .var_simple, .block, .empty_block, .@"switch" => {}, + .var_decl, .var_simple, .arg_redecl, .alias, .enum_redecl, .block, .empty_block, .@"switch" => {}, .while_true => { const payload = node.castTag(.while_true).?.data; return addSemicolonIfNeeded(c, payload); @@ -1517,6 +1699,7 @@ fn renderNodeGrouped(c: *Context, node: Node) !NodeIndex { .negate, .negate_wrap, .bit_not, + .func, => { // no grouping needed return renderNode(c, node); @@ -1572,7 +1755,6 
@@ fn renderNodeGrouped(c: *Context, node: Node) !NodeIndex { .switch_prong, .warning, .var_decl, - .func, .fail_decl, .arg_redecl, .alias, @@ -1658,22 +1840,8 @@ fn renderStdImport(c: *Context, first: []const u8, second: []const u8) !NodeInde }); var access_chain = import_node; - access_chain = try c.addNode(.{ - .tag = .field_access, - .main_token = try c.addToken(.period, "."), - .data = .{ - .lhs = access_chain, - .rhs = try c.addIdentifier(first), - }, - }); - access_chain = try c.addNode(.{ - .tag = .field_access, - .main_token = try c.addToken(.period, "."), - .data = .{ - .lhs = access_chain, - .rhs = try c.addIdentifier(second), - }, - }); + access_chain = try renderFieldAccess(c, access_chain, first); + access_chain = try renderFieldAccess(c, access_chain, second); return access_chain; } @@ -1974,10 +2142,10 @@ fn renderFunc(c: *Context, node: Node) !NodeIndex { const body = if (payload.body) |some| try renderNode(c, some) - else blk: { + else if (payload.is_extern) blk: { _ = try c.addToken(.semicolon, ";"); break :blk 0; - }; + } else return fn_proto; return c.addNode(.{ .tag = .fn_decl, From 77a11e6873d0f30950d8a523c990d32b5a25a658 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Mon, 15 Feb 2021 13:27:14 +0200 Subject: [PATCH 088/173] translate-c: render records and enums --- src/translate_c.zig | 5 +- src/translate_c/ast.zig | 326 +++++++++++++++++++++++++++++++++------- 2 files changed, 277 insertions(+), 54 deletions(-) diff --git a/src/translate_c.zig b/src/translate_c.zig index ce8fea7a71..995ff16b1c 100644 --- a/src/translate_c.zig +++ b/src/translate_c.zig @@ -1871,7 +1871,10 @@ fn transInitListExprRecord( }); } - return Tag.container_init.create(c.arena, try c.arena.dupe(ast.Payload.ContainerInit.Initializer, field_inits.items)); + return Tag.container_init.create(c.arena, .{ + .lhs = ty_node, + .inits = try c.arena.dupe(ast.Payload.ContainerInit.Initializer, field_inits.items), + }); } fn transInitListExprArray( diff --git 
a/src/translate_c/ast.zig b/src/translate_c/ast.zig index 014236e002..b22e61c65e 100644 --- a/src/translate_c/ast.zig +++ b/src/translate_c/ast.zig @@ -520,7 +520,10 @@ pub const Payload = struct { pub const ContainerInit = struct { base: Payload, - data: []Initializer, + data: struct { + lhs: Node, + inits: []Initializer, + }, pub const Initializer = struct { name: []const u8, @@ -1528,20 +1531,250 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { const lhs = try renderNode(c, payload.lhs); return renderFieldAccess(c, lhs, payload.field_name); }, - .tuple, - .@"enum", - .@"struct", - .@"union", - .container_init, - .enum_redecl, - => return c.addNode(.{ - .tag = .identifier, - .main_token = try c.addTokenFmt(.identifier, "@\"TODO {}\"", .{node.tag()}), + .@"struct", .@"union" => return renderRecord(c, node), + .@"enum" => { + const payload = node.castTag(.@"enum").?.data; + const enum_tok = try c.addToken(.keyword_enum, "enum"); + + _ = try c.addToken(.l_brace, "{"); + const members = try c.gpa.alloc(NodeIndex, std.math.max(payload.len + 1, 1)); + defer c.gpa.free(members); + members[0] = 0; + members[1] = 0; + + for (payload) |field, i| { + const name_tok = try c.addIdentifier(field.name); + const value_expr = if (field.value) |some| blk: { + _ = try c.addToken(.equal, "="); + break :blk try renderNode(c, some); + } else 0; + + members[i] = try c.addNode(.{ + .tag = .container_field_init, + .main_token = name_tok, + .data = .{ + .lhs = 0, + .rhs = value_expr, + }, + }); + _ = try c.addToken(.comma, ","); + } + // make non-exhaustive + members[payload.len] = try c.addNode(.{ + .tag = .container_field_init, + .main_token = try c.addIdentifier("_"), + .data = .{ + .lhs = 0, + .rhs = 0, + }, + }); + _ = try c.addToken(.comma, ","); + _ = try c.addToken(.r_brace, "}"); + + if (members.len <= 2) { + return c.addNode(.{ + .tag = .container_decl_two_comma, + .main_token = enum_tok, + .data = .{ + .lhs = members[0], + .rhs = members[1], + }, + }); + } 
else { + const span = try c.listToSpan(members); + return c.addNode(.{ + .tag = .container_decl_comma, + .main_token = enum_tok, + .data = .{ + .lhs = span.start, + .rhs = span.end, + }, + }); + } + }, + .enum_redecl => { + const payload = node.castTag(.enum_redecl).?.data; + _ = try c.addToken(.keyword_pub, "pub"); + const const_tok = try c.addToken(.keyword_const, "const"); + _ = try c.addIdentifier(payload.enum_val_name); + _ = try c.addToken(.equal, "="); + + const enum_to_int_tok = try c.addToken(.builtin, "@enumToInt"); + _ = try c.addToken(.l_paren, "("); + const enum_name = try c.addNode(.{ + .tag = .identifier, + .main_token = try c.addIdentifier(payload.enum_name), + .data = undefined, + }); + const field_access = try renderFieldAccess(c, enum_name, payload.field_name); + const init_node = try c.addNode(.{ + .tag = .builtin_call_two, + .main_token = enum_to_int_tok, + .data = .{ + .lhs = field_access, + .rhs = 0, + }, + }); + _ = try c.addToken(.r_paren, ")"); + _ = try c.addToken(.semicolon, ";"); + + return c.addNode(.{ + .tag = .simple_var_decl, + .main_token = const_tok, + .data = .{ + .lhs = 0, + .rhs = init_node, + }, + }); + }, + .tuple => { + const payload = node.castTag(.tuple).?.data; + _ = try c.addToken(.period, "."); + const l_brace = try c.addToken(.l_brace, "{"); + var inits = try c.gpa.alloc(NodeIndex, std.math.max(payload.len, 1)); + defer c.gpa.free(inits); + inits[0] = 0; + for (payload) |init, i| { + if (i != 0) _ = try c.addToken(.comma, ","); + inits[i] = try renderNode(c, init); + } + _ = try c.addToken(.r_brace, "}"); + if (payload.len < 3) { + return c.addNode(.{ + .tag = .array_init_dot_two, + .main_token = l_brace, + .data = .{ + .lhs = inits[0], + .rhs = inits[1], + }, + }); + } else { + const span = try c.listToSpan(inits); + return c.addNode(.{ + .tag = .array_init_dot, + .main_token = l_brace, + .data = .{ + .lhs = span.start, + .rhs = span.end, + }, + }); + } + }, + .container_init => { + const payload = 
node.castTag(.container_init).?.data; + const lhs = try renderNode(c, payload.lhs); + + const l_brace = try c.addToken(.l_brace, "{"); + var inits = try c.gpa.alloc(NodeIndex, std.math.max(payload.inits.len, 1)); + defer c.gpa.free(inits); + inits[0] = 0; + for (payload.inits) |init, i| { + if (i != 0) _ = try c.addToken(.comma, ","); + _ = try c.addToken(.period, "."); + _ = try c.addIdentifier(init.name); + _ = try c.addToken(.equal, "="); + inits[i] = try renderNode(c, init.value); + } + _ = try c.addToken(.r_brace, "}"); + + if (payload.inits.len < 2) { + return c.addNode(.{ + .tag = .struct_init_one, + .main_token = l_brace, + .data = .{ + .lhs = lhs, + .rhs = inits[0], + }, + }); + } else { + const span = try c.listToSpan(inits); + return c.addNode(.{ + .tag = .struct_init, + .main_token = l_brace, + .data = .{ + .lhs = lhs, + .rhs = try c.addExtra(NodeSubRange{ + .start = span.start, + .end = span.end, + }), + }, + }); + } + }, + } +} + +fn renderRecord(c: *Context, node: Node) !NodeIndex { + const payload = @fieldParentPtr(Payload.Record, "base", node.ptr_otherwise).data; + if (payload.is_packed) _ = try c.addToken(.keyword_packed, "packed"); + const kind_tok = if (node.tag() == .@"struct") + try c.addToken(.keyword_struct, "struct") + else + try c.addToken(.keyword_union, "union"); + + _ = try c.addToken(.l_brace, "{"); + const members = try c.gpa.alloc(NodeIndex, std.math.max(payload.fields.len, 2)); + defer c.gpa.free(members); + members[0] = 0; + members[1] = 0; + + for (payload.fields) |field, i| { + const name_tok = try c.addIdentifier(field.name); + _ = try c.addToken(.colon, ":"); + const type_expr = try renderNode(c, field.type); + + const alignment = field.alignment orelse { + members[i] = try c.addNode(.{ + .tag = .container_field_init, + .main_token = name_tok, + .data = .{ + .lhs = type_expr, + .rhs = 0, + }, + }); + _ = try c.addToken(.comma, ","); + continue; + }; + _ = try c.addToken(.keyword_align, "align"); + _ = try c.addToken(.l_paren, 
"("); + const align_expr = try c.addNode(.{ + .tag = .integer_literal, + .main_token = try c.addTokenFmt(.integer_literal, "{d}", .{alignment}), + .data = undefined, + }); + _ = try c.addToken(.r_paren, ")"); + + members[i] = try c.addNode(.{ + .tag = .container_field_align, + .main_token = name_tok, .data = .{ - .lhs = undefined, - .rhs = undefined, + .lhs = type_expr, + .rhs = align_expr, }, - }), + }); + _ = try c.addToken(.comma, ","); + } + _ = try c.addToken(.r_brace, "}"); + + if (members.len <= 2) { + return c.addNode(.{ + .tag = .container_decl_two_comma, + .main_token = kind_tok, + .data = .{ + .lhs = members[0], + .rhs = members[1], + }, + }); + } else { + const span = try c.listToSpan(members); + return c.addNode(.{ + .tag = .container_decl_comma, + .main_token = kind_tok, + .data = .{ + .lhs = span.start, + .rhs = span.end, + }, + }); } } @@ -1558,50 +1791,37 @@ fn renderFieldAccess(c: *Context, lhs: NodeIndex, field_name: []const u8) !NodeI fn renderArrayInit(c: *Context, lhs: NodeIndex, inits: []const Node) !NodeIndex { const l_brace = try c.addToken(.l_brace, "{"); - const res = switch (inits.len) { - 0 => try c.addNode(.{ - .tag = .struct_init_one, + var rendered = try c.gpa.alloc(NodeIndex, std.math.max(inits.len, 1)); + defer c.gpa.free(rendered); + rendered[0] = 0; + for (inits) |init, i| { + if (i != 0) _ = try c.addToken(.comma, ","); + rendered[i] = try renderNode(c, init); + } + _ = try c.addToken(.r_brace, "}"); + if (inits.len < 2) { + return c.addNode(.{ + .tag = .array_init_one, .main_token = l_brace, .data = .{ .lhs = lhs, - .rhs = 0, + .rhs = rendered[0], }, - }), - 1 => blk: { - const init = try renderNode(c, inits[0]); - break :blk try c.addNode(.{ - .tag = .array_init_one, - .main_token = l_brace, - .data = .{ - .lhs = lhs, - .rhs = init, - }, - }); - }, - else => blk: { - var rendered = try c.gpa.alloc(NodeIndex, inits.len); - defer c.gpa.free(rendered); - - for (inits) |init, i| { - if (i != 0) _ = try c.addToken(.comma, ","); - 
rendered[i] = try renderNode(c, init); - } - const span = try c.listToSpan(rendered); - break :blk try c.addNode(.{ - .tag = .array_init, - .main_token = l_brace, - .data = .{ - .lhs = lhs, - .rhs = try c.addExtra(NodeSubRange{ - .start = span.start, - .end = span.end, - }), - }, - }); - }, - }; - _ = try c.addToken(.r_brace, "}"); - return res; + }); + } else { + const span = try c.listToSpan(rendered); + return c.addNode(.{ + .tag = .array_init, + .main_token = l_brace, + .data = .{ + .lhs = lhs, + .rhs = try c.addExtra(NodeSubRange{ + .start = span.start, + .end = span.end, + }), + }, + }); + } } fn renderArrayType(c: *Context, len: usize, elem_type: Node) !NodeIndex { From 78fba4e0213dc9bcc2274ef6c31023256a235301 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Mon, 15 Feb 2021 16:07:21 +0200 Subject: [PATCH 089/173] translate-c: get all run-translated-c tests passing --- lib/std/zig/parser_test.zig | 12 +-- lib/std/zig/render.zig | 2 +- src/translate_c.zig | 43 +++++--- src/translate_c/ast.zig | 195 +++++++++++++++--------------------- 4 files changed, 120 insertions(+), 132 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index e4778b35e1..623682056c 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -3669,12 +3669,12 @@ test "zig fmt: hexadeciaml float literals with underscore separators" { ); } -//test "zig fmt: C var args" { -// try testCanonical( -// \\pub extern "c" fn printf(format: [*:0]const u8, ...) c_int; -// \\ -// ); -//} +test "zig fmt: C var args" { + try testCanonical( + \\pub extern "c" fn printf(format: [*:0]const u8, ...) 
c_int; + \\ + ); +} //test "zig fmt: Only indent multiline string literals in function calls" { // try testCanonical( diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 6fd091d32c..89a9f472ef 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -1308,7 +1308,7 @@ fn renderFnProto(ais: *Ais, tree: ast.Tree, fn_proto: ast.full.FnProto, space: S .r_paren => break, .comma => { try renderToken(ais, tree, last_param_token, .space); // , - last_param_token += 1; + continue; }, else => {}, // Parameter type without a name. } diff --git a/src/translate_c.zig b/src/translate_c.zig index 995ff16b1c..8b7aedcee1 100644 --- a/src/translate_c.zig +++ b/src/translate_c.zig @@ -990,7 +990,10 @@ fn transEnumDecl(c: *Context, enum_decl: *const clang.EnumDecl) Error!?Node { })); } - break :blk try Tag.@"enum".create(c.arena, try c.arena.dupe(ast.Payload.Enum.Field, fields.items)); + break :blk try Tag.@"enum".create(c.arena, .{ + .int_type = init_arg_expr, + .fields = try c.arena.dupe(ast.Payload.Enum.Field, fields.items), + }); } else blk: { _ = try c.opaque_demotes.put(c.gpa, @ptrToInt(enum_decl.getCanonicalDecl()), {}); break :blk Tag.opaque_literal.init(); @@ -1540,8 +1543,8 @@ fn finishBoolExpr( } }, .Pointer => { - // node == null - return Tag.equal.create(c.arena, .{ .lhs = node, .rhs = Tag.null_literal.init() }); + // node != null + return Tag.not_equal.create(c.arena, .{ .lhs = node, .rhs = Tag.null_literal.init() }); }, .Typedef => { const typedef_ty = @ptrCast(*const clang.TypedefType, ty); @@ -1675,7 +1678,8 @@ fn transStringLiteralAsArray( const ty = expr_base.getType().getTypePtr(); const const_arr_ty = @ptrCast(*const clang.ConstantArrayType, ty); - const arr_type = try transQualType(c, const_arr_ty.getElementType(), expr_base.getBeginLoc()); + const elem_type = try transQualType(c, const_arr_ty.getElementType(), expr_base.getBeginLoc()); + const arr_type = try Tag.array_type.create(c.arena, .{ .len = array_size, .elem_type = elem_type }); 
const init_list = try c.arena.alloc(Node, array_size); var i: c_uint = 0; @@ -2668,7 +2672,7 @@ fn transUnaryOperator(c: *Context, scope: *Scope, stmt: *const clang.UnaryOperat return Tag.bit_not.create(c.arena, try transExpr(c, scope, op_expr, .used)); }, .LNot => { - return Tag.not.create(c.arena, try transExpr(c, scope, op_expr, .used)); + return Tag.not.create(c.arena, try transBoolExpr(c, scope, op_expr, .used)); }, .Extension => { return transExpr(c, scope, stmt.getSubExpr(), used); @@ -2969,8 +2973,15 @@ fn transBreak(c: *Context, scope: *Scope) TransError!Node { fn transFloatingLiteral(c: *Context, scope: *Scope, stmt: *const clang.FloatingLiteral, used: ResultUsed) TransError!Node { // TODO use something more accurate - const dbl = stmt.getValueAsApproximateDouble(); - const node = try transCreateNodeNumber(c, dbl, .float); + var dbl = stmt.getValueAsApproximateDouble(); + const is_negative = dbl < 0; + if (is_negative) dbl = -dbl; + const str = try std.fmt.allocPrint(c.arena, "{d}", .{dbl}); + var node = if (dbl == std.math.floor(dbl)) + try Tag.integer_literal.create(c.arena, str) + else + try Tag.float_literal.create(c.arena, str); + if (is_negative) node = try Tag.negate.create(c.arena, node); return maybeSuppressResult(c, scope, used, node); } @@ -3004,8 +3015,11 @@ fn transBinaryConditionalOperator(c: *Context, scope: *Scope, stmt: *const clang }, }; defer cond_scope.deinit(); - const cond_node = try transBoolExpr(c, &cond_scope.base, cond_expr, .used); - var then_body = try Tag.identifier.create(c.arena, mangled_name); + + const cond_ident = try Tag.identifier.create(c.arena, mangled_name); + const ty = getExprQualType(c, cond_expr).getTypePtr(); + const cond_node = try finishBoolExpr(c, &cond_scope.base, cond_expr.getBeginLoc(), ty, cond_ident, .used); + var then_body = cond_ident; if (!res_is_bool and isBoolRes(init_node)) { then_body = try Tag.bool_to_int.create(c.arena, then_body); } @@ -3489,11 +3503,13 @@ fn transCreateNodeAPInt(c: *Context, 
int: *const clang.APSInt) !Node { else => @compileError("unimplemented"), } - const big: math.big.int.Const = .{ .limbs = limbs, .positive = !is_negative }; + const big: math.big.int.Const = .{ .limbs = limbs, .positive = true }; const str = big.toStringAlloc(c.arena, 10, false) catch |err| switch (err) { error.OutOfMemory => return error.OutOfMemory, }; - return Tag.integer_literal.create(c.arena, str); + const res = try Tag.integer_literal.create(c.arena, str); + if (is_negative) return Tag.negate.create(c.arena, res); + return res; } fn transCreateNodeNumber(c: *Context, num: anytype, num_kind: enum { int, float }) !Node { @@ -3567,7 +3583,7 @@ fn transCreateNodeShiftOp( const rhs_type = try qualTypeToLog2IntRef(c, stmt.getType(), rhs_location); const rhs = try transExprCoercing(c, scope, rhs_expr, .used); - const rhs_casted = try Tag.int_cast.create(c.arena, .{ .lhs = rhs_type, .rhs = rhs_type }); + const rhs_casted = try Tag.int_cast.create(c.arena, .{ .lhs = rhs_type, .rhs = rhs }); return transCreateNodeInfixOp(c, scope, op, lhs, rhs_casted, used); } @@ -3622,7 +3638,8 @@ fn transType(c: *Context, ty: *const clang.Type, source_loc: clang.SourceLocatio const is_volatile = child_qt.isVolatileQualified(); const elem_type = try transQualType(c, child_qt, source_loc); if (typeIsOpaque(c, child_qt.getTypePtr(), source_loc) or qualTypeWasDemotedToOpaque(c, child_qt)) { - return Tag.single_pointer.create(c.arena, .{ .is_const = is_const, .is_volatile = is_volatile, .elem_type = elem_type }); + const ptr = try Tag.single_pointer.create(c.arena, .{ .is_const = is_const, .is_volatile = is_volatile, .elem_type = elem_type }); + return Tag.optional_type.create(c.arena, ptr); } return Tag.c_pointer.create(c.arena, .{ .is_const = is_const, .is_volatile = is_volatile, .elem_type = elem_type }); diff --git a/src/translate_c/ast.zig b/src/translate_c/ast.zig index b22e61c65e..3921acd546 100644 --- a/src/translate_c/ast.zig +++ b/src/translate_c/ast.zig @@ -491,7 +491,10 @@ 
pub const Payload = struct { pub const Enum = struct { base: Payload, - data: []Field, + data: struct { + int_type: Node, + fields: []Field, + }, pub const Field = struct { name: []const u8, @@ -825,11 +828,6 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { .main_token = try c.addToken(.identifier, "void"), .data = undefined, }), - .@"anytype" => return c.addNode(.{ - .tag = .@"anytype", - .main_token = try c.addToken(.keyword_anytype, "anytype"), - .data = undefined, - }), .noreturn_type => return c.addNode(.{ .tag = .identifier, .main_token = try c.addToken(.identifier, "noreturn"), @@ -946,7 +944,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { const payload = node.castTag(.char_literal).?.data; return c.addNode(.{ .tag = .identifier, - .main_token = try c.addToken(.string_literal, payload), + .main_token = try c.addToken(.char_literal, payload), .data = undefined, }); }, @@ -1268,7 +1266,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { const lhs = try c.addNode(.{ .tag = .identifier, .main_token = try c.addToken(.identifier, "_"), - .data = .{ .lhs = undefined, .rhs = undefined }, + .data = undefined, }); return c.addNode(.{ .tag = .assign, @@ -1510,7 +1508,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { .rhs = try c.addNode(.{ .tag = .integer_literal, .main_token = try c.addTokenFmt(.integer_literal, "{d}", .{payload.count}), - .data = .{ .lhs = undefined, .rhs = undefined }, + .data = undefined, }), }, }); @@ -1519,7 +1517,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { const payload = node.castTag(.empty_array).?.data; const type_expr = try renderArrayType(c, 0, payload); - return renderArrayInit(c, 0, &.{}); + return renderArrayInit(c, type_expr, &.{}); }, .array_init => { const payload = node.castTag(.array_init).?.data; @@ -1534,15 +1532,17 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { .@"struct", .@"union" => return 
renderRecord(c, node), .@"enum" => { const payload = node.castTag(.@"enum").?.data; + _ = try c.addToken(.keyword_extern, "extern"); const enum_tok = try c.addToken(.keyword_enum, "enum"); - + _ = try c.addToken(.l_paren, "("); + const arg_expr = try renderNode(c, payload.int_type); + _ = try c.addToken(.r_paren, ")"); _ = try c.addToken(.l_brace, "{"); - const members = try c.gpa.alloc(NodeIndex, std.math.max(payload.len + 1, 1)); + const members = try c.gpa.alloc(NodeIndex, std.math.max(payload.fields.len + 1, 1)); defer c.gpa.free(members); members[0] = 0; - members[1] = 0; - for (payload) |field, i| { + for (payload.fields) |field, i| { const name_tok = try c.addIdentifier(field.name); const value_expr = if (field.value) |some| blk: { _ = try c.addToken(.equal, "="); @@ -1560,7 +1560,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { _ = try c.addToken(.comma, ","); } // make non-exhaustive - members[payload.len] = try c.addNode(.{ + members[payload.fields.len] = try c.addNode(.{ .tag = .container_field_init, .main_token = try c.addIdentifier("_"), .data = .{ @@ -1571,26 +1571,18 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { _ = try c.addToken(.comma, ","); _ = try c.addToken(.r_brace, "}"); - if (members.len <= 2) { - return c.addNode(.{ - .tag = .container_decl_two_comma, - .main_token = enum_tok, - .data = .{ - .lhs = members[0], - .rhs = members[1], - }, - }); - } else { - const span = try c.listToSpan(members); - return c.addNode(.{ - .tag = .container_decl_comma, - .main_token = enum_tok, - .data = .{ - .lhs = span.start, - .rhs = span.end, - }, - }); - } + const span = try c.listToSpan(members); + return c.addNode(.{ + .tag = .container_decl_arg_comma, + .main_token = enum_tok, + .data = .{ + .lhs = arg_expr, + .rhs = try c.addExtra(NodeSubRange{ + .start = span.start, + .end = span.end, + }), + }, + }); }, .enum_redecl => { const payload = node.castTag(.enum_redecl).?.data; @@ -1701,12 +1693,16 @@ fn 
renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { }); } }, + .@"anytype" => unreachable, // Handled in renderParams } } fn renderRecord(c: *Context, node: Node) !NodeIndex { const payload = @fieldParentPtr(Payload.Record, "base", node.ptr_otherwise).data; - if (payload.is_packed) _ = try c.addToken(.keyword_packed, "packed"); + if (payload.is_packed) + _ = try c.addToken(.keyword_packed, "packed") + else + _ = try c.addToken(.keyword_extern, "extern"); const kind_tok = if (node.tag() == .@"struct") try c.addToken(.keyword_struct, "struct") else @@ -1829,7 +1825,7 @@ fn renderArrayType(c: *Context, len: usize, elem_type: Node) !NodeIndex { const len_expr = try c.addNode(.{ .tag = .integer_literal, .main_token = try c.addTokenFmt(.integer_literal, "{d}", .{len}), - .data = .{ .lhs = undefined, .rhs = undefined }, + .data = undefined, }); _ = try c.addToken(.r_bracket, "]"); const elem_type_expr = try renderNode(c, elem_type); @@ -1920,6 +1916,8 @@ fn renderNodeGrouped(c: *Context, node: Node) !NodeIndex { .negate_wrap, .bit_not, .func, + .call, + .array_type, => { // no grouping needed return renderNode(c, node); @@ -1954,7 +1952,6 @@ fn renderNodeGrouped(c: *Context, node: Node) !NodeIndex { .array_cat, .array_filler, .@"if", - .call, .@"enum", .@"struct", .@"union", @@ -1962,7 +1959,6 @@ fn renderNodeGrouped(c: *Context, node: Node) !NodeIndex { .tuple, .container_init, .block, - .array_type, => return c.addNode(.{ .tag = .grouped_expression, .main_token = try c.addToken(.l_paren, "("), @@ -2161,7 +2157,7 @@ fn renderVar(c: *Context, node: Node) !NodeIndex { const res = try c.addNode(.{ .tag = .integer_literal, .main_token = try c.addTokenFmt(.integer_literal, "{d}", .{some}), - .data = .{ .lhs = undefined, .rhs = undefined }, + .data = undefined, }); _ = try c.addToken(.r_paren, ")"); break :blk res; @@ -2173,7 +2169,7 @@ fn renderVar(c: *Context, node: Node) !NodeIndex { const res = try c.addNode(.{ .tag = .string_literal, .main_token = try 
c.addTokenFmt(.string_literal, "\"{s}\"", .{std.zig.fmtEscapes(some)}), - .data = .{ .lhs = undefined, .rhs = undefined }, + .data = undefined, }); _ = try c.addToken(.r_paren, ")"); break :blk res; @@ -2232,39 +2228,10 @@ fn renderFunc(c: *Context, node: Node) !NodeIndex { const fn_token = try c.addToken(.keyword_fn, "fn"); if (payload.name) |some| _ = try c.addIdentifier(some); - _ = try c.addToken(.l_paren, "("); - const first = if (payload.params.len != 0) blk: { - const param = payload.params[0]; - if (param.is_noalias) _ = try c.addToken(.keyword_noalias, "noalias"); - if (param.name) |some| { - _ = try c.addIdentifier(some); - _ = try c.addToken(.colon, ":"); - } - break :blk try renderNode(c, param.type); - } else 0; - + const params = try renderParams(c, payload.params, payload.is_var_args); + defer params.deinit(); var span: NodeSubRange = undefined; - if (payload.params.len > 1) { - var params = try c.gpa.alloc(NodeIndex, payload.params.len); - defer c.gpa.free(params); - - params[0] = first; - for (payload.params[1..]) |param, i| { - _ = try c.addToken(.comma, ","); - if (param.is_noalias) _ = try c.addToken(.keyword_noalias, "noalias"); - if (param.name) |some| { - _ = try c.addIdentifier(some); - _ = try c.addToken(.colon, ":"); - } - params[i + 1] = try renderNode(c, param.type); - } - span = try c.listToSpan(params); - } - if (payload.is_var_args) { - if (payload.params.len != 0) _ = try c.addToken(.comma, ","); - _ = try c.addToken(.ellipsis3, "..."); - } - _ = try c.addToken(.r_paren, ")"); + if (params.items.len > 1) span = try c.listToSpan(params.items); const align_expr = if (payload.alignment) |some| blk: { _ = try c.addToken(.keyword_align, "align"); @@ -2272,7 +2239,7 @@ fn renderFunc(c: *Context, node: Node) !NodeIndex { const res = try c.addNode(.{ .tag = .integer_literal, .main_token = try c.addTokenFmt(.integer_literal, "{d}", .{some}), - .data = .{ .lhs = undefined, .rhs = undefined }, + .data = undefined, }); _ = try 
c.addToken(.r_paren, ")"); break :blk res; @@ -2284,7 +2251,7 @@ fn renderFunc(c: *Context, node: Node) !NodeIndex { const res = try c.addNode(.{ .tag = .string_literal, .main_token = try c.addTokenFmt(.string_literal, "\"{s}\"", .{std.zig.fmtEscapes(some)}), - .data = .{ .lhs = undefined, .rhs = undefined }, + .data = undefined, }); _ = try c.addToken(.r_paren, ")"); break :blk res; @@ -2296,8 +2263,8 @@ fn renderFunc(c: *Context, node: Node) !NodeIndex { _ = try c.addToken(.period, "."); const res = try c.addNode(.{ .tag = .enum_literal, - .main_token = try c.addTokenFmt(.identifier, "{}", .{some}), - .data = .{ .lhs = undefined, .rhs = undefined }, + .main_token = try c.addTokenFmt(.identifier, "{s}", .{@tagName(some)}), + .data = undefined, }); _ = try c.addToken(.r_paren, ")"); break :blk res; @@ -2307,12 +2274,12 @@ fn renderFunc(c: *Context, node: Node) !NodeIndex { const fn_proto = try blk: { if (align_expr == 0 and section_expr == 0 and callconv_expr == 0) { - if (payload.params.len < 2) + if (params.items.len < 2) break :blk c.addNode(.{ .tag = .fn_proto_simple, .main_token = fn_token, .data = .{ - .lhs = first, + .lhs = params.items[0], .rhs = return_type_expr, }, }) @@ -2329,13 +2296,13 @@ fn renderFunc(c: *Context, node: Node) !NodeIndex { }, }); } - if (payload.params.len < 2) + if (params.items.len < 2) break :blk c.addNode(.{ .tag = .fn_proto_one, .main_token = fn_token, .data = .{ .lhs = try c.addExtra(std.zig.ast.Node.FnProtoOne{ - .param = first, + .param = params.items[0], .align_expr = align_expr, .section_expr = section_expr, .callconv_expr = callconv_expr, @@ -2383,35 +2350,10 @@ fn renderMacroFunc(c: *Context, node: Node) !NodeIndex { const fn_token = try c.addToken(.keyword_fn, "fn"); _ = try c.addIdentifier(payload.name); - _ = try c.addToken(.l_paren, "("); - const first = if (payload.params.len != 0) blk: { - const param = payload.params[0]; - if (param.is_noalias) _ = try c.addToken(.keyword_noalias, "noalias"); - if (param.name) |some| 
{ - _ = try c.addIdentifier(some); - _ = try c.addToken(.colon, ":"); - } - break :blk try renderNode(c, param.type); - } else 0; - + const params = try renderParams(c, payload.params, false); + defer params.deinit(); var span: NodeSubRange = undefined; - if (payload.params.len > 1) { - var params = try c.gpa.alloc(NodeIndex, payload.params.len); - defer c.gpa.free(params); - - params[0] = first; - for (payload.params[1..]) |param, i| { - _ = try c.addToken(.comma, ","); - if (param.is_noalias) _ = try c.addToken(.keyword_noalias, "noalias"); - if (param.name) |some| { - _ = try c.addIdentifier(some); - _ = try c.addToken(.colon, ":"); - } - params[i + 1] = try renderNode(c, param.type); - } - span = try c.listToSpan(params); - } - _ = try c.addToken(.r_paren, ")"); + if (params.items.len > 1) span = try c.listToSpan(params.items); const callconv_expr = blk: { _ = try c.addToken(.keyword_callconv, "callconv"); @@ -2420,7 +2362,7 @@ fn renderMacroFunc(c: *Context, node: Node) !NodeIndex { const res = try c.addNode(.{ .tag = .enum_literal, .main_token = try c.addToken(.identifier, "Inline"), - .data = .{ .lhs = undefined, .rhs = undefined }, + .data = undefined, }); _ = try c.addToken(.r_paren, ")"); break :blk res; @@ -2428,13 +2370,13 @@ fn renderMacroFunc(c: *Context, node: Node) !NodeIndex { const return_type_expr = try renderNode(c, payload.return_type); const fn_proto = try blk: { - if (payload.params.len < 2) + if (params.items.len < 2) break :blk c.addNode(.{ .tag = .fn_proto_one, .main_token = fn_token, .data = .{ .lhs = try c.addExtra(std.zig.ast.Node.FnProtoOne{ - .param = first, + .param = params.items[0], .align_expr = 0, .section_expr = 0, .callconv_expr = callconv_expr, @@ -2467,3 +2409,32 @@ fn renderMacroFunc(c: *Context, node: Node) !NodeIndex { }, }); } + +fn renderParams(c: *Context, params: []Payload.Param, is_var_args: bool) !std.ArrayList(NodeIndex) { + _ = try c.addToken(.l_paren, "("); + var rendered = std.ArrayList(NodeIndex).init(c.gpa); + 
errdefer rendered.deinit(); + try rendered.ensureCapacity(std.math.max(params.len, 1)); + + for (params) |param, i| { + if (i != 0) _ = try c.addToken(.comma, ","); + if (param.is_noalias) _ = try c.addToken(.keyword_noalias, "noalias"); + if (param.name) |some| { + _ = try c.addIdentifier(some); + _ = try c.addToken(.colon, ":"); + } + if (param.type.tag() == .@"anytype") { + _ = try c.addToken(.keyword_anytype, "anytype"); + continue; + } + rendered.appendAssumeCapacity(try renderNode(c, param.type)); + } + if (is_var_args) { + if (params.len != 0) _ = try c.addToken(.comma, ","); + _ = try c.addToken(.ellipsis3, "..."); + } + _ = try c.addToken(.r_paren, ")"); + + if (rendered.items.len == 0) rendered.appendAssumeCapacity(0); + return rendered; +} From 74e9d4ca820f9940b47ae658c41e7edb315681c8 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Tue, 16 Feb 2021 15:45:33 +0200 Subject: [PATCH 090/173] translate-c: get all translate-c tests passing --- src/translate_c.zig | 14 +- src/translate_c/ast.zig | 101 +++++---- test/translate_c.zig | 441 +++++++++++++++++++++------------------- 3 files changed, 302 insertions(+), 254 deletions(-) diff --git a/src/translate_c.zig b/src/translate_c.zig index 8b7aedcee1..fbd0ec6245 100644 --- a/src/translate_c.zig +++ b/src/translate_c.zig @@ -622,7 +622,7 @@ fn visitVarDecl(c: *Context, var_decl: *const clang.VarDecl, mangled_name: ?[]co return; // Avoid processing this decl twice const is_pub = mangled_name == null; - const is_thread_local = var_decl.getTLSKind() != .None; + const is_threadlocal = var_decl.getTLSKind() != .None; const scope = &c.global_scope.base; // TODO https://github.com/ziglang/zig/issues/3756 @@ -706,6 +706,7 @@ fn visitVarDecl(c: *Context, var_decl: *const clang.VarDecl, mangled_name: ?[]co .is_const = is_const, .is_extern = is_extern, .is_export = is_export, + .is_threadlocal = is_threadlocal, .linksection_string = linksection_string, .alignment = alignment, .name = checked_name, @@ -1307,6 +1308,7 
@@ fn transDeclStmtOne( .is_const = is_const, .is_extern = false, .is_export = false, + .is_threadlocal = false, .linksection_string = null, .alignment = null, .name = mangled_name, @@ -2886,11 +2888,11 @@ fn transCreateCompoundAssign( if ((is_mod or is_div) and is_signed) { const rhs_node = try transExpr(c, &block_scope.base, rhs, .used); const builtin = if (is_mod) - try Tag.rem.create(c.arena, .{ .lhs = lhs_node, .rhs = rhs_node }) + try Tag.rem.create(c.arena, .{ .lhs = ref_node, .rhs = rhs_node }) else - try Tag.div_trunc.create(c.arena, .{ .lhs = lhs_node, .rhs = rhs_node }); + try Tag.div_trunc.create(c.arena, .{ .lhs = ref_node, .rhs = rhs_node }); - const assign = try transCreateNodeInfixOp(c, &block_scope.base, .assign, lhs_node, builtin, .used); + const assign = try transCreateNodeInfixOp(c, &block_scope.base, .assign, ref_node, builtin, .used); try block_scope.statements.append(assign); } else { var rhs_node = try transExpr(c, &block_scope.base, rhs, .used); @@ -4794,6 +4796,10 @@ fn parseCPostfixExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { .LBracket => { const index = try macroBoolToInt(c, try parseCExpr(c, m, scope)); node = try Tag.array_access.create(c.arena, .{ .lhs = node, .rhs = index }); + if (m.next().? 
!= .RBracket) { + try m.fail(c, "unable to translate C expr: expected ']'", .{}); + return error.ParseError; + } }, .LParen => { var args = std.ArrayList(Node).init(c.gpa); diff --git a/src/translate_c/ast.zig b/src/translate_c/ast.zig index 3921acd546..8f3d605d8e 100644 --- a/src/translate_c/ast.zig +++ b/src/translate_c/ast.zig @@ -458,6 +458,7 @@ pub const Payload = struct { is_const: bool, is_extern: bool, is_export: bool, + is_threadlocal: bool, alignment: ?c_uint, linksection_string: ?[]const u8, name: []const u8, @@ -1164,42 +1165,42 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { }, }); }, - .add => return renderBinOp(c, node, .add, .plus, "+"), + .add => return renderBinOpGrouped(c, node, .add, .plus, "+"), .add_assign => return renderBinOp(c, node, .assign_add, .plus_equal, "+="), - .add_wrap => return renderBinOp(c, node, .add_wrap, .plus_percent, "+%"), + .add_wrap => return renderBinOpGrouped(c, node, .add_wrap, .plus_percent, "+%"), .add_wrap_assign => return renderBinOp(c, node, .assign_add_wrap, .plus_percent_equal, "+%="), - .sub => return renderBinOp(c, node, .sub, .minus, "-"), + .sub => return renderBinOpGrouped(c, node, .sub, .minus, "-"), .sub_assign => return renderBinOp(c, node, .assign_sub, .minus_equal, "-="), - .sub_wrap => return renderBinOp(c, node, .sub_wrap, .minus_percent, "-%"), + .sub_wrap => return renderBinOpGrouped(c, node, .sub_wrap, .minus_percent, "-%"), .sub_wrap_assign => return renderBinOp(c, node, .assign_sub_wrap, .minus_percent_equal, "-%="), - .mul => return renderBinOp(c, node, .mul, .asterisk, "*"), + .mul => return renderBinOpGrouped(c, node, .mul, .asterisk, "*"), .mul_assign => return renderBinOp(c, node, .assign_mul, .asterisk_equal, "*="), - .mul_wrap => return renderBinOp(c, node, .mul_wrap, .asterisk_percent, "*="), + .mul_wrap => return renderBinOpGrouped(c, node, .mul_wrap, .asterisk_percent, "*%"), .mul_wrap_assign => return renderBinOp(c, node, .assign_mul_wrap, 
.asterisk_percent_equal, "*%="), - .div => return renderBinOp(c, node, .div, .slash, "/"), + .div => return renderBinOpGrouped(c, node, .div, .slash, "/"), .div_assign => return renderBinOp(c, node, .assign_div, .slash_equal, "/="), - .shl => return renderBinOp(c, node, .bit_shift_left, .angle_bracket_angle_bracket_left, "<<"), + .shl => return renderBinOpGrouped(c, node, .bit_shift_left, .angle_bracket_angle_bracket_left, "<<"), .shl_assign => return renderBinOp(c, node, .assign_bit_shift_left, .angle_bracket_angle_bracket_left_equal, "<<="), - .shr => return renderBinOp(c, node, .bit_shift_right, .angle_bracket_angle_bracket_right, ">>"), + .shr => return renderBinOpGrouped(c, node, .bit_shift_right, .angle_bracket_angle_bracket_right, ">>"), .shr_assign => return renderBinOp(c, node, .assign_bit_shift_right, .angle_bracket_angle_bracket_right_equal, ">>="), - .mod => return renderBinOp(c, node, .mod, .percent, "%"), + .mod => return renderBinOpGrouped(c, node, .mod, .percent, "%"), .mod_assign => return renderBinOp(c, node, .assign_mod, .percent_equal, "%="), - .@"and" => return renderBinOp(c, node, .bool_and, .keyword_and, "and"), - .@"or" => return renderBinOp(c, node, .bool_or, .keyword_or, "or"), - .less_than => return renderBinOp(c, node, .less_than, .angle_bracket_left, "<"), - .less_than_equal => return renderBinOp(c, node, .less_or_equal, .angle_bracket_left_equal, "<="), - .greater_than => return renderBinOp(c, node, .greater_than, .angle_bracket_right, ">="), - .greater_than_equal => return renderBinOp(c, node, .greater_or_equal, .angle_bracket_right_equal, ">="), - .equal => return renderBinOp(c, node, .equal_equal, .equal_equal, "=="), - .not_equal => return renderBinOp(c, node, .bang_equal, .bang_equal, "!="), - .bit_and => return renderBinOp(c, node, .bit_and, .ampersand, "&"), + .@"and" => return renderBinOpGrouped(c, node, .bool_and, .keyword_and, "and"), + .@"or" => return renderBinOpGrouped(c, node, .bool_or, .keyword_or, "or"), + .less_than => 
return renderBinOpGrouped(c, node, .less_than, .angle_bracket_left, "<"), + .less_than_equal => return renderBinOpGrouped(c, node, .less_or_equal, .angle_bracket_left_equal, "<="), + .greater_than => return renderBinOpGrouped(c, node, .greater_than, .angle_bracket_right, ">="), + .greater_than_equal => return renderBinOpGrouped(c, node, .greater_or_equal, .angle_bracket_right_equal, ">="), + .equal => return renderBinOpGrouped(c, node, .equal_equal, .equal_equal, "=="), + .not_equal => return renderBinOpGrouped(c, node, .bang_equal, .bang_equal, "!="), + .bit_and => return renderBinOpGrouped(c, node, .bit_and, .ampersand, "&"), .bit_and_assign => return renderBinOp(c, node, .assign_bit_and, .ampersand_equal, "&="), - .bit_or => return renderBinOp(c, node, .bit_or, .pipe, "|"), + .bit_or => return renderBinOpGrouped(c, node, .bit_or, .pipe, "|"), .bit_or_assign => return renderBinOp(c, node, .assign_bit_or, .pipe_equal, "|="), - .bit_xor => return renderBinOp(c, node, .bit_xor, .caret, "^"), + .bit_xor => return renderBinOpGrouped(c, node, .bit_xor, .caret, "^"), .bit_xor_assign => return renderBinOp(c, node, .assign_bit_xor, .caret_equal, "^="), .array_cat => return renderBinOp(c, node, .array_cat, .plus_plus, "++"), - .ellipsis3 => return renderBinOp(c, node, .switch_range, .ellipsis3, "..."), + .ellipsis3 => return renderBinOpGrouped(c, node, .switch_range, .ellipsis3, "..."), .assign => return renderBinOp(c, node, .assign, .equal, "="), .empty_block => { const l_brace = try c.addToken(.l_brace, "{"); @@ -1222,7 +1223,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { _ = try c.addToken(.r_brace, "}"); return c.addNode(.{ - .tag = .block_two, + .tag = .block_two_semicolon, .main_token = l_brace, .data = .{ .lhs = stmt, @@ -1410,13 +1411,13 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { var cases = try c.gpa.alloc(NodeIndex, payload.cases.len); defer c.gpa.free(cases); for (payload.cases) |case, i| { - if (i != 0) _ = 
try c.addToken(.comma, ","); cases[i] = try renderNode(c, case); + _ = try c.addToken(.comma, ","); } const span = try c.listToSpan(cases); _ = try c.addToken(.r_brace, "}"); return c.addNode(.{ - .tag = .@"switch", + .tag = .switch_comma, .main_token = switch_tok, .data = .{ .lhs = cond, @@ -1623,9 +1624,10 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { const payload = node.castTag(.tuple).?.data; _ = try c.addToken(.period, "."); const l_brace = try c.addToken(.l_brace, "{"); - var inits = try c.gpa.alloc(NodeIndex, std.math.max(payload.len, 1)); + var inits = try c.gpa.alloc(NodeIndex, std.math.max(payload.len, 2)); defer c.gpa.free(inits); inits[0] = 0; + inits[1] = 0; for (payload) |init, i| { if (i != 0) _ = try c.addToken(.comma, ","); inits[i] = try renderNode(c, init); @@ -1661,17 +1663,17 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { defer c.gpa.free(inits); inits[0] = 0; for (payload.inits) |init, i| { - if (i != 0) _ = try c.addToken(.comma, ","); _ = try c.addToken(.period, "."); _ = try c.addIdentifier(init.name); _ = try c.addToken(.equal, "="); inits[i] = try renderNode(c, init.value); + _ = try c.addToken(.comma, ","); } _ = try c.addToken(.r_brace, "}"); if (payload.inits.len < 2) { return c.addNode(.{ - .tag = .struct_init_one, + .tag = .struct_init_one_comma, .main_token = l_brace, .data = .{ .lhs = lhs, @@ -1681,7 +1683,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { } else { const span = try c.listToSpan(inits); return c.addNode(.{ - .tag = .struct_init, + .tag = .struct_init_comma, .main_token = l_brace, .data = .{ .lhs = lhs, @@ -1791,13 +1793,13 @@ fn renderArrayInit(c: *Context, lhs: NodeIndex, inits: []const Node) !NodeIndex defer c.gpa.free(rendered); rendered[0] = 0; for (inits) |init, i| { - if (i != 0) _ = try c.addToken(.comma, ","); rendered[i] = try renderNode(c, init); + _ = try c.addToken(.comma, ","); } _ = try c.addToken(.r_brace, "}"); if (inits.len < 2) { 
return c.addNode(.{ - .tag = .array_init_one, + .tag = .array_init_one_comma, .main_token = l_brace, .data = .{ .lhs = lhs, @@ -1807,7 +1809,7 @@ fn renderArrayInit(c: *Context, lhs: NodeIndex, inits: []const Node) !NodeIndex } else { const span = try c.listToSpan(rendered); return c.addNode(.{ - .tag = .array_init, + .tag = .array_init_comma, .main_token = l_brace, .data = .{ .lhs = lhs, @@ -1842,25 +1844,32 @@ fn renderArrayType(c: *Context, len: usize, elem_type: Node) !NodeIndex { fn addSemicolonIfNeeded(c: *Context, node: Node) !void { switch (node.tag()) { .warning => unreachable, - .var_decl, .var_simple, .arg_redecl, .alias, .enum_redecl, .block, .empty_block, .@"switch" => {}, + .var_decl, .var_simple, .arg_redecl, .alias, .enum_redecl, .block, .empty_block, .block_single, .@"switch" => {}, .while_true => { const payload = node.castTag(.while_true).?.data; - return addSemicolonIfNeeded(c, payload); + return addSemicolonIfNotBlock(c, payload); }, .@"while" => { const payload = node.castTag(.@"while").?.data; - return addSemicolonIfNeeded(c, payload.body); + return addSemicolonIfNotBlock(c, payload.body); }, .@"if" => { const payload = node.castTag(.@"if").?.data; if (payload.@"else") |some| - return addSemicolonIfNeeded(c, some); - return addSemicolonIfNeeded(c, payload.then); + return addSemicolonIfNotBlock(c, some); + return addSemicolonIfNotBlock(c, payload.then); }, else => _ = try c.addToken(.semicolon, ";"), } } +fn addSemicolonIfNotBlock(c: *Context, node: Node) !void { + switch (node.tag()) { + .block, .empty_block, .block_single, => {}, + else => _ = try c.addToken(.semicolon, ";"), + } +} + fn renderNodeGrouped(c: *Context, node: Node) !NodeIndex { switch (node.tag()) { .null_literal, @@ -1918,6 +1927,7 @@ fn renderNodeGrouped(c: *Context, node: Node) !NodeIndex { .func, .call, .array_type, + .bool_to_int, => { // no grouping needed return renderNode(c, node); @@ -1926,7 +1936,6 @@ fn renderNodeGrouped(c: *Context, node: Node) !NodeIndex { 
.opaque_literal, .empty_array, .block_single, - .bool_to_int, .add, .add_wrap, .sub, @@ -2022,7 +2031,7 @@ fn renderPrefixOp(c: *Context, node: Node, tag: std.zig.ast.Node.Tag, tok_tag: T }); } -fn renderBinOp(c: *Context, node: Node, tag: std.zig.ast.Node.Tag, tok_tag: TokenTag, bytes: []const u8) !NodeIndex { +fn renderBinOpGrouped(c: *Context, node: Node, tag: std.zig.ast.Node.Tag, tok_tag: TokenTag, bytes: []const u8) !NodeIndex { const payload = @fieldParentPtr(Payload.BinOp, "base", node.ptr_otherwise).data; const lhs = try renderNodeGrouped(c, payload.lhs); return c.addNode(.{ @@ -2035,6 +2044,19 @@ fn renderBinOp(c: *Context, node: Node, tag: std.zig.ast.Node.Tag, tok_tag: Toke }); } +fn renderBinOp(c: *Context, node: Node, tag: std.zig.ast.Node.Tag, tok_tag: TokenTag, bytes: []const u8) !NodeIndex { + const payload = @fieldParentPtr(Payload.BinOp, "base", node.ptr_otherwise).data; + const lhs = try renderNode(c, payload.lhs); + return c.addNode(.{ + .tag = tag, + .main_token = try c.addToken(tok_tag, bytes), + .data = .{ + .lhs = lhs, + .rhs = try renderNode(c, payload.rhs), + }, + }); +} + fn renderStdImport(c: *Context, first: []const u8, second: []const u8) !NodeIndex { const import_tok = try c.addToken(.builtin, "@import"); _ = try c.addToken(.l_paren, "("); @@ -2143,6 +2165,7 @@ fn renderVar(c: *Context, node: Node) !NodeIndex { if (payload.is_pub) _ = try c.addToken(.keyword_pub, "pub"); if (payload.is_extern) _ = try c.addToken(.keyword_extern, "extern"); if (payload.is_export) _ = try c.addToken(.keyword_export, "export"); + if (payload.is_threadlocal) _ = try c.addToken(.keyword_threadlocal, "threadlocal"); const mut_tok = if (payload.is_const) try c.addToken(.keyword_const, "const") else diff --git a/test/translate_c.zig b/test/translate_c.zig index 95969a2f72..6cbdbe931c 100644 --- a/test/translate_c.zig +++ b/test/translate_c.zig @@ -3,6 +3,14 @@ const std = @import("std"); const CrossTarget = std.zig.CrossTarget; pub fn addCases(cases: 
*tests.TranslateCContext) void { + cases.add("use cast param as macro fn return type", + \\#define MEM_PHYSICAL_TO_K0(x) (void*)((u32)(x) + SYS_BASE_CACHED) + , &[_][]const u8{ + \\pub fn MEM_PHYSICAL_TO_K0(x: anytype) callconv(.Inline) ?*c_void { + \\ return @import("std").meta.cast(?*c_void, @import("std").meta.cast(u32, x) + SYS_BASE_CACHED); + \\} + }); + cases.add("variadic function demoted to prototype", \\int foo(int bar, ...) { \\ return 1; @@ -21,11 +29,8 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\ Foo *bar; \\} Bar; , &[_][]const u8{ - \\const struct_unnamed_1 = // - , - \\warning: unsupported type: 'Atomic' - \\ opaque {}; // - , + \\source.h:1:9: warning: struct demoted to opaque type - unable to translate type of field foo + \\const struct_unnamed_1 = opaque {}; \\pub const Foo = struct_unnamed_1; \\const struct_unnamed_2 = extern struct { \\ bar: ?*Foo, @@ -43,8 +48,8 @@ pub fn addCases(cases: *tests.TranslateCContext) void { , \\pub const VALUE = ((((1 + (2 * 3)) + (4 * 5)) + 6) << 7) | @boolToInt(8 == 9); , - \\pub fn _AL_READ3BYTES(p: anytype) callconv(.Inline) @TypeOf(((@import("std").meta.cast([*c]u8, p)).* | (((@import("std").meta.cast([*c]u8, p)) + 1).* << 8)) | (((@import("std").meta.cast([*c]u8, p)) + 2).* << 16)) { - \\ return ((@import("std").meta.cast([*c]u8, p)).* | (((@import("std").meta.cast([*c]u8, p)) + 1).* << 8)) | (((@import("std").meta.cast([*c]u8, p)) + 2).* << 16); + \\pub fn _AL_READ3BYTES(p: anytype) callconv(.Inline) @TypeOf((@import("std").meta.cast([*c]u8, p).* | ((@import("std").meta.cast([*c]u8, p) + 1).* << 8)) | ((@import("std").meta.cast([*c]u8, p) + 2).* << 16)) { + \\ return (@import("std").meta.cast([*c]u8, p).* | ((@import("std").meta.cast([*c]u8, p) + 1).* << 8)) | ((@import("std").meta.cast([*c]u8, p) + 2).* << 16); \\} }); @@ -107,7 +112,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\ int i1; \\} boom_t; \\#define FOO ((boom_t){1}) - , &[_][]const u8{ // TODO properly translate 
this + , &[_][]const u8{ \\pub const struct_Color = extern struct { \\ r: u8, \\ g: u8, @@ -127,7 +132,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\}; \\pub const boom_t = struct_boom_t; , - \\pub const FOO = @import("std").mem.zeroInit(boom_t, .{ 1 }); + \\pub const FOO = @import("std").mem.zeroInit(boom_t, .{1}); }); cases.add("complex switch", @@ -142,14 +147,34 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\ } \\} , &[_][]const u8{ // TODO properly translate this - \\pub const main = @compileError("unable to translate function"); + \\pub export fn main() c_int { + \\ var i: c_int = 2; + \\ @"switch": { + \\ case_1: { + \\ case: { + \\ switch (i) { + \\ @as(c_int, 0) => break :case, + \\ @as(c_int, 2) => break :case_1, + \\ else => break :@"switch", + \\ } + \\ } + \\ } + \\ { + \\ { + \\ i += @as(c_int, 2); + \\ } + \\ i += @as(c_int, 1); + \\ } + \\ } + \\ return 0; + \\} }); cases.add("correct semicolon after infixop", \\#define __ferror_unlocked_body(_fp) (((_fp)->_flags & _IO_ERR_SEEN) != 0) , &[_][]const u8{ - \\pub fn __ferror_unlocked_body(_fp: anytype) callconv(.Inline) @TypeOf(((_fp.*._flags) & _IO_ERR_SEEN) != 0) { - \\ return ((_fp.*._flags) & _IO_ERR_SEEN) != 0; + \\pub fn __ferror_unlocked_body(_fp: anytype) callconv(.Inline) @TypeOf((_fp.*._flags & _IO_ERR_SEEN) != 0) { + \\ return (_fp.*._flags & _IO_ERR_SEEN) != 0; \\} }); @@ -194,7 +219,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\ while (false) while (false) {}; \\ while (true) while (false) {}; \\ while (true) while (true) { - \\ if (!false) break; + \\ break; \\ }; \\} }); @@ -245,15 +270,10 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\ volatile _Atomic int abufused[12]; \\}; , &[_][]const u8{ - \\pub const struct_arcan_shmif_page = // - , - \\warning: unsupported type: 'Atomic' - \\ opaque {}; // - , - \\ warning: struct demoted to opaque type - unable to translate type of field abufused - , // TODO should be `addr: 
*struct_arcan_shmif_page` + \\source.h:4:8: warning: struct demoted to opaque type - unable to translate type of field abufused + \\pub const struct_arcan_shmif_page = opaque {}; \\pub const struct_arcan_shmif_cont = extern struct { - \\ addr: [*c]struct_arcan_shmif_page, + \\ addr: ?*struct_arcan_shmif_page, \\}; }); @@ -514,8 +534,8 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\ var a: c_int = undefined; \\ _ = @as(c_int, 1); \\ _ = "hey"; - \\ _ = (@as(c_int, 1) + @as(c_int, 1)); - \\ _ = (@as(c_int, 1) - @as(c_int, 1)); + \\ _ = @as(c_int, 1) + @as(c_int, 1); + \\ _ = @as(c_int, 1) - @as(c_int, 1); \\ a = 1; \\} }); @@ -634,9 +654,9 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\ var a: c_int = undefined; \\ var b: c_int = undefined; \\ var c: c_int = undefined; - \\ c = (a + b); - \\ c = (a - b); - \\ c = (a * b); + \\ c = a + b; + \\ c = a - b; + \\ c = a * b; \\ c = @divTrunc(a, b); \\ c = @rem(a, b); \\ return 0; @@ -645,11 +665,11 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\ var a: c_uint = undefined; \\ var b: c_uint = undefined; \\ var c: c_uint = undefined; - \\ c = (a +% b); - \\ c = (a -% b); - \\ c = (a *% b); - \\ c = (a / b); - \\ c = (a % b); + \\ c = a +% b; + \\ c = a -% b; + \\ c = a *% b; + \\ c = a / b; + \\ c = a % b; \\ return 0; \\} }); @@ -1639,7 +1659,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void { cases.add("macro pointer cast", \\#define NRF_GPIO ((NRF_GPIO_Type *) NRF_GPIO_BASE) , &[_][]const u8{ - \\pub const NRF_GPIO = (@import("std").meta.cast([*c]NRF_GPIO_Type, NRF_GPIO_BASE)); + \\pub const NRF_GPIO = @import("std").meta.cast([*c]NRF_GPIO_Type, NRF_GPIO_BASE); }); cases.add("basic macro function", @@ -1723,17 +1743,17 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\} , &[_][]const u8{ \\pub export fn foo() c_int { - \\ _ = (blk: { + \\ _ = blk: { \\ _ = @as(c_int, 2); \\ break :blk @as(c_int, 4); - \\ }); - \\ return (blk: { - \\ _ = (blk_1: { + \\ }; + \\ 
return blk: { + \\ _ = blk_1: { \\ _ = @as(c_int, 2); \\ break :blk_1 @as(c_int, 4); - \\ }); + \\ }; \\ break :blk @as(c_int, 6); - \\ }); + \\ }; \\} }); @@ -1780,20 +1800,16 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\ while (true) { \\ var a_1: c_int = 4; \\ a_1 = 9; - \\ return (blk: { + \\ return blk: { \\ _ = @as(c_int, 6); \\ break :blk a_1; - \\ }); + \\ }; \\ } \\ while (true) { \\ var a_1: c_int = 2; \\ a_1 = 12; - \\ if (!true) break; - \\ } - \\ while (true) { - \\ a = 7; - \\ if (!true) break; \\ } + \\ while (true) a = 7; \\ return 0; \\} }); @@ -1813,13 +1829,13 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\ var b: c_int = 4; \\ while ((i + @as(c_int, 2)) != 0) : (i = 2) { \\ var a: c_int = 2; - \\ _ = (blk: { - \\ _ = (blk_1: { + \\ _ = blk: { + \\ _ = blk_1: { \\ a = 6; \\ break :blk_1 @as(c_int, 5); - \\ }); + \\ }; \\ break :blk @as(c_int, 7); - \\ }); + \\ }; \\ } \\ } \\ var i: u8 = @bitCast(u8, @truncate(i8, @as(c_int, 2))); @@ -1854,7 +1870,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\} , &[_][]const u8{ \\pub export fn bar() c_int { - \\ if ((if (true) @as(c_int, 5) else (if (true) @as(c_int, 4) else @as(c_int, 6))) != 0) _ = @as(c_int, 2); + \\ if ((if (true) @as(c_int, 5) else if (true) @as(c_int, 4) else @as(c_int, 6)) != 0) _ = @as(c_int, 2); \\ return if (true) @as(c_int, 5) else if (true) @as(c_int, 4) else @as(c_int, 6); \\} }); @@ -1894,7 +1910,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\ } \\ res = 2; \\ } - \\ res = (@as(c_int, 3) * i); + \\ res = @as(c_int, 3) * i; \\ break :@"switch"; \\ } \\ res = 5; @@ -2043,12 +2059,12 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\pub export fn foo() void { \\ var a: c_int = 2; \\ while (true) { - \\ a = (a - @as(c_int, 1)); + \\ a = a - @as(c_int, 1); \\ if (!(a != 0)) break; \\ } \\ var b: c_int = 2; \\ while (true) { - \\ b = (b - @as(c_int, 1)); + \\ b = b - @as(c_int, 1); \\ if (!(b != 0)) break; \\ } \\} 
@@ -2078,6 +2094,9 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\ return ((((((((((e + f) + g) + h) + i) + j) + k) + l) + m) + o) + p); \\} , &[_][]const u8{ + \\pub const FooA = @enumToInt(enum_Foo.A); + \\pub const FooB = @enumToInt(enum_Foo.B); + \\pub const FooC = @enumToInt(enum_Foo.C); \\pub const enum_Foo = extern enum(c_int) { \\ A, \\ B, @@ -2090,19 +2109,19 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\ var b = arg_b; \\ var c = arg_c; \\ var d: enum_Foo = @intToEnum(enum_Foo, FooA); - \\ var e: c_int = @boolToInt(((a != 0) and (b != 0))); - \\ var f: c_int = @boolToInt(((b != 0) and (c != null))); - \\ var g: c_int = @boolToInt(((a != 0) and (c != null))); - \\ var h: c_int = @boolToInt(((a != 0) or (b != 0))); - \\ var i: c_int = @boolToInt(((b != 0) or (c != null))); - \\ var j: c_int = @boolToInt(((a != 0) or (c != null))); - \\ var k: c_int = @boolToInt(((a != 0) or (@bitCast(c_int, @enumToInt(d)) != 0))); - \\ var l: c_int = @boolToInt(((@bitCast(c_int, @enumToInt(d)) != 0) and (b != 0))); - \\ var m: c_int = @boolToInt(((c != null) or (@bitCast(c_uint, @enumToInt(d)) != 0))); + \\ var e: c_int = @boolToInt((a != 0) and (b != 0)); + \\ var f: c_int = @boolToInt((b != 0) and (c != null)); + \\ var g: c_int = @boolToInt((a != 0) and (c != null)); + \\ var h: c_int = @boolToInt((a != 0) or (b != 0)); + \\ var i: c_int = @boolToInt((b != 0) or (c != null)); + \\ var j: c_int = @boolToInt((a != 0) or (c != null)); + \\ var k: c_int = @boolToInt((a != 0) or (@bitCast(c_int, @enumToInt(d)) != 0)); + \\ var l: c_int = @boolToInt((@bitCast(c_int, @enumToInt(d)) != 0) and (b != 0)); + \\ var m: c_int = @boolToInt((c != null) or (@bitCast(c_uint, @enumToInt(d)) != 0)); \\ var td: SomeTypedef = 44; - \\ var o: c_int = @boolToInt(((td != 0) or (b != 0))); - \\ var p: c_int = @boolToInt(((c != null) and (td != 0))); - \\ return ((((((((((e + f) + g) + h) + i) + j) + k) + l) + m) + o) + p); + \\ var o: c_int = @boolToInt((td != 0) or 
(b != 0)); + \\ var p: c_int = @boolToInt((c != null) and (td != 0)); + \\ return (((((((((e + f) + g) + h) + i) + j) + k) + l) + m) + o) + p; \\} , \\pub const Foo = enum_Foo; @@ -2143,7 +2162,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\pub export fn max(arg_a: c_int, arg_b: c_int) c_int { \\ var a = arg_a; \\ var b = arg_b; - \\ return ((a & b) ^ (a | b)); + \\ return (a & b) ^ (a | b); \\} }); @@ -2162,13 +2181,13 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\pub export fn test_comparisons(arg_a: c_int, arg_b: c_int) c_int { \\ var a = arg_a; \\ var b = arg_b; - \\ var c: c_int = @boolToInt((a < b)); - \\ var d: c_int = @boolToInt((a > b)); - \\ var e: c_int = @boolToInt((a <= b)); - \\ var f: c_int = @boolToInt((a >= b)); - \\ var g: c_int = @boolToInt((c < d)); - \\ var h: c_int = @boolToInt((e < f)); - \\ var i: c_int = @boolToInt((g < h)); + \\ var c: c_int = @boolToInt(a < b); + \\ var d: c_int = @boolToInt(a > b); + \\ var e: c_int = @boolToInt(a <= b); + \\ var f: c_int = @boolToInt(a >= b); + \\ var g: c_int = @boolToInt(c < d); + \\ var h: c_int = @boolToInt(e < f); + \\ var i: c_int = @boolToInt(g < h); \\ return i; \\} }); @@ -2215,11 +2234,11 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\} , &[_][]const u8{ \\pub export fn foo() c_int { - \\ return (blk: { + \\ return blk: { \\ var a: c_int = 1; \\ _ = a; \\ break :blk a; - \\ }); + \\ }; \\} }); @@ -2371,9 +2390,9 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\ var a: c_int = 2; \\ } \\ if ((blk: { - \\ _ = @as(c_int, 2); - \\ break :blk @as(c_int, 5); - \\ }) != 0) { + \\ _ = @as(c_int, 2); + \\ break :blk @as(c_int, 5); + \\ }) != 0) { \\ var a: c_int = 2; \\ } \\} @@ -2484,10 +2503,10 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\ var f: ?fn () callconv(.C) void = foo; \\ var b: ?fn () callconv(.C) c_int = baz; \\ f.?(); - \\ (f).?(); + \\ f.?(); \\ foo(); \\ _ = b.?(); - \\ _ = (b).?(); + \\ _ = b.?(); \\ _ = baz(); 
\\} }); @@ -2513,26 +2532,26 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\ i -= 1; \\ u +%= 1; \\ u -%= 1; - \\ i = (blk: { + \\ i = blk: { \\ const ref = &i; \\ ref.* += 1; \\ break :blk ref.*; - \\ }); - \\ i = (blk: { + \\ }; + \\ i = blk: { \\ const ref = &i; \\ ref.* -= 1; \\ break :blk ref.*; - \\ }); - \\ u = (blk: { + \\ }; + \\ u = blk: { \\ const ref = &u; \\ ref.* +%= 1; \\ break :blk ref.*; - \\ }); - \\ u = (blk: { + \\ }; + \\ u = blk: { \\ const ref = &u; \\ ref.* -%= 1; \\ break :blk ref.*; - \\ }); + \\ }; \\} }); @@ -2596,66 +2615,66 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\pub export fn foo() void { \\ var a: c_int = 0; \\ var b: c_uint = @bitCast(c_uint, @as(c_int, 0)); - \\ a += (blk: { + \\ a += blk: { \\ const ref = &a; - \\ ref.* = ref.* + @as(c_int, 1); + \\ ref.* += @as(c_int, 1); + \\ break :blk ref.*; + \\ }; + \\ a -= blk: { + \\ const ref = &a; + \\ ref.* -= @as(c_int, 1); + \\ break :blk ref.*; + \\ }; + \\ a *= blk: { + \\ const ref = &a; + \\ ref.* *= @as(c_int, 1); + \\ break :blk ref.*; + \\ }; + \\ a &= blk: { + \\ const ref = &a; + \\ ref.* &= @as(c_int, 1); + \\ break :blk ref.*; + \\ }; + \\ a |= blk: { + \\ const ref = &a; + \\ ref.* |= @as(c_int, 1); + \\ break :blk ref.*; + \\ }; + \\ a ^= blk: { + \\ const ref = &a; + \\ ref.* ^= @as(c_int, 1); + \\ break :blk ref.*; + \\ }; + \\ a >>= @intCast(@import("std").math.Log2Int(c_int), blk: { + \\ const ref = &a; + \\ ref.* >>= @intCast(@import("std").math.Log2Int(c_int), @as(c_int, 1)); \\ break :blk ref.*; \\ }); - \\ a -= (blk: { + \\ a <<= @intCast(@import("std").math.Log2Int(c_int), blk: { \\ const ref = &a; - \\ ref.* = ref.* - @as(c_int, 1); + \\ ref.* <<= @intCast(@import("std").math.Log2Int(c_int), @as(c_int, 1)); \\ break :blk ref.*; \\ }); - \\ a *= (blk: { - \\ const ref = &a; - \\ ref.* = ref.* * @as(c_int, 1); - \\ break :blk ref.*; - \\ }); - \\ a &= (blk: { - \\ const ref = &a; - \\ ref.* = ref.* & @as(c_int, 1); - \\ break 
:blk ref.*; - \\ }); - \\ a |= (blk: { - \\ const ref = &a; - \\ ref.* = ref.* | @as(c_int, 1); - \\ break :blk ref.*; - \\ }); - \\ a ^= (blk: { - \\ const ref = &a; - \\ ref.* = ref.* ^ @as(c_int, 1); - \\ break :blk ref.*; - \\ }); - \\ a >>= @intCast(@import("std").math.Log2Int(c_int), (blk: { - \\ const ref = &a; - \\ ref.* = ref.* >> @intCast(@import("std").math.Log2Int(c_int), @as(c_int, 1)); - \\ break :blk ref.*; - \\ })); - \\ a <<= @intCast(@import("std").math.Log2Int(c_int), (blk: { - \\ const ref = &a; - \\ ref.* = ref.* << @intCast(@import("std").math.Log2Int(c_int), @as(c_int, 1)); - \\ break :blk ref.*; - \\ })); - \\ a = @divTrunc(a, (blk: { + \\ a = @divTrunc(a, blk: { \\ const ref = &a; \\ ref.* = @divTrunc(ref.*, @as(c_int, 1)); \\ break :blk ref.*; - \\ })); - \\ a = @rem(a, (blk: { + \\ }); + \\ a = @rem(a, blk: { \\ const ref = &a; \\ ref.* = @rem(ref.*, @as(c_int, 1)); \\ break :blk ref.*; - \\ })); - \\ b /= (blk: { - \\ const ref = &b; - \\ ref.* = ref.* / @bitCast(c_uint, @as(c_int, 1)); - \\ break :blk ref.*; \\ }); - \\ b %= (blk: { + \\ b /= blk: { \\ const ref = &b; - \\ ref.* = ref.* % @bitCast(c_uint, @as(c_int, 1)); + \\ ref.* /= @bitCast(c_uint, @as(c_int, 1)); \\ break :blk ref.*; - \\ }); + \\ }; + \\ b %= blk: { + \\ const ref = &b; + \\ ref.* %= @bitCast(c_uint, @as(c_int, 1)); + \\ break :blk ref.*; + \\ }; \\} }); @@ -2674,46 +2693,46 @@ pub fn addCases(cases: *tests.TranslateCContext) void { , &[_][]const u8{ \\pub export fn foo() void { \\ var a: c_uint = @bitCast(c_uint, @as(c_int, 0)); - \\ a +%= (blk: { + \\ a +%= blk: { \\ const ref = &a; - \\ ref.* = ref.* +% @bitCast(c_uint, @as(c_int, 1)); + \\ ref.* +%= @bitCast(c_uint, @as(c_int, 1)); + \\ break :blk ref.*; + \\ }; + \\ a -%= blk: { + \\ const ref = &a; + \\ ref.* -%= @bitCast(c_uint, @as(c_int, 1)); + \\ break :blk ref.*; + \\ }; + \\ a *%= blk: { + \\ const ref = &a; + \\ ref.* *%= @bitCast(c_uint, @as(c_int, 1)); + \\ break :blk ref.*; + \\ }; + \\ a &= blk: { 
+ \\ const ref = &a; + \\ ref.* &= @bitCast(c_uint, @as(c_int, 1)); + \\ break :blk ref.*; + \\ }; + \\ a |= blk: { + \\ const ref = &a; + \\ ref.* |= @bitCast(c_uint, @as(c_int, 1)); + \\ break :blk ref.*; + \\ }; + \\ a ^= blk: { + \\ const ref = &a; + \\ ref.* ^= @bitCast(c_uint, @as(c_int, 1)); + \\ break :blk ref.*; + \\ }; + \\ a >>= @intCast(@import("std").math.Log2Int(c_uint), blk: { + \\ const ref = &a; + \\ ref.* >>= @intCast(@import("std").math.Log2Int(c_int), @as(c_int, 1)); \\ break :blk ref.*; \\ }); - \\ a -%= (blk: { + \\ a <<= @intCast(@import("std").math.Log2Int(c_uint), blk: { \\ const ref = &a; - \\ ref.* = ref.* -% @bitCast(c_uint, @as(c_int, 1)); + \\ ref.* <<= @intCast(@import("std").math.Log2Int(c_int), @as(c_int, 1)); \\ break :blk ref.*; \\ }); - \\ a *%= (blk: { - \\ const ref = &a; - \\ ref.* = ref.* *% @bitCast(c_uint, @as(c_int, 1)); - \\ break :blk ref.*; - \\ }); - \\ a &= (blk: { - \\ const ref = &a; - \\ ref.* = ref.* & @bitCast(c_uint, @as(c_int, 1)); - \\ break :blk ref.*; - \\ }); - \\ a |= (blk: { - \\ const ref = &a; - \\ ref.* = ref.* | @bitCast(c_uint, @as(c_int, 1)); - \\ break :blk ref.*; - \\ }); - \\ a ^= (blk: { - \\ const ref = &a; - \\ ref.* = ref.* ^ @bitCast(c_uint, @as(c_int, 1)); - \\ break :blk ref.*; - \\ }); - \\ a >>= @intCast(@import("std").math.Log2Int(c_uint), (blk: { - \\ const ref = &a; - \\ ref.* = ref.* >> @intCast(@import("std").math.Log2Int(c_int), @as(c_int, 1)); - \\ break :blk ref.*; - \\ })); - \\ a <<= @intCast(@import("std").math.Log2Int(c_uint), (blk: { - \\ const ref = &a; - \\ ref.* = ref.* << @intCast(@import("std").math.Log2Int(c_int), @as(c_int, 1)); - \\ break :blk ref.*; - \\ })); \\} }); @@ -2738,30 +2757,30 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\ i -= 1; \\ u +%= 1; \\ u -%= 1; - \\ i = (blk: { + \\ i = blk: { \\ const ref = &i; \\ const tmp = ref.*; \\ ref.* += 1; \\ break :blk tmp; - \\ }); - \\ i = (blk: { + \\ }; + \\ i = blk: { \\ const ref = &i; \\ const tmp 
= ref.*; \\ ref.* -= 1; \\ break :blk tmp; - \\ }); - \\ u = (blk: { + \\ }; + \\ u = blk: { \\ const ref = &u; \\ const tmp = ref.*; \\ ref.* +%= 1; \\ break :blk tmp; - \\ }); - \\ u = (blk: { + \\ }; + \\ u = blk: { \\ const ref = &u; \\ const tmp = ref.*; \\ ref.* -%= 1; \\ break :blk tmp; - \\ }); + \\ }; \\} }); @@ -2872,13 +2891,13 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\#define BAR (void*) a \\#define BAZ (uint32_t)(2) , &[_][]const u8{ - \\pub fn FOO(bar: anytype) callconv(.Inline) @TypeOf(baz((@import("std").meta.cast(?*c_void, baz)))) { - \\ return baz((@import("std").meta.cast(?*c_void, baz))); + \\pub fn FOO(bar: anytype) callconv(.Inline) @TypeOf(baz(@import("std").meta.cast(?*c_void, baz))) { + \\ return baz(@import("std").meta.cast(?*c_void, baz)); \\} , - \\pub const BAR = (@import("std").meta.cast(?*c_void, a)); + \\pub const BAR = @import("std").meta.cast(?*c_void, a); , - \\pub const BAZ = (@import("std").meta.cast(u32, 2)); + \\pub const BAZ = @import("std").meta.cast(u32, 2); }); cases.add("macro with cast to unsigned short, long, and long long", @@ -2886,9 +2905,9 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\#define CURLAUTH_BASIC ((unsigned long) 1) \\#define CURLAUTH_BASIC_BUT_ULONGLONG ((unsigned long long) 1) , &[_][]const u8{ - \\pub const CURLAUTH_BASIC_BUT_USHORT = (@import("std").meta.cast(c_ushort, 1)); - \\pub const CURLAUTH_BASIC = (@import("std").meta.cast(c_ulong, 1)); - \\pub const CURLAUTH_BASIC_BUT_ULONGLONG = (@import("std").meta.cast(c_ulonglong, 1)); + \\pub const CURLAUTH_BASIC_BUT_USHORT = @import("std").meta.cast(c_ushort, 1); + \\pub const CURLAUTH_BASIC = @import("std").meta.cast(c_ulong, 1); + \\pub const CURLAUTH_BASIC_BUT_ULONGLONG = @import("std").meta.cast(c_ulonglong, 1); }); cases.add("macro conditional operator", @@ -2905,7 +2924,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void { , &[_][]const u8{ \\pub fn foo() callconv(.C) void { \\ if (true) while (true) { - \\ 
if (!false) break; + \\ break; \\ }; \\} }); @@ -2923,27 +2942,27 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\} }); - // TODO: detect to use different block labels here - cases.add("nested assignment", - \\int foo(int *p, int x) { - \\ return *p++ = x; - \\} - , &[_][]const u8{ - \\pub export fn foo(arg_p: [*c]c_int, arg_x: c_int) c_int { - \\ var p = arg_p; - \\ var x = arg_x; - \\ return blk: { - \\ const tmp = x; - \\ (blk_1: { - \\ const ref = &p; - \\ const tmp_2 = ref.*; - \\ ref.* += 1; - \\ break :blk_1 tmp_2; - \\ }).?.* = tmp; - \\ break :blk tmp; - \\ }; - \\} - }); + // TODO fix zig fmt here + // cases.add("nested assignment", + // \\int foo(int *p, int x) { + // \\ return *p++ = x; + // \\} + // , &[_][]const u8{ + // \\pub export fn foo(arg_p: [*c]c_int, arg_x: c_int) c_int { + // \\ var p = arg_p; + // \\ var x = arg_x; + // \\ return blk: { + // \\ const tmp = x; + // \\ (blk_1: { + // \\ const ref = &p; + // \\ const tmp_2 = ref.*; + // \\ ref.* += 1; + // \\ break :blk_1 tmp_2; + // \\ }).?.* = tmp; + // \\ break :blk tmp; + // \\ }; + // \\} + // }); cases.add("widening and truncating integer casting to different signedness", \\unsigned long foo(void) { @@ -3033,10 +3052,10 @@ pub fn addCases(cases: *tests.TranslateCContext) void { , &[_][]const u8{ \\pub export fn foo(arg_x: bool) bool { \\ var x = arg_x; - \\ var a: bool = (@as(c_int, @boolToInt(x)) != @as(c_int, 1)); - \\ var b: bool = (@as(c_int, @boolToInt(a)) != @as(c_int, 0)); + \\ var a: bool = @as(c_int, @boolToInt(x)) != @as(c_int, 1); + \\ var b: bool = @as(c_int, @boolToInt(a)) != @as(c_int, 0); \\ var c: bool = @ptrToInt(foo) != 0; - \\ return foo((@as(c_int, @boolToInt(c)) != @as(c_int, @boolToInt(b)))); + \\ return foo(@as(c_int, @boolToInt(c)) != @as(c_int, @boolToInt(b))); \\} }); @@ -3106,8 +3125,8 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\#define DefaultScreen(dpy) (((_XPrivDisplay)(dpy))->default_screen) \\ , &[_][]const u8{ - \\pub fn 
DefaultScreen(dpy: anytype) callconv(.Inline) @TypeOf((@import("std").meta.cast(_XPrivDisplay, dpy)).*.default_screen) { - \\ return (@import("std").meta.cast(_XPrivDisplay, dpy)).*.default_screen; + \\pub fn DefaultScreen(dpy: anytype) callconv(.Inline) @TypeOf(@import("std").meta.cast(_XPrivDisplay, dpy).*.default_screen) { + \\ return @import("std").meta.cast(_XPrivDisplay, dpy).*.default_screen; \\} }); @@ -3115,9 +3134,9 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\#define NULL ((void*)0) \\#define FOO ((int)0x8000) , &[_][]const u8{ - \\pub const NULL = (@import("std").meta.cast(?*c_void, 0)); + \\pub const NULL = @import("std").meta.cast(?*c_void, 0); , - \\pub const FOO = (@import("std").meta.cast(c_int, 0x8000)); + \\pub const FOO = @import("std").meta.cast(c_int, 0x8000); }); if (std.Target.current.abi == .msvc) { From 9a826ccbe0b10046e5dd7c482e9e87912c0fa95c Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Tue, 16 Feb 2021 16:34:37 +0200 Subject: [PATCH 091/173] translate-c: elide some unecessary casts of literals --- src/clang.zig | 5 ++- src/translate_c.zig | 52 +++++++++++++++++++++--- src/zig_clang.cpp | 5 +++ src/zig_clang.h | 1 + test/translate_c.zig | 95 ++++++++++++++++++++++---------------------- 5 files changed, 104 insertions(+), 54 deletions(-) diff --git a/src/clang.zig b/src/clang.zig index 954cfee6b2..fbb955205b 100644 --- a/src/clang.zig +++ b/src/clang.zig @@ -127,6 +127,9 @@ pub const APSInt = opaque { pub const getNumWords = ZigClangAPSInt_getNumWords; extern fn ZigClangAPSInt_getNumWords(*const APSInt) c_uint; + + pub const lessThanEqual = ZigClangAPSInt_lessThanEqual; + extern fn ZigClangAPSInt_lessThanEqual(*const APSInt, rhs: u64) bool; }; pub const ASTContext = opaque { @@ -407,7 +410,7 @@ pub const Expr = opaque { pub const getBeginLoc = ZigClangExpr_getBeginLoc; extern fn ZigClangExpr_getBeginLoc(*const Expr) SourceLocation; - pub const EvaluateAsConstantExpr = ZigClangExpr_EvaluateAsConstantExpr; + pub const 
evaluateAsConstantExpr = ZigClangExpr_EvaluateAsConstantExpr; extern fn ZigClangExpr_EvaluateAsConstantExpr(*const Expr, *ExprEvalResult, Expr_ConstExprUsage, *const ASTContext) bool; }; diff --git a/src/translate_c.zig b/src/translate_c.zig index fbd0ec6245..1fcac233fc 100644 --- a/src/translate_c.zig +++ b/src/translate_c.zig @@ -1820,11 +1820,55 @@ fn transExprCoercing(c: *Context, scope: *Scope, expr: *const clang.Expr, used: return transExprCoercing(c, scope, un_expr.getSubExpr(), used); } }, + .ImplicitCastExprClass => { + const cast_expr = @ptrCast(*const clang.ImplicitCastExpr, expr); + const sub_expr = cast_expr.getSubExpr(); + switch (@ptrCast(*const clang.Stmt, sub_expr).getStmtClass()) { + .IntegerLiteralClass, .CharacterLiteralClass => switch (cast_expr.getCastKind()) { + .IntegralToFloating => return transExprCoercing(c, scope, sub_expr, used), + .IntegralCast => { + const dest_type = getExprQualType(c, expr); + if (literalFitsInType(c, sub_expr, dest_type)) + return transExprCoercing(c, scope, sub_expr, used); + }, + else => {}, + }, + else => {}, + } + }, else => {}, } return transExpr(c, scope, expr, .used); } +fn literalFitsInType(c: *Context, expr: *const clang.Expr, qt: clang.QualType) bool { + var width = qualTypeIntBitWidth(c, qt) catch 8; + if (width == 0) width = 8; // Byte is the smallest type. + const is_signed = cIsSignedInteger(qt); + const width_max_int= (@as(u64, 1) << math.lossyCast(u6, width - @boolToInt(is_signed))) - 1; + + switch (@ptrCast(*const clang.Stmt, expr).getStmtClass()) { + .CharacterLiteralClass => { + const char_lit = @ptrCast(*const clang.CharacterLiteral, expr); + const val = char_lit.getValue(); + // If the val is less than the max int then it fits. 
+ return val <= width_max_int; + }, + .IntegerLiteralClass => { + const int_lit = @ptrCast(*const clang.IntegerLiteral, expr); + var eval_result: clang.ExprEvalResult = undefined; + if (!int_lit.EvaluateAsInt(&eval_result, c.clang_context)) { + return false; + } + + const int = eval_result.Val.getInt(); + return int.lessThanEqual(width_max_int); + }, + else => unreachable, + } + +} + fn transInitListExprRecord( c: *Context, scope: *Scope, @@ -2331,7 +2375,7 @@ fn transDefault( fn transConstantExpr(c: *Context, scope: *Scope, expr: *const clang.Expr, used: ResultUsed) TransError!Node { var result: clang.ExprEvalResult = undefined; - if (!expr.EvaluateAsConstantExpr(&result, .EvaluateForCodeGen, c.clang_context)) + if (!expr.evaluateAsConstantExpr(&result, .EvaluateForCodeGen, c.clang_context)) return fail(c, error.UnsupportedTranslation, expr.getBeginLoc(), "invalid constant expression", .{}); switch (result.Val.getKind()) { @@ -3171,7 +3215,7 @@ fn qualTypeIsBoolean(qt: clang.QualType) bool { return qualTypeCanon(qt).isBooleanType(); } -fn qualTypeIntBitWidth(c: *Context, qt: clang.QualType, source_loc: clang.SourceLocation) !u32 { +fn qualTypeIntBitWidth(c: *Context, qt: clang.QualType) !u32 { const ty = qt.getTypePtr(); switch (ty.getTypeClass()) { @@ -3211,12 +3255,10 @@ fn qualTypeIntBitWidth(c: *Context, qt: clang.QualType, source_loc: clang.Source }, else => return 0, } - - unreachable; } fn qualTypeToLog2IntRef(c: *Context, qt: clang.QualType, source_loc: clang.SourceLocation) !Node { - const int_bit_width = try qualTypeIntBitWidth(c, qt, source_loc); + const int_bit_width = try qualTypeIntBitWidth(c, qt); if (int_bit_width != 0) { // we can perform the log2 now. 
diff --git a/src/zig_clang.cpp b/src/zig_clang.cpp index 8dc6a0823b..d9e5e527ac 100644 --- a/src/zig_clang.cpp +++ b/src/zig_clang.cpp @@ -2244,6 +2244,11 @@ unsigned ZigClangAPSInt_getNumWords(const ZigClangAPSInt *self) { return casted->getNumWords(); } +bool ZigClangAPSInt_lessThanEqual(const ZigClangAPSInt *self, uint64_t rhs) { + auto casted = reinterpret_cast(self); + return casted->ule(rhs); +} + uint64_t ZigClangAPInt_getLimitedValue(const ZigClangAPInt *self, uint64_t limit) { auto casted = reinterpret_cast(self); return casted->getLimitedValue(limit); diff --git a/src/zig_clang.h b/src/zig_clang.h index 6fe1da0bc1..a697c58b4f 100644 --- a/src/zig_clang.h +++ b/src/zig_clang.h @@ -1097,6 +1097,7 @@ ZIG_EXTERN_C const struct ZigClangAPSInt *ZigClangAPSInt_negate(const struct Zig ZIG_EXTERN_C void ZigClangAPSInt_free(const struct ZigClangAPSInt *self); ZIG_EXTERN_C const uint64_t *ZigClangAPSInt_getRawData(const struct ZigClangAPSInt *self); ZIG_EXTERN_C unsigned ZigClangAPSInt_getNumWords(const struct ZigClangAPSInt *self); +ZIG_EXTERN_C bool ZigClangAPSInt_lessThanEqual(const struct ZigClangAPSInt *self, uint64_t rhs); ZIG_EXTERN_C uint64_t ZigClangAPInt_getLimitedValue(const struct ZigClangAPInt *self, uint64_t limit); diff --git a/test/translate_c.zig b/test/translate_c.zig index 6cbdbe931c..dda0e45144 100644 --- a/test/translate_c.zig +++ b/test/translate_c.zig @@ -313,22 +313,22 @@ pub fn addCases(cases: *tests.TranslateCContext) void { , &[_][]const u8{ \\pub const uuid_t = [16]u8; \\pub const UUID_NULL: uuid_t = [16]u8{ - \\ @bitCast(u8, @truncate(i8, @as(c_int, 0))), - \\ @bitCast(u8, @truncate(i8, @as(c_int, 0))), - \\ @bitCast(u8, @truncate(i8, @as(c_int, 0))), - \\ @bitCast(u8, @truncate(i8, @as(c_int, 0))), - \\ @bitCast(u8, @truncate(i8, @as(c_int, 0))), - \\ @bitCast(u8, @truncate(i8, @as(c_int, 0))), - \\ @bitCast(u8, @truncate(i8, @as(c_int, 0))), - \\ @bitCast(u8, @truncate(i8, @as(c_int, 0))), - \\ @bitCast(u8, @truncate(i8, @as(c_int, 
0))), - \\ @bitCast(u8, @truncate(i8, @as(c_int, 0))), - \\ @bitCast(u8, @truncate(i8, @as(c_int, 0))), - \\ @bitCast(u8, @truncate(i8, @as(c_int, 0))), - \\ @bitCast(u8, @truncate(i8, @as(c_int, 0))), - \\ @bitCast(u8, @truncate(i8, @as(c_int, 0))), - \\ @bitCast(u8, @truncate(i8, @as(c_int, 0))), - \\ @bitCast(u8, @truncate(i8, @as(c_int, 0))), + \\ 0, + \\ 0, + \\ 0, + \\ 0, + \\ 0, + \\ 0, + \\ 0, + \\ 0, + \\ 0, + \\ 0, + \\ 0, + \\ 0, + \\ 0, + \\ 0, + \\ 0, + \\ 0, \\}; }); @@ -382,10 +382,10 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\}; \\pub export var ub: union_unnamed_1 = union_unnamed_1{ \\ .c = [4]u8{ - \\ @bitCast(u8, @truncate(i8, @as(c_int, 'a'))), - \\ @bitCast(u8, @truncate(i8, @as(c_int, 'b'))), - \\ @bitCast(u8, @truncate(i8, @as(c_int, 'b'))), - \\ @bitCast(u8, @truncate(i8, @as(c_int, 'a'))), + \\ 'a', + \\ 'b', + \\ 'b', + \\ 'a', \\ }, \\}; }); @@ -512,7 +512,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void { , &[_][]const u8{ \\pub export fn foo() void { \\ var a: c_int = undefined; - \\ var b: u8 = @bitCast(u8, @truncate(i8, @as(c_int, 123))); + \\ var b: u8 = 123; \\ const c: c_int = undefined; \\ const d: c_uint = @bitCast(c_uint, @as(c_int, 440)); \\ var e: c_int = 10; @@ -1468,7 +1468,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void { , &[_][]const u8{ \\pub fn foo() callconv(.C) void { \\ var arr: [10]u8 = [1]u8{ - \\ @bitCast(u8, @truncate(i8, @as(c_int, 1))), + \\ 1, \\ } ++ [1]u8{0} ** 9; \\ var arr1: [10][*c]u8 = [1][*c]u8{ \\ null, @@ -1721,13 +1721,13 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\ unsigned d = 440; \\} , &[_][]const u8{ - \\pub var a: c_long = @bitCast(c_long, @as(c_long, @as(c_int, 2))); - \\pub var b: c_long = @bitCast(c_long, @as(c_long, @as(c_int, 2))); + \\pub var a: c_long = 2; + \\pub var b: c_long = 2; \\pub var c: c_int = 4; \\pub export fn foo(arg_c_1: u8) void { \\ var c_1 = arg_c_1; \\ var a_2: c_int = undefined; - \\ var b_3: u8 = @bitCast(u8, 
@truncate(i8, @as(c_int, 123))); + \\ var b_3: u8 = 123; \\ b_3 = @bitCast(u8, @truncate(i8, a_2)); \\ { \\ var d: c_int = 5; @@ -1838,7 +1838,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\ }; \\ } \\ } - \\ var i: u8 = @bitCast(u8, @truncate(i8, @as(c_int, 2))); + \\ var i: u8 = 2; \\} }); @@ -1846,7 +1846,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\unsigned anyerror = 2; \\#define noreturn _Noreturn , &[_][]const u8{ - \\pub export var anyerror_1: c_uint = @bitCast(c_uint, @as(c_int, 2)); + \\pub export var anyerror_1: c_uint = 2; , \\pub const noreturn_2 = @compileError("unable to translate C expr: unexpected token .Keyword_noreturn"); }); @@ -1860,7 +1860,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\pub export var a: f32 = @floatCast(f32, 3.1415); \\pub export var b: f64 = 3.1415; \\pub export var c: c_int = @floatToInt(c_int, 3.1415); - \\pub export var d: f64 = @intToFloat(f64, @as(c_int, 3)); + \\pub export var d: f64 = 3; }); cases.add("conditional operator", @@ -2009,7 +2009,6 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\} }); - // TODO translate-c should in theory be able to figure out to drop all these casts cases.add("escape sequences", \\const char *escapes() { \\char a = '\'', @@ -2028,17 +2027,17 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\ , &[_][]const u8{ \\pub export fn escapes() [*c]const u8 { - \\ var a: u8 = @bitCast(u8, @truncate(i8, @as(c_int, '\''))); - \\ var b: u8 = @bitCast(u8, @truncate(i8, @as(c_int, '\\'))); - \\ var c: u8 = @bitCast(u8, @truncate(i8, @as(c_int, '\x07'))); - \\ var d: u8 = @bitCast(u8, @truncate(i8, @as(c_int, '\x08'))); - \\ var e: u8 = @bitCast(u8, @truncate(i8, @as(c_int, '\x0c'))); - \\ var f: u8 = @bitCast(u8, @truncate(i8, @as(c_int, '\n'))); - \\ var g: u8 = @bitCast(u8, @truncate(i8, @as(c_int, '\r'))); - \\ var h: u8 = @bitCast(u8, @truncate(i8, @as(c_int, '\t'))); - \\ var i: u8 = @bitCast(u8, @truncate(i8, @as(c_int, 
'\x0b'))); - \\ var j: u8 = @bitCast(u8, @truncate(i8, @as(c_int, '\x00'))); - \\ var k: u8 = @bitCast(u8, @truncate(i8, @as(c_int, '\"'))); + \\ var a: u8 = '\''; + \\ var b: u8 = '\\'; + \\ var c: u8 = '\x07'; + \\ var d: u8 = '\x08'; + \\ var e: u8 = '\x0c'; + \\ var f: u8 = '\n'; + \\ var g: u8 = '\r'; + \\ var h: u8 = '\t'; + \\ var i: u8 = '\x0b'; + \\ var j: u8 = '\x00'; + \\ var k: u8 = '\"'; \\ return "\'\\\x07\x08\x0c\n\r\t\x0b\x00\""; \\} }); @@ -2308,8 +2307,8 @@ pub fn addCases(cases: *tests.TranslateCContext) void { , &[_][]const u8{ \\pub export fn foo() void { \\ var a: [10]c_longlong = undefined; - \\ var i: c_longlong = @bitCast(c_longlong, @as(c_longlong, @as(c_int, 0))); - \\ a[@intCast(usize, i)] = @bitCast(c_longlong, @as(c_longlong, @as(c_int, 0))); + \\ var i: c_longlong = 0; + \\ a[@intCast(usize, i)] = 0; \\} }); @@ -2321,8 +2320,8 @@ pub fn addCases(cases: *tests.TranslateCContext) void { , &[_][]const u8{ \\pub export fn foo() void { \\ var a: [10]c_uint = undefined; - \\ var i: c_uint = @bitCast(c_uint, @as(c_int, 0)); - \\ a[i] = @bitCast(c_uint, @as(c_int, 0)); + \\ var i: c_uint = 0; + \\ a[i] = 0; \\} }); @@ -2527,7 +2526,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void { , &[_][]const u8{ \\pub export fn foo() void { \\ var i: c_int = 0; - \\ var u: c_uint = @bitCast(c_uint, @as(c_int, 0)); + \\ var u: c_uint = 0; \\ i += 1; \\ i -= 1; \\ u +%= 1; @@ -2614,7 +2613,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void { , &[_][]const u8{ \\pub export fn foo() void { \\ var a: c_int = 0; - \\ var b: c_uint = @bitCast(c_uint, @as(c_int, 0)); + \\ var b: c_uint = 0; \\ a += blk: { \\ const ref = &a; \\ ref.* += @as(c_int, 1); @@ -2692,7 +2691,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\} , &[_][]const u8{ \\pub export fn foo() void { - \\ var a: c_uint = @bitCast(c_uint, @as(c_int, 0)); + \\ var a: c_uint = 0; \\ a +%= blk: { \\ const ref = &a; \\ ref.* +%= @bitCast(c_uint, @as(c_int, 1)); @@ -2752,7 
+2751,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void { , &[_][]const u8{ \\pub export fn foo() void { \\ var i: c_int = 0; - \\ var u: c_uint = @bitCast(c_uint, @as(c_int, 0)); + \\ var u: c_uint = 0; \\ i += 1; \\ i -= 1; \\ u +%= 1; From 070e548acf8b5cb22459b779ce771b42157f49f7 Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Tue, 16 Feb 2021 22:06:35 +0100 Subject: [PATCH 092/173] std: remove io.AutoIndentingStream This type is not widely applicable enough to be a public part of the public interface of the std. The current implementation in only fully utilized by the zig fmt implementation, which could benefit by even tighter integration as will be demonstrated in the next commit. Therefore, move the current io.AutoIndentingStream to lib/std/zig/render.zig. The C backend of the self hosted compiler also use this type currently, but it does not require anywhere near its full complexity. Therefore, implement a greatly simplified version of this interface in src/codegen/c.zig. 
--- CMakeLists.txt | 1 - lib/std/io.zig | 3 - lib/std/io/auto_indenting_stream.zig | 154 --------------------------- lib/std/zig/render.zig | 142 +++++++++++++++++++++++- src/codegen/c.zig | 58 +++++++++- src/link/C.zig | 2 +- 6 files changed, 196 insertions(+), 164 deletions(-) delete mode 100644 lib/std/io/auto_indenting_stream.zig diff --git a/CMakeLists.txt b/CMakeLists.txt index a0c3ae84fa..c203a493c1 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -370,7 +370,6 @@ set(ZIG_STAGE2_SOURCES "${CMAKE_SOURCE_DIR}/lib/std/heap.zig" "${CMAKE_SOURCE_DIR}/lib/std/heap/arena_allocator.zig" "${CMAKE_SOURCE_DIR}/lib/std/io.zig" - "${CMAKE_SOURCE_DIR}/lib/std/io/auto_indenting_stream.zig" "${CMAKE_SOURCE_DIR}/lib/std/io/buffered_atomic_file.zig" "${CMAKE_SOURCE_DIR}/lib/std/io/buffered_writer.zig" "${CMAKE_SOURCE_DIR}/lib/std/io/change_detection_stream.zig" diff --git a/lib/std/io.zig b/lib/std/io.zig index 240faaa452..b529c57866 100644 --- a/lib/std/io.zig +++ b/lib/std/io.zig @@ -142,9 +142,6 @@ pub const bitReader = @import("io/bit_reader.zig").bitReader; pub const BitWriter = @import("io/bit_writer.zig").BitWriter; pub const bitWriter = @import("io/bit_writer.zig").bitWriter; -pub const AutoIndentingStream = @import("io/auto_indenting_stream.zig").AutoIndentingStream; -pub const autoIndentingStream = @import("io/auto_indenting_stream.zig").autoIndentingStream; - pub const ChangeDetectionStream = @import("io/change_detection_stream.zig").ChangeDetectionStream; pub const changeDetectionStream = @import("io/change_detection_stream.zig").changeDetectionStream; diff --git a/lib/std/io/auto_indenting_stream.zig b/lib/std/io/auto_indenting_stream.zig deleted file mode 100644 index 8f8b981b9b..0000000000 --- a/lib/std/io/auto_indenting_stream.zig +++ /dev/null @@ -1,154 +0,0 @@ -// SPDX-License-Identifier: MIT -// Copyright (c) 2015-2021 Zig Contributors -// This file is part of [zig](https://ziglang.org/), which is MIT licensed. 
-// The MIT license requires this copyright notice to be included in all copies -// and substantial portions of the software. - -const std = @import("../std.zig"); -const io = std.io; -const mem = std.mem; -const assert = std.debug.assert; - -/// Automatically inserts indentation of written data by keeping -/// track of the current indentation level -pub fn AutoIndentingStream(comptime UnderlyingWriter: type) type { - return struct { - const Self = @This(); - pub const Error = UnderlyingWriter.Error; - pub const Writer = io.Writer(*Self, Error, write); - - underlying_writer: UnderlyingWriter, - - indent_count: usize = 0, - indent_delta: usize, - current_line_empty: bool = true, - indent_one_shot_count: usize = 0, // automatically popped when applied - applied_indent: usize = 0, // the most recently applied indent - indent_next_line: usize = 0, // not used until the next line - - pub fn writer(self: *Self) Writer { - return .{ .context = self }; - } - - pub fn write(self: *Self, bytes: []const u8) Error!usize { - if (bytes.len == 0) - return @as(usize, 0); - - try self.applyIndent(); - return self.writeNoIndent(bytes); - } - - // Change the indent delta without changing the final indentation level - pub fn setIndentDelta(self: *Self, indent_delta: usize) void { - if (self.indent_delta == indent_delta) { - return; - } else if (self.indent_delta > indent_delta) { - assert(self.indent_delta % indent_delta == 0); - self.indent_count = self.indent_count * (self.indent_delta / indent_delta); - } else { - // assert that the current indentation (in spaces) in a multiple of the new delta - assert((self.indent_count * self.indent_delta) % indent_delta == 0); - self.indent_count = self.indent_count / (indent_delta / self.indent_delta); - } - self.indent_delta = indent_delta; - } - - fn writeNoIndent(self: *Self, bytes: []const u8) Error!usize { - if (bytes.len == 0) - return @as(usize, 0); - - try self.underlying_writer.writeAll(bytes); - if (bytes[bytes.len - 1] == '\n') - 
self.resetLine(); - return bytes.len; - } - - pub fn insertNewline(self: *Self) Error!void { - _ = try self.writeNoIndent("\n"); - } - - fn resetLine(self: *Self) void { - self.current_line_empty = true; - self.indent_next_line = 0; - } - - /// Insert a newline unless the current line is blank - pub fn maybeInsertNewline(self: *Self) Error!void { - if (!self.current_line_empty) - try self.insertNewline(); - } - - /// Push default indentation - pub fn pushIndent(self: *Self) void { - // Doesn't actually write any indentation. - // Just primes the stream to be able to write the correct indentation if it needs to. - self.indent_count += 1; - } - - /// Push an indent that is automatically popped after being applied - pub fn pushIndentOneShot(self: *Self) void { - self.indent_one_shot_count += 1; - self.pushIndent(); - } - - /// Turns all one-shot indents into regular indents - /// Returns number of indents that must now be manually popped - pub fn lockOneShotIndent(self: *Self) usize { - var locked_count = self.indent_one_shot_count; - self.indent_one_shot_count = 0; - return locked_count; - } - - /// Push an indent that should not take effect until the next line - pub fn pushIndentNextLine(self: *Self) void { - self.indent_next_line += 1; - self.pushIndent(); - } - - pub fn popIndent(self: *Self) void { - assert(self.indent_count != 0); - self.indent_count -= 1; - - if (self.indent_next_line > 0) - self.indent_next_line -= 1; - } - - /// Writes ' ' bytes if the current line is empty - fn applyIndent(self: *Self) Error!void { - const current_indent = self.currentIndent(); - if (self.current_line_empty and current_indent > 0) { - try self.underlying_writer.writeByteNTimes(' ', current_indent); - self.applied_indent = current_indent; - } - - self.indent_count -= self.indent_one_shot_count; - self.indent_one_shot_count = 0; - self.current_line_empty = false; - } - - /// Checks to see if the most recent indentation exceeds the currently pushed indents - pub fn 
isLineOverIndented(self: *Self) bool { - if (self.current_line_empty) return false; - return self.applied_indent > self.currentIndent(); - } - - fn currentIndent(self: *Self) usize { - var indent_current: usize = 0; - if (self.indent_count > 0) { - const indent_count = self.indent_count - self.indent_next_line; - indent_current = indent_count * self.indent_delta; - } - return indent_current; - } - }; -} - -pub fn autoIndentingStream( - indent_delta: usize, - underlying_writer: anytype, -) AutoIndentingStream(@TypeOf(underlying_writer)) { - return AutoIndentingStream(@TypeOf(underlying_writer)){ - .underlying_writer = underlying_writer, - .indent_delta = indent_delta, - }; -} diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 2d9c2ae9a9..e668c4d64d 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -15,12 +15,14 @@ const asm_indent_delta = 2; pub const Error = ast.Tree.RenderError; -const Writer = std.ArrayList(u8).Writer; -const Ais = std.io.AutoIndentingStream(Writer); +const Ais = AutoIndentingStream(std.ArrayList(u8).Writer); pub fn renderTree(buffer: *std.ArrayList(u8), tree: ast.Tree) Error!void { assert(tree.errors.len == 0); // Cannot render an invalid tree. - var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, buffer.writer()); + var auto_indenting_stream = Ais{ + .indent_delta = indent_delta, + .underlying_writer = buffer.writer(), + }; const ais = &auto_indenting_stream; // Render all the line comments at the beginning of the file. 
@@ -2132,3 +2134,137 @@ fn nodeCausesSliceOpSpace(tag: ast.Node.Tag) bool { else => false, }; } + +/// Automatically inserts indentation of written data by keeping +/// track of the current indentation level +fn AutoIndentingStream(comptime UnderlyingWriter: type) type { + return struct { + const Self = @This(); + pub const Error = UnderlyingWriter.Error; + pub const Writer = std.io.Writer(*Self, Error, write); + + underlying_writer: UnderlyingWriter, + + indent_count: usize = 0, + indent_delta: usize, + current_line_empty: bool = true, + indent_one_shot_count: usize = 0, // automatically popped when applied + applied_indent: usize = 0, // the most recently applied indent + indent_next_line: usize = 0, // not used until the next line + + pub fn writer(self: *Self) Writer { + return .{ .context = self }; + } + + pub fn write(self: *Self, bytes: []const u8) Error!usize { + if (bytes.len == 0) + return @as(usize, 0); + + try self.applyIndent(); + return self.writeNoIndent(bytes); + } + + // Change the indent delta without changing the final indentation level + pub fn setIndentDelta(self: *Self, new_indent_delta: usize) void { + if (self.indent_delta == new_indent_delta) { + return; + } else if (self.indent_delta > new_indent_delta) { + assert(self.indent_delta % new_indent_delta == 0); + self.indent_count = self.indent_count * (self.indent_delta / new_indent_delta); + } else { + // assert that the current indentation (in spaces) in a multiple of the new delta + assert((self.indent_count * self.indent_delta) % new_indent_delta == 0); + self.indent_count = self.indent_count / (new_indent_delta / self.indent_delta); + } + self.indent_delta = new_indent_delta; + } + + fn writeNoIndent(self: *Self, bytes: []const u8) Error!usize { + if (bytes.len == 0) + return @as(usize, 0); + + try self.underlying_writer.writeAll(bytes); + if (bytes[bytes.len - 1] == '\n') + self.resetLine(); + return bytes.len; + } + + pub fn insertNewline(self: *Self) Error!void { + _ = try 
self.writeNoIndent("\n"); + } + + fn resetLine(self: *Self) void { + self.current_line_empty = true; + self.indent_next_line = 0; + } + + /// Insert a newline unless the current line is blank + pub fn maybeInsertNewline(self: *Self) Error!void { + if (!self.current_line_empty) + try self.insertNewline(); + } + + /// Push default indentation + pub fn pushIndent(self: *Self) void { + // Doesn't actually write any indentation. + // Just primes the stream to be able to write the correct indentation if it needs to. + self.indent_count += 1; + } + + /// Push an indent that is automatically popped after being applied + pub fn pushIndentOneShot(self: *Self) void { + self.indent_one_shot_count += 1; + self.pushIndent(); + } + + /// Turns all one-shot indents into regular indents + /// Returns number of indents that must now be manually popped + pub fn lockOneShotIndent(self: *Self) usize { + var locked_count = self.indent_one_shot_count; + self.indent_one_shot_count = 0; + return locked_count; + } + + /// Push an indent that should not take effect until the next line + pub fn pushIndentNextLine(self: *Self) void { + self.indent_next_line += 1; + self.pushIndent(); + } + + pub fn popIndent(self: *Self) void { + assert(self.indent_count != 0); + self.indent_count -= 1; + + if (self.indent_next_line > 0) + self.indent_next_line -= 1; + } + + /// Writes ' ' bytes if the current line is empty + fn applyIndent(self: *Self) Error!void { + const current_indent = self.currentIndent(); + if (self.current_line_empty and current_indent > 0) { + try self.underlying_writer.writeByteNTimes(' ', current_indent); + self.applied_indent = current_indent; + } + + self.indent_count -= self.indent_one_shot_count; + self.indent_one_shot_count = 0; + self.current_line_empty = false; + } + + /// Checks to see if the most recent indentation exceeds the currently pushed indents + pub fn isLineOverIndented(self: *Self) bool { + if (self.current_line_empty) return false; + return self.applied_indent > 
self.currentIndent(); + } + + fn currentIndent(self: *Self) usize { + var indent_current: usize = 0; + if (self.indent_count > 0) { + const indent_count = self.indent_count - self.indent_next_line; + indent_current = indent_count * self.indent_delta; + } + return indent_current; + } + }; +} diff --git a/src/codegen/c.zig b/src/codegen/c.zig index cb3271a57f..d8c81ad0e4 100644 --- a/src/codegen/c.zig +++ b/src/codegen/c.zig @@ -1,4 +1,5 @@ const std = @import("std"); +const assert = std.debug.assert; const mem = std.mem; const log = std.log.scoped(.c); @@ -42,7 +43,7 @@ pub const Object = struct { next_arg_index: usize = 0, next_local_index: usize = 0, next_block_index: usize = 0, - indent_writer: std.io.AutoIndentingStream(std.ArrayList(u8).Writer), + indent_writer: IndentWriter(std.ArrayList(u8).Writer), fn resolveInst(o: *Object, inst: *Inst) !CValue { if (inst.value()) |_| { @@ -63,7 +64,7 @@ pub const Object = struct { return local_value; } - fn writer(o: *Object) std.io.AutoIndentingStream(std.ArrayList(u8).Writer).Writer { + fn writer(o: *Object) IndentWriter(std.ArrayList(u8).Writer).Writer { return o.indent_writer.writer(); } @@ -796,3 +797,56 @@ fn genAsm(o: *Object, as: *Inst.Assembly) !CValue { return o.dg.fail(o.dg.decl.src(), "TODO: C backend: inline asm expression result used", .{}); } + +fn IndentWriter(comptime UnderlyingWriter: type) type { + return struct { + const Self = @This(); + pub const Error = UnderlyingWriter.Error; + pub const Writer = std.io.Writer(*Self, Error, write); + + pub const indent_delta = 4; + + underlying_writer: UnderlyingWriter, + indent_count: usize = 0, + current_line_empty: bool = true, + + pub fn writer(self: *Self) Writer { + return .{ .context = self }; + } + + pub fn write(self: *Self, bytes: []const u8) Error!usize { + if (bytes.len == 0) return @as(usize, 0); + + const current_indent = self.indent_count * Self.indent_delta; + if (self.current_line_empty and current_indent > 0) { + try 
self.underlying_writer.writeByteNTimes(' ', current_indent); + } + self.current_line_empty = false; + + return self.writeNoIndent(bytes); + } + + pub fn insertNewline(self: *Self) Error!void { + _ = try self.writeNoIndent("\n"); + } + + pub fn pushIndent(self: *Self) void { + self.indent_count += 1; + } + + pub fn popIndent(self: *Self) void { + assert(self.indent_count != 0); + self.indent_count -= 1; + } + + fn writeNoIndent(self: *Self, bytes: []const u8) Error!usize { + if (bytes.len == 0) return @as(usize, 0); + + try self.underlying_writer.writeAll(bytes); + if (bytes[bytes.len - 1] == '\n') { + self.current_line_empty = true; + } + return bytes.len; + } + }; +} diff --git a/src/link/C.zig b/src/link/C.zig index 765249cd7d..8fb3637cbe 100644 --- a/src/link/C.zig +++ b/src/link/C.zig @@ -97,7 +97,7 @@ pub fn updateDecl(self: *C, module: *Module, decl: *Module.Decl) !void { .value_map = codegen.CValueMap.init(module.gpa), .indent_writer = undefined, // set later so we can get a pointer to object.code }; - object.indent_writer = std.io.autoIndentingStream(4, object.code.writer()); + object.indent_writer = .{ .underlying_writer = object.code.writer() }; defer object.value_map.deinit(); defer object.code.deinit(); defer object.dg.fwd_decl.deinit(); From 895fb2bd6da5e1b69668dd6572df9d559ebf2407 Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Tue, 16 Feb 2021 20:57:18 +0100 Subject: [PATCH 093/173] zig fmt: implement 'zig fmt: (on|off)' directives With the new implementation, these now work anywhere in the source code as opposed to only at the top level. 
--- lib/std/zig/parser_test.zig | 292 ++++++++++++++++++++---------------- lib/std/zig/render.zig | 38 ++++- 2 files changed, 195 insertions(+), 135 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index e4778b35e1..f7143d0908 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -878,17 +878,17 @@ test "zig fmt: async function" { ); } -//test "zig fmt: whitespace fixes" { -// try testTransform("test \"\" {\r\n\tconst hi = x;\r\n}\n// zig fmt: off\ntest \"\"{\r\n\tconst a = b;}\r\n", -// \\test "" { -// \\ const hi = x; -// \\} -// \\// zig fmt: off -// \\test ""{ -// \\ const a = b;} -// \\ -// ); -//} +test "zig fmt: whitespace fixes" { + try testTransform("test \"\" {\r\n\tconst hi = x;\r\n}\n// zig fmt: off\ntest \"\"{\r\n\tconst a = b;}\r\n", + \\test "" { + \\ const hi = x; + \\} + \\// zig fmt: off + \\test ""{ + \\ const a = b;} + \\ + ); +} test "zig fmt: while else err prong with no block" { try testCanonical( @@ -1098,128 +1098,154 @@ test "zig fmt: aligned struct field" { ); } -//test "zig fmt: comment to disable/enable zig fmt first" { -// try testCanonical( -// \\// Test trailing comma syntax -// \\// zig fmt: off -// \\ -// \\const struct_trailing_comma = struct { x: i32, y: i32, }; -// ); -//} -// -//test "zig fmt: comment to disable/enable zig fmt" { -// try testTransform( -// \\const a = b; -// \\// zig fmt: off -// \\const c = d; -// \\// zig fmt: on -// \\const e = f; -// , -// \\const a = b; -// \\// zig fmt: off -// \\const c = d; -// \\// zig fmt: on -// \\const e = f; -// \\ -// ); -//} -// -//test "zig fmt: line comment following 'zig fmt: off'" { -// try testCanonical( -// \\// zig fmt: off -// \\// Test -// \\const e = f; -// ); -//} -// -//test "zig fmt: doc comment following 'zig fmt: off'" { -// try testCanonical( -// \\// zig fmt: off -// \\/// test -// \\const e = f; -// ); -//} -// -//test "zig fmt: line and doc comment following 'zig fmt: off'" { -// try testCanonical( -// 
\\// zig fmt: off -// \\// test 1 -// \\/// test 2 -// \\const e = f; -// ); -//} -// -//test "zig fmt: doc and line comment following 'zig fmt: off'" { -// try testCanonical( -// \\// zig fmt: off -// \\/// test 1 -// \\// test 2 -// \\const e = f; -// ); -//} -// -//test "zig fmt: alternating 'zig fmt: off' and 'zig fmt: on'" { -// try testCanonical( -// \\// zig fmt: off -// \\// zig fmt: on -// \\// zig fmt: off -// \\const e = f; -// \\// zig fmt: off -// \\// zig fmt: on -// \\// zig fmt: off -// \\const a = b; -// \\// zig fmt: on -// \\const c = d; -// \\// zig fmt: on -// \\ -// ); -//} -// -//test "zig fmt: line comment following 'zig fmt: on'" { -// try testCanonical( -// \\// zig fmt: off -// \\const e = f; -// \\// zig fmt: on -// \\// test -// \\const e = f; -// \\ -// ); -//} -// -//test "zig fmt: doc comment following 'zig fmt: on'" { -// try testCanonical( -// \\// zig fmt: off -// \\const e = f; -// \\// zig fmt: on -// \\/// test -// \\const e = f; -// \\ -// ); -//} -// -//test "zig fmt: line and doc comment following 'zig fmt: on'" { -// try testCanonical( -// \\// zig fmt: off -// \\const e = f; -// \\// zig fmt: on -// \\// test1 -// \\/// test2 -// \\const e = f; -// \\ -// ); -//} -// -//test "zig fmt: doc and line comment following 'zig fmt: on'" { -// try testCanonical( -// \\// zig fmt: off -// \\const e = f; -// \\// zig fmt: on -// \\/// test1 -// \\// test2 -// \\const e = f; -// \\ -// ); -//} +test "zig fmt: comment to disable/enable zig fmt first" { + try testCanonical( + \\// Test trailing comma syntax + \\// zig fmt: off + \\ + \\const struct_trailing_comma = struct { x: i32, y: i32, }; + ); +} + +test "zig fmt: comment to disable/enable zig fmt" { + try testTransform( + \\const a = b; + \\// zig fmt: off + \\const c = d; + \\// zig fmt: on + \\const e = f; + , + \\const a = b; + \\// zig fmt: off + \\const c = d; + \\// zig fmt: on + \\const e = f; + \\ + ); +} + +test "zig fmt: line comment following 'zig fmt: off'" { + try 
testCanonical( + \\// zig fmt: off + \\// Test + \\const e = f; + ); +} + +test "zig fmt: doc comment following 'zig fmt: off'" { + try testCanonical( + \\// zig fmt: off + \\/// test + \\const e = f; + ); +} + +test "zig fmt: line and doc comment following 'zig fmt: off'" { + try testCanonical( + \\// zig fmt: off + \\// test 1 + \\/// test 2 + \\const e = f; + ); +} + +test "zig fmt: doc and line comment following 'zig fmt: off'" { + try testCanonical( + \\// zig fmt: off + \\/// test 1 + \\// test 2 + \\const e = f; + ); +} + +test "zig fmt: alternating 'zig fmt: off' and 'zig fmt: on'" { + try testCanonical( + \\// zig fmt: off + \\// zig fmt: on + \\// zig fmt: off + \\const e = f; + \\// zig fmt: off + \\// zig fmt: on + \\// zig fmt: off + \\const a = b; + \\// zig fmt: on + \\const c = d; + \\// zig fmt: on + \\ + ); +} + +test "zig fmt: line comment following 'zig fmt: on'" { + try testCanonical( + \\// zig fmt: off + \\const e = f; + \\// zig fmt: on + \\// test + \\const e = f; + \\ + ); +} + +test "zig fmt: doc comment following 'zig fmt: on'" { + try testCanonical( + \\// zig fmt: off + \\const e = f; + \\// zig fmt: on + \\/// test + \\const e = f; + \\ + ); +} + +test "zig fmt: line and doc comment following 'zig fmt: on'" { + try testCanonical( + \\// zig fmt: off + \\const e = f; + \\// zig fmt: on + \\// test1 + \\/// test2 + \\const e = f; + \\ + ); +} + +test "zig fmt: doc and line comment following 'zig fmt: on'" { + try testCanonical( + \\// zig fmt: off + \\const e = f; + \\// zig fmt: on + \\/// test1 + \\// test2 + \\const e = f; + \\ + ); +} + +test "zig fmt: 'zig fmt: (off|on)' works in the middle of code" { + try testTransform( + \\test "" { + \\ const x = 42; + \\ + \\ if (foobar) |y| { + \\ // zig fmt: off + \\ }// zig fmt: on + \\ + \\ const z = 420; + \\} + \\ + , + \\test "" { + \\ const x = 42; + \\ + \\ if (foobar) |y| { + \\ // zig fmt: off + \\ }// zig fmt: on + \\ + \\ const z = 420; + \\} + \\ + ); +} test "zig fmt: pointer of 
unknown length" { try testCanonical( diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index e668c4d64d..a520a2d18b 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -30,6 +30,10 @@ pub fn renderTree(buffer: *std.ArrayList(u8), tree: ast.Tree) Error!void { _ = try renderComments(ais, tree, 0, comment_end_loc); try renderMembers(ais, tree, tree.rootDecls()); + + if (ais.disabled_offset) |disabled_offset| { + try writeFixingWhitespace(ais.underlying_writer, tree.source[disabled_offset..]); + } } /// Render all members in the given slice, keeping empty lines where appropriate @@ -1971,6 +1975,7 @@ fn renderComments(ais: *Ais, tree: ast.Tree, start: usize, end: usize) Error!boo const comment_start = index + offset; const newline = comment_start + mem.indexOfScalar(u8, tree.source[comment_start..end], '\n').?; + const untrimmed_comment = tree.source[comment_start..newline]; const trimmed_comment = mem.trimRight(u8, untrimmed_comment, &std.ascii.spaces); @@ -1993,6 +1998,17 @@ fn renderComments(ais: *Ais, tree: ast.Tree, start: usize, end: usize) Error!boo try ais.writer().print("{s}\n", .{trimmed_comment}); index = newline + 1; + + if (ais.disabled_offset) |disabled_offset| { + if (mem.eql(u8, trimmed_comment, "// zig fmt: on")) { + // write the source for which formatting was disabled directly + // to the underlying writer, fixing up invalid whitespace + try writeFixingWhitespace(ais.underlying_writer, tree.source[disabled_offset..index]); + ais.disabled_offset = null; + } + } else if (mem.eql(u8, trimmed_comment, "// zig fmt: off")) { + ais.disabled_offset = index; + } } if (index != start and mem.containsAtLeast(u8, tree.source[index - 1 .. 
end], 2, "\n")) { @@ -2066,6 +2082,14 @@ fn tokenSliceForRender(tree: ast.Tree, token_index: ast.TokenIndex) []const u8 { return ret; } +fn writeFixingWhitespace(writer: std.ArrayList(u8).Writer, slice: []const u8) Error!void { + for (slice) |byte| switch (byte) { + '\t' => try writer.writeAll(" " ** 4), + '\r' => {}, + else => try writer.writeByte(byte), + }; +} + fn nodeIsBlock(tag: ast.Node.Tag) bool { return switch (tag) { .block, @@ -2145,6 +2169,14 @@ fn AutoIndentingStream(comptime UnderlyingWriter: type) type { underlying_writer: UnderlyingWriter, + /// Offset into the source at which formatting has been disabled with + /// a `zig fmt: off` comment. + /// + /// If non-null, the AutoIndentingStream will not write any bytes + /// to the underlying writer. It will however continue to track the + /// indentation level. + disabled_offset: ?usize = null, + indent_count: usize = 0, indent_delta: usize, current_line_empty: bool = true, @@ -2183,7 +2215,7 @@ fn AutoIndentingStream(comptime UnderlyingWriter: type) type { if (bytes.len == 0) return @as(usize, 0); - try self.underlying_writer.writeAll(bytes); + if (self.disabled_offset == null) try self.underlying_writer.writeAll(bytes); if (bytes[bytes.len - 1] == '\n') self.resetLine(); return bytes.len; @@ -2243,7 +2275,9 @@ fn AutoIndentingStream(comptime UnderlyingWriter: type) type { fn applyIndent(self: *Self) Error!void { const current_indent = self.currentIndent(); if (self.current_line_empty and current_indent > 0) { - try self.underlying_writer.writeByteNTimes(' ', current_indent); + if (self.disabled_offset == null) { + try self.underlying_writer.writeByteNTimes(' ', current_indent); + } self.applied_indent = current_indent; } From 4b226286e88d9b10f720c949d1241299af0d6c3f Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Wed, 17 Feb 2021 00:03:39 +0100 Subject: [PATCH 094/173] zig fmt: get rid of Space.no_comment Using this in its current state would be a bug as it could cause line comments to be deleted or 
a `// zig fmt: (on|off)` directive to be missed. Removing it doesn't currently cause any test failures, if a reason for its continued existence is discovered in the future another solution will have to be found. --- lib/std/zig/render.zig | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index a520a2d18b..3b5765507b 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -1924,8 +1924,6 @@ const Space = enum { /// Additionally consume the next token if it is a semicolon. /// In either case, a newline will be inserted afterwards. semicolon, - /// Skips writing the possible line comment after the token. - no_comment, }; fn renderToken(ais: *Ais, tree: ast.Tree, token_index: ast.TokenIndex, space: Space) Error!void { @@ -1937,8 +1935,6 @@ fn renderToken(ais: *Ais, tree: ast.Tree, token_index: ast.TokenIndex, space: Sp try ais.writer().writeAll(lexeme); - if (space == .no_comment) return; - const comment = try renderComments(ais, tree, token_start + lexeme.len, token_starts[token_index + 1]); switch (space) { .none => {}, @@ -1962,8 +1958,6 @@ fn renderToken(ais: *Ais, tree: ast.Tree, token_index: ast.TokenIndex, space: Sp } else if (!comment) { try ais.insertNewline(); }, - - .no_comment => unreachable, } } @@ -2064,12 +2058,7 @@ fn renderDocComments(ais: *Ais, tree: ast.Tree, end_token: ast.TokenIndex) Error try renderExtraNewlineToken(ais, tree, first_tok); while (token_tags[tok] == .doc_comment) : (tok += 1) { - if (first_tok < end_token) { - try renderToken(ais, tree, tok, .newline); - } else { - try renderToken(ais, tree, tok, .no_comment); - try ais.insertNewline(); - } + try renderToken(ais, tree, tok, .newline); } } From e2974759dd62e15f04e1aeb8babee65e6ffb3413 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Wed, 17 Feb 2021 14:11:49 +0200 Subject: [PATCH 095/173] translate-c: demote untranslatable declarations to externs --- src/translate_c.zig | 38 
+++++++++++++++++++++++++++----------- src/translate_c/ast.zig | 6 ++++++ test/translate_c.zig | 4 ++-- 3 files changed, 35 insertions(+), 13 deletions(-) diff --git a/src/translate_c.zig b/src/translate_c.zig index 1fcac233fc..0ab72d7734 100644 --- a/src/translate_c.zig +++ b/src/translate_c.zig @@ -508,7 +508,7 @@ fn visitFnDecl(c: *Context, fn_decl: *const clang.FunctionDecl) Error!void { decl_ctx.has_body = false; decl_ctx.storage_class = .Extern; decl_ctx.is_export = false; - try warn(c, &c.global_scope.base, fn_decl_loc, "TODO unable to translate variadic function, demoted to declaration", .{}); + try warn(c, &c.global_scope.base, fn_decl_loc, "TODO unable to translate variadic function, demoted to extern", .{}); } break :blk transFnProto(c, fn_decl, fn_proto_type, fn_decl_loc, decl_ctx, true) catch |err| switch (err) { error.UnsupportedType => { @@ -543,8 +543,12 @@ fn visitFnDecl(c: *Context, fn_decl: *const clang.FunctionDecl) Error!void { var param_id: c_uint = 0; for (proto_node.data.params) |*param, i| { - const param_name = param.name orelse - return failDecl(c, fn_decl_loc, fn_name, "function {s} parameter has no name", .{fn_name}); + const param_name = param.name orelse { + proto_node.data.is_extern = true; + proto_node.data.is_export = false; + try warn(c, &c.global_scope.base, fn_decl_loc, "function {s} parameter has no name, demoted to extern", .{fn_name}); + return addTopLevelDecl(c, fn_name, Node.initPayload(&proto_node.base)); + }; const c_param = fn_decl.getParamDecl(param_id); const qual_type = c_param.getOriginalType(); @@ -570,7 +574,12 @@ fn visitFnDecl(c: *Context, fn_decl: *const clang.FunctionDecl) Error!void { error.OutOfMemory => |e| return e, error.UnsupportedTranslation, error.UnsupportedType, - => return failDecl(c, fn_decl_loc, fn_name, "unable to translate function", .{}), + => { + proto_node.data.is_extern = true; + proto_node.data.is_export = false; + try warn(c, &c.global_scope.base, fn_decl_loc, "unable to translate function, 
demoted to extern", .{}); + return addTopLevelDecl(c, fn_name, Node.initPayload(&proto_node.base)); + }, }; // add return statement if the function didn't have one blk: { @@ -598,7 +607,12 @@ fn visitFnDecl(c: *Context, fn_decl: *const clang.FunctionDecl) Error!void { error.OutOfMemory => |e| return e, error.UnsupportedTranslation, error.UnsupportedType, - => return failDecl(c, fn_decl_loc, fn_name, "unable to create a return value for function", .{}), + => { + proto_node.data.is_extern = true; + proto_node.data.is_export = false; + try warn(c, &c.global_scope.base, fn_decl_loc, "unable to create a return value for function, demoted to extern", .{}); + return addTopLevelDecl(c, fn_name, Node.initPayload(&proto_node.base)); + }, }; const ret = try Tag.@"return".create(c.arena, rhs); try block_scope.statements.append(ret); @@ -641,8 +655,8 @@ fn visitVarDecl(c: *Context, var_decl: *const clang.VarDecl, mangled_name: ?[]co // does the same as: // extern int foo; // int foo = 2; - const is_extern = storage_class == .Extern and !has_init; - const is_export = !is_extern and storage_class != .Static; + var is_extern = storage_class == .Extern and !has_init; + var is_export = !is_extern and storage_class != .Static; const type_node = transQualTypeMaybeInitialized(c, qual_type, decl_init, var_decl_loc) catch |err| switch (err) { error.UnsupportedTranslation, error.UnsupportedType => { @@ -656,7 +670,7 @@ fn visitVarDecl(c: *Context, var_decl: *const clang.VarDecl, mangled_name: ?[]co // If the initialization expression is not present, initialize with undefined. // If it is an integer literal, we can skip the @as since it will be redundant // with the variable type. 
- if (has_init) { + if (has_init) trans_init: { if (decl_init) |expr| { const node_or_error = if (expr.getStmtClass() == .StringLiteralClass) transStringLiteralAsArray(c, scope, @ptrCast(*const clang.StringLiteral, expr), zigArraySize(c, type_node) catch 0) @@ -666,7 +680,10 @@ fn visitVarDecl(c: *Context, var_decl: *const clang.VarDecl, mangled_name: ?[]co error.UnsupportedTranslation, error.UnsupportedType, => { - return failDecl(c, var_decl_loc, checked_name, "unable to translate initializer", .{}); + is_extern = true; + is_export = false; + try warn(c, scope, var_decl_loc, "unable to translate variable initializer, demoted to extern", .{}); + break :trans_init; }, error.OutOfMemory => |e| return e, }; @@ -1845,7 +1862,7 @@ fn literalFitsInType(c: *Context, expr: *const clang.Expr, qt: clang.QualType) b var width = qualTypeIntBitWidth(c, qt) catch 8; if (width == 0) width = 8; // Byte is the smallest type. const is_signed = cIsSignedInteger(qt); - const width_max_int= (@as(u64, 1) << math.lossyCast(u6, width - @boolToInt(is_signed))) - 1; + const width_max_int = (@as(u64, 1) << math.lossyCast(u6, width - @boolToInt(is_signed))) - 1; switch (@ptrCast(*const clang.Stmt, expr).getStmtClass()) { .CharacterLiteralClass => { @@ -1866,7 +1883,6 @@ fn literalFitsInType(c: *Context, expr: *const clang.Expr, qt: clang.QualType) b }, else => unreachable, } - } fn transInitListExprRecord( diff --git a/src/translate_c/ast.zig b/src/translate_c/ast.zig index 8f3d605d8e..6d22c7a270 100644 --- a/src/translate_c/ast.zig +++ b/src/translate_c/ast.zig @@ -1866,6 +1866,12 @@ fn addSemicolonIfNeeded(c: *Context, node: Node) !void { fn addSemicolonIfNotBlock(c: *Context, node: Node) !void { switch (node.tag()) { .block, .empty_block, .block_single, => {}, + .@"if" => { + const payload = node.castTag(.@"if").?.data; + if (payload.@"else") |some| + return addSemicolonIfNotBlock(c, some); + return addSemicolonIfNotBlock(c, payload.then); + }, else => _ = try c.addToken(.semicolon, ";"), 
} } diff --git a/test/translate_c.zig b/test/translate_c.zig index dda0e45144..79361bf9bf 100644 --- a/test/translate_c.zig +++ b/test/translate_c.zig @@ -11,12 +11,12 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\} }); - cases.add("variadic function demoted to prototype", + cases.add("variadic function demoted to extern", \\int foo(int bar, ...) { \\ return 1; \\} , &[_][]const u8{ - \\warning: TODO unable to translate variadic function, demoted to declaration + \\warning: TODO unable to translate variadic function, demoted to extern \\pub extern fn foo(bar: c_int, ...) c_int; }); From d5fecbd0bacd986fc02c8a98aea07ac42303f0ce Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Wed, 17 Feb 2021 16:26:11 +0200 Subject: [PATCH 096/173] translate-c: support scoped typedef, enum and record decls Closes #5256 --- src/translate_c.zig | 290 +++++++++++++++++++++------------------- src/translate_c/ast.zig | 15 ++- test/translate_c.zig | 182 +++++++++++++++++++++---- 3 files changed, 318 insertions(+), 169 deletions(-) diff --git a/src/translate_c.zig b/src/translate_c.zig index 0ab72d7734..5ac60bffae 100644 --- a/src/translate_c.zig +++ b/src/translate_c.zig @@ -433,13 +433,13 @@ fn declVisitor(c: *Context, decl: *const clang.Decl) Error!void { return visitFnDecl(c, @ptrCast(*const clang.FunctionDecl, decl)); }, .Typedef => { - _ = try transTypeDef(c, @ptrCast(*const clang.TypedefNameDecl, decl), true); + try transTypeDef(c, &c.global_scope.base, @ptrCast(*const clang.TypedefNameDecl, decl)); }, .Enum => { - _ = try transEnumDecl(c, @ptrCast(*const clang.EnumDecl, decl)); + try transEnumDecl(c, &c.global_scope.base, @ptrCast(*const clang.EnumDecl, decl)); }, .Record => { - _ = try transRecordDecl(c, @ptrCast(*const clang.RecordDecl, decl)); + try transRecordDecl(c, &c.global_scope.base, @ptrCast(*const clang.RecordDecl, decl)); }, .Var => { return visitVarDecl(c, @ptrCast(*const clang.VarDecl, decl), null); @@ -622,11 +622,11 @@ fn visitFnDecl(c: *Context, 
fn_decl: *const clang.FunctionDecl) Error!void { return addTopLevelDecl(c, fn_name, Node.initPayload(&proto_node.base)); } -fn transQualTypeMaybeInitialized(c: *Context, qt: clang.QualType, decl_init: ?*const clang.Expr, loc: clang.SourceLocation) TransError!Node { +fn transQualTypeMaybeInitialized(c: *Context, scope: *Scope, qt: clang.QualType, decl_init: ?*const clang.Expr, loc: clang.SourceLocation) TransError!Node { return if (decl_init) |init_expr| - transQualTypeInitialized(c, qt, init_expr, loc) + transQualTypeInitialized(c, scope, qt, init_expr, loc) else - transQualType(c, qt, loc); + transQualType(c, scope, qt, loc); } /// if mangled_name is not null, this var decl was declared in a block scope. @@ -658,7 +658,7 @@ fn visitVarDecl(c: *Context, var_decl: *const clang.VarDecl, mangled_name: ?[]co var is_extern = storage_class == .Extern and !has_init; var is_export = !is_extern and storage_class != .Static; - const type_node = transQualTypeMaybeInitialized(c, qual_type, decl_init, var_decl_loc) catch |err| switch (err) { + const type_node = transQualTypeMaybeInitialized(c, scope, qual_type, decl_init, var_decl_loc) catch |err| switch (err) { error.UnsupportedTranslation, error.UnsupportedType => { return failDecl(c, var_decl_loc, checked_name, "unable to resolve variable type", .{}); }, @@ -733,11 +733,6 @@ fn visitVarDecl(c: *Context, var_decl: *const clang.VarDecl, mangled_name: ?[]co return addTopLevelDecl(c, checked_name, node); } -fn transTypeDefAsBuiltin(c: *Context, typedef_decl: *const clang.TypedefNameDecl, builtin_name: []const u8) !Node { - _ = try c.decl_table.put(c.gpa, @ptrToInt(typedef_decl.getCanonicalDecl()), builtin_name); - return Tag.identifier.create(c.arena, builtin_name); -} - const builtin_typedef_map = std.ComptimeStringMap([]const u8, .{ .{ "uint8_t", "u8" }, .{ "int8_t", "i8" }, @@ -753,42 +748,28 @@ const builtin_typedef_map = std.ComptimeStringMap([]const u8, .{ .{ "size_t", "usize" }, }); -fn transTypeDef(c: *Context, 
typedef_decl: *const clang.TypedefNameDecl, top_level_visit: bool) Error!?Node { +fn transTypeDef(c: *Context, scope: *Scope, typedef_decl: *const clang.TypedefNameDecl) Error!void { if (c.decl_table.get(@ptrToInt(typedef_decl.getCanonicalDecl()))) |name| - return try Tag.identifier.create(c.arena, name); // Avoid processing this decl twice + return; // Avoid processing this decl twice + const toplevel = scope.id == .root; + const bs: *Scope.Block = if (!toplevel) try scope.findBlockScope(c) else undefined; - const typedef_name = try c.str(@ptrCast(*const clang.NamedDecl, typedef_decl).getName_bytes_begin()); + const bare_name = try c.str(@ptrCast(*const clang.NamedDecl, typedef_decl).getName_bytes_begin()); // TODO https://github.com/ziglang/zig/issues/3756 // TODO https://github.com/ziglang/zig/issues/1802 - const checked_name = if (isZigPrimitiveType(typedef_name)) try std.fmt.allocPrint(c.arena, "{s}_{d}", .{ typedef_name, c.getMangle() }) else typedef_name; - if (builtin_typedef_map.get(checked_name)) |builtin| { - _ = try c.decl_table.put(c.gpa, @ptrToInt(typedef_decl.getCanonicalDecl()), builtin); - return try Tag.identifier.create(c.arena, builtin); + var name: []const u8 = if (isZigPrimitiveType(bare_name)) try std.fmt.allocPrint(c.arena, "{s}_{d}", .{ bare_name, c.getMangle() }) else bare_name; + if (builtin_typedef_map.get(name)) |builtin| { + return c.decl_table.putNoClobber(c.gpa, @ptrToInt(typedef_decl.getCanonicalDecl()), builtin); } + if (!toplevel) name = try bs.makeMangledName(c, name); + try c.decl_table.putNoClobber(c.gpa, @ptrToInt(typedef_decl.getCanonicalDecl()), name); - if (!top_level_visit) { - return try Tag.identifier.create(c.arena, checked_name); - } - - _ = try c.decl_table.put(c.gpa, @ptrToInt(typedef_decl.getCanonicalDecl()), checked_name); - const node = (try transCreateNodeTypedef(c, typedef_decl, true, checked_name)) orelse return null; - try addTopLevelDecl(c, checked_name, node); - return try Tag.identifier.create(c.arena, 
checked_name); -} - -fn transCreateNodeTypedef( - c: *Context, - typedef_decl: *const clang.TypedefNameDecl, - toplevel: bool, - checked_name: []const u8, -) Error!?Node { const child_qt = typedef_decl.getUnderlyingType(); const typedef_loc = typedef_decl.getLocation(); - const init_node = transQualType(c, child_qt, typedef_loc) catch |err| switch (err) { + const init_node = transQualType(c, scope, child_qt, typedef_loc) catch |err| switch (err) { error.UnsupportedType => { - try failDecl(c, typedef_loc, checked_name, "unable to resolve typedef child type", .{}); - return null; + return failDecl(c, typedef_loc, name, "unable to resolve typedef child type", .{}); }, error.OutOfMemory => |e| return e, }; @@ -797,17 +778,25 @@ fn transCreateNodeTypedef( payload.* = .{ .base = .{ .tag = ([2]Tag{ .var_simple, .pub_var_simple })[@boolToInt(toplevel)] }, .data = .{ - .name = checked_name, + .name = name, .init = init_node, }, }; - return Node.initPayload(&payload.base); + const node = Node.initPayload(&payload.base); + + if (toplevel) { + try addTopLevelDecl(c, name, node); + } else { + try scope.appendNode(node); + } } -fn transRecordDecl(c: *Context, record_decl: *const clang.RecordDecl) Error!?Node { +fn transRecordDecl(c: *Context, scope: *Scope, record_decl: *const clang.RecordDecl) Error!void { if (c.decl_table.get(@ptrToInt(record_decl.getCanonicalDecl()))) |name| - return try Tag.identifier.create(c.arena, name); // Avoid processing this decl twice + return; // Avoid processing this decl twice const record_loc = record_decl.getLocation(); + const toplevel = scope.id == .root; + const bs: *Scope.Block = if (!toplevel) try scope.findBlockScope(c) else undefined; var bare_name = try c.str(@ptrCast(*const clang.NamedDecl, record_decl).getName_bytes_begin()); var is_unnamed = false; @@ -826,14 +815,15 @@ fn transRecordDecl(c: *Context, record_decl: *const clang.RecordDecl) Error!?Nod } else if (record_decl.isStruct()) { container_kind_name = "struct"; } else { - try 
warn(c, &c.global_scope.base, record_loc, "record {s} is not a struct or union", .{bare_name}); - return null; + try c.decl_table.putNoClobber(c.gpa, @ptrToInt(record_decl.getCanonicalDecl()), bare_name); + return failDecl(c, record_loc, bare_name, "record {s} is not a struct or union", .{bare_name}); } - const name = try std.fmt.allocPrint(c.arena, "{s}_{s}", .{ container_kind_name, bare_name }); - _ = try c.decl_table.put(c.gpa, @ptrToInt(record_decl.getCanonicalDecl()), name); + var name: []const u8 = try std.fmt.allocPrint(c.arena, "{s}_{s}", .{ container_kind_name, bare_name }); + if (!toplevel) name = try bs.makeMangledName(c, name); + try c.decl_table.putNoClobber(c.gpa, @ptrToInt(record_decl.getCanonicalDecl()), name); - const is_pub = !is_unnamed; + const is_pub = toplevel and !is_unnamed; const init_node = blk: { const record_def = record_decl.getDefinition() orelse { _ = try c.opaque_demotes.put(c.gpa, @ptrToInt(record_decl.getCanonicalDecl()), {}); @@ -854,13 +844,13 @@ fn transRecordDecl(c: *Context, record_decl: *const clang.RecordDecl) Error!?Nod if (field_decl.isBitField()) { _ = try c.opaque_demotes.put(c.gpa, @ptrToInt(record_decl.getCanonicalDecl()), {}); - try warn(c, &c.global_scope.base, field_loc, "{s} demoted to opaque type - has bitfield", .{container_kind_name}); + try warn(c, scope, field_loc, "{s} demoted to opaque type - has bitfield", .{container_kind_name}); break :blk Tag.opaque_literal.init(); } if (qualTypeCanon(field_qt).isIncompleteOrZeroLengthArrayType(c.clang_context)) { _ = try c.opaque_demotes.put(c.gpa, @ptrToInt(record_decl.getCanonicalDecl()), {}); - try warn(c, &c.global_scope.base, field_loc, "{s} demoted to opaque type - has variable length array", .{container_kind_name}); + try warn(c, scope, field_loc, "{s} demoted to opaque type - has variable length array", .{container_kind_name}); break :blk Tag.opaque_literal.init(); } @@ -872,10 +862,10 @@ fn transRecordDecl(c: *Context, record_decl: *const clang.RecordDecl) 
Error!?Nod unnamed_field_count += 1; is_anon = true; } - const field_type = transQualType(c, field_qt, field_loc) catch |err| switch (err) { + const field_type = transQualType(c, scope, field_qt, field_loc) catch |err| switch (err) { error.UnsupportedType => { _ = try c.opaque_demotes.put(c.gpa, @ptrToInt(record_decl.getCanonicalDecl()), {}); - try warn(c, &c.global_scope.base, record_loc, "{s} demoted to opaque type - unable to translate type of field {s}", .{ container_kind_name, field_name }); + try warn(c, scope, record_loc, "{s} demoted to opaque type - unable to translate type of field {s}", .{ container_kind_name, field_name }); break :blk Tag.opaque_literal.init(); }, else => |e| return e, @@ -891,7 +881,7 @@ fn transRecordDecl(c: *Context, record_decl: *const clang.RecordDecl) Error!?Nod }; if (is_anon) { - _ = try c.decl_table.put(c.gpa, @ptrToInt(field_decl.getCanonicalDecl()), field_name); + try c.decl_table.putNoClobber(c.gpa, @ptrToInt(field_decl.getCanonicalDecl()), field_name); } try fields.append(.{ @@ -921,16 +911,21 @@ fn transRecordDecl(c: *Context, record_decl: *const clang.RecordDecl) Error!?Nod }, }; - try addTopLevelDecl(c, name, Node.initPayload(&payload.base)); - if (!is_unnamed) - try c.alias_list.append(.{ .alias = bare_name, .name = name }); - return try Tag.identifier.create(c.arena, name); + if (toplevel) { + try addTopLevelDecl(c, name, Node.initPayload(&payload.base)); + if (!is_unnamed) + try c.alias_list.append(.{ .alias = bare_name, .name = name }); + } else { + try scope.appendNode(Node.initPayload(&payload.base)); + } } -fn transEnumDecl(c: *Context, enum_decl: *const clang.EnumDecl) Error!?Node { +fn transEnumDecl(c: *Context, scope: *Scope, enum_decl: *const clang.EnumDecl) Error!void { if (c.decl_table.get(@ptrToInt(enum_decl.getCanonicalDecl()))) |name| - return try Tag.identifier.create(c.arena, name); // Avoid processing this decl twice + return; // Avoid processing this decl twice const enum_loc = 
enum_decl.getLocation(); + const toplevel = scope.id == .root; + const bs: *Scope.Block = if (!toplevel) try scope.findBlockScope(c) else undefined; var bare_name = try c.str(@ptrCast(*const clang.NamedDecl, enum_decl).getName_bytes_begin()); var is_unnamed = false; @@ -939,10 +934,13 @@ fn transEnumDecl(c: *Context, enum_decl: *const clang.EnumDecl) Error!?Node { is_unnamed = true; } - const name = try std.fmt.allocPrint(c.arena, "enum_{s}", .{bare_name}); - _ = try c.decl_table.put(c.gpa, @ptrToInt(enum_decl.getCanonicalDecl()), name); + var name: []const u8 = try std.fmt.allocPrint(c.arena, "enum_{s}", .{bare_name}); + if (!toplevel) _ = try bs.makeMangledName(c, name); + try c.decl_table.putNoClobber(c.gpa, @ptrToInt(enum_decl.getCanonicalDecl()), name); - const is_pub = !is_unnamed; + const is_pub = toplevel and !is_unnamed; + var redecls = std.ArrayList(Tag.enum_redecl.Data()).init(c.gpa); + defer redecls.deinit(); const init_node = if (enum_decl.getDefinition()) |enum_def| blk: { var pure_enum = true; @@ -968,10 +966,9 @@ fn transEnumDecl(c: *Context, enum_decl: *const clang.EnumDecl) Error!?Node { const init_arg_expr = if (int_type.ptr != null and !isCBuiltinType(int_type, .UInt) and !isCBuiltinType(int_type, .Int)) - transQualType(c, int_type, enum_loc) catch |err| switch (err) { + transQualType(c, scope, int_type, enum_loc) catch |err| switch (err) { error.UnsupportedType => { - try failDecl(c, enum_loc, name, "unable to translate enum tag type", .{}); - return null; + return failDecl(c, enum_loc, name, "unable to translate enum tag type", .{}); }, else => |e| return e, } @@ -1001,11 +998,11 @@ fn transEnumDecl(c: *Context, enum_decl: *const clang.EnumDecl) Error!?Node { // In C each enum value is in the global namespace. So we put them there too. // At this point we can rely on the enum emitting successfully. 
- try addTopLevelDecl(c, field_name, try Tag.enum_redecl.create(c.arena, .{ + try redecls.append(.{ .enum_val_name = enum_val_name, .field_name = field_name, .enum_name = name, - })); + }); } break :blk try Tag.@"enum".create(c.arena, .{ @@ -1026,10 +1023,25 @@ fn transEnumDecl(c: *Context, enum_decl: *const clang.EnumDecl) Error!?Node { }, }; - try addTopLevelDecl(c, name, Node.initPayload(&payload.base)); - if (!is_unnamed) - try c.alias_list.append(.{ .alias = bare_name, .name = name }); - return try Tag.identifier.create(c.arena, name); + if (toplevel) { + try addTopLevelDecl(c, name, Node.initPayload(&payload.base)); + if (!is_unnamed) + try c.alias_list.append(.{ .alias = bare_name, .name = name }); + } else { + try scope.appendNode(Node.initPayload(&payload.base)); + } + + for (redecls.items) |redecl| { + if (toplevel) { + try addTopLevelDecl(c, redecl.field_name, try Tag.pub_enum_redecl.create(c.arena, redecl)); + } else { + try scope.appendNode(try Tag.enum_redecl.create(c.arena, .{ + .enum_val_name = try bs.makeMangledName(c, redecl.enum_val_name), + .field_name = redecl.field_name, + .enum_name = redecl.enum_name, + })); + } + } } const ResultUsed = enum { @@ -1251,6 +1263,7 @@ fn transCompoundStmtInline( const end_it = stmt.body_end(); while (it != end_it) : (it += 1) { const result = try transStmt(c, parent_scope, it[0], .unused); + if (result.tag() == .declaration) continue; try block.statements.append(result); } } @@ -1285,7 +1298,7 @@ fn transDeclStmtOne( scope: *Scope, decl: *const clang.Decl, block_scope: *Scope.Block, -) TransError!Node { +) TransError!void { switch (decl.getKind()) { .Var => { const var_decl = @ptrCast(*const clang.VarDecl, decl); @@ -1299,8 +1312,7 @@ fn transDeclStmtOne( .Extern, .Static => { // This is actually a global variable, put it in the global scope and reference it. 
// `_ = mangled_name;` - try visitVarDecl(c, var_decl, mangled_name); - return try maybeSuppressResult(c, scope, .unused, try Tag.identifier.create(c.arena, mangled_name)); + return visitVarDecl(c, var_decl, mangled_name); }, else => {}, } @@ -1308,7 +1320,7 @@ fn transDeclStmtOne( const is_const = qual_type.isConstQualified(); const loc = decl.getLocation(); - const type_node = try transQualTypeMaybeInitialized(c, qual_type, decl_init, loc); + const type_node = try transQualTypeMaybeInitialized(c, scope, qual_type, decl_init, loc); var init_node = if (decl_init) |expr| if (expr.getStmtClass() == .StringLiteralClass) @@ -1320,7 +1332,7 @@ fn transDeclStmtOne( if (!qualTypeIsBoolean(qual_type) and isBoolRes(init_node)) { init_node = try Tag.bool_to_int.create(c.arena, init_node); } - return Tag.var_decl.create(c.arena, .{ + const node = try Tag.var_decl.create(c.arena, .{ .is_pub = false, .is_const = is_const, .is_extern = false, @@ -1332,18 +1344,16 @@ fn transDeclStmtOne( .type = type_node, .init = init_node, }); + try block_scope.statements.append(node); }, .Typedef => { - const typedef_decl = @ptrCast(*const clang.TypedefNameDecl, decl); - const name = try c.str(@ptrCast(*const clang.NamedDecl, typedef_decl).getName_bytes_begin()); - - const underlying_qual = typedef_decl.getUnderlyingType(); - const underlying_type = underlying_qual.getTypePtr(); - - const mangled_name = try block_scope.makeMangledName(c, name); - const node = (try transCreateNodeTypedef(c, typedef_decl, false, mangled_name)) orelse - return error.UnsupportedTranslation; - return node; + try transTypeDef(c, scope, @ptrCast(*const clang.TypedefNameDecl, decl)); + }, + .Record => { + try transRecordDecl(c, scope, @ptrCast(*const clang.RecordDecl, decl)); + }, + .Enum => { + try transEnumDecl(c, scope, @ptrCast(*const clang.EnumDecl, decl)); }, else => |kind| return fail( c, @@ -1356,21 +1366,14 @@ fn transDeclStmtOne( } fn transDeclStmt(c: *Context, scope: *Scope, stmt: *const clang.DeclStmt) 
TransError!Node { - const block_scope = scope.findBlockScope(c) catch unreachable; + const block_scope = try scope.findBlockScope(c); var it = stmt.decl_begin(); const end_it = stmt.decl_end(); - assert(it != end_it); - while (true) : (it += 1) { - const node = try transDeclStmtOne(c, scope, it[0], block_scope); - - if (it + 1 == end_it) { - return node; - } else { - try block_scope.statements.append(node); - } + while (it != end_it) : (it += 1) { + try transDeclStmtOne(c, scope, it[0], block_scope); } - unreachable; + return Tag.declaration.init(); } fn transDeclRefExpr( @@ -1619,7 +1622,7 @@ fn transIntegerLiteral( // @as(T, x) const expr_base = @ptrCast(*const clang.Expr, expr); - const ty_node = try transQualType(c, expr_base.getType(), expr_base.getBeginLoc()); + const ty_node = try transQualType(c, scope, expr_base.getType(), expr_base.getBeginLoc()); const rhs = try transCreateNodeAPInt(c, eval_result.Val.getInt()); const as = try Tag.as.create(c.arena, .{ .lhs = ty_node, .rhs = rhs }); return maybeSuppressResult(c, scope, result_used, as); @@ -1697,7 +1700,7 @@ fn transStringLiteralAsArray( const ty = expr_base.getType().getTypePtr(); const const_arr_ty = @ptrCast(*const clang.ConstantArrayType, ty); - const elem_type = try transQualType(c, const_arr_ty.getElementType(), expr_base.getBeginLoc()); + const elem_type = try transQualType(c, scope, const_arr_ty.getElementType(), expr_base.getBeginLoc()); const arr_type = try Tag.array_type.create(c.arena, .{ .len = array_size, .elem_type = elem_type }); const init_list = try c.arena.alloc(Node, array_size); @@ -1744,9 +1747,9 @@ fn transCCast( if (qualTypeCanon(dst_type).isVoidType()) return expr; if (dst_type.eq(src_type)) return expr; if (qualTypeIsPtr(dst_type) and qualTypeIsPtr(src_type)) - return transCPtrCast(c, loc, dst_type, src_type, expr); + return transCPtrCast(c, scope, loc, dst_type, src_type, expr); - const dst_node = try transQualType(c, dst_type, loc); + const dst_node = try transQualType(c, 
scope, dst_type, loc); if (cIsInteger(dst_type) and (cIsInteger(src_type) or cIsEnum(src_type))) { // 1. If src_type is an enum, determine the underlying signed int type // 2. Extend or truncate without changing signed-ness. @@ -1903,7 +1906,7 @@ fn transInitListExprRecord( const record_def = record_decl.getDefinition() orelse unreachable; - const ty_node = try transType(c, ty, loc); + const ty_node = try transType(c, scope, ty, loc); const init_count = expr.getNumInits(); var field_inits = std.ArrayList(ast.Payload.ContainerInit.Initializer).init(c.gpa); defer field_inits.deinit(); @@ -1952,7 +1955,7 @@ fn transInitListExprArray( ) TransError!Node { const arr_type = ty.getAsArrayTypeUnsafe(); const child_qt = arr_type.getElementType(); - const child_type = try transQualType(c, child_qt, loc); + const child_type = try transQualType(c, scope, child_qt, loc); const init_count = expr.getNumInits(); assert(@ptrCast(*const clang.Type, arr_type).isConstantArrayType()); const const_arr_ty = @ptrCast(*const clang.ConstantArrayType, arr_type); @@ -2217,7 +2220,7 @@ fn transForLoop( block_scope = try Scope.Block.init(c, scope, false); loop_scope.parent = &block_scope.?.base; const init_node = try transStmt(c, &block_scope.?.base, init, .unused); - try block_scope.?.statements.append(init_node); + if (init_node.tag() != .declaration) try block_scope.?.statements.append(init_node); } var cond_scope = Scope.Condition{ .base = .{ @@ -2328,7 +2331,7 @@ fn transCase( scope: *Scope, stmt: *const clang.CaseStmt, ) TransError!Node { - const block_scope = scope.findBlockScope(c) catch unreachable; + const block_scope = try scope.findBlockScope(c); const switch_scope = scope.getSwitch(); const label = try block_scope.makeMangledName(c, "case"); @@ -2366,7 +2369,7 @@ fn transDefault( scope: *Scope, stmt: *const clang.DefaultStmt, ) TransError!Node { - const block_scope = scope.findBlockScope(c) catch unreachable; + const block_scope = try scope.findBlockScope(c); const switch_scope = 
scope.getSwitch(); switch_scope.default_label = try block_scope.makeMangledName(c, "default"); @@ -2400,7 +2403,7 @@ fn transConstantExpr(c: *Context, scope: *Scope, expr: *const clang.Expr, used: // @as(T, x) const expr_base = @ptrCast(*const clang.Expr, expr); const as_node = try Tag.as.create(c.arena, .{ - .lhs = try transQualType(c, expr_base.getType(), expr_base.getBeginLoc()), + .lhs = try transQualType(c, scope, expr_base.getType(), expr_base.getBeginLoc()), .rhs = try transCreateNodeAPInt(c, result.Val.getInt()), }); return maybeSuppressResult(c, scope, used, as_node); @@ -2446,7 +2449,7 @@ fn transCharLiteral( // @as(T, x) const expr_base = @ptrCast(*const clang.Expr, stmt); const as_node = try Tag.as.create(c.arena, .{ - .lhs = try transQualType(c, expr_base.getType(), expr_base.getBeginLoc()), + .lhs = try transQualType(c, scope, expr_base.getType(), expr_base.getBeginLoc()), .rhs = int_lit_node, }); return maybeSuppressResult(c, scope, result_used, as_node); @@ -2464,6 +2467,7 @@ fn transStmtExpr(c: *Context, scope: *Scope, stmt: *const clang.StmtExpr, used: const end_it = comp.body_end(); while (it != end_it - 1) : (it += 1) { const result = try transStmt(c, &block_scope.base, it[0], .unused); + if (result.tag() == .declaration) continue; try block_scope.statements.append(result); } const break_node = try Tag.break_val.create(c.arena, .{ @@ -2657,7 +2661,7 @@ fn transUnaryExprOrTypeTraitExpr( result_used: ResultUsed, ) TransError!Node { const loc = stmt.getBeginLoc(); - const type_node = try transQualType(c, stmt.getTypeOfArgument(), loc); + const type_node = try transQualType(c, scope, stmt.getTypeOfArgument(), loc); const kind = stmt.getKind(); switch (kind) { @@ -2917,9 +2921,9 @@ fn transCreateCompoundAssign( if (is_shift or requires_int_cast) { // @intCast(rhs) const cast_to_type = if (is_shift) - try qualTypeToLog2IntRef(c, getExprQualType(c, rhs), loc) + try qualTypeToLog2IntRef(c, scope, getExprQualType(c, rhs), loc) else - try transQualType(c, 
getExprQualType(c, lhs), loc); + try transQualType(c, scope, getExprQualType(c, lhs), loc); rhs_node = try Tag.int_cast.create(c.arena, .{ .lhs = cast_to_type, .rhs = rhs_node }); } @@ -2960,9 +2964,9 @@ fn transCreateCompoundAssign( if (is_shift or requires_int_cast) { // @intCast(rhs) const cast_to_type = if (is_shift) - try qualTypeToLog2IntRef(c, getExprQualType(c, rhs), loc) + try qualTypeToLog2IntRef(c, scope, getExprQualType(c, rhs), loc) else - try transQualType(c, getExprQualType(c, lhs), loc); + try transQualType(c, scope, getExprQualType(c, lhs), loc); rhs_node = try Tag.int_cast.create(c.arena, .{ .lhs = cast_to_type, .rhs = rhs_node }); } @@ -2981,6 +2985,7 @@ fn transCreateCompoundAssign( fn transCPtrCast( c: *Context, + scope: *Scope, loc: clang.SourceLocation, dst_type: clang.QualType, src_type: clang.QualType, @@ -2990,7 +2995,7 @@ fn transCPtrCast( const child_type = ty.getPointeeType(); const src_ty = src_type.getTypePtr(); const src_child_type = src_ty.getPointeeType(); - const dst_type_node = try transType(c, ty, loc); + const dst_type_node = try transType(c, scope, ty, loc); if ((src_child_type.isConstQualified() and !child_type.isConstQualified()) or @@ -3011,7 +3016,7 @@ fn transCPtrCast( // For opaque types a ptrCast is enough expr else blk: { - const child_type_node = try transQualType(c, child_type, loc); + const child_type_node = try transQualType(c, scope, child_type, loc); const alignof = try Tag.alignof.create(c.arena, child_type_node); const align_cast = try Tag.align_cast.create(c.arena, .{ .lhs = alignof, .rhs = expr }); break :blk align_cast; @@ -3160,6 +3165,7 @@ fn addTopLevelDecl(c: *Context, name: []const u8, decl_node: Node) !void { /// by the size of the initializer fn transQualTypeInitialized( c: *Context, + scope: *Scope, qt: clang.QualType, decl_init: *const clang.Expr, source_loc: clang.SourceLocation, @@ -3167,7 +3173,7 @@ fn transQualTypeInitialized( const ty = qt.getTypePtr(); if (ty.getTypeClass() == 
.IncompleteArray) { const incomplete_array_ty = @ptrCast(*const clang.IncompleteArrayType, ty); - const elem_ty = try transType(c, incomplete_array_ty.getElementType().getTypePtr(), source_loc); + const elem_ty = try transType(c, scope, incomplete_array_ty.getElementType().getTypePtr(), source_loc); switch (decl_init.getStmtClass()) { .StringLiteralClass => { @@ -3184,11 +3190,11 @@ fn transQualTypeInitialized( else => {}, } } - return transQualType(c, qt, source_loc); + return transQualType(c, scope, qt, source_loc); } -fn transQualType(c: *Context, qt: clang.QualType, source_loc: clang.SourceLocation) TypeError!Node { - return transType(c, qt.getTypePtr(), source_loc); +fn transQualType(c: *Context, scope: *Scope, qt: clang.QualType, source_loc: clang.SourceLocation) TypeError!Node { + return transType(c, scope, qt.getTypePtr(), source_loc); } /// Produces a Zig AST node by translating a Clang QualType, respecting the width, but modifying the signed-ness. @@ -3273,7 +3279,7 @@ fn qualTypeIntBitWidth(c: *Context, qt: clang.QualType) !u32 { } } -fn qualTypeToLog2IntRef(c: *Context, qt: clang.QualType, source_loc: clang.SourceLocation) !Node { +fn qualTypeToLog2IntRef(c: *Context, scope: *Scope, qt: clang.QualType, source_loc: clang.SourceLocation) !Node { const int_bit_width = try qualTypeIntBitWidth(c, qt); if (int_bit_width != 0) { @@ -3282,7 +3288,7 @@ fn qualTypeToLog2IntRef(c: *Context, qt: clang.QualType, source_loc: clang.Sourc return Tag.log2_int_type.create(c.arena, cast_bit_width); } - const zig_type = try transQualType(c, qt, source_loc); + const zig_type = try transQualType(c, scope, qt, source_loc); return Tag.std_math_Log2Int.create(c.arena, zig_type); } @@ -3641,14 +3647,14 @@ fn transCreateNodeShiftOp( const lhs = try transExpr(c, scope, lhs_expr, .used); - const rhs_type = try qualTypeToLog2IntRef(c, stmt.getType(), rhs_location); + const rhs_type = try qualTypeToLog2IntRef(c, scope, stmt.getType(), rhs_location); const rhs = try 
transExprCoercing(c, scope, rhs_expr, .used); const rhs_casted = try Tag.int_cast.create(c.arena, .{ .lhs = rhs_type, .rhs = rhs }); return transCreateNodeInfixOp(c, scope, op, lhs, rhs_casted, used); } -fn transType(c: *Context, ty: *const clang.Type, source_loc: clang.SourceLocation) TypeError!Node { +fn transType(c: *Context, scope: *Scope, ty: *const clang.Type, source_loc: clang.SourceLocation) TypeError!Node { switch (ty.getTypeClass()) { .Builtin => { const builtin_ty = @ptrCast(*const clang.BuiltinType, ty); @@ -3687,16 +3693,16 @@ fn transType(c: *Context, ty: *const clang.Type, source_loc: clang.SourceLocatio }, .Paren => { const paren_ty = @ptrCast(*const clang.ParenType, ty); - return transQualType(c, paren_ty.getInnerType(), source_loc); + return transQualType(c, scope, paren_ty.getInnerType(), source_loc); }, .Pointer => { const child_qt = ty.getPointeeType(); if (qualTypeChildIsFnProto(child_qt)) { - return Tag.optional_type.create(c.arena, try transQualType(c, child_qt, source_loc)); + return Tag.optional_type.create(c.arena, try transQualType(c, scope, child_qt, source_loc)); } const is_const = child_qt.isConstQualified(); const is_volatile = child_qt.isVolatileQualified(); - const elem_type = try transQualType(c, child_qt, source_loc); + const elem_type = try transQualType(c, scope, child_qt, source_loc); if (typeIsOpaque(c, child_qt.getTypePtr(), source_loc) or qualTypeWasDemotedToOpaque(c, child_qt)) { const ptr = try Tag.single_pointer.create(c.arena, .{ .is_const = is_const, .is_volatile = is_volatile, .elem_type = elem_type }); return Tag.optional_type.create(c.arena, ptr); @@ -3709,7 +3715,7 @@ fn transType(c: *Context, ty: *const clang.Type, source_loc: clang.SourceLocatio const size_ap_int = const_arr_ty.getSize(); const size = size_ap_int.getLimitedValue(math.maxInt(usize)); - const elem_type = try transType(c, const_arr_ty.getElementType().getTypePtr(), source_loc); + const elem_type = try transType(c, scope, 
const_arr_ty.getElementType().getTypePtr(), source_loc); return Tag.array_type.create(c.arena, .{ .len = size, .elem_type = elem_type }); }, @@ -3719,7 +3725,7 @@ fn transType(c: *Context, ty: *const clang.Type, source_loc: clang.SourceLocatio const child_qt = incomplete_array_ty.getElementType(); const is_const = child_qt.isConstQualified(); const is_volatile = child_qt.isVolatileQualified(); - const elem_type = try transQualType(c, child_qt, source_loc); + const elem_type = try transQualType(c, scope, child_qt, source_loc); return Tag.c_pointer.create(c.arena, .{ .is_const = is_const, .is_volatile = is_volatile, .elem_type = elem_type }); }, @@ -3727,38 +3733,41 @@ fn transType(c: *Context, ty: *const clang.Type, source_loc: clang.SourceLocatio const typedef_ty = @ptrCast(*const clang.TypedefType, ty); const typedef_decl = typedef_ty.getDecl(); - return (try transTypeDef(c, typedef_decl, false)) orelse - fail(c, error.UnsupportedType, source_loc, "unable to translate typedef declaration", .{}); + try transTypeDef(c, scope, typedef_decl); + const name = c.decl_table.get(@ptrToInt(typedef_decl.getCanonicalDecl())).?; + return Tag.identifier.create(c.arena, name); }, .Record => { const record_ty = @ptrCast(*const clang.RecordType, ty); const record_decl = record_ty.getDecl(); - return (try transRecordDecl(c, record_decl)) orelse - fail(c, error.UnsupportedType, source_loc, "unable to resolve record declaration", .{}); + try transRecordDecl(c, scope, record_decl); + const name = c.decl_table.get(@ptrToInt(record_decl.getCanonicalDecl())).?; + return Tag.identifier.create(c.arena, name); }, .Enum => { const enum_ty = @ptrCast(*const clang.EnumType, ty); const enum_decl = enum_ty.getDecl(); - return (try transEnumDecl(c, enum_decl)) orelse - fail(c, error.UnsupportedType, source_loc, "unable to translate enum declaration", .{}); + try transEnumDecl(c, scope, enum_decl); + const name = c.decl_table.get(@ptrToInt(enum_decl.getCanonicalDecl())).?; + return 
Tag.identifier.create(c.arena, name); }, .Elaborated => { const elaborated_ty = @ptrCast(*const clang.ElaboratedType, ty); - return transQualType(c, elaborated_ty.getNamedType(), source_loc); + return transQualType(c, scope, elaborated_ty.getNamedType(), source_loc); }, .Decayed => { const decayed_ty = @ptrCast(*const clang.DecayedType, ty); - return transQualType(c, decayed_ty.getDecayedType(), source_loc); + return transQualType(c, scope, decayed_ty.getDecayedType(), source_loc); }, .Attributed => { const attributed_ty = @ptrCast(*const clang.AttributedType, ty); - return transQualType(c, attributed_ty.getEquivalentType(), source_loc); + return transQualType(c, scope, attributed_ty.getEquivalentType(), source_loc); }, .MacroQualified => { const macroqualified_ty = @ptrCast(*const clang.MacroQualifiedType, ty); - return transQualType(c, macroqualified_ty.getModifiedType(), source_loc); + return transQualType(c, scope, macroqualified_ty.getModifiedType(), source_loc); }, else => { const type_name = c.str(ty.getTypeClassName()); @@ -3890,6 +3899,7 @@ fn finishTransFnProto( ) !*ast.Payload.Func { const is_export = if (fn_decl_context) |ctx| ctx.is_export else false; const is_extern = if (fn_decl_context) |ctx| !ctx.has_body else false; + const scope = &c.global_scope.base; // TODO check for always_inline attribute // TODO check for align attribute @@ -3914,7 +3924,7 @@ fn finishTransFnProto( break :blk param_name; } else null; - const type_node = try transQualType(c, param_qt, source_loc); + const type_node = try transQualType(c, scope, param_qt, source_loc); fn_params.addOneAssumeCapacity().* = .{ .is_noalias = is_noalias, @@ -3955,9 +3965,9 @@ fn finishTransFnProto( // convert primitive c_void to actual void (only for return type) break :blk Tag.void_type.init(); } else { - break :blk transQualType(c, return_qt, source_loc) catch |err| switch (err) { + break :blk transQualType(c, scope, return_qt, source_loc) catch |err| switch (err) { error.UnsupportedType => { - 
try warn(c, &c.global_scope.base, source_loc, "unsupported function proto return type", .{}); + try warn(c, scope, source_loc, "unsupported function proto return type", .{}); return err; }, error.OutOfMemory => |e| return e, diff --git a/src/translate_c/ast.zig b/src/translate_c/ast.zig index 6d22c7a270..25cbe1bf3f 100644 --- a/src/translate_c/ast.zig +++ b/src/translate_c/ast.zig @@ -14,6 +14,8 @@ pub const Node = extern union { ptr_otherwise: *Payload, pub const Tag = enum { + /// Declarations add themselves to the correct scopes and should not be emitted as this tag. + declaration, null_literal, undefined_literal, /// opaque {} @@ -186,6 +188,7 @@ pub const Node = extern union { /// pub const name = init; pub_var_simple, /// pub const enum_field_name = @enumToInt(enum_name.field_name); + pub_enum_redecl, enum_redecl, /// pub inline fn name(params) return_type body @@ -201,6 +204,7 @@ pub const Node = extern union { pub fn Type(comptime t: Tag) type { return switch (t) { + .declaration, .null_literal, .undefined_literal, .opaque_literal, @@ -325,7 +329,7 @@ pub const Node = extern union { .arg_redecl, .alias, .fail_decl => Payload.ArgRedecl, .log2_int_type => Payload.Log2IntType, .var_simple, .pub_var_simple => Payload.SimpleVarDecl, - .enum_redecl => Payload.EnumRedecl, + .pub_enum_redecl, .enum_redecl => Payload.EnumRedecl, .array_filler => Payload.ArrayFiller, .pub_inline_fn => Payload.PubInlineFn, .field_access => Payload.FieldAccess, @@ -742,6 +746,7 @@ fn renderNodes(c: *Context, nodes: []const Node) Allocator.Error!NodeSubRange { fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { switch (node.tag()) { + .declaration => unreachable, .warning => { const payload = node.castTag(.warning).?.data; try c.buf.appendSlice(payload); @@ -1585,9 +1590,9 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { }, }); }, - .enum_redecl => { - const payload = node.castTag(.enum_redecl).?.data; - _ = try c.addToken(.keyword_pub, "pub"); + 
.pub_enum_redecl, .enum_redecl => { + const payload = @fieldParentPtr(Payload.EnumRedecl, "base", node.ptr_otherwise).data; + if (node.tag() == .pub_enum_redecl) _ = try c.addToken(.keyword_pub, "pub"); const const_tok = try c.addToken(.keyword_const, "const"); _ = try c.addIdentifier(payload.enum_val_name); _ = try c.addToken(.equal, "="); @@ -1878,6 +1883,7 @@ fn addSemicolonIfNotBlock(c: *Context, node: Node) !void { fn renderNodeGrouped(c: *Context, node: Node) !NodeIndex { switch (node.tag()) { + .declaration => unreachable, .null_literal, .undefined_literal, .true_literal, @@ -1991,6 +1997,7 @@ fn renderNodeGrouped(c: *Context, node: Node) !NodeIndex { .alias, .var_simple, .pub_var_simple, + .pub_enum_redecl, .enum_redecl, .@"while", .@"switch", diff --git a/test/translate_c.zig b/test/translate_c.zig index 79361bf9bf..26d35a4c2e 100644 --- a/test/translate_c.zig +++ b/test/translate_c.zig @@ -3,6 +3,136 @@ const std = @import("std"); const CrossTarget = std.zig.CrossTarget; pub fn addCases(cases: *tests.TranslateCContext) void { + cases.add("scoped enum", + \\void foo() { + \\ enum Foo { + \\ A, + \\ B, + \\ C, + \\ }; + \\ enum Foo a = B; + \\ { + \\ enum Foo { + \\ A, + \\ B, + \\ C, + \\ }; + \\ enum Foo a = B; + \\ } + \\} + , &[_][]const u8{ + \\pub export fn foo() void { + \\ const enum_Foo = extern enum(c_int) { + \\ A, + \\ B, + \\ C, + \\ _, + \\ }; + \\ const A = @enumToInt(enum_Foo.A); + \\ const B = @enumToInt(enum_Foo.B); + \\ const C = @enumToInt(enum_Foo.C); + \\ var a: enum_Foo = @intToEnum(enum_Foo, B); + \\ { + \\ const enum_Foo = extern enum(c_int) { + \\ A, + \\ B, + \\ C, + \\ _, + \\ }; + \\ const A_2 = @enumToInt(enum_Foo.A); + \\ const B_3 = @enumToInt(enum_Foo.B); + \\ const C_4 = @enumToInt(enum_Foo.C); + \\ var a_5: enum_Foo = @intToEnum(enum_Foo, B_3); + \\ } + \\} + }); + + cases.add("scoped record", + \\void foo() { + \\ struct Foo { + \\ int A; + \\ int B; + \\ int C; + \\ }; + \\ struct Foo a = {0}; + \\ { + \\ struct Foo { + 
\\ int A; + \\ int B; + \\ int C; + \\ }; + \\ struct Foo a = {0}; + \\ } + \\} + , &[_][]const u8{ + \\pub export fn foo() void { + \\ const struct_Foo = extern struct { + \\ A: c_int, + \\ B: c_int, + \\ C: c_int, + \\ }; + \\ var a: struct_Foo = struct_Foo{ + \\ .A = @as(c_int, 0), + \\ .B = 0, + \\ .C = 0, + \\ }; + \\ { + \\ const struct_Foo_1 = extern struct { + \\ A: c_int, + \\ B: c_int, + \\ C: c_int, + \\ }; + \\ var a_2: struct_Foo_1 = struct_Foo_1{ + \\ .A = @as(c_int, 0), + \\ .B = 0, + \\ .C = 0, + \\ }; + \\ } + \\} + }); + + cases.add("scoped typedef", + \\void foo() { + \\ typedef union { + \\ int A; + \\ int B; + \\ int C; + \\ } Foo; + \\ Foo a = {0}; + \\ { + \\ typedef union { + \\ int A; + \\ int B; + \\ int C; + \\ } Foo; + \\ Foo a = {0}; + \\ } + \\} + , &[_][]const u8{ + \\pub export fn foo() void { + \\ const union_unnamed_1 = extern union { + \\ A: c_int, + \\ B: c_int, + \\ C: c_int, + \\ }; + \\ const Foo = union_unnamed_1; + \\ var a: Foo = Foo{ + \\ .A = @as(c_int, 0), + \\ }; + \\ { + \\ const union_unnamed_2 = extern union { + \\ A: c_int, + \\ B: c_int, + \\ C: c_int, + \\ }; + \\ const Foo_1 = union_unnamed_2; + \\ var a_2: Foo_1 = Foo_1{ + \\ .A = @as(c_int, 0), + \\ }; + \\ } + \\} + }); + cases.add("use cast param as macro fn return type", \\#define MEM_PHYSICAL_TO_K0(x) (void*)((u32)(x) + SYS_BASE_CACHED) , &[_][]const u8{ @@ -62,7 +192,6 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\pub export var bar: f32 = @import("std").mem.zeroes(f32); \\threadlocal var bar_1: c_int = 2; \\pub export fn foo() c_int { - \\ _ = bar_1; \\ return 0; \\} }); @@ -579,9 +708,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\ '2', \\ 0, \\}; - \\pub export fn foo() void { - \\ _ = v2; - \\} + \\pub export fn foo() void {} }); cases.add("simple function definition", @@ -1355,11 +1482,11 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\extern enum enum_ty my_enum; \\enum enum_ty { FOO }; , &[_][]const u8{ - 
\\pub const FOO = @enumToInt(enum_enum_ty.FOO); \\pub const enum_enum_ty = extern enum(c_int) { \\ FOO, \\ _, \\}; + \\pub const FOO = @enumToInt(enum_enum_ty.FOO); \\pub extern var my_enum: enum_enum_ty; }); @@ -1501,48 +1628,48 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\ p, \\}; , &[_][]const u8{ - \\pub const a = @enumToInt(enum_unnamed_1.a); - \\pub const b = @enumToInt(enum_unnamed_1.b); - \\pub const c = @enumToInt(enum_unnamed_1.c); \\const enum_unnamed_1 = extern enum(c_int) { \\ a, \\ b, \\ c, \\ _, \\}; + \\pub const a = @enumToInt(enum_unnamed_1.a); + \\pub const b = @enumToInt(enum_unnamed_1.b); + \\pub const c = @enumToInt(enum_unnamed_1.c); \\pub const d = enum_unnamed_1; - \\pub const e = @enumToInt(enum_unnamed_2.e); - \\pub const f = @enumToInt(enum_unnamed_2.f); - \\pub const g = @enumToInt(enum_unnamed_2.g); \\const enum_unnamed_2 = extern enum(c_int) { \\ e = 0, \\ f = 4, \\ g = 5, \\ _, \\}; + \\pub const e = @enumToInt(enum_unnamed_2.e); + \\pub const f = @enumToInt(enum_unnamed_2.f); + \\pub const g = @enumToInt(enum_unnamed_2.g); \\pub export var h: enum_unnamed_2 = @intToEnum(enum_unnamed_2, e); - \\pub const i = @enumToInt(enum_unnamed_3.i); - \\pub const j = @enumToInt(enum_unnamed_3.j); - \\pub const k = @enumToInt(enum_unnamed_3.k); \\const enum_unnamed_3 = extern enum(c_int) { \\ i, \\ j, \\ k, \\ _, \\}; + \\pub const i = @enumToInt(enum_unnamed_3.i); + \\pub const j = @enumToInt(enum_unnamed_3.j); + \\pub const k = @enumToInt(enum_unnamed_3.k); \\pub const struct_Baz = extern struct { \\ l: enum_unnamed_3, \\ m: d, \\}; - \\pub const n = @enumToInt(enum_i.n); - \\pub const o = @enumToInt(enum_i.o); - \\pub const p = @enumToInt(enum_i.p); \\pub const enum_i = extern enum(c_int) { \\ n, \\ o, \\ p, \\ _, \\}; + \\pub const n = @enumToInt(enum_i.n); + \\pub const o = @enumToInt(enum_i.o); + \\pub const p = @enumToInt(enum_i.p); , \\pub const Baz = struct_Baz; }); @@ -1989,13 +2116,13 @@ pub fn addCases(cases: 
*tests.TranslateCContext) void { \\ Two, \\}; , &[_][]const u8{ - \\pub const One = @enumToInt(enum_unnamed_1.One); - \\pub const Two = @enumToInt(enum_unnamed_1.Two); \\const enum_unnamed_1 = extern enum(c_int) { \\ One, \\ Two, \\ _, \\}; + \\pub const One = @enumToInt(enum_unnamed_1.One); + \\pub const Two = @enumToInt(enum_unnamed_1.Two); }); cases.add("c style cast", @@ -2093,15 +2220,15 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\ return ((((((((((e + f) + g) + h) + i) + j) + k) + l) + m) + o) + p); \\} , &[_][]const u8{ - \\pub const FooA = @enumToInt(enum_Foo.A); - \\pub const FooB = @enumToInt(enum_Foo.B); - \\pub const FooC = @enumToInt(enum_Foo.C); \\pub const enum_Foo = extern enum(c_int) { \\ A, \\ B, \\ C, \\ _, \\}; + \\pub const FooA = @enumToInt(enum_Foo.A); + \\pub const FooB = @enumToInt(enum_Foo.B); + \\pub const FooC = @enumToInt(enum_Foo.C); \\pub const SomeTypedef = c_int; \\pub export fn and_or_non_bool(arg_a: c_int, arg_b: f32, arg_c: ?*c_void) c_int { \\ var a = arg_a; @@ -2147,6 +2274,8 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\ B, \\ _, \\}; + \\pub const BarA = @enumToInt(enum_Bar.A); + \\pub const BarB = @enumToInt(enum_Bar.B); \\pub extern fn func(a: [*c]struct_Foo, b: [*c][*c]enum_Bar) void; , \\pub const Foo = struct_Foo; @@ -2413,6 +2542,9 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\ C, \\ _, \\}; + \\pub const A = @enumToInt(enum_SomeEnum.A); + \\pub const B = @enumToInt(enum_SomeEnum.B); + \\pub const C = @enumToInt(enum_SomeEnum.C); \\pub export fn if_none_bool(arg_a: c_int, arg_b: f32, arg_c: ?*c_void, arg_d: enum_SomeEnum) c_int { \\ var a = arg_a; \\ var b = arg_b; @@ -2872,15 +3004,15 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\ Foo1, \\}; , &[_][]const u8{ - \\pub const FooA = @enumToInt(enum_Foo.A); - \\pub const FooB = @enumToInt(enum_Foo.B); - \\pub const Foo1 = @enumToInt(enum_Foo.@"1"); \\pub const enum_Foo = extern enum(c_int) { \\ A = 2, \\ B = 5, 
\\ @"1" = 6, \\ _, \\}; + \\pub const FooA = @enumToInt(enum_Foo.A); + \\pub const FooB = @enumToInt(enum_Foo.B); + \\pub const Foo1 = @enumToInt(enum_Foo.@"1"); , \\pub const Foo = enum_Foo; }); From 3717bedb4e3198fe2ded167b41f9b0441e817b9c Mon Sep 17 00:00:00 2001 From: Evan Haas Date: Sat, 13 Feb 2021 16:24:02 -0800 Subject: [PATCH 097/173] translate-c: Add test for using correct block label for StmtExpr The previous iteration of translate-c used an incorrect block label in the break statement for a translated C statement expression. This adds a test to ensure the correct label is used in the new intermediate AST version of translate-c. --- test/run_translated_c.zig | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/test/run_translated_c.zig b/test/run_translated_c.zig index b8af201e36..04095d3b71 100644 --- a/test/run_translated_c.zig +++ b/test/run_translated_c.zig @@ -922,4 +922,13 @@ pub fn addCases(cases: *tests.RunTranslatedCContext) void { \\ return 0; \\} , ""); + + cases.add("Use correct break label for statement expression in nested scope", + \\#include + \\int main(void) { + \\ int x = ({1, ({2; 3;});}); + \\ if (x != 3) abort(); + \\ return 0; + \\} + , ""); } From 7ca53bdfaab59e61c38d0bedb6b16739904f7519 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Wed, 17 Feb 2021 22:11:26 +0200 Subject: [PATCH 098/173] translate-c: improve switch translation --- src/clang.zig | 6 +- src/translate_c.zig | 363 ++++++++++++++++++++-------------------- src/translate_c/ast.zig | 111 ++++++++---- test/translate_c.zig | 76 ++++----- 4 files changed, 295 insertions(+), 261 deletions(-) diff --git a/src/clang.zig b/src/clang.zig index fbb955205b..5adb858b90 100644 --- a/src/clang.zig +++ b/src/clang.zig @@ -273,12 +273,12 @@ pub const CompoundAssignOperator = opaque { pub const CompoundStmt = opaque { pub const body_begin = ZigClangCompoundStmt_body_begin; - extern fn ZigClangCompoundStmt_body_begin(*const CompoundStmt) const_body_iterator; + extern fn 
ZigClangCompoundStmt_body_begin(*const CompoundStmt) ConstBodyIterator; pub const body_end = ZigClangCompoundStmt_body_end; - extern fn ZigClangCompoundStmt_body_end(*const CompoundStmt) const_body_iterator; + extern fn ZigClangCompoundStmt_body_end(*const CompoundStmt) ConstBodyIterator; - pub const const_body_iterator = [*]const *Stmt; + pub const ConstBodyIterator = [*]const *Stmt; }; pub const ConditionalOperator = opaque {}; diff --git a/src/translate_c.zig b/src/translate_c.zig index 5ac60bffae..47c601677e 100644 --- a/src/translate_c.zig +++ b/src/translate_c.zig @@ -31,7 +31,6 @@ const Scope = struct { parent: ?*Scope, const Id = enum { - @"switch", block, root, condition, @@ -39,17 +38,6 @@ const Scope = struct { do_loop, }; - /// Represents an in-progress Node.Switch. This struct is stack-allocated. - /// When it is deinitialized, it produces an Node.Switch which is allocated - /// into the main arena. - const Switch = struct { - base: Scope, - pending_block: Block, - cases: std.ArrayList(Node), - switch_label: ?[]const u8, - default_label: ?[]const u8, - }; - /// Used for the scope of condition expressions, for example `if (cond)`. /// The block is lazily initialised because it is only needed for rare /// cases of comma operators being used. 
@@ -230,7 +218,7 @@ const Scope = struct { return switch (scope.id) { .root => return name, .block => @fieldParentPtr(Block, "base", scope).getAlias(name), - .@"switch", .loop, .do_loop, .condition => scope.parent.?.getAlias(name), + .loop, .do_loop, .condition => scope.parent.?.getAlias(name), }; } @@ -238,7 +226,7 @@ const Scope = struct { return switch (scope.id) { .root => @fieldParentPtr(Root, "base", scope).contains(name), .block => @fieldParentPtr(Block, "base", scope).contains(name), - .@"switch", .loop, .do_loop, .condition => scope.parent.?.contains(name), + .loop, .do_loop, .condition => scope.parent.?.contains(name), }; } @@ -247,24 +235,12 @@ const Scope = struct { while (true) { switch (scope.id) { .root => unreachable, - .@"switch" => return scope, .loop, .do_loop => return scope, else => scope = scope.parent.?, } } } - fn getSwitch(inner: *Scope) *Scope.Switch { - var scope = inner; - while (true) { - switch (scope.id) { - .root => unreachable, - .@"switch" => return @fieldParentPtr(Switch, "base", scope), - else => scope = scope.parent.?, - } - } - } - /// Appends a node to the first block scope if inside a function, or to the root tree if not. 
fn appendNode(inner: *Scope, node: Node) !void { var scope = inner; @@ -570,7 +546,7 @@ fn visitFnDecl(c: *Context, fn_decl: *const clang.FunctionDecl) Error!void { } const casted_body = @ptrCast(*const clang.CompoundStmt, body_stmt); - transCompoundStmtInline(c, &block_scope.base, casted_body, &block_scope) catch |err| switch (err) { + transCompoundStmtInline(c, casted_body, &block_scope) catch |err| switch (err) { error.OutOfMemory => |e| return e, error.UnsupportedTranslation, error.UnsupportedType, @@ -583,24 +559,10 @@ fn visitFnDecl(c: *Context, fn_decl: *const clang.FunctionDecl) Error!void { }; // add return statement if the function didn't have one blk: { - if (fn_ty.getNoReturnAttr()) break :blk; - if (isCVoid(return_qt)) break :blk; - - if (block_scope.statements.items.len > 0) { - var last = block_scope.statements.items[block_scope.statements.items.len - 1]; - while (true) { - switch (last.tag()) { - .block => { - const block = last.castTag(.block).?; - if (block.data.stmts.len == 0) break; - - last = block.data.stmts[block.data.stmts.len - 1]; - }, - // no extra return needed - .@"return", .return_void => break :blk, - else => break, - } - } + const maybe_body = try block_scope.complete(c); + if (fn_ty.getNoReturnAttr() or isCVoid(return_qt) or maybe_body.isNoreturn(false)) { + proto_node.data.body = maybe_body; + break :blk; } const rhs = transZeroInitExpr(c, scope, fn_decl_loc, return_qt.getTypePtr()) catch |err| switch (err) { @@ -616,9 +578,9 @@ fn visitFnDecl(c: *Context, fn_decl: *const clang.FunctionDecl) Error!void { }; const ret = try Tag.@"return".create(c.arena, rhs); try block_scope.statements.append(ret); + proto_node.data.body = try block_scope.complete(c); } - proto_node.data.body = try block_scope.complete(c); return addTopLevelDecl(c, fn_name, Node.initPayload(&proto_node.base)); } @@ -1079,7 +1041,7 @@ fn transStmt( return Tag.empty_block.init(); }, .ContinueStmtClass => return Tag.@"continue".init(), - .BreakStmtClass => return 
transBreak(c, scope), + .BreakStmtClass => return Tag.@"break".init(), .ForStmtClass => return transForLoop(c, scope, @ptrCast(*const clang.ForStmt, stmt)), .FloatingLiteralClass => return transFloatingLiteral(c, scope, @ptrCast(*const clang.FloatingLiteral, stmt), result_used), .ConditionalOperatorClass => { @@ -1089,8 +1051,9 @@ fn transStmt( return transBinaryConditionalOperator(c, scope, @ptrCast(*const clang.BinaryConditionalOperator, stmt), result_used); }, .SwitchStmtClass => return transSwitch(c, scope, @ptrCast(*const clang.SwitchStmt, stmt)), - .CaseStmtClass => return transCase(c, scope, @ptrCast(*const clang.CaseStmt, stmt)), - .DefaultStmtClass => return transDefault(c, scope, @ptrCast(*const clang.DefaultStmt, stmt)), + .CaseStmtClass, .DefaultStmtClass => { + return fail(c, error.UnsupportedTranslation, stmt.getBeginLoc(), "TODO complex switch", .{}); + }, .ConstantExprClass => return transConstantExpr(c, scope, @ptrCast(*const clang.Expr, stmt), result_used), .PredefinedExprClass => return transPredefinedExpr(c, scope, @ptrCast(*const clang.PredefinedExpr, stmt), result_used), .CharacterLiteralClass => return transCharLiteral(c, scope, @ptrCast(*const clang.CharacterLiteral, stmt), result_used, .with_as), @@ -1107,13 +1070,7 @@ fn transStmt( return maybeSuppressResult(c, scope, result_used, expr); }, else => { - return fail( - c, - error.UnsupportedTranslation, - stmt.getBeginLoc(), - "TODO implement translation of stmt class {s}", - .{@tagName(sc)}, - ); + return fail(c, error.UnsupportedTranslation, stmt.getBeginLoc(), "TODO implement translation of stmt class {s}", .{@tagName(sc)}); }, } } @@ -1255,14 +1212,13 @@ fn transBinaryOperator( fn transCompoundStmtInline( c: *Context, - parent_scope: *Scope, stmt: *const clang.CompoundStmt, block: *Scope.Block, ) TransError!void { var it = stmt.body_begin(); const end_it = stmt.body_end(); while (it != end_it) : (it += 1) { - const result = try transStmt(c, parent_scope, it[0], .unused); + const result = 
try transStmt(c, &block.base, it[0], .unused); if (result.tag() == .declaration) continue; try block.statements.append(result); } @@ -1271,7 +1227,7 @@ fn transCompoundStmtInline( fn transCompoundStmt(c: *Context, scope: *Scope, stmt: *const clang.CompoundStmt) TransError!Node { var block_scope = try Scope.Block.init(c, scope, false); defer block_scope.deinit(); - try transCompoundStmtInline(c, &block_scope.base, stmt, &block_scope); + try transCompoundStmtInline(c, stmt, &block_scope); return try block_scope.complete(c); } @@ -2162,7 +2118,7 @@ fn transDoWhileLoop( defer cond_scope.deinit(); const cond = try transBoolExpr(c, &cond_scope.base, @ptrCast(*const clang.Expr, stmt.getCond()), .used); const if_not_break = switch (cond.tag()) { - .false_literal => try Tag.@"break".create(c.arena, null), + .false_literal => Tag.@"break".init(), .true_literal => { const body_node = try transStmt(c, scope, stmt.getBody(), .unused); return Tag.while_true.create(c.arena, body_node); @@ -2263,133 +2219,189 @@ fn transSwitch( }; defer cond_scope.deinit(); const switch_expr = try transExpr(c, &cond_scope.base, stmt.getCond(), .used); - const switch_node = try c.arena.create(ast.Payload.Switch); - switch_node.* = .{ - .base = .{ .tag = .@"switch" }, - .data = .{ - .cond = switch_expr, - .cases = undefined, // set later - }, - }; - var switch_scope = Scope.Switch{ - .base = .{ - .id = .@"switch", - .parent = scope, - }, - .cases = std.ArrayList(Node).init(c.gpa), - .pending_block = undefined, - .default_label = null, - .switch_label = null, - }; - defer switch_scope.cases.deinit(); + var cases = std.ArrayList(Node).init(c.gpa); + defer cases.deinit(); + var has_default = false; - // tmp block that all statements will go before being picked up by a case or default - var block_scope = try Scope.Block.init(c, &switch_scope.base, false); - defer block_scope.deinit(); + const body = stmt.getBody(); + assert(body.getStmtClass() == .CompoundStmtClass); + const compound_stmt = 
@ptrCast(*const clang.CompoundStmt, body); + var it = compound_stmt.body_begin(); + const end_it = compound_stmt.body_end(); + // Iterate over switch body and collect all cases. + // Fallthrough is handled by duplicating statements. + while (it != end_it) : (it += 1) { + switch (it[0].getStmtClass()) { + .CaseStmtClass => { + var items = std.ArrayList(Node).init(c.gpa); + defer items.deinit(); + const sub = try transCaseStmt(c, scope, it[0], &items); + const res = try transSwitchProngStmt(c, scope, sub, it, end_it); - // Note that we do not defer a deinit here; the switch_scope.pending_block field - // has its own memory management. This resource is freed inside `transCase` and - // then the final pending_block is freed at the bottom of this function with - // pending_block.deinit(). - switch_scope.pending_block = try Scope.Block.init(c, scope, false); - try switch_scope.pending_block.statements.append(Node.initPayload(&switch_node.base)); + if (items.items.len == 0) { + has_default = true; + const switch_else = try Tag.switch_else.create(c.arena, res); + try cases.append(switch_else); + } else { + const switch_prong = try Tag.switch_prong.create(c.arena, .{ + .cases = try c.arena.dupe(Node, items.items), + .cond = res, + }); + try cases.append(switch_prong); + } + }, + .DefaultStmtClass => { + has_default = true; + const default_stmt = @ptrCast(*const clang.DefaultStmt, it[0]); - const last = try transStmt(c, &block_scope.base, stmt.getBody(), .unused); + var sub = default_stmt.getSubStmt(); + while (true) switch (sub.getStmtClass()) { + .CaseStmtClass => sub = @ptrCast(*const clang.CaseStmt, sub).getSubStmt(), + .DefaultStmtClass => sub = @ptrCast(*const clang.DefaultStmt, sub).getSubStmt(), + else => break, + }; - // take all pending statements - const last_block_stmts = last.castTag(.block).?.data.stmts; - try switch_scope.pending_block.statements.ensureCapacity( - switch_scope.pending_block.statements.items.len + last_block_stmts.len, - ); - for 
(last_block_stmts) |n| { - switch_scope.pending_block.statements.appendAssumeCapacity(n); + const res = try transSwitchProngStmt(c, scope, sub, it, end_it); + + const switch_else = try Tag.switch_else.create(c.arena, res); + try cases.append(switch_else); + }, + else => {}, // collected in transSwitchProngStmt + } } - if (switch_scope.default_label == null) { - switch_scope.switch_label = try block_scope.makeMangledName(c, "switch"); - } - if (switch_scope.switch_label) |l| { - switch_scope.pending_block.label = l; - } - if (switch_scope.default_label == null) { - const else_prong = try Tag.switch_else.create( - c.arena, - try Tag.@"break".create(c.arena, switch_scope.switch_label.?), - ); - try switch_scope.cases.append(else_prong); + if (!has_default) { + const else_prong = try Tag.switch_else.create(c.arena, Tag.@"break".init()); + try cases.append(else_prong); } - switch_node.data.cases = try c.arena.dupe(Node, switch_scope.cases.items); - const result_node = try switch_scope.pending_block.complete(c); - switch_scope.pending_block.deinit(); - return result_node; -} - -fn transCase( - c: *Context, - scope: *Scope, - stmt: *const clang.CaseStmt, -) TransError!Node { - const block_scope = try scope.findBlockScope(c); - const switch_scope = scope.getSwitch(); - const label = try block_scope.makeMangledName(c, "case"); - - const expr = if (stmt.getRHS()) |rhs| blk: { - const lhs_node = try transExpr(c, scope, stmt.getLHS(), .used); - const rhs_node = try transExpr(c, scope, rhs, .used); - - break :blk try Tag.ellipsis3.create(c.arena, .{ .lhs = lhs_node, .rhs = rhs_node }); - } else - try transExpr(c, scope, stmt.getLHS(), .used); - - const switch_prong = try Tag.switch_prong.create(c.arena, .{ - .lhs = expr, - .rhs = try Tag.@"break".create(c.arena, label), + return Tag.@"switch".create(c.arena, .{ + .cond = switch_expr, + .cases = try c.arena.dupe(Node, cases.items), }); - try switch_scope.cases.append(switch_prong); - - switch_scope.pending_block.label = label; - 
- // take all pending statements - try switch_scope.pending_block.statements.appendSlice(block_scope.statements.items); - block_scope.statements.shrinkAndFree(0); - - const pending_node = try switch_scope.pending_block.complete(c); - switch_scope.pending_block.deinit(); - switch_scope.pending_block = try Scope.Block.init(c, scope, false); - - try switch_scope.pending_block.statements.append(pending_node); - - return transStmt(c, scope, stmt.getSubStmt(), .unused); } -fn transDefault( +/// Collects all items for this case, returns the first statement after the labels. +/// If items ends up empty, the prong should be translated as an else. +fn transCaseStmt(c: *Context, scope: *Scope, stmt: *const clang.Stmt, items: *std.ArrayList(Node)) TransError!*const clang.Stmt { + var sub = stmt; + var seen_default = false; + while (true) { + switch (sub.getStmtClass()) { + .DefaultStmtClass => { + seen_default = true; + items.items.len = 0; + const default_stmt = @ptrCast(*const clang.DefaultStmt, sub); + sub = default_stmt.getSubStmt(); + }, + .CaseStmtClass => { + const case_stmt = @ptrCast(*const clang.CaseStmt, sub); + + if (seen_default) { + items.items.len = 0; + sub = case_stmt.getSubStmt(); + continue; + } + + const expr = if (case_stmt.getRHS()) |rhs| blk: { + const lhs_node = try transExprCoercing(c, scope, case_stmt.getLHS(), .used); + const rhs_node = try transExprCoercing(c, scope, rhs, .used); + + break :blk try Tag.ellipsis3.create(c.arena, .{ .lhs = lhs_node, .rhs = rhs_node }); + } else + try transExprCoercing(c, scope, case_stmt.getLHS(), .used); + + try items.append(expr); + sub = case_stmt.getSubStmt(); + }, + else => return sub, + } + } +} + +/// Collects all statements seen by this case into a block. +/// Avoids creating a block if the first statement is a break or return. 
+fn transSwitchProngStmt( c: *Context, scope: *Scope, - stmt: *const clang.DefaultStmt, + stmt: *const clang.Stmt, + parent_it: clang.CompoundStmt.ConstBodyIterator, + parent_end_it: clang.CompoundStmt.ConstBodyIterator, ) TransError!Node { - const block_scope = try scope.findBlockScope(c); - const switch_scope = scope.getSwitch(); - switch_scope.default_label = try block_scope.makeMangledName(c, "default"); + switch (stmt.getStmtClass()) { + .BreakStmtClass => return Tag.empty_block.init(), + .ReturnStmtClass => return transStmt(c, scope, stmt, .unused), + .CaseStmtClass, .DefaultStmtClass => unreachable, + else => { + var block_scope = try Scope.Block.init(c, scope, false); + defer block_scope.deinit(); - const else_prong = try Tag.switch_else.create( - c.arena, - try Tag.@"break".create(c.arena, switch_scope.default_label.?), - ); - try switch_scope.cases.append(else_prong); - switch_scope.pending_block.label = switch_scope.default_label.?; + // we do not need to translate `stmt` since it is the first stmt of `parent_it` + try transSwitchProngStmtInline(c, &block_scope, parent_it, parent_end_it); + return try block_scope.complete(c); + }, + } +} - // take all pending statements - try switch_scope.pending_block.statements.appendSlice(block_scope.statements.items); - block_scope.statements.shrinkAndFree(0); +/// Collects all statements seen by this case into a block. 
+fn transSwitchProngStmtInline( + c: *Context, + block: *Scope.Block, + start_it: clang.CompoundStmt.ConstBodyIterator, + end_it: clang.CompoundStmt.ConstBodyIterator, +) TransError!void { + var it = start_it; + while (it != end_it) : (it += 1) { + switch (it[0].getStmtClass()) { + .ReturnStmtClass => { + const result = try transStmt(c, &block.base, it[0], .unused); + try block.statements.append(result); + return; + }, + .BreakStmtClass => return, + .CaseStmtClass => { + var sub = @ptrCast(*const clang.CaseStmt, it[0]).getSubStmt(); + while (true) switch (sub.getStmtClass()) { + .CaseStmtClass => sub = @ptrCast(*const clang.CaseStmt, sub).getSubStmt(), + .DefaultStmtClass => sub = @ptrCast(*const clang.DefaultStmt, sub).getSubStmt(), + else => break, + }; + const result = try transStmt(c, &block.base, sub, .unused); + assert(result.tag() != .declaration); + try block.statements.append(result); + }, + .DefaultStmtClass => { + var sub = @ptrCast(*const clang.DefaultStmt, it[0]).getSubStmt(); + while (true) switch (sub.getStmtClass()) { + .CaseStmtClass => sub = @ptrCast(*const clang.CaseStmt, sub).getSubStmt(), + .DefaultStmtClass => sub = @ptrCast(*const clang.DefaultStmt, sub).getSubStmt(), + else => break, + }; + const result = try transStmt(c, &block.base, sub, .unused); + assert(result.tag() != .declaration); + try block.statements.append(result); + }, + .CompoundStmtClass => { + const compound_stmt = @ptrCast(*const clang.CompoundStmt, it[0]); + var child_block = try Scope.Block.init(c, &block.base, false); + defer child_block.deinit(); - const pending_node = try switch_scope.pending_block.complete(c); - switch_scope.pending_block.deinit(); - switch_scope.pending_block = try Scope.Block.init(c, scope, false); - try switch_scope.pending_block.statements.append(pending_node); - - return transStmt(c, scope, stmt.getSubStmt(), .unused); + try transCompoundStmtInline(c, compound_stmt, &child_block); + const result = try child_block.complete(c); + try 
block.statements.append(result); + if (result.isNoreturn(true)) { + return; + } + }, + else => { + const result = try transStmt(c, &block.base, it[0], .unused); + if (result.tag() == .declaration) continue; + try block.statements.append(result); + }, + } + } + return; } fn transConstantExpr(c: *Context, scope: *Scope, expr: *const clang.Expr, used: ResultUsed) TransError!Node { @@ -3025,19 +3037,6 @@ fn transCPtrCast( } } -fn transBreak(c: *Context, scope: *Scope) TransError!Node { - const break_scope = scope.getBreakableScope(); - const label_text: ?[]const u8 = if (break_scope.id == .@"switch") blk: { - const swtch = @fieldParentPtr(Scope.Switch, "base", break_scope); - const block_scope = try scope.findBlockScope(c); - swtch.switch_label = try block_scope.makeMangledName(c, "switch"); - break :blk swtch.switch_label; - } else - null; - - return Tag.@"break".create(c.arena, label_text); -} - fn transFloatingLiteral(c: *Context, scope: *Scope, stmt: *const clang.FloatingLiteral, used: ResultUsed) TransError!Node { // TODO use something more accurate var dbl = stmt.getValueAsApproximateDouble(); diff --git a/src/translate_c/ast.zig b/src/translate_c/ast.zig index 25cbe1bf3f..2306d1c36f 100644 --- a/src/translate_c/ast.zig +++ b/src/translate_c/ast.zig @@ -30,6 +30,7 @@ pub const Node = extern union { noreturn_type, @"anytype", @"continue", + @"break", /// pub usingnamespace @import("std").c.builtins; usingnamespace_builtins, // After this, the tag requires a payload. 
@@ -48,9 +49,8 @@ pub const Node = extern union { @"switch", /// else => operand, switch_else, - /// lhs => rhs, + /// items => body, switch_prong, - @"break", break_val, @"return", field_access, @@ -219,6 +219,7 @@ pub const Node = extern union { .noreturn_type, .@"anytype", .@"continue", + .@"break", => @compileError("Type Tag " ++ @tagName(t) ++ " has no payload"), .std_mem_zeroes, @@ -294,7 +295,6 @@ pub const Node = extern union { .int_to_ptr, .array_cat, .ellipsis3, - .switch_prong, .assign, .align_cast, .array_access, @@ -312,8 +312,7 @@ pub const Node = extern union { => Payload.Value, .@"if" => Payload.If, .@"while" => Payload.While, - .@"switch", .array_init => Payload.Switch, - .@"break" => Payload.Break, + .@"switch", .array_init,.switch_prong => Payload.Switch, .break_val => Payload.BreakVal, .call => Payload.Call, .var_decl => Payload.VarDecl, @@ -377,6 +376,37 @@ pub const Node = extern union { std.debug.assert(@enumToInt(payload.tag) >= Tag.no_payload_count); return .{ .ptr_otherwise = payload }; } + + pub fn isNoreturn(node: Node, break_counts: bool) bool { + switch (node.tag()) { + .block => { + const block_node = node.castTag(.block).?; + if (block_node.data.stmts.len == 0) return false; + + const last = block_node.data.stmts[block_node.data.stmts.len - 1]; + return last.isNoreturn(break_counts); + }, + .@"switch" => { + const switch_node = node.castTag(.@"switch").?; + + for (switch_node.data.cases) |case| { + const body = if (case.castTag(.switch_else)) |some| + some.data + else if (case.castTag(.switch_prong)) |some| + some.data.cond + else unreachable; + + if (!body.isNoreturn(break_counts)) return false; + } + return true; + }, + .@"return", .return_void => return true, + .break_val, .@"break" => if (break_counts) return true, + else => {}, + } + return false; + } + }; pub const Payload = struct { @@ -434,11 +464,6 @@ pub const Payload = struct { }, }; - pub const Break = struct { - base: Payload, - data: ?[]const u8, - }; - pub const 
BreakVal = struct { base: Payload, data: struct { @@ -855,22 +880,14 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { .rhs = undefined, }, }), - .@"break" => { - const payload = node.castTag(.@"break").?.data; - const tok = try c.addToken(.keyword_break, "break"); - const break_label = if (payload) |some| blk: { - _ = try c.addToken(.colon, ":"); - break :blk try c.addIdentifier(some); - } else 0; - return c.addNode(.{ - .tag = .@"break", - .main_token = tok, - .data = .{ - .lhs = break_label, - .rhs = 0, - }, - }); - }, + .@"break" => return c.addNode(.{ + .tag = .@"break", + .main_token = try c.addToken(.keyword_break, "break"), + .data = .{ + .lhs = 0, + .rhs = 0, + }, + }), .break_val => { const payload = node.castTag(.break_val).?.data; const tok = try c.addToken(.keyword_break, "break"); @@ -1447,15 +1464,37 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { }, .switch_prong => { const payload = node.castTag(.switch_prong).?.data; - const item = try renderNode(c, payload.lhs); - return c.addNode(.{ - .tag = .switch_case_one, - .main_token = try c.addToken(.equal_angle_bracket_right, "=>"), - .data = .{ - .lhs = item, - .rhs = try renderNode(c, payload.rhs), - }, - }); + var items = try c.gpa.alloc(NodeIndex, std.math.max(payload.cases.len, 1)); + defer c.gpa.free(items); + items[0] = 0; + for (payload.cases) |item, i| { + if (i != 0) _ = try c.addToken(.comma, ","); + items[i] = try renderNode(c, item); + } + _ = try c.addToken(.r_brace, "}"); + if (items.len < 2) { + return c.addNode(.{ + .tag = .switch_case_one, + .main_token = try c.addToken(.equal_angle_bracket_right, "=>"), + .data = .{ + .lhs = items[0], + .rhs = try renderNode(c, payload.cond), + }, + }); + } else { + const span = try c.listToSpan(items); + return c.addNode(.{ + .tag = .switch_case, + .main_token = try c.addToken(.equal_angle_bracket_right, "=>"), + .data = .{ + .lhs = try c.addExtra(NodeSubRange{ + .start = span.start, + .end = span.end, + }), + 
.rhs = try renderNode(c, payload.cond), + }, + }); + } }, .opaque_literal => { const opaque_tok = try c.addToken(.keyword_opaque, "opaque"); @@ -1870,7 +1909,7 @@ fn addSemicolonIfNeeded(c: *Context, node: Node) !void { fn addSemicolonIfNotBlock(c: *Context, node: Node) !void { switch (node.tag()) { - .block, .empty_block, .block_single, => {}, + .block, .empty_block, .block_single => {}, .@"if" => { const payload = node.castTag(.@"if").?.data; if (payload.@"else") |some| diff --git a/test/translate_c.zig b/test/translate_c.zig index 26d35a4c2e..a2d1bf417e 100644 --- a/test/translate_c.zig +++ b/test/translate_c.zig @@ -276,27 +276,10 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\ } \\} , &[_][]const u8{ // TODO properly translate this - \\pub export fn main() c_int { - \\ var i: c_int = 2; - \\ @"switch": { - \\ case_1: { - \\ case: { - \\ switch (i) { - \\ @as(c_int, 0) => break :case, - \\ @as(c_int, 2) => break :case_1, - \\ else => break :@"switch", - \\ } - \\ } - \\ } - \\ { - \\ { - \\ i += @as(c_int, 2); - \\ } - \\ i += @as(c_int, 1); - \\ } - \\ } - \\ return 0; - \\} + \\source.h:5:13: warning: TODO complex switch + , + \\source.h:1:5: warning: unable to translate function, demoted to extern + \\pub extern fn main() c_int; }); cases.add("correct semicolon after infixop", @@ -2013,34 +1996,47 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\ default: \\ res = 3 * i; \\ break; + \\ break; \\ case 4: + \\ case 5: + \\ res = 69; + \\ { \\ res = 5; + \\ return; + \\ } + \\ case 6: + \\ res = 1; + \\ return; \\ } \\} , &[_][]const u8{ \\pub export fn switch_fn(arg_i: c_int) void { \\ var i = arg_i; \\ var res: c_int = 0; - \\ @"switch": { - \\ case_2: { - \\ default: { - \\ case_1: { - \\ case: { - \\ switch (i) { - \\ @as(c_int, 0) => break :case, - \\ @as(c_int, 1)...@as(c_int, 3) => break :case_1, - \\ else => break :default, - \\ @as(c_int, 4) => break :case_2, - \\ } - \\ } - \\ res = 1; - \\ } - \\ res = 2; - \\ } + \\ switch 
(i) { + \\ @as(c_int, 0) => { + \\ res = 1; + \\ res = 2; \\ res = @as(c_int, 3) * i; - \\ break :@"switch"; - \\ } - \\ res = 5; + \\ }, + \\ @as(c_int, 1)...@as(c_int, 3) => { + \\ res = 2; + \\ res = @as(c_int, 3) * i; + \\ }, + \\ else => { + \\ res = @as(c_int, 3) * i; + \\ }, + \\ @as(c_int, 4), @as(c_int, 5) => { + \\ res = 69; + \\ { + \\ res = 5; + \\ return; + \\ } + \\ }, + \\ @as(c_int, 6) => { + \\ res = 1; + \\ return; + \\ }, \\ } \\} }); From c66481f9bcc5c08b92ccfd5d38d9d72be83479c0 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Wed, 17 Feb 2021 20:59:21 -0700 Subject: [PATCH 099/173] astgen: finish updating expressions to new mem layout Now all that is left is compile errors and whatever regressions this branch introduced. --- lib/std/zig/ast.zig | 7 +- lib/std/zig/render.zig | 6 +- src/Module.zig | 16 +- src/astgen.zig | 723 ++++++++++++++++++++++++----------------- src/zir.zig | 18 +- src/zir_sema.zig | 12 +- 6 files changed, 456 insertions(+), 326 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 18743c19d9..02bca79986 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -1528,8 +1528,9 @@ pub const Tree = struct { pub fn switchCaseOne(tree: Tree, node: Node.Index) full.SwitchCase { const data = &tree.nodes.items(.data)[node]; + const values: *[1]Node.Index = &data.lhs; return tree.fullSwitchCase(.{ - .values = if (data.lhs == 0) &.{} else @ptrCast([*]Node.Index, &data.lhs)[0..1], + .values = if (data.lhs == 0) values[0..0] else values[0..1], .arrow_token = tree.nodes.items(.main_token)[node], .target_expr = data.rhs, }); @@ -2532,8 +2533,8 @@ pub const Node = struct { @"defer", /// lhs catch rhs /// lhs catch |err| rhs - /// main_token is the catch - /// payload is determined by looking at the prev tokens before rhs. + /// main_token is the `catch` keyword. + /// payload is determined by looking at the next token after the `catch` keyword. @"catch", /// `lhs.a`. main_token is the dot. 
rhs is the identifier token index. field_access, diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 3b5765507b..f7608fe61a 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -416,9 +416,9 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac return renderToken(ais, tree, rbracket, space); // ] }, - .slice_open => try renderSlice(ais, tree, tree.sliceOpen(node), space), - .slice => try renderSlice(ais, tree, tree.slice(node), space), - .slice_sentinel => try renderSlice(ais, tree, tree.sliceSentinel(node), space), + .slice_open => return renderSlice(ais, tree, tree.sliceOpen(node), space), + .slice => return renderSlice(ais, tree, tree.slice(node), space), + .slice_sentinel => return renderSlice(ais, tree, tree.sliceSentinel(node), space), .deref => { try renderExpression(ais, tree, datas[node].lhs, .none); diff --git a/src/Module.zig b/src/Module.zig index 2071ff671c..19566dee43 100644 --- a/src/Module.zig +++ b/src/Module.zig @@ -1163,16 +1163,16 @@ fn astgenAndSemaFn( } assert(param_type_i == param_count); } - if (fn_proto.lib_name) |lib_name| blk: { + if (fn_proto.lib_name) |lib_name_token| blk: { // TODO call std.zig.parseStringLiteral - const lib_name_str = mem.trim(u8, tree.tokenSlice(lib_name), "\""); + const lib_name_str = mem.trim(u8, tree.tokenSlice(lib_name_token), "\""); log.debug("extern fn symbol expected in lib '{s}'", .{lib_name_str}); const target = mod.comp.getTarget(); if (target_util.is_libc_lib_name(target, lib_name_str)) { if (!mod.comp.bin_file.options.link_libc) { return mod.failTok( &fn_type_scope.base, - lib_name, + lib_name_token, "dependency on libc must be explicitly specified in the build command", .{}, ); @@ -1183,7 +1183,7 @@ fn astgenAndSemaFn( if (!mod.comp.bin_file.options.link_libcpp) { return mod.failTok( &fn_type_scope.base, - lib_name, + lib_name_token, "dependency on libc++ must be explicitly specified in the build command", .{}, ); @@ -1193,17 +1193,17 @@ fn 
astgenAndSemaFn( if (!target.isWasm() and !mod.comp.bin_file.options.pic) { return mod.failTok( &fn_type_scope.base, - lib_name, + lib_name_token, "dependency on dynamic library '{s}' requires enabling Position Independent Code. Fixed by `-l{s}` or `-fPIC`.", - .{ lib_name, lib_name }, + .{ lib_name_str, lib_name_str }, ); } mod.comp.stage1AddLinkLib(lib_name_str) catch |err| { return mod.failTok( &fn_type_scope.base, - lib_name, + lib_name_token, "unable to add link lib '{s}': {s}", - .{ lib_name, @errorName(err) }, + .{ lib_name_str, @errorName(err) }, ); }; } diff --git a/src/astgen.zig b/src/astgen.zig index 56d1497f63..aef1b21a6c 100644 --- a/src/astgen.zig +++ b/src/astgen.zig @@ -327,6 +327,15 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) In .while_cont => return whileExpr(mod, scope, tree.whileCont(node)), .@"while" => return whileExpr(mod, scope, rl, tree.whileFull(node)), + .for_simple => return forExpr(mod, scope, rl, tree.forSimple(node)), + .@"for" => return forExpr(mod, scope, rl, tree.forFull(node)), + + // TODO handling these separately would actually be simpler & have fewer branches + // once we have a ZIR instruction for each of these 3 cases. 
+ .slice_open => return sliceExpr(mod, scope, rl, tree.sliceOpen(node)), + .slice => return sliceExpr(mod, scope, rl, tree.slice(node)), + .slice_sentinel => return sliceExpr(mod, scope, rl, tree.sliceSentinel(node)), + .deref => { const lhs = try expr(mod, scope, .none, node_datas[node].lhs); const src = token_starts[main_tokens[node]]; @@ -402,51 +411,122 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) In const statements = tree.extra_data[node_datas[node].lhs..node_datas[node].rhs]; return blockExpr(mod, scope, rl, node, statements); }, - - .@"break" => return rvalue(mod, scope, rl, try breakExpr(mod, scope, node)), - .@"continue" => return rvalue(mod, scope, rl, try continueExpr(mod, scope, node)), - .grouped_expression => return expr(mod, scope, rl, node.expr), - .array_type => return rvalue(mod, scope, rl, try arrayType(mod, scope, node)), - .array_type_sentinel => return rvalue(mod, scope, rl, try arrayTypeSentinel(mod, scope, node)), - .enum_literal => return rvalue(mod, scope, rl, try enumLiteral(mod, scope, node)), - .char_literal => return rvalue(mod, scope, rl, try charLiteral(mod, scope, node)), - .slice_type => return rvalue(mod, scope, rl, try sliceType(mod, scope, node)), - .error_union => return rvalue(mod, scope, rl, try typeInixOp(mod, scope, node, .error_union_type)), - .merge_error_sets => return rvalue(mod, scope, rl, try typeInixOp(mod, scope, node, .merge_error_sets)), - .anyframe_type => return rvalue(mod, scope, rl, try anyFrameType(mod, scope, node)), - .error_set_decl => return rvalue(mod, scope, rl, try errorSetDecl(mod, scope, node)), - .error_type => return rvalue(mod, scope, rl, try errorType(mod, scope, node)), - .@"for" => return forExpr(mod, scope, rl, node), + .enum_literal => { + const ident_token = main_tokens[node]; + const name = try mod.identifierTokenString(scope, ident_token); + const src = token_starts[ident_token]; + const result = try addZIRInst(mod, scope, src, zir.Inst.EnumLiteral, .{ 
.name = name }, .{}); + return rvalue(mod, scope, rl, result); + }, + .error_union => { + const error_set = try typeExpr(mod, scope, node_datas[node].lhs); + const payload = try typeExpr(mod, scope, node_datas[node].rhs); + const src = token_starts[main_tokens[node]]; + const result = try addZIRBinOp(mod, scope, src, .error_union_type, error_set, payload); + return rvalue(mod, scope, rl, result); + }, + .merge_error_sets => { + const lhs = try typeExpr(mod, scope, node_datas[node].lhs); + const rhs = try typeExpr(mod, scope, node_datas[node].rhs); + const src = token_starts[main_tokens[node]]; + const result = try addZIRBinOp(mod, scope, src, .merge_error_sets, lhs, rhs); + return rvalue(mod, scope, rl, result); + }, + .anyframe_literal => { + const main_token = main_tokens[node]; + const src = token_starts[main_token]; + const result = try addZIRInstConst(mod, scope, src, .{ + .ty = Type.initTag(.type), + .val = Value.initTag(.anyframe_type), + }); + return rvalue(mod, scope, rl, result); + }, + .anyframe_type => { + const src = token_starts[node_datas[node].lhs]; + const return_type = try typeExpr(mod, scope, node_datas[node].rhs); + const result = try addZIRUnOp(mod, scope, src, .anyframe_type, return_type); + return rvalue(mod, scope, rl, result); + }, + .@"catch" => { + const catch_token = main_tokens[node]; + const payload_token: ?TokenIndex = if (token_tags[catch_token + 1] == .pipe) + catch_token + 2 + else + null; + switch (rl) { + .ref => return orelseCatchExpr( + mod, + scope, + rl, + node_datas[node].lhs, + main_tokens[node], + .is_err_ptr, + .err_union_payload_unsafe_ptr, + .err_union_code_ptr, + node_datas[node].rhs, + payload_token, + ), + else => return orelseCatchExpr( + mod, + scope, + rl, + node_datas[node].lhs, + main_tokens[node], + .is_err, + .err_union_payload_unsafe, + .err_union_code, + node_datas[node].rhs, + payload_token, + ), + } + }, + .@"orelse" => switch (rl) { + .ref => return orelseCatchExpr( + mod, + scope, + rl, + 
node_datas[node].lhs, + main_tokens[node], + .is_null_ptr, + .optional_payload_unsafe_ptr, + undefined, + node_datas[node].rhs, + null, + ), + else => return orelseCatchExpr( + mod, + scope, + rl, + node_datas[node].lhs, + main_tokens[node], + .is_null, + .optional_payload_unsafe, + undefined, + node_datas[node].rhs, + null, + ), + }, + .@"break" => return breakExpr(mod, scope, rl, node), + .@"continue" => return continueExpr(mod, scope, rl, node), + .grouped_expression => return expr(mod, scope, rl, node_datas[node].lhs), + .array_type => return arrayType(mod, scope, rl, node), + .array_type_sentinel => return arrayTypeSentinel(mod, scope, rl, node), + .char_literal => return charLiteral(mod, scope, rl, node), + .error_set_decl => return errorSetDecl(mod, scope, rl, node), .array_access => return arrayAccess(mod, scope, rl, node), - .slice => return rvalue(mod, scope, rl, try sliceExpr(mod, scope, node)), - .@"catch" => return catchExpr(mod, scope, rl, node), - .@"comptime" => return comptimeKeyword(mod, scope, rl, node), - .@"orelse" => return orelseExpr(mod, scope, rl, node), - .@"switch" => return switchExpr(mod, scope, rl, node), - .ContainerDecl => return containerDecl(mod, scope, rl, node), + .@"comptime" => return comptimeExpr(mod, scope, rl, node_datas[node].lhs), + .@"switch", .switch_comma => return switchExpr(mod, scope, rl, node), .@"defer" => return mod.failNode(scope, node, "TODO implement astgen.expr for .defer", .{}), .@"await" => return mod.failNode(scope, node, "TODO implement astgen.expr for .await", .{}), .@"resume" => return mod.failNode(scope, node, "TODO implement astgen.expr for .resume", .{}), .@"try" => return mod.failNode(scope, node, "TODO implement astgen.expr for .Try", .{}), - .ArrayInitializer => return mod.failNode(scope, node, "TODO implement astgen.expr for .ArrayInitializer", .{}), - .ArrayInitializerDot => return mod.failNode(scope, node, "TODO implement astgen.expr for .ArrayInitializerDot", .{}), - .StructInitializer => 
return mod.failNode(scope, node, "TODO implement astgen.expr for .StructInitializer", .{}), - .StructInitializerDot => return mod.failNode(scope, node, "TODO implement astgen.expr for .StructInitializerDot", .{}), .@"suspend" => return mod.failNode(scope, node, "TODO implement astgen.expr for .suspend", .{}), .@"anytype" => return mod.failNode(scope, node, "TODO implement astgen.expr for .anytype", .{}), - .FnProto => return mod.failNode(scope, node, "TODO implement astgen.expr for .FnProto", .{}), .@"nosuspend" => return mod.failNode(scope, node, "TODO implement astgen.expr for .nosuspend", .{}), } } -fn comptimeKeyword(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.@"comptime") InnerError!*zir.Inst { - const tracy = trace(@src()); - defer tracy.end(); - - return comptimeExpr(mod, scope, rl, node.expr); -} - pub fn comptimeExpr( mod: *Module, parent_scope: *Scope, @@ -493,7 +573,12 @@ pub fn comptimeExpr( return &block.base; } -fn breakExpr(mod: *Module, parent_scope: *Scope, node: ast.Node.Index) InnerError!*zir.Inst { +fn breakExpr( + mod: *Module, + parent_scope: *Scope, + rl: ResultLoc, + node: ast.Node.Index, +) InnerError!*zir.Inst { const tree = parent_scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); @@ -524,9 +609,10 @@ fn breakExpr(mod: *Module, parent_scope: *Scope, node: ast.Node.Index) InnerErro }; if (rhs == 0) { - return addZirInstTag(mod, parent_scope, src, .break_void, .{ + const result = try addZirInstTag(mod, parent_scope, src, .break_void, .{ .block = block_inst, }); + return rvalue(mod, parent_scope, rl, result); } gen_zir.break_count += 1; const prev_rvalue_rl_count = gen_zir.rvalue_rl_count; @@ -547,7 +633,7 @@ fn breakExpr(mod: *Module, parent_scope: *Scope, node: ast.Node.Index) InnerErro try gen_zir.labeled_store_to_block_ptr_list.append(mod.gpa, store_inst); } } - return br; + return rvalue(mod, parent_scope, rl, br); }, .local_val => scope = 
scope.cast(Scope.LocalVal).?.parent, .local_ptr => scope = scope.cast(Scope.LocalPtr).?.parent, @@ -561,7 +647,12 @@ fn breakExpr(mod: *Module, parent_scope: *Scope, node: ast.Node.Index) InnerErro } } -fn continueExpr(mod: *Module, parent_scope: *Scope, node: ast.Node.Index) InnerError!*zir.Inst { +fn continueExpr( + mod: *Module, + parent_scope: *Scope, + rl: ResultLoc, + node: ast.Node.Index, +) InnerError!*zir.Inst { const tree = parent_scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); @@ -590,9 +681,10 @@ fn continueExpr(mod: *Module, parent_scope: *Scope, node: ast.Node.Index) InnerE continue; } - return addZirInstTag(mod, parent_scope, src, .break_void, .{ + const result = try addZirInstTag(mod, parent_scope, src, .break_void, .{ .block = continue_block, }); + return rvalue(mod, parent_scope, rl, result); }, .local_val => scope = scope.cast(Scope.LocalVal).?.parent, .local_ptr => scope = scope.cast(Scope.LocalPtr).?.parent, @@ -1083,12 +1175,6 @@ fn negation( return addZIRBinOp(mod, scope, src, op_inst_tag, lhs, rhs); } -fn sliceType(mod: *Module, scope: *Scope, node: *ast.Node.slice_type) InnerError!*zir.Inst { - const tree = scope.tree(); - const src = token_starts[node.op_token]; - return ptrSliceType(mod, scope, src, &node.ptr_info, node.rhs, .Slice); -} - fn ptrType(mod: *Module, scope: *Scope, node: *ast.Node.PtrType) InnerError!*zir.Inst { const tree = scope.tree(); const src = token_starts[node.op_token]; @@ -1146,70 +1232,54 @@ fn ptrSliceType(mod: *Module, scope: *Scope, src: usize, ptr_info: *ast.PtrInfo, return addZIRInst(mod, scope, src, zir.Inst.PtrType, .{ .child_type = child_type }, kw_args); } -fn arrayType(mod: *Module, scope: *Scope, node: *ast.Node.array_type) !*zir.Inst { +fn arrayType(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) !*zir.Inst { const tree = scope.tree(); - const src = token_starts[node.op_token]; + const main_tokens = 
tree.nodes.items(.main_token); + const node_datas = tree.nodes.items(.data); + const src = token_starts[main_tokens[node]]; const usize_type = try addZIRInstConst(mod, scope, src, .{ .ty = Type.initTag(.type), .val = Value.initTag(.usize_type), }); + const len_node = node_datas[node].lhs; + const elem_node = node_datas[node].rhs; + if (len_node == 0) { + const elem_type = try typeExpr(mod, scope, elem_node); + const result = try addZIRUnOp(mod, scope, src, .mut_slice_type, elem_type); + return rvalue(mod, scope, rl, result); + } else { + // TODO check for [_]T + const len = try expr(mod, scope, .{ .ty = usize_type }, len_node); + const elem_type = try typeExpr(mod, scope, elem_node); - // TODO check for [_]T - const len = try expr(mod, scope, .{ .ty = usize_type }, node.len_expr); - const elem_type = try typeExpr(mod, scope, node.rhs); - - return addZIRBinOp(mod, scope, src, .array_type, len, elem_type); + const result = try addZIRBinOp(mod, scope, src, .array_type, len, elem_type); + return rvalue(mod, scope, rl, result); + } } -fn arrayTypeSentinel(mod: *Module, scope: *Scope, node: *ast.Node.array_type_sentinel) !*zir.Inst { +fn arrayTypeSentinel(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) !*zir.Inst { const tree = scope.tree(); - const src = token_starts[node.op_token]; + const main_tokens = tree.nodes.items(.main_token); + const len_node = node_datas[node].lhs; + const extra = tree.extraData(node_datas[node].rhs, ast.Node.ArrayTypeSentinel); + const src = token_starts[main_tokens[node]]; const usize_type = try addZIRInstConst(mod, scope, src, .{ .ty = Type.initTag(.type), .val = Value.initTag(.usize_type), }); // TODO check for [_]T - const len = try expr(mod, scope, .{ .ty = usize_type }, node.len_expr); - const sentinel_uncasted = try expr(mod, scope, .none, node.sentinel); - const elem_type = try typeExpr(mod, scope, node.rhs); + const len = try expr(mod, scope, .{ .ty = usize_type }, len_node); + const sentinel_uncasted = try 
expr(mod, scope, .none, extra.sentinel); + const elem_type = try typeExpr(mod, scope, extra.elem_type); const sentinel = try addZIRBinOp(mod, scope, src, .as, elem_type, sentinel_uncasted); - return addZIRInst(mod, scope, src, zir.Inst.ArrayTypeSentinel, .{ + const result = try addZIRInst(mod, scope, src, zir.Inst.ArrayTypeSentinel, .{ .len = len, .sentinel = sentinel, .elem_type = elem_type, }, .{}); -} - -fn anyFrameType(mod: *Module, scope: *Scope, node: *ast.Node.anyframe_type) InnerError!*zir.Inst { - const tree = scope.tree(); - const src = token_starts[node.anyframe_token]; - if (node.result) |some| { - const return_type = try typeExpr(mod, scope, some.return_type); - return addZIRUnOp(mod, scope, src, .anyframe_type, return_type); - } else { - return addZIRInstConst(mod, scope, src, .{ - .ty = Type.initTag(.type), - .val = Value.initTag(.anyframe_type), - }); - } -} - -fn typeInixOp(mod: *Module, scope: *Scope, node: *ast.Node.SimpleInfixOp, op_inst_tag: zir.Inst.Tag) InnerError!*zir.Inst { - const tree = scope.tree(); - const src = token_starts[node.op_token]; - const error_set = try typeExpr(mod, scope, node.lhs); - const payload = try typeExpr(mod, scope, node.rhs); - return addZIRBinOp(mod, scope, src, op_inst_tag, error_set, payload); -} - -fn enumLiteral(mod: *Module, scope: *Scope, node: *ast.Node.enum_literal) !*zir.Inst { - const tree = scope.tree(); - const src = token_starts[node.name]; - const name = try mod.identifierTokenString(scope, node.name); - - return addZIRInst(mod, scope, src, zir.Inst.EnumLiteral, .{ .name = name }, .{}); + return rvalue(mod, scope, rl, result); } fn containerField( @@ -1394,85 +1464,50 @@ fn containerDecl(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Con } } -fn errorSetDecl(mod: *Module, scope: *Scope, node: *ast.Node.error_set_decl) InnerError!*zir.Inst { +fn errorSetDecl( + mod: *Module, + scope: *Scope, + rl: ResultLoc, + node: ast.Node.Index, +) InnerError!*zir.Inst { const tree = scope.tree(); - 
const src = token_starts[node.error_token]; - const decls = node.decls(); - const fields = try scope.arena().alloc([]const u8, decls.len); + const main_tokens = tree.nodes.items(.main_token); + const token_tags = tree.tokens.items(.tag); - for (decls) |decl, i| { - const tag = decl.castTag(.ErrorTag).?; - fields[i] = try mod.identifierTokenString(scope, tag.name_token); - } - - return addZIRInst(mod, scope, src, zir.Inst.ErrorSet, .{ .fields = fields }, .{}); -} - -fn errorType(mod: *Module, scope: *Scope, node: *ast.Node.OneToken) InnerError!*zir.Inst { - const tree = scope.tree(); - const src = token_starts[node.token]; - return addZIRInstConst(mod, scope, src, .{ - .ty = Type.initTag(.type), - .val = Value.initTag(.anyerror_type), - }); -} - -fn catchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.@"catch") InnerError!*zir.Inst { - switch (rl) { - .ref => return orelseCatchExpr( - mod, - scope, - rl, - node.lhs, - node.op_token, - .is_err_ptr, - .err_union_payload_unsafe_ptr, - .err_union_code_ptr, - node.rhs, - node.payload, - ), - else => return orelseCatchExpr( - mod, - scope, - rl, - node.lhs, - node.op_token, - .is_err, - .err_union_payload_unsafe, - .err_union_code, - node.rhs, - node.payload, - ), - } -} - -fn orelseExpr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.SimpleInfixOp) InnerError!*zir.Inst { - switch (rl) { - .ref => return orelseCatchExpr( - mod, - scope, - rl, - node.lhs, - node.op_token, - .is_null_ptr, - .optional_payload_unsafe_ptr, - undefined, - node.rhs, - null, - ), - else => return orelseCatchExpr( - mod, - scope, - rl, - node.lhs, - node.op_token, - .is_null, - .optional_payload_unsafe, - undefined, - node.rhs, - null, - ), + // Count how many fields there are. 
+ const error_token = main_tokens[node]; + const count: usize = count: { + var tok_i = error_token + 2; + var count: usize = 0; + while (true) : (tok_i += 1) { + switch (token_tags[tok_i]) { + .doc_comment, .comma => {}, + .identifier => count += 1, + .r_paren => break :count count, + else => unreachable, + } + } else unreachable; // TODO should not need else unreachable here + }; + + const fields = try scope.arena().alloc([]const u8, count); + { + var tok_i = error_token + 2; + var field_i: usize = 0; + while (true) : (tok_i += 1) { + switch (token_tags[tok_i]) { + .doc_comment, .comma => {}, + .identifier => { + fields[field_i] = try mod.identifierTokenString(scope, tok_i); + field_i += 1; + }, + .r_paren => break, + else => unreachable, + } + } } + const src = token_starts[error_token]; + const result = try addZIRInst(mod, scope, src, zir.Inst.ErrorSet, .{ .fields = fields }, .{}); + return rvalue(mod, scope, rl, result); } fn orelseCatchExpr( @@ -1681,55 +1716,78 @@ pub fn field(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) I } } -fn arrayAccess(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.array_access) InnerError!*zir.Inst { +fn arrayAccess( + mod: *Module, + scope: *Scope, + rl: ResultLoc, + node: ast.Node.Index, +) InnerError!*zir.Inst { const tree = scope.tree(); - const src = token_starts[node.rtoken]; + const main_tokens = tree.nodes.items(.main_token); + const token_starts = tree.tokens.items(.start); + const src = token_starts[main_tokens[node]]; const usize_type = try addZIRInstConst(mod, scope, src, .{ .ty = Type.initTag(.type), .val = Value.initTag(.usize_type), }); const index_rl: ResultLoc = .{ .ty = usize_type }; - - if (rl == .ref) { - return addZirInstTag(mod, scope, src, .elem_ptr, .{ - .array = try expr(mod, scope, .ref, node.lhs), - .index = try expr(mod, scope, index_rl, node.index_expr), - }); + switch (rl) { + .ref => return addZirInstTag(mod, scope, src, .elem_ptr, .{ + .array = try expr(mod, scope, 
.ref, node_datas[node].lhs), + .index = try expr(mod, scope, index_rl, node_datas[node].rhs), + }), + else => return rvalue(mod, scope, rl, try addZirInstTag(mod, scope, src, .elem_val, .{ + .array = try expr(mod, scope, .none, node_datas[node].lhs), + .index = try expr(mod, scope, index_rl, node_datas[node].rhs), + })), } - return rvalue(mod, scope, rl, try addZirInstTag(mod, scope, src, .elem_val, .{ - .array = try expr(mod, scope, .none, node.lhs), - .index = try expr(mod, scope, index_rl, node.index_expr), - })); } -fn sliceExpr(mod: *Module, scope: *Scope, node: *ast.Node.slice) InnerError!*zir.Inst { +fn sliceExpr( + mod: *Module, + scope: *Scope, + rl: ResultLoc, + slice: ast.full.Slice, +) InnerError!*zir.Inst { const tree = scope.tree(); - const src = token_starts[node.rtoken]; + const token_starts = tree.tokens.items(.start); + const src = token_starts[slice.ast.lbracket]; const usize_type = try addZIRInstConst(mod, scope, src, .{ .ty = Type.initTag(.type), .val = Value.initTag(.usize_type), }); - const array_ptr = try expr(mod, scope, .ref, node.lhs); - const start = try expr(mod, scope, .{ .ty = usize_type }, node.start); + const array_ptr = try expr(mod, scope, .ref, slice.ast.sliced); + const start = try expr(mod, scope, .{ .ty = usize_type }, slice.ast.start); - if (node.end == null and node.sentinel == null) { - return try addZIRBinOp(mod, scope, src, .slice_start, array_ptr, start); + if (slice.ast.sentinel == 0) { + if (slice.ast.end == 0) { + const result = try addZIRBinOp(mod, scope, src, .slice_start, array_ptr, start); + return rvalue(mod, scope, rl, result); + } else { + const end = try expr(mod, scope, .{ .ty = usize_type }, slice.ast.end); + // TODO a ZIR slice_open instruction + const result = try addZIRInst(mod, scope, src, zir.Inst.Slice, .{ + .array_ptr = array_ptr, + .start = start, + }, .{ .end = end }); + return rvalue(mod, scope, rl, result); + } } - const end = if (node.end) |end| try expr(mod, scope, .{ .ty = usize_type }, end) 
else null; - // we could get the child type here, but it is easier to just do it in semantic analysis. - const sentinel = if (node.sentinel) |sentinel| try expr(mod, scope, .none, sentinel) else null; - - return try addZIRInst( - mod, - scope, - src, - zir.Inst.Slice, - .{ .array_ptr = array_ptr, .start = start }, - .{ .end = end, .sentinel = sentinel }, - ); + const end = try expr(mod, scope, .{ .ty = usize_type }, slice.ast.end); + // TODO pass the proper result loc to this expression using a ZIR instruction + // "get the child element type for a slice target". + const sentinel = try expr(mod, scope, .none, slice.ast.sentinel); + const result = try addZIRInst(mod, scope, src, zir.Inst.Slice, .{ + .array_ptr = array_ptr, + .start = start, + }, .{ + .end = end, + .sentinel = sentinel, + }); + return rvalue(mod, scope, rl, result); } fn simpleBinOp( @@ -2070,7 +2128,6 @@ fn whileExpr( }; defer then_scope.instructions.deinit(mod.gpa); - // declare payload to the then_scope const then_sub_scope = &then_scope.base; loop_scope.break_count += 1; @@ -2101,7 +2158,7 @@ fn whileExpr( if (loop_scope.label) |some| { if (!some.used) { - return mod.fail(scope, token_starts[some.token], "unused while label", .{}); + return mod.fail(scope, token_starts[some.token], "unused while loop label", .{}); } } return finishThenElseBlock( @@ -2126,20 +2183,21 @@ fn forExpr( mod: *Module, scope: *Scope, rl: ResultLoc, - for_node: *ast.Node.@"for", + for_full: ast.full.While, ) InnerError!*zir.Inst { - if (for_node.label) |label| { - try checkLabelRedefinition(mod, scope, label); + if (for_full.label_token) |label_token| { + try checkLabelRedefinition(mod, scope, label_token); } - if (for_node.inline_token) |tok| - return mod.failTok(scope, tok, "TODO inline for", .{}); + if (for_full.inline_token) |inline_token| { + return mod.failTok(scope, inline_token, "TODO inline for", .{}); + } - // setup variables and constants + // Set up variables and constants. 
const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - const for_src = token_starts[for_node.for_token]; + const for_src = token_starts[for_full.ast.while_token]; const index_ptr = blk: { const usize_type = try addZIRInstConst(mod, scope, for_src, .{ .ty = Type.initTag(.type), @@ -2154,8 +2212,8 @@ fn forExpr( _ = try addZIRBinOp(mod, scope, for_src, .store, index_ptr, zero); break :blk index_ptr; }; - const array_ptr = try expr(mod, scope, .ref, for_node.array_expr); - const cond_src = token_starts[for_node.array_expr.firstToken()]; + const array_ptr = try expr(mod, scope, .ref, for_full.ast.cond_expr); + const cond_src = token_starts[tree.firstToken(for_full.ast.cond_expr)]; const len = try addZIRUnOp(mod, scope, cond_src, .indexable_ptr_len, array_ptr); var loop_scope: Scope.GenZIR = .{ @@ -2217,15 +2275,15 @@ fn forExpr( }); loop_scope.break_block = for_block; loop_scope.continue_block = cond_block; - if (for_node.label) |some| { + if (for_full.label_token) |label_token| { loop_scope.label = @as(?Scope.GenZIR.Label, Scope.GenZIR.Label{ - .token = some, + .token = label_token, .block_inst = for_block, }); } // while body - const then_src = token_starts[for_node.body.lastToken()]; + const then_src = token_starts[tree.lastToken(for_full.ast.then_expr)]; var then_scope: Scope.GenZIR = .{ .parent = &cond_scope.base, .decl = cond_scope.decl, @@ -2237,23 +2295,27 @@ fn forExpr( var index_scope: Scope.LocalPtr = undefined; const then_sub_scope = blk: { - const payload = for_node.payload.castTag(.PointerIndexPayload).?; - const is_ptr = payload.ptr_token != null; - const value_name = tree.tokenSlice(payload.value_symbol.firstToken()); + const payload_token = for_full.payload_token.?; + const ident = if (token_tags[payload_token] == .asterisk) + payload_token + 1 + else + payload_token; + const is_ptr = ident != payload_token; + const value_name = tree.tokenSlice(ident); if (!mem.eql(u8, value_name, 
"_")) { - return mod.failNode(&then_scope.base, payload.value_symbol, "TODO implement for value payload", .{}); + return mod.failNode(&then_scope.base, ident, "TODO implement for loop value payload", .{}); } else if (is_ptr) { - return mod.failTok(&then_scope.base, payload.ptr_token.?, "pointer modifier invalid on discard", .{}); + return mod.failTok(&then_scope.base, payload_token, "pointer modifier invalid on discard", .{}); } - const index_symbol_node = payload.index_symbol orelse - break :blk &then_scope.base; - - const index_name = tree.tokenSlice(tree.firstToken(index_symbol_node)); - if (mem.eql(u8, index_name, "_")) { + const index_token = if (token_tags[ident + 1] == .comma) + ident + 2 + else break :blk &then_scope.base; + if (mem.eql(u8, tree.tokenSlice(index_token), "_")) { + return mod.failTok(&then_scope.base, index_token, "discard of index capture not allowed; omit it instead", .{}); } - // TODO make this const without an extra copy? + const index_name = try mod.identifierTokenString(&then_scope.base, index_token); index_scope = .{ .parent = &then_scope.base, .gen_zir = &then_scope, @@ -2264,7 +2326,7 @@ fn forExpr( }; loop_scope.break_count += 1; - const then_result = try expr(mod, then_sub_scope, loop_scope.break_result_loc, for_node.body); + const then_result = try expr(mod, then_sub_scope, loop_scope.break_result_loc, for_full.ast.then_expr); // else branch var else_scope: Scope.GenZIR = .{ @@ -2276,18 +2338,23 @@ fn forExpr( }; defer else_scope.instructions.deinit(mod.gpa); - var else_src: usize = undefined; - const else_result: ?*zir.Inst = if (for_node.@"else") |else_node| blk: { - else_src = token_starts[else_node.body.lastToken()]; + const else_node = for_full.ast.else_expr; + const else_info: struct { src: usize, result: ?*zir.Inst } = if (else_node != 0) blk: { loop_scope.break_count += 1; - break :blk try expr(mod, &else_scope.base, loop_scope.break_result_loc, else_node.body); - } else blk: { - else_src = 
token_starts[for_node.lastToken()]; - break :blk null; - }; + const sub_scope = &else_scope.base; + break :blk .{ + .src = token_starts[tree.lastToken(else_node)], + .result = try expr(mod, sub_scope, loop_scope.break_result_loc, else_node), + }; + } else + .{ + .src = token_starts[tree.lastToken(then_node)], + .result = null, + }; + if (loop_scope.label) |some| { if (!some.used) { - return mod.fail(scope, token_starts[some.token], "unused for label", .{}); + return mod.fail(scope, token_starts[some.token], "unused for loop label", .{}); } } return finishThenElseBlock( @@ -2300,41 +2367,46 @@ fn forExpr( &condbr.positionals.then_body, &condbr.positionals.else_body, then_src, - else_src, + else_info.src, then_result, - else_result, + else_info.result, for_block, cond_block, ); } -fn switchCaseUsesRef(node: *ast.Node.@"switch") bool { - for (node.cases()) |uncasted_case| { - const case = uncasted_case.castTag(.switch_case).?; - const uncasted_payload = case.payload orelse continue; - const payload = uncasted_payload.castTag(.PointerPayload).?; - if (payload.ptr_token) |_| return true; - } - return false; -} - -fn getRangeNode(node: *ast.Node) ?*ast.Node.SimpleInfixOp { - var cur = node; +fn getRangeNode( + node_tags: []const ast.Node.Tag, + node_datas: []const ast.Node.Data, + start_node: ast.Node.Index, +) ?ast.Node.Index { + var node = start_node; while (true) { - switch (cur.tag) { - .range => return @fieldParentPtr(ast.Node.SimpleInfixOp, "base", cur), - .grouped_expression => cur = @fieldParentPtr(ast.Node.grouped_expression, "base", cur).expr, + switch (node_tags[node]) { + .switch_range => return node, + .grouped_expression => node = node_datas[node].lhs, else => return null, } } } -fn switchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, switch_node: *ast.Node.@"switch") InnerError!*zir.Inst { +fn switchExpr( + mod: *Module, + scope: *Scope, + rl: ResultLoc, + switch_node: ast.Node.Index, +) InnerError!*zir.Inst { const tree = scope.tree(); const node_datas 
= tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - const switch_src = token_starts[switch_node.switch_token]; - const use_ref = switchCaseUsesRef(switch_node); + const token_tags = tree.tokens.items(.tag); + + const switch_token = main_tokens[switch_node]; + const target_node = datas[switch_node].lhs; + const extra = tree.extraData(datas[switch_node].rhs, ast.switch_node.SubRange); + const case_nodes = tree.extra_data[extra.start..extra.end]; + + const switch_src = token_starts[switch_token]; var block_scope: Scope.GenZIR = .{ .parent = scope, @@ -2349,18 +2421,26 @@ fn switchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, switch_node: *ast.Node var items = std.ArrayList(*zir.Inst).init(mod.gpa); defer items.deinit(); - // first we gather all the switch items and check else/'_' prongs + // First we gather all the switch items and check else/'_' prongs. var else_src: ?usize = null; var underscore_src: ?usize = null; var first_range: ?*zir.Inst = null; var simple_case_count: usize = 0; - for (switch_node.cases()) |uncasted_case| { - const case = uncasted_case.castTag(.switch_case).?; - const case_src = token_starts[case.firstToken()]; - assert(case.items_len != 0); - + var any_payload_is_ref = false; + for (case_nodes) |case_node| { + const case = switch (node_tags[case_node]) { + .switch_case_one => tree.switchCaseOne(case_node), + .switch_case => tree.switchCase(case_node), + else => unreachable, + }; + if (case.payload_token) |payload_token| { + if (token_tags[payload_token] == .asterisk) { + any_payload_is_ref = true; + } + } // Check for else/_ prong, those are handled last. 
- if (case.items_len == 1 and case.items()[0].tag == .switch_else) { + if (case.ast.values.len == 0) { + const case_src = token_starts[case.ast.arrow_token - 1]; if (else_src) |src| { const msg = msg: { const msg = try mod.errMsg( @@ -2377,9 +2457,11 @@ fn switchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, switch_node: *ast.Node } else_src = case_src; continue; - } else if (case.items_len == 1 and case.items()[0].tag == .identifier and - mem.eql(u8, tree.tokenSlice(case.items()[0].firstToken()), "_")) + } else if (case.ast.values.len == 1 and + node_tags[case.ast.values[0]] == .identifier and + mem.eql(u8, tree.tokenSlice(main_tokens[case.ast.values[0]]), "_")) { + const case_src = token_starts[case.ast.arrow_token - 1]; if (underscore_src) |src| { const msg = msg: { const msg = try mod.errMsg( @@ -2416,14 +2498,18 @@ fn switchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, switch_node: *ast.Node } } - if (case.items_len == 1 and getRangeNode(case.items()[0]) == null) simple_case_count += 1; + if (case.ast.values.len == 1 and + getRangeNode(node_tags, node_datas, case.ast.values[0]) == null) + { + simple_case_count += 1; + } - // generate all the switch items as comptime expressions - for (case.items()) |item| { - if (getRangeNode(item)) |range| { - const start = try comptimeExpr(mod, &block_scope.base, .none, range.lhs); - const end = try comptimeExpr(mod, &block_scope.base, .none, range.rhs); - const range_src = token_starts[range.op_token]; + // Generate all the switch items as comptime expressions. 
+ for (case.ast.values) |item| { + if (getRangeNode(node_tags, node_datas, item)) |range| { + const start = try comptimeExpr(mod, &block_scope.base, .none, node_datas[range].lhs); + const end = try comptimeExpr(mod, &block_scope.base, .none, node_datas[range].rhs); + const range_src = token_starts[main_tokens[range]]; const range_inst = try addZIRBinOp(mod, &block_scope.base, range_src, .switch_range, start, end); try items.append(range_inst); } else { @@ -2438,21 +2524,25 @@ fn switchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, switch_node: *ast.Node if (underscore_src != null) special_prong = .underscore; var cases = try block_scope.arena.alloc(zir.Inst.SwitchBr.Case, simple_case_count); - const target_ptr = if (use_ref) try expr(mod, &block_scope.base, .ref, switch_node.expr) else null; - const target = if (target_ptr) |some| - try addZIRUnOp(mod, &block_scope.base, some.src, .deref, some) + const rl_and_tag: struct { rl: ResultLoc, tag: zir.Inst.Tag } = if (any_payload_is_ref) + .{ + .rl = .ref, + .tag = .switchbr_ref, + } else - try expr(mod, &block_scope.base, .none, switch_node.expr); - const switch_inst = try addZIRInst(mod, &block_scope.base, switch_src, zir.Inst.SwitchBr, .{ + .{ + .rl = .none, + .tag = .switchbr, + }; + const target = try expr(mod, &block_scope.base, rl_and_tag.rl, target_node); + const switch_inst = try addZirInstT(mod, &block_scope.base, switch_src, zir.Inst.SwitchBr, rl_and_tag.tag, .{ .target = target, .cases = cases, .items = try block_scope.arena.dupe(*zir.Inst, items.items), .else_body = undefined, // populated below - }, .{ .range = first_range, .special_prong = special_prong, }); - const block = try addZIRInstBlock(mod, scope, switch_src, .block, .{ .instructions = try block_scope.arena.dupe(*zir.Inst, block_scope.instructions.items), }); @@ -2475,29 +2565,35 @@ fn switchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, switch_node: *ast.Node }; defer else_scope.instructions.deinit(mod.gpa); - // Now generate all but the 
special cases - var special_case: ?*ast.Node.switch_case = null; + // Now generate all but the special cases. + var special_case: ?ast.Node.Index = null; var items_index: usize = 0; var case_index: usize = 0; - for (switch_node.cases()) |uncasted_case| { - const case = uncasted_case.castTag(.switch_case).?; - const case_src = token_starts[case.firstToken()]; - // reset without freeing to reduce allocations. - case_scope.instructions.items.len = 0; + for (case_nodes) |case_node| { + const case = switch (node_tags[case_node]) { + .switch_case_one => tree.switchCaseOne(case_node), + .switch_case => tree.switchCase(case_node), + else => unreachable, + }; + const case_src = token_starts[main_tokens[case_node]]; + case_scope.instructions.shrinkRetainingCapacity(0); // Check for else/_ prong, those are handled last. - if (case.items_len == 1 and case.items()[0].tag == .switch_else) { + if (case.ast.values.len == 0) { special_case = case; continue; - } else if (case.items_len == 1 and case.items()[0].tag == .identifier and - mem.eql(u8, tree.tokenSlice(case.items()[0].firstToken()), "_")) + } else if (case.ast.values.len == 1 and + node_tags[case.ast.values[0]] == .identifier and + mem.eql(u8, tree.tokenSlice(main_tokens[case.ast.values[0]]), "_")) { special_case = case; continue; } // If this is a simple one item prong then it is handled by the switchbr. - if (case.items_len == 1 and getRangeNode(case.items()[0]) == null) { + if (case.ast.values.len == 1 and + getRangeNode(node_tags, node_datas, case.ast.values[0]) == null) + { const item = items.items[items_index]; items_index += 1; try switchCaseExpr(mod, &case_scope.base, block_scope.break_result_loc, block, case, target, target_ptr); @@ -2510,16 +2606,14 @@ fn switchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, switch_node: *ast.Node continue; } - // TODO if the case has few items and no ranges it might be better - // to just handle them as switch prongs. - // Check if the target matches any of the items. 
// 1, 2, 3..6 will result in // target == 1 or target == 2 or (target >= 3 and target <= 6) + // TODO handle multiple items as switch prongs rather than along with ranges. var any_ok: ?*zir.Inst = null; - for (case.items()) |item| { - if (getRangeNode(item)) |range| { - const range_src = token_starts[range.op_token]; + for (case.ast.values) |item| { + if (getRangeNode(node_tags, node_datas, item)) |range| { + const range_src = token_starts[main_tokens[range]]; const range_inst = items.items[items_index].castTag(.switch_range).?; items_index += 1; @@ -2580,7 +2674,7 @@ fn switchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, switch_node: *ast.Node // Not handling all possible cases is a compile error. _ = try addZIRNoOp(mod, &else_scope.base, switch_src, .unreachable_unsafe); } - switch_inst.castTag(.switchbr).?.positionals.else_body = .{ + switch_inst.positionals.else_body = .{ .instructions = try block_scope.arena.dupe(*zir.Inst, else_scope.instructions.items), }; @@ -2592,19 +2686,22 @@ fn switchCaseExpr( scope: *Scope, rl: ResultLoc, block: *zir.Inst.Block, - case: *ast.Node.switch_case, + case: ast.full.SwitchCase, target: *zir.Inst, target_ptr: ?*zir.Inst, ) !void { const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - const case_src = token_starts[case.firstToken()]; + const case_src = token_starts[case.ast.arrow_token]; const sub_scope = blk: { - const uncasted_payload = case.payload orelse break :blk scope; - const payload = uncasted_payload.castTag(.PointerPayload).?; - const is_ptr = payload.ptr_token != null; - const value_name = tree.tokenSlice(payload.value_symbol.firstToken()); + const payload_token = case.payload_token orelse break :blk scope; + const ident = if (token_tags[payload_token] == .asterisk) + payload_token + 1 + else + payload_token; + const is_ptr = ident != payload_token; + const value_name = tree.tokenSlice(ident); if (mem.eql(u8, value_name, "_")) { if (is_ptr) { 
return mod.failTok(scope, payload.ptr_token.?, "pointer modifier invalid on discard", .{}); @@ -2614,7 +2711,7 @@ fn switchCaseExpr( return mod.failNode(scope, payload.value_symbol, "TODO implement switch value payload", .{}); }; - const case_body = try expr(mod, sub_scope, rl, case.expr); + const case_body = try expr(mod, sub_scope, rl, case.ast.target_expr); if (!case_body.tag.isNoReturn()) { _ = try addZIRInst(mod, sub_scope, case_src, zir.Inst.Break, .{ .block = block, @@ -2820,12 +2917,13 @@ fn multilineStringLiteral( return rvalue(mod, scope, rl, str_inst); } -fn charLiteral(mod: *Module, scope: *Scope, node: *ast.Node.OneToken) !*zir.Inst { +fn charLiteral(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) !*zir.Inst { const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - const src = token_starts[node.token]; - const slice = tree.tokenSlice(node.token); + const main_token = main_tokens[node]; + const src = token_starts[main_token]; + const slice = tree.tokenSlice(main_token); var bad_index: usize = undefined; const value = std.zig.parseCharLiteral(slice, &bad_index) catch |err| switch (err) { @@ -2834,11 +2932,11 @@ fn charLiteral(mod: *Module, scope: *Scope, node: *ast.Node.OneToken) !*zir.Inst return mod.fail(scope, src + bad_index, "invalid character: '{c}'\n", .{bad_byte}); }, }; - - return addZIRInstConst(mod, scope, src, .{ + const result = try addZIRInstConst(mod, scope, src, .{ .ty = Type.initTag(.comptime_int), .val = try Value.Tag.int_u64.create(scope.arena(), value), }); + return rvalue(mod, scope, rl, result); } fn integerLiteral( @@ -3675,6 +3773,29 @@ pub fn addZirInstTag( return &inst.base; } +pub fn addZirInstT( + mod: *Module, + scope: *Scope, + src: usize, + comptime T: type, + tag: zir.Inst.Tag, + positionals: std.meta.fieldInfo(tag.Type(), .positionals).field_type, +) !*T { + const gen_zir = scope.getGenZIR(); + try 
gen_zir.instructions.ensureCapacity(mod.gpa, gen_zir.instructions.items.len + 1); + const inst = try gen_zir.arena.create(T); + inst.* = .{ + .base = .{ + .tag = tag, + .src = src, + }, + .positionals = positionals, + .kw_args = .{}, + }; + gen_zir.instructions.appendAssumeCapacity(&inst.base); + return inst; +} + pub fn addZIRInstSpecial( mod: *Module, scope: *Scope, diff --git a/src/zir.zig b/src/zir.zig index fc68aee216..fcbcee9ccd 100644 --- a/src/zir.zig +++ b/src/zir.zig @@ -343,6 +343,8 @@ pub const Inst = struct { void_value, /// A switch expression. switchbr, + /// Same as `switchbr` but the target is a pointer to the value being switched on. + switchbr_ref, /// A range in a switch case, `lhs...rhs`. /// Only checks that `lhs >= rhs` if they are ints, everything else is /// validated by the .switch instruction. @@ -453,6 +455,8 @@ pub const Inst = struct { .block_comptime_flat, => Block, + .switchbr, .switchbr_ref => SwitchBr, + .arg => Arg, .array_type_sentinel => ArrayTypeSentinel, .@"break" => Break, @@ -488,7 +492,6 @@ pub const Inst = struct { .enum_type => EnumType, .union_type => UnionType, .struct_type => StructType, - .switchbr => SwitchBr, }; } @@ -617,7 +620,6 @@ pub const Inst = struct { .struct_type, .void_value, .switch_range, - .switchbr, => false, .@"break", @@ -632,6 +634,8 @@ pub const Inst = struct { .container_field_named, .container_field_typed, .container_field, + .switchbr, + .switchbr_ref, => true, }; } @@ -730,6 +734,8 @@ pub const Inst = struct { kw_args: struct {}, }; + // TODO break this into multiple call instructions to avoid paying the cost + // of the calling convention field most of the time. 
pub const Call = struct { pub const base_tag = Tag.call; base: Inst, @@ -737,10 +743,9 @@ pub const Inst = struct { positionals: struct { func: *Inst, args: []*Inst, - }, - kw_args: struct { modifier: std.builtin.CallOptions.Modifier = .auto, }, + kw_args: struct {}, }; pub const DeclRef = struct { @@ -1185,7 +1190,6 @@ pub const Inst = struct { }; pub const SwitchBr = struct { - pub const base_tag = Tag.switchbr; base: Inst, positionals: struct { @@ -1194,14 +1198,12 @@ pub const Inst = struct { items: []*Inst, cases: []Case, else_body: Body, - }, - kw_args: struct { /// Pointer to first range if such exists. range: ?*Inst = null, special_prong: SpecialProng = .none, }, + kw_args: struct {}, - // Not anonymous due to stage1 limitations pub const SpecialProng = enum { none, @"else", diff --git a/src/zir_sema.zig b/src/zir_sema.zig index 480e0b4c33..80146397c5 100644 --- a/src/zir_sema.zig +++ b/src/zir_sema.zig @@ -154,7 +154,8 @@ pub fn analyzeInst(mod: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError! 
.bool_and => return zirBoolOp(mod, scope, old_inst.castTag(.bool_and).?), .bool_or => return zirBoolOp(mod, scope, old_inst.castTag(.bool_or).?), .void_value => return mod.constVoid(scope, old_inst.src), - .switchbr => return zirSwitchBr(mod, scope, old_inst.castTag(.switchbr).?), + .switchbr => return zirSwitchBr(mod, scope, old_inst.castTag(.switchbr).?, false), + .switchbr_ref => return zirSwitchBr(mod, scope, old_inst.castTag(.switchbr).?, true), .switch_range => return zirSwitchRange(mod, scope, old_inst.castTag(.switch_range).?), .container_field_named, @@ -1554,10 +1555,15 @@ fn zirSwitchRange(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError return mod.constVoid(scope, inst.base.src); } -fn zirSwitchBr(mod: *Module, scope: *Scope, inst: *zir.Inst.SwitchBr) InnerError!*Inst { +fn zirSwitchBr(mod: *Module, scope: *Scope, inst: *zir.Inst.SwitchBr, ref: bool) InnerError!*Inst { const tracy = trace(@src()); defer tracy.end(); - const target = try resolveInst(mod, scope, inst.positionals.target); + + const target_ptr = try resolveInst(mod, scope, inst.positionals.target); + const target = if (ref) + try mod.analyzeDeref(scope, inst.base.src, target_ptr, inst.positionals.target.src) + else + target_ptr; try validateSwitch(mod, scope, target, inst); if (try mod.resolveDefinedValue(scope, target)) |target_val| { From 5a2620fcca55813d87000f3018e70509b1d325e0 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Wed, 17 Feb 2021 22:22:10 -0700 Subject: [PATCH 100/173] stage2: fix some of the compilation errors in this branch --- src/Module.zig | 42 ++++++++++++++++++++----------- src/astgen.zig | 33 ++++++++++++------------- src/zir.zig | 25 +++++++++++++++---- src/zir_sema.zig | 64 +++++++++++++++++++++++++++++++++++++----------- 4 files changed, 114 insertions(+), 50 deletions(-) diff --git a/src/Module.zig b/src/Module.zig index 19566dee43..17084677d4 100644 --- a/src/Module.zig +++ b/src/Module.zig @@ -1223,14 +1223,20 @@ fn astgenAndSemaFn( .{}, ); } - 
if (fn_proto.ast.callconv_expr != 0) { - return mod.failNode( - &fn_type_scope.base, - fn_proto.ast.callconv_expr, - "TODO implement function calling convention expression", - .{}, - ); - } + const opt_cc: ?*zir.Inst = if (fn_proto.ast.callconv_expr != 0) cc: { + // TODO instead of enum literal type, this needs to be the + // std.builtin.CallingConvention enum. We need to implement importing other files + // and enums in order to fix this. + const src = token_starts[tree.firstToken(fn_proto.ast.callconv_expr)]; + const enum_lit_ty = try astgen.addZIRInstConst(mod, &fn_type_scope.base, src, .{ + .ty = Type.initTag(.type), + .val = Value.initTag(.enum_literal_type), + }); + break :cc try astgen.comptimeExpr(mod, &fn_type_scope.base, .{ + .ty = enum_lit_ty, + }, fn_proto.ast.callconv_expr); + } else null; + const maybe_bang = tree.firstToken(fn_proto.ast.return_type) - 1; if (token_tags[maybe_bang] == .bang) { return mod.failTok(&fn_type_scope.base, maybe_bang, "TODO implement inferred error sets", .{}); @@ -1241,10 +1247,17 @@ fn astgenAndSemaFn( type_type_rl, fn_proto.ast.return_type, ); - const fn_type_inst = try astgen.addZIRInst(mod, &fn_type_scope.base, fn_src, zir.Inst.FnType, .{ - .return_type = return_type_inst, - .param_types = param_types, - }, .{}); + const fn_type_inst = if (opt_cc) |cc| + try astgen.addZirInstTag(mod, &fn_type_scope.base, fn_src, .fn_type_cc, .{ + .return_type = return_type_inst, + .param_types = param_types, + .cc = cc, + }) + else + try astgen.addZirInstTag(mod, &fn_type_scope.base, fn_src, .fn_type, .{ + .return_type = return_type_inst, + .param_types = param_types, + }); if (std.builtin.mode == .Debug and mod.comp.verbose_ir) { zir.dumpZir(mod.gpa, "fn_type", decl.name, fn_type_scope.instructions.items) catch {}; @@ -1316,6 +1329,7 @@ fn astgenAndSemaFn( .decl = decl, .arena = &decl_arena.allocator, .parent = &decl.container.base, + .force_comptime = false, }; defer gen_scope.instructions.deinit(mod.gpa); @@ -1348,7 +1362,7 @@ fn 
astgenAndSemaFn( params_scope = &sub_scope.base; } - try astgen.blockExpr(mod, params_scope, body_node); + try astgen.expr(mod, params_scope, .none, body_node); if (gen_scope.instructions.items.len == 0 or !gen_scope.instructions.items[gen_scope.instructions.items.len - 1].tag.isNoReturn()) @@ -1496,7 +1510,7 @@ fn astgenAndSemaVarDecl( assert(is_extern); return mod.failTok(&block_scope.base, lib_name, "TODO implement function library name", .{}); } - const is_mutable = token_tags[var_decl.mut_token] == .keyword_var; + const is_mutable = token_tags[var_decl.ast.mut_token] == .keyword_var; const is_threadlocal = if (var_decl.threadlocal_token) |some| blk: { if (!is_mutable) { return mod.failTok(&block_scope.base, some, "threadlocal variable cannot be constant", .{}); diff --git a/src/astgen.zig b/src/astgen.zig index aef1b21a6c..125e9bceda 100644 --- a/src/astgen.zig +++ b/src/astgen.zig @@ -539,17 +539,6 @@ pub fn comptimeExpr( } const tree = parent_scope.tree(); - const main_tokens = tree.nodes.items(.main_token); - const token_tags = tree.tokens.items(.tag); - - // Optimization for labeled blocks: don't need to have 2 layers of blocks, - // we can reuse the existing one. - const lbrace = main_tokens[node]; - if (token_tags[lbrace - 1] == .colon and - token_tags[lbrace - 2] == .identifier) - { - return labeledBlockExpr(mod, parent_scope, rl, node, .block_comptime); - } // Make a scope to collect generated instructions in the sub-expression. 
var block_scope: Scope.GenZIR = .{ @@ -708,9 +697,13 @@ pub fn blockExpr( const tracy = trace(@src()); defer tracy.end(); + const tree = scope.tree(); + const main_tokens = tree.nodes.items(.main_token); + const token_tags = tree.tokens.items(.tag); + const lbrace = main_tokens[node]; if (token_tags[lbrace - 1] == .colon) { - return labeledBlockExpr(mod, scope, rl, block_node, .block); + return labeledBlockExpr(mod, scope, rl, block_node, statements, .block); } try blockExprStmts(mod, scope, block_node, statements); @@ -766,7 +759,8 @@ fn labeledBlockExpr( mod: *Module, parent_scope: *Scope, rl: ResultLoc, - block_node: *ast.Node.labeled_block, + block_node: ast.Node.Index, + statements: []const ast.Node.Index, zir_tag: zir.Inst.Tag, ) InnerError!*zir.Inst { const tracy = trace(@src()); @@ -777,9 +771,14 @@ fn labeledBlockExpr( const tree = parent_scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); - const src = token_starts[block_node.lbrace]; + const token_starts = tree.tokens.items(.start); - try checkLabelRedefinition(mod, parent_scope, block_node.label); + const lbrace = main_tokens[block_node]; + const label_token = lbrace - 1; + assert(token_tags[label_token] == .identifier); + const src = token_starts[lbrace]; + + try checkLabelRedefinition(mod, parent_scope, label_token); // Create the Block ZIR instruction so that we can put it into the GenZIR struct // so that break statements can reference it. 
@@ -804,7 +803,7 @@ fn labeledBlockExpr( .instructions = .{}, // TODO @as here is working around a stage1 miscompilation bug :( .label = @as(?Scope.GenZIR.Label, Scope.GenZIR.Label{ - .token = block_node.label, + .token = label_token, .block_inst = block_inst, }), }; @@ -813,7 +812,7 @@ fn labeledBlockExpr( defer block_scope.labeled_breaks.deinit(mod.gpa); defer block_scope.labeled_store_to_block_ptr_list.deinit(mod.gpa); - try blockExprStmts(mod, &block_scope.base, &block_node.base, block_node.statements()); + try blockExprStmts(mod, &block_scope.base, block_node, block_node.statements()); if (!block_scope.label.?.used) { return mod.fail(parent_scope, token_starts[block_node.label], "unused block label", .{}); diff --git a/src/zir.zig b/src/zir.zig index fcbcee9ccd..9a3c080760 100644 --- a/src/zir.zig +++ b/src/zir.zig @@ -172,8 +172,10 @@ pub const Inst = struct { floatcast, /// Declare a function body. @"fn", - /// Returns a function type. - fntype, + /// Returns a function type, assuming unspecified calling convention. + fn_type, + /// Returns a function type, with a calling convention instruction operand. + fn_type_cc, /// @import(operand) import, /// Integer literal. 
@@ -478,7 +480,8 @@ pub const Inst = struct { .@"export" => Export, .param_type => ParamType, .primitive => Primitive, - .fntype => FnType, + .fn_type => FnType, + .fn_type_cc => FnTypeCc, .elem_ptr, .elem_val => Elem, .condbr => CondBr, .ptr_type => PtrType, @@ -552,7 +555,8 @@ pub const Inst = struct { .field_ptr_named, .field_val_named, .@"fn", - .fntype, + .fn_type, + .fn_type_cc, .int, .intcast, .int_type, @@ -877,7 +881,18 @@ pub const Inst = struct { }; pub const FnType = struct { - pub const base_tag = Tag.fntype; + pub const base_tag = Tag.fn_type; + base: Inst, + + positionals: struct { + param_types: []*Inst, + return_type: *Inst, + }, + kw_args: struct {}, + }; + + pub const FnTypeCc = struct { + pub const base_tag = Tag.fn_type_cc; base: Inst, positionals: struct { diff --git a/src/zir_sema.zig b/src/zir_sema.zig index 80146397c5..1a2e99ded5 100644 --- a/src/zir_sema.zig +++ b/src/zir_sema.zig @@ -91,7 +91,8 @@ pub fn analyzeInst(mod: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError! .@"fn" => return zirFn(mod, scope, old_inst.castTag(.@"fn").?), .@"export" => return zirExport(mod, scope, old_inst.castTag(.@"export").?), .primitive => return zirPrimitive(mod, scope, old_inst.castTag(.primitive).?), - .fntype => return zirFnType(mod, scope, old_inst.castTag(.fntype).?), + .fn_type => return zirFnType(mod, scope, old_inst.castTag(.fn_type).?), + .fn_type_cc => return zirFnTypeCc(mod, scope, old_inst.castTag(.fn_type_cc).?), .intcast => return zirIntcast(mod, scope, old_inst.castTag(.intcast).?), .bitcast => return zirBitcast(mod, scope, old_inst.castTag(.bitcast).?), .floatcast => return zirFloatcast(mod, scope, old_inst.castTag(.floatcast).?), @@ -155,7 +156,7 @@ pub fn analyzeInst(mod: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError! 
.bool_or => return zirBoolOp(mod, scope, old_inst.castTag(.bool_or).?), .void_value => return mod.constVoid(scope, old_inst.src), .switchbr => return zirSwitchBr(mod, scope, old_inst.castTag(.switchbr).?, false), - .switchbr_ref => return zirSwitchBr(mod, scope, old_inst.castTag(.switchbr).?, true), + .switchbr_ref => return zirSwitchBr(mod, scope, old_inst.castTag(.switchbr_ref).?, true), .switch_range => return zirSwitchRange(mod, scope, old_inst.castTag(.switch_range).?), .container_field_named, @@ -958,11 +959,11 @@ fn zirCall(mod: *Module, scope: *Scope, inst: *zir.Inst.Call) InnerError!*Inst { ); } - if (inst.kw_args.modifier == .compile_time) { + if (inst.positionals.modifier == .compile_time) { return mod.fail(scope, inst.base.src, "TODO implement comptime function calls", .{}); } - if (inst.kw_args.modifier != .auto) { - return mod.fail(scope, inst.base.src, "TODO implement call with modifier {}", .{inst.kw_args.modifier}); + if (inst.positionals.modifier != .auto) { + return mod.fail(scope, inst.base.src, "TODO implement call with modifier {}", .{inst.positionals.modifier}); } // TODO handle function calls of generic functions @@ -1295,34 +1296,69 @@ fn zirEnsureErrPayloadVoid(mod: *Module, scope: *Scope, unwrap: *zir.Inst.UnOp) fn zirFnType(mod: *Module, scope: *Scope, fntype: *zir.Inst.FnType) InnerError!*Inst { const tracy = trace(@src()); defer tracy.end(); - const return_type = try resolveType(mod, scope, fntype.positionals.return_type); + + return fnTypeCommon( + mod, + scope, + &fntype.base, + fntype.positionals.param_types, + fntype.positionals.return_type, + .Unspecified, + ); +} + +fn zirFnTypeCc(mod: *Module, scope: *Scope, fntype: *zir.Inst.FnTypeCc) InnerError!*Inst { + const tracy = trace(@src()); + defer tracy.end(); + const cc_tv = try resolveInstConst(mod, scope, fntype.positionals.cc); + // TODO once we're capable of importing and analyzing decls from + // std.builtin, this needs to change const cc_str = 
cc_tv.val.castTag(.enum_literal).?.data; const cc = std.meta.stringToEnum(std.builtin.CallingConvention, cc_str) orelse return mod.fail(scope, fntype.positionals.cc.src, "Unknown calling convention {s}", .{cc_str}); + return fnTypeCommon( + mod, + scope, + &fntype.base, + fntype.positionals.param_types, + fntype.positionals.return_type, + cc, + ); +} + +fn fnTypeCommon( + mod: *Module, + scope: *Scope, + zir_inst: *zir.Inst, + zir_param_types: []*zir.Inst, + zir_return_type: *zir.Inst, + cc: std.builtin.CallingConvention, +) InnerError!*Inst { + const return_type = try resolveType(mod, scope, zir_return_type); // Hot path for some common function types. - if (fntype.positionals.param_types.len == 0) { + if (zir_param_types.len == 0) { if (return_type.zigTypeTag() == .NoReturn and cc == .Unspecified) { - return mod.constType(scope, fntype.base.src, Type.initTag(.fn_noreturn_no_args)); + return mod.constType(scope, zir_inst.src, Type.initTag(.fn_noreturn_no_args)); } if (return_type.zigTypeTag() == .Void and cc == .Unspecified) { - return mod.constType(scope, fntype.base.src, Type.initTag(.fn_void_no_args)); + return mod.constType(scope, zir_inst.src, Type.initTag(.fn_void_no_args)); } if (return_type.zigTypeTag() == .NoReturn and cc == .Naked) { - return mod.constType(scope, fntype.base.src, Type.initTag(.fn_naked_noreturn_no_args)); + return mod.constType(scope, zir_inst.src, Type.initTag(.fn_naked_noreturn_no_args)); } if (return_type.zigTypeTag() == .Void and cc == .C) { - return mod.constType(scope, fntype.base.src, Type.initTag(.fn_ccc_void_no_args)); + return mod.constType(scope, zir_inst.src, Type.initTag(.fn_ccc_void_no_args)); } } const arena = scope.arena(); - const param_types = try arena.alloc(Type, fntype.positionals.param_types.len); - for (fntype.positionals.param_types) |param_type, i| { + const param_types = try arena.alloc(Type, zir_param_types.len); + for (zir_param_types) |param_type, i| { const resolved = try resolveType(mod, scope, param_type); 
// TODO skip for comptime params if (!resolved.isValidVarType(false)) { @@ -1336,7 +1372,7 @@ fn zirFnType(mod: *Module, scope: *Scope, fntype: *zir.Inst.FnType) InnerError!* .return_type = return_type, .cc = cc, }); - return mod.constType(scope, fntype.base.src, fn_ty); + return mod.constType(scope, zir_inst.src, fn_ty); } fn zirPrimitive(mod: *Module, scope: *Scope, primitive: *zir.Inst.Primitive) InnerError!*Inst { From 29daf10639149bd023db0be4e04eaf154dce0f83 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Wed, 17 Feb 2021 22:34:06 -0700 Subject: [PATCH 101/173] stage2: fix a couple more compilation errors --- lib/std/zig/ast.zig | 4 ++-- src/Module.zig | 39 +++++++++++++++++++-------------------- src/astgen.zig | 17 ++--------------- src/zir_sema.zig | 18 +++++++++--------- 4 files changed, 32 insertions(+), 46 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 02bca79986..72b47ecd3f 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -2054,7 +2054,7 @@ pub const full = struct { return null; } const param_type = it.fn_proto.ast.params[it.param_i]; - var tok_i = tree.firstToken(param_type) - 1; + var tok_i = it.tree.firstToken(param_type) - 1; while (true) : (tok_i -= 1) switch (token_tags[tok_i]) { .colon => continue, .identifier => name_token = tok_i, @@ -2063,7 +2063,7 @@ pub const full = struct { else => break, }; it.param_i += 1; - it.tok_i = tree.lastToken(param_type) + 1; + it.tok_i = it.tree.lastToken(param_type) + 1; it.tok_flag = true; return Param{ .first_doc_comment = first_doc_comment, diff --git a/src/Module.zig b/src/Module.zig index 17084677d4..d2530d7df3 100644 --- a/src/Module.zig +++ b/src/Module.zig @@ -1223,19 +1223,6 @@ fn astgenAndSemaFn( .{}, ); } - const opt_cc: ?*zir.Inst = if (fn_proto.ast.callconv_expr != 0) cc: { - // TODO instead of enum literal type, this needs to be the - // std.builtin.CallingConvention enum. We need to implement importing other files - // and enums in order to fix this. 
- const src = token_starts[tree.firstToken(fn_proto.ast.callconv_expr)]; - const enum_lit_ty = try astgen.addZIRInstConst(mod, &fn_type_scope.base, src, .{ - .ty = Type.initTag(.type), - .val = Value.initTag(.enum_literal_type), - }); - break :cc try astgen.comptimeExpr(mod, &fn_type_scope.base, .{ - .ty = enum_lit_ty, - }, fn_proto.ast.callconv_expr); - } else null; const maybe_bang = tree.firstToken(fn_proto.ast.return_type) - 1; if (token_tags[maybe_bang] == .bang) { @@ -1247,13 +1234,24 @@ fn astgenAndSemaFn( type_type_rl, fn_proto.ast.return_type, ); - const fn_type_inst = if (opt_cc) |cc| - try astgen.addZirInstTag(mod, &fn_type_scope.base, fn_src, .fn_type_cc, .{ + const fn_type_inst = if (fn_proto.ast.callconv_expr != 0) cc: { + // TODO instead of enum literal type, this needs to be the + // std.builtin.CallingConvention enum. We need to implement importing other files + // and enums in order to fix this. + const src = token_starts[tree.firstToken(fn_proto.ast.callconv_expr)]; + const enum_lit_ty = try astgen.addZIRInstConst(mod, &fn_type_scope.base, src, .{ + .ty = Type.initTag(.type), + .val = Value.initTag(.enum_literal_type), + }); + const cc = try astgen.comptimeExpr(mod, &fn_type_scope.base, .{ + .ty = enum_lit_ty, + }, fn_proto.ast.callconv_expr); + break :cc try astgen.addZirInstTag(mod, &fn_type_scope.base, fn_src, .fn_type_cc, .{ .return_type = return_type_inst, .param_types = param_types, .cc = cc, - }) - else + }); + } else try astgen.addZirInstTag(mod, &fn_type_scope.base, fn_src, .fn_type, .{ .return_type = return_type_inst, .param_types = param_types, @@ -1362,13 +1360,13 @@ fn astgenAndSemaFn( params_scope = &sub_scope.base; } - try astgen.expr(mod, params_scope, .none, body_node); + _ = try astgen.expr(mod, params_scope, .none, body_node); if (gen_scope.instructions.items.len == 0 or !gen_scope.instructions.items[gen_scope.instructions.items.len - 1].tag.isNoReturn()) { const src = token_starts[tree.lastToken(body_node)]; - _ = try 
astgen.addZIRNoOp(mod, &gen_scope.base, src, .returnvoid); + _ = try astgen.addZIRNoOp(mod, &gen_scope.base, src, .return_void); } if (std.builtin.mode == .Debug and mod.comp.verbose_ir) { @@ -1542,6 +1540,7 @@ fn astgenAndSemaVarDecl( .decl = decl, .arena = &gen_scope_arena.allocator, .parent = &decl.container.base, + .force_comptime = true, }; defer gen_scope.instructions.deinit(mod.gpa); @@ -1600,7 +1599,7 @@ fn astgenAndSemaVarDecl( } else if (!is_extern) { return mod.failTok( &block_scope.base, - tree.firstToken(var_decl), + var_decl.ast.mut_token, "variables must be initialized", .{}, ); diff --git a/src/astgen.zig b/src/astgen.zig index 125e9bceda..a018d58d2f 100644 --- a/src/astgen.zig +++ b/src/astgen.zig @@ -539,6 +539,7 @@ pub fn comptimeExpr( } const tree = parent_scope.tree(); + const token_starts = tree.tokens.items(.start); // Make a scope to collect generated instructions in the sub-expression. var block_scope: Scope.GenZIR = .{ @@ -693,7 +694,7 @@ pub fn blockExpr( rl: ResultLoc, block_node: ast.Node.Index, statements: []const ast.Node.Index, -) InnerError!void { +) InnerError!*zir.Inst { const tracy = trace(@src()); defer tracy.end(); @@ -1174,20 +1175,6 @@ fn negation( return addZIRBinOp(mod, scope, src, op_inst_tag, lhs, rhs); } -fn ptrType(mod: *Module, scope: *Scope, node: *ast.Node.PtrType) InnerError!*zir.Inst { - const tree = scope.tree(); - const src = token_starts[node.op_token]; - return ptrSliceType(mod, scope, src, &node.ptr_info, node.rhs, switch (tree.token_ids[node.op_token]) { - .Asterisk, .AsteriskAsterisk => .One, - // TODO stage1 type inference bug - .LBracket => @as(std.builtin.TypeInfo.Pointer.Size, switch (tree.token_ids[node.op_token + 2]) { - .identifier => .C, - else => .Many, - }), - else => unreachable, - }); -} - fn ptrSliceType(mod: *Module, scope: *Scope, src: usize, ptr_info: *ast.PtrInfo, rhs: *ast.Node, size: std.builtin.TypeInfo.Pointer.Size) InnerError!*zir.Inst { const simple = ptr_info.allowzero_token == null 
and ptr_info.align_info == null and diff --git a/src/zir_sema.zig b/src/zir_sema.zig index 1a2e99ded5..83d7113c9c 100644 --- a/src/zir_sema.zig +++ b/src/zir_sema.zig @@ -981,8 +981,8 @@ fn zirCall(mod: *Module, scope: *Scope, inst: *zir.Inst.Call) InnerError!*Inst { const ret_type = func.ty.fnReturnType(); const b = try mod.requireFunctionBlock(scope, inst.base.src); - const is_comptime_call = b.is_comptime or inst.kw_args.modifier == .compile_time; - const is_inline_call = is_comptime_call or inst.kw_args.modifier == .always_inline or + const is_comptime_call = b.is_comptime or inst.positionals.modifier == .compile_time; + const is_inline_call = is_comptime_call or inst.positionals.modifier == .always_inline or func.ty.fnCallingConvention() == .Inline; if (is_inline_call) { const func_val = try mod.resolveConstValue(scope, func); @@ -1668,13 +1668,13 @@ fn zirSwitchBr(mod: *Module, scope: *Scope, inst: *zir.Inst.SwitchBr, ref: bool) fn validateSwitch(mod: *Module, scope: *Scope, target: *Inst, inst: *zir.Inst.SwitchBr) InnerError!void { // validate usage of '_' prongs - if (inst.kw_args.special_prong == .underscore and target.ty.zigTypeTag() != .Enum) { + if (inst.positionals.special_prong == .underscore and target.ty.zigTypeTag() != .Enum) { return mod.fail(scope, inst.base.src, "'_' prong only allowed when switching on non-exhaustive enums", .{}); // TODO notes "'_' prong here" inst.positionals.cases[last].src } // check that target type supports ranges - if (inst.kw_args.range) |range_inst| { + if (inst.positionals.range) |range_inst| { switch (target.ty.zigTypeTag()) { .Int, .ComptimeInt => {}, else => { @@ -1725,14 +1725,14 @@ fn validateSwitch(mod: *Module, scope: *Scope, target: *Inst, inst: *zir.Inst.Sw const start = try target.ty.minInt(&arena, mod.getTarget()); const end = try target.ty.maxInt(&arena, mod.getTarget()); if (try range_set.spans(start, end)) { - if (inst.kw_args.special_prong == .@"else") { + if (inst.positionals.special_prong == .@"else") 
{ return mod.fail(scope, inst.base.src, "unreachable else prong, all cases already handled", .{}); } return; } } - if (inst.kw_args.special_prong != .@"else") { + if (inst.positionals.special_prong != .@"else") { return mod.fail(scope, inst.base.src, "switch must handle all possibilities", .{}); } }, @@ -1752,15 +1752,15 @@ fn validateSwitch(mod: *Module, scope: *Scope, target: *Inst, inst: *zir.Inst.Sw return mod.fail(scope, item.src, "duplicate switch value", .{}); } } - if ((true_count + false_count < 2) and inst.kw_args.special_prong != .@"else") { + if ((true_count + false_count < 2) and inst.positionals.special_prong != .@"else") { return mod.fail(scope, inst.base.src, "switch must handle all possibilities", .{}); } - if ((true_count + false_count == 2) and inst.kw_args.special_prong == .@"else") { + if ((true_count + false_count == 2) and inst.positionals.special_prong == .@"else") { return mod.fail(scope, inst.base.src, "unreachable else prong, all cases already handled", .{}); } }, .EnumLiteral, .Void, .Fn, .Pointer, .Type => { - if (inst.kw_args.special_prong != .@"else") { + if (inst.positionals.special_prong != .@"else") { return mod.fail(scope, inst.base.src, "else prong required when switching on type '{}'", .{target.ty}); } From df5a8120df640de900667624ad8390394f99521f Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Thu, 18 Feb 2021 21:34:31 +0200 Subject: [PATCH 102/173] translate-c: small miscellaneous improvements --- src/translate_c.zig | 38 ++++++++++++++-------- src/translate_c/ast.zig | 19 +++++++---- test/translate_c.zig | 72 +++++++++++++++++++++++++++++++++-------- 3 files changed, 95 insertions(+), 34 deletions(-) diff --git a/src/translate_c.zig b/src/translate_c.zig index 47c601677e..541b854da7 100644 --- a/src/translate_c.zig +++ b/src/translate_c.zig @@ -1219,8 +1219,10 @@ fn transCompoundStmtInline( const end_it = stmt.body_end(); while (it != end_it) : (it += 1) { const result = try transStmt(c, &block.base, it[0], .unused); - 
if (result.tag() == .declaration) continue; - try block.statements.append(result); + switch (result.tag()) { + .declaration, .empty_block => {}, + else => try block.statements.append(result), + } } } @@ -1395,6 +1397,10 @@ fn transImplicitCastExpr( .BuiltinFnToFnPtr => { return transExpr(c, scope, sub_expr, result_used); }, + .ToVoid => { + // Should only appear in the rhs and lhs of a ConditionalOperator + return transExpr(c, scope, sub_expr, .unused); + }, else => |kind| return fail( c, error.UnsupportedTranslation, @@ -2032,10 +2038,8 @@ fn transZeroInitExpr( typedef_decl.getUnderlyingType().getTypePtr(), ); }, - else => {}, + else => return Tag.std_mem_zeroes.create(c.arena, try transType(c, scope, ty, source_loc)), } - - return fail(c, error.UnsupportedType, source_loc, "type does not have an implicit init value", .{}); } fn transImplicitValueInitExpr( @@ -2118,7 +2122,7 @@ fn transDoWhileLoop( defer cond_scope.deinit(); const cond = try transBoolExpr(c, &cond_scope.base, @ptrCast(*const clang.Expr, stmt.getCond()), .used); const if_not_break = switch (cond.tag()) { - .false_literal => Tag.@"break".init(), + .false_literal => return transStmt(c, scope, stmt.getBody(), .unused), .true_literal => { const body_node = try transStmt(c, scope, stmt.getBody(), .unused); return Tag.while_true.create(c.arena, body_node); @@ -2396,8 +2400,10 @@ fn transSwitchProngStmtInline( }, else => { const result = try transStmt(c, &block.base, it[0], .unused); - if (result.tag() == .declaration) continue; - try block.statements.append(result); + switch (result.tag()) { + .declaration, .empty_block => {}, + else => try block.statements.append(result), + } }, } } @@ -2479,8 +2485,10 @@ fn transStmtExpr(c: *Context, scope: *Scope, stmt: *const clang.StmtExpr, used: const end_it = comp.body_end(); while (it != end_it - 1) : (it += 1) { const result = try transStmt(c, &block_scope.base, it[0], .unused); - if (result.tag() == .declaration) continue; - try 
block_scope.statements.append(result); + switch (result.tag()) { + .declaration, .empty_block => {}, + else => try block_scope.statements.append(result), + } } const break_node = try Tag.break_val.create(c.arena, .{ .label = block_scope.label, @@ -3126,12 +3134,12 @@ fn transConditionalOperator(c: *Context, scope: *Scope, stmt: *const clang.Condi const cond = try transBoolExpr(c, &cond_scope.base, cond_expr, .used); - var then_body = try transExpr(c, scope, true_expr, .used); + var then_body = try transExpr(c, scope, true_expr, used); if (!res_is_bool and isBoolRes(then_body)) { then_body = try Tag.bool_to_int.create(c.arena, then_body); } - var else_body = try transExpr(c, scope, false_expr, .used); + var else_body = try transExpr(c, scope, false_expr, used); if (!res_is_bool and isBoolRes(else_body)) { else_body = try Tag.bool_to_int.create(c.arena, else_body); } @@ -3141,7 +3149,8 @@ fn transConditionalOperator(c: *Context, scope: *Scope, stmt: *const clang.Condi .then = then_body, .@"else" = else_body, }); - return maybeSuppressResult(c, scope, used, if_node); + // Clang inserts ImplicitCast(ToVoid)'s to both rhs and lhs so we don't need to supress the result here. + return if_node; } fn maybeSuppressResult( @@ -4794,7 +4803,8 @@ fn parseCMulExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { while (true) { switch (m.next().?) { .Asterisk => { - if (m.peek().? 
== .RParen) { + const next = m.peek().?; + if (next == .RParen or next == .Nl or next == .Eof) { // type *) // last token of `node` diff --git a/src/translate_c/ast.zig b/src/translate_c/ast.zig index 2306d1c36f..be331f39dc 100644 --- a/src/translate_c/ast.zig +++ b/src/translate_c/ast.zig @@ -1891,30 +1891,35 @@ fn addSemicolonIfNeeded(c: *Context, node: Node) !void { .var_decl, .var_simple, .arg_redecl, .alias, .enum_redecl, .block, .empty_block, .block_single, .@"switch" => {}, .while_true => { const payload = node.castTag(.while_true).?.data; - return addSemicolonIfNotBlock(c, payload); + return addSemicolonIfNotBlock(c, payload, .yes_if); }, .@"while" => { const payload = node.castTag(.@"while").?.data; - return addSemicolonIfNotBlock(c, payload.body); + return addSemicolonIfNotBlock(c, payload.body, .yes_if); }, .@"if" => { const payload = node.castTag(.@"if").?.data; if (payload.@"else") |some| - return addSemicolonIfNotBlock(c, some); - return addSemicolonIfNotBlock(c, payload.then); + return addSemicolonIfNotBlock(c, some, .no_if); + return addSemicolonIfNotBlock(c, payload.then, .no_if); }, else => _ = try c.addToken(.semicolon, ";"), } } -fn addSemicolonIfNotBlock(c: *Context, node: Node) !void { +fn addSemicolonIfNotBlock(c: *Context, node: Node, if_needs_semicolon: enum{ yes_if, no_if}) !void { switch (node.tag()) { .block, .empty_block, .block_single => {}, .@"if" => { + if (if_needs_semicolon == .yes_if) { + _ = try c.addToken(.semicolon, ";"); + return; + } + const payload = node.castTag(.@"if").?.data; if (payload.@"else") |some| - return addSemicolonIfNotBlock(c, some); - return addSemicolonIfNotBlock(c, payload.then); + return addSemicolonIfNotBlock(c, some, .no_if); + return addSemicolonIfNotBlock(c, payload.then, .no_if); }, else => _ = try c.addToken(.semicolon, ";"), } diff --git a/test/translate_c.zig b/test/translate_c.zig index a2d1bf417e..34e6897c94 100644 --- a/test/translate_c.zig +++ b/test/translate_c.zig @@ -3,6 +3,62 @@ const std = 
@import("std"); const CrossTarget = std.zig.CrossTarget; pub fn addCases(cases: *tests.TranslateCContext) void { + cases.add("if as while stmt has semicolon", + \\void foo() { + \\ while (1) if (1) { + \\ int a = 1; + \\ } else { + \\ int b = 2; + \\ } + \\} + , &[_][]const u8{ + \\pub export fn foo() void { + \\ while (true) if (true) { + \\ var a: c_int = 1; + \\ } else { + \\ var b: c_int = 2; + \\ }; + \\} + }); + + cases.add("conditional operator cast to void", + \\int bar(); + \\void foo() { + \\ int a; + \\ a ? a = 2 : bar(); + \\} + , &[_][]const u8{ + \\pub extern fn bar(...) c_int; + \\pub export fn foo() void { + \\ var a: c_int = undefined; + \\ if (a != 0) a = 2 else _ = bar(); + \\} + }); + + cases.add("struct in struct init to zero", + \\struct Foo { + \\ int a; + \\ struct Bar { + \\ int a; + \\ } b; + \\} a = {}; + \\#define PTR void * + , &[_][]const u8{ + \\pub const struct_Bar = extern struct { + \\ a: c_int, + \\}; + \\pub const struct_Foo = extern struct { + \\ a: c_int, + \\ b: struct_Bar, + \\}; + \\pub export var a: struct_Foo = struct_Foo{ + \\ .a = 0, + \\ .b = @import("std").mem.zeroes(struct_Bar), + \\}; + , + \\pub const PTR = ?*c_void; + }); + cases.add("scoped enum", \\void foo() { \\ enum Foo { @@ -330,9 +386,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\pub export fn foo() void { \\ while (false) while (false) {}; \\ while (true) while (false) {}; - \\ while (true) while (true) { - \\ break; - \\ }; + \\ while (true) {} \\} }); @@ -1044,13 +1098,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\ ;;;;; \\} , &[_][]const u8{ - \\pub export fn foo() void { - \\ {} - \\ {} - \\ {} - \\ {} - \\ {} - \\} + \\pub export fn foo() void {} }); if (std.Target.current.os.tag != .windows) { @@ -3050,9 +3098,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\} , &[_][]const u8{ \\pub fn foo() callconv(.C) void { - \\ if (true) while (true) { - \\ break; - \\ }; + \\ if (true) {} \\} }); From 
3aba1fa04fe633f89e05a7adc4c170d400705bf9 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Fri, 19 Feb 2021 01:52:27 +0200 Subject: [PATCH 103/173] translate-c: ensure failed macros don't get defined multiple times --- src/translate_c.zig | 2 +- test/run_translated_c.zig | 6 ++++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/src/translate_c.zig b/src/translate_c.zig index 541b854da7..f29dfccfa3 100644 --- a/src/translate_c.zig +++ b/src/translate_c.zig @@ -4025,7 +4025,7 @@ pub fn failDecl(c: *Context, loc: clang.SourceLocation, name: []const u8, compti // location // pub const name = @compileError(msg); const fail_msg = try std.fmt.allocPrint(c.arena, format, args); - try c.global_scope.nodes.append(try Tag.fail_decl.create(c.arena, .{ .actual = name, .mangled = fail_msg })); + try addTopLevelDecl(c, name, try Tag.fail_decl.create(c.arena, .{ .actual = name, .mangled = fail_msg })); const location_comment = try std.fmt.allocPrint(c.arena, "// {s}", .{c.locStr(loc)}); try c.global_scope.nodes.append(try Tag.warning.create(c.arena, location_comment)); } diff --git a/test/run_translated_c.zig b/test/run_translated_c.zig index 04095d3b71..2db80c9c6a 100644 --- a/test/run_translated_c.zig +++ b/test/run_translated_c.zig @@ -3,6 +3,12 @@ const tests = @import("tests.zig"); const nl = std.cstr.line_sep; pub fn addCases(cases: *tests.RunTranslatedCContext) void { + cases.add("failed macros are only declared once", + \\#define FOO = + \\#define FOO = + \\int main(void) {} + , ""); + cases.add("parenthesized string literal", \\void foo(const char *s) {} \\int main(void) { From 9010bd8aec612d5a14e4be800c80b72025fac2c5 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Thu, 18 Feb 2021 20:09:29 -0700 Subject: [PATCH 104/173] stage2: astgen: fix most of the remaining compile errors more progress on converting astgen to the new AST memory layout. only a few code paths left to update. 
--- lib/std/zig/ast.zig | 40 +- lib/std/zig/render.zig | 48 +- src/BuiltinFn.zig | 1043 ++++++++++++++++++++-------------------- src/Module.zig | 17 +- src/astgen.zig | 699 ++++++++++++++++++--------- src/zir.zig | 16 +- src/zir_sema.zig | 12 +- 7 files changed, 1052 insertions(+), 823 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 72b47ecd3f..c8f9afd080 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -1754,25 +1754,20 @@ pub const Tree = struct { const token_tags = tree.tokens.items(.tag); // TODO: looks like stage1 isn't quite smart enough to handle enum // literals in some places here - const Kind = full.PtrType.Kind; - const kind: Kind = switch (token_tags[info.main_token]) { + const Size = std.builtin.TypeInfo.Pointer.Size; + const size: Size = switch (token_tags[info.main_token]) { .asterisk, .asterisk_asterisk, => switch (token_tags[info.main_token + 1]) { - .r_bracket => .many, - .colon => .sentinel, - .identifier => if (token_tags[info.main_token - 1] == .l_bracket) Kind.c else .one, - else => .one, - }, - .l_bracket => switch (token_tags[info.main_token + 1]) { - .r_bracket => Kind.slice, - .colon => .slice_sentinel, - else => unreachable, + .r_bracket, .colon => .Many, + .identifier => if (token_tags[info.main_token - 1] == .l_bracket) Size.C else .One, + else => .One, }, + .l_bracket => Size.Slice, else => unreachable, }; var result: full.PtrType = .{ - .kind = kind, + .size = size, .allowzero_token = null, .const_token = null, .volatile_token = null, @@ -1782,13 +1777,7 @@ pub const Tree = struct { // here while looking for modifiers as that could result in false // positives. Therefore, start after a sentinel if there is one and // skip over any align node and bit range nodes. 
- var i = if (kind == .sentinel or kind == .slice_sentinel) blk: { - assert(info.sentinel != 0); - break :blk tree.lastToken(info.sentinel) + 1; - } else blk: { - assert(info.sentinel == 0); - break :blk info.main_token; - }; + var i = if (info.sentinel != 0) tree.lastToken(info.sentinel) + 1 else info.main_token; const end = tree.firstToken(info.child_type); while (i < end) : (i += 1) { switch (token_tags[i]) { @@ -2115,7 +2104,7 @@ pub const full = struct { .comptime_noalias = comptime_noalias, .name_token = name_token, .anytype_ellipsis3 = it.tok_i - 1, - .type_expr = param_type, + .type_expr = 0, }; } it.tok_flag = false; @@ -2166,21 +2155,12 @@ pub const full = struct { }; pub const PtrType = struct { - kind: Kind, + size: std.builtin.TypeInfo.Pointer.Size, allowzero_token: ?TokenIndex, const_token: ?TokenIndex, volatile_token: ?TokenIndex, ast: Ast, - pub const Kind = enum { - one, - many, - sentinel, - c, - slice, - slice_sentinel, - }; - pub const Ast = struct { main_token: TokenIndex, align_node: Node.Index, diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index f7608fe61a..c169a48b01 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -677,8 +677,8 @@ fn renderPtrType( ptr_type: ast.full.PtrType, space: Space, ) Error!void { - switch (ptr_type.kind) { - .one => { + switch (ptr_type.size) { + .One => { // Since ** tokens exist and the same token is shared by two // nested pointer types, we check to see if we are the parent // in such a relationship. 
If so, skip rendering anything for @@ -691,33 +691,35 @@ fn renderPtrType( } try renderToken(ais, tree, ptr_type.ast.main_token, .none); // asterisk }, - .many => { - try renderToken(ais, tree, ptr_type.ast.main_token - 1, .none); // lbracket - try renderToken(ais, tree, ptr_type.ast.main_token, .none); // asterisk - try renderToken(ais, tree, ptr_type.ast.main_token + 1, .none); // rbracket + .Many => { + if (ptr_type.ast.sentinel == 0) { + try renderToken(ais, tree, ptr_type.ast.main_token - 1, .none); // lbracket + try renderToken(ais, tree, ptr_type.ast.main_token, .none); // asterisk + try renderToken(ais, tree, ptr_type.ast.main_token + 1, .none); // rbracket + } else { + try renderToken(ais, tree, ptr_type.ast.main_token - 1, .none); // lbracket + try renderToken(ais, tree, ptr_type.ast.main_token, .none); // asterisk + try renderToken(ais, tree, ptr_type.ast.main_token + 1, .none); // colon + try renderExpression(ais, tree, ptr_type.ast.sentinel, .none); + try renderToken(ais, tree, tree.lastToken(ptr_type.ast.sentinel) + 1, .none); // rbracket + } }, - .sentinel => { - try renderToken(ais, tree, ptr_type.ast.main_token - 1, .none); // lbracket - try renderToken(ais, tree, ptr_type.ast.main_token, .none); // asterisk - try renderToken(ais, tree, ptr_type.ast.main_token + 1, .none); // colon - try renderExpression(ais, tree, ptr_type.ast.sentinel, .none); - try renderToken(ais, tree, tree.lastToken(ptr_type.ast.sentinel) + 1, .none); // rbracket - }, - .c => { + .C => { try renderToken(ais, tree, ptr_type.ast.main_token - 1, .none); // lbracket try renderToken(ais, tree, ptr_type.ast.main_token, .none); // asterisk try renderToken(ais, tree, ptr_type.ast.main_token + 1, .none); // c try renderToken(ais, tree, ptr_type.ast.main_token + 2, .none); // rbracket }, - .slice => { - try renderToken(ais, tree, ptr_type.ast.main_token, .none); // lbracket - try renderToken(ais, tree, ptr_type.ast.main_token + 1, .none); // rbracket - }, - .slice_sentinel => { - try 
renderToken(ais, tree, ptr_type.ast.main_token, .none); // lbracket - try renderToken(ais, tree, ptr_type.ast.main_token + 1, .none); // colon - try renderExpression(ais, tree, ptr_type.ast.sentinel, .none); - try renderToken(ais, tree, tree.lastToken(ptr_type.ast.sentinel) + 1, .none); // rbracket + .Slice => { + if (ptr_type.ast.sentinel == 0) { + try renderToken(ais, tree, ptr_type.ast.main_token, .none); // lbracket + try renderToken(ais, tree, ptr_type.ast.main_token + 1, .none); // rbracket + } else { + try renderToken(ais, tree, ptr_type.ast.main_token, .none); // lbracket + try renderToken(ais, tree, ptr_type.ast.main_token + 1, .none); // colon + try renderExpression(ais, tree, ptr_type.ast.sentinel, .none); + try renderToken(ais, tree, tree.lastToken(ptr_type.ast.sentinel) + 1, .none); // rbracket + } }, } diff --git a/src/BuiltinFn.zig b/src/BuiltinFn.zig index 9776edfef3..deb1cbfa76 100644 --- a/src/BuiltinFn.zig +++ b/src/BuiltinFn.zig @@ -115,727 +115,730 @@ allows_lvalue: bool = false, /// of parameters. 
param_count: ?u8, -pub const list = std.ComptimeStringMap(@This(), .{ - .{ - "@addWithOverflow", +pub const list = list: { + @setEvalBranchQuota(3000); + break :list std.ComptimeStringMap(@This(), .{ .{ - .tag = .add_with_overflow, - .param_count = 4, + "@addWithOverflow", + .{ + .tag = .add_with_overflow, + .param_count = 4, + }, }, - }, - .{ - "@alignCast", .{ - .tag = align_cast, - .param_count = 1, + "@alignCast", + .{ + .tag = .align_cast, + .param_count = 1, + }, }, - }, - .{ - "@alignOf", .{ - .tag = .align_of, - .param_count = 1, + "@alignOf", + .{ + .tag = .align_of, + .param_count = 1, + }, }, - }, - .{ - "@as", .{ - .tag = .as, - .needs_mem_loc = true, - .param_count = 2, + "@as", + .{ + .tag = .as, + .needs_mem_loc = true, + .param_count = 2, + }, }, - }, - .{ - "@asyncCall", .{ - .tag = .async_call, - .param_count = null, + "@asyncCall", + .{ + .tag = .async_call, + .param_count = null, + }, }, - }, - .{ - "@atomicLoad", .{ - .tag = .atomic_load, - .param_count = 3, + "@atomicLoad", + .{ + .tag = .atomic_load, + .param_count = 3, + }, }, - }, - .{ - "@atomicRmw", .{ - .tag = .atomic_rmw, - .param_count = 5, + "@atomicRmw", + .{ + .tag = .atomic_rmw, + .param_count = 5, + }, }, - }, - .{ - "@atomicStore", .{ - .tag = .atomic_store, - .param_count = 4, + "@atomicStore", + .{ + .tag = .atomic_store, + .param_count = 4, + }, }, - }, - .{ - "@bitCast", .{ - .tag = .bit_cast, - .needs_mem_loc = true, - .param_count = 2, + "@bitCast", + .{ + .tag = .bit_cast, + .needs_mem_loc = true, + .param_count = 2, + }, }, - }, - .{ - "@bitOffsetOf", .{ - .tag = .bit_offset_of, - .param_count = 2, + "@bitOffsetOf", + .{ + .tag = .bit_offset_of, + .param_count = 2, + }, }, - }, - .{ - "@boolToInt", .{ - .tag = .bool_to_int, - .param_count = 1, + "@boolToInt", + .{ + .tag = .bool_to_int, + .param_count = 1, + }, }, - }, - .{ - "@bitSizeOf", .{ - .tag = .bit_size_of, - .param_count = 1, + "@bitSizeOf", + .{ + .tag = .bit_size_of, + .param_count = 1, + }, }, - }, - .{ - 
"@breakpoint", .{ - .tag = .breakpoint, - .param_count = 0, + "@breakpoint", + .{ + .tag = .breakpoint, + .param_count = 0, + }, }, - }, - .{ - "@mulAdd", .{ - .tag = .mul_add, - .param_count = 4, + "@mulAdd", + .{ + .tag = .mul_add, + .param_count = 4, + }, }, - }, - .{ - "@byteSwap", .{ - .tag = .byte_swap, - .param_count = 2, + "@byteSwap", + .{ + .tag = .byte_swap, + .param_count = 2, + }, }, - }, - .{ - "@bitReverse", .{ - .tag = .bit_reverse, - .param_count = 2, + "@bitReverse", + .{ + .tag = .bit_reverse, + .param_count = 2, + }, }, - }, - .{ - "@byteOffsetOf", .{ - .tag = .byte_offset_of, - .param_count = 2, + "@byteOffsetOf", + .{ + .tag = .byte_offset_of, + .param_count = 2, + }, }, - }, - .{ - "@call", .{ - .tag = .call, - .needs_mem_loc = true, - .param_count = 3, + "@call", + .{ + .tag = .call, + .needs_mem_loc = true, + .param_count = 3, + }, }, - }, - .{ - "@cDefine", .{ - .tag = .c_define, - .param_count = 2, + "@cDefine", + .{ + .tag = .c_define, + .param_count = 2, + }, }, - }, - .{ - "@cImport", .{ - .tag = .c_import, - .param_count = 1, + "@cImport", + .{ + .tag = .c_import, + .param_count = 1, + }, }, - }, - .{ - "@cInclude", .{ - .tag = .c_include, - .param_count = 1, + "@cInclude", + .{ + .tag = .c_include, + .param_count = 1, + }, }, - }, - .{ - "@clz", .{ - .tag = .clz, - .param_count = 2, + "@clz", + .{ + .tag = .clz, + .param_count = 2, + }, }, - }, - .{ - "@cmpxchgStrong", .{ - .tag = .cmpxchg_strong, - .param_count = 6, + "@cmpxchgStrong", + .{ + .tag = .cmpxchg_strong, + .param_count = 6, + }, }, - }, - .{ - "@cmpxchgWeak", .{ - .tag = .cmpxchg_weak, - .param_count = 6, + "@cmpxchgWeak", + .{ + .tag = .cmpxchg_weak, + .param_count = 6, + }, }, - }, - .{ - "@compileError", .{ - .tag = .compile_error, - .param_count = 1, + "@compileError", + .{ + .tag = .compile_error, + .param_count = 1, + }, }, - }, - .{ - "@compileLog", .{ - .tag = .compile_log, - .param_count = null, + "@compileLog", + .{ + .tag = .compile_log, + .param_count = null, 
+ }, }, - }, - .{ - "@ctz", .{ - .tag = .ctz, - .param_count = 2, + "@ctz", + .{ + .tag = .ctz, + .param_count = 2, + }, }, - }, - .{ - "@cUndef", .{ - .tag = .c_undef, - .param_count = 1, + "@cUndef", + .{ + .tag = .c_undef, + .param_count = 1, + }, }, - }, - .{ - "@divExact", .{ - .tag = .div_exact, - .param_count = 2, + "@divExact", + .{ + .tag = .div_exact, + .param_count = 2, + }, }, - }, - .{ - "@divFloor", .{ - .tag = .div_floor, - .param_count = 2, + "@divFloor", + .{ + .tag = .div_floor, + .param_count = 2, + }, }, - }, - .{ - "@divTrunc", .{ - .tag = .div_trunc, - .param_count = 2, + "@divTrunc", + .{ + .tag = .div_trunc, + .param_count = 2, + }, }, - }, - .{ - "@embedFile", .{ - .tag = .embed_file, - .param_count = 1, + "@embedFile", + .{ + .tag = .embed_file, + .param_count = 1, + }, }, - }, - .{ - "@enumToInt", .{ - .tag = .enum_to_int, - .param_count = 1, + "@enumToInt", + .{ + .tag = .enum_to_int, + .param_count = 1, + }, }, - }, - .{ - "@errorName", .{ - .tag = .error_name, - .param_count = 1, + "@errorName", + .{ + .tag = .error_name, + .param_count = 1, + }, }, - }, - .{ - "@errorReturnTrace", .{ - .tag = .error_return_trace, - .param_count = 0, + "@errorReturnTrace", + .{ + .tag = .error_return_trace, + .param_count = 0, + }, }, - }, - .{ - "@errorToInt", .{ - .tag = .error_to_int, - .param_count = 1, + "@errorToInt", + .{ + .tag = .error_to_int, + .param_count = 1, + }, }, - }, - .{ - "@errSetCast", .{ - .tag = .err_set_cast, - .param_count = 2, + "@errSetCast", + .{ + .tag = .err_set_cast, + .param_count = 2, + }, }, - }, - .{ - "@export", .{ - .tag = .@"export", - .param_count = 2, + "@export", + .{ + .tag = .@"export", + .param_count = 2, + }, }, - }, - .{ - "@fence", .{ - .tag = .fence, - .param_count = 0, + "@fence", + .{ + .tag = .fence, + .param_count = 0, + }, }, - }, - .{ - "@field", .{ - .tag = .field, - .needs_mem_loc = true, - .param_count = 2, - .allows_lvalue = true, + "@field", + .{ + .tag = .field, + .needs_mem_loc = true, + 
.param_count = 2, + .allows_lvalue = true, + }, }, - }, - .{ - "@fieldParentPtr", .{ - .tag = .field_parent_ptr, - .param_count = 3, + "@fieldParentPtr", + .{ + .tag = .field_parent_ptr, + .param_count = 3, + }, }, - }, - .{ - "@floatCast", .{ - .tag = .float_cast, - .param_count = 1, + "@floatCast", + .{ + .tag = .float_cast, + .param_count = 1, + }, }, - }, - .{ - "@floatToInt", .{ - .tag = .float_to_int, - .param_count = 1, + "@floatToInt", + .{ + .tag = .float_to_int, + .param_count = 1, + }, }, - }, - .{ - "@frame", .{ - .tag = .frame, - .param_count = 0, + "@frame", + .{ + .tag = .frame, + .param_count = 0, + }, }, - }, - .{ - "@Frame", .{ - .tag = .Frame, - .param_count = 1, + "@Frame", + .{ + .tag = .Frame, + .param_count = 1, + }, }, - }, - .{ - "@frameAddress", .{ - .tag = .frame_address, - .param_count = 0, + "@frameAddress", + .{ + .tag = .frame_address, + .param_count = 0, + }, }, - }, - .{ - "@frameSize", .{ - .tag = .frame_size, - .param_count = 1, + "@frameSize", + .{ + .tag = .frame_size, + .param_count = 1, + }, }, - }, - .{ - "@hasDecl", .{ - .tag = .has_decl, - .param_count = 2, + "@hasDecl", + .{ + .tag = .has_decl, + .param_count = 2, + }, }, - }, - .{ - "@hasField", .{ - .tag = .has_field, - .param_count = 2, + "@hasField", + .{ + .tag = .has_field, + .param_count = 2, + }, }, - }, - .{ - "@import", .{ - .tag = .import, - .param_count = 1, + "@import", + .{ + .tag = .import, + .param_count = 1, + }, }, - }, - .{ - "@intCast", .{ - .tag = .int_cast, - .param_count = 1, + "@intCast", + .{ + .tag = .int_cast, + .param_count = 1, + }, }, - }, - .{ - "@intToEnum", .{ - .tag = .int_to_enum, - .param_count = 1, + "@intToEnum", + .{ + .tag = .int_to_enum, + .param_count = 1, + }, }, - }, - .{ - "@intToError", .{ - .tag = .int_to_error, - .param_count = 1, + "@intToError", + .{ + .tag = .int_to_error, + .param_count = 1, + }, }, - }, - .{ - "@intToFloat", .{ - .tag = .int_to_float, - .param_count = 1, + "@intToFloat", + .{ + .tag = .int_to_float, + 
.param_count = 1, + }, }, - }, - .{ - "@intToPtr", .{ - .tag = .int_to_ptr, - .param_count = 2, + "@intToPtr", + .{ + .tag = .int_to_ptr, + .param_count = 2, + }, }, - }, - .{ - "@memcpy", .{ - .tag = .memcpy, - .param_count = 3, + "@memcpy", + .{ + .tag = .memcpy, + .param_count = 3, + }, }, - }, - .{ - "@memset", .{ - .tag = .memset, - .param_count = 3, + "@memset", + .{ + .tag = .memset, + .param_count = 3, + }, }, - }, - .{ - "@wasmMemorySize", .{ - .tag = .wasm_memory_size, - .param_count = 1, + "@wasmMemorySize", + .{ + .tag = .wasm_memory_size, + .param_count = 1, + }, }, - }, - .{ - "@wasmMemoryGrow", .{ - .tag = .wasm_memory_grow, - .param_count = 2, + "@wasmMemoryGrow", + .{ + .tag = .wasm_memory_grow, + .param_count = 2, + }, }, - }, - .{ - "@mod", .{ - .tag = .mod, - .param_count = 2, + "@mod", + .{ + .tag = .mod, + .param_count = 2, + }, }, - }, - .{ - "@mulWithOverflow", .{ - .tag = .mul_with_overflow, - .param_count = 4, + "@mulWithOverflow", + .{ + .tag = .mul_with_overflow, + .param_count = 4, + }, }, - }, - .{ - "@panic", .{ - .tag = .panic, - .param_count = 1, + "@panic", + .{ + .tag = .panic, + .param_count = 1, + }, }, - }, - .{ - "@popCount", .{ - .tag = .pop_count, - .param_count = 2, + "@popCount", + .{ + .tag = .pop_count, + .param_count = 2, + }, }, - }, - .{ - "@ptrCast", .{ - .tag = .ptr_cast, - .param_count = 2, + "@ptrCast", + .{ + .tag = .ptr_cast, + .param_count = 2, + }, }, - }, - .{ - "@ptrToInt", .{ - .tag = .ptr_to_int, - .param_count = 1, + "@ptrToInt", + .{ + .tag = .ptr_to_int, + .param_count = 1, + }, }, - }, - .{ - "@rem", .{ - .tag = .rem, - .param_count = 2, + "@rem", + .{ + .tag = .rem, + .param_count = 2, + }, }, - }, - .{ - "@returnAddress", .{ - .tag = .return_address, - .param_count = 0, + "@returnAddress", + .{ + .tag = .return_address, + .param_count = 0, + }, }, - }, - .{ - "@setAlignStack", .{ - .tag = .set_align_stack, - .param_count = 1, + "@setAlignStack", + .{ + .tag = .set_align_stack, + .param_count = 1, + 
}, }, - }, - .{ - "@setCold", .{ - .tag = .set_cold, - .param_count = 1, + "@setCold", + .{ + .tag = .set_cold, + .param_count = 1, + }, }, - }, - .{ - "@setEvalBranchQuota", .{ - .tag = .set_eval_branch_quota, - .param_count = 1, + "@setEvalBranchQuota", + .{ + .tag = .set_eval_branch_quota, + .param_count = 1, + }, }, - }, - .{ - "@setFloatMode", .{ - .tag = .set_float_mode, - .param_count = 1, + "@setFloatMode", + .{ + .tag = .set_float_mode, + .param_count = 1, + }, }, - }, - .{ - "@setRuntimeSafety", .{ - .tag = .set_runtime_safety, - .param_count = 1, + "@setRuntimeSafety", + .{ + .tag = .set_runtime_safety, + .param_count = 1, + }, }, - }, - .{ - "@shlExact", .{ - .tag = .shl_exact, - .param_count = 2, + "@shlExact", + .{ + .tag = .shl_exact, + .param_count = 2, + }, }, - }, - .{ - "@shlWithOverflow", .{ - .tag = .shl_with_overflow, - .param_count = 4, + "@shlWithOverflow", + .{ + .tag = .shl_with_overflow, + .param_count = 4, + }, }, - }, - .{ - "@shrExact", .{ - .tag = .shr_exact, - .param_count = 2, + "@shrExact", + .{ + .tag = .shr_exact, + .param_count = 2, + }, }, - }, - .{ - "@shuffle", .{ - .tag = .shuffle, - .param_count = 4, + "@shuffle", + .{ + .tag = .shuffle, + .param_count = 4, + }, }, - }, - .{ - "@sizeOf", .{ - .tag = .size_of, - .param_count = 1, + "@sizeOf", + .{ + .tag = .size_of, + .param_count = 1, + }, }, - }, - .{ - "@splat", .{ - .tag = .splat, - .needs_mem_loc = true, - .param_count = 2, + "@splat", + .{ + .tag = .splat, + .needs_mem_loc = true, + .param_count = 2, + }, }, - }, - .{ - "@reduce", .{ - .tag = .reduce, - .param_count = 2, + "@reduce", + .{ + .tag = .reduce, + .param_count = 2, + }, }, - }, - .{ - "@src", .{ - .tag = .src, - .needs_mem_loc = true, - .param_count = 0, + "@src", + .{ + .tag = .src, + .needs_mem_loc = true, + .param_count = 0, + }, }, - }, - .{ - "@sqrt", .{ - .tag = .sqrt, - .param_count = 1, + "@sqrt", + .{ + .tag = .sqrt, + .param_count = 1, + }, }, - }, - .{ - "@sin", .{ - .tag = .sin, - .param_count = 
1, + "@sin", + .{ + .tag = .sin, + .param_count = 1, + }, }, - }, - .{ - "@cos", .{ - .tag = .cos, - .param_count = 1, + "@cos", + .{ + .tag = .cos, + .param_count = 1, + }, }, - }, - .{ - "@exp", .{ - .tag = .exp, - .param_count = 1, + "@exp", + .{ + .tag = .exp, + .param_count = 1, + }, }, - }, - .{ - "@exp2", .{ - .tag = .exp2, - .param_count = 1, + "@exp2", + .{ + .tag = .exp2, + .param_count = 1, + }, }, - }, - .{ - "@log", .{ - .tag = .log, - .param_count = 1, + "@log", + .{ + .tag = .log, + .param_count = 1, + }, }, - }, - .{ - "@log2", .{ - .tag = .log2, - .param_count = 1, + "@log2", + .{ + .tag = .log2, + .param_count = 1, + }, }, - }, - .{ - "@log10", .{ - .tag = .log10, - .param_count = 1, + "@log10", + .{ + .tag = .log10, + .param_count = 1, + }, }, - }, - .{ - "@fabs", .{ - .tag = .fabs, - .param_count = 1, + "@fabs", + .{ + .tag = .fabs, + .param_count = 1, + }, }, - }, - .{ - "@floor", .{ - .tag = .floor, - .param_count = 1, + "@floor", + .{ + .tag = .floor, + .param_count = 1, + }, }, - }, - .{ - "@ceil", .{ - .tag = .ceil, - .param_count = 1, + "@ceil", + .{ + .tag = .ceil, + .param_count = 1, + }, }, - }, - .{ - "@trunc", .{ - .tag = .trunc, - .param_count = 1, + "@trunc", + .{ + .tag = .trunc, + .param_count = 1, + }, }, - }, - .{ - "@round", .{ - .tag = .round, - .param_count = 1, + "@round", + .{ + .tag = .round, + .param_count = 1, + }, }, - }, - .{ - "@subWithOverflow", .{ - .tag = .sub_with_overflow, - .param_count = 4, + "@subWithOverflow", + .{ + .tag = .sub_with_overflow, + .param_count = 4, + }, }, - }, - .{ - "@tagName", .{ - .tag = .tag_name, - .param_count = 1, + "@tagName", + .{ + .tag = .tag_name, + .param_count = 1, + }, }, - }, - .{ - "@This", .{ - .tag = .This, - .param_count = 0, + "@This", + .{ + .tag = .This, + .param_count = 0, + }, }, - }, - .{ - "@truncate", .{ - .tag = .truncate, - .param_count = 2, + "@truncate", + .{ + .tag = .truncate, + .param_count = 2, + }, }, - }, - .{ - "@Type", .{ - .tag = .Type, - .param_count = 
1, + "@Type", + .{ + .tag = .Type, + .param_count = 1, + }, }, - }, - .{ - "@typeInfo", .{ - .tag = .type_info, - .param_count = 1, + "@typeInfo", + .{ + .tag = .type_info, + .param_count = 1, + }, }, - }, - .{ - "@typeName", .{ - .tag = .type_name, - .param_count = 1, + "@typeName", + .{ + .tag = .type_name, + .param_count = 1, + }, }, - }, - .{ - "@TypeOf", .{ - .tag = .TypeOf, - .param_count = null, + "@TypeOf", + .{ + .tag = .TypeOf, + .param_count = null, + }, }, - }, - .{ - "@unionInit", .{ - .tag = .union_init, - .needs_mem_loc = true, - .param_count = 3, + "@unionInit", + .{ + .tag = .union_init, + .needs_mem_loc = true, + .param_count = 3, + }, }, - }, -}); + }); +}; diff --git a/src/Module.zig b/src/Module.zig index d2530d7df3..35819c5d44 100644 --- a/src/Module.zig +++ b/src/Module.zig @@ -430,12 +430,12 @@ pub const Scope = struct { /// Asserts the scope is a child of a File and has an AST tree and returns the tree. pub fn tree(self: *Scope) *const ast.Tree { switch (self.tag) { - .file => return self.cast(File).?.tree, - .block => return self.cast(Block).?.src_decl.container.file_scope.tree, - .gen_zir => return self.cast(GenZIR).?.decl.container.file_scope.tree, - .local_val => return self.cast(LocalVal).?.gen_zir.decl.container.file_scope.tree, - .local_ptr => return self.cast(LocalPtr).?.gen_zir.decl.container.file_scope.tree, - .container => return self.cast(Container).?.file_scope.tree, + .file => return &self.cast(File).?.tree, + .block => return &self.cast(Block).?.src_decl.container.file_scope.tree, + .gen_zir => return &self.cast(GenZIR).?.decl.container.file_scope.tree, + .local_val => return &self.cast(LocalVal).?.gen_zir.decl.container.file_scope.tree, + .local_ptr => return &self.cast(LocalPtr).?.gen_zir.decl.container.file_scope.tree, + .container => return &self.cast(Container).?.file_scope.tree, } } @@ -1612,6 +1612,7 @@ fn astgenAndSemaVarDecl( .decl = decl, .arena = &type_scope_arena.allocator, .parent = &decl.container.base, + 
.force_comptime = true, }; defer type_scope.instructions.deinit(mod.gpa); @@ -1630,7 +1631,7 @@ fn astgenAndSemaVarDecl( } else { return mod.failTok( &block_scope.base, - tree.firstToken(var_decl), + var_decl.ast.mut_token, "unable to infer variable type", .{}, ); @@ -1639,7 +1640,7 @@ fn astgenAndSemaVarDecl( if (is_mutable and !var_info.ty.isValidVarType(is_extern)) { return mod.failTok( &block_scope.base, - tree.firstToken(var_decl), + var_decl.ast.mut_token, "variable of type '{}' must be const", .{var_info.ty}, ); diff --git a/src/astgen.zig b/src/astgen.zig index a018d58d2f..3e5d63796f 100644 --- a/src/astgen.zig +++ b/src/astgen.zig @@ -59,6 +59,8 @@ pub const ResultLoc = union(enum) { pub fn typeExpr(mod: *Module, scope: *Scope, type_node: ast.Node.Index) InnerError!*zir.Inst { const tree = scope.tree(); + const token_starts = tree.tokens.items(.start); + const type_src = token_starts[tree.firstToken(type_node)]; const type_type = try addZIRInstConst(mod, scope, type_src, .{ .ty = Type.initTag(.type), @@ -76,13 +78,17 @@ fn lvalExpr(mod: *Module, scope: *Scope, node: ast.Node.Index) InnerError!*zir.I .root => unreachable, .@"usingnamespace" => unreachable, .test_decl => unreachable, - .doc_comment => unreachable, - .var_decl => unreachable, + .global_var_decl => unreachable, + .local_var_decl => unreachable, + .simple_var_decl => unreachable, + .aligned_var_decl => unreachable, .switch_case => unreachable, - .switch_else => unreachable, + .switch_case_one => unreachable, .container_field_init => unreachable, .container_field_align => unreachable, .container_field => unreachable, + .asm_output => unreachable, + .asm_input => unreachable, .assign, .assign_bit_and, @@ -122,58 +128,107 @@ fn lvalExpr(mod: *Module, scope: *Scope, node: ast.Node.Index) InnerError!*zir.I .bool_and, .bool_or, .@"asm", + .asm_simple, .string_literal, .integer_literal, .call, - .@"unreachable", + .call_comma, + .async_call, + .async_call_comma, + .call_one, + .call_one_comma, + 
.async_call_one, + .async_call_one_comma, + .unreachable_literal, .@"return", .@"if", + .if_simple, .@"while", + .while_simple, + .while_cont, .bool_not, .address_of, .float_literal, .undefined_literal, - .bool_literal, + .true_literal, + .false_literal, .null_literal, .optional_type, .block, - .labeled_block, + .block_semicolon, + .block_two, + .block_two_semicolon, .@"break", - .PtrType, + .ptr_type_aligned, + .ptr_type_sentinel, + .ptr_type, + .ptr_type_bit_range, .array_type, .array_type_sentinel, .enum_literal, - .MultilineStringLiteral, + .multiline_string_literal, .char_literal, .@"defer", + .@"errdefer", .@"catch", .error_union, .merge_error_sets, - .range, + .switch_range, .@"await", .bit_not, .negation, .negation_wrap, .@"resume", .@"try", - .slice_type, .slice, - .ArrayInitializer, - .ArrayInitializerDot, - .StructInitializer, - .StructInitializerDot, + .slice_open, + .slice_sentinel, + .array_init_one, + .array_init_one_comma, + .array_init_dot_two, + .array_init_dot_two_comma, + .array_init_dot, + .array_init_dot_comma, + .array_init, + .array_init_comma, + .struct_init_one, + .struct_init_one_comma, + .struct_init_dot_two, + .struct_init_dot_two_comma, + .struct_init_dot, + .struct_init_dot_comma, + .struct_init, + .struct_init_comma, .@"switch", + .switch_comma, .@"for", + .for_simple, .@"suspend", .@"continue", .@"anytype", - .error_type, - .FnProto, + .fn_proto_simple, + .fn_proto_multi, + .fn_proto_one, + .fn_proto, + .fn_decl, .anyframe_type, + .anyframe_literal, .error_set_decl, - .ContainerDecl, + .container_decl, + .container_decl_comma, + .container_decl_two, + .container_decl_two_comma, + .container_decl_arg, + .container_decl_arg_comma, + .tagged_union, + .tagged_union_comma, + .tagged_union_two, + .tagged_union_two_comma, + .tagged_union_enum_tag, + .tagged_union_enum_tag_comma, .@"comptime", .@"nosuspend", + .error_value, => return mod.failNode(scope, node, "invalid left-hand side to assignment", .{}), .builtin_call, @@ -192,10 +247,10 @@ 
fn lvalExpr(mod: *Module, scope: *Scope, node: ast.Node.Index) InnerError!*zir.I } }, - // can be assigned to + // These can be assigned to. .unwrap_optional, .deref, - .period, + .field_access, .array_access, .identifier, .grouped_expression, @@ -210,22 +265,33 @@ fn lvalExpr(mod: *Module, scope: *Scope, node: ast.Node.Index) InnerError!*zir.I /// result instruction can be used to inspect whether it is isNoReturn() but that is it, /// it must otherwise not be used. pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) InnerError!*zir.Inst { - switch (node.tag) { + const tree = scope.tree(); + const main_tokens = tree.nodes.items(.main_token); + const token_tags = tree.tokens.items(.tag); + const node_datas = tree.nodes.items(.data); + const node_tags = tree.nodes.items(.tag); + const token_starts = tree.tokens.items(.start); + + switch (node_tags[node]) { .root => unreachable, // Top-level declaration. .@"usingnamespace" => unreachable, // Top-level declaration. .test_decl => unreachable, // Top-level declaration. - .doc_comment => unreachable, // Top-level declaration. - .var_decl => unreachable, // Handled in `blockExpr`. + .container_field_init => unreachable, // Top-level declaration. + .container_field_align => unreachable, // Top-level declaration. + .container_field => unreachable, // Top-level declaration. + .fn_decl => unreachable, // Top-level declaration. + + .global_var_decl => unreachable, // Handled in `blockExpr`. + .local_var_decl => unreachable, // Handled in `blockExpr`. + .simple_var_decl => unreachable, // Handled in `blockExpr`. + .aligned_var_decl => unreachable, // Handled in `blockExpr`. + .switch_case => unreachable, // Handled in `switchExpr`. - .switch_else => unreachable, // Handled in `switchExpr`. - .range => unreachable, // Handled in `switchExpr`. - .Else => unreachable, // Handled explicitly the control flow expression functions. - .Payload => unreachable, // Handled explicitly. 
- .PointerPayload => unreachable, // Handled explicitly. - .PointerIndexPayload => unreachable, // Handled explicitly. - .ErrorTag => unreachable, // Handled explicitly. - .FieldInitializer => unreachable, // Handled explicitly. - .ContainerField => unreachable, // Handled explicitly. + .switch_case_one => unreachable, // Handled in `switchExpr`. + .switch_range => unreachable, // Handled in `switchExpr`. + + .asm_output => unreachable, // Handled in `asmExpr`. + .asm_input => unreachable, // Handled in `asmExpr`. .assign => return rvalueVoid(mod, scope, rl, node, try assign(mod, scope, node)), .assign_bit_and => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .bit_and)), @@ -276,30 +342,28 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) In .identifier => return identifier(mod, scope, rl, node), - .asm_simple => return assembly(mod, scope, rl, tree.asmSimple(node)), - .@"asm" => return assembly(mod, scope, rl, tree.asmFull(node)), + .asm_simple => return asmExpr(mod, scope, rl, tree.asmSimple(node)), + .@"asm" => return asmExpr(mod, scope, rl, tree.asmFull(node)), .string_literal => return stringLiteral(mod, scope, rl, node), .multiline_string_literal => return multilineStringLiteral(mod, scope, rl, node), .integer_literal => return integerLiteral(mod, scope, rl, node), - .builtin_call => return builtinCall(mod, scope, rl, node), - .builtin_call_two, .builtin_call_two_comma => { - if (datas[node].lhs == 0) { + if (node_datas[node].lhs == 0) { const params = [_]ast.Node.Index{}; return builtinCall(mod, scope, rl, node, ¶ms); - } else if (datas[node].rhs == 0) { - const params = [_]ast.Node.Index{datas[node].lhs}; + } else if (node_datas[node].rhs == 0) { + const params = [_]ast.Node.Index{node_datas[node].lhs}; return builtinCall(mod, scope, rl, node, ¶ms); } else { - const params = [_]ast.Node.Index{ datas[node].lhs, datas[node].rhs }; + const params = [_]ast.Node.Index{ node_datas[node].lhs, 
node_datas[node].rhs }; return builtinCall(mod, scope, rl, node, ¶ms); } }, .builtin_call, .builtin_call_comma => { - const params = tree.extra_data[datas[node].lhs..datas[node].rhs]; + const params = tree.extra_data[node_datas[node].lhs..node_datas[node].rhs]; return builtinCall(mod, scope, rl, node, params); }, @@ -311,20 +375,20 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) In return callExpr(mod, scope, rl, tree.callFull(node)); }, - .@"unreachable" => { + .unreachable_literal => { const main_token = main_tokens[node]; const src = token_starts[main_token]; return addZIRNoOp(mod, scope, src, .unreachable_safe); }, .@"return" => return ret(mod, scope, node), - .period => return field(mod, scope, rl, node), + .field_access => return field(mod, scope, rl, node), .float_literal => return floatLiteral(mod, scope, rl, node), .if_simple => return ifExpr(mod, scope, rl, tree.ifSimple(node)), - .@"if" => return ifExpr(mode, scope, rl, tree.ifFull(node)), + .@"if" => return ifExpr(mod, scope, rl, tree.ifFull(node)), .while_simple => return whileExpr(mod, scope, rl, tree.whileSimple(node)), - .while_cont => return whileExpr(mod, scope, tree.whileCont(node)), + .while_cont => return whileExpr(mod, scope, rl, tree.whileCont(node)), .@"while" => return whileExpr(mod, scope, rl, tree.whileFull(node)), .for_simple => return forExpr(mod, scope, rl, tree.forSimple(node)), @@ -389,7 +453,7 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) In return rvalue(mod, scope, rl, result); }, .unwrap_optional => { - const operand = try expr(mod, scope, rl, node.lhs); + const operand = try expr(mod, scope, rl, node_datas[node].lhs); const op: zir.Inst.Tag = switch (rl) { .ref => .optional_payload_safe_ptr, else => .optional_payload_safe, @@ -449,7 +513,7 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) In }, .@"catch" => { const catch_token = main_tokens[node]; - const payload_token: ?TokenIndex 
= if (token_tags[catch_token + 1] == .pipe) + const payload_token: ?ast.TokenIndex = if (token_tags[catch_token + 1] == .pipe) catch_token + 2 else null; @@ -506,6 +570,34 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) In null, ), }, + + .ptr_type_aligned => return ptrType(mod, scope, rl, tree.ptrTypeAligned(node)), + .ptr_type_sentinel => return ptrType(mod, scope, rl, tree.ptrTypeSentinel(node)), + .ptr_type => return ptrType(mod, scope, rl, tree.ptrType(node)), + .ptr_type_bit_range => return ptrType(mod, scope, rl, tree.ptrTypeBitRange(node)), + + .container_decl, + .container_decl_comma, + => return containerDecl(mod, scope, rl, tree.containerDecl(node)), + .container_decl_two, .container_decl_two_comma => { + var buffer: [2]ast.Node.Index = undefined; + return containerDecl(mod, scope, rl, tree.containerDeclTwo(&buffer, node)); + }, + .container_decl_arg, + .container_decl_arg_comma, + => return containerDecl(mod, scope, rl, tree.containerDeclArg(node)), + + .tagged_union, + .tagged_union_comma, + => return containerDecl(mod, scope, rl, tree.taggedUnion(node)), + .tagged_union_two, .tagged_union_two_comma => { + var buffer: [2]ast.Node.Index = undefined; + return containerDecl(mod, scope, rl, tree.taggedUnionTwo(&buffer, node)); + }, + .tagged_union_enum_tag, + .tagged_union_enum_tag_comma, + => return containerDecl(mod, scope, rl, tree.taggedUnionEnumTag(node)), + .@"break" => return breakExpr(mod, scope, rl, node), .@"continue" => return continueExpr(mod, scope, rl, node), .grouped_expression => return expr(mod, scope, rl, node_datas[node].lhs), @@ -518,12 +610,41 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) In .@"switch", .switch_comma => return switchExpr(mod, scope, rl, node), .@"defer" => return mod.failNode(scope, node, "TODO implement astgen.expr for .defer", .{}), + .@"errdefer" => return mod.failNode(scope, node, "TODO implement astgen.expr for .errdefer", .{}), .@"await" => 
return mod.failNode(scope, node, "TODO implement astgen.expr for .await", .{}), .@"resume" => return mod.failNode(scope, node, "TODO implement astgen.expr for .resume", .{}), .@"try" => return mod.failNode(scope, node, "TODO implement astgen.expr for .Try", .{}), + + .array_init_one, + .array_init_one_comma, + .array_init_dot_two, + .array_init_dot_two_comma, + .array_init_dot, + .array_init_dot_comma, + .array_init, + .array_init_comma, + => return mod.failNode(scope, node, "TODO implement astgen.expr for array literals", .{}), + + .struct_init_one, + .struct_init_one_comma, + .struct_init_dot_two, + .struct_init_dot_two_comma, + .struct_init_dot, + .struct_init_dot_comma, + .struct_init, + .struct_init_comma, + => return mod.failNode(scope, node, "TODO implement astgen.expr for struct literals", .{}), + .@"suspend" => return mod.failNode(scope, node, "TODO implement astgen.expr for .suspend", .{}), .@"anytype" => return mod.failNode(scope, node, "TODO implement astgen.expr for .anytype", .{}), + .fn_proto_simple, + .fn_proto_multi, + .fn_proto_one, + .fn_proto, + => return mod.failNode(scope, node, "TODO implement astgen.expr for function prototypes", .{}), + .@"nosuspend" => return mod.failNode(scope, node, "TODO implement astgen.expr for .nosuspend", .{}), + .error_value => return mod.failNode(scope, node, "TODO implement astgen.expr for .error_value", .{}), } } @@ -572,6 +693,8 @@ fn breakExpr( const tree = parent_scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); + const token_starts = tree.tokens.items(.start); + const src = token_starts[main_tokens[node]]; const break_label = node_datas[node].lhs; const rhs = node_datas[node].rhs; @@ -646,6 +769,8 @@ fn continueExpr( const tree = parent_scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); + const token_starts = tree.tokens.items(.start); + const src = token_starts[main_tokens[node]]; const 
break_label = node_datas[node].lhs; @@ -702,7 +827,7 @@ pub fn blockExpr( const main_tokens = tree.nodes.items(.main_token); const token_tags = tree.tokens.items(.tag); - const lbrace = main_tokens[node]; + const lbrace = main_tokens[block_node]; if (token_tags[lbrace - 1] == .colon) { return labeledBlockExpr(mod, scope, rl, block_node, statements, .block); } @@ -721,8 +846,9 @@ fn checkLabelRedefinition(mod: *Module, parent_scope: *Scope, label: ast.TokenIn if (gen_zir.label) |prev_label| { if (try tokenIdentEql(mod, parent_scope, label, prev_label.token)) { const tree = parent_scope.tree(); - const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); + const token_starts = tree.tokens.items(.start); + const label_src = token_starts[label]; const prev_label_src = token_starts[prev_label.token]; @@ -770,9 +896,9 @@ fn labeledBlockExpr( assert(zir_tag == .block or zir_tag == .block_comptime); const tree = parent_scope.tree(); - const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); const token_starts = tree.tokens.items(.start); + const token_tags = tree.tokens.items(.tag); const lbrace = main_tokens[block_node]; const label_token = lbrace - 1; @@ -813,10 +939,10 @@ fn labeledBlockExpr( defer block_scope.labeled_breaks.deinit(mod.gpa); defer block_scope.labeled_store_to_block_ptr_list.deinit(mod.gpa); - try blockExprStmts(mod, &block_scope.base, block_node, block_node.statements()); + try blockExprStmts(mod, &block_scope.base, block_node, statements); if (!block_scope.label.?.used) { - return mod.fail(parent_scope, token_starts[block_node.label], "unused block label", .{}); + return mod.failTok(parent_scope, label_token, "unused block label", .{}); } try gen_zir.instructions.append(mod.gpa, &block_inst.base); @@ -860,21 +986,23 @@ fn blockExprStmts( statements: []const ast.Node.Index, ) !void { const tree = parent_scope.tree(); - const node_datas = tree.nodes.items(.data); const main_tokens 
= tree.nodes.items(.main_token); + const token_starts = tree.tokens.items(.start); + const node_tags = tree.nodes.items(.tag); var block_arena = std.heap.ArenaAllocator.init(mod.gpa); defer block_arena.deinit(); var scope = parent_scope; for (statements) |statement| { - const src = token_starts[statement.firstToken()]; + const src = token_starts[tree.firstToken(statement)]; _ = try addZIRNoOp(mod, scope, src, .dbg_stmt); - switch (statement.tag) { - .var_decl => { - const var_decl_node = statement.castTag(.var_decl).?; - scope = try varDecl(mod, scope, var_decl_node, &block_arena.allocator); - }, + switch (node_tags[statement]) { + .global_var_decl => scope = try varDecl(mod, scope, &block_arena.allocator, tree.globalVarDecl(statement)), + .local_var_decl => scope = try varDecl(mod, scope, &block_arena.allocator, tree.localVarDecl(statement)), + .simple_var_decl => scope = try varDecl(mod, scope, &block_arena.allocator, tree.simpleVarDecl(statement)), + .aligned_var_decl => scope = try varDecl(mod, scope, &block_arena.allocator, tree.alignedVarDecl(statement)), + .assign => try assign(mod, scope, statement), .assign_bit_and => try assignOp(mod, scope, statement, .bit_and), .assign_bit_or => try assignOp(mod, scope, statement, .bit_or), @@ -903,20 +1031,23 @@ fn blockExprStmts( fn varDecl( mod: *Module, scope: *Scope, - node: *ast.Node.var_decl, block_arena: *Allocator, + var_decl: ast.full.VarDecl, ) InnerError!*Scope { - if (node.getComptimeToken()) |comptime_token| { + if (var_decl.comptime_token) |comptime_token| { return mod.failTok(scope, comptime_token, "TODO implement comptime locals", .{}); } - if (node.getAlignNode()) |align_node| { - return mod.failNode(scope, align_node, "TODO implement alignment on locals", .{}); + if (var_decl.ast.align_node != 0) { + return mod.failNode(scope, var_decl.ast.align_node, "TODO implement alignment on locals", .{}); } const tree = scope.tree(); - const node_datas = tree.nodes.items(.data); const main_tokens = 
tree.nodes.items(.main_token); - const name_src = token_starts[node.name_token]; - const ident_name = try mod.identifierTokenString(scope, node.name_token); + const token_starts = tree.tokens.items(.start); + const token_tags = tree.tokens.items(.tag); + + const name_token = var_decl.ast.mut_token + 1; + const name_src = token_starts[name_token]; + const ident_name = try mod.identifierTokenString(scope, name_token); // Local variables shadowing detection, including function parameters. { @@ -962,20 +1093,21 @@ fn varDecl( // TODO add note for other definition return mod.fail(scope, name_src, "redefinition of '{s}'", .{ident_name}); } - const init_node = node.getInitNode() orelse + if (var_decl.ast.init_node == 0) { return mod.fail(scope, name_src, "variables must be initialized", .{}); + } - switch (tree.token_ids[node.mut_token]) { + switch (token_tags[var_decl.ast.mut_token]) { .keyword_const => { // Depending on the type of AST the initialization expression is, we may need an lvalue // or an rvalue as a result location. If it is an rvalue, we can use the instruction as // the variable, no memory location needed. 
- if (!nodeMayNeedMemoryLocation(init_node, scope)) { - const result_loc: ResultLoc = if (node.getTypeNode()) |type_node| - .{ .ty = try typeExpr(mod, scope, type_node) } + if (!nodeMayNeedMemoryLocation(scope, var_decl.ast.init_node)) { + const result_loc: ResultLoc = if (var_decl.ast.type_node != 0) + .{ .ty = try typeExpr(mod, scope, var_decl.ast.type_node) } else .none; - const init_inst = try expr(mod, scope, result_loc, init_node); + const init_inst = try expr(mod, scope, result_loc, var_decl.ast.init_node); const sub_scope = try block_arena.create(Scope.LocalVal); sub_scope.* = .{ .parent = scope, @@ -999,8 +1131,8 @@ fn varDecl( var resolve_inferred_alloc: ?*zir.Inst = null; var opt_type_inst: ?*zir.Inst = null; - if (node.getTypeNode()) |type_node| { - const type_inst = try typeExpr(mod, &init_scope.base, type_node); + if (var_decl.ast.type_node != 0) { + const type_inst = try typeExpr(mod, &init_scope.base, var_decl.ast.type_node); opt_type_inst = type_inst; init_scope.rl_ptr = try addZIRUnOp(mod, &init_scope.base, name_src, .alloc, type_inst); } else { @@ -1009,7 +1141,7 @@ fn varDecl( init_scope.rl_ptr = &alloc.base; } const init_result_loc: ResultLoc = .{ .block_ptr = &init_scope }; - const init_inst = try expr(mod, &init_scope.base, init_result_loc, init_node); + const init_inst = try expr(mod, &init_scope.base, init_result_loc, var_decl.ast.init_node); const parent_zir = &scope.getGenZIR().instructions; if (init_scope.rvalue_rl_count == 1) { // Result location pointer not used. 
We don't need an alloc for this @@ -1069,8 +1201,11 @@ fn varDecl( }, .keyword_var => { var resolve_inferred_alloc: ?*zir.Inst = null; - const var_data: struct { result_loc: ResultLoc, alloc: *zir.Inst } = if (node.getTypeNode()) |type_node| a: { - const type_inst = try typeExpr(mod, scope, type_node); + const var_data: struct { + result_loc: ResultLoc, + alloc: *zir.Inst, + } = if (var_decl.ast.type_node != 0) a: { + const type_inst = try typeExpr(mod, scope, var_decl.ast.type_node); const alloc = try addZIRUnOp(mod, scope, name_src, .alloc_mut, type_inst); break :a .{ .alloc = alloc, .result_loc = .{ .ptr = alloc } }; } else a: { @@ -1078,7 +1213,7 @@ fn varDecl( resolve_inferred_alloc = &alloc.base; break :a .{ .alloc = &alloc.base, .result_loc = .{ .inferred_ptr = alloc } }; }; - const init_inst = try expr(mod, scope, var_data.result_loc, init_node); + const init_inst = try expr(mod, scope, var_data.result_loc, var_decl.ast.init_node); if (resolve_inferred_alloc) |inst| { _ = try addZIRUnOp(mod, scope, name_src, .resolve_inferred_alloc, inst); } @@ -1099,13 +1234,15 @@ fn assign(mod: *Module, scope: *Scope, infix_node: ast.Node.Index) InnerError!vo const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); + const node_tags = tree.nodes.items(.tag); + const lhs = node_datas[infix_node].lhs; const rhs = node_datas[infix_node].rhs; if (node_tags[lhs] == .identifier) { // This intentionally does not support `@"_"` syntax. 
const ident_name = tree.tokenSlice(main_tokens[lhs]); if (mem.eql(u8, ident_name, "_")) { - _ = try expr(mod, scope, .discard, infix_node.rhs); + _ = try expr(mod, scope, .discard, rhs); return; } } @@ -1122,6 +1259,7 @@ fn assignOp( const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); + const token_starts = tree.tokens.items(.start); const lhs_ptr = try lvalExpr(mod, scope, node_datas[infix_node].lhs); const lhs = try addZIRUnOp(mod, scope, lhs_ptr.src, .deref, lhs_ptr); @@ -1136,6 +1274,7 @@ fn boolNot(mod: *Module, scope: *Scope, node: ast.Node.Index) InnerError!*zir.In const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); + const token_starts = tree.tokens.items(.start); const src = token_starts[main_tokens[node]]; const bool_type = try addZIRInstConst(mod, scope, src, .{ @@ -1150,6 +1289,7 @@ fn bitNot(mod: *Module, scope: *Scope, node: ast.Node.Index) InnerError!*zir.Ins const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); + const token_starts = tree.tokens.items(.start); const src = token_starts[main_tokens[node]]; const operand = try expr(mod, scope, .none, node_datas[node].lhs); @@ -1165,6 +1305,7 @@ fn negation( const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); + const token_starts = tree.tokens.items(.start); const src = token_starts[main_tokens[node]]; const lhs = try addZIRInstConst(mod, scope, src, .{ @@ -1175,53 +1316,61 @@ fn negation( return addZIRBinOp(mod, scope, src, op_inst_tag, lhs, rhs); } -fn ptrSliceType(mod: *Module, scope: *Scope, src: usize, ptr_info: *ast.PtrInfo, rhs: *ast.Node, size: std.builtin.TypeInfo.Pointer.Size) InnerError!*zir.Inst { +fn ptrType( + mod: *Module, + scope: *Scope, + rl: ResultLoc, + ptr_info: ast.full.PtrType, +) InnerError!*zir.Inst { + 
const tree = scope.tree(); + const token_starts = tree.tokens.items(.start); + + const src = token_starts[ptr_info.ast.main_token]; + const simple = ptr_info.allowzero_token == null and - ptr_info.align_info == null and + ptr_info.ast.align_node == 0 and ptr_info.volatile_token == null and - ptr_info.sentinel == null; + ptr_info.ast.sentinel == 0; if (simple) { - const child_type = try typeExpr(mod, scope, rhs); + const child_type = try typeExpr(mod, scope, ptr_info.ast.child_type); const mutable = ptr_info.const_token == null; - // TODO stage1 type inference bug const T = zir.Inst.Tag; - return addZIRUnOp(mod, scope, src, switch (size) { + const result = try addZIRUnOp(mod, scope, src, switch (ptr_info.size) { .One => if (mutable) T.single_mut_ptr_type else T.single_const_ptr_type, .Many => if (mutable) T.many_mut_ptr_type else T.many_const_ptr_type, .C => if (mutable) T.c_mut_ptr_type else T.c_const_ptr_type, .Slice => if (mutable) T.mut_slice_type else T.const_slice_type, }, child_type); + return rvalue(mod, scope, rl, result); } var kw_args: std.meta.fieldInfo(zir.Inst.PtrType, .kw_args).field_type = .{}; - kw_args.size = size; + kw_args.size = ptr_info.size; kw_args.@"allowzero" = ptr_info.allowzero_token != null; - if (ptr_info.align_info) |some| { - kw_args.@"align" = try expr(mod, scope, .none, some.node); - if (some.bit_range) |bit_range| { - kw_args.align_bit_start = try expr(mod, scope, .none, bit_range.start); - kw_args.align_bit_end = try expr(mod, scope, .none, bit_range.end); + if (ptr_info.ast.align_node != 0) { + kw_args.@"align" = try expr(mod, scope, .none, ptr_info.ast.align_node); + if (ptr_info.ast.bit_range_start != 0) { + kw_args.align_bit_start = try expr(mod, scope, .none, ptr_info.ast.bit_range_start); + kw_args.align_bit_end = try expr(mod, scope, .none, ptr_info.ast.bit_range_end); } } kw_args.mutable = ptr_info.const_token == null; kw_args.@"volatile" = ptr_info.volatile_token != null; - if (ptr_info.sentinel) |some| { - 
kw_args.sentinel = try expr(mod, scope, .none, some); + const child_type = try typeExpr(mod, scope, ptr_info.ast.child_type); + if (ptr_info.ast.sentinel != 0) { + kw_args.sentinel = try expr(mod, scope, .{ .ty = child_type }, ptr_info.ast.sentinel); } - - const child_type = try typeExpr(mod, scope, rhs); - if (kw_args.sentinel) |some| { - kw_args.sentinel = try addZIRBinOp(mod, scope, some.src, .as, child_type, some); - } - - return addZIRInst(mod, scope, src, zir.Inst.PtrType, .{ .child_type = child_type }, kw_args); + const result = try addZIRInst(mod, scope, src, zir.Inst.PtrType, .{ .child_type = child_type }, kw_args); + return rvalue(mod, scope, rl, result); } fn arrayType(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) !*zir.Inst { const tree = scope.tree(); const main_tokens = tree.nodes.items(.main_token); const node_datas = tree.nodes.items(.data); + const token_starts = tree.tokens.items(.start); + const src = token_starts[main_tokens[node]]; const usize_type = try addZIRInstConst(mod, scope, src, .{ .ty = Type.initTag(.type), @@ -1246,6 +1395,9 @@ fn arrayType(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) ! 
fn arrayTypeSentinel(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) !*zir.Inst { const tree = scope.tree(); const main_tokens = tree.nodes.items(.main_token); + const token_starts = tree.tokens.items(.start); + const node_datas = tree.nodes.items(.data); + const len_node = node_datas[node].lhs; const extra = tree.extraData(node_datas[node].rhs, ast.Node.ArrayTypeSentinel); const src = token_starts[main_tokens[node]]; @@ -1274,6 +1426,8 @@ fn containerField( node: *ast.Node.ContainerField, ) InnerError!*zir.Inst { const tree = scope.tree(); + const token_starts = tree.tokens.items(.start); + const src = token_starts[tree.firstToken(node)]; const name = try mod.identifierTokenString(scope, node.name_token); @@ -1305,9 +1459,18 @@ fn containerField( }); } -fn containerDecl(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.ContainerDecl) InnerError!*zir.Inst { +fn containerDecl( + mod: *Module, + scope: *Scope, + rl: ResultLoc, + container_decl: ast.full.ContainerDecl, +) InnerError!*zir.Inst { const tree = scope.tree(); - const src = token_starts[node.kind_token]; + const token_starts = tree.tokens.items(.start); + const node_tags = tree.nodes.items(.tag); + const token_tags = tree.tokens.items(.tag); + + const src = token_starts[container_decl.ast.main_token]; var gen_scope: Scope.GenZIR = .{ .parent = scope, @@ -1321,9 +1484,12 @@ fn containerDecl(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Con var fields = std.ArrayList(*zir.Inst).init(mod.gpa); defer fields.deinit(); - for (node.fieldsAndDecls()) |fd| { - if (fd.castTag(.ContainerField)) |f| { - try fields.append(try containerField(mod, &gen_scope.base, f)); + for (container_decl.ast.members) |member| { + switch (node_tags[member]) { + .container_field_init, .container_field_align, .container_field => { + try fields.append(try containerField(mod, &gen_scope.base, member)); + }, + else => continue, } } @@ -1332,19 +1498,22 @@ fn containerDecl(mod: *Module, scope: *Scope, 
rl: ResultLoc, node: *ast.Node.Con const arena = &decl_arena.allocator; var layout: std.builtin.TypeInfo.ContainerLayout = .Auto; - if (node.layout_token) |some| switch (tree.token_ids[some]) { + if (container_decl.layout_token) |some| switch (token_tags[some]) { .keyword_extern => layout = .Extern, .keyword_packed => layout = .Packed, else => unreachable, }; - const container_type = switch (tree.token_ids[node.kind_token]) { + // TODO this implementation is incorrect. The types must be created in semantic + // analysis, not astgen, because the same ZIR is re-used for multiple inline function calls, + // comptime function calls, and generic function instantiations, and these + // must result in different instances of container types. + const container_type = switch (token_tags[container_decl.ast.main_token]) { .keyword_enum => blk: { - const tag_type: ?*zir.Inst = switch (node.init_arg_expr) { - .Type => |t| try typeExpr(mod, &gen_scope.base, t), - .None => null, - .Enum => unreachable, - }; + const tag_type: ?*zir.Inst = if (container_decl.ast.arg != 0) + try typeExpr(mod, &gen_scope.base, container_decl.ast.arg) + else + null; const inst = try addZIRInst(mod, &gen_scope.base, src, zir.Inst.EnumType, .{ .fields = try arena.dupe(*zir.Inst, fields.items), }, .{ @@ -1367,7 +1536,7 @@ fn containerDecl(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Con break :blk Type.initPayload(&enum_type.base); }, .keyword_struct => blk: { - assert(node.init_arg_expr == .None); + assert(container_decl.ast.arg == 0); const inst = try addZIRInst(mod, &gen_scope.base, src, zir.Inst.StructType, .{ .fields = try arena.dupe(*zir.Inst, fields.items), }, .{ @@ -1389,21 +1558,16 @@ fn containerDecl(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Con break :blk Type.initPayload(&struct_type.base); }, .keyword_union => blk: { - const init_inst = switch (node.init_arg_expr) { - .Enum => |e| if (e) |t| try typeExpr(mod, &gen_scope.base, t) else null, - .None => null, - 
.Type => |t| try typeExpr(mod, &gen_scope.base, t), - }; - const init_kind: zir.Inst.UnionType.InitKind = switch (node.init_arg_expr) { - .Enum => .enum_type, - .None => .none, - .Type => .tag_type, - }; + const init_inst: ?*zir.Inst = if (container_decl.ast.arg != 0) + try typeExpr(mod, &gen_scope.base, container_decl.ast.arg) + else + null; + const has_enum_token = container_decl.ast.enum_token != null; const inst = try addZIRInst(mod, &gen_scope.base, src, zir.Inst.UnionType, .{ .fields = try arena.dupe(*zir.Inst, fields.items), }, .{ .layout = layout, - .init_kind = init_kind, + .has_enum_token = has_enum_token, .init_inst = init_inst, }); const union_type = try arena.create(Type.Payload.Union); @@ -1437,7 +1601,7 @@ fn containerDecl(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Con else => unreachable, }; const val = try Value.Tag.ty.create(arena, container_type); - const decl = try mod.createContainerDecl(scope, node.kind_token, &decl_arena, .{ + const decl = try mod.createContainerDecl(scope, container_decl.ast.main_token, &decl_arena, .{ .ty = Type.initTag(.type), .val = val, }); @@ -1459,6 +1623,7 @@ fn errorSetDecl( const tree = scope.tree(); const main_tokens = tree.nodes.items(.main_token); const token_tags = tree.tokens.items(.tag); + const token_starts = tree.tokens.items(.start); // Count how many fields there are. 
const error_token = main_tokens[node]; @@ -1500,15 +1665,17 @@ fn orelseCatchExpr( mod: *Module, scope: *Scope, rl: ResultLoc, - lhs: *ast.Node, + lhs: ast.Node.Index, op_token: ast.TokenIndex, cond_op: zir.Inst.Tag, unwrap_op: zir.Inst.Tag, unwrap_code_op: zir.Inst.Tag, - rhs: *ast.Node, - payload_node: ?*ast.Node, + rhs: ast.Node.Index, + payload_token: ?ast.TokenIndex, ) InnerError!*zir.Inst { const tree = scope.tree(); + const token_starts = tree.tokens.items(.start); + const src = token_starts[op_token]; var block_scope: Scope.GenZIR = .{ @@ -1547,12 +1714,11 @@ fn orelseCatchExpr( var err_val_scope: Scope.LocalVal = undefined; const then_sub_scope = blk: { - const payload = payload_node orelse break :blk &then_scope.base; - - const err_name = tree.tokenSlice(payload.castTag(.Payload).?.error_symbol.firstToken()); - if (mem.eql(u8, err_name, "_")) - break :blk &then_scope.base; - + const payload = payload_token orelse break :blk &then_scope.base; + if (mem.eql(u8, tree.tokenSlice(payload), "_")) { + return mod.failTok(&then_scope.base, payload, "discard of error capture; omit it instead", .{}); + } + const err_name = try mod.identifierTokenString(scope, payload); err_val_scope = .{ .parent = &then_scope.base, .gen_zir = &then_scope, @@ -1685,18 +1851,20 @@ pub fn field(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) I const tree = scope.tree(); const token_starts = tree.tokens.items(.start); const main_tokens = tree.nodes.items(.main_token); + const node_datas = tree.nodes.items(.data); + const dot_token = main_tokens[node]; const src = token_starts[dot_token]; const field_ident = dot_token + 1; const field_name = try mod.identifierTokenString(scope, field_ident); if (rl == .ref) { return addZirInstTag(mod, scope, src, .field_ptr, .{ - .object = try expr(mod, scope, .ref, node.lhs), + .object = try expr(mod, scope, .ref, node_datas[node].lhs), .field_name = field_name, }); } else { return rvalue(mod, scope, rl, try addZirInstTag(mod, scope, 
src, .field_val, .{ - .object = try expr(mod, scope, .none, node.lhs), + .object = try expr(mod, scope, .none, node_datas[node].lhs), .field_name = field_name, })); } @@ -1711,6 +1879,8 @@ fn arrayAccess( const tree = scope.tree(); const main_tokens = tree.nodes.items(.main_token); const token_starts = tree.tokens.items(.start); + const node_datas = tree.nodes.items(.data); + const src = token_starts[main_tokens[node]]; const usize_type = try addZIRInstConst(mod, scope, src, .{ .ty = Type.initTag(.type), @@ -1737,6 +1907,7 @@ fn sliceExpr( ) InnerError!*zir.Inst { const tree = scope.tree(); const token_starts = tree.tokens.items(.start); + const src = token_starts[slice.ast.lbracket]; const usize_type = try addZIRInstConst(mod, scope, src, .{ @@ -1786,6 +1957,7 @@ fn simpleBinOp( const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); + const token_starts = tree.tokens.items(.start); const lhs = try expr(mod, scope, .none, node_datas[infix_node].lhs); const rhs = try expr(mod, scope, .none, node_datas[infix_node].rhs); @@ -1804,6 +1976,7 @@ fn boolBinOp( const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); + const token_starts = tree.tokens.items(.start); const src = token_starts[main_tokens[infix_node]]; const bool_type = try addZIRInstConst(mod, scope, src, .{ @@ -1899,13 +2072,14 @@ fn ifExpr( defer block_scope.instructions.deinit(mod.gpa); const tree = scope.tree(); - const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); + const token_starts = tree.tokens.items(.start); + const if_src = token_starts[if_full.ast.if_token]; const cond = c: { // TODO https://github.com/ziglang/zig/issues/7929 - if (if_full.ast.error_token) |error_token| { + if (if_full.error_token) |error_token| { return mod.failTok(scope, error_token, "TODO implement if error union", .{}); } else if (if_full.payload_token) 
|payload_token| { return mod.failTok(scope, payload_token, "TODO implement if optional", .{}); @@ -1966,7 +2140,7 @@ fn ifExpr( }; } else .{ - .src = token_starts[tree.lastToken(if_full.then_expr)], + .src = token_starts[tree.lastToken(if_full.ast.then_expr)], .result = null, }; @@ -2042,8 +2216,9 @@ fn whileExpr( defer continue_scope.instructions.deinit(mod.gpa); const tree = scope.tree(); - const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); + const token_starts = tree.tokens.items(.start); + const while_src = token_starts[while_full.ast.while_token]; const void_type = try addZIRInstConst(mod, scope, while_src, .{ .ty = Type.initTag(.type), @@ -2051,16 +2226,16 @@ fn whileExpr( }); const cond = c: { // TODO https://github.com/ziglang/zig/issues/7929 - if (while_full.ast.error_token) |error_token| { + if (while_full.error_token) |error_token| { return mod.failTok(scope, error_token, "TODO implement while error union", .{}); } else if (while_full.payload_token) |payload_token| { return mod.failTok(scope, payload_token, "TODO implement while optional", .{}); } else { - const bool_type = try addZIRInstConst(mod, &block_scope.base, while_src, .{ + const bool_type = try addZIRInstConst(mod, &continue_scope.base, while_src, .{ .ty = Type.initTag(.type), .val = Value.initTag(.bool_type), }); - break :c try expr(mod, &block_scope.base, .{ .ty = bool_type }, while_full.ast.cond_expr); + break :c try expr(mod, &continue_scope.base, .{ .ty = bool_type }, while_full.ast.cond_expr); } }; @@ -2128,7 +2303,7 @@ fn whileExpr( }; defer else_scope.instructions.deinit(mod.gpa); - const else_node = if_full.ast.else_expr; + const else_node = while_full.ast.else_expr; const else_info: struct { src: usize, result: ?*zir.Inst } = if (else_node != 0) blk: { loop_scope.break_count += 1; const sub_scope = &else_scope.base; @@ -2138,7 +2313,7 @@ fn whileExpr( }; } else .{ - .src = token_starts[tree.lastToken(then_node)], + .src = 
token_starts[tree.lastToken(while_full.ast.then_expr)], .result = null, }; @@ -2181,8 +2356,10 @@ fn forExpr( // Set up variables and constants. const tree = scope.tree(); - const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); + const token_starts = tree.tokens.items(.start); + const token_tags = tree.tokens.items(.tag); + const for_src = token_starts[for_full.ast.while_token]; const index_ptr = blk: { const usize_type = try addZIRInstConst(mod, scope, for_src, .{ @@ -2299,7 +2476,7 @@ fn forExpr( else break :blk &then_scope.base; if (mem.eql(u8, tree.tokenSlice(index_token), "_")) { - return mod.failTok(&then_scope.base, index_token, "discard of index capture not allowed; omit it instead", .{}); + return mod.failTok(&then_scope.base, index_token, "discard of index capture; omit it instead", .{}); } const index_name = try mod.identifierTokenString(&then_scope.base, index_token); index_scope = .{ @@ -2334,7 +2511,7 @@ fn forExpr( }; } else .{ - .src = token_starts[tree.lastToken(then_node)], + .src = token_starts[tree.lastToken(for_full.ast.then_expr)], .result = null, }; @@ -2386,10 +2563,12 @@ fn switchExpr( const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); const token_tags = tree.tokens.items(.tag); + const token_starts = tree.tokens.items(.start); + const node_tags = tree.nodes.items(.tag); const switch_token = main_tokens[switch_node]; - const target_node = datas[switch_node].lhs; - const extra = tree.extraData(datas[switch_node].rhs, ast.switch_node.SubRange); + const target_node = node_datas[switch_node].lhs; + const extra = tree.extraData(node_datas[switch_node].rhs, ast.Node.SubRange); const case_nodes = tree.extra_data[extra.start..extra.end]; const switch_src = token_starts[switch_token]; @@ -2552,7 +2731,7 @@ fn switchExpr( defer else_scope.instructions.deinit(mod.gpa); // Now generate all but the special cases. 
- var special_case: ?ast.Node.Index = null; + var special_case: ?ast.full.SwitchCase = null; var items_index: usize = 0; var case_index: usize = 0; for (case_nodes) |case_node| { @@ -2582,7 +2761,7 @@ fn switchExpr( { const item = items.items[items_index]; items_index += 1; - try switchCaseExpr(mod, &case_scope.base, block_scope.break_result_loc, block, case, target, target_ptr); + try switchCaseExpr(mod, &case_scope.base, block_scope.break_result_loc, block, case, target); cases[case_index] = .{ .item = item, @@ -2638,7 +2817,7 @@ fn switchExpr( // reset cond_scope for then_body case_scope.instructions.items.len = 0; - try switchCaseExpr(mod, &case_scope.base, block_scope.break_result_loc, block, case, target, target_ptr); + try switchCaseExpr(mod, &case_scope.base, block_scope.break_result_loc, block, case, target); condbr.positionals.then_body = .{ .instructions = try scope.arena().dupe(*zir.Inst, case_scope.instructions.items), }; @@ -2655,7 +2834,7 @@ fn switchExpr( // Finally generate else block or a break. if (special_case) |case| { - try switchCaseExpr(mod, &else_scope.base, block_scope.break_result_loc, block, case, target, target_ptr); + try switchCaseExpr(mod, &else_scope.base, block_scope.break_result_loc, block, case, target); } else { // Not handling all possible cases is a compile error. 
_ = try addZIRNoOp(mod, &else_scope.base, switch_src, .unreachable_unsafe); @@ -2674,11 +2853,13 @@ fn switchCaseExpr( block: *zir.Inst.Block, case: ast.full.SwitchCase, target: *zir.Inst, - target_ptr: ?*zir.Inst, ) !void { const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); + const token_starts = tree.tokens.items(.start); + const token_tags = tree.tokens.items(.tag); + const case_src = token_starts[case.ast.arrow_token]; const sub_scope = blk: { const payload_token = case.payload_token orelse break :blk scope; @@ -2690,11 +2871,11 @@ fn switchCaseExpr( const value_name = tree.tokenSlice(ident); if (mem.eql(u8, value_name, "_")) { if (is_ptr) { - return mod.failTok(scope, payload.ptr_token.?, "pointer modifier invalid on discard", .{}); + return mod.failTok(scope, payload_token, "pointer modifier invalid on discard", .{}); } break :blk scope; } - return mod.failNode(scope, payload.value_symbol, "TODO implement switch value payload", .{}); + return mod.failTok(scope, ident, "TODO implement switch value payload", .{}); }; const case_body = try expr(mod, sub_scope, rl, case.ast.target_expr); @@ -2710,10 +2891,12 @@ fn ret(mod: *Module, scope: *Scope, node: ast.Node.Index) InnerError!*zir.Inst { const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); + const token_starts = tree.tokens.items(.start); + const src = token_starts[main_tokens[node]]; const rhs_node = node_datas[node].lhs; if (rhs_node != 0) { - if (nodeMayNeedMemoryLocation(rhs_node, scope)) { + if (nodeMayNeedMemoryLocation(scope, rhs_node)) { const ret_ptr = try addZIRNoOp(mod, scope, src, .ret_ptr); const operand = try expr(mod, scope, .{ .ptr = ret_ptr }, rhs_node); return addZIRUnOp(mod, scope, src, .@"return", operand); @@ -2737,8 +2920,8 @@ fn identifier( defer tracy.end(); const tree = scope.tree(); - const node_datas = tree.nodes.items(.data); const main_tokens = 
tree.nodes.items(.main_token); + const token_starts = tree.tokens.items(.start); const ident_token = main_tokens[ident]; const ident_name = try mod.identifierTokenString(scope, ident_token); @@ -2826,6 +3009,27 @@ fn identifier( return mod.failNode(scope, ident, "use of undeclared identifier '{s}'", .{ident_name}); } +fn parseStringLiteral(mod: *Module, scope: *Scope, token: ast.TokenIndex) ![]u8 { + const tree = scope.tree(); + const token_tags = tree.tokens.items(.tag); + const token_starts = tree.tokens.items(.start); + assert(token_tags[token] == .string_literal); + const unparsed = tree.tokenSlice(token); + const arena = scope.arena(); + var bad_index: usize = undefined; + const bytes = std.zig.parseStringLiteral(arena, unparsed, &bad_index) catch |err| switch (err) { + error.InvalidCharacter => { + const bad_byte = unparsed[bad_index]; + const src = token_starts[token]; + return mod.fail(scope, src + bad_index, "invalid string literal character: '{c}'", .{ + bad_byte, + }); + }, + else => |e| return e, + }; + return bytes; +} + fn stringLiteral( mod: *Module, scope: *Scope, @@ -2833,23 +3037,11 @@ fn stringLiteral( str_lit: ast.Node.Index, ) InnerError!*zir.Inst { const tree = scope.tree(); - const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); + const token_starts = tree.tokens.items(.start); const str_lit_token = main_tokens[str_lit]; - const unparsed_bytes = tree.tokenSlice(str_lit_token); - const arena = scope.arena(); - - var bad_index: usize = undefined; - const bytes = std.zig.parseStringLiteral(arena, unparsed_bytes, &bad_index) catch |err| switch (err) { - error.InvalidCharacter => { - const bad_byte = unparsed_bytes[bad_index]; - const src = token_starts[str_lit_token]; - return mod.fail(scope, src + bad_index, "invalid string literal character: '{c}'\n", .{bad_byte}); - }, - else => |e| return e, - }; - + const bytes = try parseStringLiteral(mod, scope, str_lit_token); const src = 
token_starts[str_lit_token]; const str_inst = try addZIRInst(mod, scope, src, zir.Inst.Str, .{ .bytes = bytes }, .{}); return rvalue(mod, scope, rl, str_inst); @@ -2864,9 +3056,10 @@ fn multilineStringLiteral( const tree = scope.tree(); const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); + const token_starts = tree.tokens.items(.start); - const start = node_datas[node].lhs; - const end = node_datas[node].rhs; + const start = node_datas[str_lit].lhs; + const end = node_datas[str_lit].rhs; // Count the number of bytes to allocate. const len: usize = len: { @@ -2905,9 +3098,10 @@ fn multilineStringLiteral( fn charLiteral(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) !*zir.Inst { const tree = scope.tree(); - const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); const main_token = main_tokens[node]; + const token_starts = tree.tokens.items(.start); + const src = token_starts[main_token]; const slice = tree.tokenSlice(main_token); @@ -2934,6 +3128,7 @@ fn integerLiteral( const arena = scope.arena(); const tree = scope.tree(); const main_tokens = tree.nodes.items(.main_token); + const token_starts = tree.tokens.items(.start); const int_token = main_tokens[int_lit]; const prefixed_bytes = tree.tokenSlice(int_token); @@ -2972,6 +3167,8 @@ fn floatLiteral( const arena = scope.arena(); const tree = scope.tree(); const main_tokens = tree.nodes.items(.main_token); + const token_starts = tree.tokens.items(.start); + const main_token = main_tokens[float_lit]; const bytes = tree.tokenSlice(main_token); if (bytes.len > 2 and bytes[1] == 'x') { @@ -2988,17 +3185,18 @@ fn floatLiteral( return rvalue(mod, scope, rl, result); } -fn assembly(mod: *Module, scope: *Scope, rl: ResultLoc, full: ast.full.Asm) InnerError!*zir.Inst { +fn asmExpr(mod: *Module, scope: *Scope, rl: ResultLoc, full: ast.full.Asm) InnerError!*zir.Inst { const arena = scope.arena(); const tree = scope.tree(); - 
const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); + const token_starts = tree.tokens.items(.start); + const node_datas = tree.nodes.items(.data); if (full.outputs.len != 0) { return mod.failTok(scope, full.ast.asm_token, "TODO implement asm with an output", .{}); } - const inputs = try arena.alloc(*zir.Inst, full.inputs.len); + const inputs = try arena.alloc([]const u8, full.inputs.len); const args = try arena.alloc(*zir.Inst, full.inputs.len); const src = token_starts[full.ast.asm_token]; @@ -3010,15 +3208,16 @@ fn assembly(mod: *Module, scope: *Scope, rl: ResultLoc, full: ast.full.Asm) Inne for (full.inputs) |input, i| { // TODO semantically analyze constraints - inputs[i] = try expr(mod, scope, str_type_rl, input.constraint); - args[i] = try expr(mod, scope, .none, input.expr); + const constraint_token = main_tokens[input] + 2; + inputs[i] = try parseStringLiteral(mod, scope, constraint_token); + args[i] = try expr(mod, scope, .none, node_datas[input].lhs); } const return_type = try addZIRInstConst(mod, scope, src, .{ .ty = Type.initTag(.type), .val = Value.initTag(.void_type), }); - const asm_inst = try addZIRInst(mod, scope, src, zir.Inst.@"asm", .{ + const asm_inst = try addZIRInst(mod, scope, src, zir.Inst.Asm, .{ .asm_source = try expr(mod, scope, str_type_rl, full.ast.template), .return_type = return_type, }, .{ @@ -3185,8 +3384,9 @@ fn builtinCall( params: []const ast.Node.Index, ) InnerError!*zir.Inst { const tree = scope.tree(); - const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); + const token_starts = tree.tokens.items(.start); + const builtin_token = main_tokens[call]; const builtin_name = tree.tokenSlice(builtin_token); @@ -3200,11 +3400,13 @@ fn builtinCall( builtin_name, }); }; - if (info.param_count != params.len) { - const s = if (params.len == 1) "" else "s"; - return mod.failTok(scope, builtin_token, "expected {d} parameter{s}, found {d}", .{ - expected, 
s, found, - }); + if (info.param_count) |expected| { + if (expected != params.len) { + const s = if (expected == 1) "" else "s"; + return mod.failTok(scope, builtin_token, "expected {d} parameter{s}, found {d}", .{ + expected, s, params.len, + }); + } } const src = token_starts[builtin_token]; @@ -3237,7 +3439,7 @@ fn builtinCall( }, .compile_error => { const target = try expr(mod, scope, .none, params[0]); - const result = addZIRUnOp(mod, scope, src, .compile_error, target); + const result = try addZIRUnOp(mod, scope, src, .compile_error, target); return rvalue(mod, scope, rl, result); }, .set_eval_branch_quota => { @@ -3386,8 +3588,9 @@ fn callExpr( } const tree = scope.tree(); - const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); + const token_starts = tree.tokens.items(.start); + const lhs = try expr(mod, scope, .none, call.ast.fn_expr); const args = try scope.getGenZIR().arena.alloc(*zir.Inst, call.ast.params.len); @@ -3446,23 +3649,26 @@ fn getSimplePrimitiveValue(name: []const u8) ?TypedValue { return null; } -fn nodeMayNeedMemoryLocation(start_node: *ast.Node, scope: *Scope) bool { +fn nodeMayNeedMemoryLocation(scope: *Scope, start_node: ast.Node.Index) bool { + const tree = scope.tree(); + const node_tags = tree.nodes.items(.tag); + const node_datas = tree.nodes.items(.data); + const main_tokens = tree.nodes.items(.main_token); + const token_tags = tree.tokens.items(.tag); + var node = start_node; while (true) { - switch (node.tag) { - .Root, + switch (node_tags[node]) { + .root, .@"usingnamespace", .test_decl, - .doc_comment, .switch_case, - .switch_else, - .Else, - .Payload, - .PointerPayload, - .PointerIndexPayload, - .ContainerField, - .ErrorTag, - .FieldInitializer, + .switch_case_one, + .container_field_init, + .container_field_align, + .container_field, + .asm_output, + .asm_input, => unreachable, .@"return", @@ -3470,8 +3676,12 @@ fn nodeMayNeedMemoryLocation(start_node: *ast.Node, scope: *Scope) bool { 
.@"continue", .bit_not, .bool_not, - .var_decl, + .global_var_decl, + .local_var_decl, + .simple_var_decl, + .aligned_var_decl, .@"defer", + .@"errdefer", .address_of, .optional_type, .negation, @@ -3479,27 +3689,46 @@ fn nodeMayNeedMemoryLocation(start_node: *ast.Node, scope: *Scope) bool { .@"resume", .array_type, .array_type_sentinel, - .PtrType, - .slice_type, + .ptr_type_aligned, + .ptr_type_sentinel, + .ptr_type, + .ptr_type_bit_range, .@"suspend", .@"anytype", - .error_type, - .FnProto, + .fn_proto_simple, + .fn_proto_multi, + .fn_proto_one, + .fn_proto, + .fn_decl, .anyframe_type, + .anyframe_literal, .integer_literal, .float_literal, .enum_literal, .string_literal, - .MultilineStringLiteral, + .multiline_string_literal, .char_literal, - .bool_literal, + .true_literal, + .false_literal, .null_literal, .undefined_literal, - .@"unreachable", + .unreachable_literal, .identifier, .error_set_decl, - .ContainerDecl, + .container_decl, + .container_decl_comma, + .container_decl_two, + .container_decl_two_comma, + .container_decl_arg, + .container_decl_arg_comma, + .tagged_union, + .tagged_union_comma, + .tagged_union_two, + .tagged_union_two_comma, + .tagged_union_enum_tag, + .tagged_union_enum_tag_comma, .@"asm", + .asm_simple, .add, .add_wrap, .array_cat, @@ -3537,14 +3766,16 @@ fn nodeMayNeedMemoryLocation(start_node: *ast.Node, scope: *Scope) bool { .mod, .mul, .mul_wrap, - .range, - .period, + .switch_range, + .field_access, .sub, .sub_wrap, .slice, + .slice_open, + .slice_sentinel, .deref, .array_access, - .block, + .error_value, .while_simple, // This variant cannot have an else expression. .while_cont, // This variant cannot have an else expression. .for_simple, // This variant cannot have an else expression. 
@@ -3558,18 +3789,30 @@ fn nodeMayNeedMemoryLocation(start_node: *ast.Node, scope: *Scope) bool { .@"comptime", .@"nosuspend", .unwrap_optional, - => node = datas[node].lhs, + => node = node_datas[node].lhs, // Forward the question to the RHS sub-expression. .@"catch", .@"orelse", - => node = datas[node].rhs, + => node = node_datas[node].rhs, // True because these are exactly the expressions we need memory locations for. - .ArrayInitializer, - .ArrayInitializerDot, - .StructInitializer, - .StructInitializerDot, + .array_init_one, + .array_init_one_comma, + .array_init_dot_two, + .array_init_dot_two_comma, + .array_init_dot, + .array_init_dot_comma, + .array_init, + .array_init_comma, + .struct_init_one, + .struct_init_one_comma, + .struct_init_dot_two, + .struct_init_dot_two_comma, + .struct_init_dot, + .struct_init_dot_comma, + .struct_init, + .struct_init_comma, => return true, // True because depending on comptime conditions, sub-expressions @@ -3578,6 +3821,7 @@ fn nodeMayNeedMemoryLocation(start_node: *ast.Node, scope: *Scope) bool { .@"if", // This variant always has an else expression. .@"for", // This variant always has an else expression. 
.@"switch", + .switch_comma, .call_one, .call_one_comma, .async_call_one, @@ -3588,10 +3832,10 @@ fn nodeMayNeedMemoryLocation(start_node: *ast.Node, scope: *Scope) bool { .async_call_comma, => return true, - block_two, - block_two_semicolon, - block, - block_semicolon, + .block_two, + .block_two_semicolon, + .block, + .block_semicolon, => { const lbrace = main_tokens[node]; if (token_tags[lbrace - 1] == .colon) { @@ -3603,7 +3847,11 @@ fn nodeMayNeedMemoryLocation(start_node: *ast.Node, scope: *Scope) bool { } }, - .builtin_call => { + .builtin_call, + .builtin_call_comma, + .builtin_call_two, + .builtin_call_two_comma, + => { const builtin_token = main_tokens[node]; const builtin_name = tree.tokenSlice(builtin_token); // If the builtin is an invalid name, we don't cause an error here; instead @@ -3661,7 +3909,6 @@ fn rvalueVoid( result: void, ) InnerError!*zir.Inst { const tree = scope.tree(); - const node_datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); const src = tree.tokens.items(.start)[tree.firstToken(node)]; const void_inst = try addZIRInstConst(mod, scope, src, .{ @@ -3765,7 +4012,7 @@ pub fn addZirInstT( src: usize, comptime T: type, tag: zir.Inst.Tag, - positionals: std.meta.fieldInfo(tag.Type(), .positionals).field_type, + positionals: std.meta.fieldInfo(T, .positionals).field_type, ) !*T { const gen_zir = scope.getGenZIR(); try gen_zir.instructions.ensureCapacity(mod.gpa, gen_zir.instructions.items.len + 1); diff --git a/src/zir.zig b/src/zir.zig index 9a3c080760..ee0fd3dc3d 100644 --- a/src/zir.zig +++ b/src/zir.zig @@ -863,8 +863,8 @@ pub const Inst = struct { kw_args: struct { @"volatile": bool = false, output: ?*Inst = null, - inputs: []*Inst = &[0]*Inst{}, - clobbers: []*Inst = &[0]*Inst{}, + inputs: []const []const u8 = &.{}, + clobbers: []const []const u8 = &.{}, args: []*Inst = &[0]*Inst{}, }, }; @@ -1192,16 +1192,9 @@ pub const Inst = struct { }, kw_args: struct { init_inst: ?*Inst = null, - init_kind: 
InitKind = .none, + has_enum_token: bool, layout: std.builtin.TypeInfo.ContainerLayout = .Auto, }, - - // TODO error: values of type '(enum literal)' must be comptime known - pub const InitKind = enum { - enum_type, - tag_type, - none, - }; }; pub const SwitchBr = struct { @@ -1413,6 +1406,7 @@ const Writer = struct { } switch (@TypeOf(param)) { *Inst => return self.writeInstParamToStream(stream, param), + ?*Inst => return self.writeInstParamToStream(stream, param.?), []*Inst => { try stream.writeByte('['); for (param) |inst, i| { @@ -1480,7 +1474,7 @@ const Writer = struct { const name = self.loop_table.get(param).?; return stream.print("\"{}\"", .{std.zig.fmtEscapes(name)}); }, - [][]const u8 => { + [][]const u8, []const []const u8 => { try stream.writeByte('['); for (param) |str, i| { if (i != 0) { diff --git a/src/zir_sema.zig b/src/zir_sema.zig index 83d7113c9c..b20e78d448 100644 --- a/src/zir_sema.zig +++ b/src/zir_sema.zig @@ -2023,19 +2023,21 @@ fn zirDeref(mod: *Module, scope: *Scope, deref: *zir.Inst.UnOp) InnerError!*Inst fn zirAsm(mod: *Module, scope: *Scope, assembly: *zir.Inst.Asm) InnerError!*Inst { const tracy = trace(@src()); defer tracy.end(); + const return_type = try resolveType(mod, scope, assembly.positionals.return_type); const asm_source = try resolveConstString(mod, scope, assembly.positionals.asm_source); const output = if (assembly.kw_args.output) |o| try resolveConstString(mod, scope, o) else null; - const inputs = try scope.arena().alloc([]const u8, assembly.kw_args.inputs.len); - const clobbers = try scope.arena().alloc([]const u8, assembly.kw_args.clobbers.len); - const args = try scope.arena().alloc(*Inst, assembly.kw_args.args.len); + const arena = scope.arena(); + const inputs = try arena.alloc([]const u8, assembly.kw_args.inputs.len); + const clobbers = try arena.alloc([]const u8, assembly.kw_args.clobbers.len); + const args = try arena.alloc(*Inst, assembly.kw_args.args.len); for (inputs) |*elem, i| { - elem.* = try 
resolveConstString(mod, scope, assembly.kw_args.inputs[i]); + elem.* = try arena.dupe(u8, assembly.kw_args.inputs[i]); } for (clobbers) |*elem, i| { - elem.* = try resolveConstString(mod, scope, assembly.kw_args.clobbers[i]); + elem.* = try arena.dupe(u8, assembly.kw_args.clobbers[i]); } for (args) |*elem, i| { const arg = try resolveInst(mod, scope, assembly.kw_args.args[i]); From 914540ddb5867efe5c1823c281641897bc4cd22a Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Thu, 18 Feb 2021 23:54:47 -0700 Subject: [PATCH 105/173] astgen: fix remaining compile errors Now it builds and what remains in this branch is: * fix the stage2 compiler regressions from this branch * finish the rest of zig fmt test cases, get them passing * Merge in Vexu's translate-c AST branch & fix translate-c regressions --- src/astgen.zig | 41 +++++++++++++++++++++++------------------ src/link.zig | 4 ++-- src/link/Coff.zig | 6 +++--- src/link/Elf.zig | 6 +++--- src/link/MachO.zig | 6 +++--- src/link/Wasm.zig | 6 +++--- 6 files changed, 37 insertions(+), 32 deletions(-) diff --git a/src/astgen.zig b/src/astgen.zig index 3e5d63796f..468156e7a9 100644 --- a/src/astgen.zig +++ b/src/astgen.zig @@ -381,7 +381,7 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) In return addZIRNoOp(mod, scope, src, .unreachable_safe); }, .@"return" => return ret(mod, scope, node), - .field_access => return field(mod, scope, rl, node), + .field_access => return fieldAccess(mod, scope, rl, node), .float_literal => return floatLiteral(mod, scope, rl, node), .if_simple => return ifExpr(mod, scope, rl, tree.ifSimple(node)), @@ -1423,17 +1423,17 @@ fn arrayTypeSentinel(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node. 
fn containerField( mod: *Module, scope: *Scope, - node: *ast.Node.ContainerField, + field: ast.full.ContainerField, ) InnerError!*zir.Inst { const tree = scope.tree(); const token_starts = tree.tokens.items(.start); - const src = token_starts[tree.firstToken(node)]; - const name = try mod.identifierTokenString(scope, node.name_token); + const src = token_starts[field.ast.name_token]; + const name = try mod.identifierTokenString(scope, field.ast.name_token); - if (node.comptime_token == null and node.value_expr == null and node.align_expr == null) { - if (node.type_expr) |some| { - const ty = try typeExpr(mod, scope, some); + if (field.comptime_token == null and field.ast.value_expr == 0 and field.ast.align_expr == 0) { + if (field.ast.type_expr != 0) { + const ty = try typeExpr(mod, scope, field.ast.type_expr); return addZIRInst(mod, scope, src, zir.Inst.ContainerFieldTyped, .{ .bytes = name, .ty = ty, @@ -1445,9 +1445,11 @@ fn containerField( } } - const ty = if (node.type_expr) |some| try typeExpr(mod, scope, some) else null; - const alignment = if (node.align_expr) |some| try expr(mod, scope, .none, some) else null; - const init = if (node.value_expr) |some| try expr(mod, scope, .none, some) else null; + const ty = if (field.ast.type_expr != 0) try typeExpr(mod, scope, field.ast.type_expr) else null; + // TODO result location should be alignment type + const alignment = if (field.ast.align_expr != 0) try expr(mod, scope, .none, field.ast.align_expr) else null; + // TODO result location should be the field type + const init = if (field.ast.value_expr != 0) try expr(mod, scope, .none, field.ast.value_expr) else null; return addZIRInst(mod, scope, src, zir.Inst.ContainerField, .{ .bytes = name, @@ -1455,7 +1457,7 @@ fn containerField( .ty = ty, .init = init, .alignment = alignment, - .is_comptime = node.comptime_token != null, + .is_comptime = field.comptime_token != null, }); } @@ -1485,12 +1487,15 @@ fn containerDecl( defer fields.deinit(); for 
(container_decl.ast.members) |member| { - switch (node_tags[member]) { - .container_field_init, .container_field_align, .container_field => { - try fields.append(try containerField(mod, &gen_scope.base, member)); - }, + // TODO just handle these cases differently since they end up with different ZIR + // instructions anyway. It will be simpler & have fewer branches. + const field = switch (node_tags[member]) { + .container_field_init => try containerField(mod, &gen_scope.base, tree.containerFieldInit(member)), + .container_field_align => try containerField(mod, &gen_scope.base, tree.containerFieldAlign(member)), + .container_field => try containerField(mod, &gen_scope.base, tree.containerField(member)), else => continue, - } + }; + try fields.append(field); } var decl_arena = std.heap.ArenaAllocator.init(mod.gpa); @@ -1847,7 +1852,7 @@ fn tokenIdentEql(mod: *Module, scope: *Scope, token1: ast.TokenIndex, token2: as return mem.eql(u8, ident_name_1, ident_name_2); } -pub fn field(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) InnerError!*zir.Inst { +pub fn fieldAccess(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) InnerError!*zir.Inst { const tree = scope.tree(); const token_starts = tree.tokens.items(.start); const main_tokens = tree.nodes.items(.main_token); @@ -3269,7 +3274,7 @@ fn asRlPtr( rl: ResultLoc, src: usize, result_ptr: *zir.Inst, - operand_node: *ast.Node, + operand_node: ast.Node.Index, dest_type: *zir.Inst, ) InnerError!*zir.Inst { // Detect whether this expr() call goes into rvalue() to store the result into the diff --git a/src/link.zig b/src/link.zig index 3d0a54f416..0a4cde0284 100644 --- a/src/link.zig +++ b/src/link.zig @@ -550,11 +550,11 @@ pub const File = struct { id_symlink_basename, &prev_digest_buf, ) catch |err| b: { - log.debug("archive new_digest={} readFile error: {s}", .{ digest, @errorName(err) }); + log.debug("archive new_digest={x} readFile error: {s}", .{ digest, @errorName(err) }); break :b 
prev_digest_buf[0..0]; }; if (mem.eql(u8, prev_digest, &digest)) { - log.debug("archive digest={} match - skipping invocation", .{digest}); + log.debug("archive digest={x} match - skipping invocation", .{digest}); base.lock = man.toOwnedLock(); return; } diff --git a/src/link/Coff.zig b/src/link/Coff.zig index 0dfa3d54a1..2eee19b4f6 100644 --- a/src/link/Coff.zig +++ b/src/link/Coff.zig @@ -892,17 +892,17 @@ fn linkWithLLD(self: *Coff, comp: *Compilation) !void { id_symlink_basename, &prev_digest_buf, ) catch |err| blk: { - log.debug("COFF LLD new_digest={} error: {s}", .{ digest, @errorName(err) }); + log.debug("COFF LLD new_digest={x} error: {s}", .{ digest, @errorName(err) }); // Handle this as a cache miss. break :blk prev_digest_buf[0..0]; }; if (mem.eql(u8, prev_digest, &digest)) { - log.debug("COFF LLD digest={} match - skipping invocation", .{digest}); + log.debug("COFF LLD digest={x} match - skipping invocation", .{digest}); // Hot diggity dog! The output binary is already there. self.base.lock = man.toOwnedLock(); return; } - log.debug("COFF LLD prev_digest={} new_digest={}", .{ prev_digest, digest }); + log.debug("COFF LLD prev_digest={x} new_digest={x}", .{ prev_digest, digest }); // We are about to change the output file to be different, so we invalidate the build hash now. directory.handle.deleteFile(id_symlink_basename) catch |err| switch (err) { diff --git a/src/link/Elf.zig b/src/link/Elf.zig index f92c585cd5..1a015ba0c2 100644 --- a/src/link/Elf.zig +++ b/src/link/Elf.zig @@ -1365,17 +1365,17 @@ fn linkWithLLD(self: *Elf, comp: *Compilation) !void { id_symlink_basename, &prev_digest_buf, ) catch |err| blk: { - log.debug("ELF LLD new_digest={} error: {s}", .{ digest, @errorName(err) }); + log.debug("ELF LLD new_digest={x} error: {s}", .{ digest, @errorName(err) }); // Handle this as a cache miss. 
break :blk prev_digest_buf[0..0]; }; if (mem.eql(u8, prev_digest, &digest)) { - log.debug("ELF LLD digest={} match - skipping invocation", .{digest}); + log.debug("ELF LLD digest={x} match - skipping invocation", .{digest}); // Hot diggity dog! The output binary is already there. self.base.lock = man.toOwnedLock(); return; } - log.debug("ELF LLD prev_digest={} new_digest={}", .{ prev_digest, digest }); + log.debug("ELF LLD prev_digest={x} new_digest={x}", .{ prev_digest, digest }); // We are about to change the output file to be different, so we invalidate the build hash now. directory.handle.deleteFile(id_symlink_basename) catch |err| switch (err) { diff --git a/src/link/MachO.zig b/src/link/MachO.zig index fd1c53cb67..139a9b8940 100644 --- a/src/link/MachO.zig +++ b/src/link/MachO.zig @@ -556,17 +556,17 @@ fn linkWithLLD(self: *MachO, comp: *Compilation) !void { id_symlink_basename, &prev_digest_buf, ) catch |err| blk: { - log.debug("MachO LLD new_digest={} error: {s}", .{ digest, @errorName(err) }); + log.debug("MachO LLD new_digest={x} error: {s}", .{ digest, @errorName(err) }); // Handle this as a cache miss. break :blk prev_digest_buf[0..0]; }; if (mem.eql(u8, prev_digest, &digest)) { - log.debug("MachO LLD digest={} match - skipping invocation", .{digest}); + log.debug("MachO LLD digest={x} match - skipping invocation", .{digest}); // Hot diggity dog! The output binary is already there. self.base.lock = man.toOwnedLock(); return; } - log.debug("MachO LLD prev_digest={} new_digest={}", .{ prev_digest, digest }); + log.debug("MachO LLD prev_digest={x} new_digest={x}", .{ prev_digest, digest }); // We are about to change the output file to be different, so we invalidate the build hash now. 
directory.handle.deleteFile(id_symlink_basename) catch |err| switch (err) { diff --git a/src/link/Wasm.zig b/src/link/Wasm.zig index 547ab2a012..cafd17cd2c 100644 --- a/src/link/Wasm.zig +++ b/src/link/Wasm.zig @@ -333,17 +333,17 @@ fn linkWithLLD(self: *Wasm, comp: *Compilation) !void { id_symlink_basename, &prev_digest_buf, ) catch |err| blk: { - log.debug("WASM LLD new_digest={} error: {s}", .{ digest, @errorName(err) }); + log.debug("WASM LLD new_digest={x} error: {s}", .{ digest, @errorName(err) }); // Handle this as a cache miss. break :blk prev_digest_buf[0..0]; }; if (mem.eql(u8, prev_digest, &digest)) { - log.debug("WASM LLD digest={} match - skipping invocation", .{digest}); + log.debug("WASM LLD digest={x} match - skipping invocation", .{digest}); // Hot diggity dog! The output binary is already there. self.base.lock = man.toOwnedLock(); return; } - log.debug("WASM LLD prev_digest={} new_digest={}", .{ prev_digest, digest }); + log.debug("WASM LLD prev_digest={x} new_digest={x}", .{ prev_digest, digest }); // We are about to change the output file to be different, so we invalidate the build hash now. 
directory.handle.deleteFile(id_symlink_basename) catch |err| switch (err) { From 974a1c55255560318f1d99003d353374b94b63e1 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Fri, 19 Feb 2021 12:50:42 +0200 Subject: [PATCH 106/173] translate-c: small fixes to avoid generating invalid code for macros --- src/translate_c/ast.zig | 19 +++++++++++++++---- test/run_translated_c.zig | 2 ++ 2 files changed, 17 insertions(+), 4 deletions(-) diff --git a/src/translate_c/ast.zig b/src/translate_c/ast.zig index be331f39dc..34498e7315 100644 --- a/src/translate_c/ast.zig +++ b/src/translate_c/ast.zig @@ -677,7 +677,19 @@ pub fn render(gpa: *Allocator, nodes: []const Node) !std.zig.ast.Tree { .rhs = undefined, }, }); - const root_members = try renderNodes(&ctx, nodes); + + const root_members = blk: { + var result = std.ArrayList(NodeIndex).init(gpa); + defer result.deinit(); + + for (nodes) |node| { + const res = try renderNode(&ctx, node); + if (node.tag() == .warning) continue; + try result.append(res); + } + break :blk try ctx.listToSpan(result.items); + }; + ctx.nodes.items(.data)[0] = .{ .lhs = root_members.start, .rhs = root_members.end, @@ -762,7 +774,6 @@ fn renderNodes(c: *Context, nodes: []const Node) Allocator.Error!NodeSubRange { for (nodes) |node| { const res = try renderNode(c, node); if (node.tag() == .warning) continue; - if (c.nodes.items(.tag)[res] == .identifier) continue; // TODO remove try result.append(res); } @@ -1176,7 +1187,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { }; if (payload.is_const) _ = try c.addToken(.keyword_const, "const"); if (payload.is_volatile) _ = try c.addToken(.keyword_volatile, "volatile"); - const elem_type = try renderNode(c, payload.elem_type); + const elem_type = try renderNodeGrouped(c, payload.elem_type); return c.addNode(.{ .tag = .ptr_type_aligned, @@ -2447,7 +2458,7 @@ fn renderMacroFunc(c: *Context, node: Node) !NodeIndex { _ = try c.addToken(.r_paren, ")"); break :blk res; }; - const 
return_type_expr = try renderNode(c, payload.return_type); + const return_type_expr = try renderNodeGrouped(c, payload.return_type); const fn_proto = try blk: { if (params.items.len < 2) diff --git a/test/run_translated_c.zig b/test/run_translated_c.zig index 2db80c9c6a..0b72ed2926 100644 --- a/test/run_translated_c.zig +++ b/test/run_translated_c.zig @@ -6,6 +6,8 @@ pub fn addCases(cases: *tests.RunTranslatedCContext) void { cases.add("failed macros are only declared once", \\#define FOO = \\#define FOO = + \\#define PtrToPtr64(p) ((void *POINTER_64) p) + \\#define STRUC_ALIGNED_STACK_COPY(t,s) ((CONST t *)(s)) \\int main(void) {} , ""); From 6f6568b1fdb30fc8574b3047470510b798307717 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Fri, 19 Feb 2021 20:48:06 +0200 Subject: [PATCH 107/173] translate-c: correctly add semicolon to if statements --- src/translate_c/ast.zig | 21 +++++---------------- test/translate_c.zig | 2 ++ 2 files changed, 7 insertions(+), 16 deletions(-) diff --git a/src/translate_c/ast.zig b/src/translate_c/ast.zig index 34498e7315..99d198c995 100644 --- a/src/translate_c/ast.zig +++ b/src/translate_c/ast.zig @@ -1902,36 +1902,25 @@ fn addSemicolonIfNeeded(c: *Context, node: Node) !void { .var_decl, .var_simple, .arg_redecl, .alias, .enum_redecl, .block, .empty_block, .block_single, .@"switch" => {}, .while_true => { const payload = node.castTag(.while_true).?.data; - return addSemicolonIfNotBlock(c, payload, .yes_if); + return addSemicolonIfNotBlock(c, payload); }, .@"while" => { const payload = node.castTag(.@"while").?.data; - return addSemicolonIfNotBlock(c, payload.body, .yes_if); + return addSemicolonIfNotBlock(c, payload.body); }, .@"if" => { const payload = node.castTag(.@"if").?.data; if (payload.@"else") |some| - return addSemicolonIfNotBlock(c, some, .no_if); - return addSemicolonIfNotBlock(c, payload.then, .no_if); + return addSemicolonIfNeeded(c, some); + return addSemicolonIfNotBlock(c, payload.then); }, else => _ = try 
c.addToken(.semicolon, ";"), } } -fn addSemicolonIfNotBlock(c: *Context, node: Node, if_needs_semicolon: enum{ yes_if, no_if}) !void { +fn addSemicolonIfNotBlock(c: *Context, node: Node) !void { switch (node.tag()) { .block, .empty_block, .block_single => {}, - .@"if" => { - if (if_needs_semicolon == .yes_if) { - _ = try c.addToken(.semicolon, ";"); - return; - } - - const payload = node.castTag(.@"if").?.data; - if (payload.@"else") |some| - return addSemicolonIfNotBlock(c, some, .no_if); - return addSemicolonIfNotBlock(c, payload.then, .no_if); - }, else => _ = try c.addToken(.semicolon, ";"), } } diff --git a/test/translate_c.zig b/test/translate_c.zig index 34e6897c94..6eb23201e5 100644 --- a/test/translate_c.zig +++ b/test/translate_c.zig @@ -10,6 +10,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\ } else { \\ int b = 2; \\ } + \\ if (1) if (1) {} \\} , &[_][]const u8{ \\pub export fn foo() void { @@ -18,6 +19,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\ } else { \\ var b: c_int = 2; \\ }; + \\ if (true) if (true) {}; \\} }); From 95b95ea33e4b595d2fc6fbea850694b79f27fe55 Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Fri, 19 Feb 2021 22:54:47 +0100 Subject: [PATCH 108/173] stage2: make same line doc comments a parse error Allowing same line doc comments causes some ambiguity as to how generated docs should represent the case in which both same line and preceding line doc comments are present: /// preceding line const foobar = 42; /// same line Furthermore disallowing these makes things simpler as there is now only one way to add a doc comment to a decl or struct field. 
--- lib/std/zig/ast.zig | 4 +++ lib/std/zig/parse.zig | 26 ++++++-------- lib/std/zig/parser_test.zig | 68 ++++++++++++++----------------------- 3 files changed, 39 insertions(+), 59 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index c8f9afd080..2042f07cf6 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -146,6 +146,7 @@ pub const Tree = struct { .ExpectedFn => |*x| return x.render(tokens, stream), .ExpectedReturnType => |*x| return x.render(tokens, stream), .ExpectedAggregateKw => |*x| return x.render(tokens, stream), + .SameLineDocComment => |*x| return x.render(tokens, stream), .UnattachedDocComment => |*x| return x.render(tokens, stream), .ExpectedEqOrSemi => |*x| return x.render(tokens, stream), .ExpectedSemiOrLBrace => |*x| return x.render(tokens, stream), @@ -200,6 +201,7 @@ pub const Tree = struct { .ExpectedFn => |x| return x.token, .ExpectedReturnType => |x| return x.token, .ExpectedAggregateKw => |x| return x.token, + .SameLineDocComment => |x| return x.token, .UnattachedDocComment => |x| return x.token, .ExpectedEqOrSemi => |x| return x.token, .ExpectedSemiOrLBrace => |x| return x.token, @@ -2250,6 +2252,7 @@ pub const Error = union(enum) { ExpectedFn: ExpectedFn, ExpectedReturnType: ExpectedReturnType, ExpectedAggregateKw: ExpectedAggregateKw, + SameLineDocComment: SameLineDocComment, UnattachedDocComment: UnattachedDocComment, ExpectedEqOrSemi: ExpectedEqOrSemi, ExpectedSemiOrLBrace: ExpectedSemiOrLBrace, @@ -2326,6 +2329,7 @@ pub const Error = union(enum) { pub const ExpectedParamType = SimpleError("Expected parameter type"); pub const ExpectedPubItem = SimpleError("Expected function or variable declaration after pub"); + pub const SameLineDocComment = SimpleError("Same line documentation comment"); pub const UnattachedDocComment = SimpleError("Unattached documentation comment"); pub const ExtraAlignQualifier = SimpleError("Extra align qualifier"); pub const ExtraConstQualifier = SimpleError("Extra const 
qualifier"); diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig index a2043071d1..f6545c0f13 100644 --- a/lib/std/zig/parse.zig +++ b/lib/std/zig/parse.zig @@ -190,7 +190,7 @@ const Parser = struct { var trailing_comma = false; while (true) { - const doc_comment = p.eatDocComments(); + const doc_comment = try p.eatDocComments (); switch (p.token_tags[p.tok_i]) { .keyword_test => { @@ -515,7 +515,6 @@ const Parser = struct { switch (p.token_tags[p.tok_i]) { .semicolon => { const semicolon_token = p.nextToken(); - try p.parseAppendedDocComment(semicolon_token); return p.addNode(.{ .tag = .fn_decl, .main_token = p.nodes.items(.main_token)[fn_proto], @@ -557,7 +556,6 @@ const Parser = struct { const var_decl = try p.parseVarDecl(); if (var_decl != 0) { const semicolon_token = try p.expectToken(.semicolon); - try p.parseAppendedDocComment(semicolon_token); return var_decl; } if (thread_local_token != null) { @@ -585,7 +583,6 @@ const Parser = struct { const usingnamespace_token = try p.expectToken(.keyword_usingnamespace); const expr = try p.expectExpr(); const semicolon_token = try p.expectToken(.semicolon); - try p.parseAppendedDocComment(semicolon_token); return p.addNode(.{ .tag = .@"usingnamespace", .main_token = usingnamespace_token, @@ -2885,7 +2882,7 @@ const Parser = struct { } while (true) { - const doc_comment = p.eatDocComments(); + const doc_comment = try p.eatDocComments(); const identifier = try p.expectToken(.identifier); switch (p.token_tags[p.nextToken()]) { .comma => { @@ -3274,7 +3271,7 @@ const Parser = struct { /// such as in the case of anytype and `...`. Caller must look for rparen to find /// out when there are no more param decls left. fn expectParamDecl(p: *Parser) !Node.Index { - _ = p.eatDocComments(); + _ = try p.eatDocComments(); switch (p.token_tags[p.tok_i]) { .keyword_noalias, .keyword_comptime => p.tok_i += 1, .ellipsis3 => { @@ -4075,8 +4072,13 @@ const Parser = struct { } /// Skips over doc comment tokens. 
Returns the first one, if any. - fn eatDocComments(p: *Parser) ?TokenIndex { - if (p.eatToken(.doc_comment)) |first_line| { + fn eatDocComments(p: *Parser) !?TokenIndex { + if (p.eatToken(.doc_comment)) |tok| { + var first_line = tok; + if (tok > 0 and tokensOnSameLine(p, tok - 1, tok)) { + try p.warn(.{ .SameLineDocComment = .{ .token = tok } }); + first_line = p.eatToken(.doc_comment) orelse return null; + } while (p.eatToken(.doc_comment)) |_| {} return first_line; } @@ -4087,14 +4089,6 @@ const Parser = struct { return std.mem.indexOfScalar(u8, p.source[p.token_starts[token1]..p.token_starts[token2]], '\n') == null; } - /// Eat a single-line doc comment on the same line as another node - fn parseAppendedDocComment(p: *Parser, after_token: TokenIndex) !void { - const comment_token = p.eatToken(.doc_comment) orelse return; - if (!p.tokensOnSameLine(after_token, comment_token)) { - p.tok_i -= 1; - } - } - fn eatToken(p: *Parser, tag: Token.Tag) ?TokenIndex { return if (p.token_tags[p.tok_i] == tag) p.nextToken() else null; } diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index e00b94114d..f8e992bb2e 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -1016,23 +1016,6 @@ test "zig fmt: linksection" { ); } -//test "zig fmt: correctly move doc comments on struct fields" { -// try testTransform( -// \\pub const section_64 = extern struct { -// \\ sectname: [16]u8, /// name of this section -// \\ segname: [16]u8, /// segment this section goes in -// \\}; -// , -// \\pub const section_64 = extern struct { -// \\ /// name of this section -// \\ sectname: [16]u8, -// \\ /// segment this section goes in -// \\ segname: [16]u8, -// \\}; -// \\ -// ); -//} - test "zig fmt: correctly space struct fields with doc comments" { try testTransform( \\pub const S = struct { @@ -1449,31 +1432,6 @@ test "zig fmt: async call in if condition" { // \\ // ); //} -// -//test "zig fmt: same-line doc comment on variable declaration" { -// try 
testTransform( -// \\pub const MAP_ANONYMOUS = 0x1000; /// allocated from memory, swap space -// \\pub const MAP_FILE = 0x0000; /// map from file (default) -// \\ -// \\pub const EMEDIUMTYPE = 124; /// Wrong medium type -// \\ -// \\// nameserver query return codes -// \\pub const ENSROK = 0; /// DNS server returned answer with no data -// , -// \\/// allocated from memory, swap space -// \\pub const MAP_ANONYMOUS = 0x1000; -// \\/// map from file (default) -// \\pub const MAP_FILE = 0x0000; -// \\ -// \\/// Wrong medium type -// \\pub const EMEDIUMTYPE = 124; -// \\ -// \\// nameserver query return codes -// \\/// DNS server returned answer with no data -// \\pub const ENSROK = 0; -// \\ -// ); -//} test "zig fmt: if-else with comment before else" { try testCanonical( @@ -3625,6 +3583,30 @@ test "zig fmt: file ends with struct field" { // }); //} +test "zig fmt: same line doc comment returns error" { + try testError( + \\const Foo = struct{ + \\ bar: u32, /// comment + \\ foo: u32, /// comment + \\ /// commment + \\}; + \\ + \\const a = 42; /// comment + \\ + \\extern fn foo() void; /// comment + \\ + \\/// comment + \\ + , &[_]Error{ + .SameLineDocComment, + .SameLineDocComment, + .UnattachedDocComment, + .SameLineDocComment, + .SameLineDocComment, + .UnattachedDocComment, + }); +} + test "zig fmt: integer literals with underscore separators" { try testTransform( \\const @@ -4388,6 +4370,6 @@ fn testError(source: []const u8, expected_errors: []const Error) !void { std.testing.expect(tree.errors.len == expected_errors.len); for (expected_errors) |expected, i| { - std.testing.expect(expected == tree.errors[i]); + std.testing.expectEqual(expected, tree.errors[i]); } } From 74878565e5112fed04336089ed769443e08e605b Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Fri, 19 Feb 2021 16:38:04 -0700 Subject: [PATCH 109/173] readme: update introduction link --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 
5eedcdab8b..e98eebf29f 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ A general-purpose programming language and toolchain for maintaining ## Resources - * [Introduction](https://ziglang.org/#Introduction) + * [Introduction](https://ziglang.org/learn/#introduction) * [Download & Documentation](https://ziglang.org/download) * [Chapter 0 - Getting Started | ZigLearn.org](https://ziglearn.org/) * [Community](https://github.com/ziglang/zig/wiki/Community) From 8fee41b1d528d598521525574206e200fd332c67 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Fri, 19 Feb 2021 18:04:52 -0700 Subject: [PATCH 110/173] stage2: AST: clean up parse errors * struct instead of tagged union * delete dead code * simplify parser code * remove unnecessary metaprogramming --- lib/std/zig/ast.zig | 527 +++++++++++++----------------------- lib/std/zig/parse.zig | 338 +++++++++-------------- lib/std/zig/parser_test.zig | 127 +++++---- src/Module.zig | 2 +- src/main.zig | 2 +- 5 files changed, 389 insertions(+), 607 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 2042f07cf6..fda2916af4 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -132,113 +132,160 @@ pub const Tree = struct { } pub fn renderError(tree: Tree, parse_error: Error, stream: anytype) !void { - const tokens = tree.tokens.items(.tag); - switch (parse_error) { - .InvalidToken => |*x| return x.render(tokens, stream), - .ExpectedContainerMembers => |*x| return x.render(tokens, stream), - .ExpectedStringLiteral => |*x| return x.render(tokens, stream), - .ExpectedIntegerLiteral => |*x| return x.render(tokens, stream), - .ExpectedPubItem => |*x| return x.render(tokens, stream), - .ExpectedIdentifier => |*x| return x.render(tokens, stream), - .ExpectedStatement => |*x| return x.render(tokens, stream), - .ExpectedVarDeclOrFn => |*x| return x.render(tokens, stream), - .ExpectedVarDecl => |*x| return x.render(tokens, stream), - .ExpectedFn => |*x| return x.render(tokens, stream), - 
.ExpectedReturnType => |*x| return x.render(tokens, stream), - .ExpectedAggregateKw => |*x| return x.render(tokens, stream), - .SameLineDocComment => |*x| return x.render(tokens, stream), - .UnattachedDocComment => |*x| return x.render(tokens, stream), - .ExpectedEqOrSemi => |*x| return x.render(tokens, stream), - .ExpectedSemiOrLBrace => |*x| return x.render(tokens, stream), - .ExpectedSemiOrElse => |*x| return x.render(tokens, stream), - .ExpectedLabelOrLBrace => |*x| return x.render(tokens, stream), - .ExpectedLBrace => |*x| return x.render(tokens, stream), - .ExpectedColonOrRParen => |*x| return x.render(tokens, stream), - .ExpectedLabelable => |*x| return x.render(tokens, stream), - .ExpectedInlinable => |*x| return x.render(tokens, stream), - .ExpectedAsmOutputReturnOrType => |*x| return x.render(tokens, stream), - .ExpectedCall => |x| return x.render(tree, stream), - .ExpectedCallOrFnProto => |x| return x.render(tree, stream), - .ExpectedSliceOrRBracket => |*x| return x.render(tokens, stream), - .ExtraAlignQualifier => |*x| return x.render(tokens, stream), - .ExtraConstQualifier => |*x| return x.render(tokens, stream), - .ExtraVolatileQualifier => |*x| return x.render(tokens, stream), - .ExtraAllowZeroQualifier => |*x| return x.render(tokens, stream), - .ExpectedTypeExpr => |*x| return x.render(tokens, stream), - .ExpectedPrimaryTypeExpr => |*x| return x.render(tokens, stream), - .ExpectedParamType => |*x| return x.render(tokens, stream), - .ExpectedExpr => |*x| return x.render(tokens, stream), - .ExpectedPrimaryExpr => |*x| return x.render(tokens, stream), - .ExpectedToken => |*x| return x.render(tokens, stream), - .ExpectedCommaOrEnd => |*x| return x.render(tokens, stream), - .ExpectedParamList => |*x| return x.render(tokens, stream), - .ExpectedPayload => |*x| return x.render(tokens, stream), - .ExpectedBlockOrAssignment => |*x| return x.render(tokens, stream), - .ExpectedBlockOrExpression => |*x| return x.render(tokens, stream), - 
.ExpectedExprOrAssignment => |*x| return x.render(tokens, stream), - .ExpectedPrefixExpr => |*x| return x.render(tokens, stream), - .ExpectedLoopExpr => |*x| return x.render(tokens, stream), - .ExpectedDerefOrUnwrap => |*x| return x.render(tokens, stream), - .ExpectedSuffixOp => |*x| return x.render(tokens, stream), - .ExpectedBlockOrField => |*x| return x.render(tokens, stream), - .DeclBetweenFields => |*x| return x.render(tokens, stream), - .InvalidAnd => |*x| return x.render(tokens, stream), - .AsteriskAfterPointerDereference => |*x| return x.render(tokens, stream), - } - } + const token_tags = tree.tokens.items(.tag); + switch (parse_error.tag) { + .asterisk_after_ptr_deref => { + return stream.writeAll("'.*' cannot be followed by '*'. Are you missing a space?"); + }, + .decl_between_fields => { + return stream.writeAll("declarations are not allowed between container fields"); + }, + .expected_block => { + return stream.print("expected block or field, found '{s}'", .{ + token_tags[parse_error.token].symbol(), + }); + }, + .expected_block_or_assignment => { + return stream.print("expected block or assignment, found '{s}'", .{ + token_tags[parse_error.token].symbol(), + }); + }, + .expected_block_or_expr => { + return stream.print("expected block or expression, found '{s}'", .{ + token_tags[parse_error.token].symbol(), + }); + }, + .expected_block_or_field => { + return stream.print("expected block or field, found '{s}'", .{ + token_tags[parse_error.token].symbol(), + }); + }, + .expected_container_members => { + return stream.print("expected test, comptime, var decl, or container field, found '{s}'", .{ + token_tags[parse_error.token].symbol(), + }); + }, + .expected_expr => { + return stream.print("expected expression, found '{s}'", .{ + token_tags[parse_error.token].symbol(), + }); + }, + .expected_expr_or_assignment => { + return stream.print("expected expression or assignment, found '{s}'", .{ + token_tags[parse_error.token].symbol(), + }); + }, + 
.expected_fn => { + return stream.print("expected function, found '{s}'", .{ + token_tags[parse_error.token].symbol(), + }); + }, + .expected_inlinable => { + return stream.print("expected 'while' or 'for', found '{s}'", .{ + token_tags[parse_error.token].symbol(), + }); + }, + .expected_labelable => { + return stream.print("expected 'while', 'for', 'inline', 'suspend', or '{{', found '{s}'", .{ + token_tags[parse_error.token].symbol(), + }); + }, + .expected_param_list => { + return stream.print("expected parameter list, found '{s}'", .{ + token_tags[parse_error.token].symbol(), + }); + }, + .expected_prefix_expr => { + return stream.print("expected prefix expression, found '{s}'", .{ + token_tags[parse_error.token].symbol(), + }); + }, + .expected_primary_type_expr => { + return stream.print("expected primary type expression, found '{s}'", .{ + token_tags[parse_error.token].symbol(), + }); + }, + .expected_return_type => { + return stream.print("expected return type expression, found '{s}'", .{ + token_tags[parse_error.token].symbol(), + }); + }, + .expected_semi_or_else => { + return stream.print("expected ';' or 'else', found '{s}'", .{ + token_tags[parse_error.token].symbol(), + }); + }, + .expected_semi_or_lbrace => { + return stream.print("expected ';' or '{{', found '{s}'", .{ + token_tags[parse_error.token].symbol(), + }); + }, + .expected_statement => { + return stream.print("expected statement, found '{s}'", .{ + token_tags[parse_error.token].symbol(), + }); + }, + .expected_string_literal => { + return stream.print("expected string literal, found '{s}'", .{ + token_tags[parse_error.token].symbol(), + }); + }, + .expected_suffix_op => { + return stream.print("expected pointer dereference, optional unwrap, or field access, found '{s}'", .{ + token_tags[parse_error.token].symbol(), + }); + }, + .expected_type_expr => { + return stream.print("expected type expression, found '{s}'", .{ + token_tags[parse_error.token].symbol(), + }); + }, + .expected_var_decl 
=> { + return stream.print("expected variable declaration, found '{s}'", .{ + token_tags[parse_error.token].symbol(), + }); + }, + .expected_var_decl_or_fn => { + return stream.print("expected variable declaration or function, found '{s}'", .{ + token_tags[parse_error.token].symbol(), + }); + }, + .extra_align_qualifier => { + return stream.writeAll("extra align qualifier"); + }, + .extra_allowzero_qualifier => { + return stream.writeAll("extra allowzero qualifier"); + }, + .extra_const_qualifier => { + return stream.writeAll("extra const qualifier"); + }, + .extra_volatile_qualifier => { + return stream.writeAll("extra volatile qualifier"); + }, + .invalid_token => { + return stream.print("invalid token '{s}'", .{ + token_tags[parse_error.token].symbol(), + }); + }, + .same_line_doc_comment => { + return stream.writeAll("same line documentation comment"); + }, + .unattached_doc_comment => { + return stream.writeAll("unattached documentation comment"); + }, - pub fn errorToken(tree: Tree, parse_error: Error) TokenIndex { - switch (parse_error) { - .InvalidToken => |x| return x.token, - .ExpectedContainerMembers => |x| return x.token, - .ExpectedStringLiteral => |x| return x.token, - .ExpectedIntegerLiteral => |x| return x.token, - .ExpectedPubItem => |x| return x.token, - .ExpectedIdentifier => |x| return x.token, - .ExpectedStatement => |x| return x.token, - .ExpectedVarDeclOrFn => |x| return x.token, - .ExpectedVarDecl => |x| return x.token, - .ExpectedFn => |x| return x.token, - .ExpectedReturnType => |x| return x.token, - .ExpectedAggregateKw => |x| return x.token, - .SameLineDocComment => |x| return x.token, - .UnattachedDocComment => |x| return x.token, - .ExpectedEqOrSemi => |x| return x.token, - .ExpectedSemiOrLBrace => |x| return x.token, - .ExpectedSemiOrElse => |x| return x.token, - .ExpectedLabelOrLBrace => |x| return x.token, - .ExpectedLBrace => |x| return x.token, - .ExpectedColonOrRParen => |x| return x.token, - .ExpectedLabelable => |x| return 
x.token, - .ExpectedInlinable => |x| return x.token, - .ExpectedAsmOutputReturnOrType => |x| return x.token, - .ExpectedCall => |x| return tree.nodes.items(.main_token)[x.node], - .ExpectedCallOrFnProto => |x| return tree.nodes.items(.main_token)[x.node], - .ExpectedSliceOrRBracket => |x| return x.token, - .ExtraAlignQualifier => |x| return x.token, - .ExtraConstQualifier => |x| return x.token, - .ExtraVolatileQualifier => |x| return x.token, - .ExtraAllowZeroQualifier => |x| return x.token, - .ExpectedTypeExpr => |x| return x.token, - .ExpectedPrimaryTypeExpr => |x| return x.token, - .ExpectedParamType => |x| return x.token, - .ExpectedExpr => |x| return x.token, - .ExpectedPrimaryExpr => |x| return x.token, - .ExpectedToken => |x| return x.token, - .ExpectedCommaOrEnd => |x| return x.token, - .ExpectedParamList => |x| return x.token, - .ExpectedPayload => |x| return x.token, - .ExpectedBlockOrAssignment => |x| return x.token, - .ExpectedBlockOrExpression => |x| return x.token, - .ExpectedExprOrAssignment => |x| return x.token, - .ExpectedPrefixExpr => |x| return x.token, - .ExpectedLoopExpr => |x| return x.token, - .ExpectedDerefOrUnwrap => |x| return x.token, - .ExpectedSuffixOp => |x| return x.token, - .ExpectedBlockOrField => |x| return x.token, - .DeclBetweenFields => |x| return x.token, - .InvalidAnd => |x| return x.token, - .AsteriskAfterPointerDereference => |x| return x.token, + .expected_token => { + const found_tag = token_tags[parse_error.token]; + const expected_symbol = parse_error.extra.expected_tag.symbol(); + switch (found_tag) { + .invalid => return stream.print("expected '{s}', found invalid bytes", .{ + expected_symbol, + }), + else => return stream.print("expected '{s}', found '{s}'", .{ + expected_symbol, found_tag.symbol(), + }), + } + }, } } @@ -2239,236 +2286,50 @@ pub const full = struct { }; }; -pub const Error = union(enum) { - InvalidToken: InvalidToken, - ExpectedContainerMembers: ExpectedContainerMembers, - ExpectedStringLiteral: 
ExpectedStringLiteral, - ExpectedIntegerLiteral: ExpectedIntegerLiteral, - ExpectedPubItem: ExpectedPubItem, - ExpectedIdentifier: ExpectedIdentifier, - ExpectedStatement: ExpectedStatement, - ExpectedVarDeclOrFn: ExpectedVarDeclOrFn, - ExpectedVarDecl: ExpectedVarDecl, - ExpectedFn: ExpectedFn, - ExpectedReturnType: ExpectedReturnType, - ExpectedAggregateKw: ExpectedAggregateKw, - SameLineDocComment: SameLineDocComment, - UnattachedDocComment: UnattachedDocComment, - ExpectedEqOrSemi: ExpectedEqOrSemi, - ExpectedSemiOrLBrace: ExpectedSemiOrLBrace, - ExpectedSemiOrElse: ExpectedSemiOrElse, - ExpectedLabelOrLBrace: ExpectedLabelOrLBrace, - ExpectedLBrace: ExpectedLBrace, - ExpectedColonOrRParen: ExpectedColonOrRParen, - ExpectedLabelable: ExpectedLabelable, - ExpectedInlinable: ExpectedInlinable, - ExpectedAsmOutputReturnOrType: ExpectedAsmOutputReturnOrType, - ExpectedCall: ExpectedCall, - ExpectedCallOrFnProto: ExpectedCallOrFnProto, - ExpectedSliceOrRBracket: ExpectedSliceOrRBracket, - ExtraAlignQualifier: ExtraAlignQualifier, - ExtraConstQualifier: ExtraConstQualifier, - ExtraVolatileQualifier: ExtraVolatileQualifier, - ExtraAllowZeroQualifier: ExtraAllowZeroQualifier, - ExpectedTypeExpr: ExpectedTypeExpr, - ExpectedPrimaryTypeExpr: ExpectedPrimaryTypeExpr, - ExpectedParamType: ExpectedParamType, - ExpectedExpr: ExpectedExpr, - ExpectedPrimaryExpr: ExpectedPrimaryExpr, - ExpectedToken: ExpectedToken, - ExpectedCommaOrEnd: ExpectedCommaOrEnd, - ExpectedParamList: ExpectedParamList, - ExpectedPayload: ExpectedPayload, - ExpectedBlockOrAssignment: ExpectedBlockOrAssignment, - ExpectedBlockOrExpression: ExpectedBlockOrExpression, - ExpectedExprOrAssignment: ExpectedExprOrAssignment, - ExpectedPrefixExpr: ExpectedPrefixExpr, - ExpectedLoopExpr: ExpectedLoopExpr, - ExpectedDerefOrUnwrap: ExpectedDerefOrUnwrap, - ExpectedSuffixOp: ExpectedSuffixOp, - ExpectedBlockOrField: ExpectedBlockOrField, - DeclBetweenFields: DeclBetweenFields, - InvalidAnd: InvalidAnd, - 
AsteriskAfterPointerDereference: AsteriskAfterPointerDereference, +pub const Error = struct { + tag: Tag, + token: TokenIndex, + extra: union { + none: void, + expected_tag: Token.Tag, + } = .{ .none = {} }, - pub const InvalidToken = SingleTokenError("Invalid token '{s}'"); - pub const ExpectedContainerMembers = SingleTokenError("Expected test, comptime, var decl, or container field, found '{s}'"); - pub const ExpectedStringLiteral = SingleTokenError("Expected string literal, found '{s}'"); - pub const ExpectedIntegerLiteral = SingleTokenError("Expected integer literal, found '{s}'"); - pub const ExpectedIdentifier = SingleTokenError("Expected identifier, found '{s}'"); - pub const ExpectedStatement = SingleTokenError("Expected statement, found '{s}'"); - pub const ExpectedVarDeclOrFn = SingleTokenError("Expected variable declaration or function, found '{s}'"); - pub const ExpectedVarDecl = SingleTokenError("Expected variable declaration, found '{s}'"); - pub const ExpectedFn = SingleTokenError("Expected function, found '{s}'"); - pub const ExpectedReturnType = SingleTokenError("Expected return type expression, found '{s}'"); - pub const ExpectedAggregateKw = SingleTokenError("Expected '" ++ Token.Tag.keyword_struct.symbol() ++ "', '" ++ Token.Tag.keyword_union.symbol() ++ "', '" ++ Token.Tag.keyword_enum.symbol() ++ "', or '" ++ Token.Tag.keyword_opaque.symbol() ++ "', found '{s}'"); - pub const ExpectedEqOrSemi = SingleTokenError("Expected '=' or ';', found '{s}'"); - pub const ExpectedSemiOrLBrace = SingleTokenError("Expected ';' or '{{', found '{s}'"); - pub const ExpectedSemiOrElse = SingleTokenError("Expected ';' or 'else', found '{s}'"); - pub const ExpectedLBrace = SingleTokenError("Expected '{{', found '{s}'"); - pub const ExpectedLabelOrLBrace = SingleTokenError("Expected label or '{{', found '{s}'"); - pub const ExpectedColonOrRParen = SingleTokenError("Expected ':' or ')', found '{s}'"); - pub const ExpectedLabelable = SingleTokenError("Expected 
'while', 'for', 'inline', 'suspend', or '{{', found '{s}'"); - pub const ExpectedInlinable = SingleTokenError("Expected 'while' or 'for', found '{s}'"); - pub const ExpectedAsmOutputReturnOrType = SingleTokenError("Expected '->' or '" ++ Token.Tag.identifier.symbol() ++ "', found '{s}'"); - pub const ExpectedSliceOrRBracket = SingleTokenError("Expected ']' or '..', found '{s}'"); - pub const ExpectedTypeExpr = SingleTokenError("Expected type expression, found '{s}'"); - pub const ExpectedPrimaryTypeExpr = SingleTokenError("Expected primary type expression, found '{s}'"); - pub const ExpectedExpr = SingleTokenError("Expected expression, found '{s}'"); - pub const ExpectedPrimaryExpr = SingleTokenError("Expected primary expression, found '{s}'"); - pub const ExpectedParamList = SingleTokenError("Expected parameter list, found '{s}'"); - pub const ExpectedPayload = SingleTokenError("Expected loop payload, found '{s}'"); - pub const ExpectedBlockOrAssignment = SingleTokenError("Expected block or assignment, found '{s}'"); - pub const ExpectedBlockOrExpression = SingleTokenError("Expected block or expression, found '{s}'"); - pub const ExpectedExprOrAssignment = SingleTokenError("Expected expression or assignment, found '{s}'"); - pub const ExpectedPrefixExpr = SingleTokenError("Expected prefix expression, found '{s}'"); - pub const ExpectedLoopExpr = SingleTokenError("Expected loop expression, found '{s}'"); - pub const ExpectedDerefOrUnwrap = SingleTokenError("Expected pointer dereference or optional unwrap, found '{s}'"); - pub const ExpectedSuffixOp = SingleTokenError("Expected pointer dereference, optional unwrap, or field access, found '{s}'"); - pub const ExpectedBlockOrField = SingleTokenError("Expected block or field, found '{s}'"); + pub const Tag = enum { + asterisk_after_ptr_deref, + decl_between_fields, + expected_block, + expected_block_or_assignment, + expected_block_or_expr, + expected_block_or_field, + expected_container_members, + expected_expr, + 
expected_expr_or_assignment, + expected_fn, + expected_inlinable, + expected_labelable, + expected_param_list, + expected_prefix_expr, + expected_primary_type_expr, + expected_return_type, + expected_semi_or_else, + expected_semi_or_lbrace, + expected_statement, + expected_string_literal, + expected_suffix_op, + expected_type_expr, + expected_var_decl, + expected_var_decl_or_fn, + extra_align_qualifier, + extra_allowzero_qualifier, + extra_const_qualifier, + extra_volatile_qualifier, + invalid_token, + same_line_doc_comment, + unattached_doc_comment, - pub const ExpectedParamType = SimpleError("Expected parameter type"); - pub const ExpectedPubItem = SimpleError("Expected function or variable declaration after pub"); - pub const SameLineDocComment = SimpleError("Same line documentation comment"); - pub const UnattachedDocComment = SimpleError("Unattached documentation comment"); - pub const ExtraAlignQualifier = SimpleError("Extra align qualifier"); - pub const ExtraConstQualifier = SimpleError("Extra const qualifier"); - pub const ExtraVolatileQualifier = SimpleError("Extra volatile qualifier"); - pub const ExtraAllowZeroQualifier = SimpleError("Extra allowzero qualifier"); - pub const DeclBetweenFields = SimpleError("Declarations are not allowed between container fields"); - pub const InvalidAnd = SimpleError("`&&` is invalid. Note that `and` is boolean AND."); - pub const AsteriskAfterPointerDereference = SimpleError("`.*` can't be followed by `*`. Are you missing a space?"); - - pub const ExpectedCall = struct { - node: Node.Index, - - pub fn render(self: ExpectedCall, tree: Tree, stream: anytype) !void { - const node_tag = tree.nodes.items(.tag)[self.node]; - return stream.print("expected " ++ @tagName(Node.Tag.call) ++ ", found {s}", .{ - @tagName(node_tag), - }); - } + /// `expected_tag` is populated. 
+ expected_token, }; - - pub const ExpectedCallOrFnProto = struct { - node: Node.Index, - - pub fn render(self: ExpectedCallOrFnProto, tree: Tree, stream: anytype) !void { - const node_tag = tree.nodes.items(.tag)[self.node]; - return stream.print("expected " ++ @tagName(Node.Tag.call) ++ " or " ++ - @tagName(Node.Tag.fn_proto) ++ ", found {s}", .{@tagName(node_tag)}); - } - }; - - pub const ExpectedToken = struct { - token: TokenIndex, - expected_id: Token.Tag, - - pub fn render(self: *const ExpectedToken, tokens: []const Token.Tag, stream: anytype) !void { - const found_token = tokens[self.token]; - switch (found_token) { - .invalid => { - return stream.print("expected '{s}', found invalid bytes", .{self.expected_id.symbol()}); - }, - else => { - const token_name = found_token.symbol(); - return stream.print("expected '{s}', found '{s}'", .{ self.expected_id.symbol(), token_name }); - }, - } - } - }; - - pub const ExpectedCommaOrEnd = struct { - token: TokenIndex, - end_id: Token.Tag, - - pub fn render(self: *const ExpectedCommaOrEnd, tokens: []const Token.Tag, stream: anytype) !void { - const actual_token = tokens[self.token]; - return stream.print("expected ',' or '{s}', found '{s}'", .{ - self.end_id.symbol(), - actual_token.symbol(), - }); - } - }; - - fn SingleTokenError(comptime msg: []const u8) type { - return struct { - const ThisError = @This(); - - token: TokenIndex, - - pub fn render(self: *const ThisError, tokens: []const Token.Tag, stream: anytype) !void { - const actual_token = tokens[self.token]; - return stream.print(msg, .{actual_token.symbol()}); - } - }; - } - - fn SimpleError(comptime msg: []const u8) type { - return struct { - const ThisError = @This(); - - token: TokenIndex, - - pub fn render(self: *const ThisError, tokens: []const Token.Tag, stream: anytype) !void { - return stream.writeAll(msg); - } - }; - } - - pub fn loc(self: Error) TokenIndex { - switch (self) { - .InvalidToken => |x| return x.token, - .ExpectedContainerMembers => |x| 
return x.token, - .ExpectedStringLiteral => |x| return x.token, - .ExpectedIntegerLiteral => |x| return x.token, - .ExpectedPubItem => |x| return x.token, - .ExpectedIdentifier => |x| return x.token, - .ExpectedStatement => |x| return x.token, - .ExpectedVarDeclOrFn => |x| return x.token, - .ExpectedVarDecl => |x| return x.token, - .ExpectedFn => |x| return x.token, - .ExpectedReturnType => |x| return x.token, - .ExpectedAggregateKw => |x| return x.token, - .UnattachedDocComment => |x| return x.token, - .ExpectedEqOrSemi => |x| return x.token, - .ExpectedSemiOrLBrace => |x| return x.token, - .ExpectedSemiOrElse => |x| return x.token, - .ExpectedLabelOrLBrace => |x| return x.token, - .ExpectedLBrace => |x| return x.token, - .ExpectedColonOrRParen => |x| return x.token, - .ExpectedLabelable => |x| return x.token, - .ExpectedInlinable => |x| return x.token, - .ExpectedAsmOutputReturnOrType => |x| return x.token, - .ExpectedCall => |x| @panic("TODO redo ast errors"), - .ExpectedCallOrFnProto => |x| @panic("TODO redo ast errors"), - .ExpectedSliceOrRBracket => |x| return x.token, - .ExtraAlignQualifier => |x| return x.token, - .ExtraConstQualifier => |x| return x.token, - .ExtraVolatileQualifier => |x| return x.token, - .ExtraAllowZeroQualifier => |x| return x.token, - .ExpectedTypeExpr => |x| return x.token, - .ExpectedPrimaryTypeExpr => |x| return x.token, - .ExpectedParamType => |x| return x.token, - .ExpectedExpr => |x| return x.token, - .ExpectedPrimaryExpr => |x| return x.token, - .ExpectedToken => |x| return x.token, - .ExpectedCommaOrEnd => |x| return x.token, - .ExpectedParamList => |x| return x.token, - .ExpectedPayload => |x| return x.token, - .ExpectedBlockOrAssignment => |x| return x.token, - .ExpectedBlockOrExpression => |x| return x.token, - .ExpectedExprOrAssignment => |x| return x.token, - .ExpectedPrefixExpr => |x| return x.token, - .ExpectedLoopExpr => |x| return x.token, - .ExpectedDerefOrUnwrap => |x| return x.token, - .ExpectedSuffixOp => |x| 
return x.token, - .ExpectedBlockOrField => |x| return x.token, - .DeclBetweenFields => |x| return x.token, - .InvalidAnd => |x| return x.token, - .AsteriskAfterPointerDereference => |x| return x.token, - } - } }; pub const Node = struct { diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig index f6545c0f13..a557b57ad4 100644 --- a/lib/std/zig/parse.zig +++ b/lib/std/zig/parse.zig @@ -150,14 +150,41 @@ const Parser = struct { return result; } - fn warn(p: *Parser, msg: ast.Error) error{OutOfMemory}!void { + fn warn(p: *Parser, tag: ast.Error.Tag) error{OutOfMemory}!void { + @setCold(true); + try p.warnMsg(.{ .tag = tag, .token = p.tok_i }); + } + + fn warnExpected(p: *Parser, expected_token: Token.Tag) error{OutOfMemory}!void { + @setCold(true); + try p.warnMsg(.{ + .tag = .expected_token, + .token = p.tok_i, + .extra = .{ .expected_tag = expected_token }, + }); + } + fn warnMsg(p: *Parser, msg: ast.Error) error{OutOfMemory}!void { @setCold(true); try p.errors.append(p.gpa, msg); } - fn fail(p: *Parser, msg: ast.Error) error{ ParseError, OutOfMemory } { + fn fail(p: *Parser, tag: ast.Error.Tag) error{ ParseError, OutOfMemory } { @setCold(true); - try p.warn(msg); + return p.failMsg(.{ .tag = tag, .token = p.tok_i }); + } + + fn failExpected(p: *Parser, expected_token: Token.Tag) error{ ParseError, OutOfMemory } { + @setCold(true); + return p.failMsg(.{ + .tag = .expected_token, + .token = p.tok_i, + .extra = .{ .expected_tag = expected_token }, + }); + } + + fn failMsg(p: *Parser, msg: ast.Error) error{ ParseError, OutOfMemory } { + @setCold(true); + try p.warnMsg(msg); return error.ParseError; } @@ -190,7 +217,7 @@ const Parser = struct { var trailing_comma = false; while (true) { - const doc_comment = try p.eatDocComments (); + const doc_comment = try p.eatDocComments(); switch (p.token_tags[p.tok_i]) { .keyword_test => { @@ -212,8 +239,9 @@ const Parser = struct { .none => field_state = .seen, .err, .seen => {}, .end => |node| { - try p.warn(.{ - 
.DeclBetweenFields = .{ .token = p.nodes.items(.main_token)[node] }, + try p.warnMsg(.{ + .tag = .decl_between_fields, + .token = p.nodes.items(.main_token)[node], }); // Continue parsing; error will be reported later. field_state = .err; @@ -234,9 +262,7 @@ const Parser = struct { } // There is not allowed to be a decl after a field with no comma. // Report error but recover parser. - try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, - }); + try p.warnExpected(.comma); p.findNextContainerMember(); } }, @@ -267,7 +293,7 @@ const Parser = struct { }, else => { p.tok_i += 1; - try p.warn(.{ .ExpectedBlockOrField = .{ .token = p.tok_i } }); + try p.warn(.expected_block_or_field); }, }, .keyword_pub => { @@ -316,8 +342,9 @@ const Parser = struct { .none => field_state = .seen, .err, .seen => {}, .end => |node| { - try p.warn(.{ - .DeclBetweenFields = .{ .token = p.nodes.items(.main_token)[node] }, + try p.warnMsg(.{ + .tag = .decl_between_fields, + .token = p.nodes.items(.main_token)[node], }); // Continue parsing; error will be reported later. field_state = .err; @@ -338,20 +365,21 @@ const Parser = struct { } // There is not allowed to be a decl after a field with no comma. // Report error but recover parser. - try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, - }); + try p.warnExpected(.comma); p.findNextContainerMember(); } }, .eof, .r_brace => { if (doc_comment) |tok| { - try p.warn(.{ .UnattachedDocComment = .{ .token = tok } }); + try p.warnMsg(.{ + .tag = .unattached_doc_comment, + .token = tok, + }); } break; }, else => { - try p.warn(.{ .ExpectedContainerMembers = .{ .token = p.tok_i } }); + try p.warn(.expected_container_members); // This was likely not supposed to end yet; try to find the next declaration. 
p.findNextContainerMember(); }, @@ -475,7 +503,7 @@ const Parser = struct { const test_token = p.assertToken(.keyword_test); const name_token = p.eatToken(.string_literal); const block_node = try p.parseBlock(); - if (block_node == 0) return p.fail(.{ .ExpectedLBrace = .{ .token = p.tok_i } }); + if (block_node == 0) return p.fail(.expected_block); return p.addNode(.{ .tag = .test_decl, .main_token = test_token, @@ -540,15 +568,13 @@ const Parser = struct { // Since parseBlock only return error.ParseError on // a missing '}' we can assume this function was // supposed to end here. - try p.warn(.{ .ExpectedSemiOrLBrace = .{ .token = p.tok_i } }); + try p.warn(.expected_semi_or_lbrace); return null_node; }, } } if (expect_fn) { - try p.warn(.{ - .ExpectedFn = .{ .token = p.tok_i }, - }); + try p.warn(.expected_fn); return error.ParseError; } @@ -559,11 +585,11 @@ const Parser = struct { return var_decl; } if (thread_local_token != null) { - return p.fail(.{ .ExpectedVarDecl = .{ .token = p.tok_i } }); + return p.fail(.expected_var_decl); } if (exported) { - return p.fail(.{ .ExpectedVarDeclOrFn = .{ .token = p.tok_i } }); + return p.fail(.expected_var_decl_or_fn); } return p.expectUsingNamespace(); @@ -618,7 +644,7 @@ const Parser = struct { if (return_type_expr == 0) { // most likely the user forgot to specify the return type. // Mark return type as invalid and try to continue. 
- try p.warn(.{ .ExpectedReturnType = .{ .token = p.tok_i } }); + try p.warn(.expected_return_type); } if (align_expr == 0 and section_expr == 0 and callconv_expr == 0) { @@ -901,7 +927,7 @@ const Parser = struct { fn expectStatement(p: *Parser) !Node.Index { const statement = try p.parseStatement(); if (statement == 0) { - return p.fail(.{ .InvalidToken = .{ .token = p.tok_i } }); + return p.fail(.expected_statement); } return statement; } @@ -940,7 +966,7 @@ const Parser = struct { if (block_expr != 0) break :blk block_expr; const assign_expr = try p.parseAssignExpr(); if (assign_expr == 0) { - return p.fail(.{ .ExpectedBlockOrAssignment = .{ .token = p.tok_i } }); + return p.fail(.expected_block_or_assignment); } if (p.eatToken(.semicolon)) |_| { return p.addNode(.{ @@ -957,7 +983,7 @@ const Parser = struct { }; const else_token = p.eatToken(.keyword_else) orelse { if (else_required) { - return p.fail(.{ .ExpectedSemiOrElse = .{ .token = p.tok_i } }); + return p.fail(.expected_semi_or_else); } return p.addNode(.{ .tag = .if_simple, @@ -993,7 +1019,7 @@ const Parser = struct { if (loop_stmt != 0) return loop_stmt; if (label_token != 0) { - return p.fail(.{ .ExpectedLabelable = .{ .token = p.tok_i } }); + return p.fail(.expected_labelable); } return null_node; @@ -1012,7 +1038,7 @@ const Parser = struct { if (inline_token == null) return null_node; // If we've seen "inline", there should have been a "for" or "while" - return p.fail(.{ .ExpectedInlinable = .{ .token = p.tok_i } }); + return p.fail(.expected_inlinable); } /// ForPrefix <- KEYWORD_for LPAREN Expr RPAREN PtrIndexPayload @@ -1034,7 +1060,7 @@ const Parser = struct { if (block_expr != 0) break :blk block_expr; const assign_expr = try p.parseAssignExpr(); if (assign_expr == 0) { - return p.fail(.{ .ExpectedBlockOrAssignment = .{ .token = p.tok_i } }); + return p.fail(.expected_block_or_assignment); } if (p.eatToken(.semicolon)) |_| { return p.addNode(.{ @@ -1051,7 +1077,7 @@ const Parser = struct { }; 
const else_token = p.eatToken(.keyword_else) orelse { if (else_required) { - return p.fail(.{ .ExpectedSemiOrElse = .{ .token = p.tok_i } }); + return p.fail(.expected_semi_or_else); } return p.addNode(.{ .tag = .for_simple, @@ -1095,7 +1121,7 @@ const Parser = struct { if (block_expr != 0) break :blk block_expr; const assign_expr = try p.parseAssignExpr(); if (assign_expr == 0) { - return p.fail(.{ .ExpectedBlockOrAssignment = .{ .token = p.tok_i } }); + return p.fail(.expected_block_or_assignment); } if (p.eatToken(.semicolon)) |_| { if (cont_expr == 0) { @@ -1126,7 +1152,7 @@ const Parser = struct { }; const else_token = p.eatToken(.keyword_else) orelse { if (else_required) { - return p.fail(.{ .ExpectedSemiOrElse = .{ .token = p.tok_i } }); + return p.fail(.expected_semi_or_else); } if (cont_expr == 0) { return p.addNode(.{ @@ -1186,7 +1212,7 @@ const Parser = struct { fn expectBlockExprStatement(p: *Parser) !Node.Index { const node = try p.parseBlockExprStatement(); if (node == 0) { - return p.fail(.{ .ExpectedBlockOrExpression = .{ .token = p.tok_i } }); + return p.fail(.expected_block_or_expr); } return node; } @@ -1259,7 +1285,7 @@ const Parser = struct { fn expectAssignExpr(p: *Parser) !Node.Index { const expr = try p.parseAssignExpr(); if (expr == 0) { - return p.fail(.{ .ExpectedExprOrAssignment = .{ .token = p.tok_i } }); + return p.fail(.expected_expr_or_assignment); } return expr; } @@ -1272,7 +1298,7 @@ const Parser = struct { fn expectExpr(p: *Parser) Error!Node.Index { const node = try p.parseExpr(); if (node == 0) { - return p.fail(.{ .ExpectedExpr = .{ .token = p.tok_i } }); + return p.fail(.expected_expr); } else { return node; } @@ -1289,7 +1315,7 @@ const Parser = struct { const or_token = p.nextToken(); const rhs = try p.parseBoolAndExpr(); if (rhs == 0) { - return p.fail(.{ .InvalidToken = .{ .token = p.tok_i } }); + return p.fail(.invalid_token); } res = try p.addNode(.{ .tag = .bool_or, @@ -1316,7 +1342,7 @@ const Parser = struct { const 
and_token = p.nextToken(); const rhs = try p.parseCompareExpr(); if (rhs == 0) { - return p.fail(.{ .InvalidToken = .{ .token = p.tok_i } }); + return p.fail(.invalid_token); } res = try p.addNode(.{ .tag = .bool_and, @@ -1385,7 +1411,7 @@ const Parser = struct { _ = try p.parsePayload(); const rhs = try p.parseBitShiftExpr(); if (rhs == 0) { - return p.fail(.{ .InvalidToken = .{ .token = p.tok_i } }); + return p.fail(.invalid_token); } res = try p.addNode(.{ .tag = .@"catch", @@ -1413,7 +1439,7 @@ const Parser = struct { fn expectBitwiseExpr(p: *Parser) Error!Node.Index { const node = try p.parseBitwiseExpr(); if (node == 0) { - return p.fail(.{ .InvalidToken = .{ .token = p.tok_i } }); + return p.fail(.invalid_token); } else { return node; } @@ -1447,7 +1473,7 @@ const Parser = struct { fn expectBitShiftExpr(p: *Parser) Error!Node.Index { const node = try p.parseBitShiftExpr(); if (node == 0) { - return p.fail(.{ .InvalidToken = .{ .token = p.tok_i } }); + return p.fail(.invalid_token); } else { return node; } @@ -1487,7 +1513,7 @@ const Parser = struct { fn expectAdditionExpr(p: *Parser) Error!Node.Index { const node = try p.parseAdditionExpr(); if (node == 0) { - return p.fail(.{ .InvalidToken = .{ .token = p.tok_i } }); + return p.fail(.invalid_token); } return node; } @@ -1528,7 +1554,7 @@ const Parser = struct { fn expectMultiplyExpr(p: *Parser) Error!Node.Index { const node = try p.parseMultiplyExpr(); if (node == 0) { - return p.fail(.{ .InvalidToken = .{ .token = p.tok_i } }); + return p.fail(.invalid_token); } return node; } @@ -1566,7 +1592,7 @@ const Parser = struct { fn expectPrefixExpr(p: *Parser) Error!Node.Index { const node = try p.parsePrefixExpr(); if (node == 0) { - return p.fail(.{ .ExpectedPrefixExpr = .{ .token = p.tok_i } }); + return p.fail(.expected_prefix_expr); } return node; } @@ -1827,7 +1853,7 @@ const Parser = struct { fn expectTypeExpr(p: *Parser) Error!Node.Index { const node = try p.parseTypeExpr(); if (node == 0) { - return 
p.fail(.{ .ExpectedTypeExpr = .{ .token = p.tok_i } }); + return p.fail(.expected_type_expr); } return node; } @@ -1922,9 +1948,7 @@ const Parser = struct { switch (p.token_tags[p.tok_i]) { .keyword_for => return p.parseForExpr(), .keyword_while => return p.parseWhileExpr(), - else => return p.fail(.{ - .ExpectedInlinable = .{ .token = p.tok_i }, - }), + else => return p.fail(.expected_inlinable), } }, .keyword_for => { @@ -1950,9 +1974,7 @@ const Parser = struct { switch (p.token_tags[p.tok_i]) { .keyword_for => return p.parseForExpr(), .keyword_while => return p.parseWhileExpr(), - else => return p.fail(.{ - .ExpectedInlinable = .{ .token = p.tok_i }, - }), + else => return p.fail(.expected_inlinable), } }, .keyword_for => return p.parseForExpr(), @@ -2170,20 +2192,13 @@ const Parser = struct { .r_brace => break, .colon, .r_paren, .r_bracket => { p.tok_i -= 1; - return p.fail(.{ - .ExpectedToken = .{ - .token = p.tok_i, - .expected_id = .r_brace, - }, - }); + return p.failExpected(.r_brace); }, else => { // This is likely just a missing comma; // give an error but continue parsing this list. 
p.tok_i -= 1; - try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, - }); + try p.warnExpected(.comma); }, } } @@ -2214,9 +2229,7 @@ const Parser = struct { }); } if (comma_one == null) { - try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, - }); + try p.warnExpected(.comma); } var init_list = std.ArrayList(Node.Index).init(p.gpa); @@ -2278,7 +2291,7 @@ const Parser = struct { res = node; } const lparen = (try p.expectTokenRecoverable(.l_paren)) orelse { - try p.warn(.{ .ExpectedParamList = .{ .token = p.tok_i } }); + try p.warn(.expected_param_list); return res; }; if (p.eatToken(.r_paren)) |_| { @@ -2304,9 +2317,7 @@ const Parser = struct { }); } if (comma_one == null) { - try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, - }); + try p.warnExpected(.comma); } var param_list = std.ArrayList(Node.Index).init(p.gpa); @@ -2352,21 +2363,11 @@ const Parser = struct { }, .colon, .r_brace, .r_bracket => { p.tok_i -= 1; - return p.fail(.{ - .ExpectedToken = .{ - .token = p.tok_i, - .expected_id = .r_paren, - }, - }); + return p.failExpected(.r_paren); }, else => { p.tok_i -= 1; - try p.warn(.{ - .ExpectedToken = .{ - .token = p.tok_i, - .expected_id = .comma, - }, - }); + try p.warnExpected(.comma); }, } } @@ -2405,9 +2406,7 @@ const Parser = struct { }); } if (comma_one == null) { - try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, - }); + try p.warnExpected(.comma); } var param_list = std.ArrayList(Node.Index).init(p.gpa); @@ -2453,21 +2452,11 @@ const Parser = struct { }, .colon, .r_brace, .r_bracket => { p.tok_i -= 1; - return p.fail(.{ - .ExpectedToken = .{ - .token = p.tok_i, - .expected_id = .r_paren, - }, - }); + return p.failExpected(.r_paren); }, else => { p.tok_i -= 1; - try p.warn(.{ - .ExpectedToken = .{ - .token = p.tok_i, - .expected_id = .comma, - }, - }); + try p.warnExpected(.comma); }, } } @@ -2645,9 +2634,7 @@ const Parser = struct { switch 
(p.token_tags[p.tok_i]) { .keyword_for => return p.parseForTypeExpr(), .keyword_while => return p.parseWhileTypeExpr(), - else => return p.fail(.{ - .ExpectedInlinable = .{ .token = p.tok_i }, - }), + else => return p.fail(.expected_inlinable), } }, .keyword_for => { @@ -2716,9 +2703,7 @@ const Parser = struct { }); } if (comma_one == null) { - try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, - }); + try p.warnExpected(.comma); } const field_init_two = try p.expectFieldInit(); const comma_two = p.eatToken(.comma); @@ -2733,9 +2718,7 @@ const Parser = struct { }); } if (comma_two == null) { - try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, - }); + try p.warnExpected(.comma); } var init_list = std.ArrayList(Node.Index).init(p.gpa); defer init_list.deinit(); @@ -2754,21 +2737,11 @@ const Parser = struct { .r_brace => break, .colon, .r_paren, .r_bracket => { p.tok_i -= 1; - return p.fail(.{ - .ExpectedToken = .{ - .token = p.tok_i, - .expected_id = .r_brace, - }, - }); + return p.failExpected(.r_brace); }, else => { p.tok_i -= 1; - try p.warn(.{ - .ExpectedToken = .{ - .token = p.tok_i, - .expected_id = .comma, - }, - }); + try p.warnExpected(.comma); }, } } @@ -2797,9 +2770,7 @@ const Parser = struct { }); } if (comma_one == null) { - try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, - }); + try p.warnExpected(.comma); } const elem_init_two = try p.expectExpr(); const comma_two = p.eatToken(.comma); @@ -2814,9 +2785,7 @@ const Parser = struct { }); } if (comma_two == null) { - try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, - }); + try p.warnExpected(.comma); } var init_list = std.ArrayList(Node.Index).init(p.gpa); defer init_list.deinit(); @@ -2835,21 +2804,11 @@ const Parser = struct { .r_brace => break, .colon, .r_paren, .r_bracket => { p.tok_i -= 1; - return p.fail(.{ - .ExpectedToken = .{ - .token = p.tok_i, - .expected_id = .r_brace, - }, - 
}); + return p.failExpected(.r_brace); }, else => { p.tok_i -= 1; - try p.warn(.{ - .ExpectedToken = .{ - .token = p.tok_i, - .expected_id = .comma, - }, - }); + try p.warnExpected(.comma); }, } } @@ -2892,20 +2851,13 @@ const Parser = struct { .r_brace => break, .colon, .r_paren, .r_bracket => { p.tok_i -= 1; - return p.fail(.{ - .ExpectedToken = .{ - .token = p.tok_i, - .expected_id = .r_brace, - }, - }); + return p.failExpected(.r_brace); }, else => { // This is likely just a missing comma; // give an error but continue parsing this list. p.tok_i -= 1; - try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, - }); + try p.warnExpected(.comma); }, } } @@ -2942,7 +2894,7 @@ const Parser = struct { fn expectPrimaryTypeExpr(p: *Parser) !Node.Index { const node = try p.parsePrimaryTypeExpr(); if (node == 0) { - return p.fail(.{ .ExpectedPrimaryTypeExpr = .{ .token = p.tok_i } }); + return p.fail(.expected_primary_type_expr); } return node; } @@ -3095,9 +3047,7 @@ const Parser = struct { else => { // This is likely just a missing comma; // give an error but continue parsing this list. - try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, - }); + try p.warnExpected(.comma); }, } } @@ -3112,9 +3062,7 @@ const Parser = struct { else => { // This is likely just a missing comma; // give an error but continue parsing this list. - try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, - }); + try p.warnExpected(.comma); }, } } @@ -3126,9 +3074,7 @@ const Parser = struct { else => { // This is likely just a missing comma; // give an error but continue parsing this list. 
- try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, - }); + try p.warnExpected(.comma); }, } } @@ -3238,7 +3184,7 @@ const Parser = struct { _ = p.eatToken(.colon) orelse return null_node; _ = try p.expectToken(.l_paren); const node = try p.parseAssignExpr(); - if (node == 0) return p.fail(.{ .ExpectedExprOrAssignment = .{ .token = p.tok_i } }); + if (node == 0) return p.fail(.expected_expr_or_assignment); _ = try p.expectToken(.r_paren); return node; } @@ -3418,9 +3364,7 @@ const Parser = struct { switch (p.token_tags[p.tok_i]) { .keyword_align => { if (result.align_node != 0) { - try p.warn(.{ - .ExtraAlignQualifier = .{ .token = p.tok_i }, - }); + try p.warn(.extra_align_qualifier); } p.tok_i += 1; _ = try p.expectToken(.l_paren); @@ -3436,27 +3380,21 @@ const Parser = struct { }, .keyword_const => { if (saw_const) { - try p.warn(.{ - .ExtraConstQualifier = .{ .token = p.tok_i }, - }); + try p.warn(.extra_const_qualifier); } p.tok_i += 1; saw_const = true; }, .keyword_volatile => { if (saw_volatile) { - try p.warn(.{ - .ExtraVolatileQualifier = .{ .token = p.tok_i }, - }); + try p.warn(.extra_volatile_qualifier); } p.tok_i += 1; saw_volatile = true; }, .keyword_allowzero => { if (saw_allowzero) { - try p.warn(.{ - .ExtraAllowZeroQualifier = .{ .token = p.tok_i }, - }); + try p.warn(.extra_allowzero_qualifier); } p.tok_i += 1; saw_allowzero = true; @@ -3539,11 +3477,10 @@ const Parser = struct { }, }), .invalid_periodasterisks => { - const period_asterisk = p.nextToken(); - try p.warn(.{ .AsteriskAfterPointerDereference = .{ .token = period_asterisk } }); + try p.warn(.asterisk_after_ptr_deref); return p.addNode(.{ .tag = .deref, - .main_token = period_asterisk, + .main_token = p.nextToken(), .data = .{ .lhs = lhs, .rhs = undefined, @@ -3569,7 +3506,7 @@ const Parser = struct { }), else => { p.tok_i += 1; - try p.warn(.{ .ExpectedSuffixOp = .{ .token = p.tok_i } }); + try p.warn(.expected_suffix_op); return null_node; }, }, @@ -3743,9 
+3680,7 @@ const Parser = struct { // This is likely just a missing comma; // give an error but continue parsing this list. p.tok_i -= 1; - try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, - }); + try p.warnExpected(.comma); }, } } else unreachable; @@ -3763,17 +3698,13 @@ const Parser = struct { .r_paren => return SmallSpan{ .zero_or_one = param_one }, .colon, .r_brace, .r_bracket => { p.tok_i -= 1; - return p.fail(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .r_paren }, - }); + return p.failExpected(.r_paren); }, else => { // This is likely just a missing comma; // give an error but continue parsing this list. p.tok_i -= 1; - try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, - }); + try p.warnExpected(.comma); }, } } else unreachable; @@ -3799,17 +3730,13 @@ const Parser = struct { .r_paren => return SmallSpan{ .multi = list.toOwnedSlice() }, .colon, .r_brace, .r_bracket => { p.tok_i -= 1; - return p.fail(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .r_paren }, - }); + return p.failExpected(.r_paren); }, else => { // This is likely just a missing comma; // give an error but continue parsing this list. p.tok_i -= 1; - try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, - }); + try p.warnExpected(.comma); }, } } @@ -3836,9 +3763,7 @@ const Parser = struct { else => { // This is likely just a missing comma; // give an error but continue parsing this list. - try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, - }); + try p.warnExpected(.comma); }, } } @@ -3852,9 +3777,7 @@ const Parser = struct { fn parseBuiltinCall(p: *Parser) !Node.Index { const builtin_token = p.assertToken(.builtin); _ = (try p.expectTokenRecoverable(.l_paren)) orelse { - try p.warn(.{ - .ExpectedParamList = .{ .token = p.tok_i }, - }); + try p.warn(.expected_param_list); // Pretend this was an identifier so we can continue parsing. 
return p.addNode(.{ .tag = .identifier, @@ -3901,9 +3824,7 @@ const Parser = struct { // This is likely just a missing comma; // give an error but continue parsing this list. p.tok_i -= 1; - try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, - }); + try p.warnExpected(.comma); }, } const param_two = try p.expectExpr(); @@ -3932,9 +3853,7 @@ const Parser = struct { // This is likely just a missing comma; // give an error but continue parsing this list. p.tok_i -= 1; - try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, - }); + try p.warnExpected(.comma); }, } @@ -3976,9 +3895,7 @@ const Parser = struct { // This is likely just a missing comma; // give an error but continue parsing this list. p.tok_i -= 1; - try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = .comma }, - }); + try p.warnExpected(.comma); }, } } @@ -4019,7 +3936,7 @@ const Parser = struct { fn expectStringLiteral(p: *Parser) !Node.Index { const node = try p.parseStringLiteral(); if (node == 0) { - return p.fail(.{ .ExpectedStringLiteral = .{ .token = p.tok_i } }); + return p.fail(.expected_string_literal); } return node; } @@ -4044,7 +3961,7 @@ const Parser = struct { const then_payload = try p.parsePtrPayload(); const then_expr = try bodyParseFn(p); - if (then_expr == 0) return p.fail(.{ .InvalidToken = .{ .token = p.tok_i } }); + if (then_expr == 0) return p.fail(.invalid_token); const else_token = p.eatToken(.keyword_else) orelse return p.addNode(.{ .tag = .if_simple, @@ -4056,7 +3973,7 @@ const Parser = struct { }); const else_payload = try p.parsePayload(); const else_expr = try bodyParseFn(p); - if (else_expr == 0) return p.fail(.{ .InvalidToken = .{ .token = p.tok_i } }); + if (else_expr == 0) return p.fail(.invalid_token); return p.addNode(.{ .tag = .@"if", @@ -4076,7 +3993,10 @@ const Parser = struct { if (p.eatToken(.doc_comment)) |tok| { var first_line = tok; if (tok > 0 and tokensOnSameLine(p, tok - 1, tok)) { - try 
p.warn(.{ .SameLineDocComment = .{ .token = tok } }); + try p.warnMsg(.{ + .tag = .same_line_doc_comment, + .token = tok, + }); first_line = p.eatToken(.doc_comment) orelse return null; } while (p.eatToken(.doc_comment)) |_| {} @@ -4102,16 +4022,18 @@ const Parser = struct { fn expectToken(p: *Parser, tag: Token.Tag) Error!TokenIndex { const token = p.nextToken(); if (p.token_tags[token] != tag) { - return p.fail(.{ .ExpectedToken = .{ .token = token, .expected_id = tag } }); + return p.failMsg(.{ + .tag = .expected_token, + .token = token, + .extra = .{ .expected_tag = tag }, + }); } return token; } fn expectTokenRecoverable(p: *Parser, tag: Token.Tag) !?TokenIndex { if (p.token_tags[p.tok_i] != tag) { - try p.warn(.{ - .ExpectedToken = .{ .token = p.tok_i, .expected_id = tag }, - }); + try p.warnExpected(tag); return null; } else { return p.nextToken(); diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index f8e992bb2e..dc653047df 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -127,7 +127,7 @@ test "zig fmt: decl between fields" { \\ b: usize, \\}; , &[_]Error{ - .DeclBetweenFields, + .decl_between_fields, }); } @@ -135,7 +135,7 @@ test "zig fmt: eof after missing comma" { try testError( \\foo() , &[_]Error{ - .ExpectedToken, + .expected_token, }); } @@ -3578,7 +3578,7 @@ test "zig fmt: file ends with struct field" { // \\const container = extern {}; // \\ // , &[_]Error{ -// .ExpectedExpr, +// .expected_expr, // .ExpectedVarDeclOrFn, // }); //} @@ -3598,12 +3598,12 @@ test "zig fmt: same line doc comment returns error" { \\/// comment \\ , &[_]Error{ - .SameLineDocComment, - .SameLineDocComment, - .UnattachedDocComment, - .SameLineDocComment, - .SameLineDocComment, - .UnattachedDocComment, + .same_line_doc_comment, + .same_line_doc_comment, + .unattached_doc_comment, + .same_line_doc_comment, + .same_line_doc_comment, + .unattached_doc_comment, }); } @@ -3678,10 +3678,10 @@ test "zig fmt: hexadeciaml float 
literals with underscore separators" { } test "zig fmt: C var args" { - try testCanonical( - \\pub extern "c" fn printf(format: [*:0]const u8, ...) c_int; - \\ - ); + try testCanonical( + \\pub extern "c" fn printf(format: [*:0]const u8, ...) c_int; + \\ + ); } //test "zig fmt: Only indent multiline string literals in function calls" { @@ -4037,8 +4037,8 @@ test "recovery: top level" { \\test "" {inline} \\test "" {inline} , &[_]Error{ - .ExpectedInlinable, - .ExpectedInlinable, + .expected_inlinable, + .expected_inlinable, }); } @@ -4049,8 +4049,8 @@ test "recovery: block statements" { \\ inline; \\} , &[_]Error{ - .InvalidToken, - .ExpectedInlinable, + .invalid_token, + .expected_inlinable, }); } @@ -4066,10 +4066,10 @@ test "recovery: block statements" { // \\ } // \\} // , &[_]Error{ -// .ExpectedToken, -// .ExpectedToken, -// .InvalidAnd, -// .InvalidToken, +// .expected_token, +// .expected_token, +// .invalid_and, +// .invalid_token, // }); //} @@ -4078,8 +4078,8 @@ test "recovery: extra qualifier" { \\const a: *const const u8; \\test "" , &[_]Error{ - .ExtraConstQualifier, - .ExpectedLBrace, + .extra_const_qualifier, + .expected_block, }); } @@ -4091,8 +4091,8 @@ test "recovery: extra qualifier" { // \\test "" // , &[_]Error{ // .ExpectedReturnType, -// .InvalidAnd, -// .ExpectedLBrace, +// .invalid_and, +// .expected_block, // }); //} @@ -4105,10 +4105,10 @@ test "recovery: extra qualifier" { // \\ async a && b; // \\} // , &[_]Error{ -// .ExpectedToken, +// .expected_token, // .ExpectedPubItem, // .ExpectedParamList, -// .InvalidAnd, +// .invalid_and, // }); // try testError( // \\threadlocal test "" { @@ -4117,7 +4117,7 @@ test "recovery: extra qualifier" { // , &[_]Error{ // .ExpectedVarDecl, // .ExpectedParamList, -// .InvalidAnd, +// .invalid_and, // }); //} @@ -4126,13 +4126,13 @@ test "recovery: extra qualifier" { // \\inline test "" { a && b; } // , &[_]Error{ // .ExpectedFn, -// .InvalidAnd, +// .invalid_and, // }); // try testError( // \\extern 
"" test "" { a && b; } // , &[_]Error{ // .ExpectedVarDeclOrFn, -// .InvalidAnd, +// .invalid_and, // }); //} @@ -4144,12 +4144,12 @@ test "recovery: extra qualifier" { // \\ @foo // \\} // , &[_]Error{ -// .InvalidAnd, -// .ExpectedToken, -// .InvalidAnd, -// .ExpectedToken, +// .invalid_and, +// .expected_token, +// .invalid_and, +// .expected_token, // .ExpectedParamList, -// .ExpectedToken, +// .expected_token, // }); //} @@ -4163,12 +4163,12 @@ test "recovery: extra qualifier" { // \\ a && b // \\} // , &[_]Error{ -// .ExpectedExpr, -// .ExpectedToken, -// .ExpectedToken, -// .ExpectedContainerMembers, -// .InvalidAnd, -// .ExpectedToken, +// .expected_expr, +// .expected_token, +// .expected_token, +// .expected_container_members, +// .invalid_and, +// .expected_token, // }); //} @@ -4178,7 +4178,7 @@ test "recovery: extra qualifier" { // \\ a(comptime T: type) // \\} // , &[_]Error{ -// .ExpectedToken, +// .expected_token, // }); //} @@ -4189,10 +4189,10 @@ test "recovery: extra qualifier" { // \\ a && b; // \\} // , &[_]Error{ -// .ExpectedContainerMembers, -// .ExpectedContainerMembers, -// .ExpectedContainerMembers, -// .InvalidAnd, +// .expected_container_members, +// .expected_container_members, +// .expected_container_members, +// .invalid_and, // }); //} // @@ -4202,7 +4202,7 @@ test "recovery: mismatched bracket at top level" { \\ arr: 128]?G \\}; , &[_]Error{ - .ExpectedToken, + .expected_token, }); } @@ -4212,9 +4212,9 @@ test "recovery: mismatched bracket at top level" { // \\ error && foo; // \\} // , &[_]Error{ -// .ExpectedToken, +// .expected_token, // .ExpectedIdentifier, -// .InvalidAnd, +// .invalid_and, // }); //} @@ -4224,15 +4224,15 @@ test "recovery: mismatched bracket at top level" { // \\ var sequence = "repeat".*** 10; // \\} // , &[_]Error{ -// .AsteriskAfterPointerDereference, +// .asterisk_after_ptr_deref, // }); // try testError( // \\test "" { // \\ var sequence = "repeat".** 10&&a; // \\} // , &[_]Error{ -// 
.AsteriskAfterPointerDereference, -// .InvalidAnd, +// .asterisk_after_ptr_deref, +// .invalid_and, // }); //} @@ -4245,10 +4245,10 @@ test "recovery: mismatched bracket at top level" { // \\ a && b; // \\} // , &[_]Error{ -// .ExpectedSemiOrElse, -// .ExpectedSemiOrElse, -// .ExpectedSemiOrElse, -// .InvalidAnd, +// .expected_semi_or_else, +// .expected_semi_or_else, +// .expected_semi_or_else, +// .invalid_and, // }); //} @@ -4256,7 +4256,7 @@ test "recovery: invalid comptime" { try testError( \\comptime , &[_]Error{ - .ExpectedBlockOrField, + .expected_block_or_field, }); } @@ -4264,12 +4264,12 @@ test "recovery: missing block after for/while loops" { try testError( \\test "" { while (foo) } , &[_]Error{ - .ExpectedBlockOrAssignment, + .expected_block_or_assignment, }); try testError( \\test "" { for (foo) |bar| } , &[_]Error{ - .ExpectedBlockOrAssignment, + .expected_block_or_assignment, }); } @@ -4288,9 +4288,8 @@ fn testParse(source: []const u8, allocator: *mem.Allocator, anything_changed: *b defer tree.deinit(allocator); for (tree.errors) |parse_error| { - const error_token = tree.errorToken(parse_error); - const token_start = tree.tokens.items(.start)[error_token]; - const loc = tree.tokenLocation(0, error_token); + const token_start = tree.tokens.items(.start)[parse_error.token]; + const loc = tree.tokenLocation(0, parse_error.token); try stderr.print("(memory buffer):{d}:{d}: error: ", .{ loc.line + 1, loc.column + 1 }); try tree.renderError(parse_error, stderr); try stderr.print("\n{s}\n", .{source[loc.line_start..loc.line_end]}); @@ -4362,7 +4361,7 @@ fn testCanonical(source: []const u8) !void { return testTransform(source, source); } -const Error = std.meta.Tag(std.zig.ast.Error); +const Error = std.zig.ast.Error.Tag; fn testError(source: []const u8, expected_errors: []const Error) !void { var tree = try std.zig.parse(std.testing.allocator, source); @@ -4370,6 +4369,6 @@ fn testError(source: []const u8, expected_errors: []const Error) !void { 
std.testing.expect(tree.errors.len == expected_errors.len); for (expected_errors) |expected, i| { - std.testing.expectEqual(expected, tree.errors[i]); + std.testing.expectEqual(expected, tree.errors[i].tag); } } diff --git a/src/Module.zig b/src/Module.zig index 35819c5d44..8f2ac3721e 100644 --- a/src/Module.zig +++ b/src/Module.zig @@ -1723,7 +1723,7 @@ pub fn getAstTree(self: *Module, root_scope: *Scope.File) !*const ast.Tree { err_msg.* = .{ .src_loc = .{ .file_scope = root_scope, - .byte_offset = tree.tokens.items(.start)[parse_err.loc()], + .byte_offset = tree.tokens.items(.start)[parse_err.token], }, .msg = msg.toOwnedSlice(), }; diff --git a/src/main.zig b/src/main.zig index c0ac41d9bf..09d791cfb5 100644 --- a/src/main.zig +++ b/src/main.zig @@ -2898,7 +2898,7 @@ fn printErrMsgToFile( .on => true, .off => false, }; - const lok_token = parse_error.loc(); + const lok_token = parse_error.token; const token_starts = tree.tokens.items(.start); const token_tags = tree.tokens.items(.tag); From 6959b177ef27f88d6fc9ef1f0a6301f58d36d730 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Fri, 19 Feb 2021 20:26:25 -0700 Subject: [PATCH 111/173] stage2: test harness: panic on OOM rather than assert also add a prefix to test names for C backend --- src/test.zig | 24 +++++++++++++----------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/src/test.zig b/src/test.zig index 07eb001e14..a28787e952 100644 --- a/src/test.zig +++ b/src/test.zig @@ -155,7 +155,7 @@ pub const TestContext = struct { self.updates.append(.{ .src = src, .case = .{ .Header = result }, - }) catch unreachable; + }) catch @panic("out of memory"); } /// Adds a subcase in which the module is updated with `src`, compiled, @@ -164,7 +164,7 @@ pub const TestContext = struct { self.updates.append(.{ .src = src, .case = .{ .Execution = result }, - }) catch unreachable; + }) catch @panic("out of memory"); } /// Adds a subcase in which the module is updated with `src`, compiled, @@ -173,7 +173,7 @@ 
pub const TestContext = struct { self.updates.append(.{ .src = src, .case = .{ .CompareObjectFile = result }, - }) catch unreachable; + }) catch @panic("out of memory"); } /// Adds a subcase in which the module is updated with `src`, which @@ -181,7 +181,7 @@ pub const TestContext = struct { /// for the expected reasons, given in sequential order in `errors` in /// the form `:line:column: error: message`. pub fn addError(self: *Case, src: [:0]const u8, errors: []const []const u8) void { - var array = self.updates.allocator.alloc(ErrorMsg, errors.len) catch unreachable; + var array = self.updates.allocator.alloc(ErrorMsg, errors.len) catch @panic("out of memory"); for (errors) |err_msg_line, i| { if (std.mem.startsWith(u8, err_msg_line, "error: ")) { array[i] = .{ @@ -224,7 +224,7 @@ pub const TestContext = struct { }, }; } - self.updates.append(.{ .src = src, .case = .{ .Error = array } }) catch unreachable; + self.updates.append(.{ .src = src, .case = .{ .Error = array } }) catch @panic("out of memory"); } /// Adds a subcase in which the module is updated with `src`, and @@ -247,7 +247,7 @@ pub const TestContext = struct { .output_mode = .Exe, .extension = extension, .files = std.ArrayList(File).init(ctx.cases.allocator), - }) catch unreachable; + }) catch @panic("out of memory"); return &ctx.cases.items[ctx.cases.items.len - 1]; } @@ -262,15 +262,17 @@ pub const TestContext = struct { } pub fn exeFromCompiledC(ctx: *TestContext, name: []const u8, target: CrossTarget) *Case { + const prefixed_name = std.fmt.allocPrint(ctx.cases.allocator, "CBE: {s}", .{name}) catch + @panic("out of memory"); ctx.cases.append(Case{ - .name = name, + .name = prefixed_name, .target = target, .updates = std.ArrayList(Update).init(ctx.cases.allocator), .output_mode = .Exe, .extension = .Zig, .object_format = .c, .files = std.ArrayList(File).init(ctx.cases.allocator), - }) catch unreachable; + }) catch @panic("out of memory"); return &ctx.cases.items[ctx.cases.items.len - 1]; } @@ 
-285,7 +287,7 @@ pub const TestContext = struct { .extension = .Zig, .files = std.ArrayList(File).init(ctx.cases.allocator), .llvm_backend = true, - }) catch unreachable; + }) catch @panic("out of memory"); return &ctx.cases.items[ctx.cases.items.len - 1]; } @@ -302,7 +304,7 @@ pub const TestContext = struct { .output_mode = .Obj, .extension = extension, .files = std.ArrayList(File).init(ctx.cases.allocator), - }) catch unreachable; + }) catch @panic("out of memory"); return &ctx.cases.items[ctx.cases.items.len - 1]; } @@ -326,7 +328,7 @@ pub const TestContext = struct { .extension = ext, .object_format = .c, .files = std.ArrayList(File).init(ctx.cases.allocator), - }) catch unreachable; + }) catch @panic("out of memory"); return &ctx.cases.items[ctx.cases.items.len - 1]; } From 70761d7c52cd3634e086b0914c9520ef4dc01eee Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Fri, 19 Feb 2021 20:27:06 -0700 Subject: [PATCH 112/173] stage2: remove incorrect newlines from log statements --- src/Compilation.zig | 4 ++-- src/Module.zig | 12 ++++++------ 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/src/Compilation.zig b/src/Compilation.zig index b3ee73f03f..227355de93 100644 --- a/src/Compilation.zig +++ b/src/Compilation.zig @@ -1935,7 +1935,7 @@ pub fn cImport(comp: *Compilation, c_src: []const u8) !CImportResult { "o", &digest, cimport_zig_basename, }); if (comp.verbose_cimport) { - log.info("C import output: {s}\n", .{out_zig_path}); + log.info("C import output: {s}", .{out_zig_path}); } return CImportResult{ .out_zig_path = out_zig_path, @@ -2999,7 +2999,7 @@ pub fn updateSubCompilation(sub_compilation: *Compilation) !void { for (errors.list) |full_err_msg| { switch (full_err_msg) { .src => |src| { - log.err("{s}:{d}:{d}: {s}\n", .{ + log.err("{s}:{d}:{d}: {s}", .{ src.src_path, src.line + 1, src.column + 1, diff --git a/src/Module.zig b/src/Module.zig index 8f2ac3721e..9a918321c4 100644 --- a/src/Module.zig +++ b/src/Module.zig @@ -924,7 +924,7 @@ pub fn 
ensureDeclAnalyzed(mod: *Module, decl: *Decl) InnerError!void { .complete => return, .outdated => blk: { - log.debug("re-analyzing {s}\n", .{decl.name}); + log.debug("re-analyzing {s}", .{decl.name}); // The exports this Decl performs will be re-discovered, so we remove them here // prior to re-analysis. @@ -1943,7 +1943,7 @@ pub fn analyzeContainer(mod: *Module, container_scope: *Scope.Container) !void { // Handle explicitly deleted decls from the source code. Not to be confused // with when we delete decls because they are no longer referenced. for (deleted_decls.items()) |entry| { - log.debug("noticed '{s}' deleted from source\n", .{entry.key.name}); + log.debug("noticed '{s}' deleted from source", .{entry.key.name}); try mod.deleteDecl(entry.key); } } @@ -2087,7 +2087,7 @@ pub fn deleteDecl(self: *Module, decl: *Decl) !void { // not be present in the set, and this does nothing. decl.container.removeDecl(decl); - log.debug("deleting decl '{s}'\n", .{decl.name}); + log.debug("deleting decl '{s}'", .{decl.name}); const name_hash = decl.fullyQualifiedNameHash(); self.decl_table.removeAssertDiscard(name_hash); // Remove itself from its dependencies, because we are about to destroy the decl pointer. 
@@ -2189,18 +2189,18 @@ pub fn analyzeFnBody(self: *Module, decl: *Decl, func: *Fn) !void { defer inner_block.instructions.deinit(self.gpa); func.state = .in_progress; - log.debug("set {s} to in_progress\n", .{decl.name}); + log.debug("set {s} to in_progress", .{decl.name}); try zir_sema.analyzeBody(self, &inner_block, func.zir); const instructions = try arena.allocator.dupe(*Inst, inner_block.instructions.items); func.state = .success; func.body = .{ .instructions = instructions }; - log.debug("set {s} to success\n", .{decl.name}); + log.debug("set {s} to success", .{decl.name}); } fn markOutdatedDecl(self: *Module, decl: *Decl) !void { - log.debug("mark {s} outdated\n", .{decl.name}); + log.debug("mark {s} outdated", .{decl.name}); try self.comp.work_queue.writeItem(.{ .analyze_decl = decl }); if (self.failed_decls.swapRemove(decl)) |entry| { entry.value.destroy(self.gpa); From d8560edc2941016a12a03df36035cabd537dde77 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Fri, 19 Feb 2021 20:28:47 -0700 Subject: [PATCH 113/173] stage2: fix incorrect ast.Tree.getNodeSource impl --- lib/std/zig/ast.zig | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index fda2916af4..943ddd30db 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -1071,8 +1071,8 @@ pub const Tree = struct { const first_token = tree.firstToken(node); const last_token = tree.lastToken(node); const start = token_starts[first_token]; - const len = tree.tokenSlice(last_token).len; - return tree.source[start..][0..len]; + const end = token_starts[last_token] + tree.tokenSlice(last_token).len; + return tree.source[start..end]; } pub fn globalVarDecl(tree: Tree, node: Node.Index) full.VarDecl { From 5b597a16c6c4ac36a8d2004d5eeed62c38c75253 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Fri, 19 Feb 2021 20:57:06 -0700 Subject: [PATCH 114/173] stage2: fix not setting up ZIR arg instruction correctly This is a regression from when I 
briefly flirted with changing how arg ZIR instructions work in this branch, and then failed to revert it correctly. --- src/Module.zig | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/Module.zig b/src/Module.zig index 9a918321c4..7af4648c79 100644 --- a/src/Module.zig +++ b/src/Module.zig @@ -1340,13 +1340,15 @@ fn astgenAndSemaFn( const name_token = param.name_token.?; const src = token_starts[name_token]; const param_name = try mod.identifierTokenString(&gen_scope.base, name_token); - const arg = try decl_arena.allocator.create(zir.Inst.NoOp); + const arg = try decl_arena.allocator.create(zir.Inst.Arg); arg.* = .{ .base = .{ .tag = .arg, .src = src, }, - .positionals = .{}, + .positionals = .{ + .name = param_name, + }, .kw_args = .{}, }; gen_scope.instructions.items[i] = &arg.base; @@ -3929,7 +3931,9 @@ pub fn validateVarType(mod: *Module, scope: *Scope, src: usize, ty: Type) !void /// Identifier token -> String (allocated in scope.arena()) pub fn identifierTokenString(mod: *Module, scope: *Scope, token: ast.TokenIndex) InnerError![]const u8 { const tree = scope.tree(); + const token_tags = tree.tokens.items(.tag); const token_starts = tree.tokens.items(.start); + assert(token_tags[token] == .identifier); const ident_name = tree.tokenSlice(token); if (mem.startsWith(u8, ident_name, "@")) { From ed1e5cb3f62e12f3939e6f20d387a5c205a44a3d Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Fri, 19 Feb 2021 21:47:11 -0700 Subject: [PATCH 115/173] stage2: fix a couple off by one errors All stage2 tests are passing again in this branch. 
Remaining checklist for this branch: * get the rest of the zig fmt test cases passing - re-enable the translate-c test case that is blocking on this * implement the 2 `@panic(TODO)`'s in parse.zig * use fn_proto not fn_decl for extern function declarations --- lib/std/zig/ast.zig | 2 ++ src/astgen.zig | 4 ++-- test/stage2/test.zig | 12 ++++++++---- 3 files changed, 12 insertions(+), 6 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 943ddd30db..0b0445d39a 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -2566,8 +2566,10 @@ pub const Node = struct { /// before the final rbrace. struct_init_comma, /// `lhs(rhs)`. rhs can be omitted. + /// main_token is the lparen. call_one, /// `lhs(rhs,)`. rhs can be omitted. + /// main_token is the lparen. call_one_comma, /// `async lhs(rhs)`. rhs can be omitted. async_call_one, diff --git a/src/astgen.zig b/src/astgen.zig index fc4b2f6b23..583e748035 100644 --- a/src/astgen.zig +++ b/src/astgen.zig @@ -901,7 +901,7 @@ fn labeledBlockExpr( const token_tags = tree.tokens.items(.tag); const lbrace = main_tokens[block_node]; - const label_token = lbrace - 1; + const label_token = lbrace - 2; assert(token_tags[label_token] == .identifier); const src = token_starts[lbrace]; @@ -3072,7 +3072,7 @@ fn multilineStringLiteral( // Count the number of bytes to allocate. 
const len: usize = len: { var tok_i = start; - var len: usize = 0; + var len: usize = end - start + 1; while (tok_i <= end) : (tok_i += 1) { // 2 for the '//' + 1 for '\n' len += tree.tokenSlice(tok_i).len - 3; diff --git a/test/stage2/test.zig b/test/stage2/test.zig index 486edeb864..54a8f6ae1a 100644 --- a/test/stage2/test.zig +++ b/test/stage2/test.zig @@ -1088,7 +1088,7 @@ pub fn addCases(ctx: *TestContext) !void { \\ _ = foo; \\} \\extern var foo; - , &[_][]const u8{":4:1: error: unable to infer variable type"}); + , &[_][]const u8{":4:8: error: unable to infer variable type"}); } { @@ -1194,12 +1194,12 @@ pub fn addCases(ctx: *TestContext) !void { \\comptime { \\ foo: while (true) {} \\} - , &[_][]const u8{":2:5: error: unused while label"}); + , &[_][]const u8{":2:5: error: unused while loop label"}); case.addError( \\comptime { \\ foo: for ("foo") |_| {} \\} - , &[_][]const u8{":2:5: error: unused for label"}); + , &[_][]const u8{":2:5: error: unused for loop label"}); case.addError( \\comptime { \\ blk: {blk: {}} @@ -1294,6 +1294,10 @@ pub fn addCases(ctx: *TestContext) !void { , "", ); + // TODO this should be :8:21 not :8:19. we need to improve source locations + // to be relative to the containing Decl so that they can survive when the byte + // offset of a previous Decl changes. Here the change from 7 to 999 introduces + // +2 to the byte offset and makes the error location wrong by 2 bytes. 
case.addError( \\export fn _start() noreturn { \\ const y = fibonacci(999); @@ -1314,7 +1318,7 @@ pub fn addCases(ctx: *TestContext) !void { \\ ); \\ unreachable; \\} - , &[_][]const u8{":8:10: error: evaluation exceeded 1000 backwards branches"}); + , &[_][]const u8{":8:19: error: evaluation exceeded 1000 backwards branches"}); } { var case = ctx.exe("orelse at comptime", linux_x64); From 669c2054a83cac594e5bbca4a7ca677e3b3a1a0d Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Fri, 19 Feb 2021 22:55:42 -0700 Subject: [PATCH 116/173] stage2: debug line nops supports bigger padding --- src/link/Elf.zig | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/link/Elf.zig b/src/link/Elf.zig index 1a015ba0c2..e1a6a1dff1 100644 --- a/src/link/Elf.zig +++ b/src/link/Elf.zig @@ -3037,7 +3037,7 @@ const min_nop_size = 2; /// Writes to the file a buffer, prefixed and suffixed by the specified number of /// bytes of NOPs. Asserts each padding size is at least `min_nop_size` and total padding bytes -/// are less than 126,976 bytes (if this limit is ever reached, this function can be +/// are less than 1044480 bytes (if this limit is ever reached, this function can be /// improved to make more than one pwritev call, or the limit can be raised by a fixed /// amount by increasing the length of `vecs`). 
fn pwriteDbgLineNops( @@ -3052,7 +3052,7 @@ fn pwriteDbgLineNops( const page_of_nops = [1]u8{DW.LNS_negate_stmt} ** 4096; const three_byte_nop = [3]u8{ DW.LNS_advance_pc, 0b1000_0000, 0 }; - var vecs: [32]std.os.iovec_const = undefined; + var vecs: [256]std.os.iovec_const = undefined; var vec_index: usize = 0; { var padding_left = prev_padding_size; From 4074e79748ad9ecc39a4127cd1c28c115efff56a Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Sat, 20 Feb 2021 13:32:07 +0200 Subject: [PATCH 117/173] translate-c: use global scope for typedef/record/enum type translation if needed If the type is a reference to a global declaration that has not yet been translated we need to use the global scope for translation so that other functions can also reference it. --- src/clang.zig | 2 -- src/translate_c.zig | 21 ++++++++++++++++++--- test/run_translated_c.zig | 19 +++++++++++++++++++ 3 files changed, 37 insertions(+), 5 deletions(-) diff --git a/src/clang.zig b/src/clang.zig index 5adb858b90..270f477ddb 100644 --- a/src/clang.zig +++ b/src/clang.zig @@ -697,8 +697,6 @@ pub const ReturnStmt = opaque { extern fn ZigClangReturnStmt_getRetValue(*const ReturnStmt) ?*const Expr; }; -pub const SkipFunctionBodiesScope = opaque {}; - pub const SourceManager = opaque { pub const getSpellingLoc = ZigClangSourceManager_getSpellingLoc; extern fn ZigClangSourceManager_getSpellingLoc(*const SourceManager, Loc: SourceLocation) SourceLocation; diff --git a/src/translate_c.zig b/src/translate_c.zig index f29dfccfa3..dc13e5e380 100644 --- a/src/translate_c.zig +++ b/src/translate_c.zig @@ -3741,7 +3741,12 @@ fn transType(c: *Context, scope: *Scope, ty: *const clang.Type, source_loc: clan const typedef_ty = @ptrCast(*const clang.TypedefType, ty); const typedef_decl = typedef_ty.getDecl(); - try transTypeDef(c, scope, typedef_decl); + var trans_scope = scope; + if (@ptrCast(*const clang.Decl, typedef_decl).castToNamedDecl()) |named_decl| { + const decl_name = try 
c.str(named_decl.getName_bytes_begin()); + if (c.global_names.get(decl_name)) |_| trans_scope = &c.global_scope.base; + } + try transTypeDef(c, trans_scope, typedef_decl); const name = c.decl_table.get(@ptrToInt(typedef_decl.getCanonicalDecl())).?; return Tag.identifier.create(c.arena, name); }, @@ -3749,7 +3754,12 @@ fn transType(c: *Context, scope: *Scope, ty: *const clang.Type, source_loc: clan const record_ty = @ptrCast(*const clang.RecordType, ty); const record_decl = record_ty.getDecl(); - try transRecordDecl(c, scope, record_decl); + var trans_scope = scope; + if (@ptrCast(*const clang.Decl, record_decl).castToNamedDecl()) |named_decl| { + const decl_name = try c.str(named_decl.getName_bytes_begin()); + if (c.global_names.get(decl_name)) |_| trans_scope = &c.global_scope.base; + } + try transRecordDecl(c, trans_scope, record_decl); const name = c.decl_table.get(@ptrToInt(record_decl.getCanonicalDecl())).?; return Tag.identifier.create(c.arena, name); }, @@ -3757,7 +3767,12 @@ fn transType(c: *Context, scope: *Scope, ty: *const clang.Type, source_loc: clan const enum_ty = @ptrCast(*const clang.EnumType, ty); const enum_decl = enum_ty.getDecl(); - try transEnumDecl(c, scope, enum_decl); + var trans_scope = scope; + if (@ptrCast(*const clang.Decl, enum_decl).castToNamedDecl()) |named_decl| { + const decl_name = try c.str(named_decl.getName_bytes_begin()); + if (c.global_names.get(decl_name)) |_| trans_scope = &c.global_scope.base; + } + try transEnumDecl(c, trans_scope, enum_decl); const name = c.decl_table.get(@ptrToInt(enum_decl.getCanonicalDecl())).?; return Tag.identifier.create(c.arena, name); }, diff --git a/test/run_translated_c.zig b/test/run_translated_c.zig index 0b72ed2926..85b0f19c88 100644 --- a/test/run_translated_c.zig +++ b/test/run_translated_c.zig @@ -3,6 +3,25 @@ const tests = @import("tests.zig"); const nl = std.cstr.line_sep; pub fn addCases(cases: *tests.RunTranslatedCContext) void { + cases.add("use global scope for record/enum/typedef 
type transalation if needed", + \\void bar(void); + \\void baz(void); + \\struct foo { int x; }; + \\void bar() { + \\ struct foo tmp; + \\} + \\ + \\void baz() { + \\ struct foo tmp; + \\} + \\ + \\int main(void) { + \\ bar(); + \\ baz(); + \\ return 0; + \\} + , ""); + cases.add("failed macros are only declared once", \\#define FOO = \\#define FOO = From 88d0e77b9747152a923e4a4479988924de0fe26f Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Sun, 21 Feb 2021 00:18:20 -0700 Subject: [PATCH 118/173] parse: implement error for invalid bit range and alignment --- lib/std/zig/ast.zig | 8 ++++++++ lib/std/zig/parse.zig | 16 ++++++++-------- lib/std/zig/parser_test.zig | 16 ++++++++++++++++ 3 files changed, 32 insertions(+), 8 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 0b0445d39a..785c58b067 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -262,6 +262,12 @@ pub const Tree = struct { .extra_volatile_qualifier => { return stream.writeAll("extra volatile qualifier"); }, + .invalid_align => { + return stream.writeAll("alignment not allowed on arrays"); + }, + .invalid_bit_range => { + return stream.writeAll("bit range not allowed on slices and arrays"); + }, .invalid_token => { return stream.print("invalid token '{s}'", .{ token_tags[parse_error.token].symbol(), @@ -2323,6 +2329,8 @@ pub const Error = struct { extra_allowzero_qualifier, extra_const_qualifier, extra_volatile_qualifier, + invalid_align, + invalid_bit_range, invalid_token, same_line_doc_comment, unattached_doc_comment, diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig index a557b57ad4..c980602b7c 100644 --- a/lib/std/zig/parse.zig +++ b/lib/std/zig/parse.zig @@ -1777,10 +1777,10 @@ const Parser = struct { const mods = try p.parsePtrModifiers(); const elem_type = try p.expectTypeExpr(); if (mods.bit_range_start != 0) { - @panic("TODO implement this error"); - //try p.warn(.{ - // .BitRangeInvalid = .{ .node = mods.bit_range_start }, - //}); + try 
p.warnMsg(.{ + .tag = .invalid_bit_range, + .token = p.nodes.items(.main_token)[mods.bit_range_start], + }); } if (len_expr == 0) { if (sentinel == 0) { @@ -1816,10 +1816,10 @@ const Parser = struct { } } else { if (mods.align_node != 0) { - @panic("TODO implement this error"); - //try p.warn(.{ - // .AlignInvalid = .{ .node = mods.align_node }, - //}); + try p.warnMsg(.{ + .tag = .invalid_align, + .token = p.nodes.items(.main_token)[mods.align_node], + }); } if (sentinel == 0) { return p.addNode(.{ diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index dc653047df..3b58cb43ea 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -4032,6 +4032,22 @@ test "zig fmt: trailing comma should force multiline 1 column" { // ); //} +test "zig fmt: error for invalid bit range" { + try testError( + \\var x: []align(0:0:0)u8 = bar; + , &[_]Error{ + .invalid_bit_range, + }); +} + +test "zig fmt: error for invalid align" { + try testError( + \\var x: [10]align(10)u8 = bar; + , &[_]Error{ + .invalid_align, + }); +} + test "recovery: top level" { try testError( \\test "" {inline} From 15603f403c9ca91f7530798a5a7751cace284a28 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Sun, 21 Feb 2021 16:01:22 -0700 Subject: [PATCH 119/173] AST: use fn_proto not fn_decl for extern decls saves a few bytes per extern function declaration --- lib/std/zig/ast.zig | 13 +++++++++++-- lib/std/zig/parse.zig | 11 ++--------- lib/std/zig/render.zig | 34 +++++++++++++++++++++++++++------- 3 files changed, 40 insertions(+), 18 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 785c58b067..72c2f82d09 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -2642,20 +2642,29 @@ pub const Node = struct { @"return", /// `fn(a: lhs) rhs`. lhs can be omitted. /// anytype and ... parameters are omitted from the AST tree. + /// main_token is the `fn` keyword. + /// extern function declarations use this tag. 
fn_proto_simple, /// `fn(a: b, c: d) rhs`. `sub_range_list[lhs]`. /// anytype and ... parameters are omitted from the AST tree. + /// main_token is the `fn` keyword. + /// extern function declarations use this tag. fn_proto_multi, /// `fn(a: b) rhs linksection(e) callconv(f)`. `FnProtoOne[lhs]`. /// zero or one parameters. /// anytype and ... parameters are omitted from the AST tree. + /// main_token is the `fn` keyword. + /// extern function declarations use this tag. fn_proto_one, /// `fn(a: b, c: d) rhs linksection(e) callconv(f)`. `FnProto[lhs]`. /// anytype and ... parameters are omitted from the AST tree. + /// main_token is the `fn` keyword. + /// extern function declarations use this tag. fn_proto, /// lhs is the fn_proto. - /// rhs is the function body block if non-zero. - /// if rhs is zero, the function decl has no body (e.g. an extern function) + /// rhs is the function body block. + /// Note that extern function declarations use the fn_proto tags rather + /// than this one. fn_decl, /// `anyframe->rhs`. main_token is `anyframe`. `lhs` is arrow token index. 
anyframe_type, diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig index c980602b7c..fc57084ad8 100644 --- a/lib/std/zig/parse.zig +++ b/lib/std/zig/parse.zig @@ -542,15 +542,8 @@ const Parser = struct { if (fn_proto != 0) { switch (p.token_tags[p.tok_i]) { .semicolon => { - const semicolon_token = p.nextToken(); - return p.addNode(.{ - .tag = .fn_decl, - .main_token = p.nodes.items(.main_token)[fn_proto], - .data = .{ - .lhs = fn_proto, - .rhs = 0, - }, - }); + p.tok_i += 1; + return fn_proto; }, .l_brace => { const body_block = try p.parseBlock(); diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 96721f27c0..48746ec391 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -81,19 +81,39 @@ fn renderMember(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) E while (i < fn_token) : (i += 1) { try renderToken(ais, tree, i, .space); } - if (datas[decl].rhs != 0) { - try renderExpression(ais, tree, fn_proto, .space); - return renderExpression(ais, tree, datas[decl].rhs, space); - } else { - try renderExpression(ais, tree, fn_proto, .none); - return renderToken(ais, tree, tree.lastToken(fn_proto) + 1, space); // semicolon - } + assert(datas[decl].rhs != 0); + try renderExpression(ais, tree, fn_proto, .space); + return renderExpression(ais, tree, datas[decl].rhs, space); }, .fn_proto_simple, .fn_proto_multi, .fn_proto_one, .fn_proto, => { + // Extern function prototypes are parsed as these tags. + // Go back to the first token we should render here. 
+ const fn_token = main_tokens[decl]; + var i = fn_token; + while (i > 0) { + i -= 1; + switch (token_tags[i]) { + .keyword_extern, + .keyword_export, + .keyword_pub, + .string_literal, + .keyword_inline, + .keyword_noinline, + => continue, + + else => { + i += 1; + break; + }, + } + } + while (i < fn_token) : (i += 1) { + try renderToken(ais, tree, i, .space); + } try renderExpression(ais, tree, decl, .none); return renderToken(ais, tree, tree.lastToken(decl) + 1, space); // semicolon }, From 866f7dc7d68156d2cb1f3a7edad0882c67943726 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Sun, 21 Feb 2021 17:37:10 -0700 Subject: [PATCH 120/173] parser: support more recovery test cases --- lib/std/zig/ast.zig | 8 ++ lib/std/zig/parse.zig | 44 +++++--- lib/std/zig/parser_test.zig | 205 ++++++++++++++++++------------------ 3 files changed, 141 insertions(+), 116 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 72c2f82d09..c81386cca4 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -205,6 +205,9 @@ pub const Tree = struct { token_tags[parse_error.token].symbol(), }); }, + .expected_pub_item => { + return stream.writeAll("expected function or variable declaration after pub"); + }, .expected_return_type => { return stream.print("expected return type expression, found '{s}'", .{ token_tags[parse_error.token].symbol(), @@ -265,6 +268,9 @@ pub const Tree = struct { .invalid_align => { return stream.writeAll("alignment not allowed on arrays"); }, + .invalid_and => { + return stream.writeAll("`&&` is invalid; note that `and` is boolean AND"); + }, .invalid_bit_range => { return stream.writeAll("bit range not allowed on slices and arrays"); }, @@ -2316,6 +2322,7 @@ pub const Error = struct { expected_param_list, expected_prefix_expr, expected_primary_type_expr, + expected_pub_item, expected_return_type, expected_semi_or_else, expected_semi_or_lbrace, @@ -2330,6 +2337,7 @@ pub const Error = struct { extra_const_qualifier, 
extra_volatile_qualifier, invalid_align, + invalid_and, invalid_bit_range, invalid_token, same_line_doc_comment, diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig index fc57084ad8..848134cbdc 100644 --- a/lib/std/zig/parse.zig +++ b/lib/std/zig/parse.zig @@ -423,7 +423,7 @@ const Parser = struct { while (true) { const tok = p.nextToken(); switch (p.token_tags[tok]) { - // any of these can start a new top level declaration + // Any of these can start a new top level declaration. .keyword_test, .keyword_comptime, .keyword_pub, @@ -436,13 +436,18 @@ const Parser = struct { .keyword_const, .keyword_var, .keyword_fn, - .identifier, => { if (level == 0) { p.tok_i -= 1; return; } }, + .identifier => { + if (p.token_tags[tok + 1] == .comma and level == 0) { + p.tok_i -= 1; + return; + } + }, .comma, .semicolon => { // this decl was likely meant to end here if (level == 0) { @@ -531,10 +536,13 @@ const Parser = struct { fn expectTopLevelDecl(p: *Parser) !Node.Index { const extern_export_inline_token = p.nextToken(); var expect_fn: bool = false; - var exported: bool = false; + var expect_var_or_fn: bool = false; switch (p.token_tags[extern_export_inline_token]) { - .keyword_extern => _ = p.eatToken(.string_literal), - .keyword_export => exported = true, + .keyword_extern => { + _ = p.eatToken(.string_literal); + expect_var_or_fn = true; + }, + .keyword_export => expect_var_or_fn = true, .keyword_inline, .keyword_noinline => expect_fn = true, else => p.tok_i -= 1, } @@ -580,11 +588,12 @@ const Parser = struct { if (thread_local_token != null) { return p.fail(.expected_var_decl); } - - if (exported) { + if (expect_var_or_fn) { return p.fail(.expected_var_decl_or_fn); } - + if (p.token_tags[p.tok_i] != .keyword_usingnamespace) { + return p.fail(.expected_pub_item); + } return p.expectUsingNamespace(); } @@ -599,7 +608,7 @@ const Parser = struct { } fn expectUsingNamespace(p: *Parser) !Node.Index { - const usingnamespace_token = try 
p.expectToken(.keyword_usingnamespace); + const usingnamespace_token = p.assertToken(.keyword_usingnamespace); const expr = try p.expectExpr(); const semicolon_token = try p.expectToken(.semicolon); return p.addNode(.{ @@ -1346,6 +1355,11 @@ const Parser = struct { }, }); }, + .invalid_ampersands => { + try p.warn(.invalid_and); + p.tok_i += 1; + return p.parseCompareExpr(); + }, else => return res, } } @@ -2283,10 +2297,12 @@ const Parser = struct { if (node == 0) break; res = node; } - const lparen = (try p.expectTokenRecoverable(.l_paren)) orelse { + const lparen = p.nextToken(); + if (p.token_tags[lparen] != .l_paren) { + p.tok_i -= 1; try p.warn(.expected_param_list); return res; - }; + } if (p.eatToken(.r_paren)) |_| { return p.addNode(.{ .tag = .async_call_one, @@ -3769,7 +3785,8 @@ const Parser = struct { /// ExprList <- (Expr COMMA)* Expr? fn parseBuiltinCall(p: *Parser) !Node.Index { const builtin_token = p.assertToken(.builtin); - _ = (try p.expectTokenRecoverable(.l_paren)) orelse { + if (p.token_tags[p.nextToken()] != .l_paren) { + p.tok_i -= 1; try p.warn(.expected_param_list); // Pretend this was an identifier so we can continue parsing. return p.addNode(.{ @@ -3780,7 +3797,7 @@ const Parser = struct { .rhs = undefined, }, }); - }; + } if (p.eatToken(.r_paren)) |_| { return p.addNode(.{ .tag = .builtin_call_two, @@ -4015,6 +4032,7 @@ const Parser = struct { fn expectToken(p: *Parser, tag: Token.Tag) Error!TokenIndex { const token = p.nextToken(); if (p.token_tags[token] != tag) { + p.tok_i -= 1; // Go back so that we can recover properly. 
return p.failMsg(.{ .tag = .expected_token, .token = token, diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 3b58cb43ea..bc4fc797ef 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -3579,7 +3579,7 @@ test "zig fmt: file ends with struct field" { // \\ // , &[_]Error{ // .expected_expr, -// .ExpectedVarDeclOrFn, +// .expected_var_decl_or_fn, // }); //} @@ -4070,24 +4070,24 @@ test "recovery: block statements" { }); } -//test "recovery: missing comma" { -// try testError( -// \\test "" { -// \\ switch (foo) { -// \\ 2 => {} -// \\ 3 => {} -// \\ else => { -// \\ foo && bar +; -// \\ } -// \\ } -// \\} -// , &[_]Error{ -// .expected_token, -// .expected_token, -// .invalid_and, -// .invalid_token, -// }); -//} +test "recovery: missing comma" { + try testError( + \\test "" { + \\ switch (foo) { + \\ 2 => {} + \\ 3 => {} + \\ else => { + \\ foo && bar +; + \\ } + \\ } + \\} + , &[_]Error{ + .expected_token, + .expected_token, + .invalid_and, + .invalid_token, + }); +} test "recovery: extra qualifier" { try testError( @@ -4099,94 +4099,93 @@ test "recovery: extra qualifier" { }); } -//test "recovery: missing return type" { -// try testError( -// \\fn foo() { -// \\ a && b; -// \\} -// \\test "" -// , &[_]Error{ -// .ExpectedReturnType, -// .invalid_and, -// .expected_block, -// }); -//} +test "recovery: missing return type" { + try testError( + \\fn foo() { + \\ a && b; + \\} + \\test "" + , &[_]Error{ + .expected_return_type, + .invalid_and, + .expected_block, + }); +} -//test "recovery: continue after invalid decl" { -// try testError( -// \\fn foo { -// \\ inline; -// \\} -// \\pub test "" { -// \\ async a && b; -// \\} -// , &[_]Error{ -// .expected_token, -// .ExpectedPubItem, -// .ExpectedParamList, -// .invalid_and, -// }); -// try testError( -// \\threadlocal test "" { -// \\ @a && b; -// \\} -// , &[_]Error{ -// .ExpectedVarDecl, -// .ExpectedParamList, -// .invalid_and, -// }); -//} +test "recovery: 
continue after invalid decl" { + try testError( + \\fn foo { + \\ inline; + \\} + \\pub test "" { + \\ async a && b; + \\} + , &[_]Error{ + .expected_token, + .expected_pub_item, + .expected_param_list, + .invalid_and, + }); + try testError( + \\threadlocal test "" { + \\ @a && b; + \\} + , &[_]Error{ + .expected_var_decl, + .expected_param_list, + .invalid_and, + }); +} -//test "recovery: invalid extern/inline" { -// try testError( -// \\inline test "" { a && b; } -// , &[_]Error{ -// .ExpectedFn, -// .invalid_and, -// }); -// try testError( -// \\extern "" test "" { a && b; } -// , &[_]Error{ -// .ExpectedVarDeclOrFn, -// .invalid_and, -// }); -//} +test "recovery: invalid extern/inline" { + try testError( + \\inline test "" { a && b; } + , &[_]Error{ + .expected_fn, + .invalid_and, + }); + try testError( + \\extern "" test "" { a && b; } + , &[_]Error{ + .expected_var_decl_or_fn, + .invalid_and, + }); +} -//test "recovery: missing semicolon" { -// try testError( -// \\test "" { -// \\ comptime a && b -// \\ c && d -// \\ @foo -// \\} -// , &[_]Error{ -// .invalid_and, -// .expected_token, -// .invalid_and, -// .expected_token, -// .ExpectedParamList, -// .expected_token, -// }); -//} +test "recovery: missing semicolon" { + try testError( + \\test "" { + \\ comptime a && b + \\ c && d + \\ @foo + \\} + , &[_]Error{ + .invalid_and, + .expected_token, + .invalid_and, + .expected_token, + .expected_param_list, + .expected_token, + }); +} -//test "recovery: invalid container members" { -// try testError( -// \\usingnamespace; -// \\foo+ -// \\bar@, -// \\while (a == 2) { test "" {}} -// \\test "" { -// \\ a && b -// \\} -// , &[_]Error{ -// .expected_expr, -// .expected_token, -// .expected_token, -// .expected_container_members, -// .invalid_and, -// .expected_token, -// }); -//} +test "recovery: invalid container members" { + try testError( + \\usingnamespace; + \\foo+ + \\bar@, + \\while (a == 2) { test "" {}} + \\test "" { + \\ a && b + \\} + , &[_]Error{ + 
.expected_expr, + .expected_token, + .expected_container_members, + .invalid_and, + .expected_token, + }); +} //test "recovery: invalid parameter" { // try testError( From 2da2123128dfb78893109ad195fa536ff758f2ec Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Sun, 21 Feb 2021 17:37:39 -0700 Subject: [PATCH 121/173] parser: remove support for a recovery test case I don't understand the idea here of this kind of recovery. If we want to resurrect this test case we need some comments on it to explain the purpose, example use cases, expected behavior, etc. --- lib/std/zig/parser_test.zig | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index bc4fc797ef..d5dbd52190 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -4187,16 +4187,6 @@ test "recovery: invalid container members" { }); } -//test "recovery: invalid parameter" { -// try testError( -// \\fn main() void { -// \\ a(comptime T: type) -// \\} -// , &[_]Error{ -// .expected_token, -// }); -//} - //test "recovery: extra '}' at top level" { // try testError( // \\}}} From 79f18763679a9eac7cb1ff7bd9ede063277b266b Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Sun, 21 Feb 2021 17:57:04 -0700 Subject: [PATCH 122/173] parser: remove support for recovering from extra top level end curlies After #35 is implemented, we should be able to recover from this *at any indentation level*, reporting a parse error and yet also parsing all the decls even inside structs. Until then, I don't want to add any hacks to make this work. 
--- lib/std/zig/parse.zig | 6 +++--- lib/std/zig/parser_test.zig | 29 +++++++++++++++-------------- 2 files changed, 18 insertions(+), 17 deletions(-) diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig index 848134cbdc..ac05cc321f 100644 --- a/lib/std/zig/parse.zig +++ b/lib/std/zig/parse.zig @@ -66,9 +66,9 @@ pub fn parse(gpa: *Allocator, source: []const u8) Allocator.Error!Tree { }); const root_members = try parser.parseContainerMembers(); const root_decls = try root_members.toSpan(&parser); - // parseContainerMembers will try to skip as much invalid tokens as - // it can, so we are now at EOF. - assert(parser.token_tags[parser.tok_i] == .eof); + if (parser.token_tags[parser.tok_i] != .eof) { + try parser.warnExpected(.eof); + } parser.nodes.items(.data)[0] = .{ .lhs = root_decls.start, .rhs = root_decls.end, diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index d5dbd52190..b2562072f5 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -4187,20 +4187,21 @@ test "recovery: invalid container members" { }); } -//test "recovery: extra '}' at top level" { -// try testError( -// \\}}} -// \\test "" { -// \\ a && b; -// \\} -// , &[_]Error{ -// .expected_container_members, -// .expected_container_members, -// .expected_container_members, -// .invalid_and, -// }); -//} -// +// TODO after https://github.com/ziglang/zig/issues/35 is implemented, +// we should be able to recover from this *at any indentation level*, +// reporting a parse error and yet also parsing all the decls even +// inside structs. 
+test "recovery: extra '}' at top level" { + try testError( + \\}}} + \\test "" { + \\ a && b; + \\} + , &[_]Error{ + .expected_token, + }); +} + test "recovery: mismatched bracket at top level" { try testError( \\const S = struct { From 1247b7b9ef10d1b16117615bbd64d7bbd27507b2 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Sun, 21 Feb 2021 18:00:37 -0700 Subject: [PATCH 123/173] parser: modify another recovery test case Zig no longer has `error` as the global error set; it is now a special case keyword. The new parse error message is correct. --- lib/std/zig/parser_test.zig | 20 +++++++++----------- 1 file changed, 9 insertions(+), 11 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index b2562072f5..06e32d56fd 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -4212,17 +4212,15 @@ test "recovery: mismatched bracket at top level" { }); } -//test "recovery: invalid global error set access" { -// try testError( -// \\test "" { -// \\ error && foo; -// \\} -// , &[_]Error{ -// .expected_token, -// .ExpectedIdentifier, -// .invalid_and, -// }); -//} +test "recovery: invalid global error set access" { + try testError( + \\test "" { + \\ error && foo; + \\} + , &[_]Error{ + .expected_token, + }); +} //test "recovery: invalid asterisk after pointer dereference" { // try testError( From 878e99d580faf27aba2fbd782cb664da5d460614 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Sun, 21 Feb 2021 18:04:23 -0700 Subject: [PATCH 124/173] parser: fix recovery for missing semicolons --- lib/std/zig/parse.zig | 6 ++-- lib/std/zig/parser_test.zig | 64 ++++++++++++++++++------------------- 2 files changed, 35 insertions(+), 35 deletions(-) diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig index ac05cc321f..90d634c1fc 100644 --- a/lib/std/zig/parse.zig +++ b/lib/std/zig/parse.zig @@ -985,7 +985,7 @@ const Parser = struct { }; const else_token = p.eatToken(.keyword_else) orelse { if (else_required) { - return 
p.fail(.expected_semi_or_else); + try p.warn(.expected_semi_or_else); } return p.addNode(.{ .tag = .if_simple, @@ -1079,7 +1079,7 @@ const Parser = struct { }; const else_token = p.eatToken(.keyword_else) orelse { if (else_required) { - return p.fail(.expected_semi_or_else); + try p.warn(.expected_semi_or_else); } return p.addNode(.{ .tag = .for_simple, @@ -1154,7 +1154,7 @@ const Parser = struct { }; const else_token = p.eatToken(.keyword_else) orelse { if (else_required) { - return p.fail(.expected_semi_or_else); + try p.warn(.expected_semi_or_else); } if (cont_expr == 0) { return p.addNode(.{ diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 06e32d56fd..40d58337e0 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -4222,39 +4222,39 @@ test "recovery: invalid global error set access" { }); } -//test "recovery: invalid asterisk after pointer dereference" { -// try testError( -// \\test "" { -// \\ var sequence = "repeat".*** 10; -// \\} -// , &[_]Error{ -// .asterisk_after_ptr_deref, -// }); -// try testError( -// \\test "" { -// \\ var sequence = "repeat".** 10&&a; -// \\} -// , &[_]Error{ -// .asterisk_after_ptr_deref, -// .invalid_and, -// }); -//} +test "recovery: invalid asterisk after pointer dereference" { + try testError( + \\test "" { + \\ var sequence = "repeat".*** 10; + \\} + , &[_]Error{ + .asterisk_after_ptr_deref, + }); + try testError( + \\test "" { + \\ var sequence = "repeat".** 10&&a; + \\} + , &[_]Error{ + .asterisk_after_ptr_deref, + .invalid_and, + }); +} -//test "recovery: missing semicolon after if, for, while stmt" { -// try testError( -// \\test "" { -// \\ if (foo) bar -// \\ for (foo) |a| bar -// \\ while (foo) bar -// \\ a && b; -// \\} -// , &[_]Error{ -// .expected_semi_or_else, -// .expected_semi_or_else, -// .expected_semi_or_else, -// .invalid_and, -// }); -//} +test "recovery: missing semicolon after if, for, while stmt" { + try testError( + \\test "" { + \\ if (foo) bar + \\ 
for (foo) |a| bar + \\ while (foo) bar + \\ a && b; + \\} + , &[_]Error{ + .expected_semi_or_else, + .expected_semi_or_else, + .expected_semi_or_else, + .invalid_and, + }); +} test "recovery: invalid comptime" { try testError( From c6efb23796053a4409b0cf7b6abb4044719779aa Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Sun, 21 Feb 2021 18:20:46 -0700 Subject: [PATCH 125/173] zig fmt: rewrite inline functions as callconv(.Inline) --- lib/std/zig/parser_test.zig | 24 ++++++++++-------------- lib/std/zig/render.zig | 10 ++++++++++ 2 files changed, 20 insertions(+), 14 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 40d58337e0..2ba85dca3c 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -4,18 +4,16 @@ // The MIT license requires this copyright notice to be included in all copies // and substantial portions of the software. -// TODO Remove this after zig 0.8.0 is released. -// TODO need to add the logic to make this test pass. it was added in master -// but was not added in the ast-memory-layout branch yet. -//test "zig fmt: rewrite inline functions as callconv(.Inline)" { -// try testTransform( -// \\inline fn foo() void {} -// \\ -// , -// \\fn foo() callconv(.Inline) void {} -// \\ -// ); -//} +// TODO Remove this after zig 0.9.0 is released. 
+test "zig fmt: rewrite inline functions as callconv(.Inline)" { + try testTransform( + \\inline fn foo() void {} + \\ + , + \\fn foo() callconv(.Inline) void {} + \\ + ); +} test "zig fmt: simple top level comptime block" { try testCanonical( @@ -2490,8 +2488,6 @@ test "zig fmt: function attributes" { \\pub extern fn foo() void; \\extern "c" fn foo() void; \\pub extern "c" fn foo() void; - \\inline fn foo() void {} - \\pub inline fn foo() void {} \\noinline fn foo() void {} \\pub noinline fn foo() void {} \\ diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 48746ec391..37587fe5d3 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -79,6 +79,11 @@ fn renderMember(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) E } } while (i < fn_token) : (i += 1) { + if (token_tags[i] == .keyword_inline) { + // TODO remove this special case when 0.9.0 is released. + // See the commit that introduced this comment for more details. + continue; + } try renderToken(ais, tree, i, .space); } assert(datas[decl].rhs != 0); @@ -1260,6 +1265,9 @@ fn renderFnProto(ais: *Ais, tree: ast.Tree, fn_proto: ast.full.FnProto, space: S const token_tags = tree.tokens.items(.tag); const token_starts = tree.tokens.items(.start); + const is_inline = fn_proto.ast.fn_token > 0 and + token_tags[fn_proto.ast.fn_token - 1] == .keyword_inline; + const after_fn_token = fn_proto.ast.fn_token + 1; const lparen = if (token_tags[after_fn_token] == .identifier) blk: { try renderToken(ais, tree, fn_proto.ast.fn_token, .space); // fn @@ -1435,6 +1443,8 @@ fn renderFnProto(ais: *Ais, tree: ast.Tree, fn_proto: ast.full.FnProto, space: S try renderToken(ais, tree, callconv_lparen, .none); // ( try renderExpression(ais, tree, fn_proto.ast.callconv_expr, .none); try renderToken(ais, tree, callconv_rparen, .space); // ) + } else if (is_inline) { + try ais.writer().writeAll("callconv(.Inline) "); } if (token_tags[maybe_bang] == .bang) { From 
a17a5ca3a85c92aa933a8eaac66198b31b8b294c Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Sun, 21 Feb 2021 18:31:32 -0700 Subject: [PATCH 126/173] translate-c: fix fn_decl vs fn_proto Regressed in 15603f403c9ca91f7530798a5a7751cace284a28. --- src/translate_c/ast.zig | 14 +++++++------- test/translate_c.zig | 3 ++- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/src/translate_c/ast.zig b/src/translate_c/ast.zig index 99d198c995..7b54d6a8bb 100644 --- a/src/translate_c/ast.zig +++ b/src/translate_c/ast.zig @@ -2407,13 +2407,13 @@ fn renderFunc(c: *Context, node: Node) !NodeIndex { }); }; - const body = if (payload.body) |some| - try renderNode(c, some) - else if (payload.is_extern) blk: { - _ = try c.addToken(.semicolon, ";"); - break :blk 0; - } else return fn_proto; - + const payload_body = payload.body orelse { + if (payload.is_extern) { + _ = try c.addToken(.semicolon, ";"); + } + return fn_proto; + }; + const body = try renderNode(c, payload_body); return c.addNode(.{ .tag = .fn_decl, .main_token = fn_token, diff --git a/test/translate_c.zig b/test/translate_c.zig index 6eb23201e5..fe89e073c9 100644 --- a/test/translate_c.zig +++ b/test/translate_c.zig @@ -3117,7 +3117,8 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\} }); - // TODO fix zig fmt here + // TODO fix zig fmt here - it incorrectly inserts an additional indentation + // level inside blk_1. 
// cases.add("nested assignment", // \\int foo(int *p, int x) { // \\ return *p++ = x; From 621ad241d6dfbde60ee8a5b1d0dcd7d9cf29f8f3 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Sun, 21 Feb 2021 20:25:31 -0700 Subject: [PATCH 127/173] zig fmt: if nested --- lib/std/zig/parser_test.zig | 40 ++++++++++++++++++------------------- lib/std/zig/render.zig | 10 +++++++++- 2 files changed, 29 insertions(+), 21 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 2ba85dca3c..375f12a3f5 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -1449,26 +1449,26 @@ test "zig fmt: if-else with comment before else" { ); } -//test "zig fmt: if nested" { -// try testCanonical( -// \\pub fn foo() void { -// \\ return if ((aInt & bInt) >= 0) -// \\ if (aInt < bInt) -// \\ GE_LESS -// \\ else if (aInt == bInt) -// \\ GE_EQUAL -// \\ else -// \\ GE_GREATER -// \\ else if (aInt > bInt) -// \\ GE_LESS -// \\ else if (aInt == bInt) -// \\ GE_EQUAL -// \\ else -// \\ GE_GREATER; -// \\} -// \\ -// ); -//} +test "zig fmt: if nested" { + try testCanonical( + \\pub fn foo() void { + \\ return if ((aInt & bInt) >= 0) + \\ if (aInt < bInt) + \\ GE_LESS + \\ else if (aInt == bInt) + \\ GE_EQUAL + \\ else + \\ GE_GREATER + \\ else if (aInt > bInt) + \\ GE_LESS + \\ else if (aInt == bInt) + \\ GE_EQUAL + \\ else + \\ GE_GREATER; + \\} + \\ + ); +} test "zig fmt: respect line breaks in if-else" { try testCanonical( diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 37587fe5d3..b9edbbf515 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -984,7 +984,8 @@ fn renderWhile(ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Spa try renderToken(ais, tree, while_node.ast.while_token + 1, .none); // ( try renderExpression(ais, tree, while_node.ast.cond_expr, .none); // condition - if (nodeIsBlock(node_tags[while_node.ast.then_expr])) { + const then_tag = node_tags[while_node.ast.then_expr]; + if 
(nodeIsBlock(then_tag) and !nodeIsIf(then_tag)) { if (while_node.payload_token) |payload_token| { try renderToken(ais, tree, payload_token - 2, .space); // ) try renderToken(ais, tree, payload_token - 1, .none); // | @@ -2128,6 +2129,13 @@ fn nodeIsBlock(tag: ast.Node.Tag) bool { }; } +fn nodeIsIf(tag: ast.Node.Tag) bool { + return switch (tag) { + .@"if", .if_simple => true, + else => false, + }; +} + fn nodeCausesSliceOpSpace(tag: ast.Node.Tag) bool { return switch (tag) { .@"catch", From 67dac2936cb1fc2ce1d3ea00a5c8e574921aded3 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Mon, 22 Feb 2021 10:04:05 +0200 Subject: [PATCH 128/173] parser: warn on missing for loop payload, recover from invalid global error set access --- lib/std/zig/ast.zig | 6 +++++ lib/std/zig/parse.zig | 44 +++++++++++++++++++++++++++---------- lib/std/zig/parser_test.zig | 16 ++++++++++++++ 3 files changed, 54 insertions(+), 12 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index c81386cca4..72846b333c 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -253,6 +253,11 @@ pub const Tree = struct { token_tags[parse_error.token].symbol(), }); }, + .expected_loop_payload => { + return stream.print("expected loop payload, found '{s}'", .{ + token_tags[parse_error.token].symbol(), + }); + }, .extra_align_qualifier => { return stream.writeAll("extra align qualifier"); }, @@ -2332,6 +2337,7 @@ pub const Error = struct { expected_type_expr, expected_var_decl, expected_var_decl_or_fn, + expected_loop_payload, extra_align_qualifier, extra_allowzero_qualifier, extra_const_qualifier, diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig index 90d634c1fc..fd142b8765 100644 --- a/lib/std/zig/parse.zig +++ b/lib/std/zig/parse.zig @@ -1052,7 +1052,8 @@ const Parser = struct { _ = try p.expectToken(.l_paren); const array_expr = try p.expectExpr(); _ = try p.expectToken(.r_paren); - _ = try p.parsePtrIndexPayload(); + const found_payload = try p.parsePtrIndexPayload(); + 
if (found_payload == 0) try p.warn(.expected_loop_payload); // TODO propose to change the syntax so that semicolons are always required // inside while statements, even if there is an `else`. @@ -2067,7 +2068,8 @@ const Parser = struct { _ = try p.expectToken(.l_paren); const array_expr = try p.expectExpr(); _ = try p.expectToken(.r_paren); - _ = try p.parsePtrIndexPayload(); + const found_payload = try p.parsePtrIndexPayload(); + if (found_payload == 0) try p.warn(.expected_loop_payload); const then_expr = try p.expectExpr(); const else_token = p.eatToken(.keyword_else) orelse { @@ -2672,6 +2674,16 @@ const Parser = struct { }, }), }, + .keyword_inline => { + p.tok_i += 1; + switch (p.token_tags[p.tok_i]) { + .keyword_for => return p.parseForTypeExpr(), + .keyword_while => return p.parseWhileTypeExpr(), + else => return p.fail(.expected_inlinable), + } + }, + .keyword_for => return p.parseForTypeExpr(), + .keyword_while => return p.parseWhileTypeExpr(), .period => switch (p.token_tags[p.tok_i + 1]) { .identifier => return p.addNode(.{ .tag = .enum_literal, @@ -2879,14 +2891,21 @@ const Parser = struct { }, }); }, - else => return p.addNode(.{ - .tag = .error_value, - .main_token = p.nextToken(), - .data = .{ - .lhs = try p.expectToken(.period), - .rhs = try p.expectToken(.identifier), - }, - }), + else => { + const main_token = p.nextToken(); + const period = p.eatToken(.period); + if (period == null) try p.warnExpected(.period); + const identifier = p.eatToken(.identifier); + if (identifier == null) try p.warnExpected(.identifier); + return p.addNode(.{ + .tag = .error_value, + .main_token = main_token, + .data = .{ + .lhs = period orelse 0, + .rhs = identifier orelse 0, + }, + }); + }, }, .l_paren => return p.addNode(.{ .tag = .grouped_expression, @@ -2913,9 +2932,10 @@ const Parser = struct { fn parseForTypeExpr(p: *Parser) !Node.Index { const for_token = p.eatToken(.keyword_for) orelse return null_node; _ = try p.expectToken(.l_paren); - const array_expr = try 
p.expectTypeExpr(); + const array_expr = try p.expectExpr(); _ = try p.expectToken(.r_paren); - _ = try p.parsePtrIndexPayload(); + const found_payload = try p.parsePtrIndexPayload(); + if (found_payload == 0) try p.warn(.expected_loop_payload); const then_expr = try p.expectExpr(); const else_token = p.eatToken(.keyword_else) orelse { diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 375f12a3f5..3802427af2 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -4215,6 +4215,8 @@ test "recovery: invalid global error set access" { \\} , &[_]Error{ .expected_token, + .expected_token, + .invalid_and, }); } @@ -4273,6 +4275,20 @@ test "recovery: missing block after for/while loops" { }); } +test "recovery: missing for payload" { + try testError( + \\comptime { + \\ const a = for(a) {}; + \\ const a: for(a) {}; + \\ for(a) {} + \\} + , &[_]Error{ + .expected_loop_payload, + .expected_loop_payload, + .expected_loop_payload, + }); +} + const std = @import("std"); const mem = std.mem; const warn = std.debug.warn; From 69d5a106da3fd339c7d1ed177b706aa30d8cb1a9 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Mon, 22 Feb 2021 16:59:44 +0200 Subject: [PATCH 129/173] render: handle comments ending in EOF --- lib/std/zig/parser_test.zig | 37 +++++++++++++++++++++++-------------- lib/std/zig/render.zig | 30 +++++++++++++++++++++++++----- src/main.zig | 3 +-- 3 files changed, 49 insertions(+), 21 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 3802427af2..9748c07557 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -84,6 +84,15 @@ test "zig fmt: empty file" { ); } +test "zig fmt: file ends in comment" { + try testTransform( + \\ //foobar + , + \\//foobar + \\ + ); +} + test "zig fmt: doc comments on test" { try testCanonical( \\/// hello @@ -3385,20 +3394,20 @@ test "zig fmt: file ends with struct field" { ); } -//test "zig fmt: comment after empty comment" { 
-// try testTransform( -// \\const x = true; // -// \\// -// \\// -// \\//a -// \\ -// , -// \\const x = true; -// \\//a -// \\ -// ); -//} -// +test "zig fmt: comment after empty comment" { + try testTransform( + \\const x = true; // + \\// + \\// + \\//a + \\ + , + \\const x = true; + \\//a + \\ + ); +} + //test "zig fmt: line comment in array" { // try testTransform( // \\test "a" { diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index b9edbbf515..cbed4ac14d 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -26,7 +26,10 @@ pub fn renderTree(buffer: *std.ArrayList(u8), tree: ast.Tree) Error!void { const ais = &auto_indenting_stream; // Render all the line comments at the beginning of the file. - const comment_end_loc: usize = tree.tokens.items(.start)[0]; + const comment_end_loc = if (tree.tokens.items(.tag)[0] == .eof) + tree.source.len + else + tree.tokens.items(.start)[0]; _ = try renderComments(ais, tree, 0, comment_end_loc); try renderMembers(ais, tree, tree.rootDecls()); @@ -1995,11 +1998,20 @@ fn renderToken(ais: *Ais, tree: ast.Tree, token_index: ast.TokenIndex, space: Sp /// that end is the last byte before the next token. fn renderComments(ais: *Ais, tree: ast.Tree, start: usize, end: usize) Error!bool { var index: usize = start; + var rendered_empty_comments = false; while (mem.indexOf(u8, tree.source[index..end], "//")) |offset| { const comment_start = index + offset; - const newline = comment_start + - mem.indexOfScalar(u8, tree.source[comment_start..end], '\n').?; - + const newline_index = mem.indexOfScalar(u8, tree.source[comment_start..end], '\n') orelse { + // comment ends in EOF. 
+ const untrimmed_comment = tree.source[comment_start..]; + const trimmed_comment = mem.trimRight(u8, untrimmed_comment, &std.ascii.spaces); + if (trimmed_comment.len != 2) { + try ais.writer().print("{s}\n", .{trimmed_comment}); + index = end; + } + return index != start; + }; + const newline = comment_start + newline_index; const untrimmed_comment = tree.source[comment_start..newline]; const trimmed_comment = mem.trimRight(u8, untrimmed_comment, &std.ascii.spaces); @@ -2013,6 +2025,11 @@ fn renderComments(ais: *Ais, tree: ast.Tree, start: usize, end: usize) Error!boo // Respect the newline directly before the comment. // Note: This allows an empty line between comments try ais.insertNewline(); + } else if (trimmed_comment.len == 2) { + if (!rendered_empty_comments) { + try ais.writer().writeByte('\n'); + rendered_empty_comments = true; + } } else if (index == start) { // Otherwise if the first comment is on the same line as // the token before it, prefix it with a single space. @@ -2020,7 +2037,10 @@ fn renderComments(ais: *Ais, tree: ast.Tree, start: usize, end: usize) Error!boo } } - try ais.writer().print("{s}\n", .{trimmed_comment}); + if (trimmed_comment.len != 2) { + try ais.writer().print("{s}\n", .{trimmed_comment}); + rendered_empty_comments = false; + } index = newline + 1; if (ais.disabled_offset) |disabled_offset| { diff --git a/src/main.zig b/src/main.zig index 09d791cfb5..38da3d5a3b 100644 --- a/src/main.zig +++ b/src/main.zig @@ -2866,8 +2866,7 @@ fn fmtPathFile( try fmt.out_buffer.ensureCapacity(source_code.len); try tree.renderToArrayList(&fmt.out_buffer); - const anything_changed = mem.eql(u8, fmt.out_buffer.items, source_code); - if (!anything_changed) + if (mem.eql(u8, fmt.out_buffer.items, source_code)) return; if (check_mode) { From 928790364ab486de240b8c14ef5ef312079377d7 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Mon, 22 Feb 2021 17:39:41 +0200 Subject: [PATCH 130/173] zig fmt: correct Node.firstToken for .fn_decl, add error for 
missing container --- lib/std/zig/ast.zig | 25 ++++++- lib/std/zig/parse.zig | 5 +- lib/std/zig/parser_test.zig | 141 ++++++++++++++++++------------------ lib/std/zig/render.zig | 2 +- 4 files changed, 99 insertions(+), 74 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 72846b333c..7b0380dfaf 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -258,6 +258,11 @@ pub const Tree = struct { token_tags[parse_error.token].symbol(), }); }, + .expected_container => { + return stream.print("expected a struct, enum or union, found '{s}'", .{ + token_tags[parse_error.token].symbol(), + }); + }, .extra_align_qualifier => { return stream.writeAll("extra align qualifier"); }, @@ -441,10 +446,27 @@ pub const Tree = struct { .call, .call_comma, .switch_range, - .fn_decl, .error_union, => n = datas[n].lhs, + .fn_decl => { + var i = main_tokens[n]; // fn token + while (i > 0) { + i -= 1; + switch (token_tags[i]) { + .keyword_extern, + .keyword_export, + .keyword_pub, + .keyword_threadlocal, + .string_literal, + => continue, + + else => return i + 1 - end_offset, + } + } + return i - end_offset; + }, + .async_call_one, .async_call_one_comma, .async_call, @@ -2338,6 +2360,7 @@ pub const Error = struct { expected_var_decl, expected_var_decl_or_fn, expected_loop_payload, + expected_container, extra_align_qualifier, extra_allowzero_qualifier, extra_const_qualifier, diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig index fd142b8765..f5672bedf9 100644 --- a/lib/std/zig/parse.zig +++ b/lib/std/zig/parse.zig @@ -3627,7 +3627,10 @@ const Parser = struct { break :blk null_node; } }, - else => unreachable, + else => { + p.tok_i -= 1; + return p.fail(.expected_container); + }, }; _ = try p.expectToken(.l_brace); const members = try p.parseContainerMembers(); diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 9748c07557..eecc4f769a 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -2399,20 +2399,20 
@@ test "zig fmt: comments before test decl" { ); } -//test "zig fmt: preserve spacing" { -// try testCanonical( -// \\const std = @import("std"); -// \\ -// \\pub fn main() !void { -// \\ var stdout_file = std.io.getStdOut; -// \\ var stdout_file = std.io.getStdOut; -// \\ -// \\ var stdout_file = std.io.getStdOut; -// \\ var stdout_file = std.io.getStdOut; -// \\} -// \\ -// ); -//} +test "zig fmt: preserve spacing" { + try testCanonical( + \\const std = @import("std"); + \\ + \\pub fn main() !void { + \\ var stdout_file = std.io.getStdOut; + \\ var stdout_file = std.io.getStdOut; + \\ + \\ var stdout_file = std.io.getStdOut; + \\ var stdout_file = std.io.getStdOut; + \\} + \\ + ); +} //test "zig fmt: return types" { // try testCanonical( @@ -2431,27 +2431,27 @@ test "zig fmt: imports" { ); } -//test "zig fmt: global declarations" { -// try testCanonical( -// \\const a = b; -// \\pub const a = b; -// \\var a = b; -// \\pub var a = b; -// \\const a: i32 = b; -// \\pub const a: i32 = b; -// \\var a: i32 = b; -// \\pub var a: i32 = b; -// \\extern const a: i32 = b; -// \\pub extern const a: i32 = b; -// \\extern var a: i32 = b; -// \\pub extern var a: i32 = b; -// \\extern "a" const a: i32 = b; -// \\pub extern "a" const a: i32 = b; -// \\extern "a" var a: i32 = b; -// \\pub extern "a" var a: i32 = b; -// \\ -// ); -//} +test "zig fmt: global declarations" { + try testCanonical( + \\const a = b; + \\pub const a = b; + \\var a = b; + \\pub var a = b; + \\const a: i32 = b; + \\pub const a: i32 = b; + \\var a: i32 = b; + \\pub var a: i32 = b; + \\extern const a: i32 = b; + \\pub extern const a: i32 = b; + \\extern var a: i32 = b; + \\pub extern var a: i32 = b; + \\extern "a" const a: i32 = b; + \\pub extern "a" const a: i32 = b; + \\extern "a" var a: i32 = b; + \\pub extern "a" var a: i32 = b; + \\ + ); +} test "zig fmt: extern declaration" { try testCanonical( @@ -2680,23 +2680,23 @@ test "zig fmt: functions" { ); } -//test "zig fmt: multiline string" { -// try 
testCanonical( -// \\test "" { -// \\ const s1 = -// \\ \\one -// \\ \\two) -// \\ \\three -// \\ ; -// \\ const s3 = // hi -// \\ \\one -// \\ \\two) -// \\ \\three -// \\ ; -// \\} -// \\ -// ); -//} +test "zig fmt: multiline string" { + try testCanonical( + \\test "" { + \\ const s1 = + \\ \\one + \\ \\two) + \\ \\three + \\ ; + \\ const s3 = // hi + \\ \\one + \\ \\two) + \\ \\three + \\ ; + \\} + \\ + ); +} test "zig fmt: values" { try testCanonical( @@ -3578,15 +3578,14 @@ test "zig fmt: comment after empty comment" { // ); //} -//test "zig fmt: extern without container keyword returns error" { -// try testError( -// \\const container = extern {}; -// \\ -// , &[_]Error{ -// .expected_expr, -// .expected_var_decl_or_fn, -// }); -//} +test "zig fmt: extern without container keyword returns error" { + try testError( + \\const container = extern {}; + \\ + , &[_]Error{ + .expected_container, + }); +} test "zig fmt: same line doc comment returns error" { try testError( @@ -3706,16 +3705,16 @@ test "zig fmt: C var args" { // ); //} -//test "zig fmt: Don't add extra newline after if" { -// try testCanonical( -// \\pub fn atomicSymLink(allocator: *Allocator, existing_path: []const u8, new_path: []const u8) !void { -// \\ if (cwd().symLink(existing_path, new_path, .{})) { -// \\ return; -// \\ } -// \\} -// \\ -// ); -//} +test "zig fmt: Don't add extra newline after if" { + try testCanonical( + \\pub fn atomicSymLink(allocator: *Allocator, existing_path: []const u8, new_path: []const u8) !void { + \\ if (cwd().symLink(existing_path, new_path, .{})) { + \\ return; + \\ } + \\} + \\ + ); +} //test "zig fmt: comments in ternary ifs" { // try testCanonical( diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index cbed4ac14d..83ff56febe 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -873,7 +873,7 @@ fn renderVarDecl(ais: *Ais, tree: ast.Tree, var_decl: ast.full.VarDecl) Error!vo try renderToken(ais, tree, extern_export_token, Space.space); 
// extern if (var_decl.lib_name) |lib_name| { - try renderExpression(ais, tree, lib_name, Space.space); // "lib" + try renderToken(ais, tree, lib_name, Space.space); // "lib" } } From 34c08a91d59a521f81ec5d4d4ee58a73038c286f Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Mon, 22 Feb 2021 17:37:17 +0100 Subject: [PATCH 131/173] zig fmt: fix formatting of parser tests --- lib/std/zig/parser_test.zig | 144 ++++++++++++++++++------------------ 1 file changed, 72 insertions(+), 72 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index eecc4f769a..3008744023 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -87,7 +87,7 @@ test "zig fmt: empty file" { test "zig fmt: file ends in comment" { try testTransform( \\ //foobar - , + , \\//foobar \\ ); @@ -2400,18 +2400,18 @@ test "zig fmt: comments before test decl" { } test "zig fmt: preserve spacing" { - try testCanonical( - \\const std = @import("std"); - \\ - \\pub fn main() !void { - \\ var stdout_file = std.io.getStdOut; - \\ var stdout_file = std.io.getStdOut; - \\ - \\ var stdout_file = std.io.getStdOut; - \\ var stdout_file = std.io.getStdOut; - \\} - \\ - ); + try testCanonical( + \\const std = @import("std"); + \\ + \\pub fn main() !void { + \\ var stdout_file = std.io.getStdOut; + \\ var stdout_file = std.io.getStdOut; + \\ + \\ var stdout_file = std.io.getStdOut; + \\ var stdout_file = std.io.getStdOut; + \\} + \\ + ); } //test "zig fmt: return types" { @@ -2432,25 +2432,25 @@ test "zig fmt: imports" { } test "zig fmt: global declarations" { - try testCanonical( - \\const a = b; - \\pub const a = b; - \\var a = b; - \\pub var a = b; - \\const a: i32 = b; - \\pub const a: i32 = b; - \\var a: i32 = b; - \\pub var a: i32 = b; - \\extern const a: i32 = b; - \\pub extern const a: i32 = b; - \\extern var a: i32 = b; - \\pub extern var a: i32 = b; - \\extern "a" const a: i32 = b; - \\pub extern "a" const a: i32 = b; - \\extern "a" var a: i32 = b; - \\pub 
extern "a" var a: i32 = b; - \\ - ); + try testCanonical( + \\const a = b; + \\pub const a = b; + \\var a = b; + \\pub var a = b; + \\const a: i32 = b; + \\pub const a: i32 = b; + \\var a: i32 = b; + \\pub var a: i32 = b; + \\extern const a: i32 = b; + \\pub extern const a: i32 = b; + \\extern var a: i32 = b; + \\pub extern var a: i32 = b; + \\extern "a" const a: i32 = b; + \\pub extern "a" const a: i32 = b; + \\extern "a" var a: i32 = b; + \\pub extern "a" var a: i32 = b; + \\ + ); } test "zig fmt: extern declaration" { @@ -2681,21 +2681,21 @@ test "zig fmt: functions" { } test "zig fmt: multiline string" { - try testCanonical( - \\test "" { - \\ const s1 = - \\ \\one - \\ \\two) - \\ \\three - \\ ; - \\ const s3 = // hi - \\ \\one - \\ \\two) - \\ \\three - \\ ; - \\} - \\ - ); + try testCanonical( + \\test "" { + \\ const s1 = + \\ \\one + \\ \\two) + \\ \\three + \\ ; + \\ const s3 = // hi + \\ \\one + \\ \\two) + \\ \\three + \\ ; + \\} + \\ + ); } test "zig fmt: values" { @@ -3395,17 +3395,17 @@ test "zig fmt: file ends with struct field" { } test "zig fmt: comment after empty comment" { - try testTransform( - \\const x = true; // - \\// - \\// - \\//a - \\ - , - \\const x = true; - \\//a - \\ - ); + try testTransform( + \\const x = true; // + \\// + \\// + \\//a + \\ + , + \\const x = true; + \\//a + \\ + ); } //test "zig fmt: line comment in array" { @@ -3579,12 +3579,12 @@ test "zig fmt: comment after empty comment" { //} test "zig fmt: extern without container keyword returns error" { - try testError( - \\const container = extern {}; - \\ - , &[_]Error{ - .expected_container, - }); + try testError( + \\const container = extern {}; + \\ + , &[_]Error{ + .expected_container, + }); } test "zig fmt: same line doc comment returns error" { @@ -3706,14 +3706,14 @@ test "zig fmt: C var args" { //} test "zig fmt: Don't add extra newline after if" { - try testCanonical( - \\pub fn atomicSymLink(allocator: *Allocator, existing_path: []const u8, new_path: []const u8) 
!void { - \\ if (cwd().symLink(existing_path, new_path, .{})) { - \\ return; - \\ } - \\} - \\ - ); + try testCanonical( + \\pub fn atomicSymLink(allocator: *Allocator, existing_path: []const u8, new_path: []const u8) !void { + \\ if (cwd().symLink(existing_path, new_path, .{})) { + \\ return; + \\ } + \\} + \\ + ); } //test "zig fmt: comments in ternary ifs" { From ce9b3ee0f908a776b67a5d7bedc2075559e7643c Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Mon, 22 Feb 2021 17:38:09 +0100 Subject: [PATCH 132/173] parser: anytype is not a vaild return type --- lib/std/zig/parser_test.zig | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 3008744023..aaf2331a9c 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -2414,14 +2414,14 @@ test "zig fmt: preserve spacing" { ); } -//test "zig fmt: return types" { -// try testCanonical( -// \\pub fn main() !void {} -// \\pub fn main() anytype {} -// \\pub fn main() i32 {} -// \\ -// ); -//} +test "zig fmt: return types" { + try testCanonical( + \\pub fn main() !void {} + \\pub fn main() FooBar {} + \\pub fn main() i32 {} + \\ + ); +} test "zig fmt: imports" { try testCanonical( @@ -2652,12 +2652,12 @@ test "zig fmt: call expression" { ); } -//test "zig fmt: anytype type" { -// try testCanonical( -// \\fn print(args: anytype) anytype {} -// \\ -// ); -//} +test "zig fmt: anytype type" { + try testCanonical( + \\fn print(args: anytype) @This() {} + \\ + ); +} test "zig fmt: functions" { try testCanonical( From 011bc1b84fc0ea1147cc96ccd30962bc38b65e02 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Mon, 22 Feb 2021 19:26:19 +0200 Subject: [PATCH 133/173] translate-c: switch default should have an empty block not break --- src/translate_c.zig | 2 +- test/translate_c.zig | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/src/translate_c.zig b/src/translate_c.zig index 
dc13e5e380..e3debc01a0 100644 --- a/src/translate_c.zig +++ b/src/translate_c.zig @@ -2276,7 +2276,7 @@ fn transSwitch( } if (!has_default) { - const else_prong = try Tag.switch_else.create(c.arena, Tag.@"break".init()); + const else_prong = try Tag.switch_else.create(c.arena, Tag.empty_block.init()); try cases.append(else_prong); } diff --git a/test/translate_c.zig b/test/translate_c.zig index fe89e073c9..2e99bd8dd6 100644 --- a/test/translate_c.zig +++ b/test/translate_c.zig @@ -2055,6 +2055,9 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\ return; \\ } \\ case 6: + \\ switch (res) { + \\ case 9: break; + \\ } \\ res = 1; \\ return; \\ } @@ -2084,6 +2087,10 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\ } \\ }, \\ @as(c_int, 6) => { + \\ switch (res) { + \\ @as(c_int, 9) => {}, + \\ else => {}, + \\ } \\ res = 1; \\ return; \\ }, From f3ee10b4547bf5f03a4728b0d48b22bad6c9a5b1 Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Mon, 22 Feb 2021 18:30:51 +0100 Subject: [PATCH 134/173] zig fmt: fix comments ending with EOF after decls Achieve this by reducing the amount of special casing to handle EOF so that the already correct logic for normal comments does not need to be duplicated. 
--- lib/std/zig/parser_test.zig | 11 +++++++++++ lib/std/zig/render.zig | 30 ++++++++++++------------------ lib/std/zig/tokenizer.zig | 2 ++ 3 files changed, 25 insertions(+), 18 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index aaf2331a9c..2c43e04ae5 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -93,6 +93,17 @@ test "zig fmt: file ends in comment" { ); } +test "zig fmt: file ends in comment after var decl" { + try testTransform( + \\const x = 42; + \\ //foobar + , + \\const x = 42; + \\//foobar + \\ + ); +} + test "zig fmt: doc comments on test" { try testCanonical( \\/// hello diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 83ff56febe..fe9df25dad 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -26,10 +26,7 @@ pub fn renderTree(buffer: *std.ArrayList(u8), tree: ast.Tree) Error!void { const ais = &auto_indenting_stream; // Render all the line comments at the beginning of the file. - const comment_end_loc = if (tree.tokens.items(.tag)[0] == .eof) - tree.source.len - else - tree.tokens.items(.start)[0]; + const comment_end_loc = tree.tokens.items(.start)[0]; _ = try renderComments(ais, tree, 0, comment_end_loc); try renderMembers(ais, tree, tree.rootDecls()); @@ -1609,15 +1606,18 @@ fn renderArrayInit( } else { try renderExpression(ais, tree, array_init.ast.type_expr, .none); // T } + if (array_init.ast.elements.len == 0) { ais.pushIndentNextLine(); try renderToken(ais, tree, array_init.ast.lbrace, .none); // lbrace ais.popIndent(); return renderToken(ais, tree, array_init.ast.lbrace + 1, space); // rbrace } + const last_elem = array_init.ast.elements[array_init.ast.elements.len - 1]; const last_elem_token = tree.lastToken(last_elem); - if (token_tags[last_elem_token + 1] == .comma) { + const trailing_comma = token_tags[last_elem_token + 1] == .comma; + if (trailing_comma) { // Render one element per line. 
ais.pushIndentNextLine(); try renderToken(ais, tree, array_init.ast.lbrace, .newline); @@ -2001,18 +2001,12 @@ fn renderComments(ais: *Ais, tree: ast.Tree, start: usize, end: usize) Error!boo var rendered_empty_comments = false; while (mem.indexOf(u8, tree.source[index..end], "//")) |offset| { const comment_start = index + offset; - const newline_index = mem.indexOfScalar(u8, tree.source[comment_start..end], '\n') orelse { - // comment ends in EOF. - const untrimmed_comment = tree.source[comment_start..]; - const trimmed_comment = mem.trimRight(u8, untrimmed_comment, &std.ascii.spaces); - if (trimmed_comment.len != 2) { - try ais.writer().print("{s}\n", .{trimmed_comment}); - index = end; - } - return index != start; - }; - const newline = comment_start + newline_index; - const untrimmed_comment = tree.source[comment_start..newline]; + + // If there is no newline, the comment ends with EOF + const newline_index = mem.indexOfScalar(u8, tree.source[comment_start..end], '\n'); + const newline = if (newline_index) |i| comment_start + i else null; + + const untrimmed_comment = tree.source[comment_start .. 
newline orelse tree.source.len]; const trimmed_comment = mem.trimRight(u8, untrimmed_comment, &std.ascii.spaces); // Don't leave any whitespace at the start of the file @@ -2041,7 +2035,7 @@ fn renderComments(ais: *Ais, tree: ast.Tree, start: usize, end: usize) Error!boo try ais.writer().print("{s}\n", .{trimmed_comment}); rendered_empty_comments = false; } - index = newline + 1; + index = 1 + (newline orelse return true); if (ais.disabled_offset) |disabled_offset| { if (mem.eql(u8, trimmed_comment, "// zig fmt: on")) { diff --git a/lib/std/zig/tokenizer.zig b/lib/std/zig/tokenizer.zig index dd18025efb..71b8fad4aa 100644 --- a/lib/std/zig/tokenizer.zig +++ b/lib/std/zig/tokenizer.zig @@ -1444,6 +1444,7 @@ pub const Tokenizer = struct { self.pending_invalid_token = null; return token; } + result.loc.start = self.index; } result.loc.end = self.index; @@ -2055,4 +2056,5 @@ fn testTokenize(source: []const u8, expected_tokens: []const Token.Tag) void { } const last_token = tokenizer.next(); std.testing.expect(last_token.tag == .eof); + std.testing.expect(last_token.loc.start == source.len); } From d83698ab54947c79c90d54c27e5a6d52c12fc52f Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Mon, 22 Feb 2021 21:23:17 +0200 Subject: [PATCH 135/173] translate-c: check for noreturn in switch in more cases --- src/translate_c.zig | 13 +++++++------ src/translate_c/ast.zig | 2 +- test/translate_c.zig | 10 ++++++++++ 3 files changed, 18 insertions(+), 7 deletions(-) diff --git a/src/translate_c.zig b/src/translate_c.zig index e3debc01a0..f2d2f53050 100644 --- a/src/translate_c.zig +++ b/src/translate_c.zig @@ -2374,6 +2374,9 @@ fn transSwitchProngStmtInline( const result = try transStmt(c, &block.base, sub, .unused); assert(result.tag() != .declaration); try block.statements.append(result); + if (result.isNoreturn(true)) { + return; + } }, .DefaultStmtClass => { var sub = @ptrCast(*const clang.DefaultStmt, it[0]).getSubStmt(); @@ -2385,14 +2388,12 @@ fn 
transSwitchProngStmtInline( const result = try transStmt(c, &block.base, sub, .unused); assert(result.tag() != .declaration); try block.statements.append(result); + if (result.isNoreturn(true)) { + return; + } }, .CompoundStmtClass => { - const compound_stmt = @ptrCast(*const clang.CompoundStmt, it[0]); - var child_block = try Scope.Block.init(c, &block.base, false); - defer child_block.deinit(); - - try transCompoundStmtInline(c, compound_stmt, &child_block); - const result = try child_block.complete(c); + const result = try transCompoundStmt(c, &block.base, @ptrCast(*const clang.CompoundStmt, it[0])); try block.statements.append(result); if (result.isNoreturn(true)) { return; diff --git a/src/translate_c/ast.zig b/src/translate_c/ast.zig index 7b54d6a8bb..928619449f 100644 --- a/src/translate_c/ast.zig +++ b/src/translate_c/ast.zig @@ -401,7 +401,7 @@ pub const Node = extern union { return true; }, .@"return", .return_void => return true, - .break_val, .@"break" => if (break_counts) return true, + .@"break" => if (break_counts) return true, else => {}, } return false; diff --git a/test/translate_c.zig b/test/translate_c.zig index 2e99bd8dd6..40e074fcc4 100644 --- a/test/translate_c.zig +++ b/test/translate_c.zig @@ -2047,6 +2047,10 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\ res = 3 * i; \\ break; \\ break; + \\ case 7: { + \\ res = 7; + \\ break; + \\ } \\ case 4: \\ case 5: \\ res = 69; @@ -2079,6 +2083,12 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\ else => { \\ res = @as(c_int, 3) * i; \\ }, + \\ @as(c_int, 7) => { + \\ { + \\ res = 7; + \\ break; + \\ } + \\ }, \\ @as(c_int, 4), @as(c_int, 5) => { \\ res = 69; \\ { From 4758e0c9a662b59e071b52fbcc931fae138292d7 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Mon, 22 Feb 2021 21:43:44 +0200 Subject: [PATCH 136/173] update docgen to use new ast --- doc/docgen.zig | 255 ++++++++++++++++++++++++++----------------------- 1 file changed, 134 insertions(+), 121 deletions(-) 
diff --git a/doc/docgen.zig b/doc/docgen.zig index 90e3e32201..4f06b63c2c 100644 --- a/doc/docgen.zig +++ b/doc/docgen.zig @@ -781,106 +781,119 @@ fn tokenizeAndPrintRaw(docgen_tokenizer: *Tokenizer, out: anytype, source_token: next_tok_is_fn = false; const token = tokenizer.next(); - try writeEscaped(out, src[index..token.loc.start]); - switch (token.id) { - .Eof => break, + if (mem.indexOf(u8, src[index..token.loc.start], "//")) |comment_start_off| { + // render one comment + const comment_start = index + comment_start_off; + const comment_end_off = mem.indexOf(u8, src[comment_start .. token.loc.start], "\n"); + const comment_end = if (comment_end_off) |o| comment_start + o else token.loc.start; - .Keyword_align, - .Keyword_and, - .Keyword_asm, - .Keyword_async, - .Keyword_await, - .Keyword_break, - .Keyword_catch, - .Keyword_comptime, - .Keyword_const, - .Keyword_continue, - .Keyword_defer, - .Keyword_else, - .Keyword_enum, - .Keyword_errdefer, - .Keyword_error, - .Keyword_export, - .Keyword_extern, - .Keyword_for, - .Keyword_if, - .Keyword_inline, - .Keyword_noalias, - .Keyword_noinline, - .Keyword_nosuspend, - .Keyword_opaque, - .Keyword_or, - .Keyword_orelse, - .Keyword_packed, - .Keyword_anyframe, - .Keyword_pub, - .Keyword_resume, - .Keyword_return, - .Keyword_linksection, - .Keyword_callconv, - .Keyword_struct, - .Keyword_suspend, - .Keyword_switch, - .Keyword_test, - .Keyword_threadlocal, - .Keyword_try, - .Keyword_union, - .Keyword_unreachable, - .Keyword_usingnamespace, - .Keyword_var, - .Keyword_volatile, - .Keyword_allowzero, - .Keyword_while, - .Keyword_anytype, + try writeEscaped(out, src[index..comment_start]); + try out.writeAll(""); + try writeEscaped(out, src[comment_start .. 
comment_end]); + try out.writeAll(""); + index = comment_end; + tokenizer.index = index; + continue; + } + + try writeEscaped(out, src[index..token.loc.start]); + switch (token.tag) { + .eof => break, + + .keyword_align, + .keyword_and, + .keyword_asm, + .keyword_async, + .keyword_await, + .keyword_break, + .keyword_catch, + .keyword_comptime, + .keyword_const, + .keyword_continue, + .keyword_defer, + .keyword_else, + .keyword_enum, + .keyword_errdefer, + .keyword_error, + .keyword_export, + .keyword_extern, + .keyword_for, + .keyword_if, + .keyword_inline, + .keyword_noalias, + .keyword_noinline, + .keyword_nosuspend, + .keyword_opaque, + .keyword_or, + .keyword_orelse, + .keyword_packed, + .keyword_anyframe, + .keyword_pub, + .keyword_resume, + .keyword_return, + .keyword_linksection, + .keyword_callconv, + .keyword_struct, + .keyword_suspend, + .keyword_switch, + .keyword_test, + .keyword_threadlocal, + .keyword_try, + .keyword_union, + .keyword_unreachable, + .keyword_usingnamespace, + .keyword_var, + .keyword_volatile, + .keyword_allowzero, + .keyword_while, + .keyword_anytype, => { try out.writeAll(""); try writeEscaped(out, src[token.loc.start..token.loc.end]); try out.writeAll(""); }, - .Keyword_fn => { + .keyword_fn => { try out.writeAll(""); try writeEscaped(out, src[token.loc.start..token.loc.end]); try out.writeAll(""); next_tok_is_fn = true; }, - .Keyword_undefined, - .Keyword_null, - .Keyword_true, - .Keyword_false, + .keyword_undefined, + .keyword_null, + .keyword_true, + .keyword_false, => { try out.writeAll(""); try writeEscaped(out, src[token.loc.start..token.loc.end]); try out.writeAll(""); }, - .StringLiteral, - .MultilineStringLiteralLine, - .CharLiteral, + .string_literal, + .multiline_string_literal_line, + .char_literal, => { try out.writeAll(""); try writeEscaped(out, src[token.loc.start..token.loc.end]); try out.writeAll(""); }, - .Builtin => { + .builtin => { try out.writeAll(""); try writeEscaped(out, 
src[token.loc.start..token.loc.end]); try out.writeAll(""); }, - .LineComment, - .DocComment, - .ContainerDocComment, - .ShebangLine, + .doc_comment, + .container_doc_comment, => { try out.writeAll(""); try writeEscaped(out, src[token.loc.start..token.loc.end]); try out.writeAll(""); }, - .Identifier => { + .identifier => { if (prev_tok_was_fn) { try out.writeAll(""); try writeEscaped(out, src[token.loc.start..token.loc.end]); @@ -908,71 +921,71 @@ fn tokenizeAndPrintRaw(docgen_tokenizer: *Tokenizer, out: anytype, source_token: } }, - .IntegerLiteral, - .FloatLiteral, + .integer_literal, + .float_literal, => { try out.writeAll(""); try writeEscaped(out, src[token.loc.start..token.loc.end]); try out.writeAll(""); }, - .Bang, - .Pipe, - .PipePipe, - .PipeEqual, - .Equal, - .EqualEqual, - .EqualAngleBracketRight, - .BangEqual, - .LParen, - .RParen, - .Semicolon, - .Percent, - .PercentEqual, - .LBrace, - .RBrace, - .LBracket, - .RBracket, - .Period, - .PeriodAsterisk, - .Ellipsis2, - .Ellipsis3, - .Caret, - .CaretEqual, - .Plus, - .PlusPlus, - .PlusEqual, - .PlusPercent, - .PlusPercentEqual, - .Minus, - .MinusEqual, - .MinusPercent, - .MinusPercentEqual, - .Asterisk, - .AsteriskEqual, - .AsteriskAsterisk, - .AsteriskPercent, - .AsteriskPercentEqual, - .Arrow, - .Colon, - .Slash, - .SlashEqual, - .Comma, - .Ampersand, - .AmpersandEqual, - .QuestionMark, - .AngleBracketLeft, - .AngleBracketLeftEqual, - .AngleBracketAngleBracketLeft, - .AngleBracketAngleBracketLeftEqual, - .AngleBracketRight, - .AngleBracketRightEqual, - .AngleBracketAngleBracketRight, - .AngleBracketAngleBracketRightEqual, - .Tilde, + .bang, + .pipe, + .pipe_pipe, + .pipe_equal, + .equal, + .equal_equal, + .equal_angle_bracket_right, + .bang_equal, + .l_paren, + .r_paren, + .semicolon, + .percent, + .percent_equal, + .l_brace, + .r_brace, + .l_bracket, + .r_bracket, + .period, + .period_asterisk, + .ellipsis2, + .ellipsis3, + .caret, + .caret_equal, + .plus, + .plus_plus, + .plus_equal, + .plus_percent, 
+ .plus_percent_equal, + .minus, + .minus_equal, + .minus_percent, + .minus_percent_equal, + .asterisk, + .asterisk_equal, + .asterisk_asterisk, + .asterisk_percent, + .asterisk_percent_equal, + .arrow, + .colon, + .slash, + .slash_equal, + .comma, + .ampersand, + .ampersand_equal, + .question_mark, + .angle_bracket_left, + .angle_bracket_left_equal, + .angle_bracket_angle_bracket_left, + .angle_bracket_angle_bracket_left_equal, + .angle_bracket_right, + .angle_bracket_right_equal, + .angle_bracket_angle_bracket_right, + .angle_bracket_angle_bracket_right_equal, + .tilde, => try writeEscaped(out, src[token.loc.start..token.loc.end]), - .Invalid, .Invalid_ampersands, .Invalid_periodasterisks => return parseError( + .invalid, .invalid_ampersands, .invalid_periodasterisks => return parseError( docgen_tokenizer, source_token, "syntax error", From 550688f427a2303f5e309a269def4d8be532cc54 Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Mon, 22 Feb 2021 23:14:01 +0100 Subject: [PATCH 137/173] zig fmt: insert trailing comma in switches --- lib/std/zig/parser_test.zig | 56 ++++++++++++++++++------------------- lib/std/zig/render.zig | 6 +++- 2 files changed, 33 insertions(+), 29 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 2c43e04ae5..42df500ddc 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -2103,34 +2103,34 @@ test "zig fmt: same line comments in expression" { ); } -//test "zig fmt: add comma on last switch prong" { -// try testTransform( -// \\test "aoeu" { -// \\switch (self.init_arg_expr) { -// \\ InitArg.Type => |t| { }, -// \\ InitArg.None, -// \\ InitArg.Enum => { } -// \\} -// \\ switch (self.init_arg_expr) { -// \\ InitArg.Type => |t| { }, -// \\ InitArg.None, -// \\ InitArg.Enum => { }//line comment -// \\ } -// \\} -// , -// \\test "aoeu" { -// \\ switch (self.init_arg_expr) { -// \\ InitArg.Type => |t| {}, -// \\ InitArg.None, InitArg.Enum => {}, -// \\ } -// \\ switch 
(self.init_arg_expr) { -// \\ InitArg.Type => |t| {}, -// \\ InitArg.None, InitArg.Enum => {}, //line comment -// \\ } -// \\} -// \\ -// ); -//} +test "zig fmt: add comma on last switch prong" { + try testTransform( + \\test "aoeu" { + \\switch (self.init_arg_expr) { + \\ InitArg.Type => |t| { }, + \\ InitArg.None, + \\ InitArg.Enum => { } + \\} + \\ switch (self.init_arg_expr) { + \\ InitArg.Type => |t| { }, + \\ InitArg.None, + \\ InitArg.Enum => { }//line comment + \\ } + \\} + , + \\test "aoeu" { + \\ switch (self.init_arg_expr) { + \\ InitArg.Type => |t| {}, + \\ InitArg.None, InitArg.Enum => {}, + \\ } + \\ switch (self.init_arg_expr) { + \\ InitArg.Type => |t| {}, + \\ InitArg.None, InitArg.Enum => {}, //line comment + \\ } + \\} + \\ + ); +} test "zig fmt: same-line comment after a statement" { try testCanonical( diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index fe9df25dad..67a7a9a514 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -1948,7 +1948,7 @@ const Space = enum { space, /// Output the token lexeme followed by a newline. newline, - /// Additionally consume the next token if it is a comma. + /// If the next token is a comma, render it as well. If not, insert one. /// In either case, a newline will be inserted afterwards. comma, /// Additionally consume the next token if it is a comma. 
@@ -1968,6 +1968,10 @@ fn renderToken(ais: *Ais, tree: ast.Tree, token_index: ast.TokenIndex, space: Sp try ais.writer().writeAll(lexeme); + if (space == .comma and token_tags[token_index + 1] != .comma) { + try ais.writer().writeByte(','); + } + const comment = try renderComments(ais, tree, token_start + lexeme.len, token_starts[token_index + 1]); switch (space) { .none => {}, From 45634851de92f7848b648a0e009247e7f102e9ff Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Mon, 22 Feb 2021 23:51:15 +0100 Subject: [PATCH 138/173] zig fmt: fix firstToken() for extern fn_protos --- lib/std/zig/ast.zig | 11 ++++++----- lib/std/zig/parser_test.zig | 36 ++++++++++++++++++------------------ 2 files changed, 24 insertions(+), 23 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 7b0380dfaf..40bb2256ec 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -366,10 +366,6 @@ pub const Tree = struct { .@"nosuspend", .asm_simple, .@"asm", - .fn_proto_simple, - .fn_proto_multi, - .fn_proto_one, - .fn_proto, .array_type, .array_type_sentinel, .error_value, @@ -449,7 +445,12 @@ pub const Tree = struct { .error_union, => n = datas[n].lhs, - .fn_decl => { + .fn_decl, + .fn_proto_simple, + .fn_proto_multi, + .fn_proto_one, + .fn_proto, + => { var i = main_tokens[n]; // fn token while (i > 0) { i -= 1; diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 42df500ddc..7bae2f3162 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -3727,24 +3727,24 @@ test "zig fmt: Don't add extra newline after if" { ); } -//test "zig fmt: comments in ternary ifs" { -// try testCanonical( -// \\const x = if (true) { -// \\ 1; -// \\} else if (false) -// \\ // Comment -// \\ 0; -// \\const y = if (true) -// \\ // Comment -// \\ 1 -// \\else -// \\ 0; -// \\ -// \\pub extern "c" fn printf(format: [*:0]const u8, ...) 
c_int; -// \\ -// ); -//} -// +test "zig fmt: comments in ternary ifs" { + try testCanonical( + \\const x = if (true) { + \\ 1; + \\} else if (false) + \\ // Comment + \\ 0; + \\const y = if (true) + \\ // Comment + \\ 1 + \\else + \\ 0; + \\ + \\pub extern "c" fn printf(format: [*:0]const u8, ...) c_int; + \\ + ); +} + //test "zig fmt: test comments in field access chain" { // try testCanonical( // \\pub const str = struct { From 253906fb93f25481ae80af9043b58342858e156c Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Mon, 22 Feb 2021 16:00:21 -0700 Subject: [PATCH 139/173] zig fmt: 2nd arg multiline string --- lib/std/zig/parser_test.zig | 39 ++++++++++++++++++++++++++----------- lib/std/zig/render.zig | 25 ++++++++++++++++-------- 2 files changed, 45 insertions(+), 19 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 7bae2f3162..109d9e9620 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -1280,17 +1280,34 @@ test "zig fmt: async call in if condition" { ); } -//test "zig fmt: 2nd arg multiline string" { -// try testCanonical( -// \\comptime { -// \\ cases.addAsm("hello world linux x86_64", -// \\ \\.text -// \\ , "Hello, world!\n"); -// \\} -// \\ -// ); -//} -// +test "zig fmt: 2nd arg multiline string" { + try testCanonical( + \\comptime { + \\ cases.addAsm("hello world linux x86_64", + \\ \\.text + \\ , "Hello, world!\n"); + \\} + \\ + ); + try testTransform( + \\comptime { + \\ cases.addAsm("hello world linux x86_64", + \\ \\.text + \\ , "Hello, world!\n",); + \\} + , + \\comptime { + \\ cases.addAsm( + \\ "hello world linux x86_64", + \\ \\.text + \\ , + \\ "Hello, world!\n", + \\ ); + \\} + \\ + ); +} + //test "zig fmt: 2nd arg multiline string many args" { // try testCanonical( // \\comptime { diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 67a7a9a514..105be0c9c4 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -1879,23 +1879,23 @@ fn renderCall( 
const after_last_param_tok = tree.lastToken(last_param) + 1; if (token_tags[after_last_param_tok] == .comma) { ais.pushIndentNextLine(); - try renderToken(ais, tree, lparen, Space.newline); // ( + try renderToken(ais, tree, lparen, .newline); // ( for (params) |param_node, i| { if (i + 1 < params.len) { - try renderExpression(ais, tree, param_node, Space.none); + try renderExpression(ais, tree, param_node, .none); - // Unindent the comma for multiline string literals + // Unindent the comma for multiline string literals. const is_multiline_string = node_tags[param_node] == .multiline_string_literal; if (is_multiline_string) ais.popIndent(); const comma = tree.lastToken(param_node) + 1; - try renderToken(ais, tree, comma, Space.newline); // , + try renderToken(ais, tree, comma, .newline); // , if (is_multiline_string) ais.pushIndent(); try renderExtraNewline(ais, tree, params[i + 1]); } else { - try renderExpression(ais, tree, param_node, Space.comma); + try renderExpression(ais, tree, param_node, .comma); } } ais.popIndent(); @@ -1903,14 +1903,23 @@ fn renderCall( } ais.pushIndentNextLine(); - try renderToken(ais, tree, lparen, Space.none); // ( + try renderToken(ais, tree, lparen, .none); // ( for (params) |param_node, i| { - try renderExpression(ais, tree, param_node, Space.none); + try renderExpression(ais, tree, param_node, .none); if (i + 1 < params.len) { const comma = tree.lastToken(param_node) + 1; - try renderToken(ais, tree, comma, Space.space); + const this_multiline_string = node_tags[param_node] == .multiline_string_literal; + const next_multiline_string = node_tags[params[i + 1]] == .multiline_string_literal; + const comma_space: Space = if (next_multiline_string) .none else .space; + if (this_multiline_string) { + ais.popIndent(); + try renderToken(ais, tree, comma, comma_space); + ais.pushIndent(); + } else { + try renderToken(ais, tree, comma, comma_space); + } } } From b301999cd3e3d070f5d62a9d3da2a736b7026b30 Mon Sep 17 00:00:00 2001 From: Andrew 
Kelley Date: Mon, 22 Feb 2021 16:21:19 -0700 Subject: [PATCH 140/173] zig fmt: if condition wraps --- lib/std/zig/parser_test.zig | 204 ++++++++++++++++++------------------ lib/std/zig/render.zig | 19 ++-- 2 files changed, 113 insertions(+), 110 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 109d9e9620..ee18fc6c9a 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -1308,108 +1308,108 @@ test "zig fmt: 2nd arg multiline string" { ); } -//test "zig fmt: 2nd arg multiline string many args" { -// try testCanonical( -// \\comptime { -// \\ cases.addAsm("hello world linux x86_64", -// \\ \\.text -// \\ , "Hello, world!\n", "Hello, world!\n"); -// \\} -// \\ -// ); -//} -// -//test "zig fmt: final arg multiline string" { -// try testCanonical( -// \\comptime { -// \\ cases.addAsm("hello world linux x86_64", "Hello, world!\n", -// \\ \\.text -// \\ ); -// \\} -// \\ -// ); -//} -// -//test "zig fmt: if condition wraps" { -// try testTransform( -// \\comptime { -// \\ if (cond and -// \\ cond) { -// \\ return x; -// \\ } -// \\ while (cond and -// \\ cond) { -// \\ return x; -// \\ } -// \\ if (a == b and -// \\ c) { -// \\ a = b; -// \\ } -// \\ while (a == b and -// \\ c) { -// \\ a = b; -// \\ } -// \\ if ((cond and -// \\ cond)) { -// \\ return x; -// \\ } -// \\ while ((cond and -// \\ cond)) { -// \\ return x; -// \\ } -// \\ var a = if (a) |*f| x: { -// \\ break :x &a.b; -// \\ } else |err| err; -// \\ var a = if (cond and -// \\ cond) |*f| -// \\ x: { -// \\ break :x &a.b; -// \\ } else |err| err; -// \\} -// , -// \\comptime { -// \\ if (cond and -// \\ cond) -// \\ { -// \\ return x; -// \\ } -// \\ while (cond and -// \\ cond) -// \\ { -// \\ return x; -// \\ } -// \\ if (a == b and -// \\ c) -// \\ { -// \\ a = b; -// \\ } -// \\ while (a == b and -// \\ c) -// \\ { -// \\ a = b; -// \\ } -// \\ if ((cond and -// \\ cond)) -// \\ { -// \\ return x; -// \\ } -// \\ while ((cond and -// \\ 
cond)) -// \\ { -// \\ return x; -// \\ } -// \\ var a = if (a) |*f| x: { -// \\ break :x &a.b; -// \\ } else |err| err; -// \\ var a = if (cond and -// \\ cond) |*f| -// \\ x: { -// \\ break :x &a.b; -// \\ } else |err| err; -// \\} -// \\ -// ); -//} +test "zig fmt: 2nd arg multiline string many args" { + try testCanonical( + \\comptime { + \\ cases.addAsm("hello world linux x86_64", + \\ \\.text + \\ , "Hello, world!\n", "Hello, world!\n"); + \\} + \\ + ); +} + +test "zig fmt: final arg multiline string" { + try testCanonical( + \\comptime { + \\ cases.addAsm("hello world linux x86_64", "Hello, world!\n", + \\ \\.text + \\ ); + \\} + \\ + ); +} + +test "zig fmt: if condition wraps" { + try testTransform( + \\comptime { + \\ if (cond and + \\ cond) { + \\ return x; + \\ } + \\ while (cond and + \\ cond) { + \\ return x; + \\ } + \\ if (a == b and + \\ c) { + \\ a = b; + \\ } + \\ while (a == b and + \\ c) { + \\ a = b; + \\ } + \\ if ((cond and + \\ cond)) { + \\ return x; + \\ } + \\ while ((cond and + \\ cond)) { + \\ return x; + \\ } + \\ var a = if (a) |*f| x: { + \\ break :x &a.b; + \\ } else |err| err; + \\ var a = if (cond and + \\ cond) |*f| + \\ x: { + \\ break :x &a.b; + \\ } else |err| err; + \\} + , + \\comptime { + \\ if (cond and + \\ cond) + \\ { + \\ return x; + \\ } + \\ while (cond and + \\ cond) + \\ { + \\ return x; + \\ } + \\ if (a == b and + \\ c) + \\ { + \\ a = b; + \\ } + \\ while (a == b and + \\ c) + \\ { + \\ a = b; + \\ } + \\ if ((cond and + \\ cond)) + \\ { + \\ return x; + \\ } + \\ while ((cond and + \\ cond)) + \\ { + \\ return x; + \\ } + \\ var a = if (a) |*f| x: { + \\ break :x &a.b; + \\ } else |err| err; + \\ var a = if (cond and + \\ cond) |*f| + \\ x: { + \\ break :x &a.b; + \\ } else |err| err; + \\} + \\ + ); +} // //test "zig fmt: if condition has line break but must not wrap" { // try testCanonical( diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 105be0c9c4..06b5052fd5 100644 --- 
a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -499,10 +499,9 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac }, .grouped_expression => { - ais.pushIndentNextLine(); try renderToken(ais, tree, main_tokens[node], .none); // lparen + ais.pushIndentOneShot(); try renderExpression(ais, tree, datas[node].lhs, .none); - ais.popIndent(); return renderToken(ais, tree, datas[node].rhs, space); // rparen }, @@ -981,13 +980,13 @@ fn renderWhile(ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Spa } try renderToken(ais, tree, while_node.ast.while_token, .space); // if - try renderToken(ais, tree, while_node.ast.while_token + 1, .none); // ( + try renderToken(ais, tree, while_node.ast.while_token + 1, .none); // lparen try renderExpression(ais, tree, while_node.ast.cond_expr, .none); // condition const then_tag = node_tags[while_node.ast.then_expr]; if (nodeIsBlock(then_tag) and !nodeIsIf(then_tag)) { if (while_node.payload_token) |payload_token| { - try renderToken(ais, tree, payload_token - 2, .space); // ) + try renderToken(ais, tree, payload_token - 2, .space); // rparen try renderToken(ais, tree, payload_token - 1, .none); // | const ident = blk: { if (token_tags[payload_token] == .asterisk) { @@ -1007,10 +1006,14 @@ fn renderWhile(ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Spa break :blk ident + 1; } }; - try renderToken(ais, tree, pipe, .space); // | + const cond_has_newline = !tree.tokensOnSameLine(while_node.ast.while_token, pipe); + const brace_space: Space = if (cond_has_newline) .newline else .space; + try renderToken(ais, tree, pipe, brace_space); // | } else { const rparen = tree.lastToken(while_node.ast.cond_expr) + 1; - try renderToken(ais, tree, rparen, .space); // ) + const cond_has_newline = !tree.tokensOnSameLine(while_node.ast.while_token, rparen); + const brace_space: Space = if (cond_has_newline) .newline else .space; + try renderToken(ais, tree, rparen, brace_space); // rparen 
} if (while_node.ast.cont_expr != 0) { const rparen = tree.lastToken(while_node.ast.cont_expr) + 1; @@ -1040,7 +1043,7 @@ fn renderWhile(ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Spa if (src_has_newline) { if (while_node.payload_token) |payload_token| { - try renderToken(ais, tree, payload_token - 2, .space); // ) + try renderToken(ais, tree, payload_token - 2, .space); // rparen try renderToken(ais, tree, payload_token - 1, .none); // | const ident = blk: { if (token_tags[payload_token] == .asterisk) { @@ -1063,7 +1066,7 @@ fn renderWhile(ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Spa try renderToken(ais, tree, pipe, .newline); // | } else { ais.pushIndent(); - try renderToken(ais, tree, rparen, .newline); // ) + try renderToken(ais, tree, rparen, .newline); // rparen ais.popIndent(); } if (while_node.ast.cont_expr != 0) { From ec987a7a4652a0dbbd214e0252f35214532a9493 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Mon, 22 Feb 2021 16:39:50 -0700 Subject: [PATCH 141/173] zig fmt: if condition has line break but must not wrap --- lib/std/zig/parser_test.zig | 50 ++++++++++++++++++------------------- lib/std/zig/render.zig | 6 ++--- 2 files changed, 27 insertions(+), 29 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index ee18fc6c9a..0df9a16bc2 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -1410,31 +1410,31 @@ test "zig fmt: if condition wraps" { \\ ); } -// -//test "zig fmt: if condition has line break but must not wrap" { -// try testCanonical( -// \\comptime { -// \\ if (self.user_input_options.put( -// \\ name, -// \\ UserInputOption{ -// \\ .name = name, -// \\ .used = false, -// \\ }, -// \\ ) catch unreachable) |*prev_value| { -// \\ foo(); -// \\ bar(); -// \\ } -// \\ if (put( -// \\ a, -// \\ b, -// \\ )) { -// \\ foo(); -// \\ } -// \\} -// \\ -// ); -//} -// + +test "zig fmt: if condition has line break but must not wrap" { + try testCanonical( 
+ \\comptime { + \\ if (self.user_input_options.put( + \\ name, + \\ UserInputOption{ + \\ .name = name, + \\ .used = false, + \\ }, + \\ ) catch unreachable) |*prev_value| { + \\ foo(); + \\ bar(); + \\ } + \\ if (put( + \\ a, + \\ b, + \\ )) { + \\ foo(); + \\ } + \\} + \\ + ); +} + //test "zig fmt: if condition has line break but must not wrap" { // try testCanonical( // \\comptime { diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 06b5052fd5..a3974d7204 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -1006,13 +1006,11 @@ fn renderWhile(ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Spa break :blk ident + 1; } }; - const cond_has_newline = !tree.tokensOnSameLine(while_node.ast.while_token, pipe); - const brace_space: Space = if (cond_has_newline) .newline else .space; + const brace_space: Space = if (ais.isLineOverIndented()) .newline else .space; try renderToken(ais, tree, pipe, brace_space); // | } else { const rparen = tree.lastToken(while_node.ast.cond_expr) + 1; - const cond_has_newline = !tree.tokensOnSameLine(while_node.ast.while_token, rparen); - const brace_space: Space = if (cond_has_newline) .newline else .space; + const brace_space: Space = if (ais.isLineOverIndented()) .newline else .space; try renderToken(ais, tree, rparen, brace_space); // rparen } if (while_node.ast.cont_expr != 0) { From 20cfa0b5b62bf712d5aac882f7fde9b8e8cacae3 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Mon, 22 Feb 2021 17:34:30 -0700 Subject: [PATCH 142/173] zig fmt: if condition has line break, no fn call comma --- lib/std/zig/parser_test.zig | 42 ++++++++++++++++++------------------- lib/std/zig/render.zig | 28 ++++++++++++------------- 2 files changed, 34 insertions(+), 36 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 0df9a16bc2..f7197e3853 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -1435,27 +1435,27 @@ test "zig fmt: if condition has 
line break but must not wrap" { ); } -//test "zig fmt: if condition has line break but must not wrap" { -// try testCanonical( -// \\comptime { -// \\ if (self.user_input_options.put(name, UserInputOption{ -// \\ .name = name, -// \\ .used = false, -// \\ }) catch unreachable) |*prev_value| { -// \\ foo(); -// \\ bar(); -// \\ } -// \\ if (put( -// \\ a, -// \\ b, -// \\ )) { -// \\ foo(); -// \\ } -// \\} -// \\ -// ); -//} -// +test "zig fmt: if condition has line break but must not wrap (no fn call comma)" { + try testCanonical( + \\comptime { + \\ if (self.user_input_options.put(name, UserInputOption{ + \\ .name = name, + \\ .used = false, + \\ }) catch unreachable) |*prev_value| { + \\ foo(); + \\ bar(); + \\ } + \\ if (put( + \\ a, + \\ b, + \\ )) { + \\ foo(); + \\ } + \\} + \\ + ); +} + //test "zig fmt: function call with multiline argument" { // try testCanonical( // \\comptime { diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index a3974d7204..cd48013b42 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -1903,28 +1903,23 @@ fn renderCall( return renderToken(ais, tree, after_last_param_tok + 1, space); // ) } - ais.pushIndentNextLine(); try renderToken(ais, tree, lparen, .none); // ( for (params) |param_node, i| { + const this_multiline_string = node_tags[param_node] == .multiline_string_literal; + if (this_multiline_string) { + ais.pushIndentOneShot(); + } try renderExpression(ais, tree, param_node, .none); if (i + 1 < params.len) { const comma = tree.lastToken(param_node) + 1; - const this_multiline_string = node_tags[param_node] == .multiline_string_literal; const next_multiline_string = node_tags[params[i + 1]] == .multiline_string_literal; const comma_space: Space = if (next_multiline_string) .none else .space; - if (this_multiline_string) { - ais.popIndent(); - try renderToken(ais, tree, comma, comma_space); - ais.pushIndent(); - } else { - try renderToken(ais, tree, comma, comma_space); - } + try renderToken(ais, tree, 
comma, comma_space); } } - ais.popIndent(); return renderToken(ais, tree, after_last_param_tok, space); // ) } @@ -2234,9 +2229,12 @@ fn AutoIndentingStream(comptime UnderlyingWriter: type) type { indent_count: usize = 0, indent_delta: usize, current_line_empty: bool = true, - indent_one_shot_count: usize = 0, // automatically popped when applied - applied_indent: usize = 0, // the most recently applied indent - indent_next_line: usize = 0, // not used until the next line + /// automatically popped when applied + indent_one_shot_count: usize = 0, + /// the most recently applied indent + applied_indent: usize = 0, + /// not used until the next line + indent_next_line: usize = 0, pub fn writer(self: *Self) Writer { return .{ .context = self }; @@ -2291,9 +2289,9 @@ fn AutoIndentingStream(comptime UnderlyingWriter: type) type { } /// Push default indentation + /// Doesn't actually write any indentation. + /// Just primes the stream to be able to write the correct indentation if it needs to. pub fn pushIndent(self: *Self) void { - // Doesn't actually write any indentation. - // Just primes the stream to be able to write the correct indentation if it needs to. self.indent_count += 1; } From fec51ad7c5fe5f141f8f0c53a1287c5a00613429 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Mon, 22 Feb 2021 17:55:19 -0700 Subject: [PATCH 143/173] zig fmt: while --- lib/std/zig/ast.zig | 13 ++- lib/std/zig/parser_test.zig | 186 +++++++++++++++++++----------------- lib/std/zig/render.zig | 19 +++- 3 files changed, 121 insertions(+), 97 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 40bb2256ec..abe4993d3e 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -587,11 +587,16 @@ pub const Tree = struct { .for_simple, .@"for", => { + // Look for a label and inline. 
const main_token = main_tokens[n]; - return switch (token_tags[main_token - 1]) { - .keyword_inline => main_token - 1, - else => main_token, - } - end_offset; + var result = main_token; + if (token_tags[result - 1] == .keyword_inline) { + result -= 1; + } + if (token_tags[result - 1] == .colon) { + result -= 2; + } + return result - end_offset; }, }; } diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index f7197e3853..63ad1d431c 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -1456,17 +1456,17 @@ test "zig fmt: if condition has line break but must not wrap (no fn call comma)" ); } -//test "zig fmt: function call with multiline argument" { -// try testCanonical( -// \\comptime { -// \\ self.user_input_options.put(name, UserInputOption{ -// \\ .name = name, -// \\ .used = false, -// \\ }); -// \\} -// \\ -// ); -//} +test "zig fmt: function call with multiline argument" { + try testCanonical( + \\comptime { + \\ self.user_input_options.put(name, UserInputOption{ + \\ .name = name, + \\ .used = false, + \\ }); + \\} + \\ + ); +} test "zig fmt: if-else with comment before else" { try testCanonical( @@ -1817,14 +1817,24 @@ test "zig fmt: empty block with only comment" { ); } -//test "zig fmt: no trailing comma on struct decl" { -// try testCanonical( -// \\const RoundParam = struct { -// \\ k: usize, s: u32, t: u32 -// \\}; -// \\ -// ); -//} +test "zig fmt: trailing commas on struct decl" { + try testTransform( + \\const RoundParam = struct { + \\ k: usize, s: u32, t: u32 + \\}; + \\const RoundParam = struct { + \\ k: usize, s: u32, t: u32, + \\}; + , + \\const RoundParam = struct { k: usize, s: u32, t: u32 }; + \\const RoundParam = struct { + \\ k: usize, + \\ s: u32, + \\ t: u32, + \\}; + \\ + ); +} test "zig fmt: extra newlines at the end" { try testTransform( @@ -2975,75 +2985,75 @@ test "zig fmt: switch" { ); } -//test "zig fmt: while" { -// try testCanonical( -// \\test "while" { -// \\ while (10 < 1) 
unreachable; -// \\ -// \\ while (10 < 1) unreachable else unreachable; -// \\ -// \\ while (10 < 1) { -// \\ unreachable; -// \\ } -// \\ -// \\ while (10 < 1) -// \\ unreachable; -// \\ -// \\ var i: usize = 0; -// \\ while (i < 10) : (i += 1) { -// \\ continue; -// \\ } -// \\ -// \\ i = 0; -// \\ while (i < 10) : (i += 1) -// \\ continue; -// \\ -// \\ i = 0; -// \\ var j: usize = 0; -// \\ while (i < 10) : ({ -// \\ i += 1; -// \\ j += 1; -// \\ }) { -// \\ continue; -// \\ } -// \\ -// \\ var a: ?u8 = 2; -// \\ while (a) |v| : (a = null) { -// \\ continue; -// \\ } -// \\ -// \\ while (a) |v| : (a = null) -// \\ unreachable; -// \\ -// \\ label: while (10 < 0) { -// \\ unreachable; -// \\ } -// \\ -// \\ const res = while (0 < 10) { -// \\ break 7; -// \\ } else { -// \\ unreachable; -// \\ }; -// \\ -// \\ const res = while (0 < 10) -// \\ break 7 -// \\ else -// \\ unreachable; -// \\ -// \\ var a: anyerror!u8 = 0; -// \\ while (a) |v| { -// \\ a = error.Err; -// \\ } else |err| { -// \\ i = 1; -// \\ } -// \\ -// \\ comptime var k: usize = 0; -// \\ inline while (i < 10) : (i += 1) -// \\ j += 2; -// \\} -// \\ -// ); -//} +test "zig fmt: while" { + try testCanonical( + \\test "while" { + \\ while (10 < 1) unreachable; + \\ + \\ while (10 < 1) unreachable else unreachable; + \\ + \\ while (10 < 1) { + \\ unreachable; + \\ } + \\ + \\ while (10 < 1) + \\ unreachable; + \\ + \\ var i: usize = 0; + \\ while (i < 10) : (i += 1) { + \\ continue; + \\ } + \\ + \\ i = 0; + \\ while (i < 10) : (i += 1) + \\ continue; + \\ + \\ i = 0; + \\ var j: usize = 0; + \\ while (i < 10) : ({ + \\ i += 1; + \\ j += 1; + \\ }) { + \\ continue; + \\ } + \\ + \\ var a: ?u8 = 2; + \\ while (a) |v| : (a = null) { + \\ continue; + \\ } + \\ + \\ while (a) |v| : (a = null) + \\ unreachable; + \\ + \\ label: while (10 < 0) { + \\ unreachable; + \\ } + \\ + \\ const res = while (0 < 10) { + \\ break 7; + \\ } else { + \\ unreachable; + \\ }; + \\ + \\ const res = while (0 < 10) + \\ 
break 7 + \\ else + \\ unreachable; + \\ + \\ var a: anyerror!u8 = 0; + \\ while (a) |v| { + \\ a = error.Err; + \\ } else |err| { + \\ i = 1; + \\ } + \\ + \\ comptime var k: usize = 0; + \\ inline while (i < 10) : (i += 1) + \\ j += 2; + \\} + \\ + ); +} test "zig fmt: for" { try testCanonical( diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index cd48013b42..0c7564e31e 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -1006,11 +1006,17 @@ fn renderWhile(ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Spa break :blk ident + 1; } }; - const brace_space: Space = if (ais.isLineOverIndented()) .newline else .space; + const brace_space = if (while_node.ast.cont_expr == 0 and ais.isLineOverIndented()) + Space.newline + else + Space.space; try renderToken(ais, tree, pipe, brace_space); // | } else { const rparen = tree.lastToken(while_node.ast.cond_expr) + 1; - const brace_space: Space = if (ais.isLineOverIndented()) .newline else .space; + const brace_space = if (while_node.ast.cont_expr == 0 and ais.isLineOverIndented()) + Space.newline + else + Space.space; try renderToken(ais, tree, rparen, brace_space); // rparen } if (while_node.ast.cont_expr != 0) { @@ -1019,7 +1025,8 @@ fn renderWhile(ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Spa try renderToken(ais, tree, lparen - 1, .space); // : try renderToken(ais, tree, lparen, .none); // lparen try renderExpression(ais, tree, while_node.ast.cont_expr, .none); - try renderToken(ais, tree, rparen, .space); // rparen + const brace_space: Space = if (ais.isLineOverIndented()) .newline else .space; + try renderToken(ais, tree, rparen, brace_space); // rparen } if (while_node.ast.else_expr != 0) { try renderExpression(ais, tree, while_node.ast.then_expr, Space.space); @@ -1061,10 +1068,12 @@ fn renderWhile(ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Spa break :blk ident + 1; } }; - try renderToken(ais, tree, pipe, .newline); // | + const 
after_space: Space = if (while_node.ast.cont_expr != 0) .space else .newline; + try renderToken(ais, tree, pipe, after_space); // | } else { ais.pushIndent(); - try renderToken(ais, tree, rparen, .newline); // rparen + const after_space: Space = if (while_node.ast.cont_expr != 0) .space else .newline; + try renderToken(ais, tree, rparen, after_space); // rparen ais.popIndent(); } if (while_node.ast.cont_expr != 0) { From a6038f03753a967082487020a0b834c1c4305503 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Mon, 22 Feb 2021 18:17:26 -0700 Subject: [PATCH 144/173] zig fmt: function params should align nicely --- lib/std/zig/parser_test.zig | 182 ++++++++++++++++++------------------ lib/std/zig/render.zig | 15 +-- 2 files changed, 98 insertions(+), 99 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 63ad1d431c..6bf551b2bf 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -3726,22 +3726,22 @@ test "zig fmt: C var args" { ); } -//test "zig fmt: Only indent multiline string literals in function calls" { -// try testCanonical( -// \\test "zig fmt:" { -// \\ try testTransform( -// \\ \\const X = struct { -// \\ \\ foo: i32, bar: i8 }; -// \\ , -// \\ \\const X = struct { -// \\ \\ foo: i32, bar: i8 -// \\ \\}; -// \\ \\ -// \\ ); -// \\} -// \\ -// ); -//} +test "zig fmt: Only indent multiline string literals in function calls" { + try testCanonical( + \\test "zig fmt:" { + \\ try testTransform( + \\ \\const X = struct { + \\ \\ foo: i32, bar: i8 }; + \\ , + \\ \\const X = struct { + \\ \\ foo: i32, bar: i8 + \\ \\}; + \\ \\ + \\ ); + \\} + \\ + ); +} test "zig fmt: Don't add extra newline after if" { try testCanonical( @@ -3806,68 +3806,66 @@ test "zig fmt: comments in ternary ifs" { // \\ // ); //} -// -//test "zig fmt: Indent comma correctly after multiline string literals in arg list (trailing comma)" { -// try testCanonical( -// \\fn foo() void { -// \\ z.display_message_dialog( -// \\ *const 
[323:0]u8, -// \\ \\Message Text -// \\ \\------------ -// \\ \\xxxxxxxxxxxx -// \\ \\xxxxxxxxxxxx -// \\ , -// \\ g.GtkMessageType.GTK_MESSAGE_WARNING, -// \\ null, -// \\ ); -// \\ -// \\ z.display_message_dialog(*const [323:0]u8, -// \\ \\Message Text -// \\ \\------------ -// \\ \\xxxxxxxxxxxx -// \\ \\xxxxxxxxxxxx -// \\ , g.GtkMessageType.GTK_MESSAGE_WARNING, null); -// \\} -// \\ -// ); -//} -//test "zig fmt: Control flow statement as body of blockless if" { -// try testCanonical( -// \\pub fn main() void { -// \\ const zoom_node = if (focused_node == layout_first) -// \\ if (it.next()) { -// \\ if (!node.view.pending.float and !node.view.pending.fullscreen) break node; -// \\ } else null -// \\ else -// \\ focused_node; -// \\ -// \\ const zoom_node = if (focused_node == layout_first) while (it.next()) |node| { -// \\ if (!node.view.pending.float and !node.view.pending.fullscreen) break node; -// \\ } else null else -// \\ focused_node; -// \\ -// \\ const zoom_node = if (focused_node == layout_first) -// \\ if (it.next()) { -// \\ if (!node.view.pending.float and !node.view.pending.fullscreen) break node; -// \\ } else null; -// \\ -// \\ const zoom_node = if (focused_node == layout_first) while (it.next()) |node| { -// \\ if (!node.view.pending.float and !node.view.pending.fullscreen) break node; -// \\ }; -// \\ -// \\ const zoom_node = if (focused_node == layout_first) for (nodes) |node| { -// \\ break node; -// \\ }; -// \\ -// \\ const zoom_node = if (focused_node == layout_first) switch (nodes) { -// \\ 0 => 0, -// \\ } else -// \\ focused_node; -// \\} -// \\ -// ); -//} +test "zig fmt: Indent comma correctly after multiline string literals in arg list (trailing comma)" { + try testCanonical( + \\fn foo() void { + \\ z.display_message_dialog( + \\ *const [323:0]u8, + \\ \\Message Text + \\ \\------------ + \\ \\xxxxxxxxxxxx + \\ \\xxxxxxxxxxxx + \\ , + \\ g.GtkMessageType.GTK_MESSAGE_WARNING, + \\ null, + \\ ); + \\ + \\ 
z.display_message_dialog(*const [323:0]u8, + \\ \\Message Text + \\ \\------------ + \\ \\xxxxxxxxxxxx + \\ \\xxxxxxxxxxxx + \\ , g.GtkMessageType.GTK_MESSAGE_WARNING, null); + \\} + \\ + ); +} + +test "zig fmt: Control flow statement as body of blockless if" { + try testCanonical( + \\pub fn main() void { + \\ const zoom_node = if (focused_node == layout_first) + \\ if (it.next()) { + \\ if (!node.view.pending.float and !node.view.pending.fullscreen) break node; + \\ } else null + \\ else + \\ focused_node; + \\ + \\ const zoom_node = if (focused_node == layout_first) while (it.next()) |node| { + \\ if (!node.view.pending.float and !node.view.pending.fullscreen) break node; + \\ } else null else focused_node; + \\ + \\ const zoom_node = if (focused_node == layout_first) + \\ if (it.next()) { + \\ if (!node.view.pending.float and !node.view.pending.fullscreen) break node; + \\ } else null; + \\ + \\ const zoom_node = if (focused_node == layout_first) while (it.next()) |node| { + \\ if (!node.view.pending.float and !node.view.pending.fullscreen) break node; + \\ }; + \\ + \\ const zoom_node = if (focused_node == layout_first) for (nodes) |node| { + \\ break node; + \\ }; + \\ + \\ const zoom_node = if (focused_node == layout_first) switch (nodes) { + \\ 0 => 0, + \\ } else focused_node; + \\} + \\ + ); +} test "zig fmt: regression test for #5722" { try testCanonical( @@ -4058,21 +4056,21 @@ test "zig fmt: trailing comma should force multiline 1 column" { ); } -//test "zig fmt: function params should align nicely" { -// try testCanonical( -// \\pub fn foo() void { -// \\ cases.addRuntimeSafety("slicing operator with sentinel", -// \\ \\const std = @import("std"); -// \\ ++ check_panic_msg ++ -// \\ \\pub fn main() void { -// \\ \\ var buf = [4]u8{'a','b','c',0}; -// \\ \\ const slice = buf[0..:0]; -// \\ \\} -// \\ ); -// \\} -// \\ -// ); -//} +test "zig fmt: function params should align nicely" { + try testCanonical( + \\pub fn foo() void { + \\ 
cases.addRuntimeSafety("slicing operator with sentinel", + \\ \\const std = @import("std"); + \\ ++ check_panic_msg ++ + \\ \\pub fn main() void { + \\ \\ var buf = [4]u8{'a','b','c',0}; + \\ \\ const slice = buf[0..:0]; + \\ \\} + \\ ); + \\} + \\ + ); +} test "zig fmt: error for invalid bit range" { try testError( diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 0c7564e31e..c17a9fa572 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -197,11 +197,10 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac while (locked_indents > 0) : (locked_indents -= 1) ais.popIndent(); switch (space) { - .none => {}, + .none, .space, .newline => {}, .semicolon => if (token_tags[i] == .semicolon) try renderToken(ais, tree, i, .newline), .comma => if (token_tags[i] == .comma) try renderToken(ais, tree, i, .newline), .comma_space => if (token_tags[i] == .comma) try renderToken(ais, tree, i, .space), - else => unreachable, } }, @@ -358,8 +357,8 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac ais.pushIndent(); try renderToken(ais, tree, op_token, .newline); ais.popIndent(); - ais.pushIndentOneShot(); } + ais.pushIndentOneShot(); return renderExpression(ais, tree, infix.rhs, space); }, @@ -1868,7 +1867,6 @@ fn renderCall( space: Space, ) Error!void { const token_tags = tree.tokens.items(.tag); - const node_tags = tree.nodes.items(.tag); const main_tokens = tree.nodes.items(.main_token); if (call.async_token) |async_token| { @@ -1895,7 +1893,8 @@ fn renderCall( try renderExpression(ais, tree, param_node, .none); // Unindent the comma for multiline string literals. 
- const is_multiline_string = node_tags[param_node] == .multiline_string_literal; + const is_multiline_string = + token_tags[tree.firstToken(param_node)] == .multiline_string_literal_line; if (is_multiline_string) ais.popIndent(); const comma = tree.lastToken(param_node) + 1; @@ -1915,7 +1914,8 @@ fn renderCall( try renderToken(ais, tree, lparen, .none); // ( for (params) |param_node, i| { - const this_multiline_string = node_tags[param_node] == .multiline_string_literal; + const this_multiline_string = + token_tags[tree.firstToken(param_node)] == .multiline_string_literal_line; if (this_multiline_string) { ais.pushIndentOneShot(); } @@ -1923,7 +1923,8 @@ fn renderCall( if (i + 1 < params.len) { const comma = tree.lastToken(param_node) + 1; - const next_multiline_string = node_tags[params[i + 1]] == .multiline_string_literal; + const next_multiline_string = + token_tags[tree.firstToken(params[i + 1])] == .multiline_string_literal_line; const comma_space: Space = if (next_multiline_string) .none else .space; try renderToken(ais, tree, comma, comma_space); } From 8379fff80455f89f9673638642cf506cb73c4241 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Mon, 22 Feb 2021 21:04:22 -0700 Subject: [PATCH 145/173] zig fmt: alignment in anonymous literal --- lib/std/zig/parser_test.zig | 34 +-- lib/std/zig/render.zig | 572 ++++++++++++++++++++++++------------ 2 files changed, 395 insertions(+), 211 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 6bf551b2bf..dd2755b8ab 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -540,23 +540,23 @@ test "zig fmt: anon literal in array" { ); } -//test "zig fmt: alignment in anonymous literal" { -// try testTransform( -// \\const a = .{ -// \\ "U", "L", "F", -// \\ "U'", -// \\ "L'", -// \\ "F'", -// \\}; -// \\ -// , -// \\const a = .{ -// \\ "U", "L", "F", -// \\ "U'", "L'", "F'", -// \\}; -// \\ -// ); -//} +test "zig fmt: alignment in anonymous literal" { + try 
testTransform( + \\const a = .{ + \\ "U", "L", "F", + \\ "U'", + \\ "L'", + \\ "F'", + \\}; + \\ + , + \\const a = .{ + \\ "U", "L", "F", + \\ "U'", "L'", "F'", + \\}; + \\ + ); +} test "zig fmt: anon struct literal 0 element" { try testCanonical( diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index c17a9fa572..9dc35aa9d8 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -6,6 +6,7 @@ const std = @import("../std.zig"); const assert = std.debug.assert; const mem = std.mem; +const Allocator = std.mem.Allocator; const meta = std.meta; const ast = std.zig.ast; const Token = std.zig.Token; @@ -29,7 +30,7 @@ pub fn renderTree(buffer: *std.ArrayList(u8), tree: ast.Tree) Error!void { const comment_end_loc = tree.tokens.items(.start)[0]; _ = try renderComments(ais, tree, 0, comment_end_loc); - try renderMembers(ais, tree, tree.rootDecls()); + try renderMembers(buffer.allocator, ais, tree, tree.rootDecls()); if (ais.disabled_offset) |disabled_offset| { try writeFixingWhitespace(ais.underlying_writer, tree.source[disabled_offset..]); @@ -37,17 +38,17 @@ pub fn renderTree(buffer: *std.ArrayList(u8), tree: ast.Tree) Error!void { } /// Render all members in the given slice, keeping empty lines where appropriate -fn renderMembers(ais: *Ais, tree: ast.Tree, members: []const ast.Node.Index) Error!void { +fn renderMembers(gpa: *Allocator, ais: *Ais, tree: ast.Tree, members: []const ast.Node.Index) Error!void { if (members.len == 0) return; //try renderExtraNewline(ais, tree, members[0]); - try renderMember(ais, tree, members[0], .newline); + try renderMember(gpa, ais, tree, members[0], .newline); for (members[1..]) |member| { try renderExtraNewline(ais, tree, member); - try renderMember(ais, tree, member, .newline); + try renderMember(gpa, ais, tree, member, .newline); } } -fn renderMember(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) Error!void { +fn renderMember(gpa: *Allocator, ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: 
Space) Error!void { const token_tags = tree.tokens.items(.tag); const main_tokens = tree.nodes.items(.main_token); const datas = tree.nodes.items(.data); @@ -87,8 +88,8 @@ fn renderMember(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) E try renderToken(ais, tree, i, .space); } assert(datas[decl].rhs != 0); - try renderExpression(ais, tree, fn_proto, .space); - return renderExpression(ais, tree, datas[decl].rhs, space); + try renderExpression(gpa, ais, tree, fn_proto, .space); + return renderExpression(gpa, ais, tree, datas[decl].rhs, space); }, .fn_proto_simple, .fn_proto_multi, @@ -119,7 +120,7 @@ fn renderMember(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) E while (i < fn_token) : (i += 1) { try renderToken(ais, tree, i, .space); } - try renderExpression(ais, tree, decl, .none); + try renderExpression(gpa, ais, tree, decl, .none); return renderToken(ais, tree, tree.lastToken(decl) + 1, space); // semicolon }, @@ -130,14 +131,14 @@ fn renderMember(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) E try renderToken(ais, tree, main_token - 1, .space); // pub } try renderToken(ais, tree, main_token, .space); // usingnamespace - try renderExpression(ais, tree, expr, .none); + try renderExpression(gpa, ais, tree, expr, .none); return renderToken(ais, tree, tree.lastToken(expr) + 1, space); // ; }, - .global_var_decl => return renderVarDecl(ais, tree, tree.globalVarDecl(decl)), - .local_var_decl => return renderVarDecl(ais, tree, tree.localVarDecl(decl)), - .simple_var_decl => return renderVarDecl(ais, tree, tree.simpleVarDecl(decl)), - .aligned_var_decl => return renderVarDecl(ais, tree, tree.alignedVarDecl(decl)), + .global_var_decl => return renderVarDecl(gpa, ais, tree, tree.globalVarDecl(decl)), + .local_var_decl => return renderVarDecl(gpa, ais, tree, tree.localVarDecl(decl)), + .simple_var_decl => return renderVarDecl(gpa, ais, tree, tree.simpleVarDecl(decl)), + .aligned_var_decl => return renderVarDecl(gpa, ais, 
tree, tree.alignedVarDecl(decl)), .test_decl => { const test_token = main_tokens[decl]; @@ -145,13 +146,13 @@ fn renderMember(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) E if (token_tags[test_token + 1] == .string_literal) { try renderToken(ais, tree, test_token + 1, .space); } - try renderExpression(ais, tree, datas[decl].rhs, space); + try renderExpression(gpa, ais, tree, datas[decl].rhs, space); }, - .container_field_init => return renderContainerField(ais, tree, tree.containerFieldInit(decl), space), - .container_field_align => return renderContainerField(ais, tree, tree.containerFieldAlign(decl), space), - .container_field => return renderContainerField(ais, tree, tree.containerField(decl), space), - .@"comptime" => return renderExpression(ais, tree, decl, space), + .container_field_init => return renderContainerField(gpa, ais, tree, tree.containerFieldInit(decl), space), + .container_field_align => return renderContainerField(gpa, ais, tree, tree.containerFieldAlign(decl), space), + .container_field => return renderContainerField(gpa, ais, tree, tree.containerField(decl), space), + .@"comptime" => return renderExpression(gpa, ais, tree, decl, space), .root => unreachable, else => unreachable, @@ -159,16 +160,16 @@ fn renderMember(ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) E } /// Render all expressions in the slice, keeping empty lines where appropriate -fn renderExpressions(ais: *Ais, tree: ast.Tree, expressions: []const ast.Node.Index, space: Space) Error!void { +fn renderExpressions(gpa: *Allocator, ais: *Ais, tree: ast.Tree, expressions: []const ast.Node.Index, space: Space) Error!void { if (expressions.len == 0) return; - try renderExpression(ais, tree, expressions[0], space); + try renderExpression(gpa, ais, tree, expressions[0], space); for (expressions[1..]) |expression| { try renderExtraNewline(ais, tree, expression); - try renderExpression(ais, tree, expression, space); + try renderExpression(gpa, ais, tree, 
expression, space); } } -fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Space) Error!void { +fn renderExpression(gpa: *Allocator, ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Space) Error!void { const token_tags = tree.tokens.items(.tag); const main_tokens = tree.nodes.items(.main_token); const node_tags = tree.nodes.items(.tag); @@ -217,18 +218,18 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac => { const statements = [2]ast.Node.Index{ datas[node].lhs, datas[node].rhs }; if (datas[node].lhs == 0) { - return renderBlock(ais, tree, node, statements[0..0], space); + return renderBlock(gpa, ais, tree, node, statements[0..0], space); } else if (datas[node].rhs == 0) { - return renderBlock(ais, tree, node, statements[0..1], space); + return renderBlock(gpa, ais, tree, node, statements[0..1], space); } else { - return renderBlock(ais, tree, node, statements[0..2], space); + return renderBlock(gpa, ais, tree, node, statements[0..2], space); } }, .block, .block_semicolon, => { const statements = tree.extra_data[datas[node].lhs..datas[node].rhs]; - return renderBlock(ais, tree, node, statements, space); + return renderBlock(gpa, ais, tree, node, statements, space); }, .@"errdefer" => { @@ -242,20 +243,20 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac try renderToken(ais, tree, payload_token, .none); // identifier try renderToken(ais, tree, payload_token + 1, .space); // | } - return renderExpression(ais, tree, expr, space); + return renderExpression(gpa, ais, tree, expr, space); }, .@"defer" => { const defer_token = main_tokens[node]; const expr = datas[node].rhs; try renderToken(ais, tree, defer_token, .space); - return renderExpression(ais, tree, expr, space); + return renderExpression(gpa, ais, tree, expr, space); }, .@"comptime", .@"nosuspend" => { const comptime_token = main_tokens[node]; const block = datas[node].lhs; try renderToken(ais, tree, 
comptime_token, .space); - return renderExpression(ais, tree, block, space); + return renderExpression(gpa, ais, tree, block, space); }, .@"suspend" => { @@ -263,7 +264,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac const body = datas[node].lhs; if (body != 0) { try renderToken(ais, tree, suspend_token, .space); - return renderExpression(ais, tree, body, space); + return renderExpression(gpa, ais, tree, body, space); } else { return renderToken(ais, tree, suspend_token, space); } @@ -276,7 +277,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac const same_line = tree.tokensOnSameLine(main_token, fallback_first); const after_op_space = if (same_line) Space.space else Space.newline; - try renderExpression(ais, tree, datas[node].lhs, .space); // target + try renderExpression(gpa, ais, tree, datas[node].lhs, .space); // target if (token_tags[fallback_first - 1] == .pipe) { try renderToken(ais, tree, main_token, .space); // catch keyword @@ -289,12 +290,12 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac } ais.pushIndentOneShot(); - try renderExpression(ais, tree, datas[node].rhs, space); // fallback + try renderExpression(gpa, ais, tree, datas[node].rhs, space); // fallback }, .field_access => { const field_access = datas[node]; - try renderExpression(ais, tree, field_access.lhs, .none); + try renderExpression(gpa, ais, tree, field_access.lhs, .none); try renderToken(ais, tree, main_tokens[node], .none); return renderToken(ais, tree, field_access.rhs, space); }, @@ -303,9 +304,9 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac .switch_range, => { const infix = datas[node]; - try renderExpression(ais, tree, infix.lhs, .none); + try renderExpression(gpa, ais, tree, infix.lhs, .none); try renderToken(ais, tree, main_tokens[node], .none); - return renderExpression(ais, tree, infix.rhs, space); + return renderExpression(gpa, ais, 
tree, infix.rhs, space); }, .add, @@ -349,7 +350,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac .@"orelse", => { const infix = datas[node]; - try renderExpression(ais, tree, infix.lhs, .space); + try renderExpression(gpa, ais, tree, infix.lhs, .space); const op_token = main_tokens[node]; if (tree.tokensOnSameLine(op_token, op_token + 1)) { try renderToken(ais, tree, op_token, .space); @@ -359,7 +360,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac ais.popIndent(); } ais.pushIndentOneShot(); - return renderExpression(ais, tree, infix.rhs, space); + return renderExpression(gpa, ais, tree, infix.rhs, space); }, .bit_not, @@ -370,7 +371,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac .address_of, => { try renderToken(ais, tree, main_tokens[node], .none); - return renderExpression(ais, tree, datas[node].lhs, space); + return renderExpression(gpa, ais, tree, datas[node].lhs, space); }, .@"try", @@ -378,79 +379,79 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac .@"await", => { try renderToken(ais, tree, main_tokens[node], .space); - return renderExpression(ais, tree, datas[node].lhs, space); + return renderExpression(gpa, ais, tree, datas[node].lhs, space); }, - .array_type => return renderArrayType(ais, tree, tree.arrayType(node), space), - .array_type_sentinel => return renderArrayType(ais, tree, tree.arrayTypeSentinel(node), space), + .array_type => return renderArrayType(gpa, ais, tree, tree.arrayType(node), space), + .array_type_sentinel => return renderArrayType(gpa, ais, tree, tree.arrayTypeSentinel(node), space), - .ptr_type_aligned => return renderPtrType(ais, tree, tree.ptrTypeAligned(node), space), - .ptr_type_sentinel => return renderPtrType(ais, tree, tree.ptrTypeSentinel(node), space), - .ptr_type => return renderPtrType(ais, tree, tree.ptrType(node), space), - .ptr_type_bit_range => return renderPtrType(ais, 
tree, tree.ptrTypeBitRange(node), space), + .ptr_type_aligned => return renderPtrType(gpa, ais, tree, tree.ptrTypeAligned(node), space), + .ptr_type_sentinel => return renderPtrType(gpa, ais, tree, tree.ptrTypeSentinel(node), space), + .ptr_type => return renderPtrType(gpa, ais, tree, tree.ptrType(node), space), + .ptr_type_bit_range => return renderPtrType(gpa, ais, tree, tree.ptrTypeBitRange(node), space), .array_init_one, .array_init_one_comma => { var elements: [1]ast.Node.Index = undefined; - return renderArrayInit(ais, tree, tree.arrayInitOne(&elements, node), space); + return renderArrayInit(gpa, ais, tree, tree.arrayInitOne(&elements, node), space); }, .array_init_dot_two, .array_init_dot_two_comma => { var elements: [2]ast.Node.Index = undefined; - return renderArrayInit(ais, tree, tree.arrayInitDotTwo(&elements, node), space); + return renderArrayInit(gpa, ais, tree, tree.arrayInitDotTwo(&elements, node), space); }, .array_init_dot, .array_init_dot_comma, - => return renderArrayInit(ais, tree, tree.arrayInitDot(node), space), + => return renderArrayInit(gpa, ais, tree, tree.arrayInitDot(node), space), .array_init, .array_init_comma, - => return renderArrayInit(ais, tree, tree.arrayInit(node), space), + => return renderArrayInit(gpa, ais, tree, tree.arrayInit(node), space), .struct_init_one, .struct_init_one_comma => { var fields: [1]ast.Node.Index = undefined; - return renderStructInit(ais, tree, tree.structInitOne(&fields, node), space); + return renderStructInit(gpa, ais, tree, tree.structInitOne(&fields, node), space); }, .struct_init_dot_two, .struct_init_dot_two_comma => { var fields: [2]ast.Node.Index = undefined; - return renderStructInit(ais, tree, tree.structInitDotTwo(&fields, node), space); + return renderStructInit(gpa, ais, tree, tree.structInitDotTwo(&fields, node), space); }, .struct_init_dot, .struct_init_dot_comma, - => return renderStructInit(ais, tree, tree.structInitDot(node), space), + => return renderStructInit(gpa, ais, tree, 
tree.structInitDot(node), space), .struct_init, .struct_init_comma, - => return renderStructInit(ais, tree, tree.structInit(node), space), + => return renderStructInit(gpa, ais, tree, tree.structInit(node), space), .call_one, .call_one_comma, .async_call_one, .async_call_one_comma => { var params: [1]ast.Node.Index = undefined; - return renderCall(ais, tree, tree.callOne(¶ms, node), space); + return renderCall(gpa, ais, tree, tree.callOne(¶ms, node), space); }, .call, .call_comma, .async_call, .async_call_comma, - => return renderCall(ais, tree, tree.callFull(node), space), + => return renderCall(gpa, ais, tree, tree.callFull(node), space), .array_access => { const suffix = datas[node]; const lbracket = tree.firstToken(suffix.rhs) - 1; const rbracket = tree.lastToken(suffix.rhs) + 1; - try renderExpression(ais, tree, suffix.lhs, .none); + try renderExpression(gpa, ais, tree, suffix.lhs, .none); try renderToken(ais, tree, lbracket, .none); // [ - try renderExpression(ais, tree, suffix.rhs, .none); + try renderExpression(gpa, ais, tree, suffix.rhs, .none); return renderToken(ais, tree, rbracket, space); // ] }, - .slice_open => return renderSlice(ais, tree, tree.sliceOpen(node), space), - .slice => return renderSlice(ais, tree, tree.slice(node), space), - .slice_sentinel => return renderSlice(ais, tree, tree.sliceSentinel(node), space), + .slice_open => return renderSlice(gpa, ais, tree, tree.sliceOpen(node), space), + .slice => return renderSlice(gpa, ais, tree, tree.slice(node), space), + .slice_sentinel => return renderSlice(gpa, ais, tree, tree.sliceSentinel(node), space), .deref => { - try renderExpression(ais, tree, datas[node].lhs, .none); + try renderExpression(gpa, ais, tree, datas[node].lhs, .none); return renderToken(ais, tree, main_tokens[node], space); }, .unwrap_optional => { - try renderExpression(ais, tree, datas[node].lhs, .none); + try renderExpression(gpa, ais, tree, datas[node].lhs, .none); try renderToken(ais, tree, main_tokens[node], .none); 
return renderToken(ais, tree, datas[node].rhs, space); }, @@ -463,7 +464,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac try renderToken(ais, tree, main_token, space); // break keyword } else if (label_token == 0 and target != 0) { try renderToken(ais, tree, main_token, .space); // break keyword - try renderExpression(ais, tree, target, space); + try renderExpression(gpa, ais, tree, target, space); } else if (label_token != 0 and target == 0) { try renderToken(ais, tree, main_token, .space); // break keyword try renderToken(ais, tree, label_token - 1, .none); // colon @@ -472,7 +473,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac try renderToken(ais, tree, main_token, .space); // break keyword try renderToken(ais, tree, label_token - 1, .none); // colon try renderToken(ais, tree, label_token, .space); // identifier - try renderExpression(ais, tree, target, space); + try renderExpression(gpa, ais, tree, target, space); } }, @@ -491,7 +492,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac .@"return" => { if (datas[node].lhs != 0) { try renderToken(ais, tree, main_tokens[node], .space); - try renderExpression(ais, tree, datas[node].lhs, space); + try renderExpression(gpa, ais, tree, datas[node].lhs, space); } else { try renderToken(ais, tree, main_tokens[node], space); } @@ -500,33 +501,33 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac .grouped_expression => { try renderToken(ais, tree, main_tokens[node], .none); // lparen ais.pushIndentOneShot(); - try renderExpression(ais, tree, datas[node].lhs, .none); + try renderExpression(gpa, ais, tree, datas[node].lhs, .none); return renderToken(ais, tree, datas[node].rhs, space); // rparen }, .container_decl, .container_decl_comma, - => return renderContainerDecl(ais, tree, tree.containerDecl(node), space), + => return renderContainerDecl(gpa, ais, tree, tree.containerDecl(node), 
space), .container_decl_two, .container_decl_two_comma => { var buffer: [2]ast.Node.Index = undefined; - return renderContainerDecl(ais, tree, tree.containerDeclTwo(&buffer, node), space); + return renderContainerDecl(gpa, ais, tree, tree.containerDeclTwo(&buffer, node), space); }, .container_decl_arg, .container_decl_arg_comma, - => return renderContainerDecl(ais, tree, tree.containerDeclArg(node), space), + => return renderContainerDecl(gpa, ais, tree, tree.containerDeclArg(node), space), .tagged_union, .tagged_union_comma, - => return renderContainerDecl(ais, tree, tree.taggedUnion(node), space), + => return renderContainerDecl(gpa, ais, tree, tree.taggedUnion(node), space), .tagged_union_two, .tagged_union_two_comma => { var buffer: [2]ast.Node.Index = undefined; - return renderContainerDecl(ais, tree, tree.taggedUnionTwo(&buffer, node), space); + return renderContainerDecl(gpa, ais, tree, tree.taggedUnionTwo(&buffer, node), space); }, .tagged_union_enum_tag, .tagged_union_enum_tag_comma, - => return renderContainerDecl(ais, tree, tree.taggedUnionEnumTag(node), space), + => return renderContainerDecl(gpa, ais, tree, tree.taggedUnionEnumTag(node), space), .error_set_decl => { const error_token = main_tokens[node]; @@ -579,35 +580,35 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac .builtin_call_two, .builtin_call_two_comma => { if (datas[node].lhs == 0) { - return renderBuiltinCall(ais, tree, main_tokens[node], &.{}, space); + return renderBuiltinCall(gpa, ais, tree, main_tokens[node], &.{}, space); } else if (datas[node].rhs == 0) { - return renderBuiltinCall(ais, tree, main_tokens[node], &.{datas[node].lhs}, space); + return renderBuiltinCall(gpa, ais, tree, main_tokens[node], &.{datas[node].lhs}, space); } else { - return renderBuiltinCall(ais, tree, main_tokens[node], &.{ datas[node].lhs, datas[node].rhs }, space); + return renderBuiltinCall(gpa, ais, tree, main_tokens[node], &.{ datas[node].lhs, datas[node].rhs }, space); 
} }, .builtin_call, .builtin_call_comma => { const params = tree.extra_data[datas[node].lhs..datas[node].rhs]; - return renderBuiltinCall(ais, tree, main_tokens[node], params, space); + return renderBuiltinCall(gpa, ais, tree, main_tokens[node], params, space); }, .fn_proto_simple => { var params: [1]ast.Node.Index = undefined; - return renderFnProto(ais, tree, tree.fnProtoSimple(¶ms, node), space); + return renderFnProto(gpa, ais, tree, tree.fnProtoSimple(¶ms, node), space); }, - .fn_proto_multi => return renderFnProto(ais, tree, tree.fnProtoMulti(node), space), + .fn_proto_multi => return renderFnProto(gpa, ais, tree, tree.fnProtoMulti(node), space), .fn_proto_one => { var params: [1]ast.Node.Index = undefined; - return renderFnProto(ais, tree, tree.fnProtoOne(¶ms, node), space); + return renderFnProto(gpa, ais, tree, tree.fnProtoOne(¶ms, node), space); }, - .fn_proto => return renderFnProto(ais, tree, tree.fnProto(node), space), + .fn_proto => return renderFnProto(gpa, ais, tree, tree.fnProto(node), space), .anyframe_type => { const main_token = main_tokens[node]; if (datas[node].rhs != 0) { try renderToken(ais, tree, main_token, .none); // anyframe try renderToken(ais, tree, main_token + 1, .none); // -> - return renderExpression(ais, tree, datas[node].rhs, space); + return renderExpression(gpa, ais, tree, datas[node].rhs, space); } else { return renderToken(ais, tree, main_token, space); // anyframe } @@ -624,7 +625,7 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac try renderToken(ais, tree, switch_token, .space); // switch keyword try renderToken(ais, tree, switch_token + 1, .none); // lparen - try renderExpression(ais, tree, condition, .none); // condtion expression + try renderExpression(gpa, ais, tree, condition, .none); // condtion expression try renderToken(ais, tree, rparen, .space); // rparen ais.pushIndentNextLine(); @@ -632,26 +633,26 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: 
Spac try renderToken(ais, tree, rparen + 1, .none); // lbrace } else { try renderToken(ais, tree, rparen + 1, .newline); // lbrace - try renderExpressions(ais, tree, cases, .comma); + try renderExpressions(gpa, ais, tree, cases, .comma); } ais.popIndent(); return renderToken(ais, tree, tree.lastToken(node), space); // rbrace }, - .switch_case_one => return renderSwitchCase(ais, tree, tree.switchCaseOne(node), space), - .switch_case => return renderSwitchCase(ais, tree, tree.switchCase(node), space), + .switch_case_one => return renderSwitchCase(gpa, ais, tree, tree.switchCaseOne(node), space), + .switch_case => return renderSwitchCase(gpa, ais, tree, tree.switchCase(node), space), - .while_simple => return renderWhile(ais, tree, tree.whileSimple(node), space), - .while_cont => return renderWhile(ais, tree, tree.whileCont(node), space), - .@"while" => return renderWhile(ais, tree, tree.whileFull(node), space), - .for_simple => return renderWhile(ais, tree, tree.forSimple(node), space), - .@"for" => return renderWhile(ais, tree, tree.forFull(node), space), + .while_simple => return renderWhile(gpa, ais, tree, tree.whileSimple(node), space), + .while_cont => return renderWhile(gpa, ais, tree, tree.whileCont(node), space), + .@"while" => return renderWhile(gpa, ais, tree, tree.whileFull(node), space), + .for_simple => return renderWhile(gpa, ais, tree, tree.forSimple(node), space), + .@"for" => return renderWhile(gpa, ais, tree, tree.forFull(node), space), - .if_simple => return renderIf(ais, tree, tree.ifSimple(node), space), - .@"if" => return renderIf(ais, tree, tree.ifFull(node), space), + .if_simple => return renderIf(gpa, ais, tree, tree.ifSimple(node), space), + .@"if" => return renderIf(gpa, ais, tree, tree.ifFull(node), space), - .asm_simple => return renderAsm(ais, tree, tree.asmSimple(node), space), - .@"asm" => return renderAsm(ais, tree, tree.asmFull(node), space), + .asm_simple => return renderAsm(gpa, ais, tree, tree.asmSimple(node), space), + .@"asm" => 
return renderAsm(gpa, ais, tree, tree.asmFull(node), space), .enum_literal => { try renderToken(ais, tree, main_tokens[node] - 1, .none); // . @@ -676,22 +677,24 @@ fn renderExpression(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Spac // TODO: handle comments inside the brackets fn renderArrayType( + gpa: *Allocator, ais: *Ais, tree: ast.Tree, array_type: ast.full.ArrayType, space: Space, ) Error!void { try renderToken(ais, tree, array_type.ast.lbracket, .none); // lbracket - try renderExpression(ais, tree, array_type.ast.elem_count, .none); + try renderExpression(gpa, ais, tree, array_type.ast.elem_count, .none); if (array_type.ast.sentinel) |sentinel| { try renderToken(ais, tree, tree.firstToken(sentinel) - 1, .none); // colon - try renderExpression(ais, tree, sentinel, .none); + try renderExpression(gpa, ais, tree, sentinel, .none); } try renderToken(ais, tree, tree.firstToken(array_type.ast.elem_type) - 1, .none); // rbracket - return renderExpression(ais, tree, array_type.ast.elem_type, space); + return renderExpression(gpa, ais, tree, array_type.ast.elem_type, space); } fn renderPtrType( + gpa: *Allocator, ais: *Ais, tree: ast.Tree, ptr_type: ast.full.PtrType, @@ -707,7 +710,7 @@ fn renderPtrType( if (tree.tokens.items(.tag)[ptr_type.ast.main_token] == .asterisk_asterisk and ptr_type.ast.main_token == tree.nodes.items(.main_token)[ptr_type.ast.child_type]) { - return renderExpression(ais, tree, ptr_type.ast.child_type, space); + return renderExpression(gpa, ais, tree, ptr_type.ast.child_type, space); } try renderToken(ais, tree, ptr_type.ast.main_token, .none); // asterisk }, @@ -720,7 +723,7 @@ fn renderPtrType( try renderToken(ais, tree, ptr_type.ast.main_token - 1, .none); // lbracket try renderToken(ais, tree, ptr_type.ast.main_token, .none); // asterisk try renderToken(ais, tree, ptr_type.ast.main_token + 1, .none); // colon - try renderExpression(ais, tree, ptr_type.ast.sentinel, .none); + try renderExpression(gpa, ais, tree, 
ptr_type.ast.sentinel, .none); try renderToken(ais, tree, tree.lastToken(ptr_type.ast.sentinel) + 1, .none); // rbracket } }, @@ -737,7 +740,7 @@ fn renderPtrType( } else { try renderToken(ais, tree, ptr_type.ast.main_token, .none); // lbracket try renderToken(ais, tree, ptr_type.ast.main_token + 1, .none); // colon - try renderExpression(ais, tree, ptr_type.ast.sentinel, .none); + try renderExpression(gpa, ais, tree, ptr_type.ast.sentinel, .none); try renderToken(ais, tree, tree.lastToken(ptr_type.ast.sentinel) + 1, .none); // rbracket } }, @@ -751,13 +754,13 @@ fn renderPtrType( const align_first = tree.firstToken(ptr_type.ast.align_node); try renderToken(ais, tree, align_first - 2, .none); // align try renderToken(ais, tree, align_first - 1, .none); // lparen - try renderExpression(ais, tree, ptr_type.ast.align_node, .none); + try renderExpression(gpa, ais, tree, ptr_type.ast.align_node, .none); if (ptr_type.ast.bit_range_start != 0) { assert(ptr_type.ast.bit_range_end != 0); try renderToken(ais, tree, tree.firstToken(ptr_type.ast.bit_range_start) - 1, .none); // colon - try renderExpression(ais, tree, ptr_type.ast.bit_range_start, .none); + try renderExpression(gpa, ais, tree, ptr_type.ast.bit_range_start, .none); try renderToken(ais, tree, tree.firstToken(ptr_type.ast.bit_range_end) - 1, .none); // colon - try renderExpression(ais, tree, ptr_type.ast.bit_range_end, .none); + try renderExpression(gpa, ais, tree, ptr_type.ast.bit_range_end, .none); try renderToken(ais, tree, tree.lastToken(ptr_type.ast.bit_range_end) + 1, .space); // rparen } else { try renderToken(ais, tree, tree.lastToken(ptr_type.ast.align_node) + 1, .space); // rparen @@ -772,10 +775,11 @@ fn renderPtrType( try renderToken(ais, tree, volatile_token, .space); } - try renderExpression(ais, tree, ptr_type.ast.child_type, space); + try renderExpression(gpa, ais, tree, ptr_type.ast.child_type, space); } fn renderSlice( + gpa: *Allocator, ais: *Ais, tree: ast.Tree, slice: ast.full.Slice, @@ 
-787,11 +791,11 @@ fn renderSlice( const after_start_space = if (after_start_space_bool) Space.space else Space.none; const after_dots_space = if (slice.ast.end != 0) after_start_space else Space.none; - try renderExpression(ais, tree, slice.ast.sliced, .none); + try renderExpression(gpa, ais, tree, slice.ast.sliced, .none); try renderToken(ais, tree, slice.ast.lbracket, .none); // lbracket const start_last = tree.lastToken(slice.ast.start); - try renderExpression(ais, tree, slice.ast.start, after_start_space); + try renderExpression(gpa, ais, tree, slice.ast.start, after_start_space); try renderToken(ais, tree, start_last + 1, after_dots_space); // ellipsis2 ("..") if (slice.ast.end == 0) { return renderToken(ais, tree, start_last + 2, space); // rbracket @@ -799,17 +803,18 @@ fn renderSlice( const end_last = tree.lastToken(slice.ast.end); const after_end_space = if (slice.ast.sentinel != 0) Space.space else Space.none; - try renderExpression(ais, tree, slice.ast.end, after_end_space); + try renderExpression(gpa, ais, tree, slice.ast.end, after_end_space); if (slice.ast.sentinel == 0) { return renderToken(ais, tree, end_last + 1, space); // rbracket } try renderToken(ais, tree, end_last + 1, .none); // colon - try renderExpression(ais, tree, slice.ast.sentinel, .none); + try renderExpression(gpa, ais, tree, slice.ast.sentinel, .none); try renderToken(ais, tree, tree.lastToken(slice.ast.sentinel) + 1, space); // rbracket } fn renderAsmOutput( + gpa: *Allocator, ais: *Ais, tree: ast.Tree, asm_output: ast.Node.Index, @@ -830,7 +835,7 @@ fn renderAsmOutput( if (token_tags[symbolic_name + 4] == .arrow) { try renderToken(ais, tree, symbolic_name + 4, .space); // -> - try renderExpression(ais, tree, datas[asm_output].lhs, Space.none); + try renderExpression(gpa, ais, tree, datas[asm_output].lhs, Space.none); return renderToken(ais, tree, datas[asm_output].rhs, space); // rparen } else { try renderToken(ais, tree, symbolic_name + 4, .none); // ident @@ -839,6 +844,7 @@ fn 
renderAsmOutput( } fn renderAsmInput( + gpa: *Allocator, ais: *Ais, tree: ast.Tree, asm_input: ast.Node.Index, @@ -855,11 +861,11 @@ fn renderAsmInput( try renderToken(ais, tree, symbolic_name + 1, .space); // rbracket try renderToken(ais, tree, symbolic_name + 2, .space); // "constraint" try renderToken(ais, tree, symbolic_name + 3, .none); // lparen - try renderExpression(ais, tree, datas[asm_input].lhs, Space.none); + try renderExpression(gpa, ais, tree, datas[asm_input].lhs, Space.none); return renderToken(ais, tree, datas[asm_input].rhs, space); // rparen } -fn renderVarDecl(ais: *Ais, tree: ast.Tree, var_decl: ast.full.VarDecl) Error!void { +fn renderVarDecl(gpa: *Allocator, ais: *Ais, tree: ast.Tree, var_decl: ast.full.VarDecl) Error!void { if (var_decl.visib_token) |visib_token| { try renderToken(ais, tree, visib_token, Space.space); // pub } @@ -896,9 +902,9 @@ fn renderVarDecl(ais: *Ais, tree: ast.Tree, var_decl: ast.full.VarDecl) Error!vo if (var_decl.ast.align_node != 0 or var_decl.ast.section_node != 0 or var_decl.ast.init_node != 0) { - try renderExpression(ais, tree, var_decl.ast.type_node, .space); + try renderExpression(gpa, ais, tree, var_decl.ast.type_node, .space); } else { - try renderExpression(ais, tree, var_decl.ast.type_node, .none); + try renderExpression(gpa, ais, tree, var_decl.ast.type_node, .none); const semicolon = tree.lastToken(var_decl.ast.type_node) + 1; return renderToken(ais, tree, semicolon, Space.newline); // ; } @@ -910,7 +916,7 @@ fn renderVarDecl(ais: *Ais, tree: ast.Tree, var_decl: ast.full.VarDecl) Error!vo const rparen = tree.lastToken(var_decl.ast.align_node) + 1; try renderToken(ais, tree, align_kw, Space.none); // align try renderToken(ais, tree, lparen, Space.none); // ( - try renderExpression(ais, tree, var_decl.ast.align_node, Space.none); + try renderExpression(gpa, ais, tree, var_decl.ast.align_node, Space.none); if (var_decl.ast.section_node != 0 or var_decl.ast.init_node != 0) { try renderToken(ais, tree, 
rparen, .space); // ) } else { @@ -925,7 +931,7 @@ fn renderVarDecl(ais: *Ais, tree: ast.Tree, var_decl: ast.full.VarDecl) Error!vo const rparen = tree.lastToken(var_decl.ast.section_node) + 1; try renderToken(ais, tree, section_kw, Space.none); // linksection try renderToken(ais, tree, lparen, Space.none); // ( - try renderExpression(ais, tree, var_decl.ast.section_node, Space.none); + try renderExpression(gpa, ais, tree, var_decl.ast.section_node, Space.none); if (var_decl.ast.init_node != 0) { try renderToken(ais, tree, rparen, .space); // ) } else { @@ -943,11 +949,11 @@ fn renderVarDecl(ais: *Ais, tree: ast.Tree, var_decl: ast.full.VarDecl) Error!vo ais.popIndent(); } ais.pushIndentOneShot(); - try renderExpression(ais, tree, var_decl.ast.init_node, .semicolon); + try renderExpression(gpa, ais, tree, var_decl.ast.init_node, .semicolon); } -fn renderIf(ais: *Ais, tree: ast.Tree, if_node: ast.full.If, space: Space) Error!void { - return renderWhile(ais, tree, .{ +fn renderIf(gpa: *Allocator, ais: *Ais, tree: ast.Tree, if_node: ast.full.If, space: Space) Error!void { + return renderWhile(gpa, ais, tree, .{ .ast = .{ .while_token = if_node.ast.if_token, .cond_expr = if_node.ast.cond_expr, @@ -965,7 +971,7 @@ fn renderIf(ais: *Ais, tree: ast.Tree, if_node: ast.full.If, space: Space) Error /// Note that this function is additionally used to render if and for expressions, with /// respective values set to null. 
-fn renderWhile(ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Space) Error!void { +fn renderWhile(gpa: *Allocator, ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Space) Error!void { const node_tags = tree.nodes.items(.tag); const token_tags = tree.tokens.items(.tag); @@ -980,7 +986,7 @@ fn renderWhile(ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Spa try renderToken(ais, tree, while_node.ast.while_token, .space); // if try renderToken(ais, tree, while_node.ast.while_token + 1, .none); // lparen - try renderExpression(ais, tree, while_node.ast.cond_expr, .none); // condition + try renderExpression(gpa, ais, tree, while_node.ast.cond_expr, .none); // condition const then_tag = node_tags[while_node.ast.then_expr]; if (nodeIsBlock(then_tag) and !nodeIsIf(then_tag)) { @@ -1023,21 +1029,21 @@ fn renderWhile(ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Spa const lparen = tree.firstToken(while_node.ast.cont_expr) - 1; try renderToken(ais, tree, lparen - 1, .space); // : try renderToken(ais, tree, lparen, .none); // lparen - try renderExpression(ais, tree, while_node.ast.cont_expr, .none); + try renderExpression(gpa, ais, tree, while_node.ast.cont_expr, .none); const brace_space: Space = if (ais.isLineOverIndented()) .newline else .space; try renderToken(ais, tree, rparen, brace_space); // rparen } if (while_node.ast.else_expr != 0) { - try renderExpression(ais, tree, while_node.ast.then_expr, Space.space); + try renderExpression(gpa, ais, tree, while_node.ast.then_expr, Space.space); try renderToken(ais, tree, while_node.else_token, .space); // else if (while_node.error_token) |error_token| { try renderToken(ais, tree, error_token - 1, .none); // | try renderToken(ais, tree, error_token, .none); // identifier try renderToken(ais, tree, error_token + 1, .space); // | } - return renderExpression(ais, tree, while_node.ast.else_expr, space); + return renderExpression(gpa, ais, tree, while_node.ast.else_expr, 
space); } else { - return renderExpression(ais, tree, while_node.ast.then_expr, space); + return renderExpression(gpa, ais, tree, while_node.ast.then_expr, space); } } @@ -1067,11 +1073,11 @@ fn renderWhile(ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Spa break :blk ident + 1; } }; - const after_space: Space = if (while_node.ast.cont_expr != 0) .space else .newline; + const after_space: Space = if (while_node.ast.cont_expr != 0) .space else .newline; try renderToken(ais, tree, pipe, after_space); // | } else { ais.pushIndent(); - const after_space: Space = if (while_node.ast.cont_expr != 0) .space else .newline; + const after_space: Space = if (while_node.ast.cont_expr != 0) .space else .newline; try renderToken(ais, tree, rparen, after_space); // rparen ais.popIndent(); } @@ -1080,12 +1086,12 @@ fn renderWhile(ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Spa const cont_lparen = tree.firstToken(while_node.ast.cont_expr) - 1; try renderToken(ais, tree, cont_lparen - 1, .space); // : try renderToken(ais, tree, cont_lparen, .none); // lparen - try renderExpression(ais, tree, while_node.ast.cont_expr, .none); + try renderExpression(gpa, ais, tree, while_node.ast.cont_expr, .none); try renderToken(ais, tree, cont_rparen, .newline); // rparen } if (while_node.ast.else_expr != 0) { ais.pushIndent(); - try renderExpression(ais, tree, while_node.ast.then_expr, Space.newline); + try renderExpression(gpa, ais, tree, while_node.ast.then_expr, Space.newline); ais.popIndent(); const else_is_block = nodeIsBlock(node_tags[while_node.ast.else_expr]); if (else_is_block) { @@ -1095,7 +1101,7 @@ fn renderWhile(ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Spa try renderToken(ais, tree, error_token, .none); // identifier try renderToken(ais, tree, error_token + 1, .space); // | } - return renderExpression(ais, tree, while_node.ast.else_expr, space); + return renderExpression(gpa, ais, tree, while_node.ast.else_expr, space); } else { if 
(while_node.error_token) |error_token| { try renderToken(ais, tree, while_node.else_token, .space); // else @@ -1106,13 +1112,13 @@ fn renderWhile(ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Spa try renderToken(ais, tree, while_node.else_token, .newline); // else } ais.pushIndent(); - try renderExpression(ais, tree, while_node.ast.else_expr, space); + try renderExpression(gpa, ais, tree, while_node.ast.else_expr, space); ais.popIndent(); return; } } else { ais.pushIndent(); - try renderExpression(ais, tree, while_node.ast.then_expr, space); + try renderExpression(gpa, ais, tree, while_node.ast.then_expr, space); ais.popIndent(); return; } @@ -1152,12 +1158,12 @@ fn renderWhile(ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Spa const cont_lparen = tree.firstToken(while_node.ast.cont_expr) - 1; try renderToken(ais, tree, cont_lparen - 1, .space); // : try renderToken(ais, tree, cont_lparen, .none); // lparen - try renderExpression(ais, tree, while_node.ast.cont_expr, .none); + try renderExpression(gpa, ais, tree, while_node.ast.cont_expr, .none); try renderToken(ais, tree, cont_rparen, .space); // rparen } if (while_node.ast.else_expr != 0) { - try renderExpression(ais, tree, while_node.ast.then_expr, .space); + try renderExpression(gpa, ais, tree, while_node.ast.then_expr, .space); try renderToken(ais, tree, while_node.else_token, .space); // else if (while_node.error_token) |error_token| { @@ -1166,13 +1172,14 @@ fn renderWhile(ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Spa try renderToken(ais, tree, error_token + 1, .space); // | } - return renderExpression(ais, tree, while_node.ast.else_expr, space); + return renderExpression(gpa, ais, tree, while_node.ast.else_expr, space); } else { - return renderExpression(ais, tree, while_node.ast.then_expr, space); + return renderExpression(gpa, ais, tree, while_node.ast.then_expr, space); } } fn renderContainerField( + gpa: *Allocator, ais: *Ais, tree: ast.Tree, field: 
ast.full.ContainerField, @@ -1190,26 +1197,26 @@ fn renderContainerField( try renderToken(ais, tree, field.ast.name_token + 1, .space); // : if (field.ast.align_expr != 0) { - try renderExpression(ais, tree, field.ast.type_expr, .space); // type + try renderExpression(gpa, ais, tree, field.ast.type_expr, .space); // type const align_token = tree.firstToken(field.ast.align_expr) - 2; try renderToken(ais, tree, align_token, .none); // align try renderToken(ais, tree, align_token + 1, .none); // ( - try renderExpression(ais, tree, field.ast.align_expr, .none); // alignment + try renderExpression(gpa, ais, tree, field.ast.align_expr, .none); // alignment const rparen = tree.lastToken(field.ast.align_expr) + 1; return renderTokenComma(ais, tree, rparen, space); // ) } else { - return renderExpressionComma(ais, tree, field.ast.type_expr, space); // type + return renderExpressionComma(gpa, ais, tree, field.ast.type_expr, space); // type } } if (field.ast.type_expr == 0 and field.ast.value_expr != 0) { try renderToken(ais, tree, field.ast.name_token, .space); // name try renderToken(ais, tree, field.ast.name_token + 1, .space); // = - return renderExpressionComma(ais, tree, field.ast.value_expr, space); // value + return renderExpressionComma(gpa, ais, tree, field.ast.value_expr, space); // value } try renderToken(ais, tree, field.ast.name_token, .none); // name try renderToken(ais, tree, field.ast.name_token + 1, .space); // : - try renderExpression(ais, tree, field.ast.type_expr, .space); // type + try renderExpression(gpa, ais, tree, field.ast.type_expr, .space); // type if (field.ast.align_expr != 0) { const lparen_token = tree.firstToken(field.ast.align_expr) - 1; @@ -1217,15 +1224,16 @@ fn renderContainerField( const rparen_token = tree.lastToken(field.ast.align_expr) + 1; try renderToken(ais, tree, align_kw, .none); // align try renderToken(ais, tree, lparen_token, .none); // ( - try renderExpression(ais, tree, field.ast.align_expr, .none); // alignment + try 
renderExpression(gpa, ais, tree, field.ast.align_expr, .none); // alignment try renderToken(ais, tree, rparen_token, .space); // ) } const eq_token = tree.firstToken(field.ast.value_expr) - 1; try renderToken(ais, tree, eq_token, .space); // = - return renderExpressionComma(ais, tree, field.ast.value_expr, space); // value + return renderExpressionComma(gpa, ais, tree, field.ast.value_expr, space); // value } fn renderBuiltinCall( + gpa: *Allocator, ais: *Ais, tree: ast.Tree, builtin_token: ast.TokenIndex, @@ -1249,7 +1257,7 @@ fn renderBuiltinCall( try renderToken(ais, tree, builtin_token + 1, .none); // ( for (params) |param_node, i| { - try renderExpression(ais, tree, param_node, .none); + try renderExpression(gpa, ais, tree, param_node, .none); if (i + 1 < params.len) { const comma_token = tree.lastToken(param_node) + 1; @@ -1263,7 +1271,7 @@ fn renderBuiltinCall( try renderToken(ais, tree, builtin_token + 1, Space.newline); // ( for (params) |param_node| { - try renderExpression(ais, tree, param_node, .comma); + try renderExpression(gpa, ais, tree, param_node, .comma); } ais.popIndent(); @@ -1271,7 +1279,7 @@ fn renderBuiltinCall( } } -fn renderFnProto(ais: *Ais, tree: ast.Tree, fn_proto: ast.full.FnProto, space: Space) Error!void { +fn renderFnProto(gpa: *Allocator, ais: *Ais, tree: ast.Tree, fn_proto: ast.full.FnProto, space: Space) Error!void { const token_tags = tree.tokens.items(.tag); const token_starts = tree.tokens.items(.start); @@ -1372,7 +1380,7 @@ fn renderFnProto(ais: *Ais, tree: ast.Tree, fn_proto: ast.full.FnProto, space: S } const param = fn_proto.ast.params[param_i]; param_i += 1; - try renderExpression(ais, tree, param, .none); + try renderExpression(gpa, ais, tree, param, .none); last_param_token = tree.lastToken(param); } } else { @@ -1417,7 +1425,7 @@ fn renderFnProto(ais: *Ais, tree: ast.Tree, fn_proto: ast.full.FnProto, space: S } const param = fn_proto.ast.params[param_i]; param_i += 1; - try renderExpression(ais, tree, param, .comma); 
+ try renderExpression(gpa, ais, tree, param, .comma); last_param_token = tree.lastToken(param) + 1; } ais.popIndent(); @@ -1431,7 +1439,7 @@ fn renderFnProto(ais: *Ais, tree: ast.Tree, fn_proto: ast.full.FnProto, space: S try renderToken(ais, tree, align_lparen - 1, .none); // align try renderToken(ais, tree, align_lparen, .none); // ( - try renderExpression(ais, tree, fn_proto.ast.align_expr, .none); + try renderExpression(gpa, ais, tree, fn_proto.ast.align_expr, .none); try renderToken(ais, tree, align_rparen, .space); // ) } @@ -1441,7 +1449,7 @@ fn renderFnProto(ais: *Ais, tree: ast.Tree, fn_proto: ast.full.FnProto, space: S try renderToken(ais, tree, section_lparen - 1, .none); // section try renderToken(ais, tree, section_lparen, .none); // ( - try renderExpression(ais, tree, fn_proto.ast.section_expr, .none); + try renderExpression(gpa, ais, tree, fn_proto.ast.section_expr, .none); try renderToken(ais, tree, section_rparen, .space); // ) } @@ -1451,7 +1459,7 @@ fn renderFnProto(ais: *Ais, tree: ast.Tree, fn_proto: ast.full.FnProto, space: S try renderToken(ais, tree, callconv_lparen - 1, .none); // callconv try renderToken(ais, tree, callconv_lparen, .none); // ( - try renderExpression(ais, tree, fn_proto.ast.callconv_expr, .none); + try renderExpression(gpa, ais, tree, fn_proto.ast.callconv_expr, .none); try renderToken(ais, tree, callconv_rparen, .space); // ) } else if (is_inline) { try ais.writer().writeAll("callconv(.Inline) "); @@ -1460,10 +1468,11 @@ fn renderFnProto(ais: *Ais, tree: ast.Tree, fn_proto: ast.full.FnProto, space: S if (token_tags[maybe_bang] == .bang) { try renderToken(ais, tree, maybe_bang, .none); // ! 
} - return renderExpression(ais, tree, fn_proto.ast.return_type, space); + return renderExpression(gpa, ais, tree, fn_proto.ast.return_type, space); } fn renderSwitchCase( + gpa: *Allocator, ais: *Ais, tree: ast.Tree, switch_case: ast.full.SwitchCase, @@ -1477,14 +1486,14 @@ fn renderSwitchCase( try renderToken(ais, tree, switch_case.ast.arrow_token - 1, .space); // else keyword } else if (switch_case.ast.values.len == 1) { // render on one line and drop the trailing comma if any - try renderExpression(ais, tree, switch_case.ast.values[0], .space); + try renderExpression(gpa, ais, tree, switch_case.ast.values[0], .space); } else if (trailing_comma) { // Render each value on a new line - try renderExpressions(ais, tree, switch_case.ast.values, .comma); + try renderExpressions(gpa, ais, tree, switch_case.ast.values, .comma); } else { // Render on one line for (switch_case.ast.values) |value_expr| { - try renderExpression(ais, tree, value_expr, .comma_space); + try renderExpression(gpa, ais, tree, value_expr, .comma_space); } } @@ -1503,10 +1512,11 @@ fn renderSwitchCase( } } - try renderExpression(ais, tree, switch_case.ast.target_expr, space); + try renderExpression(gpa, ais, tree, switch_case.ast.target_expr, space); } fn renderBlock( + gpa: *Allocator, ais: *Ais, tree: ast.Tree, block_node: ast.Node.Index, @@ -1533,11 +1543,11 @@ fn renderBlock( for (statements) |stmt, i| { if (i != 0) try renderExtraNewline(ais, tree, stmt); switch (node_tags[stmt]) { - .global_var_decl => try renderVarDecl(ais, tree, tree.globalVarDecl(stmt)), - .local_var_decl => try renderVarDecl(ais, tree, tree.localVarDecl(stmt)), - .simple_var_decl => try renderVarDecl(ais, tree, tree.simpleVarDecl(stmt)), - .aligned_var_decl => try renderVarDecl(ais, tree, tree.alignedVarDecl(stmt)), - else => try renderExpression(ais, tree, stmt, .semicolon), + .global_var_decl => try renderVarDecl(gpa, ais, tree, tree.globalVarDecl(stmt)), + .local_var_decl => try renderVarDecl(gpa, ais, tree, 
tree.localVarDecl(stmt)), + .simple_var_decl => try renderVarDecl(gpa, ais, tree, tree.simpleVarDecl(stmt)), + .aligned_var_decl => try renderVarDecl(gpa, ais, tree, tree.alignedVarDecl(stmt)), + else => try renderExpression(gpa, ais, tree, stmt, .semicolon), } } } @@ -1548,6 +1558,7 @@ fn renderBlock( // TODO: handle comments between fields fn renderStructInit( + gpa: *Allocator, ais: *Ais, tree: ast.Tree, struct_init: ast.full.StructInit, @@ -1557,7 +1568,7 @@ fn renderStructInit( if (struct_init.ast.type_expr == 0) { try renderToken(ais, tree, struct_init.ast.lbrace - 1, .none); // . } else { - try renderExpression(ais, tree, struct_init.ast.type_expr, .none); // T + try renderExpression(gpa, ais, tree, struct_init.ast.type_expr, .none); // T } if (struct_init.ast.fields.len == 0) { try renderToken(ais, tree, struct_init.ast.lbrace, .none); // lbrace @@ -1573,7 +1584,7 @@ fn renderStructInit( try renderToken(ais, tree, struct_init.ast.lbrace + 1, .none); // . try renderToken(ais, tree, struct_init.ast.lbrace + 2, .space); // name try renderToken(ais, tree, struct_init.ast.lbrace + 3, .space); // = - try renderExpression(ais, tree, struct_init.ast.fields[0], .comma); + try renderExpression(gpa, ais, tree, struct_init.ast.fields[0], .comma); for (struct_init.ast.fields[1..]) |field_init| { const init_token = tree.firstToken(field_init); @@ -1581,7 +1592,7 @@ fn renderStructInit( try renderToken(ais, tree, init_token - 3, .none); // . try renderToken(ais, tree, init_token - 2, .space); // name try renderToken(ais, tree, init_token - 1, .space); // = - try renderExpression(ais, tree, field_init, .comma); + try renderExpression(gpa, ais, tree, field_init, .comma); } ais.popIndent(); @@ -1595,7 +1606,7 @@ fn renderStructInit( try renderToken(ais, tree, init_token - 3, .none); // . 
try renderToken(ais, tree, init_token - 2, .space); // name try renderToken(ais, tree, init_token - 1, .space); // = - try renderExpression(ais, tree, field_init, .comma_space); + try renderExpression(gpa, ais, tree, field_init, .comma_space); } return renderToken(ais, tree, last_field_token + 1, space); // rbrace @@ -1604,16 +1615,19 @@ fn renderStructInit( // TODO: handle comments between elements fn renderArrayInit( + gpa: *Allocator, ais: *Ais, tree: ast.Tree, array_init: ast.full.ArrayInit, space: Space, ) Error!void { const token_tags = tree.tokens.items(.tag); + const token_starts = tree.tokens.items(.start); + if (array_init.ast.type_expr == 0) { try renderToken(ais, tree, array_init.ast.lbrace - 1, .none); // . } else { - try renderExpression(ais, tree, array_init.ast.type_expr, .none); // T + try renderExpression(gpa, ais, tree, array_init.ast.type_expr, .none); // T } if (array_init.ast.elements.len == 0) { @@ -1626,30 +1640,172 @@ fn renderArrayInit( const last_elem = array_init.ast.elements[array_init.ast.elements.len - 1]; const last_elem_token = tree.lastToken(last_elem); const trailing_comma = token_tags[last_elem_token + 1] == .comma; - if (trailing_comma) { - // Render one element per line. - ais.pushIndentNextLine(); - try renderToken(ais, tree, array_init.ast.lbrace, .newline); - try renderExpressions(ais, tree, array_init.ast.elements, .comma); - ais.popIndent(); - return renderToken(ais, tree, last_elem_token + 2, space); // rbrace - } else { + if (!trailing_comma) { // Render all on one line, no trailing comma. 
if (array_init.ast.elements.len == 1) { // If there is only one element, we don't use spaces try renderToken(ais, tree, array_init.ast.lbrace, .none); - try renderExpression(ais, tree, array_init.ast.elements[0], .none); + try renderExpression(gpa, ais, tree, array_init.ast.elements[0], .none); } else { try renderToken(ais, tree, array_init.ast.lbrace, .space); for (array_init.ast.elements) |elem| { - try renderExpression(ais, tree, elem, .comma_space); + try renderExpression(gpa, ais, tree, elem, .comma_space); } } return renderToken(ais, tree, last_elem_token + 1, space); // rbrace } + + ais.pushIndentNextLine(); + try renderToken(ais, tree, array_init.ast.lbrace, .newline); + + const rbrace = last_elem_token + 2; + assert(token_tags[rbrace] == .r_brace); + + var expr_index: usize = 0; + while (rowSize(tree, array_init.ast.elements[expr_index..], rbrace)) |row_size| { + const row_exprs = array_init.ast.elements[expr_index..]; + // A place to store the width of each expression and its column's maximum + const widths = try gpa.alloc(usize, row_exprs.len + row_size); + defer gpa.free(widths); + mem.set(usize, widths, 0); + + const expr_newlines = try gpa.alloc(bool, row_exprs.len); + defer gpa.free(expr_newlines); + mem.set(bool, expr_newlines, false); + + const expr_widths = widths[0 .. 
widths.len - row_size]; + const column_widths = widths[widths.len - row_size ..]; + + // Find next row with trailing comment (if any) to end the current section + const section_end = sec_end: { + var this_line_first_expr: usize = 0; + var this_line_size = rowSize(tree, row_exprs, rbrace); + for (row_exprs) |expr, i| { + // Ignore comment on first line of this section + if (i == 0 or tree.tokensOnSameLine(tree.firstToken(row_exprs[0]), tree.lastToken(expr))) continue; + // Track start of line containing comment + if (!tree.tokensOnSameLine(tree.firstToken(row_exprs[this_line_first_expr]), tree.lastToken(expr))) { + this_line_first_expr = i; + this_line_size = rowSize(tree, row_exprs[this_line_first_expr..], rbrace); + } + + const maybe_comma = tree.lastToken(expr) + 1; + if (token_tags[maybe_comma] == .comma) { + const after_comma_src = tree.source[token_starts[maybe_comma]..token_starts[maybe_comma + 1]]; + const same_line_comment = for (after_comma_src) |byte| switch (byte) { + '\n' => break, + '/' => break :sec_end i - this_line_size.? 
+ 1, // Found row ending in comment + else => continue, + }; + } + } + break :sec_end row_exprs.len; + }; + expr_index += section_end; + + const section_exprs = row_exprs[0..section_end]; + + var sub_expr_buffer = std.ArrayList(u8).init(gpa); + defer sub_expr_buffer.deinit(); + + var auto_indenting_stream = Ais{ + .indent_delta = indent_delta, + .underlying_writer = sub_expr_buffer.writer(), + }; + + // Calculate size of columns in current section + var column_counter: usize = 0; + var single_line = true; + var contains_newline = false; + for (section_exprs) |expr, i| { + sub_expr_buffer.shrinkRetainingCapacity(0); + if (i + 1 < section_exprs.len) { + try renderExpression(gpa, &auto_indenting_stream, tree, expr, .none); + const width = sub_expr_buffer.items.len; + const this_contains_newline = mem.indexOfScalar(u8, sub_expr_buffer.items, '\n') != null; + contains_newline = contains_newline or this_contains_newline; + expr_widths[i] = width; + expr_newlines[i] = this_contains_newline; + + if (!this_contains_newline) { + const column = column_counter % row_size; + column_widths[column] = std.math.max(column_widths[column], width); + + const expr_last_token = tree.lastToken(expr) + 1; + const next_expr = section_exprs[i + 1]; + column_counter += 1; + if (!tree.tokensOnSameLine(expr_last_token, tree.firstToken(next_expr))) single_line = false; + } else { + single_line = false; + column_counter = 0; + } + } else { + try renderExpression(gpa, &auto_indenting_stream, tree, expr, .none); + const width = sub_expr_buffer.items.len; + contains_newline = contains_newline or mem.indexOfScalar(u8, sub_expr_buffer.items, '\n') != null; + expr_widths[i] = width; + expr_newlines[i] = contains_newline; + + if (!contains_newline) { + const column = column_counter % row_size; + column_widths[column] = std.math.max(column_widths[column], width); + } + break; + } + } + + // Render exprs in current section + column_counter = 0; + var last_col_index: usize = row_size - 1; + for 
(section_exprs) |expr, i| { + if (i + 1 < section_exprs.len) { + const next_expr = section_exprs[i + 1]; + try renderExpression(gpa, ais, tree, expr, .none); + + const comma = tree.lastToken(expr) + 1; + + if (column_counter != last_col_index) { + if (!expr_newlines[i] and !expr_newlines[i + 1]) { + // Neither the current or next expression is multiline + try renderToken(ais, tree, comma, .space); // , + assert(column_widths[column_counter % row_size] >= expr_widths[i]); + const padding = column_widths[column_counter % row_size] - expr_widths[i]; + try ais.writer().writeByteNTimes(' ', padding); + + column_counter += 1; + continue; + } + } + if (single_line and row_size != 1) { + try renderToken(ais, tree, comma, .space); // , + continue; + } + + column_counter = 0; + try renderToken(ais, tree, comma, .newline); // , + try renderExtraNewline(ais, tree, next_expr); + } else { + const maybe_comma = tree.lastToken(expr) + 1; + if (token_tags[maybe_comma] == .comma) { + try renderExpression(gpa, ais, tree, expr, .none); // , + try renderToken(ais, tree, maybe_comma, .newline); // , + } else { + try renderExpression(gpa, ais, tree, expr, .comma); // , + } + } + } + + if (expr_index == array_init.ast.elements.len) { + break; + } + } + + ais.popIndent(); + return renderToken(ais, tree, rbrace, space); // rbrace } fn renderContainerDecl( + gpa: *Allocator, ais: *Ais, tree: ast.Tree, container_decl: ast.full.ContainerDecl, @@ -1669,7 +1825,7 @@ fn renderContainerDecl( try renderToken(ais, tree, enum_token, .none); // enum if (container_decl.ast.arg != 0) { try renderToken(ais, tree, enum_token + 1, .none); // lparen - try renderExpression(ais, tree, container_decl.ast.arg, .none); + try renderExpression(gpa, ais, tree, container_decl.ast.arg, .none); const rparen = tree.lastToken(container_decl.ast.arg) + 1; try renderToken(ais, tree, rparen, .none); // rparen try renderToken(ais, tree, rparen + 1, .space); // rparen @@ -1681,7 +1837,7 @@ fn renderContainerDecl( } else if 
(container_decl.ast.arg != 0) { try renderToken(ais, tree, container_decl.ast.main_token, .none); // union try renderToken(ais, tree, container_decl.ast.main_token + 1, .none); // lparen - try renderExpression(ais, tree, container_decl.ast.arg, .none); + try renderExpression(gpa, ais, tree, container_decl.ast.arg, .none); const rparen = tree.lastToken(container_decl.ast.arg) + 1; try renderToken(ais, tree, rparen, .space); // rparen lbrace = rparen + 1; @@ -1717,7 +1873,7 @@ fn renderContainerDecl( // All the declarations on the same line. try renderToken(ais, tree, lbrace, .space); // lbrace for (container_decl.ast.members) |member| { - try renderMember(ais, tree, member, .space); + try renderMember(gpa, ais, tree, member, .space); } return renderToken(ais, tree, rbrace, space); // rbrace } @@ -1725,13 +1881,14 @@ fn renderContainerDecl( // One member per line. ais.pushIndentNextLine(); try renderToken(ais, tree, lbrace, .newline); // lbrace - try renderMembers(ais, tree, container_decl.ast.members); + try renderMembers(gpa, ais, tree, container_decl.ast.members); ais.popIndent(); return renderToken(ais, tree, rbrace, space); // rbrace } fn renderAsm( + gpa: *Allocator, ais: *Ais, tree: ast.Tree, asm_node: ast.full.Asm, @@ -1749,7 +1906,7 @@ fn renderAsm( } if (asm_node.ast.items.len == 0) { - try renderExpression(ais, tree, asm_node.ast.template, .none); + try renderExpression(gpa, ais, tree, asm_node.ast.template, .none); if (asm_node.first_clobber) |first_clobber| { // asm ("foo" ::: "a", "b") var tok_i = first_clobber; @@ -1769,7 +1926,7 @@ fn renderAsm( } ais.pushIndent(); - try renderExpression(ais, tree, asm_node.ast.template, .newline); + try renderExpression(gpa, ais, tree, asm_node.ast.template, .newline); ais.setIndentDelta(asm_indent_delta); const colon1 = tree.lastToken(asm_node.ast.template) + 1; @@ -1783,19 +1940,19 @@ fn renderAsm( for (asm_node.outputs) |asm_output, i| { if (i + 1 < asm_node.outputs.len) { const next_asm_output = 
asm_node.outputs[i + 1]; - try renderAsmOutput(ais, tree, asm_output, .none); + try renderAsmOutput(gpa, ais, tree, asm_output, .none); const comma = tree.firstToken(next_asm_output) - 1; try renderToken(ais, tree, comma, .newline); // , try renderExtraNewlineToken(ais, tree, tree.firstToken(next_asm_output)); } else if (asm_node.inputs.len == 0 and asm_node.first_clobber == null) { - try renderAsmOutput(ais, tree, asm_output, .newline); + try renderAsmOutput(gpa, ais, tree, asm_output, .newline); ais.popIndent(); ais.setIndentDelta(indent_delta); ais.popIndent(); return renderToken(ais, tree, asm_node.ast.rparen, space); // rparen } else { - try renderAsmOutput(ais, tree, asm_output, .newline); + try renderAsmOutput(gpa, ais, tree, asm_output, .newline); const comma_or_colon = tree.lastToken(asm_output) + 1; ais.popIndent(); break :colon2 switch (token_tags[comma_or_colon]) { @@ -1815,19 +1972,19 @@ fn renderAsm( for (asm_node.inputs) |asm_input, i| { if (i + 1 < asm_node.inputs.len) { const next_asm_input = asm_node.inputs[i + 1]; - try renderAsmInput(ais, tree, asm_input, .none); + try renderAsmInput(gpa, ais, tree, asm_input, .none); const first_token = tree.firstToken(next_asm_input); try renderToken(ais, tree, first_token - 1, .newline); // , try renderExtraNewlineToken(ais, tree, first_token); } else if (asm_node.first_clobber == null) { - try renderAsmInput(ais, tree, asm_input, .newline); + try renderAsmInput(gpa, ais, tree, asm_input, .newline); ais.popIndent(); ais.setIndentDelta(indent_delta); ais.popIndent(); return renderToken(ais, tree, asm_node.ast.rparen, space); // rparen } else { - try renderAsmInput(ais, tree, asm_input, .newline); + try renderAsmInput(gpa, ais, tree, asm_input, .newline); const comma_or_colon = tree.lastToken(asm_input) + 1; ais.popIndent(); break :colon3 switch (token_tags[comma_or_colon]) { @@ -1861,6 +2018,7 @@ fn renderAsm( } fn renderCall( + gpa: *Allocator, ais: *Ais, tree: ast.Tree, call: ast.full.Call, @@ -1872,7 
+2030,7 @@ fn renderCall( if (call.async_token) |async_token| { try renderToken(ais, tree, async_token, .space); } - try renderExpression(ais, tree, call.ast.fn_expr, .none); + try renderExpression(gpa, ais, tree, call.ast.fn_expr, .none); const lparen = call.ast.lparen; const params = call.ast.params; @@ -1890,7 +2048,7 @@ fn renderCall( try renderToken(ais, tree, lparen, .newline); // ( for (params) |param_node, i| { if (i + 1 < params.len) { - try renderExpression(ais, tree, param_node, .none); + try renderExpression(gpa, ais, tree, param_node, .none); // Unindent the comma for multiline string literals. const is_multiline_string = @@ -1904,7 +2062,7 @@ fn renderCall( try renderExtraNewline(ais, tree, params[i + 1]); } else { - try renderExpression(ais, tree, param_node, .comma); + try renderExpression(gpa, ais, tree, param_node, .comma); } } ais.popIndent(); @@ -1919,7 +2077,7 @@ fn renderCall( if (this_multiline_string) { ais.pushIndentOneShot(); } - try renderExpression(ais, tree, param_node, .none); + try renderExpression(gpa, ais, tree, param_node, .none); if (i + 1 < params.len) { const comma = tree.lastToken(param_node) + 1; @@ -1934,14 +2092,14 @@ fn renderCall( } /// Render an expression, and the comma that follows it, if it is present in the source. 
-fn renderExpressionComma(ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Space) Error!void { +fn renderExpressionComma(gpa: *Allocator, ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Space) Error!void { const token_tags = tree.tokens.items(.tag); const maybe_comma = tree.lastToken(node) + 1; if (token_tags[maybe_comma] == .comma) { - try renderExpression(ais, tree, node, .none); + try renderExpression(gpa, ais, tree, node, .none); return renderToken(ais, tree, maybe_comma, space); } else { - return renderExpression(ais, tree, node, space); + return renderExpression(gpa, ais, tree, node, space); } } @@ -2218,6 +2376,32 @@ fn nodeCausesSliceOpSpace(tag: ast.Node.Tag) bool { }; } +// Returns the number of nodes in `expr` that are on the same line as `rtoken`, +// or null if they all are on the same line. +fn rowSize(tree: ast.Tree, exprs: []const ast.Node.Index, rtoken: ast.TokenIndex) ?usize { + const token_tags = tree.tokens.items(.tag); + + const first_token = tree.firstToken(exprs[0]); + if (tree.tokensOnSameLine(first_token, rtoken)) { + const maybe_comma = rtoken - 1; + if (token_tags[maybe_comma] == .comma) + return 1; + return null; // no newlines + } + + var count: usize = 1; + for (exprs) |expr, i| { + if (i + 1 < exprs.len) { + const expr_last_token = tree.lastToken(expr) + 1; + if (!tree.tokensOnSameLine(expr_last_token, tree.firstToken(exprs[i + 1]))) return count; + count += 1; + } else { + return count; + } + } + unreachable; +} + /// Automatically inserts indentation of written data by keeping /// track of the current indentation level fn AutoIndentingStream(comptime UnderlyingWriter: type) type { From 1253903fef07d7fd0435310f474bfe2745b41405 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Mon, 22 Feb 2021 21:33:21 -0700 Subject: [PATCH 146/173] zig fmt: struct literal containing a multiline expression I modified this test case to expect different results. 
Now, the trailing comma on a list of struct fields is the only deciding factor, not whether or not the field init expressions contain a newline. --- lib/std/zig/parser_test.zig | 105 +++++++++++++++++++++--------------- 1 file changed, 61 insertions(+), 44 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index dd2755b8ab..4aa6d95b30 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -1594,50 +1594,67 @@ test "zig fmt: struct literal no trailing comma" { ); } -//test "zig fmt: struct literal containing a multiline expression" { -// try testTransform( -// \\const a = A{ .x = if (f1()) 10 else 20 }; -// \\const a = A{ .x = if (f1()) 10 else 20, }; -// \\const a = A{ .x = if (f1()) -// \\ 10 else 20 }; -// \\const a = A{ .x = if (f1()) 10 else 20, .y = f2() + 100 }; -// \\const a = A{ .x = if (f1()) 10 else 20, .y = f2() + 100, }; -// \\const a = A{ .x = if (f1()) -// \\ 10 else 20}; -// \\const a = A{ .x = switch(g) {0 => "ok", else => "no"} }; -// \\ -// , -// \\const a = A{ .x = if (f1()) 10 else 20 }; -// \\const a = A{ -// \\ .x = if (f1()) 10 else 20, -// \\}; -// \\const a = A{ -// \\ .x = if (f1()) -// \\ 10 -// \\ else -// \\ 20, -// \\}; -// \\const a = A{ .x = if (f1()) 10 else 20, .y = f2() + 100 }; -// \\const a = A{ -// \\ .x = if (f1()) 10 else 20, -// \\ .y = f2() + 100, -// \\}; -// \\const a = A{ -// \\ .x = if (f1()) -// \\ 10 -// \\ else -// \\ 20, -// \\}; -// \\const a = A{ -// \\ .x = switch (g) { -// \\ 0 => "ok", -// \\ else => "no", -// \\ }, -// \\}; -// \\ -// ); -//} -// +test "zig fmt: struct literal containing a multiline expression" { + try testTransform( + \\const a = A{ .x = if (f1()) 10 else 20 }; + \\const a = A{ .x = if (f1()) 10 else 20, }; + \\const a = A{ .x = if (f1()) + \\ 10 else 20 }; + \\const a = A{ .x = if (f1()) + \\ 10 else 20,}; + \\const a = A{ .x = if (f1()) 10 else 20, .y = f2() + 100 }; + \\const a = A{ .x = if (f1()) 10 else 20, .y = f2() + 100, }; + 
\\const a = A{ .x = if (f1()) + \\ 10 else 20}; + \\const a = A{ .x = if (f1()) + \\ 10 else 20,}; + \\const a = A{ .x = switch(g) {0 => "ok", else => "no"} }; + \\const a = A{ .x = switch(g) {0 => "ok", else => "no"}, }; + \\ + , + \\const a = A{ .x = if (f1()) 10 else 20 }; + \\const a = A{ + \\ .x = if (f1()) 10 else 20, + \\}; + \\const a = A{ .x = if (f1()) + \\ 10 + \\else + \\ 20 }; + \\const a = A{ + \\ .x = if (f1()) + \\ 10 + \\ else + \\ 20, + \\}; + \\const a = A{ .x = if (f1()) 10 else 20, .y = f2() + 100 }; + \\const a = A{ + \\ .x = if (f1()) 10 else 20, + \\ .y = f2() + 100, + \\}; + \\const a = A{ .x = if (f1()) + \\ 10 + \\else + \\ 20 }; + \\const a = A{ + \\ .x = if (f1()) + \\ 10 + \\ else + \\ 20, + \\}; + \\const a = A{ .x = switch (g) { + \\ 0 => "ok", + \\ else => "no", + \\} }; + \\const a = A{ + \\ .x = switch (g) { + \\ 0 => "ok", + \\ else => "no", + \\ }, + \\}; + \\ + ); +} + //test "zig fmt: array literal with hint" { // try testTransform( // \\const a = []u8{ From 1e3a200ba604c8d19fcc1119d01c2e80f2a8beca Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Mon, 22 Feb 2021 23:25:12 -0700 Subject: [PATCH 147/173] zig fmt: array literal with hint This regresses the test case of `zig fmt` deleting empty line comments. Two open questions here: * What should the rules be about deleting empty line comments? It makes sense usually, but for array initization, empty line comments cause a line break, affecting the row/column alignment. Perhaps we should therefore respect all empty line comments? Or should we special case array initializations? * If we decide to special case some kinds of line comments to respect them (which is status quo!), how should that be implemented? 
--- lib/std/zig/parser_test.zig | 416 ++++++++++++++++++------------------ lib/std/zig/render.zig | 55 ++--- 2 files changed, 230 insertions(+), 241 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 4aa6d95b30..27e49b4c01 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -1655,112 +1655,110 @@ test "zig fmt: struct literal containing a multiline expression" { ); } -//test "zig fmt: array literal with hint" { -// try testTransform( -// \\const a = []u8{ -// \\ 1, 2, // -// \\ 3, -// \\ 4, -// \\ 5, -// \\ 6, -// \\ 7 }; -// \\const a = []u8{ -// \\ 1, 2, // -// \\ 3, -// \\ 4, -// \\ 5, -// \\ 6, -// \\ 7, 8 }; -// \\const a = []u8{ -// \\ 1, 2, // -// \\ 3, -// \\ 4, -// \\ 5, -// \\ 6, // blah -// \\ 7, 8 }; -// \\const a = []u8{ -// \\ 1, 2, // -// \\ 3, // -// \\ 4, -// \\ 5, -// \\ 6, -// \\ 7 }; -// \\const a = []u8{ -// \\ 1, -// \\ 2, -// \\ 3, 4, // -// \\ 5, 6, // -// \\ 7, 8, // -// \\}; -// , -// \\const a = []u8{ -// \\ 1, 2, -// \\ 3, 4, -// \\ 5, 6, -// \\ 7, -// \\}; -// \\const a = []u8{ -// \\ 1, 2, -// \\ 3, 4, -// \\ 5, 6, -// \\ 7, 8, -// \\}; -// \\const a = []u8{ -// \\ 1, 2, -// \\ 3, 4, -// \\ 5, -// \\ 6, // blah -// \\ 7, -// \\ 8, -// \\}; -// \\const a = []u8{ -// \\ 1, 2, -// \\ 3, // -// \\ 4, -// \\ 5, 6, -// \\ 7, -// \\}; -// \\const a = []u8{ -// \\ 1, -// \\ 2, -// \\ 3, -// \\ 4, -// \\ 5, -// \\ 6, -// \\ 7, -// \\ 8, -// \\}; -// \\ -// ); -//} -// -//test "zig fmt: array literal veritical column alignment" { -// try testTransform( -// \\const a = []u8{ -// \\ 1000, 200, -// \\ 30, 4, -// \\ 50000, 60 -// \\}; -// \\const a = []u8{0, 1, 2, 3, 40, -// \\ 4,5,600,7, -// \\ 80, -// \\ 9, 10, 11, 0, 13, 14, 15}; -// \\ -// , -// \\const a = []u8{ -// \\ 1000, 200, -// \\ 30, 4, -// \\ 50000, 60, -// \\}; -// \\const a = []u8{ -// \\ 0, 1, 2, 3, 40, -// \\ 4, 5, 600, 7, 80, -// \\ 9, 10, 11, 0, 13, -// \\ 14, 15, -// \\}; -// \\ -// ); -//} +test "zig fmt: array 
literal with hint" { + try testTransform( + \\const a = []u8{ + \\ 1, 2, // + \\ 3, + \\ 4, + \\ 5, + \\ 6, + \\ 7 }; + \\const a = []u8{ + \\ 1, 2, // + \\ 3, + \\ 4, + \\ 5, + \\ 6, + \\ 7, 8 }; + \\const a = []u8{ + \\ 1, 2, // + \\ 3, + \\ 4, + \\ 5, + \\ 6, // blah + \\ 7, 8 }; + \\const a = []u8{ + \\ 1, 2, // + \\ 3, // + \\ 4, + \\ 5, + \\ 6, + \\ 7 }; + \\const a = []u8{ + \\ 1, + \\ 2, + \\ 3, 4, // + \\ 5, 6, // + \\ 7, 8, // + \\}; + , + \\const a = []u8{ + \\ 1, 2, // + \\ 3, 4, + \\ 5, 6, + \\ 7, + \\}; + \\const a = []u8{ + \\ 1, 2, // + \\ 3, 4, + \\ 5, 6, + \\ 7, 8, + \\}; + \\const a = []u8{ + \\ 1, 2, // + \\ 3, 4, + \\ 5, + \\ 6, // blah + \\ 7, + \\ 8, + \\}; + \\const a = []u8{ + \\ 1, 2, // + \\ 3, // + \\ 4, + \\ 5, + \\ 6, + \\ 7, + \\}; + \\const a = []u8{ + \\ 1, + \\ 2, + \\ 3, 4, // + \\ 5, 6, // + \\ 7, 8, // + \\}; + \\ + ); +} + +test "zig fmt: array literal veritical column alignment" { + try testTransform( + \\const a = []u8{ + \\ 1000, 200, + \\ 30, 4, + \\ 50000, 60 + \\}; + \\const a = []u8{0, 1, 2, 3, 40, + \\ 4,5,600,7, + \\ 80, + \\ 9, 10, 11, 0, 13, 14, 15}; + \\ + , + \\const a = []u8{ + \\ 1000, 200, + \\ 30, 4, + \\ 50000, 60, + \\}; + \\const a = []u8{ + \\ 0, 1, 2, 3, 40, + \\ 4, 5, 600, 7, 80, + \\ 9, 10, 11, 0, 13, + \\ 14, 15, + \\}; + \\ + ); +} test "zig fmt: multiline string with backslash at end of line" { try testCanonical( @@ -1949,29 +1947,29 @@ test "zig fmt: slice align" { ); } -//test "zig fmt: add trailing comma to array literal" { -// try testTransform( -// \\comptime { -// \\ return []u16{'m', 's', 'y', 's', '-' // hi -// \\ }; -// \\ return []u16{'m', 's', 'y', 's', -// \\ '-'}; -// \\ return []u16{'m', 's', 'y', 's', '-'}; -// \\} -// , -// \\comptime { -// \\ return []u16{ -// \\ 'm', 's', 'y', 's', '-', // hi -// \\ }; -// \\ return []u16{ -// \\ 'm', 's', 'y', 's', -// \\ '-', -// \\ }; -// \\ return []u16{ 'm', 's', 'y', 's', '-' }; -// \\} -// \\ -// ); -//} +test "zig fmt: add trailing comma to 
array literal" { + try testTransform( + \\comptime { + \\ return []u16{'m', 's', 'y', 's', '-' // hi + \\ }; + \\ return []u16{'m', 's', 'y', 's', + \\ '-'}; + \\ return []u16{'m', 's', 'y', 's', '-'}; + \\} + , + \\comptime { + \\ return []u16{ + \\ 'm', 's', 'y', 's', '-', // hi + \\ }; + \\ return []u16{ + \\ 'm', 's', 'y', 's', + \\ '-', + \\ }; + \\ return []u16{ 'm', 's', 'y', 's', '-' }; + \\} + \\ + ); +} test "zig fmt: first thing in file is line comment" { try testCanonical( @@ -3406,30 +3404,30 @@ test "zig fmt: comptime block in container" { // ); //} -//test "zig fmt: multiline string in array" { -// try testCanonical( -// \\const Foo = [][]const u8{ -// \\ \\aaa -// \\ , -// \\ \\bbb -// \\}; -// \\ -// \\fn bar() void { -// \\ const Foo = [][]const u8{ -// \\ \\aaa -// \\ , -// \\ \\bbb -// \\ }; -// \\ const Bar = [][]const u8{ // comment here -// \\ \\aaa -// \\ \\ -// \\ , // and another comment can go here -// \\ \\bbb -// \\ }; -// \\} -// \\ -// ); -//} +test "zig fmt: multiline string in array" { + try testCanonical( + \\const Foo = [][]const u8{ + \\ \\aaa + \\ , + \\ \\bbb + \\}; + \\ + \\fn bar() void { + \\ const Foo = [][]const u8{ + \\ \\aaa + \\ , + \\ \\bbb + \\ }; + \\ const Bar = [][]const u8{ // comment here + \\ \\aaa + \\ \\ + \\ , // and another comment can go here + \\ \\bbb + \\ }; + \\} + \\ + ); +} test "zig fmt: if type expr" { try testCanonical( @@ -3449,19 +3447,21 @@ test "zig fmt: file ends with struct field" { ); } -test "zig fmt: comment after empty comment" { - try testTransform( - \\const x = true; // - \\// - \\// - \\//a - \\ - , - \\const x = true; - \\//a - \\ - ); -} +// TODO intentionally change the behavior of this case? 
+// for array literals we necessarily have meaningful empty comments +//test "zig fmt: comment after empty comment" { +// try testTransform( +// \\const x = true; // +// \\// +// \\// +// \\//a +// \\ +// , +// \\const x = true; +// \\//a +// \\ +// ); +//} //test "zig fmt: line comment in array" { // try testTransform( @@ -3493,7 +3493,7 @@ test "zig fmt: comment after empty comment" { // \\ // ); //} -// + //test "zig fmt: comment after params" { // try testTransform( // \\fn a( @@ -3518,7 +3518,7 @@ test "zig fmt: comment after empty comment" { // \\ // ); //} -// + //test "zig fmt: comment in array initializer/access" { // try testCanonical( // \\test "a" { @@ -3550,7 +3550,7 @@ test "zig fmt: comment after empty comment" { // \\ // ); //} -// + //test "zig fmt: comments at several places in struct init" { // try testTransform( // \\var bar = Bar{ @@ -3899,44 +3899,44 @@ test "zig fmt: regression test for #5722" { ); } -//test "zig fmt: allow trailing line comments to do manual array formatting" { -// try testCanonical( -// \\fn foo() void { -// \\ self.code.appendSliceAssumeCapacity(&[_]u8{ -// \\ 0x55, // push rbp -// \\ 0x48, 0x89, 0xe5, // mov rbp, rsp -// \\ 0x48, 0x81, 0xec, // sub rsp, imm32 (with reloc) -// \\ }); -// \\ -// \\ di_buf.appendAssumeCapacity(&[_]u8{ -// \\ 1, DW.TAG_compile_unit, DW.CHILDREN_no, // header -// \\ DW.AT_stmt_list, DW_FORM_data4, // form value pairs -// \\ DW.AT_low_pc, DW_FORM_addr, -// \\ DW.AT_high_pc, DW_FORM_addr, -// \\ DW.AT_name, DW_FORM_strp, -// \\ DW.AT_comp_dir, DW_FORM_strp, -// \\ DW.AT_producer, DW_FORM_strp, -// \\ DW.AT_language, DW_FORM_data2, -// \\ 0, 0, // sentinel -// \\ }); -// \\ -// \\ self.code.appendSliceAssumeCapacity(&[_]u8{ -// \\ 0x55, // push rbp -// \\ 0x48, 0x89, 0xe5, // mov rbp, rsp -// \\ // How do we handle this? -// \\ //0x48, 0x81, 0xec, // sub rsp, imm32 (with reloc) -// \\ // Here's a blank line, should that be allowed? 
-// \\ -// \\ 0x48, 0x89, 0xe5, -// \\ 0x33, 0x45, -// \\ // Now the comment breaks a single line -- how do we handle this? -// \\ 0x88, -// \\ }); -// \\} -// \\ -// ); -//} -// +test "zig fmt: allow trailing line comments to do manual array formatting" { + try testCanonical( + \\fn foo() void { + \\ self.code.appendSliceAssumeCapacity(&[_]u8{ + \\ 0x55, // push rbp + \\ 0x48, 0x89, 0xe5, // mov rbp, rsp + \\ 0x48, 0x81, 0xec, // sub rsp, imm32 (with reloc) + \\ }); + \\ + \\ di_buf.appendAssumeCapacity(&[_]u8{ + \\ 1, DW.TAG_compile_unit, DW.CHILDREN_no, // header + \\ DW.AT_stmt_list, DW_FORM_data4, // form value pairs + \\ DW.AT_low_pc, DW_FORM_addr, + \\ DW.AT_high_pc, DW_FORM_addr, + \\ DW.AT_name, DW_FORM_strp, + \\ DW.AT_comp_dir, DW_FORM_strp, + \\ DW.AT_producer, DW_FORM_strp, + \\ DW.AT_language, DW_FORM_data2, + \\ 0, 0, // sentinel + \\ }); + \\ + \\ self.code.appendSliceAssumeCapacity(&[_]u8{ + \\ 0x55, // push rbp + \\ 0x48, 0x89, 0xe5, // mov rbp, rsp + \\ // How do we handle this? + \\ //0x48, 0x81, 0xec, // sub rsp, imm32 (with reloc) + \\ // Here's a blank line, should that be allowed? + \\ + \\ 0x48, 0x89, 0xe5, + \\ 0x33, 0x45, + \\ // Now the comment breaks a single line -- how do we handle this? 
+ \\ 0x88, + \\ }); + \\} + \\ + ); +} + //test "zig fmt: multiline string literals should play nice with array initializers" { // try testCanonical( // \\fn main() void { @@ -3999,7 +3999,7 @@ test "zig fmt: regression test for #5722" { // \\ // ); //} -// + //test "zig fmt: use of comments and Multiline string literals may force the parameters over multiple lines" { // try testCanonical( // \\pub fn makeMemUndefined(qzz: []u8) i1 { diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 9dc35aa9d8..0a1b6c19c0 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -1640,7 +1640,11 @@ fn renderArrayInit( const last_elem = array_init.ast.elements[array_init.ast.elements.len - 1]; const last_elem_token = tree.lastToken(last_elem); const trailing_comma = token_tags[last_elem_token + 1] == .comma; - if (!trailing_comma) { + const rbrace = if (trailing_comma) last_elem_token + 2 else last_elem_token + 1; + assert(token_tags[rbrace] == .r_brace); + const contains_newlines = !tree.tokensOnSameLine(array_init.ast.lbrace, rbrace); + + if (!trailing_comma and !contains_newlines) { // Render all on one line, no trailing comma. if (array_init.ast.elements.len == 1) { // If there is only one element, we don't use spaces @@ -1658,8 +1662,6 @@ fn renderArrayInit( ais.pushIndentNextLine(); try renderToken(ais, tree, array_init.ast.lbrace, .newline); - const rbrace = last_elem_token + 2; - assert(token_tags[rbrace] == .r_brace); var expr_index: usize = 0; while (rowSize(tree, array_init.ast.elements[expr_index..], rbrace)) |row_size| { @@ -1673,28 +1675,31 @@ fn renderArrayInit( defer gpa.free(expr_newlines); mem.set(bool, expr_newlines, false); - const expr_widths = widths[0 .. 
widths.len - row_size]; - const column_widths = widths[widths.len - row_size ..]; + const expr_widths = widths[0..row_exprs.len]; + const column_widths = widths[row_exprs.len..]; - // Find next row with trailing comment (if any) to end the current section + // Find next row with trailing comment (if any) to end the current section. const section_end = sec_end: { var this_line_first_expr: usize = 0; var this_line_size = rowSize(tree, row_exprs, rbrace); for (row_exprs) |expr, i| { - // Ignore comment on first line of this section - if (i == 0 or tree.tokensOnSameLine(tree.firstToken(row_exprs[0]), tree.lastToken(expr))) continue; - // Track start of line containing comment - if (!tree.tokensOnSameLine(tree.firstToken(row_exprs[this_line_first_expr]), tree.lastToken(expr))) { + // Ignore comment on first line of this section. + if (i == 0) continue; + const expr_last_token = tree.lastToken(expr); + if (tree.tokensOnSameLine(tree.firstToken(row_exprs[0]), expr_last_token)) + continue; + // Track start of line containing comment. + if (!tree.tokensOnSameLine(tree.firstToken(row_exprs[this_line_first_expr]), expr_last_token)) { this_line_first_expr = i; this_line_size = rowSize(tree, row_exprs[this_line_first_expr..], rbrace); } - const maybe_comma = tree.lastToken(expr) + 1; + const maybe_comma = expr_last_token + 1; if (token_tags[maybe_comma] == .comma) { const after_comma_src = tree.source[token_starts[maybe_comma]..token_starts[maybe_comma + 1]]; - const same_line_comment = for (after_comma_src) |byte| switch (byte) { + for (after_comma_src) |byte| switch (byte) { '\n' => break, - '/' => break :sec_end i - this_line_size.? + 1, // Found row ending in comment + '/' => break :sec_end i - this_line_size.? + 1, else => continue, }; } @@ -1754,7 +1759,7 @@ fn renderArrayInit( } } - // Render exprs in current section + // Render exprs in current section. 
column_counter = 0; var last_col_index: usize = row_size - 1; for (section_exprs) |expr, i| { @@ -1785,19 +1790,12 @@ fn renderArrayInit( try renderToken(ais, tree, comma, .newline); // , try renderExtraNewline(ais, tree, next_expr); } else { - const maybe_comma = tree.lastToken(expr) + 1; - if (token_tags[maybe_comma] == .comma) { - try renderExpression(gpa, ais, tree, expr, .none); // , - try renderToken(ais, tree, maybe_comma, .newline); // , - } else { - try renderExpression(gpa, ais, tree, expr, .comma); // , - } + try renderExpression(gpa, ais, tree, expr, .comma); // , } } - if (expr_index == array_init.ast.elements.len) { + if (expr_index == array_init.ast.elements.len) break; - } } ais.popIndent(); @@ -2175,7 +2173,6 @@ fn renderToken(ais: *Ais, tree: ast.Tree, token_index: ast.TokenIndex, space: Sp /// that end is the last byte before the next token. fn renderComments(ais: *Ais, tree: ast.Tree, start: usize, end: usize) Error!bool { var index: usize = start; - var rendered_empty_comments = false; while (mem.indexOf(u8, tree.source[index..end], "//")) |offset| { const comment_start = index + offset; @@ -2196,11 +2193,6 @@ fn renderComments(ais: *Ais, tree: ast.Tree, start: usize, end: usize) Error!boo // Respect the newline directly before the comment. // Note: This allows an empty line between comments try ais.insertNewline(); - } else if (trimmed_comment.len == 2) { - if (!rendered_empty_comments) { - try ais.writer().writeByte('\n'); - rendered_empty_comments = true; - } } else if (index == start) { // Otherwise if the first comment is on the same line as // the token before it, prefix it with a single space. 
@@ -2208,10 +2200,7 @@ fn renderComments(ais: *Ais, tree: ast.Tree, start: usize, end: usize) Error!boo } } - if (trimmed_comment.len != 2) { - try ais.writer().print("{s}\n", .{trimmed_comment}); - rendered_empty_comments = false; - } + try ais.writer().print("{s}\n", .{trimmed_comment}); index = 1 + (newline orelse return true); if (ais.disabled_offset) |disabled_offset| { From 1f62e87031cbb85387934313a7a9e7091d424b84 Mon Sep 17 00:00:00 2001 From: Veikka Tuominen Date: Tue, 23 Feb 2021 10:53:25 +0200 Subject: [PATCH 148/173] fix formatting in translate-c test case --- test/translate_c.zig | 48 +++++++++++++++++++++----------------------- 1 file changed, 23 insertions(+), 25 deletions(-) diff --git a/test/translate_c.zig b/test/translate_c.zig index 40e074fcc4..2f95bb2d1e 100644 --- a/test/translate_c.zig +++ b/test/translate_c.zig @@ -2581,9 +2581,9 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\ var a: c_int = 2; \\ } \\ if ((blk: { - \\ _ = @as(c_int, 2); - \\ break :blk @as(c_int, 5); - \\ }) != 0) { + \\ _ = @as(c_int, 2); + \\ break :blk @as(c_int, 5); + \\ }) != 0) { \\ var a: c_int = 2; \\ } \\} @@ -3134,28 +3134,26 @@ pub fn addCases(cases: *tests.TranslateCContext) void { \\} }); - // TODO fix zig fmt here - it incorrectly inserts an additional indentation - // level inside blk_1. 
- // cases.add("nested assignment", - // \\int foo(int *p, int x) { - // \\ return *p++ = x; - // \\} - // , &[_][]const u8{ - // \\pub export fn foo(arg_p: [*c]c_int, arg_x: c_int) c_int { - // \\ var p = arg_p; - // \\ var x = arg_x; - // \\ return blk: { - // \\ const tmp = x; - // \\ (blk_1: { - // \\ const ref = &p; - // \\ const tmp_2 = ref.*; - // \\ ref.* += 1; - // \\ break :blk_1 tmp_2; - // \\ }).?.* = tmp; - // \\ break :blk tmp; - // \\ }; - // \\} - // }); + cases.add("nested assignment", + \\int foo(int *p, int x) { + \\ return *p++ = x; + \\} + , &[_][]const u8{ + \\pub export fn foo(arg_p: [*c]c_int, arg_x: c_int) c_int { + \\ var p = arg_p; + \\ var x = arg_x; + \\ return blk: { + \\ const tmp = x; + \\ (blk_1: { + \\ const ref = &p; + \\ const tmp_2 = ref.*; + \\ ref.* += 1; + \\ break :blk_1 tmp_2; + \\ }).?.* = tmp; + \\ break :blk tmp; + \\ }; + \\} + }); cases.add("widening and truncating integer casting to different signedness", \\unsigned long foo(void) { From 5306b1a9ab5cb0730fcc21da095db39b0197e99b Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Tue, 23 Feb 2021 18:32:47 +0100 Subject: [PATCH 149/173] zig fmt: container doc comments --- lib/std/zig/ast.zig | 82 +++++++++++++++------------ lib/std/zig/parse.zig | 54 +++++++++--------- lib/std/zig/parser_test.zig | 110 ++++++++++++++++++------------------ lib/std/zig/render.zig | 69 ++++++++++++---------- src/astgen.zig | 36 ++++++------ 5 files changed, 187 insertions(+), 164 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index abe4993d3e..98ffe26818 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -525,17 +525,17 @@ pub const Tree = struct { }, .container_decl, - .container_decl_comma, + .container_decl_trailing, .container_decl_two, - .container_decl_two_comma, + .container_decl_two_trailing, .container_decl_arg, - .container_decl_arg_comma, + .container_decl_arg_trailing, .tagged_union, - .tagged_union_comma, + .tagged_union_trailing, 
.tagged_union_two, - .tagged_union_two_comma, + .tagged_union_two_trailing, .tagged_union_enum_tag, - .tagged_union_enum_tag_comma, + .tagged_union_enum_tag_trailing, => { const main_token = main_tokens[n]; switch (token_tags[main_token - 1]) { @@ -606,6 +606,7 @@ pub const Tree = struct { const datas = tree.nodes.items(.data); const main_tokens = tree.nodes.items(.main_token); const token_starts = tree.tokens.items(.start); + const token_tags = tree.tokens.items(.tag); var n = node; var end_offset: TokenIndex = 0; while (true) switch (tags[n]) { @@ -738,9 +739,9 @@ pub const Tree = struct { }, .call_comma, .async_call_comma, - .tagged_union_enum_tag_comma, + .tagged_union_enum_tag_trailing, => { - end_offset += 2; // for the comma + rparen/rbrace + end_offset += 2; // for the comma/semicolon + rparen/rbrace const params = tree.extraData(datas[n].rhs, Node.SubRange); assert(params.end > params.start); n = tree.extra_data[params.end - 1]; // last parameter @@ -779,7 +780,7 @@ pub const Tree = struct { }, .array_init_comma, .struct_init_comma, - .container_decl_arg_comma, + .container_decl_arg_trailing, .switch_comma, => { const members = tree.extraData(datas[n].rhs, Node.SubRange); @@ -801,8 +802,8 @@ pub const Tree = struct { .array_init_dot_comma, .struct_init_dot_comma, .block_semicolon, - .container_decl_comma, - .tagged_union_comma, + .container_decl_trailing, + .tagged_union_trailing, .builtin_call_comma, => { assert(datas[n].rhs - datas[n].lhs > 0); @@ -838,10 +839,17 @@ pub const Tree = struct { .block_two, .struct_init_dot_two, => end_offset += 1, // rbrace - .builtin_call_two, - .container_decl_two, - => end_offset += 2, // lparen/lbrace + rparen/rbrace - .tagged_union_two => end_offset += 5, // (enum) {} + .builtin_call_two => end_offset += 2, // lparen/lbrace + rparen/rbrace + .container_decl_two => { + var i: u32 = 2; // lbrace + rbrace + while (token_tags[main_tokens[n] + i] == .container_doc_comment) i += 1; + end_offset += i; + }, + .tagged_union_two 
=> { + var i: u32 = 5; // (enum) {} + while (token_tags[main_tokens[n] + i] == .container_doc_comment) i += 1; + end_offset += i; + }, else => unreachable, } return main_tokens[n] + end_offset; @@ -851,8 +859,8 @@ pub const Tree = struct { .builtin_call_two_comma, .block_two_semicolon, .struct_init_dot_two_comma, - .container_decl_two_comma, - .tagged_union_two_comma, + .container_decl_two_trailing, + .tagged_union_two_trailing, => { end_offset += 2; // for the comma/semicolon + rbrace/rparen if (datas[n].rhs != 0) { @@ -1531,7 +1539,7 @@ pub const Tree = struct { pub fn containerDeclTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) full.ContainerDecl { assert(tree.nodes.items(.tag)[node] == .container_decl_two or - tree.nodes.items(.tag)[node] == .container_decl_two_comma); + tree.nodes.items(.tag)[node] == .container_decl_two_trailing); const data = tree.nodes.items(.data)[node]; buffer.* = .{ data.lhs, data.rhs }; const members = if (data.rhs != 0) @@ -1550,7 +1558,7 @@ pub const Tree = struct { pub fn containerDecl(tree: Tree, node: Node.Index) full.ContainerDecl { assert(tree.nodes.items(.tag)[node] == .container_decl or - tree.nodes.items(.tag)[node] == .container_decl_comma); + tree.nodes.items(.tag)[node] == .container_decl_trailing); const data = tree.nodes.items(.data)[node]; return tree.fullContainerDecl(.{ .main_token = tree.nodes.items(.main_token)[node], @@ -1562,7 +1570,7 @@ pub const Tree = struct { pub fn containerDeclArg(tree: Tree, node: Node.Index) full.ContainerDecl { assert(tree.nodes.items(.tag)[node] == .container_decl_arg or - tree.nodes.items(.tag)[node] == .container_decl_arg_comma); + tree.nodes.items(.tag)[node] == .container_decl_arg_trailing); const data = tree.nodes.items(.data)[node]; const members_range = tree.extraData(data.rhs, Node.SubRange); return tree.fullContainerDecl(.{ @@ -1575,7 +1583,7 @@ pub const Tree = struct { pub fn taggedUnionTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) full.ContainerDecl { 
assert(tree.nodes.items(.tag)[node] == .tagged_union_two or - tree.nodes.items(.tag)[node] == .tagged_union_two_comma); + tree.nodes.items(.tag)[node] == .tagged_union_two_trailing); const data = tree.nodes.items(.data)[node]; buffer.* = .{ data.lhs, data.rhs }; const members = if (data.rhs != 0) @@ -1595,7 +1603,7 @@ pub const Tree = struct { pub fn taggedUnion(tree: Tree, node: Node.Index) full.ContainerDecl { assert(tree.nodes.items(.tag)[node] == .tagged_union or - tree.nodes.items(.tag)[node] == .tagged_union_comma); + tree.nodes.items(.tag)[node] == .tagged_union_trailing); const data = tree.nodes.items(.data)[node]; const main_token = tree.nodes.items(.main_token)[node]; return tree.fullContainerDecl(.{ @@ -1608,7 +1616,7 @@ pub const Tree = struct { pub fn taggedUnionEnumTag(tree: Tree, node: Node.Index) full.ContainerDecl { assert(tree.nodes.items(.tag)[node] == .tagged_union_enum_tag or - tree.nodes.items(.tag)[node] == .tagged_union_enum_tag_comma); + tree.nodes.items(.tag)[node] == .tagged_union_enum_tag_trailing); const data = tree.nodes.items(.data)[node]; const members_range = tree.extraData(data.rhs, Node.SubRange); const main_token = tree.nodes.items(.main_token)[node]; @@ -2762,36 +2770,40 @@ pub const Node = struct { /// `struct {}`, `union {}`, `opaque {}`, `enum {}`. `extra_data[lhs..rhs]`. /// main_token is `struct`, `union`, `opaque`, `enum` keyword. container_decl, - /// Same as ContainerDecl but there is known to be a trailing comma before the rbrace. - container_decl_comma, + /// Same as ContainerDecl but there is known to be a trailing comma + /// or semicolon before the rbrace. + container_decl_trailing, /// `struct {lhs, rhs}`, `union {lhs, rhs}`, `opaque {lhs, rhs}`, `enum {lhs, rhs}`. /// lhs or rhs can be omitted. /// main_token is `struct`, `union`, `opaque`, `enum` keyword. container_decl_two, /// Same as ContainerDeclTwo except there is known to be a trailing comma - /// before the rbrace. 
- container_decl_two_comma, + /// or semicolon before the rbrace. + container_decl_two_trailing, /// `union(lhs)` / `enum(lhs)`. `SubRange[rhs]`. container_decl_arg, - /// Same as container_decl_arg but there is known to be a trailing comma before the rbrace. - container_decl_arg_comma, + /// Same as container_decl_arg but there is known to be a trailing + /// comma or semicolon before the rbrace. + container_decl_arg_trailing, /// `union(enum) {}`. `sub_list[lhs..rhs]`. /// Note that tagged unions with explicitly provided enums are represented /// by `container_decl_arg`. tagged_union, - /// Same as tagged_union but there is known to be a trailing comma before the rbrace. - tagged_union_comma, + /// Same as tagged_union but there is known to be a trailing comma + /// or semicolon before the rbrace. + tagged_union_trailing, /// `union(enum) {lhs, rhs}`. lhs or rhs may be omitted. /// Note that tagged unions with explicitly provided enums are represented /// by `container_decl_arg`. tagged_union_two, - /// Same as tagged_union_two but there is known to be a trailing comma before the rbrace. - tagged_union_two_comma, + /// Same as tagged_union_two but there is known to be a trailing comma + /// or semicolon before the rbrace. + tagged_union_two_trailing, /// `union(enum(lhs)) {}`. `SubRange[rhs]`. tagged_union_enum_tag, /// Same as tagged_union_enum_tag but there is known to be a trailing comma - /// before the rbrace. - tagged_union_enum_tag_comma, + /// or semicolon before the rbrace. + tagged_union_enum_tag_trailing, /// `a: lhs = rhs,`. lhs and rhs can be omitted. /// main_token is the field name identifier. /// lastToken() does not include the possible trailing comma. 
diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig index f5672bedf9..528b684e46 100644 --- a/lib/std/zig/parse.zig +++ b/lib/std/zig/parse.zig @@ -113,7 +113,7 @@ const Parser = struct { len: usize, lhs: Node.Index, rhs: Node.Index, - trailing_comma: bool, + trailing: bool, fn toSpan(self: Members, p: *Parser) !Node.SubRange { if (self.len <= 2) { @@ -215,7 +215,7 @@ const Parser = struct { // Skip container doc comments. while (p.eatToken(.container_doc_comment)) |_| {} - var trailing_comma = false; + var trailing = false; while (true) { const doc_comment = try p.eatDocComments(); @@ -228,7 +228,7 @@ const Parser = struct { } try list.append(test_decl_node); } - trailing_comma = false; + trailing = false; }, .keyword_comptime => switch (p.token_tags[p.tok_i + 1]) { .identifier => { @@ -251,11 +251,11 @@ const Parser = struct { switch (p.token_tags[p.tok_i]) { .comma => { p.tok_i += 1; - trailing_comma = true; + trailing = true; continue; }, .r_brace, .eof => { - trailing_comma = false; + trailing = false; break; }, else => {}, @@ -289,7 +289,7 @@ const Parser = struct { } try list.append(comptime_node); } - trailing_comma = false; + trailing = false; }, else => { p.tok_i += 1; @@ -305,7 +305,7 @@ const Parser = struct { } try list.append(top_level_decl); } - trailing_comma = false; + trailing = p.token_tags[p.tok_i - 1] == .semicolon; }, .keyword_usingnamespace => { const node = try p.expectUsingNamespaceRecoverable(); @@ -315,7 +315,7 @@ const Parser = struct { } try list.append(node); } - trailing_comma = false; + trailing = p.token_tags[p.tok_i - 1] == .semicolon; }, .keyword_const, .keyword_var, @@ -333,7 +333,7 @@ const Parser = struct { } try list.append(top_level_decl); } - trailing_comma = false; + trailing = p.token_tags[p.tok_i - 1] == .semicolon; }, .identifier => { const container_field = try p.expectContainerFieldRecoverable(); @@ -354,11 +354,11 @@ const Parser = struct { switch (p.token_tags[p.tok_i]) { .comma => { p.tok_i += 1; - 
trailing_comma = true; + trailing = true; continue; }, .r_brace, .eof => { - trailing_comma = false; + trailing = false; break; }, else => {}, @@ -391,19 +391,19 @@ const Parser = struct { .len = 0, .lhs = 0, .rhs = 0, - .trailing_comma = trailing_comma, + .trailing = trailing, }, 1 => return Members{ .len = 1, .lhs = list.items[0], .rhs = 0, - .trailing_comma = trailing_comma, + .trailing = trailing, }, 2 => return Members{ .len = 2, .lhs = list.items[0], .rhs = list.items[1], - .trailing_comma = trailing_comma, + .trailing = trailing, }, else => { const span = try p.listToSpan(list.items); @@ -411,7 +411,7 @@ const Parser = struct { .len = list.items.len, .lhs = span.start, .rhs = span.end, - .trailing_comma = trailing_comma, + .trailing = trailing, }; }, } @@ -3575,8 +3575,8 @@ const Parser = struct { const members_span = try members.toSpan(p); _ = try p.expectToken(.r_brace); return p.addNode(.{ - .tag = switch (members.trailing_comma) { - true => .tagged_union_enum_tag_comma, + .tag = switch (members.trailing) { + true => .tagged_union_enum_tag_trailing, false => .tagged_union_enum_tag, }, .main_token = main_token, @@ -3593,8 +3593,8 @@ const Parser = struct { _ = try p.expectToken(.r_brace); if (members.len <= 2) { return p.addNode(.{ - .tag = switch (members.trailing_comma) { - true => .tagged_union_two_comma, + .tag = switch (members.trailing) { + true => .tagged_union_two_trailing, false => .tagged_union_two, }, .main_token = main_token, @@ -3606,8 +3606,8 @@ const Parser = struct { } else { const span = try members.toSpan(p); return p.addNode(.{ - .tag = switch (members.trailing_comma) { - true => .tagged_union_comma, + .tag = switch (members.trailing) { + true => .tagged_union_trailing, false => .tagged_union, }, .main_token = main_token, @@ -3638,8 +3638,8 @@ const Parser = struct { if (arg_expr == 0) { if (members.len <= 2) { return p.addNode(.{ - .tag = switch (members.trailing_comma) { - true => .container_decl_two_comma, + .tag = switch 
(members.trailing) { + true => .container_decl_two_trailing, false => .container_decl_two, }, .main_token = main_token, @@ -3651,8 +3651,8 @@ const Parser = struct { } else { const span = try members.toSpan(p); return p.addNode(.{ - .tag = switch (members.trailing_comma) { - true => .container_decl_comma, + .tag = switch (members.trailing) { + true => .container_decl_trailing, false => .container_decl, }, .main_token = main_token, @@ -3665,8 +3665,8 @@ const Parser = struct { } else { const span = try members.toSpan(p); return p.addNode(.{ - .tag = switch (members.trailing_comma) { - true => .container_decl_arg_comma, + .tag = switch (members.trailing) { + true => .container_decl_arg_trailing, false => .container_decl_arg, }, .main_token = main_token, diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 27e49b4c01..7185eb1588 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -3577,61 +3577,61 @@ test "zig fmt: file ends with struct field" { // ); //} -//test "zig fmt: top level doc comments" { -// try testCanonical( -// \\//! tld 1 -// \\//! tld 2 -// \\//! tld 3 -// \\ -// \\// comment -// \\ -// \\/// A doc -// \\const A = struct { -// \\ //! A tld 1 -// \\ //! A tld 2 -// \\ //! A tld 3 -// \\}; -// \\ -// \\/// B doc -// \\const B = struct { -// \\ //! B tld 1 -// \\ //! B tld 2 -// \\ //! B tld 3 -// \\ -// \\ /// b doc -// \\ b: u32, -// \\}; -// \\ -// \\/// C doc -// \\const C = struct { -// \\ //! C tld 1 -// \\ //! C tld 2 -// \\ //! C tld 3 -// \\ -// \\ /// c1 doc -// \\ c1: u32, -// \\ -// \\ //! C tld 4 -// \\ //! C tld 5 -// \\ //! C tld 6 -// \\ -// \\ /// c2 doc -// \\ c2: u32, -// \\}; -// \\ -// ); -// try testCanonical( -// \\//! Top-level documentation. -// \\ -// \\/// This is A -// \\pub const A = usize; -// \\ -// ); -// try testCanonical( -// \\//! Nothing here -// \\ -// ); -//} +test "zig fmt: container doc comments" { + try testCanonical( + \\//! tld 1 + \\//! tld 2 + \\//! 
tld 3 + \\ + \\// comment + \\ + \\/// A doc + \\const A = struct { + \\ //! A tld 1 + \\ //! A tld 2 + \\ //! A tld 3 + \\}; + \\ + \\/// B doc + \\const B = struct { + \\ //! B tld 1 + \\ //! B tld 2 + \\ //! B tld 3 + \\ + \\ /// B doc + \\ b: u32, + \\}; + \\ + \\/// C doc + \\const C = union(enum) { // comment + \\ //! C tld 1 + \\ //! C tld 2 + \\ //! C tld 3 + \\}; + \\ + \\/// D doc + \\const D = union(Foo) { + \\ //! D tld 1 + \\ //! D tld 2 + \\ //! D tld 3 + \\ + \\ /// D doc + \\ b: u32, + \\}; + \\ + ); + try testCanonical( + \\//! Top-level documentation. + \\ + \\/// This is A + \\pub const A = usize; + \\ + ); + try testCanonical( + \\//! Nothing here + \\ + ); +} test "zig fmt: extern without container keyword returns error" { try testError( diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 0a1b6c19c0..ab00a787c5 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -30,6 +30,10 @@ pub fn renderTree(buffer: *std.ArrayList(u8), tree: ast.Tree) Error!void { const comment_end_loc = tree.tokens.items(.start)[0]; _ = try renderComments(ais, tree, 0, comment_end_loc); + if (tree.tokens.items(.tag)[0] == .container_doc_comment) { + try renderContainerDocComments(ais, tree, 0); + } + try renderMembers(buffer.allocator, ais, tree, tree.rootDecls()); if (ais.disabled_offset) |disabled_offset| { @@ -506,28 +510,28 @@ fn renderExpression(gpa: *Allocator, ais: *Ais, tree: ast.Tree, node: ast.Node.I }, .container_decl, - .container_decl_comma, - => return renderContainerDecl(gpa, ais, tree, tree.containerDecl(node), space), + .container_decl_trailing, + => return renderContainerDecl(gpa, ais, tree, node, tree.containerDecl(node), space), - .container_decl_two, .container_decl_two_comma => { + .container_decl_two, .container_decl_two_trailing => { var buffer: [2]ast.Node.Index = undefined; - return renderContainerDecl(gpa, ais, tree, tree.containerDeclTwo(&buffer, node), space); + return renderContainerDecl(gpa, ais, tree, node, 
tree.containerDeclTwo(&buffer, node), space); }, .container_decl_arg, - .container_decl_arg_comma, - => return renderContainerDecl(gpa, ais, tree, tree.containerDeclArg(node), space), + .container_decl_arg_trailing, + => return renderContainerDecl(gpa, ais, tree, node, tree.containerDeclArg(node), space), .tagged_union, - .tagged_union_comma, - => return renderContainerDecl(gpa, ais, tree, tree.taggedUnion(node), space), + .tagged_union_trailing, + => return renderContainerDecl(gpa, ais, tree, node, tree.taggedUnion(node), space), - .tagged_union_two, .tagged_union_two_comma => { + .tagged_union_two, .tagged_union_two_trailing => { var buffer: [2]ast.Node.Index = undefined; - return renderContainerDecl(gpa, ais, tree, tree.taggedUnionTwo(&buffer, node), space); + return renderContainerDecl(gpa, ais, tree, node, tree.taggedUnionTwo(&buffer, node), space); }, .tagged_union_enum_tag, - .tagged_union_enum_tag_comma, - => return renderContainerDecl(gpa, ais, tree, tree.taggedUnionEnumTag(node), space), + .tagged_union_enum_tag_trailing, + => return renderContainerDecl(gpa, ais, tree, node, tree.taggedUnionEnumTag(node), space), .error_set_decl => { const error_token = main_tokens[node]; @@ -1662,7 +1666,6 @@ fn renderArrayInit( ais.pushIndentNextLine(); try renderToken(ais, tree, array_init.ast.lbrace, .newline); - var expr_index: usize = 0; while (rowSize(tree, array_init.ast.elements[expr_index..], rbrace)) |row_size| { const row_exprs = array_init.ast.elements[expr_index..]; @@ -1806,6 +1809,7 @@ fn renderContainerDecl( gpa: *Allocator, ais: *Ais, tree: ast.Tree, + container_decl_node: ast.Node.Index, container_decl: ast.full.ContainerDecl, space: Space, ) Error!void { @@ -1844,25 +1848,20 @@ fn renderContainerDecl( lbrace = container_decl.ast.main_token + 1; } + const rbrace = tree.lastToken(container_decl_node); if (container_decl.ast.members.len == 0) { - try renderToken(ais, tree, lbrace, Space.none); // lbrace - return renderToken(ais, tree, lbrace + 1, space); 
// rbrace + ais.pushIndentNextLine(); + if (token_tags[lbrace + 1] == .container_doc_comment) { + try renderToken(ais, tree, lbrace, .newline); // lbrace + try renderContainerDocComments(ais, tree, lbrace + 1); + } else { + try renderToken(ais, tree, lbrace, .none); // lbrace + } + ais.popIndent(); + return renderToken(ais, tree, rbrace, space); // rbrace } - const last_member = container_decl.ast.members[container_decl.ast.members.len - 1]; - const last_member_token = tree.lastToken(last_member); - const rbrace = switch (token_tags[last_member_token + 1]) { - .doc_comment => last_member_token + 2, - .comma, .semicolon => switch (token_tags[last_member_token + 2]) { - .doc_comment => last_member_token + 3, - .r_brace => last_member_token + 2, - else => unreachable, - }, - .r_brace => last_member_token + 1, - else => unreachable, - }; - const src_has_trailing_comma = token_tags[last_member_token + 1] == .comma; - + const src_has_trailing_comma = token_tags[rbrace - 1] == .comma; if (!src_has_trailing_comma) one_line: { // We can only print all the members in-line if all the members are fields. for (container_decl.ast.members) |member| { @@ -1879,6 +1878,9 @@ fn renderContainerDecl( // One member per line. ais.pushIndentNextLine(); try renderToken(ais, tree, lbrace, .newline); // lbrace + if (token_tags[lbrace + 1] == .container_doc_comment) { + try renderContainerDocComments(ais, tree, lbrace + 1); + } try renderMembers(gpa, ais, tree, container_decl.ast.members); ais.popIndent(); @@ -2272,6 +2274,15 @@ fn renderDocComments(ais: *Ais, tree: ast.Tree, end_token: ast.TokenIndex) Error } } +/// start_token is first container doc comment token. 
+fn renderContainerDocComments(ais: *Ais, tree: ast.Tree, start_token: ast.TokenIndex) Error!void { + const token_tags = tree.tokens.items(.tag); + var tok = start_token; + while (token_tags[tok] == .container_doc_comment) : (tok += 1) { + try renderToken(ais, tree, tok, .newline); + } +} + fn tokenSliceForRender(tree: ast.Tree, token_index: ast.TokenIndex) []const u8 { var ret = tree.tokenSlice(token_index); if (tree.tokens.items(.tag)[token_index] == .multiline_string_literal_line) { diff --git a/src/astgen.zig b/src/astgen.zig index 583e748035..5b27925a5f 100644 --- a/src/astgen.zig +++ b/src/astgen.zig @@ -215,17 +215,17 @@ fn lvalExpr(mod: *Module, scope: *Scope, node: ast.Node.Index) InnerError!*zir.I .anyframe_literal, .error_set_decl, .container_decl, - .container_decl_comma, + .container_decl_trailing, .container_decl_two, - .container_decl_two_comma, + .container_decl_two_trailing, .container_decl_arg, - .container_decl_arg_comma, + .container_decl_arg_trailing, .tagged_union, - .tagged_union_comma, + .tagged_union_trailing, .tagged_union_two, - .tagged_union_two_comma, + .tagged_union_two_trailing, .tagged_union_enum_tag, - .tagged_union_enum_tag_comma, + .tagged_union_enum_tag_trailing, .@"comptime", .@"nosuspend", .error_value, @@ -577,25 +577,25 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) In .ptr_type_bit_range => return ptrType(mod, scope, rl, tree.ptrTypeBitRange(node)), .container_decl, - .container_decl_comma, + .container_decl_trailing, => return containerDecl(mod, scope, rl, tree.containerDecl(node)), - .container_decl_two, .container_decl_two_comma => { + .container_decl_two, .container_decl_two_trailing => { var buffer: [2]ast.Node.Index = undefined; return containerDecl(mod, scope, rl, tree.containerDeclTwo(&buffer, node)); }, .container_decl_arg, - .container_decl_arg_comma, + .container_decl_arg_trailing, => return containerDecl(mod, scope, rl, tree.containerDeclArg(node)), .tagged_union, - 
.tagged_union_comma, + .tagged_union_trailing, => return containerDecl(mod, scope, rl, tree.taggedUnion(node)), - .tagged_union_two, .tagged_union_two_comma => { + .tagged_union_two, .tagged_union_two_trailing => { var buffer: [2]ast.Node.Index = undefined; return containerDecl(mod, scope, rl, tree.taggedUnionTwo(&buffer, node)); }, .tagged_union_enum_tag, - .tagged_union_enum_tag_comma, + .tagged_union_enum_tag_trailing, => return containerDecl(mod, scope, rl, tree.taggedUnionEnumTag(node)), .@"break" => return breakExpr(mod, scope, rl, node), @@ -3715,17 +3715,17 @@ fn nodeMayNeedMemoryLocation(scope: *Scope, start_node: ast.Node.Index) bool { .identifier, .error_set_decl, .container_decl, - .container_decl_comma, + .container_decl_trailing, .container_decl_two, - .container_decl_two_comma, + .container_decl_two_trailing, .container_decl_arg, - .container_decl_arg_comma, + .container_decl_arg_trailing, .tagged_union, - .tagged_union_comma, + .tagged_union_trailing, .tagged_union_two, - .tagged_union_two_comma, + .tagged_union_two_trailing, .tagged_union_enum_tag, - .tagged_union_enum_tag_comma, + .tagged_union_enum_tag_trailing, .@"asm", .asm_simple, .add, From 5820bd0e64ce58cca045a5dfe5ba03d9979eece8 Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Tue, 23 Feb 2021 19:11:50 +0100 Subject: [PATCH 150/173] zig fmt: insert trailing comma in fn params with comment --- lib/std/zig/parser_test.zig | 49 +++++++++++++++++++------------------ lib/std/zig/render.zig | 23 ++++++++++++++--- 2 files changed, 45 insertions(+), 27 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 7185eb1588..825a144b61 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -3494,30 +3494,31 @@ test "zig fmt: file ends with struct field" { // ); //} -//test "zig fmt: comment after params" { -// try testTransform( -// \\fn a( -// \\ b: u32 -// \\ // c: u32, -// \\ // d: u32, -// \\) void {} -// \\ -// , -// \\fn a( -// \\ b: u32, // 
c: u32, -// \\ // d: u32, -// \\) void {} -// \\ -// ); -// try testCanonical( -// \\fn a( -// \\ b: u32, -// \\ // c: u32, -// \\ // d: u32, -// \\) void {} -// \\ -// ); -//} +test "zig fmt: comment after params" { + try testTransform( + \\fn a( + \\ b: u32 + \\ // c: u32, + \\ // d: u32, + \\) void {} + \\ + , + \\fn a( + \\ b: u32, + \\ // c: u32, + \\ // d: u32, + \\) void {} + \\ + ); + try testCanonical( + \\fn a( + \\ b: u32, + \\ // c: u32, + \\ // d: u32, + \\) void {} + \\ + ); +} //test "zig fmt: comment in array initializer/access" { // try testCanonical( diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index ab00a787c5..acb881164e 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -1337,7 +1337,8 @@ fn renderFnProto(gpa: *Allocator, ais: *Ais, tree: ast.Tree, fn_proto: ast.full. // The params list is a sparse set that does *not* include anytype or ... parameters. - if (token_tags[rparen - 1] != .comma) { + const trailing_comma = token_tags[rparen - 1] == .comma; + if (!trailing_comma and !hasComment(tree, lparen, rparen)) { // Render all on one line, no trailing comma. try renderToken(ais, tree, lparen, .none); // ( @@ -1415,7 +1416,9 @@ fn renderFnProto(gpa: *Allocator, ais: *Ais, tree: ast.Tree, fn_proto: ast.full. continue; }, .r_paren => break, - else => unreachable, + else => { + std.debug.print("\n{}\n", .{token_tags[last_param_token]}); + }, } if (token_tags[last_param_token] == .identifier) { try renderToken(ais, tree, last_param_token, .none); // name @@ -1430,7 +1433,8 @@ fn renderFnProto(gpa: *Allocator, ais: *Ais, tree: ast.Tree, fn_proto: ast.full. 
const param = fn_proto.ast.params[param_i]; param_i += 1; try renderExpression(gpa, ais, tree, param, .comma); - last_param_token = tree.lastToken(param) + 1; + last_param_token = tree.lastToken(param); + if (token_tags[last_param_token + 1] == .comma) last_param_token += 1; } ais.popIndent(); } @@ -2171,6 +2175,19 @@ fn renderToken(ais: *Ais, tree: ast.Tree, token_index: ast.TokenIndex, space: Sp } } +/// Returns true if there exists a comment between the start of token +/// `start_token` and the start of token `end_token`. This is used to determine +/// if e.g. a fn_proto should be wrapped and have a trailing comma inserted +/// even if there is none in the source. +fn hasComment(tree: ast.Tree, start_token: ast.TokenIndex, end_token: ast.TokenIndex) bool { + const token_starts = tree.tokens.items(.start); + + const start = token_starts[start_token]; + const end = token_starts[end_token]; + + return mem.indexOf(u8, tree.source[start..end], "//") != null; +} + /// Assumes that start is the first byte past the previous token and /// that end is the last byte before the next token. 
fn renderComments(ais: *Ais, tree: ast.Tree, start: usize, end: usize) Error!bool { From 0f24b61ed543ad4879c5f9ef607bd856ce016f75 Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Tue, 23 Feb 2021 19:17:11 +0100 Subject: [PATCH 151/173] zig fmt: insert trailing comma in struct init with comment --- lib/std/zig/parser_test.zig | 51 +++++++++++++++++++------------------ lib/std/zig/render.zig | 22 ++++++++-------- 2 files changed, 37 insertions(+), 36 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 825a144b61..903528c711 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -3552,31 +3552,32 @@ test "zig fmt: comment after params" { // ); //} -//test "zig fmt: comments at several places in struct init" { -// try testTransform( -// \\var bar = Bar{ -// \\ .x = 10, // test -// \\ .y = "test" -// \\ // test -// \\}; -// \\ -// , -// \\var bar = Bar{ -// \\ .x = 10, // test -// \\ .y = "test", // test -// \\}; -// \\ -// ); -// -// try testCanonical( -// \\var bar = Bar{ // test -// \\ .x = 10, // test -// \\ .y = "test", -// \\ // test -// \\}; -// \\ -// ); -//} +test "zig fmt: comments at several places in struct init" { + try testTransform( + \\var bar = Bar{ + \\ .x = 10, // test + \\ .y = "test" + \\ // test + \\}; + \\ + , + \\var bar = Bar{ + \\ .x = 10, // test + \\ .y = "test", + \\ // test + \\}; + \\ + ); + + try testCanonical( + \\var bar = Bar{ // test + \\ .x = 10, // test + \\ .y = "test", + \\ // test + \\}; + \\ + ); +} test "zig fmt: container doc comments" { try testCanonical( diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index acb881164e..be807a05b6 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -411,18 +411,18 @@ fn renderExpression(gpa: *Allocator, ais: *Ais, tree: ast.Tree, node: ast.Node.I .struct_init_one, .struct_init_one_comma => { var fields: [1]ast.Node.Index = undefined; - return renderStructInit(gpa, ais, tree, tree.structInitOne(&fields, 
node), space); + return renderStructInit(gpa, ais, tree, node, tree.structInitOne(&fields, node), space); }, .struct_init_dot_two, .struct_init_dot_two_comma => { var fields: [2]ast.Node.Index = undefined; - return renderStructInit(gpa, ais, tree, tree.structInitDotTwo(&fields, node), space); + return renderStructInit(gpa, ais, tree, node, tree.structInitDotTwo(&fields, node), space); }, .struct_init_dot, .struct_init_dot_comma, - => return renderStructInit(gpa, ais, tree, tree.structInitDot(node), space), + => return renderStructInit(gpa, ais, tree, node, tree.structInitDot(node), space), .struct_init, .struct_init_comma, - => return renderStructInit(gpa, ais, tree, tree.structInit(node), space), + => return renderStructInit(gpa, ais, tree, node, tree.structInit(node), space), .call_one, .call_one_comma, .async_call_one, .async_call_one_comma => { var params: [1]ast.Node.Index = undefined; @@ -1564,11 +1564,11 @@ fn renderBlock( try renderToken(ais, tree, tree.lastToken(block_node), space); // rbrace } -// TODO: handle comments between fields fn renderStructInit( gpa: *Allocator, ais: *Ais, tree: ast.Tree, + struct_node: ast.Node.Index, struct_init: ast.full.StructInit, space: Space, ) Error!void { @@ -1582,9 +1582,10 @@ fn renderStructInit( try renderToken(ais, tree, struct_init.ast.lbrace, .none); // lbrace return renderToken(ais, tree, struct_init.ast.lbrace + 1, space); // rbrace } - const last_field = struct_init.ast.fields[struct_init.ast.fields.len - 1]; - const last_field_token = tree.lastToken(last_field); - if (token_tags[last_field_token + 1] == .comma) { + + const rbrace = tree.lastToken(struct_node); + const trailing_comma = token_tags[rbrace - 1] == .comma; + if (trailing_comma or hasComment(tree, struct_init.ast.lbrace, rbrace)) { // Render one field init per line. 
ais.pushIndentNextLine(); try renderToken(ais, tree, struct_init.ast.lbrace, .newline); @@ -1604,7 +1605,6 @@ fn renderStructInit( } ais.popIndent(); - return renderToken(ais, tree, last_field_token + 2, space); // rbrace } else { // Render all on one line, no trailing comma. try renderToken(ais, tree, struct_init.ast.lbrace, .space); @@ -1616,9 +1616,9 @@ fn renderStructInit( try renderToken(ais, tree, init_token - 1, .space); // = try renderExpression(gpa, ais, tree, field_init, .comma_space); } - - return renderToken(ais, tree, last_field_token + 1, space); // rbrace } + + return renderToken(ais, tree, rbrace, space); } // TODO: handle comments between elements From 6b9f19a6449b0fe5dce835590fcd6d13b5c785c4 Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Tue, 23 Feb 2021 19:18:36 +0100 Subject: [PATCH 152/173] zig fmt: remove stray std.debug.print() --- lib/std/zig/render.zig | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index be807a05b6..60f54c4ab5 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -1416,9 +1416,7 @@ fn renderFnProto(gpa: *Allocator, ais: *Ais, tree: ast.Tree, fn_proto: ast.full. continue; }, .r_paren => break, - else => { - std.debug.print("\n{}\n", .{token_tags[last_param_token]}); - }, + else => unreachable, } if (token_tags[last_param_token] == .identifier) { try renderToken(ais, tree, last_param_token, .none); // name From abfe21383035522945e52ffb3954c03398767a39 Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Tue, 23 Feb 2021 19:40:34 +0100 Subject: [PATCH 153/173] zig fmt: enable array init trailing comment insertion test Modify the test case slightly to match similar modifications done in 5820bd0 and 0f24b61. 
--- lib/std/zig/parser_test.zig | 61 +++++++++++++++++++------------------ 1 file changed, 31 insertions(+), 30 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 903528c711..72952cde76 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -3463,36 +3463,37 @@ test "zig fmt: file ends with struct field" { // ); //} -//test "zig fmt: line comment in array" { -// try testTransform( -// \\test "a" { -// \\ var arr = [_]u32{ -// \\ 0 -// \\ // 1, -// \\ // 2, -// \\ }; -// \\} -// \\ -// , -// \\test "a" { -// \\ var arr = [_]u32{ -// \\ 0, // 1, -// \\ // 2, -// \\ }; -// \\} -// \\ -// ); -// try testCanonical( -// \\test "a" { -// \\ var arr = [_]u32{ -// \\ 0, -// \\ // 1, -// \\ // 2, -// \\ }; -// \\} -// \\ -// ); -//} +test "zig fmt: line comment in array" { + try testTransform( + \\test "a" { + \\ var arr = [_]u32{ + \\ 0 + \\ // 1, + \\ // 2, + \\ }; + \\} + \\ + , + \\test "a" { + \\ var arr = [_]u32{ + \\ 0, + \\ // 1, + \\ // 2, + \\ }; + \\} + \\ + ); + try testCanonical( + \\test "a" { + \\ var arr = [_]u32{ + \\ 0, + \\ // 1, + \\ // 2, + \\ }; + \\} + \\ + ); +} test "zig fmt: comment after params" { try testTransform( From b028a92a6033b5a8de6607b3f544f9c8f376f0fd Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Tue, 23 Feb 2021 19:56:56 +0100 Subject: [PATCH 154/173] zig fmt: handle comments in array type/init/access --- lib/std/zig/parser_test.zig | 66 ++++++++++++++++++++----------------- lib/std/zig/render.zig | 26 ++++++++++----- 2 files changed, 53 insertions(+), 39 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 72952cde76..76d3985576 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -3521,37 +3521,41 @@ test "zig fmt: comment after params" { ); } -//test "zig fmt: comment in array initializer/access" { -// try testCanonical( -// \\test "a" { -// \\ var a = x{ //aa -// \\ //bb -// \\ }; -// \\ var a = []x{ //aa 
-// \\ //bb -// \\ }; -// \\ var b = [ //aa -// \\ _ -// \\ ]x{ //aa -// \\ //bb -// \\ 9, -// \\ }; -// \\ var c = b[ //aa -// \\ 0 -// \\ ]; -// \\ var d = [_ -// \\ //aa -// \\ ]x{ //aa -// \\ //bb -// \\ 9, -// \\ }; -// \\ var e = d[0 -// \\ //aa -// \\ ]; -// \\} -// \\ -// ); -//} +test "zig fmt: comment in array initializer/access" { + try testCanonical( + \\test "a" { + \\ var a = x{ //aa + \\ //bb + \\ }; + \\ var a = []x{ //aa + \\ //bb + \\ }; + \\ var b = [ //aa + \\ _ + \\ ]x{ //aa + \\ //bb + \\ 9, + \\ }; + \\ var c = b[ //aa + \\ 0 + \\ ]; + \\ var d = [ + \\ _ + \\ //aa + \\ : + \\ 0 + \\ ]x{ //aa + \\ //bb + \\ 9, + \\ }; + \\ var e = d[ + \\ 0 + \\ //aa + \\ ]; + \\} + \\ + ); +} test "zig fmt: comments at several places in struct init" { try testTransform( diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 60f54c4ab5..4f7cadb437 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -439,9 +439,13 @@ fn renderExpression(gpa: *Allocator, ais: *Ais, tree: ast.Tree, node: ast.Node.I const suffix = datas[node]; const lbracket = tree.firstToken(suffix.rhs) - 1; const rbracket = tree.lastToken(suffix.rhs) + 1; + const one_line = tree.tokensOnSameLine(lbracket, rbracket); + const inner_space = if (one_line) Space.none else Space.newline; try renderExpression(gpa, ais, tree, suffix.lhs, .none); - try renderToken(ais, tree, lbracket, .none); // [ - try renderExpression(gpa, ais, tree, suffix.rhs, .none); + ais.pushIndentNextLine(); + try renderToken(ais, tree, lbracket, inner_space); // [ + try renderExpression(gpa, ais, tree, suffix.rhs, inner_space); + ais.popIndent(); return renderToken(ais, tree, rbracket, space); // ] }, @@ -679,7 +683,6 @@ fn renderExpression(gpa: *Allocator, ais: *Ais, tree: ast.Tree, node: ast.Node.I } } -// TODO: handle comments inside the brackets fn renderArrayType( gpa: *Allocator, ais: *Ais, @@ -687,13 +690,18 @@ fn renderArrayType( array_type: ast.full.ArrayType, space: Space, ) Error!void { - 
try renderToken(ais, tree, array_type.ast.lbracket, .none); // lbracket - try renderExpression(gpa, ais, tree, array_type.ast.elem_count, .none); + const rbracket = tree.firstToken(array_type.ast.elem_type) - 1; + const one_line = tree.tokensOnSameLine(array_type.ast.lbracket, rbracket); + const inner_space = if (one_line) Space.none else Space.newline; + ais.pushIndentNextLine(); + try renderToken(ais, tree, array_type.ast.lbracket, inner_space); // lbracket + try renderExpression(gpa, ais, tree, array_type.ast.elem_count, inner_space); if (array_type.ast.sentinel) |sentinel| { - try renderToken(ais, tree, tree.firstToken(sentinel) - 1, .none); // colon - try renderExpression(gpa, ais, tree, sentinel, .none); + try renderToken(ais, tree, tree.firstToken(sentinel) - 1, inner_space); // colon + try renderExpression(gpa, ais, tree, sentinel, inner_space); } - try renderToken(ais, tree, tree.firstToken(array_type.ast.elem_type) - 1, .none); // rbracket + ais.popIndent(); + try renderToken(ais, tree, rbracket, .none); // rbracket return renderExpression(gpa, ais, tree, array_type.ast.elem_type, space); } @@ -1577,7 +1585,9 @@ fn renderStructInit( try renderExpression(gpa, ais, tree, struct_init.ast.type_expr, .none); // T } if (struct_init.ast.fields.len == 0) { + ais.pushIndentNextLine(); try renderToken(ais, tree, struct_init.ast.lbrace, .none); // lbrace + ais.popIndent(); return renderToken(ais, tree, struct_init.ast.lbrace + 1, space); // rbrace } From 01e89c91717b1f9102429d4809c40d53d751a806 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Tue, 23 Feb 2021 12:39:50 -0700 Subject: [PATCH 155/173] translate-c: update to latest AST tag changes --- src/translate_c/ast.zig | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/translate_c/ast.zig b/src/translate_c/ast.zig index 928619449f..b3a05656e3 100644 --- a/src/translate_c/ast.zig +++ b/src/translate_c/ast.zig @@ -1629,7 +1629,7 @@ fn renderNode(c: *Context, node: Node) 
Allocator.Error!NodeIndex { const span = try c.listToSpan(members); return c.addNode(.{ - .tag = .container_decl_arg_comma, + .tag = .container_decl_arg_trailing, .main_token = enum_tok, .data = .{ .lhs = arg_expr, @@ -1811,7 +1811,7 @@ fn renderRecord(c: *Context, node: Node) !NodeIndex { if (members.len <= 2) { return c.addNode(.{ - .tag = .container_decl_two_comma, + .tag = .container_decl_two_trailing, .main_token = kind_tok, .data = .{ .lhs = members[0], @@ -1821,7 +1821,7 @@ fn renderRecord(c: *Context, node: Node) !NodeIndex { } else { const span = try c.listToSpan(members); return c.addNode(.{ - .tag = .container_decl_comma, + .tag = .container_decl_trailing, .main_token = kind_tok, .data = .{ .lhs = span.start, From f041425e48b2f65fd4262d3c9ba210c410322d02 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Tue, 23 Feb 2021 13:55:12 -0700 Subject: [PATCH 156/173] translate-c: fix using wrong slice and AST tag --- CMakeLists.txt | 2 +- src/translate_c/ast.zig | 11 ++++++++++- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 80b586e9cd..4f2dc4fa4a 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -572,12 +572,12 @@ set(ZIG_STAGE2_SOURCES "${CMAKE_SOURCE_DIR}/src/target.zig" "${CMAKE_SOURCE_DIR}/src/tracy.zig" "${CMAKE_SOURCE_DIR}/src/translate_c.zig" + "${CMAKE_SOURCE_DIR}/src/translate_c/ast.zig" "${CMAKE_SOURCE_DIR}/src/type.zig" "${CMAKE_SOURCE_DIR}/src/value.zig" "${CMAKE_SOURCE_DIR}/src/windows_sdk.zig" "${CMAKE_SOURCE_DIR}/src/zir.zig" "${CMAKE_SOURCE_DIR}/src/zir_sema.zig" - "${CMAKE_SOURCE_DIR}/src/translate_c/ast.zig" ) if(MSVC) diff --git a/src/translate_c/ast.zig b/src/translate_c/ast.zig index b3a05656e3..3bc20271cc 100644 --- a/src/translate_c/ast.zig +++ b/src/translate_c/ast.zig @@ -1809,7 +1809,16 @@ fn renderRecord(c: *Context, node: Node) !NodeIndex { } _ = try c.addToken(.r_brace, "}"); - if (members.len <= 2) { + if (payload.fields.len == 0) { + return c.addNode(.{ + .tag = 
.container_decl_two, + .main_token = kind_tok, + .data = .{ + .lhs = 0, + .rhs = 0, + }, + }); + } else if (payload.fields.len <= 2) { return c.addNode(.{ .tag = .container_decl_two_trailing, .main_token = kind_tok, From ca9259340d42d92c8e44cb814effae447ee64a24 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Tue, 23 Feb 2021 14:30:21 -0700 Subject: [PATCH 157/173] zig fmt now intentionally respects all empty line comments --- lib/std/zig/parser_test.zig | 24 +++++++++--------------- 1 file changed, 9 insertions(+), 15 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 76d3985576..3fd7fbdb74 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -3447,21 +3447,15 @@ test "zig fmt: file ends with struct field" { ); } -// TODO intentionally change the behavior of this case? -// for array literals we necessarily have meaningful empty comments -//test "zig fmt: comment after empty comment" { -// try testTransform( -// \\const x = true; // -// \\// -// \\// -// \\//a -// \\ -// , -// \\const x = true; -// \\//a -// \\ -// ); -//} +test "zig fmt: comment after empty comment" { + try testCanonical( + \\const x = true; // + \\// + \\// + \\//a + \\ + ); +} test "zig fmt: line comment in array" { try testTransform( From 4ee368c4b33f5cf286c575e94535deaf59659895 Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Tue, 23 Feb 2021 23:17:00 +0100 Subject: [PATCH 158/173] zig fmt: comments/line breaks in field access chain --- lib/std/zig/parser_test.zig | 116 +++++++++++++++++++++++++----------- lib/std/zig/render.zig | 24 +++++++- 2 files changed, 105 insertions(+), 35 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 3fd7fbdb74..9701cd4e29 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -3790,40 +3790,88 @@ test "zig fmt: comments in ternary ifs" { ); } -//test "zig fmt: test comments in field access chain" { -// try testCanonical( -// \\pub 
const str = struct { -// \\ pub const Thing = more.more // -// \\ .more() // -// \\ .more().more() // -// \\ .more() // -// \\ // .more() // -// \\ .more() // -// \\ .more(); -// \\ data: Data, -// \\}; -// \\ -// \\pub const str = struct { -// \\ pub const Thing = more.more // -// \\ .more() // -// \\ // .more() // -// \\ // .more() // -// \\ // .more() // -// \\ .more() // -// \\ .more(); -// \\ data: Data, -// \\}; -// \\ -// \\pub const str = struct { -// \\ pub const Thing = more // -// \\ .more // -// \\ .more() // -// \\ .more(); -// \\ data: Data, -// \\}; -// \\ -// ); -//} +test "zig fmt: test comments in field access chain" { + try testCanonical( + \\pub const str = struct { + \\ pub const Thing = more.more // + \\ .more() // + \\ .more().more() // + \\ .more() // + \\ // .more() // + \\ .more() // + \\ .more(); + \\ data: Data, + \\}; + \\ + \\pub const str = struct { + \\ pub const Thing = more.more // + \\ .more() // + \\ // .more() // + \\ // .more() // + \\ // .more() // + \\ .more() // + \\ .more(); + \\ data: Data, + \\}; + \\ + \\pub const str = struct { + \\ pub const Thing = more // + \\ .more // + \\ .more() // + \\ .more(); + \\ data: Data, + \\}; + \\ + ); +} + +test "zig fmt: allow line break before field access" { + try testCanonical( + \\test { + \\ const w = foo.bar().zippy(zag).iguessthisisok(); + \\ + \\ const x = foo + \\ .bar() + \\ . // comment + \\ // comment + \\ swooop().zippy(zag) + \\ .iguessthisisok(); + \\ + \\ const y = view.output.root.server.input_manager.default_seat.wlr_seat.name; + \\ + \\ const z = view.output.root.server + \\ .input_manager // + \\ .default_seat + \\ . // comment + \\ // another comment + \\ wlr_seat.name; + \\} + \\ + ); + try testTransform( + \\test { + \\ const x = foo. + \\ bar() + \\ .zippy(zag).iguessthisisok(); + \\ + \\ const z = view.output.root.server. + \\ input_manager. 
+ \\ default_seat.wlr_seat.name; + \\} + \\ + , + \\test { + \\ const x = foo + \\ .bar() + \\ .zippy(zag).iguessthisisok(); + \\ + \\ const z = view.output.root.server + \\ .input_manager + \\ .default_seat.wlr_seat.name; + \\} + \\ + ); +} test "zig fmt: Indent comma correctly after multiline string literals in arg list (trailing comma)" { try testCanonical( diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 4f7cadb437..bd25868e12 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -298,9 +298,31 @@ fn renderExpression(gpa: *Allocator, ais: *Ais, tree: ast.Tree, node: ast.Node.I }, .field_access => { + const main_token = main_tokens[node]; const field_access = datas[node]; + try renderExpression(gpa, ais, tree, field_access.lhs, .none); - try renderToken(ais, tree, main_tokens[node], .none); + + // Allow a line break between the lhs and the dot if the lhs and rhs + // are on different lines. + const lhs_last_token = tree.lastToken(field_access.lhs); + const same_line = tree.tokensOnSameLine(lhs_last_token, main_token + 1); + if (!same_line) { + if (!hasComment(tree, lhs_last_token, main_token)) try ais.insertNewline(); + ais.pushIndentOneShot(); + } + + try renderToken(ais, tree, main_token, .none); + + // This check ensures that zag() is indented in the following example: + // const x = foo + // .bar() + // . 
// comment + // zag(); + if (!same_line and hasComment(tree, main_token, main_token + 1)) { + ais.pushIndentOneShot(); + } + return renderToken(ais, tree, field_access.rhs, space); }, From 6f4a1bafcf9cc1120881dabc462b46696481720e Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Tue, 23 Feb 2021 16:09:51 -0700 Subject: [PATCH 159/173] zig fmt: fn call with comments and multiline strings forcing the parameters over multiple lines --- lib/std/zig/parser_test.zig | 78 ++++++++++++++++++------------------- lib/std/zig/render.zig | 32 ++++++++++----- 2 files changed, 62 insertions(+), 48 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 9701cd4e29..17e0701621 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -4049,45 +4049,45 @@ test "zig fmt: allow trailing line comments to do manual array formatting" { // ); //} -//test "zig fmt: use of comments and Multiline string literals may force the parameters over multiple lines" { -// try testCanonical( -// \\pub fn makeMemUndefined(qzz: []u8) i1 { -// \\ cases.add( // fixed bug #2032 -// \\ "compile diagnostic string for top level decl type", -// \\ \\export fn entry() void { -// \\ \\ var foo: u32 = @This(){}; -// \\ \\} -// \\ , &[_][]const u8{ -// \\ "tmp.zig:2:27: error: type 'u32' does not support array initialization", -// \\ }); -// \\ @compileError( -// \\ \\ unknown-length pointers and C pointers cannot be hashed deeply. -// \\ \\ Consider providing your own hash function. -// \\ \\ unknown-length pointers and C pointers cannot be hashed deeply. -// \\ \\ Consider providing your own hash function. 
-// \\ ); -// \\ return @intCast(i1, doMemCheckClientRequestExpr(0, // default return -// \\ .MakeMemUndefined, @ptrToInt(qzz.ptr), qzz.len, 0, 0, 0)); -// \\} -// \\ -// \\// This looks like garbage don't do this -// \\const rparen = tree.prevToken( -// \\// the first token for the annotation expressions is the left -// \\// parenthesis, hence the need for two prevToken -// \\ if (fn_proto.getAlignExpr()) |align_expr| -// \\ tree.prevToken(tree.prevToken(align_expr.firstToken())) -// \\else if (fn_proto.getSectionExpr()) |section_expr| -// \\ tree.prevToken(tree.prevToken(section_expr.firstToken())) -// \\else if (fn_proto.getCallconvExpr()) |callconv_expr| -// \\ tree.prevToken(tree.prevToken(callconv_expr.firstToken())) -// \\else switch (fn_proto.return_type) { -// \\ .Explicit => |node| node.firstToken(), -// \\ .InferErrorSet => |node| tree.prevToken(node.firstToken()), -// \\ .Invalid => unreachable, -// \\}); -// \\ -// ); -//} +test "zig fmt: use of comments and multiline string literals may force the parameters over multiple lines" { + try testCanonical( + \\pub fn makeMemUndefined(qzz: []u8) i1 { + \\ cases.add( // fixed bug foo + \\ "compile diagnostic string for top level decl type", + \\ \\export fn entry() void { + \\ \\ var foo: u32 = @This(){}; + \\ \\} + \\ , &[_][]const u8{ + \\ "tmp.zig:2:27: error: type 'u32' does not support array initialization", + \\ }); + \\ @compileError( + \\ \\ unknown-length pointers and C pointers cannot be hashed deeply. + \\ \\ Consider providing your own hash function. + \\ \\ unknown-length pointers and C pointers cannot be hashed deeply. + \\ \\ Consider providing your own hash function. 
+ \\ ); + \\ return @intCast(i1, doMemCheckClientRequestExpr(0, // default return + \\ .MakeMemUndefined, @ptrToInt(qzz.ptr), qzz.len, 0, 0, 0)); + \\} + \\ + \\// This looks like garbage don't do this + \\const rparen = tree.prevToken( + \\// the first token for the annotation expressions is the left + \\// parenthesis, hence the need for two prevToken + \\if (fn_proto.getAlignExpr()) |align_expr| + \\ tree.prevToken(tree.prevToken(align_expr.firstToken())) + \\else if (fn_proto.getSectionExpr()) |section_expr| + \\ tree.prevToken(tree.prevToken(section_expr.firstToken())) + \\else if (fn_proto.getCallconvExpr()) |callconv_expr| + \\ tree.prevToken(tree.prevToken(callconv_expr.firstToken())) + \\else switch (fn_proto.return_type) { + \\ .Explicit => |node| node.firstToken(), + \\ .InferErrorSet => |node| tree.prevToken(node.firstToken()), + \\ .Invalid => unreachable, + \\}); + \\ + ); +} test "zig fmt: single argument trailing commas in @builtins()" { try testCanonical( diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index bd25868e12..3a37781de6 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -1291,6 +1291,12 @@ fn renderBuiltinCall( try renderToken(ais, tree, builtin_token + 1, .none); // ( for (params) |param_node, i| { + const first_param_token = tree.firstToken(param_node); + if (token_tags[first_param_token] == .multiline_string_literal_line or + hasSameLineComment(tree, first_param_token - 1)) + { + ais.pushIndentOneShot(); + } try renderExpression(gpa, ais, tree, param_node, .none); if (i + 1 < params.len) { @@ -1733,12 +1739,8 @@ fn renderArrayInit( const maybe_comma = expr_last_token + 1; if (token_tags[maybe_comma] == .comma) { - const after_comma_src = tree.source[token_starts[maybe_comma]..token_starts[maybe_comma + 1]]; - for (after_comma_src) |byte| switch (byte) { - '\n' => break, - '/' => break :sec_end i - this_line_size.? 
+ 1, - else => continue, - }; + if (hasSameLineComment(tree, maybe_comma)) + break :sec_end i - this_line_size.? + 1; } } break :sec_end row_exprs.len; @@ -2106,9 +2108,10 @@ fn renderCall( try renderToken(ais, tree, lparen, .none); // ( for (params) |param_node, i| { - const this_multiline_string = - token_tags[tree.firstToken(param_node)] == .multiline_string_literal_line; - if (this_multiline_string) { + const first_param_token = tree.firstToken(param_node); + if (token_tags[first_param_token] == .multiline_string_literal_line or + hasSameLineComment(tree, first_param_token - 1)) + { ais.pushIndentOneShot(); } try renderExpression(gpa, ais, tree, param_node, .none); @@ -2339,6 +2342,17 @@ fn tokenSliceForRender(tree: ast.Tree, token_index: ast.TokenIndex) []const u8 { return ret; } +fn hasSameLineComment(tree: ast.Tree, token_index: ast.TokenIndex) bool { + const token_starts = tree.tokens.items(.start); + const between_source = tree.source[token_starts[token_index]..token_starts[token_index + 1]]; + for (between_source) |byte| switch (byte) { + '\n' => return false, + '/' => return true, + else => continue, + }; + return false; +} + fn writeFixingWhitespace(writer: std.ArrayList(u8).Writer, slice: []const u8) Error!void { for (slice) |byte| switch (byte) { '\t' => try writer.writeAll(" " ** 4), From bb89c619edbae8b02c826a2d334e2736c876c07d Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Tue, 23 Feb 2021 17:00:33 -0700 Subject: [PATCH 160/173] zig fmt: multiline string literals + array init --- lib/std/zig/parser_test.zig | 127 ++++++++++++++++++------------------ lib/std/zig/render.zig | 26 ++++++++ 2 files changed, 91 insertions(+), 62 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 17e0701621..44f4dc509a 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -3986,68 +3986,71 @@ test "zig fmt: allow trailing line comments to do manual array formatting" { ); } -//test "zig fmt: multiline 
string literals should play nice with array initializers" { -// try testCanonical( -// \\fn main() void { -// \\ var a = .{.{.{.{.{.{.{.{ -// \\ 0, -// \\ }}}}}}}}; -// \\ myFunc(.{ -// \\ "aaaaaaa", "bbbbbb", "ccccc", -// \\ "dddd", ("eee"), ("fff"), -// \\ ("gggg"), -// \\ // Line comment -// \\ \\Multiline String Literals can be quite long -// \\ , -// \\ \\Multiline String Literals can be quite long -// \\ \\Multiline String Literals can be quite long -// \\ , -// \\ \\Multiline String Literals can be quite long -// \\ \\Multiline String Literals can be quite long -// \\ \\Multiline String Literals can be quite long -// \\ \\Multiline String Literals can be quite long -// \\ , -// \\ ( -// \\ \\Multiline String Literals can be quite long -// \\ ), -// \\ .{ -// \\ \\xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx -// \\ \\xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx -// \\ \\xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx -// \\ }, -// \\ .{( -// \\ \\xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx -// \\ )}, -// \\ .{ -// \\ "xxxxxxx", "xxx", -// \\ ( -// \\ \\ xxx -// \\ ), -// \\ "xxx", "xxx", -// \\ }, -// \\ .{ "xxxxxxx", "xxx", "xxx", "xxx" }, .{ "xxxxxxx", "xxx", "xxx", "xxx" }, -// \\ "aaaaaaa", "bbbbbb", "ccccc", // - -// \\ "dddd", ("eee"), ("fff"), -// \\ .{ -// \\ "xxx", "xxx", -// \\ ( -// \\ \\ xxx -// \\ ), -// \\ "xxxxxxxxxxxxxx", "xxx", -// \\ }, -// \\ .{ -// \\ ( -// \\ \\xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx -// \\ ), -// \\ \\xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx -// \\ }, -// \\ \\xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx -// \\ \\xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx -// \\ }); -// \\} -// \\ -// ); -//} +test "zig fmt: multiline string literals should play nice with array initializers" { + try testCanonical( + \\fn main() void { + \\ var a = .{.{.{.{.{.{.{.{ + \\ 0, + \\ }}}}}}}}; + \\ myFunc(.{ + \\ "aaaaaaa", "bbbbbb", "ccccc", + \\ "dddd", ("eee"), ("fff"), + \\ ("gggg"), + \\ // Line comment + \\ \\Multiline String Literals can be quite long + \\ , + 
\\ \\Multiline String Literals can be quite long + \\ \\Multiline String Literals can be quite long + \\ , + \\ \\Multiline String Literals can be quite long + \\ \\Multiline String Literals can be quite long + \\ \\Multiline String Literals can be quite long + \\ \\Multiline String Literals can be quite long + \\ , + \\ ( + \\ \\Multiline String Literals can be quite long + \\ ), + \\ .{ + \\ \\xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx + \\ \\xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx + \\ \\xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx + \\ }, + \\ .{( + \\ \\xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx + \\ )}, + \\ .{ + \\ "xxxxxxx", "xxx", + \\ ( + \\ \\ xxx + \\ ), + \\ "xxx", + \\ "xxx", + \\ }, + \\ .{ "xxxxxxx", "xxx", "xxx", "xxx" }, + \\ .{ "xxxxxxx", "xxx", "xxx", "xxx" }, + \\ "aaaaaaa", "bbbbbb", "ccccc", // - + \\ "dddd", ("eee"), ("fff"), + \\ .{ + \\ "xxx", "xxx", + \\ ( + \\ \\ xxx + \\ ), + \\ "xxxxxxxxxxxxxx", + \\ "xxx", + \\ }, + \\ .{ + \\ ( + \\ \\xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx + \\ ), + \\ \\xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx + \\ }, + \\ \\xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx + \\ \\xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx + \\ }); + \\} + \\ + ); +} test "zig fmt: use of comments and multiline string literals may force the parameters over multiple lines" { try testCanonical( diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 3a37781de6..53a541ab61 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -1686,6 +1686,19 @@ fn renderArrayInit( const trailing_comma = token_tags[last_elem_token + 1] == .comma; const rbrace = if (trailing_comma) last_elem_token + 2 else last_elem_token + 1; assert(token_tags[rbrace] == .r_brace); + + if (array_init.ast.elements.len == 1) { + const only_elem = array_init.ast.elements[0]; + const first_token = tree.firstToken(only_elem); + if (token_tags[first_token] != .multiline_string_literal_line and + !anythingBetween(tree, last_elem_token, rbrace)) + { + try 
renderToken(ais, tree, array_init.ast.lbrace, .none); + try renderExpression(gpa, ais, tree, only_elem, .none); + return renderToken(ais, tree, rbrace, space); + } + } + const contains_newlines = !tree.tokensOnSameLine(array_init.ast.lbrace, rbrace); if (!trailing_comma and !contains_newlines) { @@ -2353,6 +2366,19 @@ fn hasSameLineComment(tree: ast.Tree, token_index: ast.TokenIndex) bool { return false; } +/// Returns `true` if and only if there are any tokens or line comments between +/// start_token and end_token. +fn anythingBetween(tree: ast.Tree, start_token: ast.TokenIndex, end_token: ast.TokenIndex) bool { + if (start_token + 1 != end_token) return true; + const token_starts = tree.tokens.items(.start); + const between_source = tree.source[token_starts[start_token]..token_starts[start_token + 1]]; + for (between_source) |byte| switch (byte) { + '/' => return true, + else => continue, + }; + return false; +} + fn writeFixingWhitespace(writer: std.ArrayList(u8).Writer, slice: []const u8) Error!void { for (slice) |byte| switch (byte) { '\t' => try writer.writeAll(" " ** 4), From 08107a555eb1bc8d6e9aa8d80507896d08b2b18d Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Tue, 23 Feb 2021 17:19:01 -0700 Subject: [PATCH 161/173] zig fmt: fix inline assembly test cases All zig fmt test cases are now passing again in this branch. 
--- lib/std/zig/parser_test.zig | 153 +++++++++++++++++------------------- lib/std/zig/render.zig | 25 +++++- 2 files changed, 95 insertions(+), 83 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 44f4dc509a..e32a37a3c6 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -1863,46 +1863,42 @@ test "zig fmt: extra newlines at the end" { ); } -//test "zig fmt: simple asm" { -// try testTransform( -// \\comptime { -// \\ asm volatile ( -// \\ \\.globl aoeu; -// \\ \\.type aoeu, @function; -// \\ \\.set aoeu, derp; -// \\ ); -// \\ -// \\ asm ("not real assembly" -// \\ :[a] "x" (x),); -// \\ asm ("not real assembly" -// \\ :[a] "x" (->i32),:[a] "x" (1),); -// \\ asm ("still not real assembly" -// \\ :::"a","b",); -// \\} -// , -// \\comptime { -// \\ asm volatile ( -// \\ \\.globl aoeu; -// \\ \\.type aoeu, @function; -// \\ \\.set aoeu, derp; -// \\ ); -// \\ -// \\ asm ("not real assembly" -// \\ : [a] "x" (x) -// \\ ); -// \\ asm ("not real assembly" -// \\ : [a] "x" (-> i32) -// \\ : [a] "x" (1) -// \\ ); -// \\ asm ("still not real assembly" -// \\ : -// \\ : -// \\ : "a", "b" -// \\ ); -// \\} -// \\ -// ); -//} +test "zig fmt: simple asm" { + try testTransform( + \\comptime { + \\ asm volatile ( + \\ \\.globl aoeu; + \\ \\.type aoeu, @function; + \\ \\.set aoeu, derp; + \\ ); + \\ + \\ asm ("not real assembly" + \\ :[a] "x" (x),); + \\ asm ("not real assembly" + \\ :[a] "x" (->i32),:[a] "x" (1),); + \\ asm ("still not real assembly" + \\ :::"a","b",); + \\} + , + \\comptime { + \\ asm volatile ( + \\ \\.globl aoeu; + \\ \\.type aoeu, @function; + \\ \\.set aoeu, derp; + \\ ); + \\ + \\ asm ("not real assembly" + \\ : [a] "x" (x) + \\ ); + \\ asm ("not real assembly" + \\ : [a] "x" (-> i32) + \\ : [a] "x" (1) + \\ ); + \\ asm ("still not real assembly" ::: "a", "b"); + \\} + \\ + ); +} test "zig fmt: nested struct literal with one item" { try testCanonical( @@ -3363,46 +3359,43 @@ test "zig fmt: 
comptime block in container" { ); } -//test "zig fmt: inline asm parameter alignment" { -// try testCanonical( -// \\pub fn main() void { -// \\ asm volatile ( -// \\ \\ foo -// \\ \\ bar -// \\ ); -// \\ asm volatile ( -// \\ \\ foo -// \\ \\ bar -// \\ : [_] "" (-> usize), -// \\ [_] "" (-> usize) -// \\ ); -// \\ asm volatile ( -// \\ \\ foo -// \\ \\ bar -// \\ : -// \\ : [_] "" (0), -// \\ [_] "" (0) -// \\ ); -// \\ asm volatile ( -// \\ \\ foo -// \\ \\ bar -// \\ : -// \\ : -// \\ : "", "" -// \\ ); -// \\ asm volatile ( -// \\ \\ foo -// \\ \\ bar -// \\ : [_] "" (-> usize), -// \\ [_] "" (-> usize) -// \\ : [_] "" (0), -// \\ [_] "" (0) -// \\ : "", "" -// \\ ); -// \\} -// \\ -// ); -//} +test "zig fmt: inline asm parameter alignment" { + try testCanonical( + \\pub fn main() void { + \\ asm volatile ( + \\ \\ foo + \\ \\ bar + \\ ); + \\ asm volatile ( + \\ \\ foo + \\ \\ bar + \\ : [_] "" (-> usize), + \\ [_] "" (-> usize) + \\ ); + \\ asm volatile ( + \\ \\ foo + \\ \\ bar + \\ : + \\ : [_] "" (0), + \\ [_] "" (0) + \\ ); + \\ asm volatile ( + \\ \\ foo + \\ \\ bar + \\ ::: "", ""); + \\ asm volatile ( + \\ \\ foo + \\ \\ bar + \\ : [_] "" (-> usize), + \\ [_] "" (-> usize) + \\ : [_] "" (0), + \\ [_] "" (0) + \\ : "", "" + \\ ); + \\} + \\ + ); +} test "zig fmt: multiline string in array" { try testCanonical( diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 53a541ab61..cd1d4a562b 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -1955,21 +1955,40 @@ fn renderAsm( } if (asm_node.ast.items.len == 0) { - try renderExpression(gpa, ais, tree, asm_node.ast.template, .none); + ais.pushIndent(); if (asm_node.first_clobber) |first_clobber| { // asm ("foo" ::: "a", "b") + // asm ("foo" ::: "a", "b",) + try renderExpression(gpa, ais, tree, asm_node.ast.template, .space); + // Render the three colons. 
+ try renderToken(ais, tree, first_clobber - 3, .none); + try renderToken(ais, tree, first_clobber - 2, .none); + try renderToken(ais, tree, first_clobber - 1, .space); + var tok_i = first_clobber; while (true) : (tok_i += 1) { try renderToken(ais, tree, tok_i, .none); tok_i += 1; switch (token_tags[tok_i]) { - .r_paren => return renderToken(ais, tree, tok_i, space), - .comma => try renderToken(ais, tree, tok_i, .space), + .r_paren => { + ais.popIndent(); + return renderToken(ais, tree, tok_i, space); + }, + .comma => { + if (token_tags[tok_i + 1] == .r_paren) { + ais.popIndent(); + return renderToken(ais, tree, tok_i + 1, space); + } else { + try renderToken(ais, tree, tok_i, .space); + } + }, else => unreachable, } } } else { // asm ("foo") + try renderExpression(gpa, ais, tree, asm_node.ast.template, .none); + ais.popIndent(); return renderToken(ais, tree, asm_node.ast.rparen, space); // rparen } } From 988f1c6a6f075e63d0ab2dc8f6e802a67b4a0902 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Tue, 23 Feb 2021 18:23:49 -0700 Subject: [PATCH 162/173] zig fmt: fn proto end with anytype and comma also zig fmt: space after top level doc comment --- lib/std/zig/parse.zig | 7 ++++++- lib/std/zig/parser_test.zig | 18 ++++++++++++++++++ lib/std/zig/render.zig | 10 ++++++++++ 3 files changed, 34 insertions(+), 1 deletion(-) diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig index 528b684e46..36da59ed82 100644 --- a/lib/std/zig/parse.zig +++ b/lib/std/zig/parse.zig @@ -3706,7 +3706,12 @@ const Parser = struct { const param = try p.expectParamDecl(); if (param != 0) break param; switch (p.token_tags[p.nextToken()]) { - .comma => continue, + .comma => { + if (p.eatToken(.r_paren)) |_| { + return SmallSpan{ .zero_or_one = 0 }; + } + continue; + }, .r_paren => return SmallSpan{ .zero_or_one = 0 }, else => { // This is likely just a missing comma; diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index e32a37a3c6..9f77f3a258 100644 --- 
a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -4134,6 +4134,24 @@ test "zig fmt: function params should align nicely" { ); } +test "zig fmt: fn proto end with anytype and comma" { + try testCanonical( + \\pub fn format( + \\ out_stream: anytype, + \\) !void {} + \\ + ); +} + +test "zig fmt: space after top level doc comment" { + try testCanonical( + \\//! top level doc comment + \\ + \\field: i32, + \\ + ); +} + test "zig fmt: error for invalid bit range" { try testError( \\var x: []align(0:0:0)u8 = bar; diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index cd1d4a562b..784242f192 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -1449,6 +1449,8 @@ fn renderFnProto(gpa: *Allocator, ais: *Ais, tree: ast.Tree, fn_proto: ast.full. .identifier => {}, .keyword_anytype => { try renderToken(ais, tree, last_param_token, .comma); // anytype + if (token_tags[last_param_token + 1] == .comma) + last_param_token += 1; continue; }, .r_paren => break, @@ -1462,6 +1464,8 @@ fn renderFnProto(gpa: *Allocator, ais: *Ais, tree: ast.Tree, fn_proto: ast.full. } if (token_tags[last_param_token] == .keyword_anytype) { try renderToken(ais, tree, last_param_token, .comma); // anytype + if (token_tags[last_param_token + 1] == .comma) + last_param_token += 1; continue; } const param = fn_proto.ast.params[param_i]; @@ -2363,6 +2367,12 @@ fn renderContainerDocComments(ais: *Ais, tree: ast.Tree, start_token: ast.TokenI while (token_tags[tok] == .container_doc_comment) : (tok += 1) { try renderToken(ais, tree, tok, .newline); } + // Render extra newline if there is one between final container doc comment and + // the next token. If the next token is a doc comment, that code path + // will have its own logic to insert a newline. 
+ if (token_tags[tok] != .doc_comment) { + try renderExtraNewlineToken(ais, tree, tok); + } } fn tokenSliceForRender(tree: ast.Tree, token_index: ast.TokenIndex) []const u8 { From 4420fe97bebbecfb595c8acad255c3f3747bcaef Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Tue, 23 Feb 2021 18:33:13 -0700 Subject: [PATCH 163/173] zig fmt: for loop with ptr payload and index --- lib/std/zig/parser_test.zig | 12 ++++++++++++ lib/std/zig/render.zig | 6 +++--- 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 9f77f3a258..951c437333 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -4152,6 +4152,18 @@ test "zig fmt: space after top level doc comment" { ); } +test "zig fmt: for loop with ptr payload and index" { + try testCanonical( + \\test { + \\ for (self.entries.items) |*item, i| {} + \\ for (self.entries.items) |*item, i| + \\ a = b; + \\ for (self.entries.items) |*item, i| a = b; + \\} + \\ + ); +} + test "zig fmt: error for invalid bit range" { try testError( \\var x: []align(0:0:0)u8 = bar; diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 784242f192..0331ca2c70 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -1040,7 +1040,7 @@ fn renderWhile(gpa: *Allocator, ais: *Ais, tree: ast.Tree, while_node: ast.full. if (token_tags[ident + 1] == .comma) { try renderToken(ais, tree, ident + 1, .space); // , try renderToken(ais, tree, ident + 2, .none); // index - break :blk payload_token + 3; + break :blk ident + 3; } else { break :blk ident + 1; } @@ -1102,7 +1102,7 @@ fn renderWhile(gpa: *Allocator, ais: *Ais, tree: ast.Tree, while_node: ast.full. 
if (token_tags[ident + 1] == .comma) { try renderToken(ais, tree, ident + 1, .space); // , try renderToken(ais, tree, ident + 2, .none); // index - break :blk payload_token + 3; + break :blk ident + 3; } else { break :blk ident + 1; } @@ -1177,7 +1177,7 @@ fn renderWhile(gpa: *Allocator, ais: *Ais, tree: ast.Tree, while_node: ast.full. if (token_tags[ident + 1] == .comma) { try renderToken(ais, tree, ident + 1, .space); // , try renderToken(ais, tree, ident + 2, .none); // index - break :blk payload_token + 3; + break :blk ident + 3; } else { break :blk ident + 1; } From 05f304807f4d492bea0fa54c113e77ec9bb49f6d Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Tue, 23 Feb 2021 18:48:01 -0700 Subject: [PATCH 164/173] zig fmt: add 3 more disabled failing test cases Found by running `zig fmt` on the std lib. --- lib/std/zig/parser_test.zig | 40 +++++++++++++++++++++++++++++++++++++ 1 file changed, 40 insertions(+) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 951c437333..3f25b570c3 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -4164,6 +4164,46 @@ test "zig fmt: for loop with ptr payload and index" { ); } +// TODO +//test "zig fmt: proper indent line comment after multi-line single expr while loop" { +// try testCanonical( +// \\test { +// \\ while (a) : (b) +// \\ foo(); +// \\ +// \\ // bar +// \\ baz(); +// \\} +// \\ +// ); +//} + +// TODO +//test "zig fmt: respect extra newline between fn and pub usingnamespace" { +// try testCanonical( +// \\fn foo() void { +// \\ bar(); +// \\} +// \\ +// \\pub usingnamespace baz; +// \\ +// ); +//} + +// TODO +//test "zig fmt: respect extra newline between switch items" { +// try testCanonical( +// \\const a = switch (b) { +// \\ .c => {}, +// \\ +// \\ .d, +// \\ .e, +// \\ => f, +// \\}; +// \\ +// ); +//} + test "zig fmt: error for invalid bit range" { try testError( \\var x: []align(0:0:0)u8 = bar; From bf642204b373e01314ecfb0c50a643dc4b05746f Mon Sep 17 
00:00:00 2001 From: Andrew Kelley Date: Tue, 23 Feb 2021 22:24:59 -0700 Subject: [PATCH 165/173] std.MultiArrayList: add workaround for LLVM bug --- lib/std/multi_array_list.zig | 94 ++++++++++++++++++++++++++++++++++-- 1 file changed, 91 insertions(+), 3 deletions(-) diff --git a/lib/std/multi_array_list.zig b/lib/std/multi_array_list.zig index 24269c2316..689105755c 100644 --- a/lib/std/multi_array_list.zig +++ b/lib/std/multi_array_list.zig @@ -263,9 +263,18 @@ pub fn MultiArrayList(comptime S: type) type { } fn capacityInBytes(capacity: usize) usize { - const sizes_vector: std.meta.Vector(sizes.bytes.len, usize) = sizes.bytes; - const capacity_vector = @splat(sizes.bytes.len, capacity); - return @reduce(.Add, capacity_vector * sizes_vector); + // TODO move this workaround of LLVM SIMD bugs into the Zig frontend. + if (std.Target.current.cpu.arch == .aarch64) { + var sum: usize = 0; + for (sizes.bytes) |size| { + sum += capacity * size; + } + return sum; + } else { + const sizes_vector: std.meta.Vector(sizes.bytes.len, usize) = sizes.bytes; + const capacity_vector = @splat(sizes.bytes.len, capacity); + return @reduce(.Add, capacity_vector * sizes_vector); + } } fn allocatedBytes(self: Self) []align(@alignOf(S)) u8 { @@ -357,3 +366,82 @@ test "basic usage" { testing.expectEqualStrings("zigzag", list.items(.b)[1]); testing.expectEqualStrings("fizzbuzz", list.items(.b)[2]); } + +// This was observed to fail on aarch64 with LLVM 11, when the capacityInBytes +// function used the @reduce code path. 
+test "regression test for @reduce bug" { + const ally = std.testing.allocator; + var list = MultiArrayList(struct { + tag: std.zig.Token.Tag, + start: u32, + }){}; + defer list.deinit(ally); + + try list.ensureCapacity(ally, 20); + + try list.append(ally, .{ .tag = .keyword_const, .start = 0 }); + try list.append(ally, .{ .tag = .identifier, .start = 6 }); + try list.append(ally, .{ .tag = .equal, .start = 10 }); + try list.append(ally, .{ .tag = .builtin, .start = 12 }); + try list.append(ally, .{ .tag = .l_paren, .start = 19 }); + try list.append(ally, .{ .tag = .string_literal, .start = 20 }); + try list.append(ally, .{ .tag = .r_paren, .start = 25 }); + try list.append(ally, .{ .tag = .semicolon, .start = 26 }); + try list.append(ally, .{ .tag = .keyword_pub, .start = 29 }); + try list.append(ally, .{ .tag = .keyword_fn, .start = 33 }); + try list.append(ally, .{ .tag = .identifier, .start = 36 }); + try list.append(ally, .{ .tag = .l_paren, .start = 40 }); + try list.append(ally, .{ .tag = .r_paren, .start = 41 }); + try list.append(ally, .{ .tag = .identifier, .start = 43 }); + try list.append(ally, .{ .tag = .bang, .start = 51 }); + try list.append(ally, .{ .tag = .identifier, .start = 52 }); + try list.append(ally, .{ .tag = .l_brace, .start = 57 }); + try list.append(ally, .{ .tag = .identifier, .start = 63 }); + try list.append(ally, .{ .tag = .period, .start = 66 }); + try list.append(ally, .{ .tag = .identifier, .start = 67 }); + try list.append(ally, .{ .tag = .period, .start = 70 }); + try list.append(ally, .{ .tag = .identifier, .start = 71 }); + try list.append(ally, .{ .tag = .l_paren, .start = 75 }); + try list.append(ally, .{ .tag = .string_literal, .start = 76 }); + try list.append(ally, .{ .tag = .comma, .start = 113 }); + try list.append(ally, .{ .tag = .period, .start = 115 }); + try list.append(ally, .{ .tag = .l_brace, .start = 116 }); + try list.append(ally, .{ .tag = .r_brace, .start = 117 }); + try list.append(ally, .{ .tag = .r_paren, 
.start = 118 }); + try list.append(ally, .{ .tag = .semicolon, .start = 119 }); + try list.append(ally, .{ .tag = .r_brace, .start = 121 }); + try list.append(ally, .{ .tag = .eof, .start = 123 }); + + const tags = list.items(.tag); + std.testing.expectEqual(tags[1], .identifier); + std.testing.expectEqual(tags[2], .equal); + std.testing.expectEqual(tags[3], .builtin); + std.testing.expectEqual(tags[4], .l_paren); + std.testing.expectEqual(tags[5], .string_literal); + std.testing.expectEqual(tags[6], .r_paren); + std.testing.expectEqual(tags[7], .semicolon); + std.testing.expectEqual(tags[8], .keyword_pub); + std.testing.expectEqual(tags[9], .keyword_fn); + std.testing.expectEqual(tags[10], .identifier); + std.testing.expectEqual(tags[11], .l_paren); + std.testing.expectEqual(tags[12], .r_paren); + std.testing.expectEqual(tags[13], .identifier); + std.testing.expectEqual(tags[14], .bang); + std.testing.expectEqual(tags[15], .identifier); + std.testing.expectEqual(tags[16], .l_brace); + std.testing.expectEqual(tags[17], .identifier); + std.testing.expectEqual(tags[18], .period); + std.testing.expectEqual(tags[19], .identifier); + std.testing.expectEqual(tags[20], .period); + std.testing.expectEqual(tags[21], .identifier); + std.testing.expectEqual(tags[22], .l_paren); + std.testing.expectEqual(tags[23], .string_literal); + std.testing.expectEqual(tags[24], .comma); + std.testing.expectEqual(tags[25], .period); + std.testing.expectEqual(tags[26], .l_brace); + std.testing.expectEqual(tags[27], .r_brace); + std.testing.expectEqual(tags[28], .r_paren); + std.testing.expectEqual(tags[29], .semicolon); + std.testing.expectEqual(tags[30], .r_brace); + std.testing.expectEqual(tags[31], .eof); +} From db4c15be507a55f63b7f19dd4cdc10717f44eb06 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Tue, 23 Feb 2021 23:01:16 -0700 Subject: [PATCH 166/173] zig fmt: respect extra newline between fn and pub usingnamespace --- lib/std/zig/ast.zig | 9 ++++++++- 
lib/std/zig/parser_test.zig | 21 ++++++++++----------- 2 files changed, 18 insertions(+), 12 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index 98ffe26818..f2418f7c11 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -321,7 +321,6 @@ pub const Tree = struct { while (true) switch (tags[n]) { .root => return 0, - .@"usingnamespace", .test_decl, .@"errdefer", .@"defer", @@ -468,6 +467,14 @@ pub const Tree = struct { return i - end_offset; }, + .@"usingnamespace" => { + const main_token = main_tokens[n]; + if (main_token > 0 and token_tags[main_token - 1] == .keyword_pub) { + end_offset += 1; + } + return main_token - end_offset; + }, + .async_call_one, .async_call_one_comma, .async_call, diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 3f25b570c3..6b81f9a16e 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -4178,17 +4178,16 @@ test "zig fmt: for loop with ptr payload and index" { // ); //} -// TODO -//test "zig fmt: respect extra newline between fn and pub usingnamespace" { -// try testCanonical( -// \\fn foo() void { -// \\ bar(); -// \\} -// \\ -// \\pub usingnamespace baz; -// \\ -// ); -//} +test "zig fmt: respect extra newline between fn and pub usingnamespace" { + try testCanonical( + \\fn foo() void { + \\ bar(); + \\} + \\ + \\pub usingnamespace baz; + \\ + ); +} // TODO //test "zig fmt: respect extra newline between switch items" { From 1b8eca030e9dac51361d80428f8276a09f31a8c2 Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Wed, 24 Feb 2021 12:14:15 +0100 Subject: [PATCH 167/173] zig fmt: fix firstToken() for switch_case --- lib/std/zig/ast.zig | 2 +- lib/std/zig/parser_test.zig | 25 ++++++++++++------------- 2 files changed, 13 insertions(+), 14 deletions(-) diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig index f2418f7c11..46b58e9465 100644 --- a/lib/std/zig/ast.zig +++ b/lib/std/zig/ast.zig @@ -580,7 +580,7 @@ pub const Tree = struct { .switch_case => { 
const extra = tree.extraData(datas[n].lhs, Node.SubRange); assert(extra.end - extra.start > 0); - n = extra.start; + n = tree.extra_data[extra.start]; }, .asm_output, .asm_input => { diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 6b81f9a16e..c0d8a012c9 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -4189,19 +4189,18 @@ test "zig fmt: respect extra newline between fn and pub usingnamespace" { ); } -// TODO -//test "zig fmt: respect extra newline between switch items" { -// try testCanonical( -// \\const a = switch (b) { -// \\ .c => {}, -// \\ -// \\ .d, -// \\ .e, -// \\ => f, -// \\}; -// \\ -// ); -//} +test "zig fmt: respect extra newline between switch items" { + try testCanonical( + \\const a = switch (b) { + \\ .c => {}, + \\ + \\ .d, + \\ .e, + \\ => f, + \\}; + \\ + ); +} test "zig fmt: error for invalid bit range" { try testError( From 15c7c6ab970830a87b6aa502a369fb2b29b933c5 Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Wed, 24 Feb 2021 12:29:17 +0100 Subject: [PATCH 168/173] zig fmt: handle comments in switch case value list --- lib/std/zig/parser_test.zig | 31 +++++++++++++++++++++++++++++++ lib/std/zig/render.zig | 4 +++- 2 files changed, 34 insertions(+), 1 deletion(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index c0d8a012c9..5ac0d62026 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -4202,6 +4202,37 @@ test "zig fmt: respect extra newline between switch items" { ); } +test "zig fmt: insert trailing comma if there are comments between switch values" { + try testTransform( + \\const a = switch (b) { + \\ .c => {}, + \\ + \\ .d, // foobar + \\ .e + \\ => f, + \\ + \\ .g, .h + \\ // comment + \\ => i, + \\}; + \\ + , + \\const a = switch (b) { + \\ .c => {}, + \\ + \\ .d, // foobar + \\ .e, + \\ => f, + \\ + \\ .g, + \\ .h, + \\ // comment + \\ => i, + \\}; + \\ + ); +} + test "zig fmt: error for invalid bit range" { try 
testError( \\var x: []align(0:0:0)u8 = bar; diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 0331ca2c70..604ee4f312 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -1533,7 +1533,9 @@ fn renderSwitchCase( } else if (switch_case.ast.values.len == 1) { // render on one line and drop the trailing comma if any try renderExpression(gpa, ais, tree, switch_case.ast.values[0], .space); - } else if (trailing_comma) { + } else if (trailing_comma or + hasComment(tree, tree.firstToken(switch_case.ast.values[0]), switch_case.ast.arrow_token)) + { // Render each value on a new line try renderExpressions(gpa, ais, tree, switch_case.ast.values, .comma); } else { From 371b21bdfbfa8917b44e809fb8a041411ffc6b8a Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Wed, 24 Feb 2021 13:46:11 +0100 Subject: [PATCH 169/173] zig fmt: fix comment indent after multiline single statement if/while/for --- lib/std/zig/parser_test.zig | 42 +++++++++++++------ lib/std/zig/render.zig | 84 +++++++++++++++++++++++++++---------- 2 files changed, 91 insertions(+), 35 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 5ac0d62026..a74a002385 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -4164,19 +4164,35 @@ test "zig fmt: for loop with ptr payload and index" { ); } -// TODO -//test "zig fmt: proper indent line comment after multi-line single expr while loop" { -// try testCanonical( -// \\test { -// \\ while (a) : (b) -// \\ foo(); -// \\ -// \\ // bar -// \\ baz(); -// \\} -// \\ -// ); -//} +test "zig fmt: proper indent line comment after multi-line single expr while loop" { + try testCanonical( + \\test { + \\ while (a) : (b) + \\ foo(); + \\ + \\ // bar + \\ baz(); + \\} + \\ + ); +} + +test "zig fmt: line comment after multiline single expr if statement with multiline string" { + try testCanonical( + \\test { + \\ if (foo) + \\ x = + \\ \\hello + \\ \\hello + \\ \\ + \\ ; + \\ + \\ // bar + \\ 
baz(); + \\} + \\ + ); +} test "zig fmt: respect extra newline between fn and pub usingnamespace" { try testCanonical( diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 604ee4f312..e93881edc0 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -203,7 +203,9 @@ fn renderExpression(gpa: *Allocator, ais: *Ais, tree: ast.Tree, node: ast.Node.I switch (space) { .none, .space, .newline => {}, + .newline_pop => ais.popIndent(), .semicolon => if (token_tags[i] == .semicolon) try renderToken(ais, tree, i, .newline), + .semicolon_pop => if (token_tags[i] == .semicolon) try renderToken(ais, tree, i, .newline_pop), .comma => if (token_tags[i] == .comma) try renderToken(ais, tree, i, .newline), .comma_space => if (token_tags[i] == .comma) try renderToken(ais, tree, i, .space), } @@ -1152,8 +1154,8 @@ fn renderWhile(gpa: *Allocator, ais: *Ais, tree: ast.Tree, while_node: ast.full. } } else { ais.pushIndent(); - try renderExpression(gpa, ais, tree, while_node.ast.then_expr, space); - ais.popIndent(); + assert(space == .semicolon); + try renderExpression(gpa, ais, tree, while_node.ast.then_expr, .semicolon_pop); return; } } @@ -2196,6 +2198,9 @@ const Space = enum { space, /// Output the token lexeme followed by a newline. newline, + /// Same as newline, but pop an indent level before rendering the + /// following comments if any. + newline_pop, /// If the next token is a comma, render it as well. If not, insert one. /// In either case, a newline will be inserted afterwards. comma, @@ -2205,6 +2210,9 @@ const Space = enum { /// Additionally consume the next token if it is a semicolon. /// In either case, a newline will be inserted afterwards. semicolon, + /// Same as semicolon, but pop an indent level before rendering the + /// following comments if any. 
+ semicolon_pop, }; fn renderToken(ais: *Ais, tree: ast.Tree, token_index: ast.TokenIndex, space: Space) Error!void { @@ -2216,33 +2224,65 @@ fn renderToken(ais: *Ais, tree: ast.Tree, token_index: ast.TokenIndex, space: Sp try ais.writer().writeAll(lexeme); - if (space == .comma and token_tags[token_index + 1] != .comma) { - try ais.writer().writeByte(','); - } - - const comment = try renderComments(ais, tree, token_start + lexeme.len, token_starts[token_index + 1]); + const token_end = token_start + lexeme.len; + const next_token_start = token_starts[token_index + 1]; switch (space) { - .none => {}, - .space => if (!comment) try ais.writer().writeByte(' '), - .newline => if (!comment) try ais.insertNewline(), + .none => _ = try renderComments(ais, tree, token_end, next_token_start), - .comma => if (token_tags[token_index + 1] == .comma) { - try renderToken(ais, tree, token_index + 1, .newline); - } else if (!comment) { - try ais.insertNewline(); - }, - - .comma_space => if (token_tags[token_index + 1] == .comma) { - try renderToken(ais, tree, token_index + 1, .space); - } else if (!comment) { + .space => if (!try renderComments(ais, tree, token_end, next_token_start)) { try ais.writer().writeByte(' '); }, - .semicolon => if (token_tags[token_index + 1] == .semicolon) { - try renderToken(ais, tree, token_index + 1, .newline); - } else if (!comment) { + .newline => if (!try renderComments(ais, tree, token_end, next_token_start)) { try ais.insertNewline(); }, + + .newline_pop => { + ais.popIndent(); + if (!try renderComments(ais, tree, token_end, next_token_start)) { + try ais.insertNewline(); + } + }, + + .comma => if (token_tags[token_index + 1] == .comma) { + _ = try renderComments(ais, tree, token_end, next_token_start); + try renderToken(ais, tree, token_index + 1, .newline); + } else { + try ais.writer().writeByte(','); + if (!try renderComments(ais, tree, token_end, next_token_start)) { + try ais.insertNewline(); + } + }, + + .comma_space => { + const comment 
= try renderComments(ais, tree, token_end, next_token_start); + if (token_tags[token_index + 1] == .comma) { + try renderToken(ais, tree, token_index + 1, .space); + } else if (!comment) { + try ais.writer().writeByte(' '); + } + }, + + .semicolon => { + const comment = try renderComments(ais, tree, token_end, next_token_start); + if (token_tags[token_index + 1] == .semicolon) { + try renderToken(ais, tree, token_index + 1, .newline); + } else if (!comment) { + try ais.insertNewline(); + } + }, + + .semicolon_pop => { + if (token_tags[token_index + 1] == .semicolon) { + _ = try renderComments(ais, tree, token_end, next_token_start); + try renderToken(ais, tree, token_index + 1, .newline_pop); + } else { + ais.popIndent(); + if (!try renderComments(ais, tree, token_end, next_token_start)) { + try ais.insertNewline(); + } + } + }, } } From 52c45bf44d797f28594db5a75c306ed209cfb933 Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Wed, 24 Feb 2021 16:44:55 +0100 Subject: [PATCH 170/173] zig fmt: rework single statement if/while/for indentation This approach properly handles nesting unlike the approach in the previous commit. 
--- lib/std/zig/parser_test.zig | 17 ++++ lib/std/zig/render.zig | 150 ++++++++++++++++++++---------------- 2 files changed, 102 insertions(+), 65 deletions(-) diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index a74a002385..0fe3e64a35 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -1496,12 +1496,14 @@ test "zig fmt: if nested" { \\ GE_EQUAL \\ else \\ GE_GREATER + \\ // comment \\ else if (aInt > bInt) \\ GE_LESS \\ else if (aInt == bInt) \\ GE_EQUAL \\ else \\ GE_GREATER; + \\ // comment \\} \\ ); @@ -4189,6 +4191,21 @@ test "zig fmt: line comment after multiline single expr if statement with multil \\ \\ // bar \\ baz(); + \\ + \\ if (foo) + \\ x = + \\ \\hello + \\ \\hello + \\ \\ + \\ else + \\ y = + \\ \\hello + \\ \\hello + \\ \\ + \\ ; + \\ + \\ // bar + \\ baz(); \\} \\ ); diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index e93881edc0..673a05f8e6 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -44,7 +44,6 @@ pub fn renderTree(buffer: *std.ArrayList(u8), tree: ast.Tree) Error!void { /// Render all members in the given slice, keeping empty lines where appropriate fn renderMembers(gpa: *Allocator, ais: *Ais, tree: ast.Tree, members: []const ast.Node.Index) Error!void { if (members.len == 0) return; - //try renderExtraNewline(ais, tree, members[0]); try renderMember(gpa, ais, tree, members[0], .newline); for (members[1..]) |member| { try renderExtraNewline(ais, tree, member); @@ -202,10 +201,8 @@ fn renderExpression(gpa: *Allocator, ais: *Ais, tree: ast.Tree, node: ast.Node.I while (locked_indents > 0) : (locked_indents -= 1) ais.popIndent(); switch (space) { - .none, .space, .newline => {}, - .newline_pop => ais.popIndent(), + .none, .space, .newline, .skip => {}, .semicolon => if (token_tags[i] == .semicolon) try renderToken(ais, tree, i, .newline), - .semicolon_pop => if (token_tags[i] == .semicolon) try renderToken(ais, tree, i, .newline_pop), .comma => if 
(token_tags[i] == .comma) try renderToken(ais, tree, i, .newline), .comma_space => if (token_tags[i] == .comma) try renderToken(ais, tree, i, .space), } @@ -1147,15 +1144,11 @@ fn renderWhile(gpa: *Allocator, ais: *Ais, tree: ast.Tree, while_node: ast.full. } else { try renderToken(ais, tree, while_node.else_token, .newline); // else } - ais.pushIndent(); - try renderExpression(gpa, ais, tree, while_node.ast.else_expr, space); - ais.popIndent(); + try renderExpressionIndented(gpa, ais, tree, while_node.ast.else_expr, space); return; } } else { - ais.pushIndent(); - assert(space == .semicolon); - try renderExpression(gpa, ais, tree, while_node.ast.then_expr, .semicolon_pop); + try renderExpressionIndented(gpa, ais, tree, while_node.ast.then_expr, space); return; } } @@ -2168,6 +2161,64 @@ fn renderCall( return renderToken(ais, tree, after_last_param_tok, space); // ) } +/// Renders the given expression indented, popping the indent before rendering +/// any following line comments +fn renderExpressionIndented(gpa: *Allocator, ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Space) Error!void { + const token_starts = tree.tokens.items(.start); + const token_tags = tree.tokens.items(.tag); + + ais.pushIndent(); + + var last_token = tree.lastToken(node); + const punctuation = switch (space) { + .none, .space, .newline, .skip => false, + .comma => true, + .comma_space => token_tags[last_token + 1] == .comma, + .semicolon => token_tags[last_token + 1] == .semicolon, + }; + + try renderExpression(gpa, ais, tree, node, if (punctuation) .none else .skip); + + switch (space) { + .none, .space, .newline, .skip => {}, + .comma => { + if (token_tags[last_token + 1] == .comma) { + try renderToken(ais, tree, last_token + 1, .skip); + last_token += 1; + } else { + try ais.writer().writeByte(','); + } + }, + .comma_space => if (token_tags[last_token + 1] == .comma) { + try renderToken(ais, tree, last_token + 1, .skip); + last_token += 1; + }, + .semicolon => if 
(token_tags[last_token + 1] == .semicolon) { + try renderToken(ais, tree, last_token + 1, .skip); + last_token += 1; + }, + } + + ais.popIndent(); + + if (space == .skip) return; + + const comment_start = token_starts[last_token] + tokenSliceForRender(tree, last_token).len; + const comment = try renderComments(ais, tree, comment_start, token_starts[last_token + 1]); + + if (!comment) switch (space) { + .none => {}, + .space, + .comma_space, + => try ais.writer().writeByte(' '), + .newline, + .comma, + .semicolon, + => try ais.insertNewline(), + .skip => unreachable, + }; +} + /// Render an expression, and the comma that follows it, if it is present in the source. fn renderExpressionComma(gpa: *Allocator, ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Space) Error!void { const token_tags = tree.tokens.items(.tag); @@ -2198,9 +2249,6 @@ const Space = enum { space, /// Output the token lexeme followed by a newline. newline, - /// Same as newline, but pop an indent level before rendering the - /// following comments if any. - newline_pop, /// If the next token is a comma, render it as well. If not, insert one. /// In either case, a newline will be inserted afterwards. comma, @@ -2210,9 +2258,9 @@ const Space = enum { /// Additionally consume the next token if it is a semicolon. /// In either case, a newline will be inserted afterwards. semicolon, - /// Same as semicolon, but pop an indent level before rendering the - /// following comments if any. - semicolon_pop, + /// Skip rendering whitespace and comments. If this is used, the caller + /// *must* handle whitespace and comments manually. 
+ skip, }; fn renderToken(ais: *Ais, tree: ast.Tree, token_index: ast.TokenIndex, space: Space) Error!void { @@ -2224,65 +2272,37 @@ fn renderToken(ais: *Ais, tree: ast.Tree, token_index: ast.TokenIndex, space: Sp try ais.writer().writeAll(lexeme); - const token_end = token_start + lexeme.len; - const next_token_start = token_starts[token_index + 1]; + if (space == .skip) return; + + if (space == .comma and token_tags[token_index + 1] != .comma) { + try ais.writer().writeByte(','); + } + + const comment = try renderComments(ais, tree, token_start + lexeme.len, token_starts[token_index + 1]); switch (space) { - .none => _ = try renderComments(ais, tree, token_end, next_token_start), + .none => {}, + .space => if (!comment) try ais.writer().writeByte(' '), + .newline => if (!comment) try ais.insertNewline(), - .space => if (!try renderComments(ais, tree, token_end, next_token_start)) { - try ais.writer().writeByte(' '); - }, - - .newline => if (!try renderComments(ais, tree, token_end, next_token_start)) { + .comma => if (token_tags[token_index + 1] == .comma) { + try renderToken(ais, tree, token_index + 1, .newline); + } else if (!comment) { try ais.insertNewline(); }, - .newline_pop => { - ais.popIndent(); - if (!try renderComments(ais, tree, token_end, next_token_start)) { - try ais.insertNewline(); - } + .comma_space => if (token_tags[token_index + 1] == .comma) { + try renderToken(ais, tree, token_index + 1, .space); + } else if (!comment) { + try ais.writer().writeByte(' '); }, - .comma => if (token_tags[token_index + 1] == .comma) { - _ = try renderComments(ais, tree, token_end, next_token_start); + .semicolon => if (token_tags[token_index + 1] == .semicolon) { try renderToken(ais, tree, token_index + 1, .newline); - } else { - try ais.writer().writeByte(','); - if (!try renderComments(ais, tree, token_end, next_token_start)) { - try ais.insertNewline(); - } + } else if (!comment) { + try ais.insertNewline(); }, - .comma_space => { - const comment = try 
renderComments(ais, tree, token_end, next_token_start); - if (token_tags[token_index + 1] == .comma) { - try renderToken(ais, tree, token_index + 1, .space); - } else if (!comment) { - try ais.writer().writeByte(' '); - } - }, - - .semicolon => { - const comment = try renderComments(ais, tree, token_end, next_token_start); - if (token_tags[token_index + 1] == .semicolon) { - try renderToken(ais, tree, token_index + 1, .newline); - } else if (!comment) { - try ais.insertNewline(); - } - }, - - .semicolon_pop => { - if (token_tags[token_index + 1] == .semicolon) { - _ = try renderComments(ais, tree, token_end, next_token_start); - try renderToken(ais, tree, token_index + 1, .newline_pop); - } else { - ais.popIndent(); - if (!try renderComments(ais, tree, token_end, next_token_start)) { - try ais.insertNewline(); - } - } - }, + .skip => unreachable, } } From 38441b5eab3c9371f8412aa46a277f37fc026a79 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Wed, 24 Feb 2021 12:49:12 -0700 Subject: [PATCH 171/173] MultiArrayList: use @memcpy as a workaround Reverts bf642204b373e01314ecfb0c50a643dc4b05746f and uses a different workaround, suggested by @LemonBoy. There is either a compiler bug or a design flaw somewhere around here. It does not have to block this branch, but I need to understand exactly what's going on here and make it so that nobody ever has to run into this problem again. 
--- CMakeLists.txt | 1 + lib/std/multi_array_list.zig | 27 +++++++++++++-------------- 2 files changed, 14 insertions(+), 14 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 4f2dc4fa4a..6e89d87ca9 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -407,6 +407,7 @@ set(ZIG_STAGE2_SOURCES "${CMAKE_SOURCE_DIR}/lib/std/meta.zig" "${CMAKE_SOURCE_DIR}/lib/std/meta/trailer_flags.zig" "${CMAKE_SOURCE_DIR}/lib/std/meta/trait.zig" + "${CMAKE_SOURCE_DIR}/lib/std/multi_array_list.zig" "${CMAKE_SOURCE_DIR}/lib/std/os.zig" "${CMAKE_SOURCE_DIR}/lib/std/os/bits.zig" "${CMAKE_SOURCE_DIR}/lib/std/os/bits/linux.zig" diff --git a/lib/std/multi_array_list.zig b/lib/std/multi_array_list.zig index 689105755c..3306fd3ef0 100644 --- a/lib/std/multi_array_list.zig +++ b/lib/std/multi_array_list.zig @@ -203,7 +203,11 @@ pub fn MultiArrayList(comptime S: type) type { const other_slice = other.slice(); inline for (fields) |field_info, i| { const field = @intToEnum(Field, i); - mem.copy(field_info.field_type, other_slice.items(field), self_slice.items(field)); + // TODO we should be able to use std.mem.copy here but it causes a + // test failure on aarch64 with -OReleaseFast + const src_slice = mem.sliceAsBytes(self_slice.items(field)); + const dst_slice = mem.sliceAsBytes(other_slice.items(field)); + @memcpy(dst_slice.ptr, src_slice.ptr, src_slice.len); } gpa.free(self.allocatedBytes()); self.* = other; @@ -256,25 +260,20 @@ pub fn MultiArrayList(comptime S: type) type { const other_slice = other.slice(); inline for (fields) |field_info, i| { const field = @intToEnum(Field, i); - mem.copy(field_info.field_type, other_slice.items(field), self_slice.items(field)); + // TODO we should be able to use std.mem.copy here but it causes a + // test failure on aarch64 with -OReleaseFast + const src_slice = mem.sliceAsBytes(self_slice.items(field)); + const dst_slice = mem.sliceAsBytes(other_slice.items(field)); + @memcpy(dst_slice.ptr, src_slice.ptr, src_slice.len); } 
gpa.free(self.allocatedBytes()); self.* = other; } fn capacityInBytes(capacity: usize) usize { - // TODO move this workaround of LLVM SIMD bugs into the Zig frontend. - if (std.Target.current.cpu.arch == .aarch64) { - var sum: usize = 0; - for (sizes.bytes) |size| { - sum += capacity * size; - } - return sum; - } else { - const sizes_vector: std.meta.Vector(sizes.bytes.len, usize) = sizes.bytes; - const capacity_vector = @splat(sizes.bytes.len, capacity); - return @reduce(.Add, capacity_vector * sizes_vector); - } + const sizes_vector: std.meta.Vector(sizes.bytes.len, usize) = sizes.bytes; + const capacity_vector = @splat(sizes.bytes.len, capacity); + return @reduce(.Add, capacity_vector * sizes_vector); } fn allocatedBytes(self: Self) []align(@alignOf(S)) u8 { From c79ee1fc8d3d44b3c11ca72b0fdb11a158beae63 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Wed, 24 Feb 2021 16:36:27 -0700 Subject: [PATCH 172/173] build.zig: expose a strip option --- build.zig | 2 ++ 1 file changed, 2 insertions(+) diff --git a/build.zig b/build.zig index f53fab73bf..92e03603c5 100644 --- a/build.zig +++ b/build.zig @@ -77,10 +77,12 @@ pub fn build(b: *Builder) !void { const tracy = b.option([]const u8, "tracy", "Enable Tracy integration. 
Supply path to Tracy source"); const link_libc = b.option(bool, "force-link-libc", "Force self-hosted compiler to link libc") orelse enable_llvm; + const strip = b.option(bool, "strip", "Omit debug information") orelse false; const main_file = if (is_stage1) "src/stage1.zig" else "src/main.zig"; var exe = b.addExecutable("zig", main_file); + exe.strip = strip; exe.install(); exe.setBuildMode(mode); exe.setTarget(target); From 9ada7638a5dbb535ba37223a14478691dd60cf6a Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Wed, 24 Feb 2021 16:42:46 -0700 Subject: [PATCH 173/173] zig fmt: function with labeled block as return type --- lib/std/zig/parse.zig | 4 ++++ lib/std/zig/parser_test.zig | 11 +++++++++++ 2 files changed, 15 insertions(+) diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig index 36da59ed82..7a6404fbb2 100644 --- a/lib/std/zig/parse.zig +++ b/lib/std/zig/parse.zig @@ -2656,6 +2656,10 @@ const Parser = struct { p.tok_i += 2; return p.parseWhileTypeExpr(); }, + .l_brace => { + p.tok_i += 2; + return p.parseBlock(); + }, else => return p.addNode(.{ .tag = .identifier, .main_token = p.nextToken(), diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index d8aea5ed9d..4ec527fd6c 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -4179,6 +4179,17 @@ test "zig fmt: proper indent line comment after multi-line single expr while loo ); } +test "zig fmt: function with labeled block as return type" { + try testCanonical( + \\fn foo() t: { + \\ break :t bar; + \\} { + \\ baz(); + \\} + \\ + ); +} + test "zig fmt: line comment after multiline single expr if statement with multiline string" { try testCanonical( \\test {