From a72b9d403d036ac0da64ed826c535fff0c142e6a Mon Sep 17 00:00:00 2001 From: Lachlan Easton Date: Sat, 29 Aug 2020 11:02:22 +1000 Subject: [PATCH 01/35] Refactor zig fmt indentation. Remove indent from rendering code and have a stream handle automatic indentation --- lib/std/io.zig | 13 +- lib/std/io/auto_indenting_stream.zig | 135 ++ lib/std/io/change_detection_stream.zig | 58 + lib/std/io/find_byte_out_stream.zig | 44 + lib/std/io/writer.zig | 4 + lib/std/zig/parser_test.zig | 109 +- lib/std/zig/render.zig | 1656 +++++++++++------------- src-self-hosted/main.zig | 9 +- src-self-hosted/stage2.zig | 2 +- 9 files changed, 1113 insertions(+), 917 deletions(-) create mode 100644 lib/std/io/auto_indenting_stream.zig create mode 100644 lib/std/io/change_detection_stream.zig create mode 100644 lib/std/io/find_byte_out_stream.zig diff --git a/lib/std/io.zig b/lib/std/io.zig index e30ed1fa92..1514d80cb0 100644 --- a/lib/std/io.zig +++ b/lib/std/io.zig @@ -169,6 +169,15 @@ pub const BitOutStream = BitWriter; /// Deprecated: use `bitWriter` pub const bitOutStream = bitWriter; +pub const AutoIndentingStream = @import("io/auto_indenting_stream.zig").AutoIndentingStream; +pub const autoIndentingStream = @import("io/auto_indenting_stream.zig").autoIndentingStream; + +pub const ChangeDetectionStream = @import("io/change_detection_stream.zig").ChangeDetectionStream; +pub const changeDetectionStream = @import("io/change_detection_stream.zig").changeDetectionStream; + +pub const FindByteOutStream = @import("io/find_byte_out_stream.zig").FindByteOutStream; +pub const findByteOutStream = @import("io/find_byte_out_stream.zig").findByteOutStream; + pub const Packing = @import("io/serialization.zig").Packing; pub const Serializer = @import("io/serialization.zig").Serializer; @@ -182,10 +191,10 @@ pub const BufferedAtomicFile = @import("io/buffered_atomic_file.zig").BufferedAt pub const StreamSource = @import("io/stream_source.zig").StreamSource; /// A Writer that doesn't write to 
anything. -pub const null_writer = @as(NullWriter, .{ .context = {} }); +pub var null_writer = @as(NullWriter, .{ .context = {} }); /// Deprecated: use `null_writer` -pub const null_out_stream = null_writer; +pub var null_out_stream = null_writer; const NullWriter = Writer(void, error{}, dummyWrite); /// Deprecated: use NullWriter diff --git a/lib/std/io/auto_indenting_stream.zig b/lib/std/io/auto_indenting_stream.zig new file mode 100644 index 0000000000..e7657c1f91 --- /dev/null +++ b/lib/std/io/auto_indenting_stream.zig @@ -0,0 +1,135 @@ +const std = @import("../std.zig"); +const io = std.io; +const mem = std.mem; +const assert = std.debug.assert; + +pub fn AutoIndentingStream(comptime indent_delta: u8, comptime OutStreamType: type) type { + return struct { + const Self = @This(); + pub const Error = OutStreamType.Error; + pub const OutStream = io.Writer(*Self, Error, write); + + out_stream: *OutStreamType, + current_line_empty: bool = true, + indent_stack: [255]u8 = undefined, + indent_stack_top: u8 = 0, + indent_one_shot_count: u8 = 0, // automatically popped when applied + applied_indent: u8 = 0, // the most recently applied indent + indent_next_line: u8 = 0, // not used until the next line + + pub fn init(out_stream: *OutStreamType) Self { + return Self{ .out_stream = out_stream }; + } + + pub fn writer(self: *Self) OutStream { + return .{ .context = self }; + } + + pub fn write(self: *Self, bytes: []const u8) Error!usize { + if (bytes.len == 0) + return @as(usize, 0); + + try self.applyIndent(); + return self.writeNoIndent(bytes); + } + + fn writeNoIndent(self: *Self, bytes: []const u8) Error!usize { + try self.out_stream.outStream().writeAll(bytes); + if (bytes[bytes.len - 1] == '\n') + self.resetLine(); + return bytes.len; + } + + pub fn insertNewline(self: *Self) Error!void { + _ = try self.writeNoIndent("\n"); + } + + fn resetLine(self: *Self) void { + self.current_line_empty = true; + self.indent_next_line = 0; + } + + /// Insert a newline unless the 
current line is blank + pub fn maybeInsertNewline(self: *Self) Error!void { + if (!self.current_line_empty) + try self.insertNewline(); + } + + /// Push default indentation + pub fn pushIndent(self: *Self) void { + // Doesn't actually write any indentation. Just primes the stream to be able to write the correct indentation if it needs to. + self.pushIndentN(indent_delta); + } + + /// Push an indent of arbitrary width + pub fn pushIndentN(self: *Self, n: u8) void { + assert(self.indent_stack_top < std.math.maxInt(u8)); + self.indent_stack[self.indent_stack_top] = n; + self.indent_stack_top += 1; + } + + /// Push an indent that is automatically popped after being applied + pub fn pushIndentOneShot(self: *Self) void { + self.indent_one_shot_count += 1; + self.pushIndent(); + } + + /// Turns all one-shot indents into regular indents + /// Returns number of indents that must now be manually popped + pub fn lockOneShotIndent(self: *Self) u8 { + var locked_count = self.indent_one_shot_count; + self.indent_one_shot_count = 0; + return locked_count; + } + + /// Push an indent that should not take effect until the next line + pub fn pushIndentNextLine(self: *Self) void { + self.indent_next_line += 1; + self.pushIndent(); + } + + pub fn popIndent(self: *Self) void { + assert(self.indent_stack_top != 0); + self.indent_stack_top -= 1; + self.indent_next_line = std.math.min(self.indent_stack_top, self.indent_next_line); // Tentative indent may have been popped before there was a newline + } + + /// Writes ' ' bytes if the current line is empty + fn applyIndent(self: *Self) Error!void { + const current_indent = self.currentIndent(); + if (self.current_line_empty and current_indent > 0) { + try self.out_stream.outStream().writeByteNTimes(' ', current_indent); + self.applied_indent = current_indent; + } + + self.indent_stack_top -= self.indent_one_shot_count; + self.indent_one_shot_count = 0; + self.current_line_empty = false; + } + + /// Checks to see if the most recent 
indentation exceeds the currently pushed indents + pub fn isLineOverIndented(self: *Self) bool { + if (self.current_line_empty) return false; + return self.applied_indent > self.currentIndent(); + } + + fn currentIndent(self: *Self) u8 { + var indent_current: u8 = 0; + if (self.indent_stack_top > 0) { + const stack_top = self.indent_stack_top - self.indent_next_line; + for (self.indent_stack[0..stack_top]) |indent| { + indent_current += indent; + } + } + return indent_current; + } + }; +} + +pub fn autoIndentingStream( + comptime indent_delta: u8, + underlying_stream: anytype, +) AutoIndentingStream(indent_delta, @TypeOf(underlying_stream).Child) { + comptime assert(@typeInfo(@TypeOf(underlying_stream)) == .Pointer); + return AutoIndentingStream(indent_delta, @TypeOf(underlying_stream).Child).init(underlying_stream); +} diff --git a/lib/std/io/change_detection_stream.zig b/lib/std/io/change_detection_stream.zig new file mode 100644 index 0000000000..941569320c --- /dev/null +++ b/lib/std/io/change_detection_stream.zig @@ -0,0 +1,58 @@ +const std = @import("../std.zig"); +const io = std.io; +const mem = std.mem; +const assert = std.debug.assert; + +pub fn ChangeDetectionStream(comptime OutStreamType: type) type { + return struct { + const Self = @This(); + pub const Error = OutStreamType.Error; + pub const OutStream = io.OutStream(*Self, Error, write); + + anything_changed: bool = false, + out_stream: *OutStreamType, + source_index: usize, + source: []const u8, + + pub fn init(source: []const u8, out_stream: *OutStreamType) Self { + return Self{ + .out_stream = out_stream, + .source_index = 0, + .source = source, + }; + } + + pub fn outStream(self: *Self) OutStream { + return .{ .context = self }; + } + + fn write(self: *Self, bytes: []const u8) Error!usize { + if (!self.anything_changed) { + const end = self.source_index + bytes.len; + if (end > self.source.len) { + self.anything_changed = true; + } else { + const src_slice = self.source[self.source_index..end]; + 
self.source_index += bytes.len; + if (!mem.eql(u8, bytes, src_slice)) { + self.anything_changed = true; + } + } + } + + return self.out_stream.write(bytes); + } + + pub fn changeDetected(self: *Self) bool { + return self.anything_changed or (self.source_index != self.source.len); + } + }; +} + +pub fn changeDetectionStream( + source: []const u8, + underlying_stream: anytype, +) ChangeDetectionStream(@TypeOf(underlying_stream).Child) { + comptime assert(@typeInfo(@TypeOf(underlying_stream)) == .Pointer); + return ChangeDetectionStream(@TypeOf(underlying_stream).Child).init(source, underlying_stream); +} diff --git a/lib/std/io/find_byte_out_stream.zig b/lib/std/io/find_byte_out_stream.zig new file mode 100644 index 0000000000..e835cbd584 --- /dev/null +++ b/lib/std/io/find_byte_out_stream.zig @@ -0,0 +1,44 @@ +const std = @import("../std.zig"); +const io = std.io; +const assert = std.debug.assert; + +// An OutStream that returns whether the given character has been written to it. +// The contents are not written to anything. 
+pub fn FindByteOutStream(comptime OutStreamType: type) type { + return struct { + const Self = @This(); + pub const Error = OutStreamType.Error; + pub const OutStream = io.OutStream(*Self, Error, write); + + out_stream: *OutStreamType, + byte_found: bool, + byte: u8, + + pub fn init(byte: u8, out_stream: *OutStreamType) Self { + return Self{ + .out_stream = out_stream, + .byte = byte, + .byte_found = false, + }; + } + + pub fn outStream(self: *Self) OutStream { + return .{ .context = self }; + } + + fn write(self: *Self, bytes: []const u8) Error!usize { + if (!self.byte_found) { + self.byte_found = blk: { + for (bytes) |b| + if (b == self.byte) break :blk true; + break :blk false; + }; + } + return self.out_stream.writer().write(bytes); + } + }; +} +pub fn findByteOutStream(byte: u8, underlying_stream: anytype) FindByteOutStream(@TypeOf(underlying_stream).Child) { + comptime assert(@typeInfo(@TypeOf(underlying_stream)) == .Pointer); + return FindByteOutStream(@TypeOf(underlying_stream).Child).init(byte, underlying_stream); +} diff --git a/lib/std/io/writer.zig b/lib/std/io/writer.zig index 39729ef0a2..ffdca0d6a6 100644 --- a/lib/std/io/writer.zig +++ b/lib/std/io/writer.zig @@ -18,6 +18,10 @@ pub fn Writer( const Self = @This(); pub const Error = WriteError; + pub fn writer(self: *const Self) Self { + return self.*; + } + pub fn write(self: Self, bytes: []const u8) Error!usize { return writeFn(self.context, bytes); } diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 8259af32a6..f4da650efb 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -615,6 +615,17 @@ test "zig fmt: infix operator and then multiline string literal" { ); } +test "zig fmt: infix operator and then multiline string literal" { + try testCanonical( + \\const x = "" ++ + \\ \\ hi0 + \\ \\ hi1 + \\ \\ hi2 + \\; + \\ + ); +} + test "zig fmt: C pointers" { try testCanonical( \\const Ptr = [*c]i32; @@ -885,6 +896,28 @@ test "zig fmt: 2nd arg 
multiline string" { ); } +test "zig fmt: 2nd arg multiline string many args" { + try testCanonical( + \\comptime { + \\ cases.addAsm("hello world linux x86_64", + \\ \\.text + \\ , "Hello, world!\n", "Hello, world!\n"); + \\} + \\ + ); +} + +test "zig fmt: final arg multiline string" { + try testCanonical( + \\comptime { + \\ cases.addAsm("hello world linux x86_64", "Hello, world!\n", + \\ \\.text + \\ ); + \\} + \\ + ); +} + test "zig fmt: if condition wraps" { try testTransform( \\comptime { @@ -915,6 +948,11 @@ test "zig fmt: if condition wraps" { \\ var a = if (a) |*f| x: { \\ break :x &a.b; \\ } else |err| err; + \\ var a = if (cond and + \\ cond) |*f| + \\ x: { + \\ break :x &a.b; + \\ } else |err| err; \\} , \\comptime { @@ -951,6 +989,35 @@ test "zig fmt: if condition wraps" { \\ var a = if (a) |*f| x: { \\ break :x &a.b; \\ } else |err| err; + \\ var a = if (cond and + \\ cond) |*f| + \\ x: { + \\ break :x &a.b; + \\ } else |err| err; + \\} + \\ + ); +} + +test "zig fmt: if condition has line break but must not wrap" { + try testCanonical( + \\comptime { + \\ if (self.user_input_options.put( + \\ name, + \\ UserInputOption{ + \\ .name = name, + \\ .used = false, + \\ }, + \\ ) catch unreachable) |*prev_value| { + \\ foo(); + \\ bar(); + \\ } + \\ if (put( + \\ a, + \\ b, + \\ )) { + \\ foo(); + \\ } \\} \\ ); @@ -977,6 +1044,18 @@ test "zig fmt: if condition has line break but must not wrap" { ); } +test "zig fmt: function call with multiline argument" { + try testCanonical( + \\comptime { + \\ self.user_input_options.put(name, UserInputOption{ + \\ .name = name, + \\ .used = false, + \\ }); + \\} + \\ + ); +} + test "zig fmt: same-line doc comment on variable declaration" { try testTransform( \\pub const MAP_ANONYMOUS = 0x1000; /// allocated from memory, swap space @@ -1228,7 +1307,7 @@ test "zig fmt: array literal with hint" { \\const a = []u8{ \\ 1, 2, \\ 3, // - \\ 4, + \\ 4, \\ 5, 6, \\ 7, \\}; @@ -1293,7 +1372,7 @@ test "zig fmt: multiline string 
parameter in fn call with trailing comma" { \\ \\ZIG_C_HEADER_FILES {} \\ \\ZIG_DIA_GUIDS_LIB {} \\ \\ - \\ , + \\ , \\ std.cstr.toSliceConst(c.ZIG_CMAKE_BINARY_DIR), \\ std.cstr.toSliceConst(c.ZIG_CXX_COMPILER), \\ std.cstr.toSliceConst(c.ZIG_DIA_GUIDS_LIB), @@ -2885,20 +2964,20 @@ test "zig fmt: multiline string in array" { try testCanonical( \\const Foo = [][]const u8{ \\ \\aaa - \\, + \\ , \\ \\bbb \\}; \\ \\fn bar() void { \\ const Foo = [][]const u8{ \\ \\aaa - \\ , + \\ , \\ \\bbb \\ }; \\ const Bar = [][]const u8{ // comment here \\ \\aaa \\ \\ - \\ , // and another comment can go here + \\ , // and another comment can go here \\ \\bbb \\ }; \\} @@ -3214,6 +3293,23 @@ test "zig fmt: C var args" { ); } +test "zig fmt: Only indent multiline string literals in function calls" { + try testCanonical( + \\test "zig fmt:" { + \\ try testTransform( + \\ \\const X = struct { + \\ \\ foo: i32, bar: i8 }; + \\ , + \\ \\const X = struct { + \\ \\ foo: i32, bar: i8 + \\ \\}; + \\ \\ + \\ ); + \\} + \\ + ); +} + const std = @import("std"); const mem = std.mem; const warn = std.debug.warn; @@ -3256,7 +3352,8 @@ fn testParse(source: []const u8, allocator: *mem.Allocator, anything_changed: *b var buffer = std.ArrayList(u8).init(allocator); errdefer buffer.deinit(); - anything_changed.* = try std.zig.render(allocator, buffer.outStream(), tree); + const outStream = buffer.outStream(); + anything_changed.* = try std.zig.render(allocator, &outStream, tree); return buffer.toOwnedSlice(); } fn testTransform(source: []const u8, expected_source: []const u8) !void { diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 4d44c41bfa..d7bba2f6bf 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -6,6 +6,7 @@ const std = @import("../std.zig"); const assert = std.debug.assert; const mem = std.mem; +const meta = std.meta; const ast = std.zig.ast; const Token = std.zig.Token; @@ -17,74 +18,37 @@ pub const Error = error{ }; /// Returns whether anything changed 
-pub fn render(allocator: *mem.Allocator, stream: anytype, tree: *ast.Tree) (@TypeOf(stream).Error || Error)!bool { +pub fn render(allocator: *mem.Allocator, stream: anytype, tree: *ast.Tree) (meta.Child(@TypeOf(stream)).Error || Error)!bool { // cannot render an invalid tree std.debug.assert(tree.errors.len == 0); - // make a passthrough stream that checks whether something changed - const MyStream = struct { - const MyStream = @This(); - const StreamError = @TypeOf(stream).Error; + var s = stream.*; + var change_detection_stream = std.io.changeDetectionStream(tree.source, &s); + var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, &change_detection_stream); - child_stream: @TypeOf(stream), - anything_changed: bool, - source_index: usize, - source: []const u8, + try renderRoot(allocator, &auto_indenting_stream, tree); - fn write(self: *MyStream, bytes: []const u8) StreamError!usize { - if (!self.anything_changed) { - const end = self.source_index + bytes.len; - if (end > self.source.len) { - self.anything_changed = true; - } else { - const src_slice = self.source[self.source_index..end]; - self.source_index += bytes.len; - if (!mem.eql(u8, bytes, src_slice)) { - self.anything_changed = true; - } - } - } - - return self.child_stream.write(bytes); - } - }; - var my_stream = MyStream{ - .child_stream = stream, - .anything_changed = false, - .source_index = 0, - .source = tree.source, - }; - const my_stream_stream: std.io.Writer(*MyStream, MyStream.StreamError, MyStream.write) = .{ - .context = &my_stream, - }; - - try renderRoot(allocator, my_stream_stream, tree); - - if (my_stream.source_index != my_stream.source.len) { - my_stream.anything_changed = true; - } - - return my_stream.anything_changed; + return change_detection_stream.changeDetected(); } fn renderRoot( allocator: *mem.Allocator, stream: anytype, tree: *ast.Tree, -) (@TypeOf(stream).Error || Error)!void { +) (@TypeOf(stream.*).Error || Error)!void { + // render all the line comments at 
the beginning of the file for (tree.token_ids) |token_id, i| { if (token_id != .LineComment) break; const token_loc = tree.token_locs[i]; - try stream.print("{}\n", .{mem.trimRight(u8, tree.tokenSliceLoc(token_loc), " ")}); + try stream.writer().print("{}\n", .{mem.trimRight(u8, tree.tokenSliceLoc(token_loc), " ")}); const next_token = tree.token_locs[i + 1]; const loc = tree.tokenLocationLoc(token_loc.end, next_token); if (loc.line >= 2) { - try stream.writeByte('\n'); + try stream.insertNewline(); } } - var start_col: usize = 0; var decl_i: ast.NodeIndex = 0; const root_decls = tree.root_node.decls(); @@ -189,23 +153,22 @@ fn renderRoot( try copyFixingWhitespace(stream, tree.source[start..end]); } - try renderTopLevelDecl(allocator, stream, tree, 0, &start_col, decl); + try renderTopLevelDecl(allocator, stream, tree, decl); decl_i += 1; if (decl_i >= root_decls.len) return; - try renderExtraNewline(tree, stream, &start_col, root_decls[decl_i]); + try renderExtraNewline(tree, stream, root_decls[decl_i]); } } -fn renderExtraNewline(tree: *ast.Tree, stream: anytype, start_col: *usize, node: *ast.Node) @TypeOf(stream).Error!void { - return renderExtraNewlineToken(tree, stream, start_col, node.firstToken()); +fn renderExtraNewline(tree: *ast.Tree, stream: anytype, node: *ast.Node) @TypeOf(stream.*).Error!void { + return renderExtraNewlineToken(tree, stream, node.firstToken()); } fn renderExtraNewlineToken( tree: *ast.Tree, stream: anytype, - start_col: *usize, first_token: ast.TokenIndex, -) @TypeOf(stream).Error!void { +) @TypeOf(stream.*).Error!void { var prev_token = first_token; if (prev_token == 0) return; var newline_threshold: usize = 2; @@ -218,28 +181,27 @@ fn renderExtraNewlineToken( const prev_token_end = tree.token_locs[prev_token - 1].end; const loc = tree.tokenLocation(prev_token_end, first_token); if (loc.line >= newline_threshold) { - try stream.writeByte('\n'); - start_col.* = 0; + try stream.insertNewline(); } } -fn renderTopLevelDecl(allocator: 
*mem.Allocator, stream: anytype, tree: *ast.Tree, indent: usize, start_col: *usize, decl: *ast.Node) (@TypeOf(stream).Error || Error)!void { - try renderContainerDecl(allocator, stream, tree, indent, start_col, decl, .Newline); +fn renderTopLevelDecl(allocator: *mem.Allocator, stream: anytype, tree: *ast.Tree, decl: *ast.Node) (@TypeOf(stream.*).Error || Error)!void { + try renderContainerDecl(allocator, stream, tree, decl, .Newline); } -fn renderContainerDecl(allocator: *mem.Allocator, stream: anytype, tree: *ast.Tree, indent: usize, start_col: *usize, decl: *ast.Node, space: Space) (@TypeOf(stream).Error || Error)!void { +fn renderContainerDecl(allocator: *mem.Allocator, stream: anytype, tree: *ast.Tree, decl: *ast.Node, space: Space) (@TypeOf(stream.*).Error || Error)!void { switch (decl.tag) { .FnProto => { const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", decl); - try renderDocComments(tree, stream, fn_proto, fn_proto.getTrailer("doc_comments"), indent, start_col); + try renderDocComments(tree, stream, fn_proto, fn_proto.getTrailer("doc_comments")); if (fn_proto.getTrailer("body_node")) |body_node| { - try renderExpression(allocator, stream, tree, indent, start_col, decl, .Space); - try renderExpression(allocator, stream, tree, indent, start_col, body_node, space); + try renderExpression(allocator, stream, tree, decl, .Space); + try renderExpression(allocator, stream, tree, body_node, space); } else { - try renderExpression(allocator, stream, tree, indent, start_col, decl, .None); - try renderToken(tree, stream, tree.nextToken(decl.lastToken()), indent, start_col, space); + try renderExpression(allocator, stream, tree, decl, .None); + try renderToken(tree, stream, tree.nextToken(decl.lastToken()), space); } }, @@ -247,35 +209,35 @@ fn renderContainerDecl(allocator: *mem.Allocator, stream: anytype, tree: *ast.Tr const use_decl = @fieldParentPtr(ast.Node.Use, "base", decl); if (use_decl.visib_token) |visib_token| { - try renderToken(tree, stream, 
visib_token, indent, start_col, .Space); // pub + try renderToken(tree, stream, visib_token, .Space); // pub } - try renderToken(tree, stream, use_decl.use_token, indent, start_col, .Space); // usingnamespace - try renderExpression(allocator, stream, tree, indent, start_col, use_decl.expr, .None); - try renderToken(tree, stream, use_decl.semicolon_token, indent, start_col, space); // ; + try renderToken(tree, stream, use_decl.use_token, .Space); // usingnamespace + try renderExpression(allocator, stream, tree, use_decl.expr, .None); + try renderToken(tree, stream, use_decl.semicolon_token, space); // ; }, .VarDecl => { const var_decl = @fieldParentPtr(ast.Node.VarDecl, "base", decl); - try renderDocComments(tree, stream, var_decl, var_decl.getTrailer("doc_comments"), indent, start_col); - try renderVarDecl(allocator, stream, tree, indent, start_col, var_decl); + try renderDocComments(tree, stream, var_decl, var_decl.getTrailer("doc_comments")); + try renderVarDecl(allocator, stream, tree, var_decl); }, .TestDecl => { const test_decl = @fieldParentPtr(ast.Node.TestDecl, "base", decl); - try renderDocComments(tree, stream, test_decl, test_decl.doc_comments, indent, start_col); - try renderToken(tree, stream, test_decl.test_token, indent, start_col, .Space); - try renderExpression(allocator, stream, tree, indent, start_col, test_decl.name, .Space); - try renderExpression(allocator, stream, tree, indent, start_col, test_decl.body_node, space); + try renderDocComments(tree, stream, test_decl, test_decl.doc_comments); + try renderToken(tree, stream, test_decl.test_token, .Space); + try renderExpression(allocator, stream, tree, test_decl.name, .Space); + try renderExpression(allocator, stream, tree, test_decl.body_node, space); }, .ContainerField => { const field = @fieldParentPtr(ast.Node.ContainerField, "base", decl); - try renderDocComments(tree, stream, field, field.doc_comments, indent, start_col); + try renderDocComments(tree, stream, field, field.doc_comments); if 
(field.comptime_token) |t| { - try renderToken(tree, stream, t, indent, start_col, .Space); // comptime + try renderToken(tree, stream, t, .Space); // comptime } const src_has_trailing_comma = blk: { @@ -288,68 +250,67 @@ fn renderContainerDecl(allocator: *mem.Allocator, stream: anytype, tree: *ast.Tr const last_token_space: Space = if (src_has_trailing_comma) .None else space; if (field.type_expr == null and field.value_expr == null) { - try renderToken(tree, stream, field.name_token, indent, start_col, last_token_space); // name + try renderToken(tree, stream, field.name_token, last_token_space); // name } else if (field.type_expr != null and field.value_expr == null) { - try renderToken(tree, stream, field.name_token, indent, start_col, .None); // name - try renderToken(tree, stream, tree.nextToken(field.name_token), indent, start_col, .Space); // : + try renderToken(tree, stream, field.name_token, .None); // name + try renderToken(tree, stream, tree.nextToken(field.name_token), .Space); // : if (field.align_expr) |align_value_expr| { - try renderExpression(allocator, stream, tree, indent, start_col, field.type_expr.?, .Space); // type + try renderExpression(allocator, stream, tree, field.type_expr.?, .Space); // type const lparen_token = tree.prevToken(align_value_expr.firstToken()); const align_kw = tree.prevToken(lparen_token); const rparen_token = tree.nextToken(align_value_expr.lastToken()); - try renderToken(tree, stream, align_kw, indent, start_col, .None); // align - try renderToken(tree, stream, lparen_token, indent, start_col, .None); // ( - try renderExpression(allocator, stream, tree, indent, start_col, align_value_expr, .None); // alignment - try renderToken(tree, stream, rparen_token, indent, start_col, last_token_space); // ) + try renderToken(tree, stream, align_kw, .None); // align + try renderToken(tree, stream, lparen_token, .None); // ( + try renderExpression(allocator, stream, tree, align_value_expr, .None); // alignment + try 
renderToken(tree, stream, rparen_token, last_token_space); // ) } else { - try renderExpression(allocator, stream, tree, indent, start_col, field.type_expr.?, last_token_space); // type + try renderExpression(allocator, stream, tree, field.type_expr.?, last_token_space); // type } } else if (field.type_expr == null and field.value_expr != null) { - try renderToken(tree, stream, field.name_token, indent, start_col, .Space); // name - try renderToken(tree, stream, tree.nextToken(field.name_token), indent, start_col, .Space); // = - try renderExpression(allocator, stream, tree, indent, start_col, field.value_expr.?, last_token_space); // value + try renderToken(tree, stream, field.name_token, .Space); // name + try renderToken(tree, stream, tree.nextToken(field.name_token), .Space); // = + try renderExpression(allocator, stream, tree, field.value_expr.?, last_token_space); // value } else { - try renderToken(tree, stream, field.name_token, indent, start_col, .None); // name - try renderToken(tree, stream, tree.nextToken(field.name_token), indent, start_col, .Space); // : + try renderToken(tree, stream, field.name_token, .None); // name + try renderToken(tree, stream, tree.nextToken(field.name_token), .Space); // : if (field.align_expr) |align_value_expr| { - try renderExpression(allocator, stream, tree, indent, start_col, field.type_expr.?, .Space); // type + try renderExpression(allocator, stream, tree, field.type_expr.?, .Space); // type const lparen_token = tree.prevToken(align_value_expr.firstToken()); const align_kw = tree.prevToken(lparen_token); const rparen_token = tree.nextToken(align_value_expr.lastToken()); - try renderToken(tree, stream, align_kw, indent, start_col, .None); // align - try renderToken(tree, stream, lparen_token, indent, start_col, .None); // ( - try renderExpression(allocator, stream, tree, indent, start_col, align_value_expr, .None); // alignment - try renderToken(tree, stream, rparen_token, indent, start_col, .Space); // ) + try 
renderToken(tree, stream, align_kw, .None); // align + try renderToken(tree, stream, lparen_token, .None); // ( + try renderExpression(allocator, stream, tree, align_value_expr, .None); // alignment + try renderToken(tree, stream, rparen_token, .Space); // ) } else { - try renderExpression(allocator, stream, tree, indent, start_col, field.type_expr.?, .Space); // type + try renderExpression(allocator, stream, tree, field.type_expr.?, .Space); // type } - try renderToken(tree, stream, tree.prevToken(field.value_expr.?.firstToken()), indent, start_col, .Space); // = - try renderExpression(allocator, stream, tree, indent, start_col, field.value_expr.?, last_token_space); // value + try renderToken(tree, stream, tree.prevToken(field.value_expr.?.firstToken()), .Space); // = + try renderExpression(allocator, stream, tree, field.value_expr.?, last_token_space); // value } if (src_has_trailing_comma) { const comma = tree.nextToken(field.lastToken()); - try renderToken(tree, stream, comma, indent, start_col, space); + try renderToken(tree, stream, comma, space); } }, .Comptime => { assert(!decl.requireSemiColon()); - try renderExpression(allocator, stream, tree, indent, start_col, decl, space); + try renderExpression(allocator, stream, tree, decl, space); }, .DocComment => { const comment = @fieldParentPtr(ast.Node.DocComment, "base", decl); const kind = tree.token_ids[comment.first_line]; - try renderToken(tree, stream, comment.first_line, indent, start_col, .Newline); + try renderToken(tree, stream, comment.first_line, .Newline); var tok_i = comment.first_line + 1; while (true) : (tok_i += 1) { const tok_id = tree.token_ids[tok_i]; if (tok_id == kind) { - try stream.writeByteNTimes(' ', indent); - try renderToken(tree, stream, tok_i, indent, start_col, .Newline); + try renderToken(tree, stream, tok_i, .Newline); } else if (tok_id == .LineComment) { continue; } else { @@ -365,11 +326,9 @@ fn renderExpression( allocator: *mem.Allocator, stream: anytype, tree: *ast.Tree, - 
indent: usize, - start_col: *usize, base: *ast.Node, space: Space, -) (@TypeOf(stream).Error || Error)!void { +) (@TypeOf(stream.*).Error || Error)!void { switch (base.tag) { .Identifier, .IntegerLiteral, @@ -383,18 +342,18 @@ fn renderExpression( .UndefinedLiteral, => { const casted_node = base.cast(ast.Node.OneToken).?; - return renderToken(tree, stream, casted_node.token, indent, start_col, space); + return renderToken(tree, stream, casted_node.token, space); }, .AnyType => { const any_type = base.castTag(.AnyType).?; if (mem.eql(u8, tree.tokenSlice(any_type.token), "var")) { // TODO remove in next release cycle - try stream.writeAll("anytype"); - if (space == .Comma) try stream.writeAll(",\n"); + try stream.writer().writeAll("anytype"); + if (space == .Comma) try stream.writer().writeAll(",\n"); return; } - return renderToken(tree, stream, any_type.token, indent, start_col, space); + return renderToken(tree, stream, any_type.token, space); }, .Block, .LabeledBlock => { @@ -424,65 +383,65 @@ fn renderExpression( }; if (block.label) |label| { - try renderToken(tree, stream, label, indent, start_col, Space.None); - try renderToken(tree, stream, tree.nextToken(label), indent, start_col, Space.Space); + try renderToken(tree, stream, label, Space.None); + try renderToken(tree, stream, tree.nextToken(label), Space.Space); } if (block.statements.len == 0) { - try renderToken(tree, stream, block.lbrace, indent + indent_delta, start_col, Space.None); - return renderToken(tree, stream, block.rbrace, indent, start_col, space); + stream.pushIndentNextLine(); + defer stream.popIndent(); + try renderToken(tree, stream, block.lbrace, Space.None); } else { - const block_indent = indent + indent_delta; - try renderToken(tree, stream, block.lbrace, block_indent, start_col, Space.Newline); + stream.pushIndentNextLine(); + defer stream.popIndent(); + + try renderToken(tree, stream, block.lbrace, Space.Newline); for (block.statements) |statement, i| { - try stream.writeByteNTimes(' 
', block_indent); - try renderStatement(allocator, stream, tree, block_indent, start_col, statement); + try renderStatement(allocator, stream, tree, statement); if (i + 1 < block.statements.len) { - try renderExtraNewline(tree, stream, start_col, block.statements[i + 1]); + try renderExtraNewline(tree, stream, block.statements[i + 1]); } } - - try stream.writeByteNTimes(' ', indent); - return renderToken(tree, stream, block.rbrace, indent, start_col, space); } + return renderToken(tree, stream, block.rbrace, space); }, .Defer => { const defer_node = @fieldParentPtr(ast.Node.Defer, "base", base); - try renderToken(tree, stream, defer_node.defer_token, indent, start_col, Space.Space); + try renderToken(tree, stream, defer_node.defer_token, Space.Space); if (defer_node.payload) |payload| { - try renderExpression(allocator, stream, tree, indent, start_col, payload, Space.Space); + try renderExpression(allocator, stream, tree, payload, Space.Space); } - return renderExpression(allocator, stream, tree, indent, start_col, defer_node.expr, space); + return renderExpression(allocator, stream, tree, defer_node.expr, space); }, .Comptime => { const comptime_node = @fieldParentPtr(ast.Node.Comptime, "base", base); - try renderToken(tree, stream, comptime_node.comptime_token, indent, start_col, Space.Space); - return renderExpression(allocator, stream, tree, indent, start_col, comptime_node.expr, space); + try renderToken(tree, stream, comptime_node.comptime_token, Space.Space); + return renderExpression(allocator, stream, tree, comptime_node.expr, space); }, .Nosuspend => { const nosuspend_node = @fieldParentPtr(ast.Node.Nosuspend, "base", base); if (mem.eql(u8, tree.tokenSlice(nosuspend_node.nosuspend_token), "noasync")) { // TODO: remove this - try stream.writeAll("nosuspend "); + try stream.writer().writeAll("nosuspend "); } else { - try renderToken(tree, stream, nosuspend_node.nosuspend_token, indent, start_col, Space.Space); + try renderToken(tree, stream, 
nosuspend_node.nosuspend_token, Space.Space); } - return renderExpression(allocator, stream, tree, indent, start_col, nosuspend_node.expr, space); + return renderExpression(allocator, stream, tree, nosuspend_node.expr, space); }, .Suspend => { const suspend_node = @fieldParentPtr(ast.Node.Suspend, "base", base); if (suspend_node.body) |body| { - try renderToken(tree, stream, suspend_node.suspend_token, indent, start_col, Space.Space); - return renderExpression(allocator, stream, tree, indent, start_col, body, space); + try renderToken(tree, stream, suspend_node.suspend_token, Space.Space); + return renderExpression(allocator, stream, tree, body, space); } else { - return renderToken(tree, stream, suspend_node.suspend_token, indent, start_col, space); + return renderToken(tree, stream, suspend_node.suspend_token, space); } }, @@ -490,26 +449,21 @@ fn renderExpression( const infix_op_node = @fieldParentPtr(ast.Node.Catch, "base", base); const op_space = Space.Space; - try renderExpression(allocator, stream, tree, indent, start_col, infix_op_node.lhs, op_space); + try renderExpression(allocator, stream, tree, infix_op_node.lhs, op_space); const after_op_space = blk: { - const loc = tree.tokenLocation(tree.token_locs[infix_op_node.op_token].end, tree.nextToken(infix_op_node.op_token)); - break :blk if (loc.line == 0) op_space else Space.Newline; + const same_line = tree.tokensOnSameLine(infix_op_node.op_token, tree.nextToken(infix_op_node.op_token)); + break :blk if (same_line) op_space else Space.Newline; }; - try renderToken(tree, stream, infix_op_node.op_token, indent, start_col, after_op_space); - if (after_op_space == Space.Newline and - tree.token_ids[tree.nextToken(infix_op_node.op_token)] != .MultilineStringLiteralLine) - { - try stream.writeByteNTimes(' ', indent + indent_delta); - start_col.* = indent + indent_delta; - } + try renderToken(tree, stream, infix_op_node.op_token, after_op_space); if (infix_op_node.payload) |payload| { - try 
renderExpression(allocator, stream, tree, indent, start_col, payload, Space.Space); + try renderExpression(allocator, stream, tree, payload, Space.Space); } - return renderExpression(allocator, stream, tree, indent, start_col, infix_op_node.rhs, space); + stream.pushIndentOneShot(); + return renderExpression(allocator, stream, tree, infix_op_node.rhs, space); }, .Add, @@ -561,22 +515,16 @@ fn renderExpression( .Period, .ErrorUnion, .Range => Space.None, else => Space.Space, }; - try renderExpression(allocator, stream, tree, indent, start_col, infix_op_node.lhs, op_space); + try renderExpression(allocator, stream, tree, infix_op_node.lhs, op_space); const after_op_space = blk: { const loc = tree.tokenLocation(tree.token_locs[infix_op_node.op_token].end, tree.nextToken(infix_op_node.op_token)); break :blk if (loc.line == 0) op_space else Space.Newline; }; - try renderToken(tree, stream, infix_op_node.op_token, indent, start_col, after_op_space); - if (after_op_space == Space.Newline and - tree.token_ids[tree.nextToken(infix_op_node.op_token)] != .MultilineStringLiteralLine) - { - try stream.writeByteNTimes(' ', indent + indent_delta); - start_col.* = indent + indent_delta; - } - - return renderExpression(allocator, stream, tree, indent, start_col, infix_op_node.rhs, space); + try renderToken(tree, stream, infix_op_node.op_token, after_op_space); + stream.pushIndentOneShot(); + return renderExpression(allocator, stream, tree, infix_op_node.rhs, space); }, .BitNot, @@ -587,8 +535,8 @@ fn renderExpression( .AddressOf, => { const casted_node = @fieldParentPtr(ast.Node.SimplePrefixOp, "base", base); - try renderToken(tree, stream, casted_node.op_token, indent, start_col, Space.None); - return renderExpression(allocator, stream, tree, indent, start_col, casted_node.rhs, space); + try renderToken(tree, stream, casted_node.op_token, Space.None); + return renderExpression(allocator, stream, tree, casted_node.rhs, space); }, .Try, @@ -596,8 +544,8 @@ fn renderExpression( 
.Await, => { const casted_node = @fieldParentPtr(ast.Node.SimplePrefixOp, "base", base); - try renderToken(tree, stream, casted_node.op_token, indent, start_col, Space.Space); - return renderExpression(allocator, stream, tree, indent, start_col, casted_node.rhs, space); + try renderToken(tree, stream, casted_node.op_token, Space.Space); + return renderExpression(allocator, stream, tree, casted_node.rhs, space); }, .ArrayType => { @@ -606,8 +554,6 @@ fn renderExpression( allocator, stream, tree, - indent, - start_col, array_type.op_token, array_type.rhs, array_type.len_expr, @@ -621,8 +567,6 @@ fn renderExpression( allocator, stream, tree, - indent, - start_col, array_type.op_token, array_type.rhs, array_type.len_expr, @@ -635,111 +579,111 @@ fn renderExpression( const ptr_type = @fieldParentPtr(ast.Node.PtrType, "base", base); const op_tok_id = tree.token_ids[ptr_type.op_token]; switch (op_tok_id) { - .Asterisk, .AsteriskAsterisk => try stream.writeByte('*'), + .Asterisk, .AsteriskAsterisk => try stream.writer().writeByte('*'), .LBracket => if (tree.token_ids[ptr_type.op_token + 2] == .Identifier) - try stream.writeAll("[*c") + try stream.writer().writeAll("[*c") else - try stream.writeAll("[*"), + try stream.writer().writeAll("[*"), else => unreachable, } if (ptr_type.ptr_info.sentinel) |sentinel| { const colon_token = tree.prevToken(sentinel.firstToken()); - try renderToken(tree, stream, colon_token, indent, start_col, Space.None); // : + try renderToken(tree, stream, colon_token, Space.None); // : const sentinel_space = switch (op_tok_id) { .LBracket => Space.None, else => Space.Space, }; - try renderExpression(allocator, stream, tree, indent, start_col, sentinel, sentinel_space); + try renderExpression(allocator, stream, tree, sentinel, sentinel_space); } switch (op_tok_id) { .Asterisk, .AsteriskAsterisk => {}, - .LBracket => try stream.writeByte(']'), + .LBracket => try stream.writer().writeByte(']'), else => unreachable, } if 
(ptr_type.ptr_info.allowzero_token) |allowzero_token| { - try renderToken(tree, stream, allowzero_token, indent, start_col, Space.Space); // allowzero + try renderToken(tree, stream, allowzero_token, Space.Space); // allowzero } if (ptr_type.ptr_info.align_info) |align_info| { const lparen_token = tree.prevToken(align_info.node.firstToken()); const align_token = tree.prevToken(lparen_token); - try renderToken(tree, stream, align_token, indent, start_col, Space.None); // align - try renderToken(tree, stream, lparen_token, indent, start_col, Space.None); // ( + try renderToken(tree, stream, align_token, Space.None); // align + try renderToken(tree, stream, lparen_token, Space.None); // ( - try renderExpression(allocator, stream, tree, indent, start_col, align_info.node, Space.None); + try renderExpression(allocator, stream, tree, align_info.node, Space.None); if (align_info.bit_range) |bit_range| { const colon1 = tree.prevToken(bit_range.start.firstToken()); const colon2 = tree.prevToken(bit_range.end.firstToken()); - try renderToken(tree, stream, colon1, indent, start_col, Space.None); // : - try renderExpression(allocator, stream, tree, indent, start_col, bit_range.start, Space.None); - try renderToken(tree, stream, colon2, indent, start_col, Space.None); // : - try renderExpression(allocator, stream, tree, indent, start_col, bit_range.end, Space.None); + try renderToken(tree, stream, colon1, Space.None); // : + try renderExpression(allocator, stream, tree, bit_range.start, Space.None); + try renderToken(tree, stream, colon2, Space.None); // : + try renderExpression(allocator, stream, tree, bit_range.end, Space.None); const rparen_token = tree.nextToken(bit_range.end.lastToken()); - try renderToken(tree, stream, rparen_token, indent, start_col, Space.Space); // ) + try renderToken(tree, stream, rparen_token, Space.Space); // ) } else { const rparen_token = tree.nextToken(align_info.node.lastToken()); - try renderToken(tree, stream, rparen_token, indent, start_col, 
Space.Space); // ) + try renderToken(tree, stream, rparen_token, Space.Space); // ) } } if (ptr_type.ptr_info.const_token) |const_token| { - try renderToken(tree, stream, const_token, indent, start_col, Space.Space); // const + try renderToken(tree, stream, const_token, Space.Space); // const } if (ptr_type.ptr_info.volatile_token) |volatile_token| { - try renderToken(tree, stream, volatile_token, indent, start_col, Space.Space); // volatile + try renderToken(tree, stream, volatile_token, Space.Space); // volatile } - return renderExpression(allocator, stream, tree, indent, start_col, ptr_type.rhs, space); + return renderExpression(allocator, stream, tree, ptr_type.rhs, space); }, .SliceType => { const slice_type = @fieldParentPtr(ast.Node.SliceType, "base", base); - try renderToken(tree, stream, slice_type.op_token, indent, start_col, Space.None); // [ + try renderToken(tree, stream, slice_type.op_token, Space.None); // [ if (slice_type.ptr_info.sentinel) |sentinel| { const colon_token = tree.prevToken(sentinel.firstToken()); - try renderToken(tree, stream, colon_token, indent, start_col, Space.None); // : - try renderExpression(allocator, stream, tree, indent, start_col, sentinel, Space.None); - try renderToken(tree, stream, tree.nextToken(sentinel.lastToken()), indent, start_col, Space.None); // ] + try renderToken(tree, stream, colon_token, Space.None); // : + try renderExpression(allocator, stream, tree, sentinel, Space.None); + try renderToken(tree, stream, tree.nextToken(sentinel.lastToken()), Space.None); // ] } else { - try renderToken(tree, stream, tree.nextToken(slice_type.op_token), indent, start_col, Space.None); // ] + try renderToken(tree, stream, tree.nextToken(slice_type.op_token), Space.None); // ] } if (slice_type.ptr_info.allowzero_token) |allowzero_token| { - try renderToken(tree, stream, allowzero_token, indent, start_col, Space.Space); // allowzero + try renderToken(tree, stream, allowzero_token, Space.Space); // allowzero } if 
(slice_type.ptr_info.align_info) |align_info| { const lparen_token = tree.prevToken(align_info.node.firstToken()); const align_token = tree.prevToken(lparen_token); - try renderToken(tree, stream, align_token, indent, start_col, Space.None); // align - try renderToken(tree, stream, lparen_token, indent, start_col, Space.None); // ( + try renderToken(tree, stream, align_token, Space.None); // align + try renderToken(tree, stream, lparen_token, Space.None); // ( - try renderExpression(allocator, stream, tree, indent, start_col, align_info.node, Space.None); + try renderExpression(allocator, stream, tree, align_info.node, Space.None); if (align_info.bit_range) |bit_range| { const colon1 = tree.prevToken(bit_range.start.firstToken()); const colon2 = tree.prevToken(bit_range.end.firstToken()); - try renderToken(tree, stream, colon1, indent, start_col, Space.None); // : - try renderExpression(allocator, stream, tree, indent, start_col, bit_range.start, Space.None); - try renderToken(tree, stream, colon2, indent, start_col, Space.None); // : - try renderExpression(allocator, stream, tree, indent, start_col, bit_range.end, Space.None); + try renderToken(tree, stream, colon1, Space.None); // : + try renderExpression(allocator, stream, tree, bit_range.start, Space.None); + try renderToken(tree, stream, colon2, Space.None); // : + try renderExpression(allocator, stream, tree, bit_range.end, Space.None); const rparen_token = tree.nextToken(bit_range.end.lastToken()); - try renderToken(tree, stream, rparen_token, indent, start_col, Space.Space); // ) + try renderToken(tree, stream, rparen_token, Space.Space); // ) } else { const rparen_token = tree.nextToken(align_info.node.lastToken()); - try renderToken(tree, stream, rparen_token, indent, start_col, Space.Space); // ) + try renderToken(tree, stream, rparen_token, Space.Space); // ) } } if (slice_type.ptr_info.const_token) |const_token| { - try renderToken(tree, stream, const_token, indent, start_col, Space.Space); + try 
renderToken(tree, stream, const_token, Space.Space); } if (slice_type.ptr_info.volatile_token) |volatile_token| { - try renderToken(tree, stream, volatile_token, indent, start_col, Space.Space); + try renderToken(tree, stream, volatile_token, Space.Space); } - return renderExpression(allocator, stream, tree, indent, start_col, slice_type.rhs, space); + return renderExpression(allocator, stream, tree, slice_type.rhs, space); }, .ArrayInitializer, .ArrayInitializerDot => { @@ -768,27 +712,33 @@ fn renderExpression( if (exprs.len == 0) { switch (lhs) { - .dot => |dot| try renderToken(tree, stream, dot, indent, start_col, Space.None), - .node => |node| try renderExpression(allocator, stream, tree, indent, start_col, node, Space.None), + .dot => |dot| try renderToken(tree, stream, dot, Space.None), + .node => |node| try renderExpression(allocator, stream, tree, node, Space.None), } - try renderToken(tree, stream, lbrace, indent, start_col, Space.None); - return renderToken(tree, stream, rtoken, indent, start_col, space); - } - if (exprs.len == 1 and tree.token_ids[exprs[0].lastToken() + 1] == .RBrace) { - const expr = exprs[0]; - switch (lhs) { - .dot => |dot| try renderToken(tree, stream, dot, indent, start_col, Space.None), - .node => |node| try renderExpression(allocator, stream, tree, indent, start_col, node, Space.None), + { + stream.pushIndent(); + defer stream.popIndent(); + try renderToken(tree, stream, lbrace, Space.None); } - try renderToken(tree, stream, lbrace, indent, start_col, Space.None); - try renderExpression(allocator, stream, tree, indent, start_col, expr, Space.None); - return renderToken(tree, stream, rtoken, indent, start_col, space); + + return renderToken(tree, stream, rtoken, space); + } + if (exprs.len == 1 and tree.token_ids[exprs[0].*.lastToken() + 1] == .RBrace) { + const expr = exprs[0]; + + switch (lhs) { + .dot => |dot| try renderToken(tree, stream, dot, Space.None), + .node => |node| try renderExpression(allocator, stream, tree, node, 
Space.None), + } + try renderToken(tree, stream, lbrace, Space.None); + try renderExpression(allocator, stream, tree, expr, Space.None); + return renderToken(tree, stream, rtoken, space); } switch (lhs) { - .dot => |dot| try renderToken(tree, stream, dot, indent, start_col, Space.None), - .node => |node| try renderExpression(allocator, stream, tree, indent, start_col, node, Space.None), + .dot => |dot| try renderToken(tree, stream, dot, Space.None), + .node => |node| try renderExpression(allocator, stream, tree, node, Space.None), } // scan to find row size @@ -832,77 +782,68 @@ fn renderExpression( // Null stream for counting the printed length of each expression var counting_stream = std.io.countingOutStream(std.io.null_out_stream); + var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, &counting_stream); for (exprs) |expr, i| { counting_stream.bytes_written = 0; - var dummy_col: usize = 0; - try renderExpression(allocator, counting_stream.outStream(), tree, indent, &dummy_col, expr, Space.None); + try renderExpression(allocator, &auto_indenting_stream, tree, expr, Space.None); const width = @intCast(usize, counting_stream.bytes_written); const col = i % row_size; column_widths[col] = std.math.max(column_widths[col], width); expr_widths[i] = width; } - var new_indent = indent + indent_delta; + { + stream.pushIndentNextLine(); + defer stream.popIndent(); + try renderToken(tree, stream, lbrace, Space.Newline); - if (tree.token_ids[tree.nextToken(lbrace)] != .MultilineStringLiteralLine) { - try renderToken(tree, stream, lbrace, new_indent, start_col, Space.Newline); - try stream.writeByteNTimes(' ', new_indent); - } else { - new_indent -= indent_delta; - try renderToken(tree, stream, lbrace, new_indent, start_col, Space.None); + var col: usize = 1; + for (exprs) |expr, i| { + if (i + 1 < exprs.len) { + const next_expr = exprs[i + 1]; + try renderExpression(allocator, stream, tree, expr, Space.None); + + const comma = 
tree.nextToken(expr.*.lastToken()); + + if (col != row_size) { + try renderToken(tree, stream, comma, Space.Space); // , + + const padding = column_widths[i % row_size] - expr_widths[i]; + try stream.writer().writeByteNTimes(' ', padding); + + col += 1; + continue; + } + col = 1; + + if (tree.token_ids[tree.nextToken(comma)] != .MultilineStringLiteralLine) { + try renderToken(tree, stream, comma, Space.Newline); // , + } else { + try renderToken(tree, stream, comma, Space.None); // , + } + + try renderExtraNewline(tree, stream, next_expr); + } else { + try renderExpression(allocator, stream, tree, expr, Space.Comma); // , + } + } } - - var col: usize = 1; + return renderToken(tree, stream, rtoken, space); + } else { + try renderToken(tree, stream, lbrace, Space.Space); for (exprs) |expr, i| { if (i + 1 < exprs.len) { const next_expr = exprs[i + 1]; - try renderExpression(allocator, stream, tree, new_indent, start_col, expr, Space.None); - - const comma = tree.nextToken(expr.lastToken()); - - if (col != row_size) { - try renderToken(tree, stream, comma, new_indent, start_col, Space.Space); // , - - const padding = column_widths[i % row_size] - expr_widths[i]; - try stream.writeByteNTimes(' ', padding); - - col += 1; - continue; - } - col = 1; - - if (tree.token_ids[tree.nextToken(comma)] != .MultilineStringLiteralLine) { - try renderToken(tree, stream, comma, new_indent, start_col, Space.Newline); // , - } else { - try renderToken(tree, stream, comma, new_indent, start_col, Space.None); // , - } - - try renderExtraNewline(tree, stream, start_col, next_expr); - if (next_expr.tag != .MultilineStringLiteral) { - try stream.writeByteNTimes(' ', new_indent); - } + try renderExpression(allocator, stream, tree, expr, Space.None); + const comma = tree.nextToken(expr.*.lastToken()); + try renderToken(tree, stream, comma, Space.Space); // , } else { - try renderExpression(allocator, stream, tree, new_indent, start_col, expr, Space.Comma); // , - } - } - if (exprs[exprs.len - 
1].tag != .MultilineStringLiteral) { - try stream.writeByteNTimes(' ', indent); - } - return renderToken(tree, stream, rtoken, indent, start_col, space); - } else { - try renderToken(tree, stream, lbrace, indent, start_col, Space.Space); - for (exprs) |expr, i| { - if (i + 1 < exprs.len) { - try renderExpression(allocator, stream, tree, indent, start_col, expr, Space.None); - const comma = tree.nextToken(expr.lastToken()); - try renderToken(tree, stream, comma, indent, start_col, Space.Space); // , - } else { - try renderExpression(allocator, stream, tree, indent, start_col, expr, Space.Space); + try renderExpression(allocator, stream, tree, expr, Space.Space); } } - return renderToken(tree, stream, rtoken, indent, start_col, space); + return renderToken(tree, stream, rtoken, space); } }, @@ -932,11 +873,17 @@ fn renderExpression( if (field_inits.len == 0) { switch (lhs) { - .dot => |dot| try renderToken(tree, stream, dot, indent, start_col, Space.None), - .node => |node| try renderExpression(allocator, stream, tree, indent, start_col, node, Space.None), + .dot => |dot| try renderToken(tree, stream, dot, Space.None), + .node => |node| try renderExpression(allocator, stream, tree, node, Space.None), } - try renderToken(tree, stream, lbrace, indent + indent_delta, start_col, Space.None); - return renderToken(tree, stream, rtoken, indent, start_col, space); + + { + stream.pushIndentNextLine(); + defer stream.popIndent(); + try renderToken(tree, stream, lbrace, Space.None); + } + + return renderToken(tree, stream, rtoken, space); } const src_has_trailing_comma = blk: { @@ -952,9 +899,10 @@ fn renderExpression( const expr_outputs_one_line = blk: { // render field expressions until a LF is found for (field_inits) |field_init| { - var find_stream = FindByteOutStream.init('\n'); - var dummy_col: usize = 0; - try renderExpression(allocator, find_stream.outStream(), tree, 0, &dummy_col, field_init, Space.None); + var find_stream = std.io.findByteOutStream('\n', 
&std.io.null_out_stream); + var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, &find_stream); + + try renderExpression(allocator, &auto_indenting_stream, tree, field_init, Space.None); if (find_stream.byte_found) break :blk false; } break :blk true; @@ -967,7 +915,6 @@ fn renderExpression( .StructInitializer, .StructInitializerDot, => break :blk, - else => {}, } @@ -977,76 +924,78 @@ fn renderExpression( } switch (lhs) { - .dot => |dot| try renderToken(tree, stream, dot, indent, start_col, Space.None), - .node => |node| try renderExpression(allocator, stream, tree, indent, start_col, node, Space.None), + .dot => |dot| try renderToken(tree, stream, dot, Space.None), + .node => |node| try renderExpression(allocator, stream, tree, node, Space.None), } - try renderToken(tree, stream, lbrace, indent, start_col, Space.Space); - try renderExpression(allocator, stream, tree, indent, start_col, &field_init.base, Space.Space); - return renderToken(tree, stream, rtoken, indent, start_col, space); + try renderToken(tree, stream, lbrace, Space.Space); + try renderExpression(allocator, stream, tree, &field_init.base, Space.Space); + return renderToken(tree, stream, rtoken, space); } if (!src_has_trailing_comma and src_same_line and expr_outputs_one_line) { // render all on one line, no trailing comma switch (lhs) { - .dot => |dot| try renderToken(tree, stream, dot, indent, start_col, Space.None), - .node => |node| try renderExpression(allocator, stream, tree, indent, start_col, node, Space.None), + .dot => |dot| try renderToken(tree, stream, dot, Space.None), + .node => |node| try renderExpression(allocator, stream, tree, node, Space.None), } - try renderToken(tree, stream, lbrace, indent, start_col, Space.Space); + try renderToken(tree, stream, lbrace, Space.Space); for (field_inits) |field_init, i| { if (i + 1 < field_inits.len) { - try renderExpression(allocator, stream, tree, indent, start_col, field_init, Space.None); + try renderExpression(allocator, 
stream, tree, field_init, Space.None); const comma = tree.nextToken(field_init.lastToken()); - try renderToken(tree, stream, comma, indent, start_col, Space.Space); + try renderToken(tree, stream, comma, Space.Space); } else { - try renderExpression(allocator, stream, tree, indent, start_col, field_init, Space.Space); + try renderExpression(allocator, stream, tree, field_init, Space.Space); } } - return renderToken(tree, stream, rtoken, indent, start_col, space); + return renderToken(tree, stream, rtoken, space); } - const new_indent = indent + indent_delta; + { + switch (lhs) { + .dot => |dot| try renderToken(tree, stream, dot, Space.None), + .node => |node| try renderExpression(allocator, stream, tree, node, Space.None), + } - switch (lhs) { - .dot => |dot| try renderToken(tree, stream, dot, new_indent, start_col, Space.None), - .node => |node| try renderExpression(allocator, stream, tree, new_indent, start_col, node, Space.None), - } - try renderToken(tree, stream, lbrace, new_indent, start_col, Space.Newline); + stream.pushIndentNextLine(); + defer stream.popIndent(); - for (field_inits) |field_init, i| { - try stream.writeByteNTimes(' ', new_indent); + try renderToken(tree, stream, lbrace, Space.Newline); - if (i + 1 < field_inits.len) { - try renderExpression(allocator, stream, tree, new_indent, start_col, field_init, Space.None); + for (field_inits) |field_init, i| { + if (i + 1 < field_inits.len) { + const next_field_init = field_inits[i + 1]; + try renderExpression(allocator, stream, tree, field_init, Space.None); - const comma = tree.nextToken(field_init.lastToken()); - try renderToken(tree, stream, comma, new_indent, start_col, Space.Newline); + const comma = tree.nextToken(field_init.lastToken()); + try renderToken(tree, stream, comma, Space.Newline); - try renderExtraNewline(tree, stream, start_col, field_inits[i + 1]); - } else { - try renderExpression(allocator, stream, tree, new_indent, start_col, field_init, Space.Comma); + try 
renderExtraNewline(tree, stream, next_field_init); + } else { + try renderExpression(allocator, stream, tree, field_init, Space.Comma); + } } } - try stream.writeByteNTimes(' ', indent); - return renderToken(tree, stream, rtoken, indent, start_col, space); + return renderToken(tree, stream, rtoken, space); }, .Call => { const call = @fieldParentPtr(ast.Node.Call, "base", base); if (call.async_token) |async_token| { - try renderToken(tree, stream, async_token, indent, start_col, Space.Space); + try renderToken(tree, stream, async_token, Space.Space); } - try renderExpression(allocator, stream, tree, indent, start_col, call.lhs, Space.None); + try renderExpression(allocator, stream, tree, call.lhs, Space.None); const lparen = tree.nextToken(call.lhs.lastToken()); if (call.params_len == 0) { - try renderToken(tree, stream, lparen, indent, start_col, Space.None); - return renderToken(tree, stream, call.rtoken, indent, start_col, space); + try renderToken(tree, stream, lparen, Space.None); + return renderToken(tree, stream, call.rtoken, space); } const src_has_trailing_comma = blk: { @@ -1055,43 +1004,41 @@ fn renderExpression( }; if (src_has_trailing_comma) { - const new_indent = indent + indent_delta; - try renderToken(tree, stream, lparen, new_indent, start_col, Space.Newline); + try renderToken(tree, stream, lparen, Space.Newline); const params = call.params(); for (params) |param_node, i| { - const param_node_new_indent = if (param_node.tag == .MultilineStringLiteral) blk: { - break :blk indent; - } else blk: { - try stream.writeByteNTimes(' ', new_indent); - break :blk new_indent; - }; + stream.pushIndent(); + defer stream.popIndent(); if (i + 1 < params.len) { - try renderExpression(allocator, stream, tree, param_node_new_indent, start_col, param_node, Space.None); + const next_node = params[i + 1]; + try renderExpression(allocator, stream, tree, param_node, Space.None); const comma = tree.nextToken(param_node.lastToken()); - try renderToken(tree, stream, comma, 
new_indent, start_col, Space.Newline); // , - try renderExtraNewline(tree, stream, start_col, params[i + 1]); + try renderToken(tree, stream, comma, Space.Newline); // , + try renderExtraNewline(tree, stream, next_node); } else { - try renderExpression(allocator, stream, tree, param_node_new_indent, start_col, param_node, Space.Comma); - try stream.writeByteNTimes(' ', indent); - return renderToken(tree, stream, call.rtoken, indent, start_col, space); + try renderExpression(allocator, stream, tree, param_node, Space.Comma); } } + return renderToken(tree, stream, call.rtoken, space); } - try renderToken(tree, stream, lparen, indent, start_col, Space.None); // ( + try renderToken(tree, stream, lparen, Space.None); // ( const params = call.params(); for (params) |param_node, i| { - try renderExpression(allocator, stream, tree, indent, start_col, param_node, Space.None); + if (param_node.*.tag == .MultilineStringLiteral) stream.pushIndentOneShot(); + + try renderExpression(allocator, stream, tree, param_node, Space.None); if (i + 1 < params.len) { + const next_param = params[i + 1]; const comma = tree.nextToken(param_node.lastToken()); - try renderToken(tree, stream, comma, indent, start_col, Space.Space); + try renderToken(tree, stream, comma, Space.Space); } } - return renderToken(tree, stream, call.rtoken, indent, start_col, space); + return renderToken(tree, stream, call.rtoken, space); }, .ArrayAccess => { @@ -1100,26 +1047,25 @@ fn renderExpression( const lbracket = tree.nextToken(suffix_op.lhs.lastToken()); const rbracket = tree.nextToken(suffix_op.index_expr.lastToken()); - try renderExpression(allocator, stream, tree, indent, start_col, suffix_op.lhs, Space.None); - try renderToken(tree, stream, lbracket, indent, start_col, Space.None); // [ + try renderExpression(allocator, stream, tree, suffix_op.lhs, Space.None); + try renderToken(tree, stream, lbracket, Space.None); // [ const starts_with_comment = tree.token_ids[lbracket + 1] == .LineComment; const 
ends_with_comment = tree.token_ids[rbracket - 1] == .LineComment; - const new_indent = if (ends_with_comment) indent + indent_delta else indent; - const new_space = if (ends_with_comment) Space.Newline else Space.None; - try renderExpression(allocator, stream, tree, new_indent, start_col, suffix_op.index_expr, new_space); - if (starts_with_comment) { - try stream.writeByte('\n'); + { + const new_space = if (ends_with_comment) Space.Newline else Space.None; + + stream.pushIndent(); + defer stream.popIndent(); + try renderExpression(allocator, stream, tree, suffix_op.index_expr, new_space); } - if (ends_with_comment or starts_with_comment) { - try stream.writeByteNTimes(' ', indent); - } - return renderToken(tree, stream, rbracket, indent, start_col, space); // ] + if (starts_with_comment) try stream.maybeInsertNewline(); + return renderToken(tree, stream, rbracket, space); // ] }, + .Slice => { const suffix_op = base.castTag(.Slice).?; - - try renderExpression(allocator, stream, tree, indent, start_col, suffix_op.lhs, Space.None); + try renderExpression(allocator, stream, tree, suffix_op.lhs, Space.None); const lbracket = tree.prevToken(suffix_op.start.firstToken()); const dotdot = tree.nextToken(suffix_op.start.lastToken()); @@ -1129,32 +1075,33 @@ fn renderExpression( const after_start_space = if (after_start_space_bool) Space.Space else Space.None; const after_op_space = if (suffix_op.end != null) after_start_space else Space.None; - try renderToken(tree, stream, lbracket, indent, start_col, Space.None); // [ - try renderExpression(allocator, stream, tree, indent, start_col, suffix_op.start, after_start_space); - try renderToken(tree, stream, dotdot, indent, start_col, after_op_space); // .. + try renderToken(tree, stream, lbracket, Space.None); // [ + try renderExpression(allocator, stream, tree, suffix_op.start, after_start_space); + try renderToken(tree, stream, dotdot, after_op_space); // .. 
if (suffix_op.end) |end| { const after_end_space = if (suffix_op.sentinel != null) Space.Space else Space.None; - try renderExpression(allocator, stream, tree, indent, start_col, end, after_end_space); + try renderExpression(allocator, stream, tree, end, after_end_space); } if (suffix_op.sentinel) |sentinel| { const colon = tree.prevToken(sentinel.firstToken()); - try renderToken(tree, stream, colon, indent, start_col, Space.None); // : - try renderExpression(allocator, stream, tree, indent, start_col, sentinel, Space.None); + try renderToken(tree, stream, colon, Space.None); // : + try renderExpression(allocator, stream, tree, sentinel, Space.None); } - return renderToken(tree, stream, suffix_op.rtoken, indent, start_col, space); // ] + return renderToken(tree, stream, suffix_op.rtoken, space); // ] }, + .Deref => { const suffix_op = base.castTag(.Deref).?; - try renderExpression(allocator, stream, tree, indent, start_col, suffix_op.lhs, Space.None); - return renderToken(tree, stream, suffix_op.rtoken, indent, start_col, space); // .* + try renderExpression(allocator, stream, tree, suffix_op.lhs, Space.None); + return renderToken(tree, stream, suffix_op.rtoken, space); // .* }, .UnwrapOptional => { const suffix_op = base.castTag(.UnwrapOptional).?; - try renderExpression(allocator, stream, tree, indent, start_col, suffix_op.lhs, Space.None); - try renderToken(tree, stream, tree.prevToken(suffix_op.rtoken), indent, start_col, Space.None); // . - return renderToken(tree, stream, suffix_op.rtoken, indent, start_col, space); // ? + try renderExpression(allocator, stream, tree, suffix_op.lhs, Space.None); + try renderToken(tree, stream, tree.prevToken(suffix_op.rtoken), Space.None); // . + return renderToken(tree, stream, suffix_op.rtoken, space); // ? 
}, .Break => { @@ -1163,145 +1110,152 @@ fn renderExpression( const maybe_label = flow_expr.getLabel(); if (maybe_label == null and maybe_rhs == null) { - return renderToken(tree, stream, flow_expr.ltoken, indent, start_col, space); // break + return renderToken(tree, stream, flow_expr.ltoken, space); // break } - try renderToken(tree, stream, flow_expr.ltoken, indent, start_col, Space.Space); // break + try renderToken(tree, stream, flow_expr.ltoken, Space.Space); // break if (maybe_label) |label| { const colon = tree.nextToken(flow_expr.ltoken); - try renderToken(tree, stream, colon, indent, start_col, Space.None); // : + try renderToken(tree, stream, colon, Space.None); // : if (maybe_rhs == null) { - return renderToken(tree, stream, label, indent, start_col, space); // label + return renderToken(tree, stream, label, space); // label } - try renderToken(tree, stream, label, indent, start_col, Space.Space); // label + try renderToken(tree, stream, label, Space.Space); // label } - return renderExpression(allocator, stream, tree, indent, start_col, maybe_rhs.?, space); + return renderExpression(allocator, stream, tree, maybe_rhs.?, space); }, .Continue => { const flow_expr = base.castTag(.Continue).?; if (flow_expr.getLabel()) |label| { - try renderToken(tree, stream, flow_expr.ltoken, indent, start_col, Space.Space); // continue + try renderToken(tree, stream, flow_expr.ltoken, Space.Space); // continue const colon = tree.nextToken(flow_expr.ltoken); - try renderToken(tree, stream, colon, indent, start_col, Space.None); // : - return renderToken(tree, stream, label, indent, start_col, space); // label + try renderToken(tree, stream, colon, Space.None); // : + return renderToken(tree, stream, label, space); // label } else { - return renderToken(tree, stream, flow_expr.ltoken, indent, start_col, space); // continue + return renderToken(tree, stream, flow_expr.ltoken, space); // continue } }, .Return => { const flow_expr = base.castTag(.Return).?; if 
(flow_expr.getRHS()) |rhs| { - try renderToken(tree, stream, flow_expr.ltoken, indent, start_col, Space.Space); - return renderExpression(allocator, stream, tree, indent, start_col, rhs, space); + try renderToken(tree, stream, flow_expr.ltoken, Space.Space); + return renderExpression(allocator, stream, tree, rhs, space); } else { - return renderToken(tree, stream, flow_expr.ltoken, indent, start_col, space); + return renderToken(tree, stream, flow_expr.ltoken, space); } }, .Payload => { const payload = @fieldParentPtr(ast.Node.Payload, "base", base); - try renderToken(tree, stream, payload.lpipe, indent, start_col, Space.None); - try renderExpression(allocator, stream, tree, indent, start_col, payload.error_symbol, Space.None); - return renderToken(tree, stream, payload.rpipe, indent, start_col, space); + try renderToken(tree, stream, payload.lpipe, Space.None); + try renderExpression(allocator, stream, tree, payload.error_symbol, Space.None); + return renderToken(tree, stream, payload.rpipe, space); }, .PointerPayload => { const payload = @fieldParentPtr(ast.Node.PointerPayload, "base", base); - try renderToken(tree, stream, payload.lpipe, indent, start_col, Space.None); + try renderToken(tree, stream, payload.lpipe, Space.None); if (payload.ptr_token) |ptr_token| { - try renderToken(tree, stream, ptr_token, indent, start_col, Space.None); + try renderToken(tree, stream, ptr_token, Space.None); } - try renderExpression(allocator, stream, tree, indent, start_col, payload.value_symbol, Space.None); - return renderToken(tree, stream, payload.rpipe, indent, start_col, space); + try renderExpression(allocator, stream, tree, payload.value_symbol, Space.None); + return renderToken(tree, stream, payload.rpipe, space); }, .PointerIndexPayload => { const payload = @fieldParentPtr(ast.Node.PointerIndexPayload, "base", base); - try renderToken(tree, stream, payload.lpipe, indent, start_col, Space.None); + try renderToken(tree, stream, payload.lpipe, Space.None); if 
(payload.ptr_token) |ptr_token| { - try renderToken(tree, stream, ptr_token, indent, start_col, Space.None); + try renderToken(tree, stream, ptr_token, Space.None); } - try renderExpression(allocator, stream, tree, indent, start_col, payload.value_symbol, Space.None); + try renderExpression(allocator, stream, tree, payload.value_symbol, Space.None); if (payload.index_symbol) |index_symbol| { const comma = tree.nextToken(payload.value_symbol.lastToken()); - try renderToken(tree, stream, comma, indent, start_col, Space.Space); - try renderExpression(allocator, stream, tree, indent, start_col, index_symbol, Space.None); + try renderToken(tree, stream, comma, Space.Space); + try renderExpression(allocator, stream, tree, index_symbol, Space.None); } - return renderToken(tree, stream, payload.rpipe, indent, start_col, space); + return renderToken(tree, stream, payload.rpipe, space); }, .GroupedExpression => { const grouped_expr = @fieldParentPtr(ast.Node.GroupedExpression, "base", base); - try renderToken(tree, stream, grouped_expr.lparen, indent, start_col, Space.None); - try renderExpression(allocator, stream, tree, indent, start_col, grouped_expr.expr, Space.None); - return renderToken(tree, stream, grouped_expr.rparen, indent, start_col, space); + try renderToken(tree, stream, grouped_expr.lparen, Space.None); + { + stream.pushIndentOneShot(); + try renderExpression(allocator, stream, tree, grouped_expr.expr, Space.None); + } + return renderToken(tree, stream, grouped_expr.rparen, space); }, .FieldInitializer => { const field_init = @fieldParentPtr(ast.Node.FieldInitializer, "base", base); - try renderToken(tree, stream, field_init.period_token, indent, start_col, Space.None); // . 
- try renderToken(tree, stream, field_init.name_token, indent, start_col, Space.Space); // name - try renderToken(tree, stream, tree.nextToken(field_init.name_token), indent, start_col, Space.Space); // = - return renderExpression(allocator, stream, tree, indent, start_col, field_init.expr, space); + try renderToken(tree, stream, field_init.period_token, Space.None); // . + try renderToken(tree, stream, field_init.name_token, Space.Space); // name + try renderToken(tree, stream, tree.nextToken(field_init.name_token), Space.Space); // = + return renderExpression(allocator, stream, tree, field_init.expr, space); }, .ContainerDecl => { const container_decl = @fieldParentPtr(ast.Node.ContainerDecl, "base", base); if (container_decl.layout_token) |layout_token| { - try renderToken(tree, stream, layout_token, indent, start_col, Space.Space); + try renderToken(tree, stream, layout_token, Space.Space); } switch (container_decl.init_arg_expr) { .None => { - try renderToken(tree, stream, container_decl.kind_token, indent, start_col, Space.Space); // union + try renderToken(tree, stream, container_decl.kind_token, Space.Space); // union }, .Enum => |enum_tag_type| { - try renderToken(tree, stream, container_decl.kind_token, indent, start_col, Space.None); // union + try renderToken(tree, stream, container_decl.kind_token, Space.None); // union const lparen = tree.nextToken(container_decl.kind_token); const enum_token = tree.nextToken(lparen); - try renderToken(tree, stream, lparen, indent, start_col, Space.None); // ( - try renderToken(tree, stream, enum_token, indent, start_col, Space.None); // enum + try renderToken(tree, stream, lparen, Space.None); // ( + try renderToken(tree, stream, enum_token, Space.None); // enum if (enum_tag_type) |expr| { - try renderToken(tree, stream, tree.nextToken(enum_token), indent, start_col, Space.None); // ( - try renderExpression(allocator, stream, tree, indent, start_col, expr, Space.None); + try renderToken(tree, stream, 
tree.nextToken(enum_token), Space.None); // ( + try renderExpression(allocator, stream, tree, expr, Space.None); const rparen = tree.nextToken(expr.lastToken()); - try renderToken(tree, stream, rparen, indent, start_col, Space.None); // ) - try renderToken(tree, stream, tree.nextToken(rparen), indent, start_col, Space.Space); // ) + try renderToken(tree, stream, rparen, Space.None); // ) + try renderToken(tree, stream, tree.nextToken(rparen), Space.Space); // ) } else { - try renderToken(tree, stream, tree.nextToken(enum_token), indent, start_col, Space.Space); // ) + try renderToken(tree, stream, tree.nextToken(enum_token), Space.Space); // ) } }, .Type => |type_expr| { - try renderToken(tree, stream, container_decl.kind_token, indent, start_col, Space.None); // union + try renderToken(tree, stream, container_decl.kind_token, Space.None); // union const lparen = tree.nextToken(container_decl.kind_token); const rparen = tree.nextToken(type_expr.lastToken()); - try renderToken(tree, stream, lparen, indent, start_col, Space.None); // ( - try renderExpression(allocator, stream, tree, indent, start_col, type_expr, Space.None); - try renderToken(tree, stream, rparen, indent, start_col, Space.Space); // ) + try renderToken(tree, stream, lparen, Space.None); // ( + try renderExpression(allocator, stream, tree, type_expr, Space.None); + try renderToken(tree, stream, rparen, Space.Space); // ) }, } if (container_decl.fields_and_decls_len == 0) { - try renderToken(tree, stream, container_decl.lbrace_token, indent + indent_delta, start_col, Space.None); // { - return renderToken(tree, stream, container_decl.rbrace_token, indent, start_col, space); // } + { + stream.pushIndentNextLine(); + defer stream.popIndent(); + try renderToken(tree, stream, container_decl.lbrace_token, Space.None); // { + } + return renderToken(tree, stream, container_decl.rbrace_token, space); // } } const src_has_trailing_comma = blk: { @@ -1332,43 +1286,39 @@ fn renderExpression( if 
(src_has_trailing_comma or !src_has_only_fields) { // One declaration per line - const new_indent = indent + indent_delta; - try renderToken(tree, stream, container_decl.lbrace_token, new_indent, start_col, .Newline); // { + stream.pushIndentNextLine(); + defer stream.popIndent(); + try renderToken(tree, stream, container_decl.lbrace_token, .Newline); // { for (fields_and_decls) |decl, i| { - try stream.writeByteNTimes(' ', new_indent); - try renderContainerDecl(allocator, stream, tree, new_indent, start_col, decl, .Newline); + try renderContainerDecl(allocator, stream, tree, decl, .Newline); if (i + 1 < fields_and_decls.len) { - try renderExtraNewline(tree, stream, start_col, fields_and_decls[i + 1]); + try renderExtraNewline(tree, stream, fields_and_decls[i + 1]); } } - - try stream.writeByteNTimes(' ', indent); } else if (src_has_newline) { // All the declarations on the same line, but place the items on // their own line - try renderToken(tree, stream, container_decl.lbrace_token, indent, start_col, .Newline); // { + try renderToken(tree, stream, container_decl.lbrace_token, .Newline); // { - const new_indent = indent + indent_delta; - try stream.writeByteNTimes(' ', new_indent); + stream.pushIndent(); + defer stream.popIndent(); for (fields_and_decls) |decl, i| { const space_after_decl: Space = if (i + 1 >= fields_and_decls.len) .Newline else .Space; - try renderContainerDecl(allocator, stream, tree, new_indent, start_col, decl, space_after_decl); + try renderContainerDecl(allocator, stream, tree, decl, space_after_decl); } - - try stream.writeByteNTimes(' ', indent); } else { // All the declarations on the same line - try renderToken(tree, stream, container_decl.lbrace_token, indent, start_col, .Space); // { + try renderToken(tree, stream, container_decl.lbrace_token, .Space); // { for (fields_and_decls) |decl| { - try renderContainerDecl(allocator, stream, tree, indent, start_col, decl, .Space); + try renderContainerDecl(allocator, stream, tree, decl, 
.Space); } } - return renderToken(tree, stream, container_decl.rbrace_token, indent, start_col, space); // } + return renderToken(tree, stream, container_decl.rbrace_token, space); // } }, .ErrorSetDecl => { @@ -1377,9 +1327,9 @@ fn renderExpression( const lbrace = tree.nextToken(err_set_decl.error_token); if (err_set_decl.decls_len == 0) { - try renderToken(tree, stream, err_set_decl.error_token, indent, start_col, Space.None); - try renderToken(tree, stream, lbrace, indent, start_col, Space.None); - return renderToken(tree, stream, err_set_decl.rbrace_token, indent, start_col, space); + try renderToken(tree, stream, err_set_decl.error_token, Space.None); + try renderToken(tree, stream, lbrace, Space.None); + return renderToken(tree, stream, err_set_decl.rbrace_token, space); } if (err_set_decl.decls_len == 1) blk: { @@ -1393,13 +1343,13 @@ fn renderExpression( break :blk; } - try renderToken(tree, stream, err_set_decl.error_token, indent, start_col, Space.None); // error - try renderToken(tree, stream, lbrace, indent, start_col, Space.None); // { - try renderExpression(allocator, stream, tree, indent, start_col, node, Space.None); - return renderToken(tree, stream, err_set_decl.rbrace_token, indent, start_col, space); // } + try renderToken(tree, stream, err_set_decl.error_token, Space.None); // error + try renderToken(tree, stream, lbrace, Space.None); // { + try renderExpression(allocator, stream, tree, node, Space.None); + return renderToken(tree, stream, err_set_decl.rbrace_token, space); // } } - try renderToken(tree, stream, err_set_decl.error_token, indent, start_col, Space.None); // error + try renderToken(tree, stream, err_set_decl.error_token, Space.None); // error const src_has_trailing_comma = blk: { const maybe_comma = tree.prevToken(err_set_decl.rbrace_token); @@ -1407,78 +1357,72 @@ fn renderExpression( }; if (src_has_trailing_comma) { - try renderToken(tree, stream, lbrace, indent, start_col, Space.Newline); // { - const new_indent = indent + 
indent_delta; + { + stream.pushIndent(); + defer stream.popIndent(); - const decls = err_set_decl.decls(); - for (decls) |node, i| { - try stream.writeByteNTimes(' ', new_indent); + try renderToken(tree, stream, lbrace, Space.Newline); // { + const decls = err_set_decl.decls(); + for (decls) |node, i| { + if (i + 1 < decls.len) { + try renderExpression(allocator, stream, tree, node, Space.None); + try renderToken(tree, stream, tree.nextToken(node.lastToken()), Space.Newline); // , - if (i + 1 < decls.len) { - try renderExpression(allocator, stream, tree, new_indent, start_col, node, Space.None); - try renderToken(tree, stream, tree.nextToken(node.lastToken()), new_indent, start_col, Space.Newline); // , - - try renderExtraNewline(tree, stream, start_col, decls[i + 1]); - } else { - try renderExpression(allocator, stream, tree, new_indent, start_col, node, Space.Comma); + try renderExtraNewline(tree, stream, decls[i + 1]); + } else { + try renderExpression(allocator, stream, tree, node, Space.Comma); + } } } - try stream.writeByteNTimes(' ', indent); - return renderToken(tree, stream, err_set_decl.rbrace_token, indent, start_col, space); // } + return renderToken(tree, stream, err_set_decl.rbrace_token, space); // } } else { - try renderToken(tree, stream, lbrace, indent, start_col, Space.Space); // { + try renderToken(tree, stream, lbrace, Space.Space); // { const decls = err_set_decl.decls(); for (decls) |node, i| { if (i + 1 < decls.len) { - try renderExpression(allocator, stream, tree, indent, start_col, node, Space.None); + try renderExpression(allocator, stream, tree, node, Space.None); const comma_token = tree.nextToken(node.lastToken()); assert(tree.token_ids[comma_token] == .Comma); - try renderToken(tree, stream, comma_token, indent, start_col, Space.Space); // , - try renderExtraNewline(tree, stream, start_col, decls[i + 1]); + try renderToken(tree, stream, comma_token, Space.Space); // , + try renderExtraNewline(tree, stream, decls[i + 1]); } else { - 
try renderExpression(allocator, stream, tree, indent, start_col, node, Space.Space); + try renderExpression(allocator, stream, tree, node, Space.Space); } } - return renderToken(tree, stream, err_set_decl.rbrace_token, indent, start_col, space); // } + return renderToken(tree, stream, err_set_decl.rbrace_token, space); // } } }, .ErrorTag => { const tag = @fieldParentPtr(ast.Node.ErrorTag, "base", base); - try renderDocComments(tree, stream, tag, tag.doc_comments, indent, start_col); - return renderToken(tree, stream, tag.name_token, indent, start_col, space); // name + try renderDocComments(tree, stream, tag, tag.doc_comments); + return renderToken(tree, stream, tag.name_token, space); // name }, .MultilineStringLiteral => { - // TODO: Don't indent in this function, but let the caller indent. - // If this has been implemented, a lot of hacky solutions in i.e. ArrayInit and FunctionCall can be removed const multiline_str_literal = @fieldParentPtr(ast.Node.MultilineStringLiteral, "base", base); - var skip_first_indent = true; - if (tree.token_ids[multiline_str_literal.firstToken() - 1] != .LineComment) { - try stream.print("\n", .{}); - skip_first_indent = false; - } - - for (multiline_str_literal.lines()) |t| { - if (!skip_first_indent) { - try stream.writeByteNTimes(' ', indent + indent_delta); + { + const locked_indents = stream.lockOneShotIndent(); + defer { + var i: u8 = 0; + while (i < locked_indents) : (i += 1) stream.popIndent(); } - try renderToken(tree, stream, t, indent, start_col, Space.None); - skip_first_indent = false; + try stream.maybeInsertNewline(); + + for (multiline_str_literal.lines()) |t| try renderToken(tree, stream, t, Space.None); } - try stream.writeByteNTimes(' ', indent); }, .BuiltinCall => { const builtin_call = @fieldParentPtr(ast.Node.BuiltinCall, "base", base); - try renderToken(tree, stream, builtin_call.builtin_token, indent, start_col, Space.None); // @name + try renderToken(tree, stream, builtin_call.builtin_token, Space.None); 
// @name const src_params_trailing_comma = blk: { if (builtin_call.params_len < 2) break :blk false; @@ -1490,31 +1434,30 @@ fn renderExpression( const lparen = tree.nextToken(builtin_call.builtin_token); if (!src_params_trailing_comma) { - try renderToken(tree, stream, lparen, indent, start_col, Space.None); // ( + try renderToken(tree, stream, lparen, Space.None); // ( // render all on one line, no trailing comma const params = builtin_call.params(); for (params) |param_node, i| { - try renderExpression(allocator, stream, tree, indent, start_col, param_node, Space.None); + try renderExpression(allocator, stream, tree, param_node, Space.None); if (i + 1 < params.len) { const comma_token = tree.nextToken(param_node.lastToken()); - try renderToken(tree, stream, comma_token, indent, start_col, Space.Space); // , + try renderToken(tree, stream, comma_token, Space.Space); // , } } } else { // one param per line - const new_indent = indent + indent_delta; - try renderToken(tree, stream, lparen, new_indent, start_col, Space.Newline); // ( + stream.pushIndent(); + defer stream.popIndent(); + try renderToken(tree, stream, lparen, Space.Newline); // ( for (builtin_call.params()) |param_node| { - try stream.writeByteNTimes(' ', new_indent); - try renderExpression(allocator, stream, tree, indent, start_col, param_node, Space.Comma); + try renderExpression(allocator, stream, tree, param_node, Space.Comma); } - try stream.writeByteNTimes(' ', indent); } - return renderToken(tree, stream, builtin_call.rparen_token, indent, start_col, space); // ) + return renderToken(tree, stream, builtin_call.rparen_token, space); // ) }, .FnProto => { @@ -1524,24 +1467,24 @@ fn renderExpression( const visib_token = tree.token_ids[visib_token_index]; assert(visib_token == .Keyword_pub or visib_token == .Keyword_export); - try renderToken(tree, stream, visib_token_index, indent, start_col, Space.Space); // pub + try renderToken(tree, stream, visib_token_index, Space.Space); // pub } if 
(fn_proto.getTrailer("extern_export_inline_token")) |extern_export_inline_token| { if (fn_proto.getTrailer("is_extern_prototype") == null) - try renderToken(tree, stream, extern_export_inline_token, indent, start_col, Space.Space); // extern/export/inline + try renderToken(tree, stream, extern_export_inline_token, Space.Space); // extern/export/inline } if (fn_proto.getTrailer("lib_name")) |lib_name| { - try renderExpression(allocator, stream, tree, indent, start_col, lib_name, Space.Space); + try renderExpression(allocator, stream, tree, lib_name, Space.Space); } const lparen = if (fn_proto.getTrailer("name_token")) |name_token| blk: { - try renderToken(tree, stream, fn_proto.fn_token, indent, start_col, Space.Space); // fn - try renderToken(tree, stream, name_token, indent, start_col, Space.None); // name + try renderToken(tree, stream, fn_proto.fn_token, Space.Space); // fn + try renderToken(tree, stream, name_token, Space.None); // name break :blk tree.nextToken(name_token); } else blk: { - try renderToken(tree, stream, fn_proto.fn_token, indent, start_col, Space.Space); // fn + try renderToken(tree, stream, fn_proto.fn_token, Space.Space); // fn break :blk tree.nextToken(fn_proto.fn_token); }; assert(tree.token_ids[lparen] == .LParen); @@ -1568,47 +1511,45 @@ fn renderExpression( }; if (!src_params_trailing_comma) { - try renderToken(tree, stream, lparen, indent, start_col, Space.None); // ( + try renderToken(tree, stream, lparen, Space.None); // ( // render all on one line, no trailing comma for (fn_proto.params()) |param_decl, i| { - try renderParamDecl(allocator, stream, tree, indent, start_col, param_decl, Space.None); + try renderParamDecl(allocator, stream, tree, param_decl, Space.None); if (i + 1 < fn_proto.params_len or fn_proto.getTrailer("var_args_token") != null) { const comma = tree.nextToken(param_decl.lastToken()); - try renderToken(tree, stream, comma, indent, start_col, Space.Space); // , + try renderToken(tree, stream, comma, Space.Space); // 
, } } if (fn_proto.getTrailer("var_args_token")) |var_args_token| { - try renderToken(tree, stream, var_args_token, indent, start_col, Space.None); + try renderToken(tree, stream, var_args_token, Space.None); } } else { // one param per line - const new_indent = indent + indent_delta; - try renderToken(tree, stream, lparen, new_indent, start_col, Space.Newline); // ( + stream.pushIndent(); + defer stream.popIndent(); + try renderToken(tree, stream, lparen, Space.Newline); // ( for (fn_proto.params()) |param_decl| { - try stream.writeByteNTimes(' ', new_indent); - try renderParamDecl(allocator, stream, tree, new_indent, start_col, param_decl, Space.Comma); + try renderParamDecl(allocator, stream, tree, param_decl, Space.Comma); } if (fn_proto.getTrailer("var_args_token")) |var_args_token| { - try stream.writeByteNTimes(' ', new_indent); - try renderToken(tree, stream, var_args_token, new_indent, start_col, Space.Comma); + try renderToken(tree, stream, var_args_token, Space.Comma); } - try stream.writeByteNTimes(' ', indent); } - try renderToken(tree, stream, rparen, indent, start_col, Space.Space); // ) + try renderToken(tree, stream, rparen, Space.Space); // ) if (fn_proto.getTrailer("align_expr")) |align_expr| { const align_rparen = tree.nextToken(align_expr.lastToken()); const align_lparen = tree.prevToken(align_expr.firstToken()); const align_kw = tree.prevToken(align_lparen); - try renderToken(tree, stream, align_kw, indent, start_col, Space.None); // align - try renderToken(tree, stream, align_lparen, indent, start_col, Space.None); // ( - try renderExpression(allocator, stream, tree, indent, start_col, align_expr, Space.None); - try renderToken(tree, stream, align_rparen, indent, start_col, Space.Space); // ) + try renderToken(tree, stream, align_kw, Space.None); // align + try renderToken(tree, stream, align_lparen, Space.None); // ( + try renderExpression(allocator, stream, tree, align_expr, Space.None); + try renderToken(tree, stream, align_rparen, 
Space.Space); // ) } if (fn_proto.getTrailer("section_expr")) |section_expr| { @@ -1616,10 +1557,10 @@ fn renderExpression( const section_lparen = tree.prevToken(section_expr.firstToken()); const section_kw = tree.prevToken(section_lparen); - try renderToken(tree, stream, section_kw, indent, start_col, Space.None); // section - try renderToken(tree, stream, section_lparen, indent, start_col, Space.None); // ( - try renderExpression(allocator, stream, tree, indent, start_col, section_expr, Space.None); - try renderToken(tree, stream, section_rparen, indent, start_col, Space.Space); // ) + try renderToken(tree, stream, section_kw, Space.None); // section + try renderToken(tree, stream, section_lparen, Space.None); // ( + try renderExpression(allocator, stream, tree, section_expr, Space.None); + try renderToken(tree, stream, section_rparen, Space.Space); // ) } if (fn_proto.getTrailer("callconv_expr")) |callconv_expr| { @@ -1627,23 +1568,23 @@ fn renderExpression( const callconv_lparen = tree.prevToken(callconv_expr.firstToken()); const callconv_kw = tree.prevToken(callconv_lparen); - try renderToken(tree, stream, callconv_kw, indent, start_col, Space.None); // callconv - try renderToken(tree, stream, callconv_lparen, indent, start_col, Space.None); // ( - try renderExpression(allocator, stream, tree, indent, start_col, callconv_expr, Space.None); - try renderToken(tree, stream, callconv_rparen, indent, start_col, Space.Space); // ) + try renderToken(tree, stream, callconv_kw, Space.None); // callconv + try renderToken(tree, stream, callconv_lparen, Space.None); // ( + try renderExpression(allocator, stream, tree, callconv_expr, Space.None); + try renderToken(tree, stream, callconv_rparen, Space.Space); // ) } else if (fn_proto.getTrailer("is_extern_prototype") != null) { - try stream.writeAll("callconv(.C) "); + try stream.writer().writeAll("callconv(.C) "); } else if (fn_proto.getTrailer("is_async") != null) { - try stream.writeAll("callconv(.Async) "); + try 
stream.writer().writeAll("callconv(.Async) "); } switch (fn_proto.return_type) { .Explicit => |node| { - return renderExpression(allocator, stream, tree, indent, start_col, node, space); + return renderExpression(allocator, stream, tree, node, space); }, .InferErrorSet => |node| { - try renderToken(tree, stream, tree.prevToken(node.firstToken()), indent, start_col, Space.None); // ! - return renderExpression(allocator, stream, tree, indent, start_col, node, space); + try renderToken(tree, stream, tree.prevToken(node.firstToken()), Space.None); // ! + return renderExpression(allocator, stream, tree, node, space); }, .Invalid => unreachable, } @@ -1653,11 +1594,11 @@ fn renderExpression( const anyframe_type = @fieldParentPtr(ast.Node.AnyFrameType, "base", base); if (anyframe_type.result) |result| { - try renderToken(tree, stream, anyframe_type.anyframe_token, indent, start_col, Space.None); // anyframe - try renderToken(tree, stream, result.arrow_token, indent, start_col, Space.None); // -> - return renderExpression(allocator, stream, tree, indent, start_col, result.return_type, space); + try renderToken(tree, stream, anyframe_type.anyframe_token, Space.None); // anyframe + try renderToken(tree, stream, result.arrow_token, Space.None); // -> + return renderExpression(allocator, stream, tree, result.return_type, space); } else { - return renderToken(tree, stream, anyframe_type.anyframe_token, indent, start_col, space); // anyframe + return renderToken(tree, stream, anyframe_type.anyframe_token, space); // anyframe } }, @@ -1666,38 +1607,38 @@ fn renderExpression( .Switch => { const switch_node = @fieldParentPtr(ast.Node.Switch, "base", base); - try renderToken(tree, stream, switch_node.switch_token, indent, start_col, Space.Space); // switch - try renderToken(tree, stream, tree.nextToken(switch_node.switch_token), indent, start_col, Space.None); // ( + try renderToken(tree, stream, switch_node.switch_token, Space.Space); // switch + try renderToken(tree, stream, 
tree.nextToken(switch_node.switch_token), Space.None); // ( const rparen = tree.nextToken(switch_node.expr.lastToken()); const lbrace = tree.nextToken(rparen); if (switch_node.cases_len == 0) { - try renderExpression(allocator, stream, tree, indent, start_col, switch_node.expr, Space.None); - try renderToken(tree, stream, rparen, indent, start_col, Space.Space); // ) - try renderToken(tree, stream, lbrace, indent, start_col, Space.None); // { - return renderToken(tree, stream, switch_node.rbrace, indent, start_col, space); // } + try renderExpression(allocator, stream, tree, switch_node.expr, Space.None); + try renderToken(tree, stream, rparen, Space.Space); // ) + try renderToken(tree, stream, lbrace, Space.None); // { + return renderToken(tree, stream, switch_node.rbrace, space); // } } - try renderExpression(allocator, stream, tree, indent, start_col, switch_node.expr, Space.None); + try renderExpression(allocator, stream, tree, switch_node.expr, Space.None); + try renderToken(tree, stream, rparen, Space.Space); // ) - const new_indent = indent + indent_delta; + { + stream.pushIndentNextLine(); + defer stream.popIndent(); + try renderToken(tree, stream, lbrace, Space.Newline); // { - try renderToken(tree, stream, rparen, indent, start_col, Space.Space); // ) - try renderToken(tree, stream, lbrace, new_indent, start_col, Space.Newline); // { + const cases = switch_node.cases(); + for (cases) |node, i| { + try renderExpression(allocator, stream, tree, node, Space.Comma); - const cases = switch_node.cases(); - for (cases) |node, i| { - try stream.writeByteNTimes(' ', new_indent); - try renderExpression(allocator, stream, tree, new_indent, start_col, node, Space.Comma); - - if (i + 1 < cases.len) { - try renderExtraNewline(tree, stream, start_col, cases[i + 1]); + if (i + 1 < cases.len) { + try renderExtraNewline(tree, stream, cases[i + 1]); + } } } - try stream.writeByteNTimes(' ', indent); - return renderToken(tree, stream, switch_node.rbrace, indent, start_col, 
space); // } + return renderToken(tree, stream, switch_node.rbrace, space); // } }, .SwitchCase => { @@ -1714,43 +1655,41 @@ fn renderExpression( const items = switch_case.items(); for (items) |node, i| { if (i + 1 < items.len) { - try renderExpression(allocator, stream, tree, indent, start_col, node, Space.None); + try renderExpression(allocator, stream, tree, node, Space.None); const comma_token = tree.nextToken(node.lastToken()); - try renderToken(tree, stream, comma_token, indent, start_col, Space.Space); // , - try renderExtraNewline(tree, stream, start_col, items[i + 1]); + try renderToken(tree, stream, comma_token, Space.Space); // , + try renderExtraNewline(tree, stream, items[i + 1]); } else { - try renderExpression(allocator, stream, tree, indent, start_col, node, Space.Space); + try renderExpression(allocator, stream, tree, node, Space.Space); } } } else { const items = switch_case.items(); for (items) |node, i| { if (i + 1 < items.len) { - try renderExpression(allocator, stream, tree, indent, start_col, node, Space.None); + try renderExpression(allocator, stream, tree, node, Space.None); const comma_token = tree.nextToken(node.lastToken()); - try renderToken(tree, stream, comma_token, indent, start_col, Space.Newline); // , - try renderExtraNewline(tree, stream, start_col, items[i + 1]); - try stream.writeByteNTimes(' ', indent); + try renderToken(tree, stream, comma_token, Space.Newline); // , + try renderExtraNewline(tree, stream, items[i + 1]); } else { - try renderExpression(allocator, stream, tree, indent, start_col, node, Space.Comma); - try stream.writeByteNTimes(' ', indent); + try renderExpression(allocator, stream, tree, node, Space.Comma); } } } - try renderToken(tree, stream, switch_case.arrow_token, indent, start_col, Space.Space); // => + try renderToken(tree, stream, switch_case.arrow_token, Space.Space); // => if (switch_case.payload) |payload| { - try renderExpression(allocator, stream, tree, indent, start_col, payload, Space.Space); + 
try renderExpression(allocator, stream, tree, payload, Space.Space); } - return renderExpression(allocator, stream, tree, indent, start_col, switch_case.expr, space); + return renderExpression(allocator, stream, tree, switch_case.expr, space); }, .SwitchElse => { const switch_else = @fieldParentPtr(ast.Node.SwitchElse, "base", base); - return renderToken(tree, stream, switch_else.token, indent, start_col, space); + return renderToken(tree, stream, switch_else.token, space); }, .Else => { const else_node = @fieldParentPtr(ast.Node.Else, "base", base); @@ -1759,37 +1698,37 @@ fn renderExpression( const same_line = body_is_block or tree.tokensOnSameLine(else_node.else_token, else_node.body.lastToken()); const after_else_space = if (same_line or else_node.payload != null) Space.Space else Space.Newline; - try renderToken(tree, stream, else_node.else_token, indent, start_col, after_else_space); + try renderToken(tree, stream, else_node.else_token, after_else_space); if (else_node.payload) |payload| { const payload_space = if (same_line) Space.Space else Space.Newline; - try renderExpression(allocator, stream, tree, indent, start_col, payload, payload_space); + try renderExpression(allocator, stream, tree, payload, payload_space); } if (same_line) { - return renderExpression(allocator, stream, tree, indent, start_col, else_node.body, space); + return renderExpression(allocator, stream, tree, else_node.body, space); + } else { + stream.pushIndent(); + defer stream.popIndent(); + return renderExpression(allocator, stream, tree, else_node.body, space); } - - try stream.writeByteNTimes(' ', indent + indent_delta); - start_col.* = indent + indent_delta; - return renderExpression(allocator, stream, tree, indent, start_col, else_node.body, space); }, .While => { const while_node = @fieldParentPtr(ast.Node.While, "base", base); if (while_node.label) |label| { - try renderToken(tree, stream, label, indent, start_col, Space.None); // label - try renderToken(tree, stream, 
tree.nextToken(label), indent, start_col, Space.Space); // : + try renderToken(tree, stream, label, Space.None); // label + try renderToken(tree, stream, tree.nextToken(label), Space.Space); // : } if (while_node.inline_token) |inline_token| { - try renderToken(tree, stream, inline_token, indent, start_col, Space.Space); // inline + try renderToken(tree, stream, inline_token, Space.Space); // inline } - try renderToken(tree, stream, while_node.while_token, indent, start_col, Space.Space); // while - try renderToken(tree, stream, tree.nextToken(while_node.while_token), indent, start_col, Space.None); // ( - try renderExpression(allocator, stream, tree, indent, start_col, while_node.condition, Space.None); + try renderToken(tree, stream, while_node.while_token, Space.Space); // while + try renderToken(tree, stream, tree.nextToken(while_node.while_token), Space.None); // ( + try renderExpression(allocator, stream, tree, while_node.condition, Space.None); const cond_rparen = tree.nextToken(while_node.condition.lastToken()); @@ -1811,12 +1750,12 @@ fn renderExpression( { const rparen_space = if (while_node.payload != null or while_node.continue_expr != null) Space.Space else block_start_space; - try renderToken(tree, stream, cond_rparen, indent, start_col, rparen_space); // ) + try renderToken(tree, stream, cond_rparen, rparen_space); // ) } if (while_node.payload) |payload| { - const payload_space = if (while_node.continue_expr != null) Space.Space else block_start_space; - try renderExpression(allocator, stream, tree, indent, start_col, payload, payload_space); + const payload_space = Space.Space; //if (while_node.continue_expr != null) Space.Space else block_start_space; + try renderExpression(allocator, stream, tree, payload, payload_space); } if (while_node.continue_expr) |continue_expr| { @@ -1824,29 +1763,22 @@ fn renderExpression( const lparen = tree.prevToken(continue_expr.firstToken()); const colon = tree.prevToken(lparen); - try renderToken(tree, stream, 
colon, indent, start_col, Space.Space); // : - try renderToken(tree, stream, lparen, indent, start_col, Space.None); // ( + try renderToken(tree, stream, colon, Space.Space); // : + try renderToken(tree, stream, lparen, Space.None); // ( - try renderExpression(allocator, stream, tree, indent, start_col, continue_expr, Space.None); + try renderExpression(allocator, stream, tree, continue_expr, Space.None); - try renderToken(tree, stream, rparen, indent, start_col, block_start_space); // ) + try renderToken(tree, stream, rparen, block_start_space); // ) } - var new_indent = indent; - if (block_start_space == Space.Newline) { - new_indent += indent_delta; - try stream.writeByteNTimes(' ', new_indent); - start_col.* = new_indent; + { + if (!body_is_block) stream.pushIndent(); + defer if (!body_is_block) stream.popIndent(); + try renderExpression(allocator, stream, tree, while_node.body, after_body_space); } - try renderExpression(allocator, stream, tree, indent, start_col, while_node.body, after_body_space); - if (while_node.@"else") |@"else"| { - if (after_body_space == Space.Newline) { - try stream.writeByteNTimes(' ', indent); - start_col.* = indent; - } - return renderExpression(allocator, stream, tree, indent, start_col, &@"else".base, space); + return renderExpression(allocator, stream, tree, &@"else".base, space); } }, @@ -1854,17 +1786,17 @@ fn renderExpression( const for_node = @fieldParentPtr(ast.Node.For, "base", base); if (for_node.label) |label| { - try renderToken(tree, stream, label, indent, start_col, Space.None); // label - try renderToken(tree, stream, tree.nextToken(label), indent, start_col, Space.Space); // : + try renderToken(tree, stream, label, Space.None); // label + try renderToken(tree, stream, tree.nextToken(label), Space.Space); // : } if (for_node.inline_token) |inline_token| { - try renderToken(tree, stream, inline_token, indent, start_col, Space.Space); // inline + try renderToken(tree, stream, inline_token, Space.Space); // inline } - 
try renderToken(tree, stream, for_node.for_token, indent, start_col, Space.Space); // for - try renderToken(tree, stream, tree.nextToken(for_node.for_token), indent, start_col, Space.None); // ( - try renderExpression(allocator, stream, tree, indent, start_col, for_node.array_expr, Space.None); + try renderToken(tree, stream, for_node.for_token, Space.Space); // for + try renderToken(tree, stream, tree.nextToken(for_node.for_token), Space.None); // ( + try renderExpression(allocator, stream, tree, for_node.array_expr, Space.None); const rparen = tree.nextToken(for_node.array_expr.lastToken()); @@ -1872,10 +1804,10 @@ fn renderExpression( const src_one_line_to_body = !body_is_block and tree.tokensOnSameLine(rparen, for_node.body.firstToken()); const body_on_same_line = body_is_block or src_one_line_to_body; - try renderToken(tree, stream, rparen, indent, start_col, Space.Space); // ) + try renderToken(tree, stream, rparen, Space.Space); // ) const space_after_payload = if (body_on_same_line) Space.Space else Space.Newline; - try renderExpression(allocator, stream, tree, indent, start_col, for_node.payload, space_after_payload); // |x| + try renderExpression(allocator, stream, tree, for_node.payload, space_after_payload); // |x| const space_after_body = blk: { if (for_node.@"else") |@"else"| { @@ -1890,13 +1822,14 @@ fn renderExpression( } }; - const body_indent = if (body_on_same_line) indent else indent + indent_delta; - if (!body_on_same_line) try stream.writeByteNTimes(' ', body_indent); - try renderExpression(allocator, stream, tree, body_indent, start_col, for_node.body, space_after_body); // { body } + { + if (!body_on_same_line) stream.pushIndent(); + defer if (!body_on_same_line) stream.popIndent(); + try renderExpression(allocator, stream, tree, for_node.body, space_after_body); // { body } + } if (for_node.@"else") |@"else"| { - if (space_after_body == Space.Newline) try stream.writeByteNTimes(' ', indent); - return renderExpression(allocator, stream, 
tree, indent, start_col, &@"else".base, space); // else + return renderExpression(allocator, stream, tree, &@"else".base, space); // else } }, @@ -1906,29 +1839,29 @@ fn renderExpression( const lparen = tree.nextToken(if_node.if_token); const rparen = tree.nextToken(if_node.condition.lastToken()); - try renderToken(tree, stream, if_node.if_token, indent, start_col, Space.Space); // if - try renderToken(tree, stream, lparen, indent, start_col, Space.None); // ( + try renderToken(tree, stream, if_node.if_token, Space.Space); // if + try renderToken(tree, stream, lparen, Space.None); // ( - try renderExpression(allocator, stream, tree, indent, start_col, if_node.condition, Space.None); // condition + try renderExpression(allocator, stream, tree, if_node.condition, Space.None); // condition const body_is_if_block = if_node.body.tag == .If; const body_is_block = nodeIsBlock(if_node.body); if (body_is_if_block) { - try renderExtraNewline(tree, stream, start_col, if_node.body); + try renderExtraNewline(tree, stream, if_node.body); } else if (body_is_block) { const after_rparen_space = if (if_node.payload == null) Space.BlockStart else Space.Space; - try renderToken(tree, stream, rparen, indent, start_col, after_rparen_space); // ) + try renderToken(tree, stream, rparen, after_rparen_space); // ) if (if_node.payload) |payload| { - try renderExpression(allocator, stream, tree, indent, start_col, payload, Space.BlockStart); // |x| + try renderExpression(allocator, stream, tree, payload, Space.BlockStart); // |x| } if (if_node.@"else") |@"else"| { - try renderExpression(allocator, stream, tree, indent, start_col, if_node.body, Space.SpaceOrOutdent); - return renderExpression(allocator, stream, tree, indent, start_col, &@"else".base, space); + try renderExpression(allocator, stream, tree, if_node.body, Space.SpaceOrOutdent); + return renderExpression(allocator, stream, tree, &@"else".base, space); } else { - return renderExpression(allocator, stream, tree, indent, start_col, 
if_node.body, space); + return renderExpression(allocator, stream, tree, if_node.body, space); } } @@ -1936,186 +1869,181 @@ fn renderExpression( if (src_has_newline) { const after_rparen_space = if (if_node.payload == null) Space.Newline else Space.Space; - try renderToken(tree, stream, rparen, indent, start_col, after_rparen_space); // ) + try renderToken(tree, stream, rparen, after_rparen_space); // ) if (if_node.payload) |payload| { - try renderExpression(allocator, stream, tree, indent, start_col, payload, Space.Newline); + try renderExpression(allocator, stream, tree, payload, Space.Newline); } - const new_indent = indent + indent_delta; - try stream.writeByteNTimes(' ', new_indent); - if (if_node.@"else") |@"else"| { const else_is_block = nodeIsBlock(@"else".body); - try renderExpression(allocator, stream, tree, new_indent, start_col, if_node.body, Space.Newline); - try stream.writeByteNTimes(' ', indent); + + { + stream.pushIndent(); + defer stream.popIndent(); + try renderExpression(allocator, stream, tree, if_node.body, Space.Newline); + } if (else_is_block) { - try renderToken(tree, stream, @"else".else_token, indent, start_col, Space.Space); // else + try renderToken(tree, stream, @"else".else_token, Space.Space); // else if (@"else".payload) |payload| { - try renderExpression(allocator, stream, tree, indent, start_col, payload, Space.Space); + try renderExpression(allocator, stream, tree, payload, Space.Space); } - return renderExpression(allocator, stream, tree, indent, start_col, @"else".body, space); + return renderExpression(allocator, stream, tree, @"else".body, space); } else { const after_else_space = if (@"else".payload == null) Space.Newline else Space.Space; - try renderToken(tree, stream, @"else".else_token, indent, start_col, after_else_space); // else + try renderToken(tree, stream, @"else".else_token, after_else_space); // else if (@"else".payload) |payload| { - try renderExpression(allocator, stream, tree, indent, start_col, payload, 
Space.Newline); + try renderExpression(allocator, stream, tree, payload, Space.Newline); } - try stream.writeByteNTimes(' ', new_indent); - return renderExpression(allocator, stream, tree, new_indent, start_col, @"else".body, space); + stream.pushIndent(); + defer stream.popIndent(); + return renderExpression(allocator, stream, tree, @"else".body, space); } } else { - return renderExpression(allocator, stream, tree, new_indent, start_col, if_node.body, space); + stream.pushIndent(); + defer stream.popIndent(); + return renderExpression(allocator, stream, tree, if_node.body, space); } } - try renderToken(tree, stream, rparen, indent, start_col, Space.Space); // ) + // Single line if statement + + try renderToken(tree, stream, rparen, Space.Space); // ) if (if_node.payload) |payload| { - try renderExpression(allocator, stream, tree, indent, start_col, payload, Space.Space); + try renderExpression(allocator, stream, tree, payload, Space.Space); } if (if_node.@"else") |@"else"| { - try renderExpression(allocator, stream, tree, indent, start_col, if_node.body, Space.Space); - try renderToken(tree, stream, @"else".else_token, indent, start_col, Space.Space); + try renderExpression(allocator, stream, tree, if_node.body, Space.Space); + try renderToken(tree, stream, @"else".else_token, Space.Space); if (@"else".payload) |payload| { - try renderExpression(allocator, stream, tree, indent, start_col, payload, Space.Space); + try renderExpression(allocator, stream, tree, payload, Space.Space); } - return renderExpression(allocator, stream, tree, indent, start_col, @"else".body, space); + return renderExpression(allocator, stream, tree, @"else".body, space); } else { - return renderExpression(allocator, stream, tree, indent, start_col, if_node.body, space); + return renderExpression(allocator, stream, tree, if_node.body, space); } }, .Asm => { const asm_node = @fieldParentPtr(ast.Node.Asm, "base", base); - try renderToken(tree, stream, asm_node.asm_token, indent, start_col, 
Space.Space); // asm + try renderToken(tree, stream, asm_node.asm_token, Space.Space); // asm if (asm_node.volatile_token) |volatile_token| { - try renderToken(tree, stream, volatile_token, indent, start_col, Space.Space); // volatile - try renderToken(tree, stream, tree.nextToken(volatile_token), indent, start_col, Space.None); // ( + try renderToken(tree, stream, volatile_token, Space.Space); // volatile + try renderToken(tree, stream, tree.nextToken(volatile_token), Space.None); // ( } else { - try renderToken(tree, stream, tree.nextToken(asm_node.asm_token), indent, start_col, Space.None); // ( + try renderToken(tree, stream, tree.nextToken(asm_node.asm_token), Space.None); // ( } - if (asm_node.outputs.len == 0 and asm_node.inputs.len == 0 and asm_node.clobbers.len == 0) { - try renderExpression(allocator, stream, tree, indent, start_col, asm_node.template, Space.None); - return renderToken(tree, stream, asm_node.rparen, indent, start_col, space); - } + asmblk: { + stream.pushIndent(); + defer stream.popIndent(); - try renderExpression(allocator, stream, tree, indent, start_col, asm_node.template, Space.Newline); + if (asm_node.outputs.len == 0 and asm_node.inputs.len == 0 and asm_node.clobbers.len == 0) { + try renderExpression(allocator, stream, tree, asm_node.template, Space.None); + break :asmblk; + } - const indent_once = indent + indent_delta; + try renderExpression(allocator, stream, tree, asm_node.template, Space.Newline); - if (asm_node.template.tag == .MultilineStringLiteral) { - // After rendering a multiline string literal the cursor is - // already offset by indent - try stream.writeByteNTimes(' ', indent_delta); - } else { - try stream.writeByteNTimes(' ', indent_once); - } + const colon1 = tree.nextToken(asm_node.template.lastToken()); - const colon1 = tree.nextToken(asm_node.template.lastToken()); - const indent_extra = indent_once + 2; + const colon2 = if (asm_node.outputs.len == 0) blk: { + try renderToken(tree, stream, colon1, 
Space.Newline); // : - const colon2 = if (asm_node.outputs.len == 0) blk: { - try renderToken(tree, stream, colon1, indent, start_col, Space.Newline); // : - try stream.writeByteNTimes(' ', indent_once); + break :blk tree.nextToken(colon1); + } else blk: { + try renderToken(tree, stream, colon1, Space.Space); // : - break :blk tree.nextToken(colon1); - } else blk: { - try renderToken(tree, stream, colon1, indent, start_col, Space.Space); // : + stream.pushIndentN(2); + defer stream.popIndent(); - for (asm_node.outputs) |*asm_output, i| { - if (i + 1 < asm_node.outputs.len) { - const next_asm_output = asm_node.outputs[i + 1]; - try renderAsmOutput(allocator, stream, tree, indent_extra, start_col, asm_output, Space.None); + for (asm_node.outputs) |*asm_output, i| { + if (i + 1 < asm_node.outputs.len) { + const next_asm_output = asm_node.outputs[i + 1]; + try renderAsmOutput(allocator, stream, tree, asm_output, Space.None); - const comma = tree.prevToken(next_asm_output.firstToken()); - try renderToken(tree, stream, comma, indent_extra, start_col, Space.Newline); // , - try renderExtraNewlineToken(tree, stream, start_col, next_asm_output.firstToken()); + const comma = tree.prevToken(next_asm_output.firstToken()); + try renderToken(tree, stream, comma, Space.Newline); // , + try renderExtraNewlineToken(tree, stream, next_asm_output.firstToken()); + } else if (asm_node.inputs.len == 0 and asm_node.clobbers.len == 0) { + try renderAsmOutput(allocator, stream, tree, asm_output, Space.Newline); + break :asmblk; + } else { + try renderAsmOutput(allocator, stream, tree, asm_output, Space.Newline); + const comma_or_colon = tree.nextToken(asm_output.lastToken()); + break :blk switch (tree.token_ids[comma_or_colon]) { + .Comma => tree.nextToken(comma_or_colon), + else => comma_or_colon, + }; + } + } + unreachable; + }; - try stream.writeByteNTimes(' ', indent_extra); - } else if (asm_node.inputs.len == 0 and asm_node.clobbers.len == 0) { - try renderAsmOutput(allocator, stream, 
tree, indent_extra, start_col, asm_output, Space.Newline); - try stream.writeByteNTimes(' ', indent); - return renderToken(tree, stream, asm_node.rparen, indent, start_col, space); + const colon3 = if (asm_node.inputs.len == 0) blk: { + try renderToken(tree, stream, colon2, Space.Newline); // : + break :blk tree.nextToken(colon2); + } else blk: { + try renderToken(tree, stream, colon2, Space.Space); // : + stream.pushIndentN(2); + defer stream.popIndent(); + for (asm_node.inputs) |*asm_input, i| { + if (i + 1 < asm_node.inputs.len) { + const next_asm_input = &asm_node.inputs[i + 1]; + try renderAsmInput(allocator, stream, tree, asm_input, Space.None); + + const comma = tree.prevToken(next_asm_input.firstToken()); + try renderToken(tree, stream, comma, Space.Newline); // , + try renderExtraNewlineToken(tree, stream, next_asm_input.firstToken()); + } else if (asm_node.clobbers.len == 0) { + try renderAsmInput(allocator, stream, tree, asm_input, Space.Newline); + break :asmblk; + } else { + try renderAsmInput(allocator, stream, tree, asm_input, Space.Newline); + const comma_or_colon = tree.nextToken(asm_input.lastToken()); + break :blk switch (tree.token_ids[comma_or_colon]) { + .Comma => tree.nextToken(comma_or_colon), + else => comma_or_colon, + }; + } + } + unreachable; + }; + + try renderToken(tree, stream, colon3, Space.Space); // : + stream.pushIndentN(2); + defer stream.popIndent(); + for (asm_node.clobbers) |clobber_node, i| { + if (i + 1 >= asm_node.clobbers.len) { + try renderExpression(allocator, stream, tree, clobber_node, Space.Newline); + break :asmblk; } else { - try renderAsmOutput(allocator, stream, tree, indent_extra, start_col, asm_output, Space.Newline); - try stream.writeByteNTimes(' ', indent_once); - const comma_or_colon = tree.nextToken(asm_output.lastToken()); - break :blk switch (tree.token_ids[comma_or_colon]) { - .Comma => tree.nextToken(comma_or_colon), - else => comma_or_colon, - }; + try renderExpression(allocator, stream, tree, 
clobber_node, Space.None); + const comma = tree.nextToken(clobber_node.lastToken()); + try renderToken(tree, stream, comma, Space.Space); // , } } - unreachable; - }; - - const colon3 = if (asm_node.inputs.len == 0) blk: { - try renderToken(tree, stream, colon2, indent, start_col, Space.Newline); // : - try stream.writeByteNTimes(' ', indent_once); - - break :blk tree.nextToken(colon2); - } else blk: { - try renderToken(tree, stream, colon2, indent, start_col, Space.Space); // : - - for (asm_node.inputs) |*asm_input, i| { - if (i + 1 < asm_node.inputs.len) { - const next_asm_input = &asm_node.inputs[i + 1]; - try renderAsmInput(allocator, stream, tree, indent_extra, start_col, asm_input, Space.None); - - const comma = tree.prevToken(next_asm_input.firstToken()); - try renderToken(tree, stream, comma, indent_extra, start_col, Space.Newline); // , - try renderExtraNewlineToken(tree, stream, start_col, next_asm_input.firstToken()); - - try stream.writeByteNTimes(' ', indent_extra); - } else if (asm_node.clobbers.len == 0) { - try renderAsmInput(allocator, stream, tree, indent_extra, start_col, asm_input, Space.Newline); - try stream.writeByteNTimes(' ', indent); - return renderToken(tree, stream, asm_node.rparen, indent, start_col, space); // ) - } else { - try renderAsmInput(allocator, stream, tree, indent_extra, start_col, asm_input, Space.Newline); - try stream.writeByteNTimes(' ', indent_once); - const comma_or_colon = tree.nextToken(asm_input.lastToken()); - break :blk switch (tree.token_ids[comma_or_colon]) { - .Comma => tree.nextToken(comma_or_colon), - else => comma_or_colon, - }; - } - } - unreachable; - }; - - try renderToken(tree, stream, colon3, indent, start_col, Space.Space); // : - - for (asm_node.clobbers) |clobber_node, i| { - if (i + 1 >= asm_node.clobbers.len) { - try renderExpression(allocator, stream, tree, indent_extra, start_col, clobber_node, Space.Newline); - try stream.writeByteNTimes(' ', indent); - return renderToken(tree, stream, 
asm_node.rparen, indent, start_col, space); - } else { - try renderExpression(allocator, stream, tree, indent_extra, start_col, clobber_node, Space.None); - const comma = tree.nextToken(clobber_node.lastToken()); - try renderToken(tree, stream, comma, indent_once, start_col, Space.Space); // , - } } + + return renderToken(tree, stream, asm_node.rparen, space); }, .EnumLiteral => { const enum_literal = @fieldParentPtr(ast.Node.EnumLiteral, "base", base); - try renderToken(tree, stream, enum_literal.dot, indent, start_col, Space.None); // . - return renderToken(tree, stream, enum_literal.name, indent, start_col, space); // name + try renderToken(tree, stream, enum_literal.dot, Space.None); // . + return renderToken(tree, stream, enum_literal.name, space); // name }, .ContainerField, @@ -2131,116 +2059,113 @@ fn renderArrayType( allocator: *mem.Allocator, stream: anytype, tree: *ast.Tree, - indent: usize, - start_col: *usize, lbracket: ast.TokenIndex, rhs: *ast.Node, len_expr: *ast.Node, opt_sentinel: ?*ast.Node, space: Space, -) (@TypeOf(stream).Error || Error)!void { +) (@TypeOf(stream.*).Error || Error)!void { const rbracket = tree.nextToken(if (opt_sentinel) |sentinel| sentinel.lastToken() else len_expr.lastToken()); - try renderToken(tree, stream, lbracket, indent, start_col, Space.None); // [ - const starts_with_comment = tree.token_ids[lbracket + 1] == .LineComment; const ends_with_comment = tree.token_ids[rbracket - 1] == .LineComment; - const new_indent = if (ends_with_comment) indent + indent_delta else indent; const new_space = if (ends_with_comment) Space.Newline else Space.None; - try renderExpression(allocator, stream, tree, new_indent, start_col, len_expr, new_space); - if (starts_with_comment) { - try stream.writeByte('\n'); - } - if (ends_with_comment or starts_with_comment) { - try stream.writeByteNTimes(' ', indent); - } - if (opt_sentinel) |sentinel| { - const colon_token = tree.prevToken(sentinel.firstToken()); - try renderToken(tree, stream, 
colon_token, indent, start_col, Space.None); // : - try renderExpression(allocator, stream, tree, indent, start_col, sentinel, Space.None); - } - try renderToken(tree, stream, rbracket, indent, start_col, Space.None); // ] + { + const do_indent = (starts_with_comment or ends_with_comment); + if (do_indent) stream.pushIndent(); + defer if (do_indent) stream.popIndent(); - return renderExpression(allocator, stream, tree, indent, start_col, rhs, space); + try renderToken(tree, stream, lbracket, Space.None); // [ + try renderExpression(allocator, stream, tree, len_expr, new_space); + + if (starts_with_comment) { + try stream.maybeInsertNewline(); + } + if (opt_sentinel) |sentinel| { + const colon_token = tree.prevToken(sentinel.firstToken()); + try renderToken(tree, stream, colon_token, Space.None); // : + try renderExpression(allocator, stream, tree, sentinel, Space.None); + } + if (starts_with_comment) { + try stream.maybeInsertNewline(); + } + } + try renderToken(tree, stream, rbracket, Space.None); // ] + + return renderExpression(allocator, stream, tree, rhs, space); } fn renderAsmOutput( allocator: *mem.Allocator, stream: anytype, tree: *ast.Tree, - indent: usize, - start_col: *usize, asm_output: *const ast.Node.Asm.Output, space: Space, -) (@TypeOf(stream).Error || Error)!void { - try stream.writeAll("["); - try renderExpression(allocator, stream, tree, indent, start_col, asm_output.symbolic_name, Space.None); - try stream.writeAll("] "); - try renderExpression(allocator, stream, tree, indent, start_col, asm_output.constraint, Space.None); - try stream.writeAll(" ("); +) (@TypeOf(stream.*).Error || Error)!void { + try stream.writer().writeAll("["); + try renderExpression(allocator, stream, tree, asm_output.symbolic_name, Space.None); + try stream.writer().writeAll("] "); + try renderExpression(allocator, stream, tree, asm_output.constraint, Space.None); + try stream.writer().writeAll(" ("); switch (asm_output.kind) { ast.Node.Asm.Output.Kind.Variable => 
|variable_name| { - try renderExpression(allocator, stream, tree, indent, start_col, &variable_name.base, Space.None); + try renderExpression(allocator, stream, tree, &variable_name.base, Space.None); }, ast.Node.Asm.Output.Kind.Return => |return_type| { - try stream.writeAll("-> "); - try renderExpression(allocator, stream, tree, indent, start_col, return_type, Space.None); + try stream.writer().writeAll("-> "); + try renderExpression(allocator, stream, tree, return_type, Space.None); }, } - return renderToken(tree, stream, asm_output.lastToken(), indent, start_col, space); // ) + return renderToken(tree, stream, asm_output.lastToken(), space); // ) } fn renderAsmInput( allocator: *mem.Allocator, stream: anytype, tree: *ast.Tree, - indent: usize, - start_col: *usize, asm_input: *const ast.Node.Asm.Input, space: Space, -) (@TypeOf(stream).Error || Error)!void { - try stream.writeAll("["); - try renderExpression(allocator, stream, tree, indent, start_col, asm_input.symbolic_name, Space.None); - try stream.writeAll("] "); - try renderExpression(allocator, stream, tree, indent, start_col, asm_input.constraint, Space.None); - try stream.writeAll(" ("); - try renderExpression(allocator, stream, tree, indent, start_col, asm_input.expr, Space.None); - return renderToken(tree, stream, asm_input.lastToken(), indent, start_col, space); // ) +) (@TypeOf(stream.*).Error || Error)!void { + try stream.writer().writeAll("["); + try renderExpression(allocator, stream, tree, asm_input.symbolic_name, Space.None); + try stream.writer().writeAll("] "); + try renderExpression(allocator, stream, tree, asm_input.constraint, Space.None); + try stream.writer().writeAll(" ("); + try renderExpression(allocator, stream, tree, asm_input.expr, Space.None); + return renderToken(tree, stream, asm_input.lastToken(), space); // ) } fn renderVarDecl( allocator: *mem.Allocator, stream: anytype, tree: *ast.Tree, - indent: usize, - start_col: *usize, var_decl: *ast.Node.VarDecl, -) 
(@TypeOf(stream).Error || Error)!void { +) (@TypeOf(stream.*).Error || Error)!void { if (var_decl.getTrailer("visib_token")) |visib_token| { - try renderToken(tree, stream, visib_token, indent, start_col, Space.Space); // pub + try renderToken(tree, stream, visib_token, Space.Space); // pub } if (var_decl.getTrailer("extern_export_token")) |extern_export_token| { - try renderToken(tree, stream, extern_export_token, indent, start_col, Space.Space); // extern + try renderToken(tree, stream, extern_export_token, Space.Space); // extern if (var_decl.getTrailer("lib_name")) |lib_name| { - try renderExpression(allocator, stream, tree, indent, start_col, lib_name, Space.Space); // "lib" + try renderExpression(allocator, stream, tree, lib_name, Space.Space); // "lib" } } if (var_decl.getTrailer("comptime_token")) |comptime_token| { - try renderToken(tree, stream, comptime_token, indent, start_col, Space.Space); // comptime + try renderToken(tree, stream, comptime_token, Space.Space); // comptime } if (var_decl.getTrailer("thread_local_token")) |thread_local_token| { - try renderToken(tree, stream, thread_local_token, indent, start_col, Space.Space); // threadlocal + try renderToken(tree, stream, thread_local_token, Space.Space); // threadlocal } - try renderToken(tree, stream, var_decl.mut_token, indent, start_col, Space.Space); // var + try renderToken(tree, stream, var_decl.mut_token, Space.Space); // var const name_space = if (var_decl.getTrailer("type_node") == null and (var_decl.getTrailer("align_node") != null or @@ -2249,70 +2174,69 @@ fn renderVarDecl( Space.Space else Space.None; - try renderToken(tree, stream, var_decl.name_token, indent, start_col, name_space); + try renderToken(tree, stream, var_decl.name_token, name_space); if (var_decl.getTrailer("type_node")) |type_node| { - try renderToken(tree, stream, tree.nextToken(var_decl.name_token), indent, start_col, Space.Space); + try renderToken(tree, stream, tree.nextToken(var_decl.name_token), Space.Space); 
const s = if (var_decl.getTrailer("align_node") != null or var_decl.getTrailer("section_node") != null or var_decl.getTrailer("init_node") != null) Space.Space else Space.None; - try renderExpression(allocator, stream, tree, indent, start_col, type_node, s); + try renderExpression(allocator, stream, tree, type_node, s); } if (var_decl.getTrailer("align_node")) |align_node| { const lparen = tree.prevToken(align_node.firstToken()); const align_kw = tree.prevToken(lparen); const rparen = tree.nextToken(align_node.lastToken()); - try renderToken(tree, stream, align_kw, indent, start_col, Space.None); // align - try renderToken(tree, stream, lparen, indent, start_col, Space.None); // ( - try renderExpression(allocator, stream, tree, indent, start_col, align_node, Space.None); + try renderToken(tree, stream, align_kw, Space.None); // align + try renderToken(tree, stream, lparen, Space.None); // ( + try renderExpression(allocator, stream, tree, align_node, Space.None); const s = if (var_decl.getTrailer("section_node") != null or var_decl.getTrailer("init_node") != null) Space.Space else Space.None; - try renderToken(tree, stream, rparen, indent, start_col, s); // ) + try renderToken(tree, stream, rparen, s); // ) } if (var_decl.getTrailer("section_node")) |section_node| { const lparen = tree.prevToken(section_node.firstToken()); const section_kw = tree.prevToken(lparen); const rparen = tree.nextToken(section_node.lastToken()); - try renderToken(tree, stream, section_kw, indent, start_col, Space.None); // linksection - try renderToken(tree, stream, lparen, indent, start_col, Space.None); // ( - try renderExpression(allocator, stream, tree, indent, start_col, section_node, Space.None); + try renderToken(tree, stream, section_kw, Space.None); // linksection + try renderToken(tree, stream, lparen, Space.None); // ( + try renderExpression(allocator, stream, tree, section_node, Space.None); const s = if (var_decl.getTrailer("init_node") != null) Space.Space else Space.None; - 
try renderToken(tree, stream, rparen, indent, start_col, s); // ) + try renderToken(tree, stream, rparen, s); // ) } if (var_decl.getTrailer("init_node")) |init_node| { const s = if (init_node.tag == .MultilineStringLiteral) Space.None else Space.Space; - try renderToken(tree, stream, var_decl.getTrailer("eq_token").?, indent, start_col, s); // = - try renderExpression(allocator, stream, tree, indent, start_col, init_node, Space.None); + try renderToken(tree, stream, var_decl.getTrailer("eq_token").?, s); // = + stream.pushIndentOneShot(); + try renderExpression(allocator, stream, tree, init_node, Space.None); } - try renderToken(tree, stream, var_decl.semicolon_token, indent, start_col, Space.Newline); + try renderToken(tree, stream, var_decl.semicolon_token, Space.Newline); } fn renderParamDecl( allocator: *mem.Allocator, stream: anytype, tree: *ast.Tree, - indent: usize, - start_col: *usize, param_decl: ast.Node.FnProto.ParamDecl, space: Space, -) (@TypeOf(stream).Error || Error)!void { - try renderDocComments(tree, stream, param_decl, param_decl.doc_comments, indent, start_col); +) (@TypeOf(stream.*).Error || Error)!void { + try renderDocComments(tree, stream, param_decl, param_decl.doc_comments); if (param_decl.comptime_token) |comptime_token| { - try renderToken(tree, stream, comptime_token, indent, start_col, Space.Space); + try renderToken(tree, stream, comptime_token, Space.Space); } if (param_decl.noalias_token) |noalias_token| { - try renderToken(tree, stream, noalias_token, indent, start_col, Space.Space); + try renderToken(tree, stream, noalias_token, Space.Space); } if (param_decl.name_token) |name_token| { - try renderToken(tree, stream, name_token, indent, start_col, Space.None); - try renderToken(tree, stream, tree.nextToken(name_token), indent, start_col, Space.Space); // : + try renderToken(tree, stream, name_token, Space.None); + try renderToken(tree, stream, tree.nextToken(name_token), Space.Space); // : } switch (param_decl.param_type) { - 
.any_type, .type_expr => |node| try renderExpression(allocator, stream, tree, indent, start_col, node, space), + .any_type, .type_expr => |node| try renderExpression(allocator, stream, tree, node, space), } } @@ -2320,24 +2244,22 @@ fn renderStatement( allocator: *mem.Allocator, stream: anytype, tree: *ast.Tree, - indent: usize, - start_col: *usize, base: *ast.Node, -) (@TypeOf(stream).Error || Error)!void { +) (@TypeOf(stream.*).Error || Error)!void { switch (base.tag) { .VarDecl => { const var_decl = @fieldParentPtr(ast.Node.VarDecl, "base", base); - try renderVarDecl(allocator, stream, tree, indent, start_col, var_decl); + try renderVarDecl(allocator, stream, tree, var_decl); }, else => { if (base.requireSemiColon()) { - try renderExpression(allocator, stream, tree, indent, start_col, base, Space.None); + try renderExpression(allocator, stream, tree, base, Space.None); const semicolon_index = tree.nextToken(base.lastToken()); assert(tree.token_ids[semicolon_index] == .Semicolon); - try renderToken(tree, stream, semicolon_index, indent, start_col, Space.Newline); + try renderToken(tree, stream, semicolon_index, Space.Newline); } else { - try renderExpression(allocator, stream, tree, indent, start_col, base, Space.Newline); + try renderExpression(allocator, stream, tree, base, Space.Newline); } }, } @@ -2358,22 +2280,17 @@ fn renderTokenOffset( tree: *ast.Tree, stream: anytype, token_index: ast.TokenIndex, - indent: usize, - start_col: *usize, space: Space, token_skip_bytes: usize, -) (@TypeOf(stream).Error || Error)!void { +) (@TypeOf(stream.*).Error || Error)!void { if (space == Space.BlockStart) { - if (start_col.* < indent + indent_delta) - return renderToken(tree, stream, token_index, indent, start_col, Space.Space); - try renderToken(tree, stream, token_index, indent, start_col, Space.Newline); - try stream.writeByteNTimes(' ', indent); - start_col.* = indent; - return; + // If placing the lbrace on the current line would cause an uggly gap then put the 
lbrace on the next line + const new_space = if (stream.isLineOverIndented()) Space.Newline else Space.Space; + return renderToken(tree, stream, token_index, new_space); } var token_loc = tree.token_locs[token_index]; - try stream.writeAll(mem.trimRight(u8, tree.tokenSliceLoc(token_loc)[token_skip_bytes..], " ")); + try stream.writer().writeAll(mem.trimRight(u8, tree.tokenSliceLoc(token_loc)[token_skip_bytes..], " ")); if (space == Space.NoComment) return; @@ -2382,20 +2299,20 @@ fn renderTokenOffset( var next_token_loc = tree.token_locs[token_index + 1]; if (space == Space.Comma) switch (next_token_id) { - .Comma => return renderToken(tree, stream, token_index + 1, indent, start_col, Space.Newline), + .Comma => return renderToken(tree, stream, token_index + 1, Space.Newline), .LineComment => { - try stream.writeAll(", "); - return renderToken(tree, stream, token_index + 1, indent, start_col, Space.Newline); + try stream.writer().writeAll(", "); + return renderToken(tree, stream, token_index + 1, Space.Newline); }, else => { if (token_index + 2 < tree.token_ids.len and tree.token_ids[token_index + 2] == .MultilineStringLiteralLine) { - try stream.writeAll(","); + try stream.writer().writeAll(","); return; } else { - try stream.writeAll(",\n"); - start_col.* = 0; + try stream.writer().writeAll(","); + try stream.insertNewline(); return; } }, @@ -2419,15 +2336,14 @@ fn renderTokenOffset( if (next_token_id == .MultilineStringLiteralLine) { return; } else { - try stream.writeAll("\n"); - start_col.* = 0; + try stream.insertNewline(); return; } }, Space.Space, Space.SpaceOrOutdent => { if (next_token_id == .MultilineStringLiteralLine) return; - try stream.writeByte(' '); + try stream.writer().writeByte(' '); return; }, Space.NoComment, Space.Comma, Space.BlockStart => unreachable, @@ -2444,8 +2360,7 @@ fn renderTokenOffset( next_token_id = tree.token_ids[token_index + offset]; next_token_loc = tree.token_locs[token_index + offset]; if (next_token_id != .LineComment) { - 
try stream.writeByte('\n'); - start_col.* = 0; + try stream.insertNewline(); return; } }, @@ -2458,7 +2373,7 @@ fn renderTokenOffset( var loc = tree.tokenLocationLoc(token_loc.end, next_token_loc); if (loc.line == 0) { - try stream.print(" {}", .{mem.trimRight(u8, tree.tokenSliceLoc(next_token_loc), " ")}); + try stream.writer().print(" {}", .{mem.trimRight(u8, tree.tokenSliceLoc(next_token_loc), " ")}); offset = 2; token_loc = next_token_loc; next_token_loc = tree.token_locs[token_index + offset]; @@ -2466,26 +2381,16 @@ fn renderTokenOffset( if (next_token_id != .LineComment) { switch (space) { Space.None, Space.Space => { - try stream.writeByte('\n'); - const after_comment_token = tree.token_ids[token_index + offset]; - const next_line_indent = switch (after_comment_token) { - .RParen, .RBrace, .RBracket => indent, - else => indent + indent_delta, - }; - try stream.writeByteNTimes(' ', next_line_indent); - start_col.* = next_line_indent; + try stream.insertNewline(); }, Space.SpaceOrOutdent => { - try stream.writeByte('\n'); - try stream.writeByteNTimes(' ', indent); - start_col.* = indent; + try stream.insertNewline(); }, Space.Newline => { if (next_token_id == .MultilineStringLiteralLine) { return; } else { - try stream.writeAll("\n"); - start_col.* = 0; + try stream.insertNewline(); return; } }, @@ -2501,10 +2406,9 @@ fn renderTokenOffset( // translate-c doesn't generate correct newlines // in generated code (loc.line == 0) so treat that case // as though there was meant to be a newline between the tokens - const newline_count = if (loc.line <= 1) @as(u8, 1) else @as(u8, 2); - try stream.writeByteNTimes('\n', newline_count); - try stream.writeByteNTimes(' ', indent); - try stream.writeAll(mem.trimRight(u8, tree.tokenSliceLoc(next_token_loc), " ")); + var newline_count = if (loc.line <= 1) @as(u8, 1) else @as(u8, 2); + while (newline_count > 0) : (newline_count -= 1) try stream.insertNewline(); + try stream.writer().writeAll(mem.trimRight(u8, 
tree.tokenSliceLoc(next_token_loc), " ")); offset += 1; token_loc = next_token_loc; @@ -2516,32 +2420,15 @@ fn renderTokenOffset( if (next_token_id == .MultilineStringLiteralLine) { return; } else { - try stream.writeAll("\n"); - start_col.* = 0; + try stream.insertNewline(); return; } }, Space.None, Space.Space => { - try stream.writeByte('\n'); - - const after_comment_token = tree.token_ids[token_index + offset]; - const next_line_indent = switch (after_comment_token) { - .RParen, .RBrace, .RBracket => blk: { - if (indent > indent_delta) { - break :blk indent - indent_delta; - } else { - break :blk 0; - } - }, - else => indent, - }; - try stream.writeByteNTimes(' ', next_line_indent); - start_col.* = next_line_indent; + try stream.insertNewline(); }, Space.SpaceOrOutdent => { - try stream.writeByte('\n'); - try stream.writeByteNTimes(' ', indent); - start_col.* = indent; + try stream.insertNewline(); }, Space.NoNewline => {}, Space.NoComment, Space.Comma, Space.BlockStart => unreachable, @@ -2556,11 +2443,9 @@ fn renderToken( tree: *ast.Tree, stream: anytype, token_index: ast.TokenIndex, - indent: usize, - start_col: *usize, space: Space, -) (@TypeOf(stream).Error || Error)!void { - return renderTokenOffset(tree, stream, token_index, indent, start_col, space, 0); +) (@TypeOf(stream.*).Error || Error)!void { + return renderTokenOffset(tree, stream, token_index, space, 0); } fn renderDocComments( @@ -2568,11 +2453,9 @@ fn renderDocComments( stream: anytype, node: anytype, doc_comments: ?*ast.Node.DocComment, - indent: usize, - start_col: *usize, -) (@TypeOf(stream).Error || Error)!void { +) (@TypeOf(stream.*).Error || Error)!void { const comment = doc_comments orelse return; - return renderDocCommentsToken(tree, stream, comment, node.firstToken(), indent, start_col); + return renderDocCommentsToken(tree, stream, comment, node.firstToken()); } fn renderDocCommentsToken( @@ -2580,20 +2463,16 @@ fn renderDocCommentsToken( stream: anytype, comment: 
*ast.Node.DocComment, first_token: ast.TokenIndex, - indent: usize, - start_col: *usize, -) (@TypeOf(stream).Error || Error)!void { +) (@TypeOf(stream.*).Error || Error)!void { var tok_i = comment.first_line; while (true) : (tok_i += 1) { switch (tree.token_ids[tok_i]) { .DocComment, .ContainerDocComment => { if (comment.first_line < first_token) { - try renderToken(tree, stream, tok_i, indent, start_col, Space.Newline); - try stream.writeByteNTimes(' ', indent); + try renderToken(tree, stream, tok_i, Space.Newline); } else { - try renderToken(tree, stream, tok_i, indent, start_col, Space.NoComment); - try stream.writeAll("\n"); - try stream.writeByteNTimes(' ', indent); + try renderToken(tree, stream, tok_i, Space.NoComment); + try stream.insertNewline(); } }, .LineComment => continue, @@ -2665,41 +2544,10 @@ fn nodeCausesSliceOpSpace(base: *ast.Node) bool { }; } -/// A `std.io.OutStream` that returns whether the given character has been written to it. -/// The contents are not written to anything. 
-const FindByteOutStream = struct { - byte_found: bool, - byte: u8, - - pub const Error = error{}; - pub const OutStream = std.io.OutStream(*FindByteOutStream, Error, write); - - pub fn init(byte: u8) FindByteOutStream { - return FindByteOutStream{ - .byte = byte, - .byte_found = false, - }; - } - - pub fn write(self: *FindByteOutStream, bytes: []const u8) Error!usize { - if (self.byte_found) return bytes.len; - self.byte_found = blk: { - for (bytes) |b| - if (b == self.byte) break :blk true; - break :blk false; - }; - return bytes.len; - } - - pub fn outStream(self: *FindByteOutStream) OutStream { - return .{ .context = self }; - } -}; - -fn copyFixingWhitespace(stream: anytype, slice: []const u8) @TypeOf(stream).Error!void { +fn copyFixingWhitespace(stream: anytype, slice: []const u8) @TypeOf(stream.*).Error!void { for (slice) |byte| switch (byte) { - '\t' => try stream.writeAll(" "), + '\t' => try stream.writer().writeAll(" "), '\r' => {}, - else => try stream.writeByte(byte), + else => try stream.writer().writeByte(byte), }; } diff --git a/src-self-hosted/main.zig b/src-self-hosted/main.zig index 7a67e197cc..019982e9b7 100644 --- a/src-self-hosted/main.zig +++ b/src-self-hosted/main.zig @@ -682,13 +682,13 @@ pub fn cmdFmt(gpa: *Allocator, args: []const []const u8) !void { process.exit(1); } if (check_flag) { - const anything_changed = try std.zig.render(gpa, io.null_out_stream, tree); + const anything_changed = try std.zig.render(gpa, &io.null_out_stream, tree); const code = if (anything_changed) @as(u8, 1) else @as(u8, 0); process.exit(code); } const stdout = io.getStdOut().outStream(); - _ = try std.zig.render(gpa, stdout, tree); + _ = try std.zig.render(gpa, &stdout, tree); return; } @@ -830,7 +830,7 @@ fn fmtPathFile( } if (check_mode) { - const anything_changed = try std.zig.render(fmt.gpa, io.null_out_stream, tree); + const anything_changed = try std.zig.render(fmt.gpa, &io.null_out_stream, tree); if (anything_changed) { std.debug.print("{}\n", 
.{file_path}); fmt.any_error = true; @@ -839,7 +839,8 @@ fn fmtPathFile( // As a heuristic, we make enough capacity for the same as the input source. try fmt.out_buffer.ensureCapacity(source_code.len); fmt.out_buffer.items.len = 0; - const anything_changed = try std.zig.render(fmt.gpa, fmt.out_buffer.writer(), tree); + const writer = fmt.out_buffer.writer(); + const anything_changed = try std.zig.render(fmt.gpa, &writer, tree); if (!anything_changed) return; // Good thing we didn't waste any file system access on this. diff --git a/src-self-hosted/stage2.zig b/src-self-hosted/stage2.zig index 30d2ea44db..29b8f3df44 100644 --- a/src-self-hosted/stage2.zig +++ b/src-self-hosted/stage2.zig @@ -151,7 +151,7 @@ export fn stage2_free_clang_errors(errors_ptr: [*]translate_c.ClangErrMsg, error export fn stage2_render_ast(tree: *ast.Tree, output_file: *FILE) Error { const c_out_stream = std.io.cOutStream(output_file); - _ = std.zig.render(std.heap.c_allocator, c_out_stream, tree) catch |e| switch (e) { + _ = std.zig.render(std.heap.c_allocator, &c_out_stream, tree) catch |e| switch (e) { error.WouldBlock => unreachable, // stage1 opens stuff in exclusively blocking mode error.NotOpenForWriting => unreachable, error.SystemResources => return .SystemResources, From 7d950210a64f51cba6c4edaacbd9c67f12e72604 Mon Sep 17 00:00:00 2001 From: Lachlan Easton Date: Sat, 29 Aug 2020 23:07:47 +1000 Subject: [PATCH 02/35] zig fmt review comments --- lib/std/io/auto_indenting_stream.zig | 23 ++++++++++++++--------- lib/std/io/change_detection_stream.zig | 17 +++++++++-------- lib/std/io/find_byte_out_stream.zig | 16 ++++++++-------- 3 files changed, 31 insertions(+), 25 deletions(-) diff --git a/lib/std/io/auto_indenting_stream.zig b/lib/std/io/auto_indenting_stream.zig index e7657c1f91..227dd616a1 100644 --- a/lib/std/io/auto_indenting_stream.zig +++ b/lib/std/io/auto_indenting_stream.zig @@ -3,13 +3,15 @@ const io = std.io; const mem = std.mem; const assert = std.debug.assert; -pub fn 
AutoIndentingStream(comptime indent_delta: u8, comptime OutStreamType: type) type { +/// Automatically inserts indentation of written data by keeping +/// track of the current indentation level +pub fn AutoIndentingStream(comptime indent_delta: u8, comptime WriterType: type) type { return struct { const Self = @This(); - pub const Error = OutStreamType.Error; - pub const OutStream = io.Writer(*Self, Error, write); + pub const Error = WriterType.Error; + pub const Writer = io.Writer(*Self, Error, write); - out_stream: *OutStreamType, + writer_pointer: *WriterType, current_line_empty: bool = true, indent_stack: [255]u8 = undefined, indent_stack_top: u8 = 0, @@ -17,11 +19,11 @@ pub fn AutoIndentingStream(comptime indent_delta: u8, comptime OutStreamType: ty applied_indent: u8 = 0, // the most recently applied indent indent_next_line: u8 = 0, // not used until the next line - pub fn init(out_stream: *OutStreamType) Self { - return Self{ .out_stream = out_stream }; + pub fn init(writer_pointer: *WriterType) Self { + return Self{ .writer_pointer = writer_pointer }; } - pub fn writer(self: *Self) OutStream { + pub fn writer(self: *Self) Writer { return .{ .context = self }; } @@ -34,7 +36,10 @@ pub fn AutoIndentingStream(comptime indent_delta: u8, comptime OutStreamType: ty } fn writeNoIndent(self: *Self, bytes: []const u8) Error!usize { - try self.out_stream.outStream().writeAll(bytes); + if (bytes.len == 0) + return @as(usize, 0); + + try self.writer_pointer.outStream().writeAll(bytes); if (bytes[bytes.len - 1] == '\n') self.resetLine(); return bytes.len; @@ -98,7 +103,7 @@ pub fn AutoIndentingStream(comptime indent_delta: u8, comptime OutStreamType: ty fn applyIndent(self: *Self) Error!void { const current_indent = self.currentIndent(); if (self.current_line_empty and current_indent > 0) { - try self.out_stream.outStream().writeByteNTimes(' ', current_indent); + try self.writer_pointer.outStream().writeByteNTimes(' ', current_indent); self.applied_indent = 
current_indent; } diff --git a/lib/std/io/change_detection_stream.zig b/lib/std/io/change_detection_stream.zig index 941569320c..98c8130b44 100644 --- a/lib/std/io/change_detection_stream.zig +++ b/lib/std/io/change_detection_stream.zig @@ -3,26 +3,27 @@ const io = std.io; const mem = std.mem; const assert = std.debug.assert; -pub fn ChangeDetectionStream(comptime OutStreamType: type) type { +/// Used to detect if the data written to a stream differs from a source buffer +pub fn ChangeDetectionStream(comptime WriterType: type) type { return struct { const Self = @This(); - pub const Error = OutStreamType.Error; - pub const OutStream = io.OutStream(*Self, Error, write); + pub const Error = WriterType.Error; + pub const Writer = io.Writer(*Self, Error, write); anything_changed: bool = false, - out_stream: *OutStreamType, + writer_pointer: *WriterType, source_index: usize, source: []const u8, - pub fn init(source: []const u8, out_stream: *OutStreamType) Self { + pub fn init(source: []const u8, writer_pointer: *WriterType) Self { return Self{ - .out_stream = out_stream, + .writer_pointer = writer_pointer, .source_index = 0, .source = source, }; } - pub fn outStream(self: *Self) OutStream { + pub fn writer(self: *Self) Writer { return .{ .context = self }; } @@ -40,7 +41,7 @@ pub fn ChangeDetectionStream(comptime OutStreamType: type) type { } } - return self.out_stream.write(bytes); + return self.writer_pointer.write(bytes); } pub fn changeDetected(self: *Self) bool { diff --git a/lib/std/io/find_byte_out_stream.zig b/lib/std/io/find_byte_out_stream.zig index e835cbd584..b316a98549 100644 --- a/lib/std/io/find_byte_out_stream.zig +++ b/lib/std/io/find_byte_out_stream.zig @@ -2,21 +2,21 @@ const std = @import("../std.zig"); const io = std.io; const assert = std.debug.assert; -// An OutStream that returns whether the given character has been written to it. -// The contents are not written to anything. 
-pub fn FindByteOutStream(comptime OutStreamType: type) type { +/// An OutStream that returns whether the given character has been written to it. +/// The contents are not written to anything. +pub fn FindByteOutStream(comptime WriterType: type) type { return struct { const Self = @This(); - pub const Error = OutStreamType.Error; + pub const Error = WriterType.Error; pub const OutStream = io.OutStream(*Self, Error, write); - out_stream: *OutStreamType, + writer_pointer: *WriterType, byte_found: bool, byte: u8, - pub fn init(byte: u8, out_stream: *OutStreamType) Self { + pub fn init(byte: u8, writer_pointer: *WriterType) Self { return Self{ - .out_stream = out_stream, + .writer_pointer = writer_pointer, .byte = byte, .byte_found = false, }; @@ -34,7 +34,7 @@ pub fn FindByteOutStream(comptime OutStreamType: type) type { break :blk false; }; } - return self.out_stream.writer().write(bytes); + return self.writer_pointer.writer().write(bytes); } }; } From 50c8a53188bdd20321990b7a4999f534b9e613dd Mon Sep 17 00:00:00 2001 From: Lachlan Easton Date: Sun, 30 Aug 2020 10:32:21 +1000 Subject: [PATCH 03/35] Use ArrayList instead of fixed array for auto_indenting_stream --- lib/std/io/auto_indenting_stream.zig | 78 ++++++++++++++++------------ lib/std/io/find_byte_out_stream.zig | 4 +- lib/std/zig/render.zig | 71 +++++++++++++------------ 3 files changed, 83 insertions(+), 70 deletions(-) diff --git a/lib/std/io/auto_indenting_stream.zig b/lib/std/io/auto_indenting_stream.zig index 227dd616a1..71547026a4 100644 --- a/lib/std/io/auto_indenting_stream.zig +++ b/lib/std/io/auto_indenting_stream.zig @@ -1,26 +1,36 @@ const std = @import("../std.zig"); const io = std.io; const mem = std.mem; +const Allocator = mem.Allocator; +const ArrayList = std.ArrayList; const assert = std.debug.assert; /// Automatically inserts indentation of written data by keeping /// track of the current indentation level -pub fn AutoIndentingStream(comptime indent_delta: u8, comptime WriterType: type) type 
{ +pub fn AutoIndentingStream(comptime indent_delta: usize, comptime WriterType: type) type { return struct { const Self = @This(); pub const Error = WriterType.Error; + pub const PushError = Allocator.Error; pub const Writer = io.Writer(*Self, Error, write); + const Stack = ArrayList(usize); writer_pointer: *WriterType, - current_line_empty: bool = true, - indent_stack: [255]u8 = undefined, - indent_stack_top: u8 = 0, - indent_one_shot_count: u8 = 0, // automatically popped when applied - applied_indent: u8 = 0, // the most recently applied indent - indent_next_line: u8 = 0, // not used until the next line + indent_stack: Stack, - pub fn init(writer_pointer: *WriterType) Self { - return Self{ .writer_pointer = writer_pointer }; + current_line_empty: bool = true, + indent_one_shot_count: usize = 0, // automatically popped when applied + applied_indent: usize = 0, // the most recently applied indent + indent_next_line: usize = 0, // not used until the next line + + pub fn init(writer_pointer: *WriterType, allocator: *Allocator) Self { + var indent_stack = Stack.init(allocator); + return Self{ .writer_pointer = writer_pointer, .indent_stack = indent_stack }; + } + + /// Release all allocated memory. + pub fn deinit(self: Self) void { + self.indent_stack.deinit(); } pub fn writer(self: *Self) Writer { @@ -39,7 +49,7 @@ pub fn AutoIndentingStream(comptime indent_delta: u8, comptime WriterType: type) if (bytes.len == 0) return @as(usize, 0); - try self.writer_pointer.outStream().writeAll(bytes); + try self.writer_pointer.writer().writeAll(bytes); if (bytes[bytes.len - 1] == '\n') self.resetLine(); return bytes.len; @@ -61,53 +71,52 @@ pub fn AutoIndentingStream(comptime indent_delta: u8, comptime WriterType: type) } /// Push default indentation - pub fn pushIndent(self: *Self) void { - // Doesn't actually write any indentation. Just primes the stream to be able to write the correct indentation if it needs to. 
- self.pushIndentN(indent_delta); + pub fn pushIndent(self: *Self) PushError!void { + // Doesn't actually write any indentation. + // Just primes the stream to be able to write the correct indentation if it needs to. + try self.pushIndentN(indent_delta); } /// Push an indent of arbitrary width - pub fn pushIndentN(self: *Self, n: u8) void { - assert(self.indent_stack_top < std.math.maxInt(u8)); - self.indent_stack[self.indent_stack_top] = n; - self.indent_stack_top += 1; + pub fn pushIndentN(self: *Self, n: usize) PushError!void { + try self.indent_stack.append(n); } /// Push an indent that is automatically popped after being applied - pub fn pushIndentOneShot(self: *Self) void { + pub fn pushIndentOneShot(self: *Self) PushError!void { self.indent_one_shot_count += 1; - self.pushIndent(); + try self.pushIndent(); } /// Turns all one-shot indents into regular indents /// Returns number of indents that must now be manually popped - pub fn lockOneShotIndent(self: *Self) u8 { + pub fn lockOneShotIndent(self: *Self) usize { var locked_count = self.indent_one_shot_count; self.indent_one_shot_count = 0; return locked_count; } /// Push an indent that should not take effect until the next line - pub fn pushIndentNextLine(self: *Self) void { + pub fn pushIndentNextLine(self: *Self) PushError!void { self.indent_next_line += 1; - self.pushIndent(); + try self.pushIndent(); } pub fn popIndent(self: *Self) void { - assert(self.indent_stack_top != 0); - self.indent_stack_top -= 1; - self.indent_next_line = std.math.min(self.indent_stack_top, self.indent_next_line); // Tentative indent may have been popped before there was a newline + assert(self.indent_stack.items.len != 0); + self.indent_stack.items.len -= 1; + self.indent_next_line = std.math.min(self.indent_stack.items.len, self.indent_next_line); // Tentative indent may have been popped before there was a newline } /// Writes ' ' bytes if the current line is empty fn applyIndent(self: *Self) Error!void { const current_indent 
= self.currentIndent(); if (self.current_line_empty and current_indent > 0) { - try self.writer_pointer.outStream().writeByteNTimes(' ', current_indent); + try self.writer_pointer.writer().writeByteNTimes(' ', current_indent); self.applied_indent = current_indent; } - self.indent_stack_top -= self.indent_one_shot_count; + self.indent_stack.items.len -= self.indent_one_shot_count; self.indent_one_shot_count = 0; self.current_line_empty = false; } @@ -118,11 +127,11 @@ pub fn AutoIndentingStream(comptime indent_delta: u8, comptime WriterType: type) return self.applied_indent > self.currentIndent(); } - fn currentIndent(self: *Self) u8 { - var indent_current: u8 = 0; - if (self.indent_stack_top > 0) { - const stack_top = self.indent_stack_top - self.indent_next_line; - for (self.indent_stack[0..stack_top]) |indent| { + fn currentIndent(self: *Self) usize { + var indent_current: usize = 0; + if (self.indent_stack.items.len > 0) { + const stack_top = self.indent_stack.items.len - self.indent_next_line; + for (self.indent_stack.items[0..stack_top]) |indent| { indent_current += indent; } } @@ -132,9 +141,10 @@ pub fn AutoIndentingStream(comptime indent_delta: u8, comptime WriterType: type) } pub fn autoIndentingStream( - comptime indent_delta: u8, + comptime indent_delta: usize, underlying_stream: anytype, + allocator: *Allocator, ) AutoIndentingStream(indent_delta, @TypeOf(underlying_stream).Child) { comptime assert(@typeInfo(@TypeOf(underlying_stream)) == .Pointer); - return AutoIndentingStream(indent_delta, @TypeOf(underlying_stream).Child).init(underlying_stream); + return AutoIndentingStream(indent_delta, @TypeOf(underlying_stream).Child).init(underlying_stream, allocator); } diff --git a/lib/std/io/find_byte_out_stream.zig b/lib/std/io/find_byte_out_stream.zig index b316a98549..0bf3a15545 100644 --- a/lib/std/io/find_byte_out_stream.zig +++ b/lib/std/io/find_byte_out_stream.zig @@ -8,7 +8,7 @@ pub fn FindByteOutStream(comptime WriterType: type) type { return struct 
{ const Self = @This(); pub const Error = WriterType.Error; - pub const OutStream = io.OutStream(*Self, Error, write); + pub const Writer = io.Writer(*Self, Error, write); writer_pointer: *WriterType, byte_found: bool, @@ -22,7 +22,7 @@ pub fn FindByteOutStream(comptime WriterType: type) type { }; } - pub fn outStream(self: *Self) OutStream { + pub fn writer(self: *Self) Writer { return .{ .context = self }; } diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index d7bba2f6bf..e3133a5501 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -24,7 +24,8 @@ pub fn render(allocator: *mem.Allocator, stream: anytype, tree: *ast.Tree) (meta var s = stream.*; var change_detection_stream = std.io.changeDetectionStream(tree.source, &s); - var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, &change_detection_stream); + var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, &change_detection_stream, allocator); + defer auto_indenting_stream.deinit(); try renderRoot(allocator, &auto_indenting_stream, tree); @@ -388,11 +389,11 @@ fn renderExpression( } if (block.statements.len == 0) { - stream.pushIndentNextLine(); + try stream.pushIndentNextLine(); defer stream.popIndent(); try renderToken(tree, stream, block.lbrace, Space.None); } else { - stream.pushIndentNextLine(); + try stream.pushIndentNextLine(); defer stream.popIndent(); try renderToken(tree, stream, block.lbrace, Space.Newline); @@ -462,7 +463,7 @@ fn renderExpression( try renderExpression(allocator, stream, tree, payload, Space.Space); } - stream.pushIndentOneShot(); + try stream.pushIndentOneShot(); return renderExpression(allocator, stream, tree, infix_op_node.rhs, space); }, @@ -523,7 +524,7 @@ fn renderExpression( }; try renderToken(tree, stream, infix_op_node.op_token, after_op_space); - stream.pushIndentOneShot(); + try stream.pushIndentOneShot(); return renderExpression(allocator, stream, tree, infix_op_node.rhs, space); }, @@ -717,7 +718,7 @@ fn 
renderExpression( } { - stream.pushIndent(); + try stream.pushIndent(); defer stream.popIndent(); try renderToken(tree, stream, lbrace, Space.None); } @@ -782,7 +783,8 @@ fn renderExpression( // Null stream for counting the printed length of each expression var counting_stream = std.io.countingOutStream(std.io.null_out_stream); - var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, &counting_stream); + var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, &counting_stream, allocator); + defer auto_indenting_stream.deinit(); for (exprs) |expr, i| { counting_stream.bytes_written = 0; @@ -794,7 +796,7 @@ fn renderExpression( } { - stream.pushIndentNextLine(); + try stream.pushIndentNextLine(); defer stream.popIndent(); try renderToken(tree, stream, lbrace, Space.Newline); @@ -878,7 +880,7 @@ fn renderExpression( } { - stream.pushIndentNextLine(); + try stream.pushIndentNextLine(); defer stream.popIndent(); try renderToken(tree, stream, lbrace, Space.None); } @@ -900,7 +902,8 @@ fn renderExpression( // render field expressions until a LF is found for (field_inits) |field_init| { var find_stream = std.io.findByteOutStream('\n', &std.io.null_out_stream); - var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, &find_stream); + var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, &find_stream, allocator); + defer auto_indenting_stream.deinit(); try renderExpression(allocator, &auto_indenting_stream, tree, field_init, Space.None); if (find_stream.byte_found) break :blk false; @@ -960,7 +963,7 @@ fn renderExpression( .node => |node| try renderExpression(allocator, stream, tree, node, Space.None), } - stream.pushIndentNextLine(); + try stream.pushIndentNextLine(); defer stream.popIndent(); try renderToken(tree, stream, lbrace, Space.Newline); @@ -1008,7 +1011,7 @@ fn renderExpression( const params = call.params(); for (params) |param_node, i| { - stream.pushIndent(); + try stream.pushIndent(); defer 
stream.popIndent(); if (i + 1 < params.len) { @@ -1028,7 +1031,7 @@ fn renderExpression( const params = call.params(); for (params) |param_node, i| { - if (param_node.*.tag == .MultilineStringLiteral) stream.pushIndentOneShot(); + if (param_node.*.tag == .MultilineStringLiteral) try stream.pushIndentOneShot(); try renderExpression(allocator, stream, tree, param_node, Space.None); @@ -1055,7 +1058,7 @@ fn renderExpression( { const new_space = if (ends_with_comment) Space.Newline else Space.None; - stream.pushIndent(); + try stream.pushIndent(); defer stream.popIndent(); try renderExpression(allocator, stream, tree, suffix_op.index_expr, new_space); } @@ -1191,7 +1194,7 @@ fn renderExpression( try renderToken(tree, stream, grouped_expr.lparen, Space.None); { - stream.pushIndentOneShot(); + try stream.pushIndentOneShot(); try renderExpression(allocator, stream, tree, grouped_expr.expr, Space.None); } return renderToken(tree, stream, grouped_expr.rparen, space); @@ -1251,7 +1254,7 @@ fn renderExpression( if (container_decl.fields_and_decls_len == 0) { { - stream.pushIndentNextLine(); + try stream.pushIndentNextLine(); defer stream.popIndent(); try renderToken(tree, stream, container_decl.lbrace_token, Space.None); // { } @@ -1286,7 +1289,7 @@ fn renderExpression( if (src_has_trailing_comma or !src_has_only_fields) { // One declaration per line - stream.pushIndentNextLine(); + try stream.pushIndentNextLine(); defer stream.popIndent(); try renderToken(tree, stream, container_decl.lbrace_token, .Newline); // { @@ -1302,7 +1305,7 @@ fn renderExpression( // their own line try renderToken(tree, stream, container_decl.lbrace_token, .Newline); // { - stream.pushIndent(); + try stream.pushIndent(); defer stream.popIndent(); for (fields_and_decls) |decl, i| { @@ -1358,7 +1361,7 @@ fn renderExpression( if (src_has_trailing_comma) { { - stream.pushIndent(); + try stream.pushIndent(); defer stream.popIndent(); try renderToken(tree, stream, lbrace, Space.Newline); // { @@ -1448,7 
+1451,7 @@ fn renderExpression( } } else { // one param per line - stream.pushIndent(); + try stream.pushIndent(); defer stream.popIndent(); try renderToken(tree, stream, lparen, Space.Newline); // ( @@ -1527,7 +1530,7 @@ fn renderExpression( } } else { // one param per line - stream.pushIndent(); + try stream.pushIndent(); defer stream.popIndent(); try renderToken(tree, stream, lparen, Space.Newline); // ( @@ -1624,7 +1627,7 @@ fn renderExpression( try renderToken(tree, stream, rparen, Space.Space); // ) { - stream.pushIndentNextLine(); + try stream.pushIndentNextLine(); defer stream.popIndent(); try renderToken(tree, stream, lbrace, Space.Newline); // { @@ -1708,7 +1711,7 @@ fn renderExpression( if (same_line) { return renderExpression(allocator, stream, tree, else_node.body, space); } else { - stream.pushIndent(); + try stream.pushIndent(); defer stream.popIndent(); return renderExpression(allocator, stream, tree, else_node.body, space); } @@ -1772,7 +1775,7 @@ fn renderExpression( } { - if (!body_is_block) stream.pushIndent(); + if (!body_is_block) try stream.pushIndent(); defer if (!body_is_block) stream.popIndent(); try renderExpression(allocator, stream, tree, while_node.body, after_body_space); } @@ -1823,7 +1826,7 @@ fn renderExpression( }; { - if (!body_on_same_line) stream.pushIndent(); + if (!body_on_same_line) try stream.pushIndent(); defer if (!body_on_same_line) stream.popIndent(); try renderExpression(allocator, stream, tree, for_node.body, space_after_body); // { body } } @@ -1879,7 +1882,7 @@ fn renderExpression( const else_is_block = nodeIsBlock(@"else".body); { - stream.pushIndent(); + try stream.pushIndent(); defer stream.popIndent(); try renderExpression(allocator, stream, tree, if_node.body, Space.Newline); } @@ -1900,12 +1903,12 @@ fn renderExpression( try renderExpression(allocator, stream, tree, payload, Space.Newline); } - stream.pushIndent(); + try stream.pushIndent(); defer stream.popIndent(); return renderExpression(allocator, stream, 
tree, @"else".body, space); } } else { - stream.pushIndent(); + try stream.pushIndent(); defer stream.popIndent(); return renderExpression(allocator, stream, tree, if_node.body, space); } @@ -1946,7 +1949,7 @@ fn renderExpression( } asmblk: { - stream.pushIndent(); + try stream.pushIndent(); defer stream.popIndent(); if (asm_node.outputs.len == 0 and asm_node.inputs.len == 0 and asm_node.clobbers.len == 0) { @@ -1965,7 +1968,7 @@ fn renderExpression( } else blk: { try renderToken(tree, stream, colon1, Space.Space); // : - stream.pushIndentN(2); + try stream.pushIndentN(2); defer stream.popIndent(); for (asm_node.outputs) |*asm_output, i| { @@ -1996,7 +1999,7 @@ fn renderExpression( break :blk tree.nextToken(colon2); } else blk: { try renderToken(tree, stream, colon2, Space.Space); // : - stream.pushIndentN(2); + try stream.pushIndentN(2); defer stream.popIndent(); for (asm_node.inputs) |*asm_input, i| { if (i + 1 < asm_node.inputs.len) { @@ -2022,7 +2025,7 @@ fn renderExpression( }; try renderToken(tree, stream, colon3, Space.Space); // : - stream.pushIndentN(2); + try stream.pushIndentN(2); defer stream.popIndent(); for (asm_node.clobbers) |clobber_node, i| { if (i + 1 >= asm_node.clobbers.len) { @@ -2075,7 +2078,7 @@ fn renderArrayType( const new_space = if (ends_with_comment) Space.Newline else Space.None; { const do_indent = (starts_with_comment or ends_with_comment); - if (do_indent) stream.pushIndent(); + if (do_indent) try stream.pushIndent(); defer if (do_indent) stream.popIndent(); try renderToken(tree, stream, lbracket, Space.None); // [ @@ -2209,7 +2212,7 @@ fn renderVarDecl( if (var_decl.getTrailer("init_node")) |init_node| { const s = if (init_node.tag == .MultilineStringLiteral) Space.None else Space.Space; try renderToken(tree, stream, var_decl.getTrailer("eq_token").?, s); // = - stream.pushIndentOneShot(); + try stream.pushIndentOneShot(); try renderExpression(allocator, stream, tree, init_node, Space.None); } From 
5aca3baea62326dee301ec29c567dc224baa4a08 Mon Sep 17 00:00:00 2001 From: Lachlan Easton Date: Mon, 31 Aug 2020 23:39:30 +1000 Subject: [PATCH 04/35] zig fmt: Remove dynamic stack from auto-indenting-stream --- lib/std/io.zig | 4 +- lib/std/io/auto_indenting_stream.zig | 58 +++++++++------------- lib/std/io/change_detection_stream.zig | 4 +- lib/std/io/find_byte_out_stream.zig | 4 +- lib/std/target.zig | 2 +- lib/std/zig/parser_test.zig | 10 ++-- lib/std/zig/render.zig | 68 +++++++++++++------------- 7 files changed, 68 insertions(+), 82 deletions(-) diff --git a/lib/std/io.zig b/lib/std/io.zig index 1514d80cb0..3f02128a6c 100644 --- a/lib/std/io.zig +++ b/lib/std/io.zig @@ -191,10 +191,10 @@ pub const BufferedAtomicFile = @import("io/buffered_atomic_file.zig").BufferedAt pub const StreamSource = @import("io/stream_source.zig").StreamSource; /// A Writer that doesn't write to anything. -pub var null_writer = @as(NullWriter, .{ .context = {} }); +pub const null_writer = @as(NullWriter, .{ .context = {} }); /// Deprecated: use `null_writer` -pub var null_out_stream = null_writer; +pub const null_out_stream = null_writer; const NullWriter = Writer(void, error{}, dummyWrite); /// Deprecated: use NullWriter diff --git a/lib/std/io/auto_indenting_stream.zig b/lib/std/io/auto_indenting_stream.zig index 71547026a4..bebbf9aed6 100644 --- a/lib/std/io/auto_indenting_stream.zig +++ b/lib/std/io/auto_indenting_stream.zig @@ -1,37 +1,31 @@ const std = @import("../std.zig"); const io = std.io; const mem = std.mem; -const Allocator = mem.Allocator; -const ArrayList = std.ArrayList; const assert = std.debug.assert; /// Automatically inserts indentation of written data by keeping /// track of the current indentation level -pub fn AutoIndentingStream(comptime indent_delta: usize, comptime WriterType: type) type { +pub fn AutoIndentingStream(comptime WriterType: type) type { return struct { const Self = @This(); pub const Error = WriterType.Error; - pub const PushError = 
Allocator.Error; pub const Writer = io.Writer(*Self, Error, write); - const Stack = ArrayList(usize); writer_pointer: *WriterType, - indent_stack: Stack, + indent_stack: usize = 0, + indent_delta: usize, current_line_empty: bool = true, indent_one_shot_count: usize = 0, // automatically popped when applied applied_indent: usize = 0, // the most recently applied indent indent_next_line: usize = 0, // not used until the next line - pub fn init(writer_pointer: *WriterType, allocator: *Allocator) Self { - var indent_stack = Stack.init(allocator); - return Self{ .writer_pointer = writer_pointer, .indent_stack = indent_stack }; + pub fn init(indent_delta: usize, writer_pointer: *WriterType) Self { + return Self{ .writer_pointer = writer_pointer, .indent_delta = indent_delta }; } /// Release all allocated memory. - pub fn deinit(self: Self) void { - self.indent_stack.deinit(); - } + pub fn deinit(self: Self) void {} pub fn writer(self: *Self) Writer { return .{ .context = self }; @@ -71,21 +65,16 @@ pub fn AutoIndentingStream(comptime indent_delta: usize, comptime WriterType: ty } /// Push default indentation - pub fn pushIndent(self: *Self) PushError!void { + pub fn pushIndent(self: *Self) void { // Doesn't actually write any indentation. // Just primes the stream to be able to write the correct indentation if it needs to. 
- try self.pushIndentN(indent_delta); - } - - /// Push an indent of arbitrary width - pub fn pushIndentN(self: *Self, n: usize) PushError!void { - try self.indent_stack.append(n); + self.indent_stack += 1; } /// Push an indent that is automatically popped after being applied - pub fn pushIndentOneShot(self: *Self) PushError!void { + pub fn pushIndentOneShot(self: *Self) void { self.indent_one_shot_count += 1; - try self.pushIndent(); + self.pushIndent(); } /// Turns all one-shot indents into regular indents @@ -97,15 +86,15 @@ pub fn AutoIndentingStream(comptime indent_delta: usize, comptime WriterType: ty } /// Push an indent that should not take effect until the next line - pub fn pushIndentNextLine(self: *Self) PushError!void { + pub fn pushIndentNextLine(self: *Self) void { self.indent_next_line += 1; - try self.pushIndent(); + self.pushIndent(); } pub fn popIndent(self: *Self) void { - assert(self.indent_stack.items.len != 0); - self.indent_stack.items.len -= 1; - self.indent_next_line = std.math.min(self.indent_stack.items.len, self.indent_next_line); // Tentative indent may have been popped before there was a newline + assert(self.indent_stack != 0); + self.indent_stack -= 1; + self.indent_next_line = std.math.min(self.indent_stack, self.indent_next_line); // Tentative indent may have been popped before there was a newline } /// Writes ' ' bytes if the current line is empty @@ -116,7 +105,7 @@ pub fn AutoIndentingStream(comptime indent_delta: usize, comptime WriterType: ty self.applied_indent = current_indent; } - self.indent_stack.items.len -= self.indent_one_shot_count; + self.indent_stack -= self.indent_one_shot_count; self.indent_one_shot_count = 0; self.current_line_empty = false; } @@ -129,11 +118,9 @@ pub fn AutoIndentingStream(comptime indent_delta: usize, comptime WriterType: ty fn currentIndent(self: *Self) usize { var indent_current: usize = 0; - if (self.indent_stack.items.len > 0) { - const stack_top = self.indent_stack.items.len - 
self.indent_next_line; - for (self.indent_stack.items[0..stack_top]) |indent| { - indent_current += indent; - } + if (self.indent_stack > 0) { + const stack_top = self.indent_stack - self.indent_next_line; + indent_current = stack_top * self.indent_delta; } return indent_current; } @@ -141,10 +128,9 @@ pub fn AutoIndentingStream(comptime indent_delta: usize, comptime WriterType: ty } pub fn autoIndentingStream( - comptime indent_delta: usize, + indent_delta: usize, underlying_stream: anytype, - allocator: *Allocator, -) AutoIndentingStream(indent_delta, @TypeOf(underlying_stream).Child) { +) AutoIndentingStream(@TypeOf(underlying_stream).Child) { comptime assert(@typeInfo(@TypeOf(underlying_stream)) == .Pointer); - return AutoIndentingStream(indent_delta, @TypeOf(underlying_stream).Child).init(underlying_stream, allocator); + return AutoIndentingStream(@TypeOf(underlying_stream).Child).init(indent_delta, underlying_stream); } diff --git a/lib/std/io/change_detection_stream.zig b/lib/std/io/change_detection_stream.zig index 98c8130b44..b559e66751 100644 --- a/lib/std/io/change_detection_stream.zig +++ b/lib/std/io/change_detection_stream.zig @@ -11,11 +11,11 @@ pub fn ChangeDetectionStream(comptime WriterType: type) type { pub const Writer = io.Writer(*Self, Error, write); anything_changed: bool = false, - writer_pointer: *WriterType, + writer_pointer: *const WriterType, source_index: usize, source: []const u8, - pub fn init(source: []const u8, writer_pointer: *WriterType) Self { + pub fn init(source: []const u8, writer_pointer: *const WriterType) Self { return Self{ .writer_pointer = writer_pointer, .source_index = 0, diff --git a/lib/std/io/find_byte_out_stream.zig b/lib/std/io/find_byte_out_stream.zig index 0bf3a15545..bfd0e815e4 100644 --- a/lib/std/io/find_byte_out_stream.zig +++ b/lib/std/io/find_byte_out_stream.zig @@ -10,11 +10,11 @@ pub fn FindByteOutStream(comptime WriterType: type) type { pub const Error = WriterType.Error; pub const Writer = 
io.Writer(*Self, Error, write); - writer_pointer: *WriterType, + writer_pointer: *const WriterType, byte_found: bool, byte: u8, - pub fn init(byte: u8, writer_pointer: *WriterType) Self { + pub fn init(byte: u8, writer_pointer: *const WriterType) Self { return Self{ .writer_pointer = writer_pointer, .byte = byte, diff --git a/lib/std/target.zig b/lib/std/target.zig index deb7c85984..034ab780d0 100644 --- a/lib/std/target.zig +++ b/lib/std/target.zig @@ -101,7 +101,7 @@ pub const Target = struct { /// Latest Windows version that the Zig Standard Library is aware of pub const latest = WindowsVersion.win10_20h1; - + pub const Range = struct { min: WindowsVersion, max: WindowsVersion, diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index f4da650efb..9369d44010 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -2827,7 +2827,7 @@ test "zig fmt: inline asm" { \\ return asm volatile ("syscall" \\ : [ret] "={rax}" (-> usize) \\ : [number] "{rax}" (number), - \\ [arg1] "{rdi}" (arg1) + \\ [arg1] "{rdi}" (arg1) \\ : "rcx", "r11" \\ ); \\} @@ -2930,14 +2930,14 @@ test "zig fmt: inline asm parameter alignment" { \\ \\ foo \\ \\ bar \\ : [_] "" (-> usize), - \\ [_] "" (-> usize) + \\ [_] "" (-> usize) \\ ); \\ asm volatile ( \\ \\ foo \\ \\ bar \\ : \\ : [_] "" (0), - \\ [_] "" (0) + \\ [_] "" (0) \\ ); \\ asm volatile ( \\ \\ foo @@ -2950,9 +2950,9 @@ test "zig fmt: inline asm parameter alignment" { \\ \\ foo \\ \\ bar \\ : [_] "" (-> usize), - \\ [_] "" (-> usize) + \\ [_] "" (-> usize) \\ : [_] "" (0), - \\ [_] "" (0) + \\ [_] "" (0) \\ : "", "" \\ ); \\} diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index e3133a5501..471b98398b 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -24,7 +24,7 @@ pub fn render(allocator: *mem.Allocator, stream: anytype, tree: *ast.Tree) (meta var s = stream.*; var change_detection_stream = std.io.changeDetectionStream(tree.source, &s); - var auto_indenting_stream 
= std.io.autoIndentingStream(indent_delta, &change_detection_stream, allocator); + var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, &change_detection_stream); defer auto_indenting_stream.deinit(); try renderRoot(allocator, &auto_indenting_stream, tree); @@ -389,11 +389,11 @@ fn renderExpression( } if (block.statements.len == 0) { - try stream.pushIndentNextLine(); + stream.pushIndentNextLine(); defer stream.popIndent(); try renderToken(tree, stream, block.lbrace, Space.None); } else { - try stream.pushIndentNextLine(); + stream.pushIndentNextLine(); defer stream.popIndent(); try renderToken(tree, stream, block.lbrace, Space.Newline); @@ -463,7 +463,7 @@ fn renderExpression( try renderExpression(allocator, stream, tree, payload, Space.Space); } - try stream.pushIndentOneShot(); + stream.pushIndentOneShot(); return renderExpression(allocator, stream, tree, infix_op_node.rhs, space); }, @@ -524,7 +524,7 @@ fn renderExpression( }; try renderToken(tree, stream, infix_op_node.op_token, after_op_space); - try stream.pushIndentOneShot(); + stream.pushIndentOneShot(); return renderExpression(allocator, stream, tree, infix_op_node.rhs, space); }, @@ -718,7 +718,7 @@ fn renderExpression( } { - try stream.pushIndent(); + stream.pushIndent(); defer stream.popIndent(); try renderToken(tree, stream, lbrace, Space.None); } @@ -783,7 +783,7 @@ fn renderExpression( // Null stream for counting the printed length of each expression var counting_stream = std.io.countingOutStream(std.io.null_out_stream); - var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, &counting_stream, allocator); + var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, &counting_stream); defer auto_indenting_stream.deinit(); for (exprs) |expr, i| { @@ -796,7 +796,7 @@ fn renderExpression( } { - try stream.pushIndentNextLine(); + stream.pushIndentNextLine(); defer stream.popIndent(); try renderToken(tree, stream, lbrace, Space.Newline); @@ -880,7 +880,7 @@ fn 
renderExpression( } { - try stream.pushIndentNextLine(); + stream.pushIndentNextLine(); defer stream.popIndent(); try renderToken(tree, stream, lbrace, Space.None); } @@ -902,7 +902,7 @@ fn renderExpression( // render field expressions until a LF is found for (field_inits) |field_init| { var find_stream = std.io.findByteOutStream('\n', &std.io.null_out_stream); - var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, &find_stream, allocator); + var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, &find_stream); defer auto_indenting_stream.deinit(); try renderExpression(allocator, &auto_indenting_stream, tree, field_init, Space.None); @@ -963,7 +963,7 @@ fn renderExpression( .node => |node| try renderExpression(allocator, stream, tree, node, Space.None), } - try stream.pushIndentNextLine(); + stream.pushIndentNextLine(); defer stream.popIndent(); try renderToken(tree, stream, lbrace, Space.Newline); @@ -1011,7 +1011,7 @@ fn renderExpression( const params = call.params(); for (params) |param_node, i| { - try stream.pushIndent(); + stream.pushIndent(); defer stream.popIndent(); if (i + 1 < params.len) { @@ -1031,7 +1031,7 @@ fn renderExpression( const params = call.params(); for (params) |param_node, i| { - if (param_node.*.tag == .MultilineStringLiteral) try stream.pushIndentOneShot(); + if (param_node.*.tag == .MultilineStringLiteral) stream.pushIndentOneShot(); try renderExpression(allocator, stream, tree, param_node, Space.None); @@ -1058,7 +1058,7 @@ fn renderExpression( { const new_space = if (ends_with_comment) Space.Newline else Space.None; - try stream.pushIndent(); + stream.pushIndent(); defer stream.popIndent(); try renderExpression(allocator, stream, tree, suffix_op.index_expr, new_space); } @@ -1194,7 +1194,7 @@ fn renderExpression( try renderToken(tree, stream, grouped_expr.lparen, Space.None); { - try stream.pushIndentOneShot(); + stream.pushIndentOneShot(); try renderExpression(allocator, stream, tree, grouped_expr.expr, 
Space.None); } return renderToken(tree, stream, grouped_expr.rparen, space); @@ -1254,7 +1254,7 @@ fn renderExpression( if (container_decl.fields_and_decls_len == 0) { { - try stream.pushIndentNextLine(); + stream.pushIndentNextLine(); defer stream.popIndent(); try renderToken(tree, stream, container_decl.lbrace_token, Space.None); // { } @@ -1289,7 +1289,7 @@ fn renderExpression( if (src_has_trailing_comma or !src_has_only_fields) { // One declaration per line - try stream.pushIndentNextLine(); + stream.pushIndentNextLine(); defer stream.popIndent(); try renderToken(tree, stream, container_decl.lbrace_token, .Newline); // { @@ -1305,7 +1305,7 @@ fn renderExpression( // their own line try renderToken(tree, stream, container_decl.lbrace_token, .Newline); // { - try stream.pushIndent(); + stream.pushIndent(); defer stream.popIndent(); for (fields_and_decls) |decl, i| { @@ -1361,7 +1361,7 @@ fn renderExpression( if (src_has_trailing_comma) { { - try stream.pushIndent(); + stream.pushIndent(); defer stream.popIndent(); try renderToken(tree, stream, lbrace, Space.Newline); // { @@ -1451,7 +1451,7 @@ fn renderExpression( } } else { // one param per line - try stream.pushIndent(); + stream.pushIndent(); defer stream.popIndent(); try renderToken(tree, stream, lparen, Space.Newline); // ( @@ -1530,7 +1530,7 @@ fn renderExpression( } } else { // one param per line - try stream.pushIndent(); + stream.pushIndent(); defer stream.popIndent(); try renderToken(tree, stream, lparen, Space.Newline); // ( @@ -1627,7 +1627,7 @@ fn renderExpression( try renderToken(tree, stream, rparen, Space.Space); // ) { - try stream.pushIndentNextLine(); + stream.pushIndentNextLine(); defer stream.popIndent(); try renderToken(tree, stream, lbrace, Space.Newline); // { @@ -1711,7 +1711,7 @@ fn renderExpression( if (same_line) { return renderExpression(allocator, stream, tree, else_node.body, space); } else { - try stream.pushIndent(); + stream.pushIndent(); defer stream.popIndent(); return 
renderExpression(allocator, stream, tree, else_node.body, space); } @@ -1775,7 +1775,7 @@ fn renderExpression( } { - if (!body_is_block) try stream.pushIndent(); + if (!body_is_block) stream.pushIndent(); defer if (!body_is_block) stream.popIndent(); try renderExpression(allocator, stream, tree, while_node.body, after_body_space); } @@ -1826,7 +1826,7 @@ fn renderExpression( }; { - if (!body_on_same_line) try stream.pushIndent(); + if (!body_on_same_line) stream.pushIndent(); defer if (!body_on_same_line) stream.popIndent(); try renderExpression(allocator, stream, tree, for_node.body, space_after_body); // { body } } @@ -1882,7 +1882,7 @@ fn renderExpression( const else_is_block = nodeIsBlock(@"else".body); { - try stream.pushIndent(); + stream.pushIndent(); defer stream.popIndent(); try renderExpression(allocator, stream, tree, if_node.body, Space.Newline); } @@ -1903,12 +1903,12 @@ fn renderExpression( try renderExpression(allocator, stream, tree, payload, Space.Newline); } - try stream.pushIndent(); + stream.pushIndent(); defer stream.popIndent(); return renderExpression(allocator, stream, tree, @"else".body, space); } } else { - try stream.pushIndent(); + stream.pushIndent(); defer stream.popIndent(); return renderExpression(allocator, stream, tree, if_node.body, space); } @@ -1949,7 +1949,7 @@ fn renderExpression( } asmblk: { - try stream.pushIndent(); + stream.pushIndent(); defer stream.popIndent(); if (asm_node.outputs.len == 0 and asm_node.inputs.len == 0 and asm_node.clobbers.len == 0) { @@ -1968,7 +1968,7 @@ fn renderExpression( } else blk: { try renderToken(tree, stream, colon1, Space.Space); // : - try stream.pushIndentN(2); + stream.pushIndent(); defer stream.popIndent(); for (asm_node.outputs) |*asm_output, i| { @@ -1999,7 +1999,7 @@ fn renderExpression( break :blk tree.nextToken(colon2); } else blk: { try renderToken(tree, stream, colon2, Space.Space); // : - try stream.pushIndentN(2); + stream.pushIndent(); defer stream.popIndent(); for 
(asm_node.inputs) |*asm_input, i| { if (i + 1 < asm_node.inputs.len) { @@ -2025,7 +2025,7 @@ fn renderExpression( }; try renderToken(tree, stream, colon3, Space.Space); // : - try stream.pushIndentN(2); + stream.pushIndent(); defer stream.popIndent(); for (asm_node.clobbers) |clobber_node, i| { if (i + 1 >= asm_node.clobbers.len) { @@ -2078,7 +2078,7 @@ fn renderArrayType( const new_space = if (ends_with_comment) Space.Newline else Space.None; { const do_indent = (starts_with_comment or ends_with_comment); - if (do_indent) try stream.pushIndent(); + if (do_indent) stream.pushIndent(); defer if (do_indent) stream.popIndent(); try renderToken(tree, stream, lbracket, Space.None); // [ @@ -2212,7 +2212,7 @@ fn renderVarDecl( if (var_decl.getTrailer("init_node")) |init_node| { const s = if (init_node.tag == .MultilineStringLiteral) Space.None else Space.Space; try renderToken(tree, stream, var_decl.getTrailer("eq_token").?, s); // = - try stream.pushIndentOneShot(); + stream.pushIndentOneShot(); try renderExpression(allocator, stream, tree, init_node, Space.None); } From 029ec456bce5fc6c57eea496db1cebed55e31ede Mon Sep 17 00:00:00 2001 From: Lachlan Easton Date: Mon, 31 Aug 2020 23:32:42 +1000 Subject: [PATCH 05/35] zig fmt: Set indent_delta to 2 when rendering inline asm --- lib/std/io/auto_indenting_stream.zig | 36 ++++++++++++++++++---------- lib/std/zig/parser_test.zig | 10 ++++---- lib/std/zig/render.zig | 7 +++--- 3 files changed, 33 insertions(+), 20 deletions(-) diff --git a/lib/std/io/auto_indenting_stream.zig b/lib/std/io/auto_indenting_stream.zig index bebbf9aed6..e2233f8cc2 100644 --- a/lib/std/io/auto_indenting_stream.zig +++ b/lib/std/io/auto_indenting_stream.zig @@ -13,7 +13,7 @@ pub fn AutoIndentingStream(comptime WriterType: type) type { writer_pointer: *WriterType, - indent_stack: usize = 0, + indent_count: usize = 0, indent_delta: usize, current_line_empty: bool = true, indent_one_shot_count: usize = 0, // automatically popped when applied @@ -24,9 
+24,6 @@ pub fn AutoIndentingStream(comptime WriterType: type) type { return Self{ .writer_pointer = writer_pointer, .indent_delta = indent_delta }; } - /// Release all allocated memory. - pub fn deinit(self: Self) void {} - pub fn writer(self: *Self) Writer { return .{ .context = self }; } @@ -39,6 +36,21 @@ pub fn AutoIndentingStream(comptime WriterType: type) type { return self.writeNoIndent(bytes); } + // Change the indent delta without changing the final indentation level + pub fn setIndentDelta(self: *Self, indent_delta: usize) void { + if (self.indent_delta == indent_delta) { + return; + } else if (self.indent_delta > indent_delta) { + assert(self.indent_delta % indent_delta == 0); + self.indent_count = self.indent_count * (self.indent_delta / indent_delta); + } else { + // assert that the current indentation (in spaces) in a multiple of the new delta + assert((self.indent_count * self.indent_delta) % indent_delta == 0); + self.indent_count = self.indent_count / (indent_delta / self.indent_delta); + } + self.indent_delta = indent_delta; + } + fn writeNoIndent(self: *Self, bytes: []const u8) Error!usize { if (bytes.len == 0) return @as(usize, 0); @@ -68,7 +80,7 @@ pub fn AutoIndentingStream(comptime WriterType: type) type { pub fn pushIndent(self: *Self) void { // Doesn't actually write any indentation. // Just primes the stream to be able to write the correct indentation if it needs to. 
- self.indent_stack += 1; + self.indent_count += 1; } /// Push an indent that is automatically popped after being applied @@ -92,9 +104,9 @@ pub fn AutoIndentingStream(comptime WriterType: type) type { } pub fn popIndent(self: *Self) void { - assert(self.indent_stack != 0); - self.indent_stack -= 1; - self.indent_next_line = std.math.min(self.indent_stack, self.indent_next_line); // Tentative indent may have been popped before there was a newline + assert(self.indent_count != 0); + self.indent_count -= 1; + self.indent_next_line = std.math.min(self.indent_count, self.indent_next_line); // Tentative indent may have been popped before there was a newline } /// Writes ' ' bytes if the current line is empty @@ -105,7 +117,7 @@ pub fn AutoIndentingStream(comptime WriterType: type) type { self.applied_indent = current_indent; } - self.indent_stack -= self.indent_one_shot_count; + self.indent_count -= self.indent_one_shot_count; self.indent_one_shot_count = 0; self.current_line_empty = false; } @@ -118,9 +130,9 @@ pub fn AutoIndentingStream(comptime WriterType: type) type { fn currentIndent(self: *Self) usize { var indent_current: usize = 0; - if (self.indent_stack > 0) { - const stack_top = self.indent_stack - self.indent_next_line; - indent_current = stack_top * self.indent_delta; + if (self.indent_count > 0) { + const indent_count = self.indent_count - self.indent_next_line; + indent_current = indent_count * self.indent_delta; } return indent_current; } diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 9369d44010..f4da650efb 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -2827,7 +2827,7 @@ test "zig fmt: inline asm" { \\ return asm volatile ("syscall" \\ : [ret] "={rax}" (-> usize) \\ : [number] "{rax}" (number), - \\ [arg1] "{rdi}" (arg1) + \\ [arg1] "{rdi}" (arg1) \\ : "rcx", "r11" \\ ); \\} @@ -2930,14 +2930,14 @@ test "zig fmt: inline asm parameter alignment" { \\ \\ foo \\ \\ bar \\ : [_] "" (-> usize), - 
\\ [_] "" (-> usize) + \\ [_] "" (-> usize) \\ ); \\ asm volatile ( \\ \\ foo \\ \\ bar \\ : \\ : [_] "" (0), - \\ [_] "" (0) + \\ [_] "" (0) \\ ); \\ asm volatile ( \\ \\ foo @@ -2950,9 +2950,9 @@ test "zig fmt: inline asm parameter alignment" { \\ \\ foo \\ \\ bar \\ : [_] "" (-> usize), - \\ [_] "" (-> usize) + \\ [_] "" (-> usize) \\ : [_] "" (0), - \\ [_] "" (0) + \\ [_] "" (0) \\ : "", "" \\ ); \\} diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 471b98398b..e4b03193c7 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -11,6 +11,7 @@ const ast = std.zig.ast; const Token = std.zig.Token; const indent_delta = 4; +const asm_indent_delta = 2; pub const Error = error{ /// Ran out of memory allocating call stack frames to complete rendering. @@ -25,7 +26,6 @@ pub fn render(allocator: *mem.Allocator, stream: anytype, tree: *ast.Tree) (meta var s = stream.*; var change_detection_stream = std.io.changeDetectionStream(tree.source, &s); var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, &change_detection_stream); - defer auto_indenting_stream.deinit(); try renderRoot(allocator, &auto_indenting_stream, tree); @@ -784,7 +784,6 @@ fn renderExpression( // Null stream for counting the printed length of each expression var counting_stream = std.io.countingOutStream(std.io.null_out_stream); var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, &counting_stream); - defer auto_indenting_stream.deinit(); for (exprs) |expr, i| { counting_stream.bytes_written = 0; @@ -903,7 +902,6 @@ fn renderExpression( for (field_inits) |field_init| { var find_stream = std.io.findByteOutStream('\n', &std.io.null_out_stream); var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, &find_stream); - defer auto_indenting_stream.deinit(); try renderExpression(allocator, &auto_indenting_stream, tree, field_init, Space.None); if (find_stream.byte_found) break :blk false; @@ -1959,6 +1957,9 @@ fn renderExpression( try 
renderExpression(allocator, stream, tree, asm_node.template, Space.Newline); + stream.setIndentDelta(asm_indent_delta); + defer stream.setIndentDelta(indent_delta); + const colon1 = tree.nextToken(asm_node.template.lastToken()); const colon2 = if (asm_node.outputs.len == 0) blk: { From bc24b86d82ec3b8d7d6e7e5d2d3dceb82d7b53dc Mon Sep 17 00:00:00 2001 From: Lachlan Easton Date: Tue, 1 Sep 2020 13:19:34 +1000 Subject: [PATCH 06/35] zig fmt: Fix regression not covered by testing --- lib/std/io/auto_indenting_stream.zig | 4 +++- lib/std/zig/parser_test.zig | 11 +++++++++++ 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/lib/std/io/auto_indenting_stream.zig b/lib/std/io/auto_indenting_stream.zig index e2233f8cc2..d4256324f1 100644 --- a/lib/std/io/auto_indenting_stream.zig +++ b/lib/std/io/auto_indenting_stream.zig @@ -106,7 +106,9 @@ pub fn AutoIndentingStream(comptime WriterType: type) type { pub fn popIndent(self: *Self) void { assert(self.indent_count != 0); self.indent_count -= 1; - self.indent_next_line = std.math.min(self.indent_count, self.indent_next_line); // Tentative indent may have been popped before there was a newline + + if (self.indent_next_line > 0) + self.indent_next_line -= 1; } /// Writes ' ' bytes if the current line is empty diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index f4da650efb..8652a73c50 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -3310,6 +3310,17 @@ test "zig fmt: Only indent multiline string literals in function calls" { ); } +test "zig fmt: Don't add extra newline after if" { + try testCanonical( + \\pub fn atomicSymLink(allocator: *Allocator, existing_path: []const u8, new_path: []const u8) !void { + \\ if (cwd().symLink(existing_path, new_path, .{})) { + \\ return; + \\ } + \\} + \\ + ); +} + const std = @import("std"); const mem = std.mem; const warn = std.debug.warn; From 717b0e827511b55375de82258f570709c07cc59d Mon Sep 17 00:00:00 2001 From: Andrew Kelley 
Date: Mon, 31 Aug 2020 23:34:58 -0700 Subject: [PATCH 07/35] stage2: introduce the ability for Scope.Block to be comptime This gives zir_sema analysis the ability to check if the current scope is expected to be comptime. --- src-self-hosted/Module.zig | 11 +++-- src-self-hosted/astgen.zig | 89 +++++++++++++++++++++++++++++------- src-self-hosted/ir.zig | 10 +++- src-self-hosted/zir.zig | 17 ++++++- src-self-hosted/zir_sema.zig | 58 +++++++++++++++++++++-- test/stage2/test.zig | 2 +- 6 files changed, 162 insertions(+), 25 deletions(-) diff --git a/src-self-hosted/Module.zig b/src-self-hosted/Module.zig index c476c307d2..78586dd096 100644 --- a/src-self-hosted/Module.zig +++ b/src-self-hosted/Module.zig @@ -725,6 +725,7 @@ pub const Scope = struct { /// Points to the arena allocator of DeclAnalysis arena: *Allocator, label: ?Label = null, + is_comptime: bool, pub const Label = struct { zir_block: *zir.Inst.Block, @@ -1320,6 +1321,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool { .decl = decl, .instructions = .{}, .arena = &decl_arena.allocator, + .is_comptime = false, }; defer block_scope.instructions.deinit(self.gpa); @@ -1457,6 +1459,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool { .decl = decl, .instructions = .{}, .arena = &decl_arena.allocator, + .is_comptime = true, }; defer block_scope.instructions.deinit(self.gpa); @@ -1528,7 +1531,6 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool { defer gen_scope.instructions.deinit(self.gpa); const src = tree.token_locs[init_node.firstToken()].start; - // TODO comptime scope here const init_inst = try astgen.expr(self, &gen_scope.base, .none, init_node); _ = try astgen.addZIRUnOp(self, &gen_scope.base, src, .@"return", init_inst); @@ -1538,6 +1540,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool { .decl = decl, .instructions = .{}, .arena = &gen_scope_arena.allocator, + .is_comptime = true, }; defer inner_block.instructions.deinit(self.gpa); try 
zir_sema.analyzeBody(self, &inner_block.base, .{ .instructions = gen_scope.instructions.items }); @@ -1628,8 +1631,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool { }; defer gen_scope.instructions.deinit(self.gpa); - // TODO comptime scope here - _ = try astgen.expr(self, &gen_scope.base, .none, comptime_decl.expr); + _ = try astgen.comptimeExpr(self, &gen_scope.base, .none, comptime_decl.expr); var block_scope: Scope.Block = .{ .parent = null, @@ -1637,6 +1639,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool { .decl = decl, .instructions = .{}, .arena = &analysis_arena.allocator, + .is_comptime = true, }; defer block_scope.instructions.deinit(self.gpa); @@ -2007,6 +2010,7 @@ fn analyzeFnBody(self: *Module, decl: *Decl, func: *Fn) !void { .decl = decl, .instructions = .{}, .arena = &arena.allocator, + .is_comptime = false, }; defer inner_block.instructions.deinit(self.gpa); @@ -3432,6 +3436,7 @@ pub fn addSafetyCheck(mod: *Module, parent_block: *Scope.Block, ok: *Inst, panic .decl = parent_block.decl, .instructions = .{}, .arena = parent_block.arena, + .is_comptime = parent_block.is_comptime, }; defer fail_block.instructions.deinit(mod.gpa); diff --git a/src-self-hosted/astgen.zig b/src-self-hosted/astgen.zig index 472f8deaa0..17db584e56 100644 --- a/src-self-hosted/astgen.zig +++ b/src-self-hosted/astgen.zig @@ -258,7 +258,7 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerEr .OptionalType => return rlWrap(mod, scope, rl, try optionalType(mod, scope, node.castTag(.OptionalType).?)), .UnwrapOptional => return unwrapOptional(mod, scope, rl, node.castTag(.UnwrapOptional).?), .Block => return rlWrapVoid(mod, scope, rl, node, try blockExpr(mod, scope, node.castTag(.Block).?)), - .LabeledBlock => return labeledBlockExpr(mod, scope, rl, node.castTag(.LabeledBlock).?), + .LabeledBlock => return labeledBlockExpr(mod, scope, rl, node.castTag(.LabeledBlock).?, .block), .Break => return rlWrap(mod, scope, rl, try 
breakExpr(mod, scope, node.castTag(.Break).?)), .PtrType => return rlWrap(mod, scope, rl, try ptrType(mod, scope, node.castTag(.PtrType).?)), .GroupedExpression => return expr(mod, scope, rl, node.castTag(.GroupedExpression).?.expr), @@ -276,6 +276,7 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerEr .For => return forExpr(mod, scope, rl, node.castTag(.For).?), .ArrayAccess => return arrayAccess(mod, scope, rl, node.castTag(.ArrayAccess).?), .Catch => return catchExpr(mod, scope, rl, node.castTag(.Catch).?), + .Comptime => return comptimeKeyword(mod, scope, rl, node.castTag(.Comptime).?), .Defer => return mod.failNode(scope, node, "TODO implement astgen.expr for .Defer", .{}), .Range => return mod.failNode(scope, node, "TODO implement astgen.expr for .Range", .{}), @@ -294,11 +295,46 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerEr .AnyType => return mod.failNode(scope, node, "TODO implement astgen.expr for .AnyType", .{}), .FnProto => return mod.failNode(scope, node, "TODO implement astgen.expr for .FnProto", .{}), .ContainerDecl => return mod.failNode(scope, node, "TODO implement astgen.expr for .ContainerDecl", .{}), - .Comptime => return mod.failNode(scope, node, "TODO implement astgen.expr for .Comptime", .{}), .Nosuspend => return mod.failNode(scope, node, "TODO implement astgen.expr for .Nosuspend", .{}), } } +fn comptimeKeyword(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Comptime) InnerError!*zir.Inst { + const tracy = trace(@src()); + defer tracy.end(); + + return comptimeExpr(mod, scope, rl, node.expr); +} + +pub fn comptimeExpr(mod: *Module, parent_scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerError!*zir.Inst { + const tree = parent_scope.tree(); + const src = tree.token_locs[node.firstToken()].start; + + // Optimization for labeled blocks: don't need to have 2 layers of blocks, we can reuse the existing one. 
+ if (node.castTag(.LabeledBlock)) |block_node| { + return labeledBlockExpr(mod, parent_scope, rl, block_node, .block_comptime); + } + + // Make a scope to collect generated instructions in the sub-expression. + var block_scope: Scope.GenZIR = .{ + .parent = parent_scope, + .decl = parent_scope.decl().?, + .arena = parent_scope.arena(), + .instructions = .{}, + }; + defer block_scope.instructions.deinit(mod.gpa); + + // No need to capture the result here because block_comptime_flat implies that the final + // instruction is the block's result value. + _ = try expr(mod, &block_scope.base, rl, node); + + const block = try addZIRInstBlock(mod, parent_scope, src, .block_comptime_flat, .{ + .instructions = try block_scope.arena.dupe(*zir.Inst, block_scope.instructions.items), + }); + + return &block.base; +} + fn breakExpr(mod: *Module, parent_scope: *Scope, node: *ast.Node.ControlFlowExpression) InnerError!*zir.Inst { const tree = parent_scope.tree(); const src = tree.token_locs[node.ltoken].start; @@ -360,10 +396,13 @@ fn labeledBlockExpr( parent_scope: *Scope, rl: ResultLoc, block_node: *ast.Node.LabeledBlock, + zir_tag: zir.Inst.Tag, ) InnerError!*zir.Inst { const tracy = trace(@src()); defer tracy.end(); + assert(zir_tag == .block or zir_tag == .block_comptime); + const tree = parent_scope.tree(); const src = tree.token_locs[block_node.lbrace].start; @@ -373,7 +412,7 @@ fn labeledBlockExpr( const block_inst = try gen_zir.arena.create(zir.Inst.Block); block_inst.* = .{ .base = .{ - .tag = .block, + .tag = zir_tag, .src = src, }, .positionals = .{ @@ -773,7 +812,7 @@ fn catchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Catch) .else_body = undefined, // populated below }, .{}); - const block = try addZIRInstBlock(mod, scope, src, .{ + const block = try addZIRInstBlock(mod, scope, src, .block, .{ .instructions = try block_scope.arena.dupe(*zir.Inst, block_scope.instructions.items), }); @@ -946,7 +985,7 @@ fn boolBinOp( .else_body = undefined, // 
populated below }, .{}); - const block = try addZIRInstBlock(mod, scope, src, .{ + const block = try addZIRInstBlock(mod, scope, src, .block, .{ .instructions = try block_scope.arena.dupe(*zir.Inst, block_scope.instructions.items), }); @@ -1095,7 +1134,7 @@ fn ifExpr(mod: *Module, scope: *Scope, rl: ResultLoc, if_node: *ast.Node.If) Inn .else_body = undefined, // populated below }, .{}); - const block = try addZIRInstBlock(mod, scope, if_src, .{ + const block = try addZIRInstBlock(mod, scope, if_src, .block, .{ .instructions = try block_scope.arena.dupe(*zir.Inst, block_scope.instructions.items), }); @@ -1218,7 +1257,7 @@ fn whileExpr(mod: *Module, scope: *Scope, rl: ResultLoc, while_node: *ast.Node.W .then_body = undefined, // populated below .else_body = undefined, // populated below }, .{}); - const cond_block = try addZIRInstBlock(mod, &loop_scope.base, while_src, .{ + const cond_block = try addZIRInstBlock(mod, &loop_scope.base, while_src, .block, .{ .instructions = try loop_scope.arena.dupe(*zir.Inst, continue_scope.instructions.items), }); // TODO avoid emitting the continue expr when there @@ -1231,7 +1270,7 @@ fn whileExpr(mod: *Module, scope: *Scope, rl: ResultLoc, while_node: *ast.Node.W const loop = try addZIRInstLoop(mod, &expr_scope.base, while_src, .{ .instructions = try expr_scope.arena.dupe(*zir.Inst, loop_scope.instructions.items), }); - const while_block = try addZIRInstBlock(mod, scope, while_src, .{ + const while_block = try addZIRInstBlock(mod, scope, while_src, .block, .{ .instructions = try expr_scope.arena.dupe(*zir.Inst, expr_scope.instructions.items), }); @@ -1365,7 +1404,7 @@ fn forExpr(mod: *Module, scope: *Scope, rl: ResultLoc, for_node: *ast.Node.For) .then_body = undefined, // populated below .else_body = undefined, // populated below }, .{}); - const cond_block = try addZIRInstBlock(mod, &loop_scope.base, for_src, .{ + const cond_block = try addZIRInstBlock(mod, &loop_scope.base, for_src, .block, .{ .instructions = try 
loop_scope.arena.dupe(*zir.Inst, cond_scope.instructions.items), }); @@ -1382,7 +1421,7 @@ fn forExpr(mod: *Module, scope: *Scope, rl: ResultLoc, for_node: *ast.Node.For) const loop = try addZIRInstLoop(mod, &for_scope.base, for_src, .{ .instructions = try for_scope.arena.dupe(*zir.Inst, loop_scope.instructions.items), }); - const for_block = try addZIRInstBlock(mod, scope, for_src, .{ + const for_block = try addZIRInstBlock(mod, scope, for_src, .block, .{ .instructions = try for_scope.arena.dupe(*zir.Inst, for_scope.instructions.items), }); @@ -2260,6 +2299,30 @@ pub fn addZIRBinOp( return &inst.base; } +pub fn addZIRInstBlock( + mod: *Module, + scope: *Scope, + src: usize, + tag: zir.Inst.Tag, + body: zir.Module.Body, +) !*zir.Inst.Block { + const gen_zir = scope.getGenZIR(); + try gen_zir.instructions.ensureCapacity(mod.gpa, gen_zir.instructions.items.len + 1); + const inst = try gen_zir.arena.create(zir.Inst.Block); + inst.* = .{ + .base = .{ + .tag = tag, + .src = src, + }, + .positionals = .{ + .body = body, + }, + .kw_args = .{}, + }; + gen_zir.instructions.appendAssumeCapacity(&inst.base); + return inst; +} + pub fn addZIRInst( mod: *Module, scope: *Scope, @@ -2278,12 +2341,6 @@ pub fn addZIRInstConst(mod: *Module, scope: *Scope, src: usize, typed_value: Typ return addZIRInst(mod, scope, src, zir.Inst.Const, P{ .typed_value = typed_value }, .{}); } -/// TODO The existence of this function is a workaround for a bug in stage1. -pub fn addZIRInstBlock(mod: *Module, scope: *Scope, src: usize, body: zir.Module.Body) !*zir.Inst.Block { - const P = std.meta.fieldInfo(zir.Inst.Block, "positionals").field_type; - return addZIRInstSpecial(mod, scope, src, zir.Inst.Block, P{ .body = body }, .{}); -} - /// TODO The existence of this function is a workaround for a bug in stage1. 
pub fn addZIRInstLoop(mod: *Module, scope: *Scope, src: usize, body: zir.Module.Body) !*zir.Inst.Loop { const P = std.meta.fieldInfo(zir.Inst.Loop, "positionals").field_type; diff --git a/src-self-hosted/ir.zig b/src-self-hosted/ir.zig index ff90c68d42..26afa52929 100644 --- a/src-self-hosted/ir.zig +++ b/src-self-hosted/ir.zig @@ -189,7 +189,7 @@ pub const Inst = struct { } pub fn cmpOperator(base: *Inst) ?std.math.CompareOperator { - return switch (self.base.tag) { + return switch (base.tag) { .cmp_lt => .lt, .cmp_lte => .lte, .cmp_eq => .eq, @@ -220,6 +220,14 @@ pub const Inst = struct { unreachable; } + pub fn breakBlock(base: *Inst) ?*Block { + return switch (base.tag) { + .br => base.castTag(.br).?.block, + .brvoid => base.castTag(.brvoid).?.block, + else => null, + }; + } + pub const NoOp = struct { base: Inst, diff --git a/src-self-hosted/zir.zig b/src-self-hosted/zir.zig index 4e8967f8dc..8915cb0f90 100644 --- a/src-self-hosted/zir.zig +++ b/src-self-hosted/zir.zig @@ -78,6 +78,13 @@ pub const Inst = struct { bitor, /// A labeled block of code, which can return a value. block, + /// A block of code, which can return a value. There are no instructions that break out of + /// this block; it is implied that the final instruction is the result. + block_flat, + /// Same as `block` but additionally makes the inner instructions execute at comptime. + block_comptime, + /// Same as `block_flat` but additionally makes the inner instructions execute at comptime. + block_comptime_flat, /// Boolean NOT. See also `bitnot`. boolnot, /// Return a value from a `Block`. 
@@ -338,9 +345,14 @@ pub const Inst = struct { .merge_error_sets, => BinOp, + .block, + .block_flat, + .block_comptime, + .block_comptime_flat, + => Block, + .arg => Arg, .array_type_sentinel => ArrayTypeSentinel, - .block => Block, .@"break" => Break, .breakvoid => BreakVoid, .call => Call, @@ -392,6 +404,9 @@ pub const Inst = struct { .bitcast_result_ptr, .bitor, .block, + .block_flat, + .block_comptime, + .block_comptime_flat, .boolnot, .breakpoint, .call, diff --git a/src-self-hosted/zir_sema.zig b/src-self-hosted/zir_sema.zig index 2ac14f8bb4..2f2f1ec1bb 100644 --- a/src-self-hosted/zir_sema.zig +++ b/src-self-hosted/zir_sema.zig @@ -31,7 +31,10 @@ pub fn analyzeInst(mod: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError! .arg => return analyzeInstArg(mod, scope, old_inst.castTag(.arg).?), .bitcast_ref => return analyzeInstBitCastRef(mod, scope, old_inst.castTag(.bitcast_ref).?), .bitcast_result_ptr => return analyzeInstBitCastResultPtr(mod, scope, old_inst.castTag(.bitcast_result_ptr).?), - .block => return analyzeInstBlock(mod, scope, old_inst.castTag(.block).?), + .block => return analyzeInstBlock(mod, scope, old_inst.castTag(.block).?, false), + .block_comptime => return analyzeInstBlock(mod, scope, old_inst.castTag(.block_comptime).?, true), + .block_flat => return analyzeInstBlockFlat(mod, scope, old_inst.castTag(.block_flat).?, false), + .block_comptime_flat => return analyzeInstBlockFlat(mod, scope, old_inst.castTag(.block_comptime_flat).?, true), .@"break" => return analyzeInstBreak(mod, scope, old_inst.castTag(.@"break").?), .breakpoint => return analyzeInstBreakpoint(mod, scope, old_inst.castTag(.breakpoint).?), .breakvoid => return analyzeInstBreakVoid(mod, scope, old_inst.castTag(.breakvoid).?), @@ -147,6 +150,7 @@ pub fn analyzeBody(mod: *Module, scope: *Scope, body: zir.Module.Body) !void { } } +/// TODO improve this to use .block_comptime_flat pub fn analyzeBodyValueAsType(mod: *Module, block_scope: *Scope.Block, body: zir.Module.Body) 
!Type { try analyzeBody(mod, &block_scope.base, body); for (block_scope.instructions.items) |inst| { @@ -517,6 +521,7 @@ fn analyzeInstLoop(mod: *Module, scope: *Scope, inst: *zir.Inst.Loop) InnerError .decl = parent_block.decl, .instructions = .{}, .arena = parent_block.arena, + .is_comptime = parent_block.is_comptime, }; defer child_block.instructions.deinit(mod.gpa); @@ -529,7 +534,29 @@ fn analyzeInstLoop(mod: *Module, scope: *Scope, inst: *zir.Inst.Loop) InnerError return &loop_inst.base; } -fn analyzeInstBlock(mod: *Module, scope: *Scope, inst: *zir.Inst.Block) InnerError!*Inst { +fn analyzeInstBlockFlat(mod: *Module, scope: *Scope, inst: *zir.Inst.Block, is_comptime: bool) InnerError!*Inst { + const parent_block = scope.cast(Scope.Block).?; + + var child_block: Scope.Block = .{ + .parent = parent_block, + .func = parent_block.func, + .decl = parent_block.decl, + .instructions = .{}, + .arena = parent_block.arena, + .label = null, + .is_comptime = parent_block.is_comptime or is_comptime, + }; + defer child_block.instructions.deinit(mod.gpa); + + try analyzeBody(mod, &child_block.base, inst.positionals.body); + + const copied_instructions = try parent_block.arena.dupe(*Inst, child_block.instructions.items); + try parent_block.instructions.appendSlice(mod.gpa, copied_instructions); + + return copied_instructions[copied_instructions.len - 1]; +} + +fn analyzeInstBlock(mod: *Module, scope: *Scope, inst: *zir.Inst.Block, is_comptime: bool) InnerError!*Inst { const parent_block = scope.cast(Scope.Block).?; // Reserve space for a Block instruction so that generated Break instructions can @@ -557,6 +584,7 @@ fn analyzeInstBlock(mod: *Module, scope: *Scope, inst: *zir.Inst.Block) InnerErr .results = .{}, .block_inst = block_inst, }), + .is_comptime = is_comptime or parent_block.is_comptime, }; const label = &child_block.label.?; @@ -569,6 +597,28 @@ fn analyzeInstBlock(mod: *Module, scope: *Scope, inst: *zir.Inst.Block) InnerErr 
assert(child_block.instructions.items.len != 0); assert(child_block.instructions.items[child_block.instructions.items.len - 1].ty.isNoReturn()); + if (label.results.items.len == 0) { + // No need for a block instruction. We can put the new instructions directly into the parent block. + const copied_instructions = try parent_block.arena.dupe(*Inst, child_block.instructions.items); + try parent_block.instructions.appendSlice(mod.gpa, copied_instructions); + return copied_instructions[copied_instructions.len - 1]; + } + if (label.results.items.len == 1) { + const last_inst_index = child_block.instructions.items.len - 1; + const last_inst = child_block.instructions.items[last_inst_index]; + if (last_inst.breakBlock()) |br_block| { + if (br_block == block_inst) { + // No need for a block instruction. We can put the new instructions directly into the parent block. + // Here we omit the break instruction. + const copied_instructions = try parent_block.arena.dupe(*Inst, child_block.instructions.items[0..last_inst_index]); + try parent_block.instructions.appendSlice(mod.gpa, copied_instructions); + return label.results.items[0]; + } + } + } + // It should be impossible to have the number of results be > 1 in a comptime scope. + assert(!child_block.is_comptime); // We should have already got a compile error in the condbr condition. + // Need to set the type and emit the Block instruction. This allows machine code generation // to emit a jump instruction to after the block when it encounters the break. 
try parent_block.instructions.append(mod.gpa, &block_inst.base); @@ -1083,7 +1133,7 @@ fn analyzeInstElemPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.ElemPtr) Inne const array_ptr = try resolveInst(mod, scope, inst.positionals.array_ptr); const uncasted_index = try resolveInst(mod, scope, inst.positionals.index); const elem_index = try mod.coerce(scope, Type.initTag(.usize), uncasted_index); - + const elem_ty = switch (array_ptr.ty.zigTypeTag()) { .Pointer => array_ptr.ty.elemType(), else => return mod.fail(scope, inst.positionals.array_ptr.src, "expected pointer, found '{}'", .{array_ptr.ty}), @@ -1376,6 +1426,7 @@ fn analyzeInstCondBr(mod: *Module, scope: *Scope, inst: *zir.Inst.CondBr) InnerE .decl = parent_block.decl, .instructions = .{}, .arena = parent_block.arena, + .is_comptime = parent_block.is_comptime, }; defer true_block.instructions.deinit(mod.gpa); try analyzeBody(mod, &true_block.base, inst.positionals.then_body); @@ -1386,6 +1437,7 @@ fn analyzeInstCondBr(mod: *Module, scope: *Scope, inst: *zir.Inst.CondBr) InnerE .decl = parent_block.decl, .instructions = .{}, .arena = parent_block.arena, + .is_comptime = parent_block.is_comptime, }; defer false_block.instructions.deinit(mod.gpa); try analyzeBody(mod, &false_block.base, inst.positionals.else_body); diff --git a/test/stage2/test.zig b/test/stage2/test.zig index 50203c7ee9..c8f8c19cf7 100644 --- a/test/stage2/test.zig +++ b/test/stage2/test.zig @@ -274,7 +274,7 @@ pub fn addCases(ctx: *TestContext) !void { } { - var case = ctx.exe("substracting numbers at runtime", linux_x64); + var case = ctx.exe("subtracting numbers at runtime", linux_x64); case.addCompareOutput( \\export fn _start() noreturn { \\ sub(7, 4); From 7841c9b7d105a62ccb55f4f6589d5ff31194a013 Mon Sep 17 00:00:00 2001 From: Lachlan Easton Date: Tue, 1 Sep 2020 21:59:27 +1000 Subject: [PATCH 08/35] zig fmt: Fix merge errors --- lib/std/zig/render.zig | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git 
a/lib/std/zig/render.zig b/lib/std/zig/render.zig index eb6df45c20..b7a2b8675a 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -1425,7 +1425,7 @@ fn renderExpression( // TODO remove after 0.7.0 release if (mem.eql(u8, tree.tokenSlice(builtin_call.builtin_token), "@OpaqueType")) - return stream.writeAll("@Type(.Opaque)"); + return stream.writer().writeAll("@Type(.Opaque)"); try renderToken(tree, stream, builtin_call.builtin_token, Space.None); // @name @@ -2184,11 +2184,11 @@ fn renderVarDecl( Space.None; try renderToken(tree, stream, var_decl.name_token, name_space); - if (var_decl.getTrailer("type_node")) |type_node| { + if (var_decl.getTypeNode()) |type_node| { try renderToken(tree, stream, tree.nextToken(var_decl.name_token), Space.Space); - const s = if (var_decl.getTrailer("align_node") != null or - var_decl.getTrailer("section_node") != null or - var_decl.getTrailer("init_node") != null) Space.Space else Space.None; + const s = if (var_decl.getAlignNode() != null or + var_decl.getSectionNode() != null or + var_decl.getInitNode() != null) Space.Space else Space.None; try renderExpression(allocator, stream, tree, type_node, s); } From c51b871c4516003e8d2c84e7e1c36124c3797f5c Mon Sep 17 00:00:00 2001 From: LemonBoy Date: Tue, 1 Sep 2020 17:29:10 +0200 Subject: [PATCH 09/35] ir: Typecheck the sentinel value in *[N:S1]T to [S2]T casts Closes #6054 --- src/ir.cpp | 7 ++++++- test/compile_errors.zig | 8 ++++++++ 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/src/ir.cpp b/src/ir.cpp index 692dd392e1..36be78ed7c 100644 --- a/src/ir.cpp +++ b/src/ir.cpp @@ -15341,9 +15341,14 @@ static IrInstGen *ir_analyze_cast(IrAnalyze *ira, IrInst *source_instr, ZigType *array_type = actual_type->data.pointer.child_type; bool const_ok = (slice_ptr_type->data.pointer.is_const || array_type->data.array.len == 0 || !actual_type->data.pointer.is_const); + if (const_ok && types_match_const_cast_only(ira, slice_ptr_type->data.pointer.child_type, 
array_type->data.array.child_type, source_node, - !slice_ptr_type->data.pointer.is_const).id == ConstCastResultIdOk) + !slice_ptr_type->data.pointer.is_const).id == ConstCastResultIdOk && + (slice_ptr_type->data.pointer.sentinel == nullptr || + (array_type->data.array.sentinel != nullptr && + const_values_equal(ira->codegen, array_type->data.array.sentinel, + slice_ptr_type->data.pointer.sentinel)))) { // If the pointers both have ABI align, it works. // Or if the array length is 0, alignment doesn't matter. diff --git a/test/compile_errors.zig b/test/compile_errors.zig index 9e81ed27a7..f6e00e1dbb 100644 --- a/test/compile_errors.zig +++ b/test/compile_errors.zig @@ -2,6 +2,14 @@ const tests = @import("tests.zig"); const std = @import("std"); pub fn addCases(cases: *tests.CompileErrorContext) void { + cases.add("slice sentinel mismatch", + \\export fn entry() void { + \\ const y: [:1]const u8 = &[_:2]u8{ 1, 2 }; + \\} + , &[_][]const u8{ + "tmp.zig:2:37: error: expected type '[:1]const u8', found '*const [2:2]u8'", + }); + cases.add("@Type with undefined", \\comptime { \\ _ = @Type(.{ .Array = .{ .len = 0, .child = u8, .sentinel = undefined } }); From dd4994a4e4379454f6b58779276f1b6aa9ed6e1b Mon Sep 17 00:00:00 2001 From: LemonBoy Date: Tue, 1 Sep 2020 18:45:35 +0200 Subject: [PATCH 10/35] std: Fix C-string with missing NUL terminator Spotted thanks to the stricter conversion rules. 
--- lib/std/net.zig | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/std/net.zig b/lib/std/net.zig index 10e5b371f8..5a1407c35f 100644 --- a/lib/std/net.zig +++ b/lib/std/net.zig @@ -1164,7 +1164,7 @@ fn linuxLookupNameFromDnsSearch( } const search = if (rc.search.isNull() or dots >= rc.ndots or mem.endsWith(u8, name, ".")) - &[_]u8{} + "" else rc.search.span(); From baa734c42a2bdb3f63fd26dd30bc8c9a846831bf Mon Sep 17 00:00:00 2001 From: pwzk Date: Mon, 31 Aug 2020 17:09:32 +0000 Subject: [PATCH 11/35] Fixing dylib search path --- cmake/Findclang.cmake | 2 ++ cmake/Findllvm.cmake | 2 ++ 2 files changed, 4 insertions(+) diff --git a/cmake/Findclang.cmake b/cmake/Findclang.cmake index 534d740c00..8acc29faec 100644 --- a/cmake/Findclang.cmake +++ b/cmake/Findclang.cmake @@ -25,6 +25,8 @@ if(ZIG_PREFER_CLANG_CPP_DYLIB) clang-cpp PATHS ${CLANG_LIBDIRS} + /usr/lib/llvm/10/lib + /usr/lib/llvm/10/lib64 /usr/lib/llvm-10/lib /usr/local/llvm100/lib /usr/local/llvm10/lib diff --git a/cmake/Findllvm.cmake b/cmake/Findllvm.cmake index 5228710dcb..6bee52c315 100644 --- a/cmake/Findllvm.cmake +++ b/cmake/Findllvm.cmake @@ -26,6 +26,8 @@ if(ZIG_PREFER_CLANG_CPP_DYLIB) LLVM PATHS ${LLVM_LIBDIRS} + /usr/lib/llvm/10/lib + /usr/lib/llvm/10/lib64 /usr/lib/llvm-10/lib /usr/local/llvm10/lib /usr/local/llvm100/lib From 4c13d020dbecbd7664b99765de33f230e98f3322 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Tue, 1 Sep 2020 12:39:47 -0700 Subject: [PATCH 12/35] stage2: proper split of requireRuntimeBlock and requireFunctionBlock * improve the ZIR generated of variable decls - utilize the same ZIR for the type and init value when possible - init value gets a result location with the variable type. no manual coercion is required. * no longer use return instructions to extract values out of comptime blocks. 
Instead run the analysis and then look at the corresponding analyzed instruction, relying on the comptime mechanism to report errors when something could not be comptime evaluated. --- src-self-hosted/Module.zig | 128 ++++++++++++++++++----------------- src-self-hosted/test.zig | 22 +++--- src-self-hosted/zir_sema.zig | 22 +++--- test/stage2/test.zig | 21 ++++-- 4 files changed, 101 insertions(+), 92 deletions(-) diff --git a/src-self-hosted/Module.zig b/src-self-hosted/Module.zig index 78586dd096..72597975c9 100644 --- a/src-self-hosted/Module.zig +++ b/src-self-hosted/Module.zig @@ -1308,7 +1308,6 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool { .return_type = return_type_inst, .param_types = param_types, }, .{}); - _ = try astgen.addZIRUnOp(self, &fn_type_scope.base, fn_src, .@"return", fn_type_inst); // We need the memory for the Type to go into the arena for the Decl var decl_arena = std.heap.ArenaAllocator.init(self.gpa); @@ -1325,7 +1324,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool { }; defer block_scope.instructions.deinit(self.gpa); - const fn_type = try zir_sema.analyzeBodyValueAsType(self, &block_scope, .{ + const fn_type = try zir_sema.analyzeBodyValueAsType(self, &block_scope, fn_type_inst, .{ .instructions = fn_type_scope.instructions.items, }); const new_func = try decl_arena.allocator.create(Fn); @@ -1492,10 +1491,53 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool { return self.failNode(&block_scope.base, sect_expr, "TODO implement function section expression", .{}); } - const explicit_type = blk: { - const type_node = var_decl.getTypeNode() orelse - break :blk null; + const var_info: struct { ty: Type, val: ?Value } = if (var_decl.getInitNode()) |init_node| vi: { + var gen_scope_arena = std.heap.ArenaAllocator.init(self.gpa); + defer gen_scope_arena.deinit(); + var gen_scope: Scope.GenZIR = .{ + .decl = decl, + .arena = &gen_scope_arena.allocator, + .parent = decl.scope, + }; + defer 
gen_scope.instructions.deinit(self.gpa); + const init_result_loc: astgen.ResultLoc = if (var_decl.getTypeNode()) |type_node| rl: { + const src = tree.token_locs[type_node.firstToken()].start; + const type_type = try astgen.addZIRInstConst(self, &gen_scope.base, src, .{ + .ty = Type.initTag(.type), + .val = Value.initTag(.type_type), + }); + const var_type = try astgen.expr(self, &gen_scope.base, .{ .ty = type_type }, type_node); + break :rl .{ .ty = var_type }; + } else .none; + + const src = tree.token_locs[init_node.firstToken()].start; + const init_inst = try astgen.expr(self, &gen_scope.base, init_result_loc, init_node); + + var inner_block: Scope.Block = .{ + .parent = null, + .func = null, + .decl = decl, + .instructions = .{}, + .arena = &gen_scope_arena.allocator, + .is_comptime = true, + }; + defer inner_block.instructions.deinit(self.gpa); + try zir_sema.analyzeBody(self, &inner_block.base, .{ .instructions = gen_scope.instructions.items }); + + // The result location guarantees the type coercion. + const analyzed_init_inst = init_inst.analyzed_inst.?; + // The is_comptime in the Scope.Block guarantees the result is comptime-known. + const val = analyzed_init_inst.value().?; + + const ty = try analyzed_init_inst.ty.copy(block_scope.arena); + break :vi .{ + .ty = ty, + .val = try val.copy(block_scope.arena), + }; + } else if (!is_extern) { + return self.failTok(&block_scope.base, var_decl.firstToken(), "variables must be initialized", .{}); + } else if (var_decl.getTypeNode()) |type_node| vi: { // Temporary arena for the zir instructions. 
var type_scope_arena = std.heap.ArenaAllocator.init(self.gpa); defer type_scope_arena.deinit(); @@ -1512,71 +1554,24 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool { .val = Value.initTag(.type_type), }); const var_type = try astgen.expr(self, &type_scope.base, .{ .ty = type_type }, type_node); - _ = try astgen.addZIRUnOp(self, &type_scope.base, src, .@"return", var_type); - - break :blk try zir_sema.analyzeBodyValueAsType(self, &block_scope, .{ + const ty = try zir_sema.analyzeBodyValueAsType(self, &block_scope, var_type, .{ .instructions = type_scope.instructions.items, }); - }; - - var var_type: Type = undefined; - const value: ?Value = if (var_decl.getInitNode()) |init_node| blk: { - var gen_scope_arena = std.heap.ArenaAllocator.init(self.gpa); - defer gen_scope_arena.deinit(); - var gen_scope: Scope.GenZIR = .{ - .decl = decl, - .arena = &gen_scope_arena.allocator, - .parent = decl.scope, + break :vi .{ + .ty = ty, + .val = null, }; - defer gen_scope.instructions.deinit(self.gpa); - const src = tree.token_locs[init_node.firstToken()].start; - - const init_inst = try astgen.expr(self, &gen_scope.base, .none, init_node); - _ = try astgen.addZIRUnOp(self, &gen_scope.base, src, .@"return", init_inst); - - var inner_block: Scope.Block = .{ - .parent = null, - .func = null, - .decl = decl, - .instructions = .{}, - .arena = &gen_scope_arena.allocator, - .is_comptime = true, - }; - defer inner_block.instructions.deinit(self.gpa); - try zir_sema.analyzeBody(self, &inner_block.base, .{ .instructions = gen_scope.instructions.items }); - - for (inner_block.instructions.items) |inst| { - if (inst.castTag(.ret)) |ret| { - const coerced = if (explicit_type) |some| - try self.coerce(&inner_block.base, some, ret.operand) - else - ret.operand; - const val = coerced.value() orelse - return self.fail(&block_scope.base, inst.src, "unable to resolve comptime value", .{}); - - var_type = explicit_type orelse try ret.operand.ty.copy(block_scope.arena); - break :blk try 
val.copy(block_scope.arena); - } else { - return self.fail(&block_scope.base, inst.src, "unable to resolve comptime value", .{}); - } - } - unreachable; - } else if (!is_extern) { - return self.failTok(&block_scope.base, var_decl.firstToken(), "variables must be initialized", .{}); - } else if (explicit_type) |some| blk: { - var_type = some; - break :blk null; } else { return self.failTok(&block_scope.base, var_decl.firstToken(), "unable to infer variable type", .{}); }; - if (is_mutable and !var_type.isValidVarType(is_extern)) { - return self.failTok(&block_scope.base, var_decl.firstToken(), "variable of type '{}' must be const", .{var_type}); + if (is_mutable and !var_info.ty.isValidVarType(is_extern)) { + return self.failTok(&block_scope.base, var_decl.firstToken(), "variable of type '{}' must be const", .{var_info.ty}); } var type_changed = true; if (decl.typedValueManaged()) |tvm| { - type_changed = !tvm.typed_value.ty.eql(var_type); + type_changed = !tvm.typed_value.ty.eql(var_info.ty); tvm.deinit(self.gpa); } @@ -1585,7 +1580,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool { const var_payload = try decl_arena.allocator.create(Value.Payload.Variable); new_variable.* = .{ .owner_decl = decl, - .init = value orelse undefined, + .init = var_info.val orelse undefined, .is_extern = is_extern, .is_mutable = is_mutable, .is_threadlocal = is_threadlocal, @@ -1596,7 +1591,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool { decl.typed_value = .{ .most_recent = .{ .typed_value = .{ - .ty = var_type, + .ty = var_info.ty, .val = Value.initPayload(&var_payload.base), }, .arena = decl_arena_state, @@ -2096,12 +2091,19 @@ pub fn getErrorValue(self: *Module, name: []const u8) !std.StringHashMapUnmanage return gop.entry.*; } -/// TODO split this into `requireRuntimeBlock` and `requireFunctionBlock` and audit callsites. 
-pub fn requireRuntimeBlock(self: *Module, scope: *Scope, src: usize) !*Scope.Block { +pub fn requireFunctionBlock(self: *Module, scope: *Scope, src: usize) !*Scope.Block { return scope.cast(Scope.Block) orelse return self.fail(scope, src, "instruction illegal outside function body", .{}); } +pub fn requireRuntimeBlock(self: *Module, scope: *Scope, src: usize) !*Scope.Block { + const block = try self.requireFunctionBlock(scope, src); + if (block.is_comptime) { + return self.fail(scope, src, "unable to resolve comptime value", .{}); + } + return block; +} + pub fn resolveConstValue(self: *Module, scope: *Scope, base: *Inst) !Value { return (try self.resolveDefinedValue(scope, base)) orelse return self.fail(scope, base.src, "unable to resolve comptime value", .{}); diff --git a/src-self-hosted/test.zig b/src-self-hosted/test.zig index f9c9121817..aef48e198b 100644 --- a/src-self-hosted/test.zig +++ b/src-self-hosted/test.zig @@ -474,15 +474,15 @@ pub const TestContext = struct { var all_errors = try module.getAllErrorsAlloc(); defer all_errors.deinit(allocator); if (all_errors.list.len != 0) { - std.debug.warn("\nErrors occurred updating the module:\n================\n", .{}); + std.debug.print("\nErrors occurred updating the module:\n================\n", .{}); for (all_errors.list) |err| { - std.debug.warn(":{}:{}: error: {}\n================\n", .{ err.line + 1, err.column + 1, err.msg }); + std.debug.print(":{}:{}: error: {}\n================\n", .{ err.line + 1, err.column + 1, err.msg }); } if (case.cbe) { const C = module.bin_file.cast(link.File.C).?; - std.debug.warn("Generated C: \n===============\n{}\n\n===========\n\n", .{C.main.items}); + std.debug.print("Generated C: \n===============\n{}\n\n===========\n\n", .{C.main.items}); } - std.debug.warn("Test failed.\n", .{}); + std.debug.print("Test failed.\n", .{}); std.process.exit(1); } } @@ -497,12 +497,12 @@ pub const TestContext = struct { var out = file.reader().readAllAlloc(arena, 1024 * 1024) catch 
@panic("Unable to read C output!"); if (expected_output.len != out.len) { - std.debug.warn("\nTransformed C length differs:\n================\nExpected:\n================\n{}\n================\nFound:\n================\n{}\n================\nTest failed.\n", .{ expected_output, out }); + std.debug.print("\nTransformed C length differs:\n================\nExpected:\n================\n{}\n================\nFound:\n================\n{}\n================\nTest failed.\n", .{ expected_output, out }); std.process.exit(1); } for (expected_output) |e, i| { if (out[i] != e) { - std.debug.warn("\nTransformed C differs:\n================\nExpected:\n================\n{}\n================\nFound:\n================\n{}\n================\nTest failed.\n", .{ expected_output, out }); + std.debug.print("\nTransformed C differs:\n================\nExpected:\n================\n{}\n================\nFound:\n================\n{}\n================\nTest failed.\n", .{ expected_output, out }); std.process.exit(1); } } @@ -526,12 +526,12 @@ pub const TestContext = struct { defer test_node.end(); if (expected_output.len != out_zir.items.len) { - std.debug.warn("{}\nTransformed ZIR length differs:\n================\nExpected:\n================\n{}\n================\nFound:\n================\n{}\n================\nTest failed.\n", .{ case.name, expected_output, out_zir.items }); + std.debug.print("{}\nTransformed ZIR length differs:\n================\nExpected:\n================\n{}\n================\nFound:\n================\n{}\n================\nTest failed.\n", .{ case.name, expected_output, out_zir.items }); std.process.exit(1); } for (expected_output) |e, i| { if (out_zir.items[i] != e) { - std.debug.warn("{}\nTransformed ZIR differs:\n================\nExpected:\n================\n{}\n================\nFound:\n================\n{}\n================\nTest failed.\n", .{ case.name, expected_output, out_zir.items }); + std.debug.print("{}\nTransformed ZIR 
differs:\n================\nExpected:\n================\n{}\n================\nFound:\n================\n{}\n================\nTest failed.\n", .{ case.name, expected_output, out_zir.items }); std.process.exit(1); } } @@ -554,7 +554,7 @@ pub const TestContext = struct { break; } } else { - std.debug.warn("{}\nUnexpected error:\n================\n:{}:{}: error: {}\n================\nTest failed.\n", .{ case.name, a.line + 1, a.column + 1, a.msg }); + std.debug.print("{}\nUnexpected error:\n================\n:{}:{}: error: {}\n================\nTest failed.\n", .{ case.name, a.line + 1, a.column + 1, a.msg }); std.process.exit(1); } } @@ -562,7 +562,7 @@ pub const TestContext = struct { for (handled_errors) |h, i| { if (!h) { const er = e[i]; - std.debug.warn("{}\nDid not receive error:\n================\n{}:{}: {}\n================\nTest failed.\n", .{ case.name, er.line, er.column, er.msg }); + std.debug.print("{}\nDid not receive error:\n================\n{}:{}: {}\n================\nTest failed.\n", .{ case.name, er.line, er.column, er.msg }); std.process.exit(1); } } @@ -643,7 +643,7 @@ pub const TestContext = struct { switch (exec_result.term) { .Exited => |code| { if (code != 0) { - std.debug.warn("elf file exited with code {}\n", .{code}); + std.debug.print("elf file exited with code {}\n", .{code}); return error.BinaryBadExitCode; } }, diff --git a/src-self-hosted/zir_sema.zig b/src-self-hosted/zir_sema.zig index 2f2f1ec1bb..b4dafac1da 100644 --- a/src-self-hosted/zir_sema.zig +++ b/src-self-hosted/zir_sema.zig @@ -150,18 +150,16 @@ pub fn analyzeBody(mod: *Module, scope: *Scope, body: zir.Module.Body) !void { } } -/// TODO improve this to use .block_comptime_flat -pub fn analyzeBodyValueAsType(mod: *Module, block_scope: *Scope.Block, body: zir.Module.Body) !Type { +pub fn analyzeBodyValueAsType( + mod: *Module, + block_scope: *Scope.Block, + zir_result_inst: *zir.Inst, + body: zir.Module.Body, +) !Type { try analyzeBody(mod, &block_scope.base, body); - for 
(block_scope.instructions.items) |inst| { - if (inst.castTag(.ret)) |ret| { - const val = try mod.resolveConstValue(&block_scope.base, ret.operand); - return val.toType(block_scope.base.arena()); - } else { - return mod.fail(&block_scope.base, inst.src, "unable to resolve comptime value", .{}); - } - } - unreachable; + const result_inst = zir_result_inst.analyzed_inst.?; + const val = try mod.resolveConstValue(&block_scope.base, result_inst); + return val.toType(block_scope.base.arena()); } pub fn analyzeZirDecl(mod: *Module, decl: *Decl, src_decl: *zir.Decl) InnerError!bool { @@ -366,7 +364,7 @@ fn analyzeInstRef(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError! } fn analyzeInstRetType(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerError!*Inst { - const b = try mod.requireRuntimeBlock(scope, inst.base.src); + const b = try mod.requireFunctionBlock(scope, inst.base.src); const fn_ty = b.func.?.owner_decl.typed_value.most_recent.typed_value.ty; const ret_type = fn_ty.fnReturnType(); return mod.constType(scope, inst.base.src, ret_type); diff --git a/test/stage2/test.zig b/test/stage2/test.zig index c8f8c19cf7..b631e37b97 100644 --- a/test/stage2/test.zig +++ b/test/stage2/test.zig @@ -967,10 +967,19 @@ pub fn addCases(ctx: *TestContext) !void { \\fn entry() void {} , &[_][]const u8{":2:4: error: redefinition of 'entry'"}); - ctx.compileError("extern variable has no type", linux_x64, - \\comptime { - \\ _ = foo; - \\} - \\extern var foo; - , &[_][]const u8{":4:1: error: unable to infer variable type"}); + { + var case = ctx.obj("extern variable has no type", linux_x64); + case.addError( + \\comptime { + \\ _ = foo; + \\} + \\extern var foo; + , &[_][]const u8{":2:5: error: unable to resolve comptime value"}); + case.addError( + \\export fn entry() void { + \\ _ = foo; + \\} + \\extern var foo; + , &[_][]const u8{":4:1: error: unable to infer variable type"}); + } } From 575fbd5e3592cff70cbfc5153884d919e6bed89f Mon Sep 17 00:00:00 2001 From: 
Sahnvour Date: Sun, 2 Aug 2020 23:24:03 +0200 Subject: [PATCH 13/35] hash_map: rename to ArrayHashMap and add new HashMap implementation --- lib/std/array_hash_map.zig | 1087 ++++++++++++++ lib/std/buf_set.zig | 3 +- lib/std/hash_map.zig | 1563 +++++++++++--------- lib/std/heap/general_purpose_allocator.zig | 5 +- lib/std/http/headers.zig | 9 +- lib/std/std.zig | 7 + src-self-hosted/Module.zig | 27 +- src-self-hosted/codegen.zig | 8 +- src-self-hosted/codegen/c.zig | 3 +- src-self-hosted/link.zig | 2 +- src-self-hosted/link/Elf.zig | 9 +- src-self-hosted/liveness.zig | 41 +- src-self-hosted/translate_c.zig | 25 +- src-self-hosted/type.zig | 4 +- src-self-hosted/value.zig | 3 +- src-self-hosted/zir.zig | 6 +- src-self-hosted/zir_sema.zig | 2 +- 17 files changed, 2027 insertions(+), 777 deletions(-) create mode 100644 lib/std/array_hash_map.zig diff --git a/lib/std/array_hash_map.zig b/lib/std/array_hash_map.zig new file mode 100644 index 0000000000..f8c3623ef2 --- /dev/null +++ b/lib/std/array_hash_map.zig @@ -0,0 +1,1087 @@ +// SPDX-License-Identifier: MIT +// Copyright (c) 2015-2020 Zig Contributors +// This file is part of [zig](https://ziglang.org/), which is MIT licensed. +// The MIT license requires this copyright notice to be included in all copies +// and substantial portions of the software. 
+const std = @import("std.zig"); +const debug = std.debug; +const assert = debug.assert; +const testing = std.testing; +const math = std.math; +const mem = std.mem; +const meta = std.meta; +const trait = meta.trait; +const autoHash = std.hash.autoHash; +const Wyhash = std.hash.Wyhash; +const Allocator = mem.Allocator; +const builtin = @import("builtin"); +const hash_map = @This(); + +pub fn AutoArrayHashMap(comptime K: type, comptime V: type) type { + return ArrayHashMap(K, V, getAutoHashFn(K), getAutoEqlFn(K), autoEqlIsCheap(K)); +} + +pub fn AutoArrayHashMapUnmanaged(comptime K: type, comptime V: type) type { + return ArrayHashMapUnmanaged(K, V, getAutoHashFn(K), getAutoEqlFn(K), autoEqlIsCheap(K)); +} + +/// Builtin hashmap for strings as keys. +pub fn StringArrayHashMap(comptime V: type) type { + return ArrayHashMap([]const u8, V, hashString, eqlString, true); +} + +pub fn StringArrayHashMapUnmanaged(comptime V: type) type { + return ArrayHashMapUnmanaged([]const u8, V, hashString, eqlString, true); +} + +pub fn eqlString(a: []const u8, b: []const u8) bool { + return mem.eql(u8, a, b); +} + +pub fn hashString(s: []const u8) u32 { + return @truncate(u32, std.hash.Wyhash.hash(0, s)); +} + +/// Insertion order is preserved. +/// Deletions perform a "swap removal" on the entries list. +/// Modifying the hash map while iterating is allowed, however one must understand +/// the (well defined) behavior when mixing insertions and deletions with iteration. +/// For a hash map that can be initialized directly that does not store an Allocator +/// field, see `ArrayHashMapUnmanaged`. +/// When `store_hash` is `false`, this data structure is biased towards cheap `eql` +/// functions. It does not store each item's hash in the table. Setting `store_hash` +/// to `true` incurs slightly more memory cost by storing each key's hash in the table +/// but only has to call `eql` for hash collisions. 
+/// If typical operations (except iteration over entries) need to be faster, prefer +/// the alternative `std.HashMap`. +pub fn ArrayHashMap( + comptime K: type, + comptime V: type, + comptime hash: fn (key: K) u32, + comptime eql: fn (a: K, b: K) bool, + comptime store_hash: bool, +) type { + return struct { + unmanaged: Unmanaged, + allocator: *Allocator, + + pub const Unmanaged = ArrayHashMapUnmanaged(K, V, hash, eql, store_hash); + pub const Entry = Unmanaged.Entry; + pub const Hash = Unmanaged.Hash; + pub const GetOrPutResult = Unmanaged.GetOrPutResult; + + /// Deprecated. Iterate using `items`. + pub const Iterator = struct { + hm: *const Self, + /// Iterator through the entry array. + index: usize, + + pub fn next(it: *Iterator) ?*Entry { + if (it.index >= it.hm.unmanaged.entries.items.len) return null; + const result = &it.hm.unmanaged.entries.items[it.index]; + it.index += 1; + return result; + } + + /// Reset the iterator to the initial index + pub fn reset(it: *Iterator) void { + it.index = 0; + } + }; + + const Self = @This(); + const Index = Unmanaged.Index; + + pub fn init(allocator: *Allocator) Self { + return .{ + .unmanaged = .{}, + .allocator = allocator, + }; + } + + pub fn deinit(self: *Self) void { + self.unmanaged.deinit(self.allocator); + self.* = undefined; + } + + pub fn clearRetainingCapacity(self: *Self) void { + return self.unmanaged.clearRetainingCapacity(); + } + + pub fn clearAndFree(self: *Self) void { + return self.unmanaged.clearAndFree(self.allocator); + } + + /// Deprecated. Use `items().len`. + pub fn count(self: Self) usize { + return self.items().len; + } + + /// Deprecated. Iterate using `items`. + pub fn iterator(self: *const Self) Iterator { + return Iterator{ + .hm = self, + .index = 0, + }; + } + + /// If key exists this function cannot fail. + /// If there is an existing item with `key`, then the result + /// `Entry` pointer points to it, and found_existing is true. 
+ /// Otherwise, puts a new item with undefined value, and + /// the `Entry` pointer points to it. Caller should then initialize + /// the value (but not the key). + pub fn getOrPut(self: *Self, key: K) !GetOrPutResult { + return self.unmanaged.getOrPut(self.allocator, key); + } + + /// If there is an existing item with `key`, then the result + /// `Entry` pointer points to it, and found_existing is true. + /// Otherwise, puts a new item with undefined value, and + /// the `Entry` pointer points to it. Caller should then initialize + /// the value (but not the key). + /// If a new entry needs to be stored, this function asserts there + /// is enough capacity to store it. + pub fn getOrPutAssumeCapacity(self: *Self, key: K) GetOrPutResult { + return self.unmanaged.getOrPutAssumeCapacity(key); + } + + pub fn getOrPutValue(self: *Self, key: K, value: V) !*Entry { + return self.unmanaged.getOrPutValue(self.allocator, key, value); + } + + /// Increases capacity, guaranteeing that insertions up until the + /// `expected_count` will not cause an allocation, and therefore cannot fail. + pub fn ensureCapacity(self: *Self, new_capacity: usize) !void { + return self.unmanaged.ensureCapacity(self.allocator, new_capacity); + } + + /// Returns the number of total elements which may be present before it is + /// no longer guaranteed that no allocations will be performed. + pub fn capacity(self: *Self) usize { + return self.unmanaged.capacity(); + } + + /// Clobbers any existing data. To detect if a put would clobber + /// existing data, see `getOrPut`. 
+ pub fn put(self: *Self, key: K, value: V) !void { + return self.unmanaged.put(self.allocator, key, value); + } + + /// Inserts a key-value pair into the hash map, asserting that no previous + /// entry with the same key is already present + pub fn putNoClobber(self: *Self, key: K, value: V) !void { + return self.unmanaged.putNoClobber(self.allocator, key, value); + } + + /// Asserts there is enough capacity to store the new key-value pair. + /// Clobbers any existing data. To detect if a put would clobber + /// existing data, see `getOrPutAssumeCapacity`. + pub fn putAssumeCapacity(self: *Self, key: K, value: V) void { + return self.unmanaged.putAssumeCapacity(key, value); + } + + /// Asserts there is enough capacity to store the new key-value pair. + /// Asserts that it does not clobber any existing data. + /// To detect if a put would clobber existing data, see `getOrPutAssumeCapacity`. + pub fn putAssumeCapacityNoClobber(self: *Self, key: K, value: V) void { + return self.unmanaged.putAssumeCapacityNoClobber(key, value); + } + + /// Inserts a new `Entry` into the hash map, returning the previous one, if any. + pub fn fetchPut(self: *Self, key: K, value: V) !?Entry { + return self.unmanaged.fetchPut(self.allocator, key, value); + } + + /// Inserts a new `Entry` into the hash map, returning the previous one, if any. + /// If insertion happuns, asserts there is enough capacity without allocating. 
+ pub fn fetchPutAssumeCapacity(self: *Self, key: K, value: V) ?Entry { + return self.unmanaged.fetchPutAssumeCapacity(key, value); + } + + pub fn getEntry(self: Self, key: K) ?*Entry { + return self.unmanaged.getEntry(key); + } + + pub fn getIndex(self: Self, key: K) ?usize { + return self.unmanaged.getIndex(key); + } + + pub fn get(self: Self, key: K) ?V { + return self.unmanaged.get(key); + } + + pub fn contains(self: Self, key: K) bool { + return self.unmanaged.contains(key); + } + + /// If there is an `Entry` with a matching key, it is deleted from + /// the hash map, and then returned from this function. + pub fn remove(self: *Self, key: K) ?Entry { + return self.unmanaged.remove(key); + } + + /// Asserts there is an `Entry` with matching key, deletes it from the hash map, + /// and discards it. + pub fn removeAssertDiscard(self: *Self, key: K) void { + return self.unmanaged.removeAssertDiscard(key); + } + + pub fn items(self: Self) []Entry { + return self.unmanaged.items(); + } + + pub fn clone(self: Self) !Self { + var other = try self.unmanaged.clone(self.allocator); + return other.promote(self.allocator); + } + }; +} + +/// General purpose hash table. +/// Insertion order is preserved. +/// Deletions perform a "swap removal" on the entries list. +/// Modifying the hash map while iterating is allowed, however one must understand +/// the (well defined) behavior when mixing insertions and deletions with iteration. +/// This type does not store an Allocator field - the Allocator must be passed in +/// with each function call that requires it. See `ArrayHashMap` for a type that stores +/// an Allocator field for convenience. +/// Can be initialized directly using the default field values. +/// This type is designed to have low overhead for small numbers of entries. 
When +/// `store_hash` is `false` and the number of entries in the map is less than 9, +/// the overhead cost of using `ArrayHashMapUnmanaged` rather than `std.ArrayList` is +/// only a single pointer-sized integer. +/// When `store_hash` is `false`, this data structure is biased towards cheap `eql` +/// functions. It does not store each item's hash in the table. Setting `store_hash` +/// to `true` incurs slightly more memory cost by storing each key's hash in the table +/// but guarantees only one call to `eql` per insertion/deletion. +pub fn ArrayHashMapUnmanaged( + comptime K: type, + comptime V: type, + comptime hash: fn (key: K) u32, + comptime eql: fn (a: K, b: K) bool, + comptime store_hash: bool, +) type { + return struct { + /// It is permitted to access this field directly. + entries: std.ArrayListUnmanaged(Entry) = .{}, + + /// When entries length is less than `linear_scan_max`, this remains `null`. + /// Once entries length grows big enough, this field is allocated. There is + /// an IndexHeader followed by an array of Index(I) structs, where I is defined + /// by how many total indexes there are. + index_header: ?*IndexHeader = null, + + /// Modifying the key is illegal behavior. + /// Modifying the value is allowed. + /// Entry pointers become invalid whenever this ArrayHashMap is modified, + /// unless `ensureCapacity` was previously used. + pub const Entry = struct { + /// This field is `void` if `store_hash` is `false`. 
+ hash: Hash, + key: K, + value: V, + }; + + pub const Hash = if (store_hash) u32 else void; + + pub const GetOrPutResult = struct { + entry: *Entry, + found_existing: bool, + }; + + pub const Managed = ArrayHashMap(K, V, hash, eql, store_hash); + + const Self = @This(); + + const linear_scan_max = 8; + + pub fn promote(self: Self, allocator: *Allocator) Managed { + return .{ + .unmanaged = self, + .allocator = allocator, + }; + } + + pub fn deinit(self: *Self, allocator: *Allocator) void { + self.entries.deinit(allocator); + if (self.index_header) |header| { + header.free(allocator); + } + self.* = undefined; + } + + pub fn clearRetainingCapacity(self: *Self) void { + self.entries.items.len = 0; + if (self.index_header) |header| { + header.max_distance_from_start_index = 0; + switch (header.capacityIndexType()) { + .u8 => mem.set(Index(u8), header.indexes(u8), Index(u8).empty), + .u16 => mem.set(Index(u16), header.indexes(u16), Index(u16).empty), + .u32 => mem.set(Index(u32), header.indexes(u32), Index(u32).empty), + .usize => mem.set(Index(usize), header.indexes(usize), Index(usize).empty), + } + } + } + + pub fn clearAndFree(self: *Self, allocator: *Allocator) void { + self.entries.shrink(allocator, 0); + if (self.index_header) |header| { + header.free(allocator); + self.index_header = null; + } + } + + /// If key exists this function cannot fail. + /// If there is an existing item with `key`, then the result + /// `Entry` pointer points to it, and found_existing is true. + /// Otherwise, puts a new item with undefined value, and + /// the `Entry` pointer points to it. Caller should then initialize + /// the value (but not the key). + pub fn getOrPut(self: *Self, allocator: *Allocator, key: K) !GetOrPutResult { + self.ensureCapacity(allocator, self.entries.items.len + 1) catch |err| { + // "If key exists this function cannot fail." 
+ return GetOrPutResult{ + .entry = self.getEntry(key) orelse return err, + .found_existing = true, + }; + }; + return self.getOrPutAssumeCapacity(key); + } + + /// If there is an existing item with `key`, then the result + /// `Entry` pointer points to it, and found_existing is true. + /// Otherwise, puts a new item with undefined value, and + /// the `Entry` pointer points to it. Caller should then initialize + /// the value (but not the key). + /// If a new entry needs to be stored, this function asserts there + /// is enough capacity to store it. + pub fn getOrPutAssumeCapacity(self: *Self, key: K) GetOrPutResult { + const header = self.index_header orelse { + // Linear scan. + const h = if (store_hash) hash(key) else {}; + for (self.entries.items) |*item| { + if (item.hash == h and eql(key, item.key)) { + return GetOrPutResult{ + .entry = item, + .found_existing = true, + }; + } + } + const new_entry = self.entries.addOneAssumeCapacity(); + new_entry.* = .{ + .hash = if (store_hash) h else {}, + .key = key, + .value = undefined, + }; + return GetOrPutResult{ + .entry = new_entry, + .found_existing = false, + }; + }; + + switch (header.capacityIndexType()) { + .u8 => return self.getOrPutInternal(key, header, u8), + .u16 => return self.getOrPutInternal(key, header, u16), + .u32 => return self.getOrPutInternal(key, header, u32), + .usize => return self.getOrPutInternal(key, header, usize), + } + } + + pub fn getOrPutValue(self: *Self, allocator: *Allocator, key: K, value: V) !*Entry { + const res = try self.getOrPut(allocator, key); + if (!res.found_existing) + res.entry.value = value; + + return res.entry; + } + + /// Increases capacity, guaranteeing that insertions up until the + /// `expected_count` will not cause an allocation, and therefore cannot fail. 
+ pub fn ensureCapacity(self: *Self, allocator: *Allocator, new_capacity: usize) !void { + try self.entries.ensureCapacity(allocator, new_capacity); + if (new_capacity <= linear_scan_max) return; + + // Ensure that the indexes will be at most 60% full if + // `new_capacity` items are put into it. + const needed_len = new_capacity * 5 / 3; + if (self.index_header) |header| { + if (needed_len > header.indexes_len) { + // An overflow here would mean the amount of memory required would not + // be representable in the address space. + const new_indexes_len = math.ceilPowerOfTwo(usize, needed_len) catch unreachable; + const new_header = try IndexHeader.alloc(allocator, new_indexes_len); + self.insertAllEntriesIntoNewHeader(new_header); + header.free(allocator); + self.index_header = new_header; + } + } else { + // An overflow here would mean the amount of memory required would not + // be representable in the address space. + const new_indexes_len = math.ceilPowerOfTwo(usize, needed_len) catch unreachable; + const header = try IndexHeader.alloc(allocator, new_indexes_len); + self.insertAllEntriesIntoNewHeader(header); + self.index_header = header; + } + } + + /// Returns the number of total elements which may be present before it is + /// no longer guaranteed that no allocations will be performed. + pub fn capacity(self: Self) usize { + const entry_cap = self.entries.capacity; + const header = self.index_header orelse return math.min(linear_scan_max, entry_cap); + const indexes_cap = (header.indexes_len + 1) * 3 / 4; + return math.min(entry_cap, indexes_cap); + } + + /// Clobbers any existing data. To detect if a put would clobber + /// existing data, see `getOrPut`. 
+ pub fn put(self: *Self, allocator: *Allocator, key: K, value: V) !void { + const result = try self.getOrPut(allocator, key); + result.entry.value = value; + } + + /// Inserts a key-value pair into the hash map, asserting that no previous + /// entry with the same key is already present + pub fn putNoClobber(self: *Self, allocator: *Allocator, key: K, value: V) !void { + const result = try self.getOrPut(allocator, key); + assert(!result.found_existing); + result.entry.value = value; + } + + /// Asserts there is enough capacity to store the new key-value pair. + /// Clobbers any existing data. To detect if a put would clobber + /// existing data, see `getOrPutAssumeCapacity`. + pub fn putAssumeCapacity(self: *Self, key: K, value: V) void { + const result = self.getOrPutAssumeCapacity(key); + result.entry.value = value; + } + + /// Asserts there is enough capacity to store the new key-value pair. + /// Asserts that it does not clobber any existing data. + /// To detect if a put would clobber existing data, see `getOrPutAssumeCapacity`. + pub fn putAssumeCapacityNoClobber(self: *Self, key: K, value: V) void { + const result = self.getOrPutAssumeCapacity(key); + assert(!result.found_existing); + result.entry.value = value; + } + + /// Inserts a new `Entry` into the hash map, returning the previous one, if any. + pub fn fetchPut(self: *Self, allocator: *Allocator, key: K, value: V) !?Entry { + const gop = try self.getOrPut(allocator, key); + var result: ?Entry = null; + if (gop.found_existing) { + result = gop.entry.*; + } + gop.entry.value = value; + return result; + } + + /// Inserts a new `Entry` into the hash map, returning the previous one, if any. + /// If insertion happens, asserts there is enough capacity without allocating. 
+ pub fn fetchPutAssumeCapacity(self: *Self, key: K, value: V) ?Entry { + const gop = self.getOrPutAssumeCapacity(key); + var result: ?Entry = null; + if (gop.found_existing) { + result = gop.entry.*; + } + gop.entry.value = value; + return result; + } + + pub fn getEntry(self: Self, key: K) ?*Entry { + const index = self.getIndex(key) orelse return null; + return &self.entries.items[index]; + } + + pub fn getIndex(self: Self, key: K) ?usize { + const header = self.index_header orelse { + // Linear scan. + const h = if (store_hash) hash(key) else {}; + for (self.entries.items) |*item, i| { + if (item.hash == h and eql(key, item.key)) { + return i; + } + } + return null; + }; + switch (header.capacityIndexType()) { + .u8 => return self.getInternal(key, header, u8), + .u16 => return self.getInternal(key, header, u16), + .u32 => return self.getInternal(key, header, u32), + .usize => return self.getInternal(key, header, usize), + } + } + + pub fn get(self: Self, key: K) ?V { + return if (self.getEntry(key)) |entry| entry.value else null; + } + + pub fn contains(self: Self, key: K) bool { + return self.getEntry(key) != null; + } + + /// If there is an `Entry` with a matching key, it is deleted from + /// the hash map, and then returned from this function. + pub fn remove(self: *Self, key: K) ?Entry { + const header = self.index_header orelse { + // Linear scan. + const h = if (store_hash) hash(key) else {}; + for (self.entries.items) |item, i| { + if (item.hash == h and eql(key, item.key)) { + return self.entries.swapRemove(i); + } + } + return null; + }; + switch (header.capacityIndexType()) { + .u8 => return self.removeInternal(key, header, u8), + .u16 => return self.removeInternal(key, header, u16), + .u32 => return self.removeInternal(key, header, u32), + .usize => return self.removeInternal(key, header, usize), + } + } + + /// Asserts there is an `Entry` with matching key, deletes it from the hash map, + /// and discards it. 
+ pub fn removeAssertDiscard(self: *Self, key: K) void { + assert(self.remove(key) != null); + } + + pub fn items(self: Self) []Entry { + return self.entries.items; + } + + pub fn clone(self: Self, allocator: *Allocator) !Self { + var other: Self = .{}; + try other.entries.appendSlice(allocator, self.entries.items); + + if (self.index_header) |header| { + const new_header = try IndexHeader.alloc(allocator, header.indexes_len); + other.insertAllEntriesIntoNewHeader(new_header); + other.index_header = new_header; + } + return other; + } + + fn removeInternal(self: *Self, key: K, header: *IndexHeader, comptime I: type) ?Entry { + const indexes = header.indexes(I); + const h = hash(key); + const start_index = header.constrainIndex(h); + var roll_over: usize = 0; + while (roll_over <= header.max_distance_from_start_index) : (roll_over += 1) { + const index_index = header.constrainIndex(start_index + roll_over); + var index = &indexes[index_index]; + if (index.isEmpty()) + return null; + + const entry = &self.entries.items[index.entry_index]; + + const hash_match = if (store_hash) h == entry.hash else true; + if (!hash_match or !eql(key, entry.key)) + continue; + + const removed_entry = self.entries.swapRemove(index.entry_index); + if (self.entries.items.len > 0 and self.entries.items.len != index.entry_index) { + // Because of the swap remove, now we need to update the index that was + // pointing to the last entry and is now pointing to this removed item slot. + self.updateEntryIndex(header, self.entries.items.len, index.entry_index, I, indexes); + } + + // Now we have to shift over the following indexes. 
+ roll_over += 1; + while (roll_over < header.indexes_len) : (roll_over += 1) { + const next_index_index = header.constrainIndex(start_index + roll_over); + const next_index = &indexes[next_index_index]; + if (next_index.isEmpty() or next_index.distance_from_start_index == 0) { + index.setEmpty(); + return removed_entry; + } + index.* = next_index.*; + index.distance_from_start_index -= 1; + index = next_index; + } + unreachable; + } + return null; + } + + fn updateEntryIndex( + self: *Self, + header: *IndexHeader, + old_entry_index: usize, + new_entry_index: usize, + comptime I: type, + indexes: []Index(I), + ) void { + const h = if (store_hash) self.entries.items[new_entry_index].hash else hash(self.entries.items[new_entry_index].key); + const start_index = header.constrainIndex(h); + var roll_over: usize = 0; + while (roll_over <= header.max_distance_from_start_index) : (roll_over += 1) { + const index_index = header.constrainIndex(start_index + roll_over); + const index = &indexes[index_index]; + if (index.entry_index == old_entry_index) { + index.entry_index = @intCast(I, new_entry_index); + return; + } + } + unreachable; + } + + /// Must ensureCapacity before calling this. 
+ fn getOrPutInternal(self: *Self, key: K, header: *IndexHeader, comptime I: type) GetOrPutResult { + const indexes = header.indexes(I); + const h = hash(key); + const start_index = header.constrainIndex(h); + var roll_over: usize = 0; + var distance_from_start_index: usize = 0; + while (roll_over <= header.indexes_len) : ({ + roll_over += 1; + distance_from_start_index += 1; + }) { + const index_index = header.constrainIndex(start_index + roll_over); + const index = indexes[index_index]; + if (index.isEmpty()) { + indexes[index_index] = .{ + .distance_from_start_index = @intCast(I, distance_from_start_index), + .entry_index = @intCast(I, self.entries.items.len), + }; + header.maybeBumpMax(distance_from_start_index); + const new_entry = self.entries.addOneAssumeCapacity(); + new_entry.* = .{ + .hash = if (store_hash) h else {}, + .key = key, + .value = undefined, + }; + return .{ + .found_existing = false, + .entry = new_entry, + }; + } + + // This pointer survives the following append because we call + // entries.ensureCapacity before getOrPutInternal. + const entry = &self.entries.items[index.entry_index]; + const hash_match = if (store_hash) h == entry.hash else true; + if (hash_match and eql(key, entry.key)) { + return .{ + .found_existing = true, + .entry = entry, + }; + } + if (index.distance_from_start_index < distance_from_start_index) { + // In this case, we did not find the item. We will put a new entry. + // However, we will use this index for the new entry, and move + // the previous index down the line, to keep the max_distance_from_start_index + // as small as possible. 
+ indexes[index_index] = .{ + .distance_from_start_index = @intCast(I, distance_from_start_index), + .entry_index = @intCast(I, self.entries.items.len), + }; + header.maybeBumpMax(distance_from_start_index); + const new_entry = self.entries.addOneAssumeCapacity(); + new_entry.* = .{ + .hash = if (store_hash) h else {}, + .key = key, + .value = undefined, + }; + + distance_from_start_index = index.distance_from_start_index; + var prev_entry_index = index.entry_index; + + // Find somewhere to put the index we replaced by shifting + // following indexes backwards. + roll_over += 1; + distance_from_start_index += 1; + while (roll_over < header.indexes_len) : ({ + roll_over += 1; + distance_from_start_index += 1; + }) { + const next_index_index = header.constrainIndex(start_index + roll_over); + const next_index = indexes[next_index_index]; + if (next_index.isEmpty()) { + header.maybeBumpMax(distance_from_start_index); + indexes[next_index_index] = .{ + .entry_index = prev_entry_index, + .distance_from_start_index = @intCast(I, distance_from_start_index), + }; + return .{ + .found_existing = false, + .entry = new_entry, + }; + } + if (next_index.distance_from_start_index < distance_from_start_index) { + header.maybeBumpMax(distance_from_start_index); + indexes[next_index_index] = .{ + .entry_index = prev_entry_index, + .distance_from_start_index = @intCast(I, distance_from_start_index), + }; + distance_from_start_index = next_index.distance_from_start_index; + prev_entry_index = next_index.entry_index; + } + } + unreachable; + } + } + unreachable; + } + + fn getInternal(self: Self, key: K, header: *IndexHeader, comptime I: type) ?usize { + const indexes = header.indexes(I); + const h = hash(key); + const start_index = header.constrainIndex(h); + var roll_over: usize = 0; + while (roll_over <= header.max_distance_from_start_index) : (roll_over += 1) { + const index_index = header.constrainIndex(start_index + roll_over); + const index = indexes[index_index]; + if 
(index.isEmpty()) + return null; + + const entry = &self.entries.items[index.entry_index]; + const hash_match = if (store_hash) h == entry.hash else true; + if (hash_match and eql(key, entry.key)) + return index.entry_index; + } + return null; + } + + fn insertAllEntriesIntoNewHeader(self: *Self, header: *IndexHeader) void { + switch (header.capacityIndexType()) { + .u8 => return self.insertAllEntriesIntoNewHeaderGeneric(header, u8), + .u16 => return self.insertAllEntriesIntoNewHeaderGeneric(header, u16), + .u32 => return self.insertAllEntriesIntoNewHeaderGeneric(header, u32), + .usize => return self.insertAllEntriesIntoNewHeaderGeneric(header, usize), + } + } + + fn insertAllEntriesIntoNewHeaderGeneric(self: *Self, header: *IndexHeader, comptime I: type) void { + const indexes = header.indexes(I); + entry_loop: for (self.entries.items) |entry, i| { + const h = if (store_hash) entry.hash else hash(entry.key); + const start_index = header.constrainIndex(h); + var entry_index = i; + var roll_over: usize = 0; + var distance_from_start_index: usize = 0; + while (roll_over < header.indexes_len) : ({ + roll_over += 1; + distance_from_start_index += 1; + }) { + const index_index = header.constrainIndex(start_index + roll_over); + const next_index = indexes[index_index]; + if (next_index.isEmpty()) { + header.maybeBumpMax(distance_from_start_index); + indexes[index_index] = .{ + .distance_from_start_index = @intCast(I, distance_from_start_index), + .entry_index = @intCast(I, entry_index), + }; + continue :entry_loop; + } + if (next_index.distance_from_start_index < distance_from_start_index) { + header.maybeBumpMax(distance_from_start_index); + indexes[index_index] = .{ + .distance_from_start_index = @intCast(I, distance_from_start_index), + .entry_index = @intCast(I, entry_index), + }; + distance_from_start_index = next_index.distance_from_start_index; + entry_index = next_index.entry_index; + } + } + unreachable; + } + } + }; +} + +const CapacityIndexType = enum { u8, 
u16, u32, usize }; + +fn capacityIndexType(indexes_len: usize) CapacityIndexType { + if (indexes_len < math.maxInt(u8)) + return .u8; + if (indexes_len < math.maxInt(u16)) + return .u16; + if (indexes_len < math.maxInt(u32)) + return .u32; + return .usize; +} + +fn capacityIndexSize(indexes_len: usize) usize { + switch (capacityIndexType(indexes_len)) { + .u8 => return @sizeOf(Index(u8)), + .u16 => return @sizeOf(Index(u16)), + .u32 => return @sizeOf(Index(u32)), + .usize => return @sizeOf(Index(usize)), + } +} + +fn Index(comptime I: type) type { + return extern struct { + entry_index: I, + distance_from_start_index: I, + + const Self = @This(); + + const empty = Self{ + .entry_index = math.maxInt(I), + .distance_from_start_index = undefined, + }; + + fn isEmpty(idx: Self) bool { + return idx.entry_index == math.maxInt(I); + } + + fn setEmpty(idx: *Self) void { + idx.entry_index = math.maxInt(I); + } + }; +} + +/// This struct is trailed by an array of `Index(I)`, where `I` +/// and the array length are determined by `indexes_len`. +const IndexHeader = struct { + max_distance_from_start_index: usize, + indexes_len: usize, + + fn constrainIndex(header: IndexHeader, i: usize) usize { + // This is an optimization for modulo of power of two integers; + // it requires `indexes_len` to always be a power of two. 
+ return i & (header.indexes_len - 1); + } + + fn indexes(header: *IndexHeader, comptime I: type) []Index(I) { + const start = @ptrCast([*]Index(I), @ptrCast([*]u8, header) + @sizeOf(IndexHeader)); + return start[0..header.indexes_len]; + } + + fn capacityIndexType(header: IndexHeader) CapacityIndexType { + return hash_map.capacityIndexType(header.indexes_len); + } + + fn maybeBumpMax(header: *IndexHeader, distance_from_start_index: usize) void { + if (distance_from_start_index > header.max_distance_from_start_index) { + header.max_distance_from_start_index = distance_from_start_index; + } + } + + fn alloc(allocator: *Allocator, len: usize) !*IndexHeader { + const index_size = hash_map.capacityIndexSize(len); + const nbytes = @sizeOf(IndexHeader) + index_size * len; + const bytes = try allocator.allocAdvanced(u8, @alignOf(IndexHeader), nbytes, .exact); + @memset(bytes.ptr + @sizeOf(IndexHeader), 0xff, bytes.len - @sizeOf(IndexHeader)); + const result = @ptrCast(*IndexHeader, bytes.ptr); + result.* = .{ + .max_distance_from_start_index = 0, + .indexes_len = len, + }; + return result; + } + + fn free(header: *IndexHeader, allocator: *Allocator) void { + const index_size = hash_map.capacityIndexSize(header.indexes_len); + const ptr = @ptrCast([*]u8, header); + const slice = ptr[0 .. 
@sizeOf(IndexHeader) + header.indexes_len * index_size]; + allocator.free(slice); + } +}; + +test "basic hash map usage" { + var map = AutoArrayHashMap(i32, i32).init(std.testing.allocator); + defer map.deinit(); + + testing.expect((try map.fetchPut(1, 11)) == null); + testing.expect((try map.fetchPut(2, 22)) == null); + testing.expect((try map.fetchPut(3, 33)) == null); + testing.expect((try map.fetchPut(4, 44)) == null); + + try map.putNoClobber(5, 55); + testing.expect((try map.fetchPut(5, 66)).?.value == 55); + testing.expect((try map.fetchPut(5, 55)).?.value == 66); + + const gop1 = try map.getOrPut(5); + testing.expect(gop1.found_existing == true); + testing.expect(gop1.entry.value == 55); + gop1.entry.value = 77; + testing.expect(map.getEntry(5).?.value == 77); + + const gop2 = try map.getOrPut(99); + testing.expect(gop2.found_existing == false); + gop2.entry.value = 42; + testing.expect(map.getEntry(99).?.value == 42); + + const gop3 = try map.getOrPutValue(5, 5); + testing.expect(gop3.value == 77); + + const gop4 = try map.getOrPutValue(100, 41); + testing.expect(gop4.value == 41); + + testing.expect(map.contains(2)); + testing.expect(map.getEntry(2).?.value == 22); + testing.expect(map.get(2).? 
== 22); + + const rmv1 = map.remove(2); + testing.expect(rmv1.?.key == 2); + testing.expect(rmv1.?.value == 22); + testing.expect(map.remove(2) == null); + testing.expect(map.getEntry(2) == null); + testing.expect(map.get(2) == null); + + map.removeAssertDiscard(3); +} + +test "iterator hash map" { + // https://github.com/ziglang/zig/issues/5127 + if (std.Target.current.cpu.arch == .mips) return error.SkipZigTest; + + var reset_map = AutoArrayHashMap(i32, i32).init(std.testing.allocator); + defer reset_map.deinit(); + + // test ensureCapacity with a 0 parameter + try reset_map.ensureCapacity(0); + + try reset_map.putNoClobber(0, 11); + try reset_map.putNoClobber(1, 22); + try reset_map.putNoClobber(2, 33); + + var keys = [_]i32{ + 0, 2, 1, + }; + + var values = [_]i32{ + 11, 33, 22, + }; + + var buffer = [_]i32{ + 0, 0, 0, + }; + + var it = reset_map.iterator(); + const first_entry = it.next().?; + it.reset(); + + var count: usize = 0; + while (it.next()) |entry| : (count += 1) { + buffer[@intCast(usize, entry.key)] = entry.value; + } + testing.expect(count == 3); + testing.expect(it.next() == null); + + for (buffer) |v, i| { + testing.expect(buffer[@intCast(usize, keys[i])] == values[i]); + } + + it.reset(); + count = 0; + while (it.next()) |entry| { + buffer[@intCast(usize, entry.key)] = entry.value; + count += 1; + if (count >= 2) break; + } + + for (buffer[0..2]) |v, i| { + testing.expect(buffer[@intCast(usize, keys[i])] == values[i]); + } + + it.reset(); + var entry = it.next().?; + testing.expect(entry.key == first_entry.key); + testing.expect(entry.value == first_entry.value); +} + +test "ensure capacity" { + var map = AutoArrayHashMap(i32, i32).init(std.testing.allocator); + defer map.deinit(); + + try map.ensureCapacity(20); + const initial_capacity = map.capacity(); + testing.expect(initial_capacity >= 20); + var i: i32 = 0; + while (i < 20) : (i += 1) { + testing.expect(map.fetchPutAssumeCapacity(i, i + 10) == null); + } + // shouldn't resize from 
putAssumeCapacity + testing.expect(initial_capacity == map.capacity()); +} + +test "clone" { + var original = AutoArrayHashMap(i32, i32).init(std.testing.allocator); + defer original.deinit(); + + // put more than `linear_scan_max` so we can test that the index header is properly cloned + var i: u8 = 0; + while (i < 10) : (i += 1) { + try original.putNoClobber(i, i * 10); + } + + var copy = try original.clone(); + defer copy.deinit(); + + i = 0; + while (i < 10) : (i += 1) { + testing.expect(copy.get(i).? == i * 10); + } +} + +pub fn getHashPtrAddrFn(comptime K: type) (fn (K) u32) { + return struct { + fn hash(key: K) u32 { + return getAutoHashFn(usize)(@ptrToInt(key)); + } + }.hash; +} + +pub fn getTrivialEqlFn(comptime K: type) (fn (K, K) bool) { + return struct { + fn eql(a: K, b: K) bool { + return a == b; + } + }.eql; +} + +pub fn getAutoHashFn(comptime K: type) (fn (K) u32) { + return struct { + fn hash(key: K) u32 { + if (comptime trait.hasUniqueRepresentation(K)) { + return @truncate(u32, Wyhash.hash(0, std.mem.asBytes(&key))); + } else { + var hasher = Wyhash.init(0); + autoHash(&hasher, key); + return @truncate(u32, hasher.final()); + } + } + }.hash; +} + +pub fn getAutoEqlFn(comptime K: type) (fn (K, K) bool) { + return struct { + fn eql(a: K, b: K) bool { + return meta.eql(a, b); + } + }.eql; +} + +pub fn autoEqlIsCheap(comptime K: type) bool { + return switch (@typeInfo(K)) { + .Bool, + .Int, + .Float, + .Pointer, + .ComptimeFloat, + .ComptimeInt, + .Enum, + .Fn, + .ErrorSet, + .AnyFrame, + .EnumLiteral, + => true, + else => false, + }; +} + +pub fn getAutoHashStratFn(comptime K: type, comptime strategy: std.hash.Strategy) (fn (K) u32) { + return struct { + fn hash(key: K) u32 { + var hasher = Wyhash.init(0); + std.hash.autoHashStrat(&hasher, key, strategy); + return @truncate(u32, hasher.final()); + } + }.hash; +} diff --git a/lib/std/buf_set.zig b/lib/std/buf_set.zig index 1f5dda09c2..f48e6c594c 100644 --- a/lib/std/buf_set.zig +++ 
b/lib/std/buf_set.zig @@ -20,7 +20,8 @@ pub const BufSet = struct { } pub fn deinit(self: *BufSet) void { - for (self.hash_map.items()) |entry| { + var it = self.hash_map.iterator(); + while (it.next()) |entry| { self.free(entry.key); } self.hash_map.deinit(); diff --git a/lib/std/hash_map.zig b/lib/std/hash_map.zig index 8966737a0c..144a512edc 100644 --- a/lib/std/hash_map.zig +++ b/lib/std/hash_map.zig @@ -4,91 +4,94 @@ // The MIT license requires this copyright notice to be included in all copies // and substantial portions of the software. const std = @import("std.zig"); -const debug = std.debug; +const builtin = @import("builtin"); const assert = debug.assert; -const testing = std.testing; +const autoHash = std.hash.autoHash; +const debug = std.debug; +const warn = debug.warn; const math = std.math; const mem = std.mem; const meta = std.meta; const trait = meta.trait; -const autoHash = std.hash.autoHash; -const Wyhash = std.hash.Wyhash; const Allocator = mem.Allocator; -const builtin = @import("builtin"); -const hash_map = @This(); +const Wyhash = std.hash.Wyhash; + +pub fn getAutoHashFn(comptime K: type) (fn (K) u64) { + return struct { + fn hash(key: K) u64 { + if (comptime trait.hasUniqueRepresentation(K)) { + return Wyhash.hash(0, std.mem.asBytes(&key)); + } else { + var hasher = Wyhash.init(0); + autoHash(&hasher, key); + return hasher.final(); + } + } + }.hash; +} + +pub fn getAutoEqlFn(comptime K: type) (fn (K, K) bool) { + return struct { + fn eql(a: K, b: K) bool { + return meta.eql(a, b); + } + }.eql; +} pub fn AutoHashMap(comptime K: type, comptime V: type) type { - return HashMap(K, V, getAutoHashFn(K), getAutoEqlFn(K), autoEqlIsCheap(K)); + return HashMap(K, V, getAutoHashFn(K), getAutoEqlFn(K), DefaultMaxLoadPercentage); } pub fn AutoHashMapUnmanaged(comptime K: type, comptime V: type) type { - return HashMapUnmanaged(K, V, getAutoHashFn(K), getAutoEqlFn(K), autoEqlIsCheap(K)); + return HashMapUnmanaged(K, V, getAutoHashFn(K), getAutoEqlFn(K), 
DefaultMaxLoadPercentage); } /// Builtin hashmap for strings as keys. pub fn StringHashMap(comptime V: type) type { - return HashMap([]const u8, V, hashString, eqlString, true); + return HashMap([]const u8, V, hashString, eqlString, DefaultMaxLoadPercentage); } pub fn StringHashMapUnmanaged(comptime V: type) type { - return HashMapUnmanaged([]const u8, V, hashString, eqlString, true); + return HashMapUnmanaged([]const u8, V, hashString, eqlString, DefaultMaxLoadPercentage); } pub fn eqlString(a: []const u8, b: []const u8) bool { return mem.eql(u8, a, b); } -pub fn hashString(s: []const u8) u32 { - return @truncate(u32, std.hash.Wyhash.hash(0, s)); +pub fn hashString(s: []const u8) u64 { + return std.hash.Wyhash.hash(0, s); } -/// Insertion order is preserved. -/// Deletions perform a "swap removal" on the entries list. -/// Modifying the hash map while iterating is allowed, however one must understand -/// the (well defined) behavior when mixing insertions and deletions with iteration. +pub const DefaultMaxLoadPercentage = 80; + +/// General purpose hash table. +/// No order is guaranteed and any modification invalidates live iterators. +/// It provides fast operations (lookup, insertion, deletion) with quite high +/// load factors (up to 80% by default) for a low memory usage. /// For a hash map that can be initialized directly that does not store an Allocator /// field, see `HashMapUnmanaged`. -/// When `store_hash` is `false`, this data structure is biased towards cheap `eql` -/// functions. It does not store each item's hash in the table. Setting `store_hash` -/// to `true` incurs slightly more memory cost by storing each key's hash in the table -/// but only has to call `eql` for hash collisions. +/// If iterating over the table entries is a strong usecase and needs to be fast, +/// prefer the alternative `std.ArrayHashMap`. 
pub fn HashMap( comptime K: type, comptime V: type, - comptime hash: fn (key: K) u32, - comptime eql: fn (a: K, b: K) bool, - comptime store_hash: bool, + comptime hashFn: fn (key: K) u64, + comptime eqlFn: fn (a: K, b: K) bool, + comptime MaxLoadPercentage: u64, ) type { return struct { unmanaged: Unmanaged, allocator: *Allocator, - pub const Unmanaged = HashMapUnmanaged(K, V, hash, eql, store_hash); + pub const Unmanaged = HashMapUnmanaged(K, V, hashFn, eqlFn, MaxLoadPercentage); pub const Entry = Unmanaged.Entry; pub const Hash = Unmanaged.Hash; + pub const Iterator = Unmanaged.Iterator; + pub const Size = Unmanaged.Size; pub const GetOrPutResult = Unmanaged.GetOrPutResult; - /// Deprecated. Iterate using `items`. - pub const Iterator = struct { - hm: *const Self, - /// Iterator through the entry array. - index: usize, - - pub fn next(it: *Iterator) ?*Entry { - if (it.index >= it.hm.unmanaged.entries.items.len) return null; - const result = &it.hm.unmanaged.entries.items[it.index]; - it.index += 1; - return result; - } - - /// Reset the iterator to the initial index - pub fn reset(it: *Iterator) void { - it.index = 0; - } - }; - const Self = @This(); - const Index = Unmanaged.Index; pub fn init(allocator: *Allocator) Self { return .{ @@ -110,17 +113,12 @@ pub fn HashMap( return self.unmanaged.clearAndFree(self.allocator); } - /// Deprecated. Use `items().len`. pub fn count(self: Self) usize { - return self.items().len; + return self.unmanaged.count(); } - /// Deprecated. Iterate using `items`. pub fn iterator(self: *const Self) Iterator { - return Iterator{ - .hm = self, - .index = 0, - }; + return self.unmanaged.iterator(); } /// If key exists this function cannot fail. @@ -150,13 +148,13 @@ pub fn HashMap( /// Increases capacity, guaranteeing that insertions up until the /// `expected_count` will not cause an allocation, and therefore cannot fail. 
- pub fn ensureCapacity(self: *Self, new_capacity: usize) !void { - return self.unmanaged.ensureCapacity(self.allocator, new_capacity); + pub fn ensureCapacity(self: *Self, expected_count: Size) !void { + return self.unmanaged.ensureCapacity(self.allocator, expected_count); } /// Returns the number of total elements which may be present before it is /// no longer guaranteed that no allocations will be performed. - pub fn capacity(self: *Self) usize { + pub fn capacity(self: *Self) Size { return self.unmanaged.capacity(); } @@ -197,18 +195,14 @@ pub fn HashMap( return self.unmanaged.fetchPutAssumeCapacity(key, value); } - pub fn getEntry(self: Self, key: K) ?*Entry { - return self.unmanaged.getEntry(key); - } - - pub fn getIndex(self: Self, key: K) ?usize { - return self.unmanaged.getIndex(key); - } - pub fn get(self: Self, key: K) ?V { return self.unmanaged.get(key); } + pub fn getEntry(self: Self, key: K) ?*Entry { + return self.unmanaged.getEntry(key); + } + pub fn contains(self: Self, key: K) bool { return self.unmanaged.contains(key); } @@ -225,10 +219,6 @@ pub fn HashMap( return self.unmanaged.removeAssertDiscard(key); } - pub fn items(self: Self) []Entry { - return self.unmanaged.items(); - } - pub fn clone(self: Self) !Self { var other = try self.unmanaged.clone(self.allocator); return other.promote(self.allocator); @@ -236,63 +226,152 @@ pub fn HashMap( }; } -/// General purpose hash table. -/// Insertion order is preserved. -/// Deletions perform a "swap removal" on the entries list. -/// Modifying the hash map while iterating is allowed, however one must understand -/// the (well defined) behavior when mixing insertions and deletions with iteration. -/// This type does not store an Allocator field - the Allocator must be passed in -/// with each function call that requires it. See `HashMap` for a type that stores -/// an Allocator field for convenience. -/// Can be initialized directly using the default field values. 
-/// This type is designed to have low overhead for small numbers of entries. When -/// `store_hash` is `false` and the number of entries in the map is less than 9, -/// the overhead cost of using `HashMapUnmanaged` rather than `std.ArrayList` is -/// only a single pointer-sized integer. -/// When `store_hash` is `false`, this data structure is biased towards cheap `eql` -/// functions. It does not store each item's hash in the table. Setting `store_hash` -/// to `true` incurs slightly more memory cost by storing each key's hash in the table -/// but guarantees only one call to `eql` per insertion/deletion. +/// A HashMap based on open addressing and linear probing. +/// A lookup or modification typically occurs only 2 cache misses. +/// No order is guaranteed and any modification invalidates live iterators. +/// It achieves good performance with quite high load factors (by default, +/// grow is triggered at 80% full) and only one byte of overhead per element. +/// The struct itself is only 16 bytes for a small footprint. This comes at +/// the price of handling size with u32, which should be reasonnable enough +/// for almost all uses. +/// Deletions are achieved with tombstones. pub fn HashMapUnmanaged( comptime K: type, comptime V: type, - comptime hash: fn (key: K) u32, - comptime eql: fn (a: K, b: K) bool, - comptime store_hash: bool, + hashFn: fn (key: K) u64, + eqlFn: fn (a: K, b: K) bool, + comptime MaxLoadPercentage: u64, ) type { + comptime assert(MaxLoadPercentage > 0 and MaxLoadPercentage < 100); + return struct { - /// It is permitted to access this field directly. - entries: std.ArrayListUnmanaged(Entry) = .{}, + const Self = @This(); - /// When entries length is less than `linear_scan_max`, this remains `null`. - /// Once entries length grows big enough, this field is allocated. There is - /// an IndexHeader followed by an array of Index(I) structs, where I is defined - /// by how many total indexes there are. 
- index_header: ?*IndexHeader = null, + // This is actually a midway pointer to the single buffer containing + // a `Header` field, the `Metadata`s and `Entry`s. + // At `-@sizeOf(Header)` is the Header field. + // At `sizeOf(Metadata) * capacity + offset`, which is pointed to by + // self.header().entries, is the array of entries. + // This means that the hashmap only holds one live allocation, to + // reduce memory fragmentation and struct size. + /// Pointer to the metadata. + metadata: ?[*]Metadata = null, + + /// Current number of elements in the hashmap. + size: Size = 0, + + // Having a countdown to grow reduces the number of instructions to + // execute when determining if the hashmap has enough capacity already. + /// Number of available slots before a grow is needed to satisfy the + /// `MaxLoadPercentage`. + available: Size = 0, + + // This is purely empirical and not a /very smart magic constantâ„¢/. + /// Capacity of the first grow when bootstrapping the hashmap. + const MinimalCapacity = 8; + + // This hashmap is specially designed for sizes that fit in a u32. + const Size = u32; + + // u64 hashes guarantee us that the fingerprint bits will never be used + // to compute the index of a slot, maximizing the use of entropy. + const Hash = u64; - /// Modifying the key is illegal behavior. - /// Modifying the value is allowed. - /// Entry pointers become invalid whenever this HashMap is modified, - /// unless `ensureCapacity` was previously used. pub const Entry = struct { - /// This field is `void` if `store_hash` is `false`. - hash: Hash, key: K, value: V, }; - pub const Hash = if (store_hash) u32 else void; + const Header = packed struct { + entries: [*]Entry, + capacity: Size, + }; + + /// Metadata for a slot. It can be in three states: empty, used or + /// tombstone. Tombstones indicate that an entry was previously used, + /// they are a simple way to handle removal. + /// To this state, we add 6 bits from the slot's key hash. 
These are + /// used as a fast way to disambiguate between entries without + /// having to use the equality function. If two fingerprints are + /// different, we know that we don't have to compare the keys at all. + /// The 6 bits are the highest ones from a 64 bit hash. This way, not + /// only we use the `log2(capacity)` lowest bits from the hash to determine + /// a slot index, but we use 6 more bits to quickly resolve collisions + /// when multiple elements with different hashes end up wanting to be in / the same slot. + /// Not using the equality function means we don't have to read into + /// the entries array, avoiding a likely cache miss. + const Metadata = packed struct { + const FingerPrint = u6; + + used: u1 = 0, + tombstone: u1 = 0, + fingerprint: FingerPrint = 0, + + pub fn isUsed(self: Metadata) bool { + return self.used == 1; + } + + pub fn isTombstone(self: Metadata) bool { + return self.tombstone == 1; + } + + pub fn takeFingerprint(hash: Hash) FingerPrint { + const hash_bits = @typeInfo(Hash).Int.bits; + const fp_bits = @typeInfo(FingerPrint).Int.bits; + return @truncate(FingerPrint, hash >> (hash_bits - fp_bits)); + } + + pub fn fill(self: *Metadata, fp: FingerPrint) void { + self.used = 1; + self.tombstone = 0; + self.fingerprint = fp; + } + + pub fn remove(self: *Metadata) void { + self.used = 0; + self.tombstone = 1; + self.fingerprint = 0; + } + }; + + comptime { + assert(@sizeOf(Metadata) == 1); + assert(@alignOf(Metadata) == 1); + } + + const Iterator = struct { + hm: *const Self, + index: Size = 0, + + pub fn next(it: *Iterator) ?*Entry { + assert(it.index <= it.hm.capacity()); + if (it.hm.size == 0) return null; + + const cap = it.hm.capacity(); + const end = it.hm.metadata.? + cap; + var metadata = it.hm.metadata.? 
+ it.index; + + while (metadata != end) : ({ + metadata += 1; + it.index += 1; + }) { + if (metadata[0].isUsed()) { + const entry = &it.hm.entries()[it.index]; + it.index += 1; + return entry; + } + } + + return null; + } + }; pub const GetOrPutResult = struct { entry: *Entry, found_existing: bool, }; - pub const Managed = HashMap(K, V, hash, eql, store_hash); - - const Self = @This(); - - const linear_scan_max = 8; + pub const Managed = HashMap(K, V, hashFn, eqlFn, MaxLoadPercentage); pub fn promote(self: Self, allocator: *Allocator) Managed { return .{ @@ -301,167 +380,156 @@ pub fn HashMapUnmanaged( }; } + fn isUnderMaxLoadPercentage(size: Size, cap: Size) bool { + return size * 100 < MaxLoadPercentage * cap; + } + + pub fn init(allocator: *Allocator) Self { + return .{}; + } + pub fn deinit(self: *Self, allocator: *Allocator) void { - self.entries.deinit(allocator); - if (self.index_header) |header| { - header.free(allocator); - } + self.deallocate(allocator); self.* = undefined; } + fn deallocate(self: *Self, allocator: *Allocator) void { + if (self.metadata == null) return; + + const cap = self.capacity(); + const meta_size = @sizeOf(Header) + cap * @sizeOf(Metadata); + + const alignment = @alignOf(Entry) - 1; + const entries_size = @as(usize, cap) * @sizeOf(Entry) + alignment; + + const total_size = meta_size + entries_size; + + var slice: []u8 = undefined; + slice.ptr = @intToPtr([*]u8, @ptrToInt(self.header())); + slice.len = total_size; + allocator.free(slice); + + self.metadata = null; + self.available = 0; + } + + fn capacityForSize(size: Size) Size { + var new_cap = @truncate(u32, (@as(u64, size) * 100) / MaxLoadPercentage + 1); + new_cap = math.ceilPowerOfTwo(u32, new_cap) catch unreachable; + return new_cap; + } + + pub fn ensureCapacity(self: *Self, allocator: *Allocator, new_size: Size) !void { + if (new_size > self.size) + try self.growIfNeeded(allocator, new_size - self.size); + } + pub fn clearRetainingCapacity(self: *Self) void { - 
self.entries.items.len = 0; - if (self.index_header) |header| { - header.max_distance_from_start_index = 0; - switch (header.capacityIndexType()) { - .u8 => mem.set(Index(u8), header.indexes(u8), Index(u8).empty), - .u16 => mem.set(Index(u16), header.indexes(u16), Index(u16).empty), - .u32 => mem.set(Index(u32), header.indexes(u32), Index(u32).empty), - .usize => mem.set(Index(usize), header.indexes(usize), Index(usize).empty), - } + if (self.metadata) |_| { + self.initMetadatas(); + self.size = 0; + self.available = 0; } } pub fn clearAndFree(self: *Self, allocator: *Allocator) void { - self.entries.shrink(allocator, 0); - if (self.index_header) |header| { - header.free(allocator); - self.index_header = null; - } + self.deallocate(allocator); + self.size = 0; + self.available = 0; } - /// If key exists this function cannot fail. - /// If there is an existing item with `key`, then the result - /// `Entry` pointer points to it, and found_existing is true. - /// Otherwise, puts a new item with undefined value, and - /// the `Entry` pointer points to it. Caller should then initialize - /// the value (but not the key). - pub fn getOrPut(self: *Self, allocator: *Allocator, key: K) !GetOrPutResult { - self.ensureCapacity(allocator, self.entries.items.len + 1) catch |err| { - // "If key exists this function cannot fail." - return GetOrPutResult{ - .entry = self.getEntry(key) orelse return err, - .found_existing = true, - }; - }; - return self.getOrPutAssumeCapacity(key); + pub fn count(self: *const Self) Size { + return self.size; } - /// If there is an existing item with `key`, then the result - /// `Entry` pointer points to it, and found_existing is true. - /// Otherwise, puts a new item with undefined value, and - /// the `Entry` pointer points to it. Caller should then initialize - /// the value (but not the key). - /// If a new entry needs to be stored, this function asserts there - /// is enough capacity to store it. 
- pub fn getOrPutAssumeCapacity(self: *Self, key: K) GetOrPutResult { - const header = self.index_header orelse { - // Linear scan. - const h = if (store_hash) hash(key) else {}; - for (self.entries.items) |*item| { - if (item.hash == h and eql(key, item.key)) { - return GetOrPutResult{ - .entry = item, - .found_existing = true, - }; - } - } - const new_entry = self.entries.addOneAssumeCapacity(); - new_entry.* = .{ - .hash = if (store_hash) h else {}, - .key = key, - .value = undefined, - }; - return GetOrPutResult{ - .entry = new_entry, - .found_existing = false, - }; - }; - - switch (header.capacityIndexType()) { - .u8 => return self.getOrPutInternal(key, header, u8), - .u16 => return self.getOrPutInternal(key, header, u16), - .u32 => return self.getOrPutInternal(key, header, u32), - .usize => return self.getOrPutInternal(key, header, usize), - } + fn header(self: *const Self) *Header { + return @ptrCast(*Header, @ptrCast([*]Header, self.metadata.?) - 1); } - pub fn getOrPutValue(self: *Self, allocator: *Allocator, key: K, value: V) !*Entry { - const res = try self.getOrPut(allocator, key); - if (!res.found_existing) - res.entry.value = value; - - return res.entry; + fn entries(self: *const Self) [*]Entry { + return self.header().entries; } - /// Increases capacity, guaranteeing that insertions up until the - /// `expected_count` will not cause an allocation, and therefore cannot fail. - pub fn ensureCapacity(self: *Self, allocator: *Allocator, new_capacity: usize) !void { - try self.entries.ensureCapacity(allocator, new_capacity); - if (new_capacity <= linear_scan_max) return; + pub fn capacity(self: *const Self) Size { + if (self.metadata == null) return 0; - // Ensure that the indexes will be at most 60% full if - // `new_capacity` items are put into it. 
- const needed_len = new_capacity * 5 / 3; - if (self.index_header) |header| { - if (needed_len > header.indexes_len) { - // An overflow here would mean the amount of memory required would not - // be representable in the address space. - const new_indexes_len = math.ceilPowerOfTwo(usize, needed_len) catch unreachable; - const new_header = try IndexHeader.alloc(allocator, new_indexes_len); - self.insertAllEntriesIntoNewHeader(new_header); - header.free(allocator); - self.index_header = new_header; - } - } else { - // An overflow here would mean the amount of memory required would not - // be representable in the address space. - const new_indexes_len = math.ceilPowerOfTwo(usize, needed_len) catch unreachable; - const header = try IndexHeader.alloc(allocator, new_indexes_len); - self.insertAllEntriesIntoNewHeader(header); - self.index_header = header; - } + return self.header().capacity; } - /// Returns the number of total elements which may be present before it is - /// no longer guaranteed that no allocations will be performed. - pub fn capacity(self: Self) usize { - const entry_cap = self.entries.capacity; - const header = self.index_header orelse return math.min(linear_scan_max, entry_cap); - const indexes_cap = (header.indexes_len + 1) * 3 / 4; - return math.min(entry_cap, indexes_cap); + pub fn iterator(self: *const Self) Iterator { + return .{ .hm = self }; } - /// Clobbers any existing data. To detect if a put would clobber - /// existing data, see `getOrPut`. - pub fn put(self: *Self, allocator: *Allocator, key: K, value: V) !void { - const result = try self.getOrPut(allocator, key); - result.entry.value = value; - } - - /// Inserts a key-value pair into the hash map, asserting that no previous - /// entry with the same key is already present + /// Insert an entry in the map. Assumes it is not already present. 
pub fn putNoClobber(self: *Self, allocator: *Allocator, key: K, value: V) !void { - const result = try self.getOrPut(allocator, key); - assert(!result.found_existing); - result.entry.value = value; + assert(!self.contains(key)); + try self.growIfNeeded(allocator, 1); + + self.putAssumeCapacityNoClobber(key, value); } - /// Asserts there is enough capacity to store the new key-value pair. - /// Clobbers any existing data. To detect if a put would clobber - /// existing data, see `getOrPutAssumeCapacity`. pub fn putAssumeCapacity(self: *Self, key: K, value: V) void { - const result = self.getOrPutAssumeCapacity(key); - result.entry.value = value; + const hash = hashFn(key); + const mask = self.capacity() - 1; + const fingerprint = Metadata.takeFingerprint(hash); + var idx = @truncate(usize, hash & mask); + + var first_tombstone_idx: usize = self.capacity(); // invalid index + var metadata = self.metadata.? + idx; + while (metadata[0].isUsed() or metadata[0].isTombstone()) { + if (metadata[0].isUsed() and metadata[0].fingerprint == fingerprint) { + const entry = &self.entries()[idx]; + if (eqlFn(entry.key, key)) { + return; + } + } else if (first_tombstone_idx == self.capacity() and metadata[0].isTombstone()) { + first_tombstone_idx = idx; + } + + idx = (idx + 1) & mask; + metadata = self.metadata.? + idx; + } + + if (first_tombstone_idx < self.capacity()) { + // Cheap try to lower probing lengths after deletions. Recycle a tombstone. + idx = first_tombstone_idx; + metadata = self.metadata.? + idx; + } else { + // We're using a slot previously free. + self.available -= 1; + } + + metadata[0].fill(fingerprint); + const entry = &self.entries()[idx]; + entry.* = .{ .key = key, .value = undefined }; + self.size += 1; } - /// Asserts there is enough capacity to store the new key-value pair. - /// Asserts that it does not clobber any existing data. - /// To detect if a put would clobber existing data, see `getOrPutAssumeCapacity`. + /// Insert an entry in the map. 
Assumes it is not already present, + /// and that no allocation is needed. pub fn putAssumeCapacityNoClobber(self: *Self, key: K, value: V) void { - const result = self.getOrPutAssumeCapacity(key); - assert(!result.found_existing); - result.entry.value = value; + assert(!self.contains(key)); + + const hash = hashFn(key); + const mask = self.capacity() - 1; + var idx = @truncate(usize, hash & mask); + + var metadata = self.metadata.? + idx; + while (metadata[0].isUsed()) { + idx = (idx + 1) & mask; + metadata = self.metadata.? + idx; + } + + if (!metadata[0].isTombstone()) { + assert(self.available > 0); + self.available -= 1; + } + + const fingerprint = Metadata.takeFingerprint(hash); + metadata[0].fill(fingerprint); + self.entries()[idx] = Entry{ .key = key, .value = value }; + + self.size += 1; } /// Inserts a new `Entry` into the hash map, returning the previous one, if any. @@ -488,400 +556,622 @@ pub fn HashMapUnmanaged( } pub fn getEntry(self: Self, key: K) ?*Entry { - const index = self.getIndex(key) orelse return null; - return &self.entries.items[index]; - } + if (self.size == 0) { + return null; + } - pub fn getIndex(self: Self, key: K) ?usize { - const header = self.index_header orelse { - // Linear scan. - const h = if (store_hash) hash(key) else {}; - for (self.entries.items) |*item, i| { - if (item.hash == h and eql(key, item.key)) { - return i; + const hash = hashFn(key); + const mask = self.capacity() - 1; + const fingerprint = Metadata.takeFingerprint(hash); + var idx = @truncate(usize, hash & mask); + + var metadata = self.metadata.? 
+ idx; + while (metadata[0].isUsed() or metadata[0].isTombstone()) { + if (metadata[0].isUsed() and metadata[0].fingerprint == fingerprint) { + const entry = &self.entries()[idx]; + if (eqlFn(entry.key, key)) { + return entry; } } - return null; - }; - switch (header.capacityIndexType()) { - .u8 => return self.getInternal(key, header, u8), - .u16 => return self.getInternal(key, header, u16), - .u32 => return self.getInternal(key, header, u32), - .usize => return self.getInternal(key, header, usize), + idx = (idx + 1) & mask; + metadata = self.metadata.? + idx; } + + return null; } + /// Insert an entry if the associated key is not already present, otherwise update preexisting value. + /// Returns true if the key was already present. + pub fn put(self: *Self, allocator: *Allocator, key: K, value: V) !void { + const result = try self.getOrPut(allocator, key); + result.entry.value = value; + } + + /// Get an optional pointer to the value associated with key, if present. pub fn get(self: Self, key: K) ?V { - return if (self.getEntry(key)) |entry| entry.value else null; + if (self.size == 0) { + return null; + } + + const hash = hashFn(key); + const mask = self.capacity() - 1; + const fingerprint = Metadata.takeFingerprint(hash); + var idx = @truncate(usize, hash & mask); + + var metadata = self.metadata.? + idx; + while (metadata[0].isUsed() or metadata[0].isTombstone()) { + if (metadata[0].isUsed() and metadata[0].fingerprint == fingerprint) { + const entry = &self.entries()[idx]; + if (eqlFn(entry.key, key)) { + return entry.value; + } + } + idx = (idx + 1) & mask; + metadata = self.metadata.? 
+ idx; + } + + return null; } - pub fn contains(self: Self, key: K) bool { - return self.getEntry(key) != null; + pub fn getOrPut(self: *Self, allocator: *Allocator, key: K) !GetOrPutResult { + try self.growIfNeeded(allocator, 1); + + return self.getOrPutAssumeCapacity(key); + } + + pub fn getOrPutAssumeCapacity(self: *Self, key: K) GetOrPutResult { + const hash = hashFn(key); + const mask = self.capacity() - 1; + const fingerprint = Metadata.takeFingerprint(hash); + var idx = @truncate(usize, hash & mask); + + var first_tombstone_idx: usize = self.capacity(); // invalid index + var metadata = self.metadata.? + idx; + while (metadata[0].isUsed() or metadata[0].isTombstone()) { + if (metadata[0].isUsed() and metadata[0].fingerprint == fingerprint) { + const entry = &self.entries()[idx]; + if (eqlFn(entry.key, key)) { + return GetOrPutResult{ .entry = entry, .found_existing = true }; + } + } else if (first_tombstone_idx == self.capacity() and metadata[0].isTombstone()) { + first_tombstone_idx = idx; + } + + idx = (idx + 1) & mask; + metadata = self.metadata.? + idx; + } + + if (first_tombstone_idx < self.capacity()) { + // Cheap try to lower probing lengths after deletions. Recycle a tombstone. + idx = first_tombstone_idx; + metadata = self.metadata.? + idx; + } else { + // We're using a slot previously free. + self.available -= 1; + } + + metadata[0].fill(fingerprint); + const entry = &self.entries()[idx]; + entry.* = .{ .key = key, .value = undefined }; + self.size += 1; + + return GetOrPutResult{ .entry = entry, .found_existing = false }; + } + + pub fn getOrPutValue(self: *Self, allocator: *Allocator, key: K, value: V) !*Entry { + const res = try self.getOrPut(allocator, key); + if (!res.found_existing) res.entry.value = value; + return res.entry; + } + + /// Return true if there is a value associated with key in the map. 
+ pub fn contains(self: *const Self, key: K) bool { + return self.get(key) != null; } /// If there is an `Entry` with a matching key, it is deleted from /// the hash map, and then returned from this function. pub fn remove(self: *Self, key: K) ?Entry { - const header = self.index_header orelse { - // Linear scan. - const h = if (store_hash) hash(key) else {}; - for (self.entries.items) |item, i| { - if (item.hash == h and eql(key, item.key)) { - return self.entries.swapRemove(i); + if (self.size == 0) return null; + + const hash = hashFn(key); + const mask = self.capacity() - 1; + const fingerprint = Metadata.takeFingerprint(hash); + var idx = @truncate(usize, hash & mask); + + var metadata = self.metadata.? + idx; + while (metadata[0].isUsed() or metadata[0].isTombstone()) { + if (metadata[0].isUsed() and metadata[0].fingerprint == fingerprint) { + const entry = &self.entries()[idx]; + if (eqlFn(entry.key, key)) { + const removed_entry = entry.*; + metadata[0].remove(); + entry.* = undefined; + self.size -= 1; + return removed_entry; } } - return null; - }; - switch (header.capacityIndexType()) { - .u8 => return self.removeInternal(key, header, u8), - .u16 => return self.removeInternal(key, header, u16), - .u32 => return self.removeInternal(key, header, u32), - .usize => return self.removeInternal(key, header, usize), + idx = (idx + 1) & mask; + metadata = self.metadata.? + idx; } + + return null; } /// Asserts there is an `Entry` with matching key, deletes it from the hash map, /// and discards it. pub fn removeAssertDiscard(self: *Self, key: K) void { - assert(self.remove(key) != null); + assert(self.contains(key)); + + const hash = hashFn(key); + const mask = self.capacity() - 1; + const fingerprint = Metadata.takeFingerprint(hash); + var idx = @truncate(usize, hash & mask); + + var metadata = self.metadata.? 
+ idx; + while (metadata[0].isUsed() or metadata[0].isTombstone()) { + if (metadata[0].isUsed() and metadata[0].fingerprint == fingerprint) { + const entry = &self.entries()[idx]; + if (eqlFn(entry.key, key)) { + metadata[0].remove(); + entry.* = undefined; + self.size -= 1; + return; + } + } + idx = (idx + 1) & mask; + metadata = self.metadata.? + idx; + } + + unreachable; } - pub fn items(self: Self) []Entry { - return self.entries.items; + fn initMetadatas(self: *Self) void { + @memset(@ptrCast([*]u8, self.metadata.?), 0, @sizeOf(Metadata) * self.capacity()); + } + + // This counts the number of occupied slots, used + tombstones, which is + // what has to stay under the MaxLoadPercentage of capacity. + fn load(self: *const Self) Size { + const max_load = (self.capacity() * MaxLoadPercentage) / 100; + assert(max_load >= self.available); + return @truncate(Size, max_load - self.available); + } + + fn growIfNeeded(self: *Self, allocator: *Allocator, new_count: Size) !void { + if (new_count > self.available) { + try self.grow(allocator, capacityForSize(self.load() + new_count)); + } } pub fn clone(self: Self, allocator: *Allocator) !Self { - var other: Self = .{}; - try other.entries.appendSlice(allocator, self.entries.items); + var other = Self{}; + if (self.size == 0) + return other; - if (self.index_header) |header| { - const new_header = try IndexHeader.alloc(allocator, header.indexes_len); - other.insertAllEntriesIntoNewHeader(new_header); - other.index_header = new_header; + const new_cap = capacityForSize(self.size); + try other.allocate(allocator, new_cap); + other.initMetadatas(); + other.available = @truncate(u32, (new_cap * MaxLoadPercentage) / 100); + + var i: Size = 0; + var metadata = self.metadata.?; + var entr = self.entries(); + while (i < self.capacity()) : (i += 1) { + if (metadata[i].isUsed()) { + const entry = &entr[i]; + other.putAssumeCapacityNoClobber(entry.key, entry.value); + if (other.size == self.size) + break; + } } + return other; } - 
fn removeInternal(self: *Self, key: K, header: *IndexHeader, comptime I: type) ?Entry { - const indexes = header.indexes(I); - const h = hash(key); - const start_index = header.constrainIndex(h); - var roll_over: usize = 0; - while (roll_over <= header.max_distance_from_start_index) : (roll_over += 1) { - const index_index = header.constrainIndex(start_index + roll_over); - var index = &indexes[index_index]; - if (index.isEmpty()) - return null; + fn grow(self: *Self, allocator: *Allocator, new_capacity: Size) !void { + const new_cap = std.math.max(new_capacity, MinimalCapacity); + assert(new_cap > self.capacity()); + assert(std.math.isPowerOfTwo(new_cap)); - const entry = &self.entries.items[index.entry_index]; + var map = Self{}; + defer map.deinit(allocator); + try map.allocate(allocator, new_cap); + map.initMetadatas(); + map.available = @truncate(u32, (new_cap * MaxLoadPercentage) / 100); - const hash_match = if (store_hash) h == entry.hash else true; - if (!hash_match or !eql(key, entry.key)) - continue; - - const removed_entry = self.entries.swapRemove(index.entry_index); - if (self.entries.items.len > 0 and self.entries.items.len != index.entry_index) { - // Because of the swap remove, now we need to update the index that was - // pointing to the last entry and is now pointing to this removed item slot. - self.updateEntryIndex(header, self.entries.items.len, index.entry_index, I, indexes); - } - - // Now we have to shift over the following indexes. 
- roll_over += 1; - while (roll_over < header.indexes_len) : (roll_over += 1) { - const next_index_index = header.constrainIndex(start_index + roll_over); - const next_index = &indexes[next_index_index]; - if (next_index.isEmpty() or next_index.distance_from_start_index == 0) { - index.setEmpty(); - return removed_entry; - } - index.* = next_index.*; - index.distance_from_start_index -= 1; - index = next_index; - } - unreachable; - } - return null; - } - - fn updateEntryIndex( - self: *Self, - header: *IndexHeader, - old_entry_index: usize, - new_entry_index: usize, - comptime I: type, - indexes: []Index(I), - ) void { - const h = if (store_hash) self.entries.items[new_entry_index].hash else hash(self.entries.items[new_entry_index].key); - const start_index = header.constrainIndex(h); - var roll_over: usize = 0; - while (roll_over <= header.max_distance_from_start_index) : (roll_over += 1) { - const index_index = header.constrainIndex(start_index + roll_over); - const index = &indexes[index_index]; - if (index.entry_index == old_entry_index) { - index.entry_index = @intCast(I, new_entry_index); - return; - } - } - unreachable; - } - - /// Must ensureCapacity before calling this. 
- fn getOrPutInternal(self: *Self, key: K, header: *IndexHeader, comptime I: type) GetOrPutResult { - const indexes = header.indexes(I); - const h = hash(key); - const start_index = header.constrainIndex(h); - var roll_over: usize = 0; - var distance_from_start_index: usize = 0; - while (roll_over <= header.indexes_len) : ({ - roll_over += 1; - distance_from_start_index += 1; - }) { - const index_index = header.constrainIndex(start_index + roll_over); - const index = indexes[index_index]; - if (index.isEmpty()) { - indexes[index_index] = .{ - .distance_from_start_index = @intCast(I, distance_from_start_index), - .entry_index = @intCast(I, self.entries.items.len), - }; - header.maybeBumpMax(distance_from_start_index); - const new_entry = self.entries.addOneAssumeCapacity(); - new_entry.* = .{ - .hash = if (store_hash) h else {}, - .key = key, - .value = undefined, - }; - return .{ - .found_existing = false, - .entry = new_entry, - }; - } - - // This pointer survives the following append because we call - // entries.ensureCapacity before getOrPutInternal. - const entry = &self.entries.items[index.entry_index]; - const hash_match = if (store_hash) h == entry.hash else true; - if (hash_match and eql(key, entry.key)) { - return .{ - .found_existing = true, - .entry = entry, - }; - } - if (index.distance_from_start_index < distance_from_start_index) { - // In this case, we did not find the item. We will put a new entry. - // However, we will use this index for the new entry, and move - // the previous index down the line, to keep the max_distance_from_start_index - // as small as possible. 
- indexes[index_index] = .{ - .distance_from_start_index = @intCast(I, distance_from_start_index), - .entry_index = @intCast(I, self.entries.items.len), - }; - header.maybeBumpMax(distance_from_start_index); - const new_entry = self.entries.addOneAssumeCapacity(); - new_entry.* = .{ - .hash = if (store_hash) h else {}, - .key = key, - .value = undefined, - }; - - distance_from_start_index = index.distance_from_start_index; - var prev_entry_index = index.entry_index; - - // Find somewhere to put the index we replaced by shifting - // following indexes backwards. - roll_over += 1; - distance_from_start_index += 1; - while (roll_over < header.indexes_len) : ({ - roll_over += 1; - distance_from_start_index += 1; - }) { - const next_index_index = header.constrainIndex(start_index + roll_over); - const next_index = indexes[next_index_index]; - if (next_index.isEmpty()) { - header.maybeBumpMax(distance_from_start_index); - indexes[next_index_index] = .{ - .entry_index = prev_entry_index, - .distance_from_start_index = @intCast(I, distance_from_start_index), - }; - return .{ - .found_existing = false, - .entry = new_entry, - }; - } - if (next_index.distance_from_start_index < distance_from_start_index) { - header.maybeBumpMax(distance_from_start_index); - indexes[next_index_index] = .{ - .entry_index = prev_entry_index, - .distance_from_start_index = @intCast(I, distance_from_start_index), - }; - distance_from_start_index = next_index.distance_from_start_index; - prev_entry_index = next_index.entry_index; - } - } - unreachable; - } - } - unreachable; - } - - fn getInternal(self: Self, key: K, header: *IndexHeader, comptime I: type) ?usize { - const indexes = header.indexes(I); - const h = hash(key); - const start_index = header.constrainIndex(h); - var roll_over: usize = 0; - while (roll_over <= header.max_distance_from_start_index) : (roll_over += 1) { - const index_index = header.constrainIndex(start_index + roll_over); - const index = indexes[index_index]; - if 
(index.isEmpty()) - return null; - - const entry = &self.entries.items[index.entry_index]; - const hash_match = if (store_hash) h == entry.hash else true; - if (hash_match and eql(key, entry.key)) - return index.entry_index; - } - return null; - } - - fn insertAllEntriesIntoNewHeader(self: *Self, header: *IndexHeader) void { - switch (header.capacityIndexType()) { - .u8 => return self.insertAllEntriesIntoNewHeaderGeneric(header, u8), - .u16 => return self.insertAllEntriesIntoNewHeaderGeneric(header, u16), - .u32 => return self.insertAllEntriesIntoNewHeaderGeneric(header, u32), - .usize => return self.insertAllEntriesIntoNewHeaderGeneric(header, usize), - } - } - - fn insertAllEntriesIntoNewHeaderGeneric(self: *Self, header: *IndexHeader, comptime I: type) void { - const indexes = header.indexes(I); - entry_loop: for (self.entries.items) |entry, i| { - const h = if (store_hash) entry.hash else hash(entry.key); - const start_index = header.constrainIndex(h); - var entry_index = i; - var roll_over: usize = 0; - var distance_from_start_index: usize = 0; - while (roll_over < header.indexes_len) : ({ - roll_over += 1; - distance_from_start_index += 1; - }) { - const index_index = header.constrainIndex(start_index + roll_over); - const next_index = indexes[index_index]; - if (next_index.isEmpty()) { - header.maybeBumpMax(distance_from_start_index); - indexes[index_index] = .{ - .distance_from_start_index = @intCast(I, distance_from_start_index), - .entry_index = @intCast(I, entry_index), - }; - continue :entry_loop; - } - if (next_index.distance_from_start_index < distance_from_start_index) { - header.maybeBumpMax(distance_from_start_index); - indexes[index_index] = .{ - .distance_from_start_index = @intCast(I, distance_from_start_index), - .entry_index = @intCast(I, entry_index), - }; - distance_from_start_index = next_index.distance_from_start_index; - entry_index = next_index.entry_index; + if (self.size != 0) { + const old_capacity = self.capacity(); + var i: Size = 
0; + var metadata = self.metadata.?; + var entr = self.entries(); + while (i < old_capacity) : (i += 1) { + if (metadata[i].isUsed()) { + const entry = &entr[i]; + map.putAssumeCapacityNoClobber(entry.key, entry.value); + if (map.size == self.size) + break; } } - unreachable; } + + self.size = 0; + std.mem.swap(Self, self, &map); + } + + fn allocate(self: *Self, allocator: *Allocator, new_capacity: Size) !void { + const meta_size = @sizeOf(Header) + new_capacity * @sizeOf(Metadata); + + const alignment = @alignOf(Entry) - 1; + const entries_size = @as(usize, new_capacity) * @sizeOf(Entry) + alignment; + + const total_size = meta_size + entries_size; + + const slice = try allocator.alignedAlloc(u8, @alignOf(Header), total_size); + const ptr = @ptrToInt(slice.ptr); + + const metadata = ptr + @sizeOf(Header); + var entry_ptr = ptr + meta_size; + entry_ptr = (entry_ptr + alignment) & ~@as(usize, alignment); + assert(entry_ptr + @as(usize, new_capacity) * @sizeOf(Entry) <= ptr + total_size); + + const hdr = @intToPtr(*Header, ptr); + hdr.entries = @intToPtr([*]Entry, entry_ptr); + hdr.capacity = new_capacity; + self.metadata = @intToPtr([*]Metadata, metadata); } }; } -const CapacityIndexType = enum { u8, u16, u32, usize }; +const testing = std.testing; +const expect = std.testing.expect; +const expectEqual = std.testing.expectEqual; -fn capacityIndexType(indexes_len: usize) CapacityIndexType { - if (indexes_len < math.maxInt(u8)) - return .u8; - if (indexes_len < math.maxInt(u16)) - return .u16; - if (indexes_len < math.maxInt(u32)) - return .u32; - return .usize; +test "std.hash_map basic usage" { + var map = AutoHashMap(u32, u32).init(std.testing.allocator); + defer map.deinit(); + + const count = 5; + var i: u32 = 0; + var total: u32 = 0; + while (i < count) : (i += 1) { + try map.put(i, i); + total += i; + } + + var sum: u32 = 0; + var it = map.iterator(); + while (it.next()) |kv| { + sum += kv.key; + } + expect(sum == total); + + i = 0; + sum = 0; + while (i < 
count) : (i += 1) { + expectEqual(map.get(i).?, i); + sum += map.get(i).?; + } + expectEqual(total, sum); } -fn capacityIndexSize(indexes_len: usize) usize { - switch (capacityIndexType(indexes_len)) { - .u8 => return @sizeOf(Index(u8)), - .u16 => return @sizeOf(Index(u16)), - .u32 => return @sizeOf(Index(u32)), - .usize => return @sizeOf(Index(usize)), +test "std.hash_map ensureCapacity" { + var map = AutoHashMap(i32, i32).init(std.testing.allocator); + defer map.deinit(); + + try map.ensureCapacity(20); + const initial_capacity = map.capacity(); + testing.expect(initial_capacity >= 20); + var i: i32 = 0; + while (i < 20) : (i += 1) { + testing.expect(map.fetchPutAssumeCapacity(i, i + 10) == null); + } + // shouldn't resize from putAssumeCapacity + testing.expect(initial_capacity == map.capacity()); +} + +test "std.hash_map ensureCapacity with tombstones" { + var map = AutoHashMap(i32, i32).init(std.testing.allocator); + defer map.deinit(); + + var i: i32 = 0; + while (i < 100) : (i += 1) { + try map.ensureCapacity(@intCast(u32, map.count() + 1)); + map.putAssumeCapacity(i, i); + // Remove to create tombstones that still count as load in the hashmap. 
+ _ = map.remove(i); } } -fn Index(comptime I: type) type { - return extern struct { - entry_index: I, - distance_from_start_index: I, +test "std.hash_map clearRetainingCapacity" { + var map = AutoHashMap(u32, u32).init(std.testing.allocator); + defer map.deinit(); - const Self = @This(); + map.clearRetainingCapacity(); - const empty = Self{ - .entry_index = math.maxInt(I), - .distance_from_start_index = undefined, - }; + try map.put(1, 1); + expectEqual(map.get(1).?, 1); + expectEqual(map.count(), 1); - fn isEmpty(idx: Self) bool { - return idx.entry_index == math.maxInt(I); - } + const cap = map.capacity(); + expect(cap > 0); - fn setEmpty(idx: *Self) void { - idx.entry_index = math.maxInt(I); - } - }; + map.clearRetainingCapacity(); + map.clearRetainingCapacity(); + expectEqual(map.count(), 0); + expectEqual(map.capacity(), cap); + expect(!map.contains(1)); } -/// This struct is trailed by an array of `Index(I)`, where `I` -/// and the array length are determined by `indexes_len`. -const IndexHeader = struct { - max_distance_from_start_index: usize, - indexes_len: usize, +test "std.hash_map grow" { + var map = AutoHashMap(u32, u32).init(std.testing.allocator); + defer map.deinit(); - fn constrainIndex(header: IndexHeader, i: usize) usize { - // This is an optimization for modulo of power of two integers; - // it requires `indexes_len` to always be a power of two. 
- return i & (header.indexes_len - 1); + const growTo = 12456; + + var i: u32 = 0; + while (i < growTo) : (i += 1) { + try map.put(i, i); + } + expectEqual(map.count(), growTo); + + i = 0; + var it = map.iterator(); + while (it.next()) |kv| { + expectEqual(kv.key, kv.value); + i += 1; + } + expectEqual(i, growTo); + + i = 0; + while (i < growTo) : (i += 1) { + expectEqual(map.get(i).?, i); + } +} + +test "std.hash_map clone" { + var map = AutoHashMap(u32, u32).init(std.testing.allocator); + defer map.deinit(); + + var a = try map.clone(); + defer a.deinit(); + + expectEqual(a.count(), 0); + + try a.put(1, 1); + try a.put(2, 2); + try a.put(3, 3); + + var b = try a.clone(); + defer b.deinit(); + + expectEqual(b.count(), 3); + expectEqual(b.get(1), 1); + expectEqual(b.get(2), 2); + expectEqual(b.get(3), 3); +} + +test "std.hash_map ensureCapacity with existing elements" { + var map = AutoHashMap(u32, u32).init(std.testing.allocator); + defer map.deinit(); + + try map.put(0, 0); + expectEqual(map.count(), 1); + expectEqual(map.capacity(), @TypeOf(map).Unmanaged.MinimalCapacity); + + try map.ensureCapacity(65); + expectEqual(map.count(), 1); + expectEqual(map.capacity(), 128); +} + +test "std.hash_map ensureCapacity satisfies max load factor" { + var map = AutoHashMap(u32, u32).init(std.testing.allocator); + defer map.deinit(); + + try map.ensureCapacity(127); + expectEqual(map.capacity(), 256); +} + +test "std.hash_map remove" { + var map = AutoHashMap(u32, u32).init(std.testing.allocator); + defer map.deinit(); + + var i: u32 = 0; + while (i < 16) : (i += 1) { + try map.put(i, i); } - fn indexes(header: *IndexHeader, comptime I: type) []Index(I) { - const start = @ptrCast([*]Index(I), @ptrCast([*]u8, header) + @sizeOf(IndexHeader)); - return start[0..header.indexes_len]; + i = 0; + while (i < 16) : (i += 1) { + if (i % 3 == 0) { + _ = map.remove(i); + } + } + expectEqual(map.count(), 10); + var it = map.iterator(); + while (it.next()) |kv| { + expectEqual(kv.key, 
kv.value); + expect(kv.key % 3 != 0); } - fn capacityIndexType(header: IndexHeader) CapacityIndexType { - return hash_map.capacityIndexType(header.indexes_len); + i = 0; + while (i < 16) : (i += 1) { + if (i % 3 == 0) { + expect(!map.contains(i)); + } else { + expectEqual(map.get(i).?, i); + } + } +} + +test "std.hash_map reverse removes" { + var map = AutoHashMap(u32, u32).init(std.testing.allocator); + defer map.deinit(); + + var i: u32 = 0; + while (i < 16) : (i += 1) { + try map.putNoClobber(i, i); } - fn maybeBumpMax(header: *IndexHeader, distance_from_start_index: usize) void { - if (distance_from_start_index > header.max_distance_from_start_index) { - header.max_distance_from_start_index = distance_from_start_index; + i = 16; + while (i > 0) : (i -= 1) { + _ = map.remove(i - 1); + expect(!map.contains(i - 1)); + var j: u32 = 0; + while (j < i - 1) : (j += 1) { + expectEqual(map.get(j).?, j); } } - fn alloc(allocator: *Allocator, len: usize) !*IndexHeader { - const index_size = hash_map.capacityIndexSize(len); - const nbytes = @sizeOf(IndexHeader) + index_size * len; - const bytes = try allocator.allocAdvanced(u8, @alignOf(IndexHeader), nbytes, .exact); - @memset(bytes.ptr + @sizeOf(IndexHeader), 0xff, bytes.len - @sizeOf(IndexHeader)); - const result = @ptrCast(*IndexHeader, bytes.ptr); - result.* = .{ - .max_distance_from_start_index = 0, - .indexes_len = len, - }; - return result; + expectEqual(map.count(), 0); +} + +test "std.hash_map multiple removes on same metadata" { + var map = AutoHashMap(u32, u32).init(std.testing.allocator); + defer map.deinit(); + + var i: u32 = 0; + while (i < 16) : (i += 1) { + try map.put(i, i); } - fn free(header: *IndexHeader, allocator: *Allocator) void { - const index_size = hash_map.capacityIndexSize(header.indexes_len); - const ptr = @ptrCast([*]u8, header); - const slice = ptr[0 .. 
@sizeOf(IndexHeader) + header.indexes_len * index_size]; - allocator.free(slice); - } -}; + _ = map.remove(7); + _ = map.remove(15); + _ = map.remove(14); + _ = map.remove(13); + expect(!map.contains(7)); + expect(!map.contains(15)); + expect(!map.contains(14)); + expect(!map.contains(13)); -test "basic hash map usage" { + i = 0; + while (i < 13) : (i += 1) { + if (i == 7) { + expect(!map.contains(i)); + } else { + expectEqual(map.get(i).?, i); + } + } + + try map.put(15, 15); + try map.put(13, 13); + try map.put(14, 14); + try map.put(7, 7); + i = 0; + while (i < 16) : (i += 1) { + expectEqual(map.get(i).?, i); + } +} + +test "std.hash_map put and remove loop in random order" { + var map = AutoHashMap(u32, u32).init(std.testing.allocator); + defer map.deinit(); + + var keys = std.ArrayList(u32).init(std.testing.allocator); + defer keys.deinit(); + + const size = 32; + const iterations = 100; + + var i: u32 = 0; + while (i < size) : (i += 1) { + try keys.append(i); + } + var rng = std.rand.DefaultPrng.init(0); + + while (i < iterations) : (i += 1) { + std.rand.Random.shuffle(&rng.random, u32, keys.items); + + for (keys.items) |key| { + try map.put(key, key); + } + expectEqual(map.count(), size); + + for (keys.items) |key| { + _ = map.remove(key); + } + expectEqual(map.count(), 0); + } +} + +test "std.hash_map remove one million elements in random order" { + const Map = AutoHashMap(u32, u32); + const n = 1000 * 1000; + var map = Map.init(std.heap.page_allocator); + defer map.deinit(); + + var keys = std.ArrayList(u32).init(std.heap.page_allocator); + defer keys.deinit(); + + var i: u32 = 0; + while (i < n) : (i += 1) { + keys.append(i) catch unreachable; + } + + var rng = std.rand.DefaultPrng.init(0); + std.rand.Random.shuffle(&rng.random, u32, keys.items); + + for (keys.items) |key| { + map.put(key, key) catch unreachable; + } + + std.rand.Random.shuffle(&rng.random, u32, keys.items); + i = 0; + while (i < n) : (i += 1) { + const key = keys.items[i]; + _ = 
map.remove(key); + } +} + +test "std.hash_map put" { + var map = AutoHashMap(u32, u32).init(std.testing.allocator); + defer map.deinit(); + + var i: u32 = 0; + while (i < 16) : (i += 1) { + _ = try map.put(i, i); + } + + i = 0; + while (i < 16) : (i += 1) { + expectEqual(map.get(i).?, i); + } + + i = 0; + while (i < 16) : (i += 1) { + try map.put(i, i * 16 + 1); + } + + i = 0; + while (i < 16) : (i += 1) { + expectEqual(map.get(i).?, i * 16 + 1); + } +} + +test "std.hash_map getOrPut" { + var map = AutoHashMap(u32, u32).init(std.testing.allocator); + defer map.deinit(); + + var i: u32 = 0; + while (i < 10) : (i += 1) { + try map.put(i * 2, 2); + } + + i = 0; + while (i < 20) : (i += 1) { + var n = try map.getOrPutValue(i, 1); + } + + i = 0; + var sum = i; + while (i < 20) : (i += 1) { + sum += map.get(i).?; + } + + expectEqual(sum, 30); +} + +test "std.hash_map basic hash map usage" { var map = AutoHashMap(i32, i32).init(std.testing.allocator); defer map.deinit(); @@ -925,85 +1215,10 @@ test "basic hash map usage" { map.removeAssertDiscard(3); } -test "iterator hash map" { - // https://github.com/ziglang/zig/issues/5127 - if (std.Target.current.cpu.arch == .mips) return error.SkipZigTest; - - var reset_map = AutoHashMap(i32, i32).init(std.testing.allocator); - defer reset_map.deinit(); - - // test ensureCapacity with a 0 parameter - try reset_map.ensureCapacity(0); - - try reset_map.putNoClobber(0, 11); - try reset_map.putNoClobber(1, 22); - try reset_map.putNoClobber(2, 33); - - var keys = [_]i32{ - 0, 2, 1, - }; - - var values = [_]i32{ - 11, 33, 22, - }; - - var buffer = [_]i32{ - 0, 0, 0, - }; - - var it = reset_map.iterator(); - const first_entry = it.next().?; - it.reset(); - - var count: usize = 0; - while (it.next()) |entry| : (count += 1) { - buffer[@intCast(usize, entry.key)] = entry.value; - } - testing.expect(count == 3); - testing.expect(it.next() == null); - - for (buffer) |v, i| { - testing.expect(buffer[@intCast(usize, keys[i])] == values[i]); - } - 
- it.reset(); - count = 0; - while (it.next()) |entry| { - buffer[@intCast(usize, entry.key)] = entry.value; - count += 1; - if (count >= 2) break; - } - - for (buffer[0..2]) |v, i| { - testing.expect(buffer[@intCast(usize, keys[i])] == values[i]); - } - - it.reset(); - var entry = it.next().?; - testing.expect(entry.key == first_entry.key); - testing.expect(entry.value == first_entry.value); -} - -test "ensure capacity" { - var map = AutoHashMap(i32, i32).init(std.testing.allocator); - defer map.deinit(); - - try map.ensureCapacity(20); - const initial_capacity = map.capacity(); - testing.expect(initial_capacity >= 20); - var i: i32 = 0; - while (i < 20) : (i += 1) { - testing.expect(map.fetchPutAssumeCapacity(i, i + 10) == null); - } - // shouldn't resize from putAssumeCapacity - testing.expect(initial_capacity == map.capacity()); -} - -test "clone" { +test "std.hash_map clone" { var original = AutoHashMap(i32, i32).init(std.testing.allocator); defer original.deinit(); - // put more than `linear_scan_max` so we can test that the index header is properly cloned var i: u8 = 0; while (i < 10) : (i += 1) { try original.putNoClobber(i, i * 10); @@ -1017,69 +1232,3 @@ test "clone" { testing.expect(copy.get(i).? 
== i * 10); } } - -pub fn getHashPtrAddrFn(comptime K: type) (fn (K) u32) { - return struct { - fn hash(key: K) u32 { - return getAutoHashFn(usize)(@ptrToInt(key)); - } - }.hash; -} - -pub fn getTrivialEqlFn(comptime K: type) (fn (K, K) bool) { - return struct { - fn eql(a: K, b: K) bool { - return a == b; - } - }.eql; -} - -pub fn getAutoHashFn(comptime K: type) (fn (K) u32) { - return struct { - fn hash(key: K) u32 { - if (comptime trait.hasUniqueRepresentation(K)) { - return @truncate(u32, Wyhash.hash(0, std.mem.asBytes(&key))); - } else { - var hasher = Wyhash.init(0); - autoHash(&hasher, key); - return @truncate(u32, hasher.final()); - } - } - }.hash; -} - -pub fn getAutoEqlFn(comptime K: type) (fn (K, K) bool) { - return struct { - fn eql(a: K, b: K) bool { - return meta.eql(a, b); - } - }.eql; -} - -pub fn autoEqlIsCheap(comptime K: type) bool { - return switch (@typeInfo(K)) { - .Bool, - .Int, - .Float, - .Pointer, - .ComptimeFloat, - .ComptimeInt, - .Enum, - .Fn, - .ErrorSet, - .AnyFrame, - .EnumLiteral, - => true, - else => false, - }; -} - -pub fn getAutoHashStratFn(comptime K: type, comptime strategy: std.hash.Strategy) (fn (K) u32) { - return struct { - fn hash(key: K) u32 { - var hasher = Wyhash.init(0); - std.hash.autoHashStrat(&hasher, key, strategy); - return @truncate(u32, hasher.final()); - } - }.hash; -} diff --git a/lib/std/heap/general_purpose_allocator.zig b/lib/std/heap/general_purpose_allocator.zig index 4b9a5aea66..2ae13cba0c 100644 --- a/lib/std/heap/general_purpose_allocator.zig +++ b/lib/std/heap/general_purpose_allocator.zig @@ -325,7 +325,8 @@ pub fn GeneralPurposeAllocator(comptime config: Config) type { break; } } - for (self.large_allocations.items()) |*large_alloc| { + var it = self.large_allocations.iterator(); + while (it.next()) |large_alloc| { log.err("Memory leak detected: {}", .{large_alloc.value.getStackTrace()}); leaks = true; } @@ -584,7 +585,7 @@ pub fn GeneralPurposeAllocator(comptime config: Config) type { if 
(new_aligned_size > largest_bucket_object_size) { try self.large_allocations.ensureCapacity( self.backing_allocator, - self.large_allocations.entries.items.len + 1, + self.large_allocations.count() + 1, ); const slice = try self.backing_allocator.allocFn(self.backing_allocator, len, ptr_align, len_align, ret_addr); diff --git a/lib/std/http/headers.zig b/lib/std/http/headers.zig index 8c80af512f..0ce642865c 100644 --- a/lib/std/http/headers.zig +++ b/lib/std/http/headers.zig @@ -123,9 +123,9 @@ pub const Headers = struct { pub fn deinit(self: *Self) void { { - for (self.index.items()) |*entry| { - const dex = &entry.value; - dex.deinit(self.allocator); + var it = self.index.iterator(); + while (it.next()) |entry| { + entry.value.deinit(self.allocator); self.allocator.free(entry.key); } self.index.deinit(self.allocator); @@ -333,7 +333,8 @@ pub const Headers = struct { fn rebuildIndex(self: *Self) void { // clear out the indexes - for (self.index.items()) |*entry| { + var it = self.index.iterator(); + while (it.next()) |entry| { entry.value.shrinkRetainingCapacity(0); } // fill up indexes again; we know capacity is fine from before diff --git a/lib/std/std.zig b/lib/std/std.zig index 2ff44f5e41..330f3c253b 100644 --- a/lib/std/std.zig +++ b/lib/std/std.zig @@ -3,11 +3,15 @@ // This file is part of [zig](https://ziglang.org/), which is MIT licensed. // The MIT license requires this copyright notice to be included in all copies // and substantial portions of the software. 
+pub const ArrayHashMap = array_hash_map.ArrayHashMap; +pub const ArrayHashMapUnmanaged = array_hash_map.ArrayHashMapUnmanaged; pub const ArrayList = @import("array_list.zig").ArrayList; pub const ArrayListAligned = @import("array_list.zig").ArrayListAligned; pub const ArrayListAlignedUnmanaged = @import("array_list.zig").ArrayListAlignedUnmanaged; pub const ArrayListSentineled = @import("array_list_sentineled.zig").ArrayListSentineled; pub const ArrayListUnmanaged = @import("array_list.zig").ArrayListUnmanaged; +pub const AutoArrayHashMap = array_hash_map.AutoArrayHashMap; +pub const AutoArrayHashMapUnmanaged = array_hash_map.AutoArrayHashMapUnmanaged; pub const AutoHashMap = hash_map.AutoHashMap; pub const AutoHashMapUnmanaged = hash_map.AutoHashMapUnmanaged; pub const BloomFilter = @import("bloom_filter.zig").BloomFilter; @@ -32,10 +36,13 @@ pub const SinglyLinkedList = @import("linked_list.zig").SinglyLinkedList; pub const SpinLock = @import("spinlock.zig").SpinLock; pub const StringHashMap = hash_map.StringHashMap; pub const StringHashMapUnmanaged = hash_map.StringHashMapUnmanaged; +pub const StringArrayHashMap = array_hash_map.StringArrayHashMap; +pub const StringArrayHashMapUnmanaged = array_hash_map.StringArrayHashMapUnmanaged; pub const TailQueue = @import("linked_list.zig").TailQueue; pub const Target = @import("target.zig").Target; pub const Thread = @import("thread.zig").Thread; +pub const array_hash_map = @import("array_hash_map.zig"); pub const atomic = @import("atomic.zig"); pub const base64 = @import("base64.zig"); pub const build = @import("build.zig"); diff --git a/src-self-hosted/Module.zig b/src-self-hosted/Module.zig index 72597975c9..24dcb541b4 100644 --- a/src-self-hosted/Module.zig +++ b/src-self-hosted/Module.zig @@ -36,17 +36,17 @@ bin_file_path: []const u8, /// It's rare for a decl to be exported, so we save memory by having a sparse map of /// Decl pointers to details about them being exported. 
/// The Export memory is owned by the `export_owners` table; the slice itself is owned by this table. -decl_exports: std.AutoHashMapUnmanaged(*Decl, []*Export) = .{}, +decl_exports: std.AutoArrayHashMapUnmanaged(*Decl, []*Export) = .{}, /// We track which export is associated with the given symbol name for quick /// detection of symbol collisions. -symbol_exports: std.StringHashMapUnmanaged(*Export) = .{}, +symbol_exports: std.StringArrayHashMapUnmanaged(*Export) = .{}, /// This models the Decls that perform exports, so that `decl_exports` can be updated when a Decl /// is modified. Note that the key of this table is not the Decl being exported, but the Decl that /// is performing the export of another Decl. /// This table owns the Export memory. -export_owners: std.AutoHashMapUnmanaged(*Decl, []*Export) = .{}, +export_owners: std.AutoArrayHashMapUnmanaged(*Decl, []*Export) = .{}, /// Maps fully qualified namespaced names to the Decl struct for them. -decl_table: std.HashMapUnmanaged(Scope.NameHash, *Decl, Scope.name_hash_hash, Scope.name_hash_eql, false) = .{}, +decl_table: std.ArrayHashMapUnmanaged(Scope.NameHash, *Decl, Scope.name_hash_hash, Scope.name_hash_eql, false) = .{}, link_error_flags: link.File.ErrorFlags = .{}, @@ -57,13 +57,13 @@ work_queue: std.fifo.LinearFifo(WorkItem, .Dynamic), /// The ErrorMsg memory is owned by the decl, using Module's allocator. /// Note that a Decl can succeed but the Fn it represents can fail. In this case, /// a Decl can have a failed_decls entry but have analysis status of success. -failed_decls: std.AutoHashMapUnmanaged(*Decl, *ErrorMsg) = .{}, +failed_decls: std.AutoArrayHashMapUnmanaged(*Decl, *ErrorMsg) = .{}, /// Using a map here for consistency with the other fields here. /// The ErrorMsg memory is owned by the `Scope`, using Module's allocator. 
-failed_files: std.AutoHashMapUnmanaged(*Scope, *ErrorMsg) = .{}, +failed_files: std.AutoArrayHashMapUnmanaged(*Scope, *ErrorMsg) = .{}, /// Using a map here for consistency with the other fields here. /// The ErrorMsg memory is owned by the `Export`, using Module's allocator. -failed_exports: std.AutoHashMapUnmanaged(*Export, *ErrorMsg) = .{}, +failed_exports: std.AutoArrayHashMapUnmanaged(*Export, *ErrorMsg) = .{}, /// Incrementing integer used to compare against the corresponding Decl /// field to determine whether a Decl's status applies to an ongoing update, or a @@ -201,9 +201,9 @@ pub const Decl = struct { /// typed_value may need to be regenerated. dependencies: DepsTable = .{}, - /// The reason this is not `std.AutoHashMapUnmanaged` is a workaround for + /// The reason this is not `std.AutoArrayHashMapUnmanaged` is a workaround for /// stage1 compiler giving me: `error: struct 'Module.Decl' depends on itself` - pub const DepsTable = std.HashMapUnmanaged(*Decl, void, std.hash_map.getAutoHashFn(*Decl), std.hash_map.getAutoEqlFn(*Decl), false); + pub const DepsTable = std.ArrayHashMapUnmanaged(*Decl, void, std.array_hash_map.getAutoHashFn(*Decl), std.array_hash_map.getAutoEqlFn(*Decl), false); pub fn destroy(self: *Decl, gpa: *Allocator) void { gpa.free(mem.spanZ(self.name)); @@ -933,7 +933,8 @@ pub fn deinit(self: *Module) void { self.symbol_exports.deinit(gpa); self.root_scope.destroy(gpa); - for (self.global_error_set.items()) |entry| { + var it = self.global_error_set.iterator(); + while (it.next()) |entry| { gpa.free(entry.key); } self.global_error_set.deinit(gpa); @@ -1756,7 +1757,7 @@ fn analyzeRootSrcFile(self: *Module, root_scope: *Scope.File) !void { // Keep track of the decls that we expect to see in this file so that // we know which ones have been deleted. 
- var deleted_decls = std.AutoHashMap(*Decl, void).init(self.gpa); + var deleted_decls = std.AutoArrayHashMap(*Decl, void).init(self.gpa); defer deleted_decls.deinit(); try deleted_decls.ensureCapacity(root_scope.decls.items.len); for (root_scope.decls.items) |file_decl| { @@ -1877,7 +1878,7 @@ fn analyzeRootZIRModule(self: *Module, root_scope: *Scope.ZIRModule) !void { // Keep track of the decls that we expect to see in this file so that // we know which ones have been deleted. - var deleted_decls = std.AutoHashMap(*Decl, void).init(self.gpa); + var deleted_decls = std.AutoArrayHashMap(*Decl, void).init(self.gpa); defer deleted_decls.deinit(); try deleted_decls.ensureCapacity(self.decl_table.items().len); for (self.decl_table.items()) |entry| { @@ -2087,7 +2088,7 @@ pub fn getErrorValue(self: *Module, name: []const u8) !std.StringHashMapUnmanage errdefer self.global_error_set.removeAssertDiscard(name); gop.entry.key = try self.gpa.dupe(u8, name); - gop.entry.value = @intCast(u16, self.global_error_set.items().len - 1); + gop.entry.value = @intCast(u16, self.global_error_set.count() - 1); return gop.entry.*; } diff --git a/src-self-hosted/codegen.zig b/src-self-hosted/codegen.zig index 82c06d8003..d6e3194c12 100644 --- a/src-self-hosted/codegen.zig +++ b/src-self-hosted/codegen.zig @@ -359,7 +359,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type { }; const Branch = struct { - inst_table: std.AutoHashMapUnmanaged(*ir.Inst, MCValue) = .{}, + inst_table: std.AutoArrayHashMapUnmanaged(*ir.Inst, MCValue) = .{}, fn deinit(self: *Branch, gpa: *Allocator) void { self.inst_table.deinit(gpa); @@ -750,7 +750,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type { const ptr_bits = arch.ptrBitWidth(); const ptr_bytes: u64 = @divExact(ptr_bits, 8); if (abi_size <= ptr_bytes) { - try self.registers.ensureCapacity(self.gpa, self.registers.items().len + 1); + try self.registers.ensureCapacity(self.gpa, self.registers.count() + 1); if (self.allocReg(inst)) |reg| { return 
MCValue{ .register = registerAlias(reg, abi_size) }; } @@ -788,7 +788,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type { /// `reg_owner` is the instruction that gets associated with the register in the register table. /// This can have a side effect of spilling instructions to the stack to free up a register. fn copyToNewRegister(self: *Self, reg_owner: *ir.Inst, mcv: MCValue) !MCValue { - try self.registers.ensureCapacity(self.gpa, self.registers.items().len + 1); + try self.registers.ensureCapacity(self.gpa, @intCast(u32, self.registers.count() + 1)); const reg = self.allocReg(reg_owner) orelse b: { // We'll take over the first register. Move the instruction that was previously @@ -1247,7 +1247,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type { if (inst.base.isUnused()) return MCValue.dead; - try self.registers.ensureCapacity(self.gpa, self.registers.items().len + 1); + try self.registers.ensureCapacity(self.gpa, self.registers.count() + 1); const result = self.args[self.arg_index]; self.arg_index += 1; diff --git a/src-self-hosted/codegen/c.zig b/src-self-hosted/codegen/c.zig index 9a7a4888be..c037c55289 100644 --- a/src-self-hosted/codegen/c.zig +++ b/src-self-hosted/codegen/c.zig @@ -110,7 +110,8 @@ const Context = struct { } fn deinit(self: *Context) void { - for (self.inst_map.items()) |kv| { + var it = self.inst_map.iterator(); + while (it.next()) |kv| { self.file.base.allocator.free(kv.value); } self.inst_map.deinit(); diff --git a/src-self-hosted/link.zig b/src-self-hosted/link.zig index 7a5680dfbf..ecf3876582 100644 --- a/src-self-hosted/link.zig +++ b/src-self-hosted/link.zig @@ -47,7 +47,7 @@ pub const File = struct { }; /// For DWARF .debug_info. - pub const DbgInfoTypeRelocsTable = std.HashMapUnmanaged(Type, DbgInfoTypeReloc, Type.hash, Type.eql, true); + pub const DbgInfoTypeRelocsTable = std.HashMapUnmanaged(Type, DbgInfoTypeReloc, Type.hash, Type.eql, std.hash_map.DefaultMaxLoadPercentage); /// For DWARF .debug_info. 
pub const DbgInfoTypeReloc = struct { diff --git a/src-self-hosted/link/Elf.zig b/src-self-hosted/link/Elf.zig index 8bf28557b4..69f1260d20 100644 --- a/src-self-hosted/link/Elf.zig +++ b/src-self-hosted/link/Elf.zig @@ -1629,7 +1629,8 @@ pub fn updateDecl(self: *Elf, module: *Module, decl: *Module.Decl) !void { var dbg_info_type_relocs: File.DbgInfoTypeRelocsTable = .{}; defer { - for (dbg_info_type_relocs.items()) |*entry| { + var it = dbg_info_type_relocs.iterator(); + while (it.next()) |entry| { entry.value.relocs.deinit(self.base.allocator); } dbg_info_type_relocs.deinit(self.base.allocator); @@ -1917,7 +1918,8 @@ pub fn updateDecl(self: *Elf, module: *Module, decl: *Module.Decl) !void { // Now we emit the .debug_info types of the Decl. These will count towards the size of // the buffer, so we have to do it before computing the offset, and we can't perform the actual // relocations yet. - for (dbg_info_type_relocs.items()) |*entry| { + var it = dbg_info_type_relocs.iterator(); + while (it.next()) |entry| { entry.value.off = @intCast(u32, dbg_info_buffer.items.len); try self.addDbgInfoType(entry.key, &dbg_info_buffer); } @@ -1925,7 +1927,8 @@ pub fn updateDecl(self: *Elf, module: *Module, decl: *Module.Decl) !void { try self.updateDeclDebugInfoAllocation(text_block, @intCast(u32, dbg_info_buffer.items.len)); // Now that we have the offset assigned we can finally perform type relocations. 
- for (dbg_info_type_relocs.items()) |entry| { + it = dbg_info_type_relocs.iterator(); + while (it.next()) |entry| { for (entry.value.relocs.items) |off| { mem.writeInt( u32, diff --git a/src-self-hosted/liveness.zig b/src-self-hosted/liveness.zig index 6ea949d8af..d528e09ce7 100644 --- a/src-self-hosted/liveness.zig +++ b/src-self-hosted/liveness.zig @@ -15,7 +15,7 @@ pub fn analyze( var table = std.AutoHashMap(*ir.Inst, void).init(gpa); defer table.deinit(); - try table.ensureCapacity(body.instructions.len); + try table.ensureCapacity(@intCast(u32, body.instructions.len)); try analyzeWithTable(arena, &table, null, body); } @@ -84,8 +84,11 @@ fn analyzeInst( try analyzeWithTable(arena, table, &then_table, inst.then_body); // Reset the table back to its state from before the branch. - for (then_table.items()) |entry| { - table.removeAssertDiscard(entry.key); + { + var it = then_table.iterator(); + while (it.next()) |entry| { + table.removeAssertDiscard(entry.key); + } } var else_table = std.AutoHashMap(*ir.Inst, void).init(table.allocator); @@ -97,28 +100,36 @@ fn analyzeInst( var else_entry_deaths = std.ArrayList(*ir.Inst).init(table.allocator); defer else_entry_deaths.deinit(); - for (else_table.items()) |entry| { - const else_death = entry.key; - if (!then_table.contains(else_death)) { - try then_entry_deaths.append(else_death); + { + var it = else_table.iterator(); + while (it.next()) |entry| { + const else_death = entry.key; + if (!then_table.contains(else_death)) { + try then_entry_deaths.append(else_death); + } } } // This loop is the same, except it's for the then branch, and it additionally // has to put its items back into the table to undo the reset. 
- for (then_table.items()) |entry| { - const then_death = entry.key; - if (!else_table.contains(then_death)) { - try else_entry_deaths.append(then_death); + { + var it = then_table.iterator(); + while (it.next()) |entry| { + const then_death = entry.key; + if (!else_table.contains(then_death)) { + try else_entry_deaths.append(then_death); + } + _ = try table.put(then_death, {}); } - _ = try table.put(then_death, {}); } // Now we have to correctly populate new_set. if (new_set) |ns| { - try ns.ensureCapacity(ns.items().len + then_table.items().len + else_table.items().len); - for (then_table.items()) |entry| { + try ns.ensureCapacity(@intCast(u32, ns.count() + then_table.count() + else_table.count())); + var it = then_table.iterator(); + while (it.next()) |entry| { _ = ns.putAssumeCapacity(entry.key, {}); } - for (else_table.items()) |entry| { + it = else_table.iterator(); + while (it.next()) |entry| { _ = ns.putAssumeCapacity(entry.key, {}); } } diff --git a/src-self-hosted/translate_c.zig b/src-self-hosted/translate_c.zig index b333d24c51..68d1dabb0e 100644 --- a/src-self-hosted/translate_c.zig +++ b/src-self-hosted/translate_c.zig @@ -19,23 +19,9 @@ pub const Error = error{OutOfMemory}; const TypeError = Error || error{UnsupportedType}; const TransError = TypeError || error{UnsupportedTranslation}; -const DeclTable = std.HashMap(usize, []const u8, addrHash, addrEql, false); +const DeclTable = std.AutoArrayHashMap(usize, []const u8); -fn addrHash(x: usize) u32 { - switch (@typeInfo(usize).Int.bits) { - 32 => return x, - // pointers are usually aligned so we ignore the bits that are probably all 0 anyway - // usually the larger bits of addr space are unused so we just chop em off - 64 => return @truncate(u32, x >> 4), - else => @compileError("unreachable"), - } -} - -fn addrEql(a: usize, b: usize) bool { - return a == b; -} - -const SymbolTable = std.StringHashMap(*ast.Node); +const SymbolTable = std.StringArrayHashMap(*ast.Node); const AliasList = 
std.ArrayList(struct { alias: []const u8, name: []const u8, @@ -285,7 +271,7 @@ pub const Context = struct { /// a list of names that we found by visiting all the top level decls without /// translating them. The other maps are updated as we translate; this one is updated /// up front in a pre-processing step. - global_names: std.StringHashMap(void), + global_names: std.StringArrayHashMap(void), fn getMangle(c: *Context) u32 { c.mangle_count += 1; @@ -380,7 +366,7 @@ pub fn translate( .alias_list = AliasList.init(gpa), .global_scope = try arena.allocator.create(Scope.Root), .clang_context = ZigClangASTUnit_getASTContext(ast_unit).?, - .global_names = std.StringHashMap(void).init(gpa), + .global_names = std.StringArrayHashMap(void).init(gpa), .token_ids = .{}, .token_locs = .{}, .errors = .{}, @@ -6424,7 +6410,8 @@ fn getFnProto(c: *Context, ref: *ast.Node) ?*ast.Node.FnProto { } fn addMacros(c: *Context) !void { - for (c.global_scope.macro_table.items()) |kv| { + var it = c.global_scope.macro_table.iterator(); + while (it.next()) |kv| { if (getFnProto(c, kv.value)) |proto_node| { // If a macro aliases a global variable which is a function pointer, we conclude that // the macro is intended to represent a function that assumes the function pointer diff --git a/src-self-hosted/type.zig b/src-self-hosted/type.zig index 13024f34de..a9a1acf44b 100644 --- a/src-self-hosted/type.zig +++ b/src-self-hosted/type.zig @@ -238,7 +238,7 @@ pub const Type = extern union { } } - pub fn hash(self: Type) u32 { + pub fn hash(self: Type) u64 { var hasher = std.hash.Wyhash.init(0); const zig_type_tag = self.zigTypeTag(); std.hash.autoHash(&hasher, zig_type_tag); @@ -303,7 +303,7 @@ pub const Type = extern union { // TODO implement more type hashing }, } - return @truncate(u32, hasher.final()); + return hasher.final(); } pub fn copy(self: Type, allocator: *Allocator) error{OutOfMemory}!Type { diff --git a/src-self-hosted/value.zig b/src-self-hosted/value.zig index 6a39371ebe..bfd205f4d9 
100644 --- a/src-self-hosted/value.zig +++ b/src-self-hosted/value.zig @@ -358,7 +358,8 @@ pub const Value = extern union { .error_set => { const error_set = val.cast(Payload.ErrorSet).?; try out_stream.writeAll("error{"); - for (error_set.fields.items()) |entry| { + var it = error_set.fields.iterator(); + while (it.next()) |entry| { try out_stream.print("{},", .{entry.value}); } return out_stream.writeAll("}"); diff --git a/src-self-hosted/zir.zig b/src-self-hosted/zir.zig index 8915cb0f90..04d3393626 100644 --- a/src-self-hosted/zir.zig +++ b/src-self-hosted/zir.zig @@ -1049,7 +1049,7 @@ pub const Module = struct { defer write.loop_table.deinit(); // First, build a map of *Inst to @ or % indexes - try write.inst_table.ensureCapacity(self.decls.len); + try write.inst_table.ensureCapacity(@intCast(u32, self.decls.len)); for (self.decls) |decl, decl_i| { try write.inst_table.putNoClobber(decl.inst, .{ .inst = decl.inst, .index = null, .name = decl.name }); @@ -1685,7 +1685,7 @@ pub fn emit(allocator: *Allocator, old_module: IrModule) !Module { .arena = std.heap.ArenaAllocator.init(allocator), .old_module = &old_module, .next_auto_name = 0, - .names = std.StringHashMap(void).init(allocator), + .names = std.StringArrayHashMap(void).init(allocator), .primitive_table = std.AutoHashMap(Inst.Primitive.Builtin, *Decl).init(allocator), .indent = 0, .block_table = std.AutoHashMap(*ir.Inst.Block, *Inst.Block).init(allocator), @@ -1758,7 +1758,7 @@ const EmitZIR = struct { arena: std.heap.ArenaAllocator, old_module: *const IrModule, decls: std.ArrayListUnmanaged(*Decl), - names: std.StringHashMap(void), + names: std.StringArrayHashMap(void), next_auto_name: usize, primitive_table: std.AutoHashMap(Inst.Primitive.Builtin, *Decl), indent: usize, diff --git a/src-self-hosted/zir_sema.zig b/src-self-hosted/zir_sema.zig index b4dafac1da..8a61178a68 100644 --- a/src-self-hosted/zir_sema.zig +++ b/src-self-hosted/zir_sema.zig @@ -812,7 +812,7 @@ fn analyzeInstErrorSet(mod: *Module, 
scope: *Scope, inst: *zir.Inst.ErrorSet) In .fields = .{}, .decl = undefined, // populated below }; - try payload.fields.ensureCapacity(&new_decl_arena.allocator, inst.positionals.fields.len); + try payload.fields.ensureCapacity(&new_decl_arena.allocator, @intCast(u32, inst.positionals.fields.len)); for (inst.positionals.fields) |field_name| { const entry = try mod.getErrorValue(field_name); From 1b2154dfe2f9b5030f487e7c4be8c706ce6e59b5 Mon Sep 17 00:00:00 2001 From: Tadeo Kondrak Date: Tue, 1 Sep 2020 18:55:36 -0600 Subject: [PATCH 14/35] builtin: Add TypeInfo.StructField.is_comptime --- lib/std/builtin.zig | 1 + lib/std/meta/trailer_flags.zig | 1 + src/ir.cpp | 6 ++++++ test/stage1/behavior/type_info.zig | 6 ++++++ 4 files changed, 14 insertions(+) diff --git a/lib/std/builtin.zig b/lib/std/builtin.zig index 3d103d6d06..911a0eb15c 100644 --- a/lib/std/builtin.zig +++ b/lib/std/builtin.zig @@ -261,6 +261,7 @@ pub const TypeInfo = union(enum) { name: []const u8, field_type: type, default_value: anytype, + is_comptime: bool, }; /// This data structure is used by the Zig language code generation and diff --git a/lib/std/meta/trailer_flags.zig b/lib/std/meta/trailer_flags.zig index 19f6ee2e80..c8c1323686 100644 --- a/lib/std/meta/trailer_flags.zig +++ b/lib/std/meta/trailer_flags.zig @@ -46,6 +46,7 @@ pub fn TrailerFlags(comptime Fields: type) type { ??struct_field.field_type, @as(?struct_field.field_type, null), ), + .is_comptime = false, }; } break :blk @Type(.{ diff --git a/src/ir.cpp b/src/ir.cpp index 692dd392e1..124c1feb06 100644 --- a/src/ir.cpp +++ b/src/ir.cpp @@ -25683,6 +25683,10 @@ static Error ir_make_type_info_value(IrAnalyze *ira, IrInst* source_instr, ZigTy } set_optional_payload(inner_fields[2], struct_field->init_val); + inner_fields[3]->special = ConstValSpecialStatic; + inner_fields[3]->type = ira->codegen->builtin_types.entry_bool; + inner_fields[3]->data.x_bool = struct_field->is_comptime; + ZigValue *name = create_const_str_lit(ira->codegen, 
struct_field->name)->data.x_ptr.data.ref.pointee; init_const_slice(ira->codegen, inner_fields[0], name, 0, buf_len(struct_field->name), true); @@ -26291,6 +26295,8 @@ static ZigType *type_info_to_type(IrAnalyze *ira, IrInst *source_instr, ZigTypeI buf_ptr(&field->type_entry->name), buf_ptr(&field->type_entry->name))); return ira->codegen->invalid_inst_gen->value->type; } + if ((err = get_const_field_bool(ira, source_instr->source_node, field_value, "is_comptime", 3, &field->is_comptime))) + return ira->codegen->invalid_inst_gen->value->type; } return entry; diff --git a/test/stage1/behavior/type_info.zig b/test/stage1/behavior/type_info.zig index d0da8815f7..409993a741 100644 --- a/test/stage1/behavior/type_info.zig +++ b/test/stage1/behavior/type_info.zig @@ -418,3 +418,9 @@ test "Struct.is_tuple" { expect(@typeInfo(@TypeOf(.{0})).Struct.is_tuple); expect(!@typeInfo(@TypeOf(.{ .a = 0 })).Struct.is_tuple); } + +test "StructField.is_comptime" { + const info = @typeInfo(struct { x: u8 = 3, comptime y: u32 = 5 }).Struct; + expect(!info.fields[0].is_comptime); + expect(info.fields[1].is_comptime); +} From 12ce6eb8f65e6de5920c713cf2767e4b8ba0f1d3 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Tue, 1 Sep 2020 23:23:29 -0700 Subject: [PATCH 15/35] stage2: support dbg_stmt at comptime --- src-self-hosted/zir_sema.zig | 8 ++++++-- test/stage2/test.zig | 4 ++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/src-self-hosted/zir_sema.zig b/src-self-hosted/zir_sema.zig index 8a61178a68..676b662077 100644 --- a/src-self-hosted/zir_sema.zig +++ b/src-self-hosted/zir_sema.zig @@ -643,8 +643,12 @@ fn analyzeInstBreakVoid(mod: *Module, scope: *Scope, inst: *zir.Inst.BreakVoid) } fn analyzeInstDbgStmt(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerError!*Inst { - const b = try mod.requireRuntimeBlock(scope, inst.base.src); - return mod.addNoOp(b, inst.base.src, Type.initTag(.void), .dbg_stmt); + if (scope.cast(Scope.Block)) |b| { + if (!b.is_comptime) { + 
return mod.addNoOp(b, inst.base.src, Type.initTag(.void), .dbg_stmt); + } + } + return mod.constVoid(scope, inst.base.src); } fn analyzeInstDeclRefStr(mod: *Module, scope: *Scope, inst: *zir.Inst.DeclRefStr) InnerError!*Inst { diff --git a/test/stage2/test.zig b/test/stage2/test.zig index b631e37b97..ad81e463b9 100644 --- a/test/stage2/test.zig +++ b/test/stage2/test.zig @@ -973,8 +973,8 @@ pub fn addCases(ctx: *TestContext) !void { \\comptime { \\ _ = foo; \\} - \\extern var foo; - , &[_][]const u8{":2:5: error: unable to resolve comptime value"}); + \\extern var foo: i32; + , &[_][]const u8{":2:9: error: unable to resolve comptime value"}); case.addError( \\export fn entry() void { \\ _ = foo; From bb848dbeeec133b2ca1d6dc42fe9dd04b848b58a Mon Sep 17 00:00:00 2001 From: Lachlan Easton Date: Wed, 2 Sep 2020 20:16:28 +1000 Subject: [PATCH 16/35] zig fmt: Patch rename stream to ais (auto indenting stream) & other small refactors --- lib/std/io/auto_indenting_stream.zig | 24 +- lib/std/io/change_detection_stream.zig | 26 +- lib/std/io/find_byte_out_stream.zig | 26 +- lib/std/io/writer.zig | 4 - lib/std/zig/parser_test.zig | 2 +- lib/std/zig/render.zig | 1229 ++++++++++++------------ src-self-hosted/main.zig | 8 +- src-self-hosted/stage2.zig | 2 +- 8 files changed, 653 insertions(+), 668 deletions(-) diff --git a/lib/std/io/auto_indenting_stream.zig b/lib/std/io/auto_indenting_stream.zig index d4256324f1..d08878e851 100644 --- a/lib/std/io/auto_indenting_stream.zig +++ b/lib/std/io/auto_indenting_stream.zig @@ -5,13 +5,13 @@ const assert = std.debug.assert; /// Automatically inserts indentation of written data by keeping /// track of the current indentation level -pub fn AutoIndentingStream(comptime WriterType: type) type { +pub fn AutoIndentingStream(comptime UnderlyingWriter: type) type { return struct { const Self = @This(); - pub const Error = WriterType.Error; + pub const Error = UnderlyingWriter.Error; pub const Writer = io.Writer(*Self, Error, write); - 
writer_pointer: *WriterType, + underlying_writer: UnderlyingWriter, indent_count: usize = 0, indent_delta: usize, @@ -20,10 +20,6 @@ pub fn AutoIndentingStream(comptime WriterType: type) type { applied_indent: usize = 0, // the most recently applied indent indent_next_line: usize = 0, // not used until the next line - pub fn init(indent_delta: usize, writer_pointer: *WriterType) Self { - return Self{ .writer_pointer = writer_pointer, .indent_delta = indent_delta }; - } - pub fn writer(self: *Self) Writer { return .{ .context = self }; } @@ -55,7 +51,7 @@ pub fn AutoIndentingStream(comptime WriterType: type) type { if (bytes.len == 0) return @as(usize, 0); - try self.writer_pointer.writer().writeAll(bytes); + try self.underlying_writer.writeAll(bytes); if (bytes[bytes.len - 1] == '\n') self.resetLine(); return bytes.len; @@ -115,7 +111,7 @@ pub fn AutoIndentingStream(comptime WriterType: type) type { fn applyIndent(self: *Self) Error!void { const current_indent = self.currentIndent(); if (self.current_line_empty and current_indent > 0) { - try self.writer_pointer.writer().writeByteNTimes(' ', current_indent); + try self.underlying_writer.writeByteNTimes(' ', current_indent); self.applied_indent = current_indent; } @@ -143,8 +139,10 @@ pub fn AutoIndentingStream(comptime WriterType: type) type { pub fn autoIndentingStream( indent_delta: usize, - underlying_stream: anytype, -) AutoIndentingStream(@TypeOf(underlying_stream).Child) { - comptime assert(@typeInfo(@TypeOf(underlying_stream)) == .Pointer); - return AutoIndentingStream(@TypeOf(underlying_stream).Child).init(indent_delta, underlying_stream); + underlying_writer: anytype, +) AutoIndentingStream(@TypeOf(underlying_writer)) { + return AutoIndentingStream(@TypeOf(underlying_writer)){ + .underlying_writer = underlying_writer, + .indent_delta = indent_delta, + }; } diff --git a/lib/std/io/change_detection_stream.zig b/lib/std/io/change_detection_stream.zig index b559e66751..5ba2bb3c10 100644 --- 
a/lib/std/io/change_detection_stream.zig +++ b/lib/std/io/change_detection_stream.zig @@ -10,19 +10,11 @@ pub fn ChangeDetectionStream(comptime WriterType: type) type { pub const Error = WriterType.Error; pub const Writer = io.Writer(*Self, Error, write); - anything_changed: bool = false, - writer_pointer: *const WriterType, + anything_changed: bool, + underlying_writer: WriterType, source_index: usize, source: []const u8, - pub fn init(source: []const u8, writer_pointer: *const WriterType) Self { - return Self{ - .writer_pointer = writer_pointer, - .source_index = 0, - .source = source, - }; - } - pub fn writer(self: *Self) Writer { return .{ .context = self }; } @@ -41,7 +33,7 @@ pub fn ChangeDetectionStream(comptime WriterType: type) type { } } - return self.writer_pointer.write(bytes); + return self.underlying_writer.write(bytes); } pub fn changeDetected(self: *Self) bool { @@ -52,8 +44,12 @@ pub fn ChangeDetectionStream(comptime WriterType: type) type { pub fn changeDetectionStream( source: []const u8, - underlying_stream: anytype, -) ChangeDetectionStream(@TypeOf(underlying_stream).Child) { - comptime assert(@typeInfo(@TypeOf(underlying_stream)) == .Pointer); - return ChangeDetectionStream(@TypeOf(underlying_stream).Child).init(source, underlying_stream); + underlying_writer: anytype, +) ChangeDetectionStream(@TypeOf(underlying_writer)) { + return ChangeDetectionStream(@TypeOf(underlying_writer)){ + .anything_changed = false, + .underlying_writer = underlying_writer, + .source_index = 0, + .source = source, + }; } diff --git a/lib/std/io/find_byte_out_stream.zig b/lib/std/io/find_byte_out_stream.zig index bfd0e815e4..b8689b7992 100644 --- a/lib/std/io/find_byte_out_stream.zig +++ b/lib/std/io/find_byte_out_stream.zig @@ -4,24 +4,16 @@ const assert = std.debug.assert; /// An OutStream that returns whether the given character has been written to it. /// The contents are not written to anything. 
-pub fn FindByteOutStream(comptime WriterType: type) type { +pub fn FindByteOutStream(comptime UnderlyingWriter: type) type { return struct { const Self = @This(); - pub const Error = WriterType.Error; + pub const Error = UnderlyingWriter.Error; pub const Writer = io.Writer(*Self, Error, write); - writer_pointer: *const WriterType, + underlying_writer: UnderlyingWriter, byte_found: bool, byte: u8, - pub fn init(byte: u8, writer_pointer: *const WriterType) Self { - return Self{ - .writer_pointer = writer_pointer, - .byte = byte, - .byte_found = false, - }; - } - pub fn writer(self: *Self) Writer { return .{ .context = self }; } @@ -34,11 +26,15 @@ pub fn FindByteOutStream(comptime WriterType: type) type { break :blk false; }; } - return self.writer_pointer.writer().write(bytes); + return self.underlying_writer.write(bytes); } }; } -pub fn findByteOutStream(byte: u8, underlying_stream: anytype) FindByteOutStream(@TypeOf(underlying_stream).Child) { - comptime assert(@typeInfo(@TypeOf(underlying_stream)) == .Pointer); - return FindByteOutStream(@TypeOf(underlying_stream).Child).init(byte, underlying_stream); + +pub fn findByteOutStream(byte: u8, underlying_writer: anytype) FindByteOutStream(@TypeOf(underlying_writer)) { + return FindByteOutStream(@TypeOf(underlying_writer)){ + .underlying_writer = underlying_writer, + .byte = byte, + .byte_found = false, + }; } diff --git a/lib/std/io/writer.zig b/lib/std/io/writer.zig index ffdca0d6a6..39729ef0a2 100644 --- a/lib/std/io/writer.zig +++ b/lib/std/io/writer.zig @@ -18,10 +18,6 @@ pub fn Writer( const Self = @This(); pub const Error = WriteError; - pub fn writer(self: *const Self) Self { - return self.*; - } - pub fn write(self: Self, bytes: []const u8) Error!usize { return writeFn(self.context, bytes); } diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 8652a73c50..36ceb400dc 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -3364,7 +3364,7 @@ fn testParse(source: 
[]const u8, allocator: *mem.Allocator, anything_changed: *b errdefer buffer.deinit(); const outStream = buffer.outStream(); - anything_changed.* = try std.zig.render(allocator, &outStream, tree); + anything_changed.* = try std.zig.render(allocator, outStream, tree); return buffer.toOwnedSlice(); } fn testTransform(source: []const u8, expected_source: []const u8) !void { diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index b7a2b8675a..237ca07d2b 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -19,13 +19,12 @@ pub const Error = error{ }; /// Returns whether anything changed -pub fn render(allocator: *mem.Allocator, stream: anytype, tree: *ast.Tree) (meta.Child(@TypeOf(stream)).Error || Error)!bool { +pub fn render(allocator: *mem.Allocator, stream: anytype, tree: *ast.Tree) (@TypeOf(stream).Error || Error)!bool { // cannot render an invalid tree std.debug.assert(tree.errors.len == 0); - var s = stream.*; - var change_detection_stream = std.io.changeDetectionStream(tree.source, &s); - var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, &change_detection_stream); + var change_detection_stream = std.io.changeDetectionStream(tree.source, stream); + var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, change_detection_stream.writer()); try renderRoot(allocator, &auto_indenting_stream, tree); @@ -34,19 +33,19 @@ pub fn render(allocator: *mem.Allocator, stream: anytype, tree: *ast.Tree) (meta fn renderRoot( allocator: *mem.Allocator, - stream: anytype, + ais: anytype, tree: *ast.Tree, -) (@TypeOf(stream.*).Error || Error)!void { +) (@TypeOf(ais.*).Error || Error)!void { // render all the line comments at the beginning of the file for (tree.token_ids) |token_id, i| { if (token_id != .LineComment) break; const token_loc = tree.token_locs[i]; - try stream.writer().print("{}\n", .{mem.trimRight(u8, tree.tokenSliceLoc(token_loc), " ")}); + try ais.writer().print("{}\n", .{mem.trimRight(u8, 
tree.tokenSliceLoc(token_loc), " ")}); const next_token = tree.token_locs[i + 1]; const loc = tree.tokenLocationLoc(token_loc.end, next_token); if (loc.line >= 2) { - try stream.insertNewline(); + try ais.insertNewline(); } } @@ -110,7 +109,7 @@ fn renderRoot( // If there's no next reformatted `decl`, just copy the // remaining input tokens and bail out. const start = tree.token_locs[copy_start_token_index].start; - try copyFixingWhitespace(stream, tree.source[start..]); + try copyFixingWhitespace(ais, tree.source[start..]); return; } decl = root_decls[decl_i]; @@ -151,25 +150,25 @@ fn renderRoot( const start = tree.token_locs[copy_start_token_index].start; const end = tree.token_locs[copy_end_token_index].start; - try copyFixingWhitespace(stream, tree.source[start..end]); + try copyFixingWhitespace(ais, tree.source[start..end]); } - try renderTopLevelDecl(allocator, stream, tree, decl); + try renderTopLevelDecl(allocator, ais, tree, decl); decl_i += 1; if (decl_i >= root_decls.len) return; - try renderExtraNewline(tree, stream, root_decls[decl_i]); + try renderExtraNewline(tree, ais, root_decls[decl_i]); } } -fn renderExtraNewline(tree: *ast.Tree, stream: anytype, node: *ast.Node) @TypeOf(stream.*).Error!void { - return renderExtraNewlineToken(tree, stream, node.firstToken()); +fn renderExtraNewline(tree: *ast.Tree, ais: anytype, node: *ast.Node) @TypeOf(ais.*).Error!void { + return renderExtraNewlineToken(tree, ais, node.firstToken()); } fn renderExtraNewlineToken( tree: *ast.Tree, - stream: anytype, + ais: anytype, first_token: ast.TokenIndex, -) @TypeOf(stream.*).Error!void { +) @TypeOf(ais.*).Error!void { var prev_token = first_token; if (prev_token == 0) return; var newline_threshold: usize = 2; @@ -182,27 +181,27 @@ fn renderExtraNewlineToken( const prev_token_end = tree.token_locs[prev_token - 1].end; const loc = tree.tokenLocation(prev_token_end, first_token); if (loc.line >= newline_threshold) { - try stream.insertNewline(); + try ais.insertNewline(); } } 
-fn renderTopLevelDecl(allocator: *mem.Allocator, stream: anytype, tree: *ast.Tree, decl: *ast.Node) (@TypeOf(stream.*).Error || Error)!void { - try renderContainerDecl(allocator, stream, tree, decl, .Newline); +fn renderTopLevelDecl(allocator: *mem.Allocator, ais: anytype, tree: *ast.Tree, decl: *ast.Node) (@TypeOf(ais.*).Error || Error)!void { + try renderContainerDecl(allocator, ais, tree, decl, .Newline); } -fn renderContainerDecl(allocator: *mem.Allocator, stream: anytype, tree: *ast.Tree, decl: *ast.Node, space: Space) (@TypeOf(stream.*).Error || Error)!void { +fn renderContainerDecl(allocator: *mem.Allocator, ais: anytype, tree: *ast.Tree, decl: *ast.Node, space: Space) (@TypeOf(ais.*).Error || Error)!void { switch (decl.tag) { .FnProto => { const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", decl); - try renderDocComments(tree, stream, fn_proto, fn_proto.getDocComments()); + try renderDocComments(tree, ais, fn_proto, fn_proto.getDocComments()); if (fn_proto.getBodyNode()) |body_node| { - try renderExpression(allocator, stream, tree, decl, .Space); - try renderExpression(allocator, stream, tree, body_node, space); + try renderExpression(allocator, ais, tree, decl, .Space); + try renderExpression(allocator, ais, tree, body_node, space); } else { - try renderExpression(allocator, stream, tree, decl, .None); - try renderToken(tree, stream, tree.nextToken(decl.lastToken()), space); + try renderExpression(allocator, ais, tree, decl, .None); + try renderToken(tree, ais, tree.nextToken(decl.lastToken()), space); } }, @@ -210,35 +209,35 @@ fn renderContainerDecl(allocator: *mem.Allocator, stream: anytype, tree: *ast.Tr const use_decl = @fieldParentPtr(ast.Node.Use, "base", decl); if (use_decl.visib_token) |visib_token| { - try renderToken(tree, stream, visib_token, .Space); // pub + try renderToken(tree, ais, visib_token, .Space); // pub } - try renderToken(tree, stream, use_decl.use_token, .Space); // usingnamespace - try renderExpression(allocator, stream, 
tree, use_decl.expr, .None); - try renderToken(tree, stream, use_decl.semicolon_token, space); // ; + try renderToken(tree, ais, use_decl.use_token, .Space); // usingnamespace + try renderExpression(allocator, ais, tree, use_decl.expr, .None); + try renderToken(tree, ais, use_decl.semicolon_token, space); // ; }, .VarDecl => { const var_decl = @fieldParentPtr(ast.Node.VarDecl, "base", decl); - try renderDocComments(tree, stream, var_decl, var_decl.getDocComments()); - try renderVarDecl(allocator, stream, tree, var_decl); + try renderDocComments(tree, ais, var_decl, var_decl.getDocComments()); + try renderVarDecl(allocator, ais, tree, var_decl); }, .TestDecl => { const test_decl = @fieldParentPtr(ast.Node.TestDecl, "base", decl); - try renderDocComments(tree, stream, test_decl, test_decl.doc_comments); - try renderToken(tree, stream, test_decl.test_token, .Space); - try renderExpression(allocator, stream, tree, test_decl.name, .Space); - try renderExpression(allocator, stream, tree, test_decl.body_node, space); + try renderDocComments(tree, ais, test_decl, test_decl.doc_comments); + try renderToken(tree, ais, test_decl.test_token, .Space); + try renderExpression(allocator, ais, tree, test_decl.name, .Space); + try renderExpression(allocator, ais, tree, test_decl.body_node, space); }, .ContainerField => { const field = @fieldParentPtr(ast.Node.ContainerField, "base", decl); - try renderDocComments(tree, stream, field, field.doc_comments); + try renderDocComments(tree, ais, field, field.doc_comments); if (field.comptime_token) |t| { - try renderToken(tree, stream, t, .Space); // comptime + try renderToken(tree, ais, t, .Space); // comptime } const src_has_trailing_comma = blk: { @@ -251,67 +250,67 @@ fn renderContainerDecl(allocator: *mem.Allocator, stream: anytype, tree: *ast.Tr const last_token_space: Space = if (src_has_trailing_comma) .None else space; if (field.type_expr == null and field.value_expr == null) { - try renderToken(tree, stream, field.name_token, 
last_token_space); // name + try renderToken(tree, ais, field.name_token, last_token_space); // name } else if (field.type_expr != null and field.value_expr == null) { - try renderToken(tree, stream, field.name_token, .None); // name - try renderToken(tree, stream, tree.nextToken(field.name_token), .Space); // : + try renderToken(tree, ais, field.name_token, .None); // name + try renderToken(tree, ais, tree.nextToken(field.name_token), .Space); // : if (field.align_expr) |align_value_expr| { - try renderExpression(allocator, stream, tree, field.type_expr.?, .Space); // type + try renderExpression(allocator, ais, tree, field.type_expr.?, .Space); // type const lparen_token = tree.prevToken(align_value_expr.firstToken()); const align_kw = tree.prevToken(lparen_token); const rparen_token = tree.nextToken(align_value_expr.lastToken()); - try renderToken(tree, stream, align_kw, .None); // align - try renderToken(tree, stream, lparen_token, .None); // ( - try renderExpression(allocator, stream, tree, align_value_expr, .None); // alignment - try renderToken(tree, stream, rparen_token, last_token_space); // ) + try renderToken(tree, ais, align_kw, .None); // align + try renderToken(tree, ais, lparen_token, .None); // ( + try renderExpression(allocator, ais, tree, align_value_expr, .None); // alignment + try renderToken(tree, ais, rparen_token, last_token_space); // ) } else { - try renderExpression(allocator, stream, tree, field.type_expr.?, last_token_space); // type + try renderExpression(allocator, ais, tree, field.type_expr.?, last_token_space); // type } } else if (field.type_expr == null and field.value_expr != null) { - try renderToken(tree, stream, field.name_token, .Space); // name - try renderToken(tree, stream, tree.nextToken(field.name_token), .Space); // = - try renderExpression(allocator, stream, tree, field.value_expr.?, last_token_space); // value + try renderToken(tree, ais, field.name_token, .Space); // name + try renderToken(tree, ais, 
tree.nextToken(field.name_token), .Space); // = + try renderExpression(allocator, ais, tree, field.value_expr.?, last_token_space); // value } else { - try renderToken(tree, stream, field.name_token, .None); // name - try renderToken(tree, stream, tree.nextToken(field.name_token), .Space); // : + try renderToken(tree, ais, field.name_token, .None); // name + try renderToken(tree, ais, tree.nextToken(field.name_token), .Space); // : if (field.align_expr) |align_value_expr| { - try renderExpression(allocator, stream, tree, field.type_expr.?, .Space); // type + try renderExpression(allocator, ais, tree, field.type_expr.?, .Space); // type const lparen_token = tree.prevToken(align_value_expr.firstToken()); const align_kw = tree.prevToken(lparen_token); const rparen_token = tree.nextToken(align_value_expr.lastToken()); - try renderToken(tree, stream, align_kw, .None); // align - try renderToken(tree, stream, lparen_token, .None); // ( - try renderExpression(allocator, stream, tree, align_value_expr, .None); // alignment - try renderToken(tree, stream, rparen_token, .Space); // ) + try renderToken(tree, ais, align_kw, .None); // align + try renderToken(tree, ais, lparen_token, .None); // ( + try renderExpression(allocator, ais, tree, align_value_expr, .None); // alignment + try renderToken(tree, ais, rparen_token, .Space); // ) } else { - try renderExpression(allocator, stream, tree, field.type_expr.?, .Space); // type + try renderExpression(allocator, ais, tree, field.type_expr.?, .Space); // type } - try renderToken(tree, stream, tree.prevToken(field.value_expr.?.firstToken()), .Space); // = - try renderExpression(allocator, stream, tree, field.value_expr.?, last_token_space); // value + try renderToken(tree, ais, tree.prevToken(field.value_expr.?.firstToken()), .Space); // = + try renderExpression(allocator, ais, tree, field.value_expr.?, last_token_space); // value } if (src_has_trailing_comma) { const comma = tree.nextToken(field.lastToken()); - try 
renderToken(tree, stream, comma, space); + try renderToken(tree, ais, comma, space); } }, .Comptime => { assert(!decl.requireSemiColon()); - try renderExpression(allocator, stream, tree, decl, space); + try renderExpression(allocator, ais, tree, decl, space); }, .DocComment => { const comment = @fieldParentPtr(ast.Node.DocComment, "base", decl); const kind = tree.token_ids[comment.first_line]; - try renderToken(tree, stream, comment.first_line, .Newline); + try renderToken(tree, ais, comment.first_line, .Newline); var tok_i = comment.first_line + 1; while (true) : (tok_i += 1) { const tok_id = tree.token_ids[tok_i]; if (tok_id == kind) { - try renderToken(tree, stream, tok_i, .Newline); + try renderToken(tree, ais, tok_i, .Newline); } else if (tok_id == .LineComment) { continue; } else { @@ -325,11 +324,11 @@ fn renderContainerDecl(allocator: *mem.Allocator, stream: anytype, tree: *ast.Tr fn renderExpression( allocator: *mem.Allocator, - stream: anytype, + ais: anytype, tree: *ast.Tree, base: *ast.Node, space: Space, -) (@TypeOf(stream.*).Error || Error)!void { +) (@TypeOf(ais.*).Error || Error)!void { switch (base.tag) { .Identifier, .IntegerLiteral, @@ -343,18 +342,18 @@ fn renderExpression( .UndefinedLiteral, => { const casted_node = base.cast(ast.Node.OneToken).?; - return renderToken(tree, stream, casted_node.token, space); + return renderToken(tree, ais, casted_node.token, space); }, .AnyType => { const any_type = base.castTag(.AnyType).?; if (mem.eql(u8, tree.tokenSlice(any_type.token), "var")) { // TODO remove in next release cycle - try stream.writer().writeAll("anytype"); - if (space == .Comma) try stream.writer().writeAll(",\n"); + try ais.writer().writeAll("anytype"); + if (space == .Comma) try ais.writer().writeAll(",\n"); return; } - return renderToken(tree, stream, any_type.token, space); + return renderToken(tree, ais, any_type.token, space); }, .Block, .LabeledBlock => { @@ -384,65 +383,65 @@ fn renderExpression( }; if (block.label) |label| { - try 
renderToken(tree, stream, label, Space.None); - try renderToken(tree, stream, tree.nextToken(label), Space.Space); + try renderToken(tree, ais, label, Space.None); + try renderToken(tree, ais, tree.nextToken(label), Space.Space); } if (block.statements.len == 0) { - stream.pushIndentNextLine(); - defer stream.popIndent(); - try renderToken(tree, stream, block.lbrace, Space.None); + ais.pushIndentNextLine(); + defer ais.popIndent(); + try renderToken(tree, ais, block.lbrace, Space.None); } else { - stream.pushIndentNextLine(); - defer stream.popIndent(); + ais.pushIndentNextLine(); + defer ais.popIndent(); - try renderToken(tree, stream, block.lbrace, Space.Newline); + try renderToken(tree, ais, block.lbrace, Space.Newline); for (block.statements) |statement, i| { - try renderStatement(allocator, stream, tree, statement); + try renderStatement(allocator, ais, tree, statement); if (i + 1 < block.statements.len) { - try renderExtraNewline(tree, stream, block.statements[i + 1]); + try renderExtraNewline(tree, ais, block.statements[i + 1]); } } } - return renderToken(tree, stream, block.rbrace, space); + return renderToken(tree, ais, block.rbrace, space); }, .Defer => { const defer_node = @fieldParentPtr(ast.Node.Defer, "base", base); - try renderToken(tree, stream, defer_node.defer_token, Space.Space); + try renderToken(tree, ais, defer_node.defer_token, Space.Space); if (defer_node.payload) |payload| { - try renderExpression(allocator, stream, tree, payload, Space.Space); + try renderExpression(allocator, ais, tree, payload, Space.Space); } - return renderExpression(allocator, stream, tree, defer_node.expr, space); + return renderExpression(allocator, ais, tree, defer_node.expr, space); }, .Comptime => { const comptime_node = @fieldParentPtr(ast.Node.Comptime, "base", base); - try renderToken(tree, stream, comptime_node.comptime_token, Space.Space); - return renderExpression(allocator, stream, tree, comptime_node.expr, space); + try renderToken(tree, ais, 
comptime_node.comptime_token, Space.Space); + return renderExpression(allocator, ais, tree, comptime_node.expr, space); }, .Nosuspend => { const nosuspend_node = @fieldParentPtr(ast.Node.Nosuspend, "base", base); if (mem.eql(u8, tree.tokenSlice(nosuspend_node.nosuspend_token), "noasync")) { // TODO: remove this - try stream.writer().writeAll("nosuspend "); + try ais.writer().writeAll("nosuspend "); } else { - try renderToken(tree, stream, nosuspend_node.nosuspend_token, Space.Space); + try renderToken(tree, ais, nosuspend_node.nosuspend_token, Space.Space); } - return renderExpression(allocator, stream, tree, nosuspend_node.expr, space); + return renderExpression(allocator, ais, tree, nosuspend_node.expr, space); }, .Suspend => { const suspend_node = @fieldParentPtr(ast.Node.Suspend, "base", base); if (suspend_node.body) |body| { - try renderToken(tree, stream, suspend_node.suspend_token, Space.Space); - return renderExpression(allocator, stream, tree, body, space); + try renderToken(tree, ais, suspend_node.suspend_token, Space.Space); + return renderExpression(allocator, ais, tree, body, space); } else { - return renderToken(tree, stream, suspend_node.suspend_token, space); + return renderToken(tree, ais, suspend_node.suspend_token, space); } }, @@ -450,21 +449,21 @@ fn renderExpression( const infix_op_node = @fieldParentPtr(ast.Node.Catch, "base", base); const op_space = Space.Space; - try renderExpression(allocator, stream, tree, infix_op_node.lhs, op_space); + try renderExpression(allocator, ais, tree, infix_op_node.lhs, op_space); const after_op_space = blk: { const same_line = tree.tokensOnSameLine(infix_op_node.op_token, tree.nextToken(infix_op_node.op_token)); break :blk if (same_line) op_space else Space.Newline; }; - try renderToken(tree, stream, infix_op_node.op_token, after_op_space); + try renderToken(tree, ais, infix_op_node.op_token, after_op_space); if (infix_op_node.payload) |payload| { - try renderExpression(allocator, stream, tree, payload, 
Space.Space); + try renderExpression(allocator, ais, tree, payload, Space.Space); } - stream.pushIndentOneShot(); - return renderExpression(allocator, stream, tree, infix_op_node.rhs, space); + ais.pushIndentOneShot(); + return renderExpression(allocator, ais, tree, infix_op_node.rhs, space); }, .Add, @@ -516,16 +515,16 @@ fn renderExpression( .Period, .ErrorUnion, .Range => Space.None, else => Space.Space, }; - try renderExpression(allocator, stream, tree, infix_op_node.lhs, op_space); + try renderExpression(allocator, ais, tree, infix_op_node.lhs, op_space); const after_op_space = blk: { const loc = tree.tokenLocation(tree.token_locs[infix_op_node.op_token].end, tree.nextToken(infix_op_node.op_token)); break :blk if (loc.line == 0) op_space else Space.Newline; }; - try renderToken(tree, stream, infix_op_node.op_token, after_op_space); - stream.pushIndentOneShot(); - return renderExpression(allocator, stream, tree, infix_op_node.rhs, space); + try renderToken(tree, ais, infix_op_node.op_token, after_op_space); + ais.pushIndentOneShot(); + return renderExpression(allocator, ais, tree, infix_op_node.rhs, space); }, .BitNot, @@ -536,8 +535,8 @@ fn renderExpression( .AddressOf, => { const casted_node = @fieldParentPtr(ast.Node.SimplePrefixOp, "base", base); - try renderToken(tree, stream, casted_node.op_token, Space.None); - return renderExpression(allocator, stream, tree, casted_node.rhs, space); + try renderToken(tree, ais, casted_node.op_token, Space.None); + return renderExpression(allocator, ais, tree, casted_node.rhs, space); }, .Try, @@ -545,15 +544,15 @@ fn renderExpression( .Await, => { const casted_node = @fieldParentPtr(ast.Node.SimplePrefixOp, "base", base); - try renderToken(tree, stream, casted_node.op_token, Space.Space); - return renderExpression(allocator, stream, tree, casted_node.rhs, space); + try renderToken(tree, ais, casted_node.op_token, Space.Space); + return renderExpression(allocator, ais, tree, casted_node.rhs, space); }, .ArrayType => { 
const array_type = @fieldParentPtr(ast.Node.ArrayType, "base", base); return renderArrayType( allocator, - stream, + ais, tree, array_type.op_token, array_type.rhs, @@ -566,7 +565,7 @@ fn renderExpression( const array_type = @fieldParentPtr(ast.Node.ArrayTypeSentinel, "base", base); return renderArrayType( allocator, - stream, + ais, tree, array_type.op_token, array_type.rhs, @@ -580,111 +579,111 @@ fn renderExpression( const ptr_type = @fieldParentPtr(ast.Node.PtrType, "base", base); const op_tok_id = tree.token_ids[ptr_type.op_token]; switch (op_tok_id) { - .Asterisk, .AsteriskAsterisk => try stream.writer().writeByte('*'), + .Asterisk, .AsteriskAsterisk => try ais.writer().writeByte('*'), .LBracket => if (tree.token_ids[ptr_type.op_token + 2] == .Identifier) - try stream.writer().writeAll("[*c") + try ais.writer().writeAll("[*c") else - try stream.writer().writeAll("[*"), + try ais.writer().writeAll("[*"), else => unreachable, } if (ptr_type.ptr_info.sentinel) |sentinel| { const colon_token = tree.prevToken(sentinel.firstToken()); - try renderToken(tree, stream, colon_token, Space.None); // : + try renderToken(tree, ais, colon_token, Space.None); // : const sentinel_space = switch (op_tok_id) { .LBracket => Space.None, else => Space.Space, }; - try renderExpression(allocator, stream, tree, sentinel, sentinel_space); + try renderExpression(allocator, ais, tree, sentinel, sentinel_space); } switch (op_tok_id) { .Asterisk, .AsteriskAsterisk => {}, - .LBracket => try stream.writer().writeByte(']'), + .LBracket => try ais.writer().writeByte(']'), else => unreachable, } if (ptr_type.ptr_info.allowzero_token) |allowzero_token| { - try renderToken(tree, stream, allowzero_token, Space.Space); // allowzero + try renderToken(tree, ais, allowzero_token, Space.Space); // allowzero } if (ptr_type.ptr_info.align_info) |align_info| { const lparen_token = tree.prevToken(align_info.node.firstToken()); const align_token = tree.prevToken(lparen_token); - try renderToken(tree, 
stream, align_token, Space.None); // align - try renderToken(tree, stream, lparen_token, Space.None); // ( + try renderToken(tree, ais, align_token, Space.None); // align + try renderToken(tree, ais, lparen_token, Space.None); // ( - try renderExpression(allocator, stream, tree, align_info.node, Space.None); + try renderExpression(allocator, ais, tree, align_info.node, Space.None); if (align_info.bit_range) |bit_range| { const colon1 = tree.prevToken(bit_range.start.firstToken()); const colon2 = tree.prevToken(bit_range.end.firstToken()); - try renderToken(tree, stream, colon1, Space.None); // : - try renderExpression(allocator, stream, tree, bit_range.start, Space.None); - try renderToken(tree, stream, colon2, Space.None); // : - try renderExpression(allocator, stream, tree, bit_range.end, Space.None); + try renderToken(tree, ais, colon1, Space.None); // : + try renderExpression(allocator, ais, tree, bit_range.start, Space.None); + try renderToken(tree, ais, colon2, Space.None); // : + try renderExpression(allocator, ais, tree, bit_range.end, Space.None); const rparen_token = tree.nextToken(bit_range.end.lastToken()); - try renderToken(tree, stream, rparen_token, Space.Space); // ) + try renderToken(tree, ais, rparen_token, Space.Space); // ) } else { const rparen_token = tree.nextToken(align_info.node.lastToken()); - try renderToken(tree, stream, rparen_token, Space.Space); // ) + try renderToken(tree, ais, rparen_token, Space.Space); // ) } } if (ptr_type.ptr_info.const_token) |const_token| { - try renderToken(tree, stream, const_token, Space.Space); // const + try renderToken(tree, ais, const_token, Space.Space); // const } if (ptr_type.ptr_info.volatile_token) |volatile_token| { - try renderToken(tree, stream, volatile_token, Space.Space); // volatile + try renderToken(tree, ais, volatile_token, Space.Space); // volatile } - return renderExpression(allocator, stream, tree, ptr_type.rhs, space); + return renderExpression(allocator, ais, tree, ptr_type.rhs, 
space); }, .SliceType => { const slice_type = @fieldParentPtr(ast.Node.SliceType, "base", base); - try renderToken(tree, stream, slice_type.op_token, Space.None); // [ + try renderToken(tree, ais, slice_type.op_token, Space.None); // [ if (slice_type.ptr_info.sentinel) |sentinel| { const colon_token = tree.prevToken(sentinel.firstToken()); - try renderToken(tree, stream, colon_token, Space.None); // : - try renderExpression(allocator, stream, tree, sentinel, Space.None); - try renderToken(tree, stream, tree.nextToken(sentinel.lastToken()), Space.None); // ] + try renderToken(tree, ais, colon_token, Space.None); // : + try renderExpression(allocator, ais, tree, sentinel, Space.None); + try renderToken(tree, ais, tree.nextToken(sentinel.lastToken()), Space.None); // ] } else { - try renderToken(tree, stream, tree.nextToken(slice_type.op_token), Space.None); // ] + try renderToken(tree, ais, tree.nextToken(slice_type.op_token), Space.None); // ] } if (slice_type.ptr_info.allowzero_token) |allowzero_token| { - try renderToken(tree, stream, allowzero_token, Space.Space); // allowzero + try renderToken(tree, ais, allowzero_token, Space.Space); // allowzero } if (slice_type.ptr_info.align_info) |align_info| { const lparen_token = tree.prevToken(align_info.node.firstToken()); const align_token = tree.prevToken(lparen_token); - try renderToken(tree, stream, align_token, Space.None); // align - try renderToken(tree, stream, lparen_token, Space.None); // ( + try renderToken(tree, ais, align_token, Space.None); // align + try renderToken(tree, ais, lparen_token, Space.None); // ( - try renderExpression(allocator, stream, tree, align_info.node, Space.None); + try renderExpression(allocator, ais, tree, align_info.node, Space.None); if (align_info.bit_range) |bit_range| { const colon1 = tree.prevToken(bit_range.start.firstToken()); const colon2 = tree.prevToken(bit_range.end.firstToken()); - try renderToken(tree, stream, colon1, Space.None); // : - try renderExpression(allocator, 
stream, tree, bit_range.start, Space.None); - try renderToken(tree, stream, colon2, Space.None); // : - try renderExpression(allocator, stream, tree, bit_range.end, Space.None); + try renderToken(tree, ais, colon1, Space.None); // : + try renderExpression(allocator, ais, tree, bit_range.start, Space.None); + try renderToken(tree, ais, colon2, Space.None); // : + try renderExpression(allocator, ais, tree, bit_range.end, Space.None); const rparen_token = tree.nextToken(bit_range.end.lastToken()); - try renderToken(tree, stream, rparen_token, Space.Space); // ) + try renderToken(tree, ais, rparen_token, Space.Space); // ) } else { const rparen_token = tree.nextToken(align_info.node.lastToken()); - try renderToken(tree, stream, rparen_token, Space.Space); // ) + try renderToken(tree, ais, rparen_token, Space.Space); // ) } } if (slice_type.ptr_info.const_token) |const_token| { - try renderToken(tree, stream, const_token, Space.Space); + try renderToken(tree, ais, const_token, Space.Space); } if (slice_type.ptr_info.volatile_token) |volatile_token| { - try renderToken(tree, stream, volatile_token, Space.Space); + try renderToken(tree, ais, volatile_token, Space.Space); } - return renderExpression(allocator, stream, tree, slice_type.rhs, space); + return renderExpression(allocator, ais, tree, slice_type.rhs, space); }, .ArrayInitializer, .ArrayInitializerDot => { @@ -713,33 +712,33 @@ fn renderExpression( if (exprs.len == 0) { switch (lhs) { - .dot => |dot| try renderToken(tree, stream, dot, Space.None), - .node => |node| try renderExpression(allocator, stream, tree, node, Space.None), + .dot => |dot| try renderToken(tree, ais, dot, Space.None), + .node => |node| try renderExpression(allocator, ais, tree, node, Space.None), } { - stream.pushIndent(); - defer stream.popIndent(); - try renderToken(tree, stream, lbrace, Space.None); + ais.pushIndent(); + defer ais.popIndent(); + try renderToken(tree, ais, lbrace, Space.None); } - return renderToken(tree, stream, rtoken, 
space); + return renderToken(tree, ais, rtoken, space); } if (exprs.len == 1 and tree.token_ids[exprs[0].*.lastToken() + 1] == .RBrace) { const expr = exprs[0]; switch (lhs) { - .dot => |dot| try renderToken(tree, stream, dot, Space.None), - .node => |node| try renderExpression(allocator, stream, tree, node, Space.None), + .dot => |dot| try renderToken(tree, ais, dot, Space.None), + .node => |node| try renderExpression(allocator, ais, tree, node, Space.None), } - try renderToken(tree, stream, lbrace, Space.None); - try renderExpression(allocator, stream, tree, expr, Space.None); - return renderToken(tree, stream, rtoken, space); + try renderToken(tree, ais, lbrace, Space.None); + try renderExpression(allocator, ais, tree, expr, Space.None); + return renderToken(tree, ais, rtoken, space); } switch (lhs) { - .dot => |dot| try renderToken(tree, stream, dot, Space.None), - .node => |node| try renderExpression(allocator, stream, tree, node, Space.None), + .dot => |dot| try renderToken(tree, ais, dot, Space.None), + .node => |node| try renderExpression(allocator, ais, tree, node, Space.None), } // scan to find row size @@ -781,9 +780,9 @@ fn renderExpression( var expr_widths = widths[0 .. 
widths.len - row_size]; var column_widths = widths[widths.len - row_size ..]; - // Null stream for counting the printed length of each expression + // Null ais for counting the printed length of each expression var counting_stream = std.io.countingOutStream(std.io.null_out_stream); - var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, &counting_stream); + var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, counting_stream.writer()); for (exprs) |expr, i| { counting_stream.bytes_written = 0; @@ -795,23 +794,23 @@ fn renderExpression( } { - stream.pushIndentNextLine(); - defer stream.popIndent(); - try renderToken(tree, stream, lbrace, Space.Newline); + ais.pushIndentNextLine(); + defer ais.popIndent(); + try renderToken(tree, ais, lbrace, Space.Newline); var col: usize = 1; for (exprs) |expr, i| { if (i + 1 < exprs.len) { const next_expr = exprs[i + 1]; - try renderExpression(allocator, stream, tree, expr, Space.None); + try renderExpression(allocator, ais, tree, expr, Space.None); const comma = tree.nextToken(expr.*.lastToken()); if (col != row_size) { - try renderToken(tree, stream, comma, Space.Space); // , + try renderToken(tree, ais, comma, Space.Space); // , const padding = column_widths[i % row_size] - expr_widths[i]; - try stream.writer().writeByteNTimes(' ', padding); + try ais.writer().writeByteNTimes(' ', padding); col += 1; continue; @@ -819,32 +818,32 @@ fn renderExpression( col = 1; if (tree.token_ids[tree.nextToken(comma)] != .MultilineStringLiteralLine) { - try renderToken(tree, stream, comma, Space.Newline); // , + try renderToken(tree, ais, comma, Space.Newline); // , } else { - try renderToken(tree, stream, comma, Space.None); // , + try renderToken(tree, ais, comma, Space.None); // , } - try renderExtraNewline(tree, stream, next_expr); + try renderExtraNewline(tree, ais, next_expr); } else { - try renderExpression(allocator, stream, tree, expr, Space.Comma); // , + try renderExpression(allocator, ais, tree, expr, 
Space.Comma); // , } } } - return renderToken(tree, stream, rtoken, space); + return renderToken(tree, ais, rtoken, space); } else { - try renderToken(tree, stream, lbrace, Space.Space); + try renderToken(tree, ais, lbrace, Space.Space); for (exprs) |expr, i| { if (i + 1 < exprs.len) { const next_expr = exprs[i + 1]; - try renderExpression(allocator, stream, tree, expr, Space.None); + try renderExpression(allocator, ais, tree, expr, Space.None); const comma = tree.nextToken(expr.*.lastToken()); - try renderToken(tree, stream, comma, Space.Space); // , + try renderToken(tree, ais, comma, Space.Space); // , } else { - try renderExpression(allocator, stream, tree, expr, Space.Space); + try renderExpression(allocator, ais, tree, expr, Space.Space); } } - return renderToken(tree, stream, rtoken, space); + return renderToken(tree, ais, rtoken, space); } }, @@ -874,17 +873,17 @@ fn renderExpression( if (field_inits.len == 0) { switch (lhs) { - .dot => |dot| try renderToken(tree, stream, dot, Space.None), - .node => |node| try renderExpression(allocator, stream, tree, node, Space.None), + .dot => |dot| try renderToken(tree, ais, dot, Space.None), + .node => |node| try renderExpression(allocator, ais, tree, node, Space.None), } { - stream.pushIndentNextLine(); - defer stream.popIndent(); - try renderToken(tree, stream, lbrace, Space.None); + ais.pushIndentNextLine(); + defer ais.popIndent(); + try renderToken(tree, ais, lbrace, Space.None); } - return renderToken(tree, stream, rtoken, space); + return renderToken(tree, ais, rtoken, space); } const src_has_trailing_comma = blk: { @@ -900,8 +899,8 @@ fn renderExpression( const expr_outputs_one_line = blk: { // render field expressions until a LF is found for (field_inits) |field_init| { - var find_stream = std.io.findByteOutStream('\n', &std.io.null_out_stream); - var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, &find_stream); + var find_stream = std.io.findByteOutStream('\n', std.io.null_out_stream); + 
var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, find_stream.writer()); try renderExpression(allocator, &auto_indenting_stream, tree, field_init, Space.None); if (find_stream.byte_found) break :blk false; @@ -925,78 +924,78 @@ fn renderExpression( } switch (lhs) { - .dot => |dot| try renderToken(tree, stream, dot, Space.None), - .node => |node| try renderExpression(allocator, stream, tree, node, Space.None), + .dot => |dot| try renderToken(tree, ais, dot, Space.None), + .node => |node| try renderExpression(allocator, ais, tree, node, Space.None), } - try renderToken(tree, stream, lbrace, Space.Space); - try renderExpression(allocator, stream, tree, &field_init.base, Space.Space); - return renderToken(tree, stream, rtoken, space); + try renderToken(tree, ais, lbrace, Space.Space); + try renderExpression(allocator, ais, tree, &field_init.base, Space.Space); + return renderToken(tree, ais, rtoken, space); } if (!src_has_trailing_comma and src_same_line and expr_outputs_one_line) { // render all on one line, no trailing comma switch (lhs) { - .dot => |dot| try renderToken(tree, stream, dot, Space.None), - .node => |node| try renderExpression(allocator, stream, tree, node, Space.None), + .dot => |dot| try renderToken(tree, ais, dot, Space.None), + .node => |node| try renderExpression(allocator, ais, tree, node, Space.None), } - try renderToken(tree, stream, lbrace, Space.Space); + try renderToken(tree, ais, lbrace, Space.Space); for (field_inits) |field_init, i| { if (i + 1 < field_inits.len) { - try renderExpression(allocator, stream, tree, field_init, Space.None); + try renderExpression(allocator, ais, tree, field_init, Space.None); const comma = tree.nextToken(field_init.lastToken()); - try renderToken(tree, stream, comma, Space.Space); + try renderToken(tree, ais, comma, Space.Space); } else { - try renderExpression(allocator, stream, tree, field_init, Space.Space); + try renderExpression(allocator, ais, tree, field_init, Space.Space); } } - 
return renderToken(tree, stream, rtoken, space); + return renderToken(tree, ais, rtoken, space); } { switch (lhs) { - .dot => |dot| try renderToken(tree, stream, dot, Space.None), - .node => |node| try renderExpression(allocator, stream, tree, node, Space.None), + .dot => |dot| try renderToken(tree, ais, dot, Space.None), + .node => |node| try renderExpression(allocator, ais, tree, node, Space.None), } - stream.pushIndentNextLine(); - defer stream.popIndent(); + ais.pushIndentNextLine(); + defer ais.popIndent(); - try renderToken(tree, stream, lbrace, Space.Newline); + try renderToken(tree, ais, lbrace, Space.Newline); for (field_inits) |field_init, i| { if (i + 1 < field_inits.len) { const next_field_init = field_inits[i + 1]; - try renderExpression(allocator, stream, tree, field_init, Space.None); + try renderExpression(allocator, ais, tree, field_init, Space.None); const comma = tree.nextToken(field_init.lastToken()); - try renderToken(tree, stream, comma, Space.Newline); + try renderToken(tree, ais, comma, Space.Newline); - try renderExtraNewline(tree, stream, next_field_init); + try renderExtraNewline(tree, ais, next_field_init); } else { - try renderExpression(allocator, stream, tree, field_init, Space.Comma); + try renderExpression(allocator, ais, tree, field_init, Space.Comma); } } } - return renderToken(tree, stream, rtoken, space); + return renderToken(tree, ais, rtoken, space); }, .Call => { const call = @fieldParentPtr(ast.Node.Call, "base", base); if (call.async_token) |async_token| { - try renderToken(tree, stream, async_token, Space.Space); + try renderToken(tree, ais, async_token, Space.Space); } - try renderExpression(allocator, stream, tree, call.lhs, Space.None); + try renderExpression(allocator, ais, tree, call.lhs, Space.None); const lparen = tree.nextToken(call.lhs.lastToken()); if (call.params_len == 0) { - try renderToken(tree, stream, lparen, Space.None); - return renderToken(tree, stream, call.rtoken, space); + try renderToken(tree, ais, 
lparen, Space.None); + return renderToken(tree, ais, call.rtoken, space); } const src_has_trailing_comma = blk: { @@ -1005,41 +1004,41 @@ fn renderExpression( }; if (src_has_trailing_comma) { - try renderToken(tree, stream, lparen, Space.Newline); + try renderToken(tree, ais, lparen, Space.Newline); const params = call.params(); for (params) |param_node, i| { - stream.pushIndent(); - defer stream.popIndent(); + ais.pushIndent(); + defer ais.popIndent(); if (i + 1 < params.len) { const next_node = params[i + 1]; - try renderExpression(allocator, stream, tree, param_node, Space.None); + try renderExpression(allocator, ais, tree, param_node, Space.None); const comma = tree.nextToken(param_node.lastToken()); - try renderToken(tree, stream, comma, Space.Newline); // , - try renderExtraNewline(tree, stream, next_node); + try renderToken(tree, ais, comma, Space.Newline); // , + try renderExtraNewline(tree, ais, next_node); } else { - try renderExpression(allocator, stream, tree, param_node, Space.Comma); + try renderExpression(allocator, ais, tree, param_node, Space.Comma); } } - return renderToken(tree, stream, call.rtoken, space); + return renderToken(tree, ais, call.rtoken, space); } - try renderToken(tree, stream, lparen, Space.None); // ( + try renderToken(tree, ais, lparen, Space.None); // ( const params = call.params(); for (params) |param_node, i| { - if (param_node.*.tag == .MultilineStringLiteral) stream.pushIndentOneShot(); + if (param_node.*.tag == .MultilineStringLiteral) ais.pushIndentOneShot(); - try renderExpression(allocator, stream, tree, param_node, Space.None); + try renderExpression(allocator, ais, tree, param_node, Space.None); if (i + 1 < params.len) { const next_param = params[i + 1]; const comma = tree.nextToken(param_node.lastToken()); - try renderToken(tree, stream, comma, Space.Space); + try renderToken(tree, ais, comma, Space.Space); } } - return renderToken(tree, stream, call.rtoken, space); + return renderToken(tree, ais, call.rtoken, 
space); }, .ArrayAccess => { @@ -1048,25 +1047,25 @@ fn renderExpression( const lbracket = tree.nextToken(suffix_op.lhs.lastToken()); const rbracket = tree.nextToken(suffix_op.index_expr.lastToken()); - try renderExpression(allocator, stream, tree, suffix_op.lhs, Space.None); - try renderToken(tree, stream, lbracket, Space.None); // [ + try renderExpression(allocator, ais, tree, suffix_op.lhs, Space.None); + try renderToken(tree, ais, lbracket, Space.None); // [ const starts_with_comment = tree.token_ids[lbracket + 1] == .LineComment; const ends_with_comment = tree.token_ids[rbracket - 1] == .LineComment; { const new_space = if (ends_with_comment) Space.Newline else Space.None; - stream.pushIndent(); - defer stream.popIndent(); - try renderExpression(allocator, stream, tree, suffix_op.index_expr, new_space); + ais.pushIndent(); + defer ais.popIndent(); + try renderExpression(allocator, ais, tree, suffix_op.index_expr, new_space); } - if (starts_with_comment) try stream.maybeInsertNewline(); - return renderToken(tree, stream, rbracket, space); // ] + if (starts_with_comment) try ais.maybeInsertNewline(); + return renderToken(tree, ais, rbracket, space); // ] }, .Slice => { const suffix_op = base.castTag(.Slice).?; - try renderExpression(allocator, stream, tree, suffix_op.lhs, Space.None); + try renderExpression(allocator, ais, tree, suffix_op.lhs, Space.None); const lbracket = tree.prevToken(suffix_op.start.firstToken()); const dotdot = tree.nextToken(suffix_op.start.lastToken()); @@ -1076,33 +1075,33 @@ fn renderExpression( const after_start_space = if (after_start_space_bool) Space.Space else Space.None; const after_op_space = if (suffix_op.end != null) after_start_space else Space.None; - try renderToken(tree, stream, lbracket, Space.None); // [ - try renderExpression(allocator, stream, tree, suffix_op.start, after_start_space); - try renderToken(tree, stream, dotdot, after_op_space); // .. 
+ try renderToken(tree, ais, lbracket, Space.None); // [ + try renderExpression(allocator, ais, tree, suffix_op.start, after_start_space); + try renderToken(tree, ais, dotdot, after_op_space); // .. if (suffix_op.end) |end| { const after_end_space = if (suffix_op.sentinel != null) Space.Space else Space.None; - try renderExpression(allocator, stream, tree, end, after_end_space); + try renderExpression(allocator, ais, tree, end, after_end_space); } if (suffix_op.sentinel) |sentinel| { const colon = tree.prevToken(sentinel.firstToken()); - try renderToken(tree, stream, colon, Space.None); // : - try renderExpression(allocator, stream, tree, sentinel, Space.None); + try renderToken(tree, ais, colon, Space.None); // : + try renderExpression(allocator, ais, tree, sentinel, Space.None); } - return renderToken(tree, stream, suffix_op.rtoken, space); // ] + return renderToken(tree, ais, suffix_op.rtoken, space); // ] }, .Deref => { const suffix_op = base.castTag(.Deref).?; - try renderExpression(allocator, stream, tree, suffix_op.lhs, Space.None); - return renderToken(tree, stream, suffix_op.rtoken, space); // .* + try renderExpression(allocator, ais, tree, suffix_op.lhs, Space.None); + return renderToken(tree, ais, suffix_op.rtoken, space); // .* }, .UnwrapOptional => { const suffix_op = base.castTag(.UnwrapOptional).?; - try renderExpression(allocator, stream, tree, suffix_op.lhs, Space.None); - try renderToken(tree, stream, tree.prevToken(suffix_op.rtoken), Space.None); // . - return renderToken(tree, stream, suffix_op.rtoken, space); // ? + try renderExpression(allocator, ais, tree, suffix_op.lhs, Space.None); + try renderToken(tree, ais, tree.prevToken(suffix_op.rtoken), Space.None); // . + return renderToken(tree, ais, suffix_op.rtoken, space); // ? 
}, .Break => { @@ -1111,152 +1110,152 @@ fn renderExpression( const maybe_label = flow_expr.getLabel(); if (maybe_label == null and maybe_rhs == null) { - return renderToken(tree, stream, flow_expr.ltoken, space); // break + return renderToken(tree, ais, flow_expr.ltoken, space); // break } - try renderToken(tree, stream, flow_expr.ltoken, Space.Space); // break + try renderToken(tree, ais, flow_expr.ltoken, Space.Space); // break if (maybe_label) |label| { const colon = tree.nextToken(flow_expr.ltoken); - try renderToken(tree, stream, colon, Space.None); // : + try renderToken(tree, ais, colon, Space.None); // : if (maybe_rhs == null) { - return renderToken(tree, stream, label, space); // label + return renderToken(tree, ais, label, space); // label } - try renderToken(tree, stream, label, Space.Space); // label + try renderToken(tree, ais, label, Space.Space); // label } - return renderExpression(allocator, stream, tree, maybe_rhs.?, space); + return renderExpression(allocator, ais, tree, maybe_rhs.?, space); }, .Continue => { const flow_expr = base.castTag(.Continue).?; if (flow_expr.getLabel()) |label| { - try renderToken(tree, stream, flow_expr.ltoken, Space.Space); // continue + try renderToken(tree, ais, flow_expr.ltoken, Space.Space); // continue const colon = tree.nextToken(flow_expr.ltoken); - try renderToken(tree, stream, colon, Space.None); // : - return renderToken(tree, stream, label, space); // label + try renderToken(tree, ais, colon, Space.None); // : + return renderToken(tree, ais, label, space); // label } else { - return renderToken(tree, stream, flow_expr.ltoken, space); // continue + return renderToken(tree, ais, flow_expr.ltoken, space); // continue } }, .Return => { const flow_expr = base.castTag(.Return).?; if (flow_expr.getRHS()) |rhs| { - try renderToken(tree, stream, flow_expr.ltoken, Space.Space); - return renderExpression(allocator, stream, tree, rhs, space); + try renderToken(tree, ais, flow_expr.ltoken, Space.Space); + return 
renderExpression(allocator, ais, tree, rhs, space); } else { - return renderToken(tree, stream, flow_expr.ltoken, space); + return renderToken(tree, ais, flow_expr.ltoken, space); } }, .Payload => { const payload = @fieldParentPtr(ast.Node.Payload, "base", base); - try renderToken(tree, stream, payload.lpipe, Space.None); - try renderExpression(allocator, stream, tree, payload.error_symbol, Space.None); - return renderToken(tree, stream, payload.rpipe, space); + try renderToken(tree, ais, payload.lpipe, Space.None); + try renderExpression(allocator, ais, tree, payload.error_symbol, Space.None); + return renderToken(tree, ais, payload.rpipe, space); }, .PointerPayload => { const payload = @fieldParentPtr(ast.Node.PointerPayload, "base", base); - try renderToken(tree, stream, payload.lpipe, Space.None); + try renderToken(tree, ais, payload.lpipe, Space.None); if (payload.ptr_token) |ptr_token| { - try renderToken(tree, stream, ptr_token, Space.None); + try renderToken(tree, ais, ptr_token, Space.None); } - try renderExpression(allocator, stream, tree, payload.value_symbol, Space.None); - return renderToken(tree, stream, payload.rpipe, space); + try renderExpression(allocator, ais, tree, payload.value_symbol, Space.None); + return renderToken(tree, ais, payload.rpipe, space); }, .PointerIndexPayload => { const payload = @fieldParentPtr(ast.Node.PointerIndexPayload, "base", base); - try renderToken(tree, stream, payload.lpipe, Space.None); + try renderToken(tree, ais, payload.lpipe, Space.None); if (payload.ptr_token) |ptr_token| { - try renderToken(tree, stream, ptr_token, Space.None); + try renderToken(tree, ais, ptr_token, Space.None); } - try renderExpression(allocator, stream, tree, payload.value_symbol, Space.None); + try renderExpression(allocator, ais, tree, payload.value_symbol, Space.None); if (payload.index_symbol) |index_symbol| { const comma = tree.nextToken(payload.value_symbol.lastToken()); - try renderToken(tree, stream, comma, Space.Space); - try 
renderExpression(allocator, stream, tree, index_symbol, Space.None); + try renderToken(tree, ais, comma, Space.Space); + try renderExpression(allocator, ais, tree, index_symbol, Space.None); } - return renderToken(tree, stream, payload.rpipe, space); + return renderToken(tree, ais, payload.rpipe, space); }, .GroupedExpression => { const grouped_expr = @fieldParentPtr(ast.Node.GroupedExpression, "base", base); - try renderToken(tree, stream, grouped_expr.lparen, Space.None); + try renderToken(tree, ais, grouped_expr.lparen, Space.None); { - stream.pushIndentOneShot(); - try renderExpression(allocator, stream, tree, grouped_expr.expr, Space.None); + ais.pushIndentOneShot(); + try renderExpression(allocator, ais, tree, grouped_expr.expr, Space.None); } - return renderToken(tree, stream, grouped_expr.rparen, space); + return renderToken(tree, ais, grouped_expr.rparen, space); }, .FieldInitializer => { const field_init = @fieldParentPtr(ast.Node.FieldInitializer, "base", base); - try renderToken(tree, stream, field_init.period_token, Space.None); // . - try renderToken(tree, stream, field_init.name_token, Space.Space); // name - try renderToken(tree, stream, tree.nextToken(field_init.name_token), Space.Space); // = - return renderExpression(allocator, stream, tree, field_init.expr, space); + try renderToken(tree, ais, field_init.period_token, Space.None); // . 
+ try renderToken(tree, ais, field_init.name_token, Space.Space); // name + try renderToken(tree, ais, tree.nextToken(field_init.name_token), Space.Space); // = + return renderExpression(allocator, ais, tree, field_init.expr, space); }, .ContainerDecl => { const container_decl = @fieldParentPtr(ast.Node.ContainerDecl, "base", base); if (container_decl.layout_token) |layout_token| { - try renderToken(tree, stream, layout_token, Space.Space); + try renderToken(tree, ais, layout_token, Space.Space); } switch (container_decl.init_arg_expr) { .None => { - try renderToken(tree, stream, container_decl.kind_token, Space.Space); // union + try renderToken(tree, ais, container_decl.kind_token, Space.Space); // union }, .Enum => |enum_tag_type| { - try renderToken(tree, stream, container_decl.kind_token, Space.None); // union + try renderToken(tree, ais, container_decl.kind_token, Space.None); // union const lparen = tree.nextToken(container_decl.kind_token); const enum_token = tree.nextToken(lparen); - try renderToken(tree, stream, lparen, Space.None); // ( - try renderToken(tree, stream, enum_token, Space.None); // enum + try renderToken(tree, ais, lparen, Space.None); // ( + try renderToken(tree, ais, enum_token, Space.None); // enum if (enum_tag_type) |expr| { - try renderToken(tree, stream, tree.nextToken(enum_token), Space.None); // ( - try renderExpression(allocator, stream, tree, expr, Space.None); + try renderToken(tree, ais, tree.nextToken(enum_token), Space.None); // ( + try renderExpression(allocator, ais, tree, expr, Space.None); const rparen = tree.nextToken(expr.lastToken()); - try renderToken(tree, stream, rparen, Space.None); // ) - try renderToken(tree, stream, tree.nextToken(rparen), Space.Space); // ) + try renderToken(tree, ais, rparen, Space.None); // ) + try renderToken(tree, ais, tree.nextToken(rparen), Space.Space); // ) } else { - try renderToken(tree, stream, tree.nextToken(enum_token), Space.Space); // ) + try renderToken(tree, ais, 
tree.nextToken(enum_token), Space.Space); // ) } }, .Type => |type_expr| { - try renderToken(tree, stream, container_decl.kind_token, Space.None); // union + try renderToken(tree, ais, container_decl.kind_token, Space.None); // union const lparen = tree.nextToken(container_decl.kind_token); const rparen = tree.nextToken(type_expr.lastToken()); - try renderToken(tree, stream, lparen, Space.None); // ( - try renderExpression(allocator, stream, tree, type_expr, Space.None); - try renderToken(tree, stream, rparen, Space.Space); // ) + try renderToken(tree, ais, lparen, Space.None); // ( + try renderExpression(allocator, ais, tree, type_expr, Space.None); + try renderToken(tree, ais, rparen, Space.Space); // ) }, } if (container_decl.fields_and_decls_len == 0) { { - stream.pushIndentNextLine(); - defer stream.popIndent(); - try renderToken(tree, stream, container_decl.lbrace_token, Space.None); // { + ais.pushIndentNextLine(); + defer ais.popIndent(); + try renderToken(tree, ais, container_decl.lbrace_token, Space.None); // { } - return renderToken(tree, stream, container_decl.rbrace_token, space); // } + return renderToken(tree, ais, container_decl.rbrace_token, space); // } } const src_has_trailing_comma = blk: { @@ -1287,39 +1286,39 @@ fn renderExpression( if (src_has_trailing_comma or !src_has_only_fields) { // One declaration per line - stream.pushIndentNextLine(); - defer stream.popIndent(); - try renderToken(tree, stream, container_decl.lbrace_token, .Newline); // { + ais.pushIndentNextLine(); + defer ais.popIndent(); + try renderToken(tree, ais, container_decl.lbrace_token, .Newline); // { for (fields_and_decls) |decl, i| { - try renderContainerDecl(allocator, stream, tree, decl, .Newline); + try renderContainerDecl(allocator, ais, tree, decl, .Newline); if (i + 1 < fields_and_decls.len) { - try renderExtraNewline(tree, stream, fields_and_decls[i + 1]); + try renderExtraNewline(tree, ais, fields_and_decls[i + 1]); } } } else if (src_has_newline) { // All the 
declarations on the same line, but place the items on // their own line - try renderToken(tree, stream, container_decl.lbrace_token, .Newline); // { + try renderToken(tree, ais, container_decl.lbrace_token, .Newline); // { - stream.pushIndent(); - defer stream.popIndent(); + ais.pushIndent(); + defer ais.popIndent(); for (fields_and_decls) |decl, i| { const space_after_decl: Space = if (i + 1 >= fields_and_decls.len) .Newline else .Space; - try renderContainerDecl(allocator, stream, tree, decl, space_after_decl); + try renderContainerDecl(allocator, ais, tree, decl, space_after_decl); } } else { // All the declarations on the same line - try renderToken(tree, stream, container_decl.lbrace_token, .Space); // { + try renderToken(tree, ais, container_decl.lbrace_token, .Space); // { for (fields_and_decls) |decl| { - try renderContainerDecl(allocator, stream, tree, decl, .Space); + try renderContainerDecl(allocator, ais, tree, decl, .Space); } } - return renderToken(tree, stream, container_decl.rbrace_token, space); // } + return renderToken(tree, ais, container_decl.rbrace_token, space); // } }, .ErrorSetDecl => { @@ -1328,9 +1327,9 @@ fn renderExpression( const lbrace = tree.nextToken(err_set_decl.error_token); if (err_set_decl.decls_len == 0) { - try renderToken(tree, stream, err_set_decl.error_token, Space.None); - try renderToken(tree, stream, lbrace, Space.None); - return renderToken(tree, stream, err_set_decl.rbrace_token, space); + try renderToken(tree, ais, err_set_decl.error_token, Space.None); + try renderToken(tree, ais, lbrace, Space.None); + return renderToken(tree, ais, err_set_decl.rbrace_token, space); } if (err_set_decl.decls_len == 1) blk: { @@ -1344,13 +1343,13 @@ fn renderExpression( break :blk; } - try renderToken(tree, stream, err_set_decl.error_token, Space.None); // error - try renderToken(tree, stream, lbrace, Space.None); // { - try renderExpression(allocator, stream, tree, node, Space.None); - return renderToken(tree, stream, 
err_set_decl.rbrace_token, space); // } + try renderToken(tree, ais, err_set_decl.error_token, Space.None); // error + try renderToken(tree, ais, lbrace, Space.None); // { + try renderExpression(allocator, ais, tree, node, Space.None); + return renderToken(tree, ais, err_set_decl.rbrace_token, space); // } } - try renderToken(tree, stream, err_set_decl.error_token, Space.None); // error + try renderToken(tree, ais, err_set_decl.error_token, Space.None); // error const src_has_trailing_comma = blk: { const maybe_comma = tree.prevToken(err_set_decl.rbrace_token); @@ -1359,64 +1358,64 @@ fn renderExpression( if (src_has_trailing_comma) { { - stream.pushIndent(); - defer stream.popIndent(); + ais.pushIndent(); + defer ais.popIndent(); - try renderToken(tree, stream, lbrace, Space.Newline); // { + try renderToken(tree, ais, lbrace, Space.Newline); // { const decls = err_set_decl.decls(); for (decls) |node, i| { if (i + 1 < decls.len) { - try renderExpression(allocator, stream, tree, node, Space.None); - try renderToken(tree, stream, tree.nextToken(node.lastToken()), Space.Newline); // , + try renderExpression(allocator, ais, tree, node, Space.None); + try renderToken(tree, ais, tree.nextToken(node.lastToken()), Space.Newline); // , - try renderExtraNewline(tree, stream, decls[i + 1]); + try renderExtraNewline(tree, ais, decls[i + 1]); } else { - try renderExpression(allocator, stream, tree, node, Space.Comma); + try renderExpression(allocator, ais, tree, node, Space.Comma); } } } - return renderToken(tree, stream, err_set_decl.rbrace_token, space); // } + return renderToken(tree, ais, err_set_decl.rbrace_token, space); // } } else { - try renderToken(tree, stream, lbrace, Space.Space); // { + try renderToken(tree, ais, lbrace, Space.Space); // { const decls = err_set_decl.decls(); for (decls) |node, i| { if (i + 1 < decls.len) { - try renderExpression(allocator, stream, tree, node, Space.None); + try renderExpression(allocator, ais, tree, node, Space.None); const 
comma_token = tree.nextToken(node.lastToken()); assert(tree.token_ids[comma_token] == .Comma); - try renderToken(tree, stream, comma_token, Space.Space); // , - try renderExtraNewline(tree, stream, decls[i + 1]); + try renderToken(tree, ais, comma_token, Space.Space); // , + try renderExtraNewline(tree, ais, decls[i + 1]); } else { - try renderExpression(allocator, stream, tree, node, Space.Space); + try renderExpression(allocator, ais, tree, node, Space.Space); } } - return renderToken(tree, stream, err_set_decl.rbrace_token, space); // } + return renderToken(tree, ais, err_set_decl.rbrace_token, space); // } } }, .ErrorTag => { const tag = @fieldParentPtr(ast.Node.ErrorTag, "base", base); - try renderDocComments(tree, stream, tag, tag.doc_comments); - return renderToken(tree, stream, tag.name_token, space); // name + try renderDocComments(tree, ais, tag, tag.doc_comments); + return renderToken(tree, ais, tag.name_token, space); // name }, .MultilineStringLiteral => { const multiline_str_literal = @fieldParentPtr(ast.Node.MultilineStringLiteral, "base", base); { - const locked_indents = stream.lockOneShotIndent(); + const locked_indents = ais.lockOneShotIndent(); defer { var i: u8 = 0; - while (i < locked_indents) : (i += 1) stream.popIndent(); + while (i < locked_indents) : (i += 1) ais.popIndent(); } - try stream.maybeInsertNewline(); + try ais.maybeInsertNewline(); - for (multiline_str_literal.lines()) |t| try renderToken(tree, stream, t, Space.None); + for (multiline_str_literal.lines()) |t| try renderToken(tree, ais, t, Space.None); } }, @@ -1425,9 +1424,9 @@ fn renderExpression( // TODO remove after 0.7.0 release if (mem.eql(u8, tree.tokenSlice(builtin_call.builtin_token), "@OpaqueType")) - return stream.writer().writeAll("@Type(.Opaque)"); + return ais.writer().writeAll("@Type(.Opaque)"); - try renderToken(tree, stream, builtin_call.builtin_token, Space.None); // @name + try renderToken(tree, ais, builtin_call.builtin_token, Space.None); // @name const 
src_params_trailing_comma = blk: { if (builtin_call.params_len < 2) break :blk false; @@ -1439,30 +1438,30 @@ fn renderExpression( const lparen = tree.nextToken(builtin_call.builtin_token); if (!src_params_trailing_comma) { - try renderToken(tree, stream, lparen, Space.None); // ( + try renderToken(tree, ais, lparen, Space.None); // ( // render all on one line, no trailing comma const params = builtin_call.params(); for (params) |param_node, i| { - try renderExpression(allocator, stream, tree, param_node, Space.None); + try renderExpression(allocator, ais, tree, param_node, Space.None); if (i + 1 < params.len) { const comma_token = tree.nextToken(param_node.lastToken()); - try renderToken(tree, stream, comma_token, Space.Space); // , + try renderToken(tree, ais, comma_token, Space.Space); // , } } } else { // one param per line - stream.pushIndent(); - defer stream.popIndent(); - try renderToken(tree, stream, lparen, Space.Newline); // ( + ais.pushIndent(); + defer ais.popIndent(); + try renderToken(tree, ais, lparen, Space.Newline); // ( for (builtin_call.params()) |param_node| { - try renderExpression(allocator, stream, tree, param_node, Space.Comma); + try renderExpression(allocator, ais, tree, param_node, Space.Comma); } } - return renderToken(tree, stream, builtin_call.rparen_token, space); // ) + return renderToken(tree, ais, builtin_call.rparen_token, space); // ) }, .FnProto => { @@ -1472,24 +1471,24 @@ fn renderExpression( const visib_token = tree.token_ids[visib_token_index]; assert(visib_token == .Keyword_pub or visib_token == .Keyword_export); - try renderToken(tree, stream, visib_token_index, Space.Space); // pub + try renderToken(tree, ais, visib_token_index, Space.Space); // pub } if (fn_proto.getExternExportInlineToken()) |extern_export_inline_token| { if (fn_proto.getIsExternPrototype() == null) - try renderToken(tree, stream, extern_export_inline_token, Space.Space); // extern/export/inline + try renderToken(tree, ais, extern_export_inline_token, 
Space.Space); // extern/export/inline } if (fn_proto.getLibName()) |lib_name| { - try renderExpression(allocator, stream, tree, lib_name, Space.Space); + try renderExpression(allocator, ais, tree, lib_name, Space.Space); } const lparen = if (fn_proto.getNameToken()) |name_token| blk: { - try renderToken(tree, stream, fn_proto.fn_token, Space.Space); // fn - try renderToken(tree, stream, name_token, Space.None); // name + try renderToken(tree, ais, fn_proto.fn_token, Space.Space); // fn + try renderToken(tree, ais, name_token, Space.None); // name break :blk tree.nextToken(name_token); } else blk: { - try renderToken(tree, stream, fn_proto.fn_token, Space.Space); // fn + try renderToken(tree, ais, fn_proto.fn_token, Space.Space); // fn break :blk tree.nextToken(fn_proto.fn_token); }; assert(tree.token_ids[lparen] == .LParen); @@ -1516,45 +1515,45 @@ fn renderExpression( }; if (!src_params_trailing_comma) { - try renderToken(tree, stream, lparen, Space.None); // ( + try renderToken(tree, ais, lparen, Space.None); // ( // render all on one line, no trailing comma for (fn_proto.params()) |param_decl, i| { - try renderParamDecl(allocator, stream, tree, param_decl, Space.None); + try renderParamDecl(allocator, ais, tree, param_decl, Space.None); if (i + 1 < fn_proto.params_len or fn_proto.getVarArgsToken() != null) { const comma = tree.nextToken(param_decl.lastToken()); - try renderToken(tree, stream, comma, Space.Space); // , + try renderToken(tree, ais, comma, Space.Space); // , } } if (fn_proto.getVarArgsToken()) |var_args_token| { - try renderToken(tree, stream, var_args_token, Space.None); + try renderToken(tree, ais, var_args_token, Space.None); } } else { // one param per line - stream.pushIndent(); - defer stream.popIndent(); - try renderToken(tree, stream, lparen, Space.Newline); // ( + ais.pushIndent(); + defer ais.popIndent(); + try renderToken(tree, ais, lparen, Space.Newline); // ( for (fn_proto.params()) |param_decl| { - try renderParamDecl(allocator, 
stream, tree, param_decl, Space.Comma); + try renderParamDecl(allocator, ais, tree, param_decl, Space.Comma); } if (fn_proto.getVarArgsToken()) |var_args_token| { - try renderToken(tree, stream, var_args_token, Space.Comma); + try renderToken(tree, ais, var_args_token, Space.Comma); } } - try renderToken(tree, stream, rparen, Space.Space); // ) + try renderToken(tree, ais, rparen, Space.Space); // ) if (fn_proto.getAlignExpr()) |align_expr| { const align_rparen = tree.nextToken(align_expr.lastToken()); const align_lparen = tree.prevToken(align_expr.firstToken()); const align_kw = tree.prevToken(align_lparen); - try renderToken(tree, stream, align_kw, Space.None); // align - try renderToken(tree, stream, align_lparen, Space.None); // ( - try renderExpression(allocator, stream, tree, align_expr, Space.None); - try renderToken(tree, stream, align_rparen, Space.Space); // ) + try renderToken(tree, ais, align_kw, Space.None); // align + try renderToken(tree, ais, align_lparen, Space.None); // ( + try renderExpression(allocator, ais, tree, align_expr, Space.None); + try renderToken(tree, ais, align_rparen, Space.Space); // ) } if (fn_proto.getSectionExpr()) |section_expr| { @@ -1562,10 +1561,10 @@ fn renderExpression( const section_lparen = tree.prevToken(section_expr.firstToken()); const section_kw = tree.prevToken(section_lparen); - try renderToken(tree, stream, section_kw, Space.None); // section - try renderToken(tree, stream, section_lparen, Space.None); // ( - try renderExpression(allocator, stream, tree, section_expr, Space.None); - try renderToken(tree, stream, section_rparen, Space.Space); // ) + try renderToken(tree, ais, section_kw, Space.None); // section + try renderToken(tree, ais, section_lparen, Space.None); // ( + try renderExpression(allocator, ais, tree, section_expr, Space.None); + try renderToken(tree, ais, section_rparen, Space.Space); // ) } if (fn_proto.getCallconvExpr()) |callconv_expr| { @@ -1573,23 +1572,23 @@ fn renderExpression( const 
callconv_lparen = tree.prevToken(callconv_expr.firstToken()); const callconv_kw = tree.prevToken(callconv_lparen); - try renderToken(tree, stream, callconv_kw, Space.None); // callconv - try renderToken(tree, stream, callconv_lparen, Space.None); // ( - try renderExpression(allocator, stream, tree, callconv_expr, Space.None); - try renderToken(tree, stream, callconv_rparen, Space.Space); // ) + try renderToken(tree, ais, callconv_kw, Space.None); // callconv + try renderToken(tree, ais, callconv_lparen, Space.None); // ( + try renderExpression(allocator, ais, tree, callconv_expr, Space.None); + try renderToken(tree, ais, callconv_rparen, Space.Space); // ) } else if (fn_proto.getIsExternPrototype() != null) { - try stream.writer().writeAll("callconv(.C) "); + try ais.writer().writeAll("callconv(.C) "); } else if (fn_proto.getIsAsync() != null) { - try stream.writer().writeAll("callconv(.Async) "); + try ais.writer().writeAll("callconv(.Async) "); } switch (fn_proto.return_type) { .Explicit => |node| { - return renderExpression(allocator, stream, tree, node, space); + return renderExpression(allocator, ais, tree, node, space); }, .InferErrorSet => |node| { - try renderToken(tree, stream, tree.prevToken(node.firstToken()), Space.None); // ! - return renderExpression(allocator, stream, tree, node, space); + try renderToken(tree, ais, tree.prevToken(node.firstToken()), Space.None); // ! 
+ return renderExpression(allocator, ais, tree, node, space); }, .Invalid => unreachable, } @@ -1599,11 +1598,11 @@ fn renderExpression( const anyframe_type = @fieldParentPtr(ast.Node.AnyFrameType, "base", base); if (anyframe_type.result) |result| { - try renderToken(tree, stream, anyframe_type.anyframe_token, Space.None); // anyframe - try renderToken(tree, stream, result.arrow_token, Space.None); // -> - return renderExpression(allocator, stream, tree, result.return_type, space); + try renderToken(tree, ais, anyframe_type.anyframe_token, Space.None); // anyframe + try renderToken(tree, ais, result.arrow_token, Space.None); // -> + return renderExpression(allocator, ais, tree, result.return_type, space); } else { - return renderToken(tree, stream, anyframe_type.anyframe_token, space); // anyframe + return renderToken(tree, ais, anyframe_type.anyframe_token, space); // anyframe } }, @@ -1612,38 +1611,38 @@ fn renderExpression( .Switch => { const switch_node = @fieldParentPtr(ast.Node.Switch, "base", base); - try renderToken(tree, stream, switch_node.switch_token, Space.Space); // switch - try renderToken(tree, stream, tree.nextToken(switch_node.switch_token), Space.None); // ( + try renderToken(tree, ais, switch_node.switch_token, Space.Space); // switch + try renderToken(tree, ais, tree.nextToken(switch_node.switch_token), Space.None); // ( const rparen = tree.nextToken(switch_node.expr.lastToken()); const lbrace = tree.nextToken(rparen); if (switch_node.cases_len == 0) { - try renderExpression(allocator, stream, tree, switch_node.expr, Space.None); - try renderToken(tree, stream, rparen, Space.Space); // ) - try renderToken(tree, stream, lbrace, Space.None); // { - return renderToken(tree, stream, switch_node.rbrace, space); // } + try renderExpression(allocator, ais, tree, switch_node.expr, Space.None); + try renderToken(tree, ais, rparen, Space.Space); // ) + try renderToken(tree, ais, lbrace, Space.None); // { + return renderToken(tree, ais, 
switch_node.rbrace, space); // } } - try renderExpression(allocator, stream, tree, switch_node.expr, Space.None); - try renderToken(tree, stream, rparen, Space.Space); // ) + try renderExpression(allocator, ais, tree, switch_node.expr, Space.None); + try renderToken(tree, ais, rparen, Space.Space); // ) { - stream.pushIndentNextLine(); - defer stream.popIndent(); - try renderToken(tree, stream, lbrace, Space.Newline); // { + ais.pushIndentNextLine(); + defer ais.popIndent(); + try renderToken(tree, ais, lbrace, Space.Newline); // { const cases = switch_node.cases(); for (cases) |node, i| { - try renderExpression(allocator, stream, tree, node, Space.Comma); + try renderExpression(allocator, ais, tree, node, Space.Comma); if (i + 1 < cases.len) { - try renderExtraNewline(tree, stream, cases[i + 1]); + try renderExtraNewline(tree, ais, cases[i + 1]); } } } - return renderToken(tree, stream, switch_node.rbrace, space); // } + return renderToken(tree, ais, switch_node.rbrace, space); // } }, .SwitchCase => { @@ -1660,41 +1659,41 @@ fn renderExpression( const items = switch_case.items(); for (items) |node, i| { if (i + 1 < items.len) { - try renderExpression(allocator, stream, tree, node, Space.None); + try renderExpression(allocator, ais, tree, node, Space.None); const comma_token = tree.nextToken(node.lastToken()); - try renderToken(tree, stream, comma_token, Space.Space); // , - try renderExtraNewline(tree, stream, items[i + 1]); + try renderToken(tree, ais, comma_token, Space.Space); // , + try renderExtraNewline(tree, ais, items[i + 1]); } else { - try renderExpression(allocator, stream, tree, node, Space.Space); + try renderExpression(allocator, ais, tree, node, Space.Space); } } } else { const items = switch_case.items(); for (items) |node, i| { if (i + 1 < items.len) { - try renderExpression(allocator, stream, tree, node, Space.None); + try renderExpression(allocator, ais, tree, node, Space.None); const comma_token = tree.nextToken(node.lastToken()); - try 
renderToken(tree, stream, comma_token, Space.Newline); // , - try renderExtraNewline(tree, stream, items[i + 1]); + try renderToken(tree, ais, comma_token, Space.Newline); // , + try renderExtraNewline(tree, ais, items[i + 1]); } else { - try renderExpression(allocator, stream, tree, node, Space.Comma); + try renderExpression(allocator, ais, tree, node, Space.Comma); } } } - try renderToken(tree, stream, switch_case.arrow_token, Space.Space); // => + try renderToken(tree, ais, switch_case.arrow_token, Space.Space); // => if (switch_case.payload) |payload| { - try renderExpression(allocator, stream, tree, payload, Space.Space); + try renderExpression(allocator, ais, tree, payload, Space.Space); } - return renderExpression(allocator, stream, tree, switch_case.expr, space); + return renderExpression(allocator, ais, tree, switch_case.expr, space); }, .SwitchElse => { const switch_else = @fieldParentPtr(ast.Node.SwitchElse, "base", base); - return renderToken(tree, stream, switch_else.token, space); + return renderToken(tree, ais, switch_else.token, space); }, .Else => { const else_node = @fieldParentPtr(ast.Node.Else, "base", base); @@ -1703,19 +1702,19 @@ fn renderExpression( const same_line = body_is_block or tree.tokensOnSameLine(else_node.else_token, else_node.body.lastToken()); const after_else_space = if (same_line or else_node.payload != null) Space.Space else Space.Newline; - try renderToken(tree, stream, else_node.else_token, after_else_space); + try renderToken(tree, ais, else_node.else_token, after_else_space); if (else_node.payload) |payload| { const payload_space = if (same_line) Space.Space else Space.Newline; - try renderExpression(allocator, stream, tree, payload, payload_space); + try renderExpression(allocator, ais, tree, payload, payload_space); } if (same_line) { - return renderExpression(allocator, stream, tree, else_node.body, space); + return renderExpression(allocator, ais, tree, else_node.body, space); } else { - stream.pushIndent(); - defer 
stream.popIndent(); - return renderExpression(allocator, stream, tree, else_node.body, space); + ais.pushIndent(); + defer ais.popIndent(); + return renderExpression(allocator, ais, tree, else_node.body, space); } }, @@ -1723,17 +1722,17 @@ fn renderExpression( const while_node = @fieldParentPtr(ast.Node.While, "base", base); if (while_node.label) |label| { - try renderToken(tree, stream, label, Space.None); // label - try renderToken(tree, stream, tree.nextToken(label), Space.Space); // : + try renderToken(tree, ais, label, Space.None); // label + try renderToken(tree, ais, tree.nextToken(label), Space.Space); // : } if (while_node.inline_token) |inline_token| { - try renderToken(tree, stream, inline_token, Space.Space); // inline + try renderToken(tree, ais, inline_token, Space.Space); // inline } - try renderToken(tree, stream, while_node.while_token, Space.Space); // while - try renderToken(tree, stream, tree.nextToken(while_node.while_token), Space.None); // ( - try renderExpression(allocator, stream, tree, while_node.condition, Space.None); + try renderToken(tree, ais, while_node.while_token, Space.Space); // while + try renderToken(tree, ais, tree.nextToken(while_node.while_token), Space.None); // ( + try renderExpression(allocator, ais, tree, while_node.condition, Space.None); const cond_rparen = tree.nextToken(while_node.condition.lastToken()); @@ -1755,12 +1754,12 @@ fn renderExpression( { const rparen_space = if (while_node.payload != null or while_node.continue_expr != null) Space.Space else block_start_space; - try renderToken(tree, stream, cond_rparen, rparen_space); // ) + try renderToken(tree, ais, cond_rparen, rparen_space); // ) } if (while_node.payload) |payload| { const payload_space = Space.Space; //if (while_node.continue_expr != null) Space.Space else block_start_space; - try renderExpression(allocator, stream, tree, payload, payload_space); + try renderExpression(allocator, ais, tree, payload, payload_space); } if (while_node.continue_expr) 
|continue_expr| { @@ -1768,22 +1767,22 @@ fn renderExpression( const lparen = tree.prevToken(continue_expr.firstToken()); const colon = tree.prevToken(lparen); - try renderToken(tree, stream, colon, Space.Space); // : - try renderToken(tree, stream, lparen, Space.None); // ( + try renderToken(tree, ais, colon, Space.Space); // : + try renderToken(tree, ais, lparen, Space.None); // ( - try renderExpression(allocator, stream, tree, continue_expr, Space.None); + try renderExpression(allocator, ais, tree, continue_expr, Space.None); - try renderToken(tree, stream, rparen, block_start_space); // ) + try renderToken(tree, ais, rparen, block_start_space); // ) } { - if (!body_is_block) stream.pushIndent(); - defer if (!body_is_block) stream.popIndent(); - try renderExpression(allocator, stream, tree, while_node.body, after_body_space); + if (!body_is_block) ais.pushIndent(); + defer if (!body_is_block) ais.popIndent(); + try renderExpression(allocator, ais, tree, while_node.body, after_body_space); } if (while_node.@"else") |@"else"| { - return renderExpression(allocator, stream, tree, &@"else".base, space); + return renderExpression(allocator, ais, tree, &@"else".base, space); } }, @@ -1791,17 +1790,17 @@ fn renderExpression( const for_node = @fieldParentPtr(ast.Node.For, "base", base); if (for_node.label) |label| { - try renderToken(tree, stream, label, Space.None); // label - try renderToken(tree, stream, tree.nextToken(label), Space.Space); // : + try renderToken(tree, ais, label, Space.None); // label + try renderToken(tree, ais, tree.nextToken(label), Space.Space); // : } if (for_node.inline_token) |inline_token| { - try renderToken(tree, stream, inline_token, Space.Space); // inline + try renderToken(tree, ais, inline_token, Space.Space); // inline } - try renderToken(tree, stream, for_node.for_token, Space.Space); // for - try renderToken(tree, stream, tree.nextToken(for_node.for_token), Space.None); // ( - try renderExpression(allocator, stream, tree, 
for_node.array_expr, Space.None); + try renderToken(tree, ais, for_node.for_token, Space.Space); // for + try renderToken(tree, ais, tree.nextToken(for_node.for_token), Space.None); // ( + try renderExpression(allocator, ais, tree, for_node.array_expr, Space.None); const rparen = tree.nextToken(for_node.array_expr.lastToken()); @@ -1809,10 +1808,10 @@ fn renderExpression( const src_one_line_to_body = !body_is_block and tree.tokensOnSameLine(rparen, for_node.body.firstToken()); const body_on_same_line = body_is_block or src_one_line_to_body; - try renderToken(tree, stream, rparen, Space.Space); // ) + try renderToken(tree, ais, rparen, Space.Space); // ) const space_after_payload = if (body_on_same_line) Space.Space else Space.Newline; - try renderExpression(allocator, stream, tree, for_node.payload, space_after_payload); // |x| + try renderExpression(allocator, ais, tree, for_node.payload, space_after_payload); // |x| const space_after_body = blk: { if (for_node.@"else") |@"else"| { @@ -1828,13 +1827,13 @@ fn renderExpression( }; { - if (!body_on_same_line) stream.pushIndent(); - defer if (!body_on_same_line) stream.popIndent(); - try renderExpression(allocator, stream, tree, for_node.body, space_after_body); // { body } + if (!body_on_same_line) ais.pushIndent(); + defer if (!body_on_same_line) ais.popIndent(); + try renderExpression(allocator, ais, tree, for_node.body, space_after_body); // { body } } if (for_node.@"else") |@"else"| { - return renderExpression(allocator, stream, tree, &@"else".base, space); // else + return renderExpression(allocator, ais, tree, &@"else".base, space); // else } }, @@ -1844,29 +1843,29 @@ fn renderExpression( const lparen = tree.nextToken(if_node.if_token); const rparen = tree.nextToken(if_node.condition.lastToken()); - try renderToken(tree, stream, if_node.if_token, Space.Space); // if - try renderToken(tree, stream, lparen, Space.None); // ( + try renderToken(tree, ais, if_node.if_token, Space.Space); // if + try 
renderToken(tree, ais, lparen, Space.None); // ( - try renderExpression(allocator, stream, tree, if_node.condition, Space.None); // condition + try renderExpression(allocator, ais, tree, if_node.condition, Space.None); // condition const body_is_if_block = if_node.body.tag == .If; const body_is_block = nodeIsBlock(if_node.body); if (body_is_if_block) { - try renderExtraNewline(tree, stream, if_node.body); + try renderExtraNewline(tree, ais, if_node.body); } else if (body_is_block) { const after_rparen_space = if (if_node.payload == null) Space.BlockStart else Space.Space; - try renderToken(tree, stream, rparen, after_rparen_space); // ) + try renderToken(tree, ais, rparen, after_rparen_space); // ) if (if_node.payload) |payload| { - try renderExpression(allocator, stream, tree, payload, Space.BlockStart); // |x| + try renderExpression(allocator, ais, tree, payload, Space.BlockStart); // |x| } if (if_node.@"else") |@"else"| { - try renderExpression(allocator, stream, tree, if_node.body, Space.SpaceOrOutdent); - return renderExpression(allocator, stream, tree, &@"else".base, space); + try renderExpression(allocator, ais, tree, if_node.body, Space.SpaceOrOutdent); + return renderExpression(allocator, ais, tree, &@"else".base, space); } else { - return renderExpression(allocator, stream, tree, if_node.body, space); + return renderExpression(allocator, ais, tree, if_node.body, space); } } @@ -1874,121 +1873,121 @@ fn renderExpression( if (src_has_newline) { const after_rparen_space = if (if_node.payload == null) Space.Newline else Space.Space; - try renderToken(tree, stream, rparen, after_rparen_space); // ) + try renderToken(tree, ais, rparen, after_rparen_space); // ) if (if_node.payload) |payload| { - try renderExpression(allocator, stream, tree, payload, Space.Newline); + try renderExpression(allocator, ais, tree, payload, Space.Newline); } if (if_node.@"else") |@"else"| { const else_is_block = nodeIsBlock(@"else".body); { - stream.pushIndent(); - defer 
stream.popIndent(); - try renderExpression(allocator, stream, tree, if_node.body, Space.Newline); + ais.pushIndent(); + defer ais.popIndent(); + try renderExpression(allocator, ais, tree, if_node.body, Space.Newline); } if (else_is_block) { - try renderToken(tree, stream, @"else".else_token, Space.Space); // else + try renderToken(tree, ais, @"else".else_token, Space.Space); // else if (@"else".payload) |payload| { - try renderExpression(allocator, stream, tree, payload, Space.Space); + try renderExpression(allocator, ais, tree, payload, Space.Space); } - return renderExpression(allocator, stream, tree, @"else".body, space); + return renderExpression(allocator, ais, tree, @"else".body, space); } else { const after_else_space = if (@"else".payload == null) Space.Newline else Space.Space; - try renderToken(tree, stream, @"else".else_token, after_else_space); // else + try renderToken(tree, ais, @"else".else_token, after_else_space); // else if (@"else".payload) |payload| { - try renderExpression(allocator, stream, tree, payload, Space.Newline); + try renderExpression(allocator, ais, tree, payload, Space.Newline); } - stream.pushIndent(); - defer stream.popIndent(); - return renderExpression(allocator, stream, tree, @"else".body, space); + ais.pushIndent(); + defer ais.popIndent(); + return renderExpression(allocator, ais, tree, @"else".body, space); } } else { - stream.pushIndent(); - defer stream.popIndent(); - return renderExpression(allocator, stream, tree, if_node.body, space); + ais.pushIndent(); + defer ais.popIndent(); + return renderExpression(allocator, ais, tree, if_node.body, space); } } // Single line if statement - try renderToken(tree, stream, rparen, Space.Space); // ) + try renderToken(tree, ais, rparen, Space.Space); // ) if (if_node.payload) |payload| { - try renderExpression(allocator, stream, tree, payload, Space.Space); + try renderExpression(allocator, ais, tree, payload, Space.Space); } if (if_node.@"else") |@"else"| { - try 
renderExpression(allocator, stream, tree, if_node.body, Space.Space); - try renderToken(tree, stream, @"else".else_token, Space.Space); + try renderExpression(allocator, ais, tree, if_node.body, Space.Space); + try renderToken(tree, ais, @"else".else_token, Space.Space); if (@"else".payload) |payload| { - try renderExpression(allocator, stream, tree, payload, Space.Space); + try renderExpression(allocator, ais, tree, payload, Space.Space); } - return renderExpression(allocator, stream, tree, @"else".body, space); + return renderExpression(allocator, ais, tree, @"else".body, space); } else { - return renderExpression(allocator, stream, tree, if_node.body, space); + return renderExpression(allocator, ais, tree, if_node.body, space); } }, .Asm => { const asm_node = @fieldParentPtr(ast.Node.Asm, "base", base); - try renderToken(tree, stream, asm_node.asm_token, Space.Space); // asm + try renderToken(tree, ais, asm_node.asm_token, Space.Space); // asm if (asm_node.volatile_token) |volatile_token| { - try renderToken(tree, stream, volatile_token, Space.Space); // volatile - try renderToken(tree, stream, tree.nextToken(volatile_token), Space.None); // ( + try renderToken(tree, ais, volatile_token, Space.Space); // volatile + try renderToken(tree, ais, tree.nextToken(volatile_token), Space.None); // ( } else { - try renderToken(tree, stream, tree.nextToken(asm_node.asm_token), Space.None); // ( + try renderToken(tree, ais, tree.nextToken(asm_node.asm_token), Space.None); // ( } asmblk: { - stream.pushIndent(); - defer stream.popIndent(); + ais.pushIndent(); + defer ais.popIndent(); if (asm_node.outputs.len == 0 and asm_node.inputs.len == 0 and asm_node.clobbers.len == 0) { - try renderExpression(allocator, stream, tree, asm_node.template, Space.None); + try renderExpression(allocator, ais, tree, asm_node.template, Space.None); break :asmblk; } - try renderExpression(allocator, stream, tree, asm_node.template, Space.Newline); + try renderExpression(allocator, ais, tree, 
asm_node.template, Space.Newline); - stream.setIndentDelta(asm_indent_delta); - defer stream.setIndentDelta(indent_delta); + ais.setIndentDelta(asm_indent_delta); + defer ais.setIndentDelta(indent_delta); const colon1 = tree.nextToken(asm_node.template.lastToken()); const colon2 = if (asm_node.outputs.len == 0) blk: { - try renderToken(tree, stream, colon1, Space.Newline); // : + try renderToken(tree, ais, colon1, Space.Newline); // : break :blk tree.nextToken(colon1); } else blk: { - try renderToken(tree, stream, colon1, Space.Space); // : + try renderToken(tree, ais, colon1, Space.Space); // : - stream.pushIndent(); - defer stream.popIndent(); + ais.pushIndent(); + defer ais.popIndent(); for (asm_node.outputs) |*asm_output, i| { if (i + 1 < asm_node.outputs.len) { const next_asm_output = asm_node.outputs[i + 1]; - try renderAsmOutput(allocator, stream, tree, asm_output, Space.None); + try renderAsmOutput(allocator, ais, tree, asm_output, Space.None); const comma = tree.prevToken(next_asm_output.firstToken()); - try renderToken(tree, stream, comma, Space.Newline); // , - try renderExtraNewlineToken(tree, stream, next_asm_output.firstToken()); + try renderToken(tree, ais, comma, Space.Newline); // , + try renderExtraNewlineToken(tree, ais, next_asm_output.firstToken()); } else if (asm_node.inputs.len == 0 and asm_node.clobbers.len == 0) { - try renderAsmOutput(allocator, stream, tree, asm_output, Space.Newline); + try renderAsmOutput(allocator, ais, tree, asm_output, Space.Newline); break :asmblk; } else { - try renderAsmOutput(allocator, stream, tree, asm_output, Space.Newline); + try renderAsmOutput(allocator, ais, tree, asm_output, Space.Newline); const comma_or_colon = tree.nextToken(asm_output.lastToken()); break :blk switch (tree.token_ids[comma_or_colon]) { .Comma => tree.nextToken(comma_or_colon), @@ -2000,25 +1999,25 @@ fn renderExpression( }; const colon3 = if (asm_node.inputs.len == 0) blk: { - try renderToken(tree, stream, colon2, Space.Newline); // : + 
try renderToken(tree, ais, colon2, Space.Newline); // : break :blk tree.nextToken(colon2); } else blk: { - try renderToken(tree, stream, colon2, Space.Space); // : - stream.pushIndent(); - defer stream.popIndent(); + try renderToken(tree, ais, colon2, Space.Space); // : + ais.pushIndent(); + defer ais.popIndent(); for (asm_node.inputs) |*asm_input, i| { if (i + 1 < asm_node.inputs.len) { const next_asm_input = &asm_node.inputs[i + 1]; - try renderAsmInput(allocator, stream, tree, asm_input, Space.None); + try renderAsmInput(allocator, ais, tree, asm_input, Space.None); const comma = tree.prevToken(next_asm_input.firstToken()); - try renderToken(tree, stream, comma, Space.Newline); // , - try renderExtraNewlineToken(tree, stream, next_asm_input.firstToken()); + try renderToken(tree, ais, comma, Space.Newline); // , + try renderExtraNewlineToken(tree, ais, next_asm_input.firstToken()); } else if (asm_node.clobbers.len == 0) { - try renderAsmInput(allocator, stream, tree, asm_input, Space.Newline); + try renderAsmInput(allocator, ais, tree, asm_input, Space.Newline); break :asmblk; } else { - try renderAsmInput(allocator, stream, tree, asm_input, Space.Newline); + try renderAsmInput(allocator, ais, tree, asm_input, Space.Newline); const comma_or_colon = tree.nextToken(asm_input.lastToken()); break :blk switch (tree.token_ids[comma_or_colon]) { .Comma => tree.nextToken(comma_or_colon), @@ -2029,29 +2028,29 @@ fn renderExpression( unreachable; }; - try renderToken(tree, stream, colon3, Space.Space); // : - stream.pushIndent(); - defer stream.popIndent(); + try renderToken(tree, ais, colon3, Space.Space); // : + ais.pushIndent(); + defer ais.popIndent(); for (asm_node.clobbers) |clobber_node, i| { if (i + 1 >= asm_node.clobbers.len) { - try renderExpression(allocator, stream, tree, clobber_node, Space.Newline); + try renderExpression(allocator, ais, tree, clobber_node, Space.Newline); break :asmblk; } else { - try renderExpression(allocator, stream, tree, clobber_node, 
Space.None); + try renderExpression(allocator, ais, tree, clobber_node, Space.None); const comma = tree.nextToken(clobber_node.lastToken()); - try renderToken(tree, stream, comma, Space.Space); // , + try renderToken(tree, ais, comma, Space.Space); // , } } } - return renderToken(tree, stream, asm_node.rparen, space); + return renderToken(tree, ais, asm_node.rparen, space); }, .EnumLiteral => { const enum_literal = @fieldParentPtr(ast.Node.EnumLiteral, "base", base); - try renderToken(tree, stream, enum_literal.dot, Space.None); // . - return renderToken(tree, stream, enum_literal.name, space); // name + try renderToken(tree, ais, enum_literal.dot, Space.None); // . + return renderToken(tree, ais, enum_literal.name, space); // name }, .ContainerField, @@ -2065,14 +2064,14 @@ fn renderExpression( fn renderArrayType( allocator: *mem.Allocator, - stream: anytype, + ais: anytype, tree: *ast.Tree, lbracket: ast.TokenIndex, rhs: *ast.Node, len_expr: *ast.Node, opt_sentinel: ?*ast.Node, space: Space, -) (@TypeOf(stream.*).Error || Error)!void { +) (@TypeOf(ais.*).Error || Error)!void { const rbracket = tree.nextToken(if (opt_sentinel) |sentinel| sentinel.lastToken() else @@ -2083,97 +2082,97 @@ fn renderArrayType( const new_space = if (ends_with_comment) Space.Newline else Space.None; { const do_indent = (starts_with_comment or ends_with_comment); - if (do_indent) stream.pushIndent(); - defer if (do_indent) stream.popIndent(); + if (do_indent) ais.pushIndent(); + defer if (do_indent) ais.popIndent(); - try renderToken(tree, stream, lbracket, Space.None); // [ - try renderExpression(allocator, stream, tree, len_expr, new_space); + try renderToken(tree, ais, lbracket, Space.None); // [ + try renderExpression(allocator, ais, tree, len_expr, new_space); if (starts_with_comment) { - try stream.maybeInsertNewline(); + try ais.maybeInsertNewline(); } if (opt_sentinel) |sentinel| { const colon_token = tree.prevToken(sentinel.firstToken()); - try renderToken(tree, stream, 
colon_token, Space.None); // : - try renderExpression(allocator, stream, tree, sentinel, Space.None); + try renderToken(tree, ais, colon_token, Space.None); // : + try renderExpression(allocator, ais, tree, sentinel, Space.None); } if (starts_with_comment) { - try stream.maybeInsertNewline(); + try ais.maybeInsertNewline(); } } - try renderToken(tree, stream, rbracket, Space.None); // ] + try renderToken(tree, ais, rbracket, Space.None); // ] - return renderExpression(allocator, stream, tree, rhs, space); + return renderExpression(allocator, ais, tree, rhs, space); } fn renderAsmOutput( allocator: *mem.Allocator, - stream: anytype, + ais: anytype, tree: *ast.Tree, asm_output: *const ast.Node.Asm.Output, space: Space, -) (@TypeOf(stream.*).Error || Error)!void { - try stream.writer().writeAll("["); - try renderExpression(allocator, stream, tree, asm_output.symbolic_name, Space.None); - try stream.writer().writeAll("] "); - try renderExpression(allocator, stream, tree, asm_output.constraint, Space.None); - try stream.writer().writeAll(" ("); +) (@TypeOf(ais.*).Error || Error)!void { + try ais.writer().writeAll("["); + try renderExpression(allocator, ais, tree, asm_output.symbolic_name, Space.None); + try ais.writer().writeAll("] "); + try renderExpression(allocator, ais, tree, asm_output.constraint, Space.None); + try ais.writer().writeAll(" ("); switch (asm_output.kind) { ast.Node.Asm.Output.Kind.Variable => |variable_name| { - try renderExpression(allocator, stream, tree, &variable_name.base, Space.None); + try renderExpression(allocator, ais, tree, &variable_name.base, Space.None); }, ast.Node.Asm.Output.Kind.Return => |return_type| { - try stream.writer().writeAll("-> "); - try renderExpression(allocator, stream, tree, return_type, Space.None); + try ais.writer().writeAll("-> "); + try renderExpression(allocator, ais, tree, return_type, Space.None); }, } - return renderToken(tree, stream, asm_output.lastToken(), space); // ) + return renderToken(tree, ais, 
asm_output.lastToken(), space); // ) } fn renderAsmInput( allocator: *mem.Allocator, - stream: anytype, + ais: anytype, tree: *ast.Tree, asm_input: *const ast.Node.Asm.Input, space: Space, -) (@TypeOf(stream.*).Error || Error)!void { - try stream.writer().writeAll("["); - try renderExpression(allocator, stream, tree, asm_input.symbolic_name, Space.None); - try stream.writer().writeAll("] "); - try renderExpression(allocator, stream, tree, asm_input.constraint, Space.None); - try stream.writer().writeAll(" ("); - try renderExpression(allocator, stream, tree, asm_input.expr, Space.None); - return renderToken(tree, stream, asm_input.lastToken(), space); // ) +) (@TypeOf(ais.*).Error || Error)!void { + try ais.writer().writeAll("["); + try renderExpression(allocator, ais, tree, asm_input.symbolic_name, Space.None); + try ais.writer().writeAll("] "); + try renderExpression(allocator, ais, tree, asm_input.constraint, Space.None); + try ais.writer().writeAll(" ("); + try renderExpression(allocator, ais, tree, asm_input.expr, Space.None); + return renderToken(tree, ais, asm_input.lastToken(), space); // ) } fn renderVarDecl( allocator: *mem.Allocator, - stream: anytype, + ais: anytype, tree: *ast.Tree, var_decl: *ast.Node.VarDecl, -) (@TypeOf(stream.*).Error || Error)!void { +) (@TypeOf(ais.*).Error || Error)!void { if (var_decl.getVisibToken()) |visib_token| { - try renderToken(tree, stream, visib_token, Space.Space); // pub + try renderToken(tree, ais, visib_token, Space.Space); // pub } if (var_decl.getExternExportToken()) |extern_export_token| { - try renderToken(tree, stream, extern_export_token, Space.Space); // extern + try renderToken(tree, ais, extern_export_token, Space.Space); // extern if (var_decl.getLibName()) |lib_name| { - try renderExpression(allocator, stream, tree, lib_name, Space.Space); // "lib" + try renderExpression(allocator, ais, tree, lib_name, Space.Space); // "lib" } } if (var_decl.getComptimeToken()) |comptime_token| { - try renderToken(tree, 
stream, comptime_token, Space.Space); // comptime + try renderToken(tree, ais, comptime_token, Space.Space); // comptime } if (var_decl.getThreadLocalToken()) |thread_local_token| { - try renderToken(tree, stream, thread_local_token, Space.Space); // threadlocal + try renderToken(tree, ais, thread_local_token, Space.Space); // threadlocal } - try renderToken(tree, stream, var_decl.mut_token, Space.Space); // var + try renderToken(tree, ais, var_decl.mut_token, Space.Space); // var const name_space = if (var_decl.getTypeNode() == null and (var_decl.getAlignNode() != null or @@ -2182,92 +2181,92 @@ fn renderVarDecl( Space.Space else Space.None; - try renderToken(tree, stream, var_decl.name_token, name_space); + try renderToken(tree, ais, var_decl.name_token, name_space); if (var_decl.getTypeNode()) |type_node| { - try renderToken(tree, stream, tree.nextToken(var_decl.name_token), Space.Space); + try renderToken(tree, ais, tree.nextToken(var_decl.name_token), Space.Space); const s = if (var_decl.getAlignNode() != null or var_decl.getSectionNode() != null or var_decl.getInitNode() != null) Space.Space else Space.None; - try renderExpression(allocator, stream, tree, type_node, s); + try renderExpression(allocator, ais, tree, type_node, s); } if (var_decl.getAlignNode()) |align_node| { const lparen = tree.prevToken(align_node.firstToken()); const align_kw = tree.prevToken(lparen); const rparen = tree.nextToken(align_node.lastToken()); - try renderToken(tree, stream, align_kw, Space.None); // align - try renderToken(tree, stream, lparen, Space.None); // ( - try renderExpression(allocator, stream, tree, align_node, Space.None); + try renderToken(tree, ais, align_kw, Space.None); // align + try renderToken(tree, ais, lparen, Space.None); // ( + try renderExpression(allocator, ais, tree, align_node, Space.None); const s = if (var_decl.getSectionNode() != null or var_decl.getInitNode() != null) Space.Space else Space.None; - try renderToken(tree, stream, rparen, s); // ) + 
try renderToken(tree, ais, rparen, s); // ) } if (var_decl.getSectionNode()) |section_node| { const lparen = tree.prevToken(section_node.firstToken()); const section_kw = tree.prevToken(lparen); const rparen = tree.nextToken(section_node.lastToken()); - try renderToken(tree, stream, section_kw, Space.None); // linksection - try renderToken(tree, stream, lparen, Space.None); // ( - try renderExpression(allocator, stream, tree, section_node, Space.None); + try renderToken(tree, ais, section_kw, Space.None); // linksection + try renderToken(tree, ais, lparen, Space.None); // ( + try renderExpression(allocator, ais, tree, section_node, Space.None); const s = if (var_decl.getInitNode() != null) Space.Space else Space.None; - try renderToken(tree, stream, rparen, s); // ) + try renderToken(tree, ais, rparen, s); // ) } if (var_decl.getInitNode()) |init_node| { const s = if (init_node.tag == .MultilineStringLiteral) Space.None else Space.Space; - try renderToken(tree, stream, var_decl.getEqToken().?, s); // = - stream.pushIndentOneShot(); - try renderExpression(allocator, stream, tree, init_node, Space.None); + try renderToken(tree, ais, var_decl.getEqToken().?, s); // = + ais.pushIndentOneShot(); + try renderExpression(allocator, ais, tree, init_node, Space.None); } - try renderToken(tree, stream, var_decl.semicolon_token, Space.Newline); + try renderToken(tree, ais, var_decl.semicolon_token, Space.Newline); } fn renderParamDecl( allocator: *mem.Allocator, - stream: anytype, + ais: anytype, tree: *ast.Tree, param_decl: ast.Node.FnProto.ParamDecl, space: Space, -) (@TypeOf(stream.*).Error || Error)!void { - try renderDocComments(tree, stream, param_decl, param_decl.doc_comments); +) (@TypeOf(ais.*).Error || Error)!void { + try renderDocComments(tree, ais, param_decl, param_decl.doc_comments); if (param_decl.comptime_token) |comptime_token| { - try renderToken(tree, stream, comptime_token, Space.Space); + try renderToken(tree, ais, comptime_token, Space.Space); } if 
(param_decl.noalias_token) |noalias_token| { - try renderToken(tree, stream, noalias_token, Space.Space); + try renderToken(tree, ais, noalias_token, Space.Space); } if (param_decl.name_token) |name_token| { - try renderToken(tree, stream, name_token, Space.None); - try renderToken(tree, stream, tree.nextToken(name_token), Space.Space); // : + try renderToken(tree, ais, name_token, Space.None); + try renderToken(tree, ais, tree.nextToken(name_token), Space.Space); // : } switch (param_decl.param_type) { - .any_type, .type_expr => |node| try renderExpression(allocator, stream, tree, node, space), + .any_type, .type_expr => |node| try renderExpression(allocator, ais, tree, node, space), } } fn renderStatement( allocator: *mem.Allocator, - stream: anytype, + ais: anytype, tree: *ast.Tree, base: *ast.Node, -) (@TypeOf(stream.*).Error || Error)!void { +) (@TypeOf(ais.*).Error || Error)!void { switch (base.tag) { .VarDecl => { const var_decl = @fieldParentPtr(ast.Node.VarDecl, "base", base); - try renderVarDecl(allocator, stream, tree, var_decl); + try renderVarDecl(allocator, ais, tree, var_decl); }, else => { if (base.requireSemiColon()) { - try renderExpression(allocator, stream, tree, base, Space.None); + try renderExpression(allocator, ais, tree, base, Space.None); const semicolon_index = tree.nextToken(base.lastToken()); assert(tree.token_ids[semicolon_index] == .Semicolon); - try renderToken(tree, stream, semicolon_index, Space.Newline); + try renderToken(tree, ais, semicolon_index, Space.Newline); } else { - try renderExpression(allocator, stream, tree, base, Space.Newline); + try renderExpression(allocator, ais, tree, base, Space.Newline); } }, } @@ -2286,19 +2285,19 @@ const Space = enum { fn renderTokenOffset( tree: *ast.Tree, - stream: anytype, + ais: anytype, token_index: ast.TokenIndex, space: Space, token_skip_bytes: usize, -) (@TypeOf(stream.*).Error || Error)!void { +) (@TypeOf(ais.*).Error || Error)!void { if (space == Space.BlockStart) { // If placing 
the lbrace on the current line would cause an uggly gap then put the lbrace on the next line - const new_space = if (stream.isLineOverIndented()) Space.Newline else Space.Space; - return renderToken(tree, stream, token_index, new_space); + const new_space = if (ais.isLineOverIndented()) Space.Newline else Space.Space; + return renderToken(tree, ais, token_index, new_space); } var token_loc = tree.token_locs[token_index]; - try stream.writer().writeAll(mem.trimRight(u8, tree.tokenSliceLoc(token_loc)[token_skip_bytes..], " ")); + try ais.writer().writeAll(mem.trimRight(u8, tree.tokenSliceLoc(token_loc)[token_skip_bytes..], " ")); if (space == Space.NoComment) return; @@ -2307,20 +2306,20 @@ fn renderTokenOffset( var next_token_loc = tree.token_locs[token_index + 1]; if (space == Space.Comma) switch (next_token_id) { - .Comma => return renderToken(tree, stream, token_index + 1, Space.Newline), + .Comma => return renderToken(tree, ais, token_index + 1, Space.Newline), .LineComment => { - try stream.writer().writeAll(", "); - return renderToken(tree, stream, token_index + 1, Space.Newline); + try ais.writer().writeAll(", "); + return renderToken(tree, ais, token_index + 1, Space.Newline); }, else => { if (token_index + 2 < tree.token_ids.len and tree.token_ids[token_index + 2] == .MultilineStringLiteralLine) { - try stream.writer().writeAll(","); + try ais.writer().writeAll(","); return; } else { - try stream.writer().writeAll(","); - try stream.insertNewline(); + try ais.writer().writeAll(","); + try ais.insertNewline(); return; } }, @@ -2344,14 +2343,14 @@ fn renderTokenOffset( if (next_token_id == .MultilineStringLiteralLine) { return; } else { - try stream.insertNewline(); + try ais.insertNewline(); return; } }, Space.Space, Space.SpaceOrOutdent => { if (next_token_id == .MultilineStringLiteralLine) return; - try stream.writer().writeByte(' '); + try ais.writer().writeByte(' '); return; }, Space.NoComment, Space.Comma, Space.BlockStart => unreachable, @@ -2368,7 
+2367,7 @@ fn renderTokenOffset( next_token_id = tree.token_ids[token_index + offset]; next_token_loc = tree.token_locs[token_index + offset]; if (next_token_id != .LineComment) { - try stream.insertNewline(); + try ais.insertNewline(); return; } }, @@ -2381,7 +2380,7 @@ fn renderTokenOffset( var loc = tree.tokenLocationLoc(token_loc.end, next_token_loc); if (loc.line == 0) { - try stream.writer().print(" {}", .{mem.trimRight(u8, tree.tokenSliceLoc(next_token_loc), " ")}); + try ais.writer().print(" {}", .{mem.trimRight(u8, tree.tokenSliceLoc(next_token_loc), " ")}); offset = 2; token_loc = next_token_loc; next_token_loc = tree.token_locs[token_index + offset]; @@ -2389,16 +2388,16 @@ fn renderTokenOffset( if (next_token_id != .LineComment) { switch (space) { Space.None, Space.Space => { - try stream.insertNewline(); + try ais.insertNewline(); }, Space.SpaceOrOutdent => { - try stream.insertNewline(); + try ais.insertNewline(); }, Space.Newline => { if (next_token_id == .MultilineStringLiteralLine) { return; } else { - try stream.insertNewline(); + try ais.insertNewline(); return; } }, @@ -2415,8 +2414,8 @@ fn renderTokenOffset( // in generated code (loc.line == 0) so treat that case // as though there was meant to be a newline between the tokens var newline_count = if (loc.line <= 1) @as(u8, 1) else @as(u8, 2); - while (newline_count > 0) : (newline_count -= 1) try stream.insertNewline(); - try stream.writer().writeAll(mem.trimRight(u8, tree.tokenSliceLoc(next_token_loc), " ")); + while (newline_count > 0) : (newline_count -= 1) try ais.insertNewline(); + try ais.writer().writeAll(mem.trimRight(u8, tree.tokenSliceLoc(next_token_loc), " ")); offset += 1; token_loc = next_token_loc; @@ -2428,15 +2427,15 @@ fn renderTokenOffset( if (next_token_id == .MultilineStringLiteralLine) { return; } else { - try stream.insertNewline(); + try ais.insertNewline(); return; } }, Space.None, Space.Space => { - try stream.insertNewline(); + try ais.insertNewline(); }, 
Space.SpaceOrOutdent => { - try stream.insertNewline(); + try ais.insertNewline(); }, Space.NoNewline => {}, Space.NoComment, Space.Comma, Space.BlockStart => unreachable, @@ -2449,38 +2448,38 @@ fn renderTokenOffset( fn renderToken( tree: *ast.Tree, - stream: anytype, + ais: anytype, token_index: ast.TokenIndex, space: Space, -) (@TypeOf(stream.*).Error || Error)!void { - return renderTokenOffset(tree, stream, token_index, space, 0); +) (@TypeOf(ais.*).Error || Error)!void { + return renderTokenOffset(tree, ais, token_index, space, 0); } fn renderDocComments( tree: *ast.Tree, - stream: anytype, + ais: anytype, node: anytype, doc_comments: ?*ast.Node.DocComment, -) (@TypeOf(stream.*).Error || Error)!void { +) (@TypeOf(ais.*).Error || Error)!void { const comment = doc_comments orelse return; - return renderDocCommentsToken(tree, stream, comment, node.firstToken()); + return renderDocCommentsToken(tree, ais, comment, node.firstToken()); } fn renderDocCommentsToken( tree: *ast.Tree, - stream: anytype, + ais: anytype, comment: *ast.Node.DocComment, first_token: ast.TokenIndex, -) (@TypeOf(stream.*).Error || Error)!void { +) (@TypeOf(ais.*).Error || Error)!void { var tok_i = comment.first_line; while (true) : (tok_i += 1) { switch (tree.token_ids[tok_i]) { .DocComment, .ContainerDocComment => { if (comment.first_line < first_token) { - try renderToken(tree, stream, tok_i, Space.Newline); + try renderToken(tree, ais, tok_i, Space.Newline); } else { - try renderToken(tree, stream, tok_i, Space.NoComment); - try stream.insertNewline(); + try renderToken(tree, ais, tok_i, Space.NoComment); + try ais.insertNewline(); } }, .LineComment => continue, @@ -2552,10 +2551,10 @@ fn nodeCausesSliceOpSpace(base: *ast.Node) bool { }; } -fn copyFixingWhitespace(stream: anytype, slice: []const u8) @TypeOf(stream.*).Error!void { +fn copyFixingWhitespace(ais: anytype, slice: []const u8) @TypeOf(ais.*).Error!void { for (slice) |byte| switch (byte) { - '\t' => try stream.writer().writeAll(" 
"), + '\t' => try ais.writer().writeAll(" "), '\r' => {}, - else => try stream.writer().writeByte(byte), + else => try ais.writer().writeByte(byte), }; } diff --git a/src-self-hosted/main.zig b/src-self-hosted/main.zig index d14a28419d..8c5c034238 100644 --- a/src-self-hosted/main.zig +++ b/src-self-hosted/main.zig @@ -682,13 +682,13 @@ pub fn cmdFmt(gpa: *Allocator, args: []const []const u8) !void { process.exit(1); } if (check_flag) { - const anything_changed = try std.zig.render(gpa, &io.null_out_stream, tree); + const anything_changed = try std.zig.render(gpa, io.null_out_stream, tree); const code = if (anything_changed) @as(u8, 1) else @as(u8, 0); process.exit(code); } const stdout = io.getStdOut().outStream(); - _ = try std.zig.render(gpa, &stdout, tree); + _ = try std.zig.render(gpa, stdout, tree); return; } @@ -830,7 +830,7 @@ fn fmtPathFile( } if (check_mode) { - const anything_changed = try std.zig.render(fmt.gpa, &io.null_out_stream, tree); + const anything_changed = try std.zig.render(fmt.gpa, io.null_out_stream, tree); if (anything_changed) { std.debug.print("{}\n", .{file_path}); fmt.any_error = true; @@ -840,7 +840,7 @@ fn fmtPathFile( try fmt.out_buffer.ensureCapacity(source_code.len); fmt.out_buffer.items.len = 0; const writer = fmt.out_buffer.writer(); - const anything_changed = try std.zig.render(fmt.gpa, &writer, tree); + const anything_changed = try std.zig.render(fmt.gpa, writer, tree); if (!anything_changed) return; // Good thing we didn't waste any file system access on this. 
diff --git a/src-self-hosted/stage2.zig b/src-self-hosted/stage2.zig index 29b8f3df44..30d2ea44db 100644 --- a/src-self-hosted/stage2.zig +++ b/src-self-hosted/stage2.zig @@ -151,7 +151,7 @@ export fn stage2_free_clang_errors(errors_ptr: [*]translate_c.ClangErrMsg, error export fn stage2_render_ast(tree: *ast.Tree, output_file: *FILE) Error { const c_out_stream = std.io.cOutStream(output_file); - _ = std.zig.render(std.heap.c_allocator, &c_out_stream, tree) catch |e| switch (e) { + _ = std.zig.render(std.heap.c_allocator, c_out_stream, tree) catch |e| switch (e) { error.WouldBlock => unreachable, // stage1 opens stuff in exclusively blocking mode error.NotOpenForWriting => unreachable, error.SystemResources => return .SystemResources, From 25f666330480a2391d1c06e1beab8d517a096e99 Mon Sep 17 00:00:00 2001 From: Aransentin Date: Wed, 2 Sep 2020 22:16:40 +0000 Subject: [PATCH 17/35] Add the sync functions --- lib/std/os.zig | 68 +++++++++++++++++++++++++++++++++ lib/std/os/linux.zig | 16 ++++++++ lib/std/os/windows/kernel32.zig | 2 + 3 files changed, 86 insertions(+) diff --git a/lib/std/os.zig b/lib/std/os.zig index e8431c386b..d25fe2c56a 100644 --- a/lib/std/os.zig +++ b/lib/std/os.zig @@ -5328,3 +5328,71 @@ pub fn signalfd(fd: fd_t, mask: *const sigset_t, flags: u32) !fd_t { else => |err| return std.os.unexpectedErrno(err), } } + +pub const SyncError = error{ + InputOutput, + NoSpaceLeft, + DiskQuota, + AccessDenied, +} || UnexpectedError; + +/// Write all pending file contents and metadata modifications to all filesystems. +pub fn sync() void { + system.sync(); +} + +/// Write all pending file contents and metadata modifications to the filesystem which contains the specified file. 
+pub fn syncfs(fd: fd_t) SyncError!void { + const rc = system.syncfs(fd); + switch (errno(rc)) { + 0 => return, + EBADF, EINVAL, EROFS => unreachable, + EIO => return error.InputOutput, + ENOSPC => return error.NoSpaceLeft, + EDQUOT => return error.DiskQuota, + else => |err| return std.os.unexpectedErrno(err), + } +} + +/// Write all pending file contents and metadata modifications for the specified file descriptor to the underlying filesystem. +pub fn fsync(fd: fd_t) SyncError!void { + if (std.Target.current.os.tag == .windows) { + if (windows.kernel32.FlushFileBuffers(fd) != 0) + return; + switch (windows.kernel32.GetLastError()) { + .SUCCESS => return, + .INVALID_HANDLE => unreachable, + .ACCESS_DENIED => return error.AccessDenied, // a sync was performed but the system couldn't update the access time + .UNEXP_NET_ERR => return error.InputOutput, + else => return error.InputOutput, + } + } + const rc = system.fsync(fd); + switch (errno(rc)) { + 0 => return, + EBADF, EINVAL, EROFS => unreachable, + EIO => return error.InputOutput, + ENOSPC => return error.NoSpaceLeft, + EDQUOT => return error.DiskQuota, + else => |err| return std.os.unexpectedErrno(err), + } +} + +/// Write all pending file contents for the specified file descriptor to the underlying filesystem, but not necessarily the metadata. 
+pub fn fdatasync(fd: fd_t) SyncError!void { + if (std.Target.current.os.tag == .windows) { + return fsync(fd) catch |err| switch (err) { + SyncError.AccessDenied => return, // fdatasync doesn't promise that the access time was synced + else => return err, + }; + } + const rc = system.fdatasync(fd); + switch (errno(rc)) { + 0 => return, + EBADF, EINVAL, EROFS => unreachable, + EIO => return error.InputOutput, + ENOSPC => return error.NoSpaceLeft, + EDQUOT => return error.DiskQuota, + else => |err| return std.os.unexpectedErrno(err), + } +} diff --git a/lib/std/os/linux.zig b/lib/std/os/linux.zig index 13094b3a3a..1f916876cf 100644 --- a/lib/std/os/linux.zig +++ b/lib/std/os/linux.zig @@ -1226,6 +1226,22 @@ pub fn bpf(cmd: BPF.Cmd, attr: *BPF.Attr, size: u32) usize { return syscall3(.bpf, @enumToInt(cmd), @ptrToInt(attr), size); } +pub fn sync() void { + _ = syscall0(.sync); +} + +pub fn syncfs(fd: fd_t) usize { + return syscall1(.syncfs, @bitCast(usize, @as(isize, fd))); +} + +pub fn fsync(fd: fd_t) usize { + return syscall1(.fsync, @bitCast(usize, @as(isize, fd))); +} + +pub fn fdatasync(fd: fd_t) usize { + return syscall1(.fdatasync, @bitCast(usize, @as(isize, fd))); +} + test "" { if (builtin.os.tag == .linux) { _ = @import("linux/test.zig"); diff --git a/lib/std/os/windows/kernel32.zig b/lib/std/os/windows/kernel32.zig index fce9eea908..05d160485d 100644 --- a/lib/std/os/windows/kernel32.zig +++ b/lib/std/os/windows/kernel32.zig @@ -287,3 +287,5 @@ pub extern "kernel32" fn K32GetWsChangesEx(hProcess: HANDLE, lpWatchInfoEx: PPSA pub extern "kernel32" fn K32InitializeProcessForWsWatch(hProcess: HANDLE) callconv(.Stdcall) BOOL; pub extern "kernel32" fn K32QueryWorkingSet(hProcess: HANDLE, pv: PVOID, cb: DWORD) callconv(.Stdcall) BOOL; pub extern "kernel32" fn K32QueryWorkingSetEx(hProcess: HANDLE, pv: PVOID, cb: DWORD) callconv(.Stdcall) BOOL; + +pub extern "kernel32" fn FlushFileBuffers(hFile: HANDLE) callconv(.Stdcall) BOOL; From 
fb3c5b84ede6fa48949c8069bf735ac67ec21091 Mon Sep 17 00:00:00 2001 From: daurnimator Date: Mon, 31 Aug 2020 22:31:29 +1000 Subject: [PATCH 18/35] std: add fmt option to escape non-printable characters --- lib/std/fmt.zig | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/lib/std/fmt.zig b/lib/std/fmt.zig index 16d0eaa07a..3067a55759 100644 --- a/lib/std/fmt.zig +++ b/lib/std/fmt.zig @@ -66,6 +66,7 @@ fn peekIsAlign(comptime fmt: []const u8) bool { /// - output numeric value in hexadecimal notation /// - `s`: print a pointer-to-many as a c-string, use zero-termination /// - `B` and `Bi`: output a memory size in either metric (1000) or power-of-two (1024) based notation. works for both float and integer values. +/// - `e` and `E`: if printing a string, escape non-printable characters /// - `e`: output floating point value in scientific notation /// - `d`: output numeric value in decimal notation /// - `b`: output integer value in binary notation @@ -599,6 +600,16 @@ pub fn formatText( try formatInt(c, 16, fmt[0] == 'X', FormatOptions{ .width = 2, .fill = '0' }, writer); } return; + } else if (comptime (std.mem.eql(u8, fmt, "e") or std.mem.eql(u8, fmt, "E"))) { + for (bytes) |c| { + if (std.ascii.isPrint(c)) { + try writer.writeByte(c); + } else { + try writer.writeAll("\\x"); + try formatInt(c, 16, fmt[0] == 'E', FormatOptions{ .width = 2, .fill = '0' }, writer); + } + } + return; } else { @compileError("Unknown format string: '" ++ fmt ++ "'"); } @@ -1319,6 +1330,12 @@ test "slice" { try testFmt("buf: Test\n Other text", "buf: {s}\n Other text", .{"Test"}); } +test "escape non-printable" { + try testFmt("abc", "{e}", .{"abc"}); + try testFmt("ab\\xffc", "{e}", .{"ab\xffc"}); + try testFmt("ab\\xFFc", "{E}", .{"ab\xffc"}); +} + test "pointer" { { const value = @intToPtr(*align(1) i32, 0xdeadbeef); From e747d2ba172a086e6df831c854ebe7f92bf07cd0 Mon Sep 17 00:00:00 2001 From: Jens Goldberg Date: Thu, 3 Sep 2020 07:49:18 +0000 Subject: [PATCH 19/35] Add 
C declarations and tests for the sync functions --- lib/std/c.zig | 5 +++++ lib/std/os/test.zig | 20 ++++++++++++++++++++ 2 files changed, 25 insertions(+) diff --git a/lib/std/c.zig b/lib/std/c.zig index b4e5fc7392..1b3f403ab5 100644 --- a/lib/std/c.zig +++ b/lib/std/c.zig @@ -330,3 +330,8 @@ pub const FILE = @Type(.Opaque); pub extern "c" fn dlopen(path: [*:0]const u8, mode: c_int) ?*c_void; pub extern "c" fn dlclose(handle: *c_void) c_int; pub extern "c" fn dlsym(handle: ?*c_void, symbol: [*:0]const u8) ?*c_void; + +pub extern "c" fn sync() void; +pub extern "c" fn syncfs(fd: c_int) c_int; +pub extern "c" fn fsync(fd: c_int) c_int; +pub extern "c" fn fdatasync(fd: c_int) c_int; diff --git a/lib/std/os/test.zig b/lib/std/os/test.zig index 576125e2a3..0d8d2cc0db 100644 --- a/lib/std/os/test.zig +++ b/lib/std/os/test.zig @@ -555,3 +555,23 @@ test "signalfd" { return error.SkipZigTest; _ = std.os.signalfd; } + +test "sync" { + if (builtin.os.tag != .linux and builtin.os.tag != .windows) + return error.SkipZigTest; + + var tmp = tmpDir(.{}); + defer tmp.cleanup(); + + const test_out_file = "os_tmp_test"; + const file = try tmp.dir.createFile(test_out_file, .{}); + defer { + file.close(); + tmp.dir.deleteFile(test_out_file) catch {}; + } + + try os.syncfs(file.handle); + try os.fsync(file.handle); + try os.fdatasync(file.handle); + os.sync(); +} From 68818983aef0d44f43f9575d8207053d5b7250ba Mon Sep 17 00:00:00 2001 From: Jens Goldberg Date: Thu, 3 Sep 2020 09:52:26 +0000 Subject: [PATCH 20/35] Split the sync tests into sync and fsync --- lib/std/os/test.zig | 20 ++++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/lib/std/os/test.zig b/lib/std/os/test.zig index 0d8d2cc0db..0a453d8b2e 100644 --- a/lib/std/os/test.zig +++ b/lib/std/os/test.zig @@ -557,6 +557,24 @@ test "signalfd" { } test "sync" { + if (builtin.os.tag != .linux) + return error.SkipZigTest; + + var tmp = tmpDir(.{}); + defer tmp.cleanup(); + + const test_out_file = 
"os_tmp_test"; + const file = try tmp.dir.createFile(test_out_file, .{}); + defer { + file.close(); + tmp.dir.deleteFile(test_out_file) catch {}; + } + + os.sync(); + try os.syncfs(file.handle); +} + +test "fsync" { if (builtin.os.tag != .linux and builtin.os.tag != .windows) return error.SkipZigTest; @@ -570,8 +588,6 @@ test "sync" { tmp.dir.deleteFile(test_out_file) catch {}; } - try os.syncfs(file.handle); try os.fsync(file.handle); try os.fdatasync(file.handle); - os.sync(); } From 9a59cdcd41f5a05d70a02d89178afaf8789791c6 Mon Sep 17 00:00:00 2001 From: Vexu Date: Thu, 27 Aug 2020 23:07:39 +0300 Subject: [PATCH 21/35] stage2: various small type fixes --- src-self-hosted/Module.zig | 6 ++++++ src-self-hosted/type.zig | 10 +++++----- src-self-hosted/value.zig | 6 +++--- src-self-hosted/zir_sema.zig | 6 ++++++ 4 files changed, 20 insertions(+), 8 deletions(-) diff --git a/src-self-hosted/Module.zig b/src-self-hosted/Module.zig index 24dcb541b4..c4b0f70d5c 100644 --- a/src-self-hosted/Module.zig +++ b/src-self-hosted/Module.zig @@ -2801,6 +2801,12 @@ pub fn resolvePeerTypes(self: *Module, scope: *Scope, instructions: []*Inst) !Ty prev_inst = next_inst; continue; } + if (next_inst.ty.zigTypeTag() == .Undefined) + continue; + if (prev_inst.ty.zigTypeTag() == .Undefined) { + prev_inst = next_inst; + continue; + } if (prev_inst.ty.isInt() and next_inst.ty.isInt() and prev_inst.ty.isSignedInt() == next_inst.ty.isSignedInt()) diff --git a/src-self-hosted/type.zig b/src-self-hosted/type.zig index a9a1acf44b..66a7961073 100644 --- a/src-self-hosted/type.zig +++ b/src-self-hosted/type.zig @@ -163,7 +163,7 @@ pub const Type = extern union { // Hot path for common case: if (a.castPointer()) |a_payload| { if (b.castPointer()) |b_payload| { - return eql(a_payload.pointee_type, b_payload.pointee_type); + return a.tag() == b.tag() and eql(a_payload.pointee_type, b_payload.pointee_type); } } const is_slice_a = isSlice(a); @@ -189,7 +189,7 @@ pub const Type = extern union { .Array 
=> { if (a.arrayLen() != b.arrayLen()) return false; - if (a.elemType().eql(b.elemType())) + if (!a.elemType().eql(b.elemType())) return false; const sentinel_a = a.arraySentinel(); const sentinel_b = b.arraySentinel(); @@ -501,9 +501,9 @@ pub const Type = extern union { .noreturn, => return out_stream.writeAll(@tagName(t)), - .enum_literal => return out_stream.writeAll("@TypeOf(.EnumLiteral)"), - .@"null" => return out_stream.writeAll("@TypeOf(null)"), - .@"undefined" => return out_stream.writeAll("@TypeOf(undefined)"), + .enum_literal => return out_stream.writeAll("@Type(.EnumLiteral)"), + .@"null" => return out_stream.writeAll("@Type(.Null)"), + .@"undefined" => return out_stream.writeAll("@Type(.Undefined)"), .@"anyframe" => return out_stream.writeAll("anyframe"), .anyerror_void_error_union => return out_stream.writeAll("anyerror!void"), diff --git a/src-self-hosted/value.zig b/src-self-hosted/value.zig index bfd205f4d9..b65aa06bea 100644 --- a/src-self-hosted/value.zig +++ b/src-self-hosted/value.zig @@ -301,15 +301,15 @@ pub const Value = extern union { .comptime_int_type => return out_stream.writeAll("comptime_int"), .comptime_float_type => return out_stream.writeAll("comptime_float"), .noreturn_type => return out_stream.writeAll("noreturn"), - .null_type => return out_stream.writeAll("@TypeOf(null)"), - .undefined_type => return out_stream.writeAll("@TypeOf(undefined)"), + .null_type => return out_stream.writeAll("@Type(.Null)"), + .undefined_type => return out_stream.writeAll("@Type(.Undefined)"), .fn_noreturn_no_args_type => return out_stream.writeAll("fn() noreturn"), .fn_void_no_args_type => return out_stream.writeAll("fn() void"), .fn_naked_noreturn_no_args_type => return out_stream.writeAll("fn() callconv(.Naked) noreturn"), .fn_ccc_void_no_args_type => return out_stream.writeAll("fn() callconv(.C) void"), .single_const_pointer_to_comptime_int_type => return out_stream.writeAll("*const comptime_int"), .const_slice_u8_type => return 
out_stream.writeAll("[]const u8"), - .enum_literal_type => return out_stream.writeAll("@TypeOf(.EnumLiteral)"), + .enum_literal_type => return out_stream.writeAll("@Type(.EnumLiteral)"), .anyframe_type => return out_stream.writeAll("anyframe"), .null_value => return out_stream.writeAll("null"), diff --git a/src-self-hosted/zir_sema.zig b/src-self-hosted/zir_sema.zig index 676b662077..88a130c1db 100644 --- a/src-self-hosted/zir_sema.zig +++ b/src-self-hosted/zir_sema.zig @@ -1239,6 +1239,12 @@ fn analyzeInstArithmetic(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) Inn if (casted_lhs.value()) |lhs_val| { if (casted_rhs.value()) |rhs_val| { + if (lhs_val.isUndef() or rhs_val.isUndef()) { + return mod.constInst(scope, inst.base.src, .{ + .ty = resolved_type, + .val = Value.initTag(.undef), + }); + } return analyzeInstComptimeOp(mod, scope, scalar_type, inst, lhs_val, rhs_val); } } From ff7c6e1e3cea86e130e15a720c729a05763b5f08 Mon Sep 17 00:00:00 2001 From: Vexu Date: Fri, 28 Aug 2020 14:35:13 +0300 Subject: [PATCH 22/35] stage2: astgen orelse --- src-self-hosted/astgen.zig | 70 ++++++++++++++++++++++++-------------- 1 file changed, 44 insertions(+), 26 deletions(-) diff --git a/src-self-hosted/astgen.zig b/src-self-hosted/astgen.zig index 17db584e56..bb56bf34b1 100644 --- a/src-self-hosted/astgen.zig +++ b/src-self-hosted/astgen.zig @@ -277,10 +277,10 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerEr .ArrayAccess => return arrayAccess(mod, scope, rl, node.castTag(.ArrayAccess).?), .Catch => return catchExpr(mod, scope, rl, node.castTag(.Catch).?), .Comptime => return comptimeKeyword(mod, scope, rl, node.castTag(.Comptime).?), + .OrElse => return orelseExpr(mod, scope, rl, node.castTag(.OrElse).?), .Defer => return mod.failNode(scope, node, "TODO implement astgen.expr for .Defer", .{}), .Range => return mod.failNode(scope, node, "TODO implement astgen.expr for .Range", .{}), - .OrElse => return mod.failNode(scope, node, "TODO 
implement astgen.expr for .OrElse", .{}), .Await => return mod.failNode(scope, node, "TODO implement astgen.expr for .Await", .{}), .Resume => return mod.failNode(scope, node, "TODO implement astgen.expr for .Resume", .{}), .Try => return mod.failNode(scope, node, "TODO implement astgen.expr for .Try", .{}), @@ -790,13 +790,31 @@ fn errorType(mod: *Module, scope: *Scope, node: *ast.Node.OneToken) InnerError!* } fn catchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Catch) InnerError!*zir.Inst { - const tree = scope.tree(); - const src = tree.token_locs[node.op_token].start; + return orelseCatchExpr(mod, scope, rl, node.lhs, node.op_token, .iserr, .unwrap_err_unsafe, node.rhs, node.payload); +} - const err_union_ptr = try expr(mod, scope, .ref, node.lhs); - // TODO we could avoid an unnecessary copy if .iserr took a pointer - const err_union = try addZIRUnOp(mod, scope, src, .deref, err_union_ptr); - const cond = try addZIRUnOp(mod, scope, src, .iserr, err_union); +fn orelseExpr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.SimpleInfixOp) InnerError!*zir.Inst { + return orelseCatchExpr(mod, scope, rl, node.lhs, node.op_token, .isnull, .unwrap_optional_unsafe, node.rhs, null); +} + +fn orelseCatchExpr( + mod: *Module, + scope: *Scope, + rl: ResultLoc, + lhs: *ast.Node, + op_token: ast.TokenIndex, + cond_op: zir.Inst.Tag, + unwrap_op: zir.Inst.Tag, + rhs: *ast.Node, + payload_node: ?*ast.Node, +) InnerError!*zir.Inst { + const tree = scope.tree(); + const src = tree.token_locs[op_token].start; + + const operand_ptr = try expr(mod, scope, .ref, lhs); + // TODO we could avoid an unnecessary copy if .iserr, .isnull took a pointer + const err_union = try addZIRUnOp(mod, scope, src, .deref, operand_ptr); + const cond = try addZIRUnOp(mod, scope, src, cond_op, err_union); var block_scope: Scope.GenZIR = .{ .parent = scope, @@ -825,55 +843,55 @@ fn catchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Catch) .inferred_ptr, 
.bitcasted_ptr, .block_ptr => .{ .block_ptr = block }, }; - var err_scope: Scope.GenZIR = .{ + var then_scope: Scope.GenZIR = .{ .parent = scope, .decl = block_scope.decl, .arena = block_scope.arena, .instructions = .{}, }; - defer err_scope.instructions.deinit(mod.gpa); + defer then_scope.instructions.deinit(mod.gpa); var err_val_scope: Scope.LocalVal = undefined; - const err_sub_scope = blk: { - const payload = node.payload orelse - break :blk &err_scope.base; + const then_sub_scope = blk: { + const payload = payload_node orelse + break :blk &then_scope.base; const err_name = tree.tokenSlice(payload.castTag(.Payload).?.error_symbol.firstToken()); if (mem.eql(u8, err_name, "_")) - break :blk &err_scope.base; + break :blk &then_scope.base; - const unwrapped_err_ptr = try addZIRUnOp(mod, &err_scope.base, src, .unwrap_err_code, err_union_ptr); + const unwrapped_err_ptr = try addZIRUnOp(mod, &then_scope.base, src, .unwrap_err_code, operand_ptr); err_val_scope = .{ - .parent = &err_scope.base, - .gen_zir = &err_scope, + .parent = &then_scope.base, + .gen_zir = &then_scope, .name = err_name, - .inst = try addZIRUnOp(mod, &err_scope.base, src, .deref, unwrapped_err_ptr), + .inst = try addZIRUnOp(mod, &then_scope.base, src, .deref, unwrapped_err_ptr), }; break :blk &err_val_scope.base; }; - _ = try addZIRInst(mod, &err_scope.base, src, zir.Inst.Break, .{ + _ = try addZIRInst(mod, &then_scope.base, src, zir.Inst.Break, .{ .block = block, - .operand = try expr(mod, err_sub_scope, branch_rl, node.rhs), + .operand = try expr(mod, then_sub_scope, branch_rl, rhs), }, .{}); - var not_err_scope: Scope.GenZIR = .{ + var else_scope: Scope.GenZIR = .{ .parent = scope, .decl = block_scope.decl, .arena = block_scope.arena, .instructions = .{}, }; - defer not_err_scope.instructions.deinit(mod.gpa); + defer else_scope.instructions.deinit(mod.gpa); - const unwrapped_payload = try addZIRUnOp(mod, ¬_err_scope.base, src, .unwrap_err_unsafe, err_union_ptr); - _ = try addZIRInst(mod, 
¬_err_scope.base, src, zir.Inst.Break, .{ + const unwrapped_payload = try addZIRUnOp(mod, &else_scope.base, src, unwrap_op, operand_ptr); + _ = try addZIRInst(mod, &else_scope.base, src, zir.Inst.Break, .{ .block = block, .operand = unwrapped_payload, }, .{}); - condbr.positionals.then_body = .{ .instructions = try err_scope.arena.dupe(*zir.Inst, err_scope.instructions.items) }; - condbr.positionals.else_body = .{ .instructions = try not_err_scope.arena.dupe(*zir.Inst, not_err_scope.instructions.items) }; - return rlWrap(mod, scope, rl, &block.base); + condbr.positionals.then_body = .{ .instructions = try then_scope.arena.dupe(*zir.Inst, then_scope.instructions.items) }; + condbr.positionals.else_body = .{ .instructions = try else_scope.arena.dupe(*zir.Inst, else_scope.instructions.items) }; + return rlWrapPtr(mod, scope, rl, &block.base); } /// Return whether the identifier names of two tokens are equal. Resolves @"" tokens without allocating. From 2a628fd401bf057a71175c8b723375fd4f375a84 Mon Sep 17 00:00:00 2001 From: Vexu Date: Fri, 28 Aug 2020 14:55:04 +0300 Subject: [PATCH 23/35] stage2: astgen slice --- src-self-hosted/astgen.zig | 32 +++++++++++++++++++++++++++++++- src-self-hosted/zir.zig | 22 ++++++++++++++++++++++ src-self-hosted/zir_sema.zig | 10 ++++++++++ 3 files changed, 63 insertions(+), 1 deletion(-) diff --git a/src-self-hosted/astgen.zig b/src-self-hosted/astgen.zig index bb56bf34b1..2c091a86ec 100644 --- a/src-self-hosted/astgen.zig +++ b/src-self-hosted/astgen.zig @@ -275,6 +275,7 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerEr .ErrorType => return rlWrap(mod, scope, rl, try errorType(mod, scope, node.castTag(.ErrorType).?)), .For => return forExpr(mod, scope, rl, node.castTag(.For).?), .ArrayAccess => return arrayAccess(mod, scope, rl, node.castTag(.ArrayAccess).?), + .Slice => return rlWrap(mod, scope, rl, try sliceExpr(mod, scope, node.castTag(.Slice).?)), .Catch => return catchExpr(mod, scope, rl, 
node.castTag(.Catch).?), .Comptime => return comptimeKeyword(mod, scope, rl, node.castTag(.Comptime).?), .OrElse => return orelseExpr(mod, scope, rl, node.castTag(.OrElse).?), @@ -284,7 +285,6 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerEr .Await => return mod.failNode(scope, node, "TODO implement astgen.expr for .Await", .{}), .Resume => return mod.failNode(scope, node, "TODO implement astgen.expr for .Resume", .{}), .Try => return mod.failNode(scope, node, "TODO implement astgen.expr for .Try", .{}), - .Slice => return mod.failNode(scope, node, "TODO implement astgen.expr for .Slice", .{}), .ArrayInitializer => return mod.failNode(scope, node, "TODO implement astgen.expr for .ArrayInitializer", .{}), .ArrayInitializerDot => return mod.failNode(scope, node, "TODO implement astgen.expr for .ArrayInitializerDot", .{}), .StructInitializer => return mod.failNode(scope, node, "TODO implement astgen.expr for .StructInitializer", .{}), @@ -951,6 +951,36 @@ fn arrayAccess(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Array return rlWrapPtr(mod, scope, rl, try addZIRInst(mod, scope, src, zir.Inst.ElemPtr, .{ .array_ptr = array_ptr, .index = index }, .{})); } +fn sliceExpr(mod: *Module, scope: *Scope, node: *ast.Node.Slice) InnerError!*zir.Inst { + const tree = scope.tree(); + const src = tree.token_locs[node.rtoken].start; + + const usize_type = try addZIRInstConst(mod, scope, src, .{ + .ty = Type.initTag(.type), + .val = Value.initTag(.usize_type), + }); + + const array_ptr = try expr(mod, scope, .ref, node.lhs); + const start = try expr(mod, scope, .{ .ty = usize_type }, node.start); + + if (node.end == null and node.sentinel == null) { + return try addZIRBinOp(mod, scope, src, .slice_start, array_ptr, start); + } + + const end = if (node.end) |end| try expr(mod, scope, .{ .ty = usize_type }, end) else null; + // we could get the child type here, but it is easier to just do it in semantic analysis. 
+ const sentinel = if (node.sentinel) |sentinel| try expr(mod, scope, .none, sentinel) else null; + + return try addZIRInst( + mod, + scope, + src, + zir.Inst.Slice, + .{ .array_ptr = array_ptr, .start = start }, + .{ .end = end, .sentinel = sentinel }, + ); +} + fn deref(mod: *Module, scope: *Scope, node: *ast.Node.SimpleSuffixOp) InnerError!*zir.Inst { const tree = scope.tree(); const src = tree.token_locs[node.rtoken].start; diff --git a/src-self-hosted/zir.zig b/src-self-hosted/zir.zig index 04d3393626..9d0a5b825e 100644 --- a/src-self-hosted/zir.zig +++ b/src-self-hosted/zir.zig @@ -231,6 +231,10 @@ pub const Inst = struct { const_slice_type, /// Create a pointer type with attributes ptr_type, + /// Slice operation `array_ptr[start..end:sentinel]` + slice, + /// Slice operation with just start `lhs[rhs..]` + slice_start, /// Write a value to a pointer. For loading, see `deref`. store, /// String Literal. Makes an anonymous Decl and then takes a pointer to it. @@ -343,6 +347,7 @@ pub const Inst = struct { .xor, .error_union_type, .merge_error_sets, + .slice_start, => BinOp, .block, @@ -380,6 +385,7 @@ pub const Inst = struct { .ptr_type => PtrType, .enum_literal => EnumLiteral, .error_set => ErrorSet, + .slice => Slice, }; } @@ -481,6 +487,8 @@ pub const Inst = struct { .error_union_type, .bitnot, .error_set, + .slice, + .slice_start, => false, .@"break", @@ -961,6 +969,20 @@ pub const Inst = struct { }, kw_args: struct {}, }; + + pub const Slice = struct { + pub const base_tag = Tag.slice; + base: Inst, + + positionals: struct { + array_ptr: *Inst, + start: *Inst, + }, + kw_args: struct { + end: ?*Inst = null, + sentinel: ?*Inst = null, + }, + }; }; pub const ErrorMsg = struct { diff --git a/src-self-hosted/zir_sema.zig b/src-self-hosted/zir_sema.zig index 88a130c1db..012bc63581 100644 --- a/src-self-hosted/zir_sema.zig +++ b/src-self-hosted/zir_sema.zig @@ -132,6 +132,8 @@ pub fn analyzeInst(mod: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError! 
.error_union_type => return analyzeInstErrorUnionType(mod, scope, old_inst.castTag(.error_union_type).?), .anyframe_type => return analyzeInstAnyframeType(mod, scope, old_inst.castTag(.anyframe_type).?), .error_set => return analyzeInstErrorSet(mod, scope, old_inst.castTag(.error_set).?), + .slice => return analyzeInstSlice(mod, scope, old_inst.castTag(.slice).?), + .slice_start => return analyzeInstSliceStart(mod, scope, old_inst.castTag(.slice_start).?), } } @@ -1172,6 +1174,14 @@ fn analyzeInstElemPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.ElemPtr) Inne return mod.fail(scope, inst.base.src, "TODO implement more analyze elemptr", .{}); } +fn analyzeInstSlice(mod: *Module, scope: *Scope, inst: *zir.Inst.Slice) InnerError!*Inst { + return mod.fail(scope, inst.base.src, "TODO implement analyzeInstSlice", .{}); +} + +fn analyzeInstSliceStart(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst { + return mod.fail(scope, inst.base.src, "TODO implement analyzeInstSliceStart", .{}); +} + fn analyzeInstShl(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst { return mod.fail(scope, inst.base.src, "TODO implement analyzeInstShl", .{}); } From 6ab0ac161e02c2361b72d124423509556b9332fa Mon Sep 17 00:00:00 2001 From: Vexu Date: Fri, 28 Aug 2020 15:51:27 +0300 Subject: [PATCH 24/35] stage2: slice return type analysis --- src-self-hosted/Module.zig | 66 +++++++++++++++++++++ src-self-hosted/codegen.zig | 2 +- src-self-hosted/codegen/c.zig | 2 +- src-self-hosted/type.zig | 108 +++++++++++++++++++++++++++++----- src-self-hosted/zir.zig | 2 +- src-self-hosted/zir_sema.zig | 12 +++- 6 files changed, 172 insertions(+), 20 deletions(-) diff --git a/src-self-hosted/Module.zig b/src-self-hosted/Module.zig index c4b0f70d5c..93509c6674 100644 --- a/src-self-hosted/Module.zig +++ b/src-self-hosted/Module.zig @@ -2591,6 +2591,72 @@ pub fn analyzeIsErr(self: *Module, scope: *Scope, src: usize, operand: *Inst) In return self.fail(scope, src, "TODO 
implement analysis of iserr", .{}); } +pub fn analyzeSlice(self: *Module, scope: *Scope, src: usize, array_ptr: *Inst, start: *Inst, end_opt: ?*Inst, sentinel_opt: ?*Inst) InnerError!*Inst { + const ptr_child = switch (array_ptr.ty.zigTypeTag()) { + .Pointer => array_ptr.ty.elemType(), + else => return self.fail(scope, src, "expected pointer, found '{}'", .{array_ptr.ty}), + }; + + var array_type = ptr_child; + const elem_type = switch (ptr_child.zigTypeTag()) { + .Array => ptr_child.elemType(), + .Pointer => blk: { + if (ptr_child.isSinglePointer()) { + if (ptr_child.elemType().zigTypeTag() == .Array) { + array_type = ptr_child.elemType(); + break :blk ptr_child.elemType().elemType(); + } + + return self.fail(scope, src, "slice of single-item pointer", .{}); + } + break :blk ptr_child.elemType(); + }, + else => return self.fail(scope, src, "slice of non-array type '{}'", .{ptr_child}), + }; + + const slice_sentinel = if (sentinel_opt) |sentinel| blk: { + const casted = try self.coerce(scope, elem_type, sentinel); + break :blk try self.resolveConstValue(scope, casted); + } else null; + + var return_ptr_size: std.builtin.TypeInfo.Pointer.Size = .Slice; + var return_elem_type = elem_type; + if (end_opt) |end| { + if (end.value()) |end_val| { + if (start.value()) |start_val| { + const start_u64 = start_val.toUnsignedInt(); + const end_u64 = end_val.toUnsignedInt(); + if (start_u64 > end_u64) { + return self.fail(scope, src, "out of bounds slice", .{}); + } + + const len = end_u64 - start_u64; + const array_sentinel = if (array_type.zigTypeTag() == .Array and end_u64 == array_type.arrayLen()) + array_type.sentinel() + else + slice_sentinel; + return_elem_type = try self.arrayType(scope, len, array_sentinel, elem_type); + return_ptr_size = .One; + } + } + } + const return_type = try self.ptrType( + scope, + src, + return_elem_type, + if (end_opt == null) slice_sentinel else null, + 0, // TODO alignment + 0, + 0, + !ptr_child.isConstPtr(), + ptr_child.isAllowzeroPtr(), + 
ptr_child.isVolatilePtr(), + return_ptr_size, + ); + + return self.fail(scope, src, "TODO implement analysis of slice", .{}); +} + /// Asserts that lhs and rhs types are both numeric. pub fn cmpNumeric( self: *Module, diff --git a/src-self-hosted/codegen.zig b/src-self-hosted/codegen.zig index d6e3194c12..6f08c7a689 100644 --- a/src-self-hosted/codegen.zig +++ b/src-self-hosted/codegen.zig @@ -132,7 +132,7 @@ pub fn generateSymbol( .Array => { // TODO populate .debug_info for the array if (typed_value.val.cast(Value.Payload.Bytes)) |payload| { - if (typed_value.ty.arraySentinel()) |sentinel| { + if (typed_value.ty.sentinel()) |sentinel| { try code.ensureCapacity(code.items.len + payload.data.len + 1); code.appendSliceAssumeCapacity(payload.data); const prev_len = code.items.len; diff --git a/src-self-hosted/codegen/c.zig b/src-self-hosted/codegen/c.zig index c037c55289..34ddcfbb3b 100644 --- a/src-self-hosted/codegen/c.zig +++ b/src-self-hosted/codegen/c.zig @@ -85,7 +85,7 @@ fn genArray(file: *C, decl: *Decl) !void { const name = try map(file.base.allocator, mem.span(decl.name)); defer file.base.allocator.free(name); if (tv.val.cast(Value.Payload.Bytes)) |payload| - if (tv.ty.arraySentinel()) |sentinel| + if (tv.ty.sentinel()) |sentinel| if (sentinel.toUnsignedInt() == 0) try file.constants.writer().print("const char *const {} = \"{}\";\n", .{ name, payload.data }) else diff --git a/src-self-hosted/type.zig b/src-self-hosted/type.zig index 66a7961073..4966395512 100644 --- a/src-self-hosted/type.zig +++ b/src-self-hosted/type.zig @@ -191,8 +191,8 @@ pub const Type = extern union { return false; if (!a.elemType().eql(b.elemType())) return false; - const sentinel_a = a.arraySentinel(); - const sentinel_b = b.arraySentinel(); + const sentinel_a = a.sentinel(); + const sentinel_b = b.sentinel(); if (sentinel_a) |sa| { if (sentinel_b) |sb| { return sa.eql(sb); @@ -630,8 +630,8 @@ pub const Type = extern union { const payload = @fieldParentPtr(Payload.Pointer, "base", 
ty.ptr_otherwise); if (payload.sentinel) |some| switch (payload.size) { .One, .C => unreachable, - .Many => try out_stream.writeAll("[*:{}]"), - .Slice => try out_stream.writeAll("[:{}]"), + .Many => try out_stream.print("[*:{}]", .{some}), + .Slice => try out_stream.print("[:{}]", .{some}), } else switch (payload.size) { .One => try out_stream.writeAll("*"), .Many => try out_stream.writeAll("[*]"), @@ -1341,6 +1341,81 @@ pub const Type = extern union { }; } + pub fn isAllowzeroPtr(self: Type) bool { + return switch (self.tag()) { + .u8, + .i8, + .u16, + .i16, + .u32, + .i32, + .u64, + .i64, + .usize, + .isize, + .c_short, + .c_ushort, + .c_int, + .c_uint, + .c_long, + .c_ulong, + .c_longlong, + .c_ulonglong, + .c_longdouble, + .f16, + .f32, + .f64, + .f128, + .c_void, + .bool, + .void, + .type, + .anyerror, + .comptime_int, + .comptime_float, + .noreturn, + .@"null", + .@"undefined", + .array, + .array_sentinel, + .array_u8, + .array_u8_sentinel_0, + .fn_noreturn_no_args, + .fn_void_no_args, + .fn_naked_noreturn_no_args, + .fn_ccc_void_no_args, + .function, + .int_unsigned, + .int_signed, + .single_mut_pointer, + .single_const_pointer, + .many_const_pointer, + .many_mut_pointer, + .c_const_pointer, + .c_mut_pointer, + .const_slice, + .mut_slice, + .single_const_pointer_to_comptime_int, + .const_slice_u8, + .optional, + .optional_single_mut_pointer, + .optional_single_const_pointer, + .enum_literal, + .error_union, + .@"anyframe", + .anyframe_T, + .anyerror_void_error_union, + .error_set, + .error_set_single, + => false, + + .pointer => { + const payload = @fieldParentPtr(Payload.Pointer, "base", self.ptr_otherwise); + return payload.@"allowzero"; + }, + }; + } + /// Asserts that the type is an optional pub fn isPtrLikeOptional(self: Type) bool { switch (self.tag()) { @@ -1585,8 +1660,8 @@ pub const Type = extern union { }; } - /// Asserts the type is an array or vector. 
- pub fn arraySentinel(self: Type) ?Value { + /// Asserts the type is an array, pointer or vector. + pub fn sentinel(self: Type) ?Value { return switch (self.tag()) { .u8, .i8, @@ -1626,16 +1701,8 @@ pub const Type = extern union { .fn_naked_noreturn_no_args, .fn_ccc_void_no_args, .function, - .pointer, - .single_const_pointer, - .single_mut_pointer, - .many_const_pointer, - .many_mut_pointer, - .c_const_pointer, - .c_mut_pointer, .const_slice, .mut_slice, - .single_const_pointer_to_comptime_int, .const_slice_u8, .int_unsigned, .int_signed, @@ -1651,7 +1718,18 @@ pub const Type = extern union { .error_set_single, => unreachable, - .array, .array_u8 => return null, + .single_const_pointer, + .single_mut_pointer, + .many_const_pointer, + .many_mut_pointer, + .c_const_pointer, + .c_mut_pointer, + .single_const_pointer_to_comptime_int, + .array, + .array_u8, + => return null, + + .pointer => return self.cast(Payload.Pointer).?.sentinel, .array_sentinel => return self.cast(Payload.ArraySentinel).?.sentinel, .array_u8_sentinel_0 => return Value.initTag(.zero), }; diff --git a/src-self-hosted/zir.zig b/src-self-hosted/zir.zig index 9d0a5b825e..b6d7fab4c5 100644 --- a/src-self-hosted/zir.zig +++ b/src-self-hosted/zir.zig @@ -2596,7 +2596,7 @@ const EmitZIR = struct { var len_pl = Value.Payload.Int_u64{ .int = ty.arrayLen() }; const len = Value.initPayload(&len_pl.base); - const inst = if (ty.arraySentinel()) |sentinel| blk: { + const inst = if (ty.sentinel()) |sentinel| blk: { const inst = try self.arena.allocator.create(Inst.ArrayTypeSentinel); inst.* = .{ .base = .{ diff --git a/src-self-hosted/zir_sema.zig b/src-self-hosted/zir_sema.zig index 012bc63581..c99da39c04 100644 --- a/src-self-hosted/zir_sema.zig +++ b/src-self-hosted/zir_sema.zig @@ -1175,11 +1175,19 @@ fn analyzeInstElemPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.ElemPtr) Inne } fn analyzeInstSlice(mod: *Module, scope: *Scope, inst: *zir.Inst.Slice) InnerError!*Inst { - return mod.fail(scope, 
inst.base.src, "TODO implement analyzeInstSlice", .{}); + const array_ptr = try resolveInst(mod, scope, inst.positionals.array_ptr); + const start = try resolveInst(mod, scope, inst.positionals.start); + const end = if (inst.kw_args.end) |end| try resolveInst(mod, scope, end) else null; + const sentinel = if (inst.kw_args.sentinel) |sentinel| try resolveInst(mod, scope, sentinel) else null; + + return mod.analyzeSlice(scope, inst.base.src, array_ptr, start, end, sentinel); } fn analyzeInstSliceStart(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst { - return mod.fail(scope, inst.base.src, "TODO implement analyzeInstSliceStart", .{}); + const array_ptr = try resolveInst(mod, scope, inst.positionals.lhs); + const start = try resolveInst(mod, scope, inst.positionals.rhs); + + return mod.analyzeSlice(scope, inst.base.src, array_ptr, start, null, null); } fn analyzeInstShl(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst { From 1174cb15173208ead5f2ce828ade5b7d07ce6abe Mon Sep 17 00:00:00 2001 From: Vexu Date: Fri, 28 Aug 2020 15:56:24 +0300 Subject: [PATCH 25/35] stage2: fix tokenizer float bug --- lib/std/zig/tokenizer.zig | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/std/zig/tokenizer.zig b/lib/std/zig/tokenizer.zig index 47c7d23b35..86968c73b2 100644 --- a/lib/std/zig/tokenizer.zig +++ b/lib/std/zig/tokenizer.zig @@ -1175,6 +1175,7 @@ pub const Tokenizer = struct { }, .num_dot_dec => switch (c) { '.' 
=> { + result.id = .IntegerLiteral; self.index -= 1; state = .start; break; @@ -1183,7 +1184,6 @@ pub const Tokenizer = struct { state = .float_exponent_unsigned; }, '0'...'9' => { - result.id = .FloatLiteral; state = .float_fraction_dec; }, else => { @@ -1769,6 +1769,7 @@ test "tokenizer - number literals decimal" { testTokenize("7", &[_]Token.Id{.IntegerLiteral}); testTokenize("8", &[_]Token.Id{.IntegerLiteral}); testTokenize("9", &[_]Token.Id{.IntegerLiteral}); + testTokenize("1..", &[_]Token.Id{ .IntegerLiteral, .Ellipsis2 }); testTokenize("0a", &[_]Token.Id{ .Invalid, .Identifier }); testTokenize("9b", &[_]Token.Id{ .Invalid, .Identifier }); testTokenize("1z", &[_]Token.Id{ .Invalid, .Identifier }); From 6f0126e9573a6bde9cbe5b113208e0a515b2eee7 Mon Sep 17 00:00:00 2001 From: Vexu Date: Thu, 3 Sep 2020 14:58:47 +0300 Subject: [PATCH 26/35] stage2: split Scope.Container from Scope.File --- src-self-hosted/Module.zig | 142 ++++++++++++++++++++++------------- src-self-hosted/codegen.zig | 4 +- src-self-hosted/link/Elf.zig | 8 +- 3 files changed, 96 insertions(+), 58 deletions(-) diff --git a/src-self-hosted/Module.zig b/src-self-hosted/Module.zig index 93509c6674..8d7a4d7b36 100644 --- a/src-self-hosted/Module.zig +++ b/src-self-hosted/Module.zig @@ -125,7 +125,7 @@ pub const Decl = struct { /// mapping them to an address in the output file. /// Memory owned by this decl, using Module's allocator. name: [*:0]const u8, - /// The direct parent container of the Decl. This is either a `Scope.File` or `Scope.ZIRModule`. + /// The direct parent container of the Decl. This is either a `Scope.Container` or `Scope.ZIRModule`. /// Reference to externally owned memory. scope: *Scope, /// The AST Node decl index or ZIR Inst index that contains this declaration. 
@@ -217,9 +217,10 @@ pub const Decl = struct { pub fn src(self: Decl) usize { switch (self.scope.tag) { - .file => { - const file = @fieldParentPtr(Scope.File, "base", self.scope); - const tree = file.contents.tree; + .container => { + const container = @fieldParentPtr(Scope.Container, "base", self.scope); + const tree = container.file_scope.contents.tree; + // TODO Container should have it's own decls() const decl_node = tree.root_node.decls()[self.src_index]; return tree.token_locs[decl_node.firstToken()].start; }, @@ -229,6 +230,7 @@ pub const Decl = struct { const src_decl = module.decls[self.src_index]; return src_decl.inst.src; }, + .file, .block => unreachable, .gen_zir => unreachable, .local_val => unreachable, @@ -359,6 +361,7 @@ pub const Scope = struct { .local_ptr => return self.cast(LocalPtr).?.gen_zir.arena, .zir_module => return &self.cast(ZIRModule).?.contents.module.arena.allocator, .file => unreachable, + .container => unreachable, } } @@ -368,15 +371,16 @@ pub const Scope = struct { return switch (self.tag) { .block => self.cast(Block).?.decl, .gen_zir => self.cast(GenZIR).?.decl, - .local_val => return self.cast(LocalVal).?.gen_zir.decl, - .local_ptr => return self.cast(LocalPtr).?.gen_zir.decl, + .local_val => self.cast(LocalVal).?.gen_zir.decl, + .local_ptr => self.cast(LocalPtr).?.gen_zir.decl, .decl => self.cast(DeclAnalysis).?.decl, .zir_module => null, .file => null, + .container => null, }; } - /// Asserts the scope has a parent which is a ZIRModule or File and + /// Asserts the scope has a parent which is a ZIRModule or Container and /// returns it. 
pub fn namespace(self: *Scope) *Scope { switch (self.tag) { @@ -385,7 +389,8 @@ pub const Scope = struct { .local_val => return self.cast(LocalVal).?.gen_zir.decl.scope, .local_ptr => return self.cast(LocalPtr).?.gen_zir.decl.scope, .decl => return self.cast(DeclAnalysis).?.decl.scope, - .zir_module, .file => return self, + .file => return &self.cast(File).?.root_container.base, + .zir_module, .container => return self, } } @@ -399,8 +404,9 @@ pub const Scope = struct { .local_val => unreachable, .local_ptr => unreachable, .decl => unreachable, + .file => unreachable, .zir_module => return self.cast(ZIRModule).?.fullyQualifiedNameHash(name), - .file => return self.cast(File).?.fullyQualifiedNameHash(name), + .container => return self.cast(Container).?.fullyQualifiedNameHash(name), } } @@ -409,11 +415,12 @@ pub const Scope = struct { switch (self.tag) { .file => return self.cast(File).?.contents.tree, .zir_module => unreachable, - .decl => return self.cast(DeclAnalysis).?.decl.scope.cast(File).?.contents.tree, - .block => return self.cast(Block).?.decl.scope.cast(File).?.contents.tree, - .gen_zir => return self.cast(GenZIR).?.decl.scope.cast(File).?.contents.tree, - .local_val => return self.cast(LocalVal).?.gen_zir.decl.scope.cast(File).?.contents.tree, - .local_ptr => return self.cast(LocalPtr).?.gen_zir.decl.scope.cast(File).?.contents.tree, + .decl => return self.cast(DeclAnalysis).?.decl.scope.cast(Container).?.file_scope.contents.tree, + .block => return self.cast(Block).?.decl.scope.cast(Container).?.file_scope.contents.tree, + .gen_zir => return self.cast(GenZIR).?.decl.scope.cast(Container).?.file_scope.contents.tree, + .local_val => return self.cast(LocalVal).?.gen_zir.decl.scope.cast(Container).?.file_scope.contents.tree, + .local_ptr => return self.cast(LocalPtr).?.gen_zir.decl.scope.cast(Container).?.file_scope.contents.tree, + .container => return self.cast(Container).?.file_scope.contents.tree, } } @@ -427,13 +434,15 @@ pub const Scope = struct { 
.decl => unreachable, .zir_module => unreachable, .file => unreachable, + .container => unreachable, }; } - /// Asserts the scope has a parent which is a ZIRModule or File and + /// Asserts the scope has a parent which is a ZIRModule, Contaienr or File and /// returns the sub_file_path field. pub fn subFilePath(base: *Scope) []const u8 { switch (base.tag) { + .container => return @fieldParentPtr(Container, "base", base).file_scope.sub_file_path, .file => return @fieldParentPtr(File, "base", base).sub_file_path, .zir_module => return @fieldParentPtr(ZIRModule, "base", base).sub_file_path, .block => unreachable, @@ -453,11 +462,13 @@ pub const Scope = struct { .local_val => unreachable, .local_ptr => unreachable, .decl => unreachable, + .container => unreachable, } } pub fn getSource(base: *Scope, module: *Module) ![:0]const u8 { switch (base.tag) { + .container => return @fieldParentPtr(Container, "base", base).file_scope.getSource(module), .file => return @fieldParentPtr(File, "base", base).getSource(module), .zir_module => return @fieldParentPtr(ZIRModule, "base", base).getSource(module), .gen_zir => unreachable, @@ -471,8 +482,9 @@ pub const Scope = struct { /// Asserts the scope is a namespace Scope and removes the Decl from the namespace. pub fn removeDecl(base: *Scope, child: *Decl) void { switch (base.tag) { - .file => return @fieldParentPtr(File, "base", base).removeDecl(child), + .container => return @fieldParentPtr(Container, "base", base).removeDecl(child), .zir_module => return @fieldParentPtr(ZIRModule, "base", base).removeDecl(child), + .file => unreachable, .block => unreachable, .gen_zir => unreachable, .local_val => unreachable, @@ -499,6 +511,7 @@ pub const Scope = struct { .local_val => unreachable, .local_ptr => unreachable, .decl => unreachable, + .container => unreachable, } } @@ -515,6 +528,8 @@ pub const Scope = struct { zir_module, /// .zig source code. file, + /// struct, enum or union, every .file contains one of these. 
+ container, block, decl, gen_zir, @@ -522,6 +537,38 @@ pub const Scope = struct { local_ptr, }; + pub const Container = struct { + pub const base_tag: Tag = .container; + base: Scope = Scope{ .tag = base_tag }, + + file_scope: *Scope.File, + + /// Direct children of the file. + decls: ArrayListUnmanaged(*Decl), + + // TODO implement container types and put this in a status union + // ty: Type + + pub fn deinit(self: *Container, gpa: *Allocator) void { + self.decls.deinit(gpa); + self.* = undefined; + } + + pub fn removeDecl(self: *Container, child: *Decl) void { + for (self.decls.items) |item, i| { + if (item == child) { + _ = self.decls.swapRemove(i); + return; + } + } + } + + pub fn fullyQualifiedNameHash(self: *Container, name: []const u8) NameHash { + // TODO container scope qualified names. + return std.zig.hashSrc(name); + } + }; + pub const File = struct { pub const base_tag: Tag = .file; base: Scope = Scope{ .tag = base_tag }, @@ -544,8 +591,7 @@ pub const Scope = struct { loaded_success, }, - /// Direct children of the file. - decls: ArrayListUnmanaged(*Decl), + root_container: Container, pub fn unload(self: *File, gpa: *Allocator) void { switch (self.status) { @@ -569,20 +615,11 @@ pub const Scope = struct { } pub fn deinit(self: *File, gpa: *Allocator) void { - self.decls.deinit(gpa); + self.root_container.deinit(gpa); self.unload(gpa); self.* = undefined; } - pub fn removeDecl(self: *File, child: *Decl) void { - for (self.decls.items) |item, i| { - if (item == child) { - _ = self.decls.swapRemove(i); - return; - } - } - } - pub fn dumpSrc(self: *File, src: usize) void { const loc = std.zig.findLineColumn(self.source.bytes, src); std.debug.print("{}:{}:{}\n", .{ self.sub_file_path, loc.line + 1, loc.column + 1 }); @@ -604,11 +641,6 @@ pub const Scope = struct { .bytes => |bytes| return bytes, } } - - pub fn fullyQualifiedNameHash(self: *File, name: []const u8) NameHash { - // We don't have struct scopes yet so this is currently just a simple name hash. 
- return std.zig.hashSrc(name); - } }; pub const ZIRModule = struct { @@ -861,7 +893,10 @@ pub fn init(gpa: *Allocator, options: InitOptions) !Module { .source = .{ .unloaded = {} }, .contents = .{ .not_available = {} }, .status = .never_loaded, - .decls = .{}, + .root_container = .{ + .file_scope = root_scope, + .decls = .{}, + }, }; break :blk &root_scope.base; } else if (mem.endsWith(u8, options.root_pkg.root_src_path, ".zir")) { @@ -969,7 +1004,7 @@ pub fn update(self: *Module) !void { // to force a refresh we unload now. if (self.root_scope.cast(Scope.File)) |zig_file| { zig_file.unload(self.gpa); - self.analyzeRootSrcFile(zig_file) catch |err| switch (err) { + self.analyzeContainer(&zig_file.root_container) catch |err| switch (err) { error.AnalysisFail => { assert(self.totalErrorCount() != 0); }, @@ -1237,8 +1272,8 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool { const tracy = trace(@src()); defer tracy.end(); - const file_scope = decl.scope.cast(Scope.File).?; - const tree = try self.getAstTree(file_scope); + const container_scope = decl.scope.cast(Scope.Container).?; + const tree = try self.getAstTree(container_scope); const ast_node = tree.root_node.decls()[decl.src_index]; switch (ast_node.tag) { .FnProto => { @@ -1698,10 +1733,12 @@ fn getSrcModule(self: *Module, root_scope: *Scope.ZIRModule) !*zir.Module { } } -fn getAstTree(self: *Module, root_scope: *Scope.File) !*ast.Tree { +fn getAstTree(self: *Module, container_scope: *Scope.Container) !*ast.Tree { const tracy = trace(@src()); defer tracy.end(); + const root_scope = container_scope.file_scope; + switch (root_scope.status) { .never_loaded, .unloaded_success => { try self.failed_files.ensureCapacity(self.gpa, self.failed_files.items().len + 1); @@ -1743,24 +1780,24 @@ fn getAstTree(self: *Module, root_scope: *Scope.File) !*ast.Tree { } } -fn analyzeRootSrcFile(self: *Module, root_scope: *Scope.File) !void { +fn analyzeContainer(self: *Module, container_scope: *Scope.Container) !void { 
const tracy = trace(@src()); defer tracy.end(); // We may be analyzing it for the first time, or this may be // an incremental update. This code handles both cases. - const tree = try self.getAstTree(root_scope); + const tree = try self.getAstTree(container_scope); const decls = tree.root_node.decls(); try self.work_queue.ensureUnusedCapacity(decls.len); - try root_scope.decls.ensureCapacity(self.gpa, decls.len); + try container_scope.decls.ensureCapacity(self.gpa, decls.len); // Keep track of the decls that we expect to see in this file so that // we know which ones have been deleted. var deleted_decls = std.AutoArrayHashMap(*Decl, void).init(self.gpa); defer deleted_decls.deinit(); - try deleted_decls.ensureCapacity(root_scope.decls.items.len); - for (root_scope.decls.items) |file_decl| { + try deleted_decls.ensureCapacity(container_scope.decls.items.len); + for (container_scope.decls.items) |file_decl| { deleted_decls.putAssumeCapacityNoClobber(file_decl, {}); } @@ -1773,7 +1810,7 @@ fn analyzeRootSrcFile(self: *Module, root_scope: *Scope.File) !void { const name_loc = tree.token_locs[name_tok]; const name = tree.tokenSliceLoc(name_loc); - const name_hash = root_scope.fullyQualifiedNameHash(name); + const name_hash = container_scope.fullyQualifiedNameHash(name); const contents_hash = std.zig.hashSrc(tree.getNodeSource(src_decl)); if (self.decl_table.get(name_hash)) |decl| { // Update the AST Node index of the decl, even if its contents are unchanged, it may @@ -1801,8 +1838,8 @@ fn analyzeRootSrcFile(self: *Module, root_scope: *Scope.File) !void { } } } else { - const new_decl = try self.createNewDecl(&root_scope.base, name, decl_i, name_hash, contents_hash); - root_scope.decls.appendAssumeCapacity(new_decl); + const new_decl = try self.createNewDecl(&container_scope.base, name, decl_i, name_hash, contents_hash); + container_scope.decls.appendAssumeCapacity(new_decl); if (fn_proto.getExternExportInlineToken()) |maybe_export_token| { if 
(tree.token_ids[maybe_export_token] == .Keyword_export) { self.work_queue.writeItemAssumeCapacity(.{ .analyze_decl = new_decl }); @@ -1812,7 +1849,7 @@ fn analyzeRootSrcFile(self: *Module, root_scope: *Scope.File) !void { } else if (src_decl.castTag(.VarDecl)) |var_decl| { const name_loc = tree.token_locs[var_decl.name_token]; const name = tree.tokenSliceLoc(name_loc); - const name_hash = root_scope.fullyQualifiedNameHash(name); + const name_hash = container_scope.fullyQualifiedNameHash(name); const contents_hash = std.zig.hashSrc(tree.getNodeSource(src_decl)); if (self.decl_table.get(name_hash)) |decl| { // Update the AST Node index of the decl, even if its contents are unchanged, it may @@ -1828,8 +1865,8 @@ fn analyzeRootSrcFile(self: *Module, root_scope: *Scope.File) !void { decl.contents_hash = contents_hash; } } else { - const new_decl = try self.createNewDecl(&root_scope.base, name, decl_i, name_hash, contents_hash); - root_scope.decls.appendAssumeCapacity(new_decl); + const new_decl = try self.createNewDecl(&container_scope.base, name, decl_i, name_hash, contents_hash); + container_scope.decls.appendAssumeCapacity(new_decl); if (var_decl.getExternExportToken()) |maybe_export_token| { if (tree.token_ids[maybe_export_token] == .Keyword_export) { self.work_queue.writeItemAssumeCapacity(.{ .analyze_decl = new_decl }); @@ -1841,11 +1878,11 @@ fn analyzeRootSrcFile(self: *Module, root_scope: *Scope.File) !void { const name = try std.fmt.allocPrint(self.gpa, "__comptime_{}", .{name_index}); defer self.gpa.free(name); - const name_hash = root_scope.fullyQualifiedNameHash(name); + const name_hash = container_scope.fullyQualifiedNameHash(name); const contents_hash = std.zig.hashSrc(tree.getNodeSource(src_decl)); - const new_decl = try self.createNewDecl(&root_scope.base, name, decl_i, name_hash, contents_hash); - root_scope.decls.appendAssumeCapacity(new_decl); + const new_decl = try self.createNewDecl(&container_scope.base, name, decl_i, name_hash, contents_hash); + 
container_scope.decls.appendAssumeCapacity(new_decl); self.work_queue.writeItemAssumeCapacity(.{ .analyze_decl = new_decl }); } else if (src_decl.castTag(.ContainerField)) |container_field| { log.err("TODO: analyze container field", .{}); @@ -3124,6 +3161,7 @@ fn failWithOwnedErrorMsg(self: *Module, scope: *Scope, src: usize, err_msg: *Err self.failed_files.putAssumeCapacityNoClobber(scope, err_msg); }, .file => unreachable, + .container => unreachable, } return error.AnalysisFail; } diff --git a/src-self-hosted/codegen.zig b/src-self-hosted/codegen.zig index 6f08c7a689..bad1f59b88 100644 --- a/src-self-hosted/codegen.zig +++ b/src-self-hosted/codegen.zig @@ -436,8 +436,8 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type { try branch_stack.append(.{}); const src_data: struct {lbrace_src: usize, rbrace_src: usize, source: []const u8} = blk: { - if (module_fn.owner_decl.scope.cast(Module.Scope.File)) |scope_file| { - const tree = scope_file.contents.tree; + if (module_fn.owner_decl.scope.cast(Module.Scope.Container)) |container_scope| { + const tree = container_scope.file_scope.contents.tree; const fn_proto = tree.root_node.decls()[module_fn.owner_decl.src_index].castTag(.FnProto).?; const block = fn_proto.getBodyNode().?.castTag(.Block).?; const lbrace_src = tree.token_locs[block.lbrace].start; diff --git a/src-self-hosted/link/Elf.zig b/src-self-hosted/link/Elf.zig index 69f1260d20..451160630a 100644 --- a/src-self-hosted/link/Elf.zig +++ b/src-self-hosted/link/Elf.zig @@ -1656,8 +1656,8 @@ pub fn updateDecl(self: *Elf, module: *Module, decl: *Module.Decl) !void { try dbg_line_buffer.ensureCapacity(26); const line_off: u28 = blk: { - if (decl.scope.cast(Module.Scope.File)) |scope_file| { - const tree = scope_file.contents.tree; + if (decl.scope.cast(Module.Scope.Container)) |container_scope| { + const tree = container_scope.file_scope.contents.tree; const file_ast_decls = tree.root_node.decls(); // TODO Look into improving the performance here by adding a 
token-index-to-line // lookup table. Currently this involves scanning over the source code for newlines. @@ -2157,8 +2157,8 @@ pub fn updateDeclLineNumber(self: *Elf, module: *Module, decl: *const Module.Dec const tracy = trace(@src()); defer tracy.end(); - const scope_file = decl.scope.cast(Module.Scope.File).?; - const tree = scope_file.contents.tree; + const container_scope = decl.scope.cast(Module.Scope.Container).?; + const tree = container_scope.file_scope.contents.tree; const file_ast_decls = tree.root_node.decls(); // TODO Look into improving the performance here by adding a token-index-to-line // lookup table. Currently this involves scanning over the source code for newlines. From e8a2aecd2f3ed13d7b9fb74248d455752de19840 Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Thu, 3 Sep 2020 15:08:37 +0200 Subject: [PATCH 27/35] std: fix linux uid_t, use uid_t/gid_t in std.os - correct uid_t from i32 to u32 on linux - define uid_t and gid_t for OSes missing definitions - use uid_t/gid_t instead of plain u32s throughout std.os --- lib/std/child_process.zig | 4 +-- lib/std/os.zig | 8 ++--- lib/std/os/bits/darwin.zig | 8 +++-- lib/std/os/bits/dragonfly.zig | 13 ++++++-- lib/std/os/bits/freebsd.zig | 8 +++-- lib/std/os/bits/linux.zig | 8 ++--- lib/std/os/bits/linux/x86_64.zig | 5 +-- lib/std/os/linux.zig | 52 ++++++++++++++++---------------- lib/std/process.zig | 8 ++--- 9 files changed, 65 insertions(+), 49 deletions(-) diff --git a/lib/std/child_process.zig b/lib/std/child_process.zig index 287fc3e7cd..c64fefbc63 100644 --- a/lib/std/child_process.zig +++ b/lib/std/child_process.zig @@ -44,10 +44,10 @@ pub const ChildProcess = struct { stderr_behavior: StdIo, /// Set to change the user id when spawning the child process. - uid: if (builtin.os.tag == .windows) void else ?u32, + uid: if (builtin.os.tag == .windows or builtin.os.tag == .wasi) void else ?os.uid_t, /// Set to change the group id when spawning the child process. 
- gid: if (builtin.os.tag == .windows) void else ?u32, + gid: if (builtin.os.tag == .windows or builtin.os.tag == .wasi) void else ?os.gid_t, /// Set to change the current working directory when spawning the child process. cwd: ?[]const u8, diff --git a/lib/std/os.zig b/lib/std/os.zig index e8431c386b..3574468a38 100644 --- a/lib/std/os.zig +++ b/lib/std/os.zig @@ -2518,7 +2518,7 @@ pub const SetIdError = error{ PermissionDenied, } || UnexpectedError; -pub fn setuid(uid: u32) SetIdError!void { +pub fn setuid(uid: uid_t) SetIdError!void { switch (errno(system.setuid(uid))) { 0 => return, EAGAIN => return error.ResourceLimitReached, @@ -2528,7 +2528,7 @@ pub fn setuid(uid: u32) SetIdError!void { } } -pub fn setreuid(ruid: u32, euid: u32) SetIdError!void { +pub fn setreuid(ruid: uid_t, euid: uid_t) SetIdError!void { switch (errno(system.setreuid(ruid, euid))) { 0 => return, EAGAIN => return error.ResourceLimitReached, @@ -2538,7 +2538,7 @@ pub fn setreuid(ruid: u32, euid: u32) SetIdError!void { } } -pub fn setgid(gid: u32) SetIdError!void { +pub fn setgid(gid: gid_t) SetIdError!void { switch (errno(system.setgid(gid))) { 0 => return, EAGAIN => return error.ResourceLimitReached, @@ -2548,7 +2548,7 @@ pub fn setgid(gid: u32) SetIdError!void { } } -pub fn setregid(rgid: u32, egid: u32) SetIdError!void { +pub fn setregid(rgid: gid_t, egid: gid_t) SetIdError!void { switch (errno(system.setregid(rgid, egid))) { 0 => return, EAGAIN => return error.ResourceLimitReached, diff --git a/lib/std/os/bits/darwin.zig b/lib/std/os/bits/darwin.zig index 375127f278..ce73d2a6dc 100644 --- a/lib/std/os/bits/darwin.zig +++ b/lib/std/os/bits/darwin.zig @@ -7,9 +7,13 @@ const std = @import("../../std.zig"); const assert = std.debug.assert; const maxInt = std.math.maxInt; +// See: https://opensource.apple.com/source/xnu/xnu-6153.141.1/bsd/sys/_types.h.auto.html +// TODO: audit mode_t/pid_t, should likely be u16/i32 pub const fd_t = c_int; pub const pid_t = c_int; pub const mode_t = c_uint; 
+pub const uid_t = u32; +pub const gid_t = u32; pub const in_port_t = u16; pub const sa_family_t = u8; @@ -79,8 +83,8 @@ pub const Stat = extern struct { mode: u16, nlink: u16, ino: ino_t, - uid: u32, - gid: u32, + uid: uid_t, + gid: gid_t, rdev: i32, atimesec: isize, atimensec: isize, diff --git a/lib/std/os/bits/dragonfly.zig b/lib/std/os/bits/dragonfly.zig index 8b6d6be212..1412aa5c41 100644 --- a/lib/std/os/bits/dragonfly.zig +++ b/lib/std/os/bits/dragonfly.zig @@ -9,10 +9,17 @@ const maxInt = std.math.maxInt; pub fn S_ISCHR(m: u32) bool { return m & S_IFMT == S_IFCHR; } + +// See: +// - https://gitweb.dragonflybsd.org/dragonfly.git/blob/HEAD:/include/unistd.h +// - https://gitweb.dragonflybsd.org/dragonfly.git/blob/HEAD:/sys/sys/types.h +// TODO: mode_t should probably be changed to a u16, audit pid_t/off_t as well pub const fd_t = c_int; pub const pid_t = c_int; pub const off_t = c_long; pub const mode_t = c_uint; +pub const uid_t = u32; +pub const gid_t = u32; pub const ENOTSUP = EOPNOTSUPP; pub const EWOULDBLOCK = EAGAIN; @@ -151,8 +158,8 @@ pub const Stat = extern struct { dev: c_uint, mode: c_ushort, padding1: u16, - uid: c_uint, - gid: c_uint, + uid: uid_t, + gid: gid_t, rdev: c_uint, atim: timespec, mtim: timespec, @@ -511,7 +518,7 @@ pub const siginfo_t = extern struct { si_errno: c_int, si_code: c_int, si_pid: c_int, - si_uid: c_uint, + si_uid: uid_t, si_status: c_int, si_addr: ?*c_void, si_value: union_sigval, diff --git a/lib/std/os/bits/freebsd.zig b/lib/std/os/bits/freebsd.zig index 22edf4b9d1..32936f7515 100644 --- a/lib/std/os/bits/freebsd.zig +++ b/lib/std/os/bits/freebsd.zig @@ -6,8 +6,12 @@ const std = @import("../../std.zig"); const maxInt = std.math.maxInt; +// See https://svnweb.freebsd.org/base/head/sys/sys/_types.h?view=co +// TODO: audit pid_t/mode_t. 
They should likely be i32 and u16, respectively pub const fd_t = c_int; pub const pid_t = c_int; +pub const uid_t = u32; +pub const gid_t = u32; pub const mode_t = c_uint; pub const socklen_t = u32; @@ -128,8 +132,8 @@ pub const Stat = extern struct { mode: u16, __pad0: u16, - uid: u32, - gid: u32, + uid: uid_t, + gid: gid_t, __pad1: u32, rdev: u64, diff --git a/lib/std/os/bits/linux.zig b/lib/std/os/bits/linux.zig index 1327eaa330..1e12a278f3 100644 --- a/lib/std/os/bits/linux.zig +++ b/lib/std/os/bits/linux.zig @@ -29,7 +29,7 @@ const is_mips = builtin.arch.isMIPS(); pub const pid_t = i32; pub const fd_t = i32; -pub const uid_t = i32; +pub const uid_t = u32; pub const gid_t = u32; pub const clock_t = isize; @@ -853,7 +853,7 @@ pub const signalfd_siginfo = extern struct { errno: i32, code: i32, pid: u32, - uid: u32, + uid: uid_t, fd: i32, tid: u32, band: u32, @@ -1491,10 +1491,10 @@ pub const Statx = extern struct { nlink: u32, /// User ID of owner - uid: u32, + uid: uid_t, /// Group ID of owner - gid: u32, + gid: gid_t, /// File type and mode mode: u16, diff --git a/lib/std/os/bits/linux/x86_64.zig b/lib/std/os/bits/linux/x86_64.zig index 0800feeddf..0f01c40813 100644 --- a/lib/std/os/bits/linux/x86_64.zig +++ b/lib/std/os/bits/linux/x86_64.zig @@ -7,6 +7,7 @@ const std = @import("../../../std.zig"); const pid_t = linux.pid_t; const uid_t = linux.uid_t; +const gid_t = linux.gid_t; const clock_t = linux.clock_t; const stack_t = linux.stack_t; const sigset_t = linux.sigset_t; @@ -523,8 +524,8 @@ pub const Stat = extern struct { nlink: usize, mode: u32, - uid: u32, - gid: u32, + uid: uid_t, + gid: gid_t, __pad0: u32, rdev: u64, size: off_t, diff --git a/lib/std/os/linux.zig b/lib/std/os/linux.zig index 13094b3a3a..5e2a554018 100644 --- a/lib/std/os/linux.zig +++ b/lib/std/os/linux.zig @@ -655,7 +655,7 @@ pub fn nanosleep(req: *const timespec, rem: ?*timespec) usize { return syscall2(.nanosleep, @ptrToInt(req), @ptrToInt(rem)); } -pub fn setuid(uid: u32) usize { +pub 
fn setuid(uid: uid_t) usize { if (@hasField(SYS, "setuid32")) { return syscall1(.setuid32, uid); } else { @@ -663,7 +663,7 @@ pub fn setuid(uid: u32) usize { } } -pub fn setgid(gid: u32) usize { +pub fn setgid(gid: gid_t) usize { if (@hasField(SYS, "setgid32")) { return syscall1(.setgid32, gid); } else { @@ -671,7 +671,7 @@ pub fn setgid(gid: u32) usize { } } -pub fn setreuid(ruid: u32, euid: u32) usize { +pub fn setreuid(ruid: uid_t, euid: uid_t) usize { if (@hasField(SYS, "setreuid32")) { return syscall2(.setreuid32, ruid, euid); } else { @@ -679,7 +679,7 @@ pub fn setreuid(ruid: u32, euid: u32) usize { } } -pub fn setregid(rgid: u32, egid: u32) usize { +pub fn setregid(rgid: gid_t, egid: gid_t) usize { if (@hasField(SYS, "setregid32")) { return syscall2(.setregid32, rgid, egid); } else { @@ -687,47 +687,47 @@ pub fn setregid(rgid: u32, egid: u32) usize { } } -pub fn getuid() u32 { +pub fn getuid() uid_t { if (@hasField(SYS, "getuid32")) { - return @as(u32, syscall0(.getuid32)); + return @as(uid_t, syscall0(.getuid32)); } else { - return @as(u32, syscall0(.getuid)); + return @as(uid_t, syscall0(.getuid)); } } -pub fn getgid() u32 { +pub fn getgid() gid_t { if (@hasField(SYS, "getgid32")) { - return @as(u32, syscall0(.getgid32)); + return @as(gid_t, syscall0(.getgid32)); } else { - return @as(u32, syscall0(.getgid)); + return @as(gid_t, syscall0(.getgid)); } } -pub fn geteuid() u32 { +pub fn geteuid() uid_t { if (@hasField(SYS, "geteuid32")) { - return @as(u32, syscall0(.geteuid32)); + return @as(uid_t, syscall0(.geteuid32)); } else { - return @as(u32, syscall0(.geteuid)); + return @as(uid_t, syscall0(.geteuid)); } } -pub fn getegid() u32 { +pub fn getegid() gid_t { if (@hasField(SYS, "getegid32")) { - return @as(u32, syscall0(.getegid32)); + return @as(gid_t, syscall0(.getegid32)); } else { - return @as(u32, syscall0(.getegid)); + return @as(gid_t, syscall0(.getegid)); } } -pub fn seteuid(euid: u32) usize { - return setreuid(std.math.maxInt(u32), euid); +pub fn 
seteuid(euid: uid_t) usize { + return setresuid(std.math.maxInt(uid_t), euid); } -pub fn setegid(egid: u32) usize { - return setregid(std.math.maxInt(u32), egid); +pub fn setegid(egid: gid_t) usize { + return setregid(std.math.maxInt(gid_t), egid); } -pub fn getresuid(ruid: *u32, euid: *u32, suid: *u32) usize { +pub fn getresuid(ruid: *uid_t, euid: *uid_t, suid: *uid_t) usize { if (@hasField(SYS, "getresuid32")) { return syscall3(.getresuid32, @ptrToInt(ruid), @ptrToInt(euid), @ptrToInt(suid)); } else { @@ -735,7 +735,7 @@ pub fn getresuid(ruid: *u32, euid: *u32, suid: *u32) usize { } } -pub fn getresgid(rgid: *u32, egid: *u32, sgid: *u32) usize { +pub fn getresgid(rgid: *gid_t, egid: *gid_t, sgid: *gid_t) usize { if (@hasField(SYS, "getresgid32")) { return syscall3(.getresgid32, @ptrToInt(rgid), @ptrToInt(egid), @ptrToInt(sgid)); } else { @@ -743,7 +743,7 @@ pub fn getresgid(rgid: *u32, egid: *u32, sgid: *u32) usize { } } -pub fn setresuid(ruid: u32, euid: u32, suid: u32) usize { +pub fn setresuid(ruid: uid_t, euid: uid_t, suid: uid_t) usize { if (@hasField(SYS, "setresuid32")) { return syscall3(.setresuid32, ruid, euid, suid); } else { @@ -751,7 +751,7 @@ pub fn setresuid(ruid: u32, euid: u32, suid: u32) usize { } } -pub fn setresgid(rgid: u32, egid: u32, sgid: u32) usize { +pub fn setresgid(rgid: gid_t, egid: gid_t, sgid: gid_t) usize { if (@hasField(SYS, "setresgid32")) { return syscall3(.setresgid32, rgid, egid, sgid); } else { @@ -759,7 +759,7 @@ pub fn setresgid(rgid: u32, egid: u32, sgid: u32) usize { } } -pub fn getgroups(size: usize, list: *u32) usize { +pub fn getgroups(size: usize, list: *gid_t) usize { if (@hasField(SYS, "getgroups32")) { return syscall2(.getgroups32, size, @ptrToInt(list)); } else { @@ -767,7 +767,7 @@ pub fn getgroups(size: usize, list: *u32) usize { } } -pub fn setgroups(size: usize, list: *const u32) usize { +pub fn setgroups(size: usize, list: *const gid_t) usize { if (@hasField(SYS, "setgroups32")) { return syscall2(.setgroups32, 
size, @ptrToInt(list)); } else { diff --git a/lib/std/process.zig b/lib/std/process.zig index 69befa2fc8..9cb571714c 100644 --- a/lib/std/process.zig +++ b/lib/std/process.zig @@ -578,8 +578,8 @@ fn testWindowsCmdLine(input_cmd_line: [*]const u8, expected_args: []const []cons } pub const UserInfo = struct { - uid: u32, - gid: u32, + uid: os.uid_t, + gid: os.gid_t, }; /// POSIX function which gets a uid from username. @@ -607,8 +607,8 @@ pub fn posixGetUserInfo(name: []const u8) !UserInfo { var buf: [std.mem.page_size]u8 = undefined; var name_index: usize = 0; var state = State.Start; - var uid: u32 = 0; - var gid: u32 = 0; + var uid: os.uid_t = 0; + var gid: os.gid_t = 0; while (true) { const amt_read = try reader.read(buf[0..]); From 01a365f1b008fc1546f99c339dbae99521c169cd Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Thu, 3 Sep 2020 15:16:26 +0200 Subject: [PATCH 28/35] std: ensure seteuid/setegid do not change saved id --- lib/std/os/linux.zig | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/lib/std/os/linux.zig b/lib/std/os/linux.zig index 5e2a554018..3fc8006d06 100644 --- a/lib/std/os/linux.zig +++ b/lib/std/os/linux.zig @@ -720,11 +720,25 @@ pub fn getegid() gid_t { } pub fn seteuid(euid: uid_t) usize { - return setresuid(std.math.maxInt(uid_t), euid); + // We use setresuid here instead of setreuid to ensure that the saved uid + // is not changed. This is what musl and recent glibc versions do as well. + // + // The setresuid(2) man page says that if -1 is passed the corresponding + // id will not be changed. Since uid_t is unsigned, this wraps around to the + // max value in C. + comptime assert(@typeInfo(uid_t) == .Int and !@typeInfo(uid_t).Int.is_signed); + return setresuid(std.math.maxInt(uid_t), euid, std.math.maxInt(uid_t)); } pub fn setegid(egid: gid_t) usize { - return setregid(std.math.maxInt(gid_t), egid); + // We use setresgid here instead of setregid to ensure that the saved uid + // is not changed. 
This is what musl and recent glibc versions do as well. + // + // The setresgid(2) man page says that if -1 is passed the corresponding + // id will not be changed. Since gid_t is unsigned, this wraps around to the + // max value in C. + comptime assert(@typeInfo(gid_t) == .Int and !@typeInfo(gid_t).Int.is_signed); + return setresgid(std.math.maxInt(gid_t), egid, std.math.maxInt(gid_t)); } pub fn getresuid(ruid: *uid_t, euid: *uid_t, suid: *uid_t) usize { From d0d6647fdbfbe1a5764c2624e46eee35052d0da6 Mon Sep 17 00:00:00 2001 From: Isaac Freund Date: Thu, 3 Sep 2020 15:22:43 +0200 Subject: [PATCH 29/35] std: add seteuid/setegid to std.os Currently these are only implemented for linux. --- lib/std/os.zig | 25 ++++++++++++++++++++++--- 1 file changed, 22 insertions(+), 3 deletions(-) diff --git a/lib/std/os.zig b/lib/std/os.zig index 3574468a38..8b923989e6 100644 --- a/lib/std/os.zig +++ b/lib/std/os.zig @@ -2512,11 +2512,12 @@ pub fn readlinkatZ(dirfd: fd_t, file_path: [*:0]const u8, out_buffer: []u8) Read } } -pub const SetIdError = error{ - ResourceLimitReached, +pub const SetEidError = error{ InvalidUserId, PermissionDenied, -} || UnexpectedError; +}; + +pub const SetIdError = error{ResourceLimitReached} || SetEidError || UnexpectedError; pub fn setuid(uid: uid_t) SetIdError!void { switch (errno(system.setuid(uid))) { @@ -2528,6 +2529,15 @@ pub fn setuid(uid: uid_t) SetIdError!void { } } +pub fn seteuid(uid: uid_t) SetEidError!void { + switch (errno(system.seteuid(uid))) { + 0 => return, + EINVAL => return error.InvalidUserId, + EPERM => return error.PermissionDenied, + else => |err| return unexpectedErrno(err), + } +} + pub fn setreuid(ruid: uid_t, euid: uid_t) SetIdError!void { switch (errno(system.setreuid(ruid, euid))) { 0 => return, @@ -2548,6 +2558,15 @@ pub fn setgid(gid: gid_t) SetIdError!void { } } +pub fn setegid(gid: gid_t) SetEidError!void { + switch (errno(system.setegid(gid))) { + 0 => return, + EINVAL => return error.InvalidUserId, + EPERM => return
error.PermissionDenied, + else => |err| return unexpectedErrno(err), + } +} + pub fn setregid(rgid: gid_t, egid: gid_t) SetIdError!void { switch (errno(system.setregid(rgid, egid))) { 0 => return, From 969547902b49d6b21af762fb24ed591789b9d2a4 Mon Sep 17 00:00:00 2001 From: LemonBoy Date: Thu, 3 Sep 2020 10:22:35 +0200 Subject: [PATCH 30/35] std: Fix silent overflow in float parsing code A u64 can only hold 19 decimal digits, adjust the limit. --- lib/std/fmt/parse_float.zig | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/lib/std/fmt/parse_float.zig b/lib/std/fmt/parse_float.zig index 69557714f6..7e72e7ba51 100644 --- a/lib/std/fmt/parse_float.zig +++ b/lib/std/fmt/parse_float.zig @@ -37,7 +37,9 @@ const std = @import("../std.zig"); const ascii = std.ascii; -const max_digits = 25; +// The mantissa field in FloatRepr is 64bit wide and holds only 19 digits +// without overflowing +const max_digits = 19; const f64_plus_zero: u64 = 0x0000000000000000; const f64_minus_zero: u64 = 0x8000000000000000; @@ -409,6 +411,7 @@ test "fmt.parseFloat" { expect(approxEq(T, try parseFloat(T, "123142.1"), 123142.1, epsilon)); expect(approxEq(T, try parseFloat(T, "-123142.1124"), @as(T, -123142.1124), epsilon)); expect(approxEq(T, try parseFloat(T, "0.7062146892655368"), @as(T, 0.7062146892655368), epsilon)); + expect(approxEq(T, try parseFloat(T, "2.71828182845904523536"), @as(T, 2.718281828459045), epsilon)); } } } From 50e39069518a0c2643cd5e3189ad087b5fbed0c6 Mon Sep 17 00:00:00 2001 From: Kenta Iwasaki <63115601+lithdew@users.noreply.github.com> Date: Fri, 4 Sep 2020 02:57:08 +0900 Subject: [PATCH 31/35] os: return error.SocketNotListening for EINVAL on accept (#6226) --- lib/std/net.zig | 3 +++ lib/std/os.zig | 5 ++++- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/lib/std/net.zig b/lib/std/net.zig index 5a1407c35f..45d8f07f04 100644 --- a/lib/std/net.zig +++ b/lib/std/net.zig @@ -1641,6 +1641,9 @@ pub const StreamServer = struct { /// by the 
socket buffer limits, not by the system memory. SystemResources, + /// Socket is not listening for new connections. + SocketNotListening, + ProtocolFailure, /// Firewall rules forbid connection. diff --git a/lib/std/os.zig b/lib/std/os.zig index e8431c386b..2e4cc3aed0 100644 --- a/lib/std/os.zig +++ b/lib/std/os.zig @@ -2802,6 +2802,9 @@ pub const AcceptError = error{ /// by the socket buffer limits, not by the system memory. SystemResources, + /// Socket is not listening for new connections. + SocketNotListening, + ProtocolFailure, /// Firewall rules forbid connection. @@ -2870,7 +2873,7 @@ pub fn accept( EBADF => unreachable, // always a race condition ECONNABORTED => return error.ConnectionAborted, EFAULT => unreachable, - EINVAL => unreachable, + EINVAL => return error.SocketNotListening, ENOTSOCK => unreachable, EMFILE => return error.ProcessFdQuotaExceeded, ENFILE => return error.SystemFdQuotaExceeded, From 2a58e30bd5f522bf3077f556f47a1e28c537627e Mon Sep 17 00:00:00 2001 From: Lachlan Easton Date: Thu, 3 Sep 2020 20:16:12 +1000 Subject: [PATCH 32/35] std meta: fix use of alignOf in meta.cast --- lib/std/meta.zig | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/lib/std/meta.zig b/lib/std/meta.zig index aaa8e7ca78..73e0661498 100644 --- a/lib/std/meta.zig +++ b/lib/std/meta.zig @@ -705,34 +705,34 @@ pub fn Vector(comptime len: u32, comptime child: type) type { pub fn cast(comptime DestType: type, target: anytype) DestType { const TargetType = @TypeOf(target); switch (@typeInfo(DestType)) { - .Pointer => { + .Pointer => |dest_ptr| { switch (@typeInfo(TargetType)) { .Int, .ComptimeInt => { return @intToPtr(DestType, target); }, .Pointer => |ptr| { - return @ptrCast(DestType, @alignCast(ptr.alignment, target)); + return @ptrCast(DestType, @alignCast(dest_ptr.alignment, target)); }, .Optional => |opt| { if (@typeInfo(opt.child) == .Pointer) { - return @ptrCast(DestType, @alignCast(@alignOf(opt.child.Child), target)); + return 
@ptrCast(DestType, @alignCast(dest_ptr.alignment, target)); + } }, else => {}, } }, - .Optional => |opt| { - if (@typeInfo(opt.child) == .Pointer) { + .Optional => |dest_opt| { + if (@typeInfo(dest_opt.child) == .Pointer) { switch (@typeInfo(TargetType)) { .Int, .ComptimeInt => { return @intToPtr(DestType, target); }, - .Pointer => |ptr| { - return @ptrCast(DestType, @alignCast(ptr.alignment, target)); + .Pointer => { + return @ptrCast(DestType, @alignCast(@alignOf(dest_opt.child.Child), target)); }, .Optional => |target_opt| { if (@typeInfo(target_opt.child) == .Pointer) { - return @ptrCast(DestType, @alignCast(@alignOf(target_opt.child.Child), target)); + return @ptrCast(DestType, @alignCast(@alignOf(dest_opt.child.Child), target)); } }, else => {}, From dac1cd77505ef9fa493e069549c139d74e31081f Mon Sep 17 00:00:00 2001 From: Jakub Konka Date: Thu, 27 Aug 2020 08:44:13 +0200 Subject: [PATCH 33/35] Write out simple Mach-O object file This commit adds enough Mach-O linker implementation to write out simple Mach-O object file. Be warned however, the object file is largely incomplete: misses relocation info, debug symbols, etc. However, it seemed like a good starting point to get the basic understanding right.
Signed-off-by: Jakub Konka --- src-self-hosted/codegen.zig | 57 ++- src-self-hosted/link/MachO.zig | 816 ++++++++++++++++++++++++--------- 2 files changed, 647 insertions(+), 226 deletions(-) diff --git a/src-self-hosted/codegen.zig b/src-self-hosted/codegen.zig index d6e3194c12..d72ce5ee16 100644 --- a/src-self-hosted/codegen.zig +++ b/src-self-hosted/codegen.zig @@ -1443,7 +1443,57 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type { } } else if (self.bin_file.cast(link.File.MachO)) |macho_file| { switch (arch) { - .x86_64 => return self.fail(inst.base.src, "TODO implement codegen for call when linking with MachO for x86_64 arch", .{}), + .x86_64 => { + for (info.args) |mc_arg, arg_i| { + const arg = inst.args[arg_i]; + const arg_mcv = try self.resolveInst(inst.args[arg_i]); + // Here we do not use setRegOrMem even though the logic is similar, because + // the function call will move the stack pointer, so the offsets are different. + switch (mc_arg) { + .none => continue, + .register => |reg| { + try self.genSetReg(arg.src, reg, arg_mcv); + // TODO interact with the register allocator to mark the instruction as moved. 
+ }, + .stack_offset => { + // Here we need to emit instructions like this: + // mov qword ptr [rsp + stack_offset], x + return self.fail(inst.base.src, "TODO implement calling with parameters in memory", .{}); + }, + .ptr_stack_offset => { + return self.fail(inst.base.src, "TODO implement calling with MCValue.ptr_stack_offset arg", .{}); + }, + .ptr_embedded_in_code => { + return self.fail(inst.base.src, "TODO implement calling with MCValue.ptr_embedded_in_code arg", .{}); + }, + .undef => unreachable, + .immediate => unreachable, + .unreach => unreachable, + .dead => unreachable, + .embedded_in_code => unreachable, + .memory => unreachable, + .compare_flags_signed => unreachable, + .compare_flags_unsigned => unreachable, + } + } + + if (inst.func.cast(ir.Inst.Constant)) |func_inst| { + if (func_inst.val.cast(Value.Payload.Function)) |func_val| { + const func = func_val.func; + const got = &macho_file.sections.items[macho_file.got_section_index.?]; + const ptr_bytes = 8; + const got_addr = @intCast(u32, got.addr + func.owner_decl.link.macho.offset_table_index.? 
* ptr_bytes); + // ff 14 25 xx xx xx xx call [addr] + try self.code.ensureCapacity(self.code.items.len + 7); + self.code.appendSliceAssumeCapacity(&[3]u8{ 0xff, 0x14, 0x25 }); + mem.writeIntLittle(u32, self.code.addManyAsArrayAssumeCapacity(4), got_addr); + } else { + return self.fail(inst.base.src, "TODO implement calling bitcasted functions", .{}); + } + } else { + return self.fail(inst.base.src, "TODO implement calling runtime known function pointer", .{}); + } + }, .aarch64 => return self.fail(inst.base.src, "TODO implement codegen for call when linking with MachO for aarch64 arch", .{}), else => unreachable, } @@ -2486,6 +2536,11 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type { const got = &elf_file.program_headers.items[elf_file.phdr_got_index.?]; const got_addr = got.p_vaddr + decl.link.elf.offset_table_index * ptr_bytes; return MCValue{ .memory = got_addr }; + } else if (self.bin_file.cast(link.File.MachO)) |macho_file| { + const decl = payload.decl; + const got = &macho_file.sections.items[macho_file.got_section_index.?]; + const got_addr = got.addr + decl.link.macho.offset_table_index.? * ptr_bytes; + return MCValue{ .memory = got_addr }; } else { return self.fail(src, "TODO codegen non-ELF const Decl pointer", .{}); } diff --git a/src-self-hosted/link/MachO.zig b/src-self-hosted/link/MachO.zig index 047e62f950..93d7b2381a 100644 --- a/src-self-hosted/link/MachO.zig +++ b/src-self-hosted/link/MachO.zig @@ -18,36 +18,66 @@ const File = link.File; pub const base_tag: File.Tag = File.Tag.macho; +const LoadCommand = union(enum) { + Segment: macho.segment_command_64, + LinkeditData: macho.linkedit_data_command, + Symtab: macho.symtab_command, + Dysymtab: macho.dysymtab_command, + + pub fn cmdsize(self: LoadCommand) u32 { + return switch (self) { + .Segment => |x| x.cmdsize, + .LinkeditData => |x| x.cmdsize, + .Symtab => |x| x.cmdsize, + .Dysymtab => |x| x.cmdsize, + }; + } +}; + base: File, -/// List of all load command headers that are in the file. 
-/// We use it to track number and size of all commands needed by the header. -commands: std.ArrayListUnmanaged(macho.load_command) = std.ArrayListUnmanaged(macho.load_command){}, -command_file_offset: ?u64 = null, +/// Table of all load commands +load_commands: std.ArrayListUnmanaged(LoadCommand) = .{}, +segment_cmd_index: ?u16 = null, +symtab_cmd_index: ?u16 = null, +dysymtab_cmd_index: ?u16 = null, +data_in_code_cmd_index: ?u16 = null, -/// Stored in native-endian format, depending on target endianness needs to be bswapped on read/write. -/// Same order as in the file. -segments: std.ArrayListUnmanaged(macho.segment_command_64) = std.ArrayListUnmanaged(macho.segment_command_64){}, -/// Section (headers) *always* follow segment (load commands) directly! -sections: std.ArrayListUnmanaged(macho.section_64) = std.ArrayListUnmanaged(macho.section_64){}, +/// Table of all sections +sections: std.ArrayListUnmanaged(macho.section_64) = .{}, -/// Offset (index) into __TEXT segment load command. -text_segment_offset: ?u64 = null, -/// Offset (index) into __LINKEDIT segment load command. -linkedit_segment_offset: ?u664 = null, +/// __TEXT segment sections +text_section_index: ?u16 = null, +cstring_section_index: ?u16 = null, +const_text_section_index: ?u16 = null, +stubs_section_index: ?u16 = null, +stub_helper_section_index: ?u16 = null, + +/// __DATA segment sections +got_section_index: ?u16 = null, +const_data_section_index: ?u16 = null, -/// Entry point load command -entry_point_cmd: ?macho.entry_point_command = null, entry_addr: ?u64 = null, -/// The first 4GB of process' memory is reserved for the null (__PAGEZERO) segment. -/// This is also the start address for our binary. -vm_start_address: u64 = 0x100000000, +/// Table of all symbols used. +/// Internally references string table for names (which are optional). +symbol_table: std.ArrayListUnmanaged(macho.nlist_64) = .{}, -seg_table_dirty: bool = false, +/// Table of symbol names aka the string table. 
+string_table: std.ArrayListUnmanaged(u8) = .{}, + +/// Table of symbol vaddr values. The values is the absolute vaddr value. +/// If the vaddr of the executable __TEXT segment vaddr changes, the entire offset +/// table needs to be rewritten. +offset_table: std.ArrayListUnmanaged(u64) = .{}, error_flags: File.ErrorFlags = File.ErrorFlags{}, +cmd_table_dirty: bool = false, + +/// Pointer to the last allocated text block +last_text_block: ?*TextBlock = null, + /// `alloc_num / alloc_den` is the factor of padding when allocating. const alloc_num = 4; const alloc_den = 3; @@ -67,7 +97,23 @@ const LIB_SYSTEM_NAME: [*:0]const u8 = "System"; const LIB_SYSTEM_PATH: [*:0]const u8 = DEFAULT_LIB_SEARCH_PATH ++ "/libSystem.B.dylib"; pub const TextBlock = struct { - pub const empty = TextBlock{}; + /// Index into the symbol table + symbol_table_index: ?u32, + /// Index into offset table + offset_table_index: ?u32, + /// Size of this text block + size: u64, + /// Points to the previous and next neighbours + prev: ?*TextBlock, + next: ?*TextBlock, + + pub const empty = TextBlock{ + .symbol_table_index = null, + .offset_table_index = null, + .size = 0, + .prev = null, + .next = null, + }; }; pub const SrcFn = struct { @@ -117,6 +163,12 @@ fn openFile(allocator: *Allocator, file: fs.File, options: link.Options) !MachO /// Truncates the existing file contents and overwrites the contents. /// Returns an error if `file` is not already open with +read +write +seek abilities. fn createFile(allocator: *Allocator, file: fs.File, options: link.Options) !MachO { + switch (options.output_mode) { + .Exe => {}, + .Obj => {}, + .Lib => return error.TODOImplementWritingLibFiles, + } + var self: MachO = .{ .base = .{ .file = file, @@ -127,37 +179,518 @@ fn createFile(allocator: *Allocator, file: fs.File, options: link.Options) !Mach }; errdefer self.deinit(); - switch (options.output_mode) { - .Exe => { - // The first segment command for executables is always a __PAGEZERO segment. 
- const pagezero = .{ - .cmd = macho.LC_SEGMENT_64, - .cmdsize = commandSize(@sizeOf(macho.segment_command_64)), - .segname = makeString("__PAGEZERO"), - .vmaddr = 0, - .vmsize = self.vm_start_address, - .fileoff = 0, - .filesize = 0, - .maxprot = macho.VM_PROT_NONE, - .initprot = macho.VM_PROT_NONE, - .nsects = 0, - .flags = 0, - }; - try self.commands.append(allocator, .{ - .cmd = pagezero.cmd, - .cmdsize = pagezero.cmdsize, - }); - try self.segments.append(allocator, pagezero); - }, - .Obj => return error.TODOImplementWritingObjFiles, - .Lib => return error.TODOImplementWritingLibFiles, - } - try self.populateMissingMetadata(); return self; } +pub fn flush(self: *MachO, module: *Module) !void { + switch (self.base.options.output_mode) { + .Exe => { + var last_cmd_offset: usize = @sizeOf(macho.mach_header_64); + { + // Specify path to dynamic linker dyld + const cmdsize = commandSize(@sizeOf(macho.dylinker_command) + mem.lenZ(DEFAULT_DYLD_PATH)); + const load_dylinker = [1]macho.dylinker_command{ + .{ + .cmd = macho.LC_LOAD_DYLINKER, + .cmdsize = cmdsize, + .name = @sizeOf(macho.dylinker_command), + }, + }; + + try self.base.file.?.pwriteAll(mem.sliceAsBytes(load_dylinker[0..1]), last_cmd_offset); + + const file_offset = last_cmd_offset + @sizeOf(macho.dylinker_command); + try self.addPadding(cmdsize - @sizeOf(macho.dylinker_command), file_offset); + + try self.base.file.?.pwriteAll(mem.spanZ(DEFAULT_DYLD_PATH), file_offset); + last_cmd_offset += cmdsize; + } + + { + // Link against libSystem + const cmdsize = commandSize(@sizeOf(macho.dylib_command) + mem.lenZ(LIB_SYSTEM_PATH)); + // TODO Find a way to work out runtime version from the OS version triple stored in std.Target. + // In the meantime, we're gonna hardcode to the minimum compatibility version of 1.0.0. 
+ const min_version = 0x10000; + const dylib = .{ + .name = @sizeOf(macho.dylib_command), + .timestamp = 2, // not sure why not simply 0; this is reverse engineered from Mach-O files + .current_version = min_version, + .compatibility_version = min_version, + }; + const load_dylib = [1]macho.dylib_command{ + .{ + .cmd = macho.LC_LOAD_DYLIB, + .cmdsize = cmdsize, + .dylib = dylib, + }, + }; + + try self.base.file.?.pwriteAll(mem.sliceAsBytes(load_dylib[0..1]), last_cmd_offset); + + const file_offset = last_cmd_offset + @sizeOf(macho.dylib_command); + try self.addPadding(cmdsize - @sizeOf(macho.dylib_command), file_offset); + + try self.base.file.?.pwriteAll(mem.spanZ(LIB_SYSTEM_PATH), file_offset); + last_cmd_offset += cmdsize; + } + }, + .Obj => { + { + const symtab = &self.load_commands.items[self.symtab_cmd_index.?].Symtab; + symtab.nsyms = @intCast(u32, self.symbol_table.items.len); + const allocated_size = self.allocatedSize(symtab.stroff); + const needed_size = self.string_table.items.len; + log.debug("allocated_size = 0x{x}, needed_size = 0x{x}\n", .{ allocated_size, needed_size }); + + if (needed_size > allocated_size) { + symtab.strsize = 0; + symtab.stroff = @intCast(u32, self.findFreeSpace(needed_size, 1)); + } + symtab.strsize = @intCast(u32, needed_size); + + log.debug("writing string table from 0x{x} to 0x{x}\n", .{ symtab.stroff, symtab.stroff + symtab.strsize }); + + try self.base.file.?.pwriteAll(self.string_table.items, symtab.stroff); + } + + var last_cmd_offset: usize = @sizeOf(macho.mach_header_64); + for (self.load_commands.items) |cmd| { + const cmd_to_write = [1]@TypeOf(cmd){cmd}; + try self.base.file.?.pwriteAll(mem.sliceAsBytes(cmd_to_write[0..1]), last_cmd_offset); + last_cmd_offset += cmd.cmdsize(); + } + const off = @sizeOf(macho.mach_header_64) + @sizeOf(macho.segment_command_64); + try self.base.file.?.pwriteAll(mem.sliceAsBytes(self.sections.items), off); + }, + .Lib => return error.TODOImplementWritingLibFiles, + } + + if 
(self.entry_addr == null and self.base.options.output_mode == .Exe) { + log.debug("flushing. no_entry_point_found = true\n", .{}); + self.error_flags.no_entry_point_found = true; + } else { + log.debug("flushing. no_entry_point_found = false\n", .{}); + self.error_flags.no_entry_point_found = false; + try self.writeMachOHeader(); + } +} + +pub fn deinit(self: *MachO) void { + self.offset_table.deinit(self.base.allocator); + self.string_table.deinit(self.base.allocator); + self.symbol_table.deinit(self.base.allocator); + self.sections.deinit(self.base.allocator); + self.load_commands.deinit(self.base.allocator); +} + +pub fn allocateDeclIndexes(self: *MachO, decl: *Module.Decl) !void { + if (decl.link.macho.symbol_table_index) |_| return; + + try self.symbol_table.ensureCapacity(self.base.allocator, self.symbol_table.items.len + 1); + try self.offset_table.ensureCapacity(self.base.allocator, self.offset_table.items.len + 1); + + log.debug("allocating symbol index {} for {}\n", .{ self.symbol_table.items.len, decl.name }); + decl.link.macho.symbol_table_index = @intCast(u32, self.symbol_table.items.len); + _ = self.symbol_table.addOneAssumeCapacity(); + + decl.link.macho.offset_table_index = @intCast(u32, self.offset_table.items.len); + _ = self.offset_table.addOneAssumeCapacity(); + + self.symbol_table.items[decl.link.macho.symbol_table_index.?] = .{ + .n_strx = 0, + .n_type = 0, + .n_sect = 0, + .n_desc = 0, + .n_value = 0, + }; + self.offset_table.items[decl.link.macho.offset_table_index.?] 
= 0; +} + +pub fn updateDecl(self: *MachO, module: *Module, decl: *Module.Decl) !void { + const tracy = trace(@src()); + defer tracy.end(); + + var code_buffer = std.ArrayList(u8).init(self.base.allocator); + defer code_buffer.deinit(); + + var dbg_line_buffer = std.ArrayList(u8).init(self.base.allocator); + defer dbg_line_buffer.deinit(); + + var dbg_info_buffer = std.ArrayList(u8).init(self.base.allocator); + defer dbg_info_buffer.deinit(); + + var dbg_info_type_relocs: File.DbgInfoTypeRelocsTable = .{}; + defer { + var it = dbg_info_type_relocs.iterator(); + while (it.next()) |entry| { + entry.value.relocs.deinit(self.base.allocator); + } + dbg_info_type_relocs.deinit(self.base.allocator); + } + + const typed_value = decl.typed_value.most_recent.typed_value; + const res = try codegen.generateSymbol( + &self.base, + decl.src(), + typed_value, + &code_buffer, + &dbg_line_buffer, + &dbg_info_buffer, + &dbg_info_type_relocs, + ); + + const code = switch (res) { + .externally_managed => |x| x, + .appended => code_buffer.items, + .fail => |em| { + decl.analysis = .codegen_failure; + try module.failed_decls.put(module.gpa, decl, em); + return; + }, + }; + log.debug("generated code {}\n", .{code}); + + const required_alignment = typed_value.ty.abiAlignment(self.base.options.target); + const symbol = &self.symbol_table.items[decl.link.macho.symbol_table_index.?]; + + const decl_name = mem.spanZ(decl.name); + const name_str_index = try self.makeString(decl_name); + const addr = try self.allocateTextBlock(&decl.link.macho, code.len, required_alignment); + log.debug("allocated text block for {} at 0x{x}\n", .{ decl_name, addr }); + log.debug("updated text section {}\n", .{self.sections.items[self.text_section_index.?]}); + + symbol.* = .{ + .n_strx = name_str_index, + .n_type = macho.N_SECT, + .n_sect = @intCast(u8, self.text_section_index.?) + 1, + .n_desc = 0, + .n_value = addr, + }; + self.offset_table.items[decl.link.macho.offset_table_index.?] 
= addr; + + try self.writeSymbol(decl.link.macho.symbol_table_index.?); + + const text_section = self.sections.items[self.text_section_index.?]; + const section_offset = symbol.n_value - text_section.addr; + const file_offset = text_section.offset + section_offset; + log.debug("file_offset 0x{x}\n", .{file_offset}); + try self.base.file.?.pwriteAll(code, file_offset); + + // Since we updated the vaddr and the size, each corresponding export symbol also needs to be updated. + const decl_exports = module.decl_exports.get(decl) orelse &[0]*Module.Export{}; + return self.updateDeclExports(module, decl, decl_exports); +} + +pub fn updateDeclLineNumber(self: *MachO, module: *Module, decl: *const Module.Decl) !void {} + +pub fn updateDeclExports( + self: *MachO, + module: *Module, + decl: *const Module.Decl, + exports: []const *Module.Export, +) !void { + const tracy = trace(@src()); + defer tracy.end(); + + if (decl.link.macho.symbol_table_index == null) return; + + var decl_sym = self.symbol_table.items[decl.link.macho.symbol_table_index.?]; + // TODO implement + if (exports.len == 0) return; + + const exp = exports[0]; + self.entry_addr = decl_sym.n_value; + decl_sym.n_type |= macho.N_EXT; + exp.link.sym_index = 0; +} + +pub fn freeDecl(self: *MachO, decl: *Module.Decl) void {} + +pub fn getDeclVAddr(self: *MachO, decl: *const Module.Decl) u64 { + return self.symbol_table.items[decl.link.macho.symbol_table_index.?].n_value; +} + +pub fn populateMissingMetadata(self: *MachO) !void { + if (self.segment_cmd_index == null) { + self.segment_cmd_index = @intCast(u16, self.load_commands.items.len); + try self.load_commands.append(self.base.allocator, .{ + .Segment = .{ + .cmd = macho.LC_SEGMENT_64, + .cmdsize = @sizeOf(macho.segment_command_64), + .segname = makeStaticString(""), + .vmaddr = 0, + .vmsize = 0, + .fileoff = 0, + .filesize = 0, + .maxprot = 0, + .initprot = 0, + .nsects = 0, + .flags = 0, + }, + }); + self.cmd_table_dirty = true; + } + if (self.symtab_cmd_index 
== null) { + self.symtab_cmd_index = @intCast(u16, self.load_commands.items.len); + try self.load_commands.append(self.base.allocator, .{ + .Symtab = .{ + .cmd = macho.LC_SYMTAB, + .cmdsize = @sizeOf(macho.symtab_command), + .symoff = 0, + .nsyms = 0, + .stroff = 0, + .strsize = 0, + }, + }); + self.cmd_table_dirty = true; + } + if (self.text_section_index == null) { + self.text_section_index = @intCast(u16, self.sections.items.len); + const segment = &self.load_commands.items[self.segment_cmd_index.?].Segment; + segment.cmdsize += @sizeOf(macho.section_64); + segment.nsects += 1; + + const file_size = self.base.options.program_code_size_hint; + const off = @intCast(u32, self.findFreeSpace(file_size, 1)); + const flags = macho.S_REGULAR | macho.S_ATTR_PURE_INSTRUCTIONS | macho.S_ATTR_SOME_INSTRUCTIONS; + + log.debug("found __text section free space 0x{x} to 0x{x}\n", .{ off, off + file_size }); + + try self.sections.append(self.base.allocator, .{ + .sectname = makeStaticString("__text"), + .segname = makeStaticString("__TEXT"), + .addr = 0, + .size = file_size, + .offset = off, + .@"align" = 0x1000, + .reloff = 0, + .nreloc = 0, + .flags = flags, + .reserved1 = 0, + .reserved2 = 0, + .reserved3 = 0, + }); + + segment.vmsize += file_size; + segment.filesize += file_size; + segment.fileoff = off; + + log.debug("initial text section {}\n", .{self.sections.items[self.text_section_index.?]}); + } + { + const symtab = &self.load_commands.items[self.symtab_cmd_index.?].Symtab; + if (symtab.symoff == 0) { + const p_align = @sizeOf(macho.nlist_64); + const nsyms = self.base.options.symbol_count_hint; + const file_size = p_align * nsyms; + const off = @intCast(u32, self.findFreeSpace(file_size, p_align)); + log.debug("found symbol table free space 0x{x} to 0x{x}\n", .{ off, off + file_size }); + symtab.symoff = off; + symtab.nsyms = @intCast(u32, nsyms); + } + if (symtab.stroff == 0) { + try self.string_table.append(self.base.allocator, 0); + const file_size = @intCast(u32, 
self.string_table.items.len); + const off = @intCast(u32, self.findFreeSpace(file_size, 1)); + log.debug("found string table free space 0x{x} to 0x{x}\n", .{ off, off + file_size }); + symtab.stroff = off; + symtab.strsize = file_size; + } + } +} + +fn allocateTextBlock(self: *MachO, text_block: *TextBlock, new_block_size: u64, alignment: u64) !u64 { + const segment = &self.load_commands.items[self.segment_cmd_index.?].Segment; + const text_section = &self.sections.items[self.text_section_index.?]; + const new_block_ideal_capacity = new_block_size * alloc_num / alloc_den; + + var block_placement: ?*TextBlock = null; + const addr = blk: { + if (self.last_text_block) |last| { + const last_symbol = self.symbol_table.items[last.symbol_table_index.?]; + const ideal_capacity = last.size * alloc_num / alloc_den; + const ideal_capacity_end_addr = last_symbol.n_value + ideal_capacity; + const new_start_addr = mem.alignForwardGeneric(u64, ideal_capacity_end_addr, alignment); + block_placement = last; + break :blk new_start_addr; + } else { + break :blk text_section.addr; + } + }; + log.debug("computed symbol address 0x{x}\n", .{addr}); + + const expand_text_section = block_placement == null or block_placement.?.next == null; + if (expand_text_section) { + const text_capacity = self.allocatedSize(text_section.offset); + const needed_size = (addr + new_block_size) - text_section.addr; + log.debug("text capacity 0x{x}, needed size 0x{x}\n", .{ text_capacity, needed_size }); + + if (needed_size > text_capacity) { + // TODO handle growth + } + + self.last_text_block = text_block; + text_section.size = needed_size; + segment.vmsize = needed_size; + segment.filesize = needed_size; + if (alignment < text_section.@"align") { + text_section.@"align" = @intCast(u32, alignment); + } + } + text_block.size = new_block_size; + + if (text_block.prev) |prev| { + prev.next = text_block.next; + } + if (text_block.next) |next| { + next.prev = text_block.prev; + } + + if (block_placement) 
|big_block| { + text_block.prev = big_block; + text_block.next = big_block.next; + big_block.next = text_block; + } else { + text_block.prev = null; + text_block.next = null; + } + + return addr; +} + +fn makeStaticString(comptime bytes: []const u8) [16]u8 { + var buf = [_]u8{0} ** 16; + if (bytes.len > buf.len) @compileError("string too long; max 16 bytes"); + mem.copy(u8, buf[0..], bytes); + return buf; +} + +fn makeString(self: *MachO, bytes: []const u8) !u32 { + try self.string_table.ensureCapacity(self.base.allocator, self.string_table.items.len + bytes.len + 1); + const result = self.string_table.items.len; + self.string_table.appendSliceAssumeCapacity(bytes); + self.string_table.appendAssumeCapacity(0); + return @intCast(u32, result); +} + +fn alignSize(comptime Int: type, min_size: anytype, alignment: Int) Int { + const size = @intCast(Int, min_size); + if (size % alignment == 0) return size; + + const div = size / alignment; + return (div + 1) * alignment; +} + +fn commandSize(min_size: anytype) u32 { + return alignSize(u32, min_size, @sizeOf(u64)); +} + +fn addPadding(self: *MachO, size: u64, file_offset: u64) !void { + if (size == 0) return; + + const buf = try self.base.allocator.alloc(u8, size); + defer self.base.allocator.free(buf); + + mem.set(u8, buf[0..], 0); + + try self.base.file.?.pwriteAll(buf, file_offset); +} + +fn detectAllocCollision(self: *MachO, start: u64, size: u64) ?u64 { + const hdr_size: u64 = @sizeOf(macho.mach_header_64); + if (start < hdr_size) + return hdr_size; + + const end = start + satMul(size, alloc_num) / alloc_den; + + { + const off = @sizeOf(macho.mach_header_64); + var tight_size: u64 = 0; + for (self.load_commands.items) |cmd| { + tight_size += cmd.cmdsize(); + } + const increased_size = satMul(tight_size, alloc_num) / alloc_den; + const test_end = off + increased_size; + if (end > off and start < test_end) { + return test_end; + } + } + + for (self.sections.items) |section| { + const increased_size = 
satMul(section.size, alloc_num) / alloc_den; + const test_end = section.offset + increased_size; + if (end > section.offset and start < test_end) { + return test_end; + } + } + + if (self.symtab_cmd_index) |symtab_index| { + const symtab = self.load_commands.items[symtab_index].Symtab; + { + const tight_size = @sizeOf(macho.nlist_64) * symtab.nsyms; + const increased_size = satMul(tight_size, alloc_num) / alloc_den; + const test_end = symtab.symoff + increased_size; + if (end > symtab.symoff and start < test_end) { + return test_end; + } + } + { + const increased_size = satMul(symtab.strsize, alloc_num) / alloc_den; + const test_end = symtab.stroff + increased_size; + if (end > symtab.stroff and start < test_end) { + return test_end; + } + } + } + + return null; +} + +fn allocatedSize(self: *MachO, start: u64) u64 { + if (start == 0) + return 0; + var min_pos: u64 = std.math.maxInt(u64); + { + const off = @sizeOf(macho.mach_header_64); + if (off > start and off < min_pos) min_pos = off; + } + for (self.sections.items) |section| { + if (section.offset <= start) continue; + if (section.offset < min_pos) min_pos = section.offset; + } + if (self.symtab_cmd_index) |symtab_index| { + const symtab = self.load_commands.items[symtab_index].Symtab; + if (symtab.symoff > start and symtab.symoff < min_pos) min_pos = symtab.symoff; + if (symtab.stroff > start and symtab.stroff < min_pos) min_pos = symtab.stroff; + } + return min_pos - start; +} + +fn findFreeSpace(self: *MachO, object_size: u64, min_alignment: u16) u64 { + var start: u64 = 0; + while (self.detectAllocCollision(start, object_size)) |item_end| { + start = mem.alignForwardGeneric(u64, item_end, min_alignment); + } + return start; +} + +fn writeSymbol(self: *MachO, index: usize) !void { + const tracy = trace(@src()); + defer tracy.end(); + + const symtab = &self.load_commands.items[self.symtab_cmd_index.?].Symtab; + var sym = [1]macho.nlist_64{self.symbol_table.items[index]}; + const off = symtab.symoff + 
@sizeOf(macho.nlist_64) * index; + log.debug("writing symbol {} at 0x{x}\n", .{ sym[0], off }); + try self.base.file.?.pwriteAll(mem.sliceAsBytes(sym[0..1]), off); +} + +/// Writes Mach-O file header. +/// Should be invoked last as it needs up-to-date values of ncmds and sizeof_cmds bookkeeping +/// variables. fn writeMachOHeader(self: *MachO) !void { var hdr: macho.mach_header_64 = undefined; hdr.magic = macho.MH_MAGIC_64; @@ -190,193 +723,26 @@ fn writeMachOHeader(self: *MachO) !void { }, }; hdr.filetype = filetype; + hdr.ncmds = @intCast(u32, self.load_commands.items.len); - const ncmds = try math.cast(u32, self.commands.items.len); - hdr.ncmds = ncmds; - - var sizeof_cmds: u32 = 0; - for (self.commands.items) |cmd| { - sizeof_cmds += cmd.cmdsize; + var sizeofcmds: u32 = 0; + for (self.load_commands.items) |cmd| { + sizeofcmds += cmd.cmdsize(); } - hdr.sizeofcmds = sizeof_cmds; + + hdr.sizeofcmds = sizeofcmds; // TODO should these be set to something else? hdr.flags = 0; hdr.reserved = 0; + log.debug("writing Mach-O header {}\n", .{hdr}); + try self.base.file.?.pwriteAll(@ptrCast([*]const u8, &hdr)[0..@sizeOf(macho.mach_header_64)], 0); } -pub fn flush(self: *MachO, module: *Module) !void { - // Save segments first - { - const buf = try self.base.allocator.alloc(macho.segment_command_64, self.segments.items.len); - defer self.base.allocator.free(buf); - - self.command_file_offset = @sizeOf(macho.mach_header_64); - - for (buf) |*seg, i| { - seg.* = self.segments.items[i]; - self.command_file_offset.? 
+= self.segments.items[i].cmdsize; - } - - try self.base.file.?.pwriteAll(mem.sliceAsBytes(buf), @sizeOf(macho.mach_header_64)); - } - - switch (self.base.options.output_mode) { - .Exe => { - { - // Specify path to dynamic linker dyld - const cmdsize = commandSize(@sizeOf(macho.dylinker_command) + mem.lenZ(DEFAULT_DYLD_PATH)); - const load_dylinker = [1]macho.dylinker_command{ - .{ - .cmd = macho.LC_LOAD_DYLINKER, - .cmdsize = cmdsize, - .name = @sizeOf(macho.dylinker_command), - }, - }; - try self.commands.append(self.base.allocator, .{ - .cmd = macho.LC_LOAD_DYLINKER, - .cmdsize = cmdsize, - }); - - try self.base.file.?.pwriteAll(mem.sliceAsBytes(load_dylinker[0..1]), self.command_file_offset.?); - - const file_offset = self.command_file_offset.? + @sizeOf(macho.dylinker_command); - try self.addPadding(cmdsize - @sizeOf(macho.dylinker_command), file_offset); - - try self.base.file.?.pwriteAll(mem.spanZ(DEFAULT_DYLD_PATH), file_offset); - self.command_file_offset.? += cmdsize; - } - - { - // Link against libSystem - const cmdsize = commandSize(@sizeOf(macho.dylib_command) + mem.lenZ(LIB_SYSTEM_PATH)); - // TODO Find a way to work out runtime version from the OS version triple stored in std.Target. - // In the meantime, we're gonna hardcode to the minimum compatibility version of 1.0.0. - const min_version = 0x10000; - const dylib = .{ - .name = @sizeOf(macho.dylib_command), - .timestamp = 2, // not sure why not simply 0; this is reverse engineered from Mach-O files - .current_version = min_version, - .compatibility_version = min_version, - }; - const load_dylib = [1]macho.dylib_command{ - .{ - .cmd = macho.LC_LOAD_DYLIB, - .cmdsize = cmdsize, - .dylib = dylib, - }, - }; - try self.commands.append(self.base.allocator, .{ - .cmd = macho.LC_LOAD_DYLIB, - .cmdsize = cmdsize, - }); - - try self.base.file.?.pwriteAll(mem.sliceAsBytes(load_dylib[0..1]), self.command_file_offset.?); - - const file_offset = self.command_file_offset.? 
+ @sizeOf(macho.dylib_command); - try self.addPadding(cmdsize - @sizeOf(macho.dylib_command), file_offset); - - try self.base.file.?.pwriteAll(mem.spanZ(LIB_SYSTEM_PATH), file_offset); - self.command_file_offset.? += cmdsize; - } - }, - .Obj => return error.TODOImplementWritingObjFiles, - .Lib => return error.TODOImplementWritingLibFiles, - } - - if (self.entry_addr == null and self.base.options.output_mode == .Exe) { - log.debug("flushing. no_entry_point_found = true\n", .{}); - self.error_flags.no_entry_point_found = true; - } else { - log.debug("flushing. no_entry_point_found = false\n", .{}); - self.error_flags.no_entry_point_found = false; - try self.writeMachOHeader(); - } -} - -pub fn deinit(self: *MachO) void { - self.commands.deinit(self.base.allocator); - self.segments.deinit(self.base.allocator); - self.sections.deinit(self.base.allocator); -} - -pub fn allocateDeclIndexes(self: *MachO, decl: *Module.Decl) !void {} - -pub fn updateDecl(self: *MachO, module: *Module, decl: *Module.Decl) !void {} - -pub fn updateDeclLineNumber(self: *MachO, module: *Module, decl: *const Module.Decl) !void {} - -pub fn updateDeclExports( - self: *MachO, - module: *Module, - decl: *const Module.Decl, - exports: []const *Module.Export, -) !void {} - -pub fn freeDecl(self: *MachO, decl: *Module.Decl) void {} - -pub fn getDeclVAddr(self: *MachO, decl: *const Module.Decl) u64 { - @panic("TODO implement getDeclVAddr for MachO"); -} - -pub fn populateMissingMetadata(self: *MachO) !void { - if (self.text_segment_offset == null) { - self.text_segment_offset = @intCast(u64, self.segments.items.len); - const file_size = alignSize(u64, self.base.options.program_code_size_hint, 0x1000); - log.debug("vmsize/filesize = {}", .{file_size}); - const file_offset = 0; - const vm_address = self.vm_start_address; // the end of __PAGEZERO segment in VM - const protection = macho.VM_PROT_READ | macho.VM_PROT_EXECUTE; - const cmdsize = commandSize(@sizeOf(macho.segment_command_64)); - const 
text_segment = .{ - .cmd = macho.LC_SEGMENT_64, - .cmdsize = cmdsize, - .segname = makeString("__TEXT"), - .vmaddr = vm_address, - .vmsize = file_size, - .fileoff = 0, // __TEXT segment *always* starts at 0 file offset - .filesize = 0, //file_size, - .maxprot = protection, - .initprot = protection, - .nsects = 0, - .flags = 0, - }; - try self.commands.append(self.base.allocator, .{ - .cmd = macho.LC_SEGMENT_64, - .cmdsize = cmdsize, - }); - try self.segments.append(self.base.allocator, text_segment); - } -} - -fn makeString(comptime bytes: []const u8) [16]u8 { - var buf = [_]u8{0} ** 16; - if (bytes.len > buf.len) @compileError("MachO segment/section name too long"); - mem.copy(u8, buf[0..], bytes); - return buf; -} - -fn alignSize(comptime Int: type, min_size: anytype, alignment: Int) Int { - const size = @intCast(Int, min_size); - if (size % alignment == 0) return size; - - const div = size / alignment; - return (div + 1) * alignment; -} - -fn commandSize(min_size: anytype) u32 { - return alignSize(u32, min_size, @sizeOf(u64)); -} - -fn addPadding(self: *MachO, size: u32, file_offset: u64) !void { - if (size == 0) return; - - const buf = try self.base.allocator.alloc(u8, size); - defer self.base.allocator.free(buf); - - mem.set(u8, buf[0..], 0); - - try self.base.file.?.pwriteAll(buf, file_offset); +/// Saturating multiplication +fn satMul(a: anytype, b: anytype) @TypeOf(a, b) { + const T = @TypeOf(a, b); + return std.math.mul(T, a, b) catch std.math.maxInt(T); } From 17f36566de1cf549907d20dfd963596784691c73 Mon Sep 17 00:00:00 2001 From: Andrew Kelley Date: Thu, 3 Sep 2020 15:02:38 -0700 Subject: [PATCH 34/35] stage2: upgrade Scope.Container decls from ArrayList to HashMap --- src-self-hosted/Module.zig | 24 +++++++++--------------- 1 file changed, 9 insertions(+), 15 deletions(-) diff --git a/src-self-hosted/Module.zig b/src-self-hosted/Module.zig index 8d7a4d7b36..d273712cd1 100644 --- a/src-self-hosted/Module.zig +++ b/src-self-hosted/Module.zig @@ -230,8 
+230,7 @@ pub const Decl = struct { const src_decl = module.decls[self.src_index]; return src_decl.inst.src; }, - .file, - .block => unreachable, + .file, .block => unreachable, .gen_zir => unreachable, .local_val => unreachable, .local_ptr => unreachable, @@ -544,7 +543,7 @@ pub const Scope = struct { file_scope: *Scope.File, /// Direct children of the file. - decls: ArrayListUnmanaged(*Decl), + decls: std.AutoArrayHashMapUnmanaged(*Decl, void), // TODO implement container types and put this in a status union // ty: Type @@ -555,12 +554,7 @@ pub const Scope = struct { } pub fn removeDecl(self: *Container, child: *Decl) void { - for (self.decls.items) |item, i| { - if (item == child) { - _ = self.decls.swapRemove(i); - return; - } - } + _ = self.decls.remove(child); } pub fn fullyQualifiedNameHash(self: *Container, name: []const u8) NameHash { @@ -1796,9 +1790,9 @@ fn analyzeContainer(self: *Module, container_scope: *Scope.Container) !void { // we know which ones have been deleted. var deleted_decls = std.AutoArrayHashMap(*Decl, void).init(self.gpa); defer deleted_decls.deinit(); - try deleted_decls.ensureCapacity(container_scope.decls.items.len); - for (container_scope.decls.items) |file_decl| { - deleted_decls.putAssumeCapacityNoClobber(file_decl, {}); + try deleted_decls.ensureCapacity(container_scope.decls.items().len); + for (container_scope.decls.items()) |entry| { + deleted_decls.putAssumeCapacityNoClobber(entry.key, {}); } for (decls) |src_decl, decl_i| { @@ -1839,7 +1833,7 @@ fn analyzeContainer(self: *Module, container_scope: *Scope.Container) !void { } } else { const new_decl = try self.createNewDecl(&container_scope.base, name, decl_i, name_hash, contents_hash); - container_scope.decls.appendAssumeCapacity(new_decl); + container_scope.decls.putAssumeCapacity(new_decl, {}); if (fn_proto.getExternExportInlineToken()) |maybe_export_token| { if (tree.token_ids[maybe_export_token] == .Keyword_export) { self.work_queue.writeItemAssumeCapacity(.{ .analyze_decl 
= new_decl }); @@ -1866,7 +1860,7 @@ fn analyzeContainer(self: *Module, container_scope: *Scope.Container) !void { } } else { const new_decl = try self.createNewDecl(&container_scope.base, name, decl_i, name_hash, contents_hash); - container_scope.decls.appendAssumeCapacity(new_decl); + container_scope.decls.putAssumeCapacity(new_decl, {}); if (var_decl.getExternExportToken()) |maybe_export_token| { if (tree.token_ids[maybe_export_token] == .Keyword_export) { self.work_queue.writeItemAssumeCapacity(.{ .analyze_decl = new_decl }); @@ -1882,7 +1876,7 @@ fn analyzeContainer(self: *Module, container_scope: *Scope.Container) !void { const contents_hash = std.zig.hashSrc(tree.getNodeSource(src_decl)); const new_decl = try self.createNewDecl(&container_scope.base, name, decl_i, name_hash, contents_hash); - container_scope.decls.appendAssumeCapacity(new_decl); + container_scope.decls.putAssumeCapacity(new_decl, {}); self.work_queue.writeItemAssumeCapacity(.{ .analyze_decl = new_decl }); } else if (src_decl.castTag(.ContainerField)) |container_field| { log.err("TODO: analyze container field", .{}); From 88724b2a89157ecc3a8eea03aa0f8a6b66829915 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20=28xq=29=20Quei=C3=9Fner?= Date: Thu, 3 Sep 2020 17:48:17 +0200 Subject: [PATCH 35/35] Introduces a space after the ellipsis for test and progress. --- lib/std/progress.zig | 6 +++--- lib/std/special/test_runner.zig | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/std/progress.zig b/lib/std/progress.zig index 654d8cc228..82f2801fa1 100644 --- a/lib/std/progress.zig +++ b/lib/std/progress.zig @@ -197,7 +197,7 @@ pub const Progress = struct { var maybe_node: ?*Node = &self.root; while (maybe_node) |node| { if (need_ellipse) { - self.bufWrite(&end, "...", .{}); + self.bufWrite(&end, "... 
", .{}); } need_ellipse = false; if (node.name.len != 0 or node.estimated_total_items != null) { @@ -218,7 +218,7 @@ pub const Progress = struct { maybe_node = node.recently_updated_child; } if (need_ellipse) { - self.bufWrite(&end, "...", .{}); + self.bufWrite(&end, "... ", .{}); } } @@ -253,7 +253,7 @@ pub const Progress = struct { const bytes_needed_for_esc_codes_at_end = if (std.builtin.os.tag == .windows) 0 else 11; const max_end = self.output_buffer.len - bytes_needed_for_esc_codes_at_end; if (end.* > max_end) { - const suffix = "..."; + const suffix = "... "; self.columns_written = self.columns_written - (end.* - max_end) + suffix.len; std.mem.copy(u8, self.output_buffer[max_end..], suffix); end.* = max_end + suffix.len; diff --git a/lib/std/special/test_runner.zig b/lib/std/special/test_runner.zig index 87b011ede8..b9452b79cc 100644 --- a/lib/std/special/test_runner.zig +++ b/lib/std/special/test_runner.zig @@ -40,7 +40,7 @@ pub fn main() anyerror!void { test_node.activate(); progress.refresh(); if (progress.terminal == null) { - std.debug.print("{}/{} {}...", .{ i + 1, test_fn_list.len, test_fn.name }); + std.debug.print("{}/{} {}... ", .{ i + 1, test_fn_list.len, test_fn.name }); } const result = if (test_fn.async_frame_size) |size| switch (io_mode) { .evented => blk: {