diff --git a/lib/std/crypto/tls/Client.zig b/lib/std/crypto/tls/Client.zig index cf458f5f7c..e00976fea3 100644 --- a/lib/std/crypto/tls/Client.zig +++ b/lib/std/crypto/tls/Client.zig @@ -125,9 +125,9 @@ pub const Options = struct { /// Verify that the server certificate is authorized by a given ca bundle. bundle: Certificate.Bundle, }, - /// If non-null, ssl secrets are logged to this file. Creating such a log file allows + /// If non-null, ssl secrets are logged to this stream. Creating such a log file allows /// other programs with access to that file to decrypt all traffic over this connection. - ssl_key_log_file: ?std.fs.File = null, + ssl_key_log_file: ?*std.io.BufferedWriter = null, }; pub fn InitError(comptime Stream: type) type { diff --git a/lib/std/io.zig b/lib/std/io.zig index 5f1a4c29ac..5d2cd8b74b 100644 --- a/lib/std/io.zig +++ b/lib/std/io.zig @@ -282,8 +282,6 @@ pub const Reader = GenericReader; pub const Writer = @import("io/Writer.zig"); pub const AnyReader = @import("io/Reader.zig"); -/// Deprecated; to be removed after 0.14.0 is tagged. -pub const AnyWriter = Writer; pub const SeekableStream = @import("io/seekable_stream.zig").SeekableStream; diff --git a/lib/std/io/BufferedWriter.zig b/lib/std/io/BufferedWriter.zig index b23e190af3..1ef71330bf 100644 --- a/lib/std/io/BufferedWriter.zig +++ b/lib/std/io/BufferedWriter.zig @@ -663,9 +663,14 @@ pub fn printValue( } }, .error_set => { - if (actual_fmt.len != 0) invalidFmtError(fmt, value); - try bw.writeAll("error."); - return bw.writeAll(@errorName(value)); + if (actual_fmt.len > 0 and actual_fmt[0] == 's') { + return bw.writeAll(@errorName(value)); + } else if (actual_fmt.len != 0) { + invalidFmtError(fmt, value); + } else { + try bw.writeAll("error."); + return bw.writeAll(@errorName(value)); + } }, .@"enum" => |enumInfo| { try bw.writeAll(@typeName(T)); diff --git a/lib/std/tar.zig b/lib/std/tar.zig index a8b448ffea..92bfe43796 100644 --- a/lib/std/tar.zig +++ b/lib/std/tar.zig @@ -603,9 +603,10 @@ fn PaxIterator(comptime ReaderType: type) type { return null; } - fn readUntil(self: *Self, delimiter: u8) ![]const u8 { - var fbs: std.io.FixedBufferStream = .{ .buffer = &self.scratch }; - try self.reader.streamUntilDelimiter(fbs.writer(), delimiter, null); + fn readUntil(self: *Self, delimiter: u8) anyerror![]const u8 { + var fbs: std.io.BufferedWriter = undefined; + fbs.initFixed(&self.scratch); + try self.reader.streamUntilDelimiter(&fbs, delimiter, null); return fbs.getWritten(); } diff --git a/lib/std/zig/Ast.zig b/lib/std/zig/Ast.zig index d0e819e943..fbfe4f8d33 100644 --- a/lib/std/zig/Ast.zig +++ b/lib/std/zig/Ast.zig @@ -199,27 +199,24 @@ pub fn parse(gpa: Allocator, source: [:0]const u8, mode: Mode) Allocator.Error!A /// `gpa` is used for allocating the resulting formatted source code. /// Caller owns the returned slice of bytes, allocated with `gpa`. -pub fn render(tree: Ast, gpa: Allocator) RenderError![]u8 { - var buffer = std.ArrayList(u8).init(gpa); - defer buffer.deinit(); +pub fn renderAlloc(tree: Ast, gpa: Allocator) RenderError![]u8 { + var aw: std.io.AllocatingWriter = undefined; + const bw = aw.init(gpa); + errdefer aw.deinit(); + render(tree, gpa, bw, .{}) catch |err| return @errorCast(err); // TODO try @errorCast(...)
+ return aw.toOwnedSlice(); +} - try tree.renderToArrayList(&buffer, .{}); - return buffer.toOwnedSlice(); +pub fn render(tree: Ast, gpa: Allocator, bw: *std.io.BufferedWriter, fixups: Fixups) anyerror!void { + return @import("./render.zig").renderTree(gpa, bw, tree, fixups); } pub const Fixups = private_render.Fixups; -pub fn renderToArrayList(tree: Ast, buffer: *std.ArrayList(u8), fixups: Fixups) RenderError!void { - return @import("./render.zig").renderTree(buffer, tree, fixups); -} - /// Returns an extra offset for column and byte offset of errors that /// should point after the token in the error message. pub fn errorOffset(tree: Ast, parse_error: Error) u32 { - return if (parse_error.token_is_prev) - @as(u32, @intCast(tree.tokenSlice(parse_error.token).len)) - else - 0; + return if (parse_error.token_is_prev) @intCast(tree.tokenSlice(parse_error.token).len) else 0; } pub fn tokenLocation(self: Ast, start_offset: ByteOffset, token_index: TokenIndex) Location { @@ -318,254 +315,254 @@ pub fn rootDecls(tree: Ast) []const Node.Index { } } -pub fn renderError(tree: Ast, parse_error: Error, stream: anytype) !void { +pub fn renderError(tree: Ast, parse_error: Error, bw: *std.io.BufferedWriter) anyerror!void { switch (parse_error.tag) { .asterisk_after_ptr_deref => { // Note that the token will point at the `.*` but ideally the source // location would point to the `*` after the `.*`. - return stream.writeAll("'.*' cannot be followed by '*'; are you missing a space?"); + return bw.writeAll("'.*' cannot be followed by '*'; are you missing a space?"); }, .chained_comparison_operators => { - return stream.writeAll("comparison operators cannot be chained"); + return bw.writeAll("comparison operators cannot be chained"); }, .decl_between_fields => { - return stream.writeAll("declarations are not allowed between container fields"); + return bw.writeAll("declarations are not allowed between container fields"); }, .expected_block => { - return stream.print("expected block, found '{s}'", .{ + return bw.print("expected block, found '{s}'", .{ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(), }); }, .expected_block_or_assignment => { - return stream.print("expected block or assignment, found '{s}'", .{ + return bw.print("expected block or assignment, found '{s}'", .{ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(), }); }, .expected_block_or_expr => { - return stream.print("expected block or expression, found '{s}'", .{ + return bw.print("expected block or expression, found '{s}'", .{ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(), }); }, .expected_block_or_field => { - return stream.print("expected block or field, found '{s}'", .{ + return bw.print("expected block or field, found '{s}'", .{ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(), }); }, .expected_container_members => { - return stream.print("expected test, comptime, var decl, or container field, found '{s}'", .{ + return bw.print("expected test, comptime, var decl, or container field, found '{s}'", .{ tree.tokenTag(parse_error.token).symbol(), }); }, .expected_expr => { - return stream.print("expected expression, found '{s}'", .{ + return bw.print("expected expression, found '{s}'", .{ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(), }); }, .expected_expr_or_assignment => { - return stream.print("expected expression or assignment, found '{s}'", .{ + return 
bw.print("expected expression or assignment, found '{s}'", .{ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(), }); }, .expected_expr_or_var_decl => { - return stream.print("expected expression or var decl, found '{s}'", .{ + return bw.print("expected expression or var decl, found '{s}'", .{ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(), }); }, .expected_fn => { - return stream.print("expected function, found '{s}'", .{ + return bw.print("expected function, found '{s}'", .{ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(), }); }, .expected_inlinable => { - return stream.print("expected 'while' or 'for', found '{s}'", .{ + return bw.print("expected 'while' or 'for', found '{s}'", .{ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(), }); }, .expected_labelable => { - return stream.print("expected 'while', 'for', 'inline', or '{{', found '{s}'", .{ + return bw.print("expected 'while', 'for', 'inline', or '{{', found '{s}'", .{ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(), }); }, .expected_param_list => { - return stream.print("expected parameter list, found '{s}'", .{ + return bw.print("expected parameter list, found '{s}'", .{ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(), }); }, .expected_prefix_expr => { - return stream.print("expected prefix expression, found '{s}'", .{ + return bw.print("expected prefix expression, found '{s}'", .{ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(), }); }, .expected_primary_type_expr => { - return stream.print("expected primary type expression, found '{s}'", .{ + return bw.print("expected primary type expression, found '{s}'", .{ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(), }); }, .expected_pub_item => { - return stream.writeAll("expected function or variable declaration after pub"); + return bw.writeAll("expected function or variable declaration after pub"); }, .expected_return_type => { - return stream.print("expected return type expression, found '{s}'", .{ + return bw.print("expected return type expression, found '{s}'", .{ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(), }); }, .expected_semi_or_else => { - return stream.writeAll("expected ';' or 'else' after statement"); + return bw.writeAll("expected ';' or 'else' after statement"); }, .expected_semi_or_lbrace => { - return stream.writeAll("expected ';' or block after function prototype"); + return bw.writeAll("expected ';' or block after function prototype"); }, .expected_statement => { - return stream.print("expected statement, found '{s}'", .{ + return bw.print("expected statement, found '{s}'", .{ tree.tokenTag(parse_error.token).symbol(), }); }, .expected_suffix_op => { - return stream.print("expected pointer dereference, optional unwrap, or field access, found '{s}'", .{ + return bw.print("expected pointer dereference, optional unwrap, or field access, found '{s}'", .{ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(), }); }, .expected_type_expr => { - return stream.print("expected type expression, found '{s}'", .{ + return bw.print("expected type expression, found '{s}'", .{ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(), }); }, .expected_var_decl => { - return stream.print("expected variable 
declaration, found '{s}'", .{ + return bw.print("expected variable declaration, found '{s}'", .{ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(), }); }, .expected_var_decl_or_fn => { - return stream.print("expected variable declaration or function, found '{s}'", .{ + return bw.print("expected variable declaration or function, found '{s}'", .{ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(), }); }, .expected_loop_payload => { - return stream.print("expected loop payload, found '{s}'", .{ + return bw.print("expected loop payload, found '{s}'", .{ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(), }); }, .expected_container => { - return stream.print("expected a struct, enum or union, found '{s}'", .{ + return bw.print("expected a struct, enum or union, found '{s}'", .{ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(), }); }, .extern_fn_body => { - return stream.writeAll("extern functions have no body"); + return bw.writeAll("extern functions have no body"); }, .extra_addrspace_qualifier => { - return stream.writeAll("extra addrspace qualifier"); + return bw.writeAll("extra addrspace qualifier"); }, .extra_align_qualifier => { - return stream.writeAll("extra align qualifier"); + return bw.writeAll("extra align qualifier"); }, .extra_allowzero_qualifier => { - return stream.writeAll("extra allowzero qualifier"); + return bw.writeAll("extra allowzero qualifier"); }, .extra_const_qualifier => { - return stream.writeAll("extra const qualifier"); + return bw.writeAll("extra const qualifier"); }, .extra_volatile_qualifier => { - return stream.writeAll("extra volatile qualifier"); + return bw.writeAll("extra volatile qualifier"); }, .ptr_mod_on_array_child_type => { - return stream.print("pointer modifier '{s}' not allowed on array child type", .{ + return bw.print("pointer modifier '{s}' not allowed on array child type", .{ tree.tokenTag(parse_error.token).symbol(), }); }, .invalid_bit_range => { - return stream.writeAll("bit range not allowed on slices and arrays"); + return bw.writeAll("bit range not allowed on slices and arrays"); }, .same_line_doc_comment => { - return stream.writeAll("same line documentation comment"); + return bw.writeAll("same line documentation comment"); }, .unattached_doc_comment => { - return stream.writeAll("unattached documentation comment"); + return bw.writeAll("unattached documentation comment"); }, .test_doc_comment => { - return stream.writeAll("documentation comments cannot be attached to tests"); + return bw.writeAll("documentation comments cannot be attached to tests"); }, .comptime_doc_comment => { - return stream.writeAll("documentation comments cannot be attached to comptime blocks"); + return bw.writeAll("documentation comments cannot be attached to comptime blocks"); }, .varargs_nonfinal => { - return stream.writeAll("function prototype has parameter after varargs"); + return bw.writeAll("function prototype has parameter after varargs"); }, .expected_continue_expr => { - return stream.writeAll("expected ':' before while continue expression"); + return bw.writeAll("expected ':' before while continue expression"); }, .expected_semi_after_decl => { - return stream.writeAll("expected ';' after declaration"); + return bw.writeAll("expected ';' after declaration"); }, .expected_semi_after_stmt => { - return stream.writeAll("expected ';' after statement"); + return bw.writeAll("expected ';' after statement"); }, 
.expected_comma_after_field => { - return stream.writeAll("expected ',' after field"); + return bw.writeAll("expected ',' after field"); }, .expected_comma_after_arg => { - return stream.writeAll("expected ',' after argument"); + return bw.writeAll("expected ',' after argument"); }, .expected_comma_after_param => { - return stream.writeAll("expected ',' after parameter"); + return bw.writeAll("expected ',' after parameter"); }, .expected_comma_after_initializer => { - return stream.writeAll("expected ',' after initializer"); + return bw.writeAll("expected ',' after initializer"); }, .expected_comma_after_switch_prong => { - return stream.writeAll("expected ',' after switch prong"); + return bw.writeAll("expected ',' after switch prong"); }, .expected_comma_after_for_operand => { - return stream.writeAll("expected ',' after for operand"); + return bw.writeAll("expected ',' after for operand"); }, .expected_comma_after_capture => { - return stream.writeAll("expected ',' after for capture"); + return bw.writeAll("expected ',' after for capture"); }, .expected_initializer => { - return stream.writeAll("expected field initializer"); + return bw.writeAll("expected field initializer"); }, .mismatched_binary_op_whitespace => { - return stream.print("binary operator '{s}' has whitespace on one side, but not the other", .{tree.tokenTag(parse_error.token).lexeme().?}); + return bw.print("binary operator '{s}' has whitespace on one side, but not the other", .{tree.tokenTag(parse_error.token).lexeme().?}); }, .invalid_ampersand_ampersand => { - return stream.writeAll("ambiguous use of '&&'; use 'and' for logical AND, or change whitespace to ' & &' for bitwise AND"); + return bw.writeAll("ambiguous use of '&&'; use 'and' for logical AND, or change whitespace to ' & &' for bitwise AND"); }, .c_style_container => { - return stream.print("'{s} {s}' is invalid", .{ + return bw.print("'{s} {s}' is invalid", .{ parse_error.extra.expected_tag.symbol(), tree.tokenSlice(parse_error.token), }); }, .zig_style_container => { - return stream.print("to declare a container do 'const {s} = {s}'", .{ + return bw.print("to declare a container do 'const {s} = {s}'", .{ tree.tokenSlice(parse_error.token), parse_error.extra.expected_tag.symbol(), }); }, .previous_field => { - return stream.writeAll("field before declarations here"); + return bw.writeAll("field before declarations here"); }, .next_field => { - return stream.writeAll("field after declarations here"); + return bw.writeAll("field after declarations here"); }, .expected_var_const => { - return stream.writeAll("expected 'var' or 'const' before variable declaration"); + return bw.writeAll("expected 'var' or 'const' before variable declaration"); }, .wrong_equal_var_decl => { - return stream.writeAll("variable initialized with '==' instead of '='"); + return bw.writeAll("variable initialized with '==' instead of '='"); }, .var_const_decl => { - return stream.writeAll("use 'var' or 'const' to declare variable"); + return bw.writeAll("use 'var' or 'const' to declare variable"); }, .extra_for_capture => { - return stream.writeAll("extra capture in for loop"); + return bw.writeAll("extra capture in for loop"); }, .for_input_not_captured => { - return stream.writeAll("for input is not captured"); + return bw.writeAll("for input is not captured"); }, .invalid_byte => { const tok_slice = tree.source[tree.tokens.items(.start)[parse_error.token]..]; - return stream.print("{s} contains invalid byte: '{'}'", .{ + return bw.print("{s} contains invalid byte: '{'}'", .{ switch 
(tok_slice[0]) { '\'' => "character literal", '"', '\\' => "string literal", @@ -580,10 +577,10 @@ pub fn renderError(tree: Ast, parse_error: Error, stream: anytype) !void { const found_tag = tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)); const expected_symbol = parse_error.extra.expected_tag.symbol(); switch (found_tag) { - .invalid => return stream.print("expected '{s}', found invalid bytes", .{ + .invalid => return bw.print("expected '{s}', found invalid bytes", .{ expected_symbol, }), - else => return stream.print("expected '{s}', found '{s}'", .{ + else => return bw.print("expected '{s}', found '{s}'", .{ expected_symbol, found_tag.symbol(), }), } diff --git a/lib/std/zig/AstGen.zig b/lib/std/zig/AstGen.zig index e253729c3e..fd6b02901f 100644 --- a/lib/std/zig/AstGen.zig +++ b/lib/std/zig/AstGen.zig @@ -11441,10 +11441,13 @@ fn parseStrLit( offset: u32, ) InnerError!void { const raw_string = bytes[offset..]; - var buf_managed = buf.toManaged(astgen.gpa); - const result = std.zig.string_literal.parseWrite(buf_managed.writer(), raw_string); - buf.* = buf_managed.moveToUnmanaged(); - switch (try result) { + const result = r: { + var aw: std.io.AllocatingWriter = undefined; + const bw = aw.fromArrayList(astgen.gpa, buf); + defer buf.* = aw.toArrayList(); + break :r std.zig.string_literal.parseWrite(bw, raw_string) catch |err| return @errorCast(err); + }; + switch (result) { .success => return, .failure => |err| return astgen.failWithStrLitError(err, token, bytes, offset), } @@ -11493,17 +11496,18 @@ fn appendErrorNodeNotes( notes: []const u32, ) Allocator.Error!void { @branchHint(.cold); + const gpa = astgen.gpa; const string_bytes = &astgen.string_bytes; const msg: Zir.NullTerminatedString = @enumFromInt(string_bytes.items.len); - try string_bytes.writer(astgen.gpa).print(format ++ "\x00", args); + try string_bytes.print(gpa, format ++ "\x00", args); const notes_index: u32 = if (notes.len != 0) blk: { const notes_start = astgen.extra.items.len; - try astgen.extra.ensureTotalCapacity(astgen.gpa, notes_start + 1 + notes.len); + try astgen.extra.ensureTotalCapacity(gpa, notes_start + 1 + notes.len); astgen.extra.appendAssumeCapacity(@intCast(notes.len)); astgen.extra.appendSliceAssumeCapacity(notes); break :blk @intCast(notes_start); } else 0; - try astgen.compile_errors.append(astgen.gpa, .{ + try astgen.compile_errors.append(gpa, .{ .msg = msg, .node = node.toOptional(), .token = .none, @@ -11587,7 +11591,7 @@ fn appendErrorTokNotesOff( const gpa = astgen.gpa; const string_bytes = &astgen.string_bytes; const msg: Zir.NullTerminatedString = @enumFromInt(string_bytes.items.len); - try string_bytes.writer(gpa).print(format ++ "\x00", args); + try string_bytes.print(gpa, format ++ "\x00", args); const notes_index: u32 = if (notes.len != 0) blk: { const notes_start = astgen.extra.items.len; try astgen.extra.ensureTotalCapacity(gpa, notes_start + 1 + notes.len); @@ -11623,7 +11627,7 @@ fn errNoteTokOff( @branchHint(.cold); const string_bytes = &astgen.string_bytes; const msg: Zir.NullTerminatedString = @enumFromInt(string_bytes.items.len); - try string_bytes.writer(astgen.gpa).print(format ++ "\x00", args); + try string_bytes.print(astgen.gpa, format ++ "\x00", args); return astgen.addExtra(Zir.Inst.CompileErrors.Item{ .msg = msg, .node = .none, @@ -11642,7 +11646,7 @@ fn errNoteNode( @branchHint(.cold); const string_bytes = &astgen.string_bytes; const msg: Zir.NullTerminatedString = @enumFromInt(string_bytes.items.len); - try 
string_bytes.writer(astgen.gpa).print(format ++ "\x00", args); + try string_bytes.print(astgen.gpa, format ++ "\x00", args); return astgen.addExtra(Zir.Inst.CompileErrors.Item{ .msg = msg, .node = node.toOptional(), @@ -13888,13 +13892,14 @@ fn emitDbgStmtForceCurrentIndex(gz: *GenZir, lc: LineColumn) !void { } }); } -fn lowerAstErrors(astgen: *AstGen) !void { +fn lowerAstErrors(astgen: *AstGen) error{OutOfMemory}!void { const gpa = astgen.gpa; const tree = astgen.tree; assert(tree.errors.len > 0); - var msg: std.ArrayListUnmanaged(u8) = .empty; - defer msg.deinit(gpa); + var msg: std.io.AllocatingWriter = undefined; + const msg_writer = msg.init(gpa); + defer msg.deinit(); var notes: std.ArrayListUnmanaged(u32) = .empty; defer notes.deinit(gpa); @@ -13928,20 +13933,20 @@ fn lowerAstErrors(astgen: *AstGen) !void { .extra = .{ .offset = bad_off }, }; msg.clearRetainingCapacity(); - try tree.renderError(err, msg.writer(gpa)); - return try astgen.appendErrorTokNotesOff(tok, bad_off, "{s}", .{msg.items}, notes.items); + tree.renderError(err, msg_writer) catch |e| return @errorCast(e); // TODO try @errorCast(...) + return try astgen.appendErrorTokNotesOff(tok, bad_off, "{s}", .{msg.getWritten()}, notes.items); } var cur_err = tree.errors[0]; for (tree.errors[1..]) |err| { if (err.is_note) { - try tree.renderError(err, msg.writer(gpa)); - try notes.append(gpa, try astgen.errNoteTok(err.token, "{s}", .{msg.items})); + tree.renderError(err, msg_writer) catch |e| return @errorCast(e); // TODO try @errorCast(...) + try notes.append(gpa, try astgen.errNoteTok(err.token, "{s}", .{msg.getWritten()})); } else { // Flush error const extra_offset = tree.errorOffset(cur_err); - try tree.renderError(cur_err, msg.writer(gpa)); - try astgen.appendErrorTokNotesOff(cur_err.token, extra_offset, "{s}", .{msg.items}, notes.items); + tree.renderError(cur_err, msg_writer) catch |e| return @errorCast(e); // TODO try @errorCast(...) + try astgen.appendErrorTokNotesOff(cur_err.token, extra_offset, "{s}", .{msg.getWritten()}, notes.items); notes.clearRetainingCapacity(); cur_err = err; @@ -13954,8 +13959,8 @@ fn lowerAstErrors(astgen: *AstGen) !void { // Flush error const extra_offset = tree.errorOffset(cur_err); - try tree.renderError(cur_err, msg.writer(gpa)); - try astgen.appendErrorTokNotesOff(cur_err.token, extra_offset, "{s}", .{msg.items}, notes.items); + tree.renderError(cur_err, msg_writer) catch |e| return @errorCast(e); // TODO try @errorCast(...) 
+ try astgen.appendErrorTokNotesOff(cur_err.token, extra_offset, "{s}", .{msg.getWritten()}, notes.items); } const DeclarationName = union(enum) { diff --git a/lib/std/zig/ZonGen.zig b/lib/std/zig/ZonGen.zig index 2114260e84..0acd996d8f 100644 --- a/lib/std/zig/ZonGen.zig +++ b/lib/std/zig/ZonGen.zig @@ -452,37 +452,43 @@ fn expr(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) Allocator } } -fn appendIdentStr(zg: *ZonGen, ident_token: Ast.TokenIndex) !u32 { +fn appendIdentStr(zg: *ZonGen, ident_token: Ast.TokenIndex) error{ OutOfMemory, BadString }!u32 { + const gpa = zg.gpa; const tree = zg.tree; assert(tree.tokenTag(ident_token) == .identifier); const ident_name = tree.tokenSlice(ident_token); if (!mem.startsWith(u8, ident_name, "@")) { const start = zg.string_bytes.items.len; - try zg.string_bytes.appendSlice(zg.gpa, ident_name); + try zg.string_bytes.appendSlice(gpa, ident_name); return @intCast(start); - } else { - const offset = 1; - const start: u32 = @intCast(zg.string_bytes.items.len); - const raw_string = zg.tree.tokenSlice(ident_token)[offset..]; - try zg.string_bytes.ensureUnusedCapacity(zg.gpa, raw_string.len); - switch (try std.zig.string_literal.parseWrite(zg.string_bytes.writer(zg.gpa), raw_string)) { - .success => {}, - .failure => |err| { - try zg.lowerStrLitError(err, ident_token, raw_string, offset); - return error.BadString; - }, - } - - const slice = zg.string_bytes.items[start..]; - if (mem.indexOfScalar(u8, slice, 0) != null) { - try zg.addErrorTok(ident_token, "identifier cannot contain null bytes", .{}); - return error.BadString; - } else if (slice.len == 0) { - try zg.addErrorTok(ident_token, "identifier cannot be empty", .{}); - return error.BadString; - } - return start; } + const offset = 1; + const start: u32 = @intCast(zg.string_bytes.items.len); + const raw_string = zg.tree.tokenSlice(ident_token)[offset..]; + try zg.string_bytes.ensureUnusedCapacity(gpa, raw_string.len); + const result = r: { + var aw: std.io.AllocatingWriter = undefined; + const bw = aw.fromArrayList(gpa, &zg.string_bytes); + defer zg.string_bytes = aw.toArrayList(); + break :r std.zig.string_literal.parseWrite(bw, raw_string) catch |err| return @errorCast(err); + }; + switch (result) { + .success => {}, + .failure => |err| { + try zg.lowerStrLitError(err, ident_token, raw_string, offset); + return error.BadString; + }, + } + + const slice = zg.string_bytes.items[start..]; + if (mem.indexOfScalar(u8, slice, 0) != null) { + try zg.addErrorTok(ident_token, "identifier cannot contain null bytes", .{}); + return error.BadString; + } else if (slice.len == 0) { + try zg.addErrorTok(ident_token, "identifier cannot be empty", .{}); + return error.BadString; + } + return start; } /// Estimates the size of a string node without parsing it. 
@@ -513,8 +519,8 @@ pub fn strLitSizeHint(tree: Ast, node: Ast.Node.Index) usize { pub fn parseStrLit( tree: Ast, node: Ast.Node.Index, - writer: anytype, -) error{OutOfMemory}!std.zig.string_literal.Result { + writer: *std.io.BufferedWriter, +) anyerror!std.zig.string_literal.Result { switch (tree.nodeTag(node)) { .string_literal => { const token = tree.nodeMainToken(node); @@ -549,15 +555,21 @@ const StringLiteralResult = union(enum) { slice: struct { start: u32, len: u32 }, }; -fn strLitAsString(zg: *ZonGen, str_node: Ast.Node.Index) !StringLiteralResult { +fn strLitAsString(zg: *ZonGen, str_node: Ast.Node.Index) error{ OutOfMemory, BadString }!StringLiteralResult { if (!zg.options.parse_str_lits) return .{ .slice = .{ .start = 0, .len = 0 } }; const gpa = zg.gpa; const string_bytes = &zg.string_bytes; const str_index: u32 = @intCast(zg.string_bytes.items.len); const size_hint = strLitSizeHint(zg.tree, str_node); - try string_bytes.ensureUnusedCapacity(zg.gpa, size_hint); - switch (try parseStrLit(zg.tree, str_node, zg.string_bytes.writer(zg.gpa))) { + try string_bytes.ensureUnusedCapacity(gpa, size_hint); + const result = r: { + var aw: std.io.AllocatingWriter = undefined; + const bw = aw.fromArrayList(gpa, &zg.string_bytes); + defer zg.string_bytes = aw.toArrayList(); + break :r parseStrLit(zg.tree, str_node, bw) catch |err| return @errorCast(err); + }; + switch (result) { .success => {}, .failure => |err| { const token = zg.tree.nodeMainToken(str_node); @@ -805,10 +817,7 @@ fn lowerNumberError(zg: *ZonGen, err: std.zig.number_literal.Error, token: Ast.T fn errNoteNode(zg: *ZonGen, node: Ast.Node.Index, comptime format: []const u8, args: anytype) Allocator.Error!Zoir.CompileError.Note { const message_idx: u32 = @intCast(zg.string_bytes.items.len); - const writer = zg.string_bytes.writer(zg.gpa); - try writer.print(format, args); - try writer.writeByte(0); - + try zg.string_bytes.print(zg.gpa, format ++ "\x00", args); return .{ .msg = @enumFromInt(message_idx), .token = .none, @@ -818,10 +827,7 @@ fn errNoteNode(zg: *ZonGen, node: Ast.Node.Index, comptime format: []const u8, a fn errNoteTok(zg: *ZonGen, tok: Ast.TokenIndex, comptime format: []const u8, args: anytype) Allocator.Error!Zoir.CompileError.Note { const message_idx: u32 = @intCast(zg.string_bytes.items.len); - const writer = zg.string_bytes.writer(zg.gpa); - try writer.print(format, args); - try writer.writeByte(0); - + try zg.string_bytes.print(zg.gpa, format ++ "\x00", args); return .{ .msg = @enumFromInt(message_idx), .token = .fromToken(tok), @@ -862,9 +868,7 @@ fn addErrorInner( try zg.error_notes.appendSlice(gpa, notes); const message_idx: u32 = @intCast(zg.string_bytes.items.len); - const writer = zg.string_bytes.writer(zg.gpa); - try writer.print(format, args); - try writer.writeByte(0); + try zg.string_bytes.print(gpa, format ++ "\x00", args); try zg.compile_errors.append(gpa, .{ .msg = @enumFromInt(message_idx), @@ -880,8 +884,9 @@ fn lowerAstErrors(zg: *ZonGen) Allocator.Error!void { const tree = zg.tree; assert(tree.errors.len > 0); - var msg: std.ArrayListUnmanaged(u8) = .empty; - defer msg.deinit(gpa); + var msg: std.io.AllocatingWriter = undefined; + const msg_bw = msg.init(gpa); + defer msg.deinit(); var notes: std.ArrayListUnmanaged(Zoir.CompileError.Note) = .empty; defer notes.deinit(gpa); @@ -889,18 +894,20 @@ fn lowerAstErrors(zg: *ZonGen) Allocator.Error!void { var cur_err = tree.errors[0]; for (tree.errors[1..]) |err| { if (err.is_note) { - try tree.renderError(err, msg.writer(gpa)); - try 
notes.append(gpa, try zg.errNoteTok(err.token, "{s}", .{msg.items})); + tree.renderError(err, msg_bw) catch |e| return @errorCast(e); // TODO: try @errorCast(...) + try notes.append(gpa, try zg.errNoteTok(err.token, "{s}", .{msg.getWritten()})); } else { // Flush error - try tree.renderError(cur_err, msg.writer(gpa)); + tree.renderError(cur_err, msg_bw) catch |e| return @errorCast(e); // TODO try @errorCast(...) const extra_offset = tree.errorOffset(cur_err); - try zg.addErrorTokNotesOff(cur_err.token, extra_offset, "{s}", .{msg.items}, notes.items); + try zg.addErrorTokNotesOff(cur_err.token, extra_offset, "{s}", .{msg.getWritten()}, notes.items); notes.clearRetainingCapacity(); cur_err = err; - // TODO: `Parse` currently does not have good error recovery mechanisms, so the remaining errors could be bogus. - // As such, we'll ignore all remaining errors for now. We should improve `Parse` so that we can report all the errors. + // TODO: `Parse` currently does not have good error recovery + // mechanisms, so the remaining errors could be bogus. As such, + // we'll ignore all remaining errors for now. We should improve + // `Parse` so that we can report all the errors. return; } msg.clearRetainingCapacity(); @@ -908,8 +915,8 @@ fn lowerAstErrors(zg: *ZonGen) Allocator.Error!void { // Flush error const extra_offset = tree.errorOffset(cur_err); - try tree.renderError(cur_err, msg.writer(gpa)); - try zg.addErrorTokNotesOff(cur_err.token, extra_offset, "{s}", .{msg.items}, notes.items); + tree.renderError(cur_err, msg_bw) catch |e| return @errorCast(e); // TODO try @errorCast(...) + try zg.addErrorTokNotesOff(cur_err.token, extra_offset, "{s}", .{msg.getWritten()}, notes.items); } const std = @import("std"); diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index 994c6aa2cd..893fc1635d 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -10,10 +10,6 @@ const primitives = std.zig.primitives; const indent_delta = 4; const asm_indent_delta = 2; -pub const Error = Ast.RenderError; - -const Ais = AutoIndentingStream(std.ArrayList(u8).Writer); - pub const Fixups = struct { /// The key is the mut token (`var`/`const`) of the variable declaration /// that should have a `_ = foo;` inserted afterwards. @@ -74,17 +70,17 @@ pub const Fixups = struct { const Render = struct { gpa: Allocator, - ais: *Ais, + ais: *AutoIndentingStream, tree: Ast, fixups: Fixups, }; -pub fn renderTree(buffer: *std.ArrayList(u8), tree: Ast, fixups: Fixups) Error!void { +pub fn renderTree(gpa: Allocator, bw: *std.io.BufferedWriter, tree: Ast, fixups: Fixups) anyerror!void { assert(tree.errors.len == 0); // Cannot render an invalid tree. 
- var auto_indenting_stream = Ais.init(buffer, indent_delta); + var auto_indenting_stream: AutoIndentingStream = .init(bw, indent_delta); defer auto_indenting_stream.deinit(); var r: Render = .{ - .gpa = buffer.allocator, + .gpa = gpa, .ais = &auto_indenting_stream, .tree = tree, .fixups = fixups, @@ -115,7 +111,7 @@ pub fn renderTree(buffer: *std.ArrayList(u8), tree: Ast, fixups: Fixups) Error!v } /// Render all members in the given slice, keeping empty lines where appropriate -fn renderMembers(r: *Render, members: []const Ast.Node.Index) Error!void { +fn renderMembers(r: *Render, members: []const Ast.Node.Index) anyerror!void { const tree = r.tree; if (members.len == 0) return; const container: Container = for (members) |member| { @@ -139,7 +135,7 @@ fn renderMember( container: Container, decl: Ast.Node.Index, space: Space, -) Error!void { +) anyerror!void { const tree = r.tree; const ais = r.ais; if (r.fixups.omit_nodes.contains(decl)) return; @@ -186,7 +182,7 @@ fn renderMember( if (opt_callconv_expr.unwrap()) |callconv_expr| { if (tree.nodeTag(callconv_expr) == .enum_literal) { if (mem.eql(u8, "@\"inline\"", tree.tokenSlice(tree.nodeMainToken(callconv_expr)))) { - try ais.writer().writeAll("inline "); + try ais.underlying_writer.writeAll("inline "); } } } @@ -200,7 +196,7 @@ fn renderMember( const lbrace = tree.nodeMainToken(body_node); try renderToken(r, lbrace, .newline); try discardAllParams(r, fn_proto); - try ais.writer().writeAll("@trap();"); + try ais.writeAll("@trap();"); ais.popIndent(); try ais.insertNewline(); try renderToken(r, tree.lastToken(body_node), space); // rbrace @@ -216,10 +212,9 @@ fn renderMember( const name_ident = param.name_token.?; assert(tree.tokenTag(name_ident) == .identifier); if (r.fixups.unused_var_decls.contains(name_ident)) { - const w = ais.writer(); - try w.writeAll("_ = "); - try w.writeAll(tokenSliceForRender(r.tree, name_ident)); - try w.writeAll(";\n"); + try ais.writeAll("_ = "); + try ais.writeAll(tokenSliceForRender(r.tree, name_ident)); + try ais.writeAll(";\n"); } } var statements_buf: [2]Ast.Node.Index = undefined; @@ -310,7 +305,7 @@ fn renderMember( } /// Render all expressions in the slice, keeping empty lines where appropriate -fn renderExpressions(r: *Render, expressions: []const Ast.Node.Index, space: Space) Error!void { +fn renderExpressions(r: *Render, expressions: []const Ast.Node.Index, space: Space) anyerror!void { if (expressions.len == 0) return; try renderExpression(r, expressions[0], space); for (expressions[1..]) |expression| { @@ -319,11 +314,11 @@ fn renderExpressions(r: *Render, expressions: []const Ast.Node.Index, space: Spa } } -fn renderExpression(r: *Render, node: Ast.Node.Index, space: Space) Error!void { +fn renderExpression(r: *Render, node: Ast.Node.Index, space: Space) anyerror!void { const tree = r.tree; const ais = r.ais; if (r.fixups.replace_nodes_with_string.get(node)) |replacement| { - try ais.writer().writeAll(replacement); + try ais.writeAll(replacement); try renderOnlySpace(r, space); return; } else if (r.fixups.replace_nodes_with_node.get(node)) |replacement| { @@ -891,11 +886,11 @@ fn renderExpression(r: *Render, node: Ast.Node.Index, space: Space) Error!void { /// Same as `renderExpression`, but afterwards looks for any /// append_string_after_node fixups to apply -fn renderExpressionFixup(r: *Render, node: Ast.Node.Index, space: Space) Error!void { +fn renderExpressionFixup(r: *Render, node: Ast.Node.Index, space: Space) anyerror!void { const ais = r.ais; try renderExpression(r, node, space); if 
(r.fixups.append_string_after_node.get(node)) |bytes| { - try ais.writer().writeAll(bytes); + try ais.writeAll(bytes); } } @@ -903,7 +898,7 @@ fn renderArrayType( r: *Render, array_type: Ast.full.ArrayType, space: Space, -) Error!void { +) anyerror!void { const tree = r.tree; const ais = r.ais; const rbracket = tree.firstToken(array_type.ast.elem_type) - 1; @@ -921,7 +916,7 @@ fn renderArrayType( return renderExpression(r, array_type.ast.elem_type, space); } -fn renderPtrType(r: *Render, ptr_type: Ast.full.PtrType, space: Space) Error!void { +fn renderPtrType(r: *Render, ptr_type: Ast.full.PtrType, space: Space) anyerror!void { const tree = r.tree; const main_token = ptr_type.ast.main_token; switch (ptr_type.size) { @@ -1015,7 +1010,7 @@ fn renderSlice( slice_node: Ast.Node.Index, slice: Ast.full.Slice, space: Space, -) Error!void { +) anyerror!void { const tree = r.tree; const after_start_space_bool = nodeCausesSliceOpSpace(tree.nodeTag(slice.ast.start)) or if (slice.ast.end.unwrap()) |end| nodeCausesSliceOpSpace(tree.nodeTag(end)) else false; @@ -1048,7 +1043,7 @@ fn renderAsmOutput( r: *Render, asm_output: Ast.Node.Index, space: Space, -) Error!void { +) anyerror!void { const tree = r.tree; assert(tree.nodeTag(asm_output) == .asm_output); const symbolic_name = tree.nodeMainToken(asm_output); @@ -1074,7 +1069,7 @@ fn renderAsmInput( r: *Render, asm_input: Ast.Node.Index, space: Space, -) Error!void { +) anyerror!void { const tree = r.tree; assert(tree.nodeTag(asm_input) == .asm_input); const symbolic_name = tree.nodeMainToken(asm_input); @@ -1096,14 +1091,14 @@ fn renderVarDecl( ignore_comptime_token: bool, /// `comma_space` and `space` are used for destructure LHS decls. space: Space, -) Error!void { +) anyerror!void { try renderVarDeclWithoutFixups(r, var_decl, ignore_comptime_token, space); if (r.fixups.unused_var_decls.contains(var_decl.ast.mut_token + 1)) { // Discard the variable like this: `_ = foo;` - const w = r.ais.writer(); - try w.writeAll("_ = "); - try w.writeAll(tokenSliceForRender(r.tree, var_decl.ast.mut_token + 1)); - try w.writeAll(";\n"); + const ais = r.ais; + try ais.writeAll("_ = "); + try ais.writeAll(tokenSliceForRender(r.tree, var_decl.ast.mut_token + 1)); + try ais.writeAll(";\n"); } } @@ -1114,7 +1109,7 @@ fn renderVarDeclWithoutFixups( ignore_comptime_token: bool, /// `comma_space` and `space` are used for destructure LHS decls. space: Space, -) Error!void { +) anyerror!void { const tree = r.tree; const ais = r.ais; @@ -1226,7 +1221,7 @@ fn renderVarDeclWithoutFixups( ais.popIndent(); } -fn renderIf(r: *Render, if_node: Ast.full.If, space: Space) Error!void { +fn renderIf(r: *Render, if_node: Ast.full.If, space: Space) anyerror!void { return renderWhile(r, .{ .ast = .{ .while_token = if_node.ast.if_token, @@ -1245,7 +1240,7 @@ fn renderIf(r: *Render, if_node: Ast.full.If, space: Space) Error!void { /// Note that this function is additionally used to render if expressions, with /// respective values set to null. 
-fn renderWhile(r: *Render, while_node: Ast.full.While, space: Space) Error!void { +fn renderWhile(r: *Render, while_node: Ast.full.While, space: Space) anyerror!void { const tree = r.tree; if (while_node.label_token) |label| { @@ -1315,7 +1310,7 @@ fn renderThenElse( maybe_error_token: ?Ast.TokenIndex, opt_else_expr: Ast.Node.OptionalIndex, space: Space, -) Error!void { +) anyerror!void { const tree = r.tree; const ais = r.ais; const then_expr_is_block = nodeIsBlock(tree.nodeTag(then_expr)); @@ -1370,7 +1365,7 @@ fn renderThenElse( } } -fn renderFor(r: *Render, for_node: Ast.full.For, space: Space) Error!void { +fn renderFor(r: *Render, for_node: Ast.full.For, space: Space) anyerror!void { const tree = r.tree; const ais = r.ais; const token_tags = tree.tokens.items(.tag); @@ -1445,7 +1440,7 @@ fn renderContainerField( container: Container, field_param: Ast.full.ContainerField, space: Space, -) Error!void { +) anyerror!void { const tree = r.tree; const ais = r.ais; var field = field_param; @@ -1554,7 +1549,7 @@ fn renderBuiltinCall( builtin_token: Ast.TokenIndex, params: []const Ast.Node.Index, space: Space, -) Error!void { +) anyerror!void { const tree = r.tree; const ais = r.ais; @@ -1581,7 +1576,7 @@ fn renderBuiltinCall( defer r.gpa.free(new_string); try renderToken(r, builtin_token + 1, .none); // ( - try ais.writer().print("\"{}\"", .{std.zig.fmtEscapes(new_string)}); + try ais.print("\"{}\"", .{std.zig.fmtEscapes(new_string)}); return renderToken(r, str_lit_token + 1, space); // ) } } @@ -1627,7 +1622,7 @@ fn renderBuiltinCall( } } -fn renderFnProto(r: *Render, fn_proto: Ast.full.FnProto, space: Space) Error!void { +fn renderFnProto(r: *Render, fn_proto: Ast.full.FnProto, space: Space) anyerror!void { const tree = r.tree; const ais = r.ais; @@ -1852,7 +1847,7 @@ fn renderSwitchCase( r: *Render, switch_case: Ast.full.SwitchCase, space: Space, -) Error!void { +) anyerror!void { const ais = r.ais; const tree = r.tree; const trailing_comma = tree.tokenTag(switch_case.ast.arrow_token - 1) == .comma; @@ -1914,7 +1909,7 @@ fn renderBlock( block_node: Ast.Node.Index, statements: []const Ast.Node.Index, space: Space, -) Error!void { +) anyerror!void { const tree = r.tree; const ais = r.ais; const lbrace = tree.nodeMainToken(block_node); @@ -1939,7 +1934,7 @@ fn finishRenderBlock( block_node: Ast.Node.Index, statements: []const Ast.Node.Index, space: Space, -) Error!void { +) anyerror!void { const tree = r.tree; const ais = r.ais; for (statements, 0..) 
|stmt, i| { @@ -1967,7 +1962,7 @@ fn renderStructInit( struct_node: Ast.Node.Index, struct_init: Ast.full.StructInit, space: Space, -) Error!void { +) anyerror!void { const tree = r.tree; const ais = r.ais; @@ -2038,7 +2033,7 @@ fn renderArrayInit( r: *Render, array_init: Ast.full.ArrayInit, space: Space, -) Error!void { +) anyerror!void { const tree = r.tree; const ais = r.ais; const gpa = r.gpa; @@ -2139,13 +2134,14 @@ fn renderArrayInit( const section_exprs = row_exprs[0..section_end]; - var sub_expr_buffer = std.ArrayList(u8).init(gpa); + var sub_expr_buffer: std.io.AllocatingWriter = undefined; + const sub_expr_buffer_writer = sub_expr_buffer.init(gpa); defer sub_expr_buffer.deinit(); const sub_expr_buffer_starts = try gpa.alloc(usize, section_exprs.len + 1); defer gpa.free(sub_expr_buffer_starts); - var auto_indenting_stream = Ais.init(&sub_expr_buffer, indent_delta); + var auto_indenting_stream: AutoIndentingStream = .init(sub_expr_buffer_writer, indent_delta); defer auto_indenting_stream.deinit(); var sub_render: Render = .{ .gpa = r.gpa, @@ -2159,13 +2155,13 @@ fn renderArrayInit( var single_line = true; var contains_newline = false; for (section_exprs, 0..) |expr, i| { - const start = sub_expr_buffer.items.len; + const start = sub_expr_buffer.getWritten().len; sub_expr_buffer_starts[i] = start; if (i + 1 < section_exprs.len) { try renderExpression(&sub_render, expr, .none); - const width = sub_expr_buffer.items.len - start; - const this_contains_newline = mem.indexOfScalar(u8, sub_expr_buffer.items[start..], '\n') != null; + const width = sub_expr_buffer.getWritten().len - start; + const this_contains_newline = mem.indexOfScalar(u8, sub_expr_buffer.getWritten()[start..], '\n') != null; contains_newline = contains_newline or this_contains_newline; expr_widths[i] = width; expr_newlines[i] = this_contains_newline; @@ -2188,7 +2184,7 @@ fn renderArrayInit( ais.popSpace(); const width = sub_expr_buffer.items.len - start - 2; - const this_contains_newline = mem.indexOfScalar(u8, sub_expr_buffer.items[start .. sub_expr_buffer.items.len - 1], '\n') != null; + const this_contains_newline = mem.indexOfScalar(u8, sub_expr_buffer.getWritten()[start .. sub_expr_buffer.items.len - 1], '\n') != null; contains_newline = contains_newline or this_contains_newline; expr_widths[i] = width; expr_newlines[i] = contains_newline; @@ -2199,20 +2195,20 @@ fn renderArrayInit( } } } - sub_expr_buffer_starts[section_exprs.len] = sub_expr_buffer.items.len; + sub_expr_buffer_starts[section_exprs.len] = sub_expr_buffer.getWritten().len; // Render exprs in current section. column_counter = 0; for (section_exprs, 0..) 
|expr, i| { const start = sub_expr_buffer_starts[i]; const end = sub_expr_buffer_starts[i + 1]; - const expr_text = sub_expr_buffer.items[start..end]; + const expr_text = sub_expr_buffer.getWritten()[start..end]; if (!expr_newlines[i]) { - try ais.writer().writeAll(expr_text); + try ais.writeAll(expr_text); } else { var by_line = std.mem.splitScalar(u8, expr_text, '\n'); var last_line_was_empty = false; - try ais.writer().writeAll(by_line.first()); + try ais.writeAll(by_line.first()); while (by_line.next()) |line| { if (std.mem.startsWith(u8, line, "//") and last_line_was_empty) { try ais.insertNewline(); @@ -2220,7 +2216,7 @@ fn renderArrayInit( try ais.maybeInsertNewline(); } last_line_was_empty = (line.len == 0); - try ais.writer().writeAll(line); + try ais.writeAll(line); } } @@ -2234,7 +2230,7 @@ fn renderArrayInit( try renderToken(r, comma, .space); // , assert(column_widths[column_counter % row_size] >= expr_widths[i]); const padding = column_widths[column_counter % row_size] - expr_widths[i]; - try ais.writer().writeByteNTimes(' ', padding); + try ais.splatByteAll(' ', padding); column_counter += 1; continue; @@ -2265,7 +2261,7 @@ fn renderContainerDecl( container_decl_node: Ast.Node.Index, container_decl: Ast.full.ContainerDecl, space: Space, -) Error!void { +) anyerror!void { const tree = r.tree; const ais = r.ais; @@ -2384,7 +2380,7 @@ fn renderAsm( r: *Render, asm_node: Ast.full.Asm, space: Space, -) Error!void { +) anyerror!void { const tree = r.tree; const ais = r.ais; @@ -2550,7 +2546,7 @@ fn renderCall( r: *Render, call: Ast.full.Call, space: Space, -) Error!void { +) anyerror!void { if (call.async_token) |async_token| { try renderToken(r, async_token, .space); } @@ -2563,7 +2559,7 @@ fn renderParamList( lparen: Ast.TokenIndex, params: []const Ast.Node.Index, space: Space, -) Error!void { +) anyerror!void { const tree = r.tree; const ais = r.ais; @@ -2616,7 +2612,7 @@ fn renderParamList( /// Render an expression, and the comma that follows it, if it is present in the source. /// If a comma is present, and `space` is `Space.comma`, render only a single comma. -fn renderExpressionComma(r: *Render, node: Ast.Node.Index, space: Space) Error!void { +fn renderExpressionComma(r: *Render, node: Ast.Node.Index, space: Space) anyerror!void { const tree = r.tree; const maybe_comma = tree.lastToken(node) + 1; if (tree.tokenTag(maybe_comma) == .comma and space != .comma) { @@ -2629,7 +2625,7 @@ fn renderExpressionComma(r: *Render, node: Ast.Node.Index, space: Space) Error!v /// Render a token, and the comma that follows it, if it is present in the source. /// If a comma is present, and `space` is `Space.comma`, render only a single comma. -fn renderTokenComma(r: *Render, token: Ast.TokenIndex, space: Space) Error!void { +fn renderTokenComma(r: *Render, token: Ast.TokenIndex, space: Space) anyerror!void { const tree = r.tree; const maybe_comma = token + 1; if (tree.tokenTag(maybe_comma) == .comma and space != .comma) { @@ -2642,7 +2638,7 @@ fn renderTokenComma(r: *Render, token: Ast.TokenIndex, space: Space) Error!void /// Render an identifier, and the comma that follows it, if it is present in the source. /// If a comma is present, and `space` is `Space.comma`, render only a single comma. 
-fn renderIdentifierComma(r: *Render, token: Ast.TokenIndex, space: Space, quote: QuoteBehavior) Error!void { +fn renderIdentifierComma(r: *Render, token: Ast.TokenIndex, space: Space, quote: QuoteBehavior) anyerror!void { const tree = r.tree; const maybe_comma = token + 1; if (tree.tokenTag(maybe_comma) == .comma and space != .comma) { @@ -2674,15 +2670,15 @@ const Space = enum { skip, }; -fn renderToken(r: *Render, token_index: Ast.TokenIndex, space: Space) Error!void { +fn renderToken(r: *Render, token_index: Ast.TokenIndex, space: Space) anyerror!void { const tree = r.tree; const ais = r.ais; const lexeme = tokenSliceForRender(tree, token_index); - try ais.writer().writeAll(lexeme); + try ais.writeAll(lexeme); try renderSpace(r, token_index, lexeme.len, space); } -fn renderTokenOverrideSpaceMode(r: *Render, token_index: Ast.TokenIndex, space: Space, override_space: Space) Error!void { +fn renderTokenOverrideSpaceMode(r: *Render, token_index: Ast.TokenIndex, space: Space, override_space: Space) anyerror!void { const tree = r.tree; const ais = r.ais; const lexeme = tokenSliceForRender(tree, token_index); @@ -2692,7 +2688,7 @@ fn renderTokenOverrideSpaceMode(r: *Render, token_index: Ast.TokenIndex, space: try renderSpace(r, token_index, lexeme.len, space); } -fn renderSpace(r: *Render, token_index: Ast.TokenIndex, lexeme_len: usize, space: Space) Error!void { +fn renderSpace(r: *Render, token_index: Ast.TokenIndex, lexeme_len: usize, space: Space) anyerror!void { const tree = r.tree; const ais = r.ais; @@ -2701,7 +2697,7 @@ fn renderSpace(r: *Render, token_index: Ast.TokenIndex, lexeme_len: usize, space if (space == .skip) return; if (space == .comma and next_token_tag != .comma) { - try ais.writer().writeByte(','); + try ais.underlying_writer.writeByte(','); } if (space == .semicolon or space == .comma) ais.enableSpaceMode(space); defer ais.disableSpaceMode(); @@ -2712,7 +2708,7 @@ fn renderSpace(r: *Render, token_index: Ast.TokenIndex, lexeme_len: usize, space ); switch (space) { .none => {}, - .space => if (!comment) try ais.writer().writeByte(' '), + .space => if (!comment) try ais.writeByte(' '), .newline => if (!comment) try ais.insertNewline(), .comma => if (next_token_tag == .comma) { @@ -2724,7 +2720,7 @@ fn renderSpace(r: *Render, token_index: Ast.TokenIndex, lexeme_len: usize, space .comma_space => if (next_token_tag == .comma) { try renderToken(r, token_index + 1, .space); } else if (!comment) { - try ais.writer().writeByte(' '); + try ais.writeByte(' '); }, .semicolon => if (next_token_tag == .semicolon) { @@ -2737,15 +2733,15 @@ fn renderSpace(r: *Render, token_index: Ast.TokenIndex, lexeme_len: usize, space } } -fn renderOnlySpace(r: *Render, space: Space) Error!void { +fn renderOnlySpace(r: *Render, space: Space) anyerror!void { const ais = r.ais; switch (space) { .none => {}, - .space => try ais.writer().writeByte(' '), + .space => try ais.writeByte(' '), .newline => try ais.insertNewline(), - .comma => try ais.writer().writeAll(",\n"), - .comma_space => try ais.writer().writeAll(", "), - .semicolon => try ais.writer().writeAll(";\n"), + .comma => try ais.writeAll(",\n"), + .comma_space => try ais.writeAll(", "), + .semicolon => try ais.writeAll(";\n"), .skip => unreachable, } } @@ -2756,13 +2752,13 @@ const QuoteBehavior = enum { eagerly_unquote_except_underscore, }; -fn renderIdentifier(r: *Render, token_index: Ast.TokenIndex, space: Space, quote: QuoteBehavior) Error!void { +fn renderIdentifier(r: *Render, token_index: Ast.TokenIndex, space: Space, quote: 
QuoteBehavior) anyerror!void { const tree = r.tree; assert(tree.tokenTag(token_index) == .identifier); const lexeme = tokenSliceForRender(tree, token_index); if (r.fixups.rename_identifiers.get(lexeme)) |mangled| { - try r.ais.writer().writeAll(mangled); + try r.ais.writeAll(mangled); try renderSpace(r, token_index, lexeme.len, space); return; } @@ -2871,15 +2867,15 @@ fn renderQuotedIdentifier(r: *Render, token_index: Ast.TokenIndex, space: Space, const lexeme = tokenSliceForRender(tree, token_index); assert(lexeme.len >= 3 and lexeme[0] == '@'); - if (!unquote) try ais.writer().writeAll("@\""); + if (!unquote) try ais.writeAll("@\""); const contents = lexeme[2 .. lexeme.len - 1]; - try renderIdentifierContents(ais.writer(), contents); - if (!unquote) try ais.writer().writeByte('\"'); + try renderIdentifierContents(ais, contents); + if (!unquote) try ais.writeByte('\"'); try renderSpace(r, token_index, lexeme.len, space); } -fn renderIdentifierContents(writer: anytype, bytes: []const u8) !void { +fn renderIdentifierContents(ais: *AutoIndentingStream, bytes: []const u8) !void { var pos: usize = 0; while (pos < bytes.len) { const byte = bytes[pos]; @@ -2892,23 +2888,23 @@ fn renderIdentifierContents(writer: anytype, bytes: []const u8) !void { .success => |codepoint| { if (codepoint <= 0x7f) { const buf = [1]u8{@as(u8, @intCast(codepoint))}; - try std.fmt.format(writer, "{}", .{std.zig.fmtEscapes(&buf)}); + try ais.print("{}", .{std.zig.fmtEscapes(&buf)}); } else { - try writer.writeAll(escape_sequence); + try ais.writeAll(escape_sequence); } }, .failure => { - try writer.writeAll(escape_sequence); + try ais.writeAll(escape_sequence); }, } }, 0x00...('\\' - 1), ('\\' + 1)...0x7f => { const buf = [1]u8{byte}; - try std.fmt.format(writer, "{}", .{std.zig.fmtEscapes(&buf)}); + try ais.print("{}", .{std.zig.fmtEscapes(&buf)}); pos += 1; }, 0x80...0xff => { - try writer.writeByte(byte); + try ais.writeByte(byte); pos += 1; }, } @@ -2942,7 +2938,7 @@ fn hasMultilineString(tree: Ast, start_token: Ast.TokenIndex, end_token: Ast.Tok /// Assumes that start is the first byte past the previous token and /// that end is the last byte before the next token. -fn renderComments(r: *Render, start: usize, end: usize) Error!bool { +fn renderComments(r: *Render, start: usize, end: usize) anyerror!bool { const tree = r.tree; const ais = r.ais; @@ -2970,7 +2966,7 @@ fn renderComments(r: *Render, start: usize, end: usize) Error!bool { } else if (index == start) { // Otherwise if the first comment is on the same line as // the token before it, prefix it with a single space. - try ais.writer().writeByte(' '); + try ais.writeByte(' '); } } @@ -2987,11 +2983,11 @@ fn renderComments(r: *Render, start: usize, end: usize) Error!bool { ais.disabled_offset = null; } else if (ais.disabled_offset == null and mem.eql(u8, comment_content, "zig fmt: off")) { // Write with the canonical single space. - try ais.writer().writeAll("// zig fmt: off\n"); + try ais.writeAll("// zig fmt: off\n"); ais.disabled_offset = index; } else { // Write the comment minus trailing whitespace. 
- try ais.writer().print("{s}\n", .{trimmed_comment}); + try ais.print("{s}\n", .{trimmed_comment}); } } @@ -3005,12 +3001,12 @@ fn renderComments(r: *Render, start: usize, end: usize) Error!bool { return index != start; } -fn renderExtraNewline(r: *Render, node: Ast.Node.Index) Error!void { +fn renderExtraNewline(r: *Render, node: Ast.Node.Index) anyerror!void { return renderExtraNewlineToken(r, r.tree.firstToken(node)); } /// Check if there is an empty line immediately before the given token. If so, render it. -fn renderExtraNewlineToken(r: *Render, token_index: Ast.TokenIndex) Error!void { +fn renderExtraNewlineToken(r: *Render, token_index: Ast.TokenIndex) anyerror!void { const tree = r.tree; const ais = r.ais; const token_start = tree.tokenStart(token_index); @@ -3038,7 +3034,7 @@ fn renderExtraNewlineToken(r: *Render, token_index: Ast.TokenIndex) Error!void { /// end_token is the token one past the last doc comment token. This function /// searches backwards from there. -fn renderDocComments(r: *Render, end_token: Ast.TokenIndex) Error!void { +fn renderDocComments(r: *Render, end_token: Ast.TokenIndex) anyerror!void { const tree = r.tree; // Search backwards for the first doc comment. if (end_token == 0) return; @@ -3069,7 +3065,7 @@ fn renderDocComments(r: *Render, end_token: Ast.TokenIndex) Error!void { } /// start_token is first container doc comment token. -fn renderContainerDocComments(r: *Render, start_token: Ast.TokenIndex) Error!void { +fn renderContainerDocComments(r: *Render, start_token: Ast.TokenIndex) anyerror!void { const tree = r.tree; var tok = start_token; while (tree.tokenTag(tok) == .container_doc_comment) : (tok += 1) { @@ -3083,7 +3079,7 @@ fn renderContainerDocComments(r: *Render, start_token: Ast.TokenIndex) Error!voi } } -fn discardAllParams(r: *Render, fn_proto_node: Ast.Node.Index) Error!void { +fn discardAllParams(r: *Render, fn_proto_node: Ast.Node.Index) anyerror!void { const tree = &r.tree; const ais = r.ais; var buf: [1]Ast.Node.Index = undefined; @@ -3092,10 +3088,9 @@ fn discardAllParams(r: *Render, fn_proto_node: Ast.Node.Index) Error!void { while (it.next()) |param| { const name_ident = param.name_token.?; assert(tree.tokenTag(name_ident) == .identifier); - const w = ais.writer(); - try w.writeAll("_ = "); - try w.writeAll(tokenSliceForRender(r.tree, name_ident)); - try w.writeAll(";\n"); + try ais.writeAll("_ = "); + try ais.writeAll(tokenSliceForRender(r.tree, name_ident)); + try ais.writeAll(";\n"); } } @@ -3132,11 +3127,11 @@ fn anythingBetween(tree: Ast, start_token: Ast.TokenIndex, end_token: Ast.TokenI return false; } -fn writeFixingWhitespace(writer: std.ArrayList(u8).Writer, slice: []const u8) Error!void { +fn writeFixingWhitespace(bw: *std.io.BufferedWriter, slice: []const u8) anyerror!void { for (slice) |byte| switch (byte) { - '\t' => try writer.writeAll(" " ** indent_delta), + '\t' => try bw.splatByteAll(' ', indent_delta), '\r' => {}, - else => try writer.writeByte(byte), + else => try bw.writeByte(byte), }; } @@ -3261,224 +3256,235 @@ fn rowSize(tree: Ast, exprs: []const Ast.Node.Index, rtoken: Ast.TokenIndex) usi /// of the appropriate indentation level for them with pushSpace/popSpace. /// This should be done whenever a scope that ends in a .semicolon or a /// .comma is introduced. 
-fn AutoIndentingStream(comptime UnderlyingWriter: type) type { - return struct { - const Self = @This(); - pub const WriteError = UnderlyingWriter.Error; - pub const Writer = std.io.Writer(*Self, WriteError, write); +const AutoIndentingStream = struct { + underlying_writer: *std.io.BufferedWriter, - pub const IndentType = enum { - normal, - after_equals, - binop, - field_access, - }; - const StackElem = struct { - indent_type: IndentType, - realized: bool, - }; - const SpaceElem = struct { - space: Space, - indent_count: usize, - }; + indent_count: usize = 0, + indent_delta: usize, + indent_stack: std.ArrayList(StackElem), + space_stack: std.ArrayList(SpaceElem), + space_mode: ?usize = null, + disable_indent_committing: usize = 0, + current_line_empty: bool = true, + /// the most recently applied indent + applied_indent: usize = 0, - underlying_writer: UnderlyingWriter, - - /// Offset into the source at which formatting has been disabled with - /// a `zig fmt: off` comment. - /// - /// If non-null, the AutoIndentingStream will not write any bytes - /// to the underlying writer. It will however continue to track the - /// indentation level. - disabled_offset: ?usize = null, - - indent_count: usize = 0, - indent_delta: usize, - indent_stack: std.ArrayList(StackElem), - space_stack: std.ArrayList(SpaceElem), - space_mode: ?usize = null, - disable_indent_committing: usize = 0, - current_line_empty: bool = true, - /// the most recently applied indent - applied_indent: usize = 0, - - pub fn init(buffer: *std.ArrayList(u8), indent_delta_: usize) Self { - return .{ - .underlying_writer = buffer.writer(), - .indent_delta = indent_delta_, - .indent_stack = std.ArrayList(StackElem).init(buffer.allocator), - .space_stack = std.ArrayList(SpaceElem).init(buffer.allocator), - }; - } - - pub fn deinit(self: *Self) void { - self.indent_stack.deinit(); - self.space_stack.deinit(); - } - - pub fn writer(self: *Self) Writer { - return .{ .context = self }; - } - - pub fn write(self: *Self, bytes: []const u8) WriteError!usize { - if (bytes.len == 0) - return @as(usize, 0); - - try self.applyIndent(); - return self.writeNoIndent(bytes); - } - - // Change the indent delta without changing the final indentation level - pub fn setIndentDelta(self: *Self, new_indent_delta: usize) void { - if (self.indent_delta == new_indent_delta) { - return; - } else if (self.indent_delta > new_indent_delta) { - assert(self.indent_delta % new_indent_delta == 0); - self.indent_count = self.indent_count * (self.indent_delta / new_indent_delta); - } else { - // assert that the current indentation (in spaces) in a multiple of the new delta - assert((self.indent_count * self.indent_delta) % new_indent_delta == 0); - self.indent_count = self.indent_count / (new_indent_delta / self.indent_delta); - } - self.indent_delta = new_indent_delta; - } - - fn writeNoIndent(self: *Self, bytes: []const u8) WriteError!usize { - if (bytes.len == 0) - return @as(usize, 0); - - if (self.disabled_offset == null) try self.underlying_writer.writeAll(bytes); - if (bytes[bytes.len - 1] == '\n') - self.resetLine(); - return bytes.len; - } - - pub fn insertNewline(self: *Self) WriteError!void { - _ = try self.writeNoIndent("\n"); - } - - fn resetLine(self: *Self) void { - self.current_line_empty = true; - - if (self.disable_indent_committing > 0) return; - - if (self.indent_stack.items.len > 0) { - // By default, we realize the most recent indentation scope. 
- var to_realize = self.indent_stack.items.len - 1; - - if (self.indent_stack.items.len >= 2 and - self.indent_stack.items[to_realize - 1].indent_type == .after_equals and - self.indent_stack.items[to_realize - 1].realized and - self.indent_stack.items[to_realize].indent_type == .binop) - { - // If we are in a .binop scope and our direct parent is .after_equals, don't indent. - // This ensures correct indentation in the below example: - // - // const foo = - // (x >= 'a' and x <= 'z') or //<-- we are here - // (x >= 'A' and x <= 'Z'); - // - return; - } - - if (self.indent_stack.items[to_realize].indent_type == .field_access) { - // Only realize the top-most field_access in a chain. - while (to_realize > 0 and self.indent_stack.items[to_realize - 1].indent_type == .field_access) - to_realize -= 1; - } - - if (self.indent_stack.items[to_realize].realized) return; - self.indent_stack.items[to_realize].realized = true; - self.indent_count += 1; - } - } - - /// Disables indentation level changes during the next newlines until re-enabled. - pub fn disableIndentCommitting(self: *Self) void { - self.disable_indent_committing += 1; - } - - pub fn enableIndentCommitting(self: *Self) void { - assert(self.disable_indent_committing > 0); - self.disable_indent_committing -= 1; - } - - pub fn pushSpace(self: *Self, space: Space) !void { - try self.space_stack.append(.{ .space = space, .indent_count = self.indent_count }); - } - - pub fn popSpace(self: *Self) void { - _ = self.space_stack.pop(); - } - - /// Sets current indentation level to be the same as that of the last pushSpace. - pub fn enableSpaceMode(self: *Self, space: Space) void { - if (self.space_stack.items.len == 0) return; - const curr = self.space_stack.getLast(); - if (curr.space != space) return; - self.space_mode = curr.indent_count; - } - - pub fn disableSpaceMode(self: *Self) void { - self.space_mode = null; - } - - pub fn lastSpaceModeIndent(self: *Self) usize { - if (self.space_stack.items.len == 0) return 0; - return self.space_stack.getLast().indent_count * self.indent_delta; - } - - /// Insert a newline unless the current line is blank - pub fn maybeInsertNewline(self: *Self) WriteError!void { - if (!self.current_line_empty) - try self.insertNewline(); - } - - /// Push default indentation - /// Doesn't actually write any indentation. - /// Just primes the stream to be able to write the correct indentation if it needs to. - pub fn pushIndent(self: *Self, indent_type: IndentType) !void { - try self.indent_stack.append(.{ .indent_type = indent_type, .realized = false }); - } - - /// Forces an indentation level to be realized. 
- pub fn forcePushIndent(self: *Self, indent_type: IndentType) !void { - try self.indent_stack.append(.{ .indent_type = indent_type, .realized = true }); - self.indent_count += 1; - } - - pub fn popIndent(self: *Self) void { - if (self.indent_stack.pop().?.realized) { - assert(self.indent_count > 0); - self.indent_count -= 1; - } - } - - pub fn indentStackEmpty(self: *Self) bool { - return self.indent_stack.items.len == 0; - } - - /// Writes ' ' bytes if the current line is empty - fn applyIndent(self: *Self) WriteError!void { - const current_indent = self.currentIndent(); - if (self.current_line_empty and current_indent > 0) { - if (self.disabled_offset == null) { - try self.underlying_writer.writeByteNTimes(' ', current_indent); - } - self.applied_indent = current_indent; - } - self.current_line_empty = false; - } - - /// Checks to see if the most recent indentation exceeds the currently pushed indents - pub fn isLineOverIndented(self: *Self) bool { - if (self.current_line_empty) return false; - return self.applied_indent > self.currentIndent(); - } - - fn currentIndent(self: *Self) usize { - const indent_count = self.space_mode orelse self.indent_count; - return indent_count * self.indent_delta; - } + pub const IndentType = enum { + normal, + after_equals, + binop, + field_access, }; -} + const StackElem = struct { + indent_type: IndentType, + realized: bool, + }; + const SpaceElem = struct { + space: Space, + indent_count: usize, + }; + + pub fn init(buffer: *std.ArrayList(u8), indent_delta_: usize) AutoIndentingStream { + return .{ + .underlying_writer = buffer.writer(), + .indent_delta = indent_delta_, + .indent_stack = std.ArrayList(StackElem).init(buffer.allocator), + .space_stack = std.ArrayList(SpaceElem).init(buffer.allocator), + }; + } + + pub fn deinit(self: *AutoIndentingStream) void { + self.indent_stack.deinit(); + self.space_stack.deinit(); + } + + pub fn writeAll(ais: *AutoIndentingStream, bytes: []const u8) anyerror!void { + if (bytes.len == 0) return; + try ais.applyIndent(); + if (ais.disabled_offset == null) try ais.underlying_writer.writeAll(bytes); + if (bytes[bytes.len - 1] == '\n') ais.resetLine(); + } + + pub fn print(ais: *AutoIndentingStream, comptime format: []const u8, args: anytype) anyerror!void { + comptime assert(format[format.len - 1] != '}'); + try ais.applyIndent(); + if (ais.disabled_offset == null) try ais.underlying_writer.print(format, args); + if (format[format.len - 1] == '\n') ais.resetLine(); + } + + pub fn writeByte(ais: *AutoIndentingStream, byte: u8) anyerror!void { + try ais.applyIndent(); + if (ais.disabled_offset == null) try ais.underlying_writer.writeByte(byte); + assert(byte != '\n'); + } + + pub fn splatByteAll(ais: *AutoIndentingStream, byte: u8, n: usize) anyerror!void { + assert(byte != '\n'); + try ais.applyIndent(); + if (ais.disabled_offset == null) try ais.underlying_writer.splatByteAll(byte, n); + } + + // Change the indent delta without changing the final indentation level + pub fn setIndentDelta(ais: *AutoIndentingStream, new_indent_delta: usize) void { + if (ais.indent_delta == new_indent_delta) { + return; + } else if (ais.indent_delta > new_indent_delta) { + assert(ais.indent_delta % new_indent_delta == 0); + ais.indent_count = ais.indent_count * (ais.indent_delta / new_indent_delta); + } else { + // assert that the current indentation (in spaces) in a multiple of the new delta + assert((ais.indent_count * ais.indent_delta) % new_indent_delta == 0); + ais.indent_count = ais.indent_count / (new_indent_delta / 
ais.indent_delta); + } + ais.indent_delta = new_indent_delta; + } + + pub fn insertNewline(ais: *AutoIndentingStream) anyerror!void { + if (ais.disabled_offset == null) try ais.underlying_writer.writeByte('\n'); + ais.resetLine(); + } + + /// Insert a newline unless the current line is blank + pub fn maybeInsertNewline(ais: *AutoIndentingStream) anyerror!void { + if (!ais.current_line_empty) + try ais.insertNewline(); + } + + /// Push an indent that is automatically popped after being applied + pub fn pushIndentOneShot(ais: *AutoIndentingStream) void { + ais.indent_one_shot_count += 1; + ais.pushIndent(); + } + + /// Turns all one-shot indents into regular indents + /// Returns number of indents that must now be manually popped + pub fn lockOneShotIndent(ais: *AutoIndentingStream) usize { + const locked_count = ais.indent_one_shot_count; + ais.indent_one_shot_count = 0; + return locked_count; + } + + /// Push an indent that should not take effect until the next line + pub fn pushIndentNextLine(ais: *AutoIndentingStream) void { + ais.indent_next_line += 1; + ais.pushIndent(); + } + + /// Checks to see if the most recent indentation exceeds the currently pushed indents + pub fn isLineOverIndented(ais: *AutoIndentingStream) bool { + if (ais.current_line_empty) return false; + return ais.applied_indent > ais.currentIndent(); + } + + fn resetLine(ais: *AutoIndentingStream) void { + ais.current_line_empty = true; + + if (ais.disable_indent_committing > 0) return; + + if (ais.indent_stack.items.len > 0) { + // By default, we realize the most recent indentation scope. + var to_realize = ais.indent_stack.items.len - 1; + + if (ais.indent_stack.items.len >= 2 and + ais.indent_stack.items[to_realize - 1].indent_type == .after_equals and + ais.indent_stack.items[to_realize - 1].realized and + ais.indent_stack.items[to_realize].indent_type == .binop) + { + // If we are in a .binop scope and our direct parent is .after_equals, don't indent. + // This ensures correct indentation in the below example: + // + // const foo = + // (x >= 'a' and x <= 'z') or //<-- we are here + // (x >= 'A' and x <= 'Z'); + // + return; + } + + if (ais.indent_stack.items[to_realize].indent_type == .field_access) { + // Only realize the top-most field_access in a chain. + while (to_realize > 0 and ais.indent_stack.items[to_realize - 1].indent_type == .field_access) + to_realize -= 1; + } + + if (ais.indent_stack.items[to_realize].realized) return; + ais.indent_stack.items[to_realize].realized = true; + ais.indent_count += 1; + } + } + + /// Disables indentation level changes during the next newlines until re-enabled. + pub fn disableIndentCommitting(ais: *AutoIndentingStream) void { + ais.disable_indent_committing += 1; + } + + pub fn enableIndentCommitting(ais: *AutoIndentingStream) void { + assert(ais.disable_indent_committing > 0); + ais.disable_indent_committing -= 1; + } + + pub fn pushSpace(ais: *AutoIndentingStream, space: Space) !void { + try ais.space_stack.append(.{ .space = space, .indent_count = ais.indent_count }); + } + + pub fn popSpace(ais: *AutoIndentingStream) void { + _ = ais.space_stack.pop(); + } + + /// Sets current indentation level to be the same as that of the last pushSpace. 
+ pub fn enableSpaceMode(ais: *AutoIndentingStream, space: Space) void { + if (ais.space_stack.items.len == 0) return; + const curr = ais.space_stack.getLast(); + if (curr.space != space) return; + ais.space_mode = curr.indent_count; + } + + pub fn disableSpaceMode(ais: *AutoIndentingStream) void { + ais.space_mode = null; + } + + pub fn lastSpaceModeIndent(ais: *AutoIndentingStream) usize { + if (ais.space_stack.items.len == 0) return 0; + return ais.space_stack.getLast().indent_count * ais.indent_delta; + } + + /// Push default indentation + /// Doesn't actually write any indentation. + /// Just primes the stream to be able to write the correct indentation if it needs to. + pub fn pushIndent(ais: *AutoIndentingStream, indent_type: IndentType) !void { + try ais.indent_stack.append(.{ .indent_type = indent_type, .realized = false }); + } + + /// Forces an indentation level to be realized. + pub fn forcePushIndent(ais: *AutoIndentingStream, indent_type: IndentType) !void { + try ais.indent_stack.append(.{ .indent_type = indent_type, .realized = true }); + ais.indent_count += 1; + } + + pub fn popIndent(ais: *AutoIndentingStream) void { + if (ais.indent_stack.pop().?.realized) { + assert(ais.indent_count > 0); + ais.indent_count -= 1; + } + } + + pub fn indentStackEmpty(ais: *AutoIndentingStream) bool { + return ais.indent_stack.items.len == 0; + } + + /// Writes ' ' bytes if the current line is empty + fn applyIndent(ais: *AutoIndentingStream) anyerror!void { + const current_indent = ais.currentIndent(); + if (ais.current_line_empty and current_indent > 0) { + if (ais.disabled_offset == null) { + try ais.underlying_writer.writeByteNTimes(' ', current_indent); + } + ais.applied_indent = current_indent; + } + ais.current_line_empty = false; + } + + fn currentIndent(ais: *AutoIndentingStream) usize { + const indent_count = ais.space_mode orelse ais.indent_count; + return indent_count * ais.indent_delta; + } +}; diff --git a/lib/std/zig/string_literal.zig b/lib/std/zig/string_literal.zig index 972219abbd..5df4777f7d 100644 --- a/lib/std/zig/string_literal.zig +++ b/lib/std/zig/string_literal.zig @@ -322,9 +322,9 @@ test parseCharLiteral { ); } -/// Parses `bytes` as a Zig string literal and writes the result to the std.io.Writer type. +/// Parses `bytes` as a Zig string literal and writes the result to the `std.io.Writer` type. /// Asserts `bytes` has '"' at beginning and end. 
-pub fn parseWrite(writer: anytype, bytes: []const u8) error{OutOfMemory}!Result { +pub fn parseWrite(writer: *std.io.BufferedWriter, bytes: []const u8) anyerror!Result { assert(bytes.len >= 2 and bytes[0] == '"' and bytes[bytes.len - 1] == '"'); var index: usize = 1; @@ -340,18 +340,18 @@ pub fn parseWrite(writer: anytype, bytes: []const u8) error{OutOfMemory}!Result if (bytes[escape_char_index] == 'u') { var buf: [4]u8 = undefined; const len = utf8Encode(codepoint, &buf) catch { - return Result{ .failure = .{ .invalid_unicode_codepoint = escape_char_index + 1 } }; + return .{ .failure = .{ .invalid_unicode_codepoint = escape_char_index + 1 } }; }; try writer.writeAll(buf[0..len]); } else { try writer.writeByte(@as(u8, @intCast(codepoint))); } }, - .failure => |err| return Result{ .failure = err }, + .failure => |err| return .{ .failure = err }, } }, - '\n' => return Result{ .failure = .{ .invalid_character = index } }, - '"' => return Result.success, + '\n' => return .{ .failure = .{ .invalid_character = index } }, + '"' => return .success, else => { try writer.writeByte(b); index += 1; @@ -363,10 +363,12 @@ pub fn parseWrite(writer: anytype, bytes: []const u8) error{OutOfMemory}!Result /// Higher level API. Does not return extra info about parse errors. /// Caller owns returned memory. pub fn parseAlloc(allocator: std.mem.Allocator, bytes: []const u8) ParseError![]u8 { - var buf = std.ArrayList(u8).init(allocator); + var buf: std.io.AllocatingWriter = undefined; + const bw = buf.init(allocator); defer buf.deinit(); - - switch (try parseWrite(buf.writer(), bytes)) { + // TODO try @errorCast(...) + const result = parseWrite(bw, bytes) catch |err| return @errorCast(err); + switch (result) { .success => return buf.toOwnedSlice(), .failure => return error.InvalidLiteral, } diff --git a/src/Air/print.zig b/src/Air/print.zig index 53efa72356..d99b53abd5 100644 --- a/src/Air/print.zig +++ b/src/Air/print.zig @@ -1,6 +1,5 @@ const std = @import("std"); const Allocator = std.mem.Allocator; -const fmtIntSizeBin = std.fmt.fmtIntSizeBin; const build_options = @import("build_options"); const Zcu = @import("../Zcu.zig"); @@ -25,20 +24,20 @@ pub fn write(air: Air, stream: anytype, pt: Zcu.PerThread, liveness: ?Air.Livene // zig fmt: off stream.print( - \\# Total AIR+Liveness bytes: {} - \\# AIR Instructions: {d} ({}) - \\# AIR Extra Data: {d} ({}) - \\# Liveness tomb_bits: {} - \\# Liveness Extra Data: {d} ({}) - \\# Liveness special table: {d} ({}) + \\# Total AIR+Liveness bytes: {Bi} + \\# AIR Instructions: {d} ({Bi}) + \\# AIR Extra Data: {d} ({Bi}) + \\# Liveness tomb_bits: {Bi} + \\# Liveness Extra Data: {d} ({Bi}) + \\# Liveness special table: {d} ({Bi}) \\ , .{ - fmtIntSizeBin(total_bytes), - air.instructions.len, fmtIntSizeBin(instruction_bytes), - air.extra.items.len, fmtIntSizeBin(extra_bytes), - fmtIntSizeBin(tomb_bytes), - if (liveness) |l| l.extra.len else 0, fmtIntSizeBin(liveness_extra_bytes), - if (liveness) |l| l.special.count() else 0, fmtIntSizeBin(liveness_special_bytes), + total_bytes, + air.instructions.len, instruction_bytes, + air.extra.items.len, extra_bytes, + tomb_bytes, + if (liveness) |l| l.extra.len else 0, liveness_extra_bytes, + if (liveness) |l| l.special.count() else 0, liveness_special_bytes, }) catch return; // zig fmt: on diff --git a/src/Builtin.zig b/src/Builtin.zig index d68d49c253..33518723fb 100644 --- a/src/Builtin.zig +++ b/src/Builtin.zig @@ -51,7 +51,7 @@ pub fn append(opts: @This(), buffer: *std.ArrayList(u8)) Allocator.Error!void { const zig_backend = 
opts.zig_backend; @setEvalBranchQuota(4000); - try buffer.writer().print( + try buffer.print( \\const std = @import("std"); \\/// Zig version. When writing code that supports multiple versions of Zig, prefer \\/// feature detection (i.e. with `@hasDecl` or `@hasField`) over version checks. @@ -89,10 +89,10 @@ pub fn append(opts: @This(), buffer: *std.ArrayList(u8)) Allocator.Error!void { const index = @as(std.Target.Cpu.Feature.Set.Index, @intCast(index_usize)); const is_enabled = target.cpu.features.isEnabled(index); if (is_enabled) { - try buffer.writer().print(" .{p_},\n", .{std.zig.fmtId(feature.name)}); + try buffer.print(" .{p_},\n", .{std.zig.fmtId(feature.name)}); } } - try buffer.writer().print( + try buffer.print( \\ }}), \\}}; \\pub const os: std.Target.Os = .{{ @@ -104,7 +104,7 @@ pub fn append(opts: @This(), buffer: *std.ArrayList(u8)) Allocator.Error!void { switch (target.os.versionRange()) { .none => try buffer.appendSlice(" .none = {} },\n"), - .semver => |semver| try buffer.writer().print( + .semver => |semver| try buffer.print( \\ .semver = .{{ \\ .min = .{{ \\ .major = {}, @@ -127,7 +127,7 @@ pub fn append(opts: @This(), buffer: *std.ArrayList(u8)) Allocator.Error!void { semver.max.minor, semver.max.patch, }), - .linux => |linux| try buffer.writer().print( + .linux => |linux| try buffer.print( \\ .linux = .{{ \\ .range = .{{ \\ .min = .{{ @@ -164,7 +164,7 @@ pub fn append(opts: @This(), buffer: *std.ArrayList(u8)) Allocator.Error!void { linux.android, }), - .hurd => |hurd| try buffer.writer().print( + .hurd => |hurd| try buffer.print( \\ .hurd = .{{ \\ .range = .{{ \\ .min = .{{ @@ -198,7 +198,7 @@ pub fn append(opts: @This(), buffer: *std.ArrayList(u8)) Allocator.Error!void { hurd.glibc.minor, hurd.glibc.patch, }), - .windows => |windows| try buffer.writer().print( + .windows => |windows| try buffer.print( \\ .windows = .{{ \\ .min = {c}, \\ .max = {c}, @@ -217,7 +217,7 @@ pub fn append(opts: @This(), buffer: *std.ArrayList(u8)) Allocator.Error!void { ); if (target.dynamic_linker.get()) |dl| { - try buffer.writer().print( + try buffer.print( \\ .dynamic_linker = .init("{s}"), \\}}; \\ @@ -237,7 +237,7 @@ pub fn append(opts: @This(), buffer: *std.ArrayList(u8)) Allocator.Error!void { // knows libc will provide it, and likewise c.zig will not export memcpy. 
const link_libc = opts.link_libc; - try buffer.writer().print( + try buffer.print( \\pub const object_format: std.Target.ObjectFormat = .{p_}; \\pub const mode: std.builtin.OptimizeMode = .{p_}; \\pub const link_libc = {}; @@ -269,7 +269,7 @@ pub fn append(opts: @This(), buffer: *std.ArrayList(u8)) Allocator.Error!void { }); if (target.os.tag == .wasi) { - try buffer.writer().print( + try buffer.print( \\pub const wasi_exec_model: std.builtin.WasiExecModel = .{p_}; \\ , .{std.zig.fmtId(@tagName(opts.wasi_exec_model))}); diff --git a/src/Package/Fetch.zig b/src/Package/Fetch.zig index 9acfe1d97a..270d40f134 100644 --- a/src/Package/Fetch.zig +++ b/src/Package/Fetch.zig @@ -1643,10 +1643,8 @@ fn dumpHashInfo(all_files: []const *const HashedFile) !void { const w = bw.writer(); for (all_files) |hashed_file| { - try w.print("{s}: {s}: {s}\n", .{ - @tagName(hashed_file.kind), - std.fmt.fmtSliceHexLower(&hashed_file.hash), - hashed_file.normalized_path, + try w.print("{s}: {x}: {s}\n", .{ + @tagName(hashed_file.kind), &hashed_file.hash, hashed_file.normalized_path, }); } diff --git a/src/Package/Fetch/git.zig b/src/Package/Fetch/git.zig index de3912041f..1d4d0075c5 100644 --- a/src/Package/Fetch/git.zig +++ b/src/Package/Fetch/git.zig @@ -127,7 +127,7 @@ pub const Oid = union(Format) { ) @TypeOf(writer).Error!void { _ = fmt; _ = options; - try writer.print("{}", .{std.fmt.fmtSliceHexLower(oid.slice())}); + try writer.print("{x}", .{oid.slice()}); } pub fn slice(oid: *const Oid) []const u8 { diff --git a/src/Zcu/PerThread.zig b/src/Zcu/PerThread.zig index 7548fbff31..052113c5e3 100644 --- a/src/Zcu/PerThread.zig +++ b/src/Zcu/PerThread.zig @@ -477,11 +477,8 @@ pub fn updateZirRefs(pt: Zcu.PerThread) Allocator.Error!void { if (std.zig.srcHashEql(old_hash, new_hash)) { break :hash_changed; } - log.debug("hash for (%{d} -> %{d}) changed: {} -> {}", .{ - old_inst, - new_inst, - std.fmt.fmtSliceHexLower(&old_hash), - std.fmt.fmtSliceHexLower(&new_hash), + log.debug("hash for (%{d} -> %{d}) changed: {x} -> {x}", .{ + old_inst, new_inst, &old_hash, &new_hash, }); } // The source hash associated with this instruction changed - invalidate relevant dependencies. 
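The hunks above (Package/Fetch.zig, Package/Fetch/git.zig, Zcu/PerThread.zig) and the earlier Air/print.zig change follow one migration pattern: the std.fmt.fmtSliceHexLower adapter is dropped in favor of applying {x} directly to byte slices or arrays, and fmtIntSizeBin gives way to the {Bi} specifier. A minimal before/after sketch of a call site, assuming the post-refactor std.fmt semantics these replacements imply (the digest name and values are illustrative, not taken from the patch):

    const std = @import("std");
    const log = std.log;

    fn logDigestAndSize(digest: *const [32]u8, total_bytes: usize) void {
        // Before this change, adapters wrapped the values:
        //   log.debug("digest: {}", .{std.fmt.fmtSliceHexLower(digest)});
        //   log.debug("size: {}", .{std.fmt.fmtIntSizeBin(total_bytes)});
        // After, the format specifier itself carries the intent:
        log.debug("digest: {x}", .{digest}); // lowercase hex of the byte array
        log.debug("size: {Bi}", .{total_bytes}); // binary-prefixed (KiB/MiB) size
    }
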
diff --git a/src/arch/x86_64/encoder.zig b/src/arch/x86_64/encoder.zig index 8d07dce83a..ab3eeaa586 100644 --- a/src/arch/x86_64/encoder.zig +++ b/src/arch/x86_64/encoder.zig @@ -1205,9 +1205,9 @@ pub const Vex = struct { fn expectEqualHexStrings(expected: []const u8, given: []const u8, assembly: []const u8) !void { assert(expected.len > 0); if (std.mem.eql(u8, expected, given)) return; - const expected_fmt = try std.fmt.allocPrint(testing.allocator, "{x}", .{std.fmt.fmtSliceHexLower(expected)}); + const expected_fmt = try std.fmt.allocPrint(testing.allocator, "{x}", .{expected}); defer testing.allocator.free(expected_fmt); - const given_fmt = try std.fmt.allocPrint(testing.allocator, "{x}", .{std.fmt.fmtSliceHexLower(given)}); + const given_fmt = try std.fmt.allocPrint(testing.allocator, "{x}", .{given}); defer testing.allocator.free(given_fmt); const idx = std.mem.indexOfDiff(u8, expected_fmt, given_fmt).?; const padding = try testing.allocator.alloc(u8, idx + 5); diff --git a/src/fmt.zig b/src/fmt.zig index 6b1d73951e..b0336823b9 100644 --- a/src/fmt.zig +++ b/src/fmt.zig @@ -190,41 +190,7 @@ pub fn run( } } -const FmtError = error{ - SystemResources, - OperationAborted, - IoPending, - BrokenPipe, - Unexpected, - WouldBlock, - Canceled, - FileClosed, - DestinationAddressRequired, - DiskQuota, - FileTooBig, - MessageTooBig, - InputOutput, - NoSpaceLeft, - AccessDenied, - OutOfMemory, - RenameAcrossMountPoints, - ReadOnlyFileSystem, - LinkQuotaExceeded, - FileBusy, - EndOfStream, - Unseekable, - NotOpenForWriting, - UnsupportedEncoding, - InvalidEncoding, - ConnectionResetByPeer, - SocketNotConnected, - LockViolation, - NetNameDeleted, - InvalidArgument, - ProcessNotFound, -} || fs.File.OpenError; - -fn fmtPath(fmt: *Fmt, file_path: []const u8, check_mode: bool, dir: fs.Dir, sub_path: []const u8) FmtError!void { +fn fmtPath(fmt: *Fmt, file_path: []const u8, check_mode: bool, dir: fs.Dir, sub_path: []const u8) anyerror!void { fmtPathFile(fmt, file_path, check_mode, dir, sub_path) catch |err| switch (err) { error.IsDir, error.AccessDenied => return fmtPathDir(fmt, file_path, check_mode, dir, sub_path), else => { @@ -241,7 +207,7 @@ fn fmtPathDir( check_mode: bool, parent_dir: fs.Dir, parent_sub_path: []const u8, -) FmtError!void { +) anyerror!void { var dir = try parent_dir.openDir(parent_sub_path, .{ .iterate = true }); defer dir.close(); @@ -277,7 +243,7 @@ fn fmtPathFile( check_mode: bool, dir: fs.Dir, sub_path: []const u8, -) FmtError!void { +) anyerror!void { const source_file = try dir.openFile(sub_path, .{}); var file_closed = false; errdefer if (!file_closed) source_file.close(); diff --git a/src/libs/mingw.zig b/src/libs/mingw.zig index c978a651a0..1fcdb55538 100644 --- a/src/libs/mingw.zig +++ b/src/libs/mingw.zig @@ -388,7 +388,7 @@ pub fn libExists( /// This function body is verbose but all it does is test 3 different paths and /// see if a .def file exists. fn findDef( - allocator: Allocator, + gpa: Allocator, target: *const std.Target, zig_lib_directory: Cache.Directory, lib_name: []const u8, @@ -401,7 +401,8 @@ fn findDef( else => unreachable, }; - var override_path = std.ArrayList(u8).init(allocator); + var override_path: std.io.AllocatingWriter = undefined; + const override_path_writer = override_path.init(gpa); defer override_path.deinit(); const s = path.sep_str; @@ -410,11 +411,11 @@ fn findDef( // Try the archtecture-specific path first. 
const fmt_path = "libc" ++ s ++ "mingw" ++ s ++ "{s}" ++ s ++ "{s}.def"; if (zig_lib_directory.path) |p| { - try override_path.writer().print("{s}" ++ s ++ fmt_path, .{ p, lib_path, lib_name }); + try override_path_writer.print("{s}" ++ s ++ fmt_path, .{ p, lib_path, lib_name }); } else { - try override_path.writer().print(fmt_path, .{ lib_path, lib_name }); + try override_path_writer.print(fmt_path, .{ lib_path, lib_name }); } - if (std.fs.cwd().access(override_path.items, .{})) |_| { + if (std.fs.cwd().access(override_path.getWritten(), .{})) |_| { return override_path.toOwnedSlice(); } else |err| switch (err) { error.FileNotFound => {}, @@ -424,14 +425,14 @@ fn findDef( { // Try the generic version. - override_path.shrinkRetainingCapacity(0); + override_path.clearRetainingCapacity(); const fmt_path = "libc" ++ s ++ "mingw" ++ s ++ "lib-common" ++ s ++ "{s}.def"; if (zig_lib_directory.path) |p| { - try override_path.writer().print("{s}" ++ s ++ fmt_path, .{ p, lib_name }); + try override_path_writer.print("{s}" ++ s ++ fmt_path, .{ p, lib_name }); } else { - try override_path.writer().print(fmt_path, .{lib_name}); + try override_path_writer.print(fmt_path, .{lib_name}); } - if (std.fs.cwd().access(override_path.items, .{})) |_| { + if (std.fs.cwd().access(override_path.getWritten(), .{})) |_| { return override_path.toOwnedSlice(); } else |err| switch (err) { error.FileNotFound => {}, @@ -441,14 +442,14 @@ fn findDef( { // Try the generic version and preprocess it. - override_path.shrinkRetainingCapacity(0); + override_path.clearRetainingCapacity(); const fmt_path = "libc" ++ s ++ "mingw" ++ s ++ "lib-common" ++ s ++ "{s}.def.in"; if (zig_lib_directory.path) |p| { - try override_path.writer().print("{s}" ++ s ++ fmt_path, .{ p, lib_name }); + try override_path_writer.print("{s}" ++ s ++ fmt_path, .{ p, lib_name }); } else { - try override_path.writer().print(fmt_path, .{lib_name}); + try override_path_writer.print(fmt_path, .{lib_name}); } - if (std.fs.cwd().access(override_path.items, .{})) |_| { + if (std.fs.cwd().access(override_path.getWritten(), .{})) |_| { return override_path.toOwnedSlice(); } else |err| switch (err) { error.FileNotFound => {}, diff --git a/src/link/Coff.zig b/src/link/Coff.zig index ea5d6a901c..ff71b0ca63 100644 --- a/src/link/Coff.zig +++ b/src/link/Coff.zig @@ -830,8 +830,8 @@ fn debugMem(allocator: Allocator, handle: std.process.Child.Id, pvaddr: std.os.w const buffer = try allocator.alloc(u8, code.len); defer allocator.free(buffer); const memread = try std.os.windows.ReadProcessMemory(handle, pvaddr, buffer); - log.debug("to write: {x}", .{std.fmt.fmtSliceHexLower(code)}); - log.debug("in memory: {x}", .{std.fmt.fmtSliceHexLower(memread)}); + log.debug("to write: {x}", .{code}); + log.debug("in memory: {x}", .{memread}); } fn writeMemProtected(handle: std.process.Child.Id, pvaddr: std.os.windows.LPVOID, code: []const u8) !void { diff --git a/src/link/MachO/dyld_info/Trie.zig b/src/link/MachO/dyld_info/Trie.zig index b45651eb67..8224dc8424 100644 --- a/src/link/MachO/dyld_info/Trie.zig +++ b/src/link/MachO/dyld_info/Trie.zig @@ -336,9 +336,9 @@ const Edge = struct { fn expectEqualHexStrings(expected: []const u8, given: []const u8) !void { assert(expected.len > 0); if (mem.eql(u8, expected, given)) return; - const expected_fmt = try std.fmt.allocPrint(testing.allocator, "{x}", .{std.fmt.fmtSliceHexLower(expected)}); + const expected_fmt = try std.fmt.allocPrint(testing.allocator, "{x}", .{expected}); defer testing.allocator.free(expected_fmt); - const given_fmt 
= try std.fmt.allocPrint(testing.allocator, "{x}", .{std.fmt.fmtSliceHexLower(given)}); + const given_fmt = try std.fmt.allocPrint(testing.allocator, "{x}", .{given}); defer testing.allocator.free(given_fmt); const idx = mem.indexOfDiff(u8, expected_fmt, given_fmt).?; const padding = try testing.allocator.alloc(u8, idx + 5); diff --git a/src/link/Wasm/Flush.zig b/src/link/Wasm/Flush.zig index 60f5971e40..7bd9f89a42 100644 --- a/src/link/Wasm/Flush.zig +++ b/src/link/Wasm/Flush.zig @@ -1035,20 +1035,14 @@ pub fn finish(f: *Flush, wasm: *Wasm) !void { var id: [16]u8 = undefined; std.crypto.hash.sha3.TurboShake128(null).hash(binary_bytes.items, &id, .{}); var uuid: [36]u8 = undefined; - _ = try std.fmt.bufPrint(&uuid, "{s}-{s}-{s}-{s}-{s}", .{ - std.fmt.fmtSliceHexLower(id[0..4]), - std.fmt.fmtSliceHexLower(id[4..6]), - std.fmt.fmtSliceHexLower(id[6..8]), - std.fmt.fmtSliceHexLower(id[8..10]), - std.fmt.fmtSliceHexLower(id[10..]), + _ = try std.fmt.bufPrint(&uuid, "{x}-{x}-{x}-{x}-{x}", .{ + id[0..4], id[4..6], id[6..8], id[8..10], id[10..], }); try emitBuildIdSection(gpa, binary_bytes, &uuid); }, .hexstring => |hs| { var buffer: [32 * 2]u8 = undefined; - const str = std.fmt.bufPrint(&buffer, "{s}", .{ - std.fmt.fmtSliceHexLower(hs.toSlice()), - }) catch unreachable; + const str = std.fmt.bufPrint(&buffer, "{x}", .{hs.toSlice()}) catch unreachable; try emitBuildIdSection(gpa, binary_bytes, str); }, else => |mode| { diff --git a/src/main.zig b/src/main.zig index e974621e5e..6e0b758f59 100644 --- a/src/main.zig +++ b/src/main.zig @@ -65,6 +65,9 @@ pub fn wasi_cwd() std.os.wasi.fd_t { const fatal = std.process.fatal; +/// This can be global since stdout is a singleton. +var stdout_buffer: [4096]u8 = undefined; + /// Shaming all the locations that inappropriately use an O(N) search algorithm. /// Please delete this and fix the compilation errors! 
pub const @"bad O(N)" = void; @@ -338,9 +341,7 @@ fn mainArgs(gpa: Allocator, arena: Allocator, args: []const []const u8) !void { return cmdInit(gpa, arena, cmd_args); } else if (mem.eql(u8, cmd, "targets")) { dev.check(.targets_command); - const host = std.zig.resolveTargetQueryOrFatal(.{}); - const stdout = io.getStdOut().writer(); - return @import("print_targets.zig").cmdTargets(arena, cmd_args, stdout, &host); + return @import("print_targets.zig").cmdTargets(arena, cmd_args); } else if (mem.eql(u8, cmd, "version")) { dev.check(.version_command); try std.io.getStdOut().writeAll(build_options.version ++ "\n"); @@ -351,7 +352,7 @@ fn mainArgs(gpa: Allocator, arena: Allocator, args: []const []const u8) !void { } else if (mem.eql(u8, cmd, "env")) { dev.check(.env_command); verifyLibcxxCorrectlyLinked(); - return @import("print_env.zig").cmdEnv(arena, cmd_args, io.getStdOut().writer()); + return @import("print_env.zig").cmdEnv(arena, cmd_args); } else if (mem.eql(u8, cmd, "reduce")) { return jitCmd(gpa, arena, cmd_args, .{ .cmd_name = "reduce", @@ -3334,9 +3335,8 @@ fn buildOutputType( var bin_digest: Cache.BinDigest = undefined; hasher.final(&bin_digest); - const sub_path = try std.fmt.allocPrint(arena, "tmp" ++ sep ++ "{s}-stdin{s}", .{ - std.fmt.fmtSliceHexLower(&bin_digest), - ext.canonicalName(target), + const sub_path = try std.fmt.allocPrint(arena, "tmp" ++ sep ++ "{x}-stdin{s}", .{ + &bin_digest, ext.canonicalName(target), }); try dirs.local_cache.handle.rename(dump_path, sub_path); @@ -6061,6 +6061,11 @@ fn cmdAstCheck( const tree = try Ast.parse(arena, source, mode); + var bw: std.io.BufferedWriter = .{ + .unbuffered_writer = io.getStdOut().writer(), + .buffer = &stdout_buffer, + }; + switch (mode) { .zig => { const zir = try AstGen.generate(arena, tree); @@ -6103,31 +6108,30 @@ fn cmdAstCheck( const extra_bytes = zir.extra.len * @sizeOf(u32); const total_bytes = @sizeOf(Zir) + instruction_bytes + extra_bytes + zir.string_bytes.len * @sizeOf(u8); - const stdout = io.getStdOut(); - const fmtIntSizeBin = std.fmt.fmtIntSizeBin; // zig fmt: off - try stdout.writer().print( - \\# Source bytes: {} - \\# Tokens: {} ({}) - \\# AST Nodes: {} ({}) - \\# Total ZIR bytes: {} - \\# Instructions: {d} ({}) + try bw.print( + \\# Source bytes: {Bi} + \\# Tokens: {} ({Bi}) + \\# AST Nodes: {} ({Bi}) + \\# Total ZIR bytes: {Bi} + \\# Instructions: {d} ({Bi}) \\# String Table Bytes: {} - \\# Extra Data Items: {d} ({}) + \\# Extra Data Items: {d} ({Bi}) \\ , .{ - fmtIntSizeBin(source.len), - tree.tokens.len, fmtIntSizeBin(token_bytes), - tree.nodes.len, fmtIntSizeBin(tree_bytes), - fmtIntSizeBin(total_bytes), - zir.instructions.len, fmtIntSizeBin(instruction_bytes), - fmtIntSizeBin(zir.string_bytes.len), - zir.extra.len, fmtIntSizeBin(extra_bytes), + source.len, + tree.tokens.len, token_bytes, + tree.nodes.len, tree_bytes, + total_bytes, + zir.instructions.len, instruction_bytes, + zir.string_bytes.len, + zir.extra.len, extra_bytes, }); // zig fmt: on } - try @import("print_zir.zig").renderAsTextToFile(arena, tree, zir, io.getStdOut()); + try @import("print_zir.zig").renderAsText(arena, tree, zir, &bw); + try bw.flush(); if (zir.hasCompileErrors()) { process.exit(1); @@ -6154,7 +6158,8 @@ fn cmdAstCheck( fatal("-t option only available in builds of zig with debug extensions", .{}); } - try @import("print_zoir.zig").renderToFile(zoir, arena, io.getStdOut()); + try @import("print_zoir.zig").renderToWriter(zoir, arena, &bw); + try bw.flush(); return cleanExit(); }, } @@ -6275,11 +6280,13 @@ fn 
detectNativeCpuWithLLVM( } fn printCpu(cpu: std.Target.Cpu) !void { - var bw = io.bufferedWriter(io.getStdOut().writer()); - const stdout = bw.writer(); + var bw: std.io.BufferedWriter = .{ + .unbuffered_writer = io.getStdOut().writer(), + .buffer = &stdout_buffer, + }; if (cpu.model.llvm_name) |llvm_name| { - try stdout.print("{s}\n", .{llvm_name}); + try bw.print("{s}\n", .{llvm_name}); } const all_features = cpu.arch.allFeaturesList(); @@ -6288,7 +6295,7 @@ fn printCpu(cpu: std.Target.Cpu) !void { const index: std.Target.Cpu.Feature.Set.Index = @intCast(index_usize); const is_enabled = cpu.features.isEnabled(index); const plus_or_minus = "-+"[@intFromBool(is_enabled)]; - try stdout.print("{c}{s}\n", .{ plus_or_minus, llvm_name }); + try bw.print("{c}{s}\n", .{ plus_or_minus, llvm_name }); } try bw.flush(); @@ -6356,6 +6363,11 @@ fn cmdDumpZir( const zir = try Zcu.loadZirCache(arena, f); + var bw: std.io.BufferedWriter = .{ + .unbuffered_writer = io.getStdOut().writer(), + .buffer = &stdout_buffer, + }; + { const instruction_bytes = zir.instructions.len * // Here we don't use @sizeOf(Zir.Inst.Data) because it would include @@ -6364,25 +6376,24 @@ fn cmdDumpZir( const extra_bytes = zir.extra.len * @sizeOf(u32); const total_bytes = @sizeOf(Zir) + instruction_bytes + extra_bytes + zir.string_bytes.len * @sizeOf(u8); - const stdout = io.getStdOut(); - const fmtIntSizeBin = std.fmt.fmtIntSizeBin; // zig fmt: off - try stdout.writer().print( - \\# Total ZIR bytes: {} - \\# Instructions: {d} ({}) - \\# String Table Bytes: {} - \\# Extra Data Items: {d} ({}) + try bw.print( + \\# Total ZIR bytes: {Bi} + \\# Instructions: {d} ({Bi}) + \\# String Table Bytes: {Bi} + \\# Extra Data Items: {d} ({Bi}) \\ , .{ - fmtIntSizeBin(total_bytes), - zir.instructions.len, fmtIntSizeBin(instruction_bytes), - fmtIntSizeBin(zir.string_bytes.len), - zir.extra.len, fmtIntSizeBin(extra_bytes), + total_bytes, + zir.instructions.len, instruction_bytes, + zir.string_bytes.len, + zir.extra.len, extra_bytes, }); // zig fmt: on } - return @import("print_zir.zig").renderAsTextToFile(arena, null, zir, io.getStdOut()); + try @import("print_zir.zig").renderAsText(arena, null, zir, &bw); + try bw.flush(); } /// This is only enabled for debug builds. 
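The main.zig hunks above show the shape of the wider refactor: instead of io.bufferedWriter(io.getStdOut().writer()) producing a generic wrapper whose .writer() is passed around as anytype, callers declare a concrete std.io.BufferedWriter over a caller-owned buffer and hand *std.io.BufferedWriter down, flushing explicitly at the end. A minimal sketch of that pattern under the same in-flux API assumed by this patch (function name and buffer size are illustrative):

    const std = @import("std");
    const io = std.io;

    fn printGreeting() !void {
        var buffer: [4096]u8 = undefined;
        var bw: std.io.BufferedWriter = .{
            .unbuffered_writer = io.getStdOut().writer(),
            .buffer = &buffer,
        };
        try bw.print("hello {s}\n", .{"world"}); // buffered; nothing reaches stdout yet
        try bw.flush(); // push the buffered bytes to the underlying writer
    }
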
@@ -6440,13 +6451,15 @@ fn cmdChangelist( var inst_map: std.AutoHashMapUnmanaged(Zir.Inst.Index, Zir.Inst.Index) = .empty; try Zcu.mapOldZirToNew(arena, old_zir, new_zir, &inst_map); - var bw = io.bufferedWriter(io.getStdOut().writer()); - const stdout = bw.writer(); + var bw: std.io.BufferedWriter = .{ + .unbuffered_writer = io.getStdOut().writer(), + .buffer = &stdout_buffer, + }; { - try stdout.print("Instruction mappings:\n", .{}); + try bw.print("Instruction mappings:\n", .{}); var it = inst_map.iterator(); while (it.next()) |entry| { - try stdout.print(" %{d} => %{d}\n", .{ + try bw.print(" %{d} => %{d}\n", .{ @intFromEnum(entry.key_ptr.*), @intFromEnum(entry.value_ptr.*), }); @@ -6714,13 +6727,10 @@ fn accessFrameworkPath( for (&[_][]const u8{ ".tbd", ".dylib", "" }) |ext| { test_path.clearRetainingCapacity(); - try test_path.writer().print("{s}" ++ sep ++ "{s}.framework" ++ sep ++ "{s}{s}", .{ - framework_dir_path, - framework_name, - framework_name, - ext, + try test_path.print("{s}" ++ sep ++ "{s}.framework" ++ sep ++ "{s}{s}", .{ + framework_dir_path, framework_name, framework_name, ext, }); - try checked_paths.writer().print("\n {s}", .{test_path.items}); + try checked_paths.print("\n {s}", .{test_path.items}); fs.cwd().access(test_path.items, .{}) catch |err| switch (err) { error.FileNotFound => continue, else => |e| fatal("unable to search for {s} framework '{s}': {s}", .{ @@ -7033,14 +7043,19 @@ fn cmdFetch( try fixups.append_string_after_node.put(gpa, manifest.version_node, dependencies_text); } - var rendered = std.ArrayList(u8).init(gpa); - defer rendered.deinit(); - try ast.renderToArrayList(&rendered, fixups); - - build_root.directory.handle.writeFile(.{ .sub_path = Package.Manifest.basename, .data = rendered.items }) catch |err| { - fatal("unable to write {s} file: {s}", .{ Package.Manifest.basename, @errorName(err) }); + var file = build_root.directory.handle.createFile(Package.Manifest.basename, .{}) catch |err| { + fatal("unable to create {s} file: {s}", .{ Package.Manifest.basename, err }); }; - + defer file.close(); + var buffer: [4096]u8 = undefined; + var bw: std.io.BufferedWriter = .{ + .unbuffered_writer = file.writer(), + .buffer = &buffer, + }; + ast.render(gpa, &bw, fixups) catch |err| fatal("failed to render AST to {s}: {s}", .{ + Package.Manifest.basename, err, + }); + bw.flush() catch |err| fatal("failed to flush {s}: {s}", .{ Package.Manifest.basename, err }); return cleanExit(); } diff --git a/src/print_env.zig b/src/print_env.zig index cc5db2e40a..d42292b956 100644 --- a/src/print_env.zig +++ b/src/print_env.zig @@ -4,7 +4,7 @@ const introspect = @import("introspect.zig"); const Allocator = std.mem.Allocator; const fatal = std.process.fatal; -pub fn cmdEnv(arena: Allocator, args: []const []const u8, stdout: std.fs.File.Writer) !void { +pub fn cmdEnv(arena: Allocator, args: []const []const u8) !void { _ = args; const cwd_path = try introspect.getResolvedCwd(arena); const self_exe_path = try std.fs.selfExePathAlloc(arena); @@ -21,10 +21,12 @@ pub fn cmdEnv(arena: Allocator, args: []const []const u8, stdout: std.fs.File.Wr const host = try std.zig.system.resolveTargetQuery(.{}); const triple = try host.zigTriple(arena); - var bw = std.io.bufferedWriter(stdout); - const w = bw.writer(); - - var jws = std.json.writeStream(w, .{ .whitespace = .indent_1 }); + var buffer: [1024]u8 = undefined; + var bw: std.io.BufferedWriter = .{ + .buffer = &buffer, + .unbuffered_writer = std.io.getStdOut().writer(), + }; + var jws = std.json.writeStream(bw, .{ .whitespace = 
.indent_1 }); try jws.beginObject(); @@ -55,7 +57,7 @@ pub fn cmdEnv(arena: Allocator, args: []const []const u8, stdout: std.fs.File.Wr try jws.endObject(); try jws.endObject(); - try w.writeByte('\n'); + try bw.writeByte('\n'); try bw.flush(); } diff --git a/src/print_targets.zig b/src/print_targets.zig index 96371eb1ee..29c5d5b0fc 100644 --- a/src/print_targets.zig +++ b/src/print_targets.zig @@ -11,36 +11,36 @@ const assert = std.debug.assert; const glibc = @import("libs/glibc.zig"); const introspect = @import("introspect.zig"); -pub fn cmdTargets( - allocator: Allocator, - args: []const []const u8, - /// Output stream - stdout: anytype, - native_target: *const Target, -) !void { +pub fn cmdTargets(arena: Allocator, args: []const []const u8) anyerror!void { _ = args; - var zig_lib_directory = introspect.findZigLibDir(allocator) catch |err| { + const host = std.zig.resolveTargetQueryOrFatal(.{}); + var buffer: [1024]u8 = undefined; + var bw: std.io.BufferedWriter = .{ + .unbuffered_writer = io.getStdOut().writer(), + .buffer = &buffer, + }; + try print(arena, &bw, host); + try bw.flush(); +} + +fn print(arena: Allocator, output: *std.io.BufferedWriter, host: *const Target) anyerror!void { + var zig_lib_directory = introspect.findZigLibDir(arena) catch |err| { fatal("unable to find zig installation directory: {s}\n", .{@errorName(err)}); }; defer zig_lib_directory.handle.close(); - defer allocator.free(zig_lib_directory.path.?); const abilists_contents = zig_lib_directory.handle.readFileAlloc( - allocator, + arena, glibc.abilists_path, glibc.abilists_max_size, ) catch |err| switch (err) { error.OutOfMemory => return error.OutOfMemory, else => fatal("unable to read " ++ glibc.abilists_path ++ ": {s}", .{@errorName(err)}), }; - defer allocator.free(abilists_contents); - const glibc_abi = try glibc.loadMetaData(allocator, abilists_contents); - defer glibc_abi.destroy(allocator); + const glibc_abi = try glibc.loadMetaData(arena, abilists_contents); - var bw = io.bufferedWriter(stdout); - const w = bw.writer(); - var sz = std.zon.stringify.serializer(w, .{}); + var sz = std.zon.stringify.serializer(output, .{}); { var root_obj = try sz.beginStruct(.{}); @@ -52,10 +52,9 @@ pub fn cmdTargets( { var libc_obj = try root_obj.beginTupleField("libc", .{}); for (std.zig.target.available_libcs) |libc| { - const tmp = try std.fmt.allocPrint(allocator, "{s}-{s}-{s}", .{ + const tmp = try std.fmt.allocPrint(arena, "{s}-{s}-{s}", .{ @tagName(libc.arch), @tagName(libc.os), @tagName(libc.abi), }); - defer allocator.free(tmp); try libc_obj.field(tmp, .{}); } try libc_obj.end(); @@ -64,8 +63,7 @@ pub fn cmdTargets( { var glibc_obj = try root_obj.beginTupleField("glibc", .{}); for (glibc_abi.all_versions) |ver| { - const tmp = try std.fmt.allocPrint(allocator, "{}", .{ver}); - defer allocator.free(tmp); + const tmp = try std.fmt.allocPrint(arena, "{}", .{ver}); try glibc_obj.field(tmp, .{}); } try glibc_obj.end(); @@ -105,21 +103,20 @@ pub fn cmdTargets( { var native_obj = try root_obj.beginStructField("native", .{}); { - const triple = try native_target.zigTriple(allocator); - defer allocator.free(triple); + const triple = try host.zigTriple(arena); try native_obj.field("triple", triple, .{}); } { var cpu_obj = try native_obj.beginStructField("cpu", .{}); - try cpu_obj.field("arch", @tagName(native_target.cpu.arch), .{}); + try cpu_obj.field("arch", @tagName(host.cpu.arch), .{}); - try cpu_obj.field("name", native_target.cpu.model.name, .{}); + try cpu_obj.field("name", host.cpu.model.name, .{}); { var 
features = try native_obj.beginTupleField("features", .{}); - for (native_target.cpu.arch.allFeaturesList(), 0..) |feature, i_usize| { + for (host.cpu.arch.allFeaturesList(), 0..) |feature, i_usize| { const index = @as(Target.Cpu.Feature.Set.Index, @intCast(i_usize)); - if (native_target.cpu.features.isEnabled(index)) { + if (host.cpu.features.isEnabled(index)) { try features.field(feature.name, .{}); } } @@ -128,14 +125,13 @@ pub fn cmdTargets( try cpu_obj.end(); } - try native_obj.field("os", @tagName(native_target.os.tag), .{}); - try native_obj.field("abi", @tagName(native_target.abi), .{}); + try native_obj.field("os", @tagName(host.os.tag), .{}); + try native_obj.field("abi", @tagName(host.abi), .{}); try native_obj.end(); } try root_obj.end(); } - try w.writeByte('\n'); - return bw.flush(); + try output.writeByte('\n'); } diff --git a/src/print_zir.zig b/src/print_zir.zig index a4a141fc4e..05f58d3a50 100644 --- a/src/print_zir.zig +++ b/src/print_zir.zig @@ -9,13 +9,8 @@ const Zir = std.zig.Zir; const Zcu = @import("Zcu.zig"); const LazySrcLoc = Zcu.LazySrcLoc; -/// Write human-readable, debug formatted ZIR code to a file. -pub fn renderAsTextToFile( - gpa: Allocator, - tree: ?Ast, - zir: Zir, - fs_file: std.fs.File, -) !void { +/// Write human-readable, debug formatted ZIR code. +pub fn renderAsText(gpa: Allocator, tree: ?Ast, zir: Zir, bw: *std.io.BufferedWriter) anyerror!void { var arena = std.heap.ArenaAllocator.init(gpa); defer arena.deinit(); @@ -30,16 +25,13 @@ pub fn renderAsTextToFile( .recurse_blocks = true, }; - var raw_stream = std.io.bufferedWriter(fs_file.writer()); - const stream = raw_stream.writer(); - const main_struct_inst: Zir.Inst.Index = .main_struct_inst; - try stream.print("%{d} ", .{@intFromEnum(main_struct_inst)}); - try writer.writeInstToStream(stream, main_struct_inst); - try stream.writeAll("\n"); + try bw.print("%{d} ", .{@intFromEnum(main_struct_inst)}); + try writer.writeInstToStream(bw, main_struct_inst); + try bw.writeAll("\n"); const imports_index = zir.extra[@intFromEnum(Zir.ExtraIndex.imports)]; if (imports_index != 0) { - try stream.writeAll("Imports:\n"); + try bw.writeAll("Imports:\n"); const extra = zir.extraData(Zir.Inst.Imports, imports_index); var extra_index = extra.end; @@ -49,15 +41,13 @@ pub fn renderAsTextToFile( extra_index = item.end; const import_path = zir.nullTerminatedString(item.data.name); - try stream.print(" @import(\"{}\") ", .{ + try bw.print(" @import(\"{}\") ", .{ std.zig.fmtEscapes(import_path), }); - try writer.writeSrcTokAbs(stream, item.data.token); - try stream.writeAll("\n"); + try writer.writeSrcTokAbs(bw, item.data.token); + try bw.writeAll("\n"); } } - - try raw_stream.flush(); } pub fn renderInstructionContext( @@ -67,7 +57,7 @@ pub fn renderInstructionContext( scope_file: *Zcu.File, parent_decl_node: Ast.Node.Index, indent: u32, - stream: anytype, + bw: *std.io.BufferedWriter, ) !void { var arena = std.heap.ArenaAllocator.init(gpa); defer arena.deinit(); @@ -83,13 +73,13 @@ pub fn renderInstructionContext( .recurse_blocks = true, }; - try writer.writeBody(stream, block[0..block_index]); - try stream.writeByteNTimes(' ', writer.indent - 2); - try stream.print("> %{d} ", .{@intFromEnum(block[block_index])}); - try writer.writeInstToStream(stream, block[block_index]); - try stream.writeByte('\n'); + try writer.writeBody(bw, block[0..block_index]); + try bw.splatByteAll(' ', writer.indent - 2); + try bw.print("> %{d} ", .{@intFromEnum(block[block_index])}); + try writer.writeInstToStream(bw, block[block_index]); + 
try bw.writeByte('\n'); if (block_index + 1 < block.len) { - try writer.writeBody(stream, block[block_index + 1 ..]); + try writer.writeBody(bw, block[block_index + 1 ..]); } } @@ -99,7 +89,7 @@ pub fn renderSingleInstruction( scope_file: *Zcu.File, parent_decl_node: Ast.Node.Index, indent: u32, - stream: anytype, + bw: *std.io.BufferedWriter, ) !void { var arena = std.heap.ArenaAllocator.init(gpa); defer arena.deinit(); @@ -115,8 +105,8 @@ pub fn renderSingleInstruction( .recurse_blocks = false, }; - try stream.print("%{d} ", .{@intFromEnum(inst)}); - try writer.writeInstToStream(stream, inst); + try bw.print("%{d} ", .{@intFromEnum(inst)}); + try writer.writeInstToStream(bw, inst); } const Writer = struct { @@ -188,9 +178,9 @@ const Writer = struct { fn writeInstToStream( self: *Writer, - stream: anytype, + stream: *std.io.BufferedWriter, inst: Zir.Inst.Index, - ) (@TypeOf(stream).Error || error{OutOfMemory})!void { + ) anyerror!void { const tags = self.code.instructions.items(.tag); const tag = tags[@intFromEnum(inst)]; try stream.print("= {s}(", .{@tagName(tags[@intFromEnum(inst)])}); @@ -518,7 +508,7 @@ const Writer = struct { } } - fn writeExtended(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeExtended(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const extended = self.code.instructions.items(.data)[@intFromEnum(inst)].extended; try stream.print("{s}(", .{@tagName(extended.opcode)}); switch (extended.opcode) { @@ -627,13 +617,13 @@ const Writer = struct { } } - fn writeExtNode(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void { + fn writeExtNode(self: *Writer, stream: *std.io.BufferedWriter, extended: Zir.Inst.Extended.InstData) !void { try stream.writeAll(")) "); const src_node: Ast.Node.Offset = @enumFromInt(@as(i32, @bitCast(extended.operand))); try self.writeSrcNode(stream, src_node); } - fn writeArrayInitElemType(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeArrayInitElemType(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].bin; try self.writeInstRef(stream, inst_data.lhs); try stream.print(", {d})", .{@intFromEnum(inst_data.rhs)}); @@ -641,9 +631,9 @@ const Writer = struct { fn writeUnNode( self: *Writer, - stream: anytype, + stream: *std.io.BufferedWriter, inst: Zir.Inst.Index, - ) (@TypeOf(stream).Error || error{OutOfMemory})!void { + ) anyerror!void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].un_node; try self.writeInstRef(stream, inst_data.operand); try stream.writeAll(") "); @@ -652,9 +642,9 @@ const Writer = struct { fn writeUnTok( self: *Writer, - stream: anytype, + stream: *std.io.BufferedWriter, inst: Zir.Inst.Index, - ) (@TypeOf(stream).Error || error{OutOfMemory})!void { + ) anyerror!void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].un_tok; try self.writeInstRef(stream, inst_data.operand); try stream.writeAll(") "); @@ -663,9 +653,9 @@ const Writer = struct { fn writeValidateDestructure( self: *Writer, - stream: anytype, + stream: *std.io.BufferedWriter, inst: Zir.Inst.Index, - ) (@TypeOf(stream).Error || error{OutOfMemory})!void { + ) anyerror!void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.ValidateDestructure, inst_data.payload_index).data; try self.writeInstRef(stream, extra.operand); @@ -677,9 +667,9 @@ const 
Writer = struct { fn writeValidateArrayInitTy( self: *Writer, - stream: anytype, + stream: *std.io.BufferedWriter, inst: Zir.Inst.Index, - ) (@TypeOf(stream).Error || error{OutOfMemory})!void { + ) anyerror!void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.ArrayInit, inst_data.payload_index).data; try self.writeInstRef(stream, extra.ty); @@ -689,9 +679,9 @@ const Writer = struct { fn writeArrayTypeSentinel( self: *Writer, - stream: anytype, + stream: *std.io.BufferedWriter, inst: Zir.Inst.Index, - ) (@TypeOf(stream).Error || error{OutOfMemory})!void { + ) anyerror!void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.ArrayTypeSentinel, inst_data.payload_index).data; try self.writeInstRef(stream, extra.len); @@ -705,9 +695,9 @@ const Writer = struct { fn writePtrType( self: *Writer, - stream: anytype, + stream: *std.io.BufferedWriter, inst: Zir.Inst.Index, - ) (@TypeOf(stream).Error || error{OutOfMemory})!void { + ) anyerror!void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].ptr_type; const str_allowzero = if (inst_data.flags.is_allowzero) "allowzero, " else ""; const str_const = if (!inst_data.flags.is_mutable) "const, " else ""; @@ -748,12 +738,12 @@ const Writer = struct { try self.writeSrcNode(stream, extra.data.src_node); } - fn writeInt(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeInt(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].int; try stream.print("{d})", .{inst_data}); } - fn writeIntBig(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeIntBig(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].str; const byte_count = inst_data.len * @sizeOf(std.math.big.Limb); const limb_bytes = self.code.string_bytes[@intFromEnum(inst_data.start)..][0..byte_count]; @@ -772,12 +762,12 @@ const Writer = struct { try stream.print("{s})", .{as_string}); } - fn writeFloat(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeFloat(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const number = self.code.instructions.items(.data)[@intFromEnum(inst)].float; try stream.print("{d})", .{number}); } - fn writeFloat128(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeFloat128(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.Float128, inst_data.payload_index).data; const number = extra.get(); @@ -788,15 +778,15 @@ const Writer = struct { fn writeStr( self: *Writer, - stream: anytype, + stream: *std.io.BufferedWriter, inst: Zir.Inst.Index, - ) (@TypeOf(stream).Error || error{OutOfMemory})!void { + ) anyerror!void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].str; const str = inst_data.get(self.code); try stream.print("\"{}\")", .{std.zig.fmtEscapes(str)}); } - fn writeSliceStart(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeSliceStart(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const 
extra = self.code.extraData(Zir.Inst.SliceStart, inst_data.payload_index).data; try self.writeInstRef(stream, extra.lhs); @@ -806,7 +796,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeSliceEnd(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeSliceEnd(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.SliceEnd, inst_data.payload_index).data; try self.writeInstRef(stream, extra.lhs); @@ -818,7 +808,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeSliceSentinel(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeSliceSentinel(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.SliceSentinel, inst_data.payload_index).data; try self.writeInstRef(stream, extra.lhs); @@ -832,7 +822,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeSliceLength(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeSliceLength(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.SliceLength, inst_data.payload_index).data; try self.writeInstRef(stream, extra.lhs); @@ -848,7 +838,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeUnionInit(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeUnionInit(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.UnionInit, inst_data.payload_index).data; try self.writeInstRef(stream, extra.union_type); @@ -860,7 +850,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeShuffle(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeShuffle(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.Shuffle, inst_data.payload_index).data; try self.writeInstRef(stream, extra.elem_type); @@ -874,7 +864,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeSelect(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void { + fn writeSelect(self: *Writer, stream: *std.io.BufferedWriter, extended: Zir.Inst.Extended.InstData) !void { const extra = self.code.extraData(Zir.Inst.Select, extended.operand).data; try self.writeInstRef(stream, extra.elem_type); try stream.writeAll(", "); @@ -887,7 +877,7 @@ const Writer = struct { try self.writeSrcNode(stream, extra.node); } - fn writeMulAdd(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeMulAdd(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.MulAdd, inst_data.payload_index).data; try self.writeInstRef(stream, extra.mulend1); @@ -899,7 +889,7 @@ const Writer = struct { try 
self.writeSrcNode(stream, inst_data.src_node); } - fn writeBuiltinCall(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeBuiltinCall(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.BuiltinCall, inst_data.payload_index).data; @@ -915,7 +905,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeFieldParentPtr(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void { + fn writeFieldParentPtr(self: *Writer, stream: *std.io.BufferedWriter, extended: Zir.Inst.Extended.InstData) !void { const extra = self.code.extraData(Zir.Inst.FieldParentPtr, extended.operand).data; const FlagsInt = @typeInfo(Zir.Inst.FullPtrCastFlags).@"struct".backing_integer.?; const flags: Zir.Inst.FullPtrCastFlags = @bitCast(@as(FlagsInt, @truncate(extended.small))); @@ -932,7 +922,7 @@ const Writer = struct { try self.writeSrcNode(stream, extra.src_node); } - fn writeBuiltinAsyncCall(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void { + fn writeBuiltinAsyncCall(self: *Writer, stream: *std.io.BufferedWriter, extended: Zir.Inst.Extended.InstData) !void { const extra = self.code.extraData(Zir.Inst.AsyncCall, extended.operand).data; try self.writeInstRef(stream, extra.frame_buffer); try stream.writeAll(", "); @@ -945,7 +935,7 @@ const Writer = struct { try self.writeSrcNode(stream, extra.node); } - fn writeParam(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeParam(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_tok; const extra = self.code.extraData(Zir.Inst.Param, inst_data.payload_index); const body = self.code.bodySlice(extra.end, extra.data.type.body_len); @@ -960,7 +950,7 @@ const Writer = struct { try self.writeSrcTok(stream, inst_data.src_tok); } - fn writePlNodeBin(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writePlNodeBin(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.Bin, inst_data.payload_index).data; try self.writeInstRef(stream, extra.lhs); @@ -970,7 +960,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writePlNodeMultiOp(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writePlNodeMultiOp(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.MultiOp, inst_data.payload_index); const args = self.code.refSlice(extra.end, extra.data.operands_len); @@ -983,7 +973,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeArrayMul(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeArrayMul(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.ArrayMul, inst_data.payload_index).data; try self.writeInstRef(stream, extra.res_ty); @@ -995,13 +985,13 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeElemValImm(self: *Writer, 
stream: anytype, inst: Zir.Inst.Index) !void { + fn writeElemValImm(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].elem_val_imm; try self.writeInstRef(stream, inst_data.operand); try stream.print(", {d})", .{inst_data.idx}); } - fn writeArrayInitElemPtr(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeArrayInitElemPtr(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.ElemPtrImm, inst_data.payload_index).data; @@ -1010,7 +1000,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writePlNodeExport(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writePlNodeExport(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.Export, inst_data.payload_index).data; @@ -1021,7 +1011,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeValidateArrayInitRefTy(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeValidateArrayInitRefTy(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.ArrayInitRefTy, inst_data.payload_index).data; @@ -1031,7 +1021,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeStructInit(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeStructInit(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.StructInit, inst_data.payload_index); var field_i: u32 = 0; @@ -1055,7 +1045,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeCmpxchg(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void { + fn writeCmpxchg(self: *Writer, stream: *std.io.BufferedWriter, extended: Zir.Inst.Extended.InstData) !void { const extra = self.code.extraData(Zir.Inst.Cmpxchg, extended.operand).data; try self.writeInstRef(stream, extra.ptr); @@ -1071,7 +1061,7 @@ const Writer = struct { try self.writeSrcNode(stream, extra.node); } - fn writePtrCastFull(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void { + fn writePtrCastFull(self: *Writer, stream: *std.io.BufferedWriter, extended: Zir.Inst.Extended.InstData) !void { const FlagsInt = @typeInfo(Zir.Inst.FullPtrCastFlags).@"struct".backing_integer.?; const flags: Zir.Inst.FullPtrCastFlags = @bitCast(@as(FlagsInt, @truncate(extended.small))); const extra = self.code.extraData(Zir.Inst.BinNode, extended.operand).data; @@ -1087,7 +1077,7 @@ const Writer = struct { try self.writeSrcNode(stream, extra.node); } - fn writePtrCastNoDest(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void { + fn writePtrCastNoDest(self: *Writer, stream: *std.io.BufferedWriter, extended: Zir.Inst.Extended.InstData) !void { const FlagsInt = @typeInfo(Zir.Inst.FullPtrCastFlags).@"struct".backing_integer.?; const flags: Zir.Inst.FullPtrCastFlags = @bitCast(@as(FlagsInt, @truncate(extended.small))); 
const extra = self.code.extraData(Zir.Inst.UnNode, extended.operand).data; @@ -1098,7 +1088,7 @@ const Writer = struct { try self.writeSrcNode(stream, extra.node); } - fn writeAtomicLoad(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeAtomicLoad(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.AtomicLoad, inst_data.payload_index).data; @@ -1111,7 +1101,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeAtomicStore(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeAtomicStore(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.AtomicStore, inst_data.payload_index).data; @@ -1124,7 +1114,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeAtomicRmw(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeAtomicRmw(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.AtomicRmw, inst_data.payload_index).data; @@ -1139,7 +1129,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeStructInitAnon(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeStructInitAnon(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.StructInitAnon, inst_data.payload_index); var field_i: u32 = 0; @@ -1160,7 +1150,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeStructInitFieldType(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeStructInitFieldType(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.FieldType, inst_data.payload_index).data; try self.writeInstRef(stream, extra.container_type); @@ -1169,7 +1159,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeFieldTypeRef(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeFieldTypeRef(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.FieldTypeRef, inst_data.payload_index).data; try self.writeInstRef(stream, extra.container_type); @@ -1179,7 +1169,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeNodeMultiOp(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void { + fn writeNodeMultiOp(self: *Writer, stream: *std.io.BufferedWriter, extended: Zir.Inst.Extended.InstData) !void { const extra = self.code.extraData(Zir.Inst.NodeMultiOp, extended.operand); const operands = self.code.refSlice(extra.end, extended.small); @@ -1193,9 +1183,9 @@ const Writer = struct { fn writeInstNode( self: *Writer, - stream: anytype, + stream: *std.io.BufferedWriter, inst: Zir.Inst.Index, - ) (@TypeOf(stream).Error 
|| error{OutOfMemory})!void { + ) anyerror!void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].inst_node; try self.writeInstIndex(stream, inst_data.inst); try stream.writeAll(") "); @@ -1204,7 +1194,7 @@ const Writer = struct { fn writeAsm( self: *Writer, - stream: anytype, + stream: *std.io.BufferedWriter, extended: Zir.Inst.Extended.InstData, tmpl_is_expr: bool, ) !void { @@ -1282,7 +1272,7 @@ const Writer = struct { try self.writeSrcNode(stream, extra.data.src_node); } - fn writeOverflowArithmetic(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void { + fn writeOverflowArithmetic(self: *Writer, stream: *std.io.BufferedWriter, extended: Zir.Inst.Extended.InstData) !void { const extra = self.code.extraData(Zir.Inst.BinNode, extended.operand).data; try self.writeInstRef(stream, extra.lhs); @@ -1294,7 +1284,7 @@ const Writer = struct { fn writeCall( self: *Writer, - stream: anytype, + stream: *std.io.BufferedWriter, inst: Zir.Inst.Index, comptime kind: enum { direct, field }, ) !void { @@ -1328,7 +1318,7 @@ const Writer = struct { var i: usize = 0; var arg_start: u32 = args_len; while (i < args_len) : (i += 1) { - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); const arg_end = self.code.extra[extra.end + i]; defer arg_start = arg_end; const arg_body = body[arg_start..arg_end]; @@ -1338,14 +1328,14 @@ const Writer = struct { } self.indent -= 2; if (args_len != 0) { - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); } try stream.writeAll("]) "); try self.writeSrcNode(stream, inst_data.src_node); } - fn writeBlock(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeBlock(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.Block, inst_data.payload_index); const body = self.code.bodySlice(extra.end, extra.data.body_len); @@ -1354,7 +1344,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeBlockComptime(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeBlockComptime(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.BlockComptime, inst_data.payload_index); const body = self.code.bodySlice(extra.end, extra.data.body_len); @@ -1364,7 +1354,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeCondBr(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeCondBr(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.CondBr, inst_data.payload_index); const then_body = self.code.bodySlice(extra.end, extra.data.then_body_len); @@ -1378,7 +1368,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeTry(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeTry(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.Try, inst_data.payload_index); const body = self.code.bodySlice(extra.end, 
extra.data.body_len); @@ -1389,7 +1379,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeStructDecl(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void { + fn writeStructDecl(self: *Writer, stream: *std.io.BufferedWriter, extended: Zir.Inst.Extended.InstData) !void { const small: Zir.Inst.StructDecl.Small = @bitCast(extended.small); const extra = self.code.extraData(Zir.Inst.StructDecl, extended.operand); @@ -1405,7 +1395,7 @@ const Writer = struct { extra.data.fields_hash_3, }); - try stream.print("hash({}) ", .{std.fmt.fmtSliceHexLower(&fields_hash)}); + try stream.print("hash({x}) ", .{&fields_hash}); var extra_index: usize = extra.end; @@ -1463,7 +1453,7 @@ const Writer = struct { try self.writeBody(stream, self.code.bodySlice(extra_index, decls_len)); self.indent -= 2; extra_index += decls_len; - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); try stream.writeAll("}, "); } @@ -1532,7 +1522,7 @@ const Writer = struct { self.indent += 2; for (fields, 0..) |field, i| { - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); try self.writeFlag(stream, "comptime ", field.is_comptime); if (field.name != .empty) { const field_name = self.code.nullTerminatedString(field.name); @@ -1575,13 +1565,13 @@ const Writer = struct { } self.indent -= 2; - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); try stream.writeAll("}) "); } try self.writeSrcNode(stream, .zero); } - fn writeUnionDecl(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void { + fn writeUnionDecl(self: *Writer, stream: *std.io.BufferedWriter, extended: Zir.Inst.Extended.InstData) !void { const small = @as(Zir.Inst.UnionDecl.Small, @bitCast(extended.small)); const extra = self.code.extraData(Zir.Inst.UnionDecl, extended.operand); @@ -1597,7 +1587,7 @@ const Writer = struct { extra.data.fields_hash_3, }); - try stream.print("hash({}) ", .{std.fmt.fmtSliceHexLower(&fields_hash)}); + try stream.print("hash({x}) ", .{&fields_hash}); var extra_index: usize = extra.end; @@ -1647,7 +1637,7 @@ const Writer = struct { try self.writeBody(stream, self.code.bodySlice(extra_index, decls_len)); self.indent -= 2; extra_index += decls_len; - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); try stream.writeAll("}"); } @@ -1698,7 +1688,7 @@ const Writer = struct { const field_name = self.code.nullTerminatedString(field_name_index); extra_index += 1; - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); try stream.print("{p}", .{std.zig.fmtId(field_name)}); if (has_type) { @@ -1727,12 +1717,12 @@ const Writer = struct { } self.indent -= 2; - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); try stream.writeAll("}) "); try self.writeSrcNode(stream, .zero); } - fn writeEnumDecl(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void { + fn writeEnumDecl(self: *Writer, stream: *std.io.BufferedWriter, extended: Zir.Inst.Extended.InstData) !void { const small = @as(Zir.Inst.EnumDecl.Small, @bitCast(extended.small)); const extra = self.code.extraData(Zir.Inst.EnumDecl, extended.operand); @@ -1748,7 +1738,7 @@ const Writer = struct { extra.data.fields_hash_3, }); - try stream.print("hash({}) ", .{std.fmt.fmtSliceHexLower(&fields_hash)}); + try stream.print("hash({x}) ", .{&fields_hash}); var extra_index: usize = 
extra.end; @@ -1796,7 +1786,7 @@ const Writer = struct { try self.writeBody(stream, self.code.bodySlice(extra_index, decls_len)); self.indent -= 2; extra_index += decls_len; - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); try stream.writeAll("}, "); } @@ -1832,7 +1822,7 @@ const Writer = struct { const field_name = self.code.nullTerminatedString(@enumFromInt(self.code.extra[extra_index])); extra_index += 1; - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); try stream.print("{p}", .{std.zig.fmtId(field_name)}); if (has_tag_value) { @@ -1845,7 +1835,7 @@ const Writer = struct { try stream.writeAll(",\n"); } self.indent -= 2; - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); try stream.writeAll("}) "); } try self.writeSrcNode(stream, .zero); @@ -1853,7 +1843,7 @@ const Writer = struct { fn writeOpaqueDecl( self: *Writer, - stream: anytype, + stream: *std.io.BufferedWriter, extended: Zir.Inst.Extended.InstData, ) !void { const small = @as(Zir.Inst.OpaqueDecl.Small, @bitCast(extended.small)); @@ -1889,13 +1879,13 @@ const Writer = struct { self.indent += 2; try self.writeBody(stream, self.code.bodySlice(extra_index, decls_len)); self.indent -= 2; - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); try stream.writeAll("}) "); } try self.writeSrcNode(stream, .zero); } - fn writeTupleDecl(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void { + fn writeTupleDecl(self: *Writer, stream: *std.io.BufferedWriter, extended: Zir.Inst.Extended.InstData) !void { const fields_len = extended.small; assert(fields_len != 0); const extra = self.code.extraData(Zir.Inst.TupleDecl, extended.operand); @@ -1923,7 +1913,7 @@ const Writer = struct { fn writeErrorSetDecl( self: *Writer, - stream: anytype, + stream: *std.io.BufferedWriter, inst: Zir.Inst.Index, ) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; @@ -1937,18 +1927,18 @@ const Writer = struct { while (extra_index < extra_index_end) : (extra_index += 1) { const name_index: Zir.NullTerminatedString = @enumFromInt(self.code.extra[extra_index]); const name = self.code.nullTerminatedString(name_index); - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); try stream.print("{p},\n", .{std.zig.fmtId(name)}); } self.indent -= 2; - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); try stream.writeAll("}) "); try self.writeSrcNode(stream, inst_data.src_node); } - fn writeSwitchBlockErrUnion(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeSwitchBlockErrUnion(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.SwitchBlockErrUnion, inst_data.payload_index); @@ -1984,7 +1974,7 @@ const Writer = struct { extra_index += body.len; try stream.writeAll(",\n"); - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); try stream.writeAll("non_err => "); try self.writeBracedBody(stream, body); } @@ -2002,7 +1992,7 @@ const Writer = struct { extra_index += body.len; try stream.writeAll(",\n"); - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); try stream.print("{s}{s}else => ", .{ capture_text, inline_text }); try 
self.writeBracedBody(stream, body); } @@ -2019,7 +2009,7 @@ const Writer = struct { extra_index += info.body_len; try stream.writeAll(",\n"); - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); switch (info.capture) { .none => {}, .by_val => try stream.writeAll("by_val "), @@ -2044,7 +2034,7 @@ const Writer = struct { extra_index += items_len; try stream.writeAll(",\n"); - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); switch (info.capture) { .none => {}, .by_val => try stream.writeAll("by_val "), @@ -2085,7 +2075,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeSwitchBlock(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeSwitchBlock(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.SwitchBlock, inst_data.payload_index); @@ -2132,7 +2122,7 @@ const Writer = struct { extra_index += body.len; try stream.writeAll(",\n"); - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); try stream.print("{s}{s}{s} => ", .{ capture_text, inline_text, prong_name }); try self.writeBracedBody(stream, body); } @@ -2149,7 +2139,7 @@ const Writer = struct { extra_index += info.body_len; try stream.writeAll(",\n"); - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); switch (info.capture) { .none => {}, .by_val => try stream.writeAll("by_val "), @@ -2174,7 +2164,7 @@ const Writer = struct { extra_index += items_len; try stream.writeAll(",\n"); - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); switch (info.capture) { .none => {}, .by_val => try stream.writeAll("by_val "), @@ -2215,7 +2205,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writePlNodeField(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writePlNodeField(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.Field, inst_data.payload_index).data; const name = self.code.nullTerminatedString(extra.field_name_start); @@ -2224,7 +2214,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writePlNodeFieldNamed(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writePlNodeFieldNamed(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.FieldNamed, inst_data.payload_index).data; try self.writeInstRef(stream, extra.lhs); @@ -2234,7 +2224,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeAs(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeAs(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.As, inst_data.payload_index).data; try self.writeInstRef(stream, extra.dest_type); @@ -2246,9 +2236,9 @@ const Writer = struct { fn writeNode( self: *Writer, - stream: anytype, + stream: *std.io.BufferedWriter, inst: Zir.Inst.Index, - ) 
(@TypeOf(stream).Error || error{OutOfMemory})!void { + ) anyerror!void { const src_node = self.code.instructions.items(.data)[@intFromEnum(inst)].node; try stream.writeAll(") "); try self.writeSrcNode(stream, src_node); @@ -2256,16 +2246,16 @@ const Writer = struct { fn writeStrTok( self: *Writer, - stream: anytype, + stream: *std.io.BufferedWriter, inst: Zir.Inst.Index, - ) (@TypeOf(stream).Error || error{OutOfMemory})!void { + ) anyerror!void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].str_tok; const str = inst_data.get(self.code); try stream.print("\"{}\") ", .{std.zig.fmtEscapes(str)}); try self.writeSrcTok(stream, inst_data.src_tok); } - fn writeStrOp(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeStrOp(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].str_op; const str = inst_data.getStr(self.code); try self.writeInstRef(stream, inst_data.operand); @@ -2274,7 +2264,7 @@ const Writer = struct { fn writeFunc( self: *Writer, - stream: anytype, + stream: *std.io.BufferedWriter, inst: Zir.Inst.Index, inferred_error_set: bool, ) !void { @@ -2325,7 +2315,7 @@ const Writer = struct { ); } - fn writeFuncFancy(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeFuncFancy(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.FuncFancy, inst_data.payload_index); @@ -2384,7 +2374,7 @@ const Writer = struct { ); } - fn writeAllocExtended(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void { + fn writeAllocExtended(self: *Writer, stream: *std.io.BufferedWriter, extended: Zir.Inst.Extended.InstData) !void { const extra = self.code.extraData(Zir.Inst.AllocExtended, extended.operand); const small = @as(Zir.Inst.AllocExtended.Small, @bitCast(extended.small)); @@ -2407,7 +2397,7 @@ const Writer = struct { try self.writeSrcNode(stream, extra.data.src_node); } - fn writeTypeofPeer(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void { + fn writeTypeofPeer(self: *Writer, stream: *std.io.BufferedWriter, extended: Zir.Inst.Extended.InstData) !void { const extra = self.code.extraData(Zir.Inst.TypeOfPeer, extended.operand); const body = self.code.bodySlice(extra.data.body_index, extra.data.body_len); try self.writeBracedBody(stream, body); @@ -2420,7 +2410,7 @@ const Writer = struct { try stream.writeAll("])"); } - fn writeBoolBr(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeBoolBr(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.BoolBr, inst_data.payload_index); const body = self.code.bodySlice(extra.end, extra.data.body_len); @@ -2431,7 +2421,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeIntType(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeIntType(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const int_type = self.code.instructions.items(.data)[@intFromEnum(inst)].int_type; const prefix: u8 = switch (int_type.signedness) { .signed => 'i', @@ -2441,7 +2431,7 @@ const Writer = struct { try self.writeSrcNode(stream, int_type.src_node); } - fn 
writeSaveErrRetIndex(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeSaveErrRetIndex(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].save_err_ret_index; try self.writeInstRef(stream, inst_data.operand); @@ -2449,7 +2439,7 @@ const Writer = struct { try stream.writeAll(")"); } - fn writeRestoreErrRetIndex(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void { + fn writeRestoreErrRetIndex(self: *Writer, stream: *std.io.BufferedWriter, extended: Zir.Inst.Extended.InstData) !void { const extra = self.code.extraData(Zir.Inst.RestoreErrRetIndex, extended.operand).data; try self.writeInstRef(stream, extra.block); @@ -2459,7 +2449,7 @@ const Writer = struct { try self.writeSrcNode(stream, extra.src_node); } - fn writeBreak(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeBreak(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].@"break"; const extra = self.code.extraData(Zir.Inst.Break, inst_data.payload_index).data; @@ -2469,7 +2459,7 @@ const Writer = struct { try stream.writeAll(")"); } - fn writeArrayInit(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeArrayInit(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.MultiOp, inst_data.payload_index); @@ -2485,7 +2475,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeArrayInitAnon(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeArrayInitAnon(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.MultiOp, inst_data.payload_index); @@ -2500,7 +2490,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeArrayInitSent(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeArrayInitSent(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.MultiOp, inst_data.payload_index); @@ -2520,7 +2510,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeUnreachable(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeUnreachable(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].@"unreachable"; try stream.writeAll(") "); try self.writeSrcNode(stream, inst_data.src_node); @@ -2528,7 +2518,7 @@ const Writer = struct { fn writeFuncCommon( self: *Writer, - stream: anytype, + stream: *std.io.BufferedWriter, inferred_error_set: bool, var_args: bool, is_noinline: bool, @@ -2565,19 +2555,19 @@ const Writer = struct { try self.writeSrcNode(stream, src_node); } - fn writeDbgStmt(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeDbgStmt(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].dbg_stmt; try stream.print("{d}, {d})", .{ inst_data.line + 1, 
inst_data.column + 1 }); } - fn writeDefer(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeDefer(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].@"defer"; const body = self.code.bodySlice(inst_data.index, inst_data.len); try self.writeBracedBody(stream, body); try stream.writeByte(')'); } - fn writeDeferErrCode(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeDeferErrCode(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].defer_err_code; const extra = self.code.extraData(Zir.Inst.DeferErrCode, inst_data.payload_index).data; @@ -2590,7 +2580,7 @@ const Writer = struct { try stream.writeByte(')'); } - fn writeDeclaration(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeDeclaration(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const decl = self.code.getDeclaration(inst); const prev_parent_decl_node = self.parent_decl_node; @@ -2612,10 +2602,8 @@ const Writer = struct { }, } const src_hash = self.code.getAssociatedSrcHash(inst).?; - try stream.print(" line({d}) column({d}) hash({})", .{ - decl.src_line, - decl.src_column, - std.fmt.fmtSliceHexLower(&src_hash), + try stream.print(" line({d}) column({d}) hash({x})", .{ + decl.src_line, decl.src_column, &src_hash, }); { @@ -2649,26 +2637,26 @@ const Writer = struct { try self.writeSrcNode(stream, .zero); } - fn writeClosureGet(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void { + fn writeClosureGet(self: *Writer, stream: *std.io.BufferedWriter, extended: Zir.Inst.Extended.InstData) !void { try stream.print("{d})) ", .{extended.small}); const src_node: Ast.Node.Offset = @enumFromInt(@as(i32, @bitCast(extended.operand))); try self.writeSrcNode(stream, src_node); } - fn writeBuiltinValue(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void { + fn writeBuiltinValue(self: *Writer, stream: *std.io.BufferedWriter, extended: Zir.Inst.Extended.InstData) !void { const val: Zir.Inst.BuiltinValue = @enumFromInt(extended.small); try stream.print("{s})) ", .{@tagName(val)}); const src_node: Ast.Node.Offset = @enumFromInt(@as(i32, @bitCast(extended.operand))); try self.writeSrcNode(stream, src_node); } - fn writeInplaceArithResultTy(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void { + fn writeInplaceArithResultTy(self: *Writer, stream: *std.io.BufferedWriter, extended: Zir.Inst.Extended.InstData) !void { const op: Zir.Inst.InplaceOp = @enumFromInt(extended.small); try self.writeInstRef(stream, @enumFromInt(extended.operand)); try stream.print(", {s}))", .{@tagName(op)}); } - fn writeInstRef(self: *Writer, stream: anytype, ref: Zir.Inst.Ref) !void { + fn writeInstRef(self: *Writer, stream: *std.io.BufferedWriter, ref: Zir.Inst.Ref) !void { if (ref == .none) { return stream.writeAll(".none"); } else if (ref.toIndex()) |i| { @@ -2679,12 +2667,12 @@ const Writer = struct { } } - fn writeInstIndex(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeInstIndex(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { _ = self; return stream.print("%{d}", .{@intFromEnum(inst)}); } - fn writeCaptures(self: *Writer, stream: anytype, extra_index: usize, captures_len: u32) !usize { + fn writeCaptures(self: *Writer, stream: *std.io.BufferedWriter, extra_index: 
usize, captures_len: u32) !usize { if (captures_len == 0) { try stream.writeAll("{}"); return extra_index; @@ -2704,7 +2692,7 @@ const Writer = struct { return extra_index + 2 * captures_len; } - fn writeCapture(self: *Writer, stream: anytype, capture: Zir.Inst.Capture) !void { + fn writeCapture(self: *Writer, stream: *std.io.BufferedWriter, capture: Zir.Inst.Capture) !void { switch (capture.unwrap()) { .nested => |i| return stream.print("[{d}]", .{i}), .instruction => |inst| return self.writeInstIndex(stream, inst), @@ -2723,7 +2711,7 @@ const Writer = struct { fn writeOptionalInstRef( self: *Writer, - stream: anytype, + stream: *std.io.BufferedWriter, prefix: []const u8, inst: Zir.Inst.Ref, ) !void { @@ -2734,7 +2722,7 @@ const Writer = struct { fn writeOptionalInstRefOrBody( self: *Writer, - stream: anytype, + stream: *std.io.BufferedWriter, prefix: []const u8, ref: Zir.Inst.Ref, body: []const Zir.Inst.Index, @@ -2752,7 +2740,7 @@ const Writer = struct { fn writeFlag( self: *Writer, - stream: anytype, + stream: *std.io.BufferedWriter, name: []const u8, flag: bool, ) !void { @@ -2761,7 +2749,7 @@ const Writer = struct { try stream.writeAll(name); } - fn writeSrcNode(self: *Writer, stream: anytype, src_node: Ast.Node.Offset) !void { + fn writeSrcNode(self: *Writer, stream: *std.io.BufferedWriter, src_node: Ast.Node.Offset) !void { const tree = self.tree orelse return; const abs_node = src_node.toAbsolute(self.parent_decl_node); const src_span = tree.nodeToSpan(abs_node); @@ -2773,7 +2761,7 @@ const Writer = struct { }); } - fn writeSrcTok(self: *Writer, stream: anytype, src_tok: Ast.TokenOffset) !void { + fn writeSrcTok(self: *Writer, stream: *std.io.BufferedWriter, src_tok: Ast.TokenOffset) !void { const tree = self.tree orelse return; const abs_tok = src_tok.toAbsolute(tree.firstToken(self.parent_decl_node)); const span_start = tree.tokenStart(abs_tok); @@ -2786,7 +2774,7 @@ const Writer = struct { }); } - fn writeSrcTokAbs(self: *Writer, stream: anytype, src_tok: Ast.TokenIndex) !void { + fn writeSrcTokAbs(self: *Writer, stream: *std.io.BufferedWriter, src_tok: Ast.TokenIndex) !void { const tree = self.tree orelse return; const span_start = tree.tokenStart(src_tok); const span_end = span_start + @as(u32, @intCast(tree.tokenSlice(src_tok).len)); @@ -2798,15 +2786,15 @@ const Writer = struct { }); } - fn writeBracedDecl(self: *Writer, stream: anytype, body: []const Zir.Inst.Index) !void { + fn writeBracedDecl(self: *Writer, stream: *std.io.BufferedWriter, body: []const Zir.Inst.Index) !void { try self.writeBracedBodyConditional(stream, body, self.recurse_decls); } - fn writeBracedBody(self: *Writer, stream: anytype, body: []const Zir.Inst.Index) !void { + fn writeBracedBody(self: *Writer, stream: *std.io.BufferedWriter, body: []const Zir.Inst.Index) !void { try self.writeBracedBodyConditional(stream, body, self.recurse_blocks); } - fn writeBracedBodyConditional(self: *Writer, stream: anytype, body: []const Zir.Inst.Index, enabled: bool) !void { + fn writeBracedBodyConditional(self: *Writer, stream: *std.io.BufferedWriter, body: []const Zir.Inst.Index, enabled: bool) !void { if (body.len == 0) { try stream.writeAll("{}"); } else if (enabled) { @@ -2814,7 +2802,7 @@ const Writer = struct { self.indent += 2; try self.writeBody(stream, body); self.indent -= 2; - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); try stream.writeAll("}"); } else if (body.len == 1) { try stream.writeByte('{'); @@ -2835,16 +2823,16 @@ const Writer = struct { } } - fn 
writeBody(self: *Writer, stream: anytype, body: []const Zir.Inst.Index) !void { + fn writeBody(self: *Writer, stream: *std.io.BufferedWriter, body: []const Zir.Inst.Index) !void { for (body) |inst| { - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); try stream.print("%{d} ", .{@intFromEnum(inst)}); try self.writeInstToStream(stream, inst); try stream.writeByte('\n'); } } - fn writeImport(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeImport(self: *Writer, stream: *std.io.BufferedWriter, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_tok; const extra = self.code.extraData(Zir.Inst.Import, inst_data.payload_index).data; try self.writeInstRef(stream, extra.res_ty); diff --git a/src/print_zoir.zig b/src/print_zoir.zig index b6cc8fe4d9..61a246d007 100644 --- a/src/print_zoir.zig +++ b/src/print_zoir.zig @@ -1,13 +1,6 @@ -pub fn renderToFile(zoir: Zoir, arena: Allocator, f: std.fs.File) (std.fs.File.WriteError || Allocator.Error)!void { - var bw = std.io.bufferedWriter(f.writer()); - try renderToWriter(zoir, arena, bw.writer()); - try bw.flush(); -} - -pub fn renderToWriter(zoir: Zoir, arena: Allocator, w: anytype) (@TypeOf(w).Error || Allocator.Error)!void { +pub fn renderToWriter(zoir: Zoir, arena: Allocator, w: *std.io.BufferedWriter) anyerror!void { assert(!zoir.hasCompileErrors()); - const fmtIntSizeBin = std.fmt.fmtIntSizeBin; const bytes_per_node = comptime n: { var n: usize = 0; for (@typeInfo(Zoir.Node.Repr).@"struct".fields) |f| { @@ -23,22 +16,22 @@ pub fn renderToWriter(zoir: Zoir, arena: Allocator, w: anytype) (@TypeOf(w).Erro // zig fmt: off try w.print( - \\# Nodes: {} ({}) - \\# Extra Data Items: {} ({}) - \\# BigInt Limbs: {} ({}) - \\# String Table Bytes: {} - \\# Total ZON Bytes: {} + \\# Nodes: {} ({Bi}) + \\# Extra Data Items: {} ({Bi}) + \\# BigInt Limbs: {} ({Bi}) + \\# String Table Bytes: {Bi} + \\# Total ZON Bytes: {Bi} \\ , .{ - zoir.nodes.len, fmtIntSizeBin(node_bytes), - zoir.extra.len, fmtIntSizeBin(extra_bytes), - zoir.limbs.len, fmtIntSizeBin(limb_bytes), - fmtIntSizeBin(string_bytes), - fmtIntSizeBin(node_bytes + extra_bytes + limb_bytes + string_bytes), + zoir.nodes.len, node_bytes, + zoir.extra.len, extra_bytes, + zoir.limbs.len, limb_bytes, + string_bytes, + node_bytes + extra_bytes + limb_bytes + string_bytes, }); // zig fmt: on var pz: PrintZon = .{ - .w = w.any(), + .w = w, .arena = arena, .zoir = zoir, .indent = 0, @@ -48,7 +41,7 @@ pub fn renderToWriter(zoir: Zoir, arena: Allocator, w: anytype) (@TypeOf(w).Erro } const PrintZon = struct { - w: std.io.AnyWriter, + w: *std.io.BufferedWriter, arena: Allocator, zoir: Zoir, indent: u32, diff --git a/src/translate_c.zig b/src/translate_c.zig index dda2ee8e2e..4fe066b1e4 100644 --- a/src/translate_c.zig +++ b/src/translate_c.zig @@ -5905,7 +5905,7 @@ fn parseCPrimaryExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { if (slice[0] != '\'' or slice[1] == '\\' or slice.len == 3) { return Tag.char_literal.create(c.arena, try escapeUnprintables(c, m)); } else { - const str = try std.fmt.allocPrint(c.arena, "0x{s}", .{std.fmt.fmtSliceHexLower(slice[1 .. slice.len - 1])}); + const str = try std.fmt.allocPrint(c.arena, "0x{x}", .{slice[1 .. slice.len - 1]}); return Tag.integer_literal.create(c.arena, str); } },
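
For orientation, the hunks above consistently swap generic `anytype` streams for a concrete `*std.io.BufferedWriter` parameter and replace `writeByteNTimes` with `splatByteAll` for indentation. The sketch below illustrates that calling convention using only methods that appear in this diff; the function name `writeIndented` and its parameters are illustrative placeholders, not part of the patch.

fn writeIndented(bw: *std.io.BufferedWriter, indent: usize, name: []const u8) anyerror!void {
    // Pad with `indent` spaces; splatByteAll is the replacement for the old writeByteNTimes.
    try bw.splatByteAll(' ', indent);
    // Concrete writer methods (print/writeAll/writeByte) are called directly,
    // so the error set can be the plain `anyerror` used throughout this diff.
    try bw.print("{s} => ", .{name});
    try bw.writeAll("{}");
    try bw.writeByte('\n');
}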
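
The same migration shows up in the format strings: `std.fmt.fmtSliceHexLower` becomes a plain `{x}` specifier applied to the bytes (print_zir.zig, translate_c.zig), and `std.fmt.fmtIntSizeBin` becomes `{Bi}` (print_zoir.zig). A minimal sketch of those two specifiers, assuming the semantics implied by the hunks above; the `summarize` name and its arguments are illustrative only.

fn summarize(bw: *std.io.BufferedWriter, hash: []const u8, byte_count: usize) anyerror!void {
    // {x} lowercase-hex-formats the byte slice, as in the hash(...) lines above.
    try bw.print("hash({x})\n", .{hash});
    // {Bi} renders an integer as a binary (IEC) size, as in the "# ... Bytes" header lines.
    try bw.print("size: {Bi}\n", .{byte_count});
}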