diff --git a/CMakeLists.txt b/CMakeLists.txt index 22419aa9fa..57e7cba085 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -436,7 +436,6 @@ set(ZIG_STAGE2_SOURCES lib/std/elf.zig lib/std/fifo.zig lib/std/fmt.zig - lib/std/fmt/format_float.zig lib/std/fmt/parse_float.zig lib/std/fs.zig lib/std/fs/AtomicFile.zig diff --git a/build.zig b/build.zig index 4cff998384..60c4e8f0e4 100644 --- a/build.zig +++ b/build.zig @@ -279,7 +279,7 @@ pub fn build(b: *std.Build) !void { const ancestor_ver = try std.SemanticVersion.parse(tagged_ancestor); if (zig_version.order(ancestor_ver) != .gt) { - std.debug.print("Zig version '{}' must be greater than tagged ancestor '{}'\n", .{ zig_version, ancestor_ver }); + std.debug.print("Zig version '{f}' must be greater than tagged ancestor '{f}'\n", .{ zig_version, ancestor_ver }); std.process.exit(1); } @@ -1449,7 +1449,7 @@ fn generateLangRef(b: *std.Build) std.Build.LazyPath { } var dir = b.build_root.handle.openDir("doc/langref", .{ .iterate = true }) catch |err| { - std.debug.panic("unable to open '{}doc/langref' directory: {s}", .{ + std.debug.panic("unable to open '{f}doc/langref' directory: {s}", .{ b.build_root, @errorName(err), }); }; @@ -1470,7 +1470,7 @@ fn generateLangRef(b: *std.Build) std.Build.LazyPath { // in a temporary directory "--cache-root", b.cache_root.path orelse ".", }); - cmd.addArgs(&.{ "--zig-lib-dir", b.fmt("{}", .{b.graph.zig_lib_directory}) }); + cmd.addArgs(&.{ "--zig-lib-dir", b.fmt("{f}", .{b.graph.zig_lib_directory}) }); cmd.addArgs(&.{"-i"}); cmd.addFileArg(b.path(b.fmt("doc/langref/{s}", .{entry.name}))); diff --git a/doc/langref.html.in b/doc/langref.html.in index 97226e4b33..dcf13e812d 100644 --- a/doc/langref.html.in +++ b/doc/langref.html.in @@ -374,7 +374,8 @@

Most of the time, it is more appropriate to write to stderr rather than stdout, and whether or not the message is successfully written to the stream is irrelevant. - For this common case, there is a simpler API: + Also, formatted printing often comes in handy. For this common case, + there is a simpler API:

{#code|hello_again.zig#} diff --git a/doc/langref/bad_default_value.zig b/doc/langref/bad_default_value.zig index 98fd498451..df38209c49 100644 --- a/doc/langref/bad_default_value.zig +++ b/doc/langref/bad_default_value.zig @@ -17,7 +17,7 @@ pub fn main() !void { .maximum = 0.20, }; const category = threshold.categorize(0.90); - try std.io.getStdOut().writeAll(@tagName(category)); + try std.fs.File.stdout().writeAll(@tagName(category)); } const std = @import("std"); diff --git a/doc/langref/hello.zig b/doc/langref/hello.zig index 8730e46456..27ea1f689a 100644 --- a/doc/langref/hello.zig +++ b/doc/langref/hello.zig @@ -1,8 +1,7 @@ const std = @import("std"); pub fn main() !void { - const stdout = std.io.getStdOut().writer(); - try stdout.print("Hello, {s}!\n", .{"world"}); + try std.fs.File.stdout().writeAll("Hello, World!\n"); } // exe=succeed diff --git a/doc/langref/hello_again.zig b/doc/langref/hello_again.zig index d3c9d019dd..91c3f3d158 100644 --- a/doc/langref/hello_again.zig +++ b/doc/langref/hello_again.zig @@ -1,7 +1,7 @@ const std = @import("std"); pub fn main() void { - std.debug.print("Hello, world!\n", .{}); + std.debug.print("Hello, {s}!\n", .{"World"}); } // exe=succeed diff --git a/lib/compiler/aro/aro/Compilation.zig b/lib/compiler/aro/aro/Compilation.zig index 798c192516..d723fdc3c5 100644 --- a/lib/compiler/aro/aro/Compilation.zig +++ b/lib/compiler/aro/aro/Compilation.zig @@ -1432,7 +1432,7 @@ fn getFileContents(comp: *Compilation, path: []const u8, limit: ?u32) ![]const u defer buf.deinit(); const max = limit orelse std.math.maxInt(u32); - file.reader().readAllArrayList(&buf, max) catch |e| switch (e) { + file.deprecatedReader().readAllArrayList(&buf, max) catch |e| switch (e) { error.StreamTooLong => if (limit == null) return e, else => return e, }; diff --git a/lib/compiler/aro/aro/Diagnostics.zig b/lib/compiler/aro/aro/Diagnostics.zig index eb3bb31ee8..dbf6e29af5 100644 --- a/lib/compiler/aro/aro/Diagnostics.zig +++ 
b/lib/compiler/aro/aro/Diagnostics.zig @@ -1,4 +1,5 @@ const std = @import("std"); +const assert = std.debug.assert; const Allocator = mem.Allocator; const mem = std.mem; const Source = @import("Source.zig"); @@ -323,12 +324,13 @@ pub fn addExtra( pub fn render(comp: *Compilation, config: std.io.tty.Config) void { if (comp.diagnostics.list.items.len == 0) return; - var m = defaultMsgWriter(config); + var buffer: [1000]u8 = undefined; + var m = defaultMsgWriter(config, &buffer); defer m.deinit(); renderMessages(comp, &m); } -pub fn defaultMsgWriter(config: std.io.tty.Config) MsgWriter { - return MsgWriter.init(config); +pub fn defaultMsgWriter(config: std.io.tty.Config, buffer: []u8) MsgWriter { + return MsgWriter.init(config, buffer); } pub fn renderMessages(comp: *Compilation, m: anytype) void { @@ -443,18 +445,13 @@ pub fn renderMessage(comp: *Compilation, m: anytype, msg: Message) void { printRt(m, prop.msg, .{"{s}"}, .{&str}); } else { var buf: [3]u8 = undefined; - const str = std.fmt.bufPrint(&buf, "x{x}", .{std.fmt.fmtSliceHexLower(&.{msg.extra.invalid_escape.char})}) catch unreachable; + const str = std.fmt.bufPrint(&buf, "x{x}", .{msg.extra.invalid_escape.char}) catch unreachable; printRt(m, prop.msg, .{"{s}"}, .{str}); } }, .normalized => { const f = struct { - pub fn f( - bytes: []const u8, - comptime _: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, - ) !void { + pub fn f(bytes: []const u8, writer: *std.io.Writer) std.io.Writer.Error!void { var it: std.unicode.Utf8Iterator = .{ .bytes = bytes, .i = 0, @@ -464,22 +461,16 @@ pub fn renderMessage(comp: *Compilation, m: anytype, msg: Message) void { try writer.writeByte(@intCast(codepoint)); } else if (codepoint < 0xFFFF) { try writer.writeAll("\\u"); - try std.fmt.formatInt(codepoint, 16, .upper, .{ - .fill = '0', - .width = 4, - }, writer); + try writer.printInt(codepoint, 16, .upper, .{ .fill = '0', .width = 4 }); } else { try writer.writeAll("\\U"); - try std.fmt.formatInt(codepoint, 16, 
.upper, .{ - .fill = '0', - .width = 8, - }, writer); + try writer.printInt(codepoint, 16, .upper, .{ .fill = '0', .width = 8 }); } } } }.f; - printRt(m, prop.msg, .{"{s}"}, .{ - std.fmt.Formatter(f){ .data = msg.extra.normalized }, + printRt(m, prop.msg, .{"{f}"}, .{ + std.fmt.Formatter([]const u8, f){ .data = msg.extra.normalized }, }); }, .none, .offset => m.write(prop.msg), @@ -535,32 +526,31 @@ fn tagKind(d: *Diagnostics, tag: Tag, langopts: LangOpts) Kind { } const MsgWriter = struct { - w: std.io.BufferedWriter(4096, std.fs.File.Writer), + writer: *std.io.Writer, config: std.io.tty.Config, - fn init(config: std.io.tty.Config) MsgWriter { - std.debug.lockStdErr(); + fn init(config: std.io.tty.Config, buffer: []u8) MsgWriter { return .{ - .w = std.io.bufferedWriter(std.io.getStdErr().writer()), + .writer = std.debug.lockStderrWriter(buffer), .config = config, }; } pub fn deinit(m: *MsgWriter) void { - m.w.flush() catch {}; - std.debug.unlockStdErr(); + std.debug.unlockStderrWriter(); + m.* = undefined; } pub fn print(m: *MsgWriter, comptime fmt: []const u8, args: anytype) void { - m.w.writer().print(fmt, args) catch {}; + m.writer.print(fmt, args) catch {}; } fn write(m: *MsgWriter, msg: []const u8) void { - m.w.writer().writeAll(msg) catch {}; + m.writer.writeAll(msg) catch {}; } fn setColor(m: *MsgWriter, color: std.io.tty.Color) void { - m.config.setColor(m.w.writer(), color) catch {}; + m.config.setColor(m.writer, color) catch {}; } fn location(m: *MsgWriter, path: []const u8, line: u32, col: u32) void { diff --git a/lib/compiler/aro/aro/Driver.zig b/lib/compiler/aro/aro/Driver.zig index c89dafe002..f719e8cc15 100644 --- a/lib/compiler/aro/aro/Driver.zig +++ b/lib/compiler/aro/aro/Driver.zig @@ -519,7 +519,7 @@ fn option(arg: []const u8, name: []const u8) ?[]const u8 { fn addSource(d: *Driver, path: []const u8) !Source { if (mem.eql(u8, "-", path)) { - const stdin = std.io.getStdIn().reader(); + const stdin = std.fs.File.stdin().deprecatedReader(); const 
input = try stdin.readAllAlloc(d.comp.gpa, std.math.maxInt(u32)); defer d.comp.gpa.free(input); return d.comp.addSourceFromBuffer("", input); @@ -541,7 +541,7 @@ pub fn fatal(d: *Driver, comptime fmt: []const u8, args: anytype) error{ FatalEr } pub fn renderErrors(d: *Driver) void { - Diagnostics.render(d.comp, d.detectConfig(std.io.getStdErr())); + Diagnostics.render(d.comp, d.detectConfig(std.fs.File.stderr())); } pub fn detectConfig(d: *Driver, file: std.fs.File) std.io.tty.Config { @@ -591,7 +591,7 @@ pub fn main(d: *Driver, tc: *Toolchain, args: []const []const u8, comptime fast_ var macro_buf = std.ArrayList(u8).init(d.comp.gpa); defer macro_buf.deinit(); - const std_out = std.io.getStdOut().writer(); + const std_out = std.fs.File.stdout().deprecatedWriter(); if (try parseArgs(d, std_out, macro_buf.writer(), args)) return; const linking = !(d.only_preprocess or d.only_syntax or d.only_compile or d.only_preprocess_and_compile); @@ -686,10 +686,10 @@ fn processSource( std.fs.cwd().createFile(some, .{}) catch |er| return d.fatal("unable to create output file '{s}': {s}", .{ some, errorDescription(er) }) else - std.io.getStdOut(); + std.fs.File.stdout(); defer if (d.output_name != null) file.close(); - var buf_w = std.io.bufferedWriter(file.writer()); + var buf_w = std.io.bufferedWriter(file.deprecatedWriter()); pp.prettyPrintTokens(buf_w.writer(), dump_mode) catch |er| return d.fatal("unable to write result: {s}", .{errorDescription(er)}); @@ -704,8 +704,8 @@ fn processSource( defer tree.deinit(); if (d.verbose_ast) { - const stdout = std.io.getStdOut(); - var buf_writer = std.io.bufferedWriter(stdout.writer()); + const stdout = std.fs.File.stdout(); + var buf_writer = std.io.bufferedWriter(stdout.deprecatedWriter()); tree.dump(d.detectConfig(stdout), buf_writer.writer()) catch {}; buf_writer.flush() catch {}; } @@ -734,8 +734,8 @@ fn processSource( defer ir.deinit(d.comp.gpa); if (d.verbose_ir) { - const stdout = std.io.getStdOut(); - var buf_writer = 
std.io.bufferedWriter(stdout.writer()); + const stdout = std.fs.File.stdout(); + var buf_writer = std.io.bufferedWriter(stdout.deprecatedWriter()); ir.dump(d.comp.gpa, d.detectConfig(stdout), buf_writer.writer()) catch {}; buf_writer.flush() catch {}; } @@ -806,10 +806,10 @@ fn processSource( } fn dumpLinkerArgs(items: []const []const u8) !void { - const stdout = std.io.getStdOut().writer(); + const stdout = std.fs.File.stdout().deprecatedWriter(); for (items, 0..) |item, i| { if (i > 0) try stdout.writeByte(' '); - try stdout.print("\"{}\"", .{std.zig.fmtEscapes(item)}); + try stdout.print("\"{f}\"", .{std.zig.fmtString(item)}); } try stdout.writeByte('\n'); } diff --git a/lib/compiler/aro/aro/Parser.zig b/lib/compiler/aro/aro/Parser.zig index 5ebd89ec3d..ada6298a87 100644 --- a/lib/compiler/aro/aro/Parser.zig +++ b/lib/compiler/aro/aro/Parser.zig @@ -500,8 +500,8 @@ fn checkDeprecatedUnavailable(p: *Parser, ty: Type, usage_tok: TokenIndex, decl_ const w = p.strings.writer(); const msg_str = p.comp.interner.get(@"error".msg.ref()).bytes; - try w.print("call to '{s}' declared with attribute error: {}", .{ - p.tokSlice(@"error".__name_tok), std.zig.fmtEscapes(msg_str), + try w.print("call to '{s}' declared with attribute error: {f}", .{ + p.tokSlice(@"error".__name_tok), std.zig.fmtString(msg_str), }); const str = try p.comp.diagnostics.arena.allocator().dupe(u8, p.strings.items[strings_top..]); try p.errStr(.error_attribute, usage_tok, str); @@ -512,8 +512,8 @@ fn checkDeprecatedUnavailable(p: *Parser, ty: Type, usage_tok: TokenIndex, decl_ const w = p.strings.writer(); const msg_str = p.comp.interner.get(warning.msg.ref()).bytes; - try w.print("call to '{s}' declared with attribute warning: {}", .{ - p.tokSlice(warning.__name_tok), std.zig.fmtEscapes(msg_str), + try w.print("call to '{s}' declared with attribute warning: {f}", .{ + p.tokSlice(warning.__name_tok), std.zig.fmtString(msg_str), }); const str = try p.comp.diagnostics.arena.allocator().dupe(u8, 
p.strings.items[strings_top..]); try p.errStr(.warning_attribute, usage_tok, str); @@ -542,7 +542,7 @@ fn errDeprecated(p: *Parser, tag: Diagnostics.Tag, tok_i: TokenIndex, msg: ?Valu try w.writeAll(reason); if (msg) |m| { const str = p.comp.interner.get(m.ref()).bytes; - try w.print(": {}", .{std.zig.fmtEscapes(str)}); + try w.print(": {f}", .{std.zig.fmtString(str)}); } const str = try p.comp.diagnostics.arena.allocator().dupe(u8, p.strings.items[strings_top..]); return p.errStr(tag, tok_i, str); diff --git a/lib/compiler/aro/aro/Preprocessor.zig b/lib/compiler/aro/aro/Preprocessor.zig index e45f6eabc6..c8695edd6b 100644 --- a/lib/compiler/aro/aro/Preprocessor.zig +++ b/lib/compiler/aro/aro/Preprocessor.zig @@ -811,7 +811,7 @@ fn verboseLog(pp: *Preprocessor, raw: RawToken, comptime fmt: []const u8, args: const source = pp.comp.getSource(raw.source); const line_col = source.lineCol(.{ .id = raw.source, .line = raw.line, .byte_offset = raw.start }); - const stderr = std.io.getStdErr().writer(); + const stderr = std.fs.File.stderr().deprecatedWriter(); var buf_writer = std.io.bufferedWriter(stderr); const writer = buf_writer.writer(); defer buf_writer.flush() catch {}; @@ -3262,7 +3262,8 @@ fn printLinemarker( // containing the same bytes as the input regardless of encoding. 
else => { try w.writeAll("\\x"); - try std.fmt.formatInt(byte, 16, .lower, .{ .width = 2, .fill = '0' }, w); + // TODO try w.printInt(byte, 16, .lower, .{ .width = 2, .fill = '0' }); + try w.print("{x:0>2}", .{byte}); }, }; try w.writeByte('"'); diff --git a/lib/compiler/aro/aro/Value.zig b/lib/compiler/aro/aro/Value.zig index 02c9cfc830..183c557976 100644 --- a/lib/compiler/aro/aro/Value.zig +++ b/lib/compiler/aro/aro/Value.zig @@ -961,7 +961,7 @@ pub fn print(v: Value, ty: Type, comp: *const Compilation, w: anytype) @TypeOf(w switch (key) { .null => return w.writeAll("nullptr_t"), .int => |repr| switch (repr) { - inline else => |x| return w.print("{d}", .{x}), + inline .u64, .i64, .big_int => |x| return w.print("{d}", .{x}), }, .float => |repr| switch (repr) { .f16 => |x| return w.print("{d}", .{@round(@as(f64, @floatCast(x)) * 1000) / 1000}), @@ -982,7 +982,7 @@ pub fn printString(bytes: []const u8, ty: Type, comp: *const Compilation, w: any const without_null = bytes[0 .. bytes.len - @intFromEnum(size)]; try w.writeByte('"'); switch (size) { - .@"1" => try w.print("{}", .{std.zig.fmtEscapes(without_null)}), + .@"1" => try w.print("{f}", .{std.zig.fmtString(without_null)}), .@"2" => { var items: [2]u16 = undefined; var i: usize = 0; diff --git a/lib/compiler/aro/backend/Object/Elf.zig b/lib/compiler/aro/backend/Object/Elf.zig index 9b4f347de5..ddd66a3c9d 100644 --- a/lib/compiler/aro/backend/Object/Elf.zig +++ b/lib/compiler/aro/backend/Object/Elf.zig @@ -171,7 +171,7 @@ pub fn addRelocation(elf: *Elf, name: []const u8, section_kind: Object.Section, /// strtab /// section headers pub fn finish(elf: *Elf, file: std.fs.File) !void { - var buf_writer = std.io.bufferedWriter(file.writer()); + var buf_writer = std.io.bufferedWriter(file.deprecatedWriter()); const w = buf_writer.writer(); var num_sections: std.elf.Elf64_Half = additional_sections; diff --git a/lib/compiler/aro_translate_c.zig b/lib/compiler/aro_translate_c.zig index e064cfa345..939aede655 100644 --- 
a/lib/compiler/aro_translate_c.zig +++ b/lib/compiler/aro_translate_c.zig @@ -1781,7 +1781,8 @@ test "Macro matching" { fn renderErrorsAndExit(comp: *aro.Compilation) noreturn { defer std.process.exit(1); - var writer = aro.Diagnostics.defaultMsgWriter(std.io.tty.detectConfig(std.io.getStdErr())); + var buffer: [1000]u8 = undefined; + var writer = aro.Diagnostics.defaultMsgWriter(std.io.tty.detectConfig(std.fs.File.stderr()), &buffer); defer writer.deinit(); // writer deinit must run *before* exit so that stderr is flushed var saw_error = false; @@ -1824,6 +1825,6 @@ pub fn main() !void { defer tree.deinit(gpa); const formatted = try tree.render(arena); - try std.io.getStdOut().writeAll(formatted); + try std.fs.File.stdout().writeAll(formatted); return std.process.cleanExit(); } diff --git a/lib/compiler/aro_translate_c/ast.zig b/lib/compiler/aro_translate_c/ast.zig index 77e7bf1609..132a07c6c8 100644 --- a/lib/compiler/aro_translate_c/ast.zig +++ b/lib/compiler/aro_translate_c/ast.zig @@ -849,7 +849,7 @@ const Context = struct { fn addIdentifier(c: *Context, bytes: []const u8) Allocator.Error!TokenIndex { if (std.zig.primitives.isPrimitive(bytes)) return c.addTokenFmt(.identifier, "@\"{s}\"", .{bytes}); - return c.addTokenFmt(.identifier, "{p}", .{std.zig.fmtId(bytes)}); + return c.addTokenFmt(.identifier, "{f}", .{std.zig.fmtIdFlags(bytes, .{ .allow_primitive = true })}); } fn listToSpan(c: *Context, list: []const NodeIndex) Allocator.Error!NodeSubRange { @@ -1201,7 +1201,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex { const compile_error_tok = try c.addToken(.builtin, "@compileError"); _ = try c.addToken(.l_paren, "("); - const err_msg_tok = try c.addTokenFmt(.string_literal, "\"{}\"", .{std.zig.fmtEscapes(payload.mangled)}); + const err_msg_tok = try c.addTokenFmt(.string_literal, "\"{f}\"", .{std.zig.fmtString(payload.mangled)}); const err_msg = try c.addNode(.{ .tag = .string_literal, .main_token = err_msg_tok, @@ -2116,7 +2116,7 @@ fn 
renderRecord(c: *Context, node: Node) !NodeIndex { defer c.gpa.free(members); for (payload.fields, 0..) |field, i| { - const name_tok = try c.addTokenFmt(.identifier, "{p}", .{std.zig.fmtId(field.name)}); + const name_tok = try c.addTokenFmt(.identifier, "{f}", .{std.zig.fmtIdFlags(field.name, .{ .allow_primitive = true })}); _ = try c.addToken(.colon, ":"); const type_expr = try renderNode(c, field.type); @@ -2205,7 +2205,7 @@ fn renderFieldAccess(c: *Context, lhs: NodeIndex, field_name: []const u8) !NodeI .main_token = try c.addToken(.period, "."), .data = .{ .node_and_token = .{ lhs, - try c.addTokenFmt(.identifier, "{p}", .{std.zig.fmtId(field_name)}), + try c.addTokenFmt(.identifier, "{f}", .{std.zig.fmtIdFlags(field_name, .{ .allow_primitive = true })}), } }, }); } @@ -2681,7 +2681,7 @@ fn renderVar(c: *Context, node: Node) !NodeIndex { _ = try c.addToken(.l_paren, "("); const res = try c.addNode(.{ .tag = .string_literal, - .main_token = try c.addTokenFmt(.string_literal, "\"{}\"", .{std.zig.fmtEscapes(some)}), + .main_token = try c.addTokenFmt(.string_literal, "\"{f}\"", .{std.zig.fmtString(some)}), .data = undefined, }); _ = try c.addToken(.r_paren, ")"); @@ -2765,7 +2765,7 @@ fn renderFunc(c: *Context, node: Node) !NodeIndex { _ = try c.addToken(.l_paren, "("); const res = try c.addNode(.{ .tag = .string_literal, - .main_token = try c.addTokenFmt(.string_literal, "\"{}\"", .{std.zig.fmtEscapes(some)}), + .main_token = try c.addTokenFmt(.string_literal, "\"{f}\"", .{std.zig.fmtString(some)}), .data = undefined, }); _ = try c.addToken(.r_paren, ")"); diff --git a/lib/compiler/build_runner.zig b/lib/compiler/build_runner.zig index 740461f774..693e9b4c70 100644 --- a/lib/compiler/build_runner.zig +++ b/lib/compiler/build_runner.zig @@ -12,6 +12,7 @@ const Watch = std.Build.Watch; const Fuzz = std.Build.Fuzz; const Allocator = std.mem.Allocator; const fatal = std.process.fatal; +const Writer = std.io.Writer; const runner = @This(); pub const root = 
@import("@build"); @@ -330,7 +331,7 @@ pub fn main() !void { } } - const stderr = std.io.getStdErr(); + const stderr: std.fs.File = .stderr(); const ttyconf = get_tty_conf(color, stderr); switch (ttyconf) { .no_color => try graph.env_map.put("NO_COLOR", "1"), @@ -365,7 +366,7 @@ pub fn main() !void { .data = buffer.items, .flags = .{ .exclusive = true }, }) catch |err| { - fatal("unable to write configuration results to '{}{s}': {s}", .{ + fatal("unable to write configuration results to '{f}{s}': {s}", .{ local_cache_directory, tmp_sub_path, @errorName(err), }); }; @@ -378,13 +379,19 @@ pub fn main() !void { validateSystemLibraryOptions(builder); - const stdout_writer = io.getStdOut().writer(); + if (help_menu) { + var w = initStdoutWriter(); + printUsage(builder, w) catch return stdout_writer_allocation.err.?; + w.flush() catch return stdout_writer_allocation.err.?; + return; + } - if (help_menu) - return usage(builder, stdout_writer); - - if (steps_menu) - return steps(builder, stdout_writer); + if (steps_menu) { + var w = initStdoutWriter(); + printSteps(builder, w) catch return stdout_writer_allocation.err.?; + w.flush() catch return stdout_writer_allocation.err.?; + return; + } var run: Run = .{ .max_rss = max_rss, @@ -696,24 +703,23 @@ fn runStepNames( const ttyconf = run.ttyconf; if (run.summary != .none) { - std.debug.lockStdErr(); - defer std.debug.unlockStdErr(); - const stderr = run.stderr; + const w = std.debug.lockStderrWriter(&stdio_buffer_allocation); + defer std.debug.unlockStderrWriter(); const total_count = success_count + failure_count + pending_count + skipped_count; - ttyconf.setColor(stderr, .cyan) catch {}; - stderr.writeAll("Build Summary:") catch {}; - ttyconf.setColor(stderr, .reset) catch {}; - stderr.writer().print(" {d}/{d} steps succeeded", .{ success_count, total_count }) catch {}; - if (skipped_count > 0) stderr.writer().print("; {d} skipped", .{skipped_count}) catch {}; - if (failure_count > 0) stderr.writer().print("; {d} failed", 
.{failure_count}) catch {}; + ttyconf.setColor(w, .cyan) catch {}; + w.writeAll("Build Summary:") catch {}; + ttyconf.setColor(w, .reset) catch {}; + w.print(" {d}/{d} steps succeeded", .{ success_count, total_count }) catch {}; + if (skipped_count > 0) w.print("; {d} skipped", .{skipped_count}) catch {}; + if (failure_count > 0) w.print("; {d} failed", .{failure_count}) catch {}; - if (test_count > 0) stderr.writer().print("; {d}/{d} tests passed", .{ test_pass_count, test_count }) catch {}; - if (test_skip_count > 0) stderr.writer().print("; {d} skipped", .{test_skip_count}) catch {}; - if (test_fail_count > 0) stderr.writer().print("; {d} failed", .{test_fail_count}) catch {}; - if (test_leak_count > 0) stderr.writer().print("; {d} leaked", .{test_leak_count}) catch {}; + if (test_count > 0) w.print("; {d}/{d} tests passed", .{ test_pass_count, test_count }) catch {}; + if (test_skip_count > 0) w.print("; {d} skipped", .{test_skip_count}) catch {}; + if (test_fail_count > 0) w.print("; {d} failed", .{test_fail_count}) catch {}; + if (test_leak_count > 0) w.print("; {d} leaked", .{test_leak_count}) catch {}; - stderr.writeAll("\n") catch {}; + w.writeAll("\n") catch {}; // Print a fancy tree with build results. var step_stack_copy = try step_stack.clone(gpa); @@ -722,7 +728,7 @@ fn runStepNames( var print_node: PrintNode = .{ .parent = null }; if (step_names.len == 0) { print_node.last = true; - printTreeStep(b, b.default_step, run, stderr, ttyconf, &print_node, &step_stack_copy) catch {}; + printTreeStep(b, b.default_step, run, w, ttyconf, &print_node, &step_stack_copy) catch {}; } else { const last_index = if (run.summary == .all) b.top_level_steps.count() else blk: { var i: usize = step_names.len; @@ -741,9 +747,10 @@ fn runStepNames( for (step_names, 0..) 
|step_name, i| { const tls = b.top_level_steps.get(step_name).?; print_node.last = i + 1 == last_index; - printTreeStep(b, &tls.step, run, stderr, ttyconf, &print_node, &step_stack_copy) catch {}; + printTreeStep(b, &tls.step, run, w, ttyconf, &print_node, &step_stack_copy) catch {}; } } + w.writeByte('\n') catch {}; } if (failure_count == 0) { @@ -775,7 +782,7 @@ const PrintNode = struct { last: bool = false, }; -fn printPrefix(node: *PrintNode, stderr: File, ttyconf: std.io.tty.Config) !void { +fn printPrefix(node: *PrintNode, stderr: *Writer, ttyconf: std.io.tty.Config) !void { const parent = node.parent orelse return; if (parent.parent == null) return; try printPrefix(parent, stderr, ttyconf); @@ -789,7 +796,7 @@ fn printPrefix(node: *PrintNode, stderr: File, ttyconf: std.io.tty.Config) !void } } -fn printChildNodePrefix(stderr: File, ttyconf: std.io.tty.Config) !void { +fn printChildNodePrefix(stderr: *Writer, ttyconf: std.io.tty.Config) !void { try stderr.writeAll(switch (ttyconf) { .no_color, .windows_api => "+- ", .escape_codes => "\x1B\x28\x30\x6d\x71\x1B\x28\x42 ", // └─ @@ -798,7 +805,7 @@ fn printChildNodePrefix(stderr: File, ttyconf: std.io.tty.Config) !void { fn printStepStatus( s: *Step, - stderr: File, + stderr: *Writer, ttyconf: std.io.tty.Config, run: *const Run, ) !void { @@ -820,10 +827,10 @@ fn printStepStatus( try stderr.writeAll(" cached"); } else if (s.test_results.test_count > 0) { const pass_count = s.test_results.passCount(); - try stderr.writer().print(" {d} passed", .{pass_count}); + try stderr.print(" {d} passed", .{pass_count}); if (s.test_results.skip_count > 0) { try ttyconf.setColor(stderr, .yellow); - try stderr.writer().print(" {d} skipped", .{s.test_results.skip_count}); + try stderr.print(" {d} skipped", .{s.test_results.skip_count}); } } else { try stderr.writeAll(" success"); @@ -832,15 +839,15 @@ fn printStepStatus( if (s.result_duration_ns) |ns| { try ttyconf.setColor(stderr, .dim); if (ns >= std.time.ns_per_min) { - try 
stderr.writer().print(" {d}m", .{ns / std.time.ns_per_min}); + try stderr.print(" {d}m", .{ns / std.time.ns_per_min}); } else if (ns >= std.time.ns_per_s) { - try stderr.writer().print(" {d}s", .{ns / std.time.ns_per_s}); + try stderr.print(" {d}s", .{ns / std.time.ns_per_s}); } else if (ns >= std.time.ns_per_ms) { - try stderr.writer().print(" {d}ms", .{ns / std.time.ns_per_ms}); + try stderr.print(" {d}ms", .{ns / std.time.ns_per_ms}); } else if (ns >= std.time.ns_per_us) { - try stderr.writer().print(" {d}us", .{ns / std.time.ns_per_us}); + try stderr.print(" {d}us", .{ns / std.time.ns_per_us}); } else { - try stderr.writer().print(" {d}ns", .{ns}); + try stderr.print(" {d}ns", .{ns}); } try ttyconf.setColor(stderr, .reset); } @@ -848,13 +855,13 @@ fn printStepStatus( const rss = s.result_peak_rss; try ttyconf.setColor(stderr, .dim); if (rss >= 1000_000_000) { - try stderr.writer().print(" MaxRSS:{d}G", .{rss / 1000_000_000}); + try stderr.print(" MaxRSS:{d}G", .{rss / 1000_000_000}); } else if (rss >= 1000_000) { - try stderr.writer().print(" MaxRSS:{d}M", .{rss / 1000_000}); + try stderr.print(" MaxRSS:{d}M", .{rss / 1000_000}); } else if (rss >= 1000) { - try stderr.writer().print(" MaxRSS:{d}K", .{rss / 1000}); + try stderr.print(" MaxRSS:{d}K", .{rss / 1000}); } else { - try stderr.writer().print(" MaxRSS:{d}B", .{rss}); + try stderr.print(" MaxRSS:{d}B", .{rss}); } try ttyconf.setColor(stderr, .reset); } @@ -866,7 +873,7 @@ fn printStepStatus( if (skip == .skipped_oom) { try stderr.writeAll(" (not enough memory)"); try ttyconf.setColor(stderr, .dim); - try stderr.writer().print(" upper bound of {d} exceeded runner limit ({d})", .{ s.max_rss, run.max_rss }); + try stderr.print(" upper bound of {d} exceeded runner limit ({d})", .{ s.max_rss, run.max_rss }); try ttyconf.setColor(stderr, .yellow); } try stderr.writeAll("\n"); @@ -878,23 +885,23 @@ fn printStepStatus( fn printStepFailure( s: *Step, - stderr: File, + stderr: *Writer, ttyconf: std.io.tty.Config, 
) !void { if (s.result_error_bundle.errorMessageCount() > 0) { try ttyconf.setColor(stderr, .red); - try stderr.writer().print(" {d} errors\n", .{ + try stderr.print(" {d} errors\n", .{ s.result_error_bundle.errorMessageCount(), }); try ttyconf.setColor(stderr, .reset); } else if (!s.test_results.isSuccess()) { - try stderr.writer().print(" {d}/{d} passed", .{ + try stderr.print(" {d}/{d} passed", .{ s.test_results.passCount(), s.test_results.test_count, }); if (s.test_results.fail_count > 0) { try stderr.writeAll(", "); try ttyconf.setColor(stderr, .red); - try stderr.writer().print("{d} failed", .{ + try stderr.print("{d} failed", .{ s.test_results.fail_count, }); try ttyconf.setColor(stderr, .reset); @@ -902,7 +909,7 @@ fn printStepFailure( if (s.test_results.skip_count > 0) { try stderr.writeAll(", "); try ttyconf.setColor(stderr, .yellow); - try stderr.writer().print("{d} skipped", .{ + try stderr.print("{d} skipped", .{ s.test_results.skip_count, }); try ttyconf.setColor(stderr, .reset); @@ -910,7 +917,7 @@ fn printStepFailure( if (s.test_results.leak_count > 0) { try stderr.writeAll(", "); try ttyconf.setColor(stderr, .red); - try stderr.writer().print("{d} leaked", .{ + try stderr.print("{d} leaked", .{ s.test_results.leak_count, }); try ttyconf.setColor(stderr, .reset); @@ -932,7 +939,7 @@ fn printTreeStep( b: *std.Build, s: *Step, run: *const Run, - stderr: File, + stderr: *Writer, ttyconf: std.io.tty.Config, parent_node: *PrintNode, step_stack: *std.AutoArrayHashMapUnmanaged(*Step, void), @@ -992,7 +999,7 @@ fn printTreeStep( if (s.dependencies.items.len == 0) { try stderr.writeAll(" (reused)\n"); } else { - try stderr.writer().print(" (+{d} more reused dependencies)\n", .{ + try stderr.print(" (+{d} more reused dependencies)\n", .{ s.dependencies.items.len, }); } @@ -1129,11 +1136,11 @@ fn workerMakeOneStep( const show_stderr = s.result_stderr.len > 0; if (show_error_msgs or show_compile_errors or show_stderr) { - std.debug.lockStdErr(); - defer 
std.debug.unlockStdErr(); + const bw = std.debug.lockStderrWriter(&stdio_buffer_allocation); + defer std.debug.unlockStderrWriter(); const gpa = b.allocator; - printErrorMessages(gpa, s, .{ .ttyconf = run.ttyconf }, run.stderr, run.prominent_compile_errors) catch {}; + printErrorMessages(gpa, s, .{ .ttyconf = run.ttyconf }, bw, run.prominent_compile_errors) catch {}; } handle_result: { @@ -1190,7 +1197,7 @@ pub fn printErrorMessages( gpa: Allocator, failing_step: *Step, options: std.zig.ErrorBundle.RenderOptions, - stderr: File, + stderr: *Writer, prominent_compile_errors: bool, ) !void { // Provide context for where these error messages are coming from by @@ -1209,7 +1216,7 @@ pub fn printErrorMessages( var indent: usize = 0; while (step_stack.pop()) |s| : (indent += 1) { if (indent > 0) { - try stderr.writer().writeByteNTimes(' ', (indent - 1) * 3); + try stderr.splatByteAll(' ', (indent - 1) * 3); try printChildNodePrefix(stderr, ttyconf); } @@ -1231,7 +1238,7 @@ pub fn printErrorMessages( } if (!prominent_compile_errors and failing_step.result_error_bundle.errorMessageCount() > 0) { - try failing_step.result_error_bundle.renderToWriter(options, stderr.writer()); + try failing_step.result_error_bundle.renderToWriter(options, stderr); } for (failing_step.result_error_msgs.items) |msg| { @@ -1243,27 +1250,27 @@ pub fn printErrorMessages( } } -fn steps(builder: *std.Build, out_stream: anytype) !void { +fn printSteps(builder: *std.Build, w: *Writer) !void { const allocator = builder.allocator; for (builder.top_level_steps.values()) |top_level_step| { const name = if (&top_level_step.step == builder.default_step) try fmt.allocPrint(allocator, "{s} (default)", .{top_level_step.step.name}) else top_level_step.step.name; - try out_stream.print(" {s:<28} {s}\n", .{ name, top_level_step.description }); + try w.print(" {s:<28} {s}\n", .{ name, top_level_step.description }); } } -fn usage(b: *std.Build, out_stream: anytype) !void { - try out_stream.print( +fn printUsage(b: 
*std.Build, w: *Writer) !void { + try w.print( \\Usage: {s} build [steps] [options] \\ \\Steps: \\ , .{b.graph.zig_exe}); - try steps(b, out_stream); + try printSteps(b, w); - try out_stream.writeAll( + try w.writeAll( \\ \\General Options: \\ -p, --prefix [path] Where to install files (default: zig-out) @@ -1319,25 +1326,25 @@ fn usage(b: *std.Build, out_stream: anytype) !void { const arena = b.allocator; if (b.available_options_list.items.len == 0) { - try out_stream.print(" (none)\n", .{}); + try w.print(" (none)\n", .{}); } else { for (b.available_options_list.items) |option| { const name = try fmt.allocPrint(arena, " -D{s}=[{s}]", .{ option.name, @tagName(option.type_id), }); - try out_stream.print("{s:<30} {s}\n", .{ name, option.description }); + try w.print("{s:<30} {s}\n", .{ name, option.description }); if (option.enum_options) |enum_options| { const padding = " " ** 33; - try out_stream.writeAll(padding ++ "Supported Values:\n"); + try w.writeAll(padding ++ "Supported Values:\n"); for (enum_options) |enum_option| { - try out_stream.print(padding ++ " {s}\n", .{enum_option}); + try w.print(padding ++ " {s}\n", .{enum_option}); } } } } - try out_stream.writeAll( + try w.writeAll( \\ \\System Integration Options: \\ --search-prefix [path] Add a path to look for binaries, libraries, headers @@ -1352,7 +1359,7 @@ fn usage(b: *std.Build, out_stream: anytype) !void { \\ ); if (b.graph.system_library_options.entries.len == 0) { - try out_stream.writeAll(" (none) -\n"); + try w.writeAll(" (none) -\n"); } else { for (b.graph.system_library_options.keys(), b.graph.system_library_options.values()) |k, v| { const status = switch (v) { @@ -1360,11 +1367,11 @@ fn usage(b: *std.Build, out_stream: anytype) !void { .declared_disabled => "no", .user_enabled, .user_disabled => unreachable, // already emitted error }; - try out_stream.print(" {s:<43} {s}\n", .{ k, status }); + try w.print(" {s:<43} {s}\n", .{ k, status }); } } - try out_stream.writeAll( + try w.writeAll( \\ 
\\Advanced Options: \\ -freference-trace[=num] How many lines of reference trace should be shown per compile error @@ -1544,3 +1551,11 @@ fn createModuleDependenciesForStep(step: *Step) Allocator.Error!void { }; } } + +var stdio_buffer_allocation: [256]u8 = undefined; +var stdout_writer_allocation: std.fs.File.Writer = undefined; + +fn initStdoutWriter() *Writer { + stdout_writer_allocation = std.fs.File.stdout().writerStreaming(&stdio_buffer_allocation); + return &stdout_writer_allocation.interface; +} diff --git a/lib/compiler/libc.zig b/lib/compiler/libc.zig index 2f4c26b0cc..0d26b59d24 100644 --- a/lib/compiler/libc.zig +++ b/lib/compiler/libc.zig @@ -40,7 +40,7 @@ pub fn main() !void { const arg = args[i]; if (mem.startsWith(u8, arg, "-")) { if (mem.eql(u8, arg, "-h") or mem.eql(u8, arg, "--help")) { - const stdout = std.io.getStdOut().writer(); + const stdout = std.fs.File.stdout().deprecatedWriter(); try stdout.writeAll(usage_libc); return std.process.cleanExit(); } else if (mem.eql(u8, arg, "-target")) { @@ -97,7 +97,7 @@ pub fn main() !void { fatal("no include dirs detected for target {s}", .{zig_target}); } - var bw = std.io.bufferedWriter(std.io.getStdOut().writer()); + var bw = std.io.bufferedWriter(std.fs.File.stdout().deprecatedWriter()); var writer = bw.writer(); for (libc_dirs.libc_include_dir_list) |include_dir| { try writer.writeAll(include_dir); @@ -125,7 +125,7 @@ pub fn main() !void { }; defer libc.deinit(gpa); - var bw = std.io.bufferedWriter(std.io.getStdOut().writer()); + var bw = std.io.bufferedWriter(std.fs.File.stdout().deprecatedWriter()); try libc.render(bw.writer()); try bw.flush(); } diff --git a/lib/compiler/objcopy.zig b/lib/compiler/objcopy.zig index bcd3a69a0c..3e383e6ad4 100644 --- a/lib/compiler/objcopy.zig +++ b/lib/compiler/objcopy.zig @@ -54,7 +54,7 @@ fn cmdObjCopy( fatal("unexpected positional argument: '{s}'", .{arg}); } } else if (mem.eql(u8, arg, "-h") or mem.eql(u8, arg, "--help")) { - return 
std.io.getStdOut().writeAll(usage); + return std.fs.File.stdout().writeAll(usage); } else if (mem.eql(u8, arg, "-O") or mem.eql(u8, arg, "--output-target")) { i += 1; if (i >= args.len) fatal("expected another argument after '{s}'", .{arg}); @@ -227,8 +227,8 @@ fn cmdObjCopy( if (listen) { var server = try Server.init(.{ .gpa = gpa, - .in = std.io.getStdIn(), - .out = std.io.getStdOut(), + .in = .stdin(), + .out = .stdout(), .zig_version = builtin.zig_version_string, }); defer server.deinit(); @@ -635,11 +635,11 @@ const HexWriter = struct { const payload_bytes = self.getPayloadBytes(); assert(payload_bytes.len <= MAX_PAYLOAD_LEN); - const line = try std.fmt.bufPrint(&outbuf, ":{0X:0>2}{1X:0>4}{2X:0>2}{3s}{4X:0>2}" ++ linesep, .{ + const line = try std.fmt.bufPrint(&outbuf, ":{0X:0>2}{1X:0>4}{2X:0>2}{3X}{4X:0>2}" ++ linesep, .{ @as(u8, @intCast(payload_bytes.len)), self.address, @intFromEnum(self.payload), - std.fmt.fmtSliceHexUpper(payload_bytes), + payload_bytes, self.checksum(), }); try file.writeAll(line); @@ -1495,7 +1495,7 @@ const ElfFileHelper = struct { if (size < prefix.len) return null; try in_file.seekTo(offset); - var section_reader = std.io.limitedReader(in_file.reader(), size); + var section_reader = std.io.limitedReader(in_file.deprecatedReader(), size); // allocate as large as decompressed data. if the compression doesn't fit, keep the data uncompressed. 
const compressed_data = try allocator.alignedAlloc(u8, .@"8", @intCast(size)); diff --git a/lib/compiler/reduce.zig b/lib/compiler/reduce.zig index 57b4fd5b1b..a25ef5304b 100644 --- a/lib/compiler/reduce.zig +++ b/lib/compiler/reduce.zig @@ -68,7 +68,7 @@ pub fn main() !void { const arg = args[i]; if (mem.startsWith(u8, arg, "-")) { if (mem.eql(u8, arg, "-h") or mem.eql(u8, arg, "--help")) { - const stdout = std.io.getStdOut().writer(); + const stdout = std.fs.File.stdout().deprecatedWriter(); try stdout.writeAll(usage); return std.process.cleanExit(); } else if (mem.eql(u8, arg, "--")) { diff --git a/lib/compiler/resinator/cli.zig b/lib/compiler/resinator/cli.zig index f27775d6af..6d4a368b4e 100644 --- a/lib/compiler/resinator/cli.zig +++ b/lib/compiler/resinator/cli.zig @@ -125,13 +125,12 @@ pub const Diagnostics = struct { } pub fn renderToStdErr(self: *Diagnostics, args: []const []const u8, config: std.io.tty.Config) void { - std.debug.lockStdErr(); - defer std.debug.unlockStdErr(); - const stderr = std.io.getStdErr().writer(); + const stderr = std.debug.lockStderrWriter(&.{}); + defer std.debug.unlockStderrWriter(); self.renderToWriter(args, stderr, config) catch return; } - pub fn renderToWriter(self: *Diagnostics, args: []const []const u8, writer: anytype, config: std.io.tty.Config) !void { + pub fn renderToWriter(self: *Diagnostics, args: []const []const u8, writer: *std.io.Writer, config: std.io.tty.Config) !void { for (self.errors.items) |err_details| { try renderErrorMessage(writer, config, err_details, args); } @@ -1403,7 +1402,7 @@ test parsePercent { try std.testing.expectError(error.InvalidFormat, parsePercent("~1")); } -pub fn renderErrorMessage(writer: anytype, config: std.io.tty.Config, err_details: Diagnostics.ErrorDetails, args: []const []const u8) !void { +pub fn renderErrorMessage(writer: *std.io.Writer, config: std.io.tty.Config, err_details: Diagnostics.ErrorDetails, args: []const []const u8) !void { try config.setColor(writer, .dim); try 
writer.writeAll(""); try config.setColor(writer, .reset); @@ -1481,27 +1480,27 @@ pub fn renderErrorMessage(writer: anytype, config: std.io.tty.Config, err_detail try writer.writeByte('\n'); try config.setColor(writer, .green); - try writer.writeByteNTimes(' ', prefix.len); + try writer.splatByteAll(' ', prefix.len); // Special case for when the option is *only* a prefix (e.g. invalid option: -) if (err_details.arg_span.prefix_len == arg_with_name.len) { - try writer.writeByteNTimes('^', err_details.arg_span.prefix_len); + try writer.splatByteAll('^', err_details.arg_span.prefix_len); } else { - try writer.writeByteNTimes('~', err_details.arg_span.prefix_len); - try writer.writeByteNTimes(' ', err_details.arg_span.name_offset - err_details.arg_span.prefix_len); + try writer.splatByteAll('~', err_details.arg_span.prefix_len); + try writer.splatByteAll(' ', err_details.arg_span.name_offset - err_details.arg_span.prefix_len); if (!err_details.arg_span.point_at_next_arg and err_details.arg_span.value_offset == 0) { try writer.writeByte('^'); - try writer.writeByteNTimes('~', name_slice.len - 1); + try writer.splatByteAll('~', name_slice.len - 1); } else if (err_details.arg_span.value_offset > 0) { - try writer.writeByteNTimes('~', err_details.arg_span.value_offset - err_details.arg_span.name_offset); + try writer.splatByteAll('~', err_details.arg_span.value_offset - err_details.arg_span.name_offset); try writer.writeByte('^'); if (err_details.arg_span.value_offset < arg_with_name.len) { - try writer.writeByteNTimes('~', arg_with_name.len - err_details.arg_span.value_offset - 1); + try writer.splatByteAll('~', arg_with_name.len - err_details.arg_span.value_offset - 1); } } else if (err_details.arg_span.point_at_next_arg) { - try writer.writeByteNTimes('~', arg_with_name.len - err_details.arg_span.name_offset + 1); + try writer.splatByteAll('~', arg_with_name.len - err_details.arg_span.name_offset + 1); try writer.writeByte('^'); if (next_arg_len > 0) { - try 
writer.writeByteNTimes('~', next_arg_len - 1); + try writer.splatByteAll('~', next_arg_len - 1); } } } diff --git a/lib/compiler/resinator/compile.zig b/lib/compiler/resinator/compile.zig index 1f35af8988..3515421ff0 100644 --- a/lib/compiler/resinator/compile.zig +++ b/lib/compiler/resinator/compile.zig @@ -570,7 +570,7 @@ pub const Compiler = struct { switch (predefined_type) { .GROUP_ICON, .GROUP_CURSOR => { // Check for animated icon first - if (ani.isAnimatedIcon(file.reader())) { + if (ani.isAnimatedIcon(file.deprecatedReader())) { // Animated icons are just put into the resource unmodified, // and the resource type changes to ANIICON/ANICURSOR @@ -586,14 +586,14 @@ pub const Compiler = struct { try header.write(writer, self.errContext(node.id)); try file.seekTo(0); - try writeResourceData(writer, file.reader(), header.data_size); + try writeResourceData(writer, file.deprecatedReader(), header.data_size); return; } // isAnimatedIcon moved the file cursor so reset to the start try file.seekTo(0); - const icon_dir = ico.read(self.allocator, file.reader(), try file.getEndPos()) catch |err| switch (err) { + const icon_dir = ico.read(self.allocator, file.deprecatedReader(), try file.getEndPos()) catch |err| switch (err) { error.OutOfMemory => |e| return e, else => |e| { return self.iconReadError( @@ -672,7 +672,7 @@ pub const Compiler = struct { } try file.seekTo(entry.data_offset_from_start_of_file); - var header_bytes = file.reader().readBytesNoEof(16) catch { + var header_bytes = file.deprecatedReader().readBytesNoEof(16) catch { return self.iconReadError( error.UnexpectedEOF, filename_utf8, @@ -803,7 +803,7 @@ pub const Compiler = struct { } try file.seekTo(entry.data_offset_from_start_of_file); - try writeResourceDataNoPadding(writer, file.reader(), entry.data_size_in_bytes); + try writeResourceDataNoPadding(writer, file.deprecatedReader(), entry.data_size_in_bytes); try writeDataPadding(writer, full_data_size); if (self.state.icon_id == std.math.maxInt(u16)) 
{ @@ -859,7 +859,7 @@ pub const Compiler = struct { header.applyMemoryFlags(node.common_resource_attributes, self.source); const file_size = try file.getEndPos(); - const bitmap_info = bmp.read(file.reader(), file_size) catch |err| { + const bitmap_info = bmp.read(file.deprecatedReader(), file_size) catch |err| { const filename_string_index = try self.diagnostics.putString(filename_utf8); return self.addErrorDetailsAndFail(.{ .err = .bmp_read_error, @@ -922,7 +922,7 @@ pub const Compiler = struct { header.data_size = bmp_bytes_to_write; try header.write(writer, self.errContext(node.id)); try file.seekTo(bmp.file_header_len); - const file_reader = file.reader(); + const file_reader = file.deprecatedReader(); try writeResourceDataNoPadding(writer, file_reader, bitmap_info.dib_header_size); if (bitmap_info.getBitmasksByteLen() > 0) { try writeResourceDataNoPadding(writer, file_reader, bitmap_info.getBitmasksByteLen()); @@ -968,7 +968,7 @@ pub const Compiler = struct { header.data_size = @intCast(file_size); try header.write(writer, self.errContext(node.id)); - var header_slurping_reader = headerSlurpingReader(148, file.reader()); + var header_slurping_reader = headerSlurpingReader(148, file.deprecatedReader()); try writeResourceData(writer, header_slurping_reader.reader(), header.data_size); try self.state.font_dir.add(self.arena, FontDir.Font{ @@ -1002,7 +1002,7 @@ pub const Compiler = struct { // We now know that the data size will fit in a u32 header.data_size = @intCast(data_size); try header.write(writer, self.errContext(node.id)); - try writeResourceData(writer, file.reader(), header.data_size); + try writeResourceData(writer, file.deprecatedReader(), header.data_size); } fn iconReadError( @@ -2949,7 +2949,7 @@ pub fn HeaderSlurpingReader(comptime size: usize, comptime ReaderType: anytype) slurped_header: [size]u8 = [_]u8{0x00} ** size, pub const Error = ReaderType.Error; - pub const Reader = std.io.Reader(*@This(), Error, read); + pub const Reader = 
std.io.GenericReader(*@This(), Error, read); pub fn read(self: *@This(), buf: []u8) Error!usize { const amt = try self.child_reader.read(buf); @@ -2983,7 +2983,7 @@ pub fn LimitedWriter(comptime WriterType: type) type { bytes_left: u64, pub const Error = error{NoSpaceLeft} || WriterType.Error; - pub const Writer = std.io.Writer(*Self, Error, write); + pub const Writer = std.io.GenericWriter(*Self, Error, write); const Self = @This(); diff --git a/lib/compiler/resinator/errors.zig b/lib/compiler/resinator/errors.zig index 9727872367..14a001894e 100644 --- a/lib/compiler/resinator/errors.zig +++ b/lib/compiler/resinator/errors.zig @@ -1,4 +1,5 @@ const std = @import("std"); +const assert = std.debug.assert; const Token = @import("lex.zig").Token; const SourceMappings = @import("source_mapping.zig").SourceMappings; const utils = @import("utils.zig"); @@ -61,16 +62,15 @@ pub const Diagnostics = struct { } pub fn renderToStdErr(self: *Diagnostics, cwd: std.fs.Dir, source: []const u8, tty_config: std.io.tty.Config, source_mappings: ?SourceMappings) void { - std.debug.lockStdErr(); - defer std.debug.unlockStdErr(); - const stderr = std.io.getStdErr().writer(); + const stderr = std.debug.lockStderrWriter(&.{}); + defer std.debug.unlockStderrWriter(); for (self.errors.items) |err_details| { renderErrorMessage(stderr, tty_config, cwd, err_details, source, self.strings.items, source_mappings) catch return; } } pub fn renderToStdErrDetectTTY(self: *Diagnostics, cwd: std.fs.Dir, source: []const u8, source_mappings: ?SourceMappings) void { - const tty_config = std.io.tty.detectConfig(std.io.getStdErr()); + const tty_config = std.io.tty.detectConfig(std.fs.File.stderr()); return self.renderToStdErr(cwd, source, tty_config, source_mappings); } @@ -409,15 +409,7 @@ pub const ErrorDetails = struct { failed_to_open_cwd, }; - fn formatToken( - ctx: TokenFormatContext, - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = fmt; - _ = options; 
- + fn formatToken(ctx: TokenFormatContext, writer: *std.io.Writer) std.io.Writer.Error!void { switch (ctx.token.id) { .eof => return writer.writeAll(ctx.token.id.nameForErrorDisplay()), else => {}, @@ -441,7 +433,7 @@ pub const ErrorDetails = struct { code_page: SupportedCodePage, }; - fn fmtToken(self: ErrorDetails, source: []const u8) std.fmt.Formatter(formatToken) { + fn fmtToken(self: ErrorDetails, source: []const u8) std.fmt.Formatter(TokenFormatContext, formatToken) { return .{ .data = .{ .token = self.token, .code_page = self.code_page, @@ -452,7 +444,7 @@ pub const ErrorDetails = struct { pub fn render(self: ErrorDetails, writer: anytype, source: []const u8, strings: []const []const u8) !void { switch (self.err) { .unfinished_string_literal => { - return writer.print("unfinished string literal at '{s}', expected closing '\"'", .{self.fmtToken(source)}); + return writer.print("unfinished string literal at '{f}', expected closing '\"'", .{self.fmtToken(source)}); }, .string_literal_too_long => { return writer.print("string literal too long (max is currently {} characters)", .{self.extra.number}); @@ -466,10 +458,14 @@ pub const ErrorDetails = struct { .hint => return, }, .illegal_byte => { - return writer.print("character '{s}' is not allowed", .{std.fmt.fmtSliceEscapeUpper(self.token.slice(source))}); + return writer.print("character '{f}' is not allowed", .{ + std.ascii.hexEscape(self.token.slice(source), .upper), + }); }, .illegal_byte_outside_string_literals => { - return writer.print("character '{s}' is not allowed outside of string literals", .{std.fmt.fmtSliceEscapeUpper(self.token.slice(source))}); + return writer.print("character '{f}' is not allowed outside of string literals", .{ + std.ascii.hexEscape(self.token.slice(source), .upper), + }); }, .illegal_codepoint_outside_string_literals => { // This is somewhat hacky, but we know that: @@ -527,26 +523,26 @@ pub const ErrorDetails = struct { return writer.print("unsupported code page '{s} (id={})' 
in #pragma code_page", .{ @tagName(code_page), number }); }, .unfinished_raw_data_block => { - return writer.print("unfinished raw data block at '{s}', expected closing '}}' or 'END'", .{self.fmtToken(source)}); + return writer.print("unfinished raw data block at '{f}', expected closing '}}' or 'END'", .{self.fmtToken(source)}); }, .unfinished_string_table_block => { - return writer.print("unfinished STRINGTABLE block at '{s}', expected closing '}}' or 'END'", .{self.fmtToken(source)}); + return writer.print("unfinished STRINGTABLE block at '{f}', expected closing '}}' or 'END'", .{self.fmtToken(source)}); }, .expected_token => { - return writer.print("expected '{s}', got '{s}'", .{ self.extra.expected.nameForErrorDisplay(), self.fmtToken(source) }); + return writer.print("expected '{s}', got '{f}'", .{ self.extra.expected.nameForErrorDisplay(), self.fmtToken(source) }); }, .expected_something_else => { try writer.writeAll("expected "); try self.extra.expected_types.writeCommaSeparated(writer); - return writer.print("; got '{s}'", .{self.fmtToken(source)}); + return writer.print("; got '{f}'", .{self.fmtToken(source)}); }, .resource_type_cant_use_raw_data => switch (self.type) { - .err, .warning => try writer.print("expected '', found '{s}' (resource type '{s}' can't use raw data)", .{ self.fmtToken(source), self.extra.resource.nameForErrorDisplay() }), - .note => try writer.print("if '{s}' is intended to be a filename, it must be specified as a quoted string literal", .{self.fmtToken(source)}), + .err, .warning => try writer.print("expected '', found '{f}' (resource type '{s}' can't use raw data)", .{ self.fmtToken(source), self.extra.resource.nameForErrorDisplay() }), + .note => try writer.print("if '{f}' is intended to be a filename, it must be specified as a quoted string literal", .{self.fmtToken(source)}), .hint => return, }, .id_must_be_ordinal => { - try writer.print("id of resource type '{s}' must be an ordinal (u16), got '{s}'", .{ 
self.extra.resource.nameForErrorDisplay(), self.fmtToken(source) }); + try writer.print("id of resource type '{s}' must be an ordinal (u16), got '{f}'", .{ self.extra.resource.nameForErrorDisplay(), self.fmtToken(source) }); }, .name_or_id_not_allowed => { try writer.print("name or id is not allowed for resource type '{s}'", .{self.extra.resource.nameForErrorDisplay()}); @@ -562,7 +558,7 @@ pub const ErrorDetails = struct { try writer.writeAll("ASCII character not equivalent to virtual key code"); }, .empty_menu_not_allowed => { - try writer.print("empty menu of type '{s}' not allowed", .{self.fmtToken(source)}); + try writer.print("empty menu of type '{f}' not allowed", .{self.fmtToken(source)}); }, .rc_would_miscompile_version_value_padding => switch (self.type) { .err, .warning => return writer.print("the padding before this quoted string value would be miscompiled by the Win32 RC compiler", .{}), @@ -627,7 +623,7 @@ pub const ErrorDetails = struct { .string_already_defined => switch (self.type) { .err, .warning => { const language = self.extra.string_and_language.language; - return writer.print("string with id {d} (0x{X}) already defined for language {}", .{ self.extra.string_and_language.id, self.extra.string_and_language.id, language }); + return writer.print("string with id {d} (0x{X}) already defined for language {f}", .{ self.extra.string_and_language.id, self.extra.string_and_language.id, language }); }, .note => return writer.print("previous definition of string with id {d} (0x{X}) here", .{ self.extra.string_and_language.id, self.extra.string_and_language.id }), .hint => return, @@ -642,7 +638,7 @@ pub const ErrorDetails = struct { try writer.print("unable to open file '{s}': {s}", .{ strings[self.extra.file_open_error.filename_string_index], @tagName(self.extra.file_open_error.err) }); }, .invalid_accelerator_key => { - try writer.print("invalid accelerator key '{s}': {s}", .{ self.fmtToken(source), @tagName(self.extra.accelerator_error.err) }); + try 
writer.print("invalid accelerator key '{f}': {s}", .{ self.fmtToken(source), @tagName(self.extra.accelerator_error.err) }); }, .accelerator_type_required => { try writer.writeAll("accelerator type [ASCII or VIRTKEY] required when key is an integer"); @@ -898,7 +894,7 @@ fn cellCount(code_page: SupportedCodePage, source: []const u8, start_index: usiz const truncated_str = "<...truncated...>"; -pub fn renderErrorMessage(writer: anytype, tty_config: std.io.tty.Config, cwd: std.fs.Dir, err_details: ErrorDetails, source: []const u8, strings: []const []const u8, source_mappings: ?SourceMappings) !void { +pub fn renderErrorMessage(writer: *std.io.Writer, tty_config: std.io.tty.Config, cwd: std.fs.Dir, err_details: ErrorDetails, source: []const u8, strings: []const []const u8, source_mappings: ?SourceMappings) !void { if (err_details.type == .hint) return; const source_line_start = err_details.token.getLineStartForErrorDisplay(source); @@ -981,10 +977,10 @@ pub fn renderErrorMessage(writer: anytype, tty_config: std.io.tty.Config, cwd: s try tty_config.setColor(writer, .green); const num_spaces = truncated_visual_info.point_offset - truncated_visual_info.before_len; - try writer.writeByteNTimes(' ', num_spaces); - try writer.writeByteNTimes('~', truncated_visual_info.before_len); + try writer.splatByteAll(' ', num_spaces); + try writer.splatByteAll('~', truncated_visual_info.before_len); try writer.writeByte('^'); - try writer.writeByteNTimes('~', truncated_visual_info.after_len); + try writer.splatByteAll('~', truncated_visual_info.after_len); try writer.writeByte('\n'); try tty_config.setColor(writer, .reset); @@ -1085,7 +1081,7 @@ const CorrespondingLines = struct { buffered_reader: BufferedReaderType, code_page: SupportedCodePage, - const BufferedReaderType = std.io.BufferedReader(512, std.fs.File.Reader); + const BufferedReaderType = std.io.BufferedReader(512, std.fs.File.DeprecatedReader); pub fn init(cwd: std.fs.Dir, err_details: ErrorDetails, line_for_comparison: 
[]const u8, corresponding_span: SourceMappings.CorrespondingSpan, corresponding_file: []const u8) !CorrespondingLines { // We don't do line comparison for this error, so don't print the note if the line @@ -1106,7 +1102,7 @@ const CorrespondingLines = struct { .code_page = err_details.code_page, }; corresponding_lines.buffered_reader = BufferedReaderType{ - .unbuffered_reader = corresponding_lines.file.reader(), + .unbuffered_reader = corresponding_lines.file.deprecatedReader(), }; errdefer corresponding_lines.deinit(); diff --git a/lib/compiler/resinator/lex.zig b/lib/compiler/resinator/lex.zig index cfb75e4c5b..0b276b92f5 100644 --- a/lib/compiler/resinator/lex.zig +++ b/lib/compiler/resinator/lex.zig @@ -237,7 +237,9 @@ pub const Lexer = struct { } pub fn dump(self: *Self, token: *const Token) void { - std.debug.print("{s}:{d}: {s}\n", .{ @tagName(token.id), token.line_number, std.fmt.fmtSliceEscapeLower(token.slice(self.buffer)) }); + std.debug.print("{s}:{d}: {f}\n", .{ + @tagName(token.id), token.line_number, std.ascii.hexEscape(token.slice(self.buffer), .lower), + }); } pub const LexMethod = enum { diff --git a/lib/compiler/resinator/main.zig b/lib/compiler/resinator/main.zig index 8344a16b25..903e0a2f71 100644 --- a/lib/compiler/resinator/main.zig +++ b/lib/compiler/resinator/main.zig @@ -22,14 +22,14 @@ pub fn main() !void { defer arena_state.deinit(); const arena = arena_state.allocator(); - const stderr = std.io.getStdErr(); + const stderr = std.fs.File.stderr(); const stderr_config = std.io.tty.detectConfig(stderr); const args = try std.process.argsAlloc(allocator); defer std.process.argsFree(allocator, args); if (args.len < 2) { - try renderErrorMessage(stderr.writer(), stderr_config, .err, "expected zig lib dir as first argument", .{}); + try renderErrorMessage(std.debug.lockStderrWriter(&.{}), stderr_config, .err, "expected zig lib dir as first argument", .{}); std.process.exit(1); } const zig_lib_dir = args[1]; @@ -44,7 +44,7 @@ pub fn main() !void 
{ var error_handler: ErrorHandler = switch (zig_integration) { true => .{ .server = .{ - .out = std.io.getStdOut(), + .out = std.fs.File.stdout(), .in = undefined, // won't be receiving messages .receive_fifo = undefined, // won't be receiving messages }, @@ -81,15 +81,15 @@ pub fn main() !void { defer options.deinit(); if (options.print_help_and_exit) { - const stdout = std.io.getStdOut(); - try cli.writeUsage(stdout.writer(), "zig rc"); + const stdout = std.fs.File.stdout(); + try cli.writeUsage(stdout.deprecatedWriter(), "zig rc"); return; } // Don't allow verbose when integrating with Zig via stdout options.verbose = false; - const stdout_writer = std.io.getStdOut().writer(); + const stdout_writer = std.fs.File.stdout().deprecatedWriter(); if (options.verbose) { try options.dumpVerbose(stdout_writer); try stdout_writer.writeByte('\n'); @@ -290,7 +290,7 @@ pub fn main() !void { }; defer depfile.close(); - const depfile_writer = depfile.writer(); + const depfile_writer = depfile.deprecatedWriter(); var depfile_buffered_writer = std.io.bufferedWriter(depfile_writer); switch (options.depfile_fmt) { .json => { @@ -343,7 +343,7 @@ pub fn main() !void { switch (err) { error.DuplicateResource => { const duplicate_resource = resources.list.items[cvtres_diagnostics.duplicate_resource]; - try error_handler.emitMessage(allocator, .err, "duplicate resource [id: {}, type: {}, language: {}]", .{ + try error_handler.emitMessage(allocator, .err, "duplicate resource [id: {f}, type: {f}, language: {f}]", .{ duplicate_resource.name_value, fmtResourceType(duplicate_resource.type_value), duplicate_resource.language, @@ -352,7 +352,7 @@ pub fn main() !void { error.ResourceDataTooLong => { const overflow_resource = resources.list.items[cvtres_diagnostics.duplicate_resource]; try error_handler.emitMessage(allocator, .err, "resource has a data length that is too large to be written into a coff section", .{}); - try error_handler.emitMessage(allocator, .note, "the resource with the 
invalid size is [id: {}, type: {}, language: {}]", .{ + try error_handler.emitMessage(allocator, .note, "the resource with the invalid size is [id: {f}, type: {f}, language: {f}]", .{ overflow_resource.name_value, fmtResourceType(overflow_resource.type_value), overflow_resource.language, @@ -361,7 +361,7 @@ pub fn main() !void { error.TotalResourceDataTooLong => { const overflow_resource = resources.list.items[cvtres_diagnostics.duplicate_resource]; try error_handler.emitMessage(allocator, .err, "total resource data exceeds the maximum of the coff 'size of raw data' field", .{}); - try error_handler.emitMessage(allocator, .note, "size overflow occurred when attempting to write this resource: [id: {}, type: {}, language: {}]", .{ + try error_handler.emitMessage(allocator, .note, "size overflow occurred when attempting to write this resource: [id: {f}, type: {f}, language: {f}]", .{ overflow_resource.name_value, fmtResourceType(overflow_resource.type_value), overflow_resource.language, @@ -471,7 +471,7 @@ const IoStream = struct { allocator: std.mem.Allocator, }; pub const WriteError = std.mem.Allocator.Error || std.fs.File.WriteError; - pub const Writer = std.io.Writer(WriterContext, WriteError, write); + pub const Writer = std.io.GenericWriter(WriterContext, WriteError, write); pub fn write(ctx: WriterContext, bytes: []const u8) WriteError!usize { switch (ctx.self.*) { @@ -645,7 +645,9 @@ const ErrorHandler = union(enum) { }, .tty => { // extra newline to separate this line from the aro errors - try renderErrorMessage(std.io.getStdErr().writer(), self.tty, .err, "{s}\n", .{fail_msg}); + const stderr = std.debug.lockStderrWriter(&.{}); + defer std.debug.unlockStderrWriter(); + try renderErrorMessage(stderr, self.tty, .err, "{s}\n", .{fail_msg}); aro.Diagnostics.render(comp, self.tty); }, } @@ -690,7 +692,9 @@ const ErrorHandler = union(enum) { try server.serveErrorBundle(error_bundle); }, .tty => { - try renderErrorMessage(std.io.getStdErr().writer(), self.tty, 
msg_type, format, args); + const stderr = std.debug.lockStderrWriter(&.{}); + defer std.debug.unlockStderrWriter(); + try renderErrorMessage(stderr, self.tty, msg_type, format, args); }, } } diff --git a/lib/compiler/resinator/res.zig b/lib/compiler/resinator/res.zig index b4fcd53f9d..4e1953233d 100644 --- a/lib/compiler/resinator/res.zig +++ b/lib/compiler/resinator/res.zig @@ -1,4 +1,5 @@ const std = @import("std"); +const assert = std.debug.assert; const rc = @import("rc.zig"); const ResourceType = rc.ResourceType; const CommonResourceAttributes = rc.CommonResourceAttributes; @@ -163,14 +164,7 @@ pub const Language = packed struct(u16) { return @bitCast(self); } - pub fn format( - language: Language, - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - out_stream: anytype, - ) !void { - _ = fmt; - _ = options; + pub fn format(language: Language, w: *std.io.Writer) std.io.Writer.Error!void { const language_id = language.asInt(); const language_name = language_name: { if (std.enums.fromInt(lang.LanguageId, language_id)) |lang_enum_val| { @@ -181,7 +175,7 @@ pub const Language = packed struct(u16) { } break :language_name ""; }; - try out_stream.print("{s} (0x{X})", .{ language_name, language_id }); + try w.print("{s} (0x{X})", .{ language_name, language_id }); } }; @@ -445,47 +439,33 @@ pub const NameOrOrdinal = union(enum) { } } - pub fn format( - self: NameOrOrdinal, - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - out_stream: anytype, - ) !void { - _ = fmt; - _ = options; + pub fn format(self: NameOrOrdinal, w: *std.io.Writer) !void { switch (self) { .name => |name| { - try out_stream.print("{s}", .{std.unicode.fmtUtf16Le(name)}); + try w.print("{f}", .{std.unicode.fmtUtf16Le(name)}); }, .ordinal => |ordinal| { - try out_stream.print("{d}", .{ordinal}); + try w.print("{d}", .{ordinal}); }, } } - fn formatResourceType( - self: NameOrOrdinal, - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - out_stream: anytype, - ) !void { 
- _ = fmt; - _ = options; + fn formatResourceType(self: NameOrOrdinal, w: *std.io.Writer) std.io.Writer.Error!void { switch (self) { .name => |name| { - try out_stream.print("{s}", .{std.unicode.fmtUtf16Le(name)}); + try w.print("{f}", .{std.unicode.fmtUtf16Le(name)}); }, .ordinal => |ordinal| { if (std.enums.tagName(RT, @enumFromInt(ordinal))) |predefined_type_name| { - try out_stream.print("{s}", .{predefined_type_name}); + try w.print("{s}", .{predefined_type_name}); } else { - try out_stream.print("{d}", .{ordinal}); + try w.print("{d}", .{ordinal}); } }, } } - pub fn fmtResourceType(type_value: NameOrOrdinal) std.fmt.Formatter(formatResourceType) { + pub fn fmtResourceType(type_value: NameOrOrdinal) std.fmt.Formatter(NameOrOrdinal, formatResourceType) { return .{ .data = type_value }; } }; diff --git a/lib/compiler/resinator/utils.zig b/lib/compiler/resinator/utils.zig index b69222fe03..840833fbcd 100644 --- a/lib/compiler/resinator/utils.zig +++ b/lib/compiler/resinator/utils.zig @@ -86,7 +86,7 @@ pub const ErrorMessageType = enum { err, warning, note }; /// Used for generic colored errors/warnings/notes, more context-specific error messages /// are handled elsewhere. 
-pub fn renderErrorMessage(writer: anytype, config: std.io.tty.Config, msg_type: ErrorMessageType, comptime format: []const u8, args: anytype) !void { +pub fn renderErrorMessage(writer: *std.io.Writer, config: std.io.tty.Config, msg_type: ErrorMessageType, comptime format: []const u8, args: anytype) !void { switch (msg_type) { .err => { try config.setColor(writer, .bold); diff --git a/lib/compiler/std-docs.zig b/lib/compiler/std-docs.zig index 6247077527..b5bc742717 100644 --- a/lib/compiler/std-docs.zig +++ b/lib/compiler/std-docs.zig @@ -7,7 +7,7 @@ const assert = std.debug.assert; const Cache = std.Build.Cache; fn usage() noreturn { - io.getStdOut().writeAll( + std.fs.File.stdout().writeAll( \\Usage: zig std [options] \\ \\Options: @@ -63,7 +63,7 @@ pub fn main() !void { var http_server = try address.listen(.{}); const port = http_server.listen_address.in.getPort(); const url_with_newline = try std.fmt.allocPrint(arena, "http://127.0.0.1:{d}/\n", .{port}); - std.io.getStdOut().writeAll(url_with_newline) catch {}; + std.fs.File.stdout().writeAll(url_with_newline) catch {}; if (should_open_browser) { openBrowserTab(gpa, url_with_newline[0 .. 
url_with_newline.len - 1 :'\n']) catch |err| { std.log.err("unable to open browser: {s}", .{@errorName(err)}); diff --git a/lib/compiler/test_runner.zig b/lib/compiler/test_runner.zig index f9adc002ad..929bd1c417 100644 --- a/lib/compiler/test_runner.zig +++ b/lib/compiler/test_runner.zig @@ -69,8 +69,8 @@ fn mainServer() !void { @disableInstrumentation(); var server = try std.zig.Server.init(.{ .gpa = fba.allocator(), - .in = std.io.getStdIn(), - .out = std.io.getStdOut(), + .in = .stdin(), + .out = .stdout(), .zig_version = builtin.zig_version_string, }); defer server.deinit(); @@ -191,7 +191,7 @@ fn mainTerminal() void { .root_name = "Test", .estimated_total_items = test_fn_list.len, }); - const have_tty = std.io.getStdErr().isTty(); + const have_tty = std.fs.File.stderr().isTty(); var async_frame_buffer: []align(builtin.target.stackAlignment()) u8 = undefined; // TODO this is on the next line (using `undefined` above) because otherwise zig incorrectly @@ -301,7 +301,7 @@ pub fn mainSimple() anyerror!void { var failed: u64 = 0; // we don't want to bring in File and Writer if the backend doesn't support it - const stderr = if (comptime enable_print) std.io.getStdErr() else {}; + const stderr = if (comptime enable_print) std.fs.File.stderr() else {}; for (builtin.test_functions) |test_fn| { if (test_fn.func()) |_| { @@ -328,7 +328,7 @@ pub fn mainSimple() anyerror!void { passed += 1; } if (enable_print and print_summary) { - stderr.writer().print("{} passed, {} skipped, {} failed\n", .{ passed, skipped, failed }) catch {}; + stderr.deprecatedWriter().print("{} passed, {} skipped, {} failed\n", .{ passed, skipped, failed }) catch {}; } if (failed != 0) std.process.exit(1); } diff --git a/lib/docs/wasm/Walk.zig b/lib/docs/wasm/Walk.zig index 65288b8878..688fcbd00a 100644 --- a/lib/docs/wasm/Walk.zig +++ b/lib/docs/wasm/Walk.zig @@ -440,7 +440,7 @@ fn parse(file_name: []const u8, source: []u8) Oom!Ast { const err_loc = std.zig.findLineColumn(ast.source, err_offset); 
rendered_err.clearRetainingCapacity(); try ast.renderError(err, rendered_err.writer(gpa)); - log.err("{s}:{}:{}: {s}", .{ file_name, err_loc.line + 1, err_loc.column + 1, rendered_err.items }); + log.err("{s}:{d}:{d}: {s}", .{ file_name, err_loc.line + 1, err_loc.column + 1, rendered_err.items }); } return Ast.parse(gpa, "", .zig); } diff --git a/lib/docs/wasm/main.zig b/lib/docs/wasm/main.zig index d886f8037c..7e9ffa5e4c 100644 --- a/lib/docs/wasm/main.zig +++ b/lib/docs/wasm/main.zig @@ -717,9 +717,9 @@ fn render_docs( try writer.writeAll("{}", .{markdown.fmtHtml(content)}); + try writer.print("\">{f}", .{markdown.fmtHtml(content)}); } else { - try writer.print("{}", .{markdown.fmtHtml(content)}); + try writer.print("{f}", .{markdown.fmtHtml(content)}); } try writer.writeAll(""); diff --git a/lib/docs/wasm/markdown.zig b/lib/docs/wasm/markdown.zig index b18fe05445..3293b680c9 100644 --- a/lib/docs/wasm/markdown.zig +++ b/lib/docs/wasm/markdown.zig @@ -145,7 +145,7 @@ fn mainImpl() !void { var parser = try Parser.init(gpa); defer parser.deinit(); - var stdin_buf = std.io.bufferedReader(std.io.getStdIn().reader()); + var stdin_buf = std.io.bufferedReader(std.fs.File.stdin().deprecatedReader()); var line_buf = std.ArrayList(u8).init(gpa); defer line_buf.deinit(); while (stdin_buf.reader().streamUntilDelimiter(line_buf.writer(), '\n', null)) { @@ -160,7 +160,7 @@ fn mainImpl() !void { var doc = try parser.endInput(); defer doc.deinit(gpa); - var stdout_buf = std.io.bufferedWriter(std.io.getStdOut().writer()); + var stdout_buf = std.io.bufferedWriter(std.fs.File.stdout().deprecatedWriter()); try doc.render(stdout_buf.writer()); try stdout_buf.flush(); } diff --git a/lib/docs/wasm/markdown/renderer.zig b/lib/docs/wasm/markdown/renderer.zig index 1e6041399a..cba857d204 100644 --- a/lib/docs/wasm/markdown/renderer.zig +++ b/lib/docs/wasm/markdown/renderer.zig @@ -1,6 +1,7 @@ const std = @import("std"); const Document = @import("Document.zig"); const Node = Document.Node; 
+const assert = std.debug.assert; /// A Markdown document renderer. /// @@ -41,7 +42,7 @@ pub fn Renderer(comptime Writer: type, comptime Context: type) type { if (start == 1) { try writer.writeAll("
    \n"); } else { - try writer.print("
      \n", .{start}); + try writer.print("
        \n", .{start}); } } else { try writer.writeAll("
          \n"); @@ -105,15 +106,15 @@ pub fn Renderer(comptime Writer: type, comptime Context: type) type { } }, .heading => { - try writer.print("", .{data.heading.level}); + try writer.print("", .{data.heading.level}); for (doc.extraChildren(data.heading.children)) |child| { try r.renderFn(r, doc, child, writer); } - try writer.print("\n", .{data.heading.level}); + try writer.print("\n", .{data.heading.level}); }, .code_block => { const content = doc.string(data.code_block.content); - try writer.print("
          {}
          \n", .{fmtHtml(content)}); + try writer.print("
          {f}
          \n", .{fmtHtml(content)}); }, .blockquote => { try writer.writeAll("
          \n"); @@ -134,7 +135,7 @@ pub fn Renderer(comptime Writer: type, comptime Context: type) type { }, .link => { const target = doc.string(data.link.target); - try writer.print("", .{fmtHtml(target)}); + try writer.print("", .{fmtHtml(target)}); for (doc.extraChildren(data.link.children)) |child| { try r.renderFn(r, doc, child, writer); } @@ -142,11 +143,11 @@ pub fn Renderer(comptime Writer: type, comptime Context: type) type { }, .autolink => { const target = doc.string(data.text.content); - try writer.print("{0}", .{fmtHtml(target)}); + try writer.print("{0f}", .{fmtHtml(target)}); }, .image => { const target = doc.string(data.link.target); - try writer.print("\"", { const content = doc.string(data.text.content); - try writer.print("{}", .{fmtHtml(content)}); + try writer.print("{f}", .{fmtHtml(content)}); }, .text => { const content = doc.string(data.text.content); - try writer.print("{}", .{fmtHtml(content)}); + try writer.print("{f}", .{fmtHtml(content)}); }, .line_break => { try writer.writeAll("
          \n"); @@ -221,7 +222,7 @@ pub fn renderInlineNodeText( }, .autolink, .code_span, .text => { const content = doc.string(data.text.content); - try writer.print("{}", .{fmtHtml(content)}); + try writer.print("{f}", .{fmtHtml(content)}); }, .line_break => { try writer.writeAll("\n"); @@ -229,18 +230,11 @@ pub fn renderInlineNodeText( } } -pub fn fmtHtml(bytes: []const u8) std.fmt.Formatter(formatHtml) { +pub fn fmtHtml(bytes: []const u8) std.fmt.Formatter([]const u8, formatHtml) { return .{ .data = bytes }; } -fn formatHtml( - bytes: []const u8, - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = fmt; - _ = options; +fn formatHtml(bytes: []const u8, writer: *std.io.Writer) std.io.Writer.Error!void { for (bytes) |b| { switch (b) { '<' => try writer.writeAll("<"), diff --git a/lib/fuzzer.zig b/lib/fuzzer.zig index 6bfd40b6f0..ce23f63421 100644 --- a/lib/fuzzer.zig +++ b/lib/fuzzer.zig @@ -9,7 +9,8 @@ pub const std_options = std.Options{ .logFn = logOverride, }; -var log_file: ?std.fs.File = null; +var log_file_buffer: [256]u8 = undefined; +var log_file_writer: ?std.fs.File.Writer = null; fn logOverride( comptime level: std.log.Level, @@ -17,15 +18,17 @@ fn logOverride( comptime format: []const u8, args: anytype, ) void { - const f = if (log_file) |f| f else f: { + const fw = if (log_file_writer) |*f| f else f: { const f = fuzzer.cache_dir.createFile("tmp/libfuzzer.log", .{}) catch @panic("failed to open fuzzer log file"); - log_file = f; - break :f f; + log_file_writer = f.writer(&log_file_buffer); + break :f &log_file_writer.?; }; const prefix1 = comptime level.asText(); const prefix2 = if (scope == .default) ": " else "(" ++ @tagName(scope) ++ "): "; - f.writer().print(prefix1 ++ prefix2 ++ format ++ "\n", args) catch @panic("failed to write to fuzzer log"); + fw.interface.print(prefix1 ++ prefix2 ++ format ++ "\n", args) catch + @panic("failed to write to fuzzer log"); + fw.interface.flush() catch @panic("failed 
to flush fuzzer log"); } /// Helps determine run uniqueness in the face of recursion. @@ -226,18 +229,18 @@ const Fuzzer = struct { .read = true, }) catch |e| switch (e) { error.PathAlreadyExists => continue, - else => fatal("unable to create '{}{d}: {s}", .{ f.corpus_directory, i, @errorName(err) }), + else => fatal("unable to create '{f}{d}: {s}", .{ f.corpus_directory, i, @errorName(err) }), }; errdefer input_file.close(); // Initialize the mmap for the current input. f.input = MemoryMappedList.create(input_file, 0, std.heap.page_size_max) catch |e| { - fatal("unable to init memory map for input at '{}{d}': {s}", .{ + fatal("unable to init memory map for input at '{f}{d}': {s}", .{ f.corpus_directory, i, @errorName(e), }); }; break; }, - else => fatal("unable to read '{}{d}': {s}", .{ f.corpus_directory, i, @errorName(err) }), + else => fatal("unable to read '{f}{d}': {s}", .{ f.corpus_directory, i, @errorName(err) }), }; errdefer gpa.free(input); f.corpus.append(gpa, .{ @@ -263,7 +266,7 @@ const Fuzzer = struct { const sub_path = try std.fmt.allocPrint(gpa, "f/{s}", .{f.unit_test_name}); f.corpus_directory = .{ .handle = f.cache_dir.makeOpenPath(sub_path, .{}) catch |err| - fatal("unable to open corpus directory 'f/{s}': {s}", .{ sub_path, @errorName(err) }), + fatal("unable to open corpus directory 'f/{s}': {t}", .{ sub_path, err }), .path = sub_path, }; initNextInput(f); diff --git a/lib/init/src/root.zig b/lib/init/src/root.zig index 70f4bd8d82..9afb8debf2 100644 --- a/lib/init/src/root.zig +++ b/lib/init/src/root.zig @@ -5,7 +5,7 @@ pub fn bufferedPrint() !void { // Stdout is for the actual output of your application, for example if you // are implementing gzip, then only the compressed bytes should be sent to // stdout, not any debugging messages. - const stdout_file = std.io.getStdOut().writer(); + const stdout_file = std.fs.File.stdout().deprecatedWriter(); // Buffering can improve performance significantly in print-heavy programs. 
var bw = std.io.bufferedWriter(stdout_file); const stdout = bw.writer(); diff --git a/lib/std/Build.zig b/lib/std/Build.zig index ab064d1aea..d84b99d02d 100644 --- a/lib/std/Build.zig +++ b/lib/std/Build.zig @@ -284,7 +284,7 @@ pub fn create( .h_dir = undefined, .dest_dir = graph.env_map.get("DESTDIR"), .install_tls = .{ - .step = Step.init(.{ + .step = .init(.{ .id = TopLevelStep.base_id, .name = "install", .owner = b, @@ -292,7 +292,7 @@ pub fn create( .description = "Copy build artifacts to prefix path", }, .uninstall_tls = .{ - .step = Step.init(.{ + .step = .init(.{ .id = TopLevelStep.base_id, .name = "uninstall", .owner = b, @@ -342,7 +342,7 @@ fn createChildOnly( .graph = parent.graph, .allocator = allocator, .install_tls = .{ - .step = Step.init(.{ + .step = .init(.{ .id = TopLevelStep.base_id, .name = "install", .owner = child, @@ -350,7 +350,7 @@ fn createChildOnly( .description = "Copy build artifacts to prefix path", }, .uninstall_tls = .{ - .step = Step.init(.{ + .step = .init(.{ .id = TopLevelStep.base_id, .name = "uninstall", .owner = child, @@ -1525,7 +1525,7 @@ pub fn option(b: *Build, comptime T: type, name_raw: []const u8, description_raw pub fn step(b: *Build, name: []const u8, description: []const u8) *Step { const step_info = b.allocator.create(TopLevelStep) catch @panic("OOM"); step_info.* = .{ - .step = Step.init(.{ + .step = .init(.{ .id = TopLevelStep.base_id, .name = name, .owner = b, @@ -1745,7 +1745,7 @@ pub fn addUserInputOption(b: *Build, name_raw: []const u8, value_raw: []const u8 return true; }, .lazy_path, .lazy_path_list => { - log.warn("the lazy path value type isn't added from the CLI, but somehow '{s}' is a .{}", .{ name, std.zig.fmtId(@tagName(gop.value_ptr.value)) }); + log.warn("the lazy path value type isn't added from the CLI, but somehow '{s}' is a .{f}", .{ name, std.zig.fmtId(@tagName(gop.value_ptr.value)) }); return true; }, } @@ -1824,13 +1824,13 @@ pub fn validateUserInputDidItFail(b: *Build) bool { return 
b.invalid_user_input; } -fn allocPrintCmd(ally: Allocator, opt_cwd: ?[]const u8, argv: []const []const u8) error{OutOfMemory}![]u8 { - var buf = ArrayList(u8).init(ally); - if (opt_cwd) |cwd| try buf.writer().print("cd {s} && ", .{cwd}); +fn allocPrintCmd(gpa: Allocator, opt_cwd: ?[]const u8, argv: []const []const u8) error{OutOfMemory}![]u8 { + var buf: std.ArrayListUnmanaged(u8) = .empty; + if (opt_cwd) |cwd| try buf.print(gpa, "cd {s} && ", .{cwd}); for (argv) |arg| { - try buf.writer().print("{s} ", .{arg}); + try buf.print(gpa, "{s} ", .{arg}); } - return buf.toOwnedSlice(); + return buf.toOwnedSlice(gpa); } fn printCmd(ally: Allocator, cwd: ?[]const u8, argv: []const []const u8) void { @@ -2059,7 +2059,7 @@ pub fn runAllowFail( try Step.handleVerbose2(b, null, child.env_map, argv); try child.spawn(); - const stdout = child.stdout.?.reader().readAllAlloc(b.allocator, max_output_size) catch { + const stdout = child.stdout.?.deprecatedReader().readAllAlloc(b.allocator, max_output_size) catch { return error.ReadFailure; }; errdefer b.allocator.free(stdout); @@ -2466,10 +2466,9 @@ pub const GeneratedFile = struct { pub fn getPath2(gen: GeneratedFile, src_builder: *Build, asking_step: ?*Step) []const u8 { return gen.path orelse { - std.debug.lockStdErr(); - const stderr = std.io.getStdErr(); - dumpBadGetPathHelp(gen.step, stderr, src_builder, asking_step) catch {}; - std.debug.unlockStdErr(); + const w = debug.lockStderrWriter(&.{}); + dumpBadGetPathHelp(gen.step, w, .detect(.stderr()), src_builder, asking_step) catch {}; + debug.unlockStderrWriter(); @panic("misconfigured build script"); }; } @@ -2676,10 +2675,9 @@ pub const LazyPath = union(enum) { var file_path: Cache.Path = .{ .root_dir = Cache.Directory.cwd(), .sub_path = gen.file.path orelse { - std.debug.lockStdErr(); - const stderr = std.io.getStdErr(); - dumpBadGetPathHelp(gen.file.step, stderr, src_builder, asking_step) catch {}; - std.debug.unlockStdErr(); + const w = debug.lockStderrWriter(&.{}); + 
dumpBadGetPathHelp(gen.file.step, w, .detect(.stderr()), src_builder, asking_step) catch {}; + debug.unlockStderrWriter(); @panic("misconfigured build script"); }, }; @@ -2766,44 +2764,42 @@ fn dumpBadDirnameHelp( comptime msg: []const u8, args: anytype, ) anyerror!void { - debug.lockStdErr(); - defer debug.unlockStdErr(); + const w = debug.lockStderrWriter(&.{}); + defer debug.unlockStderrWriter(); - const stderr = io.getStdErr(); - const w = stderr.writer(); try w.print(msg, args); - const tty_config = std.io.tty.detectConfig(stderr); + const tty_config = std.io.tty.detectConfig(.stderr()); if (fail_step) |s| { tty_config.setColor(w, .red) catch {}; - try stderr.writeAll(" The step was created by this stack trace:\n"); + try w.writeAll(" The step was created by this stack trace:\n"); tty_config.setColor(w, .reset) catch {}; - s.dump(stderr); + s.dump(w, tty_config); } if (asking_step) |as| { tty_config.setColor(w, .red) catch {}; - try stderr.writer().print(" The step '{s}' that is missing a dependency on the above step was created by this stack trace:\n", .{as.name}); + try w.print(" The step '{s}' that is missing a dependency on the above step was created by this stack trace:\n", .{as.name}); tty_config.setColor(w, .reset) catch {}; - as.dump(stderr); + as.dump(w, tty_config); } tty_config.setColor(w, .red) catch {}; - try stderr.writeAll(" Hope that helps. Proceeding to panic.\n"); + try w.writeAll(" Hope that helps. Proceeding to panic.\n"); tty_config.setColor(w, .reset) catch {}; } /// In this function the stderr mutex has already been locked. pub fn dumpBadGetPathHelp( s: *Step, - stderr: fs.File, + w: *std.io.Writer, + tty_config: std.io.tty.Config, src_builder: *Build, asking_step: ?*Step, ) anyerror!void { - const w = stderr.writer(); try w.print( \\getPath() was called on a GeneratedFile that wasn't built yet. 
\\ source package path: {s} @@ -2814,21 +2810,20 @@ pub fn dumpBadGetPathHelp( s.name, }); - const tty_config = std.io.tty.detectConfig(stderr); tty_config.setColor(w, .red) catch {}; - try stderr.writeAll(" The step was created by this stack trace:\n"); + try w.writeAll(" The step was created by this stack trace:\n"); tty_config.setColor(w, .reset) catch {}; - s.dump(stderr); + s.dump(w, tty_config); if (asking_step) |as| { tty_config.setColor(w, .red) catch {}; - try stderr.writer().print(" The step '{s}' that is missing a dependency on the above step was created by this stack trace:\n", .{as.name}); + try w.print(" The step '{s}' that is missing a dependency on the above step was created by this stack trace:\n", .{as.name}); tty_config.setColor(w, .reset) catch {}; - as.dump(stderr); + as.dump(w, tty_config); } tty_config.setColor(w, .red) catch {}; - try stderr.writeAll(" Hope that helps. Proceeding to panic.\n"); + try w.writeAll(" Hope that helps. Proceeding to panic.\n"); tty_config.setColor(w, .reset) catch {}; } @@ -2866,11 +2861,6 @@ pub fn makeTempPath(b: *Build) []const u8 { return result_path; } -/// Deprecated; use `std.fmt.hex` instead. -pub fn hex64(x: u64) [16]u8 { - return std.fmt.hex(x); -} - /// A pair of target query and fully resolved target. /// This type is generally required by build system API that need to be given a /// target. The query is kept because the Zig toolchain needs to know which parts diff --git a/lib/std/Build/Cache.zig b/lib/std/Build/Cache.zig index bf63acdead..dfc1b14742 100644 --- a/lib/std/Build/Cache.zig +++ b/lib/std/Build/Cache.zig @@ -2,6 +2,18 @@ //! This is not a general-purpose cache. It is designed to be fast and simple, //! not to withstand attacks using specially-crafted input. 
+const Cache = @This(); +const std = @import("std"); +const builtin = @import("builtin"); +const crypto = std.crypto; +const fs = std.fs; +const assert = std.debug.assert; +const testing = std.testing; +const mem = std.mem; +const fmt = std.fmt; +const Allocator = std.mem.Allocator; +const log = std.log.scoped(.cache); + gpa: Allocator, manifest_dir: fs.Dir, hash: HashHelper = .{}, @@ -21,18 +33,6 @@ pub const Path = @import("Cache/Path.zig"); pub const Directory = @import("Cache/Directory.zig"); pub const DepTokenizer = @import("Cache/DepTokenizer.zig"); -const Cache = @This(); -const std = @import("std"); -const builtin = @import("builtin"); -const crypto = std.crypto; -const fs = std.fs; -const assert = std.debug.assert; -const testing = std.testing; -const mem = std.mem; -const fmt = std.fmt; -const Allocator = std.mem.Allocator; -const log = std.log.scoped(.cache); - pub fn addPrefix(cache: *Cache, directory: Directory) void { cache.prefixes_buffer[cache.prefixes_len] = directory; cache.prefixes_len += 1; @@ -68,7 +68,7 @@ const PrefixedPath = struct { fn findPrefix(cache: *const Cache, file_path: []const u8) !PrefixedPath { const gpa = cache.gpa; - const resolved_path = try fs.path.resolve(gpa, &[_][]const u8{file_path}); + const resolved_path = try fs.path.resolve(gpa, &.{file_path}); errdefer gpa.free(resolved_path); return findPrefixResolved(cache, resolved_path); } @@ -132,7 +132,7 @@ pub const Hasher = crypto.auth.siphash.SipHash128(1, 3); /// Initial state with random bytes, that can be copied. /// Refresh this with new random bytes when the manifest /// format is modified in a non-backwards-compatible way. 
-pub const hasher_init: Hasher = Hasher.init(&[_]u8{ +pub const hasher_init: Hasher = Hasher.init(&.{ 0x33, 0x52, 0xa2, 0x84, 0xcf, 0x17, 0x56, 0x57, 0x01, 0xbb, 0xcd, 0xe4, @@ -286,11 +286,8 @@ pub const HashHelper = struct { pub fn binToHex(bin_digest: BinDigest) HexDigest { var out_digest: HexDigest = undefined; - _ = fmt.bufPrint( - &out_digest, - "{s}", - .{fmt.fmtSliceHexLower(&bin_digest)}, - ) catch unreachable; + var w: std.io.Writer = .fixed(&out_digest); + w.printHex(&bin_digest, .lower) catch unreachable; return out_digest; } @@ -337,7 +334,6 @@ pub const Manifest = struct { manifest_create: fs.File.OpenError, manifest_read: fs.File.ReadError, manifest_lock: fs.File.LockError, - manifest_seek: fs.File.SeekError, file_open: FileOp, file_stat: FileOp, file_read: FileOp, @@ -611,12 +607,6 @@ pub const Manifest = struct { var file = self.files.pop().?; file.key.deinit(self.cache.gpa); } - // Also, seek the file back to the start. - self.manifest_file.?.seekTo(0) catch |err| { - self.diagnostic = .{ .manifest_seek = err }; - return error.CacheCheckFailed; - }; - switch (try self.hitWithCurrentLock()) { .hit => break :hit, .miss => |m| break :digests m.file_digests_populated, @@ -661,9 +651,8 @@ pub const Manifest = struct { return true; } - /// Assumes that `self.hash.hasher` has been updated only with the original digest, that - /// `self.files` contains only the original input files, and that `self.manifest_file.?` is - /// seeked to the start of the file. + /// Assumes that `self.hash.hasher` has been updated only with the original digest and that + /// `self.files` contains only the original input files. 
fn hitWithCurrentLock(self: *Manifest) HitError!union(enum) { hit, miss: struct { @@ -672,12 +661,13 @@ pub const Manifest = struct { } { const gpa = self.cache.gpa; const input_file_count = self.files.entries.len; - - const file_contents = self.manifest_file.?.reader().readAllAlloc(gpa, manifest_file_size_max) catch |err| switch (err) { + var manifest_reader = self.manifest_file.?.reader(&.{}); // Reads positionally from zero. + const limit: std.io.Limit = .limited(manifest_file_size_max); + const file_contents = manifest_reader.interface.allocRemaining(gpa, limit) catch |err| switch (err) { error.OutOfMemory => return error.OutOfMemory, error.StreamTooLong => return error.OutOfMemory, - else => |e| { - self.diagnostic = .{ .manifest_read = e }; + error.ReadFailed => { + self.diagnostic = .{ .manifest_read = manifest_reader.err.? }; return error.CacheCheckFailed; }, }; @@ -1063,14 +1053,17 @@ pub const Manifest = struct { } fn addDepFileMaybePost(self: *Manifest, dir: fs.Dir, dep_file_basename: []const u8) !void { - const dep_file_contents = try dir.readFileAlloc(self.cache.gpa, dep_file_basename, manifest_file_size_max); - defer self.cache.gpa.free(dep_file_contents); + const gpa = self.cache.gpa; + const dep_file_contents = try dir.readFileAlloc(gpa, dep_file_basename, manifest_file_size_max); + defer gpa.free(dep_file_contents); - var error_buf = std.ArrayList(u8).init(self.cache.gpa); - defer error_buf.deinit(); + var error_buf: std.ArrayListUnmanaged(u8) = .empty; + defer error_buf.deinit(gpa); + + var resolve_buf: std.ArrayListUnmanaged(u8) = .empty; + defer resolve_buf.deinit(gpa); var it: DepTokenizer = .{ .bytes = dep_file_contents }; - while (it.next()) |token| { switch (token) { // We don't care about targets, we only want the prereqs @@ -1080,16 +1073,14 @@ pub const Manifest = struct { _ = try self.addFile(file_path, null); } else try self.addFilePost(file_path), .prereq_must_resolve => { - var resolve_buf = std.ArrayList(u8).init(self.cache.gpa); - 
defer resolve_buf.deinit(); - - try token.resolve(resolve_buf.writer()); + resolve_buf.clearRetainingCapacity(); + try token.resolve(gpa, &resolve_buf); if (self.manifest_file == null) { _ = try self.addFile(resolve_buf.items, null); } else try self.addFilePost(resolve_buf.items); }, else => |err| { - try err.printError(error_buf.writer()); + try err.printError(gpa, &error_buf); log.err("failed parsing {s}: {s}", .{ dep_file_basename, error_buf.items }); return error.InvalidDepFile; }, @@ -1127,24 +1118,12 @@ pub const Manifest = struct { if (self.manifest_dirty) { self.manifest_dirty = false; - var contents = std.ArrayList(u8).init(self.cache.gpa); - defer contents.deinit(); - - const writer = contents.writer(); - try writer.writeAll(manifest_header ++ "\n"); - for (self.files.keys()) |file| { - try writer.print("{d} {d} {d} {} {d} {s}\n", .{ - file.stat.size, - file.stat.inode, - file.stat.mtime, - fmt.fmtSliceHexLower(&file.bin_digest), - file.prefixed_path.prefix, - file.prefixed_path.sub_path, - }); - } - - try manifest_file.setEndPos(contents.items.len); - try manifest_file.pwriteAll(contents.items, 0); + var buffer: [4000]u8 = undefined; + var fw = manifest_file.writer(&buffer); + writeDirtyManifestToStream(self, &fw) catch |err| switch (err) { + error.WriteFailed => return fw.err.?, + else => |e| return e, + }; } if (self.want_shared_lock) { @@ -1152,6 +1131,21 @@ pub const Manifest = struct { } } + fn writeDirtyManifestToStream(self: *Manifest, fw: *fs.File.Writer) !void { + try fw.interface.writeAll(manifest_header ++ "\n"); + for (self.files.keys()) |file| { + try fw.interface.print("{d} {d} {d} {x} {d} {s}\n", .{ + file.stat.size, + file.stat.inode, + file.stat.mtime, + &file.bin_digest, + file.prefixed_path.prefix, + file.prefixed_path.sub_path, + }); + } + try fw.end(); + } + fn downgradeToSharedLock(self: *Manifest) !void { if (!self.have_exclusive_lock) return; diff --git a/lib/std/Build/Cache/DepTokenizer.zig b/lib/std/Build/Cache/DepTokenizer.zig 
index a1e64c006d..8221f92dba 100644 --- a/lib/std/Build/Cache/DepTokenizer.zig +++ b/lib/std/Build/Cache/DepTokenizer.zig @@ -7,6 +7,7 @@ state: State = .lhs, const std = @import("std"); const testing = std.testing; const assert = std.debug.assert; +const Allocator = std.mem.Allocator; pub fn next(self: *Tokenizer) ?Token { var start = self.index; @@ -362,7 +363,7 @@ pub const Token = union(enum) { }; /// Resolve escapes in target or prereq. Only valid with .target_must_resolve or .prereq_must_resolve. - pub fn resolve(self: Token, writer: anytype) @TypeOf(writer).Error!void { + pub fn resolve(self: Token, gpa: Allocator, list: *std.ArrayListUnmanaged(u8)) error{OutOfMemory}!void { switch (self) { .target_must_resolve => |bytes| { var state: enum { start, escape, dollar } = .start; @@ -372,27 +373,27 @@ pub const Token = union(enum) { switch (c) { '\\' => state = .escape, '$' => state = .dollar, - else => try writer.writeByte(c), + else => try list.append(gpa, c), } }, .escape => { switch (c) { ' ', '#', '\\' => {}, '$' => { - try writer.writeByte('\\'); + try list.append(gpa, '\\'); state = .dollar; continue; }, - else => try writer.writeByte('\\'), + else => try list.append(gpa, '\\'), } - try writer.writeByte(c); + try list.append(gpa, c); state = .start; }, .dollar => { - try writer.writeByte('$'); + try list.append(gpa, '$'); switch (c) { '$' => {}, - else => try writer.writeByte(c), + else => try list.append(gpa, c), } state = .start; }, @@ -406,19 +407,19 @@ pub const Token = union(enum) { .start => { switch (c) { '\\' => state = .escape, - else => try writer.writeByte(c), + else => try list.append(gpa, c), } }, .escape => { switch (c) { ' ' => {}, '\\' => { - try writer.writeByte(c); + try list.append(gpa, c); continue; }, - else => try writer.writeByte('\\'), + else => try list.append(gpa, '\\'), } - try writer.writeByte(c); + try list.append(gpa, c); state = .start; }, } @@ -428,20 +429,20 @@ pub const Token = union(enum) { } } - pub fn printError(self: 
Token, writer: anytype) @TypeOf(writer).Error!void { + pub fn printError(self: Token, gpa: Allocator, list: *std.ArrayListUnmanaged(u8)) error{OutOfMemory}!void { switch (self) { .target, .target_must_resolve, .prereq, .prereq_must_resolve => unreachable, // not an error .incomplete_quoted_prerequisite, .incomplete_target, => |index_and_bytes| { - try writer.print("{s} '", .{self.errStr()}); + try list.print(gpa, "{s} '", .{self.errStr()}); if (self == .incomplete_target) { const tmp = Token{ .target_must_resolve = index_and_bytes.bytes }; - try tmp.resolve(writer); + try tmp.resolve(gpa, list); } else { - try printCharValues(writer, index_and_bytes.bytes); + try printCharValues(gpa, list, index_and_bytes.bytes); } - try writer.print("' at position {d}", .{index_and_bytes.index}); + try list.print(gpa, "' at position {d}", .{index_and_bytes.index}); }, .invalid_target, .bad_target_escape, @@ -450,9 +451,9 @@ pub const Token = union(enum) { .incomplete_escape, .expected_colon, => |index_and_char| { - try writer.writeAll("illegal char "); - try printUnderstandableChar(writer, index_and_char.char); - try writer.print(" at position {d}: {s}", .{ index_and_char.index, self.errStr() }); + try list.appendSlice(gpa, "illegal char "); + try printUnderstandableChar(gpa, list, index_and_char.char); + try list.print(gpa, " at position {d}: {s}", .{ index_and_char.index, self.errStr() }); }, } } @@ -1026,41 +1027,41 @@ fn depTokenizer(input: []const u8, expect: []const u8) !void { defer arena_allocator.deinit(); var it: Tokenizer = .{ .bytes = input }; - var buffer = std.ArrayList(u8).init(arena); - var resolve_buf = std.ArrayList(u8).init(arena); + var buffer: std.ArrayListUnmanaged(u8) = .empty; + var resolve_buf: std.ArrayListUnmanaged(u8) = .empty; var i: usize = 0; while (it.next()) |token| { - if (i != 0) try buffer.appendSlice("\n"); + if (i != 0) try buffer.appendSlice(arena, "\n"); switch (token) { .target, .prereq => |bytes| { - try 
buffer.appendSlice(@tagName(token)); - try buffer.appendSlice(" = {"); + try buffer.appendSlice(arena, @tagName(token)); + try buffer.appendSlice(arena, " = {"); for (bytes) |b| { - try buffer.append(printable_char_tab[b]); + try buffer.append(arena, printable_char_tab[b]); } - try buffer.appendSlice("}"); + try buffer.appendSlice(arena, "}"); }, .target_must_resolve => { - try buffer.appendSlice("target = {"); - try token.resolve(resolve_buf.writer()); + try buffer.appendSlice(arena, "target = {"); + try token.resolve(arena, &resolve_buf); for (resolve_buf.items) |b| { - try buffer.append(printable_char_tab[b]); + try buffer.append(arena, printable_char_tab[b]); } resolve_buf.items.len = 0; - try buffer.appendSlice("}"); + try buffer.appendSlice(arena, "}"); }, .prereq_must_resolve => { - try buffer.appendSlice("prereq = {"); - try token.resolve(resolve_buf.writer()); + try buffer.appendSlice(arena, "prereq = {"); + try token.resolve(arena, &resolve_buf); for (resolve_buf.items) |b| { - try buffer.append(printable_char_tab[b]); + try buffer.append(arena, printable_char_tab[b]); } resolve_buf.items.len = 0; - try buffer.appendSlice("}"); + try buffer.appendSlice(arena, "}"); }, else => { - try buffer.appendSlice("ERROR: "); - try token.printError(buffer.writer()); + try buffer.appendSlice(arena, "ERROR: "); + try token.printError(arena, &buffer); break; }, } @@ -1072,134 +1073,18 @@ fn depTokenizer(input: []const u8, expect: []const u8) !void { return; } - const out = std.io.getStdErr().writer(); - - try out.writeAll("\n"); - try printSection(out, "<<<< input", input); - try printSection(out, "==== expect", expect); - try printSection(out, ">>>> got", buffer.items); - try printRuler(out); - - try testing.expect(false); + try testing.expectEqualStrings(expect, buffer.items); } -fn printSection(out: anytype, label: []const u8, bytes: []const u8) !void { - try printLabel(out, label, bytes); - try hexDump(out, bytes); - try printRuler(out); - try out.writeAll(bytes); - 
try out.writeAll("\n"); +fn printCharValues(gpa: Allocator, list: *std.ArrayListUnmanaged(u8), bytes: []const u8) !void { + for (bytes) |b| try list.append(gpa, printable_char_tab[b]); } -fn printLabel(out: anytype, label: []const u8, bytes: []const u8) !void { - var buf: [80]u8 = undefined; - const text = try std.fmt.bufPrint(buf[0..], "{s} {d} bytes ", .{ label, bytes.len }); - try out.writeAll(text); - var i: usize = text.len; - const end = 79; - while (i < end) : (i += 1) { - try out.writeAll(&[_]u8{label[0]}); - } - try out.writeAll("\n"); -} - -fn printRuler(out: anytype) !void { - var i: usize = 0; - const end = 79; - while (i < end) : (i += 1) { - try out.writeAll("-"); - } - try out.writeAll("\n"); -} - -fn hexDump(out: anytype, bytes: []const u8) !void { - const n16 = bytes.len >> 4; - var line: usize = 0; - var offset: usize = 0; - while (line < n16) : (line += 1) { - try hexDump16(out, offset, bytes[offset..][0..16]); - offset += 16; - } - - const n = bytes.len & 0x0f; - if (n > 0) { - try printDecValue(out, offset, 8); - try out.writeAll(":"); - try out.writeAll(" "); - const end1 = @min(offset + n, offset + 8); - for (bytes[offset..end1]) |b| { - try out.writeAll(" "); - try printHexValue(out, b, 2); - } - const end2 = offset + n; - if (end2 > end1) { - try out.writeAll(" "); - for (bytes[end1..end2]) |b| { - try out.writeAll(" "); - try printHexValue(out, b, 2); - } - } - const short = 16 - n; - var i: usize = 0; - while (i < short) : (i += 1) { - try out.writeAll(" "); - } - if (end2 > end1) { - try out.writeAll(" |"); - } else { - try out.writeAll(" |"); - } - try printCharValues(out, bytes[offset..end2]); - try out.writeAll("|\n"); - offset += n; - } - - try printDecValue(out, offset, 8); - try out.writeAll(":"); - try out.writeAll("\n"); -} - -fn hexDump16(out: anytype, offset: usize, bytes: []const u8) !void { - try printDecValue(out, offset, 8); - try out.writeAll(":"); - try out.writeAll(" "); - for (bytes[0..8]) |b| { - try out.writeAll(" "); 
- try printHexValue(out, b, 2); - } - try out.writeAll(" "); - for (bytes[8..16]) |b| { - try out.writeAll(" "); - try printHexValue(out, b, 2); - } - try out.writeAll(" |"); - try printCharValues(out, bytes); - try out.writeAll("|\n"); -} - -fn printDecValue(out: anytype, value: u64, width: u8) !void { - var buffer: [20]u8 = undefined; - const len = std.fmt.formatIntBuf(buffer[0..], value, 10, .lower, .{ .width = width, .fill = '0' }); - try out.writeAll(buffer[0..len]); -} - -fn printHexValue(out: anytype, value: u64, width: u8) !void { - var buffer: [16]u8 = undefined; - const len = std.fmt.formatIntBuf(buffer[0..], value, 16, .lower, .{ .width = width, .fill = '0' }); - try out.writeAll(buffer[0..len]); -} - -fn printCharValues(out: anytype, bytes: []const u8) !void { - for (bytes) |b| { - try out.writeAll(&[_]u8{printable_char_tab[b]}); - } -} - -fn printUnderstandableChar(out: anytype, char: u8) !void { +fn printUnderstandableChar(gpa: Allocator, list: *std.ArrayListUnmanaged(u8), char: u8) !void { if (std.ascii.isPrint(char)) { - try out.print("'{c}'", .{char}); + try list.print(gpa, "'{c}'", .{char}); } else { - try out.print("\\x{X:0>2}", .{char}); + try list.print(gpa, "\\x{X:0>2}", .{char}); } } diff --git a/lib/std/Build/Cache/Directory.zig b/lib/std/Build/Cache/Directory.zig index 4de1cc18f1..14a5e8a24d 100644 --- a/lib/std/Build/Cache/Directory.zig +++ b/lib/std/Build/Cache/Directory.zig @@ -1,5 +1,6 @@ const Directory = @This(); const std = @import("../../std.zig"); +const assert = std.debug.assert; const fs = std.fs; const fmt = std.fmt; const Allocator = std.mem.Allocator; @@ -55,14 +56,7 @@ pub fn closeAndFree(self: *Directory, gpa: Allocator) void { self.* = undefined; } -pub fn format( - self: Directory, - comptime fmt_string: []const u8, - options: fmt.FormatOptions, - writer: anytype, -) !void { - _ = options; - if (fmt_string.len != 0) fmt.invalidFmtError(fmt_string, self); +pub fn format(self: Directory, writer: *std.io.Writer) 
std.io.Writer.Error!void { if (self.path) |p| { try writer.writeAll(p); try writer.writeAll(fs.path.sep_str); diff --git a/lib/std/Build/Cache/Path.zig b/lib/std/Build/Cache/Path.zig index 8822fb64be..a0a58067fc 100644 --- a/lib/std/Build/Cache/Path.zig +++ b/lib/std/Build/Cache/Path.zig @@ -1,3 +1,10 @@ +const Path = @This(); +const std = @import("../../std.zig"); +const assert = std.debug.assert; +const fs = std.fs; +const Allocator = std.mem.Allocator; +const Cache = std.Build.Cache; + root_dir: Cache.Directory, /// The path, relative to the root dir, that this `Path` represents. /// Empty string means the root_dir is the path. @@ -133,38 +140,42 @@ pub fn makePath(p: Path, sub_path: []const u8) !void { } pub fn toString(p: Path, allocator: Allocator) Allocator.Error![]u8 { - return std.fmt.allocPrint(allocator, "{}", .{p}); + return std.fmt.allocPrint(allocator, "{f}", .{p}); } pub fn toStringZ(p: Path, allocator: Allocator) Allocator.Error![:0]u8 { - return std.fmt.allocPrintZ(allocator, "{}", .{p}); + return std.fmt.allocPrintSentinel(allocator, "{f}", .{p}, 0); } -pub fn format( - self: Path, - comptime fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - if (fmt_string.len == 1) { - // Quote-escape the string. 
- const stringEscape = std.zig.stringEscape; - const f = switch (fmt_string[0]) { - 'q' => "", - '\'' => "\'", - else => @compileError("unsupported format string: " ++ fmt_string), - }; - if (self.root_dir.path) |p| { - try stringEscape(p, f, options, writer); - if (self.sub_path.len > 0) try stringEscape(fs.path.sep_str, f, options, writer); - } - if (self.sub_path.len > 0) { - try stringEscape(self.sub_path, f, options, writer); - } - return; +pub fn fmtEscapeString(path: Path) std.fmt.Formatter(Path, formatEscapeString) { + return .{ .data = path }; +} + +pub fn formatEscapeString(path: Path, writer: *std.io.Writer) std.io.Writer.Error!void { + if (path.root_dir.path) |p| { + try std.zig.stringEscape(p, writer); + if (path.sub_path.len > 0) try std.zig.stringEscape(fs.path.sep_str, writer); } - if (fmt_string.len > 0) - std.fmt.invalidFmtError(fmt_string, self); + if (path.sub_path.len > 0) { + try std.zig.stringEscape(path.sub_path, writer); + } +} + +pub fn fmtEscapeChar(path: Path) std.fmt.Formatter(Path, formatEscapeChar) { + return .{ .data = path }; +} + +pub fn formatEscapeChar(path: Path, writer: *std.io.Writer) std.io.Writer.Error!void { + if (path.root_dir.path) |p| { + try std.zig.charEscape(p, writer); + if (path.sub_path.len > 0) try std.zig.charEscape(fs.path.sep_str, writer); + } + if (path.sub_path.len > 0) { + try std.zig.charEscape(path.sub_path, writer); + } +} + +pub fn format(self: Path, writer: *std.io.Writer) std.io.Writer.Error!void { if (std.fs.path.isAbsolute(self.sub_path)) { try writer.writeAll(self.sub_path); return; @@ -223,9 +234,3 @@ pub const TableAdapter = struct { return a.eql(b); } }; - -const Path = @This(); -const std = @import("../../std.zig"); -const fs = std.fs; -const Allocator = std.mem.Allocator; -const Cache = std.Build.Cache; diff --git a/lib/std/Build/Fuzz.zig b/lib/std/Build/Fuzz.zig index 4ebdfdf8e5..28f8781dd1 100644 --- a/lib/std/Build/Fuzz.zig +++ b/lib/std/Build/Fuzz.zig @@ -112,7 +112,6 @@ fn 
rebuildTestsWorkerRun(run: *Step.Run, ttyconf: std.io.tty.Config, parent_prog fn rebuildTestsWorkerRunFallible(run: *Step.Run, ttyconf: std.io.tty.Config, parent_prog_node: std.Progress.Node) !void { const gpa = run.step.owner.allocator; - const stderr = std.io.getStdErr(); const compile = run.producer.?; const prog_node = parent_prog_node.start(compile.step.name, 0); @@ -125,9 +124,10 @@ fn rebuildTestsWorkerRunFallible(run: *Step.Run, ttyconf: std.io.tty.Config, par const show_stderr = compile.step.result_stderr.len > 0; if (show_error_msgs or show_compile_errors or show_stderr) { - std.debug.lockStdErr(); - defer std.debug.unlockStdErr(); - build_runner.printErrorMessages(gpa, &compile.step, .{ .ttyconf = ttyconf }, stderr, false) catch {}; + var buf: [256]u8 = undefined; + const w = std.debug.lockStderrWriter(&buf); + defer std.debug.unlockStderrWriter(); + build_runner.printErrorMessages(gpa, &compile.step, .{ .ttyconf = ttyconf }, w, false) catch {}; } const rebuilt_bin_path = result catch |err| switch (err) { @@ -152,10 +152,10 @@ fn fuzzWorkerRun( run.rerunInFuzzMode(web_server, unit_test_index, prog_node) catch |err| switch (err) { error.MakeFailed => { - const stderr = std.io.getStdErr(); - std.debug.lockStdErr(); - defer std.debug.unlockStdErr(); - build_runner.printErrorMessages(gpa, &run.step, .{ .ttyconf = ttyconf }, stderr, false) catch {}; + var buf: [256]u8 = undefined; + const w = std.debug.lockStderrWriter(&buf); + defer std.debug.unlockStderrWriter(); + build_runner.printErrorMessages(gpa, &run.step, .{ .ttyconf = ttyconf }, w, false) catch {}; return; }, else => { diff --git a/lib/std/Build/Fuzz/WebServer.zig b/lib/std/Build/Fuzz/WebServer.zig index ab44d4e7af..b28a6e185c 100644 --- a/lib/std/Build/Fuzz/WebServer.zig +++ b/lib/std/Build/Fuzz/WebServer.zig @@ -170,7 +170,7 @@ fn serveFile( // We load the file with every request so that the user can make changes to the file // and refresh the HTML page without restarting this server. 
const file_contents = ws.zig_lib_directory.handle.readFileAlloc(gpa, name, 10 * 1024 * 1024) catch |err| { - log.err("failed to read '{}{s}': {s}", .{ ws.zig_lib_directory, name, @errorName(err) }); + log.err("failed to read '{f}{s}': {s}", .{ ws.zig_lib_directory, name, @errorName(err) }); return error.AlreadyReported; }; defer gpa.free(file_contents); @@ -251,10 +251,10 @@ fn buildWasmBinary( "-fsingle-threaded", // "--dep", "Walk", // "--dep", "html_render", // - try std.fmt.allocPrint(arena, "-Mroot={}", .{main_src_path}), // - try std.fmt.allocPrint(arena, "-MWalk={}", .{walk_src_path}), // + try std.fmt.allocPrint(arena, "-Mroot={f}", .{main_src_path}), // + try std.fmt.allocPrint(arena, "-MWalk={f}", .{walk_src_path}), // "--dep", "Walk", // - try std.fmt.allocPrint(arena, "-Mhtml_render={}", .{html_render_src_path}), // + try std.fmt.allocPrint(arena, "-Mhtml_render={f}", .{html_render_src_path}), // "--listen=-", }); @@ -526,7 +526,7 @@ fn serveSourcesTar(ws: *WebServer, request: *std.http.Server.Request) !void { for (deduped_paths) |joined_path| { var file = joined_path.root_dir.handle.openFile(joined_path.sub_path, .{}) catch |err| { - log.err("failed to open {}: {s}", .{ joined_path, @errorName(err) }); + log.err("failed to open {f}: {s}", .{ joined_path, @errorName(err) }); continue; }; defer file.close(); @@ -604,7 +604,7 @@ fn prepareTables( const rebuilt_exe_path = run_step.rebuilt_executable.?; var debug_info = std.debug.Info.load(gpa, rebuilt_exe_path, &gop.value_ptr.coverage) catch |err| { - log.err("step '{s}': failed to load debug information for '{}': {s}", .{ + log.err("step '{s}': failed to load debug information for '{f}': {s}", .{ run_step.step.name, rebuilt_exe_path, @errorName(err), }); return error.AlreadyReported; @@ -616,7 +616,7 @@ fn prepareTables( .sub_path = "v/" ++ std.fmt.hex(coverage_id), }; var coverage_file = coverage_file_path.root_dir.handle.openFile(coverage_file_path.sub_path, .{}) catch |err| { - log.err("step '{s}': 
failed to load coverage file '{}': {s}", .{ + log.err("step '{s}': failed to load coverage file '{f}': {s}", .{ run_step.step.name, coverage_file_path, @errorName(err), }); return error.AlreadyReported; @@ -624,7 +624,7 @@ fn prepareTables( defer coverage_file.close(); const file_size = coverage_file.getEndPos() catch |err| { - log.err("unable to check len of coverage file '{}': {s}", .{ coverage_file_path, @errorName(err) }); + log.err("unable to check len of coverage file '{f}': {s}", .{ coverage_file_path, @errorName(err) }); return error.AlreadyReported; }; @@ -636,7 +636,7 @@ fn prepareTables( coverage_file.handle, 0, ) catch |err| { - log.err("failed to map coverage file '{}': {s}", .{ coverage_file_path, @errorName(err) }); + log.err("failed to map coverage file '{f}': {s}", .{ coverage_file_path, @errorName(err) }); return error.AlreadyReported; }; gop.value_ptr.mapped_memory = mapped_memory; diff --git a/lib/std/Build/Module.zig b/lib/std/Build/Module.zig index cc57aeb521..0fa8a9a623 100644 --- a/lib/std/Build/Module.zig +++ b/lib/std/Build/Module.zig @@ -186,7 +186,7 @@ pub const IncludeDir = union(enum) { .embed_path => |lazy_path| { // Special case: this is a single arg. const resolved = lazy_path.getPath3(b, asking_step); - const arg = b.fmt("--embed-dir={}", .{resolved}); + const arg = b.fmt("--embed-dir={f}", .{resolved}); return zig_args.append(arg); }, }; diff --git a/lib/std/Build/Step.zig b/lib/std/Build/Step.zig index 9d4802fbbc..5192249f12 100644 --- a/lib/std/Build/Step.zig +++ b/lib/std/Build/Step.zig @@ -286,9 +286,7 @@ pub fn cast(step: *Step, comptime T: type) ?*T { } /// For debugging purposes, prints identifying information about this Step. 
-pub fn dump(step: *Step, file: std.fs.File) void { - const w = file.writer(); - const tty_config = std.io.tty.detectConfig(file); +pub fn dump(step: *Step, w: *std.io.Writer, tty_config: std.io.tty.Config) void { const debug_info = std.debug.getSelfDebugInfo() catch |err| { w.print("Unable to dump stack trace: Unable to open debug info: {s}\n", .{ @errorName(err), @@ -482,9 +480,9 @@ pub fn evalZigProcess( pub fn installFile(s: *Step, src_lazy_path: Build.LazyPath, dest_path: []const u8) !std.fs.Dir.PrevStatus { const b = s.owner; const src_path = src_lazy_path.getPath3(b, s); - try handleVerbose(b, null, &.{ "install", "-C", b.fmt("{}", .{src_path}), dest_path }); + try handleVerbose(b, null, &.{ "install", "-C", b.fmt("{f}", .{src_path}), dest_path }); return src_path.root_dir.handle.updateFile(src_path.sub_path, std.fs.cwd(), dest_path, .{}) catch |err| { - return s.fail("unable to update file from '{}' to '{s}': {s}", .{ + return s.fail("unable to update file from '{f}' to '{s}': {s}", .{ src_path, dest_path, @errorName(err), }); }; @@ -821,7 +819,7 @@ fn failWithCacheError(s: *Step, man: *const Build.Cache.Manifest, err: Build.Cac switch (err) { error.CacheCheckFailed => switch (man.diagnostic) { .none => unreachable, - .manifest_create, .manifest_read, .manifest_lock, .manifest_seek => |e| return s.fail("failed to check cache: {s} {s}", .{ + .manifest_create, .manifest_read, .manifest_lock => |e| return s.fail("failed to check cache: {s} {s}", .{ @tagName(man.diagnostic), @errorName(e), }), .file_open, .file_stat, .file_read, .file_hash => |op| { diff --git a/lib/std/Build/Step/CheckObject.zig b/lib/std/Build/Step/CheckObject.zig index 3b78258d1c..06ad26ccc8 100644 --- a/lib/std/Build/Step/CheckObject.zig +++ b/lib/std/Build/Step/CheckObject.zig @@ -6,6 +6,7 @@ const macho = std.macho; const math = std.math; const mem = std.mem; const testing = std.testing; +const Writer = std.io.Writer; const CheckObject = @This(); @@ -28,7 +29,7 @@ pub fn create( const gpa 
= owner.allocator; const check_object = gpa.create(CheckObject) catch @panic("OOM"); check_object.* = .{ - .step = Step.init(.{ + .step = .init(.{ .id = base_id, .name = "CheckObject", .owner = owner, @@ -80,7 +81,7 @@ const Action = struct { const hay = mem.trim(u8, haystack, " "); const phrase = mem.trim(u8, act.phrase.resolve(b, step), " "); - var candidate_vars = std.ArrayList(struct { name: []const u8, value: u64 }).init(b.allocator); + var candidate_vars: std.ArrayList(struct { name: []const u8, value: u64 }) = .init(b.allocator); var hay_it = mem.tokenizeScalar(u8, hay, ' '); var needle_it = mem.tokenizeScalar(u8, phrase, ' '); @@ -229,18 +230,11 @@ const ComputeCompareExpected = struct { literal: u64, }, - pub fn format( - value: @This(), - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - if (fmt.len != 0) std.fmt.invalidFmtError(fmt, value); - _ = options; - try writer.print("{s} ", .{@tagName(value.op)}); + pub fn format(value: ComputeCompareExpected, w: *Writer) Writer.Error!void { + try w.print("{t} ", .{value.op}); switch (value.value) { - .variable => |name| try writer.writeAll(name), - .literal => |x| try writer.print("{x}", .{x}), + .variable => |name| try w.writeAll(name), + .literal => |x| try w.print("{x}", .{x}), } } }; @@ -565,9 +559,11 @@ fn make(step: *Step, make_options: Step.MakeOptions) !void { null, .of(u64), null, - ) catch |err| return step.fail("unable to read '{'}': {s}", .{ src_path, @errorName(err) }); + ) catch |err| return step.fail("unable to read '{f}': {s}", .{ + std.fmt.alt(src_path, .formatEscapeChar), @errorName(err), + }); - var vars = std.StringHashMap(u64).init(gpa); + var vars: std.StringHashMap(u64) = .init(gpa); for (check_object.checks.items) |chk| { if (chk.kind == .compute_compare) { assert(chk.actions.items.len == 1); @@ -581,7 +577,7 @@ fn make(step: *Step, make_options: Step.MakeOptions) !void { return step.fail( \\ \\========= comparison failed for action: =========== 
- \\{s} {} + \\{s} {f} \\=================================================== , .{ act.phrase.resolve(b, step), act.expected.? }); } @@ -600,7 +596,7 @@ fn make(step: *Step, make_options: Step.MakeOptions) !void { // we either format message string with escaped codes, or not to aid debugging // the failed test. const fmtMessageString = struct { - fn fmtMessageString(kind: Check.Kind, msg: []const u8) std.fmt.Formatter(formatMessageString) { + fn fmtMessageString(kind: Check.Kind, msg: []const u8) std.fmt.Formatter(Ctx, formatMessageString) { return .{ .data = .{ .kind = kind, .msg = msg, @@ -612,17 +608,10 @@ fn make(step: *Step, make_options: Step.MakeOptions) !void { msg: []const u8, }; - fn formatMessageString( - ctx: Ctx, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = unused_fmt_string; - _ = options; + fn formatMessageString(ctx: Ctx, w: *Writer) !void { switch (ctx.kind) { - .dump_section => try writer.print("{s}", .{std.fmt.fmtSliceEscapeLower(ctx.msg)}), - else => try writer.writeAll(ctx.msg), + .dump_section => try w.print("{f}", .{std.ascii.hexEscape(ctx.msg, .lower)}), + else => try w.writeAll(ctx.msg), } } }.fmtMessageString; @@ -637,11 +626,11 @@ fn make(step: *Step, make_options: Step.MakeOptions) !void { return step.fail( \\ \\========= expected to find: ========================== - \\{s} + \\{f} \\========= but parsed file does not contain it: ======= - \\{s} + \\{f} \\========= file path: ================================= - \\{} + \\{f} , .{ fmtMessageString(chk.kind, act.phrase.resolve(b, step)), fmtMessageString(chk.kind, output), @@ -657,11 +646,11 @@ fn make(step: *Step, make_options: Step.MakeOptions) !void { return step.fail( \\ \\========= expected to find: ========================== - \\*{s}* + \\*{f}* \\========= but parsed file does not contain it: ======= - \\{s} + \\{f} \\========= file path: ================================= - \\{} + \\{f} , .{ fmtMessageString(chk.kind, 
act.phrase.resolve(b, step)), fmtMessageString(chk.kind, output), @@ -676,11 +665,11 @@ fn make(step: *Step, make_options: Step.MakeOptions) !void { return step.fail( \\ \\========= expected not to find: =================== - \\{s} + \\{f} \\========= but parsed file does contain it: ======== - \\{s} + \\{f} \\========= file path: ============================== - \\{} + \\{f} , .{ fmtMessageString(chk.kind, act.phrase.resolve(b, step)), fmtMessageString(chk.kind, output), @@ -696,13 +685,13 @@ fn make(step: *Step, make_options: Step.MakeOptions) !void { return step.fail( \\ \\========= expected to find and extract: ============== - \\{s} + \\{f} \\========= but parsed file does not contain it: ======= - \\{s} + \\{f} \\========= file path: ============================== - \\{} + \\{f} , .{ - act.phrase.resolve(b, step), + fmtMessageString(chk.kind, act.phrase.resolve(b, step)), fmtMessageString(chk.kind, output), src_path, }); @@ -963,7 +952,7 @@ const MachODumper = struct { .UUID => { const uuid = lc.cast(macho.uuid_command).?; try writer.writeByte('\n'); - try writer.print("uuid {x}", .{std.fmt.fmtSliceHexLower(&uuid.uuid)}); + try writer.print("uuid {x}", .{&uuid.uuid}); }, .DATA_IN_CODE, @@ -2012,7 +2001,7 @@ const ElfDumper = struct { for (ctx.phdrs, 0..) |phdr, phndx| { try writer.print("phdr {d}\n", .{phndx}); - try writer.print("type {s}\n", .{fmtPhType(phdr.p_type)}); + try writer.print("type {f}\n", .{fmtPhType(phdr.p_type)}); try writer.print("vaddr {x}\n", .{phdr.p_vaddr}); try writer.print("paddr {x}\n", .{phdr.p_paddr}); try writer.print("offset {x}\n", .{phdr.p_offset}); @@ -2052,7 +2041,7 @@ const ElfDumper = struct { for (ctx.shdrs, 0..) 
|shdr, shndx| { try writer.print("shdr {d}\n", .{shndx}); try writer.print("name {s}\n", .{ctx.getSectionName(shndx)}); - try writer.print("type {s}\n", .{fmtShType(shdr.sh_type)}); + try writer.print("type {f}\n", .{fmtShType(shdr.sh_type)}); try writer.print("addr {x}\n", .{shdr.sh_addr}); try writer.print("offset {x}\n", .{shdr.sh_offset}); try writer.print("size {x}\n", .{shdr.sh_size}); @@ -2325,18 +2314,11 @@ const ElfDumper = struct { return mem.sliceTo(@as([*:0]const u8, @ptrCast(strtab.ptr + off)), 0); } - fn fmtShType(sh_type: u32) std.fmt.Formatter(formatShType) { + fn fmtShType(sh_type: u32) std.fmt.Formatter(u32, formatShType) { return .{ .data = sh_type }; } - fn formatShType( - sh_type: u32, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = unused_fmt_string; - _ = options; + fn formatShType(sh_type: u32, writer: *Writer) Writer.Error!void { const name = switch (sh_type) { elf.SHT_NULL => "NULL", elf.SHT_PROGBITS => "PROGBITS", @@ -2372,18 +2354,11 @@ const ElfDumper = struct { try writer.writeAll(name); } - fn fmtPhType(ph_type: u32) std.fmt.Formatter(formatPhType) { + fn fmtPhType(ph_type: u32) std.fmt.Formatter(u32, formatPhType) { return .{ .data = ph_type }; } - fn formatPhType( - ph_type: u32, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = unused_fmt_string; - _ = options; + fn formatPhType(ph_type: u32, writer: *Writer) Writer.Error!void { const p_type = switch (ph_type) { elf.PT_NULL => "NULL", elf.PT_LOAD => "LOAD", diff --git a/lib/std/Build/Step/Compile.zig b/lib/std/Build/Step/Compile.zig index 9352280d96..356ea4e34e 100644 --- a/lib/std/Build/Step/Compile.zig +++ b/lib/std/Build/Step/Compile.zig @@ -409,7 +409,7 @@ pub fn create(owner: *std.Build, options: Options) *Compile { .linkage = options.linkage, .kind = options.kind, .name = name, - .step = Step.init(.{ + .step = .init(.{ .id = base_id, .name = 
step_name, .owner = owner, @@ -1017,20 +1017,16 @@ fn getGeneratedFilePath(compile: *Compile, comptime tag_name: []const u8, asking const maybe_path: ?*GeneratedFile = @field(compile, tag_name); const generated_file = maybe_path orelse { - std.debug.lockStdErr(); - const stderr = std.io.getStdErr(); - - std.Build.dumpBadGetPathHelp(&compile.step, stderr, compile.step.owner, asking_step) catch {}; - + const w = std.debug.lockStderrWriter(&.{}); + std.Build.dumpBadGetPathHelp(&compile.step, w, .detect(.stderr()), compile.step.owner, asking_step) catch {}; + std.debug.unlockStderrWriter(); @panic("missing emit option for " ++ tag_name); }; const path = generated_file.path orelse { - std.debug.lockStdErr(); - const stderr = std.io.getStdErr(); - - std.Build.dumpBadGetPathHelp(&compile.step, stderr, compile.step.owner, asking_step) catch {}; - + const w = std.debug.lockStderrWriter(&.{}); + std.Build.dumpBadGetPathHelp(&compile.step, w, .detect(.stderr()), compile.step.owner, asking_step) catch {}; + std.debug.unlockStderrWriter(); @panic(tag_name ++ " is null. Is there a missing step dependency?"); }; @@ -1542,7 +1538,7 @@ fn getZigArgs(compile: *Compile, fuzz: bool) ![][]const u8 { if (compile.kind == .lib and compile.linkage != null and compile.linkage.? 
== .dynamic) { if (compile.version) |version| { try zig_args.append("--version"); - try zig_args.append(b.fmt("{}", .{version})); + try zig_args.append(b.fmt("{f}", .{version})); } if (compile.rootModuleTarget().os.tag.isDarwin()) { @@ -1696,9 +1692,7 @@ fn getZigArgs(compile: *Compile, fuzz: bool) ![][]const u8 { if (compile.build_id orelse b.build_id) |build_id| { try zig_args.append(switch (build_id) { - .hexstring => |hs| b.fmt("--build-id=0x{s}", .{ - std.fmt.fmtSliceHexLower(hs.toSlice()), - }), + .hexstring => |hs| b.fmt("--build-id=0x{x}", .{hs.toSlice()}), .none, .fast, .uuid, .sha1, .md5 => b.fmt("--build-id={s}", .{@tagName(build_id)}), }); } @@ -1706,7 +1700,7 @@ fn getZigArgs(compile: *Compile, fuzz: bool) ![][]const u8 { const opt_zig_lib_dir = if (compile.zig_lib_dir) |dir| dir.getPath2(b, step) else if (b.graph.zig_lib_directory.path) |_| - b.fmt("{}", .{b.graph.zig_lib_directory}) + b.fmt("{f}", .{b.graph.zig_lib_directory}) else null; @@ -1746,8 +1740,7 @@ fn getZigArgs(compile: *Compile, fuzz: bool) ![][]const u8 { } if (compile.error_limit) |err_limit| try zig_args.appendSlice(&.{ - "--error-limit", - b.fmt("{}", .{err_limit}), + "--error-limit", b.fmt("{d}", .{err_limit}), }); try addFlag(&zig_args, "incremental", b.graph.incremental); @@ -1771,12 +1764,12 @@ fn getZigArgs(compile: *Compile, fuzz: bool) ![][]const u8 { for (arg, 0..) |c, arg_idx| { if (c == '\\' or c == '"') { // Slow path for arguments that need to be escaped. 
We'll need to allocate and copy - var escaped = try ArrayList(u8).initCapacity(arena, arg.len + 1); - const writer = escaped.writer(); - try writer.writeAll(arg[0..arg_idx]); + var escaped: std.ArrayListUnmanaged(u8) = .empty; + try escaped.ensureTotalCapacityPrecise(arena, arg.len + 1); + try escaped.appendSlice(arena, arg[0..arg_idx]); for (arg[arg_idx..]) |to_escape| { - if (to_escape == '\\' or to_escape == '"') try writer.writeByte('\\'); - try writer.writeByte(to_escape); + if (to_escape == '\\' or to_escape == '"') try escaped.append(arena, '\\'); + try escaped.append(arena, to_escape); } escaped_args.appendAssumeCapacity(escaped.items); continue :arg_blk; @@ -1793,11 +1786,7 @@ fn getZigArgs(compile: *Compile, fuzz: bool) ![][]const u8 { var args_hash: [Sha256.digest_length]u8 = undefined; Sha256.hash(args, &args_hash, .{}); var args_hex_hash: [Sha256.digest_length * 2]u8 = undefined; - _ = try std.fmt.bufPrint( - &args_hex_hash, - "{s}", - .{std.fmt.fmtSliceHexLower(&args_hash)}, - ); + _ = try std.fmt.bufPrint(&args_hex_hash, "{x}", .{&args_hash}); const args_file = "args" ++ fs.path.sep_str ++ args_hex_hash; try b.cache_root.handle.writeFile(.{ .sub_path = args_file, .data = args }); @@ -1836,7 +1825,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void { // Update generated files if (maybe_output_dir) |output_dir| { if (compile.emit_directory) |lp| { - lp.path = b.fmt("{}", .{output_dir}); + lp.path = b.fmt("{f}", .{output_dir}); } // zig fmt: off @@ -1970,20 +1959,23 @@ fn addFlag(args: *ArrayList([]const u8), comptime name: []const u8, opt: ?bool) fn checkCompileErrors(compile: *Compile) !void { // Clear this field so that it does not get printed by the build runner. 
const actual_eb = compile.step.result_error_bundle; - compile.step.result_error_bundle = std.zig.ErrorBundle.empty; + compile.step.result_error_bundle = .empty; const arena = compile.step.owner.allocator; - var actual_errors_list = std.ArrayList(u8).init(arena); - try actual_eb.renderToWriter(.{ - .ttyconf = .no_color, - .include_reference_trace = false, - .include_source_line = false, - }, actual_errors_list.writer()); - const actual_errors = try actual_errors_list.toOwnedSlice(); + const actual_errors = ae: { + var aw: std.io.Writer.Allocating = .init(arena); + defer aw.deinit(); + try actual_eb.renderToWriter(.{ + .ttyconf = .no_color, + .include_reference_trace = false, + .include_source_line = false, + }, &aw.writer); + break :ae try aw.toOwnedSlice(); + }; // Render the expected lines into a string that we can compare verbatim. - var expected_generated = std.ArrayList(u8).init(arena); + var expected_generated: std.ArrayListUnmanaged(u8) = .empty; const expect_errors = compile.expect_errors.?; var actual_line_it = mem.splitScalar(u8, actual_errors, '\n'); @@ -2042,17 +2034,17 @@ fn checkCompileErrors(compile: *Compile) !void { .exact => |expect_lines| { for (expect_lines) |expect_line| { const actual_line = actual_line_it.next() orelse { - try expected_generated.appendSlice(expect_line); - try expected_generated.append('\n'); + try expected_generated.appendSlice(arena, expect_line); + try expected_generated.append(arena, '\n'); continue; }; if (matchCompileError(actual_line, expect_line)) { - try expected_generated.appendSlice(actual_line); - try expected_generated.append('\n'); + try expected_generated.appendSlice(arena, actual_line); + try expected_generated.append(arena, '\n'); continue; } - try expected_generated.appendSlice(expect_line); - try expected_generated.append('\n'); + try expected_generated.appendSlice(arena, expect_line); + try expected_generated.append(arena, '\n'); } if (mem.eql(u8, expected_generated.items, actual_errors)) return; diff --git 
a/lib/std/Build/Step/ConfigHeader.zig b/lib/std/Build/Step/ConfigHeader.zig index 967e1edd05..ea7da12ff6 100644 --- a/lib/std/Build/Step/ConfigHeader.zig +++ b/lib/std/Build/Step/ConfigHeader.zig @@ -2,6 +2,7 @@ const std = @import("std"); const ConfigHeader = @This(); const Step = std.Build.Step; const Allocator = std.mem.Allocator; +const Writer = std.io.Writer; pub const Style = union(enum) { /// A configure format supported by autotools that uses `#undef foo` to @@ -87,7 +88,7 @@ pub fn create(owner: *std.Build, options: Options) *ConfigHeader { owner.fmt("configure {s} header to {s}", .{ @tagName(options.style), include_path }); config_header.* = .{ - .step = Step.init(.{ + .step = .init(.{ .id = base_id, .name = name, .owner = owner, @@ -95,7 +96,7 @@ pub fn create(owner: *std.Build, options: Options) *ConfigHeader { .first_ret_addr = options.first_ret_addr orelse @returnAddress(), }), .style = options.style, - .values = std.StringArrayHashMap(Value).init(owner.allocator), + .values = .init(owner.allocator), .max_bytes = options.max_bytes, .include_path = include_path, @@ -195,8 +196,9 @@ fn make(step: *Step, options: Step.MakeOptions) !void { man.hash.addBytes(config_header.include_path); man.hash.addOptionalBytes(config_header.include_guard_override); - var output = std.ArrayList(u8).init(gpa); - defer output.deinit(); + var aw: std.io.Writer.Allocating = .init(gpa); + defer aw.deinit(); + const bw = &aw.writer; const header_text = "This file was generated by ConfigHeader using the Zig Build System."; const c_generated_line = "/* " ++ header_text ++ " */\n"; @@ -204,7 +206,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void { switch (config_header.style) { .autoconf_undef, .autoconf, .autoconf_at => |file_source| { - try output.appendSlice(c_generated_line); + try bw.writeAll(c_generated_line); const src_path = file_source.getPath2(b, step); const contents = std.fs.cwd().readFileAlloc(arena, src_path, config_header.max_bytes) catch |err| { return 
step.fail("unable to read autoconf input file '{s}': {s}", .{ @@ -212,32 +214,33 @@ fn make(step: *Step, options: Step.MakeOptions) !void { }); }; switch (config_header.style) { - .autoconf_undef, .autoconf => try render_autoconf_undef(step, contents, &output, config_header.values, src_path), - .autoconf_at => try render_autoconf_at(step, contents, &output, config_header.values, src_path), + .autoconf_undef, .autoconf => try render_autoconf_undef(step, contents, bw, config_header.values, src_path), + .autoconf_at => try render_autoconf_at(step, contents, &aw, config_header.values, src_path), else => unreachable, } }, .cmake => |file_source| { - try output.appendSlice(c_generated_line); + try bw.writeAll(c_generated_line); const src_path = file_source.getPath2(b, step); const contents = std.fs.cwd().readFileAlloc(arena, src_path, config_header.max_bytes) catch |err| { return step.fail("unable to read cmake input file '{s}': {s}", .{ src_path, @errorName(err), }); }; - try render_cmake(step, contents, &output, config_header.values, src_path); + try render_cmake(step, contents, bw, config_header.values, src_path); }, .blank => { - try output.appendSlice(c_generated_line); - try render_blank(&output, config_header.values, config_header.include_path, config_header.include_guard_override); + try bw.writeAll(c_generated_line); + try render_blank(gpa, bw, config_header.values, config_header.include_path, config_header.include_guard_override); }, .nasm => { - try output.appendSlice(asm_generated_line); - try render_nasm(&output, config_header.values); + try bw.writeAll(asm_generated_line); + try render_nasm(bw, config_header.values); }, } - man.hash.addBytes(output.items); + const output = aw.getWritten(); + man.hash.addBytes(output); if (try step.cacheHit(&man)) { const digest = man.final(); @@ -256,13 +259,13 @@ fn make(step: *Step, options: Step.MakeOptions) !void { const sub_path_dirname = std.fs.path.dirname(sub_path).?; b.cache_root.handle.makePath(sub_path_dirname) 
catch |err| { - return step.fail("unable to make path '{}{s}': {s}", .{ + return step.fail("unable to make path '{f}{s}': {s}", .{ b.cache_root, sub_path_dirname, @errorName(err), }); }; - b.cache_root.handle.writeFile(.{ .sub_path = sub_path, .data = output.items }) catch |err| { - return step.fail("unable to write file '{}{s}': {s}", .{ + b.cache_root.handle.writeFile(.{ .sub_path = sub_path, .data = output }) catch |err| { + return step.fail("unable to write file '{f}{s}': {s}", .{ b.cache_root, sub_path, @errorName(err), }); }; @@ -274,7 +277,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void { fn render_autoconf_undef( step: *Step, contents: []const u8, - output: *std.ArrayList(u8), + bw: *Writer, values: std.StringArrayHashMap(Value), src_path: []const u8, ) !void { @@ -289,15 +292,15 @@ fn render_autoconf_undef( var line_it = std.mem.splitScalar(u8, contents, '\n'); while (line_it.next()) |line| : (line_index += 1) { if (!std.mem.startsWith(u8, line, "#")) { - try output.appendSlice(line); - try output.appendSlice("\n"); + try bw.writeAll(line); + try bw.writeByte('\n'); continue; } var it = std.mem.tokenizeAny(u8, line[1..], " \t\r"); const undef = it.next().?; if (!std.mem.eql(u8, undef, "undef")) { - try output.appendSlice(line); - try output.appendSlice("\n"); + try bw.writeAll(line); + try bw.writeByte('\n'); continue; } const name = it.next().?; @@ -309,7 +312,7 @@ fn render_autoconf_undef( continue; }; is_used.set(index); - try renderValueC(output, name, values.values()[index]); + try renderValueC(bw, name, values.values()[index]); } var unused_value_it = is_used.iterator(.{ .kind = .unset }); @@ -326,12 +329,13 @@ fn render_autoconf_undef( fn render_autoconf_at( step: *Step, contents: []const u8, - output: *std.ArrayList(u8), + aw: *std.io.Writer.Allocating, values: std.StringArrayHashMap(Value), src_path: []const u8, ) !void { const build = step.owner; const allocator = build.allocator; + const bw = &aw.writer; const used = 
allocator.alloc(bool, values.count()) catch @panic("OOM"); for (used) |*u| u.* = false; @@ -343,11 +347,11 @@ fn render_autoconf_at( while (line_it.next()) |line| : (line_index += 1) { const last_line = line_it.index == line_it.buffer.len; - const old_len = output.items.len; - expand_variables_autoconf_at(output, line, values, used) catch |err| switch (err) { + const old_len = aw.getWritten().len; + expand_variables_autoconf_at(bw, line, values, used) catch |err| switch (err) { error.MissingValue => { - const name = output.items[old_len..]; - defer output.shrinkRetainingCapacity(old_len); + const name = aw.getWritten()[old_len..]; + defer aw.shrinkRetainingCapacity(old_len); try step.addError("{s}:{d}: error: unspecified config header value: '{s}'", .{ src_path, line_index + 1, name, }); @@ -362,9 +366,7 @@ fn render_autoconf_at( continue; }, }; - if (!last_line) { - try output.append('\n'); - } + if (!last_line) try bw.writeByte('\n'); } for (values.unmanaged.entries.slice().items(.key), used) |name, u| { @@ -374,15 +376,13 @@ fn render_autoconf_at( } } - if (any_errors) { - return error.MakeFailed; - } + if (any_errors) return error.MakeFailed; } fn render_cmake( step: *Step, contents: []const u8, - output: *std.ArrayList(u8), + bw: *Writer, values: std.StringArrayHashMap(Value), src_path: []const u8, ) !void { @@ -417,10 +417,8 @@ fn render_cmake( defer allocator.free(line); if (!std.mem.startsWith(u8, line, "#")) { - try output.appendSlice(line); - if (!last_line) { - try output.appendSlice("\n"); - } + try bw.writeAll(line); + if (!last_line) try bw.writeByte('\n'); continue; } var it = std.mem.tokenizeAny(u8, line[1..], " \t\r"); @@ -428,10 +426,8 @@ fn render_cmake( if (!std.mem.eql(u8, cmakedefine, "cmakedefine") and !std.mem.eql(u8, cmakedefine, "cmakedefine01")) { - try output.appendSlice(line); - if (!last_line) { - try output.appendSlice("\n"); - } + try bw.writeAll(line); + if (!last_line) try bw.writeByte('\n'); continue; } @@ -502,7 +498,7 @@ fn 
render_cmake( value = Value{ .ident = it.rest() }; } - try renderValueC(output, name, value); + try renderValueC(bw, name, value); } if (any_errors) { @@ -511,13 +507,14 @@ fn render_cmake( } fn render_blank( - output: *std.ArrayList(u8), + gpa: std.mem.Allocator, + bw: *Writer, defines: std.StringArrayHashMap(Value), include_path: []const u8, include_guard_override: ?[]const u8, ) !void { const include_guard_name = include_guard_override orelse blk: { - const name = try output.allocator.dupe(u8, include_path); + const name = try gpa.dupe(u8, include_path); for (name) |*byte| { switch (byte.*) { 'a'...'z' => byte.* = byte.* - 'a' + 'A', @@ -527,92 +524,53 @@ fn render_blank( } break :blk name; }; + defer if (include_guard_override == null) gpa.free(include_guard_name); - try output.appendSlice("#ifndef "); - try output.appendSlice(include_guard_name); - try output.appendSlice("\n#define "); - try output.appendSlice(include_guard_name); - try output.appendSlice("\n"); + try bw.print( + \\#ifndef {[0]s} + \\#define {[0]s} + \\ + , .{include_guard_name}); const values = defines.values(); - for (defines.keys(), 0..) |name, i| { - try renderValueC(output, name, values[i]); - } + for (defines.keys(), 0..) |name, i| try renderValueC(bw, name, values[i]); - try output.appendSlice("#endif /* "); - try output.appendSlice(include_guard_name); - try output.appendSlice(" */\n"); + try bw.print( + \\#endif /* {s} */ + \\ + , .{include_guard_name}); } -fn render_nasm(output: *std.ArrayList(u8), defines: std.StringArrayHashMap(Value)) !void { - const values = defines.values(); - for (defines.keys(), 0..) 
|name, i| { - try renderValueNasm(output, name, values[i]); - } +fn render_nasm(bw: *Writer, defines: std.StringArrayHashMap(Value)) !void { + for (defines.keys(), defines.values()) |name, value| try renderValueNasm(bw, name, value); } -fn renderValueC(output: *std.ArrayList(u8), name: []const u8, value: Value) !void { +fn renderValueC(bw: *Writer, name: []const u8, value: Value) !void { switch (value) { - .undef => { - try output.appendSlice("/* #undef "); - try output.appendSlice(name); - try output.appendSlice(" */\n"); - }, - .defined => { - try output.appendSlice("#define "); - try output.appendSlice(name); - try output.appendSlice("\n"); - }, - .boolean => |b| { - try output.appendSlice("#define "); - try output.appendSlice(name); - try output.appendSlice(if (b) " 1\n" else " 0\n"); - }, - .int => |i| { - try output.writer().print("#define {s} {d}\n", .{ name, i }); - }, - .ident => |ident| { - try output.writer().print("#define {s} {s}\n", .{ name, ident }); - }, - .string => |string| { - // TODO: use C-specific escaping instead of zig string literals - try output.writer().print("#define {s} \"{}\"\n", .{ name, std.zig.fmtEscapes(string) }); - }, + .undef => try bw.print("/* #undef {s} */\n", .{name}), + .defined => try bw.print("#define {s}\n", .{name}), + .boolean => |b| try bw.print("#define {s} {c}\n", .{ name, @as(u8, '0') + @intFromBool(b) }), + .int => |i| try bw.print("#define {s} {d}\n", .{ name, i }), + .ident => |ident| try bw.print("#define {s} {s}\n", .{ name, ident }), + // TODO: use C-specific escaping instead of zig string literals + .string => |string| try bw.print("#define {s} \"{f}\"\n", .{ name, std.zig.fmtString(string) }), } } -fn renderValueNasm(output: *std.ArrayList(u8), name: []const u8, value: Value) !void { +fn renderValueNasm(bw: *Writer, name: []const u8, value: Value) !void { switch (value) { - .undef => { - try output.appendSlice("; %undef "); - try output.appendSlice(name); - try output.appendSlice("\n"); - }, - .defined => { 
- try output.appendSlice("%define "); - try output.appendSlice(name); - try output.appendSlice("\n"); - }, - .boolean => |b| { - try output.appendSlice("%define "); - try output.appendSlice(name); - try output.appendSlice(if (b) " 1\n" else " 0\n"); - }, - .int => |i| { - try output.writer().print("%define {s} {d}\n", .{ name, i }); - }, - .ident => |ident| { - try output.writer().print("%define {s} {s}\n", .{ name, ident }); - }, - .string => |string| { - // TODO: use nasm-specific escaping instead of zig string literals - try output.writer().print("%define {s} \"{}\"\n", .{ name, std.zig.fmtEscapes(string) }); - }, + .undef => try bw.print("; %undef {s}\n", .{name}), + .defined => try bw.print("%define {s}\n", .{name}), + .boolean => |b| try bw.print("%define {s} {c}\n", .{ name, @as(u8, '0') + @intFromBool(b) }), + .int => |i| try bw.print("%define {s} {d}\n", .{ name, i }), + .ident => |ident| try bw.print("%define {s} {s}\n", .{ name, ident }), + // TODO: use nasm-specific escaping instead of zig string literals + .string => |string| try bw.print("%define {s} \"{f}\"\n", .{ name, std.zig.fmtString(string) }), } } fn expand_variables_autoconf_at( - output: *std.ArrayList(u8), + bw: *Writer, contents: []const u8, values: std.StringArrayHashMap(Value), used: []bool, @@ -637,23 +595,17 @@ fn expand_variables_autoconf_at( const key = contents[curr + 1 .. close_pos]; const index = values.getIndex(key) orelse { // Report the missing key to the caller. 
- try output.appendSlice(key); + try bw.writeAll(key); return error.MissingValue; }; const value = values.unmanaged.entries.slice().items(.value)[index]; used[index] = true; - try output.appendSlice(contents[source_offset..curr]); + try bw.writeAll(contents[source_offset..curr]); switch (value) { .undef, .defined => {}, - .boolean => |b| { - try output.append(if (b) '1' else '0'); - }, - .int => |i| { - try output.writer().print("{d}", .{i}); - }, - .ident, .string => |s| { - try output.appendSlice(s); - }, + .boolean => |b| try bw.writeByte(@as(u8, '0') + @intFromBool(b)), + .int => |i| try bw.print("{d}", .{i}), + .ident, .string => |s| try bw.writeAll(s), } curr = close_pos; @@ -661,7 +613,7 @@ fn expand_variables_autoconf_at( } } - try output.appendSlice(contents[source_offset..]); + try bw.writeAll(contents[source_offset..]); } fn expand_variables_cmake( @@ -669,7 +621,7 @@ fn expand_variables_cmake( contents: []const u8, values: std.StringArrayHashMap(Value), ) ![]const u8 { - var result = std.ArrayList(u8).init(allocator); + var result: std.ArrayList(u8) = .init(allocator); errdefer result.deinit(); const valid_varname_chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789/_.+-"; @@ -681,7 +633,7 @@ fn expand_variables_cmake( source: usize, target: usize, }; - var var_stack = std.ArrayList(Position).init(allocator); + var var_stack: std.ArrayList(Position) = .init(allocator); defer var_stack.deinit(); loop: while (curr < contents.len) : (curr += 1) { switch (contents[curr]) { @@ -707,7 +659,7 @@ fn expand_variables_cmake( try result.append(if (b) '1' else '0'); }, .int => |i| { - try result.writer().print("{d}", .{i}); + try result.print("{d}", .{i}); }, .ident, .string => |s| { try result.appendSlice(s); @@ -764,7 +716,7 @@ fn expand_variables_cmake( try result.append(if (b) '1' else '0'); }, .int => |i| { - try result.writer().print("{d}", .{i}); + try result.print("{d}", .{i}); }, .ident, .string => |s| { try result.appendSlice(s); @@ 
-801,17 +753,17 @@ fn testReplaceVariablesAutoconfAt( expected: []const u8, values: std.StringArrayHashMap(Value), ) !void { - var output = std.ArrayList(u8).init(allocator); - defer output.deinit(); + var aw: std.io.Writer.Allocating = .init(allocator); + defer aw.deinit(); const used = try allocator.alloc(bool, values.count()); for (used) |*u| u.* = false; defer allocator.free(used); - try expand_variables_autoconf_at(&output, contents, values, used); + try expand_variables_autoconf_at(&aw.writer, contents, values, used); for (used) |u| if (!u) return error.UnusedValue; - try std.testing.expectEqualStrings(expected, output.items); + try std.testing.expectEqualStrings(expected, aw.getWritten()); } fn testReplaceVariablesCMake( @@ -828,7 +780,7 @@ fn testReplaceVariablesCMake( test "expand_variables_autoconf_at simple cases" { const allocator = std.testing.allocator; - var values = std.StringArrayHashMap(Value).init(allocator); + var values: std.StringArrayHashMap(Value) = .init(allocator); defer values.deinit(); // empty strings are preserved @@ -924,7 +876,7 @@ test "expand_variables_autoconf_at simple cases" { test "expand_variables_autoconf_at edge cases" { const allocator = std.testing.allocator; - var values = std.StringArrayHashMap(Value).init(allocator); + var values: std.StringArrayHashMap(Value) = .init(allocator); defer values.deinit(); // @-vars resolved only when they wrap valid characters, otherwise considered literals @@ -940,7 +892,7 @@ test "expand_variables_autoconf_at edge cases" { test "expand_variables_cmake simple cases" { const allocator = std.testing.allocator; - var values = std.StringArrayHashMap(Value).init(allocator); + var values: std.StringArrayHashMap(Value) = .init(allocator); defer values.deinit(); try values.putNoClobber("undef", .undef); @@ -1028,7 +980,7 @@ test "expand_variables_cmake simple cases" { test "expand_variables_cmake edge cases" { const allocator = std.testing.allocator; - var values = 
std.StringArrayHashMap(Value).init(allocator); + var values: std.StringArrayHashMap(Value) = .init(allocator); defer values.deinit(); // special symbols @@ -1089,7 +1041,7 @@ test "expand_variables_cmake edge cases" { test "expand_variables_cmake escaped characters" { const allocator = std.testing.allocator; - var values = std.StringArrayHashMap(Value).init(allocator); + var values: std.StringArrayHashMap(Value) = .init(allocator); defer values.deinit(); try values.putNoClobber("string", Value{ .string = "text" }); diff --git a/lib/std/Build/Step/InstallArtifact.zig b/lib/std/Build/Step/InstallArtifact.zig index 6a5b834cae..c203ae924b 100644 --- a/lib/std/Build/Step/InstallArtifact.zig +++ b/lib/std/Build/Step/InstallArtifact.zig @@ -164,7 +164,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void { const full_h_prefix = b.getInstallPath(h_dir, dir.dest_rel_path); var src_dir = src_dir_path.root_dir.handle.openDir(src_dir_path.subPathOrDot(), .{ .iterate = true }) catch |err| { - return step.fail("unable to open source directory '{}': {s}", .{ + return step.fail("unable to open source directory '{f}': {s}", .{ src_dir_path, @errorName(err), }); }; diff --git a/lib/std/Build/Step/InstallDir.zig b/lib/std/Build/Step/InstallDir.zig index ece1184d8f..fd8a7d113f 100644 --- a/lib/std/Build/Step/InstallDir.zig +++ b/lib/std/Build/Step/InstallDir.zig @@ -65,7 +65,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void { const src_dir_path = install_dir.options.source_dir.getPath3(b, step); const need_derived_inputs = try step.addDirectoryWatchInput(install_dir.options.source_dir); var src_dir = src_dir_path.root_dir.handle.openDir(src_dir_path.subPathOrDot(), .{ .iterate = true }) catch |err| { - return step.fail("unable to open source directory '{}': {s}", .{ + return step.fail("unable to open source directory '{f}': {s}", .{ src_dir_path, @errorName(err), }); }; diff --git a/lib/std/Build/Step/Options.zig b/lib/std/Build/Step/Options.zig index 
dd09c0b5c0..6f8c40b1d5 100644 --- a/lib/std/Build/Step/Options.zig +++ b/lib/std/Build/Step/Options.zig @@ -12,23 +12,23 @@ pub const base_id: Step.Id = .options; step: Step, generated_file: GeneratedFile, -contents: std.ArrayList(u8), -args: std.ArrayList(Arg), -encountered_types: std.StringHashMap(void), +contents: std.ArrayListUnmanaged(u8), +args: std.ArrayListUnmanaged(Arg), +encountered_types: std.StringHashMapUnmanaged(void), pub fn create(owner: *std.Build) *Options { const options = owner.allocator.create(Options) catch @panic("OOM"); options.* = .{ - .step = Step.init(.{ + .step = .init(.{ .id = base_id, .name = "options", .owner = owner, .makeFn = make, }), .generated_file = undefined, - .contents = std.ArrayList(u8).init(owner.allocator), - .args = std.ArrayList(Arg).init(owner.allocator), - .encountered_types = std.StringHashMap(void).init(owner.allocator), + .contents = .empty, + .args = .empty, + .encountered_types = .empty, }; options.generated_file = .{ .step = &options.step }; @@ -40,110 +40,119 @@ pub fn addOption(options: *Options, comptime T: type, name: []const u8, value: T } fn addOptionFallible(options: *Options, comptime T: type, name: []const u8, value: T) !void { - const out = options.contents.writer(); - try printType(options, out, T, value, 0, name); + try printType(options, &options.contents, T, value, 0, name); } -fn printType(options: *Options, out: anytype, comptime T: type, value: T, indent: u8, name: ?[]const u8) !void { +fn printType( + options: *Options, + out: *std.ArrayListUnmanaged(u8), + comptime T: type, + value: T, + indent: u8, + name: ?[]const u8, +) !void { + const gpa = options.step.owner.allocator; switch (T) { []const []const u8 => { if (name) |payload| { - try out.print("pub const {}: []const []const u8 = ", .{std.zig.fmtId(payload)}); + try out.print(gpa, "pub const {f}: []const []const u8 = ", .{std.zig.fmtId(payload)}); } - try out.writeAll("&[_][]const u8{\n"); + try out.appendSlice(gpa, "&[_][]const u8{\n"); 
for (value) |slice| { - try out.writeByteNTimes(' ', indent); - try out.print(" \"{}\",\n", .{std.zig.fmtEscapes(slice)}); + try out.appendNTimes(gpa, ' ', indent); + try out.print(gpa, " \"{f}\",\n", .{std.zig.fmtString(slice)}); } if (name != null) { - try out.writeAll("};\n"); + try out.appendSlice(gpa, "};\n"); } else { - try out.writeAll("},\n"); + try out.appendSlice(gpa, "},\n"); } return; }, []const u8 => { if (name) |some| { - try out.print("pub const {}: []const u8 = \"{}\";", .{ std.zig.fmtId(some), std.zig.fmtEscapes(value) }); + try out.print(gpa, "pub const {f}: []const u8 = \"{f}\";", .{ + std.zig.fmtId(some), std.zig.fmtString(value), + }); } else { - try out.print("\"{}\",", .{std.zig.fmtEscapes(value)}); + try out.print(gpa, "\"{f}\",", .{std.zig.fmtString(value)}); } - return out.writeAll("\n"); + return out.appendSlice(gpa, "\n"); }, [:0]const u8 => { if (name) |some| { - try out.print("pub const {}: [:0]const u8 = \"{}\";", .{ std.zig.fmtId(some), std.zig.fmtEscapes(value) }); + try out.print(gpa, "pub const {f}: [:0]const u8 = \"{f}\";", .{ std.zig.fmtId(some), std.zig.fmtString(value) }); } else { - try out.print("\"{}\",", .{std.zig.fmtEscapes(value)}); + try out.print(gpa, "\"{f}\",", .{std.zig.fmtString(value)}); } - return out.writeAll("\n"); + return out.appendSlice(gpa, "\n"); }, ?[]const u8 => { if (name) |some| { - try out.print("pub const {}: ?[]const u8 = ", .{std.zig.fmtId(some)}); + try out.print(gpa, "pub const {f}: ?[]const u8 = ", .{std.zig.fmtId(some)}); } if (value) |payload| { - try out.print("\"{}\"", .{std.zig.fmtEscapes(payload)}); + try out.print(gpa, "\"{f}\"", .{std.zig.fmtString(payload)}); } else { - try out.writeAll("null"); + try out.appendSlice(gpa, "null"); } if (name != null) { - try out.writeAll(";\n"); + try out.appendSlice(gpa, ";\n"); } else { - try out.writeAll(",\n"); + try out.appendSlice(gpa, ",\n"); } return; }, ?[:0]const u8 => { if (name) |some| { - try out.print("pub const {}: ?[:0]const u8 = ", 
.{std.zig.fmtId(some)}); + try out.print(gpa, "pub const {f}: ?[:0]const u8 = ", .{std.zig.fmtId(some)}); } if (value) |payload| { - try out.print("\"{}\"", .{std.zig.fmtEscapes(payload)}); + try out.print(gpa, "\"{f}\"", .{std.zig.fmtString(payload)}); } else { - try out.writeAll("null"); + try out.appendSlice(gpa, "null"); } if (name != null) { - try out.writeAll(";\n"); + try out.appendSlice(gpa, ";\n"); } else { - try out.writeAll(",\n"); + try out.appendSlice(gpa, ",\n"); } return; }, std.SemanticVersion => { if (name) |some| { - try out.print("pub const {}: @import(\"std\").SemanticVersion = ", .{std.zig.fmtId(some)}); + try out.print(gpa, "pub const {f}: @import(\"std\").SemanticVersion = ", .{std.zig.fmtId(some)}); } - try out.writeAll(".{\n"); - try out.writeByteNTimes(' ', indent); - try out.print(" .major = {d},\n", .{value.major}); - try out.writeByteNTimes(' ', indent); - try out.print(" .minor = {d},\n", .{value.minor}); - try out.writeByteNTimes(' ', indent); - try out.print(" .patch = {d},\n", .{value.patch}); + try out.appendSlice(gpa, ".{\n"); + try out.appendNTimes(gpa, ' ', indent); + try out.print(gpa, " .major = {d},\n", .{value.major}); + try out.appendNTimes(gpa, ' ', indent); + try out.print(gpa, " .minor = {d},\n", .{value.minor}); + try out.appendNTimes(gpa, ' ', indent); + try out.print(gpa, " .patch = {d},\n", .{value.patch}); if (value.pre) |some| { - try out.writeByteNTimes(' ', indent); - try out.print(" .pre = \"{}\",\n", .{std.zig.fmtEscapes(some)}); + try out.appendNTimes(gpa, ' ', indent); + try out.print(gpa, " .pre = \"{f}\",\n", .{std.zig.fmtString(some)}); } if (value.build) |some| { - try out.writeByteNTimes(' ', indent); - try out.print(" .build = \"{}\",\n", .{std.zig.fmtEscapes(some)}); + try out.appendNTimes(gpa, ' ', indent); + try out.print(gpa, " .build = \"{f}\",\n", .{std.zig.fmtString(some)}); } if (name != null) { - try out.writeAll("};\n"); + try out.appendSlice(gpa, "};\n"); } else { - try out.writeAll("},\n"); 
+ try out.appendSlice(gpa, "},\n"); } return; }, @@ -153,21 +162,21 @@ fn printType(options: *Options, out: anytype, comptime T: type, value: T, indent switch (@typeInfo(T)) { .array => { if (name) |some| { - try out.print("pub const {}: {s} = ", .{ std.zig.fmtId(some), @typeName(T) }); + try out.print(gpa, "pub const {f}: {s} = ", .{ std.zig.fmtId(some), @typeName(T) }); } - try out.print("{s} {{\n", .{@typeName(T)}); + try out.print(gpa, "{s} {{\n", .{@typeName(T)}); for (value) |item| { - try out.writeByteNTimes(' ', indent + 4); + try out.appendNTimes(gpa, ' ', indent + 4); try printType(options, out, @TypeOf(item), item, indent + 4, null); } - try out.writeByteNTimes(' ', indent); - try out.writeAll("}"); + try out.appendNTimes(gpa, ' ', indent); + try out.appendSlice(gpa, "}"); if (name != null) { - try out.writeAll(";\n"); + try out.appendSlice(gpa, ";\n"); } else { - try out.writeAll(",\n"); + try out.appendSlice(gpa, ",\n"); } return; }, @@ -177,27 +186,27 @@ fn printType(options: *Options, out: anytype, comptime T: type, value: T, indent } if (name) |some| { - try out.print("pub const {}: {s} = ", .{ std.zig.fmtId(some), @typeName(T) }); + try out.print(gpa, "pub const {f}: {s} = ", .{ std.zig.fmtId(some), @typeName(T) }); } - try out.print("&[_]{s} {{\n", .{@typeName(p.child)}); + try out.print(gpa, "&[_]{s} {{\n", .{@typeName(p.child)}); for (value) |item| { - try out.writeByteNTimes(' ', indent + 4); + try out.appendNTimes(gpa, ' ', indent + 4); try printType(options, out, @TypeOf(item), item, indent + 4, null); } - try out.writeByteNTimes(' ', indent); - try out.writeAll("}"); + try out.appendNTimes(gpa, ' ', indent); + try out.appendSlice(gpa, "}"); if (name != null) { - try out.writeAll(";\n"); + try out.appendSlice(gpa, ";\n"); } else { - try out.writeAll(",\n"); + try out.appendSlice(gpa, ",\n"); } return; }, .optional => { if (name) |some| { - try out.print("pub const {}: {s} = ", .{ std.zig.fmtId(some), @typeName(T) }); + try out.print(gpa, "pub 
const {f}: {s} = ", .{ std.zig.fmtId(some), @typeName(T) }); } if (value) |inner| { @@ -206,13 +215,13 @@ fn printType(options: *Options, out: anytype, comptime T: type, value: T, indent _ = options.contents.pop(); _ = options.contents.pop(); } else { - try out.writeAll("null"); + try out.appendSlice(gpa, "null"); } if (name != null) { - try out.writeAll(";\n"); + try out.appendSlice(gpa, ";\n"); } else { - try out.writeAll(",\n"); + try out.appendSlice(gpa, ",\n"); } return; }, @@ -224,9 +233,9 @@ fn printType(options: *Options, out: anytype, comptime T: type, value: T, indent .null, => { if (name) |some| { - try out.print("pub const {}: {s} = {any};\n", .{ std.zig.fmtId(some), @typeName(T), value }); + try out.print(gpa, "pub const {f}: {s} = {any};\n", .{ std.zig.fmtId(some), @typeName(T), value }); } else { - try out.print("{any},\n", .{value}); + try out.print(gpa, "{any},\n", .{value}); } return; }, @@ -234,10 +243,10 @@ fn printType(options: *Options, out: anytype, comptime T: type, value: T, indent try printEnum(options, out, T, info, indent); if (name) |some| { - try out.print("pub const {}: {} = .{p_};\n", .{ + try out.print(gpa, "pub const {f}: {f} = .{f};\n", .{ std.zig.fmtId(some), std.zig.fmtId(@typeName(T)), - std.zig.fmtId(@tagName(value)), + std.zig.fmtIdFlags(@tagName(value), .{ .allow_underscore = true, .allow_primitive = true }), }); } return; @@ -246,7 +255,7 @@ fn printType(options: *Options, out: anytype, comptime T: type, value: T, indent try printStruct(options, out, T, info, indent); if (name) |some| { - try out.print("pub const {}: {} = ", .{ + try out.print(gpa, "pub const {f}: {f} = ", .{ std.zig.fmtId(some), std.zig.fmtId(@typeName(T)), }); @@ -258,7 +267,7 @@ fn printType(options: *Options, out: anytype, comptime T: type, value: T, indent } } -fn printUserDefinedType(options: *Options, out: anytype, comptime T: type, indent: u8) !void { +fn printUserDefinedType(options: *Options, out: *std.ArrayListUnmanaged(u8), comptime T: type, 
indent: u8) !void { switch (@typeInfo(T)) { .@"enum" => |info| { return try printEnum(options, out, T, info, indent); @@ -270,94 +279,119 @@ fn printUserDefinedType(options: *Options, out: anytype, comptime T: type, inden } } -fn printEnum(options: *Options, out: anytype, comptime T: type, comptime val: std.builtin.Type.Enum, indent: u8) !void { - const gop = try options.encountered_types.getOrPut(@typeName(T)); +fn printEnum( + options: *Options, + out: *std.ArrayListUnmanaged(u8), + comptime T: type, + comptime val: std.builtin.Type.Enum, + indent: u8, +) !void { + const gpa = options.step.owner.allocator; + const gop = try options.encountered_types.getOrPut(gpa, @typeName(T)); if (gop.found_existing) return; - try out.writeByteNTimes(' ', indent); - try out.print("pub const {} = enum ({s}) {{\n", .{ std.zig.fmtId(@typeName(T)), @typeName(val.tag_type) }); + try out.appendNTimes(gpa, ' ', indent); + try out.print(gpa, "pub const {f} = enum ({s}) {{\n", .{ std.zig.fmtId(@typeName(T)), @typeName(val.tag_type) }); inline for (val.fields) |field| { - try out.writeByteNTimes(' ', indent); - try out.print(" {p} = {d},\n", .{ std.zig.fmtId(field.name), field.value }); + try out.appendNTimes(gpa, ' ', indent); + try out.print(gpa, " {f} = {d},\n", .{ + std.zig.fmtIdFlags(field.name, .{ .allow_primitive = true }), field.value, + }); } if (!val.is_exhaustive) { - try out.writeByteNTimes(' ', indent); - try out.writeAll(" _,\n"); + try out.appendNTimes(gpa, ' ', indent); + try out.appendSlice(gpa, " _,\n"); } - try out.writeByteNTimes(' ', indent); - try out.writeAll("};\n"); + try out.appendNTimes(gpa, ' ', indent); + try out.appendSlice(gpa, "};\n"); } -fn printStruct(options: *Options, out: anytype, comptime T: type, comptime val: std.builtin.Type.Struct, indent: u8) !void { - const gop = try options.encountered_types.getOrPut(@typeName(T)); +fn printStruct(options: *Options, out: *std.ArrayListUnmanaged(u8), comptime T: type, comptime val: std.builtin.Type.Struct, 
indent: u8) !void { + const gpa = options.step.owner.allocator; + const gop = try options.encountered_types.getOrPut(gpa, @typeName(T)); if (gop.found_existing) return; - try out.writeByteNTimes(' ', indent); - try out.print("pub const {} = ", .{std.zig.fmtId(@typeName(T))}); + try out.appendNTimes(gpa, ' ', indent); + try out.print(gpa, "pub const {f} = ", .{std.zig.fmtId(@typeName(T))}); switch (val.layout) { - .@"extern" => try out.writeAll("extern struct"), - .@"packed" => try out.writeAll("packed struct"), - else => try out.writeAll("struct"), + .@"extern" => try out.appendSlice(gpa, "extern struct"), + .@"packed" => try out.appendSlice(gpa, "packed struct"), + else => try out.appendSlice(gpa, "struct"), } - try out.writeAll(" {\n"); + try out.appendSlice(gpa, " {\n"); inline for (val.fields) |field| { - try out.writeByteNTimes(' ', indent); + try out.appendNTimes(gpa, ' ', indent); const type_name = @typeName(field.type); // If the type name doesn't contains a '.' the type is from zig builtins. 
if (std.mem.containsAtLeast(u8, type_name, 1, ".")) { - try out.print(" {p_}: {}", .{ std.zig.fmtId(field.name), std.zig.fmtId(type_name) }); + try out.print(gpa, " {f}: {f}", .{ + std.zig.fmtIdFlags(field.name, .{ .allow_underscore = true, .allow_primitive = true }), + std.zig.fmtId(type_name), + }); } else { - try out.print(" {p_}: {s}", .{ std.zig.fmtId(field.name), type_name }); + try out.print(gpa, " {f}: {s}", .{ + std.zig.fmtIdFlags(field.name, .{ .allow_underscore = true, .allow_primitive = true }), + type_name, + }); } if (field.defaultValue()) |default_value| { - try out.writeAll(" = "); + try out.appendSlice(gpa, " = "); switch (@typeInfo(@TypeOf(default_value))) { - .@"enum" => try out.print(".{s},\n", .{@tagName(default_value)}), + .@"enum" => try out.print(gpa, ".{s},\n", .{@tagName(default_value)}), .@"struct" => |info| { try printStructValue(options, out, info, default_value, indent + 4); }, else => try printType(options, out, @TypeOf(default_value), default_value, indent, null), } } else { - try out.writeAll(",\n"); + try out.appendSlice(gpa, ",\n"); } } // TODO: write declarations - try out.writeByteNTimes(' ', indent); - try out.writeAll("};\n"); + try out.appendNTimes(gpa, ' ', indent); + try out.appendSlice(gpa, "};\n"); inline for (val.fields) |field| { try printUserDefinedType(options, out, field.type, 0); } } -fn printStructValue(options: *Options, out: anytype, comptime struct_val: std.builtin.Type.Struct, val: anytype, indent: u8) !void { - try out.writeAll(".{\n"); +fn printStructValue( + options: *Options, + out: *std.ArrayListUnmanaged(u8), + comptime struct_val: std.builtin.Type.Struct, + val: anytype, + indent: u8, +) !void { + const gpa = options.step.owner.allocator; + try out.appendSlice(gpa, ".{\n"); if (struct_val.is_tuple) { inline for (struct_val.fields) |field| { - try out.writeByteNTimes(' ', indent); + try out.appendNTimes(gpa, ' ', indent); try printType(options, out, @TypeOf(@field(val, field.name)), @field(val, 
field.name), indent, null); } } else { inline for (struct_val.fields) |field| { - try out.writeByteNTimes(' ', indent); - try out.print(" .{p_} = ", .{std.zig.fmtId(field.name)}); + try out.appendNTimes(gpa, ' ', indent); + try out.print(gpa, " .{f} = ", .{ + std.zig.fmtIdFlags(field.name, .{ .allow_primitive = true, .allow_underscore = true }), + }); const field_name = @field(val, field.name); switch (@typeInfo(@TypeOf(field_name))) { - .@"enum" => try out.print(".{s},\n", .{@tagName(field_name)}), + .@"enum" => try out.print(gpa, ".{s},\n", .{@tagName(field_name)}), .@"struct" => |struct_info| { try printStructValue(options, out, struct_info, field_name, indent + 4); }, @@ -367,10 +401,10 @@ fn printStructValue(options: *Options, out: anytype, comptime struct_val: std.bu } if (indent == 0) { - try out.writeAll("};\n"); + try out.appendSlice(gpa, "};\n"); } else { - try out.writeByteNTimes(' ', indent); - try out.writeAll("},\n"); + try out.appendNTimes(gpa, ' ', indent); + try out.appendSlice(gpa, "},\n"); } } @@ -381,7 +415,8 @@ pub fn addOptionPath( name: []const u8, path: LazyPath, ) void { - options.args.append(.{ + const arena = options.step.owner.allocator; + options.args.append(arena, .{ .name = options.step.owner.dupe(name), .path = path.dupe(options.step.owner), }) catch @panic("OOM"); @@ -440,7 +475,7 @@ fn make(step: *Step, make_options: Step.MakeOptions) !void { error.FileNotFound => { const sub_dirname = fs.path.dirname(sub_path).?; b.cache_root.handle.makePath(sub_dirname) catch |e| { - return step.fail("unable to make path '{}{s}': {s}", .{ + return step.fail("unable to make path '{f}{s}': {s}", .{ b.cache_root, sub_dirname, @errorName(e), }); }; @@ -452,13 +487,13 @@ fn make(step: *Step, make_options: Step.MakeOptions) !void { const tmp_sub_path_dirname = fs.path.dirname(tmp_sub_path).?; b.cache_root.handle.makePath(tmp_sub_path_dirname) catch |err| { - return step.fail("unable to make temporary directory '{}{s}': {s}", .{ + return 
step.fail("unable to make temporary directory '{f}{s}': {s}", .{ b.cache_root, tmp_sub_path_dirname, @errorName(err), }); }; b.cache_root.handle.writeFile(.{ .sub_path = tmp_sub_path, .data = options.contents.items }) catch |err| { - return step.fail("unable to write options to '{}{s}': {s}", .{ + return step.fail("unable to write options to '{f}{s}': {s}", .{ b.cache_root, tmp_sub_path, @errorName(err), }); }; @@ -467,7 +502,7 @@ fn make(step: *Step, make_options: Step.MakeOptions) !void { error.PathAlreadyExists => { // Other process beat us to it. Clean up the temp file. b.cache_root.handle.deleteFile(tmp_sub_path) catch |e| { - try step.addError("warning: unable to delete temp file '{}{s}': {s}", .{ + try step.addError("warning: unable to delete temp file '{f}{s}': {s}", .{ b.cache_root, tmp_sub_path, @errorName(e), }); }; @@ -475,7 +510,7 @@ fn make(step: *Step, make_options: Step.MakeOptions) !void { return; }, else => { - return step.fail("unable to rename options from '{}{s}' to '{}{s}': {s}", .{ + return step.fail("unable to rename options from '{f}{s}' to '{f}{s}': {s}", .{ b.cache_root, tmp_sub_path, b.cache_root, sub_path, @errorName(err), @@ -483,7 +518,7 @@ fn make(step: *Step, make_options: Step.MakeOptions) !void { }, }; }, - else => |e| return step.fail("unable to access options file '{}{s}': {s}", .{ + else => |e| return step.fail("unable to access options file '{f}{s}': {s}", .{ b.cache_root, sub_path, @errorName(e), }), } @@ -643,5 +678,5 @@ test Options { \\ , options.contents.items); - _ = try std.zig.Ast.parse(arena.allocator(), try options.contents.toOwnedSliceSentinel(0), .zig); + _ = try std.zig.Ast.parse(arena.allocator(), try options.contents.toOwnedSliceSentinel(arena.allocator(), 0), .zig); } diff --git a/lib/std/Build/Step/Run.zig b/lib/std/Build/Step/Run.zig index 4b8dabc14e..a835e137cf 100644 --- a/lib/std/Build/Step/Run.zig +++ b/lib/std/Build/Step/Run.zig @@ -832,7 +832,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void { 
else => unreachable, }; b.cache_root.handle.makePath(output_sub_dir_path) catch |err| { - return step.fail("unable to make path '{}{s}': {s}", .{ + return step.fail("unable to make path '{f}{s}': {s}", .{ b.cache_root, output_sub_dir_path, @errorName(err), }); }; @@ -864,7 +864,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void { else => unreachable, }; b.cache_root.handle.makePath(output_sub_dir_path) catch |err| { - return step.fail("unable to make path '{}{s}': {s}", .{ + return step.fail("unable to make path '{f}{s}': {s}", .{ b.cache_root, output_sub_dir_path, @errorName(err), }); }; @@ -903,21 +903,21 @@ fn make(step: *Step, options: Step.MakeOptions) !void { b.cache_root.handle.rename(tmp_dir_path, o_sub_path) catch |err| { if (err == error.PathAlreadyExists) { b.cache_root.handle.deleteTree(o_sub_path) catch |del_err| { - return step.fail("unable to remove dir '{}'{s}: {s}", .{ + return step.fail("unable to remove dir '{f}'{s}: {s}", .{ b.cache_root, tmp_dir_path, @errorName(del_err), }); }; b.cache_root.handle.rename(tmp_dir_path, o_sub_path) catch |retry_err| { - return step.fail("unable to rename dir '{}{s}' to '{}{s}': {s}", .{ + return step.fail("unable to rename dir '{f}{s}' to '{f}{s}': {s}", .{ b.cache_root, tmp_dir_path, b.cache_root, o_sub_path, @errorName(retry_err), }); }; } else { - return step.fail("unable to rename dir '{}{s}' to '{}{s}': {s}", .{ + return step.fail("unable to rename dir '{f}{s}' to '{f}{s}': {s}", .{ b.cache_root, tmp_dir_path, b.cache_root, o_sub_path, @errorName(err), @@ -964,7 +964,7 @@ pub fn rerunInFuzzMode( .artifact => |pa| { const artifact = pa.artifact; const file_path: []const u8 = p: { - if (artifact == run.producer.?) break :p b.fmt("{}", .{run.rebuilt_executable.?}); + if (artifact == run.producer.?) 
break :p b.fmt("{f}", .{run.rebuilt_executable.?}); break :p artifact.installed_path orelse artifact.generated_bin.?.path.?; }; try argv_list.append(arena, b.fmt("{s}{s}", .{ @@ -1011,24 +1011,17 @@ fn populateGeneratedPaths( } } -fn formatTerm( - term: ?std.process.Child.Term, - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = fmt; - _ = options; +fn formatTerm(term: ?std.process.Child.Term, w: *std.io.Writer) std.io.Writer.Error!void { if (term) |t| switch (t) { - .Exited => |code| try writer.print("exited with code {}", .{code}), - .Signal => |sig| try writer.print("terminated with signal {}", .{sig}), - .Stopped => |sig| try writer.print("stopped with signal {}", .{sig}), - .Unknown => |code| try writer.print("terminated for unknown reason with code {}", .{code}), + .Exited => |code| try w.print("exited with code {d}", .{code}), + .Signal => |sig| try w.print("terminated with signal {d}", .{sig}), + .Stopped => |sig| try w.print("stopped with signal {d}", .{sig}), + .Unknown => |code| try w.print("terminated for unknown reason with code {d}", .{code}), } else { - try writer.writeAll("exited with any code"); + try w.writeAll("exited with any code"); } } -fn fmtTerm(term: ?std.process.Child.Term) std.fmt.Formatter(formatTerm) { +fn fmtTerm(term: ?std.process.Child.Term) std.fmt.Formatter(?std.process.Child.Term, formatTerm) { return .{ .data = term }; } @@ -1262,12 +1255,12 @@ fn runCommand( const sub_path = b.pathJoin(&output_components); const sub_path_dirname = fs.path.dirname(sub_path).?; b.cache_root.handle.makePath(sub_path_dirname) catch |err| { - return step.fail("unable to make path '{}{s}': {s}", .{ + return step.fail("unable to make path '{f}{s}': {s}", .{ b.cache_root, sub_path_dirname, @errorName(err), }); }; b.cache_root.handle.writeFile(.{ .sub_path = sub_path, .data = stream.bytes.? 
}) catch |err| { - return step.fail("unable to write file '{}{s}': {s}", .{ + return step.fail("unable to write file '{f}{s}': {s}", .{ b.cache_root, sub_path, @errorName(err), }); }; @@ -1346,7 +1339,7 @@ fn runCommand( }, .expect_term => |expected_term| { if (!termMatches(expected_term, result.term)) { - return step.fail("the following command {} (expected {}):\n{s}", .{ + return step.fail("the following command {f} (expected {f}):\n{s}", .{ fmtTerm(result.term), fmtTerm(expected_term), try Step.allocPrintCmd(arena, cwd, final_argv), @@ -1366,7 +1359,7 @@ fn runCommand( }; const expected_term: std.process.Child.Term = .{ .Exited = 0 }; if (!termMatches(expected_term, result.term)) { - return step.fail("{s}the following command {} (expected {}):\n{s}", .{ + return step.fail("{s}the following command {f} (expected {f}):\n{s}", .{ prefix, fmtTerm(result.term), fmtTerm(expected_term), @@ -1797,10 +1790,10 @@ fn evalGeneric(run: *Run, child: *std.process.Child) !StdIoResult { stdout_bytes = try poller.fifo(.stdout).toOwnedSlice(); stderr_bytes = try poller.fifo(.stderr).toOwnedSlice(); } else { - stdout_bytes = try stdout.reader().readAllAlloc(arena, run.max_stdio_size); + stdout_bytes = try stdout.deprecatedReader().readAllAlloc(arena, run.max_stdio_size); } } else if (child.stderr) |stderr| { - stderr_bytes = try stderr.reader().readAllAlloc(arena, run.max_stdio_size); + stderr_bytes = try stderr.deprecatedReader().readAllAlloc(arena, run.max_stdio_size); } if (stderr_bytes) |bytes| if (bytes.len > 0) { diff --git a/lib/std/Build/Step/UpdateSourceFiles.zig b/lib/std/Build/Step/UpdateSourceFiles.zig index d4a9565083..674e2a01c6 100644 --- a/lib/std/Build/Step/UpdateSourceFiles.zig +++ b/lib/std/Build/Step/UpdateSourceFiles.zig @@ -76,7 +76,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void { for (usf.output_source_files.items) |output_source_file| { if (fs.path.dirname(output_source_file.sub_path)) |dirname| { b.build_root.handle.makePath(dirname) catch |err| 
{ - return step.fail("unable to make path '{}{s}': {s}", .{ + return step.fail("unable to make path '{f}{s}': {s}", .{ b.build_root, dirname, @errorName(err), }); }; @@ -84,7 +84,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void { switch (output_source_file.contents) { .bytes => |bytes| { b.build_root.handle.writeFile(.{ .sub_path = output_source_file.sub_path, .data = bytes }) catch |err| { - return step.fail("unable to write file '{}{s}': {s}", .{ + return step.fail("unable to write file '{f}{s}': {s}", .{ b.build_root, output_source_file.sub_path, @errorName(err), }); }; @@ -101,7 +101,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void { output_source_file.sub_path, .{}, ) catch |err| { - return step.fail("unable to update file from '{s}' to '{}{s}': {s}", .{ + return step.fail("unable to update file from '{s}' to '{f}{s}': {s}", .{ source_path, b.build_root, output_source_file.sub_path, @errorName(err), }); }; diff --git a/lib/std/Build/Step/WriteFile.zig b/lib/std/Build/Step/WriteFile.zig index 29fba1c871..b1cfb3b42a 100644 --- a/lib/std/Build/Step/WriteFile.zig +++ b/lib/std/Build/Step/WriteFile.zig @@ -217,7 +217,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void { const src_dir_path = dir.source.getPath3(b, step); var src_dir = src_dir_path.root_dir.handle.openDir(src_dir_path.subPathOrDot(), .{ .iterate = true }) catch |err| { - return step.fail("unable to open source directory '{}': {s}", .{ + return step.fail("unable to open source directory '{f}': {s}", .{ src_dir_path, @errorName(err), }); }; @@ -258,7 +258,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void { write_file.generated_directory.path = try b.cache_root.join(arena, &.{ "o", &digest }); var cache_dir = b.cache_root.handle.makeOpenPath(cache_path, .{}) catch |err| { - return step.fail("unable to make path '{}{s}': {s}", .{ + return step.fail("unable to make path '{f}{s}': {s}", .{ b.cache_root, cache_path, @errorName(err), }); }; @@ -269,7 +269,7 @@ fn 
make(step: *Step, options: Step.MakeOptions) !void { for (write_file.files.items) |file| { if (fs.path.dirname(file.sub_path)) |dirname| { cache_dir.makePath(dirname) catch |err| { - return step.fail("unable to make path '{}{s}{c}{s}': {s}", .{ + return step.fail("unable to make path '{f}{s}{c}{s}': {s}", .{ b.cache_root, cache_path, fs.path.sep, dirname, @errorName(err), }); }; @@ -277,7 +277,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void { switch (file.contents) { .bytes => |bytes| { cache_dir.writeFile(.{ .sub_path = file.sub_path, .data = bytes }) catch |err| { - return step.fail("unable to write file '{}{s}{c}{s}': {s}", .{ + return step.fail("unable to write file '{f}{s}{c}{s}': {s}", .{ b.cache_root, cache_path, fs.path.sep, file.sub_path, @errorName(err), }); }; @@ -291,7 +291,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void { file.sub_path, .{}, ) catch |err| { - return step.fail("unable to update file from '{s}' to '{}{s}{c}{s}': {s}", .{ + return step.fail("unable to update file from '{s}' to '{f}{s}{c}{s}': {s}", .{ source_path, b.cache_root, cache_path, @@ -315,7 +315,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void { if (dest_dirname.len != 0) { cache_dir.makePath(dest_dirname) catch |err| { - return step.fail("unable to make path '{}{s}{c}{s}': {s}", .{ + return step.fail("unable to make path '{f}{s}{c}{s}': {s}", .{ b.cache_root, cache_path, fs.path.sep, dest_dirname, @errorName(err), }); }; @@ -338,7 +338,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void { dest_path, .{}, ) catch |err| { - return step.fail("unable to update file from '{}' to '{}{s}{c}{s}': {s}", .{ + return step.fail("unable to update file from '{f}' to '{f}{s}{c}{s}': {s}", .{ src_entry_path, b.cache_root, cache_path, fs.path.sep, dest_path, @errorName(err), }); }; diff --git a/lib/std/Build/Watch.zig b/lib/std/Build/Watch.zig index c48f6a0532..d6dec68806 100644 --- a/lib/std/Build/Watch.zig +++ b/lib/std/Build/Watch.zig @@ -211,7 +211,7 @@ 
const Os = switch (builtin.os.tag) { .ADD = true, .ONLYDIR = true, }, fan_mask, path.root_dir.handle.fd, path.subPathOrDot()) catch |err| { - fatal("unable to watch {}: {s}", .{ path, @errorName(err) }); + fatal("unable to watch {f}: {s}", .{ path, @errorName(err) }); }; } break :rs &dh_gop.value_ptr.reaction_set; @@ -265,7 +265,7 @@ const Os = switch (builtin.os.tag) { .ONLYDIR = true, }, fan_mask, path.root_dir.handle.fd, path.subPathOrDot()) catch |err| switch (err) { error.FileNotFound => {}, // Expected, harmless. - else => |e| std.log.warn("unable to unwatch '{}': {s}", .{ path, @errorName(e) }), + else => |e| std.log.warn("unable to unwatch '{f}': {s}", .{ path, @errorName(e) }), }; w.dir_table.swapRemoveAt(i); @@ -659,7 +659,7 @@ const Os = switch (builtin.os.tag) { path.root_dir.handle.fd else posix.openat(path.root_dir.handle.fd, path.sub_path, dir_open_flags, 0) catch |err| { - fatal("failed to open directory {}: {s}", .{ path, @errorName(err) }); + fatal("failed to open directory {f}: {s}", .{ path, @errorName(err) }); }; // Empirically the dir has to stay open or else no events are triggered. 
errdefer if (!skip_open_dir) posix.close(dir_fd); diff --git a/lib/std/Progress.zig b/lib/std/Progress.zig index 030f3f0a28..c9a866b0c8 100644 --- a/lib/std/Progress.zig +++ b/lib/std/Progress.zig @@ -9,6 +9,7 @@ const Progress = @This(); const posix = std.posix; const is_big_endian = builtin.cpu.arch.endian() == .big; const is_windows = builtin.os.tag == .windows; +const Writer = std.io.Writer; /// `null` if the current node (and its children) should /// not print on update() @@ -451,7 +452,7 @@ pub fn start(options: Options) Node { if (options.disable_printing) { return Node.none; } - const stderr = std.io.getStdErr(); + const stderr: std.fs.File = .stderr(); global_progress.terminal = stderr; if (stderr.getOrEnableAnsiEscapeSupport()) { global_progress.terminal_mode = .ansi_escape_codes; @@ -606,6 +607,36 @@ pub fn unlockStdErr() void { stderr_mutex.unlock(); } +/// Protected by `stderr_mutex`. +const stderr_writer: *Writer = &stderr_file_writer.interface; +/// Protected by `stderr_mutex`. +var stderr_file_writer: std.fs.File.Writer = .{ + .interface = std.fs.File.Writer.initInterface(&.{}), + .file = if (is_windows) undefined else .stderr(), + .mode = .streaming, +}; + +/// Allows the caller to freely write to the returned `Writer`, +/// initialized with `buffer`, until `unlockStderrWriter` is called. +/// +/// During the lock, any `std.Progress` information is cleared from the terminal. +/// +/// The lock is recursive; the same thread may hold the lock multiple times. 
+pub fn lockStderrWriter(buffer: []u8) *Writer { + stderr_mutex.lock(); + clearWrittenWithEscapeCodes() catch {}; + if (is_windows) stderr_file_writer.file = .stderr(); + stderr_writer.flush() catch {}; + stderr_writer.buffer = buffer; + return stderr_writer; +} + +pub fn unlockStderrWriter() void { + stderr_writer.flush() catch {}; + stderr_writer.buffer = &.{}; + stderr_mutex.unlock(); +} + fn ipcThreadRun(fd: posix.fd_t) anyerror!void { // Store this data in the thread so that it does not need to be part of the // linker data of the main executable. diff --git a/lib/std/Random/benchmark.zig b/lib/std/Random/benchmark.zig index ad76742f22..3141be3c3c 100644 --- a/lib/std/Random/benchmark.zig +++ b/lib/std/Random/benchmark.zig @@ -122,7 +122,7 @@ fn mode(comptime x: comptime_int) comptime_int { } pub fn main() !void { - const stdout = std.io.getStdOut().writer(); + const stdout = std.fs.File.stdout().deprecatedWriter(); var buffer: [1024]u8 = undefined; var fixed = std.heap.FixedBufferAllocator.init(buffer[0..]); diff --git a/lib/std/SemanticVersion.zig b/lib/std/SemanticVersion.zig index 7cb3888e54..52b5908693 100644 --- a/lib/std/SemanticVersion.zig +++ b/lib/std/SemanticVersion.zig @@ -150,17 +150,10 @@ fn parseNum(text: []const u8) error{ InvalidVersion, Overflow }!usize { }; } -pub fn format( - self: Version, - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - out_stream: anytype, -) !void { - _ = options; - if (fmt.len != 0) std.fmt.invalidFmtError(fmt, self); - try std.fmt.format(out_stream, "{d}.{d}.{d}", .{ self.major, self.minor, self.patch }); - if (self.pre) |pre| try std.fmt.format(out_stream, "-{s}", .{pre}); - if (self.build) |build| try std.fmt.format(out_stream, "+{s}", .{build}); +pub fn format(self: Version, w: *std.io.Writer) std.io.Writer.Error!void { + try w.print("{d}.{d}.{d}", .{ self.major, self.minor, self.patch }); + if (self.pre) |pre| try w.print("-{s}", .{pre}); + if (self.build) |build| try w.print("+{s}", .{build}); } 
const expect = std.testing.expect; @@ -202,7 +195,7 @@ test format { "1.0.0+0.build.1-rc.10000aaa-kk-0.1", "5.4.0-1018-raspi", "5.7.123", - }) |valid| try std.testing.expectFmt(valid, "{}", .{try parse(valid)}); + }) |valid| try std.testing.expectFmt(valid, "{f}", .{try parse(valid)}); // Invalid version strings should be rejected. for ([_][]const u8{ @@ -269,12 +262,12 @@ test format { // Valid version string that may overflow. const big_valid = "99999999999999999999999.999999999999999999.99999999999999999"; if (parse(big_valid)) |ver| { - try std.testing.expectFmt(big_valid, "{}", .{ver}); + try std.testing.expectFmt(big_valid, "{f}", .{ver}); } else |err| try expect(err == error.Overflow); // Invalid version string that may overflow. const big_invalid = "99999999999999999999999.999999999999999999.99999999999999999----RC-SNAPSHOT.12.09.1--------------------------------..12"; - if (parse(big_invalid)) |ver| std.debug.panic("expected error, found {}", .{ver}) else |_| {} + if (parse(big_invalid)) |ver| std.debug.panic("expected error, found {f}", .{ver}) else |_| {} } test "precedence" { diff --git a/lib/std/Target.zig b/lib/std/Target.zig index c3b37abb7e..91deb9ddc1 100644 --- a/lib/std/Target.zig +++ b/lib/std/Target.zig @@ -301,29 +301,13 @@ pub const Os = struct { /// This function is defined to serialize a Zig source code representation of this /// type, that, when parsed, will deserialize into the same data. 
- pub fn format( - ver: WindowsVersion, - comptime fmt_str: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { - const maybe_name = std.enums.tagName(WindowsVersion, ver); - if (comptime std.mem.eql(u8, fmt_str, "s")) { - if (maybe_name) |name| - try writer.print(".{s}", .{name}) - else - try writer.print(".{d}", .{@intFromEnum(ver)}); - } else if (comptime std.mem.eql(u8, fmt_str, "c")) { - if (maybe_name) |name| - try writer.print(".{s}", .{name}) - else - try writer.print("@enumFromInt(0x{X:0>8})", .{@intFromEnum(ver)}); - } else if (fmt_str.len == 0) { - if (maybe_name) |name| - try writer.print("WindowsVersion.{s}", .{name}) - else - try writer.print("WindowsVersion(0x{X:0>8})", .{@intFromEnum(ver)}); - } else std.fmt.invalidFmtError(fmt_str, ver); + pub fn format(wv: WindowsVersion, w: *std.io.Writer) std.io.Writer.Error!void { + if (std.enums.tagName(WindowsVersion, wv)) |name| { + var vecs: [2][]const u8 = .{ ".", name }; + return w.writeVecAll(&vecs); + } else { + return w.print("@enumFromInt(0x{X:0>8})", .{wv}); + } } }; diff --git a/lib/std/Target/Query.zig b/lib/std/Target/Query.zig index e453b70e5c..2d3b0f4436 100644 --- a/lib/std/Target/Query.zig +++ b/lib/std/Target/Query.zig @@ -394,25 +394,24 @@ pub fn canDetectLibC(self: Query) bool { /// Formats a version with the patch component omitted if it is zero, /// unlike SemanticVersion.format which formats all its version components regardless. 
-fn formatVersion(version: SemanticVersion, writer: anytype) !void { +fn formatVersion(version: SemanticVersion, gpa: Allocator, list: *std.ArrayListUnmanaged(u8)) !void { if (version.patch == 0) { - try writer.print("{d}.{d}", .{ version.major, version.minor }); + try list.print(gpa, "{d}.{d}", .{ version.major, version.minor }); } else { - try writer.print("{d}.{d}.{d}", .{ version.major, version.minor, version.patch }); + try list.print(gpa, "{d}.{d}.{d}", .{ version.major, version.minor, version.patch }); } } -pub fn zigTriple(self: Query, allocator: Allocator) Allocator.Error![]u8 { - if (self.isNativeTriple()) - return allocator.dupe(u8, "native"); +pub fn zigTriple(self: Query, gpa: Allocator) Allocator.Error![]u8 { + if (self.isNativeTriple()) return gpa.dupe(u8, "native"); const arch_name = if (self.cpu_arch) |arch| @tagName(arch) else "native"; const os_name = if (self.os_tag) |os_tag| @tagName(os_tag) else "native"; - var result = std.ArrayList(u8).init(allocator); - defer result.deinit(); + var result: std.ArrayListUnmanaged(u8) = .empty; + defer result.deinit(gpa); - try result.writer().print("{s}-{s}", .{ arch_name, os_name }); + try result.print(gpa, "{s}-{s}", .{ arch_name, os_name }); // The zig target syntax does not allow specifying a max os version with no min, so // if either are present, we need the min. 
@@ -420,11 +419,11 @@ pub fn zigTriple(self: Query, allocator: Allocator) Allocator.Error![]u8 { switch (min) { .none => {}, .semver => |v| { - try result.writer().writeAll("."); - try formatVersion(v, result.writer()); + try result.appendSlice(gpa, "."); + try formatVersion(v, gpa, &result); }, .windows => |v| { - try result.writer().print("{s}", .{v}); + try result.print(gpa, "{d}", .{v}); }, } } @@ -432,39 +431,39 @@ pub fn zigTriple(self: Query, allocator: Allocator) Allocator.Error![]u8 { switch (max) { .none => {}, .semver => |v| { - try result.writer().writeAll("..."); - try formatVersion(v, result.writer()); + try result.appendSlice(gpa, "..."); + try formatVersion(v, gpa, &result); }, .windows => |v| { // This is counting on a custom format() function defined on `WindowsVersion` // to add a prefix '.' and make there be a total of three dots. - try result.writer().print("..{s}", .{v}); + try result.print(gpa, "..{d}", .{v}); }, } } if (self.glibc_version) |v| { const name = if (self.abi) |abi| @tagName(abi) else "gnu"; - try result.ensureUnusedCapacity(name.len + 2); + try result.ensureUnusedCapacity(gpa, name.len + 2); result.appendAssumeCapacity('-'); result.appendSliceAssumeCapacity(name); result.appendAssumeCapacity('.'); - try formatVersion(v, result.writer()); + try formatVersion(v, gpa, &result); } else if (self.android_api_level) |lvl| { const name = if (self.abi) |abi| @tagName(abi) else "android"; - try result.ensureUnusedCapacity(name.len + 2); + try result.ensureUnusedCapacity(gpa, name.len + 2); result.appendAssumeCapacity('-'); result.appendSliceAssumeCapacity(name); result.appendAssumeCapacity('.'); - try result.writer().print("{d}", .{lvl}); + try result.print(gpa, "{d}", .{lvl}); } else if (self.abi) |abi| { const name = @tagName(abi); - try result.ensureUnusedCapacity(name.len + 1); + try result.ensureUnusedCapacity(gpa, name.len + 1); result.appendAssumeCapacity('-'); result.appendSliceAssumeCapacity(name); } - return 
result.toOwnedSlice(); + return result.toOwnedSlice(gpa); } /// Renders the query into a textual representation that can be parsed via the diff --git a/lib/std/Thread.zig b/lib/std/Thread.zig index bb46bd3f24..fd2111da71 100644 --- a/lib/std/Thread.zig +++ b/lib/std/Thread.zig @@ -167,7 +167,7 @@ pub fn setName(self: Thread, name: []const u8) SetNameError!void { const file = try std.fs.cwd().openFile(path, .{ .mode = .write_only }); defer file.close(); - try file.writer().writeAll(name); + try file.deprecatedWriter().writeAll(name); return; }, .windows => { @@ -281,7 +281,7 @@ pub fn getName(self: Thread, buffer_ptr: *[max_name_len:0]u8) GetNameError!?[]co const file = try std.fs.cwd().openFile(path, .{}); defer file.close(); - const data_len = try file.reader().readAll(buffer_ptr[0 .. max_name_len + 1]); + const data_len = try file.deprecatedReader().readAll(buffer_ptr[0 .. max_name_len + 1]); return if (data_len >= 1) buffer[0 .. data_len - 1] else null; }, @@ -1163,7 +1163,7 @@ const LinuxThreadImpl = struct { fn getCurrentId() Id { return tls_thread_id orelse { - const tid = @as(u32, @bitCast(linux.gettid())); + const tid: u32 = @bitCast(linux.gettid()); tls_thread_id = tid; return tid; }; diff --git a/lib/std/Uri.zig b/lib/std/Uri.zig index ee0c602125..19af1512c2 100644 --- a/lib/std/Uri.zig +++ b/lib/std/Uri.zig @@ -1,6 +1,10 @@ //! Uniform Resource Identifier (URI) parsing roughly adhering to . //! Does not do perfect grammar and character class checking, but should be robust against URIs in the wild. 
+const std = @import("std.zig"); +const testing = std.testing; +const Uri = @This(); + scheme: []const u8, user: ?Component = null, password: ?Component = null, @@ -34,27 +38,15 @@ pub const Component = union(enum) { return switch (component) { .raw => |raw| raw, .percent_encoded => |percent_encoded| if (std.mem.indexOfScalar(u8, percent_encoded, '%')) |_| - try std.fmt.allocPrint(arena, "{raw}", .{component}) + try std.fmt.allocPrint(arena, "{f}", .{std.fmt.alt(component, .formatRaw)}) else percent_encoded, }; } - pub fn format( - component: Component, - comptime fmt_str: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { - if (fmt_str.len == 0) { - try writer.print("std.Uri.Component{{ .{s} = \"{}\" }}", .{ - @tagName(component), - std.zig.fmtEscapes(switch (component) { - .raw, .percent_encoded => |string| string, - }), - }); - } else if (comptime std.mem.eql(u8, fmt_str, "raw")) switch (component) { - .raw => |raw| try writer.writeAll(raw), + pub fn formatRaw(component: Component, w: *std.io.Writer) std.io.Writer.Error!void { + switch (component) { + .raw => |raw| try w.writeAll(raw), .percent_encoded => |percent_encoded| { var start: usize = 0; var index: usize = 0; @@ -63,51 +55,75 @@ pub const Component = union(enum) { if (percent_encoded.len - index < 2) continue; const percent_encoded_char = std.fmt.parseInt(u8, percent_encoded[index..][0..2], 16) catch continue; - try writer.print("{s}{c}", .{ + try w.print("{s}{c}", .{ percent_encoded[start..percent], percent_encoded_char, }); start = percent + 3; index = percent + 3; } - try writer.writeAll(percent_encoded[start..]); + try w.writeAll(percent_encoded[start..]); }, - } else if (comptime std.mem.eql(u8, fmt_str, "%")) switch (component) { - .raw => |raw| try percentEncode(writer, raw, isUnreserved), - .percent_encoded => |percent_encoded| try writer.writeAll(percent_encoded), - } else if (comptime std.mem.eql(u8, fmt_str, "user")) switch (component) { - .raw => 
|raw| try percentEncode(writer, raw, isUserChar), - .percent_encoded => |percent_encoded| try writer.writeAll(percent_encoded), - } else if (comptime std.mem.eql(u8, fmt_str, "password")) switch (component) { - .raw => |raw| try percentEncode(writer, raw, isPasswordChar), - .percent_encoded => |percent_encoded| try writer.writeAll(percent_encoded), - } else if (comptime std.mem.eql(u8, fmt_str, "host")) switch (component) { - .raw => |raw| try percentEncode(writer, raw, isHostChar), - .percent_encoded => |percent_encoded| try writer.writeAll(percent_encoded), - } else if (comptime std.mem.eql(u8, fmt_str, "path")) switch (component) { - .raw => |raw| try percentEncode(writer, raw, isPathChar), - .percent_encoded => |percent_encoded| try writer.writeAll(percent_encoded), - } else if (comptime std.mem.eql(u8, fmt_str, "query")) switch (component) { - .raw => |raw| try percentEncode(writer, raw, isQueryChar), - .percent_encoded => |percent_encoded| try writer.writeAll(percent_encoded), - } else if (comptime std.mem.eql(u8, fmt_str, "fragment")) switch (component) { - .raw => |raw| try percentEncode(writer, raw, isFragmentChar), - .percent_encoded => |percent_encoded| try writer.writeAll(percent_encoded), - } else @compileError("invalid format string '" ++ fmt_str ++ "'"); + } } - pub fn percentEncode( - writer: anytype, - raw: []const u8, - comptime isValidChar: fn (u8) bool, - ) @TypeOf(writer).Error!void { + pub fn formatEscaped(component: Component, w: *std.io.Writer) std.io.Writer.Error!void { + switch (component) { + .raw => |raw| try percentEncode(w, raw, isUnreserved), + .percent_encoded => |percent_encoded| try w.writeAll(percent_encoded), + } + } + + pub fn formatUser(component: Component, w: *std.io.Writer) std.io.Writer.Error!void { + switch (component) { + .raw => |raw| try percentEncode(w, raw, isUserChar), + .percent_encoded => |percent_encoded| try w.writeAll(percent_encoded), + } + } + + pub fn formatPassword(component: Component, w: *std.io.Writer) 
std.io.Writer.Error!void { + switch (component) { + .raw => |raw| try percentEncode(w, raw, isPasswordChar), + .percent_encoded => |percent_encoded| try w.writeAll(percent_encoded), + } + } + + pub fn formatHost(component: Component, w: *std.io.Writer) std.io.Writer.Error!void { + switch (component) { + .raw => |raw| try percentEncode(w, raw, isHostChar), + .percent_encoded => |percent_encoded| try w.writeAll(percent_encoded), + } + } + + pub fn formatPath(component: Component, w: *std.io.Writer) std.io.Writer.Error!void { + switch (component) { + .raw => |raw| try percentEncode(w, raw, isPathChar), + .percent_encoded => |percent_encoded| try w.writeAll(percent_encoded), + } + } + + pub fn formatQuery(component: Component, w: *std.io.Writer) std.io.Writer.Error!void { + switch (component) { + .raw => |raw| try percentEncode(w, raw, isQueryChar), + .percent_encoded => |percent_encoded| try w.writeAll(percent_encoded), + } + } + + pub fn formatFragment(component: Component, w: *std.io.Writer) std.io.Writer.Error!void { + switch (component) { + .raw => |raw| try percentEncode(w, raw, isFragmentChar), + .percent_encoded => |percent_encoded| try w.writeAll(percent_encoded), + } + } + + pub fn percentEncode(w: *std.io.Writer, raw: []const u8, comptime isValidChar: fn (u8) bool) std.io.Writer.Error!void { var start: usize = 0; for (raw, 0..) |char, index| { if (isValidChar(char)) continue; - try writer.print("{s}%{X:0>2}", .{ raw[start..index], char }); + try w.print("{s}%{X:0>2}", .{ raw[start..index], char }); start = index + 1; } - try writer.writeAll(raw[start..]); + try w.writeAll(raw[start..]); } }; @@ -224,91 +240,91 @@ pub fn parseAfterScheme(scheme: []const u8, text: []const u8) ParseError!Uri { return uri; } -pub const WriteToStreamOptions = struct { - /// When true, include the scheme part of the URI. 
- scheme: bool = false, +pub fn format(uri: *const Uri, writer: *std.io.Writer) std.io.Writer.Error!void { + return writeToStream(uri, writer, .all); +} - /// When true, include the user and password part of the URI. Ignored if `authority` is false. - authentication: bool = false, - - /// When true, include the authority part of the URI. - authority: bool = false, - - /// When true, include the path part of the URI. - path: bool = false, - - /// When true, include the query part of the URI. Ignored when `path` is false. - query: bool = false, - - /// When true, include the fragment part of the URI. Ignored when `path` is false. - fragment: bool = false, - - /// When true, include the port part of the URI. Ignored when `port` is null. - port: bool = true, -}; - -pub fn writeToStream( - uri: Uri, - options: WriteToStreamOptions, - writer: anytype, -) @TypeOf(writer).Error!void { - if (options.scheme) { +pub fn writeToStream(uri: *const Uri, writer: *std.io.Writer, flags: Format.Flags) std.io.Writer.Error!void { + if (flags.scheme) { try writer.print("{s}:", .{uri.scheme}); - if (options.authority and uri.host != null) { + if (flags.authority and uri.host != null) { try writer.writeAll("//"); } } - if (options.authority) { - if (options.authentication and uri.host != null) { + if (flags.authority) { + if (flags.authentication and uri.host != null) { if (uri.user) |user| { - try writer.print("{user}", .{user}); + try user.formatUser(writer); if (uri.password) |password| { - try writer.print(":{password}", .{password}); + try writer.writeByte(':'); + try password.formatPassword(writer); } try writer.writeByte('@'); } } if (uri.host) |host| { - try writer.print("{host}", .{host}); - if (options.port) { + try host.formatHost(writer); + if (flags.port) { if (uri.port) |port| try writer.print(":{d}", .{port}); } } } - if (options.path) { - try writer.print("{path}", .{ - if (uri.path.isEmpty()) Uri.Component{ .percent_encoded = "/" } else uri.path, - }); - if 
(options.query) { - if (uri.query) |query| try writer.print("?{query}", .{query}); + if (flags.path) { + const uri_path: Component = if (uri.path.isEmpty()) .{ .percent_encoded = "/" } else uri.path; + try uri_path.formatPath(writer); + if (flags.query) { + if (uri.query) |query| { + try writer.writeByte('?'); + try query.formatQuery(writer); + } } - if (options.fragment) { - if (uri.fragment) |fragment| try writer.print("#{fragment}", .{fragment}); + if (flags.fragment) { + if (uri.fragment) |fragment| { + try writer.writeByte('#'); + try fragment.formatFragment(writer); + } } } } -pub fn format( - uri: Uri, - comptime fmt_str: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, -) @TypeOf(writer).Error!void { - const scheme = comptime std.mem.indexOfScalar(u8, fmt_str, ';') != null or fmt_str.len == 0; - const authentication = comptime std.mem.indexOfScalar(u8, fmt_str, '@') != null or fmt_str.len == 0; - const authority = comptime std.mem.indexOfScalar(u8, fmt_str, '+') != null or fmt_str.len == 0; - const path = comptime std.mem.indexOfScalar(u8, fmt_str, '/') != null or fmt_str.len == 0; - const query = comptime std.mem.indexOfScalar(u8, fmt_str, '?') != null or fmt_str.len == 0; - const fragment = comptime std.mem.indexOfScalar(u8, fmt_str, '#') != null or fmt_str.len == 0; +pub const Format = struct { + uri: *const Uri, + flags: Flags = .{}, - return writeToStream(uri, .{ - .scheme = scheme, - .authentication = authentication, - .authority = authority, - .path = path, - .query = query, - .fragment = fragment, - }, writer); + pub const Flags = struct { + /// When true, include the scheme part of the URI. + scheme: bool = false, + /// When true, include the user and password part of the URI. Ignored if `authority` is false. + authentication: bool = false, + /// When true, include the authority part of the URI. + authority: bool = false, + /// When true, include the path part of the URI. 
+ path: bool = false, + /// When true, include the query part of the URI. Ignored when `path` is false. + query: bool = false, + /// When true, include the fragment part of the URI. Ignored when `path` is false. + fragment: bool = false, + /// When true, include the port part of the URI. Ignored when `port` is null. + port: bool = true, + + pub const all: Flags = .{ + .scheme = true, + .authentication = true, + .authority = true, + .path = true, + .query = true, + .fragment = true, + .port = true, + }; + }; + + pub fn default(f: Format, writer: *std.io.Writer) std.io.Writer.Error!void { + return writeToStream(f.uri, writer, f.flags); + } +}; + +pub fn fmt(uri: *const Uri, flags: Format.Flags) std.fmt.Formatter(Format, Format.default) { + return .{ .data = .{ .uri = uri, .flags = flags } }; } /// Parses the URI or returns an error. @@ -445,14 +461,13 @@ test remove_dot_segments { /// 5.2.3. Merge Paths fn merge_paths(base: Component, new: []u8, aux_buf: *[]u8) error{NoSpaceLeft}!Component { - var aux = std.io.fixedBufferStream(aux_buf.*); + var aux: std.io.Writer = .fixed(aux_buf.*); if (!base.isEmpty()) { - try aux.writer().print("{path}", .{base}); - aux.pos = std.mem.lastIndexOfScalar(u8, aux.getWritten(), '/') orelse - return remove_dot_segments(new); + base.formatPath(&aux) catch return error.NoSpaceLeft; + aux.end = std.mem.lastIndexOfScalar(u8, aux.buffered(), '/') orelse return remove_dot_segments(new); } - try aux.writer().print("/{s}", .{new}); - const merged_path = remove_dot_segments(aux.getWritten()); + aux.print("/{s}", .{new}) catch return error.NoSpaceLeft; + const merged_path = remove_dot_segments(aux.buffered()); aux_buf.* = aux_buf.*[merged_path.percent_encoded.len..]; return merged_path; } @@ -812,8 +827,11 @@ test "Special test" { test "URI percent encoding" { try std.testing.expectFmt( "%5C%C3%B6%2F%20%C3%A4%C3%B6%C3%9F%20~~.adas-https%3A%2F%2Fcanvas%3A123%2F%23ads%26%26sad", - "{%}", - .{Component{ .raw = "\\ö/ äöß 
~~.adas-https://canvas:123/#ads&&sad" }}, + "{f}", + .{std.fmt.alt( + @as(Component, .{ .raw = "\\ö/ äöß ~~.adas-https://canvas:123/#ads&&sad" }), + .formatEscaped, + )}, ); } @@ -822,7 +840,10 @@ test "URI percent decoding" { const expected = "\\ö/ äöß ~~.adas-https://canvas:123/#ads&&sad"; var input = "%5C%C3%B6%2F%20%C3%A4%C3%B6%C3%9F%20~~.adas-https%3A%2F%2Fcanvas%3A123%2F%23ads%26%26sad".*; - try std.testing.expectFmt(expected, "{raw}", .{Component{ .percent_encoded = &input }}); + try std.testing.expectFmt(expected, "{f}", .{std.fmt.alt( + @as(Component, .{ .percent_encoded = &input }), + .formatRaw, + )}); var output: [expected.len]u8 = undefined; try std.testing.expectEqualStrings(percentDecodeBackwards(&output, &input), expected); @@ -834,7 +855,10 @@ test "URI percent decoding" { const expected = "/abc%"; var input = expected.*; - try std.testing.expectFmt(expected, "{raw}", .{Component{ .percent_encoded = &input }}); + try std.testing.expectFmt(expected, "{f}", .{std.fmt.alt( + @as(Component, .{ .percent_encoded = &input }), + .formatRaw, + )}); var output: [expected.len]u8 = undefined; try std.testing.expectEqualStrings(percentDecodeBackwards(&output, &input), expected); @@ -848,7 +872,9 @@ test "URI query encoding" { const parsed = try Uri.parse(address); // format the URI to percent encode it - try std.testing.expectFmt("/?response-content-type=application%2Foctet-stream", "{/?}", .{parsed}); + try std.testing.expectFmt("/?response-content-type=application%2Foctet-stream", "{f}", .{ + parsed.fmt(.{ .path = true, .query = true }), + }); } test "format" { @@ -862,7 +888,9 @@ test "format" { .query = null, .fragment = null, }; - try std.testing.expectFmt("file:/foo/bar/baz", "{;/?#}", .{uri}); + try std.testing.expectFmt("file:/foo/bar/baz", "{f}", .{ + uri.fmt(.{ .scheme = true, .path = true, .query = true, .fragment = true }), + }); } test "URI malformed input" { @@ -870,7 +898,3 @@ test "URI malformed input" { try 
std.testing.expectError(error.InvalidFormat, std.Uri.parse("http://]@[")); try std.testing.expectError(error.InvalidFormat, std.Uri.parse("http://lo]s\x85hc@[/8\x10?0Q")); } - -const std = @import("std.zig"); -const testing = std.testing; -const Uri = @This(); diff --git a/lib/std/array_list.zig b/lib/std/array_list.zig index 2a9159aeac..c3fade794f 100644 --- a/lib/std/array_list.zig +++ b/lib/std/array_list.zig @@ -338,11 +338,14 @@ pub fn ArrayListAligned(comptime T: type, comptime alignment: ?mem.Alignment) ty @memcpy(self.items[old_len..][0..items.len], items); } - pub const Writer = if (T != u8) - @compileError("The Writer interface is only defined for ArrayList(u8) " ++ - "but the given type is ArrayList(" ++ @typeName(T) ++ ")") - else - std.io.Writer(*Self, Allocator.Error, appendWrite); + pub fn print(self: *Self, comptime fmt: []const u8, args: anytype) error{OutOfMemory}!void { + const gpa = self.allocator; + var unmanaged = self.moveToUnmanaged(); + defer self.* = unmanaged.toManaged(gpa); + try unmanaged.print(gpa, fmt, args); + } + + pub const Writer = if (T != u8) void else std.io.GenericWriter(*Self, Allocator.Error, appendWrite); /// Initializes a Writer which will append to the list. pub fn writer(self: *Self) Writer { @@ -350,14 +353,14 @@ pub fn ArrayListAligned(comptime T: type, comptime alignment: ?mem.Alignment) ty } /// Same as `append` except it returns the number of bytes written, which is always the same - /// as `m.len`. The purpose of this function existing is to match `std.io.Writer` API. + /// as `m.len`. The purpose of this function existing is to match `std.io.GenericWriter` API. /// Invalidates element pointers if additional memory is needed. 
fn appendWrite(self: *Self, m: []const u8) Allocator.Error!usize { try self.appendSlice(m); return m.len; } - pub const FixedWriter = std.io.Writer(*Self, Allocator.Error, appendWriteFixed); + pub const FixedWriter = std.io.GenericWriter(*Self, Allocator.Error, appendWriteFixed); /// Initializes a Writer which will append to the list but will return /// `error.OutOfMemory` rather than increasing capacity. @@ -365,7 +368,7 @@ pub fn ArrayListAligned(comptime T: type, comptime alignment: ?mem.Alignment) ty return .{ .context = self }; } - /// The purpose of this function existing is to match `std.io.Writer` API. + /// The purpose of this function existing is to match `std.io.GenericWriter` API. fn appendWriteFixed(self: *Self, m: []const u8) error{OutOfMemory}!usize { const available_capacity = self.capacity - self.items.len; if (m.len > available_capacity) @@ -933,40 +936,56 @@ pub fn ArrayListAlignedUnmanaged(comptime T: type, comptime alignment: ?mem.Alig @memcpy(self.items[old_len..][0..items.len], items); } + pub fn print(self: *Self, gpa: Allocator, comptime fmt: []const u8, args: anytype) error{OutOfMemory}!void { + comptime assert(T == u8); + try self.ensureUnusedCapacity(gpa, fmt.len); + var aw: std.io.Writer.Allocating = .fromArrayList(gpa, self); + defer self.* = aw.toArrayList(); + return aw.writer.print(fmt, args) catch |err| switch (err) { + error.WriteFailed => return error.OutOfMemory, + }; + } + + pub fn printAssumeCapacity(self: *Self, comptime fmt: []const u8, args: anytype) void { + comptime assert(T == u8); + var w: std.io.Writer = .fixed(self.unusedCapacitySlice()); + w.print(fmt, args) catch unreachable; + self.items.len += w.end; + } + + /// Deprecated in favor of `print` or `std.io.Writer.Allocating`. pub const WriterContext = struct { self: *Self, allocator: Allocator, }; + /// Deprecated in favor of `print` or `std.io.Writer.Allocating`. 
pub const Writer = if (T != u8) @compileError("The Writer interface is only defined for ArrayList(u8) " ++ "but the given type is ArrayList(" ++ @typeName(T) ++ ")") else - std.io.Writer(WriterContext, Allocator.Error, appendWrite); + std.io.GenericWriter(WriterContext, Allocator.Error, appendWrite); - /// Initializes a Writer which will append to the list. + /// Deprecated in favor of `print` or `std.io.Writer.Allocating`. pub fn writer(self: *Self, gpa: Allocator) Writer { return .{ .context = .{ .self = self, .allocator = gpa } }; } - /// Same as `append` except it returns the number of bytes written, - /// which is always the same as `m.len`. The purpose of this function - /// existing is to match `std.io.Writer` API. - /// Invalidates element pointers if additional memory is needed. + /// Deprecated in favor of `print` or `std.io.Writer.Allocating`. fn appendWrite(context: WriterContext, m: []const u8) Allocator.Error!usize { try context.self.appendSlice(context.allocator, m); return m.len; } - pub const FixedWriter = std.io.Writer(*Self, Allocator.Error, appendWriteFixed); + /// Deprecated in favor of `print` or `std.io.Writer.Allocating`. + pub const FixedWriter = std.io.GenericWriter(*Self, Allocator.Error, appendWriteFixed); - /// Initializes a Writer which will append to the list but will return - /// `error.OutOfMemory` rather than increasing capacity. + /// Deprecated in favor of `print` or `std.io.Writer.Allocating`. pub fn fixedWriter(self: *Self) FixedWriter { return .{ .context = self }; } - /// The purpose of this function existing is to match `std.io.Writer` API. + /// Deprecated in favor of `print` or `std.io.Writer.Allocating`. 
fn appendWriteFixed(self: *Self, m: []const u8) error{OutOfMemory}!usize { const available_capacity = self.capacity - self.items.len; if (m.len > available_capacity) diff --git a/lib/std/ascii.zig b/lib/std/ascii.zig index a88b637ec0..99bebf09ab 100644 --- a/lib/std/ascii.zig +++ b/lib/std/ascii.zig @@ -10,6 +10,10 @@ const std = @import("std"); +pub const lowercase = "abcdefghijklmnopqrstuvwxyz"; +pub const uppercase = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"; +pub const letters = lowercase ++ uppercase; + /// The C0 control codes of the ASCII encoding. /// /// See also: https://en.wikipedia.org/wiki/C0_and_C1_control_codes and `isControl` @@ -435,3 +439,44 @@ pub fn orderIgnoreCase(lhs: []const u8, rhs: []const u8) std.math.Order { pub fn lessThanIgnoreCase(lhs: []const u8, rhs: []const u8) bool { return orderIgnoreCase(lhs, rhs) == .lt; } + +pub const HexEscape = struct { + bytes: []const u8, + charset: *const [16]u8, + + pub const upper_charset = "0123456789ABCDEF"; + pub const lower_charset = "0123456789abcdef"; + + pub fn format(se: HexEscape, w: *std.io.Writer) std.io.Writer.Error!void { + const charset = se.charset; + + var buf: [4]u8 = undefined; + buf[0] = '\\'; + buf[1] = 'x'; + + for (se.bytes) |c| { + if (std.ascii.isPrint(c)) { + try w.writeByte(c); + } else { + buf[2] = charset[c >> 4]; + buf[3] = charset[c & 15]; + try w.writeAll(&buf); + } + } + } +}; + +/// Replaces non-ASCII bytes with hex escapes. 
+pub fn hexEscape(bytes: []const u8, case: std.fmt.Case) std.fmt.Formatter(HexEscape, HexEscape.format) { + return .{ .data = .{ .bytes = bytes, .charset = switch (case) { + .lower => HexEscape.lower_charset, + .upper => HexEscape.upper_charset, + } } }; +} + +test hexEscape { + try std.testing.expectFmt("abc 123", "{f}", .{hexEscape("abc 123", .lower)}); + try std.testing.expectFmt("ab\\xffc", "{f}", .{hexEscape("ab\xffc", .lower)}); + try std.testing.expectFmt("abc 123", "{f}", .{hexEscape("abc 123", .upper)}); + try std.testing.expectFmt("ab\\xFFc", "{f}", .{hexEscape("ab\xffc", .upper)}); +} diff --git a/lib/std/base64.zig b/lib/std/base64.zig index e88b723439..a84f4a0b4f 100644 --- a/lib/std/base64.zig +++ b/lib/std/base64.zig @@ -108,7 +108,7 @@ pub const Base64Encoder = struct { } } - // dest must be compatible with std.io.Writer's writeAll interface + // dest must be compatible with std.io.GenericWriter's writeAll interface pub fn encodeWriter(encoder: *const Base64Encoder, dest: anytype, source: []const u8) !void { var chunker = window(u8, source, 3, 3); while (chunker.next()) |chunk| { @@ -118,8 +118,8 @@ pub const Base64Encoder = struct { } } - // destWriter must be compatible with std.io.Writer's writeAll interface - // sourceReader must be compatible with std.io.Reader's read interface + // destWriter must be compatible with std.io.GenericWriter's writeAll interface + // sourceReader must be compatible with `std.io.GenericReader` read interface pub fn encodeFromReaderToWriter(encoder: *const Base64Encoder, destWriter: anytype, sourceReader: anytype) !void { while (true) { var tempSource: [3]u8 = undefined; diff --git a/lib/std/bounded_array.zig b/lib/std/bounded_array.zig index 1a4407e687..7864dfb775 100644 --- a/lib/std/bounded_array.zig +++ b/lib/std/bounded_array.zig @@ -277,7 +277,7 @@ pub fn BoundedArrayAligned( @compileError("The Writer interface is only defined for BoundedArray(u8, ...) 
" ++ "but the given type is BoundedArray(" ++ @typeName(T) ++ ", ...)") else - std.io.Writer(*Self, error{Overflow}, appendWrite); + std.io.GenericWriter(*Self, error{Overflow}, appendWrite); /// Initializes a writer which will write into the array. pub fn writer(self: *Self) Writer { @@ -285,7 +285,7 @@ pub fn BoundedArrayAligned( } /// Same as `appendSlice` except it returns the number of bytes written, which is always the same - /// as `m.len`. The purpose of this function existing is to match `std.io.Writer` API. + /// as `m.len`. The purpose of this function existing is to match `std.io.GenericWriter` API. fn appendWrite(self: *Self, m: []const u8) error{Overflow}!usize { try self.appendSlice(m); return m.len; diff --git a/lib/std/builtin.zig b/lib/std/builtin.zig index 548b308cad..a69a74e93d 100644 --- a/lib/std/builtin.zig +++ b/lib/std/builtin.zig @@ -34,24 +34,16 @@ pub const StackTrace = struct { index: usize, instruction_addresses: []usize, - pub fn format( - self: StackTrace, - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - if (fmt.len != 0) std.fmt.invalidFmtError(fmt, self); - + pub fn format(self: StackTrace, writer: *std.io.Writer) std.io.Writer.Error!void { // TODO: re-evaluate whether to use format() methods at all. // Until then, avoid an error when using GeneralPurposeAllocator with WebAssembly // where it tries to call detectTTYConfig here. 
if (builtin.os.tag == .freestanding) return; - _ = options; const debug_info = std.debug.getSelfDebugInfo() catch |err| { return writer.print("\nUnable to print stack trace: Unable to open debug info: {s}\n", .{@errorName(err)}); }; - const tty_config = std.io.tty.detectConfig(std.io.getStdErr()); + const tty_config = std.io.tty.detectConfig(std.fs.File.stderr()); try writer.writeAll("\n"); std.debug.writeStackTrace(self, writer, debug_info, tty_config) catch |err| { try writer.print("Unable to print stack trace: {s}\n", .{@errorName(err)}); diff --git a/lib/std/compress.zig b/lib/std/compress.zig index 7cc4a80d33..e07c3a4126 100644 --- a/lib/std/compress.zig +++ b/lib/std/compress.zig @@ -16,7 +16,7 @@ pub fn HashedReader(ReaderType: type, HasherType: type) type { hasher: HasherType, pub const Error = ReaderType.Error; - pub const Reader = std.io.Reader(*@This(), Error, read); + pub const Reader = std.io.GenericReader(*@This(), Error, read); pub fn read(self: *@This(), buf: []u8) Error!usize { const amt = try self.child_reader.read(buf); @@ -43,7 +43,7 @@ pub fn HashedWriter(WriterType: type, HasherType: type) type { hasher: HasherType, pub const Error = WriterType.Error; - pub const Writer = std.io.Writer(*@This(), Error, write); + pub const Writer = std.io.GenericWriter(*@This(), Error, write); pub fn write(self: *@This(), buf: []const u8) Error!usize { const amt = try self.child_writer.write(buf); diff --git a/lib/std/compress/flate/deflate.zig b/lib/std/compress/flate/deflate.zig index e953ecb354..fd93236000 100644 --- a/lib/std/compress/flate/deflate.zig +++ b/lib/std/compress/flate/deflate.zig @@ -355,7 +355,7 @@ fn Deflate(comptime container: Container, comptime WriterType: type, comptime Bl // Writer interface - pub const Writer = io.Writer(*Self, Error, write); + pub const Writer = io.GenericWriter(*Self, Error, write); pub const Error = BlockWriterType.Error; /// Write `input` of uncompressed data. 
@@ -512,7 +512,7 @@ fn SimpleCompressor( // Writer interface - pub const Writer = io.Writer(*Self, Error, write); + pub const Writer = io.GenericWriter(*Self, Error, write); pub const Error = BlockWriterType.Error; // Write `input` of uncompressed data. diff --git a/lib/std/compress/flate/inflate.zig b/lib/std/compress/flate/inflate.zig index bb4d158aca..2fcf3cafd4 100644 --- a/lib/std/compress/flate/inflate.zig +++ b/lib/std/compress/flate/inflate.zig @@ -341,7 +341,7 @@ pub fn Inflate(comptime container: Container, comptime LookaheadType: type, comp // Reader interface - pub const Reader = std.io.Reader(*Self, Error, read); + pub const Reader = std.io.GenericReader(*Self, Error, read); /// Returns the number of bytes read. It may be less than buffer.len. /// If the number of bytes read is 0, it means end of stream. diff --git a/lib/std/compress/lzma.zig b/lib/std/compress/lzma.zig index aa35c3ffa9..5d072f5381 100644 --- a/lib/std/compress/lzma.zig +++ b/lib/std/compress/lzma.zig @@ -30,7 +30,7 @@ pub fn Decompress(comptime ReaderType: type) type { Allocator.Error || error{ CorruptInput, EndOfStream, Overflow }; - pub const Reader = std.io.Reader(*Self, Error, read); + pub const Reader = std.io.GenericReader(*Self, Error, read); allocator: Allocator, in_reader: ReaderType, diff --git a/lib/std/compress/xz.zig b/lib/std/compress/xz.zig index 4514c8857f..445d103098 100644 --- a/lib/std/compress/xz.zig +++ b/lib/std/compress/xz.zig @@ -34,7 +34,7 @@ pub fn Decompress(comptime ReaderType: type) type { const Self = @This(); pub const Error = ReaderType.Error || block.Decoder(ReaderType).Error; - pub const Reader = std.io.Reader(*Self, Error, read); + pub const Reader = std.io.GenericReader(*Self, Error, read); allocator: Allocator, block_decoder: block.Decoder(ReaderType), diff --git a/lib/std/compress/xz/block.zig b/lib/std/compress/xz/block.zig index a3c289dfb8..6253341f36 100644 --- a/lib/std/compress/xz/block.zig +++ b/lib/std/compress/xz/block.zig @@ -27,7 +27,7 
@@ pub fn Decoder(comptime ReaderType: type) type { ReaderType.Error || DecodeError || Allocator.Error; - pub const Reader = std.io.Reader(*Self, Error, read); + pub const Reader = std.io.GenericReader(*Self, Error, read); allocator: Allocator, inner_reader: ReaderType, diff --git a/lib/std/compress/zstandard.zig b/lib/std/compress/zstandard.zig index 7b41e1fe3e..df45e9686d 100644 --- a/lib/std/compress/zstandard.zig +++ b/lib/std/compress/zstandard.zig @@ -50,7 +50,7 @@ pub fn Decompressor(comptime ReaderType: type) type { OutOfMemory, }; - pub const Reader = std.io.Reader(*Self, Error, read); + pub const Reader = std.io.GenericReader(*Self, Error, read); pub fn init(source: ReaderType, options: DecompressorOptions) Self { return .{ diff --git a/lib/std/compress/zstandard/readers.zig b/lib/std/compress/zstandard/readers.zig index d7bf90ed80..7b15784187 100644 --- a/lib/std/compress/zstandard/readers.zig +++ b/lib/std/compress/zstandard/readers.zig @@ -4,7 +4,7 @@ pub const ReversedByteReader = struct { remaining_bytes: usize, bytes: []const u8, - const Reader = std.io.Reader(*ReversedByteReader, error{}, readFn); + const Reader = std.io.GenericReader(*ReversedByteReader, error{}, readFn); pub fn init(bytes: []const u8) ReversedByteReader { return .{ diff --git a/lib/std/crypto/25519/curve25519.zig b/lib/std/crypto/25519/curve25519.zig index 313dd577b0..825f0bd94c 100644 --- a/lib/std/crypto/25519/curve25519.zig +++ b/lib/std/crypto/25519/curve25519.zig @@ -124,9 +124,9 @@ test "curve25519" { const p = try Curve25519.basePoint.clampedMul(s); try p.rejectIdentity(); var buf: [128]u8 = undefined; - try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&p.toBytes())}), "E6F2A4D1C28EE5C7AD0329268255A468AD407D2672824C0C0EB30EA6EF450145"); + try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&p.toBytes()}), "E6F2A4D1C28EE5C7AD0329268255A468AD407D2672824C0C0EB30EA6EF450145"); const q = try p.clampedMul(s); - 
try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&q.toBytes())}), "3614E119FFE55EC55B87D6B19971A9F4CBC78EFE80BEC55B96392BABCC712537"); + try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&q.toBytes()}), "3614E119FFE55EC55B87D6B19971A9F4CBC78EFE80BEC55B96392BABCC712537"); try Curve25519.rejectNonCanonical(s); s[31] |= 0x80; diff --git a/lib/std/crypto/25519/ed25519.zig b/lib/std/crypto/25519/ed25519.zig index 94dd370d01..8151228bf2 100644 --- a/lib/std/crypto/25519/ed25519.zig +++ b/lib/std/crypto/25519/ed25519.zig @@ -509,8 +509,8 @@ test "key pair creation" { _ = try fmt.hexToBytes(seed[0..], "8052030376d47112be7f73ed7a019293dd12ad910b654455798b4667d73de166"); const key_pair = try Ed25519.KeyPair.generateDeterministic(seed); var buf: [256]u8 = undefined; - try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&key_pair.secret_key.toBytes())}), "8052030376D47112BE7F73ED7A019293DD12AD910B654455798B4667D73DE1662D6F7455D97B4A3A10D7293909D1A4F2058CB9A370E43FA8154BB280DB839083"); - try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&key_pair.public_key.toBytes())}), "2D6F7455D97B4A3A10D7293909D1A4F2058CB9A370E43FA8154BB280DB839083"); + try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&key_pair.secret_key.toBytes()}), "8052030376D47112BE7F73ED7A019293DD12AD910B654455798B4667D73DE1662D6F7455D97B4A3A10D7293909D1A4F2058CB9A370E43FA8154BB280DB839083"); + try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&key_pair.public_key.toBytes()}), "2D6F7455D97B4A3A10D7293909D1A4F2058CB9A370E43FA8154BB280DB839083"); } test "signature" { @@ -520,7 +520,7 @@ test "signature" { const sig = try key_pair.sign("test", null); var buf: [128]u8 = undefined; - try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&sig.toBytes())}), 
"10A442B4A80CC4225B154F43BEF28D2472CA80221951262EB8E0DF9091575E2687CC486E77263C3418C757522D54F84B0359236ABBBD4ACD20DC297FDCA66808"); + try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&sig.toBytes()}), "10A442B4A80CC4225B154F43BEF28D2472CA80221951262EB8E0DF9091575E2687CC486E77263C3418C757522D54F84B0359236ABBBD4ACD20DC297FDCA66808"); try sig.verify("test", key_pair.public_key); try std.testing.expectError(error.SignatureVerificationFailed, sig.verify("TEST", key_pair.public_key)); } diff --git a/lib/std/crypto/25519/edwards25519.zig b/lib/std/crypto/25519/edwards25519.zig index 527536f17d..47c07939ac 100644 --- a/lib/std/crypto/25519/edwards25519.zig +++ b/lib/std/crypto/25519/edwards25519.zig @@ -546,7 +546,7 @@ test "packing/unpacking" { var b = Edwards25519.basePoint; const pk = try b.mul(s); var buf: [128]u8 = undefined; - try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&pk.toBytes())}), "074BC7E0FCBD587FDBC0969444245FADC562809C8F6E97E949AF62484B5B81A6"); + try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&pk.toBytes()}), "074BC7E0FCBD587FDBC0969444245FADC562809C8F6E97E949AF62484B5B81A6"); const small_order_ss: [7][32]u8 = .{ .{ diff --git a/lib/std/crypto/25519/ristretto255.zig b/lib/std/crypto/25519/ristretto255.zig index 5a00bf523a..dd1a8a236e 100644 --- a/lib/std/crypto/25519/ristretto255.zig +++ b/lib/std/crypto/25519/ristretto255.zig @@ -175,21 +175,21 @@ pub const Ristretto255 = struct { test "ristretto255" { const p = Ristretto255.basePoint; var buf: [256]u8 = undefined; - try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&p.toBytes())}), "E2F2AE0A6ABC4E71A884A961C500515F58E30B6AA582DD8DB6A65945E08D2D76"); + try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&p.toBytes()}), "E2F2AE0A6ABC4E71A884A961C500515F58E30B6AA582DD8DB6A65945E08D2D76"); var r: [Ristretto255.encoded_length]u8 = undefined; _ = 
try fmt.hexToBytes(r[0..], "6a493210f7499cd17fecb510ae0cea23a110e8d5b901f8acadd3095c73a3b919"); var q = try Ristretto255.fromBytes(r); q = q.dbl().add(p); - try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&q.toBytes())}), "E882B131016B52C1D3337080187CF768423EFCCBB517BB495AB812C4160FF44E"); + try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&q.toBytes()}), "E882B131016B52C1D3337080187CF768423EFCCBB517BB495AB812C4160FF44E"); const s = [_]u8{15} ++ [_]u8{0} ** 31; const w = try p.mul(s); - try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&w.toBytes())}), "E0C418F7C8D9C4CDD7395B93EA124F3AD99021BB681DFC3302A9D99A2E53E64E"); + try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&w.toBytes()}), "E0C418F7C8D9C4CDD7395B93EA124F3AD99021BB681DFC3302A9D99A2E53E64E"); try std.testing.expect(p.dbl().dbl().dbl().dbl().equivalent(w.add(p))); const h = [_]u8{69} ** 32 ++ [_]u8{42} ** 32; const ph = Ristretto255.fromUniform(h); - try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&ph.toBytes())}), "DCCA54E037A4311EFBEEF413ACD21D35276518970B7A61DC88F8587B493D5E19"); + try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&ph.toBytes()}), "DCCA54E037A4311EFBEEF413ACD21D35276518970B7A61DC88F8587B493D5E19"); } diff --git a/lib/std/crypto/25519/scalar.zig b/lib/std/crypto/25519/scalar.zig index e7e74bf618..b07b1c774c 100644 --- a/lib/std/crypto/25519/scalar.zig +++ b/lib/std/crypto/25519/scalar.zig @@ -850,10 +850,10 @@ test "scalar25519" { var y = x.toBytes(); try rejectNonCanonical(y); var buf: [128]u8 = undefined; - try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&y)}), "1E979B917937F3DE71D18077F961F6CEFF01030405060708010203040506070F"); + try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&y}), 
"1E979B917937F3DE71D18077F961F6CEFF01030405060708010203040506070F"); const reduced = reduce(field_order_s); - try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&reduced)}), "0000000000000000000000000000000000000000000000000000000000000000"); + try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&reduced}), "0000000000000000000000000000000000000000000000000000000000000000"); } test "non-canonical scalar25519" { @@ -867,7 +867,7 @@ test "mulAdd overflow check" { const c: [32]u8 = [_]u8{0xff} ** 32; const x = mulAdd(a, b, c); var buf: [128]u8 = undefined; - try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&x)}), "D14DF91389432C25AD60FF9791B9FD1D67BEF517D273ECCE3D9A307C1B419903"); + try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&x}), "D14DF91389432C25AD60FF9791B9FD1D67BEF517D273ECCE3D9A307C1B419903"); } test "scalar field inversion" { diff --git a/lib/std/crypto/aegis.zig b/lib/std/crypto/aegis.zig index 2f565ce490..ea4176d13d 100644 --- a/lib/std/crypto/aegis.zig +++ b/lib/std/crypto/aegis.zig @@ -803,7 +803,7 @@ fn AegisMac(comptime T: type) type { } pub const Error = error{}; - pub const Writer = std.io.Writer(*Mac, Error, write); + pub const Writer = std.io.GenericWriter(*Mac, Error, write); fn write(self: *Mac, bytes: []const u8) Error!usize { self.update(bytes); diff --git a/lib/std/crypto/benchmark.zig b/lib/std/crypto/benchmark.zig index ee8809a55f..d064149778 100644 --- a/lib/std/crypto/benchmark.zig +++ b/lib/std/crypto/benchmark.zig @@ -458,7 +458,7 @@ fn mode(comptime x: comptime_int) comptime_int { } pub fn main() !void { - const stdout = std.io.getStdOut().writer(); + const stdout = std.fs.File.stdout().deprecatedWriter(); var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator); defer arena.deinit(); diff --git a/lib/std/crypto/blake2.zig b/lib/std/crypto/blake2.zig index 1a285080b5..cd40978cf3 100644 --- 
a/lib/std/crypto/blake2.zig +++ b/lib/std/crypto/blake2.zig @@ -187,7 +187,7 @@ pub fn Blake2s(comptime out_bits: usize) type { } pub const Error = error{}; - pub const Writer = std.io.Writer(*Self, Error, write); + pub const Writer = std.io.GenericWriter(*Self, Error, write); fn write(self: *Self, bytes: []const u8) Error!usize { self.update(bytes); diff --git a/lib/std/crypto/blake3.zig b/lib/std/crypto/blake3.zig index 585c338417..eecb794c9d 100644 --- a/lib/std/crypto/blake3.zig +++ b/lib/std/crypto/blake3.zig @@ -476,7 +476,7 @@ pub const Blake3 = struct { } pub const Error = error{}; - pub const Writer = std.io.Writer(*Blake3, Error, write); + pub const Writer = std.io.GenericWriter(*Blake3, Error, write); fn write(self: *Blake3, bytes: []const u8) Error!usize { self.update(bytes); diff --git a/lib/std/crypto/chacha20.zig b/lib/std/crypto/chacha20.zig index 287e664c2b..c605a6cb34 100644 --- a/lib/std/crypto/chacha20.zig +++ b/lib/std/crypto/chacha20.zig @@ -1145,7 +1145,7 @@ test "xchacha20" { var c: [m.len]u8 = undefined; XChaCha20IETF.xor(c[0..], m[0..], 0, key, nonce); var buf: [2 * c.len]u8 = undefined; - try testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&c)}), "E0A1BCF939654AFDBDC1746EC49832647C19D891F0D1A81FC0C1703B4514BDEA584B512F6908C2C5E9DD18D5CBC1805DE5803FE3B9CA5F193FB8359E91FAB0C3BB40309A292EB1CF49685C65C4A3ADF4F11DB0CD2B6B67FBC174BC2E860E8F769FD3565BBFAD1C845E05A0FED9BE167C240D"); + try testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&c}), "E0A1BCF939654AFDBDC1746EC49832647C19D891F0D1A81FC0C1703B4514BDEA584B512F6908C2C5E9DD18D5CBC1805DE5803FE3B9CA5F193FB8359E91FAB0C3BB40309A292EB1CF49685C65C4A3ADF4F11DB0CD2B6B67FBC174BC2E860E8F769FD3565BBFAD1C845E05A0FED9BE167C240D"); } { const ad = "Additional data"; @@ -1154,7 +1154,7 @@ test "xchacha20" { var out: [m.len]u8 = undefined; try XChaCha20Poly1305.decrypt(out[0..], c[0..m.len], c[m.len..].*, ad, nonce, key); var buf: [2 * c.len]u8 = undefined; 
- try testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&c)}), "994D2DD32333F48E53650C02C7A2ABB8E018B0836D7175AEC779F52E961780768F815C58F1AA52D211498DB89B9216763F569C9433A6BBFCEFB4D4A49387A4C5207FBB3B5A92B5941294DF30588C6740D39DC16FA1F0E634F7246CF7CDCB978E44347D89381B7A74EB7084F754B90BDE9AAF5A94B8F2A85EFD0B50692AE2D425E234"); + try testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&c}), "994D2DD32333F48E53650C02C7A2ABB8E018B0836D7175AEC779F52E961780768F815C58F1AA52D211498DB89B9216763F569C9433A6BBFCEFB4D4A49387A4C5207FBB3B5A92B5941294DF30588C6740D39DC16FA1F0E634F7246CF7CDCB978E44347D89381B7A74EB7084F754B90BDE9AAF5A94B8F2A85EFD0B50692AE2D425E234"); try testing.expectEqualSlices(u8, out[0..], m); c[0] +%= 1; try testing.expectError(error.AuthenticationFailed, XChaCha20Poly1305.decrypt(out[0..], c[0..m.len], c[m.len..].*, ad, nonce, key)); diff --git a/lib/std/crypto/codecs/asn1/der/ArrayListReverse.zig b/lib/std/crypto/codecs/asn1/der/ArrayListReverse.zig index f580c54546..b6c0ab20d4 100644 --- a/lib/std/crypto/codecs/asn1/der/ArrayListReverse.zig +++ b/lib/std/crypto/codecs/asn1/der/ArrayListReverse.zig @@ -45,7 +45,7 @@ pub fn prependSlice(self: *ArrayListReverse, data: []const u8) Error!void { self.data.ptr = begin; } -pub const Writer = std.io.Writer(*ArrayListReverse, Error, prependSliceSize); +pub const Writer = std.io.GenericWriter(*ArrayListReverse, Error, prependSliceSize); /// Warning: This writer writes backwards. `fn print` will NOT work as expected. 
pub fn writer(self: *ArrayListReverse) Writer { return .{ .context = self }; diff --git a/lib/std/crypto/ml_kem.zig b/lib/std/crypto/ml_kem.zig index 99cb493b34..ce3edf9eb5 100644 --- a/lib/std/crypto/ml_kem.zig +++ b/lib/std/crypto/ml_kem.zig @@ -1741,7 +1741,7 @@ test "NIST KAT test" { for (0..100) |i| { g.fill(&seed); try std.fmt.format(fw, "count = {}\n", .{i}); - try std.fmt.format(fw, "seed = {s}\n", .{std.fmt.fmtSliceHexUpper(&seed)}); + try std.fmt.format(fw, "seed = {X}\n", .{&seed}); var g2 = NistDRBG.init(seed); // This is not equivalent to g2.fill(kseed[:]). As the reference @@ -1756,16 +1756,16 @@ test "NIST KAT test" { const e = kp.public_key.encaps(eseed); const ss2 = try kp.secret_key.decaps(&e.ciphertext); try testing.expectEqual(ss2, e.shared_secret); - try std.fmt.format(fw, "pk = {s}\n", .{std.fmt.fmtSliceHexUpper(&kp.public_key.toBytes())}); - try std.fmt.format(fw, "sk = {s}\n", .{std.fmt.fmtSliceHexUpper(&kp.secret_key.toBytes())}); - try std.fmt.format(fw, "ct = {s}\n", .{std.fmt.fmtSliceHexUpper(&e.ciphertext)}); - try std.fmt.format(fw, "ss = {s}\n\n", .{std.fmt.fmtSliceHexUpper(&e.shared_secret)}); + try std.fmt.format(fw, "pk = {X}\n", .{&kp.public_key.toBytes()}); + try std.fmt.format(fw, "sk = {X}\n", .{&kp.secret_key.toBytes()}); + try std.fmt.format(fw, "ct = {X}\n", .{&e.ciphertext}); + try std.fmt.format(fw, "ss = {X}\n\n", .{&e.shared_secret}); } var out: [32]u8 = undefined; f.final(&out); var outHex: [64]u8 = undefined; - _ = try std.fmt.bufPrint(&outHex, "{s}", .{std.fmt.fmtSliceHexLower(&out)}); + _ = try std.fmt.bufPrint(&outHex, "{x}", .{&out}); try testing.expectEqual(outHex, modeHash[1].*); } } diff --git a/lib/std/crypto/sha1.zig b/lib/std/crypto/sha1.zig index 2968517b68..55b5312bd3 100644 --- a/lib/std/crypto/sha1.zig +++ b/lib/std/crypto/sha1.zig @@ -269,7 +269,7 @@ pub const Sha1 = struct { } pub const Error = error{}; - pub const Writer = std.io.Writer(*Self, Error, write); + pub const Writer = 
std.io.GenericWriter(*Self, Error, write); fn write(self: *Self, bytes: []const u8) Error!usize { self.update(bytes); diff --git a/lib/std/crypto/sha2.zig b/lib/std/crypto/sha2.zig index f098f68ef4..20a883d49f 100644 --- a/lib/std/crypto/sha2.zig +++ b/lib/std/crypto/sha2.zig @@ -376,7 +376,7 @@ fn Sha2x32(comptime iv: Iv32, digest_bits: comptime_int) type { } pub const Error = error{}; - pub const Writer = std.io.Writer(*Self, Error, write); + pub const Writer = std.io.GenericWriter(*Self, Error, write); fn write(self: *Self, bytes: []const u8) Error!usize { self.update(bytes); diff --git a/lib/std/crypto/sha3.zig b/lib/std/crypto/sha3.zig index a001538c1d..78c3ff5527 100644 --- a/lib/std/crypto/sha3.zig +++ b/lib/std/crypto/sha3.zig @@ -82,7 +82,7 @@ pub fn Keccak(comptime f: u11, comptime output_bits: u11, comptime default_delim } pub const Error = error{}; - pub const Writer = std.io.Writer(*Self, Error, write); + pub const Writer = std.io.GenericWriter(*Self, Error, write); fn write(self: *Self, bytes: []const u8) Error!usize { self.update(bytes); @@ -193,7 +193,7 @@ fn ShakeLike(comptime security_level: u11, comptime default_delim: u8, comptime } pub const Error = error{}; - pub const Writer = std.io.Writer(*Self, Error, write); + pub const Writer = std.io.GenericWriter(*Self, Error, write); fn write(self: *Self, bytes: []const u8) Error!usize { self.update(bytes); @@ -286,7 +286,7 @@ fn CShakeLike(comptime security_level: u11, comptime default_delim: u8, comptime } pub const Error = error{}; - pub const Writer = std.io.Writer(*Self, Error, write); + pub const Writer = std.io.GenericWriter(*Self, Error, write); fn write(self: *Self, bytes: []const u8) Error!usize { self.update(bytes); @@ -392,7 +392,7 @@ fn KMacLike(comptime security_level: u11, comptime default_delim: u8, comptime r } pub const Error = error{}; - pub const Writer = std.io.Writer(*Self, Error, write); + pub const Writer = std.io.GenericWriter(*Self, Error, write); fn write(self: *Self, bytes: 
[]const u8) Error!usize { self.update(bytes); @@ -484,7 +484,7 @@ fn TupleHashLike(comptime security_level: u11, comptime default_delim: u8, compt } pub const Error = error{}; - pub const Writer = std.io.Writer(*Self, Error, write); + pub const Writer = std.io.GenericWriter(*Self, Error, write); fn write(self: *Self, bytes: []const u8) Error!usize { self.update(bytes); diff --git a/lib/std/crypto/siphash.zig b/lib/std/crypto/siphash.zig index 5d1ac4f874..4334a6912d 100644 --- a/lib/std/crypto/siphash.zig +++ b/lib/std/crypto/siphash.zig @@ -240,7 +240,7 @@ fn SipHash(comptime T: type, comptime c_rounds: usize, comptime d_rounds: usize) } pub const Error = error{}; - pub const Writer = std.io.Writer(*Self, Error, write); + pub const Writer = std.io.GenericWriter(*Self, Error, write); fn write(self: *Self, bytes: []const u8) Error!usize { self.update(bytes); diff --git a/lib/std/crypto/tls/Client.zig b/lib/std/crypto/tls/Client.zig index bd5a74c2cb..d0cf762521 100644 --- a/lib/std/crypto/tls/Client.zig +++ b/lib/std/crypto/tls/Client.zig @@ -1512,11 +1512,11 @@ fn logSecrets(key_log_file: std.fs.File, context: anytype, secrets: anytype) voi const locked = if (key_log_file.lock(.exclusive)) |_| true else |_| false; defer if (locked) key_log_file.unlock(); key_log_file.seekFromEnd(0) catch {}; - inline for (@typeInfo(@TypeOf(secrets)).@"struct".fields) |field| key_log_file.writer().print("{s}" ++ - (if (@hasField(@TypeOf(context), "counter")) "_{d}" else "") ++ " {} {}\n", .{field.name} ++ + inline for (@typeInfo(@TypeOf(secrets)).@"struct".fields) |field| key_log_file.deprecatedWriter().print("{s}" ++ + (if (@hasField(@TypeOf(context), "counter")) "_{d}" else "") ++ " {x} {x}\n", .{field.name} ++ (if (@hasField(@TypeOf(context), "counter")) .{context.counter} else .{}) ++ .{ - std.fmt.fmtSliceHexLower(context.client_random), - std.fmt.fmtSliceHexLower(@field(secrets, field.name)), + context.client_random, + @field(secrets, field.name), }) catch {}; } diff --git 
a/lib/std/debug.zig b/lib/std/debug.zig index dbf8e110a2..5da650266e 100644 --- a/lib/std/debug.zig +++ b/lib/std/debug.zig @@ -12,6 +12,7 @@ const windows = std.os.windows; const native_arch = builtin.cpu.arch; const native_os = builtin.os.tag; const native_endian = native_arch.endian(); +const Writer = std.io.Writer; pub const MemoryAccessor = @import("debug/MemoryAccessor.zig"); pub const FixedBufferReader = @import("debug/FixedBufferReader.zig"); @@ -204,13 +205,26 @@ pub fn unlockStdErr() void { std.Progress.unlockStdErr(); } +/// Allows the caller to freely write to stderr until `unlockStdErr` is called. +/// +/// During the lock, any `std.Progress` information is cleared from the terminal. +/// +/// Returns a `Writer` with empty buffer, meaning that it is +/// in fact unbuffered and does not need to be flushed. +pub fn lockStderrWriter(buffer: []u8) *Writer { + return std.Progress.lockStderrWriter(buffer); +} + +pub fn unlockStderrWriter() void { + std.Progress.unlockStderrWriter(); +} + /// Print to stderr, unbuffered, and silently returning on failure. Intended -/// for use in "printf debugging." Use `std.log` functions for proper logging. +/// for use in "printf debugging". Use `std.log` functions for proper logging. pub fn print(comptime fmt: []const u8, args: anytype) void { - lockStdErr(); - defer unlockStdErr(); - const stderr = io.getStdErr().writer(); - nosuspend stderr.print(fmt, args) catch return; + const bw = lockStderrWriter(&.{}); + defer unlockStderrWriter(); + nosuspend bw.print(fmt, args) catch return; } pub fn getStderrMutex() *std.Thread.Mutex { @@ -232,50 +246,44 @@ pub fn getSelfDebugInfo() !*SelfInfo { /// Tries to print a hexadecimal view of the bytes, unbuffered, and ignores any error returned. /// Obtains the stderr mutex while dumping. 
pub fn dumpHex(bytes: []const u8) void { - lockStdErr(); - defer unlockStdErr(); - dumpHexFallible(bytes) catch {}; + const bw = lockStderrWriter(&.{}); + defer unlockStderrWriter(); + const ttyconf = std.io.tty.detectConfig(.stderr()); + dumpHexFallible(bw, ttyconf, bytes) catch {}; } -/// Prints a hexadecimal view of the bytes, unbuffered, returning any error that occurs. -pub fn dumpHexFallible(bytes: []const u8) !void { - const stderr = std.io.getStdErr(); - const ttyconf = std.io.tty.detectConfig(stderr); - const writer = stderr.writer(); - try dumpHexInternal(bytes, ttyconf, writer); -} - -fn dumpHexInternal(bytes: []const u8, ttyconf: std.io.tty.Config, writer: anytype) !void { +/// Prints a hexadecimal view of the bytes, returning any error that occurs. +pub fn dumpHexFallible(bw: *Writer, ttyconf: std.io.tty.Config, bytes: []const u8) !void { var chunks = mem.window(u8, bytes, 16, 16); while (chunks.next()) |window| { // 1. Print the address. const address = (@intFromPtr(bytes.ptr) + 0x10 * (std.math.divCeil(usize, chunks.index orelse bytes.len, 16) catch unreachable)) - 0x10; - try ttyconf.setColor(writer, .dim); + try ttyconf.setColor(bw, .dim); // We print the address in lowercase and the bytes in uppercase hexadecimal to distinguish them more. // Also, make sure all lines are aligned by padding the address. - try writer.print("{x:0>[1]} ", .{ address, @sizeOf(usize) * 2 }); - try ttyconf.setColor(writer, .reset); + try bw.print("{x:0>[1]} ", .{ address, @sizeOf(usize) * 2 }); + try ttyconf.setColor(bw, .reset); // 2. Print the bytes. for (window, 0..) 
|byte, index| { - try writer.print("{X:0>2} ", .{byte}); - if (index == 7) try writer.writeByte(' '); + try bw.print("{X:0>2} ", .{byte}); + if (index == 7) try bw.writeByte(' '); } - try writer.writeByte(' '); + try bw.writeByte(' '); if (window.len < 16) { var missing_columns = (16 - window.len) * 3; if (window.len < 8) missing_columns += 1; - try writer.writeByteNTimes(' ', missing_columns); + try bw.splatByteAll(' ', missing_columns); } // 3. Print the characters. for (window) |byte| { if (std.ascii.isPrint(byte)) { - try writer.writeByte(byte); + try bw.writeByte(byte); } else { // Related: https://github.com/ziglang/zig/issues/7600 if (ttyconf == .windows_api) { - try writer.writeByte('.'); + try bw.writeByte('.'); continue; } @@ -283,22 +291,23 @@ fn dumpHexInternal(bytes: []const u8, ttyconf: std.io.tty.Config, writer: anytyp // We don't want to do this for all control codes because most control codes apart from // the ones that Zig has escape sequences for are likely not very useful to print as symbols. 
switch (byte) { - '\n' => try writer.writeAll("␊"), - '\r' => try writer.writeAll("␍"), - '\t' => try writer.writeAll("␉"), - else => try writer.writeByte('.'), + '\n' => try bw.writeAll("␊"), + '\r' => try bw.writeAll("␍"), + '\t' => try bw.writeAll("␉"), + else => try bw.writeByte('.'), } } } - try writer.writeByte('\n'); + try bw.writeByte('\n'); } } -test dumpHexInternal { +test dumpHexFallible { const bytes: []const u8 = &.{ 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff, 0x01, 0x12, 0x13 }; - var output = std.ArrayList(u8).init(std.testing.allocator); - defer output.deinit(); - try dumpHexInternal(bytes, .no_color, output.writer()); + var aw: std.io.Writer.Allocating = .init(std.testing.allocator); + defer aw.deinit(); + + try dumpHexFallible(&aw.writer, .no_color, bytes); const expected = try std.fmt.allocPrint(std.testing.allocator, \\{x:0>[2]} 00 11 22 33 44 55 66 77 88 99 AA BB CC DD EE FF .."3DUfw........ \\{x:0>[2]} 01 12 13 ... @@ -309,34 +318,36 @@ test dumpHexInternal { @sizeOf(usize) * 2, }); defer std.testing.allocator.free(expected); - try std.testing.expectEqualStrings(expected, output.items); + try std.testing.expectEqualStrings(expected, aw.getWritten()); } /// Tries to print the current stack trace to stderr, unbuffered, and ignores any error returned. -/// TODO multithreaded awareness pub fn dumpCurrentStackTrace(start_addr: ?usize) void { - nosuspend { - if (builtin.target.cpu.arch.isWasm()) { - if (native_os == .wasi) { - const stderr = io.getStdErr().writer(); - stderr.print("Unable to dump stack trace: not implemented for Wasm\n", .{}) catch return; - } - return; + const stderr = lockStderrWriter(&.{}); + defer unlockStderrWriter(); + nosuspend dumpCurrentStackTraceToWriter(start_addr, stderr) catch return; +} + +/// Prints the current stack trace to the provided writer. 
+pub fn dumpCurrentStackTraceToWriter(start_addr: ?usize, writer: *Writer) !void { + if (builtin.target.cpu.arch.isWasm()) { + if (native_os == .wasi) { + try writer.writeAll("Unable to dump stack trace: not implemented for Wasm\n"); } - const stderr = io.getStdErr().writer(); - if (builtin.strip_debug_info) { - stderr.print("Unable to dump stack trace: debug info stripped\n", .{}) catch return; - return; - } - const debug_info = getSelfDebugInfo() catch |err| { - stderr.print("Unable to dump stack trace: Unable to open debug info: {s}\n", .{@errorName(err)}) catch return; - return; - }; - writeCurrentStackTrace(stderr, debug_info, io.tty.detectConfig(io.getStdErr()), start_addr) catch |err| { - stderr.print("Unable to dump stack trace: {s}\n", .{@errorName(err)}) catch return; - return; - }; + return; } + if (builtin.strip_debug_info) { + try writer.writeAll("Unable to dump stack trace: debug info stripped\n"); + return; + } + const debug_info = getSelfDebugInfo() catch |err| { + try writer.print("Unable to dump stack trace: Unable to open debug info: {s}\n", .{@errorName(err)}); + return; + }; + writeCurrentStackTrace(writer, debug_info, io.tty.detectConfig(.stderr()), start_addr) catch |err| { + try writer.print("Unable to dump stack trace: {s}\n", .{@errorName(err)}); + return; + }; } pub const have_ucontext = posix.ucontext_t != void; @@ -402,16 +413,14 @@ pub inline fn getContext(context: *ThreadContext) bool { /// Tries to print the stack trace starting from the supplied base pointer to stderr, /// unbuffered, and ignores any error returned. 
/// TODO multithreaded awareness -pub fn dumpStackTraceFromBase(context: *ThreadContext) void { +pub fn dumpStackTraceFromBase(context: *ThreadContext, stderr: *Writer) void { nosuspend { if (builtin.target.cpu.arch.isWasm()) { if (native_os == .wasi) { - const stderr = io.getStdErr().writer(); stderr.print("Unable to dump stack trace: not implemented for Wasm\n", .{}) catch return; } return; } - const stderr = io.getStdErr().writer(); if (builtin.strip_debug_info) { stderr.print("Unable to dump stack trace: debug info stripped\n", .{}) catch return; return; @@ -420,7 +429,7 @@ pub fn dumpStackTraceFromBase(context: *ThreadContext) void { stderr.print("Unable to dump stack trace: Unable to open debug info: {s}\n", .{@errorName(err)}) catch return; return; }; - const tty_config = io.tty.detectConfig(io.getStdErr()); + const tty_config = io.tty.detectConfig(.stderr()); if (native_os == .windows) { // On x86_64 and aarch64, the stack will be unwound using RtlVirtualUnwind using the context // provided by the exception handler. On x86, RtlVirtualUnwind doesn't exist. 
Instead, a new backtrace @@ -510,21 +519,23 @@ pub fn dumpStackTrace(stack_trace: std.builtin.StackTrace) void { nosuspend { if (builtin.target.cpu.arch.isWasm()) { if (native_os == .wasi) { - const stderr = io.getStdErr().writer(); - stderr.print("Unable to dump stack trace: not implemented for Wasm\n", .{}) catch return; + const stderr = lockStderrWriter(&.{}); + defer unlockStderrWriter(); + stderr.writeAll("Unable to dump stack trace: not implemented for Wasm\n") catch return; } return; } - const stderr = io.getStdErr().writer(); + const stderr = lockStderrWriter(&.{}); + defer unlockStderrWriter(); if (builtin.strip_debug_info) { - stderr.print("Unable to dump stack trace: debug info stripped\n", .{}) catch return; + stderr.writeAll("Unable to dump stack trace: debug info stripped\n") catch return; return; } const debug_info = getSelfDebugInfo() catch |err| { stderr.print("Unable to dump stack trace: Unable to open debug info: {s}\n", .{@errorName(err)}) catch return; return; }; - writeStackTrace(stack_trace, stderr, debug_info, io.tty.detectConfig(io.getStdErr())) catch |err| { + writeStackTrace(stack_trace, stderr, debug_info, io.tty.detectConfig(.stderr())) catch |err| { stderr.print("Unable to dump stack trace: {s}\n", .{@errorName(err)}) catch return; return; }; @@ -573,14 +584,13 @@ pub fn panicExtra( const size = 0x1000; const trunc_msg = "(msg truncated)"; var buf: [size + trunc_msg.len]u8 = undefined; + var bw: Writer = .fixed(buf[0..size]); // a minor annoyance with this is that it will result in the NoSpaceLeft // error being part of the @panic stack trace (but that error should // only happen rarely) - const msg = std.fmt.bufPrint(buf[0..size], format, args) catch |err| switch (err) { - error.NoSpaceLeft => blk: { - @memcpy(buf[size..], trunc_msg); - break :blk &buf; - }, + const msg = if (bw.print(format, args)) |_| bw.buffered() else |_| blk: { + @memcpy(buf[size..], trunc_msg); + break :blk &buf; }; std.builtin.panic.call(msg, ret_addr); } @@ 
-675,10 +685,9 @@ pub fn defaultPanic( _ = panicking.fetchAdd(1, .seq_cst); { - lockStdErr(); - defer unlockStdErr(); + const stderr = lockStderrWriter(&.{}); + defer unlockStderrWriter(); - const stderr = io.getStdErr().writer(); if (builtin.single_threaded) { stderr.print("panic: ", .{}) catch posix.abort(); } else { @@ -688,7 +697,7 @@ pub fn defaultPanic( stderr.print("{s}\n", .{msg}) catch posix.abort(); if (@errorReturnTrace()) |t| dumpStackTrace(t.*); - dumpCurrentStackTrace(first_trace_addr orelse @returnAddress()); + dumpCurrentStackTraceToWriter(first_trace_addr orelse @returnAddress(), stderr) catch {}; } waitForOtherThreadToFinishPanicking(); @@ -699,7 +708,7 @@ pub fn defaultPanic( // A panic happened while trying to print a previous panic message. // We're still holding the mutex but that's fine as we're going to // call abort(). - io.getStdErr().writeAll("aborting due to recursive panic\n") catch {}; + fs.File.stderr().writeAll("aborting due to recursive panic\n") catch {}; }, else => {}, // Panicked while printing the recursive panic message. 
}; @@ -723,7 +732,7 @@ fn waitForOtherThreadToFinishPanicking() void { pub fn writeStackTrace( stack_trace: std.builtin.StackTrace, - out_stream: anytype, + writer: *Writer, debug_info: *SelfInfo, tty_config: io.tty.Config, ) !void { @@ -736,15 +745,15 @@ pub fn writeStackTrace( frame_index = (frame_index + 1) % stack_trace.instruction_addresses.len; }) { const return_address = stack_trace.instruction_addresses[frame_index]; - try printSourceAtAddress(debug_info, out_stream, return_address - 1, tty_config); + try printSourceAtAddress(debug_info, writer, return_address - 1, tty_config); } if (stack_trace.index > stack_trace.instruction_addresses.len) { const dropped_frames = stack_trace.index - stack_trace.instruction_addresses.len; - tty_config.setColor(out_stream, .bold) catch {}; - try out_stream.print("({d} additional stack frames skipped...)\n", .{dropped_frames}); - tty_config.setColor(out_stream, .reset) catch {}; + tty_config.setColor(writer, .bold) catch {}; + try writer.print("({d} additional stack frames skipped...)\n", .{dropped_frames}); + tty_config.setColor(writer, .reset) catch {}; } } @@ -954,7 +963,7 @@ pub const StackIterator = struct { }; pub fn writeCurrentStackTrace( - out_stream: anytype, + writer: *Writer, debug_info: *SelfInfo, tty_config: io.tty.Config, start_addr: ?usize, @@ -962,7 +971,7 @@ pub fn writeCurrentStackTrace( if (native_os == .windows) { var context: ThreadContext = undefined; assert(getContext(&context)); - return writeStackTraceWindows(out_stream, debug_info, tty_config, &context, start_addr); + return writeStackTraceWindows(writer, debug_info, tty_config, &context, start_addr); } var context: ThreadContext = undefined; const has_context = getContext(&context); @@ -973,7 +982,7 @@ pub fn writeCurrentStackTrace( defer it.deinit(); while (it.next()) |return_address| { - printLastUnwindError(&it, debug_info, out_stream, tty_config); + printLastUnwindError(&it, debug_info, writer, tty_config); // On arm64 macOS, the address of 
the last frame is 0x0 rather than 0x1 as on x86_64 macOS, // therefore, we do a check for `return_address == 0` before subtracting 1 from it to avoid @@ -981,8 +990,8 @@ pub fn writeCurrentStackTrace( // condition on the subsequent iteration and return `null` thus terminating the loop. // same behaviour for x86-windows-msvc const address = return_address -| 1; - try printSourceAtAddress(debug_info, out_stream, address, tty_config); - } else printLastUnwindError(&it, debug_info, out_stream, tty_config); + try printSourceAtAddress(debug_info, writer, address, tty_config); + } else printLastUnwindError(&it, debug_info, writer, tty_config); } pub noinline fn walkStackWindows(addresses: []usize, existing_context: ?*const windows.CONTEXT) usize { @@ -1042,7 +1051,7 @@ pub noinline fn walkStackWindows(addresses: []usize, existing_context: ?*const w } pub fn writeStackTraceWindows( - out_stream: anytype, + writer: *Writer, debug_info: *SelfInfo, tty_config: io.tty.Config, context: *const windows.CONTEXT, @@ -1058,14 +1067,14 @@ pub fn writeStackTraceWindows( return; } else 0; for (addrs[start_i..]) |addr| { - try printSourceAtAddress(debug_info, out_stream, addr - 1, tty_config); + try printSourceAtAddress(debug_info, writer, addr - 1, tty_config); } } -fn printUnknownSource(debug_info: *SelfInfo, out_stream: anytype, address: usize, tty_config: io.tty.Config) !void { +fn printUnknownSource(debug_info: *SelfInfo, writer: *Writer, address: usize, tty_config: io.tty.Config) !void { const module_name = debug_info.getModuleNameForAddress(address); return printLineInfo( - out_stream, + writer, null, address, "???", @@ -1075,38 +1084,38 @@ fn printUnknownSource(debug_info: *SelfInfo, out_stream: anytype, address: usize ); } -fn printLastUnwindError(it: *StackIterator, debug_info: *SelfInfo, out_stream: anytype, tty_config: io.tty.Config) void { +fn printLastUnwindError(it: *StackIterator, debug_info: *SelfInfo, writer: *Writer, tty_config: io.tty.Config) void { if 
(!have_ucontext) return; if (it.getLastError()) |unwind_error| { - printUnwindError(debug_info, out_stream, unwind_error.address, unwind_error.err, tty_config) catch {}; + printUnwindError(debug_info, writer, unwind_error.address, unwind_error.err, tty_config) catch {}; } } -fn printUnwindError(debug_info: *SelfInfo, out_stream: anytype, address: usize, err: UnwindError, tty_config: io.tty.Config) !void { +fn printUnwindError(debug_info: *SelfInfo, writer: *Writer, address: usize, err: UnwindError, tty_config: io.tty.Config) !void { const module_name = debug_info.getModuleNameForAddress(address) orelse "???"; - try tty_config.setColor(out_stream, .dim); + try tty_config.setColor(writer, .dim); if (err == error.MissingDebugInfo) { - try out_stream.print("Unwind information for `{s}:0x{x}` was not available, trace may be incomplete\n\n", .{ module_name, address }); + try writer.print("Unwind information for `{s}:0x{x}` was not available, trace may be incomplete\n\n", .{ module_name, address }); } else { - try out_stream.print("Unwind error at address `{s}:0x{x}` ({}), trace may be incomplete\n\n", .{ module_name, address, err }); + try writer.print("Unwind error at address `{s}:0x{x}` ({}), trace may be incomplete\n\n", .{ module_name, address, err }); } - try tty_config.setColor(out_stream, .reset); + try tty_config.setColor(writer, .reset); } -pub fn printSourceAtAddress(debug_info: *SelfInfo, out_stream: anytype, address: usize, tty_config: io.tty.Config) !void { +pub fn printSourceAtAddress(debug_info: *SelfInfo, writer: *Writer, address: usize, tty_config: io.tty.Config) !void { const module = debug_info.getModuleForAddress(address) catch |err| switch (err) { - error.MissingDebugInfo, error.InvalidDebugInfo => return printUnknownSource(debug_info, out_stream, address, tty_config), + error.MissingDebugInfo, error.InvalidDebugInfo => return printUnknownSource(debug_info, writer, address, tty_config), else => return err, }; const symbol_info = 
module.getSymbolAtAddress(debug_info.allocator, address) catch |err| switch (err) { - error.MissingDebugInfo, error.InvalidDebugInfo => return printUnknownSource(debug_info, out_stream, address, tty_config), + error.MissingDebugInfo, error.InvalidDebugInfo => return printUnknownSource(debug_info, writer, address, tty_config), else => return err, }; defer if (symbol_info.source_location) |sl| debug_info.allocator.free(sl.file_name); return printLineInfo( - out_stream, + writer, symbol_info.source_location, address, symbol_info.name, @@ -1117,7 +1126,7 @@ pub fn printSourceAtAddress(debug_info: *SelfInfo, out_stream: anytype, address: } fn printLineInfo( - out_stream: anytype, + writer: *Writer, source_location: ?SourceLocation, address: usize, symbol_name: []const u8, @@ -1126,34 +1135,34 @@ fn printLineInfo( comptime printLineFromFile: anytype, ) !void { nosuspend { - try tty_config.setColor(out_stream, .bold); + try tty_config.setColor(writer, .bold); if (source_location) |*sl| { - try out_stream.print("{s}:{d}:{d}", .{ sl.file_name, sl.line, sl.column }); + try writer.print("{s}:{d}:{d}", .{ sl.file_name, sl.line, sl.column }); } else { - try out_stream.writeAll("???:?:?"); + try writer.writeAll("???:?:?"); } - try tty_config.setColor(out_stream, .reset); - try out_stream.writeAll(": "); - try tty_config.setColor(out_stream, .dim); - try out_stream.print("0x{x} in {s} ({s})", .{ address, symbol_name, compile_unit_name }); - try tty_config.setColor(out_stream, .reset); - try out_stream.writeAll("\n"); + try tty_config.setColor(writer, .reset); + try writer.writeAll(": "); + try tty_config.setColor(writer, .dim); + try writer.print("0x{x} in {s} ({s})", .{ address, symbol_name, compile_unit_name }); + try tty_config.setColor(writer, .reset); + try writer.writeAll("\n"); // Show the matching source code line if possible if (source_location) |sl| { - if (printLineFromFile(out_stream, sl)) { + if (printLineFromFile(writer, sl)) { if (sl.column > 0) { // The caret 
already takes one char const space_needed = @as(usize, @intCast(sl.column - 1)); - try out_stream.writeByteNTimes(' ', space_needed); - try tty_config.setColor(out_stream, .green); - try out_stream.writeAll("^"); - try tty_config.setColor(out_stream, .reset); + try writer.splatByteAll(' ', space_needed); + try tty_config.setColor(writer, .green); + try writer.writeAll("^"); + try tty_config.setColor(writer, .reset); } - try out_stream.writeAll("\n"); + try writer.writeAll("\n"); } else |err| switch (err) { error.EndOfFile, error.FileNotFound => {}, error.BadPathName => {}, @@ -1164,7 +1173,7 @@ fn printLineInfo( } } -fn printLineFromFileAnyOs(out_stream: anytype, source_location: SourceLocation) !void { +fn printLineFromFileAnyOs(writer: *Writer, source_location: SourceLocation) !void { // Need this to always block even in async I/O mode, because this could potentially // be called from e.g. the event loop code crashing. var f = try fs.cwd().openFile(source_location.file_name, .{}); @@ -1197,31 +1206,31 @@ fn printLineFromFileAnyOs(out_stream: anytype, source_location: SourceLocation) if (mem.indexOfScalar(u8, slice, '\n')) |pos| { const line = slice[0 .. pos + 1]; mem.replaceScalar(u8, line, '\t', ' '); - return out_stream.writeAll(line); + return writer.writeAll(line); } else { // Line is the last inside the buffer, and requires another read to find delimiter. Alternatively the file ends. mem.replaceScalar(u8, slice, '\t', ' '); - try out_stream.writeAll(slice); + try writer.writeAll(slice); while (amt_read == buf.len) { amt_read = try f.read(buf[0..]); if (mem.indexOfScalar(u8, buf[0..amt_read], '\n')) |pos| { const line = buf[0 .. 
pos + 1]; mem.replaceScalar(u8, line, '\t', ' '); - return out_stream.writeAll(line); + return writer.writeAll(line); } else { const line = buf[0..amt_read]; mem.replaceScalar(u8, line, '\t', ' '); - try out_stream.writeAll(line); + try writer.writeAll(line); } } // Make sure printing last line of file inserts extra newline - try out_stream.writeByte('\n'); + try writer.writeByte('\n'); } } test printLineFromFileAnyOs { - var output = std.ArrayList(u8).init(std.testing.allocator); - defer output.deinit(); - const output_stream = output.writer(); + var aw: Writer.Allocating = .init(std.testing.allocator); + defer aw.deinit(); + const output_stream = &aw.writer; const allocator = std.testing.allocator; const join = std.fs.path.join; @@ -1243,8 +1252,8 @@ test printLineFromFileAnyOs { try expectError(error.EndOfFile, printLineFromFileAnyOs(output_stream, .{ .file_name = path, .line = 2, .column = 0 })); try printLineFromFileAnyOs(output_stream, .{ .file_name = path, .line = 1, .column = 0 }); - try expectEqualStrings("no new lines in this file, but one is printed anyway\n", output.items); - output.clearRetainingCapacity(); + try expectEqualStrings("no new lines in this file, but one is printed anyway\n", aw.getWritten()); + aw.clearRetainingCapacity(); } { const path = try fs.path.join(allocator, &.{ test_dir_path, "three_lines.zig" }); @@ -1259,12 +1268,12 @@ test printLineFromFileAnyOs { }); try printLineFromFileAnyOs(output_stream, .{ .file_name = path, .line = 1, .column = 0 }); - try expectEqualStrings("1\n", output.items); - output.clearRetainingCapacity(); + try expectEqualStrings("1\n", aw.getWritten()); + aw.clearRetainingCapacity(); try printLineFromFileAnyOs(output_stream, .{ .file_name = path, .line = 3, .column = 0 }); - try expectEqualStrings("3\n", output.items); - output.clearRetainingCapacity(); + try expectEqualStrings("3\n", aw.getWritten()); + aw.clearRetainingCapacity(); } { const file = try 
test_dir.dir.createFile("line_overlaps_page_boundary.zig", .{}); @@ -1273,14 +1282,17 @@ test printLineFromFileAnyOs { defer allocator.free(path); const overlap = 10; - var writer = file.writer(); - try writer.writeByteNTimes('a', std.heap.page_size_min - overlap); + var buf: [16]u8 = undefined; + var file_writer = file.writer(&buf); + const writer = &file_writer.interface; + try writer.splatByteAll('a', std.heap.page_size_min - overlap); try writer.writeByte('\n'); - try writer.writeByteNTimes('a', overlap); + try writer.splatByteAll('a', overlap); + try writer.flush(); try printLineFromFileAnyOs(output_stream, .{ .file_name = path, .line = 2, .column = 0 }); - try expectEqualStrings(("a" ** overlap) ++ "\n", output.items); - output.clearRetainingCapacity(); + try expectEqualStrings(("a" ** overlap) ++ "\n", aw.getWritten()); + aw.clearRetainingCapacity(); } { const file = try test_dir.dir.createFile("file_ends_on_page_boundary.zig", .{}); @@ -1288,12 +1300,13 @@ test printLineFromFileAnyOs { const path = try fs.path.join(allocator, &.{ test_dir_path, "file_ends_on_page_boundary.zig" }); defer allocator.free(path); - var writer = file.writer(); - try writer.writeByteNTimes('a', std.heap.page_size_max); + var file_writer = file.writer(&.{}); + const writer = &file_writer.interface; + try writer.splatByteAll('a', std.heap.page_size_max); try printLineFromFileAnyOs(output_stream, .{ .file_name = path, .line = 1, .column = 0 }); - try expectEqualStrings(("a" ** std.heap.page_size_max) ++ "\n", output.items); - output.clearRetainingCapacity(); + try expectEqualStrings(("a" ** std.heap.page_size_max) ++ "\n", aw.getWritten()); + aw.clearRetainingCapacity(); } { const file = try test_dir.dir.createFile("very_long_first_line_spanning_multiple_pages.zig", .{}); @@ -1301,24 +1314,25 @@ test printLineFromFileAnyOs { const path = try fs.path.join(allocator, &.{ test_dir_path, "very_long_first_line_spanning_multiple_pages.zig" }); defer allocator.free(path); - var writer = 
file.writer(); - try writer.writeByteNTimes('a', 3 * std.heap.page_size_max); + var file_writer = file.writer(&.{}); + const writer = &file_writer.interface; + try writer.splatByteAll('a', 3 * std.heap.page_size_max); try expectError(error.EndOfFile, printLineFromFileAnyOs(output_stream, .{ .file_name = path, .line = 2, .column = 0 })); try printLineFromFileAnyOs(output_stream, .{ .file_name = path, .line = 1, .column = 0 }); - try expectEqualStrings(("a" ** (3 * std.heap.page_size_max)) ++ "\n", output.items); - output.clearRetainingCapacity(); + try expectEqualStrings(("a" ** (3 * std.heap.page_size_max)) ++ "\n", aw.getWritten()); + aw.clearRetainingCapacity(); try writer.writeAll("a\na"); try printLineFromFileAnyOs(output_stream, .{ .file_name = path, .line = 1, .column = 0 }); - try expectEqualStrings(("a" ** (3 * std.heap.page_size_max)) ++ "a\n", output.items); - output.clearRetainingCapacity(); + try expectEqualStrings(("a" ** (3 * std.heap.page_size_max)) ++ "a\n", aw.getWritten()); + aw.clearRetainingCapacity(); try printLineFromFileAnyOs(output_stream, .{ .file_name = path, .line = 2, .column = 0 }); - try expectEqualStrings("a\n", output.items); - output.clearRetainingCapacity(); + try expectEqualStrings("a\n", aw.getWritten()); + aw.clearRetainingCapacity(); } { const file = try test_dir.dir.createFile("file_of_newlines.zig", .{}); @@ -1326,18 +1340,19 @@ test printLineFromFileAnyOs { const path = try fs.path.join(allocator, &.{ test_dir_path, "file_of_newlines.zig" }); defer allocator.free(path); - var writer = file.writer(); + var file_writer = file.writer(&.{}); + const writer = &file_writer.interface; const real_file_start = 3 * std.heap.page_size_min; - try writer.writeByteNTimes('\n', real_file_start); + try writer.splatByteAll('\n', real_file_start); try writer.writeAll("abc\ndef"); try printLineFromFileAnyOs(output_stream, .{ .file_name = path, .line = real_file_start + 1, .column = 0 }); - try expectEqualStrings("abc\n", output.items); - 
output.clearRetainingCapacity(); + try expectEqualStrings("abc\n", aw.getWritten()); + aw.clearRetainingCapacity(); try printLineFromFileAnyOs(output_stream, .{ .file_name = path, .line = real_file_start + 2, .column = 0 }); - try expectEqualStrings("def\n", output.items); - output.clearRetainingCapacity(); + try expectEqualStrings("def\n", aw.getWritten()); + aw.clearRetainingCapacity(); } } @@ -1461,7 +1476,8 @@ fn handleSegfaultPosix(sig: i32, info: *const posix.siginfo_t, ctx_ptr: ?*anyopa } fn dumpSegfaultInfoPosix(sig: i32, code: i32, addr: usize, ctx_ptr: ?*anyopaque) void { - const stderr = io.getStdErr().writer(); + const stderr = lockStderrWriter(&.{}); + defer unlockStderrWriter(); _ = switch (sig) { posix.SIG.SEGV => if (native_arch == .x86_64 and native_os == .linux and code == 128) // SI_KERNEL // x86_64 doesn't have a full 64-bit virtual address space. @@ -1471,7 +1487,7 @@ fn dumpSegfaultInfoPosix(sig: i32, code: i32, addr: usize, ctx_ptr: ?*anyopaque) // but can also happen when no addressable memory is involved; // for example when reading/writing model-specific registers // by executing `rdmsr` or `wrmsr` in user-space (unprivileged mode). 
- stderr.print("General protection exception (no address available)\n", .{}) + stderr.writeAll("General protection exception (no address available)\n") else stderr.print("Segmentation fault at address 0x{x}\n", .{addr}), posix.SIG.ILL => stderr.print("Illegal instruction at address 0x{x}\n", .{addr}), @@ -1509,7 +1525,7 @@ fn dumpSegfaultInfoPosix(sig: i32, code: i32, addr: usize, ctx_ptr: ?*anyopaque) }, @ptrCast(ctx)).__mcontext_data; } relocateContext(&new_ctx); - dumpStackTraceFromBase(&new_ctx); + dumpStackTraceFromBase(&new_ctx, stderr); }, else => {}, } @@ -1539,25 +1555,24 @@ fn handleSegfaultWindowsExtra(info: *windows.EXCEPTION_POINTERS, msg: u8, label: _ = panicking.fetchAdd(1, .seq_cst); { - lockStdErr(); - defer unlockStdErr(); + const stderr = lockStderrWriter(&.{}); + defer unlockStderrWriter(); - dumpSegfaultInfoWindows(info, msg, label); + dumpSegfaultInfoWindows(info, msg, label, stderr); } waitForOtherThreadToFinishPanicking(); }, 1 => { panic_stage = 2; - io.getStdErr().writeAll("aborting due to recursive panic\n") catch {}; + fs.File.stderr().writeAll("aborting due to recursive panic\n") catch {}; }, else => {}, }; posix.abort(); } -fn dumpSegfaultInfoWindows(info: *windows.EXCEPTION_POINTERS, msg: u8, label: ?[]const u8) void { - const stderr = io.getStdErr().writer(); +fn dumpSegfaultInfoWindows(info: *windows.EXCEPTION_POINTERS, msg: u8, label: ?[]const u8, stderr: *Writer) void { _ = switch (msg) { 0 => stderr.print("{s}\n", .{label.?}), 1 => stderr.print("Segmentation fault at address 0x{x}\n", .{info.ExceptionRecord.ExceptionInformation[1]}), @@ -1565,7 +1580,7 @@ fn dumpSegfaultInfoWindows(info: *windows.EXCEPTION_POINTERS, msg: u8, label: ?[ else => unreachable, } catch posix.abort(); - dumpStackTraceFromBase(info.ContextRecord); + dumpStackTraceFromBase(info.ContextRecord, stderr); } pub fn dumpStackPointerAddr(prefix: []const u8) void { @@ -1588,10 +1603,10 @@ test "manage resources correctly" { // self-hosted debug info is still too 
buggy if (builtin.zig_backend != .stage2_llvm) return error.SkipZigTest; - const writer = std.io.null_writer; + var discarding: std.io.Writer.Discarding = .init(&.{}); var di = try SelfInfo.open(testing.allocator); defer di.deinit(); - try printSourceAtAddress(&di, writer, showMyTrace(), io.tty.detectConfig(std.io.getStdErr())); + try printSourceAtAddress(&di, &discarding.writer, showMyTrace(), io.tty.detectConfig(.stderr())); } noinline fn showMyTrace() usize { @@ -1657,8 +1672,9 @@ pub fn ConfigurableTrace(comptime size: usize, comptime stack_frame_count: usize pub fn dump(t: @This()) void { if (!enabled) return; - const tty_config = io.tty.detectConfig(std.io.getStdErr()); - const stderr = io.getStdErr().writer(); + const tty_config = io.tty.detectConfig(.stderr()); + const stderr = lockStderrWriter(&.{}); + defer unlockStderrWriter(); const end = @min(t.index, size); const debug_info = getSelfDebugInfo() catch |err| { stderr.print( @@ -1688,7 +1704,7 @@ pub fn ConfigurableTrace(comptime size: usize, comptime stack_frame_count: usize t: @This(), comptime fmt: []const u8, options: std.fmt.FormatOptions, - writer: anytype, + writer: *Writer, ) !void { if (fmt.len != 0) std.fmt.invalidFmtError(fmt, t); _ = options; diff --git a/lib/std/debug/Dwarf.zig b/lib/std/debug/Dwarf.zig index 06b6c81075..3f1fc41feb 100644 --- a/lib/std/debug/Dwarf.zig +++ b/lib/std/debug/Dwarf.zig @@ -2302,11 +2302,7 @@ pub const ElfModule = struct { }; defer debuginfod_dir.close(); - const filename = std.fmt.allocPrint( - gpa, - "{s}/debuginfo", - .{std.fmt.fmtSliceHexLower(id)}, - ) catch break :blk; + const filename = std.fmt.allocPrint(gpa, "{x}/debuginfo", .{id}) catch break :blk; defer gpa.free(filename); const path: Path = .{ @@ -2330,12 +2326,8 @@ pub const ElfModule = struct { var id_prefix_buf: [2]u8 = undefined; var filename_buf: [38 + extension.len]u8 = undefined; - _ = std.fmt.bufPrint(&id_prefix_buf, "{s}", .{std.fmt.fmtSliceHexLower(id[0..1])}) catch unreachable; - const 
filename = std.fmt.bufPrint( - &filename_buf, - "{s}" ++ extension, - .{std.fmt.fmtSliceHexLower(id[1..])}, - ) catch break :blk; + _ = std.fmt.bufPrint(&id_prefix_buf, "{x}", .{id[0..1]}) catch unreachable; + const filename = std.fmt.bufPrint(&filename_buf, "{x}" ++ extension, .{id[1..]}) catch break :blk; for (global_debug_directories) |global_directory| { const path: Path = .{ diff --git a/lib/std/debug/Pdb.zig b/lib/std/debug/Pdb.zig index 3b1adb3c56..c8dccca497 100644 --- a/lib/std/debug/Pdb.zig +++ b/lib/std/debug/Pdb.zig @@ -395,7 +395,7 @@ const Msf = struct { streams: []MsfStream, fn init(allocator: Allocator, file: File) !Msf { - const in = file.reader(); + const in = file.deprecatedReader(); const superblock = try in.readStruct(pdb.SuperBlock); @@ -514,7 +514,7 @@ const MsfStream = struct { var offset = self.pos % self.block_size; try self.in_file.seekTo(block * self.block_size + offset); - const in = self.in_file.reader(); + const in = self.in_file.deprecatedReader(); var size: usize = 0; var rem_buffer = buffer; @@ -562,7 +562,7 @@ const MsfStream = struct { return block * self.block_size + offset; } - pub fn reader(self: *MsfStream) std.io.Reader(*MsfStream, Error, read) { + pub fn reader(self: *MsfStream) std.io.GenericReader(*MsfStream, Error, read) { return .{ .context = self }; } }; diff --git a/lib/std/debug/simple_panic.zig b/lib/std/debug/simple_panic.zig index 95f7d679ed..45e97777c4 100644 --- a/lib/std/debug/simple_panic.zig +++ b/lib/std/debug/simple_panic.zig @@ -15,7 +15,7 @@ pub fn call(msg: []const u8, ra: ?usize) noreturn { @branchHint(.cold); _ = ra; std.debug.lockStdErr(); - const stderr = std.io.getStdErr(); + const stderr: std.fs.File = .stderr(); stderr.writeAll(msg) catch {}; @trap(); } diff --git a/lib/std/elf.zig b/lib/std/elf.zig index 023430d110..4e15cd3a09 100644 --- a/lib/std/elf.zig +++ b/lib/std/elf.zig @@ -511,7 +511,7 @@ pub const Header = struct { pub fn read(parse_source: anytype) !Header { var hdr_buf: 
[@sizeOf(Elf64_Ehdr)]u8 align(@alignOf(Elf64_Ehdr)) = undefined; try parse_source.seekableStream().seekTo(0); - try parse_source.reader().readNoEof(&hdr_buf); + try parse_source.deprecatedReader().readNoEof(&hdr_buf); return Header.parse(&hdr_buf); } @@ -586,7 +586,7 @@ pub fn ProgramHeaderIterator(comptime ParseSource: anytype) type { var phdr: Elf64_Phdr = undefined; const offset = self.elf_header.phoff + @sizeOf(@TypeOf(phdr)) * self.index; try self.parse_source.seekableStream().seekTo(offset); - try self.parse_source.reader().readNoEof(mem.asBytes(&phdr)); + try self.parse_source.deprecatedReader().readNoEof(mem.asBytes(&phdr)); // ELF endianness matches native endianness. if (self.elf_header.endian == native_endian) return phdr; @@ -599,7 +599,7 @@ pub fn ProgramHeaderIterator(comptime ParseSource: anytype) type { var phdr: Elf32_Phdr = undefined; const offset = self.elf_header.phoff + @sizeOf(@TypeOf(phdr)) * self.index; try self.parse_source.seekableStream().seekTo(offset); - try self.parse_source.reader().readNoEof(mem.asBytes(&phdr)); + try self.parse_source.deprecatedReader().readNoEof(mem.asBytes(&phdr)); // ELF endianness does NOT match native endianness. if (self.elf_header.endian != native_endian) { @@ -636,7 +636,7 @@ pub fn SectionHeaderIterator(comptime ParseSource: anytype) type { var shdr: Elf64_Shdr = undefined; const offset = self.elf_header.shoff + @sizeOf(@TypeOf(shdr)) * self.index; try self.parse_source.seekableStream().seekTo(offset); - try self.parse_source.reader().readNoEof(mem.asBytes(&shdr)); + try self.parse_source.deprecatedReader().readNoEof(mem.asBytes(&shdr)); // ELF endianness matches native endianness. 
if (self.elf_header.endian == native_endian) return shdr; @@ -649,7 +649,7 @@ pub fn SectionHeaderIterator(comptime ParseSource: anytype) type { var shdr: Elf32_Shdr = undefined; const offset = self.elf_header.shoff + @sizeOf(@TypeOf(shdr)) * self.index; try self.parse_source.seekableStream().seekTo(offset); - try self.parse_source.reader().readNoEof(mem.asBytes(&shdr)); + try self.parse_source.deprecatedReader().readNoEof(mem.asBytes(&shdr)); // ELF endianness does NOT match native endianness. if (self.elf_header.endian != native_endian) { diff --git a/lib/std/fifo.zig b/lib/std/fifo.zig index fa5357cd6c..e18b5edb01 100644 --- a/lib/std/fifo.zig +++ b/lib/std/fifo.zig @@ -38,8 +38,8 @@ pub fn LinearFifo( count: usize, const Self = @This(); - pub const Reader = std.io.Reader(*Self, error{}, readFn); - pub const Writer = std.io.Writer(*Self, error{OutOfMemory}, appendWrite); + pub const Reader = std.io.GenericReader(*Self, error{}, readFn); + pub const Writer = std.io.GenericWriter(*Self, error{OutOfMemory}, appendWrite); // Type of Self argument for slice operations. // If buffer is inline (Static) then we need to ensure we haven't @@ -231,7 +231,7 @@ pub fn LinearFifo( } /// Same as `read` except it returns an error union - /// The purpose of this function existing is to match `std.io.Reader` API. + /// The purpose of this function existing is to match `std.io.GenericReader` API. fn readFn(self: *Self, dest: []u8) error{}!usize { return self.read(dest); } @@ -320,7 +320,7 @@ pub fn LinearFifo( } /// Same as `write` except it returns the number of bytes written, which is always the same - /// as `bytes.len`. The purpose of this function existing is to match `std.io.Writer` API. + /// as `bytes.len`. The purpose of this function existing is to match `std.io.GenericWriter` API. 
fn appendWrite(self: *Self, bytes: []const u8) error{OutOfMemory}!usize { try self.write(bytes); return bytes.len; diff --git a/lib/std/fmt.zig b/lib/std/fmt.zig index efd5ffd3a2..0c51d56e30 100644 --- a/lib/std/fmt.zig +++ b/lib/std/fmt.zig @@ -1,17 +1,20 @@ //! String formatting and parsing. -const std = @import("std.zig"); const builtin = @import("builtin"); +const std = @import("std.zig"); const io = std.io; const math = std.math; const assert = std.debug.assert; const mem = std.mem; -const unicode = std.unicode; const meta = std.meta; const lossyCast = math.lossyCast; const expectFmt = std.testing.expectFmt; const testing = std.testing; +const Allocator = std.mem.Allocator; +const Writer = std.io.Writer; + +pub const float = @import("fmt/float.zig"); pub const default_max_depth = 3; @@ -21,237 +24,91 @@ pub const Alignment = enum { right, }; +pub const Case = enum { lower, upper }; + const default_alignment = .right; const default_fill_char = ' '; -pub const FormatOptions = struct { +/// Deprecated in favor of `Options`. +pub const FormatOptions = Options; + +pub const Options = struct { precision: ?usize = null, width: ?usize = null, alignment: Alignment = default_alignment, - fill: u21 = default_fill_char, + fill: u8 = default_fill_char, + + pub fn toNumber(o: Options, mode: Number.Mode, case: Case) Number { + return .{ + .mode = mode, + .case = case, + .precision = o.precision, + .width = o.width, + .alignment = o.alignment, + .fill = o.fill, + }; + } }; -/// Renders fmt string with args, calling `writer` with slices of bytes. -/// If `writer` returns an error, the error is returned from `format` and -/// `writer` is not called again. 
-/// -/// The format string must be comptime-known and may contain placeholders following -/// this format: -/// `{[argument][specifier]:[fill][alignment][width].[precision]}` -/// -/// Above, each word including its surrounding [ and ] is a parameter which you have to replace with something: -/// -/// - *argument* is either the numeric index or the field name of the argument that should be inserted -/// - when using a field name, you are required to enclose the field name (an identifier) in square -/// brackets, e.g. {[score]...} as opposed to the numeric index form which can be written e.g. {2...} -/// - *specifier* is a type-dependent formatting option that determines how a type should formatted (see below) -/// - *fill* is a single unicode codepoint which is used to pad the formatted text -/// - *alignment* is one of the three bytes '<', '^', or '>' to make the text left-, center-, or right-aligned, respectively -/// - *width* is the total width of the field in unicode codepoints -/// - *precision* specifies how many decimals a formatted number should have -/// -/// Note that most of the parameters are optional and may be omitted. Also you can leave out separators like `:` and `.` when -/// all parameters after the separator are omitted. -/// Only exception is the *fill* parameter. If a non-zero *fill* character is required at the same time as *width* is specified, -/// one has to specify *alignment* as well, as otherwise the digit following `:` is interpreted as *width*, not *fill*. 
-/// -/// The *specifier* has several options for types: -/// - `x` and `X`: output numeric value in hexadecimal notation -/// - `s`: -/// - for pointer-to-many and C pointers of u8, print as a C-string using zero-termination -/// - for slices of u8, print the entire slice as a string without zero-termination -/// - `e`: output floating point value in scientific notation -/// - `d`: output numeric value in decimal notation -/// - `b`: output integer value in binary notation -/// - `o`: output integer value in octal notation -/// - `c`: output integer as an ASCII character. Integer type must have 8 bits at max. -/// - `u`: output integer as an UTF-8 sequence. Integer type must have 21 bits at max. -/// - `?`: output optional value as either the unwrapped value, or `null`; may be followed by a format specifier for the underlying value. -/// - `!`: output error union value as either the unwrapped value, or the formatted error value; may be followed by a format specifier for the underlying value. -/// - `*`: output the address of the value instead of the value itself. -/// - `any`: output a value of any type using its default format. -/// -/// If a formatted user type contains a function of the type -/// ``` -/// pub fn format(value: ?, comptime fmt: []const u8, options: std.fmt.FormatOptions, writer: anytype) !void -/// ``` -/// with `?` being the type formatted, this function will be called instead of the default implementation. -/// This allows user types to be formatted in a logical manner instead of dumping all fields of the type. -/// -/// A user type may be a `struct`, `vector`, `union` or `enum` type. -/// -/// To print literal curly braces, escape them by writing them twice, e.g. `{{` or `}}`. 
-pub fn format( - writer: anytype, - comptime fmt: []const u8, - args: anytype, -) !void { - const ArgsType = @TypeOf(args); - const args_type_info = @typeInfo(ArgsType); - if (args_type_info != .@"struct") { - @compileError("expected tuple or struct argument, found " ++ @typeName(ArgsType)); - } +pub const Number = struct { + mode: Mode = .decimal, + /// Affects hex digits as well as floating point "inf"/"INF". + case: Case = .lower, + precision: ?usize = null, + width: ?usize = null, + alignment: Alignment = default_alignment, + fill: u8 = default_fill_char, - const fields_info = args_type_info.@"struct".fields; - if (fields_info.len > max_format_args) { - @compileError("32 arguments max are supported per format call"); - } + pub const Mode = enum { + decimal, + binary, + octal, + hex, + scientific, - @setEvalBranchQuota(2000000); - comptime var arg_state: ArgState = .{ .args_len = fields_info.len }; - comptime var i = 0; - comptime var literal: []const u8 = ""; - inline while (true) { - const start_index = i; - - inline while (i < fmt.len) : (i += 1) { - switch (fmt[i]) { - '{', '}' => break, - else => {}, - } + pub fn base(mode: Mode) ?u8 { + return switch (mode) { + .decimal => 10, + .binary => 2, + .octal => 8, + .hex => 16, + .scientific => null, + }; } + }; +}; - comptime var end_index = i; - comptime var unescape_brace = false; - - // Handle {{ and }}, those are un-escaped as single braces - if (i + 1 < fmt.len and fmt[i + 1] == fmt[i]) { - unescape_brace = true; - // Make the first brace part of the literal... 
- end_index += 1; - // ...and skip both - i += 2; - } - - literal = literal ++ fmt[start_index..end_index]; - - // We've already skipped the other brace, restart the loop - if (unescape_brace) continue; - - // Write out the literal - if (literal.len != 0) { - try writer.writeAll(literal); - literal = ""; - } - - if (i >= fmt.len) break; - - if (fmt[i] == '}') { - @compileError("missing opening {"); - } - - // Get past the { - comptime assert(fmt[i] == '{'); - i += 1; - - const fmt_begin = i; - // Find the closing brace - inline while (i < fmt.len and fmt[i] != '}') : (i += 1) {} - const fmt_end = i; - - if (i >= fmt.len) { - @compileError("missing closing }"); - } - - // Get past the } - comptime assert(fmt[i] == '}'); - i += 1; - - const placeholder = comptime Placeholder.parse(fmt[fmt_begin..fmt_end].*); - const arg_pos = comptime switch (placeholder.arg) { - .none => null, - .number => |pos| pos, - .named => |arg_name| meta.fieldIndex(ArgsType, arg_name) orelse - @compileError("no argument with name '" ++ arg_name ++ "'"), - }; - - const width = switch (placeholder.width) { - .none => null, - .number => |v| v, - .named => |arg_name| blk: { - const arg_i = comptime meta.fieldIndex(ArgsType, arg_name) orelse - @compileError("no argument with name '" ++ arg_name ++ "'"); - _ = comptime arg_state.nextArg(arg_i) orelse @compileError("too few arguments"); - break :blk @field(args, arg_name); - }, - }; - - const precision = switch (placeholder.precision) { - .none => null, - .number => |v| v, - .named => |arg_name| blk: { - const arg_i = comptime meta.fieldIndex(ArgsType, arg_name) orelse - @compileError("no argument with name '" ++ arg_name ++ "'"); - _ = comptime arg_state.nextArg(arg_i) orelse @compileError("too few arguments"); - break :blk @field(args, arg_name); - }, - }; - - const arg_to_print = comptime arg_state.nextArg(arg_pos) orelse - @compileError("too few arguments"); - - try formatType( - @field(args, fields_info[arg_to_print].name), - 
placeholder.specifier_arg, - FormatOptions{ - .fill = placeholder.fill, - .alignment = placeholder.alignment, - .width = width, - .precision = precision, - }, - writer, - std.options.fmt_max_depth, - ); - } - - if (comptime arg_state.hasUnusedArgs()) { - const missing_count = arg_state.args_len - @popCount(arg_state.used_args); - switch (missing_count) { - 0 => unreachable, - 1 => @compileError("unused argument in '" ++ fmt ++ "'"), - else => @compileError(comptimePrint("{d}", .{missing_count}) ++ " unused arguments in '" ++ fmt ++ "'"), - } - } -} - -fn cacheString(str: anytype) []const u8 { - return &str; +/// Deprecated in favor of `Writer.print`. +pub fn format(writer: anytype, comptime fmt: []const u8, args: anytype) !void { + var adapter = writer.adaptToNewApi(); + return adapter.new_interface.print(fmt, args) catch |err| switch (err) { + error.WriteFailed => return adapter.err.?, + }; } pub const Placeholder = struct { specifier_arg: []const u8, - fill: u21, + fill: u8, alignment: Alignment, arg: Specifier, width: Specifier, precision: Specifier, - pub fn parse(comptime str: anytype) Placeholder { - const view = std.unicode.Utf8View.initComptime(&str); - comptime var parser = Parser{ - .iter = view.iterator(), - }; - - // Parse the positional argument number - const arg = comptime parser.specifier() catch |err| - @compileError(@errorName(err)); - - // Parse the format specifier - const specifier_arg = comptime parser.until(':'); - - // Skip the colon, if present - if (comptime parser.char()) |ch| { - if (ch != ':') { - @compileError("expected : or }, found '" ++ unicode.utf8EncodeComptime(ch) ++ "'"); - } + pub fn parse(comptime bytes: []const u8) Placeholder { + var parser: Parser = .{ .bytes = bytes, .i = 0 }; + const arg = parser.specifier() catch |err| @compileError(@errorName(err)); + const specifier_arg = parser.until(':'); + if (parser.char()) |b| { + if (b != ':') @compileError("expected : or }, found '" ++ &[1]u8{b} ++ "'"); } - // Parse the fill 
character, if present. - // When the width field is also specified, the fill character must + // Parse the fill byte, if present. + // + // When the width field is also specified, the fill byte must // be followed by an alignment specifier, unless it's '0' (zero) - // (in which case it's handled as part of the width specifier) - var fill: ?u21 = comptime if (parser.peek(1)) |ch| - switch (ch) { + // (in which case it's handled as part of the width specifier). + var fill: ?u8 = if (parser.peek(1)) |b| + switch (b) { '<', '^', '>' => parser.char(), else => null, } @@ -259,8 +116,8 @@ pub const Placeholder = struct { null; // Parse the alignment parameter - const alignment: ?Alignment = comptime if (parser.peek(0)) |ch| init: { - switch (ch) { + const alignment: ?Alignment = if (parser.peek(0)) |b| init: { + switch (b) { '<', '^', '>' => { // consume the character break :init switch (parser.char().?) { @@ -276,30 +133,26 @@ pub const Placeholder = struct { // When none of the fill character and the alignment specifier have // been provided, check whether the width starts with a zero. if (fill == null and alignment == null) { - fill = comptime if (parser.peek(0) == '0') '0' else null; + fill = if (parser.peek(0) == '0') '0' else null; } // Parse the width parameter - const width = comptime parser.specifier() catch |err| - @compileError(@errorName(err)); + const width = parser.specifier() catch |err| @compileError(@errorName(err)); // Skip the dot, if present - if (comptime parser.char()) |ch| { - if (ch != '.') { - @compileError("expected . or }, found '" ++ unicode.utf8EncodeComptime(ch) ++ "'"); - } + if (parser.char()) |b| { + if (b != '.') @compileError("expected . 
or }, found '" ++ &[1]u8{b} ++ "'"); } // Parse the precision parameter - const precision = comptime parser.specifier() catch |err| - @compileError(@errorName(err)); + const precision = parser.specifier() catch |err| @compileError(@errorName(err)); - if (comptime parser.char()) |ch| { - @compileError("extraneous trailing character '" ++ unicode.utf8EncodeComptime(ch) ++ "'"); - } + if (parser.char()) |b| @compileError("extraneous trailing character '" ++ &[1]u8{b} ++ "'"); - return Placeholder{ - .specifier_arg = cacheString(specifier_arg[0..specifier_arg.len].*), + const specifier_array = specifier_arg[0..specifier_arg.len].*; + + return .{ + .specifier_arg = &specifier_array, .fill = fill orelse default_fill_char, .alignment = alignment orelse default_alignment, .arg = arg, @@ -320,93 +173,64 @@ pub const Specifier = union(enum) { /// Allows to implement formatters compatible with std.fmt without replicating /// the standard library behavior. pub const Parser = struct { - iter: std.unicode.Utf8Iterator, + bytes: []const u8, + i: usize, - // Returns a decimal number or null if the current character is not a - // digit pub fn number(self: *@This()) ?usize { var r: ?usize = null; - - while (self.peek(0)) |code_point| { - switch (code_point) { + while (self.peek(0)) |byte| { + switch (byte) { '0'...'9' => { if (r == null) r = 0; r.? *= 10; - r.? += code_point - '0'; + r.? 
+= byte - '0'; }, else => break, } - _ = self.iter.nextCodepoint(); + self.i += 1; } - return r; } - // Returns a substring of the input starting from the current position - // and ending where `ch` is found or until the end if not found - pub fn until(self: *@This(), ch: u21) []const u8 { - const start = self.iter.i; - while (self.peek(0)) |code_point| { - if (code_point == ch) - break; - _ = self.iter.nextCodepoint(); - } - return self.iter.bytes[start..self.iter.i]; + pub fn until(self: *@This(), delimiter: u8) []const u8 { + const start = self.i; + self.i = std.mem.indexOfScalarPos(u8, self.bytes, self.i, delimiter) orelse self.bytes.len; + return self.bytes[start..self.i]; } - // Returns the character pointed to by the iterator if available, or - // null otherwise - pub fn char(self: *@This()) ?u21 { - if (self.iter.nextCodepoint()) |code_point| { - return code_point; - } - return null; + pub fn char(self: *@This()) ?u8 { + const i = self.i; + if (self.bytes.len - i == 0) return null; + self.i = i + 1; + return self.bytes[i]; } - // Returns true if the iterator points to an existing character and - // false otherwise - pub fn maybe(self: *@This(), val: u21) bool { - if (self.peek(0) == val) { - _ = self.iter.nextCodepoint(); + pub fn maybe(self: *@This(), byte: u8) bool { + if (self.peek(0) == byte) { + self.i += 1; return true; } return false; } - // Returns a decimal number or null if the current character is not a - // digit pub fn specifier(self: *@This()) !Specifier { if (self.maybe('[')) { const arg_name = self.until(']'); - - if (!self.maybe(']')) - return @field(anyerror, "Expected closing ]"); - - return Specifier{ .named = arg_name }; + if (!self.maybe(']')) return error.@"Expected closing ]"; + return .{ .named = arg_name }; } - if (self.number()) |i| - return Specifier{ .number = i }; - - return Specifier{ .none = {} }; + if (self.number()) |i| return .{ .number = i }; + return .{ .none = {} }; } - // Returns the n-th next character or null if 
that's past the end - pub fn peek(self: *@This(), n: usize) ?u21 { - const original_i = self.iter.i; - defer self.iter.i = original_i; - - var i: usize = 0; - var code_point: ?u21 = null; - while (i <= n) : (i += 1) { - code_point = self.iter.nextCodepoint(); - if (code_point == null) return null; - } - return code_point; + pub fn peek(self: *@This(), i: usize) ?u8 { + const peek_index = self.i + i; + if (peek_index >= self.bytes.len) return null; + return self.bytes[peek_index]; } }; pub const ArgSetType = u32; -const max_format_args = @typeInfo(ArgSetType).int.bits; pub const ArgState = struct { next_arg: usize = 0, @@ -434,822 +258,12 @@ pub const ArgState = struct { } }; -pub fn formatAddress(value: anytype, options: FormatOptions, writer: anytype) @TypeOf(writer).Error!void { - _ = options; - const T = @TypeOf(value); - - switch (@typeInfo(T)) { - .pointer => |info| { - try writer.writeAll(@typeName(info.child) ++ "@"); - if (info.size == .slice) - try formatInt(@intFromPtr(value.ptr), 16, .lower, FormatOptions{}, writer) - else - try formatInt(@intFromPtr(value), 16, .lower, FormatOptions{}, writer); - return; - }, - .optional => |info| { - if (@typeInfo(info.child) == .pointer) { - try writer.writeAll(@typeName(info.child) ++ "@"); - try formatInt(@intFromPtr(value), 16, .lower, FormatOptions{}, writer); - return; - } - }, - else => {}, - } - - @compileError("cannot format non-pointer type " ++ @typeName(T) ++ " with * specifier"); -} - -// This ANY const is a workaround for: https://github.com/ziglang/zig/issues/7948 -const ANY = "any"; - -pub fn defaultSpec(comptime T: type) [:0]const u8 { - switch (@typeInfo(T)) { - .array, .vector => return ANY, - .pointer => |ptr_info| switch (ptr_info.size) { - .one => switch (@typeInfo(ptr_info.child)) { - .array => return ANY, - else => {}, - }, - .many, .c => return "*", - .slice => return ANY, - }, - .optional => |info| return "?" ++ defaultSpec(info.child), - .error_union => |info| return "!" 
++ defaultSpec(info.payload), - else => {}, - } - return ""; -} - -fn stripOptionalOrErrorUnionSpec(comptime fmt: []const u8) []const u8 { - return if (std.mem.eql(u8, fmt[1..], ANY)) - ANY - else - fmt[1..]; -} - -pub fn invalidFmtError(comptime fmt: []const u8, value: anytype) void { - @compileError("invalid format string '" ++ fmt ++ "' for type '" ++ @typeName(@TypeOf(value)) ++ "'"); -} - -pub fn formatType( - value: anytype, - comptime fmt: []const u8, - options: FormatOptions, - writer: anytype, - max_depth: usize, -) @TypeOf(writer).Error!void { - const T = @TypeOf(value); - const actual_fmt = comptime if (std.mem.eql(u8, fmt, ANY)) - defaultSpec(T) - else if (fmt.len != 0 and (fmt[0] == '?' or fmt[0] == '!')) switch (@typeInfo(T)) { - .optional, .error_union => fmt, - else => stripOptionalOrErrorUnionSpec(fmt), - } else fmt; - - if (comptime std.mem.eql(u8, actual_fmt, "*")) { - return formatAddress(value, options, writer); - } - - if (std.meta.hasMethod(T, "format")) { - return try value.format(actual_fmt, options, writer); - } - - switch (@typeInfo(T)) { - .comptime_int, .int, .comptime_float, .float => { - return formatValue(value, actual_fmt, options, writer); - }, - .void => { - if (actual_fmt.len != 0) invalidFmtError(fmt, value); - return formatBuf("void", options, writer); - }, - .bool => { - if (actual_fmt.len != 0) invalidFmtError(fmt, value); - return formatBuf(if (value) "true" else "false", options, writer); - }, - .optional => { - if (actual_fmt.len == 0 or actual_fmt[0] != '?') - @compileError("cannot format optional without a specifier (i.e. {?} or {any})"); - const remaining_fmt = comptime stripOptionalOrErrorUnionSpec(actual_fmt); - if (value) |payload| { - return formatType(payload, remaining_fmt, options, writer, max_depth); - } else { - return formatBuf("null", options, writer); - } - }, - .error_union => { - if (actual_fmt.len == 0 or actual_fmt[0] != '!') - @compileError("cannot format error union without a specifier (i.e. 
{!} or {any})"); - const remaining_fmt = comptime stripOptionalOrErrorUnionSpec(actual_fmt); - if (value) |payload| { - return formatType(payload, remaining_fmt, options, writer, max_depth); - } else |err| { - return formatType(err, "", options, writer, max_depth); - } - }, - .error_set => { - if (actual_fmt.len != 0) invalidFmtError(fmt, value); - try writer.writeAll("error."); - return writer.writeAll(@errorName(value)); - }, - .@"enum" => |enumInfo| { - try writer.writeAll(@typeName(T)); - if (enumInfo.is_exhaustive) { - if (actual_fmt.len != 0) invalidFmtError(fmt, value); - try writer.writeAll("."); - try writer.writeAll(@tagName(value)); - return; - } - - // Use @tagName only if value is one of known fields - @setEvalBranchQuota(3 * enumInfo.fields.len); - inline for (enumInfo.fields) |enumField| { - if (@intFromEnum(value) == enumField.value) { - try writer.writeAll("."); - try writer.writeAll(@tagName(value)); - return; - } - } - - try writer.writeAll("("); - try formatType(@intFromEnum(value), actual_fmt, options, writer, max_depth); - try writer.writeAll(")"); - }, - .@"union" => |info| { - if (actual_fmt.len != 0) invalidFmtError(fmt, value); - try writer.writeAll(@typeName(T)); - if (max_depth == 0) { - return writer.writeAll("{ ... }"); - } - if (info.tag_type) |UnionTagType| { - try writer.writeAll("{ ."); - try writer.writeAll(@tagName(@as(UnionTagType, value))); - try writer.writeAll(" = "); - inline for (info.fields) |u_field| { - if (value == @field(UnionTagType, u_field.name)) { - try formatType(@field(value, u_field.name), ANY, options, writer, max_depth - 1); - } - } - try writer.writeAll(" }"); - } else { - try format(writer, "@{x}", .{@intFromPtr(&value)}); - } - }, - .@"struct" => |info| { - if (actual_fmt.len != 0) invalidFmtError(fmt, value); - if (info.is_tuple) { - // Skip the type and field names when formatting tuples. - if (max_depth == 0) { - return writer.writeAll("{ ... 
}"); - } - try writer.writeAll("{"); - inline for (info.fields, 0..) |f, i| { - if (i == 0) { - try writer.writeAll(" "); - } else { - try writer.writeAll(", "); - } - try formatType(@field(value, f.name), ANY, options, writer, max_depth - 1); - } - return writer.writeAll(" }"); - } - try writer.writeAll(@typeName(T)); - if (max_depth == 0) { - return writer.writeAll("{ ... }"); - } - try writer.writeAll("{"); - inline for (info.fields, 0..) |f, i| { - if (i == 0) { - try writer.writeAll(" ."); - } else { - try writer.writeAll(", ."); - } - try writer.writeAll(f.name); - try writer.writeAll(" = "); - try formatType(@field(value, f.name), ANY, options, writer, max_depth - 1); - } - try writer.writeAll(" }"); - }, - .pointer => |ptr_info| switch (ptr_info.size) { - .one => switch (@typeInfo(ptr_info.child)) { - .array, .@"enum", .@"union", .@"struct" => { - return formatType(value.*, actual_fmt, options, writer, max_depth); - }, - else => return format(writer, "{s}@{x}", .{ @typeName(ptr_info.child), @intFromPtr(value) }), - }, - .many, .c => { - if (actual_fmt.len == 0) - @compileError("cannot format pointer without a specifier (i.e. {s} or {*})"); - if (ptr_info.sentinel() != null) { - return formatType(mem.span(value), actual_fmt, options, writer, max_depth); - } - if (actual_fmt[0] == 's' and ptr_info.child == u8) { - return formatBuf(mem.span(value), options, writer); - } - invalidFmtError(fmt, value); - }, - .slice => { - if (actual_fmt.len == 0) - @compileError("cannot format slice without a specifier (i.e. {s} or {any})"); - if (max_depth == 0) { - return writer.writeAll("{ ... }"); - } - if (actual_fmt[0] == 's' and ptr_info.child == u8) { - return formatBuf(value, options, writer); - } - try writer.writeAll("{ "); - for (value, 0..) 
|elem, i| { - try formatType(elem, actual_fmt, options, writer, max_depth - 1); - if (i != value.len - 1) { - try writer.writeAll(", "); - } - } - try writer.writeAll(" }"); - }, - }, - .array => |info| { - if (actual_fmt.len == 0) - @compileError("cannot format array without a specifier (i.e. {s} or {any})"); - if (max_depth == 0) { - return writer.writeAll("{ ... }"); - } - if (actual_fmt[0] == 's' and info.child == u8) { - return formatBuf(&value, options, writer); - } - try writer.writeAll("{ "); - for (value, 0..) |elem, i| { - try formatType(elem, actual_fmt, options, writer, max_depth - 1); - if (i < value.len - 1) { - try writer.writeAll(", "); - } - } - try writer.writeAll(" }"); - }, - .vector => |info| { - if (max_depth == 0) { - return writer.writeAll("{ ... }"); - } - try writer.writeAll("{ "); - var i: usize = 0; - while (i < info.len) : (i += 1) { - try formatType(value[i], actual_fmt, options, writer, max_depth - 1); - if (i < info.len - 1) { - try writer.writeAll(", "); - } - } - try writer.writeAll(" }"); - }, - .@"fn" => @compileError("unable to format function body type, use '*const " ++ @typeName(T) ++ "' for a function pointer type"), - .type => { - if (actual_fmt.len != 0) invalidFmtError(fmt, value); - return formatBuf(@typeName(value), options, writer); - }, - .enum_literal => { - if (actual_fmt.len != 0) invalidFmtError(fmt, value); - const buffer = [_]u8{'.'} ++ @tagName(value); - return formatBuf(buffer, options, writer); - }, - .null => { - if (actual_fmt.len != 0) invalidFmtError(fmt, value); - return formatBuf("null", options, writer); - }, - else => @compileError("unable to format type '" ++ @typeName(T) ++ "'"), - } -} - -fn formatValue( - value: anytype, - comptime fmt: []const u8, - options: FormatOptions, - writer: anytype, -) !void { - const T = @TypeOf(value); - switch (@typeInfo(T)) { - .float, .comptime_float => return formatFloatValue(value, fmt, options, writer), - .int, .comptime_int => return formatIntValue(value, fmt, 
options, writer), - .bool => return formatBuf(if (value) "true" else "false", options, writer), - else => comptime unreachable, - } -} - -pub fn formatIntValue( - value: anytype, - comptime fmt: []const u8, - options: FormatOptions, - writer: anytype, -) !void { - comptime var base = 10; - comptime var case: Case = .lower; - - const int_value = if (@TypeOf(value) == comptime_int) blk: { - const Int = math.IntFittingRange(value, value); - break :blk @as(Int, value); - } else value; - - if (fmt.len == 0 or comptime std.mem.eql(u8, fmt, "d")) { - base = 10; - case = .lower; - } else if (comptime std.mem.eql(u8, fmt, "c")) { - if (@typeInfo(@TypeOf(int_value)).int.bits <= 8) { - return formatAsciiChar(@as(u8, int_value), options, writer); - } else { - @compileError("cannot print integer that is larger than 8 bits as an ASCII character"); - } - } else if (comptime std.mem.eql(u8, fmt, "u")) { - if (@typeInfo(@TypeOf(int_value)).int.bits <= 21) { - return formatUnicodeCodepoint(@as(u21, int_value), options, writer); - } else { - @compileError("cannot print integer that is larger than 21 bits as an UTF-8 sequence"); - } - } else if (comptime std.mem.eql(u8, fmt, "b")) { - base = 2; - case = .lower; - } else if (comptime std.mem.eql(u8, fmt, "x")) { - base = 16; - case = .lower; - } else if (comptime std.mem.eql(u8, fmt, "X")) { - base = 16; - case = .upper; - } else if (comptime std.mem.eql(u8, fmt, "o")) { - base = 8; - case = .lower; - } else { - invalidFmtError(fmt, value); - } - - return formatInt(int_value, base, case, options, writer); -} - -pub const format_float = @import("fmt/format_float.zig"); -pub const formatFloat = format_float.formatFloat; -pub const FormatFloatError = format_float.FormatError; - -fn formatFloatValue( - value: anytype, - comptime fmt: []const u8, - options: FormatOptions, - writer: anytype, -) !void { - var buf: [format_float.bufferSize(.decimal, f64)]u8 = undefined; - - if (fmt.len == 0 or comptime std.mem.eql(u8, fmt, "e")) { - const s = 
formatFloat(&buf, value, .{ .mode = .scientific, .precision = options.precision }) catch |err| switch (err) { - error.BufferTooSmall => "(float)", - }; - return formatBuf(s, options, writer); - } else if (comptime std.mem.eql(u8, fmt, "d")) { - const s = formatFloat(&buf, value, .{ .mode = .decimal, .precision = options.precision }) catch |err| switch (err) { - error.BufferTooSmall => "(float)", - }; - return formatBuf(s, options, writer); - } else if (comptime std.mem.eql(u8, fmt, "x")) { - var buf_stream = std.io.fixedBufferStream(&buf); - formatFloatHexadecimal(value, options, buf_stream.writer()) catch |err| switch (err) { - error.NoSpaceLeft => unreachable, - }; - return formatBuf(buf_stream.getWritten(), options, writer); - } else { - invalidFmtError(fmt, value); - } -} - -test { - _ = &format_float; -} - -pub const Case = enum { lower, upper }; - -fn SliceHex(comptime case: Case) type { - const charset = "0123456789" ++ if (case == .upper) "ABCDEF" else "abcdef"; - - return struct { - pub fn format( - bytes: []const u8, - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = fmt; - _ = options; - var buf: [2]u8 = undefined; - - for (bytes) |c| { - buf[0] = charset[c >> 4]; - buf[1] = charset[c & 15]; - try writer.writeAll(&buf); - } - } - }; -} - -const formatSliceHexLower = SliceHex(.lower).format; -const formatSliceHexUpper = SliceHex(.upper).format; - -/// Return a Formatter for a []const u8 where every byte is formatted as a pair -/// of lowercase hexadecimal digits. -pub fn fmtSliceHexLower(bytes: []const u8) std.fmt.Formatter(formatSliceHexLower) { - return .{ .data = bytes }; -} - -/// Return a Formatter for a []const u8 where every byte is formatted as pair -/// of uppercase hexadecimal digits. 
-pub fn fmtSliceHexUpper(bytes: []const u8) std.fmt.Formatter(formatSliceHexUpper) { - return .{ .data = bytes }; -} - -fn SliceEscape(comptime case: Case) type { - const charset = "0123456789" ++ if (case == .upper) "ABCDEF" else "abcdef"; - - return struct { - pub fn format( - bytes: []const u8, - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = fmt; - _ = options; - var buf: [4]u8 = undefined; - - buf[0] = '\\'; - buf[1] = 'x'; - - for (bytes) |c| { - if (std.ascii.isPrint(c)) { - try writer.writeByte(c); - } else { - buf[2] = charset[c >> 4]; - buf[3] = charset[c & 15]; - try writer.writeAll(&buf); - } - } - } - }; -} - -const formatSliceEscapeLower = SliceEscape(.lower).format; -const formatSliceEscapeUpper = SliceEscape(.upper).format; - -/// Return a Formatter for a []const u8 where every non-printable ASCII -/// character is escaped as \xNN, where NN is the character in lowercase -/// hexadecimal notation. -pub fn fmtSliceEscapeLower(bytes: []const u8) std.fmt.Formatter(formatSliceEscapeLower) { - return .{ .data = bytes }; -} - -/// Return a Formatter for a []const u8 where every non-printable ASCII -/// character is escaped as \xNN, where NN is the character in uppercase -/// hexadecimal notation. -pub fn fmtSliceEscapeUpper(bytes: []const u8) std.fmt.Formatter(formatSliceEscapeUpper) { - return .{ .data = bytes }; -} - -fn Size(comptime base: comptime_int) type { - return struct { - fn format( - value: u64, - comptime fmt: []const u8, - options: FormatOptions, - writer: anytype, - ) !void { - _ = fmt; - if (value == 0) { - return formatBuf("0B", options, writer); - } - // The worst case in terms of space needed is 32 bytes + 3 for the suffix. 
- var buf: [format_float.min_buffer_size + 3]u8 = undefined; - - const mags_si = " kMGTPEZY"; - const mags_iec = " KMGTPEZY"; - - const log2 = math.log2(value); - const magnitude = switch (base) { - 1000 => @min(log2 / comptime math.log2(1000), mags_si.len - 1), - 1024 => @min(log2 / 10, mags_iec.len - 1), - else => unreachable, - }; - const new_value = lossyCast(f64, value) / math.pow(f64, lossyCast(f64, base), lossyCast(f64, magnitude)); - const suffix = switch (base) { - 1000 => mags_si[magnitude], - 1024 => mags_iec[magnitude], - else => unreachable, - }; - - const s = switch (magnitude) { - 0 => buf[0..formatIntBuf(&buf, value, 10, .lower, .{})], - else => formatFloat(&buf, new_value, .{ .mode = .decimal, .precision = options.precision }) catch |err| switch (err) { - error.BufferTooSmall => unreachable, - }, - }; - - var i: usize = s.len; - if (suffix == ' ') { - buf[i] = 'B'; - i += 1; - } else switch (base) { - 1000 => { - buf[i..][0..2].* = [_]u8{ suffix, 'B' }; - i += 2; - }, - 1024 => { - buf[i..][0..3].* = [_]u8{ suffix, 'i', 'B' }; - i += 3; - }, - else => unreachable, - } - - return formatBuf(buf[0..i], options, writer); - } - }; -} -const formatSizeDec = Size(1000).format; -const formatSizeBin = Size(1024).format; - -/// Return a Formatter for a u64 value representing a file size. -/// This formatter represents the number as multiple of 1000 and uses the SI -/// measurement units (kB, MB, GB, ...). -/// Format option `precision` is ignored when `value` is less than 1kB -pub fn fmtIntSizeDec(value: u64) std.fmt.Formatter(formatSizeDec) { - return .{ .data = value }; -} - -/// Return a Formatter for a u64 value representing a file size. -/// This formatter represents the number as multiple of 1024 and uses the IEC -/// measurement units (KiB, MiB, GiB, ...). 
-/// Format option `precision` is ignored when `value` is less than 1KiB -pub fn fmtIntSizeBin(value: u64) std.fmt.Formatter(formatSizeBin) { - return .{ .data = value }; -} - -fn checkTextFmt(comptime fmt: []const u8) void { - if (fmt.len != 1) - @compileError("unsupported format string '" ++ fmt ++ "' when formatting text"); - switch (fmt[0]) { - // Example of deprecation: - // '[deprecated_specifier]' => @compileError("specifier '[deprecated_specifier]' has been deprecated, wrap your argument in `std.some_function` instead"), - 'x' => @compileError("specifier 'x' has been deprecated, wrap your argument in std.fmt.fmtSliceHexLower instead"), - 'X' => @compileError("specifier 'X' has been deprecated, wrap your argument in std.fmt.fmtSliceHexUpper instead"), - else => {}, - } -} - -pub fn formatText( - bytes: []const u8, - comptime fmt: []const u8, - options: FormatOptions, - writer: anytype, -) !void { - comptime checkTextFmt(fmt); - return formatBuf(bytes, options, writer); -} - -pub fn formatAsciiChar( - c: u8, - options: FormatOptions, - writer: anytype, -) !void { - return formatBuf(@as(*const [1]u8, &c), options, writer); -} - -pub fn formatUnicodeCodepoint( - c: u21, - options: FormatOptions, - writer: anytype, -) !void { - var buf: [4]u8 = undefined; - const len = unicode.utf8Encode(c, &buf) catch |err| switch (err) { - error.Utf8CannotEncodeSurrogateHalf, error.CodepointTooLarge => { - return formatBuf(&unicode.utf8EncodeComptime(unicode.replacement_character), options, writer); - }, - }; - return formatBuf(buf[0..len], options, writer); -} - -pub fn formatBuf( - buf: []const u8, - options: FormatOptions, - writer: anytype, -) !void { - if (options.width) |min_width| { - // In case of error assume the buffer content is ASCII-encoded - const width = unicode.utf8CountCodepoints(buf) catch buf.len; - const padding = if (width < min_width) min_width - width else 0; - - if (padding == 0) - return writer.writeAll(buf); - - var fill_buffer: [4]u8 = undefined; - 
const fill_utf8 = if (unicode.utf8Encode(options.fill, &fill_buffer)) |len| - fill_buffer[0..len] - else |err| switch (err) { - error.Utf8CannotEncodeSurrogateHalf, - error.CodepointTooLarge, - => &unicode.utf8EncodeComptime(unicode.replacement_character), - }; - switch (options.alignment) { - .left => { - try writer.writeAll(buf); - try writer.writeBytesNTimes(fill_utf8, padding); - }, - .center => { - const left_padding = padding / 2; - const right_padding = (padding + 1) / 2; - try writer.writeBytesNTimes(fill_utf8, left_padding); - try writer.writeAll(buf); - try writer.writeBytesNTimes(fill_utf8, right_padding); - }, - .right => { - try writer.writeBytesNTimes(fill_utf8, padding); - try writer.writeAll(buf); - }, - } - } else { - // Fast path, avoid counting the number of codepoints - try writer.writeAll(buf); - } -} - -pub fn formatFloatHexadecimal( - value: anytype, - options: FormatOptions, - writer: anytype, -) !void { - if (math.signbit(value)) { - try writer.writeByte('-'); - } - if (math.isNan(value)) { - return writer.writeAll("nan"); - } - if (math.isInf(value)) { - return writer.writeAll("inf"); - } - - const T = @TypeOf(value); - const TU = std.meta.Int(.unsigned, @bitSizeOf(T)); - - const mantissa_bits = math.floatMantissaBits(T); - const fractional_bits = math.floatFractionalBits(T); - const exponent_bits = math.floatExponentBits(T); - const mantissa_mask = (1 << mantissa_bits) - 1; - const exponent_mask = (1 << exponent_bits) - 1; - const exponent_bias = (1 << (exponent_bits - 1)) - 1; - - const as_bits = @as(TU, @bitCast(value)); - var mantissa = as_bits & mantissa_mask; - var exponent: i32 = @as(u16, @truncate((as_bits >> mantissa_bits) & exponent_mask)); - - const is_denormal = exponent == 0 and mantissa != 0; - const is_zero = exponent == 0 and mantissa == 0; - - if (is_zero) { - // Handle this case here to simplify the logic below. 
- try writer.writeAll("0x0"); - if (options.precision) |precision| { - if (precision > 0) { - try writer.writeAll("."); - try writer.writeByteNTimes('0', precision); - } - } else { - try writer.writeAll(".0"); - } - try writer.writeAll("p0"); - return; - } - - if (is_denormal) { - // Adjust the exponent for printing. - exponent += 1; - } else { - if (fractional_bits == mantissa_bits) - mantissa |= 1 << fractional_bits; // Add the implicit integer bit. - } - - const mantissa_digits = (fractional_bits + 3) / 4; - // Fill in zeroes to round the fraction width to a multiple of 4. - mantissa <<= mantissa_digits * 4 - fractional_bits; - - if (options.precision) |precision| { - // Round if needed. - if (precision < mantissa_digits) { - // We always have at least 4 extra bits. - var extra_bits = (mantissa_digits - precision) * 4; - // The result LSB is the Guard bit, we need two more (Round and - // Sticky) to round the value. - while (extra_bits > 2) { - mantissa = (mantissa >> 1) | (mantissa & 1); - extra_bits -= 1; - } - // Round to nearest, tie to even. - mantissa |= @intFromBool(mantissa & 0b100 != 0); - mantissa += 1; - // Drop the excess bits. - mantissa >>= 2; - // Restore the alignment. - mantissa <<= @as(math.Log2Int(TU), @intCast((mantissa_digits - precision) * 4)); - - const overflow = mantissa & (1 << 1 + mantissa_digits * 4) != 0; - // Prefer a normalized result in case of overflow. - if (overflow) { - mantissa >>= 1; - exponent += 1; - } - } - } - - // +1 for the decimal part. - var buf: [1 + mantissa_digits]u8 = undefined; - _ = formatIntBuf(&buf, mantissa, 16, .lower, .{ .fill = '0', .width = 1 + mantissa_digits }); - - try writer.writeAll("0x"); - try writer.writeByte(buf[0]); - const trimmed = mem.trimEnd(u8, buf[1..], "0"); - if (options.precision) |precision| { - if (precision > 0) try writer.writeAll("."); - } else if (trimmed.len > 0) { - try writer.writeAll("."); - } - try writer.writeAll(trimmed); - // Add trailing zeros if explicitly requested. 
- if (options.precision) |precision| if (precision > 0) { - if (precision > trimmed.len) - try writer.writeByteNTimes('0', precision - trimmed.len); - }; - try writer.writeAll("p"); - try formatInt(exponent - exponent_bias, 10, .lower, .{}, writer); -} - -pub fn formatInt( - value: anytype, - base: u8, - case: Case, - options: FormatOptions, - writer: anytype, -) !void { - assert(base >= 2); - - const int_value = if (@TypeOf(value) == comptime_int) blk: { - const Int = math.IntFittingRange(value, value); - break :blk @as(Int, value); - } else value; - - const value_info = @typeInfo(@TypeOf(int_value)).int; - - // The type must have the same size as `base` or be wider in order for the - // division to work - const min_int_bits = comptime @max(value_info.bits, 8); - const MinInt = std.meta.Int(.unsigned, min_int_bits); - - const abs_value = @abs(int_value); - // The worst case in terms of space needed is base 2, plus 1 for the sign - var buf: [1 + @max(@as(comptime_int, value_info.bits), 1)]u8 = undefined; - - var a: MinInt = abs_value; - var index: usize = buf.len; - - if (base == 10) { - while (a >= 100) : (a = @divTrunc(a, 100)) { - index -= 2; - buf[index..][0..2].* = digits2(@intCast(a % 100)); - } - - if (a < 10) { - index -= 1; - buf[index] = '0' + @as(u8, @intCast(a)); - } else { - index -= 2; - buf[index..][0..2].* = digits2(@intCast(a)); - } - } else { - while (true) { - const digit = a % base; - index -= 1; - buf[index] = digitToChar(@intCast(digit), case); - a /= base; - if (a == 0) break; - } - } - - if (value_info.signedness == .signed) { - if (value < 0) { - // Negative integer - index -= 1; - buf[index] = '-'; - } else if (options.width == null or options.width.? 
== 0) { - // Positive integer, omit the plus sign - } else { - // Positive integer - index -= 1; - buf[index] = '+'; - } - } - - return formatBuf(buf[index..], options, writer); -} - -pub fn formatIntBuf(out_buf: []u8, value: anytype, base: u8, case: Case, options: FormatOptions) usize { - var fbs = std.io.fixedBufferStream(out_buf); - formatInt(value, base, case, options, fbs.writer()) catch unreachable; - return fbs.pos; +/// Asserts the rendered integer value fits in `buffer`. +/// Returns the end index within `buffer`. +pub fn printInt(buffer: []u8, value: anytype, base: u8, case: Case, options: Options) usize { + var w: Writer = .fixed(buffer); + w.printInt(value, base, case, options) catch unreachable; + return w.end; } /// Converts values in the range [0, 100) to a base 10 string. @@ -1261,248 +275,49 @@ pub fn digits2(value: u8) [2]u8 { } } -const FormatDurationData = struct { - ns: u64, - negative: bool = false, -}; +/// Deprecated in favor of `Alt`. +pub const Formatter = Alt; -fn formatDuration(data: FormatDurationData, comptime fmt: []const u8, options: std.fmt.FormatOptions, writer: anytype) !void { - _ = fmt; - - // worst case: "-XXXyXXwXXdXXhXXmXX.XXXs".len = 24 - var buf: [24]u8 = undefined; - var fbs = std.io.fixedBufferStream(&buf); - var buf_writer = fbs.writer(); - if (data.negative) { - buf_writer.writeByte('-') catch unreachable; - } - - var ns_remaining = data.ns; - inline for (.{ - .{ .ns = 365 * std.time.ns_per_day, .sep = 'y' }, - .{ .ns = std.time.ns_per_week, .sep = 'w' }, - .{ .ns = std.time.ns_per_day, .sep = 'd' }, - .{ .ns = std.time.ns_per_hour, .sep = 'h' }, - .{ .ns = std.time.ns_per_min, .sep = 'm' }, - }) |unit| { - if (ns_remaining >= unit.ns) { - const units = ns_remaining / unit.ns; - formatInt(units, 10, .lower, .{}, buf_writer) catch unreachable; - buf_writer.writeByte(unit.sep) catch unreachable; - ns_remaining -= units * unit.ns; - if (ns_remaining == 0) - return formatBuf(fbs.getWritten(), options, writer); - } - } - - 
inline for (.{ - .{ .ns = std.time.ns_per_s, .sep = "s" }, - .{ .ns = std.time.ns_per_ms, .sep = "ms" }, - .{ .ns = std.time.ns_per_us, .sep = "us" }, - }) |unit| { - const kunits = ns_remaining * 1000 / unit.ns; - if (kunits >= 1000) { - formatInt(kunits / 1000, 10, .lower, .{}, buf_writer) catch unreachable; - const frac = kunits % 1000; - if (frac > 0) { - // Write up to 3 decimal places - var decimal_buf = [_]u8{ '.', 0, 0, 0 }; - _ = formatIntBuf(decimal_buf[1..], frac, 10, .lower, .{ .fill = '0', .width = 3 }); - var end: usize = 4; - while (end > 1) : (end -= 1) { - if (decimal_buf[end - 1] != '0') break; - } - buf_writer.writeAll(decimal_buf[0..end]) catch unreachable; - } - buf_writer.writeAll(unit.sep) catch unreachable; - return formatBuf(fbs.getWritten(), options, writer); - } - } - - formatInt(ns_remaining, 10, .lower, .{}, buf_writer) catch unreachable; - buf_writer.writeAll("ns") catch unreachable; - return formatBuf(fbs.getWritten(), options, writer); -} - -/// Return a Formatter for number of nanoseconds according to its magnitude: -/// [#y][#w][#d][#h][#m]#[.###][n|u|m]s -pub fn fmtDuration(ns: u64) Formatter(formatDuration) { - const data = FormatDurationData{ .ns = ns }; - return .{ .data = data }; -} - -test fmtDuration { - var buf: [24]u8 = undefined; - inline for (.{ - .{ .s = "0ns", .d = 0 }, - .{ .s = "1ns", .d = 1 }, - .{ .s = "999ns", .d = std.time.ns_per_us - 1 }, - .{ .s = "1us", .d = std.time.ns_per_us }, - .{ .s = "1.45us", .d = 1450 }, - .{ .s = "1.5us", .d = 3 * std.time.ns_per_us / 2 }, - .{ .s = "14.5us", .d = 14500 }, - .{ .s = "145us", .d = 145000 }, - .{ .s = "999.999us", .d = std.time.ns_per_ms - 1 }, - .{ .s = "1ms", .d = std.time.ns_per_ms + 1 }, - .{ .s = "1.5ms", .d = 3 * std.time.ns_per_ms / 2 }, - .{ .s = "1.11ms", .d = 1110000 }, - .{ .s = "1.111ms", .d = 1111000 }, - .{ .s = "1.111ms", .d = 1111100 }, - .{ .s = "999.999ms", .d = std.time.ns_per_s - 1 }, - .{ .s = "1s", .d = std.time.ns_per_s }, - .{ .s = "59.999s", .d 
= std.time.ns_per_min - 1 }, - .{ .s = "1m", .d = std.time.ns_per_min }, - .{ .s = "1h", .d = std.time.ns_per_hour }, - .{ .s = "1d", .d = std.time.ns_per_day }, - .{ .s = "1w", .d = std.time.ns_per_week }, - .{ .s = "1y", .d = 365 * std.time.ns_per_day }, - .{ .s = "1y52w23h59m59.999s", .d = 730 * std.time.ns_per_day - 1 }, // 365d = 52w1d - .{ .s = "1y1h1.001s", .d = 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_s + std.time.ns_per_ms }, - .{ .s = "1y1h1s", .d = 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_s + 999 * std.time.ns_per_us }, - .{ .s = "1y1h999.999us", .d = 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms - 1 }, - .{ .s = "1y1h1ms", .d = 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms }, - .{ .s = "1y1h1ms", .d = 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms + 1 }, - .{ .s = "1y1m999ns", .d = 365 * std.time.ns_per_day + std.time.ns_per_min + 999 }, - .{ .s = "584y49w23h34m33.709s", .d = math.maxInt(u64) }, - }) |tc| { - const slice = try bufPrint(&buf, "{}", .{fmtDuration(tc.d)}); - try std.testing.expectEqualStrings(tc.s, slice); - } - - inline for (.{ - .{ .s = "=======0ns", .f = "{s:=>10}", .d = 0 }, - .{ .s = "1ns=======", .f = "{s:=<10}", .d = 1 }, - .{ .s = " 999ns ", .f = "{s:^10}", .d = std.time.ns_per_us - 1 }, - }) |tc| { - const slice = try bufPrint(&buf, tc.f, .{fmtDuration(tc.d)}); - try std.testing.expectEqualStrings(tc.s, slice); - } -} - -fn formatDurationSigned(ns: i64, comptime fmt: []const u8, options: std.fmt.FormatOptions, writer: anytype) !void { - const data = FormatDurationData{ .ns = @abs(ns), .negative = ns < 0 }; - try formatDuration(data, fmt, options, writer); -} - -/// Return a Formatter for number of nanoseconds according to its signed magnitude: -/// [#y][#w][#d][#h][#m]#[.###][n|u|m]s -pub fn fmtDurationSigned(ns: i64) Formatter(formatDurationSigned) { - return .{ .data = ns }; -} - -test fmtDurationSigned { - 
var buf: [24]u8 = undefined; - inline for (.{ - .{ .s = "0ns", .d = 0 }, - .{ .s = "1ns", .d = 1 }, - .{ .s = "-1ns", .d = -(1) }, - .{ .s = "999ns", .d = std.time.ns_per_us - 1 }, - .{ .s = "-999ns", .d = -(std.time.ns_per_us - 1) }, - .{ .s = "1us", .d = std.time.ns_per_us }, - .{ .s = "-1us", .d = -(std.time.ns_per_us) }, - .{ .s = "1.45us", .d = 1450 }, - .{ .s = "-1.45us", .d = -(1450) }, - .{ .s = "1.5us", .d = 3 * std.time.ns_per_us / 2 }, - .{ .s = "-1.5us", .d = -(3 * std.time.ns_per_us / 2) }, - .{ .s = "14.5us", .d = 14500 }, - .{ .s = "-14.5us", .d = -(14500) }, - .{ .s = "145us", .d = 145000 }, - .{ .s = "-145us", .d = -(145000) }, - .{ .s = "999.999us", .d = std.time.ns_per_ms - 1 }, - .{ .s = "-999.999us", .d = -(std.time.ns_per_ms - 1) }, - .{ .s = "1ms", .d = std.time.ns_per_ms + 1 }, - .{ .s = "-1ms", .d = -(std.time.ns_per_ms + 1) }, - .{ .s = "1.5ms", .d = 3 * std.time.ns_per_ms / 2 }, - .{ .s = "-1.5ms", .d = -(3 * std.time.ns_per_ms / 2) }, - .{ .s = "1.11ms", .d = 1110000 }, - .{ .s = "-1.11ms", .d = -(1110000) }, - .{ .s = "1.111ms", .d = 1111000 }, - .{ .s = "-1.111ms", .d = -(1111000) }, - .{ .s = "1.111ms", .d = 1111100 }, - .{ .s = "-1.111ms", .d = -(1111100) }, - .{ .s = "999.999ms", .d = std.time.ns_per_s - 1 }, - .{ .s = "-999.999ms", .d = -(std.time.ns_per_s - 1) }, - .{ .s = "1s", .d = std.time.ns_per_s }, - .{ .s = "-1s", .d = -(std.time.ns_per_s) }, - .{ .s = "59.999s", .d = std.time.ns_per_min - 1 }, - .{ .s = "-59.999s", .d = -(std.time.ns_per_min - 1) }, - .{ .s = "1m", .d = std.time.ns_per_min }, - .{ .s = "-1m", .d = -(std.time.ns_per_min) }, - .{ .s = "1h", .d = std.time.ns_per_hour }, - .{ .s = "-1h", .d = -(std.time.ns_per_hour) }, - .{ .s = "1d", .d = std.time.ns_per_day }, - .{ .s = "-1d", .d = -(std.time.ns_per_day) }, - .{ .s = "1w", .d = std.time.ns_per_week }, - .{ .s = "-1w", .d = -(std.time.ns_per_week) }, - .{ .s = "1y", .d = 365 * std.time.ns_per_day }, - .{ .s = "-1y", .d = -(365 * std.time.ns_per_day) }, - .{ 
.s = "1y52w23h59m59.999s", .d = 730 * std.time.ns_per_day - 1 }, // 365d = 52w1d - .{ .s = "-1y52w23h59m59.999s", .d = -(730 * std.time.ns_per_day - 1) }, // 365d = 52w1d - .{ .s = "1y1h1.001s", .d = 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_s + std.time.ns_per_ms }, - .{ .s = "-1y1h1.001s", .d = -(365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_s + std.time.ns_per_ms) }, - .{ .s = "1y1h1s", .d = 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_s + 999 * std.time.ns_per_us }, - .{ .s = "-1y1h1s", .d = -(365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_s + 999 * std.time.ns_per_us) }, - .{ .s = "1y1h999.999us", .d = 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms - 1 }, - .{ .s = "-1y1h999.999us", .d = -(365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms - 1) }, - .{ .s = "1y1h1ms", .d = 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms }, - .{ .s = "-1y1h1ms", .d = -(365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms) }, - .{ .s = "1y1h1ms", .d = 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms + 1 }, - .{ .s = "-1y1h1ms", .d = -(365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms + 1) }, - .{ .s = "1y1m999ns", .d = 365 * std.time.ns_per_day + std.time.ns_per_min + 999 }, - .{ .s = "-1y1m999ns", .d = -(365 * std.time.ns_per_day + std.time.ns_per_min + 999) }, - .{ .s = "292y24w3d23h47m16.854s", .d = math.maxInt(i64) }, - .{ .s = "-292y24w3d23h47m16.854s", .d = math.minInt(i64) + 1 }, - .{ .s = "-292y24w3d23h47m16.854s", .d = math.minInt(i64) }, - }) |tc| { - const slice = try bufPrint(&buf, "{}", .{fmtDurationSigned(tc.d)}); - try std.testing.expectEqualStrings(tc.s, slice); - } - - inline for (.{ - .{ .s = "=======0ns", .f = "{s:=>10}", .d = 0 }, - .{ .s = "1ns=======", .f = "{s:=<10}", .d = 1 }, - .{ .s = "-1ns======", .f = "{s:=<10}", .d = -(1) }, - .{ .s = " -999ns 
", .f = "{s:^10}", .d = -(std.time.ns_per_us - 1) }, - }) |tc| { - const slice = try bufPrint(&buf, tc.f, .{fmtDurationSigned(tc.d)}); - try std.testing.expectEqualStrings(tc.s, slice); - } -} - -pub const ParseIntError = error{ - /// The result cannot fit in the type specified - Overflow, - - /// The input was empty or contained an invalid character - InvalidCharacter, -}; - -/// Creates a Formatter type from a format function. Wrapping data in Formatter(func) causes -/// the data to be formatted using the given function `func`. `func` must be of the following -/// form: -/// -/// fn formatExample( -/// data: T, -/// comptime fmt: []const u8, -/// options: std.fmt.FormatOptions, -/// writer: anytype, -/// ) !void; -/// -pub fn Formatter(comptime formatFn: anytype) type { - const Data = @typeInfo(@TypeOf(formatFn)).@"fn".params[0].type.?; +/// Creates a type suitable for instantiating and passing to a "{f}" placeholder. +pub fn Alt( + comptime Data: type, + comptime formatFn: fn (data: Data, writer: *Writer) Writer.Error!void, +) type { return struct { data: Data, - pub fn format( - self: @This(), - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { - try formatFn(self.data, fmt, options, writer); + pub inline fn format(self: @This(), writer: *Writer) Writer.Error!void { + try formatFn(self.data, writer); } }; } +/// Helper for calling alternate format methods besides one named "format". 
+pub fn alt( + context: anytype, + comptime func_name: @TypeOf(.enum_literal), +) Formatter(@TypeOf(context), @field(@TypeOf(context), @tagName(func_name))) { + return .{ .data = context }; +} + +test alt { + const Example = struct { + number: u8, + + pub fn other(ex: @This(), w: *Writer) Writer.Error!void { + try w.writeByte(ex.number); + } + }; + const ex: Example = .{ .number = 'a' }; + try expectFmt("a", "{f}", .{alt(ex, .other)}); +} + +pub const ParseIntError = error{ + /// The result cannot fit in the type specified. + Overflow, + /// The input was empty or contained an invalid character. + InvalidCharacter, +}; + /// Parses the string `buf` as signed or unsigned representation in the /// specified base of an integral value of type `T`. /// @@ -1793,15 +608,13 @@ pub const BufPrintError = error{ NoSpaceLeft, }; -/// Print a Formatter string into `buf`. Actually just a thin wrapper around `format` and `fixedBufferStream`. -/// Returns a slice of the bytes printed to. +/// Print a Formatter string into `buf`. Returns a slice of the bytes printed. pub fn bufPrint(buf: []u8, comptime fmt: []const u8, args: anytype) BufPrintError![]u8 { - var fbs = std.io.fixedBufferStream(buf); - format(fbs.writer().any(), fmt, args) catch |err| switch (err) { - error.NoSpaceLeft => return error.NoSpaceLeft, - else => unreachable, + var w: Writer = .fixed(buf); + w.print(fmt, args) catch |err| switch (err) { + error.WriteFailed => return error.NoSpaceLeft, }; - return fbs.getWritten(); + return w.buffered(); } pub fn bufPrintZ(buf: []u8, comptime fmt: []const u8, args: anytype) BufPrintError![:0]u8 { @@ -1809,51 +622,37 @@ pub fn bufPrintZ(buf: []u8, comptime fmt: []const u8, args: anytype) BufPrintErr return result[0 .. result.len - 1 :0]; } -/// Count the characters needed for format. 
Useful for preallocating memory -pub fn count(comptime fmt: []const u8, args: anytype) u64 { - var counting_writer = std.io.countingWriter(std.io.null_writer); - format(counting_writer.writer().any(), fmt, args) catch unreachable; - return counting_writer.bytes_written; -} - -pub const AllocPrintError = error{OutOfMemory}; - -pub fn allocPrint(allocator: mem.Allocator, comptime fmt: []const u8, args: anytype) AllocPrintError![]u8 { - const size = math.cast(usize, count(fmt, args)) orelse return error.OutOfMemory; - const buf = try allocator.alloc(u8, size); - return bufPrint(buf, fmt, args) catch |err| switch (err) { - error.NoSpaceLeft => unreachable, // we just counted the size above +/// Count the characters needed for format. +pub fn count(comptime fmt: []const u8, args: anytype) usize { + var trash_buffer: [64]u8 = undefined; + var dw: Writer.Discarding = .init(&trash_buffer); + dw.writer.print(fmt, args) catch |err| switch (err) { + error.WriteFailed => unreachable, }; + return @intCast(dw.count + dw.writer.end); } -pub fn allocPrintZ(allocator: mem.Allocator, comptime fmt: []const u8, args: anytype) AllocPrintError![:0]u8 { - const result = try allocPrint(allocator, fmt ++ "\x00", args); - return result[0 .. 
result.len - 1 :0]; +pub fn allocPrint(gpa: Allocator, comptime fmt: []const u8, args: anytype) Allocator.Error![]u8 { + var aw = try Writer.Allocating.initCapacity(gpa, fmt.len); + defer aw.deinit(); + aw.writer.print(fmt, args) catch |err| switch (err) { + error.WriteFailed => return error.OutOfMemory, + }; + return aw.toOwnedSlice(); } -test bufPrintIntToSlice { - var buffer: [100]u8 = undefined; - const buf = buffer[0..]; - - try std.testing.expectEqualSlices(u8, "-1", bufPrintIntToSlice(buf, @as(i1, -1), 10, .lower, FormatOptions{})); - - try std.testing.expectEqualSlices(u8, "-101111000110000101001110", bufPrintIntToSlice(buf, @as(i32, -12345678), 2, .lower, FormatOptions{})); - try std.testing.expectEqualSlices(u8, "-12345678", bufPrintIntToSlice(buf, @as(i32, -12345678), 10, .lower, FormatOptions{})); - try std.testing.expectEqualSlices(u8, "-bc614e", bufPrintIntToSlice(buf, @as(i32, -12345678), 16, .lower, FormatOptions{})); - try std.testing.expectEqualSlices(u8, "-BC614E", bufPrintIntToSlice(buf, @as(i32, -12345678), 16, .upper, FormatOptions{})); - - try std.testing.expectEqualSlices(u8, "12345678", bufPrintIntToSlice(buf, @as(u32, 12345678), 10, .upper, FormatOptions{})); - - try std.testing.expectEqualSlices(u8, " 666", bufPrintIntToSlice(buf, @as(u32, 666), 10, .lower, FormatOptions{ .width = 6 })); - try std.testing.expectEqualSlices(u8, " 1234", bufPrintIntToSlice(buf, @as(u32, 0x1234), 16, .lower, FormatOptions{ .width = 6 })); - try std.testing.expectEqualSlices(u8, "1234", bufPrintIntToSlice(buf, @as(u32, 0x1234), 16, .lower, FormatOptions{ .width = 1 })); - - try std.testing.expectEqualSlices(u8, "+42", bufPrintIntToSlice(buf, @as(i32, 42), 10, .lower, FormatOptions{ .width = 3 })); - try std.testing.expectEqualSlices(u8, "-42", bufPrintIntToSlice(buf, @as(i32, -42), 10, .lower, FormatOptions{ .width = 3 })); -} - -pub fn bufPrintIntToSlice(buf: []u8, value: anytype, base: u8, case: Case, options: FormatOptions) []u8 { - return 
buf[0..formatIntBuf(buf, value, base, case, options)]; +pub fn allocPrintSentinel( + gpa: Allocator, + comptime fmt: []const u8, + args: anytype, + comptime sentinel: u8, +) Allocator.Error![:sentinel]u8 { + var aw = try Writer.Allocating.initCapacity(gpa, fmt.len); + defer aw.deinit(); + aw.writer.print(fmt, args) catch |err| switch (err) { + error.WriteFailed => return error.OutOfMemory, + }; + return aw.toOwnedSliceSentinel(sentinel); } pub inline fn comptimePrint(comptime fmt: []const u8, args: anytype) *const [count(fmt, args):0]u8 { @@ -1984,26 +783,22 @@ test "int.padded" { try expectFmt("i16: '-12345'", "i16: '{:4}'", .{@as(i16, -12345)}); try expectFmt("i16: '+12345'", "i16: '{:4}'", .{@as(i16, 12345)}); try expectFmt("u16: '12345'", "u16: '{:4}'", .{@as(u16, 12345)}); - - try expectFmt("UTF-8: 'ü '", "UTF-8: '{u:<4}'", .{'ü'}); - try expectFmt("UTF-8: ' ü'", "UTF-8: '{u:>4}'", .{'ü'}); - try expectFmt("UTF-8: ' ü '", "UTF-8: '{u:^4}'", .{'ü'}); } test "buffer" { { var buf1: [32]u8 = undefined; - var fbs = std.io.fixedBufferStream(&buf1); - try formatType(1234, "", FormatOptions{}, fbs.writer(), std.options.fmt_max_depth); - try std.testing.expectEqualStrings("1234", fbs.getWritten()); + var w: Writer = .fixed(&buf1); + try w.printValue("", .{}, 1234, std.options.fmt_max_depth); + try std.testing.expectEqualStrings("1234", w.buffered()); - fbs.reset(); - try formatType('a', "c", FormatOptions{}, fbs.writer(), std.options.fmt_max_depth); - try std.testing.expectEqualStrings("a", fbs.getWritten()); + w = .fixed(&buf1); + try w.printValue("c", .{}, 'a', std.options.fmt_max_depth); + try std.testing.expectEqualStrings("a", w.buffered()); - fbs.reset(); - try formatType(0b1100, "b", FormatOptions{}, fbs.writer(), std.options.fmt_max_depth); - try std.testing.expectEqualStrings("1100", fbs.getWritten()); + w = .fixed(&buf1); + try w.printValue("b", .{}, 0b1100, std.options.fmt_max_depth); + try std.testing.expectEqualStrings("1100", w.buffered()); } } @@ 
-2017,36 +812,24 @@ fn expectArrayFmt(expected: []const u8, comptime template: []const u8, comptime } test "array" { - { - const value: [3]u8 = "abc".*; - try expectArrayFmt("array: abc\n", "array: {s}\n", value); - try expectArrayFmt("array: { 97, 98, 99 }\n", "array: {d}\n", value); - try expectArrayFmt("array: { 61, 62, 63 }\n", "array: {x}\n", value); - try expectArrayFmt("array: { 97, 98, 99 }\n", "array: {any}\n", value); + const value: [3]u8 = "abc".*; + try expectArrayFmt("array: abc\n", "array: {s}\n", value); + try expectArrayFmt("array: 616263\n", "array: {x}\n", value); + try expectArrayFmt("array: { 97, 98, 99 }\n", "array: {any}\n", value); - var buf: [100]u8 = undefined; - try expectFmt( - try bufPrint(buf[0..], "array: [3]u8@{x}\n", .{@intFromPtr(&value)}), - "array: {*}\n", - .{&value}, - ); - } - - { - const value = [2][3]u8{ "abc".*, "def".* }; - - try expectArrayFmt("array: { abc, def }\n", "array: {s}\n", value); - try expectArrayFmt("array: { { 97, 98, 99 }, { 100, 101, 102 } }\n", "array: {d}\n", value); - try expectArrayFmt("array: { { 61, 62, 63 }, { 64, 65, 66 } }\n", "array: {x}\n", value); - } + var buf: [100]u8 = undefined; + try expectFmt( + try bufPrint(buf[0..], "array: [3]u8@{x}\n", .{@intFromPtr(&value)}), + "array: {*}\n", + .{&value}, + ); } test "slice" { { const value: []const u8 = "abc"; try expectFmt("slice: abc\n", "slice: {s}\n", .{value}); - try expectFmt("slice: { 97, 98, 99 }\n", "slice: {d}\n", .{value}); - try expectFmt("slice: { 61, 62, 63 }\n", "slice: {x}\n", .{value}); + try expectFmt("slice: 616263\n", "slice: {x}\n", .{value}); try expectFmt("slice: { 97, 98, 99 }\n", "slice: {any}\n", .{value}); } { @@ -2060,45 +843,33 @@ test "slice" { try expectFmt("buf: \x00hello\x00\n", "buf: {s}\n", .{null_term_slice}); } - try expectFmt("buf: Test\n", "buf: {s:5}\n", .{"Test"}); try expectFmt("buf: Test\n Other text", "buf: {s}\n Other text", .{"Test"}); { var int_slice = [_]u32{ 1, 4096, 391891, 1111111111 }; - var 
runtime_zero: usize = 0; - _ = &runtime_zero; - try expectFmt("int: { 1, 4096, 391891, 1111111111 }", "int: {any}", .{int_slice[runtime_zero..]}); - try expectFmt("int: { 1, 4096, 391891, 1111111111 }", "int: {d}", .{int_slice[runtime_zero..]}); - try expectFmt("int: { 1, 1000, 5fad3, 423a35c7 }", "int: {x}", .{int_slice[runtime_zero..]}); - try expectFmt("int: { 00001, 01000, 5fad3, 423a35c7 }", "int: {x:0>5}", .{int_slice[runtime_zero..]}); + const input: []const u32 = &int_slice; + try expectFmt("int: { 1, 4096, 391891, 1111111111 }", "int: {any}", .{input}); } { const S1 = struct { x: u8, }; const struct_slice: []const S1 = &[_]S1{ S1{ .x = 8 }, S1{ .x = 42 } }; - try expectFmt("slice: { fmt.test.slice.S1{ .x = 8 }, fmt.test.slice.S1{ .x = 42 } }", "slice: {any}", .{struct_slice}); + try expectFmt("slice: { .{ .x = 8 }, .{ .x = 42 } }", "slice: {any}", .{struct_slice}); } { const S2 = struct { x: u8, - pub fn format(s: @This(), comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + pub fn format(s: @This(), writer: *Writer) Writer.Error!void { try writer.print("S2({})", .{s.x}); } }; const struct_slice: []const S2 = &[_]S2{ S2{ .x = 8 }, S2{ .x = 42 } }; - try expectFmt("slice: { S2(8), S2(42) }", "slice: {any}", .{struct_slice}); + try expectFmt("slice: { .{ .x = 8 }, .{ .x = 42 } }", "slice: {any}", .{struct_slice}); } } -test "escape non-printable" { - try expectFmt("abc 123", "{s}", .{fmtSliceEscapeLower("abc 123")}); - try expectFmt("ab\\xffc", "{s}", .{fmtSliceEscapeLower("ab\xffc")}); - try expectFmt("abc 123", "{s}", .{fmtSliceEscapeUpper("abc 123")}); - try expectFmt("ab\\xFFc", "{s}", .{fmtSliceEscapeUpper("ab\xffc")}); -} - test "pointer" { { const value = @as(*align(1) i32, @ptrFromInt(0xdeadbeef)); @@ -2122,26 +893,6 @@ test "cstr" { "cstr: {s}\n", .{@as([*c]const u8, @ptrCast("Test C"))}, ); - try expectFmt( - "cstr: Test C\n", - "cstr: {s:10}\n", - .{@as([*c]const u8, @ptrCast("Test C"))}, - ); -} - -test "filesize" { - try 
expectFmt("file size: 42B\n", "file size: {}\n", .{fmtIntSizeDec(42)}); - try expectFmt("file size: 42B\n", "file size: {}\n", .{fmtIntSizeBin(42)}); - try expectFmt("file size: 63MB\n", "file size: {}\n", .{fmtIntSizeDec(63 * 1000 * 1000)}); - try expectFmt("file size: 63MiB\n", "file size: {}\n", .{fmtIntSizeBin(63 * 1024 * 1024)}); - try expectFmt("file size: 42B\n", "file size: {:.2}\n", .{fmtIntSizeDec(42)}); - try expectFmt("file size: 42B\n", "file size: {:>9.2}\n", .{fmtIntSizeDec(42)}); - try expectFmt("file size: 66.06MB\n", "file size: {:.2}\n", .{fmtIntSizeDec(63 * 1024 * 1024)}); - try expectFmt("file size: 60.08MiB\n", "file size: {:.2}\n", .{fmtIntSizeBin(63 * 1000 * 1000)}); - try expectFmt("file size: =66.06MB=\n", "file size: {:=^9.2}\n", .{fmtIntSizeDec(63 * 1024 * 1024)}); - try expectFmt("file size: 66.06MB\n", "file size: {: >9.2}\n", .{fmtIntSizeDec(63 * 1024 * 1024)}); - try expectFmt("file size: 66.06MB \n", "file size: {: <9.2}\n", .{fmtIntSizeDec(63 * 1024 * 1024)}); - try expectFmt("file size: 0.01844674407370955ZB\n", "file size: {}\n", .{fmtIntSizeDec(math.maxInt(u64))}); } test "struct" { @@ -2150,8 +901,8 @@ test "struct" { field: u8, }; const value = Struct{ .field = 42 }; - try expectFmt("struct: fmt.test.struct.Struct{ .field = 42 }\n", "struct: {}\n", .{value}); - try expectFmt("struct: fmt.test.struct.Struct{ .field = 42 }\n", "struct: {}\n", .{&value}); + try expectFmt("struct: .{ .field = 42 }\n", "struct: {}\n", .{value}); + try expectFmt("struct: .{ .field = 42 }\n", "struct: {}\n", .{&value}); } { const Struct = struct { @@ -2159,7 +910,7 @@ test "struct" { b: u1, }; const value = Struct{ .a = 0, .b = 1 }; - try expectFmt("struct: fmt.test.struct.Struct{ .a = 0, .b = 1 }\n", "struct: {}\n", .{value}); + try expectFmt("struct: .{ .a = 0, .b = 1 }\n", "struct: {}\n", .{value}); } const S = struct { @@ -2172,11 +923,11 @@ test "struct" { .b = error.Unused, }; - try expectFmt("fmt.test.struct.S{ .a = 456, .b = error.Unused }", 
"{}", .{inst}); + try expectFmt(".{ .a = 456, .b = error.Unused }", "{}", .{inst}); // Tuples - try expectFmt("{ }", "{}", .{.{}}); - try expectFmt("{ -1 }", "{}", .{.{-1}}); - try expectFmt("{ -1, 42, 2.5e4 }", "{}", .{.{ -1, 42, 0.25e5 }}); + try expectFmt(".{ }", "{}", .{.{}}); + try expectFmt(".{ -1 }", "{}", .{.{-1}}); + try expectFmt(".{ -1, 42, 25000 }", "{}", .{.{ -1, 42, 0.25e5 }}); } test "enum" { @@ -2185,15 +936,15 @@ test "enum" { Two, }; const value = Enum.Two; - try expectFmt("enum: fmt.test.enum.Enum.Two\n", "enum: {}\n", .{value}); - try expectFmt("enum: fmt.test.enum.Enum.Two\n", "enum: {}\n", .{&value}); - try expectFmt("enum: fmt.test.enum.Enum.One\n", "enum: {}\n", .{Enum.One}); - try expectFmt("enum: fmt.test.enum.Enum.Two\n", "enum: {}\n", .{Enum.Two}); + try expectFmt("enum: .Two\n", "enum: {}\n", .{value}); + try expectFmt("enum: .Two\n", "enum: {}\n", .{&value}); + try expectFmt("enum: .One\n", "enum: {}\n", .{Enum.One}); + try expectFmt("enum: .Two\n", "enum: {}\n", .{Enum.Two}); // test very large enum to verify ct branch quota is large enough // TODO: https://github.com/ziglang/zig/issues/15609 if (!((builtin.cpu.arch == .wasm32) and builtin.mode == .Debug)) { - try expectFmt("enum: os.windows.win32error.Win32Error.INVALID_FUNCTION\n", "enum: {}\n", .{std.os.windows.Win32Error.INVALID_FUNCTION}); + try expectFmt("enum: .INVALID_FUNCTION\n", "enum: {}\n", .{std.os.windows.Win32Error.INVALID_FUNCTION}); } const E = enum { @@ -2204,7 +955,7 @@ test "enum" { const inst = E.Two; - try expectFmt("fmt.test.enum.E.Two", "{}", .{inst}); + try expectFmt(".Two", "{}", .{inst}); } test "non-exhaustive enum" { @@ -2213,13 +964,17 @@ test "non-exhaustive enum" { Two = 0xbeef, _, }; - try expectFmt("enum: fmt.test.non-exhaustive enum.Enum.One\n", "enum: {}\n", .{Enum.One}); - try expectFmt("enum: fmt.test.non-exhaustive enum.Enum.Two\n", "enum: {}\n", .{Enum.Two}); - try expectFmt("enum: fmt.test.non-exhaustive enum.Enum(4660)\n", "enum: {}\n", 
.{@as(Enum, @enumFromInt(0x1234))}); - try expectFmt("enum: fmt.test.non-exhaustive enum.Enum.One\n", "enum: {x}\n", .{Enum.One}); - try expectFmt("enum: fmt.test.non-exhaustive enum.Enum.Two\n", "enum: {x}\n", .{Enum.Two}); - try expectFmt("enum: fmt.test.non-exhaustive enum.Enum.Two\n", "enum: {X}\n", .{Enum.Two}); - try expectFmt("enum: fmt.test.non-exhaustive enum.Enum(1234)\n", "enum: {x}\n", .{@as(Enum, @enumFromInt(0x1234))}); + try expectFmt("enum: .One\n", "enum: {}\n", .{Enum.One}); + try expectFmt("enum: .Two\n", "enum: {}\n", .{Enum.Two}); + try expectFmt("enum: @enumFromInt(4660)\n", "enum: {}\n", .{@as(Enum, @enumFromInt(0x1234))}); + try expectFmt("enum: f\n", "enum: {x}\n", .{Enum.One}); + try expectFmt("enum: beef\n", "enum: {x}\n", .{Enum.Two}); + try expectFmt("enum: BEEF\n", "enum: {X}\n", .{Enum.Two}); + try expectFmt("enum: 1234\n", "enum: {x}\n", .{@as(Enum, @enumFromInt(0x1234))}); + + try expectFmt("enum: 15\n", "enum: {d}\n", .{Enum.One}); + try expectFmt("enum: 48879\n", "enum: {d}\n", .{Enum.Two}); + try expectFmt("enum: 4660\n", "enum: {d}\n", .{@as(Enum, @enumFromInt(0x1234))}); } test "float.scientific" { @@ -2345,41 +1100,6 @@ test "float.libc.sanity" { try expectFmt("f64: 18014400656965630.00000", "f64: {d:.5}", .{@as(f64, @as(f32, @bitCast(@as(u32, 1518338049))))}); } -test "custom" { - const Vec2 = struct { - const SelfType = @This(); - x: f32, - y: f32, - - pub fn format( - self: SelfType, - comptime fmt: []const u8, - options: FormatOptions, - writer: anytype, - ) !void { - _ = options; - if (fmt.len == 0 or comptime std.mem.eql(u8, fmt, "p")) { - return std.fmt.format(writer, "({d:.3},{d:.3})", .{ self.x, self.y }); - } else if (comptime std.mem.eql(u8, fmt, "d")) { - return std.fmt.format(writer, "{d:.3}x{d:.3}", .{ self.x, self.y }); - } else { - @compileError("unknown format character: '" ++ fmt ++ "'"); - } - } - }; - - var value = Vec2{ - .x = 10.2, - .y = 2.22, - }; - try expectFmt("point: (10.200,2.220)\n", "point: 
{}\n", .{&value}); - try expectFmt("dim: 10.200x2.220\n", "dim: {d}\n", .{&value}); - - // same thing but not passing a pointer - try expectFmt("point: (10.200,2.220)\n", "point: {}\n", .{value}); - try expectFmt("dim: 10.200x2.220\n", "dim: {d}\n", .{value}); -} - test "union" { const TU = union(enum) { float: f32, @@ -2396,18 +1116,13 @@ test "union" { int: u32, }; - const tu_inst = TU{ .int = 123 }; - const uu_inst = UU{ .int = 456 }; - const eu_inst = EU{ .float = 321.123 }; + const tu_inst: TU = .{ .int = 123 }; + const uu_inst: UU = .{ .int = 456 }; + const eu_inst: EU = .{ .float = 321.123 }; - try expectFmt("fmt.test.union.TU{ .int = 123 }", "{}", .{tu_inst}); - - var buf: [100]u8 = undefined; - const uu_result = try bufPrint(buf[0..], "{}", .{uu_inst}); - try std.testing.expectEqualStrings("fmt.test.union.UU@", uu_result[0..18]); - - const eu_result = try bufPrint(buf[0..], "{}", .{eu_inst}); - try std.testing.expectEqualStrings("fmt.test.union.EU@", eu_result[0..18]); + try expectFmt(".{ .int = 123 }", "{}", .{tu_inst}); + try expectFmt(".{ ... }", "{}", .{uu_inst}); + try expectFmt(".{ .float = 321.123, .int = 1134596030 }", "{}", .{eu_inst}); } test "struct.self-referential" { @@ -2421,7 +1136,7 @@ test "struct.self-referential" { }; inst.a = &inst; - try expectFmt("fmt.test.struct.self-referential.S{ .a = fmt.test.struct.self-referential.S{ .a = fmt.test.struct.self-referential.S{ .a = fmt.test.struct.self-referential.S{ ... } } } }", "{}", .{inst}); + try expectFmt(".{ .a = .{ .a = .{ .a = .{ ... 
} } } }", "{}", .{inst}); } test "struct.zero-size" { @@ -2436,18 +1151,7 @@ test "struct.zero-size" { const a = A{}; const b = B{ .a = a, .c = 0 }; - try expectFmt("fmt.test.struct.zero-size.B{ .a = fmt.test.struct.zero-size.A{ }, .c = 0 }", "{}", .{b}); -} - -test "bytes.hex" { - const some_bytes = "\xCA\xFE\xBA\xBE"; - try expectFmt("lowercase: cafebabe\n", "lowercase: {x}\n", .{fmtSliceHexLower(some_bytes)}); - try expectFmt("uppercase: CAFEBABE\n", "uppercase: {X}\n", .{fmtSliceHexUpper(some_bytes)}); - //Test Slices - try expectFmt("uppercase: CAFE\n", "uppercase: {X}\n", .{fmtSliceHexUpper(some_bytes[0..2])}); - try expectFmt("lowercase: babe\n", "lowercase: {x}\n", .{fmtSliceHexLower(some_bytes[2..])}); - const bytes_with_zeros = "\x00\x0E\xBA\xBE"; - try expectFmt("lowercase: 000ebabe\n", "lowercase: {x}\n", .{fmtSliceHexLower(bytes_with_zeros)}); + try expectFmt(".{ .a = .{ }, .c = 0 }", "{}", .{b}); } /// Encodes a sequence of bytes as hexadecimal digits. @@ -2494,110 +1198,14 @@ test bytesToHex { test hexToBytes { var buf: [32]u8 = undefined; - try expectFmt("90" ** 32, "{s}", .{fmtSliceHexUpper(try hexToBytes(&buf, "90" ** 32))}); - try expectFmt("ABCD", "{s}", .{fmtSliceHexUpper(try hexToBytes(&buf, "ABCD"))}); - try expectFmt("", "{s}", .{fmtSliceHexUpper(try hexToBytes(&buf, ""))}); + try expectFmt("90" ** 32, "{X}", .{try hexToBytes(&buf, "90" ** 32)}); + try expectFmt("ABCD", "{X}", .{try hexToBytes(&buf, "ABCD")}); + try expectFmt("", "{X}", .{try hexToBytes(&buf, "")}); try std.testing.expectError(error.InvalidCharacter, hexToBytes(&buf, "012Z")); try std.testing.expectError(error.InvalidLength, hexToBytes(&buf, "AAA")); try std.testing.expectError(error.NoSpaceLeft, hexToBytes(buf[0..1], "ABAB")); } -test "formatIntValue with comptime_int" { - const value: comptime_int = 123456789123456789; - - var buf: [20]u8 = undefined; - var fbs = std.io.fixedBufferStream(&buf); - try formatIntValue(value, "", FormatOptions{}, fbs.writer()); - try 
std.testing.expectEqualStrings("123456789123456789", fbs.getWritten()); -} - -test "formatFloatValue with comptime_float" { - const value: comptime_float = 1.0; - - var buf: [20]u8 = undefined; - var fbs = std.io.fixedBufferStream(&buf); - try formatFloatValue(value, "", FormatOptions{}, fbs.writer()); - try std.testing.expectEqualStrings(fbs.getWritten(), "1e0"); - - try expectFmt("1e0", "{}", .{value}); - try expectFmt("1e0", "{}", .{1.0}); -} - -test "formatType max_depth" { - const Vec2 = struct { - const SelfType = @This(); - x: f32, - y: f32, - - pub fn format( - self: SelfType, - comptime fmt: []const u8, - options: FormatOptions, - writer: anytype, - ) !void { - _ = options; - if (fmt.len == 0) { - return std.fmt.format(writer, "({d:.3},{d:.3})", .{ self.x, self.y }); - } else { - @compileError("unknown format string: '" ++ fmt ++ "'"); - } - } - }; - const E = enum { - One, - Two, - Three, - }; - const TU = union(enum) { - const SelfType = @This(); - float: f32, - int: u32, - ptr: ?*SelfType, - }; - const S = struct { - const SelfType = @This(); - a: ?*SelfType, - tu: TU, - e: E, - vec: Vec2, - }; - - var inst = S{ - .a = null, - .tu = TU{ .ptr = null }, - .e = E.Two, - .vec = Vec2{ .x = 10.2, .y = 2.22 }, - }; - inst.a = &inst; - inst.tu.ptr = &inst.tu; - - var buf: [1000]u8 = undefined; - var fbs = std.io.fixedBufferStream(&buf); - try formatType(inst, "", FormatOptions{}, fbs.writer(), 0); - try std.testing.expectEqualStrings("fmt.test.formatType max_depth.S{ ... }", fbs.getWritten()); - - fbs.reset(); - try formatType(inst, "", FormatOptions{}, fbs.writer(), 1); - try std.testing.expectEqualStrings("fmt.test.formatType max_depth.S{ .a = fmt.test.formatType max_depth.S{ ... }, .tu = fmt.test.formatType max_depth.TU{ ... 
}, .e = fmt.test.formatType max_depth.E.Two, .vec = (10.200,2.220) }", fbs.getWritten()); - - fbs.reset(); - try formatType(inst, "", FormatOptions{}, fbs.writer(), 2); - try std.testing.expectEqualStrings("fmt.test.formatType max_depth.S{ .a = fmt.test.formatType max_depth.S{ .a = fmt.test.formatType max_depth.S{ ... }, .tu = fmt.test.formatType max_depth.TU{ ... }, .e = fmt.test.formatType max_depth.E.Two, .vec = (10.200,2.220) }, .tu = fmt.test.formatType max_depth.TU{ .ptr = fmt.test.formatType max_depth.TU{ ... } }, .e = fmt.test.formatType max_depth.E.Two, .vec = (10.200,2.220) }", fbs.getWritten()); - - fbs.reset(); - try formatType(inst, "", FormatOptions{}, fbs.writer(), 3); - try std.testing.expectEqualStrings("fmt.test.formatType max_depth.S{ .a = fmt.test.formatType max_depth.S{ .a = fmt.test.formatType max_depth.S{ .a = fmt.test.formatType max_depth.S{ ... }, .tu = fmt.test.formatType max_depth.TU{ ... }, .e = fmt.test.formatType max_depth.E.Two, .vec = (10.200,2.220) }, .tu = fmt.test.formatType max_depth.TU{ .ptr = fmt.test.formatType max_depth.TU{ ... } }, .e = fmt.test.formatType max_depth.E.Two, .vec = (10.200,2.220) }, .tu = fmt.test.formatType max_depth.TU{ .ptr = fmt.test.formatType max_depth.TU{ .ptr = fmt.test.formatType max_depth.TU{ ... } } }, .e = fmt.test.formatType max_depth.E.Two, .vec = (10.200,2.220) }", fbs.getWritten()); - - const vec: @Vector(4, i32) = .{ 1, 2, 3, 4 }; - fbs.reset(); - try formatType(vec, "", FormatOptions{}, fbs.writer(), 0); - try std.testing.expectEqualStrings("{ ... 
}", fbs.getWritten()); - - fbs.reset(); - try formatType(vec, "", FormatOptions{}, fbs.writer(), 1); - try std.testing.expectEqualStrings("{ 1, 2, 3, 4 }", fbs.getWritten()); -} - test "positional" { try expectFmt("2 1 0", "{2} {1} {0}", .{ @as(usize, 0), @as(usize, 1), @as(usize, 2) }); try expectFmt("2 1 0", "{2} {1} {}", .{ @as(usize, 0), @as(usize, 1), @as(usize, 2) }); @@ -2654,33 +1262,17 @@ test "enum-literal" { test "padding" { try expectFmt("Simple", "{s}", .{"Simple"}); - try expectFmt(" true", "{:10}", .{true}); - try expectFmt(" true", "{:>10}", .{true}); - try expectFmt("======true", "{:=>10}", .{true}); - try expectFmt("true======", "{:=<10}", .{true}); - try expectFmt(" true ", "{:^10}", .{true}); - try expectFmt("===true===", "{:=^10}", .{true}); - try expectFmt(" Minimum width", "{s:18} width", .{"Minimum"}); - try expectFmt("==================Filled", "{s:=>24}", .{"Filled"}); - try expectFmt(" Centered ", "{s:^24}", .{"Centered"}); - try expectFmt("-", "{s:-^1}", .{""}); - try expectFmt("==crêpe===", "{s:=^10}", .{"crêpe"}); - try expectFmt("=====crêpe", "{s:=>10}", .{"crêpe"}); - try expectFmt("crêpe=====", "{s:=<10}", .{"crêpe"}); + try expectFmt(" 1234", "{:10}", .{1234}); + try expectFmt(" 1234", "{:>10}", .{1234}); + try expectFmt("======1234", "{:=>10}", .{1234}); + try expectFmt("1234======", "{:=<10}", .{1234}); + try expectFmt(" 1234 ", "{:^10}", .{1234}); + try expectFmt("===1234===", "{:=^10}", .{1234}); try expectFmt("====a", "{c:=>5}", .{'a'}); try expectFmt("==a==", "{c:=^5}", .{'a'}); try expectFmt("a====", "{c:=<5}", .{'a'}); } -test "padding fill char utf" { - try expectFmt("──crêpe───", "{s:─^10}", .{"crêpe"}); - try expectFmt("─────crêpe", "{s:─>10}", .{"crêpe"}); - try expectFmt("crêpe─────", "{s:─<10}", .{"crêpe"}); - try expectFmt("────a", "{c:─>5}", .{'a'}); - try expectFmt("──a──", "{c:─^5}", .{'a'}); - try expectFmt("a────", "{c:─<5}", .{'a'}); -} - test "decimal float padding" { const number: f32 = 3.1415; try 
expectFmt("left-pad: **3.142\n", "left-pad: {d:*>7.3}\n", .{number}); @@ -2723,17 +1315,17 @@ test "named arguments" { test "runtime width specifier" { const width: usize = 9; - try expectFmt("~~hello~~", "{s:~^[1]}", .{ "hello", width }); - try expectFmt("~~hello~~", "{s:~^[width]}", .{ .string = "hello", .width = width }); - try expectFmt(" hello", "{s:[1]}", .{ "hello", width }); - try expectFmt("42 hello", "{d} {s:[2]}", .{ 42, "hello", width }); + try expectFmt("~~12345~~", "{d:~^[1]}", .{ 12345, width }); + try expectFmt("~~12345~~", "{d:~^[width]}", .{ .string = 12345, .width = width }); + try expectFmt(" 12345", "{d:[1]}", .{ 12345, width }); + try expectFmt("42 12345", "{d} {d:[2]}", .{ 42, 12345, width }); } test "runtime precision specifier" { const number: f32 = 3.1415; const precision: usize = 2; - try expectFmt("3.14e0", "{:1.[1]}", .{ number, precision }); - try expectFmt("3.14e0", "{:1.[precision]}", .{ .number = number, .precision = precision }); + try expectFmt("3.14e0", "{e:1.[1]}", .{ number, precision }); + try expectFmt("3.14e0", "{e:1.[precision]}", .{ .number = number, .precision = precision }); } test "recursive format function" { @@ -2742,16 +1334,16 @@ test "recursive format function" { Leaf: i32, Branch: struct { left: *const R, right: *const R }, - pub fn format(self: R, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + pub fn format(self: R, writer: *Writer) Writer.Error!void { return switch (self) { - .Leaf => |n| std.fmt.format(writer, "Leaf({})", .{n}), - .Branch => |b| std.fmt.format(writer, "Branch({}, {})", .{ b.left, b.right }), + .Leaf => |n| writer.print("Leaf({})", .{n}), + .Branch => |b| writer.print("Branch({f}, {f})", .{ b.left, b.right }), }; } }; - var r = R{ .Leaf = 1 }; - try expectFmt("Leaf(1)\n", "{}\n", .{&r}); + var r: R = .{ .Leaf = 1 }; + try expectFmt("Leaf(1)\n", "{f}\n", .{&r}); } pub const hex_charset = "0123456789abcdef"; @@ -2785,54 +1377,39 @@ test hex { test "parser until" { { 
// return substring till ':' - var parser: Parser = .{ - .iter = .{ .bytes = "abc:1234", .i = 0 }, - }; + var parser: Parser = .{ .bytes = "abc:1234", .i = 0 }; try testing.expectEqualStrings("abc", parser.until(':')); } { // return the entire string - `ch` not found - var parser: Parser = .{ - .iter = .{ .bytes = "abc1234", .i = 0 }, - }; + var parser: Parser = .{ .bytes = "abc1234", .i = 0 }; try testing.expectEqualStrings("abc1234", parser.until(':')); } { // substring is empty - `ch` is the only character - var parser: Parser = .{ - .iter = .{ .bytes = ":", .i = 0 }, - }; + var parser: Parser = .{ .bytes = ":", .i = 0 }; try testing.expectEqualStrings("", parser.until(':')); } { // empty string and `ch` not found - var parser: Parser = .{ - .iter = .{ .bytes = "", .i = 0 }, - }; + var parser: Parser = .{ .bytes = "", .i = 0 }; try testing.expectEqualStrings("", parser.until(':')); } { // substring starts at index 2 and goes upto `ch` - var parser: Parser = .{ - .iter = .{ .bytes = "abc:1234", .i = 2 }, - }; + var parser: Parser = .{ .bytes = "abc:1234", .i = 2 }; try testing.expectEqualStrings("c", parser.until(':')); } { // substring starts at index 4 and goes upto the end - `ch` not found - var parser: Parser = .{ - .iter = .{ .bytes = "abc1234", .i = 4 }, - }; + var parser: Parser = .{ .bytes = "abc1234", .i = 4 }; try testing.expectEqualStrings("234", parser.until(':')); } } test "parser peek" { { // start iteration from the first index - var parser: Parser = .{ - .iter = .{ .bytes = "hello world", .i = 0 }, - }; - + var parser: Parser = .{ .bytes = "hello world", .i = 0 }; try testing.expectEqual('h', parser.peek(0)); try testing.expectEqual('e', parser.peek(1)); try testing.expectEqual(' ', parser.peek(5)); @@ -2841,9 +1418,7 @@ test "parser peek" { } { // start iteration from the second last index - var parser: Parser = .{ - .iter = .{ .bytes = "hello world!", .i = 10 }, - }; + var parser: Parser = .{ .bytes = "hello world!", .i = 10 }; try 
testing.expectEqual('d', parser.peek(0)); try testing.expectEqual('!', parser.peek(1)); @@ -2851,18 +1426,14 @@ test "parser peek" { } { // start iteration beyond the length of the string - var parser: Parser = .{ - .iter = .{ .bytes = "hello", .i = 5 }, - }; + var parser: Parser = .{ .bytes = "hello", .i = 5 }; try testing.expectEqual(null, parser.peek(0)); try testing.expectEqual(null, parser.peek(1)); } { // empty string - var parser: Parser = .{ - .iter = .{ .bytes = "", .i = 0 }, - }; + var parser: Parser = .{ .bytes = "", .i = 0 }; try testing.expectEqual(null, parser.peek(0)); try testing.expectEqual(null, parser.peek(2)); @@ -2871,78 +1442,78 @@ test "parser peek" { test "parser char" { // character exists - iterator at 0 - var parser: Parser = .{ .iter = .{ .bytes = "~~hello", .i = 0 } }; + var parser: Parser = .{ .bytes = "~~hello", .i = 0 }; try testing.expectEqual('~', parser.char()); // character exists - iterator in the middle - parser = .{ .iter = .{ .bytes = "~~hello", .i = 3 } }; + parser = .{ .bytes = "~~hello", .i = 3 }; try testing.expectEqual('e', parser.char()); // character exists - iterator at the end - parser = .{ .iter = .{ .bytes = "~~hello", .i = 6 } }; + parser = .{ .bytes = "~~hello", .i = 6 }; try testing.expectEqual('o', parser.char()); // character doesn't exist - iterator beyond the length of the string - parser = .{ .iter = .{ .bytes = "~~hello", .i = 7 } }; + parser = .{ .bytes = "~~hello", .i = 7 }; try testing.expectEqual(null, parser.char()); } test "parser maybe" { // character exists - iterator at 0 - var parser: Parser = .{ .iter = .{ .bytes = "hello world", .i = 0 } }; + var parser: Parser = .{ .bytes = "hello world", .i = 0 }; try testing.expect(parser.maybe('h')); // character exists - iterator at space - parser = .{ .iter = .{ .bytes = "hello world", .i = 5 } }; + parser = .{ .bytes = "hello world", .i = 5 }; try testing.expect(parser.maybe(' ')); // character exists - iterator at the end - parser = .{ .iter = .{ .bytes 
= "hello world", .i = 10 } }; + parser = .{ .bytes = "hello world", .i = 10 }; try testing.expect(parser.maybe('d')); // character doesn't exist - iterator beyond the length of the string - parser = .{ .iter = .{ .bytes = "hello world", .i = 11 } }; + parser = .{ .bytes = "hello world", .i = 11 }; try testing.expect(!parser.maybe('e')); } test "parser number" { // input is a single digit natural number - iterator at 0 - var parser: Parser = .{ .iter = .{ .bytes = "7", .i = 0 } }; + var parser: Parser = .{ .bytes = "7", .i = 0 }; try testing.expect(7 == parser.number()); // input is a two digit natural number - iterator at 1 - parser = .{ .iter = .{ .bytes = "29", .i = 1 } }; + parser = .{ .bytes = "29", .i = 1 }; try testing.expect(9 == parser.number()); // input is a two digit natural number - iterator beyond the length of the string - parser = .{ .iter = .{ .bytes = "32", .i = 2 } }; + parser = .{ .bytes = "32", .i = 2 }; try testing.expectEqual(null, parser.number()); // input is an integer - parser = .{ .iter = .{ .bytes = "0", .i = 0 } }; + parser = .{ .bytes = "0", .i = 0 }; try testing.expect(0 == parser.number()); // input is a negative integer - parser = .{ .iter = .{ .bytes = "-2", .i = 0 } }; + parser = .{ .bytes = "-2", .i = 0 }; try testing.expectEqual(null, parser.number()); // input is a string - parser = .{ .iter = .{ .bytes = "no_number", .i = 2 } }; + parser = .{ .bytes = "no_number", .i = 2 }; try testing.expectEqual(null, parser.number()); // input is a single character string - parser = .{ .iter = .{ .bytes = "n", .i = 0 } }; + parser = .{ .bytes = "n", .i = 0 }; try testing.expectEqual(null, parser.number()); // input is an empty string - parser = .{ .iter = .{ .bytes = "", .i = 0 } }; + parser = .{ .bytes = "", .i = 0 }; try testing.expectEqual(null, parser.number()); } test "parser specifier" { { // input string is a digit; iterator at 0 const expected: Specifier = Specifier{ .number = 1 }; - var parser: Parser = .{ .iter = .{ .bytes = "1", 
.i = 0 } }; + var parser: Parser = .{ .bytes = "1", .i = 0 }; const result = try parser.specifier(); try testing.expect(expected.number == result.number); @@ -2950,7 +1521,7 @@ test "parser specifier" { { // input string is a two digit number; iterator at 0 const digit: Specifier = Specifier{ .number = 42 }; - var parser: Parser = .{ .iter = .{ .bytes = "42", .i = 0 } }; + var parser: Parser = .{ .bytes = "42", .i = 0 }; const result = try parser.specifier(); try testing.expect(digit.number == result.number); @@ -2958,7 +1529,7 @@ test "parser specifier" { { // input string is a two digit number digit; iterator at 1 const digit: Specifier = Specifier{ .number = 8 }; - var parser: Parser = .{ .iter = .{ .bytes = "28", .i = 1 } }; + var parser: Parser = .{ .bytes = "28", .i = 1 }; const result = try parser.specifier(); try testing.expect(digit.number == result.number); @@ -2966,7 +1537,7 @@ test "parser specifier" { { // input string is a two digit number with square brackets; iterator at 0 const digit: Specifier = Specifier{ .named = "15" }; - var parser: Parser = .{ .iter = .{ .bytes = "[15]", .i = 0 } }; + var parser: Parser = .{ .bytes = "[15]", .i = 0 }; const result = try parser.specifier(); try testing.expectEqualStrings(digit.named, result.named); @@ -2974,21 +1545,21 @@ test "parser specifier" { { // input string is not a number and contains square brackets; iterator at 0 const digit: Specifier = Specifier{ .named = "hello" }; - var parser: Parser = .{ .iter = .{ .bytes = "[hello]", .i = 0 } }; + var parser: Parser = .{ .bytes = "[hello]", .i = 0 }; const result = try parser.specifier(); try testing.expectEqualStrings(digit.named, result.named); } { // input string is not a number and doesn't contain closing square bracket; iterator at 0 - var parser: Parser = .{ .iter = .{ .bytes = "[hello", .i = 0 } }; + var parser: Parser = .{ .bytes = "[hello", .i = 0 }; const result = parser.specifier(); try testing.expectError(@field(anyerror, "Expected closing ]"), 
result); } { // input string is not a number and doesn't contain closing square bracket; iterator at 2 - var parser: Parser = .{ .iter = .{ .bytes = "[[[[hello", .i = 2 } }; + var parser: Parser = .{ .bytes = "[[[[hello", .i = 2 }; const result = parser.specifier(); try testing.expectError(@field(anyerror, "Expected closing ]"), result); @@ -2996,7 +1567,7 @@ test "parser specifier" { { // input string is not a number and contains unbalanced square brackets; iterator at 0 const digit: Specifier = Specifier{ .named = "[[hello" }; - var parser: Parser = .{ .iter = .{ .bytes = "[[[hello]", .i = 0 } }; + var parser: Parser = .{ .bytes = "[[[hello]", .i = 0 }; const result = try parser.specifier(); try testing.expectEqualStrings(digit.named, result.named); @@ -3004,7 +1575,7 @@ test "parser specifier" { { // input string is not a number and contains unbalanced square brackets; iterator at 1 const digit: Specifier = Specifier{ .named = "[[hello" }; - var parser: Parser = .{ .iter = .{ .bytes = "[[[[hello]]]]]", .i = 1 } }; + var parser: Parser = .{ .bytes = "[[[[hello]]]]]", .i = 1 }; const result = try parser.specifier(); try testing.expectEqualStrings(digit.named, result.named); @@ -3012,9 +1583,13 @@ test "parser specifier" { { // input string is neither a digit nor a named argument const char: Specifier = Specifier{ .none = {} }; - var parser: Parser = .{ .iter = .{ .bytes = "hello", .i = 0 } }; + var parser: Parser = .{ .bytes = "hello", .i = 0 }; const result = try parser.specifier(); try testing.expectEqual(char.none, result.none); } } + +test { + _ = float; +} diff --git a/lib/std/fmt/format_float.zig b/lib/std/fmt/float.zig similarity index 99% rename from lib/std/fmt/format_float.zig rename to lib/std/fmt/float.zig index 4c4c1a2922..16df95ad28 100644 --- a/lib/std/fmt/format_float.zig +++ b/lib/std/fmt/float.zig @@ -11,7 +11,7 @@ const special_exponent = 0x7fffffff; pub const min_buffer_size = 53; /// Returns the minimum buffer size needed to print every float 
of a specific type and format. -pub fn bufferSize(comptime mode: Format, comptime T: type) comptime_int { +pub fn bufferSize(comptime mode: Mode, comptime T: type) comptime_int { comptime std.debug.assert(@typeInfo(T) == .float); return switch (mode) { .scientific => 53, @@ -27,17 +27,17 @@ pub fn bufferSize(comptime mode: Format, comptime T: type) comptime_int { }; } -pub const FormatError = error{ +pub const Error = error{ BufferTooSmall, }; -pub const Format = enum { +pub const Mode = enum { scientific, decimal, }; -pub const FormatOptions = struct { - mode: Format = .scientific, +pub const Options = struct { + mode: Mode = .scientific, precision: ?usize = null, }; @@ -52,11 +52,11 @@ pub const FormatOptions = struct { /// /// When printing full precision decimals, use `bufferSize` to get the required space. It is /// recommended to bound decimal output with a fixed precision to reduce the required buffer size. -pub fn formatFloat(buf: []u8, v_: anytype, options: FormatOptions) FormatError![]const u8 { - const v = switch (@TypeOf(v_)) { +pub fn render(buf: []u8, value: anytype, options: Options) Error![]const u8 { + const v = switch (@TypeOf(value)) { // comptime_float internally is a f128; this preserves precision. - comptime_float => @as(f128, v_), - else => v_, + comptime_float => @as(f128, value), + else => value, }; const T = @TypeOf(v); @@ -192,7 +192,7 @@ fn round(comptime T: type, f: FloatDecimal(T), mode: RoundMode, precision: usize /// will not fit. /// /// It is recommended to bound decimal formatting with an exact precision. 
-pub fn formatScientific(comptime T: type, buf: []u8, f_: FloatDecimal(T), precision: ?usize) FormatError![]const u8 { +pub fn formatScientific(comptime T: type, buf: []u8, f_: FloatDecimal(T), precision: ?usize) Error![]const u8 { std.debug.assert(buf.len >= min_buffer_size); var f = f_; @@ -263,7 +263,7 @@ pub fn formatScientific(comptime T: type, buf: []u8, f_: FloatDecimal(T), precis /// The buffer provided must be greater than `min_buffer_size` bytes in length. If no precision is /// specified, this may still return an error. If precision is specified, `2 + precision` bytes will /// always be written. -pub fn formatDecimal(comptime T: type, buf: []u8, f_: FloatDecimal(T), precision: ?usize) FormatError![]const u8 { +pub fn formatDecimal(comptime T: type, buf: []u8, f_: FloatDecimal(T), precision: ?usize) Error![]const u8 { std.debug.assert(buf.len >= min_buffer_size); var f = f_; @@ -1520,7 +1520,7 @@ fn check(comptime T: type, value: T, comptime expected: []const u8) !void { var buf: [6000]u8 = undefined; const value_bits: I = @bitCast(value); - const s = try formatFloat(&buf, value, .{}); + const s = try render(&buf, value, .{}); try std.testing.expectEqualStrings(expected, s); if (T == f80 and builtin.target.os.tag == .windows and builtin.target.cpu.arch == .x86_64) return; diff --git a/lib/std/fs/File.zig b/lib/std/fs/File.zig index 30b98cddf0..b6f0f659ba 100644 --- a/lib/std/fs/File.zig +++ b/lib/std/fs/File.zig @@ -1,3 +1,20 @@ +const builtin = @import("builtin"); +const Os = std.builtin.Os; +const native_os = builtin.os.tag; +const is_windows = native_os == .windows; + +const File = @This(); +const std = @import("../std.zig"); +const Allocator = std.mem.Allocator; +const posix = std.posix; +const io = std.io; +const math = std.math; +const assert = std.debug.assert; +const linux = std.os.linux; +const windows = std.os.windows; +const maxInt = std.math.maxInt; +const Alignment = std.mem.Alignment; + /// The OS-specific file descriptor or file handle. 
handle: Handle, @@ -168,6 +185,18 @@ pub const CreateFlags = struct { mode: Mode = default_mode, }; +pub fn stdout() File { + return .{ .handle = if (is_windows) windows.peb().ProcessParameters.hStdOutput else posix.STDOUT_FILENO }; +} + +pub fn stderr() File { + return .{ .handle = if (is_windows) windows.peb().ProcessParameters.hStdError else posix.STDERR_FILENO }; +} + +pub fn stdin() File { + return .{ .handle = if (is_windows) windows.peb().ProcessParameters.hStdInput else posix.STDIN_FILENO }; +} + /// Upon success, the stream is in an uninitialized state. To continue using it, /// you must use the open() function. pub fn close(self: File) void { @@ -351,8 +380,10 @@ pub fn getPos(self: File) GetSeekPosError!u64 { return posix.lseek_CUR_get(self.handle); } +pub const GetEndPosError = std.os.windows.GetFileSizeError || StatError; + /// TODO: integrate with async I/O -pub fn getEndPos(self: File) GetSeekPosError!u64 { +pub fn getEndPos(self: File) GetEndPosError!u64 { if (builtin.os.tag == .windows) { return windows.GetFileSizeEx(self.handle); } @@ -477,7 +508,6 @@ pub const Stat = struct { pub const StatError = posix.FStatError; /// Returns `Stat` containing basic information about the `File`. -/// Use `metadata` to retrieve more detailed information (e.g. creation time, permissions). /// TODO: integrate with async I/O pub fn stat(self: File) StatError!Stat { if (builtin.os.tag == .windows) { @@ -743,361 +773,6 @@ pub fn setPermissions(self: File, permissions: Permissions) SetPermissionsError! } } -/// Cross-platform representation of file metadata. -/// Platform-specific functionality is available through the `inner` field. -pub const Metadata = struct { - /// Exposes platform-specific functionality. 
- inner: switch (builtin.os.tag) { - .windows => MetadataWindows, - .linux => MetadataLinux, - .wasi => MetadataWasi, - else => MetadataUnix, - }, - - const Self = @This(); - - /// Returns the size of the file - pub fn size(self: Self) u64 { - return self.inner.size(); - } - - /// Returns a `Permissions` struct, representing the permissions on the file - pub fn permissions(self: Self) Permissions { - return self.inner.permissions(); - } - - /// Returns the `Kind` of file. - /// On Windows, can only return: `.file`, `.directory`, `.sym_link` or `.unknown` - pub fn kind(self: Self) Kind { - return self.inner.kind(); - } - - /// Returns the last time the file was accessed in nanoseconds since UTC 1970-01-01 - pub fn accessed(self: Self) i128 { - return self.inner.accessed(); - } - - /// Returns the time the file was modified in nanoseconds since UTC 1970-01-01 - pub fn modified(self: Self) i128 { - return self.inner.modified(); - } - - /// Returns the time the file was created in nanoseconds since UTC 1970-01-01 - /// On Windows, this cannot return null - /// On Linux, this returns null if the filesystem does not support creation times - /// On Unices, this returns null if the filesystem or OS does not support creation times - /// On MacOS, this returns the ctime if the filesystem does not support creation times; this is insanity, and yet another reason to hate on Apple - pub fn created(self: Self) ?i128 { - return self.inner.created(); - } -}; - -pub const MetadataUnix = struct { - stat: posix.Stat, - - const Self = @This(); - - /// Returns the size of the file - pub fn size(self: Self) u64 { - return @intCast(self.stat.size); - } - - /// Returns a `Permissions` struct, representing the permissions on the file - pub fn permissions(self: Self) Permissions { - return .{ .inner = .{ .mode = self.stat.mode } }; - } - - /// Returns the `Kind` of the file - pub fn kind(self: Self) Kind { - if (builtin.os.tag == .wasi and !builtin.link_libc) return switch 
(self.stat.filetype) { - .BLOCK_DEVICE => .block_device, - .CHARACTER_DEVICE => .character_device, - .DIRECTORY => .directory, - .SYMBOLIC_LINK => .sym_link, - .REGULAR_FILE => .file, - .SOCKET_STREAM, .SOCKET_DGRAM => .unix_domain_socket, - else => .unknown, - }; - - const m = self.stat.mode & posix.S.IFMT; - - switch (m) { - posix.S.IFBLK => return .block_device, - posix.S.IFCHR => return .character_device, - posix.S.IFDIR => return .directory, - posix.S.IFIFO => return .named_pipe, - posix.S.IFLNK => return .sym_link, - posix.S.IFREG => return .file, - posix.S.IFSOCK => return .unix_domain_socket, - else => {}, - } - - if (builtin.os.tag.isSolarish()) switch (m) { - posix.S.IFDOOR => return .door, - posix.S.IFPORT => return .event_port, - else => {}, - }; - - return .unknown; - } - - /// Returns the last time the file was accessed in nanoseconds since UTC 1970-01-01 - pub fn accessed(self: Self) i128 { - const atime = self.stat.atime(); - return @as(i128, atime.sec) * std.time.ns_per_s + atime.nsec; - } - - /// Returns the last time the file was modified in nanoseconds since UTC 1970-01-01 - pub fn modified(self: Self) i128 { - const mtime = self.stat.mtime(); - return @as(i128, mtime.sec) * std.time.ns_per_s + mtime.nsec; - } - - /// Returns the time the file was created in nanoseconds since UTC 1970-01-01. - /// Returns null if this is not supported by the OS or filesystem - pub fn created(self: Self) ?i128 { - if (!@hasDecl(@TypeOf(self.stat), "birthtime")) return null; - const birthtime = self.stat.birthtime(); - - // If the filesystem doesn't support this the value *should* be: - // On FreeBSD: nsec = 0, sec = -1 - // On NetBSD and OpenBSD: nsec = 0, sec = 0 - // On MacOS, it is set to ctime -- we cannot detect this!! 
- switch (builtin.os.tag) { - .freebsd => if (birthtime.sec == -1 and birthtime.nsec == 0) return null, - .netbsd, .openbsd => if (birthtime.sec == 0 and birthtime.nsec == 0) return null, - .macos => {}, - else => @compileError("Creation time detection not implemented for OS"), - } - - return @as(i128, birthtime.sec) * std.time.ns_per_s + birthtime.nsec; - } -}; - -/// `MetadataUnix`, but using Linux's `statx` syscall. -pub const MetadataLinux = struct { - statx: std.os.linux.Statx, - - const Self = @This(); - - /// Returns the size of the file - pub fn size(self: Self) u64 { - return self.statx.size; - } - - /// Returns a `Permissions` struct, representing the permissions on the file - pub fn permissions(self: Self) Permissions { - return Permissions{ .inner = PermissionsUnix{ .mode = self.statx.mode } }; - } - - /// Returns the `Kind` of the file - pub fn kind(self: Self) Kind { - const m = self.statx.mode & posix.S.IFMT; - - switch (m) { - posix.S.IFBLK => return .block_device, - posix.S.IFCHR => return .character_device, - posix.S.IFDIR => return .directory, - posix.S.IFIFO => return .named_pipe, - posix.S.IFLNK => return .sym_link, - posix.S.IFREG => return .file, - posix.S.IFSOCK => return .unix_domain_socket, - else => {}, - } - - return .unknown; - } - - /// Returns the last time the file was accessed in nanoseconds since UTC 1970-01-01 - pub fn accessed(self: Self) i128 { - return @as(i128, self.statx.atime.sec) * std.time.ns_per_s + self.statx.atime.nsec; - } - - /// Returns the last time the file was modified in nanoseconds since UTC 1970-01-01 - pub fn modified(self: Self) i128 { - return @as(i128, self.statx.mtime.sec) * std.time.ns_per_s + self.statx.mtime.nsec; - } - - /// Returns the time the file was created in nanoseconds since UTC 1970-01-01. 
- /// Returns null if this is not supported by the filesystem, or on kernels before than version 4.11 - pub fn created(self: Self) ?i128 { - if (self.statx.mask & std.os.linux.STATX_BTIME == 0) return null; - return @as(i128, self.statx.btime.sec) * std.time.ns_per_s + self.statx.btime.nsec; - } -}; - -pub const MetadataWasi = struct { - stat: std.os.wasi.filestat_t, - - pub fn size(self: @This()) u64 { - return self.stat.size; - } - - pub fn permissions(self: @This()) Permissions { - return .{ .inner = .{ .mode = self.stat.mode } }; - } - - pub fn kind(self: @This()) Kind { - return switch (self.stat.filetype) { - .BLOCK_DEVICE => .block_device, - .CHARACTER_DEVICE => .character_device, - .DIRECTORY => .directory, - .SYMBOLIC_LINK => .sym_link, - .REGULAR_FILE => .file, - .SOCKET_STREAM, .SOCKET_DGRAM => .unix_domain_socket, - else => .unknown, - }; - } - - pub fn accessed(self: @This()) i128 { - return self.stat.atim; - } - - pub fn modified(self: @This()) i128 { - return self.stat.mtim; - } - - pub fn created(self: @This()) ?i128 { - return self.stat.ctim; - } -}; - -pub const MetadataWindows = struct { - attributes: windows.DWORD, - reparse_tag: windows.DWORD, - _size: u64, - access_time: i128, - modified_time: i128, - creation_time: i128, - - const Self = @This(); - - /// Returns the size of the file - pub fn size(self: Self) u64 { - return self._size; - } - - /// Returns a `Permissions` struct, representing the permissions on the file - pub fn permissions(self: Self) Permissions { - return .{ .inner = .{ .attributes = self.attributes } }; - } - - /// Returns the `Kind` of the file. 
- /// Can only return: `.file`, `.directory`, `.sym_link` or `.unknown` - pub fn kind(self: Self) Kind { - if (self.attributes & windows.FILE_ATTRIBUTE_REPARSE_POINT != 0) { - if (self.reparse_tag & windows.reparse_tag_name_surrogate_bit != 0) { - return .sym_link; - } - } else if (self.attributes & windows.FILE_ATTRIBUTE_DIRECTORY != 0) { - return .directory; - } else { - return .file; - } - return .unknown; - } - - /// Returns the last time the file was accessed in nanoseconds since UTC 1970-01-01 - pub fn accessed(self: Self) i128 { - return self.access_time; - } - - /// Returns the time the file was modified in nanoseconds since UTC 1970-01-01 - pub fn modified(self: Self) i128 { - return self.modified_time; - } - - /// Returns the time the file was created in nanoseconds since UTC 1970-01-01. - /// This never returns null, only returning an optional for compatibility with other OSes - pub fn created(self: Self) ?i128 { - return self.creation_time; - } -}; - -pub const MetadataError = posix.FStatError; - -pub fn metadata(self: File) MetadataError!Metadata { - return .{ - .inner = switch (builtin.os.tag) { - .windows => blk: { - var io_status_block: windows.IO_STATUS_BLOCK = undefined; - var info: windows.FILE_ALL_INFORMATION = undefined; - - const rc = windows.ntdll.NtQueryInformationFile(self.handle, &io_status_block, &info, @sizeOf(windows.FILE_ALL_INFORMATION), .FileAllInformation); - switch (rc) { - .SUCCESS => {}, - // Buffer overflow here indicates that there is more information available than was able to be stored in the buffer - // size provided. This is treated as success because the type of variable-length information that this would be relevant for - // (name, volume name, etc) we don't care about. 
- .BUFFER_OVERFLOW => {}, - .INVALID_PARAMETER => unreachable, - .ACCESS_DENIED => return error.AccessDenied, - else => return windows.unexpectedStatus(rc), - } - - const reparse_tag: windows.DWORD = reparse_blk: { - if (info.BasicInformation.FileAttributes & windows.FILE_ATTRIBUTE_REPARSE_POINT != 0) { - var tag_info: windows.FILE_ATTRIBUTE_TAG_INFO = undefined; - const tag_rc = windows.ntdll.NtQueryInformationFile(self.handle, &io_status_block, &tag_info, @sizeOf(windows.FILE_ATTRIBUTE_TAG_INFO), .FileAttributeTagInformation); - switch (tag_rc) { - .SUCCESS => {}, - // INFO_LENGTH_MISMATCH and ACCESS_DENIED are the only documented possible errors - // https://learn.microsoft.com/en-us/openspecs/windows_protocols/ms-fscc/d295752f-ce89-4b98-8553-266d37c84f0e - .INFO_LENGTH_MISMATCH => unreachable, - .ACCESS_DENIED => return error.AccessDenied, - else => return windows.unexpectedStatus(rc), - } - break :reparse_blk tag_info.ReparseTag; - } - break :reparse_blk 0; - }; - - break :blk .{ - .attributes = info.BasicInformation.FileAttributes, - .reparse_tag = reparse_tag, - ._size = @as(u64, @bitCast(info.StandardInformation.EndOfFile)), - .access_time = windows.fromSysTime(info.BasicInformation.LastAccessTime), - .modified_time = windows.fromSysTime(info.BasicInformation.LastWriteTime), - .creation_time = windows.fromSysTime(info.BasicInformation.CreationTime), - }; - }, - .linux => blk: { - var stx = std.mem.zeroes(linux.Statx); - - // We are gathering information for Metadata, which is meant to contain all the - // native OS information about the file, so use all known flags. 
- const rc = linux.statx( - self.handle, - "", - linux.AT.EMPTY_PATH, - linux.STATX_BASIC_STATS | linux.STATX_BTIME, - &stx, - ); - - switch (linux.E.init(rc)) { - .SUCCESS => {}, - .ACCES => unreachable, - .BADF => unreachable, - .FAULT => unreachable, - .INVAL => unreachable, - .LOOP => unreachable, - .NAMETOOLONG => unreachable, - .NOENT => unreachable, - .NOMEM => return error.SystemResources, - .NOTDIR => unreachable, - else => |err| return posix.unexpectedErrno(err), - } - - break :blk .{ - .statx = stx, - }; - }, - .wasi => .{ .stat = try std.os.fstat_wasi(self.handle) }, - else => .{ .stat = try posix.fstat(self.handle) }, - }, - }; -} - pub const UpdateTimesError = posix.FutimensError || windows.SetFileTimeError; /// The underlying file system may have a different granularity than nanoseconds, @@ -1130,19 +805,12 @@ pub fn updateTimes( try posix.futimens(self.handle, ×); } -/// Reads all the bytes from the current position to the end of the file. -/// On success, caller owns returned buffer. -/// If the file is larger than `max_bytes`, returns `error.FileTooBig`. +/// Deprecated in favor of `Reader`. pub fn readToEndAlloc(self: File, allocator: Allocator, max_bytes: usize) ![]u8 { return self.readToEndAllocOptions(allocator, max_bytes, null, .of(u8), null); } -/// Reads all the bytes from the current position to the end of the file. -/// On success, caller owns returned buffer. -/// If the file is larger than `max_bytes`, returns `error.FileTooBig`. -/// If `size_hint` is specified the initial buffer size is calculated using -/// that value, otherwise an arbitrary value is used instead. -/// Allows specifying alignment and a sentinel value. +/// Deprecated in favor of `Reader`. 
pub fn readToEndAllocOptions( self: File, allocator: Allocator, @@ -1161,7 +829,7 @@ pub fn readToEndAllocOptions( var array_list = try std.ArrayListAligned(u8, alignment).initCapacity(allocator, initial_cap); defer array_list.deinit(); - self.reader().readAllArrayListAligned(alignment, &array_list, max_bytes) catch |err| switch (err) { + self.deprecatedReader().readAllArrayListAligned(alignment, &array_list, max_bytes) catch |err| switch (err) { error.StreamTooLong => return error.FileTooBig, else => |e| return e, }; @@ -1184,8 +852,7 @@ pub fn read(self: File, buffer: []u8) ReadError!usize { return posix.read(self.handle, buffer); } -/// Returns the number of bytes read. If the number read is smaller than `buffer.len`, it -/// means the file reached the end. Reaching the end of a file is not an error condition. +/// Deprecated in favor of `Reader`. pub fn readAll(self: File, buffer: []u8) ReadError!usize { var index: usize = 0; while (index != buffer.len) { @@ -1206,10 +873,7 @@ pub fn pread(self: File, buffer: []u8, offset: u64) PReadError!usize { return posix.pread(self.handle, buffer, offset); } -/// Returns the number of bytes read. If the number read is smaller than `buffer.len`, it -/// means the file reached the end. Reaching the end of a file is not an error condition. -/// On Windows, this function currently does alter the file pointer. -/// https://github.com/ziglang/zig/issues/12783 +/// Deprecated in favor of `Reader`. 
pub fn preadAll(self: File, buffer: []u8, offset: u64) PReadError!usize { var index: usize = 0; while (index != buffer.len) { @@ -1223,8 +887,7 @@ pub fn preadAll(self: File, buffer: []u8, offset: u64) PReadError!usize { /// See https://github.com/ziglang/zig/issues/7699 pub fn readv(self: File, iovecs: []const posix.iovec) ReadError!usize { if (is_windows) { - // TODO improve this to use ReadFileScatter - if (iovecs.len == 0) return @as(usize, 0); + if (iovecs.len == 0) return 0; const first = iovecs[0]; return windows.ReadFile(self.handle, first.base[0..first.len], null); } @@ -1232,19 +895,7 @@ pub fn readv(self: File, iovecs: []const posix.iovec) ReadError!usize { return posix.readv(self.handle, iovecs); } -/// Returns the number of bytes read. If the number read is smaller than the total bytes -/// from all the buffers, it means the file reached the end. Reaching the end of a file -/// is not an error condition. -/// -/// The `iovecs` parameter is mutable because: -/// * This function needs to mutate the fields in order to handle partial -/// reads from the underlying OS layer. -/// * The OS layer expects pointer addresses to be inside the application's address space -/// even if the length is zero. Meanwhile, in Zig, slices may have undefined pointer -/// addresses when the length is zero. So this function modifies the base fields -/// when the length is zero. -/// -/// Related open issue: https://github.com/ziglang/zig/issues/7699 +/// Deprecated in favor of `Reader`. 
pub fn readvAll(self: File, iovecs: []posix.iovec) ReadError!usize { if (iovecs.len == 0) return 0; @@ -1279,8 +930,7 @@ pub fn readvAll(self: File, iovecs: []posix.iovec) ReadError!usize { /// https://github.com/ziglang/zig/issues/12783 pub fn preadv(self: File, iovecs: []const posix.iovec, offset: u64) PReadError!usize { if (is_windows) { - // TODO improve this to use ReadFileScatter - if (iovecs.len == 0) return @as(usize, 0); + if (iovecs.len == 0) return 0; const first = iovecs[0]; return windows.ReadFile(self.handle, first.base[0..first.len], offset); } @@ -1288,14 +938,7 @@ pub fn preadv(self: File, iovecs: []const posix.iovec, offset: u64) PReadError!u return posix.preadv(self.handle, iovecs, offset); } -/// Returns the number of bytes read. If the number read is smaller than the total bytes -/// from all the buffers, it means the file reached the end. Reaching the end of a file -/// is not an error condition. -/// The `iovecs` parameter is mutable because this function needs to mutate the fields in -/// order to handle partial reads from the underlying OS layer. -/// See https://github.com/ziglang/zig/issues/7699 -/// On Windows, this function currently does alter the file pointer. -/// https://github.com/ziglang/zig/issues/12783 +/// Deprecated in favor of `Reader`. pub fn preadvAll(self: File, iovecs: []posix.iovec, offset: u64) PReadError!usize { if (iovecs.len == 0) return 0; @@ -1328,6 +971,7 @@ pub fn write(self: File, bytes: []const u8) WriteError!usize { return posix.write(self.handle, bytes); } +/// Deprecated in favor of `Writer`. pub fn writeAll(self: File, bytes: []const u8) WriteError!void { var index: usize = 0; while (index < bytes.len) { @@ -1345,8 +989,7 @@ pub fn pwrite(self: File, bytes: []const u8, offset: u64) PWriteError!usize { return posix.pwrite(self.handle, bytes, offset); } -/// On Windows, this function currently does alter the file pointer. -/// https://github.com/ziglang/zig/issues/12783 +/// Deprecated in favor of `Writer`. 
pub fn pwriteAll(self: File, bytes: []const u8, offset: u64) PWriteError!void { var index: usize = 0; while (index < bytes.len) { @@ -1355,11 +998,10 @@ pub fn pwriteAll(self: File, bytes: []const u8, offset: u64) PWriteError!void { } /// See https://github.com/ziglang/zig/issues/7699 -/// See equivalent function: `std.net.Stream.writev`. pub fn writev(self: File, iovecs: []const posix.iovec_const) WriteError!usize { if (is_windows) { // TODO improve this to use WriteFileScatter - if (iovecs.len == 0) return @as(usize, 0); + if (iovecs.len == 0) return 0; const first = iovecs[0]; return windows.WriteFile(self.handle, first.base[0..first.len], null); } @@ -1367,15 +1009,7 @@ pub fn writev(self: File, iovecs: []const posix.iovec_const) WriteError!usize { return posix.writev(self.handle, iovecs); } -/// The `iovecs` parameter is mutable because: -/// * This function needs to mutate the fields in order to handle partial -/// writes from the underlying OS layer. -/// * The OS layer expects pointer addresses to be inside the application's address space -/// even if the length is zero. Meanwhile, in Zig, slices may have undefined pointer -/// addresses when the length is zero. So this function modifies the base fields -/// when the length is zero. -/// See https://github.com/ziglang/zig/issues/7699 -/// See equivalent function: `std.net.Stream.writevAll`. +/// Deprecated in favor of `Writer`. 
pub fn writevAll(self: File, iovecs: []posix.iovec_const) WriteError!void { if (iovecs.len == 0) return; @@ -1405,8 +1039,7 @@ pub fn writevAll(self: File, iovecs: []posix.iovec_const) WriteError!void { /// https://github.com/ziglang/zig/issues/12783 pub fn pwritev(self: File, iovecs: []posix.iovec_const, offset: u64) PWriteError!usize { if (is_windows) { - // TODO improve this to use WriteFileScatter - if (iovecs.len == 0) return @as(usize, 0); + if (iovecs.len == 0) return 0; const first = iovecs[0]; return windows.WriteFile(self.handle, first.base[0..first.len], offset); } @@ -1414,14 +1047,9 @@ pub fn pwritev(self: File, iovecs: []posix.iovec_const, offset: u64) PWriteError return posix.pwritev(self.handle, iovecs, offset); } -/// The `iovecs` parameter is mutable because this function needs to mutate the fields in -/// order to handle partial writes from the underlying OS layer. -/// See https://github.com/ziglang/zig/issues/7699 -/// On Windows, this function currently does alter the file pointer. -/// https://github.com/ziglang/zig/issues/12783 +/// Deprecated in favor of `Writer`. pub fn pwritevAll(self: File, iovecs: []posix.iovec_const, offset: u64) PWriteError!void { if (iovecs.len == 0) return; - var i: usize = 0; var off: u64 = 0; while (true) { @@ -1439,14 +1067,14 @@ pub fn pwritevAll(self: File, iovecs: []posix.iovec_const, offset: u64) PWriteEr pub const CopyRangeError = posix.CopyFileRangeError; +/// Deprecated in favor of `Writer`. pub fn copyRange(in: File, in_offset: u64, out: File, out_offset: u64, len: u64) CopyRangeError!u64 { const adjusted_len = math.cast(usize, len) orelse maxInt(usize); const result = try posix.copy_file_range(in.handle, in_offset, out.handle, out_offset, adjusted_len, 0); return result; } -/// Returns the number of bytes copied. If the number read is smaller than `buffer.len`, it -/// means the in file reached the end. Reaching the end of a file is not an error condition. +/// Deprecated in favor of `Writer`. 
pub fn copyRangeAll(in: File, in_offset: u64, out: File, out_offset: u64, len: u64) CopyRangeError!u64 { var total_bytes_copied: u64 = 0; var in_off = in_offset; @@ -1461,24 +1089,18 @@ pub fn copyRangeAll(in: File, in_offset: u64, out: File, out_offset: u64, len: u return total_bytes_copied; } +/// Deprecated in favor of `Writer`. pub const WriteFileOptions = struct { in_offset: u64 = 0, - - /// `null` means the entire file. `0` means no bytes from the file. - /// When this is `null`, trailers must be sent in a separate writev() call - /// due to a flaw in the BSD sendfile API. Other operating systems, such as - /// Linux, already do this anyway due to API limitations. - /// If the size of the source file is known, passing the size here will save one syscall. in_len: ?u64 = null, - headers_and_trailers: []posix.iovec_const = &[0]posix.iovec_const{}, - - /// The trailer count is inferred from `headers_and_trailers.len - header_count` header_count: usize = 0, }; +/// Deprecated in favor of `Writer`. pub const WriteFileError = ReadError || error{EndOfStream} || WriteError; +/// Deprecated in favor of `Writer`. pub fn writeFileAll(self: File, in_file: File, args: WriteFileOptions) WriteFileError!void { return self.writeFileAllSendfile(in_file, args) catch |err| switch (err) { error.Unseekable, @@ -1488,35 +1110,27 @@ pub fn writeFileAll(self: File, in_file: File, args: WriteFileOptions) WriteFile error.NetworkUnreachable, error.NetworkSubsystemFailed, => return self.writeFileAllUnseekable(in_file, args), - else => |e| return e, }; } -/// Does not try seeking in either of the File parameters. -/// See `writeFileAll` as an alternative to calling this. +/// Deprecated in favor of `Writer`. 
pub fn writeFileAllUnseekable(self: File, in_file: File, args: WriteFileOptions) WriteFileError!void { const headers = args.headers_and_trailers[0..args.header_count]; const trailers = args.headers_and_trailers[args.header_count..]; - try self.writevAll(headers); - - try in_file.reader().skipBytes(args.in_offset, .{ .buf_size = 4096 }); - + try in_file.deprecatedReader().skipBytes(args.in_offset, .{ .buf_size = 4096 }); var fifo = std.fifo.LinearFifo(u8, .{ .Static = 4096 }).init(); if (args.in_len) |len| { - var stream = std.io.limitedReader(in_file.reader(), len); - try fifo.pump(stream.reader(), self.writer()); + var stream = std.io.limitedReader(in_file.deprecatedReader(), len); + try fifo.pump(stream.reader(), self.deprecatedWriter()); } else { - try fifo.pump(in_file.reader(), self.writer()); + try fifo.pump(in_file.deprecatedReader(), self.deprecatedWriter()); } - try self.writevAll(trailers); } -/// Low level function which can fail for OS-specific reasons. -/// See `writeFileAll` as an alternative to calling this. -/// TODO integrate with async I/O +/// Deprecated in favor of `Writer`. fn writeFileAllSendfile(self: File, in_file: File, args: WriteFileOptions) posix.SendFileError!void { const count = blk: { if (args.in_len) |l| { @@ -1581,18 +1195,23 @@ fn writeFileAllSendfile(self: File, in_file: File, args: WriteFileOptions) posix } } -pub const Reader = io.Reader(File, ReadError, read); +/// Deprecated in favor of `Reader`. +pub const DeprecatedReader = io.GenericReader(File, ReadError, read); -pub fn reader(file: File) Reader { +/// Deprecated in favor of `Reader`. +pub fn deprecatedReader(file: File) DeprecatedReader { return .{ .context = file }; } -pub const Writer = io.Writer(File, WriteError, write); +/// Deprecated in favor of `Writer`. +pub const DeprecatedWriter = io.GenericWriter(File, WriteError, write); -pub fn writer(file: File) Writer { +/// Deprecated in favor of `Writer`. 
+pub fn deprecatedWriter(file: File) DeprecatedWriter { return .{ .context = file }; } +/// Deprecated in favor of `Reader` and `Writer`. pub const SeekableStream = io.SeekableStream( File, SeekError, @@ -1603,10 +1222,800 @@ pub const SeekableStream = io.SeekableStream( getEndPos, ); +/// Deprecated in favor of `Reader` and `Writer`. pub fn seekableStream(file: File) SeekableStream { return .{ .context = file }; } +/// Memoizes key information about a file handle such as: +/// * The size from calling stat, or the error that occurred therein. +/// * The current seek position. +/// * The error that occurred when trying to seek. +/// * Whether reading should be done positionally or streaming. +/// * Whether reading should be done via fd-to-fd syscalls (e.g. `sendfile`) +/// versus plain variants (e.g. `read`). +/// +/// Fulfills the `std.io.Reader` interface. +pub const Reader = struct { + file: File, + err: ?ReadError = null, + mode: Reader.Mode = .positional, + /// Tracks the true seek position in the file. To obtain the logical + /// position, subtract the buffer size from this value. + pos: u64 = 0, + size: ?u64 = null, + size_err: ?GetEndPosError = null, + seek_err: ?Reader.SeekError = null, + interface: std.io.Reader, + + pub const SeekError = File.SeekError || error{ + /// Seeking fell back to reading, and reached the end before the requested seek position. + /// `pos` remains at the end of the file. + EndOfStream, + /// Seeking fell back to reading, which failed. + ReadFailed, + }; + + pub const Mode = enum { + streaming, + positional, + /// Avoid syscalls other than `read` and `readv`. + streaming_reading, + /// Avoid syscalls other than `pread` and `preadv`. + positional_reading, + /// Indicates reading cannot continue because of a seek failure. 
+ failure, + + pub fn toStreaming(m: @This()) @This() { + return switch (m) { + .positional, .streaming => .streaming, + .positional_reading, .streaming_reading => .streaming_reading, + .failure => .failure, + }; + } + + pub fn toReading(m: @This()) @This() { + return switch (m) { + .positional, .positional_reading => .positional_reading, + .streaming, .streaming_reading => .streaming_reading, + .failure => .failure, + }; + } + }; + + pub fn initInterface(buffer: []u8) std.io.Reader { + return .{ + .vtable = &.{ + .stream = Reader.stream, + .discard = Reader.discard, + }, + .buffer = buffer, + .seek = 0, + .end = 0, + }; + } + + pub fn init(file: File, buffer: []u8) Reader { + return .{ + .file = file, + .interface = initInterface(buffer), + }; + } + + pub fn initSize(file: File, buffer: []u8, size: ?u64) Reader { + return .{ + .file = file, + .interface = initInterface(buffer), + .size = size, + }; + } + + pub fn initMode(file: File, buffer: []u8, init_mode: Reader.Mode) Reader { + return .{ + .file = file, + .interface = initInterface(buffer), + .mode = init_mode, + }; + } + + pub fn getSize(r: *Reader) GetEndPosError!u64 { + return r.size orelse { + if (r.size_err) |err| return err; + if (r.file.getEndPos()) |size| { + r.size = size; + return size; + } else |err| { + r.size_err = err; + return err; + } + }; + } + + pub fn seekBy(r: *Reader, offset: i64) Reader.SeekError!void { + switch (r.mode) { + .positional, .positional_reading => { + // TODO: make += operator allow any integer types + r.pos = @intCast(@as(i64, @intCast(r.pos)) + offset); + }, + .streaming, .streaming_reading => { + const seek_err = r.seek_err orelse e: { + if (posix.lseek_CUR(r.file.handle, offset)) |_| { + // TODO: make += operator allow any integer types + r.pos = @intCast(@as(i64, @intCast(r.pos)) + offset); + return; + } else |err| { + r.seek_err = err; + break :e err; + } + }; + var remaining = std.math.cast(u64, offset) orelse return seek_err; + while (remaining > 0) { + const n = 
discard(&r.interface, .limited64(remaining)) catch |err| { + r.seek_err = err; + return err; + }; + r.pos += n; + remaining -= n; + } + }, + .failure => return r.seek_err.?, + } + } + + pub fn seekTo(r: *Reader, offset: u64) Reader.SeekError!void { + switch (r.mode) { + .positional, .positional_reading => { + r.pos = offset; + }, + .streaming, .streaming_reading => { + if (offset >= r.pos) return Reader.seekBy(r, offset - r.pos); + if (r.seek_err) |err| return err; + posix.lseek_SET(r.file.handle, offset) catch |err| { + r.seek_err = err; + return err; + }; + r.pos = offset; + }, + .failure => return r.seek_err.?, + } + } + + /// Number of slices to store on the stack, when trying to send as many byte + /// vectors through the underlying read calls as possible. + const max_buffers_len = 16; + + fn stream(io_reader: *std.io.Reader, w: *std.io.Writer, limit: std.io.Limit) std.io.Reader.StreamError!usize { + const r: *Reader = @alignCast(@fieldParentPtr("interface", io_reader)); + switch (r.mode) { + .positional, .streaming => return w.sendFile(r, limit) catch |write_err| switch (write_err) { + error.Unimplemented => { + r.mode = r.mode.toReading(); + return 0; + }, + else => |e| return e, + }, + .positional_reading => { + if (is_windows) { + // Unfortunately, `ReadFileScatter` cannot be used since it + // requires page alignment. 
+ const dest = limit.slice(try w.writableSliceGreedy(1)); + const n = try readPositional(r, dest); + w.advance(n); + return n; + } + var iovecs_buffer: [max_buffers_len]posix.iovec = undefined; + const dest = try w.writableVectorPosix(&iovecs_buffer, limit); + assert(dest[0].len > 0); + const n = posix.preadv(r.file.handle, dest, r.pos) catch |err| switch (err) { + error.Unseekable => { + r.mode = r.mode.toStreaming(); + const pos = r.pos; + if (pos != 0) { + r.pos = 0; + r.seekBy(@intCast(pos)) catch { + r.mode = .failure; + return error.ReadFailed; + }; + } + return 0; + }, + else => |e| { + r.err = e; + return error.ReadFailed; + }, + }; + if (n == 0) { + r.size = r.pos; + return error.EndOfStream; + } + r.pos += n; + return n; + }, + .streaming_reading => { + if (is_windows) { + // Unfortunately, `ReadFileScatter` cannot be used since it + // requires page alignment. + const dest = limit.slice(try w.writableSliceGreedy(1)); + const n = try readStreaming(r, dest); + w.advance(n); + return n; + } + var iovecs_buffer: [max_buffers_len]posix.iovec = undefined; + const dest = try w.writableVectorPosix(&iovecs_buffer, limit); + assert(dest[0].len > 0); + const n = posix.readv(r.file.handle, dest) catch |err| { + r.err = err; + return error.ReadFailed; + }; + if (n == 0) { + r.size = r.pos; + return error.EndOfStream; + } + r.pos += n; + return n; + }, + .failure => return error.ReadFailed, + } + } + + fn discard(io_reader: *std.io.Reader, limit: std.io.Limit) std.io.Reader.Error!usize { + const r: *Reader = @alignCast(@fieldParentPtr("interface", io_reader)); + const file = r.file; + const pos = r.pos; + switch (r.mode) { + .positional, .positional_reading => { + const size = r.size orelse { + if (file.getEndPos()) |size| { + r.size = size; + } else |err| { + r.size_err = err; + r.mode = r.mode.toStreaming(); + } + return 0; + }; + const delta = @min(@intFromEnum(limit), size - pos); + r.pos = pos + delta; + return delta; + }, + .streaming, .streaming_reading => { + 
// Unfortunately we can't seek forward without knowing the + // size because the seek syscalls provided to us will not + // return the true end position if a seek would exceed the + // end. + fallback: { + if (r.size_err == null and r.seek_err == null) break :fallback; + var trash_buffer: [128]u8 = undefined; + const trash = &trash_buffer; + if (is_windows) { + const n = windows.ReadFile(file.handle, trash, null) catch |err| { + r.err = err; + return error.ReadFailed; + }; + if (n == 0) { + r.size = pos; + return error.EndOfStream; + } + r.pos = pos + n; + return n; + } + var iovecs: [max_buffers_len]std.posix.iovec = undefined; + var iovecs_i: usize = 0; + var remaining = @intFromEnum(limit); + while (remaining > 0 and iovecs_i < iovecs.len) { + iovecs[iovecs_i] = .{ .base = trash, .len = @min(trash.len, remaining) }; + remaining -= iovecs[iovecs_i].len; + iovecs_i += 1; + } + const n = posix.readv(file.handle, iovecs[0..iovecs_i]) catch |err| { + r.err = err; + return error.ReadFailed; + }; + if (n == 0) { + r.size = pos; + return error.EndOfStream; + } + r.pos = pos + n; + return n; + } + const size = r.size orelse { + if (file.getEndPos()) |size| { + r.size = size; + } else |err| { + r.size_err = err; + } + return 0; + }; + const n = @min(size - pos, std.math.maxInt(i64), @intFromEnum(limit)); + file.seekBy(n) catch |err| { + r.seek_err = err; + return 0; + }; + r.pos = pos + n; + return n; + }, + .failure => return error.ReadFailed, + } + } + + pub fn readPositional(r: *Reader, dest: []u8) std.io.Reader.Error!usize { + const n = r.file.pread(dest, r.pos) catch |err| switch (err) { + error.Unseekable => { + r.mode = r.mode.toStreaming(); + const pos = r.pos; + if (pos != 0) { + r.pos = 0; + r.seekBy(@intCast(pos)) catch { + r.mode = .failure; + return error.ReadFailed; + }; + } + return 0; + }, + else => |e| { + r.err = e; + return error.ReadFailed; + }, + }; + if (n == 0) { + r.size = r.pos; + return error.EndOfStream; + } + r.pos += n; + return n; + } + + pub 
fn readStreaming(r: *Reader, dest: []u8) std.io.Reader.Error!usize { + const n = r.file.read(dest) catch |err| { + r.err = err; + return error.ReadFailed; + }; + if (n == 0) { + r.size = r.pos; + return error.EndOfStream; + } + r.pos += n; + return n; + } + + pub fn read(r: *Reader, dest: []u8) std.io.Reader.Error!usize { + switch (r.mode) { + .positional, .positional_reading => return readPositional(r, dest), + .streaming, .streaming_reading => return readStreaming(r, dest), + .failure => return error.ReadFailed, + } + } + + pub fn atEnd(r: *Reader) bool { + // Even if stat fails, size is set when end is encountered. + const size = r.size orelse return false; + return size - r.pos == 0; + } +}; + +pub const Writer = struct { + file: File, + err: ?WriteError = null, + mode: Writer.Mode = .positional, + /// Tracks the true seek position in the file. To obtain the logical + /// position, add the buffer size to this value. + pos: u64 = 0, + sendfile_err: ?SendfileError = null, + copy_file_range_err: ?CopyFileRangeError = null, + fcopyfile_err: ?FcopyfileError = null, + seek_err: ?SeekError = null, + interface: std.io.Writer, + + pub const Mode = Reader.Mode; + + pub const SendfileError = error{ + UnsupportedOperation, + SystemResources, + InputOutput, + BrokenPipe, + WouldBlock, + Unexpected, + }; + + pub const CopyFileRangeError = std.os.freebsd.CopyFileRangeError || std.os.linux.wrapped.CopyFileRangeError; + + pub const FcopyfileError = error{ + OperationNotSupported, + OutOfMemory, + Unexpected, + }; + + /// Number of slices to store on the stack, when trying to send as many byte + /// vectors through the underlying write calls as possible. 
+ const max_buffers_len = 16; + + pub fn init(file: File, buffer: []u8) Writer { + return initMode(file, buffer, .positional); + } + + pub fn initMode(file: File, buffer: []u8, init_mode: Writer.Mode) Writer { + return .{ + .file = file, + .interface = initInterface(buffer), + .mode = init_mode, + }; + } + + pub fn initInterface(buffer: []u8) std.io.Writer { + return .{ + .vtable = &.{ + .drain = drain, + .sendFile = sendFile, + }, + .buffer = buffer, + }; + } + + pub fn moveToReader(w: *Writer) Reader { + defer w.* = undefined; + return .{ + .file = w.file, + .mode = w.mode, + .pos = w.pos, + .seek_err = w.seek_err, + }; + } + + pub fn drain(io_w: *std.io.Writer, data: []const []const u8, splat: usize) std.io.Writer.Error!usize { + const w: *Writer = @alignCast(@fieldParentPtr("interface", io_w)); + const handle = w.file.handle; + const buffered = io_w.buffered(); + if (is_windows) switch (w.mode) { + .positional, .positional_reading => { + if (buffered.len != 0) { + const n = windows.WriteFile(handle, buffered, w.pos) catch |err| { + w.err = err; + return error.WriteFailed; + }; + w.pos += n; + return io_w.consume(n); + } + for (data[0 .. data.len - 1]) |buf| { + if (buf.len == 0) continue; + const n = windows.WriteFile(handle, buf, w.pos) catch |err| { + w.err = err; + return error.WriteFailed; + }; + w.pos += n; + return io_w.consume(n); + } + const pattern = data[data.len - 1]; + if (pattern.len == 0 or splat == 0) return 0; + const n = windows.WriteFile(handle, pattern, w.pos) catch |err| { + w.err = err; + return error.WriteFailed; + }; + w.pos += n; + return io_w.consume(n); + }, + .streaming, .streaming_reading => { + if (buffered.len != 0) { + const n = windows.WriteFile(handle, buffered, null) catch |err| { + w.err = err; + return error.WriteFailed; + }; + w.pos += n; + return io_w.consume(n); + } + for (data[0 .. 
data.len - 1]) |buf| { + if (buf.len == 0) continue; + const n = windows.WriteFile(handle, buf, null) catch |err| { + w.err = err; + return error.WriteFailed; + }; + w.pos += n; + return io_w.consume(n); + } + const pattern = data[data.len - 1]; + if (pattern.len == 0 or splat == 0) return 0; + const n = windows.WriteFile(handle, pattern, null) catch |err| { + std.debug.print("windows write file failed3: {t}\n", .{err}); + w.err = err; + return error.WriteFailed; + }; + w.pos += n; + return io_w.consume(n); + }, + .failure => return error.WriteFailed, + }; + var iovecs: [max_buffers_len]std.posix.iovec_const = undefined; + var len: usize = 0; + if (buffered.len > 0) { + iovecs[len] = .{ .base = buffered.ptr, .len = buffered.len }; + len += 1; + } + for (data[0 .. data.len - 1]) |d| { + if (d.len == 0) continue; + iovecs[len] = .{ .base = d.ptr, .len = d.len }; + len += 1; + if (iovecs.len - len == 0) break; + } + const pattern = data[data.len - 1]; + if (iovecs.len - len != 0) switch (splat) { + 0 => {}, + 1 => if (pattern.len != 0) { + iovecs[len] = .{ .base = pattern.ptr, .len = pattern.len }; + len += 1; + }, + else => switch (pattern.len) { + 0 => {}, + 1 => { + const splat_buffer_candidate = io_w.buffer[io_w.end..]; + var backup_buffer: [64]u8 = undefined; + const splat_buffer = if (splat_buffer_candidate.len >= backup_buffer.len) + splat_buffer_candidate + else + &backup_buffer; + const memset_len = @min(splat_buffer.len, splat); + const buf = splat_buffer[0..memset_len]; + @memset(buf, pattern[0]); + iovecs[len] = .{ .base = buf.ptr, .len = buf.len }; + len += 1; + var remaining_splat = splat - buf.len; + while (remaining_splat > splat_buffer.len and iovecs.len - len != 0) { + assert(buf.len == splat_buffer.len); + iovecs[len] = .{ .base = splat_buffer.ptr, .len = splat_buffer.len }; + len += 1; + remaining_splat -= splat_buffer.len; + } + if (remaining_splat > 0 and iovecs.len - len != 0) { + iovecs[len] = .{ .base = splat_buffer.ptr, .len = remaining_splat 
}; + len += 1; + } + }, + else => for (0..splat) |_| { + iovecs[len] = .{ .base = pattern.ptr, .len = pattern.len }; + len += 1; + if (iovecs.len - len == 0) break; + }, + }, + }; + if (len == 0) return 0; + switch (w.mode) { + .positional, .positional_reading => { + const n = std.posix.pwritev(handle, iovecs[0..len], w.pos) catch |err| switch (err) { + error.Unseekable => { + w.mode = w.mode.toStreaming(); + const pos = w.pos; + if (pos != 0) { + w.pos = 0; + w.seekTo(@intCast(pos)) catch { + w.mode = .failure; + return error.WriteFailed; + }; + } + return 0; + }, + else => |e| { + w.err = e; + return error.WriteFailed; + }, + }; + w.pos += n; + return io_w.consume(n); + }, + .streaming, .streaming_reading => { + const n = std.posix.writev(handle, iovecs[0..len]) catch |err| { + w.err = err; + return error.WriteFailed; + }; + w.pos += n; + return io_w.consume(n); + }, + .failure => return error.WriteFailed, + } + } + + pub fn sendFile( + io_w: *std.io.Writer, + file_reader: *Reader, + limit: std.io.Limit, + ) std.io.Writer.FileError!usize { + const w: *Writer = @alignCast(@fieldParentPtr("interface", io_w)); + const out_fd = w.file.handle; + const in_fd = file_reader.file.handle; + // TODO try using copy_file_range on FreeBSD + // TODO try using sendfile on macOS + // TODO try using sendfile on FreeBSD + if (native_os == .linux and w.mode == .streaming) sf: { + // Try using sendfile on Linux. + if (w.sendfile_err != null) break :sf; + // Linux sendfile does not support headers. + const buffered = limit.slice(file_reader.interface.buffer); + if (io_w.end != 0 or buffered.len != 0) return drain(io_w, &.{buffered}, 1); + const max_count = 0x7ffff000; // Avoid EINVAL. 
+ var off: std.os.linux.off_t = undefined; + const off_ptr: ?*std.os.linux.off_t, const count: usize = switch (file_reader.mode) { + .positional => o: { + const size = file_reader.size orelse { + if (file_reader.file.getEndPos()) |size| { + file_reader.size = size; + } else |err| { + file_reader.size_err = err; + file_reader.mode = .streaming; + } + return 0; + }; + off = std.math.cast(std.os.linux.off_t, file_reader.pos) orelse return error.ReadFailed; + break :o .{ &off, @min(@intFromEnum(limit), size - file_reader.pos, max_count) }; + }, + .streaming => .{ null, limit.minInt(max_count) }, + .streaming_reading, .positional_reading => break :sf, + .failure => return error.ReadFailed, + }; + const n = std.os.linux.wrapped.sendfile(out_fd, in_fd, off_ptr, count) catch |err| switch (err) { + error.Unseekable => { + file_reader.mode = file_reader.mode.toStreaming(); + const pos = file_reader.pos; + if (pos != 0) { + file_reader.pos = 0; + file_reader.seekBy(@intCast(pos)) catch { + file_reader.mode = .failure; + return error.ReadFailed; + }; + } + return 0; + }, + else => |e| { + w.sendfile_err = e; + return 0; + }, + }; + if (n == 0) { + file_reader.size = file_reader.pos; + return error.EndOfStream; + } + file_reader.pos += n; + w.pos += n; + return n; + } + const copy_file_range = switch (native_os) { + .freebsd => std.os.freebsd.copy_file_range, + .linux => if (std.c.versionCheck(.{ .major = 2, .minor = 27, .patch = 0 })) std.os.linux.wrapped.copy_file_range else {}, + else => {}, + }; + if (@TypeOf(copy_file_range) != void) cfr: { + if (w.copy_file_range_err != null) break :cfr; + const buffered = limit.slice(file_reader.interface.buffer); + if (io_w.end != 0 or buffered.len != 0) return drain(io_w, &.{buffered}, 1); + var off_in: i64 = undefined; + var off_out: i64 = undefined; + const off_in_ptr: ?*i64 = switch (file_reader.mode) { + .positional_reading, .streaming_reading => return error.Unimplemented, + .positional => p: { + off_in = 
@intCast(file_reader.pos); + break :p &off_in; + }, + .streaming => null, + .failure => return error.WriteFailed, + }; + const off_out_ptr: ?*i64 = switch (w.mode) { + .positional_reading, .streaming_reading => return error.Unimplemented, + .positional => p: { + off_out = @intCast(w.pos); + break :p &off_out; + }, + .streaming => null, + .failure => return error.WriteFailed, + }; + const n = copy_file_range(in_fd, off_in_ptr, out_fd, off_out_ptr, @intFromEnum(limit), 0) catch |err| { + w.copy_file_range_err = err; + return 0; + }; + if (n == 0) { + file_reader.size = file_reader.pos; + return error.EndOfStream; + } + file_reader.pos += n; + w.pos += n; + return n; + } + + if (builtin.os.tag.isDarwin()) fcf: { + if (w.fcopyfile_err != null) break :fcf; + if (file_reader.pos != 0) break :fcf; + if (w.pos != 0) break :fcf; + if (limit != .unlimited) break :fcf; + const rc = std.c.fcopyfile(in_fd, out_fd, null, .{ .DATA = true }); + switch (posix.errno(rc)) { + .SUCCESS => {}, + .INVAL => if (builtin.mode == .Debug) @panic("invalid API usage") else { + w.fcopyfile_err = error.Unexpected; + return 0; + }, + .NOMEM => { + w.fcopyfile_err = error.OutOfMemory; + return 0; + }, + .OPNOTSUPP => { + w.fcopyfile_err = error.OperationNotSupported; + return 0; + }, + else => |err| { + w.fcopyfile_err = posix.unexpectedErrno(err); + return 0; + }, + } + const n = if (file_reader.size) |size| size else @panic("TODO figure out how much copied"); + file_reader.pos = n; + w.pos = n; + return n; + } + + return error.Unimplemented; + } + + pub fn seekTo(w: *Writer, offset: u64) SeekError!void { + switch (w.mode) { + .positional, .positional_reading => { + w.pos = offset; + }, + .streaming, .streaming_reading => { + if (w.seek_err) |err| return err; + posix.lseek_SET(w.file.handle, offset) catch |err| { + w.seek_err = err; + return err; + }; + w.pos = offset; + }, + .failure => return w.seek_err.?, + } + } + + pub const EndError = SetEndPosError || std.io.Writer.Error; + + /// Flushes 
any buffered data and sets the end position of the file. + /// + /// If not overwriting existing contents, then calling `interface.flush` + /// directly is sufficient. + /// + /// Flush failure is handled by setting `err` so that it can be handled + /// along with other write failures. + pub fn end(w: *Writer) EndError!void { + try w.interface.flush(); + return w.file.setEndPos(w.pos); + } +}; + +/// Defaults to positional reading; falls back to streaming. +/// +/// Positional is more threadsafe, since the global seek position is not +/// affected. +pub fn reader(file: File, buffer: []u8) Reader { + return .init(file, buffer); +} + +/// Positional is more threadsafe, since the global seek position is not +/// affected, but when such syscalls are not available, preemptively choosing +/// `Reader.Mode.streaming` will skip a failed syscall. +pub fn readerStreaming(file: File, buffer: []u8) Reader { + return .{ + .file = file, + .interface = Reader.initInterface(buffer), + .mode = .streaming, + .seek_err = error.Unseekable, + }; +} + +/// Defaults to positional reading; falls back to streaming. +/// +/// Positional is more threadsafe, since the global seek position is not +/// affected. +pub fn writer(file: File, buffer: []u8) Writer { + return .init(file, buffer); +} + +/// Positional is more threadsafe, since the global seek position is not +/// affected, but when such syscalls are not available, preemptively choosing +/// `Writer.Mode.streaming` will skip a failed syscall. 
+pub fn writerStreaming(file: File, buffer: []u8) Writer { + return .initMode(file, buffer, .streaming); +} + const range_off: windows.LARGE_INTEGER = 0; const range_len: windows.LARGE_INTEGER = 1; @@ -1769,18 +2178,3 @@ pub fn downgradeLock(file: File) LockError!void { }; } } - -const File = @This(); -const std = @import("../std.zig"); -const builtin = @import("builtin"); -const Allocator = std.mem.Allocator; -const posix = std.posix; -const io = std.io; -const math = std.math; -const assert = std.debug.assert; -const linux = std.os.linux; -const windows = std.os.windows; -const Os = std.builtin.Os; -const maxInt = std.math.maxInt; -const is_windows = builtin.os.tag == .windows; -const Alignment = std.mem.Alignment; diff --git a/lib/std/fs/path.zig b/lib/std/fs/path.zig index 159eb02564..1cf4dc3c64 100644 --- a/lib/std/fs/path.zig +++ b/lib/std/fs/path.zig @@ -146,14 +146,11 @@ pub fn joinZ(allocator: Allocator, paths: []const []const u8) ![:0]u8 { return out[0 .. out.len - 1 :0]; } -pub fn fmtJoin(paths: []const []const u8) std.fmt.Formatter(formatJoin) { +pub fn fmtJoin(paths: []const []const u8) std.fmt.Formatter([]const []const u8, formatJoin) { return .{ .data = paths }; } -fn formatJoin(paths: []const []const u8, comptime fmt: []const u8, options: std.fmt.FormatOptions, w: anytype) !void { - _ = fmt; - _ = options; - +fn formatJoin(paths: []const []const u8, w: *std.io.Writer) std.io.Writer.Error!void { const first_path_idx = for (paths, 0..) 
|p, idx| { if (p.len != 0) break idx; } else return; diff --git a/lib/std/fs/test.zig b/lib/std/fs/test.zig index 99b6dd3920..ece7f79a74 100644 --- a/lib/std/fs/test.zig +++ b/lib/std/fs/test.zig @@ -1798,11 +1798,11 @@ test "walker" { var num_walked: usize = 0; while (try walker.next()) |entry| { testing.expect(expected_basenames.has(entry.basename)) catch |err| { - std.debug.print("found unexpected basename: {s}\n", .{std.fmt.fmtSliceEscapeLower(entry.basename)}); + std.debug.print("found unexpected basename: {f}\n", .{std.ascii.hexEscape(entry.basename, .lower)}); return err; }; testing.expect(expected_paths.has(entry.path)) catch |err| { - std.debug.print("found unexpected path: {s}\n", .{std.fmt.fmtSliceEscapeLower(entry.path)}); + std.debug.print("found unexpected path: {f}\n", .{std.ascii.hexEscape(entry.path, .lower)}); return err; }; // make sure that the entry.dir is the containing dir @@ -1953,113 +1953,6 @@ test "chown" { try dir.chown(null, null); } -test "File.Metadata" { - var tmp = tmpDir(.{}); - defer tmp.cleanup(); - - const file = try tmp.dir.createFile("test_file", .{ .read = true }); - defer file.close(); - - const metadata = try file.metadata(); - try testing.expectEqual(File.Kind.file, metadata.kind()); - try testing.expectEqual(@as(u64, 0), metadata.size()); - _ = metadata.accessed(); - _ = metadata.modified(); - _ = metadata.created(); -} - -test "File.Permissions" { - if (native_os == .wasi) - return error.SkipZigTest; - - var tmp = tmpDir(.{}); - defer tmp.cleanup(); - - const file = try tmp.dir.createFile("test_file", .{ .read = true }); - defer file.close(); - - const metadata = try file.metadata(); - var permissions = metadata.permissions(); - - try testing.expect(!permissions.readOnly()); - permissions.setReadOnly(true); - try testing.expect(permissions.readOnly()); - - try file.setPermissions(permissions); - const new_permissions = (try file.metadata()).permissions(); - try testing.expect(new_permissions.readOnly()); - - // Must be 
set to non-read-only to delete - permissions.setReadOnly(false); - try file.setPermissions(permissions); -} - -test "File.PermissionsUnix" { - if (native_os == .windows or native_os == .wasi) - return error.SkipZigTest; - - var tmp = tmpDir(.{}); - defer tmp.cleanup(); - - const file = try tmp.dir.createFile("test_file", .{ .mode = 0o666, .read = true }); - defer file.close(); - - const metadata = try file.metadata(); - var permissions = metadata.permissions(); - - permissions.setReadOnly(true); - try testing.expect(permissions.readOnly()); - try testing.expect(!permissions.inner.unixHas(.user, .write)); - permissions.inner.unixSet(.user, .{ .write = true }); - try testing.expect(!permissions.readOnly()); - try testing.expect(permissions.inner.unixHas(.user, .write)); - try testing.expect(permissions.inner.mode & 0o400 != 0); - - permissions.setReadOnly(true); - try file.setPermissions(permissions); - permissions = (try file.metadata()).permissions(); - try testing.expect(permissions.readOnly()); - - // Must be set to non-read-only to delete - permissions.setReadOnly(false); - try file.setPermissions(permissions); - - const permissions_unix = File.PermissionsUnix.unixNew(0o754); - try testing.expect(permissions_unix.unixHas(.user, .execute)); - try testing.expect(!permissions_unix.unixHas(.other, .execute)); -} - -test "delete a read-only file on windows" { - if (native_os != .windows) - return error.SkipZigTest; - - var tmp = testing.tmpDir(.{}); - defer tmp.cleanup(); - - const file = try tmp.dir.createFile("test_file", .{ .read = true }); - defer file.close(); - // Create a file and make it read-only - const metadata = try file.metadata(); - var permissions = metadata.permissions(); - permissions.setReadOnly(true); - try file.setPermissions(permissions); - - // If the OS and filesystem support it, POSIX_SEMANTICS and IGNORE_READONLY_ATTRIBUTE - // is used meaning that the deletion of a read-only file will succeed. 
- // Otherwise, this delete will fail and the read-only flag must be unset before it's - // able to be deleted. - const delete_result = tmp.dir.deleteFile("test_file"); - if (delete_result) { - try testing.expectError(error.FileNotFound, tmp.dir.deleteFile("test_file")); - } else |err| { - try testing.expectEqual(@as(anyerror, error.AccessDenied), err); - // Now make the file not read-only - permissions.setReadOnly(false); - try file.setPermissions(permissions); - try tmp.dir.deleteFile("test_file"); - } -} - test "delete a setAsCwd directory on Windows" { if (native_os != .windows) return error.SkipZigTest; diff --git a/lib/std/hash/benchmark.zig b/lib/std/hash/benchmark.zig index 6d5a6b93ac..980b41b8ae 100644 --- a/lib/std/hash/benchmark.zig +++ b/lib/std/hash/benchmark.zig @@ -346,7 +346,7 @@ fn mode(comptime x: comptime_int) comptime_int { } pub fn main() !void { - const stdout = std.io.getStdOut().writer(); + const stdout = std.fs.File.stdout().deprecatedWriter(); var buffer: [1024]u8 = undefined; var fixed = std.heap.FixedBufferAllocator.init(buffer[0..]); diff --git a/lib/std/heap/debug_allocator.zig b/lib/std/heap/debug_allocator.zig index e8778fc9c1..df4bb76f0f 100644 --- a/lib/std/heap/debug_allocator.zig +++ b/lib/std/heap/debug_allocator.zig @@ -436,7 +436,7 @@ pub fn DebugAllocator(comptime config: Config) type { const stack_trace = bucketStackTrace(bucket, slot_count, slot_index, .alloc); const page_addr = @intFromPtr(bucket) & ~(page_size - 1); const addr = page_addr + slot_index * size_class; - log.err("memory address 0x{x} leaked: {}", .{ addr, stack_trace }); + log.err("memory address 0x{x} leaked: {f}", .{ addr, stack_trace }); leaks = true; } } @@ -463,7 +463,7 @@ pub fn DebugAllocator(comptime config: Config) type { while (it.next()) |large_alloc| { if (config.retain_metadata and large_alloc.freed) continue; const stack_trace = large_alloc.getStackTrace(.alloc); - log.err("memory address 0x{x} leaked: {}", .{ + log.err("memory address 0x{x} 
leaked: {f}", .{ @intFromPtr(large_alloc.bytes.ptr), stack_trace, }); leaks = true; @@ -522,7 +522,7 @@ pub fn DebugAllocator(comptime config: Config) type { .index = 0, }; std.debug.captureStackTrace(ret_addr, &second_free_stack_trace); - log.err("Double free detected. Allocation: {} First free: {} Second free: {}", .{ + log.err("Double free detected. Allocation: {f} First free: {f} Second free: {f}", .{ alloc_stack_trace, free_stack_trace, second_free_stack_trace, }); } @@ -568,7 +568,7 @@ pub fn DebugAllocator(comptime config: Config) type { .index = 0, }; std.debug.captureStackTrace(ret_addr, &free_stack_trace); - log.err("Allocation size {d} bytes does not match free size {d}. Allocation: {} Free: {}", .{ + log.err("Allocation size {d} bytes does not match free size {d}. Allocation: {f} Free: {f}", .{ entry.value_ptr.bytes.len, old_mem.len, entry.value_ptr.getStackTrace(.alloc), @@ -678,7 +678,7 @@ pub fn DebugAllocator(comptime config: Config) type { .index = 0, }; std.debug.captureStackTrace(ret_addr, &free_stack_trace); - log.err("Allocation size {d} bytes does not match free size {d}. Allocation: {} Free: {}", .{ + log.err("Allocation size {d} bytes does not match free size {d}. Allocation: {f} Free: {f}", .{ entry.value_ptr.bytes.len, old_mem.len, entry.value_ptr.getStackTrace(.alloc), @@ -907,7 +907,7 @@ pub fn DebugAllocator(comptime config: Config) type { }; std.debug.captureStackTrace(return_address, &free_stack_trace); if (old_memory.len != requested_size) { - log.err("Allocation size {d} bytes does not match free size {d}. Allocation: {} Free: {}", .{ + log.err("Allocation size {d} bytes does not match free size {d}. Allocation: {f} Free: {f}", .{ requested_size, old_memory.len, bucketStackTrace(bucket, slot_count, slot_index, .alloc), @@ -915,7 +915,7 @@ pub fn DebugAllocator(comptime config: Config) type { }); } if (alignment != slot_alignment) { - log.err("Allocation alignment {d} does not match free alignment {d}. 
Allocation: {} Free: {}", .{ + log.err("Allocation alignment {d} does not match free alignment {d}. Allocation: {f} Free: {f}", .{ slot_alignment.toByteUnits(), alignment.toByteUnits(), bucketStackTrace(bucket, slot_count, slot_index, .alloc), @@ -1006,7 +1006,7 @@ pub fn DebugAllocator(comptime config: Config) type { }; std.debug.captureStackTrace(return_address, &free_stack_trace); if (memory.len != requested_size) { - log.err("Allocation size {d} bytes does not match free size {d}. Allocation: {} Free: {}", .{ + log.err("Allocation size {d} bytes does not match free size {d}. Allocation: {f} Free: {f}", .{ requested_size, memory.len, bucketStackTrace(bucket, slot_count, slot_index, .alloc), @@ -1014,7 +1014,7 @@ pub fn DebugAllocator(comptime config: Config) type { }); } if (alignment != slot_alignment) { - log.err("Allocation alignment {d} does not match free alignment {d}. Allocation: {} Free: {}", .{ + log.err("Allocation alignment {d} does not match free alignment {d}. Allocation: {f} Free: {f}", .{ slot_alignment.toByteUnits(), alignment.toByteUnits(), bucketStackTrace(bucket, slot_count, slot_index, .alloc), @@ -1054,7 +1054,7 @@ const TraceKind = enum { free, }; -const test_config = Config{}; +const test_config: Config = .{}; test "small allocations - free in same order" { var gpa = DebugAllocator(test_config){}; diff --git a/lib/std/http.zig b/lib/std/http.zig index d5d5583299..5bf12a1876 100644 --- a/lib/std/http.zig +++ b/lib/std/http.zig @@ -1,3 +1,7 @@ +const builtin = @import("builtin"); +const std = @import("std.zig"); +const assert = std.debug.assert; + pub const Client = @import("http/Client.zig"); pub const Server = @import("http/Server.zig"); pub const protocol = @import("http/protocol.zig"); @@ -38,8 +42,8 @@ pub const Method = enum(u64) { return x; } - pub fn write(self: Method, w: anytype) !void { - const bytes = std.mem.asBytes(&@intFromEnum(self)); + pub fn format(self: Method, w: *std.io.Writer) std.io.Writer.Error!void { + const bytes: 
[]const u8 = @ptrCast(&@intFromEnum(self)); const str = std.mem.sliceTo(bytes, 0); try w.writeAll(str); } @@ -77,7 +81,9 @@ pub const Method = enum(u64) { }; } - /// An HTTP method is idempotent if an identical request can be made once or several times in a row with the same effect while leaving the server in the same state. + /// An HTTP method is idempotent if an identical request can be made once + /// or several times in a row with the same effect while leaving the server + /// in the same state. /// /// https://developer.mozilla.org/en-US/docs/Glossary/Idempotent /// @@ -90,7 +96,8 @@ pub const Method = enum(u64) { }; } - /// A cacheable response is an HTTP response that can be cached, that is stored to be retrieved and used later, saving a new request to the server. + /// A cacheable response can be stored to be retrieved and used later, + /// saving a new request to the server. /// /// https://developer.mozilla.org/en-US/docs/Glossary/cacheable /// @@ -282,10 +289,10 @@ pub const Status = enum(u10) { } }; +/// compression is intentionally omitted here since it is handled in `ContentEncoding`. 
pub const TransferEncoding = enum { chunked, none, - // compression is intentionally omitted here, as std.http.Client stores it as content-encoding }; pub const ContentEncoding = enum { @@ -308,9 +315,6 @@ pub const Header = struct { value: []const u8, }; -const builtin = @import("builtin"); -const std = @import("std.zig"); - test { if (builtin.os.tag != .wasi) { _ = Client; diff --git a/lib/std/http/Client.zig b/lib/std/http/Client.zig index d36cd10aee..838411bebc 100644 --- a/lib/std/http/Client.zig +++ b/lib/std/http/Client.zig @@ -311,7 +311,7 @@ pub const Connection = struct { EndOfStream, }; - pub const Reader = std.io.Reader(*Connection, ReadError, read); + pub const Reader = std.io.GenericReader(*Connection, ReadError, read); pub fn reader(conn: *Connection) Reader { return Reader{ .context = conn }; @@ -374,7 +374,7 @@ pub const Connection = struct { UnexpectedWriteFailure, }; - pub const Writer = std.io.Writer(*Connection, WriteError, write); + pub const Writer = std.io.GenericWriter(*Connection, WriteError, write); pub fn writer(conn: *Connection) Writer { return Writer{ .context = conn }; @@ -823,21 +823,28 @@ pub const Request = struct { return error.UnsupportedTransferEncoding; const connection = req.connection.?; - const w = connection.writer(); + var connection_writer_adapter = connection.writer().adaptToNewApi(); + const w = &connection_writer_adapter.new_interface; + sendAdapted(req, connection, w) catch |err| switch (err) { + error.WriteFailed => return connection_writer_adapter.err.?, + else => |e| return e, + }; + } - try req.method.write(w); + fn sendAdapted(req: *Request, connection: *Connection, w: *std.io.Writer) !void { + try req.method.format(w); try w.writeByte(' '); if (req.method == .CONNECT) { - try req.uri.writeToStream(.{ .authority = true }, w); + try req.uri.writeToStream(w, .{ .authority = true }); } else { - try req.uri.writeToStream(.{ + try req.uri.writeToStream(w, .{ .scheme = connection.proxied, .authentication = 
connection.proxied, .authority = connection.proxied, .path = true, .query = true, - }, w); + }); } try w.writeByte(' '); try w.writeAll(@tagName(req.version)); @@ -845,7 +852,7 @@ pub const Request = struct { if (try emitOverridableHeader("host: ", req.headers.host, w)) { try w.writeAll("host: "); - try req.uri.writeToStream(.{ .authority = true }, w); + try req.uri.writeToStream(w, .{ .authority = true }); try w.writeAll("\r\n"); } @@ -934,7 +941,7 @@ pub const Request = struct { const TransferReadError = Connection.ReadError || proto.HeadersParser.ReadError; - const TransferReader = std.io.Reader(*Request, TransferReadError, transferRead); + const TransferReader = std.io.GenericReader(*Request, TransferReadError, transferRead); fn transferReader(req: *Request) TransferReader { return .{ .context = req }; @@ -1094,7 +1101,7 @@ pub const Request = struct { pub const ReadError = TransferReadError || proto.HeadersParser.CheckCompleteHeadError || error{ DecompressionFailure, InvalidTrailers }; - pub const Reader = std.io.Reader(*Request, ReadError, read); + pub const Reader = std.io.GenericReader(*Request, ReadError, read); pub fn reader(req: *Request) Reader { return .{ .context = req }; @@ -1134,7 +1141,7 @@ pub const Request = struct { pub const WriteError = Connection.WriteError || error{ NotWriteable, MessageTooLong }; - pub const Writer = std.io.Writer(*Request, WriteError, write); + pub const Writer = std.io.GenericWriter(*Request, WriteError, write); pub fn writer(req: *Request) Writer { return .{ .context = req }; @@ -1283,26 +1290,32 @@ pub const basic_authorization = struct { } pub fn valueLengthFromUri(uri: Uri) usize { - var stream = std.io.countingWriter(std.io.null_writer); - try stream.writer().print("{user}", .{uri.user orelse Uri.Component.empty}); - const user_len = stream.bytes_written; - stream.bytes_written = 0; - try stream.writer().print("{password}", .{uri.password orelse Uri.Component.empty}); - const password_len = stream.bytes_written; + 
const user: Uri.Component = uri.user orelse .empty; + const password: Uri.Component = uri.password orelse .empty; + + var dw: std.io.Writer.Discarding = .init(&.{}); + user.formatUser(&dw.writer) catch unreachable; // discarding + const user_len = dw.count + dw.writer.end; + + dw.count = 0; + dw.writer.end = 0; + password.formatPassword(&dw.writer) catch unreachable; // discarding + const password_len = dw.count + dw.writer.end; + return valueLength(@intCast(user_len), @intCast(password_len)); } pub fn value(uri: Uri, out: []u8) []u8 { + const user: Uri.Component = uri.user orelse .empty; + const password: Uri.Component = uri.password orelse .empty; + var buf: [max_user_len + ":".len + max_password_len]u8 = undefined; - var stream = std.io.fixedBufferStream(&buf); - stream.writer().print("{user}", .{uri.user orelse Uri.Component.empty}) catch - unreachable; - assert(stream.pos <= max_user_len); - stream.writer().print(":{password}", .{uri.password orelse Uri.Component.empty}) catch - unreachable; + var w: std.io.Writer = .fixed(&buf); + user.formatUser(&w) catch unreachable; // fixed + password.formatPassword(&w) catch unreachable; // fixed @memcpy(out[0..prefix.len], prefix); - const base64 = std.base64.standard.Encoder.encode(out[prefix.len..], stream.getWritten()); + const base64 = std.base64.standard.Encoder.encode(out[prefix.len..], w.buffered()); return out[0 .. 
prefix.len + base64.len]; } }; diff --git a/lib/std/http/protocol.zig b/lib/std/http/protocol.zig index fc00a68ec3..797ed989ad 100644 --- a/lib/std/http/protocol.zig +++ b/lib/std/http/protocol.zig @@ -344,7 +344,7 @@ const MockBufferedConnection = struct { } pub const ReadError = std.io.FixedBufferStream([]const u8).ReadError || error{EndOfStream}; - pub const Reader = std.io.Reader(*MockBufferedConnection, ReadError, read); + pub const Reader = std.io.GenericReader(*MockBufferedConnection, ReadError, read); pub fn reader(conn: *MockBufferedConnection) Reader { return Reader{ .context = conn }; @@ -359,7 +359,7 @@ const MockBufferedConnection = struct { } pub const WriteError = std.io.FixedBufferStream([]const u8).WriteError; - pub const Writer = std.io.Writer(*MockBufferedConnection, WriteError, write); + pub const Writer = std.io.GenericWriter(*MockBufferedConnection, WriteError, write); pub fn writer(conn: *MockBufferedConnection) Writer { return Writer{ .context = conn }; diff --git a/lib/std/http/test.zig b/lib/std/http/test.zig index dc944fbabb..fdbed8eaf8 100644 --- a/lib/std/http/test.zig +++ b/lib/std/http/test.zig @@ -385,10 +385,8 @@ test "general client/server API coverage" { fn handleRequest(request: *http.Server.Request, listen_port: u16) !void { const log = std.log.scoped(.server); - log.info("{} {s} {s}", .{ - request.head.method, - @tagName(request.head.version), - request.head.target, + log.info("{f} {s} {s}", .{ + request.head.method, @tagName(request.head.version), request.head.target, }); const gpa = std.testing.allocator; diff --git a/lib/std/io.zig b/lib/std/io.zig index 597b8d5ec1..5339318cd1 100644 --- a/lib/std/io.zig +++ b/lib/std/io.zig @@ -14,54 +14,80 @@ const File = std.fs.File; const Allocator = std.mem.Allocator; const Alignment = std.mem.Alignment; -fn getStdOutHandle() posix.fd_t { - if (is_windows) { - return windows.peb().ProcessParameters.hStdOutput; +pub const Limit = enum(usize) { + nothing = 0, + unlimited = 
std.math.maxInt(usize),
+    _,
+
+    /// `std.math.maxInt(usize)` is interpreted to mean `.unlimited`.
+    pub fn limited(n: usize) Limit {
+        return @enumFromInt(n);
     }
 
-    if (@hasDecl(root, "os") and @hasDecl(root.os, "io") and @hasDecl(root.os.io, "getStdOutHandle")) {
-        return root.os.io.getStdOutHandle();
+    /// Any value greater than `std.math.maxInt(usize)` is interpreted to mean
+    /// `.unlimited`.
+    pub fn limited64(n: u64) Limit {
+        return @enumFromInt(@min(n, std.math.maxInt(usize)));
     }
 
-    return posix.STDOUT_FILENO;
-}
-
-pub fn getStdOut() File {
-    return .{ .handle = getStdOutHandle() };
-}
-
-fn getStdErrHandle() posix.fd_t {
-    if (is_windows) {
-        return windows.peb().ProcessParameters.hStdError;
+    pub fn countVec(data: []const []const u8) Limit {
+        var total: usize = 0;
+        for (data) |d| total += d.len;
+        return .limited(total);
     }
 
-    if (@hasDecl(root, "os") and @hasDecl(root.os, "io") and @hasDecl(root.os.io, "getStdErrHandle")) {
-        return root.os.io.getStdErrHandle();
+    pub fn min(a: Limit, b: Limit) Limit {
+        return @enumFromInt(@min(@intFromEnum(a), @intFromEnum(b)));
     }
 
-    return posix.STDERR_FILENO;
-}
-
-pub fn getStdErr() File {
-    return .{ .handle = getStdErrHandle() };
-}
-
-fn getStdInHandle() posix.fd_t {
-    if (is_windows) {
-        return windows.peb().ProcessParameters.hStdInput;
+    pub fn minInt(l: Limit, n: usize) usize {
+        return @min(n, @intFromEnum(l));
     }
 
-    if (@hasDecl(root, "os") and @hasDecl(root.os, "io") and @hasDecl(root.os.io, "getStdInHandle")) {
-        return root.os.io.getStdInHandle();
+    pub fn minInt64(l: Limit, n: u64) usize {
+        return @min(n, @intFromEnum(l));
     }
-    return posix.STDIN_FILENO;
-}
 
+    pub fn slice(l: Limit, s: []u8) []u8 {
+        return s[0..l.minInt(s.len)];
+    }
 
-pub fn getStdIn() File {
-    return .{ .handle = getStdInHandle() };
-}
+    pub fn sliceConst(l: Limit, s: []const u8) []const u8 {
+        return s[0..l.minInt(s.len)];
+    }
 
+    pub fn toInt(l: Limit) ?usize {
+        return switch (l) {
+            else => @intFromEnum(l),
+            .unlimited => null,
+        };
+    }
+ 
+ /// Reduces a slice to account for the limit, leaving room for one extra + /// byte above the limit, allowing for the use case of differentiating + /// between end-of-stream and reaching the limit. + pub fn slice1(l: Limit, non_empty_buffer: []u8) []u8 { + assert(non_empty_buffer.len >= 1); + return non_empty_buffer[0..@min(@intFromEnum(l) +| 1, non_empty_buffer.len)]; + } + + pub fn nonzero(l: Limit) bool { + return @intFromEnum(l) > 0; + } + + /// Return a new limit reduced by `amount` or return `null` indicating + /// limit would be exceeded. + pub fn subtract(l: Limit, amount: usize) ?Limit { + if (l == .unlimited) return .unlimited; + if (amount > @intFromEnum(l)) return null; + return @enumFromInt(@intFromEnum(l) - amount); + } +}; + +pub const Reader = @import("io/Reader.zig"); +pub const Writer = @import("io/Writer.zig"); + +/// Deprecated in favor of `Reader`. pub fn GenericReader( comptime Context: type, comptime ReadError: type, @@ -289,6 +315,7 @@ pub fn GenericReader( }; } +/// Deprecated in favor of `Writer`. pub fn GenericWriter( comptime Context: type, comptime WriteError: type, @@ -347,18 +374,39 @@ pub fn GenericWriter( const ptr: *const Context = @alignCast(@ptrCast(context)); return writeFn(ptr.*, bytes); } + + /// Helper for bridging to the new `Writer` API while upgrading. + pub fn adaptToNewApi(self: *const Self) Adapter { + return .{ + .derp_writer = self.*, + .new_interface = .{ + .buffer = &.{}, + .vtable = &.{ .drain = Adapter.drain }, + }, + }; + } + + pub const Adapter = struct { + derp_writer: Self, + new_interface: Writer, + err: ?Error = null, + + fn drain(w: *Writer, data: []const []const u8, splat: usize) Writer.Error!usize { + _ = splat; + const a: *@This() = @fieldParentPtr("new_interface", w); + return a.derp_writer.write(data[0]) catch |err| { + a.err = err; + return error.WriteFailed; + }; + } + }; }; } -/// Deprecated; consider switching to `AnyReader` or use `GenericReader` -/// to use previous API. 
-pub const Reader = GenericReader; -/// Deprecated; consider switching to `AnyWriter` or use `GenericWriter` -/// to use previous API. -pub const Writer = GenericWriter; - -pub const AnyReader = @import("io/Reader.zig"); -pub const AnyWriter = @import("io/Writer.zig"); +/// Deprecated in favor of `Reader`. +pub const AnyReader = @import("io/DeprecatedReader.zig"); +/// Deprecated in favor of `Writer`. +pub const AnyWriter = @import("io/DeprecatedWriter.zig"); pub const SeekableStream = @import("io/seekable_stream.zig").SeekableStream; @@ -407,7 +455,7 @@ pub const tty = @import("io/tty.zig"); /// A Writer that doesn't write to anything. pub const null_writer: NullWriter = .{ .context = {} }; -pub const NullWriter = Writer(void, error{}, dummyWrite); +pub const NullWriter = GenericWriter(void, error{}, dummyWrite); fn dummyWrite(context: void, data: []const u8) error{}!usize { _ = context; return data.len; @@ -819,8 +867,8 @@ pub fn PollFiles(comptime StreamEnum: type) type { } test { - _ = AnyReader; - _ = AnyWriter; + _ = Reader; + _ = Writer; _ = @import("io/bit_reader.zig"); _ = @import("io/bit_writer.zig"); _ = @import("io/buffered_atomic_file.zig"); diff --git a/lib/std/io/DeprecatedReader.zig b/lib/std/io/DeprecatedReader.zig new file mode 100644 index 0000000000..3f2429c3ae --- /dev/null +++ b/lib/std/io/DeprecatedReader.zig @@ -0,0 +1,386 @@ +context: *const anyopaque, +readFn: *const fn (context: *const anyopaque, buffer: []u8) anyerror!usize, + +pub const Error = anyerror; + +/// Returns the number of bytes read. It may be less than buffer.len. +/// If the number of bytes read is 0, it means end of stream. +/// End of stream is not an error condition. +pub fn read(self: Self, buffer: []u8) anyerror!usize { + return self.readFn(self.context, buffer); +} + +/// Returns the number of bytes read. If the number read is smaller than `buffer.len`, it +/// means the stream reached the end. Reaching the end of a stream is not an error +/// condition. 
+pub fn readAll(self: Self, buffer: []u8) anyerror!usize { + return readAtLeast(self, buffer, buffer.len); +} + +/// Returns the number of bytes read, calling the underlying read +/// function the minimal number of times until the buffer has at least +/// `len` bytes filled. If the number read is less than `len` it means +/// the stream reached the end. Reaching the end of the stream is not +/// an error condition. +pub fn readAtLeast(self: Self, buffer: []u8, len: usize) anyerror!usize { + assert(len <= buffer.len); + var index: usize = 0; + while (index < len) { + const amt = try self.read(buffer[index..]); + if (amt == 0) break; + index += amt; + } + return index; +} + +/// If the number read would be smaller than `buf.len`, `error.EndOfStream` is returned instead. +pub fn readNoEof(self: Self, buf: []u8) anyerror!void { + const amt_read = try self.readAll(buf); + if (amt_read < buf.len) return error.EndOfStream; +} + +/// Appends to the `std.ArrayList` contents by reading from the stream +/// until end of stream is found. +/// If the number of bytes appended would exceed `max_append_size`, +/// `error.StreamTooLong` is returned +/// and the `std.ArrayList` has exactly `max_append_size` bytes appended. 
+pub fn readAllArrayList( + self: Self, + array_list: *std.ArrayList(u8), + max_append_size: usize, +) anyerror!void { + return self.readAllArrayListAligned(null, array_list, max_append_size); +} + +pub fn readAllArrayListAligned( + self: Self, + comptime alignment: ?Alignment, + array_list: *std.ArrayListAligned(u8, alignment), + max_append_size: usize, +) anyerror!void { + try array_list.ensureTotalCapacity(@min(max_append_size, 4096)); + const original_len = array_list.items.len; + var start_index: usize = original_len; + while (true) { + array_list.expandToCapacity(); + const dest_slice = array_list.items[start_index..]; + const bytes_read = try self.readAll(dest_slice); + start_index += bytes_read; + + if (start_index - original_len > max_append_size) { + array_list.shrinkAndFree(original_len + max_append_size); + return error.StreamTooLong; + } + + if (bytes_read != dest_slice.len) { + array_list.shrinkAndFree(start_index); + return; + } + + // This will trigger ArrayList to expand superlinearly at whatever its growth rate is. + try array_list.ensureTotalCapacity(start_index + 1); + } +} + +/// Allocates enough memory to hold all the contents of the stream. If the allocated +/// memory would be greater than `max_size`, returns `error.StreamTooLong`. +/// Caller owns returned memory. +/// If this function returns an error, the contents from the stream read so far are lost. +pub fn readAllAlloc(self: Self, allocator: mem.Allocator, max_size: usize) anyerror![]u8 { + var array_list = std.ArrayList(u8).init(allocator); + defer array_list.deinit(); + try self.readAllArrayList(&array_list, max_size); + return try array_list.toOwnedSlice(); +} + +/// Deprecated: use `streamUntilDelimiter` with ArrayList's writer instead. +/// Replaces the `std.ArrayList` contents by reading from the stream until `delimiter` is found. +/// Does not include the delimiter in the result. 
+/// If the `std.ArrayList` length would exceed `max_size`, `error.StreamTooLong` is returned and the +/// `std.ArrayList` is populated with `max_size` bytes from the stream. +pub fn readUntilDelimiterArrayList( + self: Self, + array_list: *std.ArrayList(u8), + delimiter: u8, + max_size: usize, +) anyerror!void { + array_list.shrinkRetainingCapacity(0); + try self.streamUntilDelimiter(array_list.writer(), delimiter, max_size); +} + +/// Deprecated: use `streamUntilDelimiter` with ArrayList's writer instead. +/// Allocates enough memory to read until `delimiter`. If the allocated +/// memory would be greater than `max_size`, returns `error.StreamTooLong`. +/// Caller owns returned memory. +/// If this function returns an error, the contents from the stream read so far are lost. +pub fn readUntilDelimiterAlloc( + self: Self, + allocator: mem.Allocator, + delimiter: u8, + max_size: usize, +) anyerror![]u8 { + var array_list = std.ArrayList(u8).init(allocator); + defer array_list.deinit(); + try self.streamUntilDelimiter(array_list.writer(), delimiter, max_size); + return try array_list.toOwnedSlice(); +} + +/// Deprecated: use `streamUntilDelimiter` with FixedBufferStream's writer instead. +/// Reads from the stream until specified byte is found. If the buffer is not +/// large enough to hold the entire contents, `error.StreamTooLong` is returned. +/// If end-of-stream is found, `error.EndOfStream` is returned. +/// Returns a slice of the stream data, with ptr equal to `buf.ptr`. The +/// delimiter byte is written to the output buffer but is not included +/// in the returned slice. 
+pub fn readUntilDelimiter(self: Self, buf: []u8, delimiter: u8) anyerror![]u8 { + var fbs = std.io.fixedBufferStream(buf); + try self.streamUntilDelimiter(fbs.writer(), delimiter, fbs.buffer.len); + const output = fbs.getWritten(); + buf[output.len] = delimiter; // emulating old behaviour + return output; +} + +/// Deprecated: use `streamUntilDelimiter` with ArrayList's (or any other's) writer instead. +/// Allocates enough memory to read until `delimiter` or end-of-stream. +/// If the allocated memory would be greater than `max_size`, returns +/// `error.StreamTooLong`. If end-of-stream is found, returns the rest +/// of the stream. If this function is called again after that, returns +/// null. +/// Caller owns returned memory. +/// If this function returns an error, the contents from the stream read so far are lost. +pub fn readUntilDelimiterOrEofAlloc( + self: Self, + allocator: mem.Allocator, + delimiter: u8, + max_size: usize, +) anyerror!?[]u8 { + var array_list = std.ArrayList(u8).init(allocator); + defer array_list.deinit(); + self.streamUntilDelimiter(array_list.writer(), delimiter, max_size) catch |err| switch (err) { + error.EndOfStream => if (array_list.items.len == 0) { + return null; + }, + else => |e| return e, + }; + return try array_list.toOwnedSlice(); +} + +/// Deprecated: use `streamUntilDelimiter` with FixedBufferStream's writer instead. +/// Reads from the stream until specified byte is found. If the buffer is not +/// large enough to hold the entire contents, `error.StreamTooLong` is returned. +/// If end-of-stream is found, returns the rest of the stream. If this +/// function is called again after that, returns null. +/// Returns a slice of the stream data, with ptr equal to `buf.ptr`. The +/// delimiter byte is written to the output buffer but is not included +/// in the returned slice. 
+pub fn readUntilDelimiterOrEof(self: Self, buf: []u8, delimiter: u8) anyerror!?[]u8 { + var fbs = std.io.fixedBufferStream(buf); + self.streamUntilDelimiter(fbs.writer(), delimiter, fbs.buffer.len) catch |err| switch (err) { + error.EndOfStream => if (fbs.getWritten().len == 0) { + return null; + }, + + else => |e| return e, + }; + const output = fbs.getWritten(); + buf[output.len] = delimiter; // emulating old behaviour + return output; +} + +/// Appends to the `writer` contents by reading from the stream until `delimiter` is found. +/// Does not write the delimiter itself. +/// If `optional_max_size` is not null and amount of written bytes exceeds `optional_max_size`, +/// returns `error.StreamTooLong` and finishes appending. +/// If `optional_max_size` is null, appending is unbounded. +pub fn streamUntilDelimiter( + self: Self, + writer: anytype, + delimiter: u8, + optional_max_size: ?usize, +) anyerror!void { + if (optional_max_size) |max_size| { + for (0..max_size) |_| { + const byte: u8 = try self.readByte(); + if (byte == delimiter) return; + try writer.writeByte(byte); + } + return error.StreamTooLong; + } else { + while (true) { + const byte: u8 = try self.readByte(); + if (byte == delimiter) return; + try writer.writeByte(byte); + } + // Can not throw `error.StreamTooLong` since there are no boundary. + } +} + +/// Reads from the stream until specified byte is found, discarding all data, +/// including the delimiter. +/// If end-of-stream is found, this function succeeds. +pub fn skipUntilDelimiterOrEof(self: Self, delimiter: u8) anyerror!void { + while (true) { + const byte = self.readByte() catch |err| switch (err) { + error.EndOfStream => return, + else => |e| return e, + }; + if (byte == delimiter) return; + } +} + +/// Reads 1 byte from the stream or returns `error.EndOfStream`. 
+pub fn readByte(self: Self) anyerror!u8 { + var result: [1]u8 = undefined; + const amt_read = try self.read(result[0..]); + if (amt_read < 1) return error.EndOfStream; + return result[0]; +} + +/// Same as `readByte` except the returned byte is signed. +pub fn readByteSigned(self: Self) anyerror!i8 { + return @as(i8, @bitCast(try self.readByte())); +} + +/// Reads exactly `num_bytes` bytes and returns as an array. +/// `num_bytes` must be comptime-known +pub fn readBytesNoEof(self: Self, comptime num_bytes: usize) anyerror![num_bytes]u8 { + var bytes: [num_bytes]u8 = undefined; + try self.readNoEof(&bytes); + return bytes; +} + +/// Reads bytes until `bounded.len` is equal to `num_bytes`, +/// or the stream ends. +/// +/// * it is assumed that `num_bytes` will not exceed `bounded.capacity()` +pub fn readIntoBoundedBytes( + self: Self, + comptime num_bytes: usize, + bounded: *std.BoundedArray(u8, num_bytes), +) anyerror!void { + while (bounded.len < num_bytes) { + // get at most the number of bytes free in the bounded array + const bytes_read = try self.read(bounded.unusedCapacitySlice()); + if (bytes_read == 0) return; + + // bytes_read will never be larger than @TypeOf(bounded.len) + // due to `self.read` being bounded by `bounded.unusedCapacitySlice()` + bounded.len += @as(@TypeOf(bounded.len), @intCast(bytes_read)); + } +} + +/// Reads at most `num_bytes` and returns as a bounded array. 
+pub fn readBoundedBytes(self: Self, comptime num_bytes: usize) anyerror!std.BoundedArray(u8, num_bytes) { + var result = std.BoundedArray(u8, num_bytes){}; + try self.readIntoBoundedBytes(num_bytes, &result); + return result; +} + +pub inline fn readInt(self: Self, comptime T: type, endian: std.builtin.Endian) anyerror!T { + const bytes = try self.readBytesNoEof(@divExact(@typeInfo(T).int.bits, 8)); + return mem.readInt(T, &bytes, endian); +} + +pub fn readVarInt( + self: Self, + comptime ReturnType: type, + endian: std.builtin.Endian, + size: usize, +) anyerror!ReturnType { + assert(size <= @sizeOf(ReturnType)); + var bytes_buf: [@sizeOf(ReturnType)]u8 = undefined; + const bytes = bytes_buf[0..size]; + try self.readNoEof(bytes); + return mem.readVarInt(ReturnType, bytes, endian); +} + +/// Optional parameters for `skipBytes` +pub const SkipBytesOptions = struct { + buf_size: usize = 512, +}; + +// `num_bytes` is a `u64` to match `off_t` +/// Reads `num_bytes` bytes from the stream and discards them +pub fn skipBytes(self: Self, num_bytes: u64, comptime options: SkipBytesOptions) anyerror!void { + var buf: [options.buf_size]u8 = undefined; + var remaining = num_bytes; + + while (remaining > 0) { + const amt = @min(remaining, options.buf_size); + try self.readNoEof(buf[0..amt]); + remaining -= amt; + } +} + +/// Reads `slice.len` bytes from the stream and returns if they are the same as the passed slice +pub fn isBytes(self: Self, slice: []const u8) anyerror!bool { + var i: usize = 0; + var matches = true; + while (i < slice.len) : (i += 1) { + if (slice[i] != try self.readByte()) { + matches = false; + } + } + return matches; +} + +pub fn readStruct(self: Self, comptime T: type) anyerror!T { + // Only extern and packed structs have defined in-memory layout. 
+ comptime assert(@typeInfo(T).@"struct".layout != .auto); + var res: [1]T = undefined; + try self.readNoEof(mem.sliceAsBytes(res[0..])); + return res[0]; +} + +pub fn readStructEndian(self: Self, comptime T: type, endian: std.builtin.Endian) anyerror!T { + var res = try self.readStruct(T); + if (native_endian != endian) { + mem.byteSwapAllFields(T, &res); + } + return res; +} + +/// Reads an integer with the same size as the given enum's tag type. If the integer matches +/// an enum tag, casts the integer to the enum tag and returns it. Otherwise, returns an `error.InvalidValue`. +/// TODO optimization taking advantage of most fields being in order +pub fn readEnum(self: Self, comptime Enum: type, endian: std.builtin.Endian) anyerror!Enum { + const E = error{ + /// An integer was read, but it did not match any of the tags in the supplied enum. + InvalidValue, + }; + const type_info = @typeInfo(Enum).@"enum"; + const tag = try self.readInt(type_info.tag_type, endian); + + inline for (std.meta.fields(Enum)) |field| { + if (tag == field.value) { + return @field(Enum, field.name); + } + } + + return E.InvalidValue; +} + +/// Reads the stream until the end, ignoring all the data. +/// Returns the number of bytes discarded. 
+pub fn discard(self: Self) anyerror!u64 { + var trash: [4096]u8 = undefined; + var index: u64 = 0; + while (true) { + const n = try self.read(&trash); + if (n == 0) return index; + index += n; + } +} + +const std = @import("../std.zig"); +const Self = @This(); +const math = std.math; +const assert = std.debug.assert; +const mem = std.mem; +const testing = std.testing; +const native_endian = @import("builtin").target.cpu.arch.endian(); +const Alignment = std.mem.Alignment; + +test { + _ = @import("Reader/test.zig"); +} diff --git a/lib/std/io/DeprecatedWriter.zig b/lib/std/io/DeprecatedWriter.zig new file mode 100644 index 0000000000..391b985357 --- /dev/null +++ b/lib/std/io/DeprecatedWriter.zig @@ -0,0 +1,109 @@ +const std = @import("../std.zig"); +const assert = std.debug.assert; +const mem = std.mem; +const native_endian = @import("builtin").target.cpu.arch.endian(); + +context: *const anyopaque, +writeFn: *const fn (context: *const anyopaque, bytes: []const u8) anyerror!usize, + +const Self = @This(); +pub const Error = anyerror; + +pub fn write(self: Self, bytes: []const u8) anyerror!usize { + return self.writeFn(self.context, bytes); +} + +pub fn writeAll(self: Self, bytes: []const u8) anyerror!void { + var index: usize = 0; + while (index != bytes.len) { + index += try self.write(bytes[index..]); + } +} + +pub fn print(self: Self, comptime format: []const u8, args: anytype) anyerror!void { + return std.fmt.format(self, format, args); +} + +pub fn writeByte(self: Self, byte: u8) anyerror!void { + const array = [1]u8{byte}; + return self.writeAll(&array); +} + +pub fn writeByteNTimes(self: Self, byte: u8, n: usize) anyerror!void { + var bytes: [256]u8 = undefined; + @memset(bytes[0..], byte); + + var remaining: usize = n; + while (remaining > 0) { + const to_write = @min(remaining, bytes.len); + try self.writeAll(bytes[0..to_write]); + remaining -= to_write; + } +} + +pub fn writeBytesNTimes(self: Self, bytes: []const u8, n: usize) anyerror!void { + var i: 
usize = 0; + while (i < n) : (i += 1) { + try self.writeAll(bytes); + } +} + +pub inline fn writeInt(self: Self, comptime T: type, value: T, endian: std.builtin.Endian) anyerror!void { + var bytes: [@divExact(@typeInfo(T).int.bits, 8)]u8 = undefined; + mem.writeInt(std.math.ByteAlignedInt(@TypeOf(value)), &bytes, value, endian); + return self.writeAll(&bytes); +} + +pub fn writeStruct(self: Self, value: anytype) anyerror!void { + // Only extern and packed structs have defined in-memory layout. + comptime assert(@typeInfo(@TypeOf(value)).@"struct".layout != .auto); + return self.writeAll(mem.asBytes(&value)); +} + +pub fn writeStructEndian(self: Self, value: anytype, endian: std.builtin.Endian) anyerror!void { + // TODO: make sure this value is not a reference type + if (native_endian == endian) { + return self.writeStruct(value); + } else { + var copy = value; + mem.byteSwapAllFields(@TypeOf(value), ©); + return self.writeStruct(copy); + } +} + +pub fn writeFile(self: Self, file: std.fs.File) anyerror!void { + // TODO: figure out how to adjust std lib abstractions so that this ends up + // doing sendfile or maybe even copy_file_range under the right conditions. + var buf: [4000]u8 = undefined; + while (true) { + const n = try file.readAll(&buf); + try self.writeAll(buf[0..n]); + if (n < buf.len) return; + } +} + +/// Helper for bridging to the new `Writer` API while upgrading. 
+pub fn adaptToNewApi(self: *const Self) Adapter { + return .{ + .derp_writer = self.*, + .new_interface = .{ + .buffer = &.{}, + .vtable = &.{ .drain = Adapter.drain }, + }, + }; +} + +pub const Adapter = struct { + derp_writer: Self, + new_interface: std.io.Writer, + err: ?Error = null, + + fn drain(w: *std.io.Writer, data: []const []const u8, splat: usize) std.io.Writer.Error!usize { + _ = splat; + const a: *@This() = @fieldParentPtr("new_interface", w); + return a.derp_writer.write(data[0]) catch |err| { + a.err = err; + return error.WriteFailed; + }; + } +}; diff --git a/lib/std/io/Reader.zig b/lib/std/io/Reader.zig index 3f2429c3ae..c2f0b25017 100644 --- a/lib/std/io/Reader.zig +++ b/lib/std/io/Reader.zig @@ -1,386 +1,1731 @@ -context: *const anyopaque, -readFn: *const fn (context: *const anyopaque, buffer: []u8) anyerror!usize, +const Reader = @This(); -pub const Error = anyerror; +const builtin = @import("builtin"); +const native_endian = builtin.target.cpu.arch.endian(); -/// Returns the number of bytes read. It may be less than buffer.len. -/// If the number of bytes read is 0, it means end of stream. -/// End of stream is not an error condition. -pub fn read(self: Self, buffer: []u8) anyerror!usize { - return self.readFn(self.context, buffer); +const std = @import("../std.zig"); +const Writer = std.io.Writer; +const assert = std.debug.assert; +const testing = std.testing; +const Allocator = std.mem.Allocator; +const ArrayList = std.ArrayListUnmanaged; +const Limit = std.io.Limit; + +pub const Limited = @import("Reader/Limited.zig"); + +vtable: *const VTable, +buffer: []u8, +/// Number of bytes which have been consumed from `buffer`. +seek: usize, +/// In `buffer` before this are buffered bytes, after this is `undefined`. +end: usize, + +pub const VTable = struct { + /// Writes bytes from the internally tracked logical position to `w`. + /// + /// Returns the number of bytes written, which will be at minimum `0` and + /// at most `limit`. 
The number returned, including zero, does not indicate
+    /// end of stream. `limit` is guaranteed to be at least as large as the
+    /// buffer capacity of `w`, a value whose minimum size is determined by the
+    /// stream implementation.
+    ///
+    /// The reader's internal logical seek position moves forward in accordance
+    /// with the number of bytes returned from this function.
+    ///
+    /// Implementations are encouraged to utilize mandatory minimum buffer
+    /// sizes combined with short reads (returning a value less than `limit`)
+    /// in order to minimize complexity.
+    ///
+    /// Although this function is usually called when `buffer` is empty, it is
+    /// also called when it needs to be filled more due to the API user
+    /// requesting contiguous memory. In either case, the existing buffer data
+    /// should be ignored; new data written to `w`.
+    ///
+    /// In addition to, or instead of writing to `w`, the implementation may
+    /// choose to store data in `buffer`, modifying `seek` and `end`
+    /// accordingly. Stream implementations are encouraged to take advantage of
+    /// this if it simplifies the logic.
+    stream: *const fn (r: *Reader, w: *Writer, limit: Limit) StreamError!usize,
+
+    /// Consumes bytes from the internally tracked stream position without
+    /// providing access to them.
+    ///
+    /// Returns the number of bytes discarded, which will be at minimum `0` and
+    /// at most `limit`. The number of bytes returned, including zero, does not
+    /// indicate end of stream.
+    ///
+    /// The reader's internal logical seek position moves forward in accordance
+    /// with the number of bytes returned from this function.
+    ///
+    /// Implementations are encouraged to utilize mandatory minimum buffer
+    /// sizes combined with short reads (returning a value less than `limit`)
+    /// in order to minimize complexity.
+    ///
+    /// The default implementation is based on calling `stream`, borrowing
+    /// `buffer` to construct a temporary `Writer` and ignoring the written
+    /// data. 
+ /// + /// This function is only called when `buffer` is empty. + discard: *const fn (r: *Reader, limit: Limit) Error!usize = defaultDiscard, +}; + +pub const StreamError = error{ + /// See the `Reader` implementation for detailed diagnostics. + ReadFailed, + /// See the `Writer` implementation for detailed diagnostics. + WriteFailed, + /// End of stream indicated from the `Reader`. This error cannot originate + /// from the `Writer`. + EndOfStream, +}; + +pub const Error = error{ + /// See the `Reader` implementation for detailed diagnostics. + ReadFailed, + EndOfStream, +}; + +pub const StreamRemainingError = error{ + /// See the `Reader` implementation for detailed diagnostics. + ReadFailed, + /// See the `Writer` implementation for detailed diagnostics. + WriteFailed, +}; + +pub const ShortError = error{ + /// See the `Reader` implementation for detailed diagnostics. + ReadFailed, +}; + +pub const failing: Reader = .{ + .vtable = &.{ + .read = failingStream, + .discard = failingDiscard, + }, + .buffer = &.{}, + .seek = 0, + .end = 0, +}; + +/// This is generally safe to `@constCast` because it has an empty buffer, so +/// there is not really a way to accidentally attempt mutation of these fields. +const ending_state: Reader = .fixed(&.{}); +pub const ending: *Reader = @constCast(&ending_state); + +pub fn limited(r: *Reader, limit: Limit, buffer: []u8) Limited { + return .init(r, limit, buffer); } -/// Returns the number of bytes read. If the number read is smaller than `buffer.len`, it -/// means the stream reached the end. Reaching the end of a stream is not an error -/// condition. -pub fn readAll(self: Self, buffer: []u8) anyerror!usize { - return readAtLeast(self, buffer, buffer.len); +/// Constructs a `Reader` such that it will read from `buffer` and then end. 
+pub fn fixed(buffer: []const u8) Reader { + return .{ + .vtable = &.{ + .stream = endingStream, + .discard = endingDiscard, + }, + // This cast is safe because all potential writes to it will instead + // return `error.EndOfStream`. + .buffer = @constCast(buffer), + .end = buffer.len, + .seek = 0, + }; } -/// Returns the number of bytes read, calling the underlying read -/// function the minimal number of times until the buffer has at least -/// `len` bytes filled. If the number read is less than `len` it means -/// the stream reached the end. Reaching the end of the stream is not -/// an error condition. -pub fn readAtLeast(self: Self, buffer: []u8, len: usize) anyerror!usize { - assert(len <= buffer.len); - var index: usize = 0; - while (index < len) { - const amt = try self.read(buffer[index..]); - if (amt == 0) break; - index += amt; +pub fn stream(r: *Reader, w: *Writer, limit: Limit) StreamError!usize { + const buffer = limit.slice(r.buffer[r.seek..r.end]); + if (buffer.len > 0) { + @branchHint(.likely); + const n = try w.write(buffer); + r.seek += n; + return n; } - return index; + const n = try r.vtable.stream(r, w, limit); + assert(n <= @intFromEnum(limit)); + return n; } -/// If the number read would be smaller than `buf.len`, `error.EndOfStream` is returned instead. -pub fn readNoEof(self: Self, buf: []u8) anyerror!void { - const amt_read = try self.readAll(buf); - if (amt_read < buf.len) return error.EndOfStream; +pub fn discard(r: *Reader, limit: Limit) Error!usize { + const buffered_len = r.end - r.seek; + const remaining: Limit = if (limit.toInt()) |n| l: { + if (buffered_len >= n) { + r.seek += n; + return n; + } + break :l .limited(n - buffered_len); + } else .unlimited; + r.seek = 0; + r.end = 0; + const n = try r.vtable.discard(r, remaining); + assert(n <= @intFromEnum(remaining)); + return buffered_len + n; } -/// Appends to the `std.ArrayList` contents by reading from the stream -/// until end of stream is found. 
-/// If the number of bytes appended would exceed `max_append_size`, -/// `error.StreamTooLong` is returned -/// and the `std.ArrayList` has exactly `max_append_size` bytes appended. -pub fn readAllArrayList( - self: Self, - array_list: *std.ArrayList(u8), - max_append_size: usize, -) anyerror!void { - return self.readAllArrayListAligned(null, array_list, max_append_size); +pub fn defaultDiscard(r: *Reader, limit: Limit) Error!usize { + assert(r.seek == 0); + assert(r.end == 0); + var dw: Writer.Discarding = .init(r.buffer); + const n = r.stream(&dw.writer, limit) catch |err| switch (err) { + error.WriteFailed => unreachable, + error.ReadFailed => return error.ReadFailed, + error.EndOfStream => return error.EndOfStream, + }; + assert(n <= @intFromEnum(limit)); + return n; } -pub fn readAllArrayListAligned( - self: Self, - comptime alignment: ?Alignment, - array_list: *std.ArrayListAligned(u8, alignment), - max_append_size: usize, -) anyerror!void { - try array_list.ensureTotalCapacity(@min(max_append_size, 4096)); - const original_len = array_list.items.len; - var start_index: usize = original_len; +/// "Pump" exactly `n` bytes from the reader to the writer. +pub fn streamExact(r: *Reader, w: *Writer, n: usize) StreamError!void { + var remaining = n; + while (remaining != 0) remaining -= try r.stream(w, .limited(remaining)); +} + +/// "Pump" data from the reader to the writer, handling `error.EndOfStream` as +/// a success case. +/// +/// Returns total number of bytes written to `w`. 
+pub fn streamRemaining(r: *Reader, w: *Writer) StreamRemainingError!usize {
+    var offset: usize = 0;
     while (true) {
-        array_list.expandToCapacity();
-        const dest_slice = array_list.items[start_index..];
-        const bytes_read = try self.readAll(dest_slice);
-        start_index += bytes_read;
+        offset += r.stream(w, .unlimited) catch |err| switch (err) {
+            error.EndOfStream => return offset,
+            else => |e| return e,
+        };
+    }
+}
 
-        if (start_index - original_len > max_append_size) {
-            array_list.shrinkAndFree(original_len + max_append_size);
+/// Consumes the stream until the end, ignoring all the data, returning the
+/// number of bytes discarded.
+pub fn discardRemaining(r: *Reader) ShortError!usize {
+    var offset: usize = r.end - r.seek;
+    r.seek = 0;
+    r.end = 0;
+    while (true) {
+        offset += r.vtable.discard(r, .unlimited) catch |err| switch (err) {
+            error.EndOfStream => return offset,
+            else => |e| return e,
+        };
+    }
+}
+
+pub const LimitedAllocError = Allocator.Error || ShortError || error{StreamTooLong};
+
+/// Transfers all bytes from the current position to the end of the stream, up
+/// to `limit`, returning them as a caller-owned allocated slice.
+///
+/// If `limit` would be exceeded, `error.StreamTooLong` is returned instead. In
+/// such case, the next byte that would be read will be the first one to exceed
+/// `limit`, and all preceding bytes have been discarded.
+///
+/// Asserts `buffer` has nonzero capacity.
+///
+/// See also:
+/// * `appendRemaining`
+pub fn allocRemaining(r: *Reader, gpa: Allocator, limit: Limit) LimitedAllocError![]u8 {
+    var buffer: ArrayList(u8) = .empty;
+    defer buffer.deinit(gpa);
+    try appendRemaining(r, gpa, null, &buffer, limit);
+    return buffer.toOwnedSlice(gpa);
+}
+
+/// Transfers all bytes from the current position to the end of the stream, up
+/// to `limit`, appending them to `list`.
+///
+/// If `limit` would be exceeded, `error.StreamTooLong` is returned instead. In
+/// such case, the next byte that would be read will be the first one to exceed
+/// `limit`, and all preceding bytes have been appended to `list`.
+///
+/// Asserts `buffer` has nonzero capacity.
+///
+/// See also:
+/// * `allocRemaining`
+pub fn appendRemaining(
+    r: *Reader,
+    gpa: Allocator,
+    comptime alignment: ?std.mem.Alignment,
+    list: *std.ArrayListAlignedUnmanaged(u8, alignment),
+    limit: Limit,
+) LimitedAllocError!void {
+    const buffer = r.buffer;
+    const buffer_contents = buffer[r.seek..r.end];
+    const copy_len = limit.minInt(buffer_contents.len);
+    try list.ensureUnusedCapacity(gpa, copy_len);
+    @memcpy(list.unusedCapacitySlice()[0..copy_len], buffer[0..copy_len]);
+    list.items.len += copy_len;
+    r.seek += copy_len;
+    if (copy_len == buffer_contents.len) {
+        r.seek = 0;
+        r.end = 0;
+    }
+    var remaining = limit.subtract(copy_len).?;
+    while (true) {
+        try list.ensureUnusedCapacity(gpa, 1);
+        const dest = remaining.slice(list.unusedCapacitySlice());
+        const additional_buffer: []u8 = if (@intFromEnum(remaining) == dest.len) buffer else &.{};
+        const n = readVec(r, &.{ dest, additional_buffer }) catch |err| switch (err) {
+            error.EndOfStream => break,
+            error.ReadFailed => return error.ReadFailed,
+        };
+        if (n > dest.len) {
+            r.end = n - dest.len;
+            list.items.len += dest.len;
             return error.StreamTooLong;
         }
-
-        if (bytes_read != dest_slice.len) {
-            array_list.shrinkAndFree(start_index);
-            return;
-        }
-
-        // This will trigger ArrayList to expand superlinearly at whatever its growth rate is.
-        try array_list.ensureTotalCapacity(start_index + 1);
+        list.items.len += n;
+        remaining = remaining.subtract(n).?;
     }
 }
 
-/// Allocates enough memory to hold all the contents of the stream. If the allocated
-/// memory would be greater than `max_size`, returns `error.StreamTooLong`.
-/// Caller owns returned memory.
-/// If this function returns an error, the contents from the stream read so far are lost.
-pub fn readAllAlloc(self: Self, allocator: mem.Allocator, max_size: usize) anyerror![]u8 { - var array_list = std.ArrayList(u8).init(allocator); - defer array_list.deinit(); - try self.readAllArrayList(&array_list, max_size); - return try array_list.toOwnedSlice(); +/// Writes bytes from the internally tracked stream position to `data`. +/// +/// Returns the number of bytes written, which will be at minimum `0` and +/// at most the sum of each data slice length. The number of bytes read, +/// including zero, does not indicate end of stream. +/// +/// The reader's internal logical seek position moves forward in accordance +/// with the number of bytes returned from this function. +pub fn readVec(r: *Reader, data: []const []u8) Error!usize { + return readVecLimit(r, data, .unlimited); } -/// Deprecated: use `streamUntilDelimiter` with ArrayList's writer instead. -/// Replaces the `std.ArrayList` contents by reading from the stream until `delimiter` is found. -/// Does not include the delimiter in the result. -/// If the `std.ArrayList` length would exceed `max_size`, `error.StreamTooLong` is returned and the -/// `std.ArrayList` is populated with `max_size` bytes from the stream. -pub fn readUntilDelimiterArrayList( - self: Self, - array_list: *std.ArrayList(u8), - delimiter: u8, - max_size: usize, -) anyerror!void { - array_list.shrinkRetainingCapacity(0); - try self.streamUntilDelimiter(array_list.writer(), delimiter, max_size); +/// Equivalent to `readVec` but reads at most `limit` bytes. +/// +/// This ultimately will lower to a call to `stream`, but it must ensure +/// that the buffer used has at least as much capacity, in case that function +/// depends on a minimum buffer capacity. It also ensures that if the `stream` +/// implementation calls `Writer.writableVector`, it will get this data slice +/// along with the buffer at the end. 
+pub fn readVecLimit(r: *Reader, data: []const []u8, limit: Limit) Error!usize { + comptime assert(@intFromEnum(Limit.unlimited) == std.math.maxInt(usize)); + var remaining = @intFromEnum(limit); + for (data, 0..) |buf, i| { + const buffer_contents = r.buffer[r.seek..r.end]; + const copy_len = @min(buffer_contents.len, buf.len, remaining); + @memcpy(buf[0..copy_len], buffer_contents[0..copy_len]); + r.seek += copy_len; + remaining -= copy_len; + if (remaining == 0) break; + if (buf.len - copy_len == 0) continue; + + // All of `buffer` has been copied to `data`. We now set up a structure + // that enables the `Writer.writableVector` API, while also ensuring + // API that directly operates on the `Writable.buffer` has its minimum + // buffer capacity requirements met. + r.seek = 0; + r.end = 0; + const first = buf[copy_len..]; + const middle = data[i + 1 ..]; + var wrapper: Writer.VectorWrapper = .{ + .it = .{ + .first = first, + .middle = middle, + .last = r.buffer, + }, + .writer = .{ + .buffer = if (first.len >= r.buffer.len) first else r.buffer, + .vtable = Writer.VectorWrapper.vtable, + }, + }; + var n = r.vtable.stream(r, &wrapper.writer, .limited(remaining)) catch |err| switch (err) { + error.WriteFailed => { + assert(!wrapper.used); + if (wrapper.writer.buffer.ptr == first.ptr) { + remaining -= wrapper.writer.end; + } else { + assert(wrapper.writer.end <= r.buffer.len); + r.end = wrapper.writer.end; + } + break; + }, + else => |e| return e, + }; + if (!wrapper.used) { + if (wrapper.writer.buffer.ptr == first.ptr) { + remaining -= n; + } else { + assert(n <= r.buffer.len); + r.end = n; + } + break; + } + if (n < first.len) { + remaining -= n; + break; + } + remaining -= first.len; + n -= first.len; + for (middle) |mid| { + if (n < mid.len) { + remaining -= n; + break; + } + remaining -= mid.len; + n -= mid.len; + } + assert(n <= r.buffer.len); + r.end = n; + break; + } + return @intFromEnum(limit) - remaining; } -/// Deprecated: use `streamUntilDelimiter` with 
ArrayList's writer instead. -/// Allocates enough memory to read until `delimiter`. If the allocated -/// memory would be greater than `max_size`, returns `error.StreamTooLong`. -/// Caller owns returned memory. -/// If this function returns an error, the contents from the stream read so far are lost. -pub fn readUntilDelimiterAlloc( - self: Self, - allocator: mem.Allocator, - delimiter: u8, - max_size: usize, -) anyerror![]u8 { - var array_list = std.ArrayList(u8).init(allocator); - defer array_list.deinit(); - try self.streamUntilDelimiter(array_list.writer(), delimiter, max_size); - return try array_list.toOwnedSlice(); +pub fn buffered(r: *Reader) []u8 { + return r.buffer[r.seek..r.end]; } -/// Deprecated: use `streamUntilDelimiter` with FixedBufferStream's writer instead. -/// Reads from the stream until specified byte is found. If the buffer is not -/// large enough to hold the entire contents, `error.StreamTooLong` is returned. -/// If end-of-stream is found, `error.EndOfStream` is returned. -/// Returns a slice of the stream data, with ptr equal to `buf.ptr`. The -/// delimiter byte is written to the output buffer but is not included -/// in the returned slice. -pub fn readUntilDelimiter(self: Self, buf: []u8, delimiter: u8) anyerror![]u8 { - var fbs = std.io.fixedBufferStream(buf); - try self.streamUntilDelimiter(fbs.writer(), delimiter, fbs.buffer.len); - const output = fbs.getWritten(); - buf[output.len] = delimiter; // emulating old behaviour - return output; +pub fn bufferedLen(r: *const Reader) usize { + return r.end - r.seek; } -/// Deprecated: use `streamUntilDelimiter` with ArrayList's (or any other's) writer instead. -/// Allocates enough memory to read until `delimiter` or end-of-stream. -/// If the allocated memory would be greater than `max_size`, returns -/// `error.StreamTooLong`. If end-of-stream is found, returns the rest -/// of the stream. If this function is called again after that, returns -/// null. -/// Caller owns returned memory. 
-/// If this function returns an error, the contents from the stream read so far are lost. -pub fn readUntilDelimiterOrEofAlloc( - self: Self, - allocator: mem.Allocator, - delimiter: u8, - max_size: usize, -) anyerror!?[]u8 { - var array_list = std.ArrayList(u8).init(allocator); - defer array_list.deinit(); - self.streamUntilDelimiter(array_list.writer(), delimiter, max_size) catch |err| switch (err) { - error.EndOfStream => if (array_list.items.len == 0) { - return null; +pub fn hashed(r: *Reader, hasher: anytype) Hashed(@TypeOf(hasher)) { + return .{ .in = r, .hasher = hasher }; +} + +pub fn readVecAll(r: *Reader, data: [][]u8) Error!void { + var index: usize = 0; + var truncate: usize = 0; + while (index < data.len) { + { + const untruncated = data[index]; + data[index] = untruncated[truncate..]; + defer data[index] = untruncated; + truncate += try r.readVec(data[index..]); + } + while (index < data.len and truncate >= data[index].len) { + truncate -= data[index].len; + index += 1; + } + } +} + +/// Returns the next `len` bytes from the stream, filling the buffer as +/// necessary. +/// +/// Invalidates previously returned values from `peek`. +/// +/// Asserts that the `Reader` was initialized with a buffer capacity at +/// least as big as `len`. +/// +/// If there are fewer than `len` bytes left in the stream, `error.EndOfStream` +/// is returned instead. +/// +/// See also: +/// * `peek` +/// * `toss` +pub fn peek(r: *Reader, n: usize) Error![]u8 { + try r.fill(n); + return r.buffer[r.seek..][0..n]; +} + +/// Returns all the next buffered bytes, after filling the buffer to ensure it +/// contains at least `n` bytes. +/// +/// Invalidates previously returned values from `peek` and `peekGreedy`. +/// +/// Asserts that the `Reader` was initialized with a buffer capacity at +/// least as big as `n`. +/// +/// If there are fewer than `n` bytes left in the stream, `error.EndOfStream` +/// is returned instead. 
+/// +/// See also: +/// * `peek` +/// * `toss` +pub fn peekGreedy(r: *Reader, n: usize) Error![]u8 { + try r.fill(n); + return r.buffer[r.seek..r.end]; +} + +/// Skips the next `n` bytes from the stream, advancing the seek position. This +/// is typically and safely used after `peek`. +/// +/// Asserts that the number of bytes buffered is at least as many as `n`. +/// +/// The "tossed" memory remains alive until a "peek" operation occurs. +/// +/// See also: +/// * `peek`. +/// * `discard`. +pub fn toss(r: *Reader, n: usize) void { + r.seek += n; + assert(r.seek <= r.end); +} + +/// Equivalent to `toss(r.bufferedLen())`. +pub fn tossBuffered(r: *Reader) void { + r.seek = 0; + r.end = 0; +} + +/// Equivalent to `peek` followed by `toss`. +/// +/// The data returned is invalidated by the next call to `take`, `peek`, +/// `fill`, and functions with those prefixes. +pub fn take(r: *Reader, n: usize) Error![]u8 { + const result = try r.peek(n); + r.toss(n); + return result; +} + +/// Returns the next `n` bytes from the stream as an array, filling the buffer +/// as necessary and advancing the seek position `n` bytes. +/// +/// Asserts that the `Reader` was initialized with a buffer capacity at +/// least as big as `n`. +/// +/// If there are fewer than `n` bytes left in the stream, `error.EndOfStream` +/// is returned instead. +/// +/// See also: +/// * `take` +pub fn takeArray(r: *Reader, comptime n: usize) Error!*[n]u8 { + return (try r.take(n))[0..n]; +} + +/// Returns the next `n` bytes from the stream as an array, filling the buffer +/// as necessary, without advancing the seek position. +/// +/// Asserts that the `Reader` was initialized with a buffer capacity at +/// least as big as `n`. +/// +/// If there are fewer than `n` bytes left in the stream, `error.EndOfStream` +/// is returned instead. 
+/// +/// See also: +/// * `peek` +/// * `takeArray` +pub fn peekArray(r: *Reader, comptime n: usize) Error!*[n]u8 { + return (try r.peek(n))[0..n]; +} + +/// Skips the next `n` bytes from the stream, advancing the seek position. +/// +/// Unlike `toss` which is infallible, in this function `n` can be any amount. +/// +/// Returns `error.EndOfStream` if fewer than `n` bytes could be discarded. +/// +/// See also: +/// * `toss` +/// * `discardRemaining` +/// * `discardShort` +/// * `discard` +pub fn discardAll(r: *Reader, n: usize) Error!void { + if ((try r.discardShort(n)) != n) return error.EndOfStream; +} + +pub fn discardAll64(r: *Reader, n: u64) Error!void { + var remaining: u64 = n; + while (remaining > 0) { + const limited_remaining = std.math.cast(usize, remaining) orelse std.math.maxInt(usize); + try discardAll(r, limited_remaining); + remaining -= limited_remaining; + } +} + +/// Skips the next `n` bytes from the stream, advancing the seek position. +/// +/// Unlike `toss` which is infallible, in this function `n` can be any amount. +/// +/// Returns the number of bytes discarded, which is less than `n` if and only +/// if the stream reached the end. +/// +/// See also: +/// * `discardAll` +/// * `discardRemaining` +/// * `discard` +pub fn discardShort(r: *Reader, n: usize) ShortError!usize { + const proposed_seek = r.seek + n; + if (proposed_seek <= r.end) { + @branchHint(.likely); + r.seek = proposed_seek; + return n; + } + var remaining = n - (r.end - r.seek); + r.end = 0; + r.seek = 0; + while (true) { + const discard_len = r.vtable.discard(r, .limited(remaining)) catch |err| switch (err) { + error.EndOfStream => return n - remaining, + error.ReadFailed => return error.ReadFailed, + }; + remaining -= discard_len; + if (remaining == 0) return n; + } +} + +/// Fill `buffer` with the next `buffer.len` bytes from the stream, advancing +/// the seek position. +/// +/// Invalidates previously returned values from `peek`. 
+/// +/// If the provided buffer cannot be filled completely, `error.EndOfStream` is +/// returned instead. +/// +/// See also: +/// * `peek` +/// * `readSliceShort` +pub fn readSliceAll(r: *Reader, buffer: []u8) Error!void { + const n = try readSliceShort(r, buffer); + if (n != buffer.len) return error.EndOfStream; +} + +/// Fill `buffer` with the next `buffer.len` bytes from the stream, advancing +/// the seek position. +/// +/// Invalidates previously returned values from `peek`. +/// +/// Returns the number of bytes read, which is less than `buffer.len` if and +/// only if the stream reached the end. +/// +/// See also: +/// * `readSliceAll` +pub fn readSliceShort(r: *Reader, buffer: []u8) ShortError!usize { + const in_buffer = r.buffer[r.seek..r.end]; + const copy_len = @min(buffer.len, in_buffer.len); + @memcpy(buffer[0..copy_len], in_buffer[0..copy_len]); + if (buffer.len - copy_len == 0) { + r.seek += copy_len; + return buffer.len; + } + var i: usize = copy_len; + r.end = 0; + r.seek = 0; + while (true) { + const remaining = buffer[i..]; + var wrapper: Writer.VectorWrapper = .{ + .it = .{ + .first = remaining, + .last = r.buffer, + }, + .writer = .{ + .buffer = if (remaining.len >= r.buffer.len) remaining else r.buffer, + .vtable = Writer.VectorWrapper.vtable, + }, + }; + const n = r.vtable.stream(r, &wrapper.writer, .unlimited) catch |err| switch (err) { + error.WriteFailed => { + if (!wrapper.used) { + assert(r.seek == 0); + r.seek = remaining.len; + r.end = wrapper.writer.end; + @memcpy(remaining, r.buffer[0..remaining.len]); + } + return buffer.len; + }, + error.EndOfStream => return i, + error.ReadFailed => return error.ReadFailed, + }; + if (n < remaining.len) { + i += n; + continue; + } + r.end = n - remaining.len; + return buffer.len; + } +} + +/// Fill `buffer` with the next `buffer.len` bytes from the stream, advancing +/// the seek position. +/// +/// Invalidates previously returned values from `peek`. 
+/// +/// If the provided buffer cannot be filled completely, `error.EndOfStream` is +/// returned instead. +/// +/// The function is inline to avoid the dead code in case `endian` is +/// comptime-known and matches host endianness. +/// +/// See also: +/// * `readSliceAll` +/// * `readSliceEndianAlloc` +pub inline fn readSliceEndian( + r: *Reader, + comptime Elem: type, + buffer: []Elem, + endian: std.builtin.Endian, +) Error!void { + try readSliceAll(r, @ptrCast(buffer)); + if (native_endian != endian) for (buffer) |*elem| std.mem.byteSwapAllFields(Elem, elem); +} + +pub const ReadAllocError = Error || Allocator.Error; + +/// The function is inline to avoid the dead code in case `endian` is +/// comptime-known and matches host endianness. +pub inline fn readSliceEndianAlloc( + r: *Reader, + allocator: Allocator, + comptime Elem: type, + len: usize, + endian: std.builtin.Endian, +) ReadAllocError![]Elem { + const dest = try allocator.alloc(Elem, len); + errdefer allocator.free(dest); + try readSliceAll(r, @ptrCast(dest)); + if (native_endian != endian) for (dest) |*elem| std.mem.byteSwapAllFields(Elem, elem); + return dest; +} + +/// Shortcut for calling `readSliceAll` with a buffer provided by `allocator`. +pub fn readAlloc(r: *Reader, allocator: Allocator, len: usize) ReadAllocError![]u8 { + const dest = try allocator.alloc(u8, len); + errdefer allocator.free(dest); + try readSliceAll(r, dest); + return dest; +} + +pub const DelimiterError = error{ + /// See the `Reader` implementation for detailed diagnostics. + ReadFailed, + /// For "inclusive" functions, stream ended before the delimiter was found. + /// For "exclusive" functions, stream ended and there are no more bytes to + /// return. + EndOfStream, + /// The delimiter was not found within a number of bytes matching the + /// capacity of the `Reader`. + StreamTooLong, +}; + +/// Returns a slice of the next bytes of buffered data from the stream until +/// `sentinel` is found, advancing the seek position. 
+/// +/// Returned slice has a sentinel. +/// +/// Invalidates previously returned values from `peek`. +/// +/// See also: +/// * `peekSentinel` +/// * `takeDelimiterExclusive` +/// * `takeDelimiterInclusive` +pub fn takeSentinel(r: *Reader, comptime sentinel: u8) DelimiterError![:sentinel]u8 { + const result = try r.peekSentinel(sentinel); + r.toss(result.len + 1); + return result; +} + +/// Returns a slice of the next bytes of buffered data from the stream until +/// `sentinel` is found, without advancing the seek position. +/// +/// Returned slice has a sentinel; end of stream does not count as a delimiter. +/// +/// Invalidates previously returned values from `peek`. +/// +/// See also: +/// * `takeSentinel` +/// * `peekDelimiterExclusive` +/// * `peekDelimiterInclusive` +pub fn peekSentinel(r: *Reader, comptime sentinel: u8) DelimiterError![:sentinel]u8 { + const result = try r.peekDelimiterInclusive(sentinel); + return result[0 .. result.len - 1 :sentinel]; +} + +/// Returns a slice of the next bytes of buffered data from the stream until +/// `delimiter` is found, advancing the seek position. +/// +/// Returned slice includes the delimiter as the last byte. +/// +/// Invalidates previously returned values from `peek`. +/// +/// See also: +/// * `takeSentinel` +/// * `takeDelimiterExclusive` +/// * `peekDelimiterInclusive` +pub fn takeDelimiterInclusive(r: *Reader, delimiter: u8) DelimiterError![]u8 { + const result = try r.peekDelimiterInclusive(delimiter); + r.toss(result.len); + return result; +} + +/// Returns a slice of the next bytes of buffered data from the stream until +/// `delimiter` is found, without advancing the seek position. +/// +/// Returned slice includes the delimiter as the last byte. +/// +/// Invalidates previously returned values from `peek`. 
+/// +/// See also: +/// * `peekSentinel` +/// * `peekDelimiterExclusive` +/// * `takeDelimiterInclusive` +pub fn peekDelimiterInclusive(r: *Reader, delimiter: u8) DelimiterError![]u8 { + const buffer = r.buffer[0..r.end]; + const seek = r.seek; + if (std.mem.indexOfScalarPos(u8, buffer, seek, delimiter)) |end| { + @branchHint(.likely); + return buffer[seek .. end + 1]; + } + if (r.vtable.stream == &endingStream) { + // Protect the `@constCast` of `fixed`. + return error.EndOfStream; + } + r.rebase(); + while (r.buffer.len - r.end != 0) { + const end_cap = r.buffer[r.end..]; + var writer: Writer = .fixed(end_cap); + const n = r.vtable.stream(r, &writer, .limited(end_cap.len)) catch |err| switch (err) { + error.WriteFailed => unreachable, + else => |e| return e, + }; + r.end += n; + if (std.mem.indexOfScalarPos(u8, end_cap[0..n], 0, delimiter)) |end| { + return r.buffer[0 .. r.end - n + end + 1]; + } + } + return error.StreamTooLong; +} + +/// Returns a slice of the next bytes of buffered data from the stream until +/// `delimiter` is found, advancing the seek position. +/// +/// Returned slice excludes the delimiter. End-of-stream is treated equivalent +/// to a delimiter, unless it would result in a length 0 return value, in which +/// case `error.EndOfStream` is returned instead. +/// +/// If the delimiter is not found within a number of bytes matching the +/// capacity of this `Reader`, `error.StreamTooLong` is returned. In +/// such case, the stream state is unmodified as if this function was never +/// called. +/// +/// Invalidates previously returned values from `peek`. 
+/// +/// See also: +/// * `takeDelimiterInclusive` +/// * `peekDelimiterExclusive` +pub fn takeDelimiterExclusive(r: *Reader, delimiter: u8) DelimiterError![]u8 { + const result = r.peekDelimiterInclusive(delimiter) catch |err| switch (err) { + error.EndOfStream => { + const remaining = r.buffer[r.seek..r.end]; + if (remaining.len == 0) return error.EndOfStream; + r.toss(remaining.len); + return remaining; }, else => |e| return e, }; - return try array_list.toOwnedSlice(); + r.toss(result.len); + return result[0 .. result.len - 1]; } -/// Deprecated: use `streamUntilDelimiter` with FixedBufferStream's writer instead. -/// Reads from the stream until specified byte is found. If the buffer is not -/// large enough to hold the entire contents, `error.StreamTooLong` is returned. -/// If end-of-stream is found, returns the rest of the stream. If this -/// function is called again after that, returns null. -/// Returns a slice of the stream data, with ptr equal to `buf.ptr`. The -/// delimiter byte is written to the output buffer but is not included -/// in the returned slice. -pub fn readUntilDelimiterOrEof(self: Self, buf: []u8, delimiter: u8) anyerror!?[]u8 { - var fbs = std.io.fixedBufferStream(buf); - self.streamUntilDelimiter(fbs.writer(), delimiter, fbs.buffer.len) catch |err| switch (err) { - error.EndOfStream => if (fbs.getWritten().len == 0) { - return null; +/// Returns a slice of the next bytes of buffered data from the stream until +/// `delimiter` is found, without advancing the seek position. +/// +/// Returned slice excludes the delimiter. End-of-stream is treated equivalent +/// to a delimiter, unless it would result in a length 0 return value, in which +/// case `error.EndOfStream` is returned instead. +/// +/// If the delimiter is not found within a number of bytes matching the +/// capacity of this `Reader`, `error.StreamTooLong` is returned. In +/// such case, the stream state is unmodified as if this function was never +/// called. 
+/// +/// Invalidates previously returned values from `peek`. +/// +/// See also: +/// * `peekDelimiterInclusive` +/// * `takeDelimiterExclusive` +pub fn peekDelimiterExclusive(r: *Reader, delimiter: u8) DelimiterError![]u8 { + const result = r.peekDelimiterInclusive(delimiter) catch |err| switch (err) { + error.EndOfStream => { + const remaining = r.buffer[r.seek..r.end]; + if (remaining.len == 0) return error.EndOfStream; + r.toss(remaining.len); + return remaining; }, - else => |e| return e, }; - const output = fbs.getWritten(); - buf[output.len] = delimiter; // emulating old behaviour - return output; + return result[0 .. result.len - 1]; } -/// Appends to the `writer` contents by reading from the stream until `delimiter` is found. +/// Appends to `w` contents by reading from the stream until `delimiter` is +/// found. Does not write the delimiter itself. +/// +/// Returns number of bytes streamed, which may be zero, or error.EndOfStream +/// if the delimiter was not found. +/// +/// See also: +/// * `streamDelimiterEnding` +/// * `streamDelimiterLimit` +pub fn streamDelimiter(r: *Reader, w: *Writer, delimiter: u8) StreamError!usize { + const n = streamDelimiterLimit(r, w, delimiter, .unlimited) catch |err| switch (err) { + error.StreamTooLong => unreachable, // unlimited is passed + else => |e| return e, + }; + if (r.seek == r.end) return error.EndOfStream; + return n; +} + +/// Appends to `w` contents by reading from the stream until `delimiter` is found. /// Does not write the delimiter itself. -/// If `optional_max_size` is not null and amount of written bytes exceeds `optional_max_size`, -/// returns `error.StreamTooLong` and finishes appending. -/// If `optional_max_size` is null, appending is unbounded. -pub fn streamUntilDelimiter( - self: Self, - writer: anytype, +/// +/// Returns number of bytes streamed, which may be zero. End of stream can be +/// detected by checking if the next byte in the stream is the delimiter. 
+/// +/// See also: +/// * `streamDelimiter` +/// * `streamDelimiterLimit` +pub fn streamDelimiterEnding( + r: *Reader, + w: *Writer, delimiter: u8, - optional_max_size: ?usize, -) anyerror!void { - if (optional_max_size) |max_size| { - for (0..max_size) |_| { - const byte: u8 = try self.readByte(); - if (byte == delimiter) return; - try writer.writeByte(byte); +) StreamRemainingError!usize { + return streamDelimiterLimit(r, w, delimiter, .unlimited) catch |err| switch (err) { + error.StreamTooLong => unreachable, // unlimited is passed + else => |e| return e, + }; +} + +pub const StreamDelimiterLimitError = error{ + ReadFailed, + WriteFailed, + /// The delimiter was not found within the limit. + StreamTooLong, +}; + +/// Appends to `w` contents by reading from the stream until `delimiter` is found. +/// Does not write the delimiter itself. +/// +/// Returns number of bytes streamed, which may be zero. End of stream can be +/// detected by checking if the next byte in the stream is the delimiter. +pub fn streamDelimiterLimit( + r: *Reader, + w: *Writer, + delimiter: u8, + limit: Limit, +) StreamDelimiterLimitError!usize { + var remaining = @intFromEnum(limit); + while (remaining != 0) { + const available = Limit.limited(remaining).slice(r.peekGreedy(1) catch |err| switch (err) { + error.ReadFailed => return error.ReadFailed, + error.EndOfStream => return @intFromEnum(limit) - remaining, + }); + if (std.mem.indexOfScalar(u8, available, delimiter)) |delimiter_index| { + try w.writeAll(available[0..delimiter_index]); + r.toss(delimiter_index); + remaining -= delimiter_index; + return @intFromEnum(limit) - remaining; } - return error.StreamTooLong; - } else { - while (true) { - const byte: u8 = try self.readByte(); - if (byte == delimiter) return; - try writer.writeByte(byte); - } - // Can not throw `error.StreamTooLong` since there are no boundary. 
+ try w.writeAll(available); + r.toss(available.len); + remaining -= available.len; } + return error.StreamTooLong; } /// Reads from the stream until specified byte is found, discarding all data, /// including the delimiter. -/// If end-of-stream is found, this function succeeds. -pub fn skipUntilDelimiterOrEof(self: Self, delimiter: u8) anyerror!void { - while (true) { - const byte = self.readByte() catch |err| switch (err) { - error.EndOfStream => return, +/// +/// Returns number of bytes discarded, or `error.EndOfStream` if the delimiter +/// is not found. +/// +/// See also: +/// * `discardDelimiterExclusive` +/// * `discardDelimiterLimit` +pub fn discardDelimiterInclusive(r: *Reader, delimiter: u8) Error!usize { + const n = discardDelimiterLimit(r, delimiter, .unlimited) catch |err| switch (err) { + error.StreamTooLong => unreachable, // unlimited is passed + else => |e| return e, + }; + if (r.seek == r.end) return error.EndOfStream; + assert(r.buffer[r.seek] == delimiter); + toss(r, 1); + return n + 1; +} + +/// Reads from the stream until specified byte is found, discarding all data, +/// excluding the delimiter. +/// +/// Returns the number of bytes discarded. +/// +/// Succeeds if stream ends before delimiter found. End of stream can be +/// detected by checking if the delimiter is buffered. +/// +/// See also: +/// * `discardDelimiterInclusive` +/// * `discardDelimiterLimit` +pub fn discardDelimiterExclusive(r: *Reader, delimiter: u8) ShortError!usize { + return discardDelimiterLimit(r, delimiter, .unlimited) catch |err| switch (err) { + error.StreamTooLong => unreachable, // unlimited is passed + else => |e| return e, + }; +} + +pub const DiscardDelimiterLimitError = error{ + ReadFailed, + /// The delimiter was not found within the limit. + StreamTooLong, +}; + +/// Reads from the stream until specified byte is found, discarding all data, +/// excluding the delimiter. +/// +/// Returns the number of bytes discarded. 
+/// +/// Succeeds if stream ends before delimiter found. End of stream can be +/// detected by checking if the delimiter is buffered. +pub fn discardDelimiterLimit(r: *Reader, delimiter: u8, limit: Limit) DiscardDelimiterLimitError!usize { + var remaining = @intFromEnum(limit); + while (remaining != 0) { + const available = Limit.limited(remaining).slice(r.peekGreedy(1) catch |err| switch (err) { + error.ReadFailed => return error.ReadFailed, + error.EndOfStream => return @intFromEnum(limit) - remaining, + }); + if (std.mem.indexOfScalar(u8, available, delimiter)) |delimiter_index| { + r.toss(delimiter_index); + remaining -= delimiter_index; + return @intFromEnum(limit) - remaining; + } + r.toss(available.len); + remaining -= available.len; + } + return error.StreamTooLong; +} + +/// Fills the buffer such that it contains at least `n` bytes, without +/// advancing the seek position. +/// +/// Returns `error.EndOfStream` if and only if there are fewer than `n` bytes +/// remaining. +/// +/// Asserts buffer capacity is at least `n`. +pub fn fill(r: *Reader, n: usize) Error!void { + assert(n <= r.buffer.len); + if (r.seek + n <= r.end) { + @branchHint(.likely); + return; + } + if (r.seek + n <= r.buffer.len) while (true) { + const end_cap = r.buffer[r.end..]; + var writer: Writer = .fixed(end_cap); + r.end += r.vtable.stream(r, &writer, .limited(end_cap.len)) catch |err| switch (err) { + error.WriteFailed => unreachable, else => |e| return e, }; - if (byte == delimiter) return; + if (r.seek + n <= r.end) return; + }; + if (r.vtable.stream == &endingStream) { + // Protect the `@constCast` of `fixed`. 
+ return error.EndOfStream; } + rebaseCapacity(r, n); + var writer: Writer = .{ + .buffer = r.buffer, + .vtable = &.{ .drain = Writer.fixedDrain }, + }; + while (r.end < r.seek + n) { + writer.end = r.end; + r.end += r.vtable.stream(r, &writer, .limited(r.buffer.len - r.end)) catch |err| switch (err) { + error.WriteFailed => unreachable, + error.ReadFailed, error.EndOfStream => |e| return e, + }; + } +} + +/// Without advancing the seek position, does exactly one underlying read, filling the buffer as +/// much as possible. This may result in zero bytes added to the buffer, which is not an end of +/// stream condition. End of stream is communicated via returning `error.EndOfStream`. +/// +/// Asserts buffer capacity is at least 1. +pub fn fillMore(r: *Reader) Error!void { + rebaseCapacity(r, 1); + var writer: Writer = .{ + .buffer = r.buffer, + .end = r.end, + .vtable = &.{ .drain = Writer.fixedDrain }, + }; + r.end += r.vtable.stream(r, &writer, .limited(r.buffer.len - r.end)) catch |err| switch (err) { + error.WriteFailed => unreachable, + else => |e| return e, + }; +} + +/// Returns the next byte from the stream or returns `error.EndOfStream`. +/// +/// Does not advance the seek position. +/// +/// Asserts the buffer capacity is nonzero. +pub fn peekByte(r: *Reader) Error!u8 { + const buffer = r.buffer[0..r.end]; + const seek = r.seek; + if (seek < buffer.len) { + @branchHint(.likely); + return buffer[seek]; + } + try fill(r, 1); + return r.buffer[r.seek]; } /// Reads 1 byte from the stream or returns `error.EndOfStream`. -pub fn readByte(self: Self) anyerror!u8 { - var result: [1]u8 = undefined; - const amt_read = try self.read(result[0..]); - if (amt_read < 1) return error.EndOfStream; - return result[0]; -} - -/// Same as `readByte` except the returned byte is signed. -pub fn readByteSigned(self: Self) anyerror!i8 { - return @as(i8, @bitCast(try self.readByte())); -} - -/// Reads exactly `num_bytes` bytes and returns as an array. 
-/// `num_bytes` must be comptime-known -pub fn readBytesNoEof(self: Self, comptime num_bytes: usize) anyerror![num_bytes]u8 { - var bytes: [num_bytes]u8 = undefined; - try self.readNoEof(&bytes); - return bytes; -} - -/// Reads bytes until `bounded.len` is equal to `num_bytes`, -/// or the stream ends. /// -/// * it is assumed that `num_bytes` will not exceed `bounded.capacity()` -pub fn readIntoBoundedBytes( - self: Self, - comptime num_bytes: usize, - bounded: *std.BoundedArray(u8, num_bytes), -) anyerror!void { - while (bounded.len < num_bytes) { - // get at most the number of bytes free in the bounded array - const bytes_read = try self.read(bounded.unusedCapacitySlice()); - if (bytes_read == 0) return; - - // bytes_read will never be larger than @TypeOf(bounded.len) - // due to `self.read` being bounded by `bounded.unusedCapacitySlice()` - bounded.len += @as(@TypeOf(bounded.len), @intCast(bytes_read)); - } -} - -/// Reads at most `num_bytes` and returns as a bounded array. -pub fn readBoundedBytes(self: Self, comptime num_bytes: usize) anyerror!std.BoundedArray(u8, num_bytes) { - var result = std.BoundedArray(u8, num_bytes){}; - try self.readIntoBoundedBytes(num_bytes, &result); +/// Asserts the buffer capacity is nonzero. +pub fn takeByte(r: *Reader) Error!u8 { + const result = try peekByte(r); + r.seek += 1; return result; } -pub inline fn readInt(self: Self, comptime T: type, endian: std.builtin.Endian) anyerror!T { - const bytes = try self.readBytesNoEof(@divExact(@typeInfo(T).int.bits, 8)); - return mem.readInt(T, &bytes, endian); +/// Same as `takeByte` except the returned byte is signed. 
+pub fn takeByteSigned(r: *Reader) Error!i8 { + return @bitCast(try r.takeByte()); } -pub fn readVarInt( - self: Self, - comptime ReturnType: type, - endian: std.builtin.Endian, - size: usize, -) anyerror!ReturnType { - assert(size <= @sizeOf(ReturnType)); - var bytes_buf: [@sizeOf(ReturnType)]u8 = undefined; - const bytes = bytes_buf[0..size]; - try self.readNoEof(bytes); - return mem.readVarInt(ReturnType, bytes, endian); +/// Asserts the buffer was initialized with a capacity at least `@bitSizeOf(T) / 8`. +pub inline fn takeInt(r: *Reader, comptime T: type, endian: std.builtin.Endian) Error!T { + const n = @divExact(@typeInfo(T).int.bits, 8); + return std.mem.readInt(T, try r.takeArray(n), endian); } -/// Optional parameters for `skipBytes` -pub const SkipBytesOptions = struct { - buf_size: usize = 512, -}; - -// `num_bytes` is a `u64` to match `off_t` -/// Reads `num_bytes` bytes from the stream and discards them -pub fn skipBytes(self: Self, num_bytes: u64, comptime options: SkipBytesOptions) anyerror!void { - var buf: [options.buf_size]u8 = undefined; - var remaining = num_bytes; - - while (remaining > 0) { - const amt = @min(remaining, options.buf_size); - try self.readNoEof(buf[0..amt]); - remaining -= amt; - } +/// Asserts the buffer was initialized with a capacity at least `n`. +pub fn takeVarInt(r: *Reader, comptime Int: type, endian: std.builtin.Endian, n: usize) Error!Int { + assert(n <= @sizeOf(Int)); + return std.mem.readVarInt(Int, try r.take(n), endian); } -/// Reads `slice.len` bytes from the stream and returns if they are the same as the passed slice -pub fn isBytes(self: Self, slice: []const u8) anyerror!bool { - var i: usize = 0; - var matches = true; - while (i < slice.len) : (i += 1) { - if (slice[i] != try self.readByte()) { - matches = false; - } - } - return matches; -} - -pub fn readStruct(self: Self, comptime T: type) anyerror!T { +/// Asserts the buffer was initialized with a capacity at least `@sizeOf(T)`. 
+/// +/// Advances the seek position. +/// +/// See also: +/// * `peekStruct` +/// * `takeStructEndian` +pub fn takeStruct(r: *Reader, comptime T: type) Error!*align(1) T { // Only extern and packed structs have defined in-memory layout. comptime assert(@typeInfo(T).@"struct".layout != .auto); - var res: [1]T = undefined; - try self.readNoEof(mem.sliceAsBytes(res[0..])); - return res[0]; + return @ptrCast(try r.takeArray(@sizeOf(T))); } -pub fn readStructEndian(self: Self, comptime T: type, endian: std.builtin.Endian) anyerror!T { - var res = try self.readStruct(T); - if (native_endian != endian) { - mem.byteSwapAllFields(T, &res); - } +/// Asserts the buffer was initialized with a capacity at least `@sizeOf(T)`. +/// +/// Does not advance the seek position. +/// +/// See also: +/// * `takeStruct` +/// * `peekStructEndian` +pub fn peekStruct(r: *Reader, comptime T: type) Error!*align(1) T { + // Only extern and packed structs have defined in-memory layout. + comptime assert(@typeInfo(T).@"struct".layout != .auto); + return @ptrCast(try r.peekArray(@sizeOf(T))); +} + +/// Asserts the buffer was initialized with a capacity at least `@sizeOf(T)`. +/// +/// This function is inline to avoid referencing `std.mem.byteSwapAllFields` +/// when `endian` is comptime-known and matches the host endianness. +/// +/// See also: +/// * `takeStruct` +/// * `peekStructEndian` +pub inline fn takeStructEndian(r: *Reader, comptime T: type, endian: std.builtin.Endian) Error!T { + var res = (try r.takeStruct(T)).*; + if (native_endian != endian) std.mem.byteSwapAllFields(T, &res); return res; } -/// Reads an integer with the same size as the given enum's tag type. If the integer matches -/// an enum tag, casts the integer to the enum tag and returns it. Otherwise, returns an `error.InvalidValue`. 
-/// TODO optimization taking advantage of most fields being in order -pub fn readEnum(self: Self, comptime Enum: type, endian: std.builtin.Endian) anyerror!Enum { - const E = error{ - /// An integer was read, but it did not match any of the tags in the supplied enum. - InvalidValue, +/// Asserts the buffer was initialized with a capacity at least `@sizeOf(T)`. +/// +/// This function is inline to avoid referencing `std.mem.byteSwapAllFields` +/// when `endian` is comptime-known and matches the host endianness. +/// +/// See also: +/// * `takeStructEndian` +/// * `peekStruct` +pub inline fn peekStructEndian(r: *Reader, comptime T: type, endian: std.builtin.Endian) Error!T { + var res = (try r.peekStruct(T)).*; + if (native_endian != endian) std.mem.byteSwapAllFields(T, &res); + return res; +} + +pub const TakeEnumError = Error || error{InvalidEnumTag}; + +/// Reads an integer with the same size as the given enum's tag type. If the +/// integer matches an enum tag, casts the integer to the enum tag and returns +/// it. Otherwise, returns `error.InvalidEnumTag`. +/// +/// Asserts the buffer was initialized with a capacity at least `@sizeOf(Enum)`. +pub fn takeEnum(r: *Reader, comptime Enum: type, endian: std.builtin.Endian) TakeEnumError!Enum { + const Tag = @typeInfo(Enum).@"enum".tag_type; + const int = try r.takeInt(Tag, endian); + return std.meta.intToEnum(Enum, int); +} + +/// Reads an integer with the same size as the given nonexhaustive enum's tag type. +/// +/// Asserts the buffer was initialized with a capacity at least `@sizeOf(Enum)`. 
+pub fn takeEnumNonexhaustive(r: *Reader, comptime Enum: type, endian: std.builtin.Endian) Error!Enum { + const info = @typeInfo(Enum).@"enum"; + comptime assert(!info.is_exhaustive); + comptime assert(@bitSizeOf(info.tag_type) == @sizeOf(info.tag_type) * 8); + return takeEnum(r, Enum, endian) catch |err| switch (err) { + error.InvalidEnumTag => unreachable, + else => |e| return e, }; - const type_info = @typeInfo(Enum).@"enum"; - const tag = try self.readInt(type_info.tag_type, endian); - - inline for (std.meta.fields(Enum)) |field| { - if (tag == field.value) { - return @field(Enum, field.name); - } - } - - return E.InvalidValue; } -/// Reads the stream until the end, ignoring all the data. -/// Returns the number of bytes discarded. -pub fn discard(self: Self) anyerror!u64 { - var trash: [4096]u8 = undefined; - var index: u64 = 0; +pub const TakeLeb128Error = Error || error{Overflow}; + +/// Read a single LEB128 value as type T, or `error.Overflow` if the value cannot fit. +pub fn takeLeb128(r: *Reader, comptime Result: type) TakeLeb128Error!Result { + const result_info = @typeInfo(Result).int; + return std.math.cast(Result, try r.takeMultipleOf7Leb128(@Type(.{ .int = .{ + .signedness = result_info.signedness, + .bits = std.mem.alignForwardAnyAlign(u16, result_info.bits, 7), + } }))) orelse error.Overflow; +} + +pub fn expandTotalCapacity(r: *Reader, allocator: Allocator, n: usize) Allocator.Error!void { + if (n <= r.buffer.len) return; + if (r.seek > 0) rebase(r); + var list: ArrayList(u8) = .{ + .items = r.buffer[0..r.end], + .capacity = r.buffer.len, + }; + defer r.buffer = list.allocatedSlice(); + try list.ensureTotalCapacity(allocator, n); +} + +pub const FillAllocError = Error || Allocator.Error; + +pub fn fillAlloc(r: *Reader, allocator: Allocator, n: usize) FillAllocError!void { + try expandTotalCapacity(r, allocator, n); + return fill(r, n); +} + +/// Returns a slice into the unused capacity of `buffer` with at least +/// `min_len` bytes, extending 
`buffer` by resizing it with `gpa` as necessary. +/// +/// After calling this function, typically the caller will follow up with a +/// call to `advanceBufferEnd` to report the actual number of bytes buffered. +pub fn writableSliceGreedyAlloc(r: *Reader, allocator: Allocator, min_len: usize) Allocator.Error![]u8 { + { + const unused = r.buffer[r.end..]; + if (unused.len >= min_len) return unused; + } + if (r.seek > 0) rebase(r); + { + var list: ArrayList(u8) = .{ + .items = r.buffer[0..r.end], + .capacity = r.buffer.len, + }; + defer r.buffer = list.allocatedSlice(); + try list.ensureUnusedCapacity(allocator, min_len); + } + const unused = r.buffer[r.end..]; + assert(unused.len >= min_len); + return unused; +} + +/// After writing directly into the unused capacity of `buffer`, this function +/// updates `end` so that users of `Reader` can receive the data. +pub fn advanceBufferEnd(r: *Reader, n: usize) void { + assert(n <= r.buffer.len - r.end); + r.end += n; +} + +fn takeMultipleOf7Leb128(r: *Reader, comptime Result: type) TakeLeb128Error!Result { + const result_info = @typeInfo(Result).int; + comptime assert(result_info.bits % 7 == 0); + var remaining_bits: std.math.Log2IntCeil(Result) = result_info.bits; + const UnsignedResult = @Type(.{ .int = .{ + .signedness = .unsigned, + .bits = result_info.bits, + } }); + var result: UnsignedResult = 0; + var fits = true; while (true) { - const n = try self.read(&trash); - if (n == 0) return index; - index += n; + const buffer: []const packed struct(u8) { bits: u7, more: bool } = @ptrCast(try r.peekGreedy(1)); + for (buffer, 1..) 
|byte, len| { + if (remaining_bits > 0) { + result = @shlExact(@as(UnsignedResult, byte.bits), result_info.bits - 7) | + if (result_info.bits > 7) @shrExact(result, 7) else 0; + remaining_bits -= 7; + } else if (fits) fits = switch (result_info.signedness) { + .signed => @as(i7, @bitCast(byte.bits)) == + @as(i7, @truncate(@as(Result, @bitCast(result)) >> (result_info.bits - 1))), + .unsigned => byte.bits == 0, + }; + if (byte.more) continue; + r.toss(len); + return if (fits) @as(Result, @bitCast(result)) >> remaining_bits else error.Overflow; + } + r.toss(buffer.len); } } -const std = @import("../std.zig"); -const Self = @This(); -const math = std.math; -const assert = std.debug.assert; -const mem = std.mem; -const testing = std.testing; -const native_endian = @import("builtin").target.cpu.arch.endian(); -const Alignment = std.mem.Alignment; - -test { - _ = @import("Reader/test.zig"); +/// Left-aligns data such that `r.seek` becomes zero. +pub fn rebase(r: *Reader) void { + if (r.seek == 0) return; + const data = r.buffer[r.seek..r.end]; + @memmove(r.buffer[0..data.len], data); + r.seek = 0; + r.end = data.len; +} + +/// Ensures `capacity` more data can be buffered without rebasing, by rebasing +/// if necessary. +/// +/// Asserts `capacity` is within the buffer capacity. +pub fn rebaseCapacity(r: *Reader, capacity: usize) void { + if (r.end > r.buffer.len - capacity) rebase(r); +} + +/// Advances the stream and decreases the size of the storage buffer by `n`, +/// returning the range of bytes no longer accessible by `r`. +/// +/// This action can be undone by `restitute`. +/// +/// Asserts there are at least `n` buffered bytes already. +/// +/// Asserts that `r.seek` is zero, i.e. the buffer is in a rebased state. 
+pub fn steal(r: *Reader, n: usize) []u8 { + assert(r.seek == 0); + assert(n <= r.end); + const stolen = r.buffer[0..n]; + r.buffer = r.buffer[n..]; + r.end -= n; + return stolen; +} + +/// Expands the storage buffer, undoing the effects of `steal` +/// Assumes that `n` does not exceed the total number of stolen bytes. +pub fn restitute(r: *Reader, n: usize) void { + r.buffer = (r.buffer.ptr - n)[0 .. r.buffer.len + n]; + r.end += n; + r.seek += n; +} + +test fixed { + var r: Reader = .fixed("a\x02"); + try testing.expect((try r.takeByte()) == 'a'); + try testing.expect((try r.takeEnum(enum(u8) { + a = 0, + b = 99, + c = 2, + d = 3, + }, builtin.cpu.arch.endian())) == .c); + try testing.expectError(error.EndOfStream, r.takeByte()); +} + +test peek { + var r: Reader = .fixed("abc"); + try testing.expectEqualStrings("ab", try r.peek(2)); + try testing.expectEqualStrings("a", try r.peek(1)); +} + +test peekGreedy { + var r: Reader = .fixed("abc"); + try testing.expectEqualStrings("abc", try r.peekGreedy(1)); +} + +test toss { + var r: Reader = .fixed("abc"); + r.toss(1); + try testing.expectEqualStrings("bc", r.buffered()); +} + +test take { + var r: Reader = .fixed("abc"); + try testing.expectEqualStrings("ab", try r.take(2)); + try testing.expectEqualStrings("c", try r.take(1)); +} + +test takeArray { + var r: Reader = .fixed("abc"); + try testing.expectEqualStrings("ab", try r.takeArray(2)); + try testing.expectEqualStrings("c", try r.takeArray(1)); +} + +test peekArray { + var r: Reader = .fixed("abc"); + try testing.expectEqualStrings("ab", try r.peekArray(2)); + try testing.expectEqualStrings("a", try r.peekArray(1)); +} + +test discardAll { + var r: Reader = .fixed("foobar"); + try r.discardAll(3); + try testing.expectEqualStrings("bar", try r.take(3)); + try r.discardAll(0); + try testing.expectError(error.EndOfStream, r.discardAll(1)); +} + +test discardRemaining { + var r: Reader = .fixed("foobar"); + r.toss(1); + try testing.expectEqual(5, try 
r.discardRemaining()); + try testing.expectEqual(0, try r.discardRemaining()); +} + +test stream { + var out_buffer: [10]u8 = undefined; + var r: Reader = .fixed("foobar"); + var w: Writer = .fixed(&out_buffer); + // Short streams are possible with this function but not with fixed. + try testing.expectEqual(2, try r.stream(&w, .limited(2))); + try testing.expectEqualStrings("fo", w.buffered()); + try testing.expectEqual(4, try r.stream(&w, .unlimited)); + try testing.expectEqualStrings("foobar", w.buffered()); +} + +test takeSentinel { + var r: Reader = .fixed("ab\nc"); + try testing.expectEqualStrings("ab", try r.takeSentinel('\n')); + try testing.expectError(error.EndOfStream, r.takeSentinel('\n')); + try testing.expectEqualStrings("c", try r.peek(1)); +} + +test peekSentinel { + var r: Reader = .fixed("ab\nc"); + try testing.expectEqualStrings("ab", try r.peekSentinel('\n')); + try testing.expectEqualStrings("ab", try r.peekSentinel('\n')); +} + +test takeDelimiterInclusive { + var r: Reader = .fixed("ab\nc"); + try testing.expectEqualStrings("ab\n", try r.takeDelimiterInclusive('\n')); + try testing.expectError(error.EndOfStream, r.takeDelimiterInclusive('\n')); +} + +test peekDelimiterInclusive { + var r: Reader = .fixed("ab\nc"); + try testing.expectEqualStrings("ab\n", try r.peekDelimiterInclusive('\n')); + try testing.expectEqualStrings("ab\n", try r.peekDelimiterInclusive('\n')); + r.toss(3); + try testing.expectError(error.EndOfStream, r.peekDelimiterInclusive('\n')); +} + +test takeDelimiterExclusive { + var r: Reader = .fixed("ab\nc"); + try testing.expectEqualStrings("ab", try r.takeDelimiterExclusive('\n')); + try testing.expectEqualStrings("c", try r.takeDelimiterExclusive('\n')); + try testing.expectError(error.EndOfStream, r.takeDelimiterExclusive('\n')); +} + +test peekDelimiterExclusive { + var r: Reader = .fixed("ab\nc"); + try testing.expectEqualStrings("ab", try r.peekDelimiterExclusive('\n')); + try testing.expectEqualStrings("ab", try 
r.peekDelimiterExclusive('\n')); + r.toss(3); + try testing.expectEqualStrings("c", try r.peekDelimiterExclusive('\n')); +} + +test streamDelimiter { + var out_buffer: [10]u8 = undefined; + var r: Reader = .fixed("foo\nbars"); + var w: Writer = .fixed(&out_buffer); + try testing.expectEqual(3, try r.streamDelimiter(&w, '\n')); + try testing.expectEqualStrings("foo", w.buffered()); + try testing.expectEqual(0, try r.streamDelimiter(&w, '\n')); + r.toss(1); + try testing.expectError(error.EndOfStream, r.streamDelimiter(&w, '\n')); +} + +test streamDelimiterEnding { + var out_buffer: [10]u8 = undefined; + var r: Reader = .fixed("foo\nbars"); + var w: Writer = .fixed(&out_buffer); + try testing.expectEqual(3, try r.streamDelimiterEnding(&w, '\n')); + try testing.expectEqualStrings("foo", w.buffered()); + r.toss(1); + try testing.expectEqual(4, try r.streamDelimiterEnding(&w, '\n')); + try testing.expectEqualStrings("foobars", w.buffered()); + try testing.expectEqual(0, try r.streamDelimiterEnding(&w, '\n')); + try testing.expectEqual(0, try r.streamDelimiterEnding(&w, '\n')); +} + +test streamDelimiterLimit { + var out_buffer: [10]u8 = undefined; + var r: Reader = .fixed("foo\nbars"); + var w: Writer = .fixed(&out_buffer); + try testing.expectError(error.StreamTooLong, r.streamDelimiterLimit(&w, '\n', .limited(2))); + try testing.expectEqual(1, try r.streamDelimiterLimit(&w, '\n', .limited(3))); + try testing.expectEqualStrings("\n", try r.take(1)); + try testing.expectEqual(4, try r.streamDelimiterLimit(&w, '\n', .unlimited)); + try testing.expectEqualStrings("foobars", w.buffered()); +} + +test discardDelimiterExclusive { + var r: Reader = .fixed("foob\nar"); + try testing.expectEqual(4, try r.discardDelimiterExclusive('\n')); + try testing.expectEqualStrings("\n", try r.take(1)); + try testing.expectEqual(2, try r.discardDelimiterExclusive('\n')); + try testing.expectEqual(0, try r.discardDelimiterExclusive('\n')); +} + +test discardDelimiterInclusive { + var r: 
Reader = .fixed("foob\nar"); + try testing.expectEqual(5, try r.discardDelimiterInclusive('\n')); + try testing.expectError(error.EndOfStream, r.discardDelimiterInclusive('\n')); +} + +test discardDelimiterLimit { + var r: Reader = .fixed("foob\nar"); + try testing.expectError(error.StreamTooLong, r.discardDelimiterLimit('\n', .limited(4))); + try testing.expectEqual(0, try r.discardDelimiterLimit('\n', .limited(2))); + try testing.expectEqualStrings("\n", try r.take(1)); + try testing.expectEqual(2, try r.discardDelimiterLimit('\n', .unlimited)); + try testing.expectEqual(0, try r.discardDelimiterLimit('\n', .unlimited)); +} + +test fill { + var r: Reader = .fixed("abc"); + try r.fill(1); + try r.fill(3); +} + +test takeByte { + var r: Reader = .fixed("ab"); + try testing.expectEqual('a', try r.takeByte()); + try testing.expectEqual('b', try r.takeByte()); + try testing.expectError(error.EndOfStream, r.takeByte()); +} + +test takeByteSigned { + var r: Reader = .fixed(&.{ 255, 5 }); + try testing.expectEqual(-1, try r.takeByteSigned()); + try testing.expectEqual(5, try r.takeByteSigned()); + try testing.expectError(error.EndOfStream, r.takeByteSigned()); +} + +test takeInt { + var r: Reader = .fixed(&.{ 0x12, 0x34, 0x56 }); + try testing.expectEqual(0x1234, try r.takeInt(u16, .big)); + try testing.expectError(error.EndOfStream, r.takeInt(u16, .little)); +} + +test takeVarInt { + var r: Reader = .fixed(&.{ 0x12, 0x34, 0x56 }); + try testing.expectEqual(0x123456, try r.takeVarInt(u64, .big, 3)); + try testing.expectError(error.EndOfStream, r.takeVarInt(u16, .little, 1)); +} + +test takeStruct { + var r: Reader = .fixed(&.{ 0x12, 0x00, 0x34, 0x56 }); + const S = extern struct { a: u8, b: u16 }; + switch (native_endian) { + .little => try testing.expectEqual(@as(S, .{ .a = 0x12, .b = 0x5634 }), (try r.takeStruct(S)).*), + .big => try testing.expectEqual(@as(S, .{ .a = 0x12, .b = 0x3456 }), (try r.takeStruct(S)).*), + } + try testing.expectError(error.EndOfStream, 
r.takeStruct(S)); +} + +test peekStruct { + var r: Reader = .fixed(&.{ 0x12, 0x00, 0x34, 0x56 }); + const S = extern struct { a: u8, b: u16 }; + switch (native_endian) { + .little => { + try testing.expectEqual(@as(S, .{ .a = 0x12, .b = 0x5634 }), (try r.peekStruct(S)).*); + try testing.expectEqual(@as(S, .{ .a = 0x12, .b = 0x5634 }), (try r.peekStruct(S)).*); + }, + .big => { + try testing.expectEqual(@as(S, .{ .a = 0x12, .b = 0x3456 }), (try r.peekStruct(S)).*); + try testing.expectEqual(@as(S, .{ .a = 0x12, .b = 0x3456 }), (try r.peekStruct(S)).*); + }, + } +} + +test takeStructEndian { + var r: Reader = .fixed(&.{ 0x12, 0x00, 0x34, 0x56 }); + const S = extern struct { a: u8, b: u16 }; + try testing.expectEqual(@as(S, .{ .a = 0x12, .b = 0x3456 }), try r.takeStructEndian(S, .big)); + try testing.expectError(error.EndOfStream, r.takeStructEndian(S, .little)); +} + +test peekStructEndian { + var r: Reader = .fixed(&.{ 0x12, 0x00, 0x34, 0x56 }); + const S = extern struct { a: u8, b: u16 }; + try testing.expectEqual(@as(S, .{ .a = 0x12, .b = 0x3456 }), try r.peekStructEndian(S, .big)); + try testing.expectEqual(@as(S, .{ .a = 0x12, .b = 0x5634 }), try r.peekStructEndian(S, .little)); +} + +test takeEnum { + var r: Reader = .fixed(&.{ 2, 0, 1 }); + const E1 = enum(u8) { a, b, c }; + const E2 = enum(u16) { _ }; + try testing.expectEqual(E1.c, try r.takeEnum(E1, .little)); + try testing.expectEqual(@as(E2, @enumFromInt(0x0001)), try r.takeEnum(E2, .big)); +} + +test takeLeb128 { + var r: Reader = .fixed("\xc7\x9f\x7f\x80"); + try testing.expectEqual(-12345, try r.takeLeb128(i64)); + try testing.expectEqual(0x80, try r.peekByte()); + try testing.expectError(error.EndOfStream, r.takeLeb128(i64)); +} + +test readSliceShort { + var r: Reader = .fixed("HelloFren"); + var buf: [5]u8 = undefined; + try testing.expectEqual(5, try r.readSliceShort(&buf)); + try testing.expectEqualStrings("Hello", buf[0..5]); + try testing.expectEqual(4, try r.readSliceShort(&buf)); + try 
testing.expectEqualStrings("Fren", buf[0..4]); + try testing.expectEqual(0, try r.readSliceShort(&buf)); +} + +test readVec { + var r: Reader = .fixed(std.ascii.letters); + var flat_buffer: [52]u8 = undefined; + var bufs: [2][]u8 = .{ + flat_buffer[0..26], + flat_buffer[26..], + }; + // Short reads are possible with this function but not with fixed. + try testing.expectEqual(26 * 2, try r.readVec(&bufs)); + try testing.expectEqualStrings(std.ascii.letters[0..26], bufs[0]); + try testing.expectEqualStrings(std.ascii.letters[26..], bufs[1]); +} + +test readVecLimit { + var r: Reader = .fixed(std.ascii.letters); + var flat_buffer: [52]u8 = undefined; + var bufs: [2][]u8 = .{ + flat_buffer[0..26], + flat_buffer[26..], + }; + // Short reads are possible with this function but not with fixed. + try testing.expectEqual(50, try r.readVecLimit(&bufs, .limited(50))); + try testing.expectEqualStrings(std.ascii.letters[0..26], bufs[0]); + try testing.expectEqualStrings(std.ascii.letters[26..50], bufs[1][0..24]); +} + +test "expected error.EndOfStream" { + // Unit test inspired by https://github.com/ziglang/zig/issues/17733 + var buffer: [3]u8 = undefined; + var r: std.io.Reader = .fixed(&buffer); + r.end = 0; // capacity 3, but empty + try std.testing.expectError(error.EndOfStream, r.takeEnum(enum(u8) { a, b }, .little)); + try std.testing.expectError(error.EndOfStream, r.take(3)); +} + +fn endingStream(r: *Reader, w: *Writer, limit: Limit) StreamError!usize { + _ = r; + _ = w; + _ = limit; + return error.EndOfStream; +} + +fn endingDiscard(r: *Reader, limit: Limit) Error!usize { + _ = r; + _ = limit; + return error.EndOfStream; +} + +fn failingStream(r: *Reader, w: *Writer, limit: Limit) StreamError!usize { + _ = r; + _ = w; + _ = limit; + return error.ReadFailed; +} + +fn failingDiscard(r: *Reader, limit: Limit) Error!usize { + _ = r; + _ = limit; + return error.ReadFailed; +} + +test "readAlloc when the backing reader provides one byte at a time" { + const OneByteReader = 
struct { + str: []const u8, + i: usize, + reader: Reader, + + fn stream(r: *Reader, w: *Writer, limit: Limit) StreamError!usize { + assert(@intFromEnum(limit) >= 1); + const self: *@This() = @fieldParentPtr("reader", r); + if (self.str.len - self.i == 0) return error.EndOfStream; + try w.writeByte(self.str[self.i]); + self.i += 1; + return 1; + } + }; + const str = "This is a test"; + var one_byte_stream: OneByteReader = .{ + .str = str, + .i = 0, + .reader = .{ + .buffer = &.{}, + .vtable = &.{ .stream = OneByteReader.stream }, + .seek = 0, + .end = 0, + }, + }; + const res = try one_byte_stream.reader.allocRemaining(std.testing.allocator, .unlimited); + defer std.testing.allocator.free(res); + try std.testing.expectEqualStrings(str, res); +} + +test "takeDelimiterInclusive when it rebases" { + const written_line = "ABCDEFGHIJKLMNOPQRSTUVWXYZ\n"; + var buffer: [128]u8 = undefined; + var tr: std.testing.Reader = .init(&buffer, &.{ + .{ .buffer = written_line }, + .{ .buffer = written_line }, + .{ .buffer = written_line }, + .{ .buffer = written_line }, + .{ .buffer = written_line }, + .{ .buffer = written_line }, + }); + const r = &tr.interface; + for (0..6) |_| { + try std.testing.expectEqualStrings(written_line, try r.takeDelimiterInclusive('\n')); + } +} + +/// Provides a `Reader` implementation by passing data from an underlying +/// reader through `Hasher.update`. +/// +/// The underlying reader is best unbuffered. +/// +/// This implementation makes suboptimal buffering decisions due to being +/// generic. A better solution will involve creating a reader for each hash +/// function, where the discard buffer can be tailored to the hash +/// implementation details. 
+pub fn Hashed(comptime Hasher: type) type { + return struct { + in: *Reader, + hasher: Hasher, + interface: Reader, + + pub fn init(in: *Reader, hasher: Hasher, buffer: []u8) @This() { + return .{ + .in = in, + .hasher = hasher, + .interface = .{ + .vtable = &.{ + .read = @This().read, + .discard = @This().discard, + }, + .buffer = buffer, + .end = 0, + .seek = 0, + }, + }; + } + + fn read(r: *Reader, w: *Writer, limit: Limit) StreamError!usize { + const this: *@This() = @alignCast(@fieldParentPtr("interface", r)); + const data = w.writableVector(limit); + const n = try this.in.readVec(data); + const result = w.advanceVector(n); + var remaining: usize = n; + for (data) |slice| { + if (remaining < slice.len) { + this.hasher.update(slice[0..remaining]); + return result; + } else { + remaining -= slice.len; + this.hasher.update(slice); + } + } + assert(remaining == 0); + return result; + } + + fn discard(r: *Reader, limit: Limit) Error!usize { + const this: *@This() = @alignCast(@fieldParentPtr("interface", r)); + var w = this.hasher.writer(&.{}); + const n = this.in.stream(&w, limit) catch |err| switch (err) { + error.WriteFailed => unreachable, + else => |e| return e, + }; + return n; + } + }; } diff --git a/lib/std/io/Reader/Limited.zig b/lib/std/io/Reader/Limited.zig new file mode 100644 index 0000000000..9476b97804 --- /dev/null +++ b/lib/std/io/Reader/Limited.zig @@ -0,0 +1,42 @@ +const Limited = @This(); + +const std = @import("../../std.zig"); +const Reader = std.io.Reader; +const Writer = std.io.Writer; +const Limit = std.io.Limit; + +unlimited: *Reader, +remaining: Limit, +interface: Reader, + +pub fn init(reader: *Reader, limit: Limit, buffer: []u8) Limited { + return .{ + .unlimited = reader, + .remaining = limit, + .interface = .{ + .vtable = &.{ + .stream = stream, + .discard = discard, + }, + .buffer = buffer, + .seek = 0, + .end = 0, + }, + }; +} + +fn stream(r: *Reader, w: *Writer, limit: Limit) Reader.StreamError!usize { + const l: 
*Limited = @alignCast(@fieldParentPtr("interface", r)); + const combined_limit = limit.min(l.remaining); + const n = try l.unlimited.stream(w, combined_limit); + l.remaining = l.remaining.subtract(n).?; + return n; +} + +fn discard(r: *Reader, limit: Limit) Reader.Error!usize { + const l: *Limited = @alignCast(@fieldParentPtr("interface", r)); + const combined_limit = limit.min(l.remaining); + const n = try l.unlimited.discard(combined_limit); + l.remaining = l.remaining.subtract(n).?; + return n; +} diff --git a/lib/std/io/Writer.zig b/lib/std/io/Writer.zig index 26d4f88def..d79959dcb1 100644 --- a/lib/std/io/Writer.zig +++ b/lib/std/io/Writer.zig @@ -1,83 +1,2486 @@ +const builtin = @import("builtin"); +const native_endian = builtin.target.cpu.arch.endian(); + +const Writer = @This(); const std = @import("../std.zig"); const assert = std.debug.assert; -const mem = std.mem; -const native_endian = @import("builtin").target.cpu.arch.endian(); +const Limit = std.io.Limit; +const File = std.fs.File; +const testing = std.testing; +const Allocator = std.mem.Allocator; -context: *const anyopaque, -writeFn: *const fn (context: *const anyopaque, bytes: []const u8) anyerror!usize, +vtable: *const VTable, +/// If this has length zero, the writer is unbuffered, and `flush` is a no-op. +buffer: []u8, +/// In `buffer` before this are buffered bytes, after this is `undefined`. +end: usize = 0, -const Self = @This(); -pub const Error = anyerror; +pub const VTable = struct { + /// Sends bytes to the logical sink. A write will only be sent here if it + /// could not fit into `buffer`, or during a `flush` operation. + /// + /// `buffer[0..end]` is consumed first, followed by each slice of `data` in + /// order. Elements of `data` may alias each other but may not alias + /// `buffer`. + /// + /// This function modifies `Writer.end` and `Writer.buffer` in an + /// implementation-defined manner. + /// + /// `data.len` must be nonzero. 
+ /// + /// The last element of `data` is repeated as necessary so that it is + /// written `splat` number of times, which may be zero. + /// + /// This function may not be called if the data to be written could have + /// been stored in `buffer` instead, including when the amount of data to + /// be written is zero and the buffer capacity is zero. + /// + /// Number of bytes consumed from `data` is returned, excluding bytes from + /// `buffer`. + /// + /// Number of bytes returned may be zero, which does not indicate stream + /// end. A subsequent call may return nonzero, or signal end of stream via + /// `error.WriteFailed`. + drain: *const fn (w: *Writer, data: []const []const u8, splat: usize) Error!usize, -pub fn write(self: Self, bytes: []const u8) anyerror!usize { - return self.writeFn(self.context, bytes); + /// Copies contents from an open file to the logical sink. `buffer[0..end]` + /// is consumed first, followed by `limit` bytes from `file_reader`. + /// + /// Number of bytes logically written is returned. This excludes bytes from + /// `buffer` because they have already been logically written. Number of + /// bytes consumed from `buffer` are tracked by modifying `end`. + /// + /// Number of bytes returned may be zero, which does not indicate stream + /// end. A subsequent call may return nonzero, or signal end of stream via + /// `error.WriteFailed`. Caller may check `file_reader` state + /// (`File.Reader.atEnd`) to disambiguate between a zero-length read or + /// write, and whether the file reached the end. + /// + /// `error.Unimplemented` indicates the callee cannot offer a more + /// efficient implementation than the caller performing its own reads. + sendFile: *const fn ( + w: *Writer, + file_reader: *File.Reader, + /// Maximum amount of bytes to read from the file. Implementations may + /// assume that the file size does not exceed this amount. Data from + /// `buffer` does not count towards this limit. 
+ limit: Limit, + ) FileError!usize = unimplementedSendFile, + + /// Consumes all remaining buffer. + /// + /// The default flush implementation calls drain repeatedly until `end` is + /// zero, however it is legal for implementations to manage `end` + /// differently. For instance, `Allocating` flush is a no-op. + /// + /// There may be subsequent calls to `drain` and `sendFile` after a `flush` + /// operation. + flush: *const fn (w: *Writer) Error!void = defaultFlush, +}; + +pub const Error = error{ + /// See the `Writer` implementation for detailed diagnostics. + WriteFailed, +}; + +pub const FileAllError = error{ + /// Detailed diagnostics are found on the `File.Reader` struct. + ReadFailed, + /// See the `Writer` implementation for detailed diagnostics. + WriteFailed, +}; + +pub const FileReadingError = error{ + /// Detailed diagnostics are found on the `File.Reader` struct. + ReadFailed, + /// See the `Writer` implementation for detailed diagnostics. + WriteFailed, + /// Reached the end of the file being read. + EndOfStream, +}; + +pub const FileError = error{ + /// Detailed diagnostics are found on the `File.Reader` struct. + ReadFailed, + /// See the `Writer` implementation for detailed diagnostics. + WriteFailed, + /// Reached the end of the file being read. + EndOfStream, + /// Indicates the caller should do its own file reading; the callee cannot + /// offer a more efficient implementation. + Unimplemented, +}; + +/// Writes to `buffer` and returns `error.WriteFailed` when it is full. 
+pub fn fixed(buffer: []u8) Writer { + return .{ + .vtable = &.{ .drain = fixedDrain }, + .buffer = buffer, + }; } -pub fn writeAll(self: Self, bytes: []const u8) anyerror!void { - var index: usize = 0; - while (index != bytes.len) { - index += try self.write(bytes[index..]); +pub fn hashed(w: *Writer, hasher: anytype, buffer: []u8) Hashed(@TypeOf(hasher)) { + return .initHasher(w, hasher, buffer); +} + +pub const failing: Writer = .{ + .vtable = &.{ + .drain = failingDrain, + .sendFile = failingSendFile, + }, +}; + +/// Returns the contents not yet drained. +pub fn buffered(w: *const Writer) []u8 { + return w.buffer[0..w.end]; +} + +pub fn countSplat(data: []const []const u8, splat: usize) usize { + var total: usize = 0; + for (data[0 .. data.len - 1]) |buf| total += buf.len; + total += data[data.len - 1].len * splat; + return total; +} + +pub fn countSendFileLowerBound(n: usize, file_reader: *File.Reader, limit: Limit) ?usize { + const total: u64 = @min(@intFromEnum(limit), file_reader.getSize() catch return null); + return std.math.lossyCast(usize, total + n); +} + +/// If the total number of bytes of `data` fits inside `unusedCapacitySlice`, +/// this function is guaranteed to not fail, not call into `VTable`, and return +/// the total bytes inside `data`. +pub fn writeVec(w: *Writer, data: []const []const u8) Error!usize { + return writeSplat(w, data, 1); +} + +/// If the number of bytes to write based on `data` and `splat` fits inside +/// `unusedCapacitySlice`, this function is guaranteed to not fail, not call +/// into `VTable`, and return the full number of bytes. +pub fn writeSplat(w: *Writer, data: []const []const u8, splat: usize) Error!usize { + assert(data.len > 0); + const buffer = w.buffer; + const count = countSplat(data, splat); + if (w.end + count > buffer.len) return w.vtable.drain(w, data, splat); + for (data[0 .. 
data.len - 1]) |bytes| { + @memcpy(buffer[w.end..][0..bytes.len], bytes); + w.end += bytes.len; + } + const pattern = data[data.len - 1]; + switch (pattern.len) { + 0 => {}, + 1 => { + @memset(buffer[w.end..][0..splat], pattern[0]); + w.end += splat; + }, + else => for (0..splat) |_| { + @memcpy(buffer[w.end..][0..pattern.len], pattern); + w.end += pattern.len; + }, + } + return count; +} + +/// Returns how many bytes were consumed from `header` and `data`. +pub fn writeSplatHeader( + w: *Writer, + header: []const u8, + data: []const []const u8, + splat: usize, +) Error!usize { + const new_end = w.end + header.len; + if (new_end <= w.buffer.len) { + @memcpy(w.buffer[w.end..][0..header.len], header); + w.end = new_end; + return header.len + try writeSplat(w, data, splat); + } + var vecs: [8][]const u8 = undefined; // Arbitrarily chosen size. + var i: usize = 1; + vecs[0] = header; + for (data[0 .. data.len - 1]) |buf| { + if (buf.len == 0) continue; + vecs[i] = buf; + i += 1; + if (vecs.len - i == 0) break; + } + const pattern = data[data.len - 1]; + const new_splat = s: { + if (pattern.len == 0 or vecs.len - i == 0) break :s 1; + vecs[i] = pattern; + i += 1; + break :s splat; + }; + return w.vtable.drain(w, vecs[0..i], new_splat); +} + +test "writeSplatHeader splatting avoids buffer aliasing temptation" { + const initial_buf = try testing.allocator.alloc(u8, 8); + var aw: std.io.Writer.Allocating = .initOwnedSlice(testing.allocator, initial_buf); + defer aw.deinit(); + // This test assumes 8 vector buffer in this function. + const n = try aw.writer.writeSplatHeader("header which is longer than buf ", &.{ + "1", "2", "3", "4", "5", "6", "foo", "bar", "foo", + }, 3); + try testing.expectEqual(41, n); + try testing.expectEqualStrings( + "header which is longer than buf 123456foo", + aw.writer.buffered(), + ); +} + +/// Drains all remaining buffered data. 
+pub fn flush(w: *Writer) Error!void { + return w.vtable.flush(w); +} + +/// Repeatedly calls `VTable.drain` until `end` is zero. +pub fn defaultFlush(w: *Writer) Error!void { + const drainFn = w.vtable.drain; + while (w.end != 0) _ = try drainFn(w, &.{""}, 1); +} + +/// Does nothing. +pub fn noopFlush(w: *Writer) Error!void { + _ = w; +} + +/// Calls `VTable.drain` but hides the last `preserve_length` bytes from the +/// implementation, keeping them buffered. +pub fn drainPreserve(w: *Writer, preserve_length: usize) Error!void { + const temp_end = w.end -| preserve_length; + const preserved = w.buffer[temp_end..w.end]; + w.end = temp_end; + defer w.end += preserved.len; + assert(0 == try w.vtable.drain(w, &.{""}, 1)); + assert(w.end <= temp_end + preserved.len); + @memmove(w.buffer[w.end..][0..preserved.len], preserved); +} + +pub fn unusedCapacitySlice(w: *const Writer) []u8 { + return w.buffer[w.end..]; +} + +pub fn unusedCapacityLen(w: *const Writer) usize { + return w.buffer.len - w.end; +} + +/// Asserts the provided buffer has total capacity enough for `len`. +/// +/// Advances the buffer end position by `len`. +pub fn writableArray(w: *Writer, comptime len: usize) Error!*[len]u8 { + const big_slice = try w.writableSliceGreedy(len); + advance(w, len); + return big_slice[0..len]; +} + +/// Asserts the provided buffer has total capacity enough for `len`. +/// +/// Advances the buffer end position by `len`. +pub fn writableSlice(w: *Writer, len: usize) Error![]u8 { + const big_slice = try w.writableSliceGreedy(len); + advance(w, len); + return big_slice[0..len]; +} + +/// Asserts the provided buffer has total capacity enough for `minimum_length`. +/// +/// Does not `advance` the buffer end position. +/// +/// If `minimum_length` is zero, this is equivalent to `unusedCapacitySlice`. 
+pub fn writableSliceGreedy(w: *Writer, minimum_length: usize) Error![]u8 { + assert(w.buffer.len >= minimum_length); + while (w.buffer.len - w.end < minimum_length) { + assert(0 == try w.vtable.drain(w, &.{""}, 1)); + } else { + @branchHint(.likely); + return w.buffer[w.end..]; } } -pub fn print(self: Self, comptime format: []const u8, args: anytype) anyerror!void { - return std.fmt.format(self, format, args); -} - -pub fn writeByte(self: Self, byte: u8) anyerror!void { - const array = [1]u8{byte}; - return self.writeAll(&array); -} - -pub fn writeByteNTimes(self: Self, byte: u8, n: usize) anyerror!void { - var bytes: [256]u8 = undefined; - @memset(bytes[0..], byte); - - var remaining: usize = n; - while (remaining > 0) { - const to_write = @min(remaining, bytes.len); - try self.writeAll(bytes[0..to_write]); - remaining -= to_write; +/// Asserts the provided buffer has total capacity enough for `minimum_length` +/// and `preserve_length` combined. +/// +/// Does not `advance` the buffer end position. +/// +/// When draining the buffer, ensures that at least `preserve_length` bytes +/// remain buffered. +/// +/// If `preserve_length` is zero, this is equivalent to `writableSliceGreedy`. 
+pub fn writableSliceGreedyPreserve(w: *Writer, preserve_length: usize, minimum_length: usize) Error![]u8 { + assert(w.buffer.len >= preserve_length + minimum_length); + while (w.buffer.len - w.end < minimum_length) { + try drainPreserve(w, preserve_length); + } else { + @branchHint(.likely); + return w.buffer[w.end..]; } } -pub fn writeBytesNTimes(self: Self, bytes: []const u8, n: usize) anyerror!void { +pub const WritableVectorIterator = struct { + first: []u8, + middle: []const []u8 = &.{}, + last: []u8 = &.{}, + index: usize = 0, + + pub fn next(it: *WritableVectorIterator) ?[]u8 { + while (true) { + const i = it.index; + it.index += 1; + if (i == 0) { + if (it.first.len == 0) continue; + return it.first; + } + const middle_index = i - 1; + if (middle_index < it.middle.len) { + const middle = it.middle[middle_index]; + if (middle.len == 0) continue; + return middle; + } + if (middle_index == it.middle.len) { + if (it.last.len == 0) continue; + return it.last; + } + return null; + } + } +}; + +pub const VectorWrapper = struct { + writer: Writer, + it: WritableVectorIterator, + /// Tracks whether the "writable vector" API was used. + used: bool = false, + pub const vtable: *const VTable = &unique_vtable_allocation; + /// This is intended to be constant but it must be a unique address for + /// `@fieldParentPtr` to work. 
+ var unique_vtable_allocation: VTable = .{ .drain = fixedDrain }; +}; + +pub fn writableVectorIterator(w: *Writer) Error!WritableVectorIterator { + if (w.vtable == VectorWrapper.vtable) { + const wrapper: *VectorWrapper = @fieldParentPtr("writer", w); + wrapper.used = true; + return wrapper.it; + } + return .{ .first = try writableSliceGreedy(w, 1) }; +} + +pub fn writableVectorPosix(w: *Writer, buffer: []std.posix.iovec, limit: Limit) Error![]std.posix.iovec { + var it = try writableVectorIterator(w); var i: usize = 0; - while (i < n) : (i += 1) { - try self.writeAll(bytes); + var remaining = limit; + while (it.next()) |full_buffer| { + if (!remaining.nonzero()) break; + if (buffer.len - i == 0) break; + const buf = remaining.slice(full_buffer); + if (buf.len == 0) continue; + buffer[i] = .{ .base = buf.ptr, .len = buf.len }; + i += 1; + remaining = remaining.subtract(buf.len).?; + } + return buffer[0..i]; +} + +pub fn ensureUnusedCapacity(w: *Writer, n: usize) Error!void { + _ = try writableSliceGreedy(w, n); +} + +pub fn undo(w: *Writer, n: usize) void { + w.end -= n; +} + +/// After calling `writableSliceGreedy`, this function tracks how many bytes +/// were written to it. +/// +/// This is not needed when using `writableSlice` or `writableArray`. +pub fn advance(w: *Writer, n: usize) void { + const new_end = w.end + n; + assert(new_end <= w.buffer.len); + w.end = new_end; +} + +/// After calling `writableVector`, this function tracks how many bytes were +/// written to it. +pub fn advanceVector(w: *Writer, n: usize) usize { + return consume(w, n); +} + +/// The `data` parameter is mutable because this function needs to mutate the +/// fields in order to handle partial writes from `VTable.writeSplat`. 
+pub fn writeVecAll(w: *Writer, data: [][]const u8) Error!void { + var index: usize = 0; + var truncate: usize = 0; + while (index < data.len) { + { + const untruncated = data[index]; + data[index] = untruncated[truncate..]; + defer data[index] = untruncated; + truncate += try w.writeVec(data[index..]); + } + while (index < data.len and truncate >= data[index].len) { + truncate -= data[index].len; + index += 1; + } } } -pub inline fn writeInt(self: Self, comptime T: type, value: T, endian: std.builtin.Endian) anyerror!void { - var bytes: [@divExact(@typeInfo(T).int.bits, 8)]u8 = undefined; - mem.writeInt(std.math.ByteAlignedInt(@TypeOf(value)), &bytes, value, endian); - return self.writeAll(&bytes); +/// The `data` parameter is mutable because this function needs to mutate the +/// fields in order to handle partial writes from `VTable.writeSplat`. +pub fn writeSplatAll(w: *Writer, data: [][]const u8, splat: usize) Error!void { + var index: usize = 0; + var truncate: usize = 0; + var remaining_splat = splat; + while (index + 1 < data.len) { + { + const untruncated = data[index]; + data[index] = untruncated[truncate..]; + defer data[index] = untruncated; + truncate += try w.writeSplat(data[index..], remaining_splat); + } + while (truncate >= data[index].len) { + if (index + 1 < data.len) { + truncate -= data[index].len; + index += 1; + } else { + const last = data[data.len - 1]; + remaining_splat -= @divExact(truncate, last.len); + while (remaining_splat > 0) { + const n = try w.writeSplat(data[data.len - 1 ..][0..1], remaining_splat); + remaining_splat -= @divExact(n, last.len); + } + return; + } + } + } } -pub fn writeStruct(self: Self, value: anytype) anyerror!void { +pub fn write(w: *Writer, bytes: []const u8) Error!usize { + if (w.end + bytes.len <= w.buffer.len) { + @branchHint(.likely); + @memcpy(w.buffer[w.end..][0..bytes.len], bytes); + w.end += bytes.len; + return bytes.len; + } + return w.vtable.drain(w, &.{bytes}, 1); +} + +/// Asserts `buffer` capacity 
exceeds `preserve_length`. +pub fn writePreserve(w: *Writer, preserve_length: usize, bytes: []const u8) Error!usize { + assert(preserve_length <= w.buffer.len); + if (w.end + bytes.len <= w.buffer.len) { + @branchHint(.likely); + @memcpy(w.buffer[w.end..][0..bytes.len], bytes); + w.end += bytes.len; + return bytes.len; + } + const temp_end = w.end -| preserve_length; + const preserved = w.buffer[temp_end..w.end]; + w.end = temp_end; + defer w.end += preserved.len; + const n = try w.vtable.drain(w, &.{bytes}, 1); + assert(w.end <= temp_end + preserved.len); + @memmove(w.buffer[w.end..][0..preserved.len], preserved); + return n; +} + +/// Calls `drain` as many times as necessary such that all of `bytes` are +/// transferred. +pub fn writeAll(w: *Writer, bytes: []const u8) Error!void { + var index: usize = 0; + while (index < bytes.len) index += try w.write(bytes[index..]); +} + +/// Calls `drain` as many times as necessary such that all of `bytes` are +/// transferred. +/// +/// When draining the buffer, ensures that at least `preserve_length` bytes +/// remain buffered. +/// +/// Asserts `buffer` capacity exceeds `preserve_length`. +pub fn writeAllPreserve(w: *Writer, preserve_length: usize, bytes: []const u8) Error!void { + var index: usize = 0; + while (index < bytes.len) index += try w.writePreserve(preserve_length, bytes[index..]); +} + +/// Renders fmt string with args, calling `writer` with slices of bytes. +/// If `writer` returns an error, the error is returned from `format` and +/// `writer` is not called again. 
+/// +/// The format string must be comptime-known and may contain placeholders following +/// this format: +/// `{[argument][specifier]:[fill][alignment][width].[precision]}` +/// +/// Above, each word including its surrounding [ and ] is a parameter which you have to replace with something: +/// +/// - *argument* is either the numeric index or the field name of the argument that should be inserted +/// - when using a field name, you are required to enclose the field name (an identifier) in square +/// brackets, e.g. {[score]...} as opposed to the numeric index form which can be written e.g. {2...} +/// - *specifier* is a type-dependent formatting option that determines how a type should formatted (see below) +/// - *fill* is a single byte which is used to pad formatted numbers. +/// - *alignment* is one of the three bytes '<', '^', or '>' to make numbers +/// left, center, or right-aligned, respectively. +/// - Not all specifiers support alignment. +/// - Alignment is not Unicode-aware; appropriate only when used with raw bytes or ASCII. +/// - *width* is the total width of the field in bytes. This only applies to number formatting. +/// - *precision* specifies how many decimals a formatted number should have. +/// +/// Note that most of the parameters are optional and may be omitted. Also you +/// can leave out separators like `:` and `.` when all parameters after the +/// separator are omitted. +/// +/// Only exception is the *fill* parameter. If a non-zero *fill* character is +/// required at the same time as *width* is specified, one has to specify +/// *alignment* as well, as otherwise the digit following `:` is interpreted as +/// *width*, not *fill*. 
+/// +/// The *specifier* has several options for types: +/// - `x` and `X`: output numeric value in hexadecimal notation, or string in hexadecimal bytes +/// - `s`: +/// - for pointer-to-many and C pointers of u8, print as a C-string using zero-termination +/// - for slices of u8, print the entire slice as a string without zero-termination +/// - `t`: +/// - for enums and tagged unions: prints the tag name +/// - for error sets: prints the error name +/// - `b64`: output string as standard base64 +/// - `e`: output floating point value in scientific notation +/// - `d`: output numeric value in decimal notation +/// - `b`: output integer value in binary notation +/// - `o`: output integer value in octal notation +/// - `c`: output integer as an ASCII character. Integer type must have 8 bits at max. +/// - `u`: output integer as an UTF-8 sequence. Integer type must have 21 bits at max. +/// - `D`: output nanoseconds as duration +/// - `B`: output bytes in SI units (decimal) +/// - `Bi`: output bytes in IEC units (binary) +/// - `?`: output optional value as either the unwrapped value, or `null`; may be followed by a format specifier for the underlying value. +/// - `!`: output error union value as either the unwrapped value, or the formatted error value; may be followed by a format specifier for the underlying value. +/// - `*`: output the address of the value instead of the value itself. +/// - `any`: output a value of any type using its default format. +/// - `f`: delegates to a method on the type named "format" with the signature `fn (*Writer, args: anytype) Writer.Error!void`. +/// +/// A user type may be a `struct`, `vector`, `union` or `enum` type. +/// +/// To print literal curly braces, escape them by writing them twice, e.g. `{{` or `}}`. 
+pub fn print(w: *Writer, comptime fmt: []const u8, args: anytype) Error!void { + const ArgsType = @TypeOf(args); + const args_type_info = @typeInfo(ArgsType); + if (args_type_info != .@"struct") { + @compileError("expected tuple or struct argument, found " ++ @typeName(ArgsType)); + } + + const fields_info = args_type_info.@"struct".fields; + const max_format_args = @typeInfo(std.fmt.ArgSetType).int.bits; + if (fields_info.len > max_format_args) { + @compileError("32 arguments max are supported per format call"); + } + + @setEvalBranchQuota(fmt.len * 1000); + comptime var arg_state: std.fmt.ArgState = .{ .args_len = fields_info.len }; + comptime var i = 0; + comptime var literal: []const u8 = ""; + inline while (true) { + const start_index = i; + + inline while (i < fmt.len) : (i += 1) { + switch (fmt[i]) { + '{', '}' => break, + else => {}, + } + } + + comptime var end_index = i; + comptime var unescape_brace = false; + + // Handle {{ and }}, those are un-escaped as single braces + if (i + 1 < fmt.len and fmt[i + 1] == fmt[i]) { + unescape_brace = true; + // Make the first brace part of the literal... 
+ end_index += 1; + // ...and skip both + i += 2; + } + + literal = literal ++ fmt[start_index..end_index]; + + // We've already skipped the other brace, restart the loop + if (unescape_brace) continue; + + // Write out the literal + if (literal.len != 0) { + try w.writeAll(literal); + literal = ""; + } + + if (i >= fmt.len) break; + + if (fmt[i] == '}') { + @compileError("missing opening {"); + } + + // Get past the { + comptime assert(fmt[i] == '{'); + i += 1; + + const fmt_begin = i; + // Find the closing brace + inline while (i < fmt.len and fmt[i] != '}') : (i += 1) {} + const fmt_end = i; + + if (i >= fmt.len) { + @compileError("missing closing }"); + } + + // Get past the } + comptime assert(fmt[i] == '}'); + i += 1; + + const placeholder_array = fmt[fmt_begin..fmt_end].*; + const placeholder = comptime std.fmt.Placeholder.parse(&placeholder_array); + const arg_pos = comptime switch (placeholder.arg) { + .none => null, + .number => |pos| pos, + .named => |arg_name| std.meta.fieldIndex(ArgsType, arg_name) orelse + @compileError("no argument with name '" ++ arg_name ++ "'"), + }; + + const width = switch (placeholder.width) { + .none => null, + .number => |v| v, + .named => |arg_name| blk: { + const arg_i = comptime std.meta.fieldIndex(ArgsType, arg_name) orelse + @compileError("no argument with name '" ++ arg_name ++ "'"); + _ = comptime arg_state.nextArg(arg_i) orelse @compileError("too few arguments"); + break :blk @field(args, arg_name); + }, + }; + + const precision = switch (placeholder.precision) { + .none => null, + .number => |v| v, + .named => |arg_name| blk: { + const arg_i = comptime std.meta.fieldIndex(ArgsType, arg_name) orelse + @compileError("no argument with name '" ++ arg_name ++ "'"); + _ = comptime arg_state.nextArg(arg_i) orelse @compileError("too few arguments"); + break :blk @field(args, arg_name); + }, + }; + + const arg_to_print = comptime arg_state.nextArg(arg_pos) orelse + @compileError("too few arguments"); + + try w.printValue( + 
placeholder.specifier_arg, + .{ + .fill = placeholder.fill, + .alignment = placeholder.alignment, + .width = width, + .precision = precision, + }, + @field(args, fields_info[arg_to_print].name), + std.options.fmt_max_depth, + ); + } + + if (comptime arg_state.hasUnusedArgs()) { + const missing_count = arg_state.args_len - @popCount(arg_state.used_args); + switch (missing_count) { + 0 => unreachable, + 1 => @compileError("unused argument in '" ++ fmt ++ "'"), + else => @compileError(std.fmt.comptimePrint("{d}", .{missing_count}) ++ " unused arguments in '" ++ fmt ++ "'"), + } + } +} + +/// Calls `drain` as many times as necessary such that `byte` is transferred. +pub fn writeByte(w: *Writer, byte: u8) Error!void { + while (w.buffer.len - w.end == 0) { + const n = try w.vtable.drain(w, &.{&.{byte}}, 1); + if (n > 0) return; + } else { + @branchHint(.likely); + w.buffer[w.end] = byte; + w.end += 1; + } +} + +/// When draining the buffer, ensures that at least `preserve_length` bytes +/// remain buffered. +pub fn writeBytePreserve(w: *Writer, preserve_length: usize, byte: u8) Error!void { + while (w.buffer.len - w.end == 0) { + try drainPreserve(w, preserve_length); + } else { + @branchHint(.likely); + w.buffer[w.end] = byte; + w.end += 1; + } +} + +/// Writes the same byte many times, performing the underlying write call as +/// many times as necessary. +pub fn splatByteAll(w: *Writer, byte: u8, n: usize) Error!void { + var remaining: usize = n; + while (remaining > 0) remaining -= try w.splatByte(byte, remaining); +} + +/// Writes the same byte many times, allowing short writes. +/// +/// Does maximum of one underlying `VTable.drain`. +pub fn splatByte(w: *Writer, byte: u8, n: usize) Error!usize { + return writeSplat(w, &.{&.{byte}}, n); +} + +/// Writes the same slice many times, performing the underlying write call as +/// many times as necessary. 
+pub fn splatBytesAll(w: *Writer, bytes: []const u8, splat: usize) Error!void { + var remaining_bytes: usize = bytes.len * splat; + remaining_bytes -= try w.splatBytes(bytes, splat); + while (remaining_bytes > 0) { + const leftover = remaining_bytes % bytes.len; + const buffers: [2][]const u8 = .{ bytes[bytes.len - leftover ..], bytes }; + remaining_bytes -= try w.writeSplat(&buffers, splat); + } +} + +/// Writes the same slice many times, allowing short writes. +/// +/// Does maximum of one underlying `VTable.writeSplat`. +pub fn splatBytes(w: *Writer, bytes: []const u8, n: usize) Error!usize { + return writeSplat(w, &.{bytes}, n); +} + +/// Asserts the `buffer` was initialized with a capacity of at least `@sizeOf(T)` bytes. +pub inline fn writeInt(w: *Writer, comptime T: type, value: T, endian: std.builtin.Endian) Error!void { + var bytes: [@divExact(@typeInfo(T).int.bits, 8)]u8 = undefined; + std.mem.writeInt(std.math.ByteAlignedInt(@TypeOf(value)), &bytes, value, endian); + return w.writeAll(&bytes); +} + +pub fn writeStruct(w: *Writer, value: anytype) Error!void { // Only extern and packed structs have defined in-memory layout. comptime assert(@typeInfo(@TypeOf(value)).@"struct".layout != .auto); - return self.writeAll(mem.asBytes(&value)); + return w.writeAll(std.mem.asBytes(&value)); } -pub fn writeStructEndian(self: Self, value: anytype, endian: std.builtin.Endian) anyerror!void { - // TODO: make sure this value is not a reference type +/// The function is inline to avoid the dead code in case `endian` is +/// comptime-known and matches host endianness. 
+/// TODO: make sure this value is not a reference type +pub inline fn writeStructEndian(w: *Writer, value: anytype, endian: std.builtin.Endian) Error!void { + switch (@typeInfo(@TypeOf(value))) { + .@"struct" => |info| switch (info.layout) { + .auto => @compileError("ill-defined memory layout"), + .@"extern" => { + if (native_endian == endian) { + return w.writeStruct(value); + } else { + var copy = value; + std.mem.byteSwapAllFields(@TypeOf(value), ©); + return w.writeStruct(copy); + } + }, + .@"packed" => { + return writeInt(w, info.backing_integer.?, @bitCast(value), endian); + }, + }, + else => @compileError("not a struct"), + } +} + +pub inline fn writeSliceEndian( + w: *Writer, + Elem: type, + slice: []const Elem, + endian: std.builtin.Endian, +) Error!void { if (native_endian == endian) { - return self.writeStruct(value); + return writeAll(w, @ptrCast(slice)); } else { - var copy = value; - mem.byteSwapAllFields(@TypeOf(value), ©); - return self.writeStruct(copy); + return w.writeArraySwap(w, Elem, slice); } } -pub fn writeFile(self: Self, file: std.fs.File) anyerror!void { - // TODO: figure out how to adjust std lib abstractions so that this ends up - // doing sendfile or maybe even copy_file_range under the right conditions. - var buf: [4000]u8 = undefined; - while (true) { - const n = try file.readAll(&buf); - try self.writeAll(buf[0..n]); - if (n < buf.len) return; +/// Unlike `writeSplat` and `writeVec`, this function will call into `VTable` +/// even if there is enough buffer capacity for the file contents. +/// +/// Although it would be possible to eliminate `error.Unimplemented` from the +/// error set by reading directly into the buffer in such case, this is not +/// done because it is more efficient to do it higher up the call stack so that +/// the error does not occur with each write. +/// +/// See `sendFileReading` for an alternative that does not have +/// `error.Unimplemented` in the error set. 
/// Sends file contents through the stream via the `VTable.sendFile`
/// implementation.
///
/// Unlike `writeSplat` and `writeVec`, this function will call into `VTable`
/// even if there is enough buffer capacity for the file contents.
///
/// Although it would be possible to eliminate `error.Unimplemented` from the
/// error set by reading directly into the buffer in such case, this is not
/// done because it is more efficient to do it higher up the call stack so that
/// the error does not occur with each write.
///
/// See `sendFileReading` for an alternative that does not have
/// `error.Unimplemented` in the error set.
pub fn sendFile(w: *Writer, file_reader: *File.Reader, limit: Limit) FileError!usize {
    return w.vtable.sendFile(w, file_reader, limit);
}

/// Writes `header` followed by file contents, combining both into one
/// underlying operation when possible.
///
/// Returns how many bytes from `header` and `file_reader` were consumed.
pub fn sendFileHeader(
    w: *Writer,
    header: []const u8,
    file_reader: *File.Reader,
    limit: Limit,
) FileError!usize {
    const new_end = w.end + header.len;
    if (new_end <= w.buffer.len) {
        // Header fits in the buffer; buffer it and delegate the file part.
        @memcpy(w.buffer[w.end..][0..header.len], header);
        w.end = new_end;
        return header.len + try w.vtable.sendFile(w, file_reader, limit);
    }
    const buffered_contents = limit.slice(file_reader.interface.buffered());
    const n = try w.vtable.drain(w, &.{ header, buffered_contents }, 1);
    // NOTE(review): assumes the drain consumed at least `header.len` bytes of
    // the passed data; if a short drain can consume fewer, `n - header.len`
    // underflows — confirm against the `VTable.drain` contract.
    file_reader.interface.toss(n - header.len);
    return n;
}

/// Reads from the file directly into the writer's buffer, bypassing
/// `VTable.sendFile`.
///
/// Asserts nonzero buffer capacity.
pub fn sendFileReading(w: *Writer, file_reader: *File.Reader, limit: Limit) FileReadingError!usize {
    const dest = limit.slice(try w.writableSliceGreedy(1));
    const n = try file_reader.read(dest);
    w.advance(n);
    return n;
}

/// Sends file contents until `limit` or end of stream, falling back to plain
/// reads when the vtable reports `error.Unimplemented`.
///
/// Number of bytes logically written is returned. This excludes bytes from
/// `buffer` because they have already been logically written.
pub fn sendFileAll(w: *Writer, file_reader: *File.Reader, limit: Limit) FileAllError!usize {
    var remaining = @intFromEnum(limit);
    while (remaining > 0) {
        const n = sendFile(w, file_reader, .limited(remaining)) catch |err| switch (err) {
            error.EndOfStream => break,
            error.Unimplemented => {
                file_reader.mode = file_reader.mode.toReading();
                remaining -= try w.sendFileReadingAll(file_reader, .limited(remaining));
                break;
            },
            else => |e| return e,
        };
        remaining -= n;
    }
    return @intFromEnum(limit) - remaining;
}

/// Equivalent to `sendFileAll` but uses direct `pread` and `read` calls on
/// `file` rather than `sendFile`. This is generally used as a fallback when
/// the underlying implementation returns `error.Unimplemented`, which is why
/// that error code does not appear in this function's error set.
///
/// Asserts nonzero buffer capacity.
pub fn sendFileReadingAll(w: *Writer, file_reader: *File.Reader, limit: Limit) FileAllError!usize {
    var remaining = @intFromEnum(limit);
    while (remaining > 0) {
        remaining -= sendFileReading(w, file_reader, .limited(remaining)) catch |err| switch (err) {
            error.EndOfStream => break,
            else => |e| return e,
        };
    }
    return @intFromEnum(limit) - remaining;
}

/// Writes `buffer`, padded with `fill` to at least `width` bytes according to
/// `alignment`.
pub fn alignBuffer(
    w: *Writer,
    buffer: []const u8,
    width: usize,
    alignment: std.fmt.Alignment,
    fill: u8,
) Error!void {
    const padding = if (buffer.len < width) width - buffer.len else 0;
    if (padding == 0) {
        @branchHint(.likely);
        return w.writeAll(buffer);
    }
    switch (alignment) {
        .left => {
            try w.writeAll(buffer);
            try w.splatByteAll(fill, padding);
        },
        .center => {
            // Odd padding goes on the right.
            const left_padding = padding / 2;
            const right_padding = (padding + 1) / 2;
            try w.splatByteAll(fill, left_padding);
            try w.writeAll(buffer);
            try w.splatByteAll(fill, right_padding);
        },
        .right => {
            try w.splatByteAll(fill, padding);
            try w.writeAll(buffer);
        },
    }
}

/// Convenience wrapper around `alignBuffer` taking `std.fmt.Options`.
pub fn alignBufferOptions(w: *Writer, buffer: []const u8, options: std.fmt.Options) Error!void {
    return w.alignBuffer(buffer, options.width orelse buffer.len, options.alignment, options.fill);
}

/// Implements the `{*}` specifier: writes `TypeName@hexaddress`.
pub fn printAddress(w: *Writer, value: anytype) Error!void {
    const T = @TypeOf(value);
    switch (@typeInfo(T)) {
        .pointer => |info| {
            try w.writeAll(@typeName(info.child) ++ "@");
            const int = if (info.size == .slice) @intFromPtr(value.ptr) else @intFromPtr(value);
            return w.printInt(int, 16, .lower, .{});
        },
        .optional => |info| {
            if (@typeInfo(info.child) == .pointer) {
                try w.writeAll(@typeName(info.child) ++ "@");
                try w.printInt(@intFromPtr(value), 16, .lower, .{});
                return;
            }
        },
        else => {},
    }

    @compileError("cannot format non-pointer type " ++ @typeName(T) ++ " with * specifier");
}

/// Dispatches `value` to the appropriate printing routine based on the
/// comptime format specifier `fmt` and the value's type. `max_depth` bounds
/// recursion into aggregates; deeper levels print as `.{ ... }`.
pub fn printValue(
    w: *Writer,
    comptime fmt: []const u8,
    options: std.fmt.Options,
    value: anytype,
    max_depth: usize,
) Error!void {
    const T = @TypeOf(value);

    switch (fmt.len) {
        1 => switch (fmt[0]) {
            '*' => return w.printAddress(value),
            'f' => return value.format(w),
            'd' => switch (@typeInfo(T)) {
                .float, .comptime_float => return printFloat(w, value, options.toNumber(.decimal, .lower)),
                .int, .comptime_int => return printInt(w, value, 10, .lower, options),
                .@"struct" => return value.formatNumber(w, options.toNumber(.decimal, .lower)),
                .@"enum" => return printInt(w, @intFromEnum(value), 10, .lower, options),
                .vector => return printVector(w, fmt, options, value, max_depth),
                else => invalidFmtError(fmt, value),
            },
            'c' => return w.printAsciiChar(value, options),
            'u' => return w.printUnicodeCodepoint(value),
            'b' => switch (@typeInfo(T)) {
                .int, .comptime_int => return printInt(w, value, 2, .lower, options),
                .@"enum" => return printInt(w, @intFromEnum(value), 2, .lower, options),
                .@"struct" => return value.formatNumber(w, options.toNumber(.binary, .lower)),
                .vector => return printVector(w, fmt, options, value, max_depth),
                else => invalidFmtError(fmt, value),
            },
            'o' => switch (@typeInfo(T)) {
                .int, .comptime_int => return printInt(w, value, 8, .lower, options),
                .@"enum" => return printInt(w, @intFromEnum(value), 8, .lower, options),
                .@"struct" => return value.formatNumber(w, options.toNumber(.octal, .lower)),
                .vector => return printVector(w, fmt, options, value, max_depth),
                else => invalidFmtError(fmt, value),
            },
            'x' => switch (@typeInfo(T)) {
                .float, .comptime_float => return printFloatHexOptions(w, value, options.toNumber(.hex, .lower)),
                .int, .comptime_int => return printInt(w, value, 16, .lower, options),
                .@"enum" => return printInt(w, @intFromEnum(value), 16, .lower, options),
                .@"struct" => return value.formatNumber(w, options.toNumber(.hex, .lower)),
                .pointer => |info| switch (info.size) {
                    .one, .slice => {
                        const slice: []const u8 = value;
                        optionsForbidden(options);
                        return printHex(w, slice, .lower);
                    },
                    .many, .c => {
                        const slice: [:0]const u8 = std.mem.span(value);
                        optionsForbidden(options);
                        return printHex(w, slice, .lower);
                    },
                },
                .array => {
                    const slice: []const u8 = &value;
                    optionsForbidden(options);
                    return printHex(w, slice, .lower);
                },
                .vector => return printVector(w, fmt, options, value, max_depth),
                else => invalidFmtError(fmt, value),
            },
            'X' => switch (@typeInfo(T)) {
                // Fix: floats formatted with `{X}` use uppercase digits,
                // consistent with the integer and struct branches below
                // (previously passed `.lower`).
                .float, .comptime_float => return printFloatHexOptions(w, value, options.toNumber(.hex, .upper)),
                .int, .comptime_int => return printInt(w, value, 16, .upper, options),
                .@"enum" => return printInt(w, @intFromEnum(value), 16, .upper, options),
                .@"struct" => return value.formatNumber(w, options.toNumber(.hex, .upper)),
                .pointer => |info| switch (info.size) {
                    .one, .slice => {
                        const slice: []const u8 = value;
                        optionsForbidden(options);
                        return printHex(w, slice, .upper);
                    },
                    .many, .c => {
                        const slice: [:0]const u8 = std.mem.span(value);
                        optionsForbidden(options);
                        return printHex(w, slice, .upper);
                    },
                },
                .array => {
                    const slice: []const u8 = &value;
                    optionsForbidden(options);
                    return printHex(w, slice, .upper);
                },
                .vector => return printVector(w, fmt, options, value, max_depth),
                else => invalidFmtError(fmt, value),
            },
            's' => switch (@typeInfo(T)) {
                .pointer => |info| switch (info.size) {
                    .one, .slice => {
                        const slice: []const u8 = value;
                        return w.alignBufferOptions(slice, options);
                    },
                    .many, .c => {
                        const slice: [:0]const u8 = std.mem.span(value);
                        return w.alignBufferOptions(slice, options);
                    },
                },
                .array => {
                    const slice: []const u8 = &value;
                    return w.alignBufferOptions(slice, options);
                },
                else => invalidFmtError(fmt, value),
            },
            'B' => switch (@typeInfo(T)) {
                .int, .comptime_int => return w.printByteSize(value, .decimal, options),
                .@"struct" => return value.formatByteSize(w, .decimal),
                else => invalidFmtError(fmt, value),
            },
            'D' => switch (@typeInfo(T)) {
                .int, .comptime_int => return w.printDuration(value, options),
                .@"struct" => return value.formatDuration(w),
                else => invalidFmtError(fmt, value),
            },
            'e' => switch (@typeInfo(T)) {
                .float, .comptime_float => return printFloat(w, value, options.toNumber(.scientific, .lower)),
                .@"struct" => return value.formatNumber(w, options.toNumber(.scientific, .lower)),
                else => invalidFmtError(fmt, value),
            },
            'E' => switch (@typeInfo(T)) {
                .float, .comptime_float => return printFloat(w, value, options.toNumber(.scientific, .upper)),
                .@"struct" => return value.formatNumber(w, options.toNumber(.scientific, .upper)),
                else => invalidFmtError(fmt, value),
            },
            't' => switch (@typeInfo(T)) {
                .error_set => return w.writeAll(@errorName(value)),
                .@"enum", .@"union" => return w.writeAll(@tagName(value)),
                else => invalidFmtError(fmt, value),
            },
            else => {},
        },
        2 => switch (fmt[0]) {
            'B' => switch (fmt[1]) {
                'i' => switch (@typeInfo(T)) {
                    .int, .comptime_int => return w.printByteSize(value, .binary, options),
                    .@"struct" => return value.formatByteSize(w, .binary),
                    else => invalidFmtError(fmt, value),
                },
                else => {},
            },
            else => {},
        },
        3 => if (fmt[0] == 'b' and fmt[1] == '6' and fmt[2] == '4') switch (@typeInfo(T)) {
            .pointer => |info| switch (info.size) {
                .one, .slice => {
                    const slice: []const u8 = value;
                    optionsForbidden(options);
                    return w.printBase64(slice);
                },
                .many, .c => {
                    const slice: [:0]const u8 = std.mem.span(value);
                    optionsForbidden(options);
                    return w.printBase64(slice);
                },
            },
            .array => {
                const slice: []const u8 = &value;
                optionsForbidden(options);
                return w.printBase64(slice);
            },
            else => invalidFmtError(fmt, value),
        },
        else => {},
    }

    const is_any = comptime std.mem.eql(u8, fmt, ANY);
    if (!is_any and std.meta.hasMethod(T, "format") and fmt.len == 0) {
        // after 0.15.0 is tagged, delete this compile error and its condition
        @compileError("ambiguous format string; specify {f} to call format method, or {any} to skip it");
    }

    switch (@typeInfo(T)) {
        .float, .comptime_float => {
            if (!is_any and fmt.len != 0) invalidFmtError(fmt, value);
            return printFloat(w, value, options.toNumber(.decimal, .lower));
        },
        .int, .comptime_int => {
            if (!is_any and fmt.len != 0) invalidFmtError(fmt, value);
            return printInt(w, value, 10, .lower, options);
        },
        .bool => {
            if (!is_any and fmt.len != 0) invalidFmtError(fmt, value);
            const string: []const u8 = if (value) "true" else "false";
            return w.alignBufferOptions(string, options);
        },
        .void => {
            if (!is_any and fmt.len != 0) invalidFmtError(fmt, value);
            return w.alignBufferOptions("void", options);
        },
        .optional => {
            const remaining_fmt = comptime if (fmt.len > 0 and fmt[0] == '?')
                stripOptionalOrErrorUnionSpec(fmt)
            else if (is_any)
                ANY
            else
                @compileError("cannot print optional without a specifier (i.e. {?} or {any})");
            if (value) |payload| {
                return w.printValue(remaining_fmt, options, payload, max_depth);
            } else {
                return w.alignBufferOptions("null", options);
            }
        },
        .error_union => {
            const remaining_fmt = comptime if (fmt.len > 0 and fmt[0] == '!')
                stripOptionalOrErrorUnionSpec(fmt)
            else if (is_any)
                ANY
            else
                @compileError("cannot print error union without a specifier (i.e. {!} or {any})");
            if (value) |payload| {
                return w.printValue(remaining_fmt, options, payload, max_depth);
            } else |err| {
                return w.printValue("", options, err, max_depth);
            }
        },
        .error_set => {
            if (!is_any and fmt.len != 0) invalidFmtError(fmt, value);
            optionsForbidden(options);
            return printErrorSet(w, value);
        },
        .@"enum" => |info| {
            if (!is_any and fmt.len != 0) invalidFmtError(fmt, value);
            optionsForbidden(options);
            if (info.is_exhaustive) {
                return printEnumExhaustive(w, value);
            } else {
                return printEnumNonexhaustive(w, value);
            }
        },
        .@"union" => |info| {
            if (!is_any) {
                if (fmt.len != 0) invalidFmtError(fmt, value);
                return printValue(w, ANY, options, value, max_depth);
            }
            if (max_depth == 0) {
                try w.writeAll(".{ ... }");
                return;
            }
            if (info.tag_type) |UnionTagType| {
                try w.writeAll(".{ .");
                try w.writeAll(@tagName(@as(UnionTagType, value)));
                try w.writeAll(" = ");
                inline for (info.fields) |u_field| {
                    if (value == @field(UnionTagType, u_field.name)) {
                        try w.printValue(ANY, options, @field(value, u_field.name), max_depth - 1);
                    }
                }
                try w.writeAll(" }");
            } else switch (info.layout) {
                .auto => {
                    return w.writeAll(".{ ... }");
                },
                .@"extern", .@"packed" => {
                    if (info.fields.len == 0) return w.writeAll(".{}");
                    try w.writeAll(".{ ");
                    inline for (info.fields) |field| {
                        try w.writeByte('.');
                        try w.writeAll(field.name);
                        try w.writeAll(" = ");
                        try w.printValue(ANY, options, @field(value, field.name), max_depth - 1);
                        (try w.writableArray(2)).* = ", ".*;
                    }
                    // Overwrite the trailing ", " with " }".
                    w.buffer[w.end - 2 ..][0..2].* = " }".*;
                },
            }
        },
        .@"struct" => |info| {
            if (!is_any) {
                if (fmt.len != 0) invalidFmtError(fmt, value);
                return printValue(w, ANY, options, value, max_depth);
            }
            if (info.is_tuple) {
                // Skip the type and field names when formatting tuples.
                if (max_depth == 0) {
                    try w.writeAll(".{ ... }");
                    return;
                }
                try w.writeAll(".{");
                inline for (info.fields, 0..) |f, i| {
                    if (i == 0) {
                        try w.writeAll(" ");
                    } else {
                        try w.writeAll(", ");
                    }
                    try w.printValue(ANY, options, @field(value, f.name), max_depth - 1);
                }
                try w.writeAll(" }");
                return;
            }
            if (max_depth == 0) {
                try w.writeAll(".{ ... }");
                return;
            }
            try w.writeAll(".{");
            inline for (info.fields, 0..) |f, i| {
                if (i == 0) {
                    try w.writeAll(" .");
                } else {
                    try w.writeAll(", .");
                }
                try w.writeAll(f.name);
                try w.writeAll(" = ");
                try w.printValue(ANY, options, @field(value, f.name), max_depth - 1);
            }
            try w.writeAll(" }");
        },
        .pointer => |ptr_info| switch (ptr_info.size) {
            .one => switch (@typeInfo(ptr_info.child)) {
                .array => |array_info| return w.printValue(fmt, options, @as([]const array_info.child, value), max_depth),
                .@"enum", .@"union", .@"struct" => return w.printValue(fmt, options, value.*, max_depth),
                else => {
                    var buffers: [2][]const u8 = .{ @typeName(ptr_info.child), "@" };
                    try w.writeVecAll(&buffers);
                    try w.printInt(@intFromPtr(value), 16, .lower, options);
                    return;
                },
            },
            .many, .c => {
                if (!is_any) @compileError("cannot format pointer without a specifier (i.e. {s} or {*})");
                optionsForbidden(options);
                try w.printAddress(value);
            },
            .slice => {
                if (!is_any)
                    @compileError("cannot format slice without a specifier (i.e. {s}, {x}, {b64}, or {any})");
                if (max_depth == 0) return w.writeAll("{ ... }");
                try w.writeAll("{ ");
                for (value, 0..) |elem, i| {
                    try w.printValue(fmt, options, elem, max_depth - 1);
                    if (i != value.len - 1) {
                        try w.writeAll(", ");
                    }
                }
                try w.writeAll(" }");
            },
        },
        .array => {
            if (!is_any) @compileError("cannot format array without a specifier (i.e. {s} or {any})");
            if (max_depth == 0) return w.writeAll("{ ... }");
            try w.writeAll("{ ");
            for (value, 0..) |elem, i| {
                try w.printValue(fmt, options, elem, max_depth - 1);
                if (i < value.len - 1) {
                    try w.writeAll(", ");
                }
            }
            try w.writeAll(" }");
        },
        .vector => {
            if (!is_any and fmt.len != 0) invalidFmtError(fmt, value);
            return printVector(w, fmt, options, value, max_depth);
        },
        .@"fn" => @compileError("unable to format function body type, use '*const " ++ @typeName(T) ++ "' for a function pointer type"),
        .type => {
            if (!is_any and fmt.len != 0) invalidFmtError(fmt, value);
            return w.alignBufferOptions(@typeName(value), options);
        },
        .enum_literal => {
            if (!is_any and fmt.len != 0) invalidFmtError(fmt, value);
            optionsForbidden(options);
            var vecs: [2][]const u8 = .{ ".", @tagName(value) };
            return w.writeVecAll(&vecs);
        },
        .null => {
            if (!is_any and fmt.len != 0) invalidFmtError(fmt, value);
            return w.alignBufferOptions("null", options);
        },
        else => @compileError("unable to format type '" ++ @typeName(T) ++ "'"),
    }
}

/// Asserts that width/precision options are unset; used by specifiers that do
/// not support alignment or precision.
fn optionsForbidden(options: std.fmt.Options) void {
    assert(options.precision == null);
    assert(options.width == null);
}

/// Writes `error.Name`.
fn printErrorSet(w: *Writer, error_set: anyerror) Error!void {
    var vecs: [2][]const u8 = .{ "error.", @errorName(error_set) };
    try w.writeVecAll(&vecs);
}

/// Writes `.tag_name` for an exhaustive enum.
fn printEnumExhaustive(w: *Writer, value: anytype) Error!void {
    var vecs: [2][]const u8 = .{ ".", @tagName(value) };
    try w.writeVecAll(&vecs);
}

/// Writes `.tag_name` when the value is a named tag, otherwise
/// `@enumFromInt(N)`.
fn printEnumNonexhaustive(w: *Writer, value: anytype) Error!void {
    if (std.enums.tagName(@TypeOf(value), value)) |tag_name| {
        var vecs: [2][]const u8 = .{ ".", tag_name };
        try w.writeVecAll(&vecs);
        return;
    }
    try w.writeAll("@enumFromInt(");
    try w.printInt(@intFromEnum(value), 10, .lower, .{});
    try w.writeByte(')');
}

/// Formats each element of a vector as `{ a, b, c }`.
pub fn printVector(
    w: *Writer,
    comptime fmt: []const u8,
    options: std.fmt.Options,
    value: anytype,
    max_depth: usize,
) Error!void {
    const len = @typeInfo(@TypeOf(value)).vector.len;
    if (max_depth == 0) return w.writeAll("{ ... }");
    try w.writeAll("{ ");
    inline for (0..len) |i| {
        try w.printValue(fmt, options, value[i], max_depth - 1);
        if (i < len - 1) try w.writeAll(", ");
    }
    try w.writeAll(" }");
}

// A wrapper around `printIntAny` to avoid the generic explosion of this
// function by funneling smaller integer types through `isize` and `usize`.
pub inline fn printInt(
    w: *Writer,
    value: anytype,
    base: u8,
    case: std.fmt.Case,
    options: std.fmt.Options,
) Error!void {
    switch (@TypeOf(value)) {
        isize, usize => {},
        comptime_int => {
            if (comptime std.math.cast(usize, value)) |x| return printIntAny(w, x, base, case, options);
            if (comptime std.math.cast(isize, value)) |x| return printIntAny(w, x, base, case, options);
            const Int = std.math.IntFittingRange(value, value);
            return printIntAny(w, @as(Int, value), base, case, options);
        },
        else => switch (@typeInfo(@TypeOf(value)).int.signedness) {
            .signed => if (std.math.cast(isize, value)) |x| return printIntAny(w, x, base, case, options),
            .unsigned => if (std.math.cast(usize, value)) |x| return printIntAny(w, x, base, case, options),
        },
    }
    return printIntAny(w, value, base, case, options);
}
/// In general, prefer `printInt` to avoid generic explosion. However this
/// function may be used when optimal codegen for a particular integer type is
/// desired.
pub fn printIntAny(
    w: *Writer,
    value: anytype,
    base: u8,
    case: std.fmt.Case,
    options: std.fmt.Options,
) Error!void {
    assert(base >= 2);
    const value_info = @typeInfo(@TypeOf(value)).int;

    // The type must have the same size as `base` or be wider in order for the
    // division to work
    const min_int_bits = comptime @max(value_info.bits, 8);
    const MinInt = std.meta.Int(.unsigned, min_int_bits);

    const abs_value = @abs(value);
    // The worst case in terms of space needed is base 2, plus 1 for the sign
    var buf: [1 + @max(@as(comptime_int, value_info.bits), 1)]u8 = undefined;

    // Digits are emitted right-to-left starting from the end of `buf`.
    var a: MinInt = abs_value;
    var index: usize = buf.len;

    if (base == 10) {
        // Emit two decimal digits per step to halve the number of divisions.
        while (a >= 100) : (a = @divTrunc(a, 100)) {
            index -= 2;
            buf[index..][0..2].* = std.fmt.digits2(@intCast(a % 100));
        }

        if (a < 10) {
            index -= 1;
            buf[index] = '0' + @as(u8, @intCast(a));
        } else {
            index -= 2;
            buf[index..][0..2].* = std.fmt.digits2(@intCast(a));
        }
    } else {
        // Generic one-digit-at-a-time loop for any base >= 2.
        while (true) {
            const digit = a % base;
            index -= 1;
            buf[index] = std.fmt.digitToChar(@intCast(digit), case);
            a /= base;
            if (a == 0) break;
        }
    }

    if (value_info.signedness == .signed) {
        if (value < 0) {
            // Negative integer
            index -= 1;
            buf[index] = '-';
        } else if (options.width == null or options.width.? == 0) {
            // Positive integer, omit the plus sign
        } else {
            // Positive integer
            index -= 1;
            buf[index] = '+';
        }
    }

    return w.alignBufferOptions(buf[index..], options);
}

/// Formats a single byte as a character, honoring width/alignment options.
pub fn printAsciiChar(w: *Writer, c: u8, options: std.fmt.Options) Error!void {
    return w.alignBufferOptions(@as(*const [1]u8, &c), options);
}

/// Writes `bytes` verbatim, honoring width/alignment options.
pub fn printAscii(w: *Writer, bytes: []const u8, options: std.fmt.Options) Error!void {
    return w.alignBufferOptions(bytes, options);
}

/// Encodes `c` as UTF-8 and writes it. Invalid codepoints (surrogate halves,
/// values above U+10FFFF) are replaced with U+FFFD.
pub fn printUnicodeCodepoint(w: *Writer, c: u21) Error!void {
    var buf: [4]u8 = undefined;
    const len = std.unicode.utf8Encode(c, &buf) catch |err| switch (err) {
        error.Utf8CannotEncodeSurrogateHalf, error.CodepointTooLarge => l: {
            // The replacement character U+FFFD is 3 bytes in UTF-8.
            buf[0..3].* = std.unicode.replacement_character_utf8;
            break :l 3;
        },
    };
    return w.writeAll(buf[0..len]);
}

/// Uses a larger stack buffer; asserts mode is decimal or scientific.
pub fn printFloat(w: *Writer, value: anytype, options: std.fmt.Number) Error!void {
    const mode: std.fmt.float.Mode = switch (options.mode) {
        .decimal => .decimal,
        .scientific => .scientific,
        .binary, .octal, .hex => unreachable,
    };
    var buf: [std.fmt.float.bufferSize(.decimal, f64)]u8 = undefined;
    const s = std.fmt.float.render(&buf, value, .{
        .mode = mode,
        .precision = options.precision,
    }) catch |err| switch (err) {
        // NOTE(review): a render that does not fit is silently replaced by
        // the placeholder string — TODO confirm this is the intended fallback.
        error.BufferTooSmall => "(float)",
    };
    return w.alignBuffer(s, options.width orelse s.len, options.alignment, options.fill);
}
/// Uses a smaller stack buffer; asserts mode is not decimal or scientific.
pub fn printFloatHexOptions(w: *Writer, value: anytype, options: std.fmt.Number) Error!void {
    var buf: [50]u8 = undefined; // for aligning
    var sub_writer: Writer = .fixed(&buf);
    switch (options.mode) {
        .decimal => unreachable,
        .scientific => unreachable,
        .binary => @panic("TODO"),
        .octal => @panic("TODO"),
        .hex => {},
    }
    printFloatHex(&sub_writer, value, options.case, options.precision) catch unreachable; // buf is large enough

    const printed = sub_writer.buffered();
    return w.alignBuffer(printed, options.width orelse printed.len, options.alignment, options.fill);
}

/// Writes `value` in hexadecimal floating-point notation (`0x1.8p3` style),
/// rounding the mantissa to `opt_precision` hex digits when provided.
pub fn printFloatHex(w: *Writer, value: anytype, case: std.fmt.Case, opt_precision: ?usize) Error!void {
    if (std.math.signbit(value)) try w.writeByte('-');
    if (std.math.isNan(value)) return w.writeAll(switch (case) {
        .lower => "nan",
        .upper => "NAN",
    });
    if (std.math.isInf(value)) return w.writeAll(switch (case) {
        .lower => "inf",
        .upper => "INF",
    });

    const T = @TypeOf(value);
    const TU = std.meta.Int(.unsigned, @bitSizeOf(T));

    const mantissa_bits = std.math.floatMantissaBits(T);
    const fractional_bits = std.math.floatFractionalBits(T);
    const exponent_bits = std.math.floatExponentBits(T);
    const mantissa_mask = (1 << mantissa_bits) - 1;
    const exponent_mask = (1 << exponent_bits) - 1;
    const exponent_bias = (1 << (exponent_bits - 1)) - 1;

    // Decompose the IEEE-754 bit pattern into mantissa and biased exponent.
    const as_bits: TU = @bitCast(value);
    var mantissa = as_bits & mantissa_mask;
    var exponent: i32 = @as(u16, @truncate((as_bits >> mantissa_bits) & exponent_mask));

    const is_denormal = exponent == 0 and mantissa != 0;
    const is_zero = exponent == 0 and mantissa == 0;

    if (is_zero) {
        // Handle this case here to simplify the logic below.
        try w.writeAll("0x0");
        if (opt_precision) |precision| {
            if (precision > 0) {
                try w.writeAll(".");
                try w.splatByteAll('0', precision);
            }
        } else {
            try w.writeAll(".0");
        }
        try w.writeAll("p0");
        return;
    }

    if (is_denormal) {
        // Adjust the exponent for printing.
        exponent += 1;
    } else {
        if (fractional_bits == mantissa_bits)
            mantissa |= 1 << fractional_bits; // Add the implicit integer bit.
    }

    const mantissa_digits = (fractional_bits + 3) / 4;
    // Fill in zeroes to round the fraction width to a multiple of 4.
    mantissa <<= mantissa_digits * 4 - fractional_bits;

    if (opt_precision) |precision| {
        // Round if needed.
        if (precision < mantissa_digits) {
            // We always have at least 4 extra bits.
            var extra_bits = (mantissa_digits - precision) * 4;
            // The result LSB is the Guard bit, we need two more (Round and
            // Sticky) to round the value.
            while (extra_bits > 2) {
                mantissa = (mantissa >> 1) | (mantissa & 1);
                extra_bits -= 1;
            }
            // Round to nearest, tie to even.
            mantissa |= @intFromBool(mantissa & 0b100 != 0);
            mantissa += 1;
            // Drop the excess bits.
            mantissa >>= 2;
            // Restore the alignment.
            mantissa <<= @as(std.math.Log2Int(TU), @intCast((mantissa_digits - precision) * 4));

            const overflow = mantissa & (1 << 1 + mantissa_digits * 4) != 0;
            // Prefer a normalized result in case of overflow.
            if (overflow) {
                mantissa >>= 1;
                exponent += 1;
            }
        }
    }

    // +1 for the decimal part.
    var buf: [1 + mantissa_digits]u8 = undefined;
    assert(std.fmt.printInt(&buf, mantissa, 16, case, .{ .fill = '0', .width = 1 + mantissa_digits }) == buf.len);

    try w.writeAll("0x");
    try w.writeByte(buf[0]);
    // Trailing zero fraction digits are trimmed unless a precision is given.
    const trimmed = std.mem.trimRight(u8, buf[1..], "0");
    if (opt_precision) |precision| {
        if (precision > 0) try w.writeAll(".");
    } else if (trimmed.len > 0) {
        try w.writeAll(".");
    }
    try w.writeAll(trimmed);
    // Add trailing zeros if explicitly requested.
    if (opt_precision) |precision| if (precision > 0) {
        if (precision > trimmed.len)
            try w.splatByteAll('0', precision - trimmed.len);
    };
    try w.writeAll("p");
    try w.printInt(exponent - exponent_bias, 10, case, .{});
}

pub const ByteSizeUnits = enum {
    /// This formatter represents the number as multiple of 1000 and uses the SI
    /// measurement units (kB, MB, GB, ...).
    decimal,
    /// This formatter represents the number as multiple of 1024 and uses the IEC
    /// measurement units (KiB, MiB, GiB, ...).
    binary,
};

/// Prints `value` as a human-readable byte size (e.g. "1.2MiB").
///
/// Format option `precision` is ignored when `value` is less than 1kB
pub fn printByteSize(
    w: *std.io.Writer,
    value: u64,
    comptime units: ByteSizeUnits,
    options: std.fmt.Options,
) Error!void {
    if (value == 0) return w.alignBufferOptions("0B", options);
    // The worst case in terms of space needed is 32 bytes + 3 for the suffix.
    var buf: [std.fmt.float.min_buffer_size + 3]u8 = undefined;

    const mags_si = " kMGTPEZY";
    const mags_iec = " KMGTPEZY";

    const log2 = std.math.log2(value);
    const base = switch (units) {
        .decimal => 1000,
        .binary => 1024,
    };
    const magnitude = switch (units) {
        .decimal => @min(log2 / comptime std.math.log2(1000), mags_si.len - 1),
        .binary => @min(log2 / 10, mags_iec.len - 1),
    };
    const new_value = std.math.lossyCast(f64, value) / std.math.pow(f64, std.math.lossyCast(f64, base), std.math.lossyCast(f64, magnitude));
    const suffix = switch (units) {
        .decimal => mags_si[magnitude],
        .binary => mags_iec[magnitude],
    };

    // Magnitude 0 prints the exact integer; larger magnitudes print the
    // scaled floating-point value.
    const s = switch (magnitude) {
        0 => buf[0..std.fmt.printInt(&buf, value, 10, .lower, .{})],
        else => std.fmt.float.render(&buf, new_value, .{ .mode = .decimal, .precision = options.precision }) catch |err| switch (err) {
            error.BufferTooSmall => unreachable,
        },
    };

    // Append the unit suffix directly after the rendered number.
    var i: usize = s.len;
    if (suffix == ' ') {
        buf[i] = 'B';
        i += 1;
    } else switch (units) {
        .decimal => {
            buf[i..][0..2].* = [_]u8{ suffix, 'B' };
            i += 2;
        },
        .binary => {
            buf[i..][0..3].* = [_]u8{ suffix, 'i', 'B' };
            i += 3;
        },
    }

    return w.alignBufferOptions(buf[0..i], options);
}

// This ANY const is a workaround for: https://github.com/ziglang/zig/issues/7948
const ANY = "any";

/// Drops the leading `?` or `!` from a specifier, normalizing `any` spellings.
fn stripOptionalOrErrorUnionSpec(comptime fmt: []const u8) []const u8 {
    return if (std.mem.eql(u8, fmt[1..], ANY))
        ANY
    else
        fmt[1..];
}

/// Emits a compile error for a format specifier that does not apply to the
/// value's type.
pub fn invalidFmtError(comptime fmt: []const u8, value: anytype) noreturn {
    @compileError("invalid format string '" ++ fmt ++ "' for type '" ++ @typeName(@TypeOf(value)) ++ "'");
}

/// Like `printDurationUnsigned` but writes a leading '-' for negative input.
pub fn printDurationSigned(w: *Writer, ns: i64) Error!void {
    if (ns < 0) try w.writeByte('-');
    return w.printDurationUnsigned(@abs(ns));
}

/// Writes `ns` nanoseconds as `[#y][#w][#d][#h][#m]#[.###][n|u|m]s`.
pub fn printDurationUnsigned(w: *Writer, ns: u64) Error!void {
    var ns_remaining = ns;
    // Whole units larger than a second, largest first.
    inline for (.{
        .{ .ns = 365 * std.time.ns_per_day, .sep = 'y' },
        .{ .ns = std.time.ns_per_week, .sep = 'w' },
        .{ .ns = std.time.ns_per_day, .sep = 'd' },
        .{ .ns = std.time.ns_per_hour, .sep = 'h' },
        .{ .ns = std.time.ns_per_min, .sep = 'm' },
    }) |unit| {
        if (ns_remaining >= unit.ns) {
            const units = ns_remaining / unit.ns;
            try w.printInt(units, 10, .lower, .{});
            try w.writeByte(unit.sep);
            ns_remaining -= units * unit.ns;
            if (ns_remaining == 0) return;
        }
    }

    // Sub-minute remainder with up to three decimal places.
    inline for (.{
        .{ .ns = std.time.ns_per_s, .sep = "s" },
        .{ .ns = std.time.ns_per_ms, .sep = "ms" },
        .{ .ns = std.time.ns_per_us, .sep = "us" },
    }) |unit| {
        const kunits = ns_remaining * 1000 / unit.ns;
        if (kunits >= 1000) {
            try w.printInt(kunits / 1000, 10, .lower, .{});
            const frac = kunits % 1000;
            if (frac > 0) {
                // Write up to 3 decimal places
                var decimal_buf = [_]u8{ '.', 0, 0, 0 };
                var inner: Writer = .fixed(decimal_buf[1..]);
                inner.printInt(frac, 10, .lower, .{ .fill = '0', .width = 3 }) catch unreachable;
                // Trim trailing zeros from the fractional part.
                var end: usize = 4;
                while (end > 1) : (end -= 1) {
                    if (decimal_buf[end - 1] != '0') break;
                }
                try w.writeAll(decimal_buf[0..end]);
            }
            return w.writeAll(unit.sep);
        }
    }

    try w.printInt(ns_remaining, 10, .lower, .{});
    try w.writeAll("ns");
}

/// Writes number of nanoseconds according to its signed magnitude:
/// `[#y][#w][#d][#h][#m]#[.###][n|u|m]s`
/// `nanoseconds` must be an integer that coerces into `u64` or `i64`.
pub fn printDuration(w: *Writer, nanoseconds: anytype, options: std.fmt.Options) Error!void {
    // worst case: "-XXXyXXwXXdXXhXXmXX.XXXs".len = 24
    var buf: [24]u8 = undefined;
    var sub_writer: Writer = .fixed(&buf);
    if (@TypeOf(nanoseconds) == comptime_int) {
        if (nanoseconds >= 0) {
            sub_writer.printDurationUnsigned(nanoseconds) catch unreachable;
        } else {
            sub_writer.printDurationSigned(nanoseconds) catch unreachable;
        }
    } else switch (@typeInfo(@TypeOf(nanoseconds)).int.signedness) {
        .signed => sub_writer.printDurationSigned(nanoseconds) catch unreachable,
        .unsigned => sub_writer.printDurationUnsigned(nanoseconds) catch unreachable,
    }
    return w.alignBufferOptions(sub_writer.buffered(), options);
}

/// Writes each byte of `bytes` as two hex digits.
pub fn printHex(w: *Writer, bytes: []const u8, case: std.fmt.Case) Error!void {
    const charset = switch (case) {
        .upper => "0123456789ABCDEF",
        .lower => "0123456789abcdef",
    };
    for (bytes) |c| {
        try w.writeByte(charset[c >> 4]);
        try w.writeByte(charset[c & 15]);
    }
}

/// Writes `bytes` encoded with standard base64, three input bytes at a time.
pub fn printBase64(w: *Writer, bytes: []const u8) Error!void {
    var chunker = std.mem.window(u8, bytes, 3, 3);
    while (chunker.next()) |chunk| {
        var temp: [5]u8 = undefined;
        try w.writeAll(std.base64.standard.Encoder.encode(&temp, chunk));
    }
}

/// Write a single unsigned integer as LEB128 to the given writer.
+pub fn writeUleb128(w: *Writer, value: anytype) Error!void { + try w.writeLeb128(switch (@typeInfo(@TypeOf(value))) { + .comptime_int => @as(std.math.IntFittingRange(0, @abs(value)), value), + .int => |value_info| switch (value_info.signedness) { + .signed => @as(@Type(.{ .int = .{ .signedness = .unsigned, .bits = value_info.bits -| 1 } }), @intCast(value)), + .unsigned => value, + }, + else => comptime unreachable, + }); +} + +/// Write a single signed integer as LEB128 to the given writer. +pub fn writeSleb128(w: *Writer, value: anytype) Error!void { + try w.writeLeb128(switch (@typeInfo(@TypeOf(value))) { + .comptime_int => @as(std.math.IntFittingRange(@min(value, -1), @max(0, value)), value), + .int => |value_info| switch (value_info.signedness) { + .signed => value, + .unsigned => @as(@Type(.{ .int = .{ .signedness = .signed, .bits = value_info.bits + 1 } }), value), + }, + else => comptime unreachable, + }); +} + +/// Write a single integer as LEB128 to the given writer. +pub fn writeLeb128(w: *Writer, value: anytype) Error!void { + const value_info = @typeInfo(@TypeOf(value)).int; + try w.writeMultipleOf7Leb128(@as(@Type(.{ .int = .{ + .signedness = value_info.signedness, + .bits = std.mem.alignForwardAnyAlign(u16, value_info.bits, 7), + } }), value)); +} + +fn writeMultipleOf7Leb128(w: *Writer, value: anytype) Error!void { + const value_info = @typeInfo(@TypeOf(value)).int; + comptime assert(value_info.bits % 7 == 0); + var remaining = value; + while (true) { + const buffer: []packed struct(u8) { bits: u7, more: bool } = @ptrCast(try w.writableSliceGreedy(1)); + for (buffer, 1..) 
|*byte, len| { + const more = switch (value_info.signedness) { + .signed => remaining >> 6 != remaining >> (value_info.bits - 1), + .unsigned => remaining > std.math.maxInt(u7), + }; + byte.* = if (@inComptime()) @typeInfo(@TypeOf(buffer)).pointer.child{ + .bits = @bitCast(@as(@Type(.{ .int = .{ + .signedness = value_info.signedness, + .bits = 7, + } }), @truncate(remaining))), + .more = more, + } else .{ + .bits = @bitCast(@as(@Type(.{ .int = .{ + .signedness = value_info.signedness, + .bits = 7, + } }), @truncate(remaining))), + .more = more, + }; + if (value_info.bits > 7) remaining >>= 7; + if (!more) return w.advance(len); + } + w.advance(buffer.len); + } +} + +test "printValue max_depth" { + const Vec2 = struct { + const SelfType = @This(); + x: f32, + y: f32, + + pub fn format(self: SelfType, w: *Writer) Error!void { + return w.print("({d:.3},{d:.3})", .{ self.x, self.y }); + } + }; + const E = enum { + One, + Two, + Three, + }; + const TU = union(enum) { + const SelfType = @This(); + float: f32, + int: u32, + ptr: ?*SelfType, + }; + const S = struct { + const SelfType = @This(); + a: ?*SelfType, + tu: TU, + e: E, + vec: Vec2, + }; + + var inst = S{ + .a = null, + .tu = TU{ .ptr = null }, + .e = E.Two, + .vec = Vec2{ .x = 10.2, .y = 2.22 }, + }; + inst.a = &inst; + inst.tu.ptr = &inst.tu; + + var buf: [1000]u8 = undefined; + var w: Writer = .fixed(&buf); + try w.printValue("", .{}, inst, 0); + try testing.expectEqualStrings(".{ ... }", w.buffered()); + + w = .fixed(&buf); + try w.printValue("", .{}, inst, 1); + try testing.expectEqualStrings(".{ .a = .{ ... }, .tu = .{ ... }, .e = .Two, .vec = .{ ... } }", w.buffered()); + + w = .fixed(&buf); + try w.printValue("", .{}, inst, 2); + try testing.expectEqualStrings(".{ .a = .{ .a = .{ ... }, .tu = .{ ... }, .e = .Two, .vec = .{ ... } }, .tu = .{ .ptr = .{ ... 
} }, .e = .Two, .vec = .{ .x = 10.2, .y = 2.22 } }", w.buffered()); + + w = .fixed(&buf); + try w.printValue("", .{}, inst, 3); + try testing.expectEqualStrings(".{ .a = .{ .a = .{ .a = .{ ... }, .tu = .{ ... }, .e = .Two, .vec = .{ ... } }, .tu = .{ .ptr = .{ ... } }, .e = .Two, .vec = .{ .x = 10.2, .y = 2.22 } }, .tu = .{ .ptr = .{ .ptr = .{ ... } } }, .e = .Two, .vec = .{ .x = 10.2, .y = 2.22 } }", w.buffered()); + + const vec: @Vector(4, i32) = .{ 1, 2, 3, 4 }; + w = .fixed(&buf); + try w.printValue("", .{}, vec, 0); + try testing.expectEqualStrings("{ ... }", w.buffered()); + + w = .fixed(&buf); + try w.printValue("", .{}, vec, 1); + try testing.expectEqualStrings("{ 1, 2, 3, 4 }", w.buffered()); +} + +test printDuration { + try testDurationCase("0ns", 0); + try testDurationCase("1ns", 1); + try testDurationCase("999ns", std.time.ns_per_us - 1); + try testDurationCase("1us", std.time.ns_per_us); + try testDurationCase("1.45us", 1450); + try testDurationCase("1.5us", 3 * std.time.ns_per_us / 2); + try testDurationCase("14.5us", 14500); + try testDurationCase("145us", 145000); + try testDurationCase("999.999us", std.time.ns_per_ms - 1); + try testDurationCase("1ms", std.time.ns_per_ms + 1); + try testDurationCase("1.5ms", 3 * std.time.ns_per_ms / 2); + try testDurationCase("1.11ms", 1110000); + try testDurationCase("1.111ms", 1111000); + try testDurationCase("1.111ms", 1111100); + try testDurationCase("999.999ms", std.time.ns_per_s - 1); + try testDurationCase("1s", std.time.ns_per_s); + try testDurationCase("59.999s", std.time.ns_per_min - 1); + try testDurationCase("1m", std.time.ns_per_min); + try testDurationCase("1h", std.time.ns_per_hour); + try testDurationCase("1d", std.time.ns_per_day); + try testDurationCase("1w", std.time.ns_per_week); + try testDurationCase("1y", 365 * std.time.ns_per_day); + try testDurationCase("1y52w23h59m59.999s", 730 * std.time.ns_per_day - 1); // 365d = 52w1d + try testDurationCase("1y1h1.001s", 365 * std.time.ns_per_day + 
std.time.ns_per_hour + std.time.ns_per_s + std.time.ns_per_ms); + try testDurationCase("1y1h1s", 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_s + 999 * std.time.ns_per_us); + try testDurationCase("1y1h999.999us", 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms - 1); + try testDurationCase("1y1h1ms", 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms); + try testDurationCase("1y1h1ms", 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms + 1); + try testDurationCase("1y1m999ns", 365 * std.time.ns_per_day + std.time.ns_per_min + 999); + try testDurationCase("584y49w23h34m33.709s", std.math.maxInt(u64)); + + try testing.expectFmt("=======0ns", "{D:=>10}", .{0}); + try testing.expectFmt("1ns=======", "{D:=<10}", .{1}); + try testing.expectFmt(" 999ns ", "{D:^10}", .{std.time.ns_per_us - 1}); +} + +test printDurationSigned { + try testDurationCaseSigned("0ns", 0); + try testDurationCaseSigned("1ns", 1); + try testDurationCaseSigned("-1ns", -(1)); + try testDurationCaseSigned("999ns", std.time.ns_per_us - 1); + try testDurationCaseSigned("-999ns", -(std.time.ns_per_us - 1)); + try testDurationCaseSigned("1us", std.time.ns_per_us); + try testDurationCaseSigned("-1us", -(std.time.ns_per_us)); + try testDurationCaseSigned("1.45us", 1450); + try testDurationCaseSigned("-1.45us", -(1450)); + try testDurationCaseSigned("1.5us", 3 * std.time.ns_per_us / 2); + try testDurationCaseSigned("-1.5us", -(3 * std.time.ns_per_us / 2)); + try testDurationCaseSigned("14.5us", 14500); + try testDurationCaseSigned("-14.5us", -(14500)); + try testDurationCaseSigned("145us", 145000); + try testDurationCaseSigned("-145us", -(145000)); + try testDurationCaseSigned("999.999us", std.time.ns_per_ms - 1); + try testDurationCaseSigned("-999.999us", -(std.time.ns_per_ms - 1)); + try testDurationCaseSigned("1ms", std.time.ns_per_ms + 1); + try testDurationCaseSigned("-1ms", -(std.time.ns_per_ms + 1)); + try 
testDurationCaseSigned("1.5ms", 3 * std.time.ns_per_ms / 2); + try testDurationCaseSigned("-1.5ms", -(3 * std.time.ns_per_ms / 2)); + try testDurationCaseSigned("1.11ms", 1110000); + try testDurationCaseSigned("-1.11ms", -(1110000)); + try testDurationCaseSigned("1.111ms", 1111000); + try testDurationCaseSigned("-1.111ms", -(1111000)); + try testDurationCaseSigned("1.111ms", 1111100); + try testDurationCaseSigned("-1.111ms", -(1111100)); + try testDurationCaseSigned("999.999ms", std.time.ns_per_s - 1); + try testDurationCaseSigned("-999.999ms", -(std.time.ns_per_s - 1)); + try testDurationCaseSigned("1s", std.time.ns_per_s); + try testDurationCaseSigned("-1s", -(std.time.ns_per_s)); + try testDurationCaseSigned("59.999s", std.time.ns_per_min - 1); + try testDurationCaseSigned("-59.999s", -(std.time.ns_per_min - 1)); + try testDurationCaseSigned("1m", std.time.ns_per_min); + try testDurationCaseSigned("-1m", -(std.time.ns_per_min)); + try testDurationCaseSigned("1h", std.time.ns_per_hour); + try testDurationCaseSigned("-1h", -(std.time.ns_per_hour)); + try testDurationCaseSigned("1d", std.time.ns_per_day); + try testDurationCaseSigned("-1d", -(std.time.ns_per_day)); + try testDurationCaseSigned("1w", std.time.ns_per_week); + try testDurationCaseSigned("-1w", -(std.time.ns_per_week)); + try testDurationCaseSigned("1y", 365 * std.time.ns_per_day); + try testDurationCaseSigned("-1y", -(365 * std.time.ns_per_day)); + try testDurationCaseSigned("1y52w23h59m59.999s", 730 * std.time.ns_per_day - 1); // 365d = 52w1d + try testDurationCaseSigned("-1y52w23h59m59.999s", -(730 * std.time.ns_per_day - 1)); // 365d = 52w1d + try testDurationCaseSigned("1y1h1.001s", 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_s + std.time.ns_per_ms); + try testDurationCaseSigned("-1y1h1.001s", -(365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_s + std.time.ns_per_ms)); + try testDurationCaseSigned("1y1h1s", 365 * std.time.ns_per_day + std.time.ns_per_hour + 
std.time.ns_per_s + 999 * std.time.ns_per_us); + try testDurationCaseSigned("-1y1h1s", -(365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_s + 999 * std.time.ns_per_us)); + try testDurationCaseSigned("1y1h999.999us", 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms - 1); + try testDurationCaseSigned("-1y1h999.999us", -(365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms - 1)); + try testDurationCaseSigned("1y1h1ms", 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms); + try testDurationCaseSigned("-1y1h1ms", -(365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms)); + try testDurationCaseSigned("1y1h1ms", 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms + 1); + try testDurationCaseSigned("-1y1h1ms", -(365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms + 1)); + try testDurationCaseSigned("1y1m999ns", 365 * std.time.ns_per_day + std.time.ns_per_min + 999); + try testDurationCaseSigned("-1y1m999ns", -(365 * std.time.ns_per_day + std.time.ns_per_min + 999)); + try testDurationCaseSigned("292y24w3d23h47m16.854s", std.math.maxInt(i64)); + try testDurationCaseSigned("-292y24w3d23h47m16.854s", std.math.minInt(i64) + 1); + try testDurationCaseSigned("-292y24w3d23h47m16.854s", std.math.minInt(i64)); + + try testing.expectFmt("=======0ns", "{D:=>10}", .{0}); + try testing.expectFmt("1ns=======", "{D:=<10}", .{1}); + try testing.expectFmt("-1ns======", "{D:=<10}", .{-(1)}); + try testing.expectFmt(" -999ns ", "{D:^10}", .{-(std.time.ns_per_us - 1)}); +} + +fn testDurationCase(expected: []const u8, input: u64) !void { + var buf: [24]u8 = undefined; + var w: Writer = .fixed(&buf); + try w.printDurationUnsigned(input); + try testing.expectEqualStrings(expected, w.buffered()); +} + +fn testDurationCaseSigned(expected: []const u8, input: i64) !void { + var buf: [24]u8 = undefined; + var w: Writer = .fixed(&buf); + try w.printDurationSigned(input); + try 
testing.expectEqualStrings(expected, w.buffered()); +} + +test printInt { + try testPrintIntCase("-1", @as(i1, -1), 10, .lower, .{}); + + try testPrintIntCase("-101111000110000101001110", @as(i32, -12345678), 2, .lower, .{}); + try testPrintIntCase("-12345678", @as(i32, -12345678), 10, .lower, .{}); + try testPrintIntCase("-bc614e", @as(i32, -12345678), 16, .lower, .{}); + try testPrintIntCase("-BC614E", @as(i32, -12345678), 16, .upper, .{}); + + try testPrintIntCase("12345678", @as(u32, 12345678), 10, .upper, .{}); + + try testPrintIntCase(" 666", @as(u32, 666), 10, .lower, .{ .width = 6 }); + try testPrintIntCase(" 1234", @as(u32, 0x1234), 16, .lower, .{ .width = 6 }); + try testPrintIntCase("1234", @as(u32, 0x1234), 16, .lower, .{ .width = 1 }); + + try testPrintIntCase("+42", @as(i32, 42), 10, .lower, .{ .width = 3 }); + try testPrintIntCase("-42", @as(i32, -42), 10, .lower, .{ .width = 3 }); + + try testPrintIntCase("123456789123456789", @as(comptime_int, 123456789123456789), 10, .lower, .{}); +} + +test "printFloat with comptime_float" { + var buf: [20]u8 = undefined; + var w: Writer = .fixed(&buf); + try w.printFloat(@as(comptime_float, 1.0), std.fmt.Options.toNumber(.{}, .scientific, .lower)); + try testing.expectEqualStrings(w.buffered(), "1e0"); + try testing.expectFmt("1", "{}", .{1.0}); +} + +fn testPrintIntCase(expected: []const u8, value: anytype, base: u8, case: std.fmt.Case, options: std.fmt.Options) !void { + var buffer: [100]u8 = undefined; + var w: Writer = .fixed(&buffer); + try w.printInt(value, base, case, options); + try testing.expectEqualStrings(expected, w.buffered()); +} + +test printByteSize { + try testing.expectFmt("file size: 42B\n", "file size: {B}\n", .{42}); + try testing.expectFmt("file size: 42B\n", "file size: {Bi}\n", .{42}); + try testing.expectFmt("file size: 63MB\n", "file size: {B}\n", .{63 * 1000 * 1000}); + try testing.expectFmt("file size: 63MiB\n", "file size: {Bi}\n", .{63 * 1024 * 1024}); + try testing.expectFmt("file 
size: 42B\n", "file size: {B:.2}\n", .{42}); + try testing.expectFmt("file size: 42B\n", "file size: {B:>9.2}\n", .{42}); + try testing.expectFmt("file size: 66.06MB\n", "file size: {B:.2}\n", .{63 * 1024 * 1024}); + try testing.expectFmt("file size: 60.08MiB\n", "file size: {Bi:.2}\n", .{63 * 1000 * 1000}); + try testing.expectFmt("file size: =66.06MB=\n", "file size: {B:=^9.2}\n", .{63 * 1024 * 1024}); + try testing.expectFmt("file size: 66.06MB\n", "file size: {B: >9.2}\n", .{63 * 1024 * 1024}); + try testing.expectFmt("file size: 66.06MB \n", "file size: {B: <9.2}\n", .{63 * 1024 * 1024}); + try testing.expectFmt("file size: 0.01844674407370955ZB\n", "file size: {B}\n", .{std.math.maxInt(u64)}); +} + +test "bytes.hex" { + const some_bytes = "\xCA\xFE\xBA\xBE"; + try testing.expectFmt("lowercase: cafebabe\n", "lowercase: {x}\n", .{some_bytes}); + try testing.expectFmt("uppercase: CAFEBABE\n", "uppercase: {X}\n", .{some_bytes}); + try testing.expectFmt("uppercase: CAFE\n", "uppercase: {X}\n", .{some_bytes[0..2]}); + try testing.expectFmt("lowercase: babe\n", "lowercase: {x}\n", .{some_bytes[2..]}); + const bytes_with_zeros = "\x00\x0E\xBA\xBE"; + try testing.expectFmt("lowercase: 000ebabe\n", "lowercase: {x}\n", .{bytes_with_zeros}); +} + +test fixed { + { + var buf: [255]u8 = undefined; + var w: Writer = .fixed(&buf); + try w.print("{s}{s}!", .{ "Hello", "World" }); + try testing.expectEqualStrings("HelloWorld!", w.buffered()); + } + + comptime { + var buf: [255]u8 = undefined; + var w: Writer = .fixed(&buf); + try w.print("{s}{s}!", .{ "Hello", "World" }); + try testing.expectEqualStrings("HelloWorld!", w.buffered()); + } +} + +test "fixed output" { + var buffer: [10]u8 = undefined; + var w: Writer = .fixed(&buffer); + + try w.writeAll("Hello"); + try testing.expect(std.mem.eql(u8, w.buffered(), "Hello")); + + try w.writeAll("world"); + try testing.expect(std.mem.eql(u8, w.buffered(), "Helloworld")); + + try testing.expectError(error.WriteFailed, 
w.writeAll("!")); + try testing.expect(std.mem.eql(u8, w.buffered(), "Helloworld")); + + w = .fixed(&buffer); + + try testing.expect(w.buffered().len == 0); + + try testing.expectError(error.WriteFailed, w.writeAll("Hello world!")); + try testing.expect(std.mem.eql(u8, w.buffered(), "Hello worl")); +} + +test "writeSplat 0 len splat larger than capacity" { + var buf: [8]u8 = undefined; + var w: std.io.Writer = .fixed(&buf); + const n = try w.writeSplat(&.{"something that overflows buf"}, 0); + try testing.expectEqual(0, n); +} + +pub fn failingDrain(w: *Writer, data: []const []const u8, splat: usize) Error!usize { + _ = w; + _ = data; + _ = splat; + return error.WriteFailed; +} + +pub fn failingSendFile(w: *Writer, file_reader: *File.Reader, limit: Limit) FileError!usize { + _ = w; + _ = file_reader; + _ = limit; + return error.WriteFailed; +} + +pub const Discarding = struct { + count: u64, + writer: Writer, + + pub fn init(buffer: []u8) Discarding { + return .{ + .count = 0, + .writer = .{ + .vtable = &.{ + .drain = Discarding.drain, + .sendFile = Discarding.sendFile, + }, + .buffer = buffer, + }, + }; + } + + pub fn drain(w: *Writer, data: []const []const u8, splat: usize) Error!usize { + const d: *Discarding = @alignCast(@fieldParentPtr("writer", w)); + const slice = data[0 .. 
data.len - 1]; + const pattern = data[slice.len..]; + var written: usize = pattern.len * splat; + for (slice) |bytes| written += bytes.len; + d.count += w.end + written; + w.end = 0; + return written; + } + + pub fn sendFile(w: *Writer, file_reader: *File.Reader, limit: Limit) FileError!usize { + if (File.Handle == void) return error.Unimplemented; + const d: *Discarding = @alignCast(@fieldParentPtr("writer", w)); + d.count += w.end; + w.end = 0; + if (file_reader.getSize()) |size| { + const n = limit.minInt64(size - file_reader.pos); + file_reader.seekBy(@intCast(n)) catch return error.Unimplemented; + w.end = 0; + d.count += n; + return n; + } else |_| { + // Error is observable on `file_reader` instance, and it is better to + // treat the file as a pipe. + return error.Unimplemented; + } + } +}; + +/// Removes the first `n` bytes from `buffer` by shifting buffer contents, +/// returning how many bytes are left after consuming the entire buffer, or +/// zero if the entire buffer was not consumed. +/// +/// Useful for `VTable.drain` function implementations to implement partial +/// drains. +pub fn consume(w: *Writer, n: usize) usize { + if (n < w.end) { + const remaining = w.buffer[n..w.end]; + @memmove(w.buffer[0..remaining.len], remaining); + w.end = remaining.len; + return 0; + } + defer w.end = 0; + return n - w.end; +} + +/// Shortcut for setting `end` to zero and returning zero. Equivalent to +/// calling `consume` with `end`. +pub fn consumeAll(w: *Writer) usize { + w.end = 0; + return 0; +} + +/// For use when the `Writer` implementation cannot offer a more efficient +/// implementation than a basic read/write loop on the file. +pub fn unimplementedSendFile(w: *Writer, file_reader: *File.Reader, limit: Limit) FileError!usize { + _ = w; + _ = file_reader; + _ = limit; + return error.Unimplemented; +} + +/// When this function is called it usually means the buffer got full, so it's +/// time to return an error. 
However, we still need to make sure all of the +/// available buffer has been filled. Also, it may be called from `flush` in +/// which case it should return successfully. +pub fn fixedDrain(w: *Writer, data: []const []const u8, splat: usize) Error!usize { + if (data.len == 0) return 0; + for (data[0 .. data.len - 1]) |bytes| { + const dest = w.buffer[w.end..]; + const len = @min(bytes.len, dest.len); + @memcpy(dest[0..len], bytes[0..len]); + w.end += len; + if (bytes.len > dest.len) return error.WriteFailed; + } + const pattern = data[data.len - 1]; + const dest = w.buffer[w.end..]; + switch (pattern.len) { + 0 => return w.end, + 1 => { + assert(splat >= dest.len); + @memset(dest, pattern[0]); + w.end += dest.len; + return error.WriteFailed; + }, + else => { + for (0..splat) |i| { + const remaining = dest[i * pattern.len ..]; + const len = @min(pattern.len, remaining.len); + @memcpy(remaining[0..len], pattern[0..len]); + w.end += len; + if (pattern.len > remaining.len) return error.WriteFailed; + } + unreachable; + }, + } +} + +/// Provides a `Writer` implementation based on calling `Hasher.update`, sending +/// all data also to an underlying `Writer`. +/// +/// When using this, the underlying writer is best unbuffered because all +/// writes are passed on directly to it. +/// +/// This implementation makes suboptimal buffering decisions due to being +/// generic. A better solution will involve creating a writer for each hash +/// function, where the splat buffer can be tailored to the hash implementation +/// details. 
+pub fn Hashed(comptime Hasher: type) type { + return struct { + out: *Writer, + hasher: Hasher, + writer: Writer, + + pub fn init(out: *Writer, buffer: []u8) @This() { + return .initHasher(out, .{}, buffer); + } + + pub fn initHasher(out: *Writer, hasher: Hasher, buffer: []u8) @This() { + return .{ + .out = out, + .hasher = hasher, + .writer = .{ + .buffer = buffer, + .vtable = &.{ .drain = @This().drain }, + }, + }; + } + + fn drain(w: *Writer, data: []const []const u8, splat: usize) Error!usize { + const this: *@This() = @alignCast(@fieldParentPtr("writer", w)); + const aux = w.buffered(); + const aux_n = try this.out.writeSplatHeader(aux, data, splat); + if (aux_n < w.end) { + this.hasher.update(w.buffer[0..aux_n]); + const remaining = w.buffer[aux_n..w.end]; + @memmove(w.buffer[0..remaining.len], remaining); + w.end = remaining.len; + return 0; + } + this.hasher.update(aux); + const n = aux_n - w.end; + w.end = 0; + var remaining: usize = n; + for (data[0 .. data.len - 1]) |slice| { + if (remaining <= slice.len) { + this.hasher.update(slice[0..remaining]); + return n; + } + remaining -= slice.len; + this.hasher.update(slice); + } + const pattern = data[data.len - 1]; + assert(remaining == splat * pattern.len); + switch (pattern.len) { + 0 => { + assert(remaining == 0); + }, + 1 => { + var buffer: [64]u8 = undefined; + @memset(&buffer, pattern[0]); + while (remaining > 0) { + const update_len = @min(remaining, buffer.len); + this.hasher.update(buffer[0..update_len]); + remaining -= update_len; + } + }, + else => { + while (remaining > 0) { + const update_len = @min(remaining, pattern.len); + this.hasher.update(pattern[0..update_len]); + remaining -= update_len; + } + }, + } + return n; + } + }; +} + +/// Maintains `Writer` state such that it writes to the unused capacity of an +/// array list, filling it up completely before making a call through the +/// vtable, causing a resize. 
Consequently, the same, optimized, non-generic +/// machine code that uses `std.io.Writer`, such as formatted printing, takes +/// the hot paths when using this API. +/// +/// When using this API, it is not necessary to call `flush`. +pub const Allocating = struct { + allocator: Allocator, + writer: Writer, + + pub fn init(allocator: Allocator) Allocating { + return .{ + .allocator = allocator, + .writer = .{ + .buffer = &.{}, + .vtable = &vtable, + }, + }; + } + + pub fn initCapacity(allocator: Allocator, capacity: usize) error{OutOfMemory}!Allocating { + return .{ + .allocator = allocator, + .writer = .{ + .buffer = try allocator.alloc(u8, capacity), + .vtable = &vtable, + }, + }; + } + + pub fn initOwnedSlice(allocator: Allocator, slice: []u8) Allocating { + return .{ + .allocator = allocator, + .writer = .{ + .buffer = slice, + .vtable = &vtable, + }, + }; + } + + /// Replaces `array_list` with empty, taking ownership of the memory. + pub fn fromArrayList(allocator: Allocator, array_list: *std.ArrayListUnmanaged(u8)) Allocating { + defer array_list.* = .empty; + return .{ + .allocator = allocator, + .writer = .{ + .vtable = &vtable, + .buffer = array_list.allocatedSlice(), + .end = array_list.items.len, + }, + }; + } + + const vtable: VTable = .{ + .drain = Allocating.drain, + .sendFile = Allocating.sendFile, + .flush = noopFlush, + }; + + pub fn deinit(a: *Allocating) void { + a.allocator.free(a.writer.buffer); + a.* = undefined; + } + + /// Returns an array list that takes ownership of the allocated memory. + /// Resets the `Allocating` to an empty state. 
+ pub fn toArrayList(a: *Allocating) std.ArrayListUnmanaged(u8) { + const w = &a.writer; + const result: std.ArrayListUnmanaged(u8) = .{ + .items = w.buffer[0..w.end], + .capacity = w.buffer.len, + }; + w.buffer = &.{}; + w.end = 0; + return result; + } + + pub fn toOwnedSlice(a: *Allocating) error{OutOfMemory}![]u8 { + var list = a.toArrayList(); + return list.toOwnedSlice(a.allocator); + } + + pub fn toOwnedSliceSentinel(a: *Allocating, comptime sentinel: u8) error{OutOfMemory}![:sentinel]u8 { + const gpa = a.allocator; + var list = toArrayList(a); + return list.toOwnedSliceSentinel(gpa, sentinel); + } + + pub fn getWritten(a: *Allocating) []u8 { + return a.writer.buffered(); + } + + pub fn shrinkRetainingCapacity(a: *Allocating, new_len: usize) void { + a.writer.end = new_len; + } + + pub fn clearRetainingCapacity(a: *Allocating) void { + a.shrinkRetainingCapacity(0); + } + + fn drain(w: *Writer, data: []const []const u8, splat: usize) Error!usize { + const a: *Allocating = @fieldParentPtr("writer", w); + const gpa = a.allocator; + const pattern = data[data.len - 1]; + const splat_len = pattern.len * splat; + var list = a.toArrayList(); + defer setArrayList(a, list); + const start_len = list.items.len; + // Even if we append no data, this function needs to ensure there is more + // capacity in the buffer to avoid infinite loop, hence the +1 in this loop. 
+ assert(data.len != 0); + for (data) |bytes| { + list.ensureUnusedCapacity(gpa, bytes.len + splat_len + 1) catch return error.WriteFailed; + list.appendSliceAssumeCapacity(bytes); + } + if (splat == 0) { + list.items.len -= pattern.len; + } else switch (pattern.len) { + 0 => {}, + 1 => list.appendNTimesAssumeCapacity(pattern[0], splat - 1), + else => for (0..splat - 1) |_| list.appendSliceAssumeCapacity(pattern), + } + return list.items.len - start_len; + } + + fn sendFile(w: *Writer, file_reader: *File.Reader, limit: std.io.Limit) FileError!usize { + if (File.Handle == void) return error.Unimplemented; + const a: *Allocating = @fieldParentPtr("writer", w); + const gpa = a.allocator; + var list = a.toArrayList(); + defer setArrayList(a, list); + const pos = file_reader.pos; + const additional = if (file_reader.getSize()) |size| size - pos else |_| std.atomic.cache_line; + list.ensureUnusedCapacity(gpa, limit.minInt64(additional)) catch return error.WriteFailed; + const dest = limit.slice(list.unusedCapacitySlice()); + const n = file_reader.read(dest) catch |err| switch (err) { + error.ReadFailed => return error.ReadFailed, + error.EndOfStream => 0, + }; + list.items.len += n; + return n; + } + + fn setArrayList(a: *Allocating, list: std.ArrayListUnmanaged(u8)) void { + a.writer.buffer = list.allocatedSlice(); + a.writer.end = list.items.len; + } + + test Allocating { + var a: Allocating = .init(testing.allocator); + defer a.deinit(); + const w = &a.writer; + + const x: i32 = 42; + const y: i32 = 1234; + try w.print("x: {}\ny: {}\n", .{ x, y }); + + try testing.expectEqualSlices(u8, "x: 42\ny: 1234\n", a.getWritten()); + } +}; diff --git a/lib/std/io/buffered_atomic_file.zig b/lib/std/io/buffered_atomic_file.zig index 71edabb20a..48510bde52 100644 --- a/lib/std/io/buffered_atomic_file.zig +++ b/lib/std/io/buffered_atomic_file.zig @@ -11,7 +11,7 @@ pub const BufferedAtomicFile = struct { pub const buffer_size = 4096; pub const BufferedWriter = 
std.io.BufferedWriter(buffer_size, File.Writer); - pub const Writer = std.io.Writer(*BufferedWriter, BufferedWriter.Error, BufferedWriter.write); + pub const Writer = std.io.GenericWriter(*BufferedWriter, BufferedWriter.Error, BufferedWriter.write); /// TODO when https://github.com/ziglang/zig/issues/2761 is solved /// this API will not need an allocator @@ -33,7 +33,7 @@ pub const BufferedAtomicFile = struct { self.atomic_file = try dir.atomicFile(dest_path, atomic_file_options); errdefer self.atomic_file.deinit(); - self.file_writer = self.atomic_file.file.writer(); + self.file_writer = self.atomic_file.file.deprecatedWriter(); self.buffered_writer = .{ .unbuffered_writer = self.file_writer }; return self; } diff --git a/lib/std/io/buffered_reader.zig b/lib/std/io/buffered_reader.zig index bcf54fb882..548dd92f73 100644 --- a/lib/std/io/buffered_reader.zig +++ b/lib/std/io/buffered_reader.zig @@ -12,7 +12,7 @@ pub fn BufferedReader(comptime buffer_size: usize, comptime ReaderType: type) ty end: usize = 0, pub const Error = ReaderType.Error; - pub const Reader = io.Reader(*Self, Error, read); + pub const Reader = io.GenericReader(*Self, Error, read); const Self = @This(); @@ -61,7 +61,7 @@ test "OneByte" { const Error = error{NoError}; const Self = @This(); - const Reader = io.Reader(*Self, Error, read); + const Reader = io.GenericReader(*Self, Error, read); fn init(str: []const u8) Self { return Self{ @@ -105,7 +105,7 @@ test "Block" { const Error = error{NoError}; const Self = @This(); - const Reader = io.Reader(*Self, Error, read); + const Reader = io.GenericReader(*Self, Error, read); fn init(block: []const u8, reads_allowed: usize) Self { return Self{ diff --git a/lib/std/io/buffered_writer.zig b/lib/std/io/buffered_writer.zig index 906d6cce49..ef95de0f0c 100644 --- a/lib/std/io/buffered_writer.zig +++ b/lib/std/io/buffered_writer.zig @@ -10,7 +10,7 @@ pub fn BufferedWriter(comptime buffer_size: usize, comptime WriterType: type) ty end: usize = 0, pub const 
Error = WriterType.Error; - pub const Writer = io.Writer(*Self, Error, write); + pub const Writer = io.GenericWriter(*Self, Error, write); const Self = @This(); diff --git a/lib/std/io/c_writer.zig b/lib/std/io/c_writer.zig index 8c25e51029..30d0cabcf5 100644 --- a/lib/std/io/c_writer.zig +++ b/lib/std/io/c_writer.zig @@ -3,7 +3,7 @@ const builtin = @import("builtin"); const io = std.io; const testing = std.testing; -pub const CWriter = io.Writer(*std.c.FILE, std.fs.File.WriteError, cWriterWrite); +pub const CWriter = io.GenericWriter(*std.c.FILE, std.fs.File.WriteError, cWriterWrite); pub fn cWriter(c_file: *std.c.FILE) CWriter { return .{ .context = c_file }; diff --git a/lib/std/io/change_detection_stream.zig b/lib/std/io/change_detection_stream.zig index 5ba2bb3c10..d9da1c4a0e 100644 --- a/lib/std/io/change_detection_stream.zig +++ b/lib/std/io/change_detection_stream.zig @@ -8,7 +8,7 @@ pub fn ChangeDetectionStream(comptime WriterType: type) type { return struct { const Self = @This(); pub const Error = WriterType.Error; - pub const Writer = io.Writer(*Self, Error, write); + pub const Writer = io.GenericWriter(*Self, Error, write); anything_changed: bool, underlying_writer: WriterType, diff --git a/lib/std/io/counting_reader.zig b/lib/std/io/counting_reader.zig index 2ff9b8a08f..bc1e1b6ec7 100644 --- a/lib/std/io/counting_reader.zig +++ b/lib/std/io/counting_reader.zig @@ -9,7 +9,7 @@ pub fn CountingReader(comptime ReaderType: anytype) type { bytes_read: u64 = 0, pub const Error = ReaderType.Error; - pub const Reader = io.Reader(*@This(), Error, read); + pub const Reader = io.GenericReader(*@This(), Error, read); pub fn read(self: *@This(), buf: []u8) Error!usize { const amt = try self.child_reader.read(buf); diff --git a/lib/std/io/counting_writer.zig b/lib/std/io/counting_writer.zig index 9043e1a47c..32c3ed930f 100644 --- a/lib/std/io/counting_writer.zig +++ b/lib/std/io/counting_writer.zig @@ -9,7 +9,7 @@ pub fn CountingWriter(comptime WriterType: type) 
type { child_stream: WriterType, pub const Error = WriterType.Error; - pub const Writer = io.Writer(*Self, Error, write); + pub const Writer = io.GenericWriter(*Self, Error, write); const Self = @This(); diff --git a/lib/std/io/find_byte_writer.zig b/lib/std/io/find_byte_writer.zig index cb7efac2d9..fe6836f603 100644 --- a/lib/std/io/find_byte_writer.zig +++ b/lib/std/io/find_byte_writer.zig @@ -8,7 +8,7 @@ pub fn FindByteWriter(comptime UnderlyingWriter: type) type { return struct { const Self = @This(); pub const Error = UnderlyingWriter.Error; - pub const Writer = io.Writer(*Self, Error, write); + pub const Writer = io.GenericWriter(*Self, Error, write); underlying_writer: UnderlyingWriter, byte_found: bool, diff --git a/lib/std/io/fixed_buffer_stream.zig b/lib/std/io/fixed_buffer_stream.zig index bfc25eb6ac..67d6f3d286 100644 --- a/lib/std/io/fixed_buffer_stream.zig +++ b/lib/std/io/fixed_buffer_stream.zig @@ -4,8 +4,8 @@ const testing = std.testing; const mem = std.mem; const assert = std.debug.assert; -/// This turns a byte buffer into an `io.Writer`, `io.Reader`, or `io.SeekableStream`. -/// If the supplied byte buffer is const, then `io.Writer` is not available. +/// This turns a byte buffer into an `io.GenericWriter`, `io.GenericReader`, or `io.SeekableStream`. +/// If the supplied byte buffer is const, then `io.GenericWriter` is not available. pub fn FixedBufferStream(comptime Buffer: type) type { return struct { /// `Buffer` is either a `[]u8` or `[]const u8`. 
@@ -17,8 +17,8 @@ pub fn FixedBufferStream(comptime Buffer: type) type { pub const SeekError = error{}; pub const GetSeekPosError = error{}; - pub const Reader = io.Reader(*Self, ReadError, read); - pub const Writer = io.Writer(*Self, WriteError, write); + pub const Reader = io.GenericReader(*Self, ReadError, read); + pub const Writer = io.GenericWriter(*Self, WriteError, write); pub const SeekableStream = io.SeekableStream( *Self, diff --git a/lib/std/io/limited_reader.zig b/lib/std/io/limited_reader.zig index d7e2503881..b6b555f76d 100644 --- a/lib/std/io/limited_reader.zig +++ b/lib/std/io/limited_reader.zig @@ -9,7 +9,7 @@ pub fn LimitedReader(comptime ReaderType: type) type { bytes_left: u64, pub const Error = ReaderType.Error; - pub const Reader = io.Reader(*Self, Error, read); + pub const Reader = io.GenericReader(*Self, Error, read); const Self = @This(); diff --git a/lib/std/io/multi_writer.zig b/lib/std/io/multi_writer.zig index be109867b7..20e9e782de 100644 --- a/lib/std/io/multi_writer.zig +++ b/lib/std/io/multi_writer.zig @@ -15,7 +15,7 @@ pub fn MultiWriter(comptime Writers: type) type { streams: Writers, pub const Error = ErrSet; - pub const Writer = io.Writer(*Self, Error, write); + pub const Writer = io.GenericWriter(*Self, Error, write); pub fn writer(self: *Self) Writer { return .{ .context = self }; diff --git a/lib/std/io/stream_source.zig b/lib/std/io/stream_source.zig index 6e06af8204..2a3527e479 100644 --- a/lib/std/io/stream_source.zig +++ b/lib/std/io/stream_source.zig @@ -2,9 +2,9 @@ const std = @import("../std.zig"); const builtin = @import("builtin"); const io = std.io; -/// Provides `io.Reader`, `io.Writer`, and `io.SeekableStream` for in-memory buffers as +/// Provides `io.GenericReader`, `io.GenericWriter`, and `io.SeekableStream` for in-memory buffers as /// well as files. -/// For memory sources, if the supplied byte buffer is const, then `io.Writer` is not available. 
+/// For memory sources, if the supplied byte buffer is const, then `io.GenericWriter` is not available. /// The error set of the stream functions is the error set of the corresponding file functions. pub const StreamSource = union(enum) { // TODO: expose UEFI files to std.os in a way that allows this to be true @@ -26,8 +26,8 @@ pub const StreamSource = union(enum) { pub const SeekError = io.FixedBufferStream([]u8).SeekError || (if (has_file) std.fs.File.SeekError else error{}); pub const GetSeekPosError = io.FixedBufferStream([]u8).GetSeekPosError || (if (has_file) std.fs.File.GetSeekPosError else error{}); - pub const Reader = io.Reader(*StreamSource, ReadError, read); - pub const Writer = io.Writer(*StreamSource, WriteError, write); + pub const Reader = io.GenericReader(*StreamSource, ReadError, read); + pub const Writer = io.GenericWriter(*StreamSource, WriteError, write); pub const SeekableStream = io.SeekableStream( *StreamSource, SeekError, diff --git a/lib/std/io/test.zig b/lib/std/io/test.zig index 523b25c9c8..bf14f0c24c 100644 --- a/lib/std/io/test.zig +++ b/lib/std/io/test.zig @@ -24,7 +24,7 @@ test "write a file, read it, then delete it" { var file = try tmp.dir.createFile(tmp_file_name, .{}); defer file.close(); - var buf_stream = io.bufferedWriter(file.writer()); + var buf_stream = io.bufferedWriter(file.deprecatedWriter()); const st = buf_stream.writer(); try st.print("begin", .{}); try st.writeAll(data[0..]); @@ -45,7 +45,7 @@ test "write a file, read it, then delete it" { const expected_file_size: u64 = "begin".len + data.len + "end".len; try expectEqual(expected_file_size, file_size); - var buf_stream = io.bufferedReader(file.reader()); + var buf_stream = io.bufferedReader(file.deprecatedReader()); const st = buf_stream.reader(); const contents = try st.readAllAlloc(std.testing.allocator, 2 * 1024); defer std.testing.allocator.free(contents); @@ -66,7 +66,7 @@ test "BitStreams with File Stream" { var file = try tmp.dir.createFile(tmp_file_name, 
.{}); defer file.close(); - var bit_stream = io.bitWriter(native_endian, file.writer()); + var bit_stream = io.bitWriter(native_endian, file.deprecatedWriter()); try bit_stream.writeBits(@as(u2, 1), 1); try bit_stream.writeBits(@as(u5, 2), 2); @@ -80,7 +80,7 @@ test "BitStreams with File Stream" { var file = try tmp.dir.openFile(tmp_file_name, .{}); defer file.close(); - var bit_stream = io.bitReader(native_endian, file.reader()); + var bit_stream = io.bitReader(native_endian, file.deprecatedReader()); var out_bits: u16 = undefined; diff --git a/lib/std/io/tty.zig b/lib/std/io/tty.zig index c220e87880..fa17d9a16d 100644 --- a/lib/std/io/tty.zig +++ b/lib/std/io/tty.zig @@ -5,36 +5,9 @@ const process = std.process; const windows = std.os.windows; const native_os = builtin.os.tag; -/// Detect suitable TTY configuration options for the given file (commonly stdout/stderr). -/// This includes feature checks for ANSI escape codes and the Windows console API, as well as -/// respecting the `NO_COLOR` and `CLICOLOR_FORCE` environment variables to override the default. -/// Will attempt to enable ANSI escape code support if necessary/possible. +/// Deprecated in favor of `Config.detect`. 
pub fn detectConfig(file: File) Config { - const force_color: ?bool = if (builtin.os.tag == .wasi) - null // wasi does not support environment variables - else if (process.hasNonEmptyEnvVarConstant("NO_COLOR")) - false - else if (process.hasNonEmptyEnvVarConstant("CLICOLOR_FORCE")) - true - else - null; - - if (force_color == false) return .no_color; - - if (file.getOrEnableAnsiEscapeSupport()) return .escape_codes; - - if (native_os == .windows and file.isTty()) { - var info: windows.CONSOLE_SCREEN_BUFFER_INFO = undefined; - if (windows.kernel32.GetConsoleScreenBufferInfo(file.handle, &info) == windows.FALSE) { - return if (force_color == true) .escape_codes else .no_color; - } - return .{ .windows_api = .{ - .handle = file.handle, - .reset_attributes = info.wAttributes, - } }; - } - - return if (force_color == true) .escape_codes else .no_color; + return .detect(file); } pub const Color = enum { @@ -66,17 +39,46 @@ pub const Config = union(enum) { escape_codes, windows_api: if (native_os == .windows) WindowsContext else void, + /// Detect suitable TTY configuration options for the given file (commonly stdout/stderr). + /// This includes feature checks for ANSI escape codes and the Windows console API, as well as + /// respecting the `NO_COLOR` and `CLICOLOR_FORCE` environment variables to override the default. + /// Will attempt to enable ANSI escape code support if necessary/possible. 
+ pub fn detect(file: File) Config { + const force_color: ?bool = if (builtin.os.tag == .wasi) + null // wasi does not support environment variables + else if (process.hasNonEmptyEnvVarConstant("NO_COLOR")) + false + else if (process.hasNonEmptyEnvVarConstant("CLICOLOR_FORCE")) + true + else + null; + + if (force_color == false) return .no_color; + + if (file.getOrEnableAnsiEscapeSupport()) return .escape_codes; + + if (native_os == .windows and file.isTty()) { + var info: windows.CONSOLE_SCREEN_BUFFER_INFO = undefined; + if (windows.kernel32.GetConsoleScreenBufferInfo(file.handle, &info) == windows.FALSE) { + return if (force_color == true) .escape_codes else .no_color; + } + return .{ .windows_api = .{ + .handle = file.handle, + .reset_attributes = info.wAttributes, + } }; + } + + return if (force_color == true) .escape_codes else .no_color; + } + pub const WindowsContext = struct { handle: File.Handle, reset_attributes: u16, }; - pub fn setColor( - conf: Config, - writer: anytype, - color: Color, - ) (@typeInfo(@TypeOf(writer.writeAll(""))).error_union.error_set || - windows.SetConsoleTextAttributeError)!void { + pub const SetColorError = std.os.windows.SetConsoleTextAttributeError || std.io.Writer.Error; + + pub fn setColor(conf: Config, w: *std.io.Writer, color: Color) SetColorError!void { nosuspend switch (conf) { .no_color => return, .escape_codes => { @@ -101,7 +103,7 @@ pub const Config = union(enum) { .dim => "\x1b[2m", .reset => "\x1b[0m", }; - try writer.writeAll(color_string); + try w.writeAll(color_string); }, .windows_api => |ctx| if (native_os == .windows) { const attributes = switch (color) { @@ -126,6 +128,7 @@ pub const Config = union(enum) { .dim => windows.FOREGROUND_INTENSITY, .reset => ctx.reset_attributes, }; + try w.flush(); try windows.SetConsoleTextAttribute(ctx.handle, attributes); } else { unreachable; diff --git a/lib/std/json.zig b/lib/std/json.zig index d977c34577..c0fb064c6a 100644 --- a/lib/std/json.zig +++ b/lib/std/json.zig @@ 
-1,12 +1,12 @@ //! JSON parsing and stringification conforming to RFC 8259. https://datatracker.ietf.org/doc/html/rfc8259 //! //! The low-level `Scanner` API produces `Token`s from an input slice or successive slices of inputs, -//! The `Reader` API connects a `std.io.Reader` to a `Scanner`. +//! The `Reader` API connects a `std.io.GenericReader` to a `Scanner`. //! //! The high-level `parseFromSlice` and `parseFromTokenSource` deserialize a JSON document into a Zig type. //! Parse into a dynamically-typed `Value` to load any JSON value for runtime inspection. //! -//! The low-level `writeStream` emits syntax-conformant JSON tokens to a `std.io.Writer`. +//! The low-level `writeStream` emits syntax-conformant JSON tokens to a `std.io.GenericWriter`. //! The high-level `stringify` serializes a Zig or `Value` type into JSON. const builtin = @import("builtin"); diff --git a/lib/std/json/dynamic.zig b/lib/std/json/dynamic.zig index a5aa80a888..4d24444390 100644 --- a/lib/std/json/dynamic.zig +++ b/lib/std/json/dynamic.zig @@ -56,7 +56,7 @@ pub const Value = union(enum) { std.debug.lockStdErr(); defer std.debug.unlockStdErr(); - const stderr = std.io.getStdErr().writer(); + const stderr = std.fs.File.stderr().deprecatedWriter(); stringify(self, .{}, stderr) catch return; } diff --git a/lib/std/json/dynamic_test.zig b/lib/std/json/dynamic_test.zig index 45cdc0d0c7..1362e3cfad 100644 --- a/lib/std/json/dynamic_test.zig +++ b/lib/std/json/dynamic_test.zig @@ -254,7 +254,7 @@ test "Value.jsonStringify" { \\ true, \\ 42, \\ 43, - \\ 4.2e1, + \\ 42, \\ "weeee", \\ [ \\ 1, @@ -266,7 +266,7 @@ test "Value.jsonStringify" { \\ } \\] ; - try testing.expectEqualSlices(u8, expected, fbs.getWritten()); + try testing.expectEqualStrings(expected, fbs.getWritten()); } test "parseFromValue(std.json.Value,...)" { diff --git a/lib/std/json/fmt.zig b/lib/std/json/fmt.zig index 09f58b3be6..c526dce1da 100644 --- a/lib/std/json/fmt.zig +++ b/lib/std/json/fmt.zig @@ -1,4 +1,5 @@ -const std = 
@import("std"); +const std = @import("../std.zig"); +const assert = std.debug.assert; const stringify = @import("stringify.zig").stringify; const StringifyOptions = @import("stringify.zig").StringifyOptions; @@ -14,14 +15,7 @@ pub fn Formatter(comptime T: type) type { value: T, options: StringifyOptions, - pub fn format( - self: @This(), - comptime fmt_spec: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = fmt_spec; - _ = options; + pub fn format(self: @This(), writer: *std.io.Writer) std.io.Writer.Error!void { try stringify(self.value, self.options, writer); } }; diff --git a/lib/std/json/scanner.zig b/lib/std/json/scanner.zig index 85a058af38..1836d6775b 100644 --- a/lib/std/json/scanner.zig +++ b/lib/std/json/scanner.zig @@ -219,7 +219,7 @@ pub const AllocWhen = enum { alloc_if_needed, alloc_always }; /// This limit can be specified by calling `nextAllocMax()` instead of `nextAlloc()`. pub const default_max_value_len = 4 * 1024 * 1024; -/// Connects a `std.io.Reader` to a `std.json.Scanner`. +/// Connects a `std.io.GenericReader` to a `std.json.Scanner`. /// All `next*()` methods here handle `error.BufferUnderrun` from `std.json.Scanner`, and then read from the reader. pub fn Reader(comptime buffer_size: usize, comptime ReaderType: type) type { return struct { diff --git a/lib/std/json/stringify.zig b/lib/std/json/stringify.zig index db2ba85318..aa49573695 100644 --- a/lib/std/json/stringify.zig +++ b/lib/std/json/stringify.zig @@ -38,7 +38,7 @@ pub const StringifyOptions = struct { emit_nonportable_numbers_as_strings: bool = false, }; -/// Writes the given value to the `std.io.Writer` stream. +/// Writes the given value to the `std.io.GenericWriter` stream. /// See `WriteStream` for how the given value is serialized into JSON. /// The maximum nesting depth of the output JSON document is 256. /// See also `stringifyMaxDepth` and `stringifyArbitraryDepth`. 
@@ -81,7 +81,7 @@ pub fn stringifyArbitraryDepth( } /// Calls `stringifyArbitraryDepth` and stores the result in dynamically allocated memory -/// instead of taking a `std.io.Writer`. +/// instead of taking a `std.io.GenericWriter`. /// /// Caller owns returned memory. pub fn stringifyAlloc( @@ -469,7 +469,6 @@ pub fn WriteStream( /// * When option `emit_nonportable_numbers_as_strings` is true, if the value is outside the range `+-1<<53` (the precise integer range of f64), it is rendered as a JSON string in base 10. Otherwise, it is rendered as JSON number. /// * Zig floats -> JSON number or string. /// * If the value cannot be precisely represented by an f64, it is rendered as a JSON string. Otherwise, it is rendered as JSON number. - /// * TODO: Float rendering will likely change in the future, e.g. to remove the unnecessary "e+00". /// * Zig `[]const u8`, `[]u8`, `*[N]u8`, `@Vector(N, u8)`, and similar -> JSON string. /// * See `StringifyOptions.emit_strings_as_arrays`. /// * If the content is not valid UTF-8, rendered as an array of numbers instead. @@ -689,7 +688,8 @@ fn outputUnicodeEscape(codepoint: u21, out_stream: anytype) !void { // then it may be represented as a six-character sequence: a reverse solidus, followed // by the lowercase letter u, followed by four hexadecimal digits that encode the character's code point. 
try out_stream.writeAll("\\u"); - try std.fmt.formatIntValue(codepoint, "x", std.fmt.FormatOptions{ .width = 4, .fill = '0' }, out_stream); + //try w.printInt("x", .{ .width = 4, .fill = '0' }, codepoint); + try std.fmt.format(out_stream, "{x:0>4}", .{codepoint}); } else { assert(codepoint <= 0x10FFFF); // To escape an extended character that is not in the Basic Multilingual Plane, @@ -697,9 +697,11 @@ fn outputUnicodeEscape(codepoint: u21, out_stream: anytype) !void { const high = @as(u16, @intCast((codepoint - 0x10000) >> 10)) + 0xD800; const low = @as(u16, @intCast(codepoint & 0x3FF)) + 0xDC00; try out_stream.writeAll("\\u"); - try std.fmt.formatIntValue(high, "x", std.fmt.FormatOptions{ .width = 4, .fill = '0' }, out_stream); + //try w.printInt("x", .{ .width = 4, .fill = '0' }, high); + try std.fmt.format(out_stream, "{x:0>4}", .{high}); try out_stream.writeAll("\\u"); - try std.fmt.formatIntValue(low, "x", std.fmt.FormatOptions{ .width = 4, .fill = '0' }, out_stream); + //try w.printInt("x", .{ .width = 4, .fill = '0' }, low); + try std.fmt.format(out_stream, "{x:0>4}", .{low}); } } diff --git a/lib/std/json/stringify_test.zig b/lib/std/json/stringify_test.zig index 52e38d1e30..22dd504285 100644 --- a/lib/std/json/stringify_test.zig +++ b/lib/std/json/stringify_test.zig @@ -74,16 +74,16 @@ fn testBasicWriteStream(w: anytype, slice_stream: anytype) !void { \\{ \\ "object": { \\ "one": 1, - \\ "two": 2e0 + \\ "two": 2 \\ }, \\ "string": "This is a string", \\ "array": [ \\ "Another string", \\ 1, - \\ 3.5e0 + \\ 3.5 \\ ], \\ "int": 10, - \\ "float": 3.5e0 + \\ "float": 3.5 \\} ; try std.testing.expectEqualStrings(expected, result); @@ -123,12 +123,12 @@ test "stringify basic types" { try testStringify("null", @as(?u8, null), .{}); try testStringify("null", @as(?*u32, null), .{}); try testStringify("42", 42, .{}); - try testStringify("4.2e1", 42.0, .{}); + try testStringify("42", 42.0, .{}); try testStringify("42", @as(u8, 42), .{}); try testStringify("42", 
@as(u128, 42), .{}); try testStringify("9999999999999999", 9999999999999999, .{}); - try testStringify("4.2e1", @as(f32, 42), .{}); - try testStringify("4.2e1", @as(f64, 42), .{}); + try testStringify("42", @as(f32, 42), .{}); + try testStringify("42", @as(f64, 42), .{}); try testStringify("\"ItBroke\"", @as(anyerror, error.ItBroke), .{}); try testStringify("\"ItBroke\"", error.ItBroke, .{}); } @@ -307,7 +307,7 @@ test "stringify tuple" { fn testStringify(expected: []const u8, value: anytype, options: StringifyOptions) !void { const ValidationWriter = struct { const Self = @This(); - pub const Writer = std.io.Writer(*Self, Error, write); + pub const Writer = std.io.GenericWriter(*Self, Error, write); pub const Error = error{ TooMuchData, DifferentData, diff --git a/lib/std/log.zig b/lib/std/log.zig index 3479766678..4cbb73e6a4 100644 --- a/lib/std/log.zig +++ b/lib/std/log.zig @@ -47,7 +47,7 @@ //! // Print the message to stderr, silently ignoring any errors //! std.debug.lockStdErr(); //! defer std.debug.unlockStdErr(); -//! const stderr = std.io.getStdErr().writer(); +//! const stderr = std.fs.File.stderr().deprecatedWriter(); //! nosuspend stderr.print(prefix ++ format ++ "\n", args) catch return; //! } //! 
@@ -148,7 +148,7 @@ pub fn defaultLog( ) void { const level_txt = comptime message_level.asText(); const prefix2 = if (scope == .default) ": " else "(" ++ @tagName(scope) ++ "): "; - const stderr = std.io.getStdErr().writer(); + const stderr = std.fs.File.stderr().deprecatedWriter(); var bw = std.io.bufferedWriter(stderr); const writer = bw.writer(); diff --git a/lib/std/math/big/int.zig b/lib/std/math/big/int.zig index 552ded4d51..33938dbd42 100644 --- a/lib/std/math/big/int.zig +++ b/lib/std/math/big/int.zig @@ -2028,6 +2028,14 @@ pub const Mutable = struct { pub fn normalize(r: *Mutable, length: usize) void { r.len = llnormalize(r.limbs[0..length]); } + + pub fn format(self: Mutable, w: *std.io.Writer) std.io.Writer.Error!void { + return formatNumber(self, w, .{}); + } + + pub fn formatNumber(self: Const, w: *std.io.Writer, n: std.fmt.Number) std.io.Writer.Error!void { + return self.toConst().formatNumber(w, n); + } }; /// A arbitrary-precision big integer, with a fixed set of immutable limbs. @@ -2317,50 +2325,25 @@ pub const Const = struct { return .{ normalized_res.reconstruct(if (self.positive) .positive else .negative), exactness }; } - /// To allow `std.fmt.format` to work with this type. /// If the absolute value of integer is greater than or equal to `pow(2, 64 * @sizeOf(usize) * 8)`, /// this function will fail to print the string, printing "(BigInt)" instead of a number. /// This is because the rendering algorithm requires reversing a string, which requires O(N) memory. /// See `toString` and `toStringAlloc` for a way to print big integers without failure. 
- pub fn format( - self: Const, - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - out_stream: anytype, - ) !void { - _ = options; - comptime var base = 10; - comptime var case: std.fmt.Case = .lower; - - if (fmt.len == 0 or comptime mem.eql(u8, fmt, "d")) { - base = 10; - case = .lower; - } else if (comptime mem.eql(u8, fmt, "b")) { - base = 2; - case = .lower; - } else if (comptime mem.eql(u8, fmt, "x")) { - base = 16; - case = .lower; - } else if (comptime mem.eql(u8, fmt, "X")) { - base = 16; - case = .upper; - } else { - std.fmt.invalidFmtError(fmt, self); - } - + pub fn formatNumber(self: Const, w: *std.io.Writer, number: std.fmt.Number) std.io.Writer.Error!void { const available_len = 64; if (self.limbs.len > available_len) - return out_stream.writeAll("(BigInt)"); + return w.writeAll("(BigInt)"); - var limbs: [calcToStringLimbsBufferLen(available_len, base)]Limb = undefined; + var limbs: [calcToStringLimbsBufferLen(available_len, 10)]Limb = undefined; const biggest: Const = .{ .limbs = &([1]Limb{comptime math.maxInt(Limb)} ** available_len), .positive = false, }; - var buf: [biggest.sizeInBaseUpperBound(base)]u8 = undefined; - const len = self.toString(&buf, base, case, &limbs); - return out_stream.writeAll(buf[0..len]); + var buf: [biggest.sizeInBaseUpperBound(2)]u8 = undefined; + const base: u8 = number.mode.base() orelse @panic("TODO print big int in scientific form"); + const len = self.toString(&buf, base, number.case, &limbs); + return w.writeAll(buf[0..len]); } /// Converts self to a string in the requested base. @@ -2930,17 +2913,16 @@ pub const Managed = struct { } /// To allow `std.fmt.format` to work with `Managed`. + pub fn format(self: Managed, w: *std.io.Writer) std.io.Writer.Error!void { + return formatNumber(self, w, .{}); + } + /// If the absolute value of integer is greater than or equal to `pow(2, 64 * @sizeOf(usize) * 8)`, /// this function will fail to print the string, printing "(BigInt)" instead of a number. 
/// This is because the rendering algorithm requires reversing a string, which requires O(N) memory. /// See `toString` and `toStringAlloc` for a way to print big integers without failure. - pub fn format( - self: Managed, - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - out_stream: anytype, - ) !void { - return self.toConst().format(fmt, options, out_stream); + pub fn formatNumber(self: Managed, w: *std.io.Writer, n: std.fmt.Number) std.io.Writer.Error!void { + return self.toConst().formatNumber(w, n); } /// Returns math.Order.lt, math.Order.eq, math.Order.gt if |a| < |b|, |a| == diff --git a/lib/std/math/big/int_test.zig b/lib/std/math/big/int_test.zig index 5a0fda52b2..3683eb2bcd 100644 --- a/lib/std/math/big/int_test.zig +++ b/lib/std/math/big/int_test.zig @@ -3813,13 +3813,10 @@ test "(BigInt) positive" { try a.pow(&a, 64 * @sizeOf(Limb) * 8); try b.sub(&a, &c); - const a_fmt = try std.fmt.allocPrintZ(testing.allocator, "{d}", .{a}); - defer testing.allocator.free(a_fmt); + try testing.expectFmt("(BigInt)", "{d}", .{a}); - const b_fmt = try std.fmt.allocPrintZ(testing.allocator, "{d}", .{b}); + const b_fmt = try std.fmt.allocPrint(testing.allocator, "{d}", .{b}); defer testing.allocator.free(b_fmt); - - try testing.expect(mem.eql(u8, a_fmt, "(BigInt)")); try testing.expect(!mem.eql(u8, b_fmt, "(BigInt)")); } @@ -3838,10 +3835,10 @@ test "(BigInt) negative" { a.negate(); try b.add(&a, &c); - const a_fmt = try std.fmt.allocPrintZ(testing.allocator, "{d}", .{a}); + const a_fmt = try std.fmt.allocPrint(testing.allocator, "{d}", .{a}); defer testing.allocator.free(a_fmt); - const b_fmt = try std.fmt.allocPrintZ(testing.allocator, "{d}", .{b}); + const b_fmt = try std.fmt.allocPrint(testing.allocator, "{d}", .{b}); defer testing.allocator.free(b_fmt); try testing.expect(mem.eql(u8, a_fmt, "(BigInt)")); diff --git a/lib/std/mem.zig b/lib/std/mem.zig index 59c26cc887..a5613536c6 100644 --- a/lib/std/mem.zig +++ b/lib/std/mem.zig @@ -1714,7 +1714,7 @@ 
pub fn readVarInt(comptime ReturnType: type, bytes: []const u8, endian: Endian) } }, } - return @as(ReturnType, @truncate(result)); + return @truncate(result); } test readVarInt { @@ -2196,7 +2196,9 @@ pub fn byteSwapAllFields(comptime S: type, ptr: *S) void { } } }, - else => @compileError("byteSwapAllFields expects a struct or array as the first argument"), + else => { + ptr.* = @byteSwap(ptr.*); + }, } } diff --git a/lib/std/multi_array_list.zig b/lib/std/multi_array_list.zig index 279a150799..160a9f2fba 100644 --- a/lib/std/multi_array_list.zig +++ b/lib/std/multi_array_list.zig @@ -991,6 +991,7 @@ test "0 sized struct" { test "struct with many fields" { const ManyFields = struct { fn Type(count: comptime_int) type { + @setEvalBranchQuota(50000); var fields: [count]std.builtin.Type.StructField = undefined; for (0..count) |i| { fields[i] = .{ diff --git a/lib/std/net.zig b/lib/std/net.zig index 9d821c4399..36d6062ef8 100644 --- a/lib/std/net.zig +++ b/lib/std/net.zig @@ -161,22 +161,13 @@ pub const Address = extern union { } } - pub fn format( - self: Address, - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - out_stream: anytype, - ) !void { - if (fmt.len != 0) std.fmt.invalidFmtError(fmt, self); + pub fn format(self: Address, w: *std.io.Writer) std.io.Writer.Error!void { switch (self.any.family) { - posix.AF.INET => try self.in.format(fmt, options, out_stream), - posix.AF.INET6 => try self.in6.format(fmt, options, out_stream), + posix.AF.INET => try self.in.format(w), + posix.AF.INET6 => try self.in6.format(w), posix.AF.UNIX => { - if (!has_unix_sockets) { - unreachable; - } - - try std.fmt.format(out_stream, "{s}", .{std.mem.sliceTo(&self.un.path, 0)}); + if (!has_unix_sockets) unreachable; + try w.writeAll(std.mem.sliceTo(&self.un.path, 0)); }, else => unreachable, } @@ -349,22 +340,9 @@ pub const Ip4Address = extern struct { self.sa.port = mem.nativeToBig(u16, port); } - pub fn format( - self: Ip4Address, - comptime fmt: []const u8, - options: 
std.fmt.FormatOptions, - out_stream: anytype, - ) !void { - if (fmt.len != 0) std.fmt.invalidFmtError(fmt, self); - _ = options; - const bytes = @as(*const [4]u8, @ptrCast(&self.sa.addr)); - try std.fmt.format(out_stream, "{}.{}.{}.{}:{}", .{ - bytes[0], - bytes[1], - bytes[2], - bytes[3], - self.getPort(), - }); + pub fn format(self: Ip4Address, w: *std.io.Writer) std.io.Writer.Error!void { + const bytes: *const [4]u8 = @ptrCast(&self.sa.addr); + try w.print("{d}.{d}.{d}.{d}:{d}", .{ bytes[0], bytes[1], bytes[2], bytes[3], self.getPort() }); } pub fn getOsSockLen(self: Ip4Address) posix.socklen_t { @@ -653,17 +631,10 @@ pub const Ip6Address = extern struct { self.sa.port = mem.nativeToBig(u16, port); } - pub fn format( - self: Ip6Address, - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - out_stream: anytype, - ) !void { - if (fmt.len != 0) std.fmt.invalidFmtError(fmt, self); - _ = options; + pub fn format(self: Ip6Address, w: *std.io.Writer) std.io.Writer.Error!void { const port = mem.bigToNative(u16, self.sa.port); if (mem.eql(u8, self.sa.addr[0..12], &[_]u8{ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff })) { - try std.fmt.format(out_stream, "[::ffff:{}.{}.{}.{}]:{}", .{ + try w.print("[::ffff:{d}.{d}.{d}.{d}]:{d}", .{ self.sa.addr[12], self.sa.addr[13], self.sa.addr[14], @@ -711,14 +682,14 @@ pub const Ip6Address = extern struct { longest_len = 0; } - try out_stream.writeAll("["); + try w.writeAll("["); var i: usize = 0; var abbrv = false; while (i < native_endian_parts.len) : (i += 1) { if (i == longest_start) { // Emit "::" for the longest zero run if (!abbrv) { - try out_stream.writeAll(if (i == 0) "::" else ":"); + try w.writeAll(if (i == 0) "::" else ":"); abbrv = true; } i += longest_len - 1; // Skip the compressed range @@ -727,12 +698,12 @@ pub const Ip6Address = extern struct { if (abbrv) { abbrv = false; } - try std.fmt.format(out_stream, "{x}", .{native_endian_parts[i]}); + try w.print("{x}", .{native_endian_parts[i]}); if (i != 
native_endian_parts.len - 1) { - try out_stream.writeAll(":"); + try w.writeAll(":"); } } - try std.fmt.format(out_stream, "]:{}", .{port}); + try w.print("]:{}", .{port}); } pub fn getOsSockLen(self: Ip6Address) posix.socklen_t { @@ -894,7 +865,7 @@ pub fn getAddressList(allocator: mem.Allocator, name: []const u8, port: u16) Get const name_c = try allocator.dupeZ(u8, name); defer allocator.free(name_c); - const port_c = try std.fmt.allocPrintZ(allocator, "{}", .{port}); + const port_c = try std.fmt.allocPrintSentinel(allocator, "{}", .{port}, 0); defer allocator.free(port_c); const ws2_32 = windows.ws2_32; @@ -966,7 +937,7 @@ pub fn getAddressList(allocator: mem.Allocator, name: []const u8, port: u16) Get const name_c = try allocator.dupeZ(u8, name); defer allocator.free(name_c); - const port_c = try std.fmt.allocPrintZ(allocator, "{}", .{port}); + const port_c = try std.fmt.allocPrintSentinel(allocator, "{}", .{port}, 0); defer allocator.free(port_c); const hints: posix.addrinfo = .{ @@ -1356,7 +1327,7 @@ fn linuxLookupNameFromHosts( }; defer file.close(); - var buffered_reader = std.io.bufferedReader(file.reader()); + var buffered_reader = std.io.bufferedReader(file.deprecatedReader()); const reader = buffered_reader.reader(); var line_buf: [512]u8 = undefined; while (reader.readUntilDelimiterOrEof(&line_buf, '\n') catch |err| switch (err) { @@ -1557,7 +1528,7 @@ fn getResolvConf(allocator: mem.Allocator, rc: *ResolvConf) !void { }; defer file.close(); - var buf_reader = std.io.bufferedReader(file.reader()); + var buf_reader = std.io.bufferedReader(file.deprecatedReader()); const stream = buf_reader.reader(); var line_buf: [512]u8 = undefined; while (stream.readUntilDelimiterOrEof(&line_buf, '\n') catch |err| switch (err) { @@ -1845,8 +1816,8 @@ pub const Stream = struct { pub const ReadError = posix.ReadError; pub const WriteError = posix.WriteError; - pub const Reader = io.Reader(Stream, ReadError, read); - pub const Writer = io.Writer(Stream, WriteError, 
write); + pub const Reader = io.GenericReader(Stream, ReadError, read); + pub const Writer = io.GenericWriter(Stream, WriteError, write); pub fn reader(self: Stream) Reader { return .{ .context = self }; diff --git a/lib/std/net/test.zig b/lib/std/net/test.zig index 88a18094ac..02c927d566 100644 --- a/lib/std/net/test.zig +++ b/lib/std/net/test.zig @@ -5,20 +5,13 @@ const mem = std.mem; const testing = std.testing; test "parse and render IP addresses at comptime" { - if (builtin.os.tag == .wasi) return error.SkipZigTest; comptime { - var ipAddrBuffer: [16]u8 = undefined; - // Parses IPv6 at comptime const ipv6addr = net.Address.parseIp("::1", 0) catch unreachable; - var ipv6 = std.fmt.bufPrint(ipAddrBuffer[0..], "{}", .{ipv6addr}) catch unreachable; - try std.testing.expect(std.mem.eql(u8, "::1", ipv6[1 .. ipv6.len - 3])); + try std.testing.expectFmt("[::1]:0", "{f}", .{ipv6addr}); - // Parses IPv4 at comptime const ipv4addr = net.Address.parseIp("127.0.0.1", 0) catch unreachable; - var ipv4 = std.fmt.bufPrint(ipAddrBuffer[0..], "{}", .{ipv4addr}) catch unreachable; - try std.testing.expect(std.mem.eql(u8, "127.0.0.1", ipv4[0 .. 
ipv4.len - 2])); + try std.testing.expectFmt("127.0.0.1:0", "{f}", .{ipv4addr}); - // Returns error for invalid IP addresses at comptime try testing.expectError(error.InvalidIPAddressFormat, net.Address.parseIp("::123.123.123.123", 0)); try testing.expectError(error.InvalidIPAddressFormat, net.Address.parseIp("127.01.0.1", 0)); try testing.expectError(error.InvalidIPAddressFormat, net.Address.resolveIp("::123.123.123.123", 0)); @@ -27,47 +20,23 @@ test "parse and render IP addresses at comptime" { } test "format IPv6 address with no zero runs" { - if (builtin.os.tag == .wasi) return error.SkipZigTest; - const addr = try std.net.Address.parseIp6("2001:db8:1:2:3:4:5:6", 0); - - var buffer: [50]u8 = undefined; - const result = std.fmt.bufPrint(buffer[0..], "{}", .{addr}) catch unreachable; - - try std.testing.expectEqualStrings("[2001:db8:1:2:3:4:5:6]:0", result); + try std.testing.expectFmt("[2001:db8:1:2:3:4:5:6]:0", "{f}", .{addr}); } test "parse IPv6 addresses and check compressed form" { - if (builtin.os.tag == .wasi) return error.SkipZigTest; - - const alloc = testing.allocator; - - // 1) Parse an IPv6 address that should compress to [2001:db8::1:0:0:2]:0 - const addr1 = try std.net.Address.parseIp6("2001:0db8:0000:0000:0001:0000:0000:0002", 0); - - // 2) Parse an IPv6 address that should compress to [2001:db8::1:2]:0 - const addr2 = try std.net.Address.parseIp6("2001:0db8:0000:0000:0000:0000:0001:0002", 0); - - // 3) Parse an IPv6 address that should compress to [2001:db8:1:0:1::2]:0 - const addr3 = try std.net.Address.parseIp6("2001:0db8:0001:0000:0001:0000:0000:0002", 0); - - // Print each address in Zig's default "[ipv6]:port" form. 
- const printed1 = try std.fmt.allocPrint(alloc, "{any}", .{addr1}); - defer testing.allocator.free(printed1); - const printed2 = try std.fmt.allocPrint(alloc, "{any}", .{addr2}); - defer testing.allocator.free(printed2); - const printed3 = try std.fmt.allocPrint(alloc, "{any}", .{addr3}); - defer testing.allocator.free(printed3); - - // Check the exact compressed forms we expect. - try std.testing.expectEqualStrings("[2001:db8::1:0:0:2]:0", printed1); - try std.testing.expectEqualStrings("[2001:db8::1:2]:0", printed2); - try std.testing.expectEqualStrings("[2001:db8:1:0:1::2]:0", printed3); + try std.testing.expectFmt("[2001:db8::1:0:0:2]:0", "{f}", .{ + try std.net.Address.parseIp6("2001:0db8:0000:0000:0001:0000:0000:0002", 0), + }); + try std.testing.expectFmt("[2001:db8::1:2]:0", "{f}", .{ + try std.net.Address.parseIp6("2001:0db8:0000:0000:0000:0000:0001:0002", 0), + }); + try std.testing.expectFmt("[2001:db8:1:0:1::2]:0", "{f}", .{ + try std.net.Address.parseIp6("2001:0db8:0001:0000:0001:0000:0000:0002", 0), + }); } test "parse IPv6 address, check raw bytes" { - if (builtin.os.tag == .wasi) return error.SkipZigTest; - const expected_raw: [16]u8 = .{ 0x20, 0x01, 0x0d, 0xb8, // 2001:db8 0x00, 0x00, 0x00, 0x00, // :0000:0000 @@ -82,8 +51,6 @@ test "parse IPv6 address, check raw bytes" { } test "parse and render IPv6 addresses" { - if (builtin.os.tag == .wasi) return error.SkipZigTest; - var buffer: [100]u8 = undefined; const ips = [_][]const u8{ "FF01:0:0:0:0:0:0:FB", @@ -111,12 +78,12 @@ test "parse and render IPv6 addresses" { }; for (ips, 0..) |ip, i| { const addr = net.Address.parseIp6(ip, 0) catch unreachable; - var newIp = std.fmt.bufPrint(buffer[0..], "{}", .{addr}) catch unreachable; + var newIp = std.fmt.bufPrint(buffer[0..], "{f}", .{addr}) catch unreachable; try std.testing.expect(std.mem.eql(u8, printed[i], newIp[1 .. 
newIp.len - 3])); if (builtin.os.tag == .linux) { const addr_via_resolve = net.Address.resolveIp6(ip, 0) catch unreachable; - var newResolvedIp = std.fmt.bufPrint(buffer[0..], "{}", .{addr_via_resolve}) catch unreachable; + var newResolvedIp = std.fmt.bufPrint(buffer[0..], "{f}", .{addr_via_resolve}) catch unreachable; try std.testing.expect(std.mem.eql(u8, printed[i], newResolvedIp[1 .. newResolvedIp.len - 3])); } } @@ -148,8 +115,6 @@ test "invalid but parseable IPv6 scope ids" { } test "parse and render IPv4 addresses" { - if (builtin.os.tag == .wasi) return error.SkipZigTest; - var buffer: [18]u8 = undefined; for ([_][]const u8{ "0.0.0.0", @@ -159,7 +124,7 @@ test "parse and render IPv4 addresses" { "127.0.0.1", }) |ip| { const addr = net.Address.parseIp4(ip, 0) catch unreachable; - var newIp = std.fmt.bufPrint(buffer[0..], "{}", .{addr}) catch unreachable; + var newIp = std.fmt.bufPrint(buffer[0..], "{f}", .{addr}) catch unreachable; try std.testing.expect(std.mem.eql(u8, ip, newIp[0 .. 
newIp.len - 2])); } @@ -175,10 +140,8 @@ test "parse and render UNIX addresses" { if (builtin.os.tag == .wasi) return error.SkipZigTest; if (!net.has_unix_sockets) return error.SkipZigTest; - var buffer: [14]u8 = undefined; const addr = net.Address.initUnix("/tmp/testpath") catch unreachable; - const fmt_addr = std.fmt.bufPrint(buffer[0..], "{}", .{addr}) catch unreachable; - try std.testing.expectEqualSlices(u8, "/tmp/testpath", fmt_addr); + try std.testing.expectFmt("/tmp/testpath", "{f}", .{addr}); const too_long = [_]u8{'a'} ** 200; try testing.expectError(error.NameTooLong, net.Address.initUnix(too_long[0..])); diff --git a/lib/std/os.zig b/lib/std/os.zig index 3cf0c745cc..7de672191a 100644 --- a/lib/std/os.zig +++ b/lib/std/os.zig @@ -31,6 +31,7 @@ pub const uefi = @import("os/uefi.zig"); pub const wasi = @import("os/wasi.zig"); pub const emscripten = @import("os/emscripten.zig"); pub const windows = @import("os/windows.zig"); +pub const freebsd = @import("os/freebsd.zig"); test { _ = linux; diff --git a/lib/std/os/freebsd.zig b/lib/std/os/freebsd.zig new file mode 100644 index 0000000000..4c68405c22 --- /dev/null +++ b/lib/std/os/freebsd.zig @@ -0,0 +1,49 @@ +const std = @import("../std.zig"); +const fd_t = std.c.fd_t; +const off_t = std.c.off_t; +const unexpectedErrno = std.posix.unexpectedErrno; +const errno = std.posix.errno; + +pub const CopyFileRangeError = std.posix.UnexpectedError || error{ + /// If infd is not open for reading or outfd is not open for writing, or + /// opened for writing with O_APPEND, or if infd and outfd refer to the + /// same file. + BadFileFlags, + /// If the copy exceeds the process's file size limit or the maximum + /// file size for the file system outfd re- sides on. + FileTooBig, + /// A signal interrupted the system call before it could be completed. + /// This may happen for files on some NFS mounts. 
When this happens, + /// the values pointed to by inoffp and outoffp are reset to the + /// initial values for the system call. + Interrupted, + /// One of: + /// * infd and outfd refer to the same file and the byte ranges overlap. + /// * The flags argument is not zero. + /// * Either infd or outfd refers to a file object that is not a regular file. + InvalidArguments, + /// An I/O error occurred while reading/writing the files. + InputOutput, + /// Corrupted data was detected while reading from a file system. + CorruptedData, + /// Either infd or outfd refers to a directory. + IsDir, + /// File system that stores outfd is full. + NoSpaceLeft, +}; + +pub fn copy_file_range(fd_in: fd_t, off_in: ?*i64, fd_out: fd_t, off_out: ?*i64, len: usize, flags: u32) CopyFileRangeError!usize { + const rc = std.c.copy_file_range(fd_in, off_in, fd_out, off_out, len, flags); + switch (errno(rc)) { + .SUCCESS => return @intCast(rc), + .BADF => return error.BadFileFlags, + .FBIG => return error.FileTooBig, + .INTR => return error.Interrupted, + .INVAL => return error.InvalidArguments, + .IO => return error.InputOutput, + .INTEGRITY => return error.CorruptedData, + .ISDIR => return error.IsDir, + .NOSPC => return error.NoSpaceLeft, + else => |err| return unexpectedErrno(err), + } +} diff --git a/lib/std/os/linux.zig b/lib/std/os/linux.zig index f69fc8f348..75494145b9 100644 --- a/lib/std/os/linux.zig +++ b/lib/std/os/linux.zig @@ -103,8 +103,6 @@ pub const dev_t = arch_bits.dev_t; pub const ino_t = arch_bits.ino_t; pub const mcontext_t = arch_bits.mcontext_t; pub const mode_t = arch_bits.mode_t; -pub const msghdr = arch_bits.msghdr; -pub const msghdr_const = arch_bits.msghdr_const; pub const nlink_t = arch_bits.nlink_t; pub const off_t = arch_bits.off_t; pub const time_t = arch_bits.time_t; @@ -9403,3 +9401,151 @@ pub const SHADOW_STACK = struct { /// Set up a restore token in the shadow stack. 
pub const SET_TOKEN: u64 = 1 << 0; }; + +pub const msghdr = extern struct { + name: ?*sockaddr, + namelen: socklen_t, + iov: [*]iovec, + iovlen: usize, + control: ?*anyopaque, + controllen: usize, + flags: u32, +}; + +pub const msghdr_const = extern struct { + name: ?*const sockaddr, + namelen: socklen_t, + iov: [*]const iovec_const, + iovlen: usize, + control: ?*const anyopaque, + controllen: usize, + flags: u32, +}; + +/// The syscalls, but with Zig error sets, going through libc if linking libc, +/// and with some footguns eliminated. +pub const wrapped = struct { + pub const lfs64_abi = builtin.link_libc and (builtin.abi.isGnu() or builtin.abi.isAndroid()); + const system = if (builtin.link_libc) std.c else std.os.linux; + + pub const SendfileError = std.posix.UnexpectedError || error{ + /// `out_fd` is an unconnected socket, or out_fd closed its read end. + BrokenPipe, + /// Descriptor is not valid or locked, or an mmap(2)-like operation is not available for in_fd. + UnsupportedOperation, + /// Nonblocking I/O has been selected but the write would block. + WouldBlock, + /// Unspecified error while reading from in_fd. + InputOutput, + /// Insufficient kernel memory to read from in_fd. + SystemResources, + /// `offset` is not `null` but the input file is not seekable. + Unseekable, + }; + + pub fn sendfile( + out_fd: fd_t, + in_fd: fd_t, + in_offset: ?*off_t, + in_len: usize, + ) SendfileError!usize { + const adjusted_len = @min(in_len, 0x7ffff000); // Prevents EOVERFLOW. + const sendfileSymbol = if (lfs64_abi) system.sendfile64 else system.sendfile; + const rc = sendfileSymbol(out_fd, in_fd, in_offset, adjusted_len); + switch (errno(rc)) { + .SUCCESS => return @intCast(rc), + .BADF => return invalidApiUsage(), // Always a race condition. + .FAULT => return invalidApiUsage(), // Segmentation fault. + .OVERFLOW => return unexpectedErrno(.OVERFLOW), // We avoid passing too large of a `count`. 
+ .NOTCONN => return error.BrokenPipe, // `out_fd` is an unconnected socket + .INVAL => return error.UnsupportedOperation, + .AGAIN => return error.WouldBlock, + .IO => return error.InputOutput, + .PIPE => return error.BrokenPipe, + .NOMEM => return error.SystemResources, + .NXIO => return error.Unseekable, + .SPIPE => return error.Unseekable, + else => |err| return unexpectedErrno(err), + } + } + + pub const CopyFileRangeError = std.posix.UnexpectedError || error{ + /// One of: + /// * One or more file descriptors are not valid. + /// * fd_in is not open for reading; or fd_out is not open for writing. + /// * The O_APPEND flag is set for the open file description referred + /// to by the file descriptor fd_out. + BadFileFlags, + /// One of: + /// * An attempt was made to write at a position past the maximum file + /// offset the kernel supports. + /// * An attempt was made to write a range that exceeds the allowed + /// maximum file size. The maximum file size differs between + /// filesystem implementations and can be different from the maximum + /// allowed file offset. + /// * An attempt was made to write beyond the process's file size + /// resource limit. This may also result in the process receiving a + /// SIGXFSZ signal. + FileTooBig, + /// One of: + /// * either fd_in or fd_out is not a regular file + /// * flags argument is not zero + /// * fd_in and fd_out refer to the same file and the source and target ranges overlap. + InvalidArguments, + /// A low-level I/O error occurred while copying. + InputOutput, + /// Either fd_in or fd_out refers to a directory. + IsDir, + OutOfMemory, + /// There is not enough space on the target filesystem to complete the copy. + NoSpaceLeft, + /// (since Linux 5.19) the filesystem does not support this operation. + OperationNotSupported, + /// The requested source or destination range is too large to represent + /// in the specified data types. + Overflow, + /// fd_out refers to an immutable file. 
+ PermissionDenied, + /// Either fd_in or fd_out refers to an active swap file. + SwapFile, + /// The files referred to by fd_in and fd_out are not on the same + /// filesystem, and the source and target filesystems are not of the + /// same type, or do not support cross-filesystem copy. + NotSameFileSystem, + }; + + pub fn copy_file_range(fd_in: fd_t, off_in: ?*i64, fd_out: fd_t, off_out: ?*i64, len: usize, flags: u32) CopyFileRangeError!usize { + const rc = system.copy_file_range(fd_in, off_in, fd_out, off_out, len, flags); + switch (errno(rc)) { + .SUCCESS => return @intCast(rc), + .BADF => return error.BadFileFlags, + .FBIG => return error.FileTooBig, + .INVAL => return error.InvalidArguments, + .IO => return error.InputOutput, + .ISDIR => return error.IsDir, + .NOMEM => return error.OutOfMemory, + .NOSPC => return error.NoSpaceLeft, + .OPNOTSUPP => return error.OperationNotSupported, + .OVERFLOW => return error.Overflow, + .PERM => return error.PermissionDenied, + .TXTBSY => return error.SwapFile, + .XDEV => return error.NotSameFileSystem, + else => |err| return unexpectedErrno(err), + } + } + + const unexpectedErrno = std.posix.unexpectedErrno; + + fn invalidApiUsage() error{Unexpected} { + if (builtin.mode == .Debug) @panic("invalid API usage"); + return error.Unexpected; + } + + fn errno(rc: anytype) E { + if (builtin.link_libc) { + return if (rc == -1) @enumFromInt(std.c._errno().*) else .SUCCESS; + } else { + return errnoFromSyscall(rc); + } + } +}; diff --git a/lib/std/os/linux/aarch64.zig b/lib/std/os/linux/aarch64.zig index ed40145e1b..cd3d1ab027 100644 --- a/lib/std/os/linux/aarch64.zig +++ b/lib/std/os/linux/aarch64.zig @@ -199,30 +199,6 @@ pub const Flock = extern struct { __unused: [4]u8, }; -pub const msghdr = extern struct { - name: ?*sockaddr, - namelen: socklen_t, - iov: [*]iovec, - iovlen: i32, - __pad1: i32 = 0, - control: ?*anyopaque, - controllen: socklen_t, - __pad2: socklen_t = 0, - flags: i32, -}; - -pub const msghdr_const = extern 
struct { - name: ?*const sockaddr, - namelen: socklen_t, - iov: [*]const iovec_const, - iovlen: i32, - __pad1: i32 = 0, - control: ?*const anyopaque, - controllen: socklen_t, - __pad2: socklen_t = 0, - flags: i32, -}; - pub const blksize_t = i32; pub const nlink_t = u32; pub const time_t = isize; diff --git a/lib/std/os/linux/arm.zig b/lib/std/os/linux/arm.zig index 8fea30a5da..ec7616fe82 100644 --- a/lib/std/os/linux/arm.zig +++ b/lib/std/os/linux/arm.zig @@ -237,26 +237,6 @@ pub const Flock = extern struct { __unused: [4]u8, }; -pub const msghdr = extern struct { - name: ?*sockaddr, - namelen: socklen_t, - iov: [*]iovec, - iovlen: i32, - control: ?*anyopaque, - controllen: socklen_t, - flags: i32, -}; - -pub const msghdr_const = extern struct { - name: ?*const sockaddr, - namelen: socklen_t, - iov: [*]const iovec_const, - iovlen: i32, - control: ?*const anyopaque, - controllen: socklen_t, - flags: i32, -}; - pub const blksize_t = i32; pub const nlink_t = u32; pub const time_t = isize; diff --git a/lib/std/os/linux/mips.zig b/lib/std/os/linux/mips.zig index ed87b4f734..b12854ea17 100644 --- a/lib/std/os/linux/mips.zig +++ b/lib/std/os/linux/mips.zig @@ -309,26 +309,6 @@ pub const Flock = extern struct { __unused: [4]u8, }; -pub const msghdr = extern struct { - name: ?*sockaddr, - namelen: socklen_t, - iov: [*]iovec, - iovlen: i32, - control: ?*anyopaque, - controllen: socklen_t, - flags: i32, -}; - -pub const msghdr_const = extern struct { - name: ?*const sockaddr, - namelen: socklen_t, - iov: [*]const iovec_const, - iovlen: i32, - control: ?*const anyopaque, - controllen: socklen_t, - flags: i32, -}; - pub const blksize_t = u32; pub const nlink_t = u32; pub const time_t = i32; diff --git a/lib/std/os/linux/mips64.zig b/lib/std/os/linux/mips64.zig index 11148fd5c9..6e47f203a6 100644 --- a/lib/std/os/linux/mips64.zig +++ b/lib/std/os/linux/mips64.zig @@ -288,26 +288,6 @@ pub const Flock = extern struct { __unused: [4]u8, }; -pub const msghdr = extern struct { - 
name: ?*sockaddr, - namelen: socklen_t, - iov: [*]iovec, - iovlen: i32, - control: ?*anyopaque, - controllen: socklen_t, - flags: i32, -}; - -pub const msghdr_const = extern struct { - name: ?*const sockaddr, - namelen: socklen_t, - iov: [*]const iovec_const, - iovlen: i32, - control: ?*const anyopaque, - controllen: socklen_t, - flags: i32, -}; - pub const blksize_t = u32; pub const nlink_t = u32; pub const time_t = i32; diff --git a/lib/std/os/linux/powerpc.zig b/lib/std/os/linux/powerpc.zig index 9ca6119219..1fe4c8349f 100644 --- a/lib/std/os/linux/powerpc.zig +++ b/lib/std/os/linux/powerpc.zig @@ -247,26 +247,6 @@ pub const Flock = extern struct { pid: pid_t, }; -pub const msghdr = extern struct { - name: ?*sockaddr, - namelen: socklen_t, - iov: [*]iovec, - iovlen: usize, - control: ?*anyopaque, - controllen: socklen_t, - flags: i32, -}; - -pub const msghdr_const = extern struct { - name: ?*const sockaddr, - namelen: socklen_t, - iov: [*]const iovec_const, - iovlen: usize, - control: ?*const anyopaque, - controllen: socklen_t, - flags: i32, -}; - pub const blksize_t = i32; pub const nlink_t = u32; pub const time_t = isize; diff --git a/lib/std/os/linux/powerpc64.zig b/lib/std/os/linux/powerpc64.zig index 1d4205626c..88414b7207 100644 --- a/lib/std/os/linux/powerpc64.zig +++ b/lib/std/os/linux/powerpc64.zig @@ -233,26 +233,6 @@ pub const Flock = extern struct { __unused: [4]u8, }; -pub const msghdr = extern struct { - name: ?*sockaddr, - namelen: socklen_t, - iov: [*]iovec, - iovlen: usize, - control: ?*anyopaque, - controllen: usize, - flags: i32, -}; - -pub const msghdr_const = extern struct { - name: ?*const sockaddr, - namelen: socklen_t, - iov: [*]const iovec_const, - iovlen: usize, - control: ?*const anyopaque, - controllen: usize, - flags: i32, -}; - pub const blksize_t = i64; pub const nlink_t = u64; pub const time_t = i64; diff --git a/lib/std/os/linux/riscv32.zig b/lib/std/os/linux/riscv32.zig index 7c03b3259e..bbe9fab8f0 100644 --- 
a/lib/std/os/linux/riscv32.zig +++ b/lib/std/os/linux/riscv32.zig @@ -200,30 +200,6 @@ pub const Flock = extern struct { __unused: [4]u8, }; -pub const msghdr = extern struct { - name: ?*sockaddr, - namelen: socklen_t, - iov: [*]iovec, - iovlen: i32, - __pad1: i32 = 0, - control: ?*anyopaque, - controllen: socklen_t, - __pad2: socklen_t = 0, - flags: i32, -}; - -pub const msghdr_const = extern struct { - name: ?*const sockaddr, - namelen: socklen_t, - iov: [*]const iovec_const, - iovlen: i32, - __pad1: i32 = 0, - control: ?*const anyopaque, - controllen: socklen_t, - __pad2: socklen_t = 0, - flags: i32, -}; - // The `stat` definition used by the Linux kernel. pub const Stat = extern struct { dev: dev_t, diff --git a/lib/std/os/linux/riscv64.zig b/lib/std/os/linux/riscv64.zig index 3d3c8a5afa..5fea49b8e2 100644 --- a/lib/std/os/linux/riscv64.zig +++ b/lib/std/os/linux/riscv64.zig @@ -200,30 +200,6 @@ pub const Flock = extern struct { __unused: [4]u8, }; -pub const msghdr = extern struct { - name: ?*sockaddr, - namelen: socklen_t, - iov: [*]iovec, - iovlen: i32, - __pad1: i32 = 0, - control: ?*anyopaque, - controllen: socklen_t, - __pad2: socklen_t = 0, - flags: i32, -}; - -pub const msghdr_const = extern struct { - name: ?*const sockaddr, - namelen: socklen_t, - iov: [*]const iovec_const, - iovlen: i32, - __pad1: i32 = 0, - control: ?*const anyopaque, - controllen: socklen_t, - __pad2: socklen_t = 0, - flags: i32, -}; - // The `stat` definition used by the Linux kernel. 
pub const Stat = extern struct { dev: dev_t, diff --git a/lib/std/os/linux/sparc64.zig b/lib/std/os/linux/sparc64.zig index 34df73fcb1..d34ca92368 100644 --- a/lib/std/os/linux/sparc64.zig +++ b/lib/std/os/linux/sparc64.zig @@ -282,26 +282,6 @@ pub const Flock = extern struct { pid: pid_t, }; -pub const msghdr = extern struct { - name: ?*sockaddr, - namelen: socklen_t, - iov: [*]iovec, - iovlen: u64, - control: ?*anyopaque, - controllen: u64, - flags: i32, -}; - -pub const msghdr_const = extern struct { - name: ?*const sockaddr, - namelen: socklen_t, - iov: [*]const iovec_const, - iovlen: u64, - control: ?*const anyopaque, - controllen: u64, - flags: i32, -}; - pub const off_t = i64; pub const ino_t = u64; pub const time_t = isize; diff --git a/lib/std/os/linux/x86.zig b/lib/std/os/linux/x86.zig index 41e1ec7d99..cc3932c899 100644 --- a/lib/std/os/linux/x86.zig +++ b/lib/std/os/linux/x86.zig @@ -245,26 +245,6 @@ pub const Flock = extern struct { pid: pid_t, }; -pub const msghdr = extern struct { - name: ?*sockaddr, - namelen: socklen_t, - iov: [*]iovec, - iovlen: i32, - control: ?*anyopaque, - controllen: socklen_t, - flags: i32, -}; - -pub const msghdr_const = extern struct { - name: ?*const sockaddr, - namelen: socklen_t, - iov: [*]const iovec_const, - iovlen: i32, - control: ?*const anyopaque, - controllen: socklen_t, - flags: i32, -}; - pub const blksize_t = i32; pub const nlink_t = u32; pub const time_t = isize; diff --git a/lib/std/os/linux/x86_64.zig b/lib/std/os/linux/x86_64.zig index 7ef86fdcf8..bd08c3f147 100644 --- a/lib/std/os/linux/x86_64.zig +++ b/lib/std/os/linux/x86_64.zig @@ -233,30 +233,6 @@ pub const Flock = extern struct { pid: pid_t, }; -pub const msghdr = extern struct { - name: ?*sockaddr, - namelen: socklen_t, - iov: [*]iovec, - iovlen: i32, - __pad1: i32 = 0, - control: ?*anyopaque, - controllen: socklen_t, - __pad2: socklen_t = 0, - flags: i32, -}; - -pub const msghdr_const = extern struct { - name: ?*const sockaddr, - namelen: socklen_t, 
- iov: [*]const iovec_const, - iovlen: i32, - __pad1: i32 = 0, - control: ?*const anyopaque, - controllen: socklen_t, - __pad2: socklen_t = 0, - flags: i32, -}; - pub const off_t = i64; pub const ino_t = u64; pub const dev_t = u64; diff --git a/lib/std/os/uefi.zig b/lib/std/os/uefi.zig index c362f707c6..6526f590fa 100644 --- a/lib/std/os/uefi.zig +++ b/lib/std/os/uefi.zig @@ -1,4 +1,5 @@ const std = @import("../std.zig"); +const assert = std.debug.assert; /// A protocol is an interface identified by a GUID. pub const protocol = @import("uefi/protocol.zig"); @@ -59,31 +60,19 @@ pub const Guid = extern struct { node: [6]u8, /// Format GUID into hexadecimal lowercase xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx format - pub fn format( - self: @This(), - comptime f: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = options; - if (f.len == 0) { - const fmt = std.fmt.fmtSliceHexLower; + pub fn format(self: @This(), writer: *std.io.Writer) std.io.Writer.Error!void { + const time_low = @byteSwap(self.time_low); + const time_mid = @byteSwap(self.time_mid); + const time_high_and_version = @byteSwap(self.time_high_and_version); - const time_low = @byteSwap(self.time_low); - const time_mid = @byteSwap(self.time_mid); - const time_high_and_version = @byteSwap(self.time_high_and_version); - - return std.fmt.format(writer, "{:0>8}-{:0>4}-{:0>4}-{:0>2}{:0>2}-{:0>12}", .{ - fmt(std.mem.asBytes(&time_low)), - fmt(std.mem.asBytes(&time_mid)), - fmt(std.mem.asBytes(&time_high_and_version)), - fmt(std.mem.asBytes(&self.clock_seq_high_and_reserved)), - fmt(std.mem.asBytes(&self.clock_seq_low)), - fmt(std.mem.asBytes(&self.node)), - }); - } else { - std.fmt.invalidFmtError(f, self); - } + return writer.print("{x:0>8}-{x:0>4}-{x:0>4}-{x:0>2}{x:0>2}-{x:0>12}", .{ + std.mem.asBytes(&time_low), + std.mem.asBytes(&time_mid), + std.mem.asBytes(&time_high_and_version), + std.mem.asBytes(&self.clock_seq_high_and_reserved), + std.mem.asBytes(&self.clock_seq_low), + 
std.mem.asBytes(&self.node), + }); } pub fn eql(a: std.os.uefi.Guid, b: std.os.uefi.Guid) bool { diff --git a/lib/std/os/uefi/protocol/file.zig b/lib/std/os/uefi/protocol/file.zig index 2e9c251642..f8802fa64f 100644 --- a/lib/std/os/uefi/protocol/file.zig +++ b/lib/std/os/uefi/protocol/file.zig @@ -79,30 +79,6 @@ pub const File = extern struct { VolumeFull, }; - pub const SeekableStream = io.SeekableStream( - *File, - SeekError, - SeekError, - setPosition, - seekBy, - getPosition, - getEndPos, - ); - pub const Reader = io.Reader(*File, ReadError, read); - pub const Writer = io.Writer(*File, WriteError, write); - - pub fn seekableStream(self: *File) SeekableStream { - return .{ .context = self }; - } - - pub fn reader(self: *File) Reader { - return .{ .context = self }; - } - - pub fn writer(self: *File) Writer { - return .{ .context = self }; - } - pub fn open( self: *const File, file_name: [*:0]const u16, diff --git a/lib/std/os/windows.zig b/lib/std/os/windows.zig index 3aaff1d60a..a388072e74 100644 --- a/lib/std/os/windows.zig +++ b/lib/std/os/windows.zig @@ -1690,40 +1690,6 @@ pub fn getpeername(s: ws2_32.SOCKET, name: *ws2_32.sockaddr, namelen: *ws2_32.so return ws2_32.getpeername(s, name, @as(*i32, @ptrCast(namelen))); } -pub fn sendmsg( - s: ws2_32.SOCKET, - msg: *ws2_32.WSAMSG_const, - flags: u32, -) i32 { - var bytes_send: DWORD = undefined; - if (ws2_32.WSASendMsg(s, msg, flags, &bytes_send, null, null) == ws2_32.SOCKET_ERROR) { - return ws2_32.SOCKET_ERROR; - } else { - return @as(i32, @as(u31, @intCast(bytes_send))); - } -} - -pub fn sendto(s: ws2_32.SOCKET, buf: [*]const u8, len: usize, flags: u32, to: ?*const ws2_32.sockaddr, to_len: ws2_32.socklen_t) i32 { - var buffer = ws2_32.WSABUF{ .len = @as(u31, @truncate(len)), .buf = @constCast(buf) }; - var bytes_send: DWORD = undefined; - if (ws2_32.WSASendTo(s, @as([*]ws2_32.WSABUF, @ptrCast(&buffer)), 1, &bytes_send, flags, to, @as(i32, @intCast(to_len)), null, null) == ws2_32.SOCKET_ERROR) { - return 
ws2_32.SOCKET_ERROR; - } else { - return @as(i32, @as(u31, @intCast(bytes_send))); - } -} - -pub fn recvfrom(s: ws2_32.SOCKET, buf: [*]u8, len: usize, flags: u32, from: ?*ws2_32.sockaddr, from_len: ?*ws2_32.socklen_t) i32 { - var buffer = ws2_32.WSABUF{ .len = @as(u31, @truncate(len)), .buf = buf }; - var bytes_received: DWORD = undefined; - var flags_inout = flags; - if (ws2_32.WSARecvFrom(s, @as([*]ws2_32.WSABUF, @ptrCast(&buffer)), 1, &bytes_received, &flags_inout, from, @as(?*i32, @ptrCast(from_len)), null, null) == ws2_32.SOCKET_ERROR) { - return ws2_32.SOCKET_ERROR; - } else { - return @as(i32, @as(u31, @intCast(bytes_received))); - } -} - pub fn poll(fds: [*]ws2_32.pollfd, n: c_ulong, timeout: i32) i32 { return ws2_32.WSAPoll(fds, n, timeout); } @@ -2846,9 +2812,8 @@ pub fn unexpectedError(err: Win32Error) UnexpectedError { buf_wstr.len, null, ); - std.debug.print("error.Unexpected: GetLastError({}): {}\n", .{ - @intFromEnum(err), - std.unicode.fmtUtf16Le(buf_wstr[0..len]), + std.debug.print("error.Unexpected: GetLastError({d}): {f}\n", .{ + err, std.unicode.fmtUtf16Le(buf_wstr[0..len]), }); std.debug.dumpCurrentStackTrace(@returnAddress()); } diff --git a/lib/std/os/windows/test.zig b/lib/std/os/windows/test.zig index b4cecefbd7..b78e4c323a 100644 --- a/lib/std/os/windows/test.zig +++ b/lib/std/os/windows/test.zig @@ -30,7 +30,7 @@ fn testToPrefixedFileNoOracle(comptime path: []const u8, comptime expected_path: const expected_path_utf16 = std.unicode.utf8ToUtf16LeStringLiteral(expected_path); const actual_path = try windows.wToPrefixedFileW(null, path_utf16); std.testing.expectEqualSlices(u16, expected_path_utf16, actual_path.span()) catch |e| { - std.debug.print("got '{s}', expected '{s}'\n", .{ std.unicode.fmtUtf16Le(actual_path.span()), std.unicode.fmtUtf16Le(expected_path_utf16) }); + std.debug.print("got '{f}', expected '{f}'\n", .{ std.unicode.fmtUtf16Le(actual_path.span()), std.unicode.fmtUtf16Le(expected_path_utf16) }); return e; }; } @@ -48,7 +48,7 
@@ fn testToPrefixedFileOnlyOracle(comptime path: []const u8) !void { const zig_result = try windows.wToPrefixedFileW(null, path_utf16); const win32_api_result = try RtlDosPathNameToNtPathName_U(path_utf16); std.testing.expectEqualSlices(u16, win32_api_result.span(), zig_result.span()) catch |e| { - std.debug.print("got '{s}', expected '{s}'\n", .{ std.unicode.fmtUtf16Le(zig_result.span()), std.unicode.fmtUtf16Le(win32_api_result.span()) }); + std.debug.print("got '{f}', expected '{f}'\n", .{ std.unicode.fmtUtf16Le(zig_result.span()), std.unicode.fmtUtf16Le(win32_api_result.span()) }); return e; }; } diff --git a/lib/std/os/windows/ws2_32.zig b/lib/std/os/windows/ws2_32.zig index e8375dc2c1..83194425fa 100644 --- a/lib/std/os/windows/ws2_32.zig +++ b/lib/std/os/windows/ws2_32.zig @@ -1829,7 +1829,7 @@ pub extern "ws2_32" fn sendto( buf: [*]const u8, len: i32, flags: i32, - to: *const sockaddr, + to: ?*const sockaddr, tolen: i32, ) callconv(.winapi) i32; @@ -2116,14 +2116,6 @@ pub extern "ws2_32" fn WSASendMsg( lpCompletionRoutine: ?LPWSAOVERLAPPED_COMPLETION_ROUTINE, ) callconv(.winapi) i32; -pub extern "ws2_32" fn WSARecvMsg( - s: SOCKET, - lpMsg: *WSAMSG, - lpdwNumberOfBytesRecv: ?*u32, - lpOverlapped: ?*OVERLAPPED, - lpCompletionRoutine: ?LPWSAOVERLAPPED_COMPLETION_ROUTINE, -) callconv(.winapi) i32; - pub extern "ws2_32" fn WSASendDisconnect( s: SOCKET, lpOutboundDisconnectData: ?*WSABUF, diff --git a/lib/std/posix.zig b/lib/std/posix.zig index ae638226a1..c42ddd84b7 100644 --- a/lib/std/posix.zig +++ b/lib/std/posix.zig @@ -651,7 +651,7 @@ fn getRandomBytesDevURandom(buf: []u8) !void { } const file: fs.File = .{ .handle = fd }; - const stream = file.reader(); + const stream = file.deprecatedReader(); stream.readNoEof(buf) catch return error.Unexpected; } diff --git a/lib/std/posix/test.zig b/lib/std/posix/test.zig index 776fe615b3..993b8e28cd 100644 --- a/lib/std/posix/test.zig +++ b/lib/std/posix/test.zig @@ -667,7 +667,7 @@ test "mmap" { const file = try 
tmp.dir.createFile(test_out_file, .{}); defer file.close(); - const stream = file.writer(); + const stream = file.deprecatedWriter(); var i: u32 = 0; while (i < alloc_size / @sizeOf(u32)) : (i += 1) { diff --git a/lib/std/process.zig b/lib/std/process.zig index 072cbeefdb..58d16eef1d 100644 --- a/lib/std/process.zig +++ b/lib/std/process.zig @@ -1553,7 +1553,7 @@ pub fn posixGetUserInfo(name: []const u8) !UserInfo { const file = try std.fs.openFileAbsolute("/etc/passwd", .{}); defer file.close(); - const reader = file.reader(); + const reader = file.deprecatedReader(); const State = enum { Start, @@ -1895,7 +1895,7 @@ pub fn createEnvironFromMap( var i: usize = 0; if (zig_progress_action == .add) { - envp_buf[i] = try std.fmt.allocPrintZ(arena, "ZIG_PROGRESS={d}", .{options.zig_progress_fd.?}); + envp_buf[i] = try std.fmt.allocPrintSentinel(arena, "ZIG_PROGRESS={d}", .{options.zig_progress_fd.?}, 0); i += 1; } @@ -1906,16 +1906,16 @@ pub fn createEnvironFromMap( .add => unreachable, .delete => continue, .edit => { - envp_buf[i] = try std.fmt.allocPrintZ(arena, "{s}={d}", .{ + envp_buf[i] = try std.fmt.allocPrintSentinel(arena, "{s}={d}", .{ pair.key_ptr.*, options.zig_progress_fd.?, - }); + }, 0); i += 1; continue; }, .nothing => {}, }; - envp_buf[i] = try std.fmt.allocPrintZ(arena, "{s}={s}", .{ pair.key_ptr.*, pair.value_ptr.* }); + envp_buf[i] = try std.fmt.allocPrintSentinel(arena, "{s}={s}", .{ pair.key_ptr.*, pair.value_ptr.* }, 0); i += 1; } } @@ -1965,7 +1965,7 @@ pub fn createEnvironFromExisting( var existing_index: usize = 0; if (zig_progress_action == .add) { - envp_buf[i] = try std.fmt.allocPrintZ(arena, "ZIG_PROGRESS={d}", .{options.zig_progress_fd.?}); + envp_buf[i] = try std.fmt.allocPrintSentinel(arena, "ZIG_PROGRESS={d}", .{options.zig_progress_fd.?}, 0); i += 1; } @@ -1974,7 +1974,7 @@ pub fn createEnvironFromExisting( .add => unreachable, .delete => continue, .edit => { - envp_buf[i] = try std.fmt.allocPrintZ(arena, "ZIG_PROGRESS={d}", 
.{options.zig_progress_fd.?}); + envp_buf[i] = try std.fmt.allocPrintSentinel(arena, "ZIG_PROGRESS={d}", .{options.zig_progress_fd.?}, 0); i += 1; continue; }, diff --git a/lib/std/process/Child.zig b/lib/std/process/Child.zig index f9dc28aaaf..c2effb523a 100644 --- a/lib/std/process/Child.zig +++ b/lib/std/process/Child.zig @@ -1004,12 +1004,12 @@ fn forkChildErrReport(fd: i32, err: ChildProcess.SpawnError) noreturn { fn writeIntFd(fd: i32, value: ErrInt) !void { const file: File = .{ .handle = fd }; - file.writer().writeInt(u64, @intCast(value), .little) catch return error.SystemResources; + file.deprecatedWriter().writeInt(u64, @intCast(value), .little) catch return error.SystemResources; } fn readIntFd(fd: i32) !ErrInt { const file: File = .{ .handle = fd }; - return @intCast(file.reader().readInt(u64, .little) catch return error.SystemResources); + return @intCast(file.deprecatedReader().readInt(u64, .little) catch return error.SystemResources); } const ErrInt = std.meta.Int(.unsigned, @sizeOf(anyerror) * 8); diff --git a/lib/std/tar.zig b/lib/std/tar.zig index f3aba4d381..729a07db0a 100644 --- a/lib/std/tar.zig +++ b/lib/std/tar.zig @@ -348,7 +348,7 @@ pub fn Iterator(comptime ReaderType: type) type { unread_bytes: *u64, parent_reader: ReaderType, - pub const Reader = std.io.Reader(File, ReaderType.Error, File.read); + pub const Reader = std.io.GenericReader(File, ReaderType.Error, File.read); pub fn reader(self: File) Reader { return .{ .context = self }; diff --git a/lib/std/testing.zig b/lib/std/testing.zig index f52135f237..8e7ffd3f23 100644 --- a/lib/std/testing.zig +++ b/lib/std/testing.zig @@ -105,7 +105,7 @@ fn expectEqualInner(comptime T: type, expected: T, actual: T) !void { .error_set, => { if (actual != expected) { - print("expected {}, found {}\n", .{ expected, actual }); + print("expected {any}, found {any}\n", .{ expected, actual }); return error.TestExpectedEqual; } }, @@ -267,9 +267,13 @@ test "expectEqual null" { /// This function is 
intended to be used only in tests. When the formatted result of the template /// and its arguments does not equal the expected text, it prints diagnostics to stderr to show how -/// they are not equal, then returns an error. It depends on `expectEqualStrings()` for printing +/// they are not equal, then returns an error. It depends on `expectEqualStrings` for printing /// diagnostics. pub fn expectFmt(expected: []const u8, comptime template: []const u8, args: anytype) !void { + if (@inComptime()) { + var buffer: [std.fmt.count(template, args)]u8 = undefined; + return expectEqualStrings(expected, try std.fmt.bufPrint(&buffer, template, args)); + } const actual = try std.fmt.allocPrint(allocator, template, args); defer allocator.free(actual); return expectEqualStrings(expected, actual); @@ -356,9 +360,6 @@ test expectApproxEqRel { /// The colorized output is optional and controlled by the return of `std.io.tty.detectConfig()`. /// If your inputs are UTF-8 encoded strings, consider calling `expectEqualStrings` instead. pub fn expectEqualSlices(comptime T: type, expected: []const T, actual: []const T) !void { - if (expected.ptr == actual.ptr and expected.len == actual.len) { - return; - } const diff_index: usize = diff_index: { const shortest = @min(expected.len, actual.len); var index: usize = 0; @@ -367,12 +368,21 @@ pub fn expectEqualSlices(comptime T: type, expected: []const T, actual: []const } break :diff_index if (expected.len == actual.len) return else shortest; }; + if (!backend_can_print) return error.TestExpectedEqual; + const stderr_w = std.debug.lockStderrWriter(&.{}); + defer std.debug.unlockStderrWriter(); + failEqualSlices(T, expected, actual, diff_index, stderr_w) catch {}; + return error.TestExpectedEqual; +} - if (!backend_can_print) { - return error.TestExpectedEqual; - } - - print("slices differ. 
first difference occurs at index {d} (0x{X})\n", .{ diff_index, diff_index }); +fn failEqualSlices( + comptime T: type, + expected: []const T, + actual: []const T, + diff_index: usize, + w: *std.io.Writer, +) !void { + try w.print("slices differ. first difference occurs at index {d} (0x{X})\n", .{ diff_index, diff_index }); // TODO: Should this be configurable by the caller? const max_lines: usize = 16; @@ -390,8 +400,7 @@ pub fn expectEqualSlices(comptime T: type, expected: []const T, actual: []const const actual_window = actual[window_start..@min(actual.len, window_start + max_window_size)]; const actual_truncated = window_start + actual_window.len < actual.len; - const stderr = std.io.getStdErr(); - const ttyconf = std.io.tty.detectConfig(stderr); + const ttyconf = std.io.tty.detectConfig(.stderr()); var differ = if (T == u8) BytesDiffer{ .expected = expected_window, .actual = actual_window, @@ -407,47 +416,47 @@ pub fn expectEqualSlices(comptime T: type, expected: []const T, actual: []const // that is usually useful. const index_fmt = if (T == u8) "0x{X}" else "{}"; - print("\n============ expected this output: ============= len: {} (0x{X})\n\n", .{ expected.len, expected.len }); + try w.print("\n============ expected this output: ============= len: {} (0x{X})\n\n", .{ expected.len, expected.len }); if (window_start > 0) { if (T == u8) { - print("... truncated, start index: " ++ index_fmt ++ " ...\n", .{window_start}); + try w.print("... truncated, start index: " ++ index_fmt ++ " ...\n", .{window_start}); } else { - print("... truncated ...\n", .{}); + try w.print("... truncated ...\n", .{}); } } - differ.write(stderr.writer()) catch {}; + differ.write(w) catch {}; if (expected_truncated) { const end_offset = window_start + expected_window.len; const num_missing_items = expected.len - (window_start + expected_window.len); if (T == u8) { - print("... truncated, indexes [" ++ index_fmt ++ "..] 
not shown, remaining bytes: " ++ index_fmt ++ " ...\n", .{ end_offset, num_missing_items }); + try w.print("... truncated, indexes [" ++ index_fmt ++ "..] not shown, remaining bytes: " ++ index_fmt ++ " ...\n", .{ end_offset, num_missing_items }); } else { - print("... truncated, remaining items: " ++ index_fmt ++ " ...\n", .{num_missing_items}); + try w.print("... truncated, remaining items: " ++ index_fmt ++ " ...\n", .{num_missing_items}); } } // now reverse expected/actual and print again differ.expected = actual_window; differ.actual = expected_window; - print("\n============= instead found this: ============== len: {} (0x{X})\n\n", .{ actual.len, actual.len }); + try w.print("\n============= instead found this: ============== len: {} (0x{X})\n\n", .{ actual.len, actual.len }); if (window_start > 0) { if (T == u8) { - print("... truncated, start index: " ++ index_fmt ++ " ...\n", .{window_start}); + try w.print("... truncated, start index: " ++ index_fmt ++ " ...\n", .{window_start}); } else { - print("... truncated ...\n", .{}); + try w.print("... truncated ...\n", .{}); } } - differ.write(stderr.writer()) catch {}; + differ.write(w) catch {}; if (actual_truncated) { const end_offset = window_start + actual_window.len; const num_missing_items = actual.len - (window_start + actual_window.len); if (T == u8) { - print("... truncated, indexes [" ++ index_fmt ++ "..] not shown, remaining bytes: " ++ index_fmt ++ " ...\n", .{ end_offset, num_missing_items }); + try w.print("... truncated, indexes [" ++ index_fmt ++ "..] not shown, remaining bytes: " ++ index_fmt ++ " ...\n", .{ end_offset, num_missing_items }); } else { - print("... truncated, remaining items: " ++ index_fmt ++ " ...\n", .{num_missing_items}); + try w.print("... 
truncated, remaining items: " ++ index_fmt ++ " ...\n", .{num_missing_items}); } } - print("\n================================================\n\n", .{}); + try w.print("\n================================================\n\n", .{}); return error.TestExpectedEqual; } @@ -461,7 +470,7 @@ fn SliceDiffer(comptime T: type) type { const Self = @This(); - pub fn write(self: Self, writer: anytype) !void { + pub fn write(self: Self, writer: *std.io.Writer) !void { for (self.expected, 0..) |value, i| { const full_index = self.start_index + i; const diff = if (i < self.actual.len) !std.meta.eql(self.actual[i], value) else true; @@ -482,7 +491,7 @@ const BytesDiffer = struct { actual: []const u8, ttyconf: std.io.tty.Config, - pub fn write(self: BytesDiffer, writer: anytype) !void { + pub fn write(self: BytesDiffer, writer: *std.io.Writer) !void { var expected_iterator = std.mem.window(u8, self.expected, 16, 16); var row: usize = 0; while (expected_iterator.next()) |chunk| { @@ -499,7 +508,7 @@ const BytesDiffer = struct { if (chunk.len < 16) { var missing_columns = (16 - chunk.len) * 3; if (chunk.len < 8) missing_columns += 1; - try writer.writeByteNTimes(' ', missing_columns); + try writer.splatByteAll(' ', missing_columns); } for (chunk, 0..) 
|byte, col| { const diff = diffs.isSet(col); @@ -528,7 +537,7 @@ const BytesDiffer = struct { } } - fn writeDiff(self: BytesDiffer, writer: anytype, comptime fmt: []const u8, args: anytype, diff: bool) !void { + fn writeDiff(self: BytesDiffer, writer: *std.io.Writer, comptime fmt: []const u8, args: anytype, diff: bool) !void { if (diff) try self.ttyconf.setColor(writer, .red); try writer.print(fmt, args); if (diff) try self.ttyconf.setColor(writer, .reset); @@ -637,6 +646,11 @@ pub fn tmpDir(opts: std.fs.Dir.OpenOptions) TmpDir { pub fn expectEqualStrings(expected: []const u8, actual: []const u8) !void { if (std.mem.indexOfDiff(u8, actual, expected)) |diff_index| { + if (@inComptime()) { + @compileError(std.fmt.comptimePrint("\nexpected:\n{s}\nfound:\n{s}\ndifference starts at index {d}", .{ + expected, actual, diff_index, + })); + } print("\n====== expected this output: =========\n", .{}); printWithVisibleNewlines(expected); print("\n======== instead found this: =========\n", .{}); @@ -1108,7 +1122,7 @@ pub fn checkAllAllocationFailures(backing_allocator: std.mem.Allocator, comptime const arg_i_str = comptime str: { var str_buf: [100]u8 = undefined; const args_i = i + 1; - const str_len = std.fmt.formatIntBuf(&str_buf, args_i, 10, .lower, .{}); + const str_len = std.fmt.printInt(&str_buf, args_i, 10, .lower, .{}); break :str str_buf[0..str_len]; }; @field(args, arg_i_str) = @field(extra_args, field.name); @@ -1138,7 +1152,7 @@ pub fn checkAllAllocationFailures(backing_allocator: std.mem.Allocator, comptime error.OutOfMemory => { if (failing_allocator_inst.allocated_bytes != failing_allocator_inst.freed_bytes) { print( - "\nfail_index: {d}/{d}\nallocated bytes: {d}\nfreed bytes: {d}\nallocations: {d}\ndeallocations: {d}\nallocation that was made to fail: {}", + "\nfail_index: {d}/{d}\nallocated bytes: {d}\nfreed bytes: {d}\nallocations: {d}\ndeallocations: {d}\nallocation that was made to fail: {f}", .{ fail_index, needed_alloc_count, @@ -1192,3 +1206,43 @@ pub 
inline fn fuzz( ) anyerror!void { return @import("root").fuzz(context, testOne, options); } + +/// A `std.io.Reader` that writes a predetermined list of buffers during `stream`. +pub const Reader = struct { + calls: []const Call, + interface: std.io.Reader, + next_call_index: usize, + next_offset: usize, + + pub const Call = struct { + buffer: []const u8, + }; + + pub fn init(buffer: []u8, calls: []const Call) Reader { + return .{ + .next_call_index = 0, + .next_offset = 0, + .interface = .{ + .vtable = &.{ .stream = stream }, + .buffer = buffer, + .seek = 0, + .end = 0, + }, + .calls = calls, + }; + } + + fn stream(io_r: *std.io.Reader, w: *std.io.Writer, limit: std.io.Limit) std.io.Reader.StreamError!usize { + const r: *Reader = @alignCast(@fieldParentPtr("interface", io_r)); + if (r.calls.len - r.next_call_index == 0) return error.EndOfStream; + const call = r.calls[r.next_call_index]; + const buffer = limit.sliceConst(call.buffer[r.next_offset..]); + const n = try w.write(buffer); + r.next_offset += n; + if (call.buffer.len - r.next_offset == 0) { + r.next_call_index += 1; + r.next_offset = 0; + } + return n; + } +}; diff --git a/lib/std/unicode.zig b/lib/std/unicode.zig index 4c6ec1294b..ef694f33ba 100644 --- a/lib/std/unicode.zig +++ b/lib/std/unicode.zig @@ -9,6 +9,7 @@ const native_endian = builtin.cpu.arch.endian(); /// /// See also: https://en.wikipedia.org/wiki/Specials_(Unicode_block)#Replacement_character pub const replacement_character: u21 = 0xFFFD; +pub const replacement_character_utf8: [3]u8 = utf8EncodeComptime(replacement_character); /// Returns how many bytes the UTF-8 representation would require /// for the given codepoint. 
@@ -802,14 +803,7 @@ fn testDecode(bytes: []const u8) !u21 { /// Ill-formed UTF-8 byte sequences are replaced by the replacement character (U+FFFD) /// according to "U+FFFD Substitution of Maximal Subparts" from Chapter 3 of /// the Unicode standard, and as specified by https://encoding.spec.whatwg.org/#utf-8-decoder -fn formatUtf8( - utf8: []const u8, - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = fmt; - _ = options; +fn formatUtf8(utf8: []const u8, writer: *std.io.Writer) std.io.Writer.Error!void { var buf: [300]u8 = undefined; // just an arbitrary size var u8len: usize = 0; @@ -898,27 +892,27 @@ fn formatUtf8( /// Ill-formed UTF-8 byte sequences are replaced by the replacement character (U+FFFD) /// according to "U+FFFD Substitution of Maximal Subparts" from Chapter 3 of /// the Unicode standard, and as specified by https://encoding.spec.whatwg.org/#utf-8-decoder -pub fn fmtUtf8(utf8: []const u8) std.fmt.Formatter(formatUtf8) { +pub fn fmtUtf8(utf8: []const u8) std.fmt.Formatter([]const u8, formatUtf8) { return .{ .data = utf8 }; } test fmtUtf8 { const expectFmt = testing.expectFmt; - try expectFmt("", "{}", .{fmtUtf8("")}); - try expectFmt("foo", "{}", .{fmtUtf8("foo")}); - try expectFmt("𐐷", "{}", .{fmtUtf8("𐐷")}); + try expectFmt("", "{f}", .{fmtUtf8("")}); + try expectFmt("foo", "{f}", .{fmtUtf8("foo")}); + try expectFmt("𐐷", "{f}", .{fmtUtf8("𐐷")}); // Table 3-8. U+FFFD for Non-Shortest Form Sequences - try expectFmt("��������A", "{}", .{fmtUtf8("\xC0\xAF\xE0\x80\xBF\xF0\x81\x82A")}); + try expectFmt("��������A", "{f}", .{fmtUtf8("\xC0\xAF\xE0\x80\xBF\xF0\x81\x82A")}); // Table 3-9. U+FFFD for Ill-Formed Sequences for Surrogates - try expectFmt("��������A", "{}", .{fmtUtf8("\xED\xA0\x80\xED\xBF\xBF\xED\xAFA")}); + try expectFmt("��������A", "{f}", .{fmtUtf8("\xED\xA0\x80\xED\xBF\xBF\xED\xAFA")}); // Table 3-10. 
U+FFFD for Other Ill-Formed Sequences - try expectFmt("�����A��B", "{}", .{fmtUtf8("\xF4\x91\x92\x93\xFFA\x80\xBFB")}); + try expectFmt("�����A��B", "{f}", .{fmtUtf8("\xF4\x91\x92\x93\xFFA\x80\xBFB")}); // Table 3-11. U+FFFD for Truncated Sequences - try expectFmt("����A", "{}", .{fmtUtf8("\xE1\x80\xE2\xF0\x91\x92\xF1\xBFA")}); + try expectFmt("����A", "{f}", .{fmtUtf8("\xE1\x80\xE2\xF0\x91\x92\xF1\xBFA")}); } fn utf16LeToUtf8ArrayListImpl( @@ -1477,14 +1471,7 @@ test calcWtf16LeLen { /// Print the given `utf16le` string, encoded as UTF-8 bytes. /// Unpaired surrogates are replaced by the replacement character (U+FFFD). -fn formatUtf16Le( - utf16le: []const u16, - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = fmt; - _ = options; +fn formatUtf16Le(utf16le: []const u16, writer: *std.io.Writer) std.io.Writer.Error!void { var buf: [300]u8 = undefined; // just an arbitrary size var it = Utf16LeIterator.init(utf16le); var u8len: usize = 0; @@ -1505,23 +1492,23 @@ pub const fmtUtf16le = @compileError("deprecated; renamed to fmtUtf16Le"); /// Return a Formatter for a (potentially ill-formed) UTF-16 LE string, /// which will be converted to UTF-8 during formatting. /// Unpaired surrogates are replaced by the replacement character (U+FFFD). 
-pub fn fmtUtf16Le(utf16le: []const u16) std.fmt.Formatter(formatUtf16Le) { +pub fn fmtUtf16Le(utf16le: []const u16) std.fmt.Formatter([]const u16, formatUtf16Le) { return .{ .data = utf16le }; } test fmtUtf16Le { const expectFmt = testing.expectFmt; - try expectFmt("", "{}", .{fmtUtf16Le(utf8ToUtf16LeStringLiteral(""))}); - try expectFmt("", "{}", .{fmtUtf16Le(wtf8ToWtf16LeStringLiteral(""))}); - try expectFmt("foo", "{}", .{fmtUtf16Le(utf8ToUtf16LeStringLiteral("foo"))}); - try expectFmt("foo", "{}", .{fmtUtf16Le(wtf8ToWtf16LeStringLiteral("foo"))}); - try expectFmt("𐐷", "{}", .{fmtUtf16Le(wtf8ToWtf16LeStringLiteral("𐐷"))}); - try expectFmt("퟿", "{}", .{fmtUtf16Le(&[_]u16{mem.readInt(u16, "\xff\xd7", native_endian)})}); - try expectFmt("�", "{}", .{fmtUtf16Le(&[_]u16{mem.readInt(u16, "\x00\xd8", native_endian)})}); - try expectFmt("�", "{}", .{fmtUtf16Le(&[_]u16{mem.readInt(u16, "\xff\xdb", native_endian)})}); - try expectFmt("�", "{}", .{fmtUtf16Le(&[_]u16{mem.readInt(u16, "\x00\xdc", native_endian)})}); - try expectFmt("�", "{}", .{fmtUtf16Le(&[_]u16{mem.readInt(u16, "\xff\xdf", native_endian)})}); - try expectFmt("", "{}", .{fmtUtf16Le(&[_]u16{mem.readInt(u16, "\x00\xe0", native_endian)})}); + try expectFmt("", "{f}", .{fmtUtf16Le(utf8ToUtf16LeStringLiteral(""))}); + try expectFmt("", "{f}", .{fmtUtf16Le(wtf8ToWtf16LeStringLiteral(""))}); + try expectFmt("foo", "{f}", .{fmtUtf16Le(utf8ToUtf16LeStringLiteral("foo"))}); + try expectFmt("foo", "{f}", .{fmtUtf16Le(wtf8ToWtf16LeStringLiteral("foo"))}); + try expectFmt("𐐷", "{f}", .{fmtUtf16Le(wtf8ToWtf16LeStringLiteral("𐐷"))}); + try expectFmt("퟿", "{f}", .{fmtUtf16Le(&[_]u16{mem.readInt(u16, "\xff\xd7", native_endian)})}); + try expectFmt("�", "{f}", .{fmtUtf16Le(&[_]u16{mem.readInt(u16, "\x00\xd8", native_endian)})}); + try expectFmt("�", "{f}", .{fmtUtf16Le(&[_]u16{mem.readInt(u16, "\xff\xdb", native_endian)})}); + try expectFmt("�", "{f}", .{fmtUtf16Le(&[_]u16{mem.readInt(u16, "\x00\xdc", native_endian)})}); + 
try expectFmt("�", "{f}", .{fmtUtf16Le(&[_]u16{mem.readInt(u16, "\xff\xdf", native_endian)})}); + try expectFmt("", "{f}", .{fmtUtf16Le(&[_]u16{mem.readInt(u16, "\x00\xe0", native_endian)})}); } fn testUtf8ToUtf16LeStringLiteral(utf8ToUtf16LeStringLiteral_: anytype) !void { diff --git a/lib/std/unicode/throughput_test.zig b/lib/std/unicode/throughput_test.zig index 084406dc78..c13a38b101 100644 --- a/lib/std/unicode/throughput_test.zig +++ b/lib/std/unicode/throughput_test.zig @@ -39,7 +39,7 @@ fn benchmarkCodepointCount(buf: []const u8) !ResultCount { } pub fn main() !void { - const stdout = std.io.getStdOut().writer(); + const stdout = std.fs.File.stdout().deprecatedWriter(); try stdout.print("short ASCII strings\n", .{}); { diff --git a/lib/std/zig.zig b/lib/std/zig.zig index cf7992baed..be3c982d09 100644 --- a/lib/std/zig.zig +++ b/lib/std/zig.zig @@ -48,7 +48,7 @@ pub const Color = enum { pub fn get_tty_conf(color: Color) std.io.tty.Config { return switch (color) { - .auto => std.io.tty.detectConfig(std.io.getStdErr()), + .auto => std.io.tty.detectConfig(std.fs.File.stderr()), .on => .escape_codes, .off => .no_color, }; @@ -363,149 +363,136 @@ const Allocator = std.mem.Allocator; /// Return a Formatter for a Zig identifier, escaping it with `@""` syntax if needed. /// -/// - An empty `{}` format specifier escapes invalid identifiers, identifiers that shadow primitives -/// and the reserved `_` identifier. -/// - Add `p` to the specifier to render identifiers that shadow primitives unescaped. -/// - Add `_` to the specifier to render the reserved `_` identifier unescaped. -/// - `p` and `_` can be combined, e.g. `{p_}`. +/// See also `fmtIdFlags`. +pub fn fmtId(bytes: []const u8) std.fmt.Formatter(FormatId, FormatId.render) { + return .{ .data = .{ .bytes = bytes, .flags = .{} } }; +} + +/// Return a Formatter for a Zig identifier, escaping it with `@""` syntax if needed. 
/// -pub fn fmtId(bytes: []const u8) std.fmt.Formatter(formatId) { - return .{ .data = bytes }; +/// See also `fmtId`. +pub fn fmtIdFlags(bytes: []const u8, flags: FormatId.Flags) std.fmt.Formatter(FormatId, FormatId.render) { + return .{ .data = .{ .bytes = bytes, .flags = flags } }; +} + +pub fn fmtIdPU(bytes: []const u8) std.fmt.Formatter(FormatId, FormatId.render) { + return .{ .data = .{ .bytes = bytes, .flags = .{ .allow_primitive = true, .allow_underscore = true } } }; +} + +pub fn fmtIdP(bytes: []const u8) std.fmt.Formatter(FormatId, FormatId.render) { + return .{ .data = .{ .bytes = bytes, .flags = .{ .allow_primitive = true } } }; } test fmtId { const expectFmt = std.testing.expectFmt; - try expectFmt("@\"while\"", "{}", .{fmtId("while")}); - try expectFmt("@\"while\"", "{p}", .{fmtId("while")}); - try expectFmt("@\"while\"", "{_}", .{fmtId("while")}); - try expectFmt("@\"while\"", "{p_}", .{fmtId("while")}); - try expectFmt("@\"while\"", "{_p}", .{fmtId("while")}); + try expectFmt("@\"while\"", "{f}", .{fmtId("while")}); + try expectFmt("@\"while\"", "{f}", .{fmtIdFlags("while", .{ .allow_primitive = true })}); + try expectFmt("@\"while\"", "{f}", .{fmtIdFlags("while", .{ .allow_underscore = true })}); + try expectFmt("@\"while\"", "{f}", .{fmtIdFlags("while", .{ .allow_primitive = true, .allow_underscore = true })}); - try expectFmt("hello", "{}", .{fmtId("hello")}); - try expectFmt("hello", "{p}", .{fmtId("hello")}); - try expectFmt("hello", "{_}", .{fmtId("hello")}); - try expectFmt("hello", "{p_}", .{fmtId("hello")}); - try expectFmt("hello", "{_p}", .{fmtId("hello")}); + try expectFmt("hello", "{f}", .{fmtId("hello")}); + try expectFmt("hello", "{f}", .{fmtIdFlags("hello", .{ .allow_primitive = true })}); + try expectFmt("hello", "{f}", .{fmtIdFlags("hello", .{ .allow_underscore = true })}); + try expectFmt("hello", "{f}", .{fmtIdFlags("hello", .{ .allow_primitive = true, .allow_underscore = true })}); - try expectFmt("@\"type\"", "{}", 
.{fmtId("type")}); - try expectFmt("type", "{p}", .{fmtId("type")}); - try expectFmt("@\"type\"", "{_}", .{fmtId("type")}); - try expectFmt("type", "{p_}", .{fmtId("type")}); - try expectFmt("type", "{_p}", .{fmtId("type")}); + try expectFmt("@\"type\"", "{f}", .{fmtId("type")}); + try expectFmt("type", "{f}", .{fmtIdFlags("type", .{ .allow_primitive = true })}); + try expectFmt("@\"type\"", "{f}", .{fmtIdFlags("type", .{ .allow_underscore = true })}); + try expectFmt("type", "{f}", .{fmtIdFlags("type", .{ .allow_primitive = true, .allow_underscore = true })}); - try expectFmt("@\"_\"", "{}", .{fmtId("_")}); - try expectFmt("@\"_\"", "{p}", .{fmtId("_")}); - try expectFmt("_", "{_}", .{fmtId("_")}); - try expectFmt("_", "{p_}", .{fmtId("_")}); - try expectFmt("_", "{_p}", .{fmtId("_")}); + try expectFmt("@\"_\"", "{f}", .{fmtId("_")}); + try expectFmt("@\"_\"", "{f}", .{fmtIdFlags("_", .{ .allow_primitive = true })}); + try expectFmt("_", "{f}", .{fmtIdFlags("_", .{ .allow_underscore = true })}); + try expectFmt("_", "{f}", .{fmtIdFlags("_", .{ .allow_primitive = true, .allow_underscore = true })}); - try expectFmt("@\"i123\"", "{}", .{fmtId("i123")}); - try expectFmt("i123", "{p}", .{fmtId("i123")}); - try expectFmt("@\"4four\"", "{}", .{fmtId("4four")}); - try expectFmt("_underscore", "{}", .{fmtId("_underscore")}); - try expectFmt("@\"11\\\"23\"", "{}", .{fmtId("11\"23")}); - try expectFmt("@\"11\\x0f23\"", "{}", .{fmtId("11\x0F23")}); + try expectFmt("@\"i123\"", "{f}", .{fmtId("i123")}); + try expectFmt("i123", "{f}", .{fmtIdFlags("i123", .{ .allow_primitive = true })}); + try expectFmt("@\"4four\"", "{f}", .{fmtId("4four")}); + try expectFmt("_underscore", "{f}", .{fmtId("_underscore")}); + try expectFmt("@\"11\\\"23\"", "{f}", .{fmtId("11\"23")}); + try expectFmt("@\"11\\x0f23\"", "{f}", .{fmtId("11\x0F23")}); // These are technically not currently legal in Zig. 
- try expectFmt("@\"\"", "{}", .{fmtId("")}); - try expectFmt("@\"\\x00\"", "{}", .{fmtId("\x00")}); + try expectFmt("@\"\"", "{f}", .{fmtId("")}); + try expectFmt("@\"\\x00\"", "{f}", .{fmtId("\x00")}); } -/// Print the string as a Zig identifier, escaping it with `@""` syntax if needed. -fn formatId( +pub const FormatId = struct { bytes: []const u8, - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - const allow_primitive, const allow_underscore = comptime parse_fmt: { - var allow_primitive = false; - var allow_underscore = false; - for (fmt) |char| { - switch (char) { - 'p' => if (!allow_primitive) { - allow_primitive = true; - continue; - }, - '_' => if (!allow_underscore) { - allow_underscore = true; - continue; - }, - else => {}, - } - @compileError("expected {}, {p}, {_}, {p_} or {_p}, found {" ++ fmt ++ "}"); - } - break :parse_fmt .{ allow_primitive, allow_underscore }; + flags: Flags, + pub const Flags = struct { + allow_primitive: bool = false, + allow_underscore: bool = false, }; - if (isValidId(bytes) and - (allow_primitive or !std.zig.isPrimitive(bytes)) and - (allow_underscore or !isUnderscore(bytes))) - { - return writer.writeAll(bytes); + /// Print the string as a Zig identifier, escaping it with `@""` syntax if needed. + fn render(ctx: FormatId, writer: *std.io.Writer) std.io.Writer.Error!void { + const bytes = ctx.bytes; + if (isValidId(bytes) and + (ctx.flags.allow_primitive or !std.zig.isPrimitive(bytes)) and + (ctx.flags.allow_underscore or !isUnderscore(bytes))) + { + return writer.writeAll(bytes); + } + try writer.writeAll("@\""); + try stringEscape(bytes, writer); + try writer.writeByte('"'); } - try writer.writeAll("@\""); - try stringEscape(bytes, "", options, writer); - try writer.writeByte('"'); -} +}; -/// Return a Formatter for Zig Escapes of a double quoted string. -/// The format specifier must be one of: -/// * `{}` treats contents as a double-quoted string. 
-/// * `{'}` treats contents as a single-quoted string. -pub fn fmtEscapes(bytes: []const u8) std.fmt.Formatter(stringEscape) { +/// Return a formatter for escaping a double quoted Zig string. +pub fn fmtString(bytes: []const u8) std.fmt.Formatter([]const u8, stringEscape) { return .{ .data = bytes }; } -test fmtEscapes { - const expectFmt = std.testing.expectFmt; - try expectFmt("\\x0f", "{}", .{fmtEscapes("\x0f")}); - try expectFmt( - \\" \\ hi \x07 \x11 " derp \'" - , "\"{'}\"", .{fmtEscapes(" \\ hi \x07 \x11 \" derp '")}); - try expectFmt( - \\" \\ hi \x07 \x11 \" derp '" - , "\"{}\"", .{fmtEscapes(" \\ hi \x07 \x11 \" derp '")}); +/// Return a formatter for escaping a single quoted Zig string. +pub fn fmtChar(bytes: []const u8) std.fmt.Formatter([]const u8, charEscape) { + return .{ .data = bytes }; } -/// Print the string as escaped contents of a double quoted or single-quoted string. -/// Format `{}` treats contents as a double-quoted string. -/// Format `{'}` treats contents as a single-quoted string. -pub fn stringEscape( - bytes: []const u8, - comptime f: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = options; +test fmtString { + try std.testing.expectFmt("\\x0f", "{f}", .{fmtString("\x0f")}); + try std.testing.expectFmt( + \\" \\ hi \x07 \x11 \" derp '" + , "\"{f}\"", .{fmtString(" \\ hi \x07 \x11 \" derp '")}); +} + +test fmtChar { + try std.testing.expectFmt( + \\" \\ hi \x07 \x11 " derp \'" + , "\"{f}\"", .{fmtChar(" \\ hi \x07 \x11 \" derp '")}); +} + +/// Print the string as escaped contents of a double quoted string. 
+pub fn stringEscape(bytes: []const u8, w: *std.io.Writer) std.io.Writer.Error!void { for (bytes) |byte| switch (byte) { - '\n' => try writer.writeAll("\\n"), - '\r' => try writer.writeAll("\\r"), - '\t' => try writer.writeAll("\\t"), - '\\' => try writer.writeAll("\\\\"), - '"' => { - if (f.len == 1 and f[0] == '\'') { - try writer.writeByte('"'); - } else if (f.len == 0) { - try writer.writeAll("\\\""); - } else { - @compileError("expected {} or {'}, found {" ++ f ++ "}"); - } - }, - '\'' => { - if (f.len == 1 and f[0] == '\'') { - try writer.writeAll("\\'"); - } else if (f.len == 0) { - try writer.writeByte('\''); - } else { - @compileError("expected {} or {'}, found {" ++ f ++ "}"); - } - }, - ' ', '!', '#'...'&', '('...'[', ']'...'~' => try writer.writeByte(byte), - // Use hex escapes for rest any unprintable characters. + '\n' => try w.writeAll("\\n"), + '\r' => try w.writeAll("\\r"), + '\t' => try w.writeAll("\\t"), + '\\' => try w.writeAll("\\\\"), + '"' => try w.writeAll("\\\""), + '\'' => try w.writeByte('\''), + ' ', '!', '#'...'&', '('...'[', ']'...'~' => try w.writeByte(byte), else => { - try writer.writeAll("\\x"); - try std.fmt.formatInt(byte, 16, .lower, .{ .width = 2, .fill = '0' }, writer); + try w.writeAll("\\x"); + try w.printInt(byte, 16, .lower, .{ .width = 2, .fill = '0' }); + }, + }; +} + +/// Print the string as escaped contents of a single-quoted string. 
+pub fn charEscape(bytes: []const u8, w: *std.io.Writer) std.io.Writer.Error!void { + for (bytes) |byte| switch (byte) { + '\n' => try w.writeAll("\\n"), + '\r' => try w.writeAll("\\r"), + '\t' => try w.writeAll("\\t"), + '\\' => try w.writeAll("\\\\"), + '"' => try w.writeByte('"'), + '\'' => try w.writeAll("\\'"), + ' ', '!', '#'...'&', '('...'[', ']'...'~' => try w.writeByte(byte), + else => { + try w.writeAll("\\x"); + try w.printInt(byte, 16, .lower, .{ .width = 2, .fill = '0' }); }, }; } diff --git a/lib/std/zig/Ast.zig b/lib/std/zig/Ast.zig index 6c4c0ff602..ecdc7a8a4e 100644 --- a/lib/std/zig/Ast.zig +++ b/lib/std/zig/Ast.zig @@ -565,14 +565,14 @@ pub fn renderError(tree: Ast, parse_error: Error, stream: anytype) !void { .invalid_byte => { const tok_slice = tree.source[tree.tokens.items(.start)[parse_error.token]..]; - return stream.print("{s} contains invalid byte: '{'}'", .{ + return stream.print("{s} contains invalid byte: '{f}'", .{ switch (tok_slice[0]) { '\'' => "character literal", '"', '\\' => "string literal", '/' => "comment", else => unreachable, }, - std.zig.fmtEscapes(tok_slice[parse_error.extra.offset..][0..1]), + std.zig.fmtChar(tok_slice[parse_error.extra.offset..][0..1]), }); }, diff --git a/lib/std/zig/AstGen.zig b/lib/std/zig/AstGen.zig index 966d49b827..e2ec0e9eb6 100644 --- a/lib/std/zig/AstGen.zig +++ b/lib/std/zig/AstGen.zig @@ -11305,13 +11305,7 @@ fn failWithStrLitError( offset: u32, ) InnerError { const raw_string = bytes[offset..]; - return failOff( - astgen, - token, - @intCast(offset + err.offset()), - "{}", - .{err.fmt(raw_string)}, - ); + return failOff(astgen, token, @intCast(offset + err.offset()), "{f}", .{err.fmt(raw_string)}); } fn failNode( diff --git a/lib/std/zig/ErrorBundle.zig b/lib/std/zig/ErrorBundle.zig index 503f9a3e39..b398d5eb4c 100644 --- a/lib/std/zig/ErrorBundle.zig +++ b/lib/std/zig/ErrorBundle.zig @@ -7,6 +7,12 @@ //! empty, it means there are no errors. This special encoding exists so that //! 
heap allocation is not needed in the common case of no errors. +const std = @import("std"); +const ErrorBundle = @This(); +const Allocator = std.mem.Allocator; +const assert = std.debug.assert; +const Writer = std.io.Writer; + string_bytes: []const u8, /// The first thing in this array is an `ErrorMessageList`. extra: []const u32, @@ -157,23 +163,23 @@ pub const RenderOptions = struct { }; pub fn renderToStdErr(eb: ErrorBundle, options: RenderOptions) void { - std.debug.lockStdErr(); - defer std.debug.unlockStdErr(); - const stderr = std.io.getStdErr(); - return renderToWriter(eb, options, stderr.writer()) catch return; + var buffer: [256]u8 = undefined; + const w = std.debug.lockStderrWriter(&buffer); + defer std.debug.unlockStderrWriter(); + renderToWriter(eb, options, w) catch return; } -pub fn renderToWriter(eb: ErrorBundle, options: RenderOptions, writer: anytype) anyerror!void { +pub fn renderToWriter(eb: ErrorBundle, options: RenderOptions, w: *Writer) (Writer.Error || std.posix.UnexpectedError)!void { if (eb.extra.len == 0) return; for (eb.getMessages()) |err_msg| { - try renderErrorMessageToWriter(eb, options, err_msg, writer, "error", .red, 0); + try renderErrorMessageToWriter(eb, options, err_msg, w, "error", .red, 0); } if (options.include_log_text) { const log_text = eb.getCompileLogOutput(); if (log_text.len != 0) { - try writer.writeAll("\nCompile Log Output:\n"); - try writer.writeAll(log_text); + try w.writeAll("\nCompile Log Output:\n"); + try w.writeAll(log_text); } } } @@ -182,74 +188,81 @@ fn renderErrorMessageToWriter( eb: ErrorBundle, options: RenderOptions, err_msg_index: MessageIndex, - stderr: anytype, + w: *Writer, kind: []const u8, color: std.io.tty.Color, indent: usize, -) anyerror!void { +) (Writer.Error || std.posix.UnexpectedError)!void { const ttyconf = options.ttyconf; - var counting_writer = std.io.countingWriter(stderr); - const counting_stderr = counting_writer.writer(); const err_msg = eb.getErrorMessage(err_msg_index); if 
(err_msg.src_loc != .none) { const src = eb.extraData(SourceLocation, @intFromEnum(err_msg.src_loc)); - try counting_stderr.writeByteNTimes(' ', indent); - try ttyconf.setColor(stderr, .bold); - try counting_stderr.print("{s}:{d}:{d}: ", .{ + var prefix: std.io.Writer.Discarding = .init(&.{}); + try w.splatByteAll(' ', indent); + prefix.count += indent; + try ttyconf.setColor(w, .bold); + try w.print("{s}:{d}:{d}: ", .{ eb.nullTerminatedString(src.data.src_path), src.data.line + 1, src.data.column + 1, }); - try ttyconf.setColor(stderr, color); - try counting_stderr.writeAll(kind); - try counting_stderr.writeAll(": "); + try prefix.writer.print("{s}:{d}:{d}: ", .{ + eb.nullTerminatedString(src.data.src_path), + src.data.line + 1, + src.data.column + 1, + }); + try ttyconf.setColor(w, color); + try w.writeAll(kind); + prefix.count += kind.len; + try w.writeAll(": "); + prefix.count += 2; // This is the length of the part before the error message: // e.g. "file.zig:4:5: error: " - const prefix_len: usize = @intCast(counting_stderr.context.bytes_written); - try ttyconf.setColor(stderr, .reset); - try ttyconf.setColor(stderr, .bold); + const prefix_len: usize = @intCast(prefix.count); + try ttyconf.setColor(w, .reset); + try ttyconf.setColor(w, .bold); if (err_msg.count == 1) { - try writeMsg(eb, err_msg, stderr, prefix_len); - try stderr.writeByte('\n'); + try writeMsg(eb, err_msg, w, prefix_len); + try w.writeByte('\n'); } else { - try writeMsg(eb, err_msg, stderr, prefix_len); - try ttyconf.setColor(stderr, .dim); - try stderr.print(" ({d} times)\n", .{err_msg.count}); + try writeMsg(eb, err_msg, w, prefix_len); + try ttyconf.setColor(w, .dim); + try w.print(" ({d} times)\n", .{err_msg.count}); } - try ttyconf.setColor(stderr, .reset); + try ttyconf.setColor(w, .reset); if (src.data.source_line != 0 and options.include_source_line) { const line = eb.nullTerminatedString(src.data.source_line); for (line) |b| switch (b) { - '\t' => try stderr.writeByte(' '), - else => 
try stderr.writeByte(b), + '\t' => try w.writeByte(' '), + else => try w.writeByte(b), }; - try stderr.writeByte('\n'); + try w.writeByte('\n'); // TODO basic unicode code point monospace width const before_caret = src.data.span_main - src.data.span_start; // -1 since span.main includes the caret const after_caret = src.data.span_end -| src.data.span_main -| 1; - try stderr.writeByteNTimes(' ', src.data.column - before_caret); - try ttyconf.setColor(stderr, .green); - try stderr.writeByteNTimes('~', before_caret); - try stderr.writeByte('^'); - try stderr.writeByteNTimes('~', after_caret); - try stderr.writeByte('\n'); - try ttyconf.setColor(stderr, .reset); + try w.splatByteAll(' ', src.data.column - before_caret); + try ttyconf.setColor(w, .green); + try w.splatByteAll('~', before_caret); + try w.writeByte('^'); + try w.splatByteAll('~', after_caret); + try w.writeByte('\n'); + try ttyconf.setColor(w, .reset); } for (eb.getNotes(err_msg_index)) |note| { - try renderErrorMessageToWriter(eb, options, note, stderr, "note", .cyan, indent); + try renderErrorMessageToWriter(eb, options, note, w, "note", .cyan, indent); } if (src.data.reference_trace_len > 0 and options.include_reference_trace) { - try ttyconf.setColor(stderr, .reset); - try ttyconf.setColor(stderr, .dim); - try stderr.print("referenced by:\n", .{}); + try ttyconf.setColor(w, .reset); + try ttyconf.setColor(w, .dim); + try w.print("referenced by:\n", .{}); var ref_index = src.end; for (0..src.data.reference_trace_len) |_| { const ref_trace = eb.extraData(ReferenceTrace, ref_index); ref_index = ref_trace.end; if (ref_trace.data.src_loc != .none) { const ref_src = eb.getSourceLocation(ref_trace.data.src_loc); - try stderr.print(" {s}: {s}:{d}:{d}\n", .{ + try w.print(" {s}: {s}:{d}:{d}\n", .{ eb.nullTerminatedString(ref_trace.data.decl_name), eb.nullTerminatedString(ref_src.src_path), ref_src.line + 1, @@ -257,36 +270,36 @@ fn renderErrorMessageToWriter( }); } else if (ref_trace.data.decl_name != 0) { 
const count = ref_trace.data.decl_name; - try stderr.print( + try w.print( " {d} reference(s) hidden; use '-freference-trace={d}' to see all references\n", .{ count, count + src.data.reference_trace_len - 1 }, ); } else { - try stderr.print( + try w.print( " remaining reference traces hidden; use '-freference-trace' to see all reference traces\n", .{}, ); } } - try ttyconf.setColor(stderr, .reset); + try ttyconf.setColor(w, .reset); } } else { - try ttyconf.setColor(stderr, color); - try stderr.writeByteNTimes(' ', indent); - try stderr.writeAll(kind); - try stderr.writeAll(": "); - try ttyconf.setColor(stderr, .reset); + try ttyconf.setColor(w, color); + try w.splatByteAll(' ', indent); + try w.writeAll(kind); + try w.writeAll(": "); + try ttyconf.setColor(w, .reset); const msg = eb.nullTerminatedString(err_msg.msg); if (err_msg.count == 1) { - try stderr.print("{s}\n", .{msg}); + try w.print("{s}\n", .{msg}); } else { - try stderr.print("{s}", .{msg}); - try ttyconf.setColor(stderr, .dim); - try stderr.print(" ({d} times)\n", .{err_msg.count}); + try w.print("{s}", .{msg}); + try ttyconf.setColor(w, .dim); + try w.print(" ({d} times)\n", .{err_msg.count}); } - try ttyconf.setColor(stderr, .reset); + try ttyconf.setColor(w, .reset); for (eb.getNotes(err_msg_index)) |note| { - try renderErrorMessageToWriter(eb, options, note, stderr, "note", .cyan, indent + 4); + try renderErrorMessageToWriter(eb, options, note, w, "note", .cyan, indent + 4); } } } @@ -295,21 +308,16 @@ fn renderErrorMessageToWriter( /// to allow for long, good-looking error messages. /// /// This is used to split the message in `@compileError("hello\nworld")` for example. 
-fn writeMsg(eb: ErrorBundle, err_msg: ErrorMessage, stderr: anytype, indent: usize) !void { +fn writeMsg(eb: ErrorBundle, err_msg: ErrorMessage, w: *Writer, indent: usize) !void { var lines = std.mem.splitScalar(u8, eb.nullTerminatedString(err_msg.msg), '\n'); while (lines.next()) |line| { - try stderr.writeAll(line); + try w.writeAll(line); if (lines.index == null) break; - try stderr.writeByte('\n'); - try stderr.writeByteNTimes(' ', indent); + try w.writeByte('\n'); + try w.splatByteAll(' ', indent); } } -const std = @import("std"); -const ErrorBundle = @This(); -const Allocator = std.mem.Allocator; -const assert = std.debug.assert; - pub const Wip = struct { gpa: Allocator, string_bytes: std.ArrayListUnmanaged(u8), @@ -398,7 +406,7 @@ pub const Wip = struct { pub fn printString(wip: *Wip, comptime fmt: []const u8, args: anytype) Allocator.Error!String { const gpa = wip.gpa; const index: String = @intCast(wip.string_bytes.items.len); - try wip.string_bytes.writer(gpa).print(fmt, args); + try wip.string_bytes.print(gpa, fmt, args); try wip.string_bytes.append(gpa, 0); return index; } @@ -788,9 +796,10 @@ pub const Wip = struct { const ttyconf: std.io.tty.Config = .no_color; - var bundle_buf = std.ArrayList(u8).init(std.testing.allocator); + var bundle_buf: std.io.Writer.Allocating = .init(std.testing.allocator); + const bundle_bw = &bundle_buf.interface; defer bundle_buf.deinit(); - try bundle.renderToWriter(.{ .ttyconf = ttyconf }, bundle_buf.writer()); + try bundle.renderToWriter(.{ .ttyconf = ttyconf }, bundle_bw); var copy = copy: { var wip: ErrorBundle.Wip = undefined; @@ -803,10 +812,11 @@ pub const Wip = struct { }; defer copy.deinit(std.testing.allocator); - var copy_buf = std.ArrayList(u8).init(std.testing.allocator); + var copy_buf: std.io.Writer.Allocating = .init(std.testing.allocator); + const copy_bw = ©_buf.interface; defer copy_buf.deinit(); - try copy.renderToWriter(.{ .ttyconf = ttyconf }, copy_buf.writer()); + try copy.renderToWriter(.{ 
.ttyconf = ttyconf }, copy_bw); - try std.testing.expectEqualStrings(bundle_buf.items, copy_buf.items); + try std.testing.expectEqualStrings(bundle_bw.getWritten(), copy_bw.getWritten()); } }; diff --git a/lib/std/zig/ZonGen.zig b/lib/std/zig/ZonGen.zig index a43663a609..ccc09a82ce 100644 --- a/lib/std/zig/ZonGen.zig +++ b/lib/std/zig/ZonGen.zig @@ -756,13 +756,7 @@ fn lowerStrLitError( raw_string: []const u8, offset: u32, ) Allocator.Error!void { - return ZonGen.addErrorTokOff( - zg, - token, - @intCast(offset + err.offset()), - "{}", - .{err.fmt(raw_string)}, - ); + return ZonGen.addErrorTokOff(zg, token, @intCast(offset + err.offset()), "{f}", .{err.fmt(raw_string)}); } fn lowerNumberError(zg: *ZonGen, err: std.zig.number_literal.Error, token: Ast.TokenIndex, bytes: []const u8) Allocator.Error!void { diff --git a/lib/std/zig/llvm/Builder.zig b/lib/std/zig/llvm/Builder.zig index 0e25af08c6..b3e0311cdd 100644 --- a/lib/std/zig/llvm/Builder.zig +++ b/lib/std/zig/llvm/Builder.zig @@ -1,3 +1,14 @@ +const std = @import("../../std.zig"); +const Allocator = std.mem.Allocator; +const assert = std.debug.assert; +const bitcode_writer = @import("bitcode_writer.zig"); +const Builder = @This(); +const builtin = @import("builtin"); +const DW = std.dwarf; +const ir = @import("ir.zig"); +const log = std.log.scoped(.llvm); +const Writer = std.io.Writer; + gpa: Allocator, strip: bool, @@ -90,31 +101,38 @@ pub const String = enum(u32) { const FormatData = struct { string: String, builder: *const Builder, + quote_behavior: ?QuoteBehavior, }; - fn format( - data: FormatData, - comptime fmt_str: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { - if (comptime std.mem.indexOfNone(u8, fmt_str, "\"r")) |_| - @compileError("invalid format string: '" ++ fmt_str ++ "'"); + fn format(data: FormatData, w: *Writer) Writer.Error!void { assert(data.string != .none); const string_slice = data.string.slice(data.builder) orelse - return writer.print("{d}", 
.{@intFromEnum(data.string)}); - if (comptime std.mem.indexOfScalar(u8, fmt_str, 'r')) |_| - return writer.writeAll(string_slice); - try printEscapedString( - string_slice, - if (comptime std.mem.indexOfScalar(u8, fmt_str, '"')) |_| - .always_quote - else - .quote_unless_valid_identifier, - writer, - ); + return w.print("{d}", .{@intFromEnum(data.string)}); + const quote_behavior = data.quote_behavior orelse return w.writeAll(string_slice); + return printEscapedString(string_slice, quote_behavior, w); } - pub fn fmt(self: String, builder: *const Builder) std.fmt.Formatter(format) { - return .{ .data = .{ .string = self, .builder = builder } }; + + pub fn fmt(self: String, builder: *const Builder) std.fmt.Formatter(FormatData, format) { + return .{ .data = .{ + .string = self, + .builder = builder, + .quote_behavior = .quote_unless_valid_identifier, + } }; + } + + pub fn fmtQ(self: String, builder: *const Builder) std.fmt.Formatter(FormatData, format) { + return .{ .data = .{ + .string = self, + .builder = builder, + .quote_behavior = .always_quote, + } }; + } + + pub fn fmtRaw(self: String, builder: *const Builder) std.fmt.Formatter(FormatData, format) { + return .{ .data = .{ + .string = self, + .builder = builder, + .quote_behavior = null, + } }; } fn fromIndex(index: ?usize) String { @@ -228,7 +246,7 @@ pub const Type = enum(u32) { _, pub const ptr_amdgpu_constant = - @field(Type, std.fmt.comptimePrint("ptr{ }", .{AddrSpace.amdgpu.constant})); + @field(Type, std.fmt.comptimePrint("ptr{f}", .{AddrSpace.amdgpu.constant.fmt(" ")})); pub const Tag = enum(u4) { simple, @@ -653,18 +671,16 @@ pub const Type = enum(u32) { const FormatData = struct { type: Type, builder: *const Builder, + mode: Mode, + + const Mode = enum { default, m, lt, gt, percent }; }; - fn format( - data: FormatData, - comptime fmt_str: []const u8, - fmt_opts: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { + fn format(data: FormatData, w: *Writer) Writer.Error!void { 
assert(data.type != .none); - if (comptime std.mem.eql(u8, fmt_str, "m")) { + if (data.mode == .m) { const item = data.builder.type_items.items[@intFromEnum(data.type)]; switch (item.tag) { - .simple => try writer.writeAll(switch (@as(Simple, @enumFromInt(item.data))) { + .simple => try w.writeAll(switch (@as(Simple, @enumFromInt(item.data))) { .void => "isVoid", .half => "f16", .bfloat => "bf16", @@ -681,36 +697,36 @@ pub const Type = enum(u32) { .function, .vararg_function => |kind| { var extra = data.builder.typeExtraDataTrail(Type.Function, item.data); const params = extra.trail.next(extra.data.params_len, Type, data.builder); - try writer.print("f_{m}", .{extra.data.ret.fmt(data.builder)}); - for (params) |param| try writer.print("{m}", .{param.fmt(data.builder)}); + try w.print("f_{f}", .{extra.data.ret.fmt(data.builder, .m)}); + for (params) |param| try w.print("{f}", .{param.fmt(data.builder, .m)}); switch (kind) { .function => {}, - .vararg_function => try writer.writeAll("vararg"), + .vararg_function => try w.writeAll("vararg"), else => unreachable, } - try writer.writeByte('f'); + try w.writeByte('f'); }, - .integer => try writer.print("i{d}", .{item.data}), - .pointer => try writer.print("p{d}", .{item.data}), + .integer => try w.print("i{d}", .{item.data}), + .pointer => try w.print("p{d}", .{item.data}), .target => { var extra = data.builder.typeExtraDataTrail(Type.Target, item.data); const types = extra.trail.next(extra.data.types_len, Type, data.builder); const ints = extra.trail.next(extra.data.ints_len, u32, data.builder); - try writer.print("t{s}", .{extra.data.name.slice(data.builder).?}); - for (types) |ty| try writer.print("_{m}", .{ty.fmt(data.builder)}); - for (ints) |int| try writer.print("_{d}", .{int}); - try writer.writeByte('t'); + try w.print("t{s}", .{extra.data.name.slice(data.builder).?}); + for (types) |ty| try w.print("_{f}", .{ty.fmt(data.builder, .m)}); + for (ints) |int| try w.print("_{d}", .{int}); + try w.writeByte('t'); }, 
.vector, .scalable_vector => |kind| { const extra = data.builder.typeExtraData(Type.Vector, item.data); - try writer.print("{s}v{d}{m}", .{ + try w.print("{s}v{d}{f}", .{ switch (kind) { .vector => "", .scalable_vector => "nx", else => unreachable, }, extra.len, - extra.child.fmt(data.builder), + extra.child.fmt(data.builder, .m), }); }, inline .small_array, .array => |kind| { @@ -719,72 +735,72 @@ pub const Type = enum(u32) { .array => Type.Array, else => unreachable, }, item.data); - try writer.print("a{d}{m}", .{ extra.length(), extra.child.fmt(data.builder) }); + try w.print("a{d}{f}", .{ extra.length(), extra.child.fmt(data.builder, .m) }); }, .structure, .packed_structure => { var extra = data.builder.typeExtraDataTrail(Type.Structure, item.data); const fields = extra.trail.next(extra.data.fields_len, Type, data.builder); - try writer.writeAll("sl_"); - for (fields) |field| try writer.print("{m}", .{field.fmt(data.builder)}); - try writer.writeByte('s'); + try w.writeAll("sl_"); + for (fields) |field| try w.print("{f}", .{field.fmt(data.builder, .m)}); + try w.writeByte('s'); }, .named_structure => { const extra = data.builder.typeExtraData(Type.NamedStructure, item.data); - try writer.writeAll("s_"); - if (extra.id.slice(data.builder)) |id| try writer.writeAll(id); + try w.writeAll("s_"); + if (extra.id.slice(data.builder)) |id| try w.writeAll(id); }, } return; } - if (std.enums.tagName(Type, data.type)) |name| return writer.writeAll(name); + if (std.enums.tagName(Type, data.type)) |name| return w.writeAll(name); const item = data.builder.type_items.items[@intFromEnum(data.type)]; switch (item.tag) { .simple => unreachable, .function, .vararg_function => |kind| { var extra = data.builder.typeExtraDataTrail(Type.Function, item.data); const params = extra.trail.next(extra.data.params_len, Type, data.builder); - if (!comptime std.mem.eql(u8, fmt_str, ">")) - try writer.print("{%} ", .{extra.data.ret.fmt(data.builder)}); - if (!comptime std.mem.eql(u8, fmt_str, 
"<")) { - try writer.writeByte('('); + if (data.mode != .gt) + try w.print("{f} ", .{extra.data.ret.fmt(data.builder, .percent)}); + if (data.mode != .lt) { + try w.writeByte('('); for (params, 0..) |param, index| { - if (index > 0) try writer.writeAll(", "); - try writer.print("{%}", .{param.fmt(data.builder)}); + if (index > 0) try w.writeAll(", "); + try w.print("{f}", .{param.fmt(data.builder, .percent)}); } switch (kind) { .function => {}, .vararg_function => { - if (params.len > 0) try writer.writeAll(", "); - try writer.writeAll("..."); + if (params.len > 0) try w.writeAll(", "); + try w.writeAll("..."); }, else => unreachable, } - try writer.writeByte(')'); + try w.writeByte(')'); } }, - .integer => try writer.print("i{d}", .{item.data}), - .pointer => try writer.print("ptr{ }", .{@as(AddrSpace, @enumFromInt(item.data))}), + .integer => try w.print("i{d}", .{item.data}), + .pointer => try w.print("ptr{f}", .{@as(AddrSpace, @enumFromInt(item.data)).fmt(" ")}), .target => { var extra = data.builder.typeExtraDataTrail(Type.Target, item.data); const types = extra.trail.next(extra.data.types_len, Type, data.builder); const ints = extra.trail.next(extra.data.ints_len, u32, data.builder); - try writer.print( - \\target({"} - , .{extra.data.name.fmt(data.builder)}); - for (types) |ty| try writer.print(", {%}", .{ty.fmt(data.builder)}); - for (ints) |int| try writer.print(", {d}", .{int}); - try writer.writeByte(')'); + try w.print( + \\target({f} + , .{extra.data.name.fmtQ(data.builder)}); + for (types) |ty| try w.print(", {f}", .{ty.fmt(data.builder, .percent)}); + for (ints) |int| try w.print(", {d}", .{int}); + try w.writeByte(')'); }, .vector, .scalable_vector => |kind| { const extra = data.builder.typeExtraData(Type.Vector, item.data); - try writer.print("<{s}{d} x {%}>", .{ + try w.print("<{s}{d} x {f}>", .{ switch (kind) { .vector => "", .scalable_vector => "vscale x ", else => unreachable, }, extra.len, - extra.child.fmt(data.builder), + 
extra.child.fmt(data.builder, .percent), }); }, inline .small_array, .array => |kind| { @@ -793,44 +809,45 @@ pub const Type = enum(u32) { .array => Type.Array, else => unreachable, }, item.data); - try writer.print("[{d} x {%}]", .{ extra.length(), extra.child.fmt(data.builder) }); + try w.print("[{d} x {f}]", .{ extra.length(), extra.child.fmt(data.builder, .percent) }); }, .structure, .packed_structure => |kind| { var extra = data.builder.typeExtraDataTrail(Type.Structure, item.data); const fields = extra.trail.next(extra.data.fields_len, Type, data.builder); switch (kind) { .structure => {}, - .packed_structure => try writer.writeByte('<'), + .packed_structure => try w.writeByte('<'), else => unreachable, } - try writer.writeAll("{ "); + try w.writeAll("{ "); for (fields, 0..) |field, index| { - if (index > 0) try writer.writeAll(", "); - try writer.print("{%}", .{field.fmt(data.builder)}); + if (index > 0) try w.writeAll(", "); + try w.print("{f}", .{field.fmt(data.builder, .percent)}); } - try writer.writeAll(" }"); + try w.writeAll(" }"); switch (kind) { .structure => {}, - .packed_structure => try writer.writeByte('>'), + .packed_structure => try w.writeByte('>'), else => unreachable, } }, .named_structure => { const extra = data.builder.typeExtraData(Type.NamedStructure, item.data); - if (comptime std.mem.eql(u8, fmt_str, "%")) try writer.print("%{}", .{ + if (data.mode == .percent) try w.print("%{f}", .{ extra.id.fmt(data.builder), }) else switch (extra.body) { - .none => try writer.writeAll("opaque"), + .none => try w.writeAll("opaque"), else => try format(.{ .type = extra.body, .builder = data.builder, - }, fmt_str, fmt_opts, writer), + .mode = data.mode, + }, w), } }, } } - pub fn fmt(self: Type, builder: *const Builder) std.fmt.Formatter(format) { - return .{ .data = .{ .type = self, .builder = builder } }; + pub fn fmt(self: Type, builder: *const Builder, mode: FormatData.Mode) std.fmt.Formatter(FormatData, format) { + return .{ .data = .{ .type = 
self, .builder = builder, .mode = mode } }; } const IsSizedVisited = std.AutoHashMapUnmanaged(Type, void); @@ -1138,15 +1155,13 @@ pub const Attribute = union(Kind) { const FormatData = struct { attribute_index: Index, builder: *const Builder, + flags: Flags = .{}, + const Flags = struct { + pound: bool = false, + quote: bool = false, + }; }; - fn format( - data: FormatData, - comptime fmt_str: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { - if (comptime std.mem.indexOfNone(u8, fmt_str, "\"#")) |_| - @compileError("invalid format string: '" ++ fmt_str ++ "'"); + fn format(data: FormatData, w: *Writer) Writer.Error!void { const attribute = data.attribute_index.toAttribute(data.builder); switch (attribute) { .zeroext, @@ -1219,97 +1234,99 @@ pub const Attribute = union(Kind) { .no_sanitize_address, .no_sanitize_hwaddress, .sanitize_address_dyninit, - => try writer.print(" {s}", .{@tagName(attribute)}), + => try w.print(" {s}", .{@tagName(attribute)}), .byval, .byref, .preallocated, .inalloca, .sret, .elementtype, - => |ty| try writer.print(" {s}({%})", .{ @tagName(attribute), ty.fmt(data.builder) }), - .@"align" => |alignment| try writer.print("{ }", .{alignment}), + => |ty| try w.print(" {s}({f})", .{ @tagName(attribute), ty.fmt(data.builder, .percent) }), + .@"align" => |alignment| try w.print("{f}", .{alignment.fmt(" ")}), .dereferenceable, .dereferenceable_or_null, - => |size| try writer.print(" {s}({d})", .{ @tagName(attribute), size }), + => |size| try w.print(" {s}({d})", .{ @tagName(attribute), size }), .nofpclass => |fpclass| { const Int = @typeInfo(FpClass).@"struct".backing_integer.?; - try writer.print(" {s}(", .{@tagName(attribute)}); + try w.print(" {s}(", .{@tagName(attribute)}); var any = false; var remaining: Int = @bitCast(fpclass); inline for (@typeInfo(FpClass).@"struct".decls) |decl| { const pattern: Int = @bitCast(@field(FpClass, decl.name)); if (remaining & pattern == pattern) { if (!any) { - try 
writer.writeByte(' '); + try w.writeByte(' '); any = true; } - try writer.writeAll(decl.name); + try w.writeAll(decl.name); remaining &= ~pattern; } } - try writer.writeByte(')'); + try w.writeByte(')'); + }, + .alignstack => |alignment| { + try w.print(" {t}", .{attribute}); + const alignment_bytes = alignment.toByteUnits() orelse return; + if (data.flags.pound) { + try w.print("={d}", .{alignment_bytes}); + } else { + try w.print("({d})", .{alignment_bytes}); + } }, - .alignstack => |alignment| try writer.print( - if (comptime std.mem.indexOfScalar(u8, fmt_str, '#') != null) - " {s}={d}" - else - " {s}({d})", - .{ @tagName(attribute), alignment.toByteUnits() orelse return }, - ), .allockind => |allockind| { - try writer.print(" {s}(\"", .{@tagName(attribute)}); + try w.print(" {t}(\"", .{attribute}); var any = false; inline for (@typeInfo(AllocKind).@"struct".fields) |field| { if (comptime std.mem.eql(u8, field.name, "_")) continue; if (@field(allockind, field.name)) { if (!any) { - try writer.writeByte(','); + try w.writeByte(','); any = true; } - try writer.writeAll(field.name); + try w.writeAll(field.name); } } - try writer.writeAll("\")"); + try w.writeAll("\")"); }, .allocsize => |allocsize| { - try writer.print(" {s}({d}", .{ @tagName(attribute), allocsize.elem_size }); + try w.print(" {t}({d}", .{ attribute, allocsize.elem_size }); if (allocsize.num_elems != AllocSize.none) - try writer.print(",{d}", .{allocsize.num_elems}); - try writer.writeByte(')'); + try w.print(",{d}", .{allocsize.num_elems}); + try w.writeByte(')'); }, .memory => |memory| { - try writer.print(" {s}(", .{@tagName(attribute)}); + try w.print(" {t}(", .{attribute}); var any = memory.other != .none or (memory.argmem == .none and memory.inaccessiblemem == .none); - if (any) try writer.writeAll(@tagName(memory.other)); + if (any) try w.writeAll(@tagName(memory.other)); inline for (.{ "argmem", "inaccessiblemem" }) |kind| { if (@field(memory, kind) != memory.other) { - if (any) try 
writer.writeAll(", "); - try writer.print("{s}: {s}", .{ kind, @tagName(@field(memory, kind)) }); + if (any) try w.writeAll(", "); + try w.print("{s}: {s}", .{ kind, @tagName(@field(memory, kind)) }); any = true; } } - try writer.writeByte(')'); + try w.writeByte(')'); }, .uwtable => |uwtable| if (uwtable != .none) { - try writer.print(" {s}", .{@tagName(attribute)}); - if (uwtable != UwTable.default) try writer.print("({s})", .{@tagName(uwtable)}); + try w.print(" {s}", .{@tagName(attribute)}); + if (uwtable != UwTable.default) try w.print("({s})", .{@tagName(uwtable)}); }, - .vscale_range => |vscale_range| try writer.print(" {s}({d},{d})", .{ + .vscale_range => |vscale_range| try w.print(" {s}({d},{d})", .{ @tagName(attribute), vscale_range.min.toByteUnits().?, vscale_range.max.toByteUnits() orelse 0, }), - .string => |string_attr| if (comptime std.mem.indexOfScalar(u8, fmt_str, '"') != null) { - try writer.print(" {\"}", .{string_attr.kind.fmt(data.builder)}); + .string => |string_attr| if (data.flags.quote) { + try w.print(" {f}", .{string_attr.kind.fmtQ(data.builder)}); if (string_attr.value != .empty) - try writer.print("={\"}", .{string_attr.value.fmt(data.builder)}); + try w.print("={f}", .{string_attr.value.fmtQ(data.builder)}); }, .none => unreachable, } } - pub fn fmt(self: Index, builder: *const Builder) std.fmt.Formatter(format) { - return .{ .data = .{ .attribute_index = self, .builder = builder } }; + pub fn fmt(self: Index, builder: *const Builder, flags: FormatData.Flags) std.fmt.Formatter(FormatData, format) { + return .{ .data = .{ .attribute_index = self, .builder = builder, .flags = flags } }; } fn toStorage(self: Index, builder: *const Builder) Storage { @@ -1582,20 +1599,18 @@ pub const Attributes = enum(u32) { const FormatData = struct { attributes: Attributes, builder: *const Builder, + flags: Flags = .{}, + const Flags = Attribute.Index.FormatData.Flags; }; - fn format( - data: FormatData, - comptime fmt_str: []const u8, - fmt_opts: 
std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { + fn format(data: FormatData, w: *Writer) Writer.Error!void { for (data.attributes.slice(data.builder)) |attribute_index| try Attribute.Index.format(.{ .attribute_index = attribute_index, .builder = data.builder, - }, fmt_str, fmt_opts, writer); + .flags = data.flags, + }, w); } - pub fn fmt(self: Attributes, builder: *const Builder) std.fmt.Formatter(format) { - return .{ .data = .{ .attributes = self, .builder = builder } }; + pub fn fmt(self: Attributes, builder: *const Builder, flags: FormatData.Flags) std.fmt.Formatter(FormatData, format) { + return .{ .data = .{ .attributes = self, .builder = builder, .flags = flags } }; } }; @@ -1781,24 +1796,14 @@ pub const Linkage = enum(u4) { extern_weak = 7, external = 0, - pub fn format( - self: Linkage, - comptime _: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { - if (self != .external) try writer.print(" {s}", .{@tagName(self)}); + pub fn format(self: Linkage, w: *Writer) Writer.Error!void { + if (self != .external) try w.print(" {s}", .{@tagName(self)}); } - fn formatOptional( - data: ?Linkage, - comptime _: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { - if (data) |linkage| try writer.print(" {s}", .{@tagName(linkage)}); + fn formatOptional(data: ?Linkage, w: *Writer) Writer.Error!void { + if (data) |linkage| try w.print(" {s}", .{@tagName(linkage)}); } - pub fn fmtOptional(self: ?Linkage) std.fmt.Formatter(formatOptional) { + pub fn fmtOptional(self: ?Linkage) std.fmt.Formatter(?Linkage, formatOptional) { return .{ .data = self }; } }; @@ -1808,13 +1813,8 @@ pub const Preemption = enum { dso_local, implicit_dso_local, - pub fn format( - self: Preemption, - comptime _: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { - if (self == .dso_local) try writer.print(" {s}", .{@tagName(self)}); + pub fn format(self: 
Preemption, w: *Writer) Writer.Error!void { + if (self == .dso_local) try w.print(" {s}", .{@tagName(self)}); } }; @@ -1831,12 +1831,7 @@ pub const Visibility = enum(u2) { }; } - pub fn format( - self: Visibility, - comptime _: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { + pub fn format(self: Visibility, writer: *Writer) Writer.Error!void { if (self != .default) try writer.print(" {s}", .{@tagName(self)}); } }; @@ -1846,13 +1841,8 @@ pub const DllStorageClass = enum(u2) { dllimport = 1, dllexport = 2, - pub fn format( - self: DllStorageClass, - comptime _: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { - if (self != .default) try writer.print(" {s}", .{@tagName(self)}); + pub fn format(self: DllStorageClass, w: *Writer) Writer.Error!void { + if (self != .default) try w.print(" {s}", .{@tagName(self)}); } }; @@ -1863,15 +1853,31 @@ pub const ThreadLocal = enum(u3) { initialexec = 3, localexec = 4, - pub fn format( - self: ThreadLocal, - comptime prefix: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { - if (self == .default) return; - try writer.print("{s}thread_local", .{prefix}); - if (self != .generaldynamic) try writer.print("({s})", .{@tagName(self)}); + pub fn format(tl: ThreadLocal, w: *Writer) Writer.Error!void { + return Prefixed.format(.{ .thread_local = tl, .prefix = "" }, w); + } + + pub const Prefixed = struct { + thread_local: ThreadLocal, + prefix: []const u8, + + pub fn format(p: Prefixed, w: *Writer) Writer.Error!void { + switch (p.thread_local) { + .default => return, + .generaldynamic => { + var vecs: [2][]const u8 = .{ p.prefix, "thread_local" }; + return w.writeVecAll(&vecs); + }, + else => { + var vecs: [4][]const u8 = .{ p.prefix, "thread_local(", @tagName(p.thread_local), ")" }; + return w.writeVecAll(&vecs); + }, + } + } + }; + + pub fn fmt(tl: ThreadLocal, prefix: []const u8) Prefixed { + return .{ 
.thread_local = tl, .prefix = prefix }; } }; @@ -1882,13 +1888,8 @@ pub const UnnamedAddr = enum(u2) { unnamed_addr = 1, local_unnamed_addr = 2, - pub fn format( - self: UnnamedAddr, - comptime _: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { - if (self != .default) try writer.print(" {s}", .{@tagName(self)}); + pub fn format(self: UnnamedAddr, w: *Writer) Writer.Error!void { + if (self != .default) try w.print(" {s}", .{@tagName(self)}); } }; @@ -1981,13 +1982,24 @@ pub const AddrSpace = enum(u24) { pub const funcref: AddrSpace = @enumFromInt(20); }; - pub fn format( - self: AddrSpace, - comptime prefix: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { - if (self != .default) try writer.print("{s}addrspace({d})", .{ prefix, @intFromEnum(self) }); + pub fn format(addr_space: AddrSpace, w: *Writer) Writer.Error!void { + return Prefixed.format(.{ .addr_space = addr_space, .prefix = "" }, w); + } + + pub const Prefixed = struct { + addr_space: AddrSpace, + prefix: []const u8, + + pub fn format(p: Prefixed, w: *Writer) Writer.Error!void { + switch (p.addr_space) { + .default => return, + else => return w.print("{s}addrspace({d})", .{ p.prefix, p.addr_space }), + } + } + }; + + pub fn fmt(addr_space: AddrSpace, prefix: []const u8) Prefixed { + return .{ .addr_space = addr_space, .prefix = prefix }; } }; @@ -1995,15 +2007,8 @@ pub const ExternallyInitialized = enum { default, externally_initialized, - pub fn format( - self: ExternallyInitialized, - comptime _: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { - if (self == .default) return; - try writer.writeByte(' '); - try writer.writeAll(@tagName(self)); + pub fn format(self: ExternallyInitialized, w: *Writer) Writer.Error!void { + if (self != .default) try w.print(" {s}", .{@tagName(self)}); } }; @@ -2026,13 +2031,18 @@ pub const Alignment = enum(u6) { return if (self == .default) 0 else 
(@intFromEnum(self) + 1); } - pub fn format( - self: Alignment, - comptime prefix: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { - try writer.print("{s}align {d}", .{ prefix, self.toByteUnits() orelse return }); + pub const Prefixed = struct { + alignment: Alignment, + prefix: []const u8, + + pub fn format(p: Prefixed, w: *Writer) Writer.Error!void { + const byte_units = p.alignment.toByteUnits() orelse return; + return w.print("{s}align {d}", .{ p.prefix, byte_units }); + } + }; + + pub fn fmt(alignment: Alignment, prefix: []const u8) Prefixed { + return .{ .alignment = alignment, .prefix = prefix }; } }; @@ -2105,12 +2115,7 @@ pub const CallConv = enum(u10) { pub const default = CallConv.ccc; - pub fn format( - self: CallConv, - comptime _: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { + pub fn format(self: CallConv, w: *Writer) Writer.Error!void { switch (self) { default => {}, .fastcc, @@ -2164,8 +2169,8 @@ pub const CallConv = enum(u10) { .aarch64_sme_preservemost_from_x2, .m68k_rtdcc, .riscv_vectorcallcc, - => try writer.print(" {s}", .{@tagName(self)}), - _ => try writer.print(" cc{d}", .{@intFromEnum(self)}), + => try w.print(" {s}", .{@tagName(self)}), + _ => try w.print(" cc{d}", .{@intFromEnum(self)}), } } }; @@ -2190,31 +2195,25 @@ pub const StrtabString = enum(u32) { const FormatData = struct { string: StrtabString, builder: *const Builder, + quote_behavior: ?QuoteBehavior, }; - fn format( - data: FormatData, - comptime fmt_str: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { - if (comptime std.mem.indexOfNone(u8, fmt_str, "\"r")) |_| - @compileError("invalid format string: '" ++ fmt_str ++ "'"); + fn format(data: FormatData, w: *Writer) Writer.Error!void { assert(data.string != .none); const string_slice = data.string.slice(data.builder) orelse - return writer.print("{d}", .{@intFromEnum(data.string)}); - if 
(comptime std.mem.indexOfScalar(u8, fmt_str, 'r')) |_| - return writer.writeAll(string_slice); - try printEscapedString( - string_slice, - if (comptime std.mem.indexOfScalar(u8, fmt_str, '"')) |_| - .always_quote - else - .quote_unless_valid_identifier, - writer, - ); + return w.print("{d}", .{@intFromEnum(data.string)}); + const quote_behavior = data.quote_behavior orelse return w.writeAll(string_slice); + return printEscapedString(string_slice, quote_behavior, w); } - pub fn fmt(self: StrtabString, builder: *const Builder) std.fmt.Formatter(format) { - return .{ .data = .{ .string = self, .builder = builder } }; + pub fn fmt( + self: StrtabString, + builder: *const Builder, + quote_behavior: ?QuoteBehavior, + ) std.fmt.Formatter(FormatData, format) { + return .{ .data = .{ + .string = self, + .builder = builder, + .quote_behavior = quote_behavior, + } }; } fn fromIndex(index: ?usize) StrtabString { @@ -2264,7 +2263,7 @@ pub fn strtabStringFmt(self: *Builder, comptime fmt_str: []const u8, fmt_args: a } pub fn strtabStringFmtAssumeCapacity(self: *Builder, comptime fmt_str: []const u8, fmt_args: anytype) StrtabString { - self.strtab_string_bytes.writer(undefined).print(fmt_str, fmt_args) catch unreachable; + self.strtab_string_bytes.printAssumeCapacity(fmt_str, fmt_args); return self.trailingStrtabStringAssumeCapacity(); } @@ -2383,17 +2382,12 @@ pub const Global = struct { global: Index, builder: *const Builder, }; - fn format( - data: FormatData, - comptime _: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { - try writer.print("@{}", .{ - data.global.unwrap(data.builder).name(data.builder).fmt(data.builder), + fn format(data: FormatData, w: *Writer) Writer.Error!void { + try w.print("@{f}", .{ + data.global.unwrap(data.builder).name(data.builder).fmt(data.builder, null), }); } - pub fn fmt(self: Index, builder: *const Builder) std.fmt.Formatter(format) { + pub fn fmt(self: Index, builder: *const Builder) 
std.fmt.Formatter(FormatData, format) { return .{ .data = .{ .global = self, .builder = builder } }; } @@ -4833,29 +4827,23 @@ pub const Function = struct { instruction: Instruction.Index, function: Function.Index, builder: *Builder, + flags: FormatFlags, }; - fn format( - data: FormatData, - comptime fmt_str: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { - if (comptime std.mem.indexOfNone(u8, fmt_str, ", %")) |_| - @compileError("invalid format string: '" ++ fmt_str ++ "'"); - if (comptime std.mem.indexOfScalar(u8, fmt_str, ',') != null) { + fn format(data: FormatData, w: *Writer) Writer.Error!void { + if (data.flags.comma) { if (data.instruction == .none) return; - try writer.writeByte(','); + try w.writeByte(','); } - if (comptime std.mem.indexOfScalar(u8, fmt_str, ' ') != null) { + if (data.flags.space) { if (data.instruction == .none) return; - try writer.writeByte(' '); + try w.writeByte(' '); } - if (comptime std.mem.indexOfScalar(u8, fmt_str, '%') != null) try writer.print( - "{%} ", - .{data.instruction.typeOf(data.function, data.builder).fmt(data.builder)}, + if (data.flags.percent) try w.print( + "{f} ", + .{data.instruction.typeOf(data.function, data.builder).fmt(data.builder, .percent)}, ); assert(data.instruction != .none); - try writer.print("%{}", .{ + try w.print("%{f}", .{ data.instruction.name(data.function.ptrConst(data.builder)).fmt(data.builder), }); } @@ -4863,8 +4851,14 @@ pub const Function = struct { self: Instruction.Index, function: Function.Index, builder: *Builder, - ) std.fmt.Formatter(format) { - return .{ .data = .{ .instruction = self, .function = function, .builder = builder } }; + flags: FormatFlags, + ) std.fmt.Formatter(FormatData, format) { + return .{ .data = .{ + .instruction = self, + .function = function, + .builder = builder, + .flags = flags, + } }; } }; @@ -6361,10 +6355,10 @@ pub const WipFunction = struct { while (true) { gop.value_ptr.* = 
@enumFromInt(@intFromEnum(gop.value_ptr.*) + 1); - const unique_name = try wip_name.builder.fmt("{r}{s}{r}", .{ - name.fmt(wip_name.builder), + const unique_name = try wip_name.builder.fmt("{f}{s}{f}", .{ + name.fmtRaw(wip_name.builder), sep, - gop.value_ptr.fmt(wip_name.builder), + gop.value_ptr.fmtRaw(wip_name.builder), }); const unique_gop = try wip_name.next_unique_name.getOrPut(unique_name); if (!unique_gop.found_existing) { @@ -7031,13 +7025,27 @@ pub const MemoryAccessKind = enum(u1) { normal, @"volatile", - pub fn format( - self: MemoryAccessKind, - comptime prefix: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { - if (self != .normal) try writer.print("{s}{s}", .{ prefix, @tagName(self) }); + pub fn format(memory_access_kind: MemoryAccessKind, w: *Writer) Writer.Error!void { + return Prefixed.format(.{ .memory_access_kind = memory_access_kind, .prefix = "" }, w); + } + + pub const Prefixed = struct { + memory_access_kind: MemoryAccessKind, + prefix: []const u8, + + pub fn format(p: Prefixed, w: *Writer) Writer.Error!void { + switch (p.memory_access_kind) { + .normal => return, + .@"volatile" => { + var vecs: [2][]const u8 = .{ p.prefix, "volatile" }; + return w.writeVecAll(&vecs); + }, + } + } + }; + + pub fn fmt(memory_access_kind: MemoryAccessKind, prefix: []const u8) Prefixed { + return .{ .memory_access_kind = memory_access_kind, .prefix = prefix }; } }; @@ -7045,15 +7053,27 @@ pub const SyncScope = enum(u1) { singlethread, system, - pub fn format( - self: SyncScope, - comptime prefix: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { - if (self != .system) try writer.print( - \\{s}syncscope("{s}") - , .{ prefix, @tagName(self) }); + pub fn format(sync_scope: SyncScope, w: *Writer) Writer.Error!void { + return Prefixed.format(.{ .sync_scope = sync_scope, .prefix = "" }, w); + } + + pub const Prefixed = struct { + sync_scope: SyncScope, + prefix: []const u8, + + pub fn 
format(p: Prefixed, w: *Writer) Writer.Error!void { + switch (p.sync_scope) { + .system => return, + .singlethread => { + var vecs: [2][]const u8 = .{ p.prefix, "syncscope(\"singlethread\")" }; + return w.writeVecAll(&vecs); + }, + } + } + }; + + pub fn fmt(sync_scope: SyncScope, prefix: []const u8) Prefixed { + return .{ .sync_scope = sync_scope, .prefix = prefix }; } }; @@ -7066,13 +7086,27 @@ pub const AtomicOrdering = enum(u3) { acq_rel = 5, seq_cst = 6, - pub fn format( - self: AtomicOrdering, - comptime prefix: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { - if (self != .none) try writer.print("{s}{s}", .{ prefix, @tagName(self) }); + pub fn format(atomic_ordering: AtomicOrdering, w: *Writer) Writer.Error!void { + return Prefixed.format(.{ .atomic_ordering = atomic_ordering, .prefix = "" }, w); + } + + pub const Prefixed = struct { + atomic_ordering: AtomicOrdering, + prefix: []const u8, + + pub fn format(p: Prefixed, w: *Writer) Writer.Error!void { + switch (p.atomic_ordering) { + .none => return, + else => { + var vecs: [2][]const u8 = .{ p.prefix, @tagName(p.atomic_ordering) }; + return w.writeVecAll(&vecs); + }, + } + } + }; + + pub fn fmt(atomic_ordering: AtomicOrdering, prefix: []const u8) Prefixed { + return .{ .atomic_ordering = atomic_ordering, .prefix = prefix }; } }; @@ -7486,27 +7520,21 @@ pub const Constant = enum(u32) { const FormatData = struct { constant: Constant, builder: *Builder, + flags: FormatFlags, }; - fn format( - data: FormatData, - comptime fmt_str: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { - if (comptime std.mem.indexOfNone(u8, fmt_str, ", %")) |_| - @compileError("invalid format string: '" ++ fmt_str ++ "'"); - if (comptime std.mem.indexOfScalar(u8, fmt_str, ',') != null) { + fn format(data: FormatData, w: *Writer) Writer.Error!void { + if (data.flags.comma) { if (data.constant == .no_init) return; - try writer.writeByte(','); + try 
w.writeByte(','); } - if (comptime std.mem.indexOfScalar(u8, fmt_str, ' ') != null) { + if (data.flags.space) { if (data.constant == .no_init) return; - try writer.writeByte(' '); + try w.writeByte(' '); } - if (comptime std.mem.indexOfScalar(u8, fmt_str, '%') != null) - try writer.print("{%} ", .{data.constant.typeOf(data.builder).fmt(data.builder)}); + if (data.flags.percent) + try w.print("{f} ", .{data.constant.typeOf(data.builder).fmt(data.builder, .percent)}); assert(data.constant != .no_init); - if (std.enums.tagName(Constant, data.constant)) |name| return writer.writeAll(name); + if (std.enums.tagName(Constant, data.constant)) |name| return w.writeAll(name); switch (data.constant.unwrap()) { .constant => |constant| { const item = data.builder.constant_items.get(constant); @@ -7543,13 +7571,13 @@ pub const Constant = enum(u32) { var stack align(@alignOf(ExpectedContents)) = std.heap.stackFallback(@sizeOf(ExpectedContents), data.builder.gpa); const allocator = stack.get(); - const str = try bigint.toStringAlloc(allocator, 10, undefined); + const str = bigint.toStringAlloc(allocator, 10, undefined) catch return error.WriteFailed; defer allocator.free(str); - try writer.writeAll(str); + try w.writeAll(str); }, .half, .bfloat, - => |tag| try writer.print("0x{c}{X:0>4}", .{ @as(u8, switch (tag) { + => |tag| try w.print("0x{c}{X:0>4}", .{ @as(u8, switch (tag) { .half => 'H', .bfloat => 'R', else => unreachable, @@ -7580,7 +7608,7 @@ pub const Constant = enum(u32) { ) + 1, else => 0, }; - try writer.print("0x{X:0>16}", .{@as(u64, @bitCast(Float.Repr(f64){ + try w.print("0x{X:0>16}", .{@as(u64, @bitCast(Float.Repr(f64){ .mantissa = std.math.shl( Mantissa64, repr.mantissa, @@ -7602,13 +7630,13 @@ pub const Constant = enum(u32) { }, .double => { const extra = data.builder.constantExtraData(Double, item.data); - try writer.print("0x{X:0>8}{X:0>8}", .{ extra.hi, extra.lo }); + try w.print("0x{X:0>8}{X:0>8}", .{ extra.hi, extra.lo }); }, .fp128, .ppc_fp128, => |tag| { 
const extra = data.builder.constantExtraData(Fp128, item.data); - try writer.print("0x{c}{X:0>8}{X:0>8}{X:0>8}{X:0>8}", .{ + try w.print("0x{c}{X:0>8}{X:0>8}{X:0>8}{X:0>8}", .{ @as(u8, switch (tag) { .fp128 => 'L', .ppc_fp128 => 'M', @@ -7622,7 +7650,7 @@ pub const Constant = enum(u32) { }, .x86_fp80 => { const extra = data.builder.constantExtraData(Fp80, item.data); - try writer.print("0xK{X:0>4}{X:0>8}{X:0>8}", .{ + try w.print("0xK{X:0>4}{X:0>8}{X:0>8}", .{ extra.hi, extra.lo_hi, extra.lo_lo, }); }, @@ -7631,7 +7659,7 @@ pub const Constant = enum(u32) { .zeroinitializer, .undef, .poison, - => |tag| try writer.writeAll(@tagName(tag)), + => |tag| try w.writeAll(@tagName(tag)), .structure, .packed_structure, .array, @@ -7640,7 +7668,7 @@ pub const Constant = enum(u32) { var extra = data.builder.constantExtraDataTrail(Aggregate, item.data); const len: u32 = @intCast(extra.data.type.aggregateLen(data.builder)); const vals = extra.trail.next(len, Constant, data.builder); - try writer.writeAll(switch (tag) { + try w.writeAll(switch (tag) { .structure => "{ ", .packed_structure => "<{ ", .array => "[", @@ -7648,10 +7676,10 @@ pub const Constant = enum(u32) { else => unreachable, }); for (vals, 0..) 
|val, index| { - if (index > 0) try writer.writeAll(", "); - try writer.print("{%}", .{val.fmt(data.builder)}); + if (index > 0) try w.writeAll(", "); + try w.print("{f}", .{val.fmt(data.builder, .{ .percent = true })}); } - try writer.writeAll(switch (tag) { + try w.writeAll(switch (tag) { .structure => " }", .packed_structure => " }>", .array => "]", @@ -7662,30 +7690,30 @@ pub const Constant = enum(u32) { .splat => { const extra = data.builder.constantExtraData(Splat, item.data); const len = extra.type.vectorLen(data.builder); - try writer.writeByte('<'); + try w.writeByte('<'); for (0..len) |index| { - if (index > 0) try writer.writeAll(", "); - try writer.print("{%}", .{extra.value.fmt(data.builder)}); + if (index > 0) try w.writeAll(", "); + try w.print("{f}", .{extra.value.fmt(data.builder, .{ .percent = true })}); } - try writer.writeByte('>'); + try w.writeByte('>'); }, - .string => try writer.print("c{\"}", .{ - @as(String, @enumFromInt(item.data)).fmt(data.builder), + .string => try w.print("c{f}", .{ + @as(String, @enumFromInt(item.data)).fmtQ(data.builder), }), .blockaddress => |tag| { const extra = data.builder.constantExtraData(BlockAddress, item.data); const function = extra.function.ptrConst(data.builder); - try writer.print("{s}({}, {})", .{ + try w.print("{s}({f}, {f})", .{ @tagName(tag), function.global.fmt(data.builder), - extra.block.toInst(function).fmt(extra.function, data.builder), + extra.block.toInst(function).fmt(extra.function, data.builder, .{}), }); }, .dso_local_equivalent, .no_cfi, => |tag| { const function: Function.Index = @enumFromInt(item.data); - try writer.print("{s} {}", .{ + try w.print("{s} {f}", .{ @tagName(tag), function.ptrConst(data.builder).global.fmt(data.builder), }); @@ -7697,10 +7725,10 @@ pub const Constant = enum(u32) { .addrspacecast, => |tag| { const extra = data.builder.constantExtraData(Cast, item.data); - try writer.print("{s} ({%} to {%})", .{ + try w.print("{s} ({f} to {f})", .{ @tagName(tag), - 
extra.val.fmt(data.builder), - extra.type.fmt(data.builder), + extra.val.fmt(data.builder, .{ .percent = true }), + extra.type.fmt(data.builder, .percent), }); }, .getelementptr, @@ -7709,13 +7737,13 @@ pub const Constant = enum(u32) { var extra = data.builder.constantExtraDataTrail(GetElementPtr, item.data); const indices = extra.trail.next(extra.data.info.indices_len, Constant, data.builder); - try writer.print("{s} ({%}, {%}", .{ + try w.print("{s} ({f}, {f}", .{ @tagName(tag), - extra.data.type.fmt(data.builder), - extra.data.base.fmt(data.builder), + extra.data.type.fmt(data.builder, .percent), + extra.data.base.fmt(data.builder, .{ .percent = true }), }); - for (indices) |index| try writer.print(", {%}", .{index.fmt(data.builder)}); - try writer.writeByte(')'); + for (indices) |index| try w.print(", {f}", .{index.fmt(data.builder, .{ .percent = true })}); + try w.writeByte(')'); }, .add, .@"add nsw", @@ -7727,10 +7755,10 @@ pub const Constant = enum(u32) { .xor, => |tag| { const extra = data.builder.constantExtraData(Binary, item.data); - try writer.print("{s} ({%}, {%})", .{ + try w.print("{s} ({f}, {f})", .{ @tagName(tag), - extra.lhs.fmt(data.builder), - extra.rhs.fmt(data.builder), + extra.lhs.fmt(data.builder, .{ .percent = true }), + extra.rhs.fmt(data.builder, .{ .percent = true }), }); }, .@"asm", @@ -7751,19 +7779,23 @@ pub const Constant = enum(u32) { .@"asm sideeffect alignstack inteldialect unwind", => |tag| { const extra = data.builder.constantExtraData(Assembly, item.data); - try writer.print("{s} {\"}, {\"}", .{ + try w.print("{s} {f}, {f}", .{ @tagName(tag), - extra.assembly.fmt(data.builder), - extra.constraints.fmt(data.builder), + extra.assembly.fmtQ(data.builder), + extra.constraints.fmtQ(data.builder), }); }, } }, - .global => |global| try writer.print("{}", .{global.fmt(data.builder)}), + .global => |global| try w.print("{f}", .{global.fmt(data.builder)}), } } - pub fn fmt(self: Constant, builder: *Builder) std.fmt.Formatter(format) { - 
return .{ .data = .{ .constant = self, .builder = builder } }; + pub fn fmt(self: Constant, builder: *Builder, flags: FormatFlags) std.fmt.Formatter(FormatData, format) { + return .{ .data = .{ + .constant = self, + .builder = builder, + .flags = flags, + } }; } }; @@ -7818,28 +7850,26 @@ pub const Value = enum(u32) { value: Value, function: Function.Index, builder: *Builder, + flags: FormatFlags, }; - fn format( - data: FormatData, - comptime fmt_str: []const u8, - fmt_opts: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { + fn format(data: FormatData, w: *Writer) Writer.Error!void { switch (data.value.unwrap()) { .instruction => |instruction| try Function.Instruction.Index.format(.{ .instruction = instruction, .function = data.function, .builder = data.builder, - }, fmt_str, fmt_opts, writer), + .flags = data.flags, + }, w), .constant => |constant| try Constant.format(.{ .constant = constant, .builder = data.builder, - }, fmt_str, fmt_opts, writer), + .flags = data.flags, + }, w), .metadata => unreachable, } } - pub fn fmt(self: Value, function: Function.Index, builder: *Builder) std.fmt.Formatter(format) { - return .{ .data = .{ .value = self, .function = function, .builder = builder } }; + pub fn fmt(self: Value, function: Function.Index, builder: *Builder, flags: FormatFlags) std.fmt.Formatter(FormatData, format) { + return .{ .data = .{ .value = self, .function = function, .builder = builder, .flags = flags } }; } }; @@ -7869,15 +7899,10 @@ pub const MetadataString = enum(u32) { metadata_string: MetadataString, builder: *const Builder, }; - fn format( - data: FormatData, - comptime _: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { - try printEscapedString(data.metadata_string.slice(data.builder), .always_quote, writer); + fn format(data: FormatData, w: *Writer) Writer.Error!void { + try printEscapedString(data.metadata_string.slice(data.builder), .always_quote, w); } - fn fmt(self: 
MetadataString, builder: *const Builder) std.fmt.Formatter(format) { + fn fmt(self: MetadataString, builder: *const Builder) std.fmt.Formatter(FormatData, format) { return .{ .data = .{ .metadata_string = self, .builder = builder } }; } }; @@ -8039,29 +8064,24 @@ pub const Metadata = enum(u32) { AllCallsDescribed: bool = false, Unused: u2 = 0, - pub fn format( - self: DIFlags, - comptime _: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { + pub fn format(self: DIFlags, w: *Writer) Writer.Error!void { var need_pipe = false; inline for (@typeInfo(DIFlags).@"struct".fields) |field| { switch (@typeInfo(field.type)) { .bool => if (@field(self, field.name)) { - if (need_pipe) try writer.writeAll(" | ") else need_pipe = true; - try writer.print("DIFlag{s}", .{field.name}); + if (need_pipe) try w.writeAll(" | ") else need_pipe = true; + try w.print("DIFlag{s}", .{field.name}); }, .@"enum" => if (@field(self, field.name) != .Zero) { - if (need_pipe) try writer.writeAll(" | ") else need_pipe = true; - try writer.print("DIFlag{s}", .{@tagName(@field(self, field.name))}); + if (need_pipe) try w.writeAll(" | ") else need_pipe = true; + try w.print("DIFlag{s}", .{@tagName(@field(self, field.name))}); }, .int => assert(@field(self, field.name) == 0), else => @compileError("bad field type: " ++ field.name ++ ": " ++ @typeName(field.type)), } } - if (!need_pipe) try writer.writeByte('0'); + if (!need_pipe) try w.writeByte('0'); } }; @@ -8101,29 +8121,24 @@ pub const Metadata = enum(u32) { ObjCDirect: bool = false, Unused: u20 = 0, - pub fn format( - self: DISPFlags, - comptime _: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { + pub fn format(self: DISPFlags, w: *Writer) Writer.Error!void { var need_pipe = false; inline for (@typeInfo(DISPFlags).@"struct".fields) |field| { switch (@typeInfo(field.type)) { .bool => if (@field(self, field.name)) { - if (need_pipe) try writer.writeAll(" | ") else 
need_pipe = true; - try writer.print("DISPFlag{s}", .{field.name}); + if (need_pipe) try w.writeAll(" | ") else need_pipe = true; + try w.print("DISPFlag{s}", .{field.name}); }, .@"enum" => if (@field(self, field.name) != .Zero) { - if (need_pipe) try writer.writeAll(" | ") else need_pipe = true; - try writer.print("DISPFlag{s}", .{@tagName(@field(self, field.name))}); + if (need_pipe) try w.writeAll(" | ") else need_pipe = true; + try w.print("DISPFlag{s}", .{@tagName(@field(self, field.name))}); }, .int => assert(@field(self, field.name) == 0), else => @compileError("bad field type: " ++ field.name ++ ": " ++ @typeName(field.type)), } } - if (!need_pipe) try writer.writeByte('0'); + if (!need_pipe) try w.writeByte('0'); } }; @@ -8298,6 +8313,7 @@ pub const Metadata = enum(u32) { formatter: *Formatter, prefix: []const u8 = "", node: Node, + specialized: ?FormatFlags, const Node = union(enum) { none, @@ -8323,20 +8339,14 @@ pub const Metadata = enum(u32) { }; }; }; - fn format( - data: FormatData, - comptime fmt_str: []const u8, - fmt_opts: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { + fn format(data: FormatData, w: *Writer) Writer.Error!void { if (data.node == .none) return; - const is_specialized = fmt_str.len > 0 and fmt_str[0] == 'S'; - const recurse_fmt_str = if (is_specialized) fmt_str[1..] 
else fmt_str; + const is_specialized = data.specialized != null; - if (data.formatter.need_comma) try writer.writeAll(", "); + if (data.formatter.need_comma) try w.writeAll(", "); defer data.formatter.need_comma = true; - try writer.writeAll(data.prefix); + try w.writeAll(data.prefix); const builder = data.formatter.builder; switch (data.node) { @@ -8351,54 +8361,57 @@ pub const Metadata = enum(u32) { .expression => { var extra = builder.metadataExtraDataTrail(Expression, item.data); const elements = extra.trail.next(extra.data.elements_len, u32, builder); - try writer.writeAll("!DIExpression("); + try w.writeAll("!DIExpression("); for (elements) |element| try format(.{ .formatter = data.formatter, .node = .{ .u64 = element }, - }, "%", fmt_opts, writer); - try writer.writeByte(')'); + .specialized = .{ .percent = true }, + }, w); + try w.writeByte(')'); }, .constant => try Constant.format(.{ .constant = @enumFromInt(item.data), .builder = builder, - }, recurse_fmt_str, fmt_opts, writer), + .flags = data.specialized orelse .{}, + }, w), else => unreachable, } }, - .index => |node| try writer.print("!{d}", .{node}), + .index => |node| try w.print("!{d}", .{node}), inline .local_value, .local_metadata => |node, tag| try Value.format(.{ .value = node.value, .function = node.function, .builder = builder, - }, switch (tag) { - .local_value => recurse_fmt_str, - .local_metadata => "%", - else => unreachable, - }, fmt_opts, writer), + .flags = switch (tag) { + .local_value => data.specialized orelse .{}, + .local_metadata => .{ .percent = true }, + else => unreachable, + }, + }, w), inline .local_inline, .local_index => |node, tag| { - if (comptime std.mem.eql(u8, recurse_fmt_str, "%")) - try writer.print("{%} ", .{Type.metadata.fmt(builder)}); + if (data.specialized) |flags| { + if (flags.onlyPercent()) { + try w.print("{f} ", .{Type.metadata.fmt(builder, .percent)}); + } + } try format(.{ .formatter = data.formatter, .node = @unionInit(FormatData.Node, 
@tagName(tag)["local_".len..], node), - }, "%", fmt_opts, writer); + .specialized = .{ .percent = true }, + }, w); }, - .string => |node| try writer.print((if (is_specialized) "" else "!") ++ "{}", .{ - node.fmt(builder), + .string => |node| try w.print("{s}{f}", .{ + @as([]const u8, if (is_specialized) "" else "!"), node.fmt(builder), }), - inline .bool, - .u32, - .u64, - .di_flags, - .sp_flags, - => |node| try writer.print("{}", .{node}), - .raw => |node| try writer.writeAll(node), + inline .bool, .u32, .u64 => |node| try w.print("{}", .{node}), + inline .di_flags, .sp_flags => |node| try w.print("{f}", .{node}), + .raw => |node| try w.writeAll(node), } } - inline fn fmt(formatter: *Formatter, prefix: []const u8, node: anytype) switch (@TypeOf(node)) { + inline fn fmt(formatter: *Formatter, prefix: []const u8, node: anytype, special: ?FormatFlags) switch (@TypeOf(node)) { Metadata => Allocator.Error, else => error{}, - }!std.fmt.Formatter(format) { + }!std.fmt.Formatter(FormatData, format) { const Node = @TypeOf(node); const MaybeNode = switch (@typeInfo(Node)) { .optional => Node, @@ -8435,6 +8448,7 @@ pub const Metadata = enum(u32) { .optional, .null => .none, else => unreachable, }, + .specialized = special, } }; } inline fn fmtLocal( @@ -8442,7 +8456,7 @@ pub const Metadata = enum(u32) { prefix: []const u8, value: Value, function: Function.Index, - ) Allocator.Error!std.fmt.Formatter(format) { + ) Allocator.Error!std.fmt.Formatter(FormatData, format) { return .{ .data = .{ .formatter = formatter, .prefix = prefix, @@ -8467,6 +8481,7 @@ pub const Metadata = enum(u32) { }; }, }, + .specialized = null, } }; } fn refUnwrapped(formatter: *Formatter, node: Metadata) Allocator.Error!FormatData.Node { @@ -8506,7 +8521,7 @@ pub const Metadata = enum(u32) { DIGlobalVariableExpression, }, nodes: anytype, - writer: anytype, + w: *Writer, ) !void { comptime var fmt_str: []const u8 = ""; const names = comptime std.meta.fieldNames(@TypeOf(nodes)); @@ -8523,10 +8538,10 @@ 
pub const Metadata = enum(u32) { } fmt_str = fmt_str ++ "("; inline for (fields[2..], names) |*field, name| { - fmt_str = fmt_str ++ "{[" ++ name ++ "]S}"; + fmt_str = fmt_str ++ "{[" ++ name ++ "]f}"; field.* = .{ .name = name, - .type = std.fmt.Formatter(format), + .type = std.fmt.Formatter(FormatData, format), .default_value_ptr = null, .is_comptime = false, .alignment = 0, @@ -8545,8 +8560,9 @@ pub const Metadata = enum(u32) { inline for (names) |name| @field(fmt_args, name) = try formatter.fmt( name ++ ": ", @field(nodes, name), + null, ); - try writer.print(fmt_str, fmt_args); + try w.print(fmt_str, fmt_args); } }; }; @@ -8636,7 +8652,7 @@ pub fn init(options: Options) Allocator.Error!Builder { inline for (.{ 0, 4 }) |addr_space_index| { const addr_space: AddrSpace = @enumFromInt(addr_space_index); assert(self.ptrTypeAssumeCapacity(addr_space) == - @field(Type, std.fmt.comptimePrint("ptr{ }", .{addr_space}))); + @field(Type, std.fmt.comptimePrint("ptr{f}", .{addr_space.fmt(" ")}))); } } @@ -8759,16 +8775,8 @@ pub fn deinit(self: *Builder) void { self.* = undefined; } -pub fn setModuleAsm(self: *Builder) std.ArrayListUnmanaged(u8).Writer { - self.module_asm.clearRetainingCapacity(); - return self.appendModuleAsm(); -} - -pub fn appendModuleAsm(self: *Builder) std.ArrayListUnmanaged(u8).Writer { - return self.module_asm.writer(self.gpa); -} - -pub fn finishModuleAsm(self: *Builder) Allocator.Error!void { +pub fn finishModuleAsm(self: *Builder, aw: *Writer.Allocating) Allocator.Error!void { + self.module_asm = aw.toArrayList(); if (self.module_asm.getLastOrNull()) |last| if (last != '\n') try self.module_asm.append(self.gpa, '\n'); } @@ -8804,7 +8812,7 @@ pub fn fmt(self: *Builder, comptime fmt_str: []const u8, fmt_args: anytype) Allo } pub fn fmtAssumeCapacity(self: *Builder, comptime fmt_str: []const u8, fmt_args: anytype) String { - self.string_bytes.writer(undefined).print(fmt_str, fmt_args) catch unreachable; + self.string_bytes.printAssumeCapacity(fmt_str, 
fmt_args); return self.trailingStringAssumeCapacity(); } @@ -9076,9 +9084,13 @@ pub fn getIntrinsic( const allocator = stack.get(); const name = name: { - const writer = self.strtab_string_bytes.writer(self.gpa); - try writer.print("llvm.{s}", .{@tagName(id)}); - for (overload) |ty| try writer.print(".{m}", .{ty.fmt(self)}); + { + var aw: Writer.Allocating = .fromArrayList(self.gpa, &self.strtab_string_bytes); + const w = &aw.writer; + defer self.strtab_string_bytes = aw.toArrayList(); + w.print("llvm.{s}", .{@tagName(id)}) catch return error.OutOfMemory; + for (overload) |ty| w.print(".{f}", .{ty.fmt(self, .m)}) catch return error.OutOfMemory; + } break :name try self.trailingStrtabString(); }; if (self.getGlobal(name)) |global| return global.ptrConst(self).kind.function; @@ -9492,139 +9504,105 @@ pub fn asmValue( return (try self.asmConst(ty, info, assembly, constraints)).toValue(); } -pub fn dump(self: *Builder) void { - self.print(std.io.getStdErr().writer()) catch {}; +pub fn dump(b: *Builder) void { + var buffer: [4000]u8 = undefined; + const stderr: std.fs.File = .stderr(); + b.printToFile(stderr, &buffer) catch {}; } -pub fn printToFile(self: *Builder, path: []const u8) Allocator.Error!bool { - var file = std.fs.cwd().createFile(path, .{}) catch |err| { - log.err("failed printing LLVM module to \"{s}\": {s}", .{ path, @errorName(err) }); - return false; - }; +pub fn printToFilePath(b: *Builder, dir: std.fs.Dir, path: []const u8) !void { + var buffer: [4000]u8 = undefined; + const file = try dir.createFile(path, .{}); defer file.close(); - self.print(file.writer()) catch |err| { - log.err("failed printing LLVM module to \"{s}\": {s}", .{ path, @errorName(err) }); - return false; - }; - return true; + try b.printToFile(file, &buffer); } -pub fn print(self: *Builder, writer: anytype) (@TypeOf(writer).Error || Allocator.Error)!void { - var bw = std.io.bufferedWriter(writer); - try self.printUnbuffered(bw.writer()); - try bw.flush(); +pub fn printToFile(b: 
*Builder, file: std.fs.File, buffer: []u8) !void { + var fw = file.writer(buffer); + try print(b, &fw.interface); + try fw.interface.flush(); } -fn WriterWithErrors(comptime BackingWriter: type, comptime ExtraErrors: type) type { - return struct { - backing_writer: BackingWriter, - - pub const Error = BackingWriter.Error || ExtraErrors; - pub const Writer = std.io.Writer(*const Self, Error, write); - - const Self = @This(); - - pub fn writer(self: *const Self) Writer { - return .{ .context = self }; - } - - pub fn write(self: *const Self, bytes: []const u8) Error!usize { - return self.backing_writer.write(bytes); - } - }; -} -fn writerWithErrors( - backing_writer: anytype, - comptime ExtraErrors: type, -) WriterWithErrors(@TypeOf(backing_writer), ExtraErrors) { - return .{ .backing_writer = backing_writer }; -} - -pub fn printUnbuffered( - self: *Builder, - backing_writer: anytype, -) (@TypeOf(backing_writer).Error || Allocator.Error)!void { - const writer_with_errors = writerWithErrors(backing_writer, Allocator.Error); - const writer = writer_with_errors.writer(); - +pub fn print(self: *Builder, w: *Writer) (Writer.Error || Allocator.Error)!void { var need_newline = false; var metadata_formatter: Metadata.Formatter = .{ .builder = self, .need_comma = undefined }; defer metadata_formatter.map.deinit(self.gpa); if (self.source_filename != .none or self.data_layout != .none or self.target_triple != .none) { - if (need_newline) try writer.writeByte('\n') else need_newline = true; - if (self.source_filename != .none) try writer.print( + if (need_newline) try w.writeByte('\n') else need_newline = true; + if (self.source_filename != .none) try w.print( \\; ModuleID = '{s}' - \\source_filename = {"} + \\source_filename = {f} \\ - , .{ self.source_filename.slice(self).?, self.source_filename.fmt(self) }); - if (self.data_layout != .none) try writer.print( - \\target datalayout = {"} + , .{ self.source_filename.slice(self).?, self.source_filename.fmtQ(self) }); + if 
(self.data_layout != .none) try w.print( + \\target datalayout = {f} \\ - , .{self.data_layout.fmt(self)}); - if (self.target_triple != .none) try writer.print( - \\target triple = {"} + , .{self.data_layout.fmtQ(self)}); + if (self.target_triple != .none) try w.print( + \\target triple = {f} \\ - , .{self.target_triple.fmt(self)}); + , .{self.target_triple.fmtQ(self)}); } if (self.module_asm.items.len > 0) { - if (need_newline) try writer.writeByte('\n') else need_newline = true; + if (need_newline) try w.writeByte('\n') else need_newline = true; var line_it = std.mem.tokenizeScalar(u8, self.module_asm.items, '\n'); while (line_it.next()) |line| { - try writer.writeAll("module asm "); - try printEscapedString(line, .always_quote, writer); - try writer.writeByte('\n'); + try w.writeAll("module asm "); + try printEscapedString(line, .always_quote, w); + try w.writeByte('\n'); } } if (self.types.count() > 0) { - if (need_newline) try writer.writeByte('\n') else need_newline = true; - for (self.types.keys(), self.types.values()) |id, ty| try writer.print( - \\%{} = type {} + if (need_newline) try w.writeByte('\n') else need_newline = true; + for (self.types.keys(), self.types.values()) |id, ty| try w.print( + \\%{f} = type {f} \\ - , .{ id.fmt(self), ty.fmt(self) }); + , .{ id.fmt(self), ty.fmt(self, .default) }); } if (self.variables.items.len > 0) { - if (need_newline) try writer.writeByte('\n') else need_newline = true; + if (need_newline) try w.writeByte('\n') else need_newline = true; for (self.variables.items) |variable| { if (variable.global.getReplacement(self) != .none) continue; const global = variable.global.ptrConst(self); metadata_formatter.need_comma = true; defer metadata_formatter.need_comma = undefined; - try writer.print( - \\{} ={}{}{}{}{ }{}{ }{} {s} {%}{ }{, }{} + try w.print( + \\{f} ={f}{f}{f}{f}{f}{f}{f}{f} {s} {f}{f}{f}{f} \\ , .{ variable.global.fmt(self), - Linkage.fmtOptional(if (global.linkage == .external and - variable.init != .no_init) 
null else global.linkage), + Linkage.fmtOptional( + if (global.linkage == .external and variable.init != .no_init) null else global.linkage, + ), global.preemption, global.visibility, global.dll_storage_class, - variable.thread_local, + variable.thread_local.fmt(" "), global.unnamed_addr, - global.addr_space, + global.addr_space.fmt(" "), global.externally_initialized, @tagName(variable.mutability), - global.type.fmt(self), - variable.init.fmt(self), - variable.alignment, - try metadata_formatter.fmt("!dbg ", global.dbg), + global.type.fmt(self, .percent), + variable.init.fmt(self, .{ .space = true }), + variable.alignment.fmt(", "), + try metadata_formatter.fmt("!dbg ", global.dbg, null), }); } } if (self.aliases.items.len > 0) { - if (need_newline) try writer.writeByte('\n') else need_newline = true; + if (need_newline) try w.writeByte('\n') else need_newline = true; for (self.aliases.items) |alias| { if (alias.global.getReplacement(self) != .none) continue; const global = alias.global.ptrConst(self); metadata_formatter.need_comma = true; defer metadata_formatter.need_comma = undefined; - try writer.print( - \\{} ={}{}{}{}{ }{} alias {%}, {%}{} + try w.print( + \\{f} ={f}{f}{f}{f}{f}{f} alias {f}, {f}{f} \\ , .{ alias.global.fmt(self), @@ -9632,11 +9610,11 @@ pub fn printUnbuffered( global.preemption, global.visibility, global.dll_storage_class, - alias.thread_local, + alias.thread_local.fmt(" "), global.unnamed_addr, - global.type.fmt(self), - alias.aliasee.fmt(self), - try metadata_formatter.fmt("!dbg ", global.dbg), + global.type.fmt(self, .percent), + alias.aliasee.fmt(self, .{ .percent = true }), + try metadata_formatter.fmt("!dbg ", global.dbg, null), }); } } @@ -9646,17 +9624,17 @@ pub fn printUnbuffered( for (0.., self.functions.items) |function_i, function| { if (function.global.getReplacement(self) != .none) continue; - if (need_newline) try writer.writeByte('\n') else need_newline = true; + if (need_newline) try w.writeByte('\n') else need_newline = 
true; const function_index: Function.Index = @enumFromInt(function_i); const global = function.global.ptrConst(self); const params_len = global.type.functionParameters(self).len; const function_attributes = function.attributes.func(self); - if (function_attributes != .none) try writer.print( - \\; Function Attrs:{} + if (function_attributes != .none) try w.print( + \\; Function Attrs:{f} \\ - , .{function_attributes.fmt(self)}); - try writer.print( - \\{s}{}{}{}{}{}{"} {%} {}( + , .{function_attributes.fmt(self, .{})}); + try w.print( + \\{s}{f}{f}{f}{f}{f}{f} {f} {f}( , .{ if (function.instructions.len > 0) "define" else "declare", global.linkage, @@ -9664,45 +9642,45 @@ pub fn printUnbuffered( global.visibility, global.dll_storage_class, function.call_conv, - function.attributes.ret(self).fmt(self), - global.type.functionReturn(self).fmt(self), + function.attributes.ret(self).fmt(self, .{}), + global.type.functionReturn(self).fmt(self, .percent), function.global.fmt(self), }); for (0..params_len) |arg| { - if (arg > 0) try writer.writeAll(", "); - try writer.print( - \\{%}{"} + if (arg > 0) try w.writeAll(", "); + try w.print( + \\{f}{f} , .{ - global.type.functionParameters(self)[arg].fmt(self), - function.attributes.param(arg, self).fmt(self), + global.type.functionParameters(self)[arg].fmt(self, .percent), + function.attributes.param(arg, self).fmt(self, .{}), }); if (function.instructions.len > 0) - try writer.print(" {}", .{function.arg(@intCast(arg)).fmt(function_index, self)}) + try w.print(" {f}", .{function.arg(@intCast(arg)).fmt(function_index, self, .{})}) else - try writer.print(" %{d}", .{arg}); + try w.print(" %{d}", .{arg}); } switch (global.type.functionKind(self)) { .normal => {}, .vararg => { - if (params_len > 0) try writer.writeAll(", "); - try writer.writeAll("..."); + if (params_len > 0) try w.writeAll(", "); + try w.writeAll("..."); }, } - try writer.print("){}{ }", .{ global.unnamed_addr, global.addr_space }); - if (function_attributes != 
.none) try writer.print(" #{d}", .{ + try w.print("){f}{f}", .{ global.unnamed_addr, global.addr_space.fmt(" ") }); + if (function_attributes != .none) try w.print(" #{d}", .{ (try attribute_groups.getOrPutValue(self.gpa, function_attributes, {})).index, }); { metadata_formatter.need_comma = false; defer metadata_formatter.need_comma = undefined; - try writer.print("{ }{}", .{ - function.alignment, - try metadata_formatter.fmt(" !dbg ", global.dbg), + try w.print("{f}{f}", .{ + function.alignment.fmt(" "), + try metadata_formatter.fmt(" !dbg ", global.dbg, null), }); } if (function.instructions.len > 0) { var block_incoming_len: u32 = undefined; - try writer.writeAll(" {\n"); + try w.writeAll(" {\n"); var maybe_dbg_index: ?u32 = null; for (params_len..function.instructions.len) |instruction_i| { const instruction_index: Function.Instruction.Index = @enumFromInt(instruction_i); @@ -9800,11 +9778,11 @@ pub fn printUnbuffered( .xor, => |tag| { const extra = function.extraData(Function.Instruction.Binary, instruction.data); - try writer.print(" %{} = {s} {%}, {}", .{ + try w.print(" %{f} = {s} {f}, {f}", .{ instruction_index.name(&function).fmt(self), @tagName(tag), - extra.lhs.fmt(function_index, self), - extra.rhs.fmt(function_index, self), + extra.lhs.fmt(function_index, self, .{ .percent = true }), + extra.rhs.fmt(function_index, self, .{ .percent = true }), }); }, .addrspacecast, @@ -9822,73 +9800,76 @@ pub fn printUnbuffered( .zext, => |tag| { const extra = function.extraData(Function.Instruction.Cast, instruction.data); - try writer.print(" %{} = {s} {%} to {%}", .{ + try w.print(" %{f} = {s} {f} to {f}", .{ instruction_index.name(&function).fmt(self), @tagName(tag), - extra.val.fmt(function_index, self), - extra.type.fmt(self), + extra.val.fmt(function_index, self, .{ .percent = true }), + extra.type.fmt(self, .percent), }); }, .alloca, .@"alloca inalloca", => |tag| { const extra = function.extraData(Function.Instruction.Alloca, instruction.data); - try 
writer.print(" %{} = {s} {%}{,%}{, }{, }", .{ + try w.print(" %{f} = {s} {f}{f}{f}{f}", .{ instruction_index.name(&function).fmt(self), @tagName(tag), - extra.type.fmt(self), + extra.type.fmt(self, .percent), Value.fmt(switch (extra.len) { .@"1" => .none, else => extra.len, - }, function_index, self), - extra.info.alignment, - extra.info.addr_space, + }, function_index, self, .{ + .comma = true, + .percent = true, + }), + extra.info.alignment.fmt(", "), + extra.info.addr_space.fmt(", "), }); }, .arg => unreachable, .atomicrmw => |tag| { const extra = function.extraData(Function.Instruction.AtomicRmw, instruction.data); - try writer.print(" %{} = {s}{ } {s} {%}, {%}{ }{ }{, }", .{ + try w.print(" %{f} = {t}{f} {t} {f}, {f}{f}{f}{f}", .{ instruction_index.name(&function).fmt(self), - @tagName(tag), - extra.info.access_kind, - @tagName(extra.info.atomic_rmw_operation), - extra.ptr.fmt(function_index, self), - extra.val.fmt(function_index, self), - extra.info.sync_scope, - extra.info.success_ordering, - extra.info.alignment, + tag, + extra.info.access_kind.fmt(" "), + extra.info.atomic_rmw_operation, + extra.ptr.fmt(function_index, self, .{ .percent = true }), + extra.val.fmt(function_index, self, .{ .percent = true }), + extra.info.sync_scope.fmt(" "), + extra.info.success_ordering.fmt(" "), + extra.info.alignment.fmt(", "), }); }, .block => { block_incoming_len = instruction.data; const name = instruction_index.name(&function); if (@intFromEnum(instruction_index) > params_len) - try writer.writeByte('\n'); - try writer.print("{}:\n", .{name.fmt(self)}); + try w.writeByte('\n'); + try w.print("{f}:\n", .{name.fmt(self)}); continue; }, .br => |tag| { const target: Function.Block.Index = @enumFromInt(instruction.data); - try writer.print(" {s} {%}", .{ - @tagName(tag), target.toInst(&function).fmt(function_index, self), + try w.print(" {s} {f}", .{ + @tagName(tag), target.toInst(&function).fmt(function_index, self, .{ .percent = true }), }); }, .br_cond => { const extra 
= function.extraData(Function.Instruction.BrCond, instruction.data); - try writer.print(" br {%}, {%}, {%}", .{ - extra.cond.fmt(function_index, self), - extra.then.toInst(&function).fmt(function_index, self), - extra.@"else".toInst(&function).fmt(function_index, self), + try w.print(" br {f}, {f}, {f}", .{ + extra.cond.fmt(function_index, self, .{ .percent = true }), + extra.then.toInst(&function).fmt(function_index, self, .{ .percent = true }), + extra.@"else".toInst(&function).fmt(function_index, self, .{ .percent = true }), }); metadata_formatter.need_comma = true; defer metadata_formatter.need_comma = undefined; switch (extra.weights) { .none => {}, - .unpredictable => try writer.writeAll("!unpredictable !{}"), - _ => try writer.print("{}", .{ - try metadata_formatter.fmt("!prof ", @as(Metadata, @enumFromInt(@intFromEnum(extra.weights)))), + .unpredictable => try w.writeAll("!unpredictable !{}"), + _ => try w.print("{f}", .{ + try metadata_formatter.fmt("!prof ", @as(Metadata, @enumFromInt(@intFromEnum(extra.weights))), null), }), } }, @@ -9904,42 +9885,42 @@ pub fn printUnbuffered( var extra = function.extraDataTrail(Function.Instruction.Call, instruction.data); const args = extra.trail.next(extra.data.args_len, Value, &function); - try writer.writeAll(" "); + try w.writeAll(" "); const ret_ty = extra.data.ty.functionReturn(self); switch (ret_ty) { .void => {}, - else => try writer.print("%{} = ", .{ + else => try w.print("%{f} = ", .{ instruction_index.name(&function).fmt(self), }), .none => unreachable, } - try writer.print("{s}{}{}{} {%} {}(", .{ - @tagName(tag), + try w.print("{t}{f}{f}{f} {f} {f}(", .{ + tag, extra.data.info.call_conv, - extra.data.attributes.ret(self).fmt(self), + extra.data.attributes.ret(self).fmt(self, .{}), extra.data.callee.typeOf(function_index, self).pointerAddrSpace(self), switch (extra.data.ty.functionKind(self)) { .normal => ret_ty, .vararg => extra.data.ty, - }.fmt(self), - extra.data.callee.fmt(function_index, self), + 
}.fmt(self, .percent), + extra.data.callee.fmt(function_index, self, .{}), }); for (0.., args) |arg_index, arg| { - if (arg_index > 0) try writer.writeAll(", "); + if (arg_index > 0) try w.writeAll(", "); metadata_formatter.need_comma = false; defer metadata_formatter.need_comma = undefined; - try writer.print("{%}{}{}", .{ - arg.typeOf(function_index, self).fmt(self), - extra.data.attributes.param(arg_index, self).fmt(self), + try w.print("{f}{f}{f}", .{ + arg.typeOf(function_index, self).fmt(self, .percent), + extra.data.attributes.param(arg_index, self).fmt(self, .{}), try metadata_formatter.fmtLocal(" ", arg, function_index), }); } - try writer.writeByte(')'); + try w.writeByte(')'); if (extra.data.info.has_op_bundle_cold) { - try writer.writeAll(" [ \"cold\"() ]"); + try w.writeAll(" [ \"cold\"() ]"); } const call_function_attributes = extra.data.attributes.func(self); - if (call_function_attributes != .none) try writer.print(" #{d}", .{ + if (call_function_attributes != .none) try w.print(" #{d}", .{ (try attribute_groups.getOrPutValue( self.gpa, call_function_attributes, @@ -9952,27 +9933,27 @@ pub fn printUnbuffered( => |tag| { const extra = function.extraData(Function.Instruction.CmpXchg, instruction.data); - try writer.print(" %{} = {s}{ } {%}, {%}, {%}{ }{ }{ }{, }", .{ + try w.print(" %{f} = {t}{f} {f}, {f}, {f}{f}{f}{f}{f}", .{ instruction_index.name(&function).fmt(self), - @tagName(tag), - extra.info.access_kind, - extra.ptr.fmt(function_index, self), - extra.cmp.fmt(function_index, self), - extra.new.fmt(function_index, self), - extra.info.sync_scope, - extra.info.success_ordering, - extra.info.failure_ordering, - extra.info.alignment, + tag, + extra.info.access_kind.fmt(" "), + extra.ptr.fmt(function_index, self, .{ .percent = true }), + extra.cmp.fmt(function_index, self, .{ .percent = true }), + extra.new.fmt(function_index, self, .{ .percent = true }), + extra.info.sync_scope.fmt(" "), + extra.info.success_ordering.fmt(" "), + 
extra.info.failure_ordering.fmt(" "), + extra.info.alignment.fmt(", "), }); }, .extractelement => |tag| { const extra = function.extraData(Function.Instruction.ExtractElement, instruction.data); - try writer.print(" %{} = {s} {%}, {%}", .{ + try w.print(" %{f} = {s} {f}, {f}", .{ instruction_index.name(&function).fmt(self), @tagName(tag), - extra.val.fmt(function_index, self), - extra.index.fmt(function_index, self), + extra.val.fmt(function_index, self, .{ .percent = true }), + extra.index.fmt(function_index, self, .{ .percent = true }), }); }, .extractvalue => |tag| { @@ -9981,29 +9962,29 @@ pub fn printUnbuffered( instruction.data, ); const indices = extra.trail.next(extra.data.indices_len, u32, &function); - try writer.print(" %{} = {s} {%}", .{ + try w.print(" %{f} = {s} {f}", .{ instruction_index.name(&function).fmt(self), @tagName(tag), - extra.data.val.fmt(function_index, self), + extra.data.val.fmt(function_index, self, .{ .percent = true }), }); - for (indices) |index| try writer.print(", {d}", .{index}); + for (indices) |index| try w.print(", {d}", .{index}); }, .fence => |tag| { const info: MemoryAccessInfo = @bitCast(instruction.data); - try writer.print(" {s}{ }{ }", .{ - @tagName(tag), - info.sync_scope, - info.success_ordering, + try w.print(" {t}{f}{f}", .{ + tag, + info.sync_scope.fmt(" "), + info.success_ordering.fmt(" "), }); }, .fneg, .@"fneg fast", => |tag| { const val: Value = @enumFromInt(instruction.data); - try writer.print(" %{} = {s} {%}", .{ + try w.print(" %{f} = {s} {f}", .{ instruction_index.name(&function).fmt(self), @tagName(tag), - val.fmt(function_index, self), + val.fmt(function_index, self, .{ .percent = true }), }); }, .getelementptr, @@ -10014,14 +9995,14 @@ pub fn printUnbuffered( instruction.data, ); const indices = extra.trail.next(extra.data.indices_len, Value, &function); - try writer.print(" %{} = {s} {%}, {%}", .{ + try w.print(" %{f} = {s} {f}, {f}", .{ instruction_index.name(&function).fmt(self), @tagName(tag), - 
extra.data.type.fmt(self), - extra.data.base.fmt(function_index, self), + extra.data.type.fmt(self, .percent), + extra.data.base.fmt(function_index, self, .{ .percent = true }), }); - for (indices) |index| try writer.print(", {%}", .{ - index.fmt(function_index, self), + for (indices) |index| try w.print(", {f}", .{ + index.fmt(function_index, self, .{ .percent = true }), }); }, .indirectbr => |tag| { @@ -10029,54 +10010,54 @@ pub fn printUnbuffered( function.extraDataTrail(Function.Instruction.IndirectBr, instruction.data); const targets = extra.trail.next(extra.data.targets_len, Function.Block.Index, &function); - try writer.print(" {s} {%}, [", .{ + try w.print(" {s} {f}, [", .{ @tagName(tag), - extra.data.addr.fmt(function_index, self), + extra.data.addr.fmt(function_index, self, .{ .percent = true }), }); for (0.., targets) |target_index, target| { - if (target_index > 0) try writer.writeAll(", "); - try writer.print("{%}", .{ - target.toInst(&function).fmt(function_index, self), + if (target_index > 0) try w.writeAll(", "); + try w.print("{f}", .{ + target.toInst(&function).fmt(function_index, self, .{ .percent = true }), }); } - try writer.writeByte(']'); + try w.writeByte(']'); }, .insertelement => |tag| { const extra = function.extraData(Function.Instruction.InsertElement, instruction.data); - try writer.print(" %{} = {s} {%}, {%}, {%}", .{ + try w.print(" %{f} = {s} {f}, {f}, {f}", .{ instruction_index.name(&function).fmt(self), @tagName(tag), - extra.val.fmt(function_index, self), - extra.elem.fmt(function_index, self), - extra.index.fmt(function_index, self), + extra.val.fmt(function_index, self, .{ .percent = true }), + extra.elem.fmt(function_index, self, .{ .percent = true }), + extra.index.fmt(function_index, self, .{ .percent = true }), }); }, .insertvalue => |tag| { var extra = function.extraDataTrail(Function.Instruction.InsertValue, instruction.data); const indices = extra.trail.next(extra.data.indices_len, u32, &function); - try writer.print(" 
%{} = {s} {%}, {%}", .{ + try w.print(" %{f} = {s} {f}, {f}", .{ instruction_index.name(&function).fmt(self), @tagName(tag), - extra.data.val.fmt(function_index, self), - extra.data.elem.fmt(function_index, self), + extra.data.val.fmt(function_index, self, .{ .percent = true }), + extra.data.elem.fmt(function_index, self, .{ .percent = true }), }); - for (indices) |index| try writer.print(", {d}", .{index}); + for (indices) |index| try w.print(", {d}", .{index}); }, .load, .@"load atomic", => |tag| { const extra = function.extraData(Function.Instruction.Load, instruction.data); - try writer.print(" %{} = {s}{ } {%}, {%}{ }{ }{, }", .{ + try w.print(" %{f} = {t}{f} {f}, {f}{f}{f}{f}", .{ instruction_index.name(&function).fmt(self), - @tagName(tag), - extra.info.access_kind, - extra.type.fmt(self), - extra.ptr.fmt(function_index, self), - extra.info.sync_scope, - extra.info.success_ordering, - extra.info.alignment, + tag, + extra.info.access_kind.fmt(" "), + extra.type.fmt(self, .percent), + extra.ptr.fmt(function_index, self, .{ .percent = true }), + extra.info.sync_scope.fmt(" "), + extra.info.success_ordering.fmt(" "), + extra.info.alignment.fmt(", "), }); }, .phi, @@ -10086,64 +10067,64 @@ pub fn printUnbuffered( const vals = extra.trail.next(block_incoming_len, Value, &function); const blocks = extra.trail.next(block_incoming_len, Function.Block.Index, &function); - try writer.print(" %{} = {s} {%} ", .{ + try w.print(" %{f} = {s} {f} ", .{ instruction_index.name(&function).fmt(self), @tagName(tag), - vals[0].typeOf(function_index, self).fmt(self), + vals[0].typeOf(function_index, self).fmt(self, .percent), }); for (0.., vals, blocks) |incoming_index, incoming_val, incoming_block| { - if (incoming_index > 0) try writer.writeAll(", "); - try writer.print("[ {}, {} ]", .{ - incoming_val.fmt(function_index, self), - incoming_block.toInst(&function).fmt(function_index, self), + if (incoming_index > 0) try w.writeAll(", "); + try w.print("[ {f}, {f} ]", .{ + 
incoming_val.fmt(function_index, self, .{}), + incoming_block.toInst(&function).fmt(function_index, self, .{}), }); } }, .ret => |tag| { const val: Value = @enumFromInt(instruction.data); - try writer.print(" {s} {%}", .{ + try w.print(" {s} {f}", .{ @tagName(tag), - val.fmt(function_index, self), + val.fmt(function_index, self, .{ .percent = true }), }); }, .@"ret void", .@"unreachable", - => |tag| try writer.print(" {s}", .{@tagName(tag)}), + => |tag| try w.print(" {s}", .{@tagName(tag)}), .select, .@"select fast", => |tag| { const extra = function.extraData(Function.Instruction.Select, instruction.data); - try writer.print(" %{} = {s} {%}, {%}, {%}", .{ + try w.print(" %{f} = {s} {f}, {f}, {f}", .{ instruction_index.name(&function).fmt(self), @tagName(tag), - extra.cond.fmt(function_index, self), - extra.lhs.fmt(function_index, self), - extra.rhs.fmt(function_index, self), + extra.cond.fmt(function_index, self, .{ .percent = true }), + extra.lhs.fmt(function_index, self, .{ .percent = true }), + extra.rhs.fmt(function_index, self, .{ .percent = true }), }); }, .shufflevector => |tag| { const extra = function.extraData(Function.Instruction.ShuffleVector, instruction.data); - try writer.print(" %{} = {s} {%}, {%}, {%}", .{ + try w.print(" %{f} = {s} {f}, {f}, {f}", .{ instruction_index.name(&function).fmt(self), @tagName(tag), - extra.lhs.fmt(function_index, self), - extra.rhs.fmt(function_index, self), - extra.mask.fmt(function_index, self), + extra.lhs.fmt(function_index, self, .{ .percent = true }), + extra.rhs.fmt(function_index, self, .{ .percent = true }), + extra.mask.fmt(function_index, self, .{ .percent = true }), }); }, .store, .@"store atomic", => |tag| { const extra = function.extraData(Function.Instruction.Store, instruction.data); - try writer.print(" {s}{ } {%}, {%}{ }{ }{, }", .{ - @tagName(tag), - extra.info.access_kind, - extra.val.fmt(function_index, self), - extra.ptr.fmt(function_index, self), - extra.info.sync_scope, - 
extra.info.success_ordering, - extra.info.alignment, + try w.print(" {t}{f} {f}, {f}{f}{f}{f}", .{ + tag, + extra.info.access_kind.fmt(" "), + extra.val.fmt(function_index, self, .{ .percent = true }), + extra.ptr.fmt(function_index, self, .{ .percent = true }), + extra.info.sync_scope.fmt(" "), + extra.info.success_ordering.fmt(" "), + extra.info.alignment.fmt(", "), }); }, .@"switch" => |tag| { @@ -10152,80 +10133,80 @@ pub fn printUnbuffered( const vals = extra.trail.next(extra.data.cases_len, Constant, &function); const blocks = extra.trail.next(extra.data.cases_len, Function.Block.Index, &function); - try writer.print(" {s} {%}, {%} [\n", .{ + try w.print(" {s} {f}, {f} [\n", .{ @tagName(tag), - extra.data.val.fmt(function_index, self), - extra.data.default.toInst(&function).fmt(function_index, self), + extra.data.val.fmt(function_index, self, .{ .percent = true }), + extra.data.default.toInst(&function).fmt(function_index, self, .{ .percent = true }), }); - for (vals, blocks) |case_val, case_block| try writer.print( - " {%}, {%}\n", + for (vals, blocks) |case_val, case_block| try w.print( + " {f}, {f}\n", .{ - case_val.fmt(self), - case_block.toInst(&function).fmt(function_index, self), + case_val.fmt(self, .{ .percent = true }), + case_block.toInst(&function).fmt(function_index, self, .{ .percent = true }), }, ); - try writer.writeAll(" ]"); + try w.writeAll(" ]"); metadata_formatter.need_comma = true; defer metadata_formatter.need_comma = undefined; switch (extra.data.weights) { .none => {}, - .unpredictable => try writer.writeAll("!unpredictable !{}"), - _ => try writer.print("{}", .{ - try metadata_formatter.fmt("!prof ", @as(Metadata, @enumFromInt(@intFromEnum(extra.data.weights)))), + .unpredictable => try w.writeAll("!unpredictable !{}"), + _ => try w.print("{f}", .{ + try metadata_formatter.fmt("!prof ", @as(Metadata, @enumFromInt(@intFromEnum(extra.data.weights))), null), }), } }, .va_arg => |tag| { const extra = 
function.extraData(Function.Instruction.VaArg, instruction.data); - try writer.print(" %{} = {s} {%}, {%}", .{ + try w.print(" %{f} = {s} {f}, {f}", .{ instruction_index.name(&function).fmt(self), @tagName(tag), - extra.list.fmt(function_index, self), - extra.type.fmt(self), + extra.list.fmt(function_index, self, .{ .percent = true }), + extra.type.fmt(self, .percent), }); }, } if (maybe_dbg_index) |dbg_index| { - try writer.print(", !dbg !{}", .{dbg_index}); + try w.print(", !dbg !{d}", .{dbg_index}); } - try writer.writeByte('\n'); + try w.writeByte('\n'); } - try writer.writeByte('}'); + try w.writeByte('}'); } - try writer.writeByte('\n'); + try w.writeByte('\n'); } if (attribute_groups.count() > 0) { - if (need_newline) try writer.writeByte('\n') else need_newline = true; + if (need_newline) try w.writeByte('\n') else need_newline = true; for (0.., attribute_groups.keys()) |attribute_group_index, attribute_group| - try writer.print( - \\attributes #{d} = {{{#"} }} + try w.print( + \\attributes #{d} = {{{f} }} \\ - , .{ attribute_group_index, attribute_group.fmt(self) }); + , .{ attribute_group_index, attribute_group.fmt(self, .{ .pound = true, .quote = true }) }); } if (self.metadata_named.count() > 0) { - if (need_newline) try writer.writeByte('\n') else need_newline = true; + if (need_newline) try w.writeByte('\n') else need_newline = true; for (self.metadata_named.keys(), self.metadata_named.values()) |name, data| { const elements: []const Metadata = @ptrCast(self.metadata_extra.items[data.index..][0..data.len]); - try writer.writeByte('!'); - try printEscapedString(name.slice(self), .quote_unless_valid_identifier, writer); - try writer.writeAll(" = !{"); + try w.writeByte('!'); + try printEscapedString(name.slice(self), .quote_unless_valid_identifier, w); + try w.writeAll(" = !{"); metadata_formatter.need_comma = false; defer metadata_formatter.need_comma = undefined; - for (elements) |element| try writer.print("{}", .{try metadata_formatter.fmt("", 
element)}); - try writer.writeAll("}\n"); + for (elements) |element| try w.print("{f}", .{try metadata_formatter.fmt("", element, null)}); + try w.writeAll("}\n"); } } if (metadata_formatter.map.count() > 0) { - if (need_newline) try writer.writeByte('\n') else need_newline = true; + if (need_newline) try w.writeByte('\n') else need_newline = true; var metadata_index: usize = 0; while (metadata_index < metadata_formatter.map.count()) : (metadata_index += 1) { @setEvalBranchQuota(10_000); - try writer.print("!{} = ", .{metadata_index}); + try w.print("!{d} = ", .{metadata_index}); metadata_formatter.need_comma = false; defer metadata_formatter.need_comma = undefined; @@ -10238,7 +10219,7 @@ pub fn printUnbuffered( .scope = location.scope, .inlinedAt = location.inlined_at, .isImplicitCode = false, - }, writer); + }, w); continue; }, .metadata => |metadata| self.metadata_items.get(@intFromEnum(metadata)), @@ -10254,7 +10235,7 @@ pub fn printUnbuffered( .checksumkind = null, .checksum = null, .source = null, - }, writer); + }, w); }, .compile_unit, .@"compile_unit optimized", @@ -10285,7 +10266,7 @@ pub fn printUnbuffered( .rangesBaseAddress = null, .sysroot = null, .sdk = null, - }, writer); + }, w); }, .subprogram, .@"subprogram local", @@ -10319,7 +10300,7 @@ pub fn printUnbuffered( .thrownTypes = null, .annotations = null, .targetFuncName = null, - }, writer); + }, w); }, .lexical_block => { const extra = self.metadataExtraData(Metadata.LexicalBlock, metadata_item.data); @@ -10328,7 +10309,7 @@ pub fn printUnbuffered( .file = extra.file, .line = extra.line, .column = extra.column, - }, writer); + }, w); }, .location => { const extra = self.metadataExtraData(Metadata.Location, metadata_item.data); @@ -10338,7 +10319,7 @@ pub fn printUnbuffered( .scope = extra.scope, .inlinedAt = extra.inlined_at, .isImplicitCode = false, - }, writer); + }, w); }, .basic_bool_type, .basic_unsigned_type, @@ -10367,7 +10348,7 @@ pub fn printUnbuffered( else => unreachable, }), .flags = 
null, - }, writer); + }, w); }, .composite_struct_type, .composite_union_type, @@ -10412,7 +10393,7 @@ pub fn printUnbuffered( .allocated = null, .rank = null, .annotations = null, - }, writer); + }, w); }, .derived_pointer_type, .derived_member_type, @@ -10445,7 +10426,7 @@ pub fn printUnbuffered( .extraData = null, .dwarfAddressSpace = null, .annotations = null, - }, writer); + }, w); }, .subroutine_type => { const extra = self.metadataExtraData(Metadata.SubroutineType, metadata_item.data); @@ -10453,7 +10434,7 @@ pub fn printUnbuffered( .flags = null, .cc = null, .types = extra.types_tuple, - }, writer); + }, w); }, .enumerator_unsigned, .enumerator_signed_positive, @@ -10503,7 +10484,7 @@ pub fn printUnbuffered( => false, else => unreachable, }, - }, writer); + }, w); }, .subrange => { const extra = self.metadataExtraData(Metadata.Subrange, metadata_item.data); @@ -10512,34 +10493,34 @@ pub fn printUnbuffered( .lowerBound = extra.lower_bound, .upperBound = null, .stride = null, - }, writer); + }, w); }, .tuple => { var extra = self.metadataExtraDataTrail(Metadata.Tuple, metadata_item.data); const elements = extra.trail.next(extra.data.elements_len, Metadata, self); - try writer.writeAll("!{"); - for (elements) |element| try writer.print("{[element]%}", .{ - .element = try metadata_formatter.fmt("", element), + try w.writeAll("!{"); + for (elements) |element| try w.print("{[element]f}", .{ + .element = try metadata_formatter.fmt("", element, .{ .percent = true }), }); - try writer.writeAll("}\n"); + try w.writeAll("}\n"); }, .str_tuple => { var extra = self.metadataExtraDataTrail(Metadata.StrTuple, metadata_item.data); const elements = extra.trail.next(extra.data.elements_len, Metadata, self); - try writer.print("!{{{[str]%}", .{ - .str = try metadata_formatter.fmt("", extra.data.str), + try w.print("!{{{[str]f}", .{ + .str = try metadata_formatter.fmt("", extra.data.str, .{ .percent = true }), }); - for (elements) |element| try writer.print("{[element]%}", .{ - 
.element = try metadata_formatter.fmt("", element), + for (elements) |element| try w.print("{[element]f}", .{ + .element = try metadata_formatter.fmt("", element, .{ .percent = true }), }); - try writer.writeAll("}\n"); + try w.writeAll("}\n"); }, .module_flag => { const extra = self.metadataExtraData(Metadata.ModuleFlag, metadata_item.data); - try writer.print("!{{{[behavior]%}{[name]%}{[constant]%}}}\n", .{ - .behavior = try metadata_formatter.fmt("", extra.behavior), - .name = try metadata_formatter.fmt("", extra.name), - .constant = try metadata_formatter.fmt("", extra.constant), + try w.print("!{{{[behavior]f}{[name]f}{[constant]f}}}\n", .{ + .behavior = try metadata_formatter.fmt("", extra.behavior, .{ .percent = true }), + .name = try metadata_formatter.fmt("", extra.name, .{ .percent = true }), + .constant = try metadata_formatter.fmt("", extra.constant, .{ .percent = true }), }); }, .local_var => { @@ -10554,7 +10535,7 @@ pub fn printUnbuffered( .flags = null, .@"align" = null, .annotations = null, - }, writer); + }, w); }, .parameter => { const extra = self.metadataExtraData(Metadata.Parameter, metadata_item.data); @@ -10568,7 +10549,7 @@ pub fn printUnbuffered( .flags = null, .@"align" = null, .annotations = null, - }, writer); + }, w); }, .global_var, .@"global_var local", @@ -10591,7 +10572,7 @@ pub fn printUnbuffered( .templateParams = null, .@"align" = null, .annotations = null, - }, writer); + }, w); }, .global_var_expression => { const extra = @@ -10599,7 +10580,7 @@ pub fn printUnbuffered( try metadata_formatter.specialized(.@"!", .DIGlobalVariableExpression, .{ .@"var" = extra.variable, .expr = extra.expression, - }, writer); + }, w); }, } } @@ -10618,22 +10599,18 @@ fn isValidIdentifier(id: []const u8) bool { } const QuoteBehavior = enum { always_quote, quote_unless_valid_identifier }; -fn printEscapedString( - slice: []const u8, - quotes: QuoteBehavior, - writer: anytype, -) @TypeOf(writer).Error!void { +fn printEscapedString(slice: []const u8, 
quotes: QuoteBehavior, w: *Writer) Writer.Error!void { const need_quotes = switch (quotes) { .always_quote => true, .quote_unless_valid_identifier => !isValidIdentifier(slice), }; - if (need_quotes) try writer.writeByte('"'); + if (need_quotes) try w.writeByte('"'); for (slice) |byte| switch (byte) { - '\\' => try writer.writeAll("\\\\"), - ' '...'"' - 1, '"' + 1...'\\' - 1, '\\' + 1...'~' => try writer.writeByte(byte), - else => try writer.print("\\{X:0>2}", .{byte}), + '\\' => try w.writeAll("\\\\"), + ' '...'"' - 1, '"' + 1...'\\' - 1, '\\' + 1...'~' => try w.writeByte(byte), + else => try w.print("\\{X:0>2}", .{byte}), }; - if (need_quotes) try writer.writeByte('"'); + if (need_quotes) try w.writeByte('"'); } fn ensureUnusedGlobalCapacity(self: *Builder, name: StrtabString) Allocator.Error!void { @@ -12018,7 +11995,7 @@ pub fn metadataStringFmt(self: *Builder, comptime fmt_str: []const u8, fmt_args: } pub fn metadataStringFmtAssumeCapacity(self: *Builder, comptime fmt_str: []const u8, fmt_args: anytype) MetadataString { - self.metadata_string_bytes.writer(undefined).print(fmt_str, fmt_args) catch unreachable; + self.metadata_string_bytes.printAssumeCapacity(fmt_str, fmt_args); return self.trailingMetadataStringAssumeCapacity(); } @@ -15261,12 +15238,12 @@ pub fn toBitcode(self: *Builder, allocator: Allocator, producer: Producer) bitco return bitcode.toOwnedSlice(); } -const Allocator = std.mem.Allocator; -const assert = std.debug.assert; -const bitcode_writer = @import("bitcode_writer.zig"); -const Builder = @This(); -const builtin = @import("builtin"); -const DW = std.dwarf; -const ir = @import("ir.zig"); -const log = std.log.scoped(.llvm); -const std = @import("../../std.zig"); +const FormatFlags = struct { + comma: bool = false, + space: bool = false, + percent: bool = false, + + fn onlyPercent(f: FormatFlags) bool { + return !f.comma and !f.space and f.percent; + } +}; diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig index 
1bc6028129..8f2eb6d2c6 100644 --- a/lib/std/zig/parser_test.zig +++ b/lib/std/zig/parser_test.zig @@ -1,3 +1,9 @@ +const std = @import("std"); +const mem = std.mem; +const print = std.debug.print; +const io = std.io; +const maxInt = std.math.maxInt; + test "zig fmt: remove extra whitespace at start and end of file with comment between" { try testTransform( \\ @@ -2738,11 +2744,11 @@ test "zig fmt: preserve spacing" { \\const std = @import("std"); \\ \\pub fn main() !void { - \\ var stdout_file = std.io.getStdOut; - \\ var stdout_file = std.io.getStdOut; + \\ var stdout_file = std.lol.abcd; + \\ var stdout_file = std.lol.abcd; \\ - \\ var stdout_file = std.io.getStdOut; - \\ var stdout_file = std.io.getStdOut; + \\ var stdout_file = std.lol.abcd; + \\ var stdout_file = std.lol.abcd; \\} \\ ); @@ -6315,16 +6321,10 @@ test "ampersand" { , &.{}); } -const std = @import("std"); -const mem = std.mem; -const print = std.debug.print; -const io = std.io; -const maxInt = std.math.maxInt; - var fixed_buffer_mem: [100 * 1024]u8 = undefined; fn testParse(source: [:0]const u8, allocator: mem.Allocator, anything_changed: *bool) ![]u8 { - const stderr = io.getStdErr().writer(); + const stderr = std.fs.File.stderr().deprecatedWriter(); var tree = try std.zig.Ast.parse(allocator, source, .zig); defer tree.deinit(allocator); diff --git a/lib/std/zig/perf_test.zig b/lib/std/zig/perf_test.zig index 2a893013d9..087b081475 100644 --- a/lib/std/zig/perf_test.zig +++ b/lib/std/zig/perf_test.zig @@ -22,8 +22,8 @@ pub fn main() !void { const bytes_per_sec_float = @as(f64, @floatFromInt(source.len * iterations)) / elapsed_s; const bytes_per_sec = @as(u64, @intFromFloat(@floor(bytes_per_sec_float))); - var stdout_file = std.io.getStdOut(); - const stdout = stdout_file.writer(); + var stdout_file: std.fs.File = .stdout(); + const stdout = stdout_file.deprecatedWriter(); try stdout.print("parsing speed: {:.2}/s, {:.2} used \n", .{ fmtIntSizeBin(bytes_per_sec), fmtIntSizeBin(memory_used), diff 
--git a/lib/std/zig/render.zig b/lib/std/zig/render.zig index c4c3379f32..56ab032771 100644 --- a/lib/std/zig/render.zig +++ b/lib/std/zig/render.zig @@ -1564,7 +1564,7 @@ fn renderBuiltinCall( defer r.gpa.free(new_string); try renderToken(r, builtin_token + 1, .none); // ( - try ais.writer().print("\"{}\"", .{std.zig.fmtEscapes(new_string)}); + try ais.writer().print("\"{f}\"", .{std.zig.fmtString(new_string)}); return renderToken(r, str_lit_token + 1, space); // ) } } @@ -2872,7 +2872,7 @@ fn renderIdentifierContents(writer: anytype, bytes: []const u8) !void { .success => |codepoint| { if (codepoint <= 0x7f) { const buf = [1]u8{@as(u8, @intCast(codepoint))}; - try std.fmt.format(writer, "{}", .{std.zig.fmtEscapes(&buf)}); + try std.fmt.format(writer, "{f}", .{std.zig.fmtString(&buf)}); } else { try writer.writeAll(escape_sequence); } @@ -2884,7 +2884,7 @@ fn renderIdentifierContents(writer: anytype, bytes: []const u8) !void { }, 0x00...('\\' - 1), ('\\' + 1)...0x7f => { const buf = [1]u8{byte}; - try std.fmt.format(writer, "{}", .{std.zig.fmtEscapes(&buf)}); + try std.fmt.format(writer, "{f}", .{std.zig.fmtString(&buf)}); pos += 1; }, 0x80...0xff => { @@ -3245,7 +3245,7 @@ fn AutoIndentingStream(comptime UnderlyingWriter: type) type { return struct { const Self = @This(); pub const WriteError = UnderlyingWriter.Error; - pub const Writer = std.io.Writer(*Self, WriteError, write); + pub const Writer = std.io.GenericWriter(*Self, WriteError, write); pub const IndentType = enum { normal, diff --git a/lib/std/zig/string_literal.zig b/lib/std/zig/string_literal.zig index 972219abbd..99b060eceb 100644 --- a/lib/std/zig/string_literal.zig +++ b/lib/std/zig/string_literal.zig @@ -44,14 +44,7 @@ pub const Error = union(enum) { raw_string: []const u8, }; - fn formatMessage( - self: FormatMessage, - comptime f: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = f; - _ = options; + fn formatMessage(self: FormatMessage, writer: *std.io.Writer) 
std.io.Writer.Error!void { switch (self.err) { .invalid_escape_character => |bad_index| try writer.print( "invalid escape character: '{c}'", @@ -93,7 +86,7 @@ pub const Error = union(enum) { } } - pub fn fmt(self: @This(), raw_string: []const u8) std.fmt.Formatter(formatMessage) { + pub fn fmt(self: @This(), raw_string: []const u8) std.fmt.Formatter(FormatMessage, formatMessage) { return .{ .data = .{ .err = self, .raw_string = raw_string, @@ -322,7 +315,7 @@ test parseCharLiteral { ); } -/// Parses `bytes` as a Zig string literal and writes the result to the std.io.Writer type. +/// Parses `bytes` as a Zig string literal and writes the result to the `std.io.GenericWriter` type. /// Asserts `bytes` has '"' at beginning and end. pub fn parseWrite(writer: anytype, bytes: []const u8) error{OutOfMemory}!Result { assert(bytes.len >= 2 and bytes[0] == '"' and bytes[bytes.len - 1] == '"'); diff --git a/lib/std/zig/system/linux.zig b/lib/std/zig/system/linux.zig index d8cff2403f..8044e1969d 100644 --- a/lib/std/zig/system/linux.zig +++ b/lib/std/zig/system/linux.zig @@ -391,7 +391,7 @@ pub fn detectNativeCpuAndFeatures() ?Target.Cpu { const current_arch = builtin.cpu.arch; switch (current_arch) { .arm, .armeb, .thumb, .thumbeb => { - return ArmCpuinfoParser.parse(current_arch, f.reader()) catch null; + return ArmCpuinfoParser.parse(current_arch, f.deprecatedReader()) catch null; }, .aarch64, .aarch64_be => { const registers = [12]u64{ @@ -413,13 +413,13 @@ pub fn detectNativeCpuAndFeatures() ?Target.Cpu { return core; }, .sparc64 => { - return SparcCpuinfoParser.parse(current_arch, f.reader()) catch null; + return SparcCpuinfoParser.parse(current_arch, f.deprecatedReader()) catch null; }, .powerpc, .powerpcle, .powerpc64, .powerpc64le => { - return PowerpcCpuinfoParser.parse(current_arch, f.reader()) catch null; + return PowerpcCpuinfoParser.parse(current_arch, f.deprecatedReader()) catch null; }, .riscv64, .riscv32 => { - return RiscvCpuinfoParser.parse(current_arch, 
f.reader()) catch null; + return RiscvCpuinfoParser.parse(current_arch, f.deprecatedReader()) catch null; }, else => {}, } diff --git a/lib/std/zip.zig b/lib/std/zip.zig index c149584fd5..e181bc1f65 100644 --- a/lib/std/zip.zig +++ b/lib/std/zip.zig @@ -106,7 +106,7 @@ pub const EndRecord = extern struct { /// Find and return the end record for the given seekable zip stream. /// Note that `seekable_stream` must be an instance of `std.io.SeekableStream` and /// its context must also have a `.reader()` method that returns an instance of -/// `std.io.Reader`. +/// `std.io.GenericReader`. pub fn findEndRecord(seekable_stream: anytype, stream_len: u64) !EndRecord { var buf: [@sizeOf(EndRecord) + std.math.maxInt(u16)]u8 = undefined; const record_len_max = @min(stream_len, buf.len); @@ -124,7 +124,7 @@ pub fn findEndRecord(seekable_stream: anytype, stream_len: u64) !EndRecord { try seekable_stream.seekTo(stream_len - @as(u64, new_loaded_len)); const read_buf: []u8 = buf[buf.len - new_loaded_len ..][0..read_len]; - const len = try seekable_stream.context.reader().readAll(read_buf); + const len = try (if (@TypeOf(seekable_stream.context) == std.fs.File) seekable_stream.context.deprecatedReader() else seekable_stream.context.reader()).readAll(read_buf); if (len != read_len) return error.ZipTruncated; loaded_len = new_loaded_len; @@ -295,7 +295,7 @@ pub fn Iterator(comptime SeekableStream: type) type { if (locator_end_offset > stream_len) return error.ZipTruncated; try stream.seekTo(stream_len - locator_end_offset); - const locator = try stream.context.reader().readStructEndian(EndLocator64, .little); + const locator = try (if (@TypeOf(stream.context) == std.fs.File) stream.context.deprecatedReader() else stream.context.reader()).readStructEndian(EndLocator64, .little); if (!std.mem.eql(u8, &locator.signature, &end_locator64_sig)) return error.ZipBadLocatorSig; if (locator.zip64_disk_count != 0) @@ -305,7 +305,7 @@ pub fn Iterator(comptime SeekableStream: type) type { try 
stream.seekTo(locator.record_file_offset); - const record64 = try stream.context.reader().readStructEndian(EndRecord64, .little); + const record64 = try (if (@TypeOf(stream.context) == std.fs.File) stream.context.deprecatedReader() else stream.context.reader()).readStructEndian(EndRecord64, .little); if (!std.mem.eql(u8, &record64.signature, &end_record64_sig)) return error.ZipBadEndRecord64Sig; @@ -357,7 +357,7 @@ pub fn Iterator(comptime SeekableStream: type) type { const header_zip_offset = self.cd_zip_offset + self.cd_record_offset; try self.stream.seekTo(header_zip_offset); - const header = try self.stream.context.reader().readStructEndian(CentralDirectoryFileHeader, .little); + const header = try (if (@TypeOf(self.stream.context) == std.fs.File) self.stream.context.deprecatedReader() else self.stream.context.reader()).readStructEndian(CentralDirectoryFileHeader, .little); if (!std.mem.eql(u8, &header.signature, ¢ral_file_header_sig)) return error.ZipBadCdOffset; @@ -386,7 +386,7 @@ pub fn Iterator(comptime SeekableStream: type) type { { try self.stream.seekTo(header_zip_offset + @sizeOf(CentralDirectoryFileHeader) + header.filename_len); - const len = try self.stream.context.reader().readAll(extra); + const len = try (if (@TypeOf(self.stream.context) == std.fs.File) self.stream.context.deprecatedReader() else self.stream.context.reader()).readAll(extra); if (len != extra.len) return error.ZipTruncated; } @@ -449,7 +449,7 @@ pub fn Iterator(comptime SeekableStream: type) type { try stream.seekTo(self.header_zip_offset + @sizeOf(CentralDirectoryFileHeader)); { - const len = try stream.context.reader().readAll(filename); + const len = try (if (@TypeOf(stream.context) == std.fs.File) stream.context.deprecatedReader() else stream.context.reader()).readAll(filename); if (len != filename.len) return error.ZipBadFileOffset; } @@ -457,7 +457,7 @@ pub fn Iterator(comptime SeekableStream: type) type { const local_data_header_offset: u64 = local_data_header_offset: { 
const local_header = blk: { try stream.seekTo(self.file_offset); - break :blk try stream.context.reader().readStructEndian(LocalFileHeader, .little); + break :blk try (if (@TypeOf(stream.context) == std.fs.File) stream.context.deprecatedReader() else stream.context.reader()).readStructEndian(LocalFileHeader, .little); }; if (!std.mem.eql(u8, &local_header.signature, &local_file_header_sig)) return error.ZipBadFileOffset; @@ -483,7 +483,7 @@ pub fn Iterator(comptime SeekableStream: type) type { { try stream.seekTo(self.file_offset + @sizeOf(LocalFileHeader) + local_header.filename_len); - const len = try stream.context.reader().readAll(extra); + const len = try (if (@TypeOf(stream.context) == std.fs.File) stream.context.deprecatedReader() else stream.context.reader()).readAll(extra); if (len != extra.len) return error.ZipTruncated; } @@ -552,12 +552,12 @@ pub fn Iterator(comptime SeekableStream: type) type { @as(u64, @sizeOf(LocalFileHeader)) + local_data_header_offset; try stream.seekTo(local_data_file_offset); - var limited_reader = std.io.limitedReader(stream.context.reader(), self.compressed_size); + var limited_reader = std.io.limitedReader((if (@TypeOf(stream.context) == std.fs.File) stream.context.deprecatedReader() else stream.context.reader()), self.compressed_size); const crc = try decompress( self.compression_method, self.uncompressed_size, limited_reader.reader(), - out_file.writer(), + out_file.deprecatedWriter(), ); if (limited_reader.bytes_left != 0) return error.ZipDecompressTruncated; @@ -617,7 +617,7 @@ pub const ExtractOptions = struct { /// Extract the zipped files inside `seekable_stream` to the given `dest` directory. /// Note that `seekable_stream` must be an instance of `std.io.SeekableStream` and /// its context must also have a `.reader()` method that returns an instance of -/// `std.io.Reader`. +/// `std.io.GenericReader`. 
pub fn extract(dest: std.fs.Dir, seekable_stream: anytype, options: ExtractOptions) !void { const SeekableStream = @TypeOf(seekable_stream); var iter = try Iterator(SeekableStream).init(seekable_stream); diff --git a/lib/std/zip/test.zig b/lib/std/zip/test.zig index aba49e7af2..27f12cf2a6 100644 --- a/lib/std/zip/test.zig +++ b/lib/std/zip/test.zig @@ -33,7 +33,7 @@ pub fn expectFiles( var file = try dir.openFile(normalized_sub_path, .{}); defer file.close(); var content_buf: [4096]u8 = undefined; - const n = try file.reader().readAll(&content_buf); + const n = try file.deprecatedReader().readAll(&content_buf); try testing.expectEqualStrings(test_file.content, content_buf[0..n]); } } diff --git a/lib/std/zon/parse.zig b/lib/std/zon/parse.zig index 3146ae224b..561fbd2a4c 100644 --- a/lib/std/zon/parse.zig +++ b/lib/std/zon/parse.zig @@ -64,22 +64,14 @@ pub const Error = union(enum) { } }; - fn formatMessage( - self: []const u8, - comptime f: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = f; - _ = options; - + fn formatMessage(self: []const u8, w: *std.io.Writer) std.io.Writer.Error!void { // Just writes the string for now, but we're keeping this behind a formatter so we have // the option to extend it in the future to print more advanced messages (like `Error` // does) without breaking the API. 
- try writer.writeAll(self); + try w.writeAll(self); } - pub fn fmtMessage(self: Note, diag: *const Diagnostics) std.fmt.Formatter(Note.formatMessage) { + pub fn fmtMessage(self: Note, diag: *const Diagnostics) std.fmt.Formatter([]const u8, Note.formatMessage) { return .{ .data = switch (self) { .zoir => |note| note.msg.get(diag.zoir), .type_check => |note| note.msg, @@ -155,21 +147,14 @@ pub const Error = union(enum) { diag: *const Diagnostics, }; - fn formatMessage( - self: FormatMessage, - comptime f: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = f; - _ = options; + fn formatMessage(self: FormatMessage, w: *std.io.Writer) std.io.Writer.Error!void { switch (self.err) { - .zoir => |err| try writer.writeAll(err.msg.get(self.diag.zoir)), - .type_check => |tc| try writer.writeAll(tc.message), + .zoir => |err| try w.writeAll(err.msg.get(self.diag.zoir)), + .type_check => |tc| try w.writeAll(tc.message), } } - pub fn fmtMessage(self: @This(), diag: *const Diagnostics) std.fmt.Formatter(formatMessage) { + pub fn fmtMessage(self: @This(), diag: *const Diagnostics) std.fmt.Formatter(FormatMessage, formatMessage) { return .{ .data = .{ .err = self, .diag = diag, @@ -241,25 +226,18 @@ pub const Diagnostics = struct { return .{ .diag = self }; } - pub fn format( - self: *const @This(), - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = fmt; - _ = options; + pub fn format(self: *const @This(), w: *std.io.Writer) std.io.Writer.Error!void { var errors = self.iterateErrors(); while (errors.next()) |err| { const loc = err.getLocation(self); const msg = err.fmtMessage(self); - try writer.print("{}:{}: error: {}\n", .{ loc.line + 1, loc.column + 1, msg }); + try w.print("{d}:{d}: error: {f}\n", .{ loc.line + 1, loc.column + 1, msg }); var notes = err.iterateNotes(self); while (notes.next()) |note| { const note_loc = note.getLocation(self); const note_msg = note.fmtMessage(self); - try 
writer.print("{}:{}: note: {s}\n", .{ + try w.print("{d}:{d}: note: {f}\n", .{ note_loc.line + 1, note_loc.column + 1, note_msg, @@ -646,7 +624,7 @@ const Parser = struct { .failure => |err| { const token = self.ast.nodeMainToken(ast_node); const raw_string = self.ast.tokenSlice(token); - return self.failTokenFmt(token, @intCast(err.offset()), "{s}", .{err.fmt(raw_string)}); + return self.failTokenFmt(token, @intCast(err.offset()), "{f}", .{err.fmt(raw_string)}); }, } @@ -1087,7 +1065,10 @@ const Parser = struct { try writer.writeAll(msg); inline for (info.fields, 0..) |field_info, i| { if (i != 0) try writer.writeAll(", "); - try writer.print("'{p_}'", .{std.zig.fmtId(field_info.name)}); + try writer.print("'{f}'", .{std.zig.fmtIdFlags(field_info.name, .{ + .allow_primitive = true, + .allow_underscore = true, + })}); } break :b .{ .token = token, @@ -1298,7 +1279,7 @@ test "std.zon ast errors" { error.ParseZon, fromSlice(struct {}, gpa, ".{.x = 1 .y = 2}", &diag, .{}), ); - try std.testing.expectFmt("1:13: error: expected ',' after initializer\n", "{}", .{diag}); + try std.testing.expectFmt("1:13: error: expected ',' after initializer\n", "{f}", .{diag}); } test "std.zon comments" { @@ -1320,7 +1301,7 @@ test "std.zon comments" { , &diag, .{})); try std.testing.expectFmt( "1:1: error: expected expression, found 'a document comment'\n", - "{}", + "{f}", .{diag}, ); } @@ -1341,7 +1322,7 @@ test "std.zon failure/oom formatting" { &diag, .{}, )); - try std.testing.expectFmt("", "{}", .{diag}); + try std.testing.expectFmt("", "{f}", .{diag}); } test "std.zon fromSlice syntax error" { @@ -1421,7 +1402,7 @@ test "std.zon unions" { \\1:4: note: supported: 'x', 'y' \\ , - "{}", + "{f}", .{diag}, ); } @@ -1435,7 +1416,7 @@ test "std.zon unions" { error.ParseZon, fromSlice(Union, gpa, ".{.x=1}", &diag, .{}), ); - try std.testing.expectFmt("1:6: error: expected type 'void'\n", "{}", .{diag}); + try std.testing.expectFmt("1:6: error: expected type 'void'\n", "{f}", .{diag}); } 
// Extra field @@ -1447,7 +1428,7 @@ test "std.zon unions" { error.ParseZon, fromSlice(Union, gpa, ".{.x = 1.5, .y = true}", &diag, .{}), ); - try std.testing.expectFmt("1:2: error: expected union\n", "{}", .{diag}); + try std.testing.expectFmt("1:2: error: expected union\n", "{f}", .{diag}); } // No fields @@ -1459,7 +1440,7 @@ test "std.zon unions" { error.ParseZon, fromSlice(Union, gpa, ".{}", &diag, .{}), ); - try std.testing.expectFmt("1:2: error: expected union\n", "{}", .{diag}); + try std.testing.expectFmt("1:2: error: expected union\n", "{f}", .{diag}); } // Enum literals cannot coerce into untagged unions @@ -1468,7 +1449,7 @@ test "std.zon unions" { var diag: Diagnostics = .{}; defer diag.deinit(gpa); try std.testing.expectError(error.ParseZon, fromSlice(Union, gpa, ".x", &diag, .{})); - try std.testing.expectFmt("1:2: error: expected union\n", "{}", .{diag}); + try std.testing.expectFmt("1:2: error: expected union\n", "{f}", .{diag}); } // Unknown field for enum literal coercion @@ -1482,7 +1463,7 @@ test "std.zon unions" { \\1:2: note: supported: 'x' \\ , - "{}", + "{f}", .{diag}, ); } @@ -1493,7 +1474,7 @@ test "std.zon unions" { var diag: Diagnostics = .{}; defer diag.deinit(gpa); try std.testing.expectError(error.ParseZon, fromSlice(Union, gpa, ".x", &diag, .{})); - try std.testing.expectFmt("1:2: error: expected union\n", "{}", .{diag}); + try std.testing.expectFmt("1:2: error: expected union\n", "{f}", .{diag}); } } @@ -1549,7 +1530,7 @@ test "std.zon structs" { \\1:12: note: supported: 'x', 'y' \\ , - "{}", + "{f}", .{diag}, ); } @@ -1567,7 +1548,7 @@ test "std.zon structs" { \\1:4: error: duplicate struct field name \\1:12: note: duplicate name here \\ - , "{}", .{diag}); + , "{f}", .{diag}); } // Ignore unknown fields @@ -1592,7 +1573,7 @@ test "std.zon structs" { \\1:4: error: unexpected field 'x' \\1:4: note: none expected \\ - , "{}", .{diag}); + , "{f}", .{diag}); } // Missing field @@ -1604,7 +1585,7 @@ test "std.zon structs" { 
error.ParseZon, fromSlice(Vec2, gpa, ".{.x=1.5}", &diag, .{}), ); - try std.testing.expectFmt("1:2: error: missing required field y\n", "{}", .{diag}); + try std.testing.expectFmt("1:2: error: missing required field y\n", "{f}", .{diag}); } // Default field @@ -1631,7 +1612,7 @@ test "std.zon structs" { try std.testing.expectFmt( \\1:18: error: cannot initialize comptime field \\ - , "{}", .{diag}); + , "{f}", .{diag}); } // Enum field (regression test, we were previously getting the field name in an @@ -1661,7 +1642,7 @@ test "std.zon structs" { \\1:1: error: types are not available in ZON \\1:1: note: replace the type with '.' \\ - , "{}", .{diag}); + , "{f}", .{diag}); } // Arrays @@ -1674,7 +1655,7 @@ test "std.zon structs" { \\1:1: error: types are not available in ZON \\1:1: note: replace the type with '.' \\ - , "{}", .{diag}); + , "{f}", .{diag}); } // Slices @@ -1687,7 +1668,7 @@ test "std.zon structs" { \\1:1: error: types are not available in ZON \\1:1: note: replace the type with '.' \\ - , "{}", .{diag}); + , "{f}", .{diag}); } // Tuples @@ -1706,7 +1687,7 @@ test "std.zon structs" { \\1:1: error: types are not available in ZON \\1:1: note: replace the type with '.' \\ - , "{}", .{diag}); + , "{f}", .{diag}); } // Nested @@ -1719,7 +1700,7 @@ test "std.zon structs" { \\1:9: error: types are not available in ZON \\1:9: note: replace the type with '.' 
\\ - , "{}", .{diag}); + , "{f}", .{diag}); } } } @@ -1764,7 +1745,7 @@ test "std.zon tuples" { error.ParseZon, fromSlice(Tuple, gpa, ".{0.5, true, 123}", &diag, .{}), ); - try std.testing.expectFmt("1:14: error: index 2 outside of tuple length 2\n", "{}", .{diag}); + try std.testing.expectFmt("1:14: error: index 2 outside of tuple length 2\n", "{f}", .{diag}); } // Extra field @@ -1778,7 +1759,7 @@ test "std.zon tuples" { ); try std.testing.expectFmt( "1:2: error: missing tuple field with index 1\n", - "{}", + "{f}", .{diag}, ); } @@ -1792,7 +1773,7 @@ test "std.zon tuples" { error.ParseZon, fromSlice(Tuple, gpa, ".{.foo = 10.0}", &diag, .{}), ); - try std.testing.expectFmt("1:2: error: expected tuple\n", "{}", .{diag}); + try std.testing.expectFmt("1:2: error: expected tuple\n", "{f}", .{diag}); } // Struct with missing field names @@ -1804,7 +1785,7 @@ test "std.zon tuples" { error.ParseZon, fromSlice(Struct, gpa, ".{10.0}", &diag, .{}), ); - try std.testing.expectFmt("1:2: error: expected struct\n", "{}", .{diag}); + try std.testing.expectFmt("1:2: error: expected struct\n", "{f}", .{diag}); } // Comptime field @@ -1824,7 +1805,7 @@ test "std.zon tuples" { try std.testing.expectFmt( \\1:9: error: cannot initialize comptime field \\ - , "{}", .{diag}); + , "{f}", .{diag}); } } @@ -1936,7 +1917,7 @@ test "std.zon arrays and slices" { ); try std.testing.expectFmt( "1:3: error: index 0 outside of array of length 0\n", - "{}", + "{f}", .{diag}, ); } @@ -1951,7 +1932,7 @@ test "std.zon arrays and slices" { ); try std.testing.expectFmt( "1:8: error: index 1 outside of array of length 1\n", - "{}", + "{f}", .{diag}, ); } @@ -1966,7 +1947,7 @@ test "std.zon arrays and slices" { ); try std.testing.expectFmt( "1:2: error: expected 2 array elements; found 1\n", - "{}", + "{f}", .{diag}, ); } @@ -1981,7 +1962,7 @@ test "std.zon arrays and slices" { ); try std.testing.expectFmt( "1:2: error: expected 3 array elements; found 0\n", - "{}", + "{f}", .{diag}, ); } @@ -1996,7 
+1977,7 @@ test "std.zon arrays and slices" { error.ParseZon, fromSlice([3]bool, gpa, ".{'a', 'b', 'c'}", &diag, .{}), ); - try std.testing.expectFmt("1:3: error: expected type 'bool'\n", "{}", .{diag}); + try std.testing.expectFmt("1:3: error: expected type 'bool'\n", "{f}", .{diag}); } // Slice @@ -2007,7 +1988,7 @@ test "std.zon arrays and slices" { error.ParseZon, fromSlice([]bool, gpa, ".{'a', 'b', 'c'}", &diag, .{}), ); - try std.testing.expectFmt("1:3: error: expected type 'bool'\n", "{}", .{diag}); + try std.testing.expectFmt("1:3: error: expected type 'bool'\n", "{f}", .{diag}); } } @@ -2021,7 +2002,7 @@ test "std.zon arrays and slices" { error.ParseZon, fromSlice([3]u8, gpa, "'a'", &diag, .{}), ); - try std.testing.expectFmt("1:1: error: expected array\n", "{}", .{diag}); + try std.testing.expectFmt("1:1: error: expected array\n", "{f}", .{diag}); } // Slice @@ -2032,7 +2013,7 @@ test "std.zon arrays and slices" { error.ParseZon, fromSlice([]u8, gpa, "'a'", &diag, .{}), ); - try std.testing.expectFmt("1:1: error: expected array\n", "{}", .{diag}); + try std.testing.expectFmt("1:1: error: expected array\n", "{f}", .{diag}); } } @@ -2046,7 +2027,7 @@ test "std.zon arrays and slices" { ); try std.testing.expectFmt( "1:3: error: pointers are not available in ZON\n", - "{}", + "{f}", .{diag}, ); } @@ -2085,7 +2066,7 @@ test "std.zon string literal" { error.ParseZon, fromSlice([]u8, gpa, "\"abcd\"", &diag, .{}), ); - try std.testing.expectFmt("1:1: error: expected array\n", "{}", .{diag}); + try std.testing.expectFmt("1:1: error: expected array\n", "{f}", .{diag}); } { @@ -2095,7 +2076,7 @@ test "std.zon string literal" { error.ParseZon, fromSlice([]u8, gpa, "\\\\abcd", &diag, .{}), ); - try std.testing.expectFmt("1:1: error: expected array\n", "{}", .{diag}); + try std.testing.expectFmt("1:1: error: expected array\n", "{f}", .{diag}); } } @@ -2112,7 +2093,7 @@ test "std.zon string literal" { error.ParseZon, fromSlice([4:0]u8, gpa, "\"abcd\"", &diag, .{}), ); - 
try std.testing.expectFmt("1:1: error: expected array\n", "{}", .{diag}); + try std.testing.expectFmt("1:1: error: expected array\n", "{f}", .{diag}); } { @@ -2122,7 +2103,7 @@ test "std.zon string literal" { error.ParseZon, fromSlice([4:0]u8, gpa, "\\\\abcd", &diag, .{}), ); - try std.testing.expectFmt("1:1: error: expected array\n", "{}", .{diag}); + try std.testing.expectFmt("1:1: error: expected array\n", "{f}", .{diag}); } } @@ -2164,7 +2145,7 @@ test "std.zon string literal" { error.ParseZon, fromSlice([:1]const u8, gpa, "\"foo\"", &diag, .{}), ); - try std.testing.expectFmt("1:1: error: expected array\n", "{}", .{diag}); + try std.testing.expectFmt("1:1: error: expected array\n", "{f}", .{diag}); } { @@ -2174,7 +2155,7 @@ test "std.zon string literal" { error.ParseZon, fromSlice([:1]const u8, gpa, "\\\\foo", &diag, .{}), ); - try std.testing.expectFmt("1:1: error: expected array\n", "{}", .{diag}); + try std.testing.expectFmt("1:1: error: expected array\n", "{f}", .{diag}); } } @@ -2186,7 +2167,7 @@ test "std.zon string literal" { error.ParseZon, fromSlice([]const u8, gpa, "true", &diag, .{}), ); - try std.testing.expectFmt("1:1: error: expected string\n", "{}", .{diag}); + try std.testing.expectFmt("1:1: error: expected string\n", "{f}", .{diag}); } // Expecting string literal, getting an incompatible tuple @@ -2197,7 +2178,7 @@ test "std.zon string literal" { error.ParseZon, fromSlice([]const u8, gpa, ".{false}", &diag, .{}), ); - try std.testing.expectFmt("1:3: error: expected type 'u8'\n", "{}", .{diag}); + try std.testing.expectFmt("1:3: error: expected type 'u8'\n", "{f}", .{diag}); } // Invalid string literal @@ -2208,7 +2189,7 @@ test "std.zon string literal" { error.ParseZon, fromSlice([]const i8, gpa, "\"\\a\"", &diag, .{}), ); - try std.testing.expectFmt("1:3: error: invalid escape character: 'a'\n", "{}", .{diag}); + try std.testing.expectFmt("1:3: error: invalid escape character: 'a'\n", "{f}", .{diag}); } // Slice wrong child type @@ -2220,7 
+2201,7 @@ test "std.zon string literal" { error.ParseZon, fromSlice([]const i8, gpa, "\"a\"", &diag, .{}), ); - try std.testing.expectFmt("1:1: error: expected array\n", "{}", .{diag}); + try std.testing.expectFmt("1:1: error: expected array\n", "{f}", .{diag}); } { @@ -2230,7 +2211,7 @@ test "std.zon string literal" { error.ParseZon, fromSlice([]const i8, gpa, "\\\\a", &diag, .{}), ); - try std.testing.expectFmt("1:1: error: expected array\n", "{}", .{diag}); + try std.testing.expectFmt("1:1: error: expected array\n", "{f}", .{diag}); } } @@ -2243,7 +2224,7 @@ test "std.zon string literal" { error.ParseZon, fromSlice([]align(2) const u8, gpa, "\"abc\"", &diag, .{}), ); - try std.testing.expectFmt("1:1: error: expected array\n", "{}", .{diag}); + try std.testing.expectFmt("1:1: error: expected array\n", "{f}", .{diag}); } { @@ -2253,7 +2234,7 @@ test "std.zon string literal" { error.ParseZon, fromSlice([]align(2) const u8, gpa, "\\\\abc", &diag, .{}), ); - try std.testing.expectFmt("1:1: error: expected array\n", "{}", .{diag}); + try std.testing.expectFmt("1:1: error: expected array\n", "{f}", .{diag}); } } @@ -2327,7 +2308,7 @@ test "std.zon enum literals" { \\1:2: note: supported: 'foo', 'bar', 'baz', '@"ab\nc"' \\ , - "{}", + "{f}", .{diag}, ); } @@ -2345,7 +2326,7 @@ test "std.zon enum literals" { \\1:2: note: supported: 'foo', 'bar', 'baz', '@"ab\nc"' \\ , - "{}", + "{f}", .{diag}, ); } @@ -2358,7 +2339,7 @@ test "std.zon enum literals" { error.ParseZon, fromSlice(Enum, gpa, "true", &diag, .{}), ); - try std.testing.expectFmt("1:1: error: expected enum literal\n", "{}", .{diag}); + try std.testing.expectFmt("1:1: error: expected enum literal\n", "{f}", .{diag}); } // Test embedded nulls in an identifier @@ -2371,7 +2352,7 @@ test "std.zon enum literals" { ); try std.testing.expectFmt( "1:2: error: identifier cannot contain null bytes\n", - "{}", + "{f}", .{diag}, ); } @@ -2397,13 +2378,13 @@ test "std.zon parse bool" { \\1:2: note: ZON allows identifiers 
'true', 'false', 'null', 'inf', and 'nan' \\1:2: note: precede identifier with '.' for an enum literal \\ - , "{}", .{diag}); + , "{f}", .{diag}); } { var diag: Diagnostics = .{}; defer diag.deinit(gpa); try std.testing.expectError(error.ParseZon, fromSlice(bool, gpa, "123", &diag, .{})); - try std.testing.expectFmt("1:1: error: expected type 'bool'\n", "{}", .{diag}); + try std.testing.expectFmt("1:1: error: expected type 'bool'\n", "{f}", .{diag}); } } @@ -2476,7 +2457,7 @@ test "std.zon parse int" { )); try std.testing.expectFmt( "1:1: error: type 'i66' cannot represent value\n", - "{}", + "{f}", .{diag}, ); } @@ -2492,7 +2473,7 @@ test "std.zon parse int" { )); try std.testing.expectFmt( "1:1: error: type 'i66' cannot represent value\n", - "{}", + "{f}", .{diag}, ); } @@ -2581,7 +2562,7 @@ test "std.zon parse int" { try std.testing.expectError(error.ParseZon, fromSlice(u8, gpa, "32a32", &diag, .{})); try std.testing.expectFmt( "1:3: error: invalid digit 'a' for decimal base\n", - "{}", + "{f}", .{diag}, ); } @@ -2591,7 +2572,7 @@ test "std.zon parse int" { var diag: Diagnostics = .{}; defer diag.deinit(gpa); try std.testing.expectError(error.ParseZon, fromSlice(u8, gpa, "true", &diag, .{})); - try std.testing.expectFmt("1:1: error: expected type 'u8'\n", "{}", .{diag}); + try std.testing.expectFmt("1:1: error: expected type 'u8'\n", "{f}", .{diag}); } // Failing because an int is out of range @@ -2601,7 +2582,7 @@ test "std.zon parse int" { try std.testing.expectError(error.ParseZon, fromSlice(u8, gpa, "256", &diag, .{})); try std.testing.expectFmt( "1:1: error: type 'u8' cannot represent value\n", - "{}", + "{f}", .{diag}, ); } @@ -2613,7 +2594,7 @@ test "std.zon parse int" { try std.testing.expectError(error.ParseZon, fromSlice(i8, gpa, "-129", &diag, .{})); try std.testing.expectFmt( "1:1: error: type 'i8' cannot represent value\n", - "{}", + "{f}", .{diag}, ); } @@ -2625,7 +2606,7 @@ test "std.zon parse int" { try std.testing.expectError(error.ParseZon, 
fromSlice(u8, gpa, "-1", &diag, .{})); try std.testing.expectFmt( "1:1: error: type 'u8' cannot represent value\n", - "{}", + "{f}", .{diag}, ); } @@ -2637,7 +2618,7 @@ test "std.zon parse int" { try std.testing.expectError(error.ParseZon, fromSlice(u8, gpa, "1.5", &diag, .{})); try std.testing.expectFmt( "1:1: error: type 'u8' cannot represent value\n", - "{}", + "{f}", .{diag}, ); } @@ -2649,7 +2630,7 @@ test "std.zon parse int" { try std.testing.expectError(error.ParseZon, fromSlice(u8, gpa, "-1.0", &diag, .{})); try std.testing.expectFmt( "1:1: error: type 'u8' cannot represent value\n", - "{}", + "{f}", .{diag}, ); } @@ -2664,7 +2645,7 @@ test "std.zon parse int" { \\1:2: note: use '0' for an integer zero \\1:2: note: use '-0.0' for a floating-point signed zero \\ - , "{}", .{diag}); + , "{f}", .{diag}); } // Negative integer zero casted to float @@ -2677,7 +2658,7 @@ test "std.zon parse int" { \\1:2: note: use '0' for an integer zero \\1:2: note: use '-0.0' for a floating-point signed zero \\ - , "{}", .{diag}); + , "{f}", .{diag}); } // Negative float 0 is allowed @@ -2693,7 +2674,7 @@ test "std.zon parse int" { try std.testing.expectError(error.ParseZon, fromSlice(i8, gpa, "--2", &diag, .{})); try std.testing.expectFmt( "1:1: error: expected number or 'inf' after '-'\n", - "{}", + "{f}", .{diag}, ); } @@ -2707,7 +2688,7 @@ test "std.zon parse int" { ); try std.testing.expectFmt( "1:1: error: expected number or 'inf' after '-'\n", - "{}", + "{f}", .{diag}, ); } @@ -2717,7 +2698,7 @@ test "std.zon parse int" { var diag: Diagnostics = .{}; defer diag.deinit(gpa); try std.testing.expectError(error.ParseZon, fromSlice(u8, gpa, "0xg", &diag, .{})); - try std.testing.expectFmt("1:3: error: invalid digit 'g' for hex base\n", "{}", .{diag}); + try std.testing.expectFmt("1:3: error: invalid digit 'g' for hex base\n", "{f}", .{diag}); } // Notes on invalid int literal @@ -2729,7 +2710,7 @@ test "std.zon parse int" { \\1:1: error: number '0123' has leading zero \\1:1: 
note: use '0o' prefix for octal literals \\ - , "{}", .{diag}); + , "{f}", .{diag}); } } @@ -2742,7 +2723,7 @@ test "std.zon negative char" { try std.testing.expectError(error.ParseZon, fromSlice(f32, gpa, "-'a'", &diag, .{})); try std.testing.expectFmt( "1:1: error: expected number or 'inf' after '-'\n", - "{}", + "{f}", .{diag}, ); } @@ -2752,7 +2733,7 @@ test "std.zon negative char" { try std.testing.expectError(error.ParseZon, fromSlice(i16, gpa, "-'a'", &diag, .{})); try std.testing.expectFmt( "1:1: error: expected number or 'inf' after '-'\n", - "{}", + "{f}", .{diag}, ); } @@ -2841,7 +2822,7 @@ test "std.zon parse float" { try std.testing.expectError(error.ParseZon, fromSlice(f32, gpa, "-nan", &diag, .{})); try std.testing.expectFmt( "1:1: error: expected number or 'inf' after '-'\n", - "{}", + "{f}", .{diag}, ); } @@ -2851,7 +2832,7 @@ test "std.zon parse float" { var diag: Diagnostics = .{}; defer diag.deinit(gpa); try std.testing.expectError(error.ParseZon, fromSlice(i8, gpa, "nan", &diag, .{})); - try std.testing.expectFmt("1:1: error: expected type 'i8'\n", "{}", .{diag}); + try std.testing.expectFmt("1:1: error: expected type 'i8'\n", "{f}", .{diag}); } // nan as int not allowed @@ -2859,7 +2840,7 @@ test "std.zon parse float" { var diag: Diagnostics = .{}; defer diag.deinit(gpa); try std.testing.expectError(error.ParseZon, fromSlice(i8, gpa, "nan", &diag, .{})); - try std.testing.expectFmt("1:1: error: expected type 'i8'\n", "{}", .{diag}); + try std.testing.expectFmt("1:1: error: expected type 'i8'\n", "{f}", .{diag}); } // inf as int not allowed @@ -2867,7 +2848,7 @@ test "std.zon parse float" { var diag: Diagnostics = .{}; defer diag.deinit(gpa); try std.testing.expectError(error.ParseZon, fromSlice(i8, gpa, "inf", &diag, .{})); - try std.testing.expectFmt("1:1: error: expected type 'i8'\n", "{}", .{diag}); + try std.testing.expectFmt("1:1: error: expected type 'i8'\n", "{f}", .{diag}); } // -inf as int not allowed @@ -2875,7 +2856,7 @@ test 
"std.zon parse float" { var diag: Diagnostics = .{}; defer diag.deinit(gpa); try std.testing.expectError(error.ParseZon, fromSlice(i8, gpa, "-inf", &diag, .{})); - try std.testing.expectFmt("1:1: error: expected type 'i8'\n", "{}", .{diag}); + try std.testing.expectFmt("1:1: error: expected type 'i8'\n", "{f}", .{diag}); } // Bad identifier as float @@ -2888,7 +2869,7 @@ test "std.zon parse float" { \\1:1: note: ZON allows identifiers 'true', 'false', 'null', 'inf', and 'nan' \\1:1: note: precede identifier with '.' for an enum literal \\ - , "{}", .{diag}); + , "{f}", .{diag}); } { @@ -2897,7 +2878,7 @@ test "std.zon parse float" { try std.testing.expectError(error.ParseZon, fromSlice(f32, gpa, "-foo", &diag, .{})); try std.testing.expectFmt( "1:1: error: expected number or 'inf' after '-'\n", - "{}", + "{f}", .{diag}, ); } @@ -2910,7 +2891,7 @@ test "std.zon parse float" { error.ParseZon, fromSlice(f32, gpa, "\"foo\"", &diag, .{}), ); - try std.testing.expectFmt("1:1: error: expected type 'f32'\n", "{}", .{diag}); + try std.testing.expectFmt("1:1: error: expected type 'f32'\n", "{f}", .{diag}); } } @@ -3154,7 +3135,7 @@ test "std.zon vector" { ); try std.testing.expectFmt( "1:2: error: expected 2 vector elements; found 1\n", - "{}", + "{f}", .{diag}, ); } @@ -3169,7 +3150,7 @@ test "std.zon vector" { ); try std.testing.expectFmt( "1:2: error: expected 2 vector elements; found 3\n", - "{}", + "{f}", .{diag}, ); } @@ -3184,7 +3165,7 @@ test "std.zon vector" { ); try std.testing.expectFmt( "1:8: error: expected type 'f32'\n", - "{}", + "{f}", .{diag}, ); } @@ -3197,7 +3178,7 @@ test "std.zon vector" { error.ParseZon, fromSlice(@Vector(3, u8), gpa, "true", &diag, .{}), ); - try std.testing.expectFmt("1:1: error: expected type '@Vector(3, u8)'\n", "{}", .{diag}); + try std.testing.expectFmt("1:1: error: expected type '@Vector(3, u8)'\n", "{f}", .{diag}); } // Elements should get freed on error @@ -3208,7 +3189,7 @@ test "std.zon vector" { error.ParseZon, 
fromSlice(@Vector(3, *u8), gpa, ".{1, true, 3}", &diag, .{}), ); - try std.testing.expectFmt("1:6: error: expected type 'u8'\n", "{}", .{diag}); + try std.testing.expectFmt("1:6: error: expected type 'u8'\n", "{f}", .{diag}); } } @@ -3332,7 +3313,7 @@ test "std.zon add pointers" { error.ParseZon, fromSlice(*const ?*const u8, gpa, "true", &diag, .{}), ); - try std.testing.expectFmt("1:1: error: expected type '?u8'\n", "{}", .{diag}); + try std.testing.expectFmt("1:1: error: expected type '?u8'\n", "{f}", .{diag}); } { @@ -3342,7 +3323,7 @@ test "std.zon add pointers" { error.ParseZon, fromSlice(*const ?*const f32, gpa, "true", &diag, .{}), ); - try std.testing.expectFmt("1:1: error: expected type '?f32'\n", "{}", .{diag}); + try std.testing.expectFmt("1:1: error: expected type '?f32'\n", "{f}", .{diag}); } { @@ -3352,7 +3333,7 @@ test "std.zon add pointers" { error.ParseZon, fromSlice(*const ?*const @Vector(3, u8), gpa, "true", &diag, .{}), ); - try std.testing.expectFmt("1:1: error: expected type '?@Vector(3, u8)'\n", "{}", .{diag}); + try std.testing.expectFmt("1:1: error: expected type '?@Vector(3, u8)'\n", "{f}", .{diag}); } { @@ -3362,7 +3343,7 @@ test "std.zon add pointers" { error.ParseZon, fromSlice(*const ?*const bool, gpa, "10", &diag, .{}), ); - try std.testing.expectFmt("1:1: error: expected type '?bool'\n", "{}", .{diag}); + try std.testing.expectFmt("1:1: error: expected type '?bool'\n", "{f}", .{diag}); } { @@ -3372,7 +3353,7 @@ test "std.zon add pointers" { error.ParseZon, fromSlice(*const ?*const struct { a: i32 }, gpa, "true", &diag, .{}), ); - try std.testing.expectFmt("1:1: error: expected optional struct\n", "{}", .{diag}); + try std.testing.expectFmt("1:1: error: expected optional struct\n", "{f}", .{diag}); } { @@ -3382,7 +3363,7 @@ test "std.zon add pointers" { error.ParseZon, fromSlice(*const ?*const struct { i32 }, gpa, "true", &diag, .{}), ); - try std.testing.expectFmt("1:1: error: expected optional tuple\n", "{}", .{diag}); + try 
std.testing.expectFmt("1:1: error: expected optional tuple\n", "{f}", .{diag}); } { @@ -3392,7 +3373,7 @@ test "std.zon add pointers" { error.ParseZon, fromSlice(*const ?*const union { x: void }, gpa, "true", &diag, .{}), ); - try std.testing.expectFmt("1:1: error: expected optional union\n", "{}", .{diag}); + try std.testing.expectFmt("1:1: error: expected optional union\n", "{f}", .{diag}); } { @@ -3402,7 +3383,7 @@ test "std.zon add pointers" { error.ParseZon, fromSlice(*const ?*const [3]u8, gpa, "true", &diag, .{}), ); - try std.testing.expectFmt("1:1: error: expected optional array\n", "{}", .{diag}); + try std.testing.expectFmt("1:1: error: expected optional array\n", "{f}", .{diag}); } { @@ -3412,7 +3393,7 @@ test "std.zon add pointers" { error.ParseZon, fromSlice(?[3]u8, gpa, "true", &diag, .{}), ); - try std.testing.expectFmt("1:1: error: expected optional array\n", "{}", .{diag}); + try std.testing.expectFmt("1:1: error: expected optional array\n", "{f}", .{diag}); } { @@ -3422,7 +3403,7 @@ test "std.zon add pointers" { error.ParseZon, fromSlice(*const ?*const []u8, gpa, "true", &diag, .{}), ); - try std.testing.expectFmt("1:1: error: expected optional array\n", "{}", .{diag}); + try std.testing.expectFmt("1:1: error: expected optional array\n", "{f}", .{diag}); } { @@ -3432,7 +3413,7 @@ test "std.zon add pointers" { error.ParseZon, fromSlice(?[]u8, gpa, "true", &diag, .{}), ); - try std.testing.expectFmt("1:1: error: expected optional array\n", "{}", .{diag}); + try std.testing.expectFmt("1:1: error: expected optional array\n", "{f}", .{diag}); } { @@ -3442,7 +3423,7 @@ test "std.zon add pointers" { error.ParseZon, fromSlice(*const ?*const []const u8, gpa, "true", &diag, .{}), ); - try std.testing.expectFmt("1:1: error: expected optional string\n", "{}", .{diag}); + try std.testing.expectFmt("1:1: error: expected optional string\n", "{f}", .{diag}); } { @@ -3452,7 +3433,7 @@ test "std.zon add pointers" { error.ParseZon, fromSlice(*const ?*const enum { 
foo }, gpa, "true", &diag, .{}), ); - try std.testing.expectFmt("1:1: error: expected optional enum literal\n", "{}", .{diag}); + try std.testing.expectFmt("1:1: error: expected optional enum literal\n", "{f}", .{diag}); } } diff --git a/lib/std/zon/stringify.zig b/lib/std/zon/stringify.zig index 8682fdc5f4..1a38dc7579 100644 --- a/lib/std/zon/stringify.zig +++ b/lib/std/zon/stringify.zig @@ -615,7 +615,8 @@ pub fn Serializer(Writer: type) type { /// Serialize an integer. pub fn int(self: *Self, val: anytype) Writer.Error!void { - try std.fmt.formatInt(val, 10, .lower, .{}, self.writer); + //try self.writer.printInt(val, 10, .lower, .{}); + try std.fmt.format(self.writer, "{d}", .{val}); } /// Serialize a float. @@ -645,7 +646,7 @@ pub fn Serializer(Writer: type) type { /// /// Escapes the identifier if necessary. pub fn ident(self: *Self, name: []const u8) Writer.Error!void { - try self.writer.print(".{p_}", .{std.zig.fmtId(name)}); + try self.writer.print(".{f}", .{std.zig.fmtIdPU(name)}); } /// Serialize `val` as a Unicode codepoint. @@ -658,7 +659,7 @@ pub fn Serializer(Writer: type) type { var buf: [8]u8 = undefined; const len = std.unicode.utf8Encode(val, &buf) catch return error.InvalidCodepoint; const str = buf[0..len]; - try std.fmt.format(self.writer, "'{'}'", .{std.zig.fmtEscapes(str)}); + try std.fmt.format(self.writer, "'{f}'", .{std.zig.fmtChar(str)}); } /// Like `value`, but always serializes `val` as a tuple. @@ -716,7 +717,7 @@ pub fn Serializer(Writer: type) type { /// Like `value`, but always serializes `val` as a string. pub fn string(self: *Self, val: []const u8) Writer.Error!void { - try std.fmt.format(self.writer, "\"{}\"", .{std.zig.fmtEscapes(val)}); + try std.fmt.format(self.writer, "\"{f}\"", .{std.zig.fmtString(val)}); } /// Options for formatting multiline strings. 
diff --git a/lib/ubsan_rt.zig b/lib/ubsan_rt.zig index a2e0a6c1aa..cfb9ea2d99 100644 --- a/lib/ubsan_rt.zig +++ b/lib/ubsan_rt.zig @@ -119,14 +119,7 @@ const Value = extern struct { } } - pub fn format( - value: Value, - comptime fmt: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, - ) !void { - comptime assert(fmt.len == 0); - + pub fn format(value: Value, writer: *std.io.Writer) std.io.Writer.Error!void { // Work around x86_64 backend limitation. if (builtin.zig_backend == .stage2_x86_64 and builtin.os.tag == .windows) { try writer.writeAll("(unknown)"); @@ -136,12 +129,12 @@ const Value = extern struct { switch (value.td.kind) { .integer => { if (value.td.isSigned()) { - try writer.print("{}", .{value.getSignedInteger()}); + try writer.print("{d}", .{value.getSignedInteger()}); } else { - try writer.print("{}", .{value.getUnsignedInteger()}); + try writer.print("{d}", .{value.getUnsignedInteger()}); } }, - .float => try writer.print("{}", .{value.getFloat()}), + .float => try writer.print("{d}", .{value.getFloat()}), .unknown => try writer.writeAll("(unknown)"), } } @@ -172,17 +165,12 @@ fn overflowHandler( ) callconv(.c) noreturn { const lhs: Value = .{ .handle = lhs_handle, .td = data.td }; const rhs: Value = .{ .handle = rhs_handle, .td = data.td }; - - const is_signed = data.td.isSigned(); - const fmt = "{s} integer overflow: " ++ "{} " ++ - operator ++ " {} cannot be represented in type {s}"; - - panic(@returnAddress(), fmt, .{ - if (is_signed) "signed" else "unsigned", - lhs, - rhs, - data.td.getName(), - }); + const signed_str = if (data.td.isSigned()) "signed" else "unsigned"; + panic( + @returnAddress(), + "{s} integer overflow: {f} " ++ operator ++ " {f} cannot be represented in type {s}", + .{ signed_str, lhs, rhs, data.td.getName() }, + ); } }; @@ -201,11 +189,9 @@ fn negationHandler( value_handle: ValueHandle, ) callconv(.c) noreturn { const value: Value = .{ .handle = value_handle, .td = data.td }; - panic( - @returnAddress(), - 
"negation of {} cannot be represented in type {s}", - .{ value, data.td.getName() }, - ); + panic(@returnAddress(), "negation of {f} cannot be represented in type {s}", .{ + value, data.td.getName(), + }); } fn divRemHandlerAbort( @@ -225,11 +211,9 @@ fn divRemHandler( const rhs: Value = .{ .handle = rhs_handle, .td = data.td }; if (rhs.isMinusOne()) { - panic( - @returnAddress(), - "division of {} by -1 cannot be represented in type {s}", - .{ lhs, data.td.getName() }, - ); + panic(@returnAddress(), "division of {f} by -1 cannot be represented in type {s}", .{ + lhs, data.td.getName(), + }); } else panic(@returnAddress(), "division by zero", .{}); } @@ -269,8 +253,8 @@ fn alignmentAssumptionHandler( if (maybe_offset) |offset| { panic( @returnAddress(), - "assumption of {} byte alignment (with offset of {} byte) for pointer of type {s} failed\n" ++ - "offset address is {} aligned, misalignment offset is {} bytes", + "assumption of {f} byte alignment (with offset of {d} byte) for pointer of type {s} failed\n" ++ + "offset address is {d} aligned, misalignment offset is {d} bytes", .{ alignment, @intFromPtr(offset), @@ -282,8 +266,8 @@ fn alignmentAssumptionHandler( } else { panic( @returnAddress(), - "assumption of {} byte alignment for pointer of type {s} failed\n" ++ - "address is {} aligned, misalignment offset is {} bytes", + "assumption of {f} byte alignment for pointer of type {s} failed\n" ++ + "address is {d} aligned, misalignment offset is {d} bytes", .{ alignment, data.td.getName(), @@ -320,21 +304,21 @@ fn shiftOob( rhs.getPositiveInteger() >= data.lhs_type.getIntegerSize()) { if (rhs.isNegative()) { - panic(@returnAddress(), "shift exponent {} is negative", .{rhs}); + panic(@returnAddress(), "shift exponent {f} is negative", .{rhs}); } else { panic( @returnAddress(), - "shift exponent {} is too large for {}-bit type {s}", + "shift exponent {f} is too large for {d}-bit type {s}", .{ rhs, data.lhs_type.getIntegerSize(), data.lhs_type.getName() }, ); } } 
else { if (lhs.isNegative()) { - panic(@returnAddress(), "left shift of negative value {}", .{lhs}); + panic(@returnAddress(), "left shift of negative value {f}", .{lhs}); } else { panic( @returnAddress(), - "left shift of {} by {} places cannot be represented in type {s}", + "left shift of {f} by {f} places cannot be represented in type {s}", .{ lhs, rhs, data.lhs_type.getName() }, ); } @@ -359,11 +343,10 @@ fn outOfBounds( index_handle: ValueHandle, ) callconv(.c) noreturn { const index: Value = .{ .handle = index_handle, .td = data.index_type }; - panic( - @returnAddress(), - "index {} out of bounds for type {s}", - .{ index, data.array_type.getName() }, - ); + panic(@returnAddress(), "index {f} out of bounds for type {s}", .{ + index, + data.array_type.getName(), + }); } const PointerOverflowData = extern struct { @@ -387,7 +370,7 @@ fn pointerOverflow( if (result == 0) { panic(@returnAddress(), "applying zero offset to null pointer", .{}); } else { - panic(@returnAddress(), "applying non-zero offset {} to null pointer", .{result}); + panic(@returnAddress(), "applying non-zero offset {d} to null pointer", .{result}); } } else { if (result == 0) { @@ -483,7 +466,7 @@ fn typeMismatch( } else if (!std.mem.isAligned(handle, alignment)) { panic( @returnAddress(), - "{s} misaligned address 0x{x} for type {s}, which requires {} byte alignment", + "{s} misaligned address 0x{x} for type {s}, which requires {d} byte alignment", .{ data.kind.getName(), handle, data.td.getName(), alignment }, ); } else { @@ -531,7 +514,7 @@ fn nonNullArgAbort(data: *const NonNullArgData) callconv(.c) noreturn { fn nonNullArg(data: *const NonNullArgData) callconv(.c) noreturn { panic( @returnAddress(), - "null pointer passed as argument {}, which is declared to never be null", + "null pointer passed as argument {d}, which is declared to never be null", .{data.arg_index}, ); } @@ -553,11 +536,9 @@ fn loadInvalidValue( value_handle: ValueHandle, ) callconv(.c) noreturn { const value: Value = 
.{ .handle = value_handle, .td = data.td }; - panic( - @returnAddress(), - "load of value {}, which is not valid for type {s}", - .{ value, data.td.getName() }, - ); + panic(@returnAddress(), "load of value {f}, which is not valid for type {s}", .{ + value, data.td.getName(), + }); } const InvalidBuiltinData = extern struct { @@ -596,11 +577,7 @@ fn vlaBoundNotPositive( bound_handle: ValueHandle, ) callconv(.c) noreturn { const bound: Value = .{ .handle = bound_handle, .td = data.td }; - panic( - @returnAddress(), - "variable length array bound evaluates to non-positive value {}", - .{bound}, - ); + panic(@returnAddress(), "variable length array bound evaluates to non-positive value {f}", .{bound}); } const FloatCastOverflowData = extern struct { @@ -631,13 +608,13 @@ fn floatCastOverflow( if (@as(u16, ptr[0]) + @as(u16, ptr[1]) < 2 or ptr[0] == 0xFF or ptr[1] == 0xFF) { const data: *const FloatCastOverflowData = @ptrCast(data_handle); const from_value: Value = .{ .handle = from_handle, .td = data.from }; - panic(@returnAddress(), "{} is outside the range of representable values of type {s}", .{ + panic(@returnAddress(), "{f} is outside the range of representable values of type {s}", .{ from_value, data.to.getName(), }); } else { const data: *const FloatCastOverflowDataV2 = @ptrCast(data_handle); const from_value: Value = .{ .handle = from_handle, .td = data.from }; - panic(@returnAddress(), "{} is outside the range of representable values of type {s}", .{ + panic(@returnAddress(), "{f} is outside the range of representable values of type {s}", .{ from_value, data.to.getName(), }); } diff --git a/src/Air.zig b/src/Air.zig index 5a5070276c..0bd496ca2c 100644 --- a/src/Air.zig +++ b/src/Air.zig @@ -746,7 +746,9 @@ pub const Inst = struct { /// Dest slice may have any alignment; source pointer may have any alignment. /// The two memory regions must not overlap. /// Result type is always void. + /// /// Uses the `bin_op` field. LHS is the dest slice. 
RHS is the source pointer. + /// /// If the length is compile-time known (due to the destination or /// source being a pointer-to-array), then it is guaranteed to be /// greater than zero. @@ -758,7 +760,9 @@ pub const Inst = struct { /// Dest slice may have any alignment; source pointer may have any alignment. /// The two memory regions may overlap. /// Result type is always void. + /// /// Uses the `bin_op` field. LHS is the dest slice. RHS is the source pointer. + /// /// If the length is compile-time known (due to the destination or /// source being a pointer-to-array), then it is guaranteed to be /// greater than zero. @@ -957,18 +961,13 @@ pub const Inst = struct { return index.unwrap().target; } - pub fn format( - index: Index, - comptime _: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { - try writer.writeByte('%'); + pub fn format(index: Index, w: *std.io.Writer) std.io.Writer.Error!void { + try w.writeByte('%'); switch (index.unwrap()) { .ref => {}, - .target => try writer.writeByte('t'), + .target => try w.writeByte('t'), } - try writer.print("{d}", .{@as(u31, @truncate(@intFromEnum(index)))}); + try w.print("{d}", .{@as(u31, @truncate(@intFromEnum(index)))}); } }; diff --git a/src/Air/Liveness.zig b/src/Air/Liveness.zig index 94ed60fbf2..e369428636 100644 --- a/src/Air/Liveness.zig +++ b/src/Air/Liveness.zig @@ -1299,10 +1299,10 @@ fn analyzeOperands( // This logic must synchronize with `will_die_immediately` in `AnalyzeBigOperands.init`. 
const immediate_death = if (data.live_set.remove(inst)) blk: { - log.debug("[{}] %{}: removed from live set", .{ pass, @intFromEnum(inst) }); + log.debug("[{}] %{d}: removed from live set", .{ pass, @intFromEnum(inst) }); break :blk false; } else blk: { - log.debug("[{}] %{}: immediate death", .{ pass, @intFromEnum(inst) }); + log.debug("[{}] %{d}: immediate death", .{ pass, @intFromEnum(inst) }); break :blk true; }; @@ -1323,7 +1323,7 @@ fn analyzeOperands( const mask = @as(Bpi, 1) << @as(OperandInt, @intCast(i)); if ((try data.live_set.fetchPut(gpa, operand, {})) == null) { - log.debug("[{}] %{}: added %{} to live set (operand dies here)", .{ pass, @intFromEnum(inst), operand }); + log.debug("[{}] %{d}: added %{d} to live set (operand dies here)", .{ pass, @intFromEnum(inst), operand }); tomb_bits |= mask; } } @@ -1462,19 +1462,19 @@ fn analyzeInstBlock( }, .main_analysis => { - log.debug("[{}] %{}: block live set is {}", .{ pass, inst, fmtInstSet(&data.live_set) }); + log.debug("[{}] %{f}: block live set is {f}", .{ pass, inst, fmtInstSet(&data.live_set) }); // We can move the live set because the body should have a noreturn // instruction which overrides the set. 
try data.block_scopes.put(gpa, inst, .{ .live_set = data.live_set.move(), }); defer { - log.debug("[{}] %{}: popped block scope", .{ pass, inst }); + log.debug("[{}] %{f}: popped block scope", .{ pass, inst }); var scope = data.block_scopes.fetchRemove(inst).?.value; scope.live_set.deinit(gpa); } - log.debug("[{}] %{}: pushed new block scope", .{ pass, inst }); + log.debug("[{}] %{f}: pushed new block scope", .{ pass, inst }); try analyzeBody(a, pass, data, body); // If the block is noreturn, block deaths not only aren't useful, they're impossible to @@ -1501,7 +1501,7 @@ fn analyzeInstBlock( } assert(measured_num == num_deaths); // post-live-set should be a subset of pre-live-set try a.special.put(gpa, inst, extra_index); - log.debug("[{}] %{}: block deaths are {}", .{ + log.debug("[{}] %{f}: block deaths are {f}", .{ pass, inst, fmtInstList(@ptrCast(a.extra.items[extra_index + 1 ..][0..num_deaths])), @@ -1538,7 +1538,7 @@ fn writeLoopInfo( const block_inst = key.*; a.extra.appendAssumeCapacity(@intFromEnum(block_inst)); } - log.debug("[{}] %{}: includes breaks to {}", .{ LivenessPass.loop_analysis, inst, fmtInstSet(&data.breaks) }); + log.debug("[{}] %{f}: includes breaks to {f}", .{ LivenessPass.loop_analysis, inst, fmtInstSet(&data.breaks) }); // Now we put the live operands from the loop body in too const num_live = data.live_set.count(); @@ -1550,7 +1550,7 @@ fn writeLoopInfo( const alive = key.*; a.extra.appendAssumeCapacity(@intFromEnum(alive)); } - log.debug("[{}] %{}: maintain liveness of {}", .{ LivenessPass.loop_analysis, inst, fmtInstSet(&data.live_set) }); + log.debug("[{}] %{f}: maintain liveness of {f}", .{ LivenessPass.loop_analysis, inst, fmtInstSet(&data.live_set) }); try a.special.put(gpa, inst, extra_index); @@ -1591,7 +1591,7 @@ fn resolveLoopLiveSet( try data.live_set.ensureUnusedCapacity(gpa, @intCast(loop_live.len)); for (loop_live) |alive| data.live_set.putAssumeCapacity(alive, {}); - log.debug("[{}] %{}: block live set is {}", .{ 
LivenessPass.main_analysis, inst, fmtInstSet(&data.live_set) }); + log.debug("[{}] %{f}: block live set is {f}", .{ LivenessPass.main_analysis, inst, fmtInstSet(&data.live_set) }); for (breaks) |block_inst| { // We might break to this block, so include every operand that the block needs alive @@ -1604,7 +1604,7 @@ fn resolveLoopLiveSet( } } - log.debug("[{}] %{}: loop live set is {}", .{ LivenessPass.main_analysis, inst, fmtInstSet(&data.live_set) }); + log.debug("[{}] %{f}: loop live set is {f}", .{ LivenessPass.main_analysis, inst, fmtInstSet(&data.live_set) }); } fn analyzeInstLoop( @@ -1642,7 +1642,7 @@ fn analyzeInstLoop( .live_set = data.live_set.move(), }); defer { - log.debug("[{}] %{}: popped loop block scop", .{ pass, inst }); + log.debug("[{}] %{f}: popped loop block scop", .{ pass, inst }); var scope = data.block_scopes.fetchRemove(inst).?.value; scope.live_set.deinit(gpa); } @@ -1743,13 +1743,13 @@ fn analyzeInstCondBr( } } - log.debug("[{}] %{}: 'then' branch mirrored deaths are {}", .{ pass, inst, fmtInstList(then_mirrored_deaths.items) }); - log.debug("[{}] %{}: 'else' branch mirrored deaths are {}", .{ pass, inst, fmtInstList(else_mirrored_deaths.items) }); + log.debug("[{}] %{f}: 'then' branch mirrored deaths are {f}", .{ pass, inst, fmtInstList(then_mirrored_deaths.items) }); + log.debug("[{}] %{f}: 'else' branch mirrored deaths are {f}", .{ pass, inst, fmtInstList(else_mirrored_deaths.items) }); data.live_set.deinit(gpa); data.live_set = then_live.move(); // Really the union of both live sets - log.debug("[{}] %{}: new live set is {}", .{ pass, inst, fmtInstSet(&data.live_set) }); + log.debug("[{}] %{f}: new live set is {f}", .{ pass, inst, fmtInstSet(&data.live_set) }); // Write the mirrored deaths to `extra` const then_death_count = @as(u32, @intCast(then_mirrored_deaths.items.len)); @@ -1817,7 +1817,7 @@ fn analyzeInstSwitchBr( }); } defer if (is_dispatch_loop) { - log.debug("[{}] %{}: popped loop block scop", .{ pass, inst }); + 
log.debug("[{}] %{f}: popped loop block scop", .{ pass, inst }); var scope = data.block_scopes.fetchRemove(inst).?.value; scope.live_set.deinit(gpa); }; @@ -1875,13 +1875,13 @@ fn analyzeInstSwitchBr( } for (mirrored_deaths, 0..) |mirrored, i| { - log.debug("[{}] %{}: case {} mirrored deaths are {}", .{ pass, inst, i, fmtInstList(mirrored.items) }); + log.debug("[{}] %{f}: case {} mirrored deaths are {f}", .{ pass, inst, i, fmtInstList(mirrored.items) }); } data.live_set.deinit(gpa); data.live_set = all_alive.move(); - log.debug("[{}] %{}: new live set is {}", .{ pass, inst, fmtInstSet(&data.live_set) }); + log.debug("[{}] %{f}: new live set is {f}", .{ pass, inst, fmtInstSet(&data.live_set) }); } const else_death_count = @as(u32, @intCast(mirrored_deaths[ncases].items.len)); @@ -1980,7 +1980,7 @@ fn AnalyzeBigOperands(comptime pass: LivenessPass) type { .main_analysis => { if ((try big.data.live_set.fetchPut(gpa, operand, {})) == null) { - log.debug("[{}] %{}: added %{} to live set (operand dies here)", .{ pass, big.inst, operand }); + log.debug("[{}] %{f}: added %{f} to live set (operand dies here)", .{ pass, big.inst, operand }); big.extra_tombs[extra_byte] |= @as(u32, 1) << extra_bit; } }, @@ -2036,15 +2036,15 @@ fn fmtInstSet(set: *const std.AutoHashMapUnmanaged(Air.Inst.Index, void)) FmtIns const FmtInstSet = struct { set: *const std.AutoHashMapUnmanaged(Air.Inst.Index, void), - pub fn format(val: FmtInstSet, comptime _: []const u8, _: std.fmt.FormatOptions, w: anytype) !void { + pub fn format(val: FmtInstSet, w: *std.io.Writer) std.io.Writer.Error!void { if (val.set.count() == 0) { try w.writeAll("[no instructions]"); return; } var it = val.set.keyIterator(); - try w.print("%{}", .{it.next().?.*}); + try w.print("%{f}", .{it.next().?.*}); while (it.next()) |key| { - try w.print(" %{}", .{key.*}); + try w.print(" %{f}", .{key.*}); } } }; @@ -2056,14 +2056,14 @@ fn fmtInstList(list: []const Air.Inst.Index) FmtInstList { const FmtInstList = struct { list: 
[]const Air.Inst.Index, - pub fn format(val: FmtInstList, comptime _: []const u8, _: std.fmt.FormatOptions, w: anytype) !void { + pub fn format(val: FmtInstList, w: *std.io.Writer) std.io.Writer.Error!void { if (val.list.len == 0) { try w.writeAll("[no instructions]"); return; } - try w.print("%{}", .{val.list[0]}); + try w.print("%{f}", .{val.list[0]}); for (val.list[1..]) |inst| { - try w.print(" %{}", .{inst}); + try w.print(" %{f}", .{inst}); } } }; diff --git a/src/Air/Liveness/Verify.zig b/src/Air/Liveness/Verify.zig index 85345ceb66..b1e13dbf40 100644 --- a/src/Air/Liveness/Verify.zig +++ b/src/Air/Liveness/Verify.zig @@ -73,7 +73,7 @@ fn verifyBody(self: *Verify, body: []const Air.Inst.Index) Error!void { .trap, .unreach => { try self.verifyInstOperands(inst, .{ .none, .none, .none }); // This instruction terminates the function, so everything should be dead - if (self.live.count() > 0) return invalid("%{}: instructions still alive", .{inst}); + if (self.live.count() > 0) return invalid("%{f}: instructions still alive", .{inst}); }, // unary @@ -166,7 +166,7 @@ fn verifyBody(self: *Verify, body: []const Air.Inst.Index) Error!void { const un_op = data[@intFromEnum(inst)].un_op; try self.verifyInstOperands(inst, .{ un_op, .none, .none }); // This instruction terminates the function, so everything should be dead - if (self.live.count() > 0) return invalid("%{}: instructions still alive", .{inst}); + if (self.live.count() > 0) return invalid("%{f}: instructions still alive", .{inst}); }, .dbg_var_ptr, .dbg_var_val, @@ -450,7 +450,7 @@ fn verifyBody(self: *Verify, body: []const Air.Inst.Index) Error!void { .repeat => { const repeat = data[@intFromEnum(inst)].repeat; const expected_live = self.loops.get(repeat.loop_inst) orelse - return invalid("%{}: loop %{} not in scope", .{ @intFromEnum(inst), @intFromEnum(repeat.loop_inst) }); + return invalid("%{d}: loop %{d} not in scope", .{ @intFromEnum(inst), @intFromEnum(repeat.loop_inst) }); try 
self.verifyMatchingLiveness(repeat.loop_inst, expected_live); }, @@ -460,7 +460,7 @@ fn verifyBody(self: *Verify, body: []const Air.Inst.Index) Error!void { try self.verifyOperand(inst, br.operand, self.liveness.operandDies(inst, 0)); const expected_live = self.loops.get(br.block_inst) orelse - return invalid("%{}: loop %{} not in scope", .{ @intFromEnum(inst), @intFromEnum(br.block_inst) }); + return invalid("%{d}: loop %{d} not in scope", .{ @intFromEnum(inst), @intFromEnum(br.block_inst) }); try self.verifyMatchingLiveness(br.block_inst, expected_live); }, @@ -511,7 +511,7 @@ fn verifyBody(self: *Verify, body: []const Air.Inst.Index) Error!void { // The same stuff should be alive after the loop as before it. const gop = try self.loops.getOrPut(self.gpa, inst); - if (gop.found_existing) return invalid("%{}: loop already exists", .{@intFromEnum(inst)}); + if (gop.found_existing) return invalid("%{d}: loop already exists", .{@intFromEnum(inst)}); defer { var live = self.loops.fetchRemove(inst).?; live.value.deinit(self.gpa); @@ -560,7 +560,7 @@ fn verifyBody(self: *Verify, body: []const Air.Inst.Index) Error!void { // after the loop as before it. 
{ const gop = try self.loops.getOrPut(self.gpa, inst); - if (gop.found_existing) return invalid("%{}: loop already exists", .{@intFromEnum(inst)}); + if (gop.found_existing) return invalid("%{d}: loop already exists", .{@intFromEnum(inst)}); gop.value_ptr.* = self.live.move(); } defer { @@ -601,9 +601,11 @@ fn verifyOperand(self: *Verify, inst: Air.Inst.Index, op_ref: Air.Inst.Ref, dies return; }; if (dies) { - if (!self.live.remove(operand)) return invalid("%{}: dead operand %{} reused and killed again", .{ inst, operand }); + if (!self.live.remove(operand)) return invalid("%{f}: dead operand %{f} reused and killed again", .{ + inst, operand, + }); } else { - if (!self.live.contains(operand)) return invalid("%{}: dead operand %{} reused", .{ inst, operand }); + if (!self.live.contains(operand)) return invalid("%{f}: dead operand %{f} reused", .{ inst, operand }); } } @@ -628,9 +630,9 @@ fn verifyInst(self: *Verify, inst: Air.Inst.Index) Error!void { } fn verifyMatchingLiveness(self: *Verify, block: Air.Inst.Index, live: LiveMap) Error!void { - if (self.live.count() != live.count()) return invalid("%{}: different deaths across branches", .{block}); + if (self.live.count() != live.count()) return invalid("%{f}: different deaths across branches", .{block}); var live_it = self.live.keyIterator(); - while (live_it.next()) |live_inst| if (!live.contains(live_inst.*)) return invalid("%{}: different deaths across branches", .{block}); + while (live_it.next()) |live_inst| if (!live.contains(live_inst.*)) return invalid("%{f}: different deaths across branches", .{block}); } fn invalid(comptime fmt: []const u8, args: anytype) error{LivenessInvalid} { diff --git a/src/Air/print.zig b/src/Air/print.zig index 53efa72356..8ff199c02f 100644 --- a/src/Air/print.zig +++ b/src/Air/print.zig @@ -1,6 +1,5 @@ const std = @import("std"); const Allocator = std.mem.Allocator; -const fmtIntSizeBin = std.fmt.fmtIntSizeBin; const build_options = @import("build_options"); const Zcu = 
@import("../Zcu.zig"); @@ -9,7 +8,7 @@ const Type = @import("../Type.zig"); const Air = @import("../Air.zig"); const InternPool = @import("../InternPool.zig"); -pub fn write(air: Air, stream: anytype, pt: Zcu.PerThread, liveness: ?Air.Liveness) void { +pub fn write(air: Air, stream: *std.io.Writer, pt: Zcu.PerThread, liveness: ?Air.Liveness) void { comptime std.debug.assert(build_options.enable_debug_extensions); const instruction_bytes = air.instructions.len * // Here we don't use @sizeOf(Air.Inst.Data) because it would include @@ -25,20 +24,20 @@ pub fn write(air: Air, stream: anytype, pt: Zcu.PerThread, liveness: ?Air.Livene // zig fmt: off stream.print( - \\# Total AIR+Liveness bytes: {} - \\# AIR Instructions: {d} ({}) - \\# AIR Extra Data: {d} ({}) - \\# Liveness tomb_bits: {} - \\# Liveness Extra Data: {d} ({}) - \\# Liveness special table: {d} ({}) + \\# Total AIR+Liveness bytes: {Bi} + \\# AIR Instructions: {d} ({Bi}) + \\# AIR Extra Data: {d} ({Bi}) + \\# Liveness tomb_bits: {Bi} + \\# Liveness Extra Data: {d} ({Bi}) + \\# Liveness special table: {d} ({Bi}) \\ , .{ - fmtIntSizeBin(total_bytes), - air.instructions.len, fmtIntSizeBin(instruction_bytes), - air.extra.items.len, fmtIntSizeBin(extra_bytes), - fmtIntSizeBin(tomb_bytes), - if (liveness) |l| l.extra.len else 0, fmtIntSizeBin(liveness_extra_bytes), - if (liveness) |l| l.special.count() else 0, fmtIntSizeBin(liveness_special_bytes), + total_bytes, + air.instructions.len, instruction_bytes, + air.extra.items.len, extra_bytes, + tomb_bytes, + if (liveness) |l| l.extra.len else 0, liveness_extra_bytes, + if (liveness) |l| l.special.count() else 0, liveness_special_bytes, }) catch return; // zig fmt: on @@ -55,7 +54,7 @@ pub fn write(air: Air, stream: anytype, pt: Zcu.PerThread, liveness: ?Air.Livene pub fn writeInst( air: Air, - stream: anytype, + stream: *std.io.Writer, inst: Air.Inst.Index, pt: Zcu.PerThread, liveness: ?Air.Liveness, @@ -73,11 +72,15 @@ pub fn writeInst( } pub fn dump(air: Air, pt: 
Zcu.PerThread, liveness: ?Air.Liveness) void { - air.write(std.io.getStdErr().writer(), pt, liveness); + const stderr_bw = std.debug.lockStderrWriter(&.{}); + defer std.debug.unlockStderrWriter(); + air.write(stderr_bw, pt, liveness); } pub fn dumpInst(air: Air, inst: Air.Inst.Index, pt: Zcu.PerThread, liveness: ?Air.Liveness) void { - air.writeInst(std.io.getStdErr().writer(), inst, pt, liveness); + const stderr_bw = std.debug.lockStderrWriter(&.{}); + defer std.debug.unlockStderrWriter(); + air.writeInst(stderr_bw, inst, pt, liveness); } const Writer = struct { @@ -88,17 +91,19 @@ const Writer = struct { indent: usize, skip_body: bool, - fn writeBody(w: *Writer, s: anytype, body: []const Air.Inst.Index) @TypeOf(s).Error!void { + const Error = std.io.Writer.Error; + + fn writeBody(w: *Writer, s: *std.io.Writer, body: []const Air.Inst.Index) Error!void { for (body) |inst| { try w.writeInst(s, inst); try s.writeByte('\n'); } } - fn writeInst(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void { + fn writeInst(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { const tag = w.air.instructions.items(.tag)[@intFromEnum(inst)]; - try s.writeByteNTimes(' ', w.indent); - try s.print("{}{c}= {s}(", .{ + try s.splatByteAll(' ', w.indent); + try s.print("{f}{c}= {s}(", .{ inst, @as(u8, if (if (w.liveness) |liveness| liveness.isUnused(inst) else false) '!' 
else ' '), @tagName(tag), @@ -335,47 +340,48 @@ const Writer = struct { try s.writeByte(')'); } - fn writeBinOp(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void { + fn writeBinOp(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { const bin_op = w.air.instructions.items(.data)[@intFromEnum(inst)].bin_op; try w.writeOperand(s, inst, 0, bin_op.lhs); try s.writeAll(", "); try w.writeOperand(s, inst, 1, bin_op.rhs); } - fn writeUnOp(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void { + fn writeUnOp(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { const un_op = w.air.instructions.items(.data)[@intFromEnum(inst)].un_op; try w.writeOperand(s, inst, 0, un_op); } - fn writeNoOp(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void { + fn writeNoOp(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { _ = w; + _ = s; _ = inst; // no-op, no argument to write } - fn writeType(w: *Writer, s: anytype, ty: Type) !void { + fn writeType(w: *Writer, s: *std.io.Writer, ty: Type) !void { return ty.print(s, w.pt); } - fn writeTy(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void { + fn writeTy(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { const ty = w.air.instructions.items(.data)[@intFromEnum(inst)].ty; try w.writeType(s, ty); } - fn writeArg(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void { + fn writeArg(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { const arg = w.air.instructions.items(.data)[@intFromEnum(inst)].arg; try w.writeType(s, arg.ty.toType()); try s.print(", {d}", .{arg.zir_param_index}); } - fn writeTyOp(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void { + fn writeTyOp(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { const ty_op = w.air.instructions.items(.data)[@intFromEnum(inst)].ty_op; try w.writeType(s, ty_op.ty.toType()); try s.writeAll(", "); try w.writeOperand(s, 
inst, 0, ty_op.operand); } - fn writeBlock(w: *Writer, s: anytype, tag: Air.Inst.Tag, inst: Air.Inst.Index) @TypeOf(s).Error!void { + fn writeBlock(w: *Writer, s: *std.io.Writer, tag: Air.Inst.Tag, inst: Air.Inst.Index) Error!void { const ty_pl = w.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl; try w.writeType(s, ty_pl.ty.toType()); const body: []const Air.Inst.Index = @ptrCast(switch (tag) { @@ -408,15 +414,15 @@ const Writer = struct { w.indent += 2; try w.writeBody(s, body); w.indent = old_indent; - try s.writeByteNTimes(' ', w.indent); + try s.splatByteAll(' ', w.indent); try s.writeAll("}"); for (liveness_block.deaths) |operand| { - try s.print(" {}!", .{operand}); + try s.print(" {f}!", .{operand}); } } - fn writeLoop(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void { + fn writeLoop(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { const ty_pl = w.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl; const extra = w.air.extraData(Air.Block, ty_pl.payload); const body: []const Air.Inst.Index = @ptrCast(w.air.extra.items[extra.end..][0..extra.data.body_len]); @@ -428,11 +434,11 @@ const Writer = struct { w.indent += 2; try w.writeBody(s, body); w.indent = old_indent; - try s.writeByteNTimes(' ', w.indent); + try s.splatByteAll(' ', w.indent); try s.writeAll("}"); } - fn writeAggregateInit(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void { + fn writeAggregateInit(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { const zcu = w.pt.zcu; const ty_pl = w.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl; const vector_ty = ty_pl.ty.toType(); @@ -448,7 +454,7 @@ const Writer = struct { try s.writeAll("]"); } - fn writeUnionInit(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void { + fn writeUnionInit(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { const ty_pl = w.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl; const extra = 
w.air.extraData(Air.UnionInit, ty_pl.payload).data; @@ -456,7 +462,7 @@ const Writer = struct { try w.writeOperand(s, inst, 0, extra.init); } - fn writeStructField(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void { + fn writeStructField(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { const ty_pl = w.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl; const extra = w.air.extraData(Air.StructField, ty_pl.payload).data; @@ -464,7 +470,7 @@ const Writer = struct { try s.print(", {d}", .{extra.field_index}); } - fn writeTyPlBin(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void { + fn writeTyPlBin(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { const data = w.air.instructions.items(.data); const ty_pl = data[@intFromEnum(inst)].ty_pl; const extra = w.air.extraData(Air.Bin, ty_pl.payload).data; @@ -477,7 +483,7 @@ const Writer = struct { try w.writeOperand(s, inst, 1, extra.rhs); } - fn writeCmpxchg(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void { + fn writeCmpxchg(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { const ty_pl = w.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl; const extra = w.air.extraData(Air.Cmpxchg, ty_pl.payload).data; @@ -491,7 +497,7 @@ const Writer = struct { }); } - fn writeMulAdd(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void { + fn writeMulAdd(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { const pl_op = w.air.instructions.items(.data)[@intFromEnum(inst)].pl_op; const extra = w.air.extraData(Air.Bin, pl_op.payload).data; @@ -502,7 +508,7 @@ const Writer = struct { try w.writeOperand(s, inst, 2, pl_op.operand); } - fn writeShuffleOne(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void { + fn writeShuffleOne(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { const unwrapped = w.air.unwrapShuffleOne(w.pt.zcu, inst); try w.writeType(s, unwrapped.result_ty); 
try s.writeAll(", "); @@ -512,13 +518,13 @@ const Writer = struct { if (mask_idx > 0) try s.writeAll(", "); switch (mask_elem.unwrap()) { .elem => |idx| try s.print("elem {d}", .{idx}), - .value => |val| try s.print("val {}", .{Value.fromInterned(val).fmtValue(w.pt)}), + .value => |val| try s.print("val {f}", .{Value.fromInterned(val).fmtValue(w.pt)}), } } try s.writeByte(']'); } - fn writeShuffleTwo(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void { + fn writeShuffleTwo(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { const unwrapped = w.air.unwrapShuffleTwo(w.pt.zcu, inst); try w.writeType(s, unwrapped.result_ty); try s.writeAll(", "); @@ -537,7 +543,7 @@ const Writer = struct { try s.writeByte(']'); } - fn writeSelect(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void { + fn writeSelect(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { const zcu = w.pt.zcu; const pl_op = w.air.instructions.items(.data)[@intFromEnum(inst)].pl_op; const extra = w.air.extraData(Air.Bin, pl_op.payload).data; @@ -552,14 +558,14 @@ const Writer = struct { try w.writeOperand(s, inst, 2, extra.rhs); } - fn writeReduce(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void { + fn writeReduce(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { const reduce = w.air.instructions.items(.data)[@intFromEnum(inst)].reduce; try w.writeOperand(s, inst, 0, reduce.operand); try s.print(", {s}", .{@tagName(reduce.operation)}); } - fn writeCmpVector(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void { + fn writeCmpVector(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { const ty_pl = w.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl; const extra = w.air.extraData(Air.VectorCmp, ty_pl.payload).data; @@ -569,7 +575,7 @@ const Writer = struct { try w.writeOperand(s, inst, 1, extra.rhs); } - fn writeVectorStoreElem(w: *Writer, s: anytype, inst: Air.Inst.Index) 
@TypeOf(s).Error!void { + fn writeVectorStoreElem(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { const data = w.air.instructions.items(.data)[@intFromEnum(inst)].vector_store_elem; const extra = w.air.extraData(Air.VectorCmp, data.payload).data; @@ -580,21 +586,21 @@ const Writer = struct { try w.writeOperand(s, inst, 2, extra.rhs); } - fn writeRuntimeNavPtr(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void { + fn writeRuntimeNavPtr(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { const ip = &w.pt.zcu.intern_pool; const ty_nav = w.air.instructions.items(.data)[@intFromEnum(inst)].ty_nav; try w.writeType(s, .fromInterned(ty_nav.ty)); - try s.print(", '{}'", .{ip.getNav(ty_nav.nav).fqn.fmt(ip)}); + try s.print(", '{f}'", .{ip.getNav(ty_nav.nav).fqn.fmt(ip)}); } - fn writeAtomicLoad(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void { + fn writeAtomicLoad(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { const atomic_load = w.air.instructions.items(.data)[@intFromEnum(inst)].atomic_load; try w.writeOperand(s, inst, 0, atomic_load.ptr); try s.print(", {s}", .{@tagName(atomic_load.order)}); } - fn writePrefetch(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void { + fn writePrefetch(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { const prefetch = w.air.instructions.items(.data)[@intFromEnum(inst)].prefetch; try w.writeOperand(s, inst, 0, prefetch.ptr); @@ -605,10 +611,10 @@ const Writer = struct { fn writeAtomicStore( w: *Writer, - s: anytype, + s: *std.io.Writer, inst: Air.Inst.Index, order: std.builtin.AtomicOrder, - ) @TypeOf(s).Error!void { + ) Error!void { const bin_op = w.air.instructions.items(.data)[@intFromEnum(inst)].bin_op; try w.writeOperand(s, inst, 0, bin_op.lhs); try s.writeAll(", "); @@ -616,7 +622,7 @@ const Writer = struct { try s.print(", {s}", .{@tagName(order)}); } - fn writeAtomicRmw(w: *Writer, s: anytype, inst: Air.Inst.Index) 
@TypeOf(s).Error!void { + fn writeAtomicRmw(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { const pl_op = w.air.instructions.items(.data)[@intFromEnum(inst)].pl_op; const extra = w.air.extraData(Air.AtomicRmw, pl_op.payload).data; @@ -626,7 +632,7 @@ const Writer = struct { try s.print(", {s}, {s}", .{ @tagName(extra.op()), @tagName(extra.ordering()) }); } - fn writeFieldParentPtr(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void { + fn writeFieldParentPtr(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { const ty_pl = w.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl; const extra = w.air.extraData(Air.FieldParentPtr, ty_pl.payload).data; @@ -634,7 +640,7 @@ const Writer = struct { try s.print(", {d}", .{extra.field_index}); } - fn writeAssembly(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void { + fn writeAssembly(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { const ty_pl = w.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl; const extra = w.air.extraData(Air.Asm, ty_pl.payload); const is_volatile = @as(u1, @truncate(extra.data.flags >> 31)) != 0; @@ -704,22 +710,22 @@ const Writer = struct { } } const asm_source = std.mem.sliceAsBytes(w.air.extra.items[extra_i..])[0..extra.data.source_len]; - try s.print(", \"{}\"", .{std.zig.fmtEscapes(asm_source)}); + try s.print(", \"{f}\"", .{std.zig.fmtString(asm_source)}); } - fn writeDbgStmt(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void { + fn writeDbgStmt(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { const dbg_stmt = w.air.instructions.items(.data)[@intFromEnum(inst)].dbg_stmt; try s.print("{d}:{d}", .{ dbg_stmt.line + 1, dbg_stmt.column + 1 }); } - fn writeDbgVar(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void { + fn writeDbgVar(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { const pl_op = 
w.air.instructions.items(.data)[@intFromEnum(inst)].pl_op; try w.writeOperand(s, inst, 0, pl_op.operand); const name: Air.NullTerminatedString = @enumFromInt(pl_op.payload); - try s.print(", \"{}\"", .{std.zig.fmtEscapes(name.toSlice(w.air))}); + try s.print(", \"{f}\"", .{std.zig.fmtString(name.toSlice(w.air))}); } - fn writeCall(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void { + fn writeCall(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { const pl_op = w.air.instructions.items(.data)[@intFromEnum(inst)].pl_op; const extra = w.air.extraData(Air.Call, pl_op.payload); const args = @as([]const Air.Inst.Ref, @ptrCast(w.air.extra.items[extra.end..][0..extra.data.args_len])); @@ -732,19 +738,19 @@ const Writer = struct { try s.writeAll("]"); } - fn writeBr(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void { + fn writeBr(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { const br = w.air.instructions.items(.data)[@intFromEnum(inst)].br; try w.writeInstIndex(s, br.block_inst, false); try s.writeAll(", "); try w.writeOperand(s, inst, 0, br.operand); } - fn writeRepeat(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void { + fn writeRepeat(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { const repeat = w.air.instructions.items(.data)[@intFromEnum(inst)].repeat; try w.writeInstIndex(s, repeat.loop_inst, false); } - fn writeTry(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void { + fn writeTry(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { const pl_op = w.air.instructions.items(.data)[@intFromEnum(inst)].pl_op; const extra = w.air.extraData(Air.Try, pl_op.payload); const body: []const Air.Inst.Index = @ptrCast(w.air.extra.items[extra.end..][0..extra.data.body_len]); @@ -760,25 +766,25 @@ const Writer = struct { w.indent += 2; if (liveness_condbr.else_deaths.len != 0) { - try s.writeByteNTimes(' ', w.indent); + try s.splatByteAll(' ', 
w.indent); for (liveness_condbr.else_deaths, 0..) |operand, i| { if (i != 0) try s.writeAll(" "); - try s.print("{}!", .{operand}); + try s.print("{f}!", .{operand}); } try s.writeAll("\n"); } try w.writeBody(s, body); w.indent = old_indent; - try s.writeByteNTimes(' ', w.indent); + try s.splatByteAll(' ', w.indent); try s.writeAll("}"); for (liveness_condbr.then_deaths) |operand| { - try s.print(" {}!", .{operand}); + try s.print(" {f}!", .{operand}); } } - fn writeTryPtr(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void { + fn writeTryPtr(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { const ty_pl = w.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl; const extra = w.air.extraData(Air.TryPtr, ty_pl.payload); const body: []const Air.Inst.Index = @ptrCast(w.air.extra.items[extra.end..][0..extra.data.body_len]); @@ -797,25 +803,25 @@ const Writer = struct { w.indent += 2; if (liveness_condbr.else_deaths.len != 0) { - try s.writeByteNTimes(' ', w.indent); + try s.splatByteAll(' ', w.indent); for (liveness_condbr.else_deaths, 0..) 
|operand, i| { if (i != 0) try s.writeAll(" "); - try s.print("{}!", .{operand}); + try s.print("{f}!", .{operand}); } try s.writeAll("\n"); } try w.writeBody(s, body); w.indent = old_indent; - try s.writeByteNTimes(' ', w.indent); + try s.splatByteAll(' ', w.indent); try s.writeAll("}"); for (liveness_condbr.then_deaths) |operand| { - try s.print(" {}!", .{operand}); + try s.print(" {f}!", .{operand}); } } - fn writeCondBr(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void { + fn writeCondBr(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { const pl_op = w.air.instructions.items(.data)[@intFromEnum(inst)].pl_op; const extra = w.air.extraData(Air.CondBr, pl_op.payload); const then_body: []const Air.Inst.Index = @ptrCast(w.air.extra.items[extra.end..][0..extra.data.then_body_len]); @@ -839,16 +845,16 @@ const Writer = struct { w.indent += 2; if (liveness_condbr.then_deaths.len != 0) { - try s.writeByteNTimes(' ', w.indent); + try s.splatByteAll(' ', w.indent); for (liveness_condbr.then_deaths, 0..) |operand, i| { if (i != 0) try s.writeAll(" "); - try s.print("{}!", .{operand}); + try s.print("{f}!", .{operand}); } try s.writeAll("\n"); } try w.writeBody(s, then_body); - try s.writeByteNTimes(' ', old_indent); + try s.splatByteAll(' ', old_indent); try s.writeAll("},"); if (extra.data.branch_hints.false != .none) { try s.print(" {s}", .{@tagName(extra.data.branch_hints.false)}); @@ -859,10 +865,10 @@ const Writer = struct { try s.writeAll(" {\n"); if (liveness_condbr.else_deaths.len != 0) { - try s.writeByteNTimes(' ', w.indent); + try s.splatByteAll(' ', w.indent); for (liveness_condbr.else_deaths, 0..) 
|operand, i| { if (i != 0) try s.writeAll(" "); - try s.print("{}!", .{operand}); + try s.print("{f}!", .{operand}); } try s.writeAll("\n"); } @@ -870,11 +876,11 @@ const Writer = struct { try w.writeBody(s, else_body); w.indent = old_indent; - try s.writeByteNTimes(' ', old_indent); + try s.splatByteAll(' ', old_indent); try s.writeAll("}"); } - fn writeSwitchBr(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void { + fn writeSwitchBr(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { const switch_br = w.air.unwrapSwitch(inst); const liveness: Air.Liveness.SwitchBrTable = if (w.liveness) |liveness| @@ -916,17 +922,17 @@ const Writer = struct { const deaths = liveness.deaths[case.idx]; if (deaths.len != 0) { - try s.writeByteNTimes(' ', w.indent); + try s.splatByteAll(' ', w.indent); for (deaths, 0..) |operand, i| { if (i != 0) try s.writeAll(" "); - try s.print("{}!", .{operand}); + try s.print("{f}!", .{operand}); } try s.writeAll("\n"); } try w.writeBody(s, case.body); w.indent -= 2; - try s.writeByteNTimes(' ', w.indent); + try s.splatByteAll(' ', w.indent); try s.writeAll("}"); } @@ -942,47 +948,47 @@ const Writer = struct { const deaths = liveness.deaths[liveness.deaths.len - 1]; if (deaths.len != 0) { - try s.writeByteNTimes(' ', w.indent); + try s.splatByteAll(' ', w.indent); for (deaths, 0..) 
|operand, i| { if (i != 0) try s.writeAll(" "); - try s.print("{}!", .{operand}); + try s.print("{f}!", .{operand}); } try s.writeAll("\n"); } try w.writeBody(s, else_body); w.indent -= 2; - try s.writeByteNTimes(' ', w.indent); + try s.splatByteAll(' ', w.indent); try s.writeAll("}"); } try s.writeAll("\n"); - try s.writeByteNTimes(' ', old_indent); + try s.splatByteAll(' ', old_indent); } - fn writeWasmMemorySize(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void { + fn writeWasmMemorySize(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { const pl_op = w.air.instructions.items(.data)[@intFromEnum(inst)].pl_op; try s.print("{d}", .{pl_op.payload}); } - fn writeWasmMemoryGrow(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void { + fn writeWasmMemoryGrow(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { const pl_op = w.air.instructions.items(.data)[@intFromEnum(inst)].pl_op; try s.print("{d}, ", .{pl_op.payload}); try w.writeOperand(s, inst, 0, pl_op.operand); } - fn writeWorkDimension(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void { + fn writeWorkDimension(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { const pl_op = w.air.instructions.items(.data)[@intFromEnum(inst)].pl_op; try s.print("{d}", .{pl_op.payload}); } fn writeOperand( w: *Writer, - s: anytype, + s: *std.io.Writer, inst: Air.Inst.Index, op_index: usize, operand: Air.Inst.Ref, - ) @TypeOf(s).Error!void { + ) Error!void { const small_tomb_bits = Air.Liveness.bpi - 1; const dies = if (w.liveness) |liveness| blk: { if (op_index < small_tomb_bits) @@ -1004,16 +1010,16 @@ const Writer = struct { fn writeInstRef( w: *Writer, - s: anytype, + s: *std.io.Writer, operand: Air.Inst.Ref, dies: bool, - ) @TypeOf(s).Error!void { + ) Error!void { if (@intFromEnum(operand) < InternPool.static_len) { return s.print("@{}", .{operand}); } else if (operand.toInterned()) |ip_index| { const pt = w.pt; const ty = 
Type.fromInterned(pt.zcu.intern_pool.indexToKey(ip_index).typeOf()); - try s.print("<{}, {}>", .{ + try s.print("<{f}, {f}>", .{ ty.fmt(pt), Value.fromInterned(ip_index).fmtValue(pt), }); @@ -1024,12 +1030,12 @@ const Writer = struct { fn writeInstIndex( w: *Writer, - s: anytype, + s: *std.io.Writer, inst: Air.Inst.Index, dies: bool, - ) @TypeOf(s).Error!void { + ) Error!void { _ = w; - try s.print("{}", .{inst}); + try s.print("{f}", .{inst}); if (dies) try s.writeByte('!'); } diff --git a/src/Builtin.zig b/src/Builtin.zig index d68d49c253..b2cb603f53 100644 --- a/src/Builtin.zig +++ b/src/Builtin.zig @@ -51,60 +51,60 @@ pub fn append(opts: @This(), buffer: *std.ArrayList(u8)) Allocator.Error!void { const zig_backend = opts.zig_backend; @setEvalBranchQuota(4000); - try buffer.writer().print( + try buffer.print( \\const std = @import("std"); \\/// Zig version. When writing code that supports multiple versions of Zig, prefer \\/// feature detection (i.e. with `@hasDecl` or `@hasField`) over version checks. 
\\pub const zig_version = std.SemanticVersion.parse(zig_version_string) catch unreachable; \\pub const zig_version_string = "{s}"; - \\pub const zig_backend = std.builtin.CompilerBackend.{p_}; + \\pub const zig_backend = std.builtin.CompilerBackend.{f}; \\ - \\pub const output_mode: std.builtin.OutputMode = .{p_}; - \\pub const link_mode: std.builtin.LinkMode = .{p_}; - \\pub const unwind_tables: std.builtin.UnwindTables = .{p_}; + \\pub const output_mode: std.builtin.OutputMode = .{f}; + \\pub const link_mode: std.builtin.LinkMode = .{f}; + \\pub const unwind_tables: std.builtin.UnwindTables = .{f}; \\pub const is_test = {}; \\pub const single_threaded = {}; - \\pub const abi: std.Target.Abi = .{p_}; + \\pub const abi: std.Target.Abi = .{f}; \\pub const cpu: std.Target.Cpu = .{{ - \\ .arch = .{p_}, - \\ .model = &std.Target.{p_}.cpu.{p_}, - \\ .features = std.Target.{p_}.featureSet(&.{{ + \\ .arch = .{f}, + \\ .model = &std.Target.{f}.cpu.{f}, + \\ .features = std.Target.{f}.featureSet(&.{{ \\ , .{ build_options.version, - std.zig.fmtId(@tagName(zig_backend)), - std.zig.fmtId(@tagName(opts.output_mode)), - std.zig.fmtId(@tagName(opts.link_mode)), - std.zig.fmtId(@tagName(opts.unwind_tables)), + std.zig.fmtIdPU(@tagName(zig_backend)), + std.zig.fmtIdPU(@tagName(opts.output_mode)), + std.zig.fmtIdPU(@tagName(opts.link_mode)), + std.zig.fmtIdPU(@tagName(opts.unwind_tables)), opts.is_test, opts.single_threaded, - std.zig.fmtId(@tagName(target.abi)), - std.zig.fmtId(@tagName(target.cpu.arch)), - std.zig.fmtId(arch_family_name), - std.zig.fmtId(target.cpu.model.name), - std.zig.fmtId(arch_family_name), + std.zig.fmtIdPU(@tagName(target.abi)), + std.zig.fmtIdPU(@tagName(target.cpu.arch)), + std.zig.fmtIdPU(arch_family_name), + std.zig.fmtIdPU(target.cpu.model.name), + std.zig.fmtIdPU(arch_family_name), }); for (target.cpu.arch.allFeaturesList(), 0..) 
|feature, index_usize| { const index = @as(std.Target.Cpu.Feature.Set.Index, @intCast(index_usize)); const is_enabled = target.cpu.features.isEnabled(index); if (is_enabled) { - try buffer.writer().print(" .{p_},\n", .{std.zig.fmtId(feature.name)}); + try buffer.print(" .{f},\n", .{std.zig.fmtIdPU(feature.name)}); } } - try buffer.writer().print( + try buffer.print( \\ }}), \\}}; \\pub const os: std.Target.Os = .{{ - \\ .tag = .{p_}, + \\ .tag = .{f}, \\ .version_range = .{{ , - .{std.zig.fmtId(@tagName(target.os.tag))}, + .{std.zig.fmtIdPU(@tagName(target.os.tag))}, ); switch (target.os.versionRange()) { .none => try buffer.appendSlice(" .none = {} },\n"), - .semver => |semver| try buffer.writer().print( + .semver => |semver| try buffer.print( \\ .semver = .{{ \\ .min = .{{ \\ .major = {}, @@ -127,7 +127,7 @@ pub fn append(opts: @This(), buffer: *std.ArrayList(u8)) Allocator.Error!void { semver.max.minor, semver.max.patch, }), - .linux => |linux| try buffer.writer().print( + .linux => |linux| try buffer.print( \\ .linux = .{{ \\ .range = .{{ \\ .min = .{{ @@ -164,7 +164,7 @@ pub fn append(opts: @This(), buffer: *std.ArrayList(u8)) Allocator.Error!void { linux.android, }), - .hurd => |hurd| try buffer.writer().print( + .hurd => |hurd| try buffer.print( \\ .hurd = .{{ \\ .range = .{{ \\ .min = .{{ @@ -198,10 +198,10 @@ pub fn append(opts: @This(), buffer: *std.ArrayList(u8)) Allocator.Error!void { hurd.glibc.minor, hurd.glibc.patch, }), - .windows => |windows| try buffer.writer().print( + .windows => |windows| try buffer.print( \\ .windows = .{{ - \\ .min = {c}, - \\ .max = {c}, + \\ .min = {f}, + \\ .max = {f}, \\ }}}}, \\ , .{ windows.min, windows.max }), @@ -217,7 +217,7 @@ pub fn append(opts: @This(), buffer: *std.ArrayList(u8)) Allocator.Error!void { ); if (target.dynamic_linker.get()) |dl| { - try buffer.writer().print( + try buffer.print( \\ .dynamic_linker = .init("{s}"), \\}}; \\ @@ -237,9 +237,9 @@ pub fn append(opts: @This(), buffer: *std.ArrayList(u8)) 
Allocator.Error!void { // knows libc will provide it, and likewise c.zig will not export memcpy. const link_libc = opts.link_libc; - try buffer.writer().print( - \\pub const object_format: std.Target.ObjectFormat = .{p_}; - \\pub const mode: std.builtin.OptimizeMode = .{p_}; + try buffer.print( + \\pub const object_format: std.Target.ObjectFormat = .{f}; + \\pub const mode: std.builtin.OptimizeMode = .{f}; \\pub const link_libc = {}; \\pub const link_libcpp = {}; \\pub const have_error_return_tracing = {}; @@ -249,12 +249,12 @@ pub fn append(opts: @This(), buffer: *std.ArrayList(u8)) Allocator.Error!void { \\pub const position_independent_code = {}; \\pub const position_independent_executable = {}; \\pub const strip_debug_info = {}; - \\pub const code_model: std.builtin.CodeModel = .{p_}; + \\pub const code_model: std.builtin.CodeModel = .{f}; \\pub const omit_frame_pointer = {}; \\ , .{ - std.zig.fmtId(@tagName(target.ofmt)), - std.zig.fmtId(@tagName(opts.optimize_mode)), + std.zig.fmtIdPU(@tagName(target.ofmt)), + std.zig.fmtIdPU(@tagName(opts.optimize_mode)), link_libc, opts.link_libcpp, opts.error_tracing, @@ -264,15 +264,15 @@ pub fn append(opts: @This(), buffer: *std.ArrayList(u8)) Allocator.Error!void { opts.pic, opts.pie, opts.strip, - std.zig.fmtId(@tagName(opts.code_model)), + std.zig.fmtIdPU(@tagName(opts.code_model)), opts.omit_frame_pointer, }); if (target.os.tag == .wasi) { - try buffer.writer().print( - \\pub const wasi_exec_model: std.builtin.WasiExecModel = .{p_}; + try buffer.print( + \\pub const wasi_exec_model: std.builtin.WasiExecModel = .{f}; \\ - , .{std.zig.fmtId(@tagName(opts.wasi_exec_model))}); + , .{std.zig.fmtIdPU(@tagName(opts.wasi_exec_model))}); } if (opts.is_test) { @@ -317,7 +317,7 @@ pub fn updateFileOnDisk(file: *File, comp: *Compilation) !void { if (root_dir.statFile(sub_path)) |stat| { if (stat.size != file.source.?.len) { std.log.warn( - "the cached file '{}' had the wrong size. Expected {d}, found {d}. 
" ++ + "the cached file '{f}' had the wrong size. Expected {d}, found {d}. " ++ "Overwriting with correct file contents now", .{ file.path.fmt(comp), file.source.?.len, stat.size }, ); diff --git a/src/Compilation.zig b/src/Compilation.zig index c85fb9608e..85184ab913 100644 --- a/src/Compilation.zig +++ b/src/Compilation.zig @@ -399,9 +399,7 @@ pub const Path = struct { const Formatter = struct { p: Path, comp: *Compilation, - pub fn format(f: Formatter, comptime unused_fmt: []const u8, options: std.fmt.FormatOptions, w: anytype) !void { - comptime assert(unused_fmt.len == 0); - _ = options; + pub fn format(f: Formatter, w: *std.io.Writer) std.io.Writer.Error!void { const root_path: []const u8 = switch (f.p.root) { .zig_lib => f.comp.dirs.zig_lib.path orelse ".", .global_cache => f.comp.dirs.global_cache.path orelse ".", @@ -730,10 +728,10 @@ pub const Directories = struct { }; if (std.mem.eql(u8, zig_lib.path orelse "", global_cache.path orelse "")) { - fatal("zig lib directory '{}' cannot be equal to global cache directory '{}'", .{ zig_lib, global_cache }); + fatal("zig lib directory '{f}' cannot be equal to global cache directory '{f}'", .{ zig_lib, global_cache }); } if (std.mem.eql(u8, zig_lib.path orelse "", local_cache.path orelse "")) { - fatal("zig lib directory '{}' cannot be equal to local cache directory '{}'", .{ zig_lib, local_cache }); + fatal("zig lib directory '{f}' cannot be equal to local cache directory '{f}'", .{ zig_lib, local_cache }); } return .{ @@ -1001,7 +999,7 @@ pub const CObject = struct { var line = std.ArrayList(u8).init(eb.gpa); defer line.deinit(); - file.reader().readUntilDelimiterArrayList(&line, '\n', 1 << 10) catch break :source_line 0; + file.deprecatedReader().readUntilDelimiterArrayList(&line, '\n', 1 << 10) catch break :source_line 0; break :source_line try eb.addString(line.items); }; @@ -1069,7 +1067,7 @@ pub const CObject = struct { const file = try std.fs.cwd().openFile(path, .{}); defer file.close(); - var br = 
std.io.bufferedReader(file.reader()); + var br = std.io.bufferedReader(file.deprecatedReader()); const reader = br.reader(); var bc = std.zig.llvm.BitcodeReader.init(gpa, .{ .reader = reader.any() }); defer bc.deinit(); @@ -1875,7 +1873,7 @@ pub fn create(gpa: Allocator, arena: Allocator, options: CreateOptions) !*Compil if (options.root_mod.resolved_target.llvm_cpu_features) |cf| print: { std.debug.lockStdErr(); defer std.debug.unlockStdErr(); - const stderr = std.io.getStdErr().writer(); + const stderr = std.fs.File.stderr().deprecatedWriter(); nosuspend { stderr.print("compilation: {s}\n", .{options.root_name}) catch break :print; stderr.print(" target: {s}\n", .{try target.zigTriple(arena)}) catch break :print; @@ -2689,7 +2687,7 @@ pub fn update(comp: *Compilation, main_progress_node: std.Progress.Node) !void { const is_hit = man.hit() catch |err| switch (err) { error.CacheCheckFailed => switch (man.diagnostic) { .none => unreachable, - .manifest_create, .manifest_read, .manifest_lock, .manifest_seek => |e| return comp.setMiscFailure( + .manifest_create, .manifest_read, .manifest_lock => |e| return comp.setMiscFailure( .check_whole_cache, "failed to check cache: {s} {s}", .{ @tagName(man.diagnostic), @errorName(e) }, @@ -2699,7 +2697,7 @@ pub fn update(comp: *Compilation, main_progress_node: std.Progress.Node) !void { const prefix = man.cache.prefixes()[pp.prefix]; return comp.setMiscFailure( .check_whole_cache, - "failed to check cache: '{}{s}' {s} {s}", + "failed to check cache: '{f}{s}' {s} {s}", .{ prefix, pp.sub_path, @tagName(man.diagnostic), @errorName(op.err) }, ); }, @@ -2916,7 +2914,7 @@ pub fn update(comp: *Compilation, main_progress_node: std.Progress.Node) !void { renameTmpIntoCache(comp.dirs.local_cache, tmp_dir_sub_path, o_sub_path) catch |err| { return comp.setMiscFailure( .rename_results, - "failed to rename compilation results ('{}{s}') into local cache ('{}{s}'): {s}", + "failed to rename compilation results ('{f}{s}') into local cache 
('{f}{s}'): {s}", .{ comp.dirs.local_cache, tmp_dir_sub_path, comp.dirs.local_cache, o_sub_path, @@ -2983,7 +2981,7 @@ pub fn appendFileSystemInput(comp: *Compilation, path: Compilation.Path) Allocat break @intCast(i); } } else std.debug.panic( - "missing prefix directory '{s}' ('{}') for '{s}'", + "missing prefix directory '{s}' ('{f}') for '{s}'", .{ @tagName(path.root), want_prefix_dir, path.sub_path }, ); @@ -3322,7 +3320,7 @@ fn emitFromCObject( emit_path.root_dir.handle, emit_path.sub_path, .{}, - ) catch |err| log.err("unable to copy '{}' to '{}': {s}", .{ + ) catch |err| log.err("unable to copy '{f}' to '{f}': {s}", .{ src_path, emit_path, @errorName(err), @@ -3670,7 +3668,7 @@ pub fn getAllErrorsAlloc(comp: *Compilation) !ErrorBundle { .illegal_zig_import => try bundle.addString("this compiler implementation does not allow importing files from this directory"), }, .src_loc = try bundle.addSourceLocation(.{ - .src_path = try bundle.printString("{}", .{file.path.fmt(comp)}), + .src_path = try bundle.printString("{f}", .{file.path.fmt(comp)}), .span_start = start, .span_main = start, .span_end = @intCast(end), @@ -3717,7 +3715,7 @@ pub fn getAllErrorsAlloc(comp: *Compilation) !ErrorBundle { assert(!is_retryable); // AstGen/ZoirGen succeeded with errors. Note that this may include AST errors. _ = try file.getTree(zcu); // Tree must be loaded. 
- const path = try std.fmt.allocPrint(gpa, "{}", .{file.path.fmt(comp)}); + const path = try std.fmt.allocPrint(gpa, "{f}", .{file.path.fmt(comp)}); defer gpa.free(path); if (file.zir != null) { try bundle.addZirErrorMessages(file.zir.?, file.tree.?, file.source.?, path); @@ -3772,9 +3770,8 @@ pub fn getAllErrorsAlloc(comp: *Compilation) !ErrorBundle { if (!refs.contains(anal_unit)) continue; } - std.log.scoped(.zcu).debug("analysis error '{s}' reported from unit '{}'", .{ - error_msg.msg, - zcu.fmtAnalUnit(anal_unit), + std.log.scoped(.zcu).debug("analysis error '{s}' reported from unit '{f}'", .{ + error_msg.msg, zcu.fmtAnalUnit(anal_unit), }); try addModuleErrorMsg(zcu, &bundle, error_msg.*, added_any_analysis_error); @@ -3932,11 +3929,11 @@ pub fn getAllErrorsAlloc(comp: *Compilation) !ErrorBundle { // This AU is referenced and has a transitive compile error, meaning it referenced something with a compile error. // However, we haven't reported any such error. // This is a compiler bug. 
- const stderr = std.io.getStdErr().writer(); + const stderr = std.fs.File.stderr().deprecatedWriter(); try stderr.writeAll("referenced transitive analysis errors, but none actually emitted\n"); - try stderr.print("{} [transitive failure]\n", .{zcu.fmtAnalUnit(failed_unit)}); + try stderr.print("{f} [transitive failure]\n", .{zcu.fmtAnalUnit(failed_unit)}); while (ref) |r| { - try stderr.print("referenced by: {}{s}\n", .{ + try stderr.print("referenced by: {f}{s}\n", .{ zcu.fmtAnalUnit(r.referencer), if (zcu.transitive_failed_analysis.contains(r.referencer)) " [transitive failure]" else "", }); @@ -4035,7 +4032,7 @@ pub fn addModuleErrorMsg( const err_src_loc = module_err_msg.src_loc.upgrade(zcu); const err_source = err_src_loc.file_scope.getSource(zcu) catch |err| { try eb.addRootErrorMessage(.{ - .msg = try eb.printString("unable to load '{}': {s}", .{ + .msg = try eb.printString("unable to load '{f}': {s}", .{ err_src_loc.file_scope.path.fmt(zcu.comp), @errorName(err), }), }); @@ -4098,7 +4095,7 @@ pub fn addModuleErrorMsg( } const src_loc = try eb.addSourceLocation(.{ - .src_path = try eb.printString("{}", .{err_src_loc.file_scope.path.fmt(zcu.comp)}), + .src_path = try eb.printString("{f}", .{err_src_loc.file_scope.path.fmt(zcu.comp)}), .span_start = err_span.start, .span_main = err_span.main, .span_end = err_span.end, @@ -4130,7 +4127,7 @@ pub fn addModuleErrorMsg( const gop = try notes.getOrPutContext(gpa, .{ .msg = try eb.addString(module_note.msg), .src_loc = try eb.addSourceLocation(.{ - .src_path = try eb.printString("{}", .{note_src_loc.file_scope.path.fmt(zcu.comp)}), + .src_path = try eb.printString("{f}", .{note_src_loc.file_scope.path.fmt(zcu.comp)}), .span_start = span.start, .span_main = span.main, .span_end = span.end, @@ -4175,7 +4172,7 @@ fn addReferenceTraceFrame( try ref_traces.append(gpa, .{ .decl_name = try eb.printString("{s}{s}", .{ name, if (inlined) " [inlined]" else "" }), .src_loc = try eb.addSourceLocation(.{ - .src_path = try 
eb.printString("{}", .{src.file_scope.path.fmt(zcu.comp)}), + .src_path = try eb.printString("{f}", .{src.file_scope.path.fmt(zcu.comp)}), .span_start = span.start, .span_main = span.main, .span_end = span.end, @@ -4836,7 +4833,7 @@ fn docsCopyFallible(comp: *Compilation) anyerror!void { var out_dir = docs_path.root_dir.handle.makeOpenPath(docs_path.sub_path, .{}) catch |err| { return comp.lockAndSetMiscFailure( .docs_copy, - "unable to create output directory '{}': {s}", + "unable to create output directory '{f}': {s}", .{ docs_path, @errorName(err) }, ); }; @@ -4856,7 +4853,7 @@ fn docsCopyFallible(comp: *Compilation) anyerror!void { var tar_file = out_dir.createFile("sources.tar", .{}) catch |err| { return comp.lockAndSetMiscFailure( .docs_copy, - "unable to create '{}/sources.tar': {s}", + "unable to create '{f}/sources.tar': {s}", .{ docs_path, @errorName(err) }, ); }; @@ -4885,7 +4882,7 @@ fn docsCopyModule(comp: *Compilation, module: *Package.Module, name: []const u8, const root_dir, const sub_path = root.openInfo(comp.dirs); break :d root_dir.openDir(sub_path, .{ .iterate = true }); } catch |err| { - return comp.lockAndSetMiscFailure(.docs_copy, "unable to open directory '{}': {s}", .{ + return comp.lockAndSetMiscFailure(.docs_copy, "unable to open directory '{f}': {s}", .{ root.fmt(comp), @errorName(err), }); }; @@ -4894,7 +4891,7 @@ fn docsCopyModule(comp: *Compilation, module: *Package.Module, name: []const u8, var walker = try mod_dir.walk(comp.gpa); defer walker.deinit(); - var archiver = std.tar.writer(tar_file.writer().any()); + var archiver = std.tar.writer(tar_file.deprecatedWriter().any()); archiver.prefix = name; while (try walker.next()) |entry| { @@ -4907,13 +4904,13 @@ fn docsCopyModule(comp: *Compilation, module: *Package.Module, name: []const u8, else => continue, } var file = mod_dir.openFile(entry.path, .{}) catch |err| { - return comp.lockAndSetMiscFailure(.docs_copy, "unable to open '{}{s}': {s}", .{ + return 
comp.lockAndSetMiscFailure(.docs_copy, "unable to open '{f}{s}': {s}", .{ root.fmt(comp), entry.path, @errorName(err), }); }; defer file.close(); archiver.writeFile(entry.path, file) catch |err| { - return comp.lockAndSetMiscFailure(.docs_copy, "unable to archive '{}{s}': {s}", .{ + return comp.lockAndSetMiscFailure(.docs_copy, "unable to archive '{f}{s}': {s}", .{ root.fmt(comp), entry.path, @errorName(err), }); }; @@ -5043,7 +5040,7 @@ fn workerDocsWasmFallible(comp: *Compilation, prog_node: std.Progress.Node) anye var out_dir = docs_path.root_dir.handle.makeOpenPath(docs_path.sub_path, .{}) catch |err| { return comp.lockAndSetMiscFailure( .docs_copy, - "unable to create output directory '{}': {s}", + "unable to create output directory '{f}': {s}", .{ docs_path, @errorName(err) }, ); }; @@ -5055,10 +5052,8 @@ fn workerDocsWasmFallible(comp: *Compilation, prog_node: std.Progress.Node) anye "main.wasm", .{}, ) catch |err| { - return comp.lockAndSetMiscFailure(.docs_copy, "unable to copy '{}' to '{}': {s}", .{ - crt_file.full_object_path, - docs_path, - @errorName(err), + return comp.lockAndSetMiscFailure(.docs_copy, "unable to copy '{f}' to '{f}': {s}", .{ + crt_file.full_object_path, docs_path, @errorName(err), }); }; } @@ -5131,7 +5126,7 @@ fn workerUpdateBuiltinFile(comp: *Compilation, file: *Zcu.File) void { defer comp.mutex.unlock(); comp.setMiscFailure( .write_builtin_zig, - "unable to write '{}': {s}", + "unable to write '{f}': {s}", .{ file.path.fmt(comp), @errorName(err) }, ); }; @@ -5852,7 +5847,7 @@ fn updateCObject(comp: *Compilation, c_object: *CObject, c_obj_prog_node: std.Pr try child.spawn(); - const stderr = try child.stderr.?.reader().readAllAlloc(arena, std.math.maxInt(usize)); + const stderr = try child.stderr.?.deprecatedReader().readAllAlloc(arena, std.math.maxInt(usize)); const term = child.wait() catch |err| { return comp.failCObj(c_object, "failed to spawn zig clang {s}: {s}", .{ argv.items[0], @errorName(err) }); @@ -6012,9 +6007,7 @@ fn 
updateWin32Resource(comp: *Compilation, win32_resource: *Win32Resource, win32 // In .rc files, a " within a quoted string is escaped as "" const fmtRcEscape = struct { - fn formatRcEscape(bytes: []const u8, comptime fmt: []const u8, options: std.fmt.FormatOptions, writer: anytype) !void { - _ = fmt; - _ = options; + fn formatRcEscape(bytes: []const u8, writer: *std.io.Writer) std.io.Writer.Error!void { for (bytes) |byte| switch (byte) { '"' => try writer.writeAll("\"\""), '\\' => try writer.writeAll("\\\\"), @@ -6022,7 +6015,7 @@ fn updateWin32Resource(comp: *Compilation, win32_resource: *Win32Resource, win32 }; } - pub fn fmtRcEscape(bytes: []const u8) std.fmt.Formatter(formatRcEscape) { + pub fn fmtRcEscape(bytes: []const u8) std.fmt.Formatter([]const u8, formatRcEscape) { return .{ .data = bytes }; } }.fmtRcEscape; @@ -6036,7 +6029,9 @@ fn updateWin32Resource(comp: *Compilation, win32_resource: *Win32Resource, win32 // 24 is RT_MANIFEST const resource_type = 24; - const input = try std.fmt.allocPrint(arena, "{} {} \"{s}\"", .{ resource_id, resource_type, fmtRcEscape(src_path) }); + const input = try std.fmt.allocPrint(arena, "{d} {d} \"{f}\"", .{ + resource_id, resource_type, fmtRcEscape(src_path), + }); try o_dir.writeFile(.{ .sub_path = rc_basename, .data = input }); @@ -6251,7 +6246,7 @@ fn spawnZigRc( } // Just in case there's a failure that didn't send an ErrorBundle (e.g. an error return trace) - const stderr_reader = child.stderr.?.reader(); + const stderr_reader = child.stderr.?.deprecatedReader(); const stderr = try stderr_reader.readAllAlloc(arena, 10 * 1024 * 1024); const term = child.wait() catch |err| { @@ -7214,7 +7209,7 @@ pub fn lockAndSetMiscFailure( pub fn dump_argv(argv: []const []const u8) void { std.debug.lockStdErr(); defer std.debug.unlockStdErr(); - const stderr = std.io.getStdErr().writer(); + const stderr = std.fs.File.stderr().deprecatedWriter(); for (argv[0 .. 
argv.len - 1]) |arg| { nosuspend stderr.print("{s} ", .{arg}) catch return; } diff --git a/src/IncrementalDebugServer.zig b/src/IncrementalDebugServer.zig index 531b71b4e8..e7c7461e5d 100644 --- a/src/IncrementalDebugServer.zig +++ b/src/IncrementalDebugServer.zig @@ -142,8 +142,8 @@ fn handleCommand(zcu: *Zcu, output: *std.ArrayListUnmanaged(u8), cmd_str: []cons const create_gen = zcu.incremental_debug_state.navs.get(nav_index) orelse return w.writeAll("unknown nav index"); const nav = ip.getNav(nav_index); try w.print( - \\name: '{}' - \\fqn: '{}' + \\name: '{f}' + \\fqn: '{f}' \\status: {s} \\created on generation: {d} \\ @@ -234,7 +234,7 @@ fn handleCommand(zcu: *Zcu, output: *std.ArrayListUnmanaged(u8), cmd_str: []cons for (unit_info.deps.items, 0..) |dependee, i| { try w.print("[{d}] ", .{i}); switch (dependee) { - .src_hash, .namespace, .namespace_name, .zon_file, .embed_file => try w.print("{}", .{zcu.fmtDependee(dependee)}), + .src_hash, .namespace, .namespace_name, .zon_file, .embed_file => try w.print("{f}", .{zcu.fmtDependee(dependee)}), .nav_val, .nav_ty => |nav| try w.print("{s} {d}", .{ @tagName(dependee), @intFromEnum(nav) }), .interned => |ip_index| switch (ip.indexToKey(ip_index)) { .struct_type, .union_type, .enum_type => try w.print("type {d}", .{@intFromEnum(ip_index)}), @@ -260,7 +260,7 @@ fn handleCommand(zcu: *Zcu, output: *std.ArrayListUnmanaged(u8), cmd_str: []cons const ip_index: InternPool.Index = @enumFromInt(parseIndex(arg_str) orelse return w.writeAll("malformed ip index")); const create_gen = zcu.incremental_debug_state.types.get(ip_index) orelse return w.writeAll("unknown type"); try w.print( - \\name: '{}' + \\name: '{f}' \\created on generation: {d} \\ , .{ @@ -365,7 +365,7 @@ fn printType(ty: Type, zcu: *const Zcu, w: anytype) !void { .union_type, .enum_type, .opaque_type, - => try w.print("{}[{d}]", .{ ty.containerTypeName(ip).fmt(ip), @intFromEnum(ty.toIntern()) }), + => try w.print("{f}[{d}]", .{ 
ty.containerTypeName(ip).fmt(ip), @intFromEnum(ty.toIntern()) }), else => unreachable, } diff --git a/src/InternPool.zig b/src/InternPool.zig index 0b92230bce..8471a1ad9e 100644 --- a/src/InternPool.zig +++ b/src/InternPool.zig @@ -1881,23 +1881,23 @@ pub const NullTerminatedString = enum(u32) { const FormatData = struct { string: NullTerminatedString, ip: *const InternPool, + id: bool, }; - fn format( - data: FormatData, - comptime specifier: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { + fn format(data: FormatData, writer: *std.io.Writer) std.io.Writer.Error!void { const slice = data.string.toSlice(data.ip); - if (comptime std.mem.eql(u8, specifier, "")) { + if (!data.id) { try writer.writeAll(slice); - } else if (comptime std.mem.eql(u8, specifier, "i")) { - try writer.print("{p}", .{std.zig.fmtId(slice)}); - } else @compileError("invalid format string '" ++ specifier ++ "' for '" ++ @typeName(NullTerminatedString) ++ "'"); + } else { + try writer.print("{f}", .{std.zig.fmtIdP(slice)}); + } } - pub fn fmt(string: NullTerminatedString, ip: *const InternPool) std.fmt.Formatter(format) { - return .{ .data = .{ .string = string, .ip = ip } }; + pub fn fmt(string: NullTerminatedString, ip: *const InternPool) std.fmt.Formatter(FormatData, format) { + return .{ .data = .{ .string = string, .ip = ip, .id = false } }; + } + + pub fn fmtId(string: NullTerminatedString, ip: *const InternPool) std.fmt.Formatter(FormatData, format) { + return .{ .data = .{ .string = string, .ip = ip, .id = true } }; } const debug_state = InternPool.debug_state; @@ -9750,7 +9750,7 @@ fn finishFuncInstance( const fn_namespace = fn_owner_nav.analysis.?.namespace; // TODO: improve this name - const nav_name = try ip.getOrPutStringFmt(gpa, tid, "{}__anon_{d}", .{ + const nav_name = try ip.getOrPutStringFmt(gpa, tid, "{f}__anon_{d}", .{ fn_owner_nav.name.fmt(ip), @intFromEnum(func_index), }, .no_embedded_nulls); const nav_index = try ip.createNav(gpa, 
tid, .{ @@ -11259,8 +11259,9 @@ fn dumpStatsFallible(ip: *const InternPool, arena: Allocator) anyerror!void { } fn dumpAllFallible(ip: *const InternPool) anyerror!void { - var bw = std.io.bufferedWriter(std.io.getStdErr().writer()); - const w = bw.writer(); + var buffer: [4096]u8 = undefined; + const stderr_bw = std.debug.lockStderrWriter(&buffer); + defer std.debug.unlockStderrWriter(); for (ip.locals, 0..) |*local, tid| { const items = local.shared.items.view(); for ( @@ -11269,12 +11270,12 @@ fn dumpAllFallible(ip: *const InternPool) anyerror!void { 0.., ) |tag, data, index| { const i = Index.Unwrapped.wrap(.{ .tid = @enumFromInt(tid), .index = @intCast(index) }, ip); - try w.print("${d} = {s}(", .{ i, @tagName(tag) }); + try stderr_bw.print("${d} = {s}(", .{ i, @tagName(tag) }); switch (tag) { .removed => {}, - .simple_type => try w.print("{s}", .{@tagName(@as(SimpleType, @enumFromInt(@intFromEnum(i))))}), - .simple_value => try w.print("{s}", .{@tagName(@as(SimpleValue, @enumFromInt(@intFromEnum(i))))}), + .simple_type => try stderr_bw.print("{s}", .{@tagName(@as(SimpleType, @enumFromInt(@intFromEnum(i))))}), + .simple_value => try stderr_bw.print("{s}", .{@tagName(@as(SimpleValue, @enumFromInt(@intFromEnum(i))))}), .type_int_signed, .type_int_unsigned, @@ -11347,17 +11348,16 @@ fn dumpAllFallible(ip: *const InternPool) anyerror!void { .func_coerced, .union_value, .memoized_call, - => try w.print("{d}", .{data}), + => try stderr_bw.print("{d}", .{data}), .opt_null, .type_slice, .only_possible_value, - => try w.print("${d}", .{data}), + => try stderr_bw.print("${d}", .{data}), } - try w.writeAll(")\n"); + try stderr_bw.writeAll(")\n"); } } - try bw.flush(); } pub fn dumpGenericInstances(ip: *const InternPool, allocator: Allocator) void { @@ -11369,9 +11369,6 @@ pub fn dumpGenericInstancesFallible(ip: *const InternPool, allocator: Allocator) defer arena_allocator.deinit(); const arena = arena_allocator.allocator(); - var bw = 
std.io.bufferedWriter(std.io.getStdErr().writer()); - const w = bw.writer(); - var instances: std.AutoArrayHashMapUnmanaged(Index, std.ArrayListUnmanaged(Index)) = .empty; for (ip.locals, 0..) |*local, tid| { const items = local.shared.items.view().slice(); @@ -11394,6 +11391,10 @@ pub fn dumpGenericInstancesFallible(ip: *const InternPool, allocator: Allocator) } } + var buffer: [4096]u8 = undefined; + const stderr_bw = std.debug.lockStderrWriter(&buffer); + defer std.debug.unlockStderrWriter(); + const SortContext = struct { values: []std.ArrayListUnmanaged(Index), pub fn lessThan(ctx: @This(), a_index: usize, b_index: usize) bool { @@ -11405,23 +11406,21 @@ pub fn dumpGenericInstancesFallible(ip: *const InternPool, allocator: Allocator) var it = instances.iterator(); while (it.next()) |entry| { const generic_fn_owner_nav = ip.getNav(ip.funcDeclInfo(entry.key_ptr.*).owner_nav); - try w.print("{} ({}): \n", .{ generic_fn_owner_nav.name.fmt(ip), entry.value_ptr.items.len }); + try stderr_bw.print("{f} ({d}): \n", .{ generic_fn_owner_nav.name.fmt(ip), entry.value_ptr.items.len }); for (entry.value_ptr.items) |index| { const unwrapped_index = index.unwrap(ip); const func = ip.extraFuncInstance(unwrapped_index.tid, unwrapped_index.getExtra(ip), unwrapped_index.getData(ip)); const owner_nav = ip.getNav(func.owner_nav); - try w.print(" {}: (", .{owner_nav.name.fmt(ip)}); + try stderr_bw.print(" {f}: (", .{owner_nav.name.fmt(ip)}); for (func.comptime_args.get(ip)) |arg| { if (arg != .none) { const key = ip.indexToKey(arg); - try w.print(" {} ", .{key}); + try stderr_bw.print(" {} ", .{key}); } } - try w.writeAll(")\n"); + try stderr_bw.writeAll(")\n"); } } - - try bw.flush(); } pub fn getNav(ip: *const InternPool, index: Nav.Index) Nav { diff --git a/src/Package.zig b/src/Package.zig index 2eb7321cee..f625dd4908 100644 --- a/src/Package.zig +++ b/src/Package.zig @@ -134,7 +134,7 @@ pub const Hash = struct { } var bin_digest: [Algo.digest_length]u8 = undefined; 
Algo.hash(sub_path, &bin_digest, .{}); - _ = std.fmt.bufPrint(result.bytes[i..], "{}", .{std.fmt.fmtSliceHexLower(&bin_digest)}) catch unreachable; + _ = std.fmt.bufPrint(result.bytes[i..], "{x}", .{&bin_digest}) catch unreachable; return result; } }; diff --git a/src/Package/Fetch.zig b/src/Package/Fetch.zig index 9acfe1d97a..a97b60a17c 100644 --- a/src/Package/Fetch.zig +++ b/src/Package/Fetch.zig @@ -27,6 +27,22 @@ //! All of this must be done with only referring to the state inside this struct //! because this work will be done in a dedicated thread. +const builtin = @import("builtin"); +const std = @import("std"); +const fs = std.fs; +const assert = std.debug.assert; +const ascii = std.ascii; +const Allocator = std.mem.Allocator; +const Cache = std.Build.Cache; +const ThreadPool = std.Thread.Pool; +const WaitGroup = std.Thread.WaitGroup; +const Fetch = @This(); +const git = @import("Fetch/git.zig"); +const Package = @import("../Package.zig"); +const Manifest = Package.Manifest; +const ErrorBundle = std.zig.ErrorBundle; +const native_os = builtin.os.tag; + arena: std.heap.ArenaAllocator, location: Location, location_tok: std.zig.Ast.TokenIndex, @@ -185,7 +201,7 @@ pub const JobQueue = struct { const hash_slice = hash.toSlice(); try buf.writer().print( - \\ pub const {} = struct {{ + \\ pub const {f} = struct {{ \\ , .{std.zig.fmtId(hash_slice)}); @@ -211,15 +227,15 @@ pub const JobQueue = struct { } try buf.writer().print( - \\ pub const build_root = "{q}"; + \\ pub const build_root = "{f}"; \\ - , .{fetch.package_root}); + , .{std.fmt.alt(fetch.package_root, .formatEscapeString)}); if (fetch.has_build_zig) { try buf.writer().print( - \\ pub const build_zig = @import("{}"); + \\ pub const build_zig = @import("{f}"); \\ - , .{std.zig.fmtEscapes(hash_slice)}); + , .{std.zig.fmtString(hash_slice)}); } if (fetch.manifest) |*manifest| { @@ -230,8 +246,8 @@ pub const JobQueue = struct { for (manifest.dependencies.keys(), manifest.dependencies.values()) |name, dep| { 
const h = depDigest(fetch.package_root, jq.global_cache, dep) orelse continue; try buf.writer().print( - " .{{ \"{}\", \"{}\" }},\n", - .{ std.zig.fmtEscapes(name), std.zig.fmtEscapes(h.toSlice()) }, + " .{{ \"{f}\", \"{f}\" }},\n", + .{ std.zig.fmtString(name), std.zig.fmtString(h.toSlice()) }, ); } @@ -262,8 +278,8 @@ pub const JobQueue = struct { for (root_manifest.dependencies.keys(), root_manifest.dependencies.values()) |name, dep| { const h = depDigest(root_fetch.package_root, jq.global_cache, dep) orelse continue; try buf.writer().print( - " .{{ \"{}\", \"{}\" }},\n", - .{ std.zig.fmtEscapes(name), std.zig.fmtEscapes(h.toSlice()) }, + " .{{ \"{f}\", \"{f}\" }},\n", + .{ std.zig.fmtString(name), std.zig.fmtString(h.toSlice()) }, ); } try buf.appendSlice("};\n"); @@ -353,7 +369,7 @@ pub fn run(f: *Fetch) RunError!void { if (!std.mem.startsWith(u8, pkg_root.sub_path, expected_prefix)) { return f.fail( f.location_tok, - try eb.printString("dependency path outside project: '{}'", .{pkg_root}), + try eb.printString("dependency path outside project: '{f}'", .{pkg_root}), ); } } @@ -420,14 +436,14 @@ pub fn run(f: *Fetch) RunError!void { } if (f.job_queue.read_only) return f.fail( f.name_tok, - try eb.printString("package not found at '{}{s}'", .{ + try eb.printString("package not found at '{f}{s}'", .{ cache_root, pkg_sub_path, }), ); }, else => |e| { try eb.addRootErrorMessage(.{ - .msg = try eb.printString("unable to open global package cache directory '{}{s}': {s}", .{ + .msg = try eb.printString("unable to open global package cache directory '{f}{s}': {s}", .{ cache_root, pkg_sub_path, @errorName(e), }), }); @@ -604,7 +620,7 @@ pub fn computedPackageHash(f: *const Fetch) Package.Hash { const saturated_size = std.math.cast(u32, f.computed_hash.total_size) orelse std.math.maxInt(u32); if (f.manifest) |man| { var version_buffer: [32]u8 = undefined; - const version: []const u8 = std.fmt.bufPrint(&version_buffer, "{}", .{man.version}) catch &version_buffer; + const 
version: []const u8 = std.fmt.bufPrint(&version_buffer, "{f}", .{man.version}) catch &version_buffer; return .init(f.computed_hash.digest, man.name, version, man.id, saturated_size); } // In the future build.zig.zon fields will be added to allow overriding these values @@ -622,7 +638,7 @@ fn checkBuildFileExistence(f: *Fetch) RunError!void { error.FileNotFound => {}, else => |e| { try eb.addRootErrorMessage(.{ - .msg = try eb.printString("unable to access '{}{s}': {s}", .{ + .msg = try eb.printString("unable to access '{f}{s}': {s}", .{ f.package_root, Package.build_zig_basename, @errorName(e), }), }); @@ -647,7 +663,7 @@ fn loadManifest(f: *Fetch, pkg_root: Cache.Path) RunError!void { else => |e| { const file_path = try pkg_root.join(arena, Manifest.basename); try eb.addRootErrorMessage(.{ - .msg = try eb.printString("unable to load package manifest '{}': {s}", .{ + .msg = try eb.printString("unable to load package manifest '{f}': {s}", .{ file_path, @errorName(e), }), }); @@ -659,7 +675,7 @@ fn loadManifest(f: *Fetch, pkg_root: Cache.Path) RunError!void { ast.* = try std.zig.Ast.parse(arena, manifest_bytes, .zon); if (ast.errors.len > 0) { - const file_path = try std.fmt.allocPrint(arena, "{}" ++ fs.path.sep_str ++ Manifest.basename, .{pkg_root}); + const file_path = try std.fmt.allocPrint(arena, "{f}" ++ fs.path.sep_str ++ Manifest.basename, .{pkg_root}); try std.zig.putAstErrorsIntoBundle(arena, ast.*, file_path, eb); return error.FetchFailed; } @@ -672,7 +688,7 @@ fn loadManifest(f: *Fetch, pkg_root: Cache.Path) RunError!void { const manifest = &f.manifest.?; if (manifest.errors.len > 0) { - const src_path = try eb.printString("{}" ++ fs.path.sep_str ++ "{s}", .{ pkg_root, Manifest.basename }); + const src_path = try eb.printString("{f}" ++ fs.path.sep_str ++ "{s}", .{ pkg_root, Manifest.basename }); try manifest.copyErrorsIntoBundle(ast.*, src_path, eb); return error.FetchFailed; } @@ -827,7 +843,7 @@ fn srcLoc( const ast = f.parent_manifest_ast orelse return 
.none; const eb = &f.error_bundle; const start_loc = ast.tokenLocation(0, tok); - const src_path = try eb.printString("{}" ++ fs.path.sep_str ++ Manifest.basename, .{f.parent_package_root}); + const src_path = try eb.printString("{f}" ++ fs.path.sep_str ++ Manifest.basename, .{f.parent_package_root}); const msg_off = 0; return eb.addSourceLocation(.{ .src_path = src_path, @@ -961,7 +977,7 @@ fn initResource(f: *Fetch, uri: std.Uri, server_header_buffer: []u8) RunError!Re if (ascii.eqlIgnoreCase(uri.scheme, "file")) { const path = try uri.path.toRawMaybeAlloc(arena); return .{ .file = f.parent_package_root.openFile(path, .{}) catch |err| { - return f.fail(f.location_tok, try eb.printString("unable to open '{}{s}': {s}", .{ + return f.fail(f.location_tok, try eb.printString("unable to open '{f}{s}': {s}", .{ f.parent_package_root, path, @errorName(err), })); } }; @@ -1063,13 +1079,16 @@ fn initResource(f: *Fetch, uri: std.Uri, server_header_buffer: []u8) RunError!Re }); const notes_start = try eb.reserveNotes(notes_len); eb.extra.items[notes_start] = @intFromEnum(try eb.addErrorMessage(.{ - .msg = try eb.printString("try .url = \"{;+/}#{}\",", .{ uri, want_oid }), + .msg = try eb.printString("try .url = \"{f}#{f}\",", .{ + uri.fmt(.{ .scheme = true, .authority = true, .path = true }), + want_oid, + }), })); return error.FetchFailed; } var want_oid_buf: [git.Oid.max_formatted_length]u8 = undefined; - _ = std.fmt.bufPrint(&want_oid_buf, "{}", .{want_oid}) catch unreachable; + _ = std.fmt.bufPrint(&want_oid_buf, "{f}", .{want_oid}) catch unreachable; var fetch_stream = session.fetch(&.{&want_oid_buf}, server_header_buffer) catch |err| { return f.fail(f.location_tok, try eb.printString( "unable to create fetch stream: {s}", @@ -1305,7 +1324,7 @@ fn unzip(f: *Fetch, out_dir: fs.Dir, reader: anytype) RunError!UnpackResult { .{@errorName(err)}, )); if (len == 0) break; - zip_file.writer().writeAll(buf[0..len]) catch |err| return f.fail(f.location_tok, try eb.printString( + 
zip_file.deprecatedWriter().writeAll(buf[0..len]) catch |err| return f.fail(f.location_tok, try eb.printString( "write temporary zip file failed: {s}", .{@errorName(err)}, )); @@ -1358,7 +1377,7 @@ fn unpackGitPack(f: *Fetch, out_dir: fs.Dir, resource: *Resource.Git) anyerror!U var pack_file = try pack_dir.createFile("pkg.pack", .{ .read = true }); defer pack_file.close(); var fifo = std.fifo.LinearFifo(u8, .{ .Static = 4096 }).init(); - try fifo.pump(resource.fetch_stream.reader(), pack_file.writer()); + try fifo.pump(resource.fetch_stream.reader(), pack_file.deprecatedWriter()); try pack_file.sync(); var index_file = try pack_dir.createFile("pkg.idx", .{ .read = true }); @@ -1366,7 +1385,7 @@ fn unpackGitPack(f: *Fetch, out_dir: fs.Dir, resource: *Resource.Git) anyerror!U { const index_prog_node = f.prog_node.start("Index pack", 0); defer index_prog_node.end(); - var index_buffered_writer = std.io.bufferedWriter(index_file.writer()); + var index_buffered_writer = std.io.bufferedWriter(index_file.deprecatedWriter()); try git.indexPack(gpa, object_format, pack_file, index_buffered_writer.writer()); try index_buffered_writer.flush(); try index_file.sync(); @@ -1508,7 +1527,7 @@ fn computeHash(f: *Fetch, pkg_path: Cache.Path, filter: Filter) RunError!Compute while (walker.next() catch |err| { try eb.addRootErrorMessage(.{ .msg = try eb.printString( - "unable to walk temporary directory '{}': {s}", + "unable to walk temporary directory '{f}': {s}", .{ pkg_path, @errorName(err) }, ) }); return error.FetchFailed; @@ -1638,14 +1657,14 @@ fn computeHash(f: *Fetch, pkg_path: Cache.Path, filter: Filter) RunError!Compute } fn dumpHashInfo(all_files: []const *const HashedFile) !void { - const stdout = std.io.getStdOut(); - var bw = std.io.bufferedWriter(stdout.writer()); + const stdout: std.fs.File = .stdout(); + var bw = std.io.bufferedWriter(stdout.deprecatedWriter()); const w = bw.writer(); for (all_files) |hashed_file| { - try w.print("{s}: {s}: {s}\n", .{ + try 
w.print("{s}: {x}: {s}\n", .{ @tagName(hashed_file.kind), - std.fmt.fmtSliceHexLower(&hashed_file.hash), + &hashed_file.hash, hashed_file.normalized_path, }); } @@ -1817,28 +1836,6 @@ pub fn depDigest(pkg_root: Cache.Path, cache_root: Cache.Directory, dep: Manifes } } -const builtin = @import("builtin"); -const std = @import("std"); -const fs = std.fs; -const assert = std.debug.assert; -const ascii = std.ascii; -const Allocator = std.mem.Allocator; -const Cache = std.Build.Cache; -const ThreadPool = std.Thread.Pool; -const WaitGroup = std.Thread.WaitGroup; -const Fetch = @This(); -const git = @import("Fetch/git.zig"); -const Package = @import("../Package.zig"); -const Manifest = Package.Manifest; -const ErrorBundle = std.zig.ErrorBundle; -const native_os = builtin.os.tag; - -test { - _ = Filter; - _ = FileType; - _ = UnpackResult; -} - // Detects executable header: ELF or Macho-O magic header or shebang line. const FileHeader = struct { header: [4]u8 = undefined, @@ -2056,15 +2053,15 @@ const UnpackResult = struct { // output errors to string var errors = try fetch.error_bundle.toOwnedBundle(""); defer errors.deinit(gpa); - var out = std.ArrayList(u8).init(gpa); - defer out.deinit(); - try errors.renderToWriter(.{ .ttyconf = .no_color }, out.writer()); + var aw: std.io.Writer.Allocating = .init(gpa); + defer aw.deinit(); + try errors.renderToWriter(.{ .ttyconf = .no_color }, &aw.writer); try std.testing.expectEqualStrings( \\error: unable to unpack \\ note: unable to create symlink from 'dir2/file2' to 'filename': SymlinkError \\ note: file 'dir2/file4' has unsupported type 'x' \\ - , out.items); + , aw.getWritten()); } }; @@ -2080,7 +2077,7 @@ test "zip" { { var zip_file = try tmp.dir.createFile("test.zip", .{}); defer zip_file.close(); - var bw = std.io.bufferedWriter(zip_file.writer()); + var bw = std.io.bufferedWriter(zip_file.deprecatedWriter()); var store: [test_files.len]std.zip.testutil.FileStore = undefined; try std.zip.testutil.writeZip(bw.writer(), 
&test_files, &store, .{}); try bw.flush(); @@ -2113,7 +2110,7 @@ test "zip with one root folder" { { var zip_file = try tmp.dir.createFile("test.zip", .{}); defer zip_file.close(); - var bw = std.io.bufferedWriter(zip_file.writer()); + var bw = std.io.bufferedWriter(zip_file.deprecatedWriter()); var store: [test_files.len]std.zip.testutil.FileStore = undefined; try std.zip.testutil.writeZip(bw.writer(), &test_files, &store, .{}); try bw.flush(); @@ -2431,9 +2428,15 @@ const TestFetchBuilder = struct { if (notes_len > 0) { try std.testing.expectEqual(notes_len, em.notes_len); } - var al = std.ArrayList(u8).init(std.testing.allocator); - defer al.deinit(); - try errors.renderToWriter(.{ .ttyconf = .no_color }, al.writer()); - try std.testing.expectEqualStrings(msg, al.items); + var aw: std.io.Writer.Allocating = .init(std.testing.allocator); + defer aw.deinit(); + try errors.renderToWriter(.{ .ttyconf = .no_color }, &aw.writer); + try std.testing.expectEqualStrings(msg, aw.getWritten()); } }; + +test { + _ = Filter; + _ = FileType; + _ = UnpackResult; +} diff --git a/src/Package/Fetch/git.zig b/src/Package/Fetch/git.zig index de3912041f..4d2dae904f 100644 --- a/src/Package/Fetch/git.zig +++ b/src/Package/Fetch/git.zig @@ -119,15 +119,8 @@ pub const Oid = union(Format) { } else error.InvalidOid; } - pub fn format( - oid: Oid, - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { - _ = fmt; - _ = options; - try writer.print("{}", .{std.fmt.fmtSliceHexLower(oid.slice())}); + pub fn format(oid: Oid, writer: *std.io.Writer) std.io.Writer.Error!void { + try writer.print("{x}", .{oid.slice()}); } pub fn slice(oid: *const Oid) []const u8 { @@ -353,7 +346,7 @@ const Odb = struct { fn init(allocator: Allocator, format: Oid.Format, pack_file: std.fs.File, index_file: std.fs.File) !Odb { try pack_file.seekTo(0); try index_file.seekTo(0); - const index_header = try IndexHeader.read(index_file.reader()); + const 
index_header = try IndexHeader.read(index_file.deprecatedReader()); return .{ .format = format, .pack_file = pack_file, @@ -377,7 +370,7 @@ const Odb = struct { const base_object = while (true) { if (odb.cache.get(base_offset)) |base_object| break base_object; - base_header = try EntryHeader.read(odb.format, odb.pack_file.reader()); + base_header = try EntryHeader.read(odb.format, odb.pack_file.deprecatedReader()); switch (base_header) { .ofs_delta => |ofs_delta| { try delta_offsets.append(odb.allocator, base_offset); @@ -390,7 +383,7 @@ const Odb = struct { base_offset = try odb.pack_file.getPos(); }, else => { - const base_data = try readObjectRaw(odb.allocator, odb.pack_file.reader(), base_header.uncompressedLength()); + const base_data = try readObjectRaw(odb.allocator, odb.pack_file.deprecatedReader(), base_header.uncompressedLength()); errdefer odb.allocator.free(base_data); const base_object: Object = .{ .type = base_header.objectType(), .data = base_data }; try odb.cache.put(odb.allocator, base_offset, base_object); @@ -420,7 +413,7 @@ const Odb = struct { const found_index = while (start_index < end_index) { const mid_index = start_index + (end_index - start_index) / 2; try odb.index_file.seekTo(IndexHeader.size + mid_index * oid_length); - const mid_oid = try Oid.readBytes(odb.format, odb.index_file.reader()); + const mid_oid = try Oid.readBytes(odb.format, odb.index_file.deprecatedReader()); switch (mem.order(u8, mid_oid.slice(), oid.slice())) { .lt => start_index = mid_index + 1, .gt => end_index = mid_index, @@ -431,12 +424,12 @@ const Odb = struct { const n_objects = odb.index_header.fan_out_table[255]; const offset_values_start = IndexHeader.size + n_objects * (oid_length + 4); try odb.index_file.seekTo(offset_values_start + found_index * 4); - const l1_offset: packed struct { value: u31, big: bool } = @bitCast(try odb.index_file.reader().readInt(u32, .big)); + const l1_offset: packed struct { value: u31, big: bool } = @bitCast(try 
odb.index_file.deprecatedReader().readInt(u32, .big)); const pack_offset = pack_offset: { if (l1_offset.big) { const l2_offset_values_start = offset_values_start + n_objects * 4; try odb.index_file.seekTo(l2_offset_values_start + l1_offset.value * 4); - break :pack_offset try odb.index_file.reader().readInt(u64, .big); + break :pack_offset try odb.index_file.deprecatedReader().readInt(u64, .big); } else { break :pack_offset l1_offset.value; } @@ -669,13 +662,21 @@ pub const Session = struct { fn init(allocator: Allocator, uri: std.Uri) !Location { const scheme = try allocator.dupe(u8, uri.scheme); errdefer allocator.free(scheme); - const user = if (uri.user) |user| try std.fmt.allocPrint(allocator, "{user}", .{user}) else null; + const user = if (uri.user) |user| try std.fmt.allocPrint(allocator, "{f}", .{ + std.fmt.alt(user, .formatUser), + }) else null; errdefer if (user) |s| allocator.free(s); - const password = if (uri.password) |password| try std.fmt.allocPrint(allocator, "{password}", .{password}) else null; + const password = if (uri.password) |password| try std.fmt.allocPrint(allocator, "{f}", .{ + std.fmt.alt(password, .formatPassword), + }) else null; errdefer if (password) |s| allocator.free(s); - const host = if (uri.host) |host| try std.fmt.allocPrint(allocator, "{host}", .{host}) else null; + const host = if (uri.host) |host| try std.fmt.allocPrint(allocator, "{f}", .{ + std.fmt.alt(host, .formatHost), + }) else null; errdefer if (host) |s| allocator.free(s); - const path = try std.fmt.allocPrint(allocator, "{path}", .{uri.path}); + const path = try std.fmt.allocPrint(allocator, "{f}", .{ + std.fmt.alt(uri.path, .formatPath), + }); errdefer allocator.free(path); // The query and fragment are not used as part of the base server URI. 
return .{ @@ -706,7 +707,9 @@ pub const Session = struct { fn getCapabilities(session: *Session, http_headers_buffer: []u8) !CapabilityIterator { var info_refs_uri = session.location.uri; { - const session_uri_path = try std.fmt.allocPrint(session.allocator, "{path}", .{session.location.uri.path}); + const session_uri_path = try std.fmt.allocPrint(session.allocator, "{f}", .{ + std.fmt.alt(session.location.uri.path, .formatPath), + }); defer session.allocator.free(session_uri_path); info_refs_uri.path = .{ .percent_encoded = try std.fs.path.resolvePosix(session.allocator, &.{ "/", session_uri_path, "info/refs" }) }; } @@ -730,7 +733,9 @@ pub const Session = struct { if (request.response.status != .ok) return error.ProtocolError; const any_redirects_occurred = request.redirect_behavior.remaining() < max_redirects; if (any_redirects_occurred) { - const request_uri_path = try std.fmt.allocPrint(session.allocator, "{path}", .{request.uri.path}); + const request_uri_path = try std.fmt.allocPrint(session.allocator, "{f}", .{ + std.fmt.alt(request.uri.path, .formatPath), + }); defer session.allocator.free(request_uri_path); if (!mem.endsWith(u8, request_uri_path, "/info/refs")) return error.UnparseableRedirect; var new_uri = request.uri; @@ -817,7 +822,9 @@ pub const Session = struct { pub fn listRefs(session: Session, options: ListRefsOptions) !RefIterator { var upload_pack_uri = session.location.uri; { - const session_uri_path = try std.fmt.allocPrint(session.allocator, "{path}", .{session.location.uri.path}); + const session_uri_path = try std.fmt.allocPrint(session.allocator, "{f}", .{ + std.fmt.alt(session.location.uri.path, .formatPath), + }); defer session.allocator.free(session_uri_path); upload_pack_uri.path = .{ .percent_encoded = try std.fs.path.resolvePosix(session.allocator, &.{ "/", session_uri_path, "git-upload-pack" }) }; } @@ -932,7 +939,9 @@ pub const Session = struct { ) !FetchStream { var upload_pack_uri = session.location.uri; { - const 
session_uri_path = try std.fmt.allocPrint(session.allocator, "{path}", .{session.location.uri.path}); + const session_uri_path = try std.fmt.allocPrint(session.allocator, "{f}", .{ + std.fmt.alt(session.location.uri.path, .formatPath), + }); defer session.allocator.free(session_uri_path); upload_pack_uri.path = .{ .percent_encoded = try std.fs.path.resolvePosix(session.allocator, &.{ "/", session_uri_path, "git-upload-pack" }) }; } @@ -1026,7 +1035,7 @@ pub const Session = struct { ProtocolError, UnexpectedPacket, }; - pub const Reader = std.io.Reader(*FetchStream, ReadError, read); + pub const Reader = std.io.GenericReader(*FetchStream, ReadError, read); const StreamCode = enum(u8) { pack_data = 1, @@ -1320,7 +1329,7 @@ fn indexPackFirstPass( index_entries: *std.AutoHashMapUnmanaged(Oid, IndexEntry), pending_deltas: *std.ArrayListUnmanaged(IndexEntry), ) !Oid { - var pack_buffered_reader = std.io.bufferedReader(pack.reader()); + var pack_buffered_reader = std.io.bufferedReader(pack.deprecatedReader()); var pack_counting_reader = std.io.countingReader(pack_buffered_reader.reader()); var pack_hashed_reader = std.compress.hashedReader(pack_counting_reader.reader(), Oid.Hasher.init(format)); const pack_reader = pack_hashed_reader.reader(); @@ -1400,7 +1409,7 @@ fn indexPackHashDelta( if (cache.get(base_offset)) |base_object| break base_object; try pack.seekTo(base_offset); - base_header = try EntryHeader.read(format, pack.reader()); + base_header = try EntryHeader.read(format, pack.deprecatedReader()); switch (base_header) { .ofs_delta => |ofs_delta| { try delta_offsets.append(allocator, base_offset); @@ -1411,7 +1420,7 @@ fn indexPackHashDelta( base_offset = (index_entries.get(ref_delta.base_object) orelse return null).offset; }, else => { - const base_data = try readObjectRaw(allocator, pack.reader(), base_header.uncompressedLength()); + const base_data = try readObjectRaw(allocator, pack.deprecatedReader(), base_header.uncompressedLength()); errdefer 
allocator.free(base_data); const base_object: Object = .{ .type = base_header.objectType(), .data = base_data }; try cache.put(allocator, base_offset, base_object); @@ -1448,8 +1457,8 @@ fn resolveDeltaChain( const delta_offset = delta_offsets[i]; try pack.seekTo(delta_offset); - const delta_header = try EntryHeader.read(format, pack.reader()); - const delta_data = try readObjectRaw(allocator, pack.reader(), delta_header.uncompressedLength()); + const delta_header = try EntryHeader.read(format, pack.deprecatedReader()); + const delta_data = try readObjectRaw(allocator, pack.deprecatedReader(), delta_header.uncompressedLength()); defer allocator.free(delta_data); var delta_stream = std.io.fixedBufferStream(delta_data); const delta_reader = delta_stream.reader(); @@ -1561,7 +1570,7 @@ fn runRepositoryTest(comptime format: Oid.Format, head_commit: []const u8) !void var index_file = try git_dir.dir.createFile("testrepo.idx", .{ .read = true }); defer index_file.close(); - try indexPack(testing.allocator, format, pack_file, index_file.writer()); + try indexPack(testing.allocator, format, pack_file, index_file.deprecatedWriter()); // Arbitrary size limit on files read while checking the repository contents // (all files in the test repo are known to be smaller than this) @@ -1678,7 +1687,7 @@ pub fn main() !void { std.debug.print("Starting index...\n", .{}); var index_file = try git_dir.createFile("idx", .{ .read = true }); defer index_file.close(); - var index_buffered_writer = std.io.bufferedWriter(index_file.writer()); + var index_buffered_writer = std.io.bufferedWriter(index_file.deprecatedWriter()); try indexPack(allocator, format, pack_file, index_buffered_writer.writer()); try index_buffered_writer.flush(); try index_file.sync(); diff --git a/src/Package/Manifest.zig b/src/Package/Manifest.zig index 6dff300503..1d71b60fa3 100644 --- a/src/Package/Manifest.zig +++ b/src/Package/Manifest.zig @@ -401,7 +401,7 @@ const Parse = struct { return fail(p, main_token, "name 
must be a valid bare zig identifier (hint: switch from string to enum literal)", .{}); if (name.len > max_name_len) - return fail(p, main_token, "name '{}' exceeds max length of {d}", .{ + return fail(p, main_token, "name '{f}' exceeds max length of {d}", .{ std.zig.fmtId(name), max_name_len, }); @@ -416,7 +416,7 @@ const Parse = struct { return fail(p, main_token, "name must be a valid bare zig identifier", .{}); if (ident_name.len > max_name_len) - return fail(p, main_token, "name '{}' exceeds max length of {d}", .{ + return fail(p, main_token, "name '{f}' exceeds max length of {d}", .{ std.zig.fmtId(ident_name), max_name_len, }); diff --git a/src/Sema.zig b/src/Sema.zig index 22ac21d644..92f6705e4e 100644 --- a/src/Sema.zig +++ b/src/Sema.zig @@ -5,6 +5,39 @@ //! Does type checking, comptime control flow, and safety-check generation. //! This is the the heart of the Zig compiler. +const std = @import("std"); +const math = std.math; +const mem = std.mem; +const Allocator = mem.Allocator; +const assert = std.debug.assert; +const log = std.log.scoped(.sema); + +const Sema = @This(); +const Value = @import("Value.zig"); +const MutableValue = @import("mutable_value.zig").MutableValue; +const Type = @import("Type.zig"); +const Air = @import("Air.zig"); +const Zir = std.zig.Zir; +const Zcu = @import("Zcu.zig"); +const trace = @import("tracy.zig").trace; +const Namespace = Zcu.Namespace; +const CompileError = Zcu.CompileError; +const SemaError = Zcu.SemaError; +const LazySrcLoc = Zcu.LazySrcLoc; +const RangeSet = @import("RangeSet.zig"); +const target_util = @import("target.zig"); +const Package = @import("Package.zig"); +const crash_report = @import("crash_report.zig"); +const build_options = @import("build_options"); +const Compilation = @import("Compilation.zig"); +const InternPool = @import("InternPool.zig"); +const Alignment = InternPool.Alignment; +const AnalUnit = InternPool.AnalUnit; +const ComptimeAllocIndex = InternPool.ComptimeAllocIndex; +const Cache = 
std.Build.Cache; +const LowerZon = @import("Sema/LowerZon.zig"); +const arith = @import("Sema/arith.zig"); + pt: Zcu.PerThread, /// Alias to `zcu.gpa`. gpa: Allocator, @@ -157,39 +190,6 @@ pub fn getComptimeAlloc(sema: *Sema, idx: ComptimeAllocIndex) *ComptimeAlloc { return &sema.comptime_allocs.items[@intFromEnum(idx)]; } -const std = @import("std"); -const math = std.math; -const mem = std.mem; -const Allocator = mem.Allocator; -const assert = std.debug.assert; -const log = std.log.scoped(.sema); - -const Sema = @This(); -const Value = @import("Value.zig"); -const MutableValue = @import("mutable_value.zig").MutableValue; -const Type = @import("Type.zig"); -const Air = @import("Air.zig"); -const Zir = std.zig.Zir; -const Zcu = @import("Zcu.zig"); -const trace = @import("tracy.zig").trace; -const Namespace = Zcu.Namespace; -const CompileError = Zcu.CompileError; -const SemaError = Zcu.SemaError; -const LazySrcLoc = Zcu.LazySrcLoc; -const RangeSet = @import("RangeSet.zig"); -const target_util = @import("target.zig"); -const Package = @import("Package.zig"); -const crash_report = @import("crash_report.zig"); -const build_options = @import("build_options"); -const Compilation = @import("Compilation.zig"); -const InternPool = @import("InternPool.zig"); -const Alignment = InternPool.Alignment; -const AnalUnit = InternPool.AnalUnit; -const ComptimeAllocIndex = InternPool.ComptimeAllocIndex; -const Cache = std.Build.Cache; -const LowerZon = @import("Sema/LowerZon.zig"); -const arith = @import("Sema/arith.zig"); - pub const default_branch_quota = 1000; pub const InferredErrorSet = struct { @@ -888,7 +888,7 @@ const ComptimeReason = union(enum) { /// Evaluating at comptime because of a comptime-only type. This field is separate so that /// the type in question can be included in the error message. AstGen could never emit this /// reason, because it knows nothing of types. - /// The format string looks like "foo '{}' bar", where "{}" is the comptime-only type. 
+ /// The format string looks like "foo '{f}' bar", where "{f}" is the comptime-only type. /// We will then explain why this type is comptime-only. comptime_only: struct { ty: Type, @@ -930,17 +930,17 @@ const ComptimeReason = union(enum) { .struct_init => .{ "initializer of comptime-only struct", "must be comptime-known" }, .tuple_init => .{ "initializer of comptime-only tuple", "must be comptime-known" }, }; - try sema.errNote(src, err_msg, "{s} '{}' {s}", .{ pre, co.ty.fmt(sema.pt), post }); + try sema.errNote(src, err_msg, "{s} '{f}' {s}", .{ pre, co.ty.fmt(sema.pt), post }); try sema.explainWhyTypeIsComptime(err_msg, src, co.ty); }, .comptime_only_param_ty => |co| { - try sema.errNote(src, err_msg, "argument to parameter with comptime-only type '{}' must be comptime-known", .{co.ty.fmt(sema.pt)}); + try sema.errNote(src, err_msg, "argument to parameter with comptime-only type '{f}' must be comptime-known", .{co.ty.fmt(sema.pt)}); try sema.errNote(co.param_ty_src, err_msg, "parameter type declared here", .{}); try sema.explainWhyTypeIsComptime(err_msg, src, co.ty); }, .comptime_only_ret_ty => |co| { const function_with: []const u8 = if (co.is_generic_inst) "generic function instantiated with" else "function with"; - try sema.errNote(src, err_msg, "call to {s} comptime-only return type '{}' is evaluated at comptime", .{ function_with, co.ty.fmt(sema.pt) }); + try sema.errNote(src, err_msg, "call to {s} comptime-only return type '{f}' is evaluated at comptime", .{ function_with, co.ty.fmt(sema.pt) }); try sema.errNote(co.ret_ty_src, err_msg, "return type declared here", .{}); try sema.explainWhyTypeIsComptime(err_msg, src, co.ty); }, @@ -1144,7 +1144,7 @@ fn analyzeBodyInner( // The hashmap lookup in here is a little expensive, and LLVM fails to optimize it away. 
if (build_options.enable_logging) { - std.log.scoped(.sema_zir).debug("sema ZIR {} %{d}", .{ path: { + std.log.scoped(.sema_zir).debug("sema ZIR {f} %{d}", .{ path: { const file_index = block.src_base_inst.resolveFile(&zcu.intern_pool); const file = zcu.fileByIndex(file_index); break :path file.path.fmt(zcu.comp); @@ -1905,7 +1905,7 @@ fn analyzeBodyInner( const err_union = try sema.resolveInst(extra.data.operand); const err_union_ty = sema.typeOf(err_union); if (err_union_ty.zigTypeTag(zcu) != .error_union) { - return sema.fail(block, operand_src, "expected error union type, found '{}'", .{ + return sema.fail(block, operand_src, "expected error union type, found '{f}'", .{ err_union_ty.fmt(pt), }); } @@ -2339,7 +2339,7 @@ pub fn failWithDivideByZero(sema: *Sema, block: *Block, src: LazySrcLoc) Compile fn failWithModRemNegative(sema: *Sema, block: *Block, src: LazySrcLoc, lhs_ty: Type, rhs_ty: Type) CompileError { const pt = sema.pt; - return sema.fail(block, src, "remainder division with '{}' and '{}': signed integers and floats must use @rem or @mod", .{ + return sema.fail(block, src, "remainder division with '{f}' and '{f}': signed integers and floats must use @rem or @mod", .{ lhs_ty.fmt(pt), rhs_ty.fmt(pt), }); } @@ -2347,7 +2347,7 @@ fn failWithModRemNegative(sema: *Sema, block: *Block, src: LazySrcLoc, lhs_ty: T fn failWithExpectedOptionalType(sema: *Sema, block: *Block, src: LazySrcLoc, non_optional_ty: Type) CompileError { const pt = sema.pt; const msg = msg: { - const msg = try sema.errMsg(src, "expected optional type, found '{}'", .{ + const msg = try sema.errMsg(src, "expected optional type, found '{f}'", .{ non_optional_ty.fmt(pt), }); errdefer msg.destroy(sema.gpa); @@ -2363,12 +2363,12 @@ fn failWithExpectedOptionalType(sema: *Sema, block: *Block, src: LazySrcLoc, non fn failWithArrayInitNotSupported(sema: *Sema, block: *Block, src: LazySrcLoc, ty: Type) CompileError { const pt = sema.pt; const msg = msg: { - const msg = try sema.errMsg(src, "type 
'{}' does not support array initialization syntax", .{ + const msg = try sema.errMsg(src, "type '{f}' does not support array initialization syntax", .{ ty.fmt(pt), }); errdefer msg.destroy(sema.gpa); if (ty.isSlice(pt.zcu)) { - try sema.errNote(src, msg, "inferred array length is specified with an underscore: '[_]{}'", .{ty.elemType2(pt.zcu).fmt(pt)}); + try sema.errNote(src, msg, "inferred array length is specified with an underscore: '[_]{f}'", .{ty.elemType2(pt.zcu).fmt(pt)}); } break :msg msg; }; @@ -2377,7 +2377,7 @@ fn failWithArrayInitNotSupported(sema: *Sema, block: *Block, src: LazySrcLoc, ty fn failWithStructInitNotSupported(sema: *Sema, block: *Block, src: LazySrcLoc, ty: Type) CompileError { const pt = sema.pt; - return sema.fail(block, src, "type '{}' does not support struct initialization syntax", .{ + return sema.fail(block, src, "type '{f}' does not support struct initialization syntax", .{ ty.fmt(pt), }); } @@ -2390,7 +2390,7 @@ fn failWithErrorSetCodeMissing( src_err_set_ty: Type, ) CompileError { const pt = sema.pt; - return sema.fail(block, src, "expected type '{}', found type '{}'", .{ + return sema.fail(block, src, "expected type '{f}', found type '{f}'", .{ dest_err_set_ty.fmt(pt), src_err_set_ty.fmt(pt), }); } @@ -2398,7 +2398,7 @@ fn failWithErrorSetCodeMissing( pub fn failWithIntegerOverflow(sema: *Sema, block: *Block, src: LazySrcLoc, int_ty: Type, val: Value, vector_index: ?usize) CompileError { const pt = sema.pt; return sema.failWithOwnedErrorMsg(block, msg: { - const msg = try sema.errMsg(src, "overflow of integer type '{}' with value '{}'", .{ + const msg = try sema.errMsg(src, "overflow of integer type '{f}' with value '{f}'", .{ int_ty.fmt(pt), val.fmtValueSema(pt, sema), }); errdefer msg.destroy(sema.gpa); @@ -2448,7 +2448,7 @@ fn failWithInvalidFieldAccess( const child_ty = inner_ty.optionalChild(zcu); if (!typeSupportsFieldAccess(zcu, child_ty, field_name)) break :opt; const msg = msg: { - const msg = try sema.errMsg(src, 
"optional type '{}' does not support field access", .{object_ty.fmt(pt)}); + const msg = try sema.errMsg(src, "optional type '{f}' does not support field access", .{object_ty.fmt(pt)}); errdefer msg.destroy(sema.gpa); try sema.errNote(src, msg, "consider using '.?', 'orelse', or 'if'", .{}); break :msg msg; @@ -2458,14 +2458,14 @@ fn failWithInvalidFieldAccess( const child_ty = inner_ty.errorUnionPayload(zcu); if (!typeSupportsFieldAccess(zcu, child_ty, field_name)) break :err; const msg = msg: { - const msg = try sema.errMsg(src, "error union type '{}' does not support field access", .{object_ty.fmt(pt)}); + const msg = try sema.errMsg(src, "error union type '{f}' does not support field access", .{object_ty.fmt(pt)}); errdefer msg.destroy(sema.gpa); try sema.errNote(src, msg, "consider using 'try', 'catch', or 'if'", .{}); break :msg msg; }; return sema.failWithOwnedErrorMsg(block, msg); } - return sema.fail(block, src, "type '{}' does not support field access", .{object_ty.fmt(pt)}); + return sema.fail(block, src, "type '{f}' does not support field access", .{object_ty.fmt(pt)}); } fn typeSupportsFieldAccess(zcu: *const Zcu, ty: Type, field_name: InternPool.NullTerminatedString) bool { @@ -2494,7 +2494,7 @@ fn failWithComptimeErrorRetTrace( const pt = sema.pt; const zcu = pt.zcu; const msg = msg: { - const msg = try sema.errMsg(src, "caught unexpected error '{}'", .{name.fmt(&zcu.intern_pool)}); + const msg = try sema.errMsg(src, "caught unexpected error '{f}'", .{name.fmt(&zcu.intern_pool)}); errdefer msg.destroy(sema.gpa); for (sema.comptime_err_ret_trace.items) |src_loc| { @@ -2763,7 +2763,7 @@ fn zirTupleDecl( const coerced_field_init = try sema.coerce(block, field_type, uncoerced_field_init, init_src); const field_init_val = try sema.resolveConstDefinedValue(block, init_src, coerced_field_init, .{ .simple = .tuple_field_default_value }); if (field_init_val.canMutateComptimeVarState(zcu)) { - const field_name = try zcu.intern_pool.getOrPutStringFmt(gpa, 
pt.tid, "{}", .{field_index}, .no_embedded_nulls); + const field_name = try zcu.intern_pool.getOrPutStringFmt(gpa, pt.tid, "{d}", .{field_index}, .no_embedded_nulls); return sema.failWithContainsReferenceToComptimeVar(block, init_src, field_name, "field default value", field_init_val); } break :init field_init_val.toIntern(); @@ -3005,7 +3005,7 @@ pub fn createTypeName( inst: ?Zir.Inst.Index, /// This is used purely to give the type a unique name in the `anon` case. type_index: InternPool.Index, -) !struct { +) CompileError!struct { name: InternPool.NullTerminatedString, nav: InternPool.Nav.Index.Optional, } { @@ -3024,11 +3024,10 @@ pub fn createTypeName( const fn_info = sema.code.getFnInfo(ip.funcZirBodyInst(sema.func_index).resolve(ip) orelse return error.AnalysisFail); const zir_tags = sema.code.instructions.items(.tag); - var buf: std.ArrayListUnmanaged(u8) = .empty; - defer buf.deinit(gpa); - - const writer = buf.writer(gpa); - try writer.print("{}(", .{block.type_name_ctx.fmt(ip)}); + var aw: std.io.Writer.Allocating = .init(gpa); + defer aw.deinit(); + const w = &aw.writer; + w.print("{f}(", .{block.type_name_ctx.fmt(ip)}) catch return error.OutOfMemory; var arg_i: usize = 0; for (fn_info.param_body) |zir_inst| switch (zir_tags[@intFromEnum(zir_inst)]) { @@ -3041,18 +3040,18 @@ pub fn createTypeName( // result in a compile error. const arg_val = try sema.resolveValue(arg) orelse break :func_strat; // fall through to anon strat - if (arg_i != 0) try writer.writeByte(','); + if (arg_i != 0) w.writeByte(',') catch return error.OutOfMemory; // Limiting the depth here helps avoid type names getting too long, which // in turn helps to avoid unreasonably long symbol names for namespaced // symbols. Such names should ideally be human-readable, and additionally, // some tooling may not support very long symbol names. 
- try writer.print("{}", .{Value.fmtValueSemaFull(.{ + w.print("{f}", .{Value.fmtValueSemaFull(.{ .val = arg_val, .pt = pt, .opt_sema = sema, .depth = 1, - })}); + })}) catch return error.OutOfMemory; arg_i += 1; continue; @@ -3060,9 +3059,9 @@ pub fn createTypeName( else => continue, }; - try writer.writeByte(')'); + w.writeByte(')') catch return error.OutOfMemory; return .{ - .name = try ip.getOrPutString(gpa, pt.tid, buf.items, .no_embedded_nulls), + .name = try ip.getOrPutString(gpa, pt.tid, aw.getWritten(), .no_embedded_nulls), .nav = .none, }; }, @@ -3074,7 +3073,7 @@ pub fn createTypeName( for (@intFromEnum(inst.?)..zir_tags.len) |i| switch (zir_tags[i]) { .dbg_var_ptr, .dbg_var_val => if (zir_data[i].str_op.operand == ref) { return .{ - .name = try ip.getOrPutStringFmt(gpa, pt.tid, "{}.{s}", .{ + .name = try ip.getOrPutStringFmt(gpa, pt.tid, "{f}.{s}", .{ block.type_name_ctx.fmt(ip), zir_data[i].str_op.getStr(sema.code), }, .no_embedded_nulls), .nav = .none, @@ -3097,7 +3096,7 @@ pub fn createTypeName( // that builtin from the language, we can consider this. return .{ - .name = try ip.getOrPutStringFmt(gpa, pt.tid, "{}__{s}_{d}", .{ + .name = try ip.getOrPutStringFmt(gpa, pt.tid, "{f}__{s}_{d}", .{ block.type_name_ctx.fmt(ip), anon_prefix, @intFromEnum(type_index), }, .no_embedded_nulls), .nav = .none, @@ -3581,7 +3580,7 @@ fn ensureResultUsed( }, else => { const msg = msg: { - const msg = try sema.errMsg(src, "value of type '{}' ignored", .{ty.fmt(pt)}); + const msg = try sema.errMsg(src, "value of type '{f}' ignored", .{ty.fmt(pt)}); errdefer msg.destroy(sema.gpa); try sema.errNote(src, msg, "all non-void values must be used", .{}); try sema.errNote(src, msg, "to discard the value, assign it to '_'", .{}); @@ -3851,7 +3850,7 @@ fn zirMakePtrConst(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileErro // The value was initialized through RLS, so we didn't detect the runtime condition earlier. 
// TODO: source location of runtime control flow const init_src = block.src(.{ .node_offset_var_decl_init = inst_data.src_node }); - return sema.fail(block, init_src, "value with comptime-only type '{}' depends on runtime control flow", .{elem_ty.fmt(pt)}); + return sema.fail(block, init_src, "value with comptime-only type '{f}' depends on runtime control flow", .{elem_ty.fmt(pt)}); } // This is a runtime value. @@ -4348,7 +4347,7 @@ fn zirResolveInferredAlloc(sema: *Sema, block: *Block, inst: Zir.Inst.Index) Com // The alloc wasn't comptime-known per the above logic, so the // type cannot be comptime-only. // TODO: source location of runtime control flow - return sema.fail(block, src, "value with comptime-only type '{}' depends on runtime control flow", .{final_elem_ty.fmt(pt)}); + return sema.fail(block, src, "value with comptime-only type '{f}' depends on runtime control flow", .{final_elem_ty.fmt(pt)}); } if (sema.func_is_naked and try final_elem_ty.hasRuntimeBitsSema(pt)) { const mut_src = block.src(.{ .node_offset_store_ptr = inst_data.src_node }); @@ -4445,7 +4444,7 @@ fn zirForLen(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air. if (!object_ty.isIndexable(zcu)) { // Instead of using checkIndexable we customize this error. const msg = msg: { - const msg = try sema.errMsg(arg_src, "type '{}' is not indexable and not a range", .{object_ty.fmt(pt)}); + const msg = try sema.errMsg(arg_src, "type '{f}' is not indexable and not a range", .{object_ty.fmt(pt)}); errdefer msg.destroy(sema.gpa); try sema.errNote(arg_src, msg, "for loop operand must be a range, array, slice, tuple, or vector", .{}); @@ -4480,10 +4479,10 @@ fn zirForLen(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air. 
.for_node_offset = inst_data.src_node, .input_index = len_idx, } }); - try sema.errNote(a_src, msg, "length {} here", .{ + try sema.errNote(a_src, msg, "length {f} here", .{ v.fmtValueSema(pt, sema), }); - try sema.errNote(arg_src, msg, "length {} here", .{ + try sema.errNote(arg_src, msg, "length {f} here", .{ arg_val.fmtValueSema(pt, sema), }); break :msg msg; @@ -4515,7 +4514,7 @@ fn zirForLen(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air. .for_node_offset = inst_data.src_node, .input_index = i, } }); - try sema.errNote(arg_src, msg, "type '{}' has no upper bound", .{ + try sema.errNote(arg_src, msg, "type '{f}' has no upper bound", .{ object_ty.fmt(pt), }); } @@ -4591,7 +4590,7 @@ fn zirCoercePtrElemTy(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileE switch (val_ty.zigTypeTag(zcu)) { .array, .vector => {}, else => if (!val_ty.isTuple(zcu)) { - return sema.fail(block, src, "expected array of '{}', found '{}'", .{ elem_ty.fmt(pt), val_ty.fmt(pt) }); + return sema.fail(block, src, "expected array of '{f}', found '{f}'", .{ elem_ty.fmt(pt), val_ty.fmt(pt) }); }, } const want_ty = try pt.arrayType(.{ @@ -4665,7 +4664,7 @@ fn zirValidateRefTy(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileErr const ty_operand = try sema.resolveTypeOrPoison(block, src, un_tok.operand) orelse return; if (ty_operand.optEuBaseType(zcu).zigTypeTag(zcu) != .pointer) { return sema.failWithOwnedErrorMsg(block, msg: { - const msg = try sema.errMsg(src, "expected type '{}', found pointer", .{ty_operand.fmt(pt)}); + const msg = try sema.errMsg(src, "expected type '{f}', found pointer", .{ty_operand.fmt(pt)}); errdefer msg.destroy(sema.gpa); try sema.errNote(src, msg, "address-of operator always returns a pointer", .{}); break :msg msg; @@ -5074,7 +5073,7 @@ fn validateStructInit( } continue; }; - const template = "missing struct field: {}"; + const template = "missing struct field: {f}"; const args = .{field_name.fmt(ip)}; if (root_msg) |msg| { try 
sema.errNote(init_src, msg, template, args); @@ -5204,7 +5203,7 @@ fn validateStructInit( } continue; }; - const template = "missing struct field: {}"; + const template = "missing struct field: {f}"; const args = .{field_name.fmt(ip)}; if (root_msg) |msg| { try sema.errNote(init_src, msg, template, args); @@ -5508,11 +5507,11 @@ fn zirValidateDeref(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileErr const operand_ty = sema.typeOf(operand); if (operand_ty.zigTypeTag(zcu) != .pointer) { - return sema.fail(block, src, "cannot dereference non-pointer type '{}'", .{operand_ty.fmt(pt)}); + return sema.fail(block, src, "cannot dereference non-pointer type '{f}'", .{operand_ty.fmt(pt)}); } else switch (operand_ty.ptrSize(zcu)) { .one, .c => {}, - .many => return sema.fail(block, src, "index syntax required for unknown-length pointer type '{}'", .{operand_ty.fmt(pt)}), - .slice => return sema.fail(block, src, "index syntax required for slice type '{}'", .{operand_ty.fmt(pt)}), + .many => return sema.fail(block, src, "index syntax required for unknown-length pointer type '{f}'", .{operand_ty.fmt(pt)}), + .slice => return sema.fail(block, src, "index syntax required for slice type '{f}'", .{operand_ty.fmt(pt)}), } if ((try sema.typeHasOnePossibleValue(operand_ty.childType(zcu))) != null) { @@ -5529,7 +5528,7 @@ fn zirValidateDeref(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileErr const msg = msg: { const msg = try sema.errMsg( src, - "values of type '{}' must be comptime-known, but operand value is runtime-known", + "values of type '{f}' must be comptime-known, but operand value is runtime-known", .{elem_ty.fmt(pt)}, ); errdefer msg.destroy(sema.gpa); @@ -5561,7 +5560,7 @@ fn zirValidateDestructure(sema: *Sema, block: *Block, inst: Zir.Inst.Index) Comp if (!typeIsDestructurable(operand_ty, zcu)) { return sema.failWithOwnedErrorMsg(block, msg: { - const msg = try sema.errMsg(src, "type '{}' cannot be destructured", .{operand_ty.fmt(pt)}); + const msg = try 
sema.errMsg(src, "type '{f}' cannot be destructured", .{operand_ty.fmt(pt)}); errdefer msg.destroy(sema.gpa); try sema.errNote(destructure_src, msg, "result destructured here", .{}); if (operand_ty.zigTypeTag(pt.zcu) == .error_union) { @@ -5575,9 +5574,8 @@ fn zirValidateDestructure(sema: *Sema, block: *Block, inst: Zir.Inst.Index) Comp if (operand_ty.arrayLen(zcu) != extra.expect_len) { return sema.failWithOwnedErrorMsg(block, msg: { - const msg = try sema.errMsg(src, "expected {} elements for destructure, found {}", .{ - extra.expect_len, - operand_ty.arrayLen(zcu), + const msg = try sema.errMsg(src, "expected {d} elements for destructure, found {d}", .{ + extra.expect_len, operand_ty.arrayLen(zcu), }); errdefer msg.destroy(sema.gpa); try sema.errNote(destructure_src, msg, "result destructured here", .{}); @@ -5604,12 +5602,12 @@ fn failWithBadMemberAccess( else => unreachable, }; if (agg_ty.typeDeclInst(zcu)) |inst| if ((inst.resolve(ip) orelse return error.AnalysisFail) == .main_struct_inst) { - return sema.fail(block, field_src, "root source file struct '{}' has no member named '{}'", .{ + return sema.fail(block, field_src, "root source file struct '{f}' has no member named '{f}'", .{ agg_ty.fmt(pt), field_name.fmt(ip), }); }; - return sema.fail(block, field_src, "{s} '{}' has no member named '{}'", .{ + return sema.fail(block, field_src, "{s} '{f}' has no member named '{f}'", .{ kw_name, agg_ty.fmt(pt), field_name.fmt(ip), }); } @@ -5629,7 +5627,7 @@ fn failWithBadStructFieldAccess( const msg = msg: { const msg = try sema.errMsg( field_src, - "no field named '{}' in struct '{}'", + "no field named '{f}' in struct '{f}'", .{ field_name.fmt(ip), struct_type.name.fmt(ip) }, ); errdefer msg.destroy(sema.gpa); @@ -5655,7 +5653,7 @@ fn failWithBadUnionFieldAccess( const msg = msg: { const msg = try sema.errMsg( field_src, - "no field named '{}' in union '{}'", + "no field named '{f}' in union '{f}'", .{ field_name.fmt(ip), union_obj.name.fmt(ip) }, ); errdefer 
msg.destroy(gpa); @@ -5907,30 +5905,29 @@ fn zirCompileLog( const zcu = pt.zcu; const gpa = zcu.gpa; - var buf: std.ArrayListUnmanaged(u8) = .empty; - defer buf.deinit(gpa); - - const writer = buf.writer(gpa); + var aw: std.io.Writer.Allocating = .init(gpa); + defer aw.deinit(); + const writer = &aw.writer; const extra = sema.code.extraData(Zir.Inst.NodeMultiOp, extended.operand); const src_node = extra.data.src_node; const args = sema.code.refSlice(extra.end, extended.small); for (args, 0..) |arg_ref, i| { - if (i != 0) try writer.print(", ", .{}); + if (i != 0) writer.writeAll(", ") catch return error.OutOfMemory; const arg = try sema.resolveInst(arg_ref); const arg_ty = sema.typeOf(arg); if (try sema.resolveValueResolveLazy(arg)) |val| { - try writer.print("@as({}, {})", .{ + writer.print("@as({f}, {f})", .{ arg_ty.fmt(pt), val.fmtValueSema(pt, sema), - }); + }) catch return error.OutOfMemory; } else { - try writer.print("@as({}, [runtime value])", .{arg_ty.fmt(pt)}); + writer.print("@as({f}, [runtime value])", .{arg_ty.fmt(pt)}) catch return error.OutOfMemory; } } - const line_data = try zcu.intern_pool.getOrPutString(gpa, pt.tid, buf.items, .no_embedded_nulls); + const line_data = try zcu.intern_pool.getOrPutString(gpa, pt.tid, aw.getWritten(), .no_embedded_nulls); const line_idx: Zcu.CompileLogLine.Index = if (zcu.free_compile_log_lines.pop()) |idx| idx: { zcu.compile_log_lines.items[@intFromEnum(idx)] = .{ @@ -6472,7 +6469,7 @@ fn resolveAnalyzedBlock( const type_src = src; // TODO: better source location if (try resolved_ty.comptimeOnlySema(pt)) { const msg = msg: { - const msg = try sema.errMsg(type_src, "value with comptime-only type '{}' depends on runtime control flow", .{resolved_ty.fmt(pt)}); + const msg = try sema.errMsg(type_src, "value with comptime-only type '{f}' depends on runtime control flow", .{resolved_ty.fmt(pt)}); errdefer msg.destroy(sema.gpa); const runtime_src = child_block.runtime_cond orelse child_block.runtime_loop.?; @@ -6588,7 
+6585,7 @@ fn zirExport(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!void { if (ptr_ty.zigTypeTag(zcu) != .pointer) { - return sema.fail(block, ptr_src, "expected pointer type, found '{}'", .{ptr_ty.fmt(pt)}); + return sema.fail(block, ptr_src, "expected pointer type, found '{f}'", .{ptr_ty.fmt(pt)}); } const ptr_ty_info = ptr_ty.ptrInfo(zcu); if (ptr_ty_info.flags.size == .slice) { @@ -6611,7 +6608,7 @@ fn zirExport(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!void const export_ty = Value.fromInterned(uav.val).typeOf(zcu); if (!try sema.validateExternType(export_ty, .other)) { return sema.failWithOwnedErrorMsg(block, msg: { - const msg = try sema.errMsg(src, "unable to export type '{}'", .{export_ty.fmt(pt)}); + const msg = try sema.errMsg(src, "unable to export type '{f}'", .{export_ty.fmt(pt)}); errdefer msg.destroy(sema.gpa); try sema.explainWhyTypeIsNotExtern(msg, src, export_ty, .other); try sema.addDeclaredHereNote(msg, export_ty); @@ -6663,7 +6660,7 @@ pub fn analyzeExport( if (!try sema.validateExternType(export_ty, .other)) { return sema.failWithOwnedErrorMsg(block, msg: { - const msg = try sema.errMsg(src, "unable to export type '{}'", .{export_ty.fmt(pt)}); + const msg = try sema.errMsg(src, "unable to export type '{f}'", .{export_ty.fmt(pt)}); errdefer msg.destroy(gpa); try sema.explainWhyTypeIsNotExtern(msg, src, export_ty, .other); @@ -7287,7 +7284,7 @@ fn checkCallArgumentCount( opt_child.childType(zcu).zigTypeTag(zcu) == .@"fn")) { const msg = msg: { - const msg = try sema.errMsg(func_src, "cannot call optional type '{}'", .{ + const msg = try sema.errMsg(func_src, "cannot call optional type '{f}'", .{ callee_ty.fmt(pt), }); errdefer msg.destroy(sema.gpa); @@ -7299,7 +7296,7 @@ fn checkCallArgumentCount( }, else => {}, } - return sema.fail(block, func_src, "type '{}' not a function", .{callee_ty.fmt(pt)}); + return sema.fail(block, func_src, "type '{f}' not a function", .{callee_ty.fmt(pt)}); }; const 
func_ty_info = zcu.typeToFunc(func_ty).?; @@ -7362,7 +7359,7 @@ fn callBuiltin( }, else => {}, } - std.debug.panic("type '{}' is not a function calling builtin fn", .{callee_ty.fmt(pt)}); + std.debug.panic("type '{f}' is not a function calling builtin fn", .{callee_ty.fmt(pt)}); }; const func_ty_info = zcu.typeToFunc(func_ty).?; @@ -7746,7 +7743,7 @@ fn analyzeCall( if (!param_ty.isValidParamType(zcu)) { const opaque_str = if (param_ty.zigTypeTag(zcu) == .@"opaque") "opaque " else ""; - return sema.fail(block, param_src, "parameter of {s}type '{}' not allowed", .{ + return sema.fail(block, param_src, "parameter of {s}type '{f}' not allowed", .{ opaque_str, param_ty.fmt(pt), }); } @@ -7843,7 +7840,7 @@ fn analyzeCall( if (!full_ty.isValidReturnType(zcu)) { const opaque_str = if (full_ty.zigTypeTag(zcu) == .@"opaque") "opaque " else ""; - return sema.fail(block, func_ret_ty_src, "{s}return type '{}' not allowed", .{ + return sema.fail(block, func_ret_ty_src, "{s}return type '{f}' not allowed", .{ opaque_str, full_ty.fmt(pt), }); } @@ -8301,7 +8298,7 @@ fn handleTailCall(sema: *Sema, block: *Block, call_src: LazySrcLoc, func_ty: Typ } const owner_func_ty: Type = .fromInterned(zcu.funcInfo(sema.owner.unwrap().func).ty); if (owner_func_ty.toIntern() != func_ty.toIntern()) { - return sema.fail(block, call_src, "unable to perform tail call: type of function being called '{}' does not match type of calling function '{}'", .{ + return sema.fail(block, call_src, "unable to perform tail call: type of function being called '{f}' does not match type of calling function '{f}'", .{ func_ty.fmt(pt), owner_func_ty.fmt(pt), }); } @@ -8325,9 +8322,9 @@ fn zirOptionalType(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileErro const operand_src = block.src(.{ .node_offset_un_op = inst_data.src_node }); const child_type = try sema.resolveType(block, operand_src, inst_data.operand); if (child_type.zigTypeTag(zcu) == .@"opaque") { - return sema.fail(block, operand_src, "opaque type 
'{}' cannot be optional", .{child_type.fmt(pt)}); + return sema.fail(block, operand_src, "opaque type '{f}' cannot be optional", .{child_type.fmt(pt)}); } else if (child_type.zigTypeTag(zcu) == .null) { - return sema.fail(block, operand_src, "type '{}' cannot be optional", .{child_type.fmt(pt)}); + return sema.fail(block, operand_src, "type '{f}' cannot be optional", .{child_type.fmt(pt)}); } const opt_type = try pt.optionalType(child_type.toIntern()); @@ -8388,7 +8385,7 @@ fn zirVecArrElemType(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileEr const vec_ty = try sema.resolveTypeOrPoison(block, LazySrcLoc.unneeded, un_node.operand) orelse return .generic_poison_type; switch (vec_ty.zigTypeTag(zcu)) { .array, .vector => {}, - else => return sema.fail(block, block.nodeOffset(un_node.src_node), "expected array or vector type, found '{}'", .{vec_ty.fmt(pt)}), + else => return sema.fail(block, block.nodeOffset(un_node.src_node), "expected array or vector type, found '{f}'", .{vec_ty.fmt(pt)}), } return Air.internedToRef(vec_ty.childType(zcu).toIntern()); } @@ -8456,7 +8453,7 @@ fn validateArrayElemType(sema: *Sema, block: *Block, elem_type: Type, elem_src: const pt = sema.pt; const zcu = pt.zcu; if (elem_type.zigTypeTag(zcu) == .@"opaque") { - return sema.fail(block, elem_src, "array of opaque type '{}' not allowed", .{elem_type.fmt(pt)}); + return sema.fail(block, elem_src, "array of opaque type '{f}' not allowed", .{elem_type.fmt(pt)}); } else if (elem_type.zigTypeTag(zcu) == .noreturn) { return sema.fail(block, elem_src, "array of 'noreturn' not allowed", .{}); } @@ -8492,7 +8489,7 @@ fn zirErrorUnionType(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileEr const payload = try sema.resolveType(block, rhs_src, extra.rhs); if (error_set.zigTypeTag(zcu) != .error_set) { - return sema.fail(block, lhs_src, "expected error set type, found '{}'", .{ + return sema.fail(block, lhs_src, "expected error set type, found '{f}'", .{ error_set.fmt(pt), }); } @@ 
-8505,11 +8502,11 @@ fn validateErrorUnionPayloadType(sema: *Sema, block: *Block, payload_ty: Type, p const pt = sema.pt; const zcu = pt.zcu; if (payload_ty.zigTypeTag(zcu) == .@"opaque") { - return sema.fail(block, payload_src, "error union with payload of opaque type '{}' not allowed", .{ + return sema.fail(block, payload_src, "error union with payload of opaque type '{f}' not allowed", .{ payload_ty.fmt(pt), }); } else if (payload_ty.zigTypeTag(zcu) == .error_set) { - return sema.fail(block, payload_src, "error union with payload of error set type '{}' not allowed", .{ + return sema.fail(block, payload_src, "error union with payload of error set type '{f}' not allowed", .{ payload_ty.fmt(pt), }); } @@ -8647,9 +8644,9 @@ fn zirMergeErrorSets(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileEr const lhs_ty = try sema.analyzeAsType(block, lhs_src, lhs); const rhs_ty = try sema.analyzeAsType(block, rhs_src, rhs); if (lhs_ty.zigTypeTag(zcu) != .error_set) - return sema.fail(block, lhs_src, "expected error set type, found '{}'", .{lhs_ty.fmt(pt)}); + return sema.fail(block, lhs_src, "expected error set type, found '{f}'", .{lhs_ty.fmt(pt)}); if (rhs_ty.zigTypeTag(zcu) != .error_set) - return sema.fail(block, rhs_src, "expected error set type, found '{}'", .{rhs_ty.fmt(pt)}); + return sema.fail(block, rhs_src, "expected error set type, found '{f}'", .{rhs_ty.fmt(pt)}); // Anything merged with anyerror is anyerror. 
if (lhs_ty.toIntern() == .anyerror_type or rhs_ty.toIntern() == .anyerror_type) { @@ -8759,7 +8756,7 @@ fn zirIntFromEnum(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError return sema.fail( block, operand_src, - "untagged union '{}' cannot be converted to integer", + "untagged union '{f}' cannot be converted to integer", .{operand_ty.fmt(pt)}, ); }; @@ -8767,7 +8764,7 @@ fn zirIntFromEnum(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError break :blk try sema.unionToTag(block, tag_ty, operand, operand_src); }, else => { - return sema.fail(block, operand_src, "expected enum or tagged union, found '{}'", .{ + return sema.fail(block, operand_src, "expected enum or tagged union, found '{f}'", .{ operand_ty.fmt(pt), }); }, @@ -8778,7 +8775,7 @@ fn zirIntFromEnum(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError // TODO: use correct solution // https://github.com/ziglang/zig/issues/15909 if (enum_tag_ty.enumFieldCount(zcu) == 0 and !enum_tag_ty.isNonexhaustiveEnum(zcu)) { - return sema.fail(block, operand_src, "cannot use @intFromEnum on empty enum '{}'", .{ + return sema.fail(block, operand_src, "cannot use @intFromEnum on empty enum '{f}'", .{ enum_tag_ty.fmt(pt), }); } @@ -8812,7 +8809,7 @@ fn zirEnumFromInt(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError const operand_ty = sema.typeOf(operand); if (dest_ty.zigTypeTag(zcu) != .@"enum") { - return sema.fail(block, src, "expected enum, found '{}'", .{dest_ty.fmt(pt)}); + return sema.fail(block, src, "expected enum, found '{f}'", .{dest_ty.fmt(pt)}); } _ = try sema.checkIntType(block, operand_src, operand_ty); @@ -8822,7 +8819,7 @@ fn zirEnumFromInt(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError if (try sema.intFitsInType(int_val, int_tag_ty, null)) { return Air.internedToRef((try pt.getCoerced(int_val, dest_ty)).toIntern()); } - return sema.fail(block, src, "int value '{}' out of range of non-exhaustive enum '{}'", .{ + return sema.fail(block, src, "int 
value '{f}' out of range of non-exhaustive enum '{f}'", .{ int_val.fmtValueSema(pt, sema), dest_ty.fmt(pt), }); } @@ -8830,7 +8827,7 @@ fn zirEnumFromInt(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError return sema.failWithUseOfUndef(block, operand_src); } if (!(try sema.enumHasInt(dest_ty, int_val))) { - return sema.fail(block, src, "enum '{}' has no tag with value '{}'", .{ + return sema.fail(block, src, "enum '{f}' has no tag with value '{f}'", .{ dest_ty.fmt(pt), int_val.fmtValueSema(pt, sema), }); } @@ -9024,7 +9021,7 @@ fn zirErrUnionPayload( const operand_src = src; const err_union_ty = sema.typeOf(operand); if (err_union_ty.zigTypeTag(zcu) != .error_union) { - return sema.fail(block, operand_src, "expected error union type, found '{}'", .{ + return sema.fail(block, operand_src, "expected error union type, found '{f}'", .{ err_union_ty.fmt(pt), }); } @@ -9092,7 +9089,7 @@ fn analyzeErrUnionPayloadPtr( assert(operand_ty.zigTypeTag(zcu) == .pointer); if (operand_ty.childType(zcu).zigTypeTag(zcu) != .error_union) { - return sema.fail(block, src, "expected error union type, found '{}'", .{ + return sema.fail(block, src, "expected error union type, found '{f}'", .{ operand_ty.childType(zcu).fmt(pt), }); } @@ -9169,7 +9166,7 @@ fn analyzeErrUnionCode(sema: *Sema, block: *Block, src: LazySrcLoc, operand: Air const zcu = pt.zcu; const operand_ty = sema.typeOf(operand); if (operand_ty.zigTypeTag(zcu) != .error_union) { - return sema.fail(block, src, "expected error union type, found '{}'", .{ + return sema.fail(block, src, "expected error union type, found '{f}'", .{ operand_ty.fmt(pt), }); } @@ -9205,7 +9202,7 @@ fn analyzeErrUnionCodePtr(sema: *Sema, block: *Block, src: LazySrcLoc, operand: assert(operand_ty.zigTypeTag(zcu) == .pointer); if (operand_ty.childType(zcu).zigTypeTag(zcu) != .error_union) { - return sema.fail(block, src, "expected error union type, found '{}'", .{ + return sema.fail(block, src, "expected error union type, found '{f}'", .{ 
operand_ty.childType(zcu).fmt(pt), }); } @@ -9450,19 +9447,17 @@ fn callConvSupportsVarArgs(cc: std.builtin.CallingConvention.Tag) bool { fn checkCallConvSupportsVarArgs(sema: *Sema, block: *Block, src: LazySrcLoc, cc: std.builtin.CallingConvention.Tag) CompileError!void { const CallingConventionsSupportingVarArgsList = struct { arch: std.Target.Cpu.Arch, - pub fn format(ctx: @This(), comptime fmt: []const u8, options: std.fmt.FormatOptions, writer: anytype) !void { - _ = fmt; - _ = options; + pub fn format(ctx: @This(), w: *std.io.Writer) std.io.Writer.Error!void { var first = true; for (calling_conventions_supporting_var_args) |cc_inner| { for (std.Target.Cpu.Arch.fromCallingConvention(cc_inner)) |supported_arch| { if (supported_arch == ctx.arch) break; } else continue; // callconv not supported by this arch if (!first) { - try writer.writeAll(", "); + try w.writeAll(", "); } first = false; - try writer.print("'{s}'", .{@tagName(cc_inner)}); + try w.print("'{s}'", .{@tagName(cc_inner)}); } } }; @@ -9472,7 +9467,7 @@ fn checkCallConvSupportsVarArgs(sema: *Sema, block: *Block, src: LazySrcLoc, cc: const msg = try sema.errMsg(src, "variadic function does not support '{s}' calling convention", .{@tagName(cc)}); errdefer msg.destroy(sema.gpa); const target = sema.pt.zcu.getTarget(); - try sema.errNote(src, msg, "supported calling conventions: {}", .{CallingConventionsSupportingVarArgsList{ .arch = target.cpu.arch }}); + try sema.errNote(src, msg, "supported calling conventions: {f}", .{CallingConventionsSupportingVarArgsList{ .arch = target.cpu.arch }}); break :msg msg; }); } @@ -9520,7 +9515,7 @@ fn checkMergeAllowed(sema: *Sema, block: *Block, src: LazySrcLoc, peer_ty: Type) } return sema.failWithOwnedErrorMsg(block, msg: { - const msg = try sema.errMsg(src, "value with non-mergable pointer type '{}' depends on runtime control flow", .{peer_ty.fmt(pt)}); + const msg = try sema.errMsg(src, "value with non-mergable pointer type '{f}' depends on runtime control flow", 
.{peer_ty.fmt(pt)}); errdefer msg.destroy(sema.gpa); const runtime_src = block.runtime_cond orelse block.runtime_loop.?; @@ -9598,13 +9593,13 @@ fn funcCommon( } if (!param_ty.isValidParamType(zcu)) { const opaque_str = if (param_ty.zigTypeTag(zcu) == .@"opaque") "opaque " else ""; - return sema.fail(block, param_src, "parameter of {s}type '{}' not allowed", .{ + return sema.fail(block, param_src, "parameter of {s}type '{f}' not allowed", .{ opaque_str, param_ty.fmt(pt), }); } if (!param_ty_generic and !target_util.fnCallConvAllowsZigTypes(cc) and !try sema.validateExternType(param_ty, .param_ty)) { const msg = msg: { - const msg = try sema.errMsg(param_src, "parameter of type '{}' not allowed in function with calling convention '{s}'", .{ + const msg = try sema.errMsg(param_src, "parameter of type '{f}' not allowed in function with calling convention '{s}'", .{ param_ty.fmt(pt), @tagName(cc), }); errdefer msg.destroy(sema.gpa); @@ -9618,7 +9613,7 @@ fn funcCommon( } if (param_ty_comptime and !param_is_comptime and has_body and !block.isComptime()) { const msg = msg: { - const msg = try sema.errMsg(param_src, "parameter of type '{}' must be declared comptime", .{ + const msg = try sema.errMsg(param_src, "parameter of type '{f}' must be declared comptime", .{ param_ty.fmt(pt), }); errdefer msg.destroy(sema.gpa); @@ -9798,7 +9793,7 @@ fn finishFunc( if (!return_type.isValidReturnType(zcu)) { const opaque_str = if (return_type.zigTypeTag(zcu) == .@"opaque") "opaque " else ""; - return sema.fail(block, ret_ty_src, "{s}return type '{}' not allowed", .{ + return sema.fail(block, ret_ty_src, "{s}return type '{f}' not allowed", .{ opaque_str, return_type.fmt(pt), }); } @@ -9806,7 +9801,7 @@ fn finishFunc( !try sema.validateExternType(return_type, .ret_ty)) { const msg = msg: { - const msg = try sema.errMsg(ret_ty_src, "return type '{}' not allowed in function with calling convention '{s}'", .{ + const msg = try sema.errMsg(ret_ty_src, "return type '{f}' not allowed in 
function with calling convention '{s}'", .{ return_type.fmt(pt), @tagName(cc_resolved), }); errdefer msg.destroy(gpa); @@ -9828,7 +9823,7 @@ fn finishFunc( const msg = try sema.errMsg( ret_ty_src, - "function with comptime-only return type '{}' requires all parameters to be comptime", + "function with comptime-only return type '{f}' requires all parameters to be comptime", .{return_type.fmt(pt)}, ); errdefer msg.destroy(sema.gpa); @@ -9897,17 +9892,15 @@ fn finishFunc( .bad_arch => |allowed_archs| { const ArchListFormatter = struct { archs: []const std.Target.Cpu.Arch, - pub fn format(formatter: @This(), comptime fmt: []const u8, options: std.fmt.FormatOptions, writer: anytype) !void { - _ = fmt; - _ = options; + pub fn format(formatter: @This(), w: *std.io.Writer) std.io.Writer.Error!void { for (formatter.archs, 0..) |arch, i| { if (i != 0) - try writer.writeAll(", "); - try writer.print("'{s}'", .{@tagName(arch)}); + try w.writeAll(", "); + try w.print("'{s}'", .{@tagName(arch)}); } } }; - return sema.fail(block, cc_src, "calling convention '{s}' only available on architectures {}", .{ + return sema.fail(block, cc_src, "calling convention '{s}' only available on architectures {f}", .{ @tagName(cc_resolved), ArchListFormatter{ .archs = allowed_archs }, }); @@ -10008,7 +10001,7 @@ fn analyzeAs( const operand = try sema.resolveInst(zir_operand); const dest_ty = try sema.resolveTypeOrPoison(block, src, zir_dest_type) orelse return operand; switch (dest_ty.zigTypeTag(zcu)) { - .@"opaque" => return sema.fail(block, src, "cannot cast to opaque type '{}'", .{dest_ty.fmt(pt)}), + .@"opaque" => return sema.fail(block, src, "cannot cast to opaque type '{f}'", .{dest_ty.fmt(pt)}), .noreturn => return sema.fail(block, src, "cannot cast to noreturn", .{}), else => {}, } @@ -10036,12 +10029,12 @@ fn zirIntFromPtr(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError! 
const ptr_ty = operand_ty.scalarType(zcu); const is_vector = operand_ty.zigTypeTag(zcu) == .vector; if (!ptr_ty.isPtrAtRuntime(zcu)) { - return sema.fail(block, ptr_src, "expected pointer, found '{}'", .{ptr_ty.fmt(pt)}); + return sema.fail(block, ptr_src, "expected pointer, found '{f}'", .{ptr_ty.fmt(pt)}); } const pointee_ty = ptr_ty.childType(zcu); if (try ptr_ty.comptimeOnlySema(pt)) { const msg = msg: { - const msg = try sema.errMsg(ptr_src, "comptime-only type '{}' has no pointer address", .{pointee_ty.fmt(pt)}); + const msg = try sema.errMsg(ptr_src, "comptime-only type '{f}' has no pointer address", .{pointee_ty.fmt(pt)}); errdefer msg.destroy(sema.gpa); try sema.explainWhyTypeIsComptime(msg, ptr_src, pointee_ty); break :msg msg; @@ -10289,14 +10282,14 @@ fn zirBitcast(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air .type, .undefined, .void, - => return sema.fail(block, src, "cannot @bitCast to '{}'", .{dest_ty.fmt(pt)}), + => return sema.fail(block, src, "cannot @bitCast to '{f}'", .{dest_ty.fmt(pt)}), .@"enum" => { const msg = msg: { - const msg = try sema.errMsg(src, "cannot @bitCast to '{}'", .{dest_ty.fmt(pt)}); + const msg = try sema.errMsg(src, "cannot @bitCast to '{f}'", .{dest_ty.fmt(pt)}); errdefer msg.destroy(sema.gpa); switch (operand_ty.zigTypeTag(zcu)) { - .int, .comptime_int => try sema.errNote(src, msg, "use @enumFromInt to cast from '{}'", .{operand_ty.fmt(pt)}), + .int, .comptime_int => try sema.errNote(src, msg, "use @enumFromInt to cast from '{f}'", .{operand_ty.fmt(pt)}), else => {}, } @@ -10307,11 +10300,11 @@ fn zirBitcast(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air .pointer => { const msg = msg: { - const msg = try sema.errMsg(src, "cannot @bitCast to '{}'", .{dest_ty.fmt(pt)}); + const msg = try sema.errMsg(src, "cannot @bitCast to '{f}'", .{dest_ty.fmt(pt)}); errdefer msg.destroy(sema.gpa); switch (operand_ty.zigTypeTag(zcu)) { - .int, .comptime_int => try sema.errNote(src, msg, "use 
@ptrFromInt to cast from '{}'", .{operand_ty.fmt(pt)}), - .pointer => try sema.errNote(src, msg, "use @ptrCast to cast from '{}'", .{operand_ty.fmt(pt)}), + .int, .comptime_int => try sema.errNote(src, msg, "use @ptrFromInt to cast from '{f}'", .{operand_ty.fmt(pt)}), + .pointer => try sema.errNote(src, msg, "use @ptrCast to cast from '{f}'", .{operand_ty.fmt(pt)}), else => {}, } @@ -10325,7 +10318,7 @@ fn zirBitcast(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air .@"union" => "union", else => unreachable, }; - return sema.fail(block, src, "cannot @bitCast to '{}'; {s} does not have a guaranteed in-memory layout", .{ + return sema.fail(block, src, "cannot @bitCast to '{f}'; {s} does not have a guaranteed in-memory layout", .{ dest_ty.fmt(pt), container, }); }, @@ -10353,14 +10346,14 @@ fn zirBitcast(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air .type, .undefined, .void, - => return sema.fail(block, operand_src, "cannot @bitCast from '{}'", .{operand_ty.fmt(pt)}), + => return sema.fail(block, operand_src, "cannot @bitCast from '{f}'", .{operand_ty.fmt(pt)}), .@"enum" => { const msg = msg: { - const msg = try sema.errMsg(operand_src, "cannot @bitCast from '{}'", .{operand_ty.fmt(pt)}); + const msg = try sema.errMsg(operand_src, "cannot @bitCast from '{f}'", .{operand_ty.fmt(pt)}); errdefer msg.destroy(sema.gpa); switch (dest_ty.zigTypeTag(zcu)) { - .int, .comptime_int => try sema.errNote(operand_src, msg, "use @intFromEnum to cast to '{}'", .{dest_ty.fmt(pt)}), + .int, .comptime_int => try sema.errNote(operand_src, msg, "use @intFromEnum to cast to '{f}'", .{dest_ty.fmt(pt)}), else => {}, } @@ -10370,11 +10363,11 @@ fn zirBitcast(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air }, .pointer => { const msg = msg: { - const msg = try sema.errMsg(operand_src, "cannot @bitCast from '{}'", .{operand_ty.fmt(pt)}); + const msg = try sema.errMsg(operand_src, "cannot @bitCast from '{f}'", .{operand_ty.fmt(pt)}); 
errdefer msg.destroy(sema.gpa); switch (dest_ty.zigTypeTag(zcu)) { - .int, .comptime_int => try sema.errNote(operand_src, msg, "use @intFromPtr to cast to '{}'", .{dest_ty.fmt(pt)}), - .pointer => try sema.errNote(operand_src, msg, "use @ptrCast to cast to '{}'", .{dest_ty.fmt(pt)}), + .int, .comptime_int => try sema.errNote(operand_src, msg, "use @intFromPtr to cast to '{f}'", .{dest_ty.fmt(pt)}), + .pointer => try sema.errNote(operand_src, msg, "use @ptrCast to cast to '{f}'", .{dest_ty.fmt(pt)}), else => {}, } @@ -10388,7 +10381,7 @@ fn zirBitcast(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air .@"union" => "union", else => unreachable, }; - return sema.fail(block, operand_src, "cannot @bitCast from '{}'; {s} does not have a guaranteed in-memory layout", .{ + return sema.fail(block, operand_src, "cannot @bitCast from '{f}'; {s} does not have a guaranteed in-memory layout", .{ operand_ty.fmt(pt), container, }); }, @@ -10431,7 +10424,7 @@ fn zirFloatCast(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!A else => return sema.fail( block, src, - "expected float or vector type, found '{}'", + "expected float or vector type, found '{f}'", .{dest_ty.fmt(pt)}, ), }; @@ -10441,7 +10434,7 @@ fn zirFloatCast(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!A else => return sema.fail( block, operand_src, - "expected float or vector type, found '{}'", + "expected float or vector type, found '{f}'", .{operand_ty.fmt(pt)}, ), } @@ -10525,7 +10518,7 @@ fn zirElemPtr(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air if (indexable_ty.zigTypeTag(zcu) != .pointer) { const capture_src = block.src(.{ .for_capture_from_input = inst_data.src_node }); const msg = msg: { - const msg = try sema.errMsg(capture_src, "pointer capture of non pointer type '{}'", .{ + const msg = try sema.errMsg(capture_src, "pointer capture of non pointer type '{f}'", .{ indexable_ty.fmt(pt), }); errdefer msg.destroy(sema.gpa); @@ -10667,7 
+10660,7 @@ fn zirSliceSentinelTy(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileE const lhs_ptr_ty = sema.typeOf(try sema.resolveInst(inst_data.operand)); const lhs_ty = switch (lhs_ptr_ty.zigTypeTag(zcu)) { .pointer => lhs_ptr_ty.childType(zcu), - else => return sema.fail(block, ptr_src, "expected pointer, found '{}'", .{lhs_ptr_ty.fmt(pt)}), + else => return sema.fail(block, ptr_src, "expected pointer, found '{f}'", .{lhs_ptr_ty.fmt(pt)}), }; const sentinel_ty: Type = switch (lhs_ty.zigTypeTag(zcu)) { @@ -10682,7 +10675,7 @@ fn zirSliceSentinelTy(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileE }; }, }, - else => return sema.fail(block, src, "slice of non-array type '{}'", .{lhs_ty.fmt(pt)}), + else => return sema.fail(block, src, "slice of non-array type '{f}'", .{lhs_ty.fmt(pt)}), }; return Air.internedToRef(sentinel_ty.toIntern()); @@ -10877,7 +10870,7 @@ const SwitchProngAnalysis = struct { .base_node_inst = capture_src.base_node_inst, .offset = .{ .switch_tag_capture = capture_src.offset.switch_capture }, }; - return sema.fail(block, tag_capture_src, "cannot capture tag of non-union type '{}'", .{ + return sema.fail(block, tag_capture_src, "cannot capture tag of non-union type '{f}'", .{ operand_ty.fmt(pt), }); } @@ -11309,7 +11302,7 @@ fn switchCond( .@"enum", => { if (operand_ty.isSlice(zcu)) { - return sema.fail(block, src, "switch on type '{}'", .{operand_ty.fmt(pt)}); + return sema.fail(block, src, "switch on type '{f}'", .{operand_ty.fmt(pt)}); } if ((try sema.typeHasOnePossibleValue(operand_ty))) |opv| { return Air.internedToRef(opv.toIntern()); @@ -11344,7 +11337,7 @@ fn switchCond( .vector, .frame, .@"anyframe", - => return sema.fail(block, src, "switch on type '{}'", .{operand_ty.fmt(pt)}), + => return sema.fail(block, src, "switch on type '{f}'", .{operand_ty.fmt(pt)}), } } @@ -11445,7 +11438,7 @@ fn zirSwitchBlockErrUnion(sema: *Sema, block: *Block, inst: Zir.Inst.Index) Comp operand_ty; if (operand_err_set.zigTypeTag(zcu) != 
.error_union) { - return sema.fail(block, switch_src, "expected error union type, found '{}'", .{ + return sema.fail(block, switch_src, "expected error union type, found '{f}'", .{ operand_ty.fmt(pt), }); } @@ -11699,7 +11692,7 @@ fn zirSwitchBlock(sema: *Sema, block: *Block, inst: Zir.Inst.Index, operand_is_r // Even if the operand is comptime-known, this `switch` is runtime. if (try operand_ty.comptimeOnlySema(pt)) { return sema.failWithOwnedErrorMsg(block, msg: { - const msg = try sema.errMsg(operand_src, "operand of switch loop has comptime-only type '{}'", .{operand_ty.fmt(pt)}); + const msg = try sema.errMsg(operand_src, "operand of switch loop has comptime-only type '{f}'", .{operand_ty.fmt(pt)}); errdefer msg.destroy(gpa); try sema.errNote(operand_src, msg, "switch loops are evaluated at runtime outside of comptime scopes", .{}); break :msg msg; @@ -11923,14 +11916,14 @@ fn zirSwitchBlock(sema: *Sema, block: *Block, inst: Zir.Inst.Index, operand_is_r cond_ty, i, msg, - "unhandled enumeration value: '{}'", + "unhandled enumeration value: '{f}'", .{field_name.fmt(&zcu.intern_pool)}, ); } try sema.errNote( cond_ty.srcLoc(zcu), msg, - "enum '{}' declared here", + "enum '{f}' declared here", .{cond_ty.fmt(pt)}, ); break :msg msg; @@ -12142,7 +12135,7 @@ fn zirSwitchBlock(sema: *Sema, block: *Block, inst: Zir.Inst.Index, operand_is_r return sema.fail( block, src, - "else prong required when switching on type '{}'", + "else prong required when switching on type '{f}'", .{cond_ty.fmt(pt)}, ); } @@ -12218,7 +12211,7 @@ fn zirSwitchBlock(sema: *Sema, block: *Block, inst: Zir.Inst.Index, operand_is_r .@"anyframe", .comptime_float, .float, - => return sema.fail(block, operand_src, "invalid switch operand type '{}'", .{ + => return sema.fail(block, operand_src, "invalid switch operand type '{f}'", .{ raw_operand_ty.fmt(pt), }), } @@ -12747,7 +12740,7 @@ fn analyzeSwitchRuntimeBlock( if (special.is_inline) switch (operand_ty.zigTypeTag(zcu)) { .@"enum" => { if 
(operand_ty.isNonexhaustiveEnum(zcu) and !union_originally) { - return sema.fail(block, special_prong_src, "cannot enumerate values of type '{}' for 'inline else'", .{ + return sema.fail(block, special_prong_src, "cannot enumerate values of type '{f}' for 'inline else'", .{ operand_ty.fmt(pt), }); } @@ -12803,7 +12796,7 @@ fn analyzeSwitchRuntimeBlock( }, .error_set => { if (operand_ty.isAnyError(zcu)) { - return sema.fail(block, special_prong_src, "cannot enumerate values of type '{}' for 'inline else'", .{ + return sema.fail(block, special_prong_src, "cannot enumerate values of type '{f}' for 'inline else'", .{ operand_ty.fmt(pt), }); } @@ -12964,7 +12957,7 @@ fn analyzeSwitchRuntimeBlock( cases_extra.appendSliceAssumeCapacity(@ptrCast(case_block.instructions.items)); } }, - else => return sema.fail(block, special_prong_src, "cannot enumerate values of type '{}' for 'inline else'", .{ + else => return sema.fail(block, special_prong_src, "cannot enumerate values of type '{f}' for 'inline else'", .{ operand_ty.fmt(pt), }), }; @@ -13478,7 +13471,7 @@ fn validateErrSetSwitch( try sema.errNote( src, msg, - "unhandled error value: 'error.{}'", + "unhandled error value: 'error.{f}'", .{error_name.fmt(ip)}, ); } @@ -13704,7 +13697,7 @@ fn validateSwitchNoRange( const msg = msg: { const msg = try sema.errMsg( operand_src, - "ranges not allowed when switching on type '{}'", + "ranges not allowed when switching on type '{f}'", .{operand_ty.fmt(sema.pt)}, ); errdefer msg.destroy(sema.gpa); @@ -13862,7 +13855,7 @@ fn zirHasField(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai .array_type => break :hf field_name.eqlSlice("len", ip), else => {}, } - return sema.fail(block, ty_src, "type '{}' does not support '@hasField'", .{ + return sema.fail(block, ty_src, "type '{f}' does not support '@hasField'", .{ ty.fmt(pt), }); }; @@ -14050,7 +14043,7 @@ fn zirShl( while (i < rhs_ty.vectorLen(zcu)) : (i += 1) { const rhs_elem = try rhs_val.elemValue(pt, i); if 
(rhs_elem.compareHetero(.gte, bit_value, zcu)) { - return sema.fail(block, rhs_src, "shift amount '{}' at index '{d}' is too large for operand type '{}'", .{ + return sema.fail(block, rhs_src, "shift amount '{f}' at index '{d}' is too large for operand type '{f}'", .{ rhs_elem.fmtValueSema(pt, sema), i, scalar_ty.fmt(pt), @@ -14058,7 +14051,7 @@ fn zirShl( } } } else if (rhs_val.compareHetero(.gte, bit_value, zcu)) { - return sema.fail(block, rhs_src, "shift amount '{}' is too large for operand type '{}'", .{ + return sema.fail(block, rhs_src, "shift amount '{f}' is too large for operand type '{f}'", .{ rhs_val.fmtValueSema(pt, sema), scalar_ty.fmt(pt), }); @@ -14069,19 +14062,19 @@ fn zirShl( while (i < rhs_ty.vectorLen(zcu)) : (i += 1) { const rhs_elem = try rhs_val.elemValue(pt, i); if (rhs_elem.compareHetero(.lt, try pt.intValue(scalar_rhs_ty, 0), zcu)) { - return sema.fail(block, rhs_src, "shift by negative amount '{}' at index '{d}'", .{ + return sema.fail(block, rhs_src, "shift by negative amount '{f}' at index '{d}'", .{ rhs_elem.fmtValueSema(pt, sema), i, }); } } } else if (rhs_val.compareHetero(.lt, try pt.intValue(rhs_ty, 0), zcu)) { - return sema.fail(block, rhs_src, "shift by negative amount '{}'", .{ + return sema.fail(block, rhs_src, "shift by negative amount '{f}'", .{ rhs_val.fmtValueSema(pt, sema), }); } } else if (scalar_rhs_ty.isSignedInt(zcu)) { - return sema.fail(block, rhs_src, "shift by signed type '{}'", .{rhs_ty.fmt(pt)}); + return sema.fail(block, rhs_src, "shift by signed type '{f}'", .{rhs_ty.fmt(pt)}); } const runtime_src = if (maybe_lhs_val) |lhs_val| rs: { @@ -14231,7 +14224,7 @@ fn zirShr( while (i < rhs_ty.vectorLen(zcu)) : (i += 1) { const rhs_elem = try rhs_val.elemValue(pt, i); if (rhs_elem.compareHetero(.gte, bit_value, zcu)) { - return sema.fail(block, rhs_src, "shift amount '{}' at index '{d}' is too large for operand type '{}'", .{ + return sema.fail(block, rhs_src, "shift amount '{f}' at index '{d}' is too large for operand 
type '{f}'", .{ rhs_elem.fmtValueSema(pt, sema), i, scalar_ty.fmt(pt), @@ -14239,7 +14232,7 @@ fn zirShr( } } } else if (rhs_val.compareHetero(.gte, bit_value, zcu)) { - return sema.fail(block, rhs_src, "shift amount '{}' is too large for operand type '{}'", .{ + return sema.fail(block, rhs_src, "shift amount '{f}' is too large for operand type '{f}'", .{ rhs_val.fmtValueSema(pt, sema), scalar_ty.fmt(pt), }); @@ -14250,14 +14243,14 @@ fn zirShr( while (i < rhs_ty.vectorLen(zcu)) : (i += 1) { const rhs_elem = try rhs_val.elemValue(pt, i); if (rhs_elem.compareHetero(.lt, try pt.intValue(rhs_ty.childType(zcu), 0), zcu)) { - return sema.fail(block, rhs_src, "shift by negative amount '{}' at index '{d}'", .{ + return sema.fail(block, rhs_src, "shift by negative amount '{f}' at index '{d}'", .{ rhs_elem.fmtValueSema(pt, sema), i, }); } } } else if (rhs_val.compareHetero(.lt, try pt.intValue(rhs_ty, 0), zcu)) { - return sema.fail(block, rhs_src, "shift by negative amount '{}'", .{ + return sema.fail(block, rhs_src, "shift by negative amount '{f}'", .{ rhs_val.fmtValueSema(pt, sema), }); } @@ -14386,7 +14379,7 @@ fn zirBitNot(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air. 
const scalar_tag = scalar_ty.zigTypeTag(zcu); if (scalar_tag != .int and scalar_tag != .bool) - return sema.fail(block, operand_src, "bitwise not operation on type '{}'", .{operand_ty.fmt(pt)}); + return sema.fail(block, operand_src, "bitwise not operation on type '{f}'", .{operand_ty.fmt(pt)}); return analyzeBitNot(sema, block, operand, src); } @@ -14543,11 +14536,11 @@ fn zirArrayCat(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai const lhs_info = try sema.getArrayCatInfo(block, lhs_src, lhs, rhs_ty) orelse lhs_info: { if (lhs_is_tuple) break :lhs_info undefined; - return sema.fail(block, lhs_src, "expected indexable; found '{}'", .{lhs_ty.fmt(pt)}); + return sema.fail(block, lhs_src, "expected indexable; found '{f}'", .{lhs_ty.fmt(pt)}); }; const rhs_info = try sema.getArrayCatInfo(block, rhs_src, rhs, lhs_ty) orelse { assert(!rhs_is_tuple); - return sema.fail(block, rhs_src, "expected indexable; found '{}'", .{rhs_ty.fmt(pt)}); + return sema.fail(block, rhs_src, "expected indexable; found '{f}'", .{rhs_ty.fmt(pt)}); }; const resolved_elem_ty = t: { @@ -15000,7 +14993,7 @@ fn zirArrayMul(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai // Analyze the lhs first, to catch the case that someone tried to do exponentiation const lhs_info = try sema.getArrayCatInfo(block, lhs_src, lhs, lhs_ty) orelse { const msg = msg: { - const msg = try sema.errMsg(lhs_src, "expected indexable; found '{}'", .{lhs_ty.fmt(pt)}); + const msg = try sema.errMsg(lhs_src, "expected indexable; found '{f}'", .{lhs_ty.fmt(pt)}); errdefer msg.destroy(sema.gpa); switch (lhs_ty.zigTypeTag(zcu)) { .int, .float, .comptime_float, .comptime_int, .vector => { @@ -15132,7 +15125,7 @@ fn zirNegate(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air. 
.int, .comptime_int, .float, .comptime_float => false, else => true, }) { - return sema.fail(block, src, "negation of type '{}'", .{rhs_ty.fmt(pt)}); + return sema.fail(block, src, "negation of type '{f}'", .{rhs_ty.fmt(pt)}); } if (rhs_scalar_ty.isAnyFloat()) { @@ -15163,7 +15156,7 @@ fn zirNegateWrap(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError! switch (rhs_scalar_ty.zigTypeTag(zcu)) { .int, .comptime_int, .float, .comptime_float => {}, - else => return sema.fail(block, src, "negation of type '{}'", .{rhs_ty.fmt(pt)}), + else => return sema.fail(block, src, "negation of type '{f}'", .{rhs_ty.fmt(pt)}), } const lhs = Air.internedToRef((try sema.splat(rhs_ty, try pt.intValue(rhs_scalar_ty, 0))).toIntern()); @@ -15237,7 +15230,7 @@ fn zirDiv(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Ins return sema.fail( block, src, - "ambiguous coercion of division operands '{}' and '{}'; non-zero remainder '{}'", + "ambiguous coercion of division operands '{f}' and '{f}'; non-zero remainder '{f}'", .{ lhs_ty.fmt(pt), rhs_ty.fmt(pt), rem.fmtValueSema(pt, sema) }, ); } @@ -15289,7 +15282,7 @@ fn zirDiv(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Ins return sema.fail( block, src, - "division with '{}' and '{}': signed integers must use @divTrunc, @divFloor, or @divExact", + "division with '{f}' and '{f}': signed integers must use @divTrunc, @divFloor, or @divExact", .{ lhs_ty.fmt(pt), rhs_ty.fmt(pt) }, ); } @@ -15951,7 +15944,7 @@ fn zirOverflowArithmetic( const rhs = try sema.coerce(block, rhs_dest_ty, uncasted_rhs, rhs_src); if (dest_ty.scalarType(zcu).zigTypeTag(zcu) != .int) { - return sema.fail(block, src, "expected vector of integers or integer tag type, found '{}'", .{dest_ty.fmt(pt)}); + return sema.fail(block, src, "expected vector of integers or integer tag type, found '{f}'", .{dest_ty.fmt(pt)}); } const maybe_lhs_val = try sema.resolveValue(lhs); @@ -16157,14 +16150,14 @@ fn analyzeArithmetic( return 
sema.failWithInvalidPtrArithmetic(block, src, "pointer-pointer", "subtraction"); } if (!lhs_ty.elemType2(zcu).eql(rhs_ty.elemType2(zcu), zcu)) { - return sema.fail(block, src, "incompatible pointer arithmetic operands '{}' and '{}'", .{ + return sema.fail(block, src, "incompatible pointer arithmetic operands '{f}' and '{f}'", .{ lhs_ty.fmt(pt), rhs_ty.fmt(pt), }); } const elem_size = lhs_ty.elemType2(zcu).abiSize(zcu); if (elem_size == 0) { - return sema.fail(block, src, "pointer arithmetic requires element type '{}' to have runtime bits", .{ + return sema.fail(block, src, "pointer arithmetic requires element type '{f}' to have runtime bits", .{ lhs_ty.elemType2(zcu).fmt(pt), }); } @@ -16215,7 +16208,7 @@ fn analyzeArithmetic( }; if (!try lhs_ty.elemType2(zcu).hasRuntimeBitsSema(pt)) { - return sema.fail(block, src, "pointer arithmetic requires element type '{}' to have runtime bits", .{ + return sema.fail(block, src, "pointer arithmetic requires element type '{f}' to have runtime bits", .{ lhs_ty.elemType2(zcu).fmt(pt), }); } @@ -16619,7 +16612,7 @@ fn zirCmpEq( if (lhs_ty_tag == .null or rhs_ty_tag == .null) { const non_null_type = if (lhs_ty_tag == .null) rhs_ty else lhs_ty; - return sema.fail(block, src, "comparison of '{}' with null", .{non_null_type.fmt(pt)}); + return sema.fail(block, src, "comparison of '{f}' with null", .{non_null_type.fmt(pt)}); } if (lhs_ty_tag == .@"union" and (rhs_ty_tag == .enum_literal or rhs_ty_tag == .@"enum")) { @@ -16676,7 +16669,7 @@ fn analyzeCmpUnionTag( const msg = msg: { const msg = try sema.errMsg(un_src, "comparison of union and enum literal is only valid for tagged union types", .{}); errdefer msg.destroy(sema.gpa); - try sema.errNote(union_ty.srcLoc(zcu), msg, "union '{}' is not a tagged union", .{union_ty.fmt(pt)}); + try sema.errNote(union_ty.srcLoc(zcu), msg, "union '{f}' is not a tagged union", .{union_ty.fmt(pt)}); break :msg msg; }; return sema.failWithOwnedErrorMsg(block, msg); @@ -16762,7 +16755,7 @@ fn 
analyzeCmp( const instructions = &[_]Air.Inst.Ref{ lhs, rhs }; const resolved_type = try sema.resolvePeerTypes(block, src, instructions, .{ .override = &[_]?LazySrcLoc{ lhs_src, rhs_src } }); if (!resolved_type.isSelfComparable(zcu, is_equality_cmp)) { - return sema.fail(block, src, "operator {s} not allowed for type '{}'", .{ + return sema.fail(block, src, "operator {s} not allowed for type '{f}'", .{ compareOperatorName(op), resolved_type.fmt(pt), }); } @@ -16871,7 +16864,7 @@ fn zirSizeOf(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air. .undefined, .null, .@"opaque", - => return sema.fail(block, operand_src, "no size available for type '{}'", .{ty.fmt(pt)}), + => return sema.fail(block, operand_src, "no size available for type '{f}'", .{ty.fmt(pt)}), .type, .enum_literal, @@ -16912,7 +16905,7 @@ fn zirBitSizeOf(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!A .undefined, .null, .@"opaque", - => return sema.fail(block, operand_src, "no size available for type '{}'", .{operand_ty.fmt(pt)}), + => return sema.fail(block, operand_src, "no size available for type '{f}'", .{operand_ty.fmt(pt)}), .type, .enum_literal, @@ -17002,7 +16995,7 @@ fn zirClosureGet(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstDat const file, const src_base_node = Zcu.LazySrcLoc.resolveBaseNode(block.src_base_inst, zcu).?; const tree = file.getTree(zcu) catch |err| { // In this case we emit a warning + a less precise source location. - log.warn("unable to load {}: {s}", .{ + log.warn("unable to load {f}: {s}", .{ file.path.fmt(zcu.comp), @errorName(err), }); break :name null; @@ -17030,7 +17023,7 @@ fn zirClosureGet(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstDat const file, const src_base_node = Zcu.LazySrcLoc.resolveBaseNode(block.src_base_inst, zcu).?; const tree = file.getTree(zcu) catch |err| { // In this case we emit a warning + a less precise source location. 
- log.warn("unable to load {}: {s}", .{ + log.warn("unable to load {f}: {s}", .{ file.path.fmt(zcu.comp), @errorName(err), }); break :name null; @@ -18212,7 +18205,7 @@ fn log2IntType(sema: *Sema, block: *Block, operand: Type, src: LazySrcLoc) Compi return sema.fail( block, src, - "bit shifting operation expected integer type, found '{}'", + "bit shifting operation expected integer type, found '{f}'", .{operand.fmt(pt)}, ); } @@ -18271,7 +18264,7 @@ fn zirBoolNot(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air const uncasted_ty = sema.typeOf(uncasted_operand); if (uncasted_ty.isVector(zcu)) { if (uncasted_ty.scalarType(zcu).zigTypeTag(zcu) != .bool) { - return sema.fail(block, operand_src, "boolean not operation on type '{}'", .{ + return sema.fail(block, operand_src, "boolean not operation on type '{f}'", .{ uncasted_ty.fmt(pt), }); } @@ -18451,7 +18444,7 @@ fn checkSentinelType(sema: *Sema, block: *Block, src: LazySrcLoc, ty: Type) !voi const pt = sema.pt; const zcu = pt.zcu; if (!ty.isSelfComparable(zcu, true)) { - return sema.fail(block, src, "non-scalar sentinel type '{}'", .{ty.fmt(pt)}); + return sema.fail(block, src, "non-scalar sentinel type '{f}'", .{ty.fmt(pt)}); } } @@ -18501,7 +18494,7 @@ fn checkErrorType(sema: *Sema, block: *Block, src: LazySrcLoc, ty: Type) !void { const zcu = pt.zcu; switch (ty.zigTypeTag(zcu)) { .error_set, .error_union, .undefined => return, - else => return sema.fail(block, src, "expected error union type, found '{}'", .{ + else => return sema.fail(block, src, "expected error union type, found '{f}'", .{ ty.fmt(pt), }), } @@ -18645,7 +18638,7 @@ fn zirTry(sema: *Sema, parent_block: *Block, inst: Zir.Inst.Index) CompileError! 
const pt = sema.pt; const zcu = pt.zcu; if (err_union_ty.zigTypeTag(zcu) != .error_union) { - return sema.fail(parent_block, operand_src, "expected error union type, found '{}'", .{ + return sema.fail(parent_block, operand_src, "expected error union type, found '{f}'", .{ err_union_ty.fmt(pt), }); } @@ -18705,7 +18698,7 @@ fn zirTryPtr(sema: *Sema, parent_block: *Block, inst: Zir.Inst.Index) CompileErr const pt = sema.pt; const zcu = pt.zcu; if (err_union_ty.zigTypeTag(zcu) != .error_union) { - return sema.fail(parent_block, operand_src, "expected error union type, found '{}'", .{ + return sema.fail(parent_block, operand_src, "expected error union type, found '{f}'", .{ err_union_ty.fmt(pt), }); } @@ -18903,7 +18896,7 @@ fn zirRetImplicit( const base_tag = sema.fn_ret_ty.baseZigTypeTag(zcu); if (base_tag == .noreturn) { const msg = msg: { - const msg = try sema.errMsg(ret_ty_src, "function declared '{}' implicitly returns", .{ + const msg = try sema.errMsg(ret_ty_src, "function declared '{f}' implicitly returns", .{ sema.fn_ret_ty.fmt(pt), }); errdefer msg.destroy(sema.gpa); @@ -18913,7 +18906,7 @@ fn zirRetImplicit( return sema.failWithOwnedErrorMsg(block, msg); } else if (base_tag != .void) { const msg = msg: { - const msg = try sema.errMsg(ret_ty_src, "function with non-void return type '{}' implicitly returns", .{ + const msg = try sema.errMsg(ret_ty_src, "function with non-void return type '{f}' implicitly returns", .{ sema.fn_ret_ty.fmt(pt), }); errdefer msg.destroy(sema.gpa); @@ -19302,13 +19295,13 @@ fn zirPtrType(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air if (host_size != 0) { if (bit_offset >= host_size * 8) { - return sema.fail(block, bitoffset_src, "packed type '{}' at bit offset {} starts {} bits after the end of a {} byte host integer", .{ + return sema.fail(block, bitoffset_src, "packed type '{f}' at bit offset {d} starts {d} bits after the end of a {d} byte host integer", .{ elem_ty.fmt(pt), bit_offset, bit_offset - host_size 
* 8, host_size, }); } const elem_bit_size = try elem_ty.bitSizeSema(pt); if (elem_bit_size > host_size * 8 - bit_offset) { - return sema.fail(block, bitoffset_src, "packed type '{}' at bit offset {} ends {} bits after the end of a {} byte host integer", .{ + return sema.fail(block, bitoffset_src, "packed type '{f}' at bit offset {d} ends {d} bits after the end of a {d} byte host integer", .{ elem_ty.fmt(pt), bit_offset, elem_bit_size - (host_size * 8 - bit_offset), host_size, }); } @@ -19323,7 +19316,7 @@ fn zirPtrType(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air } else if (inst_data.size == .c) { if (!try sema.validateExternType(elem_ty, .other)) { const msg = msg: { - const msg = try sema.errMsg(elem_ty_src, "C pointers cannot point to non-C-ABI-compatible type '{}'", .{elem_ty.fmt(pt)}); + const msg = try sema.errMsg(elem_ty_src, "C pointers cannot point to non-C-ABI-compatible type '{f}'", .{elem_ty.fmt(pt)}); errdefer msg.destroy(sema.gpa); try sema.explainWhyTypeIsNotExtern(msg, elem_ty_src, elem_ty, .other); @@ -19340,7 +19333,7 @@ fn zirPtrType(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air if (host_size != 0 and !try sema.validatePackedType(elem_ty)) { return sema.failWithOwnedErrorMsg(block, msg: { - const msg = try sema.errMsg(elem_ty_src, "bit-pointer cannot refer to value of type '{}'", .{elem_ty.fmt(pt)}); + const msg = try sema.errMsg(elem_ty_src, "bit-pointer cannot refer to value of type '{f}'", .{elem_ty.fmt(pt)}); errdefer msg.destroy(sema.gpa); try sema.explainWhyTypeIsNotPacked(msg, elem_ty_src, elem_ty); break :msg msg; @@ -19509,7 +19502,7 @@ fn zirUnionInit(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!A const extra = sema.code.extraData(Zir.Inst.UnionInit, inst_data.payload_index).data; const union_ty = try sema.resolveType(block, ty_src, extra.union_type); if (union_ty.zigTypeTag(pt.zcu) != .@"union") { - return sema.fail(block, ty_src, "expected union type, found '{}'", 
.{union_ty.fmt(pt)}); + return sema.fail(block, ty_src, "expected union type, found '{f}'", .{union_ty.fmt(pt)}); } const field_name = try sema.resolveConstStringIntern(block, field_src, extra.field_name, .{ .simple = .union_field_name }); const init = try sema.resolveInst(extra.init); @@ -19672,7 +19665,7 @@ fn zirStructInit( const msg = try sema.errMsg(src, "cannot initialize 'noreturn' field of union", .{}); errdefer msg.destroy(sema.gpa); - try sema.addFieldErrNote(resolved_ty, field_index, msg, "field '{}' declared here", .{ + try sema.addFieldErrNote(resolved_ty, field_index, msg, "field '{f}' declared here", .{ field_name.fmt(ip), }); try sema.addDeclaredHereNote(msg, resolved_ty); @@ -19791,7 +19784,7 @@ fn finishStructInit( const field_init = struct_type.fieldInit(ip, i); if (field_init == .none) { const field_name = struct_type.field_names.get(ip)[i]; - const template = "missing struct field: {}"; + const template = "missing struct field: {f}"; const args = .{field_name.fmt(ip)}; if (root_msg) |msg| { try sema.errNote(init_src, msg, template, args); @@ -20406,7 +20399,7 @@ fn fieldType( }, else => {}, } - return sema.fail(block, ty_src, "expected struct or union; found '{}'", .{ + return sema.fail(block, ty_src, "expected struct or union; found '{f}'", .{ cur_ty.fmt(pt), }); } @@ -20453,7 +20446,7 @@ fn zirAlignOf(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air const operand_src = block.builtinCallArgSrc(inst_data.src_node, 0); const ty = try sema.resolveType(block, operand_src, inst_data.operand); if (ty.isNoReturn(zcu)) { - return sema.fail(block, operand_src, "no align available for type '{}'", .{ty.fmt(sema.pt)}); + return sema.fail(block, operand_src, "no align available for type '{f}'", .{ty.fmt(sema.pt)}); } const val = try ty.lazyAbiAlignment(sema.pt); return Air.internedToRef(val.toIntern()); @@ -20469,7 +20462,7 @@ fn zirIntFromBool(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError const is_vector = 
operand_ty.zigTypeTag(zcu) == .vector; const operand_scalar_ty = operand_ty.scalarType(zcu); if (operand_scalar_ty.toIntern() != .bool_type) { - return sema.fail(block, src, "expected 'bool', found '{}'", .{operand_scalar_ty.zigTypeTag(zcu)}); + return sema.fail(block, src, "expected 'bool', found '{t}'", .{operand_scalar_ty.zigTypeTag(zcu)}); } const len = if (is_vector) operand_ty.vectorLen(zcu) else undefined; const dest_ty: Type = if (is_vector) try pt.vectorType(.{ .child = .u1_type, .len = len }) else .u1; @@ -20531,7 +20524,7 @@ fn zirAbs( else => return sema.fail( block, operand_src, - "expected integer, float, or vector of either integers or floats, found '{}'", + "expected integer, float, or vector of either integers or floats, found '{f}'", .{operand_ty.fmt(pt)}, ), }; @@ -20600,7 +20593,7 @@ fn zirUnaryMath( else => return sema.fail( block, operand_src, - "expected vector of floats or float type, found '{}'", + "expected vector of floats or float type, found '{f}'", .{operand_ty.fmt(pt)}, ), } @@ -20629,8 +20622,8 @@ fn zirTagName(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air }, .@"enum" => operand_ty, .@"union" => operand_ty.unionTagType(zcu) orelse - return sema.fail(block, src, "union '{}' is untagged", .{operand_ty.fmt(pt)}), - else => return sema.fail(block, operand_src, "expected enum or union; found '{}'", .{ + return sema.fail(block, src, "union '{f}' is untagged", .{operand_ty.fmt(pt)}), + else => return sema.fail(block, operand_src, "expected enum or union; found '{f}'", .{ operand_ty.fmt(pt), }), }; @@ -20638,7 +20631,7 @@ fn zirTagName(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air // TODO I don't think this is the correct way to handle this but // it prevents a crash. 
// https://github.com/ziglang/zig/issues/15909 - return sema.fail(block, operand_src, "cannot get @tagName of empty enum '{}'", .{ + return sema.fail(block, operand_src, "cannot get @tagName of empty enum '{f}'", .{ enum_ty.fmt(pt), }); } @@ -20646,7 +20639,7 @@ fn zirTagName(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air if (try sema.resolveDefinedValue(block, operand_src, casted_operand)) |val| { const field_index = enum_ty.enumTagFieldIndex(val, zcu) orelse { const msg = msg: { - const msg = try sema.errMsg(src, "no field with value '{}' in enum '{}'", .{ + const msg = try sema.errMsg(src, "no field with value '{f}' in enum '{f}'", .{ val.fmtValueSema(pt, sema), enum_ty.fmt(pt), }); errdefer msg.destroy(sema.gpa); @@ -20752,7 +20745,7 @@ fn zirReify( 64 => .f64, 80 => .f80, 128 => .f128, - else => return sema.fail(block, src, "{}-bit float unsupported", .{float.bits}), + else => return sema.fail(block, src, "{d}-bit float unsupported", .{float.bits}), }; return Air.internedToRef(ty.toIntern()); }, @@ -20833,7 +20826,7 @@ fn zirReify( } else if (ptr_size == .c) { if (!try sema.validateExternType(elem_ty, .other)) { const msg = msg: { - const msg = try sema.errMsg(src, "C pointers cannot point to non-C-ABI-compatible type '{}'", .{elem_ty.fmt(pt)}); + const msg = try sema.errMsg(src, "C pointers cannot point to non-C-ABI-compatible type '{f}'", .{elem_ty.fmt(pt)}); errdefer msg.destroy(gpa); try sema.explainWhyTypeIsNotExtern(msg, src, elem_ty, .other); @@ -20946,7 +20939,7 @@ fn zirReify( _ = try pt.getErrorValue(name); const gop = names.getOrPutAssumeCapacity(name); if (gop.found_existing) { - return sema.fail(block, src, "duplicate error '{}'", .{ + return sema.fail(block, src, "duplicate error '{f}'", .{ name.fmt(ip), }); } @@ -21294,7 +21287,7 @@ fn reifyEnum( if (!try sema.intFitsInType(field_value_val, tag_ty, null)) { // TODO: better source location - return sema.fail(block, src, "field '{}' with enumeration value '{}' is too large for 
backing int type '{}'", .{ + return sema.fail(block, src, "field '{f}' with enumeration value '{f}' is too large for backing int type '{f}'", .{ field_name.fmt(ip), field_value_val.fmtValueSema(pt, sema), tag_ty.fmt(pt), @@ -21305,14 +21298,14 @@ fn reifyEnum( if (wip_ty.nextField(ip, field_name, coerced_field_val.toIntern())) |conflict| { return sema.failWithOwnedErrorMsg(block, switch (conflict.kind) { .name => msg: { - const msg = try sema.errMsg(src, "duplicate enum field '{}'", .{field_name.fmt(ip)}); + const msg = try sema.errMsg(src, "duplicate enum field '{f}'", .{field_name.fmt(ip)}); errdefer msg.destroy(gpa); _ = conflict.prev_field_idx; // TODO: this note is incorrect try sema.errNote(src, msg, "other field here", .{}); break :msg msg; }, .value => msg: { - const msg = try sema.errMsg(src, "enum tag value {} already taken", .{field_value_val.fmtValueSema(pt, sema)}); + const msg = try sema.errMsg(src, "enum tag value {f} already taken", .{field_value_val.fmtValueSema(pt, sema)}); errdefer msg.destroy(gpa); _ = conflict.prev_field_idx; // TODO: this note is incorrect try sema.errNote(src, msg, "other enum tag value here", .{}); @@ -21460,13 +21453,13 @@ fn reifyUnion( const enum_index = enum_tag_ty.enumFieldIndex(field_name, zcu) orelse { // TODO: better source location - return sema.fail(block, src, "no field named '{}' in enum '{}'", .{ + return sema.fail(block, src, "no field named '{f}' in enum '{f}'", .{ field_name.fmt(ip), enum_tag_ty.fmt(pt), }); }; if (seen_tags.isSet(enum_index)) { // TODO: better source location - return sema.fail(block, src, "duplicate union field {}", .{field_name.fmt(ip)}); + return sema.fail(block, src, "duplicate union field {f}", .{field_name.fmt(ip)}); } seen_tags.set(enum_index); @@ -21487,7 +21480,7 @@ fn reifyUnion( var it = seen_tags.iterator(.{ .kind = .unset }); while (it.next()) |enum_index| { const field_name = enum_tag_ty.enumFieldName(enum_index, zcu); - try sema.addFieldErrNote(enum_tag_ty, enum_index, msg, 
"field '{}' missing, declared here", .{ + try sema.addFieldErrNote(enum_tag_ty, enum_index, msg, "field '{f}' missing, declared here", .{ field_name.fmt(ip), }); } @@ -21512,7 +21505,7 @@ fn reifyUnion( const gop = field_names.getOrPutAssumeCapacity(field_name); if (gop.found_existing) { // TODO: better source location - return sema.fail(block, src, "duplicate union field {}", .{field_name.fmt(ip)}); + return sema.fail(block, src, "duplicate union field {f}", .{field_name.fmt(ip)}); } field_ty.* = field_type_val.toIntern(); @@ -21544,7 +21537,7 @@ fn reifyUnion( } if (layout == .@"extern" and !try sema.validateExternType(field_ty, .union_field)) { return sema.failWithOwnedErrorMsg(block, msg: { - const msg = try sema.errMsg(src, "extern unions cannot contain fields of type '{}'", .{field_ty.fmt(pt)}); + const msg = try sema.errMsg(src, "extern unions cannot contain fields of type '{f}'", .{field_ty.fmt(pt)}); errdefer msg.destroy(gpa); try sema.explainWhyTypeIsNotExtern(msg, src, field_ty, .union_field); @@ -21554,7 +21547,7 @@ fn reifyUnion( }); } else if (layout == .@"packed" and !try sema.validatePackedType(field_ty)) { return sema.failWithOwnedErrorMsg(block, msg: { - const msg = try sema.errMsg(src, "packed unions cannot contain fields of type '{}'", .{field_ty.fmt(pt)}); + const msg = try sema.errMsg(src, "packed unions cannot contain fields of type '{f}'", .{field_ty.fmt(pt)}); errdefer msg.destroy(gpa); try sema.explainWhyTypeIsNotPacked(msg, src, field_ty); @@ -21636,14 +21629,14 @@ fn reifyTuple( const field_name_index = field_name.toUnsigned(ip) orelse return sema.fail( block, src, - "tuple cannot have non-numeric field '{}'", + "tuple cannot have non-numeric field '{f}'", .{field_name.fmt(ip)}, ); if (field_name_index != field_idx) { return sema.fail( block, src, - "tuple field name '{}' does not match field index {}", + "tuple field name '{d}' does not match field index {d}", .{ field_name_index, field_idx }, ); } @@ -21814,7 +21807,7 @@ fn 
reifyStruct( const field_name = try sema.sliceToIpString(block, src, field_name_val, undefined); if (struct_type.addFieldName(ip, field_name)) |prev_index| { _ = prev_index; // TODO: better source location - return sema.fail(block, src, "duplicate struct field name {}", .{field_name.fmt(ip)}); + return sema.fail(block, src, "duplicate struct field name {f}", .{field_name.fmt(ip)}); } if (any_aligned_fields) { @@ -21883,7 +21876,7 @@ fn reifyStruct( } if (layout == .@"extern" and !try sema.validateExternType(field_ty, .struct_field)) { return sema.failWithOwnedErrorMsg(block, msg: { - const msg = try sema.errMsg(src, "extern structs cannot contain fields of type '{}'", .{field_ty.fmt(pt)}); + const msg = try sema.errMsg(src, "extern structs cannot contain fields of type '{f}'", .{field_ty.fmt(pt)}); errdefer msg.destroy(gpa); try sema.explainWhyTypeIsNotExtern(msg, src, field_ty, .struct_field); @@ -21893,7 +21886,7 @@ fn reifyStruct( }); } else if (layout == .@"packed" and !try sema.validatePackedType(field_ty)) { return sema.failWithOwnedErrorMsg(block, msg: { - const msg = try sema.errMsg(src, "packed structs cannot contain fields of type '{}'", .{field_ty.fmt(pt)}); + const msg = try sema.errMsg(src, "packed structs cannot contain fields of type '{f}'", .{field_ty.fmt(pt)}); errdefer msg.destroy(gpa); try sema.explainWhyTypeIsNotPacked(msg, src, field_ty); @@ -21970,7 +21963,7 @@ fn zirCVaArg(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstData) C if (!try sema.validateExternType(arg_ty, .param_ty)) { const msg = msg: { - const msg = try sema.errMsg(ty_src, "cannot get '{}' from variadic argument", .{arg_ty.fmt(sema.pt)}); + const msg = try sema.errMsg(ty_src, "cannot get '{f}' from variadic argument", .{arg_ty.fmt(sema.pt)}); errdefer msg.destroy(sema.gpa); try sema.explainWhyTypeIsNotExtern(msg, ty_src, arg_ty, .param_ty); @@ -22029,7 +22022,7 @@ fn zirTypeName(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai const ty_src = 
block.builtinCallArgSrc(inst_data.src_node, 0); const ty = try sema.resolveType(block, ty_src, inst_data.operand); - const type_name = try ip.getOrPutStringFmt(sema.gpa, pt.tid, "{}", .{ty.fmt(pt)}, .no_embedded_nulls); + const type_name = try ip.getOrPutStringFmt(sema.gpa, pt.tid, "{f}", .{ty.fmt(pt)}, .no_embedded_nulls); return sema.addNullTerminatedStrLit(type_name); } @@ -22157,7 +22150,7 @@ fn zirPtrFromInt(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError! if (ptr_ty.isSlice(zcu)) { const msg = msg: { - const msg = try sema.errMsg(src, "integer cannot be converted to slice type '{}'", .{ptr_ty.fmt(pt)}); + const msg = try sema.errMsg(src, "integer cannot be converted to slice type '{f}'", .{ptr_ty.fmt(pt)}); errdefer msg.destroy(sema.gpa); try sema.errNote(src, msg, "slice length cannot be inferred from address", .{}); break :msg msg; @@ -22184,7 +22177,7 @@ fn zirPtrFromInt(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError! } if (try ptr_ty.comptimeOnlySema(pt)) { return sema.failWithOwnedErrorMsg(block, msg: { - const msg = try sema.errMsg(src, "pointer to comptime-only type '{}' must be comptime-known, but operand is runtime-known", .{ptr_ty.fmt(pt)}); + const msg = try sema.errMsg(src, "pointer to comptime-only type '{f}' must be comptime-known, but operand is runtime-known", .{ptr_ty.fmt(pt)}); errdefer msg.destroy(sema.gpa); try sema.explainWhyTypeIsComptime(msg, src, ptr_ty); @@ -22241,7 +22234,7 @@ fn ptrFromIntVal( } const addr = try operand_val.toUnsignedIntSema(pt); if (!ptr_ty.isAllowzeroPtr(zcu) and addr == 0) - return sema.fail(block, operand_src, "pointer type '{}' does not allow address zero", .{ptr_ty.fmt(pt)}); + return sema.fail(block, operand_src, "pointer type '{f}' does not allow address zero", .{ptr_ty.fmt(pt)}); if (addr != 0 and ptr_align != .none) { const masked_addr = if (ptr_ty.childType(zcu).fnPtrMaskOrNull(zcu)) |mask| addr & mask @@ -22249,7 +22242,7 @@ fn ptrFromIntVal( addr; if 
(!ptr_align.check(masked_addr)) { - return sema.fail(block, operand_src, "pointer type '{}' requires aligned address", .{ptr_ty.fmt(pt)}); + return sema.fail(block, operand_src, "pointer type '{f}' requires aligned address", .{ptr_ty.fmt(pt)}); } } @@ -22294,8 +22287,8 @@ fn zirErrorCast(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstData errdefer msg.destroy(sema.gpa); const dest_payload_ty = dest_ty.errorUnionPayload(zcu); const operand_payload_ty = operand_ty.errorUnionPayload(zcu); - try sema.errNote(src, msg, "destination payload is '{}'", .{dest_payload_ty.fmt(pt)}); - try sema.errNote(src, msg, "operand payload is '{}'", .{operand_payload_ty.fmt(pt)}); + try sema.errNote(src, msg, "destination payload is '{f}'", .{dest_payload_ty.fmt(pt)}); + try sema.errNote(src, msg, "operand payload is '{f}'", .{operand_payload_ty.fmt(pt)}); try addDeclaredHereNote(sema, msg, dest_ty); try addDeclaredHereNote(sema, msg, operand_ty); break :msg msg; @@ -22340,7 +22333,7 @@ fn zirErrorCast(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstData break :disjoint true; }; if (disjoint and !(operand_tag == .error_union and dest_tag == .error_union)) { - return sema.fail(block, src, "error sets '{}' and '{}' have no common errors", .{ + return sema.fail(block, src, "error sets '{f}' and '{f}' have no common errors", .{ operand_err_ty.fmt(pt), dest_err_ty.fmt(pt), }); } @@ -22360,7 +22353,7 @@ fn zirErrorCast(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstData }; if (!dest_err_ty.isAnyError(zcu) and !Type.errorSetHasFieldIp(ip, dest_err_ty.toIntern(), err_name)) { - return sema.fail(block, src, "'error.{}' not a member of error set '{}'", .{ + return sema.fail(block, src, "'error.{f}' not a member of error set '{f}'", .{ err_name.fmt(ip), dest_err_ty.fmt(pt), }); } @@ -22520,13 +22513,15 @@ fn ptrCastFull( const src_elem_size = src_elem_ty.abiSize(zcu); const dest_elem_size = dest_elem_ty.abiSize(zcu); if (dest_elem_size == 0) { - return 
sema.fail(block, src, "cannot infer length of slice of zero-bit '{}' from '{}'", .{ dest_elem_ty.fmt(pt), operand_ty.fmt(pt) }); + return sema.fail(block, src, "cannot infer length of slice of zero-bit '{f}' from '{f}'", .{ + dest_elem_ty.fmt(pt), operand_ty.fmt(pt), + }); } if (opt_src_len) |src_len| { const bytes = src_len * src_elem_size; const dest_len = std.math.divExact(u64, bytes, dest_elem_size) catch switch (src_info.flags.size) { .slice => return sema.fail(block, src, "slice length '{d}' does not divide exactly into destination elements", .{src_len}), - .one => return sema.fail(block, src, "type '{}' does not divide exactly into destination elements", .{src_elem_ty.fmt(pt)}), + .one => return sema.fail(block, src, "type '{f}' does not divide exactly into destination elements", .{src_elem_ty.fmt(pt)}), else => unreachable, }; break :len .{ .constant = dest_len }; @@ -22544,7 +22539,9 @@ fn ptrCastFull( // The source value has `src_len * src_base_per_elem` values of type `src_base_ty`. // The result value will have `dest_len * dest_base_per_elem` values of type `dest_base_ty`. if (dest_base_ty.toIntern() != src_base_ty.toIntern()) { - return sema.fail(block, src, "cannot infer length of comptime-only '{}' from incompatible '{}'", .{ dest_ty.fmt(pt), operand_ty.fmt(pt) }); + return sema.fail(block, src, "cannot infer length of comptime-only '{f}' from incompatible '{f}'", .{ + dest_ty.fmt(pt), operand_ty.fmt(pt), + }); } // `src_base_ty` is comptime-only, so `src_elem_ty` is comptime-only, so `operand_ty` is // comptime-only, so `operand` is comptime-known, so `opt_src_len` is non-`null`. 
@@ -22552,7 +22549,7 @@ fn ptrCastFull( const base_len = src_len * src_base_per_elem; const dest_len = std.math.divExact(u64, base_len, dest_base_per_elem) catch switch (src_info.flags.size) { .slice => return sema.fail(block, src, "slice length '{d}' does not divide exactly into destination elements", .{src_len}), - .one => return sema.fail(block, src, "type '{}' does not divide exactly into destination elements", .{src_elem_ty.fmt(pt)}), + .one => return sema.fail(block, src, "type '{f}' does not divide exactly into destination elements", .{src_elem_ty.fmt(pt)}), else => unreachable, }; break :len .{ .constant = dest_len }; @@ -22613,7 +22610,7 @@ fn ptrCastFull( ); if (imc_res == .ok) break :check_child; return sema.failWithOwnedErrorMsg(block, msg: { - const msg = try sema.errMsg(src, "pointer element type '{}' cannot coerce into element type '{}'", .{ + const msg = try sema.errMsg(src, "pointer element type '{f}' cannot coerce into element type '{f}'", .{ src_child.fmt(pt), dest_child.fmt(pt), }); errdefer msg.destroy(sema.gpa); @@ -22640,11 +22637,11 @@ fn ptrCastFull( } return sema.failWithOwnedErrorMsg(block, msg: { const msg = if (src_info.sentinel == .none) blk: { - break :blk try sema.errMsg(src, "destination pointer requires '{}' sentinel", .{ + break :blk try sema.errMsg(src, "destination pointer requires '{f}' sentinel", .{ Value.fromInterned(dest_info.sentinel).fmtValueSema(pt, sema), }); } else blk: { - break :blk try sema.errMsg(src, "pointer sentinel '{}' cannot coerce into pointer sentinel '{}'", .{ + break :blk try sema.errMsg(src, "pointer sentinel '{f}' cannot coerce into pointer sentinel '{f}'", .{ Value.fromInterned(src_info.sentinel).fmtValueSema(pt, sema), Value.fromInterned(dest_info.sentinel).fmtValueSema(pt, sema), }); @@ -22657,7 +22654,7 @@ fn ptrCastFull( if (src_info.packed_offset.host_size != dest_info.packed_offset.host_size) { return sema.failWithOwnedErrorMsg(block, msg: { - const msg = try sema.errMsg(src, "pointer host size 
'{}' cannot coerce into pointer host size '{}'", .{ + const msg = try sema.errMsg(src, "pointer host size '{d}' cannot coerce into pointer host size '{d}'", .{ src_info.packed_offset.host_size, dest_info.packed_offset.host_size, }); @@ -22669,7 +22666,7 @@ fn ptrCastFull( if (src_info.packed_offset.bit_offset != dest_info.packed_offset.bit_offset) { return sema.failWithOwnedErrorMsg(block, msg: { - const msg = try sema.errMsg(src, "pointer bit offset '{}' cannot coerce into pointer bit offset '{}'", .{ + const msg = try sema.errMsg(src, "pointer bit offset '{d}' cannot coerce into pointer bit offset '{d}'", .{ src_info.packed_offset.bit_offset, dest_info.packed_offset.bit_offset, }); @@ -22686,7 +22683,7 @@ fn ptrCastFull( if (dest_allows_zero) break :check_allowzero; return sema.failWithOwnedErrorMsg(block, msg: { - const msg = try sema.errMsg(src, "'{}' could have null values which are illegal in type '{}'", .{ + const msg = try sema.errMsg(src, "'{f}' could have null values which are illegal in type '{f}'", .{ operand_ty.fmt(pt), dest_ty.fmt(pt), }); @@ -22714,10 +22711,10 @@ fn ptrCastFull( return sema.failWithOwnedErrorMsg(block, msg: { const msg = try sema.errMsg(src, "{s} increases pointer alignment", .{operation}); errdefer msg.destroy(sema.gpa); - try sema.errNote(operand_src, msg, "'{}' has alignment '{d}'", .{ + try sema.errNote(operand_src, msg, "'{f}' has alignment '{d}'", .{ operand_ty.fmt(pt), src_align.toByteUnits() orelse 0, }); - try sema.errNote(src, msg, "'{}' has alignment '{d}'", .{ + try sema.errNote(src, msg, "'{f}' has alignment '{d}'", .{ dest_ty.fmt(pt), dest_align.toByteUnits() orelse 0, }); try sema.errNote(src, msg, "use @alignCast to assert pointer alignment", .{}); @@ -22731,10 +22728,10 @@ fn ptrCastFull( return sema.failWithOwnedErrorMsg(block, msg: { const msg = try sema.errMsg(src, "{s} changes pointer address space", .{operation}); errdefer msg.destroy(sema.gpa); - try sema.errNote(operand_src, msg, "'{}' has address space 
'{s}'", .{ + try sema.errNote(operand_src, msg, "'{f}' has address space '{s}'", .{ operand_ty.fmt(pt), @tagName(src_info.flags.address_space), }); - try sema.errNote(src, msg, "'{}' has address space '{s}'", .{ + try sema.errNote(src, msg, "'{f}' has address space '{s}'", .{ dest_ty.fmt(pt), @tagName(dest_info.flags.address_space), }); try sema.errNote(src, msg, "use @addrSpaceCast to cast pointer address space", .{}); @@ -22801,7 +22798,7 @@ fn ptrCastFull( if (operand_val.isNull(zcu)) { if (!dest_ty.ptrAllowsZero(zcu)) { - return sema.fail(block, operand_src, "null pointer casted to type '{}'", .{dest_ty.fmt(pt)}); + return sema.fail(block, operand_src, "null pointer casted to type '{f}'", .{dest_ty.fmt(pt)}); } if (dest_ty.zigTypeTag(zcu) == .optional) { return Air.internedToRef((try pt.nullValue(dest_ty)).toIntern()); @@ -23092,7 +23089,7 @@ fn zirTruncate(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai const operand_is_vector = operand_ty.zigTypeTag(zcu) == .vector; const dest_is_vector = dest_ty.zigTypeTag(zcu) == .vector; if (operand_is_vector != dest_is_vector) { - return sema.fail(block, operand_src, "expected type '{}', found '{}'", .{ dest_ty.fmt(pt), operand_ty.fmt(pt) }); + return sema.fail(block, operand_src, "expected type '{f}', found '{f}'", .{ dest_ty.fmt(pt), operand_ty.fmt(pt) }); } if (dest_scalar_ty.zigTypeTag(zcu) == .comptime_int) { @@ -23112,7 +23109,7 @@ fn zirTruncate(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai } if (operand_info.signedness != dest_info.signedness) { - return sema.fail(block, operand_src, "expected {s} integer type, found '{}'", .{ + return sema.fail(block, operand_src, "expected {s} integer type, found '{f}'", .{ @tagName(dest_info.signedness), operand_ty.fmt(pt), }); } @@ -23121,7 +23118,7 @@ fn zirTruncate(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai const msg = msg: { const msg = try sema.errMsg( src, - "destination type '{}' has more bits than source type 
'{}'", + "destination type '{f}' has more bits than source type '{f}'", .{ dest_ty.fmt(pt), operand_ty.fmt(pt) }, ); errdefer msg.destroy(sema.gpa); @@ -23239,7 +23236,7 @@ fn zirByteSwap(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai return sema.fail( block, operand_src, - "@byteSwap requires the number of bits to be evenly divisible by 8, but {} has {} bits", + "@byteSwap requires the number of bits to be evenly divisible by 8, but {f} has {d} bits", .{ scalar_ty.fmt(pt), bits }, ); } @@ -23359,7 +23356,7 @@ fn bitOffsetOf(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!u6 try ty.resolveLayout(pt); switch (ty.zigTypeTag(zcu)) { .@"struct" => {}, - else => return sema.fail(block, ty_src, "expected struct type, found '{}'", .{ty.fmt(pt)}), + else => return sema.fail(block, ty_src, "expected struct type, found '{f}'", .{ty.fmt(pt)}), } const field_index = if (ty.isTuple(zcu)) blk: { @@ -23394,7 +23391,7 @@ fn checkNamespaceType(sema: *Sema, block: *Block, src: LazySrcLoc, ty: Type) Com const zcu = pt.zcu; switch (ty.zigTypeTag(zcu)) { .@"struct", .@"enum", .@"union", .@"opaque" => return, - else => return sema.fail(block, src, "expected struct, enum, union, or opaque; found '{}'", .{ty.fmt(pt)}), + else => return sema.fail(block, src, "expected struct, enum, union, or opaque; found '{f}'", .{ty.fmt(pt)}), } } @@ -23405,7 +23402,7 @@ fn checkIntType(sema: *Sema, block: *Block, src: LazySrcLoc, ty: Type) CompileEr switch (ty.zigTypeTag(zcu)) { .comptime_int => return true, .int => return false, - else => return sema.fail(block, src, "expected integer type, found '{}'", .{ty.fmt(pt)}), + else => return sema.fail(block, src, "expected integer type, found '{f}'", .{ty.fmt(pt)}), } } @@ -23459,7 +23456,7 @@ fn checkPtrOperand( const msg = msg: { const msg = try sema.errMsg( ty_src, - "expected pointer, found '{}'", + "expected pointer, found '{f}'", .{ty.fmt(pt)}, ); errdefer msg.destroy(sema.gpa); @@ -23473,7 +23470,7 @@ fn 
checkPtrOperand( .optional => if (ty.childType(zcu).zigTypeTag(zcu) == .pointer) return, else => {}, } - return sema.fail(block, ty_src, "expected pointer type, found '{}'", .{ty.fmt(pt)}); + return sema.fail(block, ty_src, "expected pointer type, found '{f}'", .{ty.fmt(pt)}); } fn checkPtrType( @@ -23491,7 +23488,7 @@ fn checkPtrType( const msg = msg: { const msg = try sema.errMsg( ty_src, - "expected pointer type, found '{}'", + "expected pointer type, found '{f}'", .{ty.fmt(pt)}, ); errdefer msg.destroy(sema.gpa); @@ -23505,7 +23502,7 @@ fn checkPtrType( .optional => if (ty.childType(zcu).zigTypeTag(zcu) == .pointer) return, else => {}, } - return sema.fail(block, ty_src, "expected pointer type, found '{}'", .{ty.fmt(pt)}); + return sema.fail(block, ty_src, "expected pointer type, found '{f}'", .{ty.fmt(pt)}); } fn checkLogicalPtrOperation(sema: *Sema, block: *Block, src: LazySrcLoc, ty: Type) !void { @@ -23516,7 +23513,7 @@ fn checkLogicalPtrOperation(sema: *Sema, block: *Block, src: LazySrcLoc, ty: Typ const as = ty.ptrAddressSpace(zcu); if (target_util.arePointersLogical(target, as)) { return sema.failWithOwnedErrorMsg(block, msg: { - const msg = try sema.errMsg(src, "illegal operation on logical pointer of type '{}'", .{ty.fmt(pt)}); + const msg = try sema.errMsg(src, "illegal operation on logical pointer of type '{f}'", .{ty.fmt(pt)}); errdefer msg.destroy(sema.gpa); try sema.errNote( src, @@ -23547,7 +23544,7 @@ fn checkVectorElemType( .optional, .pointer => if (ty.isPtrAtRuntime(zcu)) return, else => {}, } - return sema.fail(block, ty_src, "expected integer, float, bool, or pointer for the vector element type; found '{}'", .{ty.fmt(pt)}); + return sema.fail(block, ty_src, "expected integer, float, bool, or pointer for the vector element type; found '{f}'", .{ty.fmt(pt)}); } fn checkFloatType( @@ -23560,7 +23557,7 @@ fn checkFloatType( const zcu = pt.zcu; switch (ty.zigTypeTag(zcu)) { .comptime_int, .comptime_float, .float => {}, - else => return 
sema.fail(block, ty_src, "expected float type, found '{}'", .{ty.fmt(pt)}), + else => return sema.fail(block, ty_src, "expected float type, found '{f}'", .{ty.fmt(pt)}), } } @@ -23576,9 +23573,9 @@ fn checkNumericType( .comptime_float, .float, .comptime_int, .int => {}, .vector => switch (ty.childType(zcu).zigTypeTag(zcu)) { .comptime_float, .float, .comptime_int, .int => {}, - else => |t| return sema.fail(block, ty_src, "expected number, found '{}'", .{t}), + else => |t| return sema.fail(block, ty_src, "expected number, found '{t}'", .{t}), }, - else => return sema.fail(block, ty_src, "expected number, found '{}'", .{ty.fmt(pt)}), + else => return sema.fail(block, ty_src, "expected number, found '{f}'", .{ty.fmt(pt)}), } } @@ -23612,7 +23609,7 @@ fn checkAtomicPtrOperand( error.BadType => return sema.fail( block, elem_ty_src, - "expected bool, integer, float, enum, packed struct, or pointer type; found '{}'", + "expected bool, integer, float, enum, packed struct, or pointer type; found '{f}'", .{elem_ty.fmt(pt)}, ), }; @@ -23673,12 +23670,12 @@ fn checkIntOrVector( const elem_ty = operand_ty.childType(zcu); switch (elem_ty.zigTypeTag(zcu)) { .int => return elem_ty, - else => return sema.fail(block, operand_src, "expected vector of integers; found vector of '{}'", .{ + else => return sema.fail(block, operand_src, "expected vector of integers; found vector of '{f}'", .{ elem_ty.fmt(pt), }), } }, - else => return sema.fail(block, operand_src, "expected integer or vector, found '{}'", .{ + else => return sema.fail(block, operand_src, "expected integer or vector, found '{f}'", .{ operand_ty.fmt(pt), }), } @@ -23698,12 +23695,12 @@ fn checkIntOrVectorAllowComptime( const elem_ty = operand_ty.childType(zcu); switch (elem_ty.zigTypeTag(zcu)) { .int, .comptime_int => return elem_ty, - else => return sema.fail(block, operand_src, "expected vector of integers; found vector of '{}'", .{ + else => return sema.fail(block, operand_src, "expected vector of integers; found vector 
of '{f}'", .{ elem_ty.fmt(pt), }), } }, - else => return sema.fail(block, operand_src, "expected integer or vector, found '{}'", .{ + else => return sema.fail(block, operand_src, "expected integer or vector, found '{f}'", .{ operand_ty.fmt(pt), }), } @@ -23794,7 +23791,7 @@ fn checkVectorizableBinaryOperands( } } else { const msg = msg: { - const msg = try sema.errMsg(src, "mixed scalar and vector operands: '{}' and '{}'", .{ + const msg = try sema.errMsg(src, "mixed scalar and vector operands: '{f}' and '{f}'", .{ lhs_ty.fmt(pt), rhs_ty.fmt(pt), }); errdefer msg.destroy(sema.gpa); @@ -23928,7 +23925,7 @@ fn zirCmpxchg( return sema.fail( block, elem_ty_src, - "expected bool, integer, enum, packed struct, or pointer type; found '{}'", + "expected bool, integer, enum, packed struct, or pointer type; found '{f}'", .{elem_ty.fmt(pt)}, ); } @@ -24012,7 +24009,7 @@ fn zirSplat(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.I switch (dest_ty.zigTypeTag(zcu)) { .array, .vector => {}, - else => return sema.fail(block, src, "expected array or vector type, found '{}'", .{dest_ty.fmt(pt)}), + else => return sema.fail(block, src, "expected array or vector type, found '{f}'", .{dest_ty.fmt(pt)}), } const operand = try sema.resolveInst(extra.rhs); @@ -24088,7 +24085,7 @@ fn zirReduce(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air. const zcu = pt.zcu; if (operand_ty.zigTypeTag(zcu) != .vector) { - return sema.fail(block, operand_src, "expected vector, found '{}'", .{operand_ty.fmt(pt)}); + return sema.fail(block, operand_src, "expected vector, found '{f}'", .{operand_ty.fmt(pt)}); } const scalar_ty = operand_ty.childType(zcu); @@ -24097,13 +24094,13 @@ fn zirReduce(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air. 
switch (operation) { .And, .Or, .Xor => switch (scalar_ty.zigTypeTag(zcu)) { .int, .bool => {}, - else => return sema.fail(block, operand_src, "@reduce operation '{s}' requires integer or boolean operand; found '{}'", .{ + else => return sema.fail(block, operand_src, "@reduce operation '{s}' requires integer or boolean operand; found '{f}'", .{ @tagName(operation), operand_ty.fmt(pt), }), }, .Min, .Max, .Add, .Mul => switch (scalar_ty.zigTypeTag(zcu)) { .int, .float => {}, - else => return sema.fail(block, operand_src, "@reduce operation '{s}' requires integer or float operand; found '{}'", .{ + else => return sema.fail(block, operand_src, "@reduce operation '{s}' requires integer or float operand; found '{f}'", .{ @tagName(operation), operand_ty.fmt(pt), }), }, @@ -24157,7 +24154,7 @@ fn zirShuffle(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air const mask_len = switch (sema.typeOf(mask).zigTypeTag(zcu)) { .array, .vector => sema.typeOf(mask).arrayLen(zcu), - else => return sema.fail(block, mask_src, "expected vector or array, found '{}'", .{sema.typeOf(mask).fmt(pt)}), + else => return sema.fail(block, mask_src, "expected vector or array, found '{f}'", .{sema.typeOf(mask).fmt(pt)}), }; mask_ty = try pt.vectorType(.{ .len = @intCast(mask_len), @@ -24184,11 +24181,14 @@ fn analyzeShuffle( const b_src = block.builtinCallArgSrc(src_node, 2); const mask_src = block.builtinCallArgSrc(src_node, 3); - // If the type of `a` is `@Type(.undefined)`, i.e. the argument is untyped, this is 0, because it is an error to index into this vector. + // If the type of `a` is `@Type(.undefined)`, i.e. the argument is untyped, + // this is 0, because it is an error to index into this vector. 
const a_len: u32 = switch (sema.typeOf(a_uncoerced).zigTypeTag(zcu)) { .array, .vector => @intCast(sema.typeOf(a_uncoerced).arrayLen(zcu)), .undefined => 0, - else => return sema.fail(block, a_src, "expected vector of '{}', found '{}'", .{ elem_ty.fmt(pt), sema.typeOf(a_uncoerced).fmt(pt) }), + else => return sema.fail(block, a_src, "expected vector of '{f}', found '{f}'", .{ + elem_ty.fmt(pt), sema.typeOf(a_uncoerced).fmt(pt), + }), }; const a_ty = try pt.vectorType(.{ .len = a_len, .child = elem_ty.toIntern() }); const a_coerced = try sema.coerce(block, a_ty, a_uncoerced, a_src); @@ -24197,7 +24197,9 @@ fn analyzeShuffle( const b_len: u32 = switch (sema.typeOf(b_uncoerced).zigTypeTag(zcu)) { .array, .vector => @intCast(sema.typeOf(b_uncoerced).arrayLen(zcu)), .undefined => 0, - else => return sema.fail(block, b_src, "expected vector of '{}', found '{}'", .{ elem_ty.fmt(pt), sema.typeOf(b_uncoerced).fmt(pt) }), + else => return sema.fail(block, b_src, "expected vector of '{f}', found '{f}'", .{ + elem_ty.fmt(pt), sema.typeOf(b_uncoerced).fmt(pt), + }), }; const b_ty = try pt.vectorType(.{ .len = b_len, .child = elem_ty.toIntern() }); const b_coerced = try sema.coerce(block, b_ty, b_uncoerced, b_src); @@ -24235,7 +24237,7 @@ fn analyzeShuffle( if (idx >= a_len) return sema.failWithOwnedErrorMsg(block, msg: { const msg = try sema.errMsg(mask_src, "mask element at index '{d}' selects out-of-bounds index", .{mask_idx}); errdefer msg.destroy(sema.gpa); - try sema.errNote(a_src, msg, "index '{d}' exceeds bounds of '{}' given here", .{ idx, a_ty.fmt(pt) }); + try sema.errNote(a_src, msg, "index '{d}' exceeds bounds of '{f}' given here", .{ idx, a_ty.fmt(pt) }); if (idx < b_len) { try sema.errNote(b_src, msg, "use '~@as(u32, {d})' to index into second vector given here", .{idx}); } @@ -24248,7 +24250,7 @@ fn analyzeShuffle( if (idx >= b_len) return sema.failWithOwnedErrorMsg(block, msg: { const msg = try sema.errMsg(mask_src, "mask element at index '{d}' selects 
out-of-bounds index", .{mask_idx}); errdefer msg.destroy(sema.gpa); - try sema.errNote(b_src, msg, "index '{d}' exceeds bounds of '{}' given here", .{ idx, b_ty.fmt(pt) }); + try sema.errNote(b_src, msg, "index '{d}' exceeds bounds of '{f}' given here", .{ idx, b_ty.fmt(pt) }); break :msg msg; }); } @@ -24351,7 +24353,7 @@ fn zirSelect(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstData) C const vec_len_u64 = switch (pred_ty.zigTypeTag(zcu)) { .vector, .array => pred_ty.arrayLen(zcu), - else => return sema.fail(block, pred_src, "expected vector or array, found '{}'", .{pred_ty.fmt(pt)}), + else => return sema.fail(block, pred_src, "expected vector or array, found '{f}'", .{pred_ty.fmt(pt)}), }; const vec_len: u32 = @intCast(try sema.usizeCast(block, pred_src, vec_len_u64)); @@ -24611,7 +24613,7 @@ fn zirMulAdd(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air. switch (ty.scalarType(zcu).zigTypeTag(zcu)) { .comptime_float, .float => {}, - else => return sema.fail(block, src, "expected vector of floats or float type, found '{}'", .{ty.fmt(pt)}), + else => return sema.fail(block, src, "expected vector of floats or float type, found '{f}'", .{ty.fmt(pt)}), } const runtime_src = if (maybe_mulend1) |mulend1_val| rs: { @@ -24712,7 +24714,7 @@ fn zirBuiltinCall(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError const args_ty = sema.typeOf(args); if (!args_ty.isTuple(zcu)) { - return sema.fail(block, args_src, "expected a tuple, found '{}'", .{args_ty.fmt(pt)}); + return sema.fail(block, args_src, "expected a tuple, found '{f}'", .{args_ty.fmt(pt)}); } const resolved_args: []Air.Inst.Ref = try sema.arena.alloc(Air.Inst.Ref, args_ty.structFieldCount(zcu)); @@ -24757,12 +24759,12 @@ fn zirFieldParentPtr(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.Ins try sema.checkPtrType(block, inst_src, parent_ptr_ty, true); const parent_ptr_info = parent_ptr_ty.ptrInfo(zcu); if (parent_ptr_info.flags.size != .one) { - return 
sema.fail(block, inst_src, "expected single pointer type, found '{}'", .{parent_ptr_ty.fmt(pt)}); + return sema.fail(block, inst_src, "expected single pointer type, found '{f}'", .{parent_ptr_ty.fmt(pt)}); } const parent_ty: Type = .fromInterned(parent_ptr_info.child); switch (parent_ty.zigTypeTag(zcu)) { .@"struct", .@"union" => {}, - else => return sema.fail(block, inst_src, "expected pointer to struct or union type, found '{}'", .{parent_ptr_ty.fmt(pt)}), + else => return sema.fail(block, inst_src, "expected pointer to struct or union type, found '{f}'", .{parent_ptr_ty.fmt(pt)}), } try parent_ty.resolveLayout(pt); @@ -24912,7 +24914,7 @@ fn zirFieldParentPtr(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.Ins } if (field.index != field_index) { - return sema.fail(block, inst_src, "field '{}' has index '{d}' but pointer value is index '{d}' of struct '{}'", .{ + return sema.fail(block, inst_src, "field '{f}' has index '{d}' but pointer value is index '{d}' of struct '{f}'", .{ field_name.fmt(ip), field_index, field.index, parent_ty.fmt(pt), }); } @@ -25033,7 +25035,7 @@ fn analyzeMinMax( try sema.checkNumericType(block, operand_src, operand_ty); if (operand_ty.zigTypeTag(zcu) != .vector) { return sema.failWithOwnedErrorMsg(block, msg: { - const msg = try sema.errMsg(operand_src, "expected vector, found '{}'", .{operand_ty.fmt(pt)}); + const msg = try sema.errMsg(operand_src, "expected vector, found '{f}'", .{operand_ty.fmt(pt)}); errdefer msg.destroy(zcu.gpa); try sema.errNote(operand_srcs[0], msg, "vector operand here", .{}); break :msg msg; @@ -25041,7 +25043,7 @@ fn analyzeMinMax( } if (operand_ty.vectorLen(zcu) != vec_len) { return sema.failWithOwnedErrorMsg(block, msg: { - const msg = try sema.errMsg(operand_src, "expected vector of length '{d}', found '{}'", .{ vec_len, operand_ty.fmt(pt) }); + const msg = try sema.errMsg(operand_src, "expected vector of length '{d}', found '{f}'", .{ vec_len, operand_ty.fmt(pt) }); errdefer msg.destroy(zcu.gpa); 
try sema.errNote(operand_srcs[0], msg, "vector of length '{d}' here", .{vec_len}); break :msg msg; @@ -25054,7 +25056,7 @@ fn analyzeMinMax( const operand_ty = sema.typeOf(operand); if (operand_ty.zigTypeTag(zcu) == .vector) { return sema.failWithOwnedErrorMsg(block, msg: { - const msg = try sema.errMsg(operand_srcs[0], "expected vector, found '{}'", .{first_operand_ty.fmt(pt)}); + const msg = try sema.errMsg(operand_srcs[0], "expected vector, found '{f}'", .{first_operand_ty.fmt(pt)}); errdefer msg.destroy(zcu.gpa); try sema.errNote(operand_src, msg, "vector operand here", .{}); break :msg msg; @@ -25371,10 +25373,10 @@ fn zirMemcpy( const msg = msg: { const msg = try sema.errMsg(src, "unknown copy length", .{}); errdefer msg.destroy(sema.gpa); - try sema.errNote(dest_src, msg, "destination type '{}' provides no length", .{ + try sema.errNote(dest_src, msg, "destination type '{f}' provides no length", .{ dest_ty.fmt(pt), }); - try sema.errNote(src_src, msg, "source type '{}' provides no length", .{ + try sema.errNote(src_src, msg, "source type '{f}' provides no length", .{ src_ty.fmt(pt), }); break :msg msg; @@ -25398,7 +25400,7 @@ fn zirMemcpy( if (imc != .ok) return sema.failWithOwnedErrorMsg(block, msg: { const msg = try sema.errMsg( src, - "pointer element type '{}' cannot coerce into element type '{}'", + "pointer element type '{f}' cannot coerce into element type '{f}'", .{ src_elem_ty.fmt(pt), dest_elem_ty.fmt(pt) }, ); errdefer msg.destroy(sema.gpa); @@ -25417,10 +25419,10 @@ fn zirMemcpy( const msg = msg: { const msg = try sema.errMsg(src, "non-matching copy lengths", .{}); errdefer msg.destroy(sema.gpa); - try sema.errNote(dest_src, msg, "length {} here", .{ + try sema.errNote(dest_src, msg, "length {f} here", .{ dest_len_val.fmtValueSema(pt, sema), }); - try sema.errNote(src_src, msg, "length {} here", .{ + try sema.errNote(src_src, msg, "length {f} here", .{ src_len_val.fmtValueSema(pt, sema), }); break :msg msg; @@ -25635,7 +25637,7 @@ fn 
zirMemset(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!void return sema.failWithOwnedErrorMsg(block, msg: { const msg = try sema.errMsg(src, "unknown @memset length", .{}); errdefer msg.destroy(sema.gpa); - try sema.errNote(dest_src, msg, "destination type '{}' provides no length", .{ + try sema.errNote(dest_src, msg, "destination type '{f}' provides no length", .{ dest_ptr_ty.fmt(pt), }); break :msg msg; @@ -25815,7 +25817,7 @@ fn zirCUndef( const src = block.builtinCallArgSrc(extra.node, 0); const name = try sema.resolveConstString(block, src, extra.operand, .{ .simple = .operand_cUndef_macro_name }); - try block.c_import_buf.?.writer().print("#undef {s}\n", .{name}); + try block.c_import_buf.?.print("#undef {s}\n", .{name}); return .void_value; } @@ -25828,7 +25830,7 @@ fn zirCInclude( const src = block.builtinCallArgSrc(extra.node, 0); const name = try sema.resolveConstString(block, src, extra.operand, .{ .simple = .operand_cInclude_file_name }); - try block.c_import_buf.?.writer().print("#include <{s}>\n", .{name}); + try block.c_import_buf.?.print("#include <{s}>\n", .{name}); return .void_value; } @@ -25847,9 +25849,9 @@ fn zirCDefine( const rhs = try sema.resolveInst(extra.rhs); if (sema.typeOf(rhs).zigTypeTag(zcu) != .void) { const value = try sema.resolveConstString(block, val_src, extra.rhs, .{ .simple = .operand_cDefine_macro_value }); - try block.c_import_buf.?.writer().print("#define {s} {s}\n", .{ name, value }); + try block.c_import_buf.?.print("#define {s} {s}\n", .{ name, value }); } else { - try block.c_import_buf.?.writer().print("#define {s}\n", .{name}); + try block.c_import_buf.?.print("#define {s}\n", .{name}); } return .void_value; } @@ -26067,7 +26069,7 @@ fn zirBuiltinExtern( } if (!try sema.validateExternType(ty, .other)) { const msg = msg: { - const msg = try sema.errMsg(ty_src, "extern symbol cannot have type '{}'", .{ty.fmt(pt)}); + const msg = try sema.errMsg(ty_src, "extern symbol cannot have type '{f}'", 
.{ty.fmt(pt)}); errdefer msg.destroy(sema.gpa); try sema.explainWhyTypeIsNotExtern(msg, ty_src, ty, .other); break :msg msg; @@ -26307,7 +26309,7 @@ pub fn validateVarType( if (is_extern) { if (!try sema.validateExternType(var_ty, .other)) { const msg = msg: { - const msg = try sema.errMsg(src, "extern variable cannot have type '{}'", .{var_ty.fmt(pt)}); + const msg = try sema.errMsg(src, "extern variable cannot have type '{f}'", .{var_ty.fmt(pt)}); errdefer msg.destroy(sema.gpa); try sema.explainWhyTypeIsNotExtern(msg, src, var_ty, .other); break :msg msg; @@ -26319,7 +26321,7 @@ pub fn validateVarType( return sema.fail( block, src, - "non-extern variable with opaque type '{}'", + "non-extern variable with opaque type '{f}'", .{var_ty.fmt(pt)}, ); } @@ -26328,7 +26330,7 @@ pub fn validateVarType( if (!try var_ty.comptimeOnlySema(pt)) return; const msg = msg: { - const msg = try sema.errMsg(src, "variable of type '{}' must be const or comptime", .{var_ty.fmt(pt)}); + const msg = try sema.errMsg(src, "variable of type '{f}' must be const or comptime", .{var_ty.fmt(pt)}); errdefer msg.destroy(sema.gpa); try sema.explainWhyTypeIsComptime(msg, src, var_ty); @@ -26378,7 +26380,7 @@ fn explainWhyTypeIsComptimeInner( => return, .@"fn" => { - try sema.errNote(src_loc, msg, "use '*const {}' for a function pointer type", .{ty.fmt(pt)}); + try sema.errNote(src_loc, msg, "use '*const {f}' for a function pointer type", .{ty.fmt(pt)}); }, .type => { @@ -26394,7 +26396,7 @@ fn explainWhyTypeIsComptimeInner( => return, .@"opaque" => { - try sema.errNote(src_loc, msg, "opaque type '{}' has undefined size", .{ty.fmt(pt)}); + try sema.errNote(src_loc, msg, "opaque type '{f}' has undefined size", .{ty.fmt(pt)}); }, .array, .vector => { @@ -26581,7 +26583,7 @@ fn explainWhyTypeIsNotExtern( if (!ty.isConstPtr(zcu) and pointee_ty.zigTypeTag(zcu) == .@"fn") { try sema.errNote(src_loc, msg, "pointer to extern function must be 'const'", .{}); } else if (try ty.comptimeOnlySema(pt)) { - try 
sema.errNote(src_loc, msg, "pointer to comptime-only type '{}'", .{pointee_ty.fmt(pt)}); + try sema.errNote(src_loc, msg, "pointer to comptime-only type '{f}'", .{pointee_ty.fmt(pt)}); try sema.explainWhyTypeIsComptime(msg, src_loc, ty); } try sema.explainWhyTypeIsNotExtern(msg, src_loc, pointee_ty, .other); @@ -26609,7 +26611,7 @@ fn explainWhyTypeIsNotExtern( }, .@"enum" => { const tag_ty = ty.intTagType(zcu); - try sema.errNote(src_loc, msg, "enum tag type '{}' is not extern compatible", .{tag_ty.fmt(pt)}); + try sema.errNote(src_loc, msg, "enum tag type '{f}' is not extern compatible", .{tag_ty.fmt(pt)}); try sema.explainWhyTypeIsNotExtern(msg, src_loc, tag_ty, position); }, .@"struct" => try sema.errNote(src_loc, msg, "only extern structs and ABI sized packed structs are extern compatible", .{}), @@ -27045,7 +27047,7 @@ fn fieldVal( return sema.fail( block, field_name_src, - "no member named '{}' in '{}'", + "no member named '{f}' in '{f}'", .{ field_name.fmt(ip), object_ty.fmt(pt) }, ); } @@ -27069,7 +27071,7 @@ fn fieldVal( return sema.fail( block, field_name_src, - "no member named '{}' in '{}'", + "no member named '{f}' in '{f}'", .{ field_name.fmt(ip), object_ty.fmt(pt) }, ); } @@ -27089,7 +27091,7 @@ fn fieldVal( switch (ip.indexToKey(child_type.toIntern())) { .error_set_type => |error_set_type| blk: { if (error_set_type.nameIndex(ip, field_name) != null) break :blk; - return sema.fail(block, src, "no error named '{}' in '{}'", .{ + return sema.fail(block, src, "no error named '{f}' in '{f}'", .{ field_name.fmt(ip), child_type.fmt(pt), }); }, @@ -27144,7 +27146,7 @@ fn fieldVal( return sema.failWithBadMemberAccess(block, child_type, src, field_name); }, else => return sema.failWithOwnedErrorMsg(block, msg: { - const msg = try sema.errMsg(src, "type '{}' has no members", .{child_type.fmt(pt)}); + const msg = try sema.errMsg(src, "type '{f}' has no members", .{child_type.fmt(pt)}); errdefer msg.destroy(sema.gpa); if (child_type.isSlice(zcu)) try 
sema.errNote(src, msg, "slice values have 'len' and 'ptr' members", .{}); if (child_type.zigTypeTag(zcu) == .array) try sema.errNote(src, msg, "array values have 'len' member", .{}); @@ -27190,7 +27192,7 @@ fn fieldPtr( const object_ptr_ty = sema.typeOf(object_ptr); const object_ty = switch (object_ptr_ty.zigTypeTag(zcu)) { .pointer => object_ptr_ty.childType(zcu), - else => return sema.fail(block, object_ptr_src, "expected pointer, found '{}'", .{object_ptr_ty.fmt(pt)}), + else => return sema.fail(block, object_ptr_src, "expected pointer, found '{f}'", .{object_ptr_ty.fmt(pt)}), }; // Zig allows dereferencing a single pointer during field lookup. Note that @@ -27243,7 +27245,7 @@ fn fieldPtr( return sema.fail( block, field_name_src, - "no member named '{}' in '{}'", + "no member named '{f}' in '{f}'", .{ field_name.fmt(ip), object_ty.fmt(pt) }, ); } @@ -27298,7 +27300,7 @@ fn fieldPtr( return sema.fail( block, field_name_src, - "no member named '{}' in '{}'", + "no member named '{f}' in '{f}'", .{ field_name.fmt(ip), object_ty.fmt(pt) }, ); } @@ -27321,7 +27323,7 @@ fn fieldPtr( if (error_set_type.nameIndex(ip, field_name) != null) { break :blk; } - return sema.fail(block, src, "no error named '{}' in '{}'", .{ + return sema.fail(block, src, "no error named '{f}' in '{f}'", .{ field_name.fmt(ip), child_type.fmt(pt), }); }, @@ -27375,7 +27377,7 @@ fn fieldPtr( } return sema.failWithBadMemberAccess(block, child_type, field_name_src, field_name); }, - else => return sema.fail(block, src, "type '{}' has no members", .{child_type.fmt(pt)}), + else => return sema.fail(block, src, "type '{f}' has no members", .{child_type.fmt(pt)}), } }, .@"struct" => { @@ -27430,7 +27432,7 @@ fn fieldCallBind( const inner_ty = if (raw_ptr_ty.zigTypeTag(zcu) == .pointer and (raw_ptr_ty.ptrSize(zcu) == .one or raw_ptr_ty.ptrSize(zcu) == .c)) raw_ptr_ty.childType(zcu) else - return sema.fail(block, raw_ptr_src, "expected single pointer, found '{}'", .{raw_ptr_ty.fmt(pt)}); + return 
sema.fail(block, raw_ptr_src, "expected single pointer, found '{f}'", .{raw_ptr_ty.fmt(pt)}); // Optionally dereference a second pointer to get the concrete type. const is_double_ptr = inner_ty.zigTypeTag(zcu) == .pointer and inner_ty.ptrSize(zcu) == .one; @@ -27549,7 +27551,7 @@ fn fieldCallBind( }; const msg = msg: { - const msg = try sema.errMsg(src, "no field or member function named '{}' in '{}'", .{ + const msg = try sema.errMsg(src, "no field or member function named '{f}' in '{f}'", .{ field_name.fmt(ip), concrete_ty.fmt(pt), }); @@ -27559,7 +27561,7 @@ fn fieldCallBind( try sema.errNote( zcu.navSrcLoc(nav_index), msg, - "'{}' is not a member function", + "'{f}' is not a member function", .{field_name.fmt(ip)}, ); } @@ -27627,7 +27629,7 @@ fn namespaceLookup( if (try sema.lookupInNamespace(block, namespace, decl_name)) |lookup| { if (!lookup.accessible) { return sema.failWithOwnedErrorMsg(block, msg: { - const msg = try sema.errMsg(src, "'{}' is not marked 'pub'", .{ + const msg = try sema.errMsg(src, "'{f}' is not marked 'pub'", .{ decl_name.fmt(&zcu.intern_pool), }); errdefer msg.destroy(gpa); @@ -27865,12 +27867,12 @@ fn tupleFieldIndex( assert(!field_name.eqlSlice("len", ip)); if (field_name.toUnsigned(ip)) |field_index| { if (field_index < tuple_ty.structFieldCount(pt.zcu)) return field_index; - return sema.fail(block, field_name_src, "index '{}' out of bounds of tuple '{}'", .{ + return sema.fail(block, field_name_src, "index '{f}' out of bounds of tuple '{f}'", .{ field_name.fmt(ip), tuple_ty.fmt(pt), }); } - return sema.fail(block, field_name_src, "no field named '{}' in tuple '{}'", .{ + return sema.fail(block, field_name_src, "no field named '{f}' in tuple '{f}'", .{ field_name.fmt(ip), tuple_ty.fmt(pt), }); } @@ -27957,7 +27959,7 @@ fn unionFieldPtr( const msg = try sema.errMsg(src, "cannot initialize 'noreturn' field of union", .{}); errdefer msg.destroy(sema.gpa); - try sema.addFieldErrNote(union_ty, field_index, msg, "field '{}' declared 
here", .{ + try sema.addFieldErrNote(union_ty, field_index, msg, "field '{f}' declared here", .{ field_name.fmt(ip), }); try sema.addDeclaredHereNote(msg, union_ty); @@ -27991,7 +27993,7 @@ fn unionFieldPtr( const msg = msg: { const active_index = Type.fromInterned(union_obj.enum_tag_ty).enumTagFieldIndex(Value.fromInterned(un.tag), zcu).?; const active_field_name = Type.fromInterned(union_obj.enum_tag_ty).enumFieldName(active_index, zcu); - const msg = try sema.errMsg(src, "access of union field '{}' while field '{}' is active", .{ + const msg = try sema.errMsg(src, "access of union field '{f}' while field '{f}' is active", .{ field_name.fmt(ip), active_field_name.fmt(ip), }); @@ -28059,7 +28061,7 @@ fn unionFieldVal( const msg = msg: { const active_index = Type.fromInterned(union_obj.enum_tag_ty).enumTagFieldIndex(Value.fromInterned(un.tag), zcu).?; const active_field_name = Type.fromInterned(union_obj.enum_tag_ty).enumFieldName(active_index, zcu); - const msg = try sema.errMsg(src, "access of union field '{}' while field '{}' is active", .{ + const msg = try sema.errMsg(src, "access of union field '{f}' while field '{f}' is active", .{ field_name.fmt(ip), active_field_name.fmt(ip), }); errdefer msg.destroy(sema.gpa); @@ -28117,7 +28119,7 @@ fn elemPtr( const indexable_ty = switch (indexable_ptr_ty.zigTypeTag(zcu)) { .pointer => indexable_ptr_ty.childType(zcu), - else => return sema.fail(block, indexable_ptr_src, "expected pointer, found '{}'", .{indexable_ptr_ty.fmt(pt)}), + else => return sema.fail(block, indexable_ptr_src, "expected pointer, found '{f}'", .{indexable_ptr_ty.fmt(pt)}), }; try sema.checkIndexable(block, src, indexable_ty); @@ -28288,7 +28290,7 @@ fn validateRuntimeElemAccess( const msg = msg: { const msg = try sema.errMsg( elem_index_src, - "values of type '{}' must be comptime-known, but index value is runtime-known", + "values of type '{f}' must be comptime-known, but index value is runtime-known", .{parent_ty.fmt(sema.pt)}, ); errdefer 
msg.destroy(sema.gpa); @@ -28304,7 +28306,7 @@ fn validateRuntimeElemAccess( const target = zcu.getTarget(); const as = parent_ty.ptrAddressSpace(zcu); if (target_util.arePointersLogical(target, as)) { - return sema.fail(block, elem_index_src, "cannot access element of logical pointer '{}'", .{parent_ty.fmt(pt)}); + return sema.fail(block, elem_index_src, "cannot access element of logical pointer '{f}'", .{parent_ty.fmt(pt)}); } } } @@ -29000,7 +29002,7 @@ fn coerceExtra( return sema.fail( block, inst_src, - "array literal requires address-of operator (&) to coerce to slice type '{}'", + "array literal requires address-of operator (&) to coerce to slice type '{f}'", .{dest_ty.fmt(pt)}, ); } @@ -29027,7 +29029,7 @@ fn coerceExtra( // pointer to tuple to slice if (!dest_info.flags.is_const) { const err_msg = err_msg: { - const err_msg = try sema.errMsg(inst_src, "cannot cast pointer to tuple to '{}'", .{dest_ty.fmt(pt)}); + const err_msg = try sema.errMsg(inst_src, "cannot cast pointer to tuple to '{f}'", .{dest_ty.fmt(pt)}); errdefer err_msg.destroy(sema.gpa); try sema.errNote(dest_ty_src, err_msg, "pointers to tuples can only coerce to constant pointers", .{}); break :err_msg err_msg; @@ -29082,7 +29084,7 @@ fn coerceExtra( // comptime-known integer to other number if (!(try sema.intFitsInType(val, dest_ty, null))) { if (!opts.report_err) return error.NotCoercible; - return sema.fail(block, inst_src, "type '{}' cannot represent integer value '{}'", .{ dest_ty.fmt(pt), val.fmtValueSema(pt, sema) }); + return sema.fail(block, inst_src, "type '{f}' cannot represent integer value '{f}'", .{ dest_ty.fmt(pt), val.fmtValueSema(pt, sema) }); } return switch (zcu.intern_pool.indexToKey(val.toIntern())) { .undef => try pt.undefRef(dest_ty), @@ -29124,7 +29126,7 @@ fn coerceExtra( return sema.fail( block, inst_src, - "type '{}' cannot represent float value '{}'", + "type '{f}' cannot represent float value '{f}'", .{ dest_ty.fmt(pt), val.fmtValueSema(pt, sema) }, ); } @@ 
-29157,7 +29159,7 @@ fn coerceExtra( // return sema.fail( // block, // inst_src, - // "type '{}' cannot represent integer value '{}'", + // "type '{f}' cannot represent integer value '{f}'", // .{ dest_ty.fmt(pt), val }, // ); //} @@ -29171,7 +29173,7 @@ fn coerceExtra( const val = try sema.resolveConstDefinedValue(block, LazySrcLoc.unneeded, inst, undefined); const string = zcu.intern_pool.indexToKey(val.toIntern()).enum_literal; const field_index = dest_ty.enumFieldIndex(string, zcu) orelse { - return sema.fail(block, inst_src, "no field named '{}' in enum '{}'", .{ + return sema.fail(block, inst_src, "no field named '{f}' in enum '{f}'", .{ string.fmt(&zcu.intern_pool), dest_ty.fmt(pt), }); }; @@ -29320,11 +29322,11 @@ fn coerceExtra( } const msg = msg: { - const msg = try sema.errMsg(inst_src, "expected type '{}', found '{}'", .{ dest_ty.fmt(pt), inst_ty.fmt(pt) }); + const msg = try sema.errMsg(inst_src, "expected type '{f}', found '{f}'", .{ dest_ty.fmt(pt), inst_ty.fmt(pt) }); errdefer msg.destroy(sema.gpa); if (!can_coerce_to) { - try sema.errNote(inst_src, msg, "cannot coerce to '{}'", .{dest_ty.fmt(pt)}); + try sema.errNote(inst_src, msg, "cannot coerce to '{f}'", .{dest_ty.fmt(pt)}); } // E!T to T @@ -29364,7 +29366,7 @@ fn coerceExtra( try sema.errNote(param_src, msg, "parameter type declared here", .{}); } - // TODO maybe add "cannot store an error in type '{}'" note + // TODO maybe add "cannot store an error in type '{f}'" note break :msg msg; }; @@ -29513,13 +29515,13 @@ const InMemoryCoercionResult = union(enum) { break; }, .comptime_int_not_coercible => |int| { - try sema.errNote(src, msg, "type '{}' cannot represent value '{}'", .{ + try sema.errNote(src, msg, "type '{f}' cannot represent value '{f}'", .{ int.wanted.fmt(pt), int.actual.fmtValueSema(pt, sema), }); break; }, .error_union_payload => |pair| { - try sema.errNote(src, msg, "error union payload '{}' cannot cast into error union payload '{}'", .{ + try sema.errNote(src, msg, "error union 
payload '{f}' cannot cast into error union payload '{f}'", .{ pair.actual.fmt(pt), pair.wanted.fmt(pt), }); cur = pair.child; @@ -29532,18 +29534,18 @@ const InMemoryCoercionResult = union(enum) { }, .array_sentinel => |sentinel| { if (sentinel.actual.toIntern() != .unreachable_value) { - try sema.errNote(src, msg, "array sentinel '{}' cannot cast into array sentinel '{}'", .{ + try sema.errNote(src, msg, "array sentinel '{f}' cannot cast into array sentinel '{f}'", .{ sentinel.actual.fmtValueSema(pt, sema), sentinel.wanted.fmtValueSema(pt, sema), }); } else { - try sema.errNote(src, msg, "destination array requires '{}' sentinel", .{ + try sema.errNote(src, msg, "destination array requires '{f}' sentinel", .{ sentinel.wanted.fmtValueSema(pt, sema), }); } break; }, .array_elem => |pair| { - try sema.errNote(src, msg, "array element type '{}' cannot cast into array element type '{}'", .{ + try sema.errNote(src, msg, "array element type '{f}' cannot cast into array element type '{f}'", .{ pair.actual.fmt(pt), pair.wanted.fmt(pt), }); cur = pair.child; @@ -29555,19 +29557,19 @@ const InMemoryCoercionResult = union(enum) { break; }, .vector_elem => |pair| { - try sema.errNote(src, msg, "vector element type '{}' cannot cast into vector element type '{}'", .{ + try sema.errNote(src, msg, "vector element type '{f}' cannot cast into vector element type '{f}'", .{ pair.actual.fmt(pt), pair.wanted.fmt(pt), }); cur = pair.child; }, .optional_shape => |pair| { - try sema.errNote(src, msg, "optional type child '{}' cannot cast into optional type child '{}'", .{ + try sema.errNote(src, msg, "optional type child '{f}' cannot cast into optional type child '{f}'", .{ pair.actual.optionalChild(pt.zcu).fmt(pt), pair.wanted.optionalChild(pt.zcu).fmt(pt), }); break; }, .optional_child => |pair| { - try sema.errNote(src, msg, "optional type child '{}' cannot cast into optional type child '{}'", .{ + try sema.errNote(src, msg, "optional type child '{f}' cannot cast into optional type 
child '{f}'", .{ pair.actual.fmt(pt), pair.wanted.fmt(pt), }); cur = pair.child; @@ -29578,7 +29580,7 @@ const InMemoryCoercionResult = union(enum) { }, .missing_error => |missing_errors| { for (missing_errors) |err| { - try sema.errNote(src, msg, "'error.{}' not a member of destination error set", .{err.fmt(&pt.zcu.intern_pool)}); + try sema.errNote(src, msg, "'error.{f}' not a member of destination error set", .{err.fmt(&pt.zcu.intern_pool)}); } break; }, @@ -29631,7 +29633,7 @@ const InMemoryCoercionResult = union(enum) { break; }, .fn_param => |param| { - try sema.errNote(src, msg, "parameter {d} '{}' cannot cast into '{}'", .{ + try sema.errNote(src, msg, "parameter {d} '{f}' cannot cast into '{f}'", .{ param.index, param.actual.fmt(pt), param.wanted.fmt(pt), }); cur = param.child; @@ -29641,13 +29643,13 @@ const InMemoryCoercionResult = union(enum) { break; }, .fn_return_type => |pair| { - try sema.errNote(src, msg, "return type '{}' cannot cast into return type '{}'", .{ + try sema.errNote(src, msg, "return type '{f}' cannot cast into return type '{f}'", .{ pair.actual.fmt(pt), pair.wanted.fmt(pt), }); cur = pair.child; }, .ptr_child => |pair| { - try sema.errNote(src, msg, "pointer type child '{}' cannot cast into pointer type child '{}'", .{ + try sema.errNote(src, msg, "pointer type child '{f}' cannot cast into pointer type child '{f}'", .{ pair.actual.fmt(pt), pair.wanted.fmt(pt), }); cur = pair.child; @@ -29658,11 +29660,11 @@ const InMemoryCoercionResult = union(enum) { }, .ptr_sentinel => |sentinel| { if (sentinel.actual.toIntern() != .unreachable_value) { - try sema.errNote(src, msg, "pointer sentinel '{}' cannot cast into pointer sentinel '{}'", .{ + try sema.errNote(src, msg, "pointer sentinel '{f}' cannot cast into pointer sentinel '{f}'", .{ sentinel.actual.fmtValueSema(pt, sema), sentinel.wanted.fmtValueSema(pt, sema), }); } else { - try sema.errNote(src, msg, "destination pointer requires '{}' sentinel", .{ + try sema.errNote(src, msg, 
"destination pointer requires '{f}' sentinel", .{ sentinel.wanted.fmtValueSema(pt, sema), }); } @@ -29676,11 +29678,11 @@ const InMemoryCoercionResult = union(enum) { const wanted_allow_zero = pair.wanted.ptrAllowsZero(pt.zcu); const actual_allow_zero = pair.actual.ptrAllowsZero(pt.zcu); if (actual_allow_zero and !wanted_allow_zero) { - try sema.errNote(src, msg, "'{}' could have null values which are illegal in type '{}'", .{ + try sema.errNote(src, msg, "'{f}' could have null values which are illegal in type '{f}'", .{ pair.actual.fmt(pt), pair.wanted.fmt(pt), }); } else { - try sema.errNote(src, msg, "mutable '{}' would allow illegal null values stored to type '{}'", .{ + try sema.errNote(src, msg, "mutable '{f}' would allow illegal null values stored to type '{f}'", .{ pair.wanted.fmt(pt), pair.actual.fmt(pt), }); } @@ -29692,7 +29694,7 @@ const InMemoryCoercionResult = union(enum) { if (actual_const and !wanted_const) { try sema.errNote(src, msg, "cast discards const qualifier", .{}); } else { - try sema.errNote(src, msg, "mutable '{}' would allow illegal const pointers stored to type '{}'", .{ + try sema.errNote(src, msg, "mutable '{f}' would allow illegal const pointers stored to type '{f}'", .{ pair.wanted.fmt(pt), pair.actual.fmt(pt), }); } @@ -29704,7 +29706,7 @@ const InMemoryCoercionResult = union(enum) { if (actual_volatile and !wanted_volatile) { try sema.errNote(src, msg, "cast discards volatile qualifier", .{}); } else { - try sema.errNote(src, msg, "mutable '{}' would allow illegal volatile pointers stored to type '{}'", .{ + try sema.errNote(src, msg, "mutable '{f}' would allow illegal volatile pointers stored to type '{f}'", .{ pair.wanted.fmt(pt), pair.actual.fmt(pt), }); } @@ -29712,12 +29714,12 @@ const InMemoryCoercionResult = union(enum) { }, .ptr_bit_range => |bit_range| { if (bit_range.actual_host != bit_range.wanted_host) { - try sema.errNote(src, msg, "pointer host size '{}' cannot cast into pointer host size '{}'", .{ + try 
sema.errNote(src, msg, "pointer host size '{d}' cannot cast into pointer host size '{d}'", .{ bit_range.actual_host, bit_range.wanted_host, }); } if (bit_range.actual_offset != bit_range.wanted_offset) { - try sema.errNote(src, msg, "pointer bit offset '{}' cannot cast into pointer bit offset '{}'", .{ + try sema.errNote(src, msg, "pointer bit offset '{d}' cannot cast into pointer bit offset '{d}'", .{ bit_range.actual_offset, bit_range.wanted_offset, }); } @@ -29730,13 +29732,13 @@ const InMemoryCoercionResult = union(enum) { break; }, .double_ptr_to_anyopaque => |pair| { - try sema.errNote(src, msg, "cannot implicitly cast double pointer '{}' to anyopaque pointer '{}'", .{ + try sema.errNote(src, msg, "cannot implicitly cast double pointer '{f}' to anyopaque pointer '{f}'", .{ pair.actual.fmt(pt), pair.wanted.fmt(pt), }); break; }, .slice_to_anyopaque => |pair| { - try sema.errNote(src, msg, "cannot implicitly cast slice '{}' to anyopaque pointer '{}'", .{ + try sema.errNote(src, msg, "cannot implicitly cast slice '{f}' to anyopaque pointer '{f}'", .{ pair.actual.fmt(pt), pair.wanted.fmt(pt), }); try sema.errNote(src, msg, "consider using '.ptr'", .{}); @@ -30510,7 +30512,7 @@ fn coerceVarArgParam( const coerced_ty = sema.typeOf(coerced); if (!try sema.validateExternType(coerced_ty, .param_ty)) { const msg = msg: { - const msg = try sema.errMsg(inst_src, "cannot pass '{}' to variadic function", .{coerced_ty.fmt(pt)}); + const msg = try sema.errMsg(inst_src, "cannot pass '{f}' to variadic function", .{coerced_ty.fmt(pt)}); errdefer msg.destroy(sema.gpa); try sema.explainWhyTypeIsNotExtern(msg, inst_src, coerced_ty, .param_ty); @@ -30613,7 +30615,7 @@ fn storePtr2( // is not comptime-only. We can hit this case with a `@ptrFromInt` pointer. 
if (try elem_ty.comptimeOnlySema(pt)) { return sema.failWithOwnedErrorMsg(block, msg: { - const msg = try sema.errMsg(src, "cannot store comptime-only type '{}' at runtime", .{elem_ty.fmt(pt)}); + const msg = try sema.errMsg(src, "cannot store comptime-only type '{f}' at runtime", .{elem_ty.fmt(pt)}); errdefer msg.destroy(sema.gpa); try sema.errNote(ptr_src, msg, "operation is runtime due to this pointer", .{}); break :msg msg; @@ -30646,7 +30648,7 @@ fn storePtr2( }); return; } - return sema.fail(block, ptr_src, "unable to determine vector element index of type '{}'", .{ + return sema.fail(block, ptr_src, "unable to determine vector element index of type '{f}'", .{ ptr_ty.fmt(pt), }); } @@ -30815,19 +30817,19 @@ fn storePtrVal( .{}, ), .undef => return sema.failWithUseOfUndef(block, src), - .err_payload => |err_name| return sema.fail(block, src, "attempt to unwrap error: {}", .{err_name.fmt(ip)}), + .err_payload => |err_name| return sema.fail(block, src, "attempt to unwrap error: {f}", .{err_name.fmt(ip)}), .null_payload => return sema.fail(block, src, "attempt to use null value", .{}), .inactive_union_field => return sema.fail(block, src, "access of inactive union field", .{}), .needed_well_defined => |ty| return sema.fail( block, src, - "comptime dereference requires '{}' to have a well-defined layout", + "comptime dereference requires '{f}' to have a well-defined layout", .{ty.fmt(pt)}, ), .out_of_bounds => |ty| return sema.fail( block, src, - "dereference of '{}' exceeds bounds of containing decl of type '{}'", + "dereference of '{f}' exceeds bounds of containing decl of type '{f}'", .{ ptr_ty.fmt(pt), ty.fmt(pt) }, ), .exceeds_host_size => return sema.fail(block, src, "bit-pointer target exceeds host size", .{}), @@ -30853,7 +30855,7 @@ fn bitCast( const old_bits = old_ty.bitSize(zcu); if (old_bits != dest_bits) { - return sema.fail(block, inst_src, "@bitCast size mismatch: destination type '{}' has {d} bits but source type '{}' has {d} bits", .{ + return 
sema.fail(block, inst_src, "@bitCast size mismatch: destination type '{f}' has {d} bits but source type '{f}' has {d} bits", .{ dest_ty.fmt(pt), dest_bits, old_ty.fmt(pt), @@ -30971,7 +30973,7 @@ fn coerceCompatiblePtrs( const inst_ty = sema.typeOf(inst); if (try sema.resolveValue(inst)) |val| { if (!val.isUndef(zcu) and val.isNull(zcu) and !dest_ty.isAllowzeroPtr(zcu)) { - return sema.fail(block, inst_src, "null pointer casted to type '{}'", .{dest_ty.fmt(pt)}); + return sema.fail(block, inst_src, "null pointer casted to type '{f}'", .{dest_ty.fmt(pt)}); } // The comptime Value representation is compatible with both types. return Air.internedToRef( @@ -31017,7 +31019,7 @@ fn coerceEnumToUnion( const tag_ty = union_ty.unionTagType(zcu) orelse { const msg = msg: { - const msg = try sema.errMsg(inst_src, "expected type '{}', found '{}'", .{ + const msg = try sema.errMsg(inst_src, "expected type '{f}', found '{f}'", .{ union_ty.fmt(pt), inst_ty.fmt(pt), }); errdefer msg.destroy(sema.gpa); @@ -31031,7 +31033,7 @@ fn coerceEnumToUnion( const enum_tag = try sema.coerce(block, tag_ty, inst, inst_src); if (try sema.resolveDefinedValue(block, inst_src, enum_tag)) |val| { const field_index = union_ty.unionTagFieldIndex(val, pt.zcu) orelse { - return sema.fail(block, inst_src, "union '{}' has no tag with value '{}'", .{ + return sema.fail(block, inst_src, "union '{f}' has no tag with value '{f}'", .{ union_ty.fmt(pt), val.fmtValueSema(pt, sema), }); }; @@ -31045,7 +31047,7 @@ fn coerceEnumToUnion( errdefer msg.destroy(sema.gpa); const field_name = union_obj.loadTagType(ip).names.get(ip)[field_index]; - try sema.addFieldErrNote(union_ty, field_index, msg, "field '{}' declared here", .{ + try sema.addFieldErrNote(union_ty, field_index, msg, "field '{f}' declared here", .{ field_name.fmt(ip), }); try sema.addDeclaredHereNote(msg, union_ty); @@ -31056,13 +31058,13 @@ fn coerceEnumToUnion( const opv = (try sema.typeHasOnePossibleValue(field_ty)) orelse { const msg = msg: { const 
field_name = union_obj.loadTagType(ip).names.get(ip)[field_index]; - const msg = try sema.errMsg(inst_src, "coercion from enum '{}' to union '{}' must initialize '{}' field '{}'", .{ + const msg = try sema.errMsg(inst_src, "coercion from enum '{f}' to union '{f}' must initialize '{f}' field '{f}'", .{ inst_ty.fmt(pt), union_ty.fmt(pt), field_ty.fmt(pt), field_name.fmt(ip), }); errdefer msg.destroy(sema.gpa); - try sema.addFieldErrNote(union_ty, field_index, msg, "field '{}' declared here", .{ + try sema.addFieldErrNote(union_ty, field_index, msg, "field '{f}' declared here", .{ field_name.fmt(ip), }); try sema.addDeclaredHereNote(msg, union_ty); @@ -31078,7 +31080,7 @@ fn coerceEnumToUnion( if (tag_ty.isNonexhaustiveEnum(zcu)) { const msg = msg: { - const msg = try sema.errMsg(inst_src, "runtime coercion to union '{}' from non-exhaustive enum", .{ + const msg = try sema.errMsg(inst_src, "runtime coercion to union '{f}' from non-exhaustive enum", .{ union_ty.fmt(pt), }); errdefer msg.destroy(sema.gpa); @@ -31097,7 +31099,7 @@ fn coerceEnumToUnion( if (Type.fromInterned(field_ty).zigTypeTag(zcu) == .noreturn) { const err_msg = msg orelse try sema.errMsg( inst_src, - "runtime coercion from enum '{}' to union '{}' which has a 'noreturn' field", + "runtime coercion from enum '{f}' to union '{f}' which has a 'noreturn' field", .{ tag_ty.fmt(pt), union_ty.fmt(pt) }, ); msg = err_msg; @@ -31120,7 +31122,7 @@ fn coerceEnumToUnion( const msg = msg: { const msg = try sema.errMsg( inst_src, - "runtime coercion from enum '{}' to union '{}' which has non-void fields", + "runtime coercion from enum '{f}' to union '{f}' which has non-void fields", .{ tag_ty.fmt(pt), union_ty.fmt(pt) }, ); errdefer msg.destroy(sema.gpa); @@ -31129,7 +31131,7 @@ fn coerceEnumToUnion( const field_name = union_obj.loadTagType(ip).names.get(ip)[field_index]; const field_ty: Type = .fromInterned(union_obj.field_types.get(ip)[field_index]); if (!(try field_ty.hasRuntimeBitsSema(pt))) continue; - try 
sema.addFieldErrNote(union_ty, field_index, msg, "field '{}' has type '{}'", .{ + try sema.addFieldErrNote(union_ty, field_index, msg, "field '{f}' has type '{f}'", .{ field_name.fmt(ip), field_ty.fmt(pt), }); @@ -31170,7 +31172,7 @@ fn coerceArrayLike( const dest_len = try sema.usizeCast(block, dest_ty_src, dest_ty.arrayLen(zcu)); if (dest_len != inst_len) { const msg = msg: { - const msg = try sema.errMsg(inst_src, "expected type '{}', found '{}'", .{ + const msg = try sema.errMsg(inst_src, "expected type '{f}', found '{f}'", .{ dest_ty.fmt(pt), inst_ty.fmt(pt), }); errdefer msg.destroy(sema.gpa); @@ -31258,7 +31260,7 @@ fn coerceTupleToArray( if (dest_len != inst_len) { const msg = msg: { - const msg = try sema.errMsg(inst_src, "expected type '{}', found '{}'", .{ + const msg = try sema.errMsg(inst_src, "expected type '{f}', found '{f}'", .{ dest_ty.fmt(pt), inst_ty.fmt(pt), }); errdefer msg.destroy(sema.gpa); @@ -31734,10 +31736,10 @@ fn analyzeLoad( const ptr_ty = sema.typeOf(ptr); const elem_ty = switch (ptr_ty.zigTypeTag(zcu)) { .pointer => ptr_ty.childType(zcu), - else => return sema.fail(block, ptr_src, "expected pointer, found '{}'", .{ptr_ty.fmt(pt)}), + else => return sema.fail(block, ptr_src, "expected pointer, found '{f}'", .{ptr_ty.fmt(pt)}), }; if (elem_ty.zigTypeTag(zcu) == .@"opaque") { - return sema.fail(block, ptr_src, "cannot load opaque type '{}'", .{elem_ty.fmt(pt)}); + return sema.fail(block, ptr_src, "cannot load opaque type '{f}'", .{elem_ty.fmt(pt)}); } if (try sema.typeHasOnePossibleValue(elem_ty)) |opv| { @@ -31758,7 +31760,7 @@ fn analyzeLoad( const bin_op = sema.getTmpAir().extraData(Air.Bin, ty_pl.payload).data; return block.addBinOp(.ptr_elem_val, bin_op.lhs, bin_op.rhs); } - return sema.fail(block, ptr_src, "unable to determine vector element index of type '{}'", .{ + return sema.fail(block, ptr_src, "unable to determine vector element index of type '{f}'", .{ ptr_ty.fmt(pt), }); } @@ -32046,7 +32048,7 @@ fn analyzeSlice( const 
ptr_ptr_ty = sema.typeOf(ptr_ptr); const ptr_ptr_child_ty = switch (ptr_ptr_ty.zigTypeTag(zcu)) { .pointer => ptr_ptr_ty.childType(zcu), - else => return sema.fail(block, ptr_src, "expected pointer, found '{}'", .{ptr_ptr_ty.fmt(pt)}), + else => return sema.fail(block, ptr_src, "expected pointer, found '{f}'", .{ptr_ptr_ty.fmt(pt)}), }; var array_ty = ptr_ptr_child_ty; @@ -32095,7 +32097,7 @@ fn analyzeSlice( try sema.errNote( start_src, msg, - "expected '{}', found '{}'", + "expected '{f}', found '{f}'", .{ Value.zero_comptime_int.fmtValueSema(pt, sema), start_value.fmtValueSema(pt, sema), @@ -32111,7 +32113,7 @@ fn analyzeSlice( try sema.errNote( end_src, msg, - "expected '{}', found '{}'", + "expected '{f}', found '{f}'", .{ Value.one_comptime_int.fmtValueSema(pt, sema), end_value.fmtValueSema(pt, sema), @@ -32126,7 +32128,7 @@ fn analyzeSlice( return sema.fail( block, end_src, - "end index {} out of bounds for slice of single-item pointer", + "end index {f} out of bounds for slice of single-item pointer", .{end_value.fmtValueSema(pt, sema)}, ); } @@ -32173,7 +32175,7 @@ fn analyzeSlice( elem_ty = ptr_ptr_child_ty.childType(zcu); }, }, - else => return sema.fail(block, src, "slice of non-array type '{}'", .{ptr_ptr_child_ty.fmt(pt)}), + else => return sema.fail(block, src, "slice of non-array type '{f}'", .{ptr_ptr_child_ty.fmt(pt)}), } const ptr = if (slice_ty.isSlice(zcu)) @@ -32220,7 +32222,7 @@ fn analyzeSlice( return sema.fail( block, end_src, - "end index {} out of bounds for array of length {}{s}", + "end index {f} out of bounds for array of length {f}{s}", .{ end_val.fmtValueSema(pt, sema), len_val.fmtValueSema(pt, sema), @@ -32265,7 +32267,7 @@ fn analyzeSlice( return sema.fail( block, end_src, - "end index {} out of bounds for slice of length {d}{s}", + "end index {f} out of bounds for slice of length {d}{s}", .{ end_val.fmtValueSema(pt, sema), try slice_val.sliceLen(pt), @@ -32324,7 +32326,7 @@ fn analyzeSlice( return sema.fail( block, start_src, - 
"start index {} is larger than end index {}", + "start index {f} is larger than end index {f}", .{ start_val.fmtValueSema(pt, sema), end_val.fmtValueSema(pt, sema), @@ -32348,13 +32350,13 @@ fn analyzeSlice( .needed_well_defined => |ty| return sema.fail( block, src, - "comptime dereference requires '{}' to have a well-defined layout", + "comptime dereference requires '{f}' to have a well-defined layout", .{ty.fmt(pt)}, ), .out_of_bounds => |ty| return sema.fail( block, end_src, - "slice end index {d} exceeds bounds of containing decl of type '{}'", + "slice end index {d} exceeds bounds of containing decl of type '{f}'", .{ end_int, ty.fmt(pt) }, ), }; @@ -32363,7 +32365,7 @@ fn analyzeSlice( const msg = msg: { const msg = try sema.errMsg(src, "value in memory does not match slice sentinel", .{}); errdefer msg.destroy(sema.gpa); - try sema.errNote(src, msg, "expected '{}', found '{}'", .{ + try sema.errNote(src, msg, "expected '{f}', found '{f}'", .{ expected_sentinel.fmtValueSema(pt, sema), actual_sentinel.fmtValueSema(pt, sema), }); @@ -33251,7 +33253,7 @@ const PeerResolveResult = union(enum) { }; }, .field_error => |field_error| { - const fmt = "struct field '{}' has conflicting types"; + const fmt = "struct field '{f}' has conflicting types"; const args = .{field_error.field_name.fmt(&pt.zcu.intern_pool)}; if (opt_msg) |msg| { try sema.errNote(src, msg, fmt, args); @@ -33282,7 +33284,7 @@ const PeerResolveResult = union(enum) { candidate_srcs.resolve(block, conflict_idx[1]), }; - const fmt = "incompatible types: '{}' and '{}'"; + const fmt = "incompatible types: '{f}' and '{f}'"; const args = .{ conflict_tys[0].fmt(pt), conflict_tys[1].fmt(pt), @@ -33296,8 +33298,8 @@ const PeerResolveResult = union(enum) { break :msg msg; }; - if (conflict_srcs[0]) |src_loc| try sema.errNote(src_loc, msg, "type '{}' here", .{conflict_tys[0].fmt(pt)}); - if (conflict_srcs[1]) |src_loc| try sema.errNote(src_loc, msg, "type '{}' here", .{conflict_tys[1].fmt(pt)}); + if 
(conflict_srcs[0]) |src_loc| try sema.errNote(src_loc, msg, "type '{f}' here", .{conflict_tys[0].fmt(pt)}); + if (conflict_srcs[1]) |src_loc| try sema.errNote(src_loc, msg, "type '{f}' here", .{conflict_tys[1].fmt(pt)}); // No child error break; @@ -34609,7 +34611,7 @@ pub fn resolveStructLayout(sema: *Sema, ty: Type) SemaError!void { if (struct_type.setLayoutWip(ip)) { const msg = try sema.errMsg( ty.srcLoc(zcu), - "struct '{}' depends on itself", + "struct '{f}' depends on itself", .{ty.fmt(pt)}, ); return sema.failWithOwnedErrorMsg(null, msg); @@ -34828,13 +34830,13 @@ fn checkBackingIntType(sema: *Sema, block: *Block, src: LazySrcLoc, backing_int_ const zcu = pt.zcu; if (!backing_int_ty.isInt(zcu)) { - return sema.fail(block, src, "expected backing integer type, found '{}'", .{backing_int_ty.fmt(pt)}); + return sema.fail(block, src, "expected backing integer type, found '{f}'", .{backing_int_ty.fmt(pt)}); } if (backing_int_ty.bitSize(zcu) != fields_bit_sum) { return sema.fail( block, src, - "backing integer type '{}' has bit size {} but the struct fields have a total bit size of {}", + "backing integer type '{f}' has bit size {d} but the struct fields have a total bit size of {d}", .{ backing_int_ty.fmt(pt), backing_int_ty.bitSize(zcu), fields_bit_sum }, ); } @@ -34844,7 +34846,7 @@ fn checkIndexable(sema: *Sema, block: *Block, src: LazySrcLoc, ty: Type) !void { const pt = sema.pt; if (!ty.isIndexable(pt.zcu)) { const msg = msg: { - const msg = try sema.errMsg(src, "type '{}' does not support indexing", .{ty.fmt(pt)}); + const msg = try sema.errMsg(src, "type '{f}' does not support indexing", .{ty.fmt(pt)}); errdefer msg.destroy(sema.gpa); try sema.errNote(src, msg, "operand must be an array, slice, tuple, or vector", .{}); break :msg msg; @@ -34868,7 +34870,7 @@ fn checkMemOperand(sema: *Sema, block: *Block, src: LazySrcLoc, ty: Type) !void } } const msg = msg: { - const msg = try sema.errMsg(src, "type '{}' is not an indexable pointer", .{ty.fmt(pt)}); + 
const msg = try sema.errMsg(src, "type '{f}' is not an indexable pointer", .{ty.fmt(pt)}); errdefer msg.destroy(sema.gpa); try sema.errNote(src, msg, "operand must be a slice, a many pointer or a pointer to an array", .{}); break :msg msg; @@ -34936,7 +34938,7 @@ pub fn resolveUnionLayout(sema: *Sema, ty: Type) SemaError!void { .field_types_wip, .layout_wip => { const msg = try sema.errMsg( ty.srcLoc(pt.zcu), - "union '{}' depends on itself", + "union '{f}' depends on itself", .{ty.fmt(pt)}, ); return sema.failWithOwnedErrorMsg(null, msg); @@ -35124,7 +35126,7 @@ pub fn resolveStructFieldTypes( if (struct_type.setFieldTypesWip(ip)) { const msg = try sema.errMsg( Type.fromInterned(ty).srcLoc(zcu), - "struct '{}' depends on itself", + "struct '{f}' depends on itself", .{Type.fromInterned(ty).fmt(pt)}, ); return sema.failWithOwnedErrorMsg(null, msg); @@ -35153,7 +35155,7 @@ pub fn resolveStructFieldInits(sema: *Sema, ty: Type) SemaError!void { if (struct_type.setInitsWip(ip)) { const msg = try sema.errMsg( ty.srcLoc(zcu), - "struct '{}' depends on itself", + "struct '{f}' depends on itself", .{ty.fmt(pt)}, ); return sema.failWithOwnedErrorMsg(null, msg); @@ -35177,11 +35179,7 @@ pub fn resolveUnionFieldTypes(sema: *Sema, ty: Type, union_type: InternPool.Load switch (union_type.flagsUnordered(ip).status) { .none => {}, .field_types_wip => { - const msg = try sema.errMsg( - ty.srcLoc(zcu), - "union '{}' depends on itself", - .{ty.fmt(pt)}, - ); + const msg = try sema.errMsg(ty.srcLoc(zcu), "union '{f}' depends on itself", .{ty.fmt(pt)}); return sema.failWithOwnedErrorMsg(null, msg); }, .have_field_types, @@ -35549,7 +35547,7 @@ fn structFields( switch (struct_type.layout) { .@"extern" => if (!try sema.validateExternType(field_ty, .struct_field)) { const msg = msg: { - const msg = try sema.errMsg(ty_src, "extern structs cannot contain fields of type '{}'", .{field_ty.fmt(pt)}); + const msg = try sema.errMsg(ty_src, "extern structs cannot contain fields of type '{f}'", 
.{field_ty.fmt(pt)}); errdefer msg.destroy(sema.gpa); try sema.explainWhyTypeIsNotExtern(msg, ty_src, field_ty, .struct_field); @@ -35561,7 +35559,7 @@ fn structFields( }, .@"packed" => if (!try sema.validatePackedType(field_ty)) { const msg = msg: { - const msg = try sema.errMsg(ty_src, "packed structs cannot contain fields of type '{}'", .{field_ty.fmt(pt)}); + const msg = try sema.errMsg(ty_src, "packed structs cannot contain fields of type '{f}'", .{field_ty.fmt(pt)}); errdefer msg.destroy(sema.gpa); try sema.explainWhyTypeIsNotPacked(msg, ty_src, field_ty); @@ -35808,7 +35806,7 @@ fn unionFields( // The provided type is an integer type and we must construct the enum tag type here. int_tag_ty = provided_ty; if (int_tag_ty.zigTypeTag(zcu) != .int and int_tag_ty.zigTypeTag(zcu) != .comptime_int) { - return sema.fail(&block_scope, tag_ty_src, "expected integer tag type, found '{}'", .{int_tag_ty.fmt(pt)}); + return sema.fail(&block_scope, tag_ty_src, "expected integer tag type, found '{f}'", .{int_tag_ty.fmt(pt)}); } if (fields_len > 0) { @@ -35817,7 +35815,7 @@ fn unionFields( const msg = msg: { const msg = try sema.errMsg(tag_ty_src, "specified integer tag type cannot represent every field", .{}); errdefer msg.destroy(sema.gpa); - try sema.errNote(tag_ty_src, msg, "type '{}' cannot fit values in range 0...{d}", .{ + try sema.errNote(tag_ty_src, msg, "type '{f}' cannot fit values in range 0...{d}", .{ int_tag_ty.fmt(pt), fields_len - 1, }); @@ -35832,7 +35830,7 @@ fn unionFields( // The provided type is the enum tag type. 
const enum_type = switch (ip.indexToKey(provided_ty.toIntern())) { .enum_type => ip.loadEnumType(provided_ty.toIntern()), - else => return sema.fail(&block_scope, tag_ty_src, "expected enum tag type, found '{}'", .{provided_ty.fmt(pt)}), + else => return sema.fail(&block_scope, tag_ty_src, "expected enum tag type, found '{f}'", .{provided_ty.fmt(pt)}), }; union_type.setTagType(ip, provided_ty.toIntern()); // The fields of the union must match the enum exactly. @@ -35929,7 +35927,7 @@ fn unionFields( if (result.overflow) return sema.fail( &block_scope, value_src, - "enumeration value '{}' too large for type '{}'", + "enumeration value '{f}' too large for type '{f}'", .{ result.val.fmtValueSema(pt, sema), int_tag_ty.fmt(pt) }, ); last_tag_val = result.val; @@ -35947,7 +35945,7 @@ fn unionFields( const msg = msg: { const msg = try sema.errMsg( value_src, - "enum tag value {} already taken", + "enum tag value {f} already taken", .{enum_tag_val.fmtValueSema(pt, sema)}, ); errdefer msg.destroy(gpa); @@ -35975,7 +35973,7 @@ fn unionFields( const tag_ty = union_type.tagTypeUnordered(ip); const tag_info = ip.loadEnumType(tag_ty); const enum_index = tag_info.nameIndex(ip, field_name) orelse { - return sema.fail(&block_scope, name_src, "no field named '{}' in enum '{}'", .{ + return sema.fail(&block_scope, name_src, "no field named '{f}' in enum '{f}'", .{ field_name.fmt(ip), Type.fromInterned(tag_ty).fmt(pt), }); }; @@ -35992,7 +35990,7 @@ fn unionFields( .base_node_inst = Type.fromInterned(tag_ty).typeDeclInstAllowGeneratedTag(zcu).?, .offset = .{ .container_field_name = enum_index }, }; - const msg = try sema.errMsg(name_src, "union field '{}' ordered differently than corresponding enum field", .{ + const msg = try sema.errMsg(name_src, "union field '{f}' ordered differently than corresponding enum field", .{ field_name.fmt(ip), }); errdefer msg.destroy(sema.gpa); @@ -36018,7 +36016,7 @@ fn unionFields( !try sema.validateExternType(field_ty, .union_field)) { const msg = 
msg: { - const msg = try sema.errMsg(type_src, "extern unions cannot contain fields of type '{}'", .{field_ty.fmt(pt)}); + const msg = try sema.errMsg(type_src, "extern unions cannot contain fields of type '{f}'", .{field_ty.fmt(pt)}); errdefer msg.destroy(sema.gpa); try sema.explainWhyTypeIsNotExtern(msg, type_src, field_ty, .union_field); @@ -36029,7 +36027,7 @@ fn unionFields( return sema.failWithOwnedErrorMsg(&block_scope, msg); } else if (layout == .@"packed" and !try sema.validatePackedType(field_ty)) { const msg = msg: { - const msg = try sema.errMsg(type_src, "packed unions cannot contain fields of type '{}'", .{field_ty.fmt(pt)}); + const msg = try sema.errMsg(type_src, "packed unions cannot contain fields of type '{f}'", .{field_ty.fmt(pt)}); errdefer msg.destroy(sema.gpa); try sema.explainWhyTypeIsNotPacked(msg, type_src, field_ty); @@ -36065,7 +36063,7 @@ fn unionFields( for (tag_info.names.get(ip), 0..) |field_name, field_index| { if (explicit_tags_seen[field_index]) continue; - try sema.addFieldErrNote(.fromInterned(tag_ty), field_index, msg, "field '{}' missing, declared here", .{ + try sema.addFieldErrNote(.fromInterned(tag_ty), field_index, msg, "field '{f}' missing, declared here", .{ field_name.fmt(ip), }); } @@ -36101,7 +36099,7 @@ fn generateUnionTagTypeNumbered( const name = try ip.getOrPutStringFmt( gpa, pt.tid, - "@typeInfo({}).@\"union\".tag_type.?", + "@typeInfo({f}).@\"union\".tag_type.?", .{union_name.fmt(ip)}, .no_embedded_nulls, ); @@ -36137,7 +36135,7 @@ fn generateUnionTagTypeSimple( const name = try ip.getOrPutStringFmt( gpa, pt.tid, - "@typeInfo({}).@\"union\".tag_type.?", + "@typeInfo({f}).@\"union\".tag_type.?", .{union_name.fmt(ip)}, .no_embedded_nulls, ); @@ -36671,13 +36669,13 @@ fn pointerDeref(sema: *Sema, block: *Block, src: LazySrcLoc, ptr_val: Value, ptr .needed_well_defined => |ty| return sema.fail( block, src, - "comptime dereference requires '{}' to have a well-defined layout", + "comptime dereference requires '{f}' to 
have a well-defined layout", .{ty.fmt(pt)}, ), .out_of_bounds => |ty| return sema.fail( block, src, - "dereference of '{}' exceeds bounds of containing decl of type '{}'", + "dereference of '{f}' exceeds bounds of containing decl of type '{f}'", .{ ptr_ty.fmt(pt), ty.fmt(pt) }, ), } @@ -36697,7 +36695,7 @@ fn pointerDerefExtra(sema: *Sema, block: *Block, src: LazySrcLoc, ptr_val: Value .success => |mv| return .{ .val = try mv.intern(pt, sema.arena) }, .runtime_load => return .runtime_load, .undef => return sema.failWithUseOfUndef(block, src), - .err_payload => |err_name| return sema.fail(block, src, "attempt to unwrap error: {}", .{err_name.fmt(ip)}), + .err_payload => |err_name| return sema.fail(block, src, "attempt to unwrap error: {f}", .{err_name.fmt(ip)}), .null_payload => return sema.fail(block, src, "attempt to use null value", .{}), .inactive_union_field => return sema.fail(block, src, "access of inactive union field", .{}), .needed_well_defined => |ty| return .{ .needed_well_defined = ty }, @@ -36822,12 +36820,12 @@ fn intFromFloatScalar( const float = val.toFloat(f128, zcu); if (std.math.isNan(float)) { - return sema.fail(block, src, "float value NaN cannot be stored in integer type '{}'", .{ + return sema.fail(block, src, "float value NaN cannot be stored in integer type '{f}'", .{ int_ty.fmt(pt), }); } if (std.math.isInf(float)) { - return sema.fail(block, src, "float value Inf cannot be stored in integer type '{}'", .{ + return sema.fail(block, src, "float value Inf cannot be stored in integer type '{f}'", .{ int_ty.fmt(pt), }); } @@ -36842,7 +36840,7 @@ fn intFromFloatScalar( .exact => return sema.fail( block, src, - "fractional component prevents float value '{}' from coercion to type '{}'", + "fractional component prevents float value '{f}' from coercion to type '{f}'", .{ val.fmtValueSema(pt, sema), int_ty.fmt(pt) }, ), .truncate => {}, @@ -36854,7 +36852,7 @@ fn intFromFloatScalar( const int_info = int_ty.intInfo(zcu); if 
(!big_int.toConst().fitsInTwosComp(int_info.signedness, int_info.bits)) { - return sema.fail(block, src, "float value '{}' cannot be stored in integer type '{}'", .{ + return sema.fail(block, src, "float value '{f}' cannot be stored in integer type '{f}'", .{ val.fmtValueSema(pt, sema), int_ty.fmt(pt), }); } @@ -37175,7 +37173,14 @@ fn explainWhyValueContainsReferenceToComptimeVar(sema: *Sema, msg: *Zcu.ErrorMsg } } -fn notePathToComptimeAllocPtr(sema: *Sema, msg: *Zcu.ErrorMsg, src: LazySrcLoc, val: Value, intermediate_value_count: u32, start_value_name: InternPool.NullTerminatedString) Allocator.Error!union(enum) { +fn notePathToComptimeAllocPtr( + sema: *Sema, + msg: *Zcu.ErrorMsg, + src: LazySrcLoc, + val: Value, + intermediate_value_count: u32, + start_value_name: InternPool.NullTerminatedString, +) Allocator.Error!union(enum) { done, new_val: Value, } { @@ -37186,9 +37191,9 @@ fn notePathToComptimeAllocPtr(sema: *Sema, msg: *Zcu.ErrorMsg, src: LazySrcLoc, var first_path: std.ArrayListUnmanaged(u8) = .empty; if (intermediate_value_count == 0) { - try first_path.writer(arena).print("{i}", .{start_value_name.fmt(ip)}); + try first_path.print(arena, "{f}", .{start_value_name.fmt(ip)}); } else { - try first_path.writer(arena).print("v{}", .{intermediate_value_count - 1}); + try first_path.print(arena, "v{d}", .{intermediate_value_count - 1}); } const comptime_ptr = try sema.notePathToComptimeAllocPtrInner(val, &first_path); @@ -37213,30 +37218,26 @@ fn notePathToComptimeAllocPtr(sema: *Sema, msg: *Zcu.ErrorMsg, src: LazySrcLoc, error.AnalysisFail => unreachable, }; - var second_path: std.ArrayListUnmanaged(u8) = .empty; + var second_path_aw: std.io.Writer.Allocating = .init(arena); + defer second_path_aw.deinit(); const inter_name = try std.fmt.allocPrint(arena, "v{d}", .{intermediate_value_count}); const deriv_start = @import("print_value.zig").printPtrDerivation( derivation, - second_path.writer(arena), + &second_path_aw.writer, pt, .lvalue, .{ .str = inter_name 
}, 20, - ) catch |err| switch (err) { - error.OutOfMemory => |e| return e, - error.AnalysisFail => unreachable, - error.ComptimeReturn => unreachable, - error.ComptimeBreak => unreachable, - }; + ) catch return error.OutOfMemory; switch (deriv_start) { .int, .nav_ptr => unreachable, .uav_ptr => |uav| { - try sema.errNote(src, msg, "'{s}' points to '{s}', where", .{ first_path.items, second_path.items }); + try sema.errNote(src, msg, "'{s}' points to '{s}', where", .{ first_path.items, second_path_aw.getWritten() }); return .{ .new_val = .fromInterned(uav.val) }; }, .comptime_alloc_ptr => |cta_info| { - try sema.errNote(src, msg, "'{s}' points to '{s}', where", .{ first_path.items, second_path.items }); + try sema.errNote(src, msg, "'{s}' points to '{s}', where", .{ first_path.items, second_path_aw.getWritten() }); const cta = sema.getComptimeAlloc(cta_info.idx); if (cta.is_const) { return .{ .new_val = cta_info.val }; @@ -37246,7 +37247,7 @@ fn notePathToComptimeAllocPtr(sema: *Sema, msg: *Zcu.ErrorMsg, src: LazySrcLoc, } }, .comptime_field_ptr => { - try sema.errNote(src, msg, "'{s}' points to '{s}', where", .{ first_path.items, second_path.items }); + try sema.errNote(src, msg, "'{s}' points to '{s}', where", .{ first_path.items, second_path_aw.getWritten() }); try sema.errNote(src, msg, "'{s}' is a comptime field", .{inter_name}); return .done; }, @@ -37286,7 +37287,7 @@ fn notePathToComptimeAllocPtrInner(sema: *Sema, val: Value, path: *std.ArrayList const backing_enum = union_ty.unionTagTypeHypothetical(zcu); const field_idx = backing_enum.enumTagFieldIndex(.fromInterned(un.tag), zcu).?; const field_name = backing_enum.enumFieldName(field_idx, zcu); - try path.writer(arena).print(".{i}", .{field_name.fmt(ip)}); + try path.print(arena, ".{f}", .{field_name.fmt(ip)}); return sema.notePathToComptimeAllocPtrInner(.fromInterned(un.val), path); }, .aggregate => |agg| { @@ -37301,17 +37302,17 @@ fn notePathToComptimeAllocPtrInner(sema: *Sema, val: Value, path: 
*std.ArrayList }; const agg_ty: Type = .fromInterned(agg.ty); switch (agg_ty.zigTypeTag(zcu)) { - .array, .vector => try path.writer(arena).print("[{d}]", .{elem_idx}), + .array, .vector => try path.print(arena, "[{d}]", .{elem_idx}), .pointer => switch (elem_idx) { Value.slice_ptr_index => try path.appendSlice(arena, ".ptr"), Value.slice_len_index => try path.appendSlice(arena, ".len"), else => unreachable, }, .@"struct" => if (agg_ty.isTuple(zcu)) { - try path.writer(arena).print("[{d}]", .{elem_idx}); + try path.print(arena, "[{d}]", .{elem_idx}); } else { const name = agg_ty.structFieldName(elem_idx, zcu).unwrap().?; - try path.writer(arena).print(".{i}", .{name.fmt(ip)}); + try path.print(arena, ".{f}", .{name.fmt(ip)}); }, else => unreachable, } @@ -37588,7 +37589,7 @@ fn resolveDeclaredEnumInner( if (tag_type_ref != .none) { const ty = try sema.resolveType(block, tag_ty_src, tag_type_ref); if (ty.zigTypeTag(zcu) != .int and ty.zigTypeTag(zcu) != .comptime_int) { - return sema.fail(block, tag_ty_src, "expected integer tag type, found '{}'", .{ty.fmt(pt)}); + return sema.fail(block, tag_ty_src, "expected integer tag type, found '{f}'", .{ty.fmt(pt)}); } break :ty ty; } else if (fields_len == 0) { @@ -37642,7 +37643,7 @@ fn resolveDeclaredEnumInner( .offset = .{ .container_field_value = conflict.prev_field_idx }, }; const msg = msg: { - const msg = try sema.errMsg(value_src, "enum tag value {} already taken", .{last_tag_val.?.fmtValueSema(pt, sema)}); + const msg = try sema.errMsg(value_src, "enum tag value {f} already taken", .{last_tag_val.?.fmtValueSema(pt, sema)}); errdefer msg.destroy(gpa); try sema.errNote(other_field_src, msg, "other occurrence here", .{}); break :msg msg; @@ -37665,7 +37666,7 @@ fn resolveDeclaredEnumInner( .offset = .{ .container_field_value = conflict.prev_field_idx }, }; const msg = msg: { - const msg = try sema.errMsg(value_src, "enum tag value {} already taken", .{last_tag_val.?.fmtValueSema(pt, sema)}); + const msg = try 
sema.errMsg(value_src, "enum tag value {f} already taken", .{last_tag_val.?.fmtValueSema(pt, sema)}); errdefer msg.destroy(gpa); try sema.errNote(other_field_src, msg, "other occurrence here", .{}); break :msg msg; @@ -37682,7 +37683,7 @@ fn resolveDeclaredEnumInner( }; if (tag_overflow) { - const msg = try sema.errMsg(value_src, "enumeration value '{}' too large for type '{}'", .{ + const msg = try sema.errMsg(value_src, "enumeration value '{f}' too large for type '{f}'", .{ last_tag_val.?.fmtValueSema(pt, sema), int_tag_ty.fmt(pt), }); return sema.failWithOwnedErrorMsg(block, msg); diff --git a/src/Sema/LowerZon.zig b/src/Sema/LowerZon.zig index 9bd876082f..8c70a5b784 100644 --- a/src/Sema/LowerZon.zig +++ b/src/Sema/LowerZon.zig @@ -338,7 +338,7 @@ fn failUnsupportedResultType( const gpa = sema.gpa; const pt = sema.pt; return sema.failWithOwnedErrorMsg(self.block, msg: { - const msg = try sema.errMsg(self.import_loc, "type '{}' is not available in ZON", .{ty.fmt(pt)}); + const msg = try sema.errMsg(self.import_loc, "type '{f}' is not available in ZON", .{ty.fmt(pt)}); errdefer msg.destroy(gpa); if (opt_note) |n| try sema.errNote(self.import_loc, msg, "{s}", .{n}); break :msg msg; @@ -360,11 +360,7 @@ fn fail( fn lowerExprKnownResTy(self: *LowerZon, node: Zoir.Node.Index, res_ty: Type) CompileError!InternPool.Index { const pt = self.sema.pt; return self.lowerExprKnownResTyInner(node, res_ty) catch |err| switch (err) { - error.WrongType => return self.fail( - node, - "expected type '{}'", - .{res_ty.fmt(pt)}, - ), + error.WrongType => return self.fail(node, "expected type '{f}'", .{res_ty.fmt(pt)}), else => |e| return e, }; } @@ -428,7 +424,7 @@ fn lowerExprKnownResTyInner( .frame, .@"anyframe", .void, - => return self.fail(node, "type '{}' not available in ZON", .{res_ty.fmt(pt)}), + => return self.fail(node, "type '{f}' not available in ZON", .{res_ty.fmt(pt)}), } } @@ -458,7 +454,7 @@ fn lowerInt( // If lhs is unsigned and rhs is less than 0, we're out of 
bounds if (lhs_info.signedness == .unsigned and rhs < 0) return self.fail( node, - "type '{}' cannot represent integer value '{}'", + "type '{f}' cannot represent integer value '{d}'", .{ res_ty.fmt(self.sema.pt), rhs }, ); @@ -478,7 +474,7 @@ fn lowerInt( if (rhs < min_int or rhs > max_int) { return self.fail( node, - "type '{}' cannot represent integer value '{}'", + "type '{f}' cannot represent integer value '{d}'", .{ res_ty.fmt(self.sema.pt), rhs }, ); } @@ -496,7 +492,7 @@ fn lowerInt( if (!val.fitsInTwosComp(int_info.signedness, int_info.bits)) { return self.fail( node, - "type '{}' cannot represent integer value '{}'", + "type '{f}' cannot represent integer value '{d}'", .{ res_ty.fmt(self.sema.pt), val }, ); } @@ -517,7 +513,7 @@ fn lowerInt( switch (big_int.setFloat(val, .trunc)) { .inexact => return self.fail( node, - "fractional component prevents float value '{}' from coercion to type '{}'", + "fractional component prevents float value '{d}' from coercion to type '{f}'", .{ val, res_ty.fmt(self.sema.pt) }, ), .exact => {}, @@ -528,8 +524,8 @@ fn lowerInt( if (!big_int.toConst().fitsInTwosComp(int_info.signedness, int_info.bits)) { return self.fail( node, - "type '{}' cannot represent integer value '{}'", - .{ val, res_ty.fmt(self.sema.pt) }, + "type '{f}' cannot represent integer value '{d}'", + .{ res_ty.fmt(self.sema.pt), val }, ); } @@ -550,7 +546,7 @@ fn lowerInt( if (val >= out_of_range) { return self.fail( node, - "type '{}' cannot represent integer value '{}'", + "type '{f}' cannot represent integer value '{d}'", .{ res_ty.fmt(self.sema.pt), val }, ); } @@ -584,7 +580,7 @@ fn lowerFloat( .pos_inf => b: { if (res_ty.toIntern() == .comptime_float_type) return self.fail( node, - "expected type '{}'", + "expected type '{f}'", .{res_ty.fmt(self.sema.pt)}, ); break :b try self.sema.pt.floatValue(res_ty, std.math.inf(f128)); @@ -592,7 +588,7 @@ fn lowerFloat( .neg_inf => b: { if (res_ty.toIntern() == .comptime_float_type) return self.fail( node, - 
"expected type '{}'", + "expected type '{f}'", .{res_ty.fmt(self.sema.pt)}, ); break :b try self.sema.pt.floatValue(res_ty, -std.math.inf(f128)); @@ -600,7 +596,7 @@ fn lowerFloat( .nan => b: { if (res_ty.toIntern() == .comptime_float_type) return self.fail( node, - "expected type '{}'", + "expected type '{f}'", .{res_ty.fmt(self.sema.pt)}, ); break :b try self.sema.pt.floatValue(res_ty, std.math.nan(f128)); @@ -661,7 +657,7 @@ fn lowerEnum(self: *LowerZon, node: Zoir.Node.Index, res_ty: Type) !InternPool.I const field_index = res_ty.enumFieldIndex(field_name_interned, self.sema.pt.zcu) orelse { return self.fail( node, - "enum {} has no member named '{}'", + "enum {f} has no member named '{f}'", .{ res_ty.fmt(self.sema.pt), std.zig.fmtId(field_name.get(self.file.zoir.?)), @@ -795,7 +791,7 @@ fn lowerStruct(self: *LowerZon, node: Zoir.Node.Index, res_ty: Type) !InternPool const field_node = fields.vals.at(@intCast(i)); const name_index = struct_info.nameIndex(ip, field_name) orelse { - return self.fail(field_node, "unexpected field '{}'", .{field_name.fmt(ip)}); + return self.fail(field_node, "unexpected field '{f}'", .{field_name.fmt(ip)}); }; const field_type: Type = .fromInterned(struct_info.field_types.get(ip)[name_index]); @@ -816,7 +812,7 @@ fn lowerStruct(self: *LowerZon, node: Zoir.Node.Index, res_ty: Type) !InternPool const field_names = struct_info.field_names.get(ip); for (field_values, field_names) |*value, name| { - if (value.* == .none) return self.fail(node, "missing field '{}'", .{name.fmt(ip)}); + if (value.* == .none) return self.fail(node, "missing field '{f}'", .{name.fmt(ip)}); } return self.sema.pt.intern(.{ .aggregate = .{ @@ -934,7 +930,7 @@ fn lowerUnion(self: *LowerZon, node: Zoir.Node.Index, res_ty: Type) !InternPool. 
.struct_literal => b: { const fields: @FieldType(Zoir.Node, "struct_literal") = switch (node.get(self.file.zoir.?)) { .struct_literal => |fields| fields, - else => return self.fail(node, "expected type '{}'", .{res_ty.fmt(self.sema.pt)}), + else => return self.fail(node, "expected type '{f}'", .{res_ty.fmt(self.sema.pt)}), }; if (fields.names.len != 1) { return error.WrongType; diff --git a/src/Type.zig b/src/Type.zig index 333e738b05..a199811c8e 100644 --- a/src/Type.zig +++ b/src/Type.zig @@ -121,15 +121,13 @@ pub fn eql(a: Type, b: Type, zcu: *const Zcu) bool { return a.toIntern() == b.toIntern(); } -pub fn format(ty: Type, comptime unused_fmt_string: []const u8, options: std.fmt.FormatOptions, writer: anytype) !void { +pub fn format(ty: Type, writer: *std.io.Writer) !void { _ = ty; - _ = unused_fmt_string; - _ = options; _ = writer; @compileError("do not format types directly; use either ty.fmtDebug() or ty.fmt()"); } -pub const Formatter = std.fmt.Formatter(format2); +pub const Formatter = std.fmt.Formatter(Format, Format.default); pub fn fmt(ty: Type, pt: Zcu.PerThread) Formatter { return .{ .data = .{ @@ -138,42 +136,28 @@ pub fn fmt(ty: Type, pt: Zcu.PerThread) Formatter { } }; } -const FormatContext = struct { +const Format = struct { ty: Type, pt: Zcu.PerThread, + + fn default(f: Format, writer: *std.io.Writer) std.io.Writer.Error!void { + return print(f.ty, writer, f.pt); + } }; -fn format2( - ctx: FormatContext, - comptime unused_format_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - comptime assert(unused_format_string.len == 0); - _ = options; - return print(ctx.ty, writer, ctx.pt); -} - -pub fn fmtDebug(ty: Type) std.fmt.Formatter(dump) { +pub fn fmtDebug(ty: Type) std.fmt.Formatter(Type, dump) { return .{ .data = ty }; } /// This is a debug function. In order to print types in a meaningful way /// we also need access to the module. 
-pub fn dump( - start_type: Type, - comptime unused_format_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) @TypeOf(writer).Error!void { - _ = options; - comptime assert(unused_format_string.len == 0); +pub fn dump(start_type: Type, writer: *std.io.Writer) std.io.Writer.Error!void { return writer.print("{any}", .{start_type.ip_index}); } /// Prints a name suitable for `@typeName`. /// TODO: take an `opt_sema` to pass to `fmtValue` when printing sentinels. -pub fn print(ty: Type, writer: anytype, pt: Zcu.PerThread) @TypeOf(writer).Error!void { +pub fn print(ty: Type, writer: *std.io.Writer, pt: Zcu.PerThread) std.io.Writer.Error!void { const zcu = pt.zcu; const ip = &zcu.intern_pool; switch (ip.indexToKey(ty.toIntern())) { @@ -190,8 +174,8 @@ pub fn print(ty: Type, writer: anytype, pt: Zcu.PerThread) @TypeOf(writer).Error if (info.sentinel != .none) switch (info.flags.size) { .one, .c => unreachable, - .many => try writer.print("[*:{}]", .{Value.fromInterned(info.sentinel).fmtValue(pt)}), - .slice => try writer.print("[:{}]", .{Value.fromInterned(info.sentinel).fmtValue(pt)}), + .many => try writer.print("[*:{f}]", .{Value.fromInterned(info.sentinel).fmtValue(pt)}), + .slice => try writer.print("[:{f}]", .{Value.fromInterned(info.sentinel).fmtValue(pt)}), } else switch (info.flags.size) { .one => try writer.writeAll("*"), .many => try writer.writeAll("[*]"), @@ -235,7 +219,7 @@ pub fn print(ty: Type, writer: anytype, pt: Zcu.PerThread) @TypeOf(writer).Error try writer.print("[{d}]", .{array_type.len}); try print(Type.fromInterned(array_type.child), writer, pt); } else { - try writer.print("[{d}:{}]", .{ + try writer.print("[{d}:{f}]", .{ array_type.len, Value.fromInterned(array_type.sentinel).fmtValue(pt), }); @@ -265,7 +249,7 @@ pub fn print(ty: Type, writer: anytype, pt: Zcu.PerThread) @TypeOf(writer).Error }, .inferred_error_set_type => |func_index| { const func_nav = ip.getNav(zcu.funcInfo(func_index).owner_nav); - try 
writer.print("@typeInfo(@typeInfo(@TypeOf({})).@\"fn\".return_type.?).error_union.error_set", .{ + try writer.print("@typeInfo(@typeInfo(@TypeOf({f})).@\"fn\".return_type.?).error_union.error_set", .{ func_nav.fqn.fmt(ip), }); }, @@ -274,7 +258,7 @@ pub fn print(ty: Type, writer: anytype, pt: Zcu.PerThread) @TypeOf(writer).Error try writer.writeAll("error{"); for (names.get(ip), 0..) |name, i| { if (i != 0) try writer.writeByte(','); - try writer.print("{}", .{name.fmt(ip)}); + try writer.print("{f}", .{name.fmt(ip)}); } try writer.writeAll("}"); }, @@ -317,7 +301,7 @@ pub fn print(ty: Type, writer: anytype, pt: Zcu.PerThread) @TypeOf(writer).Error }, .struct_type => { const name = ip.loadStructType(ty.toIntern()).name; - try writer.print("{}", .{name.fmt(ip)}); + try writer.print("{f}", .{name.fmt(ip)}); }, .tuple_type => |tuple| { if (tuple.types.len == 0) { @@ -328,22 +312,22 @@ pub fn print(ty: Type, writer: anytype, pt: Zcu.PerThread) @TypeOf(writer).Error try writer.writeAll(if (i == 0) " " else ", "); if (val != .none) try writer.writeAll("comptime "); try print(Type.fromInterned(field_ty), writer, pt); - if (val != .none) try writer.print(" = {}", .{Value.fromInterned(val).fmtValue(pt)}); + if (val != .none) try writer.print(" = {f}", .{Value.fromInterned(val).fmtValue(pt)}); } try writer.writeAll(" }"); }, .union_type => { const name = ip.loadUnionType(ty.toIntern()).name; - try writer.print("{}", .{name.fmt(ip)}); + try writer.print("{f}", .{name.fmt(ip)}); }, .opaque_type => { const name = ip.loadOpaqueType(ty.toIntern()).name; - try writer.print("{}", .{name.fmt(ip)}); + try writer.print("{f}", .{name.fmt(ip)}); }, .enum_type => { const name = ip.loadEnumType(ty.toIntern()).name; - try writer.print("{}", .{name.fmt(ip)}); + try writer.print("{f}", .{name.fmt(ip)}); }, .func_type => |fn_info| { if (fn_info.is_noinline) { @@ -382,7 +366,9 @@ pub fn print(ty: Type, writer: anytype, pt: Zcu.PerThread) @TypeOf(writer).Error } } switch (fn_info.cc) { - .auto, 
.async, .naked, .@"inline" => try writer.print("callconv(.{}) ", .{std.zig.fmtId(@tagName(fn_info.cc))}), + .auto, .async, .naked, .@"inline" => try writer.print("callconv(.{f}) ", .{ + std.zig.fmtId(@tagName(fn_info.cc)), + }), else => try writer.print("callconv({any}) ", .{fn_info.cc}), } } diff --git a/src/Value.zig b/src/Value.zig index 96206b6c26..aed97c8754 100644 --- a/src/Value.zig +++ b/src/Value.zig @@ -15,31 +15,23 @@ const Value = @This(); ip_index: InternPool.Index, -pub fn format(val: Value, comptime fmt: []const u8, options: std.fmt.FormatOptions, writer: anytype) !void { +pub fn format(val: Value, writer: *std.io.Writer) !void { _ = val; - _ = fmt; - _ = options; _ = writer; @compileError("do not use format values directly; use either fmtDebug or fmtValue"); } /// This is a debug function. In order to print values in a meaningful way /// we also need access to the type. -pub fn dump( - start_val: Value, - comptime fmt: []const u8, - _: std.fmt.FormatOptions, - out_stream: anytype, -) !void { - comptime assert(fmt.len == 0); - try out_stream.print("(interned: {})", .{start_val.toIntern()}); +pub fn dump(start_val: Value, w: std.io.Writer) std.io.Writer.Error!void { + try w.print("(interned: {})", .{start_val.toIntern()}); } -pub fn fmtDebug(val: Value) std.fmt.Formatter(dump) { +pub fn fmtDebug(val: Value) std.fmt.Formatter(Value, dump) { return .{ .data = val }; } -pub fn fmtValue(val: Value, pt: Zcu.PerThread) std.fmt.Formatter(print_value.format) { +pub fn fmtValue(val: Value, pt: Zcu.PerThread) std.fmt.Formatter(print_value.FormatContext, print_value.format) { return .{ .data = .{ .val = val, .pt = pt, @@ -48,7 +40,7 @@ pub fn fmtValue(val: Value, pt: Zcu.PerThread) std.fmt.Formatter(print_value.for } }; } -pub fn fmtValueSema(val: Value, pt: Zcu.PerThread, sema: *Sema) std.fmt.Formatter(print_value.formatSema) { +pub fn fmtValueSema(val: Value, pt: Zcu.PerThread, sema: *Sema) std.fmt.Formatter(print_value.FormatContext, print_value.formatSema) { 
return .{ .data = .{ .val = val, .pt = pt, @@ -57,7 +49,7 @@ pub fn fmtValueSema(val: Value, pt: Zcu.PerThread, sema: *Sema) std.fmt.Formatte } }; } -pub fn fmtValueSemaFull(ctx: print_value.FormatContext) std.fmt.Formatter(print_value.formatSema) { +pub fn fmtValueSemaFull(ctx: print_value.FormatContext) std.fmt.Formatter(print_value.FormatContext, print_value.formatSema) { return .{ .data = ctx }; } diff --git a/src/Zcu.zig b/src/Zcu.zig index a815921cf0..26ee09cfbf 100644 --- a/src/Zcu.zig +++ b/src/Zcu.zig @@ -15,6 +15,7 @@ const BigIntConst = std.math.big.int.Const; const BigIntMutable = std.math.big.int.Mutable; const Target = std.Target; const Ast = std.zig.Ast; +const Writer = std.io.Writer; const Zcu = @This(); const Compilation = @import("Compilation.zig"); @@ -858,7 +859,7 @@ pub const Namespace = struct { try ns.fileScope(zcu).renderFullyQualifiedDebugName(writer); break :sep ':'; }; - if (name != .empty) try writer.print("{c}{}", .{ sep, name.fmt(&zcu.intern_pool) }); + if (name != .empty) try writer.print("{c}{f}", .{ sep, name.fmt(&zcu.intern_pool) }); } pub fn internFullyQualifiedName( @@ -870,7 +871,7 @@ pub const Namespace = struct { ) !InternPool.NullTerminatedString { const ns_name = Type.fromInterned(ns.owner_type).containerTypeName(ip); if (name == .empty) return ns_name; - return ip.getOrPutStringFmt(gpa, tid, "{}.{}", .{ ns_name.fmt(ip), name.fmt(ip) }, .no_embedded_nulls); + return ip.getOrPutStringFmt(gpa, tid, "{f}.{f}", .{ ns_name.fmt(ip), name.fmt(ip) }, .no_embedded_nulls); } }; @@ -1039,12 +1040,12 @@ pub const File = struct { if (stat.size > std.math.maxInt(u32)) return error.FileTooBig; - const source = try gpa.allocSentinel(u8, @as(usize, @intCast(stat.size)), 0); + const source = try gpa.allocSentinel(u8, @intCast(stat.size), 0); errdefer gpa.free(source); - const amt = try f.readAll(source); - if (amt != stat.size) - return error.UnexpectedEndOfFile; + var file_reader = f.reader(&.{}); + file_reader.size = stat.size; + try 
file_reader.interface.readSliceAll(source); // Here we do not modify stat fields because this function is the one // used for error reporting. We need to keep the stat fields stale so that @@ -1097,11 +1098,10 @@ pub const File = struct { const gpa = pt.zcu.gpa; const ip = &pt.zcu.intern_pool; const strings = ip.getLocal(pt.tid).getMutableStrings(gpa); - const slice = try strings.addManyAsSlice(file.fullyQualifiedNameLen()); - var fbs = std.io.fixedBufferStream(slice[0]); - file.renderFullyQualifiedName(fbs.writer()) catch unreachable; - assert(fbs.pos == slice[0].len); - return ip.getOrPutTrailingString(gpa, pt.tid, @intCast(slice[0].len), .no_embedded_nulls); + var w: Writer = .fixed((try strings.addManyAsSlice(file.fullyQualifiedNameLen()))[0]); + file.renderFullyQualifiedName(&w) catch unreachable; + assert(w.end == w.buffer.len); + return ip.getOrPutTrailingString(gpa, pt.tid, @intCast(w.end), .no_embedded_nulls); } pub const Index = InternPool.FileIndex; @@ -1112,7 +1112,7 @@ pub const File = struct { eb: *std.zig.ErrorBundle.Wip, ) !std.zig.ErrorBundle.SourceLocationIndex { return eb.addSourceLocation(.{ - .src_path = try eb.printString("{}", .{file.path.fmt(zcu.comp)}), + .src_path = try eb.printString("{f}", .{file.path.fmt(zcu.comp)}), .span_start = 0, .span_main = 0, .span_end = 0, @@ -1133,7 +1133,7 @@ pub const File = struct { const end = start + tree.tokenSlice(tok).len; const loc = std.zig.findLineColumn(source.bytes, start); return eb.addSourceLocation(.{ - .src_path = try eb.printString("{}", .{file.path.fmt(zcu.comp)}), + .src_path = try eb.printString("{f}", .{file.path.fmt(zcu.comp)}), .span_start = start, .span_main = start, .span_end = @intCast(end), @@ -1190,13 +1190,8 @@ pub const ErrorMsg = struct { gpa.destroy(err_msg); } - pub fn init( - gpa: Allocator, - src_loc: LazySrcLoc, - comptime format: []const u8, - args: anytype, - ) !ErrorMsg { - return ErrorMsg{ + pub fn init(gpa: Allocator, src_loc: LazySrcLoc, comptime format: []const u8, 
args: anytype) !ErrorMsg { + return .{ .src_loc = src_loc, .msg = try std.fmt.allocPrint(gpa, format, args), }; @@ -2811,10 +2806,18 @@ comptime { } pub fn loadZirCache(gpa: Allocator, cache_file: std.fs.File) !Zir { - return loadZirCacheBody(gpa, try cache_file.reader().readStruct(Zir.Header), cache_file); + var buffer: [2000]u8 = undefined; + var file_reader = cache_file.reader(&buffer); + return result: { + const header = file_reader.interface.takeStruct(Zir.Header) catch |err| break :result err; + break :result loadZirCacheBody(gpa, header.*, &file_reader.interface); + } catch |err| switch (err) { + error.ReadFailed => return file_reader.err.?, + else => |e| return e, + }; } -pub fn loadZirCacheBody(gpa: Allocator, header: Zir.Header, cache_file: std.fs.File) !Zir { +pub fn loadZirCacheBody(gpa: Allocator, header: Zir.Header, cache_br: *std.io.Reader) !Zir { var instructions: std.MultiArrayList(Zir.Inst) = .{}; errdefer instructions.deinit(gpa); @@ -2837,34 +2840,16 @@ pub fn loadZirCacheBody(gpa: Allocator, header: Zir.Header, cache_file: std.fs.F undefined; defer if (data_has_safety_tag) gpa.free(safety_buffer); - const data_ptr = if (data_has_safety_tag) - @as([*]u8, @ptrCast(safety_buffer.ptr)) - else - @as([*]u8, @ptrCast(zir.instructions.items(.data).ptr)); - - var iovecs = [_]std.posix.iovec{ - .{ - .base = @as([*]u8, @ptrCast(zir.instructions.items(.tag).ptr)), - .len = header.instructions_len, - }, - .{ - .base = data_ptr, - .len = header.instructions_len * 8, - }, - .{ - .base = zir.string_bytes.ptr, - .len = header.string_bytes_len, - }, - .{ - .base = @as([*]u8, @ptrCast(zir.extra.ptr)), - .len = header.extra_len * 4, - }, + var vecs = [_][]u8{ + @ptrCast(zir.instructions.items(.tag)), + if (data_has_safety_tag) + @ptrCast(safety_buffer) + else + @ptrCast(zir.instructions.items(.data)), + zir.string_bytes, + @ptrCast(zir.extra), }; - const amt_read = try cache_file.readvAll(&iovecs); - const amt_expected = zir.instructions.len * 9 + - 
zir.string_bytes.len + - zir.extra.len * 4; - if (amt_read != amt_expected) return error.UnexpectedFileSize; + try cache_br.readVecAll(&vecs); if (data_has_safety_tag) { const tags = zir.instructions.items(.tag); for (zir.instructions.items(.data), 0..) |*data, i| { @@ -2876,7 +2861,6 @@ pub fn loadZirCacheBody(gpa: Allocator, header: Zir.Header, cache_file: std.fs.F }; } } - return zir; } @@ -2887,14 +2871,6 @@ pub fn saveZirCache(gpa: Allocator, cache_file: std.fs.File, stat: std.fs.File.S undefined; defer if (data_has_safety_tag) gpa.free(safety_buffer); - const data_ptr: [*]const u8 = if (data_has_safety_tag) - if (zir.instructions.len == 0) - undefined - else - @ptrCast(safety_buffer.ptr) - else - @ptrCast(zir.instructions.items(.data).ptr); - if (data_has_safety_tag) { // The `Data` union has a safety tag but in the file format we store it without. for (zir.instructions.items(.data), 0..) |*data, i| { @@ -2912,29 +2888,20 @@ pub fn saveZirCache(gpa: Allocator, cache_file: std.fs.File, stat: std.fs.File.S .stat_inode = stat.inode, .stat_mtime = stat.mtime, }; - var iovecs: [5]std.posix.iovec_const = .{ - .{ - .base = @ptrCast(&header), - .len = @sizeOf(Zir.Header), - }, - .{ - .base = @ptrCast(zir.instructions.items(.tag).ptr), - .len = zir.instructions.len, - }, - .{ - .base = data_ptr, - .len = zir.instructions.len * 8, - }, - .{ - .base = zir.string_bytes.ptr, - .len = zir.string_bytes.len, - }, - .{ - .base = @ptrCast(zir.extra.ptr), - .len = zir.extra.len * 4, - }, + var vecs = [_][]const u8{ + @ptrCast((&header)[0..1]), + @ptrCast(zir.instructions.items(.tag)), + if (data_has_safety_tag) + @ptrCast(safety_buffer) + else + @ptrCast(zir.instructions.items(.data)), + zir.string_bytes, + @ptrCast(zir.extra), + }; + var cache_fw = cache_file.writer(&.{}); + cache_fw.interface.writeVecAll(&vecs) catch |err| switch (err) { + error.WriteFailed => return cache_fw.err.?, }; - try cache_file.writevAll(&iovecs); } pub fn saveZoirCache(cache_file: std.fs.File, stat: 
std.fs.File.Stat, zoir: Zoir) std.fs.File.WriteError!void { @@ -2950,48 +2917,24 @@ pub fn saveZoirCache(cache_file: std.fs.File, stat: std.fs.File.Stat, zoir: Zoir .stat_inode = stat.inode, .stat_mtime = stat.mtime, }; - var iovecs: [9]std.posix.iovec_const = .{ - .{ - .base = @ptrCast(&header), - .len = @sizeOf(Zoir.Header), - }, - .{ - .base = @ptrCast(zoir.nodes.items(.tag)), - .len = zoir.nodes.len * @sizeOf(Zoir.Node.Repr.Tag), - }, - .{ - .base = @ptrCast(zoir.nodes.items(.data)), - .len = zoir.nodes.len * 4, - }, - .{ - .base = @ptrCast(zoir.nodes.items(.ast_node)), - .len = zoir.nodes.len * 4, - }, - .{ - .base = @ptrCast(zoir.extra), - .len = zoir.extra.len * 4, - }, - .{ - .base = @ptrCast(zoir.limbs), - .len = zoir.limbs.len * @sizeOf(std.math.big.Limb), - }, - .{ - .base = zoir.string_bytes.ptr, - .len = zoir.string_bytes.len, - }, - .{ - .base = @ptrCast(zoir.compile_errors), - .len = zoir.compile_errors.len * @sizeOf(Zoir.CompileError), - }, - .{ - .base = @ptrCast(zoir.error_notes), - .len = zoir.error_notes.len * @sizeOf(Zoir.CompileError.Note), - }, + var vecs = [_][]const u8{ + @ptrCast((&header)[0..1]), + @ptrCast(zoir.nodes.items(.tag)), + @ptrCast(zoir.nodes.items(.data)), + @ptrCast(zoir.nodes.items(.ast_node)), + @ptrCast(zoir.extra), + @ptrCast(zoir.limbs), + zoir.string_bytes, + @ptrCast(zoir.compile_errors), + @ptrCast(zoir.error_notes), + }; + var cache_fw = cache_file.writer(&.{}); + cache_fw.interface.writeVecAll(&vecs) catch |err| switch (err) { + error.WriteFailed => return cache_fw.err.?, }; - try cache_file.writevAll(&iovecs); } -pub fn loadZoirCacheBody(gpa: Allocator, header: Zoir.Header, cache_file: std.fs.File) !Zoir { +pub fn loadZoirCacheBody(gpa: Allocator, header: Zoir.Header, cache_br: *std.io.Reader) !Zoir { var zoir: Zoir = .{ .nodes = .empty, .extra = &.{}, @@ -3017,49 +2960,17 @@ pub fn loadZoirCacheBody(gpa: Allocator, header: Zoir.Header, cache_file: std.fs zoir.compile_errors = try gpa.alloc(Zoir.CompileError, 
header.compile_errors_len); zoir.error_notes = try gpa.alloc(Zoir.CompileError.Note, header.error_notes_len); - var iovecs: [8]std.posix.iovec = .{ - .{ - .base = @ptrCast(zoir.nodes.items(.tag)), - .len = header.nodes_len * @sizeOf(Zoir.Node.Repr.Tag), - }, - .{ - .base = @ptrCast(zoir.nodes.items(.data)), - .len = header.nodes_len * 4, - }, - .{ - .base = @ptrCast(zoir.nodes.items(.ast_node)), - .len = header.nodes_len * 4, - }, - .{ - .base = @ptrCast(zoir.extra), - .len = header.extra_len * 4, - }, - .{ - .base = @ptrCast(zoir.limbs), - .len = header.limbs_len * @sizeOf(std.math.big.Limb), - }, - .{ - .base = zoir.string_bytes.ptr, - .len = header.string_bytes_len, - }, - .{ - .base = @ptrCast(zoir.compile_errors), - .len = header.compile_errors_len * @sizeOf(Zoir.CompileError), - }, - .{ - .base = @ptrCast(zoir.error_notes), - .len = header.error_notes_len * @sizeOf(Zoir.CompileError.Note), - }, + var vecs = [_][]u8{ + @ptrCast(zoir.nodes.items(.tag)), + @ptrCast(zoir.nodes.items(.data)), + @ptrCast(zoir.nodes.items(.ast_node)), + @ptrCast(zoir.extra), + @ptrCast(zoir.limbs), + zoir.string_bytes, + @ptrCast(zoir.compile_errors), + @ptrCast(zoir.error_notes), }; - - const bytes_expected = expected: { - var n: usize = 0; - for (iovecs) |v| n += v.len; - break :expected n; - }; - - const bytes_read = try cache_file.readvAll(&iovecs); - if (bytes_read != bytes_expected) return error.UnexpectedFileSize; + try cache_br.readVecAll(&vecs); return zoir; } @@ -3071,7 +2982,7 @@ pub fn markDependeeOutdated( marked_po: enum { not_marked_po, marked_po }, dependee: InternPool.Dependee, ) !void { - log.debug("outdated dependee: {}", .{zcu.fmtDependee(dependee)}); + log.debug("outdated dependee: {f}", .{zcu.fmtDependee(dependee)}); var it = zcu.intern_pool.dependencyIterator(dependee); while (it.next()) |depender| { if (zcu.outdated.getPtr(depender)) |po_dep_count| { @@ -3079,9 +2990,9 @@ pub fn markDependeeOutdated( .not_marked_po => {}, .marked_po => { po_dep_count.* -= 1; 
- log.debug("outdated {} => already outdated {} po_deps={}", .{ zcu.fmtDependee(dependee), zcu.fmtAnalUnit(depender), po_dep_count.* }); + log.debug("outdated {f} => already outdated {f} po_deps={}", .{ zcu.fmtDependee(dependee), zcu.fmtAnalUnit(depender), po_dep_count.* }); if (po_dep_count.* == 0) { - log.debug("outdated ready: {}", .{zcu.fmtAnalUnit(depender)}); + log.debug("outdated ready: {f}", .{zcu.fmtAnalUnit(depender)}); try zcu.outdated_ready.put(zcu.gpa, depender, {}); } }, @@ -3102,9 +3013,9 @@ pub fn markDependeeOutdated( depender, new_po_dep_count, ); - log.debug("outdated {} => new outdated {} po_deps={}", .{ zcu.fmtDependee(dependee), zcu.fmtAnalUnit(depender), new_po_dep_count }); + log.debug("outdated {f} => new outdated {f} po_deps={}", .{ zcu.fmtDependee(dependee), zcu.fmtAnalUnit(depender), new_po_dep_count }); if (new_po_dep_count == 0) { - log.debug("outdated ready: {}", .{zcu.fmtAnalUnit(depender)}); + log.debug("outdated ready: {f}", .{zcu.fmtAnalUnit(depender)}); try zcu.outdated_ready.put(zcu.gpa, depender, {}); } // If this is a Decl and was not previously PO, we must recursively @@ -3117,16 +3028,16 @@ pub fn markDependeeOutdated( } pub fn markPoDependeeUpToDate(zcu: *Zcu, dependee: InternPool.Dependee) !void { - log.debug("up-to-date dependee: {}", .{zcu.fmtDependee(dependee)}); + log.debug("up-to-date dependee: {f}", .{zcu.fmtDependee(dependee)}); var it = zcu.intern_pool.dependencyIterator(dependee); while (it.next()) |depender| { if (zcu.outdated.getPtr(depender)) |po_dep_count| { // This depender is already outdated, but it now has one // less PO dependency! 
po_dep_count.* -= 1; - log.debug("up-to-date {} => {} po_deps={}", .{ zcu.fmtDependee(dependee), zcu.fmtAnalUnit(depender), po_dep_count.* }); + log.debug("up-to-date {f} => {f} po_deps={}", .{ zcu.fmtDependee(dependee), zcu.fmtAnalUnit(depender), po_dep_count.* }); if (po_dep_count.* == 0) { - log.debug("outdated ready: {}", .{zcu.fmtAnalUnit(depender)}); + log.debug("outdated ready: {f}", .{zcu.fmtAnalUnit(depender)}); try zcu.outdated_ready.put(zcu.gpa, depender, {}); } continue; @@ -3140,11 +3051,11 @@ pub fn markPoDependeeUpToDate(zcu: *Zcu, dependee: InternPool.Dependee) !void { }; if (ptr.* > 1) { ptr.* -= 1; - log.debug("up-to-date {} => {} po_deps={}", .{ zcu.fmtDependee(dependee), zcu.fmtAnalUnit(depender), ptr.* }); + log.debug("up-to-date {f} => {f} po_deps={}", .{ zcu.fmtDependee(dependee), zcu.fmtAnalUnit(depender), ptr.* }); continue; } - log.debug("up-to-date {} => {} po_deps=0 (up-to-date)", .{ zcu.fmtDependee(dependee), zcu.fmtAnalUnit(depender) }); + log.debug("up-to-date {f} => {f} po_deps=0 (up-to-date)", .{ zcu.fmtDependee(dependee), zcu.fmtAnalUnit(depender) }); // This dependency is no longer PO, i.e. is known to be up-to-date. 
assert(zcu.potentially_outdated.swapRemove(depender)); @@ -3173,7 +3084,7 @@ fn markTransitiveDependersPotentiallyOutdated(zcu: *Zcu, maybe_outdated: AnalUni .func => |func_index| .{ .interned = func_index }, // IES .memoized_state => |stage| .{ .memoized_state = stage }, }; - log.debug("potentially outdated dependee: {}", .{zcu.fmtDependee(dependee)}); + log.debug("potentially outdated dependee: {f}", .{zcu.fmtDependee(dependee)}); var it = ip.dependencyIterator(dependee); while (it.next()) |po| { if (zcu.outdated.getPtr(po)) |po_dep_count| { @@ -3183,17 +3094,17 @@ fn markTransitiveDependersPotentiallyOutdated(zcu: *Zcu, maybe_outdated: AnalUni _ = zcu.outdated_ready.swapRemove(po); } po_dep_count.* += 1; - log.debug("po {} => {} [outdated] po_deps={}", .{ zcu.fmtDependee(dependee), zcu.fmtAnalUnit(po), po_dep_count.* }); + log.debug("po {f} => {f} [outdated] po_deps={}", .{ zcu.fmtDependee(dependee), zcu.fmtAnalUnit(po), po_dep_count.* }); continue; } if (zcu.potentially_outdated.getPtr(po)) |n| { // There is now one more PO dependency. n.* += 1; - log.debug("po {} => {} po_deps={}", .{ zcu.fmtDependee(dependee), zcu.fmtAnalUnit(po), n.* }); + log.debug("po {f} => {f} po_deps={}", .{ zcu.fmtDependee(dependee), zcu.fmtAnalUnit(po), n.* }); continue; } try zcu.potentially_outdated.putNoClobber(zcu.gpa, po, 1); - log.debug("po {} => {} po_deps=1", .{ zcu.fmtDependee(dependee), zcu.fmtAnalUnit(po) }); + log.debug("po {f} => {f} po_deps=1", .{ zcu.fmtDependee(dependee), zcu.fmtAnalUnit(po) }); // This AnalUnit was not already PO, so we must recursively mark its dependers as also PO. 
try zcu.markTransitiveDependersPotentiallyOutdated(po); } @@ -3222,7 +3133,7 @@ pub fn findOutdatedToAnalyze(zcu: *Zcu) Allocator.Error!?AnalUnit { if (zcu.outdated_ready.count() > 0) { const unit = zcu.outdated_ready.keys()[0]; - log.debug("findOutdatedToAnalyze: trivial {}", .{zcu.fmtAnalUnit(unit)}); + log.debug("findOutdatedToAnalyze: trivial {f}", .{zcu.fmtAnalUnit(unit)}); return unit; } @@ -3273,7 +3184,7 @@ pub fn findOutdatedToAnalyze(zcu: *Zcu) Allocator.Error!?AnalUnit { } } - log.debug("findOutdatedToAnalyze: heuristic returned '{}' ({d} dependers)", .{ + log.debug("findOutdatedToAnalyze: heuristic returned '{f}' ({d} dependers)", .{ zcu.fmtAnalUnit(chosen_unit.?), chosen_unit_dependers, }); @@ -4072,7 +3983,7 @@ fn resolveReferencesInner(zcu: *Zcu) !std.AutoHashMapUnmanaged(AnalUnit, ?Resolv const referencer = kv.value; try checked_types.putNoClobber(gpa, ty, {}); - log.debug("handle type '{}'", .{Type.fromInterned(ty).containerTypeName(ip).fmt(ip)}); + log.debug("handle type '{f}'", .{Type.fromInterned(ty).containerTypeName(ip).fmt(ip)}); // If this type undergoes type resolution, the corresponding `AnalUnit` is automatically referenced. const has_resolution: bool = switch (ip.indexToKey(ty)) { @@ -4108,7 +4019,7 @@ fn resolveReferencesInner(zcu: *Zcu) !std.AutoHashMapUnmanaged(AnalUnit, ?Resolv // `comptime` decls are always analyzed. 
const unit: AnalUnit = .wrap(.{ .@"comptime" = cu }); if (!result.contains(unit)) { - log.debug("type '{}': ref comptime %{}", .{ + log.debug("type '{f}': ref comptime %{}", .{ Type.fromInterned(ty).containerTypeName(ip).fmt(ip), @intFromEnum(ip.getComptimeUnit(cu).zir_index.resolve(ip) orelse continue), }); @@ -4139,7 +4050,7 @@ fn resolveReferencesInner(zcu: *Zcu) !std.AutoHashMapUnmanaged(AnalUnit, ?Resolv }, }; if (want_analysis) { - log.debug("type '{}': ref test %{}", .{ + log.debug("type '{f}': ref test %{}", .{ Type.fromInterned(ty).containerTypeName(ip).fmt(ip), @intFromEnum(inst_info.inst), }); @@ -4158,7 +4069,7 @@ fn resolveReferencesInner(zcu: *Zcu) !std.AutoHashMapUnmanaged(AnalUnit, ?Resolv if (decl.linkage == .@"export") { const unit: AnalUnit = .wrap(.{ .nav_val = nav }); if (!result.contains(unit)) { - log.debug("type '{}': ref named %{}", .{ + log.debug("type '{f}': ref named %{}", .{ Type.fromInterned(ty).containerTypeName(ip).fmt(ip), @intFromEnum(inst_info.inst), }); @@ -4174,7 +4085,7 @@ fn resolveReferencesInner(zcu: *Zcu) !std.AutoHashMapUnmanaged(AnalUnit, ?Resolv if (decl.linkage == .@"export") { const unit: AnalUnit = .wrap(.{ .nav_val = nav }); if (!result.contains(unit)) { - log.debug("type '{}': ref named %{}", .{ + log.debug("type '{f}': ref named %{}", .{ Type.fromInterned(ty).containerTypeName(ip).fmt(ip), @intFromEnum(inst_info.inst), }); @@ -4199,7 +4110,7 @@ fn resolveReferencesInner(zcu: *Zcu) !std.AutoHashMapUnmanaged(AnalUnit, ?Resolv try unit_queue.put(gpa, other, kv.value); // same reference location } - log.debug("handle unit '{}'", .{zcu.fmtAnalUnit(unit)}); + log.debug("handle unit '{f}'", .{zcu.fmtAnalUnit(unit)}); if (zcu.reference_table.get(unit)) |first_ref_idx| { assert(first_ref_idx != std.math.maxInt(u32)); @@ -4207,7 +4118,7 @@ fn resolveReferencesInner(zcu: *Zcu) !std.AutoHashMapUnmanaged(AnalUnit, ?Resolv while (ref_idx != std.math.maxInt(u32)) { const ref = zcu.all_references.items[ref_idx]; if 
(!result.contains(ref.referenced)) { - log.debug("unit '{}': ref unit '{}'", .{ + log.debug("unit '{f}': ref unit '{f}'", .{ zcu.fmtAnalUnit(unit), zcu.fmtAnalUnit(ref.referenced), }); @@ -4226,7 +4137,7 @@ fn resolveReferencesInner(zcu: *Zcu) !std.AutoHashMapUnmanaged(AnalUnit, ?Resolv while (ref_idx != std.math.maxInt(u32)) { const ref = zcu.all_type_references.items[ref_idx]; if (!checked_types.contains(ref.referenced)) { - log.debug("unit '{}': ref type '{}'", .{ + log.debug("unit '{f}': ref type '{f}'", .{ zcu.fmtAnalUnit(unit), Type.fromInterned(ref.referenced).containerTypeName(ip).fmt(ip), }); @@ -4307,15 +4218,19 @@ pub fn navFileScope(zcu: *Zcu, nav: InternPool.Nav.Index) *File { return zcu.fileByIndex(zcu.navFileScopeIndex(nav)); } -pub fn fmtAnalUnit(zcu: *Zcu, unit: AnalUnit) std.fmt.Formatter(formatAnalUnit) { +pub fn fmtAnalUnit(zcu: *Zcu, unit: AnalUnit) std.fmt.Formatter(FormatAnalUnit, formatAnalUnit) { return .{ .data = .{ .unit = unit, .zcu = zcu } }; } -pub fn fmtDependee(zcu: *Zcu, d: InternPool.Dependee) std.fmt.Formatter(formatDependee) { +pub fn fmtDependee(zcu: *Zcu, d: InternPool.Dependee) std.fmt.Formatter(FormatDependee, formatDependee) { return .{ .data = .{ .dependee = d, .zcu = zcu } }; } -fn formatAnalUnit(data: struct { unit: AnalUnit, zcu: *Zcu }, comptime fmt: []const u8, options: std.fmt.FormatOptions, writer: anytype) !void { - _ = .{ fmt, options }; +const FormatAnalUnit = struct { + unit: AnalUnit, + zcu: *Zcu, +}; + +fn formatAnalUnit(data: FormatAnalUnit, writer: *std.io.Writer) std.io.Writer.Error!void { const zcu = data.zcu; const ip = &zcu.intern_pool; switch (data.unit.unwrap()) { @@ -4323,23 +4238,25 @@ fn formatAnalUnit(data: struct { unit: AnalUnit, zcu: *Zcu }, comptime fmt: []co const cu = ip.getComptimeUnit(cu_id); if (cu.zir_index.resolveFull(ip)) |resolved| { const file_path = zcu.fileByIndex(resolved.file).path; - return writer.print("comptime(inst=('{}', %{}) [{}])", .{ file_path.fmt(zcu.comp), 
@intFromEnum(resolved.inst), @intFromEnum(cu_id) }); + return writer.print("comptime(inst=('{f}', %{}) [{}])", .{ file_path.fmt(zcu.comp), @intFromEnum(resolved.inst), @intFromEnum(cu_id) }); } else { return writer.print("comptime(inst= [{}])", .{@intFromEnum(cu_id)}); } }, - .nav_val => |nav| return writer.print("nav_val('{}' [{}])", .{ ip.getNav(nav).fqn.fmt(ip), @intFromEnum(nav) }), - .nav_ty => |nav| return writer.print("nav_ty('{}' [{}])", .{ ip.getNav(nav).fqn.fmt(ip), @intFromEnum(nav) }), - .type => |ty| return writer.print("ty('{}' [{}])", .{ Type.fromInterned(ty).containerTypeName(ip).fmt(ip), @intFromEnum(ty) }), + .nav_val => |nav| return writer.print("nav_val('{f}' [{}])", .{ ip.getNav(nav).fqn.fmt(ip), @intFromEnum(nav) }), + .nav_ty => |nav| return writer.print("nav_ty('{f}' [{}])", .{ ip.getNav(nav).fqn.fmt(ip), @intFromEnum(nav) }), + .type => |ty| return writer.print("ty('{f}' [{}])", .{ Type.fromInterned(ty).containerTypeName(ip).fmt(ip), @intFromEnum(ty) }), .func => |func| { const nav = zcu.funcInfo(func).owner_nav; - return writer.print("func('{}' [{}])", .{ ip.getNav(nav).fqn.fmt(ip), @intFromEnum(func) }); + return writer.print("func('{f}' [{}])", .{ ip.getNav(nav).fqn.fmt(ip), @intFromEnum(func) }); }, .memoized_state => return writer.writeAll("memoized_state"), } } -fn formatDependee(data: struct { dependee: InternPool.Dependee, zcu: *Zcu }, comptime fmt: []const u8, options: std.fmt.FormatOptions, writer: anytype) !void { - _ = .{ fmt, options }; + +const FormatDependee = struct { dependee: InternPool.Dependee, zcu: *Zcu }; + +fn formatDependee(data: FormatDependee, writer: *std.io.Writer) std.io.Writer.Error!void { const zcu = data.zcu; const ip = &zcu.intern_pool; switch (data.dependee) { @@ -4348,42 +4265,42 @@ fn formatDependee(data: struct { dependee: InternPool.Dependee, zcu: *Zcu }, com return writer.writeAll("inst()"); }; const file_path = zcu.fileByIndex(info.file).path; - return writer.print("inst('{}', %{d})", .{ 
file_path.fmt(zcu.comp), @intFromEnum(info.inst) }); + return writer.print("inst('{f}', %{d})", .{ file_path.fmt(zcu.comp), @intFromEnum(info.inst) }); }, .nav_val => |nav| { const fqn = ip.getNav(nav).fqn; - return writer.print("nav_val('{}')", .{fqn.fmt(ip)}); + return writer.print("nav_val('{f}')", .{fqn.fmt(ip)}); }, .nav_ty => |nav| { const fqn = ip.getNav(nav).fqn; - return writer.print("nav_ty('{}')", .{fqn.fmt(ip)}); + return writer.print("nav_ty('{f}')", .{fqn.fmt(ip)}); }, .interned => |ip_index| switch (ip.indexToKey(ip_index)) { - .struct_type, .union_type, .enum_type => return writer.print("type('{}')", .{Type.fromInterned(ip_index).containerTypeName(ip).fmt(ip)}), - .func => |f| return writer.print("ies('{}')", .{ip.getNav(f.owner_nav).fqn.fmt(ip)}), + .struct_type, .union_type, .enum_type => return writer.print("type('{f}')", .{Type.fromInterned(ip_index).containerTypeName(ip).fmt(ip)}), + .func => |f| return writer.print("ies('{f}')", .{ip.getNav(f.owner_nav).fqn.fmt(ip)}), else => unreachable, }, .zon_file => |file| { const file_path = zcu.fileByIndex(file).path; - return writer.print("zon_file('{}')", .{file_path.fmt(zcu.comp)}); + return writer.print("zon_file('{f}')", .{file_path.fmt(zcu.comp)}); }, .embed_file => |ef_idx| { const ef = ef_idx.get(zcu); - return writer.print("embed_file('{}')", .{ef.path.fmt(zcu.comp)}); + return writer.print("embed_file('{f}')", .{ef.path.fmt(zcu.comp)}); }, .namespace => |ti| { const info = ti.resolveFull(ip) orelse { return writer.writeAll("namespace()"); }; const file_path = zcu.fileByIndex(info.file).path; - return writer.print("namespace('{}', %{d})", .{ file_path.fmt(zcu.comp), @intFromEnum(info.inst) }); + return writer.print("namespace('{f}', %{d})", .{ file_path.fmt(zcu.comp), @intFromEnum(info.inst) }); }, .namespace_name => |k| { const info = k.namespace.resolveFull(ip) orelse { - return writer.print("namespace(, '{}')", .{k.name.fmt(ip)}); + return writer.print("namespace(, '{f}')", 
.{k.name.fmt(ip)}); }; const file_path = zcu.fileByIndex(info.file).path; - return writer.print("namespace('{}', %{d}, '{}')", .{ file_path.fmt(zcu.comp), @intFromEnum(info.inst), k.name.fmt(ip) }); + return writer.print("namespace('{f}', %{d}, '{f}')", .{ file_path.fmt(zcu.comp), @intFromEnum(info.inst), k.name.fmt(ip) }); }, .memoized_state => return writer.writeAll("memoized_state"), } diff --git a/src/Zcu/PerThread.zig b/src/Zcu/PerThread.zig index 699358b135..d4a3d1598f 100644 --- a/src/Zcu/PerThread.zig +++ b/src/Zcu/PerThread.zig @@ -53,7 +53,7 @@ fn deinitFile(pt: Zcu.PerThread, file_index: Zcu.File.Index) void { const zcu = pt.zcu; const gpa = zcu.gpa; const file = zcu.fileByIndex(file_index); - log.debug("deinit File {}", .{file.path.fmt(zcu.comp)}); + log.debug("deinit File {f}", .{file.path.fmt(zcu.comp)}); file.path.deinit(gpa); file.unload(gpa); if (file.prev_zir) |prev_zir| { @@ -117,7 +117,7 @@ pub fn updateFile( var lock: std.fs.File.Lock = switch (file.status) { .never_loaded, .retryable_failure => lock: { // First, load the cached ZIR code, if any. - log.debug("AstGen checking cache: {} (local={}, digest={s})", .{ + log.debug("AstGen checking cache: {f} (local={}, digest={s})", .{ file.path.fmt(comp), want_local_cache, &hex_digest, }); @@ -130,11 +130,11 @@ pub fn updateFile( stat.inode == file.stat.inode; if (unchanged_metadata) { - log.debug("unmodified metadata of file: {}", .{file.path.fmt(comp)}); + log.debug("unmodified metadata of file: {f}", .{file.path.fmt(comp)}); return; } - log.debug("metadata changed: {}", .{file.path.fmt(comp)}); + log.debug("metadata changed: {f}", .{file.path.fmt(comp)}); break :lock .exclusive; }, @@ -190,7 +190,7 @@ pub fn updateFile( // failure was a race, or ENOENT, indicating deletion of the // directory of our open handle. 
if (builtin.os.tag != .macos) { - std.process.fatal("cache directory '{}' unexpectedly removed during compiler execution", .{ + std.process.fatal("cache directory '{f}' unexpectedly removed during compiler execution", .{ cache_directory, }); } @@ -202,7 +202,7 @@ pub fn updateFile( }) catch |excl_err| switch (excl_err) { error.PathAlreadyExists => continue, error.FileNotFound => { - std.process.fatal("cache directory '{}' unexpectedly removed during compiler execution", .{ + std.process.fatal("cache directory '{f}' unexpectedly removed during compiler execution", .{ cache_directory, }); }, @@ -221,12 +221,12 @@ pub fn updateFile( }; switch (result) { .success => { - log.debug("AstGen cached success: {}", .{file.path.fmt(comp)}); + log.debug("AstGen cached success: {f}", .{file.path.fmt(comp)}); break false; }, .invalid => {}, - .truncated => log.warn("unexpected EOF reading cached ZIR for {}", .{file.path.fmt(comp)}), - .stale => log.debug("AstGen cache stale: {}", .{file.path.fmt(comp)}), + .truncated => log.warn("unexpected EOF reading cached ZIR for {f}", .{file.path.fmt(comp)}), + .stale => log.debug("AstGen cache stale: {f}", .{file.path.fmt(comp)}), } // If we already have the exclusive lock then it is our job to update. 
@@ -249,11 +249,14 @@ pub fn updateFile( if (stat.size > std.math.maxInt(u32)) return error.FileTooBig; - const source = try gpa.allocSentinel(u8, @as(usize, @intCast(stat.size)), 0); + const source = try gpa.allocSentinel(u8, @intCast(stat.size), 0); defer if (file.source == null) gpa.free(source); - const amt = try source_file.readAll(source); - if (amt != stat.size) - return error.UnexpectedEndOfFile; + var source_fr = source_file.reader(&.{}); + source_fr.size = stat.size; + source_fr.interface.readSliceAll(source) catch |err| switch (err) { + error.ReadFailed => return source_fr.err.?, + error.EndOfStream => return error.UnexpectedEndOfFile, + }; file.source = source; @@ -265,7 +268,7 @@ pub fn updateFile( file.zir = try AstGen.generate(gpa, file.tree.?); Zcu.saveZirCache(gpa, cache_file, stat, file.zir.?) catch |err| switch (err) { error.OutOfMemory => |e| return e, - else => log.warn("unable to write cached ZIR code for {} to {}{s}: {s}", .{ + else => log.warn("unable to write cached ZIR code for {f} to {f}{s}: {s}", .{ file.path.fmt(comp), cache_directory, &hex_digest, @errorName(err), }), }; @@ -273,14 +276,14 @@ pub fn updateFile( .zon => { file.zoir = try ZonGen.generate(gpa, file.tree.?, .{}); Zcu.saveZoirCache(cache_file, stat, file.zoir.?) catch |err| { - log.warn("unable to write cached ZOIR code for {} to {}{s}: {s}", .{ + log.warn("unable to write cached ZOIR code for {f} to {f}{s}: {s}", .{ file.path.fmt(comp), cache_directory, &hex_digest, @errorName(err), }); }; }, } - log.debug("AstGen fresh success: {}", .{file.path.fmt(comp)}); + log.debug("AstGen fresh success: {f}", .{file.path.fmt(comp)}); } file.stat = .{ @@ -340,13 +343,19 @@ fn loadZirZoirCache( .zon => Zoir.Header, }; + var buffer: [2000]u8 = undefined; + var cache_fr = cache_file.reader(&buffer); + cache_fr.size = stat.size; + const cache_br = &cache_fr.interface; + // First we read the header to determine the lengths of arrays. 
- const header = cache_file.reader().readStruct(Header) catch |err| switch (err) { + const header = (cache_br.takeStruct(Header) catch |err| switch (err) { + error.ReadFailed => return cache_fr.err.?, // This can happen if Zig bails out of this function between creating // the cached file and writing it. error.EndOfStream => return .invalid, else => |e| return e, - }; + }).*; const unchanged_metadata = stat.size == header.stat_size and @@ -358,17 +367,15 @@ fn loadZirZoirCache( } switch (mode) { - .zig => { - file.zir = Zcu.loadZirCacheBody(gpa, header, cache_file) catch |err| switch (err) { - error.UnexpectedFileSize => return .truncated, - else => |e| return e, - }; + .zig => file.zir = Zcu.loadZirCacheBody(gpa, header, cache_br) catch |err| switch (err) { + error.ReadFailed => return cache_fr.err.?, + error.EndOfStream => return .truncated, + else => |e| return e, }, - .zon => { - file.zoir = Zcu.loadZoirCacheBody(gpa, header, cache_file) catch |err| switch (err) { - error.UnexpectedFileSize => return .truncated, - else => |e| return e, - }; + .zon => file.zoir = Zcu.loadZoirCacheBody(gpa, header, cache_br) catch |err| switch (err) { + error.ReadFailed => return cache_fr.err.?, + error.EndOfStream => return .truncated, + else => |e| return e, }, } @@ -477,11 +484,8 @@ pub fn updateZirRefs(pt: Zcu.PerThread) Allocator.Error!void { if (std.zig.srcHashEql(old_hash, new_hash)) { break :hash_changed; } - log.debug("hash for (%{d} -> %{d}) changed: {} -> {}", .{ - old_inst, - new_inst, - std.fmt.fmtSliceHexLower(&old_hash), - std.fmt.fmtSliceHexLower(&new_hash), + log.debug("hash for (%{d} -> %{d}) changed: {x} -> {x}", .{ + old_inst, new_inst, &old_hash, &new_hash, }); } // The source hash associated with this instruction changed - invalidate relevant dependencies. @@ -649,7 +653,7 @@ pub fn ensureMemoizedStateUpToDate(pt: Zcu.PerThread, stage: InternPool.Memoized // If this unit caused the error, it would have an entry in `failed_analysis`. 
// Since it does not, this must be a transitive failure. try zcu.transitive_failed_analysis.put(gpa, unit, {}); - log.debug("mark transitive analysis failure for {}", .{zcu.fmtAnalUnit(unit)}); + log.debug("mark transitive analysis failure for {f}", .{zcu.fmtAnalUnit(unit)}); } break :res .{ !prev_failed, true }; }, @@ -754,7 +758,7 @@ pub fn ensureComptimeUnitUpToDate(pt: Zcu.PerThread, cu_id: InternPool.ComptimeU const anal_unit: AnalUnit = .wrap(.{ .@"comptime" = cu_id }); - log.debug("ensureComptimeUnitUpToDate {}", .{zcu.fmtAnalUnit(anal_unit)}); + log.debug("ensureComptimeUnitUpToDate {f}", .{zcu.fmtAnalUnit(anal_unit)}); assert(!zcu.analysis_in_progress.contains(anal_unit)); @@ -805,7 +809,7 @@ pub fn ensureComptimeUnitUpToDate(pt: Zcu.PerThread, cu_id: InternPool.ComptimeU // If this unit caused the error, it would have an entry in `failed_analysis`. // Since it does not, this must be a transitive failure. try zcu.transitive_failed_analysis.put(gpa, anal_unit, {}); - log.debug("mark transitive analysis failure for {}", .{zcu.fmtAnalUnit(anal_unit)}); + log.debug("mark transitive analysis failure for {f}", .{zcu.fmtAnalUnit(anal_unit)}); } return error.AnalysisFail; }, @@ -835,7 +839,7 @@ fn analyzeComptimeUnit(pt: Zcu.PerThread, cu_id: InternPool.ComptimeUnit.Id) Zcu const anal_unit: AnalUnit = .wrap(.{ .@"comptime" = cu_id }); const comptime_unit = ip.getComptimeUnit(cu_id); - log.debug("analyzeComptimeUnit {}", .{zcu.fmtAnalUnit(anal_unit)}); + log.debug("analyzeComptimeUnit {f}", .{zcu.fmtAnalUnit(anal_unit)}); const inst_resolved = comptime_unit.zir_index.resolveFull(ip) orelse return error.AnalysisFail; const file = zcu.fileByIndex(inst_resolved.file); @@ -881,7 +885,7 @@ fn analyzeComptimeUnit(pt: Zcu.PerThread, cu_id: InternPool.ComptimeUnit.Id) Zcu .r = .{ .simple = .comptime_keyword }, } }, .src_base_inst = comptime_unit.zir_index, - .type_name_ctx = try ip.getOrPutStringFmt(gpa, pt.tid, "{}.comptime", .{ + .type_name_ctx = try 
ip.getOrPutStringFmt(gpa, pt.tid, "{f}.comptime", .{ Type.fromInterned(zcu.namespacePtr(comptime_unit.namespace).owner_type).containerTypeName(ip).fmt(ip), }, .no_embedded_nulls), }; @@ -933,7 +937,7 @@ pub fn ensureNavValUpToDate(pt: Zcu.PerThread, nav_id: InternPool.Nav.Index) Zcu const anal_unit: AnalUnit = .wrap(.{ .nav_val = nav_id }); const nav = ip.getNav(nav_id); - log.debug("ensureNavValUpToDate {}", .{zcu.fmtAnalUnit(anal_unit)}); + log.debug("ensureNavValUpToDate {f}", .{zcu.fmtAnalUnit(anal_unit)}); // Determine whether or not this `Nav`'s value is outdated. This also includes checking if the // status is `.unresolved`, which indicates that the value is outdated because it has *never* @@ -991,7 +995,7 @@ pub fn ensureNavValUpToDate(pt: Zcu.PerThread, nav_id: InternPool.Nav.Index) Zcu // If this unit caused the error, it would have an entry in `failed_analysis`. // Since it does not, this must be a transitive failure. try zcu.transitive_failed_analysis.put(gpa, anal_unit, {}); - log.debug("mark transitive analysis failure for {}", .{zcu.fmtAnalUnit(anal_unit)}); + log.debug("mark transitive analysis failure for {f}", .{zcu.fmtAnalUnit(anal_unit)}); } break :res .{ !prev_failed, true }; }, @@ -1062,7 +1066,7 @@ fn analyzeNavVal(pt: Zcu.PerThread, nav_id: InternPool.Nav.Index) Zcu.CompileErr const anal_unit: AnalUnit = .wrap(.{ .nav_val = nav_id }); const old_nav = ip.getNav(nav_id); - log.debug("analyzeNavVal {}", .{zcu.fmtAnalUnit(anal_unit)}); + log.debug("analyzeNavVal {f}", .{zcu.fmtAnalUnit(anal_unit)}); const inst_resolved = old_nav.analysis.?.zir_index.resolveFull(ip) orelse return error.AnalysisFail; const file = zcu.fileByIndex(inst_resolved.file); @@ -1321,7 +1325,7 @@ pub fn ensureNavTypeUpToDate(pt: Zcu.PerThread, nav_id: InternPool.Nav.Index) Zc const anal_unit: AnalUnit = .wrap(.{ .nav_ty = nav_id }); const nav = ip.getNav(nav_id); - log.debug("ensureNavTypeUpToDate {}", .{zcu.fmtAnalUnit(anal_unit)}); + log.debug("ensureNavTypeUpToDate 
{f}", .{zcu.fmtAnalUnit(anal_unit)}); const type_resolved_by_value: bool = from_val: { const analysis = nav.analysis orelse break :from_val false; @@ -1391,7 +1395,7 @@ pub fn ensureNavTypeUpToDate(pt: Zcu.PerThread, nav_id: InternPool.Nav.Index) Zc // If this unit caused the error, it would have an entry in `failed_analysis`. // Since it does not, this must be a transitive failure. try zcu.transitive_failed_analysis.put(gpa, anal_unit, {}); - log.debug("mark transitive analysis failure for {}", .{zcu.fmtAnalUnit(anal_unit)}); + log.debug("mark transitive analysis failure for {f}", .{zcu.fmtAnalUnit(anal_unit)}); } break :res .{ !prev_failed, true }; }, @@ -1433,7 +1437,7 @@ fn analyzeNavType(pt: Zcu.PerThread, nav_id: InternPool.Nav.Index) Zcu.CompileEr const anal_unit: AnalUnit = .wrap(.{ .nav_ty = nav_id }); const old_nav = ip.getNav(nav_id); - log.debug("analyzeNavType {}", .{zcu.fmtAnalUnit(anal_unit)}); + log.debug("analyzeNavType {f}", .{zcu.fmtAnalUnit(anal_unit)}); const inst_resolved = old_nav.analysis.?.zir_index.resolveFull(ip) orelse return error.AnalysisFail; const file = zcu.fileByIndex(inst_resolved.file); @@ -1563,7 +1567,7 @@ pub fn ensureFuncBodyUpToDate(pt: Zcu.PerThread, maybe_coerced_func_index: Inter const func_index = ip.unwrapCoercedFunc(maybe_coerced_func_index); const anal_unit: AnalUnit = .wrap(.{ .func = func_index }); - log.debug("ensureFuncBodyUpToDate {}", .{zcu.fmtAnalUnit(anal_unit)}); + log.debug("ensureFuncBodyUpToDate {f}", .{zcu.fmtAnalUnit(anal_unit)}); const func = zcu.funcInfo(maybe_coerced_func_index); @@ -1607,7 +1611,7 @@ pub fn ensureFuncBodyUpToDate(pt: Zcu.PerThread, maybe_coerced_func_index: Inter // If this function caused the error, it would have an entry in `failed_analysis`. // Since it does not, this must be a transitive failure. 
try zcu.transitive_failed_analysis.put(gpa, anal_unit, {}); - log.debug("mark transitive analysis failure for {}", .{zcu.fmtAnalUnit(anal_unit)}); + log.debug("mark transitive analysis failure for {f}", .{zcu.fmtAnalUnit(anal_unit)}); } // We consider the IES to be outdated if the function previously succeeded analysis; in this case, // we need to re-analyze dependants to ensure they hit a transitive error here, rather than reporting @@ -1677,7 +1681,7 @@ fn analyzeFuncBody( else .none; - log.debug("analyze and generate fn body {}", .{zcu.fmtAnalUnit(anal_unit)}); + log.debug("analyze and generate fn body {f}", .{zcu.fmtAnalUnit(anal_unit)}); var air = try pt.analyzeFnBodyInner(func_index); errdefer air.deinit(gpa); @@ -2299,7 +2303,7 @@ pub fn updateBuiltinModule(pt: Zcu.PerThread, opts: Builtin) Allocator.Error!voi Builtin.updateFileOnDisk(file, comp) catch |err| comp.setMiscFailure( .write_builtin_zig, - "unable to write '{}': {s}", + "unable to write '{f}': {s}", .{ file.path.fmt(comp), @errorName(err) }, ); } @@ -2414,8 +2418,12 @@ fn updateEmbedFileInner( const old_len = strings.mutate.len; errdefer strings.shrinkRetainingCapacity(old_len); const bytes = (try strings.addManyAsSlice(size_plus_one))[0]; - const actual_read = try file.readAll(bytes[0..size]); - if (actual_read != size) return error.UnexpectedEof; + var fr = file.reader(&.{}); + fr.size = stat.size; + fr.interface.readSliceAll(bytes[0..size]) catch |err| switch (err) { + error.ReadFailed => return fr.err.?, + error.EndOfStream => return error.UnexpectedEof, + }; bytes[size] = 0; break :str try ip.getOrPutTrailingString(gpa, tid, @intCast(bytes.len), .maybe_embedded_nulls); }; @@ -2584,7 +2592,7 @@ const ScanDeclIter = struct { var gop = try iter.seen_decls.getOrPut(gpa, name); var next_suffix: u32 = 0; while (gop.found_existing) { - name = try ip.getOrPutStringFmt(gpa, pt.tid, "{}_{d}", .{ name.fmt(ip), next_suffix }, .no_embedded_nulls); + name = try ip.getOrPutStringFmt(gpa, pt.tid, "{f}_{d}", 
.{ name.fmt(ip), next_suffix }, .no_embedded_nulls); gop = try iter.seen_decls.getOrPut(gpa, name); next_suffix += 1; } @@ -2716,7 +2724,7 @@ const ScanDeclIter = struct { if (existing_unit == null and (want_analysis or decl.linkage == .@"export")) { log.debug( - "scanDecl queue analyze_comptime_unit file='{s}' unit={}", + "scanDecl queue analyze_comptime_unit file='{s}' unit={f}", .{ namespace.fileScope(zcu).sub_file_path, zcu.fmtAnalUnit(unit) }, ); try comp.queueJob(.{ .analyze_comptime_unit = unit }); @@ -3134,7 +3142,7 @@ fn processExportsInner( if (gop.found_existing) { new_export.status = .failed_retryable; try zcu.failed_exports.ensureUnusedCapacity(gpa, 1); - const msg = try Zcu.ErrorMsg.create(gpa, new_export.src, "exported symbol collision: {}", .{ + const msg = try Zcu.ErrorMsg.create(gpa, new_export.src, "exported symbol collision: {f}", .{ new_export.opts.name.fmt(ip), }); errdefer msg.destroy(gpa); @@ -4376,12 +4384,11 @@ fn runCodegenInner(pt: Zcu.PerThread, func_index: InternPool.Index, air: *Air) e defer liveness.deinit(gpa); if (build_options.enable_debug_extensions and comp.verbose_air) { - std.debug.lockStdErr(); - defer std.debug.unlockStdErr(); - const stderr = std.io.getStdErr().writer(); - stderr.print("# Begin Function AIR: {}:\n", .{fqn.fmt(ip)}) catch {}; + const stderr = std.debug.lockStderrWriter(&.{}); + defer std.debug.unlockStderrWriter(); + stderr.print("# Begin Function AIR: {f}:\n", .{fqn.fmt(ip)}) catch {}; air.write(stderr, pt, liveness); - stderr.print("# End Function AIR: {}\n\n", .{fqn.fmt(ip)}) catch {}; + stderr.print("# End Function AIR: {f}\n\n", .{fqn.fmt(ip)}) catch {}; } if (std.debug.runtime_safety) { diff --git a/src/arch/riscv64/CodeGen.zig b/src/arch/riscv64/CodeGen.zig index b9a16d5a75..49baf7acad 100644 --- a/src/arch/riscv64/CodeGen.zig +++ b/src/arch/riscv64/CodeGen.zig @@ -435,7 +435,7 @@ const InstTracking = struct { fn trackSpill(inst_tracking: *InstTracking, function: *Func, inst: Air.Inst.Index) !void { 
try function.freeValue(inst_tracking.short); inst_tracking.reuseFrame(); - tracking_log.debug("%{d} => {} (spilled)", .{ inst, inst_tracking.* }); + tracking_log.debug("%{d} => {f} (spilled)", .{ inst, inst_tracking.* }); } fn verifyMaterialize(inst_tracking: InstTracking, target: InstTracking) void { @@ -499,14 +499,14 @@ const InstTracking = struct { else => target.long, } else target.long; inst_tracking.short = target.short; - tracking_log.debug("%{d} => {} (materialize)", .{ inst, inst_tracking.* }); + tracking_log.debug("%{d} => {f} (materialize)", .{ inst, inst_tracking.* }); } fn resurrect(inst_tracking: *InstTracking, inst: Air.Inst.Index, scope_generation: u32) void { switch (inst_tracking.short) { .dead => |die_generation| if (die_generation >= scope_generation) { inst_tracking.reuseFrame(); - tracking_log.debug("%{d} => {} (resurrect)", .{ inst, inst_tracking.* }); + tracking_log.debug("%{d} => {f} (resurrect)", .{ inst, inst_tracking.* }); }, else => {}, } @@ -516,7 +516,7 @@ const InstTracking = struct { if (inst_tracking.short == .dead) return; try function.freeValue(inst_tracking.short); inst_tracking.short = .{ .dead = function.scope_generation }; - tracking_log.debug("%{d} => {} (death)", .{ inst, inst_tracking.* }); + tracking_log.debug("%{d} => {f} (death)", .{ inst, inst_tracking.* }); } fn reuse( @@ -527,15 +527,15 @@ const InstTracking = struct { ) void { inst_tracking.short = .{ .dead = function.scope_generation }; if (new_inst) |inst| - tracking_log.debug("%{d} => {} (reuse %{d})", .{ inst, inst_tracking.*, old_inst }) + tracking_log.debug("%{d} => {f} (reuse %{d})", .{ inst, inst_tracking.*, old_inst }) else - tracking_log.debug("tmp => {} (reuse %{d})", .{ inst_tracking.*, old_inst }); + tracking_log.debug("tmp => {f} (reuse %{d})", .{ inst_tracking.*, old_inst }); } fn liveOut(inst_tracking: *InstTracking, function: *Func, inst: Air.Inst.Index) void { for (inst_tracking.getRegs()) |reg| { if (function.register_manager.isRegFree(reg)) { - 
tracking_log.debug("%{d} => {} (live-out)", .{ inst, inst_tracking.* }); + tracking_log.debug("%{d} => {f} (live-out)", .{ inst, inst_tracking.* }); continue; } @@ -562,16 +562,11 @@ const InstTracking = struct { // Perform side-effects of freeValue manually. function.register_manager.freeReg(reg); - tracking_log.debug("%{d} => {} (live-out %{d})", .{ inst, inst_tracking.*, tracked_inst }); + tracking_log.debug("%{d} => {f} (live-out %{d})", .{ inst, inst_tracking.*, tracked_inst }); } } - pub fn format( - inst_tracking: InstTracking, - comptime _: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { + pub fn format(inst_tracking: InstTracking, writer: *std.io.Writer) std.io.Writer.Error!void { if (!std.meta.eql(inst_tracking.long, inst_tracking.short)) try writer.print("|{}| ", .{inst_tracking.long}); try writer.print("{}", .{inst_tracking.short}); } @@ -802,7 +797,7 @@ pub fn generate( function.mir_instructions.deinit(gpa); } - wip_mir_log.debug("{}:", .{fmtNav(func.owner_nav, ip)}); + wip_mir_log.debug("{f}:", .{fmtNav(func.owner_nav, ip)}); try function.frame_allocs.resize(gpa, FrameIndex.named_count); function.frame_allocs.set( @@ -937,12 +932,7 @@ const FormatWipMirData = struct { func: *Func, inst: Mir.Inst.Index, }; -fn formatWipMir( - data: FormatWipMirData, - comptime _: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, -) @TypeOf(writer).Error!void { +fn formatWipMir(data: FormatWipMirData, writer: *std.io.Writer) std.io.Writer.Error!void { const pt = data.func.pt; const comp = pt.zcu.comp; var lower: Lower = .{ @@ -982,7 +972,7 @@ fn formatWipMir( first = false; } } -fn fmtWipMir(func: *Func, inst: Mir.Inst.Index) std.fmt.Formatter(formatWipMir) { +fn fmtWipMir(func: *Func, inst: Mir.Inst.Index) std.fmt.Formatter(FormatWipMirData, formatWipMir) { return .{ .data = .{ .func = func, .inst = inst } }; } @@ -990,15 +980,10 @@ const FormatNavData = struct { ip: *const InternPool, nav_index: 
InternPool.Nav.Index, }; -fn formatNav( - data: FormatNavData, - comptime _: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, -) @TypeOf(writer).Error!void { - try writer.print("{}", .{data.ip.getNav(data.nav_index).fqn.fmt(data.ip)}); +fn formatNav(data: FormatNavData, writer: *std.io.Writer) std.io.Writer.Error!void { + try writer.print("{f}", .{data.ip.getNav(data.nav_index).fqn.fmt(data.ip)}); } -fn fmtNav(nav_index: InternPool.Nav.Index, ip: *const InternPool) std.fmt.Formatter(formatNav) { +fn fmtNav(nav_index: InternPool.Nav.Index, ip: *const InternPool) std.fmt.Formatter(FormatNavData, formatNav) { return .{ .data = .{ .ip = ip, .nav_index = nav_index, @@ -1009,31 +994,25 @@ const FormatAirData = struct { func: *Func, inst: Air.Inst.Index, }; -fn formatAir( - data: FormatAirData, - comptime _: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, -) @TypeOf(writer).Error!void { - data.func.air.dumpInst(data.inst, data.func.pt, data.func.liveness); +fn formatAir(data: FormatAirData, writer: *std.io.Writer) std.io.Writer.Error!void { + // Not acceptable implementation because it ignores `writer`: + //data.func.air.dumpInst(data.inst, data.func.pt, data.func.liveness); + _ = data; + _ = writer; + @panic("unimplemented"); } -fn fmtAir(func: *Func, inst: Air.Inst.Index) std.fmt.Formatter(formatAir) { +fn fmtAir(func: *Func, inst: Air.Inst.Index) std.fmt.Formatter(FormatAirData, formatAir) { return .{ .data = .{ .func = func, .inst = inst } }; } const FormatTrackingData = struct { func: *Func, }; -fn formatTracking( - data: FormatTrackingData, - comptime _: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, -) @TypeOf(writer).Error!void { +fn formatTracking(data: FormatTrackingData, writer: *std.io.Writer) std.io.Writer.Error!void { var it = data.func.inst_tracking.iterator(); - while (it.next()) |entry| try writer.print("\n%{d} = {}", .{ entry.key_ptr.*, entry.value_ptr.* }); + while (it.next()) |entry| try writer.print("\n%{d} = {f}", .{ 
entry.key_ptr.*, entry.value_ptr.* }); } -fn fmtTracking(func: *Func) std.fmt.Formatter(formatTracking) { +fn fmtTracking(func: *Func) std.fmt.Formatter(FormatTrackingData, formatTracking) { return .{ .data = .{ .func = func } }; } @@ -1049,7 +1028,7 @@ fn addInst(func: *Func, inst: Mir.Inst) error{OutOfMemory}!Mir.Inst.Index { .pseudo_dbg_epilogue_begin, .pseudo_dead, => false, - }) wip_mir_log.debug("{}", .{func.fmtWipMir(result_index)}); + }) wip_mir_log.debug("{f}", .{func.fmtWipMir(result_index)}); return result_index; } @@ -1303,7 +1282,7 @@ fn genLazy(func: *Func, lazy_sym: link.File.LazySymbol) InnerError!void { switch (Type.fromInterned(lazy_sym.ty).zigTypeTag(zcu)) { .@"enum" => { const enum_ty = Type.fromInterned(lazy_sym.ty); - wip_mir_log.debug("{}.@tagName:", .{enum_ty.fmt(pt)}); + wip_mir_log.debug("{f}.@tagName:", .{enum_ty.fmt(pt)}); const param_regs = abi.Registers.Integer.function_arg_regs; const ret_reg = param_regs[0]; @@ -1385,7 +1364,7 @@ fn genLazy(func: *Func, lazy_sym: link.File.LazySymbol) InnerError!void { }); }, else => return func.fail( - "TODO implement {s} for {}", + "TODO implement {s} for {f}", .{ @tagName(lazy_sym.kind), Type.fromInterned(lazy_sym.ty).fmt(pt) }, ), } @@ -1399,8 +1378,8 @@ fn genBody(func: *Func, body: []const Air.Inst.Index) InnerError!void { for (body) |inst| { if (func.liveness.isUnused(inst) and !func.air.mustLower(inst, ip)) continue; - wip_mir_log.debug("{}", .{func.fmtAir(inst)}); - verbose_tracking_log.debug("{}", .{func.fmtTracking()}); + wip_mir_log.debug("{f}", .{func.fmtAir(inst)}); + verbose_tracking_log.debug("{f}", .{func.fmtTracking()}); const old_air_bookkeeping = func.air_bookkeeping; try func.ensureProcessDeathCapacity(Air.Liveness.bpi); @@ -1679,18 +1658,18 @@ fn genBody(func: *Func, body: []const Air.Inst.Index) InnerError!void { var it = func.register_manager.free_registers.iterator(.{ .kind = .unset }); while (it.next()) |index| { const tracked_inst = func.register_manager.registers[index]; - 
tracking_log.debug("tracked inst: {}", .{tracked_inst}); + tracking_log.debug("tracked inst: {f}", .{tracked_inst}); const tracking = func.getResolvedInstValue(tracked_inst); for (tracking.getRegs()) |reg| { if (RegisterManager.indexOfRegIntoTracked(reg).? == index) break; } else return std.debug.panic( - \\%{} takes up these regs: {any}, however this regs {any}, don't use it + \\%{f} takes up these regs: {any}, however this regs {any}, don't use it , .{ tracked_inst, tracking.getRegs(), RegisterManager.regAtTrackedIndex(@intCast(index)) }); } } } } - verbose_tracking_log.debug("{}", .{func.fmtTracking()}); + verbose_tracking_log.debug("{f}", .{func.fmtTracking()}); } fn getValue(func: *Func, value: MCValue, inst: ?Air.Inst.Index) !void { @@ -1713,7 +1692,7 @@ fn freeValue(func: *Func, value: MCValue) !void { fn feed(func: *Func, bt: *Air.Liveness.BigTomb, operand: Air.Inst.Ref) !void { if (bt.feed()) if (operand.toIndex()) |inst| { - log.debug("feed inst: %{}", .{inst}); + log.debug("feed inst: %{f}", .{inst}); try func.processDeath(inst); }; } @@ -1843,7 +1822,7 @@ fn computeFrameLayout(func: *Func) !FrameLayout { total_alloc_size + 64 + args_frame_size + spill_frame_size + call_frame_size, @intCast(frame_align[@intFromEnum(FrameIndex.base_ptr)].toByteUnits().?), ); - log.debug("frame size: {}", .{acc_frame_size}); + log.debug("frame size: {d}", .{acc_frame_size}); // store the ra at total_size - 8, so it's the very first thing in the stack // relative to the fp @@ -1907,7 +1886,7 @@ fn splitType(func: *Func, ty: Type) ![2]Type { else => return func.fail("TODO: splitType class {}", .{class}), }; } else if (parts[0].abiSize(zcu) + parts[1].abiSize(zcu) == ty.abiSize(zcu)) return parts; - return func.fail("TODO implement splitType for {}", .{ty.fmt(func.pt)}); + return func.fail("TODO implement splitType for {f}", .{ty.fmt(func.pt)}); } /// Truncates the value in the register in place. 
@@ -2020,7 +1999,7 @@ fn allocMemPtr(func: *Func, inst: Air.Inst.Index) !FrameIndex { const val_ty = ptr_ty.childType(zcu); return func.allocFrameIndex(FrameAlloc.init(.{ .size = math.cast(u32, val_ty.abiSize(zcu)) orelse { - return func.fail("type '{}' too big to fit into stack frame", .{val_ty.fmt(pt)}); + return func.fail("type '{f}' too big to fit into stack frame", .{val_ty.fmt(pt)}); }, .alignment = ptr_ty.ptrAlignment(zcu).max(.@"1"), })); @@ -2160,7 +2139,7 @@ pub fn spillRegisters(func: *Func, comptime registers: []const Register) !void { /// allocated. A second call to `copyToTmpRegister` may return the same register. /// This can have a side effect of spilling instructions to the stack to free up a register. fn copyToTmpRegister(func: *Func, ty: Type, mcv: MCValue) !Register { - log.debug("copyToTmpRegister ty: {}", .{ty.fmt(func.pt)}); + log.debug("copyToTmpRegister ty: {f}", .{ty.fmt(func.pt)}); const reg = try func.register_manager.allocReg(null, func.regTempClassForType(ty)); try func.genSetReg(ty, reg, mcv); return reg; @@ -2245,7 +2224,7 @@ fn airIntCast(func: *Func, inst: Air.Inst.Index) !void { break :result null; // TODO break :result dst_mcv; - } orelse return func.fail("TODO: implement airIntCast from {} to {}", .{ + } orelse return func.fail("TODO: implement airIntCast from {f} to {f}", .{ src_ty.fmt(pt), dst_ty.fmt(pt), }); @@ -2633,7 +2612,7 @@ fn genBinOp( .add_sat, => { if (bit_size != 64 or !is_unsigned) - return func.fail("TODO: genBinOp ty: {}", .{lhs_ty.fmt(pt)}); + return func.fail("TODO: genBinOp ty: {f}", .{lhs_ty.fmt(pt)}); const tmp_reg = try func.copyToTmpRegister(rhs_ty, .{ .register = rhs_reg }); const tmp_lock = func.register_manager.lockRegAssumeUnused(tmp_reg); @@ -4065,7 +4044,7 @@ fn airGetUnionTag(func: *Func, inst: Air.Inst.Index) !void { ); } else { return func.fail( - "TODO implement get_union_tag for ABI larger than 8 bytes and operand {}, tag {}", + "TODO implement get_union_tag for ABI larger than 8 bytes and 
operand {}, tag {f}", .{ frame_mcv, tag_ty.fmt(pt) }, ); } @@ -4186,7 +4165,7 @@ fn airAbs(func: *Func, inst: Air.Inst.Index) !void { switch (scalar_ty.zigTypeTag(zcu)) { .int => if (ty.zigTypeTag(zcu) == .vector) { - return func.fail("TODO implement airAbs for {}", .{ty.fmt(pt)}); + return func.fail("TODO implement airAbs for {f}", .{ty.fmt(pt)}); } else { const int_info = scalar_ty.intInfo(zcu); const int_bits = int_info.bits; @@ -4267,7 +4246,7 @@ fn airAbs(func: *Func, inst: Air.Inst.Index) !void { break :result return_mcv; }, - else => return func.fail("TODO: implement airAbs {}", .{scalar_ty.fmt(pt)}), + else => return func.fail("TODO: implement airAbs {f}", .{scalar_ty.fmt(pt)}), } break :result .unreach; @@ -4331,7 +4310,7 @@ fn airByteSwap(func: *Func, inst: Air.Inst.Index) !void { break :result dest_mcv; }, - else => return func.fail("TODO: airByteSwap {}", .{ty.fmt(pt)}), + else => return func.fail("TODO: airByteSwap {f}", .{ty.fmt(pt)}), } }; return func.finishAir(inst, result, .{ ty_op.operand, .none, .none }); @@ -4397,7 +4376,7 @@ fn airUnaryMath(func: *Func, inst: Air.Inst.Index, tag: Air.Inst.Tag) !void { else => return func.fail("TODO: airUnaryMath Float {s}", .{@tagName(tag)}), } }, - else => return func.fail("TODO: airUnaryMath ty: {}", .{ty.fmt(pt)}), + else => return func.fail("TODO: airUnaryMath ty: {f}", .{ty.fmt(pt)}), } break :result MCValue{ .register = dst_reg }; @@ -4497,7 +4476,7 @@ fn load(func: *Func, dst_mcv: MCValue, ptr_mcv: MCValue, ptr_ty: Type) InnerErro const zcu = pt.zcu; const dst_ty = ptr_ty.childType(zcu); - log.debug("loading {}:{} into {}", .{ ptr_mcv, ptr_ty.fmt(pt), dst_mcv }); + log.debug("loading {}:{f} into {}", .{ ptr_mcv, ptr_ty.fmt(pt), dst_mcv }); switch (ptr_mcv) { .none, @@ -4550,7 +4529,7 @@ fn airStore(func: *Func, inst: Air.Inst.Index, safety: bool) !void { fn store(func: *Func, ptr_mcv: MCValue, src_mcv: MCValue, ptr_ty: Type) !void { const zcu = func.pt.zcu; const src_ty = ptr_ty.childType(zcu); - 
log.debug("storing {}:{} in {}:{}", .{ src_mcv, src_ty.fmt(func.pt), ptr_mcv, ptr_ty.fmt(func.pt) }); + log.debug("storing {}:{f} in {}:{f}", .{ src_mcv, src_ty.fmt(func.pt), ptr_mcv, ptr_ty.fmt(func.pt) }); switch (ptr_mcv) { .none => unreachable, @@ -7305,7 +7284,7 @@ fn airBitCast(func: *Func, inst: Air.Inst.Index) !void { const bit_size = dst_ty.bitSize(zcu); if (abi_size * 8 <= bit_size) break :result dst_mcv; - return func.fail("TODO: airBitCast {} to {}", .{ src_ty.fmt(pt), dst_ty.fmt(pt) }); + return func.fail("TODO: airBitCast {f} to {f}", .{ src_ty.fmt(pt), dst_ty.fmt(pt) }); }; return func.finishAir(inst, result, .{ ty_op.operand, .none, .none }); } @@ -8121,7 +8100,7 @@ fn airAggregateInit(func: *Func, inst: Air.Inst.Index) !void { ); break :result .{ .load_frame = .{ .index = frame_index } }; }, - else => return func.fail("TODO: airAggregate {}", .{result_ty.fmt(pt)}), + else => return func.fail("TODO: airAggregate {f}", .{result_ty.fmt(pt)}), } }; @@ -8322,7 +8301,7 @@ fn resolveCallingConventionValues( }; result.return_value = switch (ret_tracking_i) { - else => return func.fail("ty {} took {} tracking return indices", .{ ret_ty.fmt(pt), ret_tracking_i }), + else => return func.fail("ty {f} took {} tracking return indices", .{ ret_ty.fmt(pt), ret_tracking_i }), 1 => ret_tracking[0], 2 => InstTracking.init(.{ .register_pair = .{ ret_tracking[0].short.register, ret_tracking[1].short.register, @@ -8377,7 +8356,7 @@ fn resolveCallingConventionValues( else => return func.fail("TODO: C calling convention arg class {}", .{class}), } else { arg.* = switch (arg_mcv_i) { - else => return func.fail("ty {} took {} tracking arg indices", .{ ty.fmt(pt), arg_mcv_i }), + else => return func.fail("ty {f} took {} tracking arg indices", .{ ty.fmt(pt), arg_mcv_i }), 1 => arg_mcv[0], 2 => .{ .register_pair = .{ arg_mcv[0].register, arg_mcv[1].register } }, }; diff --git a/src/arch/riscv64/Emit.zig b/src/arch/riscv64/Emit.zig index 0561eb2019..41875ed789 100644 --- 
a/src/arch/riscv64/Emit.zig +++ b/src/arch/riscv64/Emit.zig @@ -172,7 +172,7 @@ const Reloc = struct { fn fixupRelocs(emit: *Emit) Error!void { for (emit.relocs.items) |reloc| { - log.debug("target inst: {}", .{emit.lower.mir.instructions.get(reloc.target)}); + log.debug("target inst: {f}", .{emit.lower.mir.instructions.get(reloc.target)}); const target = emit.code_offset_mapping.get(reloc.target) orelse return emit.fail("relocation target not found!", .{}); diff --git a/src/arch/riscv64/Lower.zig b/src/arch/riscv64/Lower.zig index ff3d79ba41..08bb3e2b93 100644 --- a/src/arch/riscv64/Lower.zig +++ b/src/arch/riscv64/Lower.zig @@ -61,7 +61,7 @@ pub fn lowerMir(lower: *Lower, index: Mir.Inst.Index, options: struct { defer lower.result_relocs_len = undefined; const inst = lower.mir.instructions.get(index); - log.debug("lowerMir {}", .{inst}); + log.debug("lowerMir {f}", .{inst}); switch (inst.tag) { else => try lower.generic(inst), .pseudo_dbg_line_column, diff --git a/src/arch/riscv64/Mir.zig b/src/arch/riscv64/Mir.zig index 2ad75e4677..43ccf00058 100644 --- a/src/arch/riscv64/Mir.zig +++ b/src/arch/riscv64/Mir.zig @@ -92,13 +92,7 @@ pub const Inst = struct { }, }; - pub fn format( - inst: Inst, - comptime fmt: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, - ) !void { - assert(fmt.len == 0); + pub fn format(inst: Inst, writer: *std.io.Writer) std.io.Writer.Error!void { try writer.print("Tag: {s}, Data: {s}", .{ @tagName(inst.tag), @tagName(inst.data) }); } }; diff --git a/src/arch/riscv64/bits.zig b/src/arch/riscv64/bits.zig index 328ffa5e03..94c64dfc98 100644 --- a/src/arch/riscv64/bits.zig +++ b/src/arch/riscv64/bits.zig @@ -255,23 +255,6 @@ pub const FrameIndex = enum(u32) { pub fn isNamed(fi: FrameIndex) bool { return @intFromEnum(fi) < named_count; } - - pub fn format( - fi: FrameIndex, - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { - try writer.writeAll("FrameIndex"); - if 
(fi.isNamed()) { - try writer.writeByte('.'); - try writer.writeAll(@tagName(fi)); - } else { - try writer.writeByte('('); - try std.fmt.formatType(@intFromEnum(fi), fmt, options, writer, 0); - try writer.writeByte(')'); - } - } }; /// A linker symbol not yet allocated in VM. diff --git a/src/arch/sparc64/CodeGen.zig b/src/arch/sparc64/CodeGen.zig index 5115c432a7..9174a5850e 100644 --- a/src/arch/sparc64/CodeGen.zig +++ b/src/arch/sparc64/CodeGen.zig @@ -723,7 +723,7 @@ fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void { if (std.debug.runtime_safety) { if (self.air_bookkeeping < old_air_bookkeeping + 1) { - std.debug.panic("in codegen.zig, handling of AIR instruction %{d} ('{}') did not do proper bookkeeping. Look for a missing call to finishAir.", .{ inst, air_tags[@intFromEnum(inst)] }); + std.debug.panic("in codegen.zig, handling of AIR instruction %{d} ('{t}') did not do proper bookkeeping. Look for a missing call to finishAir.", .{ inst, air_tags[@intFromEnum(inst)] }); } } } @@ -1001,7 +1001,7 @@ fn airArg(self: *Self, inst: Air.Inst.Index) InnerError!void { switch (self.args[arg_index]) { .stack_offset => |off| { const abi_size = math.cast(u32, ty.abiSize(zcu)) orelse { - return self.fail("type '{}' too big to fit into stack frame", .{ty.fmt(pt)}); + return self.fail("type '{f}' too big to fit into stack frame", .{ty.fmt(pt)}); }; const offset = off + abi_size; break :blk .{ .stack_offset = offset }; @@ -2748,7 +2748,7 @@ fn allocMemPtr(self: *Self, inst: Air.Inst.Index) !u32 { } const abi_size = math.cast(u32, elem_ty.abiSize(zcu)) orelse { - return self.fail("type '{}' too big to fit into stack frame", .{elem_ty.fmt(pt)}); + return self.fail("type '{f}' too big to fit into stack frame", .{elem_ty.fmt(pt)}); }; // TODO swap this for inst.ty.ptrAlign const abi_align = elem_ty.abiAlignment(zcu); @@ -2760,7 +2760,7 @@ fn allocRegOrMem(self: *Self, inst: Air.Inst.Index, reg_ok: bool) !MCValue { const zcu = pt.zcu; const elem_ty = 
self.typeOfIndex(inst); const abi_size = math.cast(u32, elem_ty.abiSize(zcu)) orelse { - return self.fail("type '{}' too big to fit into stack frame", .{elem_ty.fmt(pt)}); + return self.fail("type '{f}' too big to fit into stack frame", .{elem_ty.fmt(pt)}); }; const abi_align = elem_ty.abiAlignment(zcu); self.stack_align = self.stack_align.max(abi_align); @@ -4111,7 +4111,7 @@ fn getResolvedInstValue(self: *Self, inst: Air.Inst.Index) MCValue { while (true) { i -= 1; if (self.branch_stack.items[i].inst_table.get(inst)) |mcv| { - log.debug("getResolvedInstValue %{} => {}", .{ inst, mcv }); + log.debug("getResolvedInstValue %{f} => {}", .{ inst, mcv }); assert(mcv != .dead); return mcv; } @@ -4382,7 +4382,7 @@ fn processDeath(self: *Self, inst: Air.Inst.Index) void { const prev_value = self.getResolvedInstValue(inst); const branch = &self.branch_stack.items[self.branch_stack.items.len - 1]; branch.inst_table.putAssumeCapacity(inst, .dead); - log.debug("%{} death: {} -> .dead", .{ inst, prev_value }); + log.debug("%{f} death: {} -> .dead", .{ inst, prev_value }); switch (prev_value) { .register => |reg| { self.register_manager.freeReg(reg); diff --git a/src/arch/wasm/CodeGen.zig b/src/arch/wasm/CodeGen.zig index e396f69af3..acb79ad4fa 100644 --- a/src/arch/wasm/CodeGen.zig +++ b/src/arch/wasm/CodeGen.zig @@ -1463,7 +1463,7 @@ fn allocStack(cg: *CodeGen, ty: Type) !WValue { } const abi_size = std.math.cast(u32, ty.abiSize(zcu)) orelse { - return cg.fail("Type {} with ABI size of {d} exceeds stack frame size", .{ + return cg.fail("Type {f} with ABI size of {d} exceeds stack frame size", .{ ty.fmt(pt), ty.abiSize(zcu), }); }; @@ -1497,7 +1497,7 @@ fn allocStackPtr(cg: *CodeGen, inst: Air.Inst.Index) !WValue { const abi_alignment = ptr_ty.ptrAlignment(zcu); const abi_size = std.math.cast(u32, pointee_ty.abiSize(zcu)) orelse { - return cg.fail("Type {} with ABI size of {d} exceeds stack frame size", .{ + return cg.fail("Type {f} with ABI size of {d} exceeds stack frame 
size", .{ pointee_ty.fmt(pt), pointee_ty.abiSize(zcu), }); }; @@ -1959,7 +1959,7 @@ fn genInst(cg: *CodeGen, inst: Air.Inst.Index) InnerError!void { .wasm_memory_size => cg.airWasmMemorySize(inst), .wasm_memory_grow => cg.airWasmMemoryGrow(inst), - .memcpy => cg.airMemcpy(inst), + .memcpy, .memmove => cg.airMemcpy(inst), .ret_addr => cg.airRetAddr(inst), .tag_name => cg.airTagName(inst), @@ -1983,7 +1983,6 @@ fn genInst(cg: *CodeGen, inst: Air.Inst.Index) InnerError!void { .c_va_copy, .c_va_end, .c_va_start, - .memmove, => |tag| return cg.fail("TODO: Implement wasm inst: {s}", .{@tagName(tag)}), .atomic_load => cg.airAtomicLoad(inst), @@ -2046,7 +2045,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { try cg.genInst(inst); if (std.debug.runtime_safety and cg.air_bookkeeping < old_bookkeeping_value + 1) { - std.debug.panic("Missing call to `finishAir` in AIR instruction %{d} ('{}')", .{ + std.debug.panic("Missing call to `finishAir` in AIR instruction %{d} ('{t}')", .{ inst, cg.air.instructions.items(.tag)[@intFromEnum(inst)], }); @@ -2404,10 +2403,7 @@ fn store(cg: *CodeGen, lhs: WValue, rhs: WValue, ty: Type, offset: u32) InnerErr try cg.memcpy(lhs, rhs, .{ .imm32 = @as(u32, @intCast(ty.abiSize(zcu))) }); }, else => if (abi_size > 8) { - return cg.fail("TODO: `store` for type `{}` with abisize `{d}`", .{ - ty.fmt(pt), - abi_size, - }); + return cg.fail("TODO: `store` for type `{f}` with abisize `{d}`", .{ ty.fmt(pt), abi_size }); }, } try cg.emitWValue(lhs); @@ -2596,10 +2592,7 @@ fn binOp(cg: *CodeGen, lhs: WValue, rhs: WValue, ty: Type, op: Op) InnerError!WV if (ty.zigTypeTag(zcu) == .int) { return cg.binOpBigInt(lhs, rhs, ty, op); } else { - return cg.fail( - "TODO: Implement binary operation for type: {}", - .{ty.fmt(pt)}, - ); + return cg.fail("TODO: Implement binary operation for type: {f}", .{ty.fmt(pt)}); } } @@ -2817,7 +2810,7 @@ fn airAbs(cg: *CodeGen, inst: Air.Inst.Index) InnerError!void { switch (scalar_ty.zigTypeTag(zcu)) { 
.int => if (ty.zigTypeTag(zcu) == .vector) { - return cg.fail("TODO implement airAbs for {}", .{ty.fmt(pt)}); + return cg.fail("TODO implement airAbs for {f}", .{ty.fmt(pt)}); } else { const int_bits = ty.intInfo(zcu).bits; const wasm_bits = toWasmBits(int_bits) orelse { @@ -3244,7 +3237,7 @@ fn lowerConstant(cg: *CodeGen, val: Value, ty: Type) InnerError!WValue { return .{ .imm32 = @intFromBool(!val.isNull(zcu)) }; }, .aggregate => switch (ip.indexToKey(ty.ip_index)) { - .array_type => return cg.fail("Wasm TODO: LowerConstant for {}", .{ty.fmt(pt)}), + .array_type => return cg.fail("Wasm TODO: LowerConstant for {f}", .{ty.fmt(pt)}), .vector_type => { assert(determineSimdStoreStrategy(ty, zcu, cg.target) == .direct); var buf: [16]u8 = undefined; @@ -3332,7 +3325,7 @@ fn emitUndefined(cg: *CodeGen, ty: Type) InnerError!WValue { }, else => unreachable, }, - else => return cg.fail("Wasm TODO: emitUndefined for type: {}\n", .{ty.zigTypeTag(zcu)}), + else => return cg.fail("Wasm TODO: emitUndefined for type: {t}\n", .{ty.zigTypeTag(zcu)}), } } @@ -3608,7 +3601,7 @@ fn airNot(cg: *CodeGen, inst: Air.Inst.Index) InnerError!void { } else { const int_info = operand_ty.intInfo(zcu); const wasm_bits = toWasmBits(int_info.bits) orelse { - return cg.fail("TODO: Implement binary NOT for {}", .{operand_ty.fmt(pt)}); + return cg.fail("TODO: Implement binary NOT for {f}", .{operand_ty.fmt(pt)}); }; switch (wasm_bits) { @@ -3874,7 +3867,7 @@ fn airStructFieldVal(cg: *CodeGen, inst: Air.Inst.Index) InnerError!void { }, else => result: { const offset = std.math.cast(u32, struct_ty.structFieldOffset(field_index, zcu)) orelse { - return cg.fail("Field type '{}' too big to fit into stack frame", .{field_ty.fmt(pt)}); + return cg.fail("Field type '{f}' too big to fit into stack frame", .{field_ty.fmt(pt)}); }; if (isByRef(field_ty, zcu, cg.target)) { switch (operand) { @@ -4360,7 +4353,7 @@ fn isNull(cg: *CodeGen, operand: WValue, optional_ty: Type, opcode: std.wasm.Opc // a pointer to 
the stack value if (payload_ty.hasRuntimeBitsIgnoreComptime(zcu)) { const offset = std.math.cast(u32, payload_ty.abiSize(zcu)) orelse { - return cg.fail("Optional type {} too big to fit into stack frame", .{optional_ty.fmt(pt)}); + return cg.fail("Optional type {f} too big to fit into stack frame", .{optional_ty.fmt(pt)}); }; try cg.addMemArg(.i32_load8_u, .{ .offset = operand.offset() + offset, .alignment = 1 }); } @@ -4430,7 +4423,7 @@ fn airOptionalPayloadPtrSet(cg: *CodeGen, inst: Air.Inst.Index) InnerError!void } const offset = std.math.cast(u32, payload_ty.abiSize(zcu)) orelse { - return cg.fail("Optional type {} too big to fit into stack frame", .{opt_ty.fmt(pt)}); + return cg.fail("Optional type {f} too big to fit into stack frame", .{opt_ty.fmt(pt)}); }; try cg.emitWValue(operand); @@ -4462,7 +4455,7 @@ fn airWrapOptional(cg: *CodeGen, inst: Air.Inst.Index) InnerError!void { break :result cg.reuseOperand(ty_op.operand, operand); } const offset = std.math.cast(u32, payload_ty.abiSize(zcu)) orelse { - return cg.fail("Optional type {} too big to fit into stack frame", .{op_ty.fmt(pt)}); + return cg.fail("Optional type {f} too big to fit into stack frame", .{op_ty.fmt(pt)}); }; // Create optional type, set the non-null bit, and store the operand inside the optional type @@ -6196,7 +6189,7 @@ fn airMulWithOverflow(cg: *CodeGen, inst: Air.Inst.Index) InnerError!void { _ = try cg.load(overflow_ret, Type.i32, 0); try cg.addLocal(.local_set, overflow_bit.local.value); break :blk res; - } else return cg.fail("TODO: @mulWithOverflow for {}", .{ty.fmt(pt)}); + } else return cg.fail("TODO: @mulWithOverflow for {f}", .{ty.fmt(pt)}); var bin_op_local = try mul.toLocal(cg, ty); defer bin_op_local.free(cg); @@ -6749,7 +6742,7 @@ fn airMod(cg: *CodeGen, inst: Air.Inst.Index) InnerError!void { const add = try cg.binOp(rem, rhs, ty, .add); break :result try cg.binOp(add, rhs, ty, .rem); } - return cg.fail("TODO: @mod for {}", .{ty.fmt(pt)}); + return cg.fail("TODO: @mod for 
{f}", .{ty.fmt(pt)}); }; return cg.finishAir(inst, result, &.{ bin_op.lhs, bin_op.rhs }); @@ -6767,7 +6760,7 @@ fn airSatMul(cg: *CodeGen, inst: Air.Inst.Index) InnerError!void { const lhs = try cg.resolveInst(bin_op.lhs); const rhs = try cg.resolveInst(bin_op.rhs); const wasm_bits = toWasmBits(int_info.bits) orelse { - return cg.fail("TODO: mul_sat for {}", .{ty.fmt(pt)}); + return cg.fail("TODO: mul_sat for {f}", .{ty.fmt(pt)}); }; switch (wasm_bits) { @@ -6804,7 +6797,7 @@ fn airSatMul(cg: *CodeGen, inst: Air.Inst.Index) InnerError!void { }, 64 => { if (!(int_info.bits == 64 and int_info.signedness == .signed)) { - return cg.fail("TODO: mul_sat for {}", .{ty.fmt(pt)}); + return cg.fail("TODO: mul_sat for {f}", .{ty.fmt(pt)}); } const overflow_ret = try cg.allocStack(Type.i32); _ = try cg.callIntrinsic( @@ -6822,7 +6815,7 @@ fn airSatMul(cg: *CodeGen, inst: Air.Inst.Index) InnerError!void { }, 128 => { if (!(int_info.bits == 128 and int_info.signedness == .signed)) { - return cg.fail("TODO: mul_sat for {}", .{ty.fmt(pt)}); + return cg.fail("TODO: mul_sat for {f}", .{ty.fmt(pt)}); } const overflow_ret = try cg.allocStack(Type.i32); const ret = try cg.callIntrinsic( diff --git a/src/arch/x86_64/CodeGen.zig b/src/arch/x86_64/CodeGen.zig index 90c777aa45..486497a365 100644 --- a/src/arch/x86_64/CodeGen.zig +++ b/src/arch/x86_64/CodeGen.zig @@ -6,6 +6,7 @@ const log = std.log.scoped(.codegen); const tracking_log = std.log.scoped(.tracking); const verbose_tracking_log = std.log.scoped(.verbose_tracking); const wip_mir_log = std.log.scoped(.wip_mir); +const Writer = std.io.Writer; const Air = @import("../../Air.zig"); const Allocator = std.mem.Allocator; @@ -524,52 +525,47 @@ pub const MCValue = union(enum) { }; } - pub fn format( - mcv: MCValue, - comptime _: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { + pub fn format(mcv: MCValue, w: *Writer) Writer.Error!void { switch (mcv) { - .none, .unreach, .dead, .undef => try 
writer.print("({s})", .{@tagName(mcv)}), - .immediate => |pl| try writer.print("0x{x}", .{pl}), - .memory => |pl| try writer.print("[ds:0x{x}]", .{pl}), - inline .eflags, .register => |pl| try writer.print("{s}", .{@tagName(pl)}), - .register_pair => |pl| try writer.print("{s}:{s}", .{ @tagName(pl[1]), @tagName(pl[0]) }), - .register_triple => |pl| try writer.print("{s}:{s}:{s}", .{ + .none, .unreach, .dead, .undef => try w.print("({s})", .{@tagName(mcv)}), + .immediate => |pl| try w.print("0x{x}", .{pl}), + .memory => |pl| try w.print("[ds:0x{x}]", .{pl}), + inline .eflags, .register => |pl| try w.print("{s}", .{@tagName(pl)}), + .register_pair => |pl| try w.print("{s}:{s}", .{ @tagName(pl[1]), @tagName(pl[0]) }), + .register_triple => |pl| try w.print("{s}:{s}:{s}", .{ @tagName(pl[2]), @tagName(pl[1]), @tagName(pl[0]), }), - .register_quadruple => |pl| try writer.print("{s}:{s}:{s}:{s}", .{ + .register_quadruple => |pl| try w.print("{s}:{s}:{s}:{s}", .{ @tagName(pl[3]), @tagName(pl[2]), @tagName(pl[1]), @tagName(pl[0]), }), - .register_offset => |pl| try writer.print("{s} + 0x{x}", .{ @tagName(pl.reg), pl.off }), - .register_overflow => |pl| try writer.print("{s}:{s}", .{ + .register_offset => |pl| try w.print("{s} + 0x{x}", .{ @tagName(pl.reg), pl.off }), + .register_overflow => |pl| try w.print("{s}:{s}", .{ @tagName(pl.eflags), @tagName(pl.reg), }), - .register_mask => |pl| try writer.print("mask({s},{}):{c}{s}", .{ + .register_mask => |pl| try w.print("mask({s},{f}):{c}{s}", .{ @tagName(pl.info.kind), pl.info.scalar, @as(u8, if (pl.info.inverted) '!' 
else ' '), @tagName(pl.reg), }), - .indirect => |pl| try writer.print("[{s} + 0x{x}]", .{ @tagName(pl.reg), pl.off }), - .indirect_load_frame => |pl| try writer.print("[[{} + 0x{x}]]", .{ pl.index, pl.off }), - .load_frame => |pl| try writer.print("[{} + 0x{x}]", .{ pl.index, pl.off }), - .lea_frame => |pl| try writer.print("{} + 0x{x}", .{ pl.index, pl.off }), - .load_nav => |pl| try writer.print("[nav:{d}]", .{@intFromEnum(pl)}), - .lea_nav => |pl| try writer.print("nav:{d}", .{@intFromEnum(pl)}), - .load_uav => |pl| try writer.print("[uav:{d}]", .{@intFromEnum(pl.val)}), - .lea_uav => |pl| try writer.print("uav:{d}", .{@intFromEnum(pl.val)}), - .load_lazy_sym => |pl| try writer.print("[lazy:{s}:{d}]", .{ @tagName(pl.kind), @intFromEnum(pl.ty) }), - .lea_lazy_sym => |pl| try writer.print("lazy:{s}:{d}", .{ @tagName(pl.kind), @intFromEnum(pl.ty) }), - .load_extern_func => |pl| try writer.print("[extern:{d}]", .{@intFromEnum(pl)}), - .lea_extern_func => |pl| try writer.print("extern:{d}", .{@intFromEnum(pl)}), - .elementwise_args => |pl| try writer.print("elementwise:{d}:[{} + 0x{x}]", .{ + .indirect => |pl| try w.print("[{s} + 0x{x}]", .{ @tagName(pl.reg), pl.off }), + .indirect_load_frame => |pl| try w.print("[[{} + 0x{x}]]", .{ pl.index, pl.off }), + .load_frame => |pl| try w.print("[{} + 0x{x}]", .{ pl.index, pl.off }), + .lea_frame => |pl| try w.print("{} + 0x{x}", .{ pl.index, pl.off }), + .load_nav => |pl| try w.print("[nav:{d}]", .{@intFromEnum(pl)}), + .lea_nav => |pl| try w.print("nav:{d}", .{@intFromEnum(pl)}), + .load_uav => |pl| try w.print("[uav:{d}]", .{@intFromEnum(pl.val)}), + .lea_uav => |pl| try w.print("uav:{d}", .{@intFromEnum(pl.val)}), + .load_lazy_sym => |pl| try w.print("[lazy:{s}:{d}]", .{ @tagName(pl.kind), @intFromEnum(pl.ty) }), + .lea_lazy_sym => |pl| try w.print("lazy:{s}:{d}", .{ @tagName(pl.kind), @intFromEnum(pl.ty) }), + .load_extern_func => |pl| try w.print("[extern:{d}]", .{@intFromEnum(pl)}), + .lea_extern_func => |pl| try 
w.print("extern:{d}", .{@intFromEnum(pl)}), + .elementwise_args => |pl| try w.print("elementwise:{d}:[{} + 0x{x}]", .{ pl.regs, pl.frame_index, pl.frame_off, }), - .reserved_frame => |pl| try writer.print("(dead:{})", .{pl}), - .air_ref => |pl| try writer.print("(air:0x{x})", .{@intFromEnum(pl)}), + .reserved_frame => |pl| try w.print("(dead:{})", .{pl}), + .air_ref => |pl| try w.print("(air:0x{x})", .{@intFromEnum(pl)}), } } }; @@ -639,7 +635,7 @@ const InstTracking = struct { .reserved_frame => |index| self.long = .{ .load_frame = .{ .index = index } }, else => unreachable, } - tracking_log.debug("spill {} from {} to {}", .{ inst, self.short, self.long }); + tracking_log.debug("spill {f} from {f} to {f}", .{ inst, self.short, self.long }); try cg.genCopy(cg.typeOfIndex(inst), self.long, self.short, .{}); for (self.short.getRegs()) |reg| if (reg.isClass(.x87)) try cg.asmRegister(.{ .f_, .free }, reg); } @@ -672,7 +668,7 @@ const InstTracking = struct { else => {}, // TODO process stack allocation death } self.reuseFrame(); - tracking_log.debug("{} => {} (spilled)", .{ inst, self.* }); + tracking_log.debug("{f} => {f} (spilled)", .{ inst, self.* }); } fn verifyMaterialize(self: InstTracking, target: InstTracking) void { @@ -749,7 +745,7 @@ const InstTracking = struct { else => target.long, } else target.long; self.short = target.short; - tracking_log.debug("{} => {} (materialize)", .{ inst, self.* }); + tracking_log.debug("{f} => {f} (materialize)", .{ inst, self.* }); } fn resurrect(self: *InstTracking, function: *CodeGen, inst: Air.Inst.Index, scope_generation: u32) !void { @@ -757,7 +753,7 @@ const InstTracking = struct { .dead => |die_generation| if (die_generation >= scope_generation) { self.reuseFrame(); try function.getValue(self.short, inst); - tracking_log.debug("{} => {} (resurrect)", .{ inst, self.* }); + tracking_log.debug("{f} => {f} (resurrect)", .{ inst, self.* }); }, else => {}, } @@ -768,7 +764,7 @@ const InstTracking = struct { try 
function.freeValue(self.short, opts); if (self.long == .none) self.long = self.short; self.short = .{ .dead = function.scope_generation }; - tracking_log.debug("{} => {} (death)", .{ inst, self.* }); + tracking_log.debug("{f} => {f} (death)", .{ inst, self.* }); } fn reuse( @@ -778,13 +774,13 @@ const InstTracking = struct { old_inst: Air.Inst.Index, ) void { self.short = .{ .dead = function.scope_generation }; - tracking_log.debug("{?} => {} (reuse {})", .{ new_inst, self.*, old_inst }); + tracking_log.debug("{?f} => {f} (reuse {f})", .{ new_inst, self.*, old_inst }); } fn liveOut(self: *InstTracking, function: *CodeGen, inst: Air.Inst.Index) void { for (self.getRegs()) |reg| { if (function.register_manager.isRegFree(reg)) { - tracking_log.debug("{} => {} (live-out)", .{ inst, self.* }); + tracking_log.debug("{f} => {f} (live-out)", .{ inst, self.* }); continue; } @@ -812,18 +808,13 @@ const InstTracking = struct { // Perform side-effects of freeValue manually. function.register_manager.freeReg(reg); - tracking_log.debug("{} => {} (live-out {})", .{ inst, self.*, tracked_inst }); + tracking_log.debug("{f} => {f} (live-out {f})", .{ inst, self.*, tracked_inst }); } } - pub fn format( - tracking: InstTracking, - comptime _: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { - if (!std.meta.eql(tracking.long, tracking.short)) try writer.print("|{}| ", .{tracking.long}); - try writer.print("{}", .{tracking.short}); + pub fn format(tracking: InstTracking, bw: *Writer) Writer.Error!void { + if (!std.meta.eql(tracking.long, tracking.short)) try bw.print("|{f}| ", .{tracking.long}); + try bw.print("{f}", .{tracking.short}); } }; @@ -939,7 +930,7 @@ pub fn generate( function.inst_tracking.putAssumeCapacityNoClobber(temp.toIndex(), .init(.none)); } - wip_mir_log.debug("{}:", .{fmtNav(func.owner_nav, ip)}); + wip_mir_log.debug("{f}:", .{fmtNav(func.owner_nav, ip)}); try function.frame_allocs.resize(gpa, FrameIndex.named_count); 
function.frame_allocs.set( @@ -1097,15 +1088,10 @@ const FormatNavData = struct { ip: *const InternPool, nav_index: InternPool.Nav.Index, }; -fn formatNav( - data: FormatNavData, - comptime _: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, -) @TypeOf(writer).Error!void { - try writer.print("{}", .{data.ip.getNav(data.nav_index).fqn.fmt(data.ip)}); +fn formatNav(data: FormatNavData, w: *Writer) Writer.Error!void { + try w.print("{f}", .{data.ip.getNav(data.nav_index).fqn.fmt(data.ip)}); } -fn fmtNav(nav_index: InternPool.Nav.Index, ip: *const InternPool) std.fmt.Formatter(formatNav) { +fn fmtNav(nav_index: InternPool.Nav.Index, ip: *const InternPool) std.fmt.Formatter(FormatNavData, formatNav) { return .{ .data = .{ .ip = ip, .nav_index = nav_index, @@ -1116,15 +1102,14 @@ const FormatAirData = struct { self: *CodeGen, inst: Air.Inst.Index, }; -fn formatAir( - data: FormatAirData, - comptime _: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, -) @TypeOf(writer).Error!void { - data.self.air.dumpInst(data.inst, data.self.pt, data.self.liveness); +fn formatAir(data: FormatAirData, w: *std.io.Writer) Writer.Error!void { + // not acceptable implementation because it ignores `w`: + //data.self.air.dumpInst(data.inst, data.self.pt, data.self.liveness); + _ = data; + _ = w; + @panic("TODO: unimplemented"); } -fn fmtAir(self: *CodeGen, inst: Air.Inst.Index) std.fmt.Formatter(formatAir) { +fn fmtAir(self: *CodeGen, inst: Air.Inst.Index) std.fmt.Formatter(FormatAirData, formatAir) { return .{ .data = .{ .self = self, .inst = inst } }; } @@ -1132,12 +1117,7 @@ const FormatWipMirData = struct { self: *CodeGen, inst: Mir.Inst.Index, }; -fn formatWipMir( - data: FormatWipMirData, - comptime _: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, -) @TypeOf(writer).Error!void { +fn formatWipMir(data: FormatWipMirData, w: *Writer) Writer.Error!void { var lower: Lower = .{ .target = data.self.target, .allocator = data.self.gpa, @@ -1152,27 +1132,22 @@ fn 
formatWipMir( lower.err_msg.?.deinit(data.self.gpa); lower.err_msg = null; } - try writer.writeAll(lower.err_msg.?.msg); + try w.writeAll(lower.err_msg.?.msg); return; }, - error.OutOfMemory, error.InvalidInstruction, error.CannotEncode => |e| { - try writer.writeAll(switch (e) { - error.OutOfMemory => "Out of memory", - error.InvalidInstruction => "CodeGen failed to find a viable instruction.", - error.CannotEncode => "CodeGen failed to encode the instruction.", - }); + else => |e| { + try w.writeAll(@errorName(e)); return; }, - else => |e| return e, }).insts) |lowered_inst| { - if (!first) try writer.writeAll("\ndebug(wip_mir): "); - try writer.print(" | {}", .{lowered_inst}); + if (!first) try w.writeAll("\ndebug(wip_mir): "); + try w.print(" | {f}", .{lowered_inst}); first = false; } if (first) { const ip = &data.self.pt.zcu.intern_pool; const mir_inst = lower.mir.instructions.get(data.inst); - try writer.print(" | .{s}", .{@tagName(mir_inst.ops)}); + try w.print(" | .{s}", .{@tagName(mir_inst.ops)}); switch (mir_inst.ops) { else => unreachable, .pseudo_dbg_prologue_end_none, @@ -1184,20 +1159,20 @@ fn formatWipMir( .pseudo_dbg_var_none, .pseudo_dead_none, => {}, - .pseudo_dbg_line_stmt_line_column, .pseudo_dbg_line_line_column => try writer.print( + .pseudo_dbg_line_stmt_line_column, .pseudo_dbg_line_line_column => try w.print( " {[line]d}, {[column]d}", mir_inst.data.line_column, ), - .pseudo_dbg_enter_inline_func, .pseudo_dbg_leave_inline_func => try writer.print(" {}", .{ + .pseudo_dbg_enter_inline_func, .pseudo_dbg_leave_inline_func => try w.print(" {f}", .{ ip.getNav(ip.indexToKey(mir_inst.data.ip_index).func.owner_nav).name.fmt(ip), }), - .pseudo_dbg_arg_i_s, .pseudo_dbg_var_i_s => try writer.print(" {d}", .{ + .pseudo_dbg_arg_i_s, .pseudo_dbg_var_i_s => try w.print(" {d}", .{ @as(i32, @bitCast(mir_inst.data.i.i)), }), - .pseudo_dbg_arg_i_u, .pseudo_dbg_var_i_u => try writer.print(" {d}", .{ + .pseudo_dbg_arg_i_u, .pseudo_dbg_var_i_u => try w.print(" 
{d}", .{ mir_inst.data.i.i, }), - .pseudo_dbg_arg_i_64, .pseudo_dbg_var_i_64 => try writer.print(" {d}", .{ + .pseudo_dbg_arg_i_64, .pseudo_dbg_var_i_64 => try w.print(" {d}", .{ mir_inst.data.i64, }), .pseudo_dbg_arg_ro, .pseudo_dbg_var_ro => { @@ -1205,44 +1180,39 @@ fn formatWipMir( .base = .{ .reg = mir_inst.data.ro.reg }, .disp = mir_inst.data.ro.off, }) }; - try writer.print(" {}", .{mem_op.fmt(.m)}); + try w.print(" {f}", .{mem_op.fmt(.m)}); }, .pseudo_dbg_arg_fa, .pseudo_dbg_var_fa => { const mem_op: encoder.Instruction.Operand = .{ .mem = .initSib(.qword, .{ .base = .{ .frame = mir_inst.data.fa.index }, .disp = mir_inst.data.fa.off, }) }; - try writer.print(" {}", .{mem_op.fmt(.m)}); + try w.print(" {f}", .{mem_op.fmt(.m)}); }, .pseudo_dbg_arg_m, .pseudo_dbg_var_m => { const mem_op: encoder.Instruction.Operand = .{ .mem = lower.mir.extraData(Mir.Memory, mir_inst.data.x.payload).data.decode(), }; - try writer.print(" {}", .{mem_op.fmt(.m)}); + try w.print(" {f}", .{mem_op.fmt(.m)}); }, - .pseudo_dbg_arg_val, .pseudo_dbg_var_val => try writer.print(" {}", .{ + .pseudo_dbg_arg_val, .pseudo_dbg_var_val => try w.print(" {f}", .{ Value.fromInterned(mir_inst.data.ip_index).fmtValue(data.self.pt), }), } } } -fn fmtWipMir(self: *CodeGen, inst: Mir.Inst.Index) std.fmt.Formatter(formatWipMir) { +fn fmtWipMir(self: *CodeGen, inst: Mir.Inst.Index) std.fmt.Formatter(FormatWipMirData, formatWipMir) { return .{ .data = .{ .self = self, .inst = inst } }; } const FormatTrackingData = struct { self: *CodeGen, }; -fn formatTracking( - data: FormatTrackingData, - comptime _: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, -) @TypeOf(writer).Error!void { +fn formatTracking(data: FormatTrackingData, w: *Writer) Writer.Error!void { var it = data.self.inst_tracking.iterator(); - while (it.next()) |entry| try writer.print("\n{} = {}", .{ entry.key_ptr.*, entry.value_ptr.* }); + while (it.next()) |entry| try w.print("\n{f} = {f}", .{ entry.key_ptr.*, entry.value_ptr.* }); 
} -fn fmtTracking(self: *CodeGen) std.fmt.Formatter(formatTracking) { +fn fmtTracking(self: *CodeGen) std.fmt.Formatter(FormatTrackingData, formatTracking) { return .{ .data = .{ .self = self } }; } @@ -1251,7 +1221,7 @@ fn addInst(self: *CodeGen, inst: Mir.Inst) error{OutOfMemory}!Mir.Inst.Index { try self.mir_instructions.ensureUnusedCapacity(gpa, 1); const result_index: Mir.Inst.Index = @intCast(self.mir_instructions.len); self.mir_instructions.appendAssumeCapacity(inst); - if (inst.ops != .pseudo_dead_none) wip_mir_log.debug("{}", .{self.fmtWipMir(result_index)}); + if (inst.ops != .pseudo_dead_none) wip_mir_log.debug("{f}", .{self.fmtWipMir(result_index)}); return result_index; } @@ -2056,7 +2026,7 @@ fn gen( .{}, ); self.ret_mcv.long = .{ .load_frame = .{ .index = frame_index } }; - tracking_log.debug("spill {} to {}", .{ self.ret_mcv.long, frame_index }); + tracking_log.debug("spill {f} to {}", .{ self.ret_mcv.long, frame_index }); }, else => unreachable, } @@ -2334,8 +2304,8 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { for (body) |inst| { if (cg.liveness.isUnused(inst) and !cg.air.mustLower(inst, ip)) continue; - wip_mir_log.debug("{}", .{cg.fmtAir(inst)}); - verbose_tracking_log.debug("{}", .{cg.fmtTracking()}); + wip_mir_log.debug("{f}", .{cg.fmtAir(inst)}); + verbose_tracking_log.debug("{f}", .{cg.fmtTracking()}); cg.reused_operands = .initEmpty(); try cg.inst_tracking.ensureUnusedCapacity(cg.gpa, 1); @@ -4339,7 +4309,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ ._, ._nc, .j, .@"0b", ._, ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f} {f}", .{ @tagName(air_tag), cg.typeOf(bin_op.lhs).fmt(pt), ops[0].tracking(cg), @@ -4351,7 +4321,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { else => unreachable, .add, .add_optimized 
=> {}, .add_wrap => res[0].wrapInt(cg) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} wrap {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} wrap {f} {f}", .{ @tagName(air_tag), cg.typeOf(bin_op.lhs).fmt(pt), res[0].tracking(cg), @@ -12917,7 +12887,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ ._, ._nc, .j, .@"0b", ._, ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f} {f}", .{ @tagName(air_tag), cg.typeOf(bin_op.lhs).fmt(pt), ops[0].tracking(cg), @@ -14947,7 +14917,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ ._, ._nc, .j, .@"0b", ._, ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f} {f}", .{ @tagName(air_tag), cg.typeOf(bin_op.lhs).fmt(pt), ops[0].tracking(cg), @@ -14959,7 +14929,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { else => unreachable, .sub, .sub_optimized => {}, .sub_wrap => res[0].wrapInt(cg) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} wrap {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} wrap {f} {f}", .{ @tagName(air_tag), cg.typeOf(bin_op.lhs).fmt(pt), res[0].tracking(cg), @@ -21794,7 +21764,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ ._, ._nc, .j, .@"0b", ._, ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f} {f}", .{ @tagName(air_tag), cg.typeOf(bin_op.lhs).fmt(pt), ops[0].tracking(cg), @@ -24587,7 +24557,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) 
InnerError!void { .{ ._, ._nc, .j, .@"0b", ._, ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f} {f}", .{ @tagName(air_tag), ty.fmt(pt), ops[0].tracking(cg), @@ -27287,7 +27257,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ ._, ._nc, .j, .@"0b", ._, ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f} {f}", .{ @tagName(air_tag), ty.fmt(pt), ops[0].tracking(cg), @@ -27296,7 +27266,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { else => |e| return e, }; res[0].wrapInt(cg) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} wrap {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} wrap {f} {f}", .{ @tagName(air_tag), cg.typeOf(bin_op.lhs).fmt(pt), res[0].tracking(cg), @@ -32512,7 +32482,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ .@"0:", ._, .mov, .memad(.dst0q, .add_size, -8), .tmp3q, ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f} {f}", .{ @tagName(air_tag), cg.typeOf(bin_op.lhs).fmt(pt), ops[0].tracking(cg), @@ -33606,7 +33576,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { assert(air_tag == .div_exact); res[0] = ops[0].divTruncInts(&ops[1], cg) catch |err| break :err err; }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f} {f}", .{ @tagName(air_tag), ty.fmt(pt), ops[0].tracking(cg), @@ -34837,7 +34807,7 @@ fn genBody(cg: *CodeGen, body: []const 
Air.Inst.Index) InnerError!void { } }) else err: { res[0] = ops[0].divTruncInts(&ops[1], cg) catch |err| break :err err; }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f} {f}", .{ @tagName(air_tag), ty.fmt(pt), ops[0].tracking(cg), @@ -36148,7 +36118,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { } }, } }, }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f} {f}", .{ @tagName(air_tag), cg.typeOf(bin_op.lhs).fmt(pt), ops[0].tracking(cg), @@ -37614,7 +37584,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ ._, ._nc, .j, .@"0b", ._, ._, ._ }, } }, } })) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f} {f}", .{ @tagName(air_tag), ty.fmt(pt), ops[0].tracking(cg), @@ -39248,7 +39218,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ ._, ._nc, .j, .@"0b", ._, ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f} {f}", .{ @tagName(air_tag), cg.typeOf(bin_op.lhs).fmt(pt), ops[0].tracking(cg), @@ -42077,7 +42047,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ ._, ._nc, .j, .@"0b", ._, ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f} {f}", .{ @tagName(air_tag), cg.typeOf(bin_op.lhs).fmt(pt), ops[0].tracking(cg), @@ -42191,7 +42161,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ ._, ._, 
.lea, .dst0p, .leai(.src0, .dst0), ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f} {f}", .{ @tagName(air_tag), cg.typeOf(bin_op.lhs).fmt(pt), ops[0].tracking(cg), @@ -42320,7 +42290,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ ._, ._, .lea, .dst0p, .leai(.src0, .dst0), ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f} {f}", .{ @tagName(air_tag), cg.typeOf(bin_op.lhs).fmt(pt), ops[0].tracking(cg), @@ -46485,7 +46455,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ ._, ._nc, .j, .@"0b", ._, ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f} {f}", .{ @tagName(air_tag), cg.typeOf(bin_op.lhs).fmt(pt), ops[0].tracking(cg), @@ -50644,7 +50614,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ ._, ._nc, .j, .@"0b", ._, ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f} {f}", .{ @tagName(air_tag), cg.typeOf(bin_op.lhs).fmt(pt), ops[0].tracking(cg), @@ -51493,7 +51463,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ ._, ._, .mov, .memad(.dst0q, .add_src0_size, -8), .tmp0q, ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f} {f}", .{ @tagName(air_tag), ty_pl.ty.toType().fmt(pt), ops[0].tracking(cg), @@ -52398,7 +52368,7 @@ fn genBody(cg: *CodeGen, body: 
[]const Air.Inst.Index) InnerError!void { .{ ._, ._, .mov, .memad(.dst0q, .add_src0_size, -8), .tmp0q, ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f} {f}", .{ @tagName(air_tag), ty_pl.ty.toType().fmt(pt), ops[0].tracking(cg), @@ -55995,7 +55965,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ ._, ._, .@"or", .tmp2q, .tmp1q, ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f} {f}", .{ @tagName(air_tag), ty_pl.ty.toType().fmt(pt), ops[0].tracking(cg), @@ -59340,7 +59310,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ ._, ._, .@"or", .tmp4q, .tmp5q, ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f} {f}", .{ @tagName(air_tag), ty_pl.ty.toType().fmt(pt), ops[0].tracking(cg), @@ -59735,7 +59705,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { } }, } }, }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f} {f}", .{ @tagName(air_tag), cg.typeOf(bin_op.lhs).fmt(pt), ops[0].tracking(cg), @@ -60298,7 +60268,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ ._, ._nz, .j, .@"0b", ._, ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f} {f} {f}", .{ @tagName(air_tag), cg.typeOf(bin_op.lhs).fmt(pt), cg.typeOf(bin_op.rhs).fmt(pt), @@ -60660,7 +60630,7 @@ fn genBody(cg: *CodeGen, body: 
[]const Air.Inst.Index) InnerError!void { .{ ._, ._ns, .j, .@"0b", ._, ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f} {f} {f}", .{ @tagName(air_tag), cg.typeOf(bin_op.lhs).fmt(pt), cg.typeOf(bin_op.rhs).fmt(pt), @@ -60672,7 +60642,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { switch (air_tag) { else => unreachable, .shl => res[0].wrapInt(cg) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} wrap {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} wrap {f} {f}", .{ @tagName(air_tag), cg.typeOf(bin_op.lhs).fmt(pt), res[0].tracking(cg), @@ -60839,7 +60809,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ ._, ._, .@"or", .dst0d, .tmp0d, ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f}", .{ @tagName(air_tag), cg.typeOf(bin_op.rhs).fmt(pt), ops[1].tracking(cg), @@ -64096,7 +64066,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ .@"0:", ._, .mov, .memad(.dst0q, .add_size, -8), .tmp1q, ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f} {f}", .{ @tagName(air_tag), lhs_ty.fmt(pt), ops[0].tracking(cg), @@ -65329,7 +65299,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ ._, ._b, .j, .@"0b", ._, ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f}", .{ @tagName(air_tag), ty_op.ty.toType().fmt(pt), ops[0].tracking(cg), @@ -68483,7 +68453,7 @@ fn 
genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ ._, ._nc, .j, .@"0b", ._, ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f}", .{ @tagName(air_tag), cg.typeOf(ty_op.operand).fmt(pt), ops[0].tracking(cg), @@ -68880,7 +68850,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ .@"0:", ._, .lea, .dst0d, .leasia(.dst0, .@"8", .tmp0, .add_8_src0_size), ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f}", .{ @tagName(air_tag), cg.typeOf(ty_op.operand).fmt(pt), ops[0].tracking(cg), @@ -69768,7 +69738,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ ._, ._ae, .j, .@"0b", ._, ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f}", .{ @tagName(air_tag), cg.typeOf(ty_op.operand).fmt(pt), ops[0].tracking(cg), @@ -70417,7 +70387,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ ._, ._ae, .j, .@"0b", ._, ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f}", .{ @tagName(air_tag), ty_op.ty.toType().fmt(pt), ops[0].tracking(cg), @@ -73519,7 +73489,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ ._, ._ae, .j, .@"0b", ._, ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f}", .{ @tagName(air_tag), ty_op.ty.toType().fmt(pt), ops[0].tracking(cg), @@ -74457,7 +74427,7 @@ fn 
genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ ._, ._nc, .j, .@"0b", ._, ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f}", .{ @tagName(air_tag), cg.typeOf(un_op).fmt(pt), ops[0].tracking(cg), @@ -75183,7 +75153,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { } }, } }, }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f}", .{ @tagName(air_tag), cg.typeOf(un_op).fmt(pt), ops[0].tracking(cg), @@ -76734,7 +76704,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ ._, ._nc, .j, .@"0b", ._, ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f}", .{ @tagName(air_tag), cg.typeOf(ty_op.operand).fmt(pt), ops[0].tracking(cg), @@ -77926,7 +77896,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { } }, } }, }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f}", .{ @tagName(air_tag), cg.typeOf(un_op).fmt(pt), ops[0].tracking(cg), @@ -78466,7 +78436,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ ._, ._nc, .j, .@"0b", ._, ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f}", .{ @tagName(air_tag), cg.typeOf(un_op).fmt(pt), ops[0].tracking(cg), @@ -78913,7 +78883,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { } else err: { res[0] = ops[0].cmpInts(cmp_op, &ops[1], cg) catch 
|err| break :err err; }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f} {f}", .{ @tagName(air_tag), cg.typeOf(bin_op.lhs).fmt(pt), ops[0].tracking(cg), @@ -79458,7 +79428,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .@"struct", .@"union" => { assert(ty.containerLayout(zcu) == .@"packed"); for (&ops) |*op| op.wrapInt(cg) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} wrap {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} wrap {f} {f}", .{ @tagName(air_tag), ty.fmt(pt), op.tracking(cg), @@ -79470,7 +79440,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { res[0] = ops[0].cmpInts(cmp_op, &ops[1], cg) catch |err| break :err err; }, }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f} {f}", .{ @tagName(air_tag), ty.fmt(pt), ops[0].tracking(cg), @@ -86551,7 +86521,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { } }, }), }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {s} {} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {s} {f} {f} {f}", .{ @tagName(air_tag), @tagName(vector_cmp.compareOperator()), cg.typeOf(vector_cmp.lhs).fmt(pt), @@ -88546,7 +88516,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ ._, ._ae, .j, .@"0b", ._, ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f} {f}", .{ @tagName(air_tag), ty_op.ty.toType().fmt(pt), cg.typeOf(ty_op.operand).fmt(pt), @@ -90221,7 +90191,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) 
InnerError!void { .{ ._, ._ae, .j, .@"0b", ._, ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f} {f}", .{ @tagName(air_tag), ty_op.ty.toType().fmt(pt), cg.typeOf(ty_op.operand).fmt(pt), @@ -94899,7 +94869,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ ._, ._nz, .j, .@"0b", ._, ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f} {f}", .{ @tagName(air_tag), dst_ty.fmt(pt), src_ty.fmt(pt), @@ -100565,7 +100535,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ ._, ._nz, .j, .@"0b", ._, ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f} {f}", .{ @tagName(air_tag), ty_op.ty.toType().fmt(pt), cg.typeOf(ty_op.operand).fmt(pt), @@ -111427,7 +111397,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ ._, ._ae, .j, .@"0b", ._, ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f} {f}", .{ @tagName(air_tag), ty_op.ty.toType().fmt(pt), cg.typeOf(ty_op.operand).fmt(pt), @@ -123446,7 +123416,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ ._, ._ae, .j, .@"0b", ._, ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f} {f}", .{ @tagName(air_tag), ty_op.ty.toType().fmt(pt), cg.typeOf(ty_op.operand).fmt(pt), @@ -157216,7 +157186,7 @@ fn genBody(cg: *CodeGen, body: []const 
Air.Inst.Index) InnerError!void { } }, } }, }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s}.{s} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s}.{s} {f} {f}", .{ @tagName(air_tag), @tagName(reduce.operation), cg.typeOf(reduce.operand).fmt(pt), @@ -157227,7 +157197,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { switch (reduce.operation) { .And, .Or, .Xor, .Min, .Max => {}, .Add, .Mul => if (cg.intInfo(res_ty)) |_| res[0].wrapInt(cg) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s}.{s} wrap {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s}.{s} wrap {f} {f}", .{ @tagName(air_tag), @tagName(reduce.operation), res_ty.fmt(pt), @@ -164510,7 +164480,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { } }, } }, }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s}.{s} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s}.{s} {f} {f}", .{ @tagName(air_tag), @tagName(reduce.operation), cg.typeOf(reduce.operand).fmt(pt), @@ -166307,7 +166277,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ ._, ._nz, .j, .@"0b", ._, ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f}", .{ @tagName(air_tag), ty_op.ty.toType().fmt(pt), ops[0].tracking(cg), @@ -166323,7 +166293,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { const bin_op = air_datas[@intFromEnum(inst)].bin_op; var ops = try cg.tempsFromOperands(inst, .{ bin_op.lhs, bin_op.rhs }) ++ .{undefined}; ops[2] = ops[0].getByteLen(cg) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f} {f} 
{f}", .{ @tagName(air_tag), cg.typeOf(bin_op.lhs).fmt(pt), cg.typeOf(bin_op.rhs).fmt(pt), @@ -166363,7 +166333,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { } }, }}, }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {} {} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f} {f} {f} {f}", .{ @tagName(air_tag), cg.typeOf(bin_op.lhs).fmt(pt), cg.typeOf(bin_op.rhs).fmt(pt), @@ -166464,7 +166434,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ ._, ._, .@"test", .src0p, .src0p, ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f}", .{ @tagName(air_tag), cg.typeOf(un_op).fmt(pt), ops[0].tracking(cg), @@ -166552,7 +166522,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ ._, ._, .call, .tmp0d, ._, ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f}", .{ @tagName(air_tag), cg.typeOf(un_op).fmt(pt), ops[0].tracking(cg), @@ -166654,7 +166624,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ ._, ._, .lea, .dst1d, .leai(.dst1, .tmp1), ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f}", .{ @tagName(air_tag), cg.typeOf(un_op).fmt(pt), ops[0].tracking(cg), @@ -166752,7 +166722,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ ._, ._, .@"test", .src0d, .src0d, ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f}", .{ 
@tagName(air_tag), ty_op.ty.toType().fmt(pt), ops[0].tracking(cg), @@ -166804,7 +166774,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { } } }, - .@"packed" => return cg.fail("failed to select {s} {}", .{ + .@"packed" => return cg.fail("failed to select {s} {f}", .{ @tagName(air_tag), agg_ty.fmt(pt), }), @@ -166825,7 +166795,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { elem_disp += @intCast(field_type.abiSize(zcu)); } }, - else => return cg.fail("failed to select {s} {}", .{ + else => return cg.fail("failed to select {s} {f}", .{ @tagName(air_tag), agg_ty.fmt(pt), }), @@ -168123,7 +168093,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ ._, ._ae, .j, .@"0b", ._, ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {} {} {} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f} {f} {f} {f}", .{ @tagName(air_tag), cg.typeOf(bin_op.lhs).fmt(pt), ops[0].tracking(cg), @@ -168223,7 +168193,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .{ ._, ._, .cmp, .src0d, .lea(.tmp1d), ._, ._ }, } }, } }) catch |err| switch (err) { - error.SelectFailed => return cg.fail("failed to select {s} {}", .{ + error.SelectFailed => return cg.fail("failed to select {s} {f}", .{ @tagName(air_tag), ops[0].tracking(cg), }), @@ -168242,12 +168212,12 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { .ref => { const result = try cg.allocRegOrMem(err_ret_trace_index, true); try cg.genCopy(.usize, result, ops[0].tracking(cg).short, .{}); - tracking_log.debug("{} => {} (birth)", .{ err_ret_trace_index, result }); + tracking_log.debug("{f} => {f} (birth)", .{ err_ret_trace_index, result }); cg.inst_tracking.putAssumeCapacityNoClobber(err_ret_trace_index, .init(result)); }, .temp => |temp_index| { const temp_tracking = temp_index.tracking(cg); - tracking_log.debug("{} => {} (birth)", 
.{ err_ret_trace_index, temp_tracking.short }); + tracking_log.debug("{f} => {f} (birth)", .{ err_ret_trace_index, temp_tracking.short }); cg.inst_tracking.putAssumeCapacityNoClobber(err_ret_trace_index, temp_tracking.*); assert(cg.reuseTemp(err_ret_trace_index, temp_index.toIndex(), temp_tracking)); }, @@ -168917,7 +168887,7 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void { try cg.resetTemps(@enumFromInt(0)); cg.checkInvariantsAfterAirInst(); } - verbose_tracking_log.debug("{}", .{cg.fmtTracking()}); + verbose_tracking_log.debug("{f}", .{cg.fmtTracking()}); } fn genLazy(cg: *CodeGen, lazy_sym: link.File.LazySymbol) InnerError!void { @@ -168927,7 +168897,7 @@ fn genLazy(cg: *CodeGen, lazy_sym: link.File.LazySymbol) InnerError!void { switch (ip.indexToKey(lazy_sym.ty)) { .enum_type => { const enum_ty: Type = .fromInterned(lazy_sym.ty); - wip_mir_log.debug("{}.@tagName:", .{enum_ty.fmt(pt)}); + wip_mir_log.debug("{f}.@tagName:", .{enum_ty.fmt(pt)}); const param_regs = abi.getCAbiIntParamRegs(.auto); const param_locks = cg.register_manager.lockRegsAssumeUnused(2, param_regs[0..2].*); @@ -168976,7 +168946,7 @@ fn genLazy(cg: *CodeGen, lazy_sym: link.File.LazySymbol) InnerError!void { }, .error_set_type => |error_set_type| { const err_ty: Type = .fromInterned(lazy_sym.ty); - wip_mir_log.debug("{}.@errorCast:", .{err_ty.fmt(pt)}); + wip_mir_log.debug("{f}.@errorCast:", .{err_ty.fmt(pt)}); const param_regs = abi.getCAbiIntParamRegs(.auto); const param_locks = cg.register_manager.lockRegsAssumeUnused(2, param_regs[0..2].*); @@ -169016,7 +168986,7 @@ fn genLazy(cg: *CodeGen, lazy_sym: link.File.LazySymbol) InnerError!void { try cg.asmOpOnly(.{ ._, .ret }); }, else => return cg.fail( - "TODO implement {s} for {}", + "TODO implement {s} for {f}", .{ @tagName(lazy_sym.kind), Type.fromInterned(lazy_sym.ty).fmt(pt) }, ), } @@ -169076,7 +169046,7 @@ fn finishAirResult(self: *CodeGen, inst: Air.Inst.Index, result: MCValue) void { .none, .dead, .unreach => 
{}, else => unreachable, // Why didn't the result die? } else { - tracking_log.debug("{} => {} (birth)", .{ inst, result }); + tracking_log.debug("{f} => {f} (birth)", .{ inst, result }); self.inst_tracking.putAssumeCapacityNoClobber(inst, .init(result)); // In some cases, an operand may be reused as the result. // If that operand died and was a register, it was freed by @@ -169226,7 +169196,7 @@ fn allocMemPtr(self: *CodeGen, inst: Air.Inst.Index) !FrameIndex { const val_ty = ptr_ty.childType(zcu); return self.allocFrameIndex(.init(.{ .size = std.math.cast(u32, val_ty.abiSize(zcu)) orelse { - return self.fail("type '{}' too big to fit into stack frame", .{val_ty.fmt(pt)}); + return self.fail("type '{f}' too big to fit into stack frame", .{val_ty.fmt(pt)}); }, .alignment = ptr_ty.ptrAlignment(zcu).max(.@"1"), })); @@ -169244,7 +169214,7 @@ fn allocRegOrMemAdvanced(self: *CodeGen, ty: Type, inst: ?Air.Inst.Index, reg_ok const pt = self.pt; const zcu = pt.zcu; const abi_size = std.math.cast(u32, ty.abiSize(zcu)) orelse { - return self.fail("type '{}' too big to fit into stack frame", .{ty.fmt(pt)}); + return self.fail("type '{f}' too big to fit into stack frame", .{ty.fmt(pt)}); }; if (reg_ok) need_mem: { @@ -169749,7 +169719,7 @@ fn airFpext(self: *CodeGen, inst: Air.Inst.Index) !void { ); } break :result dst_mcv; - } orelse return self.fail("TODO implement airFpext from {} to {}", .{ + } orelse return self.fail("TODO implement airFpext from {f} to {f}", .{ src_ty.fmt(pt), dst_ty.fmt(pt), }); return self.finishAir(inst, result, .{ ty_op.operand, .none, .none }); @@ -170004,7 +169974,7 @@ fn airIntCast(self: *CodeGen, inst: Air.Inst.Index) !void { ); break :result dst_mcv; - }) orelse return self.fail("TODO implement airIntCast from {} to {}", .{ + }) orelse return self.fail("TODO implement airIntCast from {f} to {f}", .{ src_ty.fmt(pt), dst_ty.fmt(pt), }); return self.finishAir(inst, result, .{ ty_op.operand, .none, .none }); @@ -170076,7 +170046,7 @@ fn 
airTrunc(self: *CodeGen, inst: Air.Inst.Index) !void { else => null, }, else => null, - }) orelse return self.fail("TODO implement airTrunc for {}", .{dst_ty.fmt(pt)}); + }) orelse return self.fail("TODO implement airTrunc for {f}", .{dst_ty.fmt(pt)}); const dst_info = dst_elem_ty.intInfo(zcu); const src_info = src_elem_ty.intInfo(zcu); @@ -170497,7 +170467,7 @@ fn airAddSat(self: *CodeGen, inst: Air.Inst.Index) !void { const bin_op = self.air.instructions.items(.data)[@intFromEnum(inst)].bin_op; const ty = self.typeOf(bin_op.lhs); if (ty.zigTypeTag(zcu) == .vector or ty.abiSize(zcu) > 8) return self.fail( - "TODO implement airAddSat for {}", + "TODO implement airAddSat for {f}", .{ty.fmt(pt)}, ); @@ -170575,7 +170545,7 @@ fn airSubSat(self: *CodeGen, inst: Air.Inst.Index) !void { const bin_op = self.air.instructions.items(.data)[@intFromEnum(inst)].bin_op; const ty = self.typeOf(bin_op.lhs); if (ty.zigTypeTag(zcu) == .vector or ty.abiSize(zcu) > 8) return self.fail( - "TODO implement airSubSat for {}", + "TODO implement airSubSat for {f}", .{ty.fmt(pt)}, ); @@ -170726,7 +170696,7 @@ fn airMulSat(self: *CodeGen, inst: Air.Inst.Index) !void { } if (ty.zigTypeTag(zcu) == .vector or ty.abiSize(zcu) > 8) return self.fail( - "TODO implement airMulSat for {}", + "TODO implement airMulSat for {f}", .{ty.fmt(pt)}, ); @@ -171020,7 +170990,7 @@ fn airMulWithOverflow(self: *CodeGen, inst: Air.Inst.Index) !void { const tuple_ty = self.typeOfIndex(inst); const dst_ty = self.typeOf(bin_op.lhs); const result: MCValue = switch (dst_ty.zigTypeTag(zcu)) { - .vector => return self.fail("TODO implement airMulWithOverflow for {}", .{dst_ty.fmt(pt)}), + .vector => return self.fail("TODO implement airMulWithOverflow for {f}", .{dst_ty.fmt(pt)}), .int => result: { const dst_info = dst_ty.intInfo(zcu); if (dst_info.bits > 128 and dst_info.signedness == .unsigned) { @@ -171373,7 +171343,7 @@ fn airMulWithOverflow(self: *CodeGen, inst: Air.Inst.Index) !void { else => { // For now, this is 
the only supported multiply that doesn't fit in a register. if (dst_info.bits > 128 or src_bits != 64) - return self.fail("TODO implement airWithOverflow from {} to {}", .{ + return self.fail("TODO implement airWithOverflow from {f} to {f}", .{ src_ty.fmt(pt), dst_ty.fmt(pt), }); @@ -171774,7 +171744,7 @@ fn airShlShrBinOp(self: *CodeGen, inst: Air.Inst.Index) !void { }, else => {}, } - return self.fail("TODO implement airShlShrBinOp for {}", .{lhs_ty.fmt(pt)}); + return self.fail("TODO implement airShlShrBinOp for {f}", .{lhs_ty.fmt(pt)}); }; return self.finishAir(inst, result, .{ bin_op.lhs, bin_op.rhs, .none }); } @@ -172034,7 +172004,7 @@ fn airUnwrapErrUnionErr(self: *CodeGen, inst: Air.Inst.Index) !void { .index = frame_addr.index, .off = frame_addr.off + @as(i32, @intCast(err_off)), } }, - else => return self.fail("TODO implement unwrap_err_err for {}", .{operand}), + else => return self.fail("TODO implement unwrap_err_err for {f}", .{operand}), } }; return self.finishAir(inst, result, .{ ty_op.operand, .none, .none }); @@ -172196,7 +172166,7 @@ fn genUnwrapErrUnionPayloadMir( else .{ .register = try self.copyToTmpRegister(payload_ty, result_mcv) }; }, - else => return self.fail("TODO implement genUnwrapErrUnionPayloadMir for {}", .{err_union}), + else => return self.fail("TODO implement genUnwrapErrUnionPayloadMir for {f}", .{err_union}), } }; @@ -172362,7 +172332,7 @@ fn airSliceLen(self: *CodeGen, inst: Air.Inst.Index) !void { .index = frame_addr.index, .off = frame_addr.off + 8, } }, - else => return self.fail("TODO implement slice_len for {}", .{src_mcv}), + else => return self.fail("TODO implement slice_len for {f}", .{src_mcv}), }; if (self.reuseOperand(inst, ty_op.operand, 0, src_mcv)) { switch (src_mcv) { @@ -172645,7 +172615,7 @@ fn airArrayElemVal(self: *CodeGen, inst: Air.Inst.Index) !void { }.to64(), ), }, - else => return self.fail("TODO airArrayElemVal for {s} of {}", .{ + else => return self.fail("TODO airArrayElemVal for {s} of {f}", .{ 
@tagName(array_mat_mcv), array_ty.fmt(pt), }), } @@ -172688,7 +172658,7 @@ fn airArrayElemVal(self: *CodeGen, inst: Air.Inst.Index) !void { .load_extern_func, .lea_extern_func, => try self.genSetReg(addr_reg, .usize, array_mcv.address(), .{}), - else => return self.fail("TODO airArrayElemVal_val for {s} of {}", .{ + else => return self.fail("TODO airArrayElemVal_val for {s} of {f}", .{ @tagName(array_mcv), array_ty.fmt(pt), }), } @@ -172881,7 +172851,7 @@ fn airGetUnionTag(self: *CodeGen, inst: Air.Inst.Index) !void { } return self.fail( - "TODO implement get_union_tag for ABI larger than 8 bytes and operand {}", + "TODO implement get_union_tag for ABI larger than 8 bytes and operand {f}", .{operand}, ); }, @@ -172893,7 +172863,7 @@ fn airGetUnionTag(self: *CodeGen, inst: Air.Inst.Index) !void { .register = registerAlias(result.register, @intCast(layout.tag_size)), }; }, - else => return self.fail("TODO implement get_union_tag for {}", .{operand}), + else => return self.fail("TODO implement get_union_tag for {f}", .{operand}), } }; @@ -172909,7 +172879,7 @@ fn airClz(self: *CodeGen, inst: Air.Inst.Index) !void { const dst_ty = self.typeOfIndex(inst); const src_ty = self.typeOf(ty_op.operand); - if (src_ty.zigTypeTag(zcu) == .vector) return self.fail("TODO implement airClz for {}", .{ + if (src_ty.zigTypeTag(zcu) == .vector) return self.fail("TODO implement airClz for {f}", .{ src_ty.fmt(pt), }); @@ -173105,7 +173075,7 @@ fn airCtz(self: *CodeGen, inst: Air.Inst.Index) !void { const dst_ty = self.typeOfIndex(inst); const src_ty = self.typeOf(ty_op.operand); - if (src_ty.zigTypeTag(zcu) == .vector) return self.fail("TODO implement airCtz for {}", .{ + if (src_ty.zigTypeTag(zcu) == .vector) return self.fail("TODO implement airCtz for {f}", .{ src_ty.fmt(pt), }); @@ -173277,7 +173247,7 @@ fn airPopCount(self: *CodeGen, inst: Air.Inst.Index) !void { const src_ty = self.typeOf(ty_op.operand); const src_abi_size: u32 = @intCast(src_ty.abiSize(zcu)); if 
(src_ty.zigTypeTag(zcu) == .vector or src_abi_size > 16) - return self.fail("TODO implement airPopCount for {}", .{src_ty.fmt(pt)}); + return self.fail("TODO implement airPopCount for {f}", .{src_ty.fmt(pt)}); const src_mcv = try self.resolveInst(ty_op.operand); const mat_src_mcv = switch (src_mcv) { @@ -173430,7 +173400,7 @@ fn genByteSwap( const has_movbe = self.hasFeature(.movbe); if (src_ty.zigTypeTag(zcu) == .vector) return self.fail( - "TODO implement genByteSwap for {}", + "TODO implement genByteSwap for {f}", .{src_ty.fmt(pt)}, ); @@ -173739,7 +173709,7 @@ fn floatSign(self: *CodeGen, inst: Air.Inst.Index, tag: Air.Inst.Tag, operand: A const result = result: { const scalar_bits = ty.scalarType(zcu).floatBits(self.target); if (scalar_bits == 80) { - if (ty.zigTypeTag(zcu) != .float) return self.fail("TODO implement floatSign for {}", .{ + if (ty.zigTypeTag(zcu) != .float) return self.fail("TODO implement floatSign for {f}", .{ ty.fmt(pt), }); @@ -173763,7 +173733,7 @@ fn floatSign(self: *CodeGen, inst: Air.Inst.Index, tag: Air.Inst.Tag, operand: A const abi_size: u32 = switch (ty.abiSize(zcu)) { 1...16 => 16, 17...32 => 32, - else => return self.fail("TODO implement floatSign for {}", .{ + else => return self.fail("TODO implement floatSign for {f}", .{ ty.fmt(pt), }), }; @@ -173822,7 +173792,7 @@ fn floatSign(self: *CodeGen, inst: Air.Inst.Index, tag: Air.Inst.Tag, operand: A .abs => .{ .v_pd, .@"and" }, else => unreachable, }, - 80 => return self.fail("TODO implement floatSign for {}", .{ty.fmt(pt)}), + 80 => return self.fail("TODO implement floatSign for {f}", .{ty.fmt(pt)}), else => unreachable, }, registerAlias(dst_reg, abi_size), @@ -173848,7 +173818,7 @@ fn floatSign(self: *CodeGen, inst: Air.Inst.Index, tag: Air.Inst.Tag, operand: A .abs => .{ ._pd, .@"and" }, else => unreachable, }, - 80 => return self.fail("TODO implement floatSign for {}", .{ty.fmt(pt)}), + 80 => return self.fail("TODO implement floatSign for {f}", .{ty.fmt(pt)}), else => 
unreachable, }, registerAlias(dst_reg, abi_size), @@ -173928,7 +173898,7 @@ fn genRoundLibcall(self: *CodeGen, ty: Type, src_mcv: MCValue, mode: bits.RoundM if (self.getRoundTag(ty)) |_| return .none; if (ty.zigTypeTag(zcu) != .float) - return self.fail("TODO implement genRound for {}", .{ty.fmt(pt)}); + return self.fail("TODO implement genRound for {f}", .{ty.fmt(pt)}); var sym_buf: ["__trunc?".len]u8 = undefined; return try self.genCall(.{ .extern_func = .{ @@ -174164,7 +174134,7 @@ fn airAbs(self: *CodeGen, inst: Air.Inst.Index) !void { }, .float => return self.floatSign(inst, .abs, ty_op.operand, ty), }, - }) orelse return self.fail("TODO implement airAbs for {}", .{ty.fmt(pt)}); + }) orelse return self.fail("TODO implement airAbs for {f}", .{ty.fmt(pt)}); const abi_size: u32 = @intCast(ty.abiSize(zcu)); const src_mcv = try self.resolveInst(ty_op.operand); @@ -174323,7 +174293,7 @@ fn airSqrt(self: *CodeGen, inst: Air.Inst.Index) !void { else => unreachable, }, else => unreachable, - }) orelse return self.fail("TODO implement airSqrt for {}", .{ty.fmt(pt)}); + }) orelse return self.fail("TODO implement airSqrt for {f}", .{ty.fmt(pt)}); switch (mir_tag[0]) { .v_ss, .v_sd => if (src_mcv.isBase()) try self.asmRegisterRegisterMemory( mir_tag, @@ -174481,7 +174451,7 @@ fn packedLoad(self: *CodeGen, dst_mcv: MCValue, ptr_ty: Type, ptr_mcv: MCValue) return; } - if (val_abi_size > 8) return self.fail("TODO implement packed load of {}", .{val_ty.fmt(pt)}); + if (val_abi_size > 8) return self.fail("TODO implement packed load of {f}", .{val_ty.fmt(pt)}); const limb_abi_size: u31 = @min(val_abi_size, 8); const limb_abi_bits = limb_abi_size * 8; @@ -174753,7 +174723,7 @@ fn packedStore(self: *CodeGen, ptr_ty: Type, ptr_mcv: MCValue, src_mcv: MCValue) limb_mem, registerAlias(tmp_reg, limb_abi_size), ); - } else return self.fail("TODO: implement packed store of {}", .{src_ty.fmt(pt)}); + } else return self.fail("TODO: implement packed store of {f}", .{src_ty.fmt(pt)}); } } @@ 
-174856,7 +174826,7 @@ fn genUnOp(self: *CodeGen, maybe_inst: ?Air.Inst.Index, tag: Air.Inst.Tag, src_a const zcu = pt.zcu; const src_ty = self.typeOf(src_air); if (src_ty.zigTypeTag(zcu) == .vector) - return self.fail("TODO implement genUnOp for {}", .{src_ty.fmt(pt)}); + return self.fail("TODO implement genUnOp for {f}", .{src_ty.fmt(pt)}); var src_mcv = try self.resolveInst(src_air); switch (src_mcv) { @@ -174943,7 +174913,7 @@ fn genUnOp(self: *CodeGen, maybe_inst: ?Air.Inst.Index, tag: Air.Inst.Tag, src_a fn genUnOpMir(self: *CodeGen, mir_tag: Mir.Inst.FixedTag, dst_ty: Type, dst_mcv: MCValue) !void { const pt = self.pt; const abi_size: u32 = @intCast(dst_ty.abiSize(pt.zcu)); - if (abi_size > 8) return self.fail("TODO implement {} for {}", .{ mir_tag, dst_ty.fmt(pt) }); + if (abi_size > 8) return self.fail("TODO implement {} for {f}", .{ mir_tag, dst_ty.fmt(pt) }); switch (dst_mcv) { .none, .unreach, @@ -175672,7 +175642,7 @@ fn genBinOp( }, floatLibcAbiSuffix(lhs_ty), }), - else => return self.fail("TODO implement genBinOp for {s} {}", .{ + else => return self.fail("TODO implement genBinOp for {s} {f}", .{ @tagName(air_tag), lhs_ty.fmt(pt), }), } catch unreachable; @@ -175785,7 +175755,7 @@ fn genBinOp( ); break :adjusted .{ .register = dst_reg }; }, - 80, 128 => return self.fail("TODO implement genBinOp for {s} of {}", .{ + 80, 128 => return self.fail("TODO implement genBinOp for {s} of {f}", .{ @tagName(air_tag), lhs_ty.fmt(pt), }), else => unreachable, @@ -175819,7 +175789,7 @@ fn genBinOp( if (sse_op and ((lhs_ty.scalarType(zcu).isRuntimeFloat() and lhs_ty.scalarType(zcu).floatBits(self.target) == 80) or lhs_ty.abiSize(zcu) > self.vectorSize(.float))) - return self.fail("TODO implement genBinOp for {s} {}", .{ @tagName(air_tag), lhs_ty.fmt(pt) }); + return self.fail("TODO implement genBinOp for {s} {f}", .{ @tagName(air_tag), lhs_ty.fmt(pt) }); const maybe_mask_reg = switch (air_tag) { else => null, @@ -176199,7 +176169,7 @@ fn genBinOp( } }, - else => 
return self.fail("TODO implement genBinOp for {s} {}", .{ + else => return self.fail("TODO implement genBinOp for {s} {f}", .{ @tagName(air_tag), lhs_ty.fmt(pt), }), } @@ -176953,7 +176923,7 @@ fn genBinOp( else => unreachable, }, }, - }) orelse return self.fail("TODO implement genBinOp for {s} {}", .{ + }) orelse return self.fail("TODO implement genBinOp for {s} {f}", .{ @tagName(air_tag), lhs_ty.fmt(pt), }); @@ -177086,7 +177056,7 @@ fn genBinOp( else => unreachable, }, else => unreachable, - }) orelse return self.fail("TODO implement genBinOp for {s} {}", .{ + }) orelse return self.fail("TODO implement genBinOp for {s} {f}", .{ @tagName(air_tag), lhs_ty.fmt(pt), }), mask_reg, @@ -177118,7 +177088,7 @@ fn genBinOp( else => unreachable, }, else => unreachable, - }) orelse return self.fail("TODO implement genBinOp for {s} {}", .{ + }) orelse return self.fail("TODO implement genBinOp for {s} {f}", .{ @tagName(air_tag), lhs_ty.fmt(pt), }), dst_reg, @@ -177154,7 +177124,7 @@ fn genBinOp( else => unreachable, }, else => unreachable, - }) orelse return self.fail("TODO implement genBinOp for {s} {}", .{ + }) orelse return self.fail("TODO implement genBinOp for {s} {f}", .{ @tagName(air_tag), lhs_ty.fmt(pt), }), mask_reg, @@ -177185,7 +177155,7 @@ fn genBinOp( else => unreachable, }, else => unreachable, - }) orelse return self.fail("TODO implement genBinOp for {s} {}", .{ + }) orelse return self.fail("TODO implement genBinOp for {s} {f}", .{ @tagName(air_tag), lhs_ty.fmt(pt), }), dst_reg, @@ -177215,7 +177185,7 @@ fn genBinOp( else => unreachable, }, else => unreachable, - }) orelse return self.fail("TODO implement genBinOp for {s} {}", .{ + }) orelse return self.fail("TODO implement genBinOp for {s} {f}", .{ @tagName(air_tag), lhs_ty.fmt(pt), }); try self.asmRegisterRegister(.{ mir_fixes, .@"and" }, dst_reg, mask_reg); @@ -178022,7 +177992,7 @@ fn airArg(self: *CodeGen, inst: Air.Inst.Index) !void { break :result dst_mcv; }, - else => return self.fail("TODO implement 
arg for {}", .{src_mcv}), + else => return self.fail("TODO implement arg for {f}", .{src_mcv}), } }; return self.finishAir(inst, result, .{ .none, .none, .none }); @@ -179079,7 +179049,7 @@ fn genCondBrMir(self: *CodeGen, ty: Type, mcv: MCValue) !Mir.Inst.Index { const reg = try self.copyToTmpRegister(ty, mcv); return self.genCondBrMir(ty, .{ .register = reg }); } - return self.fail("TODO implement condbr when condition is {} with abi larger than 8 bytes", .{mcv}); + return self.fail("TODO implement condbr when condition is {f} with abi larger than 8 bytes", .{mcv}); }, else => return self.fail("TODO implement condbr when condition is {s}", .{@tagName(mcv)}), } @@ -179166,7 +179136,7 @@ fn isErr(self: *CodeGen, maybe_inst: ?Air.Inst.Index, eu_ty: Type, eu_mcv: MCVal } }, .{ .immediate = 0 }, ), - else => return self.fail("TODO implement isErr for {}", .{eu_mcv}), + else => return self.fail("TODO implement isErr for {f}", .{eu_mcv}), } if (maybe_inst) |inst| self.eflags_inst = inst; @@ -180916,7 +180886,7 @@ fn moveStrategy(cg: *CodeGen, ty: Type, class: Register.Class, aligned: bool) !M }, .ip, .cr, .dr => {}, } - return cg.fail("TODO moveStrategy for {}", .{ty.fmt(pt)}); + return cg.fail("TODO moveStrategy for {f}", .{ty.fmt(pt)}); } const CopyOptions = struct { @@ -181048,7 +181018,7 @@ fn genCopy(self: *CodeGen, ty: Type, dst_mcv: MCValue, src_mcv: MCValue, opts: C break :src_info .{ .addr_reg = src_addr_reg, .addr_lock = src_addr_lock }; }, .air_ref => |src_ref| return self.genCopy(ty, dst_mcv, try self.resolveInst(src_ref), opts), - else => return self.fail("TODO implement genCopy for {s} of {}", .{ + else => return self.fail("TODO implement genCopy for {s} of {f}", .{ @tagName(src_mcv), ty.fmt(pt), }), }; @@ -181424,7 +181394,7 @@ fn genSetReg( 80 => null, else => unreachable, }, - }) orelse return self.fail("TODO implement genSetReg for {}", .{ty.fmt(pt)}), + }) orelse return self.fail("TODO implement genSetReg for {f}", .{ty.fmt(pt)}), dst_alias, 
registerAlias(src_reg, abi_size), ), @@ -181532,7 +181502,7 @@ fn genSetReg( assert(!ty.optionalReprIsPayload(zcu)); break :first_ty opt_child; }, - else => std.debug.panic("{s}: {}\n", .{ @src().fn_name, ty.fmt(pt) }), + else => std.debug.panic("{s}: {f}\n", .{ @src().fn_name, ty.fmt(pt) }), }); const first_size: u31 = @intCast(first_ty.abiSize(zcu)); const frame_size = std.math.ceilPowerOfTwoAssert(u32, abi_size); @@ -181854,7 +181824,7 @@ fn genSetMem( opts, ); }, - else => return self.fail("TODO implement genSetMem for {s} of {}", .{ + else => return self.fail("TODO implement genSetMem for {s} of {f}", .{ @tagName(src_mcv), ty.fmt(pt), }), }, @@ -182167,7 +182137,7 @@ fn airFloatFromInt(self: *CodeGen, inst: Air.Inst.Index) !void { 32, 64 => src_size > 8, else => unreachable, }) { - if (src_bits > 128) return self.fail("TODO implement airFloatFromInt from {} to {}", .{ + if (src_bits > 128) return self.fail("TODO implement airFloatFromInt from {f} to {f}", .{ src_ty.fmt(pt), dst_ty.fmt(pt), }); @@ -182209,7 +182179,7 @@ fn airFloatFromInt(self: *CodeGen, inst: Air.Inst.Index) !void { else => unreachable, }, else => null, - }) orelse return self.fail("TODO implement airFloatFromInt from {} to {}", .{ + }) orelse return self.fail("TODO implement airFloatFromInt from {f} to {f}", .{ src_ty.fmt(pt), dst_ty.fmt(pt), }); const dst_alias = dst_reg.to128(); @@ -182247,7 +182217,7 @@ fn airIntFromFloat(self: *CodeGen, inst: Air.Inst.Index) !void { 32, 64 => dst_size > 8, else => unreachable, }) { - if (dst_bits > 128) return self.fail("TODO implement airIntFromFloat from {} to {}", .{ + if (dst_bits > 128) return self.fail("TODO implement airIntFromFloat from {f} to {f}", .{ src_ty.fmt(pt), dst_ty.fmt(pt), }); @@ -182531,7 +182501,7 @@ fn atomicOp( else => null, }, else => unreachable, - }) orelse return self.fail("TODO implement atomicOp of {s} for {}", .{ + }) orelse return self.fail("TODO implement atomicOp of {s} for {f}", .{ @tagName(op), val_ty.fmt(pt), }); try 
self.genSetReg(sse_reg, val_ty, .{ .register = .rax }, .{}); @@ -183286,7 +183256,7 @@ fn airSplat(self: *CodeGen, inst: Air.Inst.Index) !void { else => unreachable, }, } - return self.fail("TODO implement airSplat for {}", .{vector_ty.fmt(pt)}); + return self.fail("TODO implement airSplat for {f}", .{vector_ty.fmt(pt)}); }; return self.finishAir(inst, result, .{ ty_op.operand, .none, .none }); } @@ -183322,12 +183292,12 @@ fn airSelect(self: *CodeGen, inst: Air.Inst.Index) !void { else try self.copyToTmpRegister(pred_ty, pred_mcv) else - return self.fail("TODO implement airSelect for {}", .{ty.fmt(pt)}), + return self.fail("TODO implement airSelect for {f}", .{ty.fmt(pt)}), else => unreachable, }, .register_mask => |pred_reg_mask| { if (pred_reg_mask.info.scalar.bitSize(self.target) != 8 * elem_abi_size) - return self.fail("TODO implement airSelect for {}", .{ty.fmt(pt)}); + return self.fail("TODO implement airSelect for {f}", .{ty.fmt(pt)}); const mask_reg: Register = if (need_xmm0 and pred_reg_mask.reg.id() != comptime Register.xmm0.id()) mask_reg: { try self.register_manager.getKnownReg(.xmm0, null); @@ -183401,7 +183371,7 @@ fn airSelect(self: *CodeGen, inst: Air.Inst.Index) !void { else null else - null) orelse return self.fail("TODO implement airSelect for {}", .{ty.fmt(pt)}); + null) orelse return self.fail("TODO implement airSelect for {f}", .{ty.fmt(pt)}); if (has_avx) { const rhs_alias = if (reuse_mcv.isRegister()) registerAlias(reuse_mcv.getReg().?, abi_size) @@ -183554,7 +183524,7 @@ fn airSelect(self: *CodeGen, inst: Air.Inst.Index) !void { else => unreachable, }), ); - } else return self.fail("TODO implement airSelect for {}", .{ty.fmt(pt)}); + } else return self.fail("TODO implement airSelect for {f}", .{ty.fmt(pt)}); const elem_bits: u16 = @intCast(elem_abi_size * 8); if (!pred_fits_in_elem) if (self.hasFeature(.ssse3)) { const mask_len = elem_abi_size * vec_len; @@ -183583,7 +183553,7 @@ fn airSelect(self: *CodeGen, inst: Air.Inst.Index) !void { 
mask_alias, mask_mem, ); - } else return self.fail("TODO implement airSelect for {}", .{ty.fmt(pt)}); + } else return self.fail("TODO implement airSelect for {f}", .{ty.fmt(pt)}); { const mask_elem_ty = try pt.intType(.unsigned, elem_bits); const mask_ty = try pt.vectorType(.{ .len = vec_len, .child = mask_elem_ty.toIntern() }); @@ -183706,7 +183676,7 @@ fn airSelect(self: *CodeGen, inst: Air.Inst.Index) !void { else => null, }, }, - }) orelse return self.fail("TODO implement airSelect for {}", .{ty.fmt(pt)}); + }) orelse return self.fail("TODO implement airSelect for {f}", .{ty.fmt(pt)}); if (has_avx) { const rhs_alias = if (rhs_mcv.isRegister()) registerAlias(rhs_mcv.getReg().?, abi_size) @@ -184551,7 +184521,7 @@ fn airShuffle(self: *CodeGen, inst: Air.Inst.Index) !void { } break :result null; - }) orelse return self.fail("TODO implement airShuffle from {} and {} to {} with {}", .{ + }) orelse return self.fail("TODO implement airShuffle from {f} and {f} to {f} with {f}", .{ lhs_ty.fmt(pt), rhs_ty.fmt(pt), dst_ty.fmt(pt), @@ -184800,7 +184770,7 @@ fn airMulAdd(self: *CodeGen, inst: Air.Inst.Index) !void { 32, 64 => !self.hasFeature(.fma), else => unreachable, }) { - if (ty.zigTypeTag(zcu) != .float) return self.fail("TODO implement airMulAdd for {}", .{ + if (ty.zigTypeTag(zcu) != .float) return self.fail("TODO implement airMulAdd for {f}", .{ ty.fmt(pt), }); @@ -184930,7 +184900,7 @@ fn airMulAdd(self: *CodeGen, inst: Air.Inst.Index) !void { else => unreachable, } else - unreachable) orelse return self.fail("TODO implement airMulAdd for {}", .{ty.fmt(pt)}); + unreachable) orelse return self.fail("TODO implement airMulAdd for {f}", .{ty.fmt(pt)}); var mops: [3]MCValue = undefined; for (order, mcvs) |mop_index, mcv| mops[mop_index - 1] = mcv; @@ -185130,7 +185100,7 @@ fn airVaArg(self: *CodeGen, inst: Air.Inst.Index) !void { assert(classes.len == 1); unreachable; }, - else => return self.fail("TODO implement c_va_arg for {} on SysV", .{promote_ty.fmt(pt)}), + else 
=> return self.fail("TODO implement c_va_arg for {f} on SysV", .{promote_ty.fmt(pt)}), } if (unused) break :result .unreach; @@ -185779,7 +185749,7 @@ fn splitType(self: *CodeGen, comptime parts_len: usize, ty: Type) ![parts_len]Ty for (parts) |part| part_sizes += part.abiSize(zcu); if (part_sizes == ty.abiSize(zcu)) return parts; }; - return self.fail("TODO implement splitType({d}, {})", .{ parts_len, ty.fmt(pt) }); + return self.fail("TODO implement splitType({d}, {f})", .{ parts_len, ty.fmt(pt) }); } /// Truncates the value in the register in place. @@ -186153,7 +186123,7 @@ const Temp = struct { cg.next_temp_index = @enumFromInt(@intFromEnum(new_temp_index) + 1); const mcv = temp.tracking(cg).short; switch (mcv) { - else => std.debug.panic("{s}: {}\n", .{ @src().fn_name, mcv }), + else => std.debug.panic("{s}: {f}\n", .{ @src().fn_name, mcv }), .register => |reg| { const new_reg = try cg.register_manager.allocReg(new_temp_index.toIndex(), abi.RegisterClass.gp); new_temp_index.tracking(cg).* = .init(.{ .register = new_reg }); @@ -186227,7 +186197,7 @@ const Temp = struct { const new_temp_index = cg.next_temp_index; cg.temp_type[@intFromEnum(new_temp_index)] = limb_ty; switch (temp.tracking(cg).short) { - else => |mcv| std.debug.panic("{s}: {}\n", .{ @src().fn_name, mcv }), + else => |mcv| std.debug.panic("{s}: {f}\n", .{ @src().fn_name, mcv }), .immediate => |imm| { assert(limb_index == 0); new_temp_index.tracking(cg).* = .init(.{ .immediate = imm }); @@ -186568,7 +186538,7 @@ const Temp = struct { }, else => {}, } - std.debug.panic("{s}: {} {}\n", .{ @src().fn_name, temp_tracking, overflow_temp_tracking }); + std.debug.panic("{s}: {f} {f}\n", .{ @src().fn_name, temp_tracking, overflow_temp_tracking }); } fn asMask(temp: Temp, info: MaskInfo, cg: *CodeGen) void { @@ -186658,7 +186628,7 @@ const Temp = struct { while (try ptr.toLea(cg)) {} const val_mcv = val.tracking(cg).short; switch (val_mcv) { - else => |mcv| std.debug.panic("{s}: {}\n", .{ @src().fn_name, 
mcv }), + else => |mcv| std.debug.panic("{s}: {f}\n", .{ @src().fn_name, mcv }), .register => |val_reg| try ptr.loadReg(val_ty, registerAlias( val_reg, @intCast(val_ty.abiSize(cg.pt.zcu)), @@ -186698,7 +186668,7 @@ const Temp = struct { {}) { const val_mcv = val.tracking(cg).short; switch (val_mcv) { - else => |mcv| std.debug.panic("{s}: {}\n", .{ @src().fn_name, mcv }), + else => |mcv| std.debug.panic("{s}: {f}\n", .{ @src().fn_name, mcv }), .undef => if (opts.safe) { var pat = try cg.tempInit(.u8, .{ .immediate = 0xaa }); var len = try cg.tempInit(.usize, .{ .immediate = val_ty.abiSize(cg.pt.zcu) }); @@ -186772,7 +186742,7 @@ const Temp = struct { assert(!val_ty.optionalReprIsPayload(cg.pt.zcu)); break :first_ty opt_child; }, - else => std.debug.panic("{s}: {}\n", .{ @src().fn_name, val_ty.fmt(cg.pt) }), + else => std.debug.panic("{s}: {f}\n", .{ @src().fn_name, val_ty.fmt(cg.pt) }), }); const first_size: u31 = @intCast(first_ty.abiSize(cg.pt.zcu)); try ptr.storeRegs(first_ty, &.{registerAlias(val_reg_ov.reg, first_size)}, cg); @@ -186804,7 +186774,7 @@ const Temp = struct { fn readTo(src: *Temp, val_ty: Type, val_mcv: MCValue, opts: AccessOptions, cg: *CodeGen) InnerError!void { switch (val_mcv) { - else => |mcv| std.debug.panic("{s}: {}\n", .{ @src().fn_name, mcv }), + else => |mcv| std.debug.panic("{s}: {f}\n", .{ @src().fn_name, mcv }), .register => |val_reg| try src.readReg(opts.disp, val_ty, registerAlias( val_reg, @intCast(cg.unalignedSize(val_ty)), @@ -186844,7 +186814,7 @@ const Temp = struct { {}) { const val_mcv = val.tracking(cg).short; switch (val_mcv) { - else => |mcv| std.debug.panic("{s}: {}\n", .{ @src().fn_name, mcv }), + else => |mcv| std.debug.panic("{s}: {f}\n", .{ @src().fn_name, mcv }), .none => {}, .undef => if (opts.safe) { var dst_ptr = try cg.tempInit(.usize, dst.tracking(cg).short.address().offset(opts.disp)); @@ -186905,7 +186875,7 @@ const Temp = struct { assert(!val_ty.optionalReprIsPayload(cg.pt.zcu)); break :first_ty opt_child; }, 
- else => std.debug.panic("{s}: {}\n", .{ @src().fn_name, val_ty.fmt(cg.pt) }), + else => std.debug.panic("{s}: {f}\n", .{ @src().fn_name, val_ty.fmt(cg.pt) }), }); const first_size: u31 = @intCast(first_ty.abiSize(cg.pt.zcu)); try dst.writeReg(opts.disp, first_ty, registerAlias(val_reg_ov.reg, first_size), cg); @@ -186960,7 +186930,7 @@ const Temp = struct { assert(src_regs.len == std.math.divCeil(u16, int_info.bits, 64) catch unreachable); break :part_ty .u64; } else part_ty: switch (ip.indexToKey(src_ty.toIntern())) { - else => std.debug.panic("{s}: {}\n", .{ @src().fn_name, src_ty.fmt(cg.pt) }), + else => std.debug.panic("{s}: {f}\n", .{ @src().fn_name, src_ty.fmt(cg.pt) }), .ptr_type => |ptr_info| { assert(ptr_info.flags.size == .slice); assert(src_regs.len == 2); @@ -186971,7 +186941,7 @@ const Temp = struct { break :part_ty try cg.pt.intType(.unsigned, @as(u16, 8) * @min(src_abi_size, 8)); }, .opt_type => |opt_child| switch (ip.indexToKey(opt_child)) { - else => std.debug.panic("{s}: {}\n", .{ @src().fn_name, src_ty.fmt(cg.pt) }), + else => std.debug.panic("{s}: {f}\n", .{ @src().fn_name, src_ty.fmt(cg.pt) }), .ptr_type => |ptr_info| { assert(ptr_info.flags.size == .slice); assert(src_regs.len == 2); @@ -191677,12 +191647,12 @@ const Temp = struct { break :result result; }, }; - tracking_log.debug("{} => {} (birth)", .{ inst, result }); + tracking_log.debug("{f} => {f} (birth)", .{ inst, result }); cg.inst_tracking.putAssumeCapacityNoClobber(inst, .init(result)); }, .temp => |temp_index| { const temp_tracking = temp_index.tracking(cg); - tracking_log.debug("{} => {} (birth)", .{ inst, temp_tracking.short }); + tracking_log.debug("{f} => {f} (birth)", .{ inst, temp_tracking.short }); cg.inst_tracking.putAssumeCapacityNoClobber(inst, .init(temp_tracking.short)); assert(cg.reuseTemp(inst, temp_index.toIndex(), temp_tracking)); }, @@ -191757,7 +191727,7 @@ fn resetTemps(cg: *CodeGen, from_index: Temp.Index) InnerError!void { const temp: Temp.Index = 
@enumFromInt(temp_index); if (temp.isValid(cg)) { any_valid = true; - tracking_log.err("failed to kill {}: {}", .{ + tracking_log.err("failed to kill {f}: {f}", .{ temp.toIndex(), cg.temp_type[temp_index].fmt(cg.pt), }); diff --git a/src/arch/x86_64/Emit.zig b/src/arch/x86_64/Emit.zig index 7ba8d460de..da15dc6bfb 100644 --- a/src/arch/x86_64/Emit.zig +++ b/src/arch/x86_64/Emit.zig @@ -707,7 +707,14 @@ fn encodeInst(emit: *Emit, lowered_inst: Instruction, reloc_info: []const RelocI const comp = emit.bin_file.comp; const gpa = comp.gpa; const start_offset: u32 = @intCast(emit.code.items.len); - try lowered_inst.encode(emit.code.writer(gpa), .{}); + { + var aw: std.io.Writer.Allocating = .fromArrayList(gpa, emit.code); + defer emit.code.* = aw.toArrayList(); + lowered_inst.encode(&aw.writer, .{}) catch |err| switch (err) { + error.WriteFailed => return error.OutOfMemory, + else => |e| return e, + }; + } const end_offset: u32 = @intCast(emit.code.items.len); for (reloc_info) |reloc| switch (reloc.target.type) { .inst => { diff --git a/src/arch/x86_64/Encoding.zig b/src/arch/x86_64/Encoding.zig index 1b7d5ed3d4..9b6f6bac5c 100644 --- a/src/arch/x86_64/Encoding.zig +++ b/src/arch/x86_64/Encoding.zig @@ -158,15 +158,7 @@ pub fn modRmExt(encoding: Encoding) u3 { }; } -pub fn format( - encoding: Encoding, - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = options; - _ = fmt; - +pub fn format(encoding: Encoding, writer: *std.io.Writer) std.io.Writer.Error!void { var opc = encoding.opcode(); if (encoding.data.mode.isVex()) { try writer.writeAll("VEX."); @@ -187,7 +179,7 @@ pub fn format( }, } - try writer.print(".{}", .{std.fmt.fmtSliceHexUpper(opc[0 .. opc.len - 1])}); + try writer.print(".{X}", .{opc[0 .. 
opc.len - 1]}); opc = opc[opc.len - 1 ..]; try writer.writeAll(".W"); @@ -1014,19 +1006,28 @@ pub const Feature = enum { }; fn estimateInstructionLength(prefix: Prefix, encoding: Encoding, ops: []const Operand) usize { - var inst = Instruction{ + var inst: Instruction = .{ .prefix = prefix, .encoding = encoding, .ops = @splat(.none), }; @memcpy(inst.ops[0..ops.len], ops); - var cwriter = std.io.countingWriter(std.io.null_writer); - inst.encode(cwriter.writer(), .{ + // By using a buffer with maximum length of encoded instruction, we can use + // the `end` field of the Writer for the count. + var buf: [16]u8 = undefined; + var trash: std.io.Writer.Discarding = .init(&buf); + inst.encode(&trash.writer, .{ .allow_frame_locs = true, .allow_symbols = true, - }) catch unreachable; // Not allowed to fail here unless OOM. - return @as(usize, @intCast(cwriter.bytes_written)); + }) catch { + // Since the function signature for encode() does not mention under what + // conditions it can fail, I have changed `unreachable` to `@panic` here. + // This is a TODO item since it indicates this function + // (`estimateInstructionLength`) has the wrong function signature. 
+ @panic("unexpected failure to encode"); + }; + return trash.writer.end; } const mnemonic_to_encodings_map = init: { diff --git a/src/arch/x86_64/bits.zig b/src/arch/x86_64/bits.zig index 18e7a364cb..16361acb4d 100644 --- a/src/arch/x86_64/bits.zig +++ b/src/arch/x86_64/bits.zig @@ -727,23 +727,6 @@ pub const FrameIndex = enum(u32) { pub fn isNamed(fi: FrameIndex) bool { return @intFromEnum(fi) < named_count; } - - pub fn format( - fi: FrameIndex, - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { - try writer.writeAll("FrameIndex"); - if (fi.isNamed()) { - try writer.writeByte('.'); - try writer.writeAll(@tagName(fi)); - } else { - try writer.writeByte('('); - try std.fmt.formatType(@intFromEnum(fi), fmt, options, writer, 0); - try writer.writeByte(')'); - } - } }; pub const FrameAddr = struct { index: FrameIndex, off: i32 = 0 }; @@ -844,12 +827,7 @@ pub const Memory = struct { }; } - pub fn format( - s: Size, - comptime _: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { + pub fn format(s: Size, writer: *std.io.Writer) std.io.Writer.Error!void { if (s == .none) return; try writer.writeAll(@tagName(s)); switch (s) { @@ -914,12 +892,7 @@ pub const Immediate = union(enum) { return .{ .signed = x }; } - pub fn format( - imm: Immediate, - comptime _: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { + pub fn format(imm: Immediate, writer: *std.io.Writer) std.io.Writer.Error!void { switch (imm) { inline else => |int| try writer.print("{d}", .{int}), .nav => |nav_off| try writer.print("Nav({d}) + {d}", .{ @intFromEnum(nav_off.nav), nav_off.off }), diff --git a/src/arch/x86_64/encoder.zig b/src/arch/x86_64/encoder.zig index 8d07dce83a..43d23af5fc 100644 --- a/src/arch/x86_64/encoder.zig +++ b/src/arch/x86_64/encoder.zig @@ -3,6 +3,7 @@ const assert = std.debug.assert; const log = std.log.scoped(.x86_64_encoder); const math = 
std.math; const testing = std.testing; +const Writer = std.io.Writer; const bits = @import("bits.zig"); const Encoding = @import("Encoding.zig"); @@ -226,101 +227,81 @@ pub const Instruction = struct { }; } - fn format( - op: Operand, - comptime unused_format_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = op; - _ = unused_format_string; - _ = options; - _ = writer; - @compileError("do not format Operand directly; use fmt() instead"); - } - - const FormatContext = struct { + const Format = struct { op: Operand, enc_op: Encoding.Op, + + fn default(f: Format, w: *Writer) Writer.Error!void { + const op = f.op; + const enc_op = f.enc_op; + switch (op) { + .none => {}, + .reg => |reg| try w.writeAll(@tagName(reg)), + .mem => |mem| switch (mem) { + .rip => |rip| { + try w.print("{f} [rip", .{rip.ptr_size}); + if (rip.disp != 0) try w.print(" {c} 0x{x}", .{ + @as(u8, if (rip.disp < 0) '-' else '+'), + @abs(rip.disp), + }); + try w.writeByte(']'); + }, + .sib => |sib| { + try w.print("{f} ", .{sib.ptr_size}); + + if (mem.isSegmentRegister()) { + return w.print("{s}:0x{x}", .{ @tagName(sib.base.reg), sib.disp }); + } + + try w.writeByte('['); + + var any = true; + switch (sib.base) { + .none => any = false, + .reg => |reg| try w.print("{s}", .{@tagName(reg)}), + .frame => |frame_index| try w.print("{}", .{frame_index}), + .table => try w.print("Table", .{}), + .rip_inst => |inst_index| try w.print("RipInst({d})", .{inst_index}), + .nav => |nav| try w.print("Nav({d})", .{@intFromEnum(nav)}), + .uav => |uav| try w.print("Uav({d})", .{@intFromEnum(uav.val)}), + .lazy_sym => |lazy_sym| try w.print("LazySym({s}, {d})", .{ + @tagName(lazy_sym.kind), + @intFromEnum(lazy_sym.ty), + }), + .extern_func => |extern_func| try w.print("ExternFunc({d})", .{@intFromEnum(extern_func)}), + } + if (mem.scaleIndex()) |si| { + if (any) try w.writeAll(" + "); + try w.print("{s} * {d}", .{ @tagName(si.index), si.scale }); + any = true; + } + if (sib.disp 
!= 0 or !any) { + if (any) + try w.print(" {c} ", .{@as(u8, if (sib.disp < 0) '-' else '+')}) + else if (sib.disp < 0) + try w.writeByte('-'); + try w.print("0x{x}", .{@abs(sib.disp)}); + any = true; + } + + try w.writeByte(']'); + }, + .moffs => |moffs| try w.print("{s}:0x{x}", .{ + @tagName(moffs.seg), + moffs.offset, + }), + }, + .imm => |imm| if (enc_op.isSigned()) { + const imms = imm.asSigned(enc_op.immBitSize()); + if (imms < 0) try w.writeByte('-'); + try w.print("0x{x}", .{@abs(imms)}); + } else try w.print("0x{x}", .{imm.asUnsigned(enc_op.immBitSize())}), + .bytes => unreachable, + } + } }; - fn fmtContext( - ctx: FormatContext, - comptime unused_format_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { - _ = unused_format_string; - _ = options; - const op = ctx.op; - const enc_op = ctx.enc_op; - switch (op) { - .none => {}, - .reg => |reg| try writer.writeAll(@tagName(reg)), - .mem => |mem| switch (mem) { - .rip => |rip| { - try writer.print("{} [rip", .{rip.ptr_size}); - if (rip.disp != 0) try writer.print(" {c} 0x{x}", .{ - @as(u8, if (rip.disp < 0) '-' else '+'), - @abs(rip.disp), - }); - try writer.writeByte(']'); - }, - .sib => |sib| { - try writer.print("{} ", .{sib.ptr_size}); - - if (mem.isSegmentRegister()) { - return writer.print("{s}:0x{x}", .{ @tagName(sib.base.reg), sib.disp }); - } - - try writer.writeByte('['); - - var any = true; - switch (sib.base) { - .none => any = false, - .reg => |reg| try writer.print("{s}", .{@tagName(reg)}), - .frame => |frame_index| try writer.print("{}", .{frame_index}), - .table => try writer.print("Table", .{}), - .rip_inst => |inst_index| try writer.print("RipInst({d})", .{inst_index}), - .nav => |nav| try writer.print("Nav({d})", .{@intFromEnum(nav)}), - .uav => |uav| try writer.print("Uav({d})", .{@intFromEnum(uav.val)}), - .lazy_sym => |lazy_sym| try writer.print("LazySym({s}, {d})", .{ - @tagName(lazy_sym.kind), - @intFromEnum(lazy_sym.ty), - }), - 
.extern_func => |extern_func| try writer.print("ExternFunc({d})", .{@intFromEnum(extern_func)}), - } - if (mem.scaleIndex()) |si| { - if (any) try writer.writeAll(" + "); - try writer.print("{s} * {d}", .{ @tagName(si.index), si.scale }); - any = true; - } - if (sib.disp != 0 or !any) { - if (any) - try writer.print(" {c} ", .{@as(u8, if (sib.disp < 0) '-' else '+')}) - else if (sib.disp < 0) - try writer.writeByte('-'); - try writer.print("0x{x}", .{@abs(sib.disp)}); - any = true; - } - - try writer.writeByte(']'); - }, - .moffs => |moffs| try writer.print("{s}:0x{x}", .{ - @tagName(moffs.seg), - moffs.offset, - }), - }, - .imm => |imm| if (enc_op.isSigned()) { - const imms = imm.asSigned(enc_op.immBitSize()); - if (imms < 0) try writer.writeByte('-'); - try writer.print("0x{x}", .{@abs(imms)}); - } else try writer.print("0x{x}", .{imm.asUnsigned(enc_op.immBitSize())}), - .bytes => unreachable, - } - } - - pub fn fmt(op: Operand, enc_op: Encoding.Op) std.fmt.Formatter(fmtContext) { + pub fn fmt(op: Operand, enc_op: Encoding.Op) std.fmt.Formatter(Format, Format.default) { return .{ .data = .{ .op = op, .enc_op = enc_op } }; } }; @@ -361,7 +342,7 @@ pub const Instruction = struct { }, }, }; - log.debug("selected encoding: {}", .{encoding}); + log.debug("selected encoding: {f}", .{encoding}); var inst: Instruction = .{ .prefix = prefix, @@ -372,30 +353,22 @@ pub const Instruction = struct { return inst; } - pub fn format( - inst: Instruction, - comptime unused_format_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { - _ = unused_format_string; - _ = options; + pub fn format(inst: Instruction, w: *Writer) Writer.Error!void { switch (inst.prefix) { .none, .directive => {}, - else => try writer.print("{s} ", .{@tagName(inst.prefix)}), + else => try w.print("{s} ", .{@tagName(inst.prefix)}), } - try writer.print("{s}", .{@tagName(inst.encoding.mnemonic)}); + try w.print("{s}", .{@tagName(inst.encoding.mnemonic)}); 
for (inst.ops, inst.encoding.data.ops, 0..) |op, enc, i| { if (op == .none) break; - if (i > 0) try writer.writeByte(','); - try writer.writeByte(' '); - try writer.print("{}", .{op.fmt(enc)}); + if (i > 0) try w.writeByte(','); + try w.print(" {f}", .{op.fmt(enc)}); } } - pub fn encode(inst: Instruction, writer: anytype, comptime opts: Options) !void { + pub fn encode(inst: Instruction, w: *Writer, comptime opts: Options) !void { assert(inst.prefix != .directive); - const encoder = Encoder(@TypeOf(writer), opts){ .writer = writer }; + const encoder: Encoder(opts) = .{ .w = w }; const enc = inst.encoding; const data = enc.data; @@ -801,9 +774,9 @@ pub const LegacyPrefixes = packed struct { pub const Options = struct { allow_frame_locs: bool = false, allow_symbols: bool = false }; -fn Encoder(comptime T: type, comptime opts: Options) type { +fn Encoder(comptime opts: Options) type { return struct { - writer: T, + w: *Writer, const Self = @This(); pub const options = opts; @@ -818,31 +791,31 @@ fn Encoder(comptime T: type, comptime opts: Options) type { // Hopefully this path isn't taken very often, so we'll do it the slow way for now // LOCK - if (prefixes.prefix_f0) try self.writer.writeByte(0xf0); + if (prefixes.prefix_f0) try self.w.writeByte(0xf0); // REPNZ, REPNE, REP, Scalar Double-precision - if (prefixes.prefix_f2) try self.writer.writeByte(0xf2); + if (prefixes.prefix_f2) try self.w.writeByte(0xf2); // REPZ, REPE, REP, Scalar Single-precision - if (prefixes.prefix_f3) try self.writer.writeByte(0xf3); + if (prefixes.prefix_f3) try self.w.writeByte(0xf3); // CS segment override or Branch not taken - if (prefixes.prefix_2e) try self.writer.writeByte(0x2e); + if (prefixes.prefix_2e) try self.w.writeByte(0x2e); // DS segment override - if (prefixes.prefix_36) try self.writer.writeByte(0x36); + if (prefixes.prefix_36) try self.w.writeByte(0x36); // ES segment override - if (prefixes.prefix_26) try self.writer.writeByte(0x26); + if (prefixes.prefix_26) try 
self.w.writeByte(0x26); // FS segment override - if (prefixes.prefix_64) try self.writer.writeByte(0x64); + if (prefixes.prefix_64) try self.w.writeByte(0x64); // GS segment override - if (prefixes.prefix_65) try self.writer.writeByte(0x65); + if (prefixes.prefix_65) try self.w.writeByte(0x65); // Branch taken - if (prefixes.prefix_3e) try self.writer.writeByte(0x3e); + if (prefixes.prefix_3e) try self.w.writeByte(0x3e); // Operand size override - if (prefixes.prefix_66) try self.writer.writeByte(0x66); + if (prefixes.prefix_66) try self.w.writeByte(0x66); // Address size override - if (prefixes.prefix_67) try self.writer.writeByte(0x67); + if (prefixes.prefix_67) try self.w.writeByte(0x67); } } @@ -850,7 +823,7 @@ fn Encoder(comptime T: type, comptime opts: Options) type { /// /// Note that this flag is overridden by REX.W, if both are present. pub fn prefix16BitMode(self: Self) !void { - try self.writer.writeByte(0x66); + try self.w.writeByte(0x66); } /// Encodes a REX prefix byte given all the fields @@ -869,7 +842,7 @@ fn Encoder(comptime T: type, comptime opts: Options) type { if (fields.x) byte |= 0b0010; if (fields.b) byte |= 0b0001; - try self.writer.writeByte(byte); + try self.w.writeByte(byte); } /// Encodes a VEX prefix given all the fields @@ -877,24 +850,24 @@ fn Encoder(comptime T: type, comptime opts: Options) type { /// See struct `Vex` for a description of each field. 
pub fn vex(self: Self, fields: Vex) !void { if (fields.is3Byte()) { - try self.writer.writeByte(0b1100_0100); + try self.w.writeByte(0b1100_0100); - try self.writer.writeByte( + try self.w.writeByte( @as(u8, ~@intFromBool(fields.r)) << 7 | @as(u8, ~@intFromBool(fields.x)) << 6 | @as(u8, ~@intFromBool(fields.b)) << 5 | @as(u8, @intFromEnum(fields.m)) << 0, ); - try self.writer.writeByte( + try self.w.writeByte( @as(u8, @intFromBool(fields.w)) << 7 | @as(u8, ~@as(u4, @intCast(fields.v.enc()))) << 3 | @as(u8, @intFromBool(fields.l)) << 2 | @as(u8, @intFromEnum(fields.p)) << 0, ); } else { - try self.writer.writeByte(0b1100_0101); - try self.writer.writeByte( + try self.w.writeByte(0b1100_0101); + try self.w.writeByte( @as(u8, ~@intFromBool(fields.r)) << 7 | @as(u8, ~@as(u4, @intCast(fields.v.enc()))) << 3 | @as(u8, @intFromBool(fields.l)) << 2 | @@ -909,7 +882,7 @@ fn Encoder(comptime T: type, comptime opts: Options) type { /// Encodes a 1 byte opcode pub fn opcode_1byte(self: Self, opcode: u8) !void { - try self.writer.writeByte(opcode); + try self.w.writeByte(opcode); } /// Encodes a 2 byte opcode @@ -918,7 +891,7 @@ fn Encoder(comptime T: type, comptime opts: Options) type { /// /// encoder.opcode_2byte(0x0f, 0xaf); pub fn opcode_2byte(self: Self, prefix: u8, opcode: u8) !void { - try self.writer.writeAll(&.{ prefix, opcode }); + try self.w.writeAll(&.{ prefix, opcode }); } /// Encodes a 3 byte opcode @@ -927,7 +900,7 @@ fn Encoder(comptime T: type, comptime opts: Options) type { /// /// encoder.opcode_3byte(0xf2, 0x0f, 0x10); pub fn opcode_3byte(self: Self, prefix_1: u8, prefix_2: u8, opcode: u8) !void { - try self.writer.writeAll(&.{ prefix_1, prefix_2, opcode }); + try self.w.writeAll(&.{ prefix_1, prefix_2, opcode }); } /// Encodes a 1 byte opcode with a reg field @@ -935,7 +908,7 @@ fn Encoder(comptime T: type, comptime opts: Options) type { /// Remember to add a REX prefix byte if reg is extended! 
pub fn opcode_withReg(self: Self, opcode: u8, reg: u3) !void { assert(opcode & 0b111 == 0); - try self.writer.writeByte(opcode | reg); + try self.w.writeByte(opcode | reg); } // ------ @@ -946,7 +919,7 @@ fn Encoder(comptime T: type, comptime opts: Options) type { /// /// Remember to add a REX prefix byte if reg or rm are extended! pub fn modRm(self: Self, mod: u2, reg_or_opx: u3, rm: u3) !void { - try self.writer.writeByte(@as(u8, mod) << 6 | @as(u8, reg_or_opx) << 3 | rm); + try self.w.writeByte(@as(u8, mod) << 6 | @as(u8, reg_or_opx) << 3 | rm); } /// Construct a ModR/M byte using direct r/m addressing @@ -1032,7 +1005,7 @@ fn Encoder(comptime T: type, comptime opts: Options) type { /// /// Remember to add a REX prefix byte if index or base are extended! pub fn sib(self: Self, scale: u2, index: u3, base: u3) !void { - try self.writer.writeByte(@as(u8, scale) << 6 | @as(u8, index) << 3 | base); + try self.w.writeByte(@as(u8, scale) << 6 | @as(u8, index) << 3 | base); } /// Construct a SIB byte with scale * index + base, no frills. @@ -1124,42 +1097,42 @@ fn Encoder(comptime T: type, comptime opts: Options) type { /// /// It is sign-extended to 64 bits by the cpu. pub fn disp8(self: Self, disp: i8) !void { - try self.writer.writeByte(@as(u8, @bitCast(disp))); + try self.w.writeByte(@as(u8, @bitCast(disp))); } /// Encode an 32 bit displacement /// /// It is sign-extended to 64 bits by the cpu. pub fn disp32(self: Self, disp: i32) !void { - try self.writer.writeInt(i32, disp, .little); + try self.w.writeInt(i32, disp, .little); } /// Encode an 8 bit immediate /// /// It is sign-extended to 64 bits by the cpu. pub fn imm8(self: Self, imm: u8) !void { - try self.writer.writeByte(imm); + try self.w.writeByte(imm); } /// Encode an 16 bit immediate /// /// It is sign-extended to 64 bits by the cpu. 
pub fn imm16(self: Self, imm: u16) !void { - try self.writer.writeInt(u16, imm, .little); + try self.w.writeInt(u16, imm, .little); } /// Encode an 32 bit immediate /// /// It is sign-extended to 64 bits by the cpu. pub fn imm32(self: Self, imm: u32) !void { - try self.writer.writeInt(u32, imm, .little); + try self.w.writeInt(u32, imm, .little); } /// Encode an 64 bit immediate /// /// It is sign-extended to 64 bits by the cpu. pub fn imm64(self: Self, imm: u64) !void { - try self.writer.writeInt(u64, imm, .little); + try self.w.writeInt(u64, imm, .little); } }; } @@ -1205,9 +1178,9 @@ pub const Vex = struct { fn expectEqualHexStrings(expected: []const u8, given: []const u8, assembly: []const u8) !void { assert(expected.len > 0); if (std.mem.eql(u8, expected, given)) return; - const expected_fmt = try std.fmt.allocPrint(testing.allocator, "{x}", .{std.fmt.fmtSliceHexLower(expected)}); + const expected_fmt = try std.fmt.allocPrint(testing.allocator, "{x}", .{expected}); defer testing.allocator.free(expected_fmt); - const given_fmt = try std.fmt.allocPrint(testing.allocator, "{x}", .{std.fmt.fmtSliceHexLower(given)}); + const given_fmt = try std.fmt.allocPrint(testing.allocator, "{x}", .{given}); defer testing.allocator.free(given_fmt); const idx = std.mem.indexOfDiff(u8, expected_fmt, given_fmt).?; const padding = try testing.allocator.alloc(u8, idx + 5); @@ -2217,10 +2190,10 @@ const Assembler = struct { }; } - pub fn assemble(as: *Assembler, writer: anytype) !void { + pub fn assemble(as: *Assembler, w: *Writer) !void { while (try as.next()) |parsed_inst| { const inst: Instruction = try .new(.none, parsed_inst.mnemonic, &parsed_inst.ops); - try inst.encode(writer, .{}); + try inst.encode(w, .{}); } } diff --git a/src/codegen.zig b/src/codegen.zig index 8569a491b6..9bddc51963 100644 --- a/src/codegen.zig +++ b/src/codegen.zig @@ -237,7 +237,7 @@ pub fn generateLazySymbol( const target = &comp.root_mod.resolved_target.result; const endian = target.cpu.arch.endian(); 
- log.debug("generateLazySymbol: kind = {s}, ty = {}", .{ + log.debug("generateLazySymbol: kind = {s}, ty = {f}", .{ @tagName(lazy_sym.kind), Type.fromInterned(lazy_sym.ty).fmt(pt), }); @@ -277,7 +277,7 @@ pub fn generateLazySymbol( code.appendAssumeCapacity(0); } } else { - return zcu.codegenFailType(lazy_sym.ty, "TODO implement generateLazySymbol for {s} {}", .{ + return zcu.codegenFailType(lazy_sym.ty, "TODO implement generateLazySymbol for {s} {f}", .{ @tagName(lazy_sym.kind), Type.fromInterned(lazy_sym.ty).fmt(pt), }); } @@ -310,7 +310,7 @@ pub fn generateSymbol( const target = zcu.getTarget(); const endian = target.cpu.arch.endian(); - log.debug("generateSymbol: val = {}", .{val.fmtValue(pt)}); + log.debug("generateSymbol: val = {f}", .{val.fmtValue(pt)}); if (val.isUndefDeep(zcu)) { const abi_size = math.cast(usize, ty.abiSize(zcu)) orelse return error.Overflow; @@ -767,7 +767,7 @@ fn lowerUavRef( const uav_ty = Type.fromInterned(ip.typeOf(uav_val)); const is_fn_body = uav_ty.zigTypeTag(zcu) == .@"fn"; - log.debug("lowerUavRef: ty = {}", .{uav_ty.fmt(pt)}); + log.debug("lowerUavRef: ty = {f}", .{uav_ty.fmt(pt)}); try code.ensureUnusedCapacity(gpa, ptr_width_bytes); if (!is_fn_body and !uav_ty.hasRuntimeBits(zcu)) { @@ -913,7 +913,7 @@ pub fn genNavRef( const zcu = pt.zcu; const ip = &zcu.intern_pool; const nav = ip.getNav(nav_index); - log.debug("genNavRef({})", .{nav.fqn.fmt(ip)}); + log.debug("genNavRef({f})", .{nav.fqn.fmt(ip)}); const lib_name, const linkage, const is_threadlocal = if (nav.getExtern(ip)) |e| .{ e.lib_name, e.linkage, e.is_threadlocal and zcu.comp.config.any_non_single_threaded } @@ -1065,7 +1065,7 @@ pub fn lowerValue(pt: Zcu.PerThread, val: Value, target: *const std.Target) Allo const ip = &zcu.intern_pool; const ty = val.typeOf(zcu); - log.debug("lowerValue(@as({}, {}))", .{ ty.fmt(pt), val.fmtValue(pt) }); + log.debug("lowerValue(@as({f}, {f}))", .{ ty.fmt(pt), val.fmtValue(pt) }); if (val.isUndef(zcu)) return .undef; diff --git 
a/src/codegen/c.zig b/src/codegen/c.zig index 7ee33577d8..61bd5259ae 100644 --- a/src/codegen/c.zig +++ b/src/codegen/c.zig @@ -4,6 +4,7 @@ const assert = std.debug.assert; const mem = std.mem; const log = std.log.scoped(.c); const Allocator = mem.Allocator; +const Writer = std.io.Writer; const dev = @import("../dev.zig"); const link = @import("../link.zig"); @@ -55,6 +56,7 @@ pub const Mir = struct { /// less than the natural alignment. uavs: std.AutoArrayHashMapUnmanaged(InternPool.Index, Alignment), // These remaining fields are essentially just an owned version of `link.C.AvBlock`. + code_header: []u8, code: []u8, fwd_decl: []u8, ctype_pool: CType.Pool, @@ -62,6 +64,7 @@ pub const Mir = struct { pub fn deinit(mir: *Mir, gpa: Allocator) void { mir.uavs.deinit(gpa); + gpa.free(mir.code_header); gpa.free(mir.code); gpa.free(mir.fwd_decl); mir.ctype_pool.deinit(gpa); @@ -69,6 +72,8 @@ pub const Mir = struct { } }; +pub const Error = Writer.Error || std.mem.Allocator.Error || error{AnalysisFail}; + pub const CType = @import("c/Type.zig"); pub const CValue = union(enum) { @@ -340,53 +345,61 @@ fn isReservedIdent(ident: []const u8) bool { } else return reserved_idents.has(ident); } -fn formatIdent( - ident: []const u8, - comptime fmt_str: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, -) @TypeOf(writer).Error!void { - const solo = fmt_str.len != 0 and fmt_str[0] == ' '; // space means solo; not part of a bigger ident. +fn formatIdentSolo(ident: []const u8, w: *std.io.Writer) std.io.Writer.Error!void { + return formatIdentOptions(ident, w, true); +} + +fn formatIdentUnsolo(ident: []const u8, w: *std.io.Writer) std.io.Writer.Error!void { + return formatIdentOptions(ident, w, false); +} + +fn formatIdentOptions(ident: []const u8, w: *std.io.Writer, solo: bool) std.io.Writer.Error!void { if (solo and isReservedIdent(ident)) { - try writer.writeAll("zig_e_"); + try w.writeAll("zig_e_"); } for (ident, 0..) 
|c, i| { switch (c) { - 'a'...'z', 'A'...'Z', '_' => try writer.writeByte(c), - '.' => try writer.writeByte('_'), + 'a'...'z', 'A'...'Z', '_' => try w.writeByte(c), + '.' => try w.writeByte('_'), '0'...'9' => if (i == 0) { - try writer.print("_{x:2}", .{c}); + try w.print("_{x:2}", .{c}); } else { - try writer.writeByte(c); + try w.writeByte(c); }, - else => try writer.print("_{x:2}", .{c}), + else => try w.print("_{x:2}", .{c}), } } } -pub fn fmtIdent(ident: []const u8) std.fmt.Formatter(formatIdent) { + +pub fn fmtIdentSolo(ident: []const u8) std.fmt.Formatter([]const u8, formatIdentSolo) { + return .{ .data = ident }; +} + +pub fn fmtIdentUnsolo(ident: []const u8) std.fmt.Formatter([]const u8, formatIdentUnsolo) { return .{ .data = ident }; } const CTypePoolStringFormatData = struct { ctype_pool_string: CType.Pool.String, ctype_pool: *const CType.Pool, + solo: bool, }; -fn formatCTypePoolString( - data: CTypePoolStringFormatData, - comptime fmt_str: []const u8, - fmt_opts: std.fmt.FormatOptions, - writer: anytype, -) @TypeOf(writer).Error!void { +fn formatCTypePoolString(data: CTypePoolStringFormatData, w: *std.io.Writer) std.io.Writer.Error!void { if (data.ctype_pool_string.toSlice(data.ctype_pool)) |slice| - try formatIdent(slice, fmt_str, fmt_opts, writer) + try formatIdentOptions(slice, w, data.solo) else - try writer.print("{}", .{data.ctype_pool_string.fmt(data.ctype_pool)}); + try w.print("{f}", .{data.ctype_pool_string.fmt(data.ctype_pool)}); } pub fn fmtCTypePoolString( ctype_pool_string: CType.Pool.String, ctype_pool: *const CType.Pool, -) std.fmt.Formatter(formatCTypePoolString) { - return .{ .data = .{ .ctype_pool_string = ctype_pool_string, .ctype_pool = ctype_pool } }; + solo: bool, +) std.fmt.Formatter(CTypePoolStringFormatData, formatCTypePoolString) { + return .{ .data = .{ + .ctype_pool_string = ctype_pool_string, + .ctype_pool = ctype_pool, + .solo = solo, + } }; } // Returns true if `formatIdent` would make any edits to ident. 
@@ -440,18 +453,18 @@ pub const Function = struct { const ty = f.typeOf(ref); const result: CValue = if (lowersToArray(ty, pt)) result: { - const writer = f.object.codeHeaderWriter(); + const ch = &f.object.code_header.writer; const decl_c_value = try f.allocLocalValue(.{ .ctype = try f.ctypeFromType(ty, .complete), .alignas = CType.AlignAs.fromAbiAlignment(ty.abiAlignment(pt.zcu)), }); const gpa = f.object.dg.gpa; try f.allocs.put(gpa, decl_c_value.new_local, false); - try writer.writeAll("static "); - try f.object.dg.renderTypeAndName(writer, ty, decl_c_value, Const, .none, .complete); - try writer.writeAll(" = "); - try f.object.dg.renderValue(writer, val, .StaticInitializer); - try writer.writeAll(";\n "); + try ch.writeAll("static "); + try f.object.dg.renderTypeAndName(ch, ty, decl_c_value, Const, .none, .complete); + try ch.writeAll(" = "); + try f.object.dg.renderValue(ch, val, .StaticInitializer); + try ch.writeAll(";\n "); break :result .{ .local = decl_c_value.new_local }; } else .{ .constant = val }; @@ -504,7 +517,7 @@ pub const Function = struct { return result; } - fn writeCValue(f: *Function, w: anytype, c_value: CValue, location: ValueRenderLocation) !void { + fn writeCValue(f: *Function, w: *Writer, c_value: CValue, location: ValueRenderLocation) !void { switch (c_value) { .none => unreachable, .new_local, .local => |i| try w.print("t{d}", .{i}), @@ -517,7 +530,7 @@ pub const Function = struct { } } - fn writeCValueDeref(f: *Function, w: anytype, c_value: CValue) !void { + fn writeCValueDeref(f: *Function, w: *Writer, c_value: CValue) !void { switch (c_value) { .none => unreachable, .new_local, .local, .constant => { @@ -538,41 +551,41 @@ pub const Function = struct { fn writeCValueMember( f: *Function, - writer: anytype, + w: *Writer, c_value: CValue, member: CValue, - ) error{ OutOfMemory, AnalysisFail }!void { + ) Error!void { switch (c_value) { .new_local, .local, .local_ref, .constant, .arg, .arg_array => { - try f.writeCValue(writer, 
c_value, .Other); - try writer.writeByte('.'); - try f.writeCValue(writer, member, .Other); + try f.writeCValue(w, c_value, .Other); + try w.writeByte('.'); + try f.writeCValue(w, member, .Other); }, - else => return f.object.dg.writeCValueMember(writer, c_value, member), + else => return f.object.dg.writeCValueMember(w, c_value, member), } } - fn writeCValueDerefMember(f: *Function, writer: anytype, c_value: CValue, member: CValue) !void { + fn writeCValueDerefMember(f: *Function, w: *Writer, c_value: CValue, member: CValue) !void { switch (c_value) { .new_local, .local, .arg, .arg_array => { - try f.writeCValue(writer, c_value, .Other); - try writer.writeAll("->"); + try f.writeCValue(w, c_value, .Other); + try w.writeAll("->"); }, .constant => { - try writer.writeByte('('); - try f.writeCValue(writer, c_value, .Other); - try writer.writeAll(")->"); + try w.writeByte('('); + try f.writeCValue(w, c_value, .Other); + try w.writeAll(")->"); }, .local_ref => { - try f.writeCValueDeref(writer, c_value); - try writer.writeByte('.'); + try f.writeCValueDeref(w, c_value); + try w.writeByte('.'); }, - else => return f.object.dg.writeCValueDerefMember(writer, c_value, member), + else => return f.object.dg.writeCValueDerefMember(w, c_value, member), } - try f.writeCValue(writer, member, .Other); + try f.writeCValue(w, member, .Other); } - fn fail(f: *Function, comptime format: []const u8, args: anytype) error{ AnalysisFail, OutOfMemory } { + fn fail(f: *Function, comptime format: []const u8, args: anytype) Error { return f.object.dg.fail(format, args); } @@ -584,20 +597,24 @@ pub const Function = struct { return f.object.dg.byteSize(ctype); } - fn renderType(f: *Function, w: anytype, ctype: Type) !void { + fn renderType(f: *Function, w: *Writer, ctype: Type) !void { return f.object.dg.renderType(w, ctype); } - fn renderCType(f: *Function, w: anytype, ctype: CType) !void { + fn renderCType(f: *Function, w: *Writer, ctype: CType) !void { return f.object.dg.renderCType(w, 
ctype); } - fn renderIntCast(f: *Function, w: anytype, dest_ty: Type, src: CValue, v: Vectorize, src_ty: Type, location: ValueRenderLocation) !void { + fn renderIntCast(f: *Function, w: *Writer, dest_ty: Type, src: CValue, v: Vectorize, src_ty: Type, location: ValueRenderLocation) !void { return f.object.dg.renderIntCast(w, dest_ty, .{ .c_value = .{ .f = f, .value = src, .v = v } }, src_ty, location); } - fn fmtIntLiteral(f: *Function, val: Value) !std.fmt.Formatter(formatIntLiteral) { - return f.object.dg.fmtIntLiteral(val, .Other); + fn fmtIntLiteralDec(f: *Function, val: Value) !std.fmt.Formatter(FormatIntLiteralContext, formatIntLiteral) { + return f.object.dg.fmtIntLiteralDec(val, .Other); + } + + fn fmtIntLiteralHex(f: *Function, val: Value) !std.fmt.Formatter(FormatIntLiteralContext, formatIntLiteral) { + return f.object.dg.fmtIntLiteralHex(val, .Other); } fn getLazyFnName(f: *Function, key: LazyFnKey) ![]const u8 { @@ -614,16 +631,16 @@ pub const Function = struct { gop.value_ptr.* = .{ .fn_name = switch (key) { .tag_name, - => |enum_ty| try ctype_pool.fmt(gpa, "zig_{s}_{}__{d}", .{ + => |enum_ty| try ctype_pool.fmt(gpa, "zig_{s}_{f}__{d}", .{ @tagName(key), - fmtIdent(ip.loadEnumType(enum_ty).name.toSlice(ip)), + fmtIdentUnsolo(ip.loadEnumType(enum_ty).name.toSlice(ip)), @intFromEnum(enum_ty), }), .never_tail, .never_inline, - => |owner_nav| try ctype_pool.fmt(gpa, "zig_{s}_{}__{d}", .{ + => |owner_nav| try ctype_pool.fmt(gpa, "zig_{s}_{f}__{d}", .{ @tagName(key), - fmtIdent(ip.getNav(owner_nav).name.toSlice(ip)), + fmtIdentUnsolo(ip.getNav(owner_nav).name.toSlice(ip)), @intFromEnum(owner_nav), }), }, @@ -659,12 +676,12 @@ pub const Function = struct { }, else => {}, } - const writer = f.object.writer(); - const a = try Assignment.start(f, writer, ctype); - try f.writeCValue(writer, dst, .Other); - try a.assign(f, writer); - try f.writeCValue(writer, src, .Other); - try a.end(f, writer); + const w = &f.object.code.writer; + const a = try 
Assignment.start(f, w, ctype); + try f.writeCValue(w, dst, .Other); + try a.assign(f, w); + try f.writeCValue(w, src, .Other); + try a.end(f, w); } fn moveCValue(f: *Function, inst: Air.Inst.Index, ty: Type, src: CValue) !CValue { @@ -693,18 +710,32 @@ pub const Function = struct { /// It is not available when generating .h file. pub const Object = struct { dg: DeclGen, - /// This is a borrowed reference from `link.C`. - code: std.ArrayList(u8), - /// Goes before code. Initialized and deinitialized in `genFunc`. - code_header: std.ArrayList(u8) = undefined, - indent_writer: IndentWriter(std.ArrayList(u8).Writer), + code_header: std.io.Writer.Allocating, + code: std.io.Writer.Allocating, + indent_counter: usize, - fn writer(o: *Object) IndentWriter(std.ArrayList(u8).Writer).Writer { - return o.indent_writer.writer(); + const indent_width = 1; + const indent_char = ' '; + + fn newline(o: *Object) !void { + const w = &o.code.writer; + try w.writeByte('\n'); + try w.splatByteAll(indent_char, o.indent_counter); } - - fn codeHeaderWriter(o: *Object) ArrayListWriter { - return arrayListWriter(&o.code_header); + fn indent(o: *Object) void { + o.indent_counter += indent_width; + } + fn outdent(o: *Object) !void { + o.indent_counter -= indent_width; + const written = o.code.getWritten(); + switch (written[written.len - 1]) { + indent_char => o.code.shrinkRetainingCapacity(written.len - indent_width), + '\n' => try o.code.writer.splatByteAll(indent_char, o.indent_counter), + else => { + std.debug.print("\"{f}\"\n", .{std.zig.fmtString(written[written.len -| 100..])}); + unreachable; + }, + } } }; @@ -716,8 +747,7 @@ pub const DeclGen = struct { pass: Pass, is_naked_fn: bool, expected_block: ?u32, - /// This is a borrowed reference from `link.C`. 
- fwd_decl: std.ArrayList(u8), + fwd_decl: std.io.Writer.Allocating, error_msg: ?*Zcu.ErrorMsg, ctype_pool: CType.Pool, scratch: std.ArrayListUnmanaged(u32), @@ -734,11 +764,7 @@ pub const DeclGen = struct { flush, }; - fn fwdDeclWriter(dg: *DeclGen) ArrayListWriter { - return arrayListWriter(&dg.fwd_decl); - } - - fn fail(dg: *DeclGen, comptime format: []const u8, args: anytype) error{ AnalysisFail, OutOfMemory } { + fn fail(dg: *DeclGen, comptime format: []const u8, args: anytype) Error { @branchHint(.cold); const zcu = dg.pt.zcu; const src_loc = zcu.navSrcLoc(dg.pass.nav); @@ -748,10 +774,10 @@ pub const DeclGen = struct { fn renderUav( dg: *DeclGen, - writer: anytype, + w: *Writer, uav: InternPool.Key.Ptr.BaseAddr.Uav, location: ValueRenderLocation, - ) error{ OutOfMemory, AnalysisFail }!void { + ) Error!void { const pt = dg.pt; const zcu = pt.zcu; const ip = &zcu.intern_pool; @@ -762,14 +788,14 @@ pub const DeclGen = struct { // Render an undefined pointer if we have a pointer to a zero-bit or comptime type. const ptr_ty: Type = .fromInterned(uav.orig_ty); if (ptr_ty.isPtrAtRuntime(zcu) and !uav_ty.isFnOrHasRuntimeBits(zcu)) { - return dg.writeCValue(writer, .{ .undef = ptr_ty }); + return dg.writeCValue(w, .{ .undef = ptr_ty }); } // Chase function values in order to be able to reference the original function. 
switch (ip.indexToKey(uav.val)) { .variable => unreachable, - .func => |func| return dg.renderNav(writer, func.owner_nav, location), - .@"extern" => |@"extern"| return dg.renderNav(writer, @"extern".owner_nav, location), + .func => |func| return dg.renderNav(w, func.owner_nav, location), + .@"extern" => |@"extern"| return dg.renderNav(w, @"extern".owner_nav, location), else => {}, } @@ -783,13 +809,13 @@ pub const DeclGen = struct { const need_cast = !elem_ctype.eql(uav_ctype) and (elem_ctype.info(ctype_pool) != .function or uav_ctype.info(ctype_pool) != .function); if (need_cast) { - try writer.writeAll("(("); - try dg.renderCType(writer, ptr_ctype); - try writer.writeByte(')'); + try w.writeAll("(("); + try dg.renderCType(w, ptr_ctype); + try w.writeByte(')'); } - try writer.writeByte('&'); - try renderUavName(writer, uav_val); - if (need_cast) try writer.writeByte(')'); + try w.writeByte('&'); + try renderUavName(w, uav_val); + if (need_cast) try w.writeByte(')'); // Indicate that the anon decl should be rendered to the output so that // our reference above is not undefined. 
@@ -810,10 +836,10 @@ pub const DeclGen = struct { fn renderNav( dg: *DeclGen, - writer: anytype, + w: *Writer, nav_index: InternPool.Nav.Index, location: ValueRenderLocation, - ) error{ OutOfMemory, AnalysisFail }!void { + ) Error!void { _ = location; const pt = dg.pt; const zcu = pt.zcu; @@ -835,7 +861,7 @@ pub const DeclGen = struct { const nav_ty: Type = .fromInterned(ip.getNav(owner_nav).typeOf(ip)); const ptr_ty = try pt.navPtrType(owner_nav); if (!nav_ty.isFnOrHasRuntimeBits(zcu)) { - return dg.writeCValue(writer, .{ .undef = ptr_ty }); + return dg.writeCValue(w, .{ .undef = ptr_ty }); } // We shouldn't cast C function pointers as this is UB (when you call @@ -848,21 +874,21 @@ pub const DeclGen = struct { const need_cast = !elem_ctype.eql(nav_ctype) and (elem_ctype.info(ctype_pool) != .function or nav_ctype.info(ctype_pool) != .function); if (need_cast) { - try writer.writeAll("(("); - try dg.renderCType(writer, ctype); - try writer.writeByte(')'); + try w.writeAll("(("); + try dg.renderCType(w, ctype); + try w.writeByte(')'); } - try writer.writeByte('&'); - try dg.renderNavName(writer, owner_nav); - if (need_cast) try writer.writeByte(')'); + try w.writeByte('&'); + try dg.renderNavName(w, owner_nav); + if (need_cast) try w.writeByte(')'); } fn renderPointer( dg: *DeclGen, - writer: anytype, + w: *Writer, derivation: Value.PointerDeriveStep, location: ValueRenderLocation, - ) error{ OutOfMemory, AnalysisFail }!void { + ) Error!void { const pt = dg.pt; const zcu = pt.zcu; switch (derivation) { @@ -870,18 +896,18 @@ pub const DeclGen = struct { .int => |int| { const ptr_ctype = try dg.ctypeFromType(int.ptr_ty, .complete); const addr_val = try pt.intValue(.usize, int.addr); - try writer.writeByte('('); - try dg.renderCType(writer, ptr_ctype); - try writer.print("){x}", .{try dg.fmtIntLiteral(addr_val, .Other)}); + try w.writeByte('('); + try dg.renderCType(w, ptr_ctype); + try w.print("){f}", .{try dg.fmtIntLiteralHex(addr_val, .Other)}); }, - .nav_ptr => 
|nav| try dg.renderNav(writer, nav, location), - .uav_ptr => |uav| try dg.renderUav(writer, uav, location), + .nav_ptr => |nav| try dg.renderNav(w, nav, location), + .uav_ptr => |uav| try dg.renderUav(w, uav, location), inline .eu_payload_ptr, .opt_payload_ptr => |info| { - try writer.writeAll("&("); - try dg.renderPointer(writer, info.parent.*, location); - try writer.writeAll(")->payload"); + try w.writeAll("&("); + try dg.renderPointer(w, info.parent.*, location); + try w.writeAll(")->payload"); }, .field_ptr => |field| { @@ -893,26 +919,26 @@ pub const DeclGen = struct { switch (fieldLocation(parent_ptr_ty, field.result_ptr_ty, field.field_idx, pt)) { .begin => { const ptr_ctype = try dg.ctypeFromType(field.result_ptr_ty, .complete); - try writer.writeByte('('); - try dg.renderCType(writer, ptr_ctype); - try writer.writeByte(')'); - try dg.renderPointer(writer, field.parent.*, location); + try w.writeByte('('); + try dg.renderCType(w, ptr_ctype); + try w.writeByte(')'); + try dg.renderPointer(w, field.parent.*, location); }, .field => |name| { - try writer.writeAll("&("); - try dg.renderPointer(writer, field.parent.*, location); - try writer.writeAll(")->"); - try dg.writeCValue(writer, name); + try w.writeAll("&("); + try dg.renderPointer(w, field.parent.*, location); + try w.writeAll(")->"); + try dg.writeCValue(w, name); }, .byte_offset => |byte_offset| { const ptr_ctype = try dg.ctypeFromType(field.result_ptr_ty, .complete); - try writer.writeByte('('); - try dg.renderCType(writer, ptr_ctype); - try writer.writeByte(')'); + try w.writeByte('('); + try dg.renderCType(w, ptr_ctype); + try w.writeByte(')'); const offset_val = try pt.intValue(.usize, byte_offset); - try writer.writeAll("((char *)"); - try dg.renderPointer(writer, field.parent.*, location); - try writer.print(" + {})", .{try dg.fmtIntLiteral(offset_val, .Other)}); + try w.writeAll("((char *)"); + try dg.renderPointer(w, field.parent.*, location); + try w.print(" + {f})", .{try 
dg.fmtIntLiteralDec(offset_val, .Other)}); }, } }, @@ -920,10 +946,10 @@ pub const DeclGen = struct { .elem_ptr => |elem| if (!(try elem.parent.ptrType(pt)).childType(zcu).hasRuntimeBits(zcu)) { // Element type is zero-bit, so lowers to `void`. The index is irrelevant; just cast the pointer. const ptr_ctype = try dg.ctypeFromType(elem.result_ptr_ty, .complete); - try writer.writeByte('('); - try dg.renderCType(writer, ptr_ctype); - try writer.writeByte(')'); - try dg.renderPointer(writer, elem.parent.*, location); + try w.writeByte('('); + try dg.renderCType(w, ptr_ctype); + try w.writeByte(')'); + try dg.renderPointer(w, elem.parent.*, location); } else { const index_val = try pt.intValue(.usize, elem.elem_idx); // We want to do pointer arithmetic on a pointer to the element type. @@ -932,48 +958,47 @@ pub const DeclGen = struct { const parent_ctype = try dg.ctypeFromType(try elem.parent.ptrType(pt), .complete); if (result_ctype.eql(parent_ctype)) { // The pointer already has an appropriate type - just do the arithmetic. - try writer.writeByte('('); - try dg.renderPointer(writer, elem.parent.*, location); - try writer.print(" + {})", .{try dg.fmtIntLiteral(index_val, .Other)}); + try w.writeByte('('); + try dg.renderPointer(w, elem.parent.*, location); + try w.print(" + {f})", .{try dg.fmtIntLiteralDec(index_val, .Other)}); } else { // We probably have an array pointer `T (*)[n]`. Cast to an element pointer, // and *then* apply the index. 
- try writer.writeAll("(("); - try dg.renderCType(writer, result_ctype); - try writer.writeByte(')'); - try dg.renderPointer(writer, elem.parent.*, location); - try writer.print(" + {})", .{try dg.fmtIntLiteral(index_val, .Other)}); + try w.writeAll("(("); + try dg.renderCType(w, result_ctype); + try w.writeByte(')'); + try dg.renderPointer(w, elem.parent.*, location); + try w.print(" + {f})", .{try dg.fmtIntLiteralDec(index_val, .Other)}); } }, .offset_and_cast => |oac| { const ptr_ctype = try dg.ctypeFromType(oac.new_ptr_ty, .complete); - try writer.writeByte('('); - try dg.renderCType(writer, ptr_ctype); - try writer.writeByte(')'); + try w.writeByte('('); + try dg.renderCType(w, ptr_ctype); + try w.writeByte(')'); if (oac.byte_offset == 0) { - try dg.renderPointer(writer, oac.parent.*, location); + try dg.renderPointer(w, oac.parent.*, location); } else { const offset_val = try pt.intValue(.usize, oac.byte_offset); - try writer.writeAll("((char *)"); - try dg.renderPointer(writer, oac.parent.*, location); - try writer.print(" + {})", .{try dg.fmtIntLiteral(offset_val, .Other)}); + try w.writeAll("((char *)"); + try dg.renderPointer(w, oac.parent.*, location); + try w.print(" + {f})", .{try dg.fmtIntLiteralDec(offset_val, .Other)}); } }, } } - fn renderErrorName(dg: *DeclGen, writer: anytype, err_name: InternPool.NullTerminatedString) !void { - const ip = &dg.pt.zcu.intern_pool; - try writer.print("zig_error_{}", .{fmtIdent(err_name.toSlice(ip))}); + fn renderErrorName(dg: *DeclGen, w: *Writer, err_name: InternPool.NullTerminatedString) !void { + try w.print("zig_error_{f}", .{fmtIdentUnsolo(err_name.toSlice(&dg.pt.zcu.intern_pool))}); } fn renderValue( dg: *DeclGen, - writer: anytype, + w: *Writer, val: Value, location: ValueRenderLocation, - ) error{ OutOfMemory, AnalysisFail }!void { + ) Error!void { const pt = dg.pt; const zcu = pt.zcu; const ip = &zcu.intern_pool; @@ -986,7 +1011,7 @@ pub const DeclGen = struct { }; const ty = val.typeOf(zcu); - if 
(val.isUndefDeep(zcu)) return dg.renderUndefValue(writer, ty, location); + if (val.isUndefDeep(zcu)) return dg.renderUndefValue(w, ty, location); const ctype = try dg.ctypeFromType(ty, location.toCTypeKind()); switch (ip.indexToKey(val.toIntern())) { // types, not values @@ -1019,8 +1044,8 @@ pub const DeclGen = struct { .empty_tuple => unreachable, .@"unreachable" => unreachable, - .false => try writer.writeAll("false"), - .true => try writer.writeAll("true"), + .false => try w.writeAll("false"), + .true => try w.writeAll("true"), }, .variable, .@"extern", @@ -1029,45 +1054,45 @@ pub const DeclGen = struct { .empty_enum_value, => unreachable, // non-runtime values .int => |int| switch (int.storage) { - .u64, .i64, .big_int => try writer.print("{}", .{try dg.fmtIntLiteral(val, location)}), + .u64, .i64, .big_int => try w.print("{f}", .{try dg.fmtIntLiteralDec(val, location)}), .lazy_align, .lazy_size => { - try writer.writeAll("(("); - try dg.renderCType(writer, ctype); - try writer.print("){x})", .{try dg.fmtIntLiteral( + try w.writeAll("(("); + try dg.renderCType(w, ctype); + try w.print("){f})", .{try dg.fmtIntLiteralHex( try pt.intValue(.usize, val.toUnsignedInt(zcu)), .Other, )}); }, }, - .err => |err| try dg.renderErrorName(writer, err.name), + .err => |err| try dg.renderErrorName(w, err.name), .error_union => |error_union| switch (ctype.info(ctype_pool)) { .basic => switch (error_union.val) { - .err_name => |err_name| try dg.renderErrorName(writer, err_name), - .payload => try writer.writeAll("0"), + .err_name => |err_name| try dg.renderErrorName(w, err_name), + .payload => try w.writeByte('0'), }, .pointer, .aligned, .array, .vector, .fwd_decl, .function => unreachable, .aggregate => |aggregate| { if (!location.isInitializer()) { - try writer.writeByte('('); - try dg.renderCType(writer, ctype); - try writer.writeByte(')'); + try w.writeByte('('); + try dg.renderCType(w, ctype); + try w.writeByte(')'); } - try writer.writeByte('{'); + try w.writeByte('{'); 
for (0..aggregate.fields.len) |field_index| { - if (field_index > 0) try writer.writeByte(','); + if (field_index > 0) try w.writeByte(','); switch (aggregate.fields.at(field_index, ctype_pool).name.index) { .@"error" => switch (error_union.val) { - .err_name => |err_name| try dg.renderErrorName(writer, err_name), - .payload => try writer.writeByte('0'), + .err_name => |err_name| try dg.renderErrorName(w, err_name), + .payload => try w.writeByte('0'), }, .payload => switch (error_union.val) { .err_name => try dg.renderUndefValue( - writer, + w, ty.errorUnionPayload(zcu), initializer_type, ), .payload => |payload| try dg.renderValue( - writer, + w, Value.fromInterned(payload), initializer_type, ), @@ -1075,10 +1100,10 @@ pub const DeclGen = struct { else => unreachable, } } - try writer.writeByte('}'); + try w.writeByte('}'); }, }, - .enum_tag => |enum_tag| try dg.renderValue(writer, Value.fromInterned(enum_tag.int), location), + .enum_tag => |enum_tag| try dg.renderValue(w, Value.fromInterned(enum_tag.int), location), .float => { const bits = ty.floatBits(target); const f128_val = val.toFloat(f128, zcu); @@ -1105,18 +1130,18 @@ pub const DeclGen = struct { var empty = true; if (std.math.isFinite(f128_val)) { - try writer.writeAll("zig_make_"); - try dg.renderTypeForBuiltinFnName(writer, ty); - try writer.writeByte('('); + try w.writeAll("zig_make_"); + try dg.renderTypeForBuiltinFnName(w, ty); + try w.writeByte('('); switch (bits) { - 16 => try writer.print("{x}", .{val.toFloat(f16, zcu)}), - 32 => try writer.print("{x}", .{val.toFloat(f32, zcu)}), - 64 => try writer.print("{x}", .{val.toFloat(f64, zcu)}), - 80 => try writer.print("{x}", .{val.toFloat(f80, zcu)}), - 128 => try writer.print("{x}", .{f128_val}), + 16 => try w.print("{x}", .{val.toFloat(f16, zcu)}), + 32 => try w.print("{x}", .{val.toFloat(f32, zcu)}), + 64 => try w.print("{x}", .{val.toFloat(f64, zcu)}), + 80 => try w.print("{x}", .{val.toFloat(f80, zcu)}), + 128 => try w.print("{x}", .{f128_val}), 
else => unreachable, } - try writer.writeAll(", "); + try w.writeAll(", "); empty = false; } else { // isSignalNan is equivalent to isNan currently, and MSVC doesn't have nans, so prefer nan @@ -1140,45 +1165,45 @@ pub const DeclGen = struct { // return dg.fail("Only quiet nans are supported in global variable initializers", .{}); } - try writer.writeAll("zig_"); - try writer.writeAll(if (location == .StaticInitializer) "init" else "make"); - try writer.writeAll("_special_"); - try dg.renderTypeForBuiltinFnName(writer, ty); - try writer.writeByte('('); - if (std.math.signbit(f128_val)) try writer.writeByte('-'); - try writer.writeAll(", "); - try writer.writeAll(operation); - try writer.writeAll(", "); + try w.writeAll("zig_"); + try w.writeAll(if (location == .StaticInitializer) "init" else "make"); + try w.writeAll("_special_"); + try dg.renderTypeForBuiltinFnName(w, ty); + try w.writeByte('('); + if (std.math.signbit(f128_val)) try w.writeByte('-'); + try w.writeAll(", "); + try w.writeAll(operation); + try w.writeAll(", "); if (std.math.isNan(f128_val)) switch (bits) { // We only actually need to pass the significand, but it will get // properly masked anyway, so just pass the whole value. 
- 16 => try writer.print("\"0x{x}\"", .{@as(u16, @bitCast(val.toFloat(f16, zcu)))}), - 32 => try writer.print("\"0x{x}\"", .{@as(u32, @bitCast(val.toFloat(f32, zcu)))}), - 64 => try writer.print("\"0x{x}\"", .{@as(u64, @bitCast(val.toFloat(f64, zcu)))}), - 80 => try writer.print("\"0x{x}\"", .{@as(u80, @bitCast(val.toFloat(f80, zcu)))}), - 128 => try writer.print("\"0x{x}\"", .{@as(u128, @bitCast(f128_val))}), + 16 => try w.print("\"0x{x}\"", .{@as(u16, @bitCast(val.toFloat(f16, zcu)))}), + 32 => try w.print("\"0x{x}\"", .{@as(u32, @bitCast(val.toFloat(f32, zcu)))}), + 64 => try w.print("\"0x{x}\"", .{@as(u64, @bitCast(val.toFloat(f64, zcu)))}), + 80 => try w.print("\"0x{x}\"", .{@as(u80, @bitCast(val.toFloat(f80, zcu)))}), + 128 => try w.print("\"0x{x}\"", .{@as(u128, @bitCast(f128_val))}), else => unreachable, }; - try writer.writeAll(", "); + try w.writeAll(", "); empty = false; } - try writer.print("{x}", .{try dg.fmtIntLiteral( + try w.print("{f}", .{try dg.fmtIntLiteralHex( try pt.intValue_big(repr_ty, repr_val_big.toConst()), location, )}); - if (!empty) try writer.writeByte(')'); + if (!empty) try w.writeByte(')'); }, .slice => |slice| { const aggregate = ctype.info(ctype_pool).aggregate; if (!location.isInitializer()) { - try writer.writeByte('('); - try dg.renderCType(writer, ctype); - try writer.writeByte(')'); + try w.writeByte('('); + try dg.renderCType(w, ctype); + try w.writeByte(')'); } - try writer.writeByte('{'); + try w.writeByte('{'); for (0..aggregate.fields.len) |field_index| { - if (field_index > 0) try writer.writeByte(','); - try dg.renderValue(writer, Value.fromInterned( + if (field_index > 0) try w.writeByte(','); + try dg.renderValue(w, Value.fromInterned( switch (aggregate.fields.at(field_index, ctype_pool).name.index) { .ptr => slice.ptr, .len => slice.len, @@ -1186,33 +1211,33 @@ pub const DeclGen = struct { }, ), initializer_type); } - try writer.writeByte('}'); + try w.writeByte('}'); }, .ptr => { var arena = 
std.heap.ArenaAllocator.init(zcu.gpa); defer arena.deinit(); const derivation = try val.pointerDerivation(arena.allocator(), pt); - try dg.renderPointer(writer, derivation, location); + try dg.renderPointer(w, derivation, location); }, .opt => |opt| switch (ctype.info(ctype_pool)) { - .basic => if (ctype.isBool()) try writer.writeAll(switch (opt.val) { + .basic => if (ctype.isBool()) try w.writeAll(switch (opt.val) { .none => "true", else => "false", }) else switch (opt.val) { - .none => try writer.writeAll("0"), + .none => try w.writeByte('0'), else => |payload| switch (ip.indexToKey(payload)) { .undef => |err_ty| try dg.renderUndefValue( - writer, + w, .fromInterned(err_ty), location, ), - .err => |err| try dg.renderErrorName(writer, err.name), + .err => |err| try dg.renderErrorName(w, err.name), else => unreachable, }, }, .pointer => switch (opt.val) { - .none => try writer.writeAll("NULL"), - else => |payload| try dg.renderValue(writer, Value.fromInterned(payload), location), + .none => try w.writeAll("NULL"), + else => |payload| try dg.renderValue(w, Value.fromInterned(payload), location), }, .aligned, .array, .vector, .fwd_decl, .function => unreachable, .aggregate => |aggregate| { @@ -1221,7 +1246,7 @@ pub const DeclGen = struct { else => |payload| switch (aggregate.fields.at(0, ctype_pool).name.index) { .is_null, .payload => {}, .ptr, .len => return dg.renderValue( - writer, + w, Value.fromInterned(payload), location, ), @@ -1229,48 +1254,48 @@ pub const DeclGen = struct { }, } if (!location.isInitializer()) { - try writer.writeByte('('); - try dg.renderCType(writer, ctype); - try writer.writeByte(')'); + try w.writeByte('('); + try dg.renderCType(w, ctype); + try w.writeByte(')'); } - try writer.writeByte('{'); + try w.writeByte('{'); for (0..aggregate.fields.len) |field_index| { - if (field_index > 0) try writer.writeByte(','); + if (field_index > 0) try w.writeByte(','); switch (aggregate.fields.at(field_index, ctype_pool).name.index) { - .is_null => try 
writer.writeAll(switch (opt.val) { + .is_null => try w.writeAll(switch (opt.val) { .none => "true", else => "false", }), .payload => switch (opt.val) { .none => try dg.renderUndefValue( - writer, + w, ty.optionalChild(zcu), initializer_type, ), else => |payload| try dg.renderValue( - writer, + w, Value.fromInterned(payload), initializer_type, ), }, - .ptr => try writer.writeAll("NULL"), - .len => try dg.renderUndefValue(writer, .usize, initializer_type), + .ptr => try w.writeAll("NULL"), + .len => try dg.renderUndefValue(w, .usize, initializer_type), else => unreachable, } } - try writer.writeByte('}'); + try w.writeByte('}'); }, }, .aggregate => switch (ip.indexToKey(ty.toIntern())) { .array_type, .vector_type => { if (location == .FunctionArgument) { - try writer.writeByte('('); - try dg.renderCType(writer, ctype); - try writer.writeByte(')'); + try w.writeByte('('); + try dg.renderCType(w, ctype); + try w.writeByte(')'); } const ai = ty.arrayInfo(zcu); if (ai.elem_type.eql(.u8, zcu)) { - var literal = stringLiteral(writer, ty.arrayLenIncludingSentinel(zcu)); + var literal: StringLiteral = .init(w, @intCast(ty.arrayLenIncludingSentinel(zcu))); try literal.start(); var index: usize = 0; while (index < ai.len) : (index += 1) { @@ -1287,28 +1312,28 @@ pub const DeclGen = struct { } try literal.end(); } else { - try writer.writeByte('{'); + try w.writeByte('{'); var index: usize = 0; while (index < ai.len) : (index += 1) { - if (index != 0) try writer.writeByte(','); + if (index != 0) try w.writeByte(','); const elem_val = try val.elemValue(pt, index); - try dg.renderValue(writer, elem_val, initializer_type); + try dg.renderValue(w, elem_val, initializer_type); } if (ai.sentinel) |s| { - if (index != 0) try writer.writeByte(','); - try dg.renderValue(writer, s, initializer_type); + if (index != 0) try w.writeByte(','); + try dg.renderValue(w, s, initializer_type); } - try writer.writeByte('}'); + try w.writeByte('}'); } }, .tuple_type => |tuple| { if 
(!location.isInitializer()) { - try writer.writeByte('('); - try dg.renderCType(writer, ctype); - try writer.writeByte(')'); + try w.writeByte('('); + try dg.renderCType(w, ctype); + try w.writeByte(')'); } - try writer.writeByte('{'); + try w.writeByte('{'); var empty = true; for (0..tuple.types.len) |field_index| { const comptime_val = tuple.values.get(ip)[field_index]; @@ -1316,7 +1341,7 @@ pub const DeclGen = struct { const field_ty: Type = .fromInterned(tuple.types.get(ip)[field_index]); if (!field_ty.hasRuntimeBitsIgnoreComptime(zcu)) continue; - if (!empty) try writer.writeByte(','); + if (!empty) try w.writeByte(','); const field_val = Value.fromInterned( switch (ip.indexToKey(val.toIntern()).aggregate.storage) { @@ -1328,30 +1353,30 @@ pub const DeclGen = struct { .repeated_elem => |elem| elem, }, ); - try dg.renderValue(writer, field_val, initializer_type); + try dg.renderValue(w, field_val, initializer_type); empty = false; } - try writer.writeByte('}'); + try w.writeByte('}'); }, .struct_type => { const loaded_struct = ip.loadStructType(ty.toIntern()); switch (loaded_struct.layout) { .auto, .@"extern" => { if (!location.isInitializer()) { - try writer.writeByte('('); - try dg.renderCType(writer, ctype); - try writer.writeByte(')'); + try w.writeByte('('); + try dg.renderCType(w, ctype); + try w.writeByte(')'); } - try writer.writeByte('{'); + try w.writeByte('{'); var field_it = loaded_struct.iterateRuntimeOrder(ip); var need_comma = false; while (field_it.next()) |field_index| { const field_ty: Type = .fromInterned(loaded_struct.field_types.get(ip)[field_index]); if (!field_ty.hasRuntimeBitsIgnoreComptime(zcu)) continue; - if (need_comma) try writer.writeByte(','); + if (need_comma) try w.writeByte(','); need_comma = true; const field_val = switch (ip.indexToKey(val.toIntern()).aggregate.storage) { .bytes => |bytes| try pt.intern(.{ .int = .{ @@ -1361,9 +1386,9 @@ pub const DeclGen = struct { .elems => |elems| elems[field_index], .repeated_elem => 
|elem| elem, }; - try dg.renderValue(writer, Value.fromInterned(field_val), initializer_type); + try dg.renderValue(w, Value.fromInterned(field_val), initializer_type); } - try writer.writeByte('}'); + try w.writeByte('}'); }, .@"packed" => { const int_info = ty.intInfo(zcu); @@ -1381,16 +1406,16 @@ pub const DeclGen = struct { } if (eff_num_fields == 0) { - try writer.writeByte('('); - try dg.renderUndefValue(writer, ty, location); - try writer.writeByte(')'); + try w.writeByte('('); + try dg.renderUndefValue(w, ty, location); + try w.writeByte(')'); } else if (ty.bitSize(zcu) > 64) { // zig_or_u128(zig_or_u128(zig_shl_u128(a, a_off), zig_shl_u128(b, b_off)), zig_shl_u128(c, c_off)) var num_or = eff_num_fields - 1; while (num_or > 0) : (num_or -= 1) { - try writer.writeAll("zig_or_"); - try dg.renderTypeForBuiltinFnName(writer, ty); - try writer.writeByte('('); + try w.writeAll("zig_or_"); + try dg.renderTypeForBuiltinFnName(w, ty); + try w.writeByte('('); } var eff_index: usize = 0; @@ -1409,36 +1434,36 @@ pub const DeclGen = struct { }; const cast_context = IntCastContext{ .value = .{ .value = Value.fromInterned(field_val) } }; if (bit_offset != 0) { - try writer.writeAll("zig_shl_"); - try dg.renderTypeForBuiltinFnName(writer, ty); - try writer.writeByte('('); - try dg.renderIntCast(writer, ty, cast_context, field_ty, .FunctionArgument); - try writer.writeAll(", "); - try dg.renderValue(writer, try pt.intValue(bit_offset_ty, bit_offset), .FunctionArgument); - try writer.writeByte(')'); + try w.writeAll("zig_shl_"); + try dg.renderTypeForBuiltinFnName(w, ty); + try w.writeByte('('); + try dg.renderIntCast(w, ty, cast_context, field_ty, .FunctionArgument); + try w.writeAll(", "); + try dg.renderValue(w, try pt.intValue(bit_offset_ty, bit_offset), .FunctionArgument); + try w.writeByte(')'); } else { - try dg.renderIntCast(writer, ty, cast_context, field_ty, .FunctionArgument); + try dg.renderIntCast(w, ty, cast_context, field_ty, .FunctionArgument); } - if 
(needs_closing_paren) try writer.writeByte(')'); - if (eff_index != eff_num_fields - 1) try writer.writeAll(", "); + if (needs_closing_paren) try w.writeByte(')'); + if (eff_index != eff_num_fields - 1) try w.writeAll(", "); bit_offset += field_ty.bitSize(zcu); needs_closing_paren = true; eff_index += 1; } } else { - try writer.writeByte('('); + try w.writeByte('('); // a << a_off | b << b_off | c << c_off var empty = true; for (0..loaded_struct.field_types.len) |field_index| { const field_ty: Type = .fromInterned(loaded_struct.field_types.get(ip)[field_index]); if (!field_ty.hasRuntimeBitsIgnoreComptime(zcu)) continue; - if (!empty) try writer.writeAll(" | "); - try writer.writeByte('('); - try dg.renderCType(writer, ctype); - try writer.writeByte(')'); + if (!empty) try w.writeAll(" | "); + try w.writeByte('('); + try dg.renderCType(w, ctype); + try w.writeByte(')'); const field_val = switch (ip.indexToKey(val.toIntern()).aggregate.storage) { .bytes => |bytes| try pt.intern(.{ .int = .{ @@ -1455,24 +1480,24 @@ pub const DeclGen = struct { .{ .signedness = .unsigned, .bits = undefined }; switch (field_int_info.signedness) { .signed => { - try writer.writeByte('('); - try dg.renderValue(writer, Value.fromInterned(field_val), .Other); - try writer.writeAll(" & "); + try w.writeByte('('); + try dg.renderValue(w, Value.fromInterned(field_val), .Other); + try w.writeAll(" & "); const field_uint_ty = try pt.intType(.unsigned, field_int_info.bits); - try dg.renderValue(writer, try field_uint_ty.maxIntScalar(pt, field_uint_ty), .Other); - try writer.writeByte(')'); + try dg.renderValue(w, try field_uint_ty.maxIntScalar(pt, field_uint_ty), .Other); + try w.writeByte(')'); }, - .unsigned => try dg.renderValue(writer, Value.fromInterned(field_val), .Other), + .unsigned => try dg.renderValue(w, Value.fromInterned(field_val), .Other), } if (bit_offset != 0) { - try writer.writeAll(" << "); - try dg.renderValue(writer, try pt.intValue(bit_offset_ty, bit_offset), 
.FunctionArgument); + try w.writeAll(" << "); + try dg.renderValue(w, try pt.intValue(bit_offset_ty, bit_offset), .FunctionArgument); } bit_offset += field_ty.bitSize(zcu); empty = false; } - try writer.writeByte(')'); + try w.writeByte(')'); } }, } @@ -1486,11 +1511,11 @@ pub const DeclGen = struct { switch (loaded_union.flagsUnordered(ip).layout) { .@"packed" => { if (!location.isInitializer()) { - try writer.writeByte('('); - try dg.renderType(writer, backing_ty); - try writer.writeByte(')'); + try w.writeByte('('); + try dg.renderType(w, backing_ty); + try w.writeByte(')'); } - try dg.renderValue(writer, Value.fromInterned(un.val), location); + try dg.renderValue(w, Value.fromInterned(un.val), location); }, .@"extern" => { if (location == .StaticInitializer) { @@ -1498,21 +1523,21 @@ pub const DeclGen = struct { } const ptr_ty = try pt.singleConstPtrType(ty); - try writer.writeAll("*(("); - try dg.renderType(writer, ptr_ty); - try writer.writeAll(")("); - try dg.renderType(writer, backing_ty); - try writer.writeAll("){"); - try dg.renderValue(writer, Value.fromInterned(un.val), location); - try writer.writeAll("})"); + try w.writeAll("*(("); + try dg.renderType(w, ptr_ty); + try w.writeAll(")("); + try dg.renderType(w, backing_ty); + try w.writeAll("){"); + try dg.renderValue(w, Value.fromInterned(un.val), location); + try w.writeAll("})"); }, else => unreachable, } } else { if (!location.isInitializer()) { - try writer.writeByte('('); - try dg.renderCType(writer, ctype); - try writer.writeByte(')'); + try w.writeByte('('); + try dg.renderCType(w, ctype); + try w.writeByte(')'); } const field_index = zcu.unionTagFieldIndex(loaded_union, Value.fromInterned(un.tag)).?; @@ -1521,57 +1546,57 @@ pub const DeclGen = struct { if (loaded_union.flagsUnordered(ip).layout == .@"packed") { if (field_ty.hasRuntimeBits(zcu)) { if (field_ty.isPtrAtRuntime(zcu)) { - try writer.writeByte('('); - try dg.renderCType(writer, ctype); - try writer.writeByte(')'); + try 
w.writeByte('('); + try dg.renderCType(w, ctype); + try w.writeByte(')'); } else if (field_ty.zigTypeTag(zcu) == .float) { - try writer.writeByte('('); - try dg.renderCType(writer, ctype); - try writer.writeByte(')'); + try w.writeByte('('); + try dg.renderCType(w, ctype); + try w.writeByte(')'); } - try dg.renderValue(writer, Value.fromInterned(un.val), location); - } else try writer.writeAll("0"); + try dg.renderValue(w, Value.fromInterned(un.val), location); + } else try w.writeByte('0'); return; } const has_tag = loaded_union.hasTag(ip); - if (has_tag) try writer.writeByte('{'); + if (has_tag) try w.writeByte('{'); const aggregate = ctype.info(ctype_pool).aggregate; for (0..if (has_tag) aggregate.fields.len else 1) |outer_field_index| { - if (outer_field_index > 0) try writer.writeByte(','); + if (outer_field_index > 0) try w.writeByte(','); switch (if (has_tag) aggregate.fields.at(outer_field_index, ctype_pool).name.index else .payload) { .tag => try dg.renderValue( - writer, + w, Value.fromInterned(un.tag), initializer_type, ), .payload => { - try writer.writeByte('{'); + try w.writeByte('{'); if (field_ty.hasRuntimeBits(zcu)) { - try writer.print(" .{ } = ", .{fmtIdent(field_name.toSlice(ip))}); + try w.print(" .{f} = ", .{fmtIdentSolo(field_name.toSlice(ip))}); try dg.renderValue( - writer, + w, Value.fromInterned(un.val), initializer_type, ); - try writer.writeByte(' '); + try w.writeByte(' '); } else for (0..loaded_union.field_types.len) |inner_field_index| { const inner_field_ty: Type = .fromInterned( loaded_union.field_types.get(ip)[inner_field_index], ); if (!inner_field_ty.hasRuntimeBits(zcu)) continue; - try dg.renderUndefValue(writer, inner_field_ty, initializer_type); + try dg.renderUndefValue(w, inner_field_ty, initializer_type); break; } - try writer.writeByte('}'); + try w.writeByte('}'); }, else => unreachable, } } - if (has_tag) try writer.writeByte('}'); + if (has_tag) try w.writeByte('}'); } }, } @@ -1579,10 +1604,10 @@ pub const DeclGen = 
struct { fn renderUndefValue( dg: *DeclGen, - writer: anytype, + w: *Writer, ty: Type, location: ValueRenderLocation, - ) error{ OutOfMemory, AnalysisFail }!void { + ) Error!void { const pt = dg.pt; const zcu = pt.zcu; const ip = &zcu.intern_pool; @@ -1612,57 +1637,57 @@ pub const DeclGen = struct { // All unsigned ints matching float types are pre-allocated. const repr_ty = dg.pt.intType(.unsigned, bits) catch unreachable; - try writer.writeAll("zig_make_"); - try dg.renderTypeForBuiltinFnName(writer, ty); - try writer.writeByte('('); + try w.writeAll("zig_make_"); + try dg.renderTypeForBuiltinFnName(w, ty); + try w.writeByte('('); switch (bits) { - 16 => try writer.print("{x}", .{@as(f16, @bitCast(undefPattern(i16)))}), - 32 => try writer.print("{x}", .{@as(f32, @bitCast(undefPattern(i32)))}), - 64 => try writer.print("{x}", .{@as(f64, @bitCast(undefPattern(i64)))}), - 80 => try writer.print("{x}", .{@as(f80, @bitCast(undefPattern(i80)))}), - 128 => try writer.print("{x}", .{@as(f128, @bitCast(undefPattern(i128)))}), + 16 => try w.print("{x}", .{@as(f16, @bitCast(undefPattern(i16)))}), + 32 => try w.print("{x}", .{@as(f32, @bitCast(undefPattern(i32)))}), + 64 => try w.print("{x}", .{@as(f64, @bitCast(undefPattern(i64)))}), + 80 => try w.print("{x}", .{@as(f80, @bitCast(undefPattern(i80)))}), + 128 => try w.print("{x}", .{@as(f128, @bitCast(undefPattern(i128)))}), else => unreachable, } - try writer.writeAll(", "); - try dg.renderUndefValue(writer, repr_ty, .FunctionArgument); - return writer.writeByte(')'); + try w.writeAll(", "); + try dg.renderUndefValue(w, repr_ty, .FunctionArgument); + return w.writeByte(')'); }, - .bool_type => try writer.writeAll(if (safety_on) "0xaa" else "false"), + .bool_type => try w.writeAll(if (safety_on) "0xaa" else "false"), else => switch (ip.indexToKey(ty.toIntern())) { .simple_type, .int_type, .enum_type, .error_set_type, .inferred_error_set_type, - => return writer.print("{x}", .{ - try dg.fmtIntLiteral(try pt.undefValue(ty), 
location), + => return w.print("{f}", .{ + try dg.fmtIntLiteralHex(try pt.undefValue(ty), location), }), .ptr_type => |ptr_type| switch (ptr_type.flags.size) { .one, .many, .c => { - try writer.writeAll("(("); - try dg.renderCType(writer, ctype); - return writer.print("){x})", .{ - try dg.fmtIntLiteral(.undef_usize, .Other), + try w.writeAll("(("); + try dg.renderCType(w, ctype); + return w.print("){f})", .{ + try dg.fmtIntLiteralHex(.undef_usize, .Other), }); }, .slice => { if (!location.isInitializer()) { - try writer.writeByte('('); - try dg.renderCType(writer, ctype); - try writer.writeByte(')'); + try w.writeByte('('); + try dg.renderCType(w, ctype); + try w.writeByte(')'); } - try writer.writeAll("{("); + try w.writeAll("{("); const ptr_ty = ty.slicePtrFieldType(zcu); - try dg.renderType(writer, ptr_ty); - return writer.print("){x}, {0x}}}", .{ - try dg.fmtIntLiteral(.undef_usize, .Other), + try dg.renderType(w, ptr_ty); + return w.print("){f}, {0f}}}", .{ + try dg.fmtIntLiteralHex(.undef_usize, .Other), }); }, }, .opt_type => |child_type| switch (ctype.info(ctype_pool)) { .basic, .pointer => try dg.renderUndefValue( - writer, + w, .fromInterned(if (ctype.isBool()) .bool_type else child_type), location, ), @@ -1671,21 +1696,21 @@ pub const DeclGen = struct { switch (aggregate.fields.at(0, ctype_pool).name.index) { .is_null, .payload => {}, .ptr, .len => return dg.renderUndefValue( - writer, + w, .fromInterned(child_type), location, ), else => unreachable, } if (!location.isInitializer()) { - try writer.writeByte('('); - try dg.renderCType(writer, ctype); - try writer.writeByte(')'); + try w.writeByte('('); + try dg.renderCType(w, ctype); + try w.writeByte(')'); } - try writer.writeByte('{'); + try w.writeByte('{'); for (0..aggregate.fields.len) |field_index| { - if (field_index > 0) try writer.writeByte(','); - try dg.renderUndefValue(writer, .fromInterned( + if (field_index > 0) try w.writeByte(','); + try dg.renderUndefValue(w, .fromInterned( switch 
(aggregate.fields.at(field_index, ctype_pool).name.index) { .is_null => .bool_type, .payload => child_type, @@ -1693,7 +1718,7 @@ pub const DeclGen = struct { }, ), initializer_type); } - try writer.writeByte('}'); + try w.writeByte('}'); }, }, .struct_type => { @@ -1701,117 +1726,117 @@ pub const DeclGen = struct { switch (loaded_struct.layout) { .auto, .@"extern" => { if (!location.isInitializer()) { - try writer.writeByte('('); - try dg.renderCType(writer, ctype); - try writer.writeByte(')'); + try w.writeByte('('); + try dg.renderCType(w, ctype); + try w.writeByte(')'); } - try writer.writeByte('{'); + try w.writeByte('{'); var field_it = loaded_struct.iterateRuntimeOrder(ip); var need_comma = false; while (field_it.next()) |field_index| { const field_ty: Type = .fromInterned(loaded_struct.field_types.get(ip)[field_index]); if (!field_ty.hasRuntimeBitsIgnoreComptime(zcu)) continue; - if (need_comma) try writer.writeByte(','); + if (need_comma) try w.writeByte(','); need_comma = true; - try dg.renderUndefValue(writer, field_ty, initializer_type); + try dg.renderUndefValue(w, field_ty, initializer_type); } - return writer.writeByte('}'); + return w.writeByte('}'); }, - .@"packed" => return writer.print("{x}", .{ - try dg.fmtIntLiteral(try pt.undefValue(ty), .Other), + .@"packed" => return w.print("{f}", .{ + try dg.fmtIntLiteralHex(try pt.undefValue(ty), .Other), }), } }, .tuple_type => |tuple_info| { if (!location.isInitializer()) { - try writer.writeByte('('); - try dg.renderCType(writer, ctype); - try writer.writeByte(')'); + try w.writeByte('('); + try dg.renderCType(w, ctype); + try w.writeByte(')'); } - try writer.writeByte('{'); + try w.writeByte('{'); var need_comma = false; for (0..tuple_info.types.len) |field_index| { if (tuple_info.values.get(ip)[field_index] != .none) continue; const field_ty: Type = .fromInterned(tuple_info.types.get(ip)[field_index]); if (!field_ty.hasRuntimeBitsIgnoreComptime(zcu)) continue; - if (need_comma) try 
writer.writeByte(','); + if (need_comma) try w.writeByte(','); need_comma = true; - try dg.renderUndefValue(writer, field_ty, initializer_type); + try dg.renderUndefValue(w, field_ty, initializer_type); } - return writer.writeByte('}'); + return w.writeByte('}'); }, .union_type => { const loaded_union = ip.loadUnionType(ty.toIntern()); switch (loaded_union.flagsUnordered(ip).layout) { .auto, .@"extern" => { if (!location.isInitializer()) { - try writer.writeByte('('); - try dg.renderCType(writer, ctype); - try writer.writeByte(')'); + try w.writeByte('('); + try dg.renderCType(w, ctype); + try w.writeByte(')'); } const has_tag = loaded_union.hasTag(ip); - if (has_tag) try writer.writeByte('{'); + if (has_tag) try w.writeByte('{'); const aggregate = ctype.info(ctype_pool).aggregate; for (0..if (has_tag) aggregate.fields.len else 1) |outer_field_index| { - if (outer_field_index > 0) try writer.writeByte(','); + if (outer_field_index > 0) try w.writeByte(','); switch (if (has_tag) aggregate.fields.at(outer_field_index, ctype_pool).name.index else .payload) { .tag => try dg.renderUndefValue( - writer, + w, .fromInterned(loaded_union.enum_tag_ty), initializer_type, ), .payload => { - try writer.writeByte('{'); + try w.writeByte('{'); for (0..loaded_union.field_types.len) |inner_field_index| { const inner_field_ty: Type = .fromInterned( loaded_union.field_types.get(ip)[inner_field_index], ); if (!inner_field_ty.hasRuntimeBits(pt.zcu)) continue; try dg.renderUndefValue( - writer, + w, inner_field_ty, initializer_type, ); break; } - try writer.writeByte('}'); + try w.writeByte('}'); }, else => unreachable, } } - if (has_tag) try writer.writeByte('}'); + if (has_tag) try w.writeByte('}'); }, - .@"packed" => return writer.print("{x}", .{ - try dg.fmtIntLiteral(try pt.undefValue(ty), .Other), + .@"packed" => return w.print("{f}", .{ + try dg.fmtIntLiteralHex(try pt.undefValue(ty), .Other), }), } }, .error_union_type => |error_union_type| switch (ctype.info(ctype_pool)) { 
.basic => try dg.renderUndefValue( - writer, + w, .fromInterned(error_union_type.error_set_type), location, ), .pointer, .aligned, .array, .vector, .fwd_decl, .function => unreachable, .aggregate => |aggregate| { if (!location.isInitializer()) { - try writer.writeByte('('); - try dg.renderCType(writer, ctype); - try writer.writeByte(')'); + try w.writeByte('('); + try dg.renderCType(w, ctype); + try w.writeByte(')'); } - try writer.writeByte('{'); + try w.writeByte('{'); for (0..aggregate.fields.len) |field_index| { - if (field_index > 0) try writer.writeByte(','); + if (field_index > 0) try w.writeByte(','); try dg.renderUndefValue( - writer, + w, .fromInterned( switch (aggregate.fields.at(field_index, ctype_pool).name.index) { .@"error" => error_union_type.error_set_type, @@ -1822,14 +1847,14 @@ pub const DeclGen = struct { initializer_type, ); } - try writer.writeByte('}'); + try w.writeByte('}'); }, }, .array_type, .vector_type => { const ai = ty.arrayInfo(zcu); if (ai.elem_type.eql(.u8, zcu)) { const c_len = ty.arrayLenIncludingSentinel(zcu); - var literal = stringLiteral(writer, c_len); + var literal: StringLiteral = .init(w, @intCast(c_len)); try literal.start(); var index: u64 = 0; while (index < c_len) : (index += 1) @@ -1837,19 +1862,19 @@ pub const DeclGen = struct { return literal.end(); } else { if (!location.isInitializer()) { - try writer.writeByte('('); - try dg.renderCType(writer, ctype); - try writer.writeByte(')'); + try w.writeByte('('); + try dg.renderCType(w, ctype); + try w.writeByte(')'); } - try writer.writeByte('{'); + try w.writeByte('{'); const c_len = ty.arrayLenIncludingSentinel(zcu); var index: u64 = 0; while (index < c_len) : (index += 1) { - if (index > 0) try writer.writeAll(", "); - try dg.renderUndefValue(writer, ty.childType(zcu), initializer_type); + if (index > 0) try w.writeAll(", "); + try dg.renderUndefValue(w, ty.childType(zcu), initializer_type); } - return writer.writeByte('}'); + return w.writeByte('}'); } }, 
.anyframe_type, @@ -1882,13 +1907,13 @@ pub const DeclGen = struct { fn renderFunctionSignature( dg: *DeclGen, - w: anytype, + w: *Writer, fn_val: Value, fn_align: InternPool.Alignment, kind: CType.Kind, name: union(enum) { nav: InternPool.Nav.Index, - fmt_ctype_pool_string: std.fmt.Formatter(formatCTypePoolString), + fmt_ctype_pool_string: std.fmt.Formatter(CTypePoolStringFormatData, formatCTypePoolString), @"export": struct { main_name: InternPool.NullTerminatedString, extern_name: InternPool.NullTerminatedString, @@ -1925,15 +1950,15 @@ pub const DeclGen = struct { var trailing = try renderTypePrefix(dg.pass, &dg.ctype_pool, zcu, w, fn_ctype, .suffix, .{}); if (toCallingConvention(fn_info.cc, zcu)) |call_conv| { - try w.print("{}zig_callconv({s})", .{ trailing, call_conv }); + try w.print("{f}zig_callconv({s})", .{ trailing, call_conv }); trailing = .maybe_space; } - try w.print("{}", .{trailing}); + try w.print("{f}", .{trailing}); switch (name) { .nav => |nav| try dg.renderNavName(w, nav), - .fmt_ctype_pool_string => |fmt| try w.print("{ }", .{fmt}), - .@"export" => |@"export"| try w.print("{ }", .{fmtIdent(@"export".extern_name.toSlice(ip))}), + .fmt_ctype_pool_string => |fmt| try w.print("{f}", .{fmt}), + .@"export" => |@"export"| try w.print("{f}", .{fmtIdentSolo(@"export".extern_name.toSlice(ip))}), } try renderTypeSuffix( @@ -1960,17 +1985,17 @@ pub const DeclGen = struct { const is_mangled = isMangledIdent(extern_name, true); const is_export = @"export".extern_name != @"export".main_name; if (is_mangled and is_export) { - try w.print(" zig_mangled_export({ }, {s}, {s})", .{ - fmtIdent(extern_name), + try w.print(" zig_mangled_export({f}, {f}, {f})", .{ + fmtIdentSolo(extern_name), fmtStringLiteral(extern_name, null), fmtStringLiteral(@"export".main_name.toSlice(ip), null), }); } else if (is_mangled) { - try w.print(" zig_mangled({ }, {s})", .{ - fmtIdent(extern_name), fmtStringLiteral(extern_name, null), + try w.print(" zig_mangled({f}, {f})", .{ + 
fmtIdentSolo(extern_name), fmtStringLiteral(extern_name, null), }); } else if (is_export) { - try w.print(" zig_export({s}, {s})", .{ + try w.print(" zig_export({f}, {f})", .{ fmtStringLiteral(@"export".main_name.toSlice(ip), null), fmtStringLiteral(extern_name, null), }); @@ -2003,11 +2028,11 @@ pub const DeclGen = struct { /// | `renderTypeAndName` | "uint8_t *name" | "uint8_t *name[10]" | /// | `renderType` | "uint8_t *" | "uint8_t *[10]" | /// - fn renderType(dg: *DeclGen, w: anytype, t: Type) error{OutOfMemory}!void { + fn renderType(dg: *DeclGen, w: *Writer, t: Type) Error!void { try dg.renderCType(w, try dg.ctypeFromType(t, .complete)); } - fn renderCType(dg: *DeclGen, w: anytype, ctype: CType) error{OutOfMemory}!void { + fn renderCType(dg: *DeclGen, w: *Writer, ctype: CType) Error!void { _ = try renderTypePrefix(dg.pass, &dg.ctype_pool, dg.pt.zcu, w, ctype, .suffix, .{}); try renderTypeSuffix(dg.pass, &dg.ctype_pool, dg.pt.zcu, w, ctype, .suffix, .{}); } @@ -2022,7 +2047,7 @@ pub const DeclGen = struct { value: Value, }, - pub fn writeValue(self: *const IntCastContext, dg: *DeclGen, w: anytype, location: ValueRenderLocation) !void { + pub fn writeValue(self: *const IntCastContext, dg: *DeclGen, w: *Writer, location: ValueRenderLocation) !void { switch (self.*) { .c_value => |v| { try v.f.writeCValue(w, v.value, location); @@ -2068,7 +2093,7 @@ pub const DeclGen = struct { /// | > 64 bit integer | > 64 bit integer | zig_make_(zig_hi_(src), zig_lo_(src)) fn renderIntCast( dg: *DeclGen, - w: anytype, + w: *Writer, dest_ty: Type, context: IntCastContext, src_ty: Type, @@ -2118,7 +2143,7 @@ pub const DeclGen = struct { } else if (dest_bits > 64 and src_bits <= 64) { try w.writeAll("zig_make_"); try dg.renderTypeForBuiltinFnName(w, dest_ty); - try w.writeAll("(0, "); // TODO: Should the 0 go through fmtIntLiteral? 
+ try w.writeAll("(0, "); if (src_is_ptr) { try w.writeByte('('); try dg.renderType(w, src_eff_ty); @@ -2152,13 +2177,13 @@ pub const DeclGen = struct { /// fn renderTypeAndName( dg: *DeclGen, - w: anytype, + w: *Writer, ty: Type, name: CValue, qualifiers: CQualifiers, alignment: Alignment, kind: CType.Kind, - ) error{ OutOfMemory, AnalysisFail }!void { + ) !void { try dg.renderCTypeAndName( w, try dg.ctypeFromType(ty, kind), @@ -2173,12 +2198,12 @@ pub const DeclGen = struct { fn renderCTypeAndName( dg: *DeclGen, - w: anytype, + w: *Writer, ctype: CType, name: CValue, qualifiers: CQualifiers, alignas: CType.AlignAs, - ) error{ OutOfMemory, AnalysisFail }!void { + ) !void { const zcu = dg.pt.zcu; switch (alignas.abiOrder()) { .lt => try w.print("zig_under_align({}) ", .{alignas.toByteUnits()}), @@ -2186,24 +2211,24 @@ pub const DeclGen = struct { .gt => try w.print("zig_align({}) ", .{alignas.toByteUnits()}), } - try w.print("{}", .{ + try w.print("{f}", .{ try renderTypePrefix(dg.pass, &dg.ctype_pool, zcu, w, ctype, .suffix, qualifiers), }); try dg.writeName(w, name); try renderTypeSuffix(dg.pass, &dg.ctype_pool, zcu, w, ctype, .suffix, .{}); } - fn writeName(dg: *DeclGen, w: anytype, c_value: CValue) !void { + fn writeName(dg: *DeclGen, w: *Writer, c_value: CValue) !void { switch (c_value) { .new_local, .local => |i| try w.print("t{d}", .{i}), .constant => |uav| try renderUavName(w, uav), .nav => |nav| try dg.renderNavName(w, nav), - .identifier => |ident| try w.print("{ }", .{fmtIdent(ident)}), + .identifier => |ident| try w.print("{f}", .{fmtIdentSolo(ident)}), else => unreachable, } } - fn writeCValue(dg: *DeclGen, w: anytype, c_value: CValue) !void { + fn writeCValue(dg: *DeclGen, w: *Writer, c_value: CValue) Error!void { switch (c_value) { .none, .new_local, .local, .local_ref => unreachable, .constant => |uav| try renderUavName(w, uav), @@ -2215,18 +2240,18 @@ pub const DeclGen = struct { try dg.renderNavName(w, nav); }, .undef => |ty| try 
dg.renderUndefValue(w, ty, .Other), - .identifier => |ident| try w.print("{ }", .{fmtIdent(ident)}), - .payload_identifier => |ident| try w.print("{ }.{ }", .{ - fmtIdent("payload"), - fmtIdent(ident), + .identifier => |ident| try w.print("{f}", .{fmtIdentSolo(ident)}), + .payload_identifier => |ident| try w.print("{f}.{f}", .{ + fmtIdentSolo("payload"), + fmtIdentSolo(ident), }), - .ctype_pool_string => |string| try w.print("{ }", .{ - fmtCTypePoolString(string, &dg.ctype_pool), + .ctype_pool_string => |string| try w.print("{f}", .{ + fmtCTypePoolString(string, &dg.ctype_pool, true), }), } } - fn writeCValueDeref(dg: *DeclGen, w: anytype, c_value: CValue) !void { + fn writeCValueDeref(dg: *DeclGen, w: *Writer, c_value: CValue) !void { switch (c_value) { .none, .new_local, @@ -2245,26 +2270,31 @@ pub const DeclGen = struct { }, .nav_ref => |nav| try dg.renderNavName(w, nav), .undef => unreachable, - .identifier => |ident| try w.print("(*{ })", .{fmtIdent(ident)}), - .payload_identifier => |ident| try w.print("(*{ }.{ })", .{ - fmtIdent("payload"), - fmtIdent(ident), + .identifier => |ident| try w.print("(*{f})", .{fmtIdentSolo(ident)}), + .payload_identifier => |ident| try w.print("(*{f}.{f})", .{ + fmtIdentSolo("payload"), + fmtIdentSolo(ident), }), } } fn writeCValueMember( dg: *DeclGen, - writer: anytype, + w: *Writer, c_value: CValue, member: CValue, - ) error{ OutOfMemory, AnalysisFail }!void { - try dg.writeCValue(writer, c_value); - try writer.writeByte('.'); - try dg.writeCValue(writer, member); + ) Error!void { + try dg.writeCValue(w, c_value); + try w.writeByte('.'); + try dg.writeCValue(w, member); } - fn writeCValueDerefMember(dg: *DeclGen, writer: anytype, c_value: CValue, member: CValue) !void { + fn writeCValueDerefMember( + dg: *DeclGen, + w: *Writer, + c_value: CValue, + member: CValue, + ) !void { switch (c_value) { .none, .new_local, @@ -2278,15 +2308,15 @@ pub const DeclGen = struct { .ctype_pool_string, => unreachable, .nav, .identifier, 
.payload_identifier => { - try dg.writeCValue(writer, c_value); - try writer.writeAll("->"); + try dg.writeCValue(w, c_value); + try w.writeAll("->"); }, .nav_ref => { - try dg.writeCValueDeref(writer, c_value); - try writer.writeByte('.'); + try dg.writeCValueDeref(w, c_value); + try w.writeByte('.'); }, } - try dg.writeCValue(writer, member); + try dg.writeCValue(w, member); } fn renderFwdDecl( @@ -2302,7 +2332,7 @@ pub const DeclGen = struct { const zcu = dg.pt.zcu; const ip = &zcu.intern_pool; const nav = ip.getNav(nav_index); - const fwd = dg.fwdDeclWriter(); + const fwd = &dg.fwd_decl.writer; try fwd.writeAll(switch (flags.linkage) { .internal => "static ", .strong, .weak, .link_once => "zig_extern ", @@ -2328,36 +2358,36 @@ pub const DeclGen = struct { try fwd.writeAll(";\n"); } - fn renderNavName(dg: *DeclGen, writer: anytype, nav_index: InternPool.Nav.Index) !void { + fn renderNavName(dg: *DeclGen, w: *Writer, nav_index: InternPool.Nav.Index) !void { const zcu = dg.pt.zcu; const ip = &zcu.intern_pool; const nav = ip.getNav(nav_index); if (nav.getExtern(ip)) |@"extern"| { - try writer.print("{ }", .{ - fmtIdent(ip.getNav(@"extern".owner_nav).name.toSlice(ip)), + try w.print("{f}", .{ + fmtIdentSolo(ip.getNav(@"extern".owner_nav).name.toSlice(ip)), }); } else { // MSVC has a limit of 4095 character token length limit, and fmtIdent can (worst case), // expand to 3x the length of its input, but let's cut it off at a much shorter limit. 
const fqn_slice = ip.getNav(nav_index).fqn.toSlice(ip); - try writer.print("{}__{d}", .{ - fmtIdent(fqn_slice[0..@min(fqn_slice.len, 100)]), + try w.print("{f}__{d}", .{ + fmtIdentUnsolo(fqn_slice[0..@min(fqn_slice.len, 100)]), @intFromEnum(nav_index), }); } } - fn renderUavName(writer: anytype, uav: Value) !void { - try writer.print("__anon_{d}", .{@intFromEnum(uav.toIntern())}); + fn renderUavName(w: *Writer, uav: Value) !void { + try w.print("__anon_{d}", .{@intFromEnum(uav.toIntern())}); } - fn renderTypeForBuiltinFnName(dg: *DeclGen, writer: anytype, ty: Type) !void { - try dg.renderCTypeForBuiltinFnName(writer, try dg.ctypeFromType(ty, .complete)); + fn renderTypeForBuiltinFnName(dg: *DeclGen, w: *Writer, ty: Type) !void { + try dg.renderCTypeForBuiltinFnName(w, try dg.ctypeFromType(ty, .complete)); } - fn renderCTypeForBuiltinFnName(dg: *DeclGen, writer: anytype, ctype: CType) !void { + fn renderCTypeForBuiltinFnName(dg: *DeclGen, w: *Writer, ctype: CType) !void { switch (ctype.info(&dg.ctype_pool)) { - else => |ctype_info| try writer.print("{c}{d}", .{ + else => |ctype_info| try w.print("{c}{d}", .{ if (ctype.isBool()) signAbbrev(.unsigned) else if (ctype.isInteger()) @@ -2370,11 +2400,11 @@ pub const DeclGen = struct { return dg.fail("TODO: CBE: implement renderTypeForBuiltinFnName for {s} type", .{@tagName(ctype_info)}), if (ctype.isFloat()) ctype.floatActiveBits(dg.mod) else dg.byteSize(ctype) * 8, }), - .array => try writer.writeAll("big"), + .array => try w.writeAll("big"), } } - fn renderBuiltinInfo(dg: *DeclGen, writer: anytype, ty: Type, info: BuiltinInfo) !void { + fn renderBuiltinInfo(dg: *DeclGen, w: *Writer, ty: Type, info: BuiltinInfo) !void { const ctype = try dg.ctypeFromType(ty, .complete); const is_big = ctype.info(&dg.ctype_pool) == .array; switch (info) { @@ -2389,8 +2419,8 @@ pub const DeclGen = struct { .bits = @intCast(ty.bitSize(zcu)), }; - if (is_big) try writer.print(", {}", .{int_info.signedness == .signed}); - try writer.print(", 
{}", .{try dg.fmtIntLiteral( + if (is_big) try w.print(", {}", .{int_info.signedness == .signed}); + try w.print(", {f}", .{try dg.fmtIntLiteralDec( try pt.intValue(if (is_big) .u16 else .u8, int_info.bits), .FunctionArgument, )}); @@ -2400,18 +2430,38 @@ pub const DeclGen = struct { dg: *DeclGen, val: Value, loc: ValueRenderLocation, - ) !std.fmt.Formatter(formatIntLiteral) { + base: u8, + case: std.fmt.Case, + ) !std.fmt.Formatter(FormatIntLiteralContext, formatIntLiteral) { const zcu = dg.pt.zcu; const kind = loc.toCTypeKind(); const ty = val.typeOf(zcu); - return std.fmt.Formatter(formatIntLiteral){ .data = .{ + return .{ .data = .{ .dg = dg, .int_info = ty.intInfo(zcu), .kind = kind, .ctype = try dg.ctypeFromType(ty, kind), .val = val, + .base = base, + .case = case, } }; } + + fn fmtIntLiteralDec( + dg: *DeclGen, + val: Value, + loc: ValueRenderLocation, + ) !std.fmt.Formatter(FormatIntLiteralContext, formatIntLiteral) { + return fmtIntLiteral(dg, val, loc, 10, .lower); + } + + fn fmtIntLiteralHex( + dg: *DeclGen, + val: Value, + loc: ValueRenderLocation, + ) !std.fmt.Formatter(FormatIntLiteralContext, formatIntLiteral) { + return fmtIntLiteral(dg, val, loc, 16, .lower); + } }; const CTypeFix = enum { prefix, suffix }; @@ -2421,28 +2471,19 @@ const RenderCTypeTrailing = enum { no_space, maybe_space, - pub fn format( - self: @This(), - comptime fmt: []const u8, - _: std.fmt.FormatOptions, - w: anytype, - ) @TypeOf(w).Error!void { - if (fmt.len != 0) - @compileError("invalid format string '" ++ fmt ++ "' for type '" ++ - @typeName(@This()) ++ "'"); - comptime assert(fmt.len == 0); + pub fn format(self: @This(), w: *Writer) Writer.Error!void { switch (self) { .no_space => {}, .maybe_space => try w.writeByte(' '), } } }; -fn renderAlignedTypeName(w: anytype, ctype: CType) !void { +fn renderAlignedTypeName(w: *Writer, ctype: CType) !void { try w.print("anon__aligned_{d}", .{@intFromEnum(ctype.index)}); } fn renderFwdDeclTypeName( zcu: *Zcu, - w: anytype, + w: 
*Writer, ctype: CType, fwd_decl: CType.Info.FwdDecl, attributes: []const u8, @@ -2451,8 +2492,8 @@ fn renderFwdDeclTypeName( try w.print("{s} {s}", .{ @tagName(fwd_decl.tag), attributes }); switch (fwd_decl.name) { .anon => try w.print("anon__lazy_{d}", .{@intFromEnum(ctype.index)}), - .index => |index| try w.print("{}__{d}", .{ - fmtIdent(Type.fromInterned(index).containerTypeName(ip).toSlice(&zcu.intern_pool)), + .index => |index| try w.print("{f}__{d}", .{ + fmtIdentUnsolo(Type.fromInterned(index).containerTypeName(ip).toSlice(&zcu.intern_pool)), @intFromEnum(index), }), } @@ -2461,17 +2502,17 @@ fn renderTypePrefix( pass: DeclGen.Pass, ctype_pool: *const CType.Pool, zcu: *Zcu, - w: anytype, + w: *Writer, ctype: CType, parent_fix: CTypeFix, qualifiers: CQualifiers, -) @TypeOf(w).Error!RenderCTypeTrailing { +) Writer.Error!RenderCTypeTrailing { var trailing = RenderCTypeTrailing.maybe_space; switch (ctype.info(ctype_pool)) { .basic => |basic_info| try w.writeAll(@tagName(basic_info)), .pointer => |pointer_info| { - try w.print("{}*", .{try renderTypePrefix( + try w.print("{f}*", .{try renderTypePrefix( pass, ctype_pool, zcu, @@ -2508,7 +2549,7 @@ fn renderTypePrefix( ); switch (parent_fix) { .prefix => { - try w.print("{}(", .{child_trailing}); + try w.print("{f}(", .{child_trailing}); return .no_space; }, .suffix => return child_trailing, @@ -2560,7 +2601,7 @@ fn renderTypePrefix( ); switch (parent_fix) { .prefix => { - try w.print("{}(", .{child_trailing}); + try w.print("{f}(", .{child_trailing}); return .no_space; }, .suffix => return child_trailing, @@ -2569,7 +2610,7 @@ fn renderTypePrefix( } var qualifier_it = qualifiers.iterator(); while (qualifier_it.next()) |qualifier| { - try w.print("{}{s}", .{ trailing, @tagName(qualifier) }); + try w.print("{f}{s}", .{ trailing, @tagName(qualifier) }); trailing = .maybe_space; } return trailing; @@ -2578,11 +2619,11 @@ fn renderTypeSuffix( pass: DeclGen.Pass, ctype_pool: *const CType.Pool, zcu: *Zcu, - w: anytype, + 
w: *Writer, ctype: CType, parent_fix: CTypeFix, qualifiers: CQualifiers, -) @TypeOf(w).Error!void { +) Writer.Error!void { switch (ctype.info(ctype_pool)) { .basic, .aligned, .fwd_decl, .aggregate => {}, .pointer => |pointer_info| try renderTypeSuffix( @@ -2617,7 +2658,7 @@ fn renderTypeSuffix( need_comma = true; const trailing = try renderTypePrefix(pass, ctype_pool, zcu, w, param_type, .suffix, qualifiers); - if (qualifiers.contains(.@"const")) try w.print("{}a{d}", .{ trailing, param_index }); + if (qualifiers.contains(.@"const")) try w.print("{f}a{d}", .{ trailing, param_index }); try renderTypeSuffix(pass, ctype_pool, zcu, w, param_type, .suffix, .{}); } if (function_info.varargs) { @@ -2634,49 +2675,49 @@ fn renderTypeSuffix( } fn renderFields( zcu: *Zcu, - writer: anytype, + w: *Writer, ctype_pool: *const CType.Pool, aggregate_info: CType.Info.Aggregate, indent: usize, ) !void { - try writer.writeAll("{\n"); + try w.writeAll("{\n"); for (0..aggregate_info.fields.len) |field_index| { const field_info = aggregate_info.fields.at(field_index, ctype_pool); - try writer.writeByteNTimes(' ', indent + 1); + try w.splatByteAll(' ', indent + 1); switch (field_info.alignas.abiOrder()) { .lt => { std.debug.assert(aggregate_info.@"packed"); - if (field_info.alignas.@"align" != .@"1") try writer.print("zig_under_align({}) ", .{ + if (field_info.alignas.@"align" != .@"1") try w.print("zig_under_align({}) ", .{ field_info.alignas.toByteUnits(), }); }, .eq => if (aggregate_info.@"packed" and field_info.alignas.@"align" != .@"1") - try writer.print("zig_align({}) ", .{field_info.alignas.toByteUnits()}), + try w.print("zig_align({}) ", .{field_info.alignas.toByteUnits()}), .gt => { std.debug.assert(field_info.alignas.@"align" != .@"1"); - try writer.print("zig_align({}) ", .{field_info.alignas.toByteUnits()}); + try w.print("zig_align({}) ", .{field_info.alignas.toByteUnits()}); }, } const trailing = try renderTypePrefix( .flush, ctype_pool, zcu, - writer, + w, 
field_info.ctype, .suffix, .{}, ); - try writer.print("{}{ }", .{ trailing, fmtCTypePoolString(field_info.name, ctype_pool) }); - try renderTypeSuffix(.flush, ctype_pool, zcu, writer, field_info.ctype, .suffix, .{}); - try writer.writeAll(";\n"); + try w.print("{f}{f}", .{ trailing, fmtCTypePoolString(field_info.name, ctype_pool, true) }); + try renderTypeSuffix(.flush, ctype_pool, zcu, w, field_info.ctype, .suffix, .{}); + try w.writeAll(";\n"); } - try writer.writeByteNTimes(' ', indent); - try writer.writeByte('}'); + try w.splatByteAll(' ', indent); + try w.writeByte('}'); } pub fn genTypeDecl( zcu: *Zcu, - writer: anytype, + w: *Writer, global_ctype_pool: *const CType.Pool, global_ctype: CType, pass: DeclGen.Pass, @@ -2689,27 +2730,27 @@ pub fn genTypeDecl( .aligned => |aligned_info| { if (!found_existing) { std.debug.assert(aligned_info.alignas.abiOrder().compare(.lt)); - try writer.print("typedef zig_under_align({d}) ", .{aligned_info.alignas.toByteUnits()}); - try writer.print("{}", .{try renderTypePrefix( + try w.print("typedef zig_under_align({d}) ", .{aligned_info.alignas.toByteUnits()}); + try w.print("{f}", .{try renderTypePrefix( .flush, global_ctype_pool, zcu, - writer, + w, aligned_info.ctype, .suffix, .{}, )}); - try renderAlignedTypeName(writer, global_ctype); - try renderTypeSuffix(.flush, global_ctype_pool, zcu, writer, aligned_info.ctype, .suffix, .{}); - try writer.writeAll(";\n"); + try renderAlignedTypeName(w, global_ctype); + try renderTypeSuffix(.flush, global_ctype_pool, zcu, w, aligned_info.ctype, .suffix, .{}); + try w.writeAll(";\n"); } switch (pass) { .nav, .uav => { - try writer.writeAll("typedef "); - _ = try renderTypePrefix(.flush, global_ctype_pool, zcu, writer, global_ctype, .suffix, .{}); - try writer.writeByte(' '); - _ = try renderTypePrefix(pass, decl_ctype_pool, zcu, writer, decl_ctype, .suffix, .{}); - try writer.writeAll(";\n"); + try w.writeAll("typedef "); + _ = try renderTypePrefix(.flush, global_ctype_pool, zcu, w, 
global_ctype, .suffix, .{}); + try w.writeByte(' '); + _ = try renderTypePrefix(pass, decl_ctype_pool, zcu, w, decl_ctype, .suffix, .{}); + try w.writeAll(";\n"); }, .flush => {}, } @@ -2717,24 +2758,24 @@ pub fn genTypeDecl( .fwd_decl => |fwd_decl_info| switch (fwd_decl_info.name) { .anon => switch (pass) { .nav, .uav => { - try writer.writeAll("typedef "); - _ = try renderTypePrefix(.flush, global_ctype_pool, zcu, writer, global_ctype, .suffix, .{}); - try writer.writeByte(' '); - _ = try renderTypePrefix(pass, decl_ctype_pool, zcu, writer, decl_ctype, .suffix, .{}); - try writer.writeAll(";\n"); + try w.writeAll("typedef "); + _ = try renderTypePrefix(.flush, global_ctype_pool, zcu, w, global_ctype, .suffix, .{}); + try w.writeByte(' '); + _ = try renderTypePrefix(pass, decl_ctype_pool, zcu, w, decl_ctype, .suffix, .{}); + try w.writeAll(";\n"); }, .flush => {}, }, .index => |index| if (!found_existing) { const ip = &zcu.intern_pool; const ty: Type = .fromInterned(index); - _ = try renderTypePrefix(.flush, global_ctype_pool, zcu, writer, global_ctype, .suffix, .{}); - try writer.writeByte(';'); + _ = try renderTypePrefix(.flush, global_ctype_pool, zcu, w, global_ctype, .suffix, .{}); + try w.writeByte(';'); const file_scope = ty.typeDeclInstAllowGeneratedTag(zcu).?.resolveFile(ip); - if (!zcu.fileByIndex(file_scope).mod.?.strip) try writer.print(" /* {} */", .{ + if (!zcu.fileByIndex(file_scope).mod.?.strip) try w.print(" /* {f} */", .{ ty.containerTypeName(ip).fmt(ip), }); - try writer.writeByte('\n'); + try w.writeByte('\n'); }, }, .aggregate => |aggregate_info| switch (aggregate_info.name) { @@ -2742,38 +2783,39 @@ pub fn genTypeDecl( .fwd_decl => |fwd_decl| if (!found_existing) { try renderFwdDeclTypeName( zcu, - writer, + w, fwd_decl, fwd_decl.info(global_ctype_pool).fwd_decl, if (aggregate_info.@"packed") "zig_packed(" else "", ); - try writer.writeByte(' '); - try renderFields(zcu, writer, global_ctype_pool, aggregate_info, 0); - if 
(aggregate_info.@"packed") try writer.writeByte(')'); - try writer.writeAll(";\n"); + try w.writeByte(' '); + try renderFields(zcu, w, global_ctype_pool, aggregate_info, 0); + if (aggregate_info.@"packed") try w.writeByte(')'); + try w.writeAll(";\n"); }, }, } } -pub fn genGlobalAsm(zcu: *Zcu, writer: anytype) !void { +pub fn genGlobalAsm(zcu: *Zcu, w: *Writer) !void { for (zcu.global_assembly.values()) |asm_source| { - try writer.print("__asm({s});\n", .{fmtStringLiteral(asm_source, null)}); + try w.print("__asm({f});\n", .{fmtStringLiteral(asm_source, null)}); } } -pub fn genErrDecls(o: *Object) !void { +pub fn genErrDecls(o: *Object) Error!void { const pt = o.dg.pt; const zcu = pt.zcu; const ip = &zcu.intern_pool; - const writer = o.writer(); + const w = &o.code.writer; var max_name_len: usize = 0; // do not generate an invalid empty enum when the global error set is empty const names = ip.global_error_set.getNamesFromMainThread(); if (names.len > 0) { - try writer.writeAll("enum {\n"); - o.indent_writer.pushIndent(); + try w.writeAll("enum {"); + o.indent(); + try o.newline(); for (names, 1..) 
|name_nts, value| { const name = name_nts.toSlice(ip); max_name_len = @max(name.len, max_name_len); @@ -2781,11 +2823,13 @@ pub fn genErrDecls(o: *Object) !void { .ty = .anyerror_type, .name = name_nts, } }); - try o.dg.renderValue(writer, Value.fromInterned(err_val), .Other); - try writer.print(" = {d}u,\n", .{value}); + try o.dg.renderValue(w, Value.fromInterned(err_val), .Other); + try w.print(" = {d}u,", .{value}); + try o.newline(); } - o.indent_writer.popIndent(); - try writer.writeAll("};\n"); + try o.outdent(); + try w.writeAll("};"); + try o.newline(); } const array_identifier = "zig_errorName"; const name_prefix = array_identifier ++ "_"; @@ -2808,18 +2852,19 @@ pub fn genErrDecls(o: *Object) !void { .storage = .{ .bytes = name.toString() }, } }); - try writer.writeAll("static "); + try w.writeAll("static "); try o.dg.renderTypeAndName( - writer, + w, name_ty, .{ .identifier = identifier }, Const, .none, .complete, ); - try writer.writeAll(" = "); - try o.dg.renderValue(writer, Value.fromInterned(name_val), .StaticInitializer); - try writer.writeAll(";\n"); + try w.writeAll(" = "); + try o.dg.renderValue(w, Value.fromInterned(name_val), .StaticInitializer); + try w.writeByte(';'); + try o.newline(); } const name_array_ty = try pt.arrayType(.{ @@ -2827,33 +2872,34 @@ pub fn genErrDecls(o: *Object) !void { .child = .slice_const_u8_sentinel_0_type, }); - try writer.writeAll("static "); + try w.writeAll("static "); try o.dg.renderTypeAndName( - writer, + w, name_array_ty, .{ .identifier = array_identifier }, Const, .none, .complete, ); - try writer.writeAll(" = {"); + try w.writeAll(" = {"); for (names, 1..) 
|name_nts, val| { const name = name_nts.toSlice(ip); - if (val > 1) try writer.writeAll(", "); - try writer.print("{{" ++ name_prefix ++ "{}, {}}}", .{ - fmtIdent(name), - try o.dg.fmtIntLiteral(try pt.intValue(.usize, name.len), .StaticInitializer), + if (val > 1) try w.writeAll(", "); + try w.print("{{" ++ name_prefix ++ "{f}, {f}}}", .{ + fmtIdentUnsolo(name), + try o.dg.fmtIntLiteralDec(try pt.intValue(.usize, name.len), .StaticInitializer), }); } - try writer.writeAll("};\n"); + try w.writeAll("};"); + try o.newline(); } -pub fn genLazyFn(o: *Object, lazy_ctype_pool: *const CType.Pool, lazy_fn: LazyFnMap.Entry) !void { +pub fn genLazyFn(o: *Object, lazy_ctype_pool: *const CType.Pool, lazy_fn: LazyFnMap.Entry) Error!void { const pt = o.dg.pt; const zcu = pt.zcu; const ip = &zcu.intern_pool; const ctype_pool = &o.dg.ctype_pool; - const w = o.writer(); + const w = &o.code.writer; const key = lazy_fn.key_ptr.*; const val = lazy_fn.value_ptr; switch (key) { @@ -2863,9 +2909,14 @@ pub fn genLazyFn(o: *Object, lazy_ctype_pool: *const CType.Pool, lazy_fn: LazyFn try w.writeAll("static "); try o.dg.renderType(w, name_slice_ty); - try w.print(" {}(", .{val.fn_name.fmt(lazy_ctype_pool)}); + try w.print(" {f}(", .{val.fn_name.fmt(lazy_ctype_pool)}); try o.dg.renderTypeAndName(w, enum_ty, .{ .identifier = "tag" }, Const, .none, .complete); - try w.writeAll(") {\n switch (tag) {\n"); + try w.writeAll(") {"); + o.indent(); + try o.newline(); + try w.writeAll("switch (tag) {"); + o.indent(); + try o.newline(); const tag_names = enum_ty.enumFields(zcu); for (0..tag_names.len) |tag_index| { const tag_name = tag_names.get(ip)[tag_index]; @@ -2882,34 +2933,43 @@ pub fn genLazyFn(o: *Object, lazy_ctype_pool: *const CType.Pool, lazy_fn: LazyFn .storage = .{ .bytes = tag_name.toString() }, } }); - try w.print(" case {}: {{\n static ", .{ - try o.dg.fmtIntLiteral(try tag_val.intFromEnum(enum_ty, pt), .Other), + try w.print("case {f}: {{", .{ + try o.dg.fmtIntLiteralDec(try 
tag_val.intFromEnum(enum_ty, pt), .Other), }); + o.indent(); + try o.newline(); + try w.writeAll("static "); try o.dg.renderTypeAndName(w, name_ty, .{ .identifier = "name" }, Const, .none, .complete); try w.writeAll(" = "); try o.dg.renderValue(w, Value.fromInterned(name_val), .StaticInitializer); - try w.writeAll(";\n return ("); + try w.writeByte(';'); + try o.newline(); + try w.writeAll("return ("); try o.dg.renderType(w, name_slice_ty); - try w.print("){{{}, {}}};\n", .{ - fmtIdent("name"), - try o.dg.fmtIntLiteral(try pt.intValue(.usize, tag_name_len), .Other), + try w.print("){{{f}, {f}}};", .{ + fmtIdentUnsolo("name"), + try o.dg.fmtIntLiteralDec(try pt.intValue(.usize, tag_name_len), .Other), }); - - try w.writeAll(" }\n"); + try o.newline(); + try o.outdent(); + try w.writeByte('}'); + try o.newline(); } - try w.writeAll(" }\n while ("); - try o.dg.renderValue(w, Value.true, .Other); - try w.writeAll(") "); - _ = try airBreakpoint(w); - try w.writeAll("}\n"); + try o.outdent(); + try w.writeByte('}'); + try o.newline(); + try airUnreach(o); + try o.outdent(); + try w.writeByte('}'); + try o.newline(); }, .never_tail, .never_inline => |fn_nav_index| { const fn_val = zcu.navValue(fn_nav_index); const fn_ctype = try o.dg.ctypeFromType(fn_val.typeOf(zcu), .complete); const fn_info = fn_ctype.info(ctype_pool).function; - const fn_name = fmtCTypePoolString(val.fn_name, lazy_ctype_pool); + const fn_name = fmtCTypePoolString(val.fn_name, lazy_ctype_pool, true); - const fwd = o.dg.fwdDeclWriter(); + const fwd = &o.dg.fwd_decl.writer; try fwd.print("static zig_{s} ", .{@tagName(key)}); try o.dg.renderFunctionSignature(fwd, fn_val, ip.getNav(fn_nav_index).getAlignment(), .forward, .{ .fmt_ctype_pool_string = fn_name, @@ -2920,14 +2980,21 @@ pub fn genLazyFn(o: *Object, lazy_ctype_pool: *const CType.Pool, lazy_fn: LazyFn try o.dg.renderFunctionSignature(w, fn_val, .none, .complete, .{ .fmt_ctype_pool_string = fn_name, }); - try w.writeAll(" {\n return "); + try 
w.writeAll(" {"); + o.indent(); + try o.newline(); + try w.writeAll("return "); try o.dg.renderNavName(w, fn_nav_index); try w.writeByte('('); for (0..fn_info.param_ctypes.len) |arg| { if (arg > 0) try w.writeAll(", "); try w.print("a{d}", .{arg}); } - try w.writeAll(");\n}\n"); + try w.writeAll(");"); + try o.newline(); + try o.outdent(); + try w.writeByte('}'); + try o.newline(); }, } } @@ -2967,12 +3034,14 @@ pub fn generate( .scratch = .empty, .uavs = .empty, }, + .code_header = .init(gpa), .code = .init(gpa), - .indent_writer = undefined, // set later so we can get a pointer to object.code + .indent_counter = 0, }, .lazy_fns = .empty, }; defer { + function.object.code_header.deinit(); function.object.code.deinit(); function.object.dg.fwd_decl.deinit(); function.object.dg.ctype_pool.deinit(gpa); @@ -2981,22 +3050,24 @@ pub fn generate( function.deinit(); } try function.object.dg.ctype_pool.init(gpa); - function.object.indent_writer = .{ .underlying_writer = function.object.code.writer() }; genFunc(&function) catch |err| switch (err) { error.AnalysisFail => return zcu.codegenFailMsg(func.owner_nav, function.object.dg.error_msg.?), - error.OutOfMemory => |e| return e, + error.OutOfMemory => return error.OutOfMemory, + error.WriteFailed => return error.OutOfMemory, }; var mir: Mir = .{ .uavs = .empty, .code = &.{}, + .code_header = &.{}, .fwd_decl = &.{}, .ctype_pool = .empty, .lazy_fns = .empty, }; errdefer mir.deinit(gpa); mir.uavs = function.object.dg.uavs.move(); + mir.code_header = try function.object.code_header.toOwnedSlice(); mir.code = try function.object.code.toOwnedSlice(); mir.fwd_decl = try function.object.dg.fwd_decl.toOwnedSlice(); mir.ctype_pool = function.object.dg.ctype_pool.move(); @@ -3004,7 +3075,7 @@ pub fn generate( return mir; } -fn genFunc(f: *Function) !void { +pub fn genFunc(f: *Function) Error!void { const tracy = trace(@src()); defer tracy.end(); @@ -3016,10 +3087,7 @@ fn genFunc(f: *Function) !void { const nav_val = 
zcu.navValue(nav_index); const nav = ip.getNav(nav_index); - o.code_header = std.ArrayList(u8).init(gpa); - defer o.code_header.deinit(); - - const fwd = o.dg.fwdDeclWriter(); + const fwd = &o.dg.fwd_decl.writer; try fwd.writeAll("static "); try o.dg.renderFunctionSignature( fwd, @@ -3030,29 +3098,26 @@ fn genFunc(f: *Function) !void { ); try fwd.writeAll(";\n"); + const ch = &o.code_header.writer; if (nav.status.fully_resolved.@"linksection".toSlice(ip)) |s| - try o.writer().print("zig_linksection_fn({s}) ", .{fmtStringLiteral(s, null)}); + try ch.print("zig_linksection_fn({f}) ", .{fmtStringLiteral(s, null)}); try o.dg.renderFunctionSignature( - o.writer(), + ch, nav_val, .none, .complete, .{ .nav = nav_index }, ); - try o.writer().writeByte(' '); - - // In case we need to use the header, populate it with a copy of the function - // signature here. We anticipate a brace, newline, and space. - try o.code_header.ensureUnusedCapacity(o.code.items.len + 3); - o.code_header.appendSliceAssumeCapacity(o.code.items); - o.code_header.appendSliceAssumeCapacity("{\n "); - const empty_header_len = o.code_header.items.len; + try ch.writeAll(" {\n "); f.free_locals_map.clearRetainingCapacity(); const main_body = f.air.getMainBody(); - try genBodyResolveState(f, undefined, &.{}, main_body, false); - try o.indent_writer.insertNewline(); + o.indent(); + try genBodyResolveState(f, undefined, &.{}, main_body, true); + try o.outdent(); + try o.code.writer.writeByte('}'); + try o.newline(); if (o.dg.expected_block) |_| return f.fail("runtime code not allowed in naked function", .{}); @@ -3083,24 +3148,16 @@ fn genFunc(f: *Function) !void { }; free_locals.sort(SortContext{ .keys = free_locals.keys() }); - const w = o.codeHeaderWriter(); for (free_locals.values()) |list| { for (list.keys()) |local_index| { const local = f.locals.items[local_index]; - try o.dg.renderCTypeAndName(w, local.ctype, .{ .local = local_index }, .{}, local.flags.alignas); - try w.writeAll(";\n "); + try 
o.dg.renderCTypeAndName(ch, local.ctype, .{ .local = local_index }, .{}, local.flags.alignas); + try ch.writeAll(";\n "); } } - - // If we have a header to insert, append the body to the header - // and then return the result, freeing the body. - if (o.code_header.items.len > empty_header_len) { - try o.code_header.appendSlice(o.code.items[empty_header_len..]); - mem.swap(std.ArrayList(u8), &o.code, &o.code_header); - } } -pub fn genDecl(o: *Object) !void { +pub fn genDecl(o: *Object) Error!void { const tracy = trace(@src()); defer tracy.end(); @@ -3120,7 +3177,7 @@ pub fn genDecl(o: *Object) !void { .visibility = @"extern".visibility, }); - const fwd = o.dg.fwdDeclWriter(); + const fwd = &o.dg.fwd_decl.writer; try fwd.writeAll("zig_extern "); try o.dg.renderFunctionSignature( fwd, @@ -3141,10 +3198,10 @@ pub fn genDecl(o: *Object) !void { .linkage = .internal, .visibility = .default, }); - const w = o.writer(); + const w = &o.code.writer; if (variable.is_threadlocal and !o.dg.mod.single_threaded) try w.writeAll("zig_threadlocal "); if (nav.status.fully_resolved.@"linksection".toSlice(&zcu.intern_pool)) |s| - try w.print("zig_linksection({s}) ", .{fmtStringLiteral(s, null)}); + try w.print("zig_linksection({f}) ", .{fmtStringLiteral(s, null)}); try o.dg.renderTypeAndName( w, nav_ty, @@ -3156,7 +3213,7 @@ pub fn genDecl(o: *Object) !void { try w.writeAll(" = "); try o.dg.renderValue(w, Value.fromInterned(variable.init), .StaticInitializer); try w.writeByte(';'); - try o.indent_writer.insertNewline(); + try o.newline(); }, else => try genDeclValue( o, @@ -3174,28 +3231,29 @@ pub fn genDeclValue( decl_c_value: CValue, alignment: Alignment, @"linksection": InternPool.OptionalNullTerminatedString, -) !void { +) Error!void { const zcu = o.dg.pt.zcu; const ty = val.typeOf(zcu); - const fwd = o.dg.fwdDeclWriter(); + const fwd = &o.dg.fwd_decl.writer; try fwd.writeAll("static "); try o.dg.renderTypeAndName(fwd, ty, decl_c_value, Const, alignment, .complete); try 
fwd.writeAll(";\n"); - const w = o.writer(); + const w = &o.code.writer; if (@"linksection".toSlice(&zcu.intern_pool)) |s| - try w.print("zig_linksection({s}) ", .{fmtStringLiteral(s, null)}); + try w.print("zig_linksection({f}) ", .{fmtStringLiteral(s, null)}); try o.dg.renderTypeAndName(w, ty, decl_c_value, Const, alignment, .complete); try w.writeAll(" = "); try o.dg.renderValue(w, val, .StaticInitializer); - try w.writeAll(";\n"); + try w.writeByte(';'); + try o.newline(); } pub fn genExports(dg: *DeclGen, exported: Zcu.Exported, export_indices: []const Zcu.Export.Index) !void { const zcu = dg.pt.zcu; const ip = &zcu.intern_pool; - const fwd = dg.fwdDeclWriter(); + const fwd = &dg.fwd_decl.writer; const main_name = export_indices[0].ptr(zcu).opts.name; try fwd.writeAll("#define "); @@ -3204,7 +3262,7 @@ pub fn genExports(dg: *DeclGen, exported: Zcu.Exported, export_indices: []const .uav => |uav| try DeclGen.renderUavName(fwd, Value.fromInterned(uav)), } try fwd.writeByte(' '); - try fwd.print("{ }", .{fmtIdent(main_name.toSlice(ip))}); + try fwd.print("{f}", .{fmtIdentSolo(main_name.toSlice(ip))}); try fwd.writeByte('\n'); const exported_val = exported.getValue(zcu); @@ -3234,7 +3292,7 @@ pub fn genExports(dg: *DeclGen, exported: Zcu.Exported, export_indices: []const const @"export" = export_index.ptr(zcu); try fwd.writeAll("zig_extern "); if (@"export".opts.linkage == .weak) try fwd.writeAll("zig_weak_linkage "); - if (@"export".opts.section.toSlice(ip)) |s| try fwd.print("zig_linksection({s}) ", .{ + if (@"export".opts.section.toSlice(ip)) |s| try fwd.print("zig_linksection({f}) ", .{ fmtStringLiteral(s, null), }); const extern_name = @"export".opts.name.toSlice(ip); @@ -3249,17 +3307,17 @@ pub fn genExports(dg: *DeclGen, exported: Zcu.Exported, export_indices: []const .complete, ); if (is_mangled and is_export) { - try fwd.print(" zig_mangled_export({ }, {s}, {s})", .{ - fmtIdent(extern_name), + try fwd.print(" zig_mangled_export({f}, {f}, {f})", .{ + 
fmtIdentSolo(extern_name), fmtStringLiteral(extern_name, null), fmtStringLiteral(main_name.toSlice(ip), null), }); } else if (is_mangled) { - try fwd.print(" zig_mangled({ }, {s})", .{ - fmtIdent(extern_name), fmtStringLiteral(extern_name, null), + try fwd.print(" zig_mangled({f}, {f})", .{ + fmtIdentSolo(extern_name), fmtStringLiteral(extern_name, null), }); } else if (is_export) { - try fwd.print(" zig_export({s}, {s})", .{ + try fwd.print(" zig_export({f}, {f})", .{ fmtStringLiteral(main_name.toSlice(ip), null), fmtStringLiteral(extern_name, null), }); @@ -3272,16 +3330,17 @@ pub fn genExports(dg: *DeclGen, exported: Zcu.Exported, export_indices: []const /// `value_map` and `free_locals_map` are undefined after the generation, and new locals may not /// have been added to `free_locals_map`. For a version of this function that restores this state, /// see `genBodyResolveState`. -fn genBody(f: *Function, body: []const Air.Inst.Index) error{ AnalysisFail, OutOfMemory }!void { - const writer = f.object.writer(); +fn genBody(f: *Function, body: []const Air.Inst.Index) Error!void { + const w = &f.object.code.writer; if (body.len == 0) { - try writer.writeAll("{}"); + try w.writeAll("{}"); } else { - try writer.writeAll("{\n"); - f.object.indent_writer.pushIndent(); + try w.writeByte('{'); + f.object.indent(); + try f.object.newline(); try genBodyInner(f, body); - f.object.indent_writer.popIndent(); - try writer.writeByte('}'); + try f.object.outdent(); + try w.writeByte('}'); } } @@ -3291,10 +3350,10 @@ fn genBody(f: *Function, body: []const Air.Inst.Index) error{ AnalysisFail, OutO /// `leading_deaths` have their deaths processed before the body is generated. /// A scope is introduced (using braces) only if `inner` is `false`. /// If `leading_deaths` is empty, `inst` may be `undefined`. 
-fn genBodyResolveState(f: *Function, inst: Air.Inst.Index, leading_deaths: []const Air.Inst.Index, body: []const Air.Inst.Index, inner: bool) error{ AnalysisFail, OutOfMemory }!void { +fn genBodyResolveState(f: *Function, inst: Air.Inst.Index, leading_deaths: []const Air.Inst.Index, body: []const Air.Inst.Index, inner: bool) Error!void { if (body.len == 0) { // Don't go to the expense of cloning everything! - if (!inner) try f.object.writer().writeAll("{}"); + if (!inner) try f.object.code.writer.writeAll("{}"); return; } @@ -3340,7 +3399,7 @@ fn genBodyResolveState(f: *Function, inst: Air.Inst.Index, leading_deaths: []con } } -fn genBodyInner(f: *Function, body: []const Air.Inst.Index) error{ AnalysisFail, OutOfMemory }!void { +fn genBodyInner(f: *Function, body: []const Air.Inst.Index) Error!void { const zcu = f.object.dg.pt.zcu; const ip = &zcu.intern_pool; const air_tags = f.air.instructions.items(.tag); @@ -3358,7 +3417,7 @@ fn genBodyInner(f: *Function, body: []const Air.Inst.Index) error{ AnalysisFail, .arg => try airArg(f, inst), - .breakpoint => try airBreakpoint(f.object.writer()), + .breakpoint => try airBreakpoint(f), .ret_addr => try airRetAddr(f, inst), .frame_addr => try airFrameAddress(f, inst), @@ -3611,8 +3670,8 @@ fn genBodyInner(f: *Function, body: []const Air.Inst.Index) error{ AnalysisFail, .ret => return airRet(f, inst, false), .ret_safe => return airRet(f, inst, false), // TODO .ret_load => return airRet(f, inst, true), - .trap => return airTrap(f, f.object.writer()), - .unreach => return airUnreach(f), + .trap => return airTrap(f, &f.object.code.writer), + .unreach => return airUnreach(&f.object), // Instructions which may be `noreturn`. 
.block => res: { @@ -3655,16 +3714,16 @@ fn airSliceField(f: *Function, inst: Air.Inst.Index, is_ptr: bool, field_name: [ const operand = try f.resolveInst(ty_op.operand); try reap(f, inst, &.{ty_op.operand}); - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, inst_ty); - const a = try Assignment.start(f, writer, try f.ctypeFromType(inst_ty, .complete)); - try f.writeCValue(writer, local, .Other); - try a.assign(f, writer); + const a = try Assignment.start(f, w, try f.ctypeFromType(inst_ty, .complete)); + try f.writeCValue(w, local, .Other); + try a.assign(f, w); if (is_ptr) { - try writer.writeByte('&'); - try f.writeCValueDerefMember(writer, operand, .{ .identifier = field_name }); - } else try f.writeCValueMember(writer, operand, .{ .identifier = field_name }); - try a.end(f, writer); + try w.writeByte('&'); + try f.writeCValueDerefMember(w, operand, .{ .identifier = field_name }); + } else try f.writeCValueMember(w, operand, .{ .identifier = field_name }); + try a.end(f, w); return local; } @@ -3681,16 +3740,16 @@ fn airPtrElemVal(f: *Function, inst: Air.Inst.Index) !CValue { const index = try f.resolveInst(bin_op.rhs); try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs }); - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, inst_ty); - const a = try Assignment.start(f, writer, try f.ctypeFromType(inst_ty, .complete)); - try f.writeCValue(writer, local, .Other); - try a.assign(f, writer); - try f.writeCValue(writer, ptr, .Other); - try writer.writeByte('['); - try f.writeCValue(writer, index, .Other); - try writer.writeByte(']'); - try a.end(f, writer); + const a = try Assignment.start(f, w, try f.ctypeFromType(inst_ty, .complete)); + try f.writeCValue(w, local, .Other); + try a.assign(f, w); + try f.writeCValue(w, ptr, .Other); + try w.writeByte('['); + try f.writeCValue(w, index, .Other); + try w.writeByte(']'); + try a.end(f, w); return local; } @@ 
-3708,25 +3767,25 @@ fn airPtrElemPtr(f: *Function, inst: Air.Inst.Index) !CValue { const index = try f.resolveInst(bin_op.rhs); try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs }); - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, inst_ty); - const a = try Assignment.start(f, writer, try f.ctypeFromType(inst_ty, .complete)); - try f.writeCValue(writer, local, .Other); - try a.assign(f, writer); - try writer.writeByte('('); - try f.renderType(writer, inst_ty); - try writer.writeByte(')'); - if (elem_has_bits) try writer.writeByte('&'); + const a = try Assignment.start(f, w, try f.ctypeFromType(inst_ty, .complete)); + try f.writeCValue(w, local, .Other); + try a.assign(f, w); + try w.writeByte('('); + try f.renderType(w, inst_ty); + try w.writeByte(')'); + if (elem_has_bits) try w.writeByte('&'); if (elem_has_bits and ptr_ty.ptrSize(zcu) == .one) { // It's a pointer to an array, so we need to de-reference. - try f.writeCValueDeref(writer, ptr); - } else try f.writeCValue(writer, ptr, .Other); + try f.writeCValueDeref(w, ptr); + } else try f.writeCValue(w, ptr, .Other); if (elem_has_bits) { - try writer.writeByte('['); - try f.writeCValue(writer, index, .Other); - try writer.writeByte(']'); + try w.writeByte('['); + try f.writeCValue(w, index, .Other); + try w.writeByte(']'); } - try a.end(f, writer); + try a.end(f, w); return local; } @@ -3743,16 +3802,16 @@ fn airSliceElemVal(f: *Function, inst: Air.Inst.Index) !CValue { const index = try f.resolveInst(bin_op.rhs); try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs }); - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, inst_ty); - const a = try Assignment.start(f, writer, try f.ctypeFromType(inst_ty, .complete)); - try f.writeCValue(writer, local, .Other); - try a.assign(f, writer); - try f.writeCValueMember(writer, slice, .{ .identifier = "ptr" }); - try writer.writeByte('['); - try f.writeCValue(writer, index, 
.Other); - try writer.writeByte(']'); - try a.end(f, writer); + const a = try Assignment.start(f, w, try f.ctypeFromType(inst_ty, .complete)); + try f.writeCValue(w, local, .Other); + try a.assign(f, w); + try f.writeCValueMember(w, slice, .{ .identifier = "ptr" }); + try w.writeByte('['); + try f.writeCValue(w, index, .Other); + try w.writeByte(']'); + try a.end(f, w); return local; } @@ -3771,19 +3830,19 @@ fn airSliceElemPtr(f: *Function, inst: Air.Inst.Index) !CValue { const index = try f.resolveInst(bin_op.rhs); try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs }); - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, inst_ty); - const a = try Assignment.start(f, writer, try f.ctypeFromType(inst_ty, .complete)); - try f.writeCValue(writer, local, .Other); - try a.assign(f, writer); - if (elem_has_bits) try writer.writeByte('&'); - try f.writeCValueMember(writer, slice, .{ .identifier = "ptr" }); + const a = try Assignment.start(f, w, try f.ctypeFromType(inst_ty, .complete)); + try f.writeCValue(w, local, .Other); + try a.assign(f, w); + if (elem_has_bits) try w.writeByte('&'); + try f.writeCValueMember(w, slice, .{ .identifier = "ptr" }); if (elem_has_bits) { - try writer.writeByte('['); - try f.writeCValue(writer, index, .Other); - try writer.writeByte(']'); + try w.writeByte('['); + try f.writeCValue(w, index, .Other); + try w.writeByte(']'); } - try a.end(f, writer); + try a.end(f, w); return local; } @@ -3800,16 +3859,16 @@ fn airArrayElemVal(f: *Function, inst: Air.Inst.Index) !CValue { const index = try f.resolveInst(bin_op.rhs); try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs }); - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, inst_ty); - const a = try Assignment.start(f, writer, try f.ctypeFromType(inst_ty, .complete)); - try f.writeCValue(writer, local, .Other); - try a.assign(f, writer); - try f.writeCValue(writer, array, .Other); - try 
writer.writeByte('['); - try f.writeCValue(writer, index, .Other); - try writer.writeByte(']'); - try a.end(f, writer); + const a = try Assignment.start(f, w, try f.ctypeFromType(inst_ty, .complete)); + try f.writeCValue(w, local, .Other); + try a.assign(f, w); + try f.writeCValue(w, array, .Other); + try w.writeByte('['); + try f.writeCValue(w, index, .Other); + try w.writeByte(']'); + try a.end(f, w); return local; } @@ -3863,12 +3922,13 @@ fn airArg(f: *Function, inst: Air.Inst.Index) !CValue { .{ .arg_array = i }; if (f.liveness.isUnused(inst)) { - const writer = f.object.writer(); - try writer.writeByte('('); - try f.renderType(writer, .void); - try writer.writeByte(')'); - try f.writeCValue(writer, result, .Other); - try writer.writeAll(";\n"); + const w = &f.object.code.writer; + try w.writeByte('('); + try f.renderType(w, .void); + try w.writeByte(')'); + try f.writeCValue(w, result, .Other); + try w.writeByte(';'); + try f.object.newline(); return .none; } @@ -3901,21 +3961,21 @@ fn airLoad(f: *Function, inst: Air.Inst.Index) !CValue { const is_array = lowersToArray(src_ty, pt); const need_memcpy = !is_aligned or is_array; - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, src_ty); - const v = try Vectorize.start(f, inst, writer, ptr_ty); + const v = try Vectorize.start(f, inst, w, ptr_ty); if (need_memcpy) { - try writer.writeAll("memcpy("); - if (!is_array) try writer.writeByte('&'); - try f.writeCValue(writer, local, .Other); - try v.elem(f, writer); - try writer.writeAll(", (const char *)"); - try f.writeCValue(writer, operand, .Other); - try v.elem(f, writer); - try writer.writeAll(", sizeof("); - try f.renderType(writer, src_ty); - try writer.writeAll("))"); + try w.writeAll("memcpy("); + if (!is_array) try w.writeByte('&'); + try f.writeCValue(w, local, .Other); + try v.elem(f, w); + try w.writeAll(", (const char *)"); + try f.writeCValue(w, operand, .Other); + try v.elem(f, w); + try 
w.writeAll(", sizeof("); + try f.renderType(w, src_ty); + try w.writeAll("))"); } else if (ptr_info.packed_offset.host_size > 0 and ptr_info.flags.vector_index == .none) { const host_bits: u16 = ptr_info.packed_offset.host_size * 8; const host_ty = try pt.intType(.unsigned, host_bits); @@ -3925,40 +3985,41 @@ fn airLoad(f: *Function, inst: Air.Inst.Index) !CValue { const field_ty = try pt.intType(.unsigned, @as(u16, @intCast(src_ty.bitSize(zcu)))); - try f.writeCValue(writer, local, .Other); - try v.elem(f, writer); - try writer.writeAll(" = ("); - try f.renderType(writer, src_ty); - try writer.writeAll(")zig_wrap_"); - try f.object.dg.renderTypeForBuiltinFnName(writer, field_ty); - try writer.writeAll("(("); - try f.renderType(writer, field_ty); - try writer.writeByte(')'); + try f.writeCValue(w, local, .Other); + try v.elem(f, w); + try w.writeAll(" = ("); + try f.renderType(w, src_ty); + try w.writeAll(")zig_wrap_"); + try f.object.dg.renderTypeForBuiltinFnName(w, field_ty); + try w.writeAll("(("); + try f.renderType(w, field_ty); + try w.writeByte(')'); const cant_cast = host_ty.isInt(zcu) and host_ty.bitSize(zcu) > 64; if (cant_cast) { if (field_ty.bitSize(zcu) > 64) return f.fail("TODO: C backend: implement casting between types > 64 bits", .{}); - try writer.writeAll("zig_lo_"); - try f.object.dg.renderTypeForBuiltinFnName(writer, host_ty); - try writer.writeByte('('); + try w.writeAll("zig_lo_"); + try f.object.dg.renderTypeForBuiltinFnName(w, host_ty); + try w.writeByte('('); } - try writer.writeAll("zig_shr_"); - try f.object.dg.renderTypeForBuiltinFnName(writer, host_ty); - try writer.writeByte('('); - try f.writeCValueDeref(writer, operand); - try v.elem(f, writer); - try writer.print(", {})", .{try f.fmtIntLiteral(bit_offset_val)}); - if (cant_cast) try writer.writeByte(')'); - try f.object.dg.renderBuiltinInfo(writer, field_ty, .bits); - try writer.writeByte(')'); + try w.writeAll("zig_shr_"); + try f.object.dg.renderTypeForBuiltinFnName(w, host_ty); 
+ try w.writeByte('('); + try f.writeCValueDeref(w, operand); + try v.elem(f, w); + try w.print(", {f})", .{try f.fmtIntLiteralDec(bit_offset_val)}); + if (cant_cast) try w.writeByte(')'); + try f.object.dg.renderBuiltinInfo(w, field_ty, .bits); + try w.writeByte(')'); } else { - try f.writeCValue(writer, local, .Other); - try v.elem(f, writer); - try writer.writeAll(" = "); - try f.writeCValueDeref(writer, operand); - try v.elem(f, writer); + try f.writeCValue(w, local, .Other); + try v.elem(f, w); + try w.writeAll(" = "); + try f.writeCValueDeref(w, operand); + try v.elem(f, w); } - try writer.writeAll(";\n"); - try v.end(f, inst, writer); + try w.writeByte(';'); + try f.object.newline(); + try v.end(f, inst, w); return local; } @@ -3967,7 +4028,7 @@ fn airRet(f: *Function, inst: Air.Inst.Index, is_ptr: bool) !void { const pt = f.object.dg.pt; const zcu = pt.zcu; const un_op = f.air.instructions.items(.data)[@intFromEnum(inst)].un_op; - const writer = f.object.writer(); + const w = &f.object.code.writer; const op_inst = un_op.toIndex(); const op_ty = f.typeOf(un_op); const ret_ty = if (is_ptr) op_ty.childType(zcu) else op_ty; @@ -3986,33 +4047,34 @@ fn airRet(f: *Function, inst: Air.Inst.Index, is_ptr: bool) !void { .ctype = ret_ctype, .alignas = CType.AlignAs.fromAbiAlignment(ret_ty.abiAlignment(zcu)), }); - try writer.writeAll("memcpy("); - try f.writeCValueMember(writer, array_local, .{ .identifier = "array" }); - try writer.writeAll(", "); + try w.writeAll("memcpy("); + try f.writeCValueMember(w, array_local, .{ .identifier = "array" }); + try w.writeAll(", "); if (deref) - try f.writeCValueDeref(writer, operand) + try f.writeCValueDeref(w, operand) else - try f.writeCValue(writer, operand, .FunctionArgument); + try f.writeCValue(w, operand, .FunctionArgument); deref = false; - try writer.writeAll(", sizeof("); - try f.renderType(writer, ret_ty); - try writer.writeAll("));\n"); + try w.writeAll(", sizeof("); + try f.renderType(w, ret_ty); + try 
w.writeAll("));"); + try f.object.newline(); break :ret_val array_local; } else operand; - try writer.writeAll("return "); + try w.writeAll("return "); if (deref) - try f.writeCValueDeref(writer, ret_val) + try f.writeCValueDeref(w, ret_val) else - try f.writeCValue(writer, ret_val, .Other); - try writer.writeAll(";\n"); + try f.writeCValue(w, ret_val, .Other); + try w.writeAll(";\n"); if (is_array) { try freeLocal(f, inst, ret_val.new_local, null); } } else { try reap(f, inst, &.{un_op}); // Not even allowed to return void in a naked function. - if (!f.object.dg.is_naked_fn) try writer.writeAll("return;\n"); + if (!f.object.dg.is_naked_fn) try w.writeAll("return;\n"); } } @@ -4031,16 +4093,16 @@ fn airIntCast(f: *Function, inst: Air.Inst.Index) !CValue { if (f.object.dg.intCastIsNoop(inst_scalar_ty, scalar_ty)) return f.moveCValue(inst, inst_ty, operand); - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, inst_ty); - const v = try Vectorize.start(f, inst, writer, operand_ty); - const a = try Assignment.start(f, writer, try f.ctypeFromType(scalar_ty, .complete)); - try f.writeCValue(writer, local, .Other); - try v.elem(f, writer); - try a.assign(f, writer); - try f.renderIntCast(writer, inst_scalar_ty, operand, v, scalar_ty, .Other); - try a.end(f, writer); - try v.end(f, inst, writer); + const v = try Vectorize.start(f, inst, w, operand_ty); + const a = try Assignment.start(f, w, try f.ctypeFromType(scalar_ty, .complete)); + try f.writeCValue(w, local, .Other); + try v.elem(f, w); + try a.assign(f, w); + try f.renderIntCast(w, inst_scalar_ty, operand, v, scalar_ty, .Other); + try a.end(f, w); + try v.end(f, inst, w); return local; } @@ -4067,35 +4129,35 @@ fn airTrunc(f: *Function, inst: Air.Inst.Index) !CValue { const need_mask = dest_bits < 8 or !std.math.isPowerOfTwo(dest_bits); if (!need_cast and !need_lo and !need_mask) return f.moveCValue(inst, inst_ty, operand); - const writer = f.object.writer(); + 
const w = &f.object.code.writer; const local = try f.allocLocal(inst, inst_ty); - const v = try Vectorize.start(f, inst, writer, operand_ty); - const a = try Assignment.start(f, writer, try f.ctypeFromType(inst_scalar_ty, .complete)); - try f.writeCValue(writer, local, .Other); - try v.elem(f, writer); - try a.assign(f, writer); + const v = try Vectorize.start(f, inst, w, operand_ty); + const a = try Assignment.start(f, w, try f.ctypeFromType(inst_scalar_ty, .complete)); + try f.writeCValue(w, local, .Other); + try v.elem(f, w); + try a.assign(f, w); if (need_cast) { - try writer.writeByte('('); - try f.renderType(writer, inst_scalar_ty); - try writer.writeByte(')'); + try w.writeByte('('); + try f.renderType(w, inst_scalar_ty); + try w.writeByte(')'); } if (need_lo) { - try writer.writeAll("zig_lo_"); - try f.object.dg.renderTypeForBuiltinFnName(writer, scalar_ty); - try writer.writeByte('('); + try w.writeAll("zig_lo_"); + try f.object.dg.renderTypeForBuiltinFnName(w, scalar_ty); + try w.writeByte('('); } if (!need_mask) { - try f.writeCValue(writer, operand, .Other); - try v.elem(f, writer); + try f.writeCValue(w, operand, .Other); + try v.elem(f, w); } else switch (dest_int_info.signedness) { .unsigned => { - try writer.writeAll("zig_and_"); - try f.object.dg.renderTypeForBuiltinFnName(writer, scalar_ty); - try writer.writeByte('('); - try f.writeCValue(writer, operand, .FunctionArgument); - try v.elem(f, writer); - try writer.print(", {x})", .{ - try f.fmtIntLiteral(try inst_scalar_ty.maxIntScalar(pt, scalar_ty)), + try w.writeAll("zig_and_"); + try f.object.dg.renderTypeForBuiltinFnName(w, scalar_ty); + try w.writeByte('('); + try f.writeCValue(w, operand, .FunctionArgument); + try v.elem(f, w); + try w.print(", {f})", .{ + try f.fmtIntLiteralHex(try inst_scalar_ty.maxIntScalar(pt, scalar_ty)), }); }, .signed => { @@ -4103,30 +4165,30 @@ fn airTrunc(f: *Function, inst: Air.Inst.Index) !CValue { return f.fail("TODO: C backend: implement integer types larger 
than 128 bits", .{}); const shift_val = try pt.intValue(.u8, c_bits - dest_bits); - try writer.writeAll("zig_shr_"); - try f.object.dg.renderTypeForBuiltinFnName(writer, scalar_ty); + try w.writeAll("zig_shr_"); + try f.object.dg.renderTypeForBuiltinFnName(w, scalar_ty); if (c_bits == 128) { - try writer.print("(zig_bitCast_i{d}(", .{c_bits}); + try w.print("(zig_bitCast_i{d}(", .{c_bits}); } else { - try writer.print("((int{d}_t)", .{c_bits}); + try w.print("((int{d}_t)", .{c_bits}); } - try writer.print("zig_shl_u{d}(", .{c_bits}); + try w.print("zig_shl_u{d}(", .{c_bits}); if (c_bits == 128) { - try writer.print("zig_bitCast_u{d}(", .{c_bits}); + try w.print("zig_bitCast_u{d}(", .{c_bits}); } else { - try writer.print("(uint{d}_t)", .{c_bits}); + try w.print("(uint{d}_t)", .{c_bits}); } - try f.writeCValue(writer, operand, .FunctionArgument); - try v.elem(f, writer); - if (c_bits == 128) try writer.writeByte(')'); - try writer.print(", {})", .{try f.fmtIntLiteral(shift_val)}); - if (c_bits == 128) try writer.writeByte(')'); - try writer.print(", {})", .{try f.fmtIntLiteral(shift_val)}); + try f.writeCValue(w, operand, .FunctionArgument); + try v.elem(f, w); + if (c_bits == 128) try w.writeByte(')'); + try w.print(", {f})", .{try f.fmtIntLiteralDec(shift_val)}); + if (c_bits == 128) try w.writeByte(')'); + try w.print(", {f})", .{try f.fmtIntLiteralDec(shift_val)}); }, } - if (need_lo) try writer.writeByte(')'); - try a.end(f, writer); - try v.end(f, inst, writer); + if (need_lo) try w.writeByte(')'); + try a.end(f, w); + try v.end(f, inst, w); return local; } @@ -4145,15 +4207,16 @@ fn airStore(f: *Function, inst: Air.Inst.Index, safety: bool) !CValue { const val_is_undef = if (try f.air.value(bin_op.rhs, pt)) |v| v.isUndefDeep(zcu) else false; + const w = &f.object.code.writer; if (val_is_undef) { try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs }); if (safety and ptr_info.packed_offset.host_size == 0) { - const writer = f.object.writer(); - try 
writer.writeAll("memset("); - try f.writeCValue(writer, ptr_val, .FunctionArgument); - try writer.writeAll(", 0xaa, sizeof("); - try f.renderType(writer, .fromInterned(ptr_info.child)); - try writer.writeAll("));\n"); + try w.writeAll("memset("); + try f.writeCValue(w, ptr_val, .FunctionArgument); + try w.writeAll(", 0xaa, sizeof("); + try f.renderType(w, .fromInterned(ptr_info.child)); + try w.writeAll("));"); + try f.object.newline(); } return .none; } @@ -4169,7 +4232,6 @@ fn airStore(f: *Function, inst: Air.Inst.Index, safety: bool) !CValue { try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs }); const src_scalar_ctype = try f.ctypeFromType(src_ty.scalarType(zcu), .complete); - const writer = f.object.writer(); if (need_memcpy) { // For this memcpy to safely work we need the rhs to have the same // underlying type as the lhs (i.e. they must both be arrays of the same underlying type). @@ -4180,28 +4242,30 @@ fn airStore(f: *Function, inst: Air.Inst.Index, safety: bool) !CValue { // TODO this should be done by manually initializing elements of the dest array const array_src = if (src_val == .constant) blk: { const new_local = try f.allocLocal(inst, src_ty); - try f.writeCValue(writer, new_local, .Other); - try writer.writeAll(" = "); - try f.writeCValue(writer, src_val, .Other); - try writer.writeAll(";\n"); + try f.writeCValue(w, new_local, .Other); + try w.writeAll(" = "); + try f.writeCValue(w, src_val, .Other); + try w.writeByte(';'); + try f.object.newline(); break :blk new_local; } else src_val; - const v = try Vectorize.start(f, inst, writer, ptr_ty); - try writer.writeAll("memcpy((char *)"); - try f.writeCValue(writer, ptr_val, .FunctionArgument); - try v.elem(f, writer); - try writer.writeAll(", "); - if (!is_array) try writer.writeByte('&'); - try f.writeCValue(writer, array_src, .FunctionArgument); - try v.elem(f, writer); - try writer.writeAll(", sizeof("); - try f.renderType(writer, src_ty); - try writer.writeAll("))"); + const v = try Vectorize.start(f, 
inst, w, ptr_ty); + try w.writeAll("memcpy((char *)"); + try f.writeCValue(w, ptr_val, .FunctionArgument); + try v.elem(f, w); + try w.writeAll(", "); + if (!is_array) try w.writeByte('&'); + try f.writeCValue(w, array_src, .FunctionArgument); + try v.elem(f, w); + try w.writeAll(", sizeof("); + try f.renderType(w, src_ty); + try w.writeAll("))"); try f.freeCValue(inst, array_src); - try writer.writeAll(";\n"); - try v.end(f, inst, writer); + try w.writeByte(';'); + try f.object.newline(); + try v.end(f, inst, w); } else if (ptr_info.packed_offset.host_size > 0 and ptr_info.flags.vector_index == .none) { const host_bits = ptr_info.packed_offset.host_size * 8; const host_ty = try pt.intType(.unsigned, host_bits); @@ -4218,50 +4282,50 @@ fn airStore(f: *Function, inst: Air.Inst.Index, safety: bool) !CValue { var mask = try BigInt.Managed.initCapacity(stack.get(), BigInt.calcTwosCompLimbCount(host_bits)); defer mask.deinit(); - try mask.setTwosCompIntLimit(.max, .unsigned, @as(usize, @intCast(src_bits))); + try mask.setTwosCompIntLimit(.max, .unsigned, @intCast(src_bits)); try mask.shiftLeft(&mask, ptr_info.packed_offset.bit_offset); try mask.bitNotWrap(&mask, .unsigned, host_bits); const mask_val = try pt.intValue_big(host_ty, mask.toConst()); - const v = try Vectorize.start(f, inst, writer, ptr_ty); - const a = try Assignment.start(f, writer, src_scalar_ctype); - try f.writeCValueDeref(writer, ptr_val); - try v.elem(f, writer); - try a.assign(f, writer); - try writer.writeAll("zig_or_"); - try f.object.dg.renderTypeForBuiltinFnName(writer, host_ty); - try writer.writeAll("(zig_and_"); - try f.object.dg.renderTypeForBuiltinFnName(writer, host_ty); - try writer.writeByte('('); - try f.writeCValueDeref(writer, ptr_val); - try v.elem(f, writer); - try writer.print(", {x}), zig_shl_", .{try f.fmtIntLiteral(mask_val)}); - try f.object.dg.renderTypeForBuiltinFnName(writer, host_ty); - try writer.writeByte('('); + const v = try Vectorize.start(f, inst, w, ptr_ty); + const a 
= try Assignment.start(f, w, src_scalar_ctype); + try f.writeCValueDeref(w, ptr_val); + try v.elem(f, w); + try a.assign(f, w); + try w.writeAll("zig_or_"); + try f.object.dg.renderTypeForBuiltinFnName(w, host_ty); + try w.writeAll("(zig_and_"); + try f.object.dg.renderTypeForBuiltinFnName(w, host_ty); + try w.writeByte('('); + try f.writeCValueDeref(w, ptr_val); + try v.elem(f, w); + try w.print(", {f}), zig_shl_", .{try f.fmtIntLiteralHex(mask_val)}); + try f.object.dg.renderTypeForBuiltinFnName(w, host_ty); + try w.writeByte('('); const cant_cast = host_ty.isInt(zcu) and host_ty.bitSize(zcu) > 64; if (cant_cast) { if (src_ty.bitSize(zcu) > 64) return f.fail("TODO: C backend: implement casting between types > 64 bits", .{}); - try writer.writeAll("zig_make_"); - try f.object.dg.renderTypeForBuiltinFnName(writer, host_ty); - try writer.writeAll("(0, "); + try w.writeAll("zig_make_"); + try f.object.dg.renderTypeForBuiltinFnName(w, host_ty); + try w.writeAll("(0, "); } else { - try writer.writeByte('('); - try f.renderType(writer, host_ty); - try writer.writeByte(')'); + try w.writeByte('('); + try f.renderType(w, host_ty); + try w.writeByte(')'); } if (src_ty.isPtrAtRuntime(zcu)) { - try writer.writeByte('('); - try f.renderType(writer, .usize); - try writer.writeByte(')'); + try w.writeByte('('); + try f.renderType(w, .usize); + try w.writeByte(')'); } - try f.writeCValue(writer, src_val, .Other); - try v.elem(f, writer); - if (cant_cast) try writer.writeByte(')'); - try writer.print(", {}))", .{try f.fmtIntLiteral(bit_offset_val)}); - try a.end(f, writer); - try v.end(f, inst, writer); + try f.writeCValue(w, src_val, .Other); + try v.elem(f, w); + if (cant_cast) try w.writeByte(')'); + try w.print(", {f}))", .{try f.fmtIntLiteralDec(bit_offset_val)}); + try a.end(f, w); + try v.end(f, inst, w); } else { switch (ptr_val) { .local_ref => |ptr_local_index| switch (src_val) { @@ -4271,15 +4335,15 @@ fn airStore(f: *Function, inst: Air.Inst.Index, safety: bool) 
!CValue { }, else => {}, } - const v = try Vectorize.start(f, inst, writer, ptr_ty); - const a = try Assignment.start(f, writer, src_scalar_ctype); - try f.writeCValueDeref(writer, ptr_val); - try v.elem(f, writer); - try a.assign(f, writer); - try f.writeCValue(writer, src_val, .Other); - try v.elem(f, writer); - try a.end(f, writer); - try v.end(f, inst, writer); + const v = try Vectorize.start(f, inst, w, ptr_ty); + const a = try Assignment.start(f, w, src_scalar_ctype); + try f.writeCValueDeref(w, ptr_val); + try v.elem(f, w); + try a.assign(f, w); + try f.writeCValue(w, src_val, .Other); + try v.elem(f, w); + try a.end(f, w); + try v.end(f, inst, w); } return .none; } @@ -4298,7 +4362,7 @@ fn airOverflow(f: *Function, inst: Air.Inst.Index, operation: []const u8, info: const operand_ty = f.typeOf(bin_op.lhs); const scalar_ty = operand_ty.scalarType(zcu); - const w = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, inst_ty); const v = try Vectorize.start(f, inst, w, operand_ty); try f.writeCValueMember(w, local, .{ .field = 1 }); @@ -4317,7 +4381,8 @@ fn airOverflow(f: *Function, inst: Air.Inst.Index, operation: []const u8, info: try f.writeCValue(w, rhs, .FunctionArgument); if (f.typeOf(bin_op.rhs).isVector(zcu)) try v.elem(f, w); try f.object.dg.renderBuiltinInfo(w, scalar_ty, info); - try w.writeAll(");\n"); + try w.writeAll(");"); + try f.object.newline(); try v.end(f, inst, w); return local; @@ -4336,17 +4401,18 @@ fn airNot(f: *Function, inst: Air.Inst.Index) !CValue { const inst_ty = f.typeOfIndex(inst); - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, inst_ty); - const v = try Vectorize.start(f, inst, writer, operand_ty); - try f.writeCValue(writer, local, .Other); - try v.elem(f, writer); - try writer.writeAll(" = "); - try writer.writeByte('!'); - try f.writeCValue(writer, op, .Other); - try v.elem(f, writer); - try writer.writeAll(";\n"); - try 
v.end(f, inst, writer); + const v = try Vectorize.start(f, inst, w, operand_ty); + try f.writeCValue(w, local, .Other); + try v.elem(f, w); + try w.writeAll(" = "); + try w.writeByte('!'); + try f.writeCValue(w, op, .Other); + try v.elem(f, w); + try w.writeByte(';'); + try f.object.newline(); + try v.end(f, inst, w); return local; } @@ -4372,21 +4438,22 @@ fn airBinOp( const inst_ty = f.typeOfIndex(inst); - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, inst_ty); - const v = try Vectorize.start(f, inst, writer, operand_ty); - try f.writeCValue(writer, local, .Other); - try v.elem(f, writer); - try writer.writeAll(" = "); - try f.writeCValue(writer, lhs, .Other); - try v.elem(f, writer); - try writer.writeByte(' '); - try writer.writeAll(operator); - try writer.writeByte(' '); - try f.writeCValue(writer, rhs, .Other); - try v.elem(f, writer); - try writer.writeAll(";\n"); - try v.end(f, inst, writer); + const v = try Vectorize.start(f, inst, w, operand_ty); + try f.writeCValue(w, local, .Other); + try v.elem(f, w); + try w.writeAll(" = "); + try f.writeCValue(w, lhs, .Other); + try v.elem(f, w); + try w.writeByte(' '); + try w.writeAll(operator); + try w.writeByte(' '); + try f.writeCValue(w, rhs, .Other); + try v.elem(f, w); + try w.writeByte(';'); + try f.object.newline(); + try v.end(f, inst, w); return local; } @@ -4422,27 +4489,27 @@ fn airCmpOp( const rhs_ty = f.typeOf(data.rhs); const need_cast = lhs_ty.isSinglePointer(zcu) or rhs_ty.isSinglePointer(zcu); - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, inst_ty); - const v = try Vectorize.start(f, inst, writer, lhs_ty); - const a = try Assignment.start(f, writer, try f.ctypeFromType(scalar_ty, .complete)); - try f.writeCValue(writer, local, .Other); - try v.elem(f, writer); - try a.assign(f, writer); - if (lhs != .undef and lhs.eql(rhs)) try writer.writeAll(switch (operator) { + const v = try 
Vectorize.start(f, inst, w, lhs_ty); + const a = try Assignment.start(f, w, try f.ctypeFromType(scalar_ty, .complete)); + try f.writeCValue(w, local, .Other); + try v.elem(f, w); + try a.assign(f, w); + if (lhs != .undef and lhs.eql(rhs)) try w.writeAll(switch (operator) { .lt, .neq, .gt => "false", .lte, .eq, .gte => "true", }) else { - if (need_cast) try writer.writeAll("(void*)"); - try f.writeCValue(writer, lhs, .Other); - try v.elem(f, writer); - try writer.writeAll(compareOperatorC(operator)); - if (need_cast) try writer.writeAll("(void*)"); - try f.writeCValue(writer, rhs, .Other); - try v.elem(f, writer); + if (need_cast) try w.writeAll("(void*)"); + try f.writeCValue(w, lhs, .Other); + try v.elem(f, w); + try w.writeAll(compareOperatorC(operator)); + if (need_cast) try w.writeAll("(void*)"); + try f.writeCValue(w, rhs, .Other); + try v.elem(f, w); } - try a.end(f, writer); - try v.end(f, inst, writer); + try a.end(f, w); + try v.end(f, inst, w); return local; } @@ -4475,41 +4542,41 @@ fn airEquality( const rhs = try f.resolveInst(bin_op.rhs); try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs }); - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, .bool); - const a = try Assignment.start(f, writer, .bool); - try f.writeCValue(writer, local, .Other); - try a.assign(f, writer); + const a = try Assignment.start(f, w, .bool); + try f.writeCValue(w, local, .Other); + try a.assign(f, w); const operand_ctype = try f.ctypeFromType(operand_ty, .complete); - if (lhs != .undef and lhs.eql(rhs)) try writer.writeAll(switch (operator) { + if (lhs != .undef and lhs.eql(rhs)) try w.writeAll(switch (operator) { .lt, .lte, .gte, .gt => unreachable, .neq => "false", .eq => "true", }) else switch (operand_ctype.info(ctype_pool)) { .basic, .pointer => { - try f.writeCValue(writer, lhs, .Other); - try writer.writeAll(compareOperatorC(operator)); - try f.writeCValue(writer, rhs, .Other); + try f.writeCValue(w, lhs, .Other); + try 
w.writeAll(compareOperatorC(operator)); + try f.writeCValue(w, rhs, .Other); }, .aligned, .array, .vector, .fwd_decl, .function => unreachable, .aggregate => |aggregate| if (aggregate.fields.len == 2 and (aggregate.fields.at(0, ctype_pool).name.index == .is_null or aggregate.fields.at(1, ctype_pool).name.index == .is_null)) { - try f.writeCValueMember(writer, lhs, .{ .identifier = "is_null" }); - try writer.writeAll(" || "); - try f.writeCValueMember(writer, rhs, .{ .identifier = "is_null" }); - try writer.writeAll(" ? "); - try f.writeCValueMember(writer, lhs, .{ .identifier = "is_null" }); - try writer.writeAll(compareOperatorC(operator)); - try f.writeCValueMember(writer, rhs, .{ .identifier = "is_null" }); - try writer.writeAll(" : "); - try f.writeCValueMember(writer, lhs, .{ .identifier = "payload" }); - try writer.writeAll(compareOperatorC(operator)); - try f.writeCValueMember(writer, rhs, .{ .identifier = "payload" }); + try f.writeCValueMember(w, lhs, .{ .identifier = "is_null" }); + try w.writeAll(" || "); + try f.writeCValueMember(w, rhs, .{ .identifier = "is_null" }); + try w.writeAll(" ? 
"); + try f.writeCValueMember(w, lhs, .{ .identifier = "is_null" }); + try w.writeAll(compareOperatorC(operator)); + try f.writeCValueMember(w, rhs, .{ .identifier = "is_null" }); + try w.writeAll(" : "); + try f.writeCValueMember(w, lhs, .{ .identifier = "payload" }); + try w.writeAll(compareOperatorC(operator)); + try f.writeCValueMember(w, rhs, .{ .identifier = "payload" }); } else for (0..aggregate.fields.len) |field_index| { - if (field_index > 0) try writer.writeAll(switch (operator) { + if (field_index > 0) try w.writeAll(switch (operator) { .lt, .lte, .gte, .gt => unreachable, .eq => " && ", .neq => " || ", @@ -4517,12 +4584,12 @@ fn airEquality( const field_name: CValue = .{ .ctype_pool_string = aggregate.fields.at(field_index, ctype_pool).name, }; - try f.writeCValueMember(writer, lhs, field_name); - try writer.writeAll(compareOperatorC(operator)); - try f.writeCValueMember(writer, rhs, field_name); + try f.writeCValueMember(w, lhs, field_name); + try w.writeAll(compareOperatorC(operator)); + try f.writeCValueMember(w, rhs, field_name); }, } - try a.end(f, writer); + try a.end(f, w); return local; } @@ -4533,12 +4600,13 @@ fn airCmpLtErrorsLen(f: *Function, inst: Air.Inst.Index) !CValue { const operand = try f.resolveInst(un_op); try reap(f, inst, &.{un_op}); - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, .bool); - try f.writeCValue(writer, local, .Other); - try writer.writeAll(" = "); - try f.writeCValue(writer, operand, .Other); - try writer.print(" < sizeof({ }) / sizeof(*{0 });\n", .{fmtIdent("zig_errorName")}); + try f.writeCValue(w, local, .Other); + try w.writeAll(" = "); + try f.writeCValue(w, operand, .Other); + try w.print(" < sizeof({f}) / sizeof(*{0f});", .{fmtIdentSolo("zig_errorName")}); + try f.object.newline(); return local; } @@ -4559,30 +4627,30 @@ fn airPtrAddSub(f: *Function, inst: Air.Inst.Index, operator: u8) !CValue { const inst_scalar_ctype = try 
f.ctypeFromType(inst_scalar_ty, .complete); const local = try f.allocLocal(inst, inst_ty); - const writer = f.object.writer(); - const v = try Vectorize.start(f, inst, writer, inst_ty); - const a = try Assignment.start(f, writer, inst_scalar_ctype); - try f.writeCValue(writer, local, .Other); - try v.elem(f, writer); - try a.assign(f, writer); + const w = &f.object.code.writer; + const v = try Vectorize.start(f, inst, w, inst_ty); + const a = try Assignment.start(f, w, inst_scalar_ctype); + try f.writeCValue(w, local, .Other); + try v.elem(f, w); + try a.assign(f, w); // We must convert to and from integer types to prevent UB if the operation // results in a NULL pointer, or if LHS is NULL. The operation is only UB // if the result is NULL and then dereferenced. - try writer.writeByte('('); - try f.renderCType(writer, inst_scalar_ctype); - try writer.writeAll(")(((uintptr_t)"); - try f.writeCValue(writer, lhs, .Other); - try v.elem(f, writer); - try writer.writeAll(") "); - try writer.writeByte(operator); - try writer.writeAll(" ("); - try f.writeCValue(writer, rhs, .Other); - try v.elem(f, writer); - try writer.writeAll("*sizeof("); - try f.renderType(writer, elem_ty); - try writer.writeAll(")))"); - try a.end(f, writer); - try v.end(f, inst, writer); + try w.writeByte('('); + try f.renderCType(w, inst_scalar_ctype); + try w.writeAll(")(((uintptr_t)"); + try f.writeCValue(w, lhs, .Other); + try v.elem(f, w); + try w.writeAll(") "); + try w.writeByte(operator); + try w.writeAll(" ("); + try f.writeCValue(w, rhs, .Other); + try v.elem(f, w); + try w.writeAll("*sizeof("); + try f.renderType(w, elem_ty); + try w.writeAll(")))"); + try a.end(f, w); + try v.end(f, inst, w); return local; } @@ -4601,28 +4669,29 @@ fn airMinMax(f: *Function, inst: Air.Inst.Index, operator: u8, operation: []cons const rhs = try f.resolveInst(bin_op.rhs); try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs }); - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try 
f.allocLocal(inst, inst_ty); - const v = try Vectorize.start(f, inst, writer, inst_ty); - try f.writeCValue(writer, local, .Other); - try v.elem(f, writer); + const v = try Vectorize.start(f, inst, w, inst_ty); + try f.writeCValue(w, local, .Other); + try v.elem(f, w); // (lhs <> rhs) ? lhs : rhs - try writer.writeAll(" = ("); - try f.writeCValue(writer, lhs, .Other); - try v.elem(f, writer); - try writer.writeByte(' '); - try writer.writeByte(operator); - try writer.writeByte(' '); - try f.writeCValue(writer, rhs, .Other); - try v.elem(f, writer); - try writer.writeAll(") ? "); - try f.writeCValue(writer, lhs, .Other); - try v.elem(f, writer); - try writer.writeAll(" : "); - try f.writeCValue(writer, rhs, .Other); - try v.elem(f, writer); - try writer.writeAll(";\n"); - try v.end(f, inst, writer); + try w.writeAll(" = ("); + try f.writeCValue(w, lhs, .Other); + try v.elem(f, w); + try w.writeByte(' '); + try w.writeByte(operator); + try w.writeByte(' '); + try f.writeCValue(w, rhs, .Other); + try v.elem(f, w); + try w.writeAll(") ? 
"); + try f.writeCValue(w, lhs, .Other); + try v.elem(f, w); + try w.writeAll(" : "); + try f.writeCValue(w, rhs, .Other); + try v.elem(f, w); + try w.writeByte(';'); + try f.object.newline(); + try v.end(f, inst, w); return local; } @@ -4640,21 +4709,21 @@ fn airSlice(f: *Function, inst: Air.Inst.Index) !CValue { const inst_ty = f.typeOfIndex(inst); const ptr_ty = inst_ty.slicePtrFieldType(zcu); - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, inst_ty); { - const a = try Assignment.start(f, writer, try f.ctypeFromType(ptr_ty, .complete)); - try f.writeCValueMember(writer, local, .{ .identifier = "ptr" }); - try a.assign(f, writer); - try f.writeCValue(writer, ptr, .Other); - try a.end(f, writer); + const a = try Assignment.start(f, w, try f.ctypeFromType(ptr_ty, .complete)); + try f.writeCValueMember(w, local, .{ .identifier = "ptr" }); + try a.assign(f, w); + try f.writeCValue(w, ptr, .Other); + try a.end(f, w); } { - const a = try Assignment.start(f, writer, .usize); - try f.writeCValueMember(writer, local, .{ .identifier = "len" }); - try a.assign(f, writer); - try f.writeCValue(writer, len, .Other); - try a.end(f, writer); + const a = try Assignment.start(f, w, .usize); + try f.writeCValueMember(w, local, .{ .identifier = "len" }); + try a.assign(f, w); + try f.writeCValue(w, len, .Other); + try a.end(f, w); } return local; } @@ -4671,7 +4740,7 @@ fn airCall( if (f.object.dg.is_naked_fn) return .none; const gpa = f.object.dg.gpa; - const writer = f.object.writer(); + const w = &f.object.code.writer; const pl_op = f.air.instructions.items(.data)[@intFromEnum(inst)].pl_op; const extra = f.air.extraData(Air.Call, pl_op.payload); @@ -4692,13 +4761,14 @@ fn airCall( .ctype = arg_ctype, .alignas = CType.AlignAs.fromAbiAlignment(arg_ty.abiAlignment(zcu)), }); - try writer.writeAll("memcpy("); - try f.writeCValueMember(writer, array_local, .{ .identifier = "array" }); - try writer.writeAll(", "); - try 
f.writeCValue(writer, resolved_arg.*, .FunctionArgument); - try writer.writeAll(", sizeof("); - try f.renderCType(writer, arg_ctype); - try writer.writeAll("));\n"); + try w.writeAll("memcpy("); + try f.writeCValueMember(w, array_local, .{ .identifier = "array" }); + try w.writeAll(", "); + try f.writeCValue(w, resolved_arg.*, .FunctionArgument); + try w.writeAll(", sizeof("); + try f.renderCType(w, arg_ctype); + try w.writeAll("));"); + try f.object.newline(); resolved_arg.* = array_local; } } @@ -4726,22 +4796,22 @@ fn airCall( const result_local = result: { if (modifier == .always_tail) { - try writer.writeAll("zig_always_tail return "); + try w.writeAll("zig_always_tail return "); break :result .none; } else if (ret_ctype.index == .void) { break :result .none; } else if (f.liveness.isUnused(inst)) { - try writer.writeByte('('); - try f.renderCType(writer, .void); - try writer.writeByte(')'); + try w.writeByte('('); + try f.renderCType(w, .void); + try w.writeByte(')'); break :result .none; } else { const local = try f.allocAlignedLocal(inst, .{ .ctype = ret_ctype, .alignas = CType.AlignAs.fromAbiAlignment(ret_ty.abiAlignment(zcu)), }); - try f.writeCValue(writer, local, .Other); - try writer.writeAll(" = "); + try f.writeCValue(w, local, .Other); + try w.writeAll(" = "); break :result local; } }; @@ -4761,17 +4831,17 @@ fn airCall( else => break :known, }; if (need_cast) { - try writer.writeAll("(("); - try f.renderType(writer, if (callee_is_ptr) callee_ty else try pt.singleConstPtrType(callee_ty)); - try writer.writeByte(')'); - if (!callee_is_ptr) try writer.writeByte('&'); + try w.writeAll("(("); + try f.renderType(w, if (callee_is_ptr) callee_ty else try pt.singleConstPtrType(callee_ty)); + try w.writeByte(')'); + if (!callee_is_ptr) try w.writeByte('&'); } switch (modifier) { - .auto, .always_tail => try f.object.dg.renderNavName(writer, fn_nav), - inline .never_tail, .never_inline => |m| try writer.writeAll(try f.getLazyFnName(@unionInit(LazyFnKey, 
@tagName(m), fn_nav))), + .auto, .always_tail => try f.object.dg.renderNavName(w, fn_nav), + inline .never_tail, .never_inline => |m| try w.writeAll(try f.getLazyFnName(@unionInit(LazyFnKey, @tagName(m), fn_nav))), else => unreachable, } - if (need_cast) try writer.writeByte(')'); + if (need_cast) try w.writeByte(')'); break :callee; } switch (modifier) { @@ -4781,32 +4851,37 @@ fn airCall( else => unreachable, } // Fall back to function pointer call. - try f.writeCValue(writer, callee, .Other); + try f.writeCValue(w, callee, .Other); } - try writer.writeByte('('); + try w.writeByte('('); var need_comma = false; for (resolved_args) |resolved_arg| { if (resolved_arg == .none) continue; - if (need_comma) try writer.writeAll(", "); + if (need_comma) try w.writeAll(", "); need_comma = true; - try f.writeCValue(writer, resolved_arg, .FunctionArgument); + try f.writeCValue(w, resolved_arg, .FunctionArgument); try f.freeCValue(inst, resolved_arg); } - try writer.writeAll(");\n"); + try w.writeAll(");"); + switch (modifier) { + .always_tail => try w.writeByte('\n'), + else => try f.object.newline(), + } const result = result: { if (result_local == .none or !lowersToArray(ret_ty, pt)) break :result result_local; const array_local = try f.allocLocal(inst, ret_ty); - try writer.writeAll("memcpy("); - try f.writeCValue(writer, array_local, .FunctionArgument); - try writer.writeAll(", "); - try f.writeCValueMember(writer, result_local, .{ .identifier = "array" }); - try writer.writeAll(", sizeof("); - try f.renderType(writer, ret_ty); - try writer.writeAll("));\n"); + try w.writeAll("memcpy("); + try f.writeCValue(w, array_local, .FunctionArgument); + try w.writeAll(", "); + try f.writeCValueMember(w, result_local, .{ .identifier = "array" }); + try w.writeAll(", sizeof("); + try f.renderType(w, ret_ty); + try w.writeAll("));"); + try f.object.newline(); try freeLocal(f, inst, result_local.new_local, null); break :result array_local; }; @@ -4816,7 +4891,7 @@ fn airCall( fn 
airDbgStmt(f: *Function, inst: Air.Inst.Index) !CValue { const dbg_stmt = f.air.instructions.items(.data)[@intFromEnum(inst)].dbg_stmt; - const writer = f.object.writer(); + const w = &f.object.code.writer; // TODO re-evaluate whether to emit these or not. If we naively emit // these directives, the output file will report bogus line numbers because // every newline after the #line directive adds one to the line. @@ -4824,13 +4899,16 @@ fn airDbgStmt(f: *Function, inst: Air.Inst.Index) !CValue { // If we wanted to go this route, we would need to go all the way and not output // newlines until the next dbg_stmt occurs. // Perhaps an additional compilation option is in order? - //try writer.print("#line {d}\n", .{dbg_stmt.line + 1}); - try writer.print("/* file:{d}:{d} */\n", .{ dbg_stmt.line + 1, dbg_stmt.column + 1 }); + //try w.print("#line {d}", .{dbg_stmt.line + 1}); + //try f.object.newline(); + try w.print("/* file:{d}:{d} */", .{ dbg_stmt.line + 1, dbg_stmt.column + 1 }); + try f.object.newline(); return .none; } fn airDbgEmptyStmt(f: *Function, _: Air.Inst.Index) !CValue { - try f.object.writer().writeAll("(void)0;\n"); + try f.object.code.writer.writeAll("(void)0;"); + try f.object.newline(); return .none; } @@ -4841,8 +4919,9 @@ fn airDbgInlineBlock(f: *Function, inst: Air.Inst.Index) !CValue { const ty_pl = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl; const extra = f.air.extraData(Air.DbgInlineBlock, ty_pl.payload); const owner_nav = ip.getNav(zcu.funcInfo(extra.data.func).owner_nav); - const writer = f.object.writer(); - try writer.print("/* inline:{} */\n", .{owner_nav.fqn.fmt(&zcu.intern_pool)}); + const w = &f.object.code.writer; + try w.print("/* inline:{f} */", .{owner_nav.fqn.fmt(&zcu.intern_pool)}); + try f.object.newline(); return lowerBlock(f, inst, @ptrCast(f.air.extra.items[extra.end..][0..extra.data.body_len])); } @@ -4856,8 +4935,9 @@ fn airDbgVar(f: *Function, inst: Air.Inst.Index) !CValue { if (!operand_is_undef) _ = try 
f.resolveInst(pl_op.operand); try reap(f, inst, &.{pl_op.operand}); - const writer = f.object.writer(); - try writer.print("/* {s}:{s} */\n", .{ @tagName(tag), name.toSlice(f.air) }); + const w = &f.object.code.writer; + try w.print("/* {s}:{s} */", .{ @tagName(tag), name.toSlice(f.air) }); + try f.object.newline(); return .none; } @@ -4874,7 +4954,7 @@ fn lowerBlock(f: *Function, inst: Air.Inst.Index, body: []const Air.Inst.Index) const block_id = f.next_block_index; f.next_block_index += 1; - const writer = f.object.writer(); + const w = &f.object.code.writer; const inst_ty = f.typeOfIndex(inst); const result = if (inst_ty.hasRuntimeBitsIgnoreComptime(zcu) and !f.liveness.isUnused(inst)) @@ -4896,8 +4976,6 @@ fn lowerBlock(f: *Function, inst: Air.Inst.Index, body: []const Air.Inst.Index) try die(f, inst, death.toRef()); } - try f.object.indent_writer.insertNewline(); - // noreturn blocks have no `br` instructions reaching them, so we don't want a label if (f.object.dg.is_naked_fn) { if (f.object.dg.expected_block) |expected_block| { @@ -4907,7 +4985,8 @@ fn lowerBlock(f: *Function, inst: Air.Inst.Index, body: []const Air.Inst.Index) } } else if (!f.typeOfIndex(inst).isNoReturn(zcu)) { // label must be followed by an expression, include an empty one. 
- try writer.print("zig_block_{d}:;\n", .{block_id}); + try w.print("\nzig_block_{d}:;", .{block_id}); + try f.object.newline(); } return result; @@ -4944,31 +5023,31 @@ fn lowerTry( const err_union = try f.resolveInst(operand); const inst_ty = f.typeOfIndex(inst); const liveness_condbr = f.liveness.getCondBr(inst); - const writer = f.object.writer(); + const w = &f.object.code.writer; const payload_ty = err_union_ty.errorUnionPayload(zcu); const payload_has_bits = payload_ty.hasRuntimeBitsIgnoreComptime(zcu); if (!err_union_ty.errorUnionSet(zcu).errorSetIsEmpty(zcu)) { - try writer.writeAll("if ("); + try w.writeAll("if ("); if (!payload_has_bits) { if (is_ptr) - try f.writeCValueDeref(writer, err_union) + try f.writeCValueDeref(w, err_union) else - try f.writeCValue(writer, err_union, .Other); + try f.writeCValue(w, err_union, .Other); } else { // Reap the operand so that it can be reused inside genBody. // Remember we must avoid calling reap() twice for the same operand // in this function. 
try reap(f, inst, &.{operand}); if (is_ptr) - try f.writeCValueDerefMember(writer, err_union, .{ .identifier = "error" }) + try f.writeCValueDerefMember(w, err_union, .{ .identifier = "error" }) else - try f.writeCValueMember(writer, err_union, .{ .identifier = "error" }); + try f.writeCValueMember(w, err_union, .{ .identifier = "error" }); } - try writer.writeAll(") "); + try w.writeAll(") "); try genBodyResolveState(f, inst, liveness_condbr.else_deaths, body, false); - try f.object.indent_writer.insertNewline(); + try f.object.newline(); if (f.object.dg.expected_block) |_| return f.fail("runtime code not allowed in naked function", .{}); } @@ -4991,14 +5070,14 @@ fn lowerTry( if (f.liveness.isUnused(inst)) return .none; const local = try f.allocLocal(inst, inst_ty); - const a = try Assignment.start(f, writer, try f.ctypeFromType(inst_ty, .complete)); - try f.writeCValue(writer, local, .Other); - try a.assign(f, writer); + const a = try Assignment.start(f, w, try f.ctypeFromType(inst_ty, .complete)); + try f.writeCValue(w, local, .Other); + try a.assign(f, w); if (is_ptr) { - try writer.writeByte('&'); - try f.writeCValueDerefMember(writer, err_union, .{ .identifier = "payload" }); - } else try f.writeCValueMember(writer, err_union, .{ .identifier = "payload" }); - try a.end(f, writer); + try w.writeByte('&'); + try f.writeCValueDerefMember(w, err_union, .{ .identifier = "payload" }); + } else try f.writeCValueMember(w, err_union, .{ .identifier = "payload" }); + try a.end(f, w); return local; } @@ -5006,7 +5085,7 @@ fn airBr(f: *Function, inst: Air.Inst.Index) !void { const branch = f.air.instructions.items(.data)[@intFromEnum(inst)].br; const block = f.blocks.get(branch.block_inst).?; const result = block.result; - const writer = f.object.writer(); + const w = &f.object.code.writer; if (f.object.dg.is_naked_fn) { if (result != .none) return f.fail("runtime code not allowed in naked function", .{}); @@ -5020,27 +5099,26 @@ fn airBr(f: *Function, inst: 
Air.Inst.Index) !void { const operand = try f.resolveInst(branch.operand); try reap(f, inst, &.{branch.operand}); - const a = try Assignment.start(f, writer, try f.ctypeFromType(operand_ty, .complete)); - try f.writeCValue(writer, result, .Other); - try a.assign(f, writer); - try f.writeCValue(writer, operand, .Other); - try a.end(f, writer); + const a = try Assignment.start(f, w, try f.ctypeFromType(operand_ty, .complete)); + try f.writeCValue(w, result, .Other); + try a.assign(f, w); + try f.writeCValue(w, operand, .Other); + try a.end(f, w); } - try writer.print("goto zig_block_{d};\n", .{block.block_id}); + try w.print("goto zig_block_{d};\n", .{block.block_id}); } fn airRepeat(f: *Function, inst: Air.Inst.Index) !void { const repeat = f.air.instructions.items(.data)[@intFromEnum(inst)].repeat; - const writer = f.object.writer(); - try writer.print("goto zig_loop_{d};\n", .{@intFromEnum(repeat.loop_inst)}); + try f.object.code.writer.print("goto zig_loop_{d};\n", .{@intFromEnum(repeat.loop_inst)}); } fn airSwitchDispatch(f: *Function, inst: Air.Inst.Index) !void { const pt = f.object.dg.pt; const zcu = pt.zcu; const br = f.air.instructions.items(.data)[@intFromEnum(inst)].br; - const writer = f.object.writer(); + const w = &f.object.code.writer; if (try f.air.value(br.operand, pt)) |cond_val| { // Comptime-known dispatch. Iterate the cases to find the correct @@ -5062,18 +5140,19 @@ fn airSwitchDispatch(f: *Function, inst: Air.Inst.Index) !void { } } } else switch_br.cases_len; - try writer.print("goto zig_switch_{d}_dispatch_{d};\n", .{ @intFromEnum(br.block_inst), target_case_idx }); + try w.print("goto zig_switch_{d}_dispatch_{d};\n", .{ @intFromEnum(br.block_inst), target_case_idx }); return; } // Runtime-known dispatch. Set the switch condition, and branch back. 
const cond = try f.resolveInst(br.operand); const cond_local = f.loop_switch_conds.get(br.block_inst).?; - try f.writeCValue(writer, .{ .local = cond_local }, .Other); - try writer.writeAll(" = "); - try f.writeCValue(writer, cond, .Other); - try writer.writeAll(";\n"); - try writer.print("goto zig_switch_{d}_loop;", .{@intFromEnum(br.block_inst)}); + try f.writeCValue(w, .{ .local = cond_local }, .Other); + try w.writeAll(" = "); + try f.writeCValue(w, cond, .Other); + try w.writeByte(';'); + try f.object.newline(); + try w.print("goto zig_switch_{d}_loop;\n", .{@intFromEnum(br.block_inst)}); } fn airBitcast(f: *Function, inst: Air.Inst.Index) !CValue { @@ -5093,7 +5172,7 @@ fn bitcast(f: *Function, dest_ty: Type, operand: CValue, operand_ty: Type) !CVal const zcu = pt.zcu; const target = &f.object.dg.mod.resolved_target.result; const ctype_pool = &f.object.dg.ctype_pool; - const writer = f.object.writer(); + const w = &f.object.code.writer; if (operand_ty.isAbiInt(zcu) and dest_ty.isAbiInt(zcu)) { const src_info = dest_ty.intInfo(zcu); @@ -5104,35 +5183,38 @@ fn bitcast(f: *Function, dest_ty: Type, operand: CValue, operand_ty: Type) !CVal if (dest_ty.isPtrAtRuntime(zcu) or operand_ty.isPtrAtRuntime(zcu)) { const local = try f.allocLocal(null, dest_ty); - try f.writeCValue(writer, local, .Other); - try writer.writeAll(" = ("); - try f.renderType(writer, dest_ty); - try writer.writeByte(')'); - try f.writeCValue(writer, operand, .Other); - try writer.writeAll(";\n"); + try f.writeCValue(w, local, .Other); + try w.writeAll(" = ("); + try f.renderType(w, dest_ty); + try w.writeByte(')'); + try f.writeCValue(w, operand, .Other); + try w.writeByte(';'); + try f.object.newline(); return local; } const operand_lval = if (operand == .constant) blk: { const operand_local = try f.allocLocal(null, operand_ty); - try f.writeCValue(writer, operand_local, .Other); - try writer.writeAll(" = "); - try f.writeCValue(writer, operand, .Other); - try writer.writeAll(";\n"); + try 
f.writeCValue(w, operand_local, .Other); + try w.writeAll(" = "); + try f.writeCValue(w, operand, .Other); + try w.writeByte(';'); + try f.object.newline(); break :blk operand_local; } else operand; const local = try f.allocLocal(null, dest_ty); - try writer.writeAll("memcpy(&"); - try f.writeCValue(writer, local, .Other); - try writer.writeAll(", &"); - try f.writeCValue(writer, operand_lval, .Other); - try writer.writeAll(", sizeof("); + try w.writeAll("memcpy(&"); + try f.writeCValue(w, local, .Other); + try w.writeAll(", &"); + try f.writeCValue(w, operand_lval, .Other); + try w.writeAll(", sizeof("); try f.renderType( - writer, + w, if (dest_ty.abiSize(zcu) <= operand_ty.abiSize(zcu)) dest_ty else operand_ty, ); - try writer.writeAll("));\n"); + try w.writeAll("));"); + try f.object.newline(); // Ensure padding bits have the expected value. if (dest_ty.isAbiInt(zcu)) { @@ -5142,11 +5224,11 @@ fn bitcast(f: *Function, dest_ty: Type, operand: CValue, operand_ty: Type) !CVal var wrap_ctype: ?CType = null; var need_bitcasts = false; - try f.writeCValue(writer, local, .Other); + try f.writeCValue(w, local, .Other); switch (dest_ctype.info(ctype_pool)) { else => {}, .array => |array_info| { - try writer.print("[{d}]", .{switch (target.cpu.arch.endian()) { + try w.print("[{d}]", .{switch (target.cpu.arch.endian()) { .little => array_info.len - 1, .big => 0, }}); @@ -5157,92 +5239,98 @@ fn bitcast(f: *Function, dest_ty: Type, operand: CValue, operand_ty: Type) !CVal bits += 1; }, } - try writer.writeAll(" = "); + try w.writeAll(" = "); if (need_bitcasts) { - try writer.writeAll("zig_bitCast_"); - try f.object.dg.renderCTypeForBuiltinFnName(writer, wrap_ctype.?.toUnsigned()); - try writer.writeByte('('); + try w.writeAll("zig_bitCast_"); + try f.object.dg.renderCTypeForBuiltinFnName(w, wrap_ctype.?.toUnsigned()); + try w.writeByte('('); } - try writer.writeAll("zig_wrap_"); + try w.writeAll("zig_wrap_"); const info_ty = try pt.intType(dest_info.signedness, bits); if 
(wrap_ctype) |ctype| - try f.object.dg.renderCTypeForBuiltinFnName(writer, ctype) + try f.object.dg.renderCTypeForBuiltinFnName(w, ctype) else - try f.object.dg.renderTypeForBuiltinFnName(writer, info_ty); - try writer.writeByte('('); + try f.object.dg.renderTypeForBuiltinFnName(w, info_ty); + try w.writeByte('('); if (need_bitcasts) { - try writer.writeAll("zig_bitCast_"); - try f.object.dg.renderCTypeForBuiltinFnName(writer, wrap_ctype.?); - try writer.writeByte('('); + try w.writeAll("zig_bitCast_"); + try f.object.dg.renderCTypeForBuiltinFnName(w, wrap_ctype.?); + try w.writeByte('('); } - try f.writeCValue(writer, local, .Other); + try f.writeCValue(w, local, .Other); switch (dest_ctype.info(ctype_pool)) { else => {}, - .array => |array_info| try writer.print("[{d}]", .{ + .array => |array_info| try w.print("[{d}]", .{ switch (target.cpu.arch.endian()) { .little => array_info.len - 1, .big => 0, }, }), } - if (need_bitcasts) try writer.writeByte(')'); - try f.object.dg.renderBuiltinInfo(writer, info_ty, .bits); - if (need_bitcasts) try writer.writeByte(')'); - try writer.writeAll(");\n"); + if (need_bitcasts) try w.writeByte(')'); + try f.object.dg.renderBuiltinInfo(w, info_ty, .bits); + if (need_bitcasts) try w.writeByte(')'); + try w.writeAll(");"); + try f.object.newline(); } try f.freeCValue(null, operand_lval); return local; } -fn airTrap(f: *Function, writer: anytype) !void { +fn airTrap(f: *Function, w: *Writer) !void { // Not even allowed to call trap in a naked function. 
if (f.object.dg.is_naked_fn) return; - try writer.writeAll("zig_trap();\n"); + try w.writeAll("zig_trap();\n"); } -fn airBreakpoint(writer: anytype) !CValue { - try writer.writeAll("zig_breakpoint();\n"); +fn airBreakpoint(f: *Function) !CValue { + const w = &f.object.code.writer; + try w.writeAll("zig_breakpoint();"); + try f.object.newline(); return .none; } fn airRetAddr(f: *Function, inst: Air.Inst.Index) !CValue { - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, .usize); - try f.writeCValue(writer, local, .Other); - try writer.writeAll(" = ("); - try f.renderType(writer, .usize); - try writer.writeAll(")zig_return_address();\n"); + try f.writeCValue(w, local, .Other); + try w.writeAll(" = ("); + try f.renderType(w, .usize); + try w.writeAll(")zig_return_address();"); + try f.object.newline(); return local; } fn airFrameAddress(f: *Function, inst: Air.Inst.Index) !CValue { - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, .usize); - try f.writeCValue(writer, local, .Other); - try writer.writeAll(" = ("); - try f.renderType(writer, .usize); - try writer.writeAll(")zig_frame_address();\n"); + try f.writeCValue(w, local, .Other); + try w.writeAll(" = ("); + try f.renderType(w, .usize); + try w.writeAll(")zig_frame_address();"); + try f.object.newline(); return local; } -fn airUnreach(f: *Function) !void { +fn airUnreach(o: *Object) !void { // Not even allowed to call unreachable in a naked function. 
- if (f.object.dg.is_naked_fn) return; - try f.object.writer().writeAll("zig_unreachable();\n"); + if (o.dg.is_naked_fn) return; + try o.code.writer.writeAll("zig_unreachable();\n"); } fn airLoop(f: *Function, inst: Air.Inst.Index) !void { const ty_pl = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl; const loop = f.air.extraData(Air.Block, ty_pl.payload); const body: []const Air.Inst.Index = @ptrCast(f.air.extra.items[loop.end..][0..loop.data.body_len]); - const writer = f.object.writer(); + const w = &f.object.code.writer; // `repeat` instructions matching this loop will branch to // this label. Since we need a label for arbitrary `repeat` // anyway, there's actually no need to use a "real" looping // construct at all! - try writer.print("zig_loop_{d}:\n", .{@intFromEnum(inst)}); + try w.print("zig_loop_{d}:", .{@intFromEnum(inst)}); + try f.object.newline(); try genBodyInner(f, body); // no need to restore state, we're noreturn } @@ -5254,14 +5342,14 @@ fn airCondBr(f: *Function, inst: Air.Inst.Index) !void { const then_body: []const Air.Inst.Index = @ptrCast(f.air.extra.items[extra.end..][0..extra.data.then_body_len]); const else_body: []const Air.Inst.Index = @ptrCast(f.air.extra.items[extra.end + then_body.len ..][0..extra.data.else_body_len]); const liveness_condbr = f.liveness.getCondBr(inst); - const writer = f.object.writer(); + const w = &f.object.code.writer; - try writer.writeAll("if ("); - try f.writeCValue(writer, cond, .Other); - try writer.writeAll(") "); + try w.writeAll("if ("); + try f.writeCValue(w, cond, .Other); + try w.writeAll(") "); try genBodyResolveState(f, inst, liveness_condbr.then_deaths, then_body, false); - try writer.writeByte('\n'); + try f.object.newline(); if (else_body.len > 0) if (f.object.dg.expected_block) |_| return f.fail("runtime code not allowed in naked function", .{}); @@ -5287,7 +5375,7 @@ fn airSwitchBr(f: *Function, inst: Air.Inst.Index, is_dispatch_loop: bool) !void const init_condition = try 
f.resolveInst(switch_br.operand); try reap(f, inst, &.{switch_br.operand}); const condition_ty = f.typeOf(switch_br.operand); - const writer = f.object.writer(); + const w = &f.object.code.writer; // For dispatches, we will create a local alloc to contain the condition value. // This may not result in optimal codegen for switch loops, but it minimizes the @@ -5295,7 +5383,8 @@ fn airSwitchBr(f: *Function, inst: Air.Inst.Index, is_dispatch_loop: bool) !void const condition = if (is_dispatch_loop) cond: { const new_local = try f.allocLocal(inst, condition_ty); try f.copyCValue(try f.ctypeFromType(condition_ty, .complete), new_local, init_condition); - try writer.print("zig_switch_{d}_loop:\n", .{@intFromEnum(inst)}); + try w.print("zig_switch_{d}_loop:", .{@intFromEnum(inst)}); + try f.object.newline(); try f.loop_switch_conds.put(gpa, inst, new_local.new_local); break :cond new_local; } else init_condition; @@ -5304,7 +5393,7 @@ fn airSwitchBr(f: *Function, inst: Air.Inst.Index, is_dispatch_loop: bool) !void assert(f.loop_switch_conds.remove(inst)); }; - try writer.writeAll("switch ("); + try w.writeAll("switch ("); const lowered_condition_ty: Type = if (condition_ty.toIntern() == .bool_type) .u1 @@ -5313,13 +5402,13 @@ fn airSwitchBr(f: *Function, inst: Air.Inst.Index, is_dispatch_loop: bool) !void else condition_ty; if (condition_ty.toIntern() != lowered_condition_ty.toIntern()) { - try writer.writeByte('('); - try f.renderType(writer, lowered_condition_ty); - try writer.writeByte(')'); + try w.writeByte('('); + try f.renderType(w, lowered_condition_ty); + try w.writeByte(')'); } - try f.writeCValue(writer, condition, .Other); - try writer.writeAll(") {"); - f.object.indent_writer.pushIndent(); + try f.writeCValue(w, condition, .Other); + try w.writeAll(") {"); + f.object.indent(); const liveness = try f.liveness.getSwitchBr(gpa, inst, switch_br.cases_len + 1); defer gpa.free(liveness.deaths); @@ -5332,35 +5421,37 @@ fn airSwitchBr(f: *Function, inst: 
Air.Inst.Index, is_dispatch_loop: bool) !void continue; } for (case.items) |item| { - try f.object.indent_writer.insertNewline(); - try writer.writeAll("case "); + try f.object.newline(); + try w.writeAll("case "); const item_value = try f.air.value(item, pt); // If `item_value` is a pointer with a known integer address, print the address // with no cast to avoid a warning. write_val: { if (condition_ty.isPtrAtRuntime(zcu)) { if (item_value.?.getUnsignedInt(zcu)) |item_int| { - try writer.print("{}", .{try f.fmtIntLiteral(try pt.intValue(lowered_condition_ty, item_int))}); + try w.print("{f}", .{try f.fmtIntLiteralDec(try pt.intValue(lowered_condition_ty, item_int))}); break :write_val; } } if (condition_ty.isPtrAtRuntime(zcu)) { - try writer.writeByte('('); - try f.renderType(writer, .usize); - try writer.writeByte(')'); + try w.writeByte('('); + try f.renderType(w, .usize); + try w.writeByte(')'); } - try f.object.dg.renderValue(writer, (try f.air.value(item, pt)).?, .Other); + try f.object.dg.renderValue(w, (try f.air.value(item, pt)).?, .Other); } - try writer.writeByte(':'); + try w.writeByte(':'); } - try writer.writeAll(" {\n"); - f.object.indent_writer.pushIndent(); + try w.writeAll(" {"); + f.object.indent(); + try f.object.newline(); if (is_dispatch_loop) { - try writer.print("zig_switch_{d}_dispatch_{d}: ", .{ @intFromEnum(inst), case.idx }); + try w.print("zig_switch_{d}_dispatch_{d}:;", .{ @intFromEnum(inst), case.idx }); + try f.object.newline(); } try genBodyResolveState(f, inst, liveness.deaths[case.idx], case.body, true); - f.object.indent_writer.popIndent(); - try writer.writeByte('}'); + try f.object.outdent(); + try w.writeByte('}'); if (f.object.dg.expected_block) |_| return f.fail("runtime code not allowed in naked function", .{}); @@ -5368,9 +5459,9 @@ fn airSwitchBr(f: *Function, inst: Air.Inst.Index, is_dispatch_loop: bool) !void } const else_body = it.elseBody(); - try f.object.indent_writer.insertNewline(); + try f.object.newline(); - try 
writer.writeAll("default: "); + try w.writeAll("default: "); if (any_range_cases) { // We will iterate the cases again to handle those with ranges, and generate // code using conditions rather than switch cases for such cases. @@ -5378,40 +5469,41 @@ fn airSwitchBr(f: *Function, inst: Air.Inst.Index, is_dispatch_loop: bool) !void while (it.next()) |case| { if (case.ranges.len == 0) continue; // handled above - try writer.writeAll("if ("); + try w.writeAll("if ("); for (case.items, 0..) |item, item_i| { - if (item_i != 0) try writer.writeAll(" || "); - try f.writeCValue(writer, condition, .Other); - try writer.writeAll(" == "); - try f.object.dg.renderValue(writer, (try f.air.value(item, pt)).?, .Other); + if (item_i != 0) try w.writeAll(" || "); + try f.writeCValue(w, condition, .Other); + try w.writeAll(" == "); + try f.object.dg.renderValue(w, (try f.air.value(item, pt)).?, .Other); } for (case.ranges, 0..) |range, range_i| { - if (case.items.len != 0 or range_i != 0) try writer.writeAll(" || "); + if (case.items.len != 0 or range_i != 0) try w.writeAll(" || "); // "(x >= lower && x <= upper)" - try writer.writeByte('('); - try f.writeCValue(writer, condition, .Other); - try writer.writeAll(" >= "); - try f.object.dg.renderValue(writer, (try f.air.value(range[0], pt)).?, .Other); - try writer.writeAll(" && "); - try f.writeCValue(writer, condition, .Other); - try writer.writeAll(" <= "); - try f.object.dg.renderValue(writer, (try f.air.value(range[1], pt)).?, .Other); - try writer.writeByte(')'); + try w.writeByte('('); + try f.writeCValue(w, condition, .Other); + try w.writeAll(" >= "); + try f.object.dg.renderValue(w, (try f.air.value(range[0], pt)).?, .Other); + try w.writeAll(" && "); + try f.writeCValue(w, condition, .Other); + try w.writeAll(" <= "); + try f.object.dg.renderValue(w, (try f.air.value(range[1], pt)).?, .Other); + try w.writeByte(')'); } - try writer.writeAll(") {\n"); - f.object.indent_writer.pushIndent(); + try w.writeAll(") {"); + 
f.object.indent(); + try f.object.newline(); if (is_dispatch_loop) { - try writer.print("zig_switch_{d}_dispatch_{d}: ", .{ @intFromEnum(inst), case.idx }); + try w.print("zig_switch_{d}_dispatch_{d}: ", .{ @intFromEnum(inst), case.idx }); } try genBodyResolveState(f, inst, liveness.deaths[case.idx], case.body, true); - f.object.indent_writer.popIndent(); - try writer.writeByte('}'); + try f.object.outdent(); + try w.writeByte('}'); if (f.object.dg.expected_block) |_| return f.fail("runtime code not allowed in naked function", .{}); } } if (is_dispatch_loop) { - try writer.print("zig_switch_{d}_dispatch_{d}: ", .{ @intFromEnum(inst), switch_br.cases_len }); + try w.print("zig_switch_{d}_dispatch_{d}: ", .{ @intFromEnum(inst), switch_br.cases_len }); } if (else_body.len > 0) { // Note that this must be the last case, so we do not need to use `genBodyResolveState` since @@ -5422,13 +5514,10 @@ fn airSwitchBr(f: *Function, inst: Air.Inst.Index, is_dispatch_loop: bool) !void try genBody(f, else_body); if (f.object.dg.expected_block) |_| return f.fail("runtime code not allowed in naked function", .{}); - } else { - try writer.writeAll("zig_unreachable();"); - } - try f.object.indent_writer.insertNewline(); - - f.object.indent_writer.popIndent(); - try writer.writeAll("}\n"); + } else try airUnreach(&f.object); + try f.object.newline(); + try f.object.outdent(); + try w.writeAll("}\n"); } fn asmInputNeedsLocal(f: *Function, constraint: []const u8, value: CValue) bool { @@ -5466,7 +5555,7 @@ fn airAsm(f: *Function, inst: Air.Inst.Index) !CValue { extra_i += inputs.len; const result = result: { - const writer = f.object.writer(); + const w = &f.object.code.writer; const inst_ty = f.typeOfIndex(inst); const inst_local = if (inst_ty.hasRuntimeBitsIgnoreComptime(zcu)) local: { const inst_local = try f.allocLocalValue(.{ @@ -5474,10 +5563,11 @@ fn airAsm(f: *Function, inst: Air.Inst.Index) !CValue { .alignas = CType.AlignAs.fromAbiAlignment(inst_ty.abiAlignment(zcu)), }); if 
(f.wantSafety()) { - try f.writeCValue(writer, inst_local, .Other); - try writer.writeAll(" = "); - try f.writeCValue(writer, .{ .undef = inst_ty }, .Other); - try writer.writeAll(";\n"); + try f.writeCValue(w, inst_local, .Other); + try w.writeAll(" = "); + try f.writeCValue(w, .{ .undef = inst_ty }, .Other); + try w.writeByte(';'); + try f.object.newline(); } break :local inst_local; } else .none; @@ -5501,21 +5591,22 @@ fn airAsm(f: *Function, inst: Air.Inst.Index) !CValue { const is_reg = constraint[1] == '{'; if (is_reg) { const output_ty = if (output == .none) inst_ty else f.typeOf(output).childType(zcu); - try writer.writeAll("register "); + try w.writeAll("register "); const output_local = try f.allocLocalValue(.{ .ctype = try f.ctypeFromType(output_ty, .complete), .alignas = CType.AlignAs.fromAbiAlignment(output_ty.abiAlignment(zcu)), }); try f.allocs.put(gpa, output_local.new_local, false); - try f.object.dg.renderTypeAndName(writer, output_ty, output_local, .{}, .none, .complete); - try writer.writeAll(" __asm(\""); - try writer.writeAll(constraint["={".len .. constraint.len - "}".len]); - try writer.writeAll("\")"); + try f.object.dg.renderTypeAndName(w, output_ty, output_local, .{}, .none, .complete); + try w.writeAll(" __asm(\""); + try w.writeAll(constraint["={".len .. 
constraint.len - "}".len]); + try w.writeAll("\")"); if (f.wantSafety()) { - try writer.writeAll(" = "); - try f.writeCValue(writer, .{ .undef = output_ty }, .Other); + try w.writeAll(" = "); + try f.writeCValue(w, .{ .undef = output_ty }, .Other); } - try writer.writeAll(";\n"); + try w.writeByte(';'); + try f.object.newline(); } } for (inputs) |input| { @@ -5536,21 +5627,22 @@ fn airAsm(f: *Function, inst: Air.Inst.Index) !CValue { const input_val = try f.resolveInst(input); if (asmInputNeedsLocal(f, constraint, input_val)) { const input_ty = f.typeOf(input); - if (is_reg) try writer.writeAll("register "); + if (is_reg) try w.writeAll("register "); const input_local = try f.allocLocalValue(.{ .ctype = try f.ctypeFromType(input_ty, .complete), .alignas = CType.AlignAs.fromAbiAlignment(input_ty.abiAlignment(zcu)), }); try f.allocs.put(gpa, input_local.new_local, false); - try f.object.dg.renderTypeAndName(writer, input_ty, input_local, Const, .none, .complete); + try f.object.dg.renderTypeAndName(w, input_ty, input_local, Const, .none, .complete); if (is_reg) { - try writer.writeAll(" __asm(\""); - try writer.writeAll(constraint["{".len .. constraint.len - "}".len]); - try writer.writeAll("\")"); + try w.writeAll(" __asm(\""); + try w.writeAll(constraint["{".len .. 
constraint.len - "}".len]); + try w.writeAll("\")"); } - try writer.writeAll(" = "); - try f.writeCValue(writer, input_val, .Other); - try writer.writeAll(";\n"); + try w.writeAll(" = "); + try f.writeCValue(w, input_val, .Other); + try w.writeByte(';'); + try f.object.newline(); } } for (0..clobbers_len) |_| { @@ -5610,14 +5702,14 @@ fn airAsm(f: *Function, inst: Air.Inst.Index) !CValue { } } - try writer.writeAll("__asm"); - if (is_volatile) try writer.writeAll(" volatile"); - try writer.print("({s}", .{fmtStringLiteral(fixed_asm_source[0..dst_i], null)}); + try w.writeAll("__asm"); + if (is_volatile) try w.writeAll(" volatile"); + try w.print("({f}", .{fmtStringLiteral(fixed_asm_source[0..dst_i], null)}); } extra_i = constraints_extra_begin; var locals_index = locals_begin; - try writer.writeByte(':'); + try w.writeByte(':'); for (outputs, 0..) |output, index| { const extra_bytes = mem.sliceAsBytes(f.air.extra.items[extra_i..]); const constraint = mem.sliceTo(extra_bytes, 0); @@ -5626,22 +5718,22 @@ fn airAsm(f: *Function, inst: Air.Inst.Index) !CValue { // for the string, we still use the next u32 for the null terminator. 
extra_i += (constraint.len + name.len + (2 + 3)) / 4; - if (index > 0) try writer.writeByte(','); - try writer.writeByte(' '); - if (!mem.eql(u8, name, "_")) try writer.print("[{s}]", .{name}); + if (index > 0) try w.writeByte(','); + try w.writeByte(' '); + if (!mem.eql(u8, name, "_")) try w.print("[{s}]", .{name}); const is_reg = constraint[1] == '{'; - try writer.print("{s}(", .{fmtStringLiteral(if (is_reg) "=r" else constraint, null)}); + try w.print("{f}(", .{fmtStringLiteral(if (is_reg) "=r" else constraint, null)}); if (is_reg) { - try f.writeCValue(writer, .{ .local = locals_index }, .Other); + try f.writeCValue(w, .{ .local = locals_index }, .Other); locals_index += 1; } else if (output == .none) { - try f.writeCValue(writer, inst_local, .FunctionArgument); + try f.writeCValue(w, inst_local, .FunctionArgument); } else { - try f.writeCValueDeref(writer, try f.resolveInst(output)); + try f.writeCValueDeref(w, try f.resolveInst(output)); } - try writer.writeByte(')'); + try w.writeByte(')'); } - try writer.writeByte(':'); + try w.writeByte(':'); for (inputs, 0..) |input, index| { const extra_bytes = mem.sliceAsBytes(f.air.extra.items[extra_i..]); const constraint = mem.sliceTo(extra_bytes, 0); @@ -5650,21 +5742,21 @@ fn airAsm(f: *Function, inst: Air.Inst.Index) !CValue { // for the string, we still use the next u32 for the null terminator. 
extra_i += (constraint.len + name.len + (2 + 3)) / 4; - if (index > 0) try writer.writeByte(','); - try writer.writeByte(' '); - if (!mem.eql(u8, name, "_")) try writer.print("[{s}]", .{name}); + if (index > 0) try w.writeByte(','); + try w.writeByte(' '); + if (!mem.eql(u8, name, "_")) try w.print("[{s}]", .{name}); const is_reg = constraint[0] == '{'; const input_val = try f.resolveInst(input); - try writer.print("{s}(", .{fmtStringLiteral(if (is_reg) "r" else constraint, null)}); - try f.writeCValue(writer, if (asmInputNeedsLocal(f, constraint, input_val)) local: { + try w.print("{f}(", .{fmtStringLiteral(if (is_reg) "r" else constraint, null)}); + try f.writeCValue(w, if (asmInputNeedsLocal(f, constraint, input_val)) local: { const input_local_idx = locals_index; locals_index += 1; break :local .{ .local = input_local_idx }; } else input_val, .Other); - try writer.writeByte(')'); + try w.writeByte(')'); } - try writer.writeByte(':'); + try w.writeByte(':'); for (0..clobbers_len) |clobber_i| { const clobber = mem.sliceTo(mem.sliceAsBytes(f.air.extra.items[extra_i..]), 0); // This equation accounts for the fact that even if we have exactly 4 bytes @@ -5673,10 +5765,11 @@ fn airAsm(f: *Function, inst: Air.Inst.Index) !CValue { if (clobber.len == 0) continue; - if (clobber_i > 0) try writer.writeByte(','); - try writer.print(" {s}", .{fmtStringLiteral(clobber, null)}); + if (clobber_i > 0) try w.writeByte(','); + try w.print(" {f}", .{fmtStringLiteral(clobber, null)}); } - try writer.writeAll(");\n"); + try w.writeAll(");"); + try f.object.newline(); extra_i = constraints_extra_begin; locals_index = locals_begin; @@ -5690,14 +5783,15 @@ fn airAsm(f: *Function, inst: Air.Inst.Index) !CValue { const is_reg = constraint[1] == '{'; if (is_reg) { - try f.writeCValueDeref(writer, if (output == .none) + try f.writeCValueDeref(w, if (output == .none) .{ .local_ref = inst_local.new_local } else try f.resolveInst(output)); - try writer.writeAll(" = "); - try 
f.writeCValue(writer, .{ .local = locals_index }, .Other); + try w.writeAll(" = "); + try f.writeCValue(w, .{ .local = locals_index }, .Other); locals_index += 1; - try writer.writeAll(";\n"); + try w.writeByte(';'); + try f.object.newline(); } } @@ -5727,14 +5821,14 @@ fn airIsNull( const ctype_pool = &f.object.dg.ctype_pool; const un_op = f.air.instructions.items(.data)[@intFromEnum(inst)].un_op; - const writer = f.object.writer(); + const w = &f.object.code.writer; const operand = try f.resolveInst(un_op); try reap(f, inst, &.{un_op}); const local = try f.allocLocal(inst, .bool); - const a = try Assignment.start(f, writer, .bool); - try f.writeCValue(writer, local, .Other); - try a.assign(f, writer); + const a = try Assignment.start(f, w, .bool); + try f.writeCValue(w, local, .Other); + try a.assign(f, w); const operand_ty = f.typeOf(un_op); const optional_ty = if (is_ptr) operand_ty.childType(zcu) else operand_ty; @@ -5742,9 +5836,9 @@ fn airIsNull( const rhs = switch (opt_ctype.info(ctype_pool)) { .basic, .pointer => rhs: { if (is_ptr) - try f.writeCValueDeref(writer, operand) + try f.writeCValueDeref(w, operand) else - try f.writeCValue(writer, operand, .Other); + try f.writeCValue(w, operand, .Other); break :rhs if (opt_ctype.isBool()) "true" else if (opt_ctype.isInteger()) @@ -5756,24 +5850,24 @@ fn airIsNull( .aggregate => |aggregate| switch (aggregate.fields.at(0, ctype_pool).name.index) { .is_null, .payload => rhs: { if (is_ptr) - try f.writeCValueDerefMember(writer, operand, .{ .identifier = "is_null" }) + try f.writeCValueDerefMember(w, operand, .{ .identifier = "is_null" }) else - try f.writeCValueMember(writer, operand, .{ .identifier = "is_null" }); + try f.writeCValueMember(w, operand, .{ .identifier = "is_null" }); break :rhs "true"; }, .ptr, .len => rhs: { if (is_ptr) - try f.writeCValueDerefMember(writer, operand, .{ .identifier = "ptr" }) + try f.writeCValueDerefMember(w, operand, .{ .identifier = "ptr" }) else - try f.writeCValueMember(writer, 
operand, .{ .identifier = "ptr" }); + try f.writeCValueMember(w, operand, .{ .identifier = "ptr" }); break :rhs "NULL"; }, else => unreachable, }, }; - try writer.writeAll(compareOperatorC(operator)); - try writer.writeAll(rhs); - try a.end(f, writer); + try w.writeAll(compareOperatorC(operator)); + try w.writeAll(rhs); + try a.end(f, w); return local; } @@ -5795,16 +5889,16 @@ fn airOptionalPayload(f: *Function, inst: Air.Inst.Index, is_ptr: bool) !CValue .aligned, .array, .vector, .fwd_decl, .function => unreachable, .aggregate => |aggregate| switch (aggregate.fields.at(0, ctype_pool).name.index) { .is_null, .payload => { - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, inst_ty); - const a = try Assignment.start(f, writer, try f.ctypeFromType(inst_ty, .complete)); - try f.writeCValue(writer, local, .Other); - try a.assign(f, writer); + const a = try Assignment.start(f, w, try f.ctypeFromType(inst_ty, .complete)); + try f.writeCValue(w, local, .Other); + try a.assign(f, w); if (is_ptr) { - try writer.writeByte('&'); - try f.writeCValueDerefMember(writer, operand, .{ .identifier = "payload" }); - } else try f.writeCValueMember(writer, operand, .{ .identifier = "payload" }); - try a.end(f, writer); + try w.writeByte('&'); + try f.writeCValueDerefMember(w, operand, .{ .identifier = "payload" }); + } else try f.writeCValueMember(w, operand, .{ .identifier = "payload" }); + try a.end(f, w); return local; }, .ptr, .len => return f.moveCValue(inst, inst_ty, operand), @@ -5817,7 +5911,7 @@ fn airOptionalPayloadPtrSet(f: *Function, inst: Air.Inst.Index) !CValue { const pt = f.object.dg.pt; const zcu = pt.zcu; const ty_op = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_op; - const writer = f.object.writer(); + const w = &f.object.code.writer; const operand = try f.resolveInst(ty_op.operand); try reap(f, inst, &.{ty_op.operand}); const operand_ty = f.typeOf(ty_op.operand); @@ -5826,40 +5920,40 @@ fn 
airOptionalPayloadPtrSet(f: *Function, inst: Air.Inst.Index) !CValue { const opt_ctype = try f.ctypeFromType(operand_ty.childType(zcu), .complete); switch (opt_ctype.info(&f.object.dg.ctype_pool)) { .basic => { - const a = try Assignment.start(f, writer, opt_ctype); - try f.writeCValueDeref(writer, operand); - try a.assign(f, writer); - try f.object.dg.renderValue(writer, Value.false, .Other); - try a.end(f, writer); + const a = try Assignment.start(f, w, opt_ctype); + try f.writeCValueDeref(w, operand); + try a.assign(f, w); + try f.object.dg.renderValue(w, Value.false, .Other); + try a.end(f, w); return .none; }, .pointer => { if (f.liveness.isUnused(inst)) return .none; const local = try f.allocLocal(inst, inst_ty); - const a = try Assignment.start(f, writer, opt_ctype); - try f.writeCValue(writer, local, .Other); - try a.assign(f, writer); - try f.writeCValue(writer, operand, .Other); - try a.end(f, writer); + const a = try Assignment.start(f, w, opt_ctype); + try f.writeCValue(w, local, .Other); + try a.assign(f, w); + try f.writeCValue(w, operand, .Other); + try a.end(f, w); return local; }, .aligned, .array, .vector, .fwd_decl, .function => unreachable, .aggregate => { { - const a = try Assignment.start(f, writer, opt_ctype); - try f.writeCValueDerefMember(writer, operand, .{ .identifier = "is_null" }); - try a.assign(f, writer); - try f.object.dg.renderValue(writer, Value.false, .Other); - try a.end(f, writer); + const a = try Assignment.start(f, w, opt_ctype); + try f.writeCValueDerefMember(w, operand, .{ .identifier = "is_null" }); + try a.assign(f, w); + try f.object.dg.renderValue(w, Value.false, .Other); + try a.end(f, w); } if (f.liveness.isUnused(inst)) return .none; const local = try f.allocLocal(inst, inst_ty); - const a = try Assignment.start(f, writer, opt_ctype); - try f.writeCValue(writer, local, .Other); - try a.assign(f, writer); - try writer.writeByte('&'); - try f.writeCValueDerefMember(writer, operand, .{ .identifier = "payload" }); - try 
a.end(f, writer); + const a = try Assignment.start(f, w, opt_ctype); + try f.writeCValue(w, local, .Other); + try a.assign(f, w); + try w.writeByte('&'); + try f.writeCValueDerefMember(w, operand, .{ .identifier = "payload" }); + try a.end(f, w); return local; }, } @@ -5967,42 +6061,43 @@ fn airFieldParentPtr(f: *Function, inst: Air.Inst.Index) !CValue { const field_ptr_val = try f.resolveInst(extra.field_ptr); try reap(f, inst, &.{extra.field_ptr}); - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, container_ptr_ty); - try f.writeCValue(writer, local, .Other); - try writer.writeAll(" = ("); - try f.renderType(writer, container_ptr_ty); - try writer.writeByte(')'); + try f.writeCValue(w, local, .Other); + try w.writeAll(" = ("); + try f.renderType(w, container_ptr_ty); + try w.writeByte(')'); switch (fieldLocation(container_ptr_ty, field_ptr_ty, extra.field_index, pt)) { - .begin => try f.writeCValue(writer, field_ptr_val, .Other), + .begin => try f.writeCValue(w, field_ptr_val, .Other), .field => |field| { const u8_ptr_ty = try pt.adjustPtrTypeChild(field_ptr_ty, .u8); - try writer.writeAll("(("); - try f.renderType(writer, u8_ptr_ty); - try writer.writeByte(')'); - try f.writeCValue(writer, field_ptr_val, .Other); - try writer.writeAll(" - offsetof("); - try f.renderType(writer, container_ty); - try writer.writeAll(", "); - try f.writeCValue(writer, field, .Other); - try writer.writeAll("))"); + try w.writeAll("(("); + try f.renderType(w, u8_ptr_ty); + try w.writeByte(')'); + try f.writeCValue(w, field_ptr_val, .Other); + try w.writeAll(" - offsetof("); + try f.renderType(w, container_ty); + try w.writeAll(", "); + try f.writeCValue(w, field, .Other); + try w.writeAll("))"); }, .byte_offset => |byte_offset| { const u8_ptr_ty = try pt.adjustPtrTypeChild(field_ptr_ty, .u8); - try writer.writeAll("(("); - try f.renderType(writer, u8_ptr_ty); - try writer.writeByte(')'); - try f.writeCValue(writer, 
field_ptr_val, .Other); - try writer.print(" - {})", .{ - try f.fmtIntLiteral(try pt.intValue(.usize, byte_offset)), + try w.writeAll("(("); + try f.renderType(w, u8_ptr_ty); + try w.writeByte(')'); + try f.writeCValue(w, field_ptr_val, .Other); + try w.print(" - {f})", .{ + try f.fmtIntLiteralDec(try pt.intValue(.usize, byte_offset)), }); }, } - try writer.writeAll(";\n"); + try w.writeByte(';'); + try f.object.newline(); return local; } @@ -6021,33 +6116,34 @@ fn fieldPtr( // Ensure complete type definition is visible before accessing fields. _ = try f.ctypeFromType(container_ty, .complete); - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, field_ptr_ty); - try f.writeCValue(writer, local, .Other); - try writer.writeAll(" = ("); - try f.renderType(writer, field_ptr_ty); - try writer.writeByte(')'); + try f.writeCValue(w, local, .Other); + try w.writeAll(" = ("); + try f.renderType(w, field_ptr_ty); + try w.writeByte(')'); switch (fieldLocation(container_ptr_ty, field_ptr_ty, field_index, pt)) { - .begin => try f.writeCValue(writer, container_ptr_val, .Other), + .begin => try f.writeCValue(w, container_ptr_val, .Other), .field => |field| { - try writer.writeByte('&'); - try f.writeCValueDerefMember(writer, container_ptr_val, field); + try w.writeByte('&'); + try f.writeCValueDerefMember(w, container_ptr_val, field); }, .byte_offset => |byte_offset| { const u8_ptr_ty = try pt.adjustPtrTypeChild(field_ptr_ty, .u8); - try writer.writeAll("(("); - try f.renderType(writer, u8_ptr_ty); - try writer.writeByte(')'); - try f.writeCValue(writer, container_ptr_val, .Other); - try writer.print(" + {})", .{ - try f.fmtIntLiteral(try pt.intValue(.usize, byte_offset)), + try w.writeAll("(("); + try f.renderType(w, u8_ptr_ty); + try w.writeByte(')'); + try f.writeCValue(w, container_ptr_val, .Other); + try w.print(" + {f})", .{ + try f.fmtIntLiteralDec(try pt.intValue(.usize, byte_offset)), }); }, } - try 
writer.writeAll(";\n"); + try w.writeByte(';'); + try f.object.newline(); return local; } @@ -6067,7 +6163,7 @@ fn airStructFieldVal(f: *Function, inst: Air.Inst.Index) !CValue { const struct_byval = try f.resolveInst(extra.struct_operand); try reap(f, inst, &.{extra.struct_operand}); const struct_ty = f.typeOf(extra.struct_operand); - const writer = f.object.writer(); + const w = &f.object.code.writer; // Ensure complete type definition is visible before accessing fields. _ = try f.ctypeFromType(struct_ty, .complete); @@ -6094,42 +6190,44 @@ fn airStructFieldVal(f: *Function, inst: Air.Inst.Index) !CValue { const field_int_ty = try pt.intType(field_int_signedness, @as(u16, @intCast(inst_ty.bitSize(zcu)))); const temp_local = try f.allocLocal(inst, field_int_ty); - try f.writeCValue(writer, temp_local, .Other); - try writer.writeAll(" = zig_wrap_"); - try f.object.dg.renderTypeForBuiltinFnName(writer, field_int_ty); - try writer.writeAll("(("); - try f.renderType(writer, field_int_ty); - try writer.writeByte(')'); + try f.writeCValue(w, temp_local, .Other); + try w.writeAll(" = zig_wrap_"); + try f.object.dg.renderTypeForBuiltinFnName(w, field_int_ty); + try w.writeAll("(("); + try f.renderType(w, field_int_ty); + try w.writeByte(')'); const cant_cast = int_info.bits > 64; if (cant_cast) { if (field_int_ty.bitSize(zcu) > 64) return f.fail("TODO: C backend: implement casting between types > 64 bits", .{}); - try writer.writeAll("zig_lo_"); - try f.object.dg.renderTypeForBuiltinFnName(writer, struct_ty); - try writer.writeByte('('); + try w.writeAll("zig_lo_"); + try f.object.dg.renderTypeForBuiltinFnName(w, struct_ty); + try w.writeByte('('); } if (bit_offset > 0) { - try writer.writeAll("zig_shr_"); - try f.object.dg.renderTypeForBuiltinFnName(writer, struct_ty); - try writer.writeByte('('); + try w.writeAll("zig_shr_"); + try f.object.dg.renderTypeForBuiltinFnName(w, struct_ty); + try w.writeByte('('); } - try f.writeCValue(writer, struct_byval, .Other); - if 
(bit_offset > 0) try writer.print(", {})", .{ - try f.fmtIntLiteral(try pt.intValue(bit_offset_ty, bit_offset)), + try f.writeCValue(w, struct_byval, .Other); + if (bit_offset > 0) try w.print(", {f})", .{ + try f.fmtIntLiteralDec(try pt.intValue(bit_offset_ty, bit_offset)), }); - if (cant_cast) try writer.writeByte(')'); - try f.object.dg.renderBuiltinInfo(writer, field_int_ty, .bits); - try writer.writeAll(");\n"); + if (cant_cast) try w.writeByte(')'); + try f.object.dg.renderBuiltinInfo(w, field_int_ty, .bits); + try w.writeAll(");"); + try f.object.newline(); if (inst_ty.eql(field_int_ty, zcu)) return temp_local; const local = try f.allocLocal(inst, inst_ty); if (local.new_local != temp_local.new_local) { - try writer.writeAll("memcpy("); - try f.writeCValue(writer, .{ .local_ref = local.new_local }, .FunctionArgument); - try writer.writeAll(", "); - try f.writeCValue(writer, .{ .local_ref = temp_local.new_local }, .FunctionArgument); - try writer.writeAll(", sizeof("); - try f.renderType(writer, inst_ty); - try writer.writeAll("));\n"); + try w.writeAll("memcpy("); + try f.writeCValue(w, .{ .local_ref = local.new_local }, .FunctionArgument); + try w.writeAll(", "); + try f.writeCValue(w, .{ .local_ref = temp_local.new_local }, .FunctionArgument); + try w.writeAll(", sizeof("); + try f.renderType(w, inst_ty); + try w.writeAll("));"); + try f.object.newline(); } try freeLocal(f, inst, temp_local.new_local, null); return local; @@ -6150,10 +6248,11 @@ fn airStructFieldVal(f: *Function, inst: Air.Inst.Index) !CValue { .@"packed" => { const operand_lval = if (struct_byval == .constant) blk: { const operand_local = try f.allocLocal(inst, struct_ty); - try f.writeCValue(writer, operand_local, .Other); - try writer.writeAll(" = "); - try f.writeCValue(writer, struct_byval, .Other); - try writer.writeAll(";\n"); + try f.writeCValue(w, operand_local, .Other); + try w.writeAll(" = "); + try f.writeCValue(w, struct_byval, .Other); + try w.writeByte(';'); + try 
f.object.newline(); break :blk operand_local; } else struct_byval; const local = try f.allocLocal(inst, inst_ty); @@ -6164,13 +6263,14 @@ fn airStructFieldVal(f: *Function, inst: Air.Inst.Index) !CValue { }, else => true, }) { - try writer.writeAll("memcpy(&"); - try f.writeCValue(writer, local, .Other); - try writer.writeAll(", &"); - try f.writeCValue(writer, operand_lval, .Other); - try writer.writeAll(", sizeof("); - try f.renderType(writer, inst_ty); - try writer.writeAll("));\n"); + try w.writeAll("memcpy(&"); + try f.writeCValue(w, local, .Other); + try w.writeAll(", &"); + try f.writeCValue(w, operand_lval, .Other); + try w.writeAll(", sizeof("); + try f.renderType(w, inst_ty); + try w.writeAll("));"); + try f.object.newline(); } try f.freeCValue(inst, operand_lval); return local; @@ -6181,11 +6281,11 @@ fn airStructFieldVal(f: *Function, inst: Air.Inst.Index) !CValue { }; const local = try f.allocLocal(inst, inst_ty); - const a = try Assignment.start(f, writer, try f.ctypeFromType(inst_ty, .complete)); - try f.writeCValue(writer, local, .Other); - try a.assign(f, writer); - try f.writeCValueMember(writer, struct_byval, field_name); - try a.end(f, writer); + const a = try Assignment.start(f, w, try f.ctypeFromType(inst_ty, .complete)); + try f.writeCValue(w, local, .Other); + try a.assign(f, w); + try f.writeCValueMember(w, struct_byval, field_name); + try a.end(f, w); return local; } @@ -6212,21 +6312,22 @@ fn airUnwrapErrUnionErr(f: *Function, inst: Air.Inst.Index) !CValue { return local; } - const writer = f.object.writer(); - try f.writeCValue(writer, local, .Other); - try writer.writeAll(" = "); + const w = &f.object.code.writer; + try f.writeCValue(w, local, .Other); + try w.writeAll(" = "); if (!payload_ty.hasRuntimeBits(zcu)) - try f.writeCValue(writer, operand, .Other) + try f.writeCValue(w, operand, .Other) else if (error_ty.errorSetIsEmpty(zcu)) - try writer.print("{}", .{ - try f.fmtIntLiteral(try pt.intValue(try pt.errorIntType(), 0)), + try 
w.print("{f}", .{ + try f.fmtIntLiteralDec(try pt.intValue(try pt.errorIntType(), 0)), }) else if (operand_is_ptr) - try f.writeCValueDerefMember(writer, operand, .{ .identifier = "error" }) + try f.writeCValueDerefMember(w, operand, .{ .identifier = "error" }) else - try f.writeCValueMember(writer, operand, .{ .identifier = "error" }); - try writer.writeAll(";\n"); + try f.writeCValueMember(w, operand, .{ .identifier = "error" }); + try w.writeByte(';'); + try f.object.newline(); return local; } @@ -6241,29 +6342,30 @@ fn airUnwrapErrUnionPay(f: *Function, inst: Air.Inst.Index, is_ptr: bool) !CValu const operand_ty = f.typeOf(ty_op.operand); const error_union_ty = if (is_ptr) operand_ty.childType(zcu) else operand_ty; - const writer = f.object.writer(); + const w = &f.object.code.writer; if (!error_union_ty.errorUnionPayload(zcu).hasRuntimeBits(zcu)) { if (!is_ptr) return .none; const local = try f.allocLocal(inst, inst_ty); - try f.writeCValue(writer, local, .Other); - try writer.writeAll(" = ("); - try f.renderType(writer, inst_ty); - try writer.writeByte(')'); - try f.writeCValue(writer, operand, .Other); - try writer.writeAll(";\n"); + try f.writeCValue(w, local, .Other); + try w.writeAll(" = ("); + try f.renderType(w, inst_ty); + try w.writeByte(')'); + try f.writeCValue(w, operand, .Other); + try w.writeByte(';'); + try f.object.newline(); return local; } const local = try f.allocLocal(inst, inst_ty); - const a = try Assignment.start(f, writer, try f.ctypeFromType(inst_ty, .complete)); - try f.writeCValue(writer, local, .Other); - try a.assign(f, writer); + const a = try Assignment.start(f, w, try f.ctypeFromType(inst_ty, .complete)); + try f.writeCValue(w, local, .Other); + try a.assign(f, w); if (is_ptr) { - try writer.writeByte('&'); - try f.writeCValueDerefMember(writer, operand, .{ .identifier = "payload" }); - } else try f.writeCValueMember(writer, operand, .{ .identifier = "payload" }); - try a.end(f, writer); + try w.writeByte('&'); + try 
f.writeCValueDerefMember(w, operand, .{ .identifier = "payload" }); + } else try f.writeCValueMember(w, operand, .{ .identifier = "payload" }); + try a.end(f, w); return local; } @@ -6282,21 +6384,21 @@ fn airWrapOptional(f: *Function, inst: Air.Inst.Index) !CValue { .aggregate => |aggregate| switch (aggregate.fields.at(0, ctype_pool).name.index) { .is_null, .payload => { const operand_ctype = try f.ctypeFromType(f.typeOf(ty_op.operand), .complete); - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, inst_ty); { - const a = try Assignment.start(f, writer, .bool); - try f.writeCValueMember(writer, local, .{ .identifier = "is_null" }); - try a.assign(f, writer); - try writer.writeAll("false"); - try a.end(f, writer); + const a = try Assignment.start(f, w, .bool); + try f.writeCValueMember(w, local, .{ .identifier = "is_null" }); + try a.assign(f, w); + try w.writeAll("false"); + try a.end(f, w); } { - const a = try Assignment.start(f, writer, operand_ctype); - try f.writeCValueMember(writer, local, .{ .identifier = "payload" }); - try a.assign(f, writer); - try f.writeCValue(writer, operand, .Other); - try a.end(f, writer); + const a = try Assignment.start(f, w, operand_ctype); + try f.writeCValueMember(w, local, .{ .identifier = "payload" }); + try a.assign(f, w); + try f.writeCValue(w, operand, .Other); + try a.end(f, w); } return local; }, @@ -6318,7 +6420,7 @@ fn airWrapErrUnionErr(f: *Function, inst: Air.Inst.Index) !CValue { const err = try f.resolveInst(ty_op.operand); try reap(f, inst, &.{ty_op.operand}); - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, inst_ty); if (repr_is_err and err == .local and err.local == local.new_local) { @@ -6327,21 +6429,21 @@ fn airWrapErrUnionErr(f: *Function, inst: Air.Inst.Index) !CValue { } if (!repr_is_err) { - const a = try Assignment.start(f, writer, try f.ctypeFromType(payload_ty, .complete)); - try 
f.writeCValueMember(writer, local, .{ .identifier = "payload" }); - try a.assign(f, writer); - try f.object.dg.renderUndefValue(writer, payload_ty, .Other); - try a.end(f, writer); + const a = try Assignment.start(f, w, try f.ctypeFromType(payload_ty, .complete)); + try f.writeCValueMember(w, local, .{ .identifier = "payload" }); + try a.assign(f, w); + try f.object.dg.renderUndefValue(w, payload_ty, .Other); + try a.end(f, w); } { - const a = try Assignment.start(f, writer, try f.ctypeFromType(err_ty, .complete)); + const a = try Assignment.start(f, w, try f.ctypeFromType(err_ty, .complete)); if (repr_is_err) - try f.writeCValue(writer, local, .Other) + try f.writeCValue(w, local, .Other) else - try f.writeCValueMember(writer, local, .{ .identifier = "error" }); - try a.assign(f, writer); - try f.writeCValue(writer, err, .Other); - try a.end(f, writer); + try f.writeCValueMember(w, local, .{ .identifier = "error" }); + try a.assign(f, w); + try f.writeCValue(w, err, .Other); + try a.end(f, w); } return local; } @@ -6349,7 +6451,7 @@ fn airWrapErrUnionErr(f: *Function, inst: Air.Inst.Index) !CValue { fn airErrUnionPayloadPtrSet(f: *Function, inst: Air.Inst.Index) !CValue { const pt = f.object.dg.pt; const zcu = pt.zcu; - const writer = f.object.writer(); + const w = &f.object.code.writer; const ty_op = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_op; const inst_ty = f.typeOfIndex(inst); const operand = try f.resolveInst(ty_op.operand); @@ -6363,31 +6465,31 @@ fn airErrUnionPayloadPtrSet(f: *Function, inst: Air.Inst.Index) !CValue { // First, set the non-error value. 
if (!payload_ty.hasRuntimeBitsIgnoreComptime(zcu)) { - const a = try Assignment.start(f, writer, try f.ctypeFromType(operand_ty, .complete)); - try f.writeCValueDeref(writer, operand); - try a.assign(f, writer); - try writer.print("{}", .{try f.fmtIntLiteral(no_err)}); - try a.end(f, writer); + const a = try Assignment.start(f, w, try f.ctypeFromType(operand_ty, .complete)); + try f.writeCValueDeref(w, operand); + try a.assign(f, w); + try w.print("{f}", .{try f.fmtIntLiteralDec(no_err)}); + try a.end(f, w); return .none; } { - const a = try Assignment.start(f, writer, try f.ctypeFromType(err_int_ty, .complete)); - try f.writeCValueDerefMember(writer, operand, .{ .identifier = "error" }); - try a.assign(f, writer); - try writer.print("{}", .{try f.fmtIntLiteral(no_err)}); - try a.end(f, writer); + const a = try Assignment.start(f, w, try f.ctypeFromType(err_int_ty, .complete)); + try f.writeCValueDerefMember(w, operand, .{ .identifier = "error" }); + try a.assign(f, w); + try w.print("{f}", .{try f.fmtIntLiteralDec(no_err)}); + try a.end(f, w); } // Then return the payload pointer (only if it is used) if (f.liveness.isUnused(inst)) return .none; const local = try f.allocLocal(inst, inst_ty); - const a = try Assignment.start(f, writer, try f.ctypeFromType(inst_ty, .complete)); - try f.writeCValue(writer, local, .Other); - try a.assign(f, writer); - try writer.writeByte('&'); - try f.writeCValueDerefMember(writer, operand, .{ .identifier = "payload" }); - try a.end(f, writer); + const a = try Assignment.start(f, w, try f.ctypeFromType(inst_ty, .complete)); + try f.writeCValue(w, local, .Other); + try a.assign(f, w); + try w.writeByte('&'); + try f.writeCValueDerefMember(w, operand, .{ .identifier = "payload" }); + try a.end(f, w); return local; } @@ -6418,24 +6520,24 @@ fn airWrapErrUnionPay(f: *Function, inst: Air.Inst.Index) !CValue { const err_ty = inst_ty.errorUnionSet(zcu); try reap(f, inst, &.{ty_op.operand}); - const writer = f.object.writer(); + const w = 
&f.object.code.writer; const local = try f.allocLocal(inst, inst_ty); if (!repr_is_err) { - const a = try Assignment.start(f, writer, try f.ctypeFromType(payload_ty, .complete)); - try f.writeCValueMember(writer, local, .{ .identifier = "payload" }); - try a.assign(f, writer); - try f.writeCValue(writer, payload, .Other); - try a.end(f, writer); + const a = try Assignment.start(f, w, try f.ctypeFromType(payload_ty, .complete)); + try f.writeCValueMember(w, local, .{ .identifier = "payload" }); + try a.assign(f, w); + try f.writeCValue(w, payload, .Other); + try a.end(f, w); } { - const a = try Assignment.start(f, writer, try f.ctypeFromType(err_ty, .complete)); + const a = try Assignment.start(f, w, try f.ctypeFromType(err_ty, .complete)); if (repr_is_err) - try f.writeCValue(writer, local, .Other) + try f.writeCValue(w, local, .Other) else - try f.writeCValueMember(writer, local, .{ .identifier = "error" }); - try a.assign(f, writer); - try f.object.dg.renderValue(writer, try pt.intValue(try pt.errorIntType(), 0), .Other); - try a.end(f, writer); + try f.writeCValueMember(w, local, .{ .identifier = "error" }); + try a.assign(f, w); + try f.object.dg.renderValue(w, try pt.intValue(try pt.errorIntType(), 0), .Other); + try a.end(f, w); } return local; } @@ -6445,7 +6547,7 @@ fn airIsErr(f: *Function, inst: Air.Inst.Index, is_ptr: bool, operator: []const const zcu = pt.zcu; const un_op = f.air.instructions.items(.data)[@intFromEnum(inst)].un_op; - const writer = f.object.writer(); + const w = &f.object.code.writer; const operand = try f.resolveInst(un_op); try reap(f, inst, &.{un_op}); const operand_ty = f.typeOf(un_op); @@ -6454,25 +6556,25 @@ fn airIsErr(f: *Function, inst: Air.Inst.Index, is_ptr: bool, operator: []const const payload_ty = err_union_ty.errorUnionPayload(zcu); const error_ty = err_union_ty.errorUnionSet(zcu); - const a = try Assignment.start(f, writer, .bool); - try f.writeCValue(writer, local, .Other); - try a.assign(f, writer); + const a = try 
Assignment.start(f, w, .bool); + try f.writeCValue(w, local, .Other); + try a.assign(f, w); const err_int_ty = try pt.errorIntType(); if (!error_ty.errorSetIsEmpty(zcu)) if (payload_ty.hasRuntimeBits(zcu)) if (is_ptr) - try f.writeCValueDerefMember(writer, operand, .{ .identifier = "error" }) + try f.writeCValueDerefMember(w, operand, .{ .identifier = "error" }) else - try f.writeCValueMember(writer, operand, .{ .identifier = "error" }) + try f.writeCValueMember(w, operand, .{ .identifier = "error" }) else - try f.writeCValue(writer, operand, .Other) + try f.writeCValue(w, operand, .Other) else - try f.object.dg.renderValue(writer, try pt.intValue(err_int_ty, 0), .Other); - try writer.writeByte(' '); - try writer.writeAll(operator); - try writer.writeByte(' '); - try f.object.dg.renderValue(writer, try pt.intValue(err_int_ty, 0), .Other); - try a.end(f, writer); + try f.object.dg.renderValue(w, try pt.intValue(err_int_ty, 0), .Other); + try w.writeByte(' '); + try w.writeAll(operator); + try w.writeByte(' '); + try f.object.dg.renderValue(w, try pt.intValue(err_int_ty, 0), .Other); + try a.end(f, w); return local; } @@ -6486,45 +6588,45 @@ fn airArrayToSlice(f: *Function, inst: Air.Inst.Index) !CValue { try reap(f, inst, &.{ty_op.operand}); const inst_ty = f.typeOfIndex(inst); const ptr_ty = inst_ty.slicePtrFieldType(zcu); - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, inst_ty); const operand_ty = f.typeOf(ty_op.operand); const array_ty = operand_ty.childType(zcu); { - const a = try Assignment.start(f, writer, try f.ctypeFromType(ptr_ty, .complete)); - try f.writeCValueMember(writer, local, .{ .identifier = "ptr" }); - try a.assign(f, writer); + const a = try Assignment.start(f, w, try f.ctypeFromType(ptr_ty, .complete)); + try f.writeCValueMember(w, local, .{ .identifier = "ptr" }); + try a.assign(f, w); if (operand == .undef) { - try f.writeCValue(writer, .{ .undef = inst_ty.slicePtrFieldType(zcu) }, 
.Other); + try f.writeCValue(w, .{ .undef = inst_ty.slicePtrFieldType(zcu) }, .Other); } else { const ptr_ctype = try f.ctypeFromType(ptr_ty, .complete); const ptr_child_ctype = ptr_ctype.info(ctype_pool).pointer.elem_ctype; const elem_ty = array_ty.childType(zcu); const elem_ctype = try f.ctypeFromType(elem_ty, .complete); if (!ptr_child_ctype.eql(elem_ctype)) { - try writer.writeByte('('); - try f.renderCType(writer, ptr_ctype); - try writer.writeByte(')'); + try w.writeByte('('); + try f.renderCType(w, ptr_ctype); + try w.writeByte(')'); } const operand_ctype = try f.ctypeFromType(operand_ty, .complete); const operand_child_ctype = operand_ctype.info(ctype_pool).pointer.elem_ctype; if (operand_child_ctype.info(ctype_pool) == .array) { - try writer.writeByte('&'); - try f.writeCValueDeref(writer, operand); - try writer.print("[{}]", .{try f.fmtIntLiteral(.zero_usize)}); - } else try f.writeCValue(writer, operand, .Other); + try w.writeByte('&'); + try f.writeCValueDeref(w, operand); + try w.print("[{f}]", .{try f.fmtIntLiteralDec(.zero_usize)}); + } else try f.writeCValue(w, operand, .Other); } - try a.end(f, writer); + try a.end(f, w); } { - const a = try Assignment.start(f, writer, .usize); - try f.writeCValueMember(writer, local, .{ .identifier = "len" }); - try a.assign(f, writer); - try writer.print("{}", .{ - try f.fmtIntLiteral(try pt.intValue(.usize, array_ty.arrayLen(zcu))), + const a = try Assignment.start(f, w, .usize); + try f.writeCValueMember(w, local, .{ .identifier = "len" }); + try a.assign(f, w); + try w.print("{f}", .{ + try f.fmtIntLiteralDec(try pt.intValue(.usize, array_ty.arrayLen(zcu))), }); - try a.end(f, writer); + try a.end(f, w); } return local; @@ -6551,32 +6653,32 @@ fn airFloatCast(f: *Function, inst: Air.Inst.Index) !CValue { else unreachable; - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, inst_ty); - const v = try Vectorize.start(f, inst, writer, operand_ty); - const a = 
try Assignment.start(f, writer, try f.ctypeFromType(scalar_ty, .complete)); - try f.writeCValue(writer, local, .Other); - try v.elem(f, writer); - try a.assign(f, writer); + const v = try Vectorize.start(f, inst, w, operand_ty); + const a = try Assignment.start(f, w, try f.ctypeFromType(scalar_ty, .complete)); + try f.writeCValue(w, local, .Other); + try v.elem(f, w); + try a.assign(f, w); if (inst_scalar_ty.isInt(zcu) and scalar_ty.isRuntimeFloat()) { - try writer.writeAll("zig_wrap_"); - try f.object.dg.renderTypeForBuiltinFnName(writer, inst_scalar_ty); - try writer.writeByte('('); + try w.writeAll("zig_wrap_"); + try f.object.dg.renderTypeForBuiltinFnName(w, inst_scalar_ty); + try w.writeByte('('); } - try writer.writeAll("zig_"); - try writer.writeAll(operation); - try writer.writeAll(compilerRtAbbrev(scalar_ty, zcu, target)); - try writer.writeAll(compilerRtAbbrev(inst_scalar_ty, zcu, target)); - try writer.writeByte('('); - try f.writeCValue(writer, operand, .FunctionArgument); - try v.elem(f, writer); - try writer.writeByte(')'); + try w.writeAll("zig_"); + try w.writeAll(operation); + try w.writeAll(compilerRtAbbrev(scalar_ty, zcu, target)); + try w.writeAll(compilerRtAbbrev(inst_scalar_ty, zcu, target)); + try w.writeByte('('); + try f.writeCValue(w, operand, .FunctionArgument); + try v.elem(f, w); + try w.writeByte(')'); if (inst_scalar_ty.isInt(zcu) and scalar_ty.isRuntimeFloat()) { - try f.object.dg.renderBuiltinInfo(writer, inst_scalar_ty, .bits); - try writer.writeByte(')'); + try f.object.dg.renderBuiltinInfo(w, inst_scalar_ty, .bits); + try w.writeByte(')'); } - try a.end(f, writer); - try v.end(f, inst, writer); + try a.end(f, w); + try v.end(f, inst, w); return local; } @@ -6601,27 +6703,28 @@ fn airUnBuiltinCall( const inst_scalar_ctype = try f.ctypeFromType(inst_scalar_ty, .complete); const ref_ret = inst_scalar_ctype.info(&f.object.dg.ctype_pool) == .array; - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = 
try f.allocLocal(inst, inst_ty); - const v = try Vectorize.start(f, inst, writer, operand_ty); + const v = try Vectorize.start(f, inst, w, operand_ty); if (!ref_ret) { - try f.writeCValue(writer, local, .Other); - try v.elem(f, writer); - try writer.writeAll(" = "); + try f.writeCValue(w, local, .Other); + try v.elem(f, w); + try w.writeAll(" = "); } - try writer.print("zig_{s}_", .{operation}); - try f.object.dg.renderTypeForBuiltinFnName(writer, scalar_ty); - try writer.writeByte('('); + try w.print("zig_{s}_", .{operation}); + try f.object.dg.renderTypeForBuiltinFnName(w, scalar_ty); + try w.writeByte('('); if (ref_ret) { - try f.writeCValue(writer, local, .FunctionArgument); - try v.elem(f, writer); - try writer.writeAll(", "); + try f.writeCValue(w, local, .FunctionArgument); + try v.elem(f, w); + try w.writeAll(", "); } - try f.writeCValue(writer, operand, .FunctionArgument); - try v.elem(f, writer); - try f.object.dg.renderBuiltinInfo(writer, scalar_ty, info); - try writer.writeAll(");\n"); - try v.end(f, inst, writer); + try f.writeCValue(w, operand, .FunctionArgument); + try v.elem(f, w); + try f.object.dg.renderBuiltinInfo(w, scalar_ty, info); + try w.writeAll(");"); + try f.object.newline(); + try v.end(f, inst, w); return local; } @@ -6651,31 +6754,31 @@ fn airBinBuiltinCall( const inst_scalar_ctype = try f.ctypeFromType(inst_scalar_ty, .complete); const ref_ret = inst_scalar_ctype.info(&f.object.dg.ctype_pool) == .array; - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, inst_ty); if (is_big) try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs }); - const v = try Vectorize.start(f, inst, writer, operand_ty); + const v = try Vectorize.start(f, inst, w, operand_ty); if (!ref_ret) { - try f.writeCValue(writer, local, .Other); - try v.elem(f, writer); - try writer.writeAll(" = "); + try f.writeCValue(w, local, .Other); + try v.elem(f, w); + try w.writeAll(" = "); } - try writer.print("zig_{s}_", 
.{operation}); - try f.object.dg.renderTypeForBuiltinFnName(writer, scalar_ty); - try writer.writeByte('('); + try w.print("zig_{s}_", .{operation}); + try f.object.dg.renderTypeForBuiltinFnName(w, scalar_ty); + try w.writeByte('('); if (ref_ret) { - try f.writeCValue(writer, local, .FunctionArgument); - try v.elem(f, writer); - try writer.writeAll(", "); + try f.writeCValue(w, local, .FunctionArgument); + try v.elem(f, w); + try w.writeAll(", "); } - try f.writeCValue(writer, lhs, .FunctionArgument); - try v.elem(f, writer); - try writer.writeAll(", "); - try f.writeCValue(writer, rhs, .FunctionArgument); - if (f.typeOf(bin_op.rhs).isVector(zcu)) try v.elem(f, writer); - try f.object.dg.renderBuiltinInfo(writer, scalar_ty, info); - try writer.writeAll(");\n"); - try v.end(f, inst, writer); + try f.writeCValue(w, lhs, .FunctionArgument); + try v.elem(f, w); + try w.writeAll(", "); + try f.writeCValue(w, rhs, .FunctionArgument); + if (f.typeOf(bin_op.rhs).isVector(zcu)) try v.elem(f, w); + try f.object.dg.renderBuiltinInfo(w, scalar_ty, info); + try w.writeAll(");\n"); + try v.end(f, inst, w); return local; } @@ -6702,38 +6805,39 @@ fn airCmpBuiltinCall( const inst_scalar_ctype = try f.ctypeFromType(inst_scalar_ty, .complete); const ref_ret = inst_scalar_ctype.info(&f.object.dg.ctype_pool) == .array; - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, inst_ty); - const v = try Vectorize.start(f, inst, writer, operand_ty); + const v = try Vectorize.start(f, inst, w, operand_ty); if (!ref_ret) { - try f.writeCValue(writer, local, .Other); - try v.elem(f, writer); - try writer.writeAll(" = "); + try f.writeCValue(w, local, .Other); + try v.elem(f, w); + try w.writeAll(" = "); } - try writer.print("zig_{s}_", .{switch (operation) { + try w.print("zig_{s}_", .{switch (operation) { else => @tagName(operation), .operator => compareOperatorAbbrev(operator), }}); - try f.object.dg.renderTypeForBuiltinFnName(writer, 
scalar_ty); - try writer.writeByte('('); + try f.object.dg.renderTypeForBuiltinFnName(w, scalar_ty); + try w.writeByte('('); if (ref_ret) { - try f.writeCValue(writer, local, .FunctionArgument); - try v.elem(f, writer); - try writer.writeAll(", "); + try f.writeCValue(w, local, .FunctionArgument); + try v.elem(f, w); + try w.writeAll(", "); } - try f.writeCValue(writer, lhs, .FunctionArgument); - try v.elem(f, writer); - try writer.writeAll(", "); - try f.writeCValue(writer, rhs, .FunctionArgument); - try v.elem(f, writer); - try f.object.dg.renderBuiltinInfo(writer, scalar_ty, info); - try writer.writeByte(')'); - if (!ref_ret) try writer.print("{s}{}", .{ + try f.writeCValue(w, lhs, .FunctionArgument); + try v.elem(f, w); + try w.writeAll(", "); + try f.writeCValue(w, rhs, .FunctionArgument); + try v.elem(f, w); + try f.object.dg.renderBuiltinInfo(w, scalar_ty, info); + try w.writeByte(')'); + if (!ref_ret) try w.print("{s}{f}", .{ compareOperatorC(operator), - try f.fmtIntLiteral(try pt.intValue(.i32, 0)), + try f.fmtIntLiteralDec(try pt.intValue(.i32, 0)), }); - try writer.writeAll(";\n"); - try v.end(f, inst, writer); + try w.writeByte(';'); + try f.object.newline(); + try v.end(f, inst, w); return local; } @@ -6751,7 +6855,7 @@ fn airCmpxchg(f: *Function, inst: Air.Inst.Index, flavor: [*:0]const u8) !CValue const ty = ptr_ty.childType(zcu); const ctype = try f.ctypeFromType(ty, .complete); - const writer = f.object.writer(); + const w = &f.object.code.writer; const new_value_mat = try Materialize.start(f, inst, ty, new_value); try reap(f, inst, &.{ extra.ptr, extra.expected_value, extra.new_value }); @@ -6763,76 +6867,78 @@ fn airCmpxchg(f: *Function, inst: Air.Inst.Index, flavor: [*:0]const u8) !CValue const local = try f.allocLocal(inst, inst_ty); if (inst_ty.isPtrLikeOptional(zcu)) { { - const a = try Assignment.start(f, writer, ctype); - try f.writeCValue(writer, local, .Other); - try a.assign(f, writer); - try f.writeCValue(writer, expected_value, 
.Other); - try a.end(f, writer); + const a = try Assignment.start(f, w, ctype); + try f.writeCValue(w, local, .Other); + try a.assign(f, w); + try f.writeCValue(w, expected_value, .Other); + try a.end(f, w); } - try writer.writeAll("if ("); - try writer.print("zig_cmpxchg_{s}((zig_atomic(", .{flavor}); - try f.renderType(writer, ty); - try writer.writeByte(')'); - if (ptr_ty.isVolatilePtr(zcu)) try writer.writeAll(" volatile"); - try writer.writeAll(" *)"); - try f.writeCValue(writer, ptr, .Other); - try writer.writeAll(", "); - try f.writeCValue(writer, local, .FunctionArgument); - try writer.writeAll(", "); - try new_value_mat.mat(f, writer); - try writer.writeAll(", "); - try writeMemoryOrder(writer, extra.successOrder()); - try writer.writeAll(", "); - try writeMemoryOrder(writer, extra.failureOrder()); - try writer.writeAll(", "); - try f.object.dg.renderTypeForBuiltinFnName(writer, ty); - try writer.writeAll(", "); - try f.renderType(writer, repr_ty); - try writer.writeByte(')'); - try writer.writeAll(") {\n"); - f.object.indent_writer.pushIndent(); + try w.writeAll("if ("); + try w.print("zig_cmpxchg_{s}((zig_atomic(", .{flavor}); + try f.renderType(w, ty); + try w.writeByte(')'); + if (ptr_ty.isVolatilePtr(zcu)) try w.writeAll(" volatile"); + try w.writeAll(" *)"); + try f.writeCValue(w, ptr, .Other); + try w.writeAll(", "); + try f.writeCValue(w, local, .FunctionArgument); + try w.writeAll(", "); + try new_value_mat.mat(f, w); + try w.writeAll(", "); + try writeMemoryOrder(w, extra.successOrder()); + try w.writeAll(", "); + try writeMemoryOrder(w, extra.failureOrder()); + try w.writeAll(", "); + try f.object.dg.renderTypeForBuiltinFnName(w, ty); + try w.writeAll(", "); + try f.renderType(w, repr_ty); + try w.writeByte(')'); + try w.writeAll(") {"); + f.object.indent(); + try f.object.newline(); { - const a = try Assignment.start(f, writer, ctype); - try f.writeCValue(writer, local, .Other); - try a.assign(f, writer); - try writer.writeAll("NULL"); - try 
a.end(f, writer); + const a = try Assignment.start(f, w, ctype); + try f.writeCValue(w, local, .Other); + try a.assign(f, w); + try w.writeAll("NULL"); + try a.end(f, w); } - f.object.indent_writer.popIndent(); - try writer.writeAll("}\n"); + try f.object.outdent(); + try w.writeByte('}'); + try f.object.newline(); } else { { - const a = try Assignment.start(f, writer, ctype); - try f.writeCValueMember(writer, local, .{ .identifier = "payload" }); - try a.assign(f, writer); - try f.writeCValue(writer, expected_value, .Other); - try a.end(f, writer); + const a = try Assignment.start(f, w, ctype); + try f.writeCValueMember(w, local, .{ .identifier = "payload" }); + try a.assign(f, w); + try f.writeCValue(w, expected_value, .Other); + try a.end(f, w); } { - const a = try Assignment.start(f, writer, .bool); - try f.writeCValueMember(writer, local, .{ .identifier = "is_null" }); - try a.assign(f, writer); - try writer.print("zig_cmpxchg_{s}((zig_atomic(", .{flavor}); - try f.renderType(writer, ty); - try writer.writeByte(')'); - if (ptr_ty.isVolatilePtr(zcu)) try writer.writeAll(" volatile"); - try writer.writeAll(" *)"); - try f.writeCValue(writer, ptr, .Other); - try writer.writeAll(", "); - try f.writeCValueMember(writer, local, .{ .identifier = "payload" }); - try writer.writeAll(", "); - try new_value_mat.mat(f, writer); - try writer.writeAll(", "); - try writeMemoryOrder(writer, extra.successOrder()); - try writer.writeAll(", "); - try writeMemoryOrder(writer, extra.failureOrder()); - try writer.writeAll(", "); - try f.object.dg.renderTypeForBuiltinFnName(writer, ty); - try writer.writeAll(", "); - try f.renderType(writer, repr_ty); - try writer.writeByte(')'); - try a.end(f, writer); + const a = try Assignment.start(f, w, .bool); + try f.writeCValueMember(w, local, .{ .identifier = "is_null" }); + try a.assign(f, w); + try w.print("zig_cmpxchg_{s}((zig_atomic(", .{flavor}); + try f.renderType(w, ty); + try w.writeByte(')'); + if (ptr_ty.isVolatilePtr(zcu)) try 
w.writeAll(" volatile"); + try w.writeAll(" *)"); + try f.writeCValue(w, ptr, .Other); + try w.writeAll(", "); + try f.writeCValueMember(w, local, .{ .identifier = "payload" }); + try w.writeAll(", "); + try new_value_mat.mat(f, w); + try w.writeAll(", "); + try writeMemoryOrder(w, extra.successOrder()); + try w.writeAll(", "); + try writeMemoryOrder(w, extra.failureOrder()); + try w.writeAll(", "); + try f.object.dg.renderTypeForBuiltinFnName(w, ty); + try w.writeAll(", "); + try f.renderType(w, repr_ty); + try w.writeByte(')'); + try a.end(f, w); } } try new_value_mat.end(f, inst); @@ -6856,7 +6962,7 @@ fn airAtomicRmw(f: *Function, inst: Air.Inst.Index) !CValue { const ptr = try f.resolveInst(pl_op.operand); const operand = try f.resolveInst(extra.operand); - const writer = f.object.writer(); + const w = &f.object.code.writer; const operand_mat = try Materialize.start(f, inst, ty, operand); try reap(f, inst, &.{ pl_op.operand, extra.operand }); @@ -6866,31 +6972,32 @@ fn airAtomicRmw(f: *Function, inst: Air.Inst.Index) !CValue { const repr_ty = if (is_float) pt.intType(.unsigned, repr_bits) catch unreachable else ty; const local = try f.allocLocal(inst, inst_ty); - try writer.print("zig_atomicrmw_{s}", .{toAtomicRmwSuffix(extra.op())}); - if (is_float) try writer.writeAll("_float") else if (is_128) try writer.writeAll("_int128"); - try writer.writeByte('('); - try f.writeCValue(writer, local, .Other); - try writer.writeAll(", ("); + try w.print("zig_atomicrmw_{s}", .{toAtomicRmwSuffix(extra.op())}); + if (is_float) try w.writeAll("_float") else if (is_128) try w.writeAll("_int128"); + try w.writeByte('('); + try f.writeCValue(w, local, .Other); + try w.writeAll(", ("); const use_atomic = switch (extra.op()) { else => true, // These are missing from stdatomic.h, so no atomic types unless a fallback is used. 
.Nand, .Min, .Max => is_float or is_128, }; - if (use_atomic) try writer.writeAll("zig_atomic("); - try f.renderType(writer, ty); - if (use_atomic) try writer.writeByte(')'); - if (ptr_ty.isVolatilePtr(zcu)) try writer.writeAll(" volatile"); - try writer.writeAll(" *)"); - try f.writeCValue(writer, ptr, .Other); - try writer.writeAll(", "); - try operand_mat.mat(f, writer); - try writer.writeAll(", "); - try writeMemoryOrder(writer, extra.ordering()); - try writer.writeAll(", "); - try f.object.dg.renderTypeForBuiltinFnName(writer, ty); - try writer.writeAll(", "); - try f.renderType(writer, repr_ty); - try writer.writeAll(");\n"); + if (use_atomic) try w.writeAll("zig_atomic("); + try f.renderType(w, ty); + if (use_atomic) try w.writeByte(')'); + if (ptr_ty.isVolatilePtr(zcu)) try w.writeAll(" volatile"); + try w.writeAll(" *)"); + try f.writeCValue(w, ptr, .Other); + try w.writeAll(", "); + try operand_mat.mat(f, w); + try w.writeAll(", "); + try writeMemoryOrder(w, extra.ordering()); + try w.writeAll(", "); + try f.object.dg.renderTypeForBuiltinFnName(w, ty); + try w.writeAll(", "); + try f.renderType(w, repr_ty); + try w.writeAll(");"); + try f.object.newline(); try operand_mat.end(f, inst); if (f.liveness.isUnused(inst)) { @@ -6916,24 +7023,25 @@ fn airAtomicLoad(f: *Function, inst: Air.Inst.Index) !CValue { ty; const inst_ty = f.typeOfIndex(inst); - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, inst_ty); - try writer.writeAll("zig_atomic_load("); - try f.writeCValue(writer, local, .Other); - try writer.writeAll(", (zig_atomic("); - try f.renderType(writer, ty); - try writer.writeByte(')'); - if (ptr_ty.isVolatilePtr(zcu)) try writer.writeAll(" volatile"); - try writer.writeAll(" *)"); - try f.writeCValue(writer, ptr, .Other); - try writer.writeAll(", "); - try writeMemoryOrder(writer, atomic_load.order); - try writer.writeAll(", "); - try f.object.dg.renderTypeForBuiltinFnName(writer, ty); - try 
writer.writeAll(", "); - try f.renderType(writer, repr_ty); - try writer.writeAll(");\n"); + try w.writeAll("zig_atomic_load("); + try f.writeCValue(w, local, .Other); + try w.writeAll(", (zig_atomic("); + try f.renderType(w, ty); + try w.writeByte(')'); + if (ptr_ty.isVolatilePtr(zcu)) try w.writeAll(" volatile"); + try w.writeAll(" *)"); + try f.writeCValue(w, ptr, .Other); + try w.writeAll(", "); + try writeMemoryOrder(w, atomic_load.order); + try w.writeAll(", "); + try f.object.dg.renderTypeForBuiltinFnName(w, ty); + try w.writeAll(", "); + try f.renderType(w, repr_ty); + try w.writeAll(");"); + try f.object.newline(); return local; } @@ -6947,7 +7055,7 @@ fn airAtomicStore(f: *Function, inst: Air.Inst.Index, order: [*:0]const u8) !CVa const ptr = try f.resolveInst(bin_op.lhs); const element = try f.resolveInst(bin_op.rhs); - const writer = f.object.writer(); + const w = &f.object.code.writer; const element_mat = try Materialize.start(f, inst, ty, element); try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs }); @@ -6956,31 +7064,32 @@ fn airAtomicStore(f: *Function, inst: Air.Inst.Index, order: [*:0]const u8) !CVa else ty; - try writer.writeAll("zig_atomic_store((zig_atomic("); - try f.renderType(writer, ty); - try writer.writeByte(')'); - if (ptr_ty.isVolatilePtr(zcu)) try writer.writeAll(" volatile"); - try writer.writeAll(" *)"); - try f.writeCValue(writer, ptr, .Other); - try writer.writeAll(", "); - try element_mat.mat(f, writer); - try writer.print(", {s}, ", .{order}); - try f.object.dg.renderTypeForBuiltinFnName(writer, ty); - try writer.writeAll(", "); - try f.renderType(writer, repr_ty); - try writer.writeAll(");\n"); + try w.writeAll("zig_atomic_store((zig_atomic("); + try f.renderType(w, ty); + try w.writeByte(')'); + if (ptr_ty.isVolatilePtr(zcu)) try w.writeAll(" volatile"); + try w.writeAll(" *)"); + try f.writeCValue(w, ptr, .Other); + try w.writeAll(", "); + try element_mat.mat(f, w); + try w.print(", {s}, ", .{order}); + try 
f.object.dg.renderTypeForBuiltinFnName(w, ty); + try w.writeAll(", "); + try f.renderType(w, repr_ty); + try w.writeAll(");"); + try f.object.newline(); try element_mat.end(f, inst); return .none; } -fn writeSliceOrPtr(f: *Function, writer: anytype, ptr: CValue, ptr_ty: Type) !void { +fn writeSliceOrPtr(f: *Function, w: *Writer, ptr: CValue, ptr_ty: Type) !void { const pt = f.object.dg.pt; const zcu = pt.zcu; if (ptr_ty.isSlice(zcu)) { - try f.writeCValueMember(writer, ptr, .{ .identifier = "ptr" }); + try f.writeCValueMember(w, ptr, .{ .identifier = "ptr" }); } else { - try f.writeCValue(writer, ptr, .FunctionArgument); + try f.writeCValue(w, ptr, .FunctionArgument); } } @@ -6994,7 +7103,7 @@ fn airMemset(f: *Function, inst: Air.Inst.Index, safety: bool) !CValue { const elem_ty = f.typeOf(bin_op.rhs); const elem_abi_size = elem_ty.abiSize(zcu); const val_is_undef = if (try f.air.value(bin_op.rhs, pt)) |val| val.isUndefDeep(zcu) else false; - const writer = f.object.writer(); + const w = &f.object.code.writer; if (val_is_undef) { if (!safety) { @@ -7002,24 +7111,25 @@ fn airMemset(f: *Function, inst: Air.Inst.Index, safety: bool) !CValue { return .none; } - try writer.writeAll("memset("); + try w.writeAll("memset("); switch (dest_ty.ptrSize(zcu)) { .slice => { - try f.writeCValueMember(writer, dest_slice, .{ .identifier = "ptr" }); - try writer.writeAll(", 0xaa, "); - try f.writeCValueMember(writer, dest_slice, .{ .identifier = "len" }); + try f.writeCValueMember(w, dest_slice, .{ .identifier = "ptr" }); + try w.writeAll(", 0xaa, "); + try f.writeCValueMember(w, dest_slice, .{ .identifier = "len" }); if (elem_abi_size > 1) { - try writer.print(" * {d});\n", .{elem_abi_size}); - } else { - try writer.writeAll(");\n"); + try w.print(" * {d}", .{elem_abi_size}); } + try w.writeAll(");"); + try f.object.newline(); }, .one => { const array_ty = dest_ty.childType(zcu); const len = array_ty.arrayLen(zcu) * elem_abi_size; - try f.writeCValue(writer, dest_slice, 
.FunctionArgument); - try writer.print(", 0xaa, {d});\n", .{len}); + try f.writeCValue(w, dest_slice, .FunctionArgument); + try w.print(", 0xaa, {d});", .{len}); + try f.object.newline(); }, .many, .c => unreachable, } @@ -7040,38 +7150,38 @@ fn airMemset(f: *Function, inst: Air.Inst.Index, safety: bool) !CValue { const index = try f.allocLocal(inst, .usize); - try writer.writeAll("for ("); - try f.writeCValue(writer, index, .Other); - try writer.writeAll(" = "); - try f.object.dg.renderValue(writer, .zero_usize, .Other); - try writer.writeAll("; "); - try f.writeCValue(writer, index, .Other); - try writer.writeAll(" != "); + try w.writeAll("for ("); + try f.writeCValue(w, index, .Other); + try w.writeAll(" = "); + try f.object.dg.renderValue(w, .zero_usize, .Other); + try w.writeAll("; "); + try f.writeCValue(w, index, .Other); + try w.writeAll(" != "); switch (dest_ty.ptrSize(zcu)) { .slice => { - try f.writeCValueMember(writer, dest_slice, .{ .identifier = "len" }); + try f.writeCValueMember(w, dest_slice, .{ .identifier = "len" }); }, .one => { const array_ty = dest_ty.childType(zcu); - try writer.print("{d}", .{array_ty.arrayLen(zcu)}); + try w.print("{d}", .{array_ty.arrayLen(zcu)}); }, .many, .c => unreachable, } - try writer.writeAll("; ++"); - try f.writeCValue(writer, index, .Other); - try writer.writeAll(") "); + try w.writeAll("; ++"); + try f.writeCValue(w, index, .Other); + try w.writeAll(") "); - const a = try Assignment.start(f, writer, try f.ctypeFromType(elem_ty, .complete)); - try writer.writeAll("(("); - try f.renderType(writer, elem_ptr_ty); - try writer.writeByte(')'); - try writeSliceOrPtr(f, writer, dest_slice, dest_ty); - try writer.writeAll(")["); - try f.writeCValue(writer, index, .Other); - try writer.writeByte(']'); - try a.assign(f, writer); - try f.writeCValue(writer, value, .Other); - try a.end(f, writer); + const a = try Assignment.start(f, w, try f.ctypeFromType(elem_ty, .complete)); + try w.writeAll("(("); + try f.renderType(w, 
elem_ptr_ty); + try w.writeByte(')'); + try writeSliceOrPtr(f, w, dest_slice, dest_ty); + try w.writeAll(")["); + try f.writeCValue(w, index, .Other); + try w.writeByte(']'); + try a.assign(f, w); + try f.writeCValue(w, value, .Other); + try a.end(f, w); try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs }); try freeLocal(f, inst, index.new_local, null); @@ -7081,24 +7191,26 @@ fn airMemset(f: *Function, inst: Air.Inst.Index, safety: bool) !CValue { const bitcasted = try bitcast(f, .u8, value, elem_ty); - try writer.writeAll("memset("); + try w.writeAll("memset("); switch (dest_ty.ptrSize(zcu)) { .slice => { - try f.writeCValueMember(writer, dest_slice, .{ .identifier = "ptr" }); - try writer.writeAll(", "); - try f.writeCValue(writer, bitcasted, .FunctionArgument); - try writer.writeAll(", "); - try f.writeCValueMember(writer, dest_slice, .{ .identifier = "len" }); - try writer.writeAll(");\n"); + try f.writeCValueMember(w, dest_slice, .{ .identifier = "ptr" }); + try w.writeAll(", "); + try f.writeCValue(w, bitcasted, .FunctionArgument); + try w.writeAll(", "); + try f.writeCValueMember(w, dest_slice, .{ .identifier = "len" }); + try w.writeAll(");"); + try f.object.newline(); }, .one => { const array_ty = dest_ty.childType(zcu); const len = array_ty.arrayLen(zcu) * elem_abi_size; - try f.writeCValue(writer, dest_slice, .FunctionArgument); - try writer.writeAll(", "); - try f.writeCValue(writer, bitcasted, .FunctionArgument); - try writer.print(", {d});\n", .{len}); + try f.writeCValue(w, dest_slice, .FunctionArgument); + try w.writeAll(", "); + try f.writeCValue(w, bitcasted, .FunctionArgument); + try w.print(", {d});", .{len}); + try f.object.newline(); }, .many, .c => unreachable, } @@ -7115,36 +7227,38 @@ fn airMemcpy(f: *Function, inst: Air.Inst.Index, function_paren: []const u8) !CV const src_ptr = try f.resolveInst(bin_op.rhs); const dest_ty = f.typeOf(bin_op.lhs); const src_ty = f.typeOf(bin_op.rhs); - const writer = f.object.writer(); + const w = 
&f.object.code.writer; if (dest_ty.ptrSize(zcu) != .one) { - try writer.writeAll("if ("); - try writeArrayLen(f, writer, dest_ptr, dest_ty); - try writer.writeAll(" != 0) "); + try w.writeAll("if ("); + try writeArrayLen(f, dest_ptr, dest_ty); + try w.writeAll(" != 0) "); } - try writer.writeAll(function_paren); - try writeSliceOrPtr(f, writer, dest_ptr, dest_ty); - try writer.writeAll(", "); - try writeSliceOrPtr(f, writer, src_ptr, src_ty); - try writer.writeAll(", "); - try writeArrayLen(f, writer, dest_ptr, dest_ty); - try writer.writeAll(" * sizeof("); - try f.renderType(writer, dest_ty.elemType2(zcu)); - try writer.writeAll("));\n"); + try w.writeAll(function_paren); + try writeSliceOrPtr(f, w, dest_ptr, dest_ty); + try w.writeAll(", "); + try writeSliceOrPtr(f, w, src_ptr, src_ty); + try w.writeAll(", "); + try writeArrayLen(f, dest_ptr, dest_ty); + try w.writeAll(" * sizeof("); + try f.renderType(w, dest_ty.elemType2(zcu)); + try w.writeAll("));"); + try f.object.newline(); try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs }); return .none; } -fn writeArrayLen(f: *Function, writer: ArrayListWriter, dest_ptr: CValue, dest_ty: Type) !void { +fn writeArrayLen(f: *Function, dest_ptr: CValue, dest_ty: Type) !void { const pt = f.object.dg.pt; const zcu = pt.zcu; + const w = &f.object.code.writer; switch (dest_ty.ptrSize(zcu)) { - .one => try writer.print("{}", .{ - try f.fmtIntLiteral(try pt.intValue(.usize, dest_ty.childType(zcu).arrayLen(zcu))), + .one => try w.print("{f}", .{ + try f.fmtIntLiteralDec(try pt.intValue(.usize, dest_ty.childType(zcu).arrayLen(zcu))), }), .many, .c => unreachable, - .slice => try f.writeCValueMember(writer, dest_ptr, .{ .identifier = "len" }), + .slice => try f.writeCValueMember(w, dest_ptr, .{ .identifier = "len" }), } } @@ -7161,12 +7275,12 @@ fn airSetUnionTag(f: *Function, inst: Air.Inst.Index) !CValue { if (layout.tag_size == 0) return .none; const tag_ty = union_ty.unionTagTypeSafety(zcu).?; - const writer = f.object.writer(); - 
const a = try Assignment.start(f, writer, try f.ctypeFromType(tag_ty, .complete)); - try f.writeCValueDerefMember(writer, union_ptr, .{ .identifier = "tag" }); - try a.assign(f, writer); - try f.writeCValue(writer, new_tag, .Other); - try a.end(f, writer); + const w = &f.object.code.writer; + const a = try Assignment.start(f, w, try f.ctypeFromType(tag_ty, .complete)); + try f.writeCValueDerefMember(w, union_ptr, .{ .identifier = "tag" }); + try a.assign(f, w); + try f.writeCValue(w, new_tag, .Other); + try a.end(f, w); return .none; } @@ -7183,13 +7297,13 @@ fn airGetUnionTag(f: *Function, inst: Air.Inst.Index) !CValue { if (layout.tag_size == 0) return .none; const inst_ty = f.typeOfIndex(inst); - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, inst_ty); - const a = try Assignment.start(f, writer, try f.ctypeFromType(inst_ty, .complete)); - try f.writeCValue(writer, local, .Other); - try a.assign(f, writer); - try f.writeCValueMember(writer, operand, .{ .identifier = "tag" }); - try a.end(f, writer); + const a = try Assignment.start(f, w, try f.ctypeFromType(inst_ty, .complete)); + try f.writeCValue(w, local, .Other); + try a.assign(f, w); + try f.writeCValueMember(w, operand, .{ .identifier = "tag" }); + try a.end(f, w); return local; } @@ -7201,14 +7315,15 @@ fn airTagName(f: *Function, inst: Air.Inst.Index) !CValue { const operand = try f.resolveInst(un_op); try reap(f, inst, &.{un_op}); - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, inst_ty); - try f.writeCValue(writer, local, .Other); - try writer.print(" = {s}(", .{ + try f.writeCValue(w, local, .Other); + try w.print(" = {s}(", .{ try f.getLazyFnName(.{ .tag_name = enum_ty.toIntern() }), }); - try f.writeCValue(writer, operand, .Other); - try writer.writeAll(");\n"); + try f.writeCValue(w, operand, .Other); + try w.writeAll(");"); + try f.object.newline(); return local; } @@ -7216,16 
+7331,17 @@ fn airTagName(f: *Function, inst: Air.Inst.Index) !CValue { fn airErrorName(f: *Function, inst: Air.Inst.Index) !CValue { const un_op = f.air.instructions.items(.data)[@intFromEnum(inst)].un_op; - const writer = f.object.writer(); + const w = &f.object.code.writer; const inst_ty = f.typeOfIndex(inst); const operand = try f.resolveInst(un_op); try reap(f, inst, &.{un_op}); const local = try f.allocLocal(inst, inst_ty); - try f.writeCValue(writer, local, .Other); + try f.writeCValue(w, local, .Other); - try writer.writeAll(" = zig_errorName["); - try f.writeCValue(writer, operand, .Other); - try writer.writeAll(" - 1];\n"); + try w.writeAll(" = zig_errorName["); + try f.writeCValue(w, operand, .Other); + try w.writeAll(" - 1];"); + try f.object.newline(); return local; } @@ -7240,16 +7356,16 @@ fn airSplat(f: *Function, inst: Air.Inst.Index) !CValue { const inst_ty = f.typeOfIndex(inst); const inst_scalar_ty = inst_ty.scalarType(zcu); - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, inst_ty); - const v = try Vectorize.start(f, inst, writer, inst_ty); - const a = try Assignment.start(f, writer, try f.ctypeFromType(inst_scalar_ty, .complete)); - try f.writeCValue(writer, local, .Other); - try v.elem(f, writer); - try a.assign(f, writer); - try f.writeCValue(writer, operand, .Other); - try a.end(f, writer); - try v.end(f, inst, writer); + const v = try Vectorize.start(f, inst, w, inst_ty); + const a = try Assignment.start(f, w, try f.ctypeFromType(inst_scalar_ty, .complete)); + try f.writeCValue(w, local, .Other); + try v.elem(f, w); + try a.assign(f, w); + try f.writeCValue(w, operand, .Other); + try a.end(f, w); + try v.end(f, inst, w); return local; } @@ -7265,22 +7381,23 @@ fn airSelect(f: *Function, inst: Air.Inst.Index) !CValue { const inst_ty = f.typeOfIndex(inst); - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, inst_ty); - const v = 
try Vectorize.start(f, inst, writer, inst_ty); - try f.writeCValue(writer, local, .Other); - try v.elem(f, writer); - try writer.writeAll(" = "); - try f.writeCValue(writer, pred, .Other); - try v.elem(f, writer); - try writer.writeAll(" ? "); - try f.writeCValue(writer, lhs, .Other); - try v.elem(f, writer); - try writer.writeAll(" : "); - try f.writeCValue(writer, rhs, .Other); - try v.elem(f, writer); - try writer.writeAll(";\n"); - try v.end(f, inst, writer); + const v = try Vectorize.start(f, inst, w, inst_ty); + try f.writeCValue(w, local, .Other); + try v.elem(f, w); + try w.writeAll(" = "); + try f.writeCValue(w, pred, .Other); + try v.elem(f, w); + try w.writeAll(" ? "); + try f.writeCValue(w, lhs, .Other); + try v.elem(f, w); + try w.writeAll(" : "); + try f.writeCValue(w, rhs, .Other); + try v.elem(f, w); + try w.writeByte(';'); + try f.object.newline(); + try v.end(f, inst, w); return local; } @@ -7294,24 +7411,24 @@ fn airShuffleOne(f: *Function, inst: Air.Inst.Index) !CValue { const operand = try f.resolveInst(unwrapped.operand); const inst_ty = unwrapped.result_ty; - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, inst_ty); try reap(f, inst, &.{unwrapped.operand}); // local cannot alias operand for (mask, 0..) 
|mask_elem, out_idx| { - try f.writeCValue(writer, local, .Other); - try writer.writeByte('['); - try f.object.dg.renderValue(writer, try pt.intValue(.usize, out_idx), .Other); - try writer.writeAll("] = "); + try f.writeCValue(w, local, .Other); + try w.writeByte('['); + try f.object.dg.renderValue(w, try pt.intValue(.usize, out_idx), .Other); + try w.writeAll("] = "); switch (mask_elem.unwrap()) { .elem => |src_idx| { - try f.writeCValue(writer, operand, .Other); - try writer.writeByte('['); - try f.object.dg.renderValue(writer, try pt.intValue(.usize, src_idx), .Other); - try writer.writeByte(']'); + try f.writeCValue(w, operand, .Other); + try w.writeByte('['); + try f.object.dg.renderValue(w, try pt.intValue(.usize, src_idx), .Other); + try w.writeByte(']'); }, - .value => |val| try f.object.dg.renderValue(writer, .fromInterned(val), .Other), + .value => |val| try f.object.dg.renderValue(w, .fromInterned(val), .Other), } - try writer.writeAll(";\n"); + try w.writeAll(";\n"); } return local; @@ -7328,30 +7445,31 @@ fn airShuffleTwo(f: *Function, inst: Air.Inst.Index) !CValue { const inst_ty = unwrapped.result_ty; const elem_ty = inst_ty.childType(zcu); - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, inst_ty); try reap(f, inst, &.{ unwrapped.operand_a, unwrapped.operand_b }); // local cannot alias operands for (mask, 0..) 
|mask_elem, out_idx| { - try f.writeCValue(writer, local, .Other); - try writer.writeByte('['); - try f.object.dg.renderValue(writer, try pt.intValue(.usize, out_idx), .Other); - try writer.writeAll("] = "); + try f.writeCValue(w, local, .Other); + try w.writeByte('['); + try f.object.dg.renderValue(w, try pt.intValue(.usize, out_idx), .Other); + try w.writeAll("] = "); switch (mask_elem.unwrap()) { .a_elem => |src_idx| { - try f.writeCValue(writer, operand_a, .Other); - try writer.writeByte('['); - try f.object.dg.renderValue(writer, try pt.intValue(.usize, src_idx), .Other); - try writer.writeByte(']'); + try f.writeCValue(w, operand_a, .Other); + try w.writeByte('['); + try f.object.dg.renderValue(w, try pt.intValue(.usize, src_idx), .Other); + try w.writeByte(']'); }, .b_elem => |src_idx| { - try f.writeCValue(writer, operand_b, .Other); - try writer.writeByte('['); - try f.object.dg.renderValue(writer, try pt.intValue(.usize, src_idx), .Other); - try writer.writeByte(']'); + try f.writeCValue(w, operand_b, .Other); + try w.writeByte('['); + try f.object.dg.renderValue(w, try pt.intValue(.usize, src_idx), .Other); + try w.writeByte(']'); }, - .undef => try f.object.dg.renderUndefValue(writer, elem_ty, .Other), + .undef => try f.object.dg.renderUndefValue(w, elem_ty, .Other), } - try writer.writeAll(";\n"); + try w.writeByte(';'); + try f.object.newline(); } return local; @@ -7366,7 +7484,7 @@ fn airReduce(f: *Function, inst: Air.Inst.Index) !CValue { const operand = try f.resolveInst(reduce.operand); try reap(f, inst, &.{reduce.operand}); const operand_ty = f.typeOf(reduce.operand); - const writer = f.object.writer(); + const w = &f.object.code.writer; const use_operator = scalar_ty.bitSize(zcu) <= 64; const op: union(enum) { @@ -7413,10 +7531,10 @@ fn airReduce(f: *Function, inst: Air.Inst.Index) !CValue { // } const accum = try f.allocLocal(inst, scalar_ty); - try f.writeCValue(writer, accum, .Other); - try writer.writeAll(" = "); + try f.writeCValue(w, 
accum, .Other); + try w.writeAll(" = "); - try f.object.dg.renderValue(writer, switch (reduce.operation) { + try f.object.dg.renderValue(w, switch (reduce.operation) { .Or, .Xor => switch (scalar_ty.zigTypeTag(zcu)) { .bool => Value.false, .int => try pt.intValue(scalar_ty, 0), @@ -7453,42 +7571,44 @@ fn airReduce(f: *Function, inst: Air.Inst.Index) !CValue { else => unreachable, }, }, .Other); - try writer.writeAll(";\n"); + try w.writeByte(';'); + try f.object.newline(); - const v = try Vectorize.start(f, inst, writer, operand_ty); - try f.writeCValue(writer, accum, .Other); + const v = try Vectorize.start(f, inst, w, operand_ty); + try f.writeCValue(w, accum, .Other); switch (op) { .builtin => |func| { - try writer.print(" = zig_{s}_", .{func.operation}); - try f.object.dg.renderTypeForBuiltinFnName(writer, scalar_ty); - try writer.writeByte('('); - try f.writeCValue(writer, accum, .FunctionArgument); - try writer.writeAll(", "); - try f.writeCValue(writer, operand, .Other); - try v.elem(f, writer); - try f.object.dg.renderBuiltinInfo(writer, scalar_ty, func.info); - try writer.writeByte(')'); + try w.print(" = zig_{s}_", .{func.operation}); + try f.object.dg.renderTypeForBuiltinFnName(w, scalar_ty); + try w.writeByte('('); + try f.writeCValue(w, accum, .FunctionArgument); + try w.writeAll(", "); + try f.writeCValue(w, operand, .Other); + try v.elem(f, w); + try f.object.dg.renderBuiltinInfo(w, scalar_ty, func.info); + try w.writeByte(')'); }, .infix => |ass| { - try writer.writeAll(ass); - try f.writeCValue(writer, operand, .Other); - try v.elem(f, writer); + try w.writeAll(ass); + try f.writeCValue(w, operand, .Other); + try v.elem(f, w); }, .ternary => |cmp| { - try writer.writeAll(" = "); - try f.writeCValue(writer, accum, .Other); - try writer.writeAll(cmp); - try f.writeCValue(writer, operand, .Other); - try v.elem(f, writer); - try writer.writeAll(" ? 
"); - try f.writeCValue(writer, accum, .Other); - try writer.writeAll(" : "); - try f.writeCValue(writer, operand, .Other); - try v.elem(f, writer); + try w.writeAll(" = "); + try f.writeCValue(w, accum, .Other); + try w.writeAll(cmp); + try f.writeCValue(w, operand, .Other); + try v.elem(f, w); + try w.writeAll(" ? "); + try f.writeCValue(w, accum, .Other); + try w.writeAll(" : "); + try f.writeCValue(w, operand, .Other); + try v.elem(f, w); }, } - try writer.writeAll(";\n"); - try v.end(f, inst, writer); + try w.writeByte(';'); + try f.object.newline(); + try v.end(f, inst, w); return accum; } @@ -7514,7 +7634,7 @@ fn airAggregateInit(f: *Function, inst: Air.Inst.Index) !CValue { } } - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, inst_ty); switch (ip.indexToKey(inst_ty.toIntern())) { inline .array_type, .vector_type => |info, tag| { @@ -7522,20 +7642,20 @@ fn airAggregateInit(f: *Function, inst: Air.Inst.Index) !CValue { .ctype = try f.ctypeFromType(.fromInterned(info.child), .complete), }; for (resolved_elements, 0..) 
|element, i| { - try a.restart(f, writer); - try f.writeCValue(writer, local, .Other); - try writer.print("[{d}]", .{i}); - try a.assign(f, writer); - try f.writeCValue(writer, element, .Other); - try a.end(f, writer); + try a.restart(f, w); + try f.writeCValue(w, local, .Other); + try w.print("[{d}]", .{i}); + try a.assign(f, w); + try f.writeCValue(w, element, .Other); + try a.end(f, w); } if (tag == .array_type and info.sentinel != .none) { - try a.restart(f, writer); - try f.writeCValue(writer, local, .Other); - try writer.print("[{d}]", .{info.len}); - try a.assign(f, writer); - try f.object.dg.renderValue(writer, Value.fromInterned(info.sentinel), .Other); - try a.end(f, writer); + try a.restart(f, w); + try f.writeCValue(w, local, .Other); + try w.print("[{d}]", .{info.len}); + try a.assign(f, w); + try f.object.dg.renderValue(w, Value.fromInterned(info.sentinel), .Other); + try a.end(f, w); } }, .struct_type => { @@ -7547,19 +7667,19 @@ fn airAggregateInit(f: *Function, inst: Air.Inst.Index) !CValue { const field_ty: Type = .fromInterned(loaded_struct.field_types.get(ip)[field_index]); if (!field_ty.hasRuntimeBitsIgnoreComptime(zcu)) continue; - const a = try Assignment.start(f, writer, try f.ctypeFromType(field_ty, .complete)); - try f.writeCValueMember(writer, local, if (loaded_struct.fieldName(ip, field_index).unwrap()) |field_name| + const a = try Assignment.start(f, w, try f.ctypeFromType(field_ty, .complete)); + try f.writeCValueMember(w, local, if (loaded_struct.fieldName(ip, field_index).unwrap()) |field_name| .{ .identifier = field_name.toSlice(ip) } else .{ .field = field_index }); - try a.assign(f, writer); - try f.writeCValue(writer, resolved_elements[field_index], .Other); - try a.end(f, writer); + try a.assign(f, w); + try f.writeCValue(w, resolved_elements[field_index], .Other); + try a.end(f, w); } }, .@"packed" => { - try f.writeCValue(writer, local, .Other); - try writer.writeAll(" = "); + try f.writeCValue(w, local, .Other); + try 
w.writeAll(" = "); const backing_int_ty: Type = .fromInterned(loaded_struct.backingIntTypeUnordered(ip)); const int_info = backing_int_ty.intInfo(zcu); @@ -7575,9 +7695,9 @@ fn airAggregateInit(f: *Function, inst: Air.Inst.Index) !CValue { if (!field_ty.hasRuntimeBitsIgnoreComptime(zcu)) continue; if (!empty) { - try writer.writeAll("zig_or_"); - try f.object.dg.renderTypeForBuiltinFnName(writer, inst_ty); - try writer.writeByte('('); + try w.writeAll("zig_or_"); + try f.object.dg.renderTypeForBuiltinFnName(w, inst_ty); + try w.writeByte('('); } empty = false; } @@ -7587,57 +7707,58 @@ fn airAggregateInit(f: *Function, inst: Air.Inst.Index) !CValue { const field_ty = inst_ty.fieldType(field_index, zcu); if (!field_ty.hasRuntimeBitsIgnoreComptime(zcu)) continue; - if (!empty) try writer.writeAll(", "); + if (!empty) try w.writeAll(", "); // TODO: Skip this entire shift if val is 0? - try writer.writeAll("zig_shlw_"); - try f.object.dg.renderTypeForBuiltinFnName(writer, inst_ty); - try writer.writeByte('('); + try w.writeAll("zig_shlw_"); + try f.object.dg.renderTypeForBuiltinFnName(w, inst_ty); + try w.writeByte('('); if (field_ty.isAbiInt(zcu)) { - try writer.writeAll("zig_and_"); - try f.object.dg.renderTypeForBuiltinFnName(writer, inst_ty); - try writer.writeByte('('); + try w.writeAll("zig_and_"); + try f.object.dg.renderTypeForBuiltinFnName(w, inst_ty); + try w.writeByte('('); } if (inst_ty.isAbiInt(zcu) and (field_ty.isAbiInt(zcu) or field_ty.isPtrAtRuntime(zcu))) { - try f.renderIntCast(writer, inst_ty, element, .{}, field_ty, .FunctionArgument); + try f.renderIntCast(w, inst_ty, element, .{}, field_ty, .FunctionArgument); } else { - try writer.writeByte('('); - try f.renderType(writer, inst_ty); - try writer.writeByte(')'); + try w.writeByte('('); + try f.renderType(w, inst_ty); + try w.writeByte(')'); if (field_ty.isPtrAtRuntime(zcu)) { - try writer.writeByte('('); - try f.renderType(writer, switch (int_info.signedness) { + try w.writeByte('('); + try 
f.renderType(w, switch (int_info.signedness) { .unsigned => .usize, .signed => .isize, }); - try writer.writeByte(')'); + try w.writeByte(')'); } - try f.writeCValue(writer, element, .Other); + try f.writeCValue(w, element, .Other); } if (field_ty.isAbiInt(zcu)) { - try writer.writeAll(", "); + try w.writeAll(", "); const field_int_info = field_ty.intInfo(zcu); const field_mask = if (int_info.signedness == .signed and int_info.bits == field_int_info.bits) try pt.intValue(backing_int_ty, -1) else try (try pt.intType(.unsigned, field_int_info.bits)).maxIntScalar(pt, backing_int_ty); - try f.object.dg.renderValue(writer, field_mask, .FunctionArgument); - try writer.writeByte(')'); + try f.object.dg.renderValue(w, field_mask, .FunctionArgument); + try w.writeByte(')'); } - try writer.print(", {}", .{ - try f.fmtIntLiteral(try pt.intValue(bit_offset_ty, bit_offset)), + try w.print(", {f}", .{ + try f.fmtIntLiteralDec(try pt.intValue(bit_offset_ty, bit_offset)), }); - try f.object.dg.renderBuiltinInfo(writer, inst_ty, .bits); - try writer.writeByte(')'); - if (!empty) try writer.writeByte(')'); + try f.object.dg.renderBuiltinInfo(w, inst_ty, .bits); + try w.writeByte(')'); + if (!empty) try w.writeByte(')'); bit_offset += field_ty.bitSize(zcu); empty = false; } - try writer.writeAll(";\n"); + try w.writeByte(';'); + try f.object.newline(); }, } }, @@ -7646,11 +7767,11 @@ fn airAggregateInit(f: *Function, inst: Air.Inst.Index) !CValue { const field_ty: Type = .fromInterned(tuple_info.types.get(ip)[field_index]); if (!field_ty.hasRuntimeBitsIgnoreComptime(zcu)) continue; - const a = try Assignment.start(f, writer, try f.ctypeFromType(field_ty, .complete)); - try f.writeCValueMember(writer, local, .{ .field = field_index }); - try a.assign(f, writer); - try f.writeCValue(writer, resolved_elements[field_index], .Other); - try a.end(f, writer); + const a = try Assignment.start(f, w, try f.ctypeFromType(field_ty, .complete)); + try f.writeCValueMember(w, local, .{ .field = 
field_index }); + try a.assign(f, w); + try f.writeCValue(w, resolved_elements[field_index], .Other); + try a.end(f, w); }, else => unreachable, } @@ -7672,7 +7793,7 @@ fn airUnionInit(f: *Function, inst: Air.Inst.Index) !CValue { const payload = try f.resolveInst(extra.init); try reap(f, inst, &.{extra.init}); - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, union_ty); if (loaded_union.flagsUnordered(ip).layout == .@"packed") return f.moveCValue(inst, union_ty, payload); @@ -7682,20 +7803,20 @@ fn airUnionInit(f: *Function, inst: Air.Inst.Index) !CValue { const field_index = tag_ty.enumFieldIndex(field_name, zcu).?; const tag_val = try pt.enumValueFieldIndex(tag_ty, field_index); - const a = try Assignment.start(f, writer, try f.ctypeFromType(tag_ty, .complete)); - try f.writeCValueMember(writer, local, .{ .identifier = "tag" }); - try a.assign(f, writer); - try writer.print("{}", .{try f.fmtIntLiteral(try tag_val.intFromEnum(tag_ty, pt))}); - try a.end(f, writer); + const a = try Assignment.start(f, w, try f.ctypeFromType(tag_ty, .complete)); + try f.writeCValueMember(w, local, .{ .identifier = "tag" }); + try a.assign(f, w); + try w.print("{f}", .{try f.fmtIntLiteralDec(try tag_val.intFromEnum(tag_ty, pt))}); + try a.end(f, w); } break :field .{ .payload_identifier = field_name.toSlice(ip) }; } else .{ .identifier = field_name.toSlice(ip) }; - const a = try Assignment.start(f, writer, try f.ctypeFromType(payload_ty, .complete)); - try f.writeCValueMember(writer, local, field); - try a.assign(f, writer); - try f.writeCValue(writer, payload, .Other); - try a.end(f, writer); + const a = try Assignment.start(f, w, try f.ctypeFromType(payload_ty, .complete)); + try f.writeCValueMember(w, local, field); + try a.assign(f, w); + try f.writeCValue(w, payload, .Other); + try a.end(f, w); return local; } @@ -7708,15 +7829,16 @@ fn airPrefetch(f: *Function, inst: Air.Inst.Index) !CValue { const ptr = try 
f.resolveInst(prefetch.ptr); try reap(f, inst, &.{prefetch.ptr}); - const writer = f.object.writer(); + const w = &f.object.code.writer; switch (prefetch.cache) { .data => { - try writer.writeAll("zig_prefetch("); + try w.writeAll("zig_prefetch("); if (ptr_ty.isSlice(zcu)) - try f.writeCValueMember(writer, ptr, .{ .identifier = "ptr" }) + try f.writeCValueMember(w, ptr, .{ .identifier = "ptr" }) else - try f.writeCValue(writer, ptr, .FunctionArgument); - try writer.print(", {d}, {d});\n", .{ @intFromEnum(prefetch.rw), prefetch.locality }); + try f.writeCValue(w, ptr, .FunctionArgument); + try w.print(", {d}, {d});", .{ @intFromEnum(prefetch.rw), prefetch.locality }); + try f.object.newline(); }, // The available prefetch intrinsics do not accept a cache argument; only // address, rw, and locality. @@ -7729,13 +7851,14 @@ fn airPrefetch(f: *Function, inst: Air.Inst.Index) !CValue { fn airWasmMemorySize(f: *Function, inst: Air.Inst.Index) !CValue { const pl_op = f.air.instructions.items(.data)[@intFromEnum(inst)].pl_op; - const writer = f.object.writer(); + const w = &f.object.code.writer; const inst_ty = f.typeOfIndex(inst); const local = try f.allocLocal(inst, inst_ty); - try f.writeCValue(writer, local, .Other); + try f.writeCValue(w, local, .Other); - try writer.writeAll(" = "); - try writer.print("zig_wasm_memory_size({d});\n", .{pl_op.payload}); + try w.writeAll(" = "); + try w.print("zig_wasm_memory_size({d});", .{pl_op.payload}); + try f.object.newline(); return local; } @@ -7743,17 +7866,18 @@ fn airWasmMemorySize(f: *Function, inst: Air.Inst.Index) !CValue { fn airWasmMemoryGrow(f: *Function, inst: Air.Inst.Index) !CValue { const pl_op = f.air.instructions.items(.data)[@intFromEnum(inst)].pl_op; - const writer = f.object.writer(); + const w = &f.object.code.writer; const inst_ty = f.typeOfIndex(inst); const operand = try f.resolveInst(pl_op.operand); try reap(f, inst, &.{pl_op.operand}); const local = try f.allocLocal(inst, inst_ty); - try 
f.writeCValue(writer, local, .Other); + try f.writeCValue(w, local, .Other); - try writer.writeAll(" = "); - try writer.print("zig_wasm_memory_grow({d}, ", .{pl_op.payload}); - try f.writeCValue(writer, operand, .FunctionArgument); - try writer.writeAll(");\n"); + try w.writeAll(" = "); + try w.print("zig_wasm_memory_grow({d}, ", .{pl_op.payload}); + try f.writeCValue(w, operand, .FunctionArgument); + try w.writeAll(");"); + try f.object.newline(); return local; } @@ -7771,36 +7895,38 @@ fn airMulAdd(f: *Function, inst: Air.Inst.Index) !CValue { const inst_ty = f.typeOfIndex(inst); const inst_scalar_ty = inst_ty.scalarType(zcu); - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, inst_ty); - const v = try Vectorize.start(f, inst, writer, inst_ty); - try f.writeCValue(writer, local, .Other); - try v.elem(f, writer); - try writer.writeAll(" = zig_fma_"); - try f.object.dg.renderTypeForBuiltinFnName(writer, inst_scalar_ty); - try writer.writeByte('('); - try f.writeCValue(writer, mulend1, .FunctionArgument); - try v.elem(f, writer); - try writer.writeAll(", "); - try f.writeCValue(writer, mulend2, .FunctionArgument); - try v.elem(f, writer); - try writer.writeAll(", "); - try f.writeCValue(writer, addend, .FunctionArgument); - try v.elem(f, writer); - try writer.writeAll(");\n"); - try v.end(f, inst, writer); + const v = try Vectorize.start(f, inst, w, inst_ty); + try f.writeCValue(w, local, .Other); + try v.elem(f, w); + try w.writeAll(" = zig_fma_"); + try f.object.dg.renderTypeForBuiltinFnName(w, inst_scalar_ty); + try w.writeByte('('); + try f.writeCValue(w, mulend1, .FunctionArgument); + try v.elem(f, w); + try w.writeAll(", "); + try f.writeCValue(w, mulend2, .FunctionArgument); + try v.elem(f, w); + try w.writeAll(", "); + try f.writeCValue(w, addend, .FunctionArgument); + try v.elem(f, w); + try w.writeAll(");"); + try f.object.newline(); + try v.end(f, inst, w); return local; } fn airRuntimeNavPtr(f: 
*Function, inst: Air.Inst.Index) !CValue { const ty_nav = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_nav; - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, .fromInterned(ty_nav.ty)); - try f.writeCValue(writer, local, .Other); - try writer.writeAll(" = "); - try f.object.dg.renderNav(writer, ty_nav.nav, .Other); - try writer.writeAll(";\n"); + try f.writeCValue(w, local, .Other); + try w.writeAll(" = "); + try f.object.dg.renderNav(w, ty_nav.nav, .Other); + try w.writeByte(';'); + try f.object.newline(); return local; } @@ -7812,15 +7938,16 @@ fn airCVaStart(f: *Function, inst: Air.Inst.Index) !CValue { const function_info = (try f.ctypeFromType(function_ty, .complete)).info(&f.object.dg.ctype_pool).function; assert(function_info.varargs); - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, inst_ty); - try writer.writeAll("va_start(*(va_list *)&"); - try f.writeCValue(writer, local, .Other); + try w.writeAll("va_start(*(va_list *)&"); + try f.writeCValue(w, local, .Other); if (function_info.param_ctypes.len > 0) { - try writer.writeAll(", "); - try f.writeCValue(writer, .{ .arg = function_info.param_ctypes.len - 1 }, .FunctionArgument); + try w.writeAll(", "); + try f.writeCValue(w, .{ .arg = function_info.param_ctypes.len - 1 }, .FunctionArgument); } - try writer.writeAll(");\n"); + try w.writeAll(");"); + try f.object.newline(); return local; } @@ -7831,14 +7958,15 @@ fn airCVaArg(f: *Function, inst: Air.Inst.Index) !CValue { const va_list = try f.resolveInst(ty_op.operand); try reap(f, inst, &.{ty_op.operand}); - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, inst_ty); - try f.writeCValue(writer, local, .Other); - try writer.writeAll(" = va_arg(*(va_list *)"); - try f.writeCValue(writer, va_list, .Other); - try writer.writeAll(", "); - try f.renderType(writer, ty_op.ty.toType()); 
- try writer.writeAll(");\n"); + try f.writeCValue(w, local, .Other); + try w.writeAll(" = va_arg(*(va_list *)"); + try f.writeCValue(w, va_list, .Other); + try w.writeAll(", "); + try f.renderType(w, ty_op.ty.toType()); + try w.writeAll(");"); + try f.object.newline(); return local; } @@ -7848,10 +7976,11 @@ fn airCVaEnd(f: *Function, inst: Air.Inst.Index) !CValue { const va_list = try f.resolveInst(un_op); try reap(f, inst, &.{un_op}); - const writer = f.object.writer(); - try writer.writeAll("va_end(*(va_list *)"); - try f.writeCValue(writer, va_list, .Other); - try writer.writeAll(");\n"); + const w = &f.object.code.writer; + try w.writeAll("va_end(*(va_list *)"); + try f.writeCValue(w, va_list, .Other); + try w.writeAll(");"); + try f.object.newline(); return .none; } @@ -7862,13 +7991,14 @@ fn airCVaCopy(f: *Function, inst: Air.Inst.Index) !CValue { const va_list = try f.resolveInst(ty_op.operand); try reap(f, inst, &.{ty_op.operand}); - const writer = f.object.writer(); + const w = &f.object.code.writer; const local = try f.allocLocal(inst, inst_ty); - try writer.writeAll("va_copy(*(va_list *)&"); - try f.writeCValue(writer, local, .Other); - try writer.writeAll(", *(va_list *)"); - try f.writeCValue(writer, va_list, .Other); - try writer.writeAll(");\n"); + try w.writeAll("va_copy(*(va_list *)&"); + try f.writeCValue(w, local, .Other); + try w.writeAll(", *(va_list *)"); + try f.writeCValue(w, va_list, .Other); + try w.writeAll(");"); + try f.object.newline(); return local; } @@ -7883,7 +8013,7 @@ fn toMemoryOrder(order: std.builtin.AtomicOrder) [:0]const u8 { }; } -fn writeMemoryOrder(w: anytype, order: std.builtin.AtomicOrder) !void { +fn writeMemoryOrder(w: *Writer, order: std.builtin.AtomicOrder) !void { return w.writeAll(toMemoryOrder(order)); } @@ -7970,93 +8100,6 @@ fn toAtomicRmwSuffix(order: std.builtin.AtomicRmwOp) []const u8 { }; } -const ArrayListWriter = ErrorOnlyGenericWriter(std.ArrayList(u8).Writer.Error); - -fn arrayListWriter(list: 
*std.ArrayList(u8)) ArrayListWriter { - return .{ .context = .{ - .context = list, - .writeFn = struct { - fn write(context: *const anyopaque, bytes: []const u8) anyerror!usize { - const l: *std.ArrayList(u8) = @alignCast(@constCast(@ptrCast(context))); - return l.writer().write(bytes); - } - }.write, - } }; -} - -fn IndentWriter(comptime UnderlyingWriter: type) type { - return struct { - const Self = @This(); - pub const Error = UnderlyingWriter.Error; - pub const Writer = ErrorOnlyGenericWriter(Error); - - pub const indent_delta = 1; - - underlying_writer: UnderlyingWriter, - indent_count: usize = 0, - current_line_empty: bool = true, - - pub fn writer(self: *Self) Writer { - return .{ .context = .{ - .context = self, - .writeFn = writeAny, - } }; - } - - pub fn write(self: *Self, bytes: []const u8) Error!usize { - if (bytes.len == 0) return 0; - - const current_indent = self.indent_count * Self.indent_delta; - if (self.current_line_empty and current_indent > 0) { - try self.underlying_writer.writeByteNTimes(' ', current_indent); - } - self.current_line_empty = false; - - return self.writeNoIndent(bytes); - } - - fn writeAny(context: *const anyopaque, bytes: []const u8) anyerror!usize { - const self: *Self = @alignCast(@constCast(@ptrCast(context))); - return self.write(bytes); - } - - pub fn insertNewline(self: *Self) Error!void { - _ = try self.writeNoIndent("\n"); - } - - pub fn pushIndent(self: *Self) void { - self.indent_count += 1; - } - - pub fn popIndent(self: *Self) void { - assert(self.indent_count != 0); - self.indent_count -= 1; - } - - fn writeNoIndent(self: *Self, bytes: []const u8) Error!usize { - if (bytes.len == 0) return 0; - - try self.underlying_writer.writeAll(bytes); - if (bytes[bytes.len - 1] == '\n') { - self.current_line_empty = true; - } - return bytes.len; - } - }; -} - -/// A wrapper around `std.io.AnyWriter` that maintains a generic error set while -/// erasing the rest of the implementation. 
This is intended to avoid duplicate -/// generic instantiations for writer types which share the same error set, while -/// maintaining ease of error handling. -fn ErrorOnlyGenericWriter(comptime Error: type) type { - return std.io.GenericWriter(std.io.AnyWriter, Error, struct { - fn write(context: std.io.AnyWriter, bytes: []const u8) Error!usize { - return @errorCast(context.write(bytes)); - } - }.write); -} - fn toCIntBits(zig_bits: u32) ?u32 { for (&[_]u8{ 8, 16, 32, 64, 128 }) |c_bits| { if (zig_bits <= c_bits) { @@ -8111,7 +8154,12 @@ fn compareOperatorC(operator: std.math.CompareOperator) []const u8 { }; } -fn StringLiteral(comptime WriterType: type) type { +const StringLiteral = struct { + len: usize, + cur_len: usize, + w: *Writer, + first: bool, + // MSVC throws C2078 if an array of size 65536 or greater is initialized with a string literal, // regardless of the length of the string literal initializing it. Array initializer syntax is // used instead. @@ -8123,99 +8171,116 @@ fn StringLiteral(comptime WriterType: type) type { const max_char_len = 4; const max_literal_len = @min(16380 - max_char_len, 4095); - return struct { - len: u64, - cur_len: u64 = 0, - counting_writer: std.io.CountingWriter(WriterType), + fn init(w: *Writer, len: usize) StringLiteral { + return .{ + .cur_len = 0, + .len = len, + .w = w, + .first = true, + }; + } - pub const Error = WriterType.Error; - - const Self = @This(); - - pub fn start(self: *Self) Error!void { - const writer = self.counting_writer.writer(); - if (self.len <= max_string_initializer_len) { - try writer.writeByte('\"'); - } else { - try writer.writeByte('{'); - } + pub fn start(sl: *StringLiteral) Writer.Error!void { + if (sl.len <= max_string_initializer_len) { + try sl.w.writeByte('\"'); + } else { + try sl.w.writeByte('{'); } + } - pub fn end(self: *Self) Error!void { - const writer = self.counting_writer.writer(); - if (self.len <= max_string_initializer_len) { - try writer.writeByte('\"'); - } else { - try 
writer.writeByte('}'); - } + pub fn end(sl: *StringLiteral) Writer.Error!void { + if (sl.len <= max_string_initializer_len) { + try sl.w.writeByte('\"'); + } else { + try sl.w.writeByte('}'); } + } - fn writeStringLiteralChar(writer: anytype, c: u8) !void { - switch (c) { - 7 => try writer.writeAll("\\a"), - 8 => try writer.writeAll("\\b"), - '\t' => try writer.writeAll("\\t"), - '\n' => try writer.writeAll("\\n"), - 11 => try writer.writeAll("\\v"), - 12 => try writer.writeAll("\\f"), - '\r' => try writer.writeAll("\\r"), - '"', '\'', '?', '\\' => try writer.print("\\{c}", .{c}), - else => switch (c) { - ' '...'~' => try writer.writeByte(c), - else => try writer.print("\\{o:0>3}", .{c}), - }, - } + fn writeStringLiteralChar(sl: *StringLiteral, c: u8) Writer.Error!usize { + const w = sl.w; + switch (c) { + 7 => { + try w.writeAll("\\a"); + return 2; + }, + 8 => { + try w.writeAll("\\b"); + return 2; + }, + '\t' => { + try w.writeAll("\\t"); + return 2; + }, + '\n' => { + try w.writeAll("\\n"); + return 2; + }, + 11 => { + try w.writeAll("\\v"); + return 2; + }, + 12 => { + try w.writeAll("\\f"); + return 2; + }, + '\r' => { + try w.writeAll("\\r"); + return 2; + }, + '"', '\'', '?', '\\' => { + try w.print("\\{c}", .{c}); + return 2; + }, + ' '...'!', '#'...'&', '('...'>', '@'...'[', ']'...'~' => { + try w.writeByte(c); + return 1; + }, + else => { + var buf: [4]u8 = undefined; + const printed = std.fmt.bufPrint(&buf, "\\{o:0>3}", .{c}) catch unreachable; + try w.writeAll(printed); + return printed.len; + }, } + } - pub fn writeChar(self: *Self, c: u8) Error!void { - const writer = self.counting_writer.writer(); - if (self.len <= max_string_initializer_len) { - if (self.cur_len == 0 and self.counting_writer.bytes_written > 1) - try writer.writeAll("\"\""); + pub fn writeChar(sl: *StringLiteral, c: u8) Writer.Error!void { + if (sl.len <= max_string_initializer_len) { + if (sl.cur_len == 0 and !sl.first) try sl.w.writeAll("\"\""); - const len = 
self.counting_writer.bytes_written; - try writeStringLiteralChar(writer, c); + const char_len = try sl.writeStringLiteralChar(c); + assert(char_len <= max_char_len); + sl.cur_len += char_len; - const char_length = self.counting_writer.bytes_written - len; - assert(char_length <= max_char_len); - self.cur_len += char_length; - - if (self.cur_len >= max_literal_len) self.cur_len = 0; - } else { - if (self.counting_writer.bytes_written > 1) try writer.writeByte(','); - try writer.print("'\\x{x}'", .{c}); + if (sl.cur_len >= max_literal_len) { + sl.cur_len = 0; + sl.first = false; } + } else { + if (!sl.first) try sl.w.writeByte(','); + var buf: [6]u8 = undefined; + const printed = std.fmt.bufPrint(&buf, "'\\x{x}'", .{c}) catch unreachable; + try sl.w.writeAll(printed); + sl.cur_len += printed.len; + sl.first = false; } - }; -} + } +}; -fn stringLiteral( - child_stream: anytype, - len: u64, -) StringLiteral(@TypeOf(child_stream)) { - return .{ - .len = len, - .counting_writer = std.io.countingWriter(child_stream), - }; -} +const FormatStringContext = struct { + str: []const u8, + sentinel: ?u8, +}; -const FormatStringContext = struct { str: []const u8, sentinel: ?u8 }; -fn formatStringLiteral( - data: FormatStringContext, - comptime fmt: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, -) @TypeOf(writer).Error!void { - if (fmt.len != 1 or fmt[0] != 's') @compileError("Invalid fmt: " ++ fmt); - - var literal = stringLiteral(writer, data.str.len + @intFromBool(data.sentinel != null)); +fn formatStringLiteral(data: FormatStringContext, w: *std.io.Writer) std.io.Writer.Error!void { + var literal: StringLiteral = .init(w, data.str.len + @intFromBool(data.sentinel != null)); try literal.start(); for (data.str) |c| try literal.writeChar(c); if (data.sentinel) |sentinel| if (sentinel != 0) try literal.writeChar(sentinel); try literal.end(); } -fn fmtStringLiteral(str: []const u8, sentinel: ?u8) std.fmt.Formatter(formatStringLiteral) { +fn fmtStringLiteral(str: 
[]const u8, sentinel: ?u8) std.fmt.Formatter(FormatStringContext, formatStringLiteral) { return .{ .data = .{ .str = str, .sentinel = sentinel } }; } @@ -8231,13 +8296,10 @@ const FormatIntLiteralContext = struct { kind: CType.Kind, ctype: CType, val: Value, + base: u8, + case: std.fmt.Case, }; -fn formatIntLiteral( - data: FormatIntLiteralContext, - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) @TypeOf(writer).Error!void { +fn formatIntLiteral(data: FormatIntLiteralContext, w: *std.io.Writer) std.io.Writer.Error!void { const pt = data.dg.pt; const zcu = pt.zcu; const target = &data.dg.mod.resolved_target.result; @@ -8262,7 +8324,7 @@ fn formatIntLiteral( var int_buf: Value.BigIntSpace = undefined; const int = if (data.val.isUndefDeep(zcu)) blk: { - undef_limbs = try allocator.alloc(BigIntLimb, BigInt.calcTwosCompLimbCount(data.int_info.bits)); + undef_limbs = allocator.alloc(BigIntLimb, BigInt.calcTwosCompLimbCount(data.int_info.bits)) catch return error.WriteFailed; @memset(undef_limbs, undefPattern(BigIntLimb)); var undef_int = BigInt.Mutable{ @@ -8280,7 +8342,7 @@ fn formatIntLiteral( const one = BigInt.Mutable.init(&one_limbs, 1).toConst(); var wrap = BigInt.Mutable{ - .limbs = try allocator.alloc(BigIntLimb, BigInt.calcTwosCompLimbCount(c_bits)), + .limbs = allocator.alloc(BigIntLimb, BigInt.calcTwosCompLimbCount(c_bits)) catch return error.WriteFailed, .len = undefined, .positive = undefined, }; @@ -8317,46 +8379,29 @@ fn formatIntLiteral( if (c_limb_info.count == 1) { if (wrap.addWrap(int, one, data.int_info.signedness, c_bits) or data.int_info.signedness == .signed and wrap.subWrap(int, one, data.int_info.signedness, c_bits)) - return writer.print("{s}_{s}", .{ - data.ctype.getStandardDefineAbbrev() orelse return writer.print("zig_{s}Int_{c}{d}", .{ + return w.print("{s}_{s}", .{ + data.ctype.getStandardDefineAbbrev() orelse return w.print("zig_{s}Int_{c}{d}", .{ if (int.positive) "max" else "min", 
signAbbrev(data.int_info.signedness), c_bits, }), if (int.positive) "MAX" else "MIN", }); - if (!int.positive) try writer.writeByte('-'); - try data.ctype.renderLiteralPrefix(writer, data.kind, ctype_pool); + if (!int.positive) try w.writeByte('-'); + try data.ctype.renderLiteralPrefix(w, data.kind, ctype_pool); - const style: struct { base: u8, case: std.fmt.Case = undefined } = switch (fmt.len) { - 0 => .{ .base = 10 }, - 1 => switch (fmt[0]) { - 'b' => style: { - try writer.writeAll("0b"); - break :style .{ .base = 2 }; - }, - 'o' => style: { - try writer.writeByte('0'); - break :style .{ .base = 8 }; - }, - 'd' => .{ .base = 10 }, - 'x', 'X' => |base| style: { - try writer.writeAll("0x"); - break :style .{ .base = 16, .case = switch (base) { - 'x' => .lower, - 'X' => .upper, - else => unreachable, - } }; - }, - else => @compileError("Invalid fmt: " ++ fmt), - }, - else => @compileError("Invalid fmt: " ++ fmt), - }; - - const string = try int.abs().toStringAlloc(allocator, style.base, style.case); + switch (data.base) { + 2 => try w.writeAll("0b"), + 8 => try w.writeByte('0'), + 10 => {}, + 16 => try w.writeAll("0x"), + else => unreachable, + } + const string = int.abs().toStringAlloc(allocator, data.base, data.case) catch + return error.WriteFailed; defer allocator.free(string); - try writer.writeAll(string); + try w.writeAll(string); } else { - try data.ctype.renderLiteralPrefix(writer, data.kind, ctype_pool); + try data.ctype.renderLiteralPrefix(w, data.kind, ctype_pool); wrap.truncate(int, .unsigned, c_bits); @memset(wrap.limbs[wrap.len..], 0); wrap.len = wrap.limbs.len; @@ -8399,17 +8444,20 @@ fn formatIntLiteral( c_limb_ctype = c_limb_info.ctype; } - if (limb_offset > 0) try writer.writeAll(", "); + if (limb_offset > 0) try w.writeAll(", "); try formatIntLiteral(.{ .dg = data.dg, .int_info = c_limb_int_info, .kind = data.kind, .ctype = c_limb_ctype, - .val = try pt.intValue_big(.comptime_int, c_limb_mut.toConst()), - }, fmt, options, writer); + .val = 
pt.intValue_big(.comptime_int, c_limb_mut.toConst()) catch + return error.WriteFailed, + .base = data.base, + .case = data.case, + }, w); } } - try data.ctype.renderLiteralSuffix(writer, ctype_pool); + try data.ctype.renderLiteralSuffix(w, ctype_pool); } const Materialize = struct { @@ -8423,8 +8471,8 @@ const Materialize = struct { } }; } - pub fn mat(self: Materialize, f: *Function, writer: anytype) !void { - try f.writeCValue(writer, self.local, .Other); + pub fn mat(self: Materialize, f: *Function, w: *Writer) !void { + try f.writeCValue(w, self.local, .Other); } pub fn end(self: Materialize, f: *Function, inst: Air.Inst.Index) !void { @@ -8435,36 +8483,37 @@ const Materialize = struct { const Assignment = struct { ctype: CType, - pub fn start(f: *Function, writer: anytype, ctype: CType) !Assignment { + pub fn start(f: *Function, w: *Writer, ctype: CType) !Assignment { const self: Assignment = .{ .ctype = ctype }; - try self.restart(f, writer); + try self.restart(f, w); return self; } - pub fn restart(self: Assignment, f: *Function, writer: anytype) !void { + pub fn restart(self: Assignment, f: *Function, w: *Writer) !void { switch (self.strategy(f)) { .assign => {}, - .memcpy => try writer.writeAll("memcpy("), + .memcpy => try w.writeAll("memcpy("), } } - pub fn assign(self: Assignment, f: *Function, writer: anytype) !void { + pub fn assign(self: Assignment, f: *Function, w: *Writer) !void { switch (self.strategy(f)) { - .assign => try writer.writeAll(" = "), - .memcpy => try writer.writeAll(", "), + .assign => try w.writeAll(" = "), + .memcpy => try w.writeAll(", "), } } - pub fn end(self: Assignment, f: *Function, writer: anytype) !void { + pub fn end(self: Assignment, f: *Function, w: *Writer) !void { switch (self.strategy(f)) { .assign => {}, .memcpy => { - try writer.writeAll(", sizeof("); - try f.renderCType(writer, self.ctype); - try writer.writeAll("))"); + try w.writeAll(", sizeof("); + try f.renderCType(w, self.ctype); + try w.writeAll("))"); }, } - 
try writer.writeAll(";\n"); + try w.writeByte(';'); + try f.object.newline(); } fn strategy(self: Assignment, f: *Function) enum { assign, memcpy } { @@ -8478,37 +8527,39 @@ const Assignment = struct { const Vectorize = struct { index: CValue = .none, - pub fn start(f: *Function, inst: Air.Inst.Index, writer: anytype, ty: Type) !Vectorize { + pub fn start(f: *Function, inst: Air.Inst.Index, w: *Writer, ty: Type) !Vectorize { const pt = f.object.dg.pt; const zcu = pt.zcu; return if (ty.zigTypeTag(zcu) == .vector) index: { const local = try f.allocLocal(inst, .usize); - try writer.writeAll("for ("); - try f.writeCValue(writer, local, .Other); - try writer.print(" = {d}; ", .{try f.fmtIntLiteral(.zero_usize)}); - try f.writeCValue(writer, local, .Other); - try writer.print(" < {d}; ", .{try f.fmtIntLiteral(try pt.intValue(.usize, ty.vectorLen(zcu)))}); - try f.writeCValue(writer, local, .Other); - try writer.print(" += {d}) {{\n", .{try f.fmtIntLiteral(.one_usize)}); - f.object.indent_writer.pushIndent(); + try w.writeAll("for ("); + try f.writeCValue(w, local, .Other); + try w.print(" = {f}; ", .{try f.fmtIntLiteralDec(.zero_usize)}); + try f.writeCValue(w, local, .Other); + try w.print(" < {f}; ", .{try f.fmtIntLiteralDec(try pt.intValue(.usize, ty.vectorLen(zcu)))}); + try f.writeCValue(w, local, .Other); + try w.print(" += {f}) {{\n", .{try f.fmtIntLiteralDec(.one_usize)}); + f.object.indent(); + try f.object.newline(); break :index .{ .index = local }; } else .{}; } - pub fn elem(self: Vectorize, f: *Function, writer: anytype) !void { + pub fn elem(self: Vectorize, f: *Function, w: *Writer) !void { if (self.index != .none) { - try writer.writeByte('['); - try f.writeCValue(writer, self.index, .Other); - try writer.writeByte(']'); + try w.writeByte('['); + try f.writeCValue(w, self.index, .Other); + try w.writeByte(']'); } } - pub fn end(self: Vectorize, f: *Function, inst: Air.Inst.Index, writer: anytype) !void { + pub fn end(self: Vectorize, f: *Function, inst: 
Air.Inst.Index, w: *Writer) !void { if (self.index != .none) { - f.object.indent_writer.popIndent(); - try writer.writeAll("}\n"); + try f.object.outdent(); + try w.writeByte('}'); + try f.object.newline(); try freeLocal(f, inst, self.index.new_local, null); } } diff --git a/src/codegen/c/Type.zig b/src/codegen/c/Type.zig index 044d947702..77555d773c 100644 --- a/src/codegen/c/Type.zig +++ b/src/codegen/c/Type.zig @@ -209,7 +209,7 @@ pub fn getStandardDefineAbbrev(ctype: CType) ?[]const u8 { }; } -pub fn renderLiteralPrefix(ctype: CType, writer: anytype, kind: Kind, pool: *const Pool) @TypeOf(writer).Error!void { +pub fn renderLiteralPrefix(ctype: CType, w: *Writer, kind: Kind, pool: *const Pool) Writer.Error!void { switch (ctype.info(pool)) { .basic => |basic_info| switch (basic_info) { .void => unreachable, @@ -224,7 +224,7 @@ pub fn renderLiteralPrefix(ctype: CType, writer: anytype, kind: Kind, pool: *con .uintptr_t, .intptr_t, => switch (kind) { - else => try writer.print("({s})", .{@tagName(basic_info)}), + else => try w.print("({s})", .{@tagName(basic_info)}), .global => {}, }, .int, @@ -246,7 +246,7 @@ pub fn renderLiteralPrefix(ctype: CType, writer: anytype, kind: Kind, pool: *con .int32_t, .uint64_t, .int64_t, - => try writer.print("{s}_C(", .{ctype.getStandardDefineAbbrev().?}), + => try w.print("{s}_C(", .{ctype.getStandardDefineAbbrev().?}), .zig_u128, .zig_i128, .zig_f16, @@ -255,7 +255,7 @@ pub fn renderLiteralPrefix(ctype: CType, writer: anytype, kind: Kind, pool: *con .zig_f80, .zig_f128, .zig_c_longdouble, - => try writer.print("zig_{s}_{s}(", .{ + => try w.print("zig_{s}_{s}(", .{ switch (kind) { else => "make", .global => "init", @@ -265,12 +265,12 @@ pub fn renderLiteralPrefix(ctype: CType, writer: anytype, kind: Kind, pool: *con .va_list => unreachable, _ => unreachable, }, - .array, .vector => try writer.writeByte('{'), + .array, .vector => try w.writeByte('{'), else => unreachable, } } -pub fn renderLiteralSuffix(ctype: CType, writer: 
anytype, pool: *const Pool) @TypeOf(writer).Error!void { +pub fn renderLiteralSuffix(ctype: CType, w: *Writer, pool: *const Pool) Writer.Error!void { switch (ctype.info(pool)) { .basic => |basic_info| switch (basic_info) { .void => unreachable, @@ -280,20 +280,20 @@ pub fn renderLiteralSuffix(ctype: CType, writer: anytype, pool: *const Pool) @Ty .short, .int, => {}, - .long => try writer.writeByte('l'), - .@"long long" => try writer.writeAll("ll"), + .long => try w.writeByte('l'), + .@"long long" => try w.writeAll("ll"), .@"unsigned char", .@"unsigned short", .@"unsigned int", - => try writer.writeByte('u'), + => try w.writeByte('u'), .@"unsigned long", .size_t, .uintptr_t, - => try writer.writeAll("ul"), - .@"unsigned long long" => try writer.writeAll("ull"), - .float => try writer.writeByte('f'), + => try w.writeAll("ul"), + .@"unsigned long long" => try w.writeAll("ull"), + .float => try w.writeByte('f'), .double => {}, - .@"long double" => try writer.writeByte('l'), + .@"long double" => try w.writeByte('l'), .bool, .ptrdiff_t, .intptr_t, @@ -314,11 +314,11 @@ pub fn renderLiteralSuffix(ctype: CType, writer: anytype, pool: *const Pool) @Ty .zig_f80, .zig_f128, .zig_c_longdouble, - => try writer.writeByte(')'), + => try w.writeByte(')'), .va_list => unreachable, _ => unreachable, }, - .array, .vector => try writer.writeByte('}'), + .array, .vector => try w.writeByte('}'), else => unreachable, } } @@ -938,19 +938,13 @@ pub const Pool = struct { index: String.Index, const FormatData = struct { string: String, pool: *const Pool }; - fn format( - data: FormatData, - comptime fmt_str: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { - if (fmt_str.len > 0) @compileError("invalid format string '" ++ fmt_str ++ "'"); + fn format(data: FormatData, writer: *Writer) Writer.Error!void { if (data.string.toSlice(data.pool)) |slice| try writer.writeAll(slice) else try writer.print("f{d}", .{@intFromEnum(data.string.index)}); } - pub fn 
fmt(str: String, pool: *const Pool) std.fmt.Formatter(format) { + pub fn fmt(str: String, pool: *const Pool) std.fmt.Formatter(FormatData, format) { return .{ .data = .{ .string = str, .pool = pool } }; } @@ -2890,7 +2884,7 @@ pub const Pool = struct { comptime fmt_str: []const u8, fmt_args: anytype, ) !String { - try pool.string_bytes.writer(allocator).print(fmt_str, fmt_args); + try pool.string_bytes.print(allocator, fmt_str, fmt_args); return pool.trailingString(allocator); } @@ -3281,10 +3275,12 @@ pub const AlignAs = packed struct { } }; +const std = @import("std"); const assert = std.debug.assert; +const Writer = std.io.Writer; + const CType = @This(); const InternPool = @import("../../InternPool.zig"); const Module = @import("../../Package/Module.zig"); -const std = @import("std"); const Type = @import("../../Type.zig"); const Zcu = @import("../../Zcu.zig"); diff --git a/src/codegen/llvm.zig b/src/codegen/llvm.zig index 5135095f69..3d670dce83 100644 --- a/src/codegen/llvm.zig +++ b/src/codegen/llvm.zig @@ -239,12 +239,12 @@ pub fn targetTriple(allocator: Allocator, target: *const std.Target) ![]const u8 .none, .windows, => {}, - .semver => |ver| try llvm_triple.writer().print("{d}.{d}.{d}", .{ + .semver => |ver| try llvm_triple.print("{d}.{d}.{d}", .{ ver.min.major, ver.min.minor, ver.min.patch, }), - inline .linux, .hurd => |ver| try llvm_triple.writer().print("{d}.{d}.{d}", .{ + inline .linux, .hurd => |ver| try llvm_triple.print("{d}.{d}.{d}", .{ ver.range.min.major, ver.range.min.minor, ver.range.min.patch, @@ -295,13 +295,13 @@ pub fn targetTriple(allocator: Allocator, target: *const std.Target) ![]const u8 .windows, => {}, inline .hurd, .linux => |ver| if (target.abi.isGnu()) { - try llvm_triple.writer().print("{d}.{d}.{d}", .{ + try llvm_triple.print("{d}.{d}.{d}", .{ ver.glibc.major, ver.glibc.minor, ver.glibc.patch, }); } else if (@TypeOf(ver) == std.Target.Os.LinuxVersionRange and target.abi.isAndroid()) { - try llvm_triple.writer().print("{d}", 
.{ver.android}); + try llvm_triple.print("{d}", .{ver.android}); }, } @@ -746,12 +746,18 @@ pub const Object = struct { try wip.finish(); } - fn genModuleLevelAssembly(object: *Object) !void { - const writer = object.builder.setModuleAsm(); + fn genModuleLevelAssembly(object: *Object) Allocator.Error!void { + const b = &object.builder; + const gpa = b.gpa; + b.module_asm.clearRetainingCapacity(); for (object.pt.zcu.global_assembly.values()) |assembly| { - try writer.print("{s}\n", .{assembly}); + try b.module_asm.ensureUnusedCapacity(gpa, assembly.len + 1); + b.module_asm.appendSliceAssumeCapacity(assembly); + b.module_asm.appendAssumeCapacity('\n'); + } + if (b.module_asm.getLastOrNull()) |last| { + if (last != '\n') try b.module_asm.append(gpa, '\n'); } - try object.builder.finishModuleAsm(); } pub const EmitOptions = struct { @@ -939,7 +945,9 @@ pub const Object = struct { if (std.mem.eql(u8, path, "-")) { o.builder.dump(); } else { - _ = try o.builder.printToFile(path); + o.builder.printToFilePath(std.fs.cwd(), path) catch |err| { + log.err("failed printing LLVM module to \"{s}\": {s}", .{ path, @errorName(err) }); + }; } } @@ -2486,7 +2494,7 @@ pub const Object = struct { var union_name_buf: ?[:0]const u8 = null; defer if (union_name_buf) |buf| gpa.free(buf); const union_name = if (layout.tag_size == 0) name else name: { - union_name_buf = try std.fmt.allocPrintZ(gpa, "{s}:Payload", .{name}); + union_name_buf = try std.fmt.allocPrintSentinel(gpa, "{s}:Payload", .{name}, 0); break :name union_name_buf.?; }; @@ -2680,10 +2688,12 @@ pub const Object = struct { } fn allocTypeName(o: *Object, ty: Type) Allocator.Error![:0]const u8 { - var buffer = std.ArrayList(u8).init(o.gpa); - errdefer buffer.deinit(); - try ty.print(buffer.writer(), o.pt); - return buffer.toOwnedSliceSentinel(0); + var aw: std.io.Writer.Allocating = .init(o.gpa); + defer aw.deinit(); + ty.print(&aw.writer, o.pt) catch |err| switch (err) { + error.WriteFailed => return error.OutOfMemory, + }; + 
return aw.toOwnedSliceSentinel(0); } /// If the llvm function does not exist, create it. @@ -4482,7 +4492,7 @@ pub const Object = struct { const target = &zcu.root_mod.resolved_target.result; const function_index = try o.builder.addFunction( try o.builder.fnType(ret_ty, &.{try o.lowerType(Type.fromInterned(enum_type.tag_ty))}, .normal), - try o.builder.strtabStringFmt("__zig_tag_name_{}", .{enum_type.name.fmt(ip)}), + try o.builder.strtabStringFmt("__zig_tag_name_{f}", .{enum_type.name.fmt(ip)}), toLlvmAddressSpace(.generic, target), ); @@ -4633,7 +4643,7 @@ pub const NavGen = struct { if (zcu.getTarget().cpu.arch.isWasm() and ty.zigTypeTag(zcu) == .@"fn") { if (lib_name.toSlice(ip)) |lib_name_slice| { if (!std.mem.eql(u8, lib_name_slice, "c")) { - break :decl_name try o.builder.strtabStringFmt("{}|{s}", .{ nav.name.fmt(ip), lib_name_slice }); + break :decl_name try o.builder.strtabStringFmt("{f}|{s}", .{ nav.name.fmt(ip), lib_name_slice }); } } } @@ -7472,7 +7482,7 @@ pub const FuncGen = struct { llvm_param_types[llvm_param_i] = llvm_elem_ty; } - try llvm_constraints.writer(self.gpa).print(",{d}", .{output_index}); + try llvm_constraints.print(self.gpa, ",{d}", .{output_index}); // In the case of indirect inputs, LLVM requires the callsite to have // an elementtype() attribute. 
@@ -7573,7 +7583,7 @@ pub const FuncGen = struct { // we should validate the assembly in Sema; by now it is too late return self.todo("unknown input or output name: '{s}'", .{name}); }; - try rendered_template.writer().print("{d}", .{index}); + try rendered_template.print("{d}", .{index}); if (byte == ':') { try rendered_template.append(':'); modifier_start = i + 1; @@ -10370,7 +10380,7 @@ pub const FuncGen = struct { const target = &zcu.root_mod.resolved_target.result; const function_index = try o.builder.addFunction( try o.builder.fnType(.i1, &.{try o.lowerType(Type.fromInterned(enum_type.tag_ty))}, .normal), - try o.builder.strtabStringFmt("__zig_is_named_enum_value_{}", .{enum_type.name.fmt(ip)}), + try o.builder.strtabStringFmt("__zig_is_named_enum_value_{f}", .{enum_type.name.fmt(ip)}), toLlvmAddressSpace(.generic, target), ); diff --git a/src/codegen/spirv.zig b/src/codegen/spirv.zig index 8a782c54aa..f263e567e8 100644 --- a/src/codegen/spirv.zig +++ b/src/codegen/spirv.zig @@ -817,7 +817,7 @@ const NavGen = struct { const result_ty_id = try self.resolveType(ty, repr); const ip = &zcu.intern_pool; - log.debug("lowering constant: ty = {}, val = {}, key = {s}", .{ ty.fmt(pt), val.fmtValue(pt), @tagName(ip.indexToKey(val.toIntern())) }); + log.debug("lowering constant: ty = {f}, val = {f}, key = {s}", .{ ty.fmt(pt), val.fmtValue(pt), @tagName(ip.indexToKey(val.toIntern())) }); if (val.isUndefDeep(zcu)) { return self.spv.constUndef(result_ty_id); } @@ -1147,7 +1147,7 @@ const NavGen = struct { return result_ptr_id; } - return self.fail("cannot perform pointer cast: '{}' to '{}'", .{ + return self.fail("cannot perform pointer cast: '{f}' to '{f}'", .{ parent_ptr_ty.fmt(pt), oac.new_ptr_ty.fmt(pt), }); @@ -1260,10 +1260,12 @@ const NavGen = struct { // Turn a Zig type's name into a cache reference. 
fn resolveTypeName(self: *NavGen, ty: Type) ![]const u8 { - var name = std.ArrayList(u8).init(self.gpa); - defer name.deinit(); - try ty.print(name.writer(), self.pt); - return try name.toOwnedSlice(); + var aw: std.io.Writer.Allocating = .init(self.gpa); + defer aw.deinit(); + ty.print(&aw.writer, self.pt) catch |err| switch (err) { + error.WriteFailed => return error.OutOfMemory, + }; + return try aw.toOwnedSlice(); } /// Create an integer type suitable for storing at least 'bits' bits. @@ -1462,7 +1464,7 @@ const NavGen = struct { const pt = self.pt; const zcu = pt.zcu; const ip = &zcu.intern_pool; - log.debug("resolveType: ty = {}", .{ty.fmt(pt)}); + log.debug("resolveType: ty = {f}", .{ty.fmt(pt)}); const target = self.spv.target; const section = &self.spv.sections.types_globals_constants; @@ -3068,7 +3070,7 @@ const NavGen = struct { try self.func.body.emit(self.spv.gpa, .OpFunctionEnd, {}); try self.spv.addFunction(spv_decl_index, self.func); - try self.spv.debugNameFmt(initializer_id, "initializer of {}", .{nav.fqn.fmt(ip)}); + try self.spv.debugNameFmt(initializer_id, "initializer of {f}", .{nav.fqn.fmt(ip)}); try self.spv.sections.types_globals_constants.emit(self.spv.gpa, .OpExtInst, .{ .id_result_type = ptr_ty_id, diff --git a/src/codegen/spirv/spec.zig b/src/codegen/spirv/spec.zig index 970f5bfe7f..82ec05ebba 100644 --- a/src/codegen/spirv/spec.zig +++ b/src/codegen/spirv/spec.zig @@ -1,6 +1,7 @@ //! This file is auto-generated by tools/gen_spirv_spec.zig. 
const std = @import("std"); +const assert = std.debug.assert; pub const Version = packed struct(Word) { padding: u8 = 0, @@ -18,15 +19,10 @@ pub const IdResult = enum(Word) { none, _, - pub fn format( - self: IdResult, - comptime _: []const u8, - _: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { + pub fn format(self: IdResult, writer: *std.io.Writer) std.io.Writer.Error!void { switch (self) { .none => try writer.writeAll("(none)"), - else => try writer.print("%{}", .{@intFromEnum(self)}), + else => try writer.print("%{d}", .{@intFromEnum(self)}), } } }; diff --git a/src/crash_report.zig b/src/crash_report.zig index 124564b440..e5c31d42b5 100644 --- a/src/crash_report.zig +++ b/src/crash_report.zig @@ -80,18 +80,19 @@ fn dumpStatusReport() !void { var fba = std.heap.FixedBufferAllocator.init(&crash_heap); const allocator = fba.allocator(); - const stderr = io.getStdErr().writer(); + var stderr_fw = std.fs.File.stderr().writer(&.{}); + const stderr = &stderr_fw.interface; const block: *Sema.Block = anal.block; const zcu = anal.sema.pt.zcu; const file, const src_base_node = Zcu.LazySrcLoc.resolveBaseNode(block.src_base_inst, zcu) orelse { const file = zcu.fileByIndex(block.src_base_inst.resolveFile(&zcu.intern_pool)); - try stderr.print("Analyzing lost instruction in file '{}'. This should not happen!\n\n", .{file.path.fmt(zcu.comp)}); + try stderr.print("Analyzing lost instruction in file '{f}'. 
This should not happen!\n\n", .{file.path.fmt(zcu.comp)}); return; }; try stderr.writeAll("Analyzing "); - try stderr.print("Analyzing '{}'\n", .{file.path.fmt(zcu.comp)}); + try stderr.print("Analyzing '{f}'\n", .{file.path.fmt(zcu.comp)}); print_zir.renderInstructionContext( allocator, @@ -107,7 +108,7 @@ fn dumpStatusReport() !void { }; try stderr.print( \\ For full context, use the command - \\ zig ast-check -t {} + \\ zig ast-check -t {f} \\ \\ , .{file.path.fmt(zcu.comp)}); @@ -116,7 +117,7 @@ fn dumpStatusReport() !void { while (parent) |curr| { fba.reset(); const cur_block_file = zcu.fileByIndex(curr.block.src_base_inst.resolveFile(&zcu.intern_pool)); - try stderr.print(" in {}\n", .{cur_block_file.path.fmt(zcu.comp)}); + try stderr.print(" in {f}\n", .{cur_block_file.path.fmt(zcu.comp)}); _, const cur_block_src_base_node = Zcu.LazySrcLoc.resolveBaseNode(curr.block.src_base_inst, zcu) orelse { try stderr.writeAll(" > [lost instruction; this should not happen]\n"); parent = curr.parent; @@ -139,7 +140,7 @@ fn dumpStatusReport() !void { parent = curr.parent; } - try stderr.writeAll("\n"); + try stderr.writeByte('\n'); } var crash_heap: [16 * 4096]u8 = undefined; @@ -268,11 +269,12 @@ const StackContext = union(enum) { debug.dumpCurrentStackTrace(ct.ret_addr); }, .exception => |context| { - debug.dumpStackTraceFromBase(context); + var stderr_fw = std.fs.File.stderr().writer(&.{}); + const stderr = &stderr_fw.interface; + debug.dumpStackTraceFromBase(context, stderr); }, .not_supported => { - const stderr = io.getStdErr().writer(); - stderr.writeAll("Stack trace not supported on this platform.\n") catch {}; + std.fs.File.stderr().writeAll("Stack trace not supported on this platform.\n") catch {}; }, } } @@ -379,7 +381,8 @@ const PanicSwitch = struct { state.recover_stage = .release_mutex; - const stderr = io.getStdErr().writer(); + var stderr_fw = std.fs.File.stderr().writer(&.{}); + const stderr = &stderr_fw.interface; if (builtin.single_threaded) { 
stderr.print("panic: ", .{}) catch goTo(releaseMutex, .{state}); } else { @@ -406,7 +409,8 @@ const PanicSwitch = struct { recover(state, trace, stack, msg); state.recover_stage = .release_mutex; - const stderr = io.getStdErr().writer(); + var stderr_fw = std.fs.File.stderr().writer(&.{}); + const stderr = &stderr_fw.interface; stderr.writeAll("\nOriginal Error:\n") catch {}; goTo(reportStack, .{state}); } @@ -477,7 +481,8 @@ const PanicSwitch = struct { recover(state, trace, stack, msg); state.recover_stage = .silent_abort; - const stderr = io.getStdErr().writer(); + var stderr_fw = std.fs.File.stderr().writer(&.{}); + const stderr = &stderr_fw.interface; stderr.writeAll("Aborting...\n") catch {}; goTo(abort, .{}); } @@ -505,7 +510,8 @@ const PanicSwitch = struct { // lower the verbosity, and restore it at the end if we don't panic. state.recover_verbosity = .message_only; - const stderr = io.getStdErr().writer(); + var stderr_fw = std.fs.File.stderr().writer(&.{}); + const stderr = &stderr_fw.interface; stderr.writeAll("\nPanicked during a panic: ") catch {}; stderr.writeAll(msg) catch {}; stderr.writeAll("\nInner panic stack:\n") catch {}; @@ -519,10 +525,11 @@ const PanicSwitch = struct { .message_only => { state.recover_verbosity = .silent; - const stderr = io.getStdErr().writer(); + var stderr_fw = std.fs.File.stderr().writer(&.{}); + const stderr = &stderr_fw.interface; stderr.writeAll("\nPanicked while dumping inner panic stack: ") catch {}; stderr.writeAll(msg) catch {}; - stderr.writeAll("\n") catch {}; + stderr.writeByte('\n') catch {}; // If we succeed, restore all the way to dumping the stack. state.recover_verbosity = .message_and_stack; diff --git a/src/deprecated.zig b/src/deprecated.zig new file mode 100644 index 0000000000..1f7d5c8c25 --- /dev/null +++ b/src/deprecated.zig @@ -0,0 +1,431 @@ +//! Deprecated. 
Stop using this API + +const std = @import("std"); +const math = std.math; +const mem = std.mem; +const Allocator = mem.Allocator; +const assert = std.debug.assert; +const testing = std.testing; + +pub fn LinearFifo(comptime T: type) type { + return struct { + allocator: Allocator, + buf: []T, + head: usize, + count: usize, + + const Self = @This(); + + pub fn init(allocator: Allocator) Self { + return .{ + .allocator = allocator, + .buf = &.{}, + .head = 0, + .count = 0, + }; + } + + pub fn deinit(self: *Self) void { + self.allocator.free(self.buf); + self.* = undefined; + } + + pub fn realign(self: *Self) void { + if (self.buf.len - self.head >= self.count) { + mem.copyForwards(T, self.buf[0..self.count], self.buf[self.head..][0..self.count]); + self.head = 0; + } else { + var tmp: [4096 / 2 / @sizeOf(T)]T = undefined; + + while (self.head != 0) { + const n = @min(self.head, tmp.len); + const m = self.buf.len - n; + @memcpy(tmp[0..n], self.buf[0..n]); + mem.copyForwards(T, self.buf[0..m], self.buf[n..][0..m]); + @memcpy(self.buf[m..][0..n], tmp[0..n]); + self.head -= n; + } + } + { // set unused area to undefined + const unused = mem.sliceAsBytes(self.buf[self.count..]); + @memset(unused, undefined); + } + } + + /// Reduce allocated capacity to `size`. + pub fn shrink(self: *Self, size: usize) void { + assert(size >= self.count); + self.realign(); + self.buf = self.allocator.realloc(self.buf, size) catch |e| switch (e) { + error.OutOfMemory => return, // no problem, capacity is still correct then. 
+ }; + } + + /// Ensure that the buffer can fit at least `size` items + pub fn ensureTotalCapacity(self: *Self, size: usize) !void { + if (self.buf.len >= size) return; + self.realign(); + const new_size = math.ceilPowerOfTwo(usize, size) catch return error.OutOfMemory; + self.buf = try self.allocator.realloc(self.buf, new_size); + } + + /// Makes sure at least `size` items are unused + pub fn ensureUnusedCapacity(self: *Self, size: usize) error{OutOfMemory}!void { + if (self.writableLength() >= size) return; + + return try self.ensureTotalCapacity(math.add(usize, self.count, size) catch return error.OutOfMemory); + } + + /// Returns number of items currently in fifo + pub fn readableLength(self: Self) usize { + return self.count; + } + + /// Returns a writable slice from the 'read' end of the fifo + fn readableSliceMut(self: Self, offset: usize) []T { + if (offset > self.count) return &[_]T{}; + + var start = self.head + offset; + if (start >= self.buf.len) { + start -= self.buf.len; + return self.buf[start .. start + (self.count - offset)]; + } else { + const end = @min(self.head + self.count, self.buf.len); + return self.buf[start..end]; + } + } + + /// Returns a readable slice from `offset` + pub fn readableSlice(self: Self, offset: usize) []const T { + return self.readableSliceMut(offset); + } + + pub fn readableSliceOfLen(self: *Self, len: usize) []const T { + assert(len <= self.count); + const buf = self.readableSlice(0); + if (buf.len >= len) { + return buf[0..len]; + } else { + self.realign(); + return self.readableSlice(0)[0..len]; + } + } + + /// Discard first `count` items in the fifo + pub fn discard(self: *Self, count: usize) void { + assert(count <= self.count); + { // set old range to undefined. 
Note: may be wrapped around + const slice = self.readableSliceMut(0); + if (slice.len >= count) { + const unused = mem.sliceAsBytes(slice[0..count]); + @memset(unused, undefined); + } else { + const unused = mem.sliceAsBytes(slice[0..]); + @memset(unused, undefined); + const unused2 = mem.sliceAsBytes(self.readableSliceMut(slice.len)[0 .. count - slice.len]); + @memset(unused2, undefined); + } + } + var head = self.head + count; + // Note it is safe to do a wrapping subtract as + // bitwise & with all 1s is a noop + head &= self.buf.len -% 1; + self.head = head; + self.count -= count; + } + + /// Read the next item from the fifo + pub fn readItem(self: *Self) ?T { + if (self.count == 0) return null; + + const c = self.buf[self.head]; + self.discard(1); + return c; + } + + /// Read data from the fifo into `dst`, returns number of items copied. + pub fn read(self: *Self, dst: []T) usize { + var dst_left = dst; + + while (dst_left.len > 0) { + const slice = self.readableSlice(0); + if (slice.len == 0) break; + const n = @min(slice.len, dst_left.len); + @memcpy(dst_left[0..n], slice[0..n]); + self.discard(n); + dst_left = dst_left[n..]; + } + + return dst.len - dst_left.len; + } + + /// Same as `read` except it returns an error union + /// The purpose of this function existing is to match `std.io.Reader` API. + fn readFn(self: *Self, dest: []u8) error{}!usize { + return self.read(dest); + } + + /// Returns number of items available in fifo + pub fn writableLength(self: Self) usize { + return self.buf.len - self.count; + } + + /// Returns the first section of writable buffer. + /// Note that this may be of length 0 + pub fn writableSlice(self: Self, offset: usize) []T { + if (offset > self.buf.len) return &[_]T{}; + + const tail = self.head + offset + self.count; + if (tail < self.buf.len) { + return self.buf[tail..]; + } else { + return self.buf[tail - self.buf.len ..][0 .. 
self.writableLength() - offset]; + } + } + + /// Returns a writable buffer of at least `size` items, allocating memory as needed. + /// Use `fifo.update` once you've written data to it. + pub fn writableWithSize(self: *Self, size: usize) ![]T { + try self.ensureUnusedCapacity(size); + + // try to avoid realigning buffer + var slice = self.writableSlice(0); + if (slice.len < size) { + self.realign(); + slice = self.writableSlice(0); + } + return slice; + } + + /// Update the tail location of the buffer (usually follows use of writable/writableWithSize) + pub fn update(self: *Self, count: usize) void { + assert(self.count + count <= self.buf.len); + self.count += count; + } + + /// Appends the data in `src` to the fifo. + /// You must have ensured there is enough space. + pub fn writeAssumeCapacity(self: *Self, src: []const T) void { + assert(self.writableLength() >= src.len); + + var src_left = src; + while (src_left.len > 0) { + const writable_slice = self.writableSlice(0); + assert(writable_slice.len != 0); + const n = @min(writable_slice.len, src_left.len); + @memcpy(writable_slice[0..n], src_left[0..n]); + self.update(n); + src_left = src_left[n..]; + } + } + + /// Write a single item to the fifo + pub fn writeItem(self: *Self, item: T) !void { + try self.ensureUnusedCapacity(1); + return self.writeItemAssumeCapacity(item); + } + + pub fn writeItemAssumeCapacity(self: *Self, item: T) void { + var tail = self.head + self.count; + tail &= self.buf.len - 1; + self.buf[tail] = item; + self.update(1); + } + + /// Appends the data in `src` to the fifo. + /// Allocates more memory as necessary + pub fn write(self: *Self, src: []const T) !void { + try self.ensureUnusedCapacity(src.len); + + return self.writeAssumeCapacity(src); + } + + /// Same as `write` except it returns the number of bytes written, which is always the same + /// as `bytes.len`. The purpose of this function existing is to match `std.io.Writer` API. 
+ fn appendWrite(self: *Self, bytes: []const u8) error{OutOfMemory}!usize { + try self.write(bytes); + return bytes.len; + } + + /// Make `count` items available before the current read location + fn rewind(self: *Self, count: usize) void { + assert(self.writableLength() >= count); + + var head = self.head + (self.buf.len - count); + head &= self.buf.len - 1; + self.head = head; + self.count += count; + } + + /// Place data back into the read stream + pub fn unget(self: *Self, src: []const T) !void { + try self.ensureUnusedCapacity(src.len); + + self.rewind(src.len); + + const slice = self.readableSliceMut(0); + if (src.len < slice.len) { + @memcpy(slice[0..src.len], src); + } else { + @memcpy(slice, src[0..slice.len]); + const slice2 = self.readableSliceMut(slice.len); + @memcpy(slice2[0 .. src.len - slice.len], src[slice.len..]); + } + } + + /// Returns the item at `offset`. + /// Asserts offset is within bounds. + pub fn peekItem(self: Self, offset: usize) T { + assert(offset < self.count); + + var index = self.head + offset; + index &= self.buf.len - 1; + return self.buf[index]; + } + + pub fn toOwnedSlice(self: *Self) Allocator.Error![]T { + if (self.head != 0) self.realign(); + assert(self.head == 0); + assert(self.count <= self.buf.len); + const allocator = self.allocator; + if (allocator.resize(self.buf, self.count)) { + const result = self.buf[0..self.count]; + self.* = Self.init(allocator); + return result; + } + const new_memory = try allocator.dupe(T, self.buf[0..self.count]); + allocator.free(self.buf); + self.* = Self.init(allocator); + return new_memory; + } + }; +} + +test "LinearFifo(u8, .Dynamic) discard(0) from empty buffer should not error on overflow" { + var fifo = LinearFifo(u8, .Dynamic).init(testing.allocator); + defer fifo.deinit(); + + // If overflow is not explicitly allowed this will crash in debug / safe mode + fifo.discard(0); +} + +test "LinearFifo(u8, .Dynamic)" { + var fifo = LinearFifo(u8, .Dynamic).init(testing.allocator); + 
defer fifo.deinit(); + + try fifo.write("HELLO"); + try testing.expectEqual(@as(usize, 5), fifo.readableLength()); + try testing.expectEqualSlices(u8, "HELLO", fifo.readableSlice(0)); + + { + var i: usize = 0; + while (i < 5) : (i += 1) { + try fifo.write(&[_]u8{fifo.peekItem(i)}); + } + try testing.expectEqual(@as(usize, 10), fifo.readableLength()); + try testing.expectEqualSlices(u8, "HELLOHELLO", fifo.readableSlice(0)); + } + + { + try testing.expectEqual(@as(u8, 'H'), fifo.readItem().?); + try testing.expectEqual(@as(u8, 'E'), fifo.readItem().?); + try testing.expectEqual(@as(u8, 'L'), fifo.readItem().?); + try testing.expectEqual(@as(u8, 'L'), fifo.readItem().?); + try testing.expectEqual(@as(u8, 'O'), fifo.readItem().?); + } + try testing.expectEqual(@as(usize, 5), fifo.readableLength()); + + { // Writes that wrap around + try testing.expectEqual(@as(usize, 11), fifo.writableLength()); + try testing.expectEqual(@as(usize, 6), fifo.writableSlice(0).len); + fifo.writeAssumeCapacity("6 true, else => false, }, @@ -152,6 +154,7 @@ pub const Env = enum { else => Env.ast_gen.supports(feature), }, .cbe => switch (feature) { + .legalize, .c_backend, .c_linker, => true, @@ -248,6 +251,8 @@ pub const Feature = enum { sema, legalize, + c_compiler, + llvm_backend, c_backend, wasm_backend, diff --git a/src/fmt.zig b/src/fmt.zig index 6b1d73951e..bbb02e0c34 100644 --- a/src/fmt.zig +++ b/src/fmt.zig @@ -1,3 +1,11 @@ +const std = @import("std"); +const mem = std.mem; +const fs = std.fs; +const process = std.process; +const Allocator = std.mem.Allocator; +const Color = std.zig.Color; +const fatal = std.process.fatal; + const usage_fmt = \\Usage: zig fmt [file]... 
\\ @@ -52,7 +60,7 @@ pub fn run( const arg = args[i]; if (mem.startsWith(u8, arg, "-")) { if (mem.eql(u8, arg, "-h") or mem.eql(u8, arg, "--help")) { - const stdout = std.io.getStdOut().writer(); + const stdout = std.fs.File.stdout().deprecatedWriter(); try stdout.writeAll(usage_fmt); return process.cleanExit(); } else if (mem.eql(u8, arg, "--color")) { @@ -93,7 +101,7 @@ pub fn run( fatal("cannot use --stdin with positional arguments", .{}); } - const stdin = std.io.getStdIn(); + const stdin: fs.File = .stdin(); const source_code = std.zig.readSourceFileToEndAlloc(gpa, stdin, null) catch |err| { fatal("unable to read stdin: {}", .{err}); }; @@ -146,7 +154,7 @@ pub fn run( process.exit(code); } - return std.io.getStdOut().writeAll(formatted); + return std.fs.File.stdout().writeAll(formatted); } if (input_files.items.len == 0) { @@ -363,7 +371,7 @@ fn fmtPathFile( return; if (check_mode) { - const stdout = std.io.getStdOut().writer(); + const stdout = std.fs.File.stdout().deprecatedWriter(); try stdout.print("{s}\n", .{file_path}); fmt.any_error = true; } else { @@ -372,15 +380,7 @@ fn fmtPathFile( try af.file.writeAll(fmt.out_buffer.items); try af.finish(); - const stdout = std.io.getStdOut().writer(); + const stdout = std.fs.File.stdout().deprecatedWriter(); try stdout.print("{s}\n", .{file_path}); } } - -const std = @import("std"); -const mem = std.mem; -const fs = std.fs; -const process = std.process; -const Allocator = std.mem.Allocator; -const Color = std.zig.Color; -const fatal = std.process.fatal; diff --git a/src/libs/freebsd.zig b/src/libs/freebsd.zig index 55d097b71b..6baa899087 100644 --- a/src/libs/freebsd.zig +++ b/src/libs/freebsd.zig @@ -497,13 +497,13 @@ pub fn buildSharedObjects(comp: *Compilation, prog_node: std.Progress.Node) anye .lt => continue, .gt => { // TODO Expose via compile error mechanism instead of log. 
- log.warn("invalid target FreeBSD libc version: {}", .{target_version}); + log.warn("invalid target FreeBSD libc version: {f}", .{target_version}); return error.InvalidTargetLibCVersion; }, } } else blk: { const latest_index = metadata.all_versions.len - 1; - log.warn("zig cannot build new FreeBSD libc version {}; providing instead {}", .{ + log.warn("zig cannot build new FreeBSD libc version {f}; providing instead {f}", .{ target_version, metadata.all_versions[latest_index], }); break :blk latest_index; diff --git a/src/libs/glibc.zig b/src/libs/glibc.zig index da6ee74962..43baaf38d4 100644 --- a/src/libs/glibc.zig +++ b/src/libs/glibc.zig @@ -736,13 +736,13 @@ pub fn buildSharedObjects(comp: *Compilation, prog_node: std.Progress.Node) anye .lt => continue, .gt => { // TODO Expose via compile error mechanism instead of log. - log.warn("invalid target glibc version: {}", .{target_version}); + log.warn("invalid target glibc version: {f}", .{target_version}); return error.InvalidTargetGLibCVersion; }, } } else blk: { const latest_index = metadata.all_versions.len - 1; - log.warn("zig cannot build new glibc version {}; providing instead {}", .{ + log.warn("zig cannot build new glibc version {f}; providing instead {f}", .{ target_version, metadata.all_versions[latest_index], }); break :blk latest_index; diff --git a/src/libs/libtsan.zig b/src/libs/libtsan.zig index de5a770e30..d17baf8fa1 100644 --- a/src/libs/libtsan.zig +++ b/src/libs/libtsan.zig @@ -268,7 +268,7 @@ pub fn buildTsan(comp: *Compilation, prog_node: std.Progress.Node) BuildError!vo const skip_linker_dependencies = !target.os.tag.isDarwin(); const linker_allow_shlib_undefined = target.os.tag.isDarwin(); const install_name = if (target.os.tag.isDarwin()) - try std.fmt.allocPrintZ(arena, "@rpath/{s}", .{basename}) + try std.fmt.allocPrintSentinel(arena, "@rpath/{s}", .{basename}, 0) else null; // Workaround for https://github.com/llvm/llvm-project/issues/97627 diff --git a/src/libs/mingw.zig 
b/src/libs/mingw.zig index 00ab86e31f..1c2927eba0 100644 --- a/src/libs/mingw.zig +++ b/src/libs/mingw.zig @@ -306,7 +306,7 @@ pub fn buildImportLib(comp: *Compilation, lib_name: []const u8) !void { if (comp.verbose_cc) print: { std.debug.lockStdErr(); defer std.debug.unlockStdErr(); - const stderr = std.io.getStdErr().writer(); + const stderr = std.fs.File.stderr().deprecatedWriter(); nosuspend stderr.print("def file: {s}\n", .{def_file_path}) catch break :print; nosuspend stderr.print("include dir: {s}\n", .{include_dir}) catch break :print; nosuspend stderr.print("output path: {s}\n", .{def_final_path}) catch break :print; @@ -326,7 +326,7 @@ pub fn buildImportLib(comp: *Compilation, lib_name: []const u8) !void { for (aro_comp.diagnostics.list.items) |diagnostic| { if (diagnostic.kind == .@"fatal error" or diagnostic.kind == .@"error") { - aro.Diagnostics.render(&aro_comp, std.io.tty.detectConfig(std.io.getStdErr())); + aro.Diagnostics.render(&aro_comp, std.io.tty.detectConfig(std.fs.File.stderr())); return error.AroPreprocessorFailed; } } @@ -335,7 +335,7 @@ pub fn buildImportLib(comp: *Compilation, lib_name: []const u8) !void { // new scope to ensure definition file is written before passing the path to WriteImportLibrary const def_final_file = try o_dir.createFile(final_def_basename, .{ .truncate = true }); defer def_final_file.close(); - try pp.prettyPrintTokens(def_final_file.writer(), .result_only); + try pp.prettyPrintTokens(def_final_file.deprecatedWriter(), .result_only); } const lib_final_path = try std.fs.path.join(gpa, &.{ "o", &digest, final_lib_basename }); diff --git a/src/libs/netbsd.zig b/src/libs/netbsd.zig index 38570c43a6..094165b9c5 100644 --- a/src/libs/netbsd.zig +++ b/src/libs/netbsd.zig @@ -442,13 +442,13 @@ pub fn buildSharedObjects(comp: *Compilation, prog_node: std.Progress.Node) anye .lt => continue, .gt => { // TODO Expose via compile error mechanism instead of log. 
- log.warn("invalid target NetBSD libc version: {}", .{target_version}); + log.warn("invalid target NetBSD libc version: {f}", .{target_version}); return error.InvalidTargetLibCVersion; }, } } else blk: { const latest_index = metadata.all_versions.len - 1; - log.warn("zig cannot build new NetBSD libc version {}; providing instead {}", .{ + log.warn("zig cannot build new NetBSD libc version {f}; providing instead {f}", .{ target_version, metadata.all_versions[latest_index], }); break :blk latest_index; diff --git a/src/link.zig b/src/link.zig index 2a6e3b8031..0fbd4b28cf 100644 --- a/src/link.zig +++ b/src/link.zig @@ -323,7 +323,7 @@ pub const Diags = struct { const main_msg = try m; errdefer gpa.free(main_msg); try diags.msgs.ensureUnusedCapacity(gpa, 1); - const note = try std.fmt.allocPrint(gpa, "while parsing {}", .{path}); + const note = try std.fmt.allocPrint(gpa, "while parsing {f}", .{path}); errdefer gpa.free(note); const notes = try gpa.create([1]Msg); errdefer gpa.destroy(notes); @@ -838,8 +838,10 @@ pub const File = struct { const cached_pp_file_path = the_key.status.success.object_path; cached_pp_file_path.root_dir.handle.copyFile(cached_pp_file_path.sub_path, emit.root_dir.handle, emit.sub_path, .{}) catch |err| { const diags = &base.comp.link_diags; - return diags.fail("failed to copy '{'}' to '{'}': {s}", .{ - @as(Path, cached_pp_file_path), @as(Path, emit), @errorName(err), + return diags.fail("failed to copy '{f}' to '{f}': {s}", .{ + std.fmt.alt(@as(Path, cached_pp_file_path), .formatEscapeChar), + std.fmt.alt(@as(Path, emit), .formatEscapeChar), + @errorName(err), }); }; return; @@ -1351,7 +1353,7 @@ pub fn doPrelinkTask(comp: *Compilation, task: PrelinkTask) void { .search_strategy = .paths_first, }) catch |archive_err| switch (archive_err) { error.LinkFailure => return, // error reported via diags - else => |e| diags.addParseError(dso_path, "failed to parse archive {}: {s}", .{ archive_path, @errorName(e) }), + else => |e| 
diags.addParseError(dso_path, "failed to parse archive {f}: {s}", .{ archive_path, @errorName(e) }), }; }, error.LinkFailure => return, // error reported via diags @@ -1874,7 +1876,7 @@ pub fn resolveInputs( )) |lib_result| { switch (lib_result) { .ok => {}, - .no_match => fatal("{}: file not found", .{pq.path}), + .no_match => fatal("{f}: file not found", .{pq.path}), } } continue; @@ -1928,10 +1930,10 @@ fn resolveLibInput( .root_dir = lib_directory, .sub_path = try std.fmt.allocPrint(arena, "lib{s}.tbd", .{lib_name}), }; - try checked_paths.writer(gpa).print("\n {}", .{test_path}); + try checked_paths.writer(gpa).print("\n {f}", .{test_path}); var file = test_path.root_dir.handle.openFile(test_path.sub_path, .{}) catch |err| switch (err) { error.FileNotFound => break :tbd, - else => |e| fatal("unable to search for tbd library '{}': {s}", .{ test_path, @errorName(e) }), + else => |e| fatal("unable to search for tbd library '{f}': {s}", .{ test_path, @errorName(e) }), }; errdefer file.close(); return finishResolveLibInput(resolved_inputs, test_path, file, link_mode, name_query.query); @@ -1947,7 +1949,7 @@ fn resolveLibInput( }, }), }; - try checked_paths.writer(gpa).print("\n {}", .{test_path}); + try checked_paths.writer(gpa).print("\n {f}", .{test_path}); switch (try resolvePathInputLib(gpa, arena, unresolved_inputs, resolved_inputs, ld_script_bytes, target, .{ .path = test_path, .query = name_query.query, @@ -1964,10 +1966,10 @@ fn resolveLibInput( .root_dir = lib_directory, .sub_path = try std.fmt.allocPrint(arena, "lib{s}.so", .{lib_name}), }; - try checked_paths.writer(gpa).print("\n {}", .{test_path}); + try checked_paths.writer(gpa).print("\n {f}", .{test_path}); var file = test_path.root_dir.handle.openFile(test_path.sub_path, .{}) catch |err| switch (err) { error.FileNotFound => break :so, - else => |e| fatal("unable to search for so library '{}': {s}", .{ + else => |e| fatal("unable to search for so library '{f}': {s}", .{ test_path, @errorName(e), }), 
}; @@ -1982,10 +1984,10 @@ fn resolveLibInput( .root_dir = lib_directory, .sub_path = try std.fmt.allocPrint(arena, "lib{s}.a", .{lib_name}), }; - try checked_paths.writer(gpa).print("\n {}", .{test_path}); + try checked_paths.writer(gpa).print("\n {f}", .{test_path}); var file = test_path.root_dir.handle.openFile(test_path.sub_path, .{}) catch |err| switch (err) { error.FileNotFound => break :mingw, - else => |e| fatal("unable to search for static library '{}': {s}", .{ test_path, @errorName(e) }), + else => |e| fatal("unable to search for static library '{f}': {s}", .{ test_path, @errorName(e) }), }; errdefer file.close(); return finishResolveLibInput(resolved_inputs, test_path, file, link_mode, name_query.query); @@ -2037,7 +2039,7 @@ fn resolvePathInput( .shared_library => return try resolvePathInputLib(gpa, arena, unresolved_inputs, resolved_inputs, ld_script_bytes, target, pq, .dynamic, color), .object => { var file = pq.path.root_dir.handle.openFile(pq.path.sub_path, .{}) catch |err| - fatal("failed to open object {}: {s}", .{ pq.path, @errorName(err) }); + fatal("failed to open object {f}: {s}", .{ pq.path, @errorName(err) }); errdefer file.close(); try resolved_inputs.append(gpa, .{ .object = .{ .path = pq.path, @@ -2049,7 +2051,7 @@ fn resolvePathInput( }, .res => { var file = pq.path.root_dir.handle.openFile(pq.path.sub_path, .{}) catch |err| - fatal("failed to open windows resource {}: {s}", .{ pq.path, @errorName(err) }); + fatal("failed to open windows resource {f}: {s}", .{ pq.path, @errorName(err) }); errdefer file.close(); try resolved_inputs.append(gpa, .{ .res = .{ .path = pq.path, @@ -2057,7 +2059,7 @@ fn resolvePathInput( } }); return null; }, - else => fatal("{}: unrecognized file extension", .{pq.path}), + else => fatal("{f}: unrecognized file extension", .{pq.path}), } } @@ -2086,14 +2088,14 @@ fn resolvePathInputLib( }) { var file = test_path.root_dir.handle.openFile(test_path.sub_path, .{}) catch |err| switch (err) { error.FileNotFound => 
return .no_match, - else => |e| fatal("unable to search for {s} library '{'}': {s}", .{ - @tagName(link_mode), test_path, @errorName(e), + else => |e| fatal("unable to search for {s} library '{f}': {s}", .{ + @tagName(link_mode), std.fmt.alt(test_path, .formatEscapeChar), @errorName(e), }), }; errdefer file.close(); try ld_script_bytes.resize(gpa, @max(std.elf.MAGIC.len, std.elf.ARMAG.len)); - const n = file.preadAll(ld_script_bytes.items, 0) catch |err| fatal("failed to read '{'}': {s}", .{ - test_path, @errorName(err), + const n = file.preadAll(ld_script_bytes.items, 0) catch |err| fatal("failed to read '{f}': {s}", .{ + std.fmt.alt(test_path, .formatEscapeChar), @errorName(err), }); const buf = ld_script_bytes.items[0..n]; if (mem.startsWith(u8, buf, std.elf.MAGIC) or mem.startsWith(u8, buf, std.elf.ARMAG)) { @@ -2101,14 +2103,14 @@ fn resolvePathInputLib( return finishResolveLibInput(resolved_inputs, test_path, file, link_mode, pq.query); } const stat = file.stat() catch |err| - fatal("failed to stat {}: {s}", .{ test_path, @errorName(err) }); + fatal("failed to stat {f}: {s}", .{ test_path, @errorName(err) }); const size = std.math.cast(u32, stat.size) orelse - fatal("{}: linker script too big", .{test_path}); + fatal("{f}: linker script too big", .{test_path}); try ld_script_bytes.resize(gpa, size); const buf2 = ld_script_bytes.items[n..]; const n2 = file.preadAll(buf2, n) catch |err| - fatal("failed to read {}: {s}", .{ test_path, @errorName(err) }); - if (n2 != buf2.len) fatal("failed to read {}: unexpected end of file", .{test_path}); + fatal("failed to read {f}: {s}", .{ test_path, @errorName(err) }); + if (n2 != buf2.len) fatal("failed to read {f}: unexpected end of file", .{test_path}); var diags = Diags.init(gpa); defer diags.deinit(); const ld_script_result = LdScript.parse(gpa, &diags, test_path, ld_script_bytes.items); @@ -2128,7 +2130,7 @@ fn resolvePathInputLib( } var ld_script = ld_script_result catch |err| - fatal("{}: failed to parse linker 
script: {s}", .{ test_path, @errorName(err) }); + fatal("{f}: failed to parse linker script: {s}", .{ test_path, @errorName(err) }); defer ld_script.deinit(gpa); try unresolved_inputs.ensureUnusedCapacity(gpa, ld_script.args.len); @@ -2159,7 +2161,7 @@ fn resolvePathInputLib( var file = test_path.root_dir.handle.openFile(test_path.sub_path, .{}) catch |err| switch (err) { error.FileNotFound => return .no_match, - else => |e| fatal("unable to search for {s} library {}: {s}", .{ + else => |e| fatal("unable to search for {s} library {f}: {s}", .{ @tagName(link_mode), test_path, @errorName(e), }), }; @@ -2192,19 +2194,19 @@ pub fn openDso(path: Path, needed: bool, weak: bool, reexport: bool) !Input.Dso pub fn openObjectInput(diags: *Diags, path: Path) error{LinkFailure}!Input { return .{ .object = openObject(path, false, false) catch |err| { - return diags.failParse(path, "failed to open {}: {s}", .{ path, @errorName(err) }); + return diags.failParse(path, "failed to open {f}: {s}", .{ path, @errorName(err) }); } }; } pub fn openArchiveInput(diags: *Diags, path: Path, must_link: bool, hidden: bool) error{LinkFailure}!Input { return .{ .archive = openObject(path, must_link, hidden) catch |err| { - return diags.failParse(path, "failed to open {}: {s}", .{ path, @errorName(err) }); + return diags.failParse(path, "failed to open {f}: {s}", .{ path, @errorName(err) }); } }; } pub fn openDsoInput(diags: *Diags, path: Path, needed: bool, weak: bool, reexport: bool) error{LinkFailure}!Input { return .{ .dso = openDso(path, needed, weak, reexport) catch |err| { - return diags.failParse(path, "failed to open {}: {s}", .{ path, @errorName(err) }); + return diags.failParse(path, "failed to open {f}: {s}", .{ path, @errorName(err) }); } }; } diff --git a/src/link/C.zig b/src/link/C.zig index 1ea130f6b1..941d33e5f7 100644 --- a/src/link/C.zig +++ b/src/link/C.zig @@ -25,34 +25,34 @@ base: link.File, /// This linker backend does not try to incrementally link output C source code. 
/// Instead, it tracks all declarations in this table, and iterates over it /// in the flush function, stitching pre-rendered pieces of C code together. -navs: std.AutoArrayHashMapUnmanaged(InternPool.Nav.Index, AvBlock) = .empty, +navs: std.AutoArrayHashMapUnmanaged(InternPool.Nav.Index, AvBlock), /// All the string bytes of rendered C code, all squished into one array. /// While in progress, a separate buffer is used, and then when finished, the /// buffer is copied into this one. -string_bytes: std.ArrayListUnmanaged(u8) = .empty, +string_bytes: std.ArrayListUnmanaged(u8), /// Tracks all the anonymous decls that are used by all the decls so they can /// be rendered during flush(). -uavs: std.AutoArrayHashMapUnmanaged(InternPool.Index, AvBlock) = .empty, +uavs: std.AutoArrayHashMapUnmanaged(InternPool.Index, AvBlock), /// Sparse set of uavs that are overaligned. Underaligned anon decls are /// lowered the same as ABI-aligned anon decls. The keys here are a subset of /// the keys of `uavs`. -aligned_uavs: std.AutoArrayHashMapUnmanaged(InternPool.Index, Alignment) = .empty, +aligned_uavs: std.AutoArrayHashMapUnmanaged(InternPool.Index, Alignment), -exported_navs: std.AutoArrayHashMapUnmanaged(InternPool.Nav.Index, ExportedBlock) = .empty, -exported_uavs: std.AutoArrayHashMapUnmanaged(InternPool.Index, ExportedBlock) = .empty, +exported_navs: std.AutoArrayHashMapUnmanaged(InternPool.Nav.Index, ExportedBlock), +exported_uavs: std.AutoArrayHashMapUnmanaged(InternPool.Index, ExportedBlock), /// Optimization, `updateDecl` reuses this buffer rather than creating a new /// one with every call. -fwd_decl_buf: std.ArrayListUnmanaged(u8) = .empty, +fwd_decl_buf: []u8, /// Optimization, `updateDecl` reuses this buffer rather than creating a new /// one with every call. -code_buf: std.ArrayListUnmanaged(u8) = .empty, +code_header_buf: []u8, +/// Optimization, `updateDecl` reuses this buffer rather than creating a new +/// one with every call. 
+code_buf: []u8, /// Optimization, `flush` reuses this buffer rather than creating a new /// one with every call. -lazy_fwd_decl_buf: std.ArrayListUnmanaged(u8) = .empty, -/// Optimization, `flush` reuses this buffer rather than creating a new -/// one with every call. -lazy_code_buf: std.ArrayListUnmanaged(u8) = .empty, +scratch_buf: []u32, /// A reference into `string_bytes`. const String = extern struct { @@ -63,15 +63,23 @@ const String = extern struct { .start = 0, .len = 0, }; + + fn concat(lhs: String, rhs: String) String { + assert(lhs.start + lhs.len == rhs.start); + return .{ + .start = lhs.start, + .len = lhs.len + rhs.len, + }; + } }; /// Per-declaration data. pub const AvBlock = struct { - code: String = String.empty, - fwd_decl: String = String.empty, + fwd_decl: String = .empty, + code: String = .empty, /// Each `Decl` stores a set of used `CType`s. In `flush()`, we iterate /// over each `Decl` and generate the definition for each used `CType` once. - ctype_pool: codegen.CType.Pool = codegen.CType.Pool.empty, + ctype_pool: codegen.CType.Pool = .empty, /// May contain string references to ctype_pool lazy_fns: codegen.LazyFnMap = .{}, @@ -84,7 +92,7 @@ pub const AvBlock = struct { /// Per-exported-symbol data. 
pub const ExportedBlock = struct { - fwd_decl: String = String.empty, + fwd_decl: String = .empty, }; pub fn getString(this: C, s: String) []const u8 { @@ -147,6 +155,16 @@ pub fn createEmpty( .file = file, .build_id = options.build_id, }, + .navs = .empty, + .string_bytes = .empty, + .uavs = .empty, + .aligned_uavs = .empty, + .exported_navs = .empty, + .exported_uavs = .empty, + .fwd_decl_buf = &.{}, + .code_header_buf = &.{}, + .code_buf = &.{}, + .scratch_buf = &.{}, }; return c_file; @@ -170,10 +188,10 @@ pub fn deinit(self: *C) void { self.exported_uavs.deinit(gpa); self.string_bytes.deinit(gpa); - self.fwd_decl_buf.deinit(gpa); - self.code_buf.deinit(gpa); - self.lazy_fwd_decl_buf.deinit(gpa); - self.lazy_code_buf.deinit(gpa); + gpa.free(self.fwd_decl_buf); + gpa.free(self.code_header_buf); + gpa.free(self.code_buf); + gpa.free(self.scratch_buf); } pub fn updateFunc( @@ -194,20 +212,17 @@ pub fn updateFunc( .ctype_pool = mir.c.ctype_pool.move(), .lazy_fns = mir.c.lazy_fns.move(), }; - gop.value_ptr.code = try self.addString(mir.c.code); gop.value_ptr.fwd_decl = try self.addString(mir.c.fwd_decl); + const code_header = try self.addString(mir.c.code_header); + const code = try self.addString(mir.c.code); + gop.value_ptr.code = code_header.concat(code); try self.addUavsFromCodegen(&mir.c.uavs); } -fn updateUav(self: *C, pt: Zcu.PerThread, i: usize) !void { +fn updateUav(self: *C, pt: Zcu.PerThread, i: usize) link.File.FlushError!void { const gpa = self.base.comp.gpa; const uav = self.uavs.keys()[i]; - const fwd_decl = &self.fwd_decl_buf; - const code = &self.code_buf; - fwd_decl.clearRetainingCapacity(); - code.clearRetainingCapacity(); - var object: codegen.Object = .{ .dg = .{ .gpa = gpa, @@ -217,21 +232,24 @@ fn updateUav(self: *C, pt: Zcu.PerThread, i: usize) !void { .pass = .{ .uav = uav }, .is_naked_fn = false, .expected_block = null, - .fwd_decl = fwd_decl.toManaged(gpa), - .ctype_pool = codegen.CType.Pool.empty, - .scratch = .{}, + .fwd_decl = 
undefined, + .ctype_pool = .empty, + .scratch = .initBuffer(self.scratch_buf), .uavs = .empty, }, - .code = code.toManaged(gpa), - .indent_writer = undefined, // set later so we can get a pointer to object.code + .code_header = undefined, + .code = undefined, + .indent_counter = 0, }; - object.indent_writer = .{ .underlying_writer = object.code.writer() }; + object.dg.fwd_decl = .initOwnedSlice(gpa, self.fwd_decl_buf); + object.code = .initOwnedSlice(gpa, self.code_buf); defer { object.dg.uavs.deinit(gpa); - fwd_decl.* = object.dg.fwd_decl.moveToUnmanaged(); object.dg.ctype_pool.deinit(object.dg.gpa); - object.dg.scratch.deinit(gpa); - code.* = object.code.moveToUnmanaged(); + + self.fwd_decl_buf = object.dg.fwd_decl.toArrayList().allocatedSlice(); + self.code_buf = object.code.toArrayList().allocatedSlice(); + self.scratch_buf = object.dg.scratch.allocatedSlice(); } try object.dg.ctype_pool.init(gpa); @@ -243,15 +261,15 @@ fn updateUav(self: *C, pt: Zcu.PerThread, i: usize) !void { //try zcu.failed_decls.put(gpa, decl_index, object.dg.error_msg.?); //return; }, - else => |e| return e, + error.WriteFailed, error.OutOfMemory => return error.OutOfMemory, }; try self.addUavsFromCodegen(&object.dg.uavs); object.dg.ctype_pool.freeUnusedCapacity(gpa); self.uavs.values()[i] = .{ - .code = try self.addString(object.code.items), - .fwd_decl = try self.addString(object.dg.fwd_decl.items), + .fwd_decl = try self.addString(object.dg.fwd_decl.getWritten()), + .code = try self.addString(object.code.getWritten()), .ctype_pool = object.dg.ctype_pool.move(), }; } @@ -277,12 +295,8 @@ pub fn updateNav(self: *C, pt: Zcu.PerThread, nav_index: InternPool.Nav.Index) l errdefer _ = self.navs.pop(); if (!gop.found_existing) gop.value_ptr.* = .{}; const ctype_pool = &gop.value_ptr.ctype_pool; - const fwd_decl = &self.fwd_decl_buf; - const code = &self.code_buf; try ctype_pool.init(gpa); ctype_pool.clearRetainingCapacity(); - fwd_decl.clearRetainingCapacity(); - 
code.clearRetainingCapacity(); var object: codegen.Object = .{ .dg = .{ @@ -293,22 +307,25 @@ pub fn updateNav(self: *C, pt: Zcu.PerThread, nav_index: InternPool.Nav.Index) l .pass = .{ .nav = nav_index }, .is_naked_fn = false, .expected_block = null, - .fwd_decl = fwd_decl.toManaged(gpa), + .fwd_decl = undefined, .ctype_pool = ctype_pool.*, - .scratch = .{}, + .scratch = .initBuffer(self.scratch_buf), .uavs = .empty, }, - .code = code.toManaged(gpa), - .indent_writer = undefined, // set later so we can get a pointer to object.code + .code_header = undefined, + .code = undefined, + .indent_counter = 0, }; - object.indent_writer = .{ .underlying_writer = object.code.writer() }; + object.dg.fwd_decl = .initOwnedSlice(gpa, self.fwd_decl_buf); + object.code = .initOwnedSlice(gpa, self.code_buf); defer { object.dg.uavs.deinit(gpa); - fwd_decl.* = object.dg.fwd_decl.moveToUnmanaged(); ctype_pool.* = object.dg.ctype_pool.move(); ctype_pool.freeUnusedCapacity(gpa); - object.dg.scratch.deinit(gpa); - code.* = object.code.moveToUnmanaged(); + + self.fwd_decl_buf = object.dg.fwd_decl.toArrayList().allocatedSlice(); + self.code_buf = object.code.toArrayList().allocatedSlice(); + self.scratch_buf = object.dg.scratch.allocatedSlice(); } codegen.genDecl(&object) catch |err| switch (err) { @@ -316,10 +333,10 @@ pub fn updateNav(self: *C, pt: Zcu.PerThread, nav_index: InternPool.Nav.Index) l error.CodegenFail => return, error.OutOfMemory => |e| return e, }, - else => |e| return e, + error.WriteFailed, error.OutOfMemory => return error.OutOfMemory, }; - gop.value_ptr.code = try self.addString(object.code.items); - gop.value_ptr.fwd_decl = try self.addString(object.dg.fwd_decl.items); + gop.value_ptr.fwd_decl = try self.addString(object.dg.fwd_decl.getWritten()); + gop.value_ptr.code = try self.addString(object.code.getWritten()); try self.addUavsFromCodegen(&object.dg.uavs); } @@ -331,19 +348,14 @@ pub fn updateLineNumber(self: *C, pt: Zcu.PerThread, ti_id: InternPool.TrackedIn _ = 
ti_id; } -fn abiDefines(self: *C, target: *const std.Target) !std.ArrayList(u8) { - const gpa = self.base.comp.gpa; - var defines = std.ArrayList(u8).init(gpa); - errdefer defines.deinit(); - const writer = defines.writer(); +fn abiDefines(w: *std.io.Writer, target: *const std.Target) !void { switch (target.abi) { - .msvc, .itanium => try writer.writeAll("#define ZIG_TARGET_ABI_MSVC\n"), + .msvc, .itanium => try w.writeAll("#define ZIG_TARGET_ABI_MSVC\n"), else => {}, } - try writer.print("#define ZIG_TARGET_MAX_INT_ALIGNMENT {d}\n", .{ + try w.print("#define ZIG_TARGET_MAX_INT_ALIGNMENT {d}\n", .{ target.cMaxIntAlignment(), }); - return defines; } pub fn flush(self: *C, arena: Allocator, tid: Zcu.PerThread.Id, prog_node: std.Progress.Node) link.File.FlushError!void { @@ -374,37 +386,47 @@ pub fn flush(self: *C, arena: Allocator, tid: Zcu.PerThread.Id, prog_node: std.P // emit-h is in `flushEmitH` below. var f: Flush = .{ - .ctype_pool = codegen.CType.Pool.empty, - .lazy_ctype_pool = codegen.CType.Pool.empty, + .ctype_pool = .empty, + .ctype_global_from_decl_map = .empty, + .ctypes = .empty, + + .lazy_ctype_pool = .empty, + .lazy_fns = .empty, + .lazy_fwd_decl = .empty, + .lazy_code = .empty, + + .all_buffers = .empty, + .file_size = 0, }; defer f.deinit(gpa); - const abi_defines = try self.abiDefines(zcu.getTarget()); - defer abi_defines.deinit(); + var abi_defines_aw: std.io.Writer.Allocating = .init(gpa); + defer abi_defines_aw.deinit(); + abiDefines(&abi_defines_aw.writer, zcu.getTarget()) catch |err| switch (err) { + error.WriteFailed => return error.OutOfMemory, + }; // Covers defines, zig.h, ctypes, asm, lazy fwd. 
try f.all_buffers.ensureUnusedCapacity(gpa, 5); - f.appendBufAssumeCapacity(abi_defines.items); + f.appendBufAssumeCapacity(abi_defines_aw.getWritten()); f.appendBufAssumeCapacity(zig_h); const ctypes_index = f.all_buffers.items.len; f.all_buffers.items.len += 1; - { - var asm_buf = f.asm_buf.toManaged(gpa); - defer f.asm_buf = asm_buf.moveToUnmanaged(); - try codegen.genGlobalAsm(zcu, asm_buf.writer()); - f.appendBufAssumeCapacity(asm_buf.items); - } + var asm_aw: std.io.Writer.Allocating = .init(gpa); + defer asm_aw.deinit(); + codegen.genGlobalAsm(zcu, &asm_aw.writer) catch |err| switch (err) { + error.WriteFailed => return error.OutOfMemory, + }; + f.appendBufAssumeCapacity(asm_aw.getWritten()); const lazy_index = f.all_buffers.items.len; f.all_buffers.items.len += 1; - self.lazy_fwd_decl_buf.clearRetainingCapacity(); - self.lazy_code_buf.clearRetainingCapacity(); try f.lazy_ctype_pool.init(gpa); - try self.flushErrDecls(pt, &f.lazy_ctype_pool); + try self.flushErrDecls(pt, &f); // Unlike other backends, the .c code we are emitting has order-dependent decls. // `CType`s, forward decls, and non-functions first. @@ -462,22 +484,15 @@ pub fn flush(self: *C, arena: Allocator, tid: Zcu.PerThread.Id, prog_node: std.P } } - f.all_buffers.items[ctypes_index] = .{ - .base = if (f.ctypes_buf.items.len > 0) f.ctypes_buf.items.ptr else "", - .len = f.ctypes_buf.items.len, - }; - f.file_size += f.ctypes_buf.items.len; + f.all_buffers.items[ctypes_index] = f.ctypes.items; + f.file_size += f.ctypes.items.len; - const lazy_fwd_decl_len = self.lazy_fwd_decl_buf.items.len; - f.all_buffers.items[lazy_index] = .{ - .base = if (lazy_fwd_decl_len > 0) self.lazy_fwd_decl_buf.items.ptr else "", - .len = lazy_fwd_decl_len, - }; - f.file_size += lazy_fwd_decl_len; + f.all_buffers.items[lazy_index] = f.lazy_fwd_decl.items; + f.file_size += f.lazy_fwd_decl.items.len; // Now the code. 
try f.all_buffers.ensureUnusedCapacity(gpa, 1 + (self.uavs.count() + self.navs.count()) * 2); - f.appendBufAssumeCapacity(self.lazy_code_buf.items); + f.appendBufAssumeCapacity(f.lazy_code.items); for (self.uavs.keys(), self.uavs.values()) |uav, av_block| f.appendCodeAssumeCapacity( if (self.exported_uavs.contains(uav)) .default else switch (ip.indexToKey(uav)) { .@"extern" => .zig_extern, @@ -493,31 +508,35 @@ pub fn flush(self: *C, arena: Allocator, tid: Zcu.PerThread.Id, prog_node: std.P const file = self.base.file.?; file.setEndPos(f.file_size) catch |err| return diags.fail("failed to allocate file: {s}", .{@errorName(err)}); - file.pwritevAll(f.all_buffers.items, 0) catch |err| return diags.fail("failed to write to '{'}': {s}", .{ - self.base.emit, @errorName(err), - }); + var fw = file.writer(&.{}); + var w = &fw.interface; + w.writeVecAll(f.all_buffers.items) catch |err| switch (err) { + error.WriteFailed => return diags.fail("failed to write to '{f}': {s}", .{ + std.fmt.alt(self.base.emit, .formatEscapeChar), @errorName(fw.err.?), + }), + }; } const Flush = struct { ctype_pool: codegen.CType.Pool, - ctype_global_from_decl_map: std.ArrayListUnmanaged(codegen.CType) = .empty, - ctypes_buf: std.ArrayListUnmanaged(u8) = .empty, + ctype_global_from_decl_map: std.ArrayListUnmanaged(codegen.CType), + ctypes: std.ArrayListUnmanaged(u8), lazy_ctype_pool: codegen.CType.Pool, - lazy_fns: LazyFns = .{}, - - asm_buf: std.ArrayListUnmanaged(u8) = .empty, + lazy_fns: LazyFns, + lazy_fwd_decl: std.ArrayListUnmanaged(u8), + lazy_code: std.ArrayListUnmanaged(u8), /// We collect a list of buffers to write, and write them all at once with pwritev 😎 - all_buffers: std.ArrayListUnmanaged(std.posix.iovec_const) = .empty, + all_buffers: std.ArrayListUnmanaged([]const u8), /// Keeps track of the total bytes of `all_buffers`. 
- file_size: u64 = 0, + file_size: u64, const LazyFns = std.AutoHashMapUnmanaged(codegen.LazyFnKey, void); fn appendBufAssumeCapacity(f: *Flush, buf: []const u8) void { if (buf.len == 0) return; - f.all_buffers.appendAssumeCapacity(.{ .base = buf.ptr, .len = buf.len }); + f.all_buffers.appendAssumeCapacity(buf); f.file_size += buf.len; } @@ -532,14 +551,15 @@ const Flush = struct { } fn deinit(f: *Flush, gpa: Allocator) void { - f.all_buffers.deinit(gpa); - f.asm_buf.deinit(gpa); - f.lazy_fns.deinit(gpa); - f.lazy_ctype_pool.deinit(gpa); - f.ctypes_buf.deinit(gpa); + f.ctype_pool.deinit(gpa); assert(f.ctype_global_from_decl_map.items.len == 0); f.ctype_global_from_decl_map.deinit(gpa); - f.ctype_pool.deinit(gpa); + f.ctypes.deinit(gpa); + f.lazy_ctype_pool.deinit(gpa); + f.lazy_fns.deinit(gpa); + f.lazy_fwd_decl.deinit(gpa); + f.lazy_code.deinit(gpa); + f.all_buffers.deinit(gpa); } }; @@ -562,9 +582,9 @@ fn flushCTypes( try global_from_decl_map.ensureTotalCapacity(gpa, decl_ctype_pool.items.len); defer global_from_decl_map.clearRetainingCapacity(); - var ctypes_buf = f.ctypes_buf.toManaged(gpa); - defer f.ctypes_buf = ctypes_buf.moveToUnmanaged(); - const writer = ctypes_buf.writer(); + var ctypes_aw: std.io.Writer.Allocating = .fromArrayList(gpa, &f.ctypes); + const ctypes_bw = &ctypes_aw.writer; + defer f.ctypes = ctypes_aw.toArrayList(); for (0..decl_ctype_pool.items.len) |decl_ctype_pool_index| { const PoolAdapter = struct { @@ -591,26 +611,25 @@ fn flushCTypes( PoolAdapter{ .global_from_decl_map = global_from_decl_map.items }, ); global_from_decl_map.appendAssumeCapacity(global_ctype); - try codegen.genTypeDecl( + codegen.genTypeDecl( zcu, - writer, + ctypes_bw, global_ctype_pool, global_ctype, pass, decl_ctype_pool, decl_ctype, found_existing, - ); + ) catch |err| switch (err) { + error.WriteFailed => return error.OutOfMemory, + }; } } -fn flushErrDecls(self: *C, pt: Zcu.PerThread, ctype_pool: *codegen.CType.Pool) FlushDeclError!void { +fn flushErrDecls(self: 
*C, pt: Zcu.PerThread, f: *Flush) FlushDeclError!void { const gpa = self.base.comp.gpa; - const fwd_decl = &self.lazy_fwd_decl_buf; - const code = &self.lazy_code_buf; - - var object = codegen.Object{ + var object: codegen.Object = .{ .dg = .{ .gpa = gpa, .pt = pt, @@ -619,27 +638,30 @@ fn flushErrDecls(self: *C, pt: Zcu.PerThread, ctype_pool: *codegen.CType.Pool) F .pass = .flush, .is_naked_fn = false, .expected_block = null, - .fwd_decl = fwd_decl.toManaged(gpa), - .ctype_pool = ctype_pool.*, - .scratch = .{}, + .fwd_decl = undefined, + .ctype_pool = f.lazy_ctype_pool, + .scratch = .initBuffer(self.scratch_buf), .uavs = .empty, }, - .code = code.toManaged(gpa), - .indent_writer = undefined, // set later so we can get a pointer to object.code + .code_header = undefined, + .code = undefined, + .indent_counter = 0, }; - object.indent_writer = .{ .underlying_writer = object.code.writer() }; + object.dg.fwd_decl = .fromArrayList(gpa, &f.lazy_fwd_decl); + object.code = .fromArrayList(gpa, &f.lazy_code); defer { object.dg.uavs.deinit(gpa); - fwd_decl.* = object.dg.fwd_decl.moveToUnmanaged(); - ctype_pool.* = object.dg.ctype_pool.move(); - ctype_pool.freeUnusedCapacity(gpa); - object.dg.scratch.deinit(gpa); - code.* = object.code.moveToUnmanaged(); + f.lazy_ctype_pool = object.dg.ctype_pool.move(); + f.lazy_ctype_pool.freeUnusedCapacity(gpa); + + f.lazy_fwd_decl = object.dg.fwd_decl.toArrayList(); + f.lazy_code = object.code.toArrayList(); + self.scratch_buf = object.dg.scratch.allocatedSlice(); } codegen.genErrDecls(&object) catch |err| switch (err) { error.AnalysisFail => unreachable, - else => |e| return e, + error.WriteFailed, error.OutOfMemory => return error.OutOfMemory, }; try self.addUavsFromCodegen(&object.dg.uavs); @@ -649,16 +671,13 @@ fn flushLazyFn( self: *C, pt: Zcu.PerThread, mod: *Module, - ctype_pool: *codegen.CType.Pool, + f: *Flush, lazy_ctype_pool: *const codegen.CType.Pool, lazy_fn: codegen.LazyFnMap.Entry, ) FlushDeclError!void { const gpa = 
self.base.comp.gpa; - const fwd_decl = &self.lazy_fwd_decl_buf; - const code = &self.lazy_code_buf; - - var object = codegen.Object{ + var object: codegen.Object = .{ .dg = .{ .gpa = gpa, .pt = pt, @@ -667,29 +686,32 @@ fn flushLazyFn( .pass = .flush, .is_naked_fn = false, .expected_block = null, - .fwd_decl = fwd_decl.toManaged(gpa), - .ctype_pool = ctype_pool.*, - .scratch = .{}, + .fwd_decl = undefined, + .ctype_pool = f.lazy_ctype_pool, + .scratch = .initBuffer(self.scratch_buf), .uavs = .empty, }, - .code = code.toManaged(gpa), - .indent_writer = undefined, // set later so we can get a pointer to object.code + .code_header = undefined, + .code = undefined, + .indent_counter = 0, }; - object.indent_writer = .{ .underlying_writer = object.code.writer() }; + object.dg.fwd_decl = .fromArrayList(gpa, &f.lazy_fwd_decl); + object.code = .fromArrayList(gpa, &f.lazy_code); defer { // If this assert trips just handle the anon_decl_deps the same as // `updateFunc()` does. assert(object.dg.uavs.count() == 0); - fwd_decl.* = object.dg.fwd_decl.moveToUnmanaged(); - ctype_pool.* = object.dg.ctype_pool.move(); - ctype_pool.freeUnusedCapacity(gpa); - object.dg.scratch.deinit(gpa); - code.* = object.code.moveToUnmanaged(); + f.lazy_ctype_pool = object.dg.ctype_pool.move(); + f.lazy_ctype_pool.freeUnusedCapacity(gpa); + + f.lazy_fwd_decl = object.dg.fwd_decl.toArrayList(); + f.lazy_code = object.code.toArrayList(); + self.scratch_buf = object.dg.scratch.allocatedSlice(); } codegen.genLazyFn(&object, lazy_ctype_pool, lazy_fn) catch |err| switch (err) { error.AnalysisFail => unreachable, - else => |e| return e, + error.WriteFailed, error.OutOfMemory => return error.OutOfMemory, }; } @@ -709,7 +731,7 @@ fn flushLazyFns( const gop = f.lazy_fns.getOrPutAssumeCapacity(entry.key_ptr.*); if (gop.found_existing) continue; gop.value_ptr.* = {}; - try self.flushLazyFn(pt, mod, &f.lazy_ctype_pool, lazy_ctype_pool, entry); + try self.flushLazyFn(pt, mod, f, lazy_ctype_pool, entry); } } @@ 
-802,8 +824,6 @@ pub fn updateExports( }, }; const ctype_pool = &decl_block.ctype_pool; - const fwd_decl = &self.fwd_decl_buf; - fwd_decl.clearRetainingCapacity(); var dg: codegen.DeclGen = .{ .gpa = gpa, .pt = pt, @@ -812,20 +832,24 @@ pub fn updateExports( .pass = pass, .is_naked_fn = false, .expected_block = null, - .fwd_decl = fwd_decl.toManaged(gpa), + .fwd_decl = undefined, .ctype_pool = decl_block.ctype_pool, - .scratch = .{}, + .scratch = .initBuffer(self.scratch_buf), .uavs = .empty, }; + dg.fwd_decl = .initOwnedSlice(gpa, self.fwd_decl_buf); defer { assert(dg.uavs.count() == 0); - fwd_decl.* = dg.fwd_decl.moveToUnmanaged(); ctype_pool.* = dg.ctype_pool.move(); ctype_pool.freeUnusedCapacity(gpa); - dg.scratch.deinit(gpa); + + self.fwd_decl_buf = dg.fwd_decl.toArrayList().allocatedSlice(); + self.scratch_buf = dg.scratch.allocatedSlice(); } - try codegen.genExports(&dg, exported, export_indices); - exported_block.* = .{ .fwd_decl = try self.addString(dg.fwd_decl.items) }; + codegen.genExports(&dg, exported, export_indices) catch |err| switch (err) { + error.WriteFailed, error.OutOfMemory => return error.OutOfMemory, + }; + exported_block.* = .{ .fwd_decl = try self.addString(dg.fwd_decl.getWritten()) }; } pub fn deleteExport( diff --git a/src/link/Coff.zig b/src/link/Coff.zig index ea5d6a901c..dd8ddd30f4 100644 --- a/src/link/Coff.zig +++ b/src/link/Coff.zig @@ -830,8 +830,8 @@ fn debugMem(allocator: Allocator, handle: std.process.Child.Id, pvaddr: std.os.w const buffer = try allocator.alloc(u8, code.len); defer allocator.free(buffer); const memread = try std.os.windows.ReadProcessMemory(handle, pvaddr, buffer); - log.debug("to write: {x}", .{std.fmt.fmtSliceHexLower(code)}); - log.debug("in memory: {x}", .{std.fmt.fmtSliceHexLower(memread)}); + log.debug("to write: {x}", .{code}); + log.debug("in memory: {x}", .{memread}); } fn writeMemProtected(handle: std.process.Child.Id, pvaddr: std.os.windows.LPVOID, code: []const u8) !void { @@ -1213,7 +1213,7 @@ fn 
updateLazySymbolAtom( var code_buffer: std.ArrayListUnmanaged(u8) = .empty; defer code_buffer.deinit(gpa); - const name = try allocPrint(gpa, "__lazy_{s}_{}", .{ + const name = try allocPrint(gpa, "__lazy_{s}_{f}", .{ @tagName(sym.kind), Type.fromInterned(sym.ty).fmt(pt), }); @@ -1333,7 +1333,7 @@ fn updateNavCode( const ip = &zcu.intern_pool; const nav = ip.getNav(nav_index); - log.debug("updateNavCode {} 0x{x}", .{ nav.fqn.fmt(ip), nav_index }); + log.debug("updateNavCode {f} 0x{x}", .{ nav.fqn.fmt(ip), nav_index }); const target = &zcu.navFileScope(nav_index).mod.?.resolved_target.result; const required_alignment = switch (pt.navAlignment(nav_index)) { @@ -1361,7 +1361,7 @@ fn updateNavCode( error.OutOfMemory => return error.OutOfMemory, else => |e| return coff.base.cgFail(nav_index, "failed to grow atom: {s}", .{@errorName(e)}), }; - log.debug("growing {} from 0x{x} to 0x{x}", .{ nav.fqn.fmt(ip), sym.value, vaddr }); + log.debug("growing {f} from 0x{x} to 0x{x}", .{ nav.fqn.fmt(ip), sym.value, vaddr }); log.debug(" (required alignment 0x{x}", .{required_alignment}); if (vaddr != sym.value) { @@ -1389,7 +1389,7 @@ fn updateNavCode( else => |e| return coff.base.cgFail(nav_index, "failed to allocate atom: {s}", .{@errorName(e)}), }; errdefer coff.freeAtom(atom_index); - log.debug("allocated atom for {} at 0x{x}", .{ nav.fqn.fmt(ip), vaddr }); + log.debug("allocated atom for {f} at 0x{x}", .{ nav.fqn.fmt(ip), vaddr }); coff.getAtomPtr(atom_index).size = code_len; sym.value = vaddr; @@ -1454,7 +1454,7 @@ pub fn updateExports( for (export_indices) |export_idx| { const exp = export_idx.ptr(zcu); - log.debug("adding new export '{}'", .{exp.opts.name.fmt(&zcu.intern_pool)}); + log.debug("adding new export '{f}'", .{exp.opts.name.fmt(&zcu.intern_pool)}); if (exp.opts.section.toSlice(&zcu.intern_pool)) |section_name| { if (!mem.eql(u8, section_name, ".text")) { @@ -1530,7 +1530,7 @@ pub fn deleteExport( const gpa = coff.base.comp.gpa; const sym_loc = SymbolWithLoc{ 
.sym_index = sym_index.*, .file = null }; const sym = coff.getSymbolPtr(sym_loc); - log.debug("deleting export '{}'", .{name.fmt(&zcu.intern_pool)}); + log.debug("deleting export '{f}'", .{name.fmt(&zcu.intern_pool)}); assert(sym.storage_class == .EXTERNAL and sym.section_number != .UNDEFINED); sym.* = .{ .name = [_]u8{0} ** 8, @@ -1748,7 +1748,7 @@ pub fn getNavVAddr( const zcu = pt.zcu; const ip = &zcu.intern_pool; const nav = ip.getNav(nav_index); - log.debug("getNavVAddr {}({d})", .{ nav.fqn.fmt(ip), nav_index }); + log.debug("getNavVAddr {f}({d})", .{ nav.fqn.fmt(ip), nav_index }); const sym_index = if (nav.getExtern(ip)) |e| try coff.getGlobalSymbol(nav.name.toSlice(ip), e.lib_name.toSlice(ip)) else @@ -2588,7 +2588,7 @@ fn logSymtab(coff: *Coff) void { .DEBUG => unreachable, // TODO else => @intFromEnum(sym.section_number), }; - log.debug(" %{d}: {?s} @{x} in {s}({d}), {s}", .{ + log.debug(" %{d}: {s} @{x} in {s}({d}), {s}", .{ sym_id, coff.getSymbolName(.{ .sym_index = @as(u32, @intCast(sym_id)), .file = null }), sym.value, @@ -2605,7 +2605,7 @@ fn logSymtab(coff: *Coff) void { } log.debug("GOT entries:", .{}); - log.debug("{}", .{coff.got_table}); + log.debug("{f}", .{coff.got_table}); } fn logSections(coff: *Coff) void { @@ -2625,7 +2625,7 @@ fn logImportTables(coff: *const Coff) void { log.debug("import tables:", .{}); for (coff.import_tables.keys(), 0..) 
|off, i| { const itable = coff.import_tables.values()[i]; - log.debug("{}", .{itable.fmtDebug(.{ + log.debug("{f}", .{itable.fmtDebug(.{ .coff = coff, .index = i, .name_off = off, @@ -3061,40 +3061,25 @@ const ImportTable = struct { return base_vaddr + index * @sizeOf(u64); } - const FormatContext = struct { + const Format = struct { itab: ImportTable, ctx: Context, + + fn default(f: Format, writer: *std.io.Writer) std.io.Writer.Error!void { + const lib_name = f.ctx.coff.temp_strtab.getAssumeExists(f.ctx.name_off); + const base_vaddr = getBaseAddress(f.ctx); + try writer.print("IAT({s}.dll) @{x}:", .{ lib_name, base_vaddr }); + for (f.itab.entries.items, 0..) |entry, i| { + try writer.print("\n {d}@{?x} => {s}", .{ + i, + f.itab.getImportAddress(entry, f.ctx), + f.ctx.coff.getSymbolName(entry), + }); + } + } }; - fn format(itab: ImportTable, comptime unused_format_string: []const u8, options: std.fmt.FormatOptions, writer: anytype) !void { - _ = itab; - _ = unused_format_string; - _ = options; - _ = writer; - @compileError("do not format ImportTable directly; use itab.fmtDebug()"); - } - - fn format2( - fmt_ctx: FormatContext, - comptime unused_format_string: []const u8, - options: fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { - _ = options; - comptime assert(unused_format_string.len == 0); - const lib_name = fmt_ctx.ctx.coff.temp_strtab.getAssumeExists(fmt_ctx.ctx.name_off); - const base_vaddr = getBaseAddress(fmt_ctx.ctx); - try writer.print("IAT({s}.dll) @{x}:", .{ lib_name, base_vaddr }); - for (fmt_ctx.itab.entries.items, 0..) 
|entry, i| { - try writer.print("\n {d}@{?x} => {s}", .{ - i, - fmt_ctx.itab.getImportAddress(entry, fmt_ctx.ctx), - fmt_ctx.ctx.coff.getSymbolName(entry), - }); - } - } - - fn fmtDebug(itab: ImportTable, ctx: Context) fmt.Formatter(format2) { + fn fmtDebug(itab: ImportTable, ctx: Context) fmt.Formatter(Format, Format.default) { return .{ .data = .{ .itab = itab, .ctx = ctx } }; } diff --git a/src/link/Dwarf.zig b/src/link/Dwarf.zig index 554c03bafb..a1a9dedd4b 100644 --- a/src/link/Dwarf.zig +++ b/src/link/Dwarf.zig @@ -973,7 +973,7 @@ const Entry = struct { else .main; if (sec.getUnit(ty_unit) == unit and unit.getEntry(other_entry) == entry) - log.err("missing Type({}({d}))", .{ + log.err("missing Type({f}({d}))", .{ Type.fromInterned(ty).fmt(.{ .tid = .main, .zcu = zcu }), @intFromEnum(ty), }); @@ -981,7 +981,7 @@ const Entry = struct { for (dwarf.navs.keys(), dwarf.navs.values()) |nav, other_entry| { const nav_unit = dwarf.getUnit(zcu.fileByIndex(ip.getNav(nav).srcInst(ip).resolveFile(ip)).mod.?) 
catch unreachable; if (sec.getUnit(nav_unit) == unit and unit.getEntry(other_entry) == entry) - log.err("missing Nav({}({d}))", .{ ip.getNav(nav).fqn.fmt(ip), @intFromEnum(nav) }); + log.err("missing Nav({f}({d}))", .{ ip.getNav(nav).fqn.fmt(ip), @intFromEnum(nav) }); } } @panic("missing dwarf relocation target"); @@ -1957,7 +1957,7 @@ pub const WipNav = struct { .{ .debug_output = .{ .dwarf = wip_nav } }, ); if (old_len + bytes != wip_nav.debug_info.items.len) { - std.debug.print("{} [{}]: {} != {}\n", .{ ty.fmt(wip_nav.pt), ty.toIntern(), bytes, wip_nav.debug_info.items.len - old_len }); + std.debug.print("{f} [{}]: {} != {}\n", .{ ty.fmt(wip_nav.pt), ty.toIntern(), bytes, wip_nav.debug_info.items.len - old_len }); unreachable; } } @@ -2427,7 +2427,7 @@ fn initWipNavInner( const inst_info = nav.srcInst(ip).resolveFull(ip).?; const file = zcu.fileByIndex(inst_info.file); const decl = file.zir.?.getDeclaration(inst_info.inst); - log.debug("initWipNav({s}:{d}:{d} %{d} = {})", .{ + log.debug("initWipNav({s}:{d}:{d} %{d} = {f})", .{ file.sub_file_path, decl.src_line + 1, decl.src_column + 1, @@ -2632,7 +2632,7 @@ pub fn finishWipNavFunc( const ip = &zcu.intern_pool; const nav = ip.getNav(nav_index); assert(wip_nav.func != .none); - log.debug("finishWipNavFunc({})", .{nav.fqn.fmt(ip)}); + log.debug("finishWipNavFunc({f})", .{nav.fqn.fmt(ip)}); { const external_relocs = &dwarf.debug_aranges.section.getUnit(wip_nav.unit).getEntry(wip_nav.entry).external_relocs; @@ -2733,7 +2733,7 @@ pub fn finishWipNav( const zcu = pt.zcu; const ip = &zcu.intern_pool; const nav = ip.getNav(nav_index); - log.debug("finishWipNav({})", .{nav.fqn.fmt(ip)}); + log.debug("finishWipNav({f})", .{nav.fqn.fmt(ip)}); try dwarf.debug_info.section.replaceEntry(wip_nav.unit, wip_nav.entry, dwarf, wip_nav.debug_info.items); if (wip_nav.debug_line.items.len > 0) { @@ -2765,7 +2765,7 @@ fn updateComptimeNavInner(dwarf: *Dwarf, pt: Zcu.PerThread, nav_index: InternPoo const inst_info = 
nav.srcInst(ip).resolveFull(ip).?; const file = zcu.fileByIndex(inst_info.file); const decl = file.zir.?.getDeclaration(inst_info.inst); - log.debug("updateComptimeNav({s}:{d}:{d} %{d} = {})", .{ + log.debug("updateComptimeNav({s}:{d}:{d} %{d} = {f})", .{ file.sub_file_path, decl.src_line + 1, decl.src_column + 1, @@ -3215,7 +3215,7 @@ fn updateLazyType( const ty: Type = .fromInterned(type_index); switch (type_index) { .generic_poison_type => log.debug("updateLazyType({s})", .{"anytype"}), - else => log.debug("updateLazyType({})", .{ty.fmt(pt)}), + else => log.debug("updateLazyType({f})", .{ty.fmt(pt)}), } var wip_nav: WipNav = .{ @@ -3243,7 +3243,7 @@ fn updateLazyType( const diw = wip_nav.debug_info.writer(dwarf.gpa); const name = switch (type_index) { .generic_poison_type => "", - else => try std.fmt.allocPrint(dwarf.gpa, "{}", .{ty.fmt(pt)}), + else => try std.fmt.allocPrint(dwarf.gpa, "{f}", .{ty.fmt(pt)}), }; defer dwarf.gpa.free(name); @@ -3718,7 +3718,7 @@ fn updateLazyValue( const zcu = pt.zcu; const ip = &zcu.intern_pool; assert(ip.typeOf(value_index) != .type_type); - log.debug("updateLazyValue(@as({}, {}))", .{ + log.debug("updateLazyValue(@as({f}, {f}))", .{ Value.fromInterned(value_index).typeOf(zcu).fmt(pt), Value.fromInterned(value_index).fmtValue(pt), }); @@ -4110,7 +4110,7 @@ pub fn updateContainerType(dwarf: *Dwarf, pt: Zcu.PerThread, type_index: InternP const ip = &zcu.intern_pool; const ty: Type = .fromInterned(type_index); const ty_src_loc = ty.srcLoc(zcu); - log.debug("updateContainerType({})", .{ty.fmt(pt)}); + log.debug("updateContainerType({f})", .{ty.fmt(pt)}); const inst_info = ty.typeDeclInst(zcu).?.resolveFull(ip).?; const file = zcu.fileByIndex(inst_info.file); @@ -4239,7 +4239,7 @@ pub fn updateContainerType(dwarf: *Dwarf, pt: Zcu.PerThread, type_index: InternP }; defer wip_nav.deinit(); const diw = wip_nav.debug_info.writer(dwarf.gpa); - const name = try std.fmt.allocPrint(dwarf.gpa, "{}", .{ty.fmt(pt)}); + const name = try 
std.fmt.allocPrint(dwarf.gpa, "{f}", .{ty.fmt(pt)}); defer dwarf.gpa.free(name); switch (ip.indexToKey(type_index)) { diff --git a/src/link/Elf.zig b/src/link/Elf.zig index dc27e0bdd7..785f200928 100644 --- a/src/link/Elf.zig +++ b/src/link/Elf.zig @@ -702,7 +702,7 @@ pub fn allocateChunk(self: *Elf, args: struct { shdr.sh_addr + res.value, shdr.sh_offset + res.value, }); - log.debug(" placement {}, {s}", .{ + log.debug(" placement {f}, {s}", .{ res.placement, if (self.atom(res.placement)) |atom_ptr| atom_ptr.name(self) else "", }); @@ -869,7 +869,7 @@ fn flushInner(self: *Elf, arena: Allocator, tid: Zcu.PerThread.Id) !void { // Dump the state for easy debugging. // State can be dumped via `--debug-log link_state`. if (build_options.enable_logging) { - state_log.debug("{}", .{self.dumpState()}); + state_log.debug("{f}", .{self.dumpState()}); } // Beyond this point, everything has been allocated a virtual address and we can resolve @@ -3544,7 +3544,7 @@ pub fn addRelaDyn(self: *Elf, opts: RelaDyn) !void { } pub fn addRelaDynAssumeCapacity(self: *Elf, opts: RelaDyn) void { - relocs_log.debug(" {s}: [{x} => {d}({s})] + {x}", .{ + relocs_log.debug(" {f}: [{x} => {d}({s})] + {x}", .{ relocation.fmtRelocType(opts.type, self.getTarget().cpu.arch), opts.offset, opts.sym, @@ -3791,7 +3791,7 @@ fn reportUndefinedSymbols(self: *Elf, undefs: anytype) !void { for (refs.items[0..nrefs]) |ref| { const atom_ptr = self.atom(ref).?; const file_ptr = atom_ptr.file(self).?; - err.addNote("referenced by {s}:{s}", .{ file_ptr.fmtPath(), atom_ptr.name(self) }); + err.addNote("referenced by {f}:{s}", .{ file_ptr.fmtPath(), atom_ptr.name(self) }); } if (refs.items.len > max_notes) { @@ -3813,12 +3813,12 @@ fn reportDuplicates(self: *Elf, dupes: anytype) error{ HasDuplicates, OutOfMemor var err = try diags.addErrorWithNotes(nnotes + 1); try err.addMsg("duplicate symbol definition: {s}", .{sym.name(self)}); - err.addNote("defined by {}", .{sym.file(self).?.fmtPath()}); + err.addNote("defined 
by {f}", .{sym.file(self).?.fmtPath()}); var inote: usize = 0; while (inote < @min(notes.items.len, max_notes)) : (inote += 1) { const file_ptr = self.file(notes.items[inote]).?; - err.addNote("defined by {}", .{file_ptr.fmtPath()}); + err.addNote("defined by {f}", .{file_ptr.fmtPath()}); } if (notes.items.len > max_notes) { @@ -3847,7 +3847,7 @@ pub fn addFileError( const diags = &self.base.comp.link_diags; var err = try diags.addErrorWithNotes(1); try err.addMsg(format, args); - err.addNote("while parsing {}", .{self.file(file_index).?.fmtPath()}); + err.addNote("while parsing {f}", .{self.file(file_index).?.fmtPath()}); } pub fn failFile( @@ -3860,28 +3860,21 @@ pub fn failFile( return error.LinkFailure; } -const FormatShdrCtx = struct { +const FormatShdr = struct { elf_file: *Elf, shdr: elf.Elf64_Shdr, }; -fn fmtShdr(self: *Elf, shdr: elf.Elf64_Shdr) std.fmt.Formatter(formatShdr) { +fn fmtShdr(self: *Elf, shdr: elf.Elf64_Shdr) std.fmt.Formatter(FormatShdr, formatShdr) { return .{ .data = .{ .shdr = shdr, .elf_file = self, } }; } -fn formatShdr( - ctx: FormatShdrCtx, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = options; - _ = unused_fmt_string; +fn formatShdr(ctx: FormatShdr, writer: *std.io.Writer) std.io.Writer.Error!void { const shdr = ctx.shdr; - try writer.print("{s} : @{x} ({x}) : align({x}) : size({x}) : entsize({x}) : flags({})", .{ + try writer.print("{s} : @{x} ({x}) : align({x}) : size({x}) : entsize({x}) : flags({f})", .{ ctx.elf_file.getShString(shdr.sh_name), shdr.sh_offset, shdr.sh_addr, shdr.sh_addralign, shdr.sh_size, shdr.sh_entsize, @@ -3889,18 +3882,11 @@ fn formatShdr( }); } -pub fn fmtShdrFlags(sh_flags: u64) std.fmt.Formatter(formatShdrFlags) { +pub fn fmtShdrFlags(sh_flags: u64) std.fmt.Formatter(u64, formatShdrFlags) { return .{ .data = sh_flags }; } -fn formatShdrFlags( - sh_flags: u64, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: 
anytype, -) !void { - _ = unused_fmt_string; - _ = options; +fn formatShdrFlags(sh_flags: u64, writer: *std.io.Writer) std.io.Writer.Error!void { if (elf.SHF_WRITE & sh_flags != 0) { try writer.writeAll("W"); } @@ -3945,26 +3931,19 @@ fn formatShdrFlags( } } -const FormatPhdrCtx = struct { +const FormatPhdr = struct { elf_file: *Elf, phdr: elf.Elf64_Phdr, }; -fn fmtPhdr(self: *Elf, phdr: elf.Elf64_Phdr) std.fmt.Formatter(formatPhdr) { +fn fmtPhdr(self: *Elf, phdr: elf.Elf64_Phdr) std.fmt.Formatter(FormatPhdr, formatPhdr) { return .{ .data = .{ .phdr = phdr, .elf_file = self, } }; } -fn formatPhdr( - ctx: FormatPhdrCtx, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = options; - _ = unused_fmt_string; +fn formatPhdr(ctx: FormatPhdr, writer: *std.io.Writer) std.io.Writer.Error!void { const phdr = ctx.phdr; const write = phdr.p_flags & elf.PF_W != 0; const read = phdr.p_flags & elf.PF_R != 0; @@ -3991,24 +3970,16 @@ fn formatPhdr( }); } -pub fn dumpState(self: *Elf) std.fmt.Formatter(fmtDumpState) { +pub fn dumpState(self: *Elf) std.fmt.Formatter(*Elf, fmtDumpState) { return .{ .data = self }; } -fn fmtDumpState( - self: *Elf, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = unused_fmt_string; - _ = options; - +fn fmtDumpState(self: *Elf, writer: *std.io.Writer) std.io.Writer.Error!void { const shared_objects = self.shared_objects.values(); if (self.zigObjectPtr()) |zig_object| { try writer.print("zig_object({d}) : {s}\n", .{ zig_object.index, zig_object.basename }); - try writer.print("{}{}", .{ + try writer.print("{f}{f}", .{ zig_object.fmtAtoms(self), zig_object.fmtSymtab(self), }); @@ -4017,10 +3988,10 @@ fn fmtDumpState( for (self.objects.items) |index| { const object = self.file(index).?.object; - try writer.print("object({d}) : {}", .{ index, object.fmtPath() }); + try writer.print("object({d}) : {f}", .{ index, object.fmtPath() }); if 
(!object.alive) try writer.writeAll(" : [*]"); try writer.writeByte('\n'); - try writer.print("{}{}{}{}{}\n", .{ + try writer.print("{f}{f}{f}{f}{f}\n", .{ object.fmtAtoms(self), object.fmtCies(self), object.fmtFdes(self), @@ -4031,51 +4002,51 @@ fn fmtDumpState( for (shared_objects) |index| { const shared_object = self.file(index).?.shared_object; - try writer.print("shared_object({d}) : {} : needed({})", .{ + try writer.print("shared_object({d}) : {f} : needed({})", .{ index, shared_object.path, shared_object.needed, }); if (!shared_object.alive) try writer.writeAll(" : [*]"); try writer.writeByte('\n'); - try writer.print("{}\n", .{shared_object.fmtSymtab(self)}); + try writer.print("{f}\n", .{shared_object.fmtSymtab(self)}); } if (self.linker_defined_index) |index| { const linker_defined = self.file(index).?.linker_defined; try writer.print("linker_defined({d}) : (linker defined)\n", .{index}); - try writer.print("{}\n", .{linker_defined.fmtSymtab(self)}); + try writer.print("{f}\n", .{linker_defined.fmtSymtab(self)}); } const slice = self.sections.slice(); { try writer.writeAll("atom lists\n"); for (slice.items(.shdr), slice.items(.atom_list_2), 0..) |shdr, atom_list, shndx| { - try writer.print("shdr({d}) : {s} : {}\n", .{ shndx, self.getShString(shdr.sh_name), atom_list.fmt(self) }); + try writer.print("shdr({d}) : {s} : {f}\n", .{ shndx, self.getShString(shdr.sh_name), atom_list.fmt(self) }); } } if (self.requiresThunks()) { try writer.writeAll("thunks\n"); for (self.thunks.items, 0..) 
|th, index| { - try writer.print("thunk({d}) : {}\n", .{ index, th.fmt(self) }); + try writer.print("thunk({d}) : {f}\n", .{ index, th.fmt(self) }); } } - try writer.print("{}\n", .{self.got.fmt(self)}); - try writer.print("{}\n", .{self.plt.fmt(self)}); + try writer.print("{f}\n", .{self.got.fmt(self)}); + try writer.print("{f}\n", .{self.plt.fmt(self)}); try writer.writeAll("Output groups\n"); for (self.group_sections.items) |cg| { - try writer.print(" shdr({d}) : GROUP({})\n", .{ cg.shndx, cg.cg_ref }); + try writer.print(" shdr({d}) : GROUP({f})\n", .{ cg.shndx, cg.cg_ref }); } try writer.writeAll("\nOutput merge sections\n"); for (self.merge_sections.items) |msec| { - try writer.print(" shdr({d}) : {}\n", .{ msec.output_section_index, msec.fmt(self) }); + try writer.print(" shdr({d}) : {f}\n", .{ msec.output_section_index, msec.fmt(self) }); } try writer.writeAll("\nOutput shdrs\n"); for (slice.items(.shdr), slice.items(.phndx), 0..) |shdr, phndx, shndx| { - try writer.print(" shdr({d}) : phdr({?d}) : {}\n", .{ + try writer.print(" shdr({d}) : phdr({d}) : {f}\n", .{ shndx, phndx, self.fmtShdr(shdr), @@ -4083,7 +4054,7 @@ fn fmtDumpState( } try writer.writeAll("\nOutput phdrs\n"); for (self.phdrs.items, 0..) 
|phdr, phndx| { - try writer.print(" phdr({d}) : {}\n", .{ phndx, self.fmtPhdr(phdr) }); + try writer.print(" phdr({d}) : {f}\n", .{ phndx, self.fmtPhdr(phdr) }); } } @@ -4221,15 +4192,8 @@ pub const Ref = struct { return ref.index == other.index and ref.file == other.file; } - pub fn format( - ref: Ref, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = unused_fmt_string; - _ = options; - try writer.print("ref({},{})", .{ ref.index, ref.file }); + pub fn format(ref: Ref, writer: *std.io.Writer) std.io.Writer.Error!void { + try writer.print("ref({d},{d})", .{ ref.index, ref.file }); } }; @@ -4424,7 +4388,7 @@ fn createThunks(elf_file: *Elf, atom_list: *AtomList) !void { for (atom_list.atoms.keys()[start..i]) |ref| { const atom_ptr = elf_file.atom(ref).?; const file_ptr = atom_ptr.file(elf_file).?; - log.debug("atom({}) {s}", .{ ref, atom_ptr.name(elf_file) }); + log.debug("atom({f}) {s}", .{ ref, atom_ptr.name(elf_file) }); for (atom_ptr.relocs(elf_file)) |rel| { const is_reachable = switch (cpu_arch) { .aarch64 => r: { @@ -4453,7 +4417,7 @@ fn createThunks(elf_file: *Elf, atom_list: *AtomList) !void { thunk_ptr.value = try advance(atom_list, thunk_ptr.size(elf_file), Atom.Alignment.fromNonzeroByteUnits(2)); - log.debug("thunk({d}) : {}", .{ thunk_index, thunk_ptr.fmt(elf_file) }); + log.debug("thunk({d}) : {f}", .{ thunk_index, thunk_ptr.fmt(elf_file) }); } } diff --git a/src/link/Elf/Archive.zig b/src/link/Elf/Archive.zig index 0d177bc21a..2243dc4781 100644 --- a/src/link/Elf/Archive.zig +++ b/src/link/Elf/Archive.zig @@ -44,8 +44,8 @@ pub fn parse( pos += @sizeOf(elf.ar_hdr); if (!mem.eql(u8, &hdr.ar_fmag, elf.ARFMAG)) { - return diags.failParse(path, "invalid archive header delimiter: {s}", .{ - std.fmt.fmtSliceEscapeLower(&hdr.ar_fmag), + return diags.failParse(path, "invalid archive header delimiter: {f}", .{ + std.ascii.hexEscape(&hdr.ar_fmag, .lower), }); } @@ -83,7 +83,7 @@ pub fn parse( .alive 
= false, }; - log.debug("extracting object '{}' from archive '{}'", .{ + log.debug("extracting object '{f}' from archive '{f}'", .{ @as(Path, object.path), @as(Path, path), }); @@ -201,48 +201,28 @@ pub const ArSymtab = struct { } } - pub fn format( - ar: ArSymtab, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = ar; - _ = unused_fmt_string; - _ = options; - _ = writer; - @compileError("do not format ar symtab directly; use fmt instead"); - } - - const FormatContext = struct { + const Format = struct { ar: ArSymtab, elf_file: *Elf, + + fn default(f: Format, writer: *std.io.Writer) std.io.Writer.Error!void { + const ar = f.ar; + const elf_file = f.elf_file; + for (ar.symtab.items, 0..) |entry, i| { + const name = ar.strtab.getAssumeExists(entry.off); + const file = elf_file.file(entry.file_index).?; + try writer.print(" {d}: {s} in file({d})({f})\n", .{ i, name, entry.file_index, file.fmtPath() }); + } + } }; - pub fn fmt(ar: ArSymtab, elf_file: *Elf) std.fmt.Formatter(format2) { + pub fn fmt(ar: ArSymtab, elf_file: *Elf) std.fmt.Formatter(Format, Format.default) { return .{ .data = .{ .ar = ar, .elf_file = elf_file, } }; } - fn format2( - ctx: FormatContext, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = unused_fmt_string; - _ = options; - const ar = ctx.ar; - const elf_file = ctx.elf_file; - for (ar.symtab.items, 0..) |entry, i| { - const name = ar.strtab.getAssumeExists(entry.off); - const file = elf_file.file(entry.file_index).?; - try writer.print(" {d}: {s} in file({d})({})\n", .{ i, name, entry.file_index, file.fmtPath() }); - } - } - const Entry = struct { /// Offset into the string table. 
off: u32, @@ -280,15 +260,8 @@ pub const ArStrtab = struct { try writer.writeAll(ar.buffer.items); } - pub fn format( - ar: ArStrtab, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = unused_fmt_string; - _ = options; - try writer.print("{s}", .{std.fmt.fmtSliceEscapeLower(ar.buffer.items)}); + pub fn format(ar: ArStrtab, writer: *std.io.Writer) std.io.Writer.Error!void { + try writer.print("{f}", .{std.ascii.hexEscape(ar.buffer.items, .lower)}); } }; diff --git a/src/link/Elf/Atom.zig b/src/link/Elf/Atom.zig index 0869d6582e..5966bae02c 100644 --- a/src/link/Elf/Atom.zig +++ b/src/link/Elf/Atom.zig @@ -142,7 +142,7 @@ pub fn freeListEligible(self: Atom, elf_file: *Elf) bool { } pub fn free(self: *Atom, elf_file: *Elf) void { - log.debug("freeAtom atom({}) ({s})", .{ self.ref(), self.name(elf_file) }); + log.debug("freeAtom atom({f}) ({s})", .{ self.ref(), self.name(elf_file) }); const comp = elf_file.base.comp; const gpa = comp.gpa; @@ -243,7 +243,7 @@ pub fn writeRelocs(self: Atom, elf_file: *Elf, out_relocs: *std.ArrayList(elf.El }, } - relocs_log.debug(" {s}: [{x} => {d}({s})] + {x}", .{ + relocs_log.debug(" {f}: [{x} => {d}({s})] + {x}", .{ relocation.fmtRelocType(rel.r_type(), cpu_arch), r_offset, r_sym, @@ -316,7 +316,7 @@ pub fn scanRelocs(self: Atom, elf_file: *Elf, code: ?[]const u8, undefs: anytype }; // Violation of One Definition Rule for COMDATs. 
// TODO convert into an error - log.debug("{}: {s}: {s} refers to a discarded COMDAT section", .{ + log.debug("{f}: {s}: {s} refers to a discarded COMDAT section", .{ file_ptr.fmtPath(), self.name(elf_file), sym_name, @@ -519,11 +519,11 @@ fn dataType(symbol: *const Symbol, elf_file: *Elf) u2 { fn reportUnhandledRelocError(self: Atom, rel: elf.Elf64_Rela, elf_file: *Elf) RelocError!void { const diags = &elf_file.base.comp.link_diags; var err = try diags.addErrorWithNotes(1); - try err.addMsg("fatal linker error: unhandled relocation type {} at offset 0x{x}", .{ + try err.addMsg("fatal linker error: unhandled relocation type {f} at offset 0x{x}", .{ relocation.fmtRelocType(rel.r_type(), elf_file.getTarget().cpu.arch), rel.r_offset, }); - err.addNote("in {}:{s}", .{ self.file(elf_file).?.fmtPath(), self.name(elf_file) }); + err.addNote("in {f}:{s}", .{ self.file(elf_file).?.fmtPath(), self.name(elf_file) }); return error.RelocFailure; } @@ -539,7 +539,7 @@ fn reportTextRelocError( rel.r_offset, symbol.name(elf_file), }); - err.addNote("in {}:{s}", .{ self.file(elf_file).?.fmtPath(), self.name(elf_file) }); + err.addNote("in {f}:{s}", .{ self.file(elf_file).?.fmtPath(), self.name(elf_file) }); return error.RelocFailure; } @@ -555,7 +555,7 @@ fn reportPicError( rel.r_offset, symbol.name(elf_file), }); - err.addNote("in {}:{s}", .{ self.file(elf_file).?.fmtPath(), self.name(elf_file) }); + err.addNote("in {f}:{s}", .{ self.file(elf_file).?.fmtPath(), self.name(elf_file) }); err.addNote("recompile with -fPIC", .{}); return error.RelocFailure; } @@ -572,7 +572,7 @@ fn reportNoPicError( rel.r_offset, symbol.name(elf_file), }); - err.addNote("in {}:{s}", .{ self.file(elf_file).?.fmtPath(), self.name(elf_file) }); + err.addNote("in {f}:{s}", .{ self.file(elf_file).?.fmtPath(), self.name(elf_file) }); err.addNote("recompile with -fno-PIC", .{}); return error.RelocFailure; } @@ -652,7 +652,7 @@ pub fn resolveRelocsAlloc(self: Atom, elf_file: *Elf, code: []u8) RelocError!voi // 
Address of the dynamic thread pointer. const DTP = elf_file.dtpAddress(); - relocs_log.debug(" {s}: {x}: [{x} => {x}] GOT({x}) ({s})", .{ + relocs_log.debug(" {f}: {x}: [{x} => {x}] GOT({x}) ({s})", .{ relocation.fmtRelocType(rel.r_type(), cpu_arch), r_offset, P, @@ -823,7 +823,7 @@ pub fn resolveRelocsNonAlloc(self: Atom, elf_file: *Elf, code: []u8, undefs: any }; // Violation of One Definition Rule for COMDATs. // TODO convert into an error - log.debug("{}: {s}: {s} refers to a discarded COMDAT section", .{ + log.debug("{f}: {s}: {s} refers to a discarded COMDAT section", .{ file_ptr.fmtPath(), self.name(elf_file), sym_name, @@ -855,7 +855,7 @@ pub fn resolveRelocsNonAlloc(self: Atom, elf_file: *Elf, code: []u8, undefs: any const args = ResolveArgs{ P, A, S, GOT, 0, 0, DTP }; - relocs_log.debug(" {}: {x}: [{x} => {x}] ({s})", .{ + relocs_log.debug(" {f}: {x}: [{x} => {x}] ({s})", .{ relocation.fmtRelocType(rel.r_type(), cpu_arch), rel.r_offset, P, @@ -904,65 +904,45 @@ pub fn setExtra(atom: Atom, extras: Extra, elf_file: *Elf) void { atom.file(elf_file).?.setAtomExtra(atom.extra_index, extras); } -pub fn format( - atom: Atom, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = atom; - _ = unused_fmt_string; - _ = options; - _ = writer; - @compileError("do not format Atom directly"); -} - -pub fn fmt(atom: Atom, elf_file: *Elf) std.fmt.Formatter(format2) { +pub fn fmt(atom: Atom, elf_file: *Elf) std.fmt.Formatter(Format, Format.default) { return .{ .data = .{ .atom = atom, .elf_file = elf_file, } }; } -const FormatContext = struct { +const Format = struct { atom: Atom, elf_file: *Elf, -}; -fn format2( - ctx: FormatContext, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = options; - _ = unused_fmt_string; - const atom = ctx.atom; - const elf_file = ctx.elf_file; - try writer.print("atom({d}) : {s} : @{x} : shdr({d}) : align({x}) : size({x}) : 
prev({}) : next({})", .{ - atom.atom_index, atom.name(elf_file), atom.address(elf_file), - atom.output_section_index, atom.alignment.toByteUnits() orelse 0, atom.size, - atom.prev_atom_ref, atom.next_atom_ref, - }); - if (atom.file(elf_file)) |atom_file| switch (atom_file) { - .object => |object| { - if (atom.fdes(object).len > 0) { - try writer.writeAll(" : fdes{ "); - const extras = atom.extra(elf_file); - for (atom.fdes(object), extras.fde_start..) |fde, i| { - try writer.print("{d}", .{i}); - if (!fde.alive) try writer.writeAll("([*])"); - if (i - extras.fde_start < extras.fde_count - 1) try writer.writeAll(", "); + fn default(f: Format, w: *std.io.Writer) std.io.Writer.Error!void { + const atom = f.atom; + const elf_file = f.elf_file; + try w.print("atom({d}) : {s} : @{x} : shdr({d}) : align({x}) : size({x}) : prev({f}) : next({f})", .{ + atom.atom_index, atom.name(elf_file), atom.address(elf_file), + atom.output_section_index, atom.alignment.toByteUnits() orelse 0, atom.size, + atom.prev_atom_ref, atom.next_atom_ref, + }); + if (atom.file(elf_file)) |atom_file| switch (atom_file) { + .object => |object| { + if (atom.fdes(object).len > 0) { + try w.writeAll(" : fdes{ "); + const extras = atom.extra(elf_file); + for (atom.fdes(object), extras.fde_start..) 
|fde, i| { + try w.print("{d}", .{i}); + if (!fde.alive) try w.writeAll("([*])"); + if (i - extras.fde_start < extras.fde_count - 1) try w.writeAll(", "); + } + try w.writeAll(" }"); } - try writer.writeAll(" }"); - } - }, - else => {}, - }; - if (!atom.alive) { - try writer.writeAll(" : [*]"); + }, + else => {}, + }; + if (!atom.alive) { + try w.writeAll(" : [*]"); + } } -} +}; pub const Index = u32; @@ -1189,7 +1169,7 @@ const x86_64 = struct { x86_64.relaxGotPcTlsDesc(code[r_offset - 3 ..], t) catch { var err = try diags.addErrorWithNotes(1); try err.addMsg("could not relax {s}", .{@tagName(r_type)}); - err.addNote("in {}:{s} at offset 0x{x}", .{ + err.addNote("in {f}:{s} at offset 0x{x}", .{ atom.file(elf_file).?.fmtPath(), atom.name(elf_file), rel.r_offset, @@ -1285,7 +1265,7 @@ const x86_64 = struct { }, t), else => return error.RelaxFailure, }; - relocs_log.debug(" relaxing {} => {}", .{ old_inst.encoding, inst.encoding }); + relocs_log.debug(" relaxing {f} => {f}", .{ old_inst.encoding, inst.encoding }); const nop: Instruction = try .new(.none, .nop, &.{}, t); try encode(&.{ nop, inst }, code); } @@ -1296,7 +1276,7 @@ const x86_64 = struct { switch (old_inst.encoding.mnemonic) { .mov => { const inst: Instruction = try .new(old_inst.prefix, .lea, &old_inst.ops, t); - relocs_log.debug(" relaxing {} => {}", .{ old_inst.encoding, inst.encoding }); + relocs_log.debug(" relaxing {f} => {f}", .{ old_inst.encoding, inst.encoding }); try encode(&.{inst}, code); }, else => return error.RelaxFailure, @@ -1330,11 +1310,11 @@ const x86_64 = struct { else => { var err = try diags.addErrorWithNotes(1); - try err.addMsg("TODO: rewrite {} when followed by {}", .{ + try err.addMsg("TODO: rewrite {f} when followed by {f}", .{ relocation.fmtRelocType(rels[0].r_type(), .x86_64), relocation.fmtRelocType(rels[1].r_type(), .x86_64), }); - err.addNote("in {}:{s} at offset 0x{x}", .{ + err.addNote("in {f}:{s} at offset 0x{x}", .{ self.file(elf_file).?.fmtPath(), self.name(elf_file), 
rels[0].r_offset, @@ -1386,11 +1366,11 @@ const x86_64 = struct { else => { var err = try diags.addErrorWithNotes(1); - try err.addMsg("TODO: rewrite {} when followed by {}", .{ + try err.addMsg("TODO: rewrite {f} when followed by {f}", .{ relocation.fmtRelocType(rels[0].r_type(), .x86_64), relocation.fmtRelocType(rels[1].r_type(), .x86_64), }); - err.addNote("in {}:{s} at offset 0x{x}", .{ + err.addNote("in {f}:{s} at offset 0x{x}", .{ self.file(elf_file).?.fmtPath(), self.name(elf_file), rels[0].r_offset, @@ -1410,7 +1390,8 @@ const x86_64 = struct { // TODO: hack to force imm32s in the assembler .{ .imm = .s(-129) }, }, t) catch return false; - inst.encode(std.io.null_writer, .{}) catch return false; + var trash: std.io.Writer.Discarding = .init(&.{}); + inst.encode(&trash.writer, .{}) catch return false; return true; }, else => return false, @@ -1427,7 +1408,7 @@ const x86_64 = struct { // TODO: hack to force imm32s in the assembler .{ .imm = .s(-129) }, }, t) catch unreachable; - relocs_log.debug(" relaxing {} => {}", .{ old_inst.encoding, inst.encoding }); + relocs_log.debug(" relaxing {f} => {f}", .{ old_inst.encoding, inst.encoding }); encode(&.{inst}, code) catch unreachable; }, else => unreachable, @@ -1444,7 +1425,7 @@ const x86_64 = struct { // TODO: hack to force imm32s in the assembler .{ .imm = .s(-129) }, }, target); - relocs_log.debug(" relaxing {} => {}", .{ old_inst.encoding, inst.encoding }); + relocs_log.debug(" relaxing {f} => {f}", .{ old_inst.encoding, inst.encoding }); try encode(&.{inst}, code); }, else => return error.RelaxFailure, @@ -1476,7 +1457,7 @@ const x86_64 = struct { std.mem.writeInt(i32, insts[12..][0..4], value, .little); try stream.seekBy(-4); try writer.writeAll(&insts); - relocs_log.debug(" relaxing {} and {}", .{ + relocs_log.debug(" relaxing {f} and {f}", .{ relocation.fmtRelocType(rels[0].r_type(), .x86_64), relocation.fmtRelocType(rels[1].r_type(), .x86_64), }); @@ -1484,11 +1465,11 @@ const x86_64 = struct { else => { 
var err = try diags.addErrorWithNotes(1); - try err.addMsg("fatal linker error: rewrite {} when followed by {}", .{ + try err.addMsg("fatal linker error: rewrite {f} when followed by {f}", .{ relocation.fmtRelocType(rels[0].r_type(), .x86_64), relocation.fmtRelocType(rels[1].r_type(), .x86_64), }); - err.addNote("in {}:{s} at offset 0x{x}", .{ + err.addNote("in {f}:{s} at offset 0x{x}", .{ self.file(elf_file).?.fmtPath(), self.name(elf_file), rels[0].r_offset, @@ -1505,11 +1486,8 @@ const x86_64 = struct { } fn encode(insts: []const Instruction, code: []u8) !void { - var stream = std.io.fixedBufferStream(code); - const writer = stream.writer(); - for (insts) |inst| { - try inst.encode(writer, .{}); - } + var stream: std.io.Writer = .fixed(code); + for (insts) |inst| try inst.encode(&stream, .{}); } const bits = @import("../../arch/x86_64/bits.zig"); @@ -1675,7 +1653,7 @@ const aarch64 = struct { // TODO: relax var err = try diags.addErrorWithNotes(1); try err.addMsg("TODO: relax ADR_GOT_PAGE", .{}); - err.addNote("in {}:{s} at offset 0x{x}", .{ + err.addNote("in {f}:{s} at offset 0x{x}", .{ atom.file(elf_file).?.fmtPath(), atom.name(elf_file), r_offset, @@ -1965,7 +1943,7 @@ const riscv = struct { // TODO: implement searching forward var err = try diags.addErrorWithNotes(1); try err.addMsg("TODO: find HI20 paired reloc scanning forward", .{}); - err.addNote("in {}:{s} at offset 0x{x}", .{ + err.addNote("in {f}:{s} at offset 0x{x}", .{ atom.file(elf_file).?.fmtPath(), atom.name(elf_file), rel.r_offset, diff --git a/src/link/Elf/AtomList.zig b/src/link/Elf/AtomList.zig index f8d57d04a1..513b0c372f 100644 --- a/src/link/Elf/AtomList.zig +++ b/src/link/Elf/AtomList.zig @@ -108,7 +108,7 @@ pub fn write(list: AtomList, buffer: *std.ArrayList(u8), undefs: anytype, elf_fi const off = math.cast(usize, atom_ptr.value - list.value) orelse return error.Overflow; const size = math.cast(usize, atom_ptr.size) orelse return error.Overflow; - log.debug(" atom({}) at 0x{x}", .{ ref, 
list.offset(elf_file) + off }); + log.debug(" atom({f}) at 0x{x}", .{ ref, list.offset(elf_file) + off }); const object = atom_ptr.file(elf_file).?.object; const code = try object.codeDecompressAlloc(elf_file, ref.index); @@ -144,7 +144,7 @@ pub fn writeRelocatable(list: AtomList, buffer: *std.ArrayList(u8), elf_file: *E const off = math.cast(usize, atom_ptr.value - list.value) orelse return error.Overflow; const size = math.cast(usize, atom_ptr.size) orelse return error.Overflow; - log.debug(" atom({}) at 0x{x}", .{ ref, list.offset(elf_file) + off }); + log.debug(" atom({f}) at 0x{x}", .{ ref, list.offset(elf_file) + off }); const object = atom_ptr.file(elf_file).?.object; const code = try object.codeDecompressAlloc(elf_file, ref.index); @@ -167,44 +167,29 @@ pub fn lastAtom(list: AtomList, elf_file: *Elf) *Atom { return elf_file.atom(list.atoms.keys()[list.atoms.keys().len - 1]).?; } -pub fn format( - list: AtomList, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = list; - _ = unused_fmt_string; - _ = options; - _ = writer; - @compileError("do not format AtomList directly"); -} +const Format = struct { + atom_list: AtomList, + elf_file: *Elf, -const FormatCtx = struct { AtomList, *Elf }; - -pub fn fmt(list: AtomList, elf_file: *Elf) std.fmt.Formatter(format2) { - return .{ .data = .{ list, elf_file } }; -} - -fn format2( - ctx: FormatCtx, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = unused_fmt_string; - _ = options; - const list, const elf_file = ctx; - try writer.print("list : @{x} : shdr({d}) : align({x}) : size({x})", .{ - list.address(elf_file), list.output_section_index, - list.alignment.toByteUnits() orelse 0, list.size, - }); - try writer.writeAll(" : atoms{ "); - for (list.atoms.keys(), 0..) 
|ref, i| { - try writer.print("{}", .{ref}); - if (i < list.atoms.keys().len - 1) try writer.writeAll(", "); + fn default(f: Format, writer: *std.io.Writer) std.io.Writer.Error!void { + const list = f.atom_list; + try writer.print("list : @{x} : shdr({d}) : align({x}) : size({x})", .{ + list.address(f.elf_file), + list.output_section_index, + list.alignment.toByteUnits() orelse 0, + list.size, + }); + try writer.writeAll(" : atoms{ "); + for (list.atoms.keys(), 0..) |ref, i| { + try writer.print("{f}", .{ref}); + if (i < list.atoms.keys().len - 1) try writer.writeAll(", "); + } + try writer.writeAll(" }"); } - try writer.writeAll(" }"); +}; + +pub fn fmt(atom_list: AtomList, elf_file: *Elf) std.fmt.Formatter(Format, Format.default) { + return .{ .data = .{ .atom_list = atom_list, .elf_file = elf_file } }; } const assert = std.debug.assert; diff --git a/src/link/Elf/LinkerDefined.zig b/src/link/Elf/LinkerDefined.zig index ad02d8d5d9..636fb7f4ab 100644 --- a/src/link/Elf/LinkerDefined.zig +++ b/src/link/Elf/LinkerDefined.zig @@ -147,9 +147,9 @@ pub fn initStartStopSymbols(self: *LinkerDefined, elf_file: *Elf) !void { for (slice.items(.shdr)) |shdr| { // TODO use getOrPut for incremental so that we don't create duplicates if (elf_file.getStartStopBasename(shdr)) |name| { - const start_name = try std.fmt.allocPrintZ(gpa, "__start_{s}", .{name}); + const start_name = try std.fmt.allocPrintSentinel(gpa, "__start_{s}", .{name}, 0); defer gpa.free(start_name); - const stop_name = try std.fmt.allocPrintZ(gpa, "__stop_{s}", .{name}); + const stop_name = try std.fmt.allocPrintSentinel(gpa, "__stop_{s}", .{name}, 0); defer gpa.free(stop_name); for (&[_][]const u8{ start_name, stop_name }) |nn| { @@ -437,38 +437,31 @@ pub fn setSymbolExtra(self: *LinkerDefined, index: u32, extra: Symbol.Extra) voi } } -pub fn fmtSymtab(self: *LinkerDefined, elf_file: *Elf) std.fmt.Formatter(formatSymtab) { +pub fn fmtSymtab(self: *LinkerDefined, elf_file: *Elf) std.fmt.Formatter(Format, 
Format.symtab) { return .{ .data = .{ .self = self, .elf_file = elf_file, } }; } -const FormatContext = struct { +const Format = struct { self: *LinkerDefined, elf_file: *Elf, -}; -fn formatSymtab( - ctx: FormatContext, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = unused_fmt_string; - _ = options; - const self = ctx.self; - const elf_file = ctx.elf_file; - try writer.writeAll(" globals\n"); - for (self.symbols.items, 0..) |sym, i| { - const ref = self.resolveSymbol(@intCast(i), elf_file); - if (elf_file.symbol(ref)) |ref_sym| { - try writer.print(" {}\n", .{ref_sym.fmt(elf_file)}); - } else { - try writer.print(" {s} : unclaimed\n", .{sym.name(elf_file)}); + fn symtab(ctx: Format, writer: *std.io.Writer) std.io.Writer.Error!void { + const self = ctx.self; + const elf_file = ctx.elf_file; + try writer.writeAll(" globals\n"); + for (self.symbols.items, 0..) |sym, i| { + const ref = self.resolveSymbol(@intCast(i), elf_file); + if (elf_file.symbol(ref)) |ref_sym| { + try writer.print(" {f}\n", .{ref_sym.fmt(elf_file)}); + } else { + try writer.print(" {s} : unclaimed\n", .{sym.name(elf_file)}); + } } } -} +}; const assert = std.debug.assert; const elf = std.elf; diff --git a/src/link/Elf/Merge.zig b/src/link/Elf/Merge.zig index 33e4f9c5b2..71fb519354 100644 --- a/src/link/Elf/Merge.zig +++ b/src/link/Elf/Merge.zig @@ -157,54 +157,34 @@ pub const Section = struct { } }; - pub fn format( - msec: Section, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = msec; - _ = unused_fmt_string; - _ = options; - _ = writer; - @compileError("do not format directly"); - } - - pub fn fmt(msec: Section, elf_file: *Elf) std.fmt.Formatter(format2) { + pub fn fmt(msec: Section, elf_file: *Elf) std.fmt.Formatter(Format, Format.default) { return .{ .data = .{ .msec = msec, .elf_file = elf_file, } }; } - const FormatContext = struct { + const Format = struct { 
msec: Section, elf_file: *Elf, - }; - pub fn format2( - ctx: FormatContext, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = options; - _ = unused_fmt_string; - const msec = ctx.msec; - const elf_file = ctx.elf_file; - try writer.print("{s} : @{x} : size({x}) : align({x}) : entsize({x}) : type({x}) : flags({x})\n", .{ - msec.name(elf_file), - msec.address(elf_file), - msec.size, - msec.alignment.toByteUnits() orelse 0, - msec.entsize, - msec.type, - msec.flags, - }); - for (msec.subsections.items) |msub| { - try writer.print(" {}\n", .{msub.fmt(elf_file)}); + pub fn default(f: Format, writer: *std.io.Writer) std.io.Writer.Error!void { + const msec = f.msec; + const elf_file = f.elf_file; + try writer.print("{s} : @{x} : size({x}) : align({x}) : entsize({x}) : type({x}) : flags({x})\n", .{ + msec.name(elf_file), + msec.address(elf_file), + msec.size, + msec.alignment.toByteUnits() orelse 0, + msec.entsize, + msec.type, + msec.flags, + }); + for (msec.subsections.items) |msub| { + try writer.print(" {f}\n", .{msub.fmt(elf_file)}); + } } - } + }; pub const Index = u32; }; @@ -231,48 +211,28 @@ pub const Subsection = struct { return msec.bytes.items[msub.string_index..][0..msub.size]; } - pub fn format( - msub: Subsection, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = msub; - _ = unused_fmt_string; - _ = options; - _ = writer; - @compileError("do not format directly"); - } - - pub fn fmt(msub: Subsection, elf_file: *Elf) std.fmt.Formatter(format2) { + pub fn fmt(msub: Subsection, elf_file: *Elf) std.fmt.Formatter(Format, Format.default) { return .{ .data = .{ .msub = msub, .elf_file = elf_file, } }; } - const FormatContext = struct { + const Format = struct { msub: Subsection, elf_file: *Elf, - }; - pub fn format2( - ctx: FormatContext, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void 
{ - _ = options; - _ = unused_fmt_string; - const msub = ctx.msub; - const elf_file = ctx.elf_file; - try writer.print("@{x} : align({x}) : size({x})", .{ - msub.address(elf_file), - msub.alignment, - msub.size, - }); - if (!msub.alive) try writer.writeAll(" : [*]"); - } + pub fn default(ctx: Format, writer: *std.io.Writer) std.io.Writer.Error!void { + const msub = ctx.msub; + const elf_file = ctx.elf_file; + try writer.print("@{x} : align({x}) : size({x})", .{ + msub.address(elf_file), + msub.alignment, + msub.size, + }); + if (!msub.alive) try writer.writeAll(" : [*]"); + } + }; pub const Index = u32; }; diff --git a/src/link/Elf/Object.zig b/src/link/Elf/Object.zig index 4d5b5378c4..a0faade696 100644 --- a/src/link/Elf/Object.zig +++ b/src/link/Elf/Object.zig @@ -281,7 +281,7 @@ fn initAtoms( elf.SHT_GROUP => { if (shdr.sh_info >= self.symtab.items.len) { // TODO convert into an error - log.debug("{}: invalid symbol index in sh_info", .{self.fmtPath()}); + log.debug("{f}: invalid symbol index in sh_info", .{self.fmtPath()}); continue; } const group_info_sym = self.symtab.items[shdr.sh_info]; @@ -488,10 +488,7 @@ fn parseEhFrame( if (cie.offset == cie_ptr) break @as(u32, @intCast(cie_index)); } else { // TODO convert into an error - log.debug("{s}: no matching CIE found for FDE at offset {x}", .{ - self.fmtPath(), - fde.offset, - }); + log.debug("{f}: no matching CIE found for FDE at offset {x}", .{ self.fmtPath(), fde.offset }); continue; }; fde.cie_index = cie_index; @@ -582,7 +579,7 @@ pub fn scanRelocs(self: *Object, elf_file: *Elf, undefs: anytype) !void { if (sym.flags.import) { if (sym.type(elf_file) != elf.STT_FUNC) // TODO convert into an error - log.debug("{s}: {s}: CIE referencing external data reference", .{ + log.debug("{f}: {s}: CIE referencing external data reference", .{ self.fmtPath(), sym.name(elf_file), }); sym.flags.needs_plt = true; @@ -796,7 +793,7 @@ pub fn initInputMergeSections(self: *Object, elf_file: *Elf) !void { if (!isNull(data[end 
.. end + sh_entsize])) { var err = try diags.addErrorWithNotes(1); try err.addMsg("string not null terminated", .{}); - err.addNote("in {}:{s}", .{ self.fmtPath(), atom_ptr.name(elf_file) }); + err.addNote("in {f}:{s}", .{ self.fmtPath(), atom_ptr.name(elf_file) }); return error.LinkFailure; } end += sh_entsize; @@ -811,7 +808,7 @@ pub fn initInputMergeSections(self: *Object, elf_file: *Elf) !void { if (shdr.sh_size % sh_entsize != 0) { var err = try diags.addErrorWithNotes(1); try err.addMsg("size not a multiple of sh_entsize", .{}); - err.addNote("in {}:{s}", .{ self.fmtPath(), atom_ptr.name(elf_file) }); + err.addNote("in {f}:{s}", .{ self.fmtPath(), atom_ptr.name(elf_file) }); return error.LinkFailure; } @@ -889,7 +886,7 @@ pub fn resolveMergeSubsections(self: *Object, elf_file: *Elf) error{ var err = try diags.addErrorWithNotes(2); try err.addMsg("invalid symbol value: {x}", .{esym.st_value}); err.addNote("for symbol {s}", .{sym.name(elf_file)}); - err.addNote("in {}", .{self.fmtPath()}); + err.addNote("in {f}", .{self.fmtPath()}); return error.LinkFailure; }; @@ -914,7 +911,7 @@ pub fn resolveMergeSubsections(self: *Object, elf_file: *Elf) error{ const res = imsec.findSubsection(@intCast(@as(i64, @intCast(esym.st_value)) + rel.r_addend)) orelse { var err = try diags.addErrorWithNotes(1); try err.addMsg("invalid relocation at offset 0x{x}", .{rel.r_offset}); - err.addNote("in {}:{s}", .{ self.fmtPath(), atom_ptr.name(elf_file) }); + err.addNote("in {f}:{s}", .{ self.fmtPath(), atom_ptr.name(elf_file) }); return error.LinkFailure; }; @@ -1432,171 +1429,116 @@ pub fn group(self: *Object, index: Elf.Group.Index) *Elf.Group { return &self.groups.items[index]; } -pub fn format( - self: *Object, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = self; - _ = unused_fmt_string; - _ = options; - _ = writer; - @compileError("do not format objects directly"); -} - -pub fn fmtSymtab(self: *Object, elf_file: 
*Elf) std.fmt.Formatter(formatSymtab) { +pub fn fmtSymtab(self: *Object, elf_file: *Elf) std.fmt.Formatter(Format, Format.symtab) { return .{ .data = .{ .object = self, .elf_file = elf_file, } }; } -const FormatContext = struct { +const Format = struct { object: *Object, elf_file: *Elf, + + fn symtab(f: Format, writer: *std.io.Writer) std.io.Writer.Error!void { + const object = f.object; + const elf_file = f.elf_file; + try writer.writeAll(" locals\n"); + for (object.locals()) |sym| { + try writer.print(" {f}\n", .{sym.fmt(elf_file)}); + } + try writer.writeAll(" globals\n"); + for (object.globals(), 0..) |sym, i| { + const first_global = object.first_global.?; + const ref = object.resolveSymbol(@intCast(i + first_global), elf_file); + if (elf_file.symbol(ref)) |ref_sym| { + try writer.print(" {f}\n", .{ref_sym.fmt(elf_file)}); + } else { + try writer.print(" {s} : unclaimed\n", .{sym.name(elf_file)}); + } + } + } + + fn atoms(f: Format, writer: *std.io.Writer) std.io.Writer.Error!void { + const object = f.object; + try writer.writeAll(" atoms\n"); + for (object.atoms_indexes.items) |atom_index| { + const atom_ptr = object.atom(atom_index) orelse continue; + try writer.print(" {f}\n", .{atom_ptr.fmt(f.elf_file)}); + } + } + + fn cies(f: Format, writer: *std.io.Writer) std.io.Writer.Error!void { + const object = f.object; + try writer.writeAll(" cies\n"); + for (object.cies.items, 0..) |cie, i| { + try writer.print(" cie({d}) : {f}\n", .{ i, cie.fmt(f.elf_file) }); + } + } + + fn fdes(f: Format, writer: *std.io.Writer) std.io.Writer.Error!void { + const object = f.object; + try writer.writeAll(" fdes\n"); + for (object.fdes.items, 0..) |fde, i| { + try writer.print(" fde({d}) : {f}\n", .{ i, fde.fmt(f.elf_file) }); + } + } + + fn groups(f: Format, writer: *std.io.Writer) std.io.Writer.Error!void { + const object = f.object; + const elf_file = f.elf_file; + try writer.writeAll(" groups\n"); + for (object.groups.items, 0..) 
|g, g_index| { + try writer.print(" {s}({d})", .{ if (g.is_comdat) "COMDAT" else "GROUP", g_index }); + if (!g.alive) try writer.writeAll(" : [*]"); + try writer.writeByte('\n'); + const g_members = g.members(elf_file); + for (g_members) |shndx| { + const atom_index = object.atoms_indexes.items[shndx]; + const atom_ptr = object.atom(atom_index) orelse continue; + try writer.print(" atom({d}) : {s}\n", .{ atom_index, atom_ptr.name(elf_file) }); + } + } + } }; -fn formatSymtab( - ctx: FormatContext, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = unused_fmt_string; - _ = options; - const object = ctx.object; - const elf_file = ctx.elf_file; - try writer.writeAll(" locals\n"); - for (object.locals()) |sym| { - try writer.print(" {}\n", .{sym.fmt(elf_file)}); - } - try writer.writeAll(" globals\n"); - for (object.globals(), 0..) |sym, i| { - const first_global = object.first_global.?; - const ref = object.resolveSymbol(@intCast(i + first_global), elf_file); - if (elf_file.symbol(ref)) |ref_sym| { - try writer.print(" {}\n", .{ref_sym.fmt(elf_file)}); - } else { - try writer.print(" {s} : unclaimed\n", .{sym.name(elf_file)}); - } - } -} - -pub fn fmtAtoms(self: *Object, elf_file: *Elf) std.fmt.Formatter(formatAtoms) { +pub fn fmtAtoms(self: *Object, elf_file: *Elf) std.fmt.Formatter(Format, Format.atoms) { return .{ .data = .{ .object = self, .elf_file = elf_file, } }; } -fn formatAtoms( - ctx: FormatContext, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = unused_fmt_string; - _ = options; - const object = ctx.object; - try writer.writeAll(" atoms\n"); - for (object.atoms_indexes.items) |atom_index| { - const atom_ptr = object.atom(atom_index) orelse continue; - try writer.print(" {}\n", .{atom_ptr.fmt(ctx.elf_file)}); - } -} - -pub fn fmtCies(self: *Object, elf_file: *Elf) std.fmt.Formatter(formatCies) { +pub fn fmtCies(self: *Object, 
elf_file: *Elf) std.fmt.Formatter(Format, Format.cies) { return .{ .data = .{ .object = self, .elf_file = elf_file, } }; } -fn formatCies( - ctx: FormatContext, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = unused_fmt_string; - _ = options; - const object = ctx.object; - try writer.writeAll(" cies\n"); - for (object.cies.items, 0..) |cie, i| { - try writer.print(" cie({d}) : {}\n", .{ i, cie.fmt(ctx.elf_file) }); - } -} - -pub fn fmtFdes(self: *Object, elf_file: *Elf) std.fmt.Formatter(formatFdes) { +pub fn fmtFdes(self: *Object, elf_file: *Elf) std.fmt.Formatter(Format, Format.fdes) { return .{ .data = .{ .object = self, .elf_file = elf_file, } }; } -fn formatFdes( - ctx: FormatContext, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = unused_fmt_string; - _ = options; - const object = ctx.object; - try writer.writeAll(" fdes\n"); - for (object.fdes.items, 0..) |fde, i| { - try writer.print(" fde({d}) : {}\n", .{ i, fde.fmt(ctx.elf_file) }); - } -} - -pub fn fmtGroups(self: *Object, elf_file: *Elf) std.fmt.Formatter(formatGroups) { +pub fn fmtGroups(self: *Object, elf_file: *Elf) std.fmt.Formatter(Format, Format.groups) { return .{ .data = .{ .object = self, .elf_file = elf_file, } }; } -fn formatGroups( - ctx: FormatContext, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = unused_fmt_string; - _ = options; - const object = ctx.object; - const elf_file = ctx.elf_file; - try writer.writeAll(" groups\n"); - for (object.groups.items, 0..) 
|g, g_index| { - try writer.print(" {s}({d})", .{ if (g.is_comdat) "COMDAT" else "GROUP", g_index }); - if (!g.alive) try writer.writeAll(" : [*]"); - try writer.writeByte('\n'); - const g_members = g.members(elf_file); - for (g_members) |shndx| { - const atom_index = object.atoms_indexes.items[shndx]; - const atom_ptr = object.atom(atom_index) orelse continue; - try writer.print(" atom({d}) : {s}\n", .{ atom_index, atom_ptr.name(elf_file) }); - } - } -} - -pub fn fmtPath(self: Object) std.fmt.Formatter(formatPath) { +pub fn fmtPath(self: Object) std.fmt.Formatter(Object, formatPath) { return .{ .data = self }; } -fn formatPath( - object: Object, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = unused_fmt_string; - _ = options; +fn formatPath(object: Object, writer: *std.io.Writer) std.io.Writer.Error!void { if (object.archive) |ar| { - try writer.print("{}({})", .{ ar.path, object.path }); + try writer.print("{f}({f})", .{ ar.path, object.path }); } else { - try writer.print("{}", .{object.path}); + try writer.print("{f}", .{object.path}); } } diff --git a/src/link/Elf/SharedObject.zig b/src/link/Elf/SharedObject.zig index 30def4429b..b5c8773f2c 100644 --- a/src/link/Elf/SharedObject.zig +++ b/src/link/Elf/SharedObject.zig @@ -509,51 +509,31 @@ pub fn setSymbolExtra(self: *SharedObject, index: u32, extra: Symbol.Extra) void } } -pub fn format( - self: SharedObject, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = self; - _ = unused_fmt_string; - _ = options; - _ = writer; - @compileError("unreachable"); -} - -pub fn fmtSymtab(self: SharedObject, elf_file: *Elf) std.fmt.Formatter(formatSymtab) { +pub fn fmtSymtab(self: SharedObject, elf_file: *Elf) std.fmt.Formatter(Format, Format.symtab) { return .{ .data = .{ .shared = self, .elf_file = elf_file, } }; } -const FormatContext = struct { +const Format = struct { shared: SharedObject, 
elf_file: *Elf, -}; -fn formatSymtab( - ctx: FormatContext, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = unused_fmt_string; - _ = options; - const shared = ctx.shared; - const elf_file = ctx.elf_file; - try writer.writeAll(" globals\n"); - for (shared.symbols.items, 0..) |sym, i| { - const ref = shared.resolveSymbol(@intCast(i), elf_file); - if (elf_file.symbol(ref)) |ref_sym| { - try writer.print(" {}\n", .{ref_sym.fmt(elf_file)}); - } else { - try writer.print(" {s} : unclaimed\n", .{sym.name(elf_file)}); + fn symtab(f: Format, writer: *std.io.Writer) std.io.Writer.Error!void { + const shared = f.shared; + const elf_file = f.elf_file; + try writer.writeAll(" globals\n"); + for (shared.symbols.items, 0..) |sym, i| { + const ref = shared.resolveSymbol(@intCast(i), elf_file); + if (elf_file.symbol(ref)) |ref_sym| { + try writer.print(" {f}\n", .{ref_sym.fmt(elf_file)}); + } else { + try writer.print(" {s} : unclaimed\n", .{sym.name(elf_file)}); + } } } -} +}; const SharedObject = @This(); diff --git a/src/link/Elf/Symbol.zig b/src/link/Elf/Symbol.zig index 843c23dca4..e7a507298d 100644 --- a/src/link/Elf/Symbol.zig +++ b/src/link/Elf/Symbol.zig @@ -316,99 +316,72 @@ pub fn setOutputSym(symbol: Symbol, elf_file: *Elf, out: *elf.Elf64_Sym) void { out.st_size = esym.st_size; } -pub fn format( - symbol: Symbol, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = symbol; - _ = unused_fmt_string; - _ = options; - _ = writer; - @compileError("do not format Symbol directly"); -} - -const FormatContext = struct { +const Format = struct { symbol: Symbol, elf_file: *Elf, + + fn name(f: Format, writer: *std.io.Writer) std.io.Writer.Error!void { + const elf_file = f.elf_file; + const symbol = f.symbol; + try writer.writeAll(symbol.name(elf_file)); + switch (symbol.version_index.VERSION) { + @intFromEnum(elf.VER_NDX.LOCAL), 
@intFromEnum(elf.VER_NDX.GLOBAL) => {}, + else => { + const file_ptr = symbol.file(elf_file).?; + assert(file_ptr == .shared_object); + const shared_object = file_ptr.shared_object; + try writer.print("@{s}", .{shared_object.versionString(symbol.version_index)}); + }, + } + } + + fn default(f: Format, writer: *std.io.Writer) std.io.Writer.Error!void { + const symbol = f.symbol; + const elf_file = f.elf_file; + try writer.print("%{d} : {f} : @{x}", .{ + symbol.esym_index, + symbol.fmtName(elf_file), + symbol.address(.{ .plt = false, .trampoline = false }, elf_file), + }); + if (symbol.file(elf_file)) |file_ptr| { + if (symbol.isAbs(elf_file)) { + if (symbol.elfSym(elf_file).st_shndx == elf.SHN_UNDEF) { + try writer.writeAll(" : undef"); + } else { + try writer.writeAll(" : absolute"); + } + } else if (symbol.outputShndx(elf_file)) |shndx| { + try writer.print(" : shdr({d})", .{shndx}); + } + if (symbol.atom(elf_file)) |atom_ptr| { + try writer.print(" : atom({d})", .{atom_ptr.atom_index}); + } + var buf: [2]u8 = .{'_'} ** 2; + if (symbol.flags.@"export") buf[0] = 'E'; + if (symbol.flags.import) buf[1] = 'I'; + try writer.print(" : {s}", .{&buf}); + if (symbol.flags.weak) try writer.writeAll(" : weak"); + switch (file_ptr) { + inline else => |x| try writer.print(" : {s}({d})", .{ @tagName(file_ptr), x.index }), + } + } else try writer.writeAll(" : unresolved"); + } }; -pub fn fmtName(symbol: Symbol, elf_file: *Elf) std.fmt.Formatter(formatName) { +pub fn fmtName(symbol: Symbol, elf_file: *Elf) std.fmt.Formatter(Format, Format.name) { return .{ .data = .{ .symbol = symbol, .elf_file = elf_file, } }; } -fn formatName( - ctx: FormatContext, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = options; - _ = unused_fmt_string; - const elf_file = ctx.elf_file; - const symbol = ctx.symbol; - try writer.writeAll(symbol.name(elf_file)); - switch (symbol.version_index.VERSION) { - @intFromEnum(elf.VER_NDX.LOCAL), 
@intFromEnum(elf.VER_NDX.GLOBAL) => {}, - else => { - const file_ptr = symbol.file(elf_file).?; - assert(file_ptr == .shared_object); - const shared_object = file_ptr.shared_object; - try writer.print("@{s}", .{shared_object.versionString(symbol.version_index)}); - }, - } -} - -pub fn fmt(symbol: Symbol, elf_file: *Elf) std.fmt.Formatter(format2) { +pub fn fmt(symbol: Symbol, elf_file: *Elf) std.fmt.Formatter(Format, Format.default) { return .{ .data = .{ .symbol = symbol, .elf_file = elf_file, } }; } -fn format2( - ctx: FormatContext, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = options; - _ = unused_fmt_string; - const symbol = ctx.symbol; - const elf_file = ctx.elf_file; - try writer.print("%{d} : {s} : @{x}", .{ - symbol.esym_index, - symbol.fmtName(elf_file), - symbol.address(.{ .plt = false, .trampoline = false }, elf_file), - }); - if (symbol.file(elf_file)) |file_ptr| { - if (symbol.isAbs(elf_file)) { - if (symbol.elfSym(elf_file).st_shndx == elf.SHN_UNDEF) { - try writer.writeAll(" : undef"); - } else { - try writer.writeAll(" : absolute"); - } - } else if (symbol.outputShndx(elf_file)) |shndx| { - try writer.print(" : shdr({d})", .{shndx}); - } - if (symbol.atom(elf_file)) |atom_ptr| { - try writer.print(" : atom({d})", .{atom_ptr.atom_index}); - } - var buf: [2]u8 = .{'_'} ** 2; - if (symbol.flags.@"export") buf[0] = 'E'; - if (symbol.flags.import) buf[1] = 'I'; - try writer.print(" : {s}", .{&buf}); - if (symbol.flags.weak) try writer.writeAll(" : weak"); - switch (file_ptr) { - inline else => |x| try writer.print(" : {s}({d})", .{ @tagName(file_ptr), x.index }), - } - } else try writer.writeAll(" : unresolved"); -} - pub const Flags = packed struct { /// Whether the symbol is imported at runtime. 
import: bool = false, diff --git a/src/link/Elf/Thunk.zig b/src/link/Elf/Thunk.zig index 23dc2f3b0b..2af0c9c9d3 100644 --- a/src/link/Elf/Thunk.zig +++ b/src/link/Elf/Thunk.zig @@ -65,47 +65,27 @@ fn trampolineSize(cpu_arch: std.Target.Cpu.Arch) usize { }; } -pub fn format( - thunk: Thunk, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = thunk; - _ = unused_fmt_string; - _ = options; - _ = writer; - @compileError("do not format Thunk directly"); -} - -pub fn fmt(thunk: Thunk, elf_file: *Elf) std.fmt.Formatter(format2) { +pub fn fmt(thunk: Thunk, elf_file: *Elf) std.fmt.Formatter(Format, Format.default) { return .{ .data = .{ .thunk = thunk, .elf_file = elf_file, } }; } -const FormatContext = struct { +const Format = struct { thunk: Thunk, elf_file: *Elf, -}; -fn format2( - ctx: FormatContext, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = options; - _ = unused_fmt_string; - const thunk = ctx.thunk; - const elf_file = ctx.elf_file; - try writer.print("@{x} : size({x})\n", .{ thunk.value, thunk.size(elf_file) }); - for (thunk.symbols.keys()) |ref| { - const sym = elf_file.symbol(ref).?; - try writer.print(" {} : {s} : @{x}\n", .{ ref, sym.name(elf_file), sym.value }); + fn default(f: Format, writer: *std.io.Writer) std.io.Writer.Error!void { + const thunk = f.thunk; + const elf_file = f.elf_file; + try writer.print("@{x} : size({x})\n", .{ thunk.value, thunk.size(elf_file) }); + for (thunk.symbols.keys()) |ref| { + const sym = elf_file.symbol(ref).?; + try writer.print(" {f} : {s} : @{x}\n", .{ ref, sym.name(elf_file), sym.value }); + } } -} +}; pub const Index = u32; diff --git a/src/link/Elf/ZigObject.zig b/src/link/Elf/ZigObject.zig index f4b340a549..c8f37125ed 100644 --- a/src/link/Elf/ZigObject.zig +++ b/src/link/Elf/ZigObject.zig @@ -803,9 +803,9 @@ pub fn initRelaSections(self: *ZigObject, elf_file: *Elf) !void { const out_shndx = 
atom_ptr.output_section_index; const out_shdr = elf_file.sections.items(.shdr)[out_shndx]; if (out_shdr.sh_type == elf.SHT_NOBITS) continue; - const rela_sect_name = try std.fmt.allocPrintZ(gpa, ".rela{s}", .{ + const rela_sect_name = try std.fmt.allocPrintSentinel(gpa, ".rela{s}", .{ elf_file.getShString(out_shdr.sh_name), - }); + }, 0); defer gpa.free(rela_sect_name); _ = elf_file.sectionByName(rela_sect_name) orelse try elf_file.addRelaShdr(try elf_file.insertShString(rela_sect_name), out_shndx); @@ -824,9 +824,9 @@ pub fn addAtomsToRelaSections(self: *ZigObject, elf_file: *Elf) !void { const out_shndx = atom_ptr.output_section_index; const out_shdr = elf_file.sections.items(.shdr)[out_shndx]; if (out_shdr.sh_type == elf.SHT_NOBITS) continue; - const rela_sect_name = try std.fmt.allocPrintZ(gpa, ".rela{s}", .{ + const rela_sect_name = try std.fmt.allocPrintSentinel(gpa, ".rela{s}", .{ elf_file.getShString(out_shdr.sh_name), - }); + }, 0); defer gpa.free(rela_sect_name); const out_rela_shndx = elf_file.sectionByName(rela_sect_name).?; const out_rela_shdr = &elf_file.sections.items(.shdr)[out_rela_shndx]; @@ -925,7 +925,7 @@ pub fn getNavVAddr( const zcu = pt.zcu; const ip = &zcu.intern_pool; const nav = ip.getNav(nav_index); - log.debug("getNavVAddr {}({d})", .{ nav.fqn.fmt(ip), nav_index }); + log.debug("getNavVAddr {f}({d})", .{ nav.fqn.fmt(ip), nav_index }); const this_sym_index = if (nav.getExtern(ip)) |@"extern"| try self.getGlobalSymbol( elf_file, nav.name.toSlice(ip), @@ -1268,7 +1268,7 @@ fn updateNavCode( const ip = &zcu.intern_pool; const nav = ip.getNav(nav_index); - log.debug("updateNavCode {}({d})", .{ nav.fqn.fmt(ip), nav_index }); + log.debug("updateNavCode {f}({d})", .{ nav.fqn.fmt(ip), nav_index }); const target = &zcu.navFileScope(nav_index).mod.?.resolved_target.result; const required_alignment = switch (pt.navAlignment(nav_index)) { @@ -1302,7 +1302,7 @@ fn updateNavCode( self.allocateAtom(atom_ptr, true, elf_file) catch |err| return 
elf_file.base.cgFail(nav_index, "failed to allocate atom: {s}", .{@errorName(err)}); - log.debug("growing {} from 0x{x} to 0x{x}", .{ nav.fqn.fmt(ip), old_vaddr, atom_ptr.value }); + log.debug("growing {f} from 0x{x} to 0x{x}", .{ nav.fqn.fmt(ip), old_vaddr, atom_ptr.value }); if (old_vaddr != atom_ptr.value) { sym.value = 0; esym.st_value = 0; @@ -1347,7 +1347,7 @@ fn updateNavCode( const file_offset = atom_ptr.offset(elf_file); elf_file.base.file.?.pwriteAll(code, file_offset) catch |err| return elf_file.base.cgFail(nav_index, "failed to write to output file: {s}", .{@errorName(err)}); - log.debug("writing {} from 0x{x} to 0x{x}", .{ nav.fqn.fmt(ip), file_offset, file_offset + code.len }); + log.debug("writing {f} from 0x{x} to 0x{x}", .{ nav.fqn.fmt(ip), file_offset, file_offset + code.len }); } } @@ -1365,7 +1365,7 @@ fn updateTlv( const gpa = zcu.gpa; const nav = ip.getNav(nav_index); - log.debug("updateTlv {}({d})", .{ nav.fqn.fmt(ip), nav_index }); + log.debug("updateTlv {f}({d})", .{ nav.fqn.fmt(ip), nav_index }); const required_alignment = pt.navAlignment(nav_index); @@ -1424,7 +1424,7 @@ pub fn updateFunc( const gpa = elf_file.base.comp.gpa; const func = zcu.funcInfo(func_index); - log.debug("updateFunc {}({d})", .{ ip.getNav(func.owner_nav).fqn.fmt(ip), func.owner_nav }); + log.debug("updateFunc {f}({d})", .{ ip.getNav(func.owner_nav).fqn.fmt(ip), func.owner_nav }); const sym_index = try self.getOrCreateMetadataForNav(zcu, func.owner_nav); self.atom(self.symbol(sym_index).ref.index).?.freeRelocs(self); @@ -1447,7 +1447,7 @@ pub fn updateFunc( const code = code_buffer.items; const shndx = try self.getNavShdrIndex(elf_file, zcu, func.owner_nav, sym_index, code); - log.debug("setting shdr({x},{s}) for {}", .{ + log.debug("setting shdr({x},{s}) for {f}", .{ shndx, elf_file.getShString(elf_file.sections.items(.shdr)[shndx].sh_name), ip.getNav(func.owner_nav).fqn.fmt(ip), @@ -1529,7 +1529,7 @@ pub fn updateNav( const ip = &zcu.intern_pool; const nav = 
ip.getNav(nav_index); - log.debug("updateNav {}({d})", .{ nav.fqn.fmt(ip), nav_index }); + log.debug("updateNav {f}({d})", .{ nav.fqn.fmt(ip), nav_index }); const nav_init = switch (ip.indexToKey(nav.status.fully_resolved.val)) { .func => .none, @@ -1576,7 +1576,7 @@ pub fn updateNav( const code = code_buffer.items; const shndx = try self.getNavShdrIndex(elf_file, zcu, nav_index, sym_index, code); - log.debug("setting shdr({x},{s}) for {}", .{ + log.debug("setting shdr({x},{s}) for {f}", .{ shndx, elf_file.getShString(elf_file.sections.items(.shdr)[shndx].sh_name), nav.fqn.fmt(ip), @@ -1622,7 +1622,7 @@ fn updateLazySymbol( defer code_buffer.deinit(gpa); const name_str_index = blk: { - const name = try std.fmt.allocPrint(gpa, "__lazy_{s}_{}", .{ + const name = try std.fmt.allocPrint(gpa, "__lazy_{s}_{f}", .{ @tagName(sym.kind), Type.fromInterned(sym.ty).fmt(pt), }); @@ -1941,7 +1941,7 @@ pub fn allocateAtom(self: *ZigObject, atom_ptr: *Atom, requires_padding: bool, e .requires_padding = requires_padding, }); atom_ptr.value = @intCast(alloc_res.value); - log.debug("allocated {s} at {x}\n placement {?}", .{ + log.debug("allocated {s} at {x}\n placement {f}", .{ atom_ptr.name(elf_file), atom_ptr.offset(elf_file), alloc_res.placement, @@ -1986,7 +1986,7 @@ pub fn allocateAtom(self: *ZigObject, atom_ptr: *Atom, requires_padding: bool, e atom_ptr.next_atom_ref = .{ .index = 0, .file = 0 }; } - log.debug(" prev {?}, next {?}", .{ atom_ptr.prev_atom_ref, atom_ptr.next_atom_ref }); + log.debug(" prev {f}, next {f}", .{ atom_ptr.prev_atom_ref, atom_ptr.next_atom_ref }); } pub fn resetShdrIndexes(self: *ZigObject, backlinks: []const u32) void { @@ -2195,60 +2195,46 @@ pub fn setSymbolExtra(self: *ZigObject, index: u32, extra: Symbol.Extra) void { } } -pub fn fmtSymtab(self: *ZigObject, elf_file: *Elf) std.fmt.Formatter(formatSymtab) { - return .{ .data = .{ - .self = self, - .elf_file = elf_file, - } }; -} - -const FormatContext = struct { +const Format = struct { self: 
*ZigObject, elf_file: *Elf, + + fn symtab(f: Format, writer: *std.io.Writer) std.io.Writer.Error!void { + const self = f.self; + const elf_file = f.elf_file; + try writer.writeAll(" locals\n"); + for (self.local_symbols.items) |index| { + const local = self.symbols.items[index]; + try writer.print(" {f}\n", .{local.fmt(elf_file)}); + } + try writer.writeAll(" globals\n"); + for (f.self.global_symbols.items) |index| { + const global = self.symbols.items[index]; + try writer.print(" {f}\n", .{global.fmt(elf_file)}); + } + } + + fn atoms(f: Format, writer: *std.io.Writer) std.io.Writer.Error!void { + try writer.writeAll(" atoms\n"); + for (f.self.atoms_indexes.items) |atom_index| { + const atom_ptr = f.self.atom(atom_index) orelse continue; + try writer.print(" {f}\n", .{atom_ptr.fmt(f.elf_file)}); + } + } }; -fn formatSymtab( - ctx: FormatContext, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = unused_fmt_string; - _ = options; - const self = ctx.self; - const elf_file = ctx.elf_file; - try writer.writeAll(" locals\n"); - for (self.local_symbols.items) |index| { - const local = self.symbols.items[index]; - try writer.print(" {}\n", .{local.fmt(elf_file)}); - } - try writer.writeAll(" globals\n"); - for (ctx.self.global_symbols.items) |index| { - const global = self.symbols.items[index]; - try writer.print(" {}\n", .{global.fmt(elf_file)}); - } -} - -pub fn fmtAtoms(self: *ZigObject, elf_file: *Elf) std.fmt.Formatter(formatAtoms) { +pub fn fmtSymtab(self: *ZigObject, elf_file: *Elf) std.fmt.Formatter(Format, Format.symtab) { return .{ .data = .{ .self = self, .elf_file = elf_file, } }; } -fn formatAtoms( - ctx: FormatContext, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = unused_fmt_string; - _ = options; - try writer.writeAll(" atoms\n"); - for (ctx.self.atoms_indexes.items) |atom_index| { - const atom_ptr = ctx.self.atom(atom_index) orelse 
continue; - try writer.print(" {}\n", .{atom_ptr.fmt(ctx.elf_file)}); - } +pub fn fmtAtoms(self: *ZigObject, elf_file: *Elf) std.fmt.Formatter(Format, Format.atoms) { + return .{ .data = .{ + .self = self, + .elf_file = elf_file, + } }; } const ElfSym = struct { @@ -2285,7 +2271,7 @@ fn checkNavAllocated(pt: Zcu.PerThread, index: InternPool.Nav.Index, meta: AvMet const zcu = pt.zcu; const ip = &zcu.intern_pool; const nav = ip.getNav(index); - log.err("NAV {}({d}) assigned symbol {d} but not allocated!", .{ + log.err("NAV {f}({d}) assigned symbol {d} but not allocated!", .{ nav.fqn.fmt(ip), index, meta.symbol_index, @@ -2298,7 +2284,7 @@ fn checkUavAllocated(pt: Zcu.PerThread, index: InternPool.Index, meta: AvMetadat const zcu = pt.zcu; const uav = Value.fromInterned(index); const ty = uav.typeOf(zcu); - log.err("UAV {}({d}) assigned symbol {d} but not allocated!", .{ + log.err("UAV {f}({d}) assigned symbol {d} but not allocated!", .{ ty.fmt(pt), index, meta.symbol_index, diff --git a/src/link/Elf/eh_frame.zig b/src/link/Elf/eh_frame.zig index bf46fb0262..98284d7840 100644 --- a/src/link/Elf/eh_frame.zig +++ b/src/link/Elf/eh_frame.zig @@ -47,52 +47,32 @@ pub const Fde = struct { return object.relocs.items[fde.rel_index..][0..fde.rel_num]; } - pub fn format( - fde: Fde, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = fde; - _ = unused_fmt_string; - _ = options; - _ = writer; - @compileError("do not format FDEs directly"); - } - - pub fn fmt(fde: Fde, elf_file: *Elf) std.fmt.Formatter(format2) { + pub fn fmt(fde: Fde, elf_file: *Elf) std.fmt.Formatter(Format, Format.default) { return .{ .data = .{ .fde = fde, .elf_file = elf_file, } }; } - const FdeFormatContext = struct { + const Format = struct { fde: Fde, elf_file: *Elf, - }; - fn format2( - ctx: FdeFormatContext, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = unused_fmt_string; - _ = 
options; - const fde = ctx.fde; - const elf_file = ctx.elf_file; - const base_addr = fde.address(elf_file); - const object = elf_file.file(fde.file_index).?.object; - const atom_name = fde.atom(object).name(elf_file); - try writer.print("@{x} : size({x}) : cie({d}) : {s}", .{ - base_addr + fde.out_offset, - fde.calcSize(), - fde.cie_index, - atom_name, - }); - if (!fde.alive) try writer.writeAll(" : [*]"); - } + fn default(f: Format, writer: *std.io.Writer) std.io.Writer.Error!void { + const fde = f.fde; + const elf_file = f.elf_file; + const base_addr = fde.address(elf_file); + const object = elf_file.file(fde.file_index).?.object; + const atom_name = fde.atom(object).name(elf_file); + try writer.print("@{x} : size({x}) : cie({d}) : {s}", .{ + base_addr + fde.out_offset, + fde.calcSize(), + fde.cie_index, + atom_name, + }); + if (!fde.alive) try writer.writeAll(" : [*]"); + } + }; }; pub const Cie = struct { @@ -150,48 +130,28 @@ pub const Cie = struct { return true; } - pub fn format( - cie: Cie, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = cie; - _ = unused_fmt_string; - _ = options; - _ = writer; - @compileError("do not format CIEs directly"); - } - - pub fn fmt(cie: Cie, elf_file: *Elf) std.fmt.Formatter(format2) { + pub fn fmt(cie: Cie, elf_file: *Elf) std.fmt.Formatter(Format, Format.default) { return .{ .data = .{ .cie = cie, .elf_file = elf_file, } }; } - const CieFormatContext = struct { + const Format = struct { cie: Cie, elf_file: *Elf, - }; - fn format2( - ctx: CieFormatContext, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = unused_fmt_string; - _ = options; - const cie = ctx.cie; - const elf_file = ctx.elf_file; - const base_addr = cie.address(elf_file); - try writer.print("@{x} : size({x})", .{ - base_addr + cie.out_offset, - cie.calcSize(), - }); - if (!cie.alive) try writer.writeAll(" : [*]"); - } + fn default(f: 
Format, writer: *std.io.Writer) std.io.Writer.Error!void { + const cie = f.cie; + const elf_file = f.elf_file; + const base_addr = cie.address(elf_file); + try writer.print("@{x} : size({x})", .{ + base_addr + cie.out_offset, + cie.calcSize(), + }); + if (!cie.alive) try writer.writeAll(" : [*]"); + } + }; }; pub const Iterator = struct { @@ -316,7 +276,7 @@ fn resolveReloc(rec: anytype, sym: *const Symbol, rel: elf.Elf64_Rela, elf_file: const S = math.cast(i64, sym.address(.{}, elf_file)) orelse return error.Overflow; const A = rel.r_addend; - relocs_log.debug(" {s}: {x}: [{x} => {x}] ({s})", .{ + relocs_log.debug(" {f}: {x}: [{x} => {x}] ({s})", .{ relocation.fmtRelocType(rel.r_type(), cpu_arch), offset, P, @@ -438,7 +398,7 @@ fn emitReloc(elf_file: *Elf, r_offset: u64, sym: *const Symbol, rel: elf.Elf64_R }, } - relocs_log.debug(" {s}: [{x} => {d}({s})] + {x}", .{ + relocs_log.debug(" {f}: [{x} => {d}({s})] + {x}", .{ relocation.fmtRelocType(r_type, cpu_arch), r_offset, r_sym, @@ -607,11 +567,11 @@ const riscv = struct { fn reportInvalidReloc(rec: anytype, elf_file: *Elf, rel: elf.Elf64_Rela) !void { const diags = &elf_file.base.comp.link_diags; var err = try diags.addErrorWithNotes(1); - try err.addMsg("invalid relocation type {} at offset 0x{x}", .{ + try err.addMsg("invalid relocation type {f} at offset 0x{x}", .{ relocation.fmtRelocType(rel.r_type(), elf_file.getTarget().cpu.arch), rel.r_offset, }); - err.addNote("in {}:.eh_frame", .{elf_file.file(rec.file_index).?.fmtPath()}); + err.addNote("in {f}:.eh_frame", .{elf_file.file(rec.file_index).?.fmtPath()}); return error.RelocFailure; } diff --git a/src/link/Elf/file.zig b/src/link/Elf/file.zig index 7292f8ca5d..e60a41e2c7 100644 --- a/src/link/Elf/file.zig +++ b/src/link/Elf/file.zig @@ -10,23 +10,16 @@ pub const File = union(enum) { }; } - pub fn fmtPath(file: File) std.fmt.Formatter(formatPath) { + pub fn fmtPath(file: File) std.fmt.Formatter(File, formatPath) { return .{ .data = file }; } - fn formatPath( 
- file: File, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = unused_fmt_string; - _ = options; + fn formatPath(file: File, writer: *std.io.Writer) std.io.Writer.Error!void { switch (file) { .zig_object => |zo| try writer.writeAll(zo.basename), .linker_defined => try writer.writeAll("(linker defined)"), - .object => |x| try writer.print("{}", .{x.fmtPath()}), - .shared_object => |x| try writer.print("{}", .{@as(Path, x.path)}), + .object => |x| try writer.print("{f}", .{x.fmtPath()}), + .shared_object => |x| try writer.print("{f}", .{@as(Path, x.path)}), } } diff --git a/src/link/Elf/gc.zig b/src/link/Elf/gc.zig index ff6c0bb7ce..61eb2098da 100644 --- a/src/link/Elf/gc.zig +++ b/src/link/Elf/gc.zig @@ -111,7 +111,7 @@ fn markLive(atom: *Atom, elf_file: *Elf) void { const target_sym = elf_file.symbol(ref) orelse continue; const target_atom = target_sym.atom(elf_file) orelse continue; target_atom.alive = true; - gc_track_live_log.debug("{}marking live atom({d})", .{ track_live_level, target_atom.atom_index }); + gc_track_live_log.debug("{f}marking live atom({d})", .{ track_live_level, target_atom.atom_index }); if (markAtom(target_atom)) markLive(target_atom, elf_file); } } @@ -128,7 +128,7 @@ fn markLive(atom: *Atom, elf_file: *Elf) void { } const target_atom = target_sym.atom(elf_file) orelse continue; target_atom.alive = true; - gc_track_live_log.debug("{}marking live atom({d})", .{ track_live_level, target_atom.atom_index }); + gc_track_live_log.debug("{f}marking live atom({d})", .{ track_live_level, target_atom.atom_index }); if (markAtom(target_atom)) markLive(target_atom, elf_file); } } @@ -163,14 +163,14 @@ fn prune(elf_file: *Elf) void { } pub fn dumpPrunedAtoms(elf_file: *Elf) !void { - const stderr = std.io.getStdErr().writer(); + const stderr = std.fs.File.stderr().deprecatedWriter(); for (elf_file.objects.items) |index| { const file = elf_file.file(index).?; for (file.atoms()) |atom_index| 
{ const atom = file.atom(atom_index) orelse continue; if (!atom.alive) // TODO should we simply print to stderr? - try stderr.print("link: removing unused section '{s}' in file '{}'\n", .{ + try stderr.print("link: removing unused section '{s}' in file '{f}'\n", .{ atom.name(elf_file), atom.file(elf_file).?.fmtPath(), }); @@ -185,15 +185,8 @@ const Level = struct { self.value += 1; } - pub fn format( - self: *const @This(), - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = unused_fmt_string; - _ = options; - try writer.writeByteNTimes(' ', self.value); + pub fn format(self: *const @This(), w: *std.io.Writer) std.io.Writer.Error!void { + try w.splatByteAll(' ', self.value); } }; diff --git a/src/link/Elf/relocatable.zig b/src/link/Elf/relocatable.zig index 6541364f92..7599e82908 100644 --- a/src/link/Elf/relocatable.zig +++ b/src/link/Elf/relocatable.zig @@ -31,7 +31,7 @@ pub fn flushStaticLib(elf_file: *Elf, comp: *Compilation) !void { try elf_file.allocateNonAllocSections(); if (build_options.enable_logging) { - state_log.debug("{}", .{elf_file.dumpState()}); + state_log.debug("{f}", .{elf_file.dumpState()}); } try elf_file.writeMergeSections(); @@ -96,8 +96,8 @@ pub fn flushStaticLib(elf_file: *Elf, comp: *Compilation) !void { }; if (build_options.enable_logging) { - state_log.debug("ar_symtab\n{}\n", .{ar_symtab.fmt(elf_file)}); - state_log.debug("ar_strtab\n{}\n", .{ar_strtab}); + state_log.debug("ar_symtab\n{f}\n", .{ar_symtab.fmt(elf_file)}); + state_log.debug("ar_strtab\n{f}\n", .{ar_strtab}); } var buffer = std.ArrayList(u8).init(gpa); @@ -170,7 +170,7 @@ pub fn flushObject(elf_file: *Elf, comp: *Compilation) !void { try elf_file.allocateNonAllocSections(); if (build_options.enable_logging) { - state_log.debug("{}", .{elf_file.dumpState()}); + state_log.debug("{f}", .{elf_file.dumpState()}); } try writeAtoms(elf_file); diff --git a/src/link/Elf/relocation.zig b/src/link/Elf/relocation.zig index 
047312cd68..305dcda789 100644 --- a/src/link/Elf/relocation.zig +++ b/src/link/Elf/relocation.zig @@ -141,21 +141,14 @@ const FormatRelocTypeCtx = struct { cpu_arch: std.Target.Cpu.Arch, }; -pub fn fmtRelocType(r_type: u32, cpu_arch: std.Target.Cpu.Arch) std.fmt.Formatter(formatRelocType) { +pub fn fmtRelocType(r_type: u32, cpu_arch: std.Target.Cpu.Arch) std.fmt.Formatter(FormatRelocTypeCtx, formatRelocType) { return .{ .data = .{ .r_type = r_type, .cpu_arch = cpu_arch, } }; } -fn formatRelocType( - ctx: FormatRelocTypeCtx, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = unused_fmt_string; - _ = options; +fn formatRelocType(ctx: FormatRelocTypeCtx, writer: *std.io.Writer) std.io.Writer.Error!void { const r_type = ctx.r_type; switch (ctx.cpu_arch) { .x86_64 => try writer.print("R_X86_64_{s}", .{@tagName(@as(elf.R_X86_64, @enumFromInt(r_type)))}), diff --git a/src/link/Elf/synthetic_sections.zig b/src/link/Elf/synthetic_sections.zig index aca0c17d0c..3f2ac7cb16 100644 --- a/src/link/Elf/synthetic_sections.zig +++ b/src/link/Elf/synthetic_sections.zig @@ -606,37 +606,30 @@ pub const GotSection = struct { } } - const FormatCtx = struct { + const Format = struct { got: GotSection, elf_file: *Elf, + + pub fn default(f: Format, writer: *std.io.Writer) std.io.Writer.Error!void { + const got = f.got; + const elf_file = f.elf_file; + try writer.writeAll("GOT\n"); + for (got.entries.items) |entry| { + const symbol = elf_file.symbol(entry.ref).?; + try writer.print(" {d}@0x{x} => {f}@0x{x} ({s})\n", .{ + entry.cell_index, + entry.address(elf_file), + entry.ref, + symbol.address(.{}, elf_file), + symbol.name(elf_file), + }); + } + } }; - pub fn fmt(got: GotSection, elf_file: *Elf) std.fmt.Formatter(format2) { + pub fn fmt(got: GotSection, elf_file: *Elf) std.fmt.Formatter(Format, Format.default) { return .{ .data = .{ .got = got, .elf_file = elf_file } }; } - - pub fn format2( - ctx: FormatCtx, - comptime 
unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = options; - _ = unused_fmt_string; - const got = ctx.got; - const elf_file = ctx.elf_file; - try writer.writeAll("GOT\n"); - for (got.entries.items) |entry| { - const symbol = elf_file.symbol(entry.ref).?; - try writer.print(" {d}@0x{x} => {}@0x{x} ({s})\n", .{ - entry.cell_index, - entry.address(elf_file), - entry.ref, - symbol.address(.{}, elf_file), - symbol.name(elf_file), - }); - } - } }; pub const PltSection = struct { @@ -703,7 +696,7 @@ pub const PltSection = struct { const r_sym: u64 = extra.dynamic; const r_type = relocation.encode(.jump_slot, cpu_arch); - relocs_log.debug(" {s}: [{x} => {d}({s})] + 0", .{ + relocs_log.debug(" {f}: [{x} => {d}({s})] + 0", .{ relocation.fmtRelocType(r_type, cpu_arch), r_offset, r_sym, @@ -749,38 +742,31 @@ pub const PltSection = struct { } } - const FormatCtx = struct { + const Format = struct { plt: PltSection, elf_file: *Elf, + + pub fn default(f: Format, writer: *std.io.Writer) std.io.Writer.Error!void { + const plt = f.plt; + const elf_file = f.elf_file; + try writer.writeAll("PLT\n"); + for (plt.symbols.items, 0..) |ref, i| { + const symbol = elf_file.symbol(ref).?; + try writer.print(" {d}@0x{x} => {f}@0x{x} ({s})\n", .{ + i, + symbol.pltAddress(elf_file), + ref, + symbol.address(.{}, elf_file), + symbol.name(elf_file), + }); + } + } }; - pub fn fmt(plt: PltSection, elf_file: *Elf) std.fmt.Formatter(format2) { + pub fn fmt(plt: PltSection, elf_file: *Elf) std.fmt.Formatter(Format, Format.default) { return .{ .data = .{ .plt = plt, .elf_file = elf_file } }; } - pub fn format2( - ctx: FormatCtx, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = options; - _ = unused_fmt_string; - const plt = ctx.plt; - const elf_file = ctx.elf_file; - try writer.writeAll("PLT\n"); - for (plt.symbols.items, 0..) 
|ref, i| { - const symbol = elf_file.symbol(ref).?; - try writer.print(" {d}@0x{x} => {}@0x{x} ({s})\n", .{ - i, - symbol.pltAddress(elf_file), - ref, - symbol.address(.{}, elf_file), - symbol.name(elf_file), - }); - } - } - const x86_64 = struct { fn write(plt: PltSection, elf_file: *Elf, writer: anytype) !void { const shdrs = elf_file.sections.items(.shdr); diff --git a/src/link/LdScript.zig b/src/link/LdScript.zig index ed5dbc4681..534f9766bb 100644 --- a/src/link/LdScript.zig +++ b/src/link/LdScript.zig @@ -41,8 +41,8 @@ pub fn parse( try line_col.append(gpa, .{ .line = line, .column = column }); switch (tok.id) { .invalid => { - return diags.failParse(path, "invalid token in LD script: '{s}' ({d}:{d})", .{ - std.fmt.fmtSliceEscapeLower(tok.get(data)), line, column, + return diags.failParse(path, "invalid token in LD script: '{f}' ({d}:{d})", .{ + std.ascii.hexEscape(tok.get(data), .lower), line, column, }); }, .new_line => { diff --git a/src/link/Lld.zig b/src/link/Lld.zig index caf9da1ed2..1aeeb5d214 100644 --- a/src/link/Lld.zig +++ b/src/link/Lld.zig @@ -294,7 +294,7 @@ fn linkAsArchive(lld: *Lld, arena: Allocator) !void { break :p try comp.resolveEmitPathFlush(arena, .temp, base.zcu_object_basename.?); } else null; - log.debug("zcu_obj_path={?}", .{zcu_obj_path}); + log.debug("zcu_obj_path={?f}", .{zcu_obj_path}); const compiler_rt_path: ?Cache.Path = if (comp.compiler_rt_strat == .obj) comp.compiler_rt_obj.?.full_object_path @@ -437,7 +437,7 @@ fn coffLink(lld: *Lld, arena: Allocator) !void { try argv.append(try allocPrint(arena, "-PDBALTPATH:{s}", .{out_pdb_basename})); } if (comp.version) |version| { - try argv.append(try allocPrint(arena, "-VERSION:{}.{}", .{ version.major, version.minor })); + try argv.append(try allocPrint(arena, "-VERSION:{d}.{d}", .{ version.major, version.minor })); } if (target_util.llvmMachineAbi(target)) |mabi| { @@ -507,7 +507,7 @@ fn coffLink(lld: *Lld, arena: Allocator) !void { if (comp.emit_implib) |raw_emit_path| { const 
path = try comp.resolveEmitPathFlush(arena, .temp, raw_emit_path); - try argv.append(try allocPrint(arena, "-IMPLIB:{}", .{path})); + try argv.append(try allocPrint(arena, "-IMPLIB:{f}", .{path})); } if (comp.config.link_libc) { @@ -533,7 +533,7 @@ fn coffLink(lld: *Lld, arena: Allocator) !void { }, .object, .archive => |obj| { if (obj.must_link) { - argv.appendAssumeCapacity(try allocPrint(arena, "-WHOLEARCHIVE:{}", .{@as(Cache.Path, obj.path)})); + argv.appendAssumeCapacity(try allocPrint(arena, "-WHOLEARCHIVE:{f}", .{@as(Cache.Path, obj.path)})); } else { argv.appendAssumeCapacity(try obj.path.toString(arena)); } @@ -933,9 +933,7 @@ fn elfLink(lld: *Lld, arena: Allocator) !void { .fast, .uuid, .sha1, .md5 => try argv.append(try std.fmt.allocPrint(arena, "--build-id={s}", .{ @tagName(base.build_id), })), - .hexstring => |hs| try argv.append(try std.fmt.allocPrint(arena, "--build-id=0x{s}", .{ - std.fmt.fmtSliceHexLower(hs.toSlice()), - })), + .hexstring => |hs| try argv.append(try std.fmt.allocPrint(arena, "--build-id=0x{x}", .{hs.toSlice()})), } try argv.append(try std.fmt.allocPrint(arena, "--image-base={d}", .{elf.image_base})); @@ -1218,7 +1216,7 @@ fn elfLink(lld: *Lld, arena: Allocator) !void { if (target.os.versionRange().gnuLibCVersion().?.order(rem_in) != .lt) continue; } - const lib_path = try std.fmt.allocPrint(arena, "{}{c}lib{s}.so.{d}", .{ + const lib_path = try std.fmt.allocPrint(arena, "{f}{c}lib{s}.so.{d}", .{ comp.glibc_so_files.?.dir_path, fs.path.sep, lib.name, lib.sover, }); try argv.append(lib_path); @@ -1231,14 +1229,14 @@ fn elfLink(lld: *Lld, arena: Allocator) !void { })); } else if (target.isFreeBSDLibC()) { for (freebsd.libs) |lib| { - const lib_path = try std.fmt.allocPrint(arena, "{}{c}lib{s}.so.{d}", .{ + const lib_path = try std.fmt.allocPrint(arena, "{f}{c}lib{s}.so.{d}", .{ comp.freebsd_so_files.?.dir_path, fs.path.sep, lib.name, lib.sover, }); try argv.append(lib_path); } } else if (target.isNetBSDLibC()) { for (netbsd.libs) 
|lib| { - const lib_path = try std.fmt.allocPrint(arena, "{}{c}lib{s}.so.{d}", .{ + const lib_path = try std.fmt.allocPrint(arena, "{f}{c}lib{s}.so.{d}", .{ comp.netbsd_so_files.?.dir_path, fs.path.sep, lib.name, lib.sover, }); try argv.append(lib_path); @@ -1511,9 +1509,7 @@ fn wasmLink(lld: *Lld, arena: Allocator) !void { .fast, .uuid, .sha1 => try argv.append(try std.fmt.allocPrint(arena, "--build-id={s}", .{ @tagName(base.build_id), })), - .hexstring => |hs| try argv.append(try std.fmt.allocPrint(arena, "--build-id=0x{s}", .{ - std.fmt.fmtSliceHexLower(hs.toSlice()), - })), + .hexstring => |hs| try argv.append(try std.fmt.allocPrint(arena, "--build-id=0x{x}", .{hs.toSlice()})), .md5 => {}, } @@ -1653,7 +1649,7 @@ fn spawnLld( child.stderr_behavior = .Pipe; child.spawn() catch |err| break :term err; - stderr = try child.stderr.?.reader().readAllAlloc(comp.gpa, std.math.maxInt(usize)); + stderr = try child.stderr.?.deprecatedReader().readAllAlloc(comp.gpa, std.math.maxInt(usize)); break :term child.wait(); }) catch |first_err| term: { const err = switch (first_err) { @@ -1667,7 +1663,7 @@ fn spawnLld( log.warn("failed to delete response file {s}: {s}", .{ rsp_path, @errorName(err) }); { defer rsp_file.close(); - var rsp_buf = std.io.bufferedWriter(rsp_file.writer()); + var rsp_buf = std.io.bufferedWriter(rsp_file.deprecatedWriter()); const rsp_writer = rsp_buf.writer(); for (argv[2..]) |arg| { try rsp_writer.writeByte('"'); @@ -1701,7 +1697,7 @@ fn spawnLld( rsp_child.stderr_behavior = .Pipe; rsp_child.spawn() catch |err| break :err err; - stderr = try rsp_child.stderr.?.reader().readAllAlloc(comp.gpa, std.math.maxInt(usize)); + stderr = try rsp_child.stderr.?.deprecatedReader().readAllAlloc(comp.gpa, std.math.maxInt(usize)); break :term rsp_child.wait() catch |err| break :err err; } }, diff --git a/src/link/MachO.zig b/src/link/MachO.zig index 7443435264..734b4b6a04 100644 --- a/src/link/MachO.zig +++ b/src/link/MachO.zig @@ -543,7 +543,7 @@ pub fn flush( 
self.allocateSyntheticSymbols(); if (build_options.enable_logging) { - state_log.debug("{}", .{self.dumpState()}); + state_log.debug("{f}", .{self.dumpState()}); } // Beyond this point, everything has been allocated a virtual address and we can resolve @@ -677,12 +677,12 @@ fn dumpArgv(self: *MachO, comp: *Compilation) !void { try argv.append("-platform_version"); try argv.append(@tagName(self.platform.os_tag)); - try argv.append(try std.fmt.allocPrint(arena, "{}", .{self.platform.version})); + try argv.append(try std.fmt.allocPrint(arena, "{f}", .{self.platform.version})); if (self.sdk_version) |ver| { try argv.append(try std.fmt.allocPrint(arena, "{d}.{d}", .{ ver.major, ver.minor })); } else { - try argv.append(try std.fmt.allocPrint(arena, "{}", .{self.platform.version})); + try argv.append(try std.fmt.allocPrint(arena, "{f}", .{self.platform.version})); } if (comp.sysroot) |syslibroot| { @@ -863,7 +863,7 @@ pub fn classifyInputFile(self: *MachO, input: link.Input) !void { const path, const file = input.pathAndFile().?; // TODO don't classify now, it's too late. 
The input file has already been classified - log.debug("classifying input file {}", .{path}); + log.debug("classifying input file {f}", .{path}); const fh = try self.addFileHandle(file); var buffer: [Archive.SARMAG]u8 = undefined; @@ -1591,7 +1591,7 @@ fn reportUndefs(self: *MachO) !void { const ref = refs.items[inote]; const file = self.getFile(ref.file).?; const atom = ref.getAtom(self).?; - err.addNote("referenced by {}:{s}", .{ file.fmtPath(), atom.getName(self) }); + err.addNote("referenced by {f}:{s}", .{ file.fmtPath(), atom.getName(self) }); } if (refs.items.len > max_notes) { @@ -3791,7 +3791,7 @@ pub fn reportParseError2( const diags = &self.base.comp.link_diags; var err = try diags.addErrorWithNotes(1); try err.addMsg(format, args); - err.addNote("while parsing {}", .{self.getFile(file_index).?.fmtPath()}); + err.addNote("while parsing {f}", .{self.getFile(file_index).?.fmtPath()}); } fn reportMissingDependencyError( @@ -3806,7 +3806,7 @@ fn reportMissingDependencyError( var err = try diags.addErrorWithNotes(2 + checked_paths.len); try err.addMsg(format, args); err.addNote("while resolving {s}", .{path}); - err.addNote("a dependency of {}", .{self.getFile(parent).?.fmtPath()}); + err.addNote("a dependency of {f}", .{self.getFile(parent).?.fmtPath()}); for (checked_paths) |p| { err.addNote("tried {s}", .{p}); } @@ -3823,7 +3823,7 @@ fn reportDependencyError( var err = try diags.addErrorWithNotes(2); try err.addMsg(format, args); err.addNote("while parsing {s}", .{path}); - err.addNote("a dependency of {}", .{self.getFile(parent).?.fmtPath()}); + err.addNote("a dependency of {f}", .{self.getFile(parent).?.fmtPath()}); } fn reportDuplicates(self: *MachO) error{ HasDuplicates, OutOfMemory }!void { @@ -3853,12 +3853,12 @@ fn reportDuplicates(self: *MachO) error{ HasDuplicates, OutOfMemory }!void { var err = try diags.addErrorWithNotes(nnotes + 1); try err.addMsg("duplicate symbol definition: {s}", .{sym.getName(self)}); - err.addNote("defined by {}", 
.{sym.getFile(self).?.fmtPath()}); + err.addNote("defined by {f}", .{sym.getFile(self).?.fmtPath()}); var inote: usize = 0; while (inote < @min(notes.items.len, max_notes)) : (inote += 1) { const file = self.getFile(notes.items[inote]).?; - err.addNote("defined by {}", .{file.fmtPath()}); + err.addNote("defined by {f}", .{file.fmtPath()}); } if (notes.items.len > max_notes) { @@ -3900,35 +3900,28 @@ pub fn ptraceDetach(self: *MachO, pid: std.posix.pid_t) !void { self.hot_state.mach_task = null; } -pub fn dumpState(self: *MachO) std.fmt.Formatter(fmtDumpState) { +pub fn dumpState(self: *MachO) std.fmt.Formatter(*MachO, fmtDumpState) { return .{ .data = self }; } -fn fmtDumpState( - self: *MachO, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = options; - _ = unused_fmt_string; +fn fmtDumpState(self: *MachO, w: *Writer) Writer.Error!void { if (self.getZigObject()) |zo| { - try writer.print("zig_object({d}) : {s}\n", .{ zo.index, zo.basename }); - try writer.print("{}{}\n", .{ + try w.print("zig_object({d}) : {s}\n", .{ zo.index, zo.basename }); + try w.print("{f}{f}\n", .{ zo.fmtAtoms(self), zo.fmtSymtab(self), }); } for (self.objects.items) |index| { const object = self.getFile(index).?.object; - try writer.print("object({d}) : {} : has_debug({})", .{ + try w.print("object({d}) : {f} : has_debug({})", .{ index, object.fmtPath(), object.hasDebugInfo(), }); - if (!object.alive) try writer.writeAll(" : ([*])"); - try writer.writeByte('\n'); - try writer.print("{}{}{}{}{}\n", .{ + if (!object.alive) try w.writeAll(" : ([*])"); + try w.writeByte('\n'); + try w.print("{f}{f}{f}{f}{f}\n", .{ object.fmtAtoms(self), object.fmtCies(self), object.fmtFdes(self), @@ -3938,48 +3931,41 @@ fn fmtDumpState( } for (self.dylibs.items) |index| { const dylib = self.getFile(index).?.dylib; - try writer.print("dylib({d}) : {} : needed({}) : weak({})", .{ + try w.print("dylib({d}) : {f} : needed({}) : weak({})", .{ index, 
@as(Path, dylib.path), dylib.needed, dylib.weak, }); - if (!dylib.isAlive(self)) try writer.writeAll(" : ([*])"); - try writer.writeByte('\n'); - try writer.print("{}\n", .{dylib.fmtSymtab(self)}); + if (!dylib.isAlive(self)) try w.writeAll(" : ([*])"); + try w.writeByte('\n'); + try w.print("{f}\n", .{dylib.fmtSymtab(self)}); } if (self.getInternalObject()) |internal| { - try writer.print("internal({d}) : internal\n", .{internal.index}); - try writer.print("{}{}\n", .{ internal.fmtAtoms(self), internal.fmtSymtab(self) }); + try w.print("internal({d}) : internal\n", .{internal.index}); + try w.print("{f}{f}\n", .{ internal.fmtAtoms(self), internal.fmtSymtab(self) }); } - try writer.writeAll("thunks\n"); + try w.writeAll("thunks\n"); for (self.thunks.items, 0..) |thunk, index| { - try writer.print("thunk({d}) : {}\n", .{ index, thunk.fmt(self) }); + try w.print("thunk({d}) : {f}\n", .{ index, thunk.fmt(self) }); } - try writer.print("stubs\n{}\n", .{self.stubs.fmt(self)}); - try writer.print("objc_stubs\n{}\n", .{self.objc_stubs.fmt(self)}); - try writer.print("got\n{}\n", .{self.got.fmt(self)}); - try writer.print("tlv_ptr\n{}\n", .{self.tlv_ptr.fmt(self)}); - try writer.writeByte('\n'); - try writer.print("sections\n{}\n", .{self.fmtSections()}); - try writer.print("segments\n{}\n", .{self.fmtSegments()}); + try w.print("stubs\n{f}\n", .{self.stubs.fmt(self)}); + try w.print("objc_stubs\n{f}\n", .{self.objc_stubs.fmt(self)}); + try w.print("got\n{f}\n", .{self.got.fmt(self)}); + try w.print("tlv_ptr\n{f}\n", .{self.tlv_ptr.fmt(self)}); + try w.writeByte('\n'); + try w.print("sections\n{f}\n", .{self.fmtSections()}); + try w.print("segments\n{f}\n", .{self.fmtSegments()}); } -fn fmtSections(self: *MachO) std.fmt.Formatter(formatSections) { +fn fmtSections(self: *MachO) std.fmt.Formatter(*MachO, formatSections) { return .{ .data = self }; } -fn formatSections( - self: *MachO, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: 
anytype, -) !void { - _ = options; - _ = unused_fmt_string; +fn formatSections(self: *MachO, w: *Writer) Writer.Error!void { const slice = self.sections.slice(); for (slice.items(.header), slice.items(.segment_id), 0..) |header, seg_id, i| { - try writer.print( + try w.print( "sect({d}) : seg({d}) : {s},{s} : @{x} ({x}) : align({x}) : size({x}) : relocs({x};{d})\n", .{ i, seg_id, header.segName(), header.sectName(), header.addr, header.offset, @@ -3989,38 +3975,24 @@ fn formatSections( } } -fn fmtSegments(self: *MachO) std.fmt.Formatter(formatSegments) { +fn fmtSegments(self: *MachO) std.fmt.Formatter(*MachO, formatSegments) { return .{ .data = self }; } -fn formatSegments( - self: *MachO, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = options; - _ = unused_fmt_string; +fn formatSegments(self: *MachO, w: *Writer) Writer.Error!void { for (self.segments.items, 0..) |seg, i| { - try writer.print("seg({d}) : {s} : @{x}-{x} ({x}-{x})\n", .{ + try w.print("seg({d}) : {s} : @{x}-{x} ({x}-{x})\n", .{ i, seg.segName(), seg.vmaddr, seg.vmaddr + seg.vmsize, seg.fileoff, seg.fileoff + seg.filesize, }); } } -pub fn fmtSectType(tt: u8) std.fmt.Formatter(formatSectType) { +pub fn fmtSectType(tt: u8) std.fmt.Formatter(u8, formatSectType) { return .{ .data = tt }; } -fn formatSectType( - tt: u8, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = options; - _ = unused_fmt_string; +fn formatSectType(tt: u8, w: *Writer) Writer.Error!void { const name = switch (tt) { macho.S_REGULAR => "REGULAR", macho.S_ZEROFILL => "ZEROFILL", @@ -4044,9 +4016,9 @@ fn formatSectType( macho.S_THREAD_LOCAL_VARIABLE_POINTERS => "THREAD_LOCAL_VARIABLE_POINTERS", macho.S_THREAD_LOCAL_INIT_FUNCTION_POINTERS => "THREAD_LOCAL_INIT_FUNCTION_POINTERS", macho.S_INIT_FUNC_OFFSETS => "INIT_FUNC_OFFSETS", - else => |x| return writer.print("UNKNOWN({x})", .{x}), + else => |x| return 
w.print("UNKNOWN({x})", .{x}), }; - try writer.print("{s}", .{name}); + try w.print("{s}", .{name}); } const is_hot_update_compatible = switch (builtin.target.os.tag) { @@ -4279,34 +4251,27 @@ pub const Platform = struct { return false; } - pub fn fmtTarget(plat: Platform, cpu_arch: std.Target.Cpu.Arch) std.fmt.Formatter(formatTarget) { + pub fn fmtTarget(plat: Platform, cpu_arch: std.Target.Cpu.Arch) std.fmt.Formatter(Format, Format.target) { return .{ .data = .{ .platform = plat, .cpu_arch = cpu_arch } }; } - const FmtCtx = struct { + const Format = struct { platform: Platform, cpu_arch: std.Target.Cpu.Arch, - }; - pub fn formatTarget( - ctx: FmtCtx, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = unused_fmt_string; - _ = options; - try writer.print("{s}-{s}", .{ @tagName(ctx.cpu_arch), @tagName(ctx.platform.os_tag) }); - if (ctx.platform.abi != .none) { - try writer.print("-{s}", .{@tagName(ctx.platform.abi)}); + pub fn target(f: Format, w: *Writer) Writer.Error!void { + try w.print("{s}-{s}", .{ @tagName(f.cpu_arch), @tagName(f.platform.os_tag) }); + if (f.platform.abi != .none) { + try w.print("-{s}", .{@tagName(f.platform.abi)}); + } } - } + }; /// Caller owns the memory. 
pub fn allocPrintTarget(plat: Platform, gpa: Allocator, cpu_arch: std.Target.Cpu.Arch) error{OutOfMemory}![]u8 { var buffer = std.ArrayList(u8).init(gpa); defer buffer.deinit(); - try buffer.writer().print("{}", .{plat.fmtTarget(cpu_arch)}); + try buffer.writer().print("{f}", .{plat.fmtTarget(cpu_arch)}); return buffer.toOwnedSlice(); } @@ -4507,15 +4472,8 @@ pub const Ref = struct { }; } - pub fn format( - ref: Ref, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = unused_fmt_string; - _ = options; - try writer.print("%{d} in file({d})", .{ ref.index, ref.file }); + pub fn format(ref: Ref, bw: *Writer) Writer.Error!void { + try bw.print("%{d} in file({d})", .{ ref.index, ref.file }); } }; @@ -5315,7 +5273,7 @@ fn createThunks(macho_file: *MachO, sect_id: u8) !void { try scanThunkRelocs(thunk_index, gpa, atoms[start..i], macho_file); thunk.value = advanceSection(header, thunk.size(), .@"4"); - log.debug("thunk({d}) : {}", .{ thunk_index, thunk.fmt(macho_file) }); + log.debug("thunk({d}) : {f}", .{ thunk_index, thunk.fmt(macho_file) }); } } @@ -5414,6 +5372,7 @@ const macho = std.macho; const math = std.math; const mem = std.mem; const meta = std.meta; +const Writer = std.io.Writer; const aarch64 = @import("../arch/aarch64/bits.zig"); const bind = @import("MachO/dyld_info/bind.zig"); diff --git a/src/link/MachO/Archive.zig b/src/link/MachO/Archive.zig index 82818aa697..ae71dcde8d 100644 --- a/src/link/MachO/Archive.zig +++ b/src/link/MachO/Archive.zig @@ -29,8 +29,8 @@ pub fn unpack(self: *Archive, macho_file: *MachO, path: Path, handle_index: File pos += @sizeOf(ar_hdr); if (!mem.eql(u8, &hdr.ar_fmag, ARFMAG)) { - return diags.failParse(path, "invalid header delimiter: expected '{s}', found '{s}'", .{ - std.fmt.fmtSliceEscapeLower(ARFMAG), std.fmt.fmtSliceEscapeLower(&hdr.ar_fmag), + return diags.failParse(path, "invalid header delimiter: expected '{f}', found '{f}'", .{ + std.ascii.hexEscape(ARFMAG, 
.lower), std.ascii.hexEscape(&hdr.ar_fmag, .lower), }); } @@ -71,7 +71,7 @@ pub fn unpack(self: *Archive, macho_file: *MachO, path: Path, handle_index: File .mtime = hdr.date() catch 0, }; - log.debug("extracting object '{}' from archive '{}'", .{ object.path, path }); + log.debug("extracting object '{f}' from archive '{f}'", .{ object.path, path }); try self.objects.append(gpa, object); } @@ -230,32 +230,25 @@ pub const ArSymtab = struct { } } - const FormatContext = struct { + const PrintFormat = struct { ar: ArSymtab, macho_file: *MachO, + + fn default(f: PrintFormat, bw: *Writer) Writer.Error!void { + const ar = f.ar; + const macho_file = f.macho_file; + for (ar.entries.items, 0..) |entry, i| { + const name = ar.strtab.getAssumeExists(entry.off); + const file = macho_file.getFile(entry.file).?; + try bw.print(" {d}: {s} in file({d})({f})\n", .{ i, name, entry.file, file.fmtPath() }); + } + } }; - pub fn fmt(ar: ArSymtab, macho_file: *MachO) std.fmt.Formatter(format2) { + pub fn fmt(ar: ArSymtab, macho_file: *MachO) std.fmt.Formatter(PrintFormat, PrintFormat.default) { return .{ .data = .{ .ar = ar, .macho_file = macho_file } }; } - fn format2( - ctx: FormatContext, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = unused_fmt_string; - _ = options; - const ar = ctx.ar; - const macho_file = ctx.macho_file; - for (ar.entries.items, 0..) 
|entry, i| { - const name = ar.strtab.getAssumeExists(entry.off); - const file = macho_file.getFile(entry.file).?; - try writer.print(" {d}: {s} in file({d})({})\n", .{ i, name, entry.file, file.fmtPath() }); - } - } - const Entry = struct { /// Symbol name offset off: u32, @@ -304,8 +297,9 @@ const log = std.log.scoped(.link); const macho = std.macho; const mem = std.mem; const std = @import("std"); -const Allocator = mem.Allocator; +const Allocator = std.mem.Allocator; const Path = std.Build.Cache.Path; +const Writer = std.io.Writer; const Archive = @This(); const File = @import("file.zig").File; diff --git a/src/link/MachO/Atom.zig b/src/link/MachO/Atom.zig index baa9e6172c..e084ae0385 100644 --- a/src/link/MachO/Atom.zig +++ b/src/link/MachO/Atom.zig @@ -602,7 +602,7 @@ pub fn resolveRelocs(self: Atom, macho_file: *MachO, buffer: []u8) !void { }; try macho_file.reportParseError2( file.getIndex(), - "{s}: 0x{x}: 0x{x}: failed to relax relocation: type {}, target {s}", + "{s}: 0x{x}: 0x{x}: failed to relax relocation: type {f}, target {s}", .{ name, self.getAddress(macho_file), @@ -653,7 +653,7 @@ fn resolveRelocInner( const divExact = struct { fn divExact(atom: Atom, r: Relocation, num: u12, den: u12, ctx: *MachO) !u12 { return math.divExact(u12, num, den) catch { - try ctx.reportParseError2(atom.getFile(ctx).getIndex(), "{s}: unexpected remainder when resolving {s} at offset 0x{x}", .{ + try ctx.reportParseError2(atom.getFile(ctx).getIndex(), "{s}: unexpected remainder when resolving {f} at offset 0x{x}", .{ atom.getName(ctx), r.fmtPretty(ctx.getTarget().cpu.arch), r.offset, @@ -664,14 +664,14 @@ fn resolveRelocInner( }.divExact; switch (rel.tag) { - .local => relocs_log.debug(" {x}<+{d}>: {}: [=> {x}] atom({d})", .{ + .local => relocs_log.debug(" {x}<+{d}>: {f}: [=> {x}] atom({d})", .{ P, rel_offset, rel.fmtPretty(cpu_arch), S + A - SUB, rel.getTargetAtom(self, macho_file).atom_index, }), - .@"extern" => relocs_log.debug(" {x}<+{d}>: {}: [=> {x}] G({x}) 
({s})", .{ + .@"extern" => relocs_log.debug(" {x}<+{d}>: {f}: [=> {x}] G({x}) ({s})", .{ P, rel_offset, rel.fmtPretty(cpu_arch), @@ -900,19 +900,19 @@ const x86_64 = struct { switch (old_inst.encoding.mnemonic) { .mov => { const inst = Instruction.new(old_inst.prefix, .lea, &old_inst.ops, t) catch return error.RelaxFail; - relocs_log.debug(" relaxing {} => {}", .{ old_inst.encoding, inst.encoding }); + relocs_log.debug(" relaxing {f} => {f}", .{ old_inst.encoding, inst.encoding }); encode(&.{inst}, code) catch return error.RelaxFail; }, else => |x| { var err = try diags.addErrorWithNotes(2); - try err.addMsg("{s}: 0x{x}: 0x{x}: failed to relax relocation of type {}", .{ + try err.addMsg("{s}: 0x{x}: 0x{x}: failed to relax relocation of type {f}", .{ self.getName(macho_file), self.getAddress(macho_file), rel.offset, rel.fmtPretty(.x86_64), }); err.addNote("expected .mov instruction but found .{s}", .{@tagName(x)}); - err.addNote("while parsing {}", .{self.getFile(macho_file).fmtPath()}); + err.addNote("while parsing {f}", .{self.getFile(macho_file).fmtPath()}); return error.RelaxFailUnexpectedInstruction; }, } @@ -924,7 +924,7 @@ const x86_64 = struct { switch (old_inst.encoding.mnemonic) { .mov => { const inst = Instruction.new(old_inst.prefix, .lea, &old_inst.ops, t) catch return error.RelaxFail; - relocs_log.debug(" relaxing {} => {}", .{ old_inst.encoding, inst.encoding }); + relocs_log.debug(" relaxing {f} => {f}", .{ old_inst.encoding, inst.encoding }); encode(&.{inst}, code) catch return error.RelaxFail; }, else => return error.RelaxFail, @@ -938,11 +938,8 @@ const x86_64 = struct { } fn encode(insts: []const Instruction, code: []u8) !void { - var stream = std.io.fixedBufferStream(code); - const writer = stream.writer(); - for (insts) |inst| { - try inst.encode(writer, .{}); - } + var stream: Writer = .fixed(code); + for (insts) |inst| try inst.encode(&stream, .{}); } const bits = @import("../../arch/x86_64/bits.zig"); @@ -1003,7 +1000,7 @@ pub fn 
writeRelocs(self: Atom, macho_file: *MachO, code: []u8, buffer: []macho.r } switch (rel.tag) { - .local => relocs_log.debug(" {}: [{x} => {d}({s},{s})] + {x}", .{ + .local => relocs_log.debug(" {f}: [{x} => {d}({s},{s})] + {x}", .{ rel.fmtPretty(cpu_arch), r_address, r_symbolnum, @@ -1011,7 +1008,7 @@ pub fn writeRelocs(self: Atom, macho_file: *MachO, code: []u8, buffer: []macho.r macho_file.sections.items(.header)[r_symbolnum - 1].sectName(), addend, }), - .@"extern" => relocs_log.debug(" {}: [{x} => {d}({s})] + {x}", .{ + .@"extern" => relocs_log.debug(" {f}: [{x} => {d}({s})] + {x}", .{ rel.fmtPretty(cpu_arch), r_address, r_symbolnum, @@ -1117,60 +1114,40 @@ pub fn writeRelocs(self: Atom, macho_file: *MachO, code: []u8, buffer: []macho.r assert(i == buffer.len); } -pub fn format( - atom: Atom, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = atom; - _ = unused_fmt_string; - _ = options; - _ = writer; - @compileError("do not format Atom directly"); -} - -pub fn fmt(atom: Atom, macho_file: *MachO) std.fmt.Formatter(format2) { +pub fn fmt(atom: Atom, macho_file: *MachO) std.fmt.Formatter(Format, Format.print) { return .{ .data = .{ .atom = atom, .macho_file = macho_file, } }; } -const FormatContext = struct { +const Format = struct { atom: Atom, macho_file: *MachO, -}; -fn format2( - ctx: FormatContext, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = options; - _ = unused_fmt_string; - const atom = ctx.atom; - const macho_file = ctx.macho_file; - const file = atom.getFile(macho_file); - try writer.print("atom({d}) : {s} : @{x} : sect({d}) : align({x}) : size({x}) : nreloc({d}) : thunk({d})", .{ - atom.atom_index, atom.getName(macho_file), atom.getAddress(macho_file), - atom.out_n_sect, atom.alignment, atom.size, - atom.getRelocs(macho_file).len, atom.getExtra(macho_file).thunk, - }); - if (!atom.isAlive()) try writer.writeAll(" : [*]"); - 
if (atom.getUnwindRecords(macho_file).len > 0) { - try writer.writeAll(" : unwind{ "); - const extra = atom.getExtra(macho_file); - for (atom.getUnwindRecords(macho_file), extra.unwind_index..) |index, i| { - const rec = file.object.getUnwindRecord(index); - try writer.print("{d}", .{index}); - if (!rec.alive) try writer.writeAll("([*])"); - if (i < extra.unwind_index + extra.unwind_count - 1) try writer.writeAll(", "); + fn print(f: Format, w: *Writer) Writer.Error!void { + const atom = f.atom; + const macho_file = f.macho_file; + const file = atom.getFile(macho_file); + try w.print("atom({d}) : {s} : @{x} : sect({d}) : align({x}) : size({x}) : nreloc({d}) : thunk({d})", .{ + atom.atom_index, atom.getName(macho_file), atom.getAddress(macho_file), + atom.out_n_sect, atom.alignment, atom.size, + atom.getRelocs(macho_file).len, atom.getExtra(macho_file).thunk, + }); + if (!atom.isAlive()) try w.writeAll(" : [*]"); + if (atom.getUnwindRecords(macho_file).len > 0) { + try w.writeAll(" : unwind{ "); + const extra = atom.getExtra(macho_file); + for (atom.getUnwindRecords(macho_file), extra.unwind_index..) 
|index, i| { + const rec = file.object.getUnwindRecord(index); + try w.print("{d}", .{index}); + if (!rec.alive) try w.writeAll("([*])"); + if (i < extra.unwind_index + extra.unwind_count - 1) try w.writeAll(", "); + } + try w.writeAll(" }"); } - try writer.writeAll(" }"); } -} +}; pub const Index = u32; @@ -1205,19 +1182,20 @@ pub const Extra = struct { pub const Alignment = @import("../../InternPool.zig").Alignment; -const aarch64 = @import("../aarch64.zig"); +const std = @import("std"); const assert = std.debug.assert; const macho = std.macho; const math = std.math; const mem = std.mem; const log = std.log.scoped(.link); const relocs_log = std.log.scoped(.link_relocs); -const std = @import("std"); -const trace = @import("../../tracy.zig").trace; - +const Writer = std.io.Writer; const Allocator = mem.Allocator; -const Atom = @This(); const AtomicBool = std.atomic.Value(bool); + +const aarch64 = @import("../aarch64.zig"); +const trace = @import("../../tracy.zig").trace; +const Atom = @This(); const File = @import("file.zig").File; const MachO = @import("../MachO.zig"); const Object = @import("Object.zig"); diff --git a/src/link/MachO/DebugSymbols.zig b/src/link/MachO/DebugSymbols.zig index eef3492b48..4212827b2c 100644 --- a/src/link/MachO/DebugSymbols.zig +++ b/src/link/MachO/DebugSymbols.zig @@ -460,6 +460,7 @@ const math = std.math; const mem = std.mem; const padToIdeal = MachO.padToIdeal; const trace = @import("../../tracy.zig").trace; +const Writer = std.io.Writer; const Allocator = mem.Allocator; const MachO = @import("../MachO.zig"); diff --git a/src/link/MachO/Dylib.zig b/src/link/MachO/Dylib.zig index 01dd25fa8c..5bb5025e9e 100644 --- a/src/link/MachO/Dylib.zig +++ b/src/link/MachO/Dylib.zig @@ -61,7 +61,7 @@ fn parseBinary(self: *Dylib, macho_file: *MachO) !void { const file = macho_file.getFileHandle(self.file_handle); const offset = self.offset; - log.debug("parsing dylib from binary: {}", .{@as(Path, self.path)}); + log.debug("parsing dylib from 
binary: {f}", .{@as(Path, self.path)}); var header_buffer: [@sizeOf(macho.mach_header_64)]u8 = undefined; { @@ -140,7 +140,7 @@ fn parseBinary(self: *Dylib, macho_file: *MachO) !void { if (self.platform) |platform| { if (!macho_file.platform.eqlTarget(platform)) { - try macho_file.reportParseError2(self.index, "invalid platform: {}", .{ + try macho_file.reportParseError2(self.index, "invalid platform: {f}", .{ platform.fmtTarget(macho_file.getTarget().cpu.arch), }); return error.InvalidTarget; @@ -148,7 +148,7 @@ fn parseBinary(self: *Dylib, macho_file: *MachO) !void { // TODO: this can cause the CI to fail so I'm commenting this check out so that // I can work out the rest of the changes first // if (macho_file.platform.version.order(platform.version) == .lt) { - // try macho_file.reportParseError2(self.index, "object file built for newer platform: {}: {} < {}", .{ + // try macho_file.reportParseError2(self.index, "object file built for newer platform: {f}: {f} < {f}", .{ // macho_file.platform.fmtTarget(macho_file.getTarget().cpu.arch), // macho_file.platform.version, // platform.version, @@ -267,7 +267,7 @@ fn parseTbd(self: *Dylib, macho_file: *MachO) !void { const gpa = macho_file.base.comp.gpa; - log.debug("parsing dylib from stub: {}", .{self.path}); + log.debug("parsing dylib from stub: {f}", .{self.path}); const file = macho_file.getFileHandle(self.file_handle); var lib_stub = LibStub.loadFromFile(gpa, file) catch |err| { @@ -691,52 +691,32 @@ pub fn setSymbolExtra(self: *Dylib, index: u32, extra: Symbol.Extra) void { } } -pub fn format( - self: *Dylib, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = self; - _ = unused_fmt_string; - _ = options; - _ = writer; - @compileError("do not format dylib directly"); -} - -pub fn fmtSymtab(self: *Dylib, macho_file: *MachO) std.fmt.Formatter(formatSymtab) { +pub fn fmtSymtab(self: *Dylib, macho_file: *MachO) std.fmt.Formatter(Format, Format.symtab) { 
return .{ .data = .{ .dylib = self, .macho_file = macho_file, } }; } -const FormatContext = struct { +const Format = struct { dylib: *Dylib, macho_file: *MachO, -}; -fn formatSymtab( - ctx: FormatContext, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = unused_fmt_string; - _ = options; - const dylib = ctx.dylib; - const macho_file = ctx.macho_file; - try writer.writeAll(" globals\n"); - for (dylib.symbols.items, 0..) |sym, i| { - const ref = dylib.getSymbolRef(@intCast(i), macho_file); - if (ref.getFile(macho_file) == null) { - // TODO any better way of handling this? - try writer.print(" {s} : unclaimed\n", .{sym.getName(macho_file)}); - } else { - try writer.print(" {}\n", .{ref.getSymbol(macho_file).?.fmt(macho_file)}); + fn symtab(f: Format, w: *Writer) Writer.Error!void { + const dylib = f.dylib; + const macho_file = f.macho_file; + try w.writeAll(" globals\n"); + for (dylib.symbols.items, 0..) |sym, i| { + const ref = dylib.getSymbolRef(@intCast(i), macho_file); + if (ref.getFile(macho_file) == null) { + // TODO any better way of handling this? 
+ try w.print(" {s} : unclaimed\n", .{sym.getName(macho_file)}); + } else { + try w.print(" {f}\n", .{ref.getSymbol(macho_file).?.fmt(macho_file)}); + } } } -} +}; pub const TargetMatcher = struct { allocator: Allocator, @@ -948,19 +928,17 @@ const Export = struct { }; }; +const std = @import("std"); const assert = std.debug.assert; -const fat = @import("fat.zig"); const fs = std.fs; const fmt = std.fmt; const log = std.log.scoped(.link); const macho = std.macho; const math = std.math; const mem = std.mem; -const tapi = @import("../tapi.zig"); -const trace = @import("../../tracy.zig").trace; -const std = @import("std"); const Allocator = mem.Allocator; const Path = std.Build.Cache.Path; +const Writer = std.io.Writer; const Dylib = @This(); const File = @import("file.zig").File; @@ -969,3 +947,6 @@ const LoadCommandIterator = macho.LoadCommandIterator; const MachO = @import("../MachO.zig"); const Symbol = @import("Symbol.zig"); const Tbd = tapi.Tbd; +const fat = @import("fat.zig"); +const tapi = @import("../tapi.zig"); +const trace = @import("../../tracy.zig").trace; diff --git a/src/link/MachO/InternalObject.zig b/src/link/MachO/InternalObject.zig index 0218f0c1bb..e9355591e4 100644 --- a/src/link/MachO/InternalObject.zig +++ b/src/link/MachO/InternalObject.zig @@ -836,60 +836,46 @@ fn needsObjcMsgsendSymbol(self: InternalObject) bool { return false; } -const FormatContext = struct { +const Format = struct { self: *InternalObject, macho_file: *MachO, -}; -pub fn fmtAtoms(self: *InternalObject, macho_file: *MachO) std.fmt.Formatter(formatAtoms) { - return .{ .data = .{ - .self = self, - .macho_file = macho_file, - } }; -} - -fn formatAtoms( - ctx: FormatContext, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = unused_fmt_string; - _ = options; - try writer.writeAll(" atoms\n"); - for (ctx.self.getAtoms()) |atom_index| { - const atom = ctx.self.getAtom(atom_index) orelse continue; - try writer.print(" 
{}\n", .{atom.fmt(ctx.macho_file)}); - } -} - -pub fn fmtSymtab(self: *InternalObject, macho_file: *MachO) std.fmt.Formatter(formatSymtab) { - return .{ .data = .{ - .self = self, - .macho_file = macho_file, - } }; -} - -fn formatSymtab( - ctx: FormatContext, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = unused_fmt_string; - _ = options; - const macho_file = ctx.macho_file; - const self = ctx.self; - try writer.writeAll(" symbols\n"); - for (self.symbols.items, 0..) |sym, i| { - const ref = self.getSymbolRef(@intCast(i), macho_file); - if (ref.getFile(macho_file) == null) { - // TODO any better way of handling this? - try writer.print(" {s} : unclaimed\n", .{sym.getName(macho_file)}); - } else { - try writer.print(" {}\n", .{ref.getSymbol(macho_file).?.fmt(macho_file)}); + fn atoms(f: Format, w: *Writer) Writer.Error!void { + try w.writeAll(" atoms\n"); + for (f.self.getAtoms()) |atom_index| { + const atom = f.self.getAtom(atom_index) orelse continue; + try w.print(" {f}\n", .{atom.fmt(f.macho_file)}); } } + + fn symtab(f: Format, w: *Writer) Writer.Error!void { + const macho_file = f.macho_file; + const self = f.self; + try w.writeAll(" symbols\n"); + for (self.symbols.items, 0..) |sym, i| { + const ref = self.getSymbolRef(@intCast(i), macho_file); + if (ref.getFile(macho_file) == null) { + // TODO any better way of handling this? 
+ try w.print(" {s} : unclaimed\n", .{sym.getName(macho_file)}); + } else { + try w.print(" {f}\n", .{ref.getSymbol(macho_file).?.fmt(macho_file)}); + } + } + } +}; + +pub fn fmtAtoms(self: *InternalObject, macho_file: *MachO) std.fmt.Formatter(Format, Format.atoms) { + return .{ .data = .{ + .self = self, + .macho_file = macho_file, + } }; +} + +pub fn fmtSymtab(self: *InternalObject, macho_file: *MachO) std.fmt.Formatter(Format, Format.symtab) { + return .{ .data = .{ + .self = self, + .macho_file = macho_file, + } }; } const Section = struct { @@ -908,6 +894,7 @@ const macho = std.macho; const mem = std.mem; const std = @import("std"); const trace = @import("../../tracy.zig").trace; +const Writer = std.io.Writer; const Allocator = std.mem.Allocator; const Atom = @import("Atom.zig"); diff --git a/src/link/MachO/Object.zig b/src/link/MachO/Object.zig index ec9d07aabc..6b1dac3e72 100644 --- a/src/link/MachO/Object.zig +++ b/src/link/MachO/Object.zig @@ -72,7 +72,7 @@ pub fn parse(self: *Object, macho_file: *MachO) !void { const tracy = trace(@src()); defer tracy.end(); - log.debug("parsing {}", .{self.fmtPath()}); + log.debug("parsing {f}", .{self.fmtPath()}); const gpa = macho_file.base.comp.gpa; const handle = macho_file.getFileHandle(self.file_handle); @@ -239,7 +239,7 @@ pub fn parse(self: *Object, macho_file: *MachO) !void { if (self.platform) |platform| { if (!macho_file.platform.eqlTarget(platform)) { - try macho_file.reportParseError2(self.index, "invalid platform: {}", .{ + try macho_file.reportParseError2(self.index, "invalid platform: {f}", .{ platform.fmtTarget(cpu_arch), }); return error.InvalidTarget; @@ -247,7 +247,7 @@ pub fn parse(self: *Object, macho_file: *MachO) !void { // TODO: this causes the CI to fail so I'm commenting this check out so that // I can work out the rest of the changes first // if (macho_file.platform.version.order(platform.version) == .lt) { - // try macho_file.reportParseError2(self.index, "object file built for newer 
platform: {}: {} < {}", .{ + // try macho_file.reportParseError2(self.index, "object file built for newer platform: {f}: {f} < {f}", .{ // macho_file.platform.fmtTarget(macho_file.getTarget().cpu.arch), // macho_file.platform.version, // platform.version, @@ -308,7 +308,9 @@ fn initSubsections(self: *Object, allocator: Allocator, nlists: anytype) !void { } else nlists.len; if (nlist_start == nlist_end or nlists[nlist_start].nlist.n_value > sect.addr) { - const name = try std.fmt.allocPrintZ(allocator, "{s}${s}$begin", .{ sect.segName(), sect.sectName() }); + const name = try std.fmt.allocPrintSentinel(allocator, "{s}${s}$begin", .{ + sect.segName(), sect.sectName(), + }, 0); defer allocator.free(name); const size = if (nlist_start == nlist_end) sect.size else nlists[nlist_start].nlist.n_value - sect.addr; const atom_index = try self.addAtom(allocator, .{ @@ -364,7 +366,9 @@ fn initSubsections(self: *Object, allocator: Allocator, nlists: anytype) !void { // which cannot be contained in any non-zero atom (since then this atom // would exceed section boundaries). In order to facilitate this behaviour, // we create a dummy zero-sized atom at section end (addr + size). 
- const name = try std.fmt.allocPrintZ(allocator, "{s}${s}$end", .{ sect.segName(), sect.sectName() }); + const name = try std.fmt.allocPrintSentinel(allocator, "{s}${s}$end", .{ + sect.segName(), sect.sectName(), + }, 0); defer allocator.free(name); const atom_index = try self.addAtom(allocator, .{ .name = try self.addString(allocator, name), @@ -394,7 +398,7 @@ fn initSections(self: *Object, allocator: Allocator, nlists: anytype) !void { if (isFixedSizeLiteral(sect)) continue; if (isPtrLiteral(sect)) continue; - const name = try std.fmt.allocPrintZ(allocator, "{s}${s}", .{ sect.segName(), sect.sectName() }); + const name = try std.fmt.allocPrintSentinel(allocator, "{s}${s}", .{ sect.segName(), sect.sectName() }, 0); defer allocator.free(name); const atom_index = try self.addAtom(allocator, .{ @@ -462,7 +466,7 @@ fn initCstringLiterals(self: *Object, allocator: Allocator, file: File.Handle, m } end += 1; - const name = try std.fmt.allocPrintZ(allocator, "l._str{d}", .{count}); + const name = try std.fmt.allocPrintSentinel(allocator, "l._str{d}", .{count}, 0); defer allocator.free(name); const name_str = try self.addString(allocator, name); @@ -529,7 +533,7 @@ fn initFixedSizeLiterals(self: *Object, allocator: Allocator, macho_file: *MachO pos += rec_size; count += 1; }) { - const name = try std.fmt.allocPrintZ(allocator, "l._literal{d}", .{count}); + const name = try std.fmt.allocPrintSentinel(allocator, "l._literal{d}", .{count}, 0); defer allocator.free(name); const name_str = try self.addString(allocator, name); @@ -587,7 +591,7 @@ fn initPointerLiterals(self: *Object, allocator: Allocator, macho_file: *MachO) for (0..num_ptrs) |i| { const pos: u32 = @as(u32, @intCast(i)) * rec_size; - const name = try std.fmt.allocPrintZ(allocator, "l._ptr{d}", .{i}); + const name = try std.fmt.allocPrintSentinel(allocator, "l._ptr{d}", .{i}, 0); defer allocator.free(name); const name_str = try self.addString(allocator, name); @@ -1558,7 +1562,7 @@ pub fn 
convertTentativeDefinitions(self: *Object, macho_file: *MachO) !void { const nlist = &self.symtab.items(.nlist)[nlist_idx]; const nlist_atom = &self.symtab.items(.atom)[nlist_idx]; - const name = try std.fmt.allocPrintZ(gpa, "__DATA$__common${s}", .{sym.getName(macho_file)}); + const name = try std.fmt.allocPrintSentinel(gpa, "__DATA$__common${s}", .{sym.getName(macho_file)}, 0); defer gpa.free(name); const alignment = (nlist.n_desc >> 8) & 0x0f; @@ -2512,172 +2516,114 @@ pub fn readSectionData(self: Object, allocator: Allocator, file: File.Handle, n_ return data; } -pub fn format( - self: *Object, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = self; - _ = unused_fmt_string; - _ = options; - _ = writer; - @compileError("do not format objects directly"); -} - -const FormatContext = struct { +const Format = struct { object: *Object, macho_file: *MachO, + + fn atoms(f: Format, w: *Writer) Writer.Error!void { + const object = f.object; + const macho_file = f.macho_file; + try w.writeAll(" atoms\n"); + for (object.getAtoms()) |atom_index| { + const atom = object.getAtom(atom_index) orelse continue; + try w.print(" {f}\n", .{atom.fmt(macho_file)}); + } + } + fn cies(f: Format, w: *Writer) Writer.Error!void { + const object = f.object; + try w.writeAll(" cies\n"); + for (object.cies.items, 0..) |cie, i| { + try w.print(" cie({d}) : {f}\n", .{ i, cie.fmt(f.macho_file) }); + } + } + fn fdes(f: Format, w: *Writer) Writer.Error!void { + const object = f.object; + try w.writeAll(" fdes\n"); + for (object.fdes.items, 0..) 
|fde, i| { + try w.print(" fde({d}) : {f}\n", .{ i, fde.fmt(f.macho_file) }); + } + } + fn unwindRecords(f: Format, w: *Writer) Writer.Error!void { + const object = f.object; + const macho_file = f.macho_file; + try w.writeAll(" unwind records\n"); + for (object.unwind_records_indexes.items) |rec| { + try w.print(" rec({d}) : {f}\n", .{ rec, object.getUnwindRecord(rec).fmt(macho_file) }); + } + } + + fn symtab(f: Format, w: *Writer) Writer.Error!void { + const object = f.object; + const macho_file = f.macho_file; + try w.writeAll(" symbols\n"); + for (object.symbols.items, 0..) |sym, i| { + const ref = object.getSymbolRef(@intCast(i), macho_file); + if (ref.getFile(macho_file) == null) { + // TODO any better way of handling this? + try w.print(" {s} : unclaimed\n", .{sym.getName(macho_file)}); + } else { + try w.print(" {f}\n", .{ref.getSymbol(macho_file).?.fmt(macho_file)}); + } + } + for (object.stab_files.items) |sf| { + try w.print(" stabs({s},{s},{s})\n", .{ + sf.getCompDir(object.*), + sf.getTuName(object.*), + sf.getOsoPath(object.*), + }); + for (sf.stabs.items) |stab| { + try w.print(" {f}", .{stab.fmt(object.*)}); + } + } + } }; -pub fn fmtAtoms(self: *Object, macho_file: *MachO) std.fmt.Formatter(formatAtoms) { +pub fn fmtAtoms(self: *Object, macho_file: *MachO) std.fmt.Formatter(Format, Format.atoms) { return .{ .data = .{ .object = self, .macho_file = macho_file, } }; } -fn formatAtoms( - ctx: FormatContext, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = unused_fmt_string; - _ = options; - const object = ctx.object; - const macho_file = ctx.macho_file; - try writer.writeAll(" atoms\n"); - for (object.getAtoms()) |atom_index| { - const atom = object.getAtom(atom_index) orelse continue; - try writer.print(" {}\n", .{atom.fmt(macho_file)}); - } -} - -pub fn fmtCies(self: *Object, macho_file: *MachO) std.fmt.Formatter(formatCies) { +pub fn fmtCies(self: *Object, macho_file: *MachO) 
std.fmt.Formatter(Format, Format.cies) { return .{ .data = .{ .object = self, .macho_file = macho_file, } }; } -fn formatCies( - ctx: FormatContext, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = unused_fmt_string; - _ = options; - const object = ctx.object; - try writer.writeAll(" cies\n"); - for (object.cies.items, 0..) |cie, i| { - try writer.print(" cie({d}) : {}\n", .{ i, cie.fmt(ctx.macho_file) }); - } -} - -pub fn fmtFdes(self: *Object, macho_file: *MachO) std.fmt.Formatter(formatFdes) { +pub fn fmtFdes(self: *Object, macho_file: *MachO) std.fmt.Formatter(Format, Format.fdes) { return .{ .data = .{ .object = self, .macho_file = macho_file, } }; } -fn formatFdes( - ctx: FormatContext, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = unused_fmt_string; - _ = options; - const object = ctx.object; - try writer.writeAll(" fdes\n"); - for (object.fdes.items, 0..) 
|fde, i| { - try writer.print(" fde({d}) : {}\n", .{ i, fde.fmt(ctx.macho_file) }); - } -} - -pub fn fmtUnwindRecords(self: *Object, macho_file: *MachO) std.fmt.Formatter(formatUnwindRecords) { +pub fn fmtUnwindRecords(self: *Object, macho_file: *MachO) std.fmt.Formatter(Format, Format.unwindRecords) { return .{ .data = .{ .object = self, .macho_file = macho_file, } }; } -fn formatUnwindRecords( - ctx: FormatContext, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = unused_fmt_string; - _ = options; - const object = ctx.object; - const macho_file = ctx.macho_file; - try writer.writeAll(" unwind records\n"); - for (object.unwind_records_indexes.items) |rec| { - try writer.print(" rec({d}) : {}\n", .{ rec, object.getUnwindRecord(rec).fmt(macho_file) }); - } -} - -pub fn fmtSymtab(self: *Object, macho_file: *MachO) std.fmt.Formatter(formatSymtab) { +pub fn fmtSymtab(self: *Object, macho_file: *MachO) std.fmt.Formatter(Format, Format.symtab) { return .{ .data = .{ .object = self, .macho_file = macho_file, } }; } -fn formatSymtab( - ctx: FormatContext, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = unused_fmt_string; - _ = options; - const object = ctx.object; - const macho_file = ctx.macho_file; - try writer.writeAll(" symbols\n"); - for (object.symbols.items, 0..) |sym, i| { - const ref = object.getSymbolRef(@intCast(i), macho_file); - if (ref.getFile(macho_file) == null) { - // TODO any better way of handling this? 
- try writer.print(" {s} : unclaimed\n", .{sym.getName(macho_file)}); - } else { - try writer.print(" {}\n", .{ref.getSymbol(macho_file).?.fmt(macho_file)}); - } - } - for (object.stab_files.items) |sf| { - try writer.print(" stabs({s},{s},{s})\n", .{ - sf.getCompDir(object.*), - sf.getTuName(object.*), - sf.getOsoPath(object.*), - }); - for (sf.stabs.items) |stab| { - try writer.print(" {}", .{stab.fmt(object.*)}); - } - } -} - -pub fn fmtPath(self: Object) std.fmt.Formatter(formatPath) { +pub fn fmtPath(self: Object) std.fmt.Formatter(Object, formatPath) { return .{ .data = self }; } -fn formatPath( - object: Object, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = unused_fmt_string; - _ = options; +fn formatPath(object: Object, w: *Writer) Writer.Error!void { if (object.in_archive) |ar| { - try writer.print("{}({s})", .{ - @as(Path, ar.path), object.path.basename(), + try w.print("{f}({s})", .{ + ar.path, object.path.basename(), }); } else { - try writer.print("{}", .{@as(Path, object.path)}); + try w.print("{f}", .{object.path}); } } @@ -2731,42 +2677,25 @@ const StabFile = struct { return object.symbols.items[index]; } - pub fn format( + const Format = struct { stab: Stab, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = stab; - _ = unused_fmt_string; - _ = options; - _ = writer; - @compileError("do not format stabs directly"); - } + object: Object, - const StabFormatContext = struct { Stab, Object }; - - pub fn fmt(stab: Stab, object: Object) std.fmt.Formatter(format2) { - return .{ .data = .{ stab, object } }; - } - - fn format2( - ctx: StabFormatContext, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = unused_fmt_string; - _ = options; - const stab, const object = ctx; - const sym = stab.getSymbol(object).?; - if (stab.is_func) { - try writer.print("func({d})", 
.{stab.index.?}); - } else if (sym.visibility == .global) { - try writer.print("gsym({d})", .{stab.index.?}); - } else { - try writer.print("stsym({d})", .{stab.index.?}); + fn default(f: Stab.Format, w: *Writer) Writer.Error!void { + const stab = f.stab; + const sym = stab.getSymbol(f.object).?; + if (stab.is_func) { + try w.print("func({d})", .{stab.index.?}); + } else if (sym.visibility == .global) { + try w.print("gsym({d})", .{stab.index.?}); + } else { + try w.print("stsym({d})", .{stab.index.?}); + } } + }; + + pub fn fmt(stab: Stab, object: Object) std.fmt.Formatter(Stab.Format, Stab.Format.default) { + return .{ .data = .{ .stab = stab, .object = object } }; } }; }; @@ -3157,17 +3086,18 @@ const aarch64 = struct { } }; +const std = @import("std"); const assert = std.debug.assert; -const eh_frame = @import("eh_frame.zig"); const log = std.log.scoped(.link); const macho = std.macho; const math = std.math; const mem = std.mem; -const trace = @import("../../tracy.zig").trace; -const std = @import("std"); const Path = std.Build.Cache.Path; +const Allocator = std.mem.Allocator; +const Writer = std.io.Writer; -const Allocator = mem.Allocator; +const eh_frame = @import("eh_frame.zig"); +const trace = @import("../../tracy.zig").trace; const Archive = @import("Archive.zig"); const Atom = @import("Atom.zig"); const Cie = eh_frame.Cie; diff --git a/src/link/MachO/Relocation.zig b/src/link/MachO/Relocation.zig index c732dc3a89..7982dff6a4 100644 --- a/src/link/MachO/Relocation.zig +++ b/src/link/MachO/Relocation.zig @@ -70,57 +70,51 @@ pub fn lessThan(ctx: void, lhs: Relocation, rhs: Relocation) bool { return lhs.offset < rhs.offset; } -const FormatCtx = struct { Relocation, std.Target.Cpu.Arch }; - -pub fn fmtPretty(rel: Relocation, cpu_arch: std.Target.Cpu.Arch) std.fmt.Formatter(formatPretty) { - return .{ .data = .{ rel, cpu_arch } }; +pub fn fmtPretty(rel: Relocation, cpu_arch: std.Target.Cpu.Arch) std.fmt.Formatter(Format, Format.pretty) { + return .{ .data = .{ 
.relocation = rel, .arch = cpu_arch } }; } -fn formatPretty( - ctx: FormatCtx, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = options; - _ = unused_fmt_string; - const rel, const cpu_arch = ctx; - const str = switch (rel.type) { - .signed => "X86_64_RELOC_SIGNED", - .signed1 => "X86_64_RELOC_SIGNED_1", - .signed2 => "X86_64_RELOC_SIGNED_2", - .signed4 => "X86_64_RELOC_SIGNED_4", - .got_load => "X86_64_RELOC_GOT_LOAD", - .tlv => "X86_64_RELOC_TLV", - .page => "ARM64_RELOC_PAGE21", - .pageoff => "ARM64_RELOC_PAGEOFF12", - .got_load_page => "ARM64_RELOC_GOT_LOAD_PAGE21", - .got_load_pageoff => "ARM64_RELOC_GOT_LOAD_PAGEOFF12", - .tlvp_page => "ARM64_RELOC_TLVP_LOAD_PAGE21", - .tlvp_pageoff => "ARM64_RELOC_TLVP_LOAD_PAGEOFF12", - .branch => switch (cpu_arch) { - .x86_64 => "X86_64_RELOC_BRANCH", - .aarch64 => "ARM64_RELOC_BRANCH26", - else => unreachable, - }, - .got => switch (cpu_arch) { - .x86_64 => "X86_64_RELOC_GOT", - .aarch64 => "ARM64_RELOC_POINTER_TO_GOT", - else => unreachable, - }, - .subtractor => switch (cpu_arch) { - .x86_64 => "X86_64_RELOC_SUBTRACTOR", - .aarch64 => "ARM64_RELOC_SUBTRACTOR", - else => unreachable, - }, - .unsigned => switch (cpu_arch) { - .x86_64 => "X86_64_RELOC_UNSIGNED", - .aarch64 => "ARM64_RELOC_UNSIGNED", - else => unreachable, - }, - }; - try writer.writeAll(str); -} +const Format = struct { + relocation: Relocation, + arch: std.Target.Cpu.Arch, + + fn pretty(f: Format, w: *Writer) Writer.Error!void { + try w.writeAll(switch (f.relocation.type) { + .signed => "X86_64_RELOC_SIGNED", + .signed1 => "X86_64_RELOC_SIGNED_1", + .signed2 => "X86_64_RELOC_SIGNED_2", + .signed4 => "X86_64_RELOC_SIGNED_4", + .got_load => "X86_64_RELOC_GOT_LOAD", + .tlv => "X86_64_RELOC_TLV", + .page => "ARM64_RELOC_PAGE21", + .pageoff => "ARM64_RELOC_PAGEOFF12", + .got_load_page => "ARM64_RELOC_GOT_LOAD_PAGE21", + .got_load_pageoff => "ARM64_RELOC_GOT_LOAD_PAGEOFF12", + .tlvp_page => 
"ARM64_RELOC_TLVP_LOAD_PAGE21", + .tlvp_pageoff => "ARM64_RELOC_TLVP_LOAD_PAGEOFF12", + .branch => switch (f.arch) { + .x86_64 => "X86_64_RELOC_BRANCH", + .aarch64 => "ARM64_RELOC_BRANCH26", + else => unreachable, + }, + .got => switch (f.arch) { + .x86_64 => "X86_64_RELOC_GOT", + .aarch64 => "ARM64_RELOC_POINTER_TO_GOT", + else => unreachable, + }, + .subtractor => switch (f.arch) { + .x86_64 => "X86_64_RELOC_SUBTRACTOR", + .aarch64 => "ARM64_RELOC_SUBTRACTOR", + else => unreachable, + }, + .unsigned => switch (f.arch) { + .x86_64 => "X86_64_RELOC_UNSIGNED", + .aarch64 => "ARM64_RELOC_UNSIGNED", + else => unreachable, + }, + }); + } +}; pub const Type = enum { // x86_64 @@ -164,10 +158,11 @@ pub const Type = enum { const Tag = enum { local, @"extern" }; +const std = @import("std"); const assert = std.debug.assert; const macho = std.macho; const math = std.math; -const std = @import("std"); +const Writer = std.io.Writer; const Atom = @import("Atom.zig"); const MachO = @import("../MachO.zig"); diff --git a/src/link/MachO/Symbol.zig b/src/link/MachO/Symbol.zig index be126b0963..654e7c402c 100644 --- a/src/link/MachO/Symbol.zig +++ b/src/link/MachO/Symbol.zig @@ -286,71 +286,51 @@ pub fn setOutputSym(symbol: Symbol, macho_file: *MachO, out: *macho.nlist_64) vo } } -pub fn format( - symbol: Symbol, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = symbol; - _ = unused_fmt_string; - _ = options; - _ = writer; - @compileError("do not format symbols directly"); -} - -const FormatContext = struct { - symbol: Symbol, - macho_file: *MachO, -}; - -pub fn fmt(symbol: Symbol, macho_file: *MachO) std.fmt.Formatter(format2) { +pub fn fmt(symbol: Symbol, macho_file: *MachO) std.fmt.Formatter(Format, Format.default) { return .{ .data = .{ .symbol = symbol, .macho_file = macho_file, } }; } -fn format2( - ctx: FormatContext, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) 
!void { - _ = options; - _ = unused_fmt_string; - const symbol = ctx.symbol; - try writer.print("%{d} : {s} : @{x}", .{ - symbol.nlist_idx, - symbol.getName(ctx.macho_file), - symbol.getAddress(.{}, ctx.macho_file), - }); - if (symbol.getFile(ctx.macho_file)) |file| { - if (symbol.getOutputSectionIndex(ctx.macho_file) != 0) { - try writer.print(" : sect({d})", .{symbol.getOutputSectionIndex(ctx.macho_file)}); - } - if (symbol.getAtom(ctx.macho_file)) |atom| { - try writer.print(" : atom({d})", .{atom.atom_index}); - } - var buf: [3]u8 = .{'_'} ** 3; - if (symbol.flags.@"export") buf[0] = 'E'; - if (symbol.flags.import) buf[1] = 'I'; - switch (symbol.visibility) { - .local => buf[2] = 'L', - .hidden => buf[2] = 'H', - .global => buf[2] = 'G', - } - try writer.print(" : {s}", .{&buf}); - if (symbol.flags.weak) try writer.writeAll(" : weak"); - if (symbol.isSymbolStab(ctx.macho_file)) try writer.writeAll(" : stab"); - switch (file) { - .zig_object => |x| try writer.print(" : zig_object({d})", .{x.index}), - .internal => |x| try writer.print(" : internal({d})", .{x.index}), - .object => |x| try writer.print(" : object({d})", .{x.index}), - .dylib => |x| try writer.print(" : dylib({d})", .{x.index}), - } - } else try writer.writeAll(" : unresolved"); -} +const Format = struct { + symbol: Symbol, + macho_file: *MachO, + + fn default(f: Format, w: *Writer) Writer.Error!void { + const symbol = f.symbol; + try w.print("%{d} : {s} : @{x}", .{ + symbol.nlist_idx, + symbol.getName(f.macho_file), + symbol.getAddress(.{}, f.macho_file), + }); + if (symbol.getFile(f.macho_file)) |file| { + if (symbol.getOutputSectionIndex(f.macho_file) != 0) { + try w.print(" : sect({d})", .{symbol.getOutputSectionIndex(f.macho_file)}); + } + if (symbol.getAtom(f.macho_file)) |atom| { + try w.print(" : atom({d})", .{atom.atom_index}); + } + var buf: [3]u8 = .{'_'} ** 3; + if (symbol.flags.@"export") buf[0] = 'E'; + if (symbol.flags.import) buf[1] = 'I'; + switch (symbol.visibility) { + .local => 
buf[2] = 'L', + .hidden => buf[2] = 'H', + .global => buf[2] = 'G', + } + try w.print(" : {s}", .{&buf}); + if (symbol.flags.weak) try w.writeAll(" : weak"); + if (symbol.isSymbolStab(f.macho_file)) try w.writeAll(" : stab"); + switch (file) { + .zig_object => |x| try w.print(" : zig_object({d})", .{x.index}), + .internal => |x| try w.print(" : internal({d})", .{x.index}), + .object => |x| try w.print(" : object({d})", .{x.index}), + .dylib => |x| try w.print(" : dylib({d})", .{x.index}), + } + } else try w.writeAll(" : unresolved"); + } +}; pub const Flags = packed struct { /// Whether the symbol is imported at runtime. @@ -437,6 +417,7 @@ pub const Index = u32; const assert = std.debug.assert; const macho = std.macho; const std = @import("std"); +const Writer = std.io.Writer; const Atom = @import("Atom.zig"); const File = @import("file.zig").File; diff --git a/src/link/MachO/Thunk.zig b/src/link/MachO/Thunk.zig index d720d4fd25..e9f67dd5e5 100644 --- a/src/link/MachO/Thunk.zig +++ b/src/link/MachO/Thunk.zig @@ -61,47 +61,27 @@ pub fn writeSymtab(thunk: Thunk, macho_file: *MachO, ctx: anytype) void { } } -pub fn format( - thunk: Thunk, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = thunk; - _ = unused_fmt_string; - _ = options; - _ = writer; - @compileError("do not format Thunk directly"); -} - -pub fn fmt(thunk: Thunk, macho_file: *MachO) std.fmt.Formatter(format2) { +pub fn fmt(thunk: Thunk, macho_file: *MachO) std.fmt.Formatter(Format, Format.default) { return .{ .data = .{ .thunk = thunk, .macho_file = macho_file, } }; } -const FormatContext = struct { +const Format = struct { thunk: Thunk, macho_file: *MachO, -}; -fn format2( - ctx: FormatContext, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = options; - _ = unused_fmt_string; - const thunk = ctx.thunk; - const macho_file = ctx.macho_file; - try writer.print("@{x} : size({x})\n", 
.{ thunk.value, thunk.size() }); - for (thunk.symbols.keys()) |ref| { - const sym = ref.getSymbol(macho_file).?; - try writer.print(" {} : {s} : @{x}\n", .{ ref, sym.getName(macho_file), sym.value }); + fn default(f: Format, w: *Writer) Writer.Error!void { + const thunk = f.thunk; + const macho_file = f.macho_file; + try w.print("@{x} : size({x})\n", .{ thunk.value, thunk.size() }); + for (thunk.symbols.keys()) |ref| { + const sym = ref.getSymbol(macho_file).?; + try w.print(" {f} : {s} : @{x}\n", .{ ref, sym.getName(macho_file), sym.value }); + } } -} +}; const trampoline_size = 3 * @sizeOf(u32); @@ -115,6 +95,7 @@ const math = std.math; const mem = std.mem; const std = @import("std"); const trace = @import("../../tracy.zig").trace; +const Writer = std.io.Writer; const Allocator = mem.Allocator; const Atom = @import("Atom.zig"); diff --git a/src/link/MachO/UnwindInfo.zig b/src/link/MachO/UnwindInfo.zig index ffeeaddb23..a0ffdfe06a 100644 --- a/src/link/MachO/UnwindInfo.zig +++ b/src/link/MachO/UnwindInfo.zig @@ -133,7 +133,7 @@ pub fn generate(info: *UnwindInfo, macho_file: *MachO) !void { for (info.records.items) |ref| { const rec = ref.getUnwindRecord(macho_file); const atom = rec.getAtom(macho_file); - log.debug("@{x}-{x} : {s} : rec({d}) : object({d}) : {}", .{ + log.debug("@{x}-{x} : {s} : rec({d}) : object({d}) : {f}", .{ rec.getAtomAddress(macho_file), rec.getAtomAddress(macho_file) + rec.length, atom.getName(macho_file), @@ -202,7 +202,7 @@ pub fn generate(info: *UnwindInfo, macho_file: *MachO) !void { if (i >= max_common_encodings) break; if (slice[i].count < 2) continue; info.appendCommonEncoding(slice[i].enc); - log.debug("adding common encoding: {d} => {}", .{ i, slice[i].enc }); + log.debug("adding common encoding: {d} => {f}", .{ i, slice[i].enc }); } } @@ -255,7 +255,7 @@ pub fn generate(info: *UnwindInfo, macho_file: *MachO) !void { page.kind = .compressed; } - log.debug("{}", .{page.fmt(info.*)}); + log.debug("{f}", .{page.fmt(info.*)}); try 
info.pages.append(gpa, page); } @@ -455,15 +455,8 @@ pub const Encoding = extern struct { return enc.enc == other.enc; } - pub fn format( - enc: Encoding, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = unused_fmt_string; - _ = options; - try writer.print("0x{x:0>8}", .{enc.enc}); + pub fn format(enc: Encoding, w: *Writer) Writer.Error!void { + try w.print("0x{x:0>8}", .{enc.enc}); } }; @@ -517,48 +510,28 @@ pub const Record = struct { return lsda.getAddress(macho_file) + rec.lsda_offset; } - pub fn format( - rec: Record, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = rec; - _ = unused_fmt_string; - _ = options; - _ = writer; - @compileError("do not format UnwindInfo.Records directly"); - } - - pub fn fmt(rec: Record, macho_file: *MachO) std.fmt.Formatter(format2) { + pub fn fmt(rec: Record, macho_file: *MachO) std.fmt.Formatter(Format, Format.default) { return .{ .data = .{ .rec = rec, .macho_file = macho_file, } }; } - const FormatContext = struct { + const Format = struct { rec: Record, macho_file: *MachO, - }; - fn format2( - ctx: FormatContext, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = unused_fmt_string; - _ = options; - const rec = ctx.rec; - const macho_file = ctx.macho_file; - try writer.print("{x} : len({x})", .{ - rec.enc.enc, rec.length, - }); - if (rec.enc.isDwarf(macho_file)) try writer.print(" : fde({d})", .{rec.fde}); - try writer.print(" : {s}", .{rec.getAtom(macho_file).getName(macho_file)}); - if (!rec.alive) try writer.writeAll(" : [*]"); - } + fn default(f: Format, w: *Writer) Writer.Error!void { + const rec = f.rec; + const macho_file = f.macho_file; + try w.print("{x} : len({x})", .{ + rec.enc.enc, rec.length, + }); + if (rec.enc.isDwarf(macho_file)) try w.print(" : fde({d})", .{rec.fde}); + try w.print(" : {s}", 
.{rec.getAtom(macho_file).getName(macho_file)}); + if (!rec.alive) try w.writeAll(" : [*]"); + } + }; pub const Index = u32; @@ -613,45 +586,25 @@ const Page = struct { return null; } - fn format( - page: *const Page, - comptime unused_format_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = page; - _ = unused_format_string; - _ = options; - _ = writer; - @compileError("do not format Page directly; use page.fmt()"); - } - - const FormatPageContext = struct { + const Format = struct { page: Page, info: UnwindInfo, + + fn default(f: Format, w: *Writer) Writer.Error!void { + try w.writeAll("Page:\n"); + try w.print(" kind: {s}\n", .{@tagName(f.page.kind)}); + try w.print(" entries: {d} - {d}\n", .{ + f.page.start, + f.page.start + f.page.count, + }); + try w.print(" encodings (count = {d})\n", .{f.page.page_encodings_count}); + for (f.page.page_encodings[0..f.page.page_encodings_count], 0..) |enc, i| { + try w.print(" {d}: {f}\n", .{ f.info.common_encodings_count + i, enc }); + } + } }; - fn format2( - ctx: FormatPageContext, - comptime unused_format_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) @TypeOf(writer).Error!void { - _ = options; - _ = unused_format_string; - try writer.writeAll("Page:\n"); - try writer.print(" kind: {s}\n", .{@tagName(ctx.page.kind)}); - try writer.print(" entries: {d} - {d}\n", .{ - ctx.page.start, - ctx.page.start + ctx.page.count, - }); - try writer.print(" encodings (count = {d})\n", .{ctx.page.page_encodings_count}); - for (ctx.page.page_encodings[0..ctx.page.page_encodings_count], 0..) 
|enc, i| { - try writer.print(" {d}: {}\n", .{ ctx.info.common_encodings_count + i, enc }); - } - } - - fn fmt(page: Page, info: UnwindInfo) std.fmt.Formatter(format2) { + fn fmt(page: Page, info: UnwindInfo) std.fmt.Formatter(Format, Format.default) { return .{ .data = .{ .page = page, .info = info, @@ -720,6 +673,7 @@ const macho = std.macho; const math = std.math; const mem = std.mem; const trace = @import("../../tracy.zig").trace; +const Writer = std.io.Writer; const Allocator = mem.Allocator; const Atom = @import("Atom.zig"); diff --git a/src/link/MachO/ZigObject.zig b/src/link/MachO/ZigObject.zig index 97c1a0ad54..d6a56f8411 100644 --- a/src/link/MachO/ZigObject.zig +++ b/src/link/MachO/ZigObject.zig @@ -618,7 +618,7 @@ pub fn getNavVAddr( const zcu = pt.zcu; const ip = &zcu.intern_pool; const nav = ip.getNav(nav_index); - log.debug("getNavVAddr {}({d})", .{ nav.fqn.fmt(ip), nav_index }); + log.debug("getNavVAddr {f}({d})", .{ nav.fqn.fmt(ip), nav_index }); const sym_index = if (nav.getExtern(ip)) |@"extern"| try self.getGlobalSymbol( macho_file, nav.name.toSlice(ip), @@ -943,7 +943,7 @@ fn updateNavCode( const ip = &zcu.intern_pool; const nav = ip.getNav(nav_index); - log.debug("updateNavCode {} 0x{x}", .{ nav.fqn.fmt(ip), nav_index }); + log.debug("updateNavCode {f} 0x{x}", .{ nav.fqn.fmt(ip), nav_index }); const target = &zcu.navFileScope(nav_index).mod.?.resolved_target.result; const required_alignment = switch (pt.navAlignment(nav_index)) { @@ -959,7 +959,7 @@ fn updateNavCode( sym.out_n_sect = sect_index; atom.out_n_sect = sect_index; - const sym_name = try std.fmt.allocPrintZ(gpa, "_{s}", .{nav.fqn.toSlice(ip)}); + const sym_name = try std.fmt.allocPrintSentinel(gpa, "_{s}", .{nav.fqn.toSlice(ip)}, 0); defer gpa.free(sym_name); sym.name = try self.addString(gpa, sym_name); atom.setAlive(true); @@ -981,7 +981,7 @@ fn updateNavCode( if (need_realloc) { atom.grow(macho_file) catch |err| return macho_file.base.cgFail(nav_index, "failed to grow atom: {s}", 
.{@errorName(err)}); - log.debug("growing {} from 0x{x} to 0x{x}", .{ nav.fqn.fmt(ip), old_vaddr, atom.value }); + log.debug("growing {f} from 0x{x} to 0x{x}", .{ nav.fqn.fmt(ip), old_vaddr, atom.value }); if (old_vaddr != atom.value) { sym.value = 0; nlist.n_value = 0; @@ -1023,7 +1023,7 @@ fn updateTlv( const ip = &pt.zcu.intern_pool; const nav = ip.getNav(nav_index); - log.debug("updateTlv {} (0x{x})", .{ nav.fqn.fmt(ip), nav_index }); + log.debug("updateTlv {f} (0x{x})", .{ nav.fqn.fmt(ip), nav_index }); // 1. Lower TLV initializer const init_sym_index = try self.createTlvInitializer( @@ -1351,7 +1351,7 @@ fn updateLazySymbol( defer code_buffer.deinit(gpa); const name_str = blk: { - const name = try std.fmt.allocPrint(gpa, "__lazy_{s}_{}", .{ + const name = try std.fmt.allocPrint(gpa, "__lazy_{s}_{f}", .{ @tagName(lazy_sym.kind), Type.fromInterned(lazy_sym.ty).fmt(pt), }); @@ -1430,7 +1430,7 @@ pub fn deleteExport( } orelse return; const nlist_index = metadata.@"export"(self, name.toSlice(&zcu.intern_pool)) orelse return; - log.debug("deleting export '{}'", .{name.fmt(&zcu.intern_pool)}); + log.debug("deleting export '{f}'", .{name.fmt(&zcu.intern_pool)}); const nlist = &self.symtab.items(.nlist)[nlist_index.*]; self.symtab.items(.size)[nlist_index.*] = 0; @@ -1678,64 +1678,50 @@ pub fn asFile(self: *ZigObject) File { return .{ .zig_object = self }; } -pub fn fmtSymtab(self: *ZigObject, macho_file: *MachO) std.fmt.Formatter(formatSymtab) { +pub fn fmtSymtab(self: *ZigObject, macho_file: *MachO) std.fmt.Formatter(Format, Format.symtab) { return .{ .data = .{ .self = self, .macho_file = macho_file, } }; } -const FormatContext = struct { +const Format = struct { self: *ZigObject, macho_file: *MachO, -}; -fn formatSymtab( - ctx: FormatContext, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = unused_fmt_string; - _ = options; - try writer.writeAll(" symbols\n"); - const self = ctx.self; - const 
macho_file = ctx.macho_file; - for (self.symbols.items, 0..) |sym, i| { - const ref = self.getSymbolRef(@intCast(i), macho_file); - if (ref.getFile(macho_file) == null) { - // TODO any better way of handling this? - try writer.print(" {s} : unclaimed\n", .{sym.getName(macho_file)}); - } else { - try writer.print(" {}\n", .{ref.getSymbol(macho_file).?.fmt(macho_file)}); + fn symtab(f: Format, w: *Writer) Writer.Error!void { + try w.writeAll(" symbols\n"); + const self = f.self; + const macho_file = f.macho_file; + for (self.symbols.items, 0..) |sym, i| { + const ref = self.getSymbolRef(@intCast(i), macho_file); + if (ref.getFile(macho_file) == null) { + // TODO any better way of handling this? + try w.print(" {s} : unclaimed\n", .{sym.getName(macho_file)}); + } else { + try w.print(" {f}\n", .{ref.getSymbol(macho_file).?.fmt(macho_file)}); + } } } -} -pub fn fmtAtoms(self: *ZigObject, macho_file: *MachO) std.fmt.Formatter(formatAtoms) { + fn atoms(f: Format, w: *Writer) Writer.Error!void { + const self = f.self; + const macho_file = f.macho_file; + try w.writeAll(" atoms\n"); + for (self.getAtoms()) |atom_index| { + const atom = self.getAtom(atom_index) orelse continue; + try w.print(" {f}\n", .{atom.fmt(macho_file)}); + } + } +}; + +pub fn fmtAtoms(self: *ZigObject, macho_file: *MachO) std.fmt.Formatter(Format, Format.atoms) { return .{ .data = .{ .self = self, .macho_file = macho_file, } }; } -fn formatAtoms( - ctx: FormatContext, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = unused_fmt_string; - _ = options; - const self = ctx.self; - const macho_file = ctx.macho_file; - try writer.writeAll(" atoms\n"); - for (self.getAtoms()) |atom_index| { - const atom = self.getAtom(atom_index) orelse continue; - try writer.print(" {}\n", .{atom.fmt(macho_file)}); - } -} - const AvMetadata = struct { symbol_index: Symbol.Index, /// A list of all exports aliases of this Av. 
@@ -1797,6 +1783,7 @@ const mem = std.mem; const target_util = @import("../../target.zig"); const trace = @import("../../tracy.zig").trace; const std = @import("std"); +const Writer = std.io.Writer; const Allocator = std.mem.Allocator; const Archive = @import("Archive.zig"); diff --git a/src/link/MachO/dead_strip.zig b/src/link/MachO/dead_strip.zig index 24d7e18d1a..79ff4e1707 100644 --- a/src/link/MachO/dead_strip.zig +++ b/src/link/MachO/dead_strip.zig @@ -117,7 +117,7 @@ fn mark(roots: []*Atom, objects: []const File.Index, macho_file: *MachO) void { fn markLive(atom: *Atom, macho_file: *MachO) void { assert(atom.visited.load(.seq_cst)); atom.setAlive(true); - track_live_log.debug("{}marking live atom({d},{s})", .{ + track_live_log.debug("{f}marking live atom({d},{s})", .{ track_live_level, atom.atom_index, atom.getName(macho_file), @@ -196,15 +196,8 @@ const Level = struct { self.value += 1; } - pub fn format( - self: *const @This(), - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = unused_fmt_string; - _ = options; - try writer.writeByteNTimes(' ', self.value); + pub fn format(self: *const @This(), w: *Writer) Writer.Error!void { + try w.splatByteAll(' ', self.value); } }; @@ -219,6 +212,7 @@ const mem = std.mem; const trace = @import("../../tracy.zig").trace; const track_live_log = std.log.scoped(.dead_strip_track_live); const std = @import("std"); +const Writer = std.io.Writer; const Allocator = mem.Allocator; const Atom = @import("Atom.zig"); diff --git a/src/link/MachO/dyld_info/Rebase.zig b/src/link/MachO/dyld_info/Rebase.zig index dc4fcbcc1d..13764546f6 100644 --- a/src/link/MachO/dyld_info/Rebase.zig +++ b/src/link/MachO/dyld_info/Rebase.zig @@ -654,9 +654,10 @@ const log = std.log.scoped(.link_dyld_info); const macho = std.macho; const mem = std.mem; const testing = std.testing; -const trace = @import("../../../tracy.zig").trace; - const Allocator = mem.Allocator; +const Writer = 
std.io.Writer; + +const trace = @import("../../../tracy.zig").trace; const File = @import("../file.zig").File; const MachO = @import("../../MachO.zig"); const Rebase = @This(); diff --git a/src/link/MachO/dyld_info/Trie.zig b/src/link/MachO/dyld_info/Trie.zig index b45651eb67..8224dc8424 100644 --- a/src/link/MachO/dyld_info/Trie.zig +++ b/src/link/MachO/dyld_info/Trie.zig @@ -336,9 +336,9 @@ const Edge = struct { fn expectEqualHexStrings(expected: []const u8, given: []const u8) !void { assert(expected.len > 0); if (mem.eql(u8, expected, given)) return; - const expected_fmt = try std.fmt.allocPrint(testing.allocator, "{x}", .{std.fmt.fmtSliceHexLower(expected)}); + const expected_fmt = try std.fmt.allocPrint(testing.allocator, "{x}", .{expected}); defer testing.allocator.free(expected_fmt); - const given_fmt = try std.fmt.allocPrint(testing.allocator, "{x}", .{std.fmt.fmtSliceHexLower(given)}); + const given_fmt = try std.fmt.allocPrint(testing.allocator, "{x}", .{given}); defer testing.allocator.free(given_fmt); const idx = mem.indexOfDiff(u8, expected_fmt, given_fmt).?; const padding = try testing.allocator.alloc(u8, idx + 5); diff --git a/src/link/MachO/dyld_info/bind.zig b/src/link/MachO/dyld_info/bind.zig index 328d6a402c..9711f689ea 100644 --- a/src/link/MachO/dyld_info/bind.zig +++ b/src/link/MachO/dyld_info/bind.zig @@ -205,7 +205,7 @@ pub const Bind = struct { } } - log.debug("{x}, {d}, {x}, {?x}, {s}", .{ offset, count, skip, addend, @tagName(state) }); + log.debug("{x}, {d}, {x}, {x}, {s}", .{ offset, count, skip, addend, @tagName(state) }); log.debug(" => {x}", .{current.offset}); switch (state) { .start => { @@ -447,7 +447,7 @@ pub const WeakBind = struct { } } - log.debug("{x}, {d}, {x}, {?x}, {s}", .{ offset, count, skip, addend, @tagName(state) }); + log.debug("{x}, {d}, {x}, {x}, {s}", .{ offset, count, skip, addend, @tagName(state) }); log.debug(" => {x}", .{current.offset}); switch (state) { .start => { diff --git a/src/link/MachO/eh_frame.zig 
b/src/link/MachO/eh_frame.zig index ccabffb1dc..975b4784a8 100644 --- a/src/link/MachO/eh_frame.zig +++ b/src/link/MachO/eh_frame.zig @@ -81,46 +81,26 @@ pub const Cie = struct { return true; } - pub fn format( - cie: Cie, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = cie; - _ = unused_fmt_string; - _ = options; - _ = writer; - @compileError("do not format CIEs directly"); - } - - pub fn fmt(cie: Cie, macho_file: *MachO) std.fmt.Formatter(format2) { + pub fn fmt(cie: Cie, macho_file: *MachO) std.fmt.Formatter(Format, Format.default) { return .{ .data = .{ .cie = cie, .macho_file = macho_file, } }; } - const FormatContext = struct { + const Format = struct { cie: Cie, macho_file: *MachO, - }; - fn format2( - ctx: FormatContext, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = unused_fmt_string; - _ = options; - const cie = ctx.cie; - try writer.print("@{x} : size({x})", .{ - cie.offset, - cie.getSize(), - }); - if (!cie.alive) try writer.writeAll(" : [*]"); - } + fn default(f: Format, w: *Writer) Writer.Error!void { + const cie = f.cie; + try w.print("@{x} : size({x})", .{ + cie.offset, + cie.getSize(), + }); + if (!cie.alive) try w.writeAll(" : [*]"); + } + }; pub const Index = u32; @@ -231,49 +211,29 @@ pub const Fde = struct { return fde.getObject(macho_file).getAtom(fde.lsda); } - pub fn format( - fde: Fde, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = fde; - _ = unused_fmt_string; - _ = options; - _ = writer; - @compileError("do not format FDEs directly"); - } - - pub fn fmt(fde: Fde, macho_file: *MachO) std.fmt.Formatter(format2) { + pub fn fmt(fde: Fde, macho_file: *MachO) std.fmt.Formatter(Format, Format.default) { return .{ .data = .{ .fde = fde, .macho_file = macho_file, } }; } - const FormatContext = struct { + const Format = struct { fde: Fde, macho_file: 
*MachO, - }; - fn format2( - ctx: FormatContext, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = unused_fmt_string; - _ = options; - const fde = ctx.fde; - const macho_file = ctx.macho_file; - try writer.print("@{x} : size({x}) : cie({d}) : {s}", .{ - fde.offset, - fde.getSize(), - fde.cie, - fde.getAtom(macho_file).getName(macho_file), - }); - if (!fde.alive) try writer.writeAll(" : [*]"); - } + fn default(f: Format, writer: *Writer) Writer.Error!void { + const fde = f.fde; + const macho_file = f.macho_file; + try writer.print("@{x} : size({x}) : cie({d}) : {s}", .{ + fde.offset, + fde.getSize(), + fde.cie, + fde.getAtom(macho_file).getName(macho_file), + }); + if (!fde.alive) try writer.writeAll(" : [*]"); + } + }; pub const Index = u32; }; @@ -545,6 +505,7 @@ const math = std.math; const mem = std.mem; const std = @import("std"); const trace = @import("../../tracy.zig").trace; +const Writer = std.io.Writer; const Allocator = std.mem.Allocator; const Atom = @import("Atom.zig"); diff --git a/src/link/MachO/file.zig b/src/link/MachO/file.zig index 6d90dde4a9..225021aa0f 100644 --- a/src/link/MachO/file.zig +++ b/src/link/MachO/file.zig @@ -10,23 +10,16 @@ pub const File = union(enum) { }; } - pub fn fmtPath(file: File) std.fmt.Formatter(formatPath) { + pub fn fmtPath(file: File) std.fmt.Formatter(File, formatPath) { return .{ .data = file }; } - fn formatPath( - file: File, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = unused_fmt_string; - _ = options; + fn formatPath(file: File, w: *Writer) Writer.Error!void { switch (file) { - .zig_object => |zo| try writer.writeAll(zo.basename), - .internal => try writer.writeAll("internal"), - .object => |x| try writer.print("{}", .{x.fmtPath()}), - .dylib => |dl| try writer.print("{}", .{@as(Path, dl.path)}), + .zig_object => |zo| try w.writeAll(zo.basename), + .internal => try 
w.writeAll("internal"), + .object => |x| try w.print("{f}", .{x.fmtPath()}), + .dylib => |dl| try w.print("{f}", .{@as(Path, dl.path)}), } } @@ -371,6 +364,7 @@ const log = std.log.scoped(.link); const macho = std.macho; const Allocator = std.mem.Allocator; const Path = std.Build.Cache.Path; +const Writer = std.io.Writer; const trace = @import("../../tracy.zig").trace; const Archive = @import("Archive.zig"); diff --git a/src/link/MachO/load_commands.zig b/src/link/MachO/load_commands.zig index 08ab11e3f9..7c7b4416f4 100644 --- a/src/link/MachO/load_commands.zig +++ b/src/link/MachO/load_commands.zig @@ -3,6 +3,7 @@ const assert = std.debug.assert; const log = std.log.scoped(.link); const macho = std.macho; const mem = std.mem; +const Writer = std.io.Writer; const Allocator = mem.Allocator; const DebugSymbols = @import("DebugSymbols.zig"); diff --git a/src/link/MachO/relocatable.zig b/src/link/MachO/relocatable.zig index 4edf6e043d..4bbdb73a3e 100644 --- a/src/link/MachO/relocatable.zig +++ b/src/link/MachO/relocatable.zig @@ -20,13 +20,13 @@ pub fn flushObject(macho_file: *MachO, comp: *Compilation, module_obj_path: ?Pat // the *only* input file over. 
const path = positionals.items[0].path().?; const in_file = path.root_dir.handle.openFile(path.sub_path, .{}) catch |err| - return diags.fail("failed to open {}: {s}", .{ path, @errorName(err) }); + return diags.fail("failed to open {f}: {s}", .{ path, @errorName(err) }); const stat = in_file.stat() catch |err| - return diags.fail("failed to stat {}: {s}", .{ path, @errorName(err) }); + return diags.fail("failed to stat {f}: {s}", .{ path, @errorName(err) }); const amt = in_file.copyRangeAll(0, macho_file.base.file.?, 0, stat.size) catch |err| - return diags.fail("failed to copy range of file {}: {s}", .{ path, @errorName(err) }); + return diags.fail("failed to copy range of file {f}: {s}", .{ path, @errorName(err) }); if (amt != stat.size) - return diags.fail("unexpected short write in copy range of file {}", .{path}); + return diags.fail("unexpected short write in copy range of file {f}", .{path}); return; } @@ -62,7 +62,7 @@ pub fn flushObject(macho_file: *MachO, comp: *Compilation, module_obj_path: ?Pat allocateSegment(macho_file); if (build_options.enable_logging) { - state_log.debug("{}", .{macho_file.dumpState()}); + state_log.debug("{f}", .{macho_file.dumpState()}); } try writeSections(macho_file); @@ -126,7 +126,7 @@ pub fn flushStaticLib(macho_file: *MachO, comp: *Compilation, module_obj_path: ? allocateSegment(macho_file); if (build_options.enable_logging) { - state_log.debug("{}", .{macho_file.dumpState()}); + state_log.debug("{f}", .{macho_file.dumpState()}); } try writeSections(macho_file); @@ -202,7 +202,7 @@ pub fn flushStaticLib(macho_file: *MachO, comp: *Compilation, module_obj_path: ? 
}; if (build_options.enable_logging) { - state_log.debug("ar_symtab\n{}\n", .{ar_symtab.fmt(macho_file)}); + state_log.debug("ar_symtab\n{f}\n", .{ar_symtab.fmt(macho_file)}); } var buffer = std.ArrayList(u8).init(gpa); @@ -784,6 +784,7 @@ const macho = std.macho; const math = std.math; const mem = std.mem; const state_log = std.log.scoped(.link_state); +const Writer = std.io.Writer; const Archive = @import("Archive.zig"); const Atom = @import("Atom.zig"); diff --git a/src/link/MachO/synthetic.zig b/src/link/MachO/synthetic.zig index 6042fe628d..c91c41df5f 100644 --- a/src/link/MachO/synthetic.zig +++ b/src/link/MachO/synthetic.zig @@ -37,34 +37,27 @@ pub const GotSection = struct { } } - const FormatCtx = struct { + const Format = struct { got: GotSection, macho_file: *MachO, + + pub fn print(f: Format, w: *Writer) Writer.Error!void { + for (f.got.symbols.items, 0..) |ref, i| { + const symbol = ref.getSymbol(f.macho_file).?; + try w.print(" {d}@0x{x} => {f}@0x{x} ({s})\n", .{ + i, + symbol.getGotAddress(f.macho_file), + ref, + symbol.getAddress(.{}, f.macho_file), + symbol.getName(f.macho_file), + }); + } + } }; - pub fn fmt(got: GotSection, macho_file: *MachO) std.fmt.Formatter(format2) { + pub fn fmt(got: GotSection, macho_file: *MachO) std.fmt.Formatter(Format, Format.print) { return .{ .data = .{ .got = got, .macho_file = macho_file } }; } - - pub fn format2( - ctx: FormatCtx, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = options; - _ = unused_fmt_string; - for (ctx.got.symbols.items, 0..) 
|ref, i| { - const symbol = ref.getSymbol(ctx.macho_file).?; - try writer.print(" {d}@0x{x} => {d}@0x{x} ({s})\n", .{ - i, - symbol.getGotAddress(ctx.macho_file), - ref, - symbol.getAddress(.{}, ctx.macho_file), - symbol.getName(ctx.macho_file), - }); - } - } }; pub const StubsSection = struct { @@ -128,34 +121,27 @@ pub const StubsSection = struct { } } - const FormatCtx = struct { - stubs: StubsSection, - macho_file: *MachO, - }; - - pub fn fmt(stubs: StubsSection, macho_file: *MachO) std.fmt.Formatter(format2) { + pub fn fmt(stubs: StubsSection, macho_file: *MachO) std.fmt.Formatter(Format, Format.print) { return .{ .data = .{ .stubs = stubs, .macho_file = macho_file } }; } - pub fn format2( - ctx: FormatCtx, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = options; - _ = unused_fmt_string; - for (ctx.stubs.symbols.items, 0..) |ref, i| { - const symbol = ref.getSymbol(ctx.macho_file).?; - try writer.print(" {d}@0x{x} => {d}@0x{x} ({s})\n", .{ - i, - symbol.getStubsAddress(ctx.macho_file), - ref, - symbol.getAddress(.{}, ctx.macho_file), - symbol.getName(ctx.macho_file), - }); + const Format = struct { + stubs: StubsSection, + macho_file: *MachO, + + pub fn print(f: Format, w: *Writer) Writer.Error!void { + for (f.stubs.symbols.items, 0..) 
|ref, i| { + const symbol = ref.getSymbol(f.macho_file).?; + try w.print(" {d}@0x{x} => {f}@0x{x} ({s})\n", .{ + i, + symbol.getStubsAddress(f.macho_file), + ref, + symbol.getAddress(.{}, f.macho_file), + symbol.getName(f.macho_file), + }); + } } - } + }; }; pub const StubsHelperSection = struct { @@ -357,34 +343,27 @@ pub const TlvPtrSection = struct { } } - const FormatCtx = struct { - tlv: TlvPtrSection, - macho_file: *MachO, - }; - - pub fn fmt(tlv: TlvPtrSection, macho_file: *MachO) std.fmt.Formatter(format2) { + pub fn fmt(tlv: TlvPtrSection, macho_file: *MachO) std.fmt.Formatter(Format, Format.print) { return .{ .data = .{ .tlv = tlv, .macho_file = macho_file } }; } - pub fn format2( - ctx: FormatCtx, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = options; - _ = unused_fmt_string; - for (ctx.tlv.symbols.items, 0..) |ref, i| { - const symbol = ref.getSymbol(ctx.macho_file).?; - try writer.print(" {d}@0x{x} => {d}@0x{x} ({s})\n", .{ - i, - symbol.getTlvPtrAddress(ctx.macho_file), - ref, - symbol.getAddress(.{}, ctx.macho_file), - symbol.getName(ctx.macho_file), - }); + const Format = struct { + tlv: TlvPtrSection, + macho_file: *MachO, + + pub fn print(f: Format, w: *Writer) Writer.Error!void { + for (f.tlv.symbols.items, 0..) 
|ref, i| { + const symbol = ref.getSymbol(f.macho_file).?; + try w.print(" {d}@0x{x} => {f}@0x{x} ({s})\n", .{ + i, + symbol.getTlvPtrAddress(f.macho_file), + ref, + symbol.getAddress(.{}, f.macho_file), + symbol.getName(f.macho_file), + }); + } } - } + }; }; pub const ObjcStubsSection = struct { @@ -482,34 +461,27 @@ pub const ObjcStubsSection = struct { } } - const FormatCtx = struct { - objc: ObjcStubsSection, - macho_file: *MachO, - }; - - pub fn fmt(objc: ObjcStubsSection, macho_file: *MachO) std.fmt.Formatter(format2) { + pub fn fmt(objc: ObjcStubsSection, macho_file: *MachO) std.fmt.Formatter(Format, Format.print) { return .{ .data = .{ .objc = objc, .macho_file = macho_file } }; } - pub fn format2( - ctx: FormatCtx, - comptime unused_fmt_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = options; - _ = unused_fmt_string; - for (ctx.objc.symbols.items, 0..) |ref, i| { - const symbol = ref.getSymbol(ctx.macho_file).?; - try writer.print(" {d}@0x{x} => {d}@0x{x} ({s})\n", .{ - i, - symbol.getObjcStubsAddress(ctx.macho_file), - ref, - symbol.getAddress(.{}, ctx.macho_file), - symbol.getName(ctx.macho_file), - }); + const Format = struct { + objc: ObjcStubsSection, + macho_file: *MachO, + + pub fn print(f: Format, w: *Writer) Writer.Error!void { + for (f.objc.symbols.items, 0..) 
|ref, i| { + const symbol = ref.getSymbol(f.macho_file).?; + try w.print(" {d}@0x{x} => {f}@0x{x} ({s})\n", .{ + i, + symbol.getObjcStubsAddress(f.macho_file), + ref, + symbol.getAddress(.{}, f.macho_file), + symbol.getName(f.macho_file), + }); + } } - } + }; pub const Index = u32; }; @@ -625,13 +597,14 @@ pub const DataInCode = struct { }; }; +const std = @import("std"); const aarch64 = @import("../aarch64.zig"); const assert = std.debug.assert; const macho = std.macho; const math = std.math; -const std = @import("std"); -const trace = @import("../../tracy.zig").trace; - const Allocator = std.mem.Allocator; +const Writer = std.io.Writer; + +const trace = @import("../../tracy.zig").trace; const MachO = @import("../MachO.zig"); const Symbol = @import("Symbol.zig"); diff --git a/src/link/Plan9.zig b/src/link/Plan9.zig index 9658495bce..815e162833 100644 --- a/src/link/Plan9.zig +++ b/src/link/Plan9.zig @@ -445,7 +445,7 @@ pub fn updateNav(self: *Plan9, pt: Zcu.PerThread, nav_index: InternPool.Nav.Inde .func => return, .variable => |variable| Value.fromInterned(variable.init), .@"extern" => { - log.debug("found extern decl: {}", .{nav.name.fmt(ip)}); + log.debug("found extern decl: {f}", .{nav.name.fmt(ip)}); return; }, else => nav_val, @@ -675,7 +675,7 @@ pub fn flush( const off = self.getAddr(text_i, .t); text_i += out.code.len; atom.offset = off; - log.debug("write text nav 0x{x} ({}), lines {d} to {d}.;__GOT+0x{x} vaddr: 0x{x}", .{ nav_index, nav.name.fmt(&pt.zcu.intern_pool), out.start_line + 1, out.end_line, atom.got_index.? * 8, off }); + log.debug("write text nav 0x{x} ({f}), lines {d} to {d}.;__GOT+0x{x} vaddr: 0x{x}", .{ nav_index, nav.name.fmt(&pt.zcu.intern_pool), out.start_line + 1, out.end_line, atom.got_index.? * 8, off }); if (!self.sixtyfour_bit) { mem.writeInt(u32, got_table[atom.got_index.? 
* 4 ..][0..4], @intCast(off), target.cpu.arch.endian()); } else { @@ -974,11 +974,11 @@ pub fn seeNav(self: *Plan9, pt: Zcu.PerThread, nav_index: InternPool.Nav.Index) self.etext_edata_end_atom_indices[2] = atom_idx; } try self.updateFinish(pt, nav_index); - log.debug("seeNav(extern) for {} (got_addr=0x{x})", .{ + log.debug("seeNav(extern) for {f} (got_addr=0x{x})", .{ nav.name.fmt(ip), self.getAtom(atom_idx).getOffsetTableAddress(self), }); - } else log.debug("seeNav for {}", .{nav.name.fmt(ip)}); + } else log.debug("seeNav for {f}", .{nav.name.fmt(ip)}); return atom_idx; } @@ -1043,7 +1043,7 @@ fn updateLazySymbolAtom( defer code_buffer.deinit(gpa); // create the symbol for the name - const name = try std.fmt.allocPrint(gpa, "__lazy_{s}_{}", .{ + const name = try std.fmt.allocPrint(gpa, "__lazy_{s}_{f}", .{ @tagName(sym.kind), Type.fromInterned(sym.ty).fmt(pt), }); @@ -1314,7 +1314,7 @@ pub fn getNavVAddr( ) !u64 { const ip = &pt.zcu.intern_pool; const nav = ip.getNav(nav_index); - log.debug("getDeclVAddr for {}", .{nav.name.fmt(ip)}); + log.debug("getDeclVAddr for {f}", .{nav.name.fmt(ip)}); if (nav.getExtern(ip) != null) { if (nav.name.eqlSlice("etext", ip)) { try self.addReloc(reloc_info.parent.atom_index, .{ diff --git a/src/link/SpirV.zig b/src/link/SpirV.zig index 7b908d56ed..93f7000401 100644 --- a/src/link/SpirV.zig +++ b/src/link/SpirV.zig @@ -117,7 +117,7 @@ pub fn updateNav(self: *SpirV, pt: Zcu.PerThread, nav: InternPool.Nav.Index) lin } const ip = &pt.zcu.intern_pool; - log.debug("lowering nav {}({d})", .{ ip.getNav(nav).fqn.fmt(ip), nav }); + log.debug("lowering nav {f}({d})", .{ ip.getNav(nav).fqn.fmt(ip), nav }); try self.object.updateNav(pt, nav); } @@ -203,10 +203,10 @@ pub fn flush( // We need to export the list of error names somewhere so that we can pretty-print them in the // executor. This is not really an important thing though, so we can just dump it in any old // nonsemantic instruction. 
For now, just put it in OpSourceExtension with a special name. - var error_info = std.ArrayList(u8).init(self.object.gpa); + var error_info: std.io.Writer.Allocating = .init(self.object.gpa); defer error_info.deinit(); - try error_info.appendSlice("zig_errors:"); + error_info.writer.writeAll("zig_errors:") catch return error.OutOfMemory; const ip = &self.base.comp.zcu.?.intern_pool; for (ip.global_error_set.getNamesFromMainThread()) |name| { // Errors can contain pretty much any character - to encode them in a string we must escape @@ -214,9 +214,9 @@ pub fn flush( // name if it contains no strange characters is nice for debugging. URI encoding fits the bill. // We're using : as separator, which is a reserved character. - try error_info.append(':'); - try std.Uri.Component.percentEncode( - error_info.writer(), + error_info.writer.writeByte(':') catch return error.OutOfMemory; + std.Uri.Component.percentEncode( + &error_info.writer, name.toSlice(ip), struct { fn isValidChar(c: u8) bool { @@ -226,10 +226,10 @@ pub fn flush( }; } }.isValidChar, - ); + ) catch return error.OutOfMemory; } try spv.sections.debug_strings.emit(gpa, .OpSourceExtension, .{ - .extension = error_info.items, + .extension = error_info.getWritten(), }); const module = try spv.finalize(arena); diff --git a/src/link/SpirV/deduplicate.zig b/src/link/SpirV/deduplicate.zig index 2526900938..46b9642458 100644 --- a/src/link/SpirV/deduplicate.zig +++ b/src/link/SpirV/deduplicate.zig @@ -110,7 +110,7 @@ const ModuleInfo = struct { .TypeDeclaration, .ConstantCreation => { const entry = try entities.getOrPut(result_id); if (entry.found_existing) { - log.err("type or constant {} has duplicate definition", .{result_id}); + log.err("type or constant {f} has duplicate definition", .{result_id}); return error.DuplicateId; } entry.value_ptr.* = entity; diff --git a/src/link/SpirV/lower_invocation_globals.zig b/src/link/SpirV/lower_invocation_globals.zig index 9d91a142e4..5a64f38a35 100644 --- 
a/src/link/SpirV/lower_invocation_globals.zig +++ b/src/link/SpirV/lower_invocation_globals.zig @@ -92,7 +92,7 @@ const ModuleInfo = struct { const entry_point: ResultId = @enumFromInt(inst.operands[1]); const entry = try entry_points.getOrPut(entry_point); if (entry.found_existing) { - log.err("Entry point type {} has duplicate definition", .{entry_point}); + log.err("Entry point type {f} has duplicate definition", .{entry_point}); return error.DuplicateId; } }, @@ -103,7 +103,7 @@ const ModuleInfo = struct { const entry = try fn_types.getOrPut(fn_type); if (entry.found_existing) { - log.err("Function type {} has duplicate definition", .{fn_type}); + log.err("Function type {f} has duplicate definition", .{fn_type}); return error.DuplicateId; } @@ -135,7 +135,7 @@ const ModuleInfo = struct { }, .OpFunction => { if (maybe_current_function) |current_function| { - log.err("OpFunction {} does not have an OpFunctionEnd", .{current_function}); + log.err("OpFunction {f} does not have an OpFunctionEnd", .{current_function}); return error.InvalidPhysicalFormat; } @@ -154,7 +154,7 @@ const ModuleInfo = struct { }; const entry = try functions.getOrPut(current_function); if (entry.found_existing) { - log.err("Function {} has duplicate definition", .{current_function}); + log.err("Function {f} has duplicate definition", .{current_function}); return error.DuplicateId; } @@ -162,7 +162,7 @@ const ModuleInfo = struct { try callee_store.appendSlice(calls.keys()); const fn_type = fn_types.get(fn_ty_id) orelse { - log.err("Function {} has invalid OpFunction type", .{current_function}); + log.err("Function {f} has invalid OpFunction type", .{current_function}); return error.InvalidId; }; @@ -187,7 +187,7 @@ const ModuleInfo = struct { } if (maybe_current_function) |current_function| { - log.err("OpFunction {} does not have an OpFunctionEnd", .{current_function}); + log.err("OpFunction {f} does not have an OpFunctionEnd", .{current_function}); return error.InvalidPhysicalFormat; } @@ 
-222,7 +222,7 @@ const ModuleInfo = struct { seen: *std.DynamicBitSetUnmanaged, ) !void { const index = self.functions.getIndex(id) orelse { - log.err("function calls invalid function {}", .{id}); + log.err("function calls invalid function {f}", .{id}); return error.InvalidId; }; @@ -261,7 +261,7 @@ const ModuleInfo = struct { seen: *std.DynamicBitSetUnmanaged, ) !void { const index = self.invocation_globals.getIndex(id) orelse { - log.err("invalid invocation global {}", .{id}); + log.err("invalid invocation global {f}", .{id}); return error.InvalidId; }; @@ -276,7 +276,7 @@ const ModuleInfo = struct { } const initializer = self.functions.get(info.initializer) orelse { - log.err("invocation global {} has invalid initializer {}", .{ id, info.initializer }); + log.err("invocation global {f} has invalid initializer {f}", .{ id, info.initializer }); return error.InvalidId; }; diff --git a/src/link/SpirV/prune_unused.zig b/src/link/SpirV/prune_unused.zig index 51903f9424..275458564e 100644 --- a/src/link/SpirV/prune_unused.zig +++ b/src/link/SpirV/prune_unused.zig @@ -128,7 +128,7 @@ const ModuleInfo = struct { switch (inst.opcode) { .OpFunction => { if (maybe_current_function) |current_function| { - log.err("OpFunction {} does not have an OpFunctionEnd", .{current_function}); + log.err("OpFunction {f} does not have an OpFunctionEnd", .{current_function}); return error.InvalidPhysicalFormat; } @@ -145,7 +145,7 @@ const ModuleInfo = struct { }; const entry = try functions.getOrPut(current_function); if (entry.found_existing) { - log.err("Function {} has duplicate definition", .{current_function}); + log.err("Function {f} has duplicate definition", .{current_function}); return error.DuplicateId; } @@ -163,7 +163,7 @@ const ModuleInfo = struct { } if (maybe_current_function) |current_function| { - log.err("OpFunction {} does not have an OpFunctionEnd", .{current_function}); + log.err("OpFunction {f} does not have an OpFunctionEnd", .{current_function}); return 
error.InvalidPhysicalFormat; } @@ -184,7 +184,7 @@ const AliveMarker = struct { fn markAlive(self: *AliveMarker, result_id: ResultId) BinaryModule.ParseError!void { const index = self.info.result_id_to_code_offset.getIndex(result_id) orelse { - log.err("undefined result-id {}", .{result_id}); + log.err("undefined result-id {f}", .{result_id}); return error.InvalidId; }; diff --git a/src/link/Wasm.zig b/src/link/Wasm.zig index 6c67547a00..74c30c0c18 100644 --- a/src/link/Wasm.zig +++ b/src/link/Wasm.zig @@ -547,7 +547,7 @@ pub const SourceLocation = enum(u32) { switch (sl.unpack(wasm)) { .none => unreachable, .zig_object_nofile => diags.addError("zig compilation unit: " ++ f, args), - .object_index => |i| diags.addError("{}: " ++ f, .{i.ptr(wasm).path} ++ args), + .object_index => |i| diags.addError("{f}: " ++ f, .{i.ptr(wasm).path} ++ args), .source_location_index => @panic("TODO"), } } @@ -579,9 +579,9 @@ pub const SourceLocation = enum(u32) { .object_index => |i| { const obj = i.ptr(wasm); return if (obj.archive_member_name.slice(wasm)) |obj_name| - try bundle.printString("{} ({s}): {s}", .{ obj.path, std.fs.path.basename(obj_name), msg }) + try bundle.printString("{f} ({s}): {s}", .{ obj.path, std.fs.path.basename(obj_name), msg }) else - try bundle.printString("{}: {s}", .{ obj.path, msg }); + try bundle.printString("{f}: {s}", .{ obj.path, msg }); }, .source_location_index => @panic("TODO"), }; @@ -2126,14 +2126,7 @@ pub const FunctionType = extern struct { wasm: *const Wasm, ft: FunctionType, - pub fn format( - self: Formatter, - comptime format_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - if (format_string.len != 0) std.fmt.invalidFmtError(format_string, self); - _ = options; + pub fn format(self: Formatter, writer: *std.io.Writer) std.io.Writer.Error!void { const params = self.ft.params.slice(self.wasm); const returns = self.ft.returns.slice(self.wasm); @@ -2912,9 +2905,7 @@ pub const Feature = packed struct(u8) { 
@"=", }; - pub fn format(feature: Feature, comptime fmt: []const u8, opt: std.fmt.FormatOptions, writer: anytype) !void { - _ = opt; - _ = fmt; + pub fn format(feature: Feature, writer: *std.io.Writer) std.io.Writer.Error!void { try writer.print("{s} {s}", .{ @tagName(feature.prefix), @tagName(feature.tag) }); } @@ -3036,7 +3027,7 @@ fn openParseObjectReportingFailure(wasm: *Wasm, path: Path) void { } fn parseObject(wasm: *Wasm, obj: link.Input.Object) !void { - log.debug("parseObject {}", .{obj.path}); + log.debug("parseObject {f}", .{obj.path}); const gpa = wasm.base.comp.gpa; const gc_sections = wasm.base.gc_sections; @@ -3060,7 +3051,7 @@ fn parseObject(wasm: *Wasm, obj: link.Input.Object) !void { } fn parseArchive(wasm: *Wasm, obj: link.Input.Object) !void { - log.debug("parseArchive {}", .{obj.path}); + log.debug("parseArchive {f}", .{obj.path}); const gpa = wasm.base.comp.gpa; const gc_sections = wasm.base.gc_sections; @@ -3196,7 +3187,7 @@ pub fn updateFunc( const is_obj = zcu.comp.config.output_mode == .Obj; const target = &zcu.comp.root_mod.resolved_target.result; const owner_nav = zcu.funcInfo(func_index).owner_nav; - log.debug("updateFunc {}", .{ip.getNav(owner_nav).fqn.fmt(ip)}); + log.debug("updateFunc {f}", .{ip.getNav(owner_nav).fqn.fmt(ip)}); // For Wasm, we do not lower the MIR to code just yet. 
That lowering happens during `flush`, // after garbage collection, which can affect function and global indexes, which affects the @@ -3307,7 +3298,7 @@ pub fn updateNav(wasm: *Wasm, pt: Zcu.PerThread, nav_index: InternPool.Nav.Index .variable => |variable| .{ variable.init, variable.owner_nav }, else => .{ nav.status.fully_resolved.val, nav_index }, }; - //log.debug("updateNav {} {d}", .{ nav.fqn.fmt(ip), chased_nav_index }); + //log.debug("updateNav {f} {d}", .{ nav.fqn.fmt(ip), chased_nav_index }); assert(!wasm.imports.contains(chased_nav_index)); if (nav_init != .none and !Value.fromInterned(nav_init).typeOf(zcu).hasRuntimeBits(zcu)) { @@ -4347,7 +4338,7 @@ fn resolveFunctionSynthetic( }); if (import.type != correct_func_type) { const diags = &wasm.base.comp.link_diags; - return import.source_location.fail(diags, "synthetic function {s} {} imported with incorrect signature {}", .{ + return import.source_location.fail(diags, "synthetic function {s} {f} imported with incorrect signature {f}", .{ @tagName(res), correct_func_type.fmt(wasm), import.type.fmt(wasm), }); } diff --git a/src/link/Wasm/Flush.zig b/src/link/Wasm/Flush.zig index 60f5971e40..edb118e495 100644 --- a/src/link/Wasm/Flush.zig +++ b/src/link/Wasm/Flush.zig @@ -534,7 +534,7 @@ pub fn finish(f: *Flush, wasm: *Wasm) !void { wasm.memories.limits.max = @intCast(max_memory / page_size); wasm.memories.limits.flags.has_max = true; if (shared_memory) wasm.memories.limits.flags.is_shared = true; - log.debug("maximum memory pages: {?d}", .{wasm.memories.limits.max}); + log.debug("maximum memory pages: {d}", .{wasm.memories.limits.max}); } f.memory_layout_finished = true; @@ -1035,20 +1035,14 @@ pub fn finish(f: *Flush, wasm: *Wasm) !void { var id: [16]u8 = undefined; std.crypto.hash.sha3.TurboShake128(null).hash(binary_bytes.items, &id, .{}); var uuid: [36]u8 = undefined; - _ = try std.fmt.bufPrint(&uuid, "{s}-{s}-{s}-{s}-{s}", .{ - std.fmt.fmtSliceHexLower(id[0..4]), - std.fmt.fmtSliceHexLower(id[4..6]), - 
std.fmt.fmtSliceHexLower(id[6..8]), - std.fmt.fmtSliceHexLower(id[8..10]), - std.fmt.fmtSliceHexLower(id[10..]), + _ = try std.fmt.bufPrint(&uuid, "{x}-{x}-{x}-{x}-{x}", .{ + id[0..4], id[4..6], id[6..8], id[8..10], id[10..], }); try emitBuildIdSection(gpa, binary_bytes, &uuid); }, .hexstring => |hs| { var buffer: [32 * 2]u8 = undefined; - const str = std.fmt.bufPrint(&buffer, "{s}", .{ - std.fmt.fmtSliceHexLower(hs.toSlice()), - }) catch unreachable; + const str = std.fmt.bufPrint(&buffer, "{x}", .{hs.toSlice()}) catch unreachable; try emitBuildIdSection(gpa, binary_bytes, str); }, else => |mode| { diff --git a/src/link/Wasm/Object.zig b/src/link/Wasm/Object.zig index 2bdd64efe2..2137ed3b38 100644 --- a/src/link/Wasm/Object.zig +++ b/src/link/Wasm/Object.zig @@ -856,7 +856,7 @@ pub fn parse( start_function = @enumFromInt(functions_start + index); }, .element => { - log.warn("unimplemented: element section in {} {?s}", .{ path, archive_member_name }); + log.warn("unimplemented: element section in {f} {?s}", .{ path, archive_member_name }); pos = section_end; }, .code => { @@ -984,10 +984,10 @@ pub fn parse( if (gop.value_ptr.type != fn_ty_index) { var err = try diags.addErrorWithNotes(2); try err.addMsg("symbol '{s}' mismatching function signatures", .{name.slice(wasm)}); - gop.value_ptr.source_location.addNote(&err, "imported as {} here", .{ + gop.value_ptr.source_location.addNote(&err, "imported as {f} here", .{ gop.value_ptr.type.fmt(wasm), }); - source_location.addNote(&err, "imported as {} here", .{fn_ty_index.fmt(wasm)}); + source_location.addNote(&err, "imported as {f} here", .{fn_ty_index.fmt(wasm)}); continue; } if (gop.value_ptr.module_name != ptr.module_name.toOptional()) { @@ -1155,11 +1155,11 @@ pub fn parse( if (gop.value_ptr.type != ptr.type_index) { var err = try diags.addErrorWithNotes(2); try err.addMsg("function signature mismatch: {s}", .{name.slice(wasm)}); - gop.value_ptr.source_location.addNote(&err, "exported as {} here", .{ + 
gop.value_ptr.source_location.addNote(&err, "exported as {f} here", .{ ptr.type_index.fmt(wasm), }); const word = if (gop.value_ptr.resolution == .unresolved) "imported" else "exported"; - source_location.addNote(&err, "{s} as {} here", .{ word, gop.value_ptr.type.fmt(wasm) }); + source_location.addNote(&err, "{s} as {f} here", .{ word, gop.value_ptr.type.fmt(wasm) }); continue; } if (gop.value_ptr.resolution == .unresolved or gop.value_ptr.flags.binding == .weak) { @@ -1176,8 +1176,8 @@ pub fn parse( } var err = try diags.addErrorWithNotes(2); try err.addMsg("symbol collision: {s}", .{name.slice(wasm)}); - gop.value_ptr.source_location.addNote(&err, "exported as {} here", .{ptr.type_index.fmt(wasm)}); - source_location.addNote(&err, "exported as {} here", .{gop.value_ptr.type.fmt(wasm)}); + gop.value_ptr.source_location.addNote(&err, "exported as {f} here", .{ptr.type_index.fmt(wasm)}); + source_location.addNote(&err, "exported as {f} here", .{gop.value_ptr.type.fmt(wasm)}); continue; } else { gop.value_ptr.* = .{ diff --git a/src/link/table_section.zig b/src/link/table_section.zig index c579198c91..72cfe96c5a 100644 --- a/src/link/table_section.zig +++ b/src/link/table_section.zig @@ -39,14 +39,7 @@ pub fn TableSection(comptime Entry: type) type { return self.entries.items.len; } - pub fn format( - self: Self, - comptime unused_format_string: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = options; - comptime assert(unused_format_string.len == 0); + pub fn format(self: Self, writer: *std.io.Writer) std.io.Writer.Error!void { try writer.writeAll("TableSection:\n"); for (self.entries.items, 0..) 
|entry, i| { try writer.print(" {d} => {}\n", .{ i, entry }); diff --git a/src/link/tapi/parse.zig b/src/link/tapi/parse.zig index f6556dd5dd..0f418dea30 100644 --- a/src/link/tapi/parse.zig +++ b/src/link/tapi/parse.zig @@ -57,14 +57,9 @@ pub const Node = struct { } } - pub fn format( - self: *const Node, - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { + pub fn format(self: *const Node, writer: *std.io.Writer) std.io.Writer.Error!void { switch (self.tag) { - inline else => |tag| return @as(*tag.Type(), @fieldParentPtr("base", self)).format(fmt, options, writer), + inline else => |tag| return @as(*tag.Type(), @fieldParentPtr("base", self)).format(writer), } } @@ -86,24 +81,17 @@ pub const Node = struct { } } - pub fn format( - self: *const Doc, - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = options; - _ = fmt; + pub fn format(self: *const Doc, writer: *std.io.Writer) std.io.Writer.Error!void { if (self.directive) |id| { - try std.fmt.format(writer, "{{ ", .{}); + try writer.print("{{ ", .{}); const directive = self.base.tree.getRaw(id, id); - try std.fmt.format(writer, ".directive = {s}, ", .{directive}); + try writer.print(".directive = {s}, ", .{directive}); } if (self.value) |node| { - try std.fmt.format(writer, "{}", .{node}); + try writer.print("{}", .{node}); } if (self.directive != null) { - try std.fmt.format(writer, " }}", .{}); + try writer.print(" }}", .{}); } } }; @@ -133,14 +121,7 @@ pub const Node = struct { self.values.deinit(allocator); } - pub fn format( - self: *const Map, - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = options; - _ = fmt; + pub fn format(self: *const Map, writer: *std.io.Writer) std.io.Writer.Error!void { try std.fmt.format(writer, "{{ ", .{}); for (self.values.items) |entry| { const key = self.base.tree.getRaw(entry.key, entry.key); @@ -172,14 +153,7 @@ pub const Node = struct { 
self.values.deinit(allocator); } - pub fn format( - self: *const List, - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = options; - _ = fmt; + pub fn format(self: *const List, writer: *std.io.Writer) std.io.Writer.Error!void { try std.fmt.format(writer, "[ ", .{}); for (self.values.items) |node| { try std.fmt.format(writer, "{}, ", .{node}); @@ -203,14 +177,7 @@ pub const Node = struct { self.string_value.deinit(allocator); } - pub fn format( - self: *const Value, - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = options; - _ = fmt; + pub fn format(self: *const Value, writer: *std.io.Writer) std.io.Writer.Error!void { const raw = self.base.tree.getRaw(self.base.start, self.base.end); return std.fmt.format(writer, "{s}", .{raw}); } diff --git a/src/main.zig b/src/main.zig index f3e8eb9634..1d178730b0 100644 --- a/src/main.zig +++ b/src/main.zig @@ -65,6 +65,9 @@ pub fn wasi_cwd() std.os.wasi.fd_t { const fatal = std.process.fatal; +/// This can be global since stdout is a singleton. +var stdio_buffer: [4096]u8 = undefined; + /// Shaming all the locations that inappropriately use an O(N) search algorithm. /// Please delete this and fix the compilation errors! 
pub const @"bad O(N)" = void; @@ -340,11 +343,11 @@ fn mainArgs(gpa: Allocator, arena: Allocator, args: []const []const u8) !void { } else if (mem.eql(u8, cmd, "targets")) { dev.check(.targets_command); const host = std.zig.resolveTargetQueryOrFatal(.{}); - const stdout = io.getStdOut().writer(); + const stdout = fs.File.stdout().deprecatedWriter(); return @import("print_targets.zig").cmdTargets(arena, cmd_args, stdout, &host); } else if (mem.eql(u8, cmd, "version")) { dev.check(.version_command); - try std.io.getStdOut().writeAll(build_options.version ++ "\n"); + try fs.File.stdout().writeAll(build_options.version ++ "\n"); // Check libc++ linkage to make sure Zig was built correctly, but only // for "env" and "version" to avoid affecting the startup time for // build-critical commands (check takes about ~10 μs) @@ -352,7 +355,7 @@ fn mainArgs(gpa: Allocator, arena: Allocator, args: []const []const u8) !void { } else if (mem.eql(u8, cmd, "env")) { dev.check(.env_command); verifyLibcxxCorrectlyLinked(); - return @import("print_env.zig").cmdEnv(arena, cmd_args, io.getStdOut().writer()); + return @import("print_env.zig").cmdEnv(arena, cmd_args); } else if (mem.eql(u8, cmd, "reduce")) { return jitCmd(gpa, arena, cmd_args, .{ .cmd_name = "reduce", @@ -360,10 +363,10 @@ fn mainArgs(gpa: Allocator, arena: Allocator, args: []const []const u8) !void { }); } else if (mem.eql(u8, cmd, "zen")) { dev.check(.zen_command); - return io.getStdOut().writeAll(info_zen); + return fs.File.stdout().writeAll(info_zen); } else if (mem.eql(u8, cmd, "help") or mem.eql(u8, cmd, "-h") or mem.eql(u8, cmd, "--help")) { dev.check(.help_command); - return io.getStdOut().writeAll(usage); + return fs.File.stdout().writeAll(usage); } else if (mem.eql(u8, cmd, "ast-check")) { return cmdAstCheck(arena, cmd_args); } else if (mem.eql(u8, cmd, "detect-cpu")) { @@ -1038,7 +1041,7 @@ fn buildOutputType( }; } else if (mem.startsWith(u8, arg, "-")) { if (mem.eql(u8, arg, "-h") or mem.eql(u8, arg, "--help")) 
{ - try io.getStdOut().writeAll(usage_build_generic); + try fs.File.stdout().writeAll(usage_build_generic); return cleanExit(); } else if (mem.eql(u8, arg, "--")) { if (arg_mode == .run) { @@ -1806,6 +1809,7 @@ fn buildOutputType( } else manifest_file = arg; }, .assembly, .assembly_with_cpp, .c, .cpp, .h, .hpp, .hm, .hmm, .ll, .bc, .m, .mm => { + dev.check(.c_compiler); try create_module.c_source_files.append(arena, .{ // Populated after module creation. .owner = undefined, @@ -1816,6 +1820,7 @@ fn buildOutputType( }); }, .rc => { + dev.check(.win32_resource); try create_module.rc_source_files.append(arena, .{ // Populated after module creation. .owner = undefined, @@ -2766,9 +2771,9 @@ fn buildOutputType( } else if (mem.eql(u8, arg, "-V")) { warn("ignoring request for supported emulations: unimplemented", .{}); } else if (mem.eql(u8, arg, "-v")) { - try std.io.getStdOut().writeAll("zig ld " ++ build_options.version ++ "\n"); + try fs.File.stdout().writeAll("zig ld " ++ build_options.version ++ "\n"); } else if (mem.eql(u8, arg, "--version")) { - try std.io.getStdOut().writeAll("zig ld " ++ build_options.version ++ "\n"); + try fs.File.stdout().writeAll("zig ld " ++ build_options.version ++ "\n"); process.exit(0); } else { fatal("unsupported linker arg: {s}", .{arg}); @@ -3301,6 +3306,7 @@ fn buildOutputType( defer thread_pool.deinit(); for (create_module.c_source_files.items) |*src| { + dev.check(.c_compiler); if (!mem.eql(u8, src.src_path, "-")) continue; const ext = src.ext orelse @@ -3325,17 +3331,20 @@ fn buildOutputType( // for the hashing algorithm here and in the cache are the same. // We are providing our own cache key, because this file has nothing // to do with the cache manifest. 
- var hasher = Cache.Hasher.init("0123456789abcdef"); - var w = io.multiWriter(.{ f.writer(), hasher.writer() }); - var fifo = std.fifo.LinearFifo(u8, .{ .Static = 4096 }).init(); - try fifo.pump(io.getStdIn().reader(), w.writer()); + var file_writer = f.writer(&.{}); + var buffer: [1000]u8 = undefined; + var hasher = file_writer.interface.hashed(Cache.Hasher.init("0123456789abcdef"), &buffer); + var stdin_reader = fs.File.stdin().readerStreaming(&.{}); + _ = hasher.writer.sendFileAll(&stdin_reader, .unlimited) catch |err| switch (err) { + error.WriteFailed => fatal("failed to write {s}: {t}", .{ dump_path, file_writer.err.? }), + else => fatal("failed to pipe stdin to {s}: {t}", .{ dump_path, err }), + }; + try hasher.writer.flush(); - var bin_digest: Cache.BinDigest = undefined; - hasher.final(&bin_digest); + const bin_digest: Cache.BinDigest = hasher.hasher.finalResult(); - const sub_path = try std.fmt.allocPrint(arena, "tmp" ++ sep ++ "{s}-stdin{s}", .{ - std.fmt.fmtSliceHexLower(&bin_digest), - ext.canonicalName(target), + const sub_path = try std.fmt.allocPrint(arena, "tmp" ++ sep ++ "{x}-stdin{s}", .{ + &bin_digest, ext.canonicalName(target), }); try dirs.local_cache.handle.rename(dump_path, sub_path); @@ -3506,7 +3515,7 @@ fn buildOutputType( if (t.arch == target.cpu.arch and t.os == target.os.tag) { // If there's a `glibc_min`, there's also an `os_ver`. 
if (t.glibc_min) |glibc_min| { - std.log.info("zig can provide libc for related target {s}-{s}.{}-{s}.{d}.{d}", .{ + std.log.info("zig can provide libc for related target {s}-{s}.{f}-{s}.{d}.{d}", .{ @tagName(t.arch), @tagName(t.os), t.os_ver.?, @@ -3515,7 +3524,7 @@ fn buildOutputType( glibc_min.minor, }); } else if (t.os_ver) |os_ver| { - std.log.info("zig can provide libc for related target {s}-{s}.{}-{s}", .{ + std.log.info("zig can provide libc for related target {s}-{s}.{f}-{s}", .{ @tagName(t.arch), @tagName(t.os), os_ver, @@ -3546,15 +3555,15 @@ fn buildOutputType( if (show_builtin) { const builtin_opts = comp.root_mod.getBuiltinOptions(comp.config); const source = try builtin_opts.generate(arena); - return std.io.getStdOut().writeAll(source); + return fs.File.stdout().writeAll(source); } switch (listen) { .none => {}, .stdio => { try serve( comp, - std.io.getStdIn(), - std.io.getStdOut(), + .stdin(), + .stdout(), test_exec_args.items, self_exe_path, arg_mode, @@ -4606,7 +4615,7 @@ fn cmdTranslateC( fatal("unable to open cached translated zig file '{s}{s}{s}': {s}", .{ path, fs.path.sep_str, out_zig_path, @errorName(err) }); }; defer zig_file.close(); - try io.getStdOut().writeFileAll(zig_file, .{}); + try fs.File.stdout().writeFileAll(zig_file, .{}); return cleanExit(); } } @@ -4636,7 +4645,7 @@ fn cmdInit(gpa: Allocator, arena: Allocator, args: []const []const u8) !void { if (mem.eql(u8, arg, "-s") or mem.eql(u8, arg, "--strip")) { strip = true; } else if (mem.eql(u8, arg, "-h") or mem.eql(u8, arg, "--help")) { - try io.getStdOut().writeAll(usage_init); + try fs.File.stdout().writeAll(usage_init); return cleanExit(); } else { fatal("unrecognized parameter: '{s}'", .{arg}); @@ -5287,7 +5296,7 @@ fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !void { const s = fs.path.sep_str; const tmp_sub_path = "tmp" ++ s ++ results_tmp_file_nonce; const stdout = dirs.local_cache.handle.readFileAlloc(arena, tmp_sub_path, 50 * 1024 * 1024) catch 
|err| { - fatal("unable to read results of configure phase from '{}{s}': {s}", .{ + fatal("unable to read results of configure phase from '{f}{s}': {s}", .{ dirs.local_cache, tmp_sub_path, @errorName(err), }); }; @@ -5481,8 +5490,8 @@ fn jitCmd( defer comp.destroy(); if (options.server) { - var server = std.zig.Server{ - .out = std.io.getStdOut(), + var server: std.zig.Server = .{ + .out = fs.File.stdout(), .in = undefined, // won't be receiving messages .receive_fifo = undefined, // won't be receiving messages }; @@ -6015,7 +6024,7 @@ fn cmdAstCheck( const arg = args[i]; if (mem.startsWith(u8, arg, "-")) { if (mem.eql(u8, arg, "-h") or mem.eql(u8, arg, "--help")) { - try io.getStdOut().writeAll(usage_ast_check); + try fs.File.stdout().writeAll(usage_ast_check); return cleanExit(); } else if (mem.eql(u8, arg, "-t")) { want_output_text = true; @@ -6046,7 +6055,7 @@ fn cmdAstCheck( break :file fs.cwd().openFile(p, .{}) catch |err| { fatal("unable to open file '{s}' for ast-check: {s}", .{ display_path, @errorName(err) }); }; - } else io.getStdIn(); + } else fs.File.stdin(); defer if (zig_source_path != null) f.close(); break :s std.zig.readSourceFileToEndAlloc(arena, f, null) catch |err| { fatal("unable to load file '{s}' for ast-check: {s}", .{ display_path, @errorName(err) }); @@ -6065,6 +6074,8 @@ fn cmdAstCheck( const tree = try Ast.parse(arena, source, mode); + var stdout_writer = fs.File.stdout().writerStreaming(&stdio_buffer); + const stdout_bw = &stdout_writer.interface; switch (mode) { .zig => { const zir = try AstGen.generate(arena, tree); @@ -6107,31 +6118,30 @@ fn cmdAstCheck( const extra_bytes = zir.extra.len * @sizeOf(u32); const total_bytes = @sizeOf(Zir) + instruction_bytes + extra_bytes + zir.string_bytes.len * @sizeOf(u8); - const stdout = io.getStdOut(); - const fmtIntSizeBin = std.fmt.fmtIntSizeBin; // zig fmt: off - try stdout.writer().print( - \\# Source bytes: {} - \\# Tokens: {} ({}) - \\# AST Nodes: {} ({}) - \\# Total ZIR bytes: {} - \\# 
Instructions: {d} ({}) + try stdout_bw.print( + \\# Source bytes: {Bi} + \\# Tokens: {} ({Bi}) + \\# AST Nodes: {} ({Bi}) + \\# Total ZIR bytes: {Bi} + \\# Instructions: {d} ({Bi}) \\# String Table Bytes: {} - \\# Extra Data Items: {d} ({}) + \\# Extra Data Items: {d} ({Bi}) \\ , .{ - fmtIntSizeBin(source.len), - tree.tokens.len, fmtIntSizeBin(token_bytes), - tree.nodes.len, fmtIntSizeBin(tree_bytes), - fmtIntSizeBin(total_bytes), - zir.instructions.len, fmtIntSizeBin(instruction_bytes), - fmtIntSizeBin(zir.string_bytes.len), - zir.extra.len, fmtIntSizeBin(extra_bytes), + source.len, + tree.tokens.len, token_bytes, + tree.nodes.len, tree_bytes, + total_bytes, + zir.instructions.len, instruction_bytes, + zir.string_bytes.len, + zir.extra.len, extra_bytes, }); // zig fmt: on } - try @import("print_zir.zig").renderAsTextToFile(arena, tree, zir, io.getStdOut()); + try @import("print_zir.zig").renderAsText(arena, tree, zir, stdout_bw); + try stdout_bw.flush(); if (zir.hasCompileErrors()) { process.exit(1); @@ -6158,7 +6168,8 @@ fn cmdAstCheck( fatal("-t option only available in builds of zig with debug extensions", .{}); } - try @import("print_zoir.zig").renderToFile(zoir, arena, io.getStdOut()); + try @import("print_zoir.zig").renderToWriter(zoir, arena, stdout_bw); + try stdout_bw.flush(); return cleanExit(); }, } @@ -6186,8 +6197,7 @@ fn cmdDetectCpu(args: []const []const u8) !void { const arg = args[i]; if (mem.startsWith(u8, arg, "-")) { if (mem.eql(u8, arg, "-h") or mem.eql(u8, arg, "--help")) { - const stdout = io.getStdOut().writer(); - try stdout.writeAll(detect_cpu_usage); + try fs.File.stdout().writeAll(detect_cpu_usage); return cleanExit(); } else if (mem.eql(u8, arg, "--llvm")) { use_llvm = true; @@ -6279,11 +6289,11 @@ fn detectNativeCpuWithLLVM( } fn printCpu(cpu: std.Target.Cpu) !void { - var bw = io.bufferedWriter(io.getStdOut().writer()); - const stdout = bw.writer(); + var stdout_writer = fs.File.stdout().writerStreaming(&stdio_buffer); + const 
stdout_bw = &stdout_writer.interface; if (cpu.model.llvm_name) |llvm_name| { - try stdout.print("{s}\n", .{llvm_name}); + try stdout_bw.print("{s}\n", .{llvm_name}); } const all_features = cpu.arch.allFeaturesList(); @@ -6292,10 +6302,10 @@ fn printCpu(cpu: std.Target.Cpu) !void { const index: std.Target.Cpu.Feature.Set.Index = @intCast(index_usize); const is_enabled = cpu.features.isEnabled(index); const plus_or_minus = "-+"[@intFromBool(is_enabled)]; - try stdout.print("{c}{s}\n", .{ plus_or_minus, llvm_name }); + try stdout_bw.print("{c}{s}\n", .{ plus_or_minus, llvm_name }); } - try bw.flush(); + try stdout_bw.flush(); } fn cmdDumpLlvmInts( @@ -6328,16 +6338,14 @@ fn cmdDumpLlvmInts( const dl = tm.createTargetDataLayout(); const context = llvm.Context.create(); - var bw = io.bufferedWriter(io.getStdOut().writer()); - const stdout = bw.writer(); - + var stdout_writer = fs.File.stdout().writerStreaming(&stdio_buffer); + const stdout_bw = &stdout_writer.interface; for ([_]u16{ 1, 8, 16, 32, 64, 128, 256 }) |bits| { const int_type = context.intType(bits); const alignment = dl.abiAlignmentOfType(int_type); - try stdout.print("LLVMABIAlignmentOfType(i{d}) == {d}\n", .{ bits, alignment }); + try stdout_bw.print("LLVMABIAlignmentOfType(i{d}) == {d}\n", .{ bits, alignment }); } - - try bw.flush(); + try stdout_bw.flush(); return cleanExit(); } @@ -6359,6 +6367,8 @@ fn cmdDumpZir( defer f.close(); const zir = try Zcu.loadZirCache(arena, f); + var stdout_writer = fs.File.stdout().writerStreaming(&stdio_buffer); + const stdout_bw = &stdout_writer.interface; { const instruction_bytes = zir.instructions.len * @@ -6368,25 +6378,24 @@ fn cmdDumpZir( const extra_bytes = zir.extra.len * @sizeOf(u32); const total_bytes = @sizeOf(Zir) + instruction_bytes + extra_bytes + zir.string_bytes.len * @sizeOf(u8); - const stdout = io.getStdOut(); - const fmtIntSizeBin = std.fmt.fmtIntSizeBin; // zig fmt: off - try stdout.writer().print( - \\# Total ZIR bytes: {} - \\# Instructions: {d} 
({}) - \\# String Table Bytes: {} - \\# Extra Data Items: {d} ({}) + try stdout_bw.print( + \\# Total ZIR bytes: {Bi} + \\# Instructions: {d} ({Bi}) + \\# String Table Bytes: {Bi} + \\# Extra Data Items: {d} ({Bi}) \\ , .{ - fmtIntSizeBin(total_bytes), - zir.instructions.len, fmtIntSizeBin(instruction_bytes), - fmtIntSizeBin(zir.string_bytes.len), - zir.extra.len, fmtIntSizeBin(extra_bytes), + total_bytes, + zir.instructions.len, instruction_bytes, + zir.string_bytes.len, + zir.extra.len, extra_bytes, }); // zig fmt: on } - return @import("print_zir.zig").renderAsTextToFile(arena, null, zir, io.getStdOut()); + try @import("print_zir.zig").renderAsText(arena, null, zir, stdout_bw); + try stdout_bw.flush(); } /// This is only enabled for debug builds. @@ -6444,19 +6453,19 @@ fn cmdChangelist( var inst_map: std.AutoHashMapUnmanaged(Zir.Inst.Index, Zir.Inst.Index) = .empty; try Zcu.mapOldZirToNew(arena, old_zir, new_zir, &inst_map); - var bw = io.bufferedWriter(io.getStdOut().writer()); - const stdout = bw.writer(); + var stdout_writer = fs.File.stdout().writerStreaming(&stdio_buffer); + const stdout_bw = &stdout_writer.interface; { - try stdout.print("Instruction mappings:\n", .{}); + try stdout_bw.print("Instruction mappings:\n", .{}); var it = inst_map.iterator(); while (it.next()) |entry| { - try stdout.print(" %{d} => %{d}\n", .{ + try stdout_bw.print(" %{d} => %{d}\n", .{ @intFromEnum(entry.key_ptr.*), @intFromEnum(entry.value_ptr.*), }); } } - try bw.flush(); + try stdout_bw.flush(); } fn eatIntPrefix(arg: []const u8, base: u8) []const u8 { @@ -6718,13 +6727,10 @@ fn accessFrameworkPath( for (&[_][]const u8{ ".tbd", ".dylib", "" }) |ext| { test_path.clearRetainingCapacity(); - try test_path.writer().print("{s}" ++ sep ++ "{s}.framework" ++ sep ++ "{s}{s}", .{ - framework_dir_path, - framework_name, - framework_name, - ext, + try test_path.print("{s}" ++ sep ++ "{s}.framework" ++ sep ++ "{s}{s}", .{ + framework_dir_path, framework_name, framework_name, ext, }); - 
try checked_paths.writer().print("\n {s}", .{test_path.items}); + try checked_paths.print("\n {s}", .{test_path.items}); fs.cwd().access(test_path.items, .{}) catch |err| switch (err) { error.FileNotFound => continue, else => |e| fatal("unable to search for {s} framework '{s}': {s}", .{ @@ -6794,8 +6800,7 @@ fn cmdFetch( const arg = args[i]; if (mem.startsWith(u8, arg, "-")) { if (mem.eql(u8, arg, "-h") or mem.eql(u8, arg, "--help")) { - const stdout = io.getStdOut().writer(); - try stdout.writeAll(usage_fetch); + try fs.File.stdout().writeAll(usage_fetch); return cleanExit(); } else if (mem.eql(u8, arg, "--global-cache-dir")) { if (i + 1 >= args.len) fatal("expected argument after '{s}'", .{arg}); @@ -6908,7 +6913,9 @@ fn cmdFetch( const name = switch (save) { .no => { - try io.getStdOut().writer().print("{s}\n", .{package_hash_slice}); + var stdout = fs.File.stdout().writerStreaming(&stdio_buffer); + try stdout.interface.print("{s}\n", .{package_hash_slice}); + try stdout.interface.flush(); return cleanExit(); }, .yes, .exact => |name| name: { @@ -6944,7 +6951,7 @@ fn cmdFetch( var saved_path_or_url = path_or_url; if (fetch.latest_commit) |latest_commit| resolved: { - const latest_commit_hex = try std.fmt.allocPrint(arena, "{}", .{latest_commit}); + const latest_commit_hex = try std.fmt.allocPrint(arena, "{f}", .{latest_commit}); var uri = try std.Uri.parse(path_or_url); @@ -6957,7 +6964,9 @@ fn cmdFetch( std.log.info("resolved ref '{s}' to commit {s}", .{ target_ref, latest_commit_hex }); // include the original refspec in a query parameter, could be used to check for updates - uri.query = .{ .percent_encoded = try std.fmt.allocPrint(arena, "ref={%}", .{fragment}) }; + uri.query = .{ .percent_encoded = try std.fmt.allocPrint(arena, "ref={f}", .{ + std.fmt.alt(fragment, .formatEscaped), + }) }; } else { std.log.info("resolved to commit {s}", .{latest_commit_hex}); } @@ -6966,23 +6975,23 @@ fn cmdFetch( uri.fragment = .{ .raw = latest_commit_hex }; switch (save) { 
- .yes => saved_path_or_url = try std.fmt.allocPrint(arena, "{}", .{uri}), + .yes => saved_path_or_url = try std.fmt.allocPrint(arena, "{f}", .{uri}), .no, .exact => {}, // keep the original URL } } const new_node_init = try std.fmt.allocPrint(arena, \\.{{ - \\ .url = "{}", - \\ .hash = "{}", + \\ .url = "{f}", + \\ .hash = "{f}", \\ }} , .{ - std.zig.fmtEscapes(saved_path_or_url), - std.zig.fmtEscapes(package_hash_slice), + std.zig.fmtString(saved_path_or_url), + std.zig.fmtString(package_hash_slice), }); - const new_node_text = try std.fmt.allocPrint(arena, ".{p_} = {s},\n", .{ - std.zig.fmtId(name), new_node_init, + const new_node_text = try std.fmt.allocPrint(arena, ".{f} = {s},\n", .{ + std.zig.fmtIdPU(name), new_node_init, }); const dependencies_init = try std.fmt.allocPrint(arena, ".{{\n {s} }}", .{ @@ -7008,13 +7017,13 @@ fn cmdFetch( const location_replace = try std.fmt.allocPrint( arena, - "\"{}\"", - .{std.zig.fmtEscapes(saved_path_or_url)}, + "\"{f}\"", + .{std.zig.fmtString(saved_path_or_url)}, ); const hash_replace = try std.fmt.allocPrint( arena, - "\"{}\"", - .{std.zig.fmtEscapes(package_hash_slice)}, + "\"{f}\"", + .{std.zig.fmtString(package_hash_slice)}, ); warn("overwriting existing dependency named '{s}'", .{name}); diff --git a/src/print_env.zig b/src/print_env.zig index cc5db2e40a..af433ddfc1 100644 --- a/src/print_env.zig +++ b/src/print_env.zig @@ -4,7 +4,7 @@ const introspect = @import("introspect.zig"); const Allocator = std.mem.Allocator; const fatal = std.process.fatal; -pub fn cmdEnv(arena: Allocator, args: []const []const u8, stdout: std.fs.File.Writer) !void { +pub fn cmdEnv(arena: Allocator, args: []const []const u8) !void { _ = args; const cwd_path = try introspect.getResolvedCwd(arena); const self_exe_path = try std.fs.selfExePathAlloc(arena); @@ -21,7 +21,7 @@ pub fn cmdEnv(arena: Allocator, args: []const []const u8, stdout: std.fs.File.Wr const host = try std.zig.system.resolveTargetQuery(.{}); const triple = try 
host.zigTriple(arena); - var bw = std.io.bufferedWriter(stdout); + var bw = std.io.bufferedWriter(std.fs.File.stdout().deprecatedWriter()); const w = bw.writer(); var jws = std.json.writeStream(w, .{ .whitespace = .indent_1 }); diff --git a/src/print_targets.zig b/src/print_targets.zig index 96371eb1ee..e234aeda26 100644 --- a/src/print_targets.zig +++ b/src/print_targets.zig @@ -64,7 +64,7 @@ pub fn cmdTargets( { var glibc_obj = try root_obj.beginTupleField("glibc", .{}); for (glibc_abi.all_versions) |ver| { - const tmp = try std.fmt.allocPrint(allocator, "{}", .{ver}); + const tmp = try std.fmt.allocPrint(allocator, "{f}", .{ver}); defer allocator.free(tmp); try glibc_obj.field(tmp, .{}); } diff --git a/src/print_value.zig b/src/print_value.zig index 0bd5bcee23..e0a489ee40 100644 --- a/src/print_value.zig +++ b/src/print_value.zig @@ -20,15 +20,8 @@ pub const FormatContext = struct { depth: u8, }; -pub fn formatSema( - ctx: FormatContext, - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = options; +pub fn formatSema(ctx: FormatContext, writer: *std.io.Writer) std.io.Writer.Error!void { const sema = ctx.opt_sema.?; - comptime std.debug.assert(fmt.len == 0); return print(ctx.val, writer, ctx.depth, ctx.pt, sema) catch |err| switch (err) { error.OutOfMemory => @panic("OOM"), // We're not allowed to return this from a format function error.ComptimeBreak, error.ComptimeReturn => unreachable, @@ -37,15 +30,8 @@ pub fn formatSema( }; } -pub fn format( - ctx: FormatContext, - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, -) !void { - _ = options; +pub fn format(ctx: FormatContext, writer: *std.io.Writer) std.io.Writer.Error!void { std.debug.assert(ctx.opt_sema == null); - comptime std.debug.assert(fmt.len == 0); return print(ctx.val, writer, ctx.depth, ctx.pt, null) catch |err| switch (err) { error.OutOfMemory => @panic("OOM"), // We're not allowed to return this from a format function 
error.ComptimeBreak, error.ComptimeReturn, error.AnalysisFail => unreachable, @@ -55,11 +41,11 @@ pub fn format( pub fn print( val: Value, - writer: anytype, + writer: *std.io.Writer, level: u8, pt: Zcu.PerThread, opt_sema: ?*Sema, -) (@TypeOf(writer).Error || Zcu.CompileError)!void { +) (std.io.Writer.Error || Zcu.CompileError)!void { const zcu = pt.zcu; const ip = &zcu.intern_pool; switch (ip.indexToKey(val.toIntern())) { @@ -87,35 +73,36 @@ pub fn print( else => try writer.writeAll(@tagName(simple_value)), }, .variable => try writer.writeAll("(variable)"), - .@"extern" => |e| try writer.print("(extern '{}')", .{e.name.fmt(ip)}), - .func => |func| try writer.print("(function '{}')", .{ip.getNav(func.owner_nav).name.fmt(ip)}), + .@"extern" => |e| try writer.print("(extern '{f}')", .{e.name.fmt(ip)}), + .func => |func| try writer.print("(function '{f}')", .{ip.getNav(func.owner_nav).name.fmt(ip)}), .int => |int| switch (int.storage) { - inline .u64, .i64, .big_int => |x| try writer.print("{}", .{x}), + inline .u64, .i64 => |x| try writer.print("{d}", .{x}), + .big_int => |x| try writer.print("{d}", .{x}), .lazy_align => |ty| if (opt_sema != null) { const a = try Type.fromInterned(ty).abiAlignmentSema(pt); - try writer.print("{}", .{a.toByteUnits() orelse 0}); - } else try writer.print("@alignOf({})", .{Type.fromInterned(ty).fmt(pt)}), + try writer.print("{d}", .{a.toByteUnits() orelse 0}); + } else try writer.print("@alignOf({f})", .{Type.fromInterned(ty).fmt(pt)}), .lazy_size => |ty| if (opt_sema != null) { const s = try Type.fromInterned(ty).abiSizeSema(pt); - try writer.print("{}", .{s}); - } else try writer.print("@sizeOf({})", .{Type.fromInterned(ty).fmt(pt)}), + try writer.print("{d}", .{s}); + } else try writer.print("@sizeOf({f})", .{Type.fromInterned(ty).fmt(pt)}), }, - .err => |err| try writer.print("error.{}", .{ + .err => |err| try writer.print("error.{f}", .{ err.name.fmt(ip), }), .error_union => |error_union| switch (error_union.val) { - .err_name => 
|err_name| try writer.print("error.{}", .{ + .err_name => |err_name| try writer.print("error.{f}", .{ err_name.fmt(ip), }), .payload => |payload| try print(Value.fromInterned(payload), writer, level, pt, opt_sema), }, - .enum_literal => |enum_literal| try writer.print(".{}", .{ + .enum_literal => |enum_literal| try writer.print(".{f}", .{ enum_literal.fmt(ip), }), .enum_tag => |enum_tag| { const enum_type = ip.loadEnumType(val.typeOf(zcu).toIntern()); if (enum_type.tagValueIndex(ip, val.toIntern())) |tag_index| { - return writer.print(".{i}", .{enum_type.names.get(ip)[tag_index].fmt(ip)}); + return writer.print(".{f}", .{enum_type.names.get(ip)[tag_index].fmt(ip)}); } if (level == 0) { return writer.writeAll("@enumFromInt(...)"); @@ -178,7 +165,7 @@ pub fn print( } if (un.tag == .none) { const backing_ty = try val.typeOf(zcu).unionBackingType(pt); - try writer.print("@bitCast(@as({}, ", .{backing_ty.fmt(pt)}); + try writer.print("@bitCast(@as({f}, ", .{backing_ty.fmt(pt)}); try print(Value.fromInterned(un.val), writer, level - 1, pt, opt_sema); try writer.writeAll("))"); } else { @@ -197,11 +184,11 @@ fn printAggregate( val: Value, aggregate: InternPool.Key.Aggregate, is_ref: bool, - writer: anytype, + writer: *std.io.Writer, level: u8, pt: Zcu.PerThread, opt_sema: ?*Sema, -) (@TypeOf(writer).Error || Zcu.CompileError)!void { +) (std.io.Writer.Error || Zcu.CompileError)!void { if (level == 0) { if (is_ref) try writer.writeByte('&'); return writer.writeAll(".{ ... 
}"); @@ -220,7 +207,7 @@ fn printAggregate( for (0..max_len) |i| { if (i != 0) try writer.writeAll(", "); const field_name = ty.structFieldName(@intCast(i), zcu).unwrap().?; - try writer.print(".{i} = ", .{field_name.fmt(ip)}); + try writer.print(".{f} = ", .{field_name.fmt(ip)}); try print(try val.fieldValue(pt, i), writer, level - 1, pt, opt_sema); } try writer.writeAll(" }"); @@ -232,7 +219,7 @@ fn printAggregate( const len = ty.arrayLenIncludingSentinel(zcu); if (len == 0) break :string; const slice = bytes.toSlice(if (bytes.at(len - 1, ip) == 0) len - 1 else len, ip); - try writer.print("\"{}\"", .{std.zig.fmtEscapes(slice)}); + try writer.print("\"{f}\"", .{std.zig.fmtString(slice)}); if (!is_ref) try writer.writeAll(".*"); return; }, @@ -249,7 +236,7 @@ fn printAggregate( const elem_val = Value.fromInterned(aggregate.storage.values()[0]); if (elem_val.isUndef(zcu)) break :one_byte_str; const byte = elem_val.toUnsignedInt(zcu); - try writer.print("\"{}\"", .{std.zig.fmtEscapes(&.{@intCast(byte)})}); + try writer.print("\"{f}\"", .{std.zig.fmtString(&.{@intCast(byte)})}); if (!is_ref) try writer.writeAll(".*"); return; }, @@ -283,11 +270,11 @@ fn printPtr( ptr_val: Value, /// Whether to print `derivation` as an lvalue or rvalue. If `null`, the more concise option is chosen. want_kind: ?PrintPtrKind, - writer: anytype, + writer: *std.io.Writer, level: u8, pt: Zcu.PerThread, opt_sema: ?*Sema, -) (@TypeOf(writer).Error || Zcu.CompileError)!void { +) (std.io.Writer.Error || Zcu.CompileError)!void { const ptr = switch (pt.zcu.intern_pool.indexToKey(ptr_val.toIntern())) { .undef => return writer.writeAll("undefined"), .ptr => |ptr| ptr, @@ -329,7 +316,7 @@ const PrintPtrKind = enum { lvalue, rvalue }; /// Returns the root derivation, which may be ignored. pub fn printPtrDerivation( derivation: Value.PointerDeriveStep, - writer: anytype, + writer: *std.io.Writer, pt: Zcu.PerThread, /// Whether to print `derivation` as an lvalue or rvalue. 
If `null`, the more concise option is chosen. /// If this is `.rvalue`, the result may look like `&foo`, so it's not necessarily valid to treat it as @@ -405,14 +392,14 @@ pub fn printPtrDerivation( const agg_ty = (try field.parent.ptrType(pt)).childType(zcu); switch (agg_ty.zigTypeTag(zcu)) { .@"struct" => if (agg_ty.structFieldName(field.field_idx, zcu).unwrap()) |field_name| { - try writer.print(".{i}", .{field_name.fmt(ip)}); + try writer.print(".{f}", .{field_name.fmt(ip)}); } else { try writer.print("[{d}]", .{field.field_idx}); }, .@"union" => { const tag_ty = agg_ty.unionTagTypeHypothetical(zcu); const field_name = tag_ty.enumFieldName(field.field_idx, zcu); - try writer.print(".{i}", .{field_name.fmt(ip)}); + try writer.print(".{f}", .{field_name.fmt(ip)}); }, .pointer => switch (field.field_idx) { Value.slice_ptr_index => try writer.writeAll(".ptr"), @@ -430,12 +417,12 @@ pub fn printPtrDerivation( }, .offset_and_cast => |oac| if (oac.byte_offset == 0) root: { - try writer.print("@as({}, @ptrCast(", .{oac.new_ptr_ty.fmt(pt)}); + try writer.print("@as({f}, @ptrCast(", .{oac.new_ptr_ty.fmt(pt)}); const root = try printPtrDerivation(oac.parent.*, writer, pt, .rvalue, root_strat, ptr_depth - 1); try writer.writeAll("))"); break :root root; } else root: { - try writer.print("@as({}, @ptrFromInt(@intFromPtr(", .{oac.new_ptr_ty.fmt(pt)}); + try writer.print("@as({f}, @ptrFromInt(@intFromPtr(", .{oac.new_ptr_ty.fmt(pt)}); const root = try printPtrDerivation(oac.parent.*, writer, pt, .rvalue, root_strat, ptr_depth - 1); try writer.print(") + {d}))", .{oac.byte_offset}); break :root root; @@ -447,22 +434,22 @@ pub fn printPtrDerivation( if (root_or_null == null) switch (root_strat) { .str => |x| try writer.writeAll(x), .print_val => |x| switch (derivation) { - .int => |int| try writer.print("@as({}, @ptrFromInt(0x{x}))", .{ int.ptr_ty.fmt(pt), int.addr }), - .nav_ptr => |nav| try writer.print("{}", .{ip.getNav(nav).fqn.fmt(ip)}), + .int => |int| try 
writer.print("@as({f}, @ptrFromInt(0x{x}))", .{ int.ptr_ty.fmt(pt), int.addr }), + .nav_ptr => |nav| try writer.print("{f}", .{ip.getNav(nav).fqn.fmt(ip)}), .uav_ptr => |uav| { const ty = Value.fromInterned(uav.val).typeOf(zcu); - try writer.print("@as({}, ", .{ty.fmt(pt)}); + try writer.print("@as({f}, ", .{ty.fmt(pt)}); try print(Value.fromInterned(uav.val), writer, x.level - 1, pt, x.opt_sema); try writer.writeByte(')'); }, .comptime_alloc_ptr => |info| { - try writer.print("@as({}, ", .{info.val.typeOf(zcu).fmt(pt)}); + try writer.print("@as({f}, ", .{info.val.typeOf(zcu).fmt(pt)}); try print(info.val, writer, x.level - 1, pt, x.opt_sema); try writer.writeByte(')'); }, .comptime_field_ptr => |val| { const ty = val.typeOf(zcu); - try writer.print("@as({}, ", .{ty.fmt(pt)}); + try writer.print("@as({f}, ", .{ty.fmt(pt)}); try print(val, writer, x.level - 1, pt, x.opt_sema); try writer.writeByte(')'); }, diff --git a/src/print_zir.zig b/src/print_zir.zig index c9e1a18d89..ae674e43e5 100644 --- a/src/print_zir.zig +++ b/src/print_zir.zig @@ -9,13 +9,8 @@ const Zir = std.zig.Zir; const Zcu = @import("Zcu.zig"); const LazySrcLoc = Zcu.LazySrcLoc; -/// Write human-readable, debug formatted ZIR code to a file. -pub fn renderAsTextToFile( - gpa: Allocator, - tree: ?Ast, - zir: Zir, - fs_file: std.fs.File, -) !void { +/// Write human-readable, debug formatted ZIR code. 
+pub fn renderAsText(gpa: Allocator, tree: ?Ast, zir: Zir, bw: *std.io.Writer) !void { var arena = std.heap.ArenaAllocator.init(gpa); defer arena.deinit(); @@ -30,16 +25,13 @@ pub fn renderAsTextToFile( .recurse_blocks = true, }; - var raw_stream = std.io.bufferedWriter(fs_file.writer()); - const stream = raw_stream.writer(); - const main_struct_inst: Zir.Inst.Index = .main_struct_inst; - try stream.print("%{d} ", .{@intFromEnum(main_struct_inst)}); - try writer.writeInstToStream(stream, main_struct_inst); - try stream.writeAll("\n"); + try bw.print("%{d} ", .{@intFromEnum(main_struct_inst)}); + try writer.writeInstToStream(bw, main_struct_inst); + try bw.writeAll("\n"); const imports_index = zir.extra[@intFromEnum(Zir.ExtraIndex.imports)]; if (imports_index != 0) { - try stream.writeAll("Imports:\n"); + try bw.writeAll("Imports:\n"); const extra = zir.extraData(Zir.Inst.Imports, imports_index); var extra_index = extra.end; @@ -49,15 +41,13 @@ pub fn renderAsTextToFile( extra_index = item.end; const import_path = zir.nullTerminatedString(item.data.name); - try stream.print(" @import(\"{}\") ", .{ - std.zig.fmtEscapes(import_path), + try bw.print(" @import(\"{f}\") ", .{ + std.zig.fmtString(import_path), }); - try writer.writeSrcTokAbs(stream, item.data.token); - try stream.writeAll("\n"); + try writer.writeSrcTokAbs(bw, item.data.token); + try bw.writeAll("\n"); } } - - try raw_stream.flush(); } pub fn renderInstructionContext( @@ -67,7 +57,7 @@ pub fn renderInstructionContext( scope_file: *Zcu.File, parent_decl_node: Ast.Node.Index, indent: u32, - stream: anytype, + bw: *std.io.Writer, ) !void { var arena = std.heap.ArenaAllocator.init(gpa); defer arena.deinit(); @@ -83,13 +73,13 @@ pub fn renderInstructionContext( .recurse_blocks = true, }; - try writer.writeBody(stream, block[0..block_index]); - try stream.writeByteNTimes(' ', writer.indent - 2); - try stream.print("> %{d} ", .{@intFromEnum(block[block_index])}); - try writer.writeInstToStream(stream, 
block[block_index]); - try stream.writeByte('\n'); + try writer.writeBody(bw, block[0..block_index]); + try bw.splatByteAll(' ', writer.indent - 2); + try bw.print("> %{d} ", .{@intFromEnum(block[block_index])}); + try writer.writeInstToStream(bw, block[block_index]); + try bw.writeByte('\n'); if (block_index + 1 < block.len) { - try writer.writeBody(stream, block[block_index + 1 ..]); + try writer.writeBody(bw, block[block_index + 1 ..]); } } @@ -99,7 +89,7 @@ pub fn renderSingleInstruction( scope_file: *Zcu.File, parent_decl_node: Ast.Node.Index, indent: u32, - stream: anytype, + bw: *std.io.Writer, ) !void { var arena = std.heap.ArenaAllocator.init(gpa); defer arena.deinit(); @@ -115,8 +105,8 @@ pub fn renderSingleInstruction( .recurse_blocks = false, }; - try stream.print("%{d} ", .{@intFromEnum(inst)}); - try writer.writeInstToStream(stream, inst); + try bw.print("%{d} ", .{@intFromEnum(inst)}); + try writer.writeInstToStream(bw, inst); } const Writer = struct { @@ -186,11 +176,13 @@ const Writer = struct { } } = .{}, + const Error = std.io.Writer.Error || Allocator.Error; + fn writeInstToStream( self: *Writer, - stream: anytype, + stream: *std.io.Writer, inst: Zir.Inst.Index, - ) (@TypeOf(stream).Error || error{OutOfMemory})!void { + ) Error!void { const tags = self.code.instructions.items(.tag); const tag = tags[@intFromEnum(inst)]; try stream.print("= {s}(", .{@tagName(tags[@intFromEnum(inst)])}); @@ -516,7 +508,7 @@ const Writer = struct { } } - fn writeExtended(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeExtended(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const extended = self.code.instructions.items(.data)[@intFromEnum(inst)].extended; try stream.print("{s}(", .{@tagName(extended.opcode)}); switch (extended.opcode) { @@ -623,13 +615,13 @@ const Writer = struct { } } - fn writeExtNode(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void { + fn writeExtNode(self: *Writer, stream: 
*std.io.Writer, extended: Zir.Inst.Extended.InstData) !void { try stream.writeAll(")) "); const src_node: Ast.Node.Offset = @enumFromInt(@as(i32, @bitCast(extended.operand))); try self.writeSrcNode(stream, src_node); } - fn writeArrayInitElemType(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeArrayInitElemType(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].bin; try self.writeInstRef(stream, inst_data.lhs); try stream.print(", {d})", .{@intFromEnum(inst_data.rhs)}); @@ -637,9 +629,9 @@ const Writer = struct { fn writeUnNode( self: *Writer, - stream: anytype, + stream: *std.io.Writer, inst: Zir.Inst.Index, - ) (@TypeOf(stream).Error || error{OutOfMemory})!void { + ) Error!void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].un_node; try self.writeInstRef(stream, inst_data.operand); try stream.writeAll(") "); @@ -648,9 +640,9 @@ const Writer = struct { fn writeUnTok( self: *Writer, - stream: anytype, + stream: *std.io.Writer, inst: Zir.Inst.Index, - ) (@TypeOf(stream).Error || error{OutOfMemory})!void { + ) Error!void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].un_tok; try self.writeInstRef(stream, inst_data.operand); try stream.writeAll(") "); @@ -659,9 +651,9 @@ const Writer = struct { fn writeValidateDestructure( self: *Writer, - stream: anytype, + stream: *std.io.Writer, inst: Zir.Inst.Index, - ) (@TypeOf(stream).Error || error{OutOfMemory})!void { + ) Error!void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.ValidateDestructure, inst_data.payload_index).data; try self.writeInstRef(stream, extra.operand); @@ -673,9 +665,9 @@ const Writer = struct { fn writeValidateArrayInitTy( self: *Writer, - stream: anytype, + stream: *std.io.Writer, inst: Zir.Inst.Index, - ) (@TypeOf(stream).Error || 
error{OutOfMemory})!void { + ) Error!void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.ArrayInit, inst_data.payload_index).data; try self.writeInstRef(stream, extra.ty); @@ -685,9 +677,9 @@ const Writer = struct { fn writeArrayTypeSentinel( self: *Writer, - stream: anytype, + stream: *std.io.Writer, inst: Zir.Inst.Index, - ) (@TypeOf(stream).Error || error{OutOfMemory})!void { + ) Error!void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.ArrayTypeSentinel, inst_data.payload_index).data; try self.writeInstRef(stream, extra.len); @@ -701,9 +693,9 @@ const Writer = struct { fn writePtrType( self: *Writer, - stream: anytype, + stream: *std.io.Writer, inst: Zir.Inst.Index, - ) (@TypeOf(stream).Error || error{OutOfMemory})!void { + ) Error!void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].ptr_type; const str_allowzero = if (inst_data.flags.is_allowzero) "allowzero, " else ""; const str_const = if (!inst_data.flags.is_mutable) "const, " else ""; @@ -744,12 +736,12 @@ const Writer = struct { try self.writeSrcNode(stream, extra.data.src_node); } - fn writeInt(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeInt(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].int; try stream.print("{d})", .{inst_data}); } - fn writeIntBig(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeIntBig(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].str; const byte_count = inst_data.len * @sizeOf(std.math.big.Limb); const limb_bytes = self.code.string_bytes[@intFromEnum(inst_data.start)..][0..byte_count]; @@ -768,12 +760,12 @@ const Writer = struct { try stream.print("{s})", 
.{as_string}); } - fn writeFloat(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeFloat(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const number = self.code.instructions.items(.data)[@intFromEnum(inst)].float; try stream.print("{d})", .{number}); } - fn writeFloat128(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeFloat128(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.Float128, inst_data.payload_index).data; const number = extra.get(); @@ -784,15 +776,15 @@ const Writer = struct { fn writeStr( self: *Writer, - stream: anytype, + stream: *std.io.Writer, inst: Zir.Inst.Index, - ) (@TypeOf(stream).Error || error{OutOfMemory})!void { + ) Error!void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].str; const str = inst_data.get(self.code); - try stream.print("\"{}\")", .{std.zig.fmtEscapes(str)}); + try stream.print("\"{f}\")", .{std.zig.fmtString(str)}); } - fn writeSliceStart(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeSliceStart(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.SliceStart, inst_data.payload_index).data; try self.writeInstRef(stream, extra.lhs); @@ -802,7 +794,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeSliceEnd(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeSliceEnd(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.SliceEnd, inst_data.payload_index).data; try self.writeInstRef(stream, extra.lhs); @@ -814,7 +806,7 @@ const 
Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeSliceSentinel(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeSliceSentinel(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.SliceSentinel, inst_data.payload_index).data; try self.writeInstRef(stream, extra.lhs); @@ -828,7 +820,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeSliceLength(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeSliceLength(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.SliceLength, inst_data.payload_index).data; try self.writeInstRef(stream, extra.lhs); @@ -844,7 +836,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeUnionInit(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeUnionInit(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.UnionInit, inst_data.payload_index).data; try self.writeInstRef(stream, extra.union_type); @@ -856,7 +848,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeShuffle(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeShuffle(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.Shuffle, inst_data.payload_index).data; try self.writeInstRef(stream, extra.elem_type); @@ -870,7 +862,7 @@ const Writer = struct { try self.writeSrcNode(stream, 
inst_data.src_node); } - fn writeSelect(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void { + fn writeSelect(self: *Writer, stream: *std.io.Writer, extended: Zir.Inst.Extended.InstData) !void { const extra = self.code.extraData(Zir.Inst.Select, extended.operand).data; try self.writeInstRef(stream, extra.elem_type); try stream.writeAll(", "); @@ -883,7 +875,7 @@ const Writer = struct { try self.writeSrcNode(stream, extra.node); } - fn writeMulAdd(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeMulAdd(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.MulAdd, inst_data.payload_index).data; try self.writeInstRef(stream, extra.mulend1); @@ -895,7 +887,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeBuiltinCall(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeBuiltinCall(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.BuiltinCall, inst_data.payload_index).data; @@ -911,7 +903,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeFieldParentPtr(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void { + fn writeFieldParentPtr(self: *Writer, stream: *std.io.Writer, extended: Zir.Inst.Extended.InstData) !void { const extra = self.code.extraData(Zir.Inst.FieldParentPtr, extended.operand).data; const FlagsInt = @typeInfo(Zir.Inst.FullPtrCastFlags).@"struct".backing_integer.?; const flags: Zir.Inst.FullPtrCastFlags = @bitCast(@as(FlagsInt, @truncate(extended.small))); @@ -928,12 +920,12 @@ const Writer = struct { try self.writeSrcNode(stream, extra.src_node); } - fn writeParam(self: *Writer, stream: anytype, inst: 
Zir.Inst.Index) !void { + fn writeParam(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_tok; const extra = self.code.extraData(Zir.Inst.Param, inst_data.payload_index); const body = self.code.bodySlice(extra.end, extra.data.type.body_len); - try stream.print("\"{}\", ", .{ - std.zig.fmtEscapes(self.code.nullTerminatedString(extra.data.name)), + try stream.print("\"{f}\", ", .{ + std.zig.fmtString(self.code.nullTerminatedString(extra.data.name)), }); if (extra.data.type.is_generic) try stream.writeAll("[generic] "); @@ -943,7 +935,7 @@ const Writer = struct { try self.writeSrcTok(stream, inst_data.src_tok); } - fn writePlNodeBin(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writePlNodeBin(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.Bin, inst_data.payload_index).data; try self.writeInstRef(stream, extra.lhs); @@ -953,7 +945,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writePlNodeMultiOp(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writePlNodeMultiOp(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.MultiOp, inst_data.payload_index); const args = self.code.refSlice(extra.end, extra.data.operands_len); @@ -966,7 +958,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeArrayMul(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeArrayMul(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.ArrayMul, 
inst_data.payload_index).data; try self.writeInstRef(stream, extra.res_ty); @@ -978,13 +970,13 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeElemValImm(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeElemValImm(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].elem_val_imm; try self.writeInstRef(stream, inst_data.operand); try stream.print(", {d})", .{inst_data.idx}); } - fn writeArrayInitElemPtr(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeArrayInitElemPtr(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.ElemPtrImm, inst_data.payload_index).data; @@ -993,7 +985,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writePlNodeExport(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writePlNodeExport(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.Export, inst_data.payload_index).data; @@ -1004,7 +996,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeValidateArrayInitRefTy(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeValidateArrayInitRefTy(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.ArrayInitRefTy, inst_data.payload_index).data; @@ -1014,7 +1006,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeStructInit(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn 
writeStructInit(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.StructInit, inst_data.payload_index); var field_i: u32 = 0; @@ -1038,7 +1030,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeCmpxchg(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void { + fn writeCmpxchg(self: *Writer, stream: *std.io.Writer, extended: Zir.Inst.Extended.InstData) !void { const extra = self.code.extraData(Zir.Inst.Cmpxchg, extended.operand).data; try self.writeInstRef(stream, extra.ptr); @@ -1054,7 +1046,7 @@ const Writer = struct { try self.writeSrcNode(stream, extra.node); } - fn writePtrCastFull(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void { + fn writePtrCastFull(self: *Writer, stream: *std.io.Writer, extended: Zir.Inst.Extended.InstData) !void { const FlagsInt = @typeInfo(Zir.Inst.FullPtrCastFlags).@"struct".backing_integer.?; const flags: Zir.Inst.FullPtrCastFlags = @bitCast(@as(FlagsInt, @truncate(extended.small))); const extra = self.code.extraData(Zir.Inst.BinNode, extended.operand).data; @@ -1070,7 +1062,7 @@ const Writer = struct { try self.writeSrcNode(stream, extra.node); } - fn writePtrCastNoDest(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void { + fn writePtrCastNoDest(self: *Writer, stream: *std.io.Writer, extended: Zir.Inst.Extended.InstData) !void { const FlagsInt = @typeInfo(Zir.Inst.FullPtrCastFlags).@"struct".backing_integer.?; const flags: Zir.Inst.FullPtrCastFlags = @bitCast(@as(FlagsInt, @truncate(extended.small))); const extra = self.code.extraData(Zir.Inst.UnNode, extended.operand).data; @@ -1081,7 +1073,7 @@ const Writer = struct { try self.writeSrcNode(stream, extra.node); } - fn writeAtomicLoad(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeAtomicLoad(self: *Writer, 
stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.AtomicLoad, inst_data.payload_index).data; @@ -1094,7 +1086,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeAtomicStore(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeAtomicStore(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.AtomicStore, inst_data.payload_index).data; @@ -1107,7 +1099,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeAtomicRmw(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeAtomicRmw(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.AtomicRmw, inst_data.payload_index).data; @@ -1122,7 +1114,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeStructInitAnon(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeStructInitAnon(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.StructInitAnon, inst_data.payload_index); var field_i: u32 = 0; @@ -1143,7 +1135,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeStructInitFieldType(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeStructInitFieldType(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = 
self.code.extraData(Zir.Inst.FieldType, inst_data.payload_index).data; try self.writeInstRef(stream, extra.container_type); @@ -1152,7 +1144,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeFieldTypeRef(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeFieldTypeRef(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.FieldTypeRef, inst_data.payload_index).data; try self.writeInstRef(stream, extra.container_type); @@ -1162,7 +1154,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeNodeMultiOp(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void { + fn writeNodeMultiOp(self: *Writer, stream: *std.io.Writer, extended: Zir.Inst.Extended.InstData) !void { const extra = self.code.extraData(Zir.Inst.NodeMultiOp, extended.operand); const operands = self.code.refSlice(extra.end, extended.small); @@ -1176,9 +1168,9 @@ const Writer = struct { fn writeInstNode( self: *Writer, - stream: anytype, + stream: *std.io.Writer, inst: Zir.Inst.Index, - ) (@TypeOf(stream).Error || error{OutOfMemory})!void { + ) Error!void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].inst_node; try self.writeInstIndex(stream, inst_data.inst); try stream.writeAll(") "); @@ -1187,7 +1179,7 @@ const Writer = struct { fn writeAsm( self: *Writer, - stream: anytype, + stream: *std.io.Writer, extended: Zir.Inst.Extended.InstData, tmpl_is_expr: bool, ) !void { @@ -1203,7 +1195,7 @@ const Writer = struct { try stream.writeAll(", "); } else { const asm_source = self.code.nullTerminatedString(extra.data.asm_source); - try stream.print("\"{}\", ", .{std.zig.fmtEscapes(asm_source)}); + try stream.print("\"{f}\", ", .{std.zig.fmtString(asm_source)}); } try stream.writeAll(", "); @@ -1220,8 +1212,8 @@ const Writer = struct 
{ const name = self.code.nullTerminatedString(output.data.name); const constraint = self.code.nullTerminatedString(output.data.constraint); - try stream.print("output({p}, \"{}\", ", .{ - std.zig.fmtId(name), std.zig.fmtEscapes(constraint), + try stream.print("output({f}, \"{f}\", ", .{ + std.zig.fmtIdP(name), std.zig.fmtString(constraint), }); try self.writeFlag(stream, "->", is_type); try self.writeInstRef(stream, output.data.operand); @@ -1239,8 +1231,8 @@ const Writer = struct { const name = self.code.nullTerminatedString(input.data.name); const constraint = self.code.nullTerminatedString(input.data.constraint); - try stream.print("input({p}, \"{}\", ", .{ - std.zig.fmtId(name), std.zig.fmtEscapes(constraint), + try stream.print("input({f}, \"{f}\", ", .{ + std.zig.fmtIdP(name), std.zig.fmtString(constraint), }); try self.writeInstRef(stream, input.data.operand); try stream.writeAll(")"); @@ -1255,7 +1247,7 @@ const Writer = struct { const str_index = self.code.extra[extra_i]; extra_i += 1; const clobber = self.code.nullTerminatedString(@enumFromInt(str_index)); - try stream.print("{p}", .{std.zig.fmtId(clobber)}); + try stream.print("{f}", .{std.zig.fmtIdP(clobber)}); if (i + 1 < clobbers_len) { try stream.writeAll(", "); } @@ -1265,7 +1257,7 @@ const Writer = struct { try self.writeSrcNode(stream, extra.data.src_node); } - fn writeOverflowArithmetic(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void { + fn writeOverflowArithmetic(self: *Writer, stream: *std.io.Writer, extended: Zir.Inst.Extended.InstData) !void { const extra = self.code.extraData(Zir.Inst.BinNode, extended.operand).data; try self.writeInstRef(stream, extra.lhs); @@ -1277,7 +1269,7 @@ const Writer = struct { fn writeCall( self: *Writer, - stream: anytype, + stream: *std.io.Writer, inst: Zir.Inst.Index, comptime kind: enum { direct, field }, ) !void { @@ -1299,7 +1291,7 @@ const Writer = struct { .field => { const field_name = 
self.code.nullTerminatedString(extra.data.field_name_start); try self.writeInstRef(stream, extra.data.obj_ptr); - try stream.print(", \"{}\"", .{std.zig.fmtEscapes(field_name)}); + try stream.print(", \"{f}\"", .{std.zig.fmtString(field_name)}); }, } try stream.writeAll(", ["); @@ -1311,7 +1303,7 @@ const Writer = struct { var i: usize = 0; var arg_start: u32 = args_len; while (i < args_len) : (i += 1) { - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); const arg_end = self.code.extra[extra.end + i]; defer arg_start = arg_end; const arg_body = body[arg_start..arg_end]; @@ -1321,14 +1313,14 @@ const Writer = struct { } self.indent -= 2; if (args_len != 0) { - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); } try stream.writeAll("]) "); try self.writeSrcNode(stream, inst_data.src_node); } - fn writeBlock(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeBlock(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.Block, inst_data.payload_index); const body = self.code.bodySlice(extra.end, extra.data.body_len); @@ -1337,7 +1329,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeBlockComptime(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeBlockComptime(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.BlockComptime, inst_data.payload_index); const body = self.code.bodySlice(extra.end, extra.data.body_len); @@ -1347,7 +1339,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeCondBr(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeCondBr(self: *Writer, 
stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.CondBr, inst_data.payload_index); const then_body = self.code.bodySlice(extra.end, extra.data.then_body_len); @@ -1361,7 +1353,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeTry(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeTry(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.Try, inst_data.payload_index); const body = self.code.bodySlice(extra.end, extra.data.body_len); @@ -1372,7 +1364,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeStructDecl(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void { + fn writeStructDecl(self: *Writer, stream: *std.io.Writer, extended: Zir.Inst.Extended.InstData) !void { const small: Zir.Inst.StructDecl.Small = @bitCast(extended.small); const extra = self.code.extraData(Zir.Inst.StructDecl, extended.operand); @@ -1388,7 +1380,7 @@ const Writer = struct { extra.data.fields_hash_3, }); - try stream.print("hash({}) ", .{std.fmt.fmtSliceHexLower(&fields_hash)}); + try stream.print("hash({x}) ", .{&fields_hash}); var extra_index: usize = extra.end; @@ -1446,7 +1438,7 @@ const Writer = struct { try self.writeBody(stream, self.code.bodySlice(extra_index, decls_len)); self.indent -= 2; extra_index += decls_len; - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); try stream.writeAll("}, "); } @@ -1515,11 +1507,11 @@ const Writer = struct { self.indent += 2; for (fields, 0..) 
|field, i| { - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); try self.writeFlag(stream, "comptime ", field.is_comptime); if (field.name != .empty) { const field_name = self.code.nullTerminatedString(field.name); - try stream.print("{p}: ", .{std.zig.fmtId(field_name)}); + try stream.print("{f}: ", .{std.zig.fmtIdP(field_name)}); } else { try stream.print("@\"{d}\": ", .{i}); } @@ -1558,13 +1550,13 @@ const Writer = struct { } self.indent -= 2; - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); try stream.writeAll("}) "); } try self.writeSrcNode(stream, .zero); } - fn writeUnionDecl(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void { + fn writeUnionDecl(self: *Writer, stream: *std.io.Writer, extended: Zir.Inst.Extended.InstData) !void { const small = @as(Zir.Inst.UnionDecl.Small, @bitCast(extended.small)); const extra = self.code.extraData(Zir.Inst.UnionDecl, extended.operand); @@ -1580,7 +1572,7 @@ const Writer = struct { extra.data.fields_hash_3, }); - try stream.print("hash({}) ", .{std.fmt.fmtSliceHexLower(&fields_hash)}); + try stream.print("hash({x}) ", .{&fields_hash}); var extra_index: usize = extra.end; @@ -1630,7 +1622,7 @@ const Writer = struct { try self.writeBody(stream, self.code.bodySlice(extra_index, decls_len)); self.indent -= 2; extra_index += decls_len; - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); try stream.writeAll("}"); } @@ -1681,8 +1673,8 @@ const Writer = struct { const field_name = self.code.nullTerminatedString(field_name_index); extra_index += 1; - try stream.writeByteNTimes(' ', self.indent); - try stream.print("{p}", .{std.zig.fmtId(field_name)}); + try stream.splatByteAll(' ', self.indent); + try stream.print("{f}", .{std.zig.fmtIdP(field_name)}); if (has_type) { const field_type = @as(Zir.Inst.Ref, @enumFromInt(self.code.extra[extra_index])); @@ -1710,12 +1702,12 @@ const 
Writer = struct { } self.indent -= 2; - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); try stream.writeAll("}) "); try self.writeSrcNode(stream, .zero); } - fn writeEnumDecl(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void { + fn writeEnumDecl(self: *Writer, stream: *std.io.Writer, extended: Zir.Inst.Extended.InstData) !void { const small = @as(Zir.Inst.EnumDecl.Small, @bitCast(extended.small)); const extra = self.code.extraData(Zir.Inst.EnumDecl, extended.operand); @@ -1731,7 +1723,7 @@ const Writer = struct { extra.data.fields_hash_3, }); - try stream.print("hash({}) ", .{std.fmt.fmtSliceHexLower(&fields_hash)}); + try stream.print("hash({x}) ", .{&fields_hash}); var extra_index: usize = extra.end; @@ -1779,7 +1771,7 @@ const Writer = struct { try self.writeBody(stream, self.code.bodySlice(extra_index, decls_len)); self.indent -= 2; extra_index += decls_len; - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); try stream.writeAll("}, "); } @@ -1815,8 +1807,8 @@ const Writer = struct { const field_name = self.code.nullTerminatedString(@enumFromInt(self.code.extra[extra_index])); extra_index += 1; - try stream.writeByteNTimes(' ', self.indent); - try stream.print("{p}", .{std.zig.fmtId(field_name)}); + try stream.splatByteAll(' ', self.indent); + try stream.print("{f}", .{std.zig.fmtIdP(field_name)}); if (has_tag_value) { const tag_value_ref = @as(Zir.Inst.Ref, @enumFromInt(self.code.extra[extra_index])); @@ -1828,7 +1820,7 @@ const Writer = struct { try stream.writeAll(",\n"); } self.indent -= 2; - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); try stream.writeAll("}) "); } try self.writeSrcNode(stream, .zero); @@ -1836,7 +1828,7 @@ const Writer = struct { fn writeOpaqueDecl( self: *Writer, - stream: anytype, + stream: *std.io.Writer, extended: Zir.Inst.Extended.InstData, ) !void { const small = 
@as(Zir.Inst.OpaqueDecl.Small, @bitCast(extended.small)); @@ -1872,13 +1864,13 @@ const Writer = struct { self.indent += 2; try self.writeBody(stream, self.code.bodySlice(extra_index, decls_len)); self.indent -= 2; - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); try stream.writeAll("}) "); } try self.writeSrcNode(stream, .zero); } - fn writeTupleDecl(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void { + fn writeTupleDecl(self: *Writer, stream: *std.io.Writer, extended: Zir.Inst.Extended.InstData) !void { const fields_len = extended.small; assert(fields_len != 0); const extra = self.code.extraData(Zir.Inst.TupleDecl, extended.operand); @@ -1906,7 +1898,7 @@ const Writer = struct { fn writeErrorSetDecl( self: *Writer, - stream: anytype, + stream: *std.io.Writer, inst: Zir.Inst.Index, ) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; @@ -1920,18 +1912,18 @@ const Writer = struct { while (extra_index < extra_index_end) : (extra_index += 1) { const name_index: Zir.NullTerminatedString = @enumFromInt(self.code.extra[extra_index]); const name = self.code.nullTerminatedString(name_index); - try stream.writeByteNTimes(' ', self.indent); - try stream.print("{p},\n", .{std.zig.fmtId(name)}); + try stream.splatByteAll(' ', self.indent); + try stream.print("{f},\n", .{std.zig.fmtIdP(name)}); } self.indent -= 2; - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); try stream.writeAll("}) "); try self.writeSrcNode(stream, inst_data.src_node); } - fn writeSwitchBlockErrUnion(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeSwitchBlockErrUnion(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.SwitchBlockErrUnion, inst_data.payload_index); @@ -1967,7 +1959,7 @@ const 
Writer = struct { extra_index += body.len; try stream.writeAll(",\n"); - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); try stream.writeAll("non_err => "); try self.writeBracedBody(stream, body); } @@ -1985,7 +1977,7 @@ const Writer = struct { extra_index += body.len; try stream.writeAll(",\n"); - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); try stream.print("{s}{s}else => ", .{ capture_text, inline_text }); try self.writeBracedBody(stream, body); } @@ -2002,7 +1994,7 @@ const Writer = struct { extra_index += info.body_len; try stream.writeAll(",\n"); - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); switch (info.capture) { .none => {}, .by_val => try stream.writeAll("by_val "), @@ -2027,7 +2019,7 @@ const Writer = struct { extra_index += items_len; try stream.writeAll(",\n"); - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); switch (info.capture) { .none => {}, .by_val => try stream.writeAll("by_val "), @@ -2068,7 +2060,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeSwitchBlock(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeSwitchBlock(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.SwitchBlock, inst_data.payload_index); @@ -2115,7 +2107,7 @@ const Writer = struct { extra_index += body.len; try stream.writeAll(",\n"); - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); try stream.print("{s}{s}{s} => ", .{ capture_text, inline_text, prong_name }); try self.writeBracedBody(stream, body); } @@ -2132,7 +2124,7 @@ const Writer = struct { extra_index += info.body_len; try stream.writeAll(",\n"); - try stream.writeByteNTimes(' ', self.indent); + 
try stream.splatByteAll(' ', self.indent); switch (info.capture) { .none => {}, .by_val => try stream.writeAll("by_val "), @@ -2157,7 +2149,7 @@ const Writer = struct { extra_index += items_len; try stream.writeAll(",\n"); - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); switch (info.capture) { .none => {}, .by_val => try stream.writeAll("by_val "), @@ -2198,16 +2190,16 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writePlNodeField(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writePlNodeField(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.Field, inst_data.payload_index).data; const name = self.code.nullTerminatedString(extra.field_name_start); try self.writeInstRef(stream, extra.lhs); - try stream.print(", \"{}\") ", .{std.zig.fmtEscapes(name)}); + try stream.print(", \"{f}\") ", .{std.zig.fmtString(name)}); try self.writeSrcNode(stream, inst_data.src_node); } - fn writePlNodeFieldNamed(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writePlNodeFieldNamed(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.FieldNamed, inst_data.payload_index).data; try self.writeInstRef(stream, extra.lhs); @@ -2217,7 +2209,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeAs(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeAs(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.As, inst_data.payload_index).data; try self.writeInstRef(stream, extra.dest_type); @@ -2229,9 
+2221,9 @@ const Writer = struct { fn writeNode( self: *Writer, - stream: anytype, + stream: *std.io.Writer, inst: Zir.Inst.Index, - ) (@TypeOf(stream).Error || error{OutOfMemory})!void { + ) Error!void { const src_node = self.code.instructions.items(.data)[@intFromEnum(inst)].node; try stream.writeAll(") "); try self.writeSrcNode(stream, src_node); @@ -2239,25 +2231,25 @@ const Writer = struct { fn writeStrTok( self: *Writer, - stream: anytype, + stream: *std.io.Writer, inst: Zir.Inst.Index, - ) (@TypeOf(stream).Error || error{OutOfMemory})!void { + ) Error!void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].str_tok; const str = inst_data.get(self.code); - try stream.print("\"{}\") ", .{std.zig.fmtEscapes(str)}); + try stream.print("\"{f}\") ", .{std.zig.fmtString(str)}); try self.writeSrcTok(stream, inst_data.src_tok); } - fn writeStrOp(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeStrOp(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].str_op; const str = inst_data.getStr(self.code); try self.writeInstRef(stream, inst_data.operand); - try stream.print(", \"{}\")", .{std.zig.fmtEscapes(str)}); + try stream.print(", \"{f}\")", .{std.zig.fmtString(str)}); } fn writeFunc( self: *Writer, - stream: anytype, + stream: *std.io.Writer, inst: Zir.Inst.Index, inferred_error_set: bool, ) !void { @@ -2308,7 +2300,7 @@ const Writer = struct { ); } - fn writeFuncFancy(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeFuncFancy(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.FuncFancy, inst_data.payload_index); @@ -2367,7 +2359,7 @@ const Writer = struct { ); } - fn writeAllocExtended(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void { + fn 
writeAllocExtended(self: *Writer, stream: *std.io.Writer, extended: Zir.Inst.Extended.InstData) !void { const extra = self.code.extraData(Zir.Inst.AllocExtended, extended.operand); const small = @as(Zir.Inst.AllocExtended.Small, @bitCast(extended.small)); @@ -2390,7 +2382,7 @@ const Writer = struct { try self.writeSrcNode(stream, extra.data.src_node); } - fn writeTypeofPeer(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void { + fn writeTypeofPeer(self: *Writer, stream: *std.io.Writer, extended: Zir.Inst.Extended.InstData) !void { const extra = self.code.extraData(Zir.Inst.TypeOfPeer, extended.operand); const body = self.code.bodySlice(extra.data.body_index, extra.data.body_len); try self.writeBracedBody(stream, body); @@ -2403,7 +2395,7 @@ const Writer = struct { try stream.writeAll("])"); } - fn writeBoolBr(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeBoolBr(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.BoolBr, inst_data.payload_index); const body = self.code.bodySlice(extra.end, extra.data.body_len); @@ -2414,7 +2406,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeIntType(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeIntType(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const int_type = self.code.instructions.items(.data)[@intFromEnum(inst)].int_type; const prefix: u8 = switch (int_type.signedness) { .signed => 'i', @@ -2424,7 +2416,7 @@ const Writer = struct { try self.writeSrcNode(stream, int_type.src_node); } - fn writeSaveErrRetIndex(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeSaveErrRetIndex(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = 
self.code.instructions.items(.data)[@intFromEnum(inst)].save_err_ret_index; try self.writeInstRef(stream, inst_data.operand); @@ -2432,7 +2424,7 @@ const Writer = struct { try stream.writeAll(")"); } - fn writeRestoreErrRetIndex(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void { + fn writeRestoreErrRetIndex(self: *Writer, stream: *std.io.Writer, extended: Zir.Inst.Extended.InstData) !void { const extra = self.code.extraData(Zir.Inst.RestoreErrRetIndex, extended.operand).data; try self.writeInstRef(stream, extra.block); @@ -2442,7 +2434,7 @@ const Writer = struct { try self.writeSrcNode(stream, extra.src_node); } - fn writeBreak(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeBreak(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].@"break"; const extra = self.code.extraData(Zir.Inst.Break, inst_data.payload_index).data; @@ -2452,7 +2444,7 @@ const Writer = struct { try stream.writeAll(")"); } - fn writeArrayInit(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeArrayInit(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.MultiOp, inst_data.payload_index); @@ -2468,7 +2460,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeArrayInitAnon(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeArrayInitAnon(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.MultiOp, inst_data.payload_index); @@ -2483,7 +2475,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeArrayInitSent(self: *Writer, stream: anytype, inst: Zir.Inst.Index) 
!void { + fn writeArrayInitSent(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node; const extra = self.code.extraData(Zir.Inst.MultiOp, inst_data.payload_index); @@ -2503,7 +2495,7 @@ const Writer = struct { try self.writeSrcNode(stream, inst_data.src_node); } - fn writeUnreachable(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeUnreachable(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].@"unreachable"; try stream.writeAll(") "); try self.writeSrcNode(stream, inst_data.src_node); @@ -2511,7 +2503,7 @@ const Writer = struct { fn writeFuncCommon( self: *Writer, - stream: anytype, + stream: *std.io.Writer, inferred_error_set: bool, var_args: bool, is_noinline: bool, @@ -2548,19 +2540,19 @@ const Writer = struct { try self.writeSrcNode(stream, src_node); } - fn writeDbgStmt(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeDbgStmt(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].dbg_stmt; try stream.print("{d}, {d})", .{ inst_data.line + 1, inst_data.column + 1 }); } - fn writeDefer(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeDefer(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].@"defer"; const body = self.code.bodySlice(inst_data.index, inst_data.len); try self.writeBracedBody(stream, body); try stream.writeByte(')'); } - fn writeDeferErrCode(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeDeferErrCode(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].defer_err_code; const extra = 
self.code.extraData(Zir.Inst.DeferErrCode, inst_data.payload_index).data; @@ -2573,7 +2565,7 @@ const Writer = struct { try stream.writeByte(')'); } - fn writeDeclaration(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeDeclaration(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const decl = self.code.getDeclaration(inst); const prev_parent_decl_node = self.parent_decl_node; @@ -2594,10 +2586,8 @@ const Writer = struct { }, } const src_hash = self.code.getAssociatedSrcHash(inst).?; - try stream.print(" line({d}) column({d}) hash({})", .{ - decl.src_line, - decl.src_column, - std.fmt.fmtSliceHexLower(&src_hash), + try stream.print(" line({d}) column({d}) hash({x})", .{ + decl.src_line, decl.src_column, &src_hash, }); { @@ -2631,26 +2621,26 @@ const Writer = struct { try self.writeSrcNode(stream, .zero); } - fn writeClosureGet(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void { + fn writeClosureGet(self: *Writer, stream: *std.io.Writer, extended: Zir.Inst.Extended.InstData) !void { try stream.print("{d})) ", .{extended.small}); const src_node: Ast.Node.Offset = @enumFromInt(@as(i32, @bitCast(extended.operand))); try self.writeSrcNode(stream, src_node); } - fn writeBuiltinValue(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void { + fn writeBuiltinValue(self: *Writer, stream: *std.io.Writer, extended: Zir.Inst.Extended.InstData) !void { const val: Zir.Inst.BuiltinValue = @enumFromInt(extended.small); try stream.print("{s})) ", .{@tagName(val)}); const src_node: Ast.Node.Offset = @enumFromInt(@as(i32, @bitCast(extended.operand))); try self.writeSrcNode(stream, src_node); } - fn writeInplaceArithResultTy(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void { + fn writeInplaceArithResultTy(self: *Writer, stream: *std.io.Writer, extended: Zir.Inst.Extended.InstData) !void { const op: Zir.Inst.InplaceOp = @enumFromInt(extended.small); try 
self.writeInstRef(stream, @enumFromInt(extended.operand)); try stream.print(", {s}))", .{@tagName(op)}); } - fn writeInstRef(self: *Writer, stream: anytype, ref: Zir.Inst.Ref) !void { + fn writeInstRef(self: *Writer, stream: *std.io.Writer, ref: Zir.Inst.Ref) !void { if (ref == .none) { return stream.writeAll(".none"); } else if (ref.toIndex()) |i| { @@ -2661,12 +2651,12 @@ const Writer = struct { } } - fn writeInstIndex(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeInstIndex(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { _ = self; return stream.print("%{d}", .{@intFromEnum(inst)}); } - fn writeCaptures(self: *Writer, stream: anytype, extra_index: usize, captures_len: u32) !usize { + fn writeCaptures(self: *Writer, stream: *std.io.Writer, extra_index: usize, captures_len: u32) !usize { if (captures_len == 0) { try stream.writeAll("{}"); return extra_index; @@ -2686,7 +2676,7 @@ const Writer = struct { return extra_index + 2 * captures_len; } - fn writeCapture(self: *Writer, stream: anytype, capture: Zir.Inst.Capture) !void { + fn writeCapture(self: *Writer, stream: *std.io.Writer, capture: Zir.Inst.Capture) !void { switch (capture.unwrap()) { .nested => |i| return stream.print("[{d}]", .{i}), .instruction => |inst| return self.writeInstIndex(stream, inst), @@ -2694,18 +2684,18 @@ const Writer = struct { try stream.writeAll("load "); try self.writeInstIndex(stream, ptr_inst); }, - .decl_val => |str| try stream.print("decl_val \"{}\"", .{ - std.zig.fmtEscapes(self.code.nullTerminatedString(str)), + .decl_val => |str| try stream.print("decl_val \"{f}\"", .{ + std.zig.fmtString(self.code.nullTerminatedString(str)), }), - .decl_ref => |str| try stream.print("decl_ref \"{}\"", .{ - std.zig.fmtEscapes(self.code.nullTerminatedString(str)), + .decl_ref => |str| try stream.print("decl_ref \"{f}\"", .{ + std.zig.fmtString(self.code.nullTerminatedString(str)), }), } } fn writeOptionalInstRef( self: *Writer, - stream: anytype, + 
stream: *std.io.Writer, prefix: []const u8, inst: Zir.Inst.Ref, ) !void { @@ -2716,7 +2706,7 @@ const Writer = struct { fn writeOptionalInstRefOrBody( self: *Writer, - stream: anytype, + stream: *std.io.Writer, prefix: []const u8, ref: Zir.Inst.Ref, body: []const Zir.Inst.Index, @@ -2734,7 +2724,7 @@ const Writer = struct { fn writeFlag( self: *Writer, - stream: anytype, + stream: *std.io.Writer, name: []const u8, flag: bool, ) !void { @@ -2743,7 +2733,7 @@ const Writer = struct { try stream.writeAll(name); } - fn writeSrcNode(self: *Writer, stream: anytype, src_node: Ast.Node.Offset) !void { + fn writeSrcNode(self: *Writer, stream: *std.io.Writer, src_node: Ast.Node.Offset) !void { const tree = self.tree orelse return; const abs_node = src_node.toAbsolute(self.parent_decl_node); const src_span = tree.nodeToSpan(abs_node); @@ -2755,7 +2745,7 @@ const Writer = struct { }); } - fn writeSrcTok(self: *Writer, stream: anytype, src_tok: Ast.TokenOffset) !void { + fn writeSrcTok(self: *Writer, stream: *std.io.Writer, src_tok: Ast.TokenOffset) !void { const tree = self.tree orelse return; const abs_tok = src_tok.toAbsolute(tree.firstToken(self.parent_decl_node)); const span_start = tree.tokenStart(abs_tok); @@ -2768,7 +2758,7 @@ const Writer = struct { }); } - fn writeSrcTokAbs(self: *Writer, stream: anytype, src_tok: Ast.TokenIndex) !void { + fn writeSrcTokAbs(self: *Writer, stream: *std.io.Writer, src_tok: Ast.TokenIndex) !void { const tree = self.tree orelse return; const span_start = tree.tokenStart(src_tok); const span_end = span_start + @as(u32, @intCast(tree.tokenSlice(src_tok).len)); @@ -2780,15 +2770,15 @@ const Writer = struct { }); } - fn writeBracedDecl(self: *Writer, stream: anytype, body: []const Zir.Inst.Index) !void { + fn writeBracedDecl(self: *Writer, stream: *std.io.Writer, body: []const Zir.Inst.Index) !void { try self.writeBracedBodyConditional(stream, body, self.recurse_decls); } - fn writeBracedBody(self: *Writer, stream: anytype, body: []const 
Zir.Inst.Index) !void { + fn writeBracedBody(self: *Writer, stream: *std.io.Writer, body: []const Zir.Inst.Index) !void { try self.writeBracedBodyConditional(stream, body, self.recurse_blocks); } - fn writeBracedBodyConditional(self: *Writer, stream: anytype, body: []const Zir.Inst.Index, enabled: bool) !void { + fn writeBracedBodyConditional(self: *Writer, stream: *std.io.Writer, body: []const Zir.Inst.Index, enabled: bool) !void { if (body.len == 0) { try stream.writeAll("{}"); } else if (enabled) { @@ -2796,7 +2786,7 @@ const Writer = struct { self.indent += 2; try self.writeBody(stream, body); self.indent -= 2; - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); try stream.writeAll("}"); } else if (body.len == 1) { try stream.writeByte('{'); @@ -2817,21 +2807,21 @@ const Writer = struct { } } - fn writeBody(self: *Writer, stream: anytype, body: []const Zir.Inst.Index) !void { + fn writeBody(self: *Writer, stream: *std.io.Writer, body: []const Zir.Inst.Index) !void { for (body) |inst| { - try stream.writeByteNTimes(' ', self.indent); + try stream.splatByteAll(' ', self.indent); try stream.print("%{d} ", .{@intFromEnum(inst)}); try self.writeInstToStream(stream, inst); try stream.writeByte('\n'); } } - fn writeImport(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + fn writeImport(self: *Writer, stream: *std.io.Writer, inst: Zir.Inst.Index) !void { const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_tok; const extra = self.code.extraData(Zir.Inst.Import, inst_data.payload_index).data; try self.writeInstRef(stream, extra.res_ty); const import_path = self.code.nullTerminatedString(extra.path); - try stream.print(", \"{}\") ", .{std.zig.fmtEscapes(import_path)}); + try stream.print(", \"{f}\") ", .{std.zig.fmtString(import_path)}); try self.writeSrcTok(stream, inst_data.src_tok); } }; diff --git a/src/print_zoir.zig b/src/print_zoir.zig index b6cc8fe4d9..85d4c22b48 100644 --- 
a/src/print_zoir.zig +++ b/src/print_zoir.zig @@ -1,13 +1,8 @@ -pub fn renderToFile(zoir: Zoir, arena: Allocator, f: std.fs.File) (std.fs.File.WriteError || Allocator.Error)!void { - var bw = std.io.bufferedWriter(f.writer()); - try renderToWriter(zoir, arena, bw.writer()); - try bw.flush(); -} +pub const Error = error{ WriteFailed, OutOfMemory }; -pub fn renderToWriter(zoir: Zoir, arena: Allocator, w: anytype) (@TypeOf(w).Error || Allocator.Error)!void { +pub fn renderToWriter(zoir: Zoir, arena: Allocator, w: *Writer) Error!void { assert(!zoir.hasCompileErrors()); - const fmtIntSizeBin = std.fmt.fmtIntSizeBin; const bytes_per_node = comptime n: { var n: usize = 0; for (@typeInfo(Zoir.Node.Repr).@"struct".fields) |f| { @@ -23,42 +18,42 @@ pub fn renderToWriter(zoir: Zoir, arena: Allocator, w: anytype) (@TypeOf(w).Erro // zig fmt: off try w.print( - \\# Nodes: {} ({}) - \\# Extra Data Items: {} ({}) - \\# BigInt Limbs: {} ({}) - \\# String Table Bytes: {} - \\# Total ZON Bytes: {} + \\# Nodes: {} ({Bi}) + \\# Extra Data Items: {} ({Bi}) + \\# BigInt Limbs: {} ({Bi}) + \\# String Table Bytes: {Bi} + \\# Total ZON Bytes: {Bi} \\ , .{ - zoir.nodes.len, fmtIntSizeBin(node_bytes), - zoir.extra.len, fmtIntSizeBin(extra_bytes), - zoir.limbs.len, fmtIntSizeBin(limb_bytes), - fmtIntSizeBin(string_bytes), - fmtIntSizeBin(node_bytes + extra_bytes + limb_bytes + string_bytes), + zoir.nodes.len, node_bytes, + zoir.extra.len, extra_bytes, + zoir.limbs.len, limb_bytes, + string_bytes, + node_bytes + extra_bytes + limb_bytes + string_bytes, }); // zig fmt: on var pz: PrintZon = .{ - .w = w.any(), + .w = w, .arena = arena, .zoir = zoir, .indent = 0, }; - return @errorCast(pz.renderRoot()); + return pz.renderRoot(); } const PrintZon = struct { - w: std.io.AnyWriter, + w: *Writer, arena: Allocator, zoir: Zoir, indent: u32, - fn renderRoot(pz: *PrintZon) anyerror!void { + fn renderRoot(pz: *PrintZon) Error!void { try pz.renderNode(.root); try pz.w.writeByte('\n'); } - fn renderNode(pz: 
*PrintZon, node: Zoir.Node.Index) anyerror!void { + fn renderNode(pz: *PrintZon, node: Zoir.Node.Index) Error!void { const zoir = pz.zoir; try pz.w.print("%{d} = ", .{@intFromEnum(node)}); switch (node.get(zoir)) { @@ -77,8 +72,8 @@ const PrintZon = struct { }, .float_literal => |x| try pz.w.print("float({d})", .{x}), .char_literal => |x| try pz.w.print("char({d})", .{x}), - .enum_literal => |x| try pz.w.print("enum_literal({p})", .{std.zig.fmtId(x.get(zoir))}), - .string_literal => |x| try pz.w.print("str(\"{}\")", .{std.zig.fmtEscapes(x)}), + .enum_literal => |x| try pz.w.print("enum_literal({f})", .{std.zig.fmtIdP(x.get(zoir))}), + .string_literal => |x| try pz.w.print("str(\"{f}\")", .{std.zig.fmtString(x)}), .empty_literal => try pz.w.writeAll("empty_literal(.{})"), .array_literal => |vals| { try pz.w.writeAll("array_literal({"); @@ -97,7 +92,7 @@ const PrintZon = struct { pz.indent += 1; for (s.names, 0..s.vals.len) |name, idx| { try pz.newline(); - try pz.w.print("[{p}] ", .{std.zig.fmtId(name.get(zoir))}); + try pz.w.print("[{f}] ", .{std.zig.fmtIdP(name.get(zoir))}); try pz.renderNode(s.vals.at(@intCast(idx))); try pz.w.writeByte(','); } @@ -110,9 +105,7 @@ const PrintZon = struct { fn newline(pz: *PrintZon) !void { try pz.w.writeByte('\n'); - for (0..pz.indent) |_| { - try pz.w.writeByteNTimes(' ', 2); - } + try pz.w.splatByteAll(' ', 2 * pz.indent); } }; @@ -120,3 +113,4 @@ const std = @import("std"); const assert = std.debug.assert; const Allocator = std.mem.Allocator; const Zoir = std.zig.Zoir; +const Writer = std.io.Writer; diff --git a/src/register_manager.zig b/src/register_manager.zig index 90fe09980a..bc6761ad3b 100644 --- a/src/register_manager.zig +++ b/src/register_manager.zig @@ -238,7 +238,7 @@ pub fn RegisterManager( if (i < count) return null; for (regs, insts) |reg, inst| { - log.debug("tryAllocReg {} for inst {?}", .{ reg, inst }); + log.debug("tryAllocReg {} for inst {?f}", .{ reg, inst }); self.markRegAllocated(reg); if (inst) 
|tracked_inst| { @@ -317,7 +317,7 @@ pub fn RegisterManager( tracked_index: TrackedIndex, inst: ?Air.Inst.Index, ) AllocationError!void { - log.debug("getReg {} for inst {?}", .{ regAtTrackedIndex(tracked_index), inst }); + log.debug("getReg {} for inst {?f}", .{ regAtTrackedIndex(tracked_index), inst }); if (!self.isRegIndexFree(tracked_index)) { // Move the instruction that was previously there to a // stack allocation. @@ -349,7 +349,7 @@ pub fn RegisterManager( tracked_index: TrackedIndex, inst: ?Air.Inst.Index, ) void { - log.debug("getRegAssumeFree {} for inst {?}", .{ regAtTrackedIndex(tracked_index), inst }); + log.debug("getRegAssumeFree {} for inst {?f}", .{ regAtTrackedIndex(tracked_index), inst }); self.markRegIndexAllocated(tracked_index); assert(self.isRegIndexFree(tracked_index)); diff --git a/src/translate_c.zig b/src/translate_c.zig index dda2ee8e2e..301e0a219d 100644 --- a/src/translate_c.zig +++ b/src/translate_c.zig @@ -357,7 +357,7 @@ fn transFileScopeAsm(c: *Context, scope: *Scope, file_scope_asm: *const clang.Fi var len: usize = undefined; const bytes_ptr = asm_string.getString_bytes_begin_size(&len); - const str = try std.fmt.allocPrint(c.arena, "\"{}\"", .{std.zig.fmtEscapes(bytes_ptr[0..len])}); + const str = try std.fmt.allocPrint(c.arena, "\"{f}\"", .{std.zig.fmtString(bytes_ptr[0..len])}); const str_node = try Tag.string_literal.create(c.arena, str); const asm_node = try Tag.asm_simple.create(c.arena, str_node); @@ -2276,7 +2276,7 @@ fn transNarrowStringLiteral( var len: usize = undefined; const bytes_ptr = stmt.getString_bytes_begin_size(&len); - const str = try std.fmt.allocPrint(c.arena, "\"{}\"", .{std.zig.fmtEscapes(bytes_ptr[0..len])}); + const str = try std.fmt.allocPrint(c.arena, "\"{f}\"", .{std.zig.fmtString(bytes_ptr[0..len])}); const node = try Tag.string_literal.create(c.arena, str); return maybeSuppressResult(c, result_used, node); } @@ -3338,7 +3338,7 @@ fn transPredefinedExpr(c: *Context, scope: *Scope, expr: *const 
clang.Predefined fn transCreateCharLitNode(c: *Context, narrow: bool, val: u32) TransError!Node { return Tag.char_literal.create(c.arena, if (narrow) - try std.fmt.allocPrint(c.arena, "'{'}'", .{std.zig.fmtEscapes(&.{@as(u8, @intCast(val))})}) + try std.fmt.allocPrint(c.arena, "'{f}'", .{std.zig.fmtChar(&.{@as(u8, @intCast(val))})}) else try std.fmt.allocPrint(c.arena, "'\\u{{{x}}}'", .{val})); } @@ -5832,7 +5832,7 @@ fn zigifyEscapeSequences(ctx: *Context, m: *MacroCtx) ![]const u8 { num += c - 'A' + 10; }, else => { - i += std.fmt.formatIntBuf(bytes[i..], num, 16, .lower, std.fmt.FormatOptions{ .fill = '0', .width = 2 }); + i += std.fmt.printInt(bytes[i..], num, 16, .lower, .{ .fill = '0', .width = 2 }); num = 0; if (c == '\\') state = .escape @@ -5858,7 +5858,7 @@ fn zigifyEscapeSequences(ctx: *Context, m: *MacroCtx) ![]const u8 { }; num += c - '0'; } else { - i += std.fmt.formatIntBuf(bytes[i..], num, 16, .lower, std.fmt.FormatOptions{ .fill = '0', .width = 2 }); + i += std.fmt.printInt(bytes[i..], num, 16, .lower, .{ .fill = '0', .width = 2 }); num = 0; count = 0; if (c == '\\') @@ -5872,21 +5872,21 @@ fn zigifyEscapeSequences(ctx: *Context, m: *MacroCtx) ![]const u8 { } } if (state == .hex or state == .octal) - i += std.fmt.formatIntBuf(bytes[i..], num, 16, .lower, std.fmt.FormatOptions{ .fill = '0', .width = 2 }); + i += std.fmt.printInt(bytes[i..], num, 16, .lower, .{ .fill = '0', .width = 2 }); return bytes[0..i]; } -/// non-ASCII characters (c > 127) are also treated as non-printable by fmtSliceEscapeLower. +/// non-ASCII characters (c > 127) are also treated as non-printable by ascii.hexEscape. /// If a C string literal or char literal in a macro is not valid UTF-8, we need to escape /// non-ASCII characters so that the Zig source we output will itself be UTF-8. 
fn escapeUnprintables(ctx: *Context, m: *MacroCtx) ![]const u8 { const zigified = try zigifyEscapeSequences(ctx, m); if (std.unicode.utf8ValidateSlice(zigified)) return zigified; - const formatter = std.fmt.fmtSliceEscapeLower(zigified); - const encoded_size = @as(usize, @intCast(std.fmt.count("{s}", .{formatter}))); + const formatter = std.ascii.hexEscape(zigified, .lower); + const encoded_size: usize = @intCast(std.fmt.count("{f}", .{formatter})); const output = try ctx.arena.alloc(u8, encoded_size); - return std.fmt.bufPrint(output, "{s}", .{formatter}) catch |err| switch (err) { + return std.fmt.bufPrint(output, "{f}", .{formatter}) catch |err| switch (err) { error.NoSpaceLeft => unreachable, else => |e| return e, }; @@ -5905,7 +5905,7 @@ fn parseCPrimaryExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node { if (slice[0] != '\'' or slice[1] == '\\' or slice.len == 3) { return Tag.char_literal.create(c.arena, try escapeUnprintables(c, m)); } else { - const str = try std.fmt.allocPrint(c.arena, "0x{s}", .{std.fmt.fmtSliceHexLower(slice[1 .. slice.len - 1])}); + const str = try std.fmt.allocPrint(c.arena, "0x{x}", .{slice[1 .. 
slice.len - 1]}); return Tag.integer_literal.create(c.arena, str); } }, diff --git a/stage1/wasi.c b/stage1/wasi.c index ef2183dae8..83240d39b4 100644 --- a/stage1/wasi.c +++ b/stage1/wasi.c @@ -520,12 +520,15 @@ uint32_t wasi_snapshot_preview1_fd_read(uint32_t fd, uint32_t iovs, uint32_t iov default: panic("unimplemented: fd_read special file"); } + if (fds[fd].stream == NULL) { + store32_align2(res_size_ptr, 0); + return wasi_errno_success; + } + size_t size = 0; for (uint32_t i = 0; i < iovs_len; i += 1) { uint32_t len = load32_align2(&iovs_ptr[i].len); - size_t read_size = 0; - if (fds[fd].stream != NULL) - read_size = fread(&m[load32_align2(&iovs_ptr[i].ptr)], 1, len, fds[fd].stream); + size_t read_size = fread(&m[load32_align2(&iovs_ptr[i].ptr)], 1, len, fds[fd].stream); size += read_size; if (read_size < len) break; } @@ -633,8 +636,10 @@ uint32_t wasi_snapshot_preview1_fd_pwrite(uint32_t fd, uint32_t iovs, uint32_t i } fpos_t pos; - if (fgetpos(fds[fd].stream, &pos) < 0) return wasi_errno_io; - if (fseek(fds[fd].stream, offset, SEEK_SET) < 0) return wasi_errno_io; + if (fds[fd].stream != NULL) { + if (fgetpos(fds[fd].stream, &pos) < 0) return wasi_errno_io; + if (fseek(fds[fd].stream, offset, SEEK_SET) < 0) return wasi_errno_io; + } size_t size = 0; for (uint32_t i = 0; i < iovs_len; i += 1) { @@ -648,7 +653,9 @@ uint32_t wasi_snapshot_preview1_fd_pwrite(uint32_t fd, uint32_t iovs, uint32_t i if (written_size < len) break; } - if (fsetpos(fds[fd].stream, &pos) < 0) return wasi_errno_io; + if (fds[fd].stream != NULL) { + if (fsetpos(fds[fd].stream, &pos) < 0) return wasi_errno_io; + } if (size > 0) { time_t now = time(NULL); @@ -964,6 +971,11 @@ uint32_t wasi_snapshot_preview1_fd_pread(uint32_t fd, uint32_t iovs, uint32_t io default: panic("unimplemented: fd_pread special file"); } + if (fds[fd].stream == NULL) { + store32_align2(res_size_ptr, 0); + return wasi_errno_success; + } + fpos_t pos; if (fgetpos(fds[fd].stream, &pos) < 0) return wasi_errno_io; if 
(fseek(fds[fd].stream, offset, SEEK_SET) < 0) return wasi_errno_io; diff --git a/test/behavior/error.zig b/test/behavior/error.zig index e5afd8255e..70edb900ff 100644 --- a/test/behavior/error.zig +++ b/test/behavior/error.zig @@ -1032,24 +1032,6 @@ test "function called at runtime is properly analyzed for inferred error set" { }; } -test "generic type constructed from inferred error set of unresolved function" { - if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; - if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; - if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO - - const S = struct { - fn write(_: void, bytes: []const u8) !usize { - _ = bytes; - return 0; - } - const T = std.io.Writer(void, @typeInfo(@typeInfo(@TypeOf(write)).@"fn".return_type.?).error_union.error_set, write); - fn writer() T { - return .{ .context = {} }; - } - }; - _ = std.io.multiWriter(.{S.writer()}); -} - test "errorCast to adhoc inferred error set" { const S = struct { inline fn baz() !i32 { diff --git a/test/behavior/union_with_members.zig b/test/behavior/union_with_members.zig index 70f1086276..e5e4669608 100644 --- a/test/behavior/union_with_members.zig +++ b/test/behavior/union_with_members.zig @@ -10,8 +10,8 @@ const ET = union(enum) { pub fn print(a: *const ET, buf: []u8) anyerror!usize { return switch (a.*) { - ET.SINT => |x| fmt.formatIntBuf(buf, x, 10, .lower, fmt.FormatOptions{}), - ET.UINT => |x| fmt.formatIntBuf(buf, x, 10, .lower, fmt.FormatOptions{}), + ET.SINT => |x| fmt.printInt(buf, x, 10, .lower, fmt.FormatOptions{}), + ET.UINT => |x| fmt.printInt(buf, x, 10, .lower, fmt.FormatOptions{}), }; } }; diff --git a/test/cases/safety/slice sentinel mismatch - floats.zig b/test/cases/safety/slice sentinel mismatch - floats.zig index 45577acb00..be63272f0c 100644 --- a/test/cases/safety/slice sentinel mismatch - floats.zig +++ b/test/cases/safety/slice sentinel mismatch - floats.zig @@ -2,7 +2,7 @@ const std = 
@import("std"); pub fn panic(message: []const u8, stack_trace: ?*std.builtin.StackTrace, _: ?usize) noreturn { _ = stack_trace; - if (std.mem.eql(u8, message, "sentinel mismatch: expected 1.2e0, found 4e0")) { + if (std.mem.eql(u8, message, "sentinel mismatch: expected 1.2, found 4")) { std.process.exit(0); } std.process.exit(1); diff --git a/test/compare_output.zig b/test/compare_output.zig index 3603163a2d..283e6d90c9 100644 --- a/test/compare_output.zig +++ b/test/compare_output.zig @@ -17,15 +17,6 @@ pub fn addCases(cases: *tests.CompareOutputContext) void { \\} , "Hello, world!" ++ if (@import("builtin").os.tag == .windows) "\r\n" else "\n"); - cases.add("hello world without libc", - \\const io = @import("std").io; - \\ - \\pub fn main() void { - \\ const stdout = io.getStdOut().writer(); - \\ stdout.print("Hello, world!\n{d:4} {x:3} {c}\n", .{@as(u32, 12), @as(u16, 0x12), @as(u8, 'a')}) catch unreachable; - \\} - , "Hello, world!\n 12 12 a\n"); - cases.addC("number literals", \\const std = @import("std"); \\const builtin = @import("builtin"); @@ -158,24 +149,6 @@ pub fn addCases(cases: *tests.CompareOutputContext) void { \\ ); - cases.add("order-independent declarations", - \\const io = @import("std").io; - \\const z = io.stdin_fileno; - \\const x : @TypeOf(y) = 1234; - \\const y : u16 = 5678; - \\pub fn main() void { - \\ var x_local : i32 = print_ok(x); - \\ _ = &x_local; - \\} - \\fn print_ok(val: @TypeOf(x)) @TypeOf(foo) { - \\ _ = val; - \\ const stdout = io.getStdOut().writer(); - \\ stdout.print("OK\n", .{}) catch unreachable; - \\ return 0; - \\} - \\const foo : i32 = 0; - , "OK\n"); - cases.addC("expose function pointer to C land", \\const c = @cImport(@cInclude("stdlib.h")); \\ @@ -236,267 +209,11 @@ pub fn addCases(cases: *tests.CompareOutputContext) void { \\} , "3.25\n3\n3.00\n-0.40\n"); - cases.add("same named methods in incomplete struct", - \\const io = @import("std").io; - \\ - \\const Foo = struct { - \\ field1: Bar, - \\ - \\ fn method(a: 
*const Foo) bool { - \\ _ = a; - \\ return true; - \\ } - \\}; - \\ - \\const Bar = struct { - \\ field2: i32, - \\ - \\ fn method(b: *const Bar) bool { - \\ _ = b; - \\ return true; - \\ } - \\}; - \\ - \\pub fn main() void { - \\ const bar = Bar {.field2 = 13,}; - \\ const foo = Foo {.field1 = bar,}; - \\ const stdout = io.getStdOut().writer(); - \\ if (!foo.method()) { - \\ stdout.print("BAD\n", .{}) catch unreachable; - \\ } - \\ if (!bar.method()) { - \\ stdout.print("BAD\n", .{}) catch unreachable; - \\ } - \\ stdout.print("OK\n", .{}) catch unreachable; - \\} - , "OK\n"); - - cases.add("defer with only fallthrough", - \\const io = @import("std").io; - \\pub fn main() void { - \\ const stdout = io.getStdOut().writer(); - \\ stdout.print("before\n", .{}) catch unreachable; - \\ defer stdout.print("defer1\n", .{}) catch unreachable; - \\ defer stdout.print("defer2\n", .{}) catch unreachable; - \\ defer stdout.print("defer3\n", .{}) catch unreachable; - \\ stdout.print("after\n", .{}) catch unreachable; - \\} - , "before\nafter\ndefer3\ndefer2\ndefer1\n"); - - cases.add("defer with return", - \\const io = @import("std").io; - \\const os = @import("std").os; - \\pub fn main() void { - \\ const stdout = io.getStdOut().writer(); - \\ stdout.print("before\n", .{}) catch unreachable; - \\ defer stdout.print("defer1\n", .{}) catch unreachable; - \\ defer stdout.print("defer2\n", .{}) catch unreachable; - \\ var gpa: @import("std").heap.GeneralPurposeAllocator(.{}) = .init; - \\ defer _ = gpa.deinit(); - \\ var arena = @import("std").heap.ArenaAllocator.init(gpa.allocator()); - \\ defer arena.deinit(); - \\ var args_it = @import("std").process.argsWithAllocator(arena.allocator()) catch unreachable; - \\ if (args_it.skip() and !args_it.skip()) return; - \\ defer stdout.print("defer3\n", .{}) catch unreachable; - \\ stdout.print("after\n", .{}) catch unreachable; - \\} - , "before\ndefer2\ndefer1\n"); - - cases.add("errdefer and it fails", - \\const io = 
@import("std").io; - \\pub fn main() void { - \\ do_test() catch return; - \\} - \\fn do_test() !void { - \\ const stdout = io.getStdOut().writer(); - \\ stdout.print("before\n", .{}) catch unreachable; - \\ defer stdout.print("defer1\n", .{}) catch unreachable; - \\ errdefer stdout.print("deferErr\n", .{}) catch unreachable; - \\ try its_gonna_fail(); - \\ defer stdout.print("defer3\n", .{}) catch unreachable; - \\ stdout.print("after\n", .{}) catch unreachable; - \\} - \\fn its_gonna_fail() !void { - \\ return error.IToldYouItWouldFail; - \\} - , "before\ndeferErr\ndefer1\n"); - - cases.add("errdefer and it passes", - \\const io = @import("std").io; - \\pub fn main() void { - \\ do_test() catch return; - \\} - \\fn do_test() !void { - \\ const stdout = io.getStdOut().writer(); - \\ stdout.print("before\n", .{}) catch unreachable; - \\ defer stdout.print("defer1\n", .{}) catch unreachable; - \\ errdefer stdout.print("deferErr\n", .{}) catch unreachable; - \\ try its_gonna_pass(); - \\ defer stdout.print("defer3\n", .{}) catch unreachable; - \\ stdout.print("after\n", .{}) catch unreachable; - \\} - \\fn its_gonna_pass() anyerror!void { } - , "before\nafter\ndefer3\ndefer1\n"); - - cases.addCase(x: { - var tc = cases.create("@embedFile", - \\const foo_txt = @embedFile("foo.txt"); - \\const io = @import("std").io; - \\ - \\pub fn main() void { - \\ const stdout = io.getStdOut().writer(); - \\ stdout.print(foo_txt, .{}) catch unreachable; - \\} - , "1234\nabcd\n"); - - tc.addSourceFile("foo.txt", "1234\nabcd\n"); - - break :x tc; - }); - - cases.addCase(x: { - var tc = cases.create("parsing args", - \\const std = @import("std"); - \\const io = std.io; - \\const os = std.os; - \\ - \\pub fn main() !void { - \\ var gpa: std.heap.GeneralPurposeAllocator(.{}) = .init; - \\ defer _ = gpa.deinit(); - \\ var arena = std.heap.ArenaAllocator.init(gpa.allocator()); - \\ defer arena.deinit(); - \\ var args_it = try std.process.argsWithAllocator(arena.allocator()); - \\ const 
stdout = io.getStdOut().writer(); - \\ var index: usize = 0; - \\ _ = args_it.skip(); - \\ while (args_it.next()) |arg| : (index += 1) { - \\ try stdout.print("{}: {s}\n", .{index, arg}); - \\ } - \\} - , - \\0: first arg - \\1: 'a' 'b' \ - \\2: bare - \\3: ba""re - \\4: " - \\5: last arg - \\ - ); - - tc.setCommandLineArgs(&[_][]const u8{ - "first arg", - "'a' 'b' \\", - "bare", - "ba\"\"re", - "\"", - "last arg", - }); - - break :x tc; - }); - - cases.addCase(x: { - var tc = cases.create("parsing args new API", - \\const std = @import("std"); - \\const io = std.io; - \\const os = std.os; - \\ - \\pub fn main() !void { - \\ var gpa: std.heap.GeneralPurposeAllocator(.{}) = .init; - \\ defer _ = gpa.deinit(); - \\ var arena = std.heap.ArenaAllocator.init(gpa.allocator()); - \\ defer arena.deinit(); - \\ var args_it = try std.process.argsWithAllocator(arena.allocator()); - \\ const stdout = io.getStdOut().writer(); - \\ var index: usize = 0; - \\ _ = args_it.skip(); - \\ while (args_it.next()) |arg| : (index += 1) { - \\ try stdout.print("{}: {s}\n", .{index, arg}); - \\ } - \\} - , - \\0: first arg - \\1: 'a' 'b' \ - \\2: bare - \\3: ba""re - \\4: " - \\5: last arg - \\ - ); - - tc.setCommandLineArgs(&[_][]const u8{ - "first arg", - "'a' 'b' \\", - "bare", - "ba\"\"re", - "\"", - "last arg", - }); - - break :x tc; - }); - - // It is required to override the log function in order to print to stdout instead of stderr - cases.add("std.log per scope log level override", - \\const std = @import("std"); - \\ - \\pub const std_options: std.Options = .{ - \\ .log_level = .debug, - \\ - \\ .log_scope_levels = &.{ - \\ .{ .scope = .a, .level = .warn }, - \\ .{ .scope = .c, .level = .err }, - \\ }, - \\ .logFn = log, - \\}; - \\ - \\const loga = std.log.scoped(.a); - \\const logb = std.log.scoped(.b); - \\const logc = std.log.scoped(.c); - \\ - \\pub fn main() !void { - \\ loga.debug("", .{}); - \\ logb.debug("", .{}); - \\ logc.debug("", .{}); - \\ - \\ loga.info("", .{}); - 
\\ logb.info("", .{}); - \\ logc.info("", .{}); - \\ - \\ loga.warn("", .{}); - \\ logb.warn("", .{}); - \\ logc.warn("", .{}); - \\ - \\ loga.err("", .{}); - \\ logb.err("", .{}); - \\ logc.err("", .{}); - \\} - \\pub fn log( - \\ comptime level: std.log.Level, - \\ comptime scope: @TypeOf(.EnumLiteral), - \\ comptime format: []const u8, - \\ args: anytype, - \\) void { - \\ const level_txt = comptime level.asText(); - \\ const prefix2 = if (scope == .default) ": " else "(" ++ @tagName(scope) ++ "):"; - \\ const stdout = std.io.getStdOut().writer(); - \\ nosuspend stdout.print(level_txt ++ prefix2 ++ format ++ "\n", args) catch return; - \\} - , - \\debug(b): - \\info(b): - \\warning(a): - \\warning(b): - \\error(a): - \\error(b): - \\error(c): - \\ - ); - - cases.add("valid carriage return example", "const io = @import(\"std\").io;\r\n" ++ // Testing CRLF line endings are valid + cases.add("valid carriage return example", "const std = @import(\"std\");\r\n" ++ // Testing CRLF line endings are valid "\r\n" ++ "pub \r fn main() void {\r\n" ++ // Testing isolated carriage return as whitespace is valid - " const stdout = io.getStdOut().writer();\r\n" ++ + " var file_writer = std.fs.File.stdout().writerStreaming(&.{});\r\n" ++ + " const stdout = &file_writer.interface;\r\n" ++ " stdout.print(\\\\A Multiline\r\n" ++ // testing CRLF at end of multiline string line is valid and normalises to \n in the output " \\\\String\r\n" ++ " , .{}) catch unreachable;\r\n" ++ diff --git a/test/incremental/add_decl b/test/incremental/add_decl index 87f33b1c51..39a25e72de 100644 --- a/test/incremental/add_decl +++ b/test/incremental/add_decl @@ -6,7 +6,7 @@ #file=main.zig const std = @import("std"); pub fn main() !void { - try std.io.getStdOut().writeAll(foo); + try std.fs.File.stdout().writeAll(foo); } const foo = "good morning\n"; #expect_stdout="good morning\n" @@ -15,7 +15,7 @@ const foo = "good morning\n"; #file=main.zig const std = @import("std"); pub fn main() !void { - try 
std.io.getStdOut().writeAll(foo); + try std.fs.File.stdout().writeAll(foo); } const foo = "good morning\n"; const bar = "good evening\n"; @@ -25,7 +25,7 @@ const bar = "good evening\n"; #file=main.zig const std = @import("std"); pub fn main() !void { - try std.io.getStdOut().writeAll(bar); + try std.fs.File.stdout().writeAll(bar); } const foo = "good morning\n"; const bar = "good evening\n"; @@ -35,17 +35,17 @@ const bar = "good evening\n"; #file=main.zig const std = @import("std"); pub fn main() !void { - try std.io.getStdOut().writeAll(qux); + try std.fs.File.stdout().writeAll(qux); } const foo = "good morning\n"; const bar = "good evening\n"; -#expect_error=main.zig:3:37: error: use of undeclared identifier 'qux' +#expect_error=main.zig:3:39: error: use of undeclared identifier 'qux' #update=add missing declaration #file=main.zig const std = @import("std"); pub fn main() !void { - try std.io.getStdOut().writeAll(qux); + try std.fs.File.stdout().writeAll(qux); } const foo = "good morning\n"; const bar = "good evening\n"; @@ -56,7 +56,7 @@ const qux = "good night\n"; #file=main.zig const std = @import("std"); pub fn main() !void { - try std.io.getStdOut().writeAll(qux); + try std.fs.File.stdout().writeAll(qux); } const qux = "good night\n"; #expect_stdout="good night\n" diff --git a/test/incremental/add_decl_namespaced b/test/incremental/add_decl_namespaced index 84472effb5..7e2fe5742c 100644 --- a/test/incremental/add_decl_namespaced +++ b/test/incremental/add_decl_namespaced @@ -6,7 +6,7 @@ #file=main.zig const std = @import("std"); pub fn main() !void { - try std.io.getStdOut().writeAll(@This().foo); + try std.fs.File.stdout().writeAll(@This().foo); } const foo = "good morning\n"; #expect_stdout="good morning\n" @@ -15,7 +15,7 @@ const foo = "good morning\n"; #file=main.zig const std = @import("std"); pub fn main() !void { - try std.io.getStdOut().writeAll(@This().foo); + try std.fs.File.stdout().writeAll(@This().foo); } const foo = "good morning\n"; const bar 
= "good evening\n"; @@ -25,7 +25,7 @@ const bar = "good evening\n"; #file=main.zig const std = @import("std"); pub fn main() !void { - try std.io.getStdOut().writeAll(@This().bar); + try std.fs.File.stdout().writeAll(@This().bar); } const foo = "good morning\n"; const bar = "good evening\n"; @@ -35,18 +35,18 @@ const bar = "good evening\n"; #file=main.zig const std = @import("std"); pub fn main() !void { - try std.io.getStdOut().writeAll(@This().qux); + try std.fs.File.stdout().writeAll(@This().qux); } const foo = "good morning\n"; const bar = "good evening\n"; -#expect_error=main.zig:3:44: error: root source file struct 'main' has no member named 'qux' +#expect_error=main.zig:3:46: error: root source file struct 'main' has no member named 'qux' #expect_error=main.zig:1:1: note: struct declared here #update=add missing declaration #file=main.zig const std = @import("std"); pub fn main() !void { - try std.io.getStdOut().writeAll(@This().qux); + try std.fs.File.stdout().writeAll(@This().qux); } const foo = "good morning\n"; const bar = "good evening\n"; @@ -57,7 +57,7 @@ const qux = "good night\n"; #file=main.zig const std = @import("std"); pub fn main() !void { - try std.io.getStdOut().writeAll(@This().qux); + try std.fs.File.stdout().writeAll(@This().qux); } const qux = "good night\n"; #expect_stdout="good night\n" diff --git a/test/incremental/bad_import b/test/incremental/bad_import index a26ab75423..4e78b7074a 100644 --- a/test/incremental/bad_import +++ b/test/incremental/bad_import @@ -7,7 +7,7 @@ #file=main.zig pub fn main() !void { _ = @import("foo.zig"); - try std.io.getStdOut().writeAll("success\n"); + try std.fs.File.stdout().writeAll("success\n"); } const std = @import("std"); #file=foo.zig @@ -29,7 +29,7 @@ comptime { #file=main.zig pub fn main() !void { //_ = @import("foo.zig"); - try std.io.getStdOut().writeAll("success\n"); + try std.fs.File.stdout().writeAll("success\n"); } const std = @import("std"); #expect_stdout="success\n" diff --git 
a/test/incremental/change_embed_file b/test/incremental/change_embed_file index 171e4d3178..7c23b120f1 100644 --- a/test/incremental/change_embed_file +++ b/test/incremental/change_embed_file @@ -7,7 +7,7 @@ const std = @import("std"); const string = @embedFile("string.txt"); pub fn main() !void { - try std.io.getStdOut().writeAll(string); + try std.fs.File.stdout().writeAll(string); } #file=string.txt Hello, World! @@ -27,7 +27,7 @@ Hello again, World! const std = @import("std"); const string = @embedFile("string.txt"); pub fn main() !void { - try std.io.getStdOut().writeAll("a hardcoded string\n"); + try std.fs.File.stdout().writeAll("a hardcoded string\n"); } #expect_stdout="a hardcoded string\n" @@ -36,7 +36,7 @@ pub fn main() !void { const std = @import("std"); const string = @embedFile("string.txt"); pub fn main() !void { - try std.io.getStdOut().writeAll(string); + try std.fs.File.stdout().writeAll(string); } #expect_error=main.zig:2:27: error: unable to open 'string.txt': FileNotFound diff --git a/test/incremental/change_enum_tag_type b/test/incremental/change_enum_tag_type index d3f6c85c37..1691764fbc 100644 --- a/test/incremental/change_enum_tag_type +++ b/test/incremental/change_enum_tag_type @@ -14,7 +14,8 @@ const Foo = enum(Tag) { pub fn main() !void { var val: Foo = undefined; val = .a; - try std.io.getStdOut().writer().print("{s}\n", .{@tagName(val)}); + var stdout_writer = std.fs.File.stdout().writerStreaming(&.{}); + try stdout_writer.interface.print("{s}\n", .{@tagName(val)}); } const std = @import("std"); #expect_stdout="a\n" @@ -31,7 +32,8 @@ const Foo = enum(Tag) { pub fn main() !void { var val: Foo = undefined; val = .a; - try std.io.getStdOut().writer().print("{s}\n", .{@tagName(val)}); + var stdout_writer = std.fs.File.stdout().writerStreaming(&.{}); + try stdout_writer.interface.print("{s}\n", .{@tagName(val)}); } comptime { // These can't be true at the same time; analysis should stop as soon as it sees `Foo` @@ -53,7 +55,8 @@ const Foo = 
enum(Tag) { pub fn main() !void { var val: Foo = undefined; val = .a; - try std.io.getStdOut().writer().print("{s}\n", .{@tagName(val)}); + var stdout_writer = std.fs.File.stdout().writerStreaming(&.{}); + try stdout_writer.interface.print("{s}\n", .{@tagName(val)}); } const std = @import("std"); #expect_stdout="a\n" diff --git a/test/incremental/change_exports b/test/incremental/change_exports index f0e2ea8d34..e492930031 100644 --- a/test/incremental/change_exports +++ b/test/incremental/change_exports @@ -16,7 +16,8 @@ pub fn main() !void { extern const bar: u32; }; S.foo(); - try std.io.getStdOut().writer().print("{}\n", .{S.bar}); + var stdout_writer = std.fs.File.stdout().writerStreaming(&.{}); + try stdout_writer.interface.print("{}\n", .{S.bar}); } const std = @import("std"); #expect_stdout="123\n" @@ -37,7 +38,8 @@ pub fn main() !void { extern const other: u32; }; S.foo(); - try std.io.getStdOut().writer().print("{} {}\n", .{ S.bar, S.other }); + var stdout_writer = std.fs.File.stdout().writerStreaming(&.{}); + try stdout_writer.interface.print("{} {}\n", .{ S.bar, S.other }); } const std = @import("std"); #expect_error=main.zig:6:5: error: exported symbol collision: foo @@ -59,7 +61,8 @@ pub fn main() !void { extern const other: u32; }; S.foo(); - try std.io.getStdOut().writer().print("{} {}\n", .{ S.bar, S.other }); + var stdout_writer = std.fs.File.stdout().writerStreaming(&.{}); + try stdout_writer.interface.print("{} {}\n", .{ S.bar, S.other }); } const std = @import("std"); #expect_stdout="123 456\n" @@ -83,7 +86,8 @@ pub fn main() !void { extern const other: u32; }; S.foo(); - try std.io.getStdOut().writer().print("{} {}\n", .{ S.bar, S.other }); + var stdout_writer = std.fs.File.stdout().writerStreaming(&.{}); + try stdout_writer.interface.print("{} {}\n", .{ S.bar, S.other }); } const std = @import("std"); #expect_stdout="123 456\n" @@ -128,7 +132,8 @@ pub fn main() !void { extern const other: u32; }; S.foo(); - try 
std.io.getStdOut().writer().print("{} {}\n", .{ S.bar, S.other }); + var stdout_writer = std.fs.File.stdout().writerStreaming(&.{}); + try stdout_writer.interface.print("{} {}\n", .{ S.bar, S.other }); } const std = @import("std"); #expect_stdout="123 456\n" @@ -152,7 +157,8 @@ pub fn main() !void { extern const other: u32; }; S.foo(); - try std.io.getStdOut().writer().print("{} {}\n", .{ S.bar, S.other }); + var stdout_writer = std.fs.File.stdout().writerStreaming(&.{}); + try stdout_writer.interface.print("{} {}\n", .{ S.bar, S.other }); } const std = @import("std"); #expect_error=main.zig:5:5: error: exported symbol collision: bar diff --git a/test/incremental/change_fn_type b/test/incremental/change_fn_type index 892d0dd9b6..24392b25f7 100644 --- a/test/incremental/change_fn_type +++ b/test/incremental/change_fn_type @@ -7,7 +7,8 @@ pub fn main() !void { try foo(123); } fn foo(x: u8) !void { - return std.io.getStdOut().writer().print("{d}\n", .{x}); + var stdout_writer = std.fs.File.stdout().writerStreaming(&.{}); + return stdout_writer.interface.print("{d}\n", .{x}); } const std = @import("std"); #expect_stdout="123\n" @@ -18,7 +19,8 @@ pub fn main() !void { try foo(123); } fn foo(x: i64) !void { - return std.io.getStdOut().writer().print("{d}\n", .{x}); + var stdout_writer = std.fs.File.stdout().writerStreaming(&.{}); + return stdout_writer.interface.print("{d}\n", .{x}); } const std = @import("std"); #expect_stdout="123\n" @@ -29,7 +31,8 @@ pub fn main() !void { try foo(-42); } fn foo(x: i64) !void { - return std.io.getStdOut().writer().print("{d}\n", .{x}); + var stdout_writer = std.fs.File.stdout().writerStreaming(&.{}); + return stdout_writer.interface.print("{d}\n", .{x}); } const std = @import("std"); #expect_stdout="-42\n" diff --git a/test/incremental/change_generic_line_number b/test/incremental/change_generic_line_number index bed4372b37..c9eb2be929 100644 --- a/test/incremental/change_generic_line_number +++ 
b/test/incremental/change_generic_line_number @@ -6,7 +6,7 @@ const std = @import("std"); fn Printer(message: []const u8) type { return struct { fn print() !void { - try std.io.getStdOut().writeAll(message); + try std.fs.File.stdout().writeAll(message); } }; } @@ -22,7 +22,7 @@ const std = @import("std"); fn Printer(message: []const u8) type { return struct { fn print() !void { - try std.io.getStdOut().writeAll(message); + try std.fs.File.stdout().writeAll(message); } }; } diff --git a/test/incremental/change_line_number b/test/incremental/change_line_number index 887e5ffd21..0754d39182 100644 --- a/test/incremental/change_line_number +++ b/test/incremental/change_line_number @@ -4,7 +4,7 @@ #file=main.zig const std = @import("std"); pub fn main() !void { - try std.io.getStdOut().writeAll("foo\n"); + try std.fs.File.stdout().writeAll("foo\n"); } #expect_stdout="foo\n" #update=change line number @@ -12,6 +12,6 @@ pub fn main() !void { const std = @import("std"); pub fn main() !void { - try std.io.getStdOut().writeAll("foo\n"); + try std.fs.File.stdout().writeAll("foo\n"); } #expect_stdout="foo\n" diff --git a/test/incremental/change_panic_handler b/test/incremental/change_panic_handler index 34a1f32dab..699134100e 100644 --- a/test/incremental/change_panic_handler +++ b/test/incremental/change_panic_handler @@ -11,7 +11,8 @@ pub fn main() !u8 { } pub const panic = std.debug.FullPanic(myPanic); fn myPanic(msg: []const u8, _: ?usize) noreturn { - std.io.getStdOut().writer().print("panic message: {s}\n", .{msg}) catch {}; + var stdout_writer = std.fs.File.stdout().writerStreaming(&.{}); + stdout_writer.interface.print("panic message: {s}\n", .{msg}) catch {}; std.process.exit(0); } const std = @import("std"); @@ -27,7 +28,8 @@ pub fn main() !u8 { } pub const panic = std.debug.FullPanic(myPanic); fn myPanic(msg: []const u8, _: ?usize) noreturn { - std.io.getStdOut().writer().print("new panic message: {s}\n", .{msg}) catch {}; + var stdout_writer = 
std.fs.File.stdout().writerStreaming(&.{}); + stdout_writer.interface.print("new panic message: {s}\n", .{msg}) catch {}; std.process.exit(0); } const std = @import("std"); @@ -43,7 +45,8 @@ pub fn main() !u8 { } pub const panic = std.debug.FullPanic(myPanicNew); fn myPanicNew(msg: []const u8, _: ?usize) noreturn { - std.io.getStdOut().writer().print("third panic message: {s}\n", .{msg}) catch {}; + var stdout_writer = std.fs.File.stdout().writerStreaming(&.{}); + stdout_writer.interface.print("third panic message: {s}\n", .{msg}) catch {}; std.process.exit(0); } const std = @import("std"); diff --git a/test/incremental/change_panic_handler_explicit b/test/incremental/change_panic_handler_explicit index ad5d3d124a..2d068d593e 100644 --- a/test/incremental/change_panic_handler_explicit +++ b/test/incremental/change_panic_handler_explicit @@ -41,7 +41,8 @@ pub const panic = struct { pub const noreturnReturned = no_panic.noreturnReturned; }; fn myPanic(msg: []const u8, _: ?usize) noreturn { - std.io.getStdOut().writer().print("panic message: {s}\n", .{msg}) catch {}; + var stdout_writer = std.fs.File.stdout().writerStreaming(&.{}); + stdout_writer.interface.print("panic message: {s}\n", .{msg}) catch {}; std.process.exit(0); } const std = @import("std"); @@ -87,7 +88,8 @@ pub const panic = struct { pub const noreturnReturned = no_panic.noreturnReturned; }; fn myPanic(msg: []const u8, _: ?usize) noreturn { - std.io.getStdOut().writer().print("new panic message: {s}\n", .{msg}) catch {}; + var stdout_writer = std.fs.File.stdout().writerStreaming(&.{}); + stdout_writer.interface.print("new panic message: {s}\n", .{msg}) catch {}; std.process.exit(0); } const std = @import("std"); @@ -133,7 +135,8 @@ pub const panic = struct { pub const noreturnReturned = no_panic.noreturnReturned; }; fn myPanicNew(msg: []const u8, _: ?usize) noreturn { - std.io.getStdOut().writer().print("third panic message: {s}\n", .{msg}) catch {}; + var stdout_writer = 
std.fs.File.stdout().writerStreaming(&.{}); + stdout_writer.interface.print("third panic message: {s}\n", .{msg}) catch {}; std.process.exit(0); } const std = @import("std"); diff --git a/test/incremental/change_shift_op b/test/incremental/change_shift_op index bface3a383..ccb904581d 100644 --- a/test/incremental/change_shift_op +++ b/test/incremental/change_shift_op @@ -8,7 +8,8 @@ pub fn main() !void { try foo(0x1300); } fn foo(x: u16) !void { - try std.io.getStdOut().writer().print("0x{x}\n", .{x << 4}); + var stdout_writer = std.fs.File.stdout().writerStreaming(&.{}); + try stdout_writer.interface.print("0x{x}\n", .{x << 4}); } const std = @import("std"); #expect_stdout="0x3000\n" @@ -18,7 +19,8 @@ pub fn main() !void { try foo(0x1300); } fn foo(x: u16) !void { - try std.io.getStdOut().writer().print("0x{x}\n", .{x >> 4}); + var stdout_writer = std.fs.File.stdout().writerStreaming(&.{}); + try stdout_writer.interface.print("0x{x}\n", .{x >> 4}); } const std = @import("std"); #expect_stdout="0x130\n" diff --git a/test/incremental/change_struct_same_fields b/test/incremental/change_struct_same_fields index 60fdcc8944..97049a1fc0 100644 --- a/test/incremental/change_struct_same_fields +++ b/test/incremental/change_struct_same_fields @@ -10,7 +10,8 @@ pub fn main() !void { try foo(&val); } fn foo(val: *const S) !void { - try std.io.getStdOut().writer().print( + var stdout_writer = std.fs.File.stdout().writerStreaming(&.{}); + try stdout_writer.interface.print( "{d} {d}\n", .{ val.x, val.y }, ); @@ -26,7 +27,8 @@ pub fn main() !void { try foo(&val); } fn foo(val: *const S) !void { - try std.io.getStdOut().writer().print( + var stdout_writer = std.fs.File.stdout().writerStreaming(&.{}); + try stdout_writer.interface.print( "{d} {d}\n", .{ val.x, val.y }, ); @@ -42,7 +44,8 @@ pub fn main() !void { try foo(&val); } fn foo(val: *const S) !void { - try std.io.getStdOut().writer().print( + var stdout_writer = std.fs.File.stdout().writerStreaming(&.{}); + try 
stdout_writer.interface.print( "{d} {d}\n", .{ val.x, val.y }, ); diff --git a/test/incremental/change_zon_file b/test/incremental/change_zon_file index e12f3db080..247f78828e 100644 --- a/test/incremental/change_zon_file +++ b/test/incremental/change_zon_file @@ -7,7 +7,7 @@ const std = @import("std"); const message: []const u8 = @import("message.zon"); pub fn main() !void { - try std.io.getStdOut().writeAll(message); + try std.fs.File.stdout().writeAll(message); } #file=message.zon "Hello, World!\n" @@ -28,7 +28,7 @@ pub fn main() !void { const std = @import("std"); const message: []const u8 = @import("message.zon"); pub fn main() !void { - try std.io.getStdOut().writeAll("a hardcoded string\n"); + try std.fs.File.stdout().writeAll("a hardcoded string\n"); } #expect_error=message.zon:1:1: error: unable to load 'message.zon': FileNotFound #expect_error=main.zig:2:37: note: file imported here @@ -43,6 +43,6 @@ pub fn main() !void { const std = @import("std"); const message: []const u8 = @import("message.zon"); pub fn main() !void { - try std.io.getStdOut().writeAll(message); + try std.fs.File.stdout().writeAll(message); } #expect_stdout="We're back, World!\n" diff --git a/test/incremental/change_zon_file_no_result_type b/test/incremental/change_zon_file_no_result_type index 84f4a69bcf..231558e3e9 100644 --- a/test/incremental/change_zon_file_no_result_type +++ b/test/incremental/change_zon_file_no_result_type @@ -6,7 +6,7 @@ #file=main.zig const std = @import("std"); pub fn main() !void { - try std.io.getStdOut().writeAll(@import("foo.zon").message); + try std.fs.File.stdout().writeAll(@import("foo.zon").message); } #file=foo.zon .{ diff --git a/test/incremental/compile_log b/test/incremental/compile_log index f7fa8ff7e1..de41524563 100644 --- a/test/incremental/compile_log +++ b/test/incremental/compile_log @@ -7,7 +7,7 @@ #file=main.zig const std = @import("std"); pub fn main() !void { - try std.io.getStdOut().writeAll("Hello, World!\n"); + try 
std.fs.File.stdout().writeAll("Hello, World!\n"); } #expect_stdout="Hello, World!\n" @@ -15,7 +15,7 @@ pub fn main() !void { #file=main.zig const std = @import("std"); pub fn main() !void { - try std.io.getStdOut().writeAll("Hello, World!\n"); + try std.fs.File.stdout().writeAll("Hello, World!\n"); @compileLog("this is a log"); } #expect_error=main.zig:4:5: error: found compile log statement @@ -25,6 +25,6 @@ pub fn main() !void { #file=main.zig const std = @import("std"); pub fn main() !void { - try std.io.getStdOut().writeAll("Hello, World!\n"); + try std.fs.File.stdout().writeAll("Hello, World!\n"); } #expect_stdout="Hello, World!\n" diff --git a/test/incremental/fix_astgen_failure b/test/incremental/fix_astgen_failure index 51972e9232..9c427c7a96 100644 --- a/test/incremental/fix_astgen_failure +++ b/test/incremental/fix_astgen_failure @@ -9,28 +9,28 @@ pub fn main() !void { } #file=foo.zig pub fn hello() !void { - try std.io.getStdOut().writeAll("Hello, World!\n"); + try std.fs.File.stdout().writeAll("Hello, World!\n"); } #expect_error=foo.zig:2:9: error: use of undeclared identifier 'std' #update=fix the error #file=foo.zig const std = @import("std"); pub fn hello() !void { - try std.io.getStdOut().writeAll("Hello, World!\n"); + try std.fs.File.stdout().writeAll("Hello, World!\n"); } #expect_stdout="Hello, World!\n" #update=add new error #file=foo.zig const std = @import("std"); pub fn hello() !void { - try std.io.getStdOut().writeAll(hello_str); + try std.fs.File.stdout().writeAll(hello_str); } -#expect_error=foo.zig:3:37: error: use of undeclared identifier 'hello_str' +#expect_error=foo.zig:3:39: error: use of undeclared identifier 'hello_str' #update=fix the new error #file=foo.zig const std = @import("std"); const hello_str = "Hello, World! Again!\n"; pub fn hello() !void { - try std.io.getStdOut().writeAll(hello_str); + try std.fs.File.stdout().writeAll(hello_str); } #expect_stdout="Hello, World! 
Again!\n" diff --git a/test/incremental/function_becomes_inline b/test/incremental/function_becomes_inline index 607cd6805e..8f36a31b69 100644 --- a/test/incremental/function_becomes_inline +++ b/test/incremental/function_becomes_inline @@ -7,7 +7,7 @@ pub fn main() !void { try foo(); } fn foo() !void { - try std.io.getStdOut().writer().writeAll("Hello, World!\n"); + try std.fs.File.stdout().writeAll("Hello, World!\n"); } const std = @import("std"); #expect_stdout="Hello, World!\n" @@ -18,7 +18,7 @@ pub fn main() !void { try foo(); } inline fn foo() !void { - try std.io.getStdOut().writer().writeAll("Hello, World!\n"); + try std.fs.File.stdout().writeAll("Hello, World!\n"); } const std = @import("std"); #expect_stdout="Hello, World!\n" @@ -29,7 +29,7 @@ pub fn main() !void { try foo(); } inline fn foo() !void { - try std.io.getStdOut().writer().writeAll("Hello, `inline` World!\n"); + try std.fs.File.stdout().writeAll("Hello, `inline` World!\n"); } const std = @import("std"); #expect_stdout="Hello, `inline` World!\n" diff --git a/test/incremental/hello b/test/incremental/hello index d1bc876071..c30cd50c6f 100644 --- a/test/incremental/hello +++ b/test/incremental/hello @@ -6,13 +6,13 @@ #file=main.zig const std = @import("std"); pub fn main() !void { - try std.io.getStdOut().writeAll("good morning\n"); + try std.fs.File.stdout().writeAll("good morning\n"); } #expect_stdout="good morning\n" #update=change the string #file=main.zig const std = @import("std"); pub fn main() !void { - try std.io.getStdOut().writeAll("おはようございます\n"); + try std.fs.File.stdout().writeAll("おはようございます\n"); } #expect_stdout="おはようございます\n" diff --git a/test/incremental/make_decl_pub b/test/incremental/make_decl_pub index 89388dca74..139593b2b0 100644 --- a/test/incremental/make_decl_pub +++ b/test/incremental/make_decl_pub @@ -11,7 +11,7 @@ pub fn main() !void { #file=foo.zig const std = @import("std"); fn hello() !void { - try std.io.getStdOut().writeAll("Hello, World!\n"); + try 
std.fs.File.stdout().writeAll("Hello, World!\n"); } #expect_error=main.zig:3:12: error: 'hello' is not marked 'pub' #expect_error=foo.zig:2:1: note: declared here @@ -20,6 +20,6 @@ fn hello() !void { #file=foo.zig const std = @import("std"); pub fn hello() !void { - try std.io.getStdOut().writeAll("Hello, World!\n"); + try std.fs.File.stdout().writeAll("Hello, World!\n"); } #expect_stdout="Hello, World!\n" diff --git a/test/incremental/modify_inline_fn b/test/incremental/modify_inline_fn index 726b2ca22a..ef31df2d5e 100644 --- a/test/incremental/modify_inline_fn +++ b/test/incremental/modify_inline_fn @@ -7,7 +7,7 @@ const std = @import("std"); pub fn main() !void { const str = getStr(); - try std.io.getStdOut().writeAll(str); + try std.fs.File.stdout().writeAll(str); } inline fn getStr() []const u8 { return "foo\n"; @@ -18,7 +18,7 @@ inline fn getStr() []const u8 { const std = @import("std"); pub fn main() !void { const str = getStr(); - try std.io.getStdOut().writeAll(str); + try std.fs.File.stdout().writeAll(str); } inline fn getStr() []const u8 { return "bar\n"; diff --git a/test/incremental/move_src b/test/incremental/move_src index 3e93513430..c2ff12761f 100644 --- a/test/incremental/move_src +++ b/test/incremental/move_src @@ -6,23 +6,9 @@ #file=main.zig const std = @import("std"); pub fn main() !void { - try std.io.getStdOut().writer().print("{d} {d}\n", .{ foo(), bar() }); + var stdout_writer = std.fs.File.stdout().writerStreaming(&.{}); + try stdout_writer.interface.print("{d} {d}\n", .{ foo(), bar() }); } -fn foo() u32 { - return @src().line; -} -fn bar() u32 { - return 123; -} -#expect_stdout="6 123\n" - -#update=add newline -#file=main.zig -const std = @import("std"); -pub fn main() !void { - try std.io.getStdOut().writer().print("{d} {d}\n", .{ foo(), bar() }); -} - fn foo() u32 { return @src().line; } @@ -30,3 +16,19 @@ fn bar() u32 { return 123; } #expect_stdout="7 123\n" + +#update=add newline +#file=main.zig +const std = @import("std"); +pub fn 
main() !void { + var stdout_writer = std.fs.File.stdout().writerStreaming(&.{}); + try stdout_writer.interface.print("{d} {d}\n", .{ foo(), bar() }); +} + +fn foo() u32 { + return @src().line; +} +fn bar() u32 { + return 123; +} +#expect_stdout="8 123\n" diff --git a/test/incremental/no_change_preserves_tag_names b/test/incremental/no_change_preserves_tag_names index 55219b8356..f7386db2a4 100644 --- a/test/incremental/no_change_preserves_tag_names +++ b/test/incremental/no_change_preserves_tag_names @@ -7,7 +7,7 @@ const std = @import("std"); var some_enum: enum { first, second } = .first; pub fn main() !void { - try std.io.getStdOut().writeAll(@tagName(some_enum)); + try std.fs.File.stdout().writeAll(@tagName(some_enum)); } #expect_stdout="first" #update=no change @@ -15,6 +15,6 @@ pub fn main() !void { const std = @import("std"); var some_enum: enum { first, second } = .first; pub fn main() !void { - try std.io.getStdOut().writeAll(@tagName(some_enum)); + try std.fs.File.stdout().writeAll(@tagName(some_enum)); } #expect_stdout="first" diff --git a/test/incremental/recursive_function_becomes_non_recursive b/test/incremental/recursive_function_becomes_non_recursive index 2ec483e3e5..9bba6bc038 100644 --- a/test/incremental/recursive_function_becomes_non_recursive +++ b/test/incremental/recursive_function_becomes_non_recursive @@ -8,7 +8,7 @@ pub fn main() !void { try foo(false); } fn foo(recurse: bool) !void { - const stdout = std.io.getStdOut().writer(); + const stdout = std.fs.File.stdout(); if (recurse) return foo(true); try stdout.writeAll("non-recursive path\n"); } @@ -21,7 +21,7 @@ pub fn main() !void { try foo(true); } fn foo(recurse: bool) !void { - const stdout = std.io.getStdOut().writer(); + const stdout = std.fs.File.stdout(); if (recurse) return stdout.writeAll("x==1\n"); try stdout.writeAll("non-recursive path\n"); } diff --git a/test/incremental/remove_enum_field b/test/incremental/remove_enum_field index 8d3796b7c3..7623922d3d 100644 --- 
a/test/incremental/remove_enum_field +++ b/test/incremental/remove_enum_field @@ -9,7 +9,8 @@ const MyEnum = enum(u8) { bar = 2, }; pub fn main() !void { - try std.io.getStdOut().writer().print("{}\n", .{@intFromEnum(MyEnum.foo)}); + var stdout_writer = std.fs.File.stdout().writerStreaming(&.{}); + try stdout_writer.interface.print("{}\n", .{@intFromEnum(MyEnum.foo)}); } const std = @import("std"); #expect_stdout="1\n" @@ -20,8 +21,9 @@ const MyEnum = enum(u8) { bar = 2, }; pub fn main() !void { - try std.io.getStdOut().writer().print("{}\n", .{@intFromEnum(MyEnum.foo)}); + var stdout_writer = std.fs.File.stdout().writerStreaming(&.{}); + try stdout_writer.interface.print("{}\n", .{@intFromEnum(MyEnum.foo)}); } const std = @import("std"); -#expect_error=main.zig:6:73: error: enum 'main.MyEnum' has no member named 'foo' +#expect_error=main.zig:7:69: error: enum 'main.MyEnum' has no member named 'foo' #expect_error=main.zig:1:16: note: enum declared here diff --git a/test/incremental/unreferenced_error b/test/incremental/unreferenced_error index 6025f3fdae..51e078a82a 100644 --- a/test/incremental/unreferenced_error +++ b/test/incremental/unreferenced_error @@ -6,7 +6,7 @@ #file=main.zig const std = @import("std"); pub fn main() !void { - try std.io.getStdOut().writeAll(a); + try std.fs.File.stdout().writeAll(a); } const a = "Hello, World!\n"; #expect_stdout="Hello, World!\n" @@ -15,7 +15,7 @@ const a = "Hello, World!\n"; #file=main.zig const std = @import("std"); pub fn main() !void { - try std.io.getStdOut().writeAll(a); + try std.fs.File.stdout().writeAll(a); } const a = @compileError("bad a"); #expect_error=main.zig:5:11: error: bad a @@ -24,7 +24,7 @@ const a = @compileError("bad a"); #file=main.zig const std = @import("std"); pub fn main() !void { - try std.io.getStdOut().writeAll(b); + try std.fs.File.stdout().writeAll(b); } const a = @compileError("bad a"); const b = "Hi there!\n"; @@ -34,7 +34,7 @@ const b = "Hi there!\n"; #file=main.zig const std = 
@import("std"); pub fn main() !void { - try std.io.getStdOut().writeAll(a); + try std.fs.File.stdout().writeAll(a); } const a = "Back to a\n"; const b = @compileError("bad b"); diff --git a/test/link/bss/main.zig b/test/link/bss/main.zig index aaf865a0c8..2785a8360f 100644 --- a/test/link/bss/main.zig +++ b/test/link/bss/main.zig @@ -4,8 +4,11 @@ const std = @import("std"); var buffer: [0x1000000]u64 = [1]u64{0} ** 0x1000000; pub fn main() anyerror!void { + var stdout_writer = std.fs.File.stdout().writerStreaming(&.{}); + buffer[0x10] = 1; - try std.io.getStdOut().writer().print("{d}, {d}, {d}\n", .{ + + try stdout_writer.interface.print("{d}, {d}, {d}\n", .{ // workaround the dreaded decl_val (&buffer)[0], (&buffer)[0x10], diff --git a/test/link/elf.zig b/test/link/elf.zig index 14b70441ba..f6dfbbea86 100644 --- a/test/link/elf.zig +++ b/test/link/elf.zig @@ -1315,8 +1315,8 @@ fn testGcSectionsZig(b: *Build, opts: Options) *Step { \\extern var live_var2: i32; \\extern fn live_fn2() void; \\pub fn main() void { - \\ const stdout = std.io.getStdOut(); - \\ stdout.writer().print("{d} {d}\n", .{ live_var1, live_var2 }) catch unreachable; + \\ var stdout_writer = std.fs.File.stdout().writerStreaming(&.{}); + \\ stdout_writer.interface.print("{d} {d}\n", .{ live_var1, live_var2 }) catch @panic("fail"); \\ live_fn2(); \\} , @@ -1357,8 +1357,8 @@ fn testGcSectionsZig(b: *Build, opts: Options) *Step { \\extern var live_var2: i32; \\extern fn live_fn2() void; \\pub fn main() void { - \\ const stdout = std.io.getStdOut(); - \\ stdout.writer().print("{d} {d}\n", .{ live_var1, live_var2 }) catch unreachable; + \\ var stdout_writer = std.fs.File.stdout().writerStreaming(&.{}); + \\ stdout_writer.interface.print("{d} {d}\n", .{ live_var1, live_var2 }) catch @panic("fail"); \\ live_fn2(); \\} , diff --git a/test/link/macho.zig b/test/link/macho.zig index e5e38f88e0..80d861eea0 100644 --- a/test/link/macho.zig +++ b/test/link/macho.zig @@ -710,7 +710,7 @@ fn testHelloZig(b: 
*Build, opts: Options) *Step { const exe = addExecutable(b, opts, .{ .name = "main", .zig_source_bytes = \\const std = @import("std"); \\pub fn main() void { - \\ std.io.getStdOut().writer().print("Hello world!\n", .{}) catch unreachable; + \\ std.fs.File.stdout().writeAll("Hello world!\n") catch @panic("fail"); \\} }); @@ -2365,10 +2365,11 @@ fn testTlsZig(b: *Build, opts: Options) *Step { \\threadlocal var x: i32 = 0; \\threadlocal var y: i32 = -1; \\pub fn main() void { - \\ std.io.getStdOut().writer().print("{d} {d}\n", .{x, y}) catch unreachable; + \\ var stdout_writer = std.fs.File.stdout().writerStreaming(&.{}); + \\ stdout_writer.interface.print("{d} {d}\n", .{x, y}) catch unreachable; \\ x -= 1; \\ y += 1; - \\ std.io.getStdOut().writer().print("{d} {d}\n", .{x, y}) catch unreachable; + \\ stdout_writer.interface.print("{d} {d}\n", .{x, y}) catch unreachable; \\} }); diff --git a/test/link/wasm/extern/main.zig b/test/link/wasm/extern/main.zig index b9fa1226eb..9635f64a40 100644 --- a/test/link/wasm/extern/main.zig +++ b/test/link/wasm/extern/main.zig @@ -3,6 +3,6 @@ const std = @import("std"); extern const foo: u32; pub fn main() void { - const std_out = std.io.getStdOut(); - std_out.writer().print("Result: {d}", .{foo}) catch {}; + var stdout_writer = std.fs.File.stdout().writerStreaming(&.{}); + stdout_writer.interface.print("Result: {d}", .{foo}) catch {}; } diff --git a/test/src/check-stack-trace.zig b/test/src/check-stack-trace.zig index 43800086af..30f3dd9789 100644 --- a/test/src/check-stack-trace.zig +++ b/test/src/check-stack-trace.zig @@ -84,5 +84,5 @@ pub fn main() !void { break :got_result try buf.toOwnedSlice(); }; - try std.io.getStdOut().writeAll(got); + try std.fs.File.stdout().writeAll(got); } diff --git a/test/standalone/child_process/child.zig b/test/standalone/child_process/child.zig index e9edcf9f4b..b02bec3500 100644 --- a/test/standalone/child_process/child.zig +++ b/test/standalone/child_process/child.zig @@ -27,12 +27,12 @@ fn 
run(allocator: std.mem.Allocator) !void { } // test stdout pipe; parent verifies - try std.io.getStdOut().writer().writeAll("hello from stdout"); + try std.fs.File.stdout().writeAll("hello from stdout"); // test stdin pipe from parent const hello_stdin = "hello from stdin"; var buf: [hello_stdin.len]u8 = undefined; - const stdin = std.io.getStdIn().reader(); + const stdin: std.fs.File = .stdin(); const n = try stdin.readAll(&buf); if (!std.mem.eql(u8, buf[0..n], hello_stdin)) { testError("stdin: '{s}'; want '{s}'", .{ buf[0..n], hello_stdin }); @@ -40,7 +40,8 @@ fn run(allocator: std.mem.Allocator) !void { } fn testError(comptime fmt: []const u8, args: anytype) void { - const stderr = std.io.getStdErr().writer(); + var stderr_writer = std.fs.File.stderr().writer(&.{}); + const stderr = &stderr_writer.interface; stderr.print("CHILD TEST ERROR: ", .{}) catch {}; stderr.print(fmt, args) catch {}; if (fmt[fmt.len - 1] != '\n') { diff --git a/test/standalone/child_process/main.zig b/test/standalone/child_process/main.zig index 068ff8d1a9..6537f90acf 100644 --- a/test/standalone/child_process/main.zig +++ b/test/standalone/child_process/main.zig @@ -19,13 +19,13 @@ pub fn main() !void { child.stderr_behavior = .Inherit; try child.spawn(); const child_stdin = child.stdin.?; - try child_stdin.writer().writeAll("hello from stdin"); // verified in child + try child_stdin.writeAll("hello from stdin"); // verified in child child_stdin.close(); child.stdin = null; const hello_stdout = "hello from stdout"; var buf: [hello_stdout.len]u8 = undefined; - const n = try child.stdout.?.reader().readAll(&buf); + const n = try child.stdout.?.deprecatedReader().readAll(&buf); if (!std.mem.eql(u8, buf[0..n], hello_stdout)) { testError("child stdout: '{s}'; want '{s}'", .{ buf[0..n], hello_stdout }); } @@ -45,7 +45,8 @@ pub fn main() !void { var parent_test_error = false; fn testError(comptime fmt: []const u8, args: anytype) void { - const stderr = std.io.getStdErr().writer(); + var 
stderr_writer = std.fs.File.stderr().writer(&.{}); + const stderr = &stderr_writer.interface; stderr.print("PARENT TEST ERROR: ", .{}) catch {}; stderr.print(fmt, args) catch {}; if (fmt[fmt.len - 1] != '\n') { diff --git a/test/standalone/run_output_paths/create_file.zig b/test/standalone/run_output_paths/create_file.zig index 041ebc3e50..da949d4fc1 100644 --- a/test/standalone/run_output_paths/create_file.zig +++ b/test/standalone/run_output_paths/create_file.zig @@ -10,7 +10,7 @@ pub fn main() !void { dir_name, .{}); const file_name = args.next().?; const file = try dir.createFile(file_name, .{}); - try file.writer().print( + try file.deprecatedWriter().print( \\{s} \\{s} \\Hello, world! diff --git a/test/standalone/sigpipe/breakpipe.zig b/test/standalone/sigpipe/breakpipe.zig index 293a6839a1..51f667fc15 100644 --- a/test/standalone/sigpipe/breakpipe.zig +++ b/test/standalone/sigpipe/breakpipe.zig @@ -10,7 +10,7 @@ pub fn main() !void { std.posix.close(pipe[0]); _ = std.posix.write(pipe[1], "a") catch |err| switch (err) { error.BrokenPipe => { - try std.io.getStdOut().writer().writeAll("BrokenPipe\n"); + try std.fs.File.stdout().writeAll("BrokenPipe\n"); std.posix.exit(123); }, else => |e| return e, diff --git a/test/standalone/simple/brace_expansion.zig b/test/standalone/simple/brace_expansion.zig deleted file mode 100644 index facaf4a754..0000000000 --- a/test/standalone/simple/brace_expansion.zig +++ /dev/null @@ -1,292 +0,0 @@ -const std = @import("std"); -const io = std.io; -const mem = std.mem; -const debug = std.debug; -const assert = debug.assert; -const testing = std.testing; -const ArrayList = std.ArrayList; -const maxInt = std.math.maxInt; - -const Token = union(enum) { - Word: []const u8, - OpenBrace, - CloseBrace, - Comma, - Eof, -}; - -var gpa: std.heap.GeneralPurposeAllocator(.{}) = .init; -var global_allocator = gpa.allocator(); - -fn tokenize(input: []const u8) !ArrayList(Token) { - const State = enum { - Start, - Word, - }; - - var token_list 
= ArrayList(Token).init(global_allocator); - errdefer token_list.deinit(); - var tok_begin: usize = undefined; - var state = State.Start; - - for (input, 0..) |b, i| { - switch (state) { - .Start => switch (b) { - 'a'...'z', 'A'...'Z' => { - state = State.Word; - tok_begin = i; - }, - '{' => try token_list.append(Token.OpenBrace), - '}' => try token_list.append(Token.CloseBrace), - ',' => try token_list.append(Token.Comma), - else => return error.InvalidInput, - }, - .Word => switch (b) { - 'a'...'z', 'A'...'Z' => {}, - '{', '}', ',' => { - try token_list.append(Token{ .Word = input[tok_begin..i] }); - switch (b) { - '{' => try token_list.append(Token.OpenBrace), - '}' => try token_list.append(Token.CloseBrace), - ',' => try token_list.append(Token.Comma), - else => unreachable, - } - state = State.Start; - }, - else => return error.InvalidInput, - }, - } - } - switch (state) { - State.Start => {}, - State.Word => try token_list.append(Token{ .Word = input[tok_begin..] }), - } - try token_list.append(Token.Eof); - return token_list; -} - -const Node = union(enum) { - Scalar: []const u8, - List: ArrayList(Node), - Combine: []Node, - - fn deinit(self: Node) void { - switch (self) { - .Scalar => {}, - .Combine => |pair| { - pair[0].deinit(); - pair[1].deinit(); - global_allocator.free(pair); - }, - .List => |list| { - for (list.items) |item| { - item.deinit(); - } - list.deinit(); - }, - } - } -}; - -const ParseError = error{ - InvalidInput, - OutOfMemory, -}; - -fn parse(tokens: *const ArrayList(Token), token_index: *usize) ParseError!Node { - const first_token = tokens.items[token_index.*]; - token_index.* += 1; - - const result_node = switch (first_token) { - .Word => |word| Node{ .Scalar = word }, - .OpenBrace => blk: { - var list = ArrayList(Node).init(global_allocator); - errdefer { - for (list.items) |node| node.deinit(); - list.deinit(); - } - while (true) { - try list.append(try parse(tokens, token_index)); - - const token = tokens.items[token_index.*]; - 
token_index.* += 1; - - switch (token) { - .CloseBrace => break, - .Comma => continue, - else => return error.InvalidInput, - } - } - break :blk Node{ .List = list }; - }, - else => return error.InvalidInput, - }; - - switch (tokens.items[token_index.*]) { - .Word, .OpenBrace => { - const pair = try global_allocator.alloc(Node, 2); - errdefer global_allocator.free(pair); - pair[0] = result_node; - pair[1] = try parse(tokens, token_index); - return Node{ .Combine = pair }; - }, - else => return result_node, - } -} - -fn expandString(input: []const u8, output: *ArrayList(u8)) !void { - const tokens = try tokenize(input); - defer tokens.deinit(); - if (tokens.items.len == 1) { - return output.resize(0); - } - - var token_index: usize = 0; - const root = try parse(&tokens, &token_index); - defer root.deinit(); - const last_token = tokens.items[token_index]; - switch (last_token) { - Token.Eof => {}, - else => return error.InvalidInput, - } - - var result_list = ArrayList(ArrayList(u8)).init(global_allocator); - defer { - for (result_list.items) |*buf| buf.deinit(); - result_list.deinit(); - } - - try expandNode(root, &result_list); - - try output.resize(0); - for (result_list.items, 0..) 
|buf, i| { - if (i != 0) { - try output.append(' '); - } - try output.appendSlice(buf.items); - } -} - -const ExpandNodeError = error{OutOfMemory}; - -fn expandNode(node: Node, output: *ArrayList(ArrayList(u8))) ExpandNodeError!void { - assert(output.items.len == 0); - switch (node) { - .Scalar => |scalar| { - var list = ArrayList(u8).init(global_allocator); - errdefer list.deinit(); - try list.appendSlice(scalar); - try output.append(list); - }, - .Combine => |pair| { - const a_node = pair[0]; - const b_node = pair[1]; - - var child_list_a = ArrayList(ArrayList(u8)).init(global_allocator); - defer { - for (child_list_a.items) |*buf| buf.deinit(); - child_list_a.deinit(); - } - try expandNode(a_node, &child_list_a); - - var child_list_b = ArrayList(ArrayList(u8)).init(global_allocator); - defer { - for (child_list_b.items) |*buf| buf.deinit(); - child_list_b.deinit(); - } - try expandNode(b_node, &child_list_b); - - for (child_list_a.items) |buf_a| { - for (child_list_b.items) |buf_b| { - var combined_buf = ArrayList(u8).init(global_allocator); - errdefer combined_buf.deinit(); - - try combined_buf.appendSlice(buf_a.items); - try combined_buf.appendSlice(buf_b.items); - try output.append(combined_buf); - } - } - }, - .List => |list| { - for (list.items) |child_node| { - var child_list = ArrayList(ArrayList(u8)).init(global_allocator); - errdefer for (child_list.items) |*buf| buf.deinit(); - defer child_list.deinit(); - - try expandNode(child_node, &child_list); - - for (child_list.items) |buf| { - try output.append(buf); - } - } - }, - } -} - -pub fn main() !void { - defer _ = gpa.deinit(); - const stdin_file = io.getStdIn(); - const stdout_file = io.getStdOut(); - - const stdin = try stdin_file.reader().readAllAlloc(global_allocator, std.math.maxInt(usize)); - defer global_allocator.free(stdin); - - var result_buf = ArrayList(u8).init(global_allocator); - defer result_buf.deinit(); - - try expandString(stdin, &result_buf); - try 
stdout_file.writeAll(result_buf.items); -} - -test "invalid inputs" { - global_allocator = std.testing.allocator; - - try expectError("}ABC", error.InvalidInput); - try expectError("{ABC", error.InvalidInput); - try expectError("}{", error.InvalidInput); - try expectError("{}", error.InvalidInput); - try expectError("A,B,C", error.InvalidInput); - try expectError("{A{B,C}", error.InvalidInput); - try expectError("{A,}", error.InvalidInput); - - try expectError("\n", error.InvalidInput); -} - -fn expectError(test_input: []const u8, expected_err: anyerror) !void { - var output_buf = ArrayList(u8).init(global_allocator); - defer output_buf.deinit(); - - try testing.expectError(expected_err, expandString(test_input, &output_buf)); -} - -test "valid inputs" { - global_allocator = std.testing.allocator; - - try expectExpansion("{x,y,z}", "x y z"); - try expectExpansion("{A,B}{x,y}", "Ax Ay Bx By"); - try expectExpansion("{A,B{x,y}}", "A Bx By"); - - try expectExpansion("{ABC}", "ABC"); - try expectExpansion("{A,B,C}", "A B C"); - try expectExpansion("ABC", "ABC"); - - try expectExpansion("", ""); - try expectExpansion("{A,B}{C,{x,y}}{g,h}", "ACg ACh Axg Axh Ayg Ayh BCg BCh Bxg Bxh Byg Byh"); - try expectExpansion("{A,B}{C,C{x,y}}{g,h}", "ACg ACh ACxg ACxh ACyg ACyh BCg BCh BCxg BCxh BCyg BCyh"); - try expectExpansion("{A,B}a", "Aa Ba"); - try expectExpansion("{C,{x,y}}", "C x y"); - try expectExpansion("z{C,{x,y}}", "zC zx zy"); - try expectExpansion("a{b,c{d,e{f,g}}}", "ab acd acef aceg"); - try expectExpansion("a{x,y}b", "axb ayb"); - try expectExpansion("z{{a,b}}", "za zb"); - try expectExpansion("a{b}", "ab"); -} - -fn expectExpansion(test_input: []const u8, expected_result: []const u8) !void { - var result = ArrayList(u8).init(global_allocator); - defer result.deinit(); - - expandString(test_input, &result) catch unreachable; - - try testing.expectEqualSlices(u8, expected_result, result.items); -} diff --git a/test/standalone/simple/build.zig 
b/test/standalone/simple/build.zig index e9270c3588..51d9b3a9b1 100644 --- a/test/standalone/simple/build.zig +++ b/test/standalone/simple/build.zig @@ -109,10 +109,6 @@ const cases = [_]Case{ //.{ // .src_path = "issue_9693/main.zig", //}, - .{ - .src_path = "brace_expansion.zig", - .is_test = true, - }, .{ .src_path = "issue_7030.zig", .target = .{ diff --git a/test/standalone/simple/cat/main.zig b/test/standalone/simple/cat/main.zig index 740e73a33e..7bb976e9e7 100644 --- a/test/standalone/simple/cat/main.zig +++ b/test/standalone/simple/cat/main.zig @@ -1,42 +1,42 @@ const std = @import("std"); const io = std.io; -const process = std.process; const fs = std.fs; const mem = std.mem; const warn = std.log.warn; +const fatal = std.process.fatal; pub fn main() !void { var arena_instance = std.heap.ArenaAllocator.init(std.heap.page_allocator); defer arena_instance.deinit(); const arena = arena_instance.allocator(); - const args = try process.argsAlloc(arena); + const args = try std.process.argsAlloc(arena); const exe = args[0]; var catted_anything = false; - const stdout_file = io.getStdOut(); + var stdout_writer = std.fs.File.stdout().writerStreaming(&.{}); + const stdout = &stdout_writer.interface; + var stdin_reader = std.fs.File.stdin().reader(&.{}); const cwd = fs.cwd(); for (args[1..]) |arg| { if (mem.eql(u8, arg, "-")) { catted_anything = true; - try stdout_file.writeFileAll(io.getStdIn(), .{}); + _ = try stdout.sendFileAll(&stdin_reader, .unlimited); } else if (mem.startsWith(u8, arg, "-")) { return usage(exe); } else { - const file = cwd.openFile(arg, .{}) catch |err| { - warn("Unable to open file: {s}\n", .{@errorName(err)}); - return err; - }; + const file = cwd.openFile(arg, .{}) catch |err| fatal("unable to open file: {t}\n", .{err}); defer file.close(); catted_anything = true; - try stdout_file.writeFileAll(file, .{}); + var file_reader = file.reader(&.{}); + _ = try stdout.sendFileAll(&file_reader, .unlimited); } } if (!catted_anything) { - try 
stdout_file.writeFileAll(io.getStdIn(), .{}); + _ = try stdout.sendFileAll(&stdin_reader, .unlimited); } } diff --git a/test/standalone/simple/guess_number/main.zig b/test/standalone/simple/guess_number/main.zig index 2c95c8993f..d477de2b78 100644 --- a/test/standalone/simple/guess_number/main.zig +++ b/test/standalone/simple/guess_number/main.zig @@ -1,37 +1,35 @@ const builtin = @import("builtin"); const std = @import("std"); -const io = std.io; -const fmt = std.fmt; pub fn main() !void { - const stdout = io.getStdOut().writer(); - const stdin = io.getStdIn(); + var stdout_writer = std.fs.File.stdout().writerStreaming(&.{}); + const out = &stdout_writer.interface; + const stdin: std.fs.File = .stdin(); - try stdout.print("Welcome to the Guess Number Game in Zig.\n", .{}); + try out.writeAll("Welcome to the Guess Number Game in Zig.\n"); const answer = std.crypto.random.intRangeLessThan(u8, 0, 100) + 1; while (true) { - try stdout.print("\nGuess a number between 1 and 100: ", .{}); + try out.writeAll("\nGuess a number between 1 and 100: "); var line_buf: [20]u8 = undefined; - const amt = try stdin.read(&line_buf); if (amt == line_buf.len) { - try stdout.print("Input too long.\n", .{}); + try out.writeAll("Input too long.\n"); continue; } const line = std.mem.trimEnd(u8, line_buf[0..amt], "\r\n"); - const guess = fmt.parseUnsigned(u8, line, 10) catch { - try stdout.print("Invalid number.\n", .{}); + const guess = std.fmt.parseUnsigned(u8, line, 10) catch { + try out.writeAll("Invalid number.\n"); continue; }; if (guess > answer) { - try stdout.print("Guess lower.\n", .{}); + try out.writeAll("Guess lower.\n"); } else if (guess < answer) { - try stdout.print("Guess higher.\n", .{}); + try out.writeAll("Guess higher.\n"); } else { - try stdout.print("You win!\n", .{}); + try out.writeAll("You win!\n"); return; } } diff --git a/test/standalone/simple/hello_world/hello.zig b/test/standalone/simple/hello_world/hello.zig index eabb226eb2..3b2b910687 100644 --- 
a/test/standalone/simple/hello_world/hello.zig +++ b/test/standalone/simple/hello_world/hello.zig @@ -1,5 +1,5 @@ const std = @import("std"); pub fn main() !void { - try std.io.getStdOut().writeAll("Hello, World!\n"); + try std.fs.File.stdout().writeAll("Hello, World!\n"); } diff --git a/test/standalone/simple/std_enums_big_enums.zig b/test/standalone/simple/std_enums_big_enums.zig index 1ad24a4147..de6cfe3ec7 100644 --- a/test/standalone/simple/std_enums_big_enums.zig +++ b/test/standalone/simple/std_enums_big_enums.zig @@ -6,6 +6,7 @@ pub fn main() void { const Big = @Type(.{ .@"enum" = .{ .tag_type = u16, .fields = make_fields: { + @setEvalBranchQuota(500000); var fields: [1001]std.builtin.Type.EnumField = undefined; for (&fields, 0..) |*field, i| { field.* = .{ .name = std.fmt.comptimePrint("field_{d}", .{i}), .value = i }; diff --git a/test/standalone/windows_argv/fuzz.zig b/test/standalone/windows_argv/fuzz.zig index 6d08c1bf84..bbe956c365 100644 --- a/test/standalone/windows_argv/fuzz.zig +++ b/test/standalone/windows_argv/fuzz.zig @@ -58,7 +58,7 @@ pub fn main() !void { std.debug.print(">>> found discrepancy <<<\n", .{}); const cmd_line_wtf8 = try std.unicode.wtf16LeToWtf8Alloc(allocator, cmd_line_w); defer allocator.free(cmd_line_wtf8); - std.debug.print("\"{}\"\n\n", .{std.zig.fmtEscapes(cmd_line_wtf8)}); + std.debug.print("\"{f}\"\n\n", .{std.zig.fmtString(cmd_line_wtf8)}); errors += 1; } diff --git a/test/standalone/windows_argv/lib.zig b/test/standalone/windows_argv/lib.zig index 074273ae21..d41ad95313 100644 --- a/test/standalone/windows_argv/lib.zig +++ b/test/standalone/windows_argv/lib.zig @@ -27,8 +27,8 @@ fn testArgv(expected_args: []const [*:0]const u16) !void { wtf8_buf.clearRetainingCapacity(); try std.unicode.wtf16LeToWtf8ArrayList(&wtf8_buf, std.mem.span(expected_arg)); if (!std.mem.eql(u8, wtf8_buf.items, arg_wtf8)) { - std.debug.print("{}: expected: \"{}\"\n", .{ i, std.zig.fmtEscapes(wtf8_buf.items) }); - std.debug.print("{}: actual: 
\"{}\"\n", .{ i, std.zig.fmtEscapes(arg_wtf8) }); + std.debug.print("{}: expected: \"{f}\"\n", .{ i, std.zig.fmtString(wtf8_buf.items) }); + std.debug.print("{}: actual: \"{f}\"\n", .{ i, std.zig.fmtString(arg_wtf8) }); eql = false; } } @@ -36,22 +36,22 @@ fn testArgv(expected_args: []const [*:0]const u16) !void { for (expected_args[min_len..], min_len..) |arg, i| { wtf8_buf.clearRetainingCapacity(); try std.unicode.wtf16LeToWtf8ArrayList(&wtf8_buf, std.mem.span(arg)); - std.debug.print("{}: expected: \"{}\"\n", .{ i, std.zig.fmtEscapes(wtf8_buf.items) }); + std.debug.print("{}: expected: \"{f}\"\n", .{ i, std.zig.fmtString(wtf8_buf.items) }); } for (args[min_len..], min_len..) |arg, i| { - std.debug.print("{}: actual: \"{}\"\n", .{ i, std.zig.fmtEscapes(arg) }); + std.debug.print("{}: actual: \"{f}\"\n", .{ i, std.zig.fmtString(arg) }); } const peb = std.os.windows.peb(); const lpCmdLine: [*:0]u16 = @ptrCast(peb.ProcessParameters.CommandLine.Buffer); wtf8_buf.clearRetainingCapacity(); try std.unicode.wtf16LeToWtf8ArrayList(&wtf8_buf, std.mem.span(lpCmdLine)); - std.debug.print("command line: \"{}\"\n", .{std.zig.fmtEscapes(wtf8_buf.items)}); + std.debug.print("command line: \"{f}\"\n", .{std.zig.fmtString(wtf8_buf.items)}); std.debug.print("expected argv:\n", .{}); std.debug.print("&.{{\n", .{}); for (expected_args) |arg| { wtf8_buf.clearRetainingCapacity(); try std.unicode.wtf16LeToWtf8ArrayList(&wtf8_buf, std.mem.span(arg)); - std.debug.print(" \"{}\",\n", .{std.zig.fmtEscapes(wtf8_buf.items)}); + std.debug.print(" \"{f}\",\n", .{std.zig.fmtString(wtf8_buf.items)}); } std.debug.print("}}\n", .{}); return error.ArgvMismatch; diff --git a/test/standalone/windows_bat_args/echo-args.zig b/test/standalone/windows_bat_args/echo-args.zig index 2552045aed..054c4a6975 100644 --- a/test/standalone/windows_bat_args/echo-args.zig +++ b/test/standalone/windows_bat_args/echo-args.zig @@ -5,7 +5,8 @@ pub fn main() !void { defer arena_state.deinit(); const arena = 
arena_state.allocator(); - const stdout = std.io.getStdOut().writer(); + var stdout_writer = std.fs.File.stdout().writerStreaming(&.{}); + const stdout = &stdout_writer.interface; var args = try std.process.argsAlloc(arena); for (args[1..], 1..) |arg, i| { try stdout.writeAll(arg); diff --git a/test/standalone/windows_spawn/hello.zig b/test/standalone/windows_spawn/hello.zig index dcf917c430..fb4a827e23 100644 --- a/test/standalone/windows_spawn/hello.zig +++ b/test/standalone/windows_spawn/hello.zig @@ -1,6 +1,7 @@ const std = @import("std"); pub fn main() !void { - const stdout = std.io.getStdOut().writer(); + var stdout_writer = std.fs.File.stdout().writerStreaming(&.{}); + const stdout = &stdout_writer.interface; try stdout.writeAll("hello from exe\n"); } diff --git a/test/tests.zig b/test/tests.zig index 9068d65012..3693e18d91 100644 --- a/test/tests.zig +++ b/test/tests.zig @@ -918,14 +918,16 @@ const test_targets = blk: { .link_libc = true, }, - .{ - .target = std.Target.Query.parse(.{ - .arch_os_abi = "riscv64-linux-none", - .cpu_features = "baseline+v+zbb", - }) catch unreachable, - .use_llvm = false, - .use_lld = false, - }, + // TODO implement codegen airFieldParentPtr + // TODO implement airMemmove for riscv64 + //.{ + // .target = std.Target.Query.parse(.{ + // .arch_os_abi = "riscv64-linux-none", + // .cpu_features = "baseline+v+zbb", + // }) catch unreachable, + // .use_llvm = false, + // .use_lld = false, + //}, .{ .target = .{ .cpu_arch = .riscv64, @@ -2753,7 +2755,7 @@ pub fn addIncrementalTests(b: *std.Build, test_step: *Step) !void { run.addArg(b.graph.zig_exe); run.addFileArg(b.path("test/incremental/").path(b, entry.path)); - run.addArgs(&.{ "--zig-lib-dir", b.fmt("{}", .{b.graph.zig_lib_directory}) }); + run.addArgs(&.{ "--zig-lib-dir", b.fmt("{f}", .{b.graph.zig_lib_directory}) }); run.addCheck(.{ .expect_term = .{ .Exited = 0 } }); diff --git a/tools/docgen.zig b/tools/docgen.zig index f110280972..3f48ba39a8 100644 --- a/tools/docgen.zig 
+++ b/tools/docgen.zig @@ -43,8 +43,7 @@ pub fn main() !void { while (args_it.next()) |arg| { if (mem.startsWith(u8, arg, "-")) { if (mem.eql(u8, arg, "-h") or mem.eql(u8, arg, "--help")) { - const stdout = io.getStdOut().writer(); - try stdout.writeAll(usage); + try fs.File.stdout().writeAll(usage); process.exit(0); } else if (mem.eql(u8, arg, "--code-dir")) { if (args_it.next()) |param| { @@ -76,9 +75,9 @@ pub fn main() !void { var code_dir = try fs.cwd().openDir(code_dir_path, .{}); defer code_dir.close(); - const input_file_bytes = try in_file.reader().readAllAlloc(arena, max_doc_file_size); + const input_file_bytes = try in_file.deprecatedReader().readAllAlloc(arena, max_doc_file_size); - var buffered_writer = io.bufferedWriter(out_file.writer()); + var buffered_writer = io.bufferedWriter(out_file.deprecatedWriter()); var tokenizer = Tokenizer.init(input_path, input_file_bytes); var toc = try genToc(arena, &tokenizer); @@ -426,7 +425,7 @@ fn genToc(allocator: Allocator, tokenizer: *Tokenizer) !Toc { try toc.writeByte('\n'); try toc.writeByteNTimes(' ', header_stack_size * 4); if (last_columns) |n| { - try toc.print("
            \n", .{n}); + try toc.print("
              \n", .{n}); } else { try toc.writeAll("
                \n"); } diff --git a/tools/doctest.zig b/tools/doctest.zig index 10ccade66d..8f9d1fe8cf 100644 --- a/tools/doctest.zig +++ b/tools/doctest.zig @@ -44,7 +44,7 @@ pub fn main() !void { while (args_it.next()) |arg| { if (mem.startsWith(u8, arg, "-")) { if (mem.eql(u8, arg, "-h") or mem.eql(u8, arg, "--help")) { - try std.io.getStdOut().writeAll(usage); + try std.fs.File.stdout().writeAll(usage); process.exit(0); } else if (mem.eql(u8, arg, "-i")) { opt_input = args_it.next() orelse fatal("expected parameter after -i", .{}); @@ -85,7 +85,7 @@ pub fn main() !void { var out_file = try fs.cwd().createFile(output_path, .{}); defer out_file.close(); - var bw = std.io.bufferedWriter(out_file.writer()); + var bw = std.io.bufferedWriter(out_file.deprecatedWriter()); const out = bw.writer(); try printSourceBlock(arena, out, source, fs.path.basename(input_path)); diff --git a/tools/dump-cov.zig b/tools/dump-cov.zig index 65bd19000d..7699ac702b 100644 --- a/tools/dump-cov.zig +++ b/tools/dump-cov.zig @@ -48,8 +48,9 @@ pub fn main() !void { fatal("failed to load coverage file {}: {s}", .{ cov_path, @errorName(err) }); }; - var bw = std.io.bufferedWriter(std.io.getStdOut().writer()); - const stdout = bw.writer(); + var stdout_buffer: [4000]u8 = undefined; + var stdout_writer = std.fs.File.stdout().writerStreaming(&stdout_buffer); + const stdout = &stdout_writer.interface; const header: *SeenPcsHeader = @ptrCast(cov_bytes); try stdout.print("{any}\n", .{header.*}); @@ -83,5 +84,5 @@ pub fn main() !void { }); } - try bw.flush(); + try stdout.flush(); } diff --git a/tools/fetch_them_macos_headers.zig b/tools/fetch_them_macos_headers.zig index 4b6bb87ae0..b81ee001b5 100644 --- a/tools/fetch_them_macos_headers.zig +++ b/tools/fetch_them_macos_headers.zig @@ -5,6 +5,8 @@ const mem = std.mem; const process = std.process; const assert = std.debug.assert; const tmpDir = std.testing.tmpDir; +const fatal = std.process.fatal; +const info = std.log.info; const Allocator = 
mem.Allocator; const OsTag = std.Target.Os.Tag; @@ -245,19 +247,6 @@ const ArgsIterator = struct { } }; -fn info(comptime format: []const u8, args: anytype) void { - const msg = std.fmt.allocPrint(gpa, "info: " ++ format ++ "\n", args) catch return; - std.io.getStdOut().writeAll(msg) catch {}; -} - -fn fatal(comptime format: []const u8, args: anytype) noreturn { - ret: { - const msg = std.fmt.allocPrint(gpa, "fatal: " ++ format ++ "\n", args) catch break :ret; - std.io.getStdErr().writeAll(msg) catch {}; - } - std.process.exit(1); -} - const Version = struct { major: u16, minor: u8, diff --git a/tools/gen_macos_headers_c.zig b/tools/gen_macos_headers_c.zig index 69f7cc33ff..a56194a9a8 100644 --- a/tools/gen_macos_headers_c.zig +++ b/tools/gen_macos_headers_c.zig @@ -1,5 +1,7 @@ const std = @import("std"); const assert = std.debug.assert; +const info = std.log.info; +const fatal = std.process.fatal; const Allocator = std.mem.Allocator; @@ -13,19 +15,6 @@ const usage = \\-h, --help Print this help and exit ; -fn info(comptime format: []const u8, args: anytype) void { - const msg = std.fmt.allocPrint(gpa, "info: " ++ format ++ "\n", args) catch return; - std.io.getStdOut().writeAll(msg) catch {}; -} - -fn fatal(comptime format: []const u8, args: anytype) noreturn { - ret: { - const msg = std.fmt.allocPrint(gpa, "fatal: " ++ format ++ "\n", args) catch break :ret; - std.io.getStdErr().writeAll(msg) catch {}; - } - std.process.exit(1); -} - pub fn main() anyerror!void { var arena_allocator = std.heap.ArenaAllocator.init(gpa); defer arena_allocator.deinit(); @@ -58,16 +47,19 @@ pub fn main() anyerror!void { std.mem.sort([]const u8, paths.items, {}, SortFn.lessThan); - const stdout = std.io.getStdOut().writer(); - try stdout.writeAll("#define _XOPEN_SOURCE\n"); + var buffer: [2000]u8 = undefined; + var stdout_writer = std.fs.File.stdout().writerStreaming(&buffer); + const w = &stdout_writer.interface; + try w.writeAll("#define _XOPEN_SOURCE\n"); for (paths.items) |path| { 
- try stdout.print("#include <{s}>\n", .{path}); + try w.print("#include <{s}>\n", .{path}); } - try stdout.writeAll( + try w.writeAll( \\int main(int argc, char **argv) { \\ return 0; \\} ); + try w.flush(); } fn findHeaders( diff --git a/tools/gen_outline_atomics.zig b/tools/gen_outline_atomics.zig index 2f989ed1a1..bcd757978a 100644 --- a/tools/gen_outline_atomics.zig +++ b/tools/gen_outline_atomics.zig @@ -17,8 +17,9 @@ pub fn main() !void { //const args = try std.process.argsAlloc(arena); - var bw = std.io.bufferedWriter(std.io.getStdOut().writer()); - const w = bw.writer(); + var stdout_buffer: [2000]u8 = undefined; + var stdout_writer = std.fs.File.stdout().writerStreaming(&stdout_buffer); + const w = &stdout_writer.interface; try w.writeAll( \\//! This file is generated by tools/gen_outline_atomics.zig. @@ -57,7 +58,7 @@ pub fn main() !void { try w.writeAll(footer.items); try w.writeAll("}\n"); - try bw.flush(); + try w.flush(); } fn writeFunction( diff --git a/tools/gen_spirv_spec.zig b/tools/gen_spirv_spec.zig index 0ba27c49a3..a772d99660 100644 --- a/tools/gen_spirv_spec.zig +++ b/tools/gen_spirv_spec.zig @@ -91,9 +91,10 @@ pub fn main() !void { try readExtRegistry(&exts, a, std.fs.cwd(), args[2]); - var bw = std.io.bufferedWriter(std.io.getStdOut().writer()); - try render(bw.writer(), a, core_spec, exts.items); - try bw.flush(); + var buffer: [4000]u8 = undefined; + var w = std.fs.File.stdout().writerStreaming(&buffer); + try render(&w, a, core_spec, exts.items); + try w.flush(); } fn readExtRegistry(exts: *std.ArrayList(Extension), a: Allocator, dir: std.fs.Dir, sub_path: []const u8) !void { @@ -166,7 +167,7 @@ fn tagPriorityScore(tag: []const u8) usize { } } -fn render(writer: anytype, a: Allocator, registry: CoreRegistry, extensions: []const Extension) !void { +fn render(writer: *std.io.Writer, a: Allocator, registry: CoreRegistry, extensions: []const Extension) !void { try writer.writeAll( \\//! 
This file is auto-generated by tools/gen_spirv_spec.zig. \\ @@ -188,15 +189,10 @@ fn render(writer: anytype, a: Allocator, registry: CoreRegistry, extensions: []c \\ none, \\ _, \\ - \\ pub fn format( - \\ self: IdResult, - \\ comptime _: []const u8, - \\ _: std.fmt.FormatOptions, - \\ writer: anytype, - \\ ) @TypeOf(writer).Error!void { + \\ pub fn format(self: IdResult, writer: *std.io.Writer) std.io.Writer.Error!void { \\ switch (self) { \\ .none => try writer.writeAll("(none)"), - \\ else => try writer.print("%{}", .{@intFromEnum(self)}), + \\ else => try writer.print("%{d}", .{@intFromEnum(self)}), \\ } \\ } \\}; @@ -899,7 +895,8 @@ fn parseHexInt(text: []const u8) !u31 { } fn usageAndExit(arg0: []const u8, code: u8) noreturn { - std.io.getStdErr().writer().print( + const stderr = std.debug.lockStderrWriter(&.{}); + stderr.print( \\Usage: {s} \\ \\Generates Zig bindings for SPIR-V specifications found in the SPIRV-Headers diff --git a/tools/gen_stubs.zig b/tools/gen_stubs.zig index 31095399d3..4978611cc1 100644 --- a/tools/gen_stubs.zig +++ b/tools/gen_stubs.zig @@ -333,7 +333,9 @@ pub fn main() !void { } } - const stdout = std.io.getStdOut().writer(); + var stdout_buffer: [2000]u8 = undefined; + var stdout_writer = std.fs.File.stdout().writerStreaming(&stdout_buffer); + const stdout = &stdout_writer.interface; try stdout.writeAll( \\#ifdef PTR64 \\#define WEAK64 .weak @@ -533,6 +535,8 @@ pub fn main() !void { .all => {}, .single, .multi, .family, .time32 => try stdout.writeAll("#endif\n"), } + + try stdout.flush(); } fn parseElf(parse: Parse, comptime is_64: bool, comptime endian: builtin.Endian) !void { diff --git a/tools/generate_JSONTestSuite.zig b/tools/generate_JSONTestSuite.zig index 42dc777e82..56c6bc7261 100644 --- a/tools/generate_JSONTestSuite.zig +++ b/tools/generate_JSONTestSuite.zig @@ -6,7 +6,9 @@ pub fn main() !void { var gpa: std.heap.GeneralPurposeAllocator(.{}) = .init; var allocator = gpa.allocator(); - var output = 
std.io.getStdOut().writer(); + var stdout_buffer: [2000]u8 = undefined; + var stdout_writer = std.fs.File.stdout().writerStreaming(&stdout_buffer); + const output = &stdout_writer.interface; try output.writeAll( \\// This file was generated by _generate_JSONTestSuite.zig \\// These test cases are sourced from: https://github.com/nst/JSONTestSuite @@ -44,6 +46,8 @@ pub fn main() !void { try writeString(output, contents); try output.writeAll(");\n}\n"); } + + try output.flush(); } const i_structure_500_nested_arrays = "[" ** 500 ++ "]" ** 500; diff --git a/tools/generate_c_size_and_align_checks.zig b/tools/generate_c_size_and_align_checks.zig index 588deb4935..8c278407e4 100644 --- a/tools/generate_c_size_and_align_checks.zig +++ b/tools/generate_c_size_and_align_checks.zig @@ -42,20 +42,23 @@ pub fn main() !void { const query = try std.Target.Query.parse(.{ .arch_os_abi = args[1] }); const target = try std.zig.system.resolveTargetQuery(query); - const stdout = std.io.getStdOut().writer(); + var buffer: [2000]u8 = undefined; + var stdout_writer = std.fs.File.stdout().writerStreaming(&buffer); + const w = &stdout_writer.interface; inline for (@typeInfo(std.Target.CType).@"enum".fields) |field| { const c_type: std.Target.CType = @enumFromInt(field.value); - try stdout.print("_Static_assert(sizeof({0s}) == {1d}, \"sizeof({0s}) == {1d}\");\n", .{ + try w.print("_Static_assert(sizeof({0s}) == {1d}, \"sizeof({0s}) == {1d}\");\n", .{ cName(c_type), target.cTypeByteSize(c_type), }); - try stdout.print("_Static_assert(_Alignof({0s}) == {1d}, \"_Alignof({0s}) == {1d}\");\n", .{ + try w.print("_Static_assert(_Alignof({0s}) == {1d}, \"_Alignof({0s}) == {1d}\");\n", .{ cName(c_type), target.cTypeAlignment(c_type), }); - try stdout.print("_Static_assert(__alignof({0s}) == {1d}, \"__alignof({0s}) == {1d}\");\n\n", .{ + try w.print("_Static_assert(__alignof({0s}) == {1d}, \"__alignof({0s}) == {1d}\");\n\n", .{ cName(c_type), target.cTypePreferredAlignment(c_type), }); } + try 
w.flush(); } diff --git a/tools/generate_linux_syscalls.zig b/tools/generate_linux_syscalls.zig index 38726e6501..1ee153c40c 100644 --- a/tools/generate_linux_syscalls.zig +++ b/tools/generate_linux_syscalls.zig @@ -666,13 +666,16 @@ pub fn main() !void { const allocator = arena.allocator(); const args = try std.process.argsAlloc(allocator); - if (args.len < 3 or mem.eql(u8, args[1], "--help")) - usageAndExit(std.io.getStdErr(), args[0], 1); + if (args.len < 3 or mem.eql(u8, args[1], "--help")) { + usage(std.debug.lockStderrWriter(&.{}), args[0]) catch std.process.exit(2); + std.process.exit(1); + } const zig_exe = args[1]; const linux_path = args[2]; - var buf_out = std.io.bufferedWriter(std.io.getStdOut().writer()); - const writer = buf_out.writer(); + var stdout_buffer: [2000]u8 = undefined; + var stdout_writer = std.fs.File.stdout().writerStreaming(&stdout_buffer); + const writer = &stdout_writer.interface; var linux_dir = try std.fs.cwd().openDir(linux_path, .{}); defer linux_dir.close(); @@ -714,17 +717,16 @@ pub fn main() !void { } } - try buf_out.flush(); + try writer.flush(); } -fn usageAndExit(file: fs.File, arg0: []const u8, code: u8) noreturn { - file.writer().print( +fn usage(w: *std.io.Writer, arg0: []const u8) std.io.Writer.Error!void { + try w.print( \\Usage: {s} /path/to/zig /path/to/linux \\Alternative Usage: zig run /path/to/git/zig/tools/generate_linux_syscalls.zig -- /path/to/zig /path/to/linux \\ \\Generates the list of Linux syscalls for each supported cpu arch, using the Linux development tree. \\Prints to stdout Zig code which you can use to replace the file lib/std/os/linux/syscalls.zig. 
\\ - , .{arg0}) catch std.process.exit(1); - std.process.exit(code); + , .{arg0}); } diff --git a/tools/update_clang_options.zig b/tools/update_clang_options.zig index 3c2ef84952..8bf49ad93a 100644 --- a/tools/update_clang_options.zig +++ b/tools/update_clang_options.zig @@ -634,25 +634,25 @@ pub fn main() anyerror!void { const allocator = arena.allocator(); const args = try std.process.argsAlloc(allocator); - if (args.len <= 1) { - usageAndExit(std.io.getStdErr(), args[0], 1); - } + var stdout_buffer: [4000]u8 = undefined; + var stdout_writer = fs.stdout().writerStreaming(&stdout_buffer); + const stdout = &stdout_writer.interface; + + if (args.len <= 1) printUsageAndExit(args[0]); + if (std.mem.eql(u8, args[1], "--help")) { - usageAndExit(std.io.getStdOut(), args[0], 0); - } - if (args.len < 3) { - usageAndExit(std.io.getStdErr(), args[0], 1); + printUsage(stdout, args[0]) catch std.process.exit(2); + stdout.flush() catch std.process.exit(2); + std.process.exit(0); } + if (args.len < 3) printUsageAndExit(args[0]); + const llvm_tblgen_exe = args[1]; - if (std.mem.startsWith(u8, llvm_tblgen_exe, "-")) { - usageAndExit(std.io.getStdErr(), args[0], 1); - } + if (std.mem.startsWith(u8, llvm_tblgen_exe, "-")) printUsageAndExit(args[0]); const llvm_src_root = args[2]; - if (std.mem.startsWith(u8, llvm_src_root, "-")) { - usageAndExit(std.io.getStdErr(), args[0], 1); - } + if (std.mem.startsWith(u8, llvm_src_root, "-")) printUsageAndExit(args[0]); var llvm_to_zig_cpu_features = std.StringHashMap([]const u8).init(allocator); @@ -719,8 +719,6 @@ pub fn main() anyerror!void { // "W" and "Wl,". So we sort this list in order of descending priority. std.mem.sort(*json.ObjectMap, all_objects.items, {}, objectLessThan); - var buffered_stdout = std.io.bufferedWriter(std.io.getStdOut().writer()); - const stdout = buffered_stdout.writer(); try stdout.writeAll( \\// This file is generated by tools/update_clang_options.zig. 
\\// zig fmt: off @@ -815,7 +813,7 @@ pub fn main() anyerror!void { \\ ); - try buffered_stdout.flush(); + try stdout.flush(); } // TODO we should be able to import clang_options.zig but currently this is problematic because it will @@ -966,13 +964,17 @@ fn objectLessThan(context: void, a: *json.ObjectMap, b: *json.ObjectMap) bool { return std.mem.lessThan(u8, a_key, b_key); } -fn usageAndExit(file: fs.File, arg0: []const u8, code: u8) noreturn { - file.writer().print( +fn printUsageAndExit(arg0: []const u8) noreturn { + printUsage(std.debug.lockStderrWriter(&.{}), arg0) catch std.process.exit(2); + std.process.exit(1); +} + +fn printUsage(w: *std.io.Writer, arg0: []const u8) std.io.Writer.Error!void { + try w.print( \\Usage: {s} /path/to/llvm-tblgen /path/to/git/llvm/llvm-project \\Alternative Usage: zig run /path/to/git/zig/tools/update_clang_options.zig -- /path/to/llvm-tblgen /path/to/git/llvm/llvm-project \\ \\Prints to stdout Zig code which you can use to replace the file src/clang_options_data.zig. \\ - , .{arg0}) catch std.process.exit(1); - std.process.exit(code); + , .{arg0}); } diff --git a/tools/update_cpu_features.zig b/tools/update_cpu_features.zig index cb6043f0c8..b54d74ca1c 100644 --- a/tools/update_cpu_features.zig +++ b/tools/update_cpu_features.zig @@ -2082,8 +2082,8 @@ fn processOneTarget(job: Job) void { } fn usageAndExit(arg0: []const u8, code: u8) noreturn { - const stderr = std.io.getStdErr(); - stderr.writer().print( + const stderr = std.debug.lockStderrWriter(&.{}); + stderr.print( \\Usage: {s} /path/to/llvm-tblgen /path/git/llvm-project /path/git/zig [zig_name filter] \\ \\Updates lib/std/target/.zig from llvm/lib/Target//.td . 
diff --git a/tools/update_crc_catalog.zig b/tools/update_crc_catalog.zig index 5ccac15112..1ae45cf1bc 100644 --- a/tools/update_crc_catalog.zig +++ b/tools/update_crc_catalog.zig @@ -11,14 +11,10 @@ pub fn main() anyerror!void { const arena = arena_state.allocator(); const args = try std.process.argsAlloc(arena); - if (args.len <= 1) { - usageAndExit(std.io.getStdErr(), args[0], 1); - } + if (args.len <= 1) printUsageAndExit(args[0]); const zig_src_root = args[1]; - if (mem.startsWith(u8, zig_src_root, "-")) { - usageAndExit(std.io.getStdErr(), args[0], 1); - } + if (mem.startsWith(u8, zig_src_root, "-")) printUsageAndExit(args[0]); var zig_src_dir = try fs.cwd().openDir(zig_src_root, .{}); defer zig_src_dir.close(); @@ -193,10 +189,14 @@ pub fn main() anyerror!void { } } -fn usageAndExit(file: fs.File, arg0: []const u8, code: u8) noreturn { - file.writer().print( +fn printUsageAndExit(arg0: []const u8) noreturn { + printUsage(std.debug.lockStderrWriter(&.{}), arg0) catch std.process.exit(2); + std.process.exit(1); +} + +fn printUsage(w: *std.io.Writer, arg0: []const u8) std.io.Writer.Error!void { + return w.print( \\Usage: {s} /path/git/zig \\ - , .{arg0}) catch std.process.exit(1); - std.process.exit(code); + , .{arg0}); }