Merge pull request #24329 from ziglang/writergate

Deprecates all existing std.io readers and writers in favor of the newly
provided std.io.Reader and std.io.Writer, which are non-generic and have the
buffer above the vtable — in other words, the buffer is in the interface, not
the implementation. This means that although Reader and Writer are no longer
generic, they are still transparent to optimization; all of the interface
functions have a concrete hot path operating on the buffer, and only make
vtable calls when the buffer is full (or, for readers, exhausted).
This commit is contained in:
Andrew Kelley 2025-07-10 12:04:27 +02:00 committed by GitHub
commit 1a998886c8
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
356 changed files with 15258 additions and 13455 deletions

View File

@ -436,7 +436,6 @@ set(ZIG_STAGE2_SOURCES
lib/std/elf.zig
lib/std/fifo.zig
lib/std/fmt.zig
lib/std/fmt/format_float.zig
lib/std/fmt/parse_float.zig
lib/std/fs.zig
lib/std/fs/AtomicFile.zig

View File

@ -279,7 +279,7 @@ pub fn build(b: *std.Build) !void {
const ancestor_ver = try std.SemanticVersion.parse(tagged_ancestor);
if (zig_version.order(ancestor_ver) != .gt) {
std.debug.print("Zig version '{}' must be greater than tagged ancestor '{}'\n", .{ zig_version, ancestor_ver });
std.debug.print("Zig version '{f}' must be greater than tagged ancestor '{f}'\n", .{ zig_version, ancestor_ver });
std.process.exit(1);
}
@ -1449,7 +1449,7 @@ fn generateLangRef(b: *std.Build) std.Build.LazyPath {
}
var dir = b.build_root.handle.openDir("doc/langref", .{ .iterate = true }) catch |err| {
std.debug.panic("unable to open '{}doc/langref' directory: {s}", .{
std.debug.panic("unable to open '{f}doc/langref' directory: {s}", .{
b.build_root, @errorName(err),
});
};
@ -1470,7 +1470,7 @@ fn generateLangRef(b: *std.Build) std.Build.LazyPath {
// in a temporary directory
"--cache-root", b.cache_root.path orelse ".",
});
cmd.addArgs(&.{ "--zig-lib-dir", b.fmt("{}", .{b.graph.zig_lib_directory}) });
cmd.addArgs(&.{ "--zig-lib-dir", b.fmt("{f}", .{b.graph.zig_lib_directory}) });
cmd.addArgs(&.{"-i"});
cmd.addFileArg(b.path(b.fmt("doc/langref/{s}", .{entry.name})));

View File

@ -374,7 +374,8 @@
<p>
Most of the time, it is more appropriate to write to stderr rather than stdout, and
whether or not the message is successfully written to the stream is irrelevant.
For this common case, there is a simpler API:
Also, formatted printing often comes in handy. For this common case,
there is a simpler API:
</p>
{#code|hello_again.zig#}

View File

@ -17,7 +17,7 @@ pub fn main() !void {
.maximum = 0.20,
};
const category = threshold.categorize(0.90);
try std.io.getStdOut().writeAll(@tagName(category));
try std.fs.File.stdout().writeAll(@tagName(category));
}
const std = @import("std");

View File

@ -1,8 +1,7 @@
const std = @import("std");
pub fn main() !void {
const stdout = std.io.getStdOut().writer();
try stdout.print("Hello, {s}!\n", .{"world"});
try std.fs.File.stdout().writeAll("Hello, World!\n");
}
// exe=succeed

View File

@ -1,7 +1,7 @@
const std = @import("std");
pub fn main() void {
std.debug.print("Hello, world!\n", .{});
std.debug.print("Hello, {s}!\n", .{"World"});
}
// exe=succeed

View File

@ -1432,7 +1432,7 @@ fn getFileContents(comp: *Compilation, path: []const u8, limit: ?u32) ![]const u
defer buf.deinit();
const max = limit orelse std.math.maxInt(u32);
file.reader().readAllArrayList(&buf, max) catch |e| switch (e) {
file.deprecatedReader().readAllArrayList(&buf, max) catch |e| switch (e) {
error.StreamTooLong => if (limit == null) return e,
else => return e,
};

View File

@ -1,4 +1,5 @@
const std = @import("std");
const assert = std.debug.assert;
const Allocator = mem.Allocator;
const mem = std.mem;
const Source = @import("Source.zig");
@ -323,12 +324,13 @@ pub fn addExtra(
pub fn render(comp: *Compilation, config: std.io.tty.Config) void {
if (comp.diagnostics.list.items.len == 0) return;
var m = defaultMsgWriter(config);
var buffer: [1000]u8 = undefined;
var m = defaultMsgWriter(config, &buffer);
defer m.deinit();
renderMessages(comp, &m);
}
pub fn defaultMsgWriter(config: std.io.tty.Config) MsgWriter {
return MsgWriter.init(config);
pub fn defaultMsgWriter(config: std.io.tty.Config, buffer: []u8) MsgWriter {
return MsgWriter.init(config, buffer);
}
pub fn renderMessages(comp: *Compilation, m: anytype) void {
@ -443,18 +445,13 @@ pub fn renderMessage(comp: *Compilation, m: anytype, msg: Message) void {
printRt(m, prop.msg, .{"{s}"}, .{&str});
} else {
var buf: [3]u8 = undefined;
const str = std.fmt.bufPrint(&buf, "x{x}", .{std.fmt.fmtSliceHexLower(&.{msg.extra.invalid_escape.char})}) catch unreachable;
const str = std.fmt.bufPrint(&buf, "x{x}", .{msg.extra.invalid_escape.char}) catch unreachable;
printRt(m, prop.msg, .{"{s}"}, .{str});
}
},
.normalized => {
const f = struct {
pub fn f(
bytes: []const u8,
comptime _: []const u8,
_: std.fmt.FormatOptions,
writer: anytype,
) !void {
pub fn f(bytes: []const u8, writer: *std.io.Writer) std.io.Writer.Error!void {
var it: std.unicode.Utf8Iterator = .{
.bytes = bytes,
.i = 0,
@ -464,22 +461,16 @@ pub fn renderMessage(comp: *Compilation, m: anytype, msg: Message) void {
try writer.writeByte(@intCast(codepoint));
} else if (codepoint < 0xFFFF) {
try writer.writeAll("\\u");
try std.fmt.formatInt(codepoint, 16, .upper, .{
.fill = '0',
.width = 4,
}, writer);
try writer.printInt(codepoint, 16, .upper, .{ .fill = '0', .width = 4 });
} else {
try writer.writeAll("\\U");
try std.fmt.formatInt(codepoint, 16, .upper, .{
.fill = '0',
.width = 8,
}, writer);
try writer.printInt(codepoint, 16, .upper, .{ .fill = '0', .width = 8 });
}
}
}
}.f;
printRt(m, prop.msg, .{"{s}"}, .{
std.fmt.Formatter(f){ .data = msg.extra.normalized },
printRt(m, prop.msg, .{"{f}"}, .{
std.fmt.Formatter([]const u8, f){ .data = msg.extra.normalized },
});
},
.none, .offset => m.write(prop.msg),
@ -535,32 +526,31 @@ fn tagKind(d: *Diagnostics, tag: Tag, langopts: LangOpts) Kind {
}
const MsgWriter = struct {
w: std.io.BufferedWriter(4096, std.fs.File.Writer),
writer: *std.io.Writer,
config: std.io.tty.Config,
fn init(config: std.io.tty.Config) MsgWriter {
std.debug.lockStdErr();
fn init(config: std.io.tty.Config, buffer: []u8) MsgWriter {
return .{
.w = std.io.bufferedWriter(std.io.getStdErr().writer()),
.writer = std.debug.lockStderrWriter(buffer),
.config = config,
};
}
pub fn deinit(m: *MsgWriter) void {
m.w.flush() catch {};
std.debug.unlockStdErr();
std.debug.unlockStderrWriter();
m.* = undefined;
}
pub fn print(m: *MsgWriter, comptime fmt: []const u8, args: anytype) void {
m.w.writer().print(fmt, args) catch {};
m.writer.print(fmt, args) catch {};
}
fn write(m: *MsgWriter, msg: []const u8) void {
m.w.writer().writeAll(msg) catch {};
m.writer.writeAll(msg) catch {};
}
fn setColor(m: *MsgWriter, color: std.io.tty.Color) void {
m.config.setColor(m.w.writer(), color) catch {};
m.config.setColor(m.writer, color) catch {};
}
fn location(m: *MsgWriter, path: []const u8, line: u32, col: u32) void {

View File

@ -519,7 +519,7 @@ fn option(arg: []const u8, name: []const u8) ?[]const u8 {
fn addSource(d: *Driver, path: []const u8) !Source {
if (mem.eql(u8, "-", path)) {
const stdin = std.io.getStdIn().reader();
const stdin = std.fs.File.stdin().deprecatedReader();
const input = try stdin.readAllAlloc(d.comp.gpa, std.math.maxInt(u32));
defer d.comp.gpa.free(input);
return d.comp.addSourceFromBuffer("<stdin>", input);
@ -541,7 +541,7 @@ pub fn fatal(d: *Driver, comptime fmt: []const u8, args: anytype) error{ FatalEr
}
pub fn renderErrors(d: *Driver) void {
Diagnostics.render(d.comp, d.detectConfig(std.io.getStdErr()));
Diagnostics.render(d.comp, d.detectConfig(std.fs.File.stderr()));
}
pub fn detectConfig(d: *Driver, file: std.fs.File) std.io.tty.Config {
@ -591,7 +591,7 @@ pub fn main(d: *Driver, tc: *Toolchain, args: []const []const u8, comptime fast_
var macro_buf = std.ArrayList(u8).init(d.comp.gpa);
defer macro_buf.deinit();
const std_out = std.io.getStdOut().writer();
const std_out = std.fs.File.stdout().deprecatedWriter();
if (try parseArgs(d, std_out, macro_buf.writer(), args)) return;
const linking = !(d.only_preprocess or d.only_syntax or d.only_compile or d.only_preprocess_and_compile);
@ -686,10 +686,10 @@ fn processSource(
std.fs.cwd().createFile(some, .{}) catch |er|
return d.fatal("unable to create output file '{s}': {s}", .{ some, errorDescription(er) })
else
std.io.getStdOut();
std.fs.File.stdout();
defer if (d.output_name != null) file.close();
var buf_w = std.io.bufferedWriter(file.writer());
var buf_w = std.io.bufferedWriter(file.deprecatedWriter());
pp.prettyPrintTokens(buf_w.writer(), dump_mode) catch |er|
return d.fatal("unable to write result: {s}", .{errorDescription(er)});
@ -704,8 +704,8 @@ fn processSource(
defer tree.deinit();
if (d.verbose_ast) {
const stdout = std.io.getStdOut();
var buf_writer = std.io.bufferedWriter(stdout.writer());
const stdout = std.fs.File.stdout();
var buf_writer = std.io.bufferedWriter(stdout.deprecatedWriter());
tree.dump(d.detectConfig(stdout), buf_writer.writer()) catch {};
buf_writer.flush() catch {};
}
@ -734,8 +734,8 @@ fn processSource(
defer ir.deinit(d.comp.gpa);
if (d.verbose_ir) {
const stdout = std.io.getStdOut();
var buf_writer = std.io.bufferedWriter(stdout.writer());
const stdout = std.fs.File.stdout();
var buf_writer = std.io.bufferedWriter(stdout.deprecatedWriter());
ir.dump(d.comp.gpa, d.detectConfig(stdout), buf_writer.writer()) catch {};
buf_writer.flush() catch {};
}
@ -806,10 +806,10 @@ fn processSource(
}
fn dumpLinkerArgs(items: []const []const u8) !void {
const stdout = std.io.getStdOut().writer();
const stdout = std.fs.File.stdout().deprecatedWriter();
for (items, 0..) |item, i| {
if (i > 0) try stdout.writeByte(' ');
try stdout.print("\"{}\"", .{std.zig.fmtEscapes(item)});
try stdout.print("\"{f}\"", .{std.zig.fmtString(item)});
}
try stdout.writeByte('\n');
}

View File

@ -500,8 +500,8 @@ fn checkDeprecatedUnavailable(p: *Parser, ty: Type, usage_tok: TokenIndex, decl_
const w = p.strings.writer();
const msg_str = p.comp.interner.get(@"error".msg.ref()).bytes;
try w.print("call to '{s}' declared with attribute error: {}", .{
p.tokSlice(@"error".__name_tok), std.zig.fmtEscapes(msg_str),
try w.print("call to '{s}' declared with attribute error: {f}", .{
p.tokSlice(@"error".__name_tok), std.zig.fmtString(msg_str),
});
const str = try p.comp.diagnostics.arena.allocator().dupe(u8, p.strings.items[strings_top..]);
try p.errStr(.error_attribute, usage_tok, str);
@ -512,8 +512,8 @@ fn checkDeprecatedUnavailable(p: *Parser, ty: Type, usage_tok: TokenIndex, decl_
const w = p.strings.writer();
const msg_str = p.comp.interner.get(warning.msg.ref()).bytes;
try w.print("call to '{s}' declared with attribute warning: {}", .{
p.tokSlice(warning.__name_tok), std.zig.fmtEscapes(msg_str),
try w.print("call to '{s}' declared with attribute warning: {f}", .{
p.tokSlice(warning.__name_tok), std.zig.fmtString(msg_str),
});
const str = try p.comp.diagnostics.arena.allocator().dupe(u8, p.strings.items[strings_top..]);
try p.errStr(.warning_attribute, usage_tok, str);
@ -542,7 +542,7 @@ fn errDeprecated(p: *Parser, tag: Diagnostics.Tag, tok_i: TokenIndex, msg: ?Valu
try w.writeAll(reason);
if (msg) |m| {
const str = p.comp.interner.get(m.ref()).bytes;
try w.print(": {}", .{std.zig.fmtEscapes(str)});
try w.print(": {f}", .{std.zig.fmtString(str)});
}
const str = try p.comp.diagnostics.arena.allocator().dupe(u8, p.strings.items[strings_top..]);
return p.errStr(tag, tok_i, str);

View File

@ -811,7 +811,7 @@ fn verboseLog(pp: *Preprocessor, raw: RawToken, comptime fmt: []const u8, args:
const source = pp.comp.getSource(raw.source);
const line_col = source.lineCol(.{ .id = raw.source, .line = raw.line, .byte_offset = raw.start });
const stderr = std.io.getStdErr().writer();
const stderr = std.fs.File.stderr().deprecatedWriter();
var buf_writer = std.io.bufferedWriter(stderr);
const writer = buf_writer.writer();
defer buf_writer.flush() catch {};
@ -3262,7 +3262,8 @@ fn printLinemarker(
// containing the same bytes as the input regardless of encoding.
else => {
try w.writeAll("\\x");
try std.fmt.formatInt(byte, 16, .lower, .{ .width = 2, .fill = '0' }, w);
// TODO try w.printInt(byte, 16, .lower, .{ .width = 2, .fill = '0' });
try w.print("{x:0>2}", .{byte});
},
};
try w.writeByte('"');

View File

@ -961,7 +961,7 @@ pub fn print(v: Value, ty: Type, comp: *const Compilation, w: anytype) @TypeOf(w
switch (key) {
.null => return w.writeAll("nullptr_t"),
.int => |repr| switch (repr) {
inline else => |x| return w.print("{d}", .{x}),
inline .u64, .i64, .big_int => |x| return w.print("{d}", .{x}),
},
.float => |repr| switch (repr) {
.f16 => |x| return w.print("{d}", .{@round(@as(f64, @floatCast(x)) * 1000) / 1000}),
@ -982,7 +982,7 @@ pub fn printString(bytes: []const u8, ty: Type, comp: *const Compilation, w: any
const without_null = bytes[0 .. bytes.len - @intFromEnum(size)];
try w.writeByte('"');
switch (size) {
.@"1" => try w.print("{}", .{std.zig.fmtEscapes(without_null)}),
.@"1" => try w.print("{f}", .{std.zig.fmtString(without_null)}),
.@"2" => {
var items: [2]u16 = undefined;
var i: usize = 0;

View File

@ -171,7 +171,7 @@ pub fn addRelocation(elf: *Elf, name: []const u8, section_kind: Object.Section,
/// strtab
/// section headers
pub fn finish(elf: *Elf, file: std.fs.File) !void {
var buf_writer = std.io.bufferedWriter(file.writer());
var buf_writer = std.io.bufferedWriter(file.deprecatedWriter());
const w = buf_writer.writer();
var num_sections: std.elf.Elf64_Half = additional_sections;

View File

@ -1781,7 +1781,8 @@ test "Macro matching" {
fn renderErrorsAndExit(comp: *aro.Compilation) noreturn {
defer std.process.exit(1);
var writer = aro.Diagnostics.defaultMsgWriter(std.io.tty.detectConfig(std.io.getStdErr()));
var buffer: [1000]u8 = undefined;
var writer = aro.Diagnostics.defaultMsgWriter(std.io.tty.detectConfig(std.fs.File.stderr()), &buffer);
defer writer.deinit(); // writer deinit must run *before* exit so that stderr is flushed
var saw_error = false;
@ -1824,6 +1825,6 @@ pub fn main() !void {
defer tree.deinit(gpa);
const formatted = try tree.render(arena);
try std.io.getStdOut().writeAll(formatted);
try std.fs.File.stdout().writeAll(formatted);
return std.process.cleanExit();
}

View File

@ -849,7 +849,7 @@ const Context = struct {
fn addIdentifier(c: *Context, bytes: []const u8) Allocator.Error!TokenIndex {
if (std.zig.primitives.isPrimitive(bytes))
return c.addTokenFmt(.identifier, "@\"{s}\"", .{bytes});
return c.addTokenFmt(.identifier, "{p}", .{std.zig.fmtId(bytes)});
return c.addTokenFmt(.identifier, "{f}", .{std.zig.fmtIdFlags(bytes, .{ .allow_primitive = true })});
}
fn listToSpan(c: *Context, list: []const NodeIndex) Allocator.Error!NodeSubRange {
@ -1201,7 +1201,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
const compile_error_tok = try c.addToken(.builtin, "@compileError");
_ = try c.addToken(.l_paren, "(");
const err_msg_tok = try c.addTokenFmt(.string_literal, "\"{}\"", .{std.zig.fmtEscapes(payload.mangled)});
const err_msg_tok = try c.addTokenFmt(.string_literal, "\"{f}\"", .{std.zig.fmtString(payload.mangled)});
const err_msg = try c.addNode(.{
.tag = .string_literal,
.main_token = err_msg_tok,
@ -2116,7 +2116,7 @@ fn renderRecord(c: *Context, node: Node) !NodeIndex {
defer c.gpa.free(members);
for (payload.fields, 0..) |field, i| {
const name_tok = try c.addTokenFmt(.identifier, "{p}", .{std.zig.fmtId(field.name)});
const name_tok = try c.addTokenFmt(.identifier, "{f}", .{std.zig.fmtIdFlags(field.name, .{ .allow_primitive = true })});
_ = try c.addToken(.colon, ":");
const type_expr = try renderNode(c, field.type);
@ -2205,7 +2205,7 @@ fn renderFieldAccess(c: *Context, lhs: NodeIndex, field_name: []const u8) !NodeI
.main_token = try c.addToken(.period, "."),
.data = .{ .node_and_token = .{
lhs,
try c.addTokenFmt(.identifier, "{p}", .{std.zig.fmtId(field_name)}),
try c.addTokenFmt(.identifier, "{f}", .{std.zig.fmtIdFlags(field_name, .{ .allow_primitive = true })}),
} },
});
}
@ -2681,7 +2681,7 @@ fn renderVar(c: *Context, node: Node) !NodeIndex {
_ = try c.addToken(.l_paren, "(");
const res = try c.addNode(.{
.tag = .string_literal,
.main_token = try c.addTokenFmt(.string_literal, "\"{}\"", .{std.zig.fmtEscapes(some)}),
.main_token = try c.addTokenFmt(.string_literal, "\"{f}\"", .{std.zig.fmtString(some)}),
.data = undefined,
});
_ = try c.addToken(.r_paren, ")");
@ -2765,7 +2765,7 @@ fn renderFunc(c: *Context, node: Node) !NodeIndex {
_ = try c.addToken(.l_paren, "(");
const res = try c.addNode(.{
.tag = .string_literal,
.main_token = try c.addTokenFmt(.string_literal, "\"{}\"", .{std.zig.fmtEscapes(some)}),
.main_token = try c.addTokenFmt(.string_literal, "\"{f}\"", .{std.zig.fmtString(some)}),
.data = undefined,
});
_ = try c.addToken(.r_paren, ")");

View File

@ -12,6 +12,7 @@ const Watch = std.Build.Watch;
const Fuzz = std.Build.Fuzz;
const Allocator = std.mem.Allocator;
const fatal = std.process.fatal;
const Writer = std.io.Writer;
const runner = @This();
pub const root = @import("@build");
@ -330,7 +331,7 @@ pub fn main() !void {
}
}
const stderr = std.io.getStdErr();
const stderr: std.fs.File = .stderr();
const ttyconf = get_tty_conf(color, stderr);
switch (ttyconf) {
.no_color => try graph.env_map.put("NO_COLOR", "1"),
@ -365,7 +366,7 @@ pub fn main() !void {
.data = buffer.items,
.flags = .{ .exclusive = true },
}) catch |err| {
fatal("unable to write configuration results to '{}{s}': {s}", .{
fatal("unable to write configuration results to '{f}{s}': {s}", .{
local_cache_directory, tmp_sub_path, @errorName(err),
});
};
@ -378,13 +379,19 @@ pub fn main() !void {
validateSystemLibraryOptions(builder);
const stdout_writer = io.getStdOut().writer();
if (help_menu) {
var w = initStdoutWriter();
printUsage(builder, w) catch return stdout_writer_allocation.err.?;
w.flush() catch return stdout_writer_allocation.err.?;
return;
}
if (help_menu)
return usage(builder, stdout_writer);
if (steps_menu)
return steps(builder, stdout_writer);
if (steps_menu) {
var w = initStdoutWriter();
printSteps(builder, w) catch return stdout_writer_allocation.err.?;
w.flush() catch return stdout_writer_allocation.err.?;
return;
}
var run: Run = .{
.max_rss = max_rss,
@ -696,24 +703,23 @@ fn runStepNames(
const ttyconf = run.ttyconf;
if (run.summary != .none) {
std.debug.lockStdErr();
defer std.debug.unlockStdErr();
const stderr = run.stderr;
const w = std.debug.lockStderrWriter(&stdio_buffer_allocation);
defer std.debug.unlockStderrWriter();
const total_count = success_count + failure_count + pending_count + skipped_count;
ttyconf.setColor(stderr, .cyan) catch {};
stderr.writeAll("Build Summary:") catch {};
ttyconf.setColor(stderr, .reset) catch {};
stderr.writer().print(" {d}/{d} steps succeeded", .{ success_count, total_count }) catch {};
if (skipped_count > 0) stderr.writer().print("; {d} skipped", .{skipped_count}) catch {};
if (failure_count > 0) stderr.writer().print("; {d} failed", .{failure_count}) catch {};
ttyconf.setColor(w, .cyan) catch {};
w.writeAll("Build Summary:") catch {};
ttyconf.setColor(w, .reset) catch {};
w.print(" {d}/{d} steps succeeded", .{ success_count, total_count }) catch {};
if (skipped_count > 0) w.print("; {d} skipped", .{skipped_count}) catch {};
if (failure_count > 0) w.print("; {d} failed", .{failure_count}) catch {};
if (test_count > 0) stderr.writer().print("; {d}/{d} tests passed", .{ test_pass_count, test_count }) catch {};
if (test_skip_count > 0) stderr.writer().print("; {d} skipped", .{test_skip_count}) catch {};
if (test_fail_count > 0) stderr.writer().print("; {d} failed", .{test_fail_count}) catch {};
if (test_leak_count > 0) stderr.writer().print("; {d} leaked", .{test_leak_count}) catch {};
if (test_count > 0) w.print("; {d}/{d} tests passed", .{ test_pass_count, test_count }) catch {};
if (test_skip_count > 0) w.print("; {d} skipped", .{test_skip_count}) catch {};
if (test_fail_count > 0) w.print("; {d} failed", .{test_fail_count}) catch {};
if (test_leak_count > 0) w.print("; {d} leaked", .{test_leak_count}) catch {};
stderr.writeAll("\n") catch {};
w.writeAll("\n") catch {};
// Print a fancy tree with build results.
var step_stack_copy = try step_stack.clone(gpa);
@ -722,7 +728,7 @@ fn runStepNames(
var print_node: PrintNode = .{ .parent = null };
if (step_names.len == 0) {
print_node.last = true;
printTreeStep(b, b.default_step, run, stderr, ttyconf, &print_node, &step_stack_copy) catch {};
printTreeStep(b, b.default_step, run, w, ttyconf, &print_node, &step_stack_copy) catch {};
} else {
const last_index = if (run.summary == .all) b.top_level_steps.count() else blk: {
var i: usize = step_names.len;
@ -741,9 +747,10 @@ fn runStepNames(
for (step_names, 0..) |step_name, i| {
const tls = b.top_level_steps.get(step_name).?;
print_node.last = i + 1 == last_index;
printTreeStep(b, &tls.step, run, stderr, ttyconf, &print_node, &step_stack_copy) catch {};
printTreeStep(b, &tls.step, run, w, ttyconf, &print_node, &step_stack_copy) catch {};
}
}
w.writeByte('\n') catch {};
}
if (failure_count == 0) {
@ -775,7 +782,7 @@ const PrintNode = struct {
last: bool = false,
};
fn printPrefix(node: *PrintNode, stderr: File, ttyconf: std.io.tty.Config) !void {
fn printPrefix(node: *PrintNode, stderr: *Writer, ttyconf: std.io.tty.Config) !void {
const parent = node.parent orelse return;
if (parent.parent == null) return;
try printPrefix(parent, stderr, ttyconf);
@ -789,7 +796,7 @@ fn printPrefix(node: *PrintNode, stderr: File, ttyconf: std.io.tty.Config) !void
}
}
fn printChildNodePrefix(stderr: File, ttyconf: std.io.tty.Config) !void {
fn printChildNodePrefix(stderr: *Writer, ttyconf: std.io.tty.Config) !void {
try stderr.writeAll(switch (ttyconf) {
.no_color, .windows_api => "+- ",
.escape_codes => "\x1B\x28\x30\x6d\x71\x1B\x28\x42 ", //
@ -798,7 +805,7 @@ fn printChildNodePrefix(stderr: File, ttyconf: std.io.tty.Config) !void {
fn printStepStatus(
s: *Step,
stderr: File,
stderr: *Writer,
ttyconf: std.io.tty.Config,
run: *const Run,
) !void {
@ -820,10 +827,10 @@ fn printStepStatus(
try stderr.writeAll(" cached");
} else if (s.test_results.test_count > 0) {
const pass_count = s.test_results.passCount();
try stderr.writer().print(" {d} passed", .{pass_count});
try stderr.print(" {d} passed", .{pass_count});
if (s.test_results.skip_count > 0) {
try ttyconf.setColor(stderr, .yellow);
try stderr.writer().print(" {d} skipped", .{s.test_results.skip_count});
try stderr.print(" {d} skipped", .{s.test_results.skip_count});
}
} else {
try stderr.writeAll(" success");
@ -832,15 +839,15 @@ fn printStepStatus(
if (s.result_duration_ns) |ns| {
try ttyconf.setColor(stderr, .dim);
if (ns >= std.time.ns_per_min) {
try stderr.writer().print(" {d}m", .{ns / std.time.ns_per_min});
try stderr.print(" {d}m", .{ns / std.time.ns_per_min});
} else if (ns >= std.time.ns_per_s) {
try stderr.writer().print(" {d}s", .{ns / std.time.ns_per_s});
try stderr.print(" {d}s", .{ns / std.time.ns_per_s});
} else if (ns >= std.time.ns_per_ms) {
try stderr.writer().print(" {d}ms", .{ns / std.time.ns_per_ms});
try stderr.print(" {d}ms", .{ns / std.time.ns_per_ms});
} else if (ns >= std.time.ns_per_us) {
try stderr.writer().print(" {d}us", .{ns / std.time.ns_per_us});
try stderr.print(" {d}us", .{ns / std.time.ns_per_us});
} else {
try stderr.writer().print(" {d}ns", .{ns});
try stderr.print(" {d}ns", .{ns});
}
try ttyconf.setColor(stderr, .reset);
}
@ -848,13 +855,13 @@ fn printStepStatus(
const rss = s.result_peak_rss;
try ttyconf.setColor(stderr, .dim);
if (rss >= 1000_000_000) {
try stderr.writer().print(" MaxRSS:{d}G", .{rss / 1000_000_000});
try stderr.print(" MaxRSS:{d}G", .{rss / 1000_000_000});
} else if (rss >= 1000_000) {
try stderr.writer().print(" MaxRSS:{d}M", .{rss / 1000_000});
try stderr.print(" MaxRSS:{d}M", .{rss / 1000_000});
} else if (rss >= 1000) {
try stderr.writer().print(" MaxRSS:{d}K", .{rss / 1000});
try stderr.print(" MaxRSS:{d}K", .{rss / 1000});
} else {
try stderr.writer().print(" MaxRSS:{d}B", .{rss});
try stderr.print(" MaxRSS:{d}B", .{rss});
}
try ttyconf.setColor(stderr, .reset);
}
@ -866,7 +873,7 @@ fn printStepStatus(
if (skip == .skipped_oom) {
try stderr.writeAll(" (not enough memory)");
try ttyconf.setColor(stderr, .dim);
try stderr.writer().print(" upper bound of {d} exceeded runner limit ({d})", .{ s.max_rss, run.max_rss });
try stderr.print(" upper bound of {d} exceeded runner limit ({d})", .{ s.max_rss, run.max_rss });
try ttyconf.setColor(stderr, .yellow);
}
try stderr.writeAll("\n");
@ -878,23 +885,23 @@ fn printStepStatus(
fn printStepFailure(
s: *Step,
stderr: File,
stderr: *Writer,
ttyconf: std.io.tty.Config,
) !void {
if (s.result_error_bundle.errorMessageCount() > 0) {
try ttyconf.setColor(stderr, .red);
try stderr.writer().print(" {d} errors\n", .{
try stderr.print(" {d} errors\n", .{
s.result_error_bundle.errorMessageCount(),
});
try ttyconf.setColor(stderr, .reset);
} else if (!s.test_results.isSuccess()) {
try stderr.writer().print(" {d}/{d} passed", .{
try stderr.print(" {d}/{d} passed", .{
s.test_results.passCount(), s.test_results.test_count,
});
if (s.test_results.fail_count > 0) {
try stderr.writeAll(", ");
try ttyconf.setColor(stderr, .red);
try stderr.writer().print("{d} failed", .{
try stderr.print("{d} failed", .{
s.test_results.fail_count,
});
try ttyconf.setColor(stderr, .reset);
@ -902,7 +909,7 @@ fn printStepFailure(
if (s.test_results.skip_count > 0) {
try stderr.writeAll(", ");
try ttyconf.setColor(stderr, .yellow);
try stderr.writer().print("{d} skipped", .{
try stderr.print("{d} skipped", .{
s.test_results.skip_count,
});
try ttyconf.setColor(stderr, .reset);
@ -910,7 +917,7 @@ fn printStepFailure(
if (s.test_results.leak_count > 0) {
try stderr.writeAll(", ");
try ttyconf.setColor(stderr, .red);
try stderr.writer().print("{d} leaked", .{
try stderr.print("{d} leaked", .{
s.test_results.leak_count,
});
try ttyconf.setColor(stderr, .reset);
@ -932,7 +939,7 @@ fn printTreeStep(
b: *std.Build,
s: *Step,
run: *const Run,
stderr: File,
stderr: *Writer,
ttyconf: std.io.tty.Config,
parent_node: *PrintNode,
step_stack: *std.AutoArrayHashMapUnmanaged(*Step, void),
@ -992,7 +999,7 @@ fn printTreeStep(
if (s.dependencies.items.len == 0) {
try stderr.writeAll(" (reused)\n");
} else {
try stderr.writer().print(" (+{d} more reused dependencies)\n", .{
try stderr.print(" (+{d} more reused dependencies)\n", .{
s.dependencies.items.len,
});
}
@ -1129,11 +1136,11 @@ fn workerMakeOneStep(
const show_stderr = s.result_stderr.len > 0;
if (show_error_msgs or show_compile_errors or show_stderr) {
std.debug.lockStdErr();
defer std.debug.unlockStdErr();
const bw = std.debug.lockStderrWriter(&stdio_buffer_allocation);
defer std.debug.unlockStderrWriter();
const gpa = b.allocator;
printErrorMessages(gpa, s, .{ .ttyconf = run.ttyconf }, run.stderr, run.prominent_compile_errors) catch {};
printErrorMessages(gpa, s, .{ .ttyconf = run.ttyconf }, bw, run.prominent_compile_errors) catch {};
}
handle_result: {
@ -1190,7 +1197,7 @@ pub fn printErrorMessages(
gpa: Allocator,
failing_step: *Step,
options: std.zig.ErrorBundle.RenderOptions,
stderr: File,
stderr: *Writer,
prominent_compile_errors: bool,
) !void {
// Provide context for where these error messages are coming from by
@ -1209,7 +1216,7 @@ pub fn printErrorMessages(
var indent: usize = 0;
while (step_stack.pop()) |s| : (indent += 1) {
if (indent > 0) {
try stderr.writer().writeByteNTimes(' ', (indent - 1) * 3);
try stderr.splatByteAll(' ', (indent - 1) * 3);
try printChildNodePrefix(stderr, ttyconf);
}
@ -1231,7 +1238,7 @@ pub fn printErrorMessages(
}
if (!prominent_compile_errors and failing_step.result_error_bundle.errorMessageCount() > 0) {
try failing_step.result_error_bundle.renderToWriter(options, stderr.writer());
try failing_step.result_error_bundle.renderToWriter(options, stderr);
}
for (failing_step.result_error_msgs.items) |msg| {
@ -1243,27 +1250,27 @@ pub fn printErrorMessages(
}
}
fn steps(builder: *std.Build, out_stream: anytype) !void {
fn printSteps(builder: *std.Build, w: *Writer) !void {
const allocator = builder.allocator;
for (builder.top_level_steps.values()) |top_level_step| {
const name = if (&top_level_step.step == builder.default_step)
try fmt.allocPrint(allocator, "{s} (default)", .{top_level_step.step.name})
else
top_level_step.step.name;
try out_stream.print(" {s:<28} {s}\n", .{ name, top_level_step.description });
try w.print(" {s:<28} {s}\n", .{ name, top_level_step.description });
}
}
fn usage(b: *std.Build, out_stream: anytype) !void {
try out_stream.print(
fn printUsage(b: *std.Build, w: *Writer) !void {
try w.print(
\\Usage: {s} build [steps] [options]
\\
\\Steps:
\\
, .{b.graph.zig_exe});
try steps(b, out_stream);
try printSteps(b, w);
try out_stream.writeAll(
try w.writeAll(
\\
\\General Options:
\\ -p, --prefix [path] Where to install files (default: zig-out)
@ -1319,25 +1326,25 @@ fn usage(b: *std.Build, out_stream: anytype) !void {
const arena = b.allocator;
if (b.available_options_list.items.len == 0) {
try out_stream.print(" (none)\n", .{});
try w.print(" (none)\n", .{});
} else {
for (b.available_options_list.items) |option| {
const name = try fmt.allocPrint(arena, " -D{s}=[{s}]", .{
option.name,
@tagName(option.type_id),
});
try out_stream.print("{s:<30} {s}\n", .{ name, option.description });
try w.print("{s:<30} {s}\n", .{ name, option.description });
if (option.enum_options) |enum_options| {
const padding = " " ** 33;
try out_stream.writeAll(padding ++ "Supported Values:\n");
try w.writeAll(padding ++ "Supported Values:\n");
for (enum_options) |enum_option| {
try out_stream.print(padding ++ " {s}\n", .{enum_option});
try w.print(padding ++ " {s}\n", .{enum_option});
}
}
}
}
try out_stream.writeAll(
try w.writeAll(
\\
\\System Integration Options:
\\ --search-prefix [path] Add a path to look for binaries, libraries, headers
@ -1352,7 +1359,7 @@ fn usage(b: *std.Build, out_stream: anytype) !void {
\\
);
if (b.graph.system_library_options.entries.len == 0) {
try out_stream.writeAll(" (none) -\n");
try w.writeAll(" (none) -\n");
} else {
for (b.graph.system_library_options.keys(), b.graph.system_library_options.values()) |k, v| {
const status = switch (v) {
@ -1360,11 +1367,11 @@ fn usage(b: *std.Build, out_stream: anytype) !void {
.declared_disabled => "no",
.user_enabled, .user_disabled => unreachable, // already emitted error
};
try out_stream.print(" {s:<43} {s}\n", .{ k, status });
try w.print(" {s:<43} {s}\n", .{ k, status });
}
}
try out_stream.writeAll(
try w.writeAll(
\\
\\Advanced Options:
\\ -freference-trace[=num] How many lines of reference trace should be shown per compile error
@ -1544,3 +1551,11 @@ fn createModuleDependenciesForStep(step: *Step) Allocator.Error!void {
};
}
}
var stdio_buffer_allocation: [256]u8 = undefined;
var stdout_writer_allocation: std.fs.File.Writer = undefined;
fn initStdoutWriter() *Writer {
stdout_writer_allocation = std.fs.File.stdout().writerStreaming(&stdio_buffer_allocation);
return &stdout_writer_allocation.interface;
}

View File

@ -40,7 +40,7 @@ pub fn main() !void {
const arg = args[i];
if (mem.startsWith(u8, arg, "-")) {
if (mem.eql(u8, arg, "-h") or mem.eql(u8, arg, "--help")) {
const stdout = std.io.getStdOut().writer();
const stdout = std.fs.File.stdout().deprecatedWriter();
try stdout.writeAll(usage_libc);
return std.process.cleanExit();
} else if (mem.eql(u8, arg, "-target")) {
@ -97,7 +97,7 @@ pub fn main() !void {
fatal("no include dirs detected for target {s}", .{zig_target});
}
var bw = std.io.bufferedWriter(std.io.getStdOut().writer());
var bw = std.io.bufferedWriter(std.fs.File.stdout().deprecatedWriter());
var writer = bw.writer();
for (libc_dirs.libc_include_dir_list) |include_dir| {
try writer.writeAll(include_dir);
@ -125,7 +125,7 @@ pub fn main() !void {
};
defer libc.deinit(gpa);
var bw = std.io.bufferedWriter(std.io.getStdOut().writer());
var bw = std.io.bufferedWriter(std.fs.File.stdout().deprecatedWriter());
try libc.render(bw.writer());
try bw.flush();
}

View File

@ -54,7 +54,7 @@ fn cmdObjCopy(
fatal("unexpected positional argument: '{s}'", .{arg});
}
} else if (mem.eql(u8, arg, "-h") or mem.eql(u8, arg, "--help")) {
return std.io.getStdOut().writeAll(usage);
return std.fs.File.stdout().writeAll(usage);
} else if (mem.eql(u8, arg, "-O") or mem.eql(u8, arg, "--output-target")) {
i += 1;
if (i >= args.len) fatal("expected another argument after '{s}'", .{arg});
@ -227,8 +227,8 @@ fn cmdObjCopy(
if (listen) {
var server = try Server.init(.{
.gpa = gpa,
.in = std.io.getStdIn(),
.out = std.io.getStdOut(),
.in = .stdin(),
.out = .stdout(),
.zig_version = builtin.zig_version_string,
});
defer server.deinit();
@ -635,11 +635,11 @@ const HexWriter = struct {
const payload_bytes = self.getPayloadBytes();
assert(payload_bytes.len <= MAX_PAYLOAD_LEN);
const line = try std.fmt.bufPrint(&outbuf, ":{0X:0>2}{1X:0>4}{2X:0>2}{3s}{4X:0>2}" ++ linesep, .{
const line = try std.fmt.bufPrint(&outbuf, ":{0X:0>2}{1X:0>4}{2X:0>2}{3X}{4X:0>2}" ++ linesep, .{
@as(u8, @intCast(payload_bytes.len)),
self.address,
@intFromEnum(self.payload),
std.fmt.fmtSliceHexUpper(payload_bytes),
payload_bytes,
self.checksum(),
});
try file.writeAll(line);
@ -1495,7 +1495,7 @@ const ElfFileHelper = struct {
if (size < prefix.len) return null;
try in_file.seekTo(offset);
var section_reader = std.io.limitedReader(in_file.reader(), size);
var section_reader = std.io.limitedReader(in_file.deprecatedReader(), size);
// allocate as large as decompressed data. if the compression doesn't fit, keep the data uncompressed.
const compressed_data = try allocator.alignedAlloc(u8, .@"8", @intCast(size));

View File

@ -68,7 +68,7 @@ pub fn main() !void {
const arg = args[i];
if (mem.startsWith(u8, arg, "-")) {
if (mem.eql(u8, arg, "-h") or mem.eql(u8, arg, "--help")) {
const stdout = std.io.getStdOut().writer();
const stdout = std.fs.File.stdout().deprecatedWriter();
try stdout.writeAll(usage);
return std.process.cleanExit();
} else if (mem.eql(u8, arg, "--")) {

View File

@ -125,13 +125,12 @@ pub const Diagnostics = struct {
}
pub fn renderToStdErr(self: *Diagnostics, args: []const []const u8, config: std.io.tty.Config) void {
std.debug.lockStdErr();
defer std.debug.unlockStdErr();
const stderr = std.io.getStdErr().writer();
const stderr = std.debug.lockStderrWriter(&.{});
defer std.debug.unlockStderrWriter();
self.renderToWriter(args, stderr, config) catch return;
}
pub fn renderToWriter(self: *Diagnostics, args: []const []const u8, writer: anytype, config: std.io.tty.Config) !void {
pub fn renderToWriter(self: *Diagnostics, args: []const []const u8, writer: *std.io.Writer, config: std.io.tty.Config) !void {
for (self.errors.items) |err_details| {
try renderErrorMessage(writer, config, err_details, args);
}
@ -1403,7 +1402,7 @@ test parsePercent {
try std.testing.expectError(error.InvalidFormat, parsePercent("~1"));
}
pub fn renderErrorMessage(writer: anytype, config: std.io.tty.Config, err_details: Diagnostics.ErrorDetails, args: []const []const u8) !void {
pub fn renderErrorMessage(writer: *std.io.Writer, config: std.io.tty.Config, err_details: Diagnostics.ErrorDetails, args: []const []const u8) !void {
try config.setColor(writer, .dim);
try writer.writeAll("<cli>");
try config.setColor(writer, .reset);
@ -1481,27 +1480,27 @@ pub fn renderErrorMessage(writer: anytype, config: std.io.tty.Config, err_detail
try writer.writeByte('\n');
try config.setColor(writer, .green);
try writer.writeByteNTimes(' ', prefix.len);
try writer.splatByteAll(' ', prefix.len);
// Special case for when the option is *only* a prefix (e.g. invalid option: -)
if (err_details.arg_span.prefix_len == arg_with_name.len) {
try writer.writeByteNTimes('^', err_details.arg_span.prefix_len);
try writer.splatByteAll('^', err_details.arg_span.prefix_len);
} else {
try writer.writeByteNTimes('~', err_details.arg_span.prefix_len);
try writer.writeByteNTimes(' ', err_details.arg_span.name_offset - err_details.arg_span.prefix_len);
try writer.splatByteAll('~', err_details.arg_span.prefix_len);
try writer.splatByteAll(' ', err_details.arg_span.name_offset - err_details.arg_span.prefix_len);
if (!err_details.arg_span.point_at_next_arg and err_details.arg_span.value_offset == 0) {
try writer.writeByte('^');
try writer.writeByteNTimes('~', name_slice.len - 1);
try writer.splatByteAll('~', name_slice.len - 1);
} else if (err_details.arg_span.value_offset > 0) {
try writer.writeByteNTimes('~', err_details.arg_span.value_offset - err_details.arg_span.name_offset);
try writer.splatByteAll('~', err_details.arg_span.value_offset - err_details.arg_span.name_offset);
try writer.writeByte('^');
if (err_details.arg_span.value_offset < arg_with_name.len) {
try writer.writeByteNTimes('~', arg_with_name.len - err_details.arg_span.value_offset - 1);
try writer.splatByteAll('~', arg_with_name.len - err_details.arg_span.value_offset - 1);
}
} else if (err_details.arg_span.point_at_next_arg) {
try writer.writeByteNTimes('~', arg_with_name.len - err_details.arg_span.name_offset + 1);
try writer.splatByteAll('~', arg_with_name.len - err_details.arg_span.name_offset + 1);
try writer.writeByte('^');
if (next_arg_len > 0) {
try writer.writeByteNTimes('~', next_arg_len - 1);
try writer.splatByteAll('~', next_arg_len - 1);
}
}
}

View File

@ -570,7 +570,7 @@ pub const Compiler = struct {
switch (predefined_type) {
.GROUP_ICON, .GROUP_CURSOR => {
// Check for animated icon first
if (ani.isAnimatedIcon(file.reader())) {
if (ani.isAnimatedIcon(file.deprecatedReader())) {
// Animated icons are just put into the resource unmodified,
// and the resource type changes to ANIICON/ANICURSOR
@ -586,14 +586,14 @@ pub const Compiler = struct {
try header.write(writer, self.errContext(node.id));
try file.seekTo(0);
try writeResourceData(writer, file.reader(), header.data_size);
try writeResourceData(writer, file.deprecatedReader(), header.data_size);
return;
}
// isAnimatedIcon moved the file cursor so reset to the start
try file.seekTo(0);
const icon_dir = ico.read(self.allocator, file.reader(), try file.getEndPos()) catch |err| switch (err) {
const icon_dir = ico.read(self.allocator, file.deprecatedReader(), try file.getEndPos()) catch |err| switch (err) {
error.OutOfMemory => |e| return e,
else => |e| {
return self.iconReadError(
@ -672,7 +672,7 @@ pub const Compiler = struct {
}
try file.seekTo(entry.data_offset_from_start_of_file);
var header_bytes = file.reader().readBytesNoEof(16) catch {
var header_bytes = file.deprecatedReader().readBytesNoEof(16) catch {
return self.iconReadError(
error.UnexpectedEOF,
filename_utf8,
@ -803,7 +803,7 @@ pub const Compiler = struct {
}
try file.seekTo(entry.data_offset_from_start_of_file);
try writeResourceDataNoPadding(writer, file.reader(), entry.data_size_in_bytes);
try writeResourceDataNoPadding(writer, file.deprecatedReader(), entry.data_size_in_bytes);
try writeDataPadding(writer, full_data_size);
if (self.state.icon_id == std.math.maxInt(u16)) {
@ -859,7 +859,7 @@ pub const Compiler = struct {
header.applyMemoryFlags(node.common_resource_attributes, self.source);
const file_size = try file.getEndPos();
const bitmap_info = bmp.read(file.reader(), file_size) catch |err| {
const bitmap_info = bmp.read(file.deprecatedReader(), file_size) catch |err| {
const filename_string_index = try self.diagnostics.putString(filename_utf8);
return self.addErrorDetailsAndFail(.{
.err = .bmp_read_error,
@ -922,7 +922,7 @@ pub const Compiler = struct {
header.data_size = bmp_bytes_to_write;
try header.write(writer, self.errContext(node.id));
try file.seekTo(bmp.file_header_len);
const file_reader = file.reader();
const file_reader = file.deprecatedReader();
try writeResourceDataNoPadding(writer, file_reader, bitmap_info.dib_header_size);
if (bitmap_info.getBitmasksByteLen() > 0) {
try writeResourceDataNoPadding(writer, file_reader, bitmap_info.getBitmasksByteLen());
@ -968,7 +968,7 @@ pub const Compiler = struct {
header.data_size = @intCast(file_size);
try header.write(writer, self.errContext(node.id));
var header_slurping_reader = headerSlurpingReader(148, file.reader());
var header_slurping_reader = headerSlurpingReader(148, file.deprecatedReader());
try writeResourceData(writer, header_slurping_reader.reader(), header.data_size);
try self.state.font_dir.add(self.arena, FontDir.Font{
@ -1002,7 +1002,7 @@ pub const Compiler = struct {
// We now know that the data size will fit in a u32
header.data_size = @intCast(data_size);
try header.write(writer, self.errContext(node.id));
try writeResourceData(writer, file.reader(), header.data_size);
try writeResourceData(writer, file.deprecatedReader(), header.data_size);
}
fn iconReadError(
@ -2949,7 +2949,7 @@ pub fn HeaderSlurpingReader(comptime size: usize, comptime ReaderType: anytype)
slurped_header: [size]u8 = [_]u8{0x00} ** size,
pub const Error = ReaderType.Error;
pub const Reader = std.io.Reader(*@This(), Error, read);
pub const Reader = std.io.GenericReader(*@This(), Error, read);
pub fn read(self: *@This(), buf: []u8) Error!usize {
const amt = try self.child_reader.read(buf);
@ -2983,7 +2983,7 @@ pub fn LimitedWriter(comptime WriterType: type) type {
bytes_left: u64,
pub const Error = error{NoSpaceLeft} || WriterType.Error;
pub const Writer = std.io.Writer(*Self, Error, write);
pub const Writer = std.io.GenericWriter(*Self, Error, write);
const Self = @This();

View File

@ -1,4 +1,5 @@
const std = @import("std");
const assert = std.debug.assert;
const Token = @import("lex.zig").Token;
const SourceMappings = @import("source_mapping.zig").SourceMappings;
const utils = @import("utils.zig");
@ -61,16 +62,15 @@ pub const Diagnostics = struct {
}
pub fn renderToStdErr(self: *Diagnostics, cwd: std.fs.Dir, source: []const u8, tty_config: std.io.tty.Config, source_mappings: ?SourceMappings) void {
std.debug.lockStdErr();
defer std.debug.unlockStdErr();
const stderr = std.io.getStdErr().writer();
const stderr = std.debug.lockStderrWriter(&.{});
defer std.debug.unlockStderrWriter();
for (self.errors.items) |err_details| {
renderErrorMessage(stderr, tty_config, cwd, err_details, source, self.strings.items, source_mappings) catch return;
}
}
pub fn renderToStdErrDetectTTY(self: *Diagnostics, cwd: std.fs.Dir, source: []const u8, source_mappings: ?SourceMappings) void {
const tty_config = std.io.tty.detectConfig(std.io.getStdErr());
const tty_config = std.io.tty.detectConfig(std.fs.File.stderr());
return self.renderToStdErr(cwd, source, tty_config, source_mappings);
}
@ -409,15 +409,7 @@ pub const ErrorDetails = struct {
failed_to_open_cwd,
};
fn formatToken(
ctx: TokenFormatContext,
comptime fmt: []const u8,
options: std.fmt.FormatOptions,
writer: anytype,
) !void {
_ = fmt;
_ = options;
fn formatToken(ctx: TokenFormatContext, writer: *std.io.Writer) std.io.Writer.Error!void {
switch (ctx.token.id) {
.eof => return writer.writeAll(ctx.token.id.nameForErrorDisplay()),
else => {},
@ -441,7 +433,7 @@ pub const ErrorDetails = struct {
code_page: SupportedCodePage,
};
fn fmtToken(self: ErrorDetails, source: []const u8) std.fmt.Formatter(formatToken) {
fn fmtToken(self: ErrorDetails, source: []const u8) std.fmt.Formatter(TokenFormatContext, formatToken) {
return .{ .data = .{
.token = self.token,
.code_page = self.code_page,
@ -452,7 +444,7 @@ pub const ErrorDetails = struct {
pub fn render(self: ErrorDetails, writer: anytype, source: []const u8, strings: []const []const u8) !void {
switch (self.err) {
.unfinished_string_literal => {
return writer.print("unfinished string literal at '{s}', expected closing '\"'", .{self.fmtToken(source)});
return writer.print("unfinished string literal at '{f}', expected closing '\"'", .{self.fmtToken(source)});
},
.string_literal_too_long => {
return writer.print("string literal too long (max is currently {} characters)", .{self.extra.number});
@ -466,10 +458,14 @@ pub const ErrorDetails = struct {
.hint => return,
},
.illegal_byte => {
return writer.print("character '{s}' is not allowed", .{std.fmt.fmtSliceEscapeUpper(self.token.slice(source))});
return writer.print("character '{f}' is not allowed", .{
std.ascii.hexEscape(self.token.slice(source), .upper),
});
},
.illegal_byte_outside_string_literals => {
return writer.print("character '{s}' is not allowed outside of string literals", .{std.fmt.fmtSliceEscapeUpper(self.token.slice(source))});
return writer.print("character '{f}' is not allowed outside of string literals", .{
std.ascii.hexEscape(self.token.slice(source), .upper),
});
},
.illegal_codepoint_outside_string_literals => {
// This is somewhat hacky, but we know that:
@ -527,26 +523,26 @@ pub const ErrorDetails = struct {
return writer.print("unsupported code page '{s} (id={})' in #pragma code_page", .{ @tagName(code_page), number });
},
.unfinished_raw_data_block => {
return writer.print("unfinished raw data block at '{s}', expected closing '}}' or 'END'", .{self.fmtToken(source)});
return writer.print("unfinished raw data block at '{f}', expected closing '}}' or 'END'", .{self.fmtToken(source)});
},
.unfinished_string_table_block => {
return writer.print("unfinished STRINGTABLE block at '{s}', expected closing '}}' or 'END'", .{self.fmtToken(source)});
return writer.print("unfinished STRINGTABLE block at '{f}', expected closing '}}' or 'END'", .{self.fmtToken(source)});
},
.expected_token => {
return writer.print("expected '{s}', got '{s}'", .{ self.extra.expected.nameForErrorDisplay(), self.fmtToken(source) });
return writer.print("expected '{s}', got '{f}'", .{ self.extra.expected.nameForErrorDisplay(), self.fmtToken(source) });
},
.expected_something_else => {
try writer.writeAll("expected ");
try self.extra.expected_types.writeCommaSeparated(writer);
return writer.print("; got '{s}'", .{self.fmtToken(source)});
return writer.print("; got '{f}'", .{self.fmtToken(source)});
},
.resource_type_cant_use_raw_data => switch (self.type) {
.err, .warning => try writer.print("expected '<filename>', found '{s}' (resource type '{s}' can't use raw data)", .{ self.fmtToken(source), self.extra.resource.nameForErrorDisplay() }),
.note => try writer.print("if '{s}' is intended to be a filename, it must be specified as a quoted string literal", .{self.fmtToken(source)}),
.err, .warning => try writer.print("expected '<filename>', found '{f}' (resource type '{s}' can't use raw data)", .{ self.fmtToken(source), self.extra.resource.nameForErrorDisplay() }),
.note => try writer.print("if '{f}' is intended to be a filename, it must be specified as a quoted string literal", .{self.fmtToken(source)}),
.hint => return,
},
.id_must_be_ordinal => {
try writer.print("id of resource type '{s}' must be an ordinal (u16), got '{s}'", .{ self.extra.resource.nameForErrorDisplay(), self.fmtToken(source) });
try writer.print("id of resource type '{s}' must be an ordinal (u16), got '{f}'", .{ self.extra.resource.nameForErrorDisplay(), self.fmtToken(source) });
},
.name_or_id_not_allowed => {
try writer.print("name or id is not allowed for resource type '{s}'", .{self.extra.resource.nameForErrorDisplay()});
@ -562,7 +558,7 @@ pub const ErrorDetails = struct {
try writer.writeAll("ASCII character not equivalent to virtual key code");
},
.empty_menu_not_allowed => {
try writer.print("empty menu of type '{s}' not allowed", .{self.fmtToken(source)});
try writer.print("empty menu of type '{f}' not allowed", .{self.fmtToken(source)});
},
.rc_would_miscompile_version_value_padding => switch (self.type) {
.err, .warning => return writer.print("the padding before this quoted string value would be miscompiled by the Win32 RC compiler", .{}),
@ -627,7 +623,7 @@ pub const ErrorDetails = struct {
.string_already_defined => switch (self.type) {
.err, .warning => {
const language = self.extra.string_and_language.language;
return writer.print("string with id {d} (0x{X}) already defined for language {}", .{ self.extra.string_and_language.id, self.extra.string_and_language.id, language });
return writer.print("string with id {d} (0x{X}) already defined for language {f}", .{ self.extra.string_and_language.id, self.extra.string_and_language.id, language });
},
.note => return writer.print("previous definition of string with id {d} (0x{X}) here", .{ self.extra.string_and_language.id, self.extra.string_and_language.id }),
.hint => return,
@ -642,7 +638,7 @@ pub const ErrorDetails = struct {
try writer.print("unable to open file '{s}': {s}", .{ strings[self.extra.file_open_error.filename_string_index], @tagName(self.extra.file_open_error.err) });
},
.invalid_accelerator_key => {
try writer.print("invalid accelerator key '{s}': {s}", .{ self.fmtToken(source), @tagName(self.extra.accelerator_error.err) });
try writer.print("invalid accelerator key '{f}': {s}", .{ self.fmtToken(source), @tagName(self.extra.accelerator_error.err) });
},
.accelerator_type_required => {
try writer.writeAll("accelerator type [ASCII or VIRTKEY] required when key is an integer");
@ -898,7 +894,7 @@ fn cellCount(code_page: SupportedCodePage, source: []const u8, start_index: usiz
const truncated_str = "<...truncated...>";
pub fn renderErrorMessage(writer: anytype, tty_config: std.io.tty.Config, cwd: std.fs.Dir, err_details: ErrorDetails, source: []const u8, strings: []const []const u8, source_mappings: ?SourceMappings) !void {
pub fn renderErrorMessage(writer: *std.io.Writer, tty_config: std.io.tty.Config, cwd: std.fs.Dir, err_details: ErrorDetails, source: []const u8, strings: []const []const u8, source_mappings: ?SourceMappings) !void {
if (err_details.type == .hint) return;
const source_line_start = err_details.token.getLineStartForErrorDisplay(source);
@ -981,10 +977,10 @@ pub fn renderErrorMessage(writer: anytype, tty_config: std.io.tty.Config, cwd: s
try tty_config.setColor(writer, .green);
const num_spaces = truncated_visual_info.point_offset - truncated_visual_info.before_len;
try writer.writeByteNTimes(' ', num_spaces);
try writer.writeByteNTimes('~', truncated_visual_info.before_len);
try writer.splatByteAll(' ', num_spaces);
try writer.splatByteAll('~', truncated_visual_info.before_len);
try writer.writeByte('^');
try writer.writeByteNTimes('~', truncated_visual_info.after_len);
try writer.splatByteAll('~', truncated_visual_info.after_len);
try writer.writeByte('\n');
try tty_config.setColor(writer, .reset);
@ -1085,7 +1081,7 @@ const CorrespondingLines = struct {
buffered_reader: BufferedReaderType,
code_page: SupportedCodePage,
const BufferedReaderType = std.io.BufferedReader(512, std.fs.File.Reader);
const BufferedReaderType = std.io.BufferedReader(512, std.fs.File.DeprecatedReader);
pub fn init(cwd: std.fs.Dir, err_details: ErrorDetails, line_for_comparison: []const u8, corresponding_span: SourceMappings.CorrespondingSpan, corresponding_file: []const u8) !CorrespondingLines {
// We don't do line comparison for this error, so don't print the note if the line
@ -1106,7 +1102,7 @@ const CorrespondingLines = struct {
.code_page = err_details.code_page,
};
corresponding_lines.buffered_reader = BufferedReaderType{
.unbuffered_reader = corresponding_lines.file.reader(),
.unbuffered_reader = corresponding_lines.file.deprecatedReader(),
};
errdefer corresponding_lines.deinit();

View File

@ -237,7 +237,9 @@ pub const Lexer = struct {
}
pub fn dump(self: *Self, token: *const Token) void {
std.debug.print("{s}:{d}: {s}\n", .{ @tagName(token.id), token.line_number, std.fmt.fmtSliceEscapeLower(token.slice(self.buffer)) });
std.debug.print("{s}:{d}: {f}\n", .{
@tagName(token.id), token.line_number, std.ascii.hexEscape(token.slice(self.buffer), .lower),
});
}
pub const LexMethod = enum {

View File

@ -22,14 +22,14 @@ pub fn main() !void {
defer arena_state.deinit();
const arena = arena_state.allocator();
const stderr = std.io.getStdErr();
const stderr = std.fs.File.stderr();
const stderr_config = std.io.tty.detectConfig(stderr);
const args = try std.process.argsAlloc(allocator);
defer std.process.argsFree(allocator, args);
if (args.len < 2) {
try renderErrorMessage(stderr.writer(), stderr_config, .err, "expected zig lib dir as first argument", .{});
try renderErrorMessage(std.debug.lockStderrWriter(&.{}), stderr_config, .err, "expected zig lib dir as first argument", .{});
std.process.exit(1);
}
const zig_lib_dir = args[1];
@ -44,7 +44,7 @@ pub fn main() !void {
var error_handler: ErrorHandler = switch (zig_integration) {
true => .{
.server = .{
.out = std.io.getStdOut(),
.out = std.fs.File.stdout(),
.in = undefined, // won't be receiving messages
.receive_fifo = undefined, // won't be receiving messages
},
@ -81,15 +81,15 @@ pub fn main() !void {
defer options.deinit();
if (options.print_help_and_exit) {
const stdout = std.io.getStdOut();
try cli.writeUsage(stdout.writer(), "zig rc");
const stdout = std.fs.File.stdout();
try cli.writeUsage(stdout.deprecatedWriter(), "zig rc");
return;
}
// Don't allow verbose when integrating with Zig via stdout
options.verbose = false;
const stdout_writer = std.io.getStdOut().writer();
const stdout_writer = std.fs.File.stdout().deprecatedWriter();
if (options.verbose) {
try options.dumpVerbose(stdout_writer);
try stdout_writer.writeByte('\n');
@ -290,7 +290,7 @@ pub fn main() !void {
};
defer depfile.close();
const depfile_writer = depfile.writer();
const depfile_writer = depfile.deprecatedWriter();
var depfile_buffered_writer = std.io.bufferedWriter(depfile_writer);
switch (options.depfile_fmt) {
.json => {
@ -343,7 +343,7 @@ pub fn main() !void {
switch (err) {
error.DuplicateResource => {
const duplicate_resource = resources.list.items[cvtres_diagnostics.duplicate_resource];
try error_handler.emitMessage(allocator, .err, "duplicate resource [id: {}, type: {}, language: {}]", .{
try error_handler.emitMessage(allocator, .err, "duplicate resource [id: {f}, type: {f}, language: {f}]", .{
duplicate_resource.name_value,
fmtResourceType(duplicate_resource.type_value),
duplicate_resource.language,
@ -352,7 +352,7 @@ pub fn main() !void {
error.ResourceDataTooLong => {
const overflow_resource = resources.list.items[cvtres_diagnostics.duplicate_resource];
try error_handler.emitMessage(allocator, .err, "resource has a data length that is too large to be written into a coff section", .{});
try error_handler.emitMessage(allocator, .note, "the resource with the invalid size is [id: {}, type: {}, language: {}]", .{
try error_handler.emitMessage(allocator, .note, "the resource with the invalid size is [id: {f}, type: {f}, language: {f}]", .{
overflow_resource.name_value,
fmtResourceType(overflow_resource.type_value),
overflow_resource.language,
@ -361,7 +361,7 @@ pub fn main() !void {
error.TotalResourceDataTooLong => {
const overflow_resource = resources.list.items[cvtres_diagnostics.duplicate_resource];
try error_handler.emitMessage(allocator, .err, "total resource data exceeds the maximum of the coff 'size of raw data' field", .{});
try error_handler.emitMessage(allocator, .note, "size overflow occurred when attempting to write this resource: [id: {}, type: {}, language: {}]", .{
try error_handler.emitMessage(allocator, .note, "size overflow occurred when attempting to write this resource: [id: {f}, type: {f}, language: {f}]", .{
overflow_resource.name_value,
fmtResourceType(overflow_resource.type_value),
overflow_resource.language,
@ -471,7 +471,7 @@ const IoStream = struct {
allocator: std.mem.Allocator,
};
pub const WriteError = std.mem.Allocator.Error || std.fs.File.WriteError;
pub const Writer = std.io.Writer(WriterContext, WriteError, write);
pub const Writer = std.io.GenericWriter(WriterContext, WriteError, write);
pub fn write(ctx: WriterContext, bytes: []const u8) WriteError!usize {
switch (ctx.self.*) {
@ -645,7 +645,9 @@ const ErrorHandler = union(enum) {
},
.tty => {
// extra newline to separate this line from the aro errors
try renderErrorMessage(std.io.getStdErr().writer(), self.tty, .err, "{s}\n", .{fail_msg});
const stderr = std.debug.lockStderrWriter(&.{});
defer std.debug.unlockStderrWriter();
try renderErrorMessage(stderr, self.tty, .err, "{s}\n", .{fail_msg});
aro.Diagnostics.render(comp, self.tty);
},
}
@ -690,7 +692,9 @@ const ErrorHandler = union(enum) {
try server.serveErrorBundle(error_bundle);
},
.tty => {
try renderErrorMessage(std.io.getStdErr().writer(), self.tty, msg_type, format, args);
const stderr = std.debug.lockStderrWriter(&.{});
defer std.debug.unlockStderrWriter();
try renderErrorMessage(stderr, self.tty, msg_type, format, args);
},
}
}

View File

@ -1,4 +1,5 @@
const std = @import("std");
const assert = std.debug.assert;
const rc = @import("rc.zig");
const ResourceType = rc.ResourceType;
const CommonResourceAttributes = rc.CommonResourceAttributes;
@ -163,14 +164,7 @@ pub const Language = packed struct(u16) {
return @bitCast(self);
}
pub fn format(
language: Language,
comptime fmt: []const u8,
options: std.fmt.FormatOptions,
out_stream: anytype,
) !void {
_ = fmt;
_ = options;
pub fn format(language: Language, w: *std.io.Writer) std.io.Writer.Error!void {
const language_id = language.asInt();
const language_name = language_name: {
if (std.enums.fromInt(lang.LanguageId, language_id)) |lang_enum_val| {
@ -181,7 +175,7 @@ pub const Language = packed struct(u16) {
}
break :language_name "<UNKNOWN>";
};
try out_stream.print("{s} (0x{X})", .{ language_name, language_id });
try w.print("{s} (0x{X})", .{ language_name, language_id });
}
};
@ -445,47 +439,33 @@ pub const NameOrOrdinal = union(enum) {
}
}
pub fn format(
self: NameOrOrdinal,
comptime fmt: []const u8,
options: std.fmt.FormatOptions,
out_stream: anytype,
) !void {
_ = fmt;
_ = options;
pub fn format(self: NameOrOrdinal, w: *std.io.Writer) !void {
switch (self) {
.name => |name| {
try out_stream.print("{s}", .{std.unicode.fmtUtf16Le(name)});
try w.print("{f}", .{std.unicode.fmtUtf16Le(name)});
},
.ordinal => |ordinal| {
try out_stream.print("{d}", .{ordinal});
try w.print("{d}", .{ordinal});
},
}
}
fn formatResourceType(
self: NameOrOrdinal,
comptime fmt: []const u8,
options: std.fmt.FormatOptions,
out_stream: anytype,
) !void {
_ = fmt;
_ = options;
fn formatResourceType(self: NameOrOrdinal, w: *std.io.Writer) std.io.Writer.Error!void {
switch (self) {
.name => |name| {
try out_stream.print("{s}", .{std.unicode.fmtUtf16Le(name)});
try w.print("{f}", .{std.unicode.fmtUtf16Le(name)});
},
.ordinal => |ordinal| {
if (std.enums.tagName(RT, @enumFromInt(ordinal))) |predefined_type_name| {
try out_stream.print("{s}", .{predefined_type_name});
try w.print("{s}", .{predefined_type_name});
} else {
try out_stream.print("{d}", .{ordinal});
try w.print("{d}", .{ordinal});
}
},
}
}
pub fn fmtResourceType(type_value: NameOrOrdinal) std.fmt.Formatter(formatResourceType) {
pub fn fmtResourceType(type_value: NameOrOrdinal) std.fmt.Formatter(NameOrOrdinal, formatResourceType) {
return .{ .data = type_value };
}
};

View File

@ -86,7 +86,7 @@ pub const ErrorMessageType = enum { err, warning, note };
/// Used for generic colored errors/warnings/notes, more context-specific error messages
/// are handled elsewhere.
pub fn renderErrorMessage(writer: anytype, config: std.io.tty.Config, msg_type: ErrorMessageType, comptime format: []const u8, args: anytype) !void {
pub fn renderErrorMessage(writer: *std.io.Writer, config: std.io.tty.Config, msg_type: ErrorMessageType, comptime format: []const u8, args: anytype) !void {
switch (msg_type) {
.err => {
try config.setColor(writer, .bold);

View File

@ -7,7 +7,7 @@ const assert = std.debug.assert;
const Cache = std.Build.Cache;
fn usage() noreturn {
io.getStdOut().writeAll(
std.fs.File.stdout().writeAll(
\\Usage: zig std [options]
\\
\\Options:
@ -63,7 +63,7 @@ pub fn main() !void {
var http_server = try address.listen(.{});
const port = http_server.listen_address.in.getPort();
const url_with_newline = try std.fmt.allocPrint(arena, "http://127.0.0.1:{d}/\n", .{port});
std.io.getStdOut().writeAll(url_with_newline) catch {};
std.fs.File.stdout().writeAll(url_with_newline) catch {};
if (should_open_browser) {
openBrowserTab(gpa, url_with_newline[0 .. url_with_newline.len - 1 :'\n']) catch |err| {
std.log.err("unable to open browser: {s}", .{@errorName(err)});

View File

@ -69,8 +69,8 @@ fn mainServer() !void {
@disableInstrumentation();
var server = try std.zig.Server.init(.{
.gpa = fba.allocator(),
.in = std.io.getStdIn(),
.out = std.io.getStdOut(),
.in = .stdin(),
.out = .stdout(),
.zig_version = builtin.zig_version_string,
});
defer server.deinit();
@ -191,7 +191,7 @@ fn mainTerminal() void {
.root_name = "Test",
.estimated_total_items = test_fn_list.len,
});
const have_tty = std.io.getStdErr().isTty();
const have_tty = std.fs.File.stderr().isTty();
var async_frame_buffer: []align(builtin.target.stackAlignment()) u8 = undefined;
// TODO this is on the next line (using `undefined` above) because otherwise zig incorrectly
@ -301,7 +301,7 @@ pub fn mainSimple() anyerror!void {
var failed: u64 = 0;
// we don't want to bring in File and Writer if the backend doesn't support it
const stderr = if (comptime enable_print) std.io.getStdErr() else {};
const stderr = if (comptime enable_print) std.fs.File.stderr() else {};
for (builtin.test_functions) |test_fn| {
if (test_fn.func()) |_| {
@ -328,7 +328,7 @@ pub fn mainSimple() anyerror!void {
passed += 1;
}
if (enable_print and print_summary) {
stderr.writer().print("{} passed, {} skipped, {} failed\n", .{ passed, skipped, failed }) catch {};
stderr.deprecatedWriter().print("{} passed, {} skipped, {} failed\n", .{ passed, skipped, failed }) catch {};
}
if (failed != 0) std.process.exit(1);
}

View File

@ -440,7 +440,7 @@ fn parse(file_name: []const u8, source: []u8) Oom!Ast {
const err_loc = std.zig.findLineColumn(ast.source, err_offset);
rendered_err.clearRetainingCapacity();
try ast.renderError(err, rendered_err.writer(gpa));
log.err("{s}:{}:{}: {s}", .{ file_name, err_loc.line + 1, err_loc.column + 1, rendered_err.items });
log.err("{s}:{d}:{d}: {s}", .{ file_name, err_loc.line + 1, err_loc.column + 1, rendered_err.items });
}
return Ast.parse(gpa, "", .zig);
}

View File

@ -717,9 +717,9 @@ fn render_docs(
try writer.writeAll("<a href=\"#");
_ = missing_feature_url_escape;
try writer.writeAll(g.link_buffer.items);
try writer.print("\">{}</a>", .{markdown.fmtHtml(content)});
try writer.print("\">{f}</a>", .{markdown.fmtHtml(content)});
} else {
try writer.print("{}", .{markdown.fmtHtml(content)});
try writer.print("{f}", .{markdown.fmtHtml(content)});
}
try writer.writeAll("</code>");

View File

@ -145,7 +145,7 @@ fn mainImpl() !void {
var parser = try Parser.init(gpa);
defer parser.deinit();
var stdin_buf = std.io.bufferedReader(std.io.getStdIn().reader());
var stdin_buf = std.io.bufferedReader(std.fs.File.stdin().deprecatedReader());
var line_buf = std.ArrayList(u8).init(gpa);
defer line_buf.deinit();
while (stdin_buf.reader().streamUntilDelimiter(line_buf.writer(), '\n', null)) {
@ -160,7 +160,7 @@ fn mainImpl() !void {
var doc = try parser.endInput();
defer doc.deinit(gpa);
var stdout_buf = std.io.bufferedWriter(std.io.getStdOut().writer());
var stdout_buf = std.io.bufferedWriter(std.fs.File.stdout().deprecatedWriter());
try doc.render(stdout_buf.writer());
try stdout_buf.flush();
}

View File

@ -1,6 +1,7 @@
const std = @import("std");
const Document = @import("Document.zig");
const Node = Document.Node;
const assert = std.debug.assert;
/// A Markdown document renderer.
///
@ -41,7 +42,7 @@ pub fn Renderer(comptime Writer: type, comptime Context: type) type {
if (start == 1) {
try writer.writeAll("<ol>\n");
} else {
try writer.print("<ol start=\"{}\">\n", .{start});
try writer.print("<ol start=\"{d}\">\n", .{start});
}
} else {
try writer.writeAll("<ul>\n");
@ -105,15 +106,15 @@ pub fn Renderer(comptime Writer: type, comptime Context: type) type {
}
},
.heading => {
try writer.print("<h{}>", .{data.heading.level});
try writer.print("<h{d}>", .{data.heading.level});
for (doc.extraChildren(data.heading.children)) |child| {
try r.renderFn(r, doc, child, writer);
}
try writer.print("</h{}>\n", .{data.heading.level});
try writer.print("</h{d}>\n", .{data.heading.level});
},
.code_block => {
const content = doc.string(data.code_block.content);
try writer.print("<pre><code>{}</code></pre>\n", .{fmtHtml(content)});
try writer.print("<pre><code>{f}</code></pre>\n", .{fmtHtml(content)});
},
.blockquote => {
try writer.writeAll("<blockquote>\n");
@ -134,7 +135,7 @@ pub fn Renderer(comptime Writer: type, comptime Context: type) type {
},
.link => {
const target = doc.string(data.link.target);
try writer.print("<a href=\"{}\">", .{fmtHtml(target)});
try writer.print("<a href=\"{f}\">", .{fmtHtml(target)});
for (doc.extraChildren(data.link.children)) |child| {
try r.renderFn(r, doc, child, writer);
}
@ -142,11 +143,11 @@ pub fn Renderer(comptime Writer: type, comptime Context: type) type {
},
.autolink => {
const target = doc.string(data.text.content);
try writer.print("<a href=\"{0}\">{0}</a>", .{fmtHtml(target)});
try writer.print("<a href=\"{0f}\">{0f}</a>", .{fmtHtml(target)});
},
.image => {
const target = doc.string(data.link.target);
try writer.print("<img src=\"{}\" alt=\"", .{fmtHtml(target)});
try writer.print("<img src=\"{f}\" alt=\"", .{fmtHtml(target)});
for (doc.extraChildren(data.link.children)) |child| {
try renderInlineNodeText(doc, child, writer);
}
@ -168,11 +169,11 @@ pub fn Renderer(comptime Writer: type, comptime Context: type) type {
},
.code_span => {
const content = doc.string(data.text.content);
try writer.print("<code>{}</code>", .{fmtHtml(content)});
try writer.print("<code>{f}</code>", .{fmtHtml(content)});
},
.text => {
const content = doc.string(data.text.content);
try writer.print("{}", .{fmtHtml(content)});
try writer.print("{f}", .{fmtHtml(content)});
},
.line_break => {
try writer.writeAll("<br />\n");
@ -221,7 +222,7 @@ pub fn renderInlineNodeText(
},
.autolink, .code_span, .text => {
const content = doc.string(data.text.content);
try writer.print("{}", .{fmtHtml(content)});
try writer.print("{f}", .{fmtHtml(content)});
},
.line_break => {
try writer.writeAll("\n");
@ -229,18 +230,11 @@ pub fn renderInlineNodeText(
}
}
pub fn fmtHtml(bytes: []const u8) std.fmt.Formatter(formatHtml) {
pub fn fmtHtml(bytes: []const u8) std.fmt.Formatter([]const u8, formatHtml) {
return .{ .data = bytes };
}
fn formatHtml(
bytes: []const u8,
comptime fmt: []const u8,
options: std.fmt.FormatOptions,
writer: anytype,
) !void {
_ = fmt;
_ = options;
fn formatHtml(bytes: []const u8, writer: *std.io.Writer) std.io.Writer.Error!void {
for (bytes) |b| {
switch (b) {
'<' => try writer.writeAll("&lt;"),

View File

@ -9,7 +9,8 @@ pub const std_options = std.Options{
.logFn = logOverride,
};
var log_file: ?std.fs.File = null;
var log_file_buffer: [256]u8 = undefined;
var log_file_writer: ?std.fs.File.Writer = null;
fn logOverride(
comptime level: std.log.Level,
@ -17,15 +18,17 @@ fn logOverride(
comptime format: []const u8,
args: anytype,
) void {
const f = if (log_file) |f| f else f: {
const fw = if (log_file_writer) |*f| f else f: {
const f = fuzzer.cache_dir.createFile("tmp/libfuzzer.log", .{}) catch
@panic("failed to open fuzzer log file");
log_file = f;
break :f f;
log_file_writer = f.writer(&log_file_buffer);
break :f &log_file_writer.?;
};
const prefix1 = comptime level.asText();
const prefix2 = if (scope == .default) ": " else "(" ++ @tagName(scope) ++ "): ";
f.writer().print(prefix1 ++ prefix2 ++ format ++ "\n", args) catch @panic("failed to write to fuzzer log");
fw.interface.print(prefix1 ++ prefix2 ++ format ++ "\n", args) catch
@panic("failed to write to fuzzer log");
fw.interface.flush() catch @panic("failed to flush fuzzer log");
}
/// Helps determine run uniqueness in the face of recursion.
@ -226,18 +229,18 @@ const Fuzzer = struct {
.read = true,
}) catch |e| switch (e) {
error.PathAlreadyExists => continue,
else => fatal("unable to create '{}{d}: {s}", .{ f.corpus_directory, i, @errorName(err) }),
else => fatal("unable to create '{f}{d}: {s}", .{ f.corpus_directory, i, @errorName(err) }),
};
errdefer input_file.close();
// Initialize the mmap for the current input.
f.input = MemoryMappedList.create(input_file, 0, std.heap.page_size_max) catch |e| {
fatal("unable to init memory map for input at '{}{d}': {s}", .{
fatal("unable to init memory map for input at '{f}{d}': {s}", .{
f.corpus_directory, i, @errorName(e),
});
};
break;
},
else => fatal("unable to read '{}{d}': {s}", .{ f.corpus_directory, i, @errorName(err) }),
else => fatal("unable to read '{f}{d}': {s}", .{ f.corpus_directory, i, @errorName(err) }),
};
errdefer gpa.free(input);
f.corpus.append(gpa, .{
@ -263,7 +266,7 @@ const Fuzzer = struct {
const sub_path = try std.fmt.allocPrint(gpa, "f/{s}", .{f.unit_test_name});
f.corpus_directory = .{
.handle = f.cache_dir.makeOpenPath(sub_path, .{}) catch |err|
fatal("unable to open corpus directory 'f/{s}': {s}", .{ sub_path, @errorName(err) }),
fatal("unable to open corpus directory 'f/{s}': {t}", .{ sub_path, err }),
.path = sub_path,
};
initNextInput(f);

View File

@ -5,7 +5,7 @@ pub fn bufferedPrint() !void {
// Stdout is for the actual output of your application, for example if you
// are implementing gzip, then only the compressed bytes should be sent to
// stdout, not any debugging messages.
const stdout_file = std.io.getStdOut().writer();
const stdout_file = std.fs.File.stdout().deprecatedWriter();
// Buffering can improve performance significantly in print-heavy programs.
var bw = std.io.bufferedWriter(stdout_file);
const stdout = bw.writer();

View File

@ -284,7 +284,7 @@ pub fn create(
.h_dir = undefined,
.dest_dir = graph.env_map.get("DESTDIR"),
.install_tls = .{
.step = Step.init(.{
.step = .init(.{
.id = TopLevelStep.base_id,
.name = "install",
.owner = b,
@ -292,7 +292,7 @@ pub fn create(
.description = "Copy build artifacts to prefix path",
},
.uninstall_tls = .{
.step = Step.init(.{
.step = .init(.{
.id = TopLevelStep.base_id,
.name = "uninstall",
.owner = b,
@ -342,7 +342,7 @@ fn createChildOnly(
.graph = parent.graph,
.allocator = allocator,
.install_tls = .{
.step = Step.init(.{
.step = .init(.{
.id = TopLevelStep.base_id,
.name = "install",
.owner = child,
@ -350,7 +350,7 @@ fn createChildOnly(
.description = "Copy build artifacts to prefix path",
},
.uninstall_tls = .{
.step = Step.init(.{
.step = .init(.{
.id = TopLevelStep.base_id,
.name = "uninstall",
.owner = child,
@ -1525,7 +1525,7 @@ pub fn option(b: *Build, comptime T: type, name_raw: []const u8, description_raw
pub fn step(b: *Build, name: []const u8, description: []const u8) *Step {
const step_info = b.allocator.create(TopLevelStep) catch @panic("OOM");
step_info.* = .{
.step = Step.init(.{
.step = .init(.{
.id = TopLevelStep.base_id,
.name = name,
.owner = b,
@ -1745,7 +1745,7 @@ pub fn addUserInputOption(b: *Build, name_raw: []const u8, value_raw: []const u8
return true;
},
.lazy_path, .lazy_path_list => {
log.warn("the lazy path value type isn't added from the CLI, but somehow '{s}' is a .{}", .{ name, std.zig.fmtId(@tagName(gop.value_ptr.value)) });
log.warn("the lazy path value type isn't added from the CLI, but somehow '{s}' is a .{f}", .{ name, std.zig.fmtId(@tagName(gop.value_ptr.value)) });
return true;
},
}
@ -1824,13 +1824,13 @@ pub fn validateUserInputDidItFail(b: *Build) bool {
return b.invalid_user_input;
}
fn allocPrintCmd(ally: Allocator, opt_cwd: ?[]const u8, argv: []const []const u8) error{OutOfMemory}![]u8 {
var buf = ArrayList(u8).init(ally);
if (opt_cwd) |cwd| try buf.writer().print("cd {s} && ", .{cwd});
fn allocPrintCmd(gpa: Allocator, opt_cwd: ?[]const u8, argv: []const []const u8) error{OutOfMemory}![]u8 {
var buf: std.ArrayListUnmanaged(u8) = .empty;
if (opt_cwd) |cwd| try buf.print(gpa, "cd {s} && ", .{cwd});
for (argv) |arg| {
try buf.writer().print("{s} ", .{arg});
try buf.print(gpa, "{s} ", .{arg});
}
return buf.toOwnedSlice();
return buf.toOwnedSlice(gpa);
}
fn printCmd(ally: Allocator, cwd: ?[]const u8, argv: []const []const u8) void {
@ -2059,7 +2059,7 @@ pub fn runAllowFail(
try Step.handleVerbose2(b, null, child.env_map, argv);
try child.spawn();
const stdout = child.stdout.?.reader().readAllAlloc(b.allocator, max_output_size) catch {
const stdout = child.stdout.?.deprecatedReader().readAllAlloc(b.allocator, max_output_size) catch {
return error.ReadFailure;
};
errdefer b.allocator.free(stdout);
@ -2466,10 +2466,9 @@ pub const GeneratedFile = struct {
pub fn getPath2(gen: GeneratedFile, src_builder: *Build, asking_step: ?*Step) []const u8 {
return gen.path orelse {
std.debug.lockStdErr();
const stderr = std.io.getStdErr();
dumpBadGetPathHelp(gen.step, stderr, src_builder, asking_step) catch {};
std.debug.unlockStdErr();
const w = debug.lockStderrWriter(&.{});
dumpBadGetPathHelp(gen.step, w, .detect(.stderr()), src_builder, asking_step) catch {};
debug.unlockStderrWriter();
@panic("misconfigured build script");
};
}
@ -2676,10 +2675,9 @@ pub const LazyPath = union(enum) {
var file_path: Cache.Path = .{
.root_dir = Cache.Directory.cwd(),
.sub_path = gen.file.path orelse {
std.debug.lockStdErr();
const stderr = std.io.getStdErr();
dumpBadGetPathHelp(gen.file.step, stderr, src_builder, asking_step) catch {};
std.debug.unlockStdErr();
const w = debug.lockStderrWriter(&.{});
dumpBadGetPathHelp(gen.file.step, w, .detect(.stderr()), src_builder, asking_step) catch {};
debug.unlockStderrWriter();
@panic("misconfigured build script");
},
};
@ -2766,44 +2764,42 @@ fn dumpBadDirnameHelp(
comptime msg: []const u8,
args: anytype,
) anyerror!void {
debug.lockStdErr();
defer debug.unlockStdErr();
const w = debug.lockStderrWriter(&.{});
defer debug.unlockStderrWriter();
const stderr = io.getStdErr();
const w = stderr.writer();
try w.print(msg, args);
const tty_config = std.io.tty.detectConfig(stderr);
const tty_config = std.io.tty.detectConfig(.stderr());
if (fail_step) |s| {
tty_config.setColor(w, .red) catch {};
try stderr.writeAll(" The step was created by this stack trace:\n");
try w.writeAll(" The step was created by this stack trace:\n");
tty_config.setColor(w, .reset) catch {};
s.dump(stderr);
s.dump(w, tty_config);
}
if (asking_step) |as| {
tty_config.setColor(w, .red) catch {};
try stderr.writer().print(" The step '{s}' that is missing a dependency on the above step was created by this stack trace:\n", .{as.name});
try w.print(" The step '{s}' that is missing a dependency on the above step was created by this stack trace:\n", .{as.name});
tty_config.setColor(w, .reset) catch {};
as.dump(stderr);
as.dump(w, tty_config);
}
tty_config.setColor(w, .red) catch {};
try stderr.writeAll(" Hope that helps. Proceeding to panic.\n");
try w.writeAll(" Hope that helps. Proceeding to panic.\n");
tty_config.setColor(w, .reset) catch {};
}
/// In this function the stderr mutex has already been locked.
pub fn dumpBadGetPathHelp(
s: *Step,
stderr: fs.File,
w: *std.io.Writer,
tty_config: std.io.tty.Config,
src_builder: *Build,
asking_step: ?*Step,
) anyerror!void {
const w = stderr.writer();
try w.print(
\\getPath() was called on a GeneratedFile that wasn't built yet.
\\ source package path: {s}
@ -2814,21 +2810,20 @@ pub fn dumpBadGetPathHelp(
s.name,
});
const tty_config = std.io.tty.detectConfig(stderr);
tty_config.setColor(w, .red) catch {};
try stderr.writeAll(" The step was created by this stack trace:\n");
try w.writeAll(" The step was created by this stack trace:\n");
tty_config.setColor(w, .reset) catch {};
s.dump(stderr);
s.dump(w, tty_config);
if (asking_step) |as| {
tty_config.setColor(w, .red) catch {};
try stderr.writer().print(" The step '{s}' that is missing a dependency on the above step was created by this stack trace:\n", .{as.name});
try w.print(" The step '{s}' that is missing a dependency on the above step was created by this stack trace:\n", .{as.name});
tty_config.setColor(w, .reset) catch {};
as.dump(stderr);
as.dump(w, tty_config);
}
tty_config.setColor(w, .red) catch {};
try stderr.writeAll(" Hope that helps. Proceeding to panic.\n");
try w.writeAll(" Hope that helps. Proceeding to panic.\n");
tty_config.setColor(w, .reset) catch {};
}
@ -2866,11 +2861,6 @@ pub fn makeTempPath(b: *Build) []const u8 {
return result_path;
}
/// Deprecated; use `std.fmt.hex` instead.
pub fn hex64(x: u64) [16]u8 {
return std.fmt.hex(x);
}
/// A pair of target query and fully resolved target.
/// This type is generally required by build system API that need to be given a
/// target. The query is kept because the Zig toolchain needs to know which parts

View File

@ -2,6 +2,18 @@
//! This is not a general-purpose cache. It is designed to be fast and simple,
//! not to withstand attacks using specially-crafted input.
const Cache = @This();
const std = @import("std");
const builtin = @import("builtin");
const crypto = std.crypto;
const fs = std.fs;
const assert = std.debug.assert;
const testing = std.testing;
const mem = std.mem;
const fmt = std.fmt;
const Allocator = std.mem.Allocator;
const log = std.log.scoped(.cache);
gpa: Allocator,
manifest_dir: fs.Dir,
hash: HashHelper = .{},
@ -21,18 +33,6 @@ pub const Path = @import("Cache/Path.zig");
pub const Directory = @import("Cache/Directory.zig");
pub const DepTokenizer = @import("Cache/DepTokenizer.zig");
const Cache = @This();
const std = @import("std");
const builtin = @import("builtin");
const crypto = std.crypto;
const fs = std.fs;
const assert = std.debug.assert;
const testing = std.testing;
const mem = std.mem;
const fmt = std.fmt;
const Allocator = std.mem.Allocator;
const log = std.log.scoped(.cache);
pub fn addPrefix(cache: *Cache, directory: Directory) void {
cache.prefixes_buffer[cache.prefixes_len] = directory;
cache.prefixes_len += 1;
@ -68,7 +68,7 @@ const PrefixedPath = struct {
fn findPrefix(cache: *const Cache, file_path: []const u8) !PrefixedPath {
const gpa = cache.gpa;
const resolved_path = try fs.path.resolve(gpa, &[_][]const u8{file_path});
const resolved_path = try fs.path.resolve(gpa, &.{file_path});
errdefer gpa.free(resolved_path);
return findPrefixResolved(cache, resolved_path);
}
@ -132,7 +132,7 @@ pub const Hasher = crypto.auth.siphash.SipHash128(1, 3);
/// Initial state with random bytes, that can be copied.
/// Refresh this with new random bytes when the manifest
/// format is modified in a non-backwards-compatible way.
pub const hasher_init: Hasher = Hasher.init(&[_]u8{
pub const hasher_init: Hasher = Hasher.init(&.{
0x33, 0x52, 0xa2, 0x84,
0xcf, 0x17, 0x56, 0x57,
0x01, 0xbb, 0xcd, 0xe4,
@ -286,11 +286,8 @@ pub const HashHelper = struct {
pub fn binToHex(bin_digest: BinDigest) HexDigest {
var out_digest: HexDigest = undefined;
_ = fmt.bufPrint(
&out_digest,
"{s}",
.{fmt.fmtSliceHexLower(&bin_digest)},
) catch unreachable;
var w: std.io.Writer = .fixed(&out_digest);
w.printHex(&bin_digest, .lower) catch unreachable;
return out_digest;
}
@ -337,7 +334,6 @@ pub const Manifest = struct {
manifest_create: fs.File.OpenError,
manifest_read: fs.File.ReadError,
manifest_lock: fs.File.LockError,
manifest_seek: fs.File.SeekError,
file_open: FileOp,
file_stat: FileOp,
file_read: FileOp,
@ -611,12 +607,6 @@ pub const Manifest = struct {
var file = self.files.pop().?;
file.key.deinit(self.cache.gpa);
}
// Also, seek the file back to the start.
self.manifest_file.?.seekTo(0) catch |err| {
self.diagnostic = .{ .manifest_seek = err };
return error.CacheCheckFailed;
};
switch (try self.hitWithCurrentLock()) {
.hit => break :hit,
.miss => |m| break :digests m.file_digests_populated,
@ -661,9 +651,8 @@ pub const Manifest = struct {
return true;
}
/// Assumes that `self.hash.hasher` has been updated only with the original digest, that
/// `self.files` contains only the original input files, and that `self.manifest_file.?` is
/// seeked to the start of the file.
/// Assumes that `self.hash.hasher` has been updated only with the original digest and that
/// `self.files` contains only the original input files.
fn hitWithCurrentLock(self: *Manifest) HitError!union(enum) {
hit,
miss: struct {
@ -672,12 +661,13 @@ pub const Manifest = struct {
} {
const gpa = self.cache.gpa;
const input_file_count = self.files.entries.len;
const file_contents = self.manifest_file.?.reader().readAllAlloc(gpa, manifest_file_size_max) catch |err| switch (err) {
var manifest_reader = self.manifest_file.?.reader(&.{}); // Reads positionally from zero.
const limit: std.io.Limit = .limited(manifest_file_size_max);
const file_contents = manifest_reader.interface.allocRemaining(gpa, limit) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.StreamTooLong => return error.OutOfMemory,
else => |e| {
self.diagnostic = .{ .manifest_read = e };
error.ReadFailed => {
self.diagnostic = .{ .manifest_read = manifest_reader.err.? };
return error.CacheCheckFailed;
},
};
@ -1063,14 +1053,17 @@ pub const Manifest = struct {
}
fn addDepFileMaybePost(self: *Manifest, dir: fs.Dir, dep_file_basename: []const u8) !void {
const dep_file_contents = try dir.readFileAlloc(self.cache.gpa, dep_file_basename, manifest_file_size_max);
defer self.cache.gpa.free(dep_file_contents);
const gpa = self.cache.gpa;
const dep_file_contents = try dir.readFileAlloc(gpa, dep_file_basename, manifest_file_size_max);
defer gpa.free(dep_file_contents);
var error_buf = std.ArrayList(u8).init(self.cache.gpa);
defer error_buf.deinit();
var error_buf: std.ArrayListUnmanaged(u8) = .empty;
defer error_buf.deinit(gpa);
var resolve_buf: std.ArrayListUnmanaged(u8) = .empty;
defer resolve_buf.deinit(gpa);
var it: DepTokenizer = .{ .bytes = dep_file_contents };
while (it.next()) |token| {
switch (token) {
// We don't care about targets, we only want the prereqs
@ -1080,16 +1073,14 @@ pub const Manifest = struct {
_ = try self.addFile(file_path, null);
} else try self.addFilePost(file_path),
.prereq_must_resolve => {
var resolve_buf = std.ArrayList(u8).init(self.cache.gpa);
defer resolve_buf.deinit();
try token.resolve(resolve_buf.writer());
resolve_buf.clearRetainingCapacity();
try token.resolve(gpa, &resolve_buf);
if (self.manifest_file == null) {
_ = try self.addFile(resolve_buf.items, null);
} else try self.addFilePost(resolve_buf.items);
},
else => |err| {
try err.printError(error_buf.writer());
try err.printError(gpa, &error_buf);
log.err("failed parsing {s}: {s}", .{ dep_file_basename, error_buf.items });
return error.InvalidDepFile;
},
@ -1127,24 +1118,12 @@ pub const Manifest = struct {
if (self.manifest_dirty) {
self.manifest_dirty = false;
var contents = std.ArrayList(u8).init(self.cache.gpa);
defer contents.deinit();
const writer = contents.writer();
try writer.writeAll(manifest_header ++ "\n");
for (self.files.keys()) |file| {
try writer.print("{d} {d} {d} {} {d} {s}\n", .{
file.stat.size,
file.stat.inode,
file.stat.mtime,
fmt.fmtSliceHexLower(&file.bin_digest),
file.prefixed_path.prefix,
file.prefixed_path.sub_path,
});
}
try manifest_file.setEndPos(contents.items.len);
try manifest_file.pwriteAll(contents.items, 0);
var buffer: [4000]u8 = undefined;
var fw = manifest_file.writer(&buffer);
writeDirtyManifestToStream(self, &fw) catch |err| switch (err) {
error.WriteFailed => return fw.err.?,
else => |e| return e,
};
}
if (self.want_shared_lock) {
@ -1152,6 +1131,21 @@ pub const Manifest = struct {
}
}
fn writeDirtyManifestToStream(self: *Manifest, fw: *fs.File.Writer) !void {
try fw.interface.writeAll(manifest_header ++ "\n");
for (self.files.keys()) |file| {
try fw.interface.print("{d} {d} {d} {x} {d} {s}\n", .{
file.stat.size,
file.stat.inode,
file.stat.mtime,
&file.bin_digest,
file.prefixed_path.prefix,
file.prefixed_path.sub_path,
});
}
try fw.end();
}
fn downgradeToSharedLock(self: *Manifest) !void {
if (!self.have_exclusive_lock) return;

View File

@ -7,6 +7,7 @@ state: State = .lhs,
const std = @import("std");
const testing = std.testing;
const assert = std.debug.assert;
const Allocator = std.mem.Allocator;
pub fn next(self: *Tokenizer) ?Token {
var start = self.index;
@ -362,7 +363,7 @@ pub const Token = union(enum) {
};
/// Resolve escapes in target or prereq. Only valid with .target_must_resolve or .prereq_must_resolve.
pub fn resolve(self: Token, writer: anytype) @TypeOf(writer).Error!void {
pub fn resolve(self: Token, gpa: Allocator, list: *std.ArrayListUnmanaged(u8)) error{OutOfMemory}!void {
switch (self) {
.target_must_resolve => |bytes| {
var state: enum { start, escape, dollar } = .start;
@ -372,27 +373,27 @@ pub const Token = union(enum) {
switch (c) {
'\\' => state = .escape,
'$' => state = .dollar,
else => try writer.writeByte(c),
else => try list.append(gpa, c),
}
},
.escape => {
switch (c) {
' ', '#', '\\' => {},
'$' => {
try writer.writeByte('\\');
try list.append(gpa, '\\');
state = .dollar;
continue;
},
else => try writer.writeByte('\\'),
else => try list.append(gpa, '\\'),
}
try writer.writeByte(c);
try list.append(gpa, c);
state = .start;
},
.dollar => {
try writer.writeByte('$');
try list.append(gpa, '$');
switch (c) {
'$' => {},
else => try writer.writeByte(c),
else => try list.append(gpa, c),
}
state = .start;
},
@ -406,19 +407,19 @@ pub const Token = union(enum) {
.start => {
switch (c) {
'\\' => state = .escape,
else => try writer.writeByte(c),
else => try list.append(gpa, c),
}
},
.escape => {
switch (c) {
' ' => {},
'\\' => {
try writer.writeByte(c);
try list.append(gpa, c);
continue;
},
else => try writer.writeByte('\\'),
else => try list.append(gpa, '\\'),
}
try writer.writeByte(c);
try list.append(gpa, c);
state = .start;
},
}
@ -428,20 +429,20 @@ pub const Token = union(enum) {
}
}
pub fn printError(self: Token, writer: anytype) @TypeOf(writer).Error!void {
pub fn printError(self: Token, gpa: Allocator, list: *std.ArrayListUnmanaged(u8)) error{OutOfMemory}!void {
switch (self) {
.target, .target_must_resolve, .prereq, .prereq_must_resolve => unreachable, // not an error
.incomplete_quoted_prerequisite,
.incomplete_target,
=> |index_and_bytes| {
try writer.print("{s} '", .{self.errStr()});
try list.print(gpa, "{s} '", .{self.errStr()});
if (self == .incomplete_target) {
const tmp = Token{ .target_must_resolve = index_and_bytes.bytes };
try tmp.resolve(writer);
try tmp.resolve(gpa, list);
} else {
try printCharValues(writer, index_and_bytes.bytes);
try printCharValues(gpa, list, index_and_bytes.bytes);
}
try writer.print("' at position {d}", .{index_and_bytes.index});
try list.print(gpa, "' at position {d}", .{index_and_bytes.index});
},
.invalid_target,
.bad_target_escape,
@ -450,9 +451,9 @@ pub const Token = union(enum) {
.incomplete_escape,
.expected_colon,
=> |index_and_char| {
try writer.writeAll("illegal char ");
try printUnderstandableChar(writer, index_and_char.char);
try writer.print(" at position {d}: {s}", .{ index_and_char.index, self.errStr() });
try list.appendSlice(gpa, "illegal char ");
try printUnderstandableChar(gpa, list, index_and_char.char);
try list.print(gpa, " at position {d}: {s}", .{ index_and_char.index, self.errStr() });
},
}
}
@ -1026,41 +1027,41 @@ fn depTokenizer(input: []const u8, expect: []const u8) !void {
defer arena_allocator.deinit();
var it: Tokenizer = .{ .bytes = input };
var buffer = std.ArrayList(u8).init(arena);
var resolve_buf = std.ArrayList(u8).init(arena);
var buffer: std.ArrayListUnmanaged(u8) = .empty;
var resolve_buf: std.ArrayListUnmanaged(u8) = .empty;
var i: usize = 0;
while (it.next()) |token| {
if (i != 0) try buffer.appendSlice("\n");
if (i != 0) try buffer.appendSlice(arena, "\n");
switch (token) {
.target, .prereq => |bytes| {
try buffer.appendSlice(@tagName(token));
try buffer.appendSlice(" = {");
try buffer.appendSlice(arena, @tagName(token));
try buffer.appendSlice(arena, " = {");
for (bytes) |b| {
try buffer.append(printable_char_tab[b]);
try buffer.append(arena, printable_char_tab[b]);
}
try buffer.appendSlice("}");
try buffer.appendSlice(arena, "}");
},
.target_must_resolve => {
try buffer.appendSlice("target = {");
try token.resolve(resolve_buf.writer());
try buffer.appendSlice(arena, "target = {");
try token.resolve(arena, &resolve_buf);
for (resolve_buf.items) |b| {
try buffer.append(printable_char_tab[b]);
try buffer.append(arena, printable_char_tab[b]);
}
resolve_buf.items.len = 0;
try buffer.appendSlice("}");
try buffer.appendSlice(arena, "}");
},
.prereq_must_resolve => {
try buffer.appendSlice("prereq = {");
try token.resolve(resolve_buf.writer());
try buffer.appendSlice(arena, "prereq = {");
try token.resolve(arena, &resolve_buf);
for (resolve_buf.items) |b| {
try buffer.append(printable_char_tab[b]);
try buffer.append(arena, printable_char_tab[b]);
}
resolve_buf.items.len = 0;
try buffer.appendSlice("}");
try buffer.appendSlice(arena, "}");
},
else => {
try buffer.appendSlice("ERROR: ");
try token.printError(buffer.writer());
try buffer.appendSlice(arena, "ERROR: ");
try token.printError(arena, &buffer);
break;
},
}
@ -1072,134 +1073,18 @@ fn depTokenizer(input: []const u8, expect: []const u8) !void {
return;
}
const out = std.io.getStdErr().writer();
try out.writeAll("\n");
try printSection(out, "<<<< input", input);
try printSection(out, "==== expect", expect);
try printSection(out, ">>>> got", buffer.items);
try printRuler(out);
try testing.expect(false);
try testing.expectEqualStrings(expect, buffer.items);
}
fn printSection(out: anytype, label: []const u8, bytes: []const u8) !void {
try printLabel(out, label, bytes);
try hexDump(out, bytes);
try printRuler(out);
try out.writeAll(bytes);
try out.writeAll("\n");
fn printCharValues(gpa: Allocator, list: *std.ArrayListUnmanaged(u8), bytes: []const u8) !void {
for (bytes) |b| try list.append(gpa, printable_char_tab[b]);
}
fn printLabel(out: anytype, label: []const u8, bytes: []const u8) !void {
var buf: [80]u8 = undefined;
const text = try std.fmt.bufPrint(buf[0..], "{s} {d} bytes ", .{ label, bytes.len });
try out.writeAll(text);
var i: usize = text.len;
const end = 79;
while (i < end) : (i += 1) {
try out.writeAll(&[_]u8{label[0]});
}
try out.writeAll("\n");
}
fn printRuler(out: anytype) !void {
var i: usize = 0;
const end = 79;
while (i < end) : (i += 1) {
try out.writeAll("-");
}
try out.writeAll("\n");
}
fn hexDump(out: anytype, bytes: []const u8) !void {
const n16 = bytes.len >> 4;
var line: usize = 0;
var offset: usize = 0;
while (line < n16) : (line += 1) {
try hexDump16(out, offset, bytes[offset..][0..16]);
offset += 16;
}
const n = bytes.len & 0x0f;
if (n > 0) {
try printDecValue(out, offset, 8);
try out.writeAll(":");
try out.writeAll(" ");
const end1 = @min(offset + n, offset + 8);
for (bytes[offset..end1]) |b| {
try out.writeAll(" ");
try printHexValue(out, b, 2);
}
const end2 = offset + n;
if (end2 > end1) {
try out.writeAll(" ");
for (bytes[end1..end2]) |b| {
try out.writeAll(" ");
try printHexValue(out, b, 2);
}
}
const short = 16 - n;
var i: usize = 0;
while (i < short) : (i += 1) {
try out.writeAll(" ");
}
if (end2 > end1) {
try out.writeAll(" |");
} else {
try out.writeAll(" |");
}
try printCharValues(out, bytes[offset..end2]);
try out.writeAll("|\n");
offset += n;
}
try printDecValue(out, offset, 8);
try out.writeAll(":");
try out.writeAll("\n");
}
fn hexDump16(out: anytype, offset: usize, bytes: []const u8) !void {
try printDecValue(out, offset, 8);
try out.writeAll(":");
try out.writeAll(" ");
for (bytes[0..8]) |b| {
try out.writeAll(" ");
try printHexValue(out, b, 2);
}
try out.writeAll(" ");
for (bytes[8..16]) |b| {
try out.writeAll(" ");
try printHexValue(out, b, 2);
}
try out.writeAll(" |");
try printCharValues(out, bytes);
try out.writeAll("|\n");
}
fn printDecValue(out: anytype, value: u64, width: u8) !void {
var buffer: [20]u8 = undefined;
const len = std.fmt.formatIntBuf(buffer[0..], value, 10, .lower, .{ .width = width, .fill = '0' });
try out.writeAll(buffer[0..len]);
}
fn printHexValue(out: anytype, value: u64, width: u8) !void {
var buffer: [16]u8 = undefined;
const len = std.fmt.formatIntBuf(buffer[0..], value, 16, .lower, .{ .width = width, .fill = '0' });
try out.writeAll(buffer[0..len]);
}
fn printCharValues(out: anytype, bytes: []const u8) !void {
for (bytes) |b| {
try out.writeAll(&[_]u8{printable_char_tab[b]});
}
}
fn printUnderstandableChar(out: anytype, char: u8) !void {
fn printUnderstandableChar(gpa: Allocator, list: *std.ArrayListUnmanaged(u8), char: u8) !void {
if (std.ascii.isPrint(char)) {
try out.print("'{c}'", .{char});
try list.print(gpa, "'{c}'", .{char});
} else {
try out.print("\\x{X:0>2}", .{char});
try list.print(gpa, "\\x{X:0>2}", .{char});
}
}

View File

@ -1,5 +1,6 @@
const Directory = @This();
const std = @import("../../std.zig");
const assert = std.debug.assert;
const fs = std.fs;
const fmt = std.fmt;
const Allocator = std.mem.Allocator;
@ -55,14 +56,7 @@ pub fn closeAndFree(self: *Directory, gpa: Allocator) void {
self.* = undefined;
}
pub fn format(
self: Directory,
comptime fmt_string: []const u8,
options: fmt.FormatOptions,
writer: anytype,
) !void {
_ = options;
if (fmt_string.len != 0) fmt.invalidFmtError(fmt_string, self);
pub fn format(self: Directory, writer: *std.io.Writer) std.io.Writer.Error!void {
if (self.path) |p| {
try writer.writeAll(p);
try writer.writeAll(fs.path.sep_str);

View File

@ -1,3 +1,10 @@
const Path = @This();
const std = @import("../../std.zig");
const assert = std.debug.assert;
const fs = std.fs;
const Allocator = std.mem.Allocator;
const Cache = std.Build.Cache;
root_dir: Cache.Directory,
/// The path, relative to the root dir, that this `Path` represents.
/// Empty string means the root_dir is the path.
@ -133,38 +140,42 @@ pub fn makePath(p: Path, sub_path: []const u8) !void {
}
pub fn toString(p: Path, allocator: Allocator) Allocator.Error![]u8 {
return std.fmt.allocPrint(allocator, "{}", .{p});
return std.fmt.allocPrint(allocator, "{f}", .{p});
}
pub fn toStringZ(p: Path, allocator: Allocator) Allocator.Error![:0]u8 {
return std.fmt.allocPrintZ(allocator, "{}", .{p});
return std.fmt.allocPrintSentinel(allocator, "{f}", .{p}, 0);
}
pub fn format(
self: Path,
comptime fmt_string: []const u8,
options: std.fmt.FormatOptions,
writer: anytype,
) !void {
if (fmt_string.len == 1) {
// Quote-escape the string.
const stringEscape = std.zig.stringEscape;
const f = switch (fmt_string[0]) {
'q' => "",
'\'' => "\'",
else => @compileError("unsupported format string: " ++ fmt_string),
};
if (self.root_dir.path) |p| {
try stringEscape(p, f, options, writer);
if (self.sub_path.len > 0) try stringEscape(fs.path.sep_str, f, options, writer);
}
if (self.sub_path.len > 0) {
try stringEscape(self.sub_path, f, options, writer);
}
return;
pub fn fmtEscapeString(path: Path) std.fmt.Formatter(Path, formatEscapeString) {
return .{ .data = path };
}
pub fn formatEscapeString(path: Path, writer: *std.io.Writer) std.io.Writer.Error!void {
if (path.root_dir.path) |p| {
try std.zig.stringEscape(p, writer);
if (path.sub_path.len > 0) try std.zig.stringEscape(fs.path.sep_str, writer);
}
if (fmt_string.len > 0)
std.fmt.invalidFmtError(fmt_string, self);
if (path.sub_path.len > 0) {
try std.zig.stringEscape(path.sub_path, writer);
}
}
pub fn fmtEscapeChar(path: Path) std.fmt.Formatter(Path, formatEscapeChar) {
return .{ .data = path };
}
pub fn formatEscapeChar(path: Path, writer: *std.io.Writer) std.io.Writer.Error!void {
if (path.root_dir.path) |p| {
try std.zig.charEscape(p, writer);
if (path.sub_path.len > 0) try std.zig.charEscape(fs.path.sep_str, writer);
}
if (path.sub_path.len > 0) {
try std.zig.charEscape(path.sub_path, writer);
}
}
pub fn format(self: Path, writer: *std.io.Writer) std.io.Writer.Error!void {
if (std.fs.path.isAbsolute(self.sub_path)) {
try writer.writeAll(self.sub_path);
return;
@ -223,9 +234,3 @@ pub const TableAdapter = struct {
return a.eql(b);
}
};
const Path = @This();
const std = @import("../../std.zig");
const fs = std.fs;
const Allocator = std.mem.Allocator;
const Cache = std.Build.Cache;

View File

@ -112,7 +112,6 @@ fn rebuildTestsWorkerRun(run: *Step.Run, ttyconf: std.io.tty.Config, parent_prog
fn rebuildTestsWorkerRunFallible(run: *Step.Run, ttyconf: std.io.tty.Config, parent_prog_node: std.Progress.Node) !void {
const gpa = run.step.owner.allocator;
const stderr = std.io.getStdErr();
const compile = run.producer.?;
const prog_node = parent_prog_node.start(compile.step.name, 0);
@ -125,9 +124,10 @@ fn rebuildTestsWorkerRunFallible(run: *Step.Run, ttyconf: std.io.tty.Config, par
const show_stderr = compile.step.result_stderr.len > 0;
if (show_error_msgs or show_compile_errors or show_stderr) {
std.debug.lockStdErr();
defer std.debug.unlockStdErr();
build_runner.printErrorMessages(gpa, &compile.step, .{ .ttyconf = ttyconf }, stderr, false) catch {};
var buf: [256]u8 = undefined;
const w = std.debug.lockStderrWriter(&buf);
defer std.debug.unlockStderrWriter();
build_runner.printErrorMessages(gpa, &compile.step, .{ .ttyconf = ttyconf }, w, false) catch {};
}
const rebuilt_bin_path = result catch |err| switch (err) {
@ -152,10 +152,10 @@ fn fuzzWorkerRun(
run.rerunInFuzzMode(web_server, unit_test_index, prog_node) catch |err| switch (err) {
error.MakeFailed => {
const stderr = std.io.getStdErr();
std.debug.lockStdErr();
defer std.debug.unlockStdErr();
build_runner.printErrorMessages(gpa, &run.step, .{ .ttyconf = ttyconf }, stderr, false) catch {};
var buf: [256]u8 = undefined;
const w = std.debug.lockStderrWriter(&buf);
defer std.debug.unlockStderrWriter();
build_runner.printErrorMessages(gpa, &run.step, .{ .ttyconf = ttyconf }, w, false) catch {};
return;
},
else => {

View File

@ -170,7 +170,7 @@ fn serveFile(
// We load the file with every request so that the user can make changes to the file
// and refresh the HTML page without restarting this server.
const file_contents = ws.zig_lib_directory.handle.readFileAlloc(gpa, name, 10 * 1024 * 1024) catch |err| {
log.err("failed to read '{}{s}': {s}", .{ ws.zig_lib_directory, name, @errorName(err) });
log.err("failed to read '{f}{s}': {s}", .{ ws.zig_lib_directory, name, @errorName(err) });
return error.AlreadyReported;
};
defer gpa.free(file_contents);
@ -251,10 +251,10 @@ fn buildWasmBinary(
"-fsingle-threaded", //
"--dep", "Walk", //
"--dep", "html_render", //
try std.fmt.allocPrint(arena, "-Mroot={}", .{main_src_path}), //
try std.fmt.allocPrint(arena, "-MWalk={}", .{walk_src_path}), //
try std.fmt.allocPrint(arena, "-Mroot={f}", .{main_src_path}), //
try std.fmt.allocPrint(arena, "-MWalk={f}", .{walk_src_path}), //
"--dep", "Walk", //
try std.fmt.allocPrint(arena, "-Mhtml_render={}", .{html_render_src_path}), //
try std.fmt.allocPrint(arena, "-Mhtml_render={f}", .{html_render_src_path}), //
"--listen=-",
});
@ -526,7 +526,7 @@ fn serveSourcesTar(ws: *WebServer, request: *std.http.Server.Request) !void {
for (deduped_paths) |joined_path| {
var file = joined_path.root_dir.handle.openFile(joined_path.sub_path, .{}) catch |err| {
log.err("failed to open {}: {s}", .{ joined_path, @errorName(err) });
log.err("failed to open {f}: {s}", .{ joined_path, @errorName(err) });
continue;
};
defer file.close();
@ -604,7 +604,7 @@ fn prepareTables(
const rebuilt_exe_path = run_step.rebuilt_executable.?;
var debug_info = std.debug.Info.load(gpa, rebuilt_exe_path, &gop.value_ptr.coverage) catch |err| {
log.err("step '{s}': failed to load debug information for '{}': {s}", .{
log.err("step '{s}': failed to load debug information for '{f}': {s}", .{
run_step.step.name, rebuilt_exe_path, @errorName(err),
});
return error.AlreadyReported;
@ -616,7 +616,7 @@ fn prepareTables(
.sub_path = "v/" ++ std.fmt.hex(coverage_id),
};
var coverage_file = coverage_file_path.root_dir.handle.openFile(coverage_file_path.sub_path, .{}) catch |err| {
log.err("step '{s}': failed to load coverage file '{}': {s}", .{
log.err("step '{s}': failed to load coverage file '{f}': {s}", .{
run_step.step.name, coverage_file_path, @errorName(err),
});
return error.AlreadyReported;
@ -624,7 +624,7 @@ fn prepareTables(
defer coverage_file.close();
const file_size = coverage_file.getEndPos() catch |err| {
log.err("unable to check len of coverage file '{}': {s}", .{ coverage_file_path, @errorName(err) });
log.err("unable to check len of coverage file '{f}': {s}", .{ coverage_file_path, @errorName(err) });
return error.AlreadyReported;
};
@ -636,7 +636,7 @@ fn prepareTables(
coverage_file.handle,
0,
) catch |err| {
log.err("failed to map coverage file '{}': {s}", .{ coverage_file_path, @errorName(err) });
log.err("failed to map coverage file '{f}': {s}", .{ coverage_file_path, @errorName(err) });
return error.AlreadyReported;
};
gop.value_ptr.mapped_memory = mapped_memory;

View File

@ -186,7 +186,7 @@ pub const IncludeDir = union(enum) {
.embed_path => |lazy_path| {
// Special case: this is a single arg.
const resolved = lazy_path.getPath3(b, asking_step);
const arg = b.fmt("--embed-dir={}", .{resolved});
const arg = b.fmt("--embed-dir={f}", .{resolved});
return zig_args.append(arg);
},
};

View File

@ -286,9 +286,7 @@ pub fn cast(step: *Step, comptime T: type) ?*T {
}
/// For debugging purposes, prints identifying information about this Step.
pub fn dump(step: *Step, file: std.fs.File) void {
const w = file.writer();
const tty_config = std.io.tty.detectConfig(file);
pub fn dump(step: *Step, w: *std.io.Writer, tty_config: std.io.tty.Config) void {
const debug_info = std.debug.getSelfDebugInfo() catch |err| {
w.print("Unable to dump stack trace: Unable to open debug info: {s}\n", .{
@errorName(err),
@ -482,9 +480,9 @@ pub fn evalZigProcess(
pub fn installFile(s: *Step, src_lazy_path: Build.LazyPath, dest_path: []const u8) !std.fs.Dir.PrevStatus {
const b = s.owner;
const src_path = src_lazy_path.getPath3(b, s);
try handleVerbose(b, null, &.{ "install", "-C", b.fmt("{}", .{src_path}), dest_path });
try handleVerbose(b, null, &.{ "install", "-C", b.fmt("{f}", .{src_path}), dest_path });
return src_path.root_dir.handle.updateFile(src_path.sub_path, std.fs.cwd(), dest_path, .{}) catch |err| {
return s.fail("unable to update file from '{}' to '{s}': {s}", .{
return s.fail("unable to update file from '{f}' to '{s}': {s}", .{
src_path, dest_path, @errorName(err),
});
};
@ -821,7 +819,7 @@ fn failWithCacheError(s: *Step, man: *const Build.Cache.Manifest, err: Build.Cac
switch (err) {
error.CacheCheckFailed => switch (man.diagnostic) {
.none => unreachable,
.manifest_create, .manifest_read, .manifest_lock, .manifest_seek => |e| return s.fail("failed to check cache: {s} {s}", .{
.manifest_create, .manifest_read, .manifest_lock => |e| return s.fail("failed to check cache: {s} {s}", .{
@tagName(man.diagnostic), @errorName(e),
}),
.file_open, .file_stat, .file_read, .file_hash => |op| {

View File

@ -6,6 +6,7 @@ const macho = std.macho;
const math = std.math;
const mem = std.mem;
const testing = std.testing;
const Writer = std.io.Writer;
const CheckObject = @This();
@ -28,7 +29,7 @@ pub fn create(
const gpa = owner.allocator;
const check_object = gpa.create(CheckObject) catch @panic("OOM");
check_object.* = .{
.step = Step.init(.{
.step = .init(.{
.id = base_id,
.name = "CheckObject",
.owner = owner,
@ -80,7 +81,7 @@ const Action = struct {
const hay = mem.trim(u8, haystack, " ");
const phrase = mem.trim(u8, act.phrase.resolve(b, step), " ");
var candidate_vars = std.ArrayList(struct { name: []const u8, value: u64 }).init(b.allocator);
var candidate_vars: std.ArrayList(struct { name: []const u8, value: u64 }) = .init(b.allocator);
var hay_it = mem.tokenizeScalar(u8, hay, ' ');
var needle_it = mem.tokenizeScalar(u8, phrase, ' ');
@ -229,18 +230,11 @@ const ComputeCompareExpected = struct {
literal: u64,
},
pub fn format(
value: @This(),
comptime fmt: []const u8,
options: std.fmt.FormatOptions,
writer: anytype,
) !void {
if (fmt.len != 0) std.fmt.invalidFmtError(fmt, value);
_ = options;
try writer.print("{s} ", .{@tagName(value.op)});
pub fn format(value: ComputeCompareExpected, w: *Writer) Writer.Error!void {
try w.print("{t} ", .{value.op});
switch (value.value) {
.variable => |name| try writer.writeAll(name),
.literal => |x| try writer.print("{x}", .{x}),
.variable => |name| try w.writeAll(name),
.literal => |x| try w.print("{x}", .{x}),
}
}
};
@ -565,9 +559,11 @@ fn make(step: *Step, make_options: Step.MakeOptions) !void {
null,
.of(u64),
null,
) catch |err| return step.fail("unable to read '{'}': {s}", .{ src_path, @errorName(err) });
) catch |err| return step.fail("unable to read '{f}': {s}", .{
std.fmt.alt(src_path, .formatEscapeChar), @errorName(err),
});
var vars = std.StringHashMap(u64).init(gpa);
var vars: std.StringHashMap(u64) = .init(gpa);
for (check_object.checks.items) |chk| {
if (chk.kind == .compute_compare) {
assert(chk.actions.items.len == 1);
@ -581,7 +577,7 @@ fn make(step: *Step, make_options: Step.MakeOptions) !void {
return step.fail(
\\
\\========= comparison failed for action: ===========
\\{s} {}
\\{s} {f}
\\===================================================
, .{ act.phrase.resolve(b, step), act.expected.? });
}
@ -600,7 +596,7 @@ fn make(step: *Step, make_options: Step.MakeOptions) !void {
// we either format message string with escaped codes, or not to aid debugging
// the failed test.
const fmtMessageString = struct {
fn fmtMessageString(kind: Check.Kind, msg: []const u8) std.fmt.Formatter(formatMessageString) {
fn fmtMessageString(kind: Check.Kind, msg: []const u8) std.fmt.Formatter(Ctx, formatMessageString) {
return .{ .data = .{
.kind = kind,
.msg = msg,
@ -612,17 +608,10 @@ fn make(step: *Step, make_options: Step.MakeOptions) !void {
msg: []const u8,
};
fn formatMessageString(
ctx: Ctx,
comptime unused_fmt_string: []const u8,
options: std.fmt.FormatOptions,
writer: anytype,
) !void {
_ = unused_fmt_string;
_ = options;
fn formatMessageString(ctx: Ctx, w: *Writer) !void {
switch (ctx.kind) {
.dump_section => try writer.print("{s}", .{std.fmt.fmtSliceEscapeLower(ctx.msg)}),
else => try writer.writeAll(ctx.msg),
.dump_section => try w.print("{f}", .{std.ascii.hexEscape(ctx.msg, .lower)}),
else => try w.writeAll(ctx.msg),
}
}
}.fmtMessageString;
@ -637,11 +626,11 @@ fn make(step: *Step, make_options: Step.MakeOptions) !void {
return step.fail(
\\
\\========= expected to find: ==========================
\\{s}
\\{f}
\\========= but parsed file does not contain it: =======
\\{s}
\\{f}
\\========= file path: =================================
\\{}
\\{f}
, .{
fmtMessageString(chk.kind, act.phrase.resolve(b, step)),
fmtMessageString(chk.kind, output),
@ -657,11 +646,11 @@ fn make(step: *Step, make_options: Step.MakeOptions) !void {
return step.fail(
\\
\\========= expected to find: ==========================
\\*{s}*
\\*{f}*
\\========= but parsed file does not contain it: =======
\\{s}
\\{f}
\\========= file path: =================================
\\{}
\\{f}
, .{
fmtMessageString(chk.kind, act.phrase.resolve(b, step)),
fmtMessageString(chk.kind, output),
@ -676,11 +665,11 @@ fn make(step: *Step, make_options: Step.MakeOptions) !void {
return step.fail(
\\
\\========= expected not to find: ===================
\\{s}
\\{f}
\\========= but parsed file does contain it: ========
\\{s}
\\{f}
\\========= file path: ==============================
\\{}
\\{f}
, .{
fmtMessageString(chk.kind, act.phrase.resolve(b, step)),
fmtMessageString(chk.kind, output),
@ -696,13 +685,13 @@ fn make(step: *Step, make_options: Step.MakeOptions) !void {
return step.fail(
\\
\\========= expected to find and extract: ==============
\\{s}
\\{f}
\\========= but parsed file does not contain it: =======
\\{s}
\\{f}
\\========= file path: ==============================
\\{}
\\{f}
, .{
act.phrase.resolve(b, step),
fmtMessageString(chk.kind, act.phrase.resolve(b, step)),
fmtMessageString(chk.kind, output),
src_path,
});
@ -963,7 +952,7 @@ const MachODumper = struct {
.UUID => {
const uuid = lc.cast(macho.uuid_command).?;
try writer.writeByte('\n');
try writer.print("uuid {x}", .{std.fmt.fmtSliceHexLower(&uuid.uuid)});
try writer.print("uuid {x}", .{&uuid.uuid});
},
.DATA_IN_CODE,
@ -2012,7 +2001,7 @@ const ElfDumper = struct {
for (ctx.phdrs, 0..) |phdr, phndx| {
try writer.print("phdr {d}\n", .{phndx});
try writer.print("type {s}\n", .{fmtPhType(phdr.p_type)});
try writer.print("type {f}\n", .{fmtPhType(phdr.p_type)});
try writer.print("vaddr {x}\n", .{phdr.p_vaddr});
try writer.print("paddr {x}\n", .{phdr.p_paddr});
try writer.print("offset {x}\n", .{phdr.p_offset});
@ -2052,7 +2041,7 @@ const ElfDumper = struct {
for (ctx.shdrs, 0..) |shdr, shndx| {
try writer.print("shdr {d}\n", .{shndx});
try writer.print("name {s}\n", .{ctx.getSectionName(shndx)});
try writer.print("type {s}\n", .{fmtShType(shdr.sh_type)});
try writer.print("type {f}\n", .{fmtShType(shdr.sh_type)});
try writer.print("addr {x}\n", .{shdr.sh_addr});
try writer.print("offset {x}\n", .{shdr.sh_offset});
try writer.print("size {x}\n", .{shdr.sh_size});
@ -2325,18 +2314,11 @@ const ElfDumper = struct {
return mem.sliceTo(@as([*:0]const u8, @ptrCast(strtab.ptr + off)), 0);
}
fn fmtShType(sh_type: u32) std.fmt.Formatter(formatShType) {
fn fmtShType(sh_type: u32) std.fmt.Formatter(u32, formatShType) {
return .{ .data = sh_type };
}
fn formatShType(
sh_type: u32,
comptime unused_fmt_string: []const u8,
options: std.fmt.FormatOptions,
writer: anytype,
) !void {
_ = unused_fmt_string;
_ = options;
fn formatShType(sh_type: u32, writer: *Writer) Writer.Error!void {
const name = switch (sh_type) {
elf.SHT_NULL => "NULL",
elf.SHT_PROGBITS => "PROGBITS",
@ -2372,18 +2354,11 @@ const ElfDumper = struct {
try writer.writeAll(name);
}
fn fmtPhType(ph_type: u32) std.fmt.Formatter(formatPhType) {
fn fmtPhType(ph_type: u32) std.fmt.Formatter(u32, formatPhType) {
return .{ .data = ph_type };
}
fn formatPhType(
ph_type: u32,
comptime unused_fmt_string: []const u8,
options: std.fmt.FormatOptions,
writer: anytype,
) !void {
_ = unused_fmt_string;
_ = options;
fn formatPhType(ph_type: u32, writer: *Writer) Writer.Error!void {
const p_type = switch (ph_type) {
elf.PT_NULL => "NULL",
elf.PT_LOAD => "LOAD",

View File

@ -409,7 +409,7 @@ pub fn create(owner: *std.Build, options: Options) *Compile {
.linkage = options.linkage,
.kind = options.kind,
.name = name,
.step = Step.init(.{
.step = .init(.{
.id = base_id,
.name = step_name,
.owner = owner,
@ -1017,20 +1017,16 @@ fn getGeneratedFilePath(compile: *Compile, comptime tag_name: []const u8, asking
const maybe_path: ?*GeneratedFile = @field(compile, tag_name);
const generated_file = maybe_path orelse {
std.debug.lockStdErr();
const stderr = std.io.getStdErr();
std.Build.dumpBadGetPathHelp(&compile.step, stderr, compile.step.owner, asking_step) catch {};
const w = std.debug.lockStderrWriter(&.{});
std.Build.dumpBadGetPathHelp(&compile.step, w, .detect(.stderr()), compile.step.owner, asking_step) catch {};
std.debug.unlockStderrWriter();
@panic("missing emit option for " ++ tag_name);
};
const path = generated_file.path orelse {
std.debug.lockStdErr();
const stderr = std.io.getStdErr();
std.Build.dumpBadGetPathHelp(&compile.step, stderr, compile.step.owner, asking_step) catch {};
const w = std.debug.lockStderrWriter(&.{});
std.Build.dumpBadGetPathHelp(&compile.step, w, .detect(.stderr()), compile.step.owner, asking_step) catch {};
std.debug.unlockStderrWriter();
@panic(tag_name ++ " is null. Is there a missing step dependency?");
};
@ -1542,7 +1538,7 @@ fn getZigArgs(compile: *Compile, fuzz: bool) ![][]const u8 {
if (compile.kind == .lib and compile.linkage != null and compile.linkage.? == .dynamic) {
if (compile.version) |version| {
try zig_args.append("--version");
try zig_args.append(b.fmt("{}", .{version}));
try zig_args.append(b.fmt("{f}", .{version}));
}
if (compile.rootModuleTarget().os.tag.isDarwin()) {
@ -1696,9 +1692,7 @@ fn getZigArgs(compile: *Compile, fuzz: bool) ![][]const u8 {
if (compile.build_id orelse b.build_id) |build_id| {
try zig_args.append(switch (build_id) {
.hexstring => |hs| b.fmt("--build-id=0x{s}", .{
std.fmt.fmtSliceHexLower(hs.toSlice()),
}),
.hexstring => |hs| b.fmt("--build-id=0x{x}", .{hs.toSlice()}),
.none, .fast, .uuid, .sha1, .md5 => b.fmt("--build-id={s}", .{@tagName(build_id)}),
});
}
@ -1706,7 +1700,7 @@ fn getZigArgs(compile: *Compile, fuzz: bool) ![][]const u8 {
const opt_zig_lib_dir = if (compile.zig_lib_dir) |dir|
dir.getPath2(b, step)
else if (b.graph.zig_lib_directory.path) |_|
b.fmt("{}", .{b.graph.zig_lib_directory})
b.fmt("{f}", .{b.graph.zig_lib_directory})
else
null;
@ -1746,8 +1740,7 @@ fn getZigArgs(compile: *Compile, fuzz: bool) ![][]const u8 {
}
if (compile.error_limit) |err_limit| try zig_args.appendSlice(&.{
"--error-limit",
b.fmt("{}", .{err_limit}),
"--error-limit", b.fmt("{d}", .{err_limit}),
});
try addFlag(&zig_args, "incremental", b.graph.incremental);
@ -1771,12 +1764,12 @@ fn getZigArgs(compile: *Compile, fuzz: bool) ![][]const u8 {
for (arg, 0..) |c, arg_idx| {
if (c == '\\' or c == '"') {
// Slow path for arguments that need to be escaped. We'll need to allocate and copy
var escaped = try ArrayList(u8).initCapacity(arena, arg.len + 1);
const writer = escaped.writer();
try writer.writeAll(arg[0..arg_idx]);
var escaped: std.ArrayListUnmanaged(u8) = .empty;
try escaped.ensureTotalCapacityPrecise(arena, arg.len + 1);
try escaped.appendSlice(arena, arg[0..arg_idx]);
for (arg[arg_idx..]) |to_escape| {
if (to_escape == '\\' or to_escape == '"') try writer.writeByte('\\');
try writer.writeByte(to_escape);
if (to_escape == '\\' or to_escape == '"') try escaped.append(arena, '\\');
try escaped.append(arena, to_escape);
}
escaped_args.appendAssumeCapacity(escaped.items);
continue :arg_blk;
@ -1793,11 +1786,7 @@ fn getZigArgs(compile: *Compile, fuzz: bool) ![][]const u8 {
var args_hash: [Sha256.digest_length]u8 = undefined;
Sha256.hash(args, &args_hash, .{});
var args_hex_hash: [Sha256.digest_length * 2]u8 = undefined;
_ = try std.fmt.bufPrint(
&args_hex_hash,
"{s}",
.{std.fmt.fmtSliceHexLower(&args_hash)},
);
_ = try std.fmt.bufPrint(&args_hex_hash, "{x}", .{&args_hash});
const args_file = "args" ++ fs.path.sep_str ++ args_hex_hash;
try b.cache_root.handle.writeFile(.{ .sub_path = args_file, .data = args });
@ -1836,7 +1825,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
// Update generated files
if (maybe_output_dir) |output_dir| {
if (compile.emit_directory) |lp| {
lp.path = b.fmt("{}", .{output_dir});
lp.path = b.fmt("{f}", .{output_dir});
}
// zig fmt: off
@ -1970,20 +1959,23 @@ fn addFlag(args: *ArrayList([]const u8), comptime name: []const u8, opt: ?bool)
fn checkCompileErrors(compile: *Compile) !void {
// Clear this field so that it does not get printed by the build runner.
const actual_eb = compile.step.result_error_bundle;
compile.step.result_error_bundle = std.zig.ErrorBundle.empty;
compile.step.result_error_bundle = .empty;
const arena = compile.step.owner.allocator;
var actual_errors_list = std.ArrayList(u8).init(arena);
try actual_eb.renderToWriter(.{
.ttyconf = .no_color,
.include_reference_trace = false,
.include_source_line = false,
}, actual_errors_list.writer());
const actual_errors = try actual_errors_list.toOwnedSlice();
const actual_errors = ae: {
var aw: std.io.Writer.Allocating = .init(arena);
defer aw.deinit();
try actual_eb.renderToWriter(.{
.ttyconf = .no_color,
.include_reference_trace = false,
.include_source_line = false,
}, &aw.writer);
break :ae try aw.toOwnedSlice();
};
// Render the expected lines into a string that we can compare verbatim.
var expected_generated = std.ArrayList(u8).init(arena);
var expected_generated: std.ArrayListUnmanaged(u8) = .empty;
const expect_errors = compile.expect_errors.?;
var actual_line_it = mem.splitScalar(u8, actual_errors, '\n');
@ -2042,17 +2034,17 @@ fn checkCompileErrors(compile: *Compile) !void {
.exact => |expect_lines| {
for (expect_lines) |expect_line| {
const actual_line = actual_line_it.next() orelse {
try expected_generated.appendSlice(expect_line);
try expected_generated.append('\n');
try expected_generated.appendSlice(arena, expect_line);
try expected_generated.append(arena, '\n');
continue;
};
if (matchCompileError(actual_line, expect_line)) {
try expected_generated.appendSlice(actual_line);
try expected_generated.append('\n');
try expected_generated.appendSlice(arena, actual_line);
try expected_generated.append(arena, '\n');
continue;
}
try expected_generated.appendSlice(expect_line);
try expected_generated.append('\n');
try expected_generated.appendSlice(arena, expect_line);
try expected_generated.append(arena, '\n');
}
if (mem.eql(u8, expected_generated.items, actual_errors)) return;

View File

@ -2,6 +2,7 @@ const std = @import("std");
const ConfigHeader = @This();
const Step = std.Build.Step;
const Allocator = std.mem.Allocator;
const Writer = std.io.Writer;
pub const Style = union(enum) {
/// A configure format supported by autotools that uses `#undef foo` to
@ -87,7 +88,7 @@ pub fn create(owner: *std.Build, options: Options) *ConfigHeader {
owner.fmt("configure {s} header to {s}", .{ @tagName(options.style), include_path });
config_header.* = .{
.step = Step.init(.{
.step = .init(.{
.id = base_id,
.name = name,
.owner = owner,
@ -95,7 +96,7 @@ pub fn create(owner: *std.Build, options: Options) *ConfigHeader {
.first_ret_addr = options.first_ret_addr orelse @returnAddress(),
}),
.style = options.style,
.values = std.StringArrayHashMap(Value).init(owner.allocator),
.values = .init(owner.allocator),
.max_bytes = options.max_bytes,
.include_path = include_path,
@ -195,8 +196,9 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
man.hash.addBytes(config_header.include_path);
man.hash.addOptionalBytes(config_header.include_guard_override);
var output = std.ArrayList(u8).init(gpa);
defer output.deinit();
var aw: std.io.Writer.Allocating = .init(gpa);
defer aw.deinit();
const bw = &aw.writer;
const header_text = "This file was generated by ConfigHeader using the Zig Build System.";
const c_generated_line = "/* " ++ header_text ++ " */\n";
@ -204,7 +206,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
switch (config_header.style) {
.autoconf_undef, .autoconf, .autoconf_at => |file_source| {
try output.appendSlice(c_generated_line);
try bw.writeAll(c_generated_line);
const src_path = file_source.getPath2(b, step);
const contents = std.fs.cwd().readFileAlloc(arena, src_path, config_header.max_bytes) catch |err| {
return step.fail("unable to read autoconf input file '{s}': {s}", .{
@ -212,32 +214,33 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
});
};
switch (config_header.style) {
.autoconf_undef, .autoconf => try render_autoconf_undef(step, contents, &output, config_header.values, src_path),
.autoconf_at => try render_autoconf_at(step, contents, &output, config_header.values, src_path),
.autoconf_undef, .autoconf => try render_autoconf_undef(step, contents, bw, config_header.values, src_path),
.autoconf_at => try render_autoconf_at(step, contents, &aw, config_header.values, src_path),
else => unreachable,
}
},
.cmake => |file_source| {
try output.appendSlice(c_generated_line);
try bw.writeAll(c_generated_line);
const src_path = file_source.getPath2(b, step);
const contents = std.fs.cwd().readFileAlloc(arena, src_path, config_header.max_bytes) catch |err| {
return step.fail("unable to read cmake input file '{s}': {s}", .{
src_path, @errorName(err),
});
};
try render_cmake(step, contents, &output, config_header.values, src_path);
try render_cmake(step, contents, bw, config_header.values, src_path);
},
.blank => {
try output.appendSlice(c_generated_line);
try render_blank(&output, config_header.values, config_header.include_path, config_header.include_guard_override);
try bw.writeAll(c_generated_line);
try render_blank(gpa, bw, config_header.values, config_header.include_path, config_header.include_guard_override);
},
.nasm => {
try output.appendSlice(asm_generated_line);
try render_nasm(&output, config_header.values);
try bw.writeAll(asm_generated_line);
try render_nasm(bw, config_header.values);
},
}
man.hash.addBytes(output.items);
const output = aw.getWritten();
man.hash.addBytes(output);
if (try step.cacheHit(&man)) {
const digest = man.final();
@ -256,13 +259,13 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
const sub_path_dirname = std.fs.path.dirname(sub_path).?;
b.cache_root.handle.makePath(sub_path_dirname) catch |err| {
return step.fail("unable to make path '{}{s}': {s}", .{
return step.fail("unable to make path '{f}{s}': {s}", .{
b.cache_root, sub_path_dirname, @errorName(err),
});
};
b.cache_root.handle.writeFile(.{ .sub_path = sub_path, .data = output.items }) catch |err| {
return step.fail("unable to write file '{}{s}': {s}", .{
b.cache_root.handle.writeFile(.{ .sub_path = sub_path, .data = output }) catch |err| {
return step.fail("unable to write file '{f}{s}': {s}", .{
b.cache_root, sub_path, @errorName(err),
});
};
@ -274,7 +277,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
fn render_autoconf_undef(
step: *Step,
contents: []const u8,
output: *std.ArrayList(u8),
bw: *Writer,
values: std.StringArrayHashMap(Value),
src_path: []const u8,
) !void {
@ -289,15 +292,15 @@ fn render_autoconf_undef(
var line_it = std.mem.splitScalar(u8, contents, '\n');
while (line_it.next()) |line| : (line_index += 1) {
if (!std.mem.startsWith(u8, line, "#")) {
try output.appendSlice(line);
try output.appendSlice("\n");
try bw.writeAll(line);
try bw.writeByte('\n');
continue;
}
var it = std.mem.tokenizeAny(u8, line[1..], " \t\r");
const undef = it.next().?;
if (!std.mem.eql(u8, undef, "undef")) {
try output.appendSlice(line);
try output.appendSlice("\n");
try bw.writeAll(line);
try bw.writeByte('\n');
continue;
}
const name = it.next().?;
@ -309,7 +312,7 @@ fn render_autoconf_undef(
continue;
};
is_used.set(index);
try renderValueC(output, name, values.values()[index]);
try renderValueC(bw, name, values.values()[index]);
}
var unused_value_it = is_used.iterator(.{ .kind = .unset });
@ -326,12 +329,13 @@ fn render_autoconf_undef(
fn render_autoconf_at(
step: *Step,
contents: []const u8,
output: *std.ArrayList(u8),
aw: *std.io.Writer.Allocating,
values: std.StringArrayHashMap(Value),
src_path: []const u8,
) !void {
const build = step.owner;
const allocator = build.allocator;
const bw = &aw.writer;
const used = allocator.alloc(bool, values.count()) catch @panic("OOM");
for (used) |*u| u.* = false;
@ -343,11 +347,11 @@ fn render_autoconf_at(
while (line_it.next()) |line| : (line_index += 1) {
const last_line = line_it.index == line_it.buffer.len;
const old_len = output.items.len;
expand_variables_autoconf_at(output, line, values, used) catch |err| switch (err) {
const old_len = aw.getWritten().len;
expand_variables_autoconf_at(bw, line, values, used) catch |err| switch (err) {
error.MissingValue => {
const name = output.items[old_len..];
defer output.shrinkRetainingCapacity(old_len);
const name = aw.getWritten()[old_len..];
defer aw.shrinkRetainingCapacity(old_len);
try step.addError("{s}:{d}: error: unspecified config header value: '{s}'", .{
src_path, line_index + 1, name,
});
@ -362,9 +366,7 @@ fn render_autoconf_at(
continue;
},
};
if (!last_line) {
try output.append('\n');
}
if (!last_line) try bw.writeByte('\n');
}
for (values.unmanaged.entries.slice().items(.key), used) |name, u| {
@ -374,15 +376,13 @@ fn render_autoconf_at(
}
}
if (any_errors) {
return error.MakeFailed;
}
if (any_errors) return error.MakeFailed;
}
fn render_cmake(
step: *Step,
contents: []const u8,
output: *std.ArrayList(u8),
bw: *Writer,
values: std.StringArrayHashMap(Value),
src_path: []const u8,
) !void {
@ -417,10 +417,8 @@ fn render_cmake(
defer allocator.free(line);
if (!std.mem.startsWith(u8, line, "#")) {
try output.appendSlice(line);
if (!last_line) {
try output.appendSlice("\n");
}
try bw.writeAll(line);
if (!last_line) try bw.writeByte('\n');
continue;
}
var it = std.mem.tokenizeAny(u8, line[1..], " \t\r");
@ -428,10 +426,8 @@ fn render_cmake(
if (!std.mem.eql(u8, cmakedefine, "cmakedefine") and
!std.mem.eql(u8, cmakedefine, "cmakedefine01"))
{
try output.appendSlice(line);
if (!last_line) {
try output.appendSlice("\n");
}
try bw.writeAll(line);
if (!last_line) try bw.writeByte('\n');
continue;
}
@ -502,7 +498,7 @@ fn render_cmake(
value = Value{ .ident = it.rest() };
}
try renderValueC(output, name, value);
try renderValueC(bw, name, value);
}
if (any_errors) {
@ -511,13 +507,14 @@ fn render_cmake(
}
fn render_blank(
output: *std.ArrayList(u8),
gpa: std.mem.Allocator,
bw: *Writer,
defines: std.StringArrayHashMap(Value),
include_path: []const u8,
include_guard_override: ?[]const u8,
) !void {
const include_guard_name = include_guard_override orelse blk: {
const name = try output.allocator.dupe(u8, include_path);
const name = try gpa.dupe(u8, include_path);
for (name) |*byte| {
switch (byte.*) {
'a'...'z' => byte.* = byte.* - 'a' + 'A',
@ -527,92 +524,53 @@ fn render_blank(
}
break :blk name;
};
defer if (include_guard_override == null) gpa.free(include_guard_name);
try output.appendSlice("#ifndef ");
try output.appendSlice(include_guard_name);
try output.appendSlice("\n#define ");
try output.appendSlice(include_guard_name);
try output.appendSlice("\n");
try bw.print(
\\#ifndef {[0]s}
\\#define {[0]s}
\\
, .{include_guard_name});
const values = defines.values();
for (defines.keys(), 0..) |name, i| {
try renderValueC(output, name, values[i]);
}
for (defines.keys(), 0..) |name, i| try renderValueC(bw, name, values[i]);
try output.appendSlice("#endif /* ");
try output.appendSlice(include_guard_name);
try output.appendSlice(" */\n");
try bw.print(
\\#endif /* {s} */
\\
, .{include_guard_name});
}
fn render_nasm(output: *std.ArrayList(u8), defines: std.StringArrayHashMap(Value)) !void {
const values = defines.values();
for (defines.keys(), 0..) |name, i| {
try renderValueNasm(output, name, values[i]);
}
fn render_nasm(bw: *Writer, defines: std.StringArrayHashMap(Value)) !void {
for (defines.keys(), defines.values()) |name, value| try renderValueNasm(bw, name, value);
}
fn renderValueC(output: *std.ArrayList(u8), name: []const u8, value: Value) !void {
fn renderValueC(bw: *Writer, name: []const u8, value: Value) !void {
switch (value) {
.undef => {
try output.appendSlice("/* #undef ");
try output.appendSlice(name);
try output.appendSlice(" */\n");
},
.defined => {
try output.appendSlice("#define ");
try output.appendSlice(name);
try output.appendSlice("\n");
},
.boolean => |b| {
try output.appendSlice("#define ");
try output.appendSlice(name);
try output.appendSlice(if (b) " 1\n" else " 0\n");
},
.int => |i| {
try output.writer().print("#define {s} {d}\n", .{ name, i });
},
.ident => |ident| {
try output.writer().print("#define {s} {s}\n", .{ name, ident });
},
.string => |string| {
// TODO: use C-specific escaping instead of zig string literals
try output.writer().print("#define {s} \"{}\"\n", .{ name, std.zig.fmtEscapes(string) });
},
.undef => try bw.print("/* #undef {s} */\n", .{name}),
.defined => try bw.print("#define {s}\n", .{name}),
.boolean => |b| try bw.print("#define {s} {c}\n", .{ name, @as(u8, '0') + @intFromBool(b) }),
.int => |i| try bw.print("#define {s} {d}\n", .{ name, i }),
.ident => |ident| try bw.print("#define {s} {s}\n", .{ name, ident }),
// TODO: use C-specific escaping instead of zig string literals
.string => |string| try bw.print("#define {s} \"{f}\"\n", .{ name, std.zig.fmtString(string) }),
}
}
fn renderValueNasm(output: *std.ArrayList(u8), name: []const u8, value: Value) !void {
fn renderValueNasm(bw: *Writer, name: []const u8, value: Value) !void {
switch (value) {
.undef => {
try output.appendSlice("; %undef ");
try output.appendSlice(name);
try output.appendSlice("\n");
},
.defined => {
try output.appendSlice("%define ");
try output.appendSlice(name);
try output.appendSlice("\n");
},
.boolean => |b| {
try output.appendSlice("%define ");
try output.appendSlice(name);
try output.appendSlice(if (b) " 1\n" else " 0\n");
},
.int => |i| {
try output.writer().print("%define {s} {d}\n", .{ name, i });
},
.ident => |ident| {
try output.writer().print("%define {s} {s}\n", .{ name, ident });
},
.string => |string| {
// TODO: use nasm-specific escaping instead of zig string literals
try output.writer().print("%define {s} \"{}\"\n", .{ name, std.zig.fmtEscapes(string) });
},
.undef => try bw.print("; %undef {s}\n", .{name}),
.defined => try bw.print("%define {s}\n", .{name}),
.boolean => |b| try bw.print("%define {s} {c}\n", .{ name, @as(u8, '0') + @intFromBool(b) }),
.int => |i| try bw.print("%define {s} {d}\n", .{ name, i }),
.ident => |ident| try bw.print("%define {s} {s}\n", .{ name, ident }),
// TODO: use nasm-specific escaping instead of zig string literals
.string => |string| try bw.print("%define {s} \"{f}\"\n", .{ name, std.zig.fmtString(string) }),
}
}
fn expand_variables_autoconf_at(
output: *std.ArrayList(u8),
bw: *Writer,
contents: []const u8,
values: std.StringArrayHashMap(Value),
used: []bool,
@ -637,23 +595,17 @@ fn expand_variables_autoconf_at(
const key = contents[curr + 1 .. close_pos];
const index = values.getIndex(key) orelse {
// Report the missing key to the caller.
try output.appendSlice(key);
try bw.writeAll(key);
return error.MissingValue;
};
const value = values.unmanaged.entries.slice().items(.value)[index];
used[index] = true;
try output.appendSlice(contents[source_offset..curr]);
try bw.writeAll(contents[source_offset..curr]);
switch (value) {
.undef, .defined => {},
.boolean => |b| {
try output.append(if (b) '1' else '0');
},
.int => |i| {
try output.writer().print("{d}", .{i});
},
.ident, .string => |s| {
try output.appendSlice(s);
},
.boolean => |b| try bw.writeByte(@as(u8, '0') + @intFromBool(b)),
.int => |i| try bw.print("{d}", .{i}),
.ident, .string => |s| try bw.writeAll(s),
}
curr = close_pos;
@ -661,7 +613,7 @@ fn expand_variables_autoconf_at(
}
}
try output.appendSlice(contents[source_offset..]);
try bw.writeAll(contents[source_offset..]);
}
fn expand_variables_cmake(
@ -669,7 +621,7 @@ fn expand_variables_cmake(
contents: []const u8,
values: std.StringArrayHashMap(Value),
) ![]const u8 {
var result = std.ArrayList(u8).init(allocator);
var result: std.ArrayList(u8) = .init(allocator);
errdefer result.deinit();
const valid_varname_chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789/_.+-";
@ -681,7 +633,7 @@ fn expand_variables_cmake(
source: usize,
target: usize,
};
var var_stack = std.ArrayList(Position).init(allocator);
var var_stack: std.ArrayList(Position) = .init(allocator);
defer var_stack.deinit();
loop: while (curr < contents.len) : (curr += 1) {
switch (contents[curr]) {
@ -707,7 +659,7 @@ fn expand_variables_cmake(
try result.append(if (b) '1' else '0');
},
.int => |i| {
try result.writer().print("{d}", .{i});
try result.print("{d}", .{i});
},
.ident, .string => |s| {
try result.appendSlice(s);
@ -764,7 +716,7 @@ fn expand_variables_cmake(
try result.append(if (b) '1' else '0');
},
.int => |i| {
try result.writer().print("{d}", .{i});
try result.print("{d}", .{i});
},
.ident, .string => |s| {
try result.appendSlice(s);
@ -801,17 +753,17 @@ fn testReplaceVariablesAutoconfAt(
expected: []const u8,
values: std.StringArrayHashMap(Value),
) !void {
var output = std.ArrayList(u8).init(allocator);
defer output.deinit();
var aw: std.io.Writer.Allocating = .init(allocator);
defer aw.deinit();
const used = try allocator.alloc(bool, values.count());
for (used) |*u| u.* = false;
defer allocator.free(used);
try expand_variables_autoconf_at(&output, contents, values, used);
try expand_variables_autoconf_at(&aw.writer, contents, values, used);
for (used) |u| if (!u) return error.UnusedValue;
try std.testing.expectEqualStrings(expected, output.items);
try std.testing.expectEqualStrings(expected, aw.getWritten());
}
fn testReplaceVariablesCMake(
@ -828,7 +780,7 @@ fn testReplaceVariablesCMake(
test "expand_variables_autoconf_at simple cases" {
const allocator = std.testing.allocator;
var values = std.StringArrayHashMap(Value).init(allocator);
var values: std.StringArrayHashMap(Value) = .init(allocator);
defer values.deinit();
// empty strings are preserved
@ -924,7 +876,7 @@ test "expand_variables_autoconf_at simple cases" {
test "expand_variables_autoconf_at edge cases" {
const allocator = std.testing.allocator;
var values = std.StringArrayHashMap(Value).init(allocator);
var values: std.StringArrayHashMap(Value) = .init(allocator);
defer values.deinit();
// @-vars resolved only when they wrap valid characters, otherwise considered literals
@ -940,7 +892,7 @@ test "expand_variables_autoconf_at edge cases" {
test "expand_variables_cmake simple cases" {
const allocator = std.testing.allocator;
var values = std.StringArrayHashMap(Value).init(allocator);
var values: std.StringArrayHashMap(Value) = .init(allocator);
defer values.deinit();
try values.putNoClobber("undef", .undef);
@ -1028,7 +980,7 @@ test "expand_variables_cmake simple cases" {
test "expand_variables_cmake edge cases" {
const allocator = std.testing.allocator;
var values = std.StringArrayHashMap(Value).init(allocator);
var values: std.StringArrayHashMap(Value) = .init(allocator);
defer values.deinit();
// special symbols
@ -1089,7 +1041,7 @@ test "expand_variables_cmake edge cases" {
test "expand_variables_cmake escaped characters" {
const allocator = std.testing.allocator;
var values = std.StringArrayHashMap(Value).init(allocator);
var values: std.StringArrayHashMap(Value) = .init(allocator);
defer values.deinit();
try values.putNoClobber("string", Value{ .string = "text" });

View File

@ -164,7 +164,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
const full_h_prefix = b.getInstallPath(h_dir, dir.dest_rel_path);
var src_dir = src_dir_path.root_dir.handle.openDir(src_dir_path.subPathOrDot(), .{ .iterate = true }) catch |err| {
return step.fail("unable to open source directory '{}': {s}", .{
return step.fail("unable to open source directory '{f}': {s}", .{
src_dir_path, @errorName(err),
});
};

View File

@ -65,7 +65,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
const src_dir_path = install_dir.options.source_dir.getPath3(b, step);
const need_derived_inputs = try step.addDirectoryWatchInput(install_dir.options.source_dir);
var src_dir = src_dir_path.root_dir.handle.openDir(src_dir_path.subPathOrDot(), .{ .iterate = true }) catch |err| {
return step.fail("unable to open source directory '{}': {s}", .{
return step.fail("unable to open source directory '{f}': {s}", .{
src_dir_path, @errorName(err),
});
};

View File

@ -12,23 +12,23 @@ pub const base_id: Step.Id = .options;
step: Step,
generated_file: GeneratedFile,
contents: std.ArrayList(u8),
args: std.ArrayList(Arg),
encountered_types: std.StringHashMap(void),
contents: std.ArrayListUnmanaged(u8),
args: std.ArrayListUnmanaged(Arg),
encountered_types: std.StringHashMapUnmanaged(void),
pub fn create(owner: *std.Build) *Options {
const options = owner.allocator.create(Options) catch @panic("OOM");
options.* = .{
.step = Step.init(.{
.step = .init(.{
.id = base_id,
.name = "options",
.owner = owner,
.makeFn = make,
}),
.generated_file = undefined,
.contents = std.ArrayList(u8).init(owner.allocator),
.args = std.ArrayList(Arg).init(owner.allocator),
.encountered_types = std.StringHashMap(void).init(owner.allocator),
.contents = .empty,
.args = .empty,
.encountered_types = .empty,
};
options.generated_file = .{ .step = &options.step };
@ -40,110 +40,119 @@ pub fn addOption(options: *Options, comptime T: type, name: []const u8, value: T
}
fn addOptionFallible(options: *Options, comptime T: type, name: []const u8, value: T) !void {
const out = options.contents.writer();
try printType(options, out, T, value, 0, name);
try printType(options, &options.contents, T, value, 0, name);
}
fn printType(options: *Options, out: anytype, comptime T: type, value: T, indent: u8, name: ?[]const u8) !void {
fn printType(
options: *Options,
out: *std.ArrayListUnmanaged(u8),
comptime T: type,
value: T,
indent: u8,
name: ?[]const u8,
) !void {
const gpa = options.step.owner.allocator;
switch (T) {
[]const []const u8 => {
if (name) |payload| {
try out.print("pub const {}: []const []const u8 = ", .{std.zig.fmtId(payload)});
try out.print(gpa, "pub const {f}: []const []const u8 = ", .{std.zig.fmtId(payload)});
}
try out.writeAll("&[_][]const u8{\n");
try out.appendSlice(gpa, "&[_][]const u8{\n");
for (value) |slice| {
try out.writeByteNTimes(' ', indent);
try out.print(" \"{}\",\n", .{std.zig.fmtEscapes(slice)});
try out.appendNTimes(gpa, ' ', indent);
try out.print(gpa, " \"{f}\",\n", .{std.zig.fmtString(slice)});
}
if (name != null) {
try out.writeAll("};\n");
try out.appendSlice(gpa, "};\n");
} else {
try out.writeAll("},\n");
try out.appendSlice(gpa, "},\n");
}
return;
},
[]const u8 => {
if (name) |some| {
try out.print("pub const {}: []const u8 = \"{}\";", .{ std.zig.fmtId(some), std.zig.fmtEscapes(value) });
try out.print(gpa, "pub const {f}: []const u8 = \"{f}\";", .{
std.zig.fmtId(some), std.zig.fmtString(value),
});
} else {
try out.print("\"{}\",", .{std.zig.fmtEscapes(value)});
try out.print(gpa, "\"{f}\",", .{std.zig.fmtString(value)});
}
return out.writeAll("\n");
return out.appendSlice(gpa, "\n");
},
[:0]const u8 => {
if (name) |some| {
try out.print("pub const {}: [:0]const u8 = \"{}\";", .{ std.zig.fmtId(some), std.zig.fmtEscapes(value) });
try out.print(gpa, "pub const {f}: [:0]const u8 = \"{f}\";", .{ std.zig.fmtId(some), std.zig.fmtString(value) });
} else {
try out.print("\"{}\",", .{std.zig.fmtEscapes(value)});
try out.print(gpa, "\"{f}\",", .{std.zig.fmtString(value)});
}
return out.writeAll("\n");
return out.appendSlice(gpa, "\n");
},
?[]const u8 => {
if (name) |some| {
try out.print("pub const {}: ?[]const u8 = ", .{std.zig.fmtId(some)});
try out.print(gpa, "pub const {f}: ?[]const u8 = ", .{std.zig.fmtId(some)});
}
if (value) |payload| {
try out.print("\"{}\"", .{std.zig.fmtEscapes(payload)});
try out.print(gpa, "\"{f}\"", .{std.zig.fmtString(payload)});
} else {
try out.writeAll("null");
try out.appendSlice(gpa, "null");
}
if (name != null) {
try out.writeAll(";\n");
try out.appendSlice(gpa, ";\n");
} else {
try out.writeAll(",\n");
try out.appendSlice(gpa, ",\n");
}
return;
},
?[:0]const u8 => {
if (name) |some| {
try out.print("pub const {}: ?[:0]const u8 = ", .{std.zig.fmtId(some)});
try out.print(gpa, "pub const {f}: ?[:0]const u8 = ", .{std.zig.fmtId(some)});
}
if (value) |payload| {
try out.print("\"{}\"", .{std.zig.fmtEscapes(payload)});
try out.print(gpa, "\"{f}\"", .{std.zig.fmtString(payload)});
} else {
try out.writeAll("null");
try out.appendSlice(gpa, "null");
}
if (name != null) {
try out.writeAll(";\n");
try out.appendSlice(gpa, ";\n");
} else {
try out.writeAll(",\n");
try out.appendSlice(gpa, ",\n");
}
return;
},
std.SemanticVersion => {
if (name) |some| {
try out.print("pub const {}: @import(\"std\").SemanticVersion = ", .{std.zig.fmtId(some)});
try out.print(gpa, "pub const {f}: @import(\"std\").SemanticVersion = ", .{std.zig.fmtId(some)});
}
try out.writeAll(".{\n");
try out.writeByteNTimes(' ', indent);
try out.print(" .major = {d},\n", .{value.major});
try out.writeByteNTimes(' ', indent);
try out.print(" .minor = {d},\n", .{value.minor});
try out.writeByteNTimes(' ', indent);
try out.print(" .patch = {d},\n", .{value.patch});
try out.appendSlice(gpa, ".{\n");
try out.appendNTimes(gpa, ' ', indent);
try out.print(gpa, " .major = {d},\n", .{value.major});
try out.appendNTimes(gpa, ' ', indent);
try out.print(gpa, " .minor = {d},\n", .{value.minor});
try out.appendNTimes(gpa, ' ', indent);
try out.print(gpa, " .patch = {d},\n", .{value.patch});
if (value.pre) |some| {
try out.writeByteNTimes(' ', indent);
try out.print(" .pre = \"{}\",\n", .{std.zig.fmtEscapes(some)});
try out.appendNTimes(gpa, ' ', indent);
try out.print(gpa, " .pre = \"{f}\",\n", .{std.zig.fmtString(some)});
}
if (value.build) |some| {
try out.writeByteNTimes(' ', indent);
try out.print(" .build = \"{}\",\n", .{std.zig.fmtEscapes(some)});
try out.appendNTimes(gpa, ' ', indent);
try out.print(gpa, " .build = \"{f}\",\n", .{std.zig.fmtString(some)});
}
if (name != null) {
try out.writeAll("};\n");
try out.appendSlice(gpa, "};\n");
} else {
try out.writeAll("},\n");
try out.appendSlice(gpa, "},\n");
}
return;
},
@ -153,21 +162,21 @@ fn printType(options: *Options, out: anytype, comptime T: type, value: T, indent
switch (@typeInfo(T)) {
.array => {
if (name) |some| {
try out.print("pub const {}: {s} = ", .{ std.zig.fmtId(some), @typeName(T) });
try out.print(gpa, "pub const {f}: {s} = ", .{ std.zig.fmtId(some), @typeName(T) });
}
try out.print("{s} {{\n", .{@typeName(T)});
try out.print(gpa, "{s} {{\n", .{@typeName(T)});
for (value) |item| {
try out.writeByteNTimes(' ', indent + 4);
try out.appendNTimes(gpa, ' ', indent + 4);
try printType(options, out, @TypeOf(item), item, indent + 4, null);
}
try out.writeByteNTimes(' ', indent);
try out.writeAll("}");
try out.appendNTimes(gpa, ' ', indent);
try out.appendSlice(gpa, "}");
if (name != null) {
try out.writeAll(";\n");
try out.appendSlice(gpa, ";\n");
} else {
try out.writeAll(",\n");
try out.appendSlice(gpa, ",\n");
}
return;
},
@ -177,27 +186,27 @@ fn printType(options: *Options, out: anytype, comptime T: type, value: T, indent
}
if (name) |some| {
try out.print("pub const {}: {s} = ", .{ std.zig.fmtId(some), @typeName(T) });
try out.print(gpa, "pub const {f}: {s} = ", .{ std.zig.fmtId(some), @typeName(T) });
}
try out.print("&[_]{s} {{\n", .{@typeName(p.child)});
try out.print(gpa, "&[_]{s} {{\n", .{@typeName(p.child)});
for (value) |item| {
try out.writeByteNTimes(' ', indent + 4);
try out.appendNTimes(gpa, ' ', indent + 4);
try printType(options, out, @TypeOf(item), item, indent + 4, null);
}
try out.writeByteNTimes(' ', indent);
try out.writeAll("}");
try out.appendNTimes(gpa, ' ', indent);
try out.appendSlice(gpa, "}");
if (name != null) {
try out.writeAll(";\n");
try out.appendSlice(gpa, ";\n");
} else {
try out.writeAll(",\n");
try out.appendSlice(gpa, ",\n");
}
return;
},
.optional => {
if (name) |some| {
try out.print("pub const {}: {s} = ", .{ std.zig.fmtId(some), @typeName(T) });
try out.print(gpa, "pub const {f}: {s} = ", .{ std.zig.fmtId(some), @typeName(T) });
}
if (value) |inner| {
@ -206,13 +215,13 @@ fn printType(options: *Options, out: anytype, comptime T: type, value: T, indent
_ = options.contents.pop();
_ = options.contents.pop();
} else {
try out.writeAll("null");
try out.appendSlice(gpa, "null");
}
if (name != null) {
try out.writeAll(";\n");
try out.appendSlice(gpa, ";\n");
} else {
try out.writeAll(",\n");
try out.appendSlice(gpa, ",\n");
}
return;
},
@ -224,9 +233,9 @@ fn printType(options: *Options, out: anytype, comptime T: type, value: T, indent
.null,
=> {
if (name) |some| {
try out.print("pub const {}: {s} = {any};\n", .{ std.zig.fmtId(some), @typeName(T), value });
try out.print(gpa, "pub const {f}: {s} = {any};\n", .{ std.zig.fmtId(some), @typeName(T), value });
} else {
try out.print("{any},\n", .{value});
try out.print(gpa, "{any},\n", .{value});
}
return;
},
@ -234,10 +243,10 @@ fn printType(options: *Options, out: anytype, comptime T: type, value: T, indent
try printEnum(options, out, T, info, indent);
if (name) |some| {
try out.print("pub const {}: {} = .{p_};\n", .{
try out.print(gpa, "pub const {f}: {f} = .{f};\n", .{
std.zig.fmtId(some),
std.zig.fmtId(@typeName(T)),
std.zig.fmtId(@tagName(value)),
std.zig.fmtIdFlags(@tagName(value), .{ .allow_underscore = true, .allow_primitive = true }),
});
}
return;
@ -246,7 +255,7 @@ fn printType(options: *Options, out: anytype, comptime T: type, value: T, indent
try printStruct(options, out, T, info, indent);
if (name) |some| {
try out.print("pub const {}: {} = ", .{
try out.print(gpa, "pub const {f}: {f} = ", .{
std.zig.fmtId(some),
std.zig.fmtId(@typeName(T)),
});
@ -258,7 +267,7 @@ fn printType(options: *Options, out: anytype, comptime T: type, value: T, indent
}
}
fn printUserDefinedType(options: *Options, out: anytype, comptime T: type, indent: u8) !void {
fn printUserDefinedType(options: *Options, out: *std.ArrayListUnmanaged(u8), comptime T: type, indent: u8) !void {
switch (@typeInfo(T)) {
.@"enum" => |info| {
return try printEnum(options, out, T, info, indent);
@ -270,94 +279,119 @@ fn printUserDefinedType(options: *Options, out: anytype, comptime T: type, inden
}
}
fn printEnum(options: *Options, out: anytype, comptime T: type, comptime val: std.builtin.Type.Enum, indent: u8) !void {
const gop = try options.encountered_types.getOrPut(@typeName(T));
fn printEnum(
options: *Options,
out: *std.ArrayListUnmanaged(u8),
comptime T: type,
comptime val: std.builtin.Type.Enum,
indent: u8,
) !void {
const gpa = options.step.owner.allocator;
const gop = try options.encountered_types.getOrPut(gpa, @typeName(T));
if (gop.found_existing) return;
try out.writeByteNTimes(' ', indent);
try out.print("pub const {} = enum ({s}) {{\n", .{ std.zig.fmtId(@typeName(T)), @typeName(val.tag_type) });
try out.appendNTimes(gpa, ' ', indent);
try out.print(gpa, "pub const {f} = enum ({s}) {{\n", .{ std.zig.fmtId(@typeName(T)), @typeName(val.tag_type) });
inline for (val.fields) |field| {
try out.writeByteNTimes(' ', indent);
try out.print(" {p} = {d},\n", .{ std.zig.fmtId(field.name), field.value });
try out.appendNTimes(gpa, ' ', indent);
try out.print(gpa, " {f} = {d},\n", .{
std.zig.fmtIdFlags(field.name, .{ .allow_primitive = true }), field.value,
});
}
if (!val.is_exhaustive) {
try out.writeByteNTimes(' ', indent);
try out.writeAll(" _,\n");
try out.appendNTimes(gpa, ' ', indent);
try out.appendSlice(gpa, " _,\n");
}
try out.writeByteNTimes(' ', indent);
try out.writeAll("};\n");
try out.appendNTimes(gpa, ' ', indent);
try out.appendSlice(gpa, "};\n");
}
fn printStruct(options: *Options, out: anytype, comptime T: type, comptime val: std.builtin.Type.Struct, indent: u8) !void {
const gop = try options.encountered_types.getOrPut(@typeName(T));
fn printStruct(options: *Options, out: *std.ArrayListUnmanaged(u8), comptime T: type, comptime val: std.builtin.Type.Struct, indent: u8) !void {
const gpa = options.step.owner.allocator;
const gop = try options.encountered_types.getOrPut(gpa, @typeName(T));
if (gop.found_existing) return;
try out.writeByteNTimes(' ', indent);
try out.print("pub const {} = ", .{std.zig.fmtId(@typeName(T))});
try out.appendNTimes(gpa, ' ', indent);
try out.print(gpa, "pub const {f} = ", .{std.zig.fmtId(@typeName(T))});
switch (val.layout) {
.@"extern" => try out.writeAll("extern struct"),
.@"packed" => try out.writeAll("packed struct"),
else => try out.writeAll("struct"),
.@"extern" => try out.appendSlice(gpa, "extern struct"),
.@"packed" => try out.appendSlice(gpa, "packed struct"),
else => try out.appendSlice(gpa, "struct"),
}
try out.writeAll(" {\n");
try out.appendSlice(gpa, " {\n");
inline for (val.fields) |field| {
try out.writeByteNTimes(' ', indent);
try out.appendNTimes(gpa, ' ', indent);
const type_name = @typeName(field.type);
// If the type name doesn't contains a '.' the type is from zig builtins.
if (std.mem.containsAtLeast(u8, type_name, 1, ".")) {
try out.print(" {p_}: {}", .{ std.zig.fmtId(field.name), std.zig.fmtId(type_name) });
try out.print(gpa, " {f}: {f}", .{
std.zig.fmtIdFlags(field.name, .{ .allow_underscore = true, .allow_primitive = true }),
std.zig.fmtId(type_name),
});
} else {
try out.print(" {p_}: {s}", .{ std.zig.fmtId(field.name), type_name });
try out.print(gpa, " {f}: {s}", .{
std.zig.fmtIdFlags(field.name, .{ .allow_underscore = true, .allow_primitive = true }),
type_name,
});
}
if (field.defaultValue()) |default_value| {
try out.writeAll(" = ");
try out.appendSlice(gpa, " = ");
switch (@typeInfo(@TypeOf(default_value))) {
.@"enum" => try out.print(".{s},\n", .{@tagName(default_value)}),
.@"enum" => try out.print(gpa, ".{s},\n", .{@tagName(default_value)}),
.@"struct" => |info| {
try printStructValue(options, out, info, default_value, indent + 4);
},
else => try printType(options, out, @TypeOf(default_value), default_value, indent, null),
}
} else {
try out.writeAll(",\n");
try out.appendSlice(gpa, ",\n");
}
}
// TODO: write declarations
try out.writeByteNTimes(' ', indent);
try out.writeAll("};\n");
try out.appendNTimes(gpa, ' ', indent);
try out.appendSlice(gpa, "};\n");
inline for (val.fields) |field| {
try printUserDefinedType(options, out, field.type, 0);
}
}
fn printStructValue(options: *Options, out: anytype, comptime struct_val: std.builtin.Type.Struct, val: anytype, indent: u8) !void {
try out.writeAll(".{\n");
fn printStructValue(
options: *Options,
out: *std.ArrayListUnmanaged(u8),
comptime struct_val: std.builtin.Type.Struct,
val: anytype,
indent: u8,
) !void {
const gpa = options.step.owner.allocator;
try out.appendSlice(gpa, ".{\n");
if (struct_val.is_tuple) {
inline for (struct_val.fields) |field| {
try out.writeByteNTimes(' ', indent);
try out.appendNTimes(gpa, ' ', indent);
try printType(options, out, @TypeOf(@field(val, field.name)), @field(val, field.name), indent, null);
}
} else {
inline for (struct_val.fields) |field| {
try out.writeByteNTimes(' ', indent);
try out.print(" .{p_} = ", .{std.zig.fmtId(field.name)});
try out.appendNTimes(gpa, ' ', indent);
try out.print(gpa, " .{f} = ", .{
std.zig.fmtIdFlags(field.name, .{ .allow_primitive = true, .allow_underscore = true }),
});
const field_name = @field(val, field.name);
switch (@typeInfo(@TypeOf(field_name))) {
.@"enum" => try out.print(".{s},\n", .{@tagName(field_name)}),
.@"enum" => try out.print(gpa, ".{s},\n", .{@tagName(field_name)}),
.@"struct" => |struct_info| {
try printStructValue(options, out, struct_info, field_name, indent + 4);
},
@ -367,10 +401,10 @@ fn printStructValue(options: *Options, out: anytype, comptime struct_val: std.bu
}
if (indent == 0) {
try out.writeAll("};\n");
try out.appendSlice(gpa, "};\n");
} else {
try out.writeByteNTimes(' ', indent);
try out.writeAll("},\n");
try out.appendNTimes(gpa, ' ', indent);
try out.appendSlice(gpa, "},\n");
}
}
@ -381,7 +415,8 @@ pub fn addOptionPath(
name: []const u8,
path: LazyPath,
) void {
options.args.append(.{
const arena = options.step.owner.allocator;
options.args.append(arena, .{
.name = options.step.owner.dupe(name),
.path = path.dupe(options.step.owner),
}) catch @panic("OOM");
@ -440,7 +475,7 @@ fn make(step: *Step, make_options: Step.MakeOptions) !void {
error.FileNotFound => {
const sub_dirname = fs.path.dirname(sub_path).?;
b.cache_root.handle.makePath(sub_dirname) catch |e| {
return step.fail("unable to make path '{}{s}': {s}", .{
return step.fail("unable to make path '{f}{s}': {s}", .{
b.cache_root, sub_dirname, @errorName(e),
});
};
@ -452,13 +487,13 @@ fn make(step: *Step, make_options: Step.MakeOptions) !void {
const tmp_sub_path_dirname = fs.path.dirname(tmp_sub_path).?;
b.cache_root.handle.makePath(tmp_sub_path_dirname) catch |err| {
return step.fail("unable to make temporary directory '{}{s}': {s}", .{
return step.fail("unable to make temporary directory '{f}{s}': {s}", .{
b.cache_root, tmp_sub_path_dirname, @errorName(err),
});
};
b.cache_root.handle.writeFile(.{ .sub_path = tmp_sub_path, .data = options.contents.items }) catch |err| {
return step.fail("unable to write options to '{}{s}': {s}", .{
return step.fail("unable to write options to '{f}{s}': {s}", .{
b.cache_root, tmp_sub_path, @errorName(err),
});
};
@ -467,7 +502,7 @@ fn make(step: *Step, make_options: Step.MakeOptions) !void {
error.PathAlreadyExists => {
// Other process beat us to it. Clean up the temp file.
b.cache_root.handle.deleteFile(tmp_sub_path) catch |e| {
try step.addError("warning: unable to delete temp file '{}{s}': {s}", .{
try step.addError("warning: unable to delete temp file '{f}{s}': {s}", .{
b.cache_root, tmp_sub_path, @errorName(e),
});
};
@ -475,7 +510,7 @@ fn make(step: *Step, make_options: Step.MakeOptions) !void {
return;
},
else => {
return step.fail("unable to rename options from '{}{s}' to '{}{s}': {s}", .{
return step.fail("unable to rename options from '{f}{s}' to '{f}{s}': {s}", .{
b.cache_root, tmp_sub_path,
b.cache_root, sub_path,
@errorName(err),
@ -483,7 +518,7 @@ fn make(step: *Step, make_options: Step.MakeOptions) !void {
},
};
},
else => |e| return step.fail("unable to access options file '{}{s}': {s}", .{
else => |e| return step.fail("unable to access options file '{f}{s}': {s}", .{
b.cache_root, sub_path, @errorName(e),
}),
}
@ -643,5 +678,5 @@ test Options {
\\
, options.contents.items);
_ = try std.zig.Ast.parse(arena.allocator(), try options.contents.toOwnedSliceSentinel(0), .zig);
_ = try std.zig.Ast.parse(arena.allocator(), try options.contents.toOwnedSliceSentinel(arena.allocator(), 0), .zig);
}

View File

@ -832,7 +832,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
else => unreachable,
};
b.cache_root.handle.makePath(output_sub_dir_path) catch |err| {
return step.fail("unable to make path '{}{s}': {s}", .{
return step.fail("unable to make path '{f}{s}': {s}", .{
b.cache_root, output_sub_dir_path, @errorName(err),
});
};
@ -864,7 +864,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
else => unreachable,
};
b.cache_root.handle.makePath(output_sub_dir_path) catch |err| {
return step.fail("unable to make path '{}{s}': {s}", .{
return step.fail("unable to make path '{f}{s}': {s}", .{
b.cache_root, output_sub_dir_path, @errorName(err),
});
};
@ -903,21 +903,21 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
b.cache_root.handle.rename(tmp_dir_path, o_sub_path) catch |err| {
if (err == error.PathAlreadyExists) {
b.cache_root.handle.deleteTree(o_sub_path) catch |del_err| {
return step.fail("unable to remove dir '{}'{s}: {s}", .{
return step.fail("unable to remove dir '{f}'{s}: {s}", .{
b.cache_root,
tmp_dir_path,
@errorName(del_err),
});
};
b.cache_root.handle.rename(tmp_dir_path, o_sub_path) catch |retry_err| {
return step.fail("unable to rename dir '{}{s}' to '{}{s}': {s}", .{
return step.fail("unable to rename dir '{f}{s}' to '{f}{s}': {s}", .{
b.cache_root, tmp_dir_path,
b.cache_root, o_sub_path,
@errorName(retry_err),
});
};
} else {
return step.fail("unable to rename dir '{}{s}' to '{}{s}': {s}", .{
return step.fail("unable to rename dir '{f}{s}' to '{f}{s}': {s}", .{
b.cache_root, tmp_dir_path,
b.cache_root, o_sub_path,
@errorName(err),
@ -964,7 +964,7 @@ pub fn rerunInFuzzMode(
.artifact => |pa| {
const artifact = pa.artifact;
const file_path: []const u8 = p: {
if (artifact == run.producer.?) break :p b.fmt("{}", .{run.rebuilt_executable.?});
if (artifact == run.producer.?) break :p b.fmt("{f}", .{run.rebuilt_executable.?});
break :p artifact.installed_path orelse artifact.generated_bin.?.path.?;
};
try argv_list.append(arena, b.fmt("{s}{s}", .{
@ -1011,24 +1011,17 @@ fn populateGeneratedPaths(
}
}
fn formatTerm(
term: ?std.process.Child.Term,
comptime fmt: []const u8,
options: std.fmt.FormatOptions,
writer: anytype,
) !void {
_ = fmt;
_ = options;
fn formatTerm(term: ?std.process.Child.Term, w: *std.io.Writer) std.io.Writer.Error!void {
if (term) |t| switch (t) {
.Exited => |code| try writer.print("exited with code {}", .{code}),
.Signal => |sig| try writer.print("terminated with signal {}", .{sig}),
.Stopped => |sig| try writer.print("stopped with signal {}", .{sig}),
.Unknown => |code| try writer.print("terminated for unknown reason with code {}", .{code}),
.Exited => |code| try w.print("exited with code {d}", .{code}),
.Signal => |sig| try w.print("terminated with signal {d}", .{sig}),
.Stopped => |sig| try w.print("stopped with signal {d}", .{sig}),
.Unknown => |code| try w.print("terminated for unknown reason with code {d}", .{code}),
} else {
try writer.writeAll("exited with any code");
try w.writeAll("exited with any code");
}
}
fn fmtTerm(term: ?std.process.Child.Term) std.fmt.Formatter(formatTerm) {
fn fmtTerm(term: ?std.process.Child.Term) std.fmt.Formatter(?std.process.Child.Term, formatTerm) {
return .{ .data = term };
}
@ -1262,12 +1255,12 @@ fn runCommand(
const sub_path = b.pathJoin(&output_components);
const sub_path_dirname = fs.path.dirname(sub_path).?;
b.cache_root.handle.makePath(sub_path_dirname) catch |err| {
return step.fail("unable to make path '{}{s}': {s}", .{
return step.fail("unable to make path '{f}{s}': {s}", .{
b.cache_root, sub_path_dirname, @errorName(err),
});
};
b.cache_root.handle.writeFile(.{ .sub_path = sub_path, .data = stream.bytes.? }) catch |err| {
return step.fail("unable to write file '{}{s}': {s}", .{
return step.fail("unable to write file '{f}{s}': {s}", .{
b.cache_root, sub_path, @errorName(err),
});
};
@ -1346,7 +1339,7 @@ fn runCommand(
},
.expect_term => |expected_term| {
if (!termMatches(expected_term, result.term)) {
return step.fail("the following command {} (expected {}):\n{s}", .{
return step.fail("the following command {f} (expected {f}):\n{s}", .{
fmtTerm(result.term),
fmtTerm(expected_term),
try Step.allocPrintCmd(arena, cwd, final_argv),
@ -1366,7 +1359,7 @@ fn runCommand(
};
const expected_term: std.process.Child.Term = .{ .Exited = 0 };
if (!termMatches(expected_term, result.term)) {
return step.fail("{s}the following command {} (expected {}):\n{s}", .{
return step.fail("{s}the following command {f} (expected {f}):\n{s}", .{
prefix,
fmtTerm(result.term),
fmtTerm(expected_term),
@ -1797,10 +1790,10 @@ fn evalGeneric(run: *Run, child: *std.process.Child) !StdIoResult {
stdout_bytes = try poller.fifo(.stdout).toOwnedSlice();
stderr_bytes = try poller.fifo(.stderr).toOwnedSlice();
} else {
stdout_bytes = try stdout.reader().readAllAlloc(arena, run.max_stdio_size);
stdout_bytes = try stdout.deprecatedReader().readAllAlloc(arena, run.max_stdio_size);
}
} else if (child.stderr) |stderr| {
stderr_bytes = try stderr.reader().readAllAlloc(arena, run.max_stdio_size);
stderr_bytes = try stderr.deprecatedReader().readAllAlloc(arena, run.max_stdio_size);
}
if (stderr_bytes) |bytes| if (bytes.len > 0) {

View File

@ -76,7 +76,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
for (usf.output_source_files.items) |output_source_file| {
if (fs.path.dirname(output_source_file.sub_path)) |dirname| {
b.build_root.handle.makePath(dirname) catch |err| {
return step.fail("unable to make path '{}{s}': {s}", .{
return step.fail("unable to make path '{f}{s}': {s}", .{
b.build_root, dirname, @errorName(err),
});
};
@ -84,7 +84,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
switch (output_source_file.contents) {
.bytes => |bytes| {
b.build_root.handle.writeFile(.{ .sub_path = output_source_file.sub_path, .data = bytes }) catch |err| {
return step.fail("unable to write file '{}{s}': {s}", .{
return step.fail("unable to write file '{f}{s}': {s}", .{
b.build_root, output_source_file.sub_path, @errorName(err),
});
};
@ -101,7 +101,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
output_source_file.sub_path,
.{},
) catch |err| {
return step.fail("unable to update file from '{s}' to '{}{s}': {s}", .{
return step.fail("unable to update file from '{s}' to '{f}{s}': {s}", .{
source_path, b.build_root, output_source_file.sub_path, @errorName(err),
});
};

View File

@ -217,7 +217,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
const src_dir_path = dir.source.getPath3(b, step);
var src_dir = src_dir_path.root_dir.handle.openDir(src_dir_path.subPathOrDot(), .{ .iterate = true }) catch |err| {
return step.fail("unable to open source directory '{}': {s}", .{
return step.fail("unable to open source directory '{f}': {s}", .{
src_dir_path, @errorName(err),
});
};
@ -258,7 +258,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
write_file.generated_directory.path = try b.cache_root.join(arena, &.{ "o", &digest });
var cache_dir = b.cache_root.handle.makeOpenPath(cache_path, .{}) catch |err| {
return step.fail("unable to make path '{}{s}': {s}", .{
return step.fail("unable to make path '{f}{s}': {s}", .{
b.cache_root, cache_path, @errorName(err),
});
};
@ -269,7 +269,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
for (write_file.files.items) |file| {
if (fs.path.dirname(file.sub_path)) |dirname| {
cache_dir.makePath(dirname) catch |err| {
return step.fail("unable to make path '{}{s}{c}{s}': {s}", .{
return step.fail("unable to make path '{f}{s}{c}{s}': {s}", .{
b.cache_root, cache_path, fs.path.sep, dirname, @errorName(err),
});
};
@ -277,7 +277,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
switch (file.contents) {
.bytes => |bytes| {
cache_dir.writeFile(.{ .sub_path = file.sub_path, .data = bytes }) catch |err| {
return step.fail("unable to write file '{}{s}{c}{s}': {s}", .{
return step.fail("unable to write file '{f}{s}{c}{s}': {s}", .{
b.cache_root, cache_path, fs.path.sep, file.sub_path, @errorName(err),
});
};
@ -291,7 +291,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
file.sub_path,
.{},
) catch |err| {
return step.fail("unable to update file from '{s}' to '{}{s}{c}{s}': {s}", .{
return step.fail("unable to update file from '{s}' to '{f}{s}{c}{s}': {s}", .{
source_path,
b.cache_root,
cache_path,
@ -315,7 +315,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
if (dest_dirname.len != 0) {
cache_dir.makePath(dest_dirname) catch |err| {
return step.fail("unable to make path '{}{s}{c}{s}': {s}", .{
return step.fail("unable to make path '{f}{s}{c}{s}': {s}", .{
b.cache_root, cache_path, fs.path.sep, dest_dirname, @errorName(err),
});
};
@ -338,7 +338,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
dest_path,
.{},
) catch |err| {
return step.fail("unable to update file from '{}' to '{}{s}{c}{s}': {s}", .{
return step.fail("unable to update file from '{f}' to '{f}{s}{c}{s}': {s}", .{
src_entry_path, b.cache_root, cache_path, fs.path.sep, dest_path, @errorName(err),
});
};

View File

@ -211,7 +211,7 @@ const Os = switch (builtin.os.tag) {
.ADD = true,
.ONLYDIR = true,
}, fan_mask, path.root_dir.handle.fd, path.subPathOrDot()) catch |err| {
fatal("unable to watch {}: {s}", .{ path, @errorName(err) });
fatal("unable to watch {f}: {s}", .{ path, @errorName(err) });
};
}
break :rs &dh_gop.value_ptr.reaction_set;
@ -265,7 +265,7 @@ const Os = switch (builtin.os.tag) {
.ONLYDIR = true,
}, fan_mask, path.root_dir.handle.fd, path.subPathOrDot()) catch |err| switch (err) {
error.FileNotFound => {}, // Expected, harmless.
else => |e| std.log.warn("unable to unwatch '{}': {s}", .{ path, @errorName(e) }),
else => |e| std.log.warn("unable to unwatch '{f}': {s}", .{ path, @errorName(e) }),
};
w.dir_table.swapRemoveAt(i);
@ -659,7 +659,7 @@ const Os = switch (builtin.os.tag) {
path.root_dir.handle.fd
else
posix.openat(path.root_dir.handle.fd, path.sub_path, dir_open_flags, 0) catch |err| {
fatal("failed to open directory {}: {s}", .{ path, @errorName(err) });
fatal("failed to open directory {f}: {s}", .{ path, @errorName(err) });
};
// Empirically the dir has to stay open or else no events are triggered.
errdefer if (!skip_open_dir) posix.close(dir_fd);

View File

@ -9,6 +9,7 @@ const Progress = @This();
const posix = std.posix;
const is_big_endian = builtin.cpu.arch.endian() == .big;
const is_windows = builtin.os.tag == .windows;
const Writer = std.io.Writer;
/// `null` if the current node (and its children) should
/// not print on update()
@ -451,7 +452,7 @@ pub fn start(options: Options) Node {
if (options.disable_printing) {
return Node.none;
}
const stderr = std.io.getStdErr();
const stderr: std.fs.File = .stderr();
global_progress.terminal = stderr;
if (stderr.getOrEnableAnsiEscapeSupport()) {
global_progress.terminal_mode = .ansi_escape_codes;
@ -606,6 +607,36 @@ pub fn unlockStdErr() void {
stderr_mutex.unlock();
}
/// Protected by `stderr_mutex`.
const stderr_writer: *Writer = &stderr_file_writer.interface;
/// Protected by `stderr_mutex`.
var stderr_file_writer: std.fs.File.Writer = .{
.interface = std.fs.File.Writer.initInterface(&.{}),
.file = if (is_windows) undefined else .stderr(),
.mode = .streaming,
};
/// Allows the caller to freely write to the returned `Writer`,
/// initialized with `buffer`, until `unlockStderrWriter` is called.
///
/// During the lock, any `std.Progress` information is cleared from the terminal.
///
/// The lock is recursive; the same thread may hold the lock multiple times.
pub fn lockStderrWriter(buffer: []u8) *Writer {
// Serialize all stderr access; this mutex is shared with std.Progress rendering.
stderr_mutex.lock();
// Erase any progress display so caller output starts on a clean terminal.
// Best-effort: failure to clear must not prevent the caller from writing.
clearWrittenWithEscapeCodes() catch {};
// On Windows the stderr handle cannot be computed at comptime, so the
// file field is left `undefined` at init and populated here on each lock.
if (is_windows) stderr_file_writer.file = .stderr();
// Drain anything buffered under the previous caller's buffer before
// swapping in the new one; a failed flush is intentionally ignored.
stderr_writer.flush() catch {};
stderr_writer.buffer = buffer;
return stderr_writer;
}
/// Releases the lock taken by `lockStderrWriter`, flushing any remaining
/// buffered bytes and detaching the caller-provided buffer first.
pub fn unlockStderrWriter() void {
// Flush before the buffer is detached; errors are best-effort ignored
// because there is no caller left to report them to.
stderr_writer.flush() catch {};
// Detach the caller's buffer: it may go out of scope after this returns.
stderr_writer.buffer = &.{};
stderr_mutex.unlock();
}
fn ipcThreadRun(fd: posix.fd_t) anyerror!void {
// Store this data in the thread so that it does not need to be part of the
// linker data of the main executable.

View File

@ -122,7 +122,7 @@ fn mode(comptime x: comptime_int) comptime_int {
}
pub fn main() !void {
const stdout = std.io.getStdOut().writer();
const stdout = std.fs.File.stdout().deprecatedWriter();
var buffer: [1024]u8 = undefined;
var fixed = std.heap.FixedBufferAllocator.init(buffer[0..]);

View File

@ -150,17 +150,10 @@ fn parseNum(text: []const u8) error{ InvalidVersion, Overflow }!usize {
};
}
pub fn format(
self: Version,
comptime fmt: []const u8,
options: std.fmt.FormatOptions,
out_stream: anytype,
) !void {
_ = options;
if (fmt.len != 0) std.fmt.invalidFmtError(fmt, self);
try std.fmt.format(out_stream, "{d}.{d}.{d}", .{ self.major, self.minor, self.patch });
if (self.pre) |pre| try std.fmt.format(out_stream, "-{s}", .{pre});
if (self.build) |build| try std.fmt.format(out_stream, "+{s}", .{build});
/// Writes the version as `major.minor.patch`, then `-pre` and `+build`
/// when those optional components are present (SemVer 2.0.0 rendering).
pub fn format(self: Version, w: *std.io.Writer) std.io.Writer.Error!void {
    try w.print("{d}.{d}.{d}", .{ self.major, self.minor, self.patch });
    if (self.pre) |pre_release| {
        try w.writeByte('-');
        try w.writeAll(pre_release);
    }
    if (self.build) |build_metadata| {
        try w.writeByte('+');
        try w.writeAll(build_metadata);
    }
}
const expect = std.testing.expect;
@ -202,7 +195,7 @@ test format {
"1.0.0+0.build.1-rc.10000aaa-kk-0.1",
"5.4.0-1018-raspi",
"5.7.123",
}) |valid| try std.testing.expectFmt(valid, "{}", .{try parse(valid)});
}) |valid| try std.testing.expectFmt(valid, "{f}", .{try parse(valid)});
// Invalid version strings should be rejected.
for ([_][]const u8{
@ -269,12 +262,12 @@ test format {
// Valid version string that may overflow.
const big_valid = "99999999999999999999999.999999999999999999.99999999999999999";
if (parse(big_valid)) |ver| {
try std.testing.expectFmt(big_valid, "{}", .{ver});
try std.testing.expectFmt(big_valid, "{f}", .{ver});
} else |err| try expect(err == error.Overflow);
// Invalid version string that may overflow.
const big_invalid = "99999999999999999999999.999999999999999999.99999999999999999----RC-SNAPSHOT.12.09.1--------------------------------..12";
if (parse(big_invalid)) |ver| std.debug.panic("expected error, found {}", .{ver}) else |_| {}
if (parse(big_invalid)) |ver| std.debug.panic("expected error, found {f}", .{ver}) else |_| {}
}
test "precedence" {

View File

@ -301,29 +301,13 @@ pub const Os = struct {
/// This function is defined to serialize a Zig source code representation of this
/// type, that, when parsed, will deserialize into the same data.
pub fn format(
ver: WindowsVersion,
comptime fmt_str: []const u8,
_: std.fmt.FormatOptions,
writer: anytype,
) @TypeOf(writer).Error!void {
const maybe_name = std.enums.tagName(WindowsVersion, ver);
if (comptime std.mem.eql(u8, fmt_str, "s")) {
if (maybe_name) |name|
try writer.print(".{s}", .{name})
else
try writer.print(".{d}", .{@intFromEnum(ver)});
} else if (comptime std.mem.eql(u8, fmt_str, "c")) {
if (maybe_name) |name|
try writer.print(".{s}", .{name})
else
try writer.print("@enumFromInt(0x{X:0>8})", .{@intFromEnum(ver)});
} else if (fmt_str.len == 0) {
if (maybe_name) |name|
try writer.print("WindowsVersion.{s}", .{name})
else
try writer.print("WindowsVersion(0x{X:0>8})", .{@intFromEnum(ver)});
} else std.fmt.invalidFmtError(fmt_str, ver);
pub fn format(wv: WindowsVersion, w: *std.io.Writer) std.io.Writer.Error!void {
    if (std.enums.tagName(WindowsVersion, wv)) |name| {
        var vecs: [2][]const u8 = .{ ".", name };
        return w.writeVecAll(&vecs);
    } else {
        // `{X}` is an integer specifier; the enum value must be converted
        // explicitly (the previous implementation used `@intFromEnum`).
        return w.print("@enumFromInt(0x{X:0>8})", .{@intFromEnum(wv)});
    }
}
};

View File

@ -394,25 +394,24 @@ pub fn canDetectLibC(self: Query) bool {
/// Formats a version with the patch component omitted if it is zero,
/// unlike SemanticVersion.format which formats all its version components regardless.
fn formatVersion(version: SemanticVersion, writer: anytype) !void {
fn formatVersion(version: SemanticVersion, gpa: Allocator, list: *std.ArrayListUnmanaged(u8)) !void {
if (version.patch == 0) {
try writer.print("{d}.{d}", .{ version.major, version.minor });
try list.print(gpa, "{d}.{d}", .{ version.major, version.minor });
} else {
try writer.print("{d}.{d}.{d}", .{ version.major, version.minor, version.patch });
try list.print(gpa, "{d}.{d}.{d}", .{ version.major, version.minor, version.patch });
}
}
pub fn zigTriple(self: Query, allocator: Allocator) Allocator.Error![]u8 {
if (self.isNativeTriple())
return allocator.dupe(u8, "native");
pub fn zigTriple(self: Query, gpa: Allocator) Allocator.Error![]u8 {
if (self.isNativeTriple()) return gpa.dupe(u8, "native");
const arch_name = if (self.cpu_arch) |arch| @tagName(arch) else "native";
const os_name = if (self.os_tag) |os_tag| @tagName(os_tag) else "native";
var result = std.ArrayList(u8).init(allocator);
defer result.deinit();
var result: std.ArrayListUnmanaged(u8) = .empty;
defer result.deinit(gpa);
try result.writer().print("{s}-{s}", .{ arch_name, os_name });
try result.print(gpa, "{s}-{s}", .{ arch_name, os_name });
// The zig target syntax does not allow specifying a max os version with no min, so
// if either are present, we need the min.
@ -420,11 +419,11 @@ pub fn zigTriple(self: Query, allocator: Allocator) Allocator.Error![]u8 {
switch (min) {
.none => {},
.semver => |v| {
try result.writer().writeAll(".");
try formatVersion(v, result.writer());
try result.appendSlice(gpa, ".");
try formatVersion(v, gpa, &result);
},
.windows => |v| {
try result.writer().print("{s}", .{v});
try result.print(gpa, "{d}", .{v});
},
}
}
@ -432,39 +431,39 @@ pub fn zigTriple(self: Query, allocator: Allocator) Allocator.Error![]u8 {
switch (max) {
.none => {},
.semver => |v| {
try result.writer().writeAll("...");
try formatVersion(v, result.writer());
try result.appendSlice(gpa, "...");
try formatVersion(v, gpa, &result);
},
.windows => |v| {
// This is counting on a custom format() function defined on `WindowsVersion`
// to add a prefix '.' and make there be a total of three dots.
try result.writer().print("..{s}", .{v});
try result.print(gpa, "..{d}", .{v});
},
}
}
if (self.glibc_version) |v| {
const name = if (self.abi) |abi| @tagName(abi) else "gnu";
try result.ensureUnusedCapacity(name.len + 2);
try result.ensureUnusedCapacity(gpa, name.len + 2);
result.appendAssumeCapacity('-');
result.appendSliceAssumeCapacity(name);
result.appendAssumeCapacity('.');
try formatVersion(v, result.writer());
try formatVersion(v, gpa, &result);
} else if (self.android_api_level) |lvl| {
const name = if (self.abi) |abi| @tagName(abi) else "android";
try result.ensureUnusedCapacity(name.len + 2);
try result.ensureUnusedCapacity(gpa, name.len + 2);
result.appendAssumeCapacity('-');
result.appendSliceAssumeCapacity(name);
result.appendAssumeCapacity('.');
try result.writer().print("{d}", .{lvl});
try result.print(gpa, "{d}", .{lvl});
} else if (self.abi) |abi| {
const name = @tagName(abi);
try result.ensureUnusedCapacity(name.len + 1);
try result.ensureUnusedCapacity(gpa, name.len + 1);
result.appendAssumeCapacity('-');
result.appendSliceAssumeCapacity(name);
}
return result.toOwnedSlice();
return result.toOwnedSlice(gpa);
}
/// Renders the query into a textual representation that can be parsed via the

View File

@ -167,7 +167,7 @@ pub fn setName(self: Thread, name: []const u8) SetNameError!void {
const file = try std.fs.cwd().openFile(path, .{ .mode = .write_only });
defer file.close();
try file.writer().writeAll(name);
try file.deprecatedWriter().writeAll(name);
return;
},
.windows => {
@ -281,7 +281,7 @@ pub fn getName(self: Thread, buffer_ptr: *[max_name_len:0]u8) GetNameError!?[]co
const file = try std.fs.cwd().openFile(path, .{});
defer file.close();
const data_len = try file.reader().readAll(buffer_ptr[0 .. max_name_len + 1]);
const data_len = try file.deprecatedReader().readAll(buffer_ptr[0 .. max_name_len + 1]);
return if (data_len >= 1) buffer[0 .. data_len - 1] else null;
},
@ -1163,7 +1163,7 @@ const LinuxThreadImpl = struct {
fn getCurrentId() Id {
return tls_thread_id orelse {
const tid = @as(u32, @bitCast(linux.gettid()));
const tid: u32 = @bitCast(linux.gettid());
tls_thread_id = tid;
return tid;
};

View File

@ -1,6 +1,10 @@
//! Uniform Resource Identifier (URI) parsing roughly adhering to <https://tools.ietf.org/html/rfc3986>.
//! Does not do perfect grammar and character class checking, but should be robust against URIs in the wild.
const std = @import("std.zig");
const testing = std.testing;
const Uri = @This();
scheme: []const u8,
user: ?Component = null,
password: ?Component = null,
@ -34,27 +38,15 @@ pub const Component = union(enum) {
return switch (component) {
.raw => |raw| raw,
.percent_encoded => |percent_encoded| if (std.mem.indexOfScalar(u8, percent_encoded, '%')) |_|
try std.fmt.allocPrint(arena, "{raw}", .{component})
try std.fmt.allocPrint(arena, "{f}", .{std.fmt.alt(component, .formatRaw)})
else
percent_encoded,
};
}
pub fn format(
component: Component,
comptime fmt_str: []const u8,
_: std.fmt.FormatOptions,
writer: anytype,
) @TypeOf(writer).Error!void {
if (fmt_str.len == 0) {
try writer.print("std.Uri.Component{{ .{s} = \"{}\" }}", .{
@tagName(component),
std.zig.fmtEscapes(switch (component) {
.raw, .percent_encoded => |string| string,
}),
});
} else if (comptime std.mem.eql(u8, fmt_str, "raw")) switch (component) {
.raw => |raw| try writer.writeAll(raw),
pub fn formatRaw(component: Component, w: *std.io.Writer) std.io.Writer.Error!void {
switch (component) {
.raw => |raw| try w.writeAll(raw),
.percent_encoded => |percent_encoded| {
var start: usize = 0;
var index: usize = 0;
@ -63,51 +55,75 @@ pub const Component = union(enum) {
if (percent_encoded.len - index < 2) continue;
const percent_encoded_char =
std.fmt.parseInt(u8, percent_encoded[index..][0..2], 16) catch continue;
try writer.print("{s}{c}", .{
try w.print("{s}{c}", .{
percent_encoded[start..percent],
percent_encoded_char,
});
start = percent + 3;
index = percent + 3;
}
try writer.writeAll(percent_encoded[start..]);
try w.writeAll(percent_encoded[start..]);
},
} else if (comptime std.mem.eql(u8, fmt_str, "%")) switch (component) {
.raw => |raw| try percentEncode(writer, raw, isUnreserved),
.percent_encoded => |percent_encoded| try writer.writeAll(percent_encoded),
} else if (comptime std.mem.eql(u8, fmt_str, "user")) switch (component) {
.raw => |raw| try percentEncode(writer, raw, isUserChar),
.percent_encoded => |percent_encoded| try writer.writeAll(percent_encoded),
} else if (comptime std.mem.eql(u8, fmt_str, "password")) switch (component) {
.raw => |raw| try percentEncode(writer, raw, isPasswordChar),
.percent_encoded => |percent_encoded| try writer.writeAll(percent_encoded),
} else if (comptime std.mem.eql(u8, fmt_str, "host")) switch (component) {
.raw => |raw| try percentEncode(writer, raw, isHostChar),
.percent_encoded => |percent_encoded| try writer.writeAll(percent_encoded),
} else if (comptime std.mem.eql(u8, fmt_str, "path")) switch (component) {
.raw => |raw| try percentEncode(writer, raw, isPathChar),
.percent_encoded => |percent_encoded| try writer.writeAll(percent_encoded),
} else if (comptime std.mem.eql(u8, fmt_str, "query")) switch (component) {
.raw => |raw| try percentEncode(writer, raw, isQueryChar),
.percent_encoded => |percent_encoded| try writer.writeAll(percent_encoded),
} else if (comptime std.mem.eql(u8, fmt_str, "fragment")) switch (component) {
.raw => |raw| try percentEncode(writer, raw, isFragmentChar),
.percent_encoded => |percent_encoded| try writer.writeAll(percent_encoded),
} else @compileError("invalid format string '" ++ fmt_str ++ "'");
}
}
pub fn percentEncode(
writer: anytype,
raw: []const u8,
comptime isValidChar: fn (u8) bool,
) @TypeOf(writer).Error!void {
/// Percent-encodes `.raw` text using the unreserved character class;
/// `.percent_encoded` text is assumed already escaped and passes through.
pub fn formatEscaped(component: Component, w: *std.io.Writer) std.io.Writer.Error!void {
    return switch (component) {
        .raw => |bytes| percentEncode(w, bytes, isUnreserved),
        .percent_encoded => |encoded| w.writeAll(encoded),
    };
}
/// Writes the component escaped for the user-info position of a URI;
/// already-encoded text passes through unchanged.
pub fn formatUser(component: Component, w: *std.io.Writer) std.io.Writer.Error!void {
    return switch (component) {
        .raw => |bytes| percentEncode(w, bytes, isUserChar),
        .percent_encoded => |encoded| w.writeAll(encoded),
    };
}
/// Writes the component escaped for the password position of a URI;
/// already-encoded text passes through unchanged.
pub fn formatPassword(component: Component, w: *std.io.Writer) std.io.Writer.Error!void {
    return switch (component) {
        .raw => |bytes| percentEncode(w, bytes, isPasswordChar),
        .percent_encoded => |encoded| w.writeAll(encoded),
    };
}
/// Writes the component escaped for the host position of a URI;
/// already-encoded text passes through unchanged.
pub fn formatHost(component: Component, w: *std.io.Writer) std.io.Writer.Error!void {
    return switch (component) {
        .raw => |bytes| percentEncode(w, bytes, isHostChar),
        .percent_encoded => |encoded| w.writeAll(encoded),
    };
}
/// Writes the component escaped for the path position of a URI;
/// already-encoded text passes through unchanged.
pub fn formatPath(component: Component, w: *std.io.Writer) std.io.Writer.Error!void {
    return switch (component) {
        .raw => |bytes| percentEncode(w, bytes, isPathChar),
        .percent_encoded => |encoded| w.writeAll(encoded),
    };
}
/// Writes the component escaped for the query position of a URI;
/// already-encoded text passes through unchanged.
pub fn formatQuery(component: Component, w: *std.io.Writer) std.io.Writer.Error!void {
    return switch (component) {
        .raw => |bytes| percentEncode(w, bytes, isQueryChar),
        .percent_encoded => |encoded| w.writeAll(encoded),
    };
}
/// Writes the component escaped for the fragment position of a URI;
/// already-encoded text passes through unchanged.
pub fn formatFragment(component: Component, w: *std.io.Writer) std.io.Writer.Error!void {
    return switch (component) {
        .raw => |bytes| percentEncode(w, bytes, isFragmentChar),
        .percent_encoded => |encoded| w.writeAll(encoded),
    };
}
pub fn percentEncode(w: *std.io.Writer, raw: []const u8, comptime isValidChar: fn (u8) bool) std.io.Writer.Error!void {
var start: usize = 0;
for (raw, 0..) |char, index| {
if (isValidChar(char)) continue;
try writer.print("{s}%{X:0>2}", .{ raw[start..index], char });
try w.print("{s}%{X:0>2}", .{ raw[start..index], char });
start = index + 1;
}
try writer.writeAll(raw[start..]);
try w.writeAll(raw[start..]);
}
};
@ -224,91 +240,91 @@ pub fn parseAfterScheme(scheme: []const u8, text: []const u8) ParseError!Uri {
return uri;
}
pub const WriteToStreamOptions = struct {
/// When true, include the scheme part of the URI.
scheme: bool = false,
/// Default `{f}` rendering: writes every URI component, equivalent to
/// `writeToStream` with all flags enabled.
pub fn format(uri: *const Uri, writer: *std.io.Writer) std.io.Writer.Error!void {
    return uri.writeToStream(writer, .all);
}
/// When true, include the user and password part of the URI. Ignored if `authority` is false.
authentication: bool = false,
/// When true, include the authority part of the URI.
authority: bool = false,
/// When true, include the path part of the URI.
path: bool = false,
/// When true, include the query part of the URI. Ignored when `path` is false.
query: bool = false,
/// When true, include the fragment part of the URI. Ignored when `path` is false.
fragment: bool = false,
/// When true, include the port part of the URI. Ignored when `port` is null.
port: bool = true,
};
pub fn writeToStream(
uri: Uri,
options: WriteToStreamOptions,
writer: anytype,
) @TypeOf(writer).Error!void {
if (options.scheme) {
pub fn writeToStream(uri: *const Uri, writer: *std.io.Writer, flags: Format.Flags) std.io.Writer.Error!void {
if (flags.scheme) {
try writer.print("{s}:", .{uri.scheme});
if (options.authority and uri.host != null) {
if (flags.authority and uri.host != null) {
try writer.writeAll("//");
}
}
if (options.authority) {
if (options.authentication and uri.host != null) {
if (flags.authority) {
if (flags.authentication and uri.host != null) {
if (uri.user) |user| {
try writer.print("{user}", .{user});
try user.formatUser(writer);
if (uri.password) |password| {
try writer.print(":{password}", .{password});
try writer.writeByte(':');
try password.formatPassword(writer);
}
try writer.writeByte('@');
}
}
if (uri.host) |host| {
try writer.print("{host}", .{host});
if (options.port) {
try host.formatHost(writer);
if (flags.port) {
if (uri.port) |port| try writer.print(":{d}", .{port});
}
}
}
if (options.path) {
try writer.print("{path}", .{
if (uri.path.isEmpty()) Uri.Component{ .percent_encoded = "/" } else uri.path,
});
if (options.query) {
if (uri.query) |query| try writer.print("?{query}", .{query});
if (flags.path) {
const uri_path: Component = if (uri.path.isEmpty()) .{ .percent_encoded = "/" } else uri.path;
try uri_path.formatPath(writer);
if (flags.query) {
if (uri.query) |query| {
try writer.writeByte('?');
try query.formatQuery(writer);
}
}
if (options.fragment) {
if (uri.fragment) |fragment| try writer.print("#{fragment}", .{fragment});
if (flags.fragment) {
if (uri.fragment) |fragment| {
try writer.writeByte('#');
try fragment.formatFragment(writer);
}
}
}
}
pub fn format(
uri: Uri,
comptime fmt_str: []const u8,
_: std.fmt.FormatOptions,
writer: anytype,
) @TypeOf(writer).Error!void {
const scheme = comptime std.mem.indexOfScalar(u8, fmt_str, ';') != null or fmt_str.len == 0;
const authentication = comptime std.mem.indexOfScalar(u8, fmt_str, '@') != null or fmt_str.len == 0;
const authority = comptime std.mem.indexOfScalar(u8, fmt_str, '+') != null or fmt_str.len == 0;
const path = comptime std.mem.indexOfScalar(u8, fmt_str, '/') != null or fmt_str.len == 0;
const query = comptime std.mem.indexOfScalar(u8, fmt_str, '?') != null or fmt_str.len == 0;
const fragment = comptime std.mem.indexOfScalar(u8, fmt_str, '#') != null or fmt_str.len == 0;
pub const Format = struct {
uri: *const Uri,
flags: Flags = .{},
return writeToStream(uri, .{
.scheme = scheme,
.authentication = authentication,
.authority = authority,
.path = path,
.query = query,
.fragment = fragment,
}, writer);
pub const Flags = struct {
/// When true, include the scheme part of the URI.
scheme: bool = false,
/// When true, include the user and password part of the URI. Ignored if `authority` is false.
authentication: bool = false,
/// When true, include the authority part of the URI.
authority: bool = false,
/// When true, include the path part of the URI.
path: bool = false,
/// When true, include the query part of the URI. Ignored when `path` is false.
query: bool = false,
/// When true, include the fragment part of the URI. Ignored when `path` is false.
fragment: bool = false,
/// When true, include the port part of the URI. Ignored when `port` is null.
port: bool = true,
pub const all: Flags = .{
.scheme = true,
.authentication = true,
.authority = true,
.path = true,
.query = true,
.fragment = true,
.port = true,
};
};
/// Formatter entry point: renders the stored URI with the stored flag set.
pub fn default(f: Format, writer: *std.io.Writer) std.io.Writer.Error!void {
    return f.uri.writeToStream(writer, f.flags);
}
};
/// Returns a formatter that renders only the URI parts selected by `flags`
/// when printed with `{f}`.
pub fn fmt(uri: *const Uri, flags: Format.Flags) std.fmt.Formatter(Format, Format.default) {
    const data: Format = .{ .uri = uri, .flags = flags };
    return .{ .data = data };
}
/// Parses the URI or returns an error.
@ -445,14 +461,13 @@ test remove_dot_segments {
/// 5.2.3. Merge Paths
fn merge_paths(base: Component, new: []u8, aux_buf: *[]u8) error{NoSpaceLeft}!Component {
var aux = std.io.fixedBufferStream(aux_buf.*);
var aux: std.io.Writer = .fixed(aux_buf.*);
if (!base.isEmpty()) {
try aux.writer().print("{path}", .{base});
aux.pos = std.mem.lastIndexOfScalar(u8, aux.getWritten(), '/') orelse
return remove_dot_segments(new);
base.formatPath(&aux) catch return error.NoSpaceLeft;
aux.end = std.mem.lastIndexOfScalar(u8, aux.buffered(), '/') orelse return remove_dot_segments(new);
}
try aux.writer().print("/{s}", .{new});
const merged_path = remove_dot_segments(aux.getWritten());
aux.print("/{s}", .{new}) catch return error.NoSpaceLeft;
const merged_path = remove_dot_segments(aux.buffered());
aux_buf.* = aux_buf.*[merged_path.percent_encoded.len..];
return merged_path;
}
@ -812,8 +827,11 @@ test "Special test" {
test "URI percent encoding" {
try std.testing.expectFmt(
"%5C%C3%B6%2F%20%C3%A4%C3%B6%C3%9F%20~~.adas-https%3A%2F%2Fcanvas%3A123%2F%23ads%26%26sad",
"{%}",
.{Component{ .raw = "\\ö/ äöß ~~.adas-https://canvas:123/#ads&&sad" }},
"{f}",
.{std.fmt.alt(
@as(Component, .{ .raw = "\\ö/ äöß ~~.adas-https://canvas:123/#ads&&sad" }),
.formatEscaped,
)},
);
}
@ -822,7 +840,10 @@ test "URI percent decoding" {
const expected = "\\ö/ äöß ~~.adas-https://canvas:123/#ads&&sad";
var input = "%5C%C3%B6%2F%20%C3%A4%C3%B6%C3%9F%20~~.adas-https%3A%2F%2Fcanvas%3A123%2F%23ads%26%26sad".*;
try std.testing.expectFmt(expected, "{raw}", .{Component{ .percent_encoded = &input }});
try std.testing.expectFmt(expected, "{f}", .{std.fmt.alt(
@as(Component, .{ .percent_encoded = &input }),
.formatRaw,
)});
var output: [expected.len]u8 = undefined;
try std.testing.expectEqualStrings(percentDecodeBackwards(&output, &input), expected);
@ -834,7 +855,10 @@ test "URI percent decoding" {
const expected = "/abc%";
var input = expected.*;
try std.testing.expectFmt(expected, "{raw}", .{Component{ .percent_encoded = &input }});
try std.testing.expectFmt(expected, "{f}", .{std.fmt.alt(
@as(Component, .{ .percent_encoded = &input }),
.formatRaw,
)});
var output: [expected.len]u8 = undefined;
try std.testing.expectEqualStrings(percentDecodeBackwards(&output, &input), expected);
@ -848,7 +872,9 @@ test "URI query encoding" {
const parsed = try Uri.parse(address);
// format the URI to percent encode it
try std.testing.expectFmt("/?response-content-type=application%2Foctet-stream", "{/?}", .{parsed});
try std.testing.expectFmt("/?response-content-type=application%2Foctet-stream", "{f}", .{
parsed.fmt(.{ .path = true, .query = true }),
});
}
test "format" {
@ -862,7 +888,9 @@ test "format" {
.query = null,
.fragment = null,
};
try std.testing.expectFmt("file:/foo/bar/baz", "{;/?#}", .{uri});
try std.testing.expectFmt("file:/foo/bar/baz", "{f}", .{
uri.fmt(.{ .scheme = true, .path = true, .query = true, .fragment = true }),
});
}
test "URI malformed input" {
@ -870,7 +898,3 @@ test "URI malformed input" {
try std.testing.expectError(error.InvalidFormat, std.Uri.parse("http://]@["));
try std.testing.expectError(error.InvalidFormat, std.Uri.parse("http://lo]s\x85hc@[/8\x10?0Q"));
}
const std = @import("std.zig");
const testing = std.testing;
const Uri = @This();

View File

@ -338,11 +338,14 @@ pub fn ArrayListAligned(comptime T: type, comptime alignment: ?mem.Alignment) ty
@memcpy(self.items[old_len..][0..items.len], items);
}
pub const Writer = if (T != u8)
@compileError("The Writer interface is only defined for ArrayList(u8) " ++
"but the given type is ArrayList(" ++ @typeName(T) ++ ")")
else
std.io.Writer(*Self, Allocator.Error, appendWrite);
/// Appends formatted output to the list, growing it as needed.
/// Only valid for element type `u8` (enforced by the unmanaged `print`).
pub fn print(self: *Self, comptime fmt: []const u8, args: anytype) error{OutOfMemory}!void {
    const gpa = self.allocator;
    // Temporarily take ownership of the storage; the defer restores the
    // managed view even when the unmanaged print returns an error.
    var list = self.moveToUnmanaged();
    defer self.* = list.toManaged(gpa);
    try list.print(gpa, fmt, args);
}
pub const Writer = if (T != u8) void else std.io.GenericWriter(*Self, Allocator.Error, appendWrite);
/// Initializes a Writer which will append to the list.
pub fn writer(self: *Self) Writer {
@ -350,14 +353,14 @@ pub fn ArrayListAligned(comptime T: type, comptime alignment: ?mem.Alignment) ty
}
/// Same as `append` except it returns the number of bytes written, which is always the same
/// as `m.len`. The purpose of this function existing is to match `std.io.Writer` API.
/// as `m.len`. The purpose of this function existing is to match `std.io.GenericWriter` API.
/// Invalidates element pointers if additional memory is needed.
fn appendWrite(self: *Self, m: []const u8) Allocator.Error!usize {
try self.appendSlice(m);
return m.len;
}
pub const FixedWriter = std.io.Writer(*Self, Allocator.Error, appendWriteFixed);
pub const FixedWriter = std.io.GenericWriter(*Self, Allocator.Error, appendWriteFixed);
/// Initializes a Writer which will append to the list but will return
/// `error.OutOfMemory` rather than increasing capacity.
@ -365,7 +368,7 @@ pub fn ArrayListAligned(comptime T: type, comptime alignment: ?mem.Alignment) ty
return .{ .context = self };
}
/// The purpose of this function existing is to match `std.io.Writer` API.
/// The purpose of this function existing is to match `std.io.GenericWriter` API.
fn appendWriteFixed(self: *Self, m: []const u8) error{OutOfMemory}!usize {
const available_capacity = self.capacity - self.items.len;
if (m.len > available_capacity)
@ -933,40 +936,56 @@ pub fn ArrayListAlignedUnmanaged(comptime T: type, comptime alignment: ?mem.Alig
@memcpy(self.items[old_len..][0..items.len], items);
}
pub fn print(self: *Self, gpa: Allocator, comptime fmt: []const u8, args: anytype) error{OutOfMemory}!void {
comptime assert(T == u8);
try self.ensureUnusedCapacity(gpa, fmt.len);
var aw: std.io.Writer.Allocating = .fromArrayList(gpa, self);
defer self.* = aw.toArrayList();
return aw.writer.print(fmt, args) catch |err| switch (err) {
error.WriteFailed => return error.OutOfMemory,
};
}
/// Appends formatted output into the list's existing spare capacity.
/// Asserts at comptime that the element type is `u8`; invokes checked
/// illegal behavior if the formatted output does not fit.
pub fn printAssumeCapacity(self: *Self, comptime fmt: []const u8, args: anytype) void {
    comptime assert(T == u8);
    var fixed: std.io.Writer = .fixed(self.unusedCapacitySlice());
    // A fixed writer cannot grow, so overflowing capacity is a bug here.
    fixed.print(fmt, args) catch unreachable;
    self.items.len += fixed.end;
}
/// Deprecated in favor of `print` or `std.io.Writer.Allocating`.
pub const WriterContext = struct {
self: *Self,
allocator: Allocator,
};
/// Deprecated in favor of `print` or `std.io.Writer.Allocating`.
pub const Writer = if (T != u8)
@compileError("The Writer interface is only defined for ArrayList(u8) " ++
"but the given type is ArrayList(" ++ @typeName(T) ++ ")")
else
std.io.Writer(WriterContext, Allocator.Error, appendWrite);
std.io.GenericWriter(WriterContext, Allocator.Error, appendWrite);
/// Initializes a Writer which will append to the list.
/// Deprecated in favor of `print` or `std.io.Writer.Allocating`.
pub fn writer(self: *Self, gpa: Allocator) Writer {
return .{ .context = .{ .self = self, .allocator = gpa } };
}
/// Same as `append` except it returns the number of bytes written,
/// which is always the same as `m.len`. The purpose of this function
/// existing is to match `std.io.Writer` API.
/// Invalidates element pointers if additional memory is needed.
/// Deprecated in favor of `print` or `std.io.Writer.Allocating`.
fn appendWrite(context: WriterContext, m: []const u8) Allocator.Error!usize {
try context.self.appendSlice(context.allocator, m);
return m.len;
}
pub const FixedWriter = std.io.Writer(*Self, Allocator.Error, appendWriteFixed);
/// Deprecated in favor of `print` or `std.io.Writer.Allocating`.
pub const FixedWriter = std.io.GenericWriter(*Self, Allocator.Error, appendWriteFixed);
/// Initializes a Writer which will append to the list but will return
/// `error.OutOfMemory` rather than increasing capacity.
/// Deprecated in favor of `print` or `std.io.Writer.Allocating`.
pub fn fixedWriter(self: *Self) FixedWriter {
return .{ .context = self };
}
/// The purpose of this function existing is to match `std.io.Writer` API.
/// Deprecated in favor of `print` or `std.io.Writer.Allocating`.
fn appendWriteFixed(self: *Self, m: []const u8) error{OutOfMemory}!usize {
const available_capacity = self.capacity - self.items.len;
if (m.len > available_capacity)

View File

@ -10,6 +10,10 @@
const std = @import("std");
pub const lowercase = "abcdefghijklmnopqrstuvwxyz";
pub const uppercase = "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
pub const letters = lowercase ++ uppercase;
/// The C0 control codes of the ASCII encoding.
///
/// See also: https://en.wikipedia.org/wiki/C0_and_C1_control_codes and `isControl`
@ -435,3 +439,44 @@ pub fn orderIgnoreCase(lhs: []const u8, rhs: []const u8) std.math.Order {
pub fn lessThanIgnoreCase(lhs: []const u8, rhs: []const u8) bool {
return orderIgnoreCase(lhs, rhs) == .lt;
}
/// Formatter state for `hexEscape`: prints printable ASCII bytes verbatim
/// and everything else as a `\xNN` escape using the chosen hex charset.
pub const HexEscape = struct {
    bytes: []const u8,
    charset: *const [16]u8,

    pub const upper_charset = "0123456789ABCDEF";
    pub const lower_charset = "0123456789abcdef";

    pub fn format(se: HexEscape, w: *std.io.Writer) std.io.Writer.Error!void {
        for (se.bytes) |byte| {
            if (std.ascii.isPrint(byte)) {
                try w.writeByte(byte);
            } else {
                // High nibble then low nibble, e.g. 0xFF -> "\xff".
                const escaped: [4]u8 = .{ '\\', 'x', se.charset[byte >> 4], se.charset[byte & 15] };
                try w.writeAll(&escaped);
            }
        }
    }
};
/// Replaces non-ASCII bytes with hex escapes.
pub fn hexEscape(bytes: []const u8, case: std.fmt.Case) std.fmt.Formatter(HexEscape, HexEscape.format) {
return .{ .data = .{ .bytes = bytes, .charset = switch (case) {
.lower => HexEscape.lower_charset,
.upper => HexEscape.upper_charset,
} } };
}
// Printable input passes through untouched; 0xFF is escaped with the
// requested hex-digit case.
test hexEscape {
try std.testing.expectFmt("abc 123", "{f}", .{hexEscape("abc 123", .lower)});
try std.testing.expectFmt("ab\\xffc", "{f}", .{hexEscape("ab\xffc", .lower)});
try std.testing.expectFmt("abc 123", "{f}", .{hexEscape("abc 123", .upper)});
try std.testing.expectFmt("ab\\xFFc", "{f}", .{hexEscape("ab\xffc", .upper)});
}

View File

@ -108,7 +108,7 @@ pub const Base64Encoder = struct {
}
}
// dest must be compatible with std.io.Writer's writeAll interface
// dest must be compatible with std.io.GenericWriter's writeAll interface
pub fn encodeWriter(encoder: *const Base64Encoder, dest: anytype, source: []const u8) !void {
var chunker = window(u8, source, 3, 3);
while (chunker.next()) |chunk| {
@ -118,8 +118,8 @@ pub const Base64Encoder = struct {
}
}
// destWriter must be compatible with std.io.Writer's writeAll interface
// sourceReader must be compatible with std.io.Reader's read interface
// destWriter must be compatible with std.io.GenericWriter's writeAll interface
// sourceReader must be compatible with `std.io.GenericReader` read interface
pub fn encodeFromReaderToWriter(encoder: *const Base64Encoder, destWriter: anytype, sourceReader: anytype) !void {
while (true) {
var tempSource: [3]u8 = undefined;

View File

@ -277,7 +277,7 @@ pub fn BoundedArrayAligned(
@compileError("The Writer interface is only defined for BoundedArray(u8, ...) " ++
"but the given type is BoundedArray(" ++ @typeName(T) ++ ", ...)")
else
std.io.Writer(*Self, error{Overflow}, appendWrite);
std.io.GenericWriter(*Self, error{Overflow}, appendWrite);
/// Initializes a writer which will write into the array.
pub fn writer(self: *Self) Writer {
@ -285,7 +285,7 @@ pub fn BoundedArrayAligned(
}
/// Same as `appendSlice` except it returns the number of bytes written, which is always the same
/// as `m.len`. The purpose of this function existing is to match `std.io.Writer` API.
/// as `m.len`. The purpose of this function existing is to match `std.io.GenericWriter` API.
fn appendWrite(self: *Self, m: []const u8) error{Overflow}!usize {
try self.appendSlice(m);
return m.len;

View File

@ -34,24 +34,16 @@ pub const StackTrace = struct {
index: usize,
instruction_addresses: []usize,
pub fn format(
self: StackTrace,
comptime fmt: []const u8,
options: std.fmt.FormatOptions,
writer: anytype,
) !void {
if (fmt.len != 0) std.fmt.invalidFmtError(fmt, self);
pub fn format(self: StackTrace, writer: *std.io.Writer) std.io.Writer.Error!void {
// TODO: re-evaluate whether to use format() methods at all.
// Until then, avoid an error when using GeneralPurposeAllocator with WebAssembly
// where it tries to call detectTTYConfig here.
if (builtin.os.tag == .freestanding) return;
_ = options;
const debug_info = std.debug.getSelfDebugInfo() catch |err| {
return writer.print("\nUnable to print stack trace: Unable to open debug info: {s}\n", .{@errorName(err)});
};
const tty_config = std.io.tty.detectConfig(std.io.getStdErr());
const tty_config = std.io.tty.detectConfig(std.fs.File.stderr());
try writer.writeAll("\n");
std.debug.writeStackTrace(self, writer, debug_info, tty_config) catch |err| {
try writer.print("Unable to print stack trace: {s}\n", .{@errorName(err)});

View File

@ -16,7 +16,7 @@ pub fn HashedReader(ReaderType: type, HasherType: type) type {
hasher: HasherType,
pub const Error = ReaderType.Error;
pub const Reader = std.io.Reader(*@This(), Error, read);
pub const Reader = std.io.GenericReader(*@This(), Error, read);
pub fn read(self: *@This(), buf: []u8) Error!usize {
const amt = try self.child_reader.read(buf);
@ -43,7 +43,7 @@ pub fn HashedWriter(WriterType: type, HasherType: type) type {
hasher: HasherType,
pub const Error = WriterType.Error;
pub const Writer = std.io.Writer(*@This(), Error, write);
pub const Writer = std.io.GenericWriter(*@This(), Error, write);
pub fn write(self: *@This(), buf: []const u8) Error!usize {
const amt = try self.child_writer.write(buf);

View File

@ -355,7 +355,7 @@ fn Deflate(comptime container: Container, comptime WriterType: type, comptime Bl
// Writer interface
pub const Writer = io.Writer(*Self, Error, write);
pub const Writer = io.GenericWriter(*Self, Error, write);
pub const Error = BlockWriterType.Error;
/// Write `input` of uncompressed data.
@ -512,7 +512,7 @@ fn SimpleCompressor(
// Writer interface
pub const Writer = io.Writer(*Self, Error, write);
pub const Writer = io.GenericWriter(*Self, Error, write);
pub const Error = BlockWriterType.Error;
// Write `input` of uncompressed data.

View File

@ -341,7 +341,7 @@ pub fn Inflate(comptime container: Container, comptime LookaheadType: type, comp
// Reader interface
pub const Reader = std.io.Reader(*Self, Error, read);
pub const Reader = std.io.GenericReader(*Self, Error, read);
/// Returns the number of bytes read. It may be less than buffer.len.
/// If the number of bytes read is 0, it means end of stream.

View File

@ -30,7 +30,7 @@ pub fn Decompress(comptime ReaderType: type) type {
Allocator.Error ||
error{ CorruptInput, EndOfStream, Overflow };
pub const Reader = std.io.Reader(*Self, Error, read);
pub const Reader = std.io.GenericReader(*Self, Error, read);
allocator: Allocator,
in_reader: ReaderType,

View File

@ -34,7 +34,7 @@ pub fn Decompress(comptime ReaderType: type) type {
const Self = @This();
pub const Error = ReaderType.Error || block.Decoder(ReaderType).Error;
pub const Reader = std.io.Reader(*Self, Error, read);
pub const Reader = std.io.GenericReader(*Self, Error, read);
allocator: Allocator,
block_decoder: block.Decoder(ReaderType),

View File

@ -27,7 +27,7 @@ pub fn Decoder(comptime ReaderType: type) type {
ReaderType.Error ||
DecodeError ||
Allocator.Error;
pub const Reader = std.io.Reader(*Self, Error, read);
pub const Reader = std.io.GenericReader(*Self, Error, read);
allocator: Allocator,
inner_reader: ReaderType,

View File

@ -50,7 +50,7 @@ pub fn Decompressor(comptime ReaderType: type) type {
OutOfMemory,
};
pub const Reader = std.io.Reader(*Self, Error, read);
pub const Reader = std.io.GenericReader(*Self, Error, read);
pub fn init(source: ReaderType, options: DecompressorOptions) Self {
return .{

View File

@ -4,7 +4,7 @@ pub const ReversedByteReader = struct {
remaining_bytes: usize,
bytes: []const u8,
const Reader = std.io.Reader(*ReversedByteReader, error{}, readFn);
const Reader = std.io.GenericReader(*ReversedByteReader, error{}, readFn);
pub fn init(bytes: []const u8) ReversedByteReader {
return .{

View File

@ -124,9 +124,9 @@ test "curve25519" {
const p = try Curve25519.basePoint.clampedMul(s);
try p.rejectIdentity();
var buf: [128]u8 = undefined;
try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&p.toBytes())}), "E6F2A4D1C28EE5C7AD0329268255A468AD407D2672824C0C0EB30EA6EF450145");
try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&p.toBytes()}), "E6F2A4D1C28EE5C7AD0329268255A468AD407D2672824C0C0EB30EA6EF450145");
const q = try p.clampedMul(s);
try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&q.toBytes())}), "3614E119FFE55EC55B87D6B19971A9F4CBC78EFE80BEC55B96392BABCC712537");
try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&q.toBytes()}), "3614E119FFE55EC55B87D6B19971A9F4CBC78EFE80BEC55B96392BABCC712537");
try Curve25519.rejectNonCanonical(s);
s[31] |= 0x80;

View File

@ -509,8 +509,8 @@ test "key pair creation" {
_ = try fmt.hexToBytes(seed[0..], "8052030376d47112be7f73ed7a019293dd12ad910b654455798b4667d73de166");
const key_pair = try Ed25519.KeyPair.generateDeterministic(seed);
var buf: [256]u8 = undefined;
try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&key_pair.secret_key.toBytes())}), "8052030376D47112BE7F73ED7A019293DD12AD910B654455798B4667D73DE1662D6F7455D97B4A3A10D7293909D1A4F2058CB9A370E43FA8154BB280DB839083");
try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&key_pair.public_key.toBytes())}), "2D6F7455D97B4A3A10D7293909D1A4F2058CB9A370E43FA8154BB280DB839083");
try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&key_pair.secret_key.toBytes()}), "8052030376D47112BE7F73ED7A019293DD12AD910B654455798B4667D73DE1662D6F7455D97B4A3A10D7293909D1A4F2058CB9A370E43FA8154BB280DB839083");
try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&key_pair.public_key.toBytes()}), "2D6F7455D97B4A3A10D7293909D1A4F2058CB9A370E43FA8154BB280DB839083");
}
test "signature" {
@ -520,7 +520,7 @@ test "signature" {
const sig = try key_pair.sign("test", null);
var buf: [128]u8 = undefined;
try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&sig.toBytes())}), "10A442B4A80CC4225B154F43BEF28D2472CA80221951262EB8E0DF9091575E2687CC486E77263C3418C757522D54F84B0359236ABBBD4ACD20DC297FDCA66808");
try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&sig.toBytes()}), "10A442B4A80CC4225B154F43BEF28D2472CA80221951262EB8E0DF9091575E2687CC486E77263C3418C757522D54F84B0359236ABBBD4ACD20DC297FDCA66808");
try sig.verify("test", key_pair.public_key);
try std.testing.expectError(error.SignatureVerificationFailed, sig.verify("TEST", key_pair.public_key));
}

View File

@ -546,7 +546,7 @@ test "packing/unpacking" {
var b = Edwards25519.basePoint;
const pk = try b.mul(s);
var buf: [128]u8 = undefined;
try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&pk.toBytes())}), "074BC7E0FCBD587FDBC0969444245FADC562809C8F6E97E949AF62484B5B81A6");
try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&pk.toBytes()}), "074BC7E0FCBD587FDBC0969444245FADC562809C8F6E97E949AF62484B5B81A6");
const small_order_ss: [7][32]u8 = .{
.{

View File

@ -175,21 +175,21 @@ pub const Ristretto255 = struct {
test "ristretto255" {
const p = Ristretto255.basePoint;
var buf: [256]u8 = undefined;
try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&p.toBytes())}), "E2F2AE0A6ABC4E71A884A961C500515F58E30B6AA582DD8DB6A65945E08D2D76");
try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&p.toBytes()}), "E2F2AE0A6ABC4E71A884A961C500515F58E30B6AA582DD8DB6A65945E08D2D76");
var r: [Ristretto255.encoded_length]u8 = undefined;
_ = try fmt.hexToBytes(r[0..], "6a493210f7499cd17fecb510ae0cea23a110e8d5b901f8acadd3095c73a3b919");
var q = try Ristretto255.fromBytes(r);
q = q.dbl().add(p);
try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&q.toBytes())}), "E882B131016B52C1D3337080187CF768423EFCCBB517BB495AB812C4160FF44E");
try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&q.toBytes()}), "E882B131016B52C1D3337080187CF768423EFCCBB517BB495AB812C4160FF44E");
const s = [_]u8{15} ++ [_]u8{0} ** 31;
const w = try p.mul(s);
try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&w.toBytes())}), "E0C418F7C8D9C4CDD7395B93EA124F3AD99021BB681DFC3302A9D99A2E53E64E");
try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&w.toBytes()}), "E0C418F7C8D9C4CDD7395B93EA124F3AD99021BB681DFC3302A9D99A2E53E64E");
try std.testing.expect(p.dbl().dbl().dbl().dbl().equivalent(w.add(p)));
const h = [_]u8{69} ** 32 ++ [_]u8{42} ** 32;
const ph = Ristretto255.fromUniform(h);
try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&ph.toBytes())}), "DCCA54E037A4311EFBEEF413ACD21D35276518970B7A61DC88F8587B493D5E19");
try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&ph.toBytes()}), "DCCA54E037A4311EFBEEF413ACD21D35276518970B7A61DC88F8587B493D5E19");
}

View File

@ -850,10 +850,10 @@ test "scalar25519" {
var y = x.toBytes();
try rejectNonCanonical(y);
var buf: [128]u8 = undefined;
try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&y)}), "1E979B917937F3DE71D18077F961F6CEFF01030405060708010203040506070F");
try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&y}), "1E979B917937F3DE71D18077F961F6CEFF01030405060708010203040506070F");
const reduced = reduce(field_order_s);
try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&reduced)}), "0000000000000000000000000000000000000000000000000000000000000000");
try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&reduced}), "0000000000000000000000000000000000000000000000000000000000000000");
}
test "non-canonical scalar25519" {
@ -867,7 +867,7 @@ test "mulAdd overflow check" {
const c: [32]u8 = [_]u8{0xff} ** 32;
const x = mulAdd(a, b, c);
var buf: [128]u8 = undefined;
try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&x)}), "D14DF91389432C25AD60FF9791B9FD1D67BEF517D273ECCE3D9A307C1B419903");
try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&x}), "D14DF91389432C25AD60FF9791B9FD1D67BEF517D273ECCE3D9A307C1B419903");
}
test "scalar field inversion" {

View File

@ -803,7 +803,7 @@ fn AegisMac(comptime T: type) type {
}
pub const Error = error{};
pub const Writer = std.io.Writer(*Mac, Error, write);
pub const Writer = std.io.GenericWriter(*Mac, Error, write);
fn write(self: *Mac, bytes: []const u8) Error!usize {
self.update(bytes);

View File

@ -458,7 +458,7 @@ fn mode(comptime x: comptime_int) comptime_int {
}
pub fn main() !void {
const stdout = std.io.getStdOut().writer();
const stdout = std.fs.File.stdout().deprecatedWriter();
var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
defer arena.deinit();

View File

@ -187,7 +187,7 @@ pub fn Blake2s(comptime out_bits: usize) type {
}
pub const Error = error{};
pub const Writer = std.io.Writer(*Self, Error, write);
pub const Writer = std.io.GenericWriter(*Self, Error, write);
fn write(self: *Self, bytes: []const u8) Error!usize {
self.update(bytes);

View File

@ -476,7 +476,7 @@ pub const Blake3 = struct {
}
pub const Error = error{};
pub const Writer = std.io.Writer(*Blake3, Error, write);
pub const Writer = std.io.GenericWriter(*Blake3, Error, write);
fn write(self: *Blake3, bytes: []const u8) Error!usize {
self.update(bytes);

View File

@ -1145,7 +1145,7 @@ test "xchacha20" {
var c: [m.len]u8 = undefined;
XChaCha20IETF.xor(c[0..], m[0..], 0, key, nonce);
var buf: [2 * c.len]u8 = undefined;
try testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&c)}), "E0A1BCF939654AFDBDC1746EC49832647C19D891F0D1A81FC0C1703B4514BDEA584B512F6908C2C5E9DD18D5CBC1805DE5803FE3B9CA5F193FB8359E91FAB0C3BB40309A292EB1CF49685C65C4A3ADF4F11DB0CD2B6B67FBC174BC2E860E8F769FD3565BBFAD1C845E05A0FED9BE167C240D");
try testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&c}), "E0A1BCF939654AFDBDC1746EC49832647C19D891F0D1A81FC0C1703B4514BDEA584B512F6908C2C5E9DD18D5CBC1805DE5803FE3B9CA5F193FB8359E91FAB0C3BB40309A292EB1CF49685C65C4A3ADF4F11DB0CD2B6B67FBC174BC2E860E8F769FD3565BBFAD1C845E05A0FED9BE167C240D");
}
{
const ad = "Additional data";
@ -1154,7 +1154,7 @@ test "xchacha20" {
var out: [m.len]u8 = undefined;
try XChaCha20Poly1305.decrypt(out[0..], c[0..m.len], c[m.len..].*, ad, nonce, key);
var buf: [2 * c.len]u8 = undefined;
try testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&c)}), "994D2DD32333F48E53650C02C7A2ABB8E018B0836D7175AEC779F52E961780768F815C58F1AA52D211498DB89B9216763F569C9433A6BBFCEFB4D4A49387A4C5207FBB3B5A92B5941294DF30588C6740D39DC16FA1F0E634F7246CF7CDCB978E44347D89381B7A74EB7084F754B90BDE9AAF5A94B8F2A85EFD0B50692AE2D425E234");
try testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&c}), "994D2DD32333F48E53650C02C7A2ABB8E018B0836D7175AEC779F52E961780768F815C58F1AA52D211498DB89B9216763F569C9433A6BBFCEFB4D4A49387A4C5207FBB3B5A92B5941294DF30588C6740D39DC16FA1F0E634F7246CF7CDCB978E44347D89381B7A74EB7084F754B90BDE9AAF5A94B8F2A85EFD0B50692AE2D425E234");
try testing.expectEqualSlices(u8, out[0..], m);
c[0] +%= 1;
try testing.expectError(error.AuthenticationFailed, XChaCha20Poly1305.decrypt(out[0..], c[0..m.len], c[m.len..].*, ad, nonce, key));

View File

@ -45,7 +45,7 @@ pub fn prependSlice(self: *ArrayListReverse, data: []const u8) Error!void {
self.data.ptr = begin;
}
pub const Writer = std.io.Writer(*ArrayListReverse, Error, prependSliceSize);
pub const Writer = std.io.GenericWriter(*ArrayListReverse, Error, prependSliceSize);
/// Warning: This writer writes backwards. `fn print` will NOT work as expected.
pub fn writer(self: *ArrayListReverse) Writer {
return .{ .context = self };

View File

@ -1741,7 +1741,7 @@ test "NIST KAT test" {
for (0..100) |i| {
g.fill(&seed);
try std.fmt.format(fw, "count = {}\n", .{i});
try std.fmt.format(fw, "seed = {s}\n", .{std.fmt.fmtSliceHexUpper(&seed)});
try std.fmt.format(fw, "seed = {X}\n", .{&seed});
var g2 = NistDRBG.init(seed);
// This is not equivalent to g2.fill(kseed[:]). As the reference
@ -1756,16 +1756,16 @@ test "NIST KAT test" {
const e = kp.public_key.encaps(eseed);
const ss2 = try kp.secret_key.decaps(&e.ciphertext);
try testing.expectEqual(ss2, e.shared_secret);
try std.fmt.format(fw, "pk = {s}\n", .{std.fmt.fmtSliceHexUpper(&kp.public_key.toBytes())});
try std.fmt.format(fw, "sk = {s}\n", .{std.fmt.fmtSliceHexUpper(&kp.secret_key.toBytes())});
try std.fmt.format(fw, "ct = {s}\n", .{std.fmt.fmtSliceHexUpper(&e.ciphertext)});
try std.fmt.format(fw, "ss = {s}\n\n", .{std.fmt.fmtSliceHexUpper(&e.shared_secret)});
try std.fmt.format(fw, "pk = {X}\n", .{&kp.public_key.toBytes()});
try std.fmt.format(fw, "sk = {X}\n", .{&kp.secret_key.toBytes()});
try std.fmt.format(fw, "ct = {X}\n", .{&e.ciphertext});
try std.fmt.format(fw, "ss = {X}\n\n", .{&e.shared_secret});
}
var out: [32]u8 = undefined;
f.final(&out);
var outHex: [64]u8 = undefined;
_ = try std.fmt.bufPrint(&outHex, "{s}", .{std.fmt.fmtSliceHexLower(&out)});
_ = try std.fmt.bufPrint(&outHex, "{x}", .{&out});
try testing.expectEqual(outHex, modeHash[1].*);
}
}

View File

@ -269,7 +269,7 @@ pub const Sha1 = struct {
}
pub const Error = error{};
pub const Writer = std.io.Writer(*Self, Error, write);
pub const Writer = std.io.GenericWriter(*Self, Error, write);
fn write(self: *Self, bytes: []const u8) Error!usize {
self.update(bytes);

View File

@ -376,7 +376,7 @@ fn Sha2x32(comptime iv: Iv32, digest_bits: comptime_int) type {
}
pub const Error = error{};
pub const Writer = std.io.Writer(*Self, Error, write);
pub const Writer = std.io.GenericWriter(*Self, Error, write);
fn write(self: *Self, bytes: []const u8) Error!usize {
self.update(bytes);

View File

@ -82,7 +82,7 @@ pub fn Keccak(comptime f: u11, comptime output_bits: u11, comptime default_delim
}
pub const Error = error{};
pub const Writer = std.io.Writer(*Self, Error, write);
pub const Writer = std.io.GenericWriter(*Self, Error, write);
fn write(self: *Self, bytes: []const u8) Error!usize {
self.update(bytes);
@ -193,7 +193,7 @@ fn ShakeLike(comptime security_level: u11, comptime default_delim: u8, comptime
}
pub const Error = error{};
pub const Writer = std.io.Writer(*Self, Error, write);
pub const Writer = std.io.GenericWriter(*Self, Error, write);
fn write(self: *Self, bytes: []const u8) Error!usize {
self.update(bytes);
@ -286,7 +286,7 @@ fn CShakeLike(comptime security_level: u11, comptime default_delim: u8, comptime
}
pub const Error = error{};
pub const Writer = std.io.Writer(*Self, Error, write);
pub const Writer = std.io.GenericWriter(*Self, Error, write);
fn write(self: *Self, bytes: []const u8) Error!usize {
self.update(bytes);
@ -392,7 +392,7 @@ fn KMacLike(comptime security_level: u11, comptime default_delim: u8, comptime r
}
pub const Error = error{};
pub const Writer = std.io.Writer(*Self, Error, write);
pub const Writer = std.io.GenericWriter(*Self, Error, write);
fn write(self: *Self, bytes: []const u8) Error!usize {
self.update(bytes);
@ -484,7 +484,7 @@ fn TupleHashLike(comptime security_level: u11, comptime default_delim: u8, compt
}
pub const Error = error{};
pub const Writer = std.io.Writer(*Self, Error, write);
pub const Writer = std.io.GenericWriter(*Self, Error, write);
fn write(self: *Self, bytes: []const u8) Error!usize {
self.update(bytes);

View File

@ -240,7 +240,7 @@ fn SipHash(comptime T: type, comptime c_rounds: usize, comptime d_rounds: usize)
}
pub const Error = error{};
pub const Writer = std.io.Writer(*Self, Error, write);
pub const Writer = std.io.GenericWriter(*Self, Error, write);
fn write(self: *Self, bytes: []const u8) Error!usize {
self.update(bytes);

View File

@ -1512,11 +1512,11 @@ fn logSecrets(key_log_file: std.fs.File, context: anytype, secrets: anytype) voi
const locked = if (key_log_file.lock(.exclusive)) |_| true else |_| false;
defer if (locked) key_log_file.unlock();
key_log_file.seekFromEnd(0) catch {};
inline for (@typeInfo(@TypeOf(secrets)).@"struct".fields) |field| key_log_file.writer().print("{s}" ++
(if (@hasField(@TypeOf(context), "counter")) "_{d}" else "") ++ " {} {}\n", .{field.name} ++
inline for (@typeInfo(@TypeOf(secrets)).@"struct".fields) |field| key_log_file.deprecatedWriter().print("{s}" ++
(if (@hasField(@TypeOf(context), "counter")) "_{d}" else "") ++ " {x} {x}\n", .{field.name} ++
(if (@hasField(@TypeOf(context), "counter")) .{context.counter} else .{}) ++ .{
std.fmt.fmtSliceHexLower(context.client_random),
std.fmt.fmtSliceHexLower(@field(secrets, field.name)),
context.client_random,
@field(secrets, field.name),
}) catch {};
}

View File

@ -12,6 +12,7 @@ const windows = std.os.windows;
const native_arch = builtin.cpu.arch;
const native_os = builtin.os.tag;
const native_endian = native_arch.endian();
const Writer = std.io.Writer;
pub const MemoryAccessor = @import("debug/MemoryAccessor.zig");
pub const FixedBufferReader = @import("debug/FixedBufferReader.zig");
@ -204,13 +205,26 @@ pub fn unlockStdErr() void {
std.Progress.unlockStdErr();
}
/// Allows the caller to freely write to stderr until `unlockStdErr` is called.
///
/// During the lock, any `std.Progress` information is cleared from the terminal.
///
/// Returns a `Writer` with empty buffer, meaning that it is
/// in fact unbuffered and does not need to be flushed.
pub fn lockStderrWriter(buffer: []u8) *Writer {
    // Delegates to std.Progress, which owns the stderr lock and clears any
    // progress display before handing out the writer (see doc comment above).
    return std.Progress.lockStderrWriter(buffer);
}
/// Releases the stderr lock previously acquired via `lockStderrWriter`.
pub fn unlockStderrWriter() void {
    std.Progress.unlockStderrWriter();
}
/// Print to stderr, unbuffered, and silently returning on failure. Intended
/// for use in "printf debugging." Use `std.log` functions for proper logging.
/// for use in "printf debugging". Use `std.log` functions for proper logging.
pub fn print(comptime fmt: []const u8, args: anytype) void {
lockStdErr();
defer unlockStdErr();
const stderr = io.getStdErr().writer();
nosuspend stderr.print(fmt, args) catch return;
const bw = lockStderrWriter(&.{});
defer unlockStderrWriter();
nosuspend bw.print(fmt, args) catch return;
}
pub fn getStderrMutex() *std.Thread.Mutex {
@ -232,50 +246,44 @@ pub fn getSelfDebugInfo() !*SelfInfo {
/// Tries to print a hexadecimal view of the bytes, unbuffered, and ignores any error returned.
/// Obtains the stderr mutex while dumping.
pub fn dumpHex(bytes: []const u8) void {
lockStdErr();
defer unlockStdErr();
dumpHexFallible(bytes) catch {};
const bw = lockStderrWriter(&.{});
defer unlockStderrWriter();
const ttyconf = std.io.tty.detectConfig(.stderr());
dumpHexFallible(bw, ttyconf, bytes) catch {};
}
/// Prints a hexadecimal view of the bytes, unbuffered, returning any error that occurs.
pub fn dumpHexFallible(bytes: []const u8) !void {
const stderr = std.io.getStdErr();
const ttyconf = std.io.tty.detectConfig(stderr);
const writer = stderr.writer();
try dumpHexInternal(bytes, ttyconf, writer);
}
fn dumpHexInternal(bytes: []const u8, ttyconf: std.io.tty.Config, writer: anytype) !void {
/// Prints a hexadecimal view of the bytes, returning any error that occurs.
pub fn dumpHexFallible(bw: *Writer, ttyconf: std.io.tty.Config, bytes: []const u8) !void {
var chunks = mem.window(u8, bytes, 16, 16);
while (chunks.next()) |window| {
// 1. Print the address.
const address = (@intFromPtr(bytes.ptr) + 0x10 * (std.math.divCeil(usize, chunks.index orelse bytes.len, 16) catch unreachable)) - 0x10;
try ttyconf.setColor(writer, .dim);
try ttyconf.setColor(bw, .dim);
// We print the address in lowercase and the bytes in uppercase hexadecimal to distinguish them more.
// Also, make sure all lines are aligned by padding the address.
try writer.print("{x:0>[1]} ", .{ address, @sizeOf(usize) * 2 });
try ttyconf.setColor(writer, .reset);
try bw.print("{x:0>[1]} ", .{ address, @sizeOf(usize) * 2 });
try ttyconf.setColor(bw, .reset);
// 2. Print the bytes.
for (window, 0..) |byte, index| {
try writer.print("{X:0>2} ", .{byte});
if (index == 7) try writer.writeByte(' ');
try bw.print("{X:0>2} ", .{byte});
if (index == 7) try bw.writeByte(' ');
}
try writer.writeByte(' ');
try bw.writeByte(' ');
if (window.len < 16) {
var missing_columns = (16 - window.len) * 3;
if (window.len < 8) missing_columns += 1;
try writer.writeByteNTimes(' ', missing_columns);
try bw.splatByteAll(' ', missing_columns);
}
// 3. Print the characters.
for (window) |byte| {
if (std.ascii.isPrint(byte)) {
try writer.writeByte(byte);
try bw.writeByte(byte);
} else {
// Related: https://github.com/ziglang/zig/issues/7600
if (ttyconf == .windows_api) {
try writer.writeByte('.');
try bw.writeByte('.');
continue;
}
@ -283,22 +291,23 @@ fn dumpHexInternal(bytes: []const u8, ttyconf: std.io.tty.Config, writer: anytyp
// We don't want to do this for all control codes because most control codes apart from
// the ones that Zig has escape sequences for are likely not very useful to print as symbols.
switch (byte) {
'\n' => try writer.writeAll(""),
'\r' => try writer.writeAll(""),
'\t' => try writer.writeAll(""),
else => try writer.writeByte('.'),
'\n' => try bw.writeAll(""),
'\r' => try bw.writeAll(""),
'\t' => try bw.writeAll(""),
else => try bw.writeByte('.'),
}
}
}
try writer.writeByte('\n');
try bw.writeByte('\n');
}
}
test dumpHexInternal {
test dumpHexFallible {
const bytes: []const u8 = &.{ 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff, 0x01, 0x12, 0x13 };
var output = std.ArrayList(u8).init(std.testing.allocator);
defer output.deinit();
try dumpHexInternal(bytes, .no_color, output.writer());
var aw: std.io.Writer.Allocating = .init(std.testing.allocator);
defer aw.deinit();
try dumpHexFallible(&aw.writer, .no_color, bytes);
const expected = try std.fmt.allocPrint(std.testing.allocator,
\\{x:0>[2]} 00 11 22 33 44 55 66 77 88 99 AA BB CC DD EE FF .."3DUfw........
\\{x:0>[2]} 01 12 13 ...
@ -309,34 +318,36 @@ test dumpHexInternal {
@sizeOf(usize) * 2,
});
defer std.testing.allocator.free(expected);
try std.testing.expectEqualStrings(expected, output.items);
try std.testing.expectEqualStrings(expected, aw.getWritten());
}
/// Tries to print the current stack trace to stderr, unbuffered, and ignores any error returned.
/// TODO multithreaded awareness
pub fn dumpCurrentStackTrace(start_addr: ?usize) void {
nosuspend {
if (builtin.target.cpu.arch.isWasm()) {
if (native_os == .wasi) {
const stderr = io.getStdErr().writer();
stderr.print("Unable to dump stack trace: not implemented for Wasm\n", .{}) catch return;
}
return;
const stderr = lockStderrWriter(&.{});
defer unlockStderrWriter();
nosuspend dumpCurrentStackTraceToWriter(start_addr, stderr) catch return;
}
/// Prints the current stack trace to the provided writer.
pub fn dumpCurrentStackTraceToWriter(start_addr: ?usize, writer: *Writer) !void {
if (builtin.target.cpu.arch.isWasm()) {
if (native_os == .wasi) {
try writer.writeAll("Unable to dump stack trace: not implemented for Wasm\n");
}
const stderr = io.getStdErr().writer();
if (builtin.strip_debug_info) {
stderr.print("Unable to dump stack trace: debug info stripped\n", .{}) catch return;
return;
}
const debug_info = getSelfDebugInfo() catch |err| {
stderr.print("Unable to dump stack trace: Unable to open debug info: {s}\n", .{@errorName(err)}) catch return;
return;
};
writeCurrentStackTrace(stderr, debug_info, io.tty.detectConfig(io.getStdErr()), start_addr) catch |err| {
stderr.print("Unable to dump stack trace: {s}\n", .{@errorName(err)}) catch return;
return;
};
return;
}
if (builtin.strip_debug_info) {
try writer.writeAll("Unable to dump stack trace: debug info stripped\n");
return;
}
const debug_info = getSelfDebugInfo() catch |err| {
try writer.print("Unable to dump stack trace: Unable to open debug info: {s}\n", .{@errorName(err)});
return;
};
writeCurrentStackTrace(writer, debug_info, io.tty.detectConfig(.stderr()), start_addr) catch |err| {
try writer.print("Unable to dump stack trace: {s}\n", .{@errorName(err)});
return;
};
}
pub const have_ucontext = posix.ucontext_t != void;
@ -402,16 +413,14 @@ pub inline fn getContext(context: *ThreadContext) bool {
/// Tries to print the stack trace starting from the supplied base pointer to stderr,
/// unbuffered, and ignores any error returned.
/// TODO multithreaded awareness
pub fn dumpStackTraceFromBase(context: *ThreadContext) void {
pub fn dumpStackTraceFromBase(context: *ThreadContext, stderr: *Writer) void {
nosuspend {
if (builtin.target.cpu.arch.isWasm()) {
if (native_os == .wasi) {
const stderr = io.getStdErr().writer();
stderr.print("Unable to dump stack trace: not implemented for Wasm\n", .{}) catch return;
}
return;
}
const stderr = io.getStdErr().writer();
if (builtin.strip_debug_info) {
stderr.print("Unable to dump stack trace: debug info stripped\n", .{}) catch return;
return;
@ -420,7 +429,7 @@ pub fn dumpStackTraceFromBase(context: *ThreadContext) void {
stderr.print("Unable to dump stack trace: Unable to open debug info: {s}\n", .{@errorName(err)}) catch return;
return;
};
const tty_config = io.tty.detectConfig(io.getStdErr());
const tty_config = io.tty.detectConfig(.stderr());
if (native_os == .windows) {
// On x86_64 and aarch64, the stack will be unwound using RtlVirtualUnwind using the context
// provided by the exception handler. On x86, RtlVirtualUnwind doesn't exist. Instead, a new backtrace
@ -510,21 +519,23 @@ pub fn dumpStackTrace(stack_trace: std.builtin.StackTrace) void {
nosuspend {
if (builtin.target.cpu.arch.isWasm()) {
if (native_os == .wasi) {
const stderr = io.getStdErr().writer();
stderr.print("Unable to dump stack trace: not implemented for Wasm\n", .{}) catch return;
const stderr = lockStderrWriter(&.{});
defer unlockStderrWriter();
stderr.writeAll("Unable to dump stack trace: not implemented for Wasm\n") catch return;
}
return;
}
const stderr = io.getStdErr().writer();
const stderr = lockStderrWriter(&.{});
defer unlockStderrWriter();
if (builtin.strip_debug_info) {
stderr.print("Unable to dump stack trace: debug info stripped\n", .{}) catch return;
stderr.writeAll("Unable to dump stack trace: debug info stripped\n") catch return;
return;
}
const debug_info = getSelfDebugInfo() catch |err| {
stderr.print("Unable to dump stack trace: Unable to open debug info: {s}\n", .{@errorName(err)}) catch return;
return;
};
writeStackTrace(stack_trace, stderr, debug_info, io.tty.detectConfig(io.getStdErr())) catch |err| {
writeStackTrace(stack_trace, stderr, debug_info, io.tty.detectConfig(.stderr())) catch |err| {
stderr.print("Unable to dump stack trace: {s}\n", .{@errorName(err)}) catch return;
return;
};
@ -573,14 +584,13 @@ pub fn panicExtra(
const size = 0x1000;
const trunc_msg = "(msg truncated)";
var buf: [size + trunc_msg.len]u8 = undefined;
var bw: Writer = .fixed(buf[0..size]);
// a minor annoyance with this is that it will result in the NoSpaceLeft
// error being part of the @panic stack trace (but that error should
// only happen rarely)
const msg = std.fmt.bufPrint(buf[0..size], format, args) catch |err| switch (err) {
error.NoSpaceLeft => blk: {
@memcpy(buf[size..], trunc_msg);
break :blk &buf;
},
const msg = if (bw.print(format, args)) |_| bw.buffered() else |_| blk: {
@memcpy(buf[size..], trunc_msg);
break :blk &buf;
};
std.builtin.panic.call(msg, ret_addr);
}
@ -675,10 +685,9 @@ pub fn defaultPanic(
_ = panicking.fetchAdd(1, .seq_cst);
{
lockStdErr();
defer unlockStdErr();
const stderr = lockStderrWriter(&.{});
defer unlockStderrWriter();
const stderr = io.getStdErr().writer();
if (builtin.single_threaded) {
stderr.print("panic: ", .{}) catch posix.abort();
} else {
@ -688,7 +697,7 @@ pub fn defaultPanic(
stderr.print("{s}\n", .{msg}) catch posix.abort();
if (@errorReturnTrace()) |t| dumpStackTrace(t.*);
dumpCurrentStackTrace(first_trace_addr orelse @returnAddress());
dumpCurrentStackTraceToWriter(first_trace_addr orelse @returnAddress(), stderr) catch {};
}
waitForOtherThreadToFinishPanicking();
@ -699,7 +708,7 @@ pub fn defaultPanic(
// A panic happened while trying to print a previous panic message.
// We're still holding the mutex but that's fine as we're going to
// call abort().
io.getStdErr().writeAll("aborting due to recursive panic\n") catch {};
fs.File.stderr().writeAll("aborting due to recursive panic\n") catch {};
},
else => {}, // Panicked while printing the recursive panic message.
};
@ -723,7 +732,7 @@ fn waitForOtherThreadToFinishPanicking() void {
pub fn writeStackTrace(
stack_trace: std.builtin.StackTrace,
out_stream: anytype,
writer: *Writer,
debug_info: *SelfInfo,
tty_config: io.tty.Config,
) !void {
@ -736,15 +745,15 @@ pub fn writeStackTrace(
frame_index = (frame_index + 1) % stack_trace.instruction_addresses.len;
}) {
const return_address = stack_trace.instruction_addresses[frame_index];
try printSourceAtAddress(debug_info, out_stream, return_address - 1, tty_config);
try printSourceAtAddress(debug_info, writer, return_address - 1, tty_config);
}
if (stack_trace.index > stack_trace.instruction_addresses.len) {
const dropped_frames = stack_trace.index - stack_trace.instruction_addresses.len;
tty_config.setColor(out_stream, .bold) catch {};
try out_stream.print("({d} additional stack frames skipped...)\n", .{dropped_frames});
tty_config.setColor(out_stream, .reset) catch {};
tty_config.setColor(writer, .bold) catch {};
try writer.print("({d} additional stack frames skipped...)\n", .{dropped_frames});
tty_config.setColor(writer, .reset) catch {};
}
}
@ -954,7 +963,7 @@ pub const StackIterator = struct {
};
pub fn writeCurrentStackTrace(
out_stream: anytype,
writer: *Writer,
debug_info: *SelfInfo,
tty_config: io.tty.Config,
start_addr: ?usize,
@ -962,7 +971,7 @@ pub fn writeCurrentStackTrace(
if (native_os == .windows) {
var context: ThreadContext = undefined;
assert(getContext(&context));
return writeStackTraceWindows(out_stream, debug_info, tty_config, &context, start_addr);
return writeStackTraceWindows(writer, debug_info, tty_config, &context, start_addr);
}
var context: ThreadContext = undefined;
const has_context = getContext(&context);
@ -973,7 +982,7 @@ pub fn writeCurrentStackTrace(
defer it.deinit();
while (it.next()) |return_address| {
printLastUnwindError(&it, debug_info, out_stream, tty_config);
printLastUnwindError(&it, debug_info, writer, tty_config);
// On arm64 macOS, the address of the last frame is 0x0 rather than 0x1 as on x86_64 macOS,
// therefore, we do a check for `return_address == 0` before subtracting 1 from it to avoid
@ -981,8 +990,8 @@ pub fn writeCurrentStackTrace(
// condition on the subsequent iteration and return `null` thus terminating the loop.
// same behaviour for x86-windows-msvc
const address = return_address -| 1;
try printSourceAtAddress(debug_info, out_stream, address, tty_config);
} else printLastUnwindError(&it, debug_info, out_stream, tty_config);
try printSourceAtAddress(debug_info, writer, address, tty_config);
} else printLastUnwindError(&it, debug_info, writer, tty_config);
}
pub noinline fn walkStackWindows(addresses: []usize, existing_context: ?*const windows.CONTEXT) usize {
@ -1042,7 +1051,7 @@ pub noinline fn walkStackWindows(addresses: []usize, existing_context: ?*const w
}
pub fn writeStackTraceWindows(
out_stream: anytype,
writer: *Writer,
debug_info: *SelfInfo,
tty_config: io.tty.Config,
context: *const windows.CONTEXT,
@ -1058,14 +1067,14 @@ pub fn writeStackTraceWindows(
return;
} else 0;
for (addrs[start_i..]) |addr| {
try printSourceAtAddress(debug_info, out_stream, addr - 1, tty_config);
try printSourceAtAddress(debug_info, writer, addr - 1, tty_config);
}
}
fn printUnknownSource(debug_info: *SelfInfo, out_stream: anytype, address: usize, tty_config: io.tty.Config) !void {
fn printUnknownSource(debug_info: *SelfInfo, writer: *Writer, address: usize, tty_config: io.tty.Config) !void {
const module_name = debug_info.getModuleNameForAddress(address);
return printLineInfo(
out_stream,
writer,
null,
address,
"???",
@ -1075,38 +1084,38 @@ fn printUnknownSource(debug_info: *SelfInfo, out_stream: anytype, address: usize
);
}
fn printLastUnwindError(it: *StackIterator, debug_info: *SelfInfo, out_stream: anytype, tty_config: io.tty.Config) void {
fn printLastUnwindError(it: *StackIterator, debug_info: *SelfInfo, writer: *Writer, tty_config: io.tty.Config) void {
if (!have_ucontext) return;
if (it.getLastError()) |unwind_error| {
printUnwindError(debug_info, out_stream, unwind_error.address, unwind_error.err, tty_config) catch {};
printUnwindError(debug_info, writer, unwind_error.address, unwind_error.err, tty_config) catch {};
}
}
fn printUnwindError(debug_info: *SelfInfo, out_stream: anytype, address: usize, err: UnwindError, tty_config: io.tty.Config) !void {
fn printUnwindError(debug_info: *SelfInfo, writer: *Writer, address: usize, err: UnwindError, tty_config: io.tty.Config) !void {
const module_name = debug_info.getModuleNameForAddress(address) orelse "???";
try tty_config.setColor(out_stream, .dim);
try tty_config.setColor(writer, .dim);
if (err == error.MissingDebugInfo) {
try out_stream.print("Unwind information for `{s}:0x{x}` was not available, trace may be incomplete\n\n", .{ module_name, address });
try writer.print("Unwind information for `{s}:0x{x}` was not available, trace may be incomplete\n\n", .{ module_name, address });
} else {
try out_stream.print("Unwind error at address `{s}:0x{x}` ({}), trace may be incomplete\n\n", .{ module_name, address, err });
try writer.print("Unwind error at address `{s}:0x{x}` ({}), trace may be incomplete\n\n", .{ module_name, address, err });
}
try tty_config.setColor(out_stream, .reset);
try tty_config.setColor(writer, .reset);
}
pub fn printSourceAtAddress(debug_info: *SelfInfo, out_stream: anytype, address: usize, tty_config: io.tty.Config) !void {
pub fn printSourceAtAddress(debug_info: *SelfInfo, writer: *Writer, address: usize, tty_config: io.tty.Config) !void {
const module = debug_info.getModuleForAddress(address) catch |err| switch (err) {
error.MissingDebugInfo, error.InvalidDebugInfo => return printUnknownSource(debug_info, out_stream, address, tty_config),
error.MissingDebugInfo, error.InvalidDebugInfo => return printUnknownSource(debug_info, writer, address, tty_config),
else => return err,
};
const symbol_info = module.getSymbolAtAddress(debug_info.allocator, address) catch |err| switch (err) {
error.MissingDebugInfo, error.InvalidDebugInfo => return printUnknownSource(debug_info, out_stream, address, tty_config),
error.MissingDebugInfo, error.InvalidDebugInfo => return printUnknownSource(debug_info, writer, address, tty_config),
else => return err,
};
defer if (symbol_info.source_location) |sl| debug_info.allocator.free(sl.file_name);
return printLineInfo(
out_stream,
writer,
symbol_info.source_location,
address,
symbol_info.name,
@ -1117,7 +1126,7 @@ pub fn printSourceAtAddress(debug_info: *SelfInfo, out_stream: anytype, address:
}
fn printLineInfo(
out_stream: anytype,
writer: *Writer,
source_location: ?SourceLocation,
address: usize,
symbol_name: []const u8,
@ -1126,34 +1135,34 @@ fn printLineInfo(
comptime printLineFromFile: anytype,
) !void {
nosuspend {
try tty_config.setColor(out_stream, .bold);
try tty_config.setColor(writer, .bold);
if (source_location) |*sl| {
try out_stream.print("{s}:{d}:{d}", .{ sl.file_name, sl.line, sl.column });
try writer.print("{s}:{d}:{d}", .{ sl.file_name, sl.line, sl.column });
} else {
try out_stream.writeAll("???:?:?");
try writer.writeAll("???:?:?");
}
try tty_config.setColor(out_stream, .reset);
try out_stream.writeAll(": ");
try tty_config.setColor(out_stream, .dim);
try out_stream.print("0x{x} in {s} ({s})", .{ address, symbol_name, compile_unit_name });
try tty_config.setColor(out_stream, .reset);
try out_stream.writeAll("\n");
try tty_config.setColor(writer, .reset);
try writer.writeAll(": ");
try tty_config.setColor(writer, .dim);
try writer.print("0x{x} in {s} ({s})", .{ address, symbol_name, compile_unit_name });
try tty_config.setColor(writer, .reset);
try writer.writeAll("\n");
// Show the matching source code line if possible
if (source_location) |sl| {
if (printLineFromFile(out_stream, sl)) {
if (printLineFromFile(writer, sl)) {
if (sl.column > 0) {
// The caret already takes one char
const space_needed = @as(usize, @intCast(sl.column - 1));
try out_stream.writeByteNTimes(' ', space_needed);
try tty_config.setColor(out_stream, .green);
try out_stream.writeAll("^");
try tty_config.setColor(out_stream, .reset);
try writer.splatByteAll(' ', space_needed);
try tty_config.setColor(writer, .green);
try writer.writeAll("^");
try tty_config.setColor(writer, .reset);
}
try out_stream.writeAll("\n");
try writer.writeAll("\n");
} else |err| switch (err) {
error.EndOfFile, error.FileNotFound => {},
error.BadPathName => {},
@ -1164,7 +1173,7 @@ fn printLineInfo(
}
}
fn printLineFromFileAnyOs(out_stream: anytype, source_location: SourceLocation) !void {
fn printLineFromFileAnyOs(writer: *Writer, source_location: SourceLocation) !void {
// Need this to always block even in async I/O mode, because this could potentially
// be called from e.g. the event loop code crashing.
var f = try fs.cwd().openFile(source_location.file_name, .{});
@ -1197,31 +1206,31 @@ fn printLineFromFileAnyOs(out_stream: anytype, source_location: SourceLocation)
if (mem.indexOfScalar(u8, slice, '\n')) |pos| {
const line = slice[0 .. pos + 1];
mem.replaceScalar(u8, line, '\t', ' ');
return out_stream.writeAll(line);
return writer.writeAll(line);
} else { // Line is the last inside the buffer, and requires another read to find delimiter. Alternatively the file ends.
mem.replaceScalar(u8, slice, '\t', ' ');
try out_stream.writeAll(slice);
try writer.writeAll(slice);
while (amt_read == buf.len) {
amt_read = try f.read(buf[0..]);
if (mem.indexOfScalar(u8, buf[0..amt_read], '\n')) |pos| {
const line = buf[0 .. pos + 1];
mem.replaceScalar(u8, line, '\t', ' ');
return out_stream.writeAll(line);
return writer.writeAll(line);
} else {
const line = buf[0..amt_read];
mem.replaceScalar(u8, line, '\t', ' ');
try out_stream.writeAll(line);
try writer.writeAll(line);
}
}
// Make sure printing last line of file inserts extra newline
try out_stream.writeByte('\n');
try writer.writeByte('\n');
}
}
test printLineFromFileAnyOs {
var output = std.ArrayList(u8).init(std.testing.allocator);
defer output.deinit();
const output_stream = output.writer();
var aw: Writer.Allocating = .init(std.testing.allocator);
defer aw.deinit();
const output_stream = &aw.writer;
const allocator = std.testing.allocator;
const join = std.fs.path.join;
@ -1243,8 +1252,8 @@ test printLineFromFileAnyOs {
try expectError(error.EndOfFile, printLineFromFileAnyOs(output_stream, .{ .file_name = path, .line = 2, .column = 0 }));
try printLineFromFileAnyOs(output_stream, .{ .file_name = path, .line = 1, .column = 0 });
try expectEqualStrings("no new lines in this file, but one is printed anyway\n", output.items);
output.clearRetainingCapacity();
try expectEqualStrings("no new lines in this file, but one is printed anyway\n", aw.getWritten());
aw.clearRetainingCapacity();
}
{
const path = try fs.path.join(allocator, &.{ test_dir_path, "three_lines.zig" });
@ -1259,12 +1268,12 @@ test printLineFromFileAnyOs {
});
try printLineFromFileAnyOs(output_stream, .{ .file_name = path, .line = 1, .column = 0 });
try expectEqualStrings("1\n", output.items);
output.clearRetainingCapacity();
try expectEqualStrings("1\n", aw.getWritten());
aw.clearRetainingCapacity();
try printLineFromFileAnyOs(output_stream, .{ .file_name = path, .line = 3, .column = 0 });
try expectEqualStrings("3\n", output.items);
output.clearRetainingCapacity();
try expectEqualStrings("3\n", aw.getWritten());
aw.clearRetainingCapacity();
}
{
const file = try test_dir.dir.createFile("line_overlaps_page_boundary.zig", .{});
@ -1273,14 +1282,17 @@ test printLineFromFileAnyOs {
defer allocator.free(path);
const overlap = 10;
var writer = file.writer();
try writer.writeByteNTimes('a', std.heap.page_size_min - overlap);
var buf: [16]u8 = undefined;
var file_writer = file.writer(&buf);
const writer = &file_writer.interface;
try writer.splatByteAll('a', std.heap.page_size_min - overlap);
try writer.writeByte('\n');
try writer.writeByteNTimes('a', overlap);
try writer.splatByteAll('a', overlap);
try writer.flush();
try printLineFromFileAnyOs(output_stream, .{ .file_name = path, .line = 2, .column = 0 });
try expectEqualStrings(("a" ** overlap) ++ "\n", output.items);
output.clearRetainingCapacity();
try expectEqualStrings(("a" ** overlap) ++ "\n", aw.getWritten());
aw.clearRetainingCapacity();
}
{
const file = try test_dir.dir.createFile("file_ends_on_page_boundary.zig", .{});
@ -1288,12 +1300,13 @@ test printLineFromFileAnyOs {
const path = try fs.path.join(allocator, &.{ test_dir_path, "file_ends_on_page_boundary.zig" });
defer allocator.free(path);
var writer = file.writer();
try writer.writeByteNTimes('a', std.heap.page_size_max);
var file_writer = file.writer(&.{});
const writer = &file_writer.interface;
try writer.splatByteAll('a', std.heap.page_size_max);
try printLineFromFileAnyOs(output_stream, .{ .file_name = path, .line = 1, .column = 0 });
try expectEqualStrings(("a" ** std.heap.page_size_max) ++ "\n", output.items);
output.clearRetainingCapacity();
try expectEqualStrings(("a" ** std.heap.page_size_max) ++ "\n", aw.getWritten());
aw.clearRetainingCapacity();
}
{
const file = try test_dir.dir.createFile("very_long_first_line_spanning_multiple_pages.zig", .{});
@ -1301,24 +1314,25 @@ test printLineFromFileAnyOs {
const path = try fs.path.join(allocator, &.{ test_dir_path, "very_long_first_line_spanning_multiple_pages.zig" });
defer allocator.free(path);
var writer = file.writer();
try writer.writeByteNTimes('a', 3 * std.heap.page_size_max);
var file_writer = file.writer(&.{});
const writer = &file_writer.interface;
try writer.splatByteAll('a', 3 * std.heap.page_size_max);
try expectError(error.EndOfFile, printLineFromFileAnyOs(output_stream, .{ .file_name = path, .line = 2, .column = 0 }));
try printLineFromFileAnyOs(output_stream, .{ .file_name = path, .line = 1, .column = 0 });
try expectEqualStrings(("a" ** (3 * std.heap.page_size_max)) ++ "\n", output.items);
output.clearRetainingCapacity();
try expectEqualStrings(("a" ** (3 * std.heap.page_size_max)) ++ "\n", aw.getWritten());
aw.clearRetainingCapacity();
try writer.writeAll("a\na");
try printLineFromFileAnyOs(output_stream, .{ .file_name = path, .line = 1, .column = 0 });
try expectEqualStrings(("a" ** (3 * std.heap.page_size_max)) ++ "a\n", output.items);
output.clearRetainingCapacity();
try expectEqualStrings(("a" ** (3 * std.heap.page_size_max)) ++ "a\n", aw.getWritten());
aw.clearRetainingCapacity();
try printLineFromFileAnyOs(output_stream, .{ .file_name = path, .line = 2, .column = 0 });
try expectEqualStrings("a\n", output.items);
output.clearRetainingCapacity();
try expectEqualStrings("a\n", aw.getWritten());
aw.clearRetainingCapacity();
}
{
const file = try test_dir.dir.createFile("file_of_newlines.zig", .{});
@ -1326,18 +1340,19 @@ test printLineFromFileAnyOs {
const path = try fs.path.join(allocator, &.{ test_dir_path, "file_of_newlines.zig" });
defer allocator.free(path);
var writer = file.writer();
var file_writer = file.writer(&.{});
const writer = &file_writer.interface;
const real_file_start = 3 * std.heap.page_size_min;
try writer.writeByteNTimes('\n', real_file_start);
try writer.splatByteAll('\n', real_file_start);
try writer.writeAll("abc\ndef");
try printLineFromFileAnyOs(output_stream, .{ .file_name = path, .line = real_file_start + 1, .column = 0 });
try expectEqualStrings("abc\n", output.items);
output.clearRetainingCapacity();
try expectEqualStrings("abc\n", aw.getWritten());
aw.clearRetainingCapacity();
try printLineFromFileAnyOs(output_stream, .{ .file_name = path, .line = real_file_start + 2, .column = 0 });
try expectEqualStrings("def\n", output.items);
output.clearRetainingCapacity();
try expectEqualStrings("def\n", aw.getWritten());
aw.clearRetainingCapacity();
}
}
@ -1461,7 +1476,8 @@ fn handleSegfaultPosix(sig: i32, info: *const posix.siginfo_t, ctx_ptr: ?*anyopa
}
fn dumpSegfaultInfoPosix(sig: i32, code: i32, addr: usize, ctx_ptr: ?*anyopaque) void {
const stderr = io.getStdErr().writer();
const stderr = lockStderrWriter(&.{});
defer unlockStderrWriter();
_ = switch (sig) {
posix.SIG.SEGV => if (native_arch == .x86_64 and native_os == .linux and code == 128) // SI_KERNEL
// x86_64 doesn't have a full 64-bit virtual address space.
@ -1471,7 +1487,7 @@ fn dumpSegfaultInfoPosix(sig: i32, code: i32, addr: usize, ctx_ptr: ?*anyopaque)
// but can also happen when no addressable memory is involved;
// for example when reading/writing model-specific registers
// by executing `rdmsr` or `wrmsr` in user-space (unprivileged mode).
stderr.print("General protection exception (no address available)\n", .{})
stderr.writeAll("General protection exception (no address available)\n")
else
stderr.print("Segmentation fault at address 0x{x}\n", .{addr}),
posix.SIG.ILL => stderr.print("Illegal instruction at address 0x{x}\n", .{addr}),
@ -1509,7 +1525,7 @@ fn dumpSegfaultInfoPosix(sig: i32, code: i32, addr: usize, ctx_ptr: ?*anyopaque)
}, @ptrCast(ctx)).__mcontext_data;
}
relocateContext(&new_ctx);
dumpStackTraceFromBase(&new_ctx);
dumpStackTraceFromBase(&new_ctx, stderr);
},
else => {},
}
@ -1539,25 +1555,24 @@ fn handleSegfaultWindowsExtra(info: *windows.EXCEPTION_POINTERS, msg: u8, label:
_ = panicking.fetchAdd(1, .seq_cst);
{
lockStdErr();
defer unlockStdErr();
const stderr = lockStderrWriter(&.{});
defer unlockStderrWriter();
dumpSegfaultInfoWindows(info, msg, label);
dumpSegfaultInfoWindows(info, msg, label, stderr);
}
waitForOtherThreadToFinishPanicking();
},
1 => {
panic_stage = 2;
io.getStdErr().writeAll("aborting due to recursive panic\n") catch {};
fs.File.stderr().writeAll("aborting due to recursive panic\n") catch {};
},
else => {},
};
posix.abort();
}
fn dumpSegfaultInfoWindows(info: *windows.EXCEPTION_POINTERS, msg: u8, label: ?[]const u8) void {
const stderr = io.getStdErr().writer();
fn dumpSegfaultInfoWindows(info: *windows.EXCEPTION_POINTERS, msg: u8, label: ?[]const u8, stderr: *Writer) void {
_ = switch (msg) {
0 => stderr.print("{s}\n", .{label.?}),
1 => stderr.print("Segmentation fault at address 0x{x}\n", .{info.ExceptionRecord.ExceptionInformation[1]}),
@ -1565,7 +1580,7 @@ fn dumpSegfaultInfoWindows(info: *windows.EXCEPTION_POINTERS, msg: u8, label: ?[
else => unreachable,
} catch posix.abort();
dumpStackTraceFromBase(info.ContextRecord);
dumpStackTraceFromBase(info.ContextRecord, stderr);
}
pub fn dumpStackPointerAddr(prefix: []const u8) void {
@ -1588,10 +1603,10 @@ test "manage resources correctly" {
// self-hosted debug info is still too buggy
if (builtin.zig_backend != .stage2_llvm) return error.SkipZigTest;
const writer = std.io.null_writer;
var discarding: std.io.Writer.Discarding = .init(&.{});
var di = try SelfInfo.open(testing.allocator);
defer di.deinit();
try printSourceAtAddress(&di, writer, showMyTrace(), io.tty.detectConfig(std.io.getStdErr()));
try printSourceAtAddress(&di, &discarding.writer, showMyTrace(), io.tty.detectConfig(.stderr()));
}
noinline fn showMyTrace() usize {
@ -1657,8 +1672,9 @@ pub fn ConfigurableTrace(comptime size: usize, comptime stack_frame_count: usize
pub fn dump(t: @This()) void {
if (!enabled) return;
const tty_config = io.tty.detectConfig(std.io.getStdErr());
const stderr = io.getStdErr().writer();
const tty_config = io.tty.detectConfig(.stderr());
const stderr = lockStderrWriter(&.{});
defer unlockStderrWriter();
const end = @min(t.index, size);
const debug_info = getSelfDebugInfo() catch |err| {
stderr.print(
@ -1688,7 +1704,7 @@ pub fn ConfigurableTrace(comptime size: usize, comptime stack_frame_count: usize
t: @This(),
comptime fmt: []const u8,
options: std.fmt.FormatOptions,
writer: anytype,
writer: *Writer,
) !void {
if (fmt.len != 0) std.fmt.invalidFmtError(fmt, t);
_ = options;

View File

@ -2302,11 +2302,7 @@ pub const ElfModule = struct {
};
defer debuginfod_dir.close();
const filename = std.fmt.allocPrint(
gpa,
"{s}/debuginfo",
.{std.fmt.fmtSliceHexLower(id)},
) catch break :blk;
const filename = std.fmt.allocPrint(gpa, "{x}/debuginfo", .{id}) catch break :blk;
defer gpa.free(filename);
const path: Path = .{
@ -2330,12 +2326,8 @@ pub const ElfModule = struct {
var id_prefix_buf: [2]u8 = undefined;
var filename_buf: [38 + extension.len]u8 = undefined;
_ = std.fmt.bufPrint(&id_prefix_buf, "{s}", .{std.fmt.fmtSliceHexLower(id[0..1])}) catch unreachable;
const filename = std.fmt.bufPrint(
&filename_buf,
"{s}" ++ extension,
.{std.fmt.fmtSliceHexLower(id[1..])},
) catch break :blk;
_ = std.fmt.bufPrint(&id_prefix_buf, "{x}", .{id[0..1]}) catch unreachable;
const filename = std.fmt.bufPrint(&filename_buf, "{x}" ++ extension, .{id[1..]}) catch break :blk;
for (global_debug_directories) |global_directory| {
const path: Path = .{

View File

@ -395,7 +395,7 @@ const Msf = struct {
streams: []MsfStream,
fn init(allocator: Allocator, file: File) !Msf {
const in = file.reader();
const in = file.deprecatedReader();
const superblock = try in.readStruct(pdb.SuperBlock);
@ -514,7 +514,7 @@ const MsfStream = struct {
var offset = self.pos % self.block_size;
try self.in_file.seekTo(block * self.block_size + offset);
const in = self.in_file.reader();
const in = self.in_file.deprecatedReader();
var size: usize = 0;
var rem_buffer = buffer;
@ -562,7 +562,7 @@ const MsfStream = struct {
return block * self.block_size + offset;
}
pub fn reader(self: *MsfStream) std.io.Reader(*MsfStream, Error, read) {
pub fn reader(self: *MsfStream) std.io.GenericReader(*MsfStream, Error, read) {
return .{ .context = self };
}
};

View File

@ -15,7 +15,7 @@ pub fn call(msg: []const u8, ra: ?usize) noreturn {
@branchHint(.cold);
_ = ra;
std.debug.lockStdErr();
const stderr = std.io.getStdErr();
const stderr: std.fs.File = .stderr();
stderr.writeAll(msg) catch {};
@trap();
}

View File

@ -511,7 +511,7 @@ pub const Header = struct {
pub fn read(parse_source: anytype) !Header {
var hdr_buf: [@sizeOf(Elf64_Ehdr)]u8 align(@alignOf(Elf64_Ehdr)) = undefined;
try parse_source.seekableStream().seekTo(0);
try parse_source.reader().readNoEof(&hdr_buf);
try parse_source.deprecatedReader().readNoEof(&hdr_buf);
return Header.parse(&hdr_buf);
}
@ -586,7 +586,7 @@ pub fn ProgramHeaderIterator(comptime ParseSource: anytype) type {
var phdr: Elf64_Phdr = undefined;
const offset = self.elf_header.phoff + @sizeOf(@TypeOf(phdr)) * self.index;
try self.parse_source.seekableStream().seekTo(offset);
try self.parse_source.reader().readNoEof(mem.asBytes(&phdr));
try self.parse_source.deprecatedReader().readNoEof(mem.asBytes(&phdr));
// ELF endianness matches native endianness.
if (self.elf_header.endian == native_endian) return phdr;
@ -599,7 +599,7 @@ pub fn ProgramHeaderIterator(comptime ParseSource: anytype) type {
var phdr: Elf32_Phdr = undefined;
const offset = self.elf_header.phoff + @sizeOf(@TypeOf(phdr)) * self.index;
try self.parse_source.seekableStream().seekTo(offset);
try self.parse_source.reader().readNoEof(mem.asBytes(&phdr));
try self.parse_source.deprecatedReader().readNoEof(mem.asBytes(&phdr));
// ELF endianness does NOT match native endianness.
if (self.elf_header.endian != native_endian) {
@ -636,7 +636,7 @@ pub fn SectionHeaderIterator(comptime ParseSource: anytype) type {
var shdr: Elf64_Shdr = undefined;
const offset = self.elf_header.shoff + @sizeOf(@TypeOf(shdr)) * self.index;
try self.parse_source.seekableStream().seekTo(offset);
try self.parse_source.reader().readNoEof(mem.asBytes(&shdr));
try self.parse_source.deprecatedReader().readNoEof(mem.asBytes(&shdr));
// ELF endianness matches native endianness.
if (self.elf_header.endian == native_endian) return shdr;
@ -649,7 +649,7 @@ pub fn SectionHeaderIterator(comptime ParseSource: anytype) type {
var shdr: Elf32_Shdr = undefined;
const offset = self.elf_header.shoff + @sizeOf(@TypeOf(shdr)) * self.index;
try self.parse_source.seekableStream().seekTo(offset);
try self.parse_source.reader().readNoEof(mem.asBytes(&shdr));
try self.parse_source.deprecatedReader().readNoEof(mem.asBytes(&shdr));
// ELF endianness does NOT match native endianness.
if (self.elf_header.endian != native_endian) {

View File

@ -38,8 +38,8 @@ pub fn LinearFifo(
count: usize,
const Self = @This();
pub const Reader = std.io.Reader(*Self, error{}, readFn);
pub const Writer = std.io.Writer(*Self, error{OutOfMemory}, appendWrite);
pub const Reader = std.io.GenericReader(*Self, error{}, readFn);
pub const Writer = std.io.GenericWriter(*Self, error{OutOfMemory}, appendWrite);
// Type of Self argument for slice operations.
// If buffer is inline (Static) then we need to ensure we haven't
@ -231,7 +231,7 @@ pub fn LinearFifo(
}
/// Same as `read` except it returns an error union
/// The purpose of this function existing is to match `std.io.Reader` API.
/// The purpose of this function existing is to match `std.io.GenericReader` API.
fn readFn(self: *Self, dest: []u8) error{}!usize {
return self.read(dest);
}
@ -320,7 +320,7 @@ pub fn LinearFifo(
}
/// Same as `write` except it returns the number of bytes written, which is always the same
/// as `bytes.len`. The purpose of this function existing is to match `std.io.Writer` API.
/// as `bytes.len`. The purpose of this function existing is to match `std.io.GenericWriter` API.
fn appendWrite(self: *Self, bytes: []const u8) error{OutOfMemory}!usize {
try self.write(bytes);
return bytes.len;

File diff suppressed because it is too large Load Diff

View File

@ -11,7 +11,7 @@ const special_exponent = 0x7fffffff;
pub const min_buffer_size = 53;
/// Returns the minimum buffer size needed to print every float of a specific type and format.
pub fn bufferSize(comptime mode: Format, comptime T: type) comptime_int {
pub fn bufferSize(comptime mode: Mode, comptime T: type) comptime_int {
comptime std.debug.assert(@typeInfo(T) == .float);
return switch (mode) {
.scientific => 53,
@ -27,17 +27,17 @@ pub fn bufferSize(comptime mode: Format, comptime T: type) comptime_int {
};
}
pub const FormatError = error{
pub const Error = error{
BufferTooSmall,
};
pub const Format = enum {
pub const Mode = enum {
scientific,
decimal,
};
pub const FormatOptions = struct {
mode: Format = .scientific,
pub const Options = struct {
mode: Mode = .scientific,
precision: ?usize = null,
};
@ -52,11 +52,11 @@ pub const FormatOptions = struct {
///
/// When printing full precision decimals, use `bufferSize` to get the required space. It is
/// recommended to bound decimal output with a fixed precision to reduce the required buffer size.
pub fn formatFloat(buf: []u8, v_: anytype, options: FormatOptions) FormatError![]const u8 {
const v = switch (@TypeOf(v_)) {
pub fn render(buf: []u8, value: anytype, options: Options) Error![]const u8 {
const v = switch (@TypeOf(value)) {
// comptime_float internally is a f128; this preserves precision.
comptime_float => @as(f128, v_),
else => v_,
comptime_float => @as(f128, value),
else => value,
};
const T = @TypeOf(v);
@ -192,7 +192,7 @@ fn round(comptime T: type, f: FloatDecimal(T), mode: RoundMode, precision: usize
/// will not fit.
///
/// It is recommended to bound decimal formatting with an exact precision.
pub fn formatScientific(comptime T: type, buf: []u8, f_: FloatDecimal(T), precision: ?usize) FormatError![]const u8 {
pub fn formatScientific(comptime T: type, buf: []u8, f_: FloatDecimal(T), precision: ?usize) Error![]const u8 {
std.debug.assert(buf.len >= min_buffer_size);
var f = f_;
@ -263,7 +263,7 @@ pub fn formatScientific(comptime T: type, buf: []u8, f_: FloatDecimal(T), precis
/// The buffer provided must be greater than `min_buffer_size` bytes in length. If no precision is
/// specified, this may still return an error. If precision is specified, `2 + precision` bytes will
/// always be written.
pub fn formatDecimal(comptime T: type, buf: []u8, f_: FloatDecimal(T), precision: ?usize) FormatError![]const u8 {
pub fn formatDecimal(comptime T: type, buf: []u8, f_: FloatDecimal(T), precision: ?usize) Error![]const u8 {
std.debug.assert(buf.len >= min_buffer_size);
var f = f_;
@ -1520,7 +1520,7 @@ fn check(comptime T: type, value: T, comptime expected: []const u8) !void {
var buf: [6000]u8 = undefined;
const value_bits: I = @bitCast(value);
const s = try formatFloat(&buf, value, .{});
const s = try render(&buf, value, .{});
try std.testing.expectEqualStrings(expected, s);
if (T == f80 and builtin.target.os.tag == .windows and builtin.target.cpu.arch == .x86_64) return;

File diff suppressed because it is too large Load Diff

View File

@ -146,14 +146,11 @@ pub fn joinZ(allocator: Allocator, paths: []const []const u8) ![:0]u8 {
return out[0 .. out.len - 1 :0];
}
pub fn fmtJoin(paths: []const []const u8) std.fmt.Formatter(formatJoin) {
pub fn fmtJoin(paths: []const []const u8) std.fmt.Formatter([]const []const u8, formatJoin) {
return .{ .data = paths };
}
fn formatJoin(paths: []const []const u8, comptime fmt: []const u8, options: std.fmt.FormatOptions, w: anytype) !void {
_ = fmt;
_ = options;
fn formatJoin(paths: []const []const u8, w: *std.io.Writer) std.io.Writer.Error!void {
const first_path_idx = for (paths, 0..) |p, idx| {
if (p.len != 0) break idx;
} else return;

Some files were not shown because too many files have changed in this diff Show More