std.Io: delete GenericReader

and delete deprecated alias std.io
Andrew Kelley 2025-08-27 21:20:18 -07:00
parent 558bea2a76
commit 79f267f6b9
156 changed files with 972 additions and 1852 deletions

View File

@@ -117,8 +117,7 @@ pub fn nameFromUniqueIndex(index: u16, buf: []u8) []u8 {
 var node_index: u16 = 0;
 var count: u16 = index;
-var fbs = std.io.fixedBufferStream(buf);
-const w = fbs.writer();
+var w: std.Io.Writer = .fixed(buf);
 while (true) {
 var sibling_index = dafsa[node_index].child_index;
@@ -140,7 +139,7 @@ pub fn nameFromUniqueIndex(index: u16, buf: []u8) []u8 {
 if (count == 0) break;
 }
-return fbs.getWritten();
+return w.buffered();
 }
 const Node = packed struct(u32) {
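The pattern above recurs throughout the commit: a caller-supplied buffer becomes a fixed std.Io.Writer, and getWritten() becomes buffered(). A minimal sketch of that pattern (buffer size and contents are illustrative, not from the commit):

```zig
const std = @import("std");

test "fixed Writer replaces std.io.fixedBufferStream" {
    var buf: [16]u8 = undefined;
    // A Writer backed directly by a caller-owned buffer.
    var w: std.Io.Writer = .fixed(&buf);
    try w.writeAll("abc");
    try w.print("{d}", .{123});
    // buffered() returns the bytes written so far (previously fbs.getWritten()).
    try std.testing.expectEqualStrings("abc123", w.buffered());
}
```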

View File

@@ -1645,8 +1645,8 @@ test "addSourceFromReader" {
 var comp = Compilation.init(std.testing.allocator, std.fs.cwd());
 defer comp.deinit();
-var buf_reader = std.io.fixedBufferStream(str);
-const source = try comp.addSourceFromReader(buf_reader.reader(), "path", .user);
+var buf_reader: std.Io.Reader = .fixed(str);
+const source = try comp.addSourceFromReader(&buf_reader, "path", .user);
 try std.testing.expectEqualStrings(expected, source.buf);
 try std.testing.expectEqual(warning_count, @as(u32, @intCast(comp.diagnostics.list.items.len)));
@@ -1727,8 +1727,8 @@ test "ignore BOM at beginning of file" {
 var comp = Compilation.init(std.testing.allocator, std.fs.cwd());
 defer comp.deinit();
-var buf_reader = std.io.fixedBufferStream(buf);
-const source = try comp.addSourceFromReader(buf_reader.reader(), "file.c", .user);
+var buf_reader: std.Io.Reader = .fixed(buf);
+const source = try comp.addSourceFromReader(&buf_reader, "file.c", .user);
 const expected_output = if (mem.startsWith(u8, buf, BOM)) buf[BOM.len..] else buf;
 try std.testing.expectEqualStrings(expected_output, source.buf);
 }
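On the reading side, former GenericReader consumers now take a *std.Io.Reader, and an in-memory byte slice becomes a fixed Reader. A minimal sketch of that calling convention (the countBytes helper is hypothetical, not part of the commit):

```zig
const std = @import("std");

// Hypothetical consumer: accepts the concrete reader type instead of anytype.
fn countBytes(r: *std.Io.Reader) !usize {
    var n: usize = 0;
    while (true) {
        _ = r.takeByte() catch |err| switch (err) {
            error.EndOfStream => return n,
            else => |e| return e,
        };
        n += 1;
    }
}

test "fixed Reader replaces fixedBufferStream + GenericReader" {
    var r: std.Io.Reader = .fixed("hello");
    try std.testing.expectEqual(@as(usize, 5), try countBytes(&r));
}
```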

View File

@@ -322,14 +322,14 @@ pub fn addExtra(
 return error.FatalError;
 }
-pub fn render(comp: *Compilation, config: std.io.tty.Config) void {
+pub fn render(comp: *Compilation, config: std.Io.tty.Config) void {
 if (comp.diagnostics.list.items.len == 0) return;
 var buffer: [1000]u8 = undefined;
 var m = defaultMsgWriter(config, &buffer);
 defer m.deinit();
 renderMessages(comp, &m);
 }
-pub fn defaultMsgWriter(config: std.io.tty.Config, buffer: []u8) MsgWriter {
+pub fn defaultMsgWriter(config: std.Io.tty.Config, buffer: []u8) MsgWriter {
 return MsgWriter.init(config, buffer);
 }
@@ -451,7 +451,7 @@ pub fn renderMessage(comp: *Compilation, m: anytype, msg: Message) void {
 },
 .normalized => {
 const f = struct {
-pub fn f(bytes: []const u8, writer: *std.io.Writer) std.io.Writer.Error!void {
+pub fn f(bytes: []const u8, writer: *std.Io.Writer) std.Io.Writer.Error!void {
 var it: std.unicode.Utf8Iterator = .{
 .bytes = bytes,
 .i = 0,
@@ -526,10 +526,10 @@ fn tagKind(d: *Diagnostics, tag: Tag, langopts: LangOpts) Kind {
 }
 const MsgWriter = struct {
-writer: *std.io.Writer,
-config: std.io.tty.Config,
-fn init(config: std.io.tty.Config, buffer: []u8) MsgWriter {
+writer: *std.Io.Writer,
+config: std.Io.tty.Config,
+fn init(config: std.Io.tty.Config, buffer: []u8) MsgWriter {
 return .{
 .writer = std.debug.lockStderrWriter(buffer),
 .config = config,
@@ -549,7 +549,7 @@ const MsgWriter = struct {
 m.writer.writeAll(msg) catch {};
 }
-fn setColor(m: *MsgWriter, color: std.io.tty.Color) void {
+fn setColor(m: *MsgWriter, color: std.Io.tty.Color) void {
 m.config.setColor(m.writer, color) catch {};
 }

View File

@@ -544,7 +544,7 @@ pub fn renderErrors(d: *Driver) void {
 Diagnostics.render(d.comp, d.detectConfig(std.fs.File.stderr()));
 }
-pub fn detectConfig(d: *Driver, file: std.fs.File) std.io.tty.Config {
+pub fn detectConfig(d: *Driver, file: std.fs.File) std.Io.tty.Config {
 if (d.color == true) return .escape_codes;
 if (d.color == false) return .no_color;
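Only the namespace moves in these hunks: std.io.tty becomes std.Io.tty, with the same Config/Color API. A small usage sketch (function and message names are illustrative):

```zig
const std = @import("std");

// Writes a colored "error:" prefix when the terminal supports it; Config decides
// between no-op, ANSI escape codes, and the Windows console API.
fn printError(w: *std.Io.Writer, config: std.Io.tty.Config, msg: []const u8) !void {
    try config.setColor(w, .red);
    try w.writeAll("error: ");
    try config.setColor(w, .reset);
    try w.writeAll(msg);
    try w.writeByte('\n');
}
```

As in the hunks elsewhere in this commit, a Config is typically obtained with std.Io.tty.detectConfig(std.fs.File.stderr()) or chosen explicitly as .escape_codes / .no_color.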

View File

@@ -800,7 +800,7 @@ pub fn nodeLoc(tree: *const Tree, node: NodeIndex) ?Source.Location {
 return tree.tokens.items(.loc)[@intFromEnum(tok_i)];
 }
-pub fn dump(tree: *const Tree, config: std.io.tty.Config, writer: anytype) !void {
+pub fn dump(tree: *const Tree, config: std.Io.tty.Config, writer: anytype) !void {
 const mapper = tree.comp.string_interner.getFastTypeMapper(tree.comp.gpa) catch tree.comp.string_interner.getSlowTypeMapper();
 defer mapper.deinit(tree.comp.gpa);
@@ -855,17 +855,17 @@ fn dumpNode(
 node: NodeIndex,
 level: u32,
 mapper: StringInterner.TypeMapper,
-config: std.io.tty.Config,
+config: std.Io.tty.Config,
 w: anytype,
 ) !void {
 const delta = 2;
 const half = delta / 2;
-const TYPE = std.io.tty.Color.bright_magenta;
-const TAG = std.io.tty.Color.bright_cyan;
-const IMPLICIT = std.io.tty.Color.bright_blue;
-const NAME = std.io.tty.Color.bright_red;
-const LITERAL = std.io.tty.Color.bright_green;
-const ATTRIBUTE = std.io.tty.Color.bright_yellow;
+const TYPE = std.Io.tty.Color.bright_magenta;
+const TAG = std.Io.tty.Color.bright_cyan;
+const IMPLICIT = std.Io.tty.Color.bright_blue;
+const NAME = std.Io.tty.Color.bright_red;
+const LITERAL = std.Io.tty.Color.bright_green;
+const ATTRIBUTE = std.Io.tty.Color.bright_yellow;
 std.debug.assert(node != .none);
 const tag = tree.nodes.items(.tag)[@intFromEnum(node)];

View File

@@ -578,8 +578,7 @@ pub fn toLLVMTriple(target: std.Target, buf: []u8) []const u8 {
 // 64 bytes is assumed to be large enough to hold any target triple; increase if necessary
 std.debug.assert(buf.len >= 64);
-var stream = std.io.fixedBufferStream(buf);
-const writer = stream.writer();
+var writer: std.Io.Writer = .fixed(buf);
 const llvm_arch = switch (target.cpu.arch) {
 .arm => "arm",
@@ -718,7 +717,7 @@ pub fn toLLVMTriple(target: std.Target, buf: []u8) []const u8 {
 .ohoseabi => "ohoseabi",
 };
 writer.writeAll(llvm_abi) catch unreachable;
-return stream.getWritten();
+return writer.buffered();
 }
 test "alignment functions - smoke test" {

View File

@@ -374,21 +374,21 @@ pub fn deinit(ir: *Ir, gpa: std.mem.Allocator) void {
 ir.* = undefined;
 }
-const TYPE = std.io.tty.Color.bright_magenta;
-const INST = std.io.tty.Color.bright_cyan;
-const REF = std.io.tty.Color.bright_blue;
-const LITERAL = std.io.tty.Color.bright_green;
-const ATTRIBUTE = std.io.tty.Color.bright_yellow;
+const TYPE = std.Io.tty.Color.bright_magenta;
+const INST = std.Io.tty.Color.bright_cyan;
+const REF = std.Io.tty.Color.bright_blue;
+const LITERAL = std.Io.tty.Color.bright_green;
+const ATTRIBUTE = std.Io.tty.Color.bright_yellow;
 const RefMap = std.AutoArrayHashMap(Ref, void);
-pub fn dump(ir: *const Ir, gpa: Allocator, config: std.io.tty.Config, w: anytype) !void {
+pub fn dump(ir: *const Ir, gpa: Allocator, config: std.Io.tty.Config, w: anytype) !void {
 for (ir.decls.keys(), ir.decls.values()) |name, *decl| {
 try ir.dumpDecl(decl, gpa, name, config, w);
 }
 }
-fn dumpDecl(ir: *const Ir, decl: *const Decl, gpa: Allocator, name: []const u8, config: std.io.tty.Config, w: anytype) !void {
+fn dumpDecl(ir: *const Ir, decl: *const Decl, gpa: Allocator, name: []const u8, config: std.Io.tty.Config, w: anytype) !void {
 const tags = decl.instructions.items(.tag);
 const data = decl.instructions.items(.data);
@@ -609,7 +609,7 @@ fn dumpDecl(ir: *const Ir, decl: *const Decl, gpa: Allocator, name: []const u8,
 try w.writeAll("}\n\n");
 }
-fn writeType(ir: Ir, ty_ref: Interner.Ref, config: std.io.tty.Config, w: anytype) !void {
+fn writeType(ir: Ir, ty_ref: Interner.Ref, config: std.Io.tty.Config, w: anytype) !void {
 const ty = ir.interner.get(ty_ref);
 try config.setColor(w, TYPE);
 switch (ty) {
@@ -639,7 +639,7 @@ fn writeType(ir: Ir, ty_ref: Interner.Ref, config: std.io.tty.Config, w: anytype
 }
 }
-fn writeValue(ir: Ir, val: Interner.Ref, config: std.io.tty.Config, w: anytype) !void {
+fn writeValue(ir: Ir, val: Interner.Ref, config: std.Io.tty.Config, w: anytype) !void {
 try config.setColor(w, LITERAL);
 const key = ir.interner.get(val);
 switch (key) {
@@ -655,7 +655,7 @@ fn writeValue(ir: Ir, val: Interner.Ref, config: std.io.tty.Config, w: anytype)
 }
 }
-fn writeRef(ir: Ir, decl: *const Decl, ref_map: *RefMap, ref: Ref, config: std.io.tty.Config, w: anytype) !void {
+fn writeRef(ir: Ir, decl: *const Decl, ref_map: *RefMap, ref: Ref, config: std.Io.tty.Config, w: anytype) !void {
 assert(ref != .none);
 const index = @intFromEnum(ref);
 const ty_ref = decl.instructions.items(.ty)[index];
@@ -678,7 +678,7 @@ fn writeRef(ir: Ir, decl: *const Decl, ref_map: *RefMap, ref: Ref, config: std.i
 try w.print(" %{d}", .{ref_index});
 }
-fn writeNewRef(ir: Ir, decl: *const Decl, ref_map: *RefMap, ref: Ref, config: std.io.tty.Config, w: anytype) !void {
+fn writeNewRef(ir: Ir, decl: *const Decl, ref_map: *RefMap, ref: Ref, config: std.Io.tty.Config, w: anytype) !void {
 try ref_map.put(ref, {});
 try w.writeAll(" ");
 try ir.writeRef(decl, ref_map, ref, config, w);
@@ -687,7 +687,7 @@ fn writeNewRef(ir: Ir, decl: *const Decl, ref_map: *RefMap, ref: Ref, config: st
 try config.setColor(w, INST);
 }
-fn writeLabel(decl: *const Decl, label_map: *RefMap, ref: Ref, config: std.io.tty.Config, w: anytype) !void {
+fn writeLabel(decl: *const Decl, label_map: *RefMap, ref: Ref, config: std.Io.tty.Config, w: anytype) !void {
 assert(ref != .none);
 const index = @intFromEnum(ref);
 const label = decl.instructions.items(.data)[index].label;

View File

@@ -1783,7 +1783,7 @@ fn renderErrorsAndExit(comp: *aro.Compilation) noreturn {
 defer std.process.exit(1);
 var buffer: [1000]u8 = undefined;
-var writer = aro.Diagnostics.defaultMsgWriter(std.io.tty.detectConfig(std.fs.File.stderr()), &buffer);
+var writer = aro.Diagnostics.defaultMsgWriter(std.Io.tty.detectConfig(std.fs.File.stderr()), &buffer);
 defer writer.deinit(); // writer deinit must run *before* exit so that stderr is flushed
 var saw_error = false;

View File

@@ -1,7 +1,6 @@
 const std = @import("std");
 const builtin = @import("builtin");
 const assert = std.debug.assert;
-const io = std.io;
 const fmt = std.fmt;
 const mem = std.mem;
 const process = std.process;
@@ -11,8 +10,9 @@ const Watch = std.Build.Watch;
 const WebServer = std.Build.WebServer;
 const Allocator = std.mem.Allocator;
 const fatal = std.process.fatal;
-const Writer = std.io.Writer;
+const Writer = std.Io.Writer;
 const runner = @This();
+const tty = std.Io.tty;
 pub const root = @import("@build");
 pub const dependencies = @import("@dependencies");
@@ -576,7 +576,7 @@ const Run = struct {
 claimed_rss: usize,
 summary: Summary,
-ttyconf: std.io.tty.Config,
+ttyconf: tty.Config,
 stderr: File,
 fn cleanExit(run: Run) void {
@@ -819,7 +819,7 @@ const PrintNode = struct {
 last: bool = false,
 };
-fn printPrefix(node: *PrintNode, stderr: *Writer, ttyconf: std.io.tty.Config) !void {
+fn printPrefix(node: *PrintNode, stderr: *Writer, ttyconf: tty.Config) !void {
 const parent = node.parent orelse return;
 if (parent.parent == null) return;
 try printPrefix(parent, stderr, ttyconf);
@@ -833,7 +833,7 @@ fn printPrefix(node: *PrintNode, stderr: *Writer, ttyconf: std.io.tty.Config) !v
 }
 }
-fn printChildNodePrefix(stderr: *Writer, ttyconf: std.io.tty.Config) !void {
+fn printChildNodePrefix(stderr: *Writer, ttyconf: tty.Config) !void {
 try stderr.writeAll(switch (ttyconf) {
 .no_color, .windows_api => "+- ",
 .escape_codes => "\x1B\x28\x30\x6d\x71\x1B\x28\x42 ", //
@@ -843,7 +843,7 @@ fn printChildNodePrefix(stderr: *Writer, ttyconf: std.io.tty.Config) !void {
 fn printStepStatus(
 s: *Step,
 stderr: *Writer,
-ttyconf: std.io.tty.Config,
+ttyconf: tty.Config,
 run: *const Run,
 ) !void {
 switch (s.state) {
@@ -923,7 +923,7 @@ fn printStepStatus(
 fn printStepFailure(
 s: *Step,
 stderr: *Writer,
-ttyconf: std.io.tty.Config,
+ttyconf: tty.Config,
 ) !void {
 if (s.result_error_bundle.errorMessageCount() > 0) {
 try ttyconf.setColor(stderr, .red);
@@ -977,7 +977,7 @@ fn printTreeStep(
 s: *Step,
 run: *const Run,
 stderr: *Writer,
-ttyconf: std.io.tty.Config,
+ttyconf: tty.Config,
 parent_node: *PrintNode,
 step_stack: *std.AutoArrayHashMapUnmanaged(*Step, void),
 ) !void {
@@ -1494,9 +1494,9 @@ fn uncleanExit() error{UncleanExit} {
 const Color = std.zig.Color;
 const Summary = enum { all, new, failures, none };
-fn get_tty_conf(color: Color, stderr: File) std.io.tty.Config {
+fn get_tty_conf(color: Color, stderr: File) tty.Config {
 return switch (color) {
-.auto => std.io.tty.detectConfig(stderr),
+.auto => tty.detectConfig(stderr),
 .on => .escape_codes,
 .off => .no_color,
 };

View File

@@ -1,6 +1,5 @@
 const std = @import("std");
 const mem = std.mem;
-const io = std.io;
 const LibCInstallation = std.zig.LibCInstallation;
 const usage_libc =

View File

@@ -381,7 +381,7 @@ fn transformationsToFixups(
 }
 }
-var other_source: std.io.Writer.Allocating = .init(gpa);
+var other_source: std.Io.Writer.Allocating = .init(gpa);
 defer other_source.deinit();
 try other_source.writer.writeAll("struct {\n");
 try other_file_ast.render(gpa, &other_source.writer, inlined_fixups);
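std.Io.Writer.Allocating is the allocator-backed writer used when the output length is not known up front. A minimal sketch of the pattern above (the writeHeader helper is hypothetical); retrieving the accumulated bytes is not shown in this hunk, so it is omitted here:

```zig
const std = @import("std");

// Hypothetical serializer that only needs the generic *std.Io.Writer interface.
fn writeHeader(w: *std.Io.Writer, name: []const u8) !void {
    try w.print("struct {s} {{\n", .{name});
    try w.writeAll("}\n");
}

test "Allocating writer gathers output in memory" {
    var out: std.Io.Writer.Allocating = .init(std.testing.allocator);
    defer out.deinit();
    // `.writer` is the *std.Io.Writer backed by a growable, allocator-owned buffer.
    try writeHeader(&out.writer, "Foo");
}
```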

View File

@@ -22,7 +22,7 @@ pub const Tree = struct {
 return @alignCast(@fieldParentPtr("base", self.node));
 }
-pub fn dump(self: *Tree, writer: *std.io.Writer) !void {
+pub fn dump(self: *Tree, writer: *std.Io.Writer) !void {
 try self.node.dump(self, writer, 0);
 }
 };
@@ -726,9 +726,9 @@ pub const Node = struct {
 pub fn dump(
 node: *const Node,
 tree: *const Tree,
-writer: *std.io.Writer,
+writer: *std.Io.Writer,
 indent: usize,
-) std.io.Writer.Error!void {
+) std.Io.Writer.Error!void {
 try writer.splatByteAll(' ', indent);
 try writer.writeAll(@tagName(node.id));
 switch (node.id) {

View File

@@ -124,13 +124,13 @@ pub const Diagnostics = struct {
 try self.errors.append(self.allocator, error_details);
 }
-pub fn renderToStdErr(self: *Diagnostics, args: []const []const u8, config: std.io.tty.Config) void {
+pub fn renderToStdErr(self: *Diagnostics, args: []const []const u8, config: std.Io.tty.Config) void {
 const stderr = std.debug.lockStderrWriter(&.{});
 defer std.debug.unlockStderrWriter();
 self.renderToWriter(args, stderr, config) catch return;
 }
-pub fn renderToWriter(self: *Diagnostics, args: []const []const u8, writer: *std.io.Writer, config: std.io.tty.Config) !void {
+pub fn renderToWriter(self: *Diagnostics, args: []const []const u8, writer: *std.Io.Writer, config: std.Io.tty.Config) !void {
 for (self.errors.items) |err_details| {
 try renderErrorMessage(writer, config, err_details, args);
 }
@@ -1343,7 +1343,7 @@ test parsePercent {
 try std.testing.expectError(error.InvalidFormat, parsePercent("~1"));
 }
-pub fn renderErrorMessage(writer: *std.io.Writer, config: std.io.tty.Config, err_details: Diagnostics.ErrorDetails, args: []const []const u8) !void {
+pub fn renderErrorMessage(writer: *std.Io.Writer, config: std.Io.tty.Config, err_details: Diagnostics.ErrorDetails, args: []const []const u8) !void {
 try config.setColor(writer, .dim);
 try writer.writeAll("<cli>");
 try config.setColor(writer, .reset);
@@ -1470,7 +1470,7 @@ fn testParseOutput(args: []const []const u8, expected_output: []const u8) !?Opti
 var diagnostics = Diagnostics.init(std.testing.allocator);
 defer diagnostics.deinit();
-var output: std.io.Writer.Allocating = .init(std.testing.allocator);
+var output: std.Io.Writer.Allocating = .init(std.testing.allocator);
 defer output.deinit();
 var options = parse(std.testing.allocator, args, &diagnostics) catch |err| switch (err) {

View File

@@ -61,7 +61,7 @@ pub const Diagnostics = struct {
 return @intCast(index);
 }
-pub fn renderToStdErr(self: *Diagnostics, cwd: std.fs.Dir, source: []const u8, tty_config: std.io.tty.Config, source_mappings: ?SourceMappings) void {
+pub fn renderToStdErr(self: *Diagnostics, cwd: std.fs.Dir, source: []const u8, tty_config: std.Io.tty.Config, source_mappings: ?SourceMappings) void {
 const stderr = std.debug.lockStderrWriter(&.{});
 defer std.debug.unlockStderrWriter();
 for (self.errors.items) |err_details| {
@@ -70,7 +70,7 @@ pub const Diagnostics = struct {
 }
 pub fn renderToStdErrDetectTTY(self: *Diagnostics, cwd: std.fs.Dir, source: []const u8, source_mappings: ?SourceMappings) void {
-const tty_config = std.io.tty.detectConfig(std.fs.File.stderr());
+const tty_config = std.Io.tty.detectConfig(std.fs.File.stderr());
 return self.renderToStdErr(cwd, source, tty_config, source_mappings);
 }
@@ -409,7 +409,7 @@ pub const ErrorDetails = struct {
 failed_to_open_cwd,
 };
-fn formatToken(ctx: TokenFormatContext, writer: *std.io.Writer) std.io.Writer.Error!void {
+fn formatToken(ctx: TokenFormatContext, writer: *std.Io.Writer) std.Io.Writer.Error!void {
 switch (ctx.token.id) {
 .eof => return writer.writeAll(ctx.token.id.nameForErrorDisplay()),
 else => {},
@@ -894,7 +894,7 @@ fn cellCount(code_page: SupportedCodePage, source: []const u8, start_index: usiz
 const truncated_str = "<...truncated...>";
-pub fn renderErrorMessage(writer: *std.io.Writer, tty_config: std.io.tty.Config, cwd: std.fs.Dir, err_details: ErrorDetails, source: []const u8, strings: []const []const u8, source_mappings: ?SourceMappings) !void {
+pub fn renderErrorMessage(writer: *std.Io.Writer, tty_config: std.Io.tty.Config, cwd: std.fs.Dir, err_details: ErrorDetails, source: []const u8, strings: []const []const u8, source_mappings: ?SourceMappings) !void {
 if (err_details.type == .hint) return;
 const source_line_start = err_details.token.getLineStartForErrorDisplay(source);

View File

@@ -24,7 +24,7 @@ pub fn main() !void {
 const arena = arena_state.allocator();
 const stderr = std.fs.File.stderr();
-const stderr_config = std.io.tty.detectConfig(stderr);
+const stderr_config = std.Io.tty.detectConfig(stderr);
 const args = try std.process.argsAlloc(allocator);
 defer std.process.argsFree(allocator, args);
@@ -621,7 +621,7 @@ const SourceMappings = @import("source_mapping.zig").SourceMappings;
 const ErrorHandler = union(enum) {
 server: std.zig.Server,
-tty: std.io.tty.Config,
+tty: std.Io.tty.Config,
 pub fn emitCliDiagnostics(
 self: *ErrorHandler,
@@ -984,7 +984,7 @@ const MsgWriter = struct {
 m.buf.appendSlice(msg) catch {};
 }
-pub fn setColor(m: *MsgWriter, color: std.io.tty.Color) void {
+pub fn setColor(m: *MsgWriter, color: std.Io.tty.Color) void {
 _ = m;
 _ = color;
 }

View File

@@ -164,7 +164,7 @@ pub const Language = packed struct(u16) {
 return @bitCast(self);
 }
-pub fn format(language: Language, w: *std.io.Writer) std.io.Writer.Error!void {
+pub fn format(language: Language, w: *std.Io.Writer) std.Io.Writer.Error!void {
 const language_id = language.asInt();
 const language_name = language_name: {
 if (std.enums.fromInt(lang.LanguageId, language_id)) |lang_enum_val| {
@@ -439,7 +439,7 @@ pub const NameOrOrdinal = union(enum) {
 }
 }
-pub fn format(self: NameOrOrdinal, w: *std.io.Writer) !void {
+pub fn format(self: NameOrOrdinal, w: *std.Io.Writer) !void {
 switch (self) {
 .name => |name| {
 try w.print("{f}", .{std.unicode.fmtUtf16Le(name)});
@@ -450,7 +450,7 @@ pub const NameOrOrdinal = union(enum) {
 }
 }
-fn formatResourceType(self: NameOrOrdinal, w: *std.io.Writer) std.io.Writer.Error!void {
+fn formatResourceType(self: NameOrOrdinal, w: *std.Io.Writer) std.Io.Writer.Error!void {
 switch (self) {
 .name => |name| {
 try w.print("{f}", .{std.unicode.fmtUtf16Le(name)});

View File

@@ -1,7 +1,6 @@
 const std = @import("std");
 const builtin = @import("builtin");
-/// Like std.io.FixedBufferStream but does no bounds checking
 pub const UncheckedSliceWriter = struct {
 const Self = @This();
@@ -86,7 +85,7 @@ pub const ErrorMessageType = enum { err, warning, note };
 /// Used for generic colored errors/warnings/notes, more context-specific error messages
 /// are handled elsewhere.
-pub fn renderErrorMessage(writer: *std.io.Writer, config: std.io.tty.Config, msg_type: ErrorMessageType, comptime format: []const u8, args: anytype) !void {
+pub fn renderErrorMessage(writer: *std.Io.Writer, config: std.Io.tty.Config, msg_type: ErrorMessageType, comptime format: []const u8, args: anytype) !void {
 switch (msg_type) {
 .err => {
 try config.setColor(writer, .bold);

View File

@@ -1,7 +1,6 @@
 const builtin = @import("builtin");
 const std = @import("std");
 const mem = std.mem;
-const io = std.io;
 const Allocator = std.mem.Allocator;
 const assert = std.debug.assert;
 const Cache = std.Build.Cache;
@@ -318,7 +317,7 @@ fn buildWasmBinary(
 child.stderr_behavior = .Pipe;
 try child.spawn();
-var poller = std.io.poll(gpa, enum { stdout, stderr }, .{
+var poller = std.Io.poll(gpa, enum { stdout, stderr }, .{
 .stdout = child.stdout.?,
 .stderr = child.stderr.?,
 });

View File

@@ -1,6 +1,5 @@
 const std = @import("std.zig");
 const builtin = @import("builtin");
-const io = std.io;
 const fs = std.fs;
 const mem = std.mem;
 const debug = std.debug;
@@ -1830,7 +1829,8 @@ pub fn runAllowFail(
 try Step.handleVerbose2(b, null, child.env_map, argv);
 try child.spawn();
-const stdout = child.stdout.?.deprecatedReader().readAllAlloc(b.allocator, max_output_size) catch {
+var stdout_reader = child.stdout.?.readerStreaming(&.{});
+const stdout = stdout_reader.interface.allocRemaining(b.allocator, .limited(max_output_size)) catch {
 return error.ReadFailure;
 };
 errdefer b.allocator.free(stdout);
@@ -2540,7 +2540,7 @@ fn dumpBadDirnameHelp(
 try w.print(msg, args);
-const tty_config = std.io.tty.detectConfig(.stderr());
+const tty_config = std.Io.tty.detectConfig(.stderr());
 if (fail_step) |s| {
 tty_config.setColor(w, .red) catch {};
@@ -2566,8 +2566,8 @@ fn dumpBadDirnameHelp(
 /// In this function the stderr mutex has already been locked.
 pub fn dumpBadGetPathHelp(
 s: *Step,
-w: *std.io.Writer,
-tty_config: std.io.tty.Config,
+w: *std.Io.Writer,
+tty_config: std.Io.tty.Config,
 src_builder: *Build,
 asking_step: ?*Step,
 ) anyerror!void {
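The runAllowFail hunk above shows the replacement for deprecatedReader().readAllAlloc: a streaming File reader plus allocRemaining with a byte limit. A sketch of that pattern in isolation (the readAllStdout helper is hypothetical; child-process setup is elided):

```zig
const std = @import("std");

fn readAllStdout(gpa: std.mem.Allocator, child: *std.process.Child, limit: usize) ![]u8 {
    // Streaming reader over the pipe; `&.{}` supplies no extra buffer.
    var stdout_reader = child.stdout.?.readerStreaming(&.{});
    // `.interface` is the generic *std.Io.Reader; allocRemaining reads until
    // end-of-stream and fails once `limit` bytes would be exceeded.
    return stdout_reader.interface.allocRemaining(gpa, .limited(limit)) catch return error.ReadFailure;
}
```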

View File

@@ -286,7 +286,7 @@ pub const HashHelper = struct {
 pub fn binToHex(bin_digest: BinDigest) HexDigest {
 var out_digest: HexDigest = undefined;
-var w: std.io.Writer = .fixed(&out_digest);
+var w: std.Io.Writer = .fixed(&out_digest);
 w.printHex(&bin_digest, .lower) catch unreachable;
 return out_digest;
 }
@@ -664,7 +664,7 @@ pub const Manifest = struct {
 const input_file_count = self.files.entries.len;
 var tiny_buffer: [1]u8 = undefined; // allows allocRemaining to detect limit exceeded
 var manifest_reader = self.manifest_file.?.reader(&tiny_buffer); // Reads positionally from zero.
-const limit: std.io.Limit = .limited(manifest_file_size_max);
+const limit: std.Io.Limit = .limited(manifest_file_size_max);
 const file_contents = manifest_reader.interface.allocRemaining(gpa, limit) catch |err| switch (err) {
 error.OutOfMemory => return error.OutOfMemory,
 error.StreamTooLong => return error.OutOfMemory,
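The same combination works for positional file reads: File.reader plus a std.Io.Limit cap, as in the manifest hunk above. A compact sketch (function and parameter names are illustrative):

```zig
const std = @import("std");

fn readWholeFile(gpa: std.mem.Allocator, file: std.fs.File, size_max: usize) ![]u8 {
    // A 1-byte buffer is enough for allocRemaining to notice the limit being hit.
    var tiny_buffer: [1]u8 = undefined;
    var file_reader = file.reader(&tiny_buffer); // positional reads from offset zero
    const limit: std.Io.Limit = .limited(size_max);
    return file_reader.interface.allocRemaining(gpa, limit);
}
```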

View File

@@ -56,7 +56,7 @@ pub fn closeAndFree(self: *Directory, gpa: Allocator) void {
 self.* = undefined;
 }
-pub fn format(self: Directory, writer: *std.io.Writer) std.io.Writer.Error!void {
+pub fn format(self: Directory, writer: *std.Io.Writer) std.Io.Writer.Error!void {
 if (self.path) |p| {
 try writer.writeAll(p);
 try writer.writeAll(fs.path.sep_str);

View File

@@ -151,7 +151,7 @@ pub fn fmtEscapeString(path: Path) std.fmt.Formatter(Path, formatEscapeString) {
 return .{ .data = path };
 }
-pub fn formatEscapeString(path: Path, writer: *std.io.Writer) std.io.Writer.Error!void {
+pub fn formatEscapeString(path: Path, writer: *std.Io.Writer) std.Io.Writer.Error!void {
 if (path.root_dir.path) |p| {
 try std.zig.stringEscape(p, writer);
 if (path.sub_path.len > 0) try std.zig.stringEscape(fs.path.sep_str, writer);
@@ -167,7 +167,7 @@ pub fn fmtEscapeChar(path: Path) std.fmt.Formatter(Path, formatEscapeChar) {
 }
 /// Deprecated, use double quoted escape to print paths.
-pub fn formatEscapeChar(path: Path, writer: *std.io.Writer) std.io.Writer.Error!void {
+pub fn formatEscapeChar(path: Path, writer: *std.Io.Writer) std.Io.Writer.Error!void {
 if (path.root_dir.path) |p| {
 for (p) |byte| try std.zig.charEscape(byte, writer);
 if (path.sub_path.len > 0) try writer.writeByte(fs.path.sep);
@@ -177,7 +177,7 @@ pub fn formatEscapeChar(path: Path, writer: *std.io.Writer) std.io.Writer.Error!
 }
 }
-pub fn format(self: Path, writer: *std.io.Writer) std.io.Writer.Error!void {
+pub fn format(self: Path, writer: *std.Io.Writer) std.Io.Writer.Error!void {
 if (std.fs.path.isAbsolute(self.sub_path)) {
 try writer.writeAll(self.sub_path);
 return;
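These format hooks all follow the same shape after the rename: a method taking the concrete *std.Io.Writer and returning std.Io.Writer.Error, driven through the {f} specifier (or a std.fmt.Formatter wrapper, as in fmtEscapeString above). A minimal sketch with an illustrative type, assuming {f} dispatches to a declared format method:

```zig
const std = @import("std");

const Point = struct {
    x: i32,
    y: i32,

    pub fn format(self: Point, writer: *std.Io.Writer) std.Io.Writer.Error!void {
        try writer.print("({d}, {d})", .{ self.x, self.y });
    }
};

test "format method drives the {f} specifier" {
    var buf: [32]u8 = undefined;
    var w: std.Io.Writer = .fixed(&buf);
    try w.print("{f}", .{Point{ .x = 1, .y = 2 }});
    try std.testing.expectEqualStrings("(1, 2)", w.buffered());
}
```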

View File

@@ -127,7 +127,7 @@ pub fn deinit(fuzz: *Fuzz) void {
 gpa.free(fuzz.run_steps);
 }
-fn rebuildTestsWorkerRun(run: *Step.Run, gpa: Allocator, ttyconf: std.io.tty.Config, parent_prog_node: std.Progress.Node) void {
+fn rebuildTestsWorkerRun(run: *Step.Run, gpa: Allocator, ttyconf: std.Io.tty.Config, parent_prog_node: std.Progress.Node) void {
 rebuildTestsWorkerRunFallible(run, gpa, ttyconf, parent_prog_node) catch |err| {
 const compile = run.producer.?;
 log.err("step '{s}': failed to rebuild in fuzz mode: {s}", .{
@@ -136,7 +136,7 @@ fn rebuildTestsWorkerRun(run: *Step.Run, gpa: Allocator, ttyconf: std.io.tty.Con
 };
 }
-fn rebuildTestsWorkerRunFallible(run: *Step.Run, gpa: Allocator, ttyconf: std.io.tty.Config, parent_prog_node: std.Progress.Node) !void {
+fn rebuildTestsWorkerRunFallible(run: *Step.Run, gpa: Allocator, ttyconf: std.Io.tty.Config, parent_prog_node: std.Progress.Node) !void {
 const compile = run.producer.?;
 const prog_node = parent_prog_node.start(compile.step.name, 0);
 defer prog_node.end();

View File

@@ -6,7 +6,7 @@ const macho = std.macho;
 const math = std.math;
 const mem = std.mem;
 const testing = std.testing;
-const Writer = std.io.Writer;
+const Writer = std.Io.Writer;
 const CheckObject = @This();
@@ -1462,7 +1462,7 @@ const MachODumper = struct {
 const TrieIterator = struct {
 stream: std.Io.Reader,
-fn readUleb128(it: *TrieIterator) !u64 {
+fn takeLeb128(it: *TrieIterator) !u64 {
 return it.stream.takeLeb128(u64);
 }
@@ -1470,7 +1470,7 @@
 return it.stream.takeSentinel(0);
 }
-fn readByte(it: *TrieIterator) !u8 {
+fn takeByte(it: *TrieIterator) !u8 {
 return it.stream.takeByte();
 }
 };
@@ -1518,12 +1518,12 @@
 prefix: []const u8,
 exports: *std.array_list.Managed(Export),
 ) !void {
-const size = try it.readUleb128();
+const size = try it.takeLeb128();
 if (size > 0) {
-const flags = try it.readUleb128();
+const flags = try it.takeLeb128();
 switch (flags) {
 macho.EXPORT_SYMBOL_FLAGS_REEXPORT => {
-const ord = try it.readUleb128();
+const ord = try it.takeLeb128();
 const name = try arena.dupe(u8, try it.readString());
 try exports.append(.{
 .name = if (name.len > 0) name else prefix,
@@ -1532,8 +1532,8 @@
 });
 },
 macho.EXPORT_SYMBOL_FLAGS_STUB_AND_RESOLVER => {
-const stub_offset = try it.readUleb128();
-const resolver_offset = try it.readUleb128();
+const stub_offset = try it.takeLeb128();
+const resolver_offset = try it.takeLeb128();
 try exports.append(.{
 .name = prefix,
 .tag = .stub_resolver,
@@ -1544,7 +1544,7 @@
 });
 },
 else => {
-const vmoff = try it.readUleb128();
+const vmoff = try it.takeLeb128();
 try exports.append(.{
 .name = prefix,
 .tag = .@"export",
@@ -1563,10 +1563,10 @@
 }
 }
-const nedges = try it.readByte();
+const nedges = try it.takeByte();
 for (0..nedges) |_| {
 const label = try it.readString();
-const off = try it.readUleb128();
+const off = try it.takeLeb128();
 const prefix_label = try std.fmt.allocPrint(arena, "{s}{s}", .{ prefix, label });
 const curr = it.stream.seek;
 it.stream.seek = off;
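The renamed TrieIterator helpers are thin wrappers over std.Io.Reader primitives: takeLeb128, takeSentinel, takeByte, and direct manipulation of seek on a fixed reader. A self-contained sketch (the byte string is illustrative; it assumes takeSentinel consumes the terminator and returns the bytes before it):

```zig
const std = @import("std");

test "fixed Reader: LEB128, sentinel strings, and seek" {
    // 0xE5 0x8E 0x26 is the ULEB128 encoding of 624485.
    var r: std.Io.Reader = .fixed("\xe5\x8e\x26hello\x00\x2a");
    try std.testing.expectEqual(@as(u64, 624485), try r.takeLeb128(u64));
    try std.testing.expectEqualStrings("hello", try r.takeSentinel(0));
    try std.testing.expectEqual(@as(u8, 0x2a), try r.takeByte());
    // For a fixed reader, the current offset into the buffer is `seek`.
    try std.testing.expectEqual(@as(usize, 10), r.seek);
}
```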
@@ -1701,10 +1701,9 @@ const ElfDumper = struct {
 fn parseAndDumpArchive(step: *Step, check: Check, bytes: []const u8) ![]const u8 {
 const gpa = step.owner.allocator;
-var stream = std.io.fixedBufferStream(bytes);
-const reader = stream.reader();
+var reader: std.Io.Reader = .fixed(bytes);
-const magic = try reader.readBytesNoEof(elf.ARMAG.len);
+const magic = try reader.takeArray(elf.ARMAG.len);
 if (!mem.eql(u8, &magic, elf.ARMAG)) {
 return error.InvalidArchiveMagicNumber;
 }
@@ -1722,28 +1721,26 @@
 }
 while (true) {
-if (stream.pos >= ctx.data.len) break;
-if (!mem.isAligned(stream.pos, 2)) stream.pos += 1;
+if (reader.seek >= ctx.data.len) break;
+if (!mem.isAligned(reader.seek, 2)) reader.seek += 1;
-const hdr = try reader.readStruct(elf.ar_hdr);
+const hdr = try reader.takeStruct(elf.ar_hdr, .little);
 if (!mem.eql(u8, &hdr.ar_fmag, elf.ARFMAG)) return error.InvalidArchiveHeaderMagicNumber;
 const size = try hdr.size();
-defer {
-_ = stream.seekBy(size) catch {};
-}
+defer reader.seek += size;
 if (hdr.isSymtab()) {
-try ctx.parseSymtab(ctx.data[stream.pos..][0..size], .p32);
+try ctx.parseSymtab(ctx.data[reader.seek..][0..size], .p32);
 continue;
 }
 if (hdr.isSymtab64()) {
-try ctx.parseSymtab(ctx.data[stream.pos..][0..size], .p64);
+try ctx.parseSymtab(ctx.data[reader.seek..][0..size], .p64);
 continue;
 }
 if (hdr.isStrtab()) {
-ctx.strtab = ctx.data[stream.pos..][0..size];
+ctx.strtab = ctx.data[reader.seek..][0..size];
 continue;
 }
 if (hdr.isSymdef() or hdr.isSymdefSorted()) continue;
@@ -1755,7 +1752,7 @@
 else
 unreachable;
-try ctx.objects.append(gpa, .{ .name = name, .off = stream.pos, .len = size });
+try ctx.objects.append(gpa, .{ .name = name, .off = reader.seek, .len = size });
 }
 var output: std.Io.Writer.Allocating = .init(gpa);
@@ -1783,11 +1780,10 @@
 objects: std.ArrayListUnmanaged(struct { name: []const u8, off: usize, len: usize }) = .empty,
 fn parseSymtab(ctx: *ArchiveContext, raw: []const u8, ptr_width: enum { p32, p64 }) !void {
-var stream = std.io.fixedBufferStream(raw);
-const reader = stream.reader();
+var reader: std.Io.Reader = .fixed(raw);
 const num = switch (ptr_width) {
-.p32 => try reader.readInt(u32, .big),
-.p64 => try reader.readInt(u64, .big),
+.p32 => try reader.takeInt(u32, .big),
+.p64 => try reader.takeInt(u64, .big),
 };
 const ptr_size: usize = switch (ptr_width) {
 .p32 => @sizeOf(u32),
@@ -1802,8 +1798,8 @@
 var stroff: usize = 0;
 for (0..num) |_| {
 const off = switch (ptr_width) {
-.p32 => try reader.readInt(u32, .big),
-.p64 => try reader.readInt(u64, .big),
+.p32 => try reader.takeInt(u32, .big),
+.p64 => try reader.takeInt(u64, .big),
 };
 const name = mem.sliceTo(@as([*:0]const u8, @ptrCast(strtab.ptr + stroff)), 0);
 stroff += name.len + 1;
@@ -1868,10 +1864,9 @@
 fn parseAndDumpObject(step: *Step, check: Check, bytes: []const u8) ![]const u8 {
 const gpa = step.owner.allocator;
-var stream = std.io.fixedBufferStream(bytes);
-const reader = stream.reader();
+var reader: std.Io.Reader = .fixed(bytes);
-const hdr = try reader.readStruct(elf.Elf64_Ehdr);
+const hdr = try reader.takeStruct(elf.Elf64_Ehdr, .little);
 if (!mem.eql(u8, hdr.e_ident[0..4], "\x7fELF")) {
 return error.InvalidMagicNumber;
 }
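takeStruct and takeArray replace readStruct/readBytesNoEof for header parsing. A compact sketch over an illustrative little-endian header (not one of the formats parsed above):

```zig
const std = @import("std");

test "takeArray and takeStruct on a fixed Reader" {
    var r: std.Io.Reader = .fixed("\x7fELF" ++ "\x02\x00\x00\x00");
    // takeArray yields a pointer to a fixed-length chunk of the buffered data.
    try std.testing.expectEqualStrings("\x7fELF", try r.takeArray(4));
    // takeStruct decodes an extern struct using the given byte order.
    const Rest = extern struct { version: u32 };
    const rest = try r.takeStruct(Rest, .little);
    try std.testing.expectEqual(@as(u32, 2), rest.version);
}
```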
@@ -2360,10 +2355,9 @@ const WasmDumper = struct {
 fn parseAndDump(step: *Step, check: Check, bytes: []const u8) ![]const u8 {
 const gpa = step.owner.allocator;
-var fbs = std.io.fixedBufferStream(bytes);
-const reader = fbs.reader();
+var reader: std.Io.Reader = .fixed(bytes);
-const buf = try reader.readBytesNoEof(8);
+const buf = try reader.takeArray(8);
 if (!mem.eql(u8, buf[0..4], &std.wasm.magic)) {
 return error.InvalidMagicByte;
 }
@@ -2373,7 +2367,7 @@
 var output: std.Io.Writer.Allocating = .init(gpa);
 defer output.deinit();
-parseAndDumpInner(step, check, bytes, &fbs, &output.writer) catch |err| switch (err) {
+parseAndDumpInner(step, check, bytes, &reader, &output.writer) catch |err| switch (err) {
 error.EndOfStream => try output.writer.writeAll("\n<UnexpectedEndOfStream>"),
 else => |e| return e,
 };
@@ -2384,21 +2378,19 @@
 step: *Step,
 check: Check,
 bytes: []const u8,
-fbs: *std.io.FixedBufferStream([]const u8),
+reader: *std.Io.Reader,
 writer: *std.Io.Writer,
 ) !void {
-const reader = fbs.reader();
 switch (check.kind) {
 .headers => {
-while (reader.readByte()) |current_byte| {
+while (reader.takeByte()) |current_byte| {
 const section = std.enums.fromInt(std.wasm.Section, current_byte) orelse {
 return step.fail("Found invalid section id '{d}'", .{current_byte});
 };
-const section_length = try std.leb.readUleb128(u32, reader);
-try parseAndDumpSection(step, section, bytes[fbs.pos..][0..section_length], writer);
-fbs.pos += section_length;
+const section_length = try reader.takeLeb128(u32);
+try parseAndDumpSection(step, section, bytes[reader.seek..][0..section_length], writer);
+reader.seek += section_length;
 } else |_| {} // reached end of stream
 },
@@ -2410,10 +2402,9 @@
 step: *Step,
 section: std.wasm.Section,
 data: []const u8,
-writer: anytype,
+writer: *std.Io.Writer,
 ) !void {
-var fbs = std.io.fixedBufferStream(data);
-const reader = fbs.reader();
+var reader: std.Io.Reader = .fixed(data);
 try writer.print(
 \\Section {s}
@@ -2432,31 +2423,31 @@
 .code,
 .data,
 => {
-const entries = try std.leb.readUleb128(u32, reader);
+const entries = try reader.takeLeb128(u32);
 try writer.print("\nentries {d}\n", .{entries});
-try parseSection(step, section, data[fbs.pos..], entries, writer);
+try parseSection(step, section, data[reader.seek..], entries, writer);
 },
 .custom => {
-const name_length = try std.leb.readUleb128(u32, reader);
-const name = data[fbs.pos..][0..name_length];
-fbs.pos += name_length;
+const name_length = try reader.takeLeb128(u32);
+const name = data[reader.seek..][0..name_length];
+reader.seek += name_length;
 try writer.print("\nname {s}\n", .{name});
 if (mem.eql(u8, name, "name")) {
-try parseDumpNames(step, reader, writer, data);
+try parseDumpNames(step, &reader, writer, data);
 } else if (mem.eql(u8, name, "producers")) {
-try parseDumpProducers(reader, writer, data);
+try parseDumpProducers(&reader, writer, data);
 } else if (mem.eql(u8, name, "target_features")) {
-try parseDumpFeatures(reader, writer, data);
+try parseDumpFeatures(&reader, writer, data);
 }
 // TODO: Implement parsing and dumping other custom sections (such as relocations)
 },
 .start => {
-const start = try std.leb.readUleb128(u32, reader);
+const start = try reader.takeLeb128(u32);
 try writer.print("\nstart {d}\n", .{start});
 },
 .data_count => {
-const count = try std.leb.readUleb128(u32, reader);
+const count = try reader.takeLeb128(u32);
 try writer.print("\ncount {d}\n", .{count});
 },
 else => {}, // skip unknown sections
@@ -2464,41 +2455,40 @@
 }
 fn parseSection(step: *Step, section: std.wasm.Section, data: []const u8, entries: u32, writer: anytype) !void {
-var fbs = std.io.fixedBufferStream(data);
-const reader = fbs.reader();
+var reader: std.Io.Reader = .fixed(data);
 switch (section) {
 .type => {
 var i: u32 = 0;
 while (i < entries) : (i += 1) {
-const func_type = try reader.readByte();
+const func_type = try reader.takeByte();
 if (func_type != std.wasm.function_type) {
 return step.fail("expected function type, found byte '{d}'", .{func_type});
 }
-const params = try std.leb.readUleb128(u32, reader);
+const params = try reader.takeLeb128(u32);
 try writer.print("params {d}\n", .{params});
 var index: u32 = 0;
 while (index < params) : (index += 1) {
-_ = try parseDumpType(step, std.wasm.Valtype, reader, writer);
+_ = try parseDumpType(step, std.wasm.Valtype, &reader, writer);
 } else index = 0;
-const returns = try std.leb.readUleb128(u32, reader);
+const returns = try reader.takeLeb128(u32);
 try writer.print("returns {d}\n", .{returns});
 while (index < returns) : (index += 1) {
-_ = try parseDumpType(step, std.wasm.Valtype, reader, writer);
+_ = try parseDumpType(step, std.wasm.Valtype, &reader, writer);
 }
 }
 },
 .import => {
 var i: u32 = 0;
 while (i < entries) : (i += 1) {
-const module_name_len = try std.leb.readUleb128(u32, reader);
-const module_name = data[fbs.pos..][0..module_name_len];
-fbs.pos += module_name_len;
-const name_len = try std.leb.readUleb128(u32, reader);
-const name = data[fbs.pos..][0..name_len];
-fbs.pos += name_len;
-const kind = std.enums.fromInt(std.wasm.ExternalKind, try reader.readByte()) orelse {
+const module_name_len = try reader.takeLeb128(u32);
+const module_name = data[reader.seek..][0..module_name_len];
+reader.seek += module_name_len;
+const name_len = try reader.takeLeb128(u32);
+const name = data[reader.seek..][0..name_len];
+reader.seek += name_len;
+const kind = std.enums.fromInt(std.wasm.ExternalKind, try reader.takeByte()) orelse {
 return step.fail("invalid import kind", .{});
 };
@@ -2510,18 +2500,18 @@
 try writer.writeByte('\n');
 switch (kind) {
 .function => {
-try writer.print("index {d}\n", .{try std.leb.readUleb128(u32, reader)});
+try writer.print("index {d}\n", .{try reader.takeLeb128(u32)});
 },
 .memory => {
-try parseDumpLimits(reader, writer);
+try parseDumpLimits(&reader, writer);
 },
 .global => {
-_ = try parseDumpType(step, std.wasm.Valtype, reader, writer);
-try writer.print("mutable {}\n", .{0x01 == try std.leb.readUleb128(u32, reader)});
+_ = try parseDumpType(step, std.wasm.Valtype, &reader, writer);
+try writer.print("mutable {}\n", .{0x01 == try reader.takeLeb128(u32)});
 },
 .table => {
-_ = try parseDumpType(step, std.wasm.RefType, reader, writer);
-try parseDumpLimits(reader, writer);
+_ = try parseDumpType(step, std.wasm.RefType, &reader, writer);
+try parseDumpLimits(&reader, writer);
 },
 }
 }
@@ -2529,41 +2519,41 @@
 .function => {
 var i: u32 = 0;
 while (i < entries) : (i += 1) {
-try writer.print("index {d}\n", .{try std.leb.readUleb128(u32, reader)});
+try writer.print("index {d}\n", .{try reader.takeLeb128(u32)});
 }
 },
 .table => {
 var i: u32 = 0;
 while (i < entries) : (i += 1) {
-_ = try parseDumpType(step, std.wasm.RefType, reader, writer);
-try parseDumpLimits(reader, writer);
+_ = try parseDumpType(step, std.wasm.RefType, &reader, writer);
+try parseDumpLimits(&reader, writer);
 }
 },
 .memory => {
 var i: u32 = 0;
 while (i < entries) : (i += 1) {
-try parseDumpLimits(reader, writer);
+try parseDumpLimits(&reader, writer);
 }
 },
 .global => {
 var i: u32 = 0;
 while (i < entries) : (i += 1) {
-_ = try parseDumpType(step, std.wasm.Valtype, reader, writer);
-try writer.print("mutable {}\n", .{0x01 == try std.leb.readUleb128(u1, reader)});
-try parseDumpInit(step, reader, writer);
+_ = try parseDumpType(step, std.wasm.Valtype, &reader, writer);
+try writer.print("mutable {}\n", .{0x01 == try reader.takeLeb128(u1)});
+try parseDumpInit(step, &reader, writer);
 }
 },
 .@"export" => {
 var i: u32 = 0;
 while (i < entries) : (i += 1) {
-const name_len = try std.leb.readUleb128(u32, reader);
-const name = data[fbs.pos..][0..name_len];
-fbs.pos += name_len;
-const kind_byte = try std.leb.readUleb128(u8, reader);
+const name_len = try reader.takeLeb128(u32);
+const name = data[reader.seek..][0..name_len];
+reader.seek += name_len;
+const kind_byte = try reader.takeLeb128(u8);
 const kind = std.enums.fromInt(std.wasm.ExternalKind, kind_byte) orelse {
 return step.fail("invalid export kind value '{d}'", .{kind_byte});
 };
-const index = try std.leb.readUleb128(u32, reader);
+const index = try reader.takeLeb128(u32);
 try writer.print(
 \\name {s}
 \\kind {s}
@@ -2575,14 +2565,14 @@
 .element => {
 var i: u32 = 0;
 while (i < entries) : (i += 1) {
-try writer.print("table index {d}\n", .{try std.leb.readUleb128(u32, reader)});
-try parseDumpInit(step, reader, writer);
-const function_indexes = try std.leb.readUleb128(u32, reader);
+try writer.print("table index {d}\n", .{try reader.takeLeb128(u32)});
+try parseDumpInit(step, &reader, writer);
+const function_indexes = try reader.takeLeb128(u32);
 var function_index: u32 = 0;
 try writer.print("indexes {d}\n", .{function_indexes});
 while (function_index < function_indexes) : (function_index += 1) {
-try writer.print("index {d}\n", .{try std.leb.readUleb128(u32, reader)});
+try writer.print("index {d}\n", .{try reader.takeLeb128(u32)});
 }
 }
 },
@@ -2590,27 +2580,27 @@
 .data => {
 var i: u32 = 0;
 while (i < entries) : (i += 1) {
-const flags = try std.leb.readUleb128(u32, reader);
+const flags = try reader.takeLeb128(u32);
 const index = if (flags & 0x02 != 0)
-try std.leb.readUleb128(u32, reader)
+try reader.takeLeb128(u32)
 else
 0;
 try writer.print("memory index 0x{x}\n", .{index});
 if (flags == 0) {
-try parseDumpInit(step, reader, writer);
+try parseDumpInit(step, &reader, writer);
 }
-const size = try std.leb.readUleb128(u32, reader);
+const size = try reader.takeLeb128(u32);
 try writer.print("size {d}\n", .{size});
-try reader.skipBytes(size, .{}); // we do not care about the content of the segments
+try reader.discardAll(size); // we do not care about the content of the segments
 }
 },
 else => unreachable,
 }
 }
-fn parseDumpType(step: *Step, comptime E: type, reader: anytype, writer: anytype) !E {
+fn parseDumpType(step: *Step, comptime E: type, reader: *std.Io.Reader, writer: *std.Io.Writer) !E {
-const byte = try reader.readByte();
+const byte = try reader.takeByte();
 const tag = std.enums.fromInt(E, byte) orelse {
 return step.fail("invalid wasm type value '{d}'", .{byte});
 };
@@ -2619,43 +2609,43 @@
 }
 fn parseDumpLimits(reader: anytype, writer: anytype) !void {
-const flags = try std.leb.readUleb128(u8, reader);
-const min = try std.leb.readUleb128(u32, reader);
+const flags = try reader.takeLeb128(u8);
+const min = try reader.takeLeb128(u32);
 try writer.print("min {x}\n", .{min});
 if (flags != 0) {
-try writer.print("max {x}\n", .{try std.leb.readUleb128(u32, reader)});
+try writer.print("max {x}\n", .{try reader.takeLeb128(u32)});
 }
 }
-fn parseDumpInit(step: *Step, reader: anytype, writer: anytype) !void {
+fn parseDumpInit(step: *Step, reader: *std.Io.Reader, writer: *std.Io.Writer) !void {
-const byte = try reader.readByte();
+const byte = try reader.takeByte();
 const opcode = std.enums.fromInt(std.wasm.Opcode, byte) orelse {
 return step.fail("invalid wasm opcode '{d}'", .{byte});
 };
 switch (opcode) {
-.i32_const => try writer.print("i32.const {x}\n", .{try std.leb.readIleb128(i32, reader)}),
-.i64_const => try writer.print("i64.const {x}\n", .{try std.leb.readIleb128(i64, reader)}),
-.f32_const => try writer.print("f32.const {x}\n", .{@as(f32, @bitCast(try reader.readInt(u32, .little)))}),
-.f64_const => try writer.print("f64.const {x}\n", .{@as(f64, @bitCast(try reader.readInt(u64, .little)))}),
-.global_get => try writer.print("global.get {x}\n", .{try std.leb.readUleb128(u32, reader)}),
+.i32_const => try writer.print("i32.const {x}\n", .{try reader.takeLeb128(i32)}),
+.i64_const => try writer.print("i64.const {x}\n", .{try reader.takeLeb128(i64)}),
+.f32_const => try writer.print("f32.const {x}\n", .{@as(f32, @bitCast(try reader.takeInt(u32, .little)))}),
+.f64_const => try writer.print("f64.const {x}\n", .{@as(f64, @bitCast(try reader.takeInt(u64, .little)))}),
+.global_get => try writer.print("global.get {x}\n", .{try reader.takeLeb128(u32)}),
 else => unreachable,
 }
-const end_opcode = try std.leb.readUleb128(u8, reader);
+const end_opcode = try reader.takeLeb128(u8);
 if (end_opcode != @intFromEnum(std.wasm.Opcode.end)) {
 return step.fail("expected 'end' opcode in init expression", .{});
 }
 }
/// https://webassembly.github.io/spec/core/appendix/custom.html /// https://webassembly.github.io/spec/core/appendix/custom.html
fn parseDumpNames(step: *Step, reader: anytype, writer: anytype, data: []const u8) !void { fn parseDumpNames(step: *Step, reader: *std.Io.Reader, writer: *std.Io.Writer, data: []const u8) !void {
while (reader.context.pos < data.len) { while (reader.context.pos < data.len) {
switch (try parseDumpType(step, std.wasm.NameSubsection, reader, writer)) { switch (try parseDumpType(step, std.wasm.NameSubsection, reader, writer)) {
// The module name subsection ... consists of a single name // The module name subsection ... consists of a single name
// that is assigned to the module itself. // that is assigned to the module itself.
.module => { .module => {
const size = try std.leb.readUleb128(u32, reader); const size = try reader.takeLeb128(u32);
const name_len = try std.leb.readUleb128(u32, reader); const name_len = try reader.takeLeb128(u32);
if (size != name_len + 1) return error.BadSubsectionSize; if (size != name_len + 1) return error.BadSubsectionSize;
if (reader.context.pos + name_len > data.len) return error.UnexpectedEndOfStream; if (reader.context.pos + name_len > data.len) return error.UnexpectedEndOfStream;
try writer.print("name {s}\n", .{data[reader.context.pos..][0..name_len]}); try writer.print("name {s}\n", .{data[reader.context.pos..][0..name_len]});
@ -2665,16 +2655,16 @@ const WasmDumper = struct {
// The function name subsection ... consists of a name map // The function name subsection ... consists of a name map
// assigning function names to function indices. // assigning function names to function indices.
.function, .global, .data_segment => { .function, .global, .data_segment => {
const size = try std.leb.readUleb128(u32, reader); const size = try reader.takeLeb128(u32);
const entries = try std.leb.readUleb128(u32, reader); const entries = try reader.takeLeb128(u32);
try writer.print( try writer.print(
\\size {d} \\size {d}
\\names {d} \\names {d}
\\ \\
, .{ size, entries }); , .{ size, entries });
for (0..entries) |_| { for (0..entries) |_| {
const index = try std.leb.readUleb128(u32, reader); const index = try reader.takeLeb128(u32);
const name_len = try std.leb.readUleb128(u32, reader); const name_len = try reader.takeLeb128(u32);
if (reader.context.pos + name_len > data.len) return error.UnexpectedEndOfStream; if (reader.context.pos + name_len > data.len) return error.UnexpectedEndOfStream;
const name = data[reader.context.pos..][0..name_len]; const name = data[reader.context.pos..][0..name_len];
reader.context.pos += name.len; reader.context.pos += name.len;
@ -2699,16 +2689,16 @@ const WasmDumper = struct {
} }
} }
fn parseDumpProducers(reader: anytype, writer: anytype, data: []const u8) !void { fn parseDumpProducers(reader: *std.Io.Reader, writer: *std.Io.Writer, data: []const u8) !void {
const field_count = try std.leb.readUleb128(u32, reader); const field_count = try reader.takeLeb128(u32);
try writer.print("fields {d}\n", .{field_count}); try writer.print("fields {d}\n", .{field_count});
var current_field: u32 = 0; var current_field: u32 = 0;
while (current_field < field_count) : (current_field += 1) { while (current_field < field_count) : (current_field += 1) {
const field_name_length = try std.leb.readUleb128(u32, reader); const field_name_length = try reader.takeLeb128(u32);
const field_name = data[reader.context.pos..][0..field_name_length]; const field_name = data[reader.context.pos..][0..field_name_length];
reader.context.pos += field_name_length; reader.context.pos += field_name_length;
const value_count = try std.leb.readUleb128(u32, reader); const value_count = try reader.takeLeb128(u32);
try writer.print( try writer.print(
\\field_name {s} \\field_name {s}
\\values {d} \\values {d}
@ -2716,11 +2706,11 @@ const WasmDumper = struct {
try writer.writeByte('\n'); try writer.writeByte('\n');
var current_value: u32 = 0; var current_value: u32 = 0;
while (current_value < value_count) : (current_value += 1) { while (current_value < value_count) : (current_value += 1) {
const value_length = try std.leb.readUleb128(u32, reader); const value_length = try reader.takeLeb128(u32);
const value = data[reader.context.pos..][0..value_length]; const value = data[reader.context.pos..][0..value_length];
reader.context.pos += value_length; reader.context.pos += value_length;
const version_length = try std.leb.readUleb128(u32, reader); const version_length = try reader.takeLeb128(u32);
const version = data[reader.context.pos..][0..version_length]; const version = data[reader.context.pos..][0..version_length];
reader.context.pos += version_length; reader.context.pos += version_length;
@ -2733,14 +2723,14 @@ const WasmDumper = struct {
} }
} }
fn parseDumpFeatures(reader: anytype, writer: anytype, data: []const u8) !void { fn parseDumpFeatures(reader: *std.Io.Reader, writer: *std.Io.Writer, data: []const u8) !void {
const feature_count = try std.leb.readUleb128(u32, reader); const feature_count = try reader.takeLeb128(u32);
try writer.print("features {d}\n", .{feature_count}); try writer.print("features {d}\n", .{feature_count});
var index: u32 = 0; var index: u32 = 0;
while (index < feature_count) : (index += 1) { while (index < feature_count) : (index += 1) {
const prefix_byte = try std.leb.readUleb128(u8, reader); const prefix_byte = try reader.takeLeb128(u8);
const name_length = try std.leb.readUleb128(u32, reader); const name_length = try reader.takeLeb128(u32);
const feature_name = data[reader.context.pos..][0..name_length]; const feature_name = data[reader.context.pos..][0..name_length];
reader.context.pos += name_length; reader.context.pos += name_length;
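The WasmDumper hunks above all apply the same mechanical rewrite: the free functions in std.leb that took a generic reader become methods on *std.Io.Reader, and skipBytes becomes discardAll. A minimal sketch of that shape, assuming the section bytes are already in memory (the names below are illustrative, not from this commit):

const std = @import("std");

// Sketch only: `section_bytes` stands in for a wasm section slice.
fn dumpSegmentCount(section_bytes: []const u8, out: *std.Io.Writer) !void {
    var reader: std.Io.Reader = .fixed(section_bytes);
    const entries = try reader.takeLeb128(u32); // was: std.leb.readUleb128(u32, reader)
    try out.print("entries {d}\n", .{entries});
    const size = try reader.takeLeb128(u32);
    try reader.discardAll(size); // was: reader.skipBytes(size, .{})
}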

View File

@ -2021,7 +2021,7 @@ fn checkCompileErrors(compile: *Compile) !void {
const arena = compile.step.owner.allocator; const arena = compile.step.owner.allocator;
const actual_errors = ae: { const actual_errors = ae: {
var aw: std.io.Writer.Allocating = .init(arena); var aw: std.Io.Writer.Allocating = .init(arena);
defer aw.deinit(); defer aw.deinit();
try actual_eb.renderToWriter(.{ try actual_eb.renderToWriter(.{
.ttyconf = .no_color, .ttyconf = .no_color,

View File

@ -2,7 +2,7 @@ const std = @import("std");
const ConfigHeader = @This(); const ConfigHeader = @This();
const Step = std.Build.Step; const Step = std.Build.Step;
const Allocator = std.mem.Allocator; const Allocator = std.mem.Allocator;
const Writer = std.io.Writer; const Writer = std.Io.Writer;
pub const Style = union(enum) { pub const Style = union(enum) {
/// A configure format supported by autotools that uses `#undef foo` to /// A configure format supported by autotools that uses `#undef foo` to
@ -196,7 +196,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
man.hash.addBytes(config_header.include_path); man.hash.addBytes(config_header.include_path);
man.hash.addOptionalBytes(config_header.include_guard_override); man.hash.addOptionalBytes(config_header.include_guard_override);
var aw: std.io.Writer.Allocating = .init(gpa); var aw: Writer.Allocating = .init(gpa);
defer aw.deinit(); defer aw.deinit();
const bw = &aw.writer; const bw = &aw.writer;
@ -329,7 +329,7 @@ fn render_autoconf_undef(
fn render_autoconf_at( fn render_autoconf_at(
step: *Step, step: *Step,
contents: []const u8, contents: []const u8,
aw: *std.io.Writer.Allocating, aw: *Writer.Allocating,
values: std.StringArrayHashMap(Value), values: std.StringArrayHashMap(Value),
src_path: []const u8, src_path: []const u8,
) !void { ) !void {
@ -753,7 +753,7 @@ fn testReplaceVariablesAutoconfAt(
expected: []const u8, expected: []const u8,
values: std.StringArrayHashMap(Value), values: std.StringArrayHashMap(Value),
) !void { ) !void {
var aw: std.io.Writer.Allocating = .init(allocator); var aw: Writer.Allocating = .init(allocator);
defer aw.deinit(); defer aw.deinit();
const used = try allocator.alloc(bool, values.count()); const used = try allocator.alloc(bool, values.count());

View File

@ -9,7 +9,6 @@ const InstallDir = std.Build.InstallDir;
const Step = std.Build.Step; const Step = std.Build.Step;
const elf = std.elf; const elf = std.elf;
const fs = std.fs; const fs = std.fs;
const io = std.io;
const sort = std.sort; const sort = std.sort;
pub const base_id: Step.Id = .objcopy; pub const base_id: Step.Id = .objcopy;

View File

@ -3,7 +3,7 @@ thread_pool: *std.Thread.Pool,
graph: *const Build.Graph, graph: *const Build.Graph,
all_steps: []const *Build.Step, all_steps: []const *Build.Step,
listen_address: std.net.Address, listen_address: std.net.Address,
ttyconf: std.io.tty.Config, ttyconf: std.Io.tty.Config,
root_prog_node: std.Progress.Node, root_prog_node: std.Progress.Node,
watch: bool, watch: bool,
@ -53,7 +53,7 @@ pub const Options = struct {
thread_pool: *std.Thread.Pool, thread_pool: *std.Thread.Pool,
graph: *const std.Build.Graph, graph: *const std.Build.Graph,
all_steps: []const *Build.Step, all_steps: []const *Build.Step,
ttyconf: std.io.tty.Config, ttyconf: std.Io.tty.Config,
root_prog_node: std.Progress.Node, root_prog_node: std.Progress.Node,
watch: bool, watch: bool,
listen_address: std.net.Address, listen_address: std.net.Address,
@ -557,7 +557,7 @@ fn buildClientWasm(ws: *WebServer, arena: Allocator, optimize: std.builtin.Optim
child.stderr_behavior = .Pipe; child.stderr_behavior = .Pipe;
try child.spawn(); try child.spawn();
var poller = std.io.poll(gpa, enum { stdout, stderr }, .{ var poller = std.Io.poll(gpa, enum { stdout, stderr }, .{
.stdout = child.stdout.?, .stdout = child.stdout.?,
.stderr = child.stderr.?, .stderr = child.stderr.?,
}); });

View File

@ -82,202 +82,6 @@ pub const Limit = enum(usize) {
pub const Reader = @import("Io/Reader.zig"); pub const Reader = @import("Io/Reader.zig");
pub const Writer = @import("Io/Writer.zig"); pub const Writer = @import("Io/Writer.zig");
/// Deprecated in favor of `Reader`.
pub fn GenericReader(
comptime Context: type,
comptime ReadError: type,
/// Returns the number of bytes read. It may be less than buffer.len.
/// If the number of bytes read is 0, it means end of stream.
/// End of stream is not an error condition.
comptime readFn: fn (context: Context, buffer: []u8) ReadError!usize,
) type {
return struct {
context: Context,
pub const Error = ReadError;
pub const NoEofError = ReadError || error{
EndOfStream,
};
pub inline fn read(self: Self, buffer: []u8) Error!usize {
return readFn(self.context, buffer);
}
pub inline fn readAll(self: Self, buffer: []u8) Error!usize {
return @errorCast(self.any().readAll(buffer));
}
pub inline fn readAtLeast(self: Self, buffer: []u8, len: usize) Error!usize {
return @errorCast(self.any().readAtLeast(buffer, len));
}
pub inline fn readNoEof(self: Self, buf: []u8) NoEofError!void {
return @errorCast(self.any().readNoEof(buf));
}
pub inline fn readAllArrayList(
self: Self,
array_list: *std.array_list.Managed(u8),
max_append_size: usize,
) (error{StreamTooLong} || Allocator.Error || Error)!void {
return @errorCast(self.any().readAllArrayList(array_list, max_append_size));
}
pub inline fn readAllArrayListAligned(
self: Self,
comptime alignment: ?Alignment,
array_list: *std.array_list.AlignedManaged(u8, alignment),
max_append_size: usize,
) (error{StreamTooLong} || Allocator.Error || Error)!void {
return @errorCast(self.any().readAllArrayListAligned(
alignment,
array_list,
max_append_size,
));
}
pub inline fn readAllAlloc(
self: Self,
allocator: Allocator,
max_size: usize,
) (Error || Allocator.Error || error{StreamTooLong})![]u8 {
return @errorCast(self.any().readAllAlloc(allocator, max_size));
}
pub inline fn streamUntilDelimiter(
self: Self,
writer: anytype,
delimiter: u8,
optional_max_size: ?usize,
) (NoEofError || error{StreamTooLong} || @TypeOf(writer).Error)!void {
return @errorCast(self.any().streamUntilDelimiter(
writer,
delimiter,
optional_max_size,
));
}
pub inline fn skipUntilDelimiterOrEof(self: Self, delimiter: u8) Error!void {
return @errorCast(self.any().skipUntilDelimiterOrEof(delimiter));
}
pub inline fn readByte(self: Self) NoEofError!u8 {
return @errorCast(self.any().readByte());
}
pub inline fn readByteSigned(self: Self) NoEofError!i8 {
return @errorCast(self.any().readByteSigned());
}
pub inline fn readBytesNoEof(
self: Self,
comptime num_bytes: usize,
) NoEofError![num_bytes]u8 {
return @errorCast(self.any().readBytesNoEof(num_bytes));
}
pub inline fn readInt(self: Self, comptime T: type, endian: std.builtin.Endian) NoEofError!T {
return @errorCast(self.any().readInt(T, endian));
}
pub inline fn readVarInt(
self: Self,
comptime ReturnType: type,
endian: std.builtin.Endian,
size: usize,
) NoEofError!ReturnType {
return @errorCast(self.any().readVarInt(ReturnType, endian, size));
}
pub const SkipBytesOptions = AnyReader.SkipBytesOptions;
pub inline fn skipBytes(
self: Self,
num_bytes: u64,
comptime options: SkipBytesOptions,
) NoEofError!void {
return @errorCast(self.any().skipBytes(num_bytes, options));
}
pub inline fn isBytes(self: Self, slice: []const u8) NoEofError!bool {
return @errorCast(self.any().isBytes(slice));
}
pub inline fn readStruct(self: Self, comptime T: type) NoEofError!T {
return @errorCast(self.any().readStruct(T));
}
pub inline fn readStructEndian(self: Self, comptime T: type, endian: std.builtin.Endian) NoEofError!T {
return @errorCast(self.any().readStructEndian(T, endian));
}
pub const ReadEnumError = NoEofError || error{
/// An integer was read, but it did not match any of the tags in the supplied enum.
InvalidValue,
};
pub inline fn readEnum(
self: Self,
comptime Enum: type,
endian: std.builtin.Endian,
) ReadEnumError!Enum {
return @errorCast(self.any().readEnum(Enum, endian));
}
pub inline fn any(self: *const Self) AnyReader {
return .{
.context = @ptrCast(&self.context),
.readFn = typeErasedReadFn,
};
}
const Self = @This();
fn typeErasedReadFn(context: *const anyopaque, buffer: []u8) anyerror!usize {
const ptr: *const Context = @ptrCast(@alignCast(context));
return readFn(ptr.*, buffer);
}
/// Helper for bridging to the new `Reader` API while upgrading.
pub fn adaptToNewApi(self: *const Self, buffer: []u8) Adapter {
return .{
.derp_reader = self.*,
.new_interface = .{
.buffer = buffer,
.vtable = &.{ .stream = Adapter.stream },
.seek = 0,
.end = 0,
},
};
}
pub const Adapter = struct {
derp_reader: Self,
new_interface: Reader,
err: ?Error = null,
fn stream(r: *Reader, w: *Writer, limit: Limit) Reader.StreamError!usize {
const a: *@This() = @alignCast(@fieldParentPtr("new_interface", r));
const buf = limit.slice(try w.writableSliceGreedy(1));
const n = a.derp_reader.read(buf) catch |err| {
a.err = err;
return error.ReadFailed;
};
if (n == 0) return error.EndOfStream;
w.advance(n);
return n;
}
};
};
}
/// Deprecated in favor of `Reader`.
pub const AnyReader = @import("Io/DeprecatedReader.zig");
/// Deprecated in favor of `Reader`.
pub const FixedBufferStream = @import("Io/fixed_buffer_stream.zig").FixedBufferStream;
/// Deprecated in favor of `Reader`.
pub const fixedBufferStream = @import("Io/fixed_buffer_stream.zig").fixedBufferStream;
pub const tty = @import("Io/tty.zig"); pub const tty = @import("Io/tty.zig");
pub fn poll( pub fn poll(
@ -746,7 +550,6 @@ pub fn PollFiles(comptime StreamEnum: type) type {
test { test {
_ = Reader; _ = Reader;
_ = Writer; _ = Writer;
_ = FixedBufferStream;
_ = tty; _ = tty;
_ = @import("Io/test.zig"); _ = @import("Io/test.zig");
} }
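With GenericReader and fixedBufferStream gone, in-memory reads go through the single non-generic std.Io.Reader. A small sketch of the new calling convention, using only operations that appear elsewhere in this commit:

const std = @import("std");

test "fixed Reader replaces GenericReader over a buffer" {
    const bytes = "hello";
    // Before: var fbs = std.io.fixedBufferStream(bytes); const r = fbs.reader();
    var r: std.Io.Reader = .fixed(bytes);
    var dest: [5]u8 = undefined;
    try r.readSliceAll(&dest); // was: r.readNoEof(&dest)
    try std.testing.expectEqualStrings("hello", &dest);
}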

View File

@ -1,292 +0,0 @@
context: *const anyopaque,
readFn: *const fn (context: *const anyopaque, buffer: []u8) anyerror!usize,
pub const Error = anyerror;
/// Returns the number of bytes read. It may be less than buffer.len.
/// If the number of bytes read is 0, it means end of stream.
/// End of stream is not an error condition.
pub fn read(self: Self, buffer: []u8) anyerror!usize {
return self.readFn(self.context, buffer);
}
/// Returns the number of bytes read. If the number read is smaller than `buffer.len`, it
/// means the stream reached the end. Reaching the end of a stream is not an error
/// condition.
pub fn readAll(self: Self, buffer: []u8) anyerror!usize {
return readAtLeast(self, buffer, buffer.len);
}
/// Returns the number of bytes read, calling the underlying read
/// function the minimal number of times until the buffer has at least
/// `len` bytes filled. If the number read is less than `len` it means
/// the stream reached the end. Reaching the end of the stream is not
/// an error condition.
pub fn readAtLeast(self: Self, buffer: []u8, len: usize) anyerror!usize {
assert(len <= buffer.len);
var index: usize = 0;
while (index < len) {
const amt = try self.read(buffer[index..]);
if (amt == 0) break;
index += amt;
}
return index;
}
/// If the number read would be smaller than `buf.len`, `error.EndOfStream` is returned instead.
pub fn readNoEof(self: Self, buf: []u8) anyerror!void {
const amt_read = try self.readAll(buf);
if (amt_read < buf.len) return error.EndOfStream;
}
/// Appends to the `std.array_list.Managed` contents by reading from the stream
/// until end of stream is found.
/// If the number of bytes appended would exceed `max_append_size`,
/// `error.StreamTooLong` is returned
/// and the `std.array_list.Managed` has exactly `max_append_size` bytes appended.
pub fn readAllArrayList(
self: Self,
array_list: *std.array_list.Managed(u8),
max_append_size: usize,
) anyerror!void {
return self.readAllArrayListAligned(null, array_list, max_append_size);
}
pub fn readAllArrayListAligned(
self: Self,
comptime alignment: ?Alignment,
array_list: *std.array_list.AlignedManaged(u8, alignment),
max_append_size: usize,
) anyerror!void {
try array_list.ensureTotalCapacity(@min(max_append_size, 4096));
const original_len = array_list.items.len;
var start_index: usize = original_len;
while (true) {
array_list.expandToCapacity();
const dest_slice = array_list.items[start_index..];
const bytes_read = try self.readAll(dest_slice);
start_index += bytes_read;
if (start_index - original_len > max_append_size) {
array_list.shrinkAndFree(original_len + max_append_size);
return error.StreamTooLong;
}
if (bytes_read != dest_slice.len) {
array_list.shrinkAndFree(start_index);
return;
}
// This will trigger ArrayList to expand superlinearly at whatever its growth rate is.
try array_list.ensureTotalCapacity(start_index + 1);
}
}
/// Allocates enough memory to hold all the contents of the stream. If the allocated
/// memory would be greater than `max_size`, returns `error.StreamTooLong`.
/// Caller owns returned memory.
/// If this function returns an error, the contents from the stream read so far are lost.
pub fn readAllAlloc(self: Self, allocator: mem.Allocator, max_size: usize) anyerror![]u8 {
var array_list = std.array_list.Managed(u8).init(allocator);
defer array_list.deinit();
try self.readAllArrayList(&array_list, max_size);
return try array_list.toOwnedSlice();
}
/// Appends to the `writer` contents by reading from the stream until `delimiter` is found.
/// Does not write the delimiter itself.
/// If `optional_max_size` is not null and amount of written bytes exceeds `optional_max_size`,
/// returns `error.StreamTooLong` and finishes appending.
/// If `optional_max_size` is null, appending is unbounded.
pub fn streamUntilDelimiter(
self: Self,
writer: anytype,
delimiter: u8,
optional_max_size: ?usize,
) anyerror!void {
if (optional_max_size) |max_size| {
for (0..max_size) |_| {
const byte: u8 = try self.readByte();
if (byte == delimiter) return;
try writer.writeByte(byte);
}
return error.StreamTooLong;
} else {
while (true) {
const byte: u8 = try self.readByte();
if (byte == delimiter) return;
try writer.writeByte(byte);
}
// Can not throw `error.StreamTooLong` since there are no boundary.
}
}
/// Reads from the stream until specified byte is found, discarding all data,
/// including the delimiter.
/// If end-of-stream is found, this function succeeds.
pub fn skipUntilDelimiterOrEof(self: Self, delimiter: u8) anyerror!void {
while (true) {
const byte = self.readByte() catch |err| switch (err) {
error.EndOfStream => return,
else => |e| return e,
};
if (byte == delimiter) return;
}
}
/// Reads 1 byte from the stream or returns `error.EndOfStream`.
pub fn readByte(self: Self) anyerror!u8 {
var result: [1]u8 = undefined;
const amt_read = try self.read(result[0..]);
if (amt_read < 1) return error.EndOfStream;
return result[0];
}
/// Same as `readByte` except the returned byte is signed.
pub fn readByteSigned(self: Self) anyerror!i8 {
return @as(i8, @bitCast(try self.readByte()));
}
/// Reads exactly `num_bytes` bytes and returns as an array.
/// `num_bytes` must be comptime-known
pub fn readBytesNoEof(self: Self, comptime num_bytes: usize) anyerror![num_bytes]u8 {
var bytes: [num_bytes]u8 = undefined;
try self.readNoEof(&bytes);
return bytes;
}
pub inline fn readInt(self: Self, comptime T: type, endian: std.builtin.Endian) anyerror!T {
const bytes = try self.readBytesNoEof(@divExact(@typeInfo(T).int.bits, 8));
return mem.readInt(T, &bytes, endian);
}
pub fn readVarInt(
self: Self,
comptime ReturnType: type,
endian: std.builtin.Endian,
size: usize,
) anyerror!ReturnType {
assert(size <= @sizeOf(ReturnType));
var bytes_buf: [@sizeOf(ReturnType)]u8 = undefined;
const bytes = bytes_buf[0..size];
try self.readNoEof(bytes);
return mem.readVarInt(ReturnType, bytes, endian);
}
/// Optional parameters for `skipBytes`
pub const SkipBytesOptions = struct {
buf_size: usize = 512,
};
// `num_bytes` is a `u64` to match `off_t`
/// Reads `num_bytes` bytes from the stream and discards them
pub fn skipBytes(self: Self, num_bytes: u64, comptime options: SkipBytesOptions) anyerror!void {
var buf: [options.buf_size]u8 = undefined;
var remaining = num_bytes;
while (remaining > 0) {
const amt = @min(remaining, options.buf_size);
try self.readNoEof(buf[0..amt]);
remaining -= amt;
}
}
/// Reads `slice.len` bytes from the stream and returns if they are the same as the passed slice
pub fn isBytes(self: Self, slice: []const u8) anyerror!bool {
var i: usize = 0;
var matches = true;
while (i < slice.len) : (i += 1) {
if (slice[i] != try self.readByte()) {
matches = false;
}
}
return matches;
}
pub fn readStruct(self: Self, comptime T: type) anyerror!T {
// Only extern and packed structs have defined in-memory layout.
comptime assert(@typeInfo(T).@"struct".layout != .auto);
var res: [1]T = undefined;
try self.readNoEof(mem.sliceAsBytes(res[0..]));
return res[0];
}
pub fn readStructEndian(self: Self, comptime T: type, endian: std.builtin.Endian) anyerror!T {
var res = try self.readStruct(T);
if (native_endian != endian) {
mem.byteSwapAllFields(T, &res);
}
return res;
}
/// Reads an integer with the same size as the given enum's tag type. If the integer matches
/// an enum tag, casts the integer to the enum tag and returns it. Otherwise, returns an `error.InvalidValue`.
/// TODO optimization taking advantage of most fields being in order
pub fn readEnum(self: Self, comptime Enum: type, endian: std.builtin.Endian) anyerror!Enum {
const E = error{
/// An integer was read, but it did not match any of the tags in the supplied enum.
InvalidValue,
};
const type_info = @typeInfo(Enum).@"enum";
const tag = try self.readInt(type_info.tag_type, endian);
inline for (std.meta.fields(Enum)) |field| {
if (tag == field.value) {
return @field(Enum, field.name);
}
}
return E.InvalidValue;
}
/// Reads the stream until the end, ignoring all the data.
/// Returns the number of bytes discarded.
pub fn discard(self: Self) anyerror!u64 {
var trash: [4096]u8 = undefined;
var index: u64 = 0;
while (true) {
const n = try self.read(&trash);
if (n == 0) return index;
index += n;
}
}
/// Helper for bridging to the new `Reader` API while upgrading.
pub fn adaptToNewApi(self: *const Self, buffer: []u8) Adapter {
return .{
.derp_reader = self.*,
.new_interface = .{
.buffer = buffer,
.vtable = &.{ .stream = Adapter.stream },
.seek = 0,
.end = 0,
},
};
}
pub const Adapter = struct {
derp_reader: Self,
new_interface: std.io.Reader,
err: ?Error = null,
fn stream(r: *std.io.Reader, w: *std.io.Writer, limit: std.io.Limit) std.io.Reader.StreamError!usize {
const a: *@This() = @alignCast(@fieldParentPtr("new_interface", r));
const buf = limit.slice(try w.writableSliceGreedy(1));
const n = a.derp_reader.read(buf) catch |err| {
a.err = err;
return error.ReadFailed;
};
if (n == 0) return error.EndOfStream;
w.advance(n);
return n;
}
};
const std = @import("../std.zig");
const Self = @This();
const math = std.math;
const assert = std.debug.assert;
const mem = std.mem;
const testing = std.testing;
const native_endian = @import("builtin").target.cpu.arch.endian();
const Alignment = std.mem.Alignment;
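The deleted Adapter above bridged an old callback-style source into the new interface by supplying a `stream` function in the Reader vtable. A sketch of that same mechanism for a custom in-memory source; the type and field names here are illustrative, not part of the standard library:

const std = @import("std");

const ExampleSource = struct {
    data: []const u8,
    pos: usize = 0,
    interface: std.Io.Reader,

    fn init(data: []const u8, buffer: []u8) ExampleSource {
        return .{
            .data = data,
            .interface = .{
                .buffer = buffer,
                .vtable = &.{ .stream = stream },
                .seek = 0,
                .end = 0,
            },
        };
    }

    fn stream(r: *std.Io.Reader, w: *std.Io.Writer, limit: std.Io.Limit) std.Io.Reader.StreamError!usize {
        const self: *ExampleSource = @alignCast(@fieldParentPtr("interface", r));
        const dest = limit.slice(try w.writableSliceGreedy(1));
        const n = @min(dest.len, self.data.len - self.pos);
        if (n == 0) return error.EndOfStream;
        @memcpy(dest[0..n], self.data[self.pos..][0..n]);
        self.pos += n;
        w.advance(n);
        return n;
    }
};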

View File

@ -4,12 +4,12 @@ const builtin = @import("builtin");
const native_endian = builtin.target.cpu.arch.endian(); const native_endian = builtin.target.cpu.arch.endian();
const std = @import("../std.zig"); const std = @import("../std.zig");
const Writer = std.io.Writer; const Writer = std.Io.Writer;
const Limit = std.Io.Limit;
const assert = std.debug.assert; const assert = std.debug.assert;
const testing = std.testing; const testing = std.testing;
const Allocator = std.mem.Allocator; const Allocator = std.mem.Allocator;
const ArrayList = std.ArrayList; const ArrayList = std.ArrayList;
const Limit = std.io.Limit;
pub const Limited = @import("Reader/Limited.zig"); pub const Limited = @import("Reader/Limited.zig");
@ -1592,7 +1592,7 @@ test readVec {
test "expected error.EndOfStream" { test "expected error.EndOfStream" {
// Unit test inspired by https://github.com/ziglang/zig/issues/17733 // Unit test inspired by https://github.com/ziglang/zig/issues/17733
var buffer: [3]u8 = undefined; var buffer: [3]u8 = undefined;
var r: std.io.Reader = .fixed(&buffer); var r: std.Io.Reader = .fixed(&buffer);
r.end = 0; // capacity 3, but empty r.end = 0; // capacity 3, but empty
try std.testing.expectError(error.EndOfStream, r.takeEnum(enum(u8) { a, b }, .little)); try std.testing.expectError(error.EndOfStream, r.takeEnum(enum(u8) { a, b }, .little));
try std.testing.expectError(error.EndOfStream, r.take(3)); try std.testing.expectError(error.EndOfStream, r.take(3));
@ -1647,15 +1647,6 @@ fn failingDiscard(r: *Reader, limit: Limit) Error!usize {
return error.ReadFailed; return error.ReadFailed;
} }
pub fn adaptToOldInterface(r: *Reader) std.Io.AnyReader {
return .{ .context = r, .readFn = derpRead };
}
fn derpRead(context: *const anyopaque, buffer: []u8) anyerror!usize {
const r: *Reader = @ptrCast(@alignCast(@constCast(context)));
return r.readSliceShort(buffer);
}
test "readAlloc when the backing reader provides one byte at a time" { test "readAlloc when the backing reader provides one byte at a time" {
const str = "This is a test"; const str = "This is a test";
var tiny_buffer: [1]u8 = undefined; var tiny_buffer: [1]u8 = undefined;
@ -1878,6 +1869,94 @@ pub fn writableVector(r: *Reader, buffer: [][]u8, data: []const []u8) Error!stru
return .{ i, n }; return .{ i, n };
} }
test "deserialize signed LEB128" {
// Truncated
try testing.expectError(error.EndOfStream, testLeb128(i64, "\x80"));
// Overflow
try testing.expectError(error.Overflow, testLeb128(i8, "\x80\x80\x40"));
try testing.expectError(error.Overflow, testLeb128(i16, "\x80\x80\x80\x40"));
try testing.expectError(error.Overflow, testLeb128(i32, "\x80\x80\x80\x80\x40"));
try testing.expectError(error.Overflow, testLeb128(i64, "\x80\x80\x80\x80\x80\x80\x80\x80\x80\x40"));
try testing.expectError(error.Overflow, testLeb128(i8, "\xff\x7e"));
try testing.expectError(error.Overflow, testLeb128(i32, "\x80\x80\x80\x80\x08"));
try testing.expectError(error.Overflow, testLeb128(i64, "\x80\x80\x80\x80\x80\x80\x80\x80\x80\x01"));
// Decode SLEB128
try testing.expect((try testLeb128(i64, "\x00")) == 0);
try testing.expect((try testLeb128(i64, "\x01")) == 1);
try testing.expect((try testLeb128(i64, "\x3f")) == 63);
try testing.expect((try testLeb128(i64, "\x40")) == -64);
try testing.expect((try testLeb128(i64, "\x41")) == -63);
try testing.expect((try testLeb128(i64, "\x7f")) == -1);
try testing.expect((try testLeb128(i64, "\x80\x01")) == 128);
try testing.expect((try testLeb128(i64, "\x81\x01")) == 129);
try testing.expect((try testLeb128(i64, "\xff\x7e")) == -129);
try testing.expect((try testLeb128(i64, "\x80\x7f")) == -128);
try testing.expect((try testLeb128(i64, "\x81\x7f")) == -127);
try testing.expect((try testLeb128(i64, "\xc0\x00")) == 64);
try testing.expect((try testLeb128(i64, "\xc7\x9f\x7f")) == -12345);
try testing.expect((try testLeb128(i8, "\xff\x7f")) == -1);
try testing.expect((try testLeb128(i16, "\xff\xff\x7f")) == -1);
try testing.expect((try testLeb128(i32, "\xff\xff\xff\xff\x7f")) == -1);
try testing.expect((try testLeb128(i32, "\x80\x80\x80\x80\x78")) == -0x80000000);
try testing.expect((try testLeb128(i64, "\x80\x80\x80\x80\x80\x80\x80\x80\x80\x7f")) == @as(i64, @bitCast(@as(u64, @intCast(0x8000000000000000)))));
try testing.expect((try testLeb128(i64, "\x80\x80\x80\x80\x80\x80\x80\x80\x40")) == -0x4000000000000000);
try testing.expect((try testLeb128(i64, "\x80\x80\x80\x80\x80\x80\x80\x80\x80\x7f")) == -0x8000000000000000);
// Decode unnormalized SLEB128 with extra padding bytes.
try testing.expect((try testLeb128(i64, "\x80\x00")) == 0);
try testing.expect((try testLeb128(i64, "\x80\x80\x00")) == 0);
try testing.expect((try testLeb128(i64, "\xff\x00")) == 0x7f);
try testing.expect((try testLeb128(i64, "\xff\x80\x00")) == 0x7f);
try testing.expect((try testLeb128(i64, "\x80\x81\x00")) == 0x80);
try testing.expect((try testLeb128(i64, "\x80\x81\x80\x00")) == 0x80);
}
test "deserialize unsigned LEB128" {
// Truncated
try testing.expectError(error.EndOfStream, testLeb128(u64, "\x80"));
try testing.expectError(error.EndOfStream, testLeb128(u16, "\x80\x80\x84"));
try testing.expectError(error.EndOfStream, testLeb128(u32, "\x80\x80\x80\x80\x90"));
// Overflow
try testing.expectError(error.Overflow, testLeb128(u8, "\x80\x02"));
try testing.expectError(error.Overflow, testLeb128(u8, "\x80\x80\x40"));
try testing.expectError(error.Overflow, testLeb128(u16, "\x80\x80\x80\x40"));
try testing.expectError(error.Overflow, testLeb128(u32, "\x80\x80\x80\x80\x40"));
try testing.expectError(error.Overflow, testLeb128(u64, "\x80\x80\x80\x80\x80\x80\x80\x80\x80\x40"));
// Decode ULEB128
try testing.expect((try testLeb128(u64, "\x00")) == 0);
try testing.expect((try testLeb128(u64, "\x01")) == 1);
try testing.expect((try testLeb128(u64, "\x3f")) == 63);
try testing.expect((try testLeb128(u64, "\x40")) == 64);
try testing.expect((try testLeb128(u64, "\x7f")) == 0x7f);
try testing.expect((try testLeb128(u64, "\x80\x01")) == 0x80);
try testing.expect((try testLeb128(u64, "\x81\x01")) == 0x81);
try testing.expect((try testLeb128(u64, "\x90\x01")) == 0x90);
try testing.expect((try testLeb128(u64, "\xff\x01")) == 0xff);
try testing.expect((try testLeb128(u64, "\x80\x02")) == 0x100);
try testing.expect((try testLeb128(u64, "\x81\x02")) == 0x101);
try testing.expect((try testLeb128(u64, "\x80\xc1\x80\x80\x10")) == 4294975616);
try testing.expect((try testLeb128(u64, "\x80\x80\x80\x80\x80\x80\x80\x80\x80\x01")) == 0x8000000000000000);
// Decode ULEB128 with extra padding bytes
try testing.expect((try testLeb128(u64, "\x80\x00")) == 0);
try testing.expect((try testLeb128(u64, "\x80\x80\x00")) == 0);
try testing.expect((try testLeb128(u64, "\xff\x00")) == 0x7f);
try testing.expect((try testLeb128(u64, "\xff\x80\x00")) == 0x7f);
try testing.expect((try testLeb128(u64, "\x80\x81\x00")) == 0x80);
try testing.expect((try testLeb128(u64, "\x80\x81\x80\x00")) == 0x80);
}
fn testLeb128(comptime T: type, encoded: []const u8) !T {
var reader: std.Io.Reader = .fixed(encoded);
const result = try reader.takeLeb128(T);
try testing.expect(reader.seek == reader.end);
return result;
}
test { test {
_ = Limited; _ = Limited;
} }
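The tables above exercise takeLeb128, which assembles the value from 7-bit groups, least significant group first, with the high bit of each byte as a continuation flag. One of the unsigned vectors worked through, as a sketch:

const std = @import("std");

test "ULEB128 composes 7-bit groups, least significant first" {
    // 0x80 0x02: first group is 0x00 with the continuation bit set,
    // second group is 0x02, so the value is 0x02 << 7 = 0x100.
    var r: std.Io.Reader = .fixed("\x80\x02");
    try std.testing.expectEqual(@as(u64, 0x100), try r.takeLeb128(u64));
}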

View File

@ -1,9 +1,9 @@
const Limited = @This(); const Limited = @This();
const std = @import("../../std.zig"); const std = @import("../../std.zig");
const Reader = std.io.Reader; const Reader = std.Io.Reader;
const Writer = std.io.Writer; const Writer = std.Io.Writer;
const Limit = std.io.Limit; const Limit = std.Io.Limit;
unlimited: *Reader, unlimited: *Reader,
remaining: Limit, remaining: Limit,

View File

@ -1,114 +0,0 @@
const std = @import("../std.zig");
const io = std.io;
const testing = std.testing;
const mem = std.mem;
const assert = std.debug.assert;
/// Deprecated in favor of `std.Io.Reader.fixed` and `std.Io.Writer.fixed`.
pub fn FixedBufferStream(comptime Buffer: type) type {
return struct {
/// `Buffer` is either a `[]u8` or `[]const u8`.
buffer: Buffer,
pos: usize,
pub const ReadError = error{};
pub const WriteError = error{NoSpaceLeft};
pub const SeekError = error{};
pub const GetSeekPosError = error{};
pub const Reader = io.GenericReader(*Self, ReadError, read);
const Self = @This();
pub fn reader(self: *Self) Reader {
return .{ .context = self };
}
pub fn read(self: *Self, dest: []u8) ReadError!usize {
const size = @min(dest.len, self.buffer.len - self.pos);
const end = self.pos + size;
@memcpy(dest[0..size], self.buffer[self.pos..end]);
self.pos = end;
return size;
}
pub fn seekTo(self: *Self, pos: u64) SeekError!void {
self.pos = @min(std.math.lossyCast(usize, pos), self.buffer.len);
}
pub fn seekBy(self: *Self, amt: i64) SeekError!void {
if (amt < 0) {
const abs_amt = @abs(amt);
const abs_amt_usize = std.math.cast(usize, abs_amt) orelse std.math.maxInt(usize);
if (abs_amt_usize > self.pos) {
self.pos = 0;
} else {
self.pos -= abs_amt_usize;
}
} else {
const amt_usize = std.math.cast(usize, amt) orelse std.math.maxInt(usize);
const new_pos = std.math.add(usize, self.pos, amt_usize) catch std.math.maxInt(usize);
self.pos = @min(self.buffer.len, new_pos);
}
}
pub fn getEndPos(self: *Self) GetSeekPosError!u64 {
return self.buffer.len;
}
pub fn getPos(self: *Self) GetSeekPosError!u64 {
return self.pos;
}
pub fn reset(self: *Self) void {
self.pos = 0;
}
};
}
pub fn fixedBufferStream(buffer: anytype) FixedBufferStream(Slice(@TypeOf(buffer))) {
return .{ .buffer = buffer, .pos = 0 };
}
fn Slice(comptime T: type) type {
switch (@typeInfo(T)) {
.pointer => |ptr_info| {
var new_ptr_info = ptr_info;
switch (ptr_info.size) {
.slice => {},
.one => switch (@typeInfo(ptr_info.child)) {
.array => |info| new_ptr_info.child = info.child,
else => @compileError("invalid type given to fixedBufferStream"),
},
else => @compileError("invalid type given to fixedBufferStream"),
}
new_ptr_info.size = .slice;
return @Type(.{ .pointer = new_ptr_info });
},
else => @compileError("invalid type given to fixedBufferStream"),
}
}
test "input" {
const bytes = [_]u8{ 1, 2, 3, 4, 5, 6, 7 };
var fbs = fixedBufferStream(&bytes);
var dest: [4]u8 = undefined;
var read = try fbs.reader().read(&dest);
try testing.expect(read == 4);
try testing.expect(mem.eql(u8, dest[0..4], bytes[0..4]));
read = try fbs.reader().read(&dest);
try testing.expect(read == 3);
try testing.expect(mem.eql(u8, dest[0..3], bytes[4..7]));
read = try fbs.reader().read(&dest);
try testing.expect(read == 0);
try fbs.seekTo((try fbs.getEndPos()) + 1);
read = try fbs.reader().read(&dest);
try testing.expect(read == 0);
}
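The behavior the deleted "input" test checked, partial reads that shrink toward end of stream, maps onto readSliceShort on a fixed Reader. A hedged equivalent:

const std = @import("std");

test "short reads from a fixed Reader" {
    const bytes = [_]u8{ 1, 2, 3, 4, 5, 6, 7 };
    var r: std.Io.Reader = .fixed(&bytes);
    var dest: [4]u8 = undefined;
    // readSliceShort reports how many bytes were actually available.
    try std.testing.expectEqual(@as(usize, 4), try r.readSliceShort(&dest));
    try std.testing.expectEqual(@as(usize, 3), try r.readSliceShort(&dest));
    try std.testing.expectEqual(@as(usize, 0), try r.readSliceShort(&dest));
}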

View File

@ -1,5 +1,4 @@
const std = @import("std"); const std = @import("std");
const io = std.io;
const DefaultPrng = std.Random.DefaultPrng; const DefaultPrng = std.Random.DefaultPrng;
const expect = std.testing.expect; const expect = std.testing.expect;
const expectEqual = std.testing.expectEqual; const expectEqual = std.testing.expectEqual;
@ -122,24 +121,3 @@ test "updateTimes" {
try expect(stat_new.atime < stat_old.atime); try expect(stat_new.atime < stat_old.atime);
try expect(stat_new.mtime < stat_old.mtime); try expect(stat_new.mtime < stat_old.mtime);
} }
test "GenericReader methods can return error.EndOfStream" {
// https://github.com/ziglang/zig/issues/17733
var fbs = std.io.fixedBufferStream("");
try std.testing.expectError(
error.EndOfStream,
fbs.reader().readEnum(enum(u8) { a, b }, .little),
);
try std.testing.expectError(
error.EndOfStream,
fbs.reader().isBytes("foo"),
);
}
test "Adapted DeprecatedReader EndOfStream" {
var fbs: io.FixedBufferStream([]const u8) = .{ .buffer = &.{}, .pos = 0 };
const reader = fbs.reader();
var buf: [1]u8 = undefined;
var adapted = reader.adaptToNewApi(&buf);
try std.testing.expectError(error.EndOfStream, adapted.new_interface.takeByte());
}
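The two deleted tests above have direct counterparts on the new interface: an empty fixed Reader reports error.EndOfStream from its take* methods, as a sketch:

const std = @import("std");

test "empty fixed Reader surfaces error.EndOfStream" {
    var r: std.Io.Reader = .fixed("");
    try std.testing.expectError(error.EndOfStream, r.takeEnum(enum(u8) { a, b }, .little));
    try std.testing.expectError(error.EndOfStream, r.takeByte());
}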

View File

@ -76,9 +76,9 @@ pub const Config = union(enum) {
reset_attributes: u16, reset_attributes: u16,
}; };
pub const SetColorError = std.os.windows.SetConsoleTextAttributeError || std.io.Writer.Error; pub const SetColorError = std.os.windows.SetConsoleTextAttributeError || std.Io.Writer.Error;
pub fn setColor(conf: Config, w: *std.io.Writer, color: Color) SetColorError!void { pub fn setColor(conf: Config, w: *std.Io.Writer, color: Color) SetColorError!void {
nosuspend switch (conf) { nosuspend switch (conf) {
.no_color => return, .no_color => return,
.escape_codes => { .escape_codes => {

View File

@ -9,7 +9,7 @@ const Progress = @This();
const posix = std.posix; const posix = std.posix;
const is_big_endian = builtin.cpu.arch.endian() == .big; const is_big_endian = builtin.cpu.arch.endian() == .big;
const is_windows = builtin.os.tag == .windows; const is_windows = builtin.os.tag == .windows;
const Writer = std.io.Writer; const Writer = std.Io.Writer;
/// `null` if the current node (and its children) should /// `null` if the current node (and its children) should
/// not print on update() /// not print on update()

View File

@ -150,7 +150,7 @@ fn parseNum(text: []const u8) error{ InvalidVersion, Overflow }!usize {
}; };
} }
pub fn format(self: Version, w: *std.io.Writer) std.io.Writer.Error!void { pub fn format(self: Version, w: *std.Io.Writer) std.Io.Writer.Error!void {
try w.print("{d}.{d}.{d}", .{ self.major, self.minor, self.patch }); try w.print("{d}.{d}.{d}", .{ self.major, self.minor, self.patch });
if (self.pre) |pre| try w.print("-{s}", .{pre}); if (self.pre) |pre| try w.print("-{s}", .{pre});
if (self.build) |build| try w.print("+{s}", .{build}); if (self.build) |build| try w.print("+{s}", .{build});

View File

@ -308,7 +308,7 @@ pub const Os = struct {
/// This function is defined to serialize a Zig source code representation of this /// This function is defined to serialize a Zig source code representation of this
/// type, that, when parsed, will deserialize into the same data. /// type, that, when parsed, will deserialize into the same data.
pub fn format(wv: WindowsVersion, w: *std.io.Writer) std.io.Writer.Error!void { pub fn format(wv: WindowsVersion, w: *std.Io.Writer) std.Io.Writer.Error!void {
if (std.enums.tagName(WindowsVersion, wv)) |name| { if (std.enums.tagName(WindowsVersion, wv)) |name| {
var vecs: [2][]const u8 = .{ ".", name }; var vecs: [2][]const u8 = .{ ".", name };
return w.writeVecAll(&vecs); return w.writeVecAll(&vecs);

View File

@ -281,8 +281,10 @@ pub fn getName(self: Thread, buffer_ptr: *[max_name_len:0]u8) GetNameError!?[]co
const file = try std.fs.cwd().openFile(path, .{}); const file = try std.fs.cwd().openFile(path, .{});
defer file.close(); defer file.close();
const data_len = try file.deprecatedReader().readAll(buffer_ptr[0 .. max_name_len + 1]); var file_reader = file.readerStreaming(&.{});
const data_len = file_reader.readSliceShort(buffer_ptr[0 .. max_name_len + 1]) catch |err| switch (err) {
error.ReadFailed => return file_reader.err.?,
};
return if (data_len >= 1) buffer[0 .. data_len - 1] else null; return if (data_len >= 1) buffer[0 .. data_len - 1] else null;
}, },
.windows => { .windows => {
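The Thread.getName hunk shows the file-reading replacement for deprecatedReader().readAll: readerStreaming plus readSliceShort, with error.ReadFailed unpacked from the file reader's err field. A sketch of the same pattern for an arbitrary small file; the function name and path parameter are illustrative:

const std = @import("std");

fn readSmall(path: []const u8, buf: []u8) ![]u8 {
    const file = try std.fs.cwd().openFile(path, .{});
    defer file.close();
    var file_reader = file.readerStreaming(&.{});
    const n = file_reader.readSliceShort(buf) catch |err| switch (err) {
        error.ReadFailed => return file_reader.err.?,
    };
    return buf[0..n];
}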

View File

@ -1038,14 +1038,14 @@ pub fn Aligned(comptime T: type, comptime alignment: ?mem.Alignment) type {
pub fn printAssumeCapacity(self: *Self, comptime fmt: []const u8, args: anytype) void { pub fn printAssumeCapacity(self: *Self, comptime fmt: []const u8, args: anytype) void {
comptime assert(T == u8); comptime assert(T == u8);
var w: std.io.Writer = .fixed(self.unusedCapacitySlice()); var w: std.Io.Writer = .fixed(self.unusedCapacitySlice());
w.print(fmt, args) catch unreachable; w.print(fmt, args) catch unreachable;
self.items.len += w.end; self.items.len += w.end;
} }
pub fn printBounded(self: *Self, comptime fmt: []const u8, args: anytype) error{OutOfMemory}!void { pub fn printBounded(self: *Self, comptime fmt: []const u8, args: anytype) error{OutOfMemory}!void {
comptime assert(T == u8); comptime assert(T == u8);
var w: std.io.Writer = .fixed(self.unusedCapacitySlice()); var w: std.Io.Writer = .fixed(self.unusedCapacitySlice());
w.print(fmt, args) catch return error.OutOfMemory; w.print(fmt, args) catch return error.OutOfMemory;
self.items.len += w.end; self.items.len += w.end;
} }

View File

@ -444,7 +444,7 @@ pub const HexEscape = struct {
pub const upper_charset = "0123456789ABCDEF"; pub const upper_charset = "0123456789ABCDEF";
pub const lower_charset = "0123456789abcdef"; pub const lower_charset = "0123456789abcdef";
pub fn format(se: HexEscape, w: *std.io.Writer) std.io.Writer.Error!void { pub fn format(se: HexEscape, w: *std.Io.Writer) std.Io.Writer.Error!void {
const charset = se.charset; const charset = se.charset;
var buf: [4]u8 = undefined; var buf: [4]u8 = undefined;

View File

@ -38,7 +38,7 @@ pub const StackTrace = struct {
index: usize, index: usize,
instruction_addresses: []usize, instruction_addresses: []usize,
pub fn format(self: StackTrace, writer: *std.io.Writer) std.io.Writer.Error!void { pub fn format(self: StackTrace, writer: *std.Io.Writer) std.Io.Writer.Error!void {
// TODO: re-evaluate whether to use format() methods at all. // TODO: re-evaluate whether to use format() methods at all.
// Until then, avoid an error when using GeneralPurposeAllocator with WebAssembly // Until then, avoid an error when using GeneralPurposeAllocator with WebAssembly
// where it tries to call detectTTYConfig here. // where it tries to call detectTTYConfig here.
@ -47,7 +47,7 @@ pub const StackTrace = struct {
const debug_info = std.debug.getSelfDebugInfo() catch |err| { const debug_info = std.debug.getSelfDebugInfo() catch |err| {
return writer.print("\nUnable to print stack trace: Unable to open debug info: {s}\n", .{@errorName(err)}); return writer.print("\nUnable to print stack trace: Unable to open debug info: {s}\n", .{@errorName(err)});
}; };
const tty_config = std.io.tty.detectConfig(std.fs.File.stderr()); const tty_config = std.Io.tty.detectConfig(std.fs.File.stderr());
try writer.writeAll("\n"); try writer.writeAll("\n");
std.debug.writeStackTrace(self, writer, debug_info, tty_config) catch |err| { std.debug.writeStackTrace(self, writer, debug_info, tty_config) catch |err| {
try writer.print("Unable to print stack trace: {s}\n", .{@errorName(err)}); try writer.print("Unable to print stack trace: {s}\n", .{@errorName(err)});

View File

@ -1087,14 +1087,11 @@ pub const Coff = struct {
const pe_pointer_offset = 0x3C; const pe_pointer_offset = 0x3C;
const pe_magic = "PE\x00\x00"; const pe_magic = "PE\x00\x00";
var stream = std.io.fixedBufferStream(data); var reader: std.Io.Reader = .fixed(data);
const reader = stream.reader(); reader.seek = pe_pointer_offset;
try stream.seekTo(pe_pointer_offset); const coff_header_offset = try reader.takeInt(u32, .little);
const coff_header_offset = try reader.readInt(u32, .little); reader.seek = coff_header_offset;
try stream.seekTo(coff_header_offset); const is_image = mem.eql(u8, pe_magic, try reader.takeArray(4));
var buf: [4]u8 = undefined;
try reader.readNoEof(&buf);
const is_image = mem.eql(u8, pe_magic, &buf);
var coff = @This(){ var coff = @This(){
.data = data, .data = data,
@ -1123,16 +1120,15 @@ pub const Coff = struct {
if (@intFromEnum(DirectoryEntry.DEBUG) >= data_dirs.len) return null; if (@intFromEnum(DirectoryEntry.DEBUG) >= data_dirs.len) return null;
const debug_dir = data_dirs[@intFromEnum(DirectoryEntry.DEBUG)]; const debug_dir = data_dirs[@intFromEnum(DirectoryEntry.DEBUG)];
var stream = std.io.fixedBufferStream(self.data); var reader: std.Io.Reader = .fixed(self.data);
const reader = stream.reader();
if (self.is_loaded) { if (self.is_loaded) {
try stream.seekTo(debug_dir.virtual_address); reader.seek = debug_dir.virtual_address;
} else { } else {
// Find what section the debug_dir is in, in order to convert the RVA to a file offset // Find what section the debug_dir is in, in order to convert the RVA to a file offset
for (self.getSectionHeaders()) |*sect| { for (self.getSectionHeaders()) |*sect| {
if (debug_dir.virtual_address >= sect.virtual_address and debug_dir.virtual_address < sect.virtual_address + sect.virtual_size) { if (debug_dir.virtual_address >= sect.virtual_address and debug_dir.virtual_address < sect.virtual_address + sect.virtual_size) {
try stream.seekTo(sect.pointer_to_raw_data + (debug_dir.virtual_address - sect.virtual_address)); reader.seek = sect.pointer_to_raw_data + (debug_dir.virtual_address - sect.virtual_address);
break; break;
} }
} else return error.InvalidDebugDirectory; } else return error.InvalidDebugDirectory;
@ -1143,24 +1139,23 @@ pub const Coff = struct {
const debug_dir_entry_count = debug_dir.size / @sizeOf(DebugDirectoryEntry); const debug_dir_entry_count = debug_dir.size / @sizeOf(DebugDirectoryEntry);
var i: u32 = 0; var i: u32 = 0;
while (i < debug_dir_entry_count) : (i += 1) { while (i < debug_dir_entry_count) : (i += 1) {
const debug_dir_entry = try reader.readStruct(DebugDirectoryEntry); const debug_dir_entry = try reader.takeStruct(DebugDirectoryEntry, .little);
if (debug_dir_entry.type == .CODEVIEW) { if (debug_dir_entry.type == .CODEVIEW) {
const dir_offset = if (self.is_loaded) debug_dir_entry.address_of_raw_data else debug_dir_entry.pointer_to_raw_data; const dir_offset = if (self.is_loaded) debug_dir_entry.address_of_raw_data else debug_dir_entry.pointer_to_raw_data;
try stream.seekTo(dir_offset); reader.seek = dir_offset;
break; break;
} }
} else return null; } else return null;
var cv_signature: [4]u8 = undefined; // CodeView signature const code_view_signature = try reader.takeArray(4);
try reader.readNoEof(cv_signature[0..]);
// 'RSDS' indicates PDB70 format, used by lld. // 'RSDS' indicates PDB70 format, used by lld.
if (!mem.eql(u8, &cv_signature, "RSDS")) if (!mem.eql(u8, code_view_signature, "RSDS"))
return error.InvalidPEMagic; return error.InvalidPEMagic;
try reader.readNoEof(self.guid[0..]); try reader.readSliceAll(self.guid[0..]);
self.age = try reader.readInt(u32, .little); self.age = try reader.takeInt(u32, .little);
// Finally read the null-terminated string. // Finally read the null-terminated string.
const start = reader.context.pos; const start = reader.seek;
const len = std.mem.indexOfScalar(u8, self.data[start..], 0) orelse return null; const len = std.mem.indexOfScalar(u8, self.data[start..], 0) orelse return null;
return self.data[start .. start + len]; return self.data[start .. start + len];
} }
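Random access within an in-memory image no longer needs FixedBufferStream.seekTo; the Coff hunks assign the Reader's seek field directly. A minimal sketch of that idiom, reusing the 0x3C offset from the code above (the function name is illustrative):

const std = @import("std");

fn readPeHeaderOffset(data: []const u8) !u32 {
    var reader: std.Io.Reader = .fixed(data);
    reader.seek = 0x3C; // e_lfanew lives at this fixed offset
    return reader.takeInt(u32, .little);
}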

View File

@ -1,9 +1,8 @@
//! Accepts list of tokens, decides what is best block type to write. What block //! Accepts list of tokens, decides what is best block type to write. What block
//! type will provide best compression. Writes header and body of the block. //! type will provide best compression. Writes header and body of the block.
const std = @import("std"); const std = @import("std");
const io = std.io;
const assert = std.debug.assert; const assert = std.debug.assert;
const Writer = std.io.Writer; const Writer = std.Io.Writer;
const BlockWriter = @This(); const BlockWriter = @This();
const flate = @import("../flate.zig"); const flate = @import("../flate.zig");

View File

@ -1,10 +1,10 @@
const Decompress = @This(); const Decompress = @This();
const std = @import("std"); const std = @import("std");
const assert = std.debug.assert; const assert = std.debug.assert;
const Reader = std.io.Reader; const Reader = std.Io.Reader;
const Limit = std.io.Limit; const Limit = std.Io.Limit;
const zstd = @import("../zstd.zig"); const zstd = @import("../zstd.zig");
const Writer = std.io.Writer; const Writer = std.Io.Writer;
input: *Reader, input: *Reader,
reader: Reader, reader: Reader,

View File

@ -23,30 +23,29 @@ pub fn rescanMac(cb: *Bundle, gpa: Allocator) RescanMacError!void {
const bytes = try file.readToEndAlloc(gpa, std.math.maxInt(u32)); const bytes = try file.readToEndAlloc(gpa, std.math.maxInt(u32));
defer gpa.free(bytes); defer gpa.free(bytes);
var stream = std.io.fixedBufferStream(bytes); var reader: std.Io.Reader = .fixed(bytes);
const reader = stream.reader();
const db_header = try reader.readStructEndian(ApplDbHeader, .big); const db_header = try reader.takeStruct(ApplDbHeader, .big);
assert(mem.eql(u8, &db_header.signature, "kych")); assert(mem.eql(u8, &db_header.signature, "kych"));
try stream.seekTo(db_header.schema_offset); reader.seek = db_header.schema_offset;
const db_schema = try reader.readStructEndian(ApplDbSchema, .big); const db_schema = try reader.takeStruct(ApplDbSchema, .big);
var table_list = try gpa.alloc(u32, db_schema.table_count); var table_list = try gpa.alloc(u32, db_schema.table_count);
defer gpa.free(table_list); defer gpa.free(table_list);
var table_idx: u32 = 0; var table_idx: u32 = 0;
while (table_idx < table_list.len) : (table_idx += 1) { while (table_idx < table_list.len) : (table_idx += 1) {
table_list[table_idx] = try reader.readInt(u32, .big); table_list[table_idx] = try reader.takeInt(u32, .big);
} }
const now_sec = std.time.timestamp(); const now_sec = std.time.timestamp();
for (table_list) |table_offset| { for (table_list) |table_offset| {
try stream.seekTo(db_header.schema_offset + table_offset); reader.seek = db_header.schema_offset + table_offset;
const table_header = try reader.readStructEndian(TableHeader, .big); const table_header = try reader.takeStruct(TableHeader, .big);
if (@as(std.c.DB_RECORDTYPE, @enumFromInt(table_header.table_id)) != .X509_CERTIFICATE) { if (@as(std.c.DB_RECORDTYPE, @enumFromInt(table_header.table_id)) != .X509_CERTIFICATE) {
continue; continue;
@ -57,7 +56,7 @@ pub fn rescanMac(cb: *Bundle, gpa: Allocator) RescanMacError!void {
var record_idx: u32 = 0; var record_idx: u32 = 0;
while (record_idx < record_list.len) : (record_idx += 1) { while (record_idx < record_list.len) : (record_idx += 1) {
record_list[record_idx] = try reader.readInt(u32, .big); record_list[record_idx] = try reader.takeInt(u32, .big);
} }
for (record_list) |record_offset| { for (record_list) |record_offset| {
@ -65,15 +64,15 @@ pub fn rescanMac(cb: *Bundle, gpa: Allocator) RescanMacError!void {
// An offset that is not 4-byte-aligned is invalid. // An offset that is not 4-byte-aligned is invalid.
if (record_offset == 0 or record_offset % 4 != 0) continue; if (record_offset == 0 or record_offset % 4 != 0) continue;
try stream.seekTo(db_header.schema_offset + table_offset + record_offset); reader.seek = db_header.schema_offset + table_offset + record_offset;
const cert_header = try reader.readStructEndian(X509CertHeader, .big); const cert_header = try reader.takeStruct(X509CertHeader, .big);
if (cert_header.cert_size == 0) continue; if (cert_header.cert_size == 0) continue;
const cert_start = @as(u32, @intCast(cb.bytes.items.len)); const cert_start = @as(u32, @intCast(cb.bytes.items.len));
const dest_buf = try cb.bytes.addManyAsSlice(gpa, cert_header.cert_size); const dest_buf = try cb.bytes.addManyAsSlice(gpa, cert_header.cert_size);
try reader.readNoEof(dest_buf); try reader.readSliceAll(dest_buf);
try cb.parseCert(gpa, cert_start, now_sec); try cb.parseCert(gpa, cert_start, now_sec);
} }
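readStructEndian becomes takeStruct with the endianness passed as an argument. A sketch with an illustrative header type; as the deleted readStruct noted, only extern and packed structs have a defined in-memory layout:

const std = @import("std");

const ExampleHeader = extern struct {
    signature: [4]u8,
    version: u32,
    schema_offset: u32,
};

fn parseHeader(bytes: []const u8) !ExampleHeader {
    var reader: std.Io.Reader = .fixed(bytes);
    return reader.takeStruct(ExampleHeader, .big);
}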

View File

@ -69,15 +69,15 @@ pub const Tag = struct {
return .{ .number = number, .constructed = constructed, .class = .universal }; return .{ .number = number, .constructed = constructed, .class = .universal };
} }
pub fn decode(reader: anytype) !Tag { pub fn decode(reader: *std.Io.Reader) !Tag {
const tag1: FirstTag = @bitCast(try reader.readByte()); const tag1: FirstTag = @bitCast(try reader.takeByte());
var number: u14 = tag1.number; var number: u14 = tag1.number;
if (tag1.number == 15) { if (tag1.number == 15) {
const tag2: NextTag = @bitCast(try reader.readByte()); const tag2: NextTag = @bitCast(try reader.takeByte());
number = tag2.number; number = tag2.number;
if (tag2.continues) { if (tag2.continues) {
const tag3: NextTag = @bitCast(try reader.readByte()); const tag3: NextTag = @bitCast(try reader.takeByte());
number = (number << 7) + tag3.number; number = (number << 7) + tag3.number;
if (tag3.continues) return error.InvalidLength; if (tag3.continues) return error.InvalidLength;
} }
@ -90,7 +90,7 @@ pub const Tag = struct {
}; };
} }
pub fn encode(self: Tag, writer: anytype) @TypeOf(writer).Error!void { pub fn encode(self: Tag, writer: *std.Io.Writer) std.Io.Writer.Error!void {
var tag1 = FirstTag{ var tag1 = FirstTag{
.number = undefined, .number = undefined,
.constructed = self.constructed, .constructed = self.constructed,
@ -98,8 +98,7 @@ pub const Tag = struct {
}; };
var buffer: [3]u8 = undefined; var buffer: [3]u8 = undefined;
var stream = std.io.fixedBufferStream(&buffer); var writer2: std.Io.Writer = .init(&buffer);
var writer2 = stream.writer();
switch (@intFromEnum(self.number)) { switch (@intFromEnum(self.number)) {
0...std.math.maxInt(u5) => |n| { 0...std.math.maxInt(u5) => |n| {
@ -122,7 +121,7 @@ pub const Tag = struct {
}, },
} }
_ = try writer.write(stream.getWritten()); _ = try writer.write(writer2.buffered());
} }
const FirstTag = packed struct(u8) { number: u5, constructed: bool, class: Tag.Class }; const FirstTag = packed struct(u8) { number: u5, constructed: bool, class: Tag.Class };
@ -161,8 +160,8 @@ pub const Tag = struct {
test Tag { test Tag {
const buf = [_]u8{0xa3}; const buf = [_]u8{0xa3};
var stream = std.io.fixedBufferStream(&buf); var reader: std.Io.Reader = .fixed(&buf);
const t = Tag.decode(stream.reader()); const t = Tag.decode(&reader);
try std.testing.expectEqual(Tag.init(@enumFromInt(3), true, .context_specific), t); try std.testing.expectEqual(Tag.init(@enumFromInt(3), true, .context_specific), t);
} }
@ -191,11 +190,10 @@ pub const Element = struct {
/// - Ensures length is within `bytes` /// - Ensures length is within `bytes`
/// - Ensures length is less than `std.math.maxInt(Index)` /// - Ensures length is less than `std.math.maxInt(Index)`
pub fn decode(bytes: []const u8, index: Index) DecodeError!Element { pub fn decode(bytes: []const u8, index: Index) DecodeError!Element {
var stream = std.io.fixedBufferStream(bytes[index..]); var reader: std.Io.Reader = .fixed(bytes[index..]);
var reader = stream.reader();
const tag = try Tag.decode(reader); const tag = try Tag.decode(&reader);
const size_or_len_size = try reader.readByte(); const size_or_len_size = try reader.takeByte();
var start = index + 2; var start = index + 2;
var end = start + size_or_len_size; var end = start + size_or_len_size;
@ -208,7 +206,7 @@ pub const Element = struct {
start += len_size; start += len_size;
if (len_size > @sizeOf(Index)) return error.InvalidLength; if (len_size > @sizeOf(Index)) return error.InvalidLength;
const len = try reader.readVarInt(Index, .big, len_size); const len = try reader.takeVarInt(Index, .big, len_size);
if (len < 128) return error.InvalidLength; // should have used short form if (len < 128) return error.InvalidLength; // should have used short form
end = std.math.add(Index, start, len) catch return error.InvalidLength; end = std.math.add(Index, start, len) catch return error.InvalidLength;

View File

@ -4,7 +4,7 @@
//! organizations, or policy documents. //! organizations, or policy documents.
encoded: []const u8, encoded: []const u8,
pub const InitError = std.fmt.ParseIntError || error{MissingPrefix} || std.io.FixedBufferStream(u8).WriteError; pub const InitError = std.fmt.ParseIntError || error{MissingPrefix} || std.Io.Writer.Error;
pub fn fromDot(dot_notation: []const u8, out: []u8) InitError!Oid { pub fn fromDot(dot_notation: []const u8, out: []u8) InitError!Oid {
var split = std.mem.splitScalar(u8, dot_notation, '.'); var split = std.mem.splitScalar(u8, dot_notation, '.');
@ -14,8 +14,7 @@ pub fn fromDot(dot_notation: []const u8, out: []u8) InitError!Oid {
const first = try std.fmt.parseInt(u8, first_str, 10); const first = try std.fmt.parseInt(u8, first_str, 10);
const second = try std.fmt.parseInt(u8, second_str, 10); const second = try std.fmt.parseInt(u8, second_str, 10);
var stream = std.io.fixedBufferStream(out); var writer: std.Io.Writer = .fixed(out);
var writer = stream.writer();
try writer.writeByte(first * 40 + second); try writer.writeByte(first * 40 + second);
@ -37,7 +36,7 @@ pub fn fromDot(dot_notation: []const u8, out: []u8) InitError!Oid {
i += 1; i += 1;
} }
return .{ .encoded = stream.getWritten() }; return .{ .encoded = writer.buffered() };
} }
test fromDot { test fromDot {
@ -80,9 +79,9 @@ test toDot {
var buf: [256]u8 = undefined; var buf: [256]u8 = undefined;
for (test_cases) |t| { for (test_cases) |t| {
var stream = std.io.fixedBufferStream(&buf); var stream: std.Io.Writer = .fixed(&buf);
try toDot(Oid{ .encoded = t.encoded }, stream.writer()); try toDot(Oid{ .encoded = t.encoded }, &stream);
try std.testing.expectEqualStrings(t.dot_notation, stream.getWritten()); try std.testing.expectEqualStrings(t.dot_notation, stream.buffered());
} }
} }

View File

@ -2,7 +2,6 @@ const builtin = @import("builtin");
const std = @import("std"); const std = @import("std");
const crypto = std.crypto; const crypto = std.crypto;
const fmt = std.fmt; const fmt = std.fmt;
const io = std.io;
const mem = std.mem; const mem = std.mem;
const sha3 = crypto.hash.sha3; const sha3 = crypto.hash.sha3;
const testing = std.testing; const testing = std.testing;
@ -135,8 +134,7 @@ pub fn Ecdsa(comptime Curve: type, comptime Hash: type) type {
/// The maximum length of the DER encoding is der_encoded_length_max. /// The maximum length of the DER encoding is der_encoded_length_max.
/// The function returns a slice, that can be shorter than der_encoded_length_max. /// The function returns a slice, that can be shorter than der_encoded_length_max.
pub fn toDer(sig: Signature, buf: *[der_encoded_length_max]u8) []u8 { pub fn toDer(sig: Signature, buf: *[der_encoded_length_max]u8) []u8 {
var fb = io.fixedBufferStream(buf); var w: std.Io.Writer = .fixed(buf);
const w = fb.writer();
const r_len = @as(u8, @intCast(sig.r.len + (sig.r[0] >> 7))); const r_len = @as(u8, @intCast(sig.r.len + (sig.r[0] >> 7)));
const s_len = @as(u8, @intCast(sig.s.len + (sig.s[0] >> 7))); const s_len = @as(u8, @intCast(sig.s.len + (sig.s[0] >> 7)));
const seq_len = @as(u8, @intCast(2 + r_len + 2 + s_len)); const seq_len = @as(u8, @intCast(2 + r_len + 2 + s_len));
@ -151,24 +149,23 @@ pub fn Ecdsa(comptime Curve: type, comptime Hash: type) type {
w.writeByte(0x00) catch unreachable; w.writeByte(0x00) catch unreachable;
} }
w.writeAll(&sig.s) catch unreachable; w.writeAll(&sig.s) catch unreachable;
return fb.getWritten(); return w.buffered();
} }
// Read a DER-encoded integer. // Read a DER-encoded integer.
fn readDerInt(out: []u8, reader: anytype) EncodingError!void { fn readDerInt(out: []u8, reader: *std.Io.Reader) EncodingError!void {
var buf: [2]u8 = undefined; const buf = reader.takeArray(2) catch return error.InvalidEncoding;
_ = reader.readNoEof(&buf) catch return error.InvalidEncoding;
if (buf[0] != 0x02) return error.InvalidEncoding; if (buf[0] != 0x02) return error.InvalidEncoding;
var expected_len = @as(usize, buf[1]); var expected_len: usize = buf[1];
if (expected_len == 0 or expected_len > 1 + out.len) return error.InvalidEncoding; if (expected_len == 0 or expected_len > 1 + out.len) return error.InvalidEncoding;
var has_top_bit = false; var has_top_bit = false;
if (expected_len == 1 + out.len) { if (expected_len == 1 + out.len) {
if ((reader.readByte() catch return error.InvalidEncoding) != 0) return error.InvalidEncoding; if ((reader.takeByte() catch return error.InvalidEncoding) != 0) return error.InvalidEncoding;
expected_len -= 1; expected_len -= 1;
has_top_bit = true; has_top_bit = true;
} }
const out_slice = out[out.len - expected_len ..]; const out_slice = out[out.len - expected_len ..];
reader.readNoEof(out_slice) catch return error.InvalidEncoding; reader.readSliceAll(out_slice) catch return error.InvalidEncoding;
if (@intFromBool(has_top_bit) != out[0] >> 7) return error.InvalidEncoding; if (@intFromBool(has_top_bit) != out[0] >> 7) return error.InvalidEncoding;
} }
@ -176,16 +173,14 @@ pub fn Ecdsa(comptime Curve: type, comptime Hash: type) type {
/// Returns InvalidEncoding if the DER encoding is invalid. /// Returns InvalidEncoding if the DER encoding is invalid.
pub fn fromDer(der: []const u8) EncodingError!Signature { pub fn fromDer(der: []const u8) EncodingError!Signature {
var sig: Signature = mem.zeroInit(Signature, .{}); var sig: Signature = mem.zeroInit(Signature, .{});
var fb = io.fixedBufferStream(der); var reader: std.Io.Reader = .fixed(der);
const reader = fb.reader(); const buf = reader.takeArray(2) catch return error.InvalidEncoding;
var buf: [2]u8 = undefined;
_ = reader.readNoEof(&buf) catch return error.InvalidEncoding;
if (buf[0] != 0x30 or @as(usize, buf[1]) + 2 != der.len) { if (buf[0] != 0x30 or @as(usize, buf[1]) + 2 != der.len) {
return error.InvalidEncoding; return error.InvalidEncoding;
} }
try readDerInt(&sig.r, reader); try readDerInt(&sig.r, &reader);
try readDerInt(&sig.s, reader); try readDerInt(&sig.s, &reader);
if (fb.getPos() catch unreachable != der.len) return error.InvalidEncoding; if (reader.seek != der.len) return error.InvalidEncoding;
return sig; return sig;
} }
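
The `takeArray`/`take`/`readSliceAll` calls adopted above generalize to any length-prefixed field. A tiny illustrative sketch, not the ECDSA code itself; the bytes are made up.

    const std = @import("std");

    test "length-prefixed field with takeByte/take (illustrative)" {
        const bytes = [_]u8{ 0x02, 0x01, 0x7f }; // tag, length, value
        var r: std.Io.Reader = .fixed(&bytes);
        try std.testing.expectEqual(0x02, try r.takeByte()); // tag
        const len = try r.takeByte(); // length
        const value = try r.take(len); // slice borrowed from the fixed buffer
        try std.testing.expectEqualSlices(u8, &.{0x7f}, value);
    }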

View File

@ -2,7 +2,6 @@
const std = @import("std"); const std = @import("std");
const fmt = std.fmt; const fmt = std.fmt;
const io = std.io;
const mem = std.mem; const mem = std.mem;
const meta = std.meta; const meta = std.meta;
const Writer = std.Io.Writer; const Writer = std.Io.Writer;

View File

@ -5,7 +5,6 @@
const std = @import("std"); const std = @import("std");
const crypto = std.crypto; const crypto = std.crypto;
const fmt = std.fmt; const fmt = std.fmt;
const io = std.io;
const math = std.math; const math = std.math;
const mem = std.mem; const mem = std.mem;
const meta = std.meta; const meta = std.meta;

View File

@ -655,7 +655,7 @@ pub const Decoder = struct {
} }
/// Use this function to increase `their_end`. /// Use this function to increase `their_end`.
pub fn readAtLeast(d: *Decoder, stream: *std.io.Reader, their_amt: usize) !void { pub fn readAtLeast(d: *Decoder, stream: *std.Io.Reader, their_amt: usize) !void {
assert(!d.disable_reads); assert(!d.disable_reads);
const existing_amt = d.cap - d.idx; const existing_amt = d.cap - d.idx;
d.their_end = d.idx + their_amt; d.their_end = d.idx + their_amt;
@ -672,7 +672,7 @@ pub const Decoder = struct {
/// Same as `readAtLeast` but also increases `our_end` by exactly `our_amt`. /// Same as `readAtLeast` but also increases `our_end` by exactly `our_amt`.
/// Use when `our_amt` is calculated by us, not by them. /// Use when `our_amt` is calculated by us, not by them.
pub fn readAtLeastOurAmt(d: *Decoder, stream: *std.io.Reader, our_amt: usize) !void { pub fn readAtLeastOurAmt(d: *Decoder, stream: *std.Io.Reader, our_amt: usize) !void {
assert(!d.disable_reads); assert(!d.disable_reads);
try readAtLeast(d, stream, our_amt); try readAtLeast(d, stream, our_amt);
d.our_end = d.idx + our_amt; d.our_end = d.idx + our_amt;

View File

@ -2,7 +2,6 @@ const builtin = @import("builtin");
const std = @import("std.zig"); const std = @import("std.zig");
const math = std.math; const math = std.math;
const mem = std.mem; const mem = std.mem;
const io = std.io;
const posix = std.posix; const posix = std.posix;
const fs = std.fs; const fs = std.fs;
const testing = std.testing; const testing = std.testing;
@ -12,7 +11,8 @@ const windows = std.os.windows;
const native_arch = builtin.cpu.arch; const native_arch = builtin.cpu.arch;
const native_os = builtin.os.tag; const native_os = builtin.os.tag;
const native_endian = native_arch.endian(); const native_endian = native_arch.endian();
const Writer = std.io.Writer; const Writer = std.Io.Writer;
const tty = std.Io.tty;
pub const Dwarf = @import("debug/Dwarf.zig"); pub const Dwarf = @import("debug/Dwarf.zig");
pub const Pdb = @import("debug/Pdb.zig"); pub const Pdb = @import("debug/Pdb.zig");
@ -246,12 +246,12 @@ pub fn getSelfDebugInfo() !*SelfInfo {
pub fn dumpHex(bytes: []const u8) void { pub fn dumpHex(bytes: []const u8) void {
const bw = lockStderrWriter(&.{}); const bw = lockStderrWriter(&.{});
defer unlockStderrWriter(); defer unlockStderrWriter();
const ttyconf = std.io.tty.detectConfig(.stderr()); const ttyconf = tty.detectConfig(.stderr());
dumpHexFallible(bw, ttyconf, bytes) catch {}; dumpHexFallible(bw, ttyconf, bytes) catch {};
} }
/// Prints a hexadecimal view of the bytes, returning any error that occurs. /// Prints a hexadecimal view of the bytes, returning any error that occurs.
pub fn dumpHexFallible(bw: *Writer, ttyconf: std.io.tty.Config, bytes: []const u8) !void { pub fn dumpHexFallible(bw: *Writer, ttyconf: tty.Config, bytes: []const u8) !void {
var chunks = mem.window(u8, bytes, 16, 16); var chunks = mem.window(u8, bytes, 16, 16);
while (chunks.next()) |window| { while (chunks.next()) |window| {
// 1. Print the address. // 1. Print the address.
@ -302,7 +302,7 @@ pub fn dumpHexFallible(bw: *Writer, ttyconf: std.io.tty.Config, bytes: []const u
test dumpHexFallible { test dumpHexFallible {
const bytes: []const u8 = &.{ 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff, 0x01, 0x12, 0x13 }; const bytes: []const u8 = &.{ 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff, 0x01, 0x12, 0x13 };
var aw: std.io.Writer.Allocating = .init(std.testing.allocator); var aw: Writer.Allocating = .init(std.testing.allocator);
defer aw.deinit(); defer aw.deinit();
try dumpHexFallible(&aw.writer, .no_color, bytes); try dumpHexFallible(&aw.writer, .no_color, bytes);
@ -342,7 +342,7 @@ pub fn dumpCurrentStackTraceToWriter(start_addr: ?usize, writer: *Writer) !void
try writer.print("Unable to dump stack trace: Unable to open debug info: {s}\n", .{@errorName(err)}); try writer.print("Unable to dump stack trace: Unable to open debug info: {s}\n", .{@errorName(err)});
return; return;
}; };
writeCurrentStackTrace(writer, debug_info, io.tty.detectConfig(.stderr()), start_addr) catch |err| { writeCurrentStackTrace(writer, debug_info, tty.detectConfig(.stderr()), start_addr) catch |err| {
try writer.print("Unable to dump stack trace: {s}\n", .{@errorName(err)}); try writer.print("Unable to dump stack trace: {s}\n", .{@errorName(err)});
return; return;
}; };
@ -427,7 +427,7 @@ pub fn dumpStackTraceFromBase(context: *ThreadContext, stderr: *Writer) void {
stderr.print("Unable to dump stack trace: Unable to open debug info: {s}\n", .{@errorName(err)}) catch return; stderr.print("Unable to dump stack trace: Unable to open debug info: {s}\n", .{@errorName(err)}) catch return;
return; return;
}; };
const tty_config = io.tty.detectConfig(.stderr()); const tty_config = tty.detectConfig(.stderr());
if (native_os == .windows) { if (native_os == .windows) {
// On x86_64 and aarch64, the stack will be unwound using RtlVirtualUnwind using the context // On x86_64 and aarch64, the stack will be unwound using RtlVirtualUnwind using the context
// provided by the exception handler. On x86, RtlVirtualUnwind doesn't exist. Instead, a new backtrace // provided by the exception handler. On x86, RtlVirtualUnwind doesn't exist. Instead, a new backtrace
@ -533,7 +533,7 @@ pub fn dumpStackTrace(stack_trace: std.builtin.StackTrace) void {
stderr.print("Unable to dump stack trace: Unable to open debug info: {s}\n", .{@errorName(err)}) catch return; stderr.print("Unable to dump stack trace: Unable to open debug info: {s}\n", .{@errorName(err)}) catch return;
return; return;
}; };
writeStackTrace(stack_trace, stderr, debug_info, io.tty.detectConfig(.stderr())) catch |err| { writeStackTrace(stack_trace, stderr, debug_info, tty.detectConfig(.stderr())) catch |err| {
stderr.print("Unable to dump stack trace: {s}\n", .{@errorName(err)}) catch return; stderr.print("Unable to dump stack trace: {s}\n", .{@errorName(err)}) catch return;
return; return;
}; };
@ -738,7 +738,7 @@ pub fn writeStackTrace(
stack_trace: std.builtin.StackTrace, stack_trace: std.builtin.StackTrace,
writer: *Writer, writer: *Writer,
debug_info: *SelfInfo, debug_info: *SelfInfo,
tty_config: io.tty.Config, tty_config: tty.Config,
) !void { ) !void {
if (builtin.strip_debug_info) return error.MissingDebugInfo; if (builtin.strip_debug_info) return error.MissingDebugInfo;
var frame_index: usize = 0; var frame_index: usize = 0;
@ -959,7 +959,7 @@ pub const StackIterator = struct {
pub fn writeCurrentStackTrace( pub fn writeCurrentStackTrace(
writer: *Writer, writer: *Writer,
debug_info: *SelfInfo, debug_info: *SelfInfo,
tty_config: io.tty.Config, tty_config: tty.Config,
start_addr: ?usize, start_addr: ?usize,
) !void { ) !void {
if (native_os == .windows) { if (native_os == .windows) {
@ -1047,7 +1047,7 @@ pub noinline fn walkStackWindows(addresses: []usize, existing_context: ?*const w
pub fn writeStackTraceWindows( pub fn writeStackTraceWindows(
writer: *Writer, writer: *Writer,
debug_info: *SelfInfo, debug_info: *SelfInfo,
tty_config: io.tty.Config, tty_config: tty.Config,
context: *const windows.CONTEXT, context: *const windows.CONTEXT,
start_addr: ?usize, start_addr: ?usize,
) !void { ) !void {
@ -1065,7 +1065,7 @@ pub fn writeStackTraceWindows(
} }
} }
fn printUnknownSource(debug_info: *SelfInfo, writer: *Writer, address: usize, tty_config: io.tty.Config) !void { fn printUnknownSource(debug_info: *SelfInfo, writer: *Writer, address: usize, tty_config: tty.Config) !void {
const module_name = debug_info.getModuleNameForAddress(address); const module_name = debug_info.getModuleNameForAddress(address);
return printLineInfo( return printLineInfo(
writer, writer,
@ -1078,14 +1078,14 @@ fn printUnknownSource(debug_info: *SelfInfo, writer: *Writer, address: usize, tt
); );
} }
fn printLastUnwindError(it: *StackIterator, debug_info: *SelfInfo, writer: *Writer, tty_config: io.tty.Config) void { fn printLastUnwindError(it: *StackIterator, debug_info: *SelfInfo, writer: *Writer, tty_config: tty.Config) void {
if (!have_ucontext) return; if (!have_ucontext) return;
if (it.getLastError()) |unwind_error| { if (it.getLastError()) |unwind_error| {
printUnwindError(debug_info, writer, unwind_error.address, unwind_error.err, tty_config) catch {}; printUnwindError(debug_info, writer, unwind_error.address, unwind_error.err, tty_config) catch {};
} }
} }
fn printUnwindError(debug_info: *SelfInfo, writer: *Writer, address: usize, err: UnwindError, tty_config: io.tty.Config) !void { fn printUnwindError(debug_info: *SelfInfo, writer: *Writer, address: usize, err: UnwindError, tty_config: tty.Config) !void {
const module_name = debug_info.getModuleNameForAddress(address) orelse "???"; const module_name = debug_info.getModuleNameForAddress(address) orelse "???";
try tty_config.setColor(writer, .dim); try tty_config.setColor(writer, .dim);
if (err == error.MissingDebugInfo) { if (err == error.MissingDebugInfo) {
@ -1096,7 +1096,7 @@ fn printUnwindError(debug_info: *SelfInfo, writer: *Writer, address: usize, err:
try tty_config.setColor(writer, .reset); try tty_config.setColor(writer, .reset);
} }
pub fn printSourceAtAddress(debug_info: *SelfInfo, writer: *Writer, address: usize, tty_config: io.tty.Config) !void { pub fn printSourceAtAddress(debug_info: *SelfInfo, writer: *Writer, address: usize, tty_config: tty.Config) !void {
const module = debug_info.getModuleForAddress(address) catch |err| switch (err) { const module = debug_info.getModuleForAddress(address) catch |err| switch (err) {
error.MissingDebugInfo, error.InvalidDebugInfo => return printUnknownSource(debug_info, writer, address, tty_config), error.MissingDebugInfo, error.InvalidDebugInfo => return printUnknownSource(debug_info, writer, address, tty_config),
else => return err, else => return err,
@ -1125,7 +1125,7 @@ fn printLineInfo(
address: usize, address: usize,
symbol_name: []const u8, symbol_name: []const u8,
compile_unit_name: []const u8, compile_unit_name: []const u8,
tty_config: io.tty.Config, tty_config: tty.Config,
comptime printLineFromFile: anytype, comptime printLineFromFile: anytype,
) !void { ) !void {
nosuspend { nosuspend {
@ -1597,10 +1597,10 @@ test "manage resources correctly" {
// self-hosted debug info is still too buggy // self-hosted debug info is still too buggy
if (builtin.zig_backend != .stage2_llvm) return error.SkipZigTest; if (builtin.zig_backend != .stage2_llvm) return error.SkipZigTest;
var discarding: std.io.Writer.Discarding = .init(&.{}); var discarding: Writer.Discarding = .init(&.{});
var di = try SelfInfo.open(testing.allocator); var di = try SelfInfo.open(testing.allocator);
defer di.deinit(); defer di.deinit();
try printSourceAtAddress(&di, &discarding.writer, showMyTrace(), io.tty.detectConfig(.stderr())); try printSourceAtAddress(&di, &discarding.writer, showMyTrace(), tty.detectConfig(.stderr()));
} }
noinline fn showMyTrace() usize { noinline fn showMyTrace() usize {
@ -1666,7 +1666,7 @@ pub fn ConfigurableTrace(comptime size: usize, comptime stack_frame_count: usize
pub fn dump(t: @This()) void { pub fn dump(t: @This()) void {
if (!enabled) return; if (!enabled) return;
const tty_config = io.tty.detectConfig(.stderr()); const tty_config = tty.detectConfig(.stderr());
const stderr = lockStderrWriter(&.{}); const stderr = lockStderrWriter(&.{});
defer unlockStderrWriter(); defer unlockStderrWriter();
const end = @min(t.index, size); const end = @min(t.index, size);

View File

@ -51,15 +51,9 @@ const Opcode = enum(u8) {
pub const hi_user = 0x3f; pub const hi_user = 0x3f;
}; };
fn readBlock(stream: *std.io.FixedBufferStream([]const u8)) ![]const u8 { fn readBlock(reader: *std.Io.Reader) ![]const u8 {
const reader = stream.reader(); const block_len = try reader.takeLeb128(usize);
const block_len = try leb.readUleb128(usize, reader); return reader.take(block_len);
if (stream.pos + block_len > stream.buffer.len) return error.InvalidOperand;
const block = stream.buffer[stream.pos..][0..block_len];
reader.context.pos += block_len;
return block;
} }
pub const Instruction = union(Opcode) { pub const Instruction = union(Opcode) {
@ -147,12 +141,11 @@ pub const Instruction = union(Opcode) {
}, },
pub fn read( pub fn read(
stream: *std.io.FixedBufferStream([]const u8), reader: *std.Io.Reader,
addr_size_bytes: u8, addr_size_bytes: u8,
endian: std.builtin.Endian, endian: std.builtin.Endian,
) !Instruction { ) !Instruction {
const reader = stream.reader(); switch (try reader.takeByte()) {
switch (try reader.readByte()) {
Opcode.lo_inline...Opcode.hi_inline => |opcode| { Opcode.lo_inline...Opcode.hi_inline => |opcode| {
const e: Opcode = @enumFromInt(opcode & 0b11000000); const e: Opcode = @enumFromInt(opcode & 0b11000000);
const value: u6 = @intCast(opcode & 0b111111); const value: u6 = @intCast(opcode & 0b111111);
@ -163,7 +156,7 @@ pub const Instruction = union(Opcode) {
.offset => .{ .offset => .{
.offset = .{ .offset = .{
.register = value, .register = value,
.offset = try leb.readUleb128(u64, reader), .offset = try reader.takeLeb128(u64),
}, },
}, },
.restore => .{ .restore => .{
@ -183,111 +176,111 @@ pub const Instruction = union(Opcode) {
.set_loc => .{ .set_loc => .{
.set_loc = .{ .set_loc = .{
.address = switch (addr_size_bytes) { .address = switch (addr_size_bytes) {
2 => try reader.readInt(u16, endian), 2 => try reader.takeInt(u16, endian),
4 => try reader.readInt(u32, endian), 4 => try reader.takeInt(u32, endian),
8 => try reader.readInt(u64, endian), 8 => try reader.takeInt(u64, endian),
else => return error.InvalidAddrSize, else => return error.InvalidAddrSize,
}, },
}, },
}, },
.advance_loc1 => .{ .advance_loc1 => .{
.advance_loc1 = .{ .delta = try reader.readByte() }, .advance_loc1 = .{ .delta = try reader.takeByte() },
}, },
.advance_loc2 => .{ .advance_loc2 => .{
.advance_loc2 = .{ .delta = try reader.readInt(u16, endian) }, .advance_loc2 = .{ .delta = try reader.takeInt(u16, endian) },
}, },
.advance_loc4 => .{ .advance_loc4 => .{
.advance_loc4 = .{ .delta = try reader.readInt(u32, endian) }, .advance_loc4 = .{ .delta = try reader.takeInt(u32, endian) },
}, },
.offset_extended => .{ .offset_extended => .{
.offset_extended = .{ .offset_extended = .{
.register = try leb.readUleb128(u8, reader), .register = try reader.takeLeb128(u8),
.offset = try leb.readUleb128(u64, reader), .offset = try reader.takeLeb128(u64),
}, },
}, },
.restore_extended => .{ .restore_extended => .{
.restore_extended = .{ .restore_extended = .{
.register = try leb.readUleb128(u8, reader), .register = try reader.takeLeb128(u8),
}, },
}, },
.undefined => .{ .undefined => .{
.undefined = .{ .undefined = .{
.register = try leb.readUleb128(u8, reader), .register = try reader.takeLeb128(u8),
}, },
}, },
.same_value => .{ .same_value => .{
.same_value = .{ .same_value = .{
.register = try leb.readUleb128(u8, reader), .register = try reader.takeLeb128(u8),
}, },
}, },
.register => .{ .register => .{
.register = .{ .register = .{
.register = try leb.readUleb128(u8, reader), .register = try reader.takeLeb128(u8),
.target_register = try leb.readUleb128(u8, reader), .target_register = try reader.takeLeb128(u8),
}, },
}, },
.remember_state => .{ .remember_state = {} }, .remember_state => .{ .remember_state = {} },
.restore_state => .{ .restore_state = {} }, .restore_state => .{ .restore_state = {} },
.def_cfa => .{ .def_cfa => .{
.def_cfa = .{ .def_cfa = .{
.register = try leb.readUleb128(u8, reader), .register = try reader.takeLeb128(u8),
.offset = try leb.readUleb128(u64, reader), .offset = try reader.takeLeb128(u64),
}, },
}, },
.def_cfa_register => .{ .def_cfa_register => .{
.def_cfa_register = .{ .def_cfa_register = .{
.register = try leb.readUleb128(u8, reader), .register = try reader.takeLeb128(u8),
}, },
}, },
.def_cfa_offset => .{ .def_cfa_offset => .{
.def_cfa_offset = .{ .def_cfa_offset = .{
.offset = try leb.readUleb128(u64, reader), .offset = try reader.takeLeb128(u64),
}, },
}, },
.def_cfa_expression => .{ .def_cfa_expression => .{
.def_cfa_expression = .{ .def_cfa_expression = .{
.block = try readBlock(stream), .block = try readBlock(reader),
}, },
}, },
.expression => .{ .expression => .{
.expression = .{ .expression = .{
.register = try leb.readUleb128(u8, reader), .register = try reader.takeLeb128(u8),
.block = try readBlock(stream), .block = try readBlock(reader),
}, },
}, },
.offset_extended_sf => .{ .offset_extended_sf => .{
.offset_extended_sf = .{ .offset_extended_sf = .{
.register = try leb.readUleb128(u8, reader), .register = try reader.takeLeb128(u8),
.offset = try leb.readIleb128(i64, reader), .offset = try reader.takeLeb128(i64),
}, },
}, },
.def_cfa_sf => .{ .def_cfa_sf => .{
.def_cfa_sf = .{ .def_cfa_sf = .{
.register = try leb.readUleb128(u8, reader), .register = try reader.takeLeb128(u8),
.offset = try leb.readIleb128(i64, reader), .offset = try reader.takeLeb128(i64),
}, },
}, },
.def_cfa_offset_sf => .{ .def_cfa_offset_sf => .{
.def_cfa_offset_sf = .{ .def_cfa_offset_sf = .{
.offset = try leb.readIleb128(i64, reader), .offset = try reader.takeLeb128(i64),
}, },
}, },
.val_offset => .{ .val_offset => .{
.val_offset = .{ .val_offset = .{
.register = try leb.readUleb128(u8, reader), .register = try reader.takeLeb128(u8),
.offset = try leb.readUleb128(u64, reader), .offset = try reader.takeLeb128(u64),
}, },
}, },
.val_offset_sf => .{ .val_offset_sf => .{
.val_offset_sf = .{ .val_offset_sf = .{
.register = try leb.readUleb128(u8, reader), .register = try reader.takeLeb128(u8),
.offset = try leb.readIleb128(i64, reader), .offset = try reader.takeLeb128(i64),
}, },
}, },
.val_expression => .{ .val_expression => .{
.val_expression = .{ .val_expression = .{
.register = try leb.readUleb128(u8, reader), .register = try reader.takeLeb128(u8),
.block = try readBlock(stream), .block = try readBlock(reader),
}, },
}, },
}; };
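
For the integer operands above, the fixed reader's `takeInt` takes the endianness explicitly, mirroring the old `readInt`. A small illustrative check, independent of the DWARF code:

    const std = @import("std");

    test "takeInt with explicit endianness (illustrative)" {
        const bytes = [_]u8{ 0x12, 0x34 };
        var r: std.Io.Reader = .fixed(&bytes);
        try std.testing.expectEqual(0x1234, try r.takeInt(u16, .big));
        r = .fixed(&bytes);
        try std.testing.expectEqual(0x3412, try r.takeInt(u16, .little));
    }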

View File

@ -62,7 +62,7 @@ pub const Error = error{
InvalidTypeLength, InvalidTypeLength,
TruncatedIntegralType, TruncatedIntegralType,
} || abi.RegBytesError || error{ EndOfStream, Overflow, OutOfMemory, DivisionByZero }; } || abi.RegBytesError || error{ EndOfStream, Overflow, OutOfMemory, DivisionByZero, ReadFailed };
/// A stack machine that can decode and run DWARF expressions. /// A stack machine that can decode and run DWARF expressions.
/// Expressions can be decoded for non-native address size and endianness, /// Expressions can be decoded for non-native address size and endianness,
@ -178,61 +178,60 @@ pub fn StackMachine(comptime options: Options) type {
} }
} }
pub fn readOperand(stream: *std.io.FixedBufferStream([]const u8), opcode: u8, context: Context) !?Operand { pub fn readOperand(reader: *std.Io.Reader, opcode: u8, context: Context) !?Operand {
const reader = stream.reader();
return switch (opcode) { return switch (opcode) {
OP.addr => generic(try reader.readInt(addr_type, options.endian)), OP.addr => generic(try reader.takeInt(addr_type, options.endian)),
OP.call_ref => switch (context.format) { OP.call_ref => switch (context.format) {
.@"32" => generic(try reader.readInt(u32, options.endian)), .@"32" => generic(try reader.takeInt(u32, options.endian)),
.@"64" => generic(try reader.readInt(u64, options.endian)), .@"64" => generic(try reader.takeInt(u64, options.endian)),
}, },
OP.const1u, OP.const1u,
OP.pick, OP.pick,
=> generic(try reader.readByte()), => generic(try reader.takeByte()),
OP.deref_size, OP.deref_size,
OP.xderef_size, OP.xderef_size,
=> .{ .type_size = try reader.readByte() }, => .{ .type_size = try reader.takeByte() },
OP.const1s => generic(try reader.readByteSigned()), OP.const1s => generic(try reader.takeByteSigned()),
OP.const2u, OP.const2u,
OP.call2, OP.call2,
=> generic(try reader.readInt(u16, options.endian)), => generic(try reader.takeInt(u16, options.endian)),
OP.call4 => generic(try reader.readInt(u32, options.endian)), OP.call4 => generic(try reader.takeInt(u32, options.endian)),
OP.const2s => generic(try reader.readInt(i16, options.endian)), OP.const2s => generic(try reader.takeInt(i16, options.endian)),
OP.bra, OP.bra,
OP.skip, OP.skip,
=> .{ .branch_offset = try reader.readInt(i16, options.endian) }, => .{ .branch_offset = try reader.takeInt(i16, options.endian) },
OP.const4u => generic(try reader.readInt(u32, options.endian)), OP.const4u => generic(try reader.takeInt(u32, options.endian)),
OP.const4s => generic(try reader.readInt(i32, options.endian)), OP.const4s => generic(try reader.takeInt(i32, options.endian)),
OP.const8u => generic(try reader.readInt(u64, options.endian)), OP.const8u => generic(try reader.takeInt(u64, options.endian)),
OP.const8s => generic(try reader.readInt(i64, options.endian)), OP.const8s => generic(try reader.takeInt(i64, options.endian)),
OP.constu, OP.constu,
OP.plus_uconst, OP.plus_uconst,
OP.addrx, OP.addrx,
OP.constx, OP.constx,
OP.convert, OP.convert,
OP.reinterpret, OP.reinterpret,
=> generic(try leb.readUleb128(u64, reader)), => generic(try reader.takeLeb128(u64)),
OP.consts, OP.consts,
OP.fbreg, OP.fbreg,
=> generic(try leb.readIleb128(i64, reader)), => generic(try reader.takeLeb128(i64)),
OP.lit0...OP.lit31 => |n| generic(n - OP.lit0), OP.lit0...OP.lit31 => |n| generic(n - OP.lit0),
OP.reg0...OP.reg31 => |n| .{ .register = n - OP.reg0 }, OP.reg0...OP.reg31 => |n| .{ .register = n - OP.reg0 },
OP.breg0...OP.breg31 => |n| .{ .base_register = .{ OP.breg0...OP.breg31 => |n| .{ .base_register = .{
.base_register = n - OP.breg0, .base_register = n - OP.breg0,
.offset = try leb.readIleb128(i64, reader), .offset = try reader.takeLeb128(i64),
} }, } },
OP.regx => .{ .register = try leb.readUleb128(u8, reader) }, OP.regx => .{ .register = try reader.takeLeb128(u8) },
OP.bregx => blk: { OP.bregx => blk: {
const base_register = try leb.readUleb128(u8, reader); const base_register = try reader.takeLeb128(u8);
const offset = try leb.readIleb128(i64, reader); const offset = try reader.takeLeb128(i64);
break :blk .{ .base_register = .{ break :blk .{ .base_register = .{
.base_register = base_register, .base_register = base_register,
.offset = offset, .offset = offset,
} }; } };
}, },
OP.regval_type => blk: { OP.regval_type => blk: {
const register = try leb.readUleb128(u8, reader); const register = try reader.takeLeb128(u8);
const type_offset = try leb.readUleb128(addr_type, reader); const type_offset = try reader.takeLeb128(addr_type);
break :blk .{ .register_type = .{ break :blk .{ .register_type = .{
.register = register, .register = register,
.type_offset = type_offset, .type_offset = type_offset,
@ -240,33 +239,27 @@ pub fn StackMachine(comptime options: Options) type {
}, },
OP.piece => .{ OP.piece => .{
.composite_location = .{ .composite_location = .{
.size = try leb.readUleb128(u8, reader), .size = try reader.takeLeb128(u8),
.offset = 0, .offset = 0,
}, },
}, },
OP.bit_piece => blk: { OP.bit_piece => blk: {
const size = try leb.readUleb128(u8, reader); const size = try reader.takeLeb128(u8);
const offset = try leb.readIleb128(i64, reader); const offset = try reader.takeLeb128(i64);
break :blk .{ .composite_location = .{ break :blk .{ .composite_location = .{
.size = size, .size = size,
.offset = offset, .offset = offset,
} }; } };
}, },
OP.implicit_value, OP.entry_value => blk: { OP.implicit_value, OP.entry_value => blk: {
const size = try leb.readUleb128(u8, reader); const size = try reader.takeLeb128(u8);
if (stream.pos + size > stream.buffer.len) return error.InvalidExpression; const block = try reader.take(size);
const block = stream.buffer[stream.pos..][0..size]; break :blk .{ .block = block };
stream.pos += size;
break :blk .{
.block = block,
};
}, },
OP.const_type => blk: { OP.const_type => blk: {
const type_offset = try leb.readUleb128(addr_type, reader); const type_offset = try reader.takeLeb128(addr_type);
const size = try reader.readByte(); const size = try reader.takeByte();
if (stream.pos + size > stream.buffer.len) return error.InvalidExpression; const value_bytes = try reader.take(size);
const value_bytes = stream.buffer[stream.pos..][0..size];
stream.pos += size;
break :blk .{ .const_type = .{ break :blk .{ .const_type = .{
.type_offset = type_offset, .type_offset = type_offset,
.value_bytes = value_bytes, .value_bytes = value_bytes,
@ -276,8 +269,8 @@ pub fn StackMachine(comptime options: Options) type {
OP.xderef_type, OP.xderef_type,
=> .{ => .{
.deref_type = .{ .deref_type = .{
.size = try reader.readByte(), .size = try reader.takeByte(),
.type_offset = try leb.readUleb128(addr_type, reader), .type_offset = try reader.takeLeb128(addr_type),
}, },
}, },
OP.lo_user...OP.hi_user => return error.UnimplementedUserOpcode, OP.lo_user...OP.hi_user => return error.UnimplementedUserOpcode,
@ -293,7 +286,7 @@ pub fn StackMachine(comptime options: Options) type {
initial_value: ?usize, initial_value: ?usize,
) Error!?Value { ) Error!?Value {
if (initial_value) |i| try self.stack.append(allocator, .{ .generic = i }); if (initial_value) |i| try self.stack.append(allocator, .{ .generic = i });
var stream = std.io.fixedBufferStream(expression); var stream: std.Io.Reader = .fixed(expression);
while (try self.step(&stream, allocator, context)) {} while (try self.step(&stream, allocator, context)) {}
if (self.stack.items.len == 0) return null; if (self.stack.items.len == 0) return null;
return self.stack.items[self.stack.items.len - 1]; return self.stack.items[self.stack.items.len - 1];
@ -302,14 +295,14 @@ pub fn StackMachine(comptime options: Options) type {
/// Reads an opcode and its operands from `stream`, then executes it /// Reads an opcode and its operands from `stream`, then executes it
pub fn step( pub fn step(
self: *Self, self: *Self,
stream: *std.io.FixedBufferStream([]const u8), stream: *std.Io.Reader,
allocator: std.mem.Allocator, allocator: std.mem.Allocator,
context: Context, context: Context,
) Error!bool { ) Error!bool {
if (@sizeOf(usize) != @sizeOf(addr_type) or options.endian != native_endian) if (@sizeOf(usize) != @sizeOf(addr_type) or options.endian != native_endian)
@compileError("Execution of non-native address sizes / endianness is not supported"); @compileError("Execution of non-native address sizes / endianness is not supported");
const opcode = try stream.reader().readByte(); const opcode = try stream.takeByte();
if (options.call_frame_context and !isOpcodeValidInCFA(opcode)) return error.InvalidCFAOpcode; if (options.call_frame_context and !isOpcodeValidInCFA(opcode)) return error.InvalidCFAOpcode;
const operand = try readOperand(stream, opcode, context); const operand = try readOperand(stream, opcode, context);
switch (opcode) { switch (opcode) {
@ -663,11 +656,11 @@ pub fn StackMachine(comptime options: Options) type {
if (condition) { if (condition) {
const new_pos = std.math.cast( const new_pos = std.math.cast(
usize, usize,
try std.math.add(isize, @as(isize, @intCast(stream.pos)), branch_offset), try std.math.add(isize, @as(isize, @intCast(stream.seek)), branch_offset),
) orelse return error.InvalidExpression; ) orelse return error.InvalidExpression;
if (new_pos < 0 or new_pos > stream.buffer.len) return error.InvalidExpression; if (new_pos < 0 or new_pos > stream.buffer.len) return error.InvalidExpression;
stream.pos = new_pos; stream.seek = new_pos;
} }
}, },
OP.call2, OP.call2,
@ -746,7 +739,7 @@ pub fn StackMachine(comptime options: Options) type {
if (isOpcodeRegisterLocation(block[0])) { if (isOpcodeRegisterLocation(block[0])) {
if (context.thread_context == null) return error.IncompleteExpressionContext; if (context.thread_context == null) return error.IncompleteExpressionContext;
var block_stream = std.io.fixedBufferStream(block); var block_stream: std.Io.Reader = .fixed(block);
const register = (try readOperand(&block_stream, block[0], context)).?.register; const register = (try readOperand(&block_stream, block[0], context)).?.register;
const value = mem.readInt(usize, (try abi.regBytes(context.thread_context.?, register, context.reg_context))[0..@sizeOf(usize)], native_endian); const value = mem.readInt(usize, (try abi.regBytes(context.thread_context.?, register, context.reg_context))[0..@sizeOf(usize)], native_endian);
try self.stack.append(allocator, .{ .generic = value }); try self.stack.append(allocator, .{ .generic = value });
@ -769,7 +762,7 @@ pub fn StackMachine(comptime options: Options) type {
}, },
} }
return stream.pos < stream.buffer.len; return stream.seek < stream.buffer.len;
} }
}; };
} }

View File

@ -2017,15 +2017,12 @@ pub const VirtualMachine = struct {
var prev_row: Row = self.current_row; var prev_row: Row = self.current_row;
var cie_stream = std.io.fixedBufferStream(cie.initial_instructions); var cie_stream: std.Io.Reader = .fixed(cie.initial_instructions);
var fde_stream = std.io.fixedBufferStream(fde.instructions); var fde_stream: std.Io.Reader = .fixed(fde.instructions);
var streams = [_]*std.io.FixedBufferStream([]const u8){ const streams = [_]*std.Io.Reader{ &cie_stream, &fde_stream };
&cie_stream,
&fde_stream,
};
for (&streams, 0..) |stream, i| { for (&streams, 0..) |stream, i| {
while (stream.pos < stream.buffer.len) { while (stream.seek < stream.buffer.len) {
const instruction = try std.debug.Dwarf.call_frame.Instruction.read(stream, addr_size_bytes, endian); const instruction = try std.debug.Dwarf.call_frame.Instruction.read(stream, addr_size_bytes, endian);
prev_row = try self.step(allocator, cie, i == 0, instruction); prev_row = try self.step(allocator, cie, i == 0, instruction);
if (pc < fde.pc_begin + self.current_row.offset) return prev_row; if (pc < fde.pc_begin + self.current_row.offset) return prev_row;

View File

@ -609,7 +609,7 @@ pub const ProgramHeaderBufferIterator = struct {
} }
}; };
fn takePhdr(reader: *std.io.Reader, elf_header: Header) !?Elf64_Phdr { fn takePhdr(reader: *std.Io.Reader, elf_header: Header) !?Elf64_Phdr {
if (elf_header.is_64) { if (elf_header.is_64) {
const phdr = try reader.takeStruct(Elf64_Phdr, elf_header.endian); const phdr = try reader.takeStruct(Elf64_Phdr, elf_header.endian);
return phdr; return phdr;

View File

@ -3,7 +3,6 @@
const builtin = @import("builtin"); const builtin = @import("builtin");
const std = @import("std.zig"); const std = @import("std.zig");
const io = std.io;
const math = std.math; const math = std.math;
const assert = std.debug.assert; const assert = std.debug.assert;
const mem = std.mem; const mem = std.mem;
@ -12,7 +11,7 @@ const lossyCast = math.lossyCast;
const expectFmt = std.testing.expectFmt; const expectFmt = std.testing.expectFmt;
const testing = std.testing; const testing = std.testing;
const Allocator = std.mem.Allocator; const Allocator = std.mem.Allocator;
const Writer = std.io.Writer; const Writer = std.Io.Writer;
pub const float = @import("fmt/float.zig"); pub const float = @import("fmt/float.zig");

View File

@ -7,7 +7,6 @@ const File = @This();
const std = @import("../std.zig"); const std = @import("../std.zig");
const Allocator = std.mem.Allocator; const Allocator = std.mem.Allocator;
const posix = std.posix; const posix = std.posix;
const io = std.io;
const math = std.math; const math = std.math;
const assert = std.debug.assert; const assert = std.debug.assert;
const linux = std.os.linux; const linux = std.os.linux;
@ -805,42 +804,6 @@ pub fn updateTimes(
try posix.futimens(self.handle, &times); try posix.futimens(self.handle, &times);
} }
/// Deprecated in favor of `Reader`.
pub fn readToEndAlloc(self: File, allocator: Allocator, max_bytes: usize) ![]u8 {
return self.readToEndAllocOptions(allocator, max_bytes, null, .of(u8), null);
}
/// Deprecated in favor of `Reader`.
pub fn readToEndAllocOptions(
self: File,
allocator: Allocator,
max_bytes: usize,
size_hint: ?usize,
comptime alignment: Alignment,
comptime optional_sentinel: ?u8,
) !(if (optional_sentinel) |s| [:s]align(alignment.toByteUnits()) u8 else []align(alignment.toByteUnits()) u8) {
// If no size hint is provided fall back to the size=0 code path
const size = size_hint orelse 0;
// The file size returned by stat is used as hint to set the buffer
// size. If the reported size is zero, as it happens on Linux for files
// in /proc, a small buffer is allocated instead.
const initial_cap = @min((if (size > 0) size else 1024), max_bytes) + @intFromBool(optional_sentinel != null);
var array_list = try std.array_list.AlignedManaged(u8, alignment).initCapacity(allocator, initial_cap);
defer array_list.deinit();
self.deprecatedReader().readAllArrayListAligned(alignment, &array_list, max_bytes) catch |err| switch (err) {
error.StreamTooLong => return error.FileTooBig,
else => |e| return e,
};
if (optional_sentinel) |sentinel| {
return try array_list.toOwnedSliceSentinel(sentinel);
} else {
return try array_list.toOwnedSlice();
}
}
pub const ReadError = posix.ReadError; pub const ReadError = posix.ReadError;
pub const PReadError = posix.PReadError; pub const PReadError = posix.PReadError;
@ -1089,14 +1052,6 @@ pub fn copyRangeAll(in: File, in_offset: u64, out: File, out_offset: u64, len: u
return total_bytes_copied; return total_bytes_copied;
} }
/// Deprecated in favor of `Reader`.
pub const DeprecatedReader = io.GenericReader(File, ReadError, read);
/// Deprecated in favor of `Reader`.
pub fn deprecatedReader(file: File) DeprecatedReader {
return .{ .context = file };
}
/// Memoizes key information about a file handle such as: /// Memoizes key information about a file handle such as:
/// * The size from calling stat, or the error that occurred therein. /// * The size from calling stat, or the error that occurred therein.
/// * The current seek position. /// * The current seek position.

View File

@ -150,7 +150,7 @@ pub fn fmtJoin(paths: []const []const u8) std.fmt.Formatter([]const []const u8,
return .{ .data = paths }; return .{ .data = paths };
} }
fn formatJoin(paths: []const []const u8, w: *std.io.Writer) std.io.Writer.Error!void { fn formatJoin(paths: []const []const u8, w: *std.Io.Writer) std.Io.Writer.Error!void {
const first_path_idx = for (paths, 0..) |p, idx| { const first_path_idx = for (paths, 0..) |p, idx| {
if (p.len != 0) break idx; if (p.len != 0) break idx;
} else return; } else return;

View File

@ -1,7 +1,7 @@
//! JSON parsing and stringification conforming to RFC 8259. https://datatracker.ietf.org/doc/html/rfc8259 //! JSON parsing and stringification conforming to RFC 8259. https://datatracker.ietf.org/doc/html/rfc8259
//! //!
//! The low-level `Scanner` API produces `Token`s from an input slice or successive slices of inputs, //! The low-level `Scanner` API produces `Token`s from an input slice or successive slices of inputs,
//! The `Reader` API connects a `std.io.GenericReader` to a `Scanner`. //! The `Reader` API connects a `std.Io.Reader` to a `Scanner`.
//! //!
//! The high-level `parseFromSlice` and `parseFromTokenSource` deserialize a JSON document into a Zig type. //! The high-level `parseFromSlice` and `parseFromTokenSource` deserialize a JSON document into a Zig type.
//! Parse into a dynamically-typed `Value` to load any JSON value for runtime inspection. //! Parse into a dynamically-typed `Value` to load any JSON value for runtime inspection.
@ -42,7 +42,7 @@ test Value {
} }
test Stringify { test Stringify {
var out: std.io.Writer.Allocating = .init(testing.allocator); var out: std.Io.Writer.Allocating = .init(testing.allocator);
var write_stream: Stringify = .{ var write_stream: Stringify = .{
.writer = &out.writer, .writer = &out.writer,
.options = .{ .whitespace = .indent_2 }, .options = .{ .whitespace = .indent_2 },

View File

@ -23,7 +23,7 @@ const Allocator = std.mem.Allocator;
const ArrayList = std.ArrayList; const ArrayList = std.ArrayList;
const BitStack = std.BitStack; const BitStack = std.BitStack;
const Stringify = @This(); const Stringify = @This();
const Writer = std.io.Writer; const Writer = std.Io.Writer;
const IndentationMode = enum(u1) { const IndentationMode = enum(u1) {
object = 0, object = 0,
@ -576,7 +576,7 @@ pub fn value(v: anytype, options: Options, writer: *Writer) Error!void {
} }
test value { test value {
var out: std.io.Writer.Allocating = .init(std.testing.allocator); var out: Writer.Allocating = .init(std.testing.allocator);
const writer = &out.writer; const writer = &out.writer;
defer out.deinit(); defer out.deinit();
@ -616,7 +616,7 @@ test value {
/// ///
/// Caller owns returned memory. /// Caller owns returned memory.
pub fn valueAlloc(gpa: Allocator, v: anytype, options: Options) error{OutOfMemory}![]u8 { pub fn valueAlloc(gpa: Allocator, v: anytype, options: Options) error{OutOfMemory}![]u8 {
var aw: std.io.Writer.Allocating = .init(gpa); var aw: Writer.Allocating = .init(gpa);
defer aw.deinit(); defer aw.deinit();
value(v, options, &aw.writer) catch return error.OutOfMemory; value(v, options, &aw.writer) catch return error.OutOfMemory;
return aw.toOwnedSlice(); return aw.toOwnedSlice();

View File

@ -4,7 +4,7 @@ const mem = std.mem;
const testing = std.testing; const testing = std.testing;
const ArenaAllocator = std.heap.ArenaAllocator; const ArenaAllocator = std.heap.ArenaAllocator;
const Allocator = std.mem.Allocator; const Allocator = std.mem.Allocator;
const Writer = std.io.Writer; const Writer = std.Io.Writer;
const ObjectMap = @import("dynamic.zig").ObjectMap; const ObjectMap = @import("dynamic.zig").ObjectMap;
const Array = @import("dynamic.zig").Array; const Array = @import("dynamic.zig").Array;

View File

@ -2,120 +2,6 @@ const builtin = @import("builtin");
const std = @import("std"); const std = @import("std");
const testing = std.testing; const testing = std.testing;
/// Read a single unsigned LEB128 value from the given reader as type T,
/// or error.Overflow if the value cannot fit.
pub fn readUleb128(comptime T: type, reader: anytype) !T {
const U = if (@typeInfo(T).int.bits < 8) u8 else T;
const ShiftT = std.math.Log2Int(U);
const max_group = (@typeInfo(U).int.bits + 6) / 7;
var value: U = 0;
var group: ShiftT = 0;
while (group < max_group) : (group += 1) {
const byte = try reader.readByte();
const ov = @shlWithOverflow(@as(U, byte & 0x7f), group * 7);
if (ov[1] != 0) return error.Overflow;
value |= ov[0];
if (byte & 0x80 == 0) break;
} else {
return error.Overflow;
}
// only applies in the case that we extended to u8
if (U != T) {
if (value > std.math.maxInt(T)) return error.Overflow;
}
return @as(T, @truncate(value));
}
/// Read a single signed LEB128 value from the given reader as type T,
/// or error.Overflow if the value cannot fit.
pub fn readIleb128(comptime T: type, reader: anytype) !T {
const S = if (@typeInfo(T).int.bits < 8) i8 else T;
const U = std.meta.Int(.unsigned, @typeInfo(S).int.bits);
const ShiftU = std.math.Log2Int(U);
const max_group = (@typeInfo(U).int.bits + 6) / 7;
var value = @as(U, 0);
var group = @as(ShiftU, 0);
while (group < max_group) : (group += 1) {
const byte = try reader.readByte();
const shift = group * 7;
const ov = @shlWithOverflow(@as(U, byte & 0x7f), shift);
if (ov[1] != 0) {
// Overflow is ok so long as the sign bit is set and this is the last byte
if (byte & 0x80 != 0) return error.Overflow;
if (@as(S, @bitCast(ov[0])) >= 0) return error.Overflow;
// and all the overflowed bits are 1
const remaining_shift = @as(u3, @intCast(@typeInfo(U).int.bits - @as(u16, shift)));
const remaining_bits = @as(i8, @bitCast(byte | 0x80)) >> remaining_shift;
if (remaining_bits != -1) return error.Overflow;
} else {
// If we don't overflow and this is the last byte and the number being decoded
// is negative, check that the remaining bits are 1
if ((byte & 0x80 == 0) and (@as(S, @bitCast(ov[0])) < 0)) {
const remaining_shift = @as(u3, @intCast(@typeInfo(U).int.bits - @as(u16, shift)));
const remaining_bits = @as(i8, @bitCast(byte | 0x80)) >> remaining_shift;
if (remaining_bits != -1) return error.Overflow;
}
}
value |= ov[0];
if (byte & 0x80 == 0) {
const needs_sign_ext = group + 1 < max_group;
if (byte & 0x40 != 0 and needs_sign_ext) {
const ones = @as(S, -1);
value |= @as(U, @bitCast(ones)) << (shift + 7);
}
break;
}
} else {
return error.Overflow;
}
const result = @as(S, @bitCast(value));
// Only applies if we extended to i8
if (S != T) {
if (result > std.math.maxInt(T) or result < std.math.minInt(T)) return error.Overflow;
}
return @as(T, @truncate(result));
}
/// Write a single signed integer as signed LEB128 to the given writer.
pub fn writeIleb128(writer: anytype, arg: anytype) !void {
const Arg = @TypeOf(arg);
const Int = switch (Arg) {
comptime_int => std.math.IntFittingRange(-@abs(arg), @abs(arg)),
else => Arg,
};
const Signed = if (@typeInfo(Int).int.bits < 8) i8 else Int;
const Unsigned = std.meta.Int(.unsigned, @typeInfo(Signed).int.bits);
var value: Signed = arg;
while (true) {
const unsigned: Unsigned = @bitCast(value);
const byte: u8 = @truncate(unsigned);
value >>= 6;
if (value == -1 or value == 0) {
try writer.writeByte(byte & 0x7F);
break;
} else {
value >>= 1;
try writer.writeByte(byte | 0x80);
}
}
}
/// This is an "advanced" function. It allows one to use a fixed amount of memory to store a /// This is an "advanced" function. It allows one to use a fixed amount of memory to store a
/// ULEB128. This defeats the entire purpose of using this data encoding; it will no longer use /// ULEB128. This defeats the entire purpose of using this data encoding; it will no longer use
/// fewer bytes to store smaller numbers. The advantage of using a fixed width is that it makes /// fewer bytes to store smaller numbers. The advantage of using a fixed width is that it makes
@ -149,22 +35,26 @@ test writeUnsignedFixed {
{ {
var buf: [4]u8 = undefined; var buf: [4]u8 = undefined;
writeUnsignedFixed(4, &buf, 0); writeUnsignedFixed(4, &buf, 0);
try testing.expect((try test_read_uleb128(u64, &buf)) == 0); var reader: std.Io.Reader = .fixed(&buf);
try testing.expectEqual(0, try reader.takeLeb128(u64));
} }
{ {
var buf: [4]u8 = undefined; var buf: [4]u8 = undefined;
writeUnsignedFixed(4, &buf, 1); writeUnsignedFixed(4, &buf, 1);
try testing.expect((try test_read_uleb128(u64, &buf)) == 1); var reader: std.Io.Reader = .fixed(&buf);
try testing.expectEqual(1, try reader.takeLeb128(u64));
} }
{ {
var buf: [4]u8 = undefined; var buf: [4]u8 = undefined;
writeUnsignedFixed(4, &buf, 1000); writeUnsignedFixed(4, &buf, 1000);
try testing.expect((try test_read_uleb128(u64, &buf)) == 1000); var reader: std.Io.Reader = .fixed(&buf);
try testing.expectEqual(1000, try reader.takeLeb128(u64));
} }
{ {
var buf: [4]u8 = undefined; var buf: [4]u8 = undefined;
writeUnsignedFixed(4, &buf, 10000000); writeUnsignedFixed(4, &buf, 10000000);
try testing.expect((try test_read_uleb128(u64, &buf)) == 10000000); var reader: std.Io.Reader = .fixed(&buf);
try testing.expectEqual(10000000, try reader.takeLeb128(u64));
} }
} }
@ -193,162 +83,43 @@ test writeSignedFixed {
{ {
var buf: [4]u8 = undefined; var buf: [4]u8 = undefined;
writeSignedFixed(4, &buf, 0); writeSignedFixed(4, &buf, 0);
try testing.expect((try test_read_ileb128(i64, &buf)) == 0); var reader: std.Io.Reader = .fixed(&buf);
try testing.expectEqual(0, try reader.takeLeb128(i64));
} }
{ {
var buf: [4]u8 = undefined; var buf: [4]u8 = undefined;
writeSignedFixed(4, &buf, 1); writeSignedFixed(4, &buf, 1);
try testing.expect((try test_read_ileb128(i64, &buf)) == 1); var reader: std.Io.Reader = .fixed(&buf);
try testing.expectEqual(1, try reader.takeLeb128(i64));
} }
{ {
var buf: [4]u8 = undefined; var buf: [4]u8 = undefined;
writeSignedFixed(4, &buf, -1); writeSignedFixed(4, &buf, -1);
try testing.expect((try test_read_ileb128(i64, &buf)) == -1); var reader: std.Io.Reader = .fixed(&buf);
try testing.expectEqual(-1, try reader.takeLeb128(i64));
} }
{ {
var buf: [4]u8 = undefined; var buf: [4]u8 = undefined;
writeSignedFixed(4, &buf, 1000); writeSignedFixed(4, &buf, 1000);
try testing.expect((try test_read_ileb128(i64, &buf)) == 1000); var reader: std.Io.Reader = .fixed(&buf);
try testing.expectEqual(1000, try reader.takeLeb128(i64));
} }
{ {
var buf: [4]u8 = undefined; var buf: [4]u8 = undefined;
writeSignedFixed(4, &buf, -1000); writeSignedFixed(4, &buf, -1000);
try testing.expect((try test_read_ileb128(i64, &buf)) == -1000); var reader: std.Io.Reader = .fixed(&buf);
try testing.expectEqual(-1000, try reader.takeLeb128(i64));
} }
{ {
var buf: [4]u8 = undefined; var buf: [4]u8 = undefined;
writeSignedFixed(4, &buf, -10000000); writeSignedFixed(4, &buf, -10000000);
try testing.expect((try test_read_ileb128(i64, &buf)) == -10000000); var reader: std.Io.Reader = .fixed(&buf);
try testing.expectEqual(-10000000, try reader.takeLeb128(i64));
} }
{ {
var buf: [4]u8 = undefined; var buf: [4]u8 = undefined;
writeSignedFixed(4, &buf, 10000000); writeSignedFixed(4, &buf, 10000000);
try testing.expect((try test_read_ileb128(i64, &buf)) == 10000000); var reader: std.Io.Reader = .fixed(&buf);
try testing.expectEqual(10000000, try reader.takeLeb128(i64));
} }
} }
// tests
fn test_read_stream_ileb128(comptime T: type, encoded: []const u8) !T {
var reader = std.io.fixedBufferStream(encoded);
return try readIleb128(T, reader.reader());
}
fn test_read_stream_uleb128(comptime T: type, encoded: []const u8) !T {
var reader = std.io.fixedBufferStream(encoded);
return try readUleb128(T, reader.reader());
}
fn test_read_ileb128(comptime T: type, encoded: []const u8) !T {
var reader = std.io.fixedBufferStream(encoded);
const v1 = try readIleb128(T, reader.reader());
return v1;
}
fn test_read_uleb128(comptime T: type, encoded: []const u8) !T {
var reader = std.io.fixedBufferStream(encoded);
const v1 = try readUleb128(T, reader.reader());
return v1;
}
fn test_read_ileb128_seq(comptime T: type, comptime N: usize, encoded: []const u8) !void {
var reader = std.io.fixedBufferStream(encoded);
var i: usize = 0;
while (i < N) : (i += 1) {
_ = try readIleb128(T, reader.reader());
}
}
fn test_read_uleb128_seq(comptime T: type, comptime N: usize, encoded: []const u8) !void {
var reader = std.io.fixedBufferStream(encoded);
var i: usize = 0;
while (i < N) : (i += 1) {
_ = try readUleb128(T, reader.reader());
}
}
test "deserialize signed LEB128" {
// Truncated
try testing.expectError(error.EndOfStream, test_read_stream_ileb128(i64, "\x80"));
// Overflow
try testing.expectError(error.Overflow, test_read_ileb128(i8, "\x80\x80\x40"));
try testing.expectError(error.Overflow, test_read_ileb128(i16, "\x80\x80\x80\x40"));
try testing.expectError(error.Overflow, test_read_ileb128(i32, "\x80\x80\x80\x80\x40"));
try testing.expectError(error.Overflow, test_read_ileb128(i64, "\x80\x80\x80\x80\x80\x80\x80\x80\x80\x40"));
try testing.expectError(error.Overflow, test_read_ileb128(i8, "\xff\x7e"));
try testing.expectError(error.Overflow, test_read_ileb128(i32, "\x80\x80\x80\x80\x08"));
try testing.expectError(error.Overflow, test_read_ileb128(i64, "\x80\x80\x80\x80\x80\x80\x80\x80\x80\x01"));
// Decode SLEB128
try testing.expect((try test_read_ileb128(i64, "\x00")) == 0);
try testing.expect((try test_read_ileb128(i64, "\x01")) == 1);
try testing.expect((try test_read_ileb128(i64, "\x3f")) == 63);
try testing.expect((try test_read_ileb128(i64, "\x40")) == -64);
try testing.expect((try test_read_ileb128(i64, "\x41")) == -63);
try testing.expect((try test_read_ileb128(i64, "\x7f")) == -1);
try testing.expect((try test_read_ileb128(i64, "\x80\x01")) == 128);
try testing.expect((try test_read_ileb128(i64, "\x81\x01")) == 129);
try testing.expect((try test_read_ileb128(i64, "\xff\x7e")) == -129);
try testing.expect((try test_read_ileb128(i64, "\x80\x7f")) == -128);
try testing.expect((try test_read_ileb128(i64, "\x81\x7f")) == -127);
try testing.expect((try test_read_ileb128(i64, "\xc0\x00")) == 64);
try testing.expect((try test_read_ileb128(i64, "\xc7\x9f\x7f")) == -12345);
try testing.expect((try test_read_ileb128(i8, "\xff\x7f")) == -1);
try testing.expect((try test_read_ileb128(i16, "\xff\xff\x7f")) == -1);
try testing.expect((try test_read_ileb128(i32, "\xff\xff\xff\xff\x7f")) == -1);
try testing.expect((try test_read_ileb128(i32, "\x80\x80\x80\x80\x78")) == -0x80000000);
try testing.expect((try test_read_ileb128(i64, "\x80\x80\x80\x80\x80\x80\x80\x80\x80\x7f")) == @as(i64, @bitCast(@as(u64, @intCast(0x8000000000000000)))));
try testing.expect((try test_read_ileb128(i64, "\x80\x80\x80\x80\x80\x80\x80\x80\x40")) == -0x4000000000000000);
try testing.expect((try test_read_ileb128(i64, "\x80\x80\x80\x80\x80\x80\x80\x80\x80\x7f")) == -0x8000000000000000);
// Decode unnormalized SLEB128 with extra padding bytes.
try testing.expect((try test_read_ileb128(i64, "\x80\x00")) == 0);
try testing.expect((try test_read_ileb128(i64, "\x80\x80\x00")) == 0);
try testing.expect((try test_read_ileb128(i64, "\xff\x00")) == 0x7f);
try testing.expect((try test_read_ileb128(i64, "\xff\x80\x00")) == 0x7f);
try testing.expect((try test_read_ileb128(i64, "\x80\x81\x00")) == 0x80);
try testing.expect((try test_read_ileb128(i64, "\x80\x81\x80\x00")) == 0x80);
// Decode sequence of SLEB128 values
try test_read_ileb128_seq(i64, 4, "\x81\x01\x3f\x80\x7f\x80\x80\x80\x00");
}
test "deserialize unsigned LEB128" {
// Truncated
try testing.expectError(error.EndOfStream, test_read_stream_uleb128(u64, "\x80"));
// Overflow
try testing.expectError(error.Overflow, test_read_uleb128(u8, "\x80\x02"));
try testing.expectError(error.Overflow, test_read_uleb128(u8, "\x80\x80\x40"));
try testing.expectError(error.Overflow, test_read_uleb128(u16, "\x80\x80\x84"));
try testing.expectError(error.Overflow, test_read_uleb128(u16, "\x80\x80\x80\x40"));
try testing.expectError(error.Overflow, test_read_uleb128(u32, "\x80\x80\x80\x80\x90"));
try testing.expectError(error.Overflow, test_read_uleb128(u32, "\x80\x80\x80\x80\x40"));
try testing.expectError(error.Overflow, test_read_uleb128(u64, "\x80\x80\x80\x80\x80\x80\x80\x80\x80\x40"));
// Decode ULEB128
try testing.expect((try test_read_uleb128(u64, "\x00")) == 0);
try testing.expect((try test_read_uleb128(u64, "\x01")) == 1);
try testing.expect((try test_read_uleb128(u64, "\x3f")) == 63);
try testing.expect((try test_read_uleb128(u64, "\x40")) == 64);
try testing.expect((try test_read_uleb128(u64, "\x7f")) == 0x7f);
try testing.expect((try test_read_uleb128(u64, "\x80\x01")) == 0x80);
try testing.expect((try test_read_uleb128(u64, "\x81\x01")) == 0x81);
try testing.expect((try test_read_uleb128(u64, "\x90\x01")) == 0x90);
try testing.expect((try test_read_uleb128(u64, "\xff\x01")) == 0xff);
try testing.expect((try test_read_uleb128(u64, "\x80\x02")) == 0x100);
try testing.expect((try test_read_uleb128(u64, "\x81\x02")) == 0x101);
try testing.expect((try test_read_uleb128(u64, "\x80\xc1\x80\x80\x10")) == 4294975616);
try testing.expect((try test_read_uleb128(u64, "\x80\x80\x80\x80\x80\x80\x80\x80\x80\x01")) == 0x8000000000000000);
// Decode ULEB128 with extra padding bytes
try testing.expect((try test_read_uleb128(u64, "\x80\x00")) == 0);
try testing.expect((try test_read_uleb128(u64, "\x80\x80\x00")) == 0);
try testing.expect((try test_read_uleb128(u64, "\xff\x00")) == 0x7f);
try testing.expect((try test_read_uleb128(u64, "\xff\x80\x00")) == 0x7f);
try testing.expect((try test_read_uleb128(u64, "\x80\x81\x00")) == 0x80);
try testing.expect((try test_read_uleb128(u64, "\x80\x81\x80\x00")) == 0x80);
// Decode sequence of ULEB128 values
try test_read_uleb128_seq(u64, 4, "\x81\x01\x3f\x80\x7f\x80\x80\x80\x00");
}
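
The cases above pin down the three behaviors a ULEB128 decoder has to get right: truncation surfaces as error.EndOfStream, set bits beyond the target width surface as error.Overflow, and redundant 0x80/0x00 padding groups are accepted. Below is a minimal sketch of such a decoder over the new std.Io.Reader, specialized to u64 for brevity; the function name is hypothetical, this is not the std.leb implementation, and std is assumed to be in scope.

    fn readUleb128Sketch(reader: *std.Io.Reader) !u64 {
        var result: u64 = 0;
        var shift: u7 = 0;
        while (true) {
            const byte = try reader.takeByte(); // truncated input -> error.EndOfStream
            const group: u64 = byte & 0x7f;
            if (shift >= 64) {
                // Past bit 63 only zero padding groups are allowed.
                if (group != 0) return error.Overflow;
            } else {
                // The tenth group may only contribute the top bit of a u64.
                if (shift == 63 and group > 1) return error.Overflow;
                result |= group << @as(u6, @intCast(shift));
            }
            if (byte & 0x80 == 0) return result;
            shift +|= 7;
        }
    }

Feeding it "\x80\x81\x00" yields 0x80 and "\x80\x80\x80\x80\x80\x80\x80\x80\x80\x40" yields error.Overflow, matching the expectations above.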

View File

@ -1,7 +1,6 @@
const std = @import("std"); const std = @import("std");
const builtin = @import("builtin"); const builtin = @import("builtin");
const assert = std.debug.assert; const assert = std.debug.assert;
const io = std.io;
const mem = std.mem; const mem = std.mem;
const meta = std.meta; const meta = std.meta;
const testing = std.testing; const testing = std.testing;

View File

@ -2029,11 +2029,11 @@ pub const Mutable = struct {
r.len = llnormalize(r.limbs[0..length]); r.len = llnormalize(r.limbs[0..length]);
} }
pub fn format(self: Mutable, w: *std.io.Writer) std.io.Writer.Error!void { pub fn format(self: Mutable, w: *std.Io.Writer) std.Io.Writer.Error!void {
return formatNumber(self, w, .{}); return formatNumber(self, w, .{});
} }
pub fn formatNumber(self: Const, w: *std.io.Writer, n: std.fmt.Number) std.io.Writer.Error!void { pub fn formatNumber(self: Const, w: *std.Io.Writer, n: std.fmt.Number) std.Io.Writer.Error!void {
return self.toConst().formatNumber(w, n); return self.toConst().formatNumber(w, n);
} }
}; };
@ -2326,7 +2326,7 @@ pub const Const = struct {
/// this function will fail to print the string, printing "(BigInt)" instead of a number. /// this function will fail to print the string, printing "(BigInt)" instead of a number.
/// This is because the rendering algorithm requires reversing a string, which requires O(N) memory. /// This is because the rendering algorithm requires reversing a string, which requires O(N) memory.
/// See `toString` and `toStringAlloc` for a way to print big integers without failure. /// See `toString` and `toStringAlloc` for a way to print big integers without failure.
pub fn formatNumber(self: Const, w: *std.io.Writer, number: std.fmt.Number) std.io.Writer.Error!void { pub fn formatNumber(self: Const, w: *std.Io.Writer, number: std.fmt.Number) std.Io.Writer.Error!void {
const available_len = 64; const available_len = 64;
if (self.limbs.len > available_len) if (self.limbs.len > available_len)
return w.writeAll("(BigInt)"); return w.writeAll("(BigInt)");
@ -2907,7 +2907,7 @@ pub const Managed = struct {
} }
/// To allow `std.fmt.format` to work with `Managed`. /// To allow `std.fmt.format` to work with `Managed`.
pub fn format(self: Managed, w: *std.io.Writer) std.io.Writer.Error!void { pub fn format(self: Managed, w: *std.Io.Writer) std.Io.Writer.Error!void {
return formatNumber(self, w, .{}); return formatNumber(self, w, .{});
} }
@ -2915,7 +2915,7 @@ pub const Managed = struct {
/// this function will fail to print the string, printing "(BigInt)" instead of a number. /// this function will fail to print the string, printing "(BigInt)" instead of a number.
/// This is because the rendering algorithm requires reversing a string, which requires O(N) memory. /// This is because the rendering algorithm requires reversing a string, which requires O(N) memory.
/// See `toString` and `toStringAlloc` for a way to print big integers without failure. /// See `toString` and `toStringAlloc` for a way to print big integers without failure.
pub fn formatNumber(self: Managed, w: *std.io.Writer, n: std.fmt.Number) std.io.Writer.Error!void { pub fn formatNumber(self: Managed, w: *std.Io.Writer, n: std.fmt.Number) std.Io.Writer.Error!void {
return self.toConst().formatNumber(w, n); return self.toConst().formatNumber(w, n);
} }
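
With format and formatNumber now taking *std.Io.Writer, rendering a big integer into a stack buffer means handing them a fixed writer. A minimal sketch, assuming `a` is an initialized std.math.big.int.Managed whose decimal form fits in the buffer:

    var render_buf: [128]u8 = undefined;
    var w: std.Io.Writer = .fixed(&render_buf);
    try a.format(&w); // equivalent to a.formatNumber(&w, .{}); a too-small buffer fails with error.WriteFailed
    const digits = w.buffered(); // the rendered decimal text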

View File

@ -106,7 +106,7 @@ pub const Guid = extern struct {
node: [6]u8, node: [6]u8,
/// Format GUID into hexadecimal lowercase xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx format /// Format GUID into hexadecimal lowercase xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx format
pub fn format(self: Guid, writer: *std.io.Writer) std.io.Writer.Error!void { pub fn format(self: Guid, writer: *std.Io.Writer) std.Io.Writer.Error!void {
const time_low = @byteSwap(self.time_low); const time_low = @byteSwap(self.time_low);
const time_mid = @byteSwap(self.time_mid); const time_mid = @byteSwap(self.time_mid);
const time_high_and_version = @byteSwap(self.time_high_and_version); const time_high_and_version = @byteSwap(self.time_high_and_version);

View File

@ -1,6 +1,5 @@
const std = @import("std"); const std = @import("std");
const uefi = std.os.uefi; const uefi = std.os.uefi;
const io = std.io;
const Guid = uefi.Guid; const Guid = uefi.Guid;
const Time = uefi.Time; const Time = uefi.Time;
const Status = uefi.Status; const Status = uefi.Status;

View File

@ -90,7 +90,7 @@ pub const MemoryType = enum(u32) {
return @truncate(as_int - vendor_start); return @truncate(as_int - vendor_start);
} }
pub fn format(self: MemoryType, w: *std.io.Writer) std.io.Writer.Error!void { pub fn format(self: MemoryType, w: *std.Io.Writer) std.Io.Writer.Error!void {
if (self.toOem()) |oemval| if (self.toOem()) |oemval|
try w.print("OEM({X})", .{oemval}) try w.print("OEM({X})", .{oemval})
else if (self.toVendor()) |vendorval| else if (self.toVendor()) |vendorval|
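
MemoryType.format here, like the Guid and BuildId formatters elsewhere in this commit, now has the `pub fn format(self, w: *std.Io.Writer) std.Io.Writer.Error!void` shape, which is what the `{f}` format specifier dispatches to in the other hunks. A minimal sketch with a hypothetical type (assumes std is imported):

    const Version = struct {
        major: u32,
        minor: u32,

        pub fn format(v: Version, w: *std.Io.Writer) std.Io.Writer.Error!void {
            try w.print("{d}.{d}", .{ v.major, v.minor });
        }
    };

    test "format sketch" {
        var buf: [16]u8 = undefined;
        var w: std.Io.Writer = .fixed(&buf);
        try w.print("{f}", .{Version{ .major = 1, .minor = 2 }});
        try std.testing.expectEqualStrings("1.2", w.buffered());
    }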

View File

@ -8,7 +8,6 @@
//! documentation and/or contributors. //! documentation and/or contributors.
const std = @import("std.zig"); const std = @import("std.zig");
const io = std.io;
const math = std.math; const math = std.math;
const mem = std.mem; const mem = std.mem;
const coff = std.coff; const coff = std.coff;

View File

@ -671,8 +671,8 @@ fn getRandomBytesDevURandom(buf: []u8) !void {
} }
const file: fs.File = .{ .handle = fd }; const file: fs.File = .{ .handle = fd };
const stream = file.deprecatedReader(); var file_reader = file.readerStreaming(&.{});
stream.readNoEof(buf) catch return error.Unexpected; file_reader.readSliceAll(buf) catch return error.Unexpected;
} }
/// Causes abnormal process termination. /// Causes abnormal process termination.
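
The /dev/urandom path above switches from the deprecated reader to a streaming file reader plus readSliceAll. The same pattern in isolation, as a sketch; `file` stands for any open std.fs.File and the empty slice means no intermediate buffer:

    var out: [32]u8 = undefined;
    var file_reader = file.readerStreaming(&.{});
    file_reader.readSliceAll(&out) catch return error.Unexpected; // any failure, including early EOF, is collapsed here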

View File

@ -4,7 +4,6 @@ const testing = std.testing;
const expect = testing.expect; const expect = testing.expect;
const expectEqual = testing.expectEqual; const expectEqual = testing.expectEqual;
const expectError = testing.expectError; const expectError = testing.expectError;
const io = std.io;
const fs = std.fs; const fs = std.fs;
const mem = std.mem; const mem = std.mem;
const elf = std.elf; const elf = std.elf;
@ -706,12 +705,11 @@ test "mmap" {
); );
defer posix.munmap(data); defer posix.munmap(data);
var mem_stream = io.fixedBufferStream(data); var stream: std.Io.Reader = .fixed(data);
const stream = mem_stream.reader();
var i: u32 = 0; var i: u32 = 0;
while (i < alloc_size / @sizeOf(u32)) : (i += 1) { while (i < alloc_size / @sizeOf(u32)) : (i += 1) {
try testing.expectEqual(i, try stream.readInt(u32, .little)); try testing.expectEqual(i, try stream.takeInt(u32, .little));
} }
} }
@ -730,12 +728,11 @@ test "mmap" {
); );
defer posix.munmap(data); defer posix.munmap(data);
var mem_stream = io.fixedBufferStream(data); var stream: std.Io.Reader = .fixed(data);
const stream = mem_stream.reader();
var i: u32 = alloc_size / 2 / @sizeOf(u32); var i: u32 = alloc_size / 2 / @sizeOf(u32);
while (i < alloc_size / @sizeOf(u32)) : (i += 1) { while (i < alloc_size / @sizeOf(u32)) : (i += 1) {
try testing.expectEqual(i, try stream.readInt(u32, .little)); try testing.expectEqual(i, try stream.takeInt(u32, .little));
} }
} }
} }
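
Both mmap tests now construct a fixed std.Io.Reader directly over the mapped slice and pull integers with takeInt. Reduced to its essentials the pattern looks like this; `data` is a hypothetical []const u8 holding an increasing little-endian u32 counter, as in the tests above:

    var r: std.Io.Reader = .fixed(data);
    var i: u32 = 0;
    while (i < data.len / @sizeOf(u32)) : (i += 1) {
        try std.testing.expectEqual(i, try r.takeInt(u32, .little)); // error.EndOfStream past the end
    }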

View File

@ -1552,103 +1552,108 @@ pub fn getUserInfo(name: []const u8) !UserInfo {
pub fn posixGetUserInfo(name: []const u8) !UserInfo { pub fn posixGetUserInfo(name: []const u8) !UserInfo {
const file = try std.fs.openFileAbsolute("/etc/passwd", .{}); const file = try std.fs.openFileAbsolute("/etc/passwd", .{});
defer file.close(); defer file.close();
var buffer: [4096]u8 = undefined;
var file_reader = file.reader(&buffer);
return posixGetUserInfoPasswdStream(name, &file_reader.interface) catch |err| switch (err) {
error.ReadFailed => return file_reader.err.?,
error.EndOfStream => return error.UserNotFound,
error.CorruptPasswordFile => return error.CorruptPasswordFile,
};
}
const reader = file.deprecatedReader(); fn posixGetUserInfoPasswdStream(name: []const u8, reader: *std.Io.Reader) !UserInfo {
const State = enum { const State = enum {
Start, start,
WaitForNextLine, wait_for_next_line,
SkipPassword, skip_password,
ReadUserId, read_user_id,
ReadGroupId, read_group_id,
}; };
var buf: [std.heap.page_size_min]u8 = undefined;
var name_index: usize = 0; var name_index: usize = 0;
var state = State.Start;
var uid: posix.uid_t = 0; var uid: posix.uid_t = 0;
var gid: posix.gid_t = 0; var gid: posix.gid_t = 0;
while (true) { sw: switch (State.start) {
const amt_read = try reader.read(buf[0..]); .start => switch (try reader.takeByte()) {
for (buf[0..amt_read]) |byte| { ':' => {
switch (state) { if (name_index == name.len) {
.Start => switch (byte) { continue :sw .skip_password;
':' => { } else {
state = if (name_index == name.len) State.SkipPassword else State.WaitForNextLine; continue :sw .wait_for_next_line;
}, }
'\n' => return error.CorruptPasswordFile, },
else => { '\n' => return error.CorruptPasswordFile,
if (name_index == name.len or name[name_index] != byte) { else => |byte| {
state = .WaitForNextLine; if (name_index == name.len or name[name_index] != byte) {
} continue :sw .wait_for_next_line;
name_index += 1; }
}, name_index += 1;
}, continue :sw .start;
.WaitForNextLine => switch (byte) { },
'\n' => { },
name_index = 0; .wait_for_next_line => switch (try reader.takeByte()) {
state = .Start; '\n' => {
}, name_index = 0;
else => continue, continue :sw .start;
}, },
.SkipPassword => switch (byte) { else => continue :sw .wait_for_next_line,
'\n' => return error.CorruptPasswordFile, },
':' => { .skip_password => switch (try reader.takeByte()) {
state = .ReadUserId; '\n' => return error.CorruptPasswordFile,
}, ':' => {
else => continue, continue :sw .read_user_id;
}, },
.ReadUserId => switch (byte) { else => continue :sw .skip_password,
':' => { },
state = .ReadGroupId; .read_user_id => switch (try reader.takeByte()) {
}, ':' => {
'\n' => return error.CorruptPasswordFile, continue :sw .read_group_id;
else => { },
const digit = switch (byte) { '\n' => return error.CorruptPasswordFile,
'0'...'9' => byte - '0', else => |byte| {
else => return error.CorruptPasswordFile, const digit = switch (byte) {
}; '0'...'9' => byte - '0',
{ else => return error.CorruptPasswordFile,
const ov = @mulWithOverflow(uid, 10); };
if (ov[1] != 0) return error.CorruptPasswordFile; {
uid = ov[0]; const ov = @mulWithOverflow(uid, 10);
} if (ov[1] != 0) return error.CorruptPasswordFile;
{ uid = ov[0];
const ov = @addWithOverflow(uid, digit); }
if (ov[1] != 0) return error.CorruptPasswordFile; {
uid = ov[0]; const ov = @addWithOverflow(uid, digit);
} if (ov[1] != 0) return error.CorruptPasswordFile;
}, uid = ov[0];
}, }
.ReadGroupId => switch (byte) { continue :sw .read_user_id;
'\n', ':' => { },
return UserInfo{ },
.uid = uid, .read_group_id => switch (try reader.takeByte()) {
.gid = gid, '\n', ':' => return .{
}; .uid = uid,
}, .gid = gid,
else => { },
const digit = switch (byte) { else => |byte| {
'0'...'9' => byte - '0', const digit = switch (byte) {
else => return error.CorruptPasswordFile, '0'...'9' => byte - '0',
}; else => return error.CorruptPasswordFile,
{ };
const ov = @mulWithOverflow(gid, 10); {
if (ov[1] != 0) return error.CorruptPasswordFile; const ov = @mulWithOverflow(gid, 10);
gid = ov[0]; if (ov[1] != 0) return error.CorruptPasswordFile;
} gid = ov[0];
{ }
const ov = @addWithOverflow(gid, digit); {
if (ov[1] != 0) return error.CorruptPasswordFile; const ov = @addWithOverflow(gid, digit);
gid = ov[0]; if (ov[1] != 0) return error.CorruptPasswordFile;
} gid = ov[0];
}, }
}, continue :sw .read_group_id;
} },
} },
if (amt_read < buf.len) return error.UserNotFound;
} }
comptime unreachable;
} }
pub fn getBaseAddress() usize { pub fn getBaseAddress() usize {
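
The rewritten parser above drops the manual read buffer and drives the state machine with Zig's labeled switch: `continue :sw .some_state` re-enters the switch with a new operand, and reader.takeByte handles buffering. A stand-alone sketch of just that control-flow shape, with hypothetical names:

    fn skipToColon(reader: *std.Io.Reader) !void {
        const State = enum { scan, done };
        sw: switch (State.scan) {
            .scan => switch (try reader.takeByte()) {
                ':' => continue :sw .done,
                else => continue :sw .scan,
            },
            .done => return,
        }
    }

Because every prong either continues or returns, no outer while loop is needed, which is exactly what lets the passwd parser shed its buffer-and-index bookkeeping.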

View File

@ -78,8 +78,6 @@ pub const hash = @import("hash.zig");
pub const hash_map = @import("hash_map.zig"); pub const hash_map = @import("hash_map.zig");
pub const heap = @import("heap.zig"); pub const heap = @import("heap.zig");
pub const http = @import("http.zig"); pub const http = @import("http.zig");
/// Deprecated
pub const io = Io;
pub const json = @import("json.zig"); pub const json = @import("json.zig");
pub const leb = @import("leb128.zig"); pub const leb = @import("leb128.zig");
pub const log = @import("log.zig"); pub const log = @import("log.zig");

View File

@ -336,7 +336,7 @@ fn testCase(case: Case) !void {
var file_name_buffer: [std.fs.max_path_bytes]u8 = undefined; var file_name_buffer: [std.fs.max_path_bytes]u8 = undefined;
var link_name_buffer: [std.fs.max_path_bytes]u8 = undefined; var link_name_buffer: [std.fs.max_path_bytes]u8 = undefined;
var br: std.io.Reader = .fixed(case.data); var br: std.Io.Reader = .fixed(case.data);
var it: tar.Iterator = .init(&br, .{ var it: tar.Iterator = .init(&br, .{
.file_name_buffer = &file_name_buffer, .file_name_buffer = &file_name_buffer,
.link_name_buffer = &link_name_buffer, .link_name_buffer = &link_name_buffer,
@ -387,7 +387,7 @@ fn testLongNameCase(case: Case) !void {
var min_file_name_buffer: [256]u8 = undefined; var min_file_name_buffer: [256]u8 = undefined;
var min_link_name_buffer: [100]u8 = undefined; var min_link_name_buffer: [100]u8 = undefined;
var br: std.io.Reader = .fixed(case.data); var br: std.Io.Reader = .fixed(case.data);
var iter: tar.Iterator = .init(&br, .{ var iter: tar.Iterator = .init(&br, .{
.file_name_buffer = &min_file_name_buffer, .file_name_buffer = &min_file_name_buffer,
.link_name_buffer = &min_link_name_buffer, .link_name_buffer = &min_link_name_buffer,
@ -407,7 +407,7 @@ test "insufficient buffer in Header name filed" {
var min_file_name_buffer: [9]u8 = undefined; var min_file_name_buffer: [9]u8 = undefined;
var min_link_name_buffer: [100]u8 = undefined; var min_link_name_buffer: [100]u8 = undefined;
var br: std.io.Reader = .fixed(gnu_case.data); var br: std.Io.Reader = .fixed(gnu_case.data);
var iter: tar.Iterator = .init(&br, .{ var iter: tar.Iterator = .init(&br, .{
.file_name_buffer = &min_file_name_buffer, .file_name_buffer = &min_file_name_buffer,
.link_name_buffer = &min_link_name_buffer, .link_name_buffer = &min_link_name_buffer,
@ -462,7 +462,7 @@ test "should not overwrite existing file" {
// This ensures that file is not overwritten. // This ensures that file is not overwritten.
// //
const data = @embedFile("testdata/overwrite_file.tar"); const data = @embedFile("testdata/overwrite_file.tar");
var r: std.io.Reader = .fixed(data); var r: std.Io.Reader = .fixed(data);
// Unpack with strip_components = 1 should fail // Unpack with strip_components = 1 should fail
var root = std.testing.tmpDir(.{}); var root = std.testing.tmpDir(.{});
@ -490,7 +490,7 @@ test "case sensitivity" {
// 18089/alacritty/Darkermatrix.yml // 18089/alacritty/Darkermatrix.yml
// //
const data = @embedFile("testdata/18089.tar"); const data = @embedFile("testdata/18089.tar");
var r: std.io.Reader = .fixed(data); var r: std.Io.Reader = .fixed(data);
var root = std.testing.tmpDir(.{}); var root = std.testing.tmpDir(.{});
defer root.cleanup(); defer root.cleanup();

View File

@ -358,7 +358,7 @@ test expectApproxEqRel {
/// This function is intended to be used only in tests. When the two slices are not /// This function is intended to be used only in tests. When the two slices are not
/// equal, prints diagnostics to stderr to show exactly how they are not equal (with /// equal, prints diagnostics to stderr to show exactly how they are not equal (with
/// the differences highlighted in red), then returns a test failure error. /// the differences highlighted in red), then returns a test failure error.
/// The colorized output is optional and controlled by the return of `std.io.tty.detectConfig()`. /// The colorized output is optional and controlled by the return of `std.Io.tty.detectConfig()`.
/// If your inputs are UTF-8 encoded strings, consider calling `expectEqualStrings` instead. /// If your inputs are UTF-8 encoded strings, consider calling `expectEqualStrings` instead.
pub fn expectEqualSlices(comptime T: type, expected: []const T, actual: []const T) !void { pub fn expectEqualSlices(comptime T: type, expected: []const T, actual: []const T) !void {
const diff_index: usize = diff_index: { const diff_index: usize = diff_index: {
@ -381,7 +381,7 @@ fn failEqualSlices(
expected: []const T, expected: []const T,
actual: []const T, actual: []const T,
diff_index: usize, diff_index: usize,
w: *std.io.Writer, w: *std.Io.Writer,
) !void { ) !void {
try w.print("slices differ. first difference occurs at index {d} (0x{X})\n", .{ diff_index, diff_index }); try w.print("slices differ. first difference occurs at index {d} (0x{X})\n", .{ diff_index, diff_index });
@ -401,7 +401,7 @@ fn failEqualSlices(
const actual_window = actual[window_start..@min(actual.len, window_start + max_window_size)]; const actual_window = actual[window_start..@min(actual.len, window_start + max_window_size)];
const actual_truncated = window_start + actual_window.len < actual.len; const actual_truncated = window_start + actual_window.len < actual.len;
const ttyconf = std.io.tty.detectConfig(.stderr()); const ttyconf = std.Io.tty.detectConfig(.stderr());
var differ = if (T == u8) BytesDiffer{ var differ = if (T == u8) BytesDiffer{
.expected = expected_window, .expected = expected_window,
.actual = actual_window, .actual = actual_window,
@ -467,11 +467,11 @@ fn SliceDiffer(comptime T: type) type {
start_index: usize, start_index: usize,
expected: []const T, expected: []const T,
actual: []const T, actual: []const T,
ttyconf: std.io.tty.Config, ttyconf: std.Io.tty.Config,
const Self = @This(); const Self = @This();
pub fn write(self: Self, writer: *std.io.Writer) !void { pub fn write(self: Self, writer: *std.Io.Writer) !void {
for (self.expected, 0..) |value, i| { for (self.expected, 0..) |value, i| {
const full_index = self.start_index + i; const full_index = self.start_index + i;
const diff = if (i < self.actual.len) !std.meta.eql(self.actual[i], value) else true; const diff = if (i < self.actual.len) !std.meta.eql(self.actual[i], value) else true;
@ -490,9 +490,9 @@ fn SliceDiffer(comptime T: type) type {
const BytesDiffer = struct { const BytesDiffer = struct {
expected: []const u8, expected: []const u8,
actual: []const u8, actual: []const u8,
ttyconf: std.io.tty.Config, ttyconf: std.Io.tty.Config,
pub fn write(self: BytesDiffer, writer: *std.io.Writer) !void { pub fn write(self: BytesDiffer, writer: *std.Io.Writer) !void {
var expected_iterator = std.mem.window(u8, self.expected, 16, 16); var expected_iterator = std.mem.window(u8, self.expected, 16, 16);
var row: usize = 0; var row: usize = 0;
while (expected_iterator.next()) |chunk| { while (expected_iterator.next()) |chunk| {
@ -538,7 +538,7 @@ const BytesDiffer = struct {
} }
} }
fn writeDiff(self: BytesDiffer, writer: *std.io.Writer, comptime fmt: []const u8, args: anytype, diff: bool) !void { fn writeDiff(self: BytesDiffer, writer: *std.Io.Writer, comptime fmt: []const u8, args: anytype, diff: bool) !void {
if (diff) try self.ttyconf.setColor(writer, .red); if (diff) try self.ttyconf.setColor(writer, .red);
try writer.print(fmt, args); try writer.print(fmt, args);
if (diff) try self.ttyconf.setColor(writer, .reset); if (diff) try self.ttyconf.setColor(writer, .reset);
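
expectEqualSlices itself is called the same way as before; only the diagnostic rendering moved to std.Io.Writer and std.Io.tty. A minimal usage sketch:

    test "expectEqualSlices sketch" {
        const expected = [_]u8{ 0xde, 0xad, 0xbe, 0xef };
        const actual = [_]u8{ 0xde, 0xad, 0xbe, 0xef };
        // On mismatch this prints the windowed hex diff described above and returns an error.
        try std.testing.expectEqualSlices(u8, &expected, &actual);
    }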

View File

@ -804,7 +804,7 @@ fn testDecode(bytes: []const u8) !u21 {
/// Ill-formed UTF-8 byte sequences are replaced by the replacement character (U+FFFD) /// Ill-formed UTF-8 byte sequences are replaced by the replacement character (U+FFFD)
/// according to "U+FFFD Substitution of Maximal Subparts" from Chapter 3 of /// according to "U+FFFD Substitution of Maximal Subparts" from Chapter 3 of
/// the Unicode standard, and as specified by https://encoding.spec.whatwg.org/#utf-8-decoder /// the Unicode standard, and as specified by https://encoding.spec.whatwg.org/#utf-8-decoder
fn formatUtf8(utf8: []const u8, writer: *std.io.Writer) std.io.Writer.Error!void { fn formatUtf8(utf8: []const u8, writer: *std.Io.Writer) std.Io.Writer.Error!void {
var buf: [300]u8 = undefined; // just an arbitrary size var buf: [300]u8 = undefined; // just an arbitrary size
var u8len: usize = 0; var u8len: usize = 0;
@ -1464,7 +1464,7 @@ test calcWtf16LeLen {
/// Print the given `utf16le` string, encoded as UTF-8 bytes. /// Print the given `utf16le` string, encoded as UTF-8 bytes.
/// Unpaired surrogates are replaced by the replacement character (U+FFFD). /// Unpaired surrogates are replaced by the replacement character (U+FFFD).
fn formatUtf16Le(utf16le: []const u16, writer: *std.io.Writer) std.io.Writer.Error!void { fn formatUtf16Le(utf16le: []const u16, writer: *std.Io.Writer) std.Io.Writer.Error!void {
var buf: [300]u8 = undefined; // just an arbitrary size var buf: [300]u8 = undefined; // just an arbitrary size
var it = Utf16LeIterator.init(utf16le); var it = Utf16LeIterator.init(utf16le);
var u8len: usize = 0; var u8len: usize = 0;
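
formatUtf16Le walks the code units with Utf16LeIterator and re-encodes into a small UTF-8 buffer. A stripped-down sketch of that loop, without the U+FFFD replacement the real formatter performs; it assumes Utf16LeIterator.nextCodepoint and std.unicode.utf8Encode keep their usual signatures:

    fn utf16ToUtf8Sketch(utf16le: []const u16, out: []u8) ![]u8 {
        var it = std.unicode.Utf16LeIterator.init(utf16le);
        var len: usize = 0;
        while (try it.nextCodepoint()) |cp| {
            len += try std.unicode.utf8Encode(cp, out[len..]);
        }
        return out[0..len];
    }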

View File

@ -51,9 +51,9 @@ pub const Color = enum {
/// Assume stderr is a terminal. /// Assume stderr is a terminal.
on, on,
pub fn get_tty_conf(color: Color) std.io.tty.Config { pub fn get_tty_conf(color: Color) std.Io.tty.Config {
return switch (color) { return switch (color) {
.auto => std.io.tty.detectConfig(std.fs.File.stderr()), .auto => std.Io.tty.detectConfig(std.fs.File.stderr()),
.on => .escape_codes, .on => .escape_codes,
.off => .no_color, .off => .no_color,
}; };
@ -322,7 +322,7 @@ pub const BuildId = union(enum) {
try std.testing.expectError(error.InvalidBuildIdStyle, parse("yaddaxxx")); try std.testing.expectError(error.InvalidBuildIdStyle, parse("yaddaxxx"));
} }
pub fn format(id: BuildId, writer: *std.io.Writer) std.io.Writer.Error!void { pub fn format(id: BuildId, writer: *std.Io.Writer) std.Io.Writer.Error!void {
switch (id) { switch (id) {
.none, .fast, .uuid, .sha1, .md5 => { .none, .fast, .uuid, .sha1, .md5 => {
try writer.writeAll(@tagName(id)); try writer.writeAll(@tagName(id));
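
get_tty_conf now hands back a std.Io.tty.Config, and setColor takes the new writer type. A sketch of wiring it to stderr; the buffered File.writer(&buf), its .interface field, and flush() follow the File API used elsewhere in this commit but are assumptions here:

    var buf: [256]u8 = undefined;
    var stderr_writer = std.fs.File.stderr().writer(&buf);
    const w = &stderr_writer.interface;
    const ttyconf = std.Io.tty.detectConfig(std.fs.File.stderr());
    try ttyconf.setColor(w, .red);
    try w.writeAll("error: demo message\n");
    try ttyconf.setColor(w, .reset);
    try w.flush();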

View File

@ -204,7 +204,7 @@ pub fn parse(gpa: Allocator, source: [:0]const u8, mode: Mode) Allocator.Error!A
/// `gpa` is used for allocating the resulting formatted source code. /// `gpa` is used for allocating the resulting formatted source code.
/// Caller owns the returned slice of bytes, allocated with `gpa`. /// Caller owns the returned slice of bytes, allocated with `gpa`.
pub fn renderAlloc(tree: Ast, gpa: Allocator) error{OutOfMemory}![]u8 { pub fn renderAlloc(tree: Ast, gpa: Allocator) error{OutOfMemory}![]u8 {
var aw: std.io.Writer.Allocating = .init(gpa); var aw: std.Io.Writer.Allocating = .init(gpa);
defer aw.deinit(); defer aw.deinit();
render(tree, gpa, &aw.writer, .{}) catch |err| switch (err) { render(tree, gpa, &aw.writer, .{}) catch |err| switch (err) {
error.WriteFailed, error.OutOfMemory => return error.OutOfMemory, error.WriteFailed, error.OutOfMemory => return error.OutOfMemory,
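
renderAlloc, AstGen, and ZonGen all use std.Io.Writer.Allocating to grow output in heap memory, mapping error.WriteFailed back to error.OutOfMemory. A minimal sketch of the pattern; the toArrayList hand-off mirrors its use in the AstGen and ZonGen hunks below, and the helper name is hypothetical:

    fn renderGreeting(gpa: std.mem.Allocator, name: []const u8) error{OutOfMemory}!std.ArrayListUnmanaged(u8) {
        var aw: std.Io.Writer.Allocating = .init(gpa);
        errdefer aw.deinit();
        aw.writer.print("hello, {s}\n", .{name}) catch |err| switch (err) {
            error.WriteFailed => return error.OutOfMemory, // allocation failure surfaces as WriteFailed
        };
        return aw.toArrayList(); // caller owns the accumulated bytes
    }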

View File

@ -6,7 +6,7 @@ const meta = std.meta;
const Ast = std.zig.Ast; const Ast = std.zig.Ast;
const Token = std.zig.Token; const Token = std.zig.Token;
const primitives = std.zig.primitives; const primitives = std.zig.primitives;
const Writer = std.io.Writer; const Writer = std.Io.Writer;
const Render = @This(); const Render = @This();
@ -2169,7 +2169,7 @@ fn renderArrayInit(
const section_exprs = row_exprs[0..section_end]; const section_exprs = row_exprs[0..section_end];
var sub_expr_buffer: std.io.Writer.Allocating = .init(gpa); var sub_expr_buffer: Writer.Allocating = .init(gpa);
defer sub_expr_buffer.deinit(); defer sub_expr_buffer.deinit();
const sub_expr_buffer_starts = try gpa.alloc(usize, section_exprs.len + 1); const sub_expr_buffer_starts = try gpa.alloc(usize, section_exprs.len + 1);

View File

@ -11339,7 +11339,7 @@ fn parseStrLit(
) InnerError!void { ) InnerError!void {
const raw_string = bytes[offset..]; const raw_string = bytes[offset..];
const result = r: { const result = r: {
var aw: std.io.Writer.Allocating = .fromArrayList(astgen.gpa, buf); var aw: std.Io.Writer.Allocating = .fromArrayList(astgen.gpa, buf);
defer buf.* = aw.toArrayList(); defer buf.* = aw.toArrayList();
break :r std.zig.string_literal.parseWrite(&aw.writer, raw_string) catch |err| switch (err) { break :r std.zig.string_literal.parseWrite(&aw.writer, raw_string) catch |err| switch (err) {
error.WriteFailed => return error.OutOfMemory, error.WriteFailed => return error.OutOfMemory,
@ -13785,7 +13785,7 @@ fn lowerAstErrors(astgen: *AstGen) error{OutOfMemory}!void {
const tree = astgen.tree; const tree = astgen.tree;
assert(tree.errors.len > 0); assert(tree.errors.len > 0);
var msg: std.io.Writer.Allocating = .init(gpa); var msg: std.Io.Writer.Allocating = .init(gpa);
defer msg.deinit(); defer msg.deinit();
const msg_w = &msg.writer; const msg_w = &msg.writer;

View File

@ -11,7 +11,7 @@ const std = @import("std");
const ErrorBundle = @This(); const ErrorBundle = @This();
const Allocator = std.mem.Allocator; const Allocator = std.mem.Allocator;
const assert = std.debug.assert; const assert = std.debug.assert;
const Writer = std.io.Writer; const Writer = std.Io.Writer;
string_bytes: []const u8, string_bytes: []const u8,
/// The first thing in this array is an `ErrorMessageList`. /// The first thing in this array is an `ErrorMessageList`.
@ -156,7 +156,7 @@ pub fn nullTerminatedString(eb: ErrorBundle, index: String) [:0]const u8 {
} }
pub const RenderOptions = struct { pub const RenderOptions = struct {
ttyconf: std.io.tty.Config, ttyconf: std.Io.tty.Config,
include_reference_trace: bool = true, include_reference_trace: bool = true,
include_source_line: bool = true, include_source_line: bool = true,
include_log_text: bool = true, include_log_text: bool = true,
@ -190,14 +190,14 @@ fn renderErrorMessageToWriter(
err_msg_index: MessageIndex, err_msg_index: MessageIndex,
w: *Writer, w: *Writer,
kind: []const u8, kind: []const u8,
color: std.io.tty.Color, color: std.Io.tty.Color,
indent: usize, indent: usize,
) (Writer.Error || std.posix.UnexpectedError)!void { ) (Writer.Error || std.posix.UnexpectedError)!void {
const ttyconf = options.ttyconf; const ttyconf = options.ttyconf;
const err_msg = eb.getErrorMessage(err_msg_index); const err_msg = eb.getErrorMessage(err_msg_index);
if (err_msg.src_loc != .none) { if (err_msg.src_loc != .none) {
const src = eb.extraData(SourceLocation, @intFromEnum(err_msg.src_loc)); const src = eb.extraData(SourceLocation, @intFromEnum(err_msg.src_loc));
var prefix: std.io.Writer.Discarding = .init(&.{}); var prefix: Writer.Discarding = .init(&.{});
try w.splatByteAll(' ', indent); try w.splatByteAll(' ', indent);
prefix.count += indent; prefix.count += indent;
try ttyconf.setColor(w, .bold); try ttyconf.setColor(w, .bold);
@ -794,9 +794,9 @@ pub const Wip = struct {
}; };
defer bundle.deinit(std.testing.allocator); defer bundle.deinit(std.testing.allocator);
const ttyconf: std.io.tty.Config = .no_color; const ttyconf: std.Io.tty.Config = .no_color;
var bundle_buf: std.io.Writer.Allocating = .init(std.testing.allocator); var bundle_buf: Writer.Allocating = .init(std.testing.allocator);
const bundle_bw = &bundle_buf.interface; const bundle_bw = &bundle_buf.interface;
defer bundle_buf.deinit(); defer bundle_buf.deinit();
try bundle.renderToWriter(.{ .ttyconf = ttyconf }, bundle_bw); try bundle.renderToWriter(.{ .ttyconf = ttyconf }, bundle_bw);
@ -812,7 +812,7 @@ pub const Wip = struct {
}; };
defer copy.deinit(std.testing.allocator); defer copy.deinit(std.testing.allocator);
var copy_buf: std.io.Writer.Allocating = .init(std.testing.allocator); var copy_buf: Writer.Allocating = .init(std.testing.allocator);
const copy_bw = &copy_buf.interface; const copy_bw = &copy_buf.interface;
defer copy_buf.deinit(); defer copy_buf.deinit();
try copy.renderToWriter(.{ .ttyconf = ttyconf }, copy_bw); try copy.renderToWriter(.{ .ttyconf = ttyconf }, copy_bw);

View File

@ -9,7 +9,7 @@ const StringIndexContext = std.hash_map.StringIndexContext;
const ZonGen = @This(); const ZonGen = @This();
const Zoir = @import("Zoir.zig"); const Zoir = @import("Zoir.zig");
const Ast = @import("Ast.zig"); const Ast = @import("Ast.zig");
const Writer = std.io.Writer; const Writer = std.Io.Writer;
gpa: Allocator, gpa: Allocator,
tree: Ast, tree: Ast,
@ -472,7 +472,7 @@ fn appendIdentStr(zg: *ZonGen, ident_token: Ast.TokenIndex) error{ OutOfMemory,
const raw_string = zg.tree.tokenSlice(ident_token)[offset..]; const raw_string = zg.tree.tokenSlice(ident_token)[offset..];
try zg.string_bytes.ensureUnusedCapacity(gpa, raw_string.len); try zg.string_bytes.ensureUnusedCapacity(gpa, raw_string.len);
const result = r: { const result = r: {
var aw: std.io.Writer.Allocating = .fromArrayList(gpa, &zg.string_bytes); var aw: Writer.Allocating = .fromArrayList(gpa, &zg.string_bytes);
defer zg.string_bytes = aw.toArrayList(); defer zg.string_bytes = aw.toArrayList();
break :r std.zig.string_literal.parseWrite(&aw.writer, raw_string) catch |err| switch (err) { break :r std.zig.string_literal.parseWrite(&aw.writer, raw_string) catch |err| switch (err) {
error.WriteFailed => return error.OutOfMemory, error.WriteFailed => return error.OutOfMemory,
@ -570,7 +570,7 @@ fn strLitAsString(zg: *ZonGen, str_node: Ast.Node.Index) error{ OutOfMemory, Bad
const size_hint = strLitSizeHint(zg.tree, str_node); const size_hint = strLitSizeHint(zg.tree, str_node);
try string_bytes.ensureUnusedCapacity(gpa, size_hint); try string_bytes.ensureUnusedCapacity(gpa, size_hint);
const result = r: { const result = r: {
var aw: std.io.Writer.Allocating = .fromArrayList(gpa, &zg.string_bytes); var aw: Writer.Allocating = .fromArrayList(gpa, &zg.string_bytes);
defer zg.string_bytes = aw.toArrayList(); defer zg.string_bytes = aw.toArrayList();
break :r parseStrLit(zg.tree, str_node, &aw.writer) catch |err| switch (err) { break :r parseStrLit(zg.tree, str_node, &aw.writer) catch |err| switch (err) {
error.WriteFailed => return error.OutOfMemory, error.WriteFailed => return error.OutOfMemory,
@ -885,7 +885,7 @@ fn lowerAstErrors(zg: *ZonGen) Allocator.Error!void {
const tree = zg.tree; const tree = zg.tree;
assert(tree.errors.len > 0); assert(tree.errors.len > 0);
var msg: std.io.Writer.Allocating = .init(gpa); var msg: Writer.Allocating = .init(gpa);
defer msg.deinit(); defer msg.deinit();
const msg_bw = &msg.writer; const msg_bw = &msg.writer;

View File

@ -7,7 +7,7 @@ const builtin = @import("builtin");
const DW = std.dwarf; const DW = std.dwarf;
const ir = @import("ir.zig"); const ir = @import("ir.zig");
const log = std.log.scoped(.llvm); const log = std.log.scoped(.llvm);
const Writer = std.io.Writer; const Writer = std.Io.Writer;
gpa: Allocator, gpa: Allocator,
strip: bool, strip: bool,

View File

@ -1,7 +1,6 @@
const std = @import("std"); const std = @import("std");
const mem = std.mem; const mem = std.mem;
const print = std.debug.print; const print = std.debug.print;
const io = std.io;
const maxInt = std.math.maxInt; const maxInt = std.math.maxInt;
test "zig fmt: remove extra whitespace at start and end of file with comment between" { test "zig fmt: remove extra whitespace at start and end of file with comment between" {

View File

@ -1,7 +1,7 @@
const std = @import("../std.zig"); const std = @import("../std.zig");
const assert = std.debug.assert; const assert = std.debug.assert;
const utf8Encode = std.unicode.utf8Encode; const utf8Encode = std.unicode.utf8Encode;
const Writer = std.io.Writer; const Writer = std.Io.Writer;
pub const ParseError = error{ pub const ParseError = error{
OutOfMemory, OutOfMemory,
@ -45,7 +45,7 @@ pub const Error = union(enum) {
raw_string: []const u8, raw_string: []const u8,
}; };
fn formatMessage(self: FormatMessage, writer: *std.io.Writer) std.io.Writer.Error!void { fn formatMessage(self: FormatMessage, writer: *Writer) Writer.Error!void {
switch (self.err) { switch (self.err) {
.invalid_escape_character => |bad_index| try writer.print( .invalid_escape_character => |bad_index| try writer.print(
"invalid escape character: '{c}'", "invalid escape character: '{c}'",
@ -358,7 +358,7 @@ pub fn parseWrite(writer: *Writer, bytes: []const u8) Writer.Error!Result {
/// Higher level API. Does not return extra info about parse errors. /// Higher level API. Does not return extra info about parse errors.
/// Caller owns returned memory. /// Caller owns returned memory.
pub fn parseAlloc(allocator: std.mem.Allocator, bytes: []const u8) ParseError![]u8 { pub fn parseAlloc(allocator: std.mem.Allocator, bytes: []const u8) ParseError![]u8 {
var aw: std.io.Writer.Allocating = .init(allocator); var aw: Writer.Allocating = .init(allocator);
defer aw.deinit(); defer aw.deinit();
const result = parseWrite(&aw.writer, bytes) catch |err| switch (err) { const result = parseWrite(&aw.writer, bytes) catch |err| switch (err) {
error.WriteFailed => return error.OutOfMemory, error.WriteFailed => return error.OutOfMemory,

View File

@ -195,12 +195,12 @@ pub const Decompress = struct {
}; };
} }
fn streamStore(r: *Reader, w: *Writer, limit: std.io.Limit) Reader.StreamError!usize { fn streamStore(r: *Reader, w: *Writer, limit: std.Io.Limit) Reader.StreamError!usize {
const d: *Decompress = @fieldParentPtr("interface", r); const d: *Decompress = @fieldParentPtr("interface", r);
return d.store.read(w, limit); return d.store.read(w, limit);
} }
fn streamDeflate(r: *Reader, w: *Writer, limit: std.io.Limit) Reader.StreamError!usize { fn streamDeflate(r: *Reader, w: *Writer, limit: std.Io.Limit) Reader.StreamError!usize {
const d: *Decompress = @fieldParentPtr("interface", r); const d: *Decompress = @fieldParentPtr("interface", r);
return flate.Decompress.read(&d.inflate, w, limit); return flate.Decompress.read(&d.inflate, w, limit);
} }

View File

@ -119,7 +119,7 @@ const Value = extern struct {
} }
} }
pub fn format(value: Value, writer: *std.io.Writer) std.io.Writer.Error!void { pub fn format(value: Value, writer: *std.Io.Writer) std.Io.Writer.Error!void {
// Work around x86_64 backend limitation. // Work around x86_64 backend limitation.
if (builtin.zig_backend == .stage2_x86_64 and builtin.os.tag == .windows) { if (builtin.zig_backend == .stage2_x86_64 and builtin.os.tag == .windows) {
try writer.writeAll("(unknown)"); try writer.writeAll("(unknown)");

View File

@ -961,7 +961,7 @@ pub const Inst = struct {
return index.unwrap().target; return index.unwrap().target;
} }
pub fn format(index: Index, w: *std.io.Writer) std.io.Writer.Error!void { pub fn format(index: Index, w: *std.Io.Writer) std.Io.Writer.Error!void {
try w.writeByte('%'); try w.writeByte('%');
switch (index.unwrap()) { switch (index.unwrap()) {
.ref => {}, .ref => {},

View File

@ -10,6 +10,7 @@ const log = std.log.scoped(.liveness);
const assert = std.debug.assert; const assert = std.debug.assert;
const Allocator = std.mem.Allocator; const Allocator = std.mem.Allocator;
const Log2Int = std.math.Log2Int; const Log2Int = std.math.Log2Int;
const Writer = std.Io.Writer;
const Liveness = @This(); const Liveness = @This();
const trace = @import("../tracy.zig").trace; const trace = @import("../tracy.zig").trace;
@ -2037,7 +2038,7 @@ fn fmtInstSet(set: *const std.AutoHashMapUnmanaged(Air.Inst.Index, void)) FmtIns
const FmtInstSet = struct { const FmtInstSet = struct {
set: *const std.AutoHashMapUnmanaged(Air.Inst.Index, void), set: *const std.AutoHashMapUnmanaged(Air.Inst.Index, void),
pub fn format(val: FmtInstSet, w: *std.io.Writer) std.io.Writer.Error!void { pub fn format(val: FmtInstSet, w: *Writer) Writer.Error!void {
if (val.set.count() == 0) { if (val.set.count() == 0) {
try w.writeAll("[no instructions]"); try w.writeAll("[no instructions]");
return; return;
@ -2057,7 +2058,7 @@ fn fmtInstList(list: []const Air.Inst.Index) FmtInstList {
const FmtInstList = struct { const FmtInstList = struct {
list: []const Air.Inst.Index, list: []const Air.Inst.Index,
pub fn format(val: FmtInstList, w: *std.io.Writer) std.io.Writer.Error!void { pub fn format(val: FmtInstList, w: *Writer) Writer.Error!void {
if (val.list.len == 0) { if (val.list.len == 0) {
try w.writeAll("[no instructions]"); try w.writeAll("[no instructions]");
return; return;

View File

@ -9,7 +9,7 @@ const Type = @import("../Type.zig");
const Air = @import("../Air.zig"); const Air = @import("../Air.zig");
const InternPool = @import("../InternPool.zig"); const InternPool = @import("../InternPool.zig");
pub fn write(air: Air, stream: *std.io.Writer, pt: Zcu.PerThread, liveness: ?Air.Liveness) void { pub fn write(air: Air, stream: *std.Io.Writer, pt: Zcu.PerThread, liveness: ?Air.Liveness) void {
comptime assert(build_options.enable_debug_extensions); comptime assert(build_options.enable_debug_extensions);
const instruction_bytes = air.instructions.len * const instruction_bytes = air.instructions.len *
// Here we don't use @sizeOf(Air.Inst.Data) because it would include // Here we don't use @sizeOf(Air.Inst.Data) because it would include
@ -55,7 +55,7 @@ pub fn write(air: Air, stream: *std.io.Writer, pt: Zcu.PerThread, liveness: ?Air
pub fn writeInst( pub fn writeInst(
air: Air, air: Air,
stream: *std.io.Writer, stream: *std.Io.Writer,
inst: Air.Inst.Index, inst: Air.Inst.Index,
pt: Zcu.PerThread, pt: Zcu.PerThread,
liveness: ?Air.Liveness, liveness: ?Air.Liveness,
@ -92,16 +92,16 @@ const Writer = struct {
indent: usize, indent: usize,
skip_body: bool, skip_body: bool,
const Error = std.io.Writer.Error; const Error = std.Io.Writer.Error;
fn writeBody(w: *Writer, s: *std.io.Writer, body: []const Air.Inst.Index) Error!void { fn writeBody(w: *Writer, s: *std.Io.Writer, body: []const Air.Inst.Index) Error!void {
for (body) |inst| { for (body) |inst| {
try w.writeInst(s, inst); try w.writeInst(s, inst);
try s.writeByte('\n'); try s.writeByte('\n');
} }
} }
fn writeInst(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { fn writeInst(w: *Writer, s: *std.Io.Writer, inst: Air.Inst.Index) Error!void {
const tag = w.air.instructions.items(.tag)[@intFromEnum(inst)]; const tag = w.air.instructions.items(.tag)[@intFromEnum(inst)];
try s.splatByteAll(' ', w.indent); try s.splatByteAll(' ', w.indent);
try s.print("{f}{c}= {s}(", .{ try s.print("{f}{c}= {s}(", .{
@ -341,48 +341,48 @@ const Writer = struct {
try s.writeByte(')'); try s.writeByte(')');
} }
fn writeBinOp(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { fn writeBinOp(w: *Writer, s: *std.Io.Writer, inst: Air.Inst.Index) Error!void {
const bin_op = w.air.instructions.items(.data)[@intFromEnum(inst)].bin_op; const bin_op = w.air.instructions.items(.data)[@intFromEnum(inst)].bin_op;
try w.writeOperand(s, inst, 0, bin_op.lhs); try w.writeOperand(s, inst, 0, bin_op.lhs);
try s.writeAll(", "); try s.writeAll(", ");
try w.writeOperand(s, inst, 1, bin_op.rhs); try w.writeOperand(s, inst, 1, bin_op.rhs);
} }
fn writeUnOp(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { fn writeUnOp(w: *Writer, s: *std.Io.Writer, inst: Air.Inst.Index) Error!void {
const un_op = w.air.instructions.items(.data)[@intFromEnum(inst)].un_op; const un_op = w.air.instructions.items(.data)[@intFromEnum(inst)].un_op;
try w.writeOperand(s, inst, 0, un_op); try w.writeOperand(s, inst, 0, un_op);
} }
fn writeNoOp(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { fn writeNoOp(w: *Writer, s: *std.Io.Writer, inst: Air.Inst.Index) Error!void {
_ = w; _ = w;
_ = s; _ = s;
_ = inst; _ = inst;
// no-op, no argument to write // no-op, no argument to write
} }
fn writeType(w: *Writer, s: *std.io.Writer, ty: Type) !void { fn writeType(w: *Writer, s: *std.Io.Writer, ty: Type) !void {
return ty.print(s, w.pt); return ty.print(s, w.pt);
} }
fn writeTy(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { fn writeTy(w: *Writer, s: *std.Io.Writer, inst: Air.Inst.Index) Error!void {
const ty = w.air.instructions.items(.data)[@intFromEnum(inst)].ty; const ty = w.air.instructions.items(.data)[@intFromEnum(inst)].ty;
try w.writeType(s, ty); try w.writeType(s, ty);
} }
fn writeArg(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { fn writeArg(w: *Writer, s: *std.Io.Writer, inst: Air.Inst.Index) Error!void {
const arg = w.air.instructions.items(.data)[@intFromEnum(inst)].arg; const arg = w.air.instructions.items(.data)[@intFromEnum(inst)].arg;
try w.writeType(s, arg.ty.toType()); try w.writeType(s, arg.ty.toType());
try s.print(", {d}", .{arg.zir_param_index}); try s.print(", {d}", .{arg.zir_param_index});
} }
fn writeTyOp(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { fn writeTyOp(w: *Writer, s: *std.Io.Writer, inst: Air.Inst.Index) Error!void {
const ty_op = w.air.instructions.items(.data)[@intFromEnum(inst)].ty_op; const ty_op = w.air.instructions.items(.data)[@intFromEnum(inst)].ty_op;
try w.writeType(s, ty_op.ty.toType()); try w.writeType(s, ty_op.ty.toType());
try s.writeAll(", "); try s.writeAll(", ");
try w.writeOperand(s, inst, 0, ty_op.operand); try w.writeOperand(s, inst, 0, ty_op.operand);
} }
fn writeBlock(w: *Writer, s: *std.io.Writer, tag: Air.Inst.Tag, inst: Air.Inst.Index) Error!void { fn writeBlock(w: *Writer, s: *std.Io.Writer, tag: Air.Inst.Tag, inst: Air.Inst.Index) Error!void {
const ty_pl = w.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl; const ty_pl = w.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl;
try w.writeType(s, ty_pl.ty.toType()); try w.writeType(s, ty_pl.ty.toType());
const body: []const Air.Inst.Index = @ptrCast(switch (tag) { const body: []const Air.Inst.Index = @ptrCast(switch (tag) {
@ -423,7 +423,7 @@ const Writer = struct {
} }
} }
fn writeLoop(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { fn writeLoop(w: *Writer, s: *std.Io.Writer, inst: Air.Inst.Index) Error!void {
const ty_pl = w.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl; const ty_pl = w.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl;
const extra = w.air.extraData(Air.Block, ty_pl.payload); const extra = w.air.extraData(Air.Block, ty_pl.payload);
const body: []const Air.Inst.Index = @ptrCast(w.air.extra.items[extra.end..][0..extra.data.body_len]); const body: []const Air.Inst.Index = @ptrCast(w.air.extra.items[extra.end..][0..extra.data.body_len]);
@ -439,7 +439,7 @@ const Writer = struct {
try s.writeAll("}"); try s.writeAll("}");
} }
fn writeAggregateInit(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { fn writeAggregateInit(w: *Writer, s: *std.Io.Writer, inst: Air.Inst.Index) Error!void {
const zcu = w.pt.zcu; const zcu = w.pt.zcu;
const ty_pl = w.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl; const ty_pl = w.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl;
const vector_ty = ty_pl.ty.toType(); const vector_ty = ty_pl.ty.toType();
@ -455,7 +455,7 @@ const Writer = struct {
try s.writeAll("]"); try s.writeAll("]");
} }
fn writeUnionInit(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { fn writeUnionInit(w: *Writer, s: *std.Io.Writer, inst: Air.Inst.Index) Error!void {
const ty_pl = w.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl; const ty_pl = w.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl;
const extra = w.air.extraData(Air.UnionInit, ty_pl.payload).data; const extra = w.air.extraData(Air.UnionInit, ty_pl.payload).data;
@ -463,7 +463,7 @@ const Writer = struct {
try w.writeOperand(s, inst, 0, extra.init); try w.writeOperand(s, inst, 0, extra.init);
} }
fn writeStructField(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { fn writeStructField(w: *Writer, s: *std.Io.Writer, inst: Air.Inst.Index) Error!void {
const ty_pl = w.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl; const ty_pl = w.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl;
const extra = w.air.extraData(Air.StructField, ty_pl.payload).data; const extra = w.air.extraData(Air.StructField, ty_pl.payload).data;
@ -471,7 +471,7 @@ const Writer = struct {
try s.print(", {d}", .{extra.field_index}); try s.print(", {d}", .{extra.field_index});
} }
fn writeTyPlBin(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { fn writeTyPlBin(w: *Writer, s: *std.Io.Writer, inst: Air.Inst.Index) Error!void {
const data = w.air.instructions.items(.data); const data = w.air.instructions.items(.data);
const ty_pl = data[@intFromEnum(inst)].ty_pl; const ty_pl = data[@intFromEnum(inst)].ty_pl;
const extra = w.air.extraData(Air.Bin, ty_pl.payload).data; const extra = w.air.extraData(Air.Bin, ty_pl.payload).data;
@ -484,7 +484,7 @@ const Writer = struct {
try w.writeOperand(s, inst, 1, extra.rhs); try w.writeOperand(s, inst, 1, extra.rhs);
} }
fn writeCmpxchg(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { fn writeCmpxchg(w: *Writer, s: *std.Io.Writer, inst: Air.Inst.Index) Error!void {
const ty_pl = w.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl; const ty_pl = w.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl;
const extra = w.air.extraData(Air.Cmpxchg, ty_pl.payload).data; const extra = w.air.extraData(Air.Cmpxchg, ty_pl.payload).data;
@ -498,7 +498,7 @@ const Writer = struct {
}); });
} }
fn writeMulAdd(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { fn writeMulAdd(w: *Writer, s: *std.Io.Writer, inst: Air.Inst.Index) Error!void {
const pl_op = w.air.instructions.items(.data)[@intFromEnum(inst)].pl_op; const pl_op = w.air.instructions.items(.data)[@intFromEnum(inst)].pl_op;
const extra = w.air.extraData(Air.Bin, pl_op.payload).data; const extra = w.air.extraData(Air.Bin, pl_op.payload).data;
@ -509,7 +509,7 @@ const Writer = struct {
try w.writeOperand(s, inst, 2, pl_op.operand); try w.writeOperand(s, inst, 2, pl_op.operand);
} }
fn writeShuffleOne(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { fn writeShuffleOne(w: *Writer, s: *std.Io.Writer, inst: Air.Inst.Index) Error!void {
const unwrapped = w.air.unwrapShuffleOne(w.pt.zcu, inst); const unwrapped = w.air.unwrapShuffleOne(w.pt.zcu, inst);
try w.writeType(s, unwrapped.result_ty); try w.writeType(s, unwrapped.result_ty);
try s.writeAll(", "); try s.writeAll(", ");
@ -525,7 +525,7 @@ const Writer = struct {
try s.writeByte(']'); try s.writeByte(']');
} }
fn writeShuffleTwo(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { fn writeShuffleTwo(w: *Writer, s: *std.Io.Writer, inst: Air.Inst.Index) Error!void {
const unwrapped = w.air.unwrapShuffleTwo(w.pt.zcu, inst); const unwrapped = w.air.unwrapShuffleTwo(w.pt.zcu, inst);
try w.writeType(s, unwrapped.result_ty); try w.writeType(s, unwrapped.result_ty);
try s.writeAll(", "); try s.writeAll(", ");
@ -544,7 +544,7 @@ const Writer = struct {
try s.writeByte(']'); try s.writeByte(']');
} }
fn writeSelect(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { fn writeSelect(w: *Writer, s: *std.Io.Writer, inst: Air.Inst.Index) Error!void {
const zcu = w.pt.zcu; const zcu = w.pt.zcu;
const pl_op = w.air.instructions.items(.data)[@intFromEnum(inst)].pl_op; const pl_op = w.air.instructions.items(.data)[@intFromEnum(inst)].pl_op;
const extra = w.air.extraData(Air.Bin, pl_op.payload).data; const extra = w.air.extraData(Air.Bin, pl_op.payload).data;
@ -559,14 +559,14 @@ const Writer = struct {
try w.writeOperand(s, inst, 2, extra.rhs); try w.writeOperand(s, inst, 2, extra.rhs);
} }
fn writeReduce(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { fn writeReduce(w: *Writer, s: *std.Io.Writer, inst: Air.Inst.Index) Error!void {
const reduce = w.air.instructions.items(.data)[@intFromEnum(inst)].reduce; const reduce = w.air.instructions.items(.data)[@intFromEnum(inst)].reduce;
try w.writeOperand(s, inst, 0, reduce.operand); try w.writeOperand(s, inst, 0, reduce.operand);
try s.print(", {s}", .{@tagName(reduce.operation)}); try s.print(", {s}", .{@tagName(reduce.operation)});
} }
fn writeCmpVector(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { fn writeCmpVector(w: *Writer, s: *std.Io.Writer, inst: Air.Inst.Index) Error!void {
const ty_pl = w.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl; const ty_pl = w.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl;
const extra = w.air.extraData(Air.VectorCmp, ty_pl.payload).data; const extra = w.air.extraData(Air.VectorCmp, ty_pl.payload).data;
@ -576,7 +576,7 @@ const Writer = struct {
try w.writeOperand(s, inst, 1, extra.rhs); try w.writeOperand(s, inst, 1, extra.rhs);
} }
fn writeVectorStoreElem(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { fn writeVectorStoreElem(w: *Writer, s: *std.Io.Writer, inst: Air.Inst.Index) Error!void {
const data = w.air.instructions.items(.data)[@intFromEnum(inst)].vector_store_elem; const data = w.air.instructions.items(.data)[@intFromEnum(inst)].vector_store_elem;
const extra = w.air.extraData(Air.VectorCmp, data.payload).data; const extra = w.air.extraData(Air.VectorCmp, data.payload).data;
@ -587,21 +587,21 @@ const Writer = struct {
try w.writeOperand(s, inst, 2, extra.rhs); try w.writeOperand(s, inst, 2, extra.rhs);
} }
fn writeRuntimeNavPtr(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { fn writeRuntimeNavPtr(w: *Writer, s: *std.Io.Writer, inst: Air.Inst.Index) Error!void {
const ip = &w.pt.zcu.intern_pool; const ip = &w.pt.zcu.intern_pool;
const ty_nav = w.air.instructions.items(.data)[@intFromEnum(inst)].ty_nav; const ty_nav = w.air.instructions.items(.data)[@intFromEnum(inst)].ty_nav;
try w.writeType(s, .fromInterned(ty_nav.ty)); try w.writeType(s, .fromInterned(ty_nav.ty));
try s.print(", '{f}'", .{ip.getNav(ty_nav.nav).fqn.fmt(ip)}); try s.print(", '{f}'", .{ip.getNav(ty_nav.nav).fqn.fmt(ip)});
} }
fn writeAtomicLoad(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { fn writeAtomicLoad(w: *Writer, s: *std.Io.Writer, inst: Air.Inst.Index) Error!void {
const atomic_load = w.air.instructions.items(.data)[@intFromEnum(inst)].atomic_load; const atomic_load = w.air.instructions.items(.data)[@intFromEnum(inst)].atomic_load;
try w.writeOperand(s, inst, 0, atomic_load.ptr); try w.writeOperand(s, inst, 0, atomic_load.ptr);
try s.print(", {s}", .{@tagName(atomic_load.order)}); try s.print(", {s}", .{@tagName(atomic_load.order)});
} }
fn writePrefetch(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { fn writePrefetch(w: *Writer, s: *std.Io.Writer, inst: Air.Inst.Index) Error!void {
const prefetch = w.air.instructions.items(.data)[@intFromEnum(inst)].prefetch; const prefetch = w.air.instructions.items(.data)[@intFromEnum(inst)].prefetch;
try w.writeOperand(s, inst, 0, prefetch.ptr); try w.writeOperand(s, inst, 0, prefetch.ptr);
@ -612,7 +612,7 @@ const Writer = struct {
fn writeAtomicStore( fn writeAtomicStore(
w: *Writer, w: *Writer,
s: *std.io.Writer, s: *std.Io.Writer,
inst: Air.Inst.Index, inst: Air.Inst.Index,
order: std.builtin.AtomicOrder, order: std.builtin.AtomicOrder,
) Error!void { ) Error!void {
@ -623,7 +623,7 @@ const Writer = struct {
try s.print(", {s}", .{@tagName(order)}); try s.print(", {s}", .{@tagName(order)});
} }
fn writeAtomicRmw(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { fn writeAtomicRmw(w: *Writer, s: *std.Io.Writer, inst: Air.Inst.Index) Error!void {
const pl_op = w.air.instructions.items(.data)[@intFromEnum(inst)].pl_op; const pl_op = w.air.instructions.items(.data)[@intFromEnum(inst)].pl_op;
const extra = w.air.extraData(Air.AtomicRmw, pl_op.payload).data; const extra = w.air.extraData(Air.AtomicRmw, pl_op.payload).data;
@ -633,7 +633,7 @@ const Writer = struct {
try s.print(", {s}, {s}", .{ @tagName(extra.op()), @tagName(extra.ordering()) }); try s.print(", {s}, {s}", .{ @tagName(extra.op()), @tagName(extra.ordering()) });
} }
fn writeFieldParentPtr(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { fn writeFieldParentPtr(w: *Writer, s: *std.Io.Writer, inst: Air.Inst.Index) Error!void {
const ty_pl = w.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl; const ty_pl = w.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl;
const extra = w.air.extraData(Air.FieldParentPtr, ty_pl.payload).data; const extra = w.air.extraData(Air.FieldParentPtr, ty_pl.payload).data;
@ -641,7 +641,7 @@ const Writer = struct {
try s.print(", {d}", .{extra.field_index}); try s.print(", {d}", .{extra.field_index});
} }
fn writeAssembly(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { fn writeAssembly(w: *Writer, s: *std.Io.Writer, inst: Air.Inst.Index) Error!void {
const ty_pl = w.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl; const ty_pl = w.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl;
const extra = w.air.extraData(Air.Asm, ty_pl.payload); const extra = w.air.extraData(Air.Asm, ty_pl.payload);
const is_volatile = extra.data.flags.is_volatile; const is_volatile = extra.data.flags.is_volatile;
@ -730,19 +730,19 @@ const Writer = struct {
try s.print(", \"{f}\"", .{std.zig.fmtString(asm_source)}); try s.print(", \"{f}\"", .{std.zig.fmtString(asm_source)});
} }
fn writeDbgStmt(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { fn writeDbgStmt(w: *Writer, s: *std.Io.Writer, inst: Air.Inst.Index) Error!void {
const dbg_stmt = w.air.instructions.items(.data)[@intFromEnum(inst)].dbg_stmt; const dbg_stmt = w.air.instructions.items(.data)[@intFromEnum(inst)].dbg_stmt;
try s.print("{d}:{d}", .{ dbg_stmt.line + 1, dbg_stmt.column + 1 }); try s.print("{d}:{d}", .{ dbg_stmt.line + 1, dbg_stmt.column + 1 });
} }
fn writeDbgVar(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { fn writeDbgVar(w: *Writer, s: *std.Io.Writer, inst: Air.Inst.Index) Error!void {
const pl_op = w.air.instructions.items(.data)[@intFromEnum(inst)].pl_op; const pl_op = w.air.instructions.items(.data)[@intFromEnum(inst)].pl_op;
try w.writeOperand(s, inst, 0, pl_op.operand); try w.writeOperand(s, inst, 0, pl_op.operand);
const name: Air.NullTerminatedString = @enumFromInt(pl_op.payload); const name: Air.NullTerminatedString = @enumFromInt(pl_op.payload);
try s.print(", \"{f}\"", .{std.zig.fmtString(name.toSlice(w.air))}); try s.print(", \"{f}\"", .{std.zig.fmtString(name.toSlice(w.air))});
} }
fn writeCall(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { fn writeCall(w: *Writer, s: *std.Io.Writer, inst: Air.Inst.Index) Error!void {
const pl_op = w.air.instructions.items(.data)[@intFromEnum(inst)].pl_op; const pl_op = w.air.instructions.items(.data)[@intFromEnum(inst)].pl_op;
const extra = w.air.extraData(Air.Call, pl_op.payload); const extra = w.air.extraData(Air.Call, pl_op.payload);
const args = @as([]const Air.Inst.Ref, @ptrCast(w.air.extra.items[extra.end..][0..extra.data.args_len])); const args = @as([]const Air.Inst.Ref, @ptrCast(w.air.extra.items[extra.end..][0..extra.data.args_len]));
@ -755,19 +755,19 @@ const Writer = struct {
try s.writeAll("]"); try s.writeAll("]");
} }
fn writeBr(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { fn writeBr(w: *Writer, s: *std.Io.Writer, inst: Air.Inst.Index) Error!void {
const br = w.air.instructions.items(.data)[@intFromEnum(inst)].br; const br = w.air.instructions.items(.data)[@intFromEnum(inst)].br;
try w.writeInstIndex(s, br.block_inst, false); try w.writeInstIndex(s, br.block_inst, false);
try s.writeAll(", "); try s.writeAll(", ");
try w.writeOperand(s, inst, 0, br.operand); try w.writeOperand(s, inst, 0, br.operand);
} }
fn writeRepeat(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { fn writeRepeat(w: *Writer, s: *std.Io.Writer, inst: Air.Inst.Index) Error!void {
const repeat = w.air.instructions.items(.data)[@intFromEnum(inst)].repeat; const repeat = w.air.instructions.items(.data)[@intFromEnum(inst)].repeat;
try w.writeInstIndex(s, repeat.loop_inst, false); try w.writeInstIndex(s, repeat.loop_inst, false);
} }
fn writeTry(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { fn writeTry(w: *Writer, s: *std.Io.Writer, inst: Air.Inst.Index) Error!void {
const pl_op = w.air.instructions.items(.data)[@intFromEnum(inst)].pl_op; const pl_op = w.air.instructions.items(.data)[@intFromEnum(inst)].pl_op;
const extra = w.air.extraData(Air.Try, pl_op.payload); const extra = w.air.extraData(Air.Try, pl_op.payload);
const body: []const Air.Inst.Index = @ptrCast(w.air.extra.items[extra.end..][0..extra.data.body_len]); const body: []const Air.Inst.Index = @ptrCast(w.air.extra.items[extra.end..][0..extra.data.body_len]);
@ -801,7 +801,7 @@ const Writer = struct {
} }
} }
fn writeTryPtr(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { fn writeTryPtr(w: *Writer, s: *std.Io.Writer, inst: Air.Inst.Index) Error!void {
const ty_pl = w.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl; const ty_pl = w.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl;
const extra = w.air.extraData(Air.TryPtr, ty_pl.payload); const extra = w.air.extraData(Air.TryPtr, ty_pl.payload);
const body: []const Air.Inst.Index = @ptrCast(w.air.extra.items[extra.end..][0..extra.data.body_len]); const body: []const Air.Inst.Index = @ptrCast(w.air.extra.items[extra.end..][0..extra.data.body_len]);
@ -838,7 +838,7 @@ const Writer = struct {
} }
} }
fn writeCondBr(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { fn writeCondBr(w: *Writer, s: *std.Io.Writer, inst: Air.Inst.Index) Error!void {
const pl_op = w.air.instructions.items(.data)[@intFromEnum(inst)].pl_op; const pl_op = w.air.instructions.items(.data)[@intFromEnum(inst)].pl_op;
const extra = w.air.extraData(Air.CondBr, pl_op.payload); const extra = w.air.extraData(Air.CondBr, pl_op.payload);
const then_body: []const Air.Inst.Index = @ptrCast(w.air.extra.items[extra.end..][0..extra.data.then_body_len]); const then_body: []const Air.Inst.Index = @ptrCast(w.air.extra.items[extra.end..][0..extra.data.then_body_len]);
@ -897,7 +897,7 @@ const Writer = struct {
try s.writeAll("}"); try s.writeAll("}");
} }
fn writeSwitchBr(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { fn writeSwitchBr(w: *Writer, s: *std.Io.Writer, inst: Air.Inst.Index) Error!void {
const switch_br = w.air.unwrapSwitch(inst); const switch_br = w.air.unwrapSwitch(inst);
const liveness: Air.Liveness.SwitchBrTable = if (w.liveness) |liveness| const liveness: Air.Liveness.SwitchBrTable = if (w.liveness) |liveness|
@ -983,25 +983,25 @@ const Writer = struct {
try s.splatByteAll(' ', old_indent); try s.splatByteAll(' ', old_indent);
} }
fn writeWasmMemorySize(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { fn writeWasmMemorySize(w: *Writer, s: *std.Io.Writer, inst: Air.Inst.Index) Error!void {
const pl_op = w.air.instructions.items(.data)[@intFromEnum(inst)].pl_op; const pl_op = w.air.instructions.items(.data)[@intFromEnum(inst)].pl_op;
try s.print("{d}", .{pl_op.payload}); try s.print("{d}", .{pl_op.payload});
} }
fn writeWasmMemoryGrow(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { fn writeWasmMemoryGrow(w: *Writer, s: *std.Io.Writer, inst: Air.Inst.Index) Error!void {
const pl_op = w.air.instructions.items(.data)[@intFromEnum(inst)].pl_op; const pl_op = w.air.instructions.items(.data)[@intFromEnum(inst)].pl_op;
try s.print("{d}, ", .{pl_op.payload}); try s.print("{d}, ", .{pl_op.payload});
try w.writeOperand(s, inst, 0, pl_op.operand); try w.writeOperand(s, inst, 0, pl_op.operand);
} }
fn writeWorkDimension(w: *Writer, s: *std.io.Writer, inst: Air.Inst.Index) Error!void { fn writeWorkDimension(w: *Writer, s: *std.Io.Writer, inst: Air.Inst.Index) Error!void {
const pl_op = w.air.instructions.items(.data)[@intFromEnum(inst)].pl_op; const pl_op = w.air.instructions.items(.data)[@intFromEnum(inst)].pl_op;
try s.print("{d}", .{pl_op.payload}); try s.print("{d}", .{pl_op.payload});
} }
fn writeOperand( fn writeOperand(
w: *Writer, w: *Writer,
s: *std.io.Writer, s: *std.Io.Writer,
inst: Air.Inst.Index, inst: Air.Inst.Index,
op_index: usize, op_index: usize,
operand: Air.Inst.Ref, operand: Air.Inst.Ref,
@ -1027,7 +1027,7 @@ const Writer = struct {
fn writeInstRef( fn writeInstRef(
w: *Writer, w: *Writer,
s: *std.io.Writer, s: *std.Io.Writer,
operand: Air.Inst.Ref, operand: Air.Inst.Ref,
dies: bool, dies: bool,
) Error!void { ) Error!void {
@ -1047,7 +1047,7 @@ const Writer = struct {
fn writeInstIndex( fn writeInstIndex(
w: *Writer, w: *Writer,
s: *std.io.Writer, s: *std.Io.Writer,
inst: Air.Inst.Index, inst: Air.Inst.Index,
dies: bool, dies: bool,
) Error!void { ) Error!void {


@ -12,7 +12,7 @@ const ThreadPool = std.Thread.Pool;
const WaitGroup = std.Thread.WaitGroup; const WaitGroup = std.Thread.WaitGroup;
const ErrorBundle = std.zig.ErrorBundle; const ErrorBundle = std.zig.ErrorBundle;
const fatal = std.process.fatal; const fatal = std.process.fatal;
const Writer = std.io.Writer; const Writer = std.Io.Writer;
const Value = @import("Value.zig"); const Value = @import("Value.zig");
const Type = @import("Type.zig"); const Type = @import("Type.zig");
@ -468,7 +468,7 @@ pub const Path = struct {
const Formatter = struct { const Formatter = struct {
p: Path, p: Path,
comp: *Compilation, comp: *Compilation,
pub fn format(f: Formatter, w: *std.io.Writer) std.io.Writer.Error!void { pub fn format(f: Formatter, w: *Writer) Writer.Error!void {
const root_path: []const u8 = switch (f.p.root) { const root_path: []const u8 = switch (f.p.root) {
.zig_lib => f.comp.dirs.zig_lib.path orelse ".", .zig_lib => f.comp.dirs.zig_lib.path orelse ".",
.global_cache => f.comp.dirs.global_cache.path orelse ".", .global_cache => f.comp.dirs.global_cache.path orelse ".",
@ -1883,7 +1883,7 @@ pub const CreateDiagnostic = union(enum) {
sub: []const u8, sub: []const u8,
err: (fs.Dir.MakeError || fs.Dir.OpenError || fs.Dir.StatFileError), err: (fs.Dir.MakeError || fs.Dir.OpenError || fs.Dir.StatFileError),
}; };
pub fn format(diag: CreateDiagnostic, w: *std.Io.Writer) std.Io.Writer.Error!void { pub fn format(diag: CreateDiagnostic, w: *Writer) Writer.Error!void {
switch (diag) { switch (diag) {
.export_table_import_table_conflict => try w.writeAll("'--import-table' and '--export-table' cannot be used together"), .export_table_import_table_conflict => try w.writeAll("'--import-table' and '--export-table' cannot be used together"),
.emit_h_without_zcu => try w.writeAll("cannot emit C header with no Zig source files"), .emit_h_without_zcu => try w.writeAll("cannot emit C header with no Zig source files"),
@ -6457,7 +6457,7 @@ fn updateWin32Resource(comp: *Compilation, win32_resource: *Win32Resource, win32
// In .rc files, a " within a quoted string is escaped as "" // In .rc files, a " within a quoted string is escaped as ""
const fmtRcEscape = struct { const fmtRcEscape = struct {
fn formatRcEscape(bytes: []const u8, writer: *std.io.Writer) std.io.Writer.Error!void { fn formatRcEscape(bytes: []const u8, writer: *Writer) Writer.Error!void {
for (bytes) |byte| switch (byte) { for (bytes) |byte| switch (byte) {
'"' => try writer.writeAll("\"\""), '"' => try writer.writeAll("\"\""),
'\\' => try writer.writeAll("\\\\"), '\\' => try writer.writeAll("\\\\"),


@ -1,6 +1,20 @@
//! All interned objects have both a value and a type. //! All interned objects have both a value and a type.
//! This data structure is self-contained. //! This data structure is self-contained.
const builtin = @import("builtin");
const std = @import("std");
const Allocator = std.mem.Allocator;
const assert = std.debug.assert;
const BigIntConst = std.math.big.int.Const;
const BigIntMutable = std.math.big.int.Mutable;
const Cache = std.Build.Cache;
const Limb = std.math.big.Limb;
const Hash = std.hash.Wyhash;
const InternPool = @This();
const Zcu = @import("Zcu.zig");
const Zir = std.zig.Zir;
/// One item per thread, indexed by `tid`, which is dense and unique per thread. /// One item per thread, indexed by `tid`, which is dense and unique per thread.
locals: []Local, locals: []Local,
/// Length must be a power of two and represents the number of simultaneous /// Length must be a power of two and represents the number of simultaneous
@ -1606,20 +1620,6 @@ fn getIndexMask(ip: *const InternPool, comptime BackingInt: type) u32 {
const FieldMap = std.ArrayHashMapUnmanaged(void, void, std.array_hash_map.AutoContext(void), false); const FieldMap = std.ArrayHashMapUnmanaged(void, void, std.array_hash_map.AutoContext(void), false);
const builtin = @import("builtin");
const std = @import("std");
const Allocator = std.mem.Allocator;
const assert = std.debug.assert;
const BigIntConst = std.math.big.int.Const;
const BigIntMutable = std.math.big.int.Mutable;
const Cache = std.Build.Cache;
const Limb = std.math.big.Limb;
const Hash = std.hash.Wyhash;
const InternPool = @This();
const Zcu = @import("Zcu.zig");
const Zir = std.zig.Zir;
/// An index into `maps` which might be `none`. /// An index into `maps` which might be `none`.
pub const OptionalMapIndex = enum(u32) { pub const OptionalMapIndex = enum(u32) {
none = std.math.maxInt(u32), none = std.math.maxInt(u32),
@ -1895,7 +1895,7 @@ pub const NullTerminatedString = enum(u32) {
ip: *const InternPool, ip: *const InternPool,
id: bool, id: bool,
}; };
fn format(data: FormatData, writer: *std.io.Writer) std.io.Writer.Error!void { fn format(data: FormatData, writer: *std.Io.Writer) std.Io.Writer.Error!void {
const slice = data.string.toSlice(data.ip); const slice = data.string.toSlice(data.ip);
if (!data.id) { if (!data.id) {
try writer.writeAll(slice); try writer.writeAll(slice);


@ -2020,7 +2020,7 @@ const UnpackResult = struct {
// output errors to string // output errors to string
var errors = try fetch.error_bundle.toOwnedBundle(""); var errors = try fetch.error_bundle.toOwnedBundle("");
defer errors.deinit(gpa); defer errors.deinit(gpa);
var aw: std.io.Writer.Allocating = .init(gpa); var aw: std.Io.Writer.Allocating = .init(gpa);
defer aw.deinit(); defer aw.deinit();
try errors.renderToWriter(.{ .ttyconf = .no_color }, &aw.writer); try errors.renderToWriter(.{ .ttyconf = .no_color }, &aw.writer);
try std.testing.expectEqualStrings( try std.testing.expectEqualStrings(
@ -2329,7 +2329,7 @@ const TestFetchBuilder = struct {
if (notes_len > 0) { if (notes_len > 0) {
try std.testing.expectEqual(notes_len, em.notes_len); try std.testing.expectEqual(notes_len, em.notes_len);
} }
var aw: std.io.Writer.Allocating = .init(std.testing.allocator); var aw: std.Io.Writer.Allocating = .init(std.testing.allocator);
defer aw.deinit(); defer aw.deinit();
try errors.renderToWriter(.{ .ttyconf = .no_color }, &aw.writer); try errors.renderToWriter(.{ .ttyconf = .no_color }, &aw.writer);
try std.testing.expectEqualStrings(msg, aw.written()); try std.testing.expectEqualStrings(msg, aw.written());
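
A minimal sketch of the pattern these test hunks rely on, for readers unfamiliar with the post-migration API: a custom format method now takes a concrete *std.Io.Writer (rather than the old generic writer interface), and std.Io.Writer.Allocating collects output into an allocated buffer that written() exposes. The Point type below is hypothetical, purely illustrative, and not part of this commit:

const std = @import("std");

// Hypothetical example type (not from this commit), showing the new
// formatter signature that takes a concrete *std.Io.Writer.
const Point = struct {
    x: i32,
    y: i32,

    pub fn format(p: Point, w: *std.Io.Writer) std.Io.Writer.Error!void {
        try w.print("({d}, {d})", .{ p.x, p.y });
    }
};

test "format into an allocating std.Io.Writer" {
    var aw: std.Io.Writer.Allocating = .init(std.testing.allocator);
    defer aw.deinit();

    const p: Point = .{ .x = 3, .y = -7 };
    // {f} dispatches to Point.format, writing through the *std.Io.Writer.
    try aw.writer.print("point = {f}", .{p});

    try std.testing.expectEqualStrings("point = (3, -7)", aw.written());
}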


@ -146,7 +146,7 @@ pub const Oid = union(Format) {
} else error.InvalidOid; } else error.InvalidOid;
} }
pub fn format(oid: Oid, writer: *std.io.Writer) std.io.Writer.Error!void { pub fn format(oid: Oid, writer: *std.Io.Writer) std.Io.Writer.Error!void {
try writer.print("{x}", .{oid.slice()}); try writer.print("{x}", .{oid.slice()});
} }


@ -472,7 +472,7 @@ const Parse = struct {
) InnerError!void { ) InnerError!void {
const raw_string = bytes[offset..]; const raw_string = bytes[offset..];
const result = r: { const result = r: {
var aw: std.io.Writer.Allocating = .fromArrayList(p.gpa, buf); var aw: std.Io.Writer.Allocating = .fromArrayList(p.gpa, buf);
defer buf.* = aw.toArrayList(); defer buf.* = aw.toArrayList();
break :r std.zig.string_literal.parseWrite(&aw.writer, raw_string) catch |err| switch (err) { break :r std.zig.string_literal.parseWrite(&aw.writer, raw_string) catch |err| switch (err) {
error.WriteFailed => return error.OutOfMemory, error.WriteFailed => return error.OutOfMemory,


@ -3080,7 +3080,7 @@ pub fn createTypeName(
const fn_info = sema.code.getFnInfo(ip.funcZirBodyInst(sema.func_index).resolve(ip) orelse return error.AnalysisFail); const fn_info = sema.code.getFnInfo(ip.funcZirBodyInst(sema.func_index).resolve(ip) orelse return error.AnalysisFail);
const zir_tags = sema.code.instructions.items(.tag); const zir_tags = sema.code.instructions.items(.tag);
var aw: std.io.Writer.Allocating = .init(gpa); var aw: std.Io.Writer.Allocating = .init(gpa);
defer aw.deinit(); defer aw.deinit();
const w = &aw.writer; const w = &aw.writer;
w.print("{f}(", .{block.type_name_ctx.fmt(ip)}) catch return error.OutOfMemory; w.print("{f}(", .{block.type_name_ctx.fmt(ip)}) catch return error.OutOfMemory;
@ -5508,7 +5508,7 @@ fn zirCompileLog(
const zcu = pt.zcu; const zcu = pt.zcu;
const gpa = zcu.gpa; const gpa = zcu.gpa;
var aw: std.io.Writer.Allocating = .init(gpa); var aw: std.Io.Writer.Allocating = .init(gpa);
defer aw.deinit(); defer aw.deinit();
const writer = &aw.writer; const writer = &aw.writer;
@ -9080,7 +9080,7 @@ fn callConvSupportsVarArgs(cc: std.builtin.CallingConvention.Tag) bool {
fn checkCallConvSupportsVarArgs(sema: *Sema, block: *Block, src: LazySrcLoc, cc: std.builtin.CallingConvention.Tag) CompileError!void { fn checkCallConvSupportsVarArgs(sema: *Sema, block: *Block, src: LazySrcLoc, cc: std.builtin.CallingConvention.Tag) CompileError!void {
const CallingConventionsSupportingVarArgsList = struct { const CallingConventionsSupportingVarArgsList = struct {
arch: std.Target.Cpu.Arch, arch: std.Target.Cpu.Arch,
pub fn format(ctx: @This(), w: *std.io.Writer) std.io.Writer.Error!void { pub fn format(ctx: @This(), w: *std.Io.Writer) std.Io.Writer.Error!void {
var first = true; var first = true;
for (calling_conventions_supporting_var_args) |cc_inner| { for (calling_conventions_supporting_var_args) |cc_inner| {
for (std.Target.Cpu.Arch.fromCallingConvention(cc_inner)) |supported_arch| { for (std.Target.Cpu.Arch.fromCallingConvention(cc_inner)) |supported_arch| {
@ -9521,7 +9521,7 @@ fn finishFunc(
.bad_arch => |allowed_archs| { .bad_arch => |allowed_archs| {
const ArchListFormatter = struct { const ArchListFormatter = struct {
archs: []const std.Target.Cpu.Arch, archs: []const std.Target.Cpu.Arch,
pub fn format(formatter: @This(), w: *std.io.Writer) std.io.Writer.Error!void { pub fn format(formatter: @This(), w: *std.Io.Writer) std.Io.Writer.Error!void {
for (formatter.archs, 0..) |arch, i| { for (formatter.archs, 0..) |arch, i| {
if (i != 0) if (i != 0)
try w.writeAll(", "); try w.writeAll(", ");
@ -36962,7 +36962,7 @@ fn notePathToComptimeAllocPtr(
error.AnalysisFail => unreachable, error.AnalysisFail => unreachable,
}; };
var second_path_aw: std.io.Writer.Allocating = .init(arena); var second_path_aw: std.Io.Writer.Allocating = .init(arena);
defer second_path_aw.deinit(); defer second_path_aw.deinit();
const inter_name = try std.fmt.allocPrint(arena, "v{d}", .{intermediate_value_count}); const inter_name = try std.fmt.allocPrint(arena, "v{d}", .{intermediate_value_count});
const deriv_start = @import("print_value.zig").printPtrDerivation( const deriv_start = @import("print_value.zig").printPtrDerivation(


@ -121,7 +121,7 @@ pub fn eql(a: Type, b: Type, zcu: *const Zcu) bool {
return a.toIntern() == b.toIntern(); return a.toIntern() == b.toIntern();
} }
pub fn format(ty: Type, writer: *std.io.Writer) !void { pub fn format(ty: Type, writer: *std.Io.Writer) !void {
_ = ty; _ = ty;
_ = writer; _ = writer;
@compileError("do not format types directly; use either ty.fmtDebug() or ty.fmt()"); @compileError("do not format types directly; use either ty.fmtDebug() or ty.fmt()");
@ -140,7 +140,7 @@ const Format = struct {
ty: Type, ty: Type,
pt: Zcu.PerThread, pt: Zcu.PerThread,
fn default(f: Format, writer: *std.io.Writer) std.io.Writer.Error!void { fn default(f: Format, writer: *std.Io.Writer) std.Io.Writer.Error!void {
return print(f.ty, writer, f.pt); return print(f.ty, writer, f.pt);
} }
}; };
@ -151,13 +151,13 @@ pub fn fmtDebug(ty: Type) std.fmt.Formatter(Type, dump) {
/// This is a debug function. In order to print types in a meaningful way /// This is a debug function. In order to print types in a meaningful way
/// we also need access to the module. /// we also need access to the module.
pub fn dump(start_type: Type, writer: *std.io.Writer) std.io.Writer.Error!void { pub fn dump(start_type: Type, writer: *std.Io.Writer) std.Io.Writer.Error!void {
return writer.print("{any}", .{start_type.ip_index}); return writer.print("{any}", .{start_type.ip_index});
} }
/// Prints a name suitable for `@typeName`. /// Prints a name suitable for `@typeName`.
/// TODO: take an `opt_sema` to pass to `fmtValue` when printing sentinels. /// TODO: take an `opt_sema` to pass to `fmtValue` when printing sentinels.
pub fn print(ty: Type, writer: *std.io.Writer, pt: Zcu.PerThread) std.io.Writer.Error!void { pub fn print(ty: Type, writer: *std.Io.Writer, pt: Zcu.PerThread) std.Io.Writer.Error!void {
const zcu = pt.zcu; const zcu = pt.zcu;
const ip = &zcu.intern_pool; const ip = &zcu.intern_pool;
switch (ip.indexToKey(ty.toIntern())) { switch (ip.indexToKey(ty.toIntern())) {


@ -15,7 +15,7 @@ const Value = @This();
ip_index: InternPool.Index, ip_index: InternPool.Index,
pub fn format(val: Value, writer: *std.io.Writer) !void { pub fn format(val: Value, writer: *std.Io.Writer) !void {
_ = val; _ = val;
_ = writer; _ = writer;
@compileError("do not use format values directly; use either fmtDebug or fmtValue"); @compileError("do not use format values directly; use either fmtDebug or fmtValue");
@ -23,7 +23,7 @@ pub fn format(val: Value, writer: *std.io.Writer) !void {
/// This is a debug function. In order to print values in a meaningful way /// This is a debug function. In order to print values in a meaningful way
/// we also need access to the type. /// we also need access to the type.
pub fn dump(start_val: Value, w: std.io.Writer) std.io.Writer.Error!void { pub fn dump(start_val: Value, w: *std.Io.Writer) std.Io.Writer.Error!void {
try w.print("(interned: {})", .{start_val.toIntern()}); try w.print("(interned: {})", .{start_val.toIntern()});
} }


@ -15,7 +15,7 @@ const BigIntConst = std.math.big.int.Const;
const BigIntMutable = std.math.big.int.Mutable; const BigIntMutable = std.math.big.int.Mutable;
const Target = std.Target; const Target = std.Target;
const Ast = std.zig.Ast; const Ast = std.zig.Ast;
const Writer = std.io.Writer; const Writer = std.Io.Writer;
const Zcu = @This(); const Zcu = @This();
const Compilation = @import("Compilation.zig"); const Compilation = @import("Compilation.zig");
@ -2872,7 +2872,7 @@ pub fn loadZirCache(gpa: Allocator, cache_file: std.fs.File) !Zir {
}; };
} }
pub fn loadZirCacheBody(gpa: Allocator, header: Zir.Header, cache_br: *std.io.Reader) !Zir { pub fn loadZirCacheBody(gpa: Allocator, header: Zir.Header, cache_br: *std.Io.Reader) !Zir {
var instructions: std.MultiArrayList(Zir.Inst) = .{}; var instructions: std.MultiArrayList(Zir.Inst) = .{};
errdefer instructions.deinit(gpa); errdefer instructions.deinit(gpa);
@ -2989,7 +2989,7 @@ pub fn saveZoirCache(cache_file: std.fs.File, stat: std.fs.File.Stat, zoir: Zoir
}; };
} }
pub fn loadZoirCacheBody(gpa: Allocator, header: Zoir.Header, cache_br: *std.io.Reader) !Zoir { pub fn loadZoirCacheBody(gpa: Allocator, header: Zoir.Header, cache_br: *std.Io.Reader) !Zoir {
var zoir: Zoir = .{ var zoir: Zoir = .{
.nodes = .empty, .nodes = .empty,
.extra = &.{}, .extra = &.{},
@ -4318,7 +4318,7 @@ const FormatAnalUnit = struct {
zcu: *Zcu, zcu: *Zcu,
}; };
fn formatAnalUnit(data: FormatAnalUnit, writer: *std.io.Writer) std.io.Writer.Error!void { fn formatAnalUnit(data: FormatAnalUnit, writer: *std.Io.Writer) std.Io.Writer.Error!void {
const zcu = data.zcu; const zcu = data.zcu;
const ip = &zcu.intern_pool; const ip = &zcu.intern_pool;
switch (data.unit.unwrap()) { switch (data.unit.unwrap()) {
@ -4344,7 +4344,7 @@ fn formatAnalUnit(data: FormatAnalUnit, writer: *std.io.Writer) std.io.Writer.Er
const FormatDependee = struct { dependee: InternPool.Dependee, zcu: *Zcu }; const FormatDependee = struct { dependee: InternPool.Dependee, zcu: *Zcu };
fn formatDependee(data: FormatDependee, writer: *std.io.Writer) std.io.Writer.Error!void { fn formatDependee(data: FormatDependee, writer: *std.Io.Writer) std.Io.Writer.Error!void {
const zcu = data.zcu; const zcu = data.zcu;
const ip = &zcu.intern_pool; const ip = &zcu.intern_pool;
switch (data.dependee) { switch (data.dependee) {

Some files were not shown because too many files have changed in this diff