std.Io: delete CountingWriter

Andrew Kelley 2025-08-07 22:06:20 -07:00
parent 3fb86841cc
commit 94dd28b7f7
9 changed files with 39 additions and 91 deletions

View File

@@ -665,19 +665,16 @@ const ResourceTree = struct {
     pub fn writeCoff(
         self: *const ResourceTree,
         allocator: Allocator,
-        writer: anytype,
+        w: anytype,
         resources_in_data_order: []const Resource,
         lengths: Lengths,
         coff_string_table: *StringTable,
     ) ![]const std.coff.Symbol {
         if (self.type_to_name_map.count() == 0) {
-            try writer.writeByteNTimes(0, 16);
+            try w.writeByteNTimes(0, 16);
             return &.{};
         }
 
-        var counting_writer = std.io.countingWriter(writer);
-        const w = counting_writer.writer();
-
         var level2_list: std.ArrayListUnmanaged(*const NameToLanguageMap) = .empty;
         defer level2_list.deinit(allocator);
@@ -735,7 +732,6 @@ const ResourceTree = struct {
                 try level2_list.append(allocator, name_to_lang_map);
             }
         }
-        std.debug.assert(counting_writer.bytes_written == level2_start);
 
         const level3_start = level2_start + lengths.level2;
         var level3_address = level3_start;
@@ -771,7 +767,6 @@ const ResourceTree = struct {
                 try level3_list.append(allocator, lang_to_resources_map);
             }
         }
-        std.debug.assert(counting_writer.bytes_written == level3_start);
 
         var reloc_addresses = try allocator.alloc(u32, resources_in_data_order.len);
         defer allocator.free(reloc_addresses);
@@ -813,7 +808,6 @@ const ResourceTree = struct {
                 try resources_list.append(allocator, reloc_resource);
            }
        }
-        std.debug.assert(counting_writer.bytes_written == data_entries_start);
 
        for (resources_list.items, 0..) |reloc_resource, i| {
            // TODO: This logic works but is convoluted, would be good to clean this up
@@ -827,7 +821,6 @@ const ResourceTree = struct {
            };
            try w.writeStructEndian(data_entry, .little);
        }
-        std.debug.assert(counting_writer.bytes_written == strings_start);
 
        for (self.rsrc_string_table.keys()) |v| {
            const str = v.name;

View File

@@ -432,10 +432,6 @@ pub const FixedBufferStream = @import("Io/fixed_buffer_stream.zig").FixedBufferStream;
 /// Deprecated in favor of `Reader`.
 pub const fixedBufferStream = @import("Io/fixed_buffer_stream.zig").fixedBufferStream;
 /// Deprecated with no replacement; inefficient pattern
-pub const CountingWriter = @import("Io/counting_writer.zig").CountingWriter;
-/// Deprecated with no replacement; inefficient pattern
-pub const countingWriter = @import("Io/counting_writer.zig").countingWriter;
-/// Deprecated with no replacement; inefficient pattern
 pub const CountingReader = @import("Io/counting_reader.zig").CountingReader;
 /// Deprecated with no replacement; inefficient pattern
 pub const countingReader = @import("Io/counting_reader.zig").countingReader;
@@ -917,7 +913,6 @@ test {
     _ = Reader;
     _ = Writer;
     _ = BufferedWriter;
-    _ = CountingWriter;
     _ = CountingReader;
     _ = FixedBufferStream;
     _ = tty;

View File

@@ -1,39 +0,0 @@
-const std = @import("../std.zig");
-const io = std.io;
-const testing = std.testing;
-
-/// A Writer that counts how many bytes has been written to it.
-pub fn CountingWriter(comptime WriterType: type) type {
-    return struct {
-        bytes_written: u64,
-        child_stream: WriterType,
-
-        pub const Error = WriterType.Error;
-        pub const Writer = io.GenericWriter(*Self, Error, write);
-
-        const Self = @This();
-
-        pub fn write(self: *Self, bytes: []const u8) Error!usize {
-            const amt = try self.child_stream.write(bytes);
-            self.bytes_written += amt;
-            return amt;
-        }
-
-        pub fn writer(self: *Self) Writer {
-            return .{ .context = self };
-        }
-    };
-}
-
-pub fn countingWriter(child_stream: anytype) CountingWriter(@TypeOf(child_stream)) {
-    return .{ .bytes_written = 0, .child_stream = child_stream };
-}
-
-test CountingWriter {
-    var counting_stream = countingWriter(std.io.null_writer);
-    const stream = counting_stream.writer();
-
-    const bytes = "yay" ** 100;
-    stream.writeAll(bytes) catch unreachable;
-    try testing.expect(counting_stream.bytes_written == bytes.len);
-}
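
For reference, the rest of this commit reproduces this counting behavior with `std.Io.Writer.Discarding`. A minimal sketch of the replacement pattern, mirroring the deleted test above; the scratch-buffer size and test name are illustrative assumptions, not part of the commit:

const std = @import("std");

test "counting bytes with Writer.Discarding" {
    // Discarding drops everything written to it but keeps a running byte
    // count; it stages writes through a caller-provided scratch buffer.
    var trash: [64]u8 = undefined;
    var counting_stream: std.Io.Writer.Discarding = .init(&trash);
    const stream = &counting_stream.writer;

    const bytes = "yay" ** 100;
    stream.writeAll(bytes) catch unreachable;
    // fullCount() is bytes already discarded plus bytes still buffered.
    try std.testing.expect(counting_stream.fullCount() == bytes.len);
}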

View File

@@ -5,6 +5,7 @@ const fmt = std.fmt;
 const io = std.io;
 const mem = std.mem;
 const meta = std.meta;
+const Writer = std.Io.Writer;
 
 const fields_delimiter = "$";
 const fields_delimiter_scalar = '$';
@@ -188,19 +189,20 @@ pub fn deserialize(comptime HashResult: type, str: []const u8) Error!HashResult
 ///
 /// `params` can also include any additional parameters.
 pub fn serialize(params: anytype, str: []u8) Error![]const u8 {
-    var buf = io.fixedBufferStream(str);
-    try serializeTo(params, buf.writer());
-    return buf.getWritten();
+    var w: Writer = .fixed(str);
+    serializeTo(params, &w) catch return error.NoSpaceLeft;
+    return w.buffered();
 }
 
 /// Compute the number of bytes required to serialize `params`
 pub fn calcSize(params: anytype) usize {
-    var buf = io.countingWriter(io.null_writer);
-    serializeTo(params, buf.writer()) catch unreachable;
-    return @as(usize, @intCast(buf.bytes_written));
+    var trash: [128]u8 = undefined;
+    var d: Writer.Discarding = .init(&trash);
+    serializeTo(params, &d.writer) catch unreachable;
+    return @intCast(d.fullCount());
 }
 
-fn serializeTo(params: anytype, out: anytype) !void {
+fn serializeTo(params: anytype, out: *std.Io.Writer) !void {
     const HashResult = @TypeOf(params);
     if (@hasField(HashResult, version_param_name)) {
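
The rewritten `serialize` above also shows the fixed-buffer half of the new interface: `.fixed` wraps a caller-provided slice, a write past its end fails, and `buffered()` returns the prefix actually written. A hedged sketch of that pattern in isolation; the helper name and format string are invented for illustration:

const std = @import("std");

// Hypothetical helper: render into a caller-provided buffer and return the
// written prefix, or error.NoSpaceLeft if the buffer is too small.
fn render(buf: []u8, name: []const u8) error{NoSpaceLeft}![]const u8 {
    var w: std.Io.Writer = .fixed(buf);
    w.print("$v={s}$", .{name}) catch return error.NoSpaceLeft;
    return w.buffered();
}

test render {
    var buf: [32]u8 = undefined;
    try std.testing.expectEqualStrings("$v=scrypt$", try render(&buf, "scrypt"));
}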

View File

@@ -311,9 +311,10 @@ const crypt_format = struct {
     /// Compute the number of bytes required to serialize `params`
     pub fn calcSize(params: anytype) usize {
-        var buf = io.countingWriter(io.null_writer);
-        serializeTo(params, buf.writer()) catch unreachable;
-        return @as(usize, @intCast(buf.bytes_written));
+        var trash: [128]u8 = undefined;
+        var d: std.Io.Writer.Discarding = .init(&trash);
+        serializeTo(params, &d) catch unreachable;
+        return @intCast(d.fullCount());
     }
 
     fn serializeTo(params: anytype, out: anytype) !void {

View File

@@ -1204,11 +1204,10 @@ const TestEncode = struct {
         mnemonic: Instruction.Mnemonic,
         ops: []const Instruction.Operand,
     ) !void {
-        var stream = std.io.fixedBufferStream(&enc.buffer);
-        var count_writer = std.io.countingWriter(stream.writer());
+        var writer: std.Io.Writer = .fixed(&enc.buffer);
         const inst: Instruction = try .new(.none, mnemonic, ops);
-        try inst.encode(count_writer.writer(), .{});
-        enc.index = count_writer.bytes_written;
+        try inst.encode(&writer, .{});
+        enc.index = writer.bufferedLen();
     }
 
     fn code(enc: TestEncode) []const u8 {

View File

@@ -3152,10 +3152,11 @@ fn writeSyntheticSections(self: *Elf) !void {
     if (self.section_indexes.gnu_hash) |shndx| {
         const shdr = slice.items(.shdr)[shndx];
-        var buffer = try std.ArrayList(u8).initCapacity(gpa, self.gnu_hash.size());
-        defer buffer.deinit();
-        try self.gnu_hash.write(self, buffer.writer());
-        try self.pwriteAll(buffer.items, shdr.sh_offset);
+        var aw: std.Io.Writer.Allocating = .init(gpa);
+        try aw.ensureUnusedCapacity(self.gnu_hash.size());
+        defer aw.deinit();
+        try self.gnu_hash.write(self, &aw.writer);
+        try self.pwriteAll(aw.getWritten(), shdr.sh_offset);
     }
 
     if (self.section_indexes.versym) |shndx| {
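
The gnu_hash branch above swaps `std.ArrayList(u8)` plus `writer()` for `std.Io.Writer.Allocating`, the in-memory builder of the new interface. A minimal sketch of that pattern; only the `Allocating` calls are taken from the diff, the blob contents and names are invented:

const std = @import("std");

// Accumulate a byte image in memory, then write it out in one positional write.
fn writeImage(gpa: std.mem.Allocator, file: std.fs.File, offset: u64) !void {
    var aw: std.Io.Writer.Allocating = .init(gpa);
    defer aw.deinit();
    try aw.ensureUnusedCapacity(8); // optional pre-sizing, like gnu_hash.size() above
    try aw.writer.writeInt(u32, 0x1234_5678, .little);
    try aw.writer.writeAll("data");
    try file.pwriteAll(aw.getWritten(), offset);
}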

View File

@@ -1237,17 +1237,14 @@ pub const GnuHashSection = struct {
         return header_size + hash.num_bloom * 8 + hash.num_buckets * 4 + hash.num_exports * 4;
     }
 
-    pub fn write(hash: GnuHashSection, elf_file: *Elf, writer: anytype) !void {
+    pub fn write(hash: GnuHashSection, elf_file: *Elf, writer: *std.Io.Writer) !void {
         const exports = getExports(elf_file);
         const export_off = elf_file.dynsym.count() - hash.num_exports;
 
-        var counting = std.io.countingWriter(writer);
-        const cwriter = counting.writer();
-
-        try cwriter.writeInt(u32, hash.num_buckets, .little);
-        try cwriter.writeInt(u32, export_off, .little);
-        try cwriter.writeInt(u32, hash.num_bloom, .little);
-        try cwriter.writeInt(u32, bloom_shift, .little);
+        try writer.writeInt(u32, hash.num_buckets, .little);
+        try writer.writeInt(u32, export_off, .little);
+        try writer.writeInt(u32, hash.num_bloom, .little);
+        try writer.writeInt(u32, bloom_shift, .little);
 
         const comp = elf_file.base.comp;
         const gpa = comp.gpa;
@@ -1271,7 +1268,7 @@ pub const GnuHashSection = struct {
             bloom[idx] |= @as(u64, 1) << @as(u6, @intCast((h >> bloom_shift) % 64));
         }
 
-        try cwriter.writeAll(mem.sliceAsBytes(bloom));
+        try writer.writeAll(mem.sliceAsBytes(bloom));
 
         // Fill in the hash bucket indices
         const buckets = try gpa.alloc(u32, hash.num_buckets);
@@ -1284,7 +1281,7 @@ pub const GnuHashSection = struct {
             }
         }
 
-        try cwriter.writeAll(mem.sliceAsBytes(buckets));
+        try writer.writeAll(mem.sliceAsBytes(buckets));
 
         // Finally, write the hash table
         const table = try gpa.alloc(u32, hash.num_exports);
@@ -1300,9 +1297,7 @@ pub const GnuHashSection = struct {
             }
         }
 
-        try cwriter.writeAll(mem.sliceAsBytes(table));
-
-        assert(counting.bytes_written == hash.size());
+        try writer.writeAll(mem.sliceAsBytes(table));
     }
 
     pub fn hasher(name: [:0]const u8) u32 {
pub fn hasher(name: [:0]const u8) u32 { pub fn hasher(name: [:0]const u8) u32 {

View File

@@ -186,17 +186,18 @@ const FinalizeNodeResult = struct {
 /// Updates offset of this node in the output byte stream.
 fn finalizeNode(self: *Trie, node_index: Node.Index, offset_in_trie: u32) !FinalizeNodeResult {
-    var stream = std.io.countingWriter(std.io.null_writer);
-    const writer = stream.writer();
+    var trash_buffer: [64]u8 = undefined;
+    var stream: std.Io.Writer.Discarding = .init(&trash_buffer);
+    const writer = &stream.writer;
     const slice = self.nodes.slice();
     var node_size: u32 = 0;
 
     if (slice.items(.is_terminal)[node_index]) {
         const export_flags = slice.items(.export_flags)[node_index];
         const vmaddr_offset = slice.items(.vmaddr_offset)[node_index];
-        try leb.writeUleb128(writer, export_flags);
-        try leb.writeUleb128(writer, vmaddr_offset);
-        try leb.writeUleb128(writer, stream.bytes_written);
+        try writer.writeUleb128(export_flags);
+        try writer.writeUleb128(vmaddr_offset);
+        try writer.writeUleb128(stream.fullCount());
     } else {
         node_size += 1; // 0x0 for non-terminal nodes
     }
@@ -206,13 +207,13 @@ fn finalizeNode(self: *Trie, node_index: Node.Index, offset_in_trie: u32) !FinalizeNodeResult {
         const edge = &self.edges.items[edge_index];
         const next_node_offset = slice.items(.trie_offset)[edge.node];
         node_size += @intCast(edge.label.len + 1);
-        try leb.writeUleb128(writer, next_node_offset);
+        try writer.writeUleb128(next_node_offset);
     }
 
     const trie_offset = slice.items(.trie_offset)[node_index];
     const updated = offset_in_trie != trie_offset;
     slice.items(.trie_offset)[node_index] = offset_in_trie;
-    node_size += @intCast(stream.bytes_written);
+    node_size += @intCast(stream.fullCount());
     return .{ .node_size = node_size, .updated = updated };
 }
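
`finalizeNode` uses the same dry-run trick that replaced `countingWriter(null_writer)` elsewhere in this commit: write through a `Discarding` writer, then read the size back from `fullCount()`. A small self-contained sketch of measuring ULEB128 lengths this way; the function and values are illustrative:

const std = @import("std");

// Measure how many bytes a sequence of ULEB128 values would occupy,
// without keeping any output.
fn uleb128Len(values: []const u64) !u64 {
    var trash: [64]u8 = undefined;
    var d: std.Io.Writer.Discarding = .init(&trash);
    for (values) |v| try d.writer.writeUleb128(v);
    return d.fullCount();
}

test uleb128Len {
    // 0x7f fits in one ULEB128 byte; 0x80 needs two.
    try std.testing.expectEqual(@as(u64, 3), try uleb128Len(&.{ 0x7f, 0x80 }));
}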