diff --git a/lib/std/io.zig b/lib/std/io.zig
index b5ffd06de5..3f214062e4 100644
--- a/lib/std/io.zig
+++ b/lib/std/io.zig
@@ -16,9 +16,6 @@
 pub const BufferedReader = @import("io/BufferedReader.zig");
 pub const BufferedWriter = @import("io/BufferedWriter.zig");
 pub const AllocatingWriter = @import("io/AllocatingWriter.zig");
-pub const MultiWriter = @import("io/multi_writer.zig").MultiWriter;
-pub const multiWriter = @import("io/multi_writer.zig").multiWriter;
-
 pub const ChangeDetectionStream = @import("io/change_detection_stream.zig").ChangeDetectionStream;
 pub const changeDetectionStream = @import("io/change_detection_stream.zig").changeDetectionStream;
 
diff --git a/lib/std/io/multi_writer.zig b/lib/std/io/multi_writer.zig
deleted file mode 100644
index 385ac98d76..0000000000
--- a/lib/std/io/multi_writer.zig
+++ /dev/null
@@ -1,53 +0,0 @@
-const std = @import("../std.zig");
-const io = std.io;
-
-/// Takes a tuple of streams, and constructs a new stream that writes to all of them
-pub fn MultiWriter(comptime Writers: type) type {
-    comptime var ErrSet = error{};
-    inline for (@typeInfo(Writers).@"struct".fields) |field| {
-        const StreamType = field.type;
-        ErrSet = ErrSet || if (@hasDecl(StreamType, "Error")) StreamType.Error else anyerror;
-    }
-
-    return struct {
-        const Self = @This();
-
-        streams: Writers,
-
-        pub const Error = ErrSet;
-        pub const Writer = io.Writer(*Self, Error, write);
-
-        pub fn writer(self: *Self) Writer {
-            return .{ .context = self };
-        }
-
-        pub fn write(self: *Self, bytes: []const u8) Error!usize {
-            inline for (self.streams) |stream|
-                try stream.writeAll(bytes);
-            return bytes.len;
-        }
-    };
-}
-
-pub fn multiWriter(streams: anytype) MultiWriter(@TypeOf(streams)) {
-    return .{ .streams = streams };
-}
-
-const testing = std.testing;
-
-test "MultiWriter" {
-    var tmp = testing.tmpDir(.{});
-    defer tmp.cleanup();
-    var f = try tmp.dir.createFile("t.txt", .{});
-
-    var buf1: [255]u8 = undefined;
-    var fbs1 = io.fixedBufferStream(&buf1);
-    var buf2: [255]u8 = undefined;
-    var stream = multiWriter(.{ fbs1.writer(), f.writer() });
-
-    try stream.writer().print("HI", .{});
-    f.close();
-
-    try testing.expectEqualSlices(u8, "HI", fbs1.getWritten());
-    try testing.expectEqualSlices(u8, "HI", try tmp.dir.readFile("t.txt", &buf2));
-}
diff --git a/src/main.zig b/src/main.zig
index 7c4a3c1a63..561935035f 100644
--- a/src/main.zig
+++ b/src/main.zig
@@ -3331,12 +3331,18 @@ fn buildOutputType(
             // We are providing our own cache key, because this file has nothing
             // to do with the cache manifest.
             var hasher = Cache.Hasher.init("0123456789abcdef");
-            var w = io.multiWriter(.{ f.writer(), hasher.writer() });
-            var fifo = std.fifo.LinearFifo(u8, .{ .Static = 4096 }).init();
-            try fifo.pump(fs.File.stdin().reader().unbuffered(), w.writer().unbuffered());
+            var file_writer = f.writer();
+            var file_writer_bw = file_writer.interface().unbuffered();
+            var hasher_writer = hasher.writer(&file_writer_bw);
+            var buffer: [1000]u8 = undefined;
+            var bw = hasher_writer.interface().buffered(&buffer);
+            bw.writeFileAll(.stdin(), .{}) catch |err| switch (err) {
+                error.WriteFailed => fatal("failed to write {s}: {s}", .{ dump_path, @errorName(file_writer.err.?) }),
+                else => fatal("failed to pipe stdin to {s}: {s}", .{ dump_path, @errorName(err) }),
+            };
+            try bw.flush();
 
-            var bin_digest: Cache.BinDigest = undefined;
-            hasher.final(&bin_digest);
+            const bin_digest: Cache.BinDigest = hasher_writer.final();
             const sub_path = try std.fmt.allocPrint(arena, "tmp" ++ sep ++ "{x}-stdin{s}", .{
                 &bin_digest,
                 ext.canonicalName(target),