More of the compiler updated to the new Writer API

This commit is contained in:
Andrew Kelley 2025-02-17 21:43:52 -08:00
parent 09af68de80
commit bf00eb3006
6 changed files with 28 additions and 68 deletions

View File

@ -125,9 +125,10 @@ pub const Options = struct {
/// Verify that the server certificate is authorized by a given ca bundle.
bundle: Certificate.Bundle,
},
/// If non-null, ssl secrets are logged to this stream. Creating such a log file allows
/// If non-null, ssl secrets are logged to this file. Creating such a log file allows
/// other programs with access to that file to decrypt all traffic over this connection.
ssl_key_log_file: ?*std.io.BufferedWriter = null,
/// TODO `std.crypto` should have no dependencies on `std.fs`.
ssl_key_log_file: ?std.fs.File = null,
};
pub fn InitError(comptime Stream: type) type {

View File

@ -123,59 +123,17 @@ pub fn GenericReader(
return @errorCast(self.any().readAllAlloc(allocator, max_size));
}
/// Allocate and return bytes read up to (but not including) `delimiter`,
/// failing with `error.StreamTooLong` once more than `max_size` bytes
/// would be needed. Caller owns the returned slice.
/// Thin wrapper: delegates to the type-erased `AnyReader` and narrows the
/// error set back to this reader's concrete one.
pub inline fn readUntilDelimiterAlloc(
    self: Self,
    allocator: Allocator,
    delimiter: u8,
    max_size: usize,
) (NoEofError || Allocator.Error || error{StreamTooLong})![]u8 {
    const erased = self.any();
    const result = erased.readUntilDelimiterAlloc(allocator, delimiter, max_size);
    // The erased call reports `anyerror`; cast restores the precise set.
    return @errorCast(result);
}
/// Read bytes into `buf` up to (but not including) `delimiter` and return
/// the filled portion of `buf`; `error.StreamTooLong` if the delimiter is
/// not found within `buf.len` bytes.
/// Thin wrapper over the type-erased reader with the error set narrowed.
pub inline fn readUntilDelimiter(
    self: Self,
    buf: []u8,
    delimiter: u8,
) (NoEofError || error{StreamTooLong})![]u8 {
    const erased = self.any();
    return @errorCast(erased.readUntilDelimiter(buf, delimiter));
}
/// Like `readUntilDelimiterAlloc`, but end-of-stream is not an error:
/// returns `null` when the stream ends before any delimiter is seen.
/// Caller owns the returned slice when non-null.
/// Thin wrapper: forwards to the type-erased `AnyReader` and narrows the
/// error set back to this reader's concrete one.
pub inline fn readUntilDelimiterOrEofAlloc(
    self: Self,
    allocator: Allocator,
    delimiter: u8,
    max_size: usize,
) (Error || Allocator.Error || error{StreamTooLong})!?[]u8 {
    const erased = self.any();
    const result = erased.readUntilDelimiterOrEofAlloc(allocator, delimiter, max_size);
    return @errorCast(result);
}
/// Like `readUntilDelimiter`, but end-of-stream is not an error: returns
/// `null` when the stream ends before any delimiter is seen; otherwise the
/// filled portion of `buf`.
/// Thin wrapper over the type-erased reader with the error set narrowed.
pub inline fn readUntilDelimiterOrEof(
    self: Self,
    buf: []u8,
    delimiter: u8,
) (Error || error{StreamTooLong})!?[]u8 {
    const erased = self.any();
    return @errorCast(erased.readUntilDelimiterOrEof(buf, delimiter));
}
pub inline fn streamUntilDelimiter(
self: Self,
writer: anytype,
writer: *std.io.BufferedWriter,
delimiter: u8,
optional_max_size: ?usize,
) (NoEofError || error{StreamTooLong} || @TypeOf(writer).Error)!void {
return @errorCast(self.any().streamUntilDelimiter(
) anyerror!void {
return self.any().streamUntilDelimiter(
writer,
delimiter,
optional_max_size,
));
);
}
pub inline fn skipUntilDelimiterOrEof(self: Self, delimiter: u8) Error!void {

View File

@ -663,7 +663,7 @@ pub fn printValue(
}
},
.error_set => {
if (actual_fmt.len > 0 and actual_fmt.len[0] == 's') {
if (actual_fmt.len > 0 and actual_fmt[0] == 's') {
return bw.writeAll(@errorName(value));
} else if (actual_fmt.len != 0) {
invalidFmtError(fmt, value);
@ -1147,13 +1147,11 @@ pub fn printByteSize(
const magnitude = switch (units) {
.decimal => @min(log2 / comptime std.math.log2(1000), mags_si.len - 1),
.binary => @min(log2 / 10, mags_iec.len - 1),
else => unreachable,
};
const new_value = std.math.lossyCast(f64, value) / std.math.pow(f64, std.math.lossyCast(f64, base), std.math.lossyCast(f64, magnitude));
const suffix = switch (units) {
.decimal => mags_si[magnitude],
.binary => mags_iec[magnitude],
else => unreachable,
};
const s = switch (magnitude) {
@ -1176,10 +1174,9 @@ pub fn printByteSize(
buf[i..][0..3].* = [_]u8{ suffix, 'i', 'B' };
i += 3;
},
else => unreachable,
}
return alignBufferOptions(buf[0..i], options, bw);
return alignBufferOptions(bw, buf[0..i], options);
}
// This ANY const is a workaround for: https://github.com/ziglang/zig/issues/7948

View File

@ -1880,9 +1880,8 @@ pub fn create(gpa: Allocator, arena: Allocator, options: CreateOptions) !*Compil
if (options.verbose_llvm_cpu_features) {
if (options.root_mod.resolved_target.llvm_cpu_features) |cf| print: {
std.debug.lockStdErr();
var stderr = std.debug.lockStdErr2();
defer std.debug.unlockStdErr();
const stderr = std.io.getStdErr().writer();
nosuspend {
stderr.print("compilation: {s}\n", .{options.root_name}) catch break :print;
stderr.print(" target: {s}\n", .{try target.zigTriple(arena)}) catch break :print;

View File

@ -184,7 +184,7 @@ pub const JobQueue = struct {
const hash_slice = hash.toSlice();
try buf.writer().print(
try buf.print(
\\ pub const {} = struct {{
\\
, .{std.zig.fmtId(hash_slice)});
@ -210,13 +210,13 @@ pub const JobQueue = struct {
}
}
try buf.writer().print(
try buf.print(
\\ pub const build_root = "{q}";
\\
, .{fetch.package_root});
if (fetch.has_build_zig) {
try buf.writer().print(
try buf.print(
\\ pub const build_zig = @import("{}");
\\
, .{std.zig.fmtEscapes(hash_slice)});
@ -229,7 +229,7 @@ pub const JobQueue = struct {
);
for (manifest.dependencies.keys(), manifest.dependencies.values()) |name, dep| {
const h = depDigest(fetch.package_root, jq.global_cache, dep) orelse continue;
try buf.writer().print(
try buf.print(
" .{{ \"{}\", \"{}\" }},\n",
.{ std.zig.fmtEscapes(name), std.zig.fmtEscapes(h.toSlice()) },
);
@ -261,7 +261,7 @@ pub const JobQueue = struct {
for (root_manifest.dependencies.keys(), root_manifest.dependencies.values()) |name, dep| {
const h = depDigest(root_fetch.package_root, jq.global_cache, dep) orelse continue;
try buf.writer().print(
try buf.print(
" .{{ \"{}\", \"{}\" }},\n",
.{ std.zig.fmtEscapes(name), std.zig.fmtEscapes(h.toSlice()) },
);
@ -1366,8 +1366,12 @@ fn unpackGitPack(f: *Fetch, out_dir: fs.Dir, resource: *Resource.Git) anyerror!U
{
const index_prog_node = f.prog_node.start("Index pack", 0);
defer index_prog_node.end();
var index_buffered_writer = std.io.bufferedWriter(index_file.writer());
try git.indexPack(gpa, object_format, pack_file, index_buffered_writer.writer());
var buffer: [4096]u8 = undefined;
var index_buffered_writer: std.io.BufferedWriter = .{
.unbuffered_writer = index_file.writer(),
.buffer = &buffer,
};
try git.indexPack(gpa, object_format, pack_file, &index_buffered_writer);
try index_buffered_writer.flush();
try index_file.sync();
}
@ -1638,12 +1642,13 @@ fn computeHash(f: *Fetch, pkg_path: Cache.Path, filter: Filter) RunError!Compute
}
fn dumpHashInfo(all_files: []const *const HashedFile) !void {
const stdout = std.io.getStdOut();
var bw = std.io.bufferedWriter(stdout.writer());
const w = bw.writer();
var buffer: [4096]u8 = undefined;
var bw: std.io.BufferedWriter = .{
.unbuffered_writer = std.io.getStdOut().writer(),
.buffer = &buffer,
};
for (all_files) |hashed_file| {
try w.print("{s}: {x}: {s}\n", .{
try bw.print("{s}: {x}: {s}\n", .{
@tagName(hashed_file.kind), &hashed_file.hash, hashed_file.normalized_path,
});
}

View File

@ -26,7 +26,7 @@ pub fn cmdEnv(arena: Allocator, args: []const []const u8) !void {
.buffer = &buffer,
.unbuffered_writer = std.io.getStdOut().writer(),
};
var jws = std.json.writeStream(bw, .{ .whitespace = .indent_1 });
var jws: std.json.Stringify = .{ .writer = &bw, .options = .{ .whitespace = .indent_1 } };
try jws.beginObject();