Merge remote-tracking branch 'origin/master' into wrangle-writer-buffering

Andrew Kelley 2025-07-15 20:52:56 -07:00
commit e9cb8a777b
8 changed files with 121 additions and 26 deletions


@@ -50,24 +50,6 @@ jobs:
         uses: actions/checkout@v4
       - name: Build and Test
         run: sh ci/aarch64-linux-release.sh
-  riscv64-linux-debug:
-    if: ${{ github.event_name == 'push' }}
-    timeout-minutes: 1020
-    runs-on: [self-hosted, Linux, riscv64]
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-      - name: Build and Test
-        run: sh ci/riscv64-linux-debug.sh
-  riscv64-linux-release:
-    if: ${{ github.event_name == 'push' }}
-    timeout-minutes: 900
-    runs-on: [self-hosted, Linux, riscv64]
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-      - name: Build and Test
-        run: sh ci/riscv64-linux-release.sh
   x86_64-macos-release:
     runs-on: "macos-13"
     env:

.github/workflows/riscv.yaml (new file)

@@ -0,0 +1,22 @@
+name: riscv
+on:
+  workflow_dispatch:
+permissions:
+  contents: read
+jobs:
+  riscv64-linux-debug:
+    timeout-minutes: 1020
+    runs-on: [self-hosted, Linux, riscv64]
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+      - name: Build and Test
+        run: sh ci/riscv64-linux-debug.sh
+  riscv64-linux-release:
+    timeout-minutes: 900
+    runs-on: [self-hosted, Linux, riscv64]
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+      - name: Build and Test
+        run: sh ci/riscv64-linux-release.sh


@@ -1303,6 +1303,13 @@ fn takeMultipleOf7Leb128(r: *Reader, comptime Result: type) TakeLeb128Error!Result {
 }
 
 /// Left-aligns data such that `r.seek` becomes zero.
+///
+/// If `r.seek` is not already zero then `buffer` is mutated, making it illegal
+/// to call this function with a const-casted `buffer`, such as in the case of
+/// `fixed`. This issue can be avoided:
+/// * in implementations, by attempting a read before a rebase, in which
+///   case the read will return `error.EndOfStream`, preventing the rebase.
+/// * in usage, by copying into a mutable buffer before initializing `fixed`.
 pub fn rebase(r: *Reader) void {
     if (r.seek == 0) return;
     const data = r.buffer[r.seek..r.end];
@@ -1315,6 +1322,13 @@ pub fn rebase(r: *Reader) void {
 /// if necessary.
 ///
 /// Asserts `capacity` is within the buffer capacity.
+///
+/// If the rebase occurs then `buffer` is mutated, making it illegal to call
+/// this function with a const-casted `buffer`, such as in the case of `fixed`.
+/// This issue can be avoided:
+/// * in implementations, by attempting a read before a rebase, in which
+///   case the read will return `error.EndOfStream`, preventing the rebase.
+/// * in usage, by copying into a mutable buffer before initializing `fixed`.
 pub fn rebaseCapacity(r: *Reader, capacity: usize) void {
     if (r.end > r.buffer.len - capacity) rebase(r);
 }
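
As an aside, here is a minimal usage sketch of the workaround these doc comments recommend: copying into a mutable buffer before initializing `fixed`. It is illustrative rather than part of the diff, and it assumes `takeByte` and `buffered` from the same Reader interface.

const std = @import("std");

test "copying into a mutable buffer keeps rebase legal" {
    // `fixed` over read-only memory (e.g. a string literal) must never be
    // rebased; backing it with mutable storage first makes rebase legal.
    var buf: [5]u8 = undefined;
    @memcpy(&buf, "hello");
    var r: std.io.Reader = .fixed(&buf);
    _ = try r.takeByte(); // r.seek is now 1
    r.rebase(); // mutates `buf`, which is legal here
    try std.testing.expectEqualStrings("ello", r.buffered());
}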


@@ -2194,8 +2194,10 @@ pub const Discarding = struct {
         const d: *Discarding = @alignCast(@fieldParentPtr("writer", w));
         d.count += w.end;
         w.end = 0;
+        if (limit == .nothing) return 0;
         if (file_reader.getSize()) |size| {
             const n = limit.minInt64(size - file_reader.pos);
+            if (n == 0) return error.EndOfStream;
             file_reader.seekBy(@intCast(n)) catch return error.Unimplemented;
             w.end = 0;
             d.count += n;
@@ -2489,18 +2491,17 @@
 
     fn sendFile(w: *Writer, file_reader: *File.Reader, limit: std.io.Limit) FileError!usize {
         if (File.Handle == void) return error.Unimplemented;
+        if (limit == .nothing) return 0;
         const a: *Allocating = @fieldParentPtr("writer", w);
         const gpa = a.allocator;
         var list = a.toArrayList();
         defer setArrayList(a, list);
         const pos = file_reader.pos;
         const additional = if (file_reader.getSize()) |size| size - pos else |_| std.atomic.cache_line;
+        if (additional == 0) return error.EndOfStream;
         list.ensureUnusedCapacity(gpa, limit.minInt64(additional)) catch return error.WriteFailed;
         const dest = limit.slice(list.unusedCapacitySlice());
-        const n = file_reader.read(dest) catch |err| switch (err) {
-            error.ReadFailed => return error.ReadFailed,
-            error.EndOfStream => 0,
-        };
+        const n = try file_reader.read(dest);
         list.items.len += n;
         return n;
     }
@@ -2522,3 +2523,43 @@ pub const Allocating = struct {
         try testing.expectEqualSlices(u8, "x: 42\ny: 1234\n", a.getWritten());
     }
 };
+
+test "discarding sendFile" {
+    var tmp_dir = testing.tmpDir(.{});
+    defer tmp_dir.cleanup();
+
+    const file = try tmp_dir.dir.createFile("input.txt", .{ .read = true });
+    defer file.close();
+
+    var r_buffer: [256]u8 = undefined;
+    var file_writer: std.fs.File.Writer = .init(file, &r_buffer);
+    try file_writer.interface.writeByte('h');
+    try file_writer.interface.flush();
+
+    var file_reader = file_writer.moveToReader();
+    try file_reader.seekTo(0);
+
+    var w_buffer: [256]u8 = undefined;
+    var discarding: std.io.Writer.Discarding = .init(&w_buffer);
+    _ = try file_reader.interface.streamRemaining(&discarding.writer);
+}
+
+test "allocating sendFile" {
+    var tmp_dir = testing.tmpDir(.{});
+    defer tmp_dir.cleanup();
+
+    const file = try tmp_dir.dir.createFile("input.txt", .{ .read = true });
+    defer file.close();
+
+    var r_buffer: [256]u8 = undefined;
+    var file_writer: std.fs.File.Writer = .init(file, &r_buffer);
+    try file_writer.interface.writeByte('h');
+    try file_writer.interface.flush();
+
+    var file_reader = file_writer.moveToReader();
+    try file_reader.seekTo(0);
+
+    var allocating: std.io.Writer.Allocating = .init(std.testing.allocator);
+    defer allocating.deinit();
+    _ = try file_reader.interface.streamRemaining(&allocating.writer);
+}
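
For context on what these new tests exercise: `Discarding` counts bytes without storing them, and `sendFile` is its fast path for file sources. A minimal sketch of the counting contract, assuming `print`, `flush`, and the `count` field from the same Writer interface (none of which are shown in this diff):

const std = @import("std");

test "Discarding counts what it throws away" {
    var buf: [8]u8 = undefined;
    var discarding: std.io.Writer.Discarding = .init(&buf);
    const w = &discarding.writer;

    // The 8-byte buffer forces a drain mid-print; Discarding's drain adds
    // everything to `count` and stores nothing.
    try w.print("{d} bottles of beer", .{99});
    try w.flush();

    // "99 bottles of beer" is 18 bytes, all discarded.
    try std.testing.expectEqual(@as(u64, 18), discarding.count);
}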


@@ -1690,6 +1690,40 @@ pub fn getpeername(s: ws2_32.SOCKET, name: *ws2_32.sockaddr, namelen: *ws2_32.socklen_t) i32 {
     return ws2_32.getpeername(s, name, @as(*i32, @ptrCast(namelen)));
 }
 
+pub fn sendmsg(
+    s: ws2_32.SOCKET,
+    msg: *ws2_32.WSAMSG_const,
+    flags: u32,
+) i32 {
+    var bytes_send: DWORD = undefined;
+    if (ws2_32.WSASendMsg(s, msg, flags, &bytes_send, null, null) == ws2_32.SOCKET_ERROR) {
+        return ws2_32.SOCKET_ERROR;
+    } else {
+        return @as(i32, @as(u31, @intCast(bytes_send)));
+    }
+}
+
+pub fn sendto(s: ws2_32.SOCKET, buf: [*]const u8, len: usize, flags: u32, to: ?*const ws2_32.sockaddr, to_len: ws2_32.socklen_t) i32 {
+    var buffer = ws2_32.WSABUF{ .len = @as(u31, @truncate(len)), .buf = @constCast(buf) };
+    var bytes_send: DWORD = undefined;
+    if (ws2_32.WSASendTo(s, @as([*]ws2_32.WSABUF, @ptrCast(&buffer)), 1, &bytes_send, flags, to, @as(i32, @intCast(to_len)), null, null) == ws2_32.SOCKET_ERROR) {
+        return ws2_32.SOCKET_ERROR;
+    } else {
+        return @as(i32, @as(u31, @intCast(bytes_send)));
+    }
+}
+
+pub fn recvfrom(s: ws2_32.SOCKET, buf: [*]u8, len: usize, flags: u32, from: ?*ws2_32.sockaddr, from_len: ?*ws2_32.socklen_t) i32 {
+    var buffer = ws2_32.WSABUF{ .len = @as(u31, @truncate(len)), .buf = buf };
+    var bytes_received: DWORD = undefined;
+    var flags_inout = flags;
+    if (ws2_32.WSARecvFrom(s, @as([*]ws2_32.WSABUF, @ptrCast(&buffer)), 1, &bytes_received, &flags_inout, from, @as(?*i32, @ptrCast(from_len)), null, null) == ws2_32.SOCKET_ERROR) {
+        return ws2_32.SOCKET_ERROR;
+    } else {
+        return @as(i32, @as(u31, @intCast(bytes_received)));
+    }
+}
+
 pub fn poll(fds: [*]ws2_32.pollfd, n: c_ulong, timeout: i32) i32 {
     return ws2_32.WSAPoll(fds, n, timeout);
 }
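
A caller-side sketch of the Winsock convention these shims preserve: `ws2_32.SOCKET_ERROR` on failure, otherwise a nonnegative byte count in an `i32`. The helper name and error mapping below are hypothetical; only `windows.sendto` and the `ws2_32` declarations come from the codebase.

const std = @import("std");
const windows = std.os.windows;
const ws2_32 = windows.ws2_32;

/// Hypothetical helper: sends one datagram and maps the Winsock-style
/// return value into a Zig error union.
fn sendOne(
    s: ws2_32.SOCKET,
    bytes: []const u8,
    to: *const ws2_32.sockaddr,
    to_len: ws2_32.socklen_t,
) !usize {
    const rc = windows.sendto(s, bytes.ptr, bytes.len, 0, to, to_len);
    if (rc == ws2_32.SOCKET_ERROR) {
        // Real code would inspect ws2_32.WSAGetLastError() here.
        return error.SendFailed;
    }
    return @intCast(rc);
}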


@@ -31,6 +31,7 @@ pub fn build(b: *std.Build) void {
     const tools_target = b.resolveTargetQuery(.{});
     for ([_][]const u8{
         // Alphabetically sorted. No need to build `tools/spirv/grammar.zig`.
+        "../../tools/dump-cov.zig",
         "../../tools/fetch_them_macos_headers.zig",
         "../../tools/gen_macos_headers_c.zig",
         "../../tools/gen_outline_atomics.zig",


@@ -1,5 +1,6 @@
 .{
-    .name = "standalone_test_cases",
+    .name = .standalone_test_cases,
+    .fingerprint = 0xc0dbdf9c818957be,
     .version = "0.0.0",
     .dependencies = .{
         .simple = .{


@@ -33,7 +33,7 @@ pub fn main() !void {
     defer coverage.deinit(gpa);
 
     var debug_info = std.debug.Info.load(gpa, exe_path, &coverage) catch |err| {
-        fatal("failed to load debug info for {}: {s}", .{ exe_path, @errorName(err) });
+        fatal("failed to load debug info for {f}: {s}", .{ exe_path, @errorName(err) });
     };
     defer debug_info.deinit(gpa);
@@ -42,10 +42,10 @@ pub fn main() !void {
         cov_path.sub_path,
         1 << 30,
         null,
-        @alignOf(SeenPcsHeader),
+        .of(SeenPcsHeader),
         null,
     ) catch |err| {
-        fatal("failed to load coverage file {}: {s}", .{ cov_path, @errorName(err) });
+        fatal("failed to load coverage file {f}: {s}", .{ cov_path, @errorName(err) });
     };
 
     var stdout_buffer: [4000]u8 = undefined;
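
The two `fatal` changes above track the std.fmt migration in which plain `{}` no longer calls a type's `format` method; `{f}` does. A hedged illustration of that rule: the `Point` type is invented, and the `format` signature is assumed from the new Writer-based interface.

const std = @import("std");

const Point = struct {
    x: i32,
    y: i32,

    // New-style formatter: receives the concrete *std.io.Writer interface.
    pub fn format(p: Point, w: *std.io.Writer) std.io.Writer.Error!void {
        try w.print("({d},{d})", .{ p.x, p.y });
    }
};

test "{f} selects the declared format method" {
    var buf: [16]u8 = undefined;
    const s = try std.fmt.bufPrint(&buf, "{f}", .{Point{ .x = 3, .y = 4 }});
    try std.testing.expectEqualStrings("(3,4)", s);
}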