This commit is contained in:
Andrew Kelley 2025-07-16 18:57:55 -07:00
parent 51ebebcea3
commit fbf8b16ae6
7 changed files with 63 additions and 63 deletions

View File

@@ -530,18 +530,16 @@ test isUnderscore {
try std.testing.expect(!isUnderscore("\\x5f"));
}
pub fn readSourceFileToEndAlloc(gpa: Allocator, input: std.fs.File, size_hint: usize) ![:0]u8 {
pub fn readSourceFileToEndAlloc(gpa: Allocator, file_reader: *std.fs.File.Reader) ![:0]u8 {
var buffer: std.ArrayListAlignedUnmanaged(u8, .@"2") = .empty;
defer buffer.deinit(gpa);
try buffer.ensureUnusedCapacity(gpa, size_hint);
if (file_reader.getSize()) |size| {
const casted_size = std.math.cast(u32, size) orelse return error.StreamTooLong;
try buffer.ensureTotalCapacityPrecise(gpa, casted_size);
} else |_| {}
input.readIntoArrayList(gpa, .limited(max_src_size), .@"2", &buffer) catch |err| switch (err) {
error.ConnectionResetByPeer => unreachable,
error.ConnectionTimedOut => unreachable,
error.NotOpenForReading => unreachable,
else => |e| return e,
};
try file_reader.interface.appendRemaining(gpa, .@"2", &buffer, .limited(max_src_size));
// Detect unsupported file types with their Byte Order Mark
const unsupported_boms = [_][]const u8{

View File

@@ -207,7 +207,7 @@ pub fn renderAlloc(tree: Ast, gpa: Allocator) error{OutOfMemory}![]u8 {
var aw: std.io.Writer.Allocating = .init(gpa);
defer aw.deinit();
render(tree, gpa, &aw.writer, .{}) catch |err| switch (err) {
error.WriteFailed => return error.OutOfMemory,
error.WriteFailed, error.OutOfMemory => return error.OutOfMemory,
};
return aw.toOwnedSlice();
}
@@ -215,7 +215,7 @@ pub fn renderAlloc(tree: Ast, gpa: Allocator) error{OutOfMemory}![]u8 {
pub const Render = @import("Ast/Render.zig");
pub fn render(tree: Ast, gpa: Allocator, w: *Writer, fixups: Render.Fixups) Render.Error!void {
return Render.tree(gpa, w, tree, fixups);
return Render.renderTree(gpa, w, tree, fixups);
}
/// Returns an extra offset for column and byte offset of errors that

View File

@@ -2399,24 +2399,24 @@ fn renderAsmLegacy(
try renderToken(r, first_clobber - 2, .none);
try renderToken(r, first_clobber - 1, .space);
try ais.writer().writeAll(".{ ");
try ais.writeAll(".{ ");
var tok_i = first_clobber;
while (true) : (tok_i += 1) {
try ais.writer().writeByte('.');
try ais.writeByte('.');
_ = try writeStringLiteralAsIdentifier(r, tok_i);
try ais.writer().writeAll(" = true");
try ais.writeAll(" = true");
tok_i += 1;
switch (tree.tokenTag(tok_i)) {
.r_paren => {
try ais.writer().writeAll(" }");
try ais.writeAll(" }");
ais.popIndent();
return renderToken(r, tok_i, space);
},
.comma => {
if (tree.tokenTag(tok_i + 1) == .r_paren) {
try ais.writer().writeAll(" }");
try ais.writeAll(" }");
ais.popIndent();
return renderToken(r, tok_i + 1, space);
} else {
@@ -2511,16 +2511,16 @@ fn renderAsmLegacy(
};
try renderToken(r, colon3, .space); // :
try ais.writer().writeAll(".{ ");
try ais.writeAll(".{ ");
const first_clobber = asm_node.first_clobber.?;
var tok_i = first_clobber;
while (true) {
switch (tree.tokenTag(tok_i + 1)) {
.r_paren => {
ais.setIndentDelta(indent_delta);
try ais.writer().writeByte('.');
try ais.writeByte('.');
const lexeme_len = try writeStringLiteralAsIdentifier(r, tok_i);
try ais.writer().writeAll(" = true }");
try ais.writeAll(" = true }");
try renderSpace(r, tok_i, lexeme_len, .newline);
ais.popIndent();
return renderToken(r, tok_i + 1, space);
@@ -2529,17 +2529,17 @@ fn renderAsmLegacy(
switch (tree.tokenTag(tok_i + 2)) {
.r_paren => {
ais.setIndentDelta(indent_delta);
try ais.writer().writeByte('.');
try ais.writeByte('.');
const lexeme_len = try writeStringLiteralAsIdentifier(r, tok_i);
try ais.writer().writeAll(" = true }");
try ais.writeAll(" = true }");
try renderSpace(r, tok_i, lexeme_len, .newline);
ais.popIndent();
return renderToken(r, tok_i + 2, space);
},
else => {
try ais.writer().writeByte('.');
try ais.writeByte('.');
_ = try writeStringLiteralAsIdentifier(r, tok_i);
try ais.writer().writeAll(" = true");
try ais.writeAll(" = true");
try renderToken(r, tok_i + 1, .space);
tok_i += 2;
},
@@ -2827,7 +2827,7 @@ fn renderSpace(r: *Render, token_index: Ast.TokenIndex, lexeme_len: usize, space
if (space == .skip) return;
if (space == .comma and next_token_tag != .comma) {
try ais.underlying_writer.writeByte(',');
try ais.writeByte(',');
}
if (space == .semicolon or space == .comma) ais.enableSpaceMode(space);
defer ais.disableSpaceMode();
@@ -3242,11 +3242,11 @@ fn writeStringLiteralAsIdentifier(r: *Render, token_index: Ast.TokenIndex) !usiz
const lexeme = tokenSliceForRender(tree, token_index);
const unquoted = lexeme[1..][0 .. lexeme.len - 2];
if (std.zig.isValidId(unquoted)) {
try ais.writer().writeAll(unquoted);
try ais.writeAll(unquoted);
return unquoted.len;
} else {
try ais.writer().writeByte('@');
try ais.writer().writeAll(lexeme);
try ais.writeByte('@');
try ais.writeAll(lexeme);
return lexeme.len + 1;
}
}

View File

@@ -1,4 +1,19 @@
in: *std.io.Reader,
const Server = @This();
const builtin = @import("builtin");
const std = @import("std");
const Allocator = std.mem.Allocator;
const assert = std.debug.assert;
const native_endian = builtin.target.cpu.arch.endian();
const need_bswap = native_endian != .little;
const Cache = std.Build.Cache;
const OutMessage = std.zig.Server.Message;
const InMessage = std.zig.Client.Message;
const Reader = std.Io.Reader;
const Writer = std.Io.Writer;
in: *Reader,
out: *Writer,
pub const Message = struct {
@@ -93,7 +108,7 @@ pub const Message = struct {
};
pub const Options = struct {
in: *std.io.Reader,
in: *Reader,
out: *Writer,
zig_version: []const u8,
};
@@ -108,7 +123,7 @@ pub fn init(options: Options) !Server {
}
pub fn receiveMessage(s: *Server) !InMessage.Header {
return try s.in.takeStructEndian(InMessage.Header, .little);
return s.in.takeStruct(InMessage.Header, .little);
}
pub fn receiveBody_u32(s: *Server) !u32 {
@@ -203,16 +218,3 @@ pub fn serveTestMetadata(s: *Server, test_metadata: TestMetadata) !void {
try s.out.writeAll(test_metadata.string_bytes);
try s.out.flush();
}
const OutMessage = std.zig.Server.Message;
const InMessage = std.zig.Client.Message;
const Server = @This();
const builtin = @import("builtin");
const std = @import("std");
const Allocator = std.mem.Allocator;
const assert = std.debug.assert;
const native_endian = builtin.target.cpu.arch.endian();
const need_bswap = native_endian != .little;
const Cache = std.Build.Cache;
const Writer = std.io.Writer;

View File

@@ -1,6 +1,11 @@
const BitcodeReader = @This();
const std = @import("../../std.zig");
const assert = std.debug.assert;
allocator: std.mem.Allocator,
record_arena: std.heap.ArenaAllocator.State,
reader: *std.io.Reader,
reader: *std.Io.Reader,
keep_names: bool,
bit_buffer: u32,
bit_offset: u5,
@@ -93,7 +98,7 @@ pub const Record = struct {
};
pub const InitOptions = struct {
reader: *std.io.Reader,
reader: *std.Io.Reader,
keep_names: bool = false,
};
pub fn init(allocator: std.mem.Allocator, options: InitOptions) BitcodeReader {
@@ -376,12 +381,12 @@ fn read32Bits(bc: *BitcodeReader) !u32 {
fn readBytes(bc: *BitcodeReader, bytes: []u8) !void {
assert(bc.bit_offset == 0);
try bc.reader.read(bytes);
try bc.reader.readSliceAll(bytes);
const trailing_bytes = bytes.len % 4;
if (trailing_bytes > 0) {
var bit_buffer: [4]u8 = @splat(0);
try bc.reader.read(bit_buffer[trailing_bytes..]);
try bc.reader.readSliceAll(bit_buffer[trailing_bytes..]);
bc.bit_buffer = std.mem.readInt(u32, &bit_buffer, .little);
bc.bit_offset = @intCast(8 * trailing_bytes);
}
@@ -508,8 +513,3 @@ const Abbrev = struct {
}
};
};
const assert = std.debug.assert;
const std = @import("../../std.zig");
const BitcodeReader = @This();

View File

@@ -6367,31 +6367,32 @@ test "ampersand" {
var fixed_buffer_mem: [100 * 1024]u8 = undefined;
fn testParse(source: [:0]const u8, allocator: mem.Allocator, anything_changed: *bool) ![]u8 {
const stderr: std.fs.File = .stderr();
const stderr_writer = stderr.writer().unbuffered();
var buffer: [64]u8 = undefined;
const stderr = std.debug.lockStderrWriter(&buffer);
defer std.debug.unlockStderrWriter();
var tree = try std.zig.Ast.parse(allocator, source, .zig);
defer tree.deinit(allocator);
for (tree.errors) |parse_error| {
const loc = tree.tokenLocation(0, parse_error.token);
try stderr_writer.print("(memory buffer):{d}:{d}: error: ", .{ loc.line + 1, loc.column + 1 });
try tree.renderError(parse_error, stderr_writer);
try stderr_writer.print("\n{s}\n", .{source[loc.line_start..loc.line_end]});
try stderr.print("(memory buffer):{d}:{d}: error: ", .{ loc.line + 1, loc.column + 1 });
try tree.renderError(parse_error, stderr);
try stderr.print("\n{s}\n", .{source[loc.line_start..loc.line_end]});
{
var i: usize = 0;
while (i < loc.column) : (i += 1) {
try stderr_writer.writeAll(" ");
try stderr.writeAll(" ");
}
try stderr_writer.writeAll("^");
try stderr.writeAll("^");
}
try stderr_writer.writeAll("\n");
try stderr.writeAll("\n");
}
if (tree.errors.len != 0) {
return error.ParseError;
}
const formatted = try tree.render(allocator);
const formatted = try tree.renderAlloc(allocator);
anything_changed.* = !mem.eql(u8, formatted, source);
return formatted;
}

View File

@@ -1,7 +1,6 @@
const std = @import("std");
const builtin = @import("builtin");
const mem = std.mem;
const io = std.io;
const fs = std.fs;
const fmt = std.fmt;
const testing = std.testing;
@@ -344,7 +343,7 @@ fn testParser(
expected_model: *const Target.Cpu.Model,
input: []const u8,
) !void {
var r: std.io.Reader = .fixed(input);
var r: std.Io.Reader = .fixed(input);
const result = try parser.parse(arch, &r);
try testing.expectEqual(expected_model, result.?.model);
try testing.expect(expected_model.features.eql(result.?.features));
@@ -357,7 +356,7 @@ fn testParser(
// When all the lines have been analyzed the finalize method is called.
fn CpuinfoParser(comptime impl: anytype) type {
return struct {
fn parse(arch: Target.Cpu.Arch, reader: *std.io.Reader) !?Target.Cpu {
fn parse(arch: Target.Cpu.Arch, reader: *std.Io.Reader) !?Target.Cpu {
var obj: impl = .{};
while (reader.takeDelimiterExclusive('\n')) |line| {
const colon_pos = mem.indexOfScalar(u8, line, ':') orelse continue;