much of the compiler upgraded to new API

This commit is contained in:
Andrew Kelley 2025-02-17 15:51:48 -08:00
parent f3be721785
commit bb1dffcf32
26 changed files with 847 additions and 877 deletions

View File

@ -125,9 +125,9 @@ pub const Options = struct {
/// Verify that the server certificate is authorized by a given ca bundle.
bundle: Certificate.Bundle,
},
/// If non-null, ssl secrets are logged to this file. Creating such a log file allows
/// If non-null, ssl secrets are logged to this stream. Creating such a log file allows
/// other programs with access to that file to decrypt all traffic over this connection.
ssl_key_log_file: ?std.fs.File = null,
ssl_key_log_file: ?*std.io.BufferedWriter = null,
};
pub fn InitError(comptime Stream: type) type {

View File

@ -282,8 +282,6 @@ pub const Reader = GenericReader;
pub const Writer = @import("io/Writer.zig");
pub const AnyReader = @import("io/Reader.zig");
/// Deprecated; to be removed after 0.14.0 is tagged.
pub const AnyWriter = Writer;
pub const SeekableStream = @import("io/seekable_stream.zig").SeekableStream;

View File

@ -663,9 +663,14 @@ pub fn printValue(
}
},
.error_set => {
if (actual_fmt.len != 0) invalidFmtError(fmt, value);
try bw.writeAll("error.");
return bw.writeAll(@errorName(value));
if (actual_fmt.len > 0 and actual_fmt[0] == 's') {
return bw.writeAll(@errorName(value));
} else if (actual_fmt.len != 0) {
invalidFmtError(fmt, value);
} else {
try bw.writeAll("error.");
return bw.writeAll(@errorName(value));
}
},
.@"enum" => |enumInfo| {
try bw.writeAll(@typeName(T));

View File

@ -603,9 +603,10 @@ fn PaxIterator(comptime ReaderType: type) type {
return null;
}
fn readUntil(self: *Self, delimiter: u8) ![]const u8 {
var fbs: std.io.FixedBufferStream = .{ .buffer = &self.scratch };
try self.reader.streamUntilDelimiter(fbs.writer(), delimiter, null);
fn readUntil(self: *Self, delimiter: u8) anyerror![]const u8 {
var fbs: std.io.BufferedWriter = undefined;
fbs.initFixed(&self.scratch);
try self.reader.streamUntilDelimiter(&fbs, delimiter, null);
return fbs.getWritten();
}

View File

@ -199,27 +199,24 @@ pub fn parse(gpa: Allocator, source: [:0]const u8, mode: Mode) Allocator.Error!A
/// `gpa` is used for allocating the resulting formatted source code.
/// Caller owns the returned slice of bytes, allocated with `gpa`.
pub fn render(tree: Ast, gpa: Allocator) RenderError![]u8 {
var buffer = std.ArrayList(u8).init(gpa);
defer buffer.deinit();
pub fn renderAlloc(tree: Ast, gpa: Allocator) RenderError![]u8 {
var aw: std.io.AllocatingWriter = undefined;
const bw = aw.init(gpa);
errdefer aw.deinit();
render(tree, gpa, bw, .{}) catch |err| return @errorCast(err); // TODO try @errorCast(...)
return aw.toOwnedSlice();
}
try tree.renderToArrayList(&buffer, .{});
return buffer.toOwnedSlice();
pub fn render(tree: Ast, gpa: Allocator, bw: *std.io.BufferedWriter, fixups: Fixups) anyerror!void {
return @import("./render.zig").renderTree(gpa, bw, tree, fixups);
}
pub const Fixups = private_render.Fixups;
pub fn renderToArrayList(tree: Ast, buffer: *std.ArrayList(u8), fixups: Fixups) RenderError!void {
return @import("./render.zig").renderTree(buffer, tree, fixups);
}
/// Returns an extra offset for column and byte offset of errors that
/// should point after the token in the error message.
pub fn errorOffset(tree: Ast, parse_error: Error) u32 {
return if (parse_error.token_is_prev)
@as(u32, @intCast(tree.tokenSlice(parse_error.token).len))
else
0;
return if (parse_error.token_is_prev) @intCast(tree.tokenSlice(parse_error.token).len) else 0;
}
pub fn tokenLocation(self: Ast, start_offset: ByteOffset, token_index: TokenIndex) Location {
@ -318,254 +315,254 @@ pub fn rootDecls(tree: Ast) []const Node.Index {
}
}
pub fn renderError(tree: Ast, parse_error: Error, stream: anytype) !void {
pub fn renderError(tree: Ast, parse_error: Error, bw: *std.io.BufferedWriter) anyerror!void {
switch (parse_error.tag) {
.asterisk_after_ptr_deref => {
// Note that the token will point at the `.*` but ideally the source
// location would point to the `*` after the `.*`.
return stream.writeAll("'.*' cannot be followed by '*'; are you missing a space?");
return bw.writeAll("'.*' cannot be followed by '*'; are you missing a space?");
},
.chained_comparison_operators => {
return stream.writeAll("comparison operators cannot be chained");
return bw.writeAll("comparison operators cannot be chained");
},
.decl_between_fields => {
return stream.writeAll("declarations are not allowed between container fields");
return bw.writeAll("declarations are not allowed between container fields");
},
.expected_block => {
return stream.print("expected block, found '{s}'", .{
return bw.print("expected block, found '{s}'", .{
tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.expected_block_or_assignment => {
return stream.print("expected block or assignment, found '{s}'", .{
return bw.print("expected block or assignment, found '{s}'", .{
tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.expected_block_or_expr => {
return stream.print("expected block or expression, found '{s}'", .{
return bw.print("expected block or expression, found '{s}'", .{
tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.expected_block_or_field => {
return stream.print("expected block or field, found '{s}'", .{
return bw.print("expected block or field, found '{s}'", .{
tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.expected_container_members => {
return stream.print("expected test, comptime, var decl, or container field, found '{s}'", .{
return bw.print("expected test, comptime, var decl, or container field, found '{s}'", .{
tree.tokenTag(parse_error.token).symbol(),
});
},
.expected_expr => {
return stream.print("expected expression, found '{s}'", .{
return bw.print("expected expression, found '{s}'", .{
tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.expected_expr_or_assignment => {
return stream.print("expected expression or assignment, found '{s}'", .{
return bw.print("expected expression or assignment, found '{s}'", .{
tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.expected_expr_or_var_decl => {
return stream.print("expected expression or var decl, found '{s}'", .{
return bw.print("expected expression or var decl, found '{s}'", .{
tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.expected_fn => {
return stream.print("expected function, found '{s}'", .{
return bw.print("expected function, found '{s}'", .{
tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.expected_inlinable => {
return stream.print("expected 'while' or 'for', found '{s}'", .{
return bw.print("expected 'while' or 'for', found '{s}'", .{
tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.expected_labelable => {
return stream.print("expected 'while', 'for', 'inline', or '{{', found '{s}'", .{
return bw.print("expected 'while', 'for', 'inline', or '{{', found '{s}'", .{
tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.expected_param_list => {
return stream.print("expected parameter list, found '{s}'", .{
return bw.print("expected parameter list, found '{s}'", .{
tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.expected_prefix_expr => {
return stream.print("expected prefix expression, found '{s}'", .{
return bw.print("expected prefix expression, found '{s}'", .{
tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.expected_primary_type_expr => {
return stream.print("expected primary type expression, found '{s}'", .{
return bw.print("expected primary type expression, found '{s}'", .{
tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.expected_pub_item => {
return stream.writeAll("expected function or variable declaration after pub");
return bw.writeAll("expected function or variable declaration after pub");
},
.expected_return_type => {
return stream.print("expected return type expression, found '{s}'", .{
return bw.print("expected return type expression, found '{s}'", .{
tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.expected_semi_or_else => {
return stream.writeAll("expected ';' or 'else' after statement");
return bw.writeAll("expected ';' or 'else' after statement");
},
.expected_semi_or_lbrace => {
return stream.writeAll("expected ';' or block after function prototype");
return bw.writeAll("expected ';' or block after function prototype");
},
.expected_statement => {
return stream.print("expected statement, found '{s}'", .{
return bw.print("expected statement, found '{s}'", .{
tree.tokenTag(parse_error.token).symbol(),
});
},
.expected_suffix_op => {
return stream.print("expected pointer dereference, optional unwrap, or field access, found '{s}'", .{
return bw.print("expected pointer dereference, optional unwrap, or field access, found '{s}'", .{
tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.expected_type_expr => {
return stream.print("expected type expression, found '{s}'", .{
return bw.print("expected type expression, found '{s}'", .{
tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.expected_var_decl => {
return stream.print("expected variable declaration, found '{s}'", .{
return bw.print("expected variable declaration, found '{s}'", .{
tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.expected_var_decl_or_fn => {
return stream.print("expected variable declaration or function, found '{s}'", .{
return bw.print("expected variable declaration or function, found '{s}'", .{
tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.expected_loop_payload => {
return stream.print("expected loop payload, found '{s}'", .{
return bw.print("expected loop payload, found '{s}'", .{
tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.expected_container => {
return stream.print("expected a struct, enum or union, found '{s}'", .{
return bw.print("expected a struct, enum or union, found '{s}'", .{
tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.extern_fn_body => {
return stream.writeAll("extern functions have no body");
return bw.writeAll("extern functions have no body");
},
.extra_addrspace_qualifier => {
return stream.writeAll("extra addrspace qualifier");
return bw.writeAll("extra addrspace qualifier");
},
.extra_align_qualifier => {
return stream.writeAll("extra align qualifier");
return bw.writeAll("extra align qualifier");
},
.extra_allowzero_qualifier => {
return stream.writeAll("extra allowzero qualifier");
return bw.writeAll("extra allowzero qualifier");
},
.extra_const_qualifier => {
return stream.writeAll("extra const qualifier");
return bw.writeAll("extra const qualifier");
},
.extra_volatile_qualifier => {
return stream.writeAll("extra volatile qualifier");
return bw.writeAll("extra volatile qualifier");
},
.ptr_mod_on_array_child_type => {
return stream.print("pointer modifier '{s}' not allowed on array child type", .{
return bw.print("pointer modifier '{s}' not allowed on array child type", .{
tree.tokenTag(parse_error.token).symbol(),
});
},
.invalid_bit_range => {
return stream.writeAll("bit range not allowed on slices and arrays");
return bw.writeAll("bit range not allowed on slices and arrays");
},
.same_line_doc_comment => {
return stream.writeAll("same line documentation comment");
return bw.writeAll("same line documentation comment");
},
.unattached_doc_comment => {
return stream.writeAll("unattached documentation comment");
return bw.writeAll("unattached documentation comment");
},
.test_doc_comment => {
return stream.writeAll("documentation comments cannot be attached to tests");
return bw.writeAll("documentation comments cannot be attached to tests");
},
.comptime_doc_comment => {
return stream.writeAll("documentation comments cannot be attached to comptime blocks");
return bw.writeAll("documentation comments cannot be attached to comptime blocks");
},
.varargs_nonfinal => {
return stream.writeAll("function prototype has parameter after varargs");
return bw.writeAll("function prototype has parameter after varargs");
},
.expected_continue_expr => {
return stream.writeAll("expected ':' before while continue expression");
return bw.writeAll("expected ':' before while continue expression");
},
.expected_semi_after_decl => {
return stream.writeAll("expected ';' after declaration");
return bw.writeAll("expected ';' after declaration");
},
.expected_semi_after_stmt => {
return stream.writeAll("expected ';' after statement");
return bw.writeAll("expected ';' after statement");
},
.expected_comma_after_field => {
return stream.writeAll("expected ',' after field");
return bw.writeAll("expected ',' after field");
},
.expected_comma_after_arg => {
return stream.writeAll("expected ',' after argument");
return bw.writeAll("expected ',' after argument");
},
.expected_comma_after_param => {
return stream.writeAll("expected ',' after parameter");
return bw.writeAll("expected ',' after parameter");
},
.expected_comma_after_initializer => {
return stream.writeAll("expected ',' after initializer");
return bw.writeAll("expected ',' after initializer");
},
.expected_comma_after_switch_prong => {
return stream.writeAll("expected ',' after switch prong");
return bw.writeAll("expected ',' after switch prong");
},
.expected_comma_after_for_operand => {
return stream.writeAll("expected ',' after for operand");
return bw.writeAll("expected ',' after for operand");
},
.expected_comma_after_capture => {
return stream.writeAll("expected ',' after for capture");
return bw.writeAll("expected ',' after for capture");
},
.expected_initializer => {
return stream.writeAll("expected field initializer");
return bw.writeAll("expected field initializer");
},
.mismatched_binary_op_whitespace => {
return stream.print("binary operator '{s}' has whitespace on one side, but not the other", .{tree.tokenTag(parse_error.token).lexeme().?});
return bw.print("binary operator '{s}' has whitespace on one side, but not the other", .{tree.tokenTag(parse_error.token).lexeme().?});
},
.invalid_ampersand_ampersand => {
return stream.writeAll("ambiguous use of '&&'; use 'and' for logical AND, or change whitespace to ' & &' for bitwise AND");
return bw.writeAll("ambiguous use of '&&'; use 'and' for logical AND, or change whitespace to ' & &' for bitwise AND");
},
.c_style_container => {
return stream.print("'{s} {s}' is invalid", .{
return bw.print("'{s} {s}' is invalid", .{
parse_error.extra.expected_tag.symbol(), tree.tokenSlice(parse_error.token),
});
},
.zig_style_container => {
return stream.print("to declare a container do 'const {s} = {s}'", .{
return bw.print("to declare a container do 'const {s} = {s}'", .{
tree.tokenSlice(parse_error.token), parse_error.extra.expected_tag.symbol(),
});
},
.previous_field => {
return stream.writeAll("field before declarations here");
return bw.writeAll("field before declarations here");
},
.next_field => {
return stream.writeAll("field after declarations here");
return bw.writeAll("field after declarations here");
},
.expected_var_const => {
return stream.writeAll("expected 'var' or 'const' before variable declaration");
return bw.writeAll("expected 'var' or 'const' before variable declaration");
},
.wrong_equal_var_decl => {
return stream.writeAll("variable initialized with '==' instead of '='");
return bw.writeAll("variable initialized with '==' instead of '='");
},
.var_const_decl => {
return stream.writeAll("use 'var' or 'const' to declare variable");
return bw.writeAll("use 'var' or 'const' to declare variable");
},
.extra_for_capture => {
return stream.writeAll("extra capture in for loop");
return bw.writeAll("extra capture in for loop");
},
.for_input_not_captured => {
return stream.writeAll("for input is not captured");
return bw.writeAll("for input is not captured");
},
.invalid_byte => {
const tok_slice = tree.source[tree.tokens.items(.start)[parse_error.token]..];
return stream.print("{s} contains invalid byte: '{'}'", .{
return bw.print("{s} contains invalid byte: '{'}'", .{
switch (tok_slice[0]) {
'\'' => "character literal",
'"', '\\' => "string literal",
@ -580,10 +577,10 @@ pub fn renderError(tree: Ast, parse_error: Error, stream: anytype) !void {
const found_tag = tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev));
const expected_symbol = parse_error.extra.expected_tag.symbol();
switch (found_tag) {
.invalid => return stream.print("expected '{s}', found invalid bytes", .{
.invalid => return bw.print("expected '{s}', found invalid bytes", .{
expected_symbol,
}),
else => return stream.print("expected '{s}', found '{s}'", .{
else => return bw.print("expected '{s}', found '{s}'", .{
expected_symbol, found_tag.symbol(),
}),
}

View File

@ -11441,10 +11441,13 @@ fn parseStrLit(
offset: u32,
) InnerError!void {
const raw_string = bytes[offset..];
var buf_managed = buf.toManaged(astgen.gpa);
const result = std.zig.string_literal.parseWrite(buf_managed.writer(), raw_string);
buf.* = buf_managed.moveToUnmanaged();
switch (try result) {
const result = r: {
var aw: std.io.AllocatingWriter = undefined;
const bw = aw.fromArrayList(astgen.gpa, buf);
defer buf.* = aw.toArrayList();
break :r std.zig.string_literal.parseWrite(bw, raw_string) catch |err| return @errorCast(err);
};
switch (result) {
.success => return,
.failure => |err| return astgen.failWithStrLitError(err, token, bytes, offset),
}
@ -11493,17 +11496,18 @@ fn appendErrorNodeNotes(
notes: []const u32,
) Allocator.Error!void {
@branchHint(.cold);
const gpa = astgen.gpa;
const string_bytes = &astgen.string_bytes;
const msg: Zir.NullTerminatedString = @enumFromInt(string_bytes.items.len);
try string_bytes.writer(astgen.gpa).print(format ++ "\x00", args);
try string_bytes.print(gpa, format ++ "\x00", args);
const notes_index: u32 = if (notes.len != 0) blk: {
const notes_start = astgen.extra.items.len;
try astgen.extra.ensureTotalCapacity(astgen.gpa, notes_start + 1 + notes.len);
try astgen.extra.ensureTotalCapacity(gpa, notes_start + 1 + notes.len);
astgen.extra.appendAssumeCapacity(@intCast(notes.len));
astgen.extra.appendSliceAssumeCapacity(notes);
break :blk @intCast(notes_start);
} else 0;
try astgen.compile_errors.append(astgen.gpa, .{
try astgen.compile_errors.append(gpa, .{
.msg = msg,
.node = node.toOptional(),
.token = .none,
@ -11587,7 +11591,7 @@ fn appendErrorTokNotesOff(
const gpa = astgen.gpa;
const string_bytes = &astgen.string_bytes;
const msg: Zir.NullTerminatedString = @enumFromInt(string_bytes.items.len);
try string_bytes.writer(gpa).print(format ++ "\x00", args);
try string_bytes.print(gpa, format ++ "\x00", args);
const notes_index: u32 = if (notes.len != 0) blk: {
const notes_start = astgen.extra.items.len;
try astgen.extra.ensureTotalCapacity(gpa, notes_start + 1 + notes.len);
@ -11623,7 +11627,7 @@ fn errNoteTokOff(
@branchHint(.cold);
const string_bytes = &astgen.string_bytes;
const msg: Zir.NullTerminatedString = @enumFromInt(string_bytes.items.len);
try string_bytes.writer(astgen.gpa).print(format ++ "\x00", args);
try string_bytes.print(astgen.gpa, format ++ "\x00", args);
return astgen.addExtra(Zir.Inst.CompileErrors.Item{
.msg = msg,
.node = .none,
@ -11642,7 +11646,7 @@ fn errNoteNode(
@branchHint(.cold);
const string_bytes = &astgen.string_bytes;
const msg: Zir.NullTerminatedString = @enumFromInt(string_bytes.items.len);
try string_bytes.writer(astgen.gpa).print(format ++ "\x00", args);
try string_bytes.print(astgen.gpa, format ++ "\x00", args);
return astgen.addExtra(Zir.Inst.CompileErrors.Item{
.msg = msg,
.node = node.toOptional(),
@ -13888,13 +13892,14 @@ fn emitDbgStmtForceCurrentIndex(gz: *GenZir, lc: LineColumn) !void {
} });
}
fn lowerAstErrors(astgen: *AstGen) !void {
fn lowerAstErrors(astgen: *AstGen) error{OutOfMemory}!void {
const gpa = astgen.gpa;
const tree = astgen.tree;
assert(tree.errors.len > 0);
var msg: std.ArrayListUnmanaged(u8) = .empty;
defer msg.deinit(gpa);
var msg: std.io.AllocatingWriter = undefined;
const msg_writer = msg.init(gpa);
defer msg.deinit();
var notes: std.ArrayListUnmanaged(u32) = .empty;
defer notes.deinit(gpa);
@ -13928,20 +13933,20 @@ fn lowerAstErrors(astgen: *AstGen) !void {
.extra = .{ .offset = bad_off },
};
msg.clearRetainingCapacity();
try tree.renderError(err, msg.writer(gpa));
return try astgen.appendErrorTokNotesOff(tok, bad_off, "{s}", .{msg.items}, notes.items);
tree.renderError(err, msg_writer) catch |e| return @errorCast(e); // TODO try @errorCast(...)
return try astgen.appendErrorTokNotesOff(tok, bad_off, "{s}", .{msg.getWritten()}, notes.items);
}
var cur_err = tree.errors[0];
for (tree.errors[1..]) |err| {
if (err.is_note) {
try tree.renderError(err, msg.writer(gpa));
try notes.append(gpa, try astgen.errNoteTok(err.token, "{s}", .{msg.items}));
tree.renderError(err, msg_writer) catch |e| return @errorCast(e); // TODO try @errorCast(...)
try notes.append(gpa, try astgen.errNoteTok(err.token, "{s}", .{msg.getWritten()}));
} else {
// Flush error
const extra_offset = tree.errorOffset(cur_err);
try tree.renderError(cur_err, msg.writer(gpa));
try astgen.appendErrorTokNotesOff(cur_err.token, extra_offset, "{s}", .{msg.items}, notes.items);
tree.renderError(cur_err, msg_writer) catch |e| return @errorCast(e); // TODO try @errorCast(...)
try astgen.appendErrorTokNotesOff(cur_err.token, extra_offset, "{s}", .{msg.getWritten()}, notes.items);
notes.clearRetainingCapacity();
cur_err = err;
@ -13954,8 +13959,8 @@ fn lowerAstErrors(astgen: *AstGen) !void {
// Flush error
const extra_offset = tree.errorOffset(cur_err);
try tree.renderError(cur_err, msg.writer(gpa));
try astgen.appendErrorTokNotesOff(cur_err.token, extra_offset, "{s}", .{msg.items}, notes.items);
tree.renderError(cur_err, msg_writer) catch |e| return @errorCast(e); // TODO try @errorCast(...)
try astgen.appendErrorTokNotesOff(cur_err.token, extra_offset, "{s}", .{msg.getWritten()}, notes.items);
}
const DeclarationName = union(enum) {

View File

@ -452,37 +452,43 @@ fn expr(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) Allocator
}
}
fn appendIdentStr(zg: *ZonGen, ident_token: Ast.TokenIndex) !u32 {
fn appendIdentStr(zg: *ZonGen, ident_token: Ast.TokenIndex) error{ OutOfMemory, BadString }!u32 {
const gpa = zg.gpa;
const tree = zg.tree;
assert(tree.tokenTag(ident_token) == .identifier);
const ident_name = tree.tokenSlice(ident_token);
if (!mem.startsWith(u8, ident_name, "@")) {
const start = zg.string_bytes.items.len;
try zg.string_bytes.appendSlice(zg.gpa, ident_name);
try zg.string_bytes.appendSlice(gpa, ident_name);
return @intCast(start);
} else {
const offset = 1;
const start: u32 = @intCast(zg.string_bytes.items.len);
const raw_string = zg.tree.tokenSlice(ident_token)[offset..];
try zg.string_bytes.ensureUnusedCapacity(zg.gpa, raw_string.len);
switch (try std.zig.string_literal.parseWrite(zg.string_bytes.writer(zg.gpa), raw_string)) {
.success => {},
.failure => |err| {
try zg.lowerStrLitError(err, ident_token, raw_string, offset);
return error.BadString;
},
}
const slice = zg.string_bytes.items[start..];
if (mem.indexOfScalar(u8, slice, 0) != null) {
try zg.addErrorTok(ident_token, "identifier cannot contain null bytes", .{});
return error.BadString;
} else if (slice.len == 0) {
try zg.addErrorTok(ident_token, "identifier cannot be empty", .{});
return error.BadString;
}
return start;
}
const offset = 1;
const start: u32 = @intCast(zg.string_bytes.items.len);
const raw_string = zg.tree.tokenSlice(ident_token)[offset..];
try zg.string_bytes.ensureUnusedCapacity(gpa, raw_string.len);
const result = r: {
var aw: std.io.AllocatingWriter = undefined;
const bw = aw.fromArrayList(gpa, &zg.string_bytes);
defer zg.string_bytes = aw.toArrayList();
break :r std.zig.string_literal.parseWrite(bw, raw_string) catch |err| return @errorCast(err);
};
switch (result) {
.success => {},
.failure => |err| {
try zg.lowerStrLitError(err, ident_token, raw_string, offset);
return error.BadString;
},
}
const slice = zg.string_bytes.items[start..];
if (mem.indexOfScalar(u8, slice, 0) != null) {
try zg.addErrorTok(ident_token, "identifier cannot contain null bytes", .{});
return error.BadString;
} else if (slice.len == 0) {
try zg.addErrorTok(ident_token, "identifier cannot be empty", .{});
return error.BadString;
}
return start;
}
/// Estimates the size of a string node without parsing it.
@ -513,8 +519,8 @@ pub fn strLitSizeHint(tree: Ast, node: Ast.Node.Index) usize {
pub fn parseStrLit(
tree: Ast,
node: Ast.Node.Index,
writer: anytype,
) error{OutOfMemory}!std.zig.string_literal.Result {
writer: *std.io.BufferedWriter,
) anyerror!std.zig.string_literal.Result {
switch (tree.nodeTag(node)) {
.string_literal => {
const token = tree.nodeMainToken(node);
@ -549,15 +555,21 @@ const StringLiteralResult = union(enum) {
slice: struct { start: u32, len: u32 },
};
fn strLitAsString(zg: *ZonGen, str_node: Ast.Node.Index) !StringLiteralResult {
fn strLitAsString(zg: *ZonGen, str_node: Ast.Node.Index) error{ OutOfMemory, BadString }!StringLiteralResult {
if (!zg.options.parse_str_lits) return .{ .slice = .{ .start = 0, .len = 0 } };
const gpa = zg.gpa;
const string_bytes = &zg.string_bytes;
const str_index: u32 = @intCast(zg.string_bytes.items.len);
const size_hint = strLitSizeHint(zg.tree, str_node);
try string_bytes.ensureUnusedCapacity(zg.gpa, size_hint);
switch (try parseStrLit(zg.tree, str_node, zg.string_bytes.writer(zg.gpa))) {
try string_bytes.ensureUnusedCapacity(gpa, size_hint);
const result = r: {
var aw: std.io.AllocatingWriter = undefined;
const bw = aw.fromArrayList(gpa, &zg.string_bytes);
defer zg.string_bytes = aw.toArrayList();
break :r parseStrLit(zg.tree, str_node, bw) catch |err| return @errorCast(err);
};
switch (result) {
.success => {},
.failure => |err| {
const token = zg.tree.nodeMainToken(str_node);
@ -805,10 +817,7 @@ fn lowerNumberError(zg: *ZonGen, err: std.zig.number_literal.Error, token: Ast.T
fn errNoteNode(zg: *ZonGen, node: Ast.Node.Index, comptime format: []const u8, args: anytype) Allocator.Error!Zoir.CompileError.Note {
const message_idx: u32 = @intCast(zg.string_bytes.items.len);
const writer = zg.string_bytes.writer(zg.gpa);
try writer.print(format, args);
try writer.writeByte(0);
try zg.string_bytes.print(zg.gpa, format ++ "\x00", args);
return .{
.msg = @enumFromInt(message_idx),
.token = .none,
@ -818,10 +827,7 @@ fn errNoteNode(zg: *ZonGen, node: Ast.Node.Index, comptime format: []const u8, a
fn errNoteTok(zg: *ZonGen, tok: Ast.TokenIndex, comptime format: []const u8, args: anytype) Allocator.Error!Zoir.CompileError.Note {
const message_idx: u32 = @intCast(zg.string_bytes.items.len);
const writer = zg.string_bytes.writer(zg.gpa);
try writer.print(format, args);
try writer.writeByte(0);
try zg.string_bytes.print(zg.gpa, format ++ "\x00", args);
return .{
.msg = @enumFromInt(message_idx),
.token = .fromToken(tok),
@ -862,9 +868,7 @@ fn addErrorInner(
try zg.error_notes.appendSlice(gpa, notes);
const message_idx: u32 = @intCast(zg.string_bytes.items.len);
const writer = zg.string_bytes.writer(zg.gpa);
try writer.print(format, args);
try writer.writeByte(0);
try zg.string_bytes.print(gpa, format ++ "\x00", args);
try zg.compile_errors.append(gpa, .{
.msg = @enumFromInt(message_idx),
@ -880,8 +884,9 @@ fn lowerAstErrors(zg: *ZonGen) Allocator.Error!void {
const tree = zg.tree;
assert(tree.errors.len > 0);
var msg: std.ArrayListUnmanaged(u8) = .empty;
defer msg.deinit(gpa);
var msg: std.io.AllocatingWriter = undefined;
const msg_bw = msg.init(gpa);
defer msg.deinit();
var notes: std.ArrayListUnmanaged(Zoir.CompileError.Note) = .empty;
defer notes.deinit(gpa);
@ -889,18 +894,20 @@ fn lowerAstErrors(zg: *ZonGen) Allocator.Error!void {
var cur_err = tree.errors[0];
for (tree.errors[1..]) |err| {
if (err.is_note) {
try tree.renderError(err, msg.writer(gpa));
try notes.append(gpa, try zg.errNoteTok(err.token, "{s}", .{msg.items}));
tree.renderError(err, msg_bw) catch |e| return @errorCast(e); // TODO: try @errorCast(...)
try notes.append(gpa, try zg.errNoteTok(err.token, "{s}", .{msg.getWritten()}));
} else {
// Flush error
try tree.renderError(cur_err, msg.writer(gpa));
tree.renderError(cur_err, msg_bw) catch |e| return @errorCast(e); // TODO try @errorCast(...)
const extra_offset = tree.errorOffset(cur_err);
try zg.addErrorTokNotesOff(cur_err.token, extra_offset, "{s}", .{msg.items}, notes.items);
try zg.addErrorTokNotesOff(cur_err.token, extra_offset, "{s}", .{msg.getWritten()}, notes.items);
notes.clearRetainingCapacity();
cur_err = err;
// TODO: `Parse` currently does not have good error recovery mechanisms, so the remaining errors could be bogus.
// As such, we'll ignore all remaining errors for now. We should improve `Parse` so that we can report all the errors.
// TODO: `Parse` currently does not have good error recovery
// mechanisms, so the remaining errors could be bogus. As such,
// we'll ignore all remaining errors for now. We should improve
// `Parse` so that we can report all the errors.
return;
}
msg.clearRetainingCapacity();
@ -908,8 +915,8 @@ fn lowerAstErrors(zg: *ZonGen) Allocator.Error!void {
// Flush error
const extra_offset = tree.errorOffset(cur_err);
try tree.renderError(cur_err, msg.writer(gpa));
try zg.addErrorTokNotesOff(cur_err.token, extra_offset, "{s}", .{msg.items}, notes.items);
tree.renderError(cur_err, msg_bw) catch |e| return @errorCast(e); // TODO try @errorCast(...)
try zg.addErrorTokNotesOff(cur_err.token, extra_offset, "{s}", .{msg.getWritten()}, notes.items);
}
const std = @import("std");

File diff suppressed because it is too large Load Diff

View File

@ -322,9 +322,9 @@ test parseCharLiteral {
);
}
/// Parses `bytes` as a Zig string literal and writes the result to the std.io.Writer type.
/// Parses `bytes` as a Zig string literal and writes the result to the `std.io.Writer` type.
/// Asserts `bytes` has '"' at beginning and end.
pub fn parseWrite(writer: anytype, bytes: []const u8) error{OutOfMemory}!Result {
pub fn parseWrite(writer: *std.io.BufferedWriter, bytes: []const u8) anyerror!Result {
assert(bytes.len >= 2 and bytes[0] == '"' and bytes[bytes.len - 1] == '"');
var index: usize = 1;
@ -340,18 +340,18 @@ pub fn parseWrite(writer: anytype, bytes: []const u8) error{OutOfMemory}!Result
if (bytes[escape_char_index] == 'u') {
var buf: [4]u8 = undefined;
const len = utf8Encode(codepoint, &buf) catch {
return Result{ .failure = .{ .invalid_unicode_codepoint = escape_char_index + 1 } };
return .{ .failure = .{ .invalid_unicode_codepoint = escape_char_index + 1 } };
};
try writer.writeAll(buf[0..len]);
} else {
try writer.writeByte(@as(u8, @intCast(codepoint)));
}
},
.failure => |err| return Result{ .failure = err },
.failure => |err| return .{ .failure = err },
}
},
'\n' => return Result{ .failure = .{ .invalid_character = index } },
'"' => return Result.success,
'\n' => return .{ .failure = .{ .invalid_character = index } },
'"' => return .success,
else => {
try writer.writeByte(b);
index += 1;
@ -363,10 +363,12 @@ pub fn parseWrite(writer: anytype, bytes: []const u8) error{OutOfMemory}!Result
/// Higher level API. Does not return extra info about parse errors.
/// Caller owns returned memory.
pub fn parseAlloc(allocator: std.mem.Allocator, bytes: []const u8) ParseError![]u8 {
var buf = std.ArrayList(u8).init(allocator);
var buf: std.io.AllocatingWriter = undefined;
const bw = buf.init(allocator);
defer buf.deinit();
switch (try parseWrite(buf.writer(), bytes)) {
// TODO try @errorCast(...)
const result = parseWrite(bw, bytes) catch |err| return @errorCast(err);
switch (result) {
.success => return buf.toOwnedSlice(),
.failure => return error.InvalidLiteral,
}

View File

@ -1,6 +1,5 @@
const std = @import("std");
const Allocator = std.mem.Allocator;
const fmtIntSizeBin = std.fmt.fmtIntSizeBin;
const build_options = @import("build_options");
const Zcu = @import("../Zcu.zig");
@ -25,20 +24,20 @@ pub fn write(air: Air, stream: anytype, pt: Zcu.PerThread, liveness: ?Air.Livene
// zig fmt: off
stream.print(
\\# Total AIR+Liveness bytes: {}
\\# AIR Instructions: {d} ({})
\\# AIR Extra Data: {d} ({})
\\# Liveness tomb_bits: {}
\\# Liveness Extra Data: {d} ({})
\\# Liveness special table: {d} ({})
\\# Total AIR+Liveness bytes: {Bi}
\\# AIR Instructions: {d} ({Bi})
\\# AIR Extra Data: {d} ({Bi})
\\# Liveness tomb_bits: {Bi}
\\# Liveness Extra Data: {d} ({Bi})
\\# Liveness special table: {d} ({Bi})
\\
, .{
fmtIntSizeBin(total_bytes),
air.instructions.len, fmtIntSizeBin(instruction_bytes),
air.extra.items.len, fmtIntSizeBin(extra_bytes),
fmtIntSizeBin(tomb_bytes),
if (liveness) |l| l.extra.len else 0, fmtIntSizeBin(liveness_extra_bytes),
if (liveness) |l| l.special.count() else 0, fmtIntSizeBin(liveness_special_bytes),
total_bytes,
air.instructions.len, instruction_bytes,
air.extra.items.len, extra_bytes,
tomb_bytes,
if (liveness) |l| l.extra.len else 0, liveness_extra_bytes,
if (liveness) |l| l.special.count() else 0, liveness_special_bytes,
}) catch return;
// zig fmt: on

View File

@ -51,7 +51,7 @@ pub fn append(opts: @This(), buffer: *std.ArrayList(u8)) Allocator.Error!void {
const zig_backend = opts.zig_backend;
@setEvalBranchQuota(4000);
try buffer.writer().print(
try buffer.print(
\\const std = @import("std");
\\/// Zig version. When writing code that supports multiple versions of Zig, prefer
\\/// feature detection (i.e. with `@hasDecl` or `@hasField`) over version checks.
@ -89,10 +89,10 @@ pub fn append(opts: @This(), buffer: *std.ArrayList(u8)) Allocator.Error!void {
const index = @as(std.Target.Cpu.Feature.Set.Index, @intCast(index_usize));
const is_enabled = target.cpu.features.isEnabled(index);
if (is_enabled) {
try buffer.writer().print(" .{p_},\n", .{std.zig.fmtId(feature.name)});
try buffer.print(" .{p_},\n", .{std.zig.fmtId(feature.name)});
}
}
try buffer.writer().print(
try buffer.print(
\\ }}),
\\}};
\\pub const os: std.Target.Os = .{{
@ -104,7 +104,7 @@ pub fn append(opts: @This(), buffer: *std.ArrayList(u8)) Allocator.Error!void {
switch (target.os.versionRange()) {
.none => try buffer.appendSlice(" .none = {} },\n"),
.semver => |semver| try buffer.writer().print(
.semver => |semver| try buffer.print(
\\ .semver = .{{
\\ .min = .{{
\\ .major = {},
@ -127,7 +127,7 @@ pub fn append(opts: @This(), buffer: *std.ArrayList(u8)) Allocator.Error!void {
semver.max.minor,
semver.max.patch,
}),
.linux => |linux| try buffer.writer().print(
.linux => |linux| try buffer.print(
\\ .linux = .{{
\\ .range = .{{
\\ .min = .{{
@ -164,7 +164,7 @@ pub fn append(opts: @This(), buffer: *std.ArrayList(u8)) Allocator.Error!void {
linux.android,
}),
.hurd => |hurd| try buffer.writer().print(
.hurd => |hurd| try buffer.print(
\\ .hurd = .{{
\\ .range = .{{
\\ .min = .{{
@ -198,7 +198,7 @@ pub fn append(opts: @This(), buffer: *std.ArrayList(u8)) Allocator.Error!void {
hurd.glibc.minor,
hurd.glibc.patch,
}),
.windows => |windows| try buffer.writer().print(
.windows => |windows| try buffer.print(
\\ .windows = .{{
\\ .min = {c},
\\ .max = {c},
@ -217,7 +217,7 @@ pub fn append(opts: @This(), buffer: *std.ArrayList(u8)) Allocator.Error!void {
);
if (target.dynamic_linker.get()) |dl| {
try buffer.writer().print(
try buffer.print(
\\ .dynamic_linker = .init("{s}"),
\\}};
\\
@ -237,7 +237,7 @@ pub fn append(opts: @This(), buffer: *std.ArrayList(u8)) Allocator.Error!void {
// knows libc will provide it, and likewise c.zig will not export memcpy.
const link_libc = opts.link_libc;
try buffer.writer().print(
try buffer.print(
\\pub const object_format: std.Target.ObjectFormat = .{p_};
\\pub const mode: std.builtin.OptimizeMode = .{p_};
\\pub const link_libc = {};
@ -269,7 +269,7 @@ pub fn append(opts: @This(), buffer: *std.ArrayList(u8)) Allocator.Error!void {
});
if (target.os.tag == .wasi) {
try buffer.writer().print(
try buffer.print(
\\pub const wasi_exec_model: std.builtin.WasiExecModel = .{p_};
\\
, .{std.zig.fmtId(@tagName(opts.wasi_exec_model))});

View File

@ -1643,10 +1643,8 @@ fn dumpHashInfo(all_files: []const *const HashedFile) !void {
const w = bw.writer();
for (all_files) |hashed_file| {
try w.print("{s}: {s}: {s}\n", .{
@tagName(hashed_file.kind),
std.fmt.fmtSliceHexLower(&hashed_file.hash),
hashed_file.normalized_path,
try w.print("{s}: {x}: {s}\n", .{
@tagName(hashed_file.kind), &hashed_file.hash, hashed_file.normalized_path,
});
}

View File

@ -127,7 +127,7 @@ pub const Oid = union(Format) {
) @TypeOf(writer).Error!void {
_ = fmt;
_ = options;
try writer.print("{}", .{std.fmt.fmtSliceHexLower(oid.slice())});
try writer.print("{x}", .{oid.slice()});
}
pub fn slice(oid: *const Oid) []const u8 {

View File

@ -477,11 +477,8 @@ pub fn updateZirRefs(pt: Zcu.PerThread) Allocator.Error!void {
if (std.zig.srcHashEql(old_hash, new_hash)) {
break :hash_changed;
}
log.debug("hash for (%{d} -> %{d}) changed: {} -> {}", .{
old_inst,
new_inst,
std.fmt.fmtSliceHexLower(&old_hash),
std.fmt.fmtSliceHexLower(&new_hash),
log.debug("hash for (%{d} -> %{d}) changed: {x} -> {x}", .{
old_inst, new_inst, &old_hash, &new_hash,
});
}
// The source hash associated with this instruction changed - invalidate relevant dependencies.

View File

@ -1205,9 +1205,9 @@ pub const Vex = struct {
fn expectEqualHexStrings(expected: []const u8, given: []const u8, assembly: []const u8) !void {
assert(expected.len > 0);
if (std.mem.eql(u8, expected, given)) return;
const expected_fmt = try std.fmt.allocPrint(testing.allocator, "{x}", .{std.fmt.fmtSliceHexLower(expected)});
const expected_fmt = try std.fmt.allocPrint(testing.allocator, "{x}", .{expected});
defer testing.allocator.free(expected_fmt);
const given_fmt = try std.fmt.allocPrint(testing.allocator, "{x}", .{std.fmt.fmtSliceHexLower(given)});
const given_fmt = try std.fmt.allocPrint(testing.allocator, "{x}", .{given});
defer testing.allocator.free(given_fmt);
const idx = std.mem.indexOfDiff(u8, expected_fmt, given_fmt).?;
const padding = try testing.allocator.alloc(u8, idx + 5);

View File

@ -190,41 +190,7 @@ pub fn run(
}
}
const FmtError = error{
SystemResources,
OperationAborted,
IoPending,
BrokenPipe,
Unexpected,
WouldBlock,
Canceled,
FileClosed,
DestinationAddressRequired,
DiskQuota,
FileTooBig,
MessageTooBig,
InputOutput,
NoSpaceLeft,
AccessDenied,
OutOfMemory,
RenameAcrossMountPoints,
ReadOnlyFileSystem,
LinkQuotaExceeded,
FileBusy,
EndOfStream,
Unseekable,
NotOpenForWriting,
UnsupportedEncoding,
InvalidEncoding,
ConnectionResetByPeer,
SocketNotConnected,
LockViolation,
NetNameDeleted,
InvalidArgument,
ProcessNotFound,
} || fs.File.OpenError;
fn fmtPath(fmt: *Fmt, file_path: []const u8, check_mode: bool, dir: fs.Dir, sub_path: []const u8) FmtError!void {
fn fmtPath(fmt: *Fmt, file_path: []const u8, check_mode: bool, dir: fs.Dir, sub_path: []const u8) anyerror!void {
fmtPathFile(fmt, file_path, check_mode, dir, sub_path) catch |err| switch (err) {
error.IsDir, error.AccessDenied => return fmtPathDir(fmt, file_path, check_mode, dir, sub_path),
else => {
@ -241,7 +207,7 @@ fn fmtPathDir(
check_mode: bool,
parent_dir: fs.Dir,
parent_sub_path: []const u8,
) FmtError!void {
) anyerror!void {
var dir = try parent_dir.openDir(parent_sub_path, .{ .iterate = true });
defer dir.close();
@ -277,7 +243,7 @@ fn fmtPathFile(
check_mode: bool,
dir: fs.Dir,
sub_path: []const u8,
) FmtError!void {
) anyerror!void {
const source_file = try dir.openFile(sub_path, .{});
var file_closed = false;
errdefer if (!file_closed) source_file.close();

View File

@ -388,7 +388,7 @@ pub fn libExists(
/// This function body is verbose but all it does is test 3 different paths and
/// see if a .def file exists.
fn findDef(
allocator: Allocator,
gpa: Allocator,
target: *const std.Target,
zig_lib_directory: Cache.Directory,
lib_name: []const u8,
@ -401,7 +401,8 @@ fn findDef(
else => unreachable,
};
var override_path = std.ArrayList(u8).init(allocator);
var override_path: std.io.AllocatingWriter = undefined;
const override_path_writer = override_path.init(gpa);
defer override_path.deinit();
const s = path.sep_str;
@ -410,11 +411,11 @@ fn findDef(
// Try the archtecture-specific path first.
const fmt_path = "libc" ++ s ++ "mingw" ++ s ++ "{s}" ++ s ++ "{s}.def";
if (zig_lib_directory.path) |p| {
try override_path.writer().print("{s}" ++ s ++ fmt_path, .{ p, lib_path, lib_name });
try override_path_writer.print("{s}" ++ s ++ fmt_path, .{ p, lib_path, lib_name });
} else {
try override_path.writer().print(fmt_path, .{ lib_path, lib_name });
try override_path_writer.print(fmt_path, .{ lib_path, lib_name });
}
if (std.fs.cwd().access(override_path.items, .{})) |_| {
if (std.fs.cwd().access(override_path.getWritten(), .{})) |_| {
return override_path.toOwnedSlice();
} else |err| switch (err) {
error.FileNotFound => {},
@ -424,14 +425,14 @@ fn findDef(
{
// Try the generic version.
override_path.shrinkRetainingCapacity(0);
override_path.clearRetainingCapacity();
const fmt_path = "libc" ++ s ++ "mingw" ++ s ++ "lib-common" ++ s ++ "{s}.def";
if (zig_lib_directory.path) |p| {
try override_path.writer().print("{s}" ++ s ++ fmt_path, .{ p, lib_name });
try override_path_writer.print("{s}" ++ s ++ fmt_path, .{ p, lib_name });
} else {
try override_path.writer().print(fmt_path, .{lib_name});
try override_path_writer.print(fmt_path, .{lib_name});
}
if (std.fs.cwd().access(override_path.items, .{})) |_| {
if (std.fs.cwd().access(override_path.getWritten(), .{})) |_| {
return override_path.toOwnedSlice();
} else |err| switch (err) {
error.FileNotFound => {},
@ -441,14 +442,14 @@ fn findDef(
{
// Try the generic version and preprocess it.
override_path.shrinkRetainingCapacity(0);
override_path.clearRetainingCapacity();
const fmt_path = "libc" ++ s ++ "mingw" ++ s ++ "lib-common" ++ s ++ "{s}.def.in";
if (zig_lib_directory.path) |p| {
try override_path.writer().print("{s}" ++ s ++ fmt_path, .{ p, lib_name });
try override_path_writer.print("{s}" ++ s ++ fmt_path, .{ p, lib_name });
} else {
try override_path.writer().print(fmt_path, .{lib_name});
try override_path_writer.print(fmt_path, .{lib_name});
}
if (std.fs.cwd().access(override_path.items, .{})) |_| {
if (std.fs.cwd().access(override_path.getWritten(), .{})) |_| {
return override_path.toOwnedSlice();
} else |err| switch (err) {
error.FileNotFound => {},

View File

@ -830,8 +830,8 @@ fn debugMem(allocator: Allocator, handle: std.process.Child.Id, pvaddr: std.os.w
const buffer = try allocator.alloc(u8, code.len);
defer allocator.free(buffer);
const memread = try std.os.windows.ReadProcessMemory(handle, pvaddr, buffer);
log.debug("to write: {x}", .{std.fmt.fmtSliceHexLower(code)});
log.debug("in memory: {x}", .{std.fmt.fmtSliceHexLower(memread)});
log.debug("to write: {x}", .{code});
log.debug("in memory: {x}", .{memread});
}
fn writeMemProtected(handle: std.process.Child.Id, pvaddr: std.os.windows.LPVOID, code: []const u8) !void {

View File

@ -336,9 +336,9 @@ const Edge = struct {
fn expectEqualHexStrings(expected: []const u8, given: []const u8) !void {
assert(expected.len > 0);
if (mem.eql(u8, expected, given)) return;
const expected_fmt = try std.fmt.allocPrint(testing.allocator, "{x}", .{std.fmt.fmtSliceHexLower(expected)});
const expected_fmt = try std.fmt.allocPrint(testing.allocator, "{x}", .{expected});
defer testing.allocator.free(expected_fmt);
const given_fmt = try std.fmt.allocPrint(testing.allocator, "{x}", .{std.fmt.fmtSliceHexLower(given)});
const given_fmt = try std.fmt.allocPrint(testing.allocator, "{x}", .{given});
defer testing.allocator.free(given_fmt);
const idx = mem.indexOfDiff(u8, expected_fmt, given_fmt).?;
const padding = try testing.allocator.alloc(u8, idx + 5);

View File

@ -1035,20 +1035,14 @@ pub fn finish(f: *Flush, wasm: *Wasm) !void {
var id: [16]u8 = undefined;
std.crypto.hash.sha3.TurboShake128(null).hash(binary_bytes.items, &id, .{});
var uuid: [36]u8 = undefined;
_ = try std.fmt.bufPrint(&uuid, "{s}-{s}-{s}-{s}-{s}", .{
std.fmt.fmtSliceHexLower(id[0..4]),
std.fmt.fmtSliceHexLower(id[4..6]),
std.fmt.fmtSliceHexLower(id[6..8]),
std.fmt.fmtSliceHexLower(id[8..10]),
std.fmt.fmtSliceHexLower(id[10..]),
_ = try std.fmt.bufPrint(&uuid, "{x}-{x}-{x}-{x}-{x}", .{
id[0..4], id[4..6], id[6..8], id[8..10], id[10..],
});
try emitBuildIdSection(gpa, binary_bytes, &uuid);
},
.hexstring => |hs| {
var buffer: [32 * 2]u8 = undefined;
const str = std.fmt.bufPrint(&buffer, "{s}", .{
std.fmt.fmtSliceHexLower(hs.toSlice()),
}) catch unreachable;
const str = std.fmt.bufPrint(&buffer, "{x}", .{hs.toSlice()}) catch unreachable;
try emitBuildIdSection(gpa, binary_bytes, str);
},
else => |mode| {

View File

@ -65,6 +65,9 @@ pub fn wasi_cwd() std.os.wasi.fd_t {
const fatal = std.process.fatal;
/// This can be global since stdout is a singleton.
var stdout_buffer: [4096]u8 = undefined;
/// Shaming all the locations that inappropriately use an O(N) search algorithm.
/// Please delete this and fix the compilation errors!
pub const @"bad O(N)" = void;
@ -338,9 +341,7 @@ fn mainArgs(gpa: Allocator, arena: Allocator, args: []const []const u8) !void {
return cmdInit(gpa, arena, cmd_args);
} else if (mem.eql(u8, cmd, "targets")) {
dev.check(.targets_command);
const host = std.zig.resolveTargetQueryOrFatal(.{});
const stdout = io.getStdOut().writer();
return @import("print_targets.zig").cmdTargets(arena, cmd_args, stdout, &host);
return @import("print_targets.zig").cmdTargets(arena, cmd_args);
} else if (mem.eql(u8, cmd, "version")) {
dev.check(.version_command);
try std.io.getStdOut().writeAll(build_options.version ++ "\n");
@ -351,7 +352,7 @@ fn mainArgs(gpa: Allocator, arena: Allocator, args: []const []const u8) !void {
} else if (mem.eql(u8, cmd, "env")) {
dev.check(.env_command);
verifyLibcxxCorrectlyLinked();
return @import("print_env.zig").cmdEnv(arena, cmd_args, io.getStdOut().writer());
return @import("print_env.zig").cmdEnv(arena, cmd_args);
} else if (mem.eql(u8, cmd, "reduce")) {
return jitCmd(gpa, arena, cmd_args, .{
.cmd_name = "reduce",
@ -3334,9 +3335,8 @@ fn buildOutputType(
var bin_digest: Cache.BinDigest = undefined;
hasher.final(&bin_digest);
const sub_path = try std.fmt.allocPrint(arena, "tmp" ++ sep ++ "{s}-stdin{s}", .{
std.fmt.fmtSliceHexLower(&bin_digest),
ext.canonicalName(target),
const sub_path = try std.fmt.allocPrint(arena, "tmp" ++ sep ++ "{x}-stdin{s}", .{
&bin_digest, ext.canonicalName(target),
});
try dirs.local_cache.handle.rename(dump_path, sub_path);
@ -6061,6 +6061,11 @@ fn cmdAstCheck(
const tree = try Ast.parse(arena, source, mode);
var bw: std.io.BufferedWriter = .{
.unbuffered_writer = io.getStdOut().writer(),
.buffer = &stdout_buffer,
};
switch (mode) {
.zig => {
const zir = try AstGen.generate(arena, tree);
@ -6103,31 +6108,30 @@ fn cmdAstCheck(
const extra_bytes = zir.extra.len * @sizeOf(u32);
const total_bytes = @sizeOf(Zir) + instruction_bytes + extra_bytes +
zir.string_bytes.len * @sizeOf(u8);
const stdout = io.getStdOut();
const fmtIntSizeBin = std.fmt.fmtIntSizeBin;
// zig fmt: off
try stdout.writer().print(
\\# Source bytes: {}
\\# Tokens: {} ({})
\\# AST Nodes: {} ({})
\\# Total ZIR bytes: {}
\\# Instructions: {d} ({})
try bw.print(
\\# Source bytes: {Bi}
\\# Tokens: {} ({Bi})
\\# AST Nodes: {} ({Bi})
\\# Total ZIR bytes: {Bi}
\\# Instructions: {d} ({Bi})
\\# String Table Bytes: {}
\\# Extra Data Items: {d} ({})
\\# Extra Data Items: {d} ({Bi})
\\
, .{
fmtIntSizeBin(source.len),
tree.tokens.len, fmtIntSizeBin(token_bytes),
tree.nodes.len, fmtIntSizeBin(tree_bytes),
fmtIntSizeBin(total_bytes),
zir.instructions.len, fmtIntSizeBin(instruction_bytes),
fmtIntSizeBin(zir.string_bytes.len),
zir.extra.len, fmtIntSizeBin(extra_bytes),
source.len,
tree.tokens.len, token_bytes,
tree.nodes.len, tree_bytes,
total_bytes,
zir.instructions.len, instruction_bytes,
zir.string_bytes.len,
zir.extra.len, extra_bytes,
});
// zig fmt: on
}
try @import("print_zir.zig").renderAsTextToFile(arena, tree, zir, io.getStdOut());
try @import("print_zir.zig").renderAsText(arena, tree, zir, &bw);
try bw.flush();
if (zir.hasCompileErrors()) {
process.exit(1);
@ -6154,7 +6158,8 @@ fn cmdAstCheck(
fatal("-t option only available in builds of zig with debug extensions", .{});
}
try @import("print_zoir.zig").renderToFile(zoir, arena, io.getStdOut());
try @import("print_zoir.zig").renderToWriter(zoir, arena, &bw);
try bw.flush();
return cleanExit();
},
}
@ -6275,11 +6280,13 @@ fn detectNativeCpuWithLLVM(
}
fn printCpu(cpu: std.Target.Cpu) !void {
var bw = io.bufferedWriter(io.getStdOut().writer());
const stdout = bw.writer();
var bw: std.io.BufferedWriter = .{
.unbuffered_writer = io.getStdOut().writer(),
.buffer = &stdout_buffer,
};
if (cpu.model.llvm_name) |llvm_name| {
try stdout.print("{s}\n", .{llvm_name});
try bw.print("{s}\n", .{llvm_name});
}
const all_features = cpu.arch.allFeaturesList();
@ -6288,7 +6295,7 @@ fn printCpu(cpu: std.Target.Cpu) !void {
const index: std.Target.Cpu.Feature.Set.Index = @intCast(index_usize);
const is_enabled = cpu.features.isEnabled(index);
const plus_or_minus = "-+"[@intFromBool(is_enabled)];
try stdout.print("{c}{s}\n", .{ plus_or_minus, llvm_name });
try bw.print("{c}{s}\n", .{ plus_or_minus, llvm_name });
}
try bw.flush();
@ -6356,6 +6363,11 @@ fn cmdDumpZir(
const zir = try Zcu.loadZirCache(arena, f);
var bw: std.io.BufferedWriter = .{
.unbuffered_writer = io.getStdOut().writer(),
.buffer = &stdout_buffer,
};
{
const instruction_bytes = zir.instructions.len *
// Here we don't use @sizeOf(Zir.Inst.Data) because it would include
@ -6364,25 +6376,24 @@ fn cmdDumpZir(
const extra_bytes = zir.extra.len * @sizeOf(u32);
const total_bytes = @sizeOf(Zir) + instruction_bytes + extra_bytes +
zir.string_bytes.len * @sizeOf(u8);
const stdout = io.getStdOut();
const fmtIntSizeBin = std.fmt.fmtIntSizeBin;
// zig fmt: off
try stdout.writer().print(
\\# Total ZIR bytes: {}
\\# Instructions: {d} ({})
\\# String Table Bytes: {}
\\# Extra Data Items: {d} ({})
try bw.print(
\\# Total ZIR bytes: {Bi}
\\# Instructions: {d} ({Bi})
\\# String Table Bytes: {Bi}
\\# Extra Data Items: {d} ({Bi})
\\
, .{
fmtIntSizeBin(total_bytes),
zir.instructions.len, fmtIntSizeBin(instruction_bytes),
fmtIntSizeBin(zir.string_bytes.len),
zir.extra.len, fmtIntSizeBin(extra_bytes),
total_bytes,
zir.instructions.len, instruction_bytes,
zir.string_bytes.len,
zir.extra.len, extra_bytes,
});
// zig fmt: on
}
return @import("print_zir.zig").renderAsTextToFile(arena, null, zir, io.getStdOut());
try @import("print_zir.zig").renderAsText(arena, null, zir, &bw);
try bw.flush();
}
/// This is only enabled for debug builds.
@ -6440,13 +6451,15 @@ fn cmdChangelist(
var inst_map: std.AutoHashMapUnmanaged(Zir.Inst.Index, Zir.Inst.Index) = .empty;
try Zcu.mapOldZirToNew(arena, old_zir, new_zir, &inst_map);
var bw = io.bufferedWriter(io.getStdOut().writer());
const stdout = bw.writer();
var bw: std.io.BufferedWriter = .{
.unbuffered_writer = io.getStdOut().writer(),
.buffer = &stdout_buffer,
};
{
try stdout.print("Instruction mappings:\n", .{});
try bw.print("Instruction mappings:\n", .{});
var it = inst_map.iterator();
while (it.next()) |entry| {
try stdout.print(" %{d} => %{d}\n", .{
try bw.print(" %{d} => %{d}\n", .{
@intFromEnum(entry.key_ptr.*),
@intFromEnum(entry.value_ptr.*),
});
@ -6714,13 +6727,10 @@ fn accessFrameworkPath(
for (&[_][]const u8{ ".tbd", ".dylib", "" }) |ext| {
test_path.clearRetainingCapacity();
try test_path.writer().print("{s}" ++ sep ++ "{s}.framework" ++ sep ++ "{s}{s}", .{
framework_dir_path,
framework_name,
framework_name,
ext,
try test_path.print("{s}" ++ sep ++ "{s}.framework" ++ sep ++ "{s}{s}", .{
framework_dir_path, framework_name, framework_name, ext,
});
try checked_paths.writer().print("\n {s}", .{test_path.items});
try checked_paths.print("\n {s}", .{test_path.items});
fs.cwd().access(test_path.items, .{}) catch |err| switch (err) {
error.FileNotFound => continue,
else => |e| fatal("unable to search for {s} framework '{s}': {s}", .{
@ -7033,14 +7043,19 @@ fn cmdFetch(
try fixups.append_string_after_node.put(gpa, manifest.version_node, dependencies_text);
}
var rendered = std.ArrayList(u8).init(gpa);
defer rendered.deinit();
try ast.renderToArrayList(&rendered, fixups);
build_root.directory.handle.writeFile(.{ .sub_path = Package.Manifest.basename, .data = rendered.items }) catch |err| {
fatal("unable to write {s} file: {s}", .{ Package.Manifest.basename, @errorName(err) });
var file = build_root.directory.handle.createFile(Package.Manifest.basename, .{}) catch |err| {
fatal("unable to create {s} file: {s}", .{ Package.Manifest.basename, err });
};
defer file.close();
var buffer: [4096]u8 = undefined;
var bw: std.io.BufferedWriter = .{
.unbuffered_writer = file.writer(),
.buffer = &buffer,
};
ast.render(gpa, &bw, fixups) catch |err| fatal("failed to render AST to {s}: {s}", .{
Package.Manifest.basename, err,
});
bw.flush() catch |err| fatal("failed to flush {s}: {s}", .{ Package.Manifest.basename, err });
return cleanExit();
}

View File

@ -4,7 +4,7 @@ const introspect = @import("introspect.zig");
const Allocator = std.mem.Allocator;
const fatal = std.process.fatal;
pub fn cmdEnv(arena: Allocator, args: []const []const u8, stdout: std.fs.File.Writer) !void {
pub fn cmdEnv(arena: Allocator, args: []const []const u8) !void {
_ = args;
const cwd_path = try introspect.getResolvedCwd(arena);
const self_exe_path = try std.fs.selfExePathAlloc(arena);
@ -21,10 +21,12 @@ pub fn cmdEnv(arena: Allocator, args: []const []const u8, stdout: std.fs.File.Wr
const host = try std.zig.system.resolveTargetQuery(.{});
const triple = try host.zigTriple(arena);
var bw = std.io.bufferedWriter(stdout);
const w = bw.writer();
var jws = std.json.writeStream(w, .{ .whitespace = .indent_1 });
var buffer: [1024]u8 = undefined;
var bw: std.io.BufferedWriter = .{
.buffer = &buffer,
.unbuffered_writer = std.io.getStdOut().writer(),
};
var jws = std.json.writeStream(bw, .{ .whitespace = .indent_1 });
try jws.beginObject();
@ -55,7 +57,7 @@ pub fn cmdEnv(arena: Allocator, args: []const []const u8, stdout: std.fs.File.Wr
try jws.endObject();
try jws.endObject();
try w.writeByte('\n');
try bw.writeByte('\n');
try bw.flush();
}

View File

@ -11,36 +11,36 @@ const assert = std.debug.assert;
const glibc = @import("libs/glibc.zig");
const introspect = @import("introspect.zig");
pub fn cmdTargets(
allocator: Allocator,
args: []const []const u8,
/// Output stream
stdout: anytype,
native_target: *const Target,
) !void {
pub fn cmdTargets(arena: Allocator, args: []const []const u8) anyerror!void {
_ = args;
var zig_lib_directory = introspect.findZigLibDir(allocator) catch |err| {
const host = std.zig.resolveTargetQueryOrFatal(.{});
var buffer: [1024]u8 = undefined;
var bw: std.io.BufferedWriter = .{
.unbuffered_writer = io.getStdOut().writer(),
.buffer = &buffer,
};
try print(arena, &bw, host);
try bw.flush();
}
fn print(arena: Allocator, output: *std.io.BufferedWriter, host: *const Target) anyerror!void {
var zig_lib_directory = introspect.findZigLibDir(arena) catch |err| {
fatal("unable to find zig installation directory: {s}\n", .{@errorName(err)});
};
defer zig_lib_directory.handle.close();
defer allocator.free(zig_lib_directory.path.?);
const abilists_contents = zig_lib_directory.handle.readFileAlloc(
allocator,
arena,
glibc.abilists_path,
glibc.abilists_max_size,
) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
else => fatal("unable to read " ++ glibc.abilists_path ++ ": {s}", .{@errorName(err)}),
};
defer allocator.free(abilists_contents);
const glibc_abi = try glibc.loadMetaData(allocator, abilists_contents);
defer glibc_abi.destroy(allocator);
const glibc_abi = try glibc.loadMetaData(arena, abilists_contents);
var bw = io.bufferedWriter(stdout);
const w = bw.writer();
var sz = std.zon.stringify.serializer(w, .{});
var sz = std.zon.stringify.serializer(output, .{});
{
var root_obj = try sz.beginStruct(.{});
@ -52,10 +52,9 @@ pub fn cmdTargets(
{
var libc_obj = try root_obj.beginTupleField("libc", .{});
for (std.zig.target.available_libcs) |libc| {
const tmp = try std.fmt.allocPrint(allocator, "{s}-{s}-{s}", .{
const tmp = try std.fmt.allocPrint(arena, "{s}-{s}-{s}", .{
@tagName(libc.arch), @tagName(libc.os), @tagName(libc.abi),
});
defer allocator.free(tmp);
try libc_obj.field(tmp, .{});
}
try libc_obj.end();
@ -64,8 +63,7 @@ pub fn cmdTargets(
{
var glibc_obj = try root_obj.beginTupleField("glibc", .{});
for (glibc_abi.all_versions) |ver| {
const tmp = try std.fmt.allocPrint(allocator, "{}", .{ver});
defer allocator.free(tmp);
const tmp = try std.fmt.allocPrint(arena, "{}", .{ver});
try glibc_obj.field(tmp, .{});
}
try glibc_obj.end();
@ -105,21 +103,20 @@ pub fn cmdTargets(
{
var native_obj = try root_obj.beginStructField("native", .{});
{
const triple = try native_target.zigTriple(allocator);
defer allocator.free(triple);
const triple = try host.zigTriple(arena);
try native_obj.field("triple", triple, .{});
}
{
var cpu_obj = try native_obj.beginStructField("cpu", .{});
try cpu_obj.field("arch", @tagName(native_target.cpu.arch), .{});
try cpu_obj.field("arch", @tagName(host.cpu.arch), .{});
try cpu_obj.field("name", native_target.cpu.model.name, .{});
try cpu_obj.field("name", host.cpu.model.name, .{});
{
var features = try native_obj.beginTupleField("features", .{});
for (native_target.cpu.arch.allFeaturesList(), 0..) |feature, i_usize| {
for (host.cpu.arch.allFeaturesList(), 0..) |feature, i_usize| {
const index = @as(Target.Cpu.Feature.Set.Index, @intCast(i_usize));
if (native_target.cpu.features.isEnabled(index)) {
if (host.cpu.features.isEnabled(index)) {
try features.field(feature.name, .{});
}
}
@ -128,14 +125,13 @@ pub fn cmdTargets(
try cpu_obj.end();
}
try native_obj.field("os", @tagName(native_target.os.tag), .{});
try native_obj.field("abi", @tagName(native_target.abi), .{});
try native_obj.field("os", @tagName(host.os.tag), .{});
try native_obj.field("abi", @tagName(host.abi), .{});
try native_obj.end();
}
try root_obj.end();
}
try w.writeByte('\n');
return bw.flush();
try output.writeByte('\n');
}

File diff suppressed because it is too large Load Diff

View File

@ -1,13 +1,6 @@
pub fn renderToFile(zoir: Zoir, arena: Allocator, f: std.fs.File) (std.fs.File.WriteError || Allocator.Error)!void {
var bw = std.io.bufferedWriter(f.writer());
try renderToWriter(zoir, arena, bw.writer());
try bw.flush();
}
pub fn renderToWriter(zoir: Zoir, arena: Allocator, w: anytype) (@TypeOf(w).Error || Allocator.Error)!void {
pub fn renderToWriter(zoir: Zoir, arena: Allocator, w: *std.io.BufferedWriter) anyerror!void {
assert(!zoir.hasCompileErrors());
const fmtIntSizeBin = std.fmt.fmtIntSizeBin;
const bytes_per_node = comptime n: {
var n: usize = 0;
for (@typeInfo(Zoir.Node.Repr).@"struct".fields) |f| {
@ -23,22 +16,22 @@ pub fn renderToWriter(zoir: Zoir, arena: Allocator, w: anytype) (@TypeOf(w).Erro
// zig fmt: off
try w.print(
\\# Nodes: {} ({})
\\# Extra Data Items: {} ({})
\\# BigInt Limbs: {} ({})
\\# String Table Bytes: {}
\\# Total ZON Bytes: {}
\\# Nodes: {} ({Bi})
\\# Extra Data Items: {} ({Bi})
\\# BigInt Limbs: {} ({Bi})
\\# String Table Bytes: {Bi}
\\# Total ZON Bytes: {Bi}
\\
, .{
zoir.nodes.len, fmtIntSizeBin(node_bytes),
zoir.extra.len, fmtIntSizeBin(extra_bytes),
zoir.limbs.len, fmtIntSizeBin(limb_bytes),
fmtIntSizeBin(string_bytes),
fmtIntSizeBin(node_bytes + extra_bytes + limb_bytes + string_bytes),
zoir.nodes.len, node_bytes,
zoir.extra.len, extra_bytes,
zoir.limbs.len, limb_bytes,
string_bytes,
node_bytes + extra_bytes + limb_bytes + string_bytes,
});
// zig fmt: on
var pz: PrintZon = .{
.w = w.any(),
.w = w,
.arena = arena,
.zoir = zoir,
.indent = 0,
@ -48,7 +41,7 @@ pub fn renderToWriter(zoir: Zoir, arena: Allocator, w: anytype) (@TypeOf(w).Erro
}
const PrintZon = struct {
w: std.io.AnyWriter,
w: *std.io.BufferedWriter,
arena: Allocator,
zoir: Zoir,
indent: u32,

View File

@ -5905,7 +5905,7 @@ fn parseCPrimaryExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node {
if (slice[0] != '\'' or slice[1] == '\\' or slice.len == 3) {
return Tag.char_literal.create(c.arena, try escapeUnprintables(c, m));
} else {
const str = try std.fmt.allocPrint(c.arena, "0x{s}", .{std.fmt.fmtSliceHexLower(slice[1 .. slice.len - 1])});
const str = try std.fmt.allocPrint(c.arena, "0x{x}", .{slice[1 .. slice.len - 1]});
return Tag.integer_literal.create(c.arena, str);
}
},