aro: update to not use GenericWriter

This commit is contained in:
Andrew Kelley 2025-08-26 22:31:28 -07:00
parent 2151b10a41
commit 5d7507214d
9 changed files with 181 additions and 103 deletions

View File

@ -780,7 +780,7 @@ fn ignoredAttrErr(p: *Parser, tok: TokenIndex, attr: Attribute.Tag, context: []c
const strings_top = p.strings.items.len;
defer p.strings.items.len = strings_top;
try p.strings.writer().print("attribute '{s}' ignored on {s}", .{ @tagName(attr), context });
try p.strings.print("attribute '{s}' ignored on {s}", .{ @tagName(attr), context });
const str = try p.comp.diagnostics.arena.allocator().dupe(u8, p.strings.items[strings_top..]);
try p.errStr(.ignored_attribute, tok, str);
}

View File

@ -119,8 +119,7 @@ pub fn nameFromUniqueIndex(index: u16, buf: []u8) []u8 {
var node_index: u16 = 0;
var count: u16 = index;
var fbs = std.io.fixedBufferStream(buf);
const w = fbs.writer();
var w: std.Io.Writer = .fixed(buf);
while (true) {
var sibling_index = dafsa[node_index].child_index;
@ -142,7 +141,7 @@ pub fn nameFromUniqueIndex(index: u16, buf: []u8) []u8 {
if (count == 0) break;
}
return fbs.getWritten();
return w.buffered();
}
/// We're 1 bit shy of being able to fit this in a u32:

View File

@ -16,6 +16,7 @@ const Pragma = @import("Pragma.zig");
const StrInt = @import("StringInterner.zig");
const record_layout = @import("record_layout.zig");
const target_util = @import("target.zig");
const Writer = std.Io.Writer;
pub const Error = error{
/// A fatal error has occurred and compilation has stopped.
@ -199,7 +200,7 @@ fn getTimestamp(comp: *Compilation) !u47 {
return @intCast(std.math.clamp(timestamp, 0, max_timestamp));
}
fn generateDateAndTime(w: anytype, timestamp: u47) !void {
fn generateDateAndTime(w: *Writer, timestamp: u47) !void {
const epoch_seconds = EpochSeconds{ .secs = timestamp };
const epoch_day = epoch_seconds.getEpochDay();
const day_seconds = epoch_seconds.getDaySeconds();
@ -242,7 +243,7 @@ pub const SystemDefinesMode = enum {
include_system_defines,
};
fn generateSystemDefines(comp: *Compilation, w: anytype) !void {
fn generateSystemDefines(comp: *Compilation, w: *Writer) !void {
const ptr_width = comp.target.ptrBitWidth();
if (comp.langopts.gnuc_version > 0) {
@ -533,11 +534,13 @@ fn generateSystemDefines(comp: *Compilation, w: anytype) !void {
pub fn generateBuiltinMacros(comp: *Compilation, system_defines_mode: SystemDefinesMode) !Source {
try comp.generateBuiltinTypes();
var buf = std.array_list.Managed(u8).init(comp.gpa);
defer buf.deinit();
var allocating: std.Io.Writer.Allocating = .init(comp.gpa);
defer allocating.deinit();
const buf = &allocating.writer;
if (system_defines_mode == .include_system_defines) {
try buf.appendSlice(
try buf.writeAll(
\\#define __VERSION__ "Aro
++ " " ++ @import("../backend.zig").version_str ++ "\"\n" ++
\\#define __Aro__
@ -545,11 +548,11 @@ pub fn generateBuiltinMacros(comp: *Compilation, system_defines_mode: SystemDefi
);
}
try buf.appendSlice("#define __STDC__ 1\n");
try buf.writeAll("#define __STDC__ 1\n");
try buf.print("#define __STDC_HOSTED__ {d}\n", .{@intFromBool(comp.target.os.tag != .freestanding)});
// standard macros
try buf.appendSlice(
try buf.writeAll(
\\#define __STDC_NO_COMPLEX__ 1
\\#define __STDC_NO_THREADS__ 1
\\#define __STDC_NO_VLA__ 1
@ -561,23 +564,23 @@ pub fn generateBuiltinMacros(comp: *Compilation, system_defines_mode: SystemDefi
\\
);
if (comp.langopts.standard.StdCVersionMacro()) |stdc_version| {
try buf.appendSlice("#define __STDC_VERSION__ ");
try buf.appendSlice(stdc_version);
try buf.append('\n');
try buf.writeAll("#define __STDC_VERSION__ ");
try buf.writeAll(stdc_version);
try buf.writeByte('\n');
}
// timestamps
const timestamp = try comp.getTimestamp();
try generateDateAndTime(buf.writer(), timestamp);
try generateDateAndTime(buf, timestamp);
if (system_defines_mode == .include_system_defines) {
try comp.generateSystemDefines(buf.writer());
try comp.generateSystemDefines(buf);
}
return comp.addSourceFromBuffer("<builtin>", buf.items);
return comp.addSourceFromBuffer("<builtin>", allocating.written());
}
fn generateFloatMacros(w: anytype, prefix: []const u8, semantics: target_util.FPSemantics, ext: []const u8) !void {
fn generateFloatMacros(w: *Writer, prefix: []const u8, semantics: target_util.FPSemantics, ext: []const u8) !void {
const denormMin = semantics.chooseValue(
[]const u8,
.{
@ -656,7 +659,7 @@ fn generateFloatMacros(w: anytype, prefix: []const u8, semantics: target_util.FP
try w.print("#define {s}MIN__ {s}{s}\n", .{ prefix_slice, min, ext });
}
fn generateTypeMacro(w: anytype, mapper: StrInt.TypeMapper, name: []const u8, ty: Type, langopts: LangOpts) !void {
fn generateTypeMacro(w: *Writer, mapper: StrInt.TypeMapper, name: []const u8, ty: Type, langopts: LangOpts) !void {
try w.print("#define {s} ", .{name});
try ty.print(mapper, langopts, w);
try w.writeByte('\n');
@ -762,7 +765,7 @@ fn generateFastOrLeastType(
bits: usize,
kind: enum { least, fast },
signedness: std.builtin.Signedness,
w: anytype,
w: *Writer,
mapper: StrInt.TypeMapper,
) !void {
const ty = comp.intLeastN(bits, signedness); // defining the fast types as the least types is permitted
@ -793,7 +796,7 @@ fn generateFastOrLeastType(
try comp.generateFmt(prefix, w, ty);
}
fn generateFastAndLeastWidthTypes(comp: *Compilation, w: anytype, mapper: StrInt.TypeMapper) !void {
fn generateFastAndLeastWidthTypes(comp: *Compilation, w: *Writer, mapper: StrInt.TypeMapper) !void {
const sizes = [_]usize{ 8, 16, 32, 64 };
for (sizes) |size| {
try comp.generateFastOrLeastType(size, .least, .signed, w, mapper);
@ -803,7 +806,7 @@ fn generateFastAndLeastWidthTypes(comp: *Compilation, w: anytype, mapper: StrInt
}
}
fn generateExactWidthTypes(comp: *const Compilation, w: anytype, mapper: StrInt.TypeMapper) !void {
fn generateExactWidthTypes(comp: *const Compilation, w: *Writer, mapper: StrInt.TypeMapper) !void {
try comp.generateExactWidthType(w, mapper, .schar);
if (comp.intSize(.short) > comp.intSize(.char)) {
@ -851,7 +854,7 @@ fn generateExactWidthTypes(comp: *const Compilation, w: anytype, mapper: StrInt.
}
}
fn generateFmt(comp: *const Compilation, prefix: []const u8, w: anytype, ty: Type) !void {
fn generateFmt(comp: *const Compilation, prefix: []const u8, w: *Writer, ty: Type) !void {
const unsigned = ty.isUnsignedInt(comp);
const modifier = ty.formatModifier();
const formats = if (unsigned) "ouxX" else "di";
@ -860,7 +863,7 @@ fn generateFmt(comp: *const Compilation, prefix: []const u8, w: anytype, ty: Typ
}
}
fn generateSuffixMacro(comp: *const Compilation, prefix: []const u8, w: anytype, ty: Type) !void {
/// Emits the integer-constant suffix macro for `ty`,
/// e.g. `#define __UINT32_C_SUFFIX__ U` (see the suffix-macro note above).
/// Suffix text comes from `ty.intValueSuffix(comp)`.
fn generateSuffixMacro(comp: *const Compilation, prefix: []const u8, w: *Writer, ty: Type) !void {
return w.print("#define {s}_C_SUFFIX__ {s}\n", .{ prefix, ty.intValueSuffix(comp) });
}
@ -868,7 +871,7 @@ fn generateSuffixMacro(comp: *const Compilation, prefix: []const u8, w: anytype,
/// Name macro (e.g. #define __UINT32_TYPE__ unsigned int)
/// Format strings (e.g. #define __UINT32_FMTu__ "u")
/// Suffix macro (e.g. #define __UINT32_C_SUFFIX__ U)
fn generateExactWidthType(comp: *const Compilation, w: anytype, mapper: StrInt.TypeMapper, specifier: Type.Specifier) !void {
fn generateExactWidthType(comp: *const Compilation, w: *Writer, mapper: StrInt.TypeMapper, specifier: Type.Specifier) !void {
var ty = Type{ .specifier = specifier };
const width = 8 * ty.sizeof(comp).?;
const unsigned = ty.isUnsignedInt(comp);
@ -998,7 +1001,7 @@ fn generateVaListType(comp: *Compilation) !Type {
return ty;
}
fn generateIntMax(comp: *const Compilation, w: anytype, name: []const u8, ty: Type) !void {
fn generateIntMax(comp: *const Compilation, w: *Writer, name: []const u8, ty: Type) !void {
const bit_count: u8 = @intCast(ty.sizeof(comp).? * 8);
const unsigned = ty.isUnsignedInt(comp);
const max: u128 = switch (bit_count) {
@ -1023,7 +1026,7 @@ pub fn wcharMax(comp: *const Compilation) u32 {
};
}
fn generateExactWidthIntMax(comp: *const Compilation, w: anytype, specifier: Type.Specifier) !void {
fn generateExactWidthIntMax(comp: *const Compilation, w: *Writer, specifier: Type.Specifier) !void {
var ty = Type{ .specifier = specifier };
const bit_count: u8 = @intCast(ty.sizeof(comp).? * 8);
const unsigned = ty.isUnsignedInt(comp);
@ -1040,16 +1043,16 @@ fn generateExactWidthIntMax(comp: *const Compilation, w: anytype, specifier: Typ
return comp.generateIntMax(w, name, ty);
}
fn generateIntWidth(comp: *Compilation, w: anytype, name: []const u8, ty: Type) !void {
/// Emits `#define __<name>_WIDTH__ <bits>` where bits = 8 * sizeof(ty)
/// for the target described by `comp`.
fn generateIntWidth(comp: *Compilation, w: *Writer, name: []const u8, ty: Type) !void {
try w.print("#define __{s}_WIDTH__ {d}\n", .{ name, 8 * ty.sizeof(comp).? });
}
fn generateIntMaxAndWidth(comp: *Compilation, w: anytype, name: []const u8, ty: Type) !void {
/// Convenience wrapper: emits both the MAX macro and the WIDTH macro for
/// `name`/`ty` by delegating to `generateIntMax` then `generateIntWidth`.
fn generateIntMaxAndWidth(comp: *Compilation, w: *Writer, name: []const u8, ty: Type) !void {
try comp.generateIntMax(w, name, ty);
try comp.generateIntWidth(w, name, ty);
}
fn generateSizeofType(comp: *Compilation, w: anytype, name: []const u8, ty: Type) !void {
/// Emits `#define <name> <size>` where size is `ty`'s size in bytes
/// on the target (`ty.sizeof(comp)`; asserts the type is sized).
fn generateSizeofType(comp: *Compilation, w: *Writer, name: []const u8, ty: Type) !void {
try w.print("#define {s} {d}\n", .{ name, ty.sizeof(comp).? });
}

View File

@ -101,7 +101,7 @@ value_map: Tree.ValueMap,
// buffers used during compilation
syms: SymbolStack = .{},
strings: std.array_list.AlignedManaged(u8, .@"4"),
strings: std.array_list.Managed(u8),
labels: std.array_list.Managed(Label),
list_buf: NodeList,
decl_buf: NodeList,
@ -447,7 +447,17 @@ pub fn typeStr(p: *Parser, ty: Type) ![]const u8 {
defer p.strings.items.len = strings_top;
const mapper = p.comp.string_interner.getSlowTypeMapper();
try ty.print(mapper, p.comp.langopts, p.strings.writer());
{
var unmanaged = p.strings.moveToUnmanaged();
var allocating: std.Io.Writer.Allocating = .fromArrayList(p.comp.gpa, &unmanaged);
defer {
unmanaged = allocating.toArrayList();
p.strings = unmanaged.toManaged(p.comp.gpa);
}
ty.print(mapper, p.comp.langopts, &allocating.writer) catch |e| switch (e) {
error.WriteFailed => return error.OutOfMemory,
};
}
return try p.comp.diagnostics.arena.allocator().dupe(u8, p.strings.items[strings_top..]);
}
@ -455,7 +465,7 @@ pub fn typePairStr(p: *Parser, a: Type, b: Type) ![]const u8 {
return p.typePairStrExtra(a, " and ", b);
}
pub fn typePairStrExtra(p: *Parser, a: Type, msg: []const u8, b: Type) ![]const u8 {
pub fn typePairStrExtra(p: *Parser, a: Type, msg: []const u8, b: Type) Error![]const u8 {
if (@import("builtin").mode != .Debug) {
if (a.is(.invalid) or b.is(.invalid)) {
return "Tried to render invalid type - this is an aro bug.";
@ -466,29 +476,60 @@ pub fn typePairStrExtra(p: *Parser, a: Type, msg: []const u8, b: Type) ![]const
try p.strings.append('\'');
const mapper = p.comp.string_interner.getSlowTypeMapper();
try a.print(mapper, p.comp.langopts, p.strings.writer());
{
var unmanaged = p.strings.moveToUnmanaged();
var allocating: std.Io.Writer.Allocating = .fromArrayList(p.comp.gpa, &unmanaged);
defer {
unmanaged = allocating.toArrayList();
p.strings = unmanaged.toManaged(p.comp.gpa);
}
a.print(mapper, p.comp.langopts, &allocating.writer) catch |e| switch (e) {
error.WriteFailed => return error.OutOfMemory,
};
}
try p.strings.append('\'');
try p.strings.appendSlice(msg);
try p.strings.append('\'');
try b.print(mapper, p.comp.langopts, p.strings.writer());
{
var unmanaged = p.strings.moveToUnmanaged();
var allocating: std.Io.Writer.Allocating = .fromArrayList(p.comp.gpa, &unmanaged);
defer {
unmanaged = allocating.toArrayList();
p.strings = unmanaged.toManaged(p.comp.gpa);
}
b.print(mapper, p.comp.langopts, &allocating.writer) catch |e| switch (e) {
error.WriteFailed => return error.OutOfMemory,
};
}
try p.strings.append('\'');
return try p.comp.diagnostics.arena.allocator().dupe(u8, p.strings.items[strings_top..]);
}
pub fn valueChangedStr(p: *Parser, res: *Result, old_value: Value, int_ty: Type) ![]const u8 {
pub fn valueChangedStr(p: *Parser, res: *Result, old_value: Value, int_ty: Type) Error![]const u8 {
const strings_top = p.strings.items.len;
defer p.strings.items.len = strings_top;
var w = p.strings.writer();
const type_pair_str = try p.typePairStrExtra(res.ty, " to ", int_ty);
try w.writeAll(type_pair_str);
{
var unmanaged = p.strings.moveToUnmanaged();
var allocating: std.Io.Writer.Allocating = .fromArrayList(p.comp.gpa, &unmanaged);
defer {
unmanaged = allocating.toArrayList();
p.strings = unmanaged.toManaged(p.comp.gpa);
}
allocating.writer.writeAll(type_pair_str) catch return error.OutOfMemory;
try w.writeAll(" changes ");
if (res.val.isZero(p.comp)) try w.writeAll("non-zero ");
try w.writeAll("value from ");
try old_value.print(res.ty, p.comp, w);
try w.writeAll(" to ");
try res.val.print(int_ty, p.comp, w);
allocating.writer.writeAll(" changes ") catch return error.OutOfMemory;
if (res.val.isZero(p.comp)) allocating.writer.writeAll("non-zero ") catch return error.OutOfMemory;
allocating.writer.writeAll("value from ") catch return error.OutOfMemory;
old_value.print(res.ty, p.comp, &allocating.writer) catch |e| switch (e) {
error.WriteFailed => return error.OutOfMemory,
};
allocating.writer.writeAll(" to ") catch return error.OutOfMemory;
res.val.print(int_ty, p.comp, &allocating.writer) catch |e| switch (e) {
error.WriteFailed => return error.OutOfMemory,
};
}
return try p.comp.diagnostics.arena.allocator().dupe(u8, p.strings.items[strings_top..]);
}
@ -498,9 +539,8 @@ fn checkDeprecatedUnavailable(p: *Parser, ty: Type, usage_tok: TokenIndex, decl_
const strings_top = p.strings.items.len;
defer p.strings.items.len = strings_top;
const w = p.strings.writer();
const msg_str = p.comp.interner.get(@"error".msg.ref()).bytes;
try w.print("call to '{s}' declared with attribute error: {f}", .{
try p.strings.print("call to '{s}' declared with attribute error: {f}", .{
p.tokSlice(@"error".__name_tok), std.zig.fmtString(msg_str),
});
const str = try p.comp.diagnostics.arena.allocator().dupe(u8, p.strings.items[strings_top..]);
@ -510,9 +550,8 @@ fn checkDeprecatedUnavailable(p: *Parser, ty: Type, usage_tok: TokenIndex, decl_
const strings_top = p.strings.items.len;
defer p.strings.items.len = strings_top;
const w = p.strings.writer();
const msg_str = p.comp.interner.get(warning.msg.ref()).bytes;
try w.print("call to '{s}' declared with attribute warning: {f}", .{
try p.strings.print("call to '{s}' declared with attribute warning: {f}", .{
p.tokSlice(warning.__name_tok), std.zig.fmtString(msg_str),
});
const str = try p.comp.diagnostics.arena.allocator().dupe(u8, p.strings.items[strings_top..]);
@ -532,17 +571,16 @@ fn errDeprecated(p: *Parser, tag: Diagnostics.Tag, tok_i: TokenIndex, msg: ?Valu
const strings_top = p.strings.items.len;
defer p.strings.items.len = strings_top;
const w = p.strings.writer();
try w.print("'{s}' is ", .{p.tokSlice(tok_i)});
try p.strings.print("'{s}' is ", .{p.tokSlice(tok_i)});
const reason: []const u8 = switch (tag) {
.unavailable => "unavailable",
.deprecated_declarations => "deprecated",
else => unreachable,
};
try w.writeAll(reason);
try p.strings.appendSlice(reason);
if (msg) |m| {
const str = p.comp.interner.get(m.ref()).bytes;
try w.print(": {f}", .{std.zig.fmtString(str)});
try p.strings.print(": {f}", .{std.zig.fmtString(str)});
}
const str = try p.comp.diagnostics.arena.allocator().dupe(u8, p.strings.items[strings_top..]);
return p.errStr(tag, tok_i, str);
@ -693,7 +731,7 @@ pub fn parse(pp: *Preprocessor) Compilation.Error!Tree {
.gpa = pp.comp.gpa,
.arena = arena.allocator(),
.tok_ids = pp.tokens.items(.id),
.strings = std.array_list.AlignedManaged(u8, .@"4").init(pp.comp.gpa),
.strings = std.array_list.Managed(u8).init(pp.comp.gpa),
.value_map = Tree.ValueMap.init(pp.comp.gpa),
.data = NodeList.init(pp.comp.gpa),
.labels = std.array_list.Managed(Label).init(pp.comp.gpa),
@ -1218,38 +1256,46 @@ fn decl(p: *Parser) Error!bool {
return true;
}
fn staticAssertMessage(p: *Parser, cond_node: NodeIndex, message: Result) !?[]const u8 {
fn staticAssertMessage(p: *Parser, cond_node: NodeIndex, message: Result) Error!?[]const u8 {
const cond_tag = p.nodes.items(.tag)[@intFromEnum(cond_node)];
if (cond_tag != .builtin_types_compatible_p and message.node == .none) return null;
var buf = std.array_list.Managed(u8).init(p.gpa);
defer buf.deinit();
var allocating: std.Io.Writer.Allocating = .init(p.gpa);
defer allocating.deinit();
const buf = &allocating.writer;
if (cond_tag == .builtin_types_compatible_p) {
const mapper = p.comp.string_interner.getSlowTypeMapper();
const data = p.nodes.items(.data)[@intFromEnum(cond_node)].bin;
try buf.appendSlice("'__builtin_types_compatible_p(");
buf.writeAll("'__builtin_types_compatible_p(") catch return error.OutOfMemory;
const lhs_ty = p.nodes.items(.ty)[@intFromEnum(data.lhs)];
try lhs_ty.print(mapper, p.comp.langopts, buf.writer());
try buf.appendSlice(", ");
lhs_ty.print(mapper, p.comp.langopts, buf) catch |e| switch (e) {
error.WriteFailed => return error.OutOfMemory,
};
buf.writeAll(", ") catch return error.OutOfMemory;
const rhs_ty = p.nodes.items(.ty)[@intFromEnum(data.rhs)];
try rhs_ty.print(mapper, p.comp.langopts, buf.writer());
rhs_ty.print(mapper, p.comp.langopts, buf) catch |e| switch (e) {
error.WriteFailed => return error.OutOfMemory,
};
try buf.appendSlice(")'");
buf.writeAll(")'") catch return error.OutOfMemory;
}
if (message.node != .none) {
assert(p.nodes.items(.tag)[@intFromEnum(message.node)] == .string_literal_expr);
if (buf.items.len > 0) {
try buf.append(' ');
if (buf.buffered().len > 0) {
buf.writeByte(' ') catch return error.OutOfMemory;
}
const bytes = p.comp.interner.get(message.val.ref()).bytes;
try buf.ensureUnusedCapacity(bytes.len);
try Value.printString(bytes, message.ty, p.comp, buf.writer());
try allocating.ensureUnusedCapacity(bytes.len);
Value.printString(bytes, message.ty, p.comp, buf) catch |e| switch (e) {
error.WriteFailed => return error.OutOfMemory,
};
}
return try p.comp.diagnostics.arena.allocator().dupe(u8, buf.items);
return try p.comp.diagnostics.arena.allocator().dupe(u8, allocating.written());
}
/// staticAssert
@ -4981,7 +5027,7 @@ const CallExpr = union(enum) {
return true;
}
fn checkVarArg(self: CallExpr, p: *Parser, first_after: TokenIndex, param_tok: TokenIndex, arg: *Result, arg_idx: u32) !void {
fn checkVarArg(self: CallExpr, p: *Parser, first_after: TokenIndex, param_tok: TokenIndex, arg: *Result, arg_idx: u32) Error!void {
if (self == .standard) return;
const builtin_tok = p.nodes.items(.data)[@intFromEnum(self.builtin.node)].decl.name;
@ -5183,7 +5229,17 @@ pub const Result = struct {
const strings_top = p.strings.items.len;
defer p.strings.items.len = strings_top;
try res.val.print(res.ty, p.comp, p.strings.writer());
{
var unmanaged = p.strings.moveToUnmanaged();
var allocating: std.Io.Writer.Allocating = .fromArrayList(p.comp.gpa, &unmanaged);
defer {
unmanaged = allocating.toArrayList();
p.strings = unmanaged.toManaged(p.comp.gpa);
}
res.val.print(res.ty, p.comp, &allocating.writer) catch |e| switch (e) {
error.WriteFailed => return error.OutOfMemory,
};
}
return try p.comp.diagnostics.arena.allocator().dupe(u8, p.strings.items[strings_top..]);
}
@ -5347,7 +5403,7 @@ pub const Result = struct {
conditional,
add,
sub,
}) !bool {
}) Error!bool {
if (b.ty.specifier == .invalid) {
try a.saveValue(p);
a.ty = Type.invalid;
@ -5643,7 +5699,7 @@ pub const Result = struct {
}
}
fn floatToIntWarning(res: *Result, p: *Parser, int_ty: Type, old_value: Value, change_kind: Value.FloatToIntChangeKind, tok: TokenIndex) !void {
fn floatToIntWarning(res: *Result, p: *Parser, int_ty: Type, old_value: Value, change_kind: Value.FloatToIntChangeKind, tok: TokenIndex) Error!void {
switch (change_kind) {
.none => return p.errStr(.float_to_int, tok, try p.typePairStrExtra(res.ty, " to ", int_ty)),
.out_of_range => return p.errStr(.float_out_of_range, tok, try p.typePairStrExtra(res.ty, " to ", int_ty)),
@ -5866,7 +5922,7 @@ pub const Result = struct {
res.val = .{};
}
fn castType(res: *Result, p: *Parser, to: Type, operand_tok: TokenIndex, l_paren: TokenIndex) !void {
fn castType(res: *Result, p: *Parser, to: Type, operand_tok: TokenIndex, l_paren: TokenIndex) Error!void {
var cast_kind: Tree.CastKind = undefined;
if (to.is(.void)) {
@ -7595,9 +7651,19 @@ fn validateFieldAccess(p: *Parser, record_ty: *const Type.Record, expr_ty: Type,
p.strings.items.len = 0;
try p.strings.writer().print("'{s}' in '", .{p.tokSlice(field_name_tok)});
try p.strings.print("'{s}' in '", .{p.tokSlice(field_name_tok)});
const mapper = p.comp.string_interner.getSlowTypeMapper();
try expr_ty.print(mapper, p.comp.langopts, p.strings.writer());
{
var unmanaged = p.strings.moveToUnmanaged();
var allocating: std.Io.Writer.Allocating = .fromArrayList(p.comp.gpa, &unmanaged);
defer {
unmanaged = allocating.toArrayList();
p.strings = unmanaged.toManaged(p.comp.gpa);
}
expr_ty.print(mapper, p.comp.langopts, &allocating.writer) catch |e| switch (e) {
error.WriteFailed => return error.OutOfMemory,
};
}
try p.strings.append('\'');
const duped = try p.comp.diagnostics.arena.allocator().dupe(u8, p.strings.items);
@ -8016,7 +8082,17 @@ fn primaryExpr(p: *Parser) Error!Result {
defer p.strings.items.len = strings_top;
const mapper = p.comp.string_interner.getSlowTypeMapper();
try Type.printNamed(func_ty, p.tokSlice(p.func.name), mapper, p.comp.langopts, p.strings.writer());
{
var unmanaged = p.strings.moveToUnmanaged();
var allocating: std.Io.Writer.Allocating = .fromArrayList(p.comp.gpa, &unmanaged);
defer {
unmanaged = allocating.toArrayList();
p.strings = unmanaged.toManaged(p.comp.gpa);
}
Type.printNamed(func_ty, p.tokSlice(p.func.name), mapper, p.comp.langopts, &allocating.writer) catch |e| switch (e) {
error.WriteFailed => return error.OutOfMemory,
};
}
try p.strings.append(0);
const predef = try p.makePredefinedIdentifier(strings_top);
ty = predef.ty;

View File

@ -15,6 +15,7 @@ const TokenWithExpansionLocs = Tree.TokenWithExpansionLocs;
const Attribute = @import("Attribute.zig");
const features = @import("features.zig");
const Hideset = @import("Hideset.zig");
const Writer = std.Io.Writer;
const DefineMap = std.StringHashMapUnmanaged(Macro);
const RawTokenList = std.array_list.Managed(RawToken);
@ -982,7 +983,7 @@ fn expr(pp: *Preprocessor, tokenizer: *Tokenizer) MacroError!bool {
.tok_i = @intCast(token_state.tokens_len),
.arena = pp.arena.allocator(),
.in_macro = true,
.strings = std.array_list.AlignedManaged(u8, .@"4").init(pp.comp.gpa),
.strings = std.array_list.Managed(u8).init(pp.comp.gpa),
.data = undefined,
.value_map = undefined,
@ -1193,24 +1194,21 @@ fn expandObjMacro(pp: *Preprocessor, simple_macro: *const Macro) Error!ExpandBuf
.macro_file => {
const start = pp.comp.generated_buf.items.len;
const source = pp.comp.getSource(pp.expansion_source_loc.id);
const w = pp.comp.generated_buf.writer(pp.gpa);
try w.print("\"{s}\"\n", .{source.path});
try pp.comp.generated_buf.print(pp.gpa, "\"{s}\"\n", .{source.path});
buf.appendAssumeCapacity(try pp.makeGeneratedToken(start, .string_literal, tok));
},
.macro_line => {
const start = pp.comp.generated_buf.items.len;
const source = pp.comp.getSource(pp.expansion_source_loc.id);
const w = pp.comp.generated_buf.writer(pp.gpa);
try w.print("{d}\n", .{source.physicalLine(pp.expansion_source_loc)});
try pp.comp.generated_buf.print(pp.gpa, "{d}\n", .{source.physicalLine(pp.expansion_source_loc)});
buf.appendAssumeCapacity(try pp.makeGeneratedToken(start, .pp_num, tok));
},
.macro_counter => {
defer pp.counter += 1;
const start = pp.comp.generated_buf.items.len;
const w = pp.comp.generated_buf.writer(pp.gpa);
try w.print("{d}\n", .{pp.counter});
try pp.comp.generated_buf.print(pp.gpa, "{d}\n", .{pp.counter});
buf.appendAssumeCapacity(try pp.makeGeneratedToken(start, .pp_num, tok));
},
@ -1682,8 +1680,7 @@ fn expandFuncMacro(
break :blk false;
} else try pp.handleBuiltinMacro(raw.id, arg, macro_tok.loc);
const start = pp.comp.generated_buf.items.len;
const w = pp.comp.generated_buf.writer(pp.gpa);
try w.print("{}\n", .{@intFromBool(result)});
try pp.comp.generated_buf.print(pp.gpa, "{}\n", .{@intFromBool(result)});
try buf.append(try pp.makeGeneratedToken(start, .pp_num, tokFromRaw(raw)));
},
.macro_param_has_c_attribute => {
@ -2988,18 +2985,16 @@ fn embed(pp: *Preprocessor, tokenizer: *Tokenizer) MacroError!void {
// TODO: We currently only support systems with CHAR_BIT == 8
// If the target's CHAR_BIT is not 8, we need to write out correctly-sized embed_bytes
// and correctly account for the target's endianness
const writer = pp.comp.generated_buf.writer(pp.gpa);
{
const byte = embed_bytes[0];
const start = pp.comp.generated_buf.items.len;
try writer.print("{d}", .{byte});
try pp.comp.generated_buf.print(pp.gpa, "{d}", .{byte});
pp.addTokenAssumeCapacity(try pp.makeGeneratedToken(start, .embed_byte, filename_tok));
}
for (embed_bytes[1..]) |byte| {
const start = pp.comp.generated_buf.items.len;
try writer.print(",{d}", .{byte});
try pp.comp.generated_buf.print(pp.gpa, ",{d}", .{byte});
pp.addTokenAssumeCapacity(.{ .id = .comma, .loc = .{ .id = .generated, .byte_offset = @intCast(start) } });
pp.addTokenAssumeCapacity(try pp.makeGeneratedToken(start + 1, .embed_byte, filename_tok));
}
@ -3241,7 +3236,7 @@ fn findIncludeSource(pp: *Preprocessor, tokenizer: *Tokenizer, first: RawToken,
fn printLinemarker(
pp: *Preprocessor,
w: anytype,
w: *Writer,
line_no: u32,
source: Source,
start_resume: enum(u8) { start, @"resume", none },
@ -3301,7 +3296,7 @@ pub const DumpMode = enum {
/// Pretty-print the macro define or undef at location `loc`.
/// We re-tokenize the directive because we are printing a macro that may have the same name as one in
/// `pp.defines` but a different definition (due to being #undef'ed and then redefined)
fn prettyPrintMacro(pp: *Preprocessor, w: anytype, loc: Source.Location, parts: enum { name_only, name_and_body }) !void {
fn prettyPrintMacro(pp: *Preprocessor, w: *Writer, loc: Source.Location, parts: enum { name_only, name_and_body }) !void {
const source = pp.comp.getSource(loc.id);
var tokenizer: Tokenizer = .{
.buf = source.buf,
@ -3339,7 +3334,7 @@ fn prettyPrintMacro(pp: *Preprocessor, w: anytype, loc: Source.Location, parts:
}
}
fn prettyPrintMacrosOnly(pp: *Preprocessor, w: anytype) !void {
fn prettyPrintMacrosOnly(pp: *Preprocessor, w: *Writer) !void {
var it = pp.defines.valueIterator();
while (it.next()) |macro| {
if (macro.is_builtin) continue;
@ -3351,7 +3346,7 @@ fn prettyPrintMacrosOnly(pp: *Preprocessor, w: anytype) !void {
}
/// Pretty print tokens and try to preserve whitespace.
pub fn prettyPrintTokens(pp: *Preprocessor, w: anytype, macro_dump_mode: DumpMode) !void {
pub fn prettyPrintTokens(pp: *Preprocessor, w: *Writer, macro_dump_mode: DumpMode) !void {
if (macro_dump_mode == .macros_only) {
return pp.prettyPrintMacrosOnly(w);
}

View File

@ -9,6 +9,7 @@ const StringInterner = @import("StringInterner.zig");
const StringId = StringInterner.StringId;
const target_util = @import("target.zig");
const LangOpts = @import("LangOpts.zig");
const Writer = std.Io.Writer;
pub const Qualifiers = packed struct {
@"const": bool = false,
@ -23,7 +24,7 @@ pub const Qualifiers = packed struct {
return quals.@"const" or quals.restrict or quals.@"volatile" or quals.atomic;
}
pub fn dump(quals: Qualifiers, w: anytype) !void {
pub fn dump(quals: Qualifiers, w: *Writer) !void {
if (quals.@"const") try w.writeAll("const ");
if (quals.atomic) try w.writeAll("_Atomic ");
if (quals.@"volatile") try w.writeAll("volatile ");
@ -2411,12 +2412,12 @@ pub fn intValueSuffix(ty: Type, comp: *const Compilation) []const u8 {
}
/// Print type in C style
pub fn print(ty: Type, mapper: StringInterner.TypeMapper, langopts: LangOpts, w: anytype) @TypeOf(w).Error!void {
pub fn print(ty: Type, mapper: StringInterner.TypeMapper, langopts: LangOpts, w: *Writer) Writer.Error!void {
_ = try ty.printPrologue(mapper, langopts, w);
try ty.printEpilogue(mapper, langopts, w);
}
pub fn printNamed(ty: Type, name: []const u8, mapper: StringInterner.TypeMapper, langopts: LangOpts, w: anytype) @TypeOf(w).Error!void {
pub fn printNamed(ty: Type, name: []const u8, mapper: StringInterner.TypeMapper, langopts: LangOpts, w: *Writer) Writer.Error!void {
const simple = try ty.printPrologue(mapper, langopts, w);
if (simple) try w.writeByte(' ');
try w.writeAll(name);
@ -2426,7 +2427,7 @@ pub fn printNamed(ty: Type, name: []const u8, mapper: StringInterner.TypeMapper,
const StringGetter = fn (TokenIndex) []const u8;
/// return true if `ty` is simple
fn printPrologue(ty: Type, mapper: StringInterner.TypeMapper, langopts: LangOpts, w: anytype) @TypeOf(w).Error!bool {
fn printPrologue(ty: Type, mapper: StringInterner.TypeMapper, langopts: LangOpts, w: *Writer) Writer.Error!bool {
if (ty.qual.atomic) {
var non_atomic_ty = ty;
non_atomic_ty.qual.atomic = false;
@ -2497,7 +2498,7 @@ fn printPrologue(ty: Type, mapper: StringInterner.TypeMapper, langopts: LangOpts
return true;
}
fn printEpilogue(ty: Type, mapper: StringInterner.TypeMapper, langopts: LangOpts, w: anytype) @TypeOf(w).Error!void {
fn printEpilogue(ty: Type, mapper: StringInterner.TypeMapper, langopts: LangOpts, w: *Writer) Writer.Error!void {
if (ty.qual.atomic) return;
if (ty.isPtr()) {
const elem_ty = ty.elemType();
@ -2564,7 +2565,7 @@ fn printEpilogue(ty: Type, mapper: StringInterner.TypeMapper, langopts: LangOpts
const dump_detailed_containers = false;
// Print as Zig types since those are actually readable
pub fn dump(ty: Type, mapper: StringInterner.TypeMapper, langopts: LangOpts, w: anytype) @TypeOf(w).Error!void {
pub fn dump(ty: Type, mapper: StringInterner.TypeMapper, langopts: LangOpts, w: *Writer) Writer.Error!void {
try ty.qual.dump(w);
switch (ty.specifier) {
.invalid => try w.writeAll("invalid"),
@ -2656,7 +2657,7 @@ pub fn dump(ty: Type, mapper: StringInterner.TypeMapper, langopts: LangOpts, w:
}
}
fn dumpEnum(@"enum": *Enum, mapper: StringInterner.TypeMapper, w: anytype) @TypeOf(w).Error!void {
fn dumpEnum(@"enum": *Enum, mapper: StringInterner.TypeMapper, w: *Writer) Writer.Error!void {
try w.writeAll(" {");
for (@"enum".fields) |field| {
try w.print(" {s} = {d},", .{ mapper.lookup(field.name), field.value });
@ -2664,7 +2665,7 @@ fn dumpEnum(@"enum": *Enum, mapper: StringInterner.TypeMapper, w: anytype) @Type
try w.writeAll(" }");
}
fn dumpRecord(record: *Record, mapper: StringInterner.TypeMapper, langopts: LangOpts, w: anytype) @TypeOf(w).Error!void {
fn dumpRecord(record: *Record, mapper: StringInterner.TypeMapper, langopts: LangOpts, w: *Writer) Writer.Error!void {
try w.writeAll(" {");
for (record.fields) |field| {
try w.writeByte(' ');

View File

@ -9,6 +9,7 @@ const Compilation = @import("Compilation.zig");
const Type = @import("Type.zig");
const target_util = @import("target.zig");
const annex_g = @import("annex_g.zig");
const Writer = std.Io.Writer;
const Value = @This();
@ -953,7 +954,7 @@ pub fn maxInt(ty: Type, comp: *Compilation) !Value {
return twosCompIntLimit(.max, ty, comp);
}
pub fn print(v: Value, ty: Type, comp: *const Compilation, w: anytype) @TypeOf(w).Error!void {
pub fn print(v: Value, ty: Type, comp: *const Compilation, w: *Writer) Writer.Error!void {
if (ty.is(.bool)) {
return w.writeAll(if (v.isZero(comp)) "false" else "true");
}
@ -977,7 +978,7 @@ pub fn print(v: Value, ty: Type, comp: *const Compilation, w: anytype) @TypeOf(w
}
}
pub fn printString(bytes: []const u8, ty: Type, comp: *const Compilation, w: anytype) @TypeOf(w).Error!void {
pub fn printString(bytes: []const u8, ty: Type, comp: *const Compilation, w: *Writer) Writer.Error!void {
const size: Compilation.CharUnitSize = @enumFromInt(ty.elemType().sizeof(comp).?);
const without_null = bytes[0 .. bytes.len - @intFromEnum(size)];
try w.writeByte('"');

View File

@ -832,7 +832,7 @@ const Context = struct {
fn addTokenFmt(c: *Context, tag: TokenTag, comptime format: []const u8, args: anytype) Allocator.Error!TokenIndex {
const start_index = c.buf.items.len;
try c.buf.writer().print(format ++ " ", args);
try c.buf.print(format ++ " ", args);
try c.tokens.append(c.gpa, .{
.tag = tag,

View File

@ -334,7 +334,10 @@ pub fn buildImportLib(comp: *Compilation, lib_name: []const u8) !void {
// new scope to ensure definition file is written before passing the path to WriteImportLibrary
const def_final_file = try o_dir.createFile(final_def_basename, .{ .truncate = true });
defer def_final_file.close();
try pp.prettyPrintTokens(def_final_file.deprecatedWriter(), .result_only);
var buffer: [1024]u8 = undefined;
var def_final_file_writer = def_final_file.writer(&buffer);
try pp.prettyPrintTokens(&def_final_file_writer.interface, .result_only);
try def_final_file_writer.interface.flush();
}
const lib_final_path = try std.fs.path.join(gpa, &.{ "o", &digest, final_lib_basename });