update aro & translate-c

Veikka Tuominen 2025-09-14 16:41:44 +03:00 committed by Andrew Kelley
parent e05073b9e4
commit d83c76eb5a
17 changed files with 278 additions and 196 deletions

View File

@ -792,12 +792,7 @@ pub fn normalize(name: []const u8) []const u8 {
}
fn ignoredAttrErr(p: *Parser, tok: TokenIndex, attr: Attribute.Tag, context: []const u8) !void {
const strings_top = p.strings.items.len;
defer p.strings.items.len = strings_top;
try p.strings.print("attribute '{s}' ignored on {s}", .{ @tagName(attr), context });
const str = try p.comp.diagnostics.arena.allocator().dupe(u8, p.strings.items[strings_top..]);
try p.errStr(.ignored_attribute, tok, str);
try p.err(tok, .ignored_attribute, .{ @tagName(attr), context });
}
pub fn applyParameterAttributes(p: *Parser, qt: QualType, attr_buf_start: usize, diagnostic: ?Parser.Diagnostic) !QualType {

View File

@ -132,6 +132,8 @@ sources: std.StringArrayHashMapUnmanaged(Source) = .empty,
/// Allocated into `gpa`, but keys are externally managed.
include_dirs: std.ArrayList([]const u8) = .empty,
/// Allocated into `gpa`, but keys are externally managed.
iquote_include_dirs: std.ArrayList([]const u8) = .empty,
/// Allocated into `gpa`, but keys are externally managed.
system_include_dirs: std.ArrayList([]const u8) = .empty,
/// Allocated into `gpa`, but keys are externally managed.
after_include_dirs: std.ArrayList([]const u8) = .empty,
@ -192,6 +194,7 @@ pub fn deinit(comp: *Compilation) void {
}
comp.sources.deinit(gpa);
comp.include_dirs.deinit(gpa);
comp.iquote_include_dirs.deinit(gpa);
comp.system_include_dirs.deinit(gpa);
comp.after_include_dirs.deinit(gpa);
comp.framework_dirs.deinit(gpa);
@ -240,12 +243,26 @@ fn generateSystemDefines(comp: *Compilation, w: *std.Io.Writer) !void {
const ptr_width = comp.target.ptrBitWidth();
const is_gnu = comp.langopts.standard.isGNU();
if (comp.langopts.gnuc_version > 0) {
try w.print("#define __GNUC__ {d}\n", .{comp.langopts.gnuc_version / 10_000});
try w.print("#define __GNUC_MINOR__ {d}\n", .{comp.langopts.gnuc_version / 100 % 100});
try w.print("#define __GNUC_PATCHLEVEL__ {d}\n", .{comp.langopts.gnuc_version % 100});
const gnuc_version = comp.langopts.gnuc_version orelse comp.langopts.emulate.defaultGccVersion();
if (gnuc_version > 0) {
try w.print("#define __GNUC__ {d}\n", .{gnuc_version / 10_000});
try w.print("#define __GNUC_MINOR__ {d}\n", .{gnuc_version / 100 % 100});
try w.print("#define __GNUC_PATCHLEVEL__ {d}\n", .{gnuc_version % 100});
}
try w.writeAll(
\\#define __ARO_EMULATE_CLANG__ 1
\\#define __ARO_EMULATE_GCC__ 2
\\#define __ARO_EMULATE_MSVC__ 3
\\
);
const emulated = switch (comp.langopts.emulate) {
.clang => "__ARO_EMULATE_CLANG__",
.gcc => "__ARO_EMULATE_GCC__",
.msvc => "__ARO_EMULATE_MSVC__",
};
try w.print("#define __ARO_EMULATE__ {s}\n", .{emulated});
if (comp.code_gen_options.optimization_level.hasAnyOptimizations()) {
try define(w, "__OPTIMIZE__");
}
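The `gnuc_version` value uses the encoding major * 10_000 + minor * 100 + patch (4.2.1 == 40201), which is what the three divisions above unpack into `__GNUC__`, `__GNUC_MINOR__`, and `__GNUC_PATCHLEVEL__`; the new `__ARO_EMULATE__` define additionally records which compiler is being emulated. A small standalone sketch of the version decomposition (illustrative only, not aro code):

    const std = @import("std");

    test "gnuc_version encoding: 4.2.1 -> 40201" {
        const gnuc_version: u32 = 4 * 10_000 + 2 * 100 + 1; // clang's default, 40201
        try std.testing.expectEqual(@as(u32, 4), gnuc_version / 10_000); // __GNUC__
        try std.testing.expectEqual(@as(u32, 2), gnuc_version / 100 % 100); // __GNUC_MINOR__
        try std.testing.expectEqual(@as(u32, 1), gnuc_version % 100); // __GNUC_PATCHLEVEL__
    }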
@ -330,6 +347,8 @@ fn generateSystemDefines(comp: *Compilation, w: *std.Io.Writer) !void {
=> try define(w, "__APPLE__"),
.wasi => try define(w, "__wasi__"),
.emscripten => try define(w, "__EMSCRIPTEN__"),
.@"3ds" => try define(w, "__3DS__"),
.vita => try define(w, "__vita__"),
else => {},
}
@ -431,10 +450,13 @@ fn generateSystemDefines(comp: *Compilation, w: *std.Io.Writer) !void {
.{ .f16c, "__F16C__" },
.{ .gfni, "__GFNI__" },
.{ .evex512, "__EVEX512__" },
.{ .avx10_1_256, "__AVX10_1__" },
.{ .avx10_1_512, "__AVX10_1_512__" },
.{ .avx10_2_256, "__AVX10_2__" },
.{ .avx10_2_512, "__AVX10_2_512__" },
.{ .avx10_1, "__AVX10_1__" },
.{ .avx10_1, "__AVX10_1_512__" },
.{ .avx10_2, "__AVX10_2__" },
.{ .avx10_2, "__AVX10_2_512__" },
.{ .avx512cd, "__AVX512CD__" },
.{ .avx512vpopcntdq, "__AVX512VPOPCNTDQ__" },
.{ .avx512vnni, "__AVX512VNNI__" },
@ -935,7 +957,7 @@ fn generateSystemDefines(comp: *Compilation, w: *std.Io.Writer) !void {
pub fn generateBuiltinMacros(comp: *Compilation, system_defines_mode: SystemDefinesMode) AddSourceError!Source {
try comp.type_store.initNamedTypes(comp);
var allocating: std.io.Writer.Allocating = try .initCapacity(comp.gpa, 2 << 13);
var allocating: std.Io.Writer.Allocating = try .initCapacity(comp.gpa, 2 << 13);
defer allocating.deinit();
comp.writeBuiltinMacros(system_defines_mode, &allocating.writer) catch |err| switch (err) {
@ -1297,6 +1319,11 @@ fn generateIntWidth(comp: *Compilation, w: *std.Io.Writer, name: []const u8, qt:
try w.print("#define __{s}_WIDTH__ {d}\n", .{ name, qt.sizeof(comp) * 8 });
}
fn generateIntMaxAndWidth(comp: *Compilation, w: *std.Io.Writer, name: []const u8, qt: QualType) !void {
try comp.generateIntMax(w, name, qt);
try comp.generateIntWidth(w, name, qt);
}
fn generateSizeofType(comp: *Compilation, w: *std.Io.Writer, name: []const u8, qt: QualType) !void {
try w.print("#define {s} {d}\n", .{ name, qt.sizeof(comp) });
}
@ -1597,7 +1624,7 @@ pub fn hasInclude(
which: WhichInclude,
opt_dep_file: ?*DepFile,
) Compilation.Error!bool {
if (try FindInclude.run(comp, filename, switch (which) {
if (try FindInclude.run(comp, filename, include_type, switch (which) {
.next => .{ .only_search_after_dir = comp.getSource(includer_token_source).path },
.first => switch (include_type) {
.quotes => .{ .allow_same_dir = comp.getSource(includer_token_source).path },
@ -1629,6 +1656,7 @@ const FindInclude = struct {
fn run(
comp: *Compilation,
include_path: []const u8,
include_type: IncludeType,
search_strat: union(enum) {
allow_same_dir: []const u8,
only_search,
@ -1663,7 +1691,12 @@ const FindInclude = struct {
find.wait_for = std.fs.path.dirname(other_file);
},
}
switch (include_type) {
.quotes => for (comp.iquote_include_dirs.items) |dir| {
if (try find.checkIncludeDir(dir, .user)) |res| return res;
},
.angle_brackets => {},
}
for (comp.include_dirs.items) |dir| {
if (try find.checkIncludeDir(dir, .user)) |res| return res;
}
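With this change, `-iquote` directories participate only in `#include "..."` lookups and are searched ahead of the regular `-I` list. A minimal standalone sketch of that ordering decision, using hypothetical names rather than aro's actual FindInclude API:

    const std = @import("std");

    const IncludeType = enum { quotes, angle_brackets };

    // Returns the directory lists in the order they would be consulted.
    fn quoteAwareSearchOrder(
        include_type: IncludeType,
        iquote_dirs: []const []const u8,
        include_dirs: []const []const u8,
    ) [2][]const []const u8 {
        return switch (include_type) {
            // #include "foo.h": -iquote dirs first, then -I dirs.
            .quotes => .{ iquote_dirs, include_dirs },
            // #include <foo.h>: -iquote dirs are skipped entirely.
            .angle_brackets => .{ &.{}, include_dirs },
        };
    }

    test quoteAwareSearchOrder {
        const iquote = [_][]const u8{"src/local"};
        const normal = [_][]const u8{"/usr/include"};
        const order = quoteAwareSearchOrder(.quotes, &iquote, &normal);
        try std.testing.expectEqualStrings("src/local", order[0][0]);
        try std.testing.expectEqualStrings("/usr/include", order[1][0]);
        try std.testing.expectEqual(@as(usize, 0), quoteAwareSearchOrder(.angle_brackets, &iquote, &normal)[0].len);
    }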
@ -1876,7 +1909,7 @@ pub fn findInclude(
/// include vs include_next
which: WhichInclude,
) Compilation.Error!?Source {
const found = try FindInclude.run(comp, filename, switch (which) {
const found = try FindInclude.run(comp, filename, include_type, switch (which) {
.next => .{ .only_search_after_dir = comp.getSource(includer_token.source).path },
.first => switch (include_type) {
.quotes => .{ .allow_same_dir = comp.getSource(includer_token.source).path },

View File

@ -194,6 +194,7 @@ pub const Option = enum {
@"microsoft-anon-tag",
@"out-of-scope-function",
@"date-time",
@"variadic-macro-arguments-omitted",
@"attribute-todo",
/// GNU extensions
@ -496,27 +497,35 @@ pub fn formatArgs(w: *std.Io.Writer, fmt: []const u8, args: anytype) std.Io.Writ
else => switch (@typeInfo(@TypeOf(arg))) {
.int, .comptime_int => try Diagnostics.formatInt(w, fmt[i..], arg),
.pointer => try Diagnostics.formatString(w, fmt[i..], arg),
else => unreachable,
else => comptime unreachable,
},
};
}
try w.writeAll(fmt[i..]);
}
pub fn formatString(w: *std.Io.Writer, fmt: []const u8, str: []const u8) std.Io.Writer.Error!usize {
const template = "{s}";
const i = std.mem.indexOf(u8, fmt, template).?;
pub fn templateIndex(w: *std.Io.Writer, fmt: []const u8, template: []const u8) std.Io.Writer.Error!usize {
const i = std.mem.indexOf(u8, fmt, template) orelse {
if (@import("builtin").mode == .Debug) {
std.debug.panic("template `{s}` not found in format string `{s}`", .{ template, fmt });
}
try w.print("template `{s}` not found in format string `{s}` (this is a bug in arocc)", .{ template, fmt });
return 0;
};
try w.writeAll(fmt[0..i]);
try w.writeAll(str);
return i + template.len;
}
pub fn formatString(w: *std.Io.Writer, fmt: []const u8, str: []const u8) std.Io.Writer.Error!usize {
const i = try templateIndex(w, fmt, "{s}");
try w.writeAll(str);
return i;
}
pub fn formatInt(w: *std.Io.Writer, fmt: []const u8, int: anytype) std.Io.Writer.Error!usize {
const template = "{d}";
const i = std.mem.indexOf(u8, fmt, template).?;
try w.writeAll(fmt[0..i]);
const i = try templateIndex(w, fmt, "{d}");
try w.printInt(int, 10, .lower, .{});
return i + template.len;
return i;
}
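The contract here is that `templateIndex` writes everything before the placeholder and returns the offset just past it, so each helper only writes its substitution and hands that offset back to `formatArgs`, which continues from there. A simplified standalone sketch of the same protocol (not aro's code):

    const std = @import("std");

    fn templateIndex(w: *std.Io.Writer, fmt: []const u8, template: []const u8) !usize {
        const i = std.mem.indexOf(u8, fmt, template) orelse return error.MissingTemplate;
        try w.writeAll(fmt[0..i]); // text before the placeholder
        return i + template.len; // offset just past the placeholder
    }

    test "substituting a {s} placeholder" {
        var buf: [64]u8 = undefined;
        var w: std.Io.Writer = .fixed(&buf);
        const fmt = "attribute '{s}' ignored";
        const consumed = try templateIndex(&w, fmt, "{s}");
        try w.writeAll("packed"); // the substitution itself
        try w.writeAll(fmt[consumed..]); // trailing text, as formatArgs does
        try std.testing.expectEqualStrings("attribute 'packed' ignored", w.buffered());
    }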
fn addMessage(d: *Diagnostics, msg: Message) Compilation.Error!void {

View File

@ -203,6 +203,7 @@ pub const usage =
\\ -fuse-line-directives Use `#line <num>` linemarkers in preprocessed output
\\ -fno-use-line-directives
\\ Use `# <num>` linemarkers in preprocessed output
\\ -iquote <dir> Add directory to QUOTE include search path
\\ -I <dir> Add directory to include search path
\\ -idirafter <dir> Add directory to AFTER include search path
\\ -isystem <dir> Add directory to SYSTEM include search path
@ -275,7 +276,7 @@ pub fn parseArgs(
var i: usize = 1;
var comment_arg: []const u8 = "";
var hosted: ?bool = null;
var gnuc_version: []const u8 = "4.2.1"; // default value set by clang
var gnuc_version: ?[]const u8 = null;
var pic_arg: []const u8 = "";
var declspec_attrs: ?bool = null;
var ms_extensions: ?bool = null;
@ -529,6 +530,17 @@ pub fn parseArgs(
path = args[i];
}
try d.comp.system_include_dirs.append(d.comp.gpa, path);
} else if (mem.startsWith(u8, arg, "-iquote")) {
var path = arg["-iquote".len..];
if (path.len == 0) {
i += 1;
if (i >= args.len) {
try d.err("expected argument after -iquote", .{});
continue;
}
path = args[i];
}
try d.comp.iquote_include_dirs.append(d.comp.gpa, path);
} else if (mem.startsWith(u8, arg, "-F")) {
var path = arg["-F".len..];
if (path.len == 0) {
@ -784,11 +796,13 @@ pub fn parseArgs(
d.comp.target.os.tag = .freestanding;
}
}
const version = GCCVersion.parse(gnuc_version);
if (gnuc_version) |unwrapped| {
const version = GCCVersion.parse(unwrapped);
if (version.major == -1) {
return d.fatal("invalid value '{0s}' in '-fgnuc-version={0s}'", .{gnuc_version});
return d.fatal("invalid value '{s}' in '-fgnuc-version={s}'", .{ unwrapped, unwrapped });
}
d.comp.langopts.gnuc_version = version.toUnsigned();
}
const pic_level, const is_pie = try d.getPICMode(pic_arg);
d.comp.code_gen_options.pic_level = pic_level;
d.comp.code_gen_options.is_pie = is_pie;
@ -1039,7 +1053,7 @@ fn getRandomFilename(d: *Driver, buf: *[std.fs.max_name_bytes]u8, extension: []c
const fmt_template = "/tmp/{s}{s}";
const fmt_args = .{
random_name,
@as([]const u8, &random_name),
extension,
};
return std.fmt.bufPrint(buf, fmt_template, fmt_args) catch return d.fatal("Filename too long for filesystem: " ++ fmt_template, fmt_args);

View File

@ -57,7 +57,7 @@ pub fn parse(text: []const u8) GCCVersion {
var good = bad;
var it = mem.splitScalar(u8, text, '.');
const first = it.next().?;
const first = it.first();
const second = it.next() orelse "";
const rest = it.next() orelse "";
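`first()` is the non-optional way to take the leading segment of a fresh split iterator, so the `.?` unwrap is no longer needed. A quick standalone illustration on a version string:

    const std = @import("std");

    test "splitScalar first() on a version string" {
        var it = std.mem.splitScalar(u8, "4.2.1", '.');
        try std.testing.expectEqualStrings("4", it.first());
        try std.testing.expectEqualStrings("2", it.next().?);
        try std.testing.expectEqualStrings("1", it.next().?);
        try std.testing.expect(it.next() == null);
    }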

View File

@ -7,6 +7,14 @@ pub const Compiler = enum {
clang,
gcc,
msvc,
pub fn defaultGccVersion(self: Compiler) u32 {
return switch (self) {
.clang => 4 * 10_000 + 2 * 100 + 1,
.gcc => 7 * 10_000 + 1 * 100 + 0,
.msvc => 0,
};
}
};
/// The floating-point evaluation method for intermediate results within a single expression
@ -139,7 +147,7 @@ preserve_comments_in_macros: bool = false,
/// Used ONLY for generating __GNUC__ and related macros. Does not control the presence/absence of any features
/// Encoded as major * 10,000 + minor * 100 + patch
/// e.g. 4.2.1 == 40201
gnuc_version: u32 = 0,
gnuc_version: ?u32 = null,
pub fn setStandard(self: *LangOpts, name: []const u8) error{InvalidStandard}!void {
self.standard = Standard.NameMap.get(name) orelse return error.InvalidStandard;

View File

@ -469,7 +469,7 @@ fn formatArgs(p: *Parser, w: *std.Io.Writer, fmt: []const u8, args: anytype) !vo
else => switch (@typeInfo(@TypeOf(arg))) {
.int, .comptime_int => try Diagnostics.formatInt(w, fmt[i..], arg),
.pointer => try Diagnostics.formatString(w, fmt[i..], arg),
else => unreachable,
else => comptime unreachable,
},
};
}
@ -477,22 +477,18 @@ fn formatArgs(p: *Parser, w: *std.Io.Writer, fmt: []const u8, args: anytype) !vo
}
fn formatTokenId(w: *std.Io.Writer, fmt: []const u8, tok_id: Tree.Token.Id) !usize {
const template = "{tok_id}";
const i = std.mem.indexOf(u8, fmt, template).?;
try w.writeAll(fmt[0..i]);
const i = try Diagnostics.templateIndex(w, fmt, "{tok_id}");
try w.writeAll(tok_id.symbol());
return i + template.len;
return i;
}
fn formatQualType(p: *Parser, w: *std.Io.Writer, fmt: []const u8, qt: QualType) !usize {
const template = "{qt}";
const i = std.mem.indexOf(u8, fmt, template).?;
try w.writeAll(fmt[0..i]);
const i = try Diagnostics.templateIndex(w, fmt, "{qt}");
try w.writeByte('\'');
try qt.print(p.comp, w);
try w.writeByte('\'');
if (qt.isC23Auto()) return i + template.len;
if (qt.isC23Auto()) return i;
if (qt.get(p.comp, .vector)) |vector_ty| {
try w.print(" (vector of {d} '", .{vector_ty.len});
try vector_ty.elem.printDesugared(p.comp, w);
@ -502,14 +498,11 @@ fn formatQualType(p: *Parser, w: *std.Io.Writer, fmt: []const u8, qt: QualType)
try qt.printDesugared(p.comp, w);
try w.writeAll("')");
}
return i + template.len;
return i;
}
fn formatResult(p: *Parser, w: *std.Io.Writer, fmt: []const u8, res: Result) !usize {
const template = "{value}";
const i = std.mem.indexOf(u8, fmt, template).?;
try w.writeAll(fmt[0..i]);
const i = try Diagnostics.templateIndex(w, fmt, "{value}");
switch (res.val.opt_ref) {
.none => try w.writeAll("(none)"),
.null => try w.writeAll("nullptr_t"),
@ -521,8 +514,7 @@ fn formatResult(p: *Parser, w: *std.Io.Writer, fmt: []const u8, res: Result) !us
},
},
}
return i + template.len;
return i;
}
const Normalized = struct {
@ -532,10 +524,8 @@ const Normalized = struct {
return .{ .str = str };
}
pub fn format(ctx: Normalized, w: *std.Io.Writer, fmt_str: []const u8) !usize {
const template = "{normalized}";
const i = std.mem.indexOf(u8, fmt_str, template).?;
try w.writeAll(fmt_str[0..i]);
pub fn format(ctx: Normalized, w: *std.Io.Writer, fmt: []const u8) !usize {
const i = try Diagnostics.templateIndex(w, fmt, "{normalized}");
var it: std.unicode.Utf8Iterator = .{
.bytes = ctx.str,
.i = 0,
@ -557,7 +547,7 @@ const Normalized = struct {
});
}
}
return i + template.len;
return i;
}
};
@ -568,12 +558,10 @@ const Codepoint = struct {
return .{ .codepoint = codepoint };
}
pub fn format(ctx: Codepoint, w: *std.Io.Writer, fmt_str: []const u8) !usize {
const template = "{codepoint}";
const i = std.mem.indexOf(u8, fmt_str, template).?;
try w.writeAll(fmt_str[0..i]);
pub fn format(ctx: Codepoint, w: *std.Io.Writer, fmt: []const u8) !usize {
const i = try Diagnostics.templateIndex(w, fmt, "{codepoint}");
try w.print("{X:0>4}", .{ctx.codepoint});
return i + template.len;
return i;
}
};
@ -584,12 +572,10 @@ const Escaped = struct {
return .{ .str = str };
}
pub fn format(ctx: Escaped, w: *std.Io.Writer, fmt_str: []const u8) !usize {
const template = "{s}";
const i = std.mem.indexOf(u8, fmt_str, template).?;
try w.writeAll(fmt_str[0..i]);
pub fn format(ctx: Escaped, w: *std.Io.Writer, fmt: []const u8) !usize {
const i = try Diagnostics.templateIndex(w, fmt, "{s}");
try std.zig.stringEscape(ctx.str, w);
return i + template.len;
return i;
}
};
@ -626,11 +612,11 @@ pub fn errValueChanged(p: *Parser, tok_i: TokenIndex, diagnostic: Diagnostic, re
fn checkDeprecatedUnavailable(p: *Parser, ty: QualType, usage_tok: TokenIndex, decl_tok: TokenIndex) !void {
if (ty.getAttribute(p.comp, .@"error")) |@"error"| {
const msg_str = p.comp.interner.get(@"error".msg.ref()).bytes;
try p.err(usage_tok, .error_attribute, .{ p.tokSlice(@"error".__name_tok), std.zig.fmtString(msg_str) });
try p.err(usage_tok, .error_attribute, .{ p.tokSlice(@"error".__name_tok), Escaped.init(msg_str) });
}
if (ty.getAttribute(p.comp, .warning)) |warning| {
const msg_str = p.comp.interner.get(warning.msg.ref()).bytes;
try p.err(usage_tok, .warning_attribute, .{ p.tokSlice(warning.__name_tok), std.zig.fmtString(msg_str) });
try p.err(usage_tok, .warning_attribute, .{ p.tokSlice(warning.__name_tok), Escaped.init(msg_str) });
}
if (ty.getAttribute(p.comp, .unavailable)) |unavailable| {
try p.errDeprecated(usage_tok, .unavailable, unavailable.msg);
@ -4734,7 +4720,7 @@ fn asmOperand(p: *Parser, names: *std.ArrayList(?TokenIndex), constraints: *Node
try constraints.append(gpa, constraint.node);
const l_paren = p.eatToken(.l_paren) orelse {
try p.err(p.tok_i, .expected_token, .{ p.tok_ids[p.tok_i], .l_paren });
try p.err(p.tok_i, .expected_token, .{ p.tok_ids[p.tok_i], Token.Id.l_paren });
return error.ParsingFailed;
};
const maybe_res = try p.expr();
@ -10221,12 +10207,30 @@ test "Node locations" {
try std.testing.expectEqual(0, comp.diagnostics.total);
for (tree.root_decls.items[tree.root_decls.items.len - 3 ..], 0..) |node, i| {
const slice = tree.tokSlice(node.tok(&tree));
const expected = switch (i) {
const expected_slice = switch (i) {
0 => "foo",
1 => "bar",
2 => "main",
else => unreachable,
};
try std.testing.expectEqualStrings(expected, slice);
try std.testing.expectEqualStrings(expected_slice, slice);
const loc = node.loc(&tree).expand(&comp);
const expected_col: u32 = switch (i) {
0 => 5,
1 => 5,
2 => 5,
else => unreachable,
};
try std.testing.expectEqual(expected_col, loc.col);
const expected_line_no = i + 1;
try std.testing.expectEqual(expected_line_no, loc.line_no);
const expected_source_path = "file.c";
try std.testing.expectEqualStrings(expected_source_path, loc.path);
const expected_source_kind = Source.Kind.user;
try std.testing.expectEqual(expected_source_kind, loc.kind);
}
}

View File

@ -1703,7 +1703,10 @@ fn expandFuncMacro(
else
&[1]TokenWithExpansionLocs{tokFromRaw(raw_next)},
.macro_param, .macro_param_no_expand => getPasteArgs(args.items[raw_next.end]),
.keyword_va_args => variable_arguments.items,
.keyword_va_args => if (variable_arguments.items.len == 0) blk: {
try pp.err(raw_next, .no_argument_variadic_macro, .{});
break :blk &[1]TokenWithExpansionLocs{.{ .id = .placemarker, .loc = .{ .id = .generated } }};
} else variable_arguments.items,
.keyword_va_opt => blk: {
try pp.expandVaOpt(&va_opt_buf, raw_next, variable_arguments.items.len != 0);
if (va_opt_buf.items.len == 0) break;

View File

@ -449,3 +449,10 @@ pub const date_time: Diagnostic = .{
.opt = .@"date-time",
.show_in_system_headers = true,
};
pub const no_argument_variadic_macro: Diagnostic = .{
.fmt = "passing no argument for the '...' parameter of a variadic macro is incompatible with C standards before C23",
.opt = .@"variadic-macro-arguments-omitted",
.kind = .off,
.extension = true,
};

View File

@ -1699,9 +1699,9 @@ pub const Node = union(enum) {
return tree.nodes.items(.tok)[@intFromEnum(index)];
}
pub fn loc(index: Index, tree: *const Tree) ?Source.Location {
pub fn loc(index: Index, tree: *const Tree) Source.Location {
const tok_i = index.tok(tree);
return tree.tokens.items(.loc)[@intFromEnum(tok_i)];
return tree.tokens.items(.loc)[tok_i];
}
pub fn qt(index: Index, tree: *const Tree) QualType {

View File

@ -681,10 +681,12 @@ pub fn toLLVMTriple(target: std.Target, buf: []u8) []const u8 {
.driverkit => "driverkit",
.visionos => "xros",
.serenity => "serenity",
.vulkan => "vulkan",
.managarm => "managarm",
.@"3ds",
.vita,
.opencl,
.opengl,
.vulkan,
.plan9,
.other,
=> "unknown",

View File

@ -154,17 +154,14 @@ pub const Ascii = struct {
return .{ .val = @intCast(val) };
}
pub fn format(ctx: Ascii, w: *std.Io.Writer, fmt_str: []const u8) !usize {
const template = "{c}";
const i = std.mem.indexOf(u8, fmt_str, template).?;
try w.writeAll(fmt_str[0..i]);
pub fn format(ctx: Ascii, w: *std.Io.Writer, fmt: []const u8) !usize {
const i = try Diagnostics.templateIndex(w, fmt, "{c}");
if (std.ascii.isPrint(ctx.val)) {
try w.writeByte(ctx.val);
} else {
try w.print("x{x:0>2}", .{ctx.val});
}
return i + template.len;
return i;
}
};
@ -345,7 +342,7 @@ pub const Parser = struct {
else => switch (@typeInfo(@TypeOf(arg))) {
.int, .comptime_int => try Diagnostics.formatInt(w, fmt[i..], arg),
.pointer => try Diagnostics.formatString(w, fmt[i..], arg),
else => unreachable,
else => comptime unreachable,
},
};
}

View File

@ -175,8 +175,10 @@ pub fn transMacro(mt: *MacroTranslator) ParseError!void {
}
fn createMacroFn(mt: *MacroTranslator, name: []const u8, ref: ZigNode, proto_alias: *ast.Payload.Func) !ZigNode {
var fn_params = std.array_list.Managed(ast.Payload.Param).init(mt.t.gpa);
defer fn_params.deinit();
const gpa = mt.t.gpa;
const arena = mt.t.arena;
var fn_params: std.ArrayList(ast.Payload.Param) = .empty;
defer fn_params.deinit(gpa);
var block_scope = try Scope.Block.init(mt.t, &mt.t.global_scope.base, false);
defer block_scope.deinit();
@ -184,7 +186,7 @@ fn createMacroFn(mt: *MacroTranslator, name: []const u8, ref: ZigNode, proto_ali
for (proto_alias.data.params) |param| {
const param_name = try block_scope.makeMangledName(param.name orelse "arg");
try fn_params.append(.{
try fn_params.append(gpa, .{
.name = param_name,
.type = param.type,
.is_noalias = param.is_noalias,
@ -198,27 +200,28 @@ fn createMacroFn(mt: *MacroTranslator, name: []const u8, ref: ZigNode, proto_ali
else
unreachable;
const unwrap_expr = try ZigTag.unwrap.create(mt.t.arena, init);
const args = try mt.t.arena.alloc(ZigNode, fn_params.items.len);
const unwrap_expr = try ZigTag.unwrap.create(arena, init);
const args = try arena.alloc(ZigNode, fn_params.items.len);
for (fn_params.items, 0..) |param, i| {
args[i] = try ZigTag.identifier.create(mt.t.arena, param.name.?);
args[i] = try ZigTag.identifier.create(arena, param.name.?);
}
const call_expr = try ZigTag.call.create(mt.t.arena, .{
const call_expr = try ZigTag.call.create(arena, .{
.lhs = unwrap_expr,
.args = args,
});
const return_expr = try ZigTag.@"return".create(mt.t.arena, call_expr);
const block = try ZigTag.block_single.create(mt.t.arena, return_expr);
const return_expr = try ZigTag.@"return".create(arena, call_expr);
const block = try ZigTag.block_single.create(arena, return_expr);
return ZigTag.pub_inline_fn.create(mt.t.arena, .{
return ZigTag.pub_inline_fn.create(arena, .{
.name = name,
.params = try mt.t.arena.dupe(ast.Payload.Param, fn_params.items),
.params = try arena.dupe(ast.Payload.Param, fn_params.items),
.return_type = proto_alias.data.return_type,
.body = block,
});
}
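The recurring change throughout this file is the move from the managed `std.array_list.Managed(T)` wrapper to the unmanaged `std.ArrayList(T)`, which stores no allocator and instead takes one at every allocating call. A minimal standalone sketch of the pattern used above:

    const std = @import("std");

    test "unmanaged ArrayList pattern" {
        const gpa = std.testing.allocator;
        var list: std.ArrayList(u8) = .empty; // no allocator stored in the list
        defer list.deinit(gpa); // allocator passed to every allocating/freeing call
        try list.ensureUnusedCapacity(gpa, 2);
        try list.append(gpa, 'a');
        try list.appendSlice(gpa, "bc");
        try std.testing.expectEqualStrings("abc", list.items);
    }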
fn parseCExpr(mt: *MacroTranslator, scope: *Scope) ParseError!ZigNode {
const arena = mt.t.arena;
// TODO parseCAssignExpr here
var block_scope = try Scope.Block.init(mt.t, scope, true);
defer block_scope.deinit();
@ -229,14 +232,14 @@ fn parseCExpr(mt: *MacroTranslator, scope: *Scope) ParseError!ZigNode {
var last = node;
while (true) {
// suppress result
const ignore = try ZigTag.discard.create(mt.t.arena, .{ .should_skip = false, .value = last });
const ignore = try ZigTag.discard.create(arena, .{ .should_skip = false, .value = last });
try block_scope.statements.append(mt.t.gpa, ignore);
last = try mt.parseCCondExpr(&block_scope.base);
if (!mt.eat(.comma)) break;
}
const break_node = try ZigTag.break_val.create(mt.t.arena, .{
const break_node = try ZigTag.break_val.create(arena, .{
.label = block_scope.label,
.val = last,
});
@ -245,10 +248,11 @@ fn parseCExpr(mt: *MacroTranslator, scope: *Scope) ParseError!ZigNode {
}
fn parseCNumLit(mt: *MacroTranslator) ParseError!ZigNode {
const arena = mt.t.arena;
const lit_bytes = mt.tokSlice();
mt.i += 1;
var bytes = try std.ArrayListUnmanaged(u8).initCapacity(mt.t.arena, lit_bytes.len + 3);
var bytes = try std.ArrayListUnmanaged(u8).initCapacity(arena, lit_bytes.len + 3);
const prefix = aro.Tree.Token.NumberPrefix.fromString(lit_bytes);
switch (prefix) {
@ -330,7 +334,7 @@ fn parseCNumLit(mt: *MacroTranslator) ParseError!ZigNode {
}
if (is_float) {
const type_node = try ZigTag.type.create(mt.t.arena, switch (suffix) {
const type_node = try ZigTag.type.create(arena, switch (suffix) {
.F16 => "f16",
.F => "f32",
.None => "f64",
@ -339,10 +343,10 @@ fn parseCNumLit(mt: *MacroTranslator) ParseError!ZigNode {
.Q, .F128 => "f128",
else => unreachable,
});
const rhs = try ZigTag.float_literal.create(mt.t.arena, bytes.items);
return ZigTag.as.create(mt.t.arena, .{ .lhs = type_node, .rhs = rhs });
const rhs = try ZigTag.float_literal.create(arena, bytes.items);
return ZigTag.as.create(arena, .{ .lhs = type_node, .rhs = rhs });
} else {
const type_node = try ZigTag.type.create(mt.t.arena, switch (suffix) {
const type_node = try ZigTag.type.create(arena, switch (suffix) {
.None => "c_int",
.U => "c_uint",
.L => "c_long",
@ -365,11 +369,11 @@ fn parseCNumLit(mt: *MacroTranslator) ParseError!ZigNode {
else => unreachable,
};
const literal_node = try ZigTag.integer_literal.create(mt.t.arena, bytes.items);
const literal_node = try ZigTag.integer_literal.create(arena, bytes.items);
if (guaranteed_to_fit) {
return ZigTag.as.create(mt.t.arena, .{ .lhs = type_node, .rhs = literal_node });
return ZigTag.as.create(arena, .{ .lhs = type_node, .rhs = literal_node });
} else {
return mt.t.createHelperCallNode(.promoteIntLiteral, &.{ type_node, literal_node, try ZigTag.enum_literal.create(mt.t.arena, @tagName(prefix)) });
return mt.t.createHelperCallNode(.promoteIntLiteral, &.{ type_node, literal_node, try ZigTag.enum_literal.create(arena, @tagName(prefix)) });
}
}
}
@ -563,6 +567,7 @@ fn escapeUnprintables(mt: *MacroTranslator) ![]const u8 {
}
fn parseCPrimaryExpr(mt: *MacroTranslator, scope: *Scope) ParseError!ZigNode {
const arena = mt.t.arena;
const tok = mt.peek();
switch (tok) {
.char_literal,
@ -573,12 +578,12 @@ fn parseCPrimaryExpr(mt: *MacroTranslator, scope: *Scope) ParseError!ZigNode {
=> {
const slice = mt.tokSlice();
if (slice[0] != '\'' or slice[1] == '\\' or slice.len == 3) {
return ZigTag.char_literal.create(mt.t.arena, try mt.escapeUnprintables());
return ZigTag.char_literal.create(arena, try mt.escapeUnprintables());
} else {
mt.i += 1;
const str = try std.fmt.allocPrint(mt.t.arena, "0x{x}", .{slice[1 .. slice.len - 1]});
return ZigTag.integer_literal.create(mt.t.arena, str);
const str = try std.fmt.allocPrint(arena, "0x{x}", .{slice[1 .. slice.len - 1]});
return ZigTag.integer_literal.create(arena, str);
}
},
.string_literal,
@ -586,7 +591,7 @@ fn parseCPrimaryExpr(mt: *MacroTranslator, scope: *Scope) ParseError!ZigNode {
.string_literal_utf_8,
.string_literal_utf_32,
.string_literal_wide,
=> return ZigTag.string_literal.create(mt.t.arena, try mt.escapeUnprintables()),
=> return ZigTag.string_literal.create(arena, try mt.escapeUnprintables()),
.pp_num => return mt.parseCNumLit(),
.l_paren => {
mt.i += 1;
@ -600,7 +605,7 @@ fn parseCPrimaryExpr(mt: *MacroTranslator, scope: *Scope) ParseError!ZigNode {
mt.i += 1;
const mangled_name = scope.getAlias(param) orelse param;
return try ZigTag.identifier.create(mt.t.arena, mangled_name);
return try ZigTag.identifier.create(arena, mangled_name);
},
.identifier, .extended_identifier => {
const slice = mt.tokSlice();
@ -608,17 +613,17 @@ fn parseCPrimaryExpr(mt: *MacroTranslator, scope: *Scope) ParseError!ZigNode {
const mangled_name = scope.getAlias(slice) orelse slice;
if (Translator.builtin_typedef_map.get(mangled_name)) |ty| {
return ZigTag.type.create(mt.t.arena, ty);
return ZigTag.type.create(arena, ty);
}
if (builtins.map.get(mangled_name)) |builtin| {
const builtin_identifier = try ZigTag.identifier.create(mt.t.arena, "__builtin");
return ZigTag.field_access.create(mt.t.arena, .{
const builtin_identifier = try ZigTag.identifier.create(arena, "__builtin");
return ZigTag.field_access.create(arena, .{
.lhs = builtin_identifier,
.field_name = builtin.name,
});
}
const identifier = try ZigTag.identifier.create(mt.t.arena, mangled_name);
const identifier = try ZigTag.identifier.create(arena, mangled_name);
scope.skipVariableDiscard(mangled_name);
refs_var: {
const ident_node = mt.t.global_scope.sym_table.get(slice) orelse break :refs_var;
@ -1114,6 +1119,8 @@ fn parseCPostfixExpr(mt: *MacroTranslator, scope: *Scope, type_name: ?ZigNode) P
}
fn parseCPostfixExprInner(mt: *MacroTranslator, scope: *Scope, type_name: ?ZigNode) ParseError!ZigNode {
const gpa = mt.t.gpa;
const arena = mt.t.arena;
var node = type_name orelse try mt.parseCPrimaryExpr(scope);
while (true) {
switch (mt.peek()) {
@ -1122,39 +1129,39 @@ fn parseCPostfixExprInner(mt: *MacroTranslator, scope: *Scope, type_name: ?ZigNo
const field_name = mt.tokSlice();
try mt.expect(.identifier);
node = try ZigTag.field_access.create(mt.t.arena, .{ .lhs = node, .field_name = field_name });
node = try ZigTag.field_access.create(arena, .{ .lhs = node, .field_name = field_name });
},
.arrow => {
mt.i += 1;
const field_name = mt.tokSlice();
try mt.expect(.identifier);
const deref = try ZigTag.deref.create(mt.t.arena, node);
node = try ZigTag.field_access.create(mt.t.arena, .{ .lhs = deref, .field_name = field_name });
const deref = try ZigTag.deref.create(arena, node);
node = try ZigTag.field_access.create(arena, .{ .lhs = deref, .field_name = field_name });
},
.l_bracket => {
mt.i += 1;
const index_val = try mt.macroIntFromBool(try mt.parseCExpr(scope));
const index = try ZigTag.as.create(mt.t.arena, .{
.lhs = try ZigTag.type.create(mt.t.arena, "usize"),
.rhs = try ZigTag.int_cast.create(mt.t.arena, index_val),
const index = try ZigTag.as.create(arena, .{
.lhs = try ZigTag.type.create(arena, "usize"),
.rhs = try ZigTag.int_cast.create(arena, index_val),
});
node = try ZigTag.array_access.create(mt.t.arena, .{ .lhs = node, .rhs = index });
node = try ZigTag.array_access.create(arena, .{ .lhs = node, .rhs = index });
try mt.expect(.r_bracket);
},
.l_paren => {
mt.i += 1;
if (mt.eat(.r_paren)) {
node = try ZigTag.call.create(mt.t.arena, .{ .lhs = node, .args = &.{} });
node = try ZigTag.call.create(arena, .{ .lhs = node, .args = &.{} });
} else {
var args = std.array_list.Managed(ZigNode).init(mt.t.gpa);
defer args.deinit();
var args: std.ArrayList(ZigNode) = .empty;
defer args.deinit(gpa);
while (true) {
const arg = try mt.parseCCondExpr(scope);
try args.append(arg);
try args.append(gpa, arg);
const next_id = mt.peek();
switch (next_id) {
@ -1171,7 +1178,7 @@ fn parseCPostfixExprInner(mt: *MacroTranslator, scope: *Scope, type_name: ?ZigNo
},
}
}
node = try ZigTag.call.create(mt.t.arena, .{ .lhs = node, .args = try mt.t.arena.dupe(ZigNode, args.items) });
node = try ZigTag.call.create(arena, .{ .lhs = node, .args = try arena.dupe(ZigNode, args.items) });
}
},
.l_brace => {
@ -1179,8 +1186,8 @@ fn parseCPostfixExprInner(mt: *MacroTranslator, scope: *Scope, type_name: ?ZigNo
// Check for designated field initializers
if (mt.peek() == .period) {
var init_vals = std.array_list.Managed(ast.Payload.ContainerInitDot.Initializer).init(mt.t.gpa);
defer init_vals.deinit();
var init_vals: std.ArrayList(ast.Payload.ContainerInitDot.Initializer) = .empty;
defer init_vals.deinit(gpa);
while (true) {
try mt.expect(.period);
@ -1189,7 +1196,7 @@ fn parseCPostfixExprInner(mt: *MacroTranslator, scope: *Scope, type_name: ?ZigNo
try mt.expect(.equal);
const val = try mt.parseCCondExpr(scope);
try init_vals.append(.{ .name = name, .value = val });
try init_vals.append(gpa, .{ .name = name, .value = val });
const next_id = mt.peek();
switch (next_id) {
@ -1206,17 +1213,17 @@ fn parseCPostfixExprInner(mt: *MacroTranslator, scope: *Scope, type_name: ?ZigNo
},
}
}
const tuple_node = try ZigTag.container_init_dot.create(mt.t.arena, try mt.t.arena.dupe(ast.Payload.ContainerInitDot.Initializer, init_vals.items));
node = try ZigTag.std_mem_zeroinit.create(mt.t.arena, .{ .lhs = node, .rhs = tuple_node });
const tuple_node = try ZigTag.container_init_dot.create(arena, try arena.dupe(ast.Payload.ContainerInitDot.Initializer, init_vals.items));
node = try ZigTag.std_mem_zeroinit.create(arena, .{ .lhs = node, .rhs = tuple_node });
continue;
}
var init_vals = std.array_list.Managed(ZigNode).init(mt.t.gpa);
defer init_vals.deinit();
var init_vals: std.ArrayList(ZigNode) = .empty;
defer init_vals.deinit(gpa);
while (true) {
const val = try mt.parseCCondExpr(scope);
try init_vals.append(val);
try init_vals.append(gpa, val);
const next_id = mt.peek();
switch (next_id) {
@ -1233,8 +1240,8 @@ fn parseCPostfixExprInner(mt: *MacroTranslator, scope: *Scope, type_name: ?ZigNo
},
}
}
const tuple_node = try ZigTag.tuple.create(mt.t.arena, try mt.t.arena.dupe(ZigNode, init_vals.items));
node = try ZigTag.std_mem_zeroinit.create(mt.t.arena, .{ .lhs = node, .rhs = tuple_node });
const tuple_node = try ZigTag.tuple.create(arena, try arena.dupe(ZigNode, init_vals.items));
node = try ZigTag.std_mem_zeroinit.create(arena, .{ .lhs = node, .rhs = tuple_node });
},
.plus_plus, .minus_minus => {
try mt.fail("TODO postfix inc/dec expr", .{});

View File

@ -91,11 +91,11 @@ const Pattern = struct {
fn init(pl: *Pattern, allocator: mem.Allocator, template: Template) Error!void {
const source = template[0];
const impl = template[1];
var tok_list = std.array_list.Managed(CToken).init(allocator);
defer tok_list.deinit();
var tok_list: std.ArrayList(CToken) = .empty;
defer tok_list.deinit(allocator);
pl.* = .{
.slicer = try tokenizeMacro(source, &tok_list),
.slicer = try tokenizeMacro(allocator, source, &tok_list),
.impl = impl,
};
}
@ -170,7 +170,7 @@ pub fn match(pl: PatternList, ms: MacroSlicer) Error!?Impl {
return null;
}
fn tokenizeMacro(source: []const u8, tok_list: *std.array_list.Managed(CToken)) Error!MacroSlicer {
fn tokenizeMacro(allocator: mem.Allocator, source: []const u8, tok_list: *std.ArrayList(CToken)) Error!MacroSlicer {
var param_count: u32 = 0;
var param_buf: [8][]const u8 = undefined;
@ -207,7 +207,7 @@ fn tokenizeMacro(source: []const u8, tok_list: *std.array_list.Managed(CToken))
const slice = source[tok.start..tok.end];
for (param_buf[0..param_count], 0..) |param, i| {
if (std.mem.eql(u8, param, slice)) {
try tok_list.append(.{
try tok_list.append(allocator, .{
.id = .macro_param,
.source = .unused,
.end = @intCast(i),
@ -224,12 +224,12 @@ fn tokenizeMacro(source: []const u8, tok_list: *std.array_list.Managed(CToken))
.nl, .eof => break,
else => {},
}
try tok_list.append(tok);
try tok_list.append(allocator, tok);
}
return .{
.source = source,
.tokens = try tok_list.toOwnedSlice(),
.tokens = try tok_list.toOwnedSlice(allocator),
.params = param_count,
};
}
@ -243,9 +243,9 @@ test "Macro matching" {
source: []const u8,
comptime expected_match: ?Impl,
) !void {
var tok_list = std.array_list.Managed(CToken).init(allocator);
defer tok_list.deinit();
const ms = try tokenizeMacro(source, &tok_list);
var tok_list: std.ArrayList(CToken) = .empty;
defer tok_list.deinit(allocator);
const ms = try tokenizeMacro(allocator, source, &tok_list);
defer allocator.free(ms.tokens);
const matched = try pattern_list.match(ms);

View File

@ -216,10 +216,10 @@ pub fn translate(options: Options) mem.Allocator.Error![]u8 {
try translator.global_scope.processContainerMemberFns();
var aw: std.Io.Writer.Allocating = .init(gpa);
defer aw.deinit();
var allocating: std.Io.Writer.Allocating = .init(gpa);
defer allocating.deinit();
aw.writer.writeAll(
allocating.writer.writeAll(
\\pub const __builtin = @import("std").zig.c_translation.builtins;
\\pub const __helpers = @import("std").zig.c_translation.helpers;
\\
@ -231,8 +231,8 @@ pub fn translate(options: Options) mem.Allocator.Error![]u8 {
gpa.free(zig_ast.source);
zig_ast.deinit(gpa);
}
zig_ast.render(gpa, &aw.writer, .{}) catch return error.OutOfMemory;
return aw.toOwnedSlice();
zig_ast.render(gpa, &allocating.writer, .{}) catch return error.OutOfMemory;
return allocating.toOwnedSlice();
}
fn prepopulateGlobalNameTable(t: *Translator) !void {
@ -489,11 +489,12 @@ fn transRecordDecl(t: *Translator, scope: *Scope, record_qt: QualType) Error!voi
break :init ZigTag.opaque_literal.init();
}
var fields = try std.array_list.Managed(ast.Payload.Container.Field).initCapacity(t.gpa, record_ty.fields.len);
defer fields.deinit();
var fields: std.ArrayList(ast.Payload.Container.Field) = .empty;
defer fields.deinit(t.gpa);
try fields.ensureUnusedCapacity(t.gpa, record_ty.fields.len);
var functions = std.array_list.Managed(ZigNode).init(t.gpa);
defer functions.deinit();
var functions: std.ArrayList(ZigNode) = .empty;
defer functions.deinit(t.gpa);
var unnamed_field_count: u32 = 0;
@ -558,7 +559,7 @@ fn transRecordDecl(t: *Translator, scope: *Scope, record_qt: QualType) Error!voi
field_name = try std.fmt.allocPrint(t.arena, "_{s}", .{field_name});
const member = try t.createFlexibleMemberFn(member_name, field_name);
try functions.append(member);
try functions.append(t.gpa, member);
break :field_type zero_array;
}
@ -600,7 +601,7 @@ fn transRecordDecl(t: *Translator, scope: *Scope, record_qt: QualType) Error!voi
const padding_bits = record_ty.layout.?.size_bits;
const alignment_bits = record_ty.layout.?.field_alignment_bits;
try fields.append(.{
try fields.append(t.gpa, .{
.name = "_padding",
.type = try ZigTag.type.create(t.arena, try std.fmt.allocPrint(t.arena, "u{d}", .{padding_bits})),
.alignment = @divExact(alignment_bits, 8),
@ -1789,8 +1790,8 @@ fn transSwitch(t: *Translator, scope: *Scope, switch_stmt: Node.SwitchStmt) Tran
defer cond_scope.deinit();
const switch_expr = try t.transExpr(&cond_scope.base, switch_stmt.cond, .used);
var cases = std.array_list.Managed(ZigNode).init(t.gpa);
defer cases.deinit();
var cases: std.ArrayList(ZigNode) = .empty;
defer cases.deinit(t.gpa);
var has_default = false;
const body_node = switch_stmt.body.get(t.tree);
@ -1803,21 +1804,21 @@ fn transSwitch(t: *Translator, scope: *Scope, switch_stmt: Node.SwitchStmt) Tran
for (body, 0..) |stmt, i| {
switch (stmt.get(t.tree)) {
.case_stmt => {
var items = std.array_list.Managed(ZigNode).init(t.gpa);
defer items.deinit();
var items: std.ArrayList(ZigNode) = .empty;
defer items.deinit(t.gpa);
const sub = try t.transCaseStmt(base_scope, stmt, &items);
const res = try t.transSwitchProngStmt(base_scope, sub, body[i..]);
if (items.items.len == 0) {
has_default = true;
const switch_else = try ZigTag.switch_else.create(t.arena, res);
try cases.append(switch_else);
try cases.append(t.gpa, switch_else);
} else {
const switch_prong = try ZigTag.switch_prong.create(t.arena, .{
.cases = try t.arena.dupe(ZigNode, items.items),
.cond = res,
});
try cases.append(switch_prong);
try cases.append(t.gpa, switch_prong);
}
},
.default_stmt => |default_stmt| {
@ -1833,7 +1834,7 @@ fn transSwitch(t: *Translator, scope: *Scope, switch_stmt: Node.SwitchStmt) Tran
const res = try t.transSwitchProngStmt(base_scope, sub, body[i..]);
const switch_else = try ZigTag.switch_else.create(t.arena, res);
try cases.append(switch_else);
try cases.append(t.gpa, switch_else);
},
else => {}, // collected in transSwitchProngStmt
}
@ -1841,7 +1842,7 @@ fn transSwitch(t: *Translator, scope: *Scope, switch_stmt: Node.SwitchStmt) Tran
if (!has_default) {
const else_prong = try ZigTag.switch_else.create(t.arena, ZigTag.empty_block.init());
try cases.append(else_prong);
try cases.append(t.gpa, else_prong);
}
const switch_node = try ZigTag.@"switch".create(t.arena, .{
@ -1861,7 +1862,7 @@ fn transCaseStmt(
t: *Translator,
scope: *Scope,
stmt: Node.Index,
items: *std.array_list.Managed(ZigNode),
items: *std.ArrayList(ZigNode),
) TransError!Node.Index {
var sub = stmt;
var seen_default = false;
@ -1886,7 +1887,7 @@ fn transCaseStmt(
break :blk try ZigTag.ellipsis3.create(t.arena, .{ .lhs = start_node, .rhs = end_node });
} else try t.transExpr(scope, case_stmt.start, .used);
try items.append(expr);
try items.append(t.gpa, expr);
sub = case_stmt.body;
},
else => return sub,
@ -3873,7 +3874,7 @@ fn createNumberNode(t: *Translator, num: anytype, num_kind: enum { int, float })
fn createCharLiteralNode(t: *Translator, narrow: bool, val: u32) TransError!ZigNode {
return ZigTag.char_literal.create(t.arena, if (narrow)
try std.fmt.allocPrint(t.arena, "'{f}'", .{std.zig.fmtChar(@intCast(val))})
try std.fmt.allocPrint(t.arena, "'{f}'", .{std.zig.fmtChar(@as(u8, @intCast(val)))})
else
try std.fmt.allocPrint(t.arena, "'\\u{{{x}}}'", .{val}));
}
@ -3986,8 +3987,8 @@ fn createFlexibleMemberFn(
// =================
fn transMacros(t: *Translator) !void {
var tok_list = std.array_list.Managed(CToken).init(t.gpa);
defer tok_list.deinit();
var tok_list: std.ArrayList(CToken) = .empty;
defer tok_list.deinit(t.gpa);
var pattern_list = try PatternList.init(t.gpa);
defer pattern_list.deinit(t.gpa);
@ -3999,7 +4000,7 @@ fn transMacros(t: *Translator) !void {
}
tok_list.items.len = 0;
try tok_list.ensureUnusedCapacity(macro.tokens.len);
try tok_list.ensureUnusedCapacity(t.gpa, macro.tokens.len);
for (macro.tokens) |tok| {
switch (tok.id) {
.invalid => continue,

View File

@ -798,9 +798,8 @@ pub const Payload = struct {
pub fn render(gpa: Allocator, nodes: []const Node) !std.zig.Ast {
var ctx: Context = .{
.gpa = gpa,
.buf = std.array_list.Managed(u8).init(gpa),
};
defer ctx.buf.deinit();
defer ctx.buf.deinit(gpa);
defer ctx.nodes.deinit(gpa);
defer ctx.extra_data.deinit(gpa);
defer ctx.tokens.deinit(gpa);
@ -813,7 +812,7 @@ pub fn render(gpa: Allocator, nodes: []const Node) !std.zig.Ast {
try ctx.tokens.ensureTotalCapacity(gpa, estimated_tokens_count);
// Estimate that each token is 3 bytes long.
const estimated_buf_len = estimated_tokens_count * 3;
try ctx.buf.ensureTotalCapacity(estimated_buf_len);
try ctx.buf.ensureTotalCapacity(gpa, estimated_buf_len);
ctx.nodes.appendAssumeCapacity(.{
.tag = .root,
@ -822,12 +821,12 @@ pub fn render(gpa: Allocator, nodes: []const Node) !std.zig.Ast {
});
const root_members = blk: {
var result = std.array_list.Managed(NodeIndex).init(gpa);
defer result.deinit();
var result: std.ArrayList(NodeIndex) = .empty;
defer result.deinit(gpa);
for (nodes) |node| {
const res = (try renderNodeOpt(&ctx, node)) orelse continue;
try result.append(res);
try result.append(gpa, res);
}
break :blk try ctx.listToSpan(result.items);
};
@ -843,7 +842,7 @@ pub fn render(gpa: Allocator, nodes: []const Node) !std.zig.Ast {
});
return .{
.source = try ctx.buf.toOwnedSliceSentinel(0),
.source = try ctx.buf.toOwnedSliceSentinel(gpa, 0),
.tokens = ctx.tokens.toOwnedSlice(),
.nodes = ctx.nodes.toOwnedSlice(),
.extra_data = try ctx.extra_data.toOwnedSlice(gpa),
@ -859,14 +858,14 @@ const TokenTag = std.zig.Token.Tag;
const Context = struct {
gpa: Allocator,
buf: std.array_list.Managed(u8),
nodes: std.zig.Ast.NodeList = .{},
buf: std.ArrayList(u8) = .empty,
nodes: std.zig.Ast.NodeList = .empty,
extra_data: std.ArrayListUnmanaged(u32) = .empty,
tokens: std.zig.Ast.TokenList = .{},
tokens: std.zig.Ast.TokenList = .empty,
fn addTokenFmt(c: *Context, tag: TokenTag, comptime format: []const u8, args: anytype) Allocator.Error!TokenIndex {
const start_index = c.buf.items.len;
try c.buf.print(format ++ " ", args);
try c.buf.print(c.gpa, format ++ " ", args);
try c.tokens.append(c.gpa, .{
.tag = tag,
@ -925,8 +924,8 @@ fn renderNodeOpt(c: *Context, node: Node) Allocator.Error!?NodeIndex {
switch (node.tag()) {
.warning => {
const payload = node.castTag(.warning).?.data;
try c.buf.appendSlice(payload);
try c.buf.append('\n');
try c.buf.appendSlice(c.gpa, payload);
try c.buf.append(c.gpa, '\n');
return null;
},
.discard => {
@ -1687,12 +1686,12 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
}
const l_brace = try c.addToken(.l_brace, "{");
var stmts = std.array_list.Managed(NodeIndex).init(c.gpa);
defer stmts.deinit();
var stmts: std.ArrayList(NodeIndex) = .empty;
defer stmts.deinit(c.gpa);
for (payload.stmts) |stmt| {
const res = (try renderNodeOpt(c, stmt)) orelse continue;
try addSemicolonIfNeeded(c, stmt);
try stmts.append(res);
try stmts.append(c.gpa, res);
}
const span = try c.listToSpan(stmts.items);
_ = try c.addToken(.r_brace, "}");
@ -2830,8 +2829,8 @@ fn renderFunc(c: *Context, node: Node) !NodeIndex {
const fn_token = try c.addToken(.keyword_fn, "fn");
if (payload.name) |some| _ = try c.addIdentifier(some);
const params = try renderParams(c, payload.params, payload.is_var_args);
defer params.deinit();
var params = try renderParams(c, payload.params, payload.is_var_args);
defer params.deinit(c.gpa);
var span: NodeSubRange = undefined;
if (params.items.len > 1) span = try c.listToSpan(params.items);
@ -2998,8 +2997,8 @@ fn renderMacroFunc(c: *Context, node: Node) !NodeIndex {
const fn_token = try c.addToken(.keyword_fn, "fn");
_ = try c.addIdentifier(payload.name);
const params = try renderParams(c, payload.params, false);
defer params.deinit();
var params = try renderParams(c, payload.params, false);
defer params.deinit(c.gpa);
var span: NodeSubRange = undefined;
if (params.items.len > 1) span = try c.listToSpan(params.items);
@ -3035,10 +3034,11 @@ fn renderMacroFunc(c: *Context, node: Node) !NodeIndex {
});
}
fn renderParams(c: *Context, params: []Payload.Param, is_var_args: bool) !std.array_list.Managed(NodeIndex) {
fn renderParams(c: *Context, params: []Payload.Param, is_var_args: bool) !std.ArrayList(NodeIndex) {
_ = try c.addToken(.l_paren, "(");
var rendered = try std.array_list.Managed(NodeIndex).initCapacity(c.gpa, @max(params.len, 1));
errdefer rendered.deinit();
var rendered: std.ArrayList(NodeIndex) = .empty;
errdefer rendered.deinit(c.gpa);
try rendered.ensureUnusedCapacity(c.gpa, @max(params.len, 1));
for (params, 0..) |param, i| {
if (i != 0) _ = try c.addToken(.comma, ",");

View File

@ -110,7 +110,7 @@ fn translate(d: *aro.Driver, tc: *aro.Toolchain, args: [][:0]u8) !void {
defer macro_buf.deinit(gpa);
var discard_buf: [256]u8 = undefined;
var discarding: std.io.Writer.Discarding = .init(&discard_buf);
var discarding: std.Io.Writer.Discarding = .init(&discard_buf);
assert(!try d.parseArgs(&discarding.writer, &macro_buf, aro_args));
if (macro_buf.items.len > std.math.maxInt(u32)) {
return d.fatal("user provided macro source exceeded max size", .{});
@ -206,9 +206,11 @@ fn translate(d: *aro.Driver, tc: *aro.Toolchain, args: [][:0]u8) !void {
out_file_path = path;
}
var out_writer = out_file.writer(&.{});
out_writer.interface.writeAll(rendered_zig) catch
return d.fatal("failed to write result to '{s}': {s}", .{ out_file_path, aro.Driver.errorDescription(out_writer.err.?) });
var out_writer = out_file.writer(&out_buf);
out_writer.interface.writeAll(rendered_zig) catch {};
out_writer.interface.flush() catch {};
if (out_writer.err) |write_err|
return d.fatal("failed to write result to '{s}': {s}", .{ out_file_path, aro.Driver.errorDescription(write_err) });
if (fast_exit) process.exit(0);
}
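The reworked error handling reflects that a buffered `std.fs.File.Writer` may not surface failures until `flush`, after which the underlying error (if any) is available on its `err` field. A minimal standalone sketch of the same write-flush-check sequence against a temporary file (names are illustrative):

    const std = @import("std");

    test "buffered file writer: write, flush, then inspect err" {
        var tmp = std.testing.tmpDir(.{});
        defer tmp.cleanup();
        const out_file = try tmp.dir.createFile("out.zig", .{});
        defer out_file.close();

        var out_buf: [4096]u8 = undefined;
        var out_writer = out_file.writer(&out_buf);
        out_writer.interface.writeAll("pub const ok = true;\n") catch {};
        out_writer.interface.flush() catch {};
        if (out_writer.err) |write_err| return write_err; // report the real error, as the driver does
    }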