Merge pull request #19122 from ziglang/lazy-aro

make aro-based translate-c lazily built from source
Andrew Kelley 2024-02-28 18:21:30 -08:00 committed by GitHub
commit f5aad47287
68 changed files with 16578 additions and 22667 deletions


@@ -643,11 +643,8 @@ set(ZIG_STAGE2_SOURCES
 "${CMAKE_SOURCE_DIR}/src/target.zig"
 "${CMAKE_SOURCE_DIR}/src/tracy.zig"
 "${CMAKE_SOURCE_DIR}/src/translate_c.zig"
-"${CMAKE_SOURCE_DIR}/src/translate_c/ast.zig"
 "${CMAKE_SOURCE_DIR}/src/type.zig"
 "${CMAKE_SOURCE_DIR}/src/wasi_libc.zig"
-"${CMAKE_SOURCE_DIR}/src/stubs/aro_builtins.zig"
-"${CMAKE_SOURCE_DIR}/src/stubs/aro_names.zig"
 )
 if(MSVC)
@@ -822,18 +819,7 @@ set(BUILD_ZIG2_ARGS
 --dep "aro"
 --mod "root" "src/main.zig"
 --mod "build_options" "${ZIG_CONFIG_ZIG_OUT}"
---mod "aro_options" "src/stubs/aro_options.zig"
---mod "Builtins/Builtin.def" "src/stubs/aro_builtins.zig"
---mod "Attribute/names.def" "src/stubs/aro_names.zig"
---mod "Diagnostics/messages.def" "src/stubs/aro_messages.zig"
---dep "build_options=aro_options"
---mod "aro_backend" "deps/aro/backend.zig"
---dep "Builtins/Builtin.def"
---dep "Attribute/names.def"
---dep "Diagnostics/messages.def"
---dep "build_options=aro_options"
---dep "backend=aro_backend"
---mod "aro" "deps/aro/aro.zig"
+--mod "aro" "lib/compiler/aro/aro.zig"
 )
 add_custom_command(


@@ -156,22 +156,8 @@ int main(int argc, char **argv) {
 "--dep", "build_options",
 "--dep", "aro",
 "--mod", "root", "src/main.zig",
 "--mod", "build_options", "config.zig",
-"--mod", "aro_options", "src/stubs/aro_options.zig",
-"--mod", "Builtins/Builtin.def", "src/stubs/aro_builtins.zig",
-"--mod", "Attribute/names.def", "src/stubs/aro_names.zig",
-"--mod", "Diagnostics/messages.def", "src/stubs/aro_messages.zig",
-"--dep", "build_options=aro_options",
-"--mod", "aro_backend", "deps/aro/backend.zig",
-"--dep", "Builtins/Builtin.def",
-"--dep", "Attribute/names.def",
-"--dep", "Diagnostics/messages.def",
-"--dep", "build_options=aro_options",
-"--dep", "backend=aro_backend",
-"--mod", "aro", "deps/aro/aro.zig",
+"--mod", "aro", "lib/compiler/aro/aro.zig",
 NULL,
 };
 print_and_run(child_argv);


@@ -8,7 +8,6 @@ const io = std.io;
 const fs = std.fs;
 const InstallDirectoryOptions = std.Build.InstallDirectoryOptions;
 const assert = std.debug.assert;
-const GenerateDef = @import("deps/aro/build/GenerateDef.zig");
 const zig_version = std.SemanticVersion{ .major = 0, .minor = 12, .patch = 0 };
 const stack_size = 32 * 1024 * 1024;
@@ -636,34 +635,22 @@ fn addCompilerStep(b: *std.Build, options: AddCompilerStepOptions) *std.Build.St
 });
 exe.stack_size = stack_size;
-const aro_options = b.addOptions();
-aro_options.addOption([]const u8, "version_str", "aro-zig");
-const aro_options_module = aro_options.createModule();
-const aro_backend = b.createModule(.{
-.root_source_file = .{ .path = "deps/aro/backend.zig" },
-.imports = &.{.{
-.name = "build_options",
-.module = aro_options_module,
-}},
-});
 const aro_module = b.createModule(.{
-.root_source_file = .{ .path = "deps/aro/aro.zig" },
+.root_source_file = .{ .path = "lib/compiler/aro/aro.zig" },
+});
+const aro_translate_c_module = b.createModule(.{
+.root_source_file = .{ .path = "lib/compiler/aro_translate_c.zig" },
 .imports = &.{
 .{
-.name = "build_options",
-.module = aro_options_module,
+.name = "aro",
+.module = aro_module,
 },
-.{
-.name = "backend",
-.module = aro_backend,
-},
-GenerateDef.create(b, .{ .name = "Builtins/Builtin.def", .src_prefix = "deps/aro/aro" }),
-GenerateDef.create(b, .{ .name = "Attribute/names.def", .src_prefix = "deps/aro/aro" }),
-GenerateDef.create(b, .{ .name = "Diagnostics/messages.def", .src_prefix = "deps/aro/aro", .kind = .named }),
 },
 });
 exe.root_module.addImport("aro", aro_module);
+exe.root_module.addImport("aro_translate_c", aro_translate_c_module);
 return exe;
 }


@@ -1,431 +0,0 @@
# multiple
deprecated
.tag = .deprecated
.c23 = true
.gnu = true
.declspec = true
fallthrough
.tag = .fallthrough
.c23 = true
.gnu = true
noreturn
.tag = .@"noreturn"
.c23 = true
.gnu = true
.declspec = true
no_sanitize_address
.tag = .no_sanitize_address
.gnu = true
.declspec = true
noinline
.tag = .@"noinline"
.gnu = true
.declspec = true
# c23 only
nodiscard
.tag = .nodiscard
.c23 = true
reproducible
.tag = .reproducible
.c23 = true
unsequenced
.tag = .unsequenced
.c23 = true
maybe_unused
.tag = .unused
.c23 = true
# gnu only
access
.tag = .access
.gnu = true
alias
.tag = .alias
.gnu = true
aligned
.tag = .aligned
.gnu = true
alloc_align
.tag = .alloc_align
.gnu = true
alloc_size
.tag = .alloc_size
.gnu = true
always_inline
.tag = .always_inline
.gnu = true
artificial
.tag = .artificial
.gnu = true
assume_aligned
.tag = .assume_aligned
.gnu = true
cleanup
.tag = .cleanup
.gnu = true
cold
.tag = .cold
.gnu = true
common
.tag = .common
.gnu = true
const
.tag = .@"const"
.gnu = true
constructor
.tag = .constructor
.gnu = true
copy
.tag = .copy
.gnu = true
designated_init
.tag = .designated_init
.gnu = true
destructor
.tag = .destructor
.gnu = true
error
.tag = .@"error"
.gnu = true
externally_visible
.tag = .externally_visible
.gnu = true
flatten
.tag = .flatten
.gnu = true
format
.tag = .format
.gnu = true
format_arg
.tag = .format_arg
.gnu = true
gnu_inline
.tag = .gnu_inline
.gnu = true
hot
.tag = .hot
.gnu = true
ifunc
.tag = .ifunc
.gnu = true
interrupt
.tag = .interrupt
.gnu = true
interrupt_handler
.tag = .interrupt_handler
.gnu = true
leaf
.tag = .leaf
.gnu = true
malloc
.tag = .malloc
.gnu = true
may_alias
.tag = .may_alias
.gnu = true
mode
.tag = .mode
.gnu = true
no_address_safety_analysis
.tag = .no_address_safety_analysis
.gnu = true
no_icf
.tag = .no_icf
.gnu = true
no_instrument_function
.tag = .no_instrument_function
.gnu = true
no_profile_instrument_function
.tag = .no_profile_instrument_function
.gnu = true
no_reorder
.tag = .no_reorder
.gnu = true
no_sanitize
.tag = .no_sanitize
.gnu = true
no_sanitize_coverage
.tag = .no_sanitize_coverage
.gnu = true
no_sanitize_thread
.tag = .no_sanitize_thread
.gnu = true
no_sanitize_undefined
.tag = .no_sanitize_undefined
.gnu = true
no_split_stack
.tag = .no_split_stack
.gnu = true
no_stack_limit
.tag = .no_stack_limit
.gnu = true
no_stack_protector
.tag = .no_stack_protector
.gnu = true
noclone
.tag = .noclone
.gnu = true
nocommon
.tag = .nocommon
.gnu = true
noinit
.tag = .noinit
.gnu = true
noipa
.tag = .noipa
.gnu = true
# nonnull
# .tag = .nonnull
# .gnu = true
nonstring
.tag = .nonstring
.gnu = true
noplt
.tag = .noplt
.gnu = true
# optimize
# .tag = .optimize
# .gnu = true
packed
.tag = .@"packed"
.gnu = true
patchable_function_entry
.tag = .patchable_function_entry
.gnu = true
persistent
.tag = .persistent
.gnu = true
pure
.tag = .pure
.gnu = true
retain
.tag = .retain
.gnu = true
returns_nonnull
.tag = .returns_nonnull
.gnu = true
returns_twice
.tag = .returns_twice
.gnu = true
scalar_storage_order
.tag = .scalar_storage_order
.gnu = true
section
.tag = .section
.gnu = true
sentinel
.tag = .sentinel
.gnu = true
simd
.tag = .simd
.gnu = true
stack_protect
.tag = .stack_protect
.gnu = true
symver
.tag = .symver
.gnu = true
target
.tag = .target
.gnu = true
target_clones
.tag = .target_clones
.gnu = true
tls_model
.tag = .tls_model
.gnu = true
transparent_union
.tag = .transparent_union
.gnu = true
unavailable
.tag = .unavailable
.gnu = true
uninitialized
.tag = .uninitialized
.gnu = true
unused
.tag = .unused
.gnu = true
used
.tag = .used
.gnu = true
vector_size
.tag = .vector_size
.gnu = true
visibility
.tag = .visibility
.gnu = true
warn_if_not_aligned
.tag = .warn_if_not_aligned
.gnu = true
warn_unused_result
.tag = .warn_unused_result
.gnu = true
warning
.tag = .warning
.gnu = true
weak
.tag = .weak
.gnu = true
weakref
.tag = .weakref
.gnu = true
zero_call_used_regs
.tag = .zero_call_used_regs
.gnu = true
# declspec only
align
.tag = .aligned
.declspec = true
allocate
.tag = .allocate
.declspec = true
allocator
.tag = .allocator
.declspec = true
appdomain
.tag = .appdomain
.declspec = true
code_seg
.tag = .code_seg
.declspec = true
dllexport
.tag = .dllexport
.declspec = true
dllimport
.tag = .dllimport
.declspec = true
jitintrinsic
.tag = .jitintrinsic
.declspec = true
naked
.tag = .naked
.declspec = true
noalias
.tag = .@"noalias"
.declspec = true
process
.tag = .process
.declspec = true
restrict
.tag = .restrict
.declspec = true
safebuffers
.tag = .safebuffers
.declspec = true
selectany
.tag = .selectany
.declspec = true
spectre
.tag = .spectre
.declspec = true
thread
.tag = .thread
.declspec = true
uuid
.tag = .uuid
.declspec = true

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -1,683 +0,0 @@
const std = @import("std");
const Step = std.Build.Step;
const Allocator = std.mem.Allocator;
const GeneratedFile = std.Build.GeneratedFile;
const GenerateDef = @This();
step: Step,
path: []const u8,
name: []const u8,
kind: Options.Kind,
generated_file: GeneratedFile,
pub const base_id: Step.Id = .custom;
pub const Options = struct {
name: []const u8,
src_prefix: []const u8 = "src/aro",
kind: Kind = .dafsa,
pub const Kind = enum { dafsa, named };
};
pub fn create(owner: *std.Build, options: Options) std.Build.Module.Import {
const self = owner.allocator.create(GenerateDef) catch @panic("OOM");
const path = owner.pathJoin(&.{ options.src_prefix, options.name });
const name = owner.fmt("GenerateDef {s}", .{options.name});
self.* = .{
.step = Step.init(.{
.id = base_id,
.name = name,
.owner = owner,
.makeFn = make,
}),
.path = path,
.name = options.name,
.kind = options.kind,
.generated_file = .{ .step = &self.step },
};
const module = self.step.owner.createModule(.{
.root_source_file = .{ .generated = &self.generated_file },
});
return .{
.module = module,
.name = self.name,
};
}
fn make(step: *Step, prog_node: *std.Progress.Node) !void {
_ = prog_node;
const b = step.owner;
const self = @fieldParentPtr(GenerateDef, "step", step);
const arena = b.allocator;
var man = b.graph.cache.obtain();
defer man.deinit();
// Random bytes to make GenerateDef unique. Refresh this with new
// random bytes when GenerateDef implementation is modified in a
// non-backwards-compatible way.
man.hash.add(@as(u32, 0xDCC14144));
const contents = try b.build_root.handle.readFileAlloc(arena, self.path, std.math.maxInt(u32));
man.hash.addBytes(contents);
const out_name = b.fmt("{s}.zig", .{std.fs.path.stem(self.path)});
if (try step.cacheHit(&man)) {
const digest = man.final();
self.generated_file.path = try b.cache_root.join(arena, &.{
"o", &digest, out_name,
});
return;
}
const digest = man.final();
const sub_path = try std.fs.path.join(arena, &.{ "o", &digest, out_name });
const sub_path_dirname = std.fs.path.dirname(sub_path).?;
b.cache_root.handle.makePath(sub_path_dirname) catch |err| {
return step.fail("unable to make path '{}{s}': {s}", .{
b.cache_root, sub_path_dirname, @errorName(err),
});
};
const output = try self.generate(contents);
b.cache_root.handle.writeFile(sub_path, output) catch |err| {
return step.fail("unable to write file '{}{s}': {s}", .{
b.cache_root, sub_path, @errorName(err),
});
};
self.generated_file.path = try b.cache_root.join(arena, &.{sub_path});
try man.writeManifest();
}
const Value = struct {
name: []const u8,
properties: []const []const u8,
};
fn generate(self: *GenerateDef, input: []const u8) ![]const u8 {
const arena = self.step.owner.allocator;
var values = std.StringArrayHashMap([]const []const u8).init(arena);
defer values.deinit();
var properties = std.ArrayList([]const u8).init(arena);
defer properties.deinit();
var headers = std.ArrayList([]const u8).init(arena);
defer headers.deinit();
var value_name: ?[]const u8 = null;
var it = std.mem.tokenizeAny(u8, input, "\r\n");
while (it.next()) |line_untrimmed| {
const line = std.mem.trim(u8, line_untrimmed, " \t");
if (line.len == 0 or line[0] == '#') continue;
if (std.mem.startsWith(u8, line, "const ") or std.mem.startsWith(u8, line, "pub const ")) {
try headers.append(line);
continue;
}
if (line[0] == '.') {
if (value_name == null) {
return self.step.fail("property not attached to a value:\n\"{s}\"", .{line});
}
try properties.append(line);
continue;
}
if (value_name) |name| {
const old = try values.fetchPut(name, try properties.toOwnedSlice());
if (old != null) return self.step.fail("duplicate value \"{s}\"", .{name});
}
value_name = line;
}
if (value_name) |name| {
const old = try values.fetchPut(name, try properties.toOwnedSlice());
if (old != null) return self.step.fail("duplicate value \"{s}\"", .{name});
}
{
const sorted_list = try arena.dupe([]const u8, values.keys());
defer arena.free(sorted_list);
std.mem.sort([]const u8, sorted_list, {}, struct {
pub fn lessThan(_: void, a: []const u8, b: []const u8) bool {
return std.mem.lessThan(u8, a, b);
}
}.lessThan);
var longest_name: usize = 0;
var shortest_name: usize = std.math.maxInt(usize);
var builder = try DafsaBuilder.init(arena);
defer builder.deinit();
for (sorted_list) |name| {
try builder.insert(name);
longest_name = @max(name.len, longest_name);
shortest_name = @min(name.len, shortest_name);
}
try builder.finish();
builder.calcNumbers();
// As a sanity check, confirm that the minimal perfect hashing doesn't
// have any collisions
{
var index_set = std.AutoHashMap(usize, void).init(arena);
defer index_set.deinit();
for (values.keys()) |name| {
const index = builder.getUniqueIndex(name).?;
const result = try index_set.getOrPut(index);
if (result.found_existing) {
return self.step.fail("clobbered {}, name={s}\n", .{ index, name });
}
}
}
var out_buf = std.ArrayList(u8).init(arena);
defer out_buf.deinit();
const writer = out_buf.writer();
try writer.print(
\\//! Autogenerated by GenerateDef from {s}, do not edit
\\
\\const std = @import("std");
\\
\\pub fn with(comptime Properties: type) type {{
\\return struct {{
\\
, .{self.path});
for (headers.items) |line| {
try writer.print("{s}\n", .{line});
}
if (self.kind == .named) {
try writer.writeAll("pub const Tag = enum {\n");
for (values.keys()) |property| {
try writer.print(" {s},\n", .{std.zig.fmtId(property)});
}
try writer.writeAll(
\\
\\ pub fn property(tag: Tag) Properties {
\\ return named_data[@intFromEnum(tag)];
\\ }
\\
\\ const named_data = [_]Properties{
\\
);
for (values.values()) |val_props| {
try writer.writeAll(" .{");
for (val_props, 0..) |val_prop, j| {
if (j != 0) try writer.writeByte(',');
try writer.writeByte(' ');
try writer.writeAll(val_prop);
}
try writer.writeAll(" },\n");
}
try writer.writeAll(
\\ };
\\};
\\};
\\}
\\
);
return out_buf.toOwnedSlice();
}
var values_array = try arena.alloc(Value, values.count());
defer arena.free(values_array);
for (values.keys(), values.values()) |name, props| {
const unique_index = builder.getUniqueIndex(name).?;
const data_index = unique_index - 1;
values_array[data_index] = .{ .name = name, .properties = props };
}
try writer.writeAll(
\\
\\tag: Tag,
\\properties: Properties,
\\
\\/// Integer starting at 0 derived from the unique index,
\\/// corresponds with the data array index.
\\pub const Tag = enum(u16) { _ };
\\
\\const Self = @This();
\\
\\pub fn fromName(name: []const u8) ?@This() {
\\ const data_index = tagFromName(name) orelse return null;
\\ return data[@intFromEnum(data_index)];
\\}
\\
\\pub fn tagFromName(name: []const u8) ?Tag {
\\ const unique_index = uniqueIndex(name) orelse return null;
\\ return @enumFromInt(unique_index - 1);
\\}
\\
\\pub fn fromTag(tag: Tag) @This() {
\\ return data[@intFromEnum(tag)];
\\}
\\
\\pub fn nameFromTagIntoBuf(tag: Tag, name_buf: []u8) []u8 {
\\ std.debug.assert(name_buf.len >= longest_name);
\\ const unique_index = @intFromEnum(tag) + 1;
\\ return nameFromUniqueIndex(unique_index, name_buf);
\\}
\\
\\pub fn nameFromTag(tag: Tag) NameBuf {
\\ var name_buf: NameBuf = undefined;
\\ const unique_index = @intFromEnum(tag) + 1;
\\ const name = nameFromUniqueIndex(unique_index, &name_buf.buf);
\\ name_buf.len = @intCast(name.len);
\\ return name_buf;
\\}
\\
\\pub const NameBuf = struct {
\\ buf: [longest_name]u8 = undefined,
\\ len: std.math.IntFittingRange(0, longest_name),
\\
\\ pub fn span(self: *const NameBuf) []const u8 {
\\ return self.buf[0..self.len];
\\ }
\\};
\\
\\pub fn exists(name: []const u8) bool {
\\ if (name.len < shortest_name or name.len > longest_name) return false;
\\
\\ var index: u16 = 0;
\\ for (name) |c| {
\\ index = findInList(dafsa[index].child_index, c) orelse return false;
\\ }
\\ return dafsa[index].end_of_word;
\\}
\\
\\
);
try writer.print("pub const shortest_name = {};\n", .{shortest_name});
try writer.print("pub const longest_name = {};\n\n", .{longest_name});
try writer.writeAll(
\\/// Search siblings of `first_child_index` for the `char`
\\/// If found, returns the index of the node within the `dafsa` array.
\\/// Otherwise, returns `null`.
\\pub fn findInList(first_child_index: u16, char: u8) ?u16 {
\\ var index = first_child_index;
\\ while (true) {
\\ if (dafsa[index].char == char) return index;
\\ if (dafsa[index].end_of_list) return null;
\\ index += 1;
\\ }
\\ unreachable;
\\}
\\
\\/// Returns a unique (minimal perfect hash) index (starting at 1) for the `name`,
\\/// or null if the name was not found.
\\pub fn uniqueIndex(name: []const u8) ?u16 {
\\ if (name.len < shortest_name or name.len > longest_name) return null;
\\
\\ var index: u16 = 0;
\\ var node_index: u16 = 0;
\\
\\ for (name) |c| {
\\ const child_index = findInList(dafsa[node_index].child_index, c) orelse return null;
\\ var sibling_index = dafsa[node_index].child_index;
\\ while (true) {
\\ const sibling_c = dafsa[sibling_index].char;
\\ std.debug.assert(sibling_c != 0);
\\ if (sibling_c < c) {
\\ index += dafsa[sibling_index].number;
\\ }
\\ if (dafsa[sibling_index].end_of_list) break;
\\ sibling_index += 1;
\\ }
\\ node_index = child_index;
\\ if (dafsa[node_index].end_of_word) index += 1;
\\ }
\\
\\ if (!dafsa[node_index].end_of_word) return null;
\\
\\ return index;
\\}
\\
\\/// Returns a slice of `buf` with the name associated with the given `index`.
\\/// This function should only be called with an `index` that
\\/// is already known to exist within the `dafsa`, e.g. an index
\\/// returned from `uniqueIndex`.
\\pub fn nameFromUniqueIndex(index: u16, buf: []u8) []u8 {
\\ std.debug.assert(index >= 1 and index <= data.len);
\\
\\ var node_index: u16 = 0;
\\ var count: u16 = index;
\\ var fbs = std.io.fixedBufferStream(buf);
\\ const w = fbs.writer();
\\
\\ while (true) {
\\ var sibling_index = dafsa[node_index].child_index;
\\ while (true) {
\\ if (dafsa[sibling_index].number > 0 and dafsa[sibling_index].number < count) {
\\ count -= dafsa[sibling_index].number;
\\ } else {
\\ w.writeByte(dafsa[sibling_index].char) catch unreachable;
\\ node_index = sibling_index;
\\ if (dafsa[node_index].end_of_word) {
\\ count -= 1;
\\ }
\\ break;
\\ }
\\
\\ if (dafsa[sibling_index].end_of_list) break;
\\ sibling_index += 1;
\\ }
\\ if (count == 0) break;
\\ }
\\
\\ return fbs.getWritten();
\\}
\\
\\
);
try writer.writeAll(
\\/// We're 1 bit shy of being able to fit this in a u32:
\\/// - char only contains 0-9, a-z, A-Z, and _, so it could use a enum(u6) with a way to convert <-> u8
\\/// (note: this would have a performance cost that may make the u32 not worth it)
\\/// - number has a max value of > 2047 and < 4095 (the first _ node has the largest number),
\\/// so it could fit into a u12
\\/// - child_index currently has a max of > 4095 and < 8191, so it could fit into a u13
\\///
\\/// with the end_of_word/end_of_list 2 bools, that makes 33 bits total
\\const Node = packed struct(u64) {
\\ char: u8,
\\ /// Nodes are numbered with "an integer which gives the number of words that
\\ /// would be accepted by the automaton starting from that state." This numbering
\\ /// allows calculating "a one-to-one correspondence between the integers 1 to L
\\ /// (L is the number of words accepted by the automaton) and the words themselves."
\\ ///
\\ /// Essentially, this allows us to have a minimal perfect hashing scheme such that
\\ /// it's possible to store & lookup the properties of each builtin using a separate array.
\\ number: u16,
\\ /// If true, this node is the end of a valid builtin.
\\ /// Note: This does not necessarily mean that this node does not have child nodes.
\\ end_of_word: bool,
\\ /// If true, this node is the end of a sibling list.
\\ /// If false, then (index + 1) will contain the next sibling.
\\ end_of_list: bool,
\\ /// Padding bits to get to u64, unsure if there's some way to use these to improve something.
\\ _extra: u22 = 0,
\\ /// Index of the first child of this node.
\\ child_index: u16,
\\};
\\
\\
);
try builder.writeDafsa(writer);
try writeData(writer, values_array);
try writer.writeAll(
\\};
\\}
\\
);
return out_buf.toOwnedSlice();
}
}
fn writeData(writer: anytype, values: []const Value) !void {
try writer.writeAll("pub const data = blk: {\n");
try writer.print(" @setEvalBranchQuota({});\n", .{values.len});
try writer.writeAll(" break :blk [_]@This(){\n");
for (values, 0..) |value, i| {
try writer.print(" // {s}\n", .{value.name});
try writer.print(" .{{ .tag = @enumFromInt({}), .properties = .{{", .{i});
for (value.properties, 0..) |property, j| {
if (j != 0) try writer.writeByte(',');
try writer.writeByte(' ');
try writer.writeAll(property);
}
if (value.properties.len != 0) try writer.writeByte(' ');
try writer.writeAll("} },\n");
}
try writer.writeAll(" };\n");
try writer.writeAll("};\n");
}
const DafsaBuilder = struct {
root: *Node,
arena: std.heap.ArenaAllocator.State,
allocator: Allocator,
unchecked_nodes: std.ArrayListUnmanaged(UncheckedNode),
minimized_nodes: std.HashMapUnmanaged(*Node, *Node, Node.DuplicateContext, std.hash_map.default_max_load_percentage),
previous_word_buf: [128]u8 = undefined,
previous_word: []u8 = &[_]u8{},
const UncheckedNode = struct {
parent: *Node,
char: u8,
child: *Node,
};
pub fn init(allocator: Allocator) !DafsaBuilder {
var arena = std.heap.ArenaAllocator.init(allocator);
errdefer arena.deinit();
const root = try arena.allocator().create(Node);
root.* = .{};
return DafsaBuilder{
.root = root,
.allocator = allocator,
.arena = arena.state,
.unchecked_nodes = .{},
.minimized_nodes = .{},
};
}
pub fn deinit(self: *DafsaBuilder) void {
self.arena.promote(self.allocator).deinit();
self.unchecked_nodes.deinit(self.allocator);
self.minimized_nodes.deinit(self.allocator);
self.* = undefined;
}
const Node = struct {
children: [256]?*Node = [_]?*Node{null} ** 256,
is_terminal: bool = false,
number: usize = 0,
const DuplicateContext = struct {
pub fn hash(ctx: @This(), key: *Node) u64 {
_ = ctx;
var hasher = std.hash.Wyhash.init(0);
std.hash.autoHash(&hasher, key.children);
std.hash.autoHash(&hasher, key.is_terminal);
return hasher.final();
}
pub fn eql(ctx: @This(), a: *Node, b: *Node) bool {
_ = ctx;
return a.is_terminal == b.is_terminal and std.mem.eql(?*Node, &a.children, &b.children);
}
};
pub fn calcNumbers(self: *Node) void {
self.number = @intFromBool(self.is_terminal);
for (self.children) |maybe_child| {
const child = maybe_child orelse continue;
// A node's number is the sum of the
// numbers of its immediate child nodes.
child.calcNumbers();
self.number += child.number;
}
}
pub fn numDirectChildren(self: *const Node) u8 {
var num: u8 = 0;
for (self.children) |child| {
if (child != null) num += 1;
}
return num;
}
};
pub fn insert(self: *DafsaBuilder, str: []const u8) !void {
if (std.mem.order(u8, str, self.previous_word) == .lt) {
@panic("insertion order must be sorted");
}
var common_prefix_len: usize = 0;
for (0..@min(str.len, self.previous_word.len)) |i| {
if (str[i] != self.previous_word[i]) break;
common_prefix_len += 1;
}
try self.minimize(common_prefix_len);
var node = if (self.unchecked_nodes.items.len == 0)
self.root
else
self.unchecked_nodes.getLast().child;
for (str[common_prefix_len..]) |c| {
std.debug.assert(node.children[c] == null);
var arena = self.arena.promote(self.allocator);
const child = try arena.allocator().create(Node);
self.arena = arena.state;
child.* = .{};
node.children[c] = child;
try self.unchecked_nodes.append(self.allocator, .{
.parent = node,
.char = c,
.child = child,
});
node = node.children[c].?;
}
node.is_terminal = true;
self.previous_word = self.previous_word_buf[0..str.len];
@memcpy(self.previous_word, str);
}
pub fn minimize(self: *DafsaBuilder, down_to: usize) !void {
if (self.unchecked_nodes.items.len == 0) return;
while (self.unchecked_nodes.items.len > down_to) {
const unchecked_node = self.unchecked_nodes.pop();
if (self.minimized_nodes.getPtr(unchecked_node.child)) |child| {
unchecked_node.parent.children[unchecked_node.char] = child.*;
} else {
try self.minimized_nodes.put(self.allocator, unchecked_node.child, unchecked_node.child);
}
}
}
pub fn finish(self: *DafsaBuilder) !void {
try self.minimize(0);
}
fn nodeCount(self: *const DafsaBuilder) usize {
return self.minimized_nodes.count();
}
fn edgeCount(self: *const DafsaBuilder) usize {
var count: usize = 0;
var it = self.minimized_nodes.iterator();
while (it.next()) |entry| {
for (entry.key_ptr.*.children) |child| {
if (child != null) count += 1;
}
}
return count;
}
fn contains(self: *const DafsaBuilder, str: []const u8) bool {
var node = self.root;
for (str) |c| {
node = node.children[c] orelse return false;
}
return node.is_terminal;
}
fn calcNumbers(self: *const DafsaBuilder) void {
self.root.calcNumbers();
}
fn getUniqueIndex(self: *const DafsaBuilder, str: []const u8) ?usize {
var index: usize = 0;
var node = self.root;
for (str) |c| {
const child = node.children[c] orelse return null;
for (node.children, 0..) |sibling, sibling_c| {
if (sibling == null) continue;
if (sibling_c < c) {
index += sibling.?.number;
}
}
node = child;
if (node.is_terminal) index += 1;
}
return index;
}
fn writeDafsa(self: *const DafsaBuilder, writer: anytype) !void {
try writer.writeAll("const dafsa = [_]Node{\n");
// write root
try writer.writeAll(" .{ .char = 0, .end_of_word = false, .end_of_list = true, .number = 0, .child_index = 1 },\n");
var queue = std.ArrayList(*Node).init(self.allocator);
defer queue.deinit();
var child_indexes = std.AutoHashMap(*Node, usize).init(self.allocator);
defer child_indexes.deinit();
try child_indexes.ensureTotalCapacity(@intCast(self.edgeCount()));
var first_available_index: usize = self.root.numDirectChildren() + 1;
first_available_index = try writeDafsaChildren(self.root, writer, &queue, &child_indexes, first_available_index);
while (queue.items.len > 0) {
// TODO: something with better time complexity
const node = queue.orderedRemove(0);
first_available_index = try writeDafsaChildren(node, writer, &queue, &child_indexes, first_available_index);
}
try writer.writeAll("};\n");
}
fn writeDafsaChildren(
node: *Node,
writer: anytype,
queue: *std.ArrayList(*Node),
child_indexes: *std.AutoHashMap(*Node, usize),
first_available_index: usize,
) !usize {
var cur_available_index = first_available_index;
const num_children = node.numDirectChildren();
var child_i: usize = 0;
for (node.children, 0..) |maybe_child, c_usize| {
const child = maybe_child orelse continue;
const c: u8 = @intCast(c_usize);
const is_last_child = child_i == num_children - 1;
if (!child_indexes.contains(child)) {
const child_num_children = child.numDirectChildren();
if (child_num_children > 0) {
child_indexes.putAssumeCapacityNoClobber(child, cur_available_index);
cur_available_index += child_num_children;
}
try queue.append(child);
}
try writer.print(
" .{{ .char = '{c}', .end_of_word = {}, .end_of_list = {}, .number = {}, .child_index = {} }},\n",
.{ c, child.is_terminal, is_last_child, child.number, child_indexes.get(child) orelse 0 },
);
child_i += 1;
}
return cur_available_index;
}
};
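For orientation, here is a hedged consumer sketch of the with(Properties) API emitted by the generator above (and, after this change, shipped as pre-generated .zig files that are imported directly). The Properties type, its tiny Tag enum, and the helper function are illustrative stand-ins only; the real definitions live in Aro's Attribute.zig and are not part of this commit.

// Illustrative only: field names mirror the .def properties above
// (.tag, .gnu, .declspec, .c23); the real Tag enum is much larger.
const Properties = struct {
    tag: Tag,
    gnu: bool = false,
    declspec: bool = false,
    c23: bool = false,

    pub const Tag = enum { cold, deprecated, fallthrough };
};

// Previously @import("Attribute/names.def") resolved to a module generated at
// build time by GenerateDef; now a pre-generated .zig file is imported by path.
const attribute_names = @import("Attribute/names.zig").with(Properties);

// fromName and the .properties field come from the generated API shown in the
// template above; this helper is a hypothetical usage example.
fn isKnownGnuAttribute(name: []const u8) bool {
    const attr = attribute_names.fromName(name) orelse return false;
    return attr.properties.gnu;
}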


@@ -13,7 +13,7 @@ pub const TypeMapper = @import("aro/StringInterner.zig").TypeMapper;
 pub const target_util = @import("aro/target.zig");
 pub const Value = @import("aro/Value.zig");
-const backend = @import("backend");
+const backend = @import("backend.zig");
 pub const Interner = backend.Interner;
 pub const Ir = backend.Ir;
 pub const Object = backend.Object;


@@ -1,7 +1,7 @@
 const std = @import("std");
 const mem = std.mem;
 const ZigType = std.builtin.Type;
-const CallingConvention = @import("backend").CallingConvention;
+const CallingConvention = @import("../backend.zig").CallingConvention;
 const Compilation = @import("Compilation.zig");
 const Diagnostics = @import("Diagnostics.zig");
 const Parser = @import("Parser.zig");
@@ -687,7 +687,7 @@ pub fn fromString(kind: Kind, namespace: ?[]const u8, name: []const u8) ?Tag {
 declspec: bool = false,
 c23: bool = false,
 };
-const attribute_names = @import("Attribute/names.def").with(Properties);
+const attribute_names = @import("Attribute/names.zig").with(Properties);
 const normalized = normalize(name);
 const actual_kind: Kind = if (namespace) |ns| blk: {

File diff suppressed because it is too large


@@ -8,7 +8,7 @@ const LangOpts = @import("LangOpts.zig");
 const Parser = @import("Parser.zig");
 const Properties = @import("Builtins/Properties.zig");
-pub const Builtin = @import("Builtins/Builtin.def").with(Properties);
+pub const Builtin = @import("Builtins/Builtin.zig").with(Properties);
 const Expanded = struct {
 ty: Type,

File diff suppressed because it is too large


@@ -1,7 +1,7 @@
 const std = @import("std");
 const Allocator = std.mem.Allocator;
 const assert = std.debug.assert;
-const backend = @import("backend");
+const backend = @import("../backend.zig");
 const Interner = backend.Interner;
 const Ir = backend.Ir;
 const Builtins = @import("Builtins.zig");


@@ -3,7 +3,7 @@ const Allocator = mem.Allocator;
 const assert = std.debug.assert;
 const EpochSeconds = std.time.epoch.EpochSeconds;
 const mem = std.mem;
-const Interner = @import("backend").Interner;
+const Interner = @import("../backend.zig").Interner;
 const Builtins = @import("Builtins.zig");
 const Builtin = Builtins.Builtin;
 const Diagnostics = @import("Diagnostics.zig");
@@ -507,7 +507,7 @@ pub fn generateBuiltinMacros(comp: *Compilation, system_defines_mode: SystemDefi
 if (system_defines_mode == .include_system_defines) {
 try buf.appendSlice(
 \\#define __VERSION__ "Aro
-++ @import("backend").version_str ++ "\"\n" ++
+++ @import("../backend.zig").version_str ++ "\"\n" ++
 \\#define __Aro__
 \\
 );


@@ -92,7 +92,7 @@ const Properties = struct {
 pub const max_bits = Compilation.bit_int_max_bits;
 };
-pub const Tag = @import("Diagnostics/messages.def").with(Properties).Tag;
+pub const Tag = @import("Diagnostics/messages.zig").with(Properties).Tag;
 pub const Kind = enum { @"fatal error", @"error", note, warning, off, default };

File diff suppressed because it is too large


@@ -2,7 +2,7 @@ const std = @import("std");
 const mem = std.mem;
 const Allocator = mem.Allocator;
 const process = std.process;
-const backend = @import("backend");
+const backend = @import("../backend.zig");
 const Ir = backend.Ir;
 const Object = backend.Object;
 const Compilation = @import("Compilation.zig");
@@ -189,7 +189,7 @@ pub fn parseArgs(
 };
 return true;
 } else if (mem.eql(u8, arg, "-v") or mem.eql(u8, arg, "--version")) {
-std_out.writeAll(@import("backend").version_str ++ "\n") catch |er| {
+std_out.writeAll(@import("../backend.zig").version_str ++ "\n") catch |er| {
 return d.fatal("unable to print version: {s}", .{errorDescription(er)});
 };
 return true;


@@ -1,5 +1,5 @@
 const std = @import("std");
-const Interner = @import("backend").Interner;
+const Interner = @import("../backend.zig").Interner;
 const Attribute = @import("Attribute.zig");
 const CodeGen = @import("CodeGen.zig");
 const Compilation = @import("Compilation.zig");


@@ -2,7 +2,7 @@ const std = @import("std");
 const assert = std.debug.assert;
 const BigIntConst = std.math.big.int.Const;
 const BigIntMutable = std.math.big.int.Mutable;
-const backend = @import("backend");
+const backend = @import("../backend.zig");
 const Interner = backend.Interner;
 const BigIntSpace = Interner.Tag.Int.BigIntSpace;
 const Compilation = @import("Compilation.zig");


@@ -9,5 +9,5 @@ pub const CallingConvention = enum {
 vectorcall,
 };
-pub const version_str = @import("build_options").version_str;
+pub const version_str = "aro-zig";
 pub const version = @import("std").SemanticVersion.parse(version_str) catch unreachable;

File diff suppressed because it is too large


@@ -1,5 +1,4 @@
 const std = @import("std");
-const Type = @import("../type.zig").Type;
 const Allocator = std.mem.Allocator;
 pub const Node = extern union {


@@ -4007,8 +4007,6 @@ pub fn cImport(comp: *Compilation, c_src: []const u8, owner_mod: *Package.Module
 }
 var tree = switch (comp.config.c_frontend) {
 .aro => tree: {
-const translate_c = @import("aro_translate_c.zig");
-_ = translate_c;
 if (true) @panic("TODO");
 break :tree undefined;
 },


@@ -1,678 +0,0 @@
const std = @import("std");
const mem = std.mem;
const assert = std.debug.assert;
const CallingConvention = std.builtin.CallingConvention;
const translate_c = @import("translate_c.zig");
const aro = @import("aro");
const Tree = aro.Tree;
const NodeIndex = Tree.NodeIndex;
const TokenIndex = Tree.TokenIndex;
const Type = aro.Type;
const ast = @import("translate_c/ast.zig");
const ZigNode = ast.Node;
const ZigTag = ZigNode.Tag;
const common = @import("translate_c/common.zig");
const Error = common.Error;
const MacroProcessingError = common.MacroProcessingError;
const TypeError = common.TypeError;
const TransError = common.TransError;
const SymbolTable = common.SymbolTable;
const AliasList = common.AliasList;
const ResultUsed = common.ResultUsed;
const Scope = common.ScopeExtra(Context, Type);
const Context = struct {
gpa: mem.Allocator,
arena: mem.Allocator,
decl_table: std.AutoArrayHashMapUnmanaged(usize, []const u8) = .{},
alias_list: AliasList,
global_scope: *Scope.Root,
mangle_count: u32 = 0,
/// Table of record decls that have been demoted to opaques.
opaque_demotes: std.AutoHashMapUnmanaged(usize, void) = .{},
/// Table of unnamed enums and records that are child types of typedefs.
unnamed_typedefs: std.AutoHashMapUnmanaged(usize, []const u8) = .{},
/// Needed to decide if we are parsing a typename
typedefs: std.StringArrayHashMapUnmanaged(void) = .{},
/// This one is different than the root scope's name table. This contains
/// a list of names that we found by visiting all the top level decls without
/// translating them. The other maps are updated as we translate; this one is updated
/// up front in a pre-processing step.
global_names: std.StringArrayHashMapUnmanaged(void) = .{},
/// This is similar to `global_names`, but contains names which we would
/// *like* to use, but do not strictly *have* to if they are unavailable.
/// These are relevant to types, which ideally we would name like
/// 'struct_foo' with an alias 'foo', but if either of those names is taken,
/// may be mangled.
/// This is distinct from `global_names` so we can detect at a type
/// declaration whether or not the name is available.
weak_global_names: std.StringArrayHashMapUnmanaged(void) = .{},
pattern_list: translate_c.PatternList,
tree: Tree,
comp: *aro.Compilation,
mapper: aro.TypeMapper,
fn getMangle(c: *Context) u32 {
c.mangle_count += 1;
return c.mangle_count;
}
/// Convert a clang source location to a file:line:column string
fn locStr(c: *Context, loc: TokenIndex) ![]const u8 {
_ = c;
_ = loc;
// const spelling_loc = c.source_manager.getSpellingLoc(loc);
// const filename_c = c.source_manager.getFilename(spelling_loc);
// const filename = if (filename_c) |s| try c.str(s) else @as([]const u8, "(no file)");
// const line = c.source_manager.getSpellingLineNumber(spelling_loc);
// const column = c.source_manager.getSpellingColumnNumber(spelling_loc);
// return std.fmt.allocPrint(c.arena, "{s}:{d}:{d}", .{ filename, line, column });
return "somewhere";
}
};
fn maybeSuppressResult(c: *Context, used: ResultUsed, result: ZigNode) TransError!ZigNode {
if (used == .used) return result;
return ZigTag.discard.create(c.arena, .{ .should_skip = false, .value = result });
}
fn addTopLevelDecl(c: *Context, name: []const u8, decl_node: ZigNode) !void {
const gop = try c.global_scope.sym_table.getOrPut(name);
if (!gop.found_existing) {
gop.value_ptr.* = decl_node;
try c.global_scope.nodes.append(decl_node);
}
}
fn failDecl(c: *Context, loc: TokenIndex, name: []const u8, comptime format: []const u8, args: anytype) Error!void {
// location
// pub const name = @compileError(msg);
const fail_msg = try std.fmt.allocPrint(c.arena, format, args);
try addTopLevelDecl(c, name, try ZigTag.fail_decl.create(c.arena, .{ .actual = name, .mangled = fail_msg }));
const str = try c.locStr(loc);
const location_comment = try std.fmt.allocPrint(c.arena, "// {s}", .{str});
try c.global_scope.nodes.append(try ZigTag.warning.create(c.arena, location_comment));
}
fn warn(c: *Context, scope: *Scope, loc: TokenIndex, comptime format: []const u8, args: anytype) !void {
const str = try c.locStr(loc);
const value = try std.fmt.allocPrint(c.arena, "// {s}: warning: " ++ format, .{str} ++ args);
try scope.appendNode(try ZigTag.warning.create(c.arena, value));
}
pub fn translate(
gpa: mem.Allocator,
comp: *aro.Compilation,
args: []const []const u8,
) !std.zig.Ast {
try comp.addDefaultPragmaHandlers();
comp.langopts.setEmulatedCompiler(aro.target_util.systemCompiler(comp.target));
var driver: aro.Driver = .{ .comp = comp };
defer driver.deinit();
var macro_buf = std.ArrayList(u8).init(gpa);
defer macro_buf.deinit();
assert(!try driver.parseArgs(std.io.null_writer, macro_buf.writer(), args));
assert(driver.inputs.items.len == 1);
const source = driver.inputs.items[0];
const builtin_macros = try comp.generateBuiltinMacros(.include_system_defines);
const user_macros = try comp.addSourceFromBuffer("<command line>", macro_buf.items);
var pp = try aro.Preprocessor.initDefault(comp);
defer pp.deinit();
try pp.preprocessSources(&.{ source, builtin_macros, user_macros });
var tree = try pp.parse();
defer tree.deinit();
if (driver.comp.diagnostics.errors != 0) {
return error.SemanticAnalyzeFail;
}
const mapper = tree.comp.string_interner.getFastTypeMapper(tree.comp.gpa) catch tree.comp.string_interner.getSlowTypeMapper();
defer mapper.deinit(tree.comp.gpa);
var arena_allocator = std.heap.ArenaAllocator.init(gpa);
defer arena_allocator.deinit();
const arena = arena_allocator.allocator();
var context = Context{
.gpa = gpa,
.arena = arena,
.alias_list = AliasList.init(gpa),
.global_scope = try arena.create(Scope.Root),
.pattern_list = try translate_c.PatternList.init(gpa),
.comp = comp,
.mapper = mapper,
.tree = tree,
};
context.global_scope.* = Scope.Root.init(&context);
defer {
context.decl_table.deinit(gpa);
context.alias_list.deinit();
context.global_names.deinit(gpa);
context.opaque_demotes.deinit(gpa);
context.unnamed_typedefs.deinit(gpa);
context.typedefs.deinit(gpa);
context.global_scope.deinit();
context.pattern_list.deinit(gpa);
}
inline for (@typeInfo(std.zig.c_builtins).Struct.decls) |decl| {
const builtin_fn = try ZigTag.pub_var_simple.create(arena, .{
.name = decl.name,
.init = try ZigTag.import_c_builtin.create(arena, decl.name),
});
try addTopLevelDecl(&context, decl.name, builtin_fn);
}
try prepopulateGlobalNameTable(&context);
try transTopLevelDecls(&context);
for (context.alias_list.items) |alias| {
if (!context.global_scope.sym_table.contains(alias.alias)) {
const node = try ZigTag.alias.create(arena, .{ .actual = alias.alias, .mangled = alias.name });
try addTopLevelDecl(&context, alias.alias, node);
}
}
return ast.render(gpa, context.global_scope.nodes.items);
}
fn prepopulateGlobalNameTable(c: *Context) !void {
const node_tags = c.tree.nodes.items(.tag);
const node_types = c.tree.nodes.items(.ty);
const node_data = c.tree.nodes.items(.data);
for (c.tree.root_decls) |node| {
const data = node_data[@intFromEnum(node)];
const decl_name = switch (node_tags[@intFromEnum(node)]) {
.typedef => @panic("TODO"),
.static_assert,
.struct_decl_two,
.union_decl_two,
.struct_decl,
.union_decl,
=> blk: {
const ty = node_types[@intFromEnum(node)];
const name_id = ty.data.record.name;
break :blk c.mapper.lookup(name_id);
},
.enum_decl_two,
.enum_decl,
=> blk: {
const ty = node_types[@intFromEnum(node)];
const name_id = ty.data.@"enum".name;
break :blk c.mapper.lookup(name_id);
},
.fn_proto,
.static_fn_proto,
.inline_fn_proto,
.inline_static_fn_proto,
.fn_def,
.static_fn_def,
.inline_fn_def,
.inline_static_fn_def,
.@"var",
.static_var,
.threadlocal_var,
.threadlocal_static_var,
.extern_var,
.threadlocal_extern_var,
=> c.tree.tokSlice(data.decl.name),
else => unreachable,
};
try c.global_names.put(c.gpa, decl_name, {});
}
}
fn transTopLevelDecls(c: *Context) !void {
const node_tags = c.tree.nodes.items(.tag);
const node_data = c.tree.nodes.items(.data);
for (c.tree.root_decls) |node| {
const data = node_data[@intFromEnum(node)];
switch (node_tags[@intFromEnum(node)]) {
.typedef => {
try transTypeDef(c, &c.global_scope.base, node);
},
.static_assert,
.struct_decl_two,
.union_decl_two,
.struct_decl,
.union_decl,
=> {
try transRecordDecl(c, &c.global_scope.base, node);
},
.enum_decl_two => {
var fields = [2]NodeIndex{ data.bin.lhs, data.bin.rhs };
var field_count: u8 = 0;
if (fields[0] != .none) field_count += 1;
if (fields[1] != .none) field_count += 1;
try transEnumDecl(c, &c.global_scope.base, node, fields[0..field_count]);
},
.enum_decl => {
const fields = c.tree.data[data.range.start..data.range.end];
try transEnumDecl(c, &c.global_scope.base, node, fields);
},
.fn_proto,
.static_fn_proto,
.inline_fn_proto,
.inline_static_fn_proto,
.fn_def,
.static_fn_def,
.inline_fn_def,
.inline_static_fn_def,
=> {
try transFnDecl(c, node);
},
.@"var",
.static_var,
.threadlocal_var,
.threadlocal_static_var,
.extern_var,
.threadlocal_extern_var,
=> {
try transVarDecl(c, node, null);
},
else => unreachable,
}
}
}
fn transTypeDef(_: *Context, _: *Scope, _: NodeIndex) Error!void {
@panic("TODO");
}
fn transRecordDecl(_: *Context, _: *Scope, _: NodeIndex) Error!void {
@panic("TODO");
}
fn transFnDecl(c: *Context, fn_decl: NodeIndex) Error!void {
const raw_ty = c.tree.nodes.items(.ty)[@intFromEnum(fn_decl)];
const fn_ty = raw_ty.canonicalize(.standard);
const node_data = c.tree.nodes.items(.data)[@intFromEnum(fn_decl)];
if (c.decl_table.get(@intFromPtr(fn_ty.data.func))) |_|
return; // Avoid processing this decl twice
const fn_name = c.tree.tokSlice(node_data.decl.name);
if (c.global_scope.sym_table.contains(fn_name))
return; // Avoid processing this decl twice
const fn_decl_loc = 0; // TODO
const has_body = node_data.decl.node != .none;
const is_always_inline = has_body and raw_ty.getAttribute(.always_inline) != null;
const proto_ctx = FnProtoContext{
.fn_name = fn_name,
.is_inline = is_always_inline,
.is_extern = !has_body,
.is_export = switch (c.tree.nodes.items(.tag)[@intFromEnum(fn_decl)]) {
.fn_proto, .fn_def => has_body and !is_always_inline,
.inline_fn_proto, .inline_fn_def, .inline_static_fn_proto, .inline_static_fn_def, .static_fn_proto, .static_fn_def => false,
else => unreachable,
},
};
const proto_node = transFnType(c, &c.global_scope.base, raw_ty, fn_ty, fn_decl_loc, proto_ctx) catch |err| switch (err) {
error.UnsupportedType => {
return failDecl(c, fn_decl_loc, fn_name, "unable to resolve prototype of function", .{});
},
error.OutOfMemory => |e| return e,
};
if (!has_body) {
return addTopLevelDecl(c, fn_name, proto_node);
}
const proto_payload = proto_node.castTag(.func).?;
// actual function definition with body
const body_stmt = node_data.decl.node;
var block_scope = try Scope.Block.init(c, &c.global_scope.base, false);
block_scope.return_type = fn_ty.data.func.return_type;
defer block_scope.deinit();
var scope = &block_scope.base;
_ = &scope;
var param_id: c_uint = 0;
for (proto_payload.data.params, fn_ty.data.func.params) |*param, param_info| {
const param_name = param.name orelse {
proto_payload.data.is_extern = true;
proto_payload.data.is_export = false;
proto_payload.data.is_inline = false;
try warn(c, &c.global_scope.base, fn_decl_loc, "function {s} parameter has no name, demoted to extern", .{fn_name});
return addTopLevelDecl(c, fn_name, proto_node);
};
const is_const = param_info.ty.qual.@"const";
const mangled_param_name = try block_scope.makeMangledName(c, param_name);
param.name = mangled_param_name;
if (!is_const) {
const bare_arg_name = try std.fmt.allocPrint(c.arena, "arg_{s}", .{mangled_param_name});
const arg_name = try block_scope.makeMangledName(c, bare_arg_name);
param.name = arg_name;
const redecl_node = try ZigTag.arg_redecl.create(c.arena, .{ .actual = mangled_param_name, .mangled = arg_name });
try block_scope.statements.append(redecl_node);
}
try block_scope.discardVariable(c, mangled_param_name);
param_id += 1;
}
transCompoundStmtInline(c, body_stmt, &block_scope) catch |err| switch (err) {
error.OutOfMemory => |e| return e,
error.UnsupportedTranslation,
error.UnsupportedType,
=> {
proto_payload.data.is_extern = true;
proto_payload.data.is_export = false;
proto_payload.data.is_inline = false;
try warn(c, &c.global_scope.base, fn_decl_loc, "unable to translate function, demoted to extern", .{});
return addTopLevelDecl(c, fn_name, proto_node);
},
};
proto_payload.data.body = try block_scope.complete(c);
return addTopLevelDecl(c, fn_name, proto_node);
}
fn transVarDecl(_: *Context, _: NodeIndex, _: ?usize) Error!void {
@panic("TODO");
}
fn transEnumDecl(c: *Context, scope: *Scope, enum_decl: NodeIndex, field_nodes: []const NodeIndex) Error!void {
const node_types = c.tree.nodes.items(.ty);
const ty = node_types[@intFromEnum(enum_decl)];
if (c.decl_table.get(@intFromPtr(ty.data.@"enum"))) |_|
return; // Avoid processing this decl twice
const toplevel = scope.id == .root;
const bs: *Scope.Block = if (!toplevel) try scope.findBlockScope(c) else undefined;
var is_unnamed = false;
var bare_name: []const u8 = c.mapper.lookup(ty.data.@"enum".name);
var name = bare_name;
if (c.unnamed_typedefs.get(@intFromPtr(ty.data.@"enum"))) |typedef_name| {
bare_name = typedef_name;
name = typedef_name;
} else {
if (bare_name.len == 0) {
bare_name = try std.fmt.allocPrint(c.arena, "unnamed_{d}", .{c.getMangle()});
is_unnamed = true;
}
name = try std.fmt.allocPrint(c.arena, "enum_{s}", .{bare_name});
}
if (!toplevel) name = try bs.makeMangledName(c, name);
try c.decl_table.putNoClobber(c.gpa, @intFromPtr(ty.data.@"enum"), name);
const enum_type_node = if (!ty.data.@"enum".isIncomplete()) blk: {
for (ty.data.@"enum".fields, field_nodes) |field, field_node| {
var enum_val_name: []const u8 = c.mapper.lookup(field.name);
if (!toplevel) {
enum_val_name = try bs.makeMangledName(c, enum_val_name);
}
const enum_const_type_node: ?ZigNode = transType(c, scope, field.ty, field.name_tok) catch |err| switch (err) {
error.UnsupportedType => null,
else => |e| return e,
};
const val = c.tree.value_map.get(field_node).?;
const enum_const_def = try ZigTag.enum_constant.create(c.arena, .{
.name = enum_val_name,
.is_public = toplevel,
.type = enum_const_type_node,
.value = try transCreateNodeAPInt(c, val),
});
if (toplevel)
try addTopLevelDecl(c, enum_val_name, enum_const_def)
else {
try scope.appendNode(enum_const_def);
try bs.discardVariable(c, enum_val_name);
}
}
break :blk transType(c, scope, ty.data.@"enum".tag_ty, 0) catch |err| switch (err) {
error.UnsupportedType => {
return failDecl(c, 0, name, "unable to translate enum integer type", .{});
},
else => |e| return e,
};
} else blk: {
try c.opaque_demotes.put(c.gpa, @intFromPtr(ty.data.@"enum"), {});
break :blk ZigTag.opaque_literal.init();
};
const is_pub = toplevel and !is_unnamed;
const payload = try c.arena.create(ast.Payload.SimpleVarDecl);
payload.* = .{
.base = .{ .tag = ([2]ZigTag{ .var_simple, .pub_var_simple })[@intFromBool(is_pub)] },
.data = .{
.init = enum_type_node,
.name = name,
},
};
const node = ZigNode.initPayload(&payload.base);
if (toplevel) {
try addTopLevelDecl(c, name, node);
if (!is_unnamed)
try c.alias_list.append(.{ .alias = bare_name, .name = name });
} else {
try scope.appendNode(node);
if (node.tag() != .pub_var_simple) {
try bs.discardVariable(c, name);
}
}
}
fn transType(c: *Context, scope: *Scope, raw_ty: Type, source_loc: TokenIndex) TypeError!ZigNode {
const ty = raw_ty.canonicalize(.standard);
switch (ty.specifier) {
.void => return ZigTag.type.create(c.arena, "anyopaque"),
.bool => return ZigTag.type.create(c.arena, "bool"),
.char => return ZigTag.type.create(c.arena, "c_char"),
.schar => return ZigTag.type.create(c.arena, "i8"),
.uchar => return ZigTag.type.create(c.arena, "u8"),
.short => return ZigTag.type.create(c.arena, "c_short"),
.ushort => return ZigTag.type.create(c.arena, "c_ushort"),
.int => return ZigTag.type.create(c.arena, "c_int"),
.uint => return ZigTag.type.create(c.arena, "c_uint"),
.long => return ZigTag.type.create(c.arena, "c_long"),
.ulong => return ZigTag.type.create(c.arena, "c_ulong"),
.long_long => return ZigTag.type.create(c.arena, "c_longlong"),
.ulong_long => return ZigTag.type.create(c.arena, "c_ulonglong"),
.int128 => return ZigTag.type.create(c.arena, "i128"),
.uint128 => return ZigTag.type.create(c.arena, "u128"),
.fp16, .float16 => return ZigTag.type.create(c.arena, "f16"),
.float => return ZigTag.type.create(c.arena, "f32"),
.double => return ZigTag.type.create(c.arena, "f64"),
.long_double => return ZigTag.type.create(c.arena, "c_longdouble"),
.float80 => return ZigTag.type.create(c.arena, "f80"),
.float128 => return ZigTag.type.create(c.arena, "f128"),
.func,
.var_args_func,
.old_style_func,
=> return transFnType(c, scope, raw_ty, ty, source_loc, .{}),
else => return error.UnsupportedType,
}
}
fn zigAlignment(bit_alignment: u29) u32 {
return bit_alignment / 8;
}
const FnProtoContext = struct {
is_pub: bool = false,
is_export: bool = false,
is_extern: bool = false,
is_inline: bool = false,
fn_name: ?[]const u8 = null,
};
fn transFnType(
c: *Context,
scope: *Scope,
raw_ty: Type,
fn_ty: Type,
source_loc: TokenIndex,
ctx: FnProtoContext,
) !ZigNode {
const param_count: usize = fn_ty.data.func.params.len;
const fn_params = try c.arena.alloc(ast.Payload.Param, param_count);
for (fn_ty.data.func.params, fn_params) |param_info, *param_node| {
const param_ty = param_info.ty;
const is_noalias = param_ty.qual.restrict;
const param_name: ?[]const u8 = if (param_info.name == .empty)
null
else
c.mapper.lookup(param_info.name);
const type_node = try transType(c, scope, param_ty, param_info.name_tok);
param_node.* = .{
.is_noalias = is_noalias,
.name = param_name,
.type = type_node,
};
}
const linksection_string = blk: {
if (raw_ty.getAttribute(.section)) |section| {
break :blk c.comp.interner.get(section.name.ref()).bytes;
}
break :blk null;
};
const alignment = if (raw_ty.requestedAlignment(c.comp)) |alignment| zigAlignment(alignment) else null;
const explicit_callconv = null;
// const explicit_callconv = if ((ctx.is_inline or ctx.is_export or ctx.is_extern) and ctx.cc == .C) null else ctx.cc;
const return_type_node = blk: {
if (raw_ty.getAttribute(.noreturn) != null) {
break :blk ZigTag.noreturn_type.init();
} else {
const return_ty = fn_ty.data.func.return_type;
if (return_ty.is(.void)) {
// convert primitive anyopaque to actual void (only for return type)
break :blk ZigTag.void_type.init();
} else {
break :blk transType(c, scope, return_ty, source_loc) catch |err| switch (err) {
error.UnsupportedType => {
try warn(c, scope, source_loc, "unsupported function proto return type", .{});
return err;
},
error.OutOfMemory => |e| return e,
};
}
}
};
const payload = try c.arena.create(ast.Payload.Func);
payload.* = .{
.base = .{ .tag = .func },
.data = .{
.is_pub = ctx.is_pub,
.is_extern = ctx.is_extern,
.is_export = ctx.is_export,
.is_inline = ctx.is_inline,
.is_var_args = switch (fn_ty.specifier) {
.func => false,
.var_args_func => true,
.old_style_func => !ctx.is_export and !ctx.is_inline,
else => unreachable,
},
.name = ctx.fn_name,
.linksection_string = linksection_string,
.explicit_callconv = explicit_callconv,
.params = fn_params,
.return_type = return_type_node,
.body = null,
.alignment = alignment,
},
};
return ZigNode.initPayload(&payload.base);
}
fn transStmt(c: *Context, node: NodeIndex) TransError!ZigNode {
return transExpr(c, node, .unused);
}
fn transCompoundStmtInline(c: *Context, compound: NodeIndex, block: *Scope.Block) TransError!void {
const data = c.tree.nodes.items(.data)[@intFromEnum(compound)];
var buf: [2]NodeIndex = undefined;
// TODO move these helpers to Aro
const stmts = switch (c.tree.nodes.items(.tag)[@intFromEnum(compound)]) {
.compound_stmt_two => blk: {
if (data.bin.lhs != .none) buf[0] = data.bin.lhs;
if (data.bin.rhs != .none) buf[1] = data.bin.rhs;
break :blk buf[0 .. @as(u32, @intFromBool(data.bin.lhs != .none)) + @intFromBool(data.bin.rhs != .none)];
},
.compound_stmt => c.tree.data[data.range.start..data.range.end],
else => unreachable,
};
for (stmts) |stmt| {
const result = try transStmt(c, stmt);
switch (result.tag()) {
.declaration, .empty_block => {},
else => try block.statements.append(result),
}
}
}
fn transCompoundStmt(c: *Context, scope: *Scope, compound: NodeIndex) TransError!ZigNode {
var block_scope = try Scope.Block.init(c, scope, false);
defer block_scope.deinit();
try transCompoundStmtInline(c, compound, &block_scope);
return try block_scope.complete(c);
}
fn transExpr(c: *Context, node: NodeIndex, result_used: ResultUsed) TransError!ZigNode {
std.debug.assert(node != .none);
const ty = c.tree.nodes.items(.ty)[@intFromEnum(node)];
if (c.tree.value_map.get(node)) |val| {
// TODO handle other values
const int = try transCreateNodeAPInt(c, val);
const as_node = try ZigTag.as.create(c.arena, .{
.lhs = try transType(c, undefined, ty, undefined),
.rhs = int,
});
return maybeSuppressResult(c, result_used, as_node);
}
const node_tags = c.tree.nodes.items(.tag);
switch (node_tags[@intFromEnum(node)]) {
else => unreachable, // Not an expression.
}
return .none;
}
fn transCreateNodeAPInt(c: *Context, int: aro.Value) !ZigNode {
var space: aro.Interner.Tag.Int.BigIntSpace = undefined;
var big = int.toBigInt(&space, c.comp);
const is_negative = !big.positive;
big.positive = true;
const str = big.toStringAlloc(c.arena, 10, .lower) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
};
const res = try ZigTag.integer_literal.create(c.arena, str);
if (is_negative) return ZigTag.negate.create(c.arena, res);
return res;
}
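
// Illustrative sketch (hypothetical helper, not part of this file): the same
// render-then-negate idea as transCreateNodeAPInt above, but over a plain Zig
// integer and using only std.math.big.int calls. The magnitude is rendered in
// base 10 and a leading '-' stands in for the `negate` AST node.
fn renderIntLiteralExample(allocator: std.mem.Allocator, value: i128) ![]u8 {
    var big = try std.math.big.int.Managed.initSet(allocator, value);
    defer big.deinit();
    const magnitude = big.toConst().abs();
    const digits = try magnitude.toStringAlloc(allocator, 10, .lower);
    defer allocator.free(digits);
    if (big.toConst().positive) return allocator.dupe(u8, digits);
    return std.fmt.allocPrint(allocator, "-{s}", .{digits});
}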

View File

@ -294,13 +294,20 @@ fn mainArgs(gpa: Allocator, arena: Allocator, args: []const []const u8) !void {
    } else if (mem.eql(u8, cmd, "rc")) {
        return cmdRc(gpa, arena, args[1..]);
    } else if (mem.eql(u8, cmd, "fmt")) {
-       return jitCmd(gpa, arena, cmd_args, "fmt", "fmt.zig", false);
+       return jitCmd(gpa, arena, cmd_args, .{
+           .cmd_name = "fmt",
+           .root_src_path = "fmt.zig",
+       });
    } else if (mem.eql(u8, cmd, "objcopy")) {
        return @import("objcopy.zig").cmdObjCopy(gpa, arena, cmd_args);
    } else if (mem.eql(u8, cmd, "fetch")) {
        return cmdFetch(gpa, arena, cmd_args);
    } else if (mem.eql(u8, cmd, "libc")) {
-       return jitCmd(gpa, arena, cmd_args, "libc", "libc.zig", true);
+       return jitCmd(gpa, arena, cmd_args, .{
+           .cmd_name = "libc",
+           .root_src_path = "libc.zig",
+           .prepend_zig_lib_dir_path = true,
+       });
    } else if (mem.eql(u8, cmd, "init")) {
        return cmdInit(gpa, arena, cmd_args);
    } else if (mem.eql(u8, cmd, "targets")) {
@ -317,7 +324,10 @@ fn mainArgs(gpa: Allocator, arena: Allocator, args: []const []const u8) !void {
        verifyLibcxxCorrectlyLinked();
        return @import("print_env.zig").cmdEnv(arena, cmd_args, io.getStdOut().writer());
    } else if (mem.eql(u8, cmd, "reduce")) {
-       return jitCmd(gpa, arena, cmd_args, "reduce", "reduce.zig", false);
+       return jitCmd(gpa, arena, cmd_args, .{
+           .cmd_name = "reduce",
+           .root_src_path = "reduce.zig",
+       });
    } else if (mem.eql(u8, cmd, "zen")) {
        return io.getStdOut().writeAll(info_zen);
    } else if (mem.eql(u8, cmd, "help") or mem.eql(u8, cmd, "-h") or mem.eql(u8, cmd, "--help")) {
@ -4459,7 +4469,13 @@ fn cmdTranslateC(comp: *Compilation, arena: Allocator, fancy_output: ?*Compilati
    const digest = if (try man.hit()) man.final() else digest: {
        if (fancy_output) |p| p.cache_hit = false;
        var argv = std.ArrayList([]const u8).init(arena);
-       try argv.append(@tagName(comp.config.c_frontend)); // argv[0] is program name, actual args start at [1]
+       switch (comp.config.c_frontend) {
+           .aro => {},
+           .clang => {
+               // argv[0] is program name, actual args start at [1]
+               try argv.append(@tagName(comp.config.c_frontend));
+           },
+       }

        var zig_cache_tmp_dir = try comp.local_cache_directory.handle.makeOpenPath("tmp", .{});
        defer zig_cache_tmp_dir.close();
@ -4484,24 +4500,18 @@ fn cmdTranslateC(comp: *Compilation, arena: Allocator, fancy_output: ?*Compilati
            Compilation.dump_argv(argv.items);
        }

-       var tree = switch (comp.config.c_frontend) {
-           .aro => tree: {
-               const aro = @import("aro");
-               const translate_c = @import("aro_translate_c.zig");
-               var aro_comp = aro.Compilation.init(comp.gpa);
-               defer aro_comp.deinit();
-               break :tree translate_c.translate(comp.gpa, &aro_comp, argv.items) catch |err| switch (err) {
-                   error.SemanticAnalyzeFail, error.FatalError => {
-                       // TODO convert these to zig errors
-                       aro.Diagnostics.render(&aro_comp, std.io.tty.detectConfig(std.io.getStdErr()));
-                       process.exit(1);
-                   },
-                   error.OutOfMemory => return error.OutOfMemory,
-                   error.StreamTooLong => fatal("StreamTooLong?", .{}),
-               };
+       const formatted = switch (comp.config.c_frontend) {
+           .aro => f: {
+               var stdout: []u8 = undefined;
+               try jitCmd(comp.gpa, arena, argv.items, .{
+                   .cmd_name = "aro_translate_c",
+                   .root_src_path = "aro_translate_c.zig",
+                   .depend_on_aro = true,
+                   .capture = &stdout,
+               });
+               break :f stdout;
            },
-           .clang => tree: {
+           .clang => f: {
                if (!build_options.have_llvm) unreachable;
                const translate_c = @import("translate_c.zig");
@ -4519,7 +4529,7 @@ fn cmdTranslateC(comp: *Compilation, arena: Allocator, fancy_output: ?*Compilati
                const c_headers_dir_path_z = try comp.zig_lib_directory.joinZ(arena, &[_][]const u8{"include"});
                var errors = std.zig.ErrorBundle.empty;
-               break :tree translate_c.translate(
+               var tree = translate_c.translate(
                    comp.gpa,
                    new_argv.ptr,
                    new_argv.ptr + new_argv.len,
@ -4537,9 +4547,10 @@ fn cmdTranslateC(comp: *Compilation, arena: Allocator, fancy_output: ?*Compilati
                    }
                },
            };
+           defer tree.deinit(comp.gpa);
+           break :f try tree.render(arena);
        },
    };
-   defer tree.deinit(comp.gpa);

    if (out_dep_path) |dep_file_path| {
        const dep_basename = fs.path.basename(dep_file_path);
@ -4560,9 +4571,6 @@ fn cmdTranslateC(comp: *Compilation, arena: Allocator, fancy_output: ?*Compilati
        var zig_file = try o_dir.createFile(translated_zig_basename, .{});
        defer zig_file.close();

-       const formatted = try tree.render(comp.gpa);
-       defer comp.gpa.free(formatted);

        try zig_file.writeAll(formatted);

        man.writeManifest() catch |err| warn("failed to write cache manifest: {s}", .{
@ -5522,13 +5530,19 @@ fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !void {
    }
}
const JitCmdOptions = struct {
cmd_name: []const u8,
root_src_path: []const u8,
prepend_zig_lib_dir_path: bool = false,
depend_on_aro: bool = false,
capture: ?*[]u8 = null,
};
fn jitCmd(
    gpa: Allocator,
    arena: Allocator,
    args: []const []const u8,
-   cmd_name: []const u8,
-   root_src_path: []const u8,
-   prepend_zig_lib_dir_path: bool,
+   options: JitCmdOptions,
) !void {
    const color: Color = .auto;
@ -5540,7 +5554,7 @@ fn jitCmd(
    };

    const exe_basename = try std.zig.binNameAlloc(arena, .{
-       .root_name = cmd_name,
+       .root_name = options.cmd_name,
        .target = resolved_target.result,
        .output_mode = .Exe,
    });
@ -5595,7 +5609,7 @@ fn jitCmd(
            .root_dir = zig_lib_directory,
            .sub_path = "compiler",
        },
-       .root_src_path = root_src_path,
+       .root_src_path = options.root_src_path,
    };

    const config = try Compilation.Config.resolve(.{
@ -5623,11 +5637,35 @@ fn jitCmd(
        .builtin_mod = null,
    });
if (options.depend_on_aro) {
const aro_mod = try Package.Module.create(arena, .{
.global_cache_directory = global_cache_directory,
.paths = .{
.root = .{
.root_dir = zig_lib_directory,
.sub_path = "compiler/aro",
},
.root_src_path = "aro.zig",
},
.fully_qualified_name = "aro",
.cc_argv = &.{},
.inherited = .{
.resolved_target = resolved_target,
.optimize_mode = optimize_mode,
.strip = strip,
},
.global = config,
.parent = null,
.builtin_mod = root_mod.getBuiltinDependency(),
});
try root_mod.deps.put(arena, "aro", aro_mod);
}
    const comp = Compilation.create(gpa, arena, .{
        .zig_lib_directory = zig_lib_directory,
        .local_cache_directory = global_cache_directory,
        .global_cache_directory = global_cache_directory,
-       .root_name = cmd_name,
+       .root_name = options.cmd_name,
        .config = config,
        .root_mod = root_mod,
        .main_mod = root_mod,
@ -5650,12 +5688,12 @@ fn jitCmd(
        child_argv.appendAssumeCapacity(exe_path);
    }

-   if (prepend_zig_lib_dir_path)
+   if (options.prepend_zig_lib_dir_path)
        child_argv.appendAssumeCapacity(zig_lib_directory.path.?);

    child_argv.appendSliceAssumeCapacity(args);

-   if (process.can_execv) {
+   if (process.can_execv and options.capture == null) {
        const err = process.execv(gpa, child_argv.items);
        const cmd = try std.mem.join(arena, " ", child_argv.items);
        fatal("the following command failed to execve with '{s}':\n{s}", .{
@ -5673,13 +5711,22 @@ fn jitCmd(
    var child = std.ChildProcess.init(child_argv.items, gpa);
    child.stdin_behavior = .Inherit;
-   child.stdout_behavior = .Inherit;
+   child.stdout_behavior = if (options.capture == null) .Inherit else .Pipe;
    child.stderr_behavior = .Inherit;

-   const term = try child.spawnAndWait();
+   try child.spawn();
+
+   if (options.capture) |ptr| {
+       ptr.* = try child.stdout.?.readToEndAlloc(arena, std.math.maxInt(u32));
+   }
+
+   const term = try child.wait();

    switch (term) {
        .Exited => |code| {
-           if (code == 0) return cleanExit();
+           if (code == 0) {
+               if (options.capture != null) return;
+               return cleanExit();
+           }
            const cmd = try std.mem.join(arena, " ", child_argv.items);
            fatal("the following build command failed with exit code {d}:\n{s}", .{ code, cmd });
        },

View File

@ -1,35 +0,0 @@
//! Stub implementation only used when bootstrapping stage2
//! Keep in sync with deps/aro/build/GenerateDef.zig
pub fn with(comptime Properties: type) type {
return struct {
tag: Tag = @enumFromInt(0),
properties: Properties = undefined,
pub const max_param_count = 1;
pub const longest_name = 0;
pub const data = [_]@This(){.{}};
pub inline fn fromName(_: []const u8) ?@This() {
return .{};
}
pub fn nameFromUniqueIndex(_: u16, _: []u8) []u8 {
return "";
}
pub fn uniqueIndex(_: []const u8) ?u16 {
return null;
}
pub const Tag = enum(u16) { _ };
pub fn nameFromTag(_: Tag) NameBuf {
return .{};
}
pub fn tagFromName(name: []const u8) ?Tag {
var res: u16 = 0;
for (name) |c| res +%= c;
return @enumFromInt(res);
}
pub const NameBuf = struct {
pub fn span(_: *const NameBuf) []const u8 {
return "";
}
};
};
}

View File

@ -1,508 +0,0 @@
//! Stub implementation only used when bootstrapping stage2
//! Keep in sync with deps/aro/build/GenerateDef.zig
pub fn with(comptime Properties: type) type {
return struct {
pub const Tag = enum {
todo,
error_directive,
warning_directive,
elif_without_if,
elif_after_else,
elifdef_without_if,
elifdef_after_else,
elifndef_without_if,
elifndef_after_else,
else_without_if,
else_after_else,
endif_without_if,
unknown_pragma,
line_simple_digit,
line_invalid_filename,
unterminated_conditional_directive,
invalid_preprocessing_directive,
macro_name_missing,
extra_tokens_directive_end,
expected_value_in_expr,
closing_paren,
to_match_paren,
to_match_brace,
to_match_bracket,
header_str_closing,
header_str_match,
string_literal_in_pp_expr,
float_literal_in_pp_expr,
defined_as_macro_name,
macro_name_must_be_identifier,
whitespace_after_macro_name,
hash_hash_at_start,
hash_hash_at_end,
pasting_formed_invalid,
missing_paren_param_list,
unterminated_macro_param_list,
invalid_token_param_list,
expected_comma_param_list,
hash_not_followed_param,
expected_filename,
empty_filename,
expected_invalid,
expected_eof,
expected_token,
expected_expr,
expected_integer_constant_expr,
missing_type_specifier,
missing_type_specifier_c23,
multiple_storage_class,
static_assert_failure,
static_assert_failure_message,
expected_type,
cannot_combine_spec,
duplicate_decl_spec,
restrict_non_pointer,
expected_external_decl,
expected_ident_or_l_paren,
missing_declaration,
func_not_in_root,
illegal_initializer,
extern_initializer,
spec_from_typedef,
param_before_var_args,
void_only_param,
void_param_qualified,
void_must_be_first_param,
invalid_storage_on_param,
threadlocal_non_var,
func_spec_non_func,
illegal_storage_on_func,
illegal_storage_on_global,
expected_stmt,
func_cannot_return_func,
func_cannot_return_array,
undeclared_identifier,
not_callable,
unsupported_str_cat,
static_func_not_global,
implicit_func_decl,
unknown_builtin,
implicit_builtin,
implicit_builtin_header_note,
expected_param_decl,
invalid_old_style_params,
expected_fn_body,
invalid_void_param,
unused_value,
continue_not_in_loop,
break_not_in_loop_or_switch,
unreachable_code,
duplicate_label,
previous_label,
undeclared_label,
case_not_in_switch,
duplicate_switch_case,
multiple_default,
previous_case,
expected_arguments,
expected_arguments_old,
expected_at_least_arguments,
invalid_static_star,
static_non_param,
array_qualifiers,
star_non_param,
variable_len_array_file_scope,
useless_static,
negative_array_size,
array_incomplete_elem,
array_func_elem,
static_non_outermost_array,
qualifier_non_outermost_array,
unterminated_macro_arg_list,
unknown_warning,
overflow,
int_literal_too_big,
indirection_ptr,
addr_of_rvalue,
addr_of_bitfield,
not_assignable,
ident_or_l_brace,
empty_enum,
redefinition,
previous_definition,
expected_identifier,
expected_str_literal,
expected_str_literal_in,
parameter_missing,
empty_record,
empty_record_size,
wrong_tag,
expected_parens_around_typename,
alignof_expr,
invalid_alignof,
invalid_sizeof,
macro_redefined,
generic_qual_type,
generic_array_type,
generic_func_type,
generic_duplicate,
generic_duplicate_here,
generic_duplicate_default,
generic_no_match,
escape_sequence_overflow,
invalid_universal_character,
incomplete_universal_character,
multichar_literal_warning,
invalid_multichar_literal,
wide_multichar_literal,
char_lit_too_wide,
char_too_large,
must_use_struct,
must_use_union,
must_use_enum,
redefinition_different_sym,
redefinition_incompatible,
redefinition_of_parameter,
invalid_bin_types,
comparison_ptr_int,
comparison_distinct_ptr,
incompatible_pointers,
invalid_argument_un,
incompatible_assign,
implicit_ptr_to_int,
invalid_cast_to_float,
invalid_cast_to_pointer,
invalid_cast_type,
qual_cast,
invalid_index,
invalid_subscript,
array_after,
array_before,
statement_int,
statement_scalar,
func_should_return,
incompatible_return,
incompatible_return_sign,
implicit_int_to_ptr,
func_does_not_return,
void_func_returns_value,
incompatible_arg,
incompatible_ptr_arg,
incompatible_ptr_arg_sign,
parameter_here,
atomic_array,
atomic_func,
atomic_incomplete,
addr_of_register,
variable_incomplete_ty,
parameter_incomplete_ty,
tentative_array,
deref_incomplete_ty_ptr,
alignas_on_func,
alignas_on_param,
minimum_alignment,
maximum_alignment,
negative_alignment,
align_ignored,
zero_align_ignored,
non_pow2_align,
pointer_mismatch,
static_assert_not_constant,
static_assert_missing_message,
pre_c23_compat,
unbound_vla,
array_too_large,
incompatible_ptr_init,
incompatible_ptr_init_sign,
incompatible_ptr_assign,
incompatible_ptr_assign_sign,
vla_init,
func_init,
incompatible_init,
empty_scalar_init,
excess_scalar_init,
excess_str_init,
excess_struct_init,
excess_array_init,
str_init_too_long,
arr_init_too_long,
invalid_typeof,
division_by_zero,
division_by_zero_macro,
builtin_choose_cond,
alignas_unavailable,
case_val_unavailable,
enum_val_unavailable,
incompatible_array_init,
array_init_str,
initializer_overrides,
previous_initializer,
invalid_array_designator,
negative_array_designator,
oob_array_designator,
invalid_field_designator,
no_such_field_designator,
empty_aggregate_init_braces,
ptr_init_discards_quals,
ptr_assign_discards_quals,
ptr_ret_discards_quals,
ptr_arg_discards_quals,
unknown_attribute,
ignored_attribute,
invalid_fallthrough,
cannot_apply_attribute_to_statement,
builtin_macro_redefined,
feature_check_requires_identifier,
missing_tok_builtin,
gnu_label_as_value,
expected_record_ty,
member_expr_not_ptr,
member_expr_ptr,
no_such_member,
malformed_warning_check,
invalid_computed_goto,
pragma_warning_message,
pragma_error_message,
pragma_message,
pragma_requires_string_literal,
poisoned_identifier,
pragma_poison_identifier,
pragma_poison_macro,
newline_eof,
empty_translation_unit,
omitting_parameter_name,
non_int_bitfield,
negative_bitwidth,
zero_width_named_field,
bitfield_too_big,
invalid_utf8,
implicitly_unsigned_literal,
invalid_preproc_operator,
invalid_preproc_expr_start,
c99_compat,
unexpected_character,
invalid_identifier_start_char,
unicode_zero_width,
unicode_homoglyph,
meaningless_asm_qual,
duplicate_asm_qual,
invalid_asm_str,
dollar_in_identifier_extension,
dollars_in_identifiers,
expanded_from_here,
skipping_macro_backtrace,
pragma_operator_string_literal,
unknown_gcc_pragma,
unknown_gcc_pragma_directive,
predefined_top_level,
incompatible_va_arg,
too_many_scalar_init_braces,
uninitialized_in_own_init,
gnu_statement_expression,
stmt_expr_not_allowed_file_scope,
gnu_imaginary_constant,
plain_complex,
complex_int,
qual_on_ret_type,
cli_invalid_standard,
cli_invalid_target,
cli_invalid_emulate,
cli_unknown_arg,
cli_error,
cli_unused_link_object,
cli_unknown_linker,
extra_semi,
func_field,
vla_field,
field_incomplete_ty,
flexible_in_union,
flexible_non_final,
flexible_in_empty,
duplicate_member,
binary_integer_literal,
gnu_va_macro,
builtin_must_be_called,
va_start_not_in_func,
va_start_fixed_args,
va_start_not_last_param,
attribute_not_enough_args,
attribute_too_many_args,
attribute_arg_invalid,
unknown_attr_enum,
attribute_requires_identifier,
declspec_not_enabled,
declspec_attr_not_supported,
deprecated_declarations,
deprecated_note,
unavailable,
unavailable_note,
warning_attribute,
error_attribute,
ignored_record_attr,
backslash_newline_escape,
array_size_non_int,
cast_to_smaller_int,
gnu_switch_range,
empty_case_range,
non_standard_escape_char,
invalid_pp_stringify_escape,
vla,
float_overflow_conversion,
float_out_of_range,
float_zero_conversion,
float_value_changed,
float_to_int,
const_decl_folded,
const_decl_folded_vla,
redefinition_of_typedef,
undefined_macro,
fn_macro_undefined,
preprocessing_directive_only,
missing_lparen_after_builtin,
offsetof_ty,
offsetof_incomplete,
offsetof_array,
pragma_pack_lparen,
pragma_pack_rparen,
pragma_pack_unknown_action,
pragma_pack_show,
pragma_pack_int,
pragma_pack_int_ident,
pragma_pack_undefined_pop,
pragma_pack_empty_stack,
cond_expr_type,
too_many_includes,
enumerator_too_small,
enumerator_too_large,
include_next,
include_next_outside_header,
enumerator_overflow,
enum_not_representable,
enum_too_large,
enum_fixed,
enum_prev_nonfixed,
enum_prev_fixed,
enum_different_explicit_ty,
enum_not_representable_fixed,
transparent_union_wrong_type,
transparent_union_one_field,
transparent_union_size,
transparent_union_size_note,
designated_init_invalid,
designated_init_needed,
ignore_common,
ignore_nocommon,
non_string_ignored,
local_variable_attribute,
ignore_cold,
ignore_hot,
ignore_noinline,
ignore_always_inline,
invalid_noreturn,
nodiscard_unused,
warn_unused_result,
invalid_vec_elem_ty,
vec_size_not_multiple,
invalid_imag,
invalid_real,
zero_length_array,
old_style_flexible_struct,
comma_deletion_va_args,
main_return_type,
expansion_to_defined,
invalid_int_suffix,
invalid_float_suffix,
invalid_octal_digit,
invalid_binary_digit,
exponent_has_no_digits,
hex_floating_constant_requires_exponent,
sizeof_returns_zero,
declspec_not_allowed_after_declarator,
declarator_name_tok,
type_not_supported_on_target,
bit_int,
unsigned_bit_int_too_small,
signed_bit_int_too_small,
bit_int_too_big,
keyword_macro,
ptr_arithmetic_incomplete,
callconv_not_supported,
pointer_arith_void,
sizeof_array_arg,
array_address_to_bool,
string_literal_to_bool,
constant_expression_conversion_not_allowed,
invalid_object_cast,
cli_invalid_fp_eval_method,
suggest_pointer_for_invalid_fp16,
bitint_suffix,
auto_type_extension,
auto_type_not_allowed,
auto_type_requires_initializer,
auto_type_requires_single_declarator,
auto_type_requires_plain_declarator,
invalid_cast_to_auto_type,
auto_type_from_bitfield,
array_of_auto_type,
auto_type_with_init_list,
missing_semicolon,
tentative_definition_incomplete,
forward_declaration_here,
gnu_union_cast,
invalid_union_cast,
cast_to_incomplete_type,
invalid_source_epoch,
fuse_ld_path,
invalid_rtlib,
unsupported_rtlib_gcc,
invalid_unwindlib,
incompatible_unwindlib,
gnu_asm_disabled,
extension_token_used,
complex_component_init,
complex_prefix_postfix_op,
not_floating_type,
argument_types_differ,
ms_search_rule,
ctrl_z_eof,
illegal_char_encoding_warning,
illegal_char_encoding_error,
ucn_basic_char_error,
ucn_basic_char_warning,
ucn_control_char_error,
ucn_control_char_warning,
c89_ucn_in_literal,
four_char_char_literal,
multi_char_char_literal,
missing_hex_escape,
unknown_escape_sequence,
attribute_requires_string,
unterminated_string_literal_warning,
unterminated_string_literal_error,
empty_char_literal_warning,
empty_char_literal_error,
unterminated_char_literal_warning,
unterminated_char_literal_error,
unterminated_comment,
def_no_proto_deprecated,
passing_args_to_kr,
unknown_type_name,
label_compound_end,
u8_char_lit,
malformed_embed_param,
malformed_embed_limit,
duplicate_embed_param,
unsupported_embed_param,
invalid_compound_literal_storage_class,
va_opt_lparen,
va_opt_rparen,
attribute_int_out_of_range,
identifier_not_normalized,
c23_auto_plain_declarator,
c23_auto_single_declarator,
c32_auto_requires_initializer,
c23_auto_scalar_init,
pub fn property(_: Tag) Properties {
return undefined;
}
};
};
}

View File

@ -1,10 +0,0 @@
//! Stub implementation only used when bootstrapping stage2
//! Keep in sync with deps/aro/build/GenerateDef.zig
pub fn with(comptime _: type) type {
return struct {
pub inline fn fromName(_: []const u8) ?@This() {
return null;
}
};
}

View File

@ -1 +0,0 @@
pub const version_str: []const u8 = "bootstrap-stub";

View File

@ -8,10 +8,10 @@ const CallingConvention = std.builtin.CallingConvention;
const clang = @import("clang.zig"); const clang = @import("clang.zig");
const aro = @import("aro"); const aro = @import("aro");
const CToken = aro.Tokenizer.Token; const CToken = aro.Tokenizer.Token;
const ast = @import("translate_c/ast.zig");
const Node = ast.Node; const Node = ast.Node;
const Tag = Node.Tag; const Tag = Node.Tag;
const common = @import("translate_c/common.zig"); const common = @import("aro_translate_c");
const ast = common.ast;
const Error = common.Error; const Error = common.Error;
const MacroProcessingError = common.MacroProcessingError; const MacroProcessingError = common.MacroProcessingError;
const TypeError = common.TypeError; const TypeError = common.TypeError;
@ -20,10 +20,8 @@ const SymbolTable = common.SymbolTable;
const AliasList = common.AliasList;
const ResultUsed = common.ResultUsed;
const Scope = common.ScopeExtra(Context, clang.QualType);
-// Maps macro parameter names to token position, for determining if different
-// identifiers refer to the same positional argument in different macros.
-const ArgsPositionMap = std.StringArrayHashMapUnmanaged(usize);
+const PatternList = common.PatternList;
+const MacroSlicer = common.MacroSlicer;

pub const Context = struct {
    gpa: mem.Allocator,
@ -5093,265 +5091,6 @@ pub fn failDecl(c: *Context, loc: clang.SourceLocation, name: []const u8, compti
    try c.global_scope.nodes.append(try Tag.warning.create(c.arena, location_comment));
}
pub const PatternList = struct {
patterns: []Pattern,
/// Templates must be function-like macros
/// first element is macro source, second element is the name of the function
/// in std.lib.zig.c_translation.Macros which implements it
const templates = [_][2][]const u8{
[2][]const u8{ "f_SUFFIX(X) (X ## f)", "F_SUFFIX" },
[2][]const u8{ "F_SUFFIX(X) (X ## F)", "F_SUFFIX" },
[2][]const u8{ "u_SUFFIX(X) (X ## u)", "U_SUFFIX" },
[2][]const u8{ "U_SUFFIX(X) (X ## U)", "U_SUFFIX" },
[2][]const u8{ "l_SUFFIX(X) (X ## l)", "L_SUFFIX" },
[2][]const u8{ "L_SUFFIX(X) (X ## L)", "L_SUFFIX" },
[2][]const u8{ "ul_SUFFIX(X) (X ## ul)", "UL_SUFFIX" },
[2][]const u8{ "uL_SUFFIX(X) (X ## uL)", "UL_SUFFIX" },
[2][]const u8{ "Ul_SUFFIX(X) (X ## Ul)", "UL_SUFFIX" },
[2][]const u8{ "UL_SUFFIX(X) (X ## UL)", "UL_SUFFIX" },
[2][]const u8{ "ll_SUFFIX(X) (X ## ll)", "LL_SUFFIX" },
[2][]const u8{ "LL_SUFFIX(X) (X ## LL)", "LL_SUFFIX" },
[2][]const u8{ "ull_SUFFIX(X) (X ## ull)", "ULL_SUFFIX" },
[2][]const u8{ "uLL_SUFFIX(X) (X ## uLL)", "ULL_SUFFIX" },
[2][]const u8{ "Ull_SUFFIX(X) (X ## Ull)", "ULL_SUFFIX" },
[2][]const u8{ "ULL_SUFFIX(X) (X ## ULL)", "ULL_SUFFIX" },
[2][]const u8{ "f_SUFFIX(X) X ## f", "F_SUFFIX" },
[2][]const u8{ "F_SUFFIX(X) X ## F", "F_SUFFIX" },
[2][]const u8{ "u_SUFFIX(X) X ## u", "U_SUFFIX" },
[2][]const u8{ "U_SUFFIX(X) X ## U", "U_SUFFIX" },
[2][]const u8{ "l_SUFFIX(X) X ## l", "L_SUFFIX" },
[2][]const u8{ "L_SUFFIX(X) X ## L", "L_SUFFIX" },
[2][]const u8{ "ul_SUFFIX(X) X ## ul", "UL_SUFFIX" },
[2][]const u8{ "uL_SUFFIX(X) X ## uL", "UL_SUFFIX" },
[2][]const u8{ "Ul_SUFFIX(X) X ## Ul", "UL_SUFFIX" },
[2][]const u8{ "UL_SUFFIX(X) X ## UL", "UL_SUFFIX" },
[2][]const u8{ "ll_SUFFIX(X) X ## ll", "LL_SUFFIX" },
[2][]const u8{ "LL_SUFFIX(X) X ## LL", "LL_SUFFIX" },
[2][]const u8{ "ull_SUFFIX(X) X ## ull", "ULL_SUFFIX" },
[2][]const u8{ "uLL_SUFFIX(X) X ## uLL", "ULL_SUFFIX" },
[2][]const u8{ "Ull_SUFFIX(X) X ## Ull", "ULL_SUFFIX" },
[2][]const u8{ "ULL_SUFFIX(X) X ## ULL", "ULL_SUFFIX" },
[2][]const u8{ "CAST_OR_CALL(X, Y) (X)(Y)", "CAST_OR_CALL" },
[2][]const u8{ "CAST_OR_CALL(X, Y) ((X)(Y))", "CAST_OR_CALL" },
[2][]const u8{
\\wl_container_of(ptr, sample, member) \
\\(__typeof__(sample))((char *)(ptr) - \
\\ offsetof(__typeof__(*sample), member))
,
"WL_CONTAINER_OF",
},
[2][]const u8{ "IGNORE_ME(X) ((void)(X))", "DISCARD" },
[2][]const u8{ "IGNORE_ME(X) (void)(X)", "DISCARD" },
[2][]const u8{ "IGNORE_ME(X) ((const void)(X))", "DISCARD" },
[2][]const u8{ "IGNORE_ME(X) (const void)(X)", "DISCARD" },
[2][]const u8{ "IGNORE_ME(X) ((volatile void)(X))", "DISCARD" },
[2][]const u8{ "IGNORE_ME(X) (volatile void)(X)", "DISCARD" },
[2][]const u8{ "IGNORE_ME(X) ((const volatile void)(X))", "DISCARD" },
[2][]const u8{ "IGNORE_ME(X) (const volatile void)(X)", "DISCARD" },
[2][]const u8{ "IGNORE_ME(X) ((volatile const void)(X))", "DISCARD" },
[2][]const u8{ "IGNORE_ME(X) (volatile const void)(X)", "DISCARD" },
};
/// Assumes that `ms` represents a tokenized function-like macro.
fn buildArgsHash(allocator: mem.Allocator, ms: MacroSlicer, hash: *ArgsPositionMap) MacroProcessingError!void {
assert(ms.tokens.len > 2);
assert(ms.tokens[0].id == .identifier or ms.tokens[0].id == .extended_identifier);
assert(ms.tokens[1].id == .l_paren);
var i: usize = 2;
while (true) : (i += 1) {
const token = ms.tokens[i];
switch (token.id) {
.r_paren => break,
.comma => continue,
.identifier, .extended_identifier => {
const identifier = ms.slice(token);
try hash.put(allocator, identifier, i);
},
else => return error.UnexpectedMacroToken,
}
}
}
const Pattern = struct {
tokens: []const CToken,
source: []const u8,
impl: []const u8,
args_hash: ArgsPositionMap,
fn init(self: *Pattern, allocator: mem.Allocator, template: [2][]const u8) Error!void {
const source = template[0];
const impl = template[1];
var tok_list = std.ArrayList(CToken).init(allocator);
defer tok_list.deinit();
try tokenizeMacro(source, &tok_list);
const tokens = try allocator.dupe(CToken, tok_list.items);
self.* = .{
.tokens = tokens,
.source = source,
.impl = impl,
.args_hash = .{},
};
const ms = MacroSlicer{ .source = source, .tokens = tokens };
buildArgsHash(allocator, ms, &self.args_hash) catch |err| switch (err) {
error.UnexpectedMacroToken => unreachable,
else => |e| return e,
};
}
fn deinit(self: *Pattern, allocator: mem.Allocator) void {
self.args_hash.deinit(allocator);
allocator.free(self.tokens);
}
/// This function assumes that `ms` has already been validated to contain a function-like
/// macro, and that the parsed template macro in `self` also contains a function-like
/// macro. Please review this logic carefully if changing that assumption. Two
/// function-like macros are considered equivalent if and only if they contain the same
/// list of tokens, modulo parameter names.
pub fn isEquivalent(self: Pattern, ms: MacroSlicer, args_hash: ArgsPositionMap) bool {
if (self.tokens.len != ms.tokens.len) return false;
if (args_hash.count() != self.args_hash.count()) return false;
var i: usize = 2;
while (self.tokens[i].id != .r_paren) : (i += 1) {}
const pattern_slicer = MacroSlicer{ .source = self.source, .tokens = self.tokens };
while (i < self.tokens.len) : (i += 1) {
const pattern_token = self.tokens[i];
const macro_token = ms.tokens[i];
if (pattern_token.id != macro_token.id) return false;
const pattern_bytes = pattern_slicer.slice(pattern_token);
const macro_bytes = ms.slice(macro_token);
switch (pattern_token.id) {
.identifier, .extended_identifier => {
const pattern_arg_index = self.args_hash.get(pattern_bytes);
const macro_arg_index = args_hash.get(macro_bytes);
if (pattern_arg_index == null and macro_arg_index == null) {
if (!mem.eql(u8, pattern_bytes, macro_bytes)) return false;
} else if (pattern_arg_index != null and macro_arg_index != null) {
if (pattern_arg_index.? != macro_arg_index.?) return false;
} else {
return false;
}
},
.string_literal, .char_literal, .pp_num => {
if (!mem.eql(u8, pattern_bytes, macro_bytes)) return false;
},
else => {
// other tags correspond to keywords and operators that do not contain a "payload"
// that can vary
},
}
}
return true;
}
};
pub fn init(allocator: mem.Allocator) Error!PatternList {
const patterns = try allocator.alloc(Pattern, templates.len);
for (templates, 0..) |template, i| {
try patterns[i].init(allocator, template);
}
return PatternList{ .patterns = patterns };
}
pub fn deinit(self: *PatternList, allocator: mem.Allocator) void {
for (self.patterns) |*pattern| pattern.deinit(allocator);
allocator.free(self.patterns);
}
pub fn match(self: PatternList, allocator: mem.Allocator, ms: MacroSlicer) Error!?Pattern {
var args_hash: ArgsPositionMap = .{};
defer args_hash.deinit(allocator);
buildArgsHash(allocator, ms, &args_hash) catch |err| switch (err) {
error.UnexpectedMacroToken => return null,
else => |e| return e,
};
for (self.patterns) |pattern| if (pattern.isEquivalent(ms, args_hash)) return pattern;
return null;
}
};
const MacroSlicer = struct {
source: []const u8,
tokens: []const CToken,
fn slice(self: MacroSlicer, token: CToken) []const u8 {
return self.source[token.start..token.end];
}
};
// Testing here instead of test/translate_c.zig allows us to also test that the
// mapped function exists in `std.zig.c_translation.Macros`
test "Macro matching" {
const helper = struct {
const MacroFunctions = std.zig.c_translation.Macros;
fn checkMacro(allocator: mem.Allocator, pattern_list: PatternList, source: []const u8, comptime expected_match: ?[]const u8) !void {
var tok_list = std.ArrayList(CToken).init(allocator);
defer tok_list.deinit();
try tokenizeMacro(source, &tok_list);
const macro_slicer = MacroSlicer{ .source = source, .tokens = tok_list.items };
const matched = try pattern_list.match(allocator, macro_slicer);
if (expected_match) |expected| {
try testing.expectEqualStrings(expected, matched.?.impl);
try testing.expect(@hasDecl(MacroFunctions, expected));
} else {
try testing.expectEqual(@as(@TypeOf(matched), null), matched);
}
}
};
const allocator = std.testing.allocator;
var pattern_list = try PatternList.init(allocator);
defer pattern_list.deinit(allocator);
try helper.checkMacro(allocator, pattern_list, "BAR(Z) (Z ## F)", "F_SUFFIX");
try helper.checkMacro(allocator, pattern_list, "BAR(Z) (Z ## U)", "U_SUFFIX");
try helper.checkMacro(allocator, pattern_list, "BAR(Z) (Z ## L)", "L_SUFFIX");
try helper.checkMacro(allocator, pattern_list, "BAR(Z) (Z ## LL)", "LL_SUFFIX");
try helper.checkMacro(allocator, pattern_list, "BAR(Z) (Z ## UL)", "UL_SUFFIX");
try helper.checkMacro(allocator, pattern_list, "BAR(Z) (Z ## ULL)", "ULL_SUFFIX");
try helper.checkMacro(allocator, pattern_list,
\\container_of(a, b, c) \
\\(__typeof__(b))((char *)(a) - \
\\ offsetof(__typeof__(*b), c))
, "WL_CONTAINER_OF");
try helper.checkMacro(allocator, pattern_list, "NO_MATCH(X, Y) (X + Y)", null);
try helper.checkMacro(allocator, pattern_list, "CAST_OR_CALL(X, Y) (X)(Y)", "CAST_OR_CALL");
try helper.checkMacro(allocator, pattern_list, "CAST_OR_CALL(X, Y) ((X)(Y))", "CAST_OR_CALL");
try helper.checkMacro(allocator, pattern_list, "IGNORE_ME(X) (void)(X)", "DISCARD");
try helper.checkMacro(allocator, pattern_list, "IGNORE_ME(X) ((void)(X))", "DISCARD");
try helper.checkMacro(allocator, pattern_list, "IGNORE_ME(X) (const void)(X)", "DISCARD");
try helper.checkMacro(allocator, pattern_list, "IGNORE_ME(X) ((const void)(X))", "DISCARD");
try helper.checkMacro(allocator, pattern_list, "IGNORE_ME(X) (volatile void)(X)", "DISCARD");
try helper.checkMacro(allocator, pattern_list, "IGNORE_ME(X) ((volatile void)(X))", "DISCARD");
try helper.checkMacro(allocator, pattern_list, "IGNORE_ME(X) (const volatile void)(X)", "DISCARD");
try helper.checkMacro(allocator, pattern_list, "IGNORE_ME(X) ((const volatile void)(X))", "DISCARD");
try helper.checkMacro(allocator, pattern_list, "IGNORE_ME(X) (volatile const void)(X)", "DISCARD");
try helper.checkMacro(allocator, pattern_list, "IGNORE_ME(X) ((volatile const void)(X))", "DISCARD");
}
const MacroCtx = struct {
    source: []const u8,
    list: []const CToken,
@ -5392,7 +5131,7 @@ const MacroCtx = struct {
    }

    fn makeSlicer(self: *const MacroCtx) MacroSlicer {
-       return MacroSlicer{ .source = self.source, .tokens = self.list };
+       return .{ .source = self.source, .tokens = self.list };
    }

    const MacroTranslateError = union(enum) {
@ -5432,26 +5171,6 @@ const MacroCtx = struct {
    }
};
fn tokenizeMacro(source: []const u8, tok_list: *std.ArrayList(CToken)) Error!void {
var tokenizer: aro.Tokenizer = .{
.buf = source,
.source = .unused,
.langopts = .{},
};
while (true) {
const tok = tokenizer.next();
switch (tok.id) {
.whitespace => continue,
.nl, .eof => {
try tok_list.append(tok);
break;
},
else => {},
}
try tok_list.append(tok);
}
}
fn getMacroText(unit: *const clang.ASTUnit, c: *const Context, macro: *const clang.MacroDefinitionRecord) ![]const u8 {
    const begin_loc = macro.getSourceRange_getBegin();
    const end_loc = clang.Lexer.getLocForEndOfToken(macro.getSourceRange_getEnd(), c.source_manager, unit);
@ -5491,7 +5210,7 @@ fn transPreprocessorEntities(c: *Context, unit: *clang.ASTUnit) Error!void {
        const source = try getMacroText(unit, c, macro);

-       try tokenizeMacro(source, &tok_list);
+       try common.tokenizeMacro(source, &tok_list);

        var macro_ctx = MacroCtx{
            .source = source,

View File

@ -1,322 +0,0 @@
const std = @import("std");
const ast = @import("ast.zig");
const Node = ast.Node;
const Tag = Node.Tag;
const CallingConvention = std.builtin.CallingConvention;
pub const Error = std.mem.Allocator.Error;
pub const MacroProcessingError = Error || error{UnexpectedMacroToken};
pub const TypeError = Error || error{UnsupportedType};
pub const TransError = TypeError || error{UnsupportedTranslation};
pub const SymbolTable = std.StringArrayHashMap(Node);
pub const AliasList = std.ArrayList(struct {
alias: []const u8,
name: []const u8,
});
pub const ResultUsed = enum {
used,
unused,
};
pub fn ScopeExtra(comptime Context: type, comptime Type: type) type {
return struct {
id: Id,
parent: ?*Scope,
const Scope = @This();
pub const Id = enum {
block,
root,
condition,
loop,
do_loop,
};
/// Used for the scope of condition expressions, for example `if (cond)`.
/// The block is lazily initialised because it is only needed for rare
/// cases of comma operators being used.
pub const Condition = struct {
base: Scope,
block: ?Block = null,
pub fn getBlockScope(self: *Condition, c: *Context) !*Block {
if (self.block) |*b| return b;
self.block = try Block.init(c, &self.base, true);
return &self.block.?;
}
pub fn deinit(self: *Condition) void {
if (self.block) |*b| b.deinit();
}
};
/// Represents an in-progress Node.Block. This struct is stack-allocated.
/// When it is deinitialized, it produces an Node.Block which is allocated
/// into the main arena.
pub const Block = struct {
base: Scope,
statements: std.ArrayList(Node),
variables: AliasList,
mangle_count: u32 = 0,
label: ?[]const u8 = null,
/// By default all variables are discarded, since we do not know in advance if they
/// will be used. This maps the variable's name to the Discard payload, so that if
/// the variable is subsequently referenced we can indicate that the discard should
/// be skipped during the intermediate AST -> Zig AST render step.
variable_discards: std.StringArrayHashMap(*ast.Payload.Discard),
/// When the block corresponds to a function, keep track of the return type
/// so that the return expression can be cast, if necessary
return_type: ?Type = null,
/// C static local variables are wrapped in a block-local struct. The struct
/// is named after the (mangled) variable name, the Zig variable within the
/// struct itself is given this name.
pub const static_inner_name = "static";
pub fn init(c: *Context, parent: *Scope, labeled: bool) !Block {
var blk = Block{
.base = .{
.id = .block,
.parent = parent,
},
.statements = std.ArrayList(Node).init(c.gpa),
.variables = AliasList.init(c.gpa),
.variable_discards = std.StringArrayHashMap(*ast.Payload.Discard).init(c.gpa),
};
if (labeled) {
blk.label = try blk.makeMangledName(c, "blk");
}
return blk;
}
pub fn deinit(self: *Block) void {
self.statements.deinit();
self.variables.deinit();
self.variable_discards.deinit();
self.* = undefined;
}
pub fn complete(self: *Block, c: *Context) !Node {
if (self.base.parent.?.id == .do_loop) {
// We reserve 1 extra statement if the parent is a do_loop. This is in case of
// do while, we want to put `if (cond) break;` at the end.
const alloc_len = self.statements.items.len + @intFromBool(self.base.parent.?.id == .do_loop);
var stmts = try c.arena.alloc(Node, alloc_len);
stmts.len = self.statements.items.len;
@memcpy(stmts[0..self.statements.items.len], self.statements.items);
return Tag.block.create(c.arena, .{
.label = self.label,
.stmts = stmts,
});
}
if (self.statements.items.len == 0) return Tag.empty_block.init();
return Tag.block.create(c.arena, .{
.label = self.label,
.stmts = try c.arena.dupe(Node, self.statements.items),
});
}
/// Given the desired name, return a name that does not shadow anything from outer scopes.
/// Inserts the returned name into the scope.
/// The name will not be visible to callers of getAlias.
pub fn reserveMangledName(scope: *Block, c: *Context, name: []const u8) ![]const u8 {
return scope.createMangledName(c, name, true);
}
/// Same as reserveMangledName, but enables the alias immediately.
pub fn makeMangledName(scope: *Block, c: *Context, name: []const u8) ![]const u8 {
return scope.createMangledName(c, name, false);
}
pub fn createMangledName(scope: *Block, c: *Context, name: []const u8, reservation: bool) ![]const u8 {
const name_copy = try c.arena.dupe(u8, name);
var proposed_name = name_copy;
while (scope.contains(proposed_name)) {
scope.mangle_count += 1;
proposed_name = try std.fmt.allocPrint(c.arena, "{s}_{d}", .{ name, scope.mangle_count });
}
const new_mangle = try scope.variables.addOne();
if (reservation) {
new_mangle.* = .{ .name = name_copy, .alias = name_copy };
} else {
new_mangle.* = .{ .name = name_copy, .alias = proposed_name };
}
return proposed_name;
}
pub fn getAlias(scope: *Block, name: []const u8) []const u8 {
for (scope.variables.items) |p| {
if (std.mem.eql(u8, p.name, name))
return p.alias;
}
return scope.base.parent.?.getAlias(name);
}
pub fn localContains(scope: *Block, name: []const u8) bool {
for (scope.variables.items) |p| {
if (std.mem.eql(u8, p.alias, name))
return true;
}
return false;
}
pub fn contains(scope: *Block, name: []const u8) bool {
if (scope.localContains(name))
return true;
return scope.base.parent.?.contains(name);
}
pub fn discardVariable(scope: *Block, c: *Context, name: []const u8) Error!void {
const name_node = try Tag.identifier.create(c.arena, name);
const discard = try Tag.discard.create(c.arena, .{ .should_skip = false, .value = name_node });
try scope.statements.append(discard);
try scope.variable_discards.putNoClobber(name, discard.castTag(.discard).?);
}
};
pub const Root = struct {
base: Scope,
sym_table: SymbolTable,
macro_table: SymbolTable,
blank_macros: std.StringArrayHashMap(void),
context: *Context,
nodes: std.ArrayList(Node),
pub fn init(c: *Context) Root {
return .{
.base = .{
.id = .root,
.parent = null,
},
.sym_table = SymbolTable.init(c.gpa),
.macro_table = SymbolTable.init(c.gpa),
.blank_macros = std.StringArrayHashMap(void).init(c.gpa),
.context = c,
.nodes = std.ArrayList(Node).init(c.gpa),
};
}
pub fn deinit(scope: *Root) void {
scope.sym_table.deinit();
scope.macro_table.deinit();
scope.blank_macros.deinit();
scope.nodes.deinit();
}
/// Check if the global scope contains this name, without looking into the "future", e.g.
/// ignore the preprocessed decl and macro names.
pub fn containsNow(scope: *Root, name: []const u8) bool {
return scope.sym_table.contains(name) or scope.macro_table.contains(name);
}
/// Check if the global scope contains the name, includes all decls that haven't been translated yet.
pub fn contains(scope: *Root, name: []const u8) bool {
return scope.containsNow(name) or scope.context.global_names.contains(name) or scope.context.weak_global_names.contains(name);
}
};
pub fn findBlockScope(inner: *Scope, c: *Context) !*Scope.Block {
var scope = inner;
while (true) {
switch (scope.id) {
.root => unreachable,
.block => return @fieldParentPtr(Block, "base", scope),
.condition => return @fieldParentPtr(Condition, "base", scope).getBlockScope(c),
else => scope = scope.parent.?,
}
}
}
pub fn findBlockReturnType(inner: *Scope) Type {
var scope = inner;
while (true) {
switch (scope.id) {
.root => unreachable,
.block => {
const block = @fieldParentPtr(Block, "base", scope);
if (block.return_type) |ty| return ty;
scope = scope.parent.?;
},
else => scope = scope.parent.?,
}
}
}
pub fn getAlias(scope: *Scope, name: []const u8) []const u8 {
return switch (scope.id) {
.root => return name,
.block => @fieldParentPtr(Block, "base", scope).getAlias(name),
.loop, .do_loop, .condition => scope.parent.?.getAlias(name),
};
}
pub fn contains(scope: *Scope, name: []const u8) bool {
return switch (scope.id) {
.root => @fieldParentPtr(Root, "base", scope).contains(name),
.block => @fieldParentPtr(Block, "base", scope).contains(name),
.loop, .do_loop, .condition => scope.parent.?.contains(name),
};
}
pub fn getBreakableScope(inner: *Scope) *Scope {
var scope = inner;
while (true) {
switch (scope.id) {
.root => unreachable,
.loop, .do_loop => return scope,
else => scope = scope.parent.?,
}
}
}
/// Appends a node to the first block scope if inside a function, or to the root tree if not.
pub fn appendNode(inner: *Scope, node: Node) !void {
var scope = inner;
while (true) {
switch (scope.id) {
.root => {
const root = @fieldParentPtr(Root, "base", scope);
return root.nodes.append(node);
},
.block => {
const block = @fieldParentPtr(Block, "base", scope);
return block.statements.append(node);
},
else => scope = scope.parent.?,
}
}
}
pub fn skipVariableDiscard(inner: *Scope, name: []const u8) void {
if (true) {
// TODO: due to 'local variable is never mutated' errors, we can
// only skip discards if a variable is used as an lvalue, which
// we don't currently have detection for in translate-c.
// Once #17584 is completed, perhaps we can do away with this
// logic entirely, and instead rely on render to fixup code.
return;
}
var scope = inner;
while (true) {
switch (scope.id) {
.root => return,
.block => {
const block = @fieldParentPtr(Block, "base", scope);
if (block.variable_discards.get(name)) |discard| {
discard.data.should_skip = true;
return;
}
},
else => {},
}
scope = scope.parent.?;
}
}
};
}
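
// Illustrative sketch (hypothetical, not from this commit): how a frontend
// instantiates ScopeExtra. The real Context types live in translate_c.zig and
// the aro-based translator; `ExampleCtx` only carries the fields the scope
// helpers actually touch, and `u32` stands in for the frontend's C type handle
// (clang.QualType on the clang path, aro's Type on the aro path).
const ExampleCtx = struct {
    gpa: std.mem.Allocator,
    arena: std.mem.Allocator,
    global_names: std.StringArrayHashMapUnmanaged(void) = .{},
    weak_global_names: std.StringArrayHashMapUnmanaged(void) = .{},
};
const ExampleScope = ScopeExtra(ExampleCtx, u32);

test "ScopeExtra mangles shadowed names (illustrative)" {
    var arena_state = std.heap.ArenaAllocator.init(std.testing.allocator);
    defer arena_state.deinit();
    var ctx: ExampleCtx = .{
        .gpa = std.testing.allocator,
        .arena = arena_state.allocator(),
    };

    var root = ExampleScope.Root.init(&ctx);
    defer root.deinit();

    var block = try ExampleScope.Block.init(&ctx, &root.base, false);
    defer block.deinit();

    // The first "x" keeps its name; a second "x" in the same block is renamed.
    try std.testing.expectEqualStrings("x", try block.makeMangledName(&ctx, "x"));
    try std.testing.expectEqualStrings("x_1", try block.makeMangledName(&ctx, "x"));
}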