wasm linker: implement @tagName functions when tags are autoassigned

Andrew Kelley 2025-01-11 22:12:43 -08:00
parent d0d0847cd0
commit b5261599d7
3 changed files with 217 additions and 38 deletions
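
(Illustration, not part of the diff.) The case this commit targets: when an enum's tag values are auto-assigned (0, 1, 2, ...), `@tagName` can be lowered to a direct index into a table of name slices, which is what the synthesized linker function implements. A minimal Zig sketch of the user-facing behavior:

const std = @import("std");

// Auto-assigned tags: .red = 0, .green = 1, .blue = 2.
const Color = enum { red, green, blue };

fn colorName(c: Color) [:0]const u8 {
    // With auto-assigned tags, the tag's integer value doubles as an index
    // into a table of name slices, so no branching is needed.
    return @tagName(c);
}

pub fn main() void {
    std.debug.print("{s}\n", .{colorName(.green)}); // prints "green"
}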

View File

@@ -19,8 +19,8 @@ const Ast = std.zig.Ast;
 const Zcu = @This();
 const Compilation = @import("Compilation.zig");
 const Cache = std.Build.Cache;
-const Value = @import("Value.zig");
-const Type = @import("Type.zig");
+pub const Value = @import("Value.zig");
+pub const Type = @import("Type.zig");
 const Package = @import("Package.zig");
 const link = @import("link.zig");
 const Air = @import("Air.zig");

View File

@@ -46,7 +46,6 @@ const lldMain = @import("../main.zig").lldMain;
 const trace = @import("../tracy.zig").trace;
 const wasi_libc = @import("../wasi_libc.zig");
 const Value = @import("../Value.zig");
-const ZcuType = @import("../Type.zig");
 base: link.File,
 /// Null-terminated strings, indexes have type String and string_table provides
@@ -190,6 +189,7 @@ navs_exe: std.AutoArrayHashMapUnmanaged(InternPool.Nav.Index, ZcuDataExe) = .emp
 uavs_obj: std.AutoArrayHashMapUnmanaged(InternPool.Index, ZcuDataObj) = .empty,
 /// Tracks ref count to optimize LEB encodings for UAV references.
 uavs_exe: std.AutoArrayHashMapUnmanaged(InternPool.Index, ZcuDataExe) = .empty,
+/// When the key is an enum type, this represents a `@tagName` function.
 zcu_funcs: std.AutoArrayHashMapUnmanaged(InternPool.Index, ZcuFunc) = .empty,
 nav_exports: std.AutoArrayHashMapUnmanaged(NavExport, Zcu.Export.Index) = .empty,
 uav_exports: std.AutoArrayHashMapUnmanaged(UavExport, Zcu.Export.Index) = .empty,
@@ -269,6 +269,7 @@ object_indirect_function_import_set: std.AutoArrayHashMapUnmanaged(String, void)
 object_indirect_function_set: std.AutoArrayHashMapUnmanaged(ObjectFunctionIndex, void) = .empty,
 error_name_table_ref_count: u32 = 0,
+tag_name_table_ref_count: u32 = 0,
 /// Set to true if any `GLOBAL_INDEX` relocation is encountered with
 /// `SymbolFlags.tls` set to true. This is for objects only; final
@@ -294,6 +295,14 @@ error_name_bytes: std.ArrayListUnmanaged(u8) = .empty,
 /// is stored. No need to serialize; trivially reconstructed.
 error_name_offs: std.ArrayListUnmanaged(u32) = .empty,
+tag_name_bytes: std.ArrayListUnmanaged(u8) = .empty,
+tag_name_offs: std.ArrayListUnmanaged(u32) = .empty,
+pub const TagNameOff = extern struct {
+    off: u32,
+    len: u32,
+};
 /// Index into `Wasm.zcu_indirect_function_set`.
 pub const ZcuIndirectFunctionSetIndex = enum(u32) {
     _,
@@ -857,8 +866,16 @@ const ZcuDataStarts = struct {
 }
 };
-pub const ZcuFunc = extern struct {
+pub const ZcuFunc = union {
     function: CodeGen.Function,
+    tag_name: TagName,
+    pub const TagName = extern struct {
+        symbol_name: String,
+        type_index: FunctionType.Index,
+        /// Index into `Wasm.tag_name_offs`.
+        table_index: u32,
+    };
     /// Index into `Wasm.zcu_funcs`.
     /// Note that swapRemove is sometimes performed on `zcu_funcs`.
@@ -876,20 +893,35 @@ pub const ZcuFunc = extern struct {
 pub fn name(i: @This(), wasm: *const Wasm) [:0]const u8 {
     const zcu = wasm.base.comp.zcu.?;
     const ip = &zcu.intern_pool;
-    const func = ip.toFunc(i.key(wasm).*);
-    const nav = ip.getNav(func.owner_nav);
-    return nav.fqn.toSlice(ip);
+    const ip_index = i.key(wasm).*;
+    switch (ip.indexToKey(ip_index)) {
+        .func => |func| {
+            const nav = ip.getNav(func.owner_nav);
+            return nav.fqn.toSlice(ip);
+        },
+        .enum_type => {
+            return i.value(wasm).tag_name.symbol_name.slice(wasm);
+        },
+        else => unreachable,
+    }
 }
-pub fn typeIndex(i: @This(), wasm: *Wasm) ?FunctionType.Index {
+pub fn typeIndex(i: @This(), wasm: *Wasm) FunctionType.Index {
     const comp = wasm.base.comp;
     const zcu = comp.zcu.?;
     const target = &comp.root_mod.resolved_target.result;
     const ip = &zcu.intern_pool;
-    const func = ip.toFunc(i.key(wasm).*);
-    const fn_ty = zcu.navValue(func.owner_nav).typeOf(zcu);
-    const fn_info = zcu.typeToFunc(fn_ty).?;
-    return wasm.getExistingFunctionType(fn_info.cc, fn_info.param_types.get(ip), .fromInterned(fn_info.return_type), target);
+    switch (ip.indexToKey(i.key(wasm).*)) {
+        .func => |func| {
+            const fn_ty = zcu.navValue(func.owner_nav).typeOf(zcu);
+            const fn_info = zcu.typeToFunc(fn_ty).?;
+            return wasm.getExistingFunctionType(fn_info.cc, fn_info.param_types.get(ip), .fromInterned(fn_info.return_type), target).?;
+        },
+        .enum_type => {
+            return i.value(wasm).tag_name.type_index;
+        },
+        else => unreachable,
+    }
 }
 };
 };
@@ -988,8 +1020,12 @@ pub const FunctionImport = extern struct {
     return fromIpIndex(wasm, ip.getNav(nav_index).status.fully_resolved.val);
 }
+pub fn fromZcuFunc(wasm: *const Wasm, i: ZcuFunc.Index) Resolution {
+    return pack(wasm, .{ .zcu_func = i });
+}
 pub fn fromIpIndex(wasm: *const Wasm, ip_index: InternPool.Index) Resolution {
-    return pack(wasm, .{ .zcu_func = @enumFromInt(wasm.zcu_funcs.getIndex(ip_index).?) });
+    return fromZcuFunc(wasm, @enumFromInt(wasm.zcu_funcs.getIndex(ip_index).?));
 }
 pub fn fromObjectFunction(wasm: *const Wasm, object_function: ObjectFunctionIndex) Resolution {
@@ -1012,7 +1048,7 @@ pub const FunctionImport = extern struct {
         => getExistingFuncType2(wasm, &.{}, &.{}),
         .__wasm_init_tls => getExistingFuncType2(wasm, &.{.i32}, &.{}),
         .object_function => |i| i.ptr(wasm).type_index,
-        .zcu_func => |i| i.typeIndex(wasm).?,
+        .zcu_func => |i| i.typeIndex(wasm),
     };
 }
@@ -1717,6 +1753,10 @@ pub const DataPayload = extern struct {
 pub const DataSegmentId = enum(u32) {
     __zig_error_names,
     __zig_error_name_table,
+    /// All name string bytes for all `@tagName` implementations, concatenated together.
+    __zig_tag_names,
+    /// All tag name slices for all `@tagName` implementations, concatenated together.
+    __zig_tag_name_table,
     /// This and `__heap_end` are better retrieved via a global, but there is
     /// some suboptimal code out there (wasi libc) that additionally needs them
     /// as data symbols.
@@ -1742,6 +1782,8 @@ pub const DataSegmentId = enum(u32) {
 pub const Unpacked = union(enum) {
     __zig_error_names,
     __zig_error_name_table,
+    __zig_tag_names,
+    __zig_tag_name_table,
     __heap_base,
     __heap_end,
     object: ObjectDataSegment.Index,
@@ -1755,6 +1797,8 @@ pub const DataSegmentId = enum(u32) {
 return switch (unpacked) {
     .__zig_error_names => .__zig_error_names,
     .__zig_error_name_table => .__zig_error_name_table,
+    .__zig_tag_names => .__zig_tag_names,
+    .__zig_tag_name_table => .__zig_tag_name_table,
     .__heap_base => .__heap_base,
     .__heap_end => .__heap_end,
     .object => |i| @enumFromInt(first_object + @intFromEnum(i)),
@@ -1768,6 +1812,8 @@ pub const DataSegmentId = enum(u32) {
 return switch (id) {
     .__zig_error_names => .__zig_error_names,
     .__zig_error_name_table => .__zig_error_name_table,
+    .__zig_tag_names => .__zig_tag_names,
+    .__zig_tag_name_table => .__zig_tag_name_table,
     .__heap_base => .__heap_base,
     .__heap_end => .__heap_end,
     _ => {
@@ -1815,7 +1861,14 @@ pub const DataSegmentId = enum(u32) {
 pub fn category(id: DataSegmentId, wasm: *const Wasm) Category {
     return switch (unpack(id, wasm)) {
-        .__zig_error_names, .__zig_error_name_table, .__heap_base, .__heap_end => .data,
+        .__zig_error_names,
+        .__zig_error_name_table,
+        .__zig_tag_names,
+        .__zig_tag_name_table,
+        .__heap_base,
+        .__heap_end,
+        => .data,
         .object => |i| {
             const ptr = i.ptr(wasm);
             if (ptr.flags.tls) return .tls;
@@ -1836,7 +1889,14 @@ pub const DataSegmentId = enum(u32) {
 pub fn isTls(id: DataSegmentId, wasm: *const Wasm) bool {
     return switch (unpack(id, wasm)) {
-        .__zig_error_names, .__zig_error_name_table, .__heap_base, .__heap_end => false,
+        .__zig_error_names,
+        .__zig_error_name_table,
+        .__zig_tag_names,
+        .__zig_tag_name_table,
+        .__heap_base,
+        .__heap_end,
+        => false,
         .object => |i| i.ptr(wasm).flags.tls,
         .uav_exe, .uav_obj => false,
         inline .nav_exe, .nav_obj => |i| {
@@ -1854,7 +1914,16 @@ pub const DataSegmentId = enum(u32) {
 pub fn name(id: DataSegmentId, wasm: *const Wasm) []const u8 {
     return switch (unpack(id, wasm)) {
-        .__zig_error_names, .__zig_error_name_table, .uav_exe, .uav_obj, .__heap_base, .__heap_end => ".data",
+        .__zig_error_names,
+        .__zig_error_name_table,
+        .__zig_tag_names,
+        .__zig_tag_name_table,
+        .uav_exe,
+        .uav_obj,
+        .__heap_base,
+        .__heap_end,
+        => ".data",
         .object => |i| i.ptr(wasm).name.unwrap().?.slice(wasm),
         inline .nav_exe, .nav_obj => |i| {
             const zcu = wasm.base.comp.zcu.?;
@@ -1867,14 +1936,14 @@ pub const DataSegmentId = enum(u32) {
 pub fn alignment(id: DataSegmentId, wasm: *const Wasm) Alignment {
     return switch (unpack(id, wasm)) {
-        .__zig_error_names => .@"1",
-        .__zig_error_name_table, .__heap_base, .__heap_end => wasm.pointerAlignment(),
+        .__zig_error_names, .__zig_tag_names => .@"1",
+        .__zig_error_name_table, .__zig_tag_name_table, .__heap_base, .__heap_end => wasm.pointerAlignment(),
         .object => |i| i.ptr(wasm).flags.alignment,
         inline .uav_exe, .uav_obj => |i| {
            const zcu = wasm.base.comp.zcu.?;
            const ip = &zcu.intern_pool;
            const ip_index = i.key(wasm).*;
-           const ty: ZcuType = .fromInterned(ip.typeOf(ip_index));
+           const ty: Zcu.Type = .fromInterned(ip.typeOf(ip_index));
            const result = ty.abiAlignment(zcu);
            assert(result != .none);
            return result;
@@ -1885,7 +1954,7 @@ pub const DataSegmentId = enum(u32) {
            const nav = ip.getNav(i.key(wasm).*);
            const explicit = nav.getAlignment();
            if (explicit != .none) return explicit;
-           const ty: ZcuType = .fromInterned(nav.typeOf(ip));
+           const ty: Zcu.Type = .fromInterned(nav.typeOf(ip));
            const result = ty.abiAlignment(zcu);
            assert(result != .none);
            return result;
@@ -1897,6 +1966,8 @@ pub const DataSegmentId = enum(u32) {
 return switch (unpack(id, wasm)) {
     .__zig_error_names => @intCast(wasm.error_name_offs.items.len),
     .__zig_error_name_table => wasm.error_name_table_ref_count,
+    .__zig_tag_names => @intCast(wasm.tag_name_offs.items.len),
+    .__zig_tag_name_table => wasm.tag_name_table_ref_count,
     .object, .uav_obj, .nav_obj, .__heap_base, .__heap_end => 0,
     inline .uav_exe, .nav_exe => |i| i.value(wasm).count,
 };
@@ -1906,7 +1977,14 @@ pub const DataSegmentId = enum(u32) {
     const comp = wasm.base.comp;
     if (comp.config.import_memory and !id.isBss(wasm)) return true;
     return switch (unpack(id, wasm)) {
-        .__zig_error_names, .__zig_error_name_table, .__heap_base, .__heap_end => false,
+        .__zig_error_names,
+        .__zig_error_name_table,
+        .__zig_tag_names,
+        .__zig_tag_name_table,
+        .__heap_base,
+        .__heap_end,
+        => false,
         .object => |i| i.ptr(wasm).flags.is_passive,
         .uav_exe, .uav_obj, .nav_exe, .nav_obj => false,
     };
@@ -1914,7 +1992,14 @@ pub const DataSegmentId = enum(u32) {
 pub fn isEmpty(id: DataSegmentId, wasm: *const Wasm) bool {
     return switch (unpack(id, wasm)) {
-        .__zig_error_names, .__zig_error_name_table, .__heap_base, .__heap_end => false,
+        .__zig_error_names,
+        .__zig_error_name_table,
+        .__zig_tag_names,
+        .__zig_tag_name_table,
+        .__heap_base,
+        .__heap_end,
+        => false,
         .object => |i| i.ptr(wasm).payload.off == .none,
         inline .uav_exe, .uav_obj, .nav_exe, .nav_obj => |i| i.value(wasm).code.off == .none,
     };
@@ -1927,9 +2012,17 @@ pub const DataSegmentId = enum(u32) {
             const comp = wasm.base.comp;
             const zcu = comp.zcu.?;
             const errors_len = wasm.error_name_offs.items.len;
-            const elem_size = ZcuType.slice_const_u8_sentinel_0.abiSize(zcu);
+            const elem_size = Zcu.Type.slice_const_u8_sentinel_0.abiSize(zcu);
             return @intCast(errors_len * elem_size);
         },
+        .__zig_tag_names => @intCast(wasm.tag_name_bytes.items.len),
+        .__zig_tag_name_table => {
+            const comp = wasm.base.comp;
+            const zcu = comp.zcu.?;
+            const table_len = wasm.tag_name_offs.items.len;
+            const elem_size = Zcu.Type.slice_const_u8_sentinel_0.abiSize(zcu);
+            return @intCast(table_len * elem_size);
+        },
         .__heap_base, .__heap_end => wasm.pointerSize(),
         .object => |i| i.ptr(wasm).payload.len,
         inline .uav_exe, .uav_obj, .nav_exe, .nav_obj => |i| i.value(wasm).code.len,
@@ -3052,6 +3145,8 @@ pub fn deinit(wasm: *Wasm) void {
     wasm.error_name_bytes.deinit(gpa);
     wasm.error_name_offs.deinit(gpa);
+    wasm.tag_name_bytes.deinit(gpa);
+    wasm.tag_name_offs.deinit(gpa);
     wasm.missing_exports.deinit(gpa);
 }
@@ -4197,7 +4292,7 @@ pub fn internFunctionType(
     wasm: *Wasm,
     cc: std.builtin.CallingConvention,
     params: []const InternPool.Index,
-    return_type: ZcuType,
+    return_type: Zcu.Type,
     target: *const std.Target,
 ) Allocator.Error!FunctionType.Index {
     try convertZcuFnType(wasm.base.comp, cc, params, return_type, target, &wasm.params_scratch, &wasm.returns_scratch);
@@ -4211,7 +4306,7 @@ pub fn getExistingFunctionType(
     wasm: *Wasm,
     cc: std.builtin.CallingConvention,
     params: []const InternPool.Index,
-    return_type: ZcuType,
+    return_type: Zcu.Type,
     target: *const std.Target,
 ) ?FunctionType.Index {
     convertZcuFnType(wasm.base.comp, cc, params, return_type, target, &wasm.params_scratch, &wasm.returns_scratch) catch |err| switch (err) {
@@ -4395,7 +4490,7 @@ fn convertZcuFnType(
     comp: *Compilation,
     cc: std.builtin.CallingConvention,
     params: []const InternPool.Index,
-    return_type: ZcuType,
+    return_type: Zcu.Type,
     target: *const std.Target,
     params_buffer: *std.ArrayListUnmanaged(std.wasm.Valtype),
     returns_buffer: *std.ArrayListUnmanaged(std.wasm.Valtype),
@@ -4423,7 +4518,7 @@ fn convertZcuFnType(
     // param types
     for (params) |param_type_ip| {
-        const param_type = ZcuType.fromInterned(param_type_ip);
+        const param_type = Zcu.Type.fromInterned(param_type_ip);
         if (!param_type.hasRuntimeBitsIgnoreComptime(zcu)) continue;
         switch (cc) {

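Layout note (an illustrative sketch, not code from the commit): `__zig_tag_names` holds every tag name as NUL-terminated bytes concatenated together, and `__zig_tag_name_table` holds one pointer+length slice per tag pointing into those bytes. The hypothetical helper below mirrors the bookkeeping Flush.zig performs on `tag_name_bytes`/`tag_name_offs`:

const std = @import("std");

// Hypothetical helper (name invented): record each name's starting offset,
// then append the name bytes including the trailing 0 sentinel.
fn appendTagNames(
    gpa: std.mem.Allocator,
    tag_name_bytes: *std.ArrayListUnmanaged(u8),
    tag_name_offs: *std.ArrayListUnmanaged(u32),
    names: []const [:0]const u8,
) !void {
    for (names) |name| {
        try tag_name_offs.append(gpa, @intCast(tag_name_bytes.items.len));
        try tag_name_bytes.appendSlice(gpa, name[0 .. name.len + 1]);
    }
}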
View File

@@ -109,6 +109,7 @@ pub fn finish(f: *Flush, wasm: *Wasm) !void {
 const entry_name = if (wasm.entry_resolution.isNavOrUnresolved(wasm)) wasm.entry_name else .none;
 // Detect any intrinsics that were called; they need to have dependencies on the symbols marked.
+// Likewise detect `@tagName` calls so those functions can be included in the output and synthesized.
 for (wasm.mir_instructions.items(.tag), wasm.mir_instructions.items(.data)) |tag, *data| switch (tag) {
     .call_intrinsic => {
         const symbol_name = try wasm.internString(@tagName(data.intrinsic));
@@ -119,6 +120,28 @@ pub fn finish(f: *Flush, wasm: *Wasm) !void {
         });
         try wasm.markFunctionImport(symbol_name, i.value(wasm), i);
     },
+    .call_tag_name => {
+        const zcu = comp.zcu.?;
+        const ip = &zcu.intern_pool;
+        assert(ip.indexToKey(data.ip_index) == .enum_type);
+        const gop = try wasm.zcu_funcs.getOrPut(gpa, data.ip_index);
+        if (!gop.found_existing) {
+            wasm.tag_name_table_ref_count += 1;
+            const int_tag_ty = Zcu.Type.fromInterned(data.ip_index).intTagType(zcu);
+            gop.value_ptr.* = .{ .tag_name = .{
+                .symbol_name = try wasm.internStringFmt("__zig_tag_name_{d}", .{@intFromEnum(data.ip_index)}),
+                .type_index = try wasm.internFunctionType(.Unspecified, &.{int_tag_ty.ip_index}, .slice_const_u8_sentinel_0, target),
+                .table_index = @intCast(wasm.tag_name_offs.items.len),
+            } };
+            try wasm.functions.put(gpa, .fromZcuFunc(wasm, @enumFromInt(gop.index)), {});
+            const tag_names = ip.loadEnumType(data.ip_index).names;
+            for (tag_names.get(ip)) |tag_name| {
+                const slice = tag_name.toSlice(ip);
+                try wasm.tag_name_offs.append(gpa, @intCast(wasm.tag_name_bytes.items.len));
+                try wasm.tag_name_bytes.appendSlice(gpa, slice[0 .. slice.len + 1]);
+            }
+        }
+    },
     else => continue,
 };
@@ -222,7 +245,7 @@ pub fn finish(f: *Flush, wasm: *Wasm) !void {
 // unused segments can be omitted.
 try f.data_segments.ensureUnusedCapacity(gpa, wasm.data_segments.entries.len +
     wasm.uavs_obj.entries.len + wasm.navs_obj.entries.len +
-    wasm.uavs_exe.entries.len + wasm.navs_exe.entries.len + 2);
+    wasm.uavs_exe.entries.len + wasm.navs_exe.entries.len + 4);
 if (is_obj) assert(wasm.uavs_exe.entries.len == 0);
 if (is_obj) assert(wasm.navs_exe.entries.len == 0);
 if (!is_obj) assert(wasm.uavs_obj.entries.len == 0);
@@ -243,6 +266,10 @@ pub fn finish(f: *Flush, wasm: *Wasm) !void {
     f.data_segments.putAssumeCapacity(.__zig_error_names, @as(u32, undefined));
     f.data_segments.putAssumeCapacity(.__zig_error_name_table, @as(u32, undefined));
 }
+if (wasm.tag_name_table_ref_count > 0) {
+    f.data_segments.putAssumeCapacity(.__zig_tag_names, @as(u32, undefined));
+    f.data_segments.putAssumeCapacity(.__zig_tag_name_table, @as(u32, undefined));
+}
 for (wasm.data_segments.keys()) |data_id| f.data_segments.putAssumeCapacity(data_id, @as(u32, undefined));
 try wasm.functions.ensureUnusedCapacity(gpa, 3);
@@ -751,7 +778,14 @@ pub fn finish(f: *Flush, wasm: *Wasm) !void {
 log.debug("lowering function code for '{s}'", .{resolution.name(wasm).?});
-try i.value(wasm).function.lower(wasm, binary_bytes);
+const zcu = comp.zcu.?;
+const ip = &zcu.intern_pool;
+switch (ip.indexToKey(i.key(wasm).*)) {
+    .enum_type => {
+        try emitTagNameFunction(gpa, binary_bytes, f.data_segments.get(.__zig_tag_name_table).?, i.value(wasm).tag_name.table_index);
+    },
+    else => try i.value(wasm).function.lower(wasm, binary_bytes),
+}
 },
 };
@@ -849,9 +883,23 @@ pub fn finish(f: *Flush, wasm: *Wasm) !void {
 if (is_obj) @panic("TODO error name table reloc");
 const base = f.data_segments.get(.__zig_error_names).?;
 if (!is64) {
-    try emitErrorNameTable(gpa, binary_bytes, wasm.error_name_offs.items, wasm.error_name_bytes.items, base, u32);
+    try emitTagNameTable(gpa, binary_bytes, wasm.error_name_offs.items, wasm.error_name_bytes.items, base, u32);
 } else {
-    try emitErrorNameTable(gpa, binary_bytes, wasm.error_name_offs.items, wasm.error_name_bytes.items, base, u64);
+    try emitTagNameTable(gpa, binary_bytes, wasm.error_name_offs.items, wasm.error_name_bytes.items, base, u64);
+}
+break :append;
+},
+.__zig_tag_names => {
+    try binary_bytes.appendSlice(gpa, wasm.tag_name_bytes.items);
+    break :append;
+},
+.__zig_tag_name_table => {
+    if (is_obj) @panic("TODO tag name table reloc");
+    const base = f.data_segments.get(.__zig_tag_names).?;
+    if (!is64) {
+        try emitTagNameTable(gpa, binary_bytes, wasm.tag_name_offs.items, wasm.tag_name_bytes.items, base, u32);
+    } else {
+        try emitTagNameTable(gpa, binary_bytes, wasm.tag_name_offs.items, wasm.tag_name_bytes.items, base, u64);
 }
 break :append;
 },
@@ -1497,18 +1545,18 @@ fn uleb128size(x: u32) u32 {
     return size;
 }
-fn emitErrorNameTable(
+fn emitTagNameTable(
     gpa: Allocator,
     code: *std.ArrayListUnmanaged(u8),
-    error_name_offs: []const u32,
-    error_name_bytes: []const u8,
+    tag_name_offs: []const u32,
+    tag_name_bytes: []const u8,
     base: u32,
     comptime Int: type,
 ) error{OutOfMemory}!void {
     const ptr_size_bytes = @divExact(@bitSizeOf(Int), 8);
-    try code.ensureUnusedCapacity(gpa, ptr_size_bytes * 2 * error_name_offs.len);
-    for (error_name_offs) |off| {
-        const name_len: u32 = @intCast(mem.indexOfScalar(u8, error_name_bytes[off..], 0).?);
+    try code.ensureUnusedCapacity(gpa, ptr_size_bytes * 2 * tag_name_offs.len);
+    for (tag_name_offs) |off| {
+        const name_len: u32 = @intCast(mem.indexOfScalar(u8, tag_name_bytes[off..], 0).?);
         mem.writeInt(Int, code.addManyAsArrayAssumeCapacity(ptr_size_bytes), base + off, .little);
         mem.writeInt(Int, code.addManyAsArrayAssumeCapacity(ptr_size_bytes), name_len, .little);
     }
@@ -1849,6 +1897,42 @@ fn emitInitMemoryFunction(
     binary_bytes.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.end));
 }
+fn emitTagNameFunction(
+    gpa: Allocator,
+    code: *std.ArrayListUnmanaged(u8),
+    table_base_addr: u32,
+    table_index: u32,
+) Allocator.Error!void {
+    try code.ensureUnusedCapacity(gpa, 7 * 5 + 6 + 1 * 6);
+    appendReservedUleb32(code, 0); // no locals
+    const slice_abi_size = 8;
+    const encoded_alignment = @ctz(@as(u32, 4));
+    const all_tag_values_autoassigned = true;
+    if (all_tag_values_autoassigned) {
+        // Then it's a direct table lookup.
+        code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.local_get));
+        appendReservedUleb32(code, 0);
+        code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.local_get));
+        appendReservedUleb32(code, 1);
+        appendReservedI32Const(code, slice_abi_size);
+        code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.i32_mul));
+        code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.i64_load));
+        appendReservedUleb32(code, encoded_alignment);
+        appendReservedUleb32(code, table_base_addr + table_index * 8);
+        code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.i64_store));
+        appendReservedUleb32(code, encoded_alignment);
+        appendReservedUleb32(code, 0);
+    }
+    // End of the function body
+    code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.end));
+}
 /// Writes an unsigned 32-bit integer as a LEB128-encoded 'i32.const' value.
 fn appendReservedI32Const(bytes: *std.ArrayListUnmanaged(u8), val: u32) void {
     bytes.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.i32_const));
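
Reading note (not part of the commit): emitTagNameFunction hardcodes the auto-assigned case and a wasm32 slice ABI, where a `[]const u8` slice occupies 8 bytes and is moved as a single i64. The emitted body reads parameter 0 as the destination pointer for the returned slice and parameter 1 as the tag value, then copies the `{ptr, len}` entry at `table_base_addr + table_index * 8 + tag * 8` to the destination. A rough Zig model of that lookup, with invented names:

// Hypothetical model of the generated wasm body, assuming wasm32.
const Slice32 = extern struct { ptr: u32, len: u32 }; // 8 bytes, like []const u8

fn tagNameLookup(table: [*]const Slice32, table_index: u32, tag: u32) Slice32 {
    // Direct lookup is valid because auto-assigned tag values are 0..N-1.
    return table[table_index + tag];
}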