Mirror of https://github.com/ziglang/zig.git, synced 2026-02-15 05:48:31 +00:00

Commit 790b8428a2: Merge pull request #20494 from mlugg/the-great-decl-split

refactors ad infinitum
@@ -522,6 +522,7 @@ set(ZIG_STAGE2_SOURCES
    src/Sema.zig
    src/Sema/bitcast.zig
    src/Sema/comptime_ptr_access.zig
    src/Type.zig
    src/Value.zig
    src/Zcu.zig
    src/arch/aarch64/CodeGen.zig
@@ -673,7 +674,6 @@ set(ZIG_STAGE2_SOURCES
    src/target.zig
    src/tracy.zig
    src/translate_c.zig
    src/type.zig
    src/wasi_libc.zig
)
build.zig (28 changed lines)
@@ -82,15 +82,6 @@ pub fn build(b: *std.Build) !void {
docs_step.dependOn(langref_step);
docs_step.dependOn(std_docs_step);

const check_case_exe = b.addExecutable(.{
.name = "check-case",
.root_source_file = b.path("test/src/Cases.zig"),
.target = b.graph.host,
.optimize = optimize,
.single_threaded = single_threaded,
});
check_case_exe.stack_size = stack_size;

const skip_debug = b.option(bool, "skip-debug", "Main test suite skips debug builds") orelse false;
const skip_release = b.option(bool, "skip-release", "Main test suite skips release builds") orelse false;
const skip_release_small = b.option(bool, "skip-release-small", "Main test suite skips release-small builds") orelse skip_release;
@@ -222,7 +213,6 @@ pub fn build(b: *std.Build) !void {
if (target.result.os.tag == .windows and target.result.abi == .gnu) {
// LTO is currently broken on mingw, this can be removed when it's fixed.
exe.want_lto = false;
check_case_exe.want_lto = false;
}

const use_llvm = b.option(bool, "use-llvm", "Use the llvm backend");
@@ -245,7 +235,6 @@ pub fn build(b: *std.Build) !void {

if (link_libc) {
exe.linkLibC();
check_case_exe.linkLibC();
}

const is_debug = optimize == .Debug;
@@ -339,21 +328,17 @@ pub fn build(b: *std.Build) !void {
}

try addCmakeCfgOptionsToExe(b, cfg, exe, use_zig_libcxx);
try addCmakeCfgOptionsToExe(b, cfg, check_case_exe, use_zig_libcxx);
} else {
// Here we are -Denable-llvm but no cmake integration.
try addStaticLlvmOptionsToExe(exe);
try addStaticLlvmOptionsToExe(check_case_exe);
}
if (target.result.os.tag == .windows) {
inline for (.{ exe, check_case_exe }) |artifact| {
// LLVM depends on networking as of version 18.
artifact.linkSystemLibrary("ws2_32");
// LLVM depends on networking as of version 18.
exe.linkSystemLibrary("ws2_32");

artifact.linkSystemLibrary("version");
artifact.linkSystemLibrary("uuid");
artifact.linkSystemLibrary("ole32");
}
exe.linkSystemLibrary("version");
exe.linkSystemLibrary("uuid");
exe.linkSystemLibrary("ole32");
}
}

@@ -394,7 +379,6 @@ pub fn build(b: *std.Build) !void {
const test_filters = b.option([]const []const u8, "test-filter", "Skip tests that do not match any filter") orelse &[0][]const u8{};

const test_cases_options = b.addOptions();
check_case_exe.root_module.addOptions("build_options", test_cases_options);

test_cases_options.addOption(bool, "enable_tracy", false);
test_cases_options.addOption(bool, "enable_debug_extensions", enable_debug_extensions);
@@ -458,7 +442,7 @@ pub fn build(b: *std.Build) !void {
test_step.dependOn(check_fmt);

const test_cases_step = b.step("test-cases", "Run the main compiler test cases");
try tests.addCases(b, test_cases_step, test_filters, check_case_exe, target, .{
try tests.addCases(b, test_cases_step, test_filters, target, .{
.skip_translate_c = skip_translate_c,
.skip_run_translated_c = skip_run_translated_c,
}, .{
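For readers unfamiliar with the std.Build API touched above, here is a minimal hedged sketch of the same pattern, a standalone tool artifact wired into a named step, analogous to the removed check-case executable and the test-cases step. The tool name, source path, and step name are hypothetical, not part of this commit.

```zig
const std = @import("std");

pub fn build(b: *std.Build) void {
    const target = b.standardTargetOptions(.{});
    const optimize = b.standardOptimizeOption(.{});

    // A helper executable, analogous to the removed `check-case` artifact.
    const tool = b.addExecutable(.{
        .name = "gen", // hypothetical
        .root_source_file = b.path("tools/gen.zig"), // hypothetical path
        .target = target,
        .optimize = optimize,
    });

    // A named step that runs the tool, analogous to the "test-cases" step.
    const run = b.addRunArtifact(tool);
    const step = b.step("gen", "Run the code generator");
    step.dependOn(&run.step);
}
```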
@@ -17,12 +17,15 @@ pub const DynLib = struct {
DlDynLib,
.windows => WindowsDynLib,
.macos, .tvos, .watchos, .ios, .visionos, .freebsd, .netbsd, .openbsd, .dragonfly, .solaris, .illumos => DlDynLib,
else => @compileError("unsupported platform"),
else => struct {
const open = @compileError("unsupported platform");
const openZ = @compileError("unsupported platform");
},
};

inner: InnerType,

pub const Error = ElfDynLib.Error || DlDynLib.Error || WindowsDynLib.Error;
pub const Error = ElfDynLibError || DlDynLibError || WindowsDynLibError;

/// Trusts the file. Malicious file will be able to execute arbitrary code.
pub fn open(path: []const u8) Error!DynLib {
@@ -122,6 +125,18 @@ pub fn linkmap_iterator(phdrs: []elf.Phdr) error{InvalidExe}!LinkMap.Iterator {
return .{ .current = link_map_ptr };
}

/// Separated to avoid referencing `ElfDynLib`, because its field types may not
/// be valid on other targets.
const ElfDynLibError = error{
FileTooBig,
NotElfFile,
NotDynamicLibrary,
MissingDynamicLinkingInformation,
ElfStringSectionNotFound,
ElfSymSectionNotFound,
ElfHashTableNotFound,
} || posix.OpenError || posix.MMapError;

pub const ElfDynLib = struct {
strings: [*:0]u8,
syms: [*]elf.Sym,
@@ -130,15 +145,7 @@ pub const ElfDynLib = struct {
verdef: ?*elf.Verdef,
memory: []align(mem.page_size) u8,

pub const Error = error{
FileTooBig,
NotElfFile,
NotDynamicLibrary,
MissingDynamicLinkingInformation,
ElfStringSectionNotFound,
ElfSymSectionNotFound,
ElfHashTableNotFound,
} || posix.OpenError || posix.MMapError;
pub const Error = ElfDynLibError;

/// Trusts the file. Malicious file will be able to execute arbitrary code.
pub fn open(path: []const u8) Error!ElfDynLib {
@@ -350,11 +357,15 @@ test "ElfDynLib" {
try testing.expectError(error.FileNotFound, ElfDynLib.open("invalid_so.so"));
}

/// Separated to avoid referencing `WindowsDynLib`, because its field types may not
/// be valid on other targets.
const WindowsDynLibError = error{
FileNotFound,
InvalidPath,
} || windows.LoadLibraryError;

pub const WindowsDynLib = struct {
pub const Error = error{
FileNotFound,
InvalidPath,
} || windows.LoadLibraryError;
pub const Error = WindowsDynLibError;

dll: windows.HMODULE,

@@ -413,8 +424,12 @@ pub const WindowsDynLib = struct {
}
};

/// Separated to avoid referencing `DlDynLib`, because its field types may not
/// be valid on other targets.
const DlDynLibError = error{ FileNotFound, NameTooLong };

pub const DlDynLib = struct {
pub const Error = error{ FileNotFound, NameTooLong };
pub const Error = DlDynLibError;

handle: *anyopaque,
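The doc comments above explain the motivation for this change: each implementation's error set is hoisted out of the platform-specific struct so that `DynLib.Error` can be assembled without referencing struct types whose field types may not compile on other targets, and the `else` branch becomes a struct of `@compileError` declarations so the error only fires when the API is actually used. Below is a minimal sketch of both patterns under assumed names (`PosixImpl`, `Impl`, and the error names are hypothetical, not from this commit).

```zig
const builtin = @import("builtin");

// Hoisted error set: other targets can name it without instantiating
// `PosixImpl`, whose field types may not be valid for them.
const PosixImplError = error{NotSupported};

const PosixImpl = struct {
    pub const Error = PosixImplError;
    fd: i32,

    pub fn open(path: []const u8) Error!PosixImpl {
        _ = path;
        return error.NotSupported;
    }
};

pub const Impl = switch (builtin.os.tag) {
    .linux, .macos => PosixImpl,
    // A struct of @compileError decls delays the error until `open` is
    // actually referenced, instead of failing as soon as this switch is
    // evaluated on an unsupported target.
    else => struct {
        pub const open = @compileError("unsupported platform");
    },
};

// Built only from hoisted error sets, so this compiles on every target.
pub const Error = PosixImplError;
```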
@@ -311,13 +311,13 @@ const builtin = @import("builtin");
const std = @import("std.zig");

test {
_ = Client;
_ = Method;
_ = Server;
_ = Status;
_ = HeadParser;
_ = ChunkParser;
if (builtin.os.tag != .wasi) {
_ = Client;
_ = Method;
_ = Server;
_ = Status;
_ = HeadParser;
_ = ChunkParser;
_ = @import("http/test.zig");
}
}

@@ -1930,8 +1930,10 @@ pub const Server = struct {
};

test {
_ = @import("net/test.zig");
_ = Server;
_ = Stream;
_ = Address;
if (builtin.os.tag != .wasi) {
_ = Server;
_ = Stream;
_ = Address;
_ = @import("net/test.zig");
}
}
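Both hunks apply the same idea: declaration references inside a `test` block force semantic analysis of the referenced code, and wrapping them in a target check skips that analysis entirely on WASI, where the referenced functionality cannot build or run. A small sketch of the pattern, with `Parser` as a hypothetical stand-in:

```zig
const builtin = @import("builtin");

const Parser = struct {
    fn parse(s: []const u8) usize {
        return s.len;
    }
};

test {
    // Referencing a declaration pulls its bodies and tests into analysis.
    _ = Parser;
    // builtin.os.tag is comptime-known, so this branch is eliminated on
    // WASI and the gated declarations are never analyzed there.
    if (builtin.os.tag != .wasi) {
        _ = Parser.parse;
    }
}
```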
@@ -207,16 +207,16 @@ typedef char bool;
__asm(zig_mangle_c(name) " = " zig_mangle_c(symbol))
#endif

#define zig_mangled_tentative zig_mangled
#define zig_mangled_final zig_mangled
#if _MSC_VER
#define zig_mangled_tentative(mangled, unmangled)
#define zig_mangled_final(mangled, unmangled) ; \
#define zig_mangled(mangled, unmangled) ; \
zig_export(#mangled, unmangled)
#define zig_mangled_export(mangled, unmangled, symbol) \
zig_export(unmangled, #mangled) \
zig_export(symbol, unmangled)
#else /* _MSC_VER */
#define zig_mangled_tentative(mangled, unmangled) __asm(zig_mangle_c(unmangled))
#define zig_mangled_final(mangled, unmangled) zig_mangled_tentative(mangled, unmangled)
#define zig_mangled(mangled, unmangled) __asm(zig_mangle_c(unmangled))
#define zig_mangled_export(mangled, unmangled, symbol) \
zig_mangled_final(mangled, unmangled) \
zig_export(symbol, unmangled)
@@ -9,7 +9,7 @@ const assert = std.debug.assert;

const Air = @This();
const Value = @import("Value.zig");
const Type = @import("type.zig").Type;
const Type = @import("Type.zig");
const InternPool = @import("InternPool.zig");
const Zcu = @import("Zcu.zig");
/// Deprecated.
@@ -1801,3 +1801,5 @@ pub fn mustLower(air: Air, inst: Air.Inst.Index, ip: *const InternPool) bool {
.atomic_load => air.typeOf(data.atomic_load.ptr, ip).isVolatilePtrIp(ip),
};
}

pub const typesFullyResolved = @import("Air/types_resolved.zig").typesFullyResolved;
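This re-export gives callers a single entry point for the check implemented in the new `src/Air/types_resolved.zig` below, which reports whether all type resolution needed for codegen completed. A hedged sketch of how a call site might use it; the surrounding function and error name are illustrative assumptions, not code from this commit:

```zig
const Air = @import("Air.zig");
const Zcu = @import("Zcu.zig");

// Illustrative only: bail out of machine-code emission when semantic
// analysis left some type unresolved (i.e. analysis already failed).
fn emitFunction(air: Air, zcu: *Zcu) !void {
    if (!air.typesFullyResolved(zcu)) return error.AnalysisFail;
    // ... lower `air` to machine code ...
}
```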
src/Air/types_resolved.zig (new file, 521 lines)
@@ -0,0 +1,521 @@
const Air = @import("../Air.zig");
|
||||
const Zcu = @import("../Zcu.zig");
|
||||
const Type = @import("../Type.zig");
|
||||
const Value = @import("../Value.zig");
|
||||
const InternPool = @import("../InternPool.zig");
|
||||
|
||||
/// Given a body of AIR instructions, returns whether all type resolution necessary for codegen is complete.
|
||||
/// If `false`, then type resolution must have failed, so codegen cannot proceed.
|
||||
pub fn typesFullyResolved(air: Air, zcu: *Zcu) bool {
|
||||
return checkBody(air, air.getMainBody(), zcu);
|
||||
}
|
||||
|
||||
fn checkBody(air: Air, body: []const Air.Inst.Index, zcu: *Zcu) bool {
|
||||
const tags = air.instructions.items(.tag);
|
||||
const datas = air.instructions.items(.data);
|
||||
|
||||
for (body) |inst| {
|
||||
const data = datas[@intFromEnum(inst)];
|
||||
switch (tags[@intFromEnum(inst)]) {
|
||||
.inferred_alloc, .inferred_alloc_comptime => unreachable,
|
||||
|
||||
.arg => {
|
||||
if (!checkType(data.arg.ty.toType(), zcu)) return false;
|
||||
},
|
||||
|
||||
.add,
|
||||
.add_safe,
|
||||
.add_optimized,
|
||||
.add_wrap,
|
||||
.add_sat,
|
||||
.sub,
|
||||
.sub_safe,
|
||||
.sub_optimized,
|
||||
.sub_wrap,
|
||||
.sub_sat,
|
||||
.mul,
|
||||
.mul_safe,
|
||||
.mul_optimized,
|
||||
.mul_wrap,
|
||||
.mul_sat,
|
||||
.div_float,
|
||||
.div_float_optimized,
|
||||
.div_trunc,
|
||||
.div_trunc_optimized,
|
||||
.div_floor,
|
||||
.div_floor_optimized,
|
||||
.div_exact,
|
||||
.div_exact_optimized,
|
||||
.rem,
|
||||
.rem_optimized,
|
||||
.mod,
|
||||
.mod_optimized,
|
||||
.max,
|
||||
.min,
|
||||
.bit_and,
|
||||
.bit_or,
|
||||
.shr,
|
||||
.shr_exact,
|
||||
.shl,
|
||||
.shl_exact,
|
||||
.shl_sat,
|
||||
.xor,
|
||||
.cmp_lt,
|
||||
.cmp_lt_optimized,
|
||||
.cmp_lte,
|
||||
.cmp_lte_optimized,
|
||||
.cmp_eq,
|
||||
.cmp_eq_optimized,
|
||||
.cmp_gte,
|
||||
.cmp_gte_optimized,
|
||||
.cmp_gt,
|
||||
.cmp_gt_optimized,
|
||||
.cmp_neq,
|
||||
.cmp_neq_optimized,
|
||||
.bool_and,
|
||||
.bool_or,
|
||||
.store,
|
||||
.store_safe,
|
||||
.set_union_tag,
|
||||
.array_elem_val,
|
||||
.slice_elem_val,
|
||||
.ptr_elem_val,
|
||||
.memset,
|
||||
.memset_safe,
|
||||
.memcpy,
|
||||
.atomic_store_unordered,
|
||||
.atomic_store_monotonic,
|
||||
.atomic_store_release,
|
||||
.atomic_store_seq_cst,
|
||||
=> {
|
||||
if (!checkRef(data.bin_op.lhs, zcu)) return false;
|
||||
if (!checkRef(data.bin_op.rhs, zcu)) return false;
|
||||
},
|
||||
|
||||
.not,
|
||||
.bitcast,
|
||||
.clz,
|
||||
.ctz,
|
||||
.popcount,
|
||||
.byte_swap,
|
||||
.bit_reverse,
|
||||
.abs,
|
||||
.load,
|
||||
.fptrunc,
|
||||
.fpext,
|
||||
.intcast,
|
||||
.trunc,
|
||||
.optional_payload,
|
||||
.optional_payload_ptr,
|
||||
.optional_payload_ptr_set,
|
||||
.wrap_optional,
|
||||
.unwrap_errunion_payload,
|
||||
.unwrap_errunion_err,
|
||||
.unwrap_errunion_payload_ptr,
|
||||
.unwrap_errunion_err_ptr,
|
||||
.errunion_payload_ptr_set,
|
||||
.wrap_errunion_payload,
|
||||
.wrap_errunion_err,
|
||||
.struct_field_ptr_index_0,
|
||||
.struct_field_ptr_index_1,
|
||||
.struct_field_ptr_index_2,
|
||||
.struct_field_ptr_index_3,
|
||||
.get_union_tag,
|
||||
.slice_len,
|
||||
.slice_ptr,
|
||||
.ptr_slice_len_ptr,
|
||||
.ptr_slice_ptr_ptr,
|
||||
.array_to_slice,
|
||||
.int_from_float,
|
||||
.int_from_float_optimized,
|
||||
.float_from_int,
|
||||
.splat,
|
||||
.error_set_has_value,
|
||||
.addrspace_cast,
|
||||
.c_va_arg,
|
||||
.c_va_copy,
|
||||
=> {
|
||||
if (!checkType(data.ty_op.ty.toType(), zcu)) return false;
|
||||
if (!checkRef(data.ty_op.operand, zcu)) return false;
|
||||
},
|
||||
|
||||
.alloc,
|
||||
.ret_ptr,
|
||||
.c_va_start,
|
||||
=> {
|
||||
if (!checkType(data.ty, zcu)) return false;
|
||||
},
|
||||
|
||||
.ptr_add,
|
||||
.ptr_sub,
|
||||
.add_with_overflow,
|
||||
.sub_with_overflow,
|
||||
.mul_with_overflow,
|
||||
.shl_with_overflow,
|
||||
.slice,
|
||||
.slice_elem_ptr,
|
||||
.ptr_elem_ptr,
|
||||
=> {
|
||||
const bin = air.extraData(Air.Bin, data.ty_pl.payload).data;
|
||||
if (!checkType(data.ty_pl.ty.toType(), zcu)) return false;
|
||||
if (!checkRef(bin.lhs, zcu)) return false;
|
||||
if (!checkRef(bin.rhs, zcu)) return false;
|
||||
},
|
||||
|
||||
.block,
|
||||
.loop,
|
||||
=> {
|
||||
const extra = air.extraData(Air.Block, data.ty_pl.payload);
|
||||
if (!checkType(data.ty_pl.ty.toType(), zcu)) return false;
|
||||
if (!checkBody(
|
||||
air,
|
||||
@ptrCast(air.extra[extra.end..][0..extra.data.body_len]),
|
||||
zcu,
|
||||
)) return false;
|
||||
},
|
||||
|
||||
.dbg_inline_block => {
|
||||
const extra = air.extraData(Air.DbgInlineBlock, data.ty_pl.payload);
|
||||
if (!checkType(data.ty_pl.ty.toType(), zcu)) return false;
|
||||
if (!checkBody(
|
||||
air,
|
||||
@ptrCast(air.extra[extra.end..][0..extra.data.body_len]),
|
||||
zcu,
|
||||
)) return false;
|
||||
},
|
||||
|
||||
.sqrt,
|
||||
.sin,
|
||||
.cos,
|
||||
.tan,
|
||||
.exp,
|
||||
.exp2,
|
||||
.log,
|
||||
.log2,
|
||||
.log10,
|
||||
.floor,
|
||||
.ceil,
|
||||
.round,
|
||||
.trunc_float,
|
||||
.neg,
|
||||
.neg_optimized,
|
||||
.is_null,
|
||||
.is_non_null,
|
||||
.is_null_ptr,
|
||||
.is_non_null_ptr,
|
||||
.is_err,
|
||||
.is_non_err,
|
||||
.is_err_ptr,
|
||||
.is_non_err_ptr,
|
||||
.int_from_ptr,
|
||||
.int_from_bool,
|
||||
.ret,
|
||||
.ret_safe,
|
||||
.ret_load,
|
||||
.is_named_enum_value,
|
||||
.tag_name,
|
||||
.error_name,
|
||||
.cmp_lt_errors_len,
|
||||
.c_va_end,
|
||||
.set_err_return_trace,
|
||||
=> {
|
||||
if (!checkRef(data.un_op, zcu)) return false;
|
||||
},
|
||||
|
||||
.br => {
|
||||
if (!checkRef(data.br.operand, zcu)) return false;
|
||||
},
|
||||
|
||||
.cmp_vector,
|
||||
.cmp_vector_optimized,
|
||||
=> {
|
||||
const extra = air.extraData(Air.VectorCmp, data.ty_pl.payload).data;
|
||||
if (!checkType(data.ty_pl.ty.toType(), zcu)) return false;
|
||||
if (!checkRef(extra.lhs, zcu)) return false;
|
||||
if (!checkRef(extra.rhs, zcu)) return false;
|
||||
},
|
||||
|
||||
.reduce,
|
||||
.reduce_optimized,
|
||||
=> {
|
||||
if (!checkRef(data.reduce.operand, zcu)) return false;
|
||||
},
|
||||
|
||||
.struct_field_ptr,
|
||||
.struct_field_val,
|
||||
=> {
|
||||
const extra = air.extraData(Air.StructField, data.ty_pl.payload).data;
|
||||
if (!checkType(data.ty_pl.ty.toType(), zcu)) return false;
|
||||
if (!checkRef(extra.struct_operand, zcu)) return false;
|
||||
},
|
||||
|
||||
.shuffle => {
|
||||
const extra = air.extraData(Air.Shuffle, data.ty_pl.payload).data;
|
||||
if (!checkType(data.ty_pl.ty.toType(), zcu)) return false;
|
||||
if (!checkRef(extra.a, zcu)) return false;
|
||||
if (!checkRef(extra.b, zcu)) return false;
|
||||
if (!checkVal(Value.fromInterned(extra.mask), zcu)) return false;
|
||||
},
|
||||
|
||||
.cmpxchg_weak,
|
||||
.cmpxchg_strong,
|
||||
=> {
|
||||
const extra = air.extraData(Air.Cmpxchg, data.ty_pl.payload).data;
|
||||
if (!checkType(data.ty_pl.ty.toType(), zcu)) return false;
|
||||
if (!checkRef(extra.ptr, zcu)) return false;
|
||||
if (!checkRef(extra.expected_value, zcu)) return false;
|
||||
if (!checkRef(extra.new_value, zcu)) return false;
|
||||
},
|
||||
|
||||
.aggregate_init => {
|
||||
const ty = data.ty_pl.ty.toType();
|
||||
const elems_len: usize = @intCast(ty.arrayLen(zcu));
|
||||
const elems: []const Air.Inst.Ref = @ptrCast(air.extra[data.ty_pl.payload..][0..elems_len]);
|
||||
if (!checkType(ty, zcu)) return false;
|
||||
if (ty.zigTypeTag(zcu) == .Struct) {
|
||||
for (elems, 0..) |elem, elem_idx| {
|
||||
if (ty.structFieldIsComptime(elem_idx, zcu)) continue;
|
||||
if (!checkRef(elem, zcu)) return false;
|
||||
}
|
||||
} else {
|
||||
for (elems) |elem| {
|
||||
if (!checkRef(elem, zcu)) return false;
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
.union_init => {
|
||||
const extra = air.extraData(Air.UnionInit, data.ty_pl.payload).data;
|
||||
if (!checkType(data.ty_pl.ty.toType(), zcu)) return false;
|
||||
if (!checkRef(extra.init, zcu)) return false;
|
||||
},
|
||||
|
||||
.field_parent_ptr => {
|
||||
const extra = air.extraData(Air.FieldParentPtr, data.ty_pl.payload).data;
|
||||
if (!checkType(data.ty_pl.ty.toType(), zcu)) return false;
|
||||
if (!checkRef(extra.field_ptr, zcu)) return false;
|
||||
},
|
||||
|
||||
.atomic_load => {
|
||||
if (!checkRef(data.atomic_load.ptr, zcu)) return false;
|
||||
},
|
||||
|
||||
.prefetch => {
|
||||
if (!checkRef(data.prefetch.ptr, zcu)) return false;
|
||||
},
|
||||
|
||||
.vector_store_elem => {
|
||||
const bin = air.extraData(Air.Bin, data.vector_store_elem.payload).data;
|
||||
if (!checkRef(data.vector_store_elem.vector_ptr, zcu)) return false;
|
||||
if (!checkRef(bin.lhs, zcu)) return false;
|
||||
if (!checkRef(bin.rhs, zcu)) return false;
|
||||
},
|
||||
|
||||
.select,
|
||||
.mul_add,
|
||||
=> {
|
||||
const bin = air.extraData(Air.Bin, data.pl_op.payload).data;
|
||||
if (!checkRef(data.pl_op.operand, zcu)) return false;
|
||||
if (!checkRef(bin.lhs, zcu)) return false;
|
||||
if (!checkRef(bin.rhs, zcu)) return false;
|
||||
},
|
||||
|
||||
.atomic_rmw => {
|
||||
const extra = air.extraData(Air.AtomicRmw, data.pl_op.payload).data;
|
||||
if (!checkRef(data.pl_op.operand, zcu)) return false;
|
||||
if (!checkRef(extra.operand, zcu)) return false;
|
||||
},
|
||||
|
||||
.call,
|
||||
.call_always_tail,
|
||||
.call_never_tail,
|
||||
.call_never_inline,
|
||||
=> {
|
||||
const extra = air.extraData(Air.Call, data.pl_op.payload);
|
||||
const args: []const Air.Inst.Ref = @ptrCast(air.extra[extra.end..][0..extra.data.args_len]);
|
||||
if (!checkRef(data.pl_op.operand, zcu)) return false;
|
||||
for (args) |arg| if (!checkRef(arg, zcu)) return false;
|
||||
},
|
||||
|
||||
.dbg_var_ptr,
|
||||
.dbg_var_val,
|
||||
=> {
|
||||
if (!checkRef(data.pl_op.operand, zcu)) return false;
|
||||
},
|
||||
|
||||
.@"try" => {
|
||||
const extra = air.extraData(Air.Try, data.pl_op.payload);
|
||||
if (!checkRef(data.pl_op.operand, zcu)) return false;
|
||||
if (!checkBody(
|
||||
air,
|
||||
@ptrCast(air.extra[extra.end..][0..extra.data.body_len]),
|
||||
zcu,
|
||||
)) return false;
|
||||
},
|
||||
|
||||
.try_ptr => {
|
||||
const extra = air.extraData(Air.TryPtr, data.ty_pl.payload);
|
||||
if (!checkType(data.ty_pl.ty.toType(), zcu)) return false;
|
||||
if (!checkRef(extra.data.ptr, zcu)) return false;
|
||||
if (!checkBody(
|
||||
air,
|
||||
@ptrCast(air.extra[extra.end..][0..extra.data.body_len]),
|
||||
zcu,
|
||||
)) return false;
|
||||
},
|
||||
|
||||
.cond_br => {
|
||||
const extra = air.extraData(Air.CondBr, data.pl_op.payload);
|
||||
if (!checkRef(data.pl_op.operand, zcu)) return false;
|
||||
if (!checkBody(
|
||||
air,
|
||||
@ptrCast(air.extra[extra.end..][0..extra.data.then_body_len]),
|
||||
zcu,
|
||||
)) return false;
|
||||
if (!checkBody(
|
||||
air,
|
||||
@ptrCast(air.extra[extra.end + extra.data.then_body_len ..][0..extra.data.else_body_len]),
|
||||
zcu,
|
||||
)) return false;
|
||||
},
|
||||
|
||||
.switch_br => {
|
||||
const extra = air.extraData(Air.SwitchBr, data.pl_op.payload);
|
||||
if (!checkRef(data.pl_op.operand, zcu)) return false;
|
||||
var extra_index = extra.end;
|
||||
for (0..extra.data.cases_len) |_| {
|
||||
const case = air.extraData(Air.SwitchBr.Case, extra_index);
|
||||
extra_index = case.end;
|
||||
const items: []const Air.Inst.Ref = @ptrCast(air.extra[extra_index..][0..case.data.items_len]);
|
||||
extra_index += case.data.items_len;
|
||||
for (items) |item| if (!checkRef(item, zcu)) return false;
|
||||
if (!checkBody(
|
||||
air,
|
||||
@ptrCast(air.extra[extra_index..][0..case.data.body_len]),
|
||||
zcu,
|
||||
)) return false;
|
||||
extra_index += case.data.body_len;
|
||||
}
|
||||
if (!checkBody(
|
||||
air,
|
||||
@ptrCast(air.extra[extra_index..][0..extra.data.else_body_len]),
|
||||
zcu,
|
||||
)) return false;
|
||||
},
|
||||
|
||||
.assembly => {
|
||||
const extra = air.extraData(Air.Asm, data.ty_pl.payload);
|
||||
if (!checkType(data.ty_pl.ty.toType(), zcu)) return false;
|
||||
// Luckily, we only care about the inputs and outputs, so we don't have to do
|
||||
// the whole null-terminated string dance.
|
||||
const outputs: []const Air.Inst.Ref = @ptrCast(air.extra[extra.end..][0..extra.data.outputs_len]);
|
||||
const inputs: []const Air.Inst.Ref = @ptrCast(air.extra[extra.end + extra.data.outputs_len ..][0..extra.data.inputs_len]);
|
||||
for (outputs) |output| if (output != .none and !checkRef(output, zcu)) return false;
|
||||
for (inputs) |input| if (input != .none and !checkRef(input, zcu)) return false;
|
||||
},
|
||||
|
||||
.trap,
|
||||
.breakpoint,
|
||||
.ret_addr,
|
||||
.frame_addr,
|
||||
.unreach,
|
||||
.wasm_memory_size,
|
||||
.wasm_memory_grow,
|
||||
.work_item_id,
|
||||
.work_group_size,
|
||||
.work_group_id,
|
||||
.fence,
|
||||
.dbg_stmt,
|
||||
.err_return_trace,
|
||||
.save_err_return_trace_index,
|
||||
=> {},
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
fn checkRef(ref: Air.Inst.Ref, zcu: *Zcu) bool {
|
||||
const ip_index = ref.toInterned() orelse {
|
||||
// This operand refers back to a previous instruction.
|
||||
// We have already checked that instruction's type.
|
||||
// So, there's no need to check this operand's type.
|
||||
return true;
|
||||
};
|
||||
return checkVal(Value.fromInterned(ip_index), zcu);
|
||||
}
|
||||
|
||||
fn checkVal(val: Value, zcu: *Zcu) bool {
|
||||
if (!checkType(val.typeOf(zcu), zcu)) return false;
|
||||
// Check for lazy values
|
||||
switch (zcu.intern_pool.indexToKey(val.toIntern())) {
|
||||
.int => |int| switch (int.storage) {
|
||||
.u64, .i64, .big_int => return true,
|
||||
.lazy_align, .lazy_size => |ty_index| {
|
||||
return checkType(Type.fromInterned(ty_index), zcu);
|
||||
},
|
||||
},
|
||||
else => return true,
|
||||
}
|
||||
}
|
||||
|
||||
fn checkType(ty: Type, zcu: *Zcu) bool {
|
||||
const ip = &zcu.intern_pool;
|
||||
return switch (ty.zigTypeTag(zcu)) {
|
||||
.Type,
|
||||
.Void,
|
||||
.Bool,
|
||||
.NoReturn,
|
||||
.Int,
|
||||
.Float,
|
||||
.ErrorSet,
|
||||
.Enum,
|
||||
.Opaque,
|
||||
.Vector,
|
||||
// These types can appear due to some dummy instructions Sema introduces and expects to be omitted by Liveness.
|
||||
// It's a little silly -- but fine, we'll return `true`.
|
||||
.ComptimeFloat,
|
||||
.ComptimeInt,
|
||||
.Undefined,
|
||||
.Null,
|
||||
.EnumLiteral,
|
||||
=> true,
|
||||
|
||||
.Frame,
|
||||
.AnyFrame,
|
||||
=> @panic("TODO Air.types_resolved.checkType async frames"),
|
||||
|
||||
.Optional => checkType(ty.childType(zcu), zcu),
|
||||
.ErrorUnion => checkType(ty.errorUnionPayload(zcu), zcu),
|
||||
.Pointer => checkType(ty.childType(zcu), zcu),
|
||||
.Array => checkType(ty.childType(zcu), zcu),
|
||||
|
||||
.Fn => {
|
||||
const info = zcu.typeToFunc(ty).?;
|
||||
for (0..info.param_types.len) |i| {
|
||||
const param_ty = info.param_types.get(ip)[i];
|
||||
if (!checkType(Type.fromInterned(param_ty), zcu)) return false;
|
||||
}
|
||||
return checkType(Type.fromInterned(info.return_type), zcu);
|
||||
},
|
||||
.Struct => switch (ip.indexToKey(ty.toIntern())) {
|
||||
.struct_type => {
|
||||
const struct_obj = zcu.typeToStruct(ty).?;
|
||||
return switch (struct_obj.layout) {
|
||||
.@"packed" => struct_obj.backingIntType(ip).* != .none,
|
||||
.auto, .@"extern" => struct_obj.flagsPtr(ip).fully_resolved,
|
||||
};
|
||||
},
|
||||
.anon_struct_type => |tuple| {
|
||||
for (0..tuple.types.len) |i| {
|
||||
const field_is_comptime = tuple.values.get(ip)[i] != .none;
|
||||
if (field_is_comptime) continue;
|
||||
const field_ty = tuple.types.get(ip)[i];
|
||||
if (!checkType(Type.fromInterned(field_ty), zcu)) return false;
|
||||
}
|
||||
return true;
|
||||
},
|
||||
else => unreachable,
|
||||
},
|
||||
.Union => return zcu.typeToUnion(ty).?.flagsPtr(ip).status == .fully_resolved,
|
||||
};
|
||||
}
|
||||
@ -12,7 +12,7 @@ const WaitGroup = std.Thread.WaitGroup;
|
||||
const ErrorBundle = std.zig.ErrorBundle;
|
||||
|
||||
const Value = @import("Value.zig");
|
||||
const Type = @import("type.zig").Type;
|
||||
const Type = @import("Type.zig");
|
||||
const target_util = @import("target.zig");
|
||||
const Package = @import("Package.zig");
|
||||
const link = @import("link.zig");
|
||||
@ -31,11 +31,13 @@ const clangMain = @import("main.zig").clangMain;
|
||||
const Zcu = @import("Zcu.zig");
|
||||
/// Deprecated; use `Zcu`.
|
||||
const Module = Zcu;
|
||||
const Sema = @import("Sema.zig");
|
||||
const InternPool = @import("InternPool.zig");
|
||||
const Cache = std.Build.Cache;
|
||||
const c_codegen = @import("codegen/c.zig");
|
||||
const libtsan = @import("libtsan.zig");
|
||||
const Zir = std.zig.Zir;
|
||||
const Air = @import("Air.zig");
|
||||
const Builtin = @import("Builtin.zig");
|
||||
const LlvmObject = @import("codegen/llvm.zig").Object;
|
||||
|
||||
@ -315,18 +317,29 @@ const Job = union(enum) {
|
||||
codegen_decl: InternPool.DeclIndex,
|
||||
/// Write the machine code for a function to the output file.
|
||||
/// This will either be a non-generic `func_decl` or a `func_instance`.
|
||||
codegen_func: InternPool.Index,
|
||||
codegen_func: struct {
|
||||
func: InternPool.Index,
|
||||
/// This `Air` is owned by the `Job` and allocated with `gpa`.
|
||||
/// It must be deinited when the job is processed.
|
||||
air: Air,
|
||||
},
|
||||
/// Render the .h file snippet for the Decl.
|
||||
emit_h_decl: InternPool.DeclIndex,
|
||||
/// The Decl needs to be analyzed and possibly export itself.
|
||||
/// It may have already be analyzed, or it may have been determined
|
||||
/// to be outdated; in this case perform semantic analysis again.
|
||||
analyze_decl: InternPool.DeclIndex,
|
||||
/// Analyze the body of a runtime function.
|
||||
/// After analysis, a `codegen_func` job will be queued.
|
||||
/// These must be separate jobs to ensure any needed type resolution occurs *before* codegen.
|
||||
analyze_func: InternPool.Index,
|
||||
/// The source file containing the Decl has been updated, and so the
|
||||
/// Decl may need its line number information updated in the debug info.
|
||||
update_line_number: InternPool.DeclIndex,
|
||||
/// The main source file for the module needs to be analyzed.
|
||||
analyze_mod: *Package.Module,
|
||||
/// Fully resolve the given `struct` or `union` type.
|
||||
resolve_type_fully: InternPool.Index,
|
||||
|
||||
/// one of the glibc static objects
|
||||
glibc_crt_file: glibc.CRTFile,
|
||||
@ -2628,22 +2641,24 @@ fn reportMultiModuleErrors(mod: *Module) !void {
|
||||
for (notes[0..num_notes], file.references.items[0..num_notes], 0..) |*note, ref, i| {
|
||||
errdefer for (notes[0..i]) |*n| n.deinit(mod.gpa);
|
||||
note.* = switch (ref) {
|
||||
.import => |loc| blk: {
|
||||
break :blk try Module.ErrorMsg.init(
|
||||
mod.gpa,
|
||||
loc,
|
||||
"imported from module {s}",
|
||||
.{loc.file_scope.mod.fully_qualified_name},
|
||||
);
|
||||
},
|
||||
.root => |pkg| blk: {
|
||||
break :blk try Module.ErrorMsg.init(
|
||||
mod.gpa,
|
||||
.{ .file_scope = file, .base_node = 0, .lazy = .entire_file },
|
||||
"root of module {s}",
|
||||
.{pkg.fully_qualified_name},
|
||||
);
|
||||
},
|
||||
.import => |import| try Module.ErrorMsg.init(
|
||||
mod.gpa,
|
||||
.{
|
||||
.base_node_inst = try mod.intern_pool.trackZir(mod.gpa, import.file, .main_struct_inst),
|
||||
.offset = .{ .token_abs = import.token },
|
||||
},
|
||||
"imported from module {s}",
|
||||
.{import.file.mod.fully_qualified_name},
|
||||
),
|
||||
.root => |pkg| try Module.ErrorMsg.init(
|
||||
mod.gpa,
|
||||
.{
|
||||
.base_node_inst = try mod.intern_pool.trackZir(mod.gpa, file, .main_struct_inst),
|
||||
.offset = .entire_file,
|
||||
},
|
||||
"root of module {s}",
|
||||
.{pkg.fully_qualified_name},
|
||||
),
|
||||
};
|
||||
}
|
||||
errdefer for (notes[0..num_notes]) |*n| n.deinit(mod.gpa);
|
||||
@ -2651,7 +2666,10 @@ fn reportMultiModuleErrors(mod: *Module) !void {
|
||||
if (omitted > 0) {
|
||||
notes[num_notes] = try Module.ErrorMsg.init(
|
||||
mod.gpa,
|
||||
.{ .file_scope = file, .base_node = 0, .lazy = .entire_file },
|
||||
.{
|
||||
.base_node_inst = try mod.intern_pool.trackZir(mod.gpa, file, .main_struct_inst),
|
||||
.offset = .entire_file,
|
||||
},
|
||||
"{} more references omitted",
|
||||
.{omitted},
|
||||
);
|
||||
@ -2660,7 +2678,10 @@ fn reportMultiModuleErrors(mod: *Module) !void {
|
||||
|
||||
const err = try Module.ErrorMsg.create(
|
||||
mod.gpa,
|
||||
.{ .file_scope = file, .base_node = 0, .lazy = .entire_file },
|
||||
.{
|
||||
.base_node_inst = try mod.intern_pool.trackZir(mod.gpa, file, .main_struct_inst),
|
||||
.offset = .entire_file,
|
||||
},
|
||||
"file exists in multiple modules",
|
||||
.{},
|
||||
);
|
||||
@ -2831,11 +2852,11 @@ pub fn totalErrorCount(comp: *Compilation) u32 {
|
||||
}
|
||||
}
|
||||
|
||||
if (comp.module) |module| {
|
||||
total += module.failed_exports.count();
|
||||
total += module.failed_embed_files.count();
|
||||
if (comp.module) |zcu| {
|
||||
total += zcu.failed_exports.count();
|
||||
total += zcu.failed_embed_files.count();
|
||||
|
||||
for (module.failed_files.keys(), module.failed_files.values()) |file, error_msg| {
|
||||
for (zcu.failed_files.keys(), zcu.failed_files.values()) |file, error_msg| {
|
||||
if (error_msg) |_| {
|
||||
total += 1;
|
||||
} else {
|
||||
@ -2851,23 +2872,27 @@ pub fn totalErrorCount(comp: *Compilation) u32 {
|
||||
// When a parse error is introduced, we keep all the semantic analysis for
|
||||
// the previous parse success, including compile errors, but we cannot
|
||||
// emit them until the file succeeds parsing.
|
||||
for (module.failed_decls.keys()) |key| {
|
||||
if (module.declFileScope(key).okToReportErrors()) {
|
||||
for (zcu.failed_analysis.keys()) |key| {
|
||||
const decl_index = switch (key.unwrap()) {
|
||||
.decl => |d| d,
|
||||
.func => |ip_index| zcu.funcInfo(ip_index).owner_decl,
|
||||
};
|
||||
if (zcu.declFileScope(decl_index).okToReportErrors()) {
|
||||
total += 1;
|
||||
if (module.cimport_errors.get(key)) |errors| {
|
||||
if (zcu.cimport_errors.get(key)) |errors| {
|
||||
total += errors.errorMessageCount();
|
||||
}
|
||||
}
|
||||
}
|
||||
if (module.emit_h) |emit_h| {
|
||||
if (zcu.emit_h) |emit_h| {
|
||||
for (emit_h.failed_decls.keys()) |key| {
|
||||
if (module.declFileScope(key).okToReportErrors()) {
|
||||
if (zcu.declFileScope(key).okToReportErrors()) {
|
||||
total += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (module.global_error_set.entries.len - 1 > module.error_limit) {
|
||||
if (zcu.global_error_set.entries.len - 1 > zcu.error_limit) {
|
||||
total += 1;
|
||||
}
|
||||
}
|
||||
@ -2882,8 +2907,8 @@ pub fn totalErrorCount(comp: *Compilation) u32 {
|
||||
|
||||
// Compile log errors only count if there are no other errors.
|
||||
if (total == 0) {
|
||||
if (comp.module) |module| {
|
||||
total += @intFromBool(module.compile_log_decls.count() != 0);
|
||||
if (comp.module) |zcu| {
|
||||
total += @intFromBool(zcu.compile_log_sources.count() != 0);
|
||||
}
|
||||
}
|
||||
|
||||
@ -2934,10 +2959,13 @@ pub fn getAllErrorsAlloc(comp: *Compilation) !ErrorBundle {
|
||||
.msg = try bundle.addString("memory allocation failure"),
|
||||
});
|
||||
}
|
||||
if (comp.module) |module| {
|
||||
for (module.failed_files.keys(), module.failed_files.values()) |file, error_msg| {
|
||||
if (comp.module) |zcu| {
|
||||
var all_references = try zcu.resolveReferences();
|
||||
defer all_references.deinit(gpa);
|
||||
|
||||
for (zcu.failed_files.keys(), zcu.failed_files.values()) |file, error_msg| {
|
||||
if (error_msg) |msg| {
|
||||
try addModuleErrorMsg(module, &bundle, msg.*);
|
||||
try addModuleErrorMsg(zcu, &bundle, msg.*, &all_references);
|
||||
} else {
|
||||
// Must be ZIR errors. Note that this may include AST errors.
|
||||
// addZirErrorMessages asserts that the tree is loaded.
|
||||
@ -2945,54 +2973,59 @@ pub fn getAllErrorsAlloc(comp: *Compilation) !ErrorBundle {
|
||||
try addZirErrorMessages(&bundle, file);
|
||||
}
|
||||
}
|
||||
for (module.failed_embed_files.values()) |error_msg| {
|
||||
try addModuleErrorMsg(module, &bundle, error_msg.*);
|
||||
for (zcu.failed_embed_files.values()) |error_msg| {
|
||||
try addModuleErrorMsg(zcu, &bundle, error_msg.*, &all_references);
|
||||
}
|
||||
for (module.failed_decls.keys(), module.failed_decls.values()) |decl_index, error_msg| {
|
||||
for (zcu.failed_analysis.keys(), zcu.failed_analysis.values()) |anal_unit, error_msg| {
|
||||
const decl_index = switch (anal_unit.unwrap()) {
|
||||
.decl => |d| d,
|
||||
.func => |ip_index| zcu.funcInfo(ip_index).owner_decl,
|
||||
};
|
||||
|
||||
// Skip errors for Decls within files that had a parse failure.
|
||||
// We'll try again once parsing succeeds.
|
||||
if (module.declFileScope(decl_index).okToReportErrors()) {
|
||||
try addModuleErrorMsg(module, &bundle, error_msg.*);
|
||||
if (module.cimport_errors.get(decl_index)) |errors| {
|
||||
for (errors.getMessages()) |err_msg_index| {
|
||||
const err_msg = errors.getErrorMessage(err_msg_index);
|
||||
try bundle.addRootErrorMessage(.{
|
||||
.msg = try bundle.addString(errors.nullTerminatedString(err_msg.msg)),
|
||||
.src_loc = if (err_msg.src_loc != .none) blk: {
|
||||
const src_loc = errors.getSourceLocation(err_msg.src_loc);
|
||||
break :blk try bundle.addSourceLocation(.{
|
||||
.src_path = try bundle.addString(errors.nullTerminatedString(src_loc.src_path)),
|
||||
.span_start = src_loc.span_start,
|
||||
.span_main = src_loc.span_main,
|
||||
.span_end = src_loc.span_end,
|
||||
.line = src_loc.line,
|
||||
.column = src_loc.column,
|
||||
.source_line = if (src_loc.source_line != 0) try bundle.addString(errors.nullTerminatedString(src_loc.source_line)) else 0,
|
||||
});
|
||||
} else .none,
|
||||
});
|
||||
}
|
||||
if (!zcu.declFileScope(decl_index).okToReportErrors()) continue;
|
||||
|
||||
try addModuleErrorMsg(zcu, &bundle, error_msg.*, &all_references);
|
||||
if (zcu.cimport_errors.get(anal_unit)) |errors| {
|
||||
for (errors.getMessages()) |err_msg_index| {
|
||||
const err_msg = errors.getErrorMessage(err_msg_index);
|
||||
try bundle.addRootErrorMessage(.{
|
||||
.msg = try bundle.addString(errors.nullTerminatedString(err_msg.msg)),
|
||||
.src_loc = if (err_msg.src_loc != .none) blk: {
|
||||
const src_loc = errors.getSourceLocation(err_msg.src_loc);
|
||||
break :blk try bundle.addSourceLocation(.{
|
||||
.src_path = try bundle.addString(errors.nullTerminatedString(src_loc.src_path)),
|
||||
.span_start = src_loc.span_start,
|
||||
.span_main = src_loc.span_main,
|
||||
.span_end = src_loc.span_end,
|
||||
.line = src_loc.line,
|
||||
.column = src_loc.column,
|
||||
.source_line = if (src_loc.source_line != 0) try bundle.addString(errors.nullTerminatedString(src_loc.source_line)) else 0,
|
||||
});
|
||||
} else .none,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
if (module.emit_h) |emit_h| {
|
||||
if (zcu.emit_h) |emit_h| {
|
||||
for (emit_h.failed_decls.keys(), emit_h.failed_decls.values()) |decl_index, error_msg| {
|
||||
// Skip errors for Decls within files that had a parse failure.
|
||||
// We'll try again once parsing succeeds.
|
||||
if (module.declFileScope(decl_index).okToReportErrors()) {
|
||||
try addModuleErrorMsg(module, &bundle, error_msg.*);
|
||||
if (zcu.declFileScope(decl_index).okToReportErrors()) {
|
||||
try addModuleErrorMsg(zcu, &bundle, error_msg.*, &all_references);
|
||||
}
|
||||
}
|
||||
}
|
||||
for (module.failed_exports.values()) |value| {
|
||||
try addModuleErrorMsg(module, &bundle, value.*);
|
||||
for (zcu.failed_exports.values()) |value| {
|
||||
try addModuleErrorMsg(zcu, &bundle, value.*, &all_references);
|
||||
}
|
||||
|
||||
const actual_error_count = module.global_error_set.entries.len - 1;
|
||||
if (actual_error_count > module.error_limit) {
|
||||
const actual_error_count = zcu.global_error_set.entries.len - 1;
|
||||
if (actual_error_count > zcu.error_limit) {
|
||||
try bundle.addRootErrorMessage(.{
|
||||
.msg = try bundle.printString("module used more errors than possible: used {d}, max {d}", .{
|
||||
actual_error_count, module.error_limit,
|
||||
.msg = try bundle.printString("ZCU used more errors than possible: used {d}, max {d}", .{
|
||||
actual_error_count, zcu.error_limit,
|
||||
}),
|
||||
.notes_len = 1,
|
||||
});
|
||||
@ -3041,25 +3074,28 @@ pub fn getAllErrorsAlloc(comp: *Compilation) !ErrorBundle {
|
||||
}
|
||||
|
||||
if (comp.module) |zcu| {
|
||||
if (bundle.root_list.items.len == 0 and zcu.compile_log_decls.count() != 0) {
|
||||
const values = zcu.compile_log_decls.values();
|
||||
if (bundle.root_list.items.len == 0 and zcu.compile_log_sources.count() != 0) {
|
||||
var all_references = try zcu.resolveReferences();
|
||||
defer all_references.deinit(gpa);
|
||||
|
||||
const values = zcu.compile_log_sources.values();
|
||||
// First one will be the error; subsequent ones will be notes.
|
||||
const src_loc = values[0].src().upgrade(zcu);
|
||||
const src_loc = values[0].src();
|
||||
const err_msg: Module.ErrorMsg = .{
|
||||
.src_loc = src_loc,
|
||||
.msg = "found compile log statement",
|
||||
.notes = try gpa.alloc(Module.ErrorMsg, zcu.compile_log_decls.count() - 1),
|
||||
.notes = try gpa.alloc(Module.ErrorMsg, zcu.compile_log_sources.count() - 1),
|
||||
};
|
||||
defer gpa.free(err_msg.notes);
|
||||
|
||||
for (values[1..], err_msg.notes) |src_info, *note| {
|
||||
note.* = .{
|
||||
.src_loc = src_info.src().upgrade(zcu),
|
||||
.src_loc = src_info.src(),
|
||||
.msg = "also here",
|
||||
};
|
||||
}
|
||||
|
||||
try addModuleErrorMsg(zcu, &bundle, err_msg);
|
||||
try addModuleErrorMsg(zcu, &bundle, err_msg, &all_references);
|
||||
}
|
||||
}
|
||||
|
||||
@ -3115,11 +3151,17 @@ pub const ErrorNoteHashContext = struct {
|
||||
}
|
||||
};
|
||||
|
||||
pub fn addModuleErrorMsg(mod: *Module, eb: *ErrorBundle.Wip, module_err_msg: Module.ErrorMsg) !void {
|
||||
pub fn addModuleErrorMsg(
|
||||
mod: *Module,
|
||||
eb: *ErrorBundle.Wip,
|
||||
module_err_msg: Module.ErrorMsg,
|
||||
all_references: *const std.AutoHashMapUnmanaged(InternPool.AnalUnit, Zcu.ResolvedReference),
|
||||
) !void {
|
||||
const gpa = eb.gpa;
|
||||
const ip = &mod.intern_pool;
|
||||
const err_source = module_err_msg.src_loc.file_scope.getSource(gpa) catch |err| {
|
||||
const file_path = try module_err_msg.src_loc.file_scope.fullPath(gpa);
|
||||
const err_src_loc = module_err_msg.src_loc.upgrade(mod);
|
||||
const err_source = err_src_loc.file_scope.getSource(gpa) catch |err| {
|
||||
const file_path = try err_src_loc.file_scope.fullPath(gpa);
|
||||
defer gpa.free(file_path);
|
||||
try eb.addRootErrorMessage(.{
|
||||
.msg = try eb.printString("unable to load '{s}': {s}", .{
|
||||
@ -3128,47 +3170,57 @@ pub fn addModuleErrorMsg(mod: *Module, eb: *ErrorBundle.Wip, module_err_msg: Mod
|
||||
});
|
||||
return;
|
||||
};
|
||||
const err_span = try module_err_msg.src_loc.span(gpa);
|
||||
const err_span = try err_src_loc.span(gpa);
|
||||
const err_loc = std.zig.findLineColumn(err_source.bytes, err_span.main);
|
||||
const file_path = try module_err_msg.src_loc.file_scope.fullPath(gpa);
|
||||
const file_path = try err_src_loc.file_scope.fullPath(gpa);
|
||||
defer gpa.free(file_path);
|
||||
|
||||
var ref_traces: std.ArrayListUnmanaged(ErrorBundle.ReferenceTrace) = .{};
|
||||
defer ref_traces.deinit(gpa);
|
||||
|
||||
const remaining_references: ?u32 = remaining: {
|
||||
if (mod.comp.reference_trace) |_| {
|
||||
if (module_err_msg.hidden_references > 0) break :remaining module_err_msg.hidden_references;
|
||||
} else {
|
||||
if (module_err_msg.reference_trace.len > 0) break :remaining 0;
|
||||
}
|
||||
break :remaining null;
|
||||
};
|
||||
try ref_traces.ensureTotalCapacityPrecise(gpa, module_err_msg.reference_trace.len +
|
||||
@intFromBool(remaining_references != null));
|
||||
if (module_err_msg.reference_trace_root.unwrap()) |rt_root| {
|
||||
var seen: std.AutoHashMapUnmanaged(InternPool.AnalUnit, void) = .{};
|
||||
defer seen.deinit(gpa);
|
||||
|
||||
for (module_err_msg.reference_trace) |module_reference| {
|
||||
const source = try module_reference.src_loc.file_scope.getSource(gpa);
|
||||
const span = try module_reference.src_loc.span(gpa);
|
||||
const loc = std.zig.findLineColumn(source.bytes, span.main);
|
||||
const rt_file_path = try module_reference.src_loc.file_scope.fullPath(gpa);
|
||||
defer gpa.free(rt_file_path);
|
||||
ref_traces.appendAssumeCapacity(.{
|
||||
.decl_name = try eb.addString(module_reference.decl.toSlice(ip)),
|
||||
.src_loc = try eb.addSourceLocation(.{
|
||||
.src_path = try eb.addString(rt_file_path),
|
||||
.span_start = span.start,
|
||||
.span_main = span.main,
|
||||
.span_end = span.end,
|
||||
.line = @intCast(loc.line),
|
||||
.column = @intCast(loc.column),
|
||||
.source_line = 0,
|
||||
}),
|
||||
});
|
||||
const max_references = mod.comp.reference_trace orelse Sema.default_reference_trace_len;
|
||||
|
||||
var referenced_by = rt_root;
|
||||
while (all_references.get(referenced_by)) |ref| {
|
||||
const gop = try seen.getOrPut(gpa, ref.referencer);
|
||||
if (gop.found_existing) break;
|
||||
if (ref_traces.items.len < max_references) {
|
||||
const src = ref.src.upgrade(mod);
|
||||
const source = try src.file_scope.getSource(gpa);
|
||||
const span = try src.span(gpa);
|
||||
const loc = std.zig.findLineColumn(source.bytes, span.main);
|
||||
const rt_file_path = try src.file_scope.fullPath(gpa);
|
||||
const name = switch (ref.referencer.unwrap()) {
|
||||
.decl => |d| mod.declPtr(d).name,
|
||||
.func => |f| mod.funcOwnerDeclPtr(f).name,
|
||||
};
|
||||
try ref_traces.append(gpa, .{
|
||||
.decl_name = try eb.addString(name.toSlice(ip)),
|
||||
.src_loc = try eb.addSourceLocation(.{
|
||||
.src_path = try eb.addString(rt_file_path),
|
||||
.span_start = span.start,
|
||||
.span_main = span.main,
|
||||
.span_end = span.end,
|
||||
.line = @intCast(loc.line),
|
||||
.column = @intCast(loc.column),
|
||||
.source_line = 0,
|
||||
}),
|
||||
});
|
||||
}
|
||||
referenced_by = ref.referencer;
|
||||
}
|
||||
|
||||
if (seen.count() > ref_traces.items.len) {
|
||||
try ref_traces.append(gpa, .{
|
||||
.decl_name = @intCast(seen.count() - ref_traces.items.len),
|
||||
.src_loc = .none,
|
||||
});
|
||||
}
|
||||
}
|
||||
if (remaining_references) |remaining| ref_traces.appendAssumeCapacity(
|
||||
.{ .decl_name = remaining, .src_loc = .none },
|
||||
);
|
||||
|
||||
const src_loc = try eb.addSourceLocation(.{
|
||||
.src_path = try eb.addString(file_path),
|
||||
@ -3177,7 +3229,7 @@ pub fn addModuleErrorMsg(mod: *Module, eb: *ErrorBundle.Wip, module_err_msg: Mod
|
||||
.span_end = err_span.end,
|
||||
.line = @intCast(err_loc.line),
|
||||
.column = @intCast(err_loc.column),
|
||||
.source_line = if (module_err_msg.src_loc.lazy == .entire_file)
|
||||
.source_line = if (err_src_loc.lazy == .entire_file)
|
||||
0
|
||||
else
|
||||
try eb.addString(err_loc.source_line),
|
||||
@ -3194,10 +3246,11 @@ pub fn addModuleErrorMsg(mod: *Module, eb: *ErrorBundle.Wip, module_err_msg: Mod
|
||||
defer notes.deinit(gpa);
|
||||
|
||||
for (module_err_msg.notes) |module_note| {
|
||||
const source = try module_note.src_loc.file_scope.getSource(gpa);
|
||||
const span = try module_note.src_loc.span(gpa);
|
||||
const note_src_loc = module_note.src_loc.upgrade(mod);
|
||||
const source = try note_src_loc.file_scope.getSource(gpa);
|
||||
const span = try note_src_loc.span(gpa);
|
||||
const loc = std.zig.findLineColumn(source.bytes, span.main);
|
||||
const note_file_path = try module_note.src_loc.file_scope.fullPath(gpa);
|
||||
const note_file_path = try note_src_loc.file_scope.fullPath(gpa);
|
||||
defer gpa.free(note_file_path);
|
||||
|
||||
const gop = try notes.getOrPutContext(gpa, .{
|
||||
@ -3348,7 +3401,7 @@ pub fn performAllTheWork(
|
||||
if (try zcu.findOutdatedToAnalyze()) |outdated| {
|
||||
switch (outdated.unwrap()) {
|
||||
.decl => |decl| try comp.work_queue.writeItem(.{ .analyze_decl = decl }),
|
||||
.func => |func| try comp.work_queue.writeItem(.{ .codegen_func = func }),
|
||||
.func => |func| try comp.work_queue.writeItem(.{ .analyze_func = func }),
|
||||
}
|
||||
continue;
|
||||
}
|
||||
@ -3398,6 +3451,14 @@ fn processOneJob(comp: *Compilation, job: Job, prog_node: std.Progress.Node) !vo
|
||||
const named_frame = tracy.namedFrame("codegen_func");
|
||||
defer named_frame.end();
|
||||
|
||||
const module = comp.module.?;
|
||||
// This call takes ownership of `func.air`.
|
||||
try module.linkerUpdateFunc(func.func, func.air);
|
||||
},
|
||||
.analyze_func => |func| {
|
||||
const named_frame = tracy.namedFrame("analyze_func");
|
||||
defer named_frame.end();
|
||||
|
||||
const module = comp.module.?;
|
||||
module.ensureFuncBodyAnalyzed(func) catch |err| switch (err) {
|
||||
error.OutOfMemory => return error.OutOfMemory,
|
||||
@ -3405,6 +3466,9 @@ fn processOneJob(comp: *Compilation, job: Job, prog_node: std.Progress.Node) !vo
|
||||
};
|
||||
},
|
||||
.emit_h_decl => |decl_index| {
|
||||
if (true) @panic("regressed compiler feature: emit-h should hook into updateExports, " ++
|
||||
"not decl analysis, which is too early to know about @export calls");
|
||||
|
||||
const module = comp.module.?;
|
||||
const decl = module.declPtr(decl_index);
|
||||
|
||||
@ -3477,6 +3541,16 @@ fn processOneJob(comp: *Compilation, job: Job, prog_node: std.Progress.Node) !vo
|
||||
try module.ensureFuncBodyAnalysisQueued(decl.val.toIntern());
|
||||
}
|
||||
},
|
||||
.resolve_type_fully => |ty| {
|
||||
const named_frame = tracy.namedFrame("resolve_type_fully");
|
||||
defer named_frame.end();
|
||||
|
||||
const zcu = comp.module.?;
|
||||
Type.fromInterned(ty).resolveFully(zcu) catch |err| switch (err) {
|
||||
error.OutOfMemory => return error.OutOfMemory,
|
||||
error.AnalysisFail => return,
|
||||
};
|
||||
},
|
||||
.update_line_number => |decl_index| {
|
||||
const named_frame = tracy.namedFrame("update_line_number");
|
||||
defer named_frame.end();
|
||||
@ -3486,15 +3560,18 @@ fn processOneJob(comp: *Compilation, job: Job, prog_node: std.Progress.Node) !vo
|
||||
const decl = module.declPtr(decl_index);
|
||||
const lf = comp.bin_file.?;
|
||||
lf.updateDeclLineNumber(module, decl_index) catch |err| {
|
||||
try module.failed_decls.ensureUnusedCapacity(gpa, 1);
|
||||
module.failed_decls.putAssumeCapacityNoClobber(decl_index, try Module.ErrorMsg.create(
|
||||
gpa,
|
||||
decl.navSrcLoc(module).upgrade(module),
|
||||
"unable to update line number: {s}",
|
||||
.{@errorName(err)},
|
||||
));
|
||||
try module.failed_analysis.ensureUnusedCapacity(gpa, 1);
|
||||
module.failed_analysis.putAssumeCapacityNoClobber(
|
||||
InternPool.AnalUnit.wrap(.{ .decl = decl_index }),
|
||||
try Module.ErrorMsg.create(
|
||||
gpa,
|
||||
decl.navSrcLoc(module),
|
||||
"unable to update line number: {s}",
|
||||
.{@errorName(err)},
|
||||
),
|
||||
);
|
||||
decl.analysis = .codegen_failure;
|
||||
try module.retryable_failures.append(gpa, InternPool.AnalSubject.wrap(.{ .decl = decl_index }));
|
||||
try module.retryable_failures.append(gpa, InternPool.AnalUnit.wrap(.{ .decl = decl_index }));
|
||||
};
|
||||
},
|
||||
.analyze_mod => |pkg| {
|
||||
@ -3989,9 +4066,8 @@ fn workerAstGenFile(
|
||||
const res = mod.importFile(file, import_path) catch continue;
|
||||
if (!res.is_pkg) {
|
||||
res.file.addReference(mod.*, .{ .import = .{
|
||||
.file_scope = file,
|
||||
.base_node = 0,
|
||||
.lazy = .{ .token_abs = item.data.token },
|
||||
.file = file,
|
||||
.token = item.data.token,
|
||||
} }) catch continue;
|
||||
}
|
||||
break :blk res;
|
||||
@ -4364,20 +4440,14 @@ fn reportRetryableAstGenError(
|
||||
|
||||
file.status = .retryable_failure;
|
||||
|
||||
const src_loc: Module.SrcLoc = switch (src) {
|
||||
const src_loc: Module.LazySrcLoc = switch (src) {
|
||||
.root => .{
|
||||
.file_scope = file,
|
||||
.base_node = 0,
|
||||
.lazy = .entire_file,
|
||||
.base_node_inst = try mod.intern_pool.trackZir(gpa, file, .main_struct_inst),
|
||||
.offset = .entire_file,
|
||||
},
|
||||
.import => |info| blk: {
|
||||
const importing_file = info.importing_file;
|
||||
|
||||
break :blk .{
|
||||
.file_scope = importing_file,
|
||||
.base_node = 0,
|
||||
.lazy = .{ .token_abs = info.import_tok },
|
||||
};
|
||||
.import => |info| .{
|
||||
.base_node_inst = try mod.intern_pool.trackZir(gpa, info.importing_file, .main_struct_inst),
|
||||
.offset = .{ .token_abs = info.import_tok },
|
||||
},
|
||||
};
|
||||
|
||||
|
||||
@ -81,7 +81,7 @@ namespace_name_deps: std.AutoArrayHashMapUnmanaged(NamespaceNameKey, DepEntry.In
|
||||
/// Given a `Depender`, points to an entry in `dep_entries` whose `depender`
|
||||
/// matches. The `next_dependee` field can be used to iterate all such entries
|
||||
/// and remove them from the corresponding lists.
|
||||
first_dependency: std.AutoArrayHashMapUnmanaged(AnalSubject, DepEntry.Index) = .{},
|
||||
first_dependency: std.AutoArrayHashMapUnmanaged(AnalUnit, DepEntry.Index) = .{},
|
||||
|
||||
/// Stores dependency information. The hashmaps declared above are used to look
|
||||
/// up entries in this list as required. This is not stored in `extra` so that
|
||||
@ -132,36 +132,36 @@ pub fn trackZir(ip: *InternPool, gpa: Allocator, file: *Module.File, inst: Zir.I
|
||||
return @enumFromInt(gop.index);
|
||||
}
|
||||
|
||||
/// Analysis Subject. Represents a single entity which undergoes semantic analysis.
|
||||
/// Analysis Unit. Represents a single entity which undergoes semantic analysis.
|
||||
/// This is either a `Decl` (in future `Cau`) or a runtime function.
|
||||
/// The LSB is used as a tag bit.
|
||||
/// This is the "source" of an incremental dependency edge.
|
||||
pub const AnalSubject = packed struct(u32) {
|
||||
pub const AnalUnit = packed struct(u32) {
|
||||
kind: enum(u1) { decl, func },
|
||||
index: u31,
|
||||
pub const Unwrapped = union(enum) {
|
||||
decl: DeclIndex,
|
||||
func: InternPool.Index,
|
||||
};
|
||||
pub fn unwrap(as: AnalSubject) Unwrapped {
|
||||
pub fn unwrap(as: AnalUnit) Unwrapped {
|
||||
return switch (as.kind) {
|
||||
.decl => .{ .decl = @enumFromInt(as.index) },
|
||||
.func => .{ .func = @enumFromInt(as.index) },
|
||||
};
|
||||
}
|
||||
pub fn wrap(raw: Unwrapped) AnalSubject {
|
||||
pub fn wrap(raw: Unwrapped) AnalUnit {
|
||||
return switch (raw) {
|
||||
.decl => |decl| .{ .kind = .decl, .index = @intCast(@intFromEnum(decl)) },
|
||||
.func => |func| .{ .kind = .func, .index = @intCast(@intFromEnum(func)) },
|
||||
};
|
||||
}
|
||||
pub fn toOptional(as: AnalSubject) Optional {
|
||||
pub fn toOptional(as: AnalUnit) Optional {
|
||||
return @enumFromInt(@as(u32, @bitCast(as)));
|
||||
}
|
||||
pub const Optional = enum(u32) {
|
||||
none = std.math.maxInt(u32),
|
||||
_,
|
||||
pub fn unwrap(opt: Optional) ?AnalSubject {
|
||||
pub fn unwrap(opt: Optional) ?AnalUnit {
|
||||
return switch (opt) {
|
||||
.none => null,
|
||||
_ => @bitCast(@intFromEnum(opt)),
|
||||
@ -178,7 +178,7 @@ pub const Dependee = union(enum) {
|
||||
namespace_name: NamespaceNameKey,
|
||||
};
|
||||
|
||||
pub fn removeDependenciesForDepender(ip: *InternPool, gpa: Allocator, depender: AnalSubject) void {
|
||||
pub fn removeDependenciesForDepender(ip: *InternPool, gpa: Allocator, depender: AnalUnit) void {
|
||||
var opt_idx = (ip.first_dependency.fetchSwapRemove(depender) orelse return).value.toOptional();
|
||||
|
||||
while (opt_idx.unwrap()) |idx| {
|
||||
@ -207,7 +207,7 @@ pub fn removeDependenciesForDepender(ip: *InternPool, gpa: Allocator, depender:
|
||||
pub const DependencyIterator = struct {
|
||||
ip: *const InternPool,
|
||||
next_entry: DepEntry.Index.Optional,
|
||||
pub fn next(it: *DependencyIterator) ?AnalSubject {
|
||||
pub fn next(it: *DependencyIterator) ?AnalUnit {
|
||||
const idx = it.next_entry.unwrap() orelse return null;
|
||||
const entry = it.ip.dep_entries.items[@intFromEnum(idx)];
|
||||
it.next_entry = entry.next;
|
||||
@ -236,7 +236,7 @@ pub fn dependencyIterator(ip: *const InternPool, dependee: Dependee) DependencyI
|
||||
};
|
||||
}
|
||||
|
||||
pub fn addDependency(ip: *InternPool, gpa: Allocator, depender: AnalSubject, dependee: Dependee) Allocator.Error!void {
|
||||
pub fn addDependency(ip: *InternPool, gpa: Allocator, depender: AnalUnit, dependee: Dependee) Allocator.Error!void {
|
||||
const first_depender_dep: DepEntry.Index.Optional = if (ip.first_dependency.get(depender)) |idx| dep: {
|
||||
// The entry already exists, so there is capacity to overwrite it later.
|
||||
break :dep idx.toOptional();
|
||||
@ -300,7 +300,7 @@ pub const DepEntry = extern struct {
|
||||
/// the first and only entry in one of `intern_pool.*_deps`, and does not
|
||||
/// appear in any list by `first_dependency`, but is not in
|
||||
/// `free_dep_entries` since `*_deps` stores a reference to it.
|
||||
depender: AnalSubject.Optional,
|
||||
depender: AnalUnit.Optional,
|
||||
/// Index into `dep_entries` forming a doubly linked list of all dependencies on this dependee.
|
||||
/// Used to iterate all dependers for a given dependee during an update.
|
||||
/// null if this is the end of the list.
|
||||
|
||||
@ -3,7 +3,7 @@ const assert = std.debug.assert;
|
||||
const Order = std.math.Order;
|
||||
|
||||
const InternPool = @import("InternPool.zig");
|
||||
const Type = @import("type.zig").Type;
|
||||
const Type = @import("Type.zig");
|
||||
const Value = @import("Value.zig");
|
||||
const Zcu = @import("Zcu.zig");
|
||||
/// Deprecated.
|
||||
|
||||
1775
src/Sema.zig
1775
src/Sema.zig
File diff suppressed because it is too large
Load Diff
@ -78,8 +78,8 @@ fn bitCastInner(

const val_ty = val.typeOf(zcu);

try sema.resolveTypeLayout(val_ty);
try sema.resolveTypeLayout(dest_ty);
try val_ty.resolveLayout(zcu);
try dest_ty.resolveLayout(zcu);

assert(val_ty.hasWellDefinedLayout(zcu));

@ -136,8 +136,8 @@ fn bitCastSpliceInner(
const val_ty = val.typeOf(zcu);
const splice_val_ty = splice_val.typeOf(zcu);

try sema.resolveTypeLayout(val_ty);
try sema.resolveTypeLayout(splice_val_ty);
try val_ty.resolveLayout(zcu);
try splice_val_ty.resolveLayout(zcu);

const splice_bits = splice_val_ty.bitSize(zcu);

@ -767,6 +767,6 @@ const assert = std.debug.assert;
const Sema = @import("../Sema.zig");
const Zcu = @import("../Zcu.zig");
const InternPool = @import("../InternPool.zig");
const Type = @import("../type.zig").Type;
const Type = @import("../Type.zig");
const Value = @import("../Value.zig");
const CompileError = Zcu.CompileError;

@ -1054,7 +1054,7 @@ const ComptimeAllocIndex = InternPool.ComptimeAllocIndex;
const Sema = @import("../Sema.zig");
const Block = Sema.Block;
const MutableValue = @import("../mutable_value.zig").MutableValue;
const Type = @import("../type.zig").Type;
const Type = @import("../Type.zig");
const Value = @import("../Value.zig");
const Zcu = @import("../Zcu.zig");
const LazySrcLoc = Zcu.LazySrcLoc;

4009 src/Type.zig Normal file
File diff suppressed because it is too large
308 src/Value.zig
@ -1,6 +1,6 @@
const std = @import("std");
const builtin = @import("builtin");
const Type = @import("type.zig").Type;
const Type = @import("Type.zig");
const assert = std.debug.assert;
const BigIntConst = std.math.big.int.Const;
const BigIntMutable = std.math.big.int.Mutable;
@ -161,9 +161,11 @@ pub fn intFromEnum(val: Value, ty: Type, mod: *Module) Allocator.Error!Value {
};
}

pub const ResolveStrat = Type.ResolveStrat;

/// Asserts the value is an integer.
pub fn toBigInt(val: Value, space: *BigIntSpace, mod: *Module) BigIntConst {
return val.toBigIntAdvanced(space, mod, null) catch unreachable;
return val.toBigIntAdvanced(space, mod, .normal) catch unreachable;
}

/// Asserts the value is an integer.
@ -171,7 +173,7 @@ pub fn toBigIntAdvanced(
val: Value,
space: *BigIntSpace,
mod: *Module,
opt_sema: ?*Sema,
strat: ResolveStrat,
) Module.CompileError!BigIntConst {
return switch (val.toIntern()) {
.bool_false => BigIntMutable.init(&space.limbs, 0).toConst(),
@ -181,7 +183,7 @@ pub fn toBigIntAdvanced(
.int => |int| switch (int.storage) {
.u64, .i64, .big_int => int.storage.toBigInt(space),
.lazy_align, .lazy_size => |ty| {
if (opt_sema) |sema| try sema.resolveTypeLayout(Type.fromInterned(ty));
if (strat == .sema) try Type.fromInterned(ty).resolveLayout(mod);
const x = switch (int.storage) {
else => unreachable,
.lazy_align => Type.fromInterned(ty).abiAlignment(mod).toByteUnits() orelse 0,
@ -190,10 +192,10 @@ pub fn toBigIntAdvanced(
return BigIntMutable.init(&space.limbs, x).toConst();
},
},
.enum_tag => |enum_tag| Value.fromInterned(enum_tag.int).toBigIntAdvanced(space, mod, opt_sema),
.enum_tag => |enum_tag| Value.fromInterned(enum_tag.int).toBigIntAdvanced(space, mod, strat),
.opt, .ptr => BigIntMutable.init(
&space.limbs,
(try val.getUnsignedIntAdvanced(mod, opt_sema)).?,
(try val.getUnsignedIntAdvanced(mod, strat)).?,
).toConst(),
else => unreachable,
},
@ -228,12 +230,12 @@ pub fn getVariable(val: Value, mod: *Module) ?InternPool.Key.Variable {
/// If the value fits in a u64, return it, otherwise null.
/// Asserts not undefined.
pub fn getUnsignedInt(val: Value, mod: *Module) ?u64 {
return getUnsignedIntAdvanced(val, mod, null) catch unreachable;
return getUnsignedIntAdvanced(val, mod, .normal) catch unreachable;
}

/// If the value fits in a u64, return it, otherwise null.
/// Asserts not undefined.
pub fn getUnsignedIntAdvanced(val: Value, mod: *Module, opt_sema: ?*Sema) !?u64 {
pub fn getUnsignedIntAdvanced(val: Value, mod: *Module, strat: ResolveStrat) !?u64 {
return switch (val.toIntern()) {
.undef => unreachable,
.bool_false => 0,
@ -244,28 +246,22 @@ pub fn getUnsignedIntAdvanced(val: Value, mod: *Module, opt_sema: ?*Sema) !?u64
.big_int => |big_int| big_int.to(u64) catch null,
.u64 => |x| x,
.i64 => |x| std.math.cast(u64, x),
.lazy_align => |ty| if (opt_sema) |sema|
(try Type.fromInterned(ty).abiAlignmentAdvanced(mod, .{ .sema = sema })).scalar.toByteUnits() orelse 0
else
Type.fromInterned(ty).abiAlignment(mod).toByteUnits() orelse 0,
.lazy_size => |ty| if (opt_sema) |sema|
(try Type.fromInterned(ty).abiSizeAdvanced(mod, .{ .sema = sema })).scalar
else
Type.fromInterned(ty).abiSize(mod),
.lazy_align => |ty| (try Type.fromInterned(ty).abiAlignmentAdvanced(mod, strat.toLazy())).scalar.toByteUnits() orelse 0,
.lazy_size => |ty| (try Type.fromInterned(ty).abiSizeAdvanced(mod, strat.toLazy())).scalar,
},
.ptr => |ptr| switch (ptr.base_addr) {
.int => ptr.byte_offset,
.field => |field| {
const base_addr = (try Value.fromInterned(field.base).getUnsignedIntAdvanced(mod, opt_sema)) orelse return null;
const base_addr = (try Value.fromInterned(field.base).getUnsignedIntAdvanced(mod, strat)) orelse return null;
const struct_ty = Value.fromInterned(field.base).typeOf(mod).childType(mod);
if (opt_sema) |sema| try sema.resolveTypeLayout(struct_ty);
if (strat == .sema) try struct_ty.resolveLayout(mod);
return base_addr + struct_ty.structFieldOffset(@intCast(field.index), mod) + ptr.byte_offset;
},
else => null,
},
.opt => |opt| switch (opt.val) {
.none => 0,
else => |payload| Value.fromInterned(payload).getUnsignedIntAdvanced(mod, opt_sema),
else => |payload| Value.fromInterned(payload).getUnsignedIntAdvanced(mod, strat),
},
else => null,
},
@ -273,13 +269,13 @@ pub fn getUnsignedIntAdvanced(val: Value, mod: *Module, opt_sema: ?*Sema) !?u64
|
||||
}
|
||||
|
||||
/// Asserts the value is an integer and it fits in a u64
|
||||
pub fn toUnsignedInt(val: Value, mod: *Module) u64 {
|
||||
return getUnsignedInt(val, mod).?;
|
||||
pub fn toUnsignedInt(val: Value, zcu: *Zcu) u64 {
|
||||
return getUnsignedInt(val, zcu).?;
|
||||
}
|
||||
|
||||
/// Asserts the value is an integer and it fits in a u64
|
||||
pub fn toUnsignedIntAdvanced(val: Value, sema: *Sema) !u64 {
|
||||
return (try getUnsignedIntAdvanced(val, sema.mod, sema)).?;
|
||||
pub fn toUnsignedIntSema(val: Value, zcu: *Zcu) !u64 {
|
||||
return (try getUnsignedIntAdvanced(val, zcu, .sema)).?;
|
||||
}
|
||||
|
||||
/// Asserts the value is an integer and it fits in a i64
|
||||
@ -1028,13 +1024,13 @@ pub fn floatHasFraction(self: Value, mod: *const Module) bool {
|
||||
}
|
||||
|
||||
pub fn orderAgainstZero(lhs: Value, mod: *Module) std.math.Order {
|
||||
return orderAgainstZeroAdvanced(lhs, mod, null) catch unreachable;
|
||||
return orderAgainstZeroAdvanced(lhs, mod, .normal) catch unreachable;
|
||||
}
|
||||
|
||||
pub fn orderAgainstZeroAdvanced(
|
||||
lhs: Value,
|
||||
mod: *Module,
|
||||
opt_sema: ?*Sema,
|
||||
strat: ResolveStrat,
|
||||
) Module.CompileError!std.math.Order {
|
||||
return switch (lhs.toIntern()) {
|
||||
.bool_false => .eq,
|
||||
@ -1052,13 +1048,13 @@ pub fn orderAgainstZeroAdvanced(
|
||||
.lazy_size => |ty| return if (Type.fromInterned(ty).hasRuntimeBitsAdvanced(
|
||||
mod,
|
||||
false,
|
||||
if (opt_sema) |sema| .{ .sema = sema } else .eager,
|
||||
strat.toLazy(),
|
||||
) catch |err| switch (err) {
|
||||
error.NeedLazy => unreachable,
|
||||
else => |e| return e,
|
||||
}) .gt else .eq,
|
||||
},
|
||||
.enum_tag => |enum_tag| Value.fromInterned(enum_tag.int).orderAgainstZeroAdvanced(mod, opt_sema),
|
||||
.enum_tag => |enum_tag| Value.fromInterned(enum_tag.int).orderAgainstZeroAdvanced(mod, strat),
|
||||
.float => |float| switch (float.storage) {
|
||||
inline else => |x| std.math.order(x, 0),
|
||||
},
|
||||
@ -1069,14 +1065,13 @@ pub fn orderAgainstZeroAdvanced(
|
||||
|
||||
/// Asserts the value is comparable.
|
||||
pub fn order(lhs: Value, rhs: Value, mod: *Module) std.math.Order {
|
||||
return orderAdvanced(lhs, rhs, mod, null) catch unreachable;
|
||||
return orderAdvanced(lhs, rhs, mod, .normal) catch unreachable;
|
||||
}
|
||||
|
||||
/// Asserts the value is comparable.
|
||||
/// If opt_sema is null then this function asserts things are resolved and cannot fail.
|
||||
pub fn orderAdvanced(lhs: Value, rhs: Value, mod: *Module, opt_sema: ?*Sema) !std.math.Order {
|
||||
const lhs_against_zero = try lhs.orderAgainstZeroAdvanced(mod, opt_sema);
|
||||
const rhs_against_zero = try rhs.orderAgainstZeroAdvanced(mod, opt_sema);
|
||||
pub fn orderAdvanced(lhs: Value, rhs: Value, mod: *Module, strat: ResolveStrat) !std.math.Order {
|
||||
const lhs_against_zero = try lhs.orderAgainstZeroAdvanced(mod, strat);
|
||||
const rhs_against_zero = try rhs.orderAgainstZeroAdvanced(mod, strat);
|
||||
switch (lhs_against_zero) {
|
||||
.lt => if (rhs_against_zero != .lt) return .lt,
|
||||
.eq => return rhs_against_zero.invert(),
|
||||
@ -1096,15 +1091,15 @@ pub fn orderAdvanced(lhs: Value, rhs: Value, mod: *Module, opt_sema: ?*Sema) !st
|
||||
|
||||
var lhs_bigint_space: BigIntSpace = undefined;
|
||||
var rhs_bigint_space: BigIntSpace = undefined;
|
||||
const lhs_bigint = try lhs.toBigIntAdvanced(&lhs_bigint_space, mod, opt_sema);
|
||||
const rhs_bigint = try rhs.toBigIntAdvanced(&rhs_bigint_space, mod, opt_sema);
|
||||
const lhs_bigint = try lhs.toBigIntAdvanced(&lhs_bigint_space, mod, strat);
|
||||
const rhs_bigint = try rhs.toBigIntAdvanced(&rhs_bigint_space, mod, strat);
|
||||
return lhs_bigint.order(rhs_bigint);
|
||||
}
|
||||
|
||||
/// Asserts the value is comparable. Does not take a type parameter because it supports
|
||||
/// comparisons between heterogeneous types.
|
||||
pub fn compareHetero(lhs: Value, op: std.math.CompareOperator, rhs: Value, mod: *Module) bool {
|
||||
return compareHeteroAdvanced(lhs, op, rhs, mod, null) catch unreachable;
|
||||
return compareHeteroAdvanced(lhs, op, rhs, mod, .normal) catch unreachable;
|
||||
}
|
||||
|
||||
pub fn compareHeteroAdvanced(
|
||||
@ -1112,7 +1107,7 @@ pub fn compareHeteroAdvanced(
|
||||
op: std.math.CompareOperator,
|
||||
rhs: Value,
|
||||
mod: *Module,
|
||||
opt_sema: ?*Sema,
|
||||
strat: ResolveStrat,
|
||||
) !bool {
|
||||
if (lhs.pointerDecl(mod)) |lhs_decl| {
|
||||
if (rhs.pointerDecl(mod)) |rhs_decl| {
|
||||
@ -1135,7 +1130,7 @@ pub fn compareHeteroAdvanced(
|
||||
else => {},
|
||||
}
|
||||
}
|
||||
return (try orderAdvanced(lhs, rhs, mod, opt_sema)).compare(op);
|
||||
return (try orderAdvanced(lhs, rhs, mod, strat)).compare(op);
|
||||
}
|
||||
|
||||
/// Asserts the values are comparable. Both operands have type `ty`.
|
||||
@ -1176,22 +1171,22 @@ pub fn compareScalar(
|
||||
///
|
||||
/// Note that `!compareAllWithZero(.eq, ...) != compareAllWithZero(.neq, ...)`
|
||||
pub fn compareAllWithZero(lhs: Value, op: std.math.CompareOperator, mod: *Module) bool {
|
||||
return compareAllWithZeroAdvancedExtra(lhs, op, mod, null) catch unreachable;
|
||||
return compareAllWithZeroAdvancedExtra(lhs, op, mod, .normal) catch unreachable;
|
||||
}
|
||||
|
||||
pub fn compareAllWithZeroAdvanced(
|
||||
pub fn compareAllWithZeroSema(
|
||||
lhs: Value,
|
||||
op: std.math.CompareOperator,
|
||||
sema: *Sema,
|
||||
zcu: *Zcu,
|
||||
) Module.CompileError!bool {
|
||||
return compareAllWithZeroAdvancedExtra(lhs, op, sema.mod, sema);
|
||||
return compareAllWithZeroAdvancedExtra(lhs, op, zcu, .sema);
|
||||
}
|
||||
|
||||
pub fn compareAllWithZeroAdvancedExtra(
|
||||
lhs: Value,
|
||||
op: std.math.CompareOperator,
|
||||
mod: *Module,
|
||||
opt_sema: ?*Sema,
|
||||
strat: ResolveStrat,
|
||||
) Module.CompileError!bool {
|
||||
if (lhs.isInf(mod)) {
|
||||
switch (op) {
|
||||
@ -1211,14 +1206,14 @@ pub fn compareAllWithZeroAdvancedExtra(
|
||||
if (!std.math.order(byte, 0).compare(op)) break false;
|
||||
} else true,
|
||||
.elems => |elems| for (elems) |elem| {
|
||||
if (!try Value.fromInterned(elem).compareAllWithZeroAdvancedExtra(op, mod, opt_sema)) break false;
|
||||
if (!try Value.fromInterned(elem).compareAllWithZeroAdvancedExtra(op, mod, strat)) break false;
|
||||
} else true,
|
||||
.repeated_elem => |elem| Value.fromInterned(elem).compareAllWithZeroAdvancedExtra(op, mod, opt_sema),
|
||||
.repeated_elem => |elem| Value.fromInterned(elem).compareAllWithZeroAdvancedExtra(op, mod, strat),
|
||||
},
|
||||
.undef => return false,
|
||||
else => {},
|
||||
}
|
||||
return (try orderAgainstZeroAdvanced(lhs, mod, opt_sema)).compare(op);
|
||||
return (try orderAgainstZeroAdvanced(lhs, mod, strat)).compare(op);
|
||||
}
|
||||
|
||||
pub fn eql(a: Value, b: Value, ty: Type, mod: *Module) bool {
|
||||
@ -1279,9 +1274,9 @@ pub fn slicePtr(val: Value, mod: *Module) Value {
|
||||
}
|
||||
|
||||
/// Gets the `len` field of a slice value as a `u64`.
|
||||
/// Resolves the length using the provided `Sema` if necessary.
|
||||
pub fn sliceLen(val: Value, sema: *Sema) !u64 {
|
||||
return Value.fromInterned(sema.mod.intern_pool.sliceLen(val.toIntern())).toUnsignedIntAdvanced(sema);
|
||||
/// Resolves the length using `Sema` if necessary.
|
||||
pub fn sliceLen(val: Value, zcu: *Zcu) !u64 {
|
||||
return Value.fromInterned(zcu.intern_pool.sliceLen(val.toIntern())).toUnsignedIntSema(zcu);
|
||||
}
|
||||
|
||||
/// Asserts the value is an aggregate, and returns the element value at the given index.
|
||||
@ -1482,29 +1477,29 @@ pub fn isFloat(self: Value, mod: *const Module) bool {
|
||||
}
|
||||
|
||||
pub fn floatFromInt(val: Value, arena: Allocator, int_ty: Type, float_ty: Type, mod: *Module) !Value {
|
||||
return floatFromIntAdvanced(val, arena, int_ty, float_ty, mod, null) catch |err| switch (err) {
|
||||
return floatFromIntAdvanced(val, arena, int_ty, float_ty, mod, .normal) catch |err| switch (err) {
|
||||
error.OutOfMemory => return error.OutOfMemory,
|
||||
else => unreachable,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn floatFromIntAdvanced(val: Value, arena: Allocator, int_ty: Type, float_ty: Type, mod: *Module, opt_sema: ?*Sema) !Value {
|
||||
pub fn floatFromIntAdvanced(val: Value, arena: Allocator, int_ty: Type, float_ty: Type, mod: *Module, strat: ResolveStrat) !Value {
|
||||
if (int_ty.zigTypeTag(mod) == .Vector) {
|
||||
const result_data = try arena.alloc(InternPool.Index, int_ty.vectorLen(mod));
|
||||
const scalar_ty = float_ty.scalarType(mod);
|
||||
for (result_data, 0..) |*scalar, i| {
|
||||
const elem_val = try val.elemValue(mod, i);
|
||||
scalar.* = (try floatFromIntScalar(elem_val, scalar_ty, mod, opt_sema)).toIntern();
|
||||
scalar.* = (try floatFromIntScalar(elem_val, scalar_ty, mod, strat)).toIntern();
|
||||
}
|
||||
return Value.fromInterned((try mod.intern(.{ .aggregate = .{
|
||||
.ty = float_ty.toIntern(),
|
||||
.storage = .{ .elems = result_data },
|
||||
} })));
|
||||
}
|
||||
return floatFromIntScalar(val, float_ty, mod, opt_sema);
|
||||
return floatFromIntScalar(val, float_ty, mod, strat);
|
||||
}
|
||||
|
||||
pub fn floatFromIntScalar(val: Value, float_ty: Type, mod: *Module, opt_sema: ?*Sema) !Value {
|
||||
pub fn floatFromIntScalar(val: Value, float_ty: Type, mod: *Module, strat: ResolveStrat) !Value {
|
||||
return switch (mod.intern_pool.indexToKey(val.toIntern())) {
|
||||
.undef => try mod.undefValue(float_ty),
|
||||
.int => |int| switch (int.storage) {
|
||||
@ -1513,16 +1508,8 @@ pub fn floatFromIntScalar(val: Value, float_ty: Type, mod: *Module, opt_sema: ?*
|
||||
return mod.floatValue(float_ty, float);
|
||||
},
|
||||
inline .u64, .i64 => |x| floatFromIntInner(x, float_ty, mod),
|
||||
.lazy_align => |ty| if (opt_sema) |sema| {
|
||||
return floatFromIntInner((try Type.fromInterned(ty).abiAlignmentAdvanced(mod, .{ .sema = sema })).scalar.toByteUnits() orelse 0, float_ty, mod);
|
||||
} else {
|
||||
return floatFromIntInner(Type.fromInterned(ty).abiAlignment(mod).toByteUnits() orelse 0, float_ty, mod);
|
||||
},
|
||||
.lazy_size => |ty| if (opt_sema) |sema| {
|
||||
return floatFromIntInner((try Type.fromInterned(ty).abiSizeAdvanced(mod, .{ .sema = sema })).scalar, float_ty, mod);
|
||||
} else {
|
||||
return floatFromIntInner(Type.fromInterned(ty).abiSize(mod), float_ty, mod);
|
||||
},
|
||||
.lazy_align => |ty| return floatFromIntInner((try Type.fromInterned(ty).abiAlignmentAdvanced(mod, strat.toLazy())).scalar.toByteUnits() orelse 0, float_ty, mod),
|
||||
.lazy_size => |ty| return floatFromIntInner((try Type.fromInterned(ty).abiSizeAdvanced(mod, strat.toLazy())).scalar, float_ty, mod),
|
||||
},
|
||||
else => unreachable,
|
||||
};
|
||||
@ -3616,17 +3603,15 @@ pub const RuntimeIndex = InternPool.RuntimeIndex;
|
||||
|
||||
/// `parent_ptr` must be a single-pointer to some optional.
|
||||
/// Returns a pointer to the payload of the optional.
|
||||
/// This takes a `Sema` because it may need to perform type resolution.
|
||||
pub fn ptrOptPayload(parent_ptr: Value, sema: *Sema) !Value {
|
||||
const zcu = sema.mod;
|
||||
|
||||
/// May perform type resolution.
|
||||
pub fn ptrOptPayload(parent_ptr: Value, zcu: *Zcu) !Value {
|
||||
const parent_ptr_ty = parent_ptr.typeOf(zcu);
|
||||
const opt_ty = parent_ptr_ty.childType(zcu);
|
||||
|
||||
assert(parent_ptr_ty.ptrSize(zcu) == .One);
|
||||
assert(opt_ty.zigTypeTag(zcu) == .Optional);
|
||||
|
||||
const result_ty = try sema.ptrType(info: {
|
||||
const result_ty = try zcu.ptrTypeSema(info: {
|
||||
var new = parent_ptr_ty.ptrInfo(zcu);
|
||||
// We can correctly preserve alignment `.none`, since an optional has the same
|
||||
// natural alignment as its child type.
|
||||
@ -3651,17 +3636,15 @@ pub fn ptrOptPayload(parent_ptr: Value, sema: *Sema) !Value {
|
||||
|
||||
/// `parent_ptr` must be a single-pointer to some error union.
|
||||
/// Returns a pointer to the payload of the error union.
|
||||
/// This takes a `Sema` because it may need to perform type resolution.
|
||||
pub fn ptrEuPayload(parent_ptr: Value, sema: *Sema) !Value {
|
||||
const zcu = sema.mod;
|
||||
|
||||
/// May perform type resolution.
|
||||
pub fn ptrEuPayload(parent_ptr: Value, zcu: *Zcu) !Value {
|
||||
const parent_ptr_ty = parent_ptr.typeOf(zcu);
|
||||
const eu_ty = parent_ptr_ty.childType(zcu);
|
||||
|
||||
assert(parent_ptr_ty.ptrSize(zcu) == .One);
|
||||
assert(eu_ty.zigTypeTag(zcu) == .ErrorUnion);
|
||||
|
||||
const result_ty = try sema.ptrType(info: {
|
||||
const result_ty = try zcu.ptrTypeSema(info: {
|
||||
var new = parent_ptr_ty.ptrInfo(zcu);
|
||||
// We can correctly preserve alignment `.none`, since an error union has a
|
||||
// natural alignment greater than or equal to that of its payload type.
|
||||
@ -3682,10 +3665,8 @@ pub fn ptrEuPayload(parent_ptr: Value, sema: *Sema) !Value {
|
||||
/// `parent_ptr` must be a single-pointer to a struct, union, or slice.
|
||||
/// Returns a pointer to the aggregate field at the specified index.
|
||||
/// For slices, uses `slice_ptr_index` and `slice_len_index`.
|
||||
/// This takes a `Sema` because it may need to perform type resolution.
|
||||
pub fn ptrField(parent_ptr: Value, field_idx: u32, sema: *Sema) !Value {
|
||||
const zcu = sema.mod;
|
||||
|
||||
/// May perform type resolution.
|
||||
pub fn ptrField(parent_ptr: Value, field_idx: u32, zcu: *Zcu) !Value {
|
||||
const parent_ptr_ty = parent_ptr.typeOf(zcu);
|
||||
const aggregate_ty = parent_ptr_ty.childType(zcu);
|
||||
|
||||
@ -3698,17 +3679,17 @@ pub fn ptrField(parent_ptr: Value, field_idx: u32, sema: *Sema) !Value {
|
||||
.Struct => field: {
|
||||
const field_ty = aggregate_ty.structFieldType(field_idx, zcu);
|
||||
switch (aggregate_ty.containerLayout(zcu)) {
|
||||
.auto => break :field .{ field_ty, try aggregate_ty.structFieldAlignAdvanced(@intCast(field_idx), zcu, sema) },
|
||||
.auto => break :field .{ field_ty, try aggregate_ty.structFieldAlignAdvanced(@intCast(field_idx), zcu, .sema) },
|
||||
.@"extern" => {
|
||||
// Well-defined layout, so just offset the pointer appropriately.
|
||||
const byte_off = aggregate_ty.structFieldOffset(field_idx, zcu);
|
||||
const field_align = a: {
|
||||
const parent_align = if (parent_ptr_info.flags.alignment == .none) pa: {
|
||||
break :pa try sema.typeAbiAlignment(aggregate_ty);
|
||||
break :pa (try aggregate_ty.abiAlignmentAdvanced(zcu, .sema)).scalar;
|
||||
} else parent_ptr_info.flags.alignment;
|
||||
break :a InternPool.Alignment.fromLog2Units(@min(parent_align.toLog2Units(), @ctz(byte_off)));
|
||||
};
|
||||
const result_ty = try sema.ptrType(info: {
|
||||
const result_ty = try zcu.ptrTypeSema(info: {
|
||||
var new = parent_ptr_info;
|
||||
new.child = field_ty.toIntern();
|
||||
new.flags.alignment = field_align;
|
||||
@ -3723,14 +3704,14 @@ pub fn ptrField(parent_ptr: Value, field_idx: u32, sema: *Sema) !Value {
|
||||
new.packed_offset = packed_offset;
|
||||
new.child = field_ty.toIntern();
|
||||
if (new.flags.alignment == .none) {
|
||||
new.flags.alignment = try sema.typeAbiAlignment(aggregate_ty);
|
||||
new.flags.alignment = (try aggregate_ty.abiAlignmentAdvanced(zcu, .sema)).scalar;
|
||||
}
|
||||
break :info new;
|
||||
});
|
||||
return zcu.getCoerced(parent_ptr, result_ty);
|
||||
},
|
||||
.byte_ptr => |ptr_info| {
|
||||
const result_ty = try sema.ptrType(info: {
|
||||
const result_ty = try zcu.ptrTypeSema(info: {
|
||||
var new = parent_ptr_info;
|
||||
new.child = field_ty.toIntern();
|
||||
new.packed_offset = .{
|
||||
@ -3749,10 +3730,10 @@ pub fn ptrField(parent_ptr: Value, field_idx: u32, sema: *Sema) !Value {
|
||||
const union_obj = zcu.typeToUnion(aggregate_ty).?;
|
||||
const field_ty = Type.fromInterned(union_obj.field_types.get(&zcu.intern_pool)[field_idx]);
|
||||
switch (aggregate_ty.containerLayout(zcu)) {
|
||||
.auto => break :field .{ field_ty, try aggregate_ty.structFieldAlignAdvanced(@intCast(field_idx), zcu, sema) },
|
||||
.auto => break :field .{ field_ty, try aggregate_ty.structFieldAlignAdvanced(@intCast(field_idx), zcu, .sema) },
|
||||
.@"extern" => {
|
||||
// Point to the same address.
|
||||
const result_ty = try sema.ptrType(info: {
|
||||
const result_ty = try zcu.ptrTypeSema(info: {
|
||||
var new = parent_ptr_info;
|
||||
new.child = field_ty.toIntern();
|
||||
break :info new;
|
||||
@ -3762,28 +3743,28 @@ pub fn ptrField(parent_ptr: Value, field_idx: u32, sema: *Sema) !Value {
|
||||
.@"packed" => {
|
||||
// If the field has an ABI size matching its bit size, then we can continue to use a
|
||||
// non-bit pointer if the parent pointer is also a non-bit pointer.
|
||||
if (parent_ptr_info.packed_offset.host_size == 0 and try sema.typeAbiSize(field_ty) * 8 == try field_ty.bitSizeAdvanced(zcu, sema)) {
|
||||
if (parent_ptr_info.packed_offset.host_size == 0 and (try field_ty.abiSizeAdvanced(zcu, .sema)).scalar * 8 == try field_ty.bitSizeAdvanced(zcu, .sema)) {
|
||||
// We must offset the pointer on big-endian targets, since the bits of packed memory don't align nicely.
|
||||
const byte_offset = switch (zcu.getTarget().cpu.arch.endian()) {
|
||||
.little => 0,
|
||||
.big => try sema.typeAbiSize(aggregate_ty) - try sema.typeAbiSize(field_ty),
|
||||
.big => (try aggregate_ty.abiSizeAdvanced(zcu, .sema)).scalar - (try field_ty.abiSizeAdvanced(zcu, .sema)).scalar,
|
||||
};
|
||||
const result_ty = try sema.ptrType(info: {
|
||||
const result_ty = try zcu.ptrTypeSema(info: {
|
||||
var new = parent_ptr_info;
|
||||
new.child = field_ty.toIntern();
|
||||
new.flags.alignment = InternPool.Alignment.fromLog2Units(
|
||||
@ctz(byte_offset | (try parent_ptr_ty.ptrAlignmentAdvanced(zcu, sema)).toByteUnits().?),
|
||||
@ctz(byte_offset | (try parent_ptr_ty.ptrAlignmentAdvanced(zcu, .sema)).toByteUnits().?),
|
||||
);
|
||||
break :info new;
|
||||
});
|
||||
return parent_ptr.getOffsetPtr(byte_offset, result_ty, zcu);
|
||||
} else {
|
||||
// The result must be a bit-pointer if it is not already.
|
||||
const result_ty = try sema.ptrType(info: {
|
||||
const result_ty = try zcu.ptrTypeSema(info: {
|
||||
var new = parent_ptr_info;
|
||||
new.child = field_ty.toIntern();
|
||||
if (new.packed_offset.host_size == 0) {
|
||||
new.packed_offset.host_size = @intCast(((try aggregate_ty.bitSizeAdvanced(zcu, sema)) + 7) / 8);
|
||||
new.packed_offset.host_size = @intCast(((try aggregate_ty.bitSizeAdvanced(zcu, .sema)) + 7) / 8);
|
||||
assert(new.packed_offset.bit_offset == 0);
|
||||
}
|
||||
break :info new;
|
||||
@ -3805,14 +3786,14 @@ pub fn ptrField(parent_ptr: Value, field_idx: u32, sema: *Sema) !Value {
|
||||
};
|
||||
|
||||
const new_align: InternPool.Alignment = if (parent_ptr_info.flags.alignment != .none) a: {
|
||||
const ty_align = try sema.typeAbiAlignment(field_ty);
|
||||
const ty_align = (try field_ty.abiAlignmentAdvanced(zcu, .sema)).scalar;
|
||||
const true_field_align = if (field_align == .none) ty_align else field_align;
|
||||
const new_align = true_field_align.min(parent_ptr_info.flags.alignment);
|
||||
if (new_align == ty_align) break :a .none;
|
||||
break :a new_align;
|
||||
} else field_align;
|
||||
|
||||
const result_ty = try sema.ptrType(info: {
|
||||
const result_ty = try zcu.ptrTypeSema(info: {
|
||||
var new = parent_ptr_info;
|
||||
new.child = field_ty.toIntern();
|
||||
new.flags.alignment = new_align;
|
||||
@ -3834,10 +3815,8 @@ pub fn ptrField(parent_ptr: Value, field_idx: u32, sema: *Sema) !Value {
|
||||
|
||||
/// `orig_parent_ptr` must be either a single-pointer to an array or vector, or a many-pointer or C-pointer or slice.
|
||||
/// Returns a pointer to the element at the specified index.
|
||||
/// This takes a `Sema` because it may need to perform type resolution.
|
||||
pub fn ptrElem(orig_parent_ptr: Value, field_idx: u64, sema: *Sema) !Value {
|
||||
const zcu = sema.mod;
|
||||
|
||||
/// May perform type resolution.
|
||||
pub fn ptrElem(orig_parent_ptr: Value, field_idx: u64, zcu: *Zcu) !Value {
|
||||
const parent_ptr = switch (orig_parent_ptr.typeOf(zcu).ptrSize(zcu)) {
|
||||
.One, .Many, .C => orig_parent_ptr,
|
||||
.Slice => orig_parent_ptr.slicePtr(zcu),
|
||||
@ -3845,7 +3824,7 @@ pub fn ptrElem(orig_parent_ptr: Value, field_idx: u64, sema: *Sema) !Value {
|
||||
|
||||
const parent_ptr_ty = parent_ptr.typeOf(zcu);
|
||||
const elem_ty = parent_ptr_ty.childType(zcu);
|
||||
const result_ty = try sema.elemPtrType(parent_ptr_ty, @intCast(field_idx));
|
||||
const result_ty = try parent_ptr_ty.elemPtrType(@intCast(field_idx), zcu);
|
||||
|
||||
if (parent_ptr.isUndef(zcu)) return zcu.undefValue(result_ty);
|
||||
|
||||
@ -3862,21 +3841,21 @@ pub fn ptrElem(orig_parent_ptr: Value, field_idx: u64, sema: *Sema) !Value {
|
||||
|
||||
const strat: PtrStrat = switch (parent_ptr_ty.ptrSize(zcu)) {
|
||||
.One => switch (elem_ty.zigTypeTag(zcu)) {
|
||||
.Vector => .{ .offset = field_idx * @divExact(try elem_ty.childType(zcu).bitSizeAdvanced(zcu, sema), 8) },
|
||||
.Vector => .{ .offset = field_idx * @divExact(try elem_ty.childType(zcu).bitSizeAdvanced(zcu, .sema), 8) },
|
||||
.Array => strat: {
|
||||
const arr_elem_ty = elem_ty.childType(zcu);
|
||||
if (try sema.typeRequiresComptime(arr_elem_ty)) {
|
||||
if (try arr_elem_ty.comptimeOnlyAdvanced(zcu, .sema)) {
|
||||
break :strat .{ .elem_ptr = arr_elem_ty };
|
||||
}
|
||||
break :strat .{ .offset = field_idx * try sema.typeAbiSize(arr_elem_ty) };
|
||||
break :strat .{ .offset = field_idx * (try arr_elem_ty.abiSizeAdvanced(zcu, .sema)).scalar };
|
||||
},
|
||||
else => unreachable,
|
||||
},
|
||||
|
||||
.Many, .C => if (try sema.typeRequiresComptime(elem_ty))
|
||||
.Many, .C => if (try elem_ty.comptimeOnlyAdvanced(zcu, .sema))
|
||||
.{ .elem_ptr = elem_ty }
|
||||
else
|
||||
.{ .offset = field_idx * try sema.typeAbiSize(elem_ty) },
|
||||
.{ .offset = field_idx * (try elem_ty.abiSizeAdvanced(zcu, .sema)).scalar },
|
||||
|
||||
.Slice => unreachable,
|
||||
};
|
||||
@ -4014,11 +3993,7 @@ pub const PointerDeriveStep = union(enum) {
|
||||
pub fn pointerDerivation(ptr_val: Value, arena: Allocator, zcu: *Zcu) Allocator.Error!PointerDeriveStep {
|
||||
return ptr_val.pointerDerivationAdvanced(arena, zcu, null) catch |err| switch (err) {
|
||||
error.OutOfMemory => |e| return e,
|
||||
error.AnalysisFail,
|
||||
error.GenericPoison,
|
||||
error.ComptimeReturn,
|
||||
error.ComptimeBreak,
|
||||
=> unreachable,
|
||||
error.AnalysisFail => unreachable,
|
||||
};
|
||||
}
|
||||
|
||||
@ -4087,8 +4062,8 @@ pub fn pointerDerivationAdvanced(ptr_val: Value, arena: Allocator, zcu: *Zcu, op
|
||||
const base_ptr_ty = base_ptr.typeOf(zcu);
|
||||
const agg_ty = base_ptr_ty.childType(zcu);
|
||||
const field_ty, const field_align = switch (agg_ty.zigTypeTag(zcu)) {
|
||||
.Struct => .{ agg_ty.structFieldType(@intCast(field.index), zcu), try agg_ty.structFieldAlignAdvanced(@intCast(field.index), zcu, opt_sema) },
|
||||
.Union => .{ agg_ty.unionFieldTypeByIndex(@intCast(field.index), zcu), try agg_ty.structFieldAlignAdvanced(@intCast(field.index), zcu, opt_sema) },
|
||||
.Struct => .{ agg_ty.structFieldType(@intCast(field.index), zcu), try agg_ty.structFieldAlignAdvanced(@intCast(field.index), zcu, .sema) },
|
||||
.Union => .{ agg_ty.unionFieldTypeByIndex(@intCast(field.index), zcu), try agg_ty.structFieldAlignAdvanced(@intCast(field.index), zcu, .sema) },
|
||||
.Pointer => .{ switch (field.index) {
|
||||
Value.slice_ptr_index => agg_ty.slicePtrFieldType(zcu),
|
||||
Value.slice_len_index => Type.usize,
|
||||
@ -4269,3 +4244,118 @@ pub fn pointerDerivationAdvanced(ptr_val: Value, arena: Allocator, zcu: *Zcu, op
|
||||
.new_ptr_ty = Type.fromInterned(ptr.ty),
|
||||
} };
|
||||
}
|
||||
|
||||
pub fn resolveLazy(val: Value, arena: Allocator, zcu: *Zcu) Zcu.SemaError!Value {
|
||||
switch (zcu.intern_pool.indexToKey(val.toIntern())) {
|
||||
.int => |int| switch (int.storage) {
|
||||
.u64, .i64, .big_int => return val,
|
||||
.lazy_align, .lazy_size => return zcu.intValue(
|
||||
Type.fromInterned(int.ty),
|
||||
(try val.getUnsignedIntAdvanced(zcu, .sema)).?,
|
||||
),
|
||||
},
|
||||
.slice => |slice| {
|
||||
const ptr = try Value.fromInterned(slice.ptr).resolveLazy(arena, zcu);
|
||||
const len = try Value.fromInterned(slice.len).resolveLazy(arena, zcu);
|
||||
if (ptr.toIntern() == slice.ptr and len.toIntern() == slice.len) return val;
|
||||
return Value.fromInterned(try zcu.intern(.{ .slice = .{
|
||||
.ty = slice.ty,
|
||||
.ptr = ptr.toIntern(),
|
||||
.len = len.toIntern(),
|
||||
} }));
|
||||
},
|
||||
.ptr => |ptr| {
|
||||
switch (ptr.base_addr) {
|
||||
.decl, .comptime_alloc, .anon_decl, .int => return val,
|
||||
.comptime_field => |field_val| {
|
||||
const resolved_field_val = (try Value.fromInterned(field_val).resolveLazy(arena, zcu)).toIntern();
|
||||
return if (resolved_field_val == field_val)
|
||||
val
|
||||
else
|
||||
Value.fromInterned((try zcu.intern(.{ .ptr = .{
|
||||
.ty = ptr.ty,
|
||||
.base_addr = .{ .comptime_field = resolved_field_val },
|
||||
.byte_offset = ptr.byte_offset,
|
||||
} })));
|
||||
},
|
||||
.eu_payload, .opt_payload => |base| {
|
||||
const resolved_base = (try Value.fromInterned(base).resolveLazy(arena, zcu)).toIntern();
|
||||
return if (resolved_base == base)
|
||||
val
|
||||
else
|
||||
Value.fromInterned((try zcu.intern(.{ .ptr = .{
|
||||
.ty = ptr.ty,
|
||||
.base_addr = switch (ptr.base_addr) {
|
||||
.eu_payload => .{ .eu_payload = resolved_base },
|
||||
.opt_payload => .{ .opt_payload = resolved_base },
|
||||
else => unreachable,
|
||||
},
|
||||
.byte_offset = ptr.byte_offset,
|
||||
} })));
|
||||
},
|
||||
.arr_elem, .field => |base_index| {
|
||||
const resolved_base = (try Value.fromInterned(base_index.base).resolveLazy(arena, zcu)).toIntern();
|
||||
return if (resolved_base == base_index.base)
|
||||
val
|
||||
else
|
||||
Value.fromInterned((try zcu.intern(.{ .ptr = .{
|
||||
.ty = ptr.ty,
|
||||
.base_addr = switch (ptr.base_addr) {
|
||||
.arr_elem => .{ .arr_elem = .{
|
||||
.base = resolved_base,
|
||||
.index = base_index.index,
|
||||
} },
|
||||
.field => .{ .field = .{
|
||||
.base = resolved_base,
|
||||
.index = base_index.index,
|
||||
} },
|
||||
else => unreachable,
|
||||
},
|
||||
.byte_offset = ptr.byte_offset,
|
||||
} })));
|
||||
},
|
||||
}
|
||||
},
|
||||
.aggregate => |aggregate| switch (aggregate.storage) {
|
||||
.bytes => return val,
|
||||
.elems => |elems| {
|
||||
var resolved_elems: []InternPool.Index = &.{};
|
||||
for (elems, 0..) |elem, i| {
|
||||
const resolved_elem = (try Value.fromInterned(elem).resolveLazy(arena, zcu)).toIntern();
|
||||
if (resolved_elems.len == 0 and resolved_elem != elem) {
|
||||
resolved_elems = try arena.alloc(InternPool.Index, elems.len);
|
||||
@memcpy(resolved_elems[0..i], elems[0..i]);
|
||||
}
|
||||
if (resolved_elems.len > 0) resolved_elems[i] = resolved_elem;
|
||||
}
|
||||
return if (resolved_elems.len == 0) val else Value.fromInterned((try zcu.intern(.{ .aggregate = .{
|
||||
.ty = aggregate.ty,
|
||||
.storage = .{ .elems = resolved_elems },
|
||||
} })));
|
||||
},
|
||||
.repeated_elem => |elem| {
|
||||
const resolved_elem = (try Value.fromInterned(elem).resolveLazy(arena, zcu)).toIntern();
|
||||
return if (resolved_elem == elem) val else Value.fromInterned((try zcu.intern(.{ .aggregate = .{
|
||||
.ty = aggregate.ty,
|
||||
.storage = .{ .repeated_elem = resolved_elem },
|
||||
} })));
|
||||
},
|
||||
},
|
||||
.un => |un| {
|
||||
const resolved_tag = if (un.tag == .none)
|
||||
.none
|
||||
else
|
||||
(try Value.fromInterned(un.tag).resolveLazy(arena, zcu)).toIntern();
|
||||
const resolved_val = (try Value.fromInterned(un.val).resolveLazy(arena, zcu)).toIntern();
|
||||
return if (resolved_tag == un.tag and resolved_val == un.val)
|
||||
val
|
||||
else
|
||||
Value.fromInterned((try zcu.intern(.{ .un = .{
|
||||
.ty = un.ty,
|
||||
.tag = resolved_tag,
|
||||
.val = resolved_val,
|
||||
} })));
|
||||
},
|
||||
else => return val,
|
||||
}
|
||||
}
|
||||
|
||||
790 src/Zcu.zig
File diff suppressed because it is too large
@ -8,7 +8,7 @@ const Air = @import("../../Air.zig");
const Mir = @import("Mir.zig");
const Emit = @import("Emit.zig");
const Liveness = @import("../../Liveness.zig");
const Type = @import("../../type.zig").Type;
const Type = @import("../../Type.zig");
const Value = @import("../../Value.zig");
const link = @import("../../link.zig");
const Zcu = @import("../../Zcu.zig");
@ -59,7 +59,7 @@ args: []MCValue,
ret_mcv: MCValue,
fn_type: Type,
arg_index: u32,
src_loc: Module.SrcLoc,
src_loc: Module.LazySrcLoc,
stack_align: u32,

/// MIR Instructions
@ -331,7 +331,7 @@ const Self = @This();

pub fn generate(
lf: *link.File,
src_loc: Module.SrcLoc,
src_loc: Module.LazySrcLoc,
func_index: InternPool.Index,
air: Air,
liveness: Liveness,

@ -22,7 +22,7 @@ bin_file: *link.File,
|
||||
debug_output: DebugInfoOutput,
|
||||
target: *const std.Target,
|
||||
err_msg: ?*ErrorMsg = null,
|
||||
src_loc: Module.SrcLoc,
|
||||
src_loc: Module.LazySrcLoc,
|
||||
code: *std.ArrayList(u8),
|
||||
|
||||
prev_di_line: u32,
|
||||
|
||||
@ -3,7 +3,7 @@ const builtin = @import("builtin");
|
||||
const bits = @import("bits.zig");
|
||||
const Register = bits.Register;
|
||||
const RegisterManagerFn = @import("../../register_manager.zig").RegisterManager;
|
||||
const Type = @import("../../type.zig").Type;
|
||||
const Type = @import("../../Type.zig");
|
||||
const Zcu = @import("../../Zcu.zig");
|
||||
/// Deprecated.
|
||||
const Module = Zcu;
|
||||
|
||||
@ -8,7 +8,7 @@ const Air = @import("../../Air.zig");
|
||||
const Mir = @import("Mir.zig");
|
||||
const Emit = @import("Emit.zig");
|
||||
const Liveness = @import("../../Liveness.zig");
|
||||
const Type = @import("../../type.zig").Type;
|
||||
const Type = @import("../../Type.zig");
|
||||
const Value = @import("../../Value.zig");
|
||||
const link = @import("../../link.zig");
|
||||
const Zcu = @import("../../Zcu.zig");
|
||||
@ -59,7 +59,7 @@ args: []MCValue,
|
||||
ret_mcv: MCValue,
|
||||
fn_type: Type,
|
||||
arg_index: u32,
|
||||
src_loc: Module.SrcLoc,
|
||||
src_loc: Module.LazySrcLoc,
|
||||
stack_align: u32,
|
||||
|
||||
/// MIR Instructions
|
||||
@ -338,7 +338,7 @@ const Self = @This();
|
||||
|
||||
pub fn generate(
|
||||
lf: *link.File,
|
||||
src_loc: Module.SrcLoc,
|
||||
src_loc: Module.LazySrcLoc,
|
||||
func_index: InternPool.Index,
|
||||
air: Air,
|
||||
liveness: Liveness,
|
||||
|
||||
@ -11,7 +11,7 @@ const link = @import("../../link.zig");
|
||||
const Zcu = @import("../../Zcu.zig");
|
||||
/// Deprecated.
|
||||
const Module = Zcu;
|
||||
const Type = @import("../../type.zig").Type;
|
||||
const Type = @import("../../Type.zig");
|
||||
const ErrorMsg = Module.ErrorMsg;
|
||||
const Target = std.Target;
|
||||
const assert = std.debug.assert;
|
||||
@ -26,7 +26,7 @@ bin_file: *link.File,
|
||||
debug_output: DebugInfoOutput,
|
||||
target: *const std.Target,
|
||||
err_msg: ?*ErrorMsg = null,
|
||||
src_loc: Module.SrcLoc,
|
||||
src_loc: Module.LazySrcLoc,
|
||||
code: *std.ArrayList(u8),
|
||||
|
||||
prev_di_line: u32,
|
||||
|
||||
@ -3,7 +3,7 @@ const assert = std.debug.assert;
|
||||
const bits = @import("bits.zig");
|
||||
const Register = bits.Register;
|
||||
const RegisterManagerFn = @import("../../register_manager.zig").RegisterManager;
|
||||
const Type = @import("../../type.zig").Type;
|
||||
const Type = @import("../../Type.zig");
|
||||
const Zcu = @import("../../Zcu.zig");
|
||||
/// Deprecated.
|
||||
const Module = Zcu;
|
||||
|
||||
@ -7,7 +7,7 @@ const Air = @import("../../Air.zig");
|
||||
const Mir = @import("Mir.zig");
|
||||
const Emit = @import("Emit.zig");
|
||||
const Liveness = @import("../../Liveness.zig");
|
||||
const Type = @import("../../type.zig").Type;
|
||||
const Type = @import("../../Type.zig");
|
||||
const Value = @import("../../Value.zig");
|
||||
const link = @import("../../link.zig");
|
||||
const Zcu = @import("../../Zcu.zig");
|
||||
@ -59,7 +59,7 @@ args: []MCValue,
|
||||
ret_mcv: InstTracking,
|
||||
fn_type: Type,
|
||||
arg_index: usize,
|
||||
src_loc: Zcu.SrcLoc,
|
||||
src_loc: Zcu.LazySrcLoc,
|
||||
|
||||
/// MIR Instructions
|
||||
mir_instructions: std.MultiArrayList(Mir.Inst) = .{},
|
||||
@ -696,7 +696,7 @@ const CallView = enum(u1) {
|
||||
|
||||
pub fn generate(
|
||||
bin_file: *link.File,
|
||||
src_loc: Zcu.SrcLoc,
|
||||
src_loc: Zcu.LazySrcLoc,
|
||||
func_index: InternPool.Index,
|
||||
air: Air,
|
||||
liveness: Liveness,
|
||||
|
||||
@ -8,7 +8,7 @@ allocator: Allocator,
|
||||
mir: Mir,
|
||||
cc: std.builtin.CallingConvention,
|
||||
err_msg: ?*ErrorMsg = null,
|
||||
src_loc: Zcu.SrcLoc,
|
||||
src_loc: Zcu.LazySrcLoc,
|
||||
result_insts_len: u8 = undefined,
|
||||
result_relocs_len: u8 = undefined,
|
||||
result_insts: [
|
||||
|
||||
@ -431,7 +431,7 @@ pub const RegisterList = struct {
|
||||
const Mir = @This();
|
||||
const std = @import("std");
|
||||
const builtin = @import("builtin");
|
||||
const Type = @import("../../type.zig").Type;
|
||||
const Type = @import("../../Type.zig");
|
||||
|
||||
const assert = std.debug.assert;
|
||||
|
||||
|
||||
@ -2,7 +2,7 @@ const std = @import("std");
|
||||
const bits = @import("bits.zig");
|
||||
const Register = bits.Register;
|
||||
const RegisterManagerFn = @import("../../register_manager.zig").RegisterManager;
|
||||
const Type = @import("../../type.zig").Type;
|
||||
const Type = @import("../../Type.zig");
|
||||
const InternPool = @import("../../InternPool.zig");
|
||||
const Zcu = @import("../../Zcu.zig");
|
||||
const assert = std.debug.assert;
|
||||
|
||||
@ -21,7 +21,7 @@ const Air = @import("../../Air.zig");
|
||||
const Mir = @import("Mir.zig");
|
||||
const Emit = @import("Emit.zig");
|
||||
const Liveness = @import("../../Liveness.zig");
|
||||
const Type = @import("../../type.zig").Type;
|
||||
const Type = @import("../../Type.zig");
|
||||
const CodeGenError = codegen.CodeGenError;
|
||||
const Result = @import("../../codegen.zig").Result;
|
||||
const DebugInfoOutput = @import("../../codegen.zig").DebugInfoOutput;
|
||||
@ -64,7 +64,7 @@ args: []MCValue,
|
||||
ret_mcv: MCValue,
|
||||
fn_type: Type,
|
||||
arg_index: usize,
|
||||
src_loc: Module.SrcLoc,
|
||||
src_loc: Module.LazySrcLoc,
|
||||
stack_align: Alignment,
|
||||
|
||||
/// MIR Instructions
|
||||
@ -263,7 +263,7 @@ const BigTomb = struct {
|
||||
|
||||
pub fn generate(
|
||||
lf: *link.File,
|
||||
src_loc: Module.SrcLoc,
|
||||
src_loc: Module.LazySrcLoc,
|
||||
func_index: InternPool.Index,
|
||||
air: Air,
|
||||
liveness: Liveness,
|
||||
|
||||
@ -24,7 +24,7 @@ bin_file: *link.File,
|
||||
debug_output: DebugInfoOutput,
|
||||
target: *const std.Target,
|
||||
err_msg: ?*ErrorMsg = null,
|
||||
src_loc: Module.SrcLoc,
|
||||
src_loc: Module.LazySrcLoc,
|
||||
code: *std.ArrayList(u8),
|
||||
|
||||
prev_di_line: u32,
|
||||
|
||||
@ -13,7 +13,7 @@ const codegen = @import("../../codegen.zig");
|
||||
const Zcu = @import("../../Zcu.zig");
|
||||
const InternPool = @import("../../InternPool.zig");
|
||||
const Decl = Zcu.Decl;
|
||||
const Type = @import("../../type.zig").Type;
|
||||
const Type = @import("../../Type.zig");
|
||||
const Value = @import("../../Value.zig");
|
||||
const Compilation = @import("../../Compilation.zig");
|
||||
const link = @import("../../link.zig");
|
||||
@ -765,7 +765,7 @@ pub fn deinit(func: *CodeGen) void {
/// Sets `err_msg` on `CodeGen` and returns `error.CodegenFail` which is caught in link/Wasm.zig
fn fail(func: *CodeGen, comptime fmt: []const u8, args: anytype) InnerError {
const mod = func.bin_file.base.comp.module.?;
const src_loc = func.decl.navSrcLoc(mod).upgrade(mod);
const src_loc = func.decl.navSrcLoc(mod);
func.err_msg = try Zcu.ErrorMsg.create(func.gpa, src_loc, fmt, args);
return error.CodegenFail;
}
@ -1202,7 +1202,7 @@ fn genFunctype(
|
||||
|
||||
pub fn generate(
|
||||
bin_file: *link.File,
|
||||
src_loc: Zcu.SrcLoc,
|
||||
src_loc: Zcu.LazySrcLoc,
|
||||
func_index: InternPool.Index,
|
||||
air: Air,
|
||||
liveness: Liveness,
|
||||
@ -3162,7 +3162,7 @@ fn lowerAnonDeclRef(
|
||||
}
|
||||
|
||||
const decl_align = mod.intern_pool.indexToKey(anon_decl.orig_ty).ptr_type.flags.alignment;
|
||||
const res = try func.bin_file.lowerAnonDecl(decl_val, decl_align, func.decl.navSrcLoc(mod).upgrade(mod));
|
||||
const res = try func.bin_file.lowerAnonDecl(decl_val, decl_align, func.decl.navSrcLoc(mod));
|
||||
switch (res) {
|
||||
.ok => {},
|
||||
.fail => |em| {
|
||||
|
||||
@ -257,7 +257,7 @@ fn fail(emit: *Emit, comptime format: []const u8, args: anytype) InnerError {
|
||||
const comp = emit.bin_file.base.comp;
|
||||
const zcu = comp.module.?;
|
||||
const gpa = comp.gpa;
|
||||
emit.error_msg = try Zcu.ErrorMsg.create(gpa, zcu.declPtr(emit.decl_index).navSrcLoc(zcu).upgrade(zcu), format, args);
|
||||
emit.error_msg = try Zcu.ErrorMsg.create(gpa, zcu.declPtr(emit.decl_index).navSrcLoc(zcu), format, args);
|
||||
return error.EmitFail;
|
||||
}
|
||||
|
||||
|
||||
@ -8,7 +8,7 @@ const std = @import("std");
|
||||
const Target = std.Target;
|
||||
const assert = std.debug.assert;
|
||||
|
||||
const Type = @import("../../type.zig").Type;
|
||||
const Type = @import("../../Type.zig");
|
||||
const Zcu = @import("../../Zcu.zig");
|
||||
|
||||
/// Defines how to pass a type as part of a function signature,
|
||||
|
||||
@ -32,7 +32,7 @@ const Module = Zcu;
|
||||
const InternPool = @import("../../InternPool.zig");
|
||||
const Alignment = InternPool.Alignment;
|
||||
const Target = std.Target;
|
||||
const Type = @import("../../type.zig").Type;
|
||||
const Type = @import("../../Type.zig");
|
||||
const Value = @import("../../Value.zig");
|
||||
const Instruction = @import("encoder.zig").Instruction;
|
||||
|
||||
@ -74,7 +74,7 @@ va_info: union {
|
||||
ret_mcv: InstTracking,
|
||||
fn_type: Type,
|
||||
arg_index: u32,
|
||||
src_loc: Module.SrcLoc,
|
||||
src_loc: Module.LazySrcLoc,
|
||||
|
||||
eflags_inst: ?Air.Inst.Index = null,
|
||||
|
||||
@ -795,7 +795,7 @@ const Self = @This();
|
||||
|
||||
pub fn generate(
|
||||
bin_file: *link.File,
|
||||
src_loc: Module.SrcLoc,
|
||||
src_loc: Module.LazySrcLoc,
|
||||
func_index: InternPool.Index,
|
||||
air: Air,
|
||||
liveness: Liveness,
|
||||
@ -971,7 +971,7 @@ pub fn generate(
|
||||
|
||||
pub fn generateLazy(
|
||||
bin_file: *link.File,
|
||||
src_loc: Module.SrcLoc,
|
||||
src_loc: Module.LazySrcLoc,
|
||||
lazy_sym: link.File.LazySymbol,
|
||||
code: *std.ArrayList(u8),
|
||||
debug_output: DebugInfoOutput,
|
||||
|
||||
@ -8,7 +8,7 @@ allocator: Allocator,
|
||||
mir: Mir,
|
||||
cc: std.builtin.CallingConvention,
|
||||
err_msg: ?*ErrorMsg = null,
|
||||
src_loc: Module.SrcLoc,
|
||||
src_loc: Module.LazySrcLoc,
|
||||
result_insts_len: u8 = undefined,
|
||||
result_relocs_len: u8 = undefined,
|
||||
result_insts: [
|
||||
|
||||
@ -537,6 +537,6 @@ const testing = std.testing;
|
||||
const InternPool = @import("../../InternPool.zig");
|
||||
const Register = @import("bits.zig").Register;
|
||||
const RegisterManagerFn = @import("../../register_manager.zig").RegisterManager;
|
||||
const Type = @import("../../type.zig").Type;
|
||||
const Type = @import("../../Type.zig");
|
||||
const Value = @import("../../Value.zig");
|
||||
const Zcu = @import("../../Zcu.zig");
|
||||
|
||||
@ -20,7 +20,7 @@ const Zcu = @import("Zcu.zig");
|
||||
/// Deprecated.
|
||||
const Module = Zcu;
|
||||
const Target = std.Target;
|
||||
const Type = @import("type.zig").Type;
|
||||
const Type = @import("Type.zig");
|
||||
const Value = @import("Value.zig");
|
||||
const Zir = std.zig.Zir;
|
||||
const Alignment = InternPool.Alignment;
|
||||
@ -47,7 +47,7 @@ pub const DebugInfoOutput = union(enum) {
|
||||
|
||||
pub fn generateFunction(
|
||||
lf: *link.File,
|
||||
src_loc: Module.SrcLoc,
|
||||
src_loc: Module.LazySrcLoc,
|
||||
func_index: InternPool.Index,
|
||||
air: Air,
|
||||
liveness: Liveness,
|
||||
@ -79,7 +79,7 @@ pub fn generateFunction(
|
||||
|
||||
pub fn generateLazyFunction(
|
||||
lf: *link.File,
|
||||
src_loc: Module.SrcLoc,
|
||||
src_loc: Module.LazySrcLoc,
|
||||
lazy_sym: link.File.LazySymbol,
|
||||
code: *std.ArrayList(u8),
|
||||
debug_output: DebugInfoOutput,
|
||||
@ -105,7 +105,7 @@ fn writeFloat(comptime F: type, f: F, target: Target, endian: std.builtin.Endian
|
||||
|
||||
pub fn generateLazySymbol(
|
||||
bin_file: *link.File,
|
||||
src_loc: Module.SrcLoc,
|
||||
src_loc: Module.LazySrcLoc,
|
||||
lazy_sym: link.File.LazySymbol,
|
||||
// TODO don't use an "out" parameter like this; put it in the result instead
|
||||
alignment: *Alignment,
|
||||
@ -171,7 +171,7 @@ pub fn generateLazySymbol(
|
||||
|
||||
pub fn generateSymbol(
|
||||
bin_file: *link.File,
|
||||
src_loc: Module.SrcLoc,
|
||||
src_loc: Module.LazySrcLoc,
|
||||
val: Value,
|
||||
code: *std.ArrayList(u8),
|
||||
debug_output: DebugInfoOutput,
|
||||
@ -618,7 +618,7 @@ pub fn generateSymbol(
|
||||
|
||||
fn lowerPtr(
|
||||
bin_file: *link.File,
|
||||
src_loc: Module.SrcLoc,
|
||||
src_loc: Module.LazySrcLoc,
|
||||
ptr_val: InternPool.Index,
|
||||
code: *std.ArrayList(u8),
|
||||
debug_output: DebugInfoOutput,
|
||||
@ -683,7 +683,7 @@ const RelocInfo = struct {
|
||||
|
||||
fn lowerAnonDeclRef(
|
||||
lf: *link.File,
|
||||
src_loc: Module.SrcLoc,
|
||||
src_loc: Module.LazySrcLoc,
|
||||
anon_decl: InternPool.Key.Ptr.BaseAddr.AnonDecl,
|
||||
code: *std.ArrayList(u8),
|
||||
debug_output: DebugInfoOutput,
|
||||
@ -730,7 +730,7 @@ fn lowerAnonDeclRef(
|
||||
|
||||
fn lowerDeclRef(
|
||||
lf: *link.File,
|
||||
src_loc: Module.SrcLoc,
|
||||
src_loc: Module.LazySrcLoc,
|
||||
decl_index: InternPool.DeclIndex,
|
||||
code: *std.ArrayList(u8),
|
||||
debug_output: DebugInfoOutput,
|
||||
@ -814,7 +814,7 @@ pub const GenResult = union(enum) {
|
||||
|
||||
fn fail(
|
||||
gpa: Allocator,
|
||||
src_loc: Module.SrcLoc,
|
||||
src_loc: Module.LazySrcLoc,
|
||||
comptime format: []const u8,
|
||||
args: anytype,
|
||||
) Allocator.Error!GenResult {
|
||||
@ -825,7 +825,7 @@ pub const GenResult = union(enum) {
|
||||
|
||||
fn genDeclRef(
|
||||
lf: *link.File,
|
||||
src_loc: Module.SrcLoc,
|
||||
src_loc: Module.LazySrcLoc,
|
||||
val: Value,
|
||||
ptr_decl_index: InternPool.DeclIndex,
|
||||
) CodeGenError!GenResult {
|
||||
@ -931,7 +931,7 @@ fn genDeclRef(
|
||||
|
||||
fn genUnnamedConst(
|
||||
lf: *link.File,
|
||||
src_loc: Module.SrcLoc,
|
||||
src_loc: Module.LazySrcLoc,
|
||||
val: Value,
|
||||
owner_decl_index: InternPool.DeclIndex,
|
||||
) CodeGenError!GenResult {
|
||||
@ -970,7 +970,7 @@ fn genUnnamedConst(
|
||||
|
||||
pub fn genTypedValue(
|
||||
lf: *link.File,
|
||||
src_loc: Module.SrcLoc,
|
||||
src_loc: Module.LazySrcLoc,
|
||||
val: Value,
|
||||
owner_decl_index: InternPool.DeclIndex,
|
||||
) CodeGenError!GenResult {
|
||||
|
||||
@ -9,7 +9,7 @@ const Zcu = @import("../Zcu.zig");
|
||||
const Module = @import("../Package/Module.zig");
|
||||
const Compilation = @import("../Compilation.zig");
|
||||
const Value = @import("../Value.zig");
|
||||
const Type = @import("../type.zig").Type;
|
||||
const Type = @import("../Type.zig");
|
||||
const C = link.File.C;
|
||||
const Decl = Zcu.Decl;
|
||||
const trace = @import("../tracy.zig").trace;
|
||||
@ -637,7 +637,7 @@ pub const DeclGen = struct {
|
||||
const zcu = dg.zcu;
|
||||
const decl_index = dg.pass.decl;
|
||||
const decl = zcu.declPtr(decl_index);
|
||||
const src_loc = decl.navSrcLoc(zcu).upgrade(zcu);
|
||||
const src_loc = decl.navSrcLoc(zcu);
|
||||
dg.error_msg = try Zcu.ErrorMsg.create(dg.gpa, src_loc, format, args);
|
||||
return error.AnalysisFail;
|
||||
}
|
||||
@ -731,8 +731,6 @@ pub const DeclGen = struct {
|
||||
if (decl.val.getExternFunc(zcu)) |extern_func| if (extern_func.decl != decl_index)
|
||||
return dg.renderDeclValue(writer, extern_func.decl, location);
|
||||
|
||||
if (decl.val.getVariable(zcu)) |variable| try dg.renderFwdDecl(decl_index, variable, .tentative);
|
||||
|
||||
// We shouldn't cast C function pointers as this is UB (when you call
|
||||
// them). The analysis until now should ensure that the C function
|
||||
// pointers are compatible. If they are not, then there is a bug
|
||||
@ -748,7 +746,7 @@ pub const DeclGen = struct {
|
||||
try writer.writeByte(')');
|
||||
}
|
||||
try writer.writeByte('&');
|
||||
try dg.renderDeclName(writer, decl_index, 0);
|
||||
try dg.renderDeclName(writer, decl_index);
|
||||
if (need_cast) try writer.writeByte(')');
|
||||
}
|
||||
|
||||
@ -1765,19 +1763,22 @@ pub const DeclGen = struct {
|
||||
fn renderFunctionSignature(
|
||||
dg: *DeclGen,
|
||||
w: anytype,
|
||||
fn_decl_index: InternPool.DeclIndex,
|
||||
fn_val: Value,
|
||||
fn_align: InternPool.Alignment,
|
||||
kind: CType.Kind,
|
||||
name: union(enum) {
|
||||
export_index: u32,
|
||||
ident: []const u8,
|
||||
decl: InternPool.DeclIndex,
|
||||
fmt_ctype_pool_string: std.fmt.Formatter(formatCTypePoolString),
|
||||
@"export": struct {
|
||||
main_name: InternPool.NullTerminatedString,
|
||||
extern_name: InternPool.NullTerminatedString,
|
||||
},
|
||||
},
|
||||
) !void {
|
||||
const zcu = dg.zcu;
|
||||
const ip = &zcu.intern_pool;
|
||||
|
||||
const fn_decl = zcu.declPtr(fn_decl_index);
|
||||
const fn_ty = fn_decl.typeOf(zcu);
|
||||
const fn_ty = fn_val.typeOf(zcu);
|
||||
const fn_ctype = try dg.ctypeFromType(fn_ty, kind);
|
||||
|
||||
const fn_info = zcu.typeToFunc(fn_ty).?;
|
||||
@ -1788,7 +1789,7 @@ pub const DeclGen = struct {
|
||||
else => unreachable,
|
||||
}
|
||||
}
|
||||
if (fn_decl.val.getFunction(zcu)) |func| if (func.analysis(ip).is_cold)
|
||||
if (fn_val.getFunction(zcu)) |func| if (func.analysis(ip).is_cold)
|
||||
try w.writeAll("zig_cold ");
|
||||
if (fn_info.return_type == .noreturn_type) try w.writeAll("zig_noreturn ");
|
||||
|
||||
@ -1799,22 +1800,11 @@ pub const DeclGen = struct {
|
||||
trailing = .maybe_space;
|
||||
}
|
||||
|
||||
switch (kind) {
|
||||
.forward => {},
|
||||
.complete => if (fn_decl.alignment.toByteUnits()) |a| {
|
||||
try w.print("{}zig_align_fn({})", .{ trailing, a });
|
||||
trailing = .maybe_space;
|
||||
},
|
||||
else => unreachable,
|
||||
}
|
||||
|
||||
try w.print("{}", .{trailing});
|
||||
switch (name) {
|
||||
.export_index => |export_index| {
|
||||
try w.print("{}", .{trailing});
|
||||
try dg.renderDeclName(w, fn_decl_index, export_index);
|
||||
},
|
||||
.ident => |ident| try w.print("{}{ }", .{ trailing, fmtIdent(ident) }),
|
||||
.fmt_ctype_pool_string => |fmt| try w.print("{}{ }", .{ trailing, fmt }),
|
||||
.decl => |decl_index| try dg.renderDeclName(w, decl_index),
|
||||
.fmt_ctype_pool_string => |fmt| try w.print("{ }", .{fmt}),
|
||||
.@"export" => |@"export"| try w.print("{ }", .{fmtIdent(@"export".extern_name.toSlice(ip))}),
|
||||
}
|
||||
|
||||
try renderTypeSuffix(
|
||||
@ -1833,44 +1823,30 @@ pub const DeclGen = struct {
|
||||
|
||||
switch (kind) {
|
||||
.forward => {
|
||||
if (fn_decl.alignment.toByteUnits()) |a| {
|
||||
try w.print(" zig_align_fn({})", .{a});
|
||||
}
|
||||
if (fn_align.toByteUnits()) |a| try w.print(" zig_align_fn({})", .{a});
|
||||
switch (name) {
|
||||
.export_index => |export_index| mangled: {
|
||||
const maybe_exports = zcu.decl_exports.get(fn_decl_index);
|
||||
const external_name = (if (maybe_exports) |exports|
|
||||
exports.items[export_index].opts.name
|
||||
else if (fn_decl.isExtern(zcu))
|
||||
fn_decl.name
|
||||
else
|
||||
break :mangled).toSlice(ip);
|
||||
const is_mangled = isMangledIdent(external_name, true);
|
||||
const is_export = export_index > 0;
|
||||
.decl, .fmt_ctype_pool_string => {},
|
||||
.@"export" => |@"export"| {
|
||||
const extern_name = @"export".extern_name.toSlice(ip);
|
||||
const is_mangled = isMangledIdent(extern_name, true);
|
||||
const is_export = @"export".extern_name != @"export".main_name;
|
||||
if (is_mangled and is_export) {
|
||||
try w.print(" zig_mangled_export({ }, {s}, {s})", .{
|
||||
fmtIdent(external_name),
|
||||
fmtStringLiteral(external_name, null),
|
||||
fmtStringLiteral(
|
||||
maybe_exports.?.items[0].opts.name.toSlice(ip),
|
||||
null,
|
||||
),
|
||||
fmtIdent(extern_name),
|
||||
fmtStringLiteral(extern_name, null),
|
||||
fmtStringLiteral(@"export".main_name.toSlice(ip), null),
|
||||
});
|
||||
} else if (is_mangled) {
|
||||
try w.print(" zig_mangled_final({ }, {s})", .{
|
||||
fmtIdent(external_name), fmtStringLiteral(external_name, null),
|
||||
try w.print(" zig_mangled({ }, {s})", .{
|
||||
fmtIdent(extern_name), fmtStringLiteral(extern_name, null),
|
||||
});
|
||||
} else if (is_export) {
|
||||
try w.print(" zig_export({s}, {s})", .{
|
||||
fmtStringLiteral(
|
||||
maybe_exports.?.items[0].opts.name.toSlice(ip),
|
||||
null,
|
||||
),
|
||||
fmtStringLiteral(external_name, null),
|
||||
fmtStringLiteral(@"export".main_name.toSlice(ip), null),
|
||||
fmtStringLiteral(extern_name, null),
|
||||
});
|
||||
}
|
||||
},
|
||||
.ident, .fmt_ctype_pool_string => {},
|
||||
}
|
||||
},
|
||||
.complete => {},
|
||||
@ -2085,21 +2061,11 @@ pub const DeclGen = struct {
|
||||
try renderTypeSuffix(dg.pass, &dg.ctype_pool, dg.zcu, w, ctype, .suffix, .{});
|
||||
}
|
||||
|
||||
fn declIsGlobal(dg: *DeclGen, val: Value) bool {
|
||||
const zcu = dg.zcu;
|
||||
return switch (zcu.intern_pool.indexToKey(val.toIntern())) {
|
||||
.variable => |variable| zcu.decl_exports.contains(variable.decl),
|
||||
.extern_func => true,
|
||||
.func => |func| zcu.decl_exports.contains(func.owner_decl),
|
||||
else => unreachable,
|
||||
};
|
||||
}
|
||||
|
||||
fn writeName(dg: *DeclGen, w: anytype, c_value: CValue) !void {
|
||||
switch (c_value) {
|
||||
.new_local, .local => |i| try w.print("t{d}", .{i}),
|
||||
.constant => |val| try renderAnonDeclName(w, val),
|
||||
.decl => |decl| try dg.renderDeclName(w, decl, 0),
|
||||
.decl => |decl| try dg.renderDeclName(w, decl),
|
||||
.identifier => |ident| try w.print("{ }", .{fmtIdent(ident)}),
|
||||
else => unreachable,
|
||||
}
|
||||
@ -2111,10 +2077,10 @@ pub const DeclGen = struct {
|
||||
.constant => |val| try renderAnonDeclName(w, val),
|
||||
.arg, .arg_array => unreachable,
|
||||
.field => |i| try w.print("f{d}", .{i}),
|
||||
.decl => |decl| try dg.renderDeclName(w, decl, 0),
|
||||
.decl => |decl| try dg.renderDeclName(w, decl),
|
||||
.decl_ref => |decl| {
|
||||
try w.writeByte('&');
|
||||
try dg.renderDeclName(w, decl, 0);
|
||||
try dg.renderDeclName(w, decl);
|
||||
},
|
||||
.undef => |ty| try dg.renderUndefValue(w, ty, .Other),
|
||||
.identifier => |ident| try w.print("{ }", .{fmtIdent(ident)}),
|
||||
@ -2142,10 +2108,10 @@ pub const DeclGen = struct {
|
||||
.field => |i| try w.print("f{d}", .{i}),
|
||||
.decl => |decl| {
|
||||
try w.writeAll("(*");
|
||||
try dg.renderDeclName(w, decl, 0);
|
||||
try dg.renderDeclName(w, decl);
|
||||
try w.writeByte(')');
|
||||
},
|
||||
.decl_ref => |decl| try dg.renderDeclName(w, decl, 0),
|
||||
.decl_ref => |decl| try dg.renderDeclName(w, decl),
|
||||
.undef => unreachable,
|
||||
.identifier => |ident| try w.print("(*{ })", .{fmtIdent(ident)}),
|
||||
.payload_identifier => |ident| try w.print("(*{ }.{ })", .{
|
||||
@ -2195,19 +2161,12 @@ pub const DeclGen = struct {
dg: *DeclGen,
decl_index: InternPool.DeclIndex,
variable: InternPool.Key.Variable,
fwd_kind: enum { tentative, final },
) !void {
const zcu = dg.zcu;
const decl = zcu.declPtr(decl_index);
const fwd = dg.fwdDeclWriter();
const is_global = variable.is_extern or dg.declIsGlobal(decl.val);
try fwd.writeAll(if (is_global) "zig_extern " else "static ");
const maybe_exports = zcu.decl_exports.get(decl_index);
const export_weak_linkage = if (maybe_exports) |exports|
exports.items[0].opts.linkage == .weak
else
false;
if (variable.is_weak_linkage or export_weak_linkage) try fwd.writeAll("zig_weak_linkage ");
try fwd.writeAll(if (variable.is_extern) "zig_extern " else "static ");
if (variable.is_weak_linkage) try fwd.writeAll("zig_weak_linkage ");
if (variable.is_threadlocal and !dg.mod.single_threaded) try fwd.writeAll("zig_threadlocal ");
try dg.renderTypeAndName(
fwd,
@ -2217,38 +2176,17 @@ pub const DeclGen = struct {
decl.alignment,
.complete,
);
mangled: {
const external_name = (if (maybe_exports) |exports|
exports.items[0].opts.name
else if (variable.is_extern)
decl.name
else
break :mangled).toSlice(&zcu.intern_pool);
if (isMangledIdent(external_name, true)) {
try fwd.print(" zig_mangled_{s}({ }, {s})", .{
@tagName(fwd_kind),
fmtIdent(external_name),
fmtStringLiteral(external_name, null),
});
}
}
try fwd.writeAll(";\n");
}

fn renderDeclName(dg: *DeclGen, writer: anytype, decl_index: InternPool.DeclIndex, export_index: u32) !void {
fn renderDeclName(dg: *DeclGen, writer: anytype, decl_index: InternPool.DeclIndex) !void {
const zcu = dg.zcu;
const ip = &zcu.intern_pool;
const decl = zcu.declPtr(decl_index);

if (zcu.decl_exports.get(decl_index)) |exports| {
try writer.print("{ }", .{
fmtIdent(exports.items[export_index].opts.name.toSlice(ip)),
});
} else if (decl.getExternDecl(zcu).unwrap()) |extern_decl_index| {
try writer.print("{ }", .{
fmtIdent(zcu.declPtr(extern_decl_index).name.toSlice(ip)),
});
} else {
if (decl.getExternDecl(zcu).unwrap()) |extern_decl_index| try writer.print("{ }", .{
fmtIdent(zcu.declPtr(extern_decl_index).name.toSlice(ip)),
}) else {
// MSVC has a limit of 4095 character token length limit, and fmtIdent can (worst case),
// expand to 3x the length of its input, but let's cut it off at a much shorter limit.
var name: [100]u8 = undefined;
@ -2761,69 +2699,6 @@ pub fn genErrDecls(o: *Object) !void {
try writer.writeAll("};\n");
}

fn genExports(o: *Object) !void {
const tracy = trace(@src());
defer tracy.end();

const zcu = o.dg.zcu;
const ip = &zcu.intern_pool;
const decl_index = switch (o.dg.pass) {
.decl => |decl| decl,
.anon, .flush => return,
};
const decl = zcu.declPtr(decl_index);
const fwd = o.dg.fwdDeclWriter();

const exports = zcu.decl_exports.get(decl_index) orelse return;
if (exports.items.len < 2) return;

const is_variable_const = switch (ip.indexToKey(decl.val.toIntern())) {
.func => return for (exports.items[1..], 1..) |@"export", i| {
try fwd.writeAll("zig_extern ");
if (@"export".opts.linkage == .weak) try fwd.writeAll("zig_weak_linkage_fn ");
try o.dg.renderFunctionSignature(
fwd,
decl_index,
.forward,
.{ .export_index = @intCast(i) },
);
try fwd.writeAll(";\n");
},
.extern_func => {
// TODO: when sema allows re-exporting extern decls
unreachable;
},
.variable => |variable| variable.is_const,
else => true,
};
for (exports.items[1..]) |@"export"| {
try fwd.writeAll("zig_extern ");
if (@"export".opts.linkage == .weak) try fwd.writeAll("zig_weak_linkage ");
const export_name = @"export".opts.name.toSlice(ip);
try o.dg.renderTypeAndName(
fwd,
decl.typeOf(zcu),
.{ .identifier = export_name },
CQualifiers.init(.{ .@"const" = is_variable_const }),
decl.alignment,
.complete,
);
if (isMangledIdent(export_name, true)) {
try fwd.print(" zig_mangled_export({ }, {s}, {s})", .{
fmtIdent(export_name),
fmtStringLiteral(export_name, null),
fmtStringLiteral(exports.items[0].opts.name.toSlice(ip), null),
});
} else {
try fwd.print(" zig_export({s}, {s})", .{
fmtStringLiteral(exports.items[0].opts.name.toSlice(ip), null),
fmtStringLiteral(export_name, null),
});
}
try fwd.writeAll(";\n");
}
}

pub fn genLazyFn(o: *Object, lazy_ctype_pool: *const CType.Pool, lazy_fn: LazyFnMap.Entry) !void {
const zcu = o.dg.zcu;
const ip = &zcu.intern_pool;
@ -2885,19 +2760,19 @@ pub fn genLazyFn(o: *Object, lazy_ctype_pool: *const CType.Pool, lazy_fn: LazyFn
const fn_info = fn_ctype.info(ctype_pool).function;
const fn_name = fmtCTypePoolString(val.fn_name, lazy_ctype_pool);

const fwd_decl_writer = o.dg.fwdDeclWriter();
try fwd_decl_writer.print("static zig_{s} ", .{@tagName(key)});
try o.dg.renderFunctionSignature(fwd_decl_writer, fn_decl_index, .forward, .{
const fwd = o.dg.fwdDeclWriter();
try fwd.print("static zig_{s} ", .{@tagName(key)});
try o.dg.renderFunctionSignature(fwd, fn_decl.val, fn_decl.alignment, .forward, .{
.fmt_ctype_pool_string = fn_name,
});
try fwd_decl_writer.writeAll(";\n");
try fwd.writeAll(";\n");

try w.print("static zig_{s} ", .{@tagName(key)});
try o.dg.renderFunctionSignature(w, fn_decl_index, .complete, .{
try w.print("zig_{s} ", .{@tagName(key)});
try o.dg.renderFunctionSignature(w, fn_decl.val, .none, .complete, .{
.fmt_ctype_pool_string = fn_name,
});
try w.writeAll(" {\n return ");
try o.dg.renderDeclName(w, fn_decl_index, 0);
try o.dg.renderDeclName(w, fn_decl_index);
try w.writeByte('(');
for (0..fn_info.param_ctypes.len) |arg| {
if (arg > 0) try w.writeAll(", ");
@ -2921,21 +2796,26 @@ pub fn genFunc(f: *Function) !void {
o.code_header = std.ArrayList(u8).init(gpa);
defer o.code_header.deinit();

const is_global = o.dg.declIsGlobal(decl.val);
const fwd_decl_writer = o.dg.fwdDeclWriter();
try fwd_decl_writer.writeAll(if (is_global) "zig_extern " else "static ");
const fwd = o.dg.fwdDeclWriter();
try fwd.writeAll("static ");
try o.dg.renderFunctionSignature(
fwd,
decl.val,
decl.alignment,
.forward,
.{ .decl = decl_index },
);
try fwd.writeAll(";\n");

if (zcu.decl_exports.get(decl_index)) |exports|
if (exports.items[0].opts.linkage == .weak) try fwd_decl_writer.writeAll("zig_weak_linkage_fn ");
try o.dg.renderFunctionSignature(fwd_decl_writer, decl_index, .forward, .{ .export_index = 0 });
try fwd_decl_writer.writeAll(";\n");
try genExports(o);

try o.indent_writer.insertNewline();
if (!is_global) try o.writer().writeAll("static ");
if (decl.@"linksection".toSlice(&zcu.intern_pool)) |s|
try o.writer().print("zig_linksection_fn({s}) ", .{fmtStringLiteral(s, null)});
try o.dg.renderFunctionSignature(o.writer(), decl_index, .complete, .{ .export_index = 0 });
try o.dg.renderFunctionSignature(
o.writer(),
decl.val,
.none,
.complete,
.{ .decl = decl_index },
);
try o.writer().writeByte(' ');

// In case we need to use the header, populate it with a copy of the function
@ -2949,7 +2829,6 @@ pub fn genFunc(f: *Function) !void {

const main_body = f.air.getMainBody();
try genBodyResolveState(f, undefined, &.{}, main_body, false);

try o.indent_writer.insertNewline();

// Take advantage of the free_locals map to bucket locals per type. All
@ -3007,20 +2886,25 @@ pub fn genDecl(o: *Object) !void {

if (!decl_ty.isFnOrHasRuntimeBitsIgnoreComptime(zcu)) return;
if (decl.val.getExternFunc(zcu)) |_| {
const fwd_decl_writer = o.dg.fwdDeclWriter();
try fwd_decl_writer.writeAll("zig_extern ");
try o.dg.renderFunctionSignature(fwd_decl_writer, decl_index, .forward, .{ .export_index = 0 });
try fwd_decl_writer.writeAll(";\n");
try genExports(o);
const fwd = o.dg.fwdDeclWriter();
try fwd.writeAll("zig_extern ");
try o.dg.renderFunctionSignature(
fwd,
decl.val,
decl.alignment,
.forward,
.{ .@"export" = .{
.main_name = decl.name,
.extern_name = decl.name,
} },
);
try fwd.writeAll(";\n");
} else if (decl.val.getVariable(zcu)) |variable| {
try o.dg.renderFwdDecl(decl_index, variable, .final);
try genExports(o);
try o.dg.renderFwdDecl(decl_index, variable);

if (variable.is_extern) return;

const is_global = variable.is_extern or o.dg.declIsGlobal(decl.val);
const w = o.writer();
if (!is_global) try w.writeAll("static ");
if (variable.is_weak_linkage) try w.writeAll("zig_weak_linkage ");
if (variable.is_threadlocal and !o.dg.mod.single_threaded) try w.writeAll("zig_threadlocal ");
if (decl.@"linksection".toSlice(&zcu.intern_pool)) |s|
@ -3032,46 +2916,27 @@ pub fn genDecl(o: *Object) !void {
try w.writeByte(';');
try o.indent_writer.insertNewline();
} else {
const is_global = o.dg.zcu.decl_exports.contains(decl_index);
const decl_c_value = .{ .decl = decl_index };
try genDeclValue(o, decl.val, is_global, decl_c_value, decl.alignment, decl.@"linksection");
try genDeclValue(o, decl.val, decl_c_value, decl.alignment, decl.@"linksection");
}
}

pub fn genDeclValue(
o: *Object,
val: Value,
is_global: bool,
decl_c_value: CValue,
alignment: Alignment,
@"linksection": InternPool.OptionalNullTerminatedString,
) !void {
const zcu = o.dg.zcu;
const fwd_decl_writer = o.dg.fwdDeclWriter();

const ty = val.typeOf(zcu);

try fwd_decl_writer.writeAll(if (is_global) "zig_extern " else "static ");
try o.dg.renderTypeAndName(fwd_decl_writer, ty, decl_c_value, Const, alignment, .complete);
switch (o.dg.pass) {
.decl => |decl_index| {
if (zcu.decl_exports.get(decl_index)) |exports| {
const export_name = exports.items[0].opts.name.toSlice(&zcu.intern_pool);
if (isMangledIdent(export_name, true)) {
try fwd_decl_writer.print(" zig_mangled_final({ }, {s})", .{
fmtIdent(export_name), fmtStringLiteral(export_name, null),
});
}
}
},
.anon => {},
.flush => unreachable,
}
try fwd_decl_writer.writeAll(";\n");
try genExports(o);
const fwd = o.dg.fwdDeclWriter();
try fwd.writeAll("static ");
try o.dg.renderTypeAndName(fwd, ty, decl_c_value, Const, alignment, .complete);
try fwd.writeAll(";\n");

const w = o.writer();
if (!is_global) try w.writeAll("static ");
if (@"linksection".toSlice(&zcu.intern_pool)) |s|
try w.print("zig_linksection({s}) ", .{fmtStringLiteral(s, null)});
try o.dg.renderTypeAndName(w, ty, decl_c_value, Const, alignment, .complete);
@ -3080,22 +2945,73 @@ pub fn genDeclValue(
try w.writeAll(";\n");
}

pub fn genHeader(dg: *DeclGen) error{ AnalysisFail, OutOfMemory }!void {
const tracy = trace(@src());
defer tracy.end();

pub fn genExports(dg: *DeclGen, exported: Zcu.Exported, export_indices: []const u32) !void {
const zcu = dg.zcu;
const decl_index = dg.pass.decl;
const decl = zcu.declPtr(decl_index);
const writer = dg.fwdDeclWriter();
const ip = &zcu.intern_pool;
const fwd = dg.fwdDeclWriter();

switch (decl.typeOf(zcu).zigTypeTag(zcu)) {
.Fn => if (dg.declIsGlobal(decl.val)) {
try writer.writeAll("zig_extern ");
try dg.renderFunctionSignature(writer, dg.pass.decl, .complete, .{ .export_index = 0 });
try dg.fwd_decl.appendSlice(";\n");
const main_name = zcu.all_exports.items[export_indices[0]].opts.name;
try fwd.writeAll("#define ");
switch (exported) {
.decl_index => |decl_index| try dg.renderDeclName(fwd, decl_index),
.value => |value| try DeclGen.renderAnonDeclName(fwd, Value.fromInterned(value)),
}
try fwd.writeByte(' ');
try fwd.print("{ }", .{fmtIdent(main_name.toSlice(ip))});
try fwd.writeByte('\n');

const is_const = switch (ip.indexToKey(exported.getValue(zcu).toIntern())) {
.func, .extern_func => return for (export_indices) |export_index| {
const @"export" = &zcu.all_exports.items[export_index];
try fwd.writeAll("zig_extern ");
if (@"export".opts.linkage == .weak) try fwd.writeAll("zig_weak_linkage_fn ");
try dg.renderFunctionSignature(
fwd,
exported.getValue(zcu),
exported.getAlign(zcu),
.forward,
.{ .@"export" = .{
.main_name = main_name,
.extern_name = @"export".opts.name,
} },
);
try fwd.writeAll(";\n");
},
else => {},
.variable => |variable| variable.is_const,
else => true,
};
for (export_indices) |export_index| {
const @"export" = &zcu.all_exports.items[export_index];
try fwd.writeAll("zig_extern ");
if (@"export".opts.linkage == .weak) try fwd.writeAll("zig_weak_linkage ");
const extern_name = @"export".opts.name.toSlice(ip);
const is_mangled = isMangledIdent(extern_name, true);
const is_export = @"export".opts.name != main_name;
try dg.renderTypeAndName(
fwd,
exported.getValue(zcu).typeOf(zcu),
.{ .identifier = extern_name },
CQualifiers.init(.{ .@"const" = is_const }),
exported.getAlign(zcu),
.complete,
);
if (is_mangled and is_export) {
try fwd.print(" zig_mangled_export({ }, {s}, {s})", .{
fmtIdent(extern_name),
fmtStringLiteral(extern_name, null),
fmtStringLiteral(main_name.toSlice(ip), null),
});
} else if (is_mangled) {
try fwd.print(" zig_mangled({ }, {s})", .{
fmtIdent(extern_name), fmtStringLiteral(extern_name, null),
});
} else if (is_export) {
try fwd.print(" zig_export({s}, {s})", .{
fmtStringLiteral(main_name.toSlice(ip), null),
fmtStringLiteral(extern_name, null),
});
}
try fwd.writeAll(";\n");
}
}
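
The new genExports above no longer consults a per-decl export table; callers hand it a slice of indices into the flat zcu.all_exports array, and the first index names the canonical symbol. A minimal sketch of how a caller can resolve those indices, assuming only the fields this diff already uses (all_exports.items, opts.name, opts.linkage) and the compiler's usual Zcu/std imports; the helper name logExportNames is hypothetical and not part of the change:

fn logExportNames(zcu: *Zcu, export_indices: []const u32) void {
    const ip = &zcu.intern_pool;
    for (export_indices) |export_index| {
        // Each u32 is an index into the single flat export table introduced by this refactor.
        const @"export" = zcu.all_exports.items[export_index];
        std.log.debug("export '{s}' ({s} linkage)", .{
            @"export".opts.name.toSlice(ip),
            @tagName(@"export".opts.linkage),
        });
    }
}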
@ -4552,7 +4468,7 @@ fn airCall(
};
};
switch (modifier) {
.auto, .always_tail => try f.object.dg.renderDeclName(writer, fn_decl, 0),
.auto, .always_tail => try f.object.dg.renderDeclName(writer, fn_decl),
inline .never_tail, .never_inline => |m| try writer.writeAll(try f.getLazyFnName(
@unionInit(LazyFnKey, @tagName(m), fn_decl),
@unionInit(LazyFnValue.Data, @tagName(m), {}),

@ -2583,6 +2583,6 @@ const assert = std.debug.assert;
const CType = @This();
const Module = @import("../../Package/Module.zig");
const std = @import("std");
const Type = @import("../../type.zig").Type;
const Type = @import("../../Type.zig");
const Zcu = @import("../../Zcu.zig");
const DeclIndex = @import("../../InternPool.zig").DeclIndex;

@ -22,7 +22,7 @@ const Package = @import("../Package.zig");
const Air = @import("../Air.zig");
const Liveness = @import("../Liveness.zig");
const Value = @import("../Value.zig");
const Type = @import("../type.zig").Type;
const Type = @import("../Type.zig");
const x86_64_abi = @import("../arch/x86_64/abi.zig");
const wasm_c_abi = @import("../arch/wasm/abi.zig");
const aarch64_c_abi = @import("../arch/aarch64/abi.zig");
@ -848,10 +848,6 @@ pub const Object = struct {
|
||||
/// Note that the values are not added until `emit`, when all errors in
|
||||
/// the compilation are known.
|
||||
error_name_table: Builder.Variable.Index,
|
||||
/// This map is usually very close to empty. It tracks only the cases when a
|
||||
/// second extern Decl could not be emitted with the correct name due to a
|
||||
/// name collision.
|
||||
extern_collisions: std.AutoArrayHashMapUnmanaged(InternPool.DeclIndex, void),
|
||||
|
||||
/// Memoizes a null `?usize` value.
|
||||
null_opt_usize: Builder.Constant,
|
||||
@ -1011,7 +1007,6 @@ pub const Object = struct {
|
||||
.named_enum_map = .{},
|
||||
.type_map = .{},
|
||||
.error_name_table = .none,
|
||||
.extern_collisions = .{},
|
||||
.null_opt_usize = .no_init,
|
||||
.struct_field_map = .{},
|
||||
};
|
||||
@ -1029,7 +1024,6 @@ pub const Object = struct {
|
||||
self.anon_decl_map.deinit(gpa);
|
||||
self.named_enum_map.deinit(gpa);
|
||||
self.type_map.deinit(gpa);
|
||||
self.extern_collisions.deinit(gpa);
|
||||
self.builder.deinit();
|
||||
self.struct_field_map.deinit(gpa);
|
||||
self.* = undefined;
|
||||
@ -1121,61 +1115,6 @@ pub const Object = struct {
|
||||
try object.builder.finishModuleAsm();
|
||||
}
|
||||
|
||||
fn resolveExportExternCollisions(object: *Object) !void {
|
||||
const mod = object.module;
|
||||
|
||||
// This map has externs with incorrect symbol names.
|
||||
for (object.extern_collisions.keys()) |decl_index| {
|
||||
const global = object.decl_map.get(decl_index) orelse continue;
|
||||
// Same logic as below but for externs instead of exports.
|
||||
const decl_name = object.builder.strtabStringIfExists(mod.declPtr(decl_index).name.toSlice(&mod.intern_pool)) orelse continue;
|
||||
const other_global = object.builder.getGlobal(decl_name) orelse continue;
|
||||
if (other_global.toConst().getBase(&object.builder) ==
|
||||
global.toConst().getBase(&object.builder)) continue;
|
||||
|
||||
try global.replace(other_global, &object.builder);
|
||||
}
|
||||
object.extern_collisions.clearRetainingCapacity();
|
||||
|
||||
for (mod.decl_exports.keys(), mod.decl_exports.values()) |decl_index, export_list| {
|
||||
const global = object.decl_map.get(decl_index) orelse continue;
|
||||
try resolveGlobalCollisions(object, global, export_list.items);
|
||||
}
|
||||
|
||||
for (mod.value_exports.keys(), mod.value_exports.values()) |val, export_list| {
|
||||
const global = object.anon_decl_map.get(val) orelse continue;
|
||||
try resolveGlobalCollisions(object, global, export_list.items);
|
||||
}
|
||||
}
|
||||
|
||||
fn resolveGlobalCollisions(
|
||||
object: *Object,
|
||||
global: Builder.Global.Index,
|
||||
export_list: []const *Module.Export,
|
||||
) !void {
|
||||
const mod = object.module;
|
||||
const global_base = global.toConst().getBase(&object.builder);
|
||||
for (export_list) |exp| {
|
||||
// Detect if the LLVM global has already been created as an extern. In such
|
||||
// case, we need to replace all uses of it with this exported global.
|
||||
const exp_name = object.builder.strtabStringIfExists(exp.opts.name.toSlice(&mod.intern_pool)) orelse continue;
|
||||
|
||||
const other_global = object.builder.getGlobal(exp_name) orelse continue;
|
||||
if (other_global.toConst().getBase(&object.builder) == global_base) continue;
|
||||
|
||||
try global.takeName(other_global, &object.builder);
|
||||
try other_global.replace(global, &object.builder);
|
||||
// Problem: now we need to replace in the decl_map that
|
||||
// the extern decl index points to this new global. However we don't
|
||||
// know the decl index.
|
||||
// Even if we did, a future incremental update to the extern would then
|
||||
// treat the LLVM global as an extern rather than an export, so it would
|
||||
// need a way to check that.
|
||||
// This is a TODO that needs to be solved when making
|
||||
// the LLVM backend support incremental compilation.
|
||||
}
|
||||
}
|
||||
|
||||
pub const EmitOptions = struct {
|
||||
pre_ir_path: ?[]const u8,
|
||||
pre_bc_path: ?[]const u8,
|
||||
@ -1193,7 +1132,6 @@ pub const Object = struct {
|
||||
|
||||
pub fn emit(self: *Object, options: EmitOptions) !void {
|
||||
{
|
||||
try self.resolveExportExternCollisions();
|
||||
try self.genErrorNameTable();
|
||||
try self.genCmpLtErrorsLenFunction();
|
||||
try self.genModuleLevelAssembly();
|
||||
@ -1698,8 +1636,7 @@ pub const Object = struct {
|
||||
const file = try o.getDebugFile(namespace.file_scope);
|
||||
|
||||
const line_number = decl.navSrcLine(zcu) + 1;
|
||||
const is_internal_linkage = decl.val.getExternFunc(zcu) == null and
|
||||
!zcu.decl_exports.contains(decl_index);
|
||||
const is_internal_linkage = decl.val.getExternFunc(zcu) == null;
|
||||
const debug_decl_type = try o.lowerDebugType(decl.typeOf(zcu));
|
||||
|
||||
const subprogram = try o.builder.debugSubprogram(
|
||||
@ -1752,7 +1689,7 @@ pub const Object = struct {
|
||||
fg.genBody(air.getMainBody()) catch |err| switch (err) {
|
||||
error.CodegenFail => {
|
||||
decl.analysis = .codegen_failure;
|
||||
try zcu.failed_decls.put(zcu.gpa, decl_index, dg.err_msg.?);
|
||||
try zcu.failed_analysis.put(zcu.gpa, InternPool.AnalUnit.wrap(.{ .decl = decl_index }), dg.err_msg.?);
|
||||
dg.err_msg = null;
|
||||
return;
|
||||
},
|
||||
@ -1760,8 +1697,6 @@ pub const Object = struct {
|
||||
};
|
||||
|
||||
try fg.wip.finish();
|
||||
|
||||
try o.updateExports(zcu, .{ .decl_index = decl_index }, zcu.getDeclExports(decl_index));
|
||||
}
|
||||
|
||||
pub fn updateDecl(self: *Object, module: *Module, decl_index: InternPool.DeclIndex) !void {
|
||||
@ -1775,72 +1710,31 @@ pub const Object = struct {
|
||||
dg.genDecl() catch |err| switch (err) {
|
||||
error.CodegenFail => {
|
||||
decl.analysis = .codegen_failure;
|
||||
try module.failed_decls.put(module.gpa, decl_index, dg.err_msg.?);
|
||||
try module.failed_analysis.put(module.gpa, InternPool.AnalUnit.wrap(.{ .decl = decl_index }), dg.err_msg.?);
|
||||
dg.err_msg = null;
|
||||
return;
|
||||
},
|
||||
else => |e| return e,
|
||||
};
|
||||
try self.updateExports(module, .{ .decl_index = decl_index }, module.getDeclExports(decl_index));
|
||||
}
|
||||
|
||||
pub fn updateExports(
|
||||
self: *Object,
|
||||
mod: *Module,
|
||||
exported: Module.Exported,
|
||||
exports: []const *Module.Export,
|
||||
export_indices: []const u32,
|
||||
) link.File.UpdateExportsError!void {
|
||||
const decl_index = switch (exported) {
|
||||
.decl_index => |i| i,
|
||||
.value => |val| return updateExportedValue(self, mod, val, exports),
|
||||
.value => |val| return updateExportedValue(self, mod, val, export_indices),
|
||||
};
|
||||
const gpa = mod.gpa;
|
||||
const ip = &mod.intern_pool;
|
||||
// If the module does not already have the function, we ignore this function call
|
||||
// because we call `updateExports` at the end of `updateFunc` and `updateDecl`.
|
||||
const global_index = self.decl_map.get(decl_index) orelse return;
|
||||
const global_index = self.decl_map.get(decl_index).?;
|
||||
const decl = mod.declPtr(decl_index);
|
||||
const comp = mod.comp;
|
||||
if (decl.isExtern(mod)) {
|
||||
const decl_name = decl_name: {
|
||||
if (mod.getTarget().isWasm() and decl.val.typeOf(mod).zigTypeTag(mod) == .Fn) {
|
||||
if (decl.getOwnedExternFunc(mod).?.lib_name.toSlice(ip)) |lib_name| {
|
||||
if (!std.mem.eql(u8, lib_name, "c")) {
|
||||
break :decl_name try self.builder.strtabStringFmt("{}|{s}", .{ decl.name.fmt(ip), lib_name });
|
||||
}
|
||||
}
|
||||
}
|
||||
break :decl_name try self.builder.strtabString(decl.name.toSlice(ip));
|
||||
};
|
||||
|
||||
if (self.builder.getGlobal(decl_name)) |other_global| {
|
||||
if (other_global != global_index) {
|
||||
try self.extern_collisions.put(gpa, decl_index, {});
|
||||
}
|
||||
}
|
||||
|
||||
try global_index.rename(decl_name, &self.builder);
|
||||
global_index.setLinkage(.external, &self.builder);
|
||||
global_index.setUnnamedAddr(.default, &self.builder);
|
||||
if (comp.config.dll_export_fns)
|
||||
global_index.setDllStorageClass(.default, &self.builder);
|
||||
|
||||
if (decl.val.getVariable(mod)) |decl_var| {
|
||||
global_index.ptrConst(&self.builder).kind.variable.setThreadLocal(
|
||||
if (decl_var.is_threadlocal) .generaldynamic else .default,
|
||||
&self.builder,
|
||||
);
|
||||
if (decl_var.is_weak_linkage) global_index.setLinkage(.extern_weak, &self.builder);
|
||||
}
|
||||
} else if (exports.len != 0) {
|
||||
const main_exp_name = try self.builder.strtabString(exports[0].opts.name.toSlice(ip));
|
||||
try global_index.rename(main_exp_name, &self.builder);
|
||||
|
||||
if (decl.val.getVariable(mod)) |decl_var| if (decl_var.is_threadlocal)
|
||||
global_index.ptrConst(&self.builder).kind
|
||||
.variable.setThreadLocal(.generaldynamic, &self.builder);
|
||||
|
||||
return updateExportedGlobal(self, mod, global_index, exports);
|
||||
if (export_indices.len != 0) {
|
||||
return updateExportedGlobal(self, mod, global_index, export_indices);
|
||||
} else {
|
||||
const fqn = try self.builder.strtabString((try decl.fullyQualifiedName(mod)).toSlice(ip));
|
||||
try global_index.rename(fqn, &self.builder);
|
||||
@ -1848,17 +1742,6 @@ pub const Object = struct {
|
||||
if (comp.config.dll_export_fns)
|
||||
global_index.setDllStorageClass(.default, &self.builder);
|
||||
global_index.setUnnamedAddr(.unnamed_addr, &self.builder);
|
||||
if (decl.val.getVariable(mod)) |decl_var| {
|
||||
const decl_namespace = mod.namespacePtr(decl.src_namespace);
|
||||
const single_threaded = decl_namespace.file_scope.mod.single_threaded;
|
||||
global_index.ptrConst(&self.builder).kind.variable.setThreadLocal(
|
||||
if (decl_var.is_threadlocal and !single_threaded)
|
||||
.generaldynamic
|
||||
else
|
||||
.default,
|
||||
&self.builder,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -1866,11 +1749,11 @@ pub const Object = struct {
|
||||
o: *Object,
|
||||
mod: *Module,
|
||||
exported_value: InternPool.Index,
|
||||
exports: []const *Module.Export,
|
||||
export_indices: []const u32,
|
||||
) link.File.UpdateExportsError!void {
|
||||
const gpa = mod.gpa;
|
||||
const ip = &mod.intern_pool;
|
||||
const main_exp_name = try o.builder.strtabString(exports[0].opts.name.toSlice(ip));
|
||||
const main_exp_name = try o.builder.strtabString(mod.all_exports.items[export_indices[0]].opts.name.toSlice(ip));
|
||||
const global_index = i: {
|
||||
const gop = try o.anon_decl_map.getOrPut(gpa, exported_value);
|
||||
if (gop.found_existing) {
|
||||
@ -1894,32 +1777,57 @@ pub const Object = struct {
|
||||
try variable_index.setInitializer(init_val, &o.builder);
|
||||
break :i global_index;
|
||||
};
|
||||
return updateExportedGlobal(o, mod, global_index, exports);
|
||||
return updateExportedGlobal(o, mod, global_index, export_indices);
|
||||
}
|
||||
|
||||
fn updateExportedGlobal(
|
||||
o: *Object,
|
||||
mod: *Module,
|
||||
global_index: Builder.Global.Index,
|
||||
exports: []const *Module.Export,
|
||||
export_indices: []const u32,
|
||||
) link.File.UpdateExportsError!void {
|
||||
const comp = mod.comp;
|
||||
const ip = &mod.intern_pool;
|
||||
const first_export = mod.all_exports.items[export_indices[0]];
|
||||
|
||||
// We will rename this global to have a name matching `first_export`.
|
||||
// Successive exports become aliases.
|
||||
// If the first export name already exists, then there is a corresponding
|
||||
// extern global - we replace it with this global.
|
||||
const first_exp_name = try o.builder.strtabString(first_export.opts.name.toSlice(ip));
|
||||
if (o.builder.getGlobal(first_exp_name)) |other_global| replace: {
|
||||
if (other_global.toConst().getBase(&o.builder) == global_index.toConst().getBase(&o.builder)) {
|
||||
break :replace; // this global already has the name we want
|
||||
}
|
||||
try global_index.takeName(other_global, &o.builder);
|
||||
try other_global.replace(global_index, &o.builder);
|
||||
// Problem: now we need to replace in the decl_map that
|
||||
// the extern decl index points to this new global. However we don't
|
||||
// know the decl index.
|
||||
// Even if we did, a future incremental update to the extern would then
|
||||
// treat the LLVM global as an extern rather than an export, so it would
|
||||
// need a way to check that.
|
||||
// This is a TODO that needs to be solved when making
|
||||
// the LLVM backend support incremental compilation.
|
||||
} else {
|
||||
try global_index.rename(first_exp_name, &o.builder);
|
||||
}
|
||||
|
||||
global_index.setUnnamedAddr(.default, &o.builder);
|
||||
if (comp.config.dll_export_fns)
|
||||
global_index.setDllStorageClass(.dllexport, &o.builder);
|
||||
global_index.setLinkage(switch (exports[0].opts.linkage) {
|
||||
global_index.setLinkage(switch (first_export.opts.linkage) {
|
||||
.internal => unreachable,
|
||||
.strong => .external,
|
||||
.weak => .weak_odr,
|
||||
.link_once => .linkonce_odr,
|
||||
}, &o.builder);
|
||||
global_index.setVisibility(switch (exports[0].opts.visibility) {
|
||||
global_index.setVisibility(switch (first_export.opts.visibility) {
|
||||
.default => .default,
|
||||
.hidden => .hidden,
|
||||
.protected => .protected,
|
||||
}, &o.builder);
|
||||
if (exports[0].opts.section.toSlice(ip)) |section|
|
||||
if (first_export.opts.section.toSlice(ip)) |section|
|
||||
switch (global_index.ptrConst(&o.builder).kind) {
|
||||
.variable => |impl_index| impl_index.setSection(
|
||||
try o.builder.string(section),
|
||||
@ -1936,7 +1844,8 @@ pub const Object = struct {
|
||||
// The planned solution to this is https://github.com/ziglang/zig/issues/13265
|
||||
// Until then we iterate over existing aliases and make them point
|
||||
// to the correct decl, or otherwise add a new alias. Old aliases are leaked.
|
||||
for (exports[1..]) |exp| {
|
||||
for (export_indices[1..]) |export_idx| {
|
||||
const exp = mod.all_exports.items[export_idx];
|
||||
const exp_name = try o.builder.strtabString(exp.opts.name.toSlice(ip));
|
||||
if (o.builder.getGlobal(exp_name)) |global| {
|
||||
switch (global.ptrConst(&o.builder).kind) {
|
||||
@ -1944,7 +1853,13 @@ pub const Object = struct {
|
||||
alias.setAliasee(global_index.toConst(), &o.builder);
|
||||
continue;
|
||||
},
|
||||
.variable, .function => {},
|
||||
.variable, .function => {
|
||||
// This existing global is an `extern` corresponding to this export.
|
||||
// Replace it with the global being exported.
|
||||
// This existing global must be replaced with the alias.
|
||||
try global.rename(.empty, &o.builder);
|
||||
try global.replace(global_index, &o.builder);
|
||||
},
|
||||
.replaced => unreachable,
|
||||
}
|
||||
}
|
||||
@ -2688,7 +2603,10 @@ pub const Object = struct {
|
||||
if (!Type.fromInterned(field_ty).hasRuntimeBitsIgnoreComptime(mod)) continue;
|
||||
|
||||
const field_size = Type.fromInterned(field_ty).abiSize(mod);
|
||||
const field_align = mod.unionFieldNormalAlignment(union_type, @intCast(field_index));
|
||||
const field_align: InternPool.Alignment = switch (union_type.flagsPtr(ip).layout) {
|
||||
.@"packed" => .none,
|
||||
.auto, .@"extern" => mod.unionFieldNormalAlignment(union_type, @intCast(field_index)),
|
||||
};
|
||||
|
||||
const field_name = tag_type.names.get(ip)[field_index];
|
||||
fields.appendAssumeCapacity(try o.builder.debugMemberType(
|
||||
@ -4729,7 +4647,7 @@ pub const DeclGen = struct {
|
||||
const o = dg.object;
|
||||
const gpa = o.gpa;
|
||||
const mod = o.module;
|
||||
const src_loc = dg.decl.navSrcLoc(mod).upgrade(mod);
|
||||
const src_loc = dg.decl.navSrcLoc(mod);
|
||||
dg.err_msg = try Module.ErrorMsg.create(gpa, src_loc, "TODO (LLVM): " ++ format, args);
|
||||
return error.CodegenFail;
|
||||
}
|
||||
@ -4762,36 +4680,77 @@ pub const DeclGen = struct {
|
||||
else => try o.lowerValue(init_val),
|
||||
}, &o.builder);
|
||||
|
||||
if (decl.val.getVariable(zcu)) |decl_var| {
|
||||
const decl_namespace = zcu.namespacePtr(decl.src_namespace);
|
||||
const single_threaded = decl_namespace.file_scope.mod.single_threaded;
|
||||
variable_index.setThreadLocal(
|
||||
if (decl_var.is_threadlocal and !single_threaded) .generaldynamic else .default,
|
||||
&o.builder,
|
||||
);
|
||||
}
|
||||
|
||||
const line_number = decl.navSrcLine(zcu) + 1;
|
||||
const is_internal_linkage = !o.module.decl_exports.contains(decl_index);
|
||||
|
||||
const namespace = zcu.namespacePtr(decl.src_namespace);
|
||||
const owner_mod = namespace.file_scope.mod;
|
||||
|
||||
if (owner_mod.strip) return;
|
||||
if (!owner_mod.strip) {
|
||||
const debug_file = try o.getDebugFile(namespace.file_scope);
|
||||
|
||||
const debug_file = try o.getDebugFile(namespace.file_scope);
|
||||
const debug_global_var = try o.builder.debugGlobalVar(
|
||||
try o.builder.metadataString(decl.name.toSlice(ip)), // Name
|
||||
try o.builder.metadataStringFromStrtabString(variable_index.name(&o.builder)), // Linkage name
|
||||
debug_file, // File
|
||||
debug_file, // Scope
|
||||
line_number,
|
||||
try o.lowerDebugType(decl.typeOf(zcu)),
|
||||
variable_index,
|
||||
.{ .local = !decl.isExtern(zcu) },
|
||||
);
|
||||
|
||||
const debug_global_var = try o.builder.debugGlobalVar(
|
||||
try o.builder.metadataString(decl.name.toSlice(ip)), // Name
|
||||
try o.builder.metadataStringFromStrtabString(variable_index.name(&o.builder)), // Linkage name
|
||||
debug_file, // File
|
||||
debug_file, // Scope
|
||||
line_number,
|
||||
try o.lowerDebugType(decl.typeOf(zcu)),
|
||||
variable_index,
|
||||
.{ .local = is_internal_linkage },
|
||||
);
|
||||
const debug_expression = try o.builder.debugExpression(&.{});
|
||||
|
||||
const debug_expression = try o.builder.debugExpression(&.{});
|
||||
const debug_global_var_expression = try o.builder.debugGlobalVarExpression(
|
||||
debug_global_var,
|
||||
debug_expression,
|
||||
);
|
||||
|
||||
const debug_global_var_expression = try o.builder.debugGlobalVarExpression(
|
||||
debug_global_var,
|
||||
debug_expression,
|
||||
);
|
||||
variable_index.setGlobalVariableExpression(debug_global_var_expression, &o.builder);
|
||||
try o.debug_globals.append(o.gpa, debug_global_var_expression);
|
||||
}
|
||||
}
|
||||
|
||||
variable_index.setGlobalVariableExpression(debug_global_var_expression, &o.builder);
|
||||
try o.debug_globals.append(o.gpa, debug_global_var_expression);
|
||||
if (decl.isExtern(zcu)) {
|
||||
const global_index = o.decl_map.get(decl_index).?;
|
||||
|
||||
const decl_name = decl_name: {
|
||||
if (zcu.getTarget().isWasm() and decl.typeOf(zcu).zigTypeTag(zcu) == .Fn) {
|
||||
if (decl.getOwnedExternFunc(zcu).?.lib_name.toSlice(ip)) |lib_name| {
|
||||
if (!std.mem.eql(u8, lib_name, "c")) {
|
||||
break :decl_name try o.builder.strtabStringFmt("{}|{s}", .{ decl.name.fmt(ip), lib_name });
|
||||
}
|
||||
}
|
||||
}
|
||||
break :decl_name try o.builder.strtabString(decl.name.toSlice(ip));
|
||||
};
|
||||
|
||||
if (o.builder.getGlobal(decl_name)) |other_global| {
|
||||
if (other_global != global_index) {
|
||||
// Another global already has this name; just use it in place of this global.
|
||||
try global_index.replace(other_global, &o.builder);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
try global_index.rename(decl_name, &o.builder);
|
||||
global_index.setLinkage(.external, &o.builder);
|
||||
global_index.setUnnamedAddr(.default, &o.builder);
|
||||
if (zcu.comp.config.dll_export_fns)
|
||||
global_index.setDllStorageClass(.default, &o.builder);
|
||||
|
||||
if (decl.val.getVariable(zcu)) |decl_var| {
|
||||
if (decl_var.is_weak_linkage) global_index.setLinkage(.extern_weak, &o.builder);
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
@ -5193,7 +5152,6 @@ pub const FuncGen = struct {
|
||||
|
||||
const fqn = try decl.fullyQualifiedName(zcu);
|
||||
|
||||
const is_internal_linkage = !zcu.decl_exports.contains(decl_index);
|
||||
const fn_ty = try zcu.funcType(.{
|
||||
.param_types = &.{},
|
||||
.return_type = .void_type,
|
||||
@ -5211,7 +5169,7 @@ pub const FuncGen = struct {
|
||||
.sp_flags = .{
|
||||
.Optimized = owner_mod.optimize_mode != .Debug,
|
||||
.Definition = true,
|
||||
.LocalToUnit = is_internal_linkage,
|
||||
.LocalToUnit = true, // TODO: we can't know this at this point, since the function could be exported later!
|
||||
},
|
||||
},
|
||||
o.debug_compile_unit,
|
||||
|
||||
@ -9,7 +9,7 @@ const Zcu = @import("../Zcu.zig");
|
||||
/// Deprecated.
|
||||
const Module = Zcu;
|
||||
const Decl = Module.Decl;
|
||||
const Type = @import("../type.zig").Type;
|
||||
const Type = @import("../Type.zig");
|
||||
const Value = @import("../Value.zig");
|
||||
const Air = @import("../Air.zig");
|
||||
const Liveness = @import("../Liveness.zig");
|
||||
@ -218,7 +218,7 @@ pub const Object = struct {
|
||||
|
||||
decl_gen.genDecl() catch |err| switch (err) {
|
||||
error.CodegenFail => {
|
||||
try mod.failed_decls.put(mod.gpa, decl_index, decl_gen.error_msg.?);
|
||||
try mod.failed_analysis.put(mod.gpa, InternPool.AnalUnit.wrap(.{ .decl = decl_index }), decl_gen.error_msg.?);
|
||||
},
|
||||
else => |other| {
|
||||
// There might be an error that happened *after* self.error_msg
|
||||
@ -415,7 +415,7 @@ const DeclGen = struct {
|
||||
pub fn fail(self: *DeclGen, comptime format: []const u8, args: anytype) Error {
|
||||
@setCold(true);
|
||||
const mod = self.module;
|
||||
const src_loc = self.module.declPtr(self.decl_index).navSrcLoc(mod).upgrade(mod);
|
||||
const src_loc = self.module.declPtr(self.decl_index).navSrcLoc(mod);
|
||||
assert(self.error_msg == null);
|
||||
self.error_msg = try Module.ErrorMsg.create(self.module.gpa, src_loc, format, args);
|
||||
return error.CodegenFail;
|
||||
@ -6439,7 +6439,7 @@ const DeclGen = struct {
|
||||
// TODO: Translate proper error locations.
|
||||
assert(as.errors.items.len != 0);
|
||||
assert(self.error_msg == null);
|
||||
const src_loc = self.module.declPtr(self.decl_index).navSrcLoc(mod).upgrade(mod);
|
||||
const src_loc = self.module.declPtr(self.decl_index).navSrcLoc(mod);
|
||||
self.error_msg = try Module.ErrorMsg.create(self.module.gpa, src_loc, "failed to assemble SPIR-V inline assembly", .{});
|
||||
const notes = try self.module.gpa.alloc(Module.ErrorMsg, as.errors.items.len);
|
||||
|
||||
|
||||
17
src/link.zig
17
src/link.zig
@ -18,7 +18,7 @@ const Zcu = @import("Zcu.zig");
/// Deprecated.
const Module = Zcu;
const InternPool = @import("InternPool.zig");
const Type = @import("type.zig").Type;
const Type = @import("Type.zig");
const Value = @import("Value.zig");
const LlvmObject = @import("codegen/llvm.zig").Object;
const lldMain = @import("main.zig").lldMain;
@ -606,12 +606,12 @@ pub const File = struct {
base: *File,
module: *Module,
exported: Module.Exported,
exports: []const *Module.Export,
export_indices: []const u32,
) UpdateExportsError!void {
switch (base.tag) {
inline else => |tag| {
if (tag != .c and build_options.only_c) unreachable;
return @as(*tag.Type(), @fieldParentPtr("base", base)).updateExports(module, exported, exports);
return @as(*tag.Type(), @fieldParentPtr("base", base)).updateExports(module, exported, export_indices);
},
}
}
@ -646,7 +646,7 @@ pub const File = struct {
base: *File,
decl_val: InternPool.Index,
decl_align: InternPool.Alignment,
src_loc: Module.SrcLoc,
src_loc: Module.LazySrcLoc,
) !LowerResult {
if (build_options.only_c) @compileError("unreachable");
switch (base.tag) {
@ -671,21 +671,20 @@ pub const File = struct {
}
}

pub fn deleteDeclExport(
pub fn deleteExport(
base: *File,
decl_index: InternPool.DeclIndex,
exported: Zcu.Exported,
name: InternPool.NullTerminatedString,
) !void {
) void {
if (build_options.only_c) @compileError("unreachable");
switch (base.tag) {
.plan9,
.c,
.spirv,
.nvptx,
=> {},

inline else => |tag| {
return @as(*tag.Type(), @fieldParentPtr("base", base)).deleteDeclExport(decl_index, name);
return @as(*tag.Type(), @fieldParentPtr("base", base)).deleteExport(exported, name);
},
}
}
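
With deleteExport now keyed on the Zcu.Exported union instead of a bare DeclIndex, a backend only has to drop its bookkeeping for whichever kind of entity was exported. A minimal sketch of a conforming backend-side handler, modeled on the C and Coff implementations further down in this diff and assuming the compiler's usual std/Zcu/InternPool imports; the container name ExampleBackend and its two maps are hypothetical:

const ExampleBackend = struct {
    exported_decls: std.AutoArrayHashMapUnmanaged(InternPool.DeclIndex, void) = .{},
    exported_values: std.AutoArrayHashMapUnmanaged(InternPool.Index, void) = .{},

    pub fn deleteExport(
        self: *ExampleBackend,
        exported: Zcu.Exported,
        name: InternPool.NullTerminatedString,
    ) void {
        // This sketch keys exports by the exported entity, so the symbol name is unused.
        _ = name;
        switch (exported) {
            .decl_index => |decl_index| _ = self.exported_decls.swapRemove(decl_index),
            .value => |value| _ = self.exported_values.swapRemove(value),
        }
    }
};
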
172
src/link/C.zig
172
src/link/C.zig
@ -14,7 +14,7 @@ const Compilation = @import("../Compilation.zig");
|
||||
const codegen = @import("../codegen/c.zig");
|
||||
const link = @import("../link.zig");
|
||||
const trace = @import("../tracy.zig").trace;
|
||||
const Type = @import("../type.zig").Type;
|
||||
const Type = @import("../Type.zig");
|
||||
const Value = @import("../Value.zig");
|
||||
const Air = @import("../Air.zig");
|
||||
const Liveness = @import("../Liveness.zig");
|
||||
@ -39,6 +39,9 @@ anon_decls: std.AutoArrayHashMapUnmanaged(InternPool.Index, DeclBlock) = .{},
|
||||
/// the keys of `anon_decls`.
|
||||
aligned_anon_decls: std.AutoArrayHashMapUnmanaged(InternPool.Index, Alignment) = .{},
|
||||
|
||||
exported_decls: std.AutoArrayHashMapUnmanaged(InternPool.DeclIndex, ExportedBlock) = .{},
|
||||
exported_values: std.AutoArrayHashMapUnmanaged(InternPool.Index, ExportedBlock) = .{},
|
||||
|
||||
/// Optimization, `updateDecl` reuses this buffer rather than creating a new
|
||||
/// one with every call.
|
||||
fwd_decl_buf: std.ArrayListUnmanaged(u8) = .{},
|
||||
@ -80,6 +83,11 @@ pub const DeclBlock = struct {
|
||||
}
|
||||
};
|
||||
|
||||
/// Per-exported-symbol data.
|
||||
pub const ExportedBlock = struct {
|
||||
fwd_decl: String = String.empty,
|
||||
};
|
||||
|
||||
pub fn getString(this: C, s: String) []const u8 {
|
||||
return this.string_bytes.items[s.start..][0..s.len];
|
||||
}
|
||||
@ -238,9 +246,13 @@ pub fn updateFunc(
|
||||
function.deinit();
|
||||
}
|
||||
|
||||
try zcu.failed_analysis.ensureUnusedCapacity(gpa, 1);
|
||||
codegen.genFunc(&function) catch |err| switch (err) {
|
||||
error.AnalysisFail => {
|
||||
try zcu.failed_decls.put(gpa, decl_index, function.object.dg.error_msg.?);
|
||||
zcu.failed_analysis.putAssumeCapacityNoClobber(
|
||||
InternPool.AnalUnit.wrap(.{ .decl = decl_index }),
|
||||
function.object.dg.error_msg.?,
|
||||
);
|
||||
return;
|
||||
},
|
||||
else => |e| return e,
|
||||
@ -288,7 +300,7 @@ fn updateAnonDecl(self: *C, zcu: *Zcu, i: usize) !void {
|
||||
|
||||
const c_value: codegen.CValue = .{ .constant = Value.fromInterned(anon_decl) };
|
||||
const alignment: Alignment = self.aligned_anon_decls.get(anon_decl) orelse .none;
|
||||
codegen.genDeclValue(&object, c_value.constant, false, c_value, alignment, .none) catch |err| switch (err) {
|
||||
codegen.genDeclValue(&object, c_value.constant, c_value, alignment, .none) catch |err| switch (err) {
|
||||
error.AnalysisFail => {
|
||||
@panic("TODO: C backend AnalysisFail on anonymous decl");
|
||||
//try zcu.failed_decls.put(gpa, decl_index, object.dg.error_msg.?);
|
||||
@ -351,9 +363,13 @@ pub fn updateDecl(self: *C, zcu: *Zcu, decl_index: InternPool.DeclIndex) !void {
|
||||
code.* = object.code.moveToUnmanaged();
|
||||
}
|
||||
|
||||
try zcu.failed_analysis.ensureUnusedCapacity(gpa, 1);
|
||||
codegen.genDecl(&object) catch |err| switch (err) {
|
||||
error.AnalysisFail => {
|
||||
try zcu.failed_decls.put(gpa, decl_index, object.dg.error_msg.?);
|
||||
zcu.failed_analysis.putAssumeCapacityNoClobber(
|
||||
InternPool.AnalUnit.wrap(.{ .decl = decl_index }),
|
||||
object.dg.error_msg.?,
|
||||
);
|
||||
return;
|
||||
},
|
||||
else => |e| return e,
|
||||
@ -451,20 +467,40 @@ pub fn flushModule(self: *C, arena: Allocator, prog_node: std.Progress.Node) !vo
|
||||
{
|
||||
var export_names: std.AutoHashMapUnmanaged(InternPool.NullTerminatedString, void) = .{};
|
||||
defer export_names.deinit(gpa);
|
||||
try export_names.ensureTotalCapacity(gpa, @intCast(zcu.decl_exports.entries.len));
|
||||
for (zcu.decl_exports.values()) |exports| for (exports.items) |@"export"|
|
||||
try export_names.put(gpa, @"export".opts.name, {});
|
||||
|
||||
for (self.anon_decls.values()) |*decl_block| {
|
||||
try self.flushDeclBlock(zcu, zcu.root_mod, &f, decl_block, export_names, .none);
|
||||
try export_names.ensureTotalCapacity(gpa, @intCast(zcu.single_exports.count()));
|
||||
for (zcu.single_exports.values()) |export_index| {
|
||||
export_names.putAssumeCapacity(zcu.all_exports.items[export_index].opts.name, {});
|
||||
}
|
||||
for (zcu.multi_exports.values()) |info| {
|
||||
try export_names.ensureUnusedCapacity(gpa, info.len);
|
||||
for (zcu.all_exports.items[info.index..][0..info.len]) |@"export"| {
|
||||
export_names.putAssumeCapacity(@"export".opts.name, {});
|
||||
}
|
||||
}
|
||||
|
||||
for (self.anon_decls.keys(), self.anon_decls.values()) |value, *decl_block| try self.flushDeclBlock(
|
||||
zcu,
|
||||
zcu.root_mod,
|
||||
&f,
|
||||
decl_block,
|
||||
self.exported_values.getPtr(value),
|
||||
export_names,
|
||||
.none,
|
||||
);
|
||||
|
||||
for (self.decl_table.keys(), self.decl_table.values()) |decl_index, *decl_block| {
|
||||
const decl = zcu.declPtr(decl_index);
|
||||
assert(decl.has_tv);
|
||||
const extern_symbol_name = if (decl.isExtern(zcu)) decl.name.toOptional() else .none;
|
||||
const extern_name = if (decl.isExtern(zcu)) decl.name.toOptional() else .none;
|
||||
const mod = zcu.namespacePtr(decl.src_namespace).file_scope.mod;
|
||||
try self.flushDeclBlock(zcu, mod, &f, decl_block, export_names, extern_symbol_name);
|
||||
try self.flushDeclBlock(
|
||||
zcu,
|
||||
mod,
|
||||
&f,
|
||||
decl_block,
|
||||
self.exported_decls.getPtr(decl_index),
|
||||
export_names,
|
||||
extern_name,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@ -497,12 +533,27 @@ pub fn flushModule(self: *C, arena: Allocator, prog_node: std.Progress.Node) !vo
|
||||
f.file_size += lazy_fwd_decl_len;
|
||||
|
||||
// Now the code.
|
||||
const anon_decl_values = self.anon_decls.values();
|
||||
const decl_values = self.decl_table.values();
|
||||
try f.all_buffers.ensureUnusedCapacity(gpa, 1 + anon_decl_values.len + decl_values.len);
|
||||
try f.all_buffers.ensureUnusedCapacity(gpa, 1 + (self.anon_decls.count() + self.decl_table.count()) * 2);
|
||||
f.appendBufAssumeCapacity(self.lazy_code_buf.items);
|
||||
for (anon_decl_values) |db| f.appendBufAssumeCapacity(self.getString(db.code));
|
||||
for (decl_values) |db| f.appendBufAssumeCapacity(self.getString(db.code));
|
||||
for (self.anon_decls.keys(), self.anon_decls.values()) |anon_decl, decl_block| f.appendCodeAssumeCapacity(
|
||||
if (self.exported_values.contains(anon_decl))
|
||||
.default
|
||||
else switch (zcu.intern_pool.indexToKey(anon_decl)) {
|
||||
.extern_func => .zig_extern,
|
||||
.variable => |variable| if (variable.is_extern) .zig_extern else .static,
|
||||
else => .static,
|
||||
},
|
||||
self.getString(decl_block.code),
|
||||
);
|
||||
for (self.decl_table.keys(), self.decl_table.values()) |decl_index, decl_block| f.appendCodeAssumeCapacity(
|
||||
if (self.exported_decls.contains(decl_index))
|
||||
.default
|
||||
else if (zcu.declPtr(decl_index).isExtern(zcu))
|
||||
.zig_extern
|
||||
else
|
||||
.static,
|
||||
self.getString(decl_block.code),
|
||||
);
|
||||
|
||||
const file = self.base.file.?;
|
||||
try file.setEndPos(f.file_size);
|
||||
@ -532,6 +583,16 @@ const Flush = struct {
|
||||
f.file_size += buf.len;
|
||||
}
|
||||
|
||||
fn appendCodeAssumeCapacity(f: *Flush, storage: enum { default, zig_extern, static }, code: []const u8) void {
|
||||
if (code.len == 0) return;
|
||||
f.appendBufAssumeCapacity(switch (storage) {
|
||||
.default => "\n",
|
||||
.zig_extern => "\nzig_extern ",
|
||||
.static => "\nstatic ",
|
||||
});
|
||||
f.appendBufAssumeCapacity(code);
|
||||
}
|
||||
|
||||
fn deinit(f: *Flush, gpa: Allocator) void {
|
||||
f.all_buffers.deinit(gpa);
|
||||
f.asm_buf.deinit(gpa);
|
||||
@ -719,19 +780,20 @@ fn flushDeclBlock(
|
||||
zcu: *Zcu,
|
||||
mod: *Module,
|
||||
f: *Flush,
|
||||
decl_block: *DeclBlock,
|
||||
decl_block: *const DeclBlock,
|
||||
exported_block: ?*const ExportedBlock,
|
||||
export_names: std.AutoHashMapUnmanaged(InternPool.NullTerminatedString, void),
|
||||
extern_symbol_name: InternPool.OptionalNullTerminatedString,
|
||||
extern_name: InternPool.OptionalNullTerminatedString,
|
||||
) FlushDeclError!void {
|
||||
const gpa = self.base.comp.gpa;
|
||||
try self.flushLazyFns(zcu, mod, f, &decl_block.ctype_pool, decl_block.lazy_fns);
|
||||
try f.all_buffers.ensureUnusedCapacity(gpa, 1);
|
||||
fwd_decl: {
|
||||
if (extern_symbol_name.unwrap()) |name| {
|
||||
if (export_names.contains(name)) break :fwd_decl;
|
||||
}
|
||||
f.appendBufAssumeCapacity(self.getString(decl_block.fwd_decl));
|
||||
}
|
||||
// avoid emitting extern decls that are already exported
|
||||
if (extern_name.unwrap()) |name| if (export_names.contains(name)) return;
|
||||
f.appendBufAssumeCapacity(self.getString(if (exported_block) |exported|
|
||||
exported.fwd_decl
|
||||
else
|
||||
decl_block.fwd_decl));
|
||||
}
|
||||
|
||||
pub fn flushEmitH(zcu: *Zcu) !void {
|
||||
@ -781,10 +843,58 @@ pub fn updateExports(
|
||||
self: *C,
|
||||
zcu: *Zcu,
|
||||
exported: Zcu.Exported,
|
||||
exports: []const *Zcu.Export,
|
||||
export_indices: []const u32,
|
||||
) !void {
|
||||
_ = exports;
|
||||
_ = exported;
|
||||
_ = zcu;
|
||||
_ = self;
|
||||
const gpa = self.base.comp.gpa;
|
||||
const mod, const pass: codegen.DeclGen.Pass, const decl_block, const exported_block = switch (exported) {
|
||||
.decl_index => |decl_index| .{
|
||||
zcu.namespacePtr(zcu.declPtr(decl_index).src_namespace).file_scope.mod,
|
||||
.{ .decl = decl_index },
|
||||
self.decl_table.getPtr(decl_index).?,
|
||||
(try self.exported_decls.getOrPut(gpa, decl_index)).value_ptr,
|
||||
},
|
||||
.value => |value| .{
|
||||
zcu.root_mod,
|
||||
.{ .anon = value },
|
||||
self.anon_decls.getPtr(value).?,
|
||||
(try self.exported_values.getOrPut(gpa, value)).value_ptr,
|
||||
},
|
||||
};
|
||||
const ctype_pool = &decl_block.ctype_pool;
|
||||
const fwd_decl = &self.fwd_decl_buf;
|
||||
fwd_decl.clearRetainingCapacity();
|
||||
var dg: codegen.DeclGen = .{
|
||||
.gpa = gpa,
|
||||
.zcu = zcu,
|
||||
.mod = mod,
|
||||
.error_msg = null,
|
||||
.pass = pass,
|
||||
.is_naked_fn = false,
|
||||
.fwd_decl = fwd_decl.toManaged(gpa),
|
||||
.ctype_pool = decl_block.ctype_pool,
|
||||
.scratch = .{},
|
||||
.anon_decl_deps = .{},
|
||||
.aligned_anon_decls = .{},
|
||||
};
|
||||
defer {
|
||||
assert(dg.anon_decl_deps.count() == 0);
|
||||
assert(dg.aligned_anon_decls.count() == 0);
|
||||
fwd_decl.* = dg.fwd_decl.moveToUnmanaged();
|
||||
ctype_pool.* = dg.ctype_pool.move();
|
||||
ctype_pool.freeUnusedCapacity(gpa);
|
||||
dg.scratch.deinit(gpa);
|
||||
}
|
||||
try codegen.genExports(&dg, exported, export_indices);
|
||||
exported_block.* = .{ .fwd_decl = try self.addString(dg.fwd_decl.items) };
|
||||
}
|
||||
|
||||
pub fn deleteExport(
|
||||
self: *C,
|
||||
exported: Zcu.Exported,
|
||||
_: InternPool.NullTerminatedString,
|
||||
) void {
|
||||
switch (exported) {
|
||||
.decl_index => |decl_index| _ = self.exported_decls.swapRemove(decl_index),
|
||||
.value => |value| _ = self.exported_values.swapRemove(value),
|
||||
}
|
||||
}
|
||||
|
||||
@ -1144,7 +1144,7 @@ pub fn updateFunc(self: *Coff, mod: *Module, func_index: InternPool.Index, air:
|
||||
|
||||
const res = try codegen.generateFunction(
|
||||
&self.base,
|
||||
decl.navSrcLoc(mod).upgrade(mod),
|
||||
decl.navSrcLoc(mod),
|
||||
func_index,
|
||||
air,
|
||||
liveness,
|
||||
@ -1155,16 +1155,14 @@ pub fn updateFunc(self: *Coff, mod: *Module, func_index: InternPool.Index, air:
|
||||
.ok => code_buffer.items,
|
||||
.fail => |em| {
|
||||
func.analysis(&mod.intern_pool).state = .codegen_failure;
|
||||
try mod.failed_decls.put(mod.gpa, decl_index, em);
|
||||
try mod.failed_analysis.put(mod.gpa, AnalUnit.wrap(.{ .decl = decl_index }), em);
|
||||
return;
|
||||
},
|
||||
};
|
||||
|
||||
try self.updateDeclCode(decl_index, code, .FUNCTION);
|
||||
|
||||
// Since we updated the vaddr and the size, each corresponding export
|
||||
// symbol also needs to be updated.
|
||||
return self.updateExports(mod, .{ .decl_index = decl_index }, mod.getDeclExports(decl_index));
|
||||
// Exports will be updated by `Zcu.processExports` after the update.
|
||||
}
|
||||
|
||||
pub fn lowerUnnamedConst(self: *Coff, val: Value, decl_index: InternPool.DeclIndex) !u32 {
|
||||
@ -1181,11 +1179,11 @@ pub fn lowerUnnamedConst(self: *Coff, val: Value, decl_index: InternPool.DeclInd
|
||||
const sym_name = try std.fmt.allocPrint(gpa, "__unnamed_{}_{d}", .{ decl_name.fmt(&mod.intern_pool), index });
defer gpa.free(sym_name);
const ty = val.typeOf(mod);
const atom_index = switch (try self.lowerConst(sym_name, val, ty.abiAlignment(mod), self.rdata_section_index.?, decl.navSrcLoc(mod).upgrade(mod))) {
const atom_index = switch (try self.lowerConst(sym_name, val, ty.abiAlignment(mod), self.rdata_section_index.?, decl.navSrcLoc(mod))) {
.ok => |atom_index| atom_index,
.fail => |em| {
decl.analysis = .codegen_failure;
try mod.failed_decls.put(mod.gpa, decl_index, em);
try mod.failed_analysis.put(mod.gpa, AnalUnit.wrap(.{ .decl = decl_index }), em);
log.err("{s}", .{em.msg});
return error.CodegenFail;
},
@ -1199,7 +1197,7 @@ const LowerConstResult = union(enum) {
fail: *Module.ErrorMsg,
};

fn lowerConst(self: *Coff, name: []const u8, val: Value, required_alignment: InternPool.Alignment, sect_id: u16, src_loc: Module.SrcLoc) !LowerConstResult {
fn lowerConst(self: *Coff, name: []const u8, val: Value, required_alignment: InternPool.Alignment, sect_id: u16, src_loc: Module.LazySrcLoc) !LowerConstResult {
const gpa = self.base.comp.gpa;

var code_buffer = std.ArrayList(u8).init(gpa);
@ -1272,23 +1270,21 @@ pub fn updateDecl(
defer code_buffer.deinit();

const decl_val = if (decl.val.getVariable(mod)) |variable| Value.fromInterned(variable.init) else decl.val;
const res = try codegen.generateSymbol(&self.base, decl.navSrcLoc(mod).upgrade(mod), decl_val, &code_buffer, .none, .{
const res = try codegen.generateSymbol(&self.base, decl.navSrcLoc(mod), decl_val, &code_buffer, .none, .{
.parent_atom_index = atom.getSymbolIndex().?,
});
const code = switch (res) {
.ok => code_buffer.items,
.fail => |em| {
decl.analysis = .codegen_failure;
try mod.failed_decls.put(mod.gpa, decl_index, em);
try mod.failed_analysis.put(mod.gpa, AnalUnit.wrap(.{ .decl = decl_index }), em);
return;
},
};

try self.updateDeclCode(decl_index, code, .NULL);

// Since we updated the vaddr and the size, each corresponding export
// symbol also needs to be updated.
return self.updateExports(mod, .{ .decl_index = decl_index }, mod.getDeclExports(decl_index));
// Exports will be updated by `Zcu.processExports` after the update.
}

fn updateLazySymbolAtom(
@ -1313,14 +1309,7 @@ fn updateLazySymbolAtom(
const atom = self.getAtomPtr(atom_index);
const local_sym_index = atom.getSymbolIndex().?;

const src = if (sym.ty.srcLocOrNull(mod)) |src|
src.upgrade(mod)
else
Module.SrcLoc{
.file_scope = undefined,
.base_node = undefined,
.lazy = .unneeded,
};
const src = sym.ty.srcLocOrNull(mod) orelse Module.LazySrcLoc.unneeded;
const res = try codegen.generateLazySymbol(
&self.base,
src,
@ -1509,7 +1498,7 @@ pub fn updateExports(
self: *Coff,
mod: *Module,
exported: Module.Exported,
exports: []const *Module.Export,
export_indices: []const u32,
) link.File.UpdateExportsError!void {
if (build_options.skip_non_native and builtin.object_format != .coff) {
@panic("Attempted to compile for object format that was disabled by build configuration");
@ -1522,7 +1511,8 @@ pub fn updateExports(
if (comp.config.use_llvm) {
// Even in the case of LLVM, we need to notice certain exported symbols in order to
// detect the default subsystem.
for (exports) |exp| {
for (export_indices) |export_idx| {
const exp = mod.all_exports.items[export_idx];
const exported_decl_index = switch (exp.exported) {
.decl_index => |i| i,
.value => continue,
@ -1552,7 +1542,7 @@ pub fn updateExports(
}
}

if (self.llvm_object) |llvm_object| return llvm_object.updateExports(mod, exported, exports);
if (self.llvm_object) |llvm_object| return llvm_object.updateExports(mod, exported, export_indices);

const gpa = comp.gpa;

@ -1562,15 +1552,15 @@ pub fn updateExports(
break :blk self.decls.getPtr(decl_index).?;
},
.value => |value| self.anon_decls.getPtr(value) orelse blk: {
const first_exp = exports[0];
const res = try self.lowerAnonDecl(value, .none, first_exp.getSrcLoc(mod));
const first_exp = mod.all_exports.items[export_indices[0]];
const res = try self.lowerAnonDecl(value, .none, first_exp.src);
switch (res) {
.ok => {},
.fail => |em| {
// TODO maybe it's enough to return an error here and let Module.processExportsInner
// handle the error?
try mod.failed_exports.ensureUnusedCapacity(mod.gpa, 1);
mod.failed_exports.putAssumeCapacityNoClobber(first_exp, em);
mod.failed_exports.putAssumeCapacityNoClobber(export_indices[0], em);
return;
},
}
@ -1580,14 +1570,15 @@ pub fn updateExports(
const atom_index = metadata.atom;
const atom = self.getAtom(atom_index);

for (exports) |exp| {
for (export_indices) |export_idx| {
const exp = mod.all_exports.items[export_idx];
log.debug("adding new export '{}'", .{exp.opts.name.fmt(&mod.intern_pool)});

if (exp.opts.section.toSlice(&mod.intern_pool)) |section_name| {
if (!mem.eql(u8, section_name, ".text")) {
try mod.failed_exports.putNoClobber(gpa, exp, try Module.ErrorMsg.create(
try mod.failed_exports.putNoClobber(gpa, export_idx, try Module.ErrorMsg.create(
gpa,
exp.getSrcLoc(mod),
exp.src,
"Unimplemented: ExportOptions.section",
.{},
));
@ -1596,9 +1587,9 @@ pub fn updateExports(
}

if (exp.opts.linkage == .link_once) {
try mod.failed_exports.putNoClobber(gpa, exp, try Module.ErrorMsg.create(
try mod.failed_exports.putNoClobber(gpa, export_idx, try Module.ErrorMsg.create(
gpa,
exp.getSrcLoc(mod),
exp.src,
"Unimplemented: GlobalLinkage.link_once",
.{},
));
@ -1641,13 +1632,16 @@ pub fn updateExports(
}
}

pub fn deleteDeclExport(
pub fn deleteExport(
self: *Coff,
decl_index: InternPool.DeclIndex,
exported: Zcu.Exported,
name: InternPool.NullTerminatedString,
) void {
if (self.llvm_object) |_| return;
const metadata = self.decls.getPtr(decl_index) orelse return;
const metadata = switch (exported) {
.decl_index => |decl_index| self.decls.getPtr(decl_index) orelse return,
.value => |value| self.anon_decls.getPtr(value) orelse return,
};
const mod = self.base.comp.module.?;
const name_slice = name.toSlice(&mod.intern_pool);
const sym_index = metadata.getExportPtr(self, name_slice) orelse return;
@ -1866,7 +1860,7 @@ pub fn lowerAnonDecl(
self: *Coff,
decl_val: InternPool.Index,
explicit_alignment: InternPool.Alignment,
src_loc: Module.SrcLoc,
src_loc: Module.LazySrcLoc,
) !codegen.Result {
const gpa = self.base.comp.gpa;
const mod = self.base.comp.module.?;
@ -2748,8 +2742,9 @@ const Object = @import("Coff/Object.zig");
const Relocation = @import("Coff/Relocation.zig");
const TableSection = @import("table_section.zig").TableSection;
const StringTable = @import("StringTable.zig");
const Type = @import("../type.zig").Type;
const Type = @import("../Type.zig");
const Value = @import("../Value.zig");
const AnalUnit = InternPool.AnalUnit;

pub const base_tag: link.File.Tag = .coff;

@ -2969,5 +2969,5 @@ const Zcu = @import("../Zcu.zig");
const Module = Zcu;
const InternPool = @import("../InternPool.zig");
const StringTable = @import("StringTable.zig");
const Type = @import("../type.zig").Type;
const Type = @import("../Type.zig");
const Value = @import("../Value.zig");

@ -552,7 +552,7 @@ pub fn lowerAnonDecl(
self: *Elf,
decl_val: InternPool.Index,
explicit_alignment: InternPool.Alignment,
src_loc: Module.SrcLoc,
src_loc: Module.LazySrcLoc,
) !codegen.Result {
return self.zigObjectPtr().?.lowerAnonDecl(self, decl_val, explicit_alignment, src_loc);
}
@ -3011,13 +3011,13 @@ pub fn updateExports(
self: *Elf,
mod: *Module,
exported: Module.Exported,
exports: []const *Module.Export,
export_indices: []const u32,
) link.File.UpdateExportsError!void {
if (build_options.skip_non_native and builtin.object_format != .elf) {
@panic("Attempted to compile for object format that was disabled by build configuration");
}
if (self.llvm_object) |llvm_object| return llvm_object.updateExports(mod, exported, exports);
return self.zigObjectPtr().?.updateExports(self, mod, exported, exports);
if (self.llvm_object) |llvm_object| return llvm_object.updateExports(mod, exported, export_indices);
return self.zigObjectPtr().?.updateExports(self, mod, exported, export_indices);
}

pub fn updateDeclLineNumber(self: *Elf, mod: *Module, decl_index: InternPool.DeclIndex) !void {
@ -3025,13 +3025,13 @@ pub fn updateDeclLineNumber(self: *Elf, mod: *Module, decl_index: InternPool.Dec
return self.zigObjectPtr().?.updateDeclLineNumber(mod, decl_index);
}

pub fn deleteDeclExport(
pub fn deleteExport(
self: *Elf,
decl_index: InternPool.DeclIndex,
exported: Zcu.Exported,
name: InternPool.NullTerminatedString,
) void {
if (self.llvm_object) |_| return;
return self.zigObjectPtr().?.deleteDeclExport(self, decl_index, name);
return self.zigObjectPtr().?.deleteExport(self, exported, name);
}

fn addLinkerDefinedSymbols(self: *Elf) !void {

@ -686,7 +686,7 @@ pub fn lowerAnonDecl(
elf_file: *Elf,
decl_val: InternPool.Index,
explicit_alignment: InternPool.Alignment,
src_loc: Module.SrcLoc,
src_loc: Module.LazySrcLoc,
) !codegen.Result {
const gpa = elf_file.base.comp.gpa;
const mod = elf_file.base.comp.module.?;
@ -1074,7 +1074,7 @@ pub fn updateFunc(
const res = if (decl_state) |*ds|
try codegen.generateFunction(
&elf_file.base,
decl.navSrcLoc(mod).upgrade(mod),
decl.navSrcLoc(mod),
func_index,
air,
liveness,
@ -1084,7 +1084,7 @@ pub fn updateFunc(
else
try codegen.generateFunction(
&elf_file.base,
decl.navSrcLoc(mod).upgrade(mod),
decl.navSrcLoc(mod),
func_index,
air,
liveness,
@ -1096,7 +1096,7 @@ pub fn updateFunc(
.ok => code_buffer.items,
.fail => |em| {
func.analysis(&mod.intern_pool).state = .codegen_failure;
try mod.failed_decls.put(mod.gpa, decl_index, em);
try mod.failed_analysis.put(mod.gpa, AnalUnit.wrap(.{ .decl = decl_index }), em);
return;
},
};
@ -1115,9 +1115,7 @@ pub fn updateFunc(
);
}

// Since we updated the vaddr and the size, each corresponding export
// symbol also needs to be updated.
return self.updateExports(elf_file, mod, .{ .decl_index = decl_index }, mod.getDeclExports(decl_index));
// Exports will be updated by `Zcu.processExports` after the update.
}

pub fn updateDecl(
@ -1158,13 +1156,13 @@ pub fn updateDecl(
// TODO implement .debug_info for global variables
const decl_val = if (decl.val.getVariable(mod)) |variable| Value.fromInterned(variable.init) else decl.val;
const res = if (decl_state) |*ds|
try codegen.generateSymbol(&elf_file.base, decl.navSrcLoc(mod).upgrade(mod), decl_val, &code_buffer, .{
try codegen.generateSymbol(&elf_file.base, decl.navSrcLoc(mod), decl_val, &code_buffer, .{
.dwarf = ds,
}, .{
.parent_atom_index = sym_index,
})
else
try codegen.generateSymbol(&elf_file.base, decl.navSrcLoc(mod).upgrade(mod), decl_val, &code_buffer, .none, .{
try codegen.generateSymbol(&elf_file.base, decl.navSrcLoc(mod), decl_val, &code_buffer, .none, .{
.parent_atom_index = sym_index,
});

@ -1172,7 +1170,7 @@ pub fn updateDecl(
.ok => code_buffer.items,
.fail => |em| {
decl.analysis = .codegen_failure;
try mod.failed_decls.put(mod.gpa, decl_index, em);
try mod.failed_analysis.put(mod.gpa, AnalUnit.wrap(.{ .decl = decl_index }), em);
return;
},
};
@ -1194,9 +1192,7 @@ pub fn updateDecl(
);
}

// Since we updated the vaddr and the size, each corresponding export
// symbol also needs to be updated.
return self.updateExports(elf_file, mod, .{ .decl_index = decl_index }, mod.getDeclExports(decl_index));
// Exports will be updated by `Zcu.processExports` after the update.
}

fn updateLazySymbol(
@ -1221,14 +1217,7 @@ fn updateLazySymbol(
break :blk try self.strtab.insert(gpa, name);
};

const src = if (sym.ty.srcLocOrNull(mod)) |src|
src.upgrade(mod)
else
Module.SrcLoc{
.file_scope = undefined,
.base_node = undefined,
.lazy = .unneeded,
};
const src = sym.ty.srcLocOrNull(mod) orelse Module.LazySrcLoc.unneeded;
const res = try codegen.generateLazySymbol(
&elf_file.base,
src,
@ -1306,12 +1295,12 @@ pub fn lowerUnnamedConst(
val,
ty.abiAlignment(mod),
elf_file.zig_data_rel_ro_section_index.?,
decl.navSrcLoc(mod).upgrade(mod),
decl.navSrcLoc(mod),
)) {
.ok => |sym_index| sym_index,
.fail => |em| {
decl.analysis = .codegen_failure;
try mod.failed_decls.put(mod.gpa, decl_index, em);
try mod.failed_analysis.put(mod.gpa, AnalUnit.wrap(.{ .decl = decl_index }), em);
log.err("{s}", .{em.msg});
return error.CodegenFail;
},
@ -1333,7 +1322,7 @@ fn lowerConst(
val: Value,
required_alignment: InternPool.Alignment,
output_section_index: u32,
src_loc: Module.SrcLoc,
src_loc: Module.LazySrcLoc,
) !LowerConstResult {
const gpa = elf_file.base.comp.gpa;

@ -1386,7 +1375,7 @@ pub fn updateExports(
elf_file: *Elf,
mod: *Module,
exported: Module.Exported,
exports: []const *Module.Export,
export_indices: []const u32,
) link.File.UpdateExportsError!void {
const tracy = trace(@src());
defer tracy.end();
@ -1398,15 +1387,15 @@ pub fn updateExports(
break :blk self.decls.getPtr(decl_index).?;
},
.value => |value| self.anon_decls.getPtr(value) orelse blk: {
const first_exp = exports[0];
const res = try self.lowerAnonDecl(elf_file, value, .none, first_exp.getSrcLoc(mod));
const first_exp = mod.all_exports.items[export_indices[0]];
const res = try self.lowerAnonDecl(elf_file, value, .none, first_exp.src);
switch (res) {
.ok => {},
.fail => |em| {
// TODO maybe it's enough to return an error here and let Module.processExportsInner
// handle the error?
try mod.failed_exports.ensureUnusedCapacity(mod.gpa, 1);
mod.failed_exports.putAssumeCapacityNoClobber(first_exp, em);
mod.failed_exports.putAssumeCapacityNoClobber(export_indices[0], em);
return;
},
}
@ -1418,13 +1407,14 @@ pub fn updateExports(
const esym = self.local_esyms.items(.elf_sym)[esym_index];
const esym_shndx = self.local_esyms.items(.shndx)[esym_index];

for (exports) |exp| {
for (export_indices) |export_idx| {
const exp = mod.all_exports.items[export_idx];
if (exp.opts.section.unwrap()) |section_name| {
if (!section_name.eqlSlice(".text", &mod.intern_pool)) {
try mod.failed_exports.ensureUnusedCapacity(mod.gpa, 1);
mod.failed_exports.putAssumeCapacityNoClobber(exp, try Module.ErrorMsg.create(
mod.failed_exports.putAssumeCapacityNoClobber(export_idx, try Module.ErrorMsg.create(
gpa,
exp.getSrcLoc(mod),
exp.src,
"Unimplemented: ExportOptions.section",
.{},
));
@ -1437,9 +1427,9 @@ pub fn updateExports(
.weak => elf.STB_WEAK,
.link_once => {
try mod.failed_exports.ensureUnusedCapacity(mod.gpa, 1);
mod.failed_exports.putAssumeCapacityNoClobber(exp, try Module.ErrorMsg.create(
mod.failed_exports.putAssumeCapacityNoClobber(export_idx, try Module.ErrorMsg.create(
gpa,
exp.getSrcLoc(mod),
exp.src,
"Unimplemented: GlobalLinkage.LinkOnce",
.{},
));
@ -1487,13 +1477,13 @@ pub fn updateDeclLineNumber(
}
}

pub fn deleteDeclExport(
pub fn deleteExport(
self: *ZigObject,
elf_file: *Elf,
decl_index: InternPool.DeclIndex,
exported: Zcu.Exported,
name: InternPool.NullTerminatedString,
) void {
const metadata = self.decls.getPtr(decl_index) orelse return;
const metadata = switch (exported) {
.decl_index => |decl_index| self.decls.getPtr(decl_index) orelse return,
.value => |value| self.anon_decls.getPtr(value) orelse return,
};
const mod = elf_file.base.comp.module.?;
const exp_name = name.toSlice(&mod.intern_pool);
const esym_index = metadata.@"export"(self, exp_name) orelse return;
@ -1654,6 +1647,7 @@ const Module = Zcu;
const Object = @import("Object.zig");
const Symbol = @import("Symbol.zig");
const StringTable = @import("../StringTable.zig");
const Type = @import("../../type.zig").Type;
const Type = @import("../../Type.zig");
const Value = @import("../../Value.zig");
const AnalUnit = InternPool.AnalUnit;
const ZigObject = @This();

@ -3207,22 +3207,22 @@ pub fn updateExports(
|
||||
self: *MachO,
|
||||
mod: *Module,
|
||||
exported: Module.Exported,
|
||||
exports: []const *Module.Export,
|
||||
export_indices: []const u32,
|
||||
) link.File.UpdateExportsError!void {
|
||||
if (build_options.skip_non_native and builtin.object_format != .macho) {
|
||||
@panic("Attempted to compile for object format that was disabled by build configuration");
|
||||
}
|
||||
if (self.llvm_object) |llvm_object| return llvm_object.updateExports(mod, exported, exports);
|
||||
return self.getZigObject().?.updateExports(self, mod, exported, exports);
|
||||
if (self.llvm_object) |llvm_object| return llvm_object.updateExports(mod, exported, export_indices);
|
||||
return self.getZigObject().?.updateExports(self, mod, exported, export_indices);
|
||||
}
|
||||
|
||||
pub fn deleteDeclExport(
|
||||
pub fn deleteExport(
|
||||
self: *MachO,
|
||||
decl_index: InternPool.DeclIndex,
|
||||
exported: Zcu.Exported,
|
||||
name: InternPool.NullTerminatedString,
|
||||
) Allocator.Error!void {
|
||||
) void {
|
||||
if (self.llvm_object) |_| return;
|
||||
return self.getZigObject().?.deleteDeclExport(self, decl_index, name);
|
||||
return self.getZigObject().?.deleteExport(self, exported, name);
|
||||
}
|
||||
|
||||
pub fn freeDecl(self: *MachO, decl_index: InternPool.DeclIndex) void {
|
||||
@ -3239,7 +3239,7 @@ pub fn lowerAnonDecl(
|
||||
self: *MachO,
|
||||
decl_val: InternPool.Index,
|
||||
explicit_alignment: InternPool.Alignment,
|
||||
src_loc: Module.SrcLoc,
|
||||
src_loc: Module.LazySrcLoc,
|
||||
) !codegen.Result {
|
||||
return self.getZigObject().?.lowerAnonDecl(self, decl_val, explicit_alignment, src_loc);
|
||||
}
|
||||
|
||||
@ -459,4 +459,4 @@ const trace = @import("../../tracy.zig").trace;
|
||||
const Allocator = mem.Allocator;
|
||||
const MachO = @import("../MachO.zig");
|
||||
const StringTable = @import("../StringTable.zig");
|
||||
const Type = @import("../../type.zig").Type;
|
||||
const Type = @import("../../Type.zig");
|
||||
|
||||
@ -572,7 +572,7 @@ pub fn lowerAnonDecl(
|
||||
macho_file: *MachO,
|
||||
decl_val: InternPool.Index,
|
||||
explicit_alignment: Atom.Alignment,
|
||||
src_loc: Module.SrcLoc,
|
||||
src_loc: Module.LazySrcLoc,
|
||||
) !codegen.Result {
|
||||
const gpa = macho_file.base.comp.gpa;
|
||||
const mod = macho_file.base.comp.module.?;
|
||||
@ -682,7 +682,7 @@ pub fn updateFunc(
|
||||
const dio: codegen.DebugInfoOutput = if (decl_state) |*ds| .{ .dwarf = ds } else .none;
|
||||
const res = try codegen.generateFunction(
|
||||
&macho_file.base,
|
||||
decl.navSrcLoc(mod).upgrade(mod),
|
||||
decl.navSrcLoc(mod),
|
||||
func_index,
|
||||
air,
|
||||
liveness,
|
||||
@ -694,7 +694,7 @@ pub fn updateFunc(
|
||||
.ok => code_buffer.items,
|
||||
.fail => |em| {
|
||||
func.analysis(&mod.intern_pool).state = .codegen_failure;
|
||||
try mod.failed_decls.put(mod.gpa, decl_index, em);
|
||||
try mod.failed_analysis.put(mod.gpa, AnalUnit.wrap(.{ .decl = decl_index }), em);
|
||||
return;
|
||||
},
|
||||
};
|
||||
@ -713,9 +713,7 @@ pub fn updateFunc(
|
||||
);
|
||||
}
|
||||
|
||||
// Since we updated the vaddr and the size, each corresponding export
|
||||
// symbol also needs to be updated.
|
||||
return self.updateExports(macho_file, mod, .{ .decl_index = decl_index }, mod.getDeclExports(decl_index));
|
||||
// Exports will be updated by `Zcu.processExports` after the update.
|
||||
}
|
||||
|
||||
pub fn updateDecl(
|
||||
@ -756,7 +754,7 @@ pub fn updateDecl(
|
||||
|
||||
const decl_val = if (decl.val.getVariable(mod)) |variable| Value.fromInterned(variable.init) else decl.val;
|
||||
const dio: codegen.DebugInfoOutput = if (decl_state) |*ds| .{ .dwarf = ds } else .none;
|
||||
const res = try codegen.generateSymbol(&macho_file.base, decl.navSrcLoc(mod).upgrade(mod), decl_val, &code_buffer, dio, .{
|
||||
const res = try codegen.generateSymbol(&macho_file.base, decl.navSrcLoc(mod), decl_val, &code_buffer, dio, .{
|
||||
.parent_atom_index = sym_index,
|
||||
});
|
||||
|
||||
@ -764,7 +762,7 @@ pub fn updateDecl(
|
||||
.ok => code_buffer.items,
|
||||
.fail => |em| {
|
||||
decl.analysis = .codegen_failure;
|
||||
try mod.failed_decls.put(mod.gpa, decl_index, em);
|
||||
try mod.failed_analysis.put(mod.gpa, AnalUnit.wrap(.{ .decl = decl_index }), em);
|
||||
return;
|
||||
},
|
||||
};
|
||||
@ -790,9 +788,7 @@ pub fn updateDecl(
|
||||
);
|
||||
}
|
||||
|
||||
// Since we updated the vaddr and the size, each corresponding export symbol also
|
||||
// needs to be updated.
|
||||
try self.updateExports(macho_file, mod, .{ .decl_index = decl_index }, mod.getDeclExports(decl_index));
|
||||
// Exports will be updated by `Zcu.processExports` after the update.
|
||||
}
|
||||
|
||||
fn updateDeclCode(
|
||||
@ -1104,12 +1100,12 @@ pub fn lowerUnnamedConst(
|
||||
val,
|
||||
val.typeOf(mod).abiAlignment(mod),
|
||||
macho_file.zig_const_sect_index.?,
|
||||
decl.navSrcLoc(mod).upgrade(mod),
|
||||
decl.navSrcLoc(mod),
|
||||
)) {
|
||||
.ok => |sym_index| sym_index,
|
||||
.fail => |em| {
|
||||
decl.analysis = .codegen_failure;
|
||||
try mod.failed_decls.put(mod.gpa, decl_index, em);
|
||||
try mod.failed_analysis.put(mod.gpa, AnalUnit.wrap(.{ .decl = decl_index }), em);
|
||||
log.err("{s}", .{em.msg});
|
||||
return error.CodegenFail;
|
||||
},
|
||||
@ -1131,7 +1127,7 @@ fn lowerConst(
|
||||
val: Value,
|
||||
required_alignment: Atom.Alignment,
|
||||
output_section_index: u8,
|
||||
src_loc: Module.SrcLoc,
|
||||
src_loc: Module.LazySrcLoc,
|
||||
) !LowerConstResult {
|
||||
const gpa = macho_file.base.comp.gpa;
|
||||
|
||||
@ -1187,7 +1183,7 @@ pub fn updateExports(
|
||||
macho_file: *MachO,
|
||||
mod: *Module,
|
||||
exported: Module.Exported,
|
||||
exports: []const *Module.Export,
|
||||
export_indices: []const u32,
|
||||
) link.File.UpdateExportsError!void {
|
||||
const tracy = trace(@src());
|
||||
defer tracy.end();
|
||||
@ -1199,15 +1195,15 @@ pub fn updateExports(
|
||||
break :blk self.decls.getPtr(decl_index).?;
|
||||
},
|
||||
.value => |value| self.anon_decls.getPtr(value) orelse blk: {
|
||||
const first_exp = exports[0];
|
||||
const res = try self.lowerAnonDecl(macho_file, value, .none, first_exp.getSrcLoc(mod));
|
||||
const first_exp = mod.all_exports.items[export_indices[0]];
|
||||
const res = try self.lowerAnonDecl(macho_file, value, .none, first_exp.src);
|
||||
switch (res) {
|
||||
.ok => {},
|
||||
.fail => |em| {
|
||||
// TODO maybe it's enough to return an error here and let Module.processExportsInner
|
||||
// handle the error?
|
||||
try mod.failed_exports.ensureUnusedCapacity(mod.gpa, 1);
|
||||
mod.failed_exports.putAssumeCapacityNoClobber(first_exp, em);
|
||||
mod.failed_exports.putAssumeCapacityNoClobber(export_indices[0], em);
|
||||
return;
|
||||
},
|
||||
}
|
||||
@ -1218,13 +1214,14 @@ pub fn updateExports(
|
||||
const nlist_idx = macho_file.getSymbol(sym_index).nlist_idx;
|
||||
const nlist = self.symtab.items(.nlist)[nlist_idx];
|
||||
|
||||
for (exports) |exp| {
|
||||
for (export_indices) |export_idx| {
|
||||
const exp = mod.all_exports.items[export_idx];
|
||||
if (exp.opts.section.unwrap()) |section_name| {
|
||||
if (!section_name.eqlSlice("__text", &mod.intern_pool)) {
|
||||
try mod.failed_exports.ensureUnusedCapacity(mod.gpa, 1);
|
||||
mod.failed_exports.putAssumeCapacityNoClobber(exp, try Module.ErrorMsg.create(
|
||||
mod.failed_exports.putAssumeCapacityNoClobber(export_idx, try Module.ErrorMsg.create(
|
||||
gpa,
|
||||
exp.getSrcLoc(mod),
|
||||
exp.src,
|
||||
"Unimplemented: ExportOptions.section",
|
||||
.{},
|
||||
));
|
||||
@ -1232,9 +1229,9 @@ pub fn updateExports(
|
||||
}
|
||||
}
|
||||
if (exp.opts.linkage == .link_once) {
|
||||
try mod.failed_exports.putNoClobber(mod.gpa, exp, try Module.ErrorMsg.create(
|
||||
try mod.failed_exports.putNoClobber(mod.gpa, export_idx, try Module.ErrorMsg.create(
|
||||
gpa,
|
||||
exp.getSrcLoc(mod),
|
||||
exp.src,
|
||||
"Unimplemented: GlobalLinkage.link_once",
|
||||
.{},
|
||||
));
|
||||
@ -1294,14 +1291,7 @@ fn updateLazySymbol(
|
||||
break :blk try self.strtab.insert(gpa, name);
|
||||
};
|
||||
|
||||
const src = if (lazy_sym.ty.srcLocOrNull(mod)) |src|
|
||||
src.upgrade(mod)
|
||||
else
|
||||
Module.SrcLoc{
|
||||
.file_scope = undefined,
|
||||
.base_node = undefined,
|
||||
.lazy = .unneeded,
|
||||
};
|
||||
const src = lazy_sym.ty.srcLocOrNull(mod) orelse Module.LazySrcLoc.unneeded;
|
||||
const res = try codegen.generateLazySymbol(
|
||||
&macho_file.base,
|
||||
src,
|
||||
@ -1364,15 +1354,18 @@ pub fn updateDeclLineNumber(self: *ZigObject, mod: *Module, decl_index: InternPo
|
||||
}
|
||||
}
|
||||
|
||||
pub fn deleteDeclExport(
|
||||
pub fn deleteExport(
|
||||
self: *ZigObject,
|
||||
macho_file: *MachO,
|
||||
decl_index: InternPool.DeclIndex,
|
||||
exported: Zcu.Exported,
|
||||
name: InternPool.NullTerminatedString,
|
||||
) void {
|
||||
const mod = macho_file.base.comp.module.?;
|
||||
|
||||
const metadata = self.decls.getPtr(decl_index) orelse return;
|
||||
const metadata = switch (exported) {
|
||||
.decl_index => |decl_index| self.decls.getPtr(decl_index) orelse return,
|
||||
.value => |value| self.anon_decls.getPtr(value) orelse return,
|
||||
};
|
||||
const nlist_index = metadata.@"export"(self, name.toSlice(&mod.intern_pool)) orelse return;
|
||||
|
||||
log.debug("deleting export '{}'", .{name.fmt(&mod.intern_pool)});
|
||||
@ -1594,6 +1587,7 @@ const Object = @import("Object.zig");
|
||||
const Relocation = @import("Relocation.zig");
|
||||
const Symbol = @import("Symbol.zig");
|
||||
const StringTable = @import("../StringTable.zig");
|
||||
const Type = @import("../../type.zig").Type;
|
||||
const Type = @import("../../Type.zig");
|
||||
const Value = @import("../../Value.zig");
|
||||
const AnalUnit = InternPool.AnalUnit;
|
||||
const ZigObject = @This();
|
||||
|
||||
@ -96,12 +96,12 @@ pub fn updateExports(
|
||||
self: *NvPtx,
|
||||
module: *Module,
|
||||
exported: Module.Exported,
|
||||
exports: []const *Module.Export,
|
||||
export_indices: []const u32,
|
||||
) !void {
|
||||
if (build_options.skip_non_native and builtin.object_format != .nvptx)
|
||||
@panic("Attempted to compile for object format that was disabled by build configuration");
|
||||
|
||||
return self.llvm_object.updateExports(module, exported, exports);
|
||||
return self.llvm_object.updateExports(module, exported, export_indices);
|
||||
}
|
||||
|
||||
pub fn freeDecl(self: *NvPtx, decl_index: InternPool.DeclIndex) void {
|
||||
|
||||
@ -15,8 +15,9 @@ const File = link.File;
|
||||
const build_options = @import("build_options");
|
||||
const Air = @import("../Air.zig");
|
||||
const Liveness = @import("../Liveness.zig");
|
||||
const Type = @import("../type.zig").Type;
|
||||
const Type = @import("../Type.zig");
|
||||
const Value = @import("../Value.zig");
|
||||
const AnalUnit = InternPool.AnalUnit;
|
||||
|
||||
const std = @import("std");
|
||||
const builtin = @import("builtin");
|
||||
@ -60,6 +61,9 @@ fn_decl_table: std.AutoArrayHashMapUnmanaged(
|
||||
) = .{},
|
||||
/// the code is modified when relocated, so that is why it is mutable
|
||||
data_decl_table: std.AutoArrayHashMapUnmanaged(InternPool.DeclIndex, []u8) = .{},
|
||||
/// When `updateExports` is called, we store the export indices here, to be used
|
||||
/// during flush.
|
||||
decl_exports: std.AutoArrayHashMapUnmanaged(InternPool.DeclIndex, []u32) = .{},
|
||||
|
||||
/// Table of unnamed constants associated with a parent `Decl`.
|
||||
/// We store them here so that we can free the constants whenever the `Decl`
|
||||
@ -435,7 +439,7 @@ pub fn updateFunc(self: *Plan9, mod: *Module, func_index: InternPool.Index, air:
|
||||
|
||||
const res = try codegen.generateFunction(
|
||||
&self.base,
|
||||
decl.navSrcLoc(mod).upgrade(mod),
|
||||
decl.navSrcLoc(mod),
|
||||
func_index,
|
||||
air,
|
||||
liveness,
|
||||
@ -446,7 +450,7 @@ pub fn updateFunc(self: *Plan9, mod: *Module, func_index: InternPool.Index, air:
|
||||
.ok => try code_buffer.toOwnedSlice(),
|
||||
.fail => |em| {
|
||||
func.analysis(&mod.intern_pool).state = .codegen_failure;
|
||||
try mod.failed_decls.put(mod.gpa, decl_index, em);
|
||||
try mod.failed_analysis.put(mod.gpa, AnalUnit.wrap(.{ .decl = decl_index }), em);
|
||||
return;
|
||||
},
|
||||
};
|
||||
@ -501,7 +505,7 @@ pub fn lowerUnnamedConst(self: *Plan9, val: Value, decl_index: InternPool.DeclIn
|
||||
};
|
||||
self.syms.items[info.sym_index.?] = sym;
|
||||
|
||||
const res = try codegen.generateSymbol(&self.base, decl.navSrcLoc(mod).upgrade(mod), val, &code_buffer, .{
|
||||
const res = try codegen.generateSymbol(&self.base, decl.navSrcLoc(mod), val, &code_buffer, .{
|
||||
.none = {},
|
||||
}, .{
|
||||
.parent_atom_index = new_atom_idx,
|
||||
@ -510,7 +514,7 @@ pub fn lowerUnnamedConst(self: *Plan9, val: Value, decl_index: InternPool.DeclIn
|
||||
.ok => code_buffer.items,
|
||||
.fail => |em| {
|
||||
decl.analysis = .codegen_failure;
|
||||
try mod.failed_decls.put(mod.gpa, decl_index, em);
|
||||
try mod.failed_analysis.put(mod.gpa, AnalUnit.wrap(.{ .decl = decl_index }), em);
|
||||
log.err("{s}", .{em.msg});
|
||||
return error.CodegenFail;
|
||||
},
|
||||
@ -540,14 +544,14 @@ pub fn updateDecl(self: *Plan9, mod: *Module, decl_index: InternPool.DeclIndex)
|
||||
defer code_buffer.deinit();
|
||||
const decl_val = if (decl.val.getVariable(mod)) |variable| Value.fromInterned(variable.init) else decl.val;
|
||||
// TODO we need the symbol index for symbol in the table of locals for the containing atom
|
||||
const res = try codegen.generateSymbol(&self.base, decl.navSrcLoc(mod).upgrade(mod), decl_val, &code_buffer, .{ .none = {} }, .{
|
||||
const res = try codegen.generateSymbol(&self.base, decl.navSrcLoc(mod), decl_val, &code_buffer, .{ .none = {} }, .{
|
||||
.parent_atom_index = @as(Atom.Index, @intCast(atom_idx)),
|
||||
});
|
||||
const code = switch (res) {
|
||||
.ok => code_buffer.items,
|
||||
.fail => |em| {
|
||||
decl.analysis = .codegen_failure;
|
||||
try mod.failed_decls.put(mod.gpa, decl_index, em);
|
||||
try mod.failed_analysis.put(mod.gpa, AnalUnit.wrap(.{ .decl = decl_index }), em);
|
||||
return;
|
||||
},
|
||||
};
|
||||
@ -770,8 +774,8 @@ pub fn flushModule(self: *Plan9, arena: Allocator, prog_node: std.Progress.Node)
|
||||
mem.writeInt(u64, got_table[atom.got_index.? * 8 ..][0..8], off, target.cpu.arch.endian());
|
||||
}
|
||||
self.syms.items[atom.sym_index.?].value = off;
|
||||
if (mod.decl_exports.get(decl_index)) |exports| {
|
||||
try self.addDeclExports(mod, decl_index, exports.items);
|
||||
if (self.decl_exports.get(decl_index)) |export_indices| {
|
||||
try self.addDeclExports(mod, decl_index, export_indices);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -836,8 +840,8 @@ pub fn flushModule(self: *Plan9, arena: Allocator, prog_node: std.Progress.Node)
|
||||
mem.writeInt(u64, got_table[atom.got_index.? * 8 ..][0..8], off, target.cpu.arch.endian());
|
||||
}
|
||||
self.syms.items[atom.sym_index.?].value = off;
|
||||
if (mod.decl_exports.get(decl_index)) |exports| {
|
||||
try self.addDeclExports(mod, decl_index, exports.items);
|
||||
if (self.decl_exports.get(decl_index)) |export_indices| {
|
||||
try self.addDeclExports(mod, decl_index, export_indices);
|
||||
}
|
||||
}
|
||||
// write the unnamed constants after the other data decls
|
||||
@ -1007,22 +1011,23 @@ fn addDeclExports(
|
||||
self: *Plan9,
|
||||
mod: *Module,
|
||||
decl_index: InternPool.DeclIndex,
|
||||
exports: []const *Module.Export,
|
||||
export_indices: []const u32,
|
||||
) !void {
|
||||
const gpa = self.base.comp.gpa;
|
||||
const metadata = self.decls.getPtr(decl_index).?;
|
||||
const atom = self.getAtom(metadata.index);
|
||||
|
||||
for (exports) |exp| {
|
||||
for (export_indices) |export_idx| {
|
||||
const exp = mod.all_exports.items[export_idx];
|
||||
const exp_name = exp.opts.name.toSlice(&mod.intern_pool);
|
||||
// plan9 does not support custom sections
|
||||
if (exp.opts.section.unwrap()) |section_name| {
|
||||
if (!section_name.eqlSlice(".text", &mod.intern_pool) and
|
||||
!section_name.eqlSlice(".data", &mod.intern_pool))
|
||||
{
|
||||
try mod.failed_exports.put(mod.gpa, exp, try Module.ErrorMsg.create(
|
||||
try mod.failed_exports.put(mod.gpa, export_idx, try Module.ErrorMsg.create(
|
||||
gpa,
|
||||
mod.declPtr(decl_index).navSrcLoc(mod).upgrade(mod),
|
||||
mod.declPtr(decl_index).navSrcLoc(mod),
|
||||
"plan9 does not support extra sections",
|
||||
.{},
|
||||
));
|
||||
@ -1152,15 +1157,23 @@ pub fn updateExports(
|
||||
self: *Plan9,
|
||||
module: *Module,
|
||||
exported: Module.Exported,
|
||||
exports: []const *Module.Export,
|
||||
export_indices: []const u32,
|
||||
) !void {
|
||||
const gpa = self.base.comp.gpa;
|
||||
switch (exported) {
|
||||
.value => @panic("TODO: plan9 updateExports handling values"),
|
||||
.decl_index => |decl_index| _ = try self.seeDecl(decl_index),
|
||||
.decl_index => |decl_index| {
|
||||
_ = try self.seeDecl(decl_index);
|
||||
if (self.decl_exports.fetchSwapRemove(decl_index)) |kv| {
|
||||
gpa.free(kv.value);
|
||||
}
|
||||
try self.decl_exports.ensureUnusedCapacity(gpa, 1);
|
||||
const duped_indices = try gpa.dupe(u32, export_indices);
|
||||
self.decl_exports.putAssumeCapacityNoClobber(decl_index, duped_indices);
|
||||
},
|
||||
}
|
||||
// we do all the things in flush
|
||||
// all proper work is done in flush
|
||||
_ = module;
|
||||
_ = exports;
|
||||
}
|
||||
|
||||
pub fn getOrCreateAtomForLazySymbol(self: *Plan9, sym: File.LazySymbol) !Atom.Index {
|
||||
@ -1212,14 +1225,7 @@ fn updateLazySymbolAtom(self: *Plan9, sym: File.LazySymbol, atom_index: Atom.Ind
|
||||
self.syms.items[self.getAtomPtr(atom_index).sym_index.?] = symbol;
|
||||
|
||||
// generate the code
|
||||
const src = if (sym.ty.srcLocOrNull(mod)) |src|
|
||||
src.upgrade(mod)
|
||||
else
|
||||
Module.SrcLoc{
|
||||
.file_scope = undefined,
|
||||
.base_node = undefined,
|
||||
.lazy = .unneeded,
|
||||
};
|
||||
const src = sym.ty.srcLocOrNull(mod) orelse Module.LazySrcLoc.unneeded;
|
||||
const res = try codegen.generateLazySymbol(
|
||||
&self.base,
|
||||
src,
|
||||
@ -1290,6 +1296,10 @@ pub fn deinit(self: *Plan9) void {
|
||||
gpa.free(self.syms.items[sym_index].name);
|
||||
}
|
||||
self.data_decl_table.deinit(gpa);
|
||||
for (self.decl_exports.values()) |export_indices| {
|
||||
gpa.free(export_indices);
|
||||
}
|
||||
self.decl_exports.deinit(gpa);
|
||||
self.syms.deinit(gpa);
|
||||
self.got_index_free_list.deinit(gpa);
|
||||
self.syms_index_free_list.deinit(gpa);
|
||||
@ -1395,10 +1405,13 @@ pub fn writeSyms(self: *Plan9, buf: *std.ArrayList(u8)) !void {
|
||||
const atom = self.getAtom(decl_metadata.index);
|
||||
const sym = self.syms.items[atom.sym_index.?];
|
||||
try self.writeSym(writer, sym);
|
||||
if (self.base.comp.module.?.decl_exports.get(decl_index)) |exports| {
|
||||
for (exports.items) |e| if (decl_metadata.getExport(self, e.opts.name.toSlice(ip))) |exp_i| {
|
||||
try self.writeSym(writer, self.syms.items[exp_i]);
|
||||
};
|
||||
if (self.decl_exports.get(decl_index)) |export_indices| {
|
||||
for (export_indices) |export_idx| {
|
||||
const exp = mod.all_exports.items[export_idx];
|
||||
if (decl_metadata.getExport(self, exp.opts.name.toSlice(ip))) |exp_i| {
|
||||
try self.writeSym(writer, self.syms.items[exp_i]);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1442,13 +1455,16 @@ pub fn writeSyms(self: *Plan9, buf: *std.ArrayList(u8)) !void {
|
||||
const atom = self.getAtom(decl_metadata.index);
|
||||
const sym = self.syms.items[atom.sym_index.?];
|
||||
try self.writeSym(writer, sym);
|
||||
if (self.base.comp.module.?.decl_exports.get(decl_index)) |exports| {
|
||||
for (exports.items) |e| if (decl_metadata.getExport(self, e.opts.name.toSlice(ip))) |exp_i| {
|
||||
const s = self.syms.items[exp_i];
|
||||
if (mem.eql(u8, s.name, "_start"))
|
||||
self.entry_val = s.value;
|
||||
try self.writeSym(writer, s);
|
||||
};
|
||||
if (self.decl_exports.get(decl_index)) |export_indices| {
|
||||
for (export_indices) |export_idx| {
|
||||
const exp = mod.all_exports.items[export_idx];
|
||||
if (decl_metadata.getExport(self, exp.opts.name.toSlice(ip))) |exp_i| {
|
||||
const s = self.syms.items[exp_i];
|
||||
if (mem.eql(u8, s.name, "_start"))
|
||||
self.entry_val = s.value;
|
||||
try self.writeSym(writer, s);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1530,7 +1546,7 @@ pub fn lowerAnonDecl(
|
||||
self: *Plan9,
|
||||
decl_val: InternPool.Index,
|
||||
explicit_alignment: InternPool.Alignment,
|
||||
src_loc: Module.SrcLoc,
|
||||
src_loc: Module.LazySrcLoc,
|
||||
) !codegen.Result {
|
||||
_ = explicit_alignment;
|
||||
// This is basically the same as lowerUnnamedConst.
|
||||
|
||||
@ -152,7 +152,7 @@ pub fn updateExports(
|
||||
self: *SpirV,
|
||||
mod: *Module,
|
||||
exported: Module.Exported,
|
||||
exports: []const *Module.Export,
|
||||
export_indices: []const u32,
|
||||
) !void {
|
||||
const decl_index = switch (exported) {
|
||||
.decl_index => |i| i,
|
||||
@ -177,7 +177,8 @@ pub fn updateExports(
|
||||
if ((!is_vulkan and execution_model == .Kernel) or
|
||||
(is_vulkan and (execution_model == .Fragment or execution_model == .Vertex)))
|
||||
{
|
||||
for (exports) |exp| {
|
||||
for (export_indices) |export_idx| {
|
||||
const exp = mod.all_exports.items[export_idx];
|
||||
try self.object.spv.declareEntryPoint(
|
||||
spv_decl_index,
|
||||
exp.opts.name.toSlice(&mod.intern_pool),
|
||||
|
||||
@ -33,7 +33,7 @@ const Zcu = @import("../Zcu.zig");
|
||||
const Module = Zcu;
|
||||
const Object = @import("Wasm/Object.zig");
|
||||
const Symbol = @import("Wasm/Symbol.zig");
|
||||
const Type = @import("../type.zig").Type;
|
||||
const Type = @import("../Type.zig");
|
||||
const Value = @import("../Value.zig");
|
||||
const ZigObject = @import("Wasm/ZigObject.zig");
|
||||
|
||||
@ -1533,7 +1533,7 @@ pub fn lowerAnonDecl(
|
||||
wasm: *Wasm,
|
||||
decl_val: InternPool.Index,
|
||||
explicit_alignment: Alignment,
|
||||
src_loc: Module.SrcLoc,
|
||||
src_loc: Module.LazySrcLoc,
|
||||
) !codegen.Result {
|
||||
return wasm.zigObjectPtr().?.lowerAnonDecl(wasm, decl_val, explicit_alignment, src_loc);
|
||||
}
|
||||
@ -1542,26 +1542,26 @@ pub fn getAnonDeclVAddr(wasm: *Wasm, decl_val: InternPool.Index, reloc_info: lin
|
||||
return wasm.zigObjectPtr().?.getAnonDeclVAddr(wasm, decl_val, reloc_info);
|
||||
}
|
||||
|
||||
pub fn deleteDeclExport(
|
||||
pub fn deleteExport(
|
||||
wasm: *Wasm,
|
||||
decl_index: InternPool.DeclIndex,
|
||||
exported: Zcu.Exported,
|
||||
name: InternPool.NullTerminatedString,
|
||||
) void {
|
||||
if (wasm.llvm_object) |_| return;
|
||||
return wasm.zigObjectPtr().?.deleteDeclExport(wasm, decl_index, name);
|
||||
return wasm.zigObjectPtr().?.deleteExport(wasm, exported, name);
|
||||
}
|
||||
|
||||
pub fn updateExports(
|
||||
wasm: *Wasm,
|
||||
mod: *Module,
|
||||
exported: Module.Exported,
|
||||
exports: []const *Module.Export,
|
||||
export_indices: []const u32,
|
||||
) !void {
|
||||
if (build_options.skip_non_native and builtin.object_format != .wasm) {
|
||||
@panic("Attempted to compile for object format that was disabled by build configuration");
|
||||
}
|
||||
if (wasm.llvm_object) |llvm_object| return llvm_object.updateExports(mod, exported, exports);
|
||||
return wasm.zigObjectPtr().?.updateExports(wasm, mod, exported, exports);
|
||||
if (wasm.llvm_object) |llvm_object| return llvm_object.updateExports(mod, exported, export_indices);
|
||||
return wasm.zigObjectPtr().?.updateExports(wasm, mod, exported, export_indices);
|
||||
}
|
||||
|
||||
pub fn freeDecl(wasm: *Wasm, decl_index: InternPool.DeclIndex) void {
|
||||
|
||||
@ -269,7 +269,7 @@ pub fn updateDecl(
|
||||
|
||||
const res = try codegen.generateSymbol(
|
||||
&wasm_file.base,
|
||||
decl.navSrcLoc(mod).upgrade(mod),
|
||||
decl.navSrcLoc(mod),
|
||||
val,
|
||||
&code_writer,
|
||||
.none,
|
||||
@ -280,7 +280,7 @@ pub fn updateDecl(
|
||||
.ok => code_writer.items,
|
||||
.fail => |em| {
|
||||
decl.analysis = .codegen_failure;
|
||||
try mod.failed_decls.put(mod.gpa, decl_index, em);
|
||||
try mod.failed_analysis.put(mod.gpa, AnalUnit.wrap(.{ .decl = decl_index }), em);
|
||||
return;
|
||||
},
|
||||
};
|
||||
@ -308,7 +308,7 @@ pub fn updateFunc(
|
||||
defer code_writer.deinit();
|
||||
const result = try codegen.generateFunction(
|
||||
&wasm_file.base,
|
||||
decl.navSrcLoc(mod).upgrade(mod),
|
||||
decl.navSrcLoc(mod),
|
||||
func_index,
|
||||
air,
|
||||
liveness,
|
||||
@ -320,7 +320,7 @@ pub fn updateFunc(
|
||||
.ok => code_writer.items,
|
||||
.fail => |em| {
|
||||
decl.analysis = .codegen_failure;
|
||||
try mod.failed_decls.put(mod.gpa, decl_index, em);
|
||||
try mod.failed_analysis.put(mod.gpa, AnalUnit.wrap(.{ .decl = decl_index }), em);
|
||||
return;
|
||||
},
|
||||
};
|
||||
@ -439,7 +439,7 @@ pub fn lowerAnonDecl(
|
||||
wasm_file: *Wasm,
|
||||
decl_val: InternPool.Index,
|
||||
explicit_alignment: InternPool.Alignment,
|
||||
src_loc: Module.SrcLoc,
|
||||
src_loc: Module.LazySrcLoc,
|
||||
) !codegen.Result {
|
||||
const gpa = wasm_file.base.comp.gpa;
|
||||
const gop = try zig_object.anon_decls.getOrPut(gpa, decl_val);
|
||||
@ -494,14 +494,14 @@ pub fn lowerUnnamedConst(zig_object: *ZigObject, wasm_file: *Wasm, val: Value, d
|
||||
else
|
||||
decl.navSrcLoc(mod);
|
||||
|
||||
switch (try zig_object.lowerConst(wasm_file, name, val, decl_src.upgrade(mod))) {
|
||||
switch (try zig_object.lowerConst(wasm_file, name, val, decl_src)) {
|
||||
.ok => |atom_index| {
|
||||
try wasm_file.getAtomPtr(parent_atom_index).locals.append(gpa, atom_index);
|
||||
return @intFromEnum(wasm_file.getAtom(atom_index).sym_index);
|
||||
},
|
||||
.fail => |em| {
|
||||
decl.analysis = .codegen_failure;
|
||||
try mod.failed_decls.put(mod.gpa, decl_index, em);
|
||||
try mod.failed_analysis.put(mod.gpa, AnalUnit.wrap(.{ .decl = decl_index }), em);
|
||||
return error.CodegenFail;
|
||||
},
|
||||
}
|
||||
@ -512,7 +512,7 @@ const LowerConstResult = union(enum) {
|
||||
fail: *Module.ErrorMsg,
|
||||
};
|
||||
|
||||
fn lowerConst(zig_object: *ZigObject, wasm_file: *Wasm, name: []const u8, val: Value, src_loc: Module.SrcLoc) !LowerConstResult {
|
||||
fn lowerConst(zig_object: *ZigObject, wasm_file: *Wasm, name: []const u8, val: Value, src_loc: Module.LazySrcLoc) !LowerConstResult {
|
||||
const gpa = wasm_file.base.comp.gpa;
|
||||
const mod = wasm_file.base.comp.module.?;
|
||||
|
||||
@ -833,13 +833,17 @@ pub fn getAnonDeclVAddr(
|
||||
return target_symbol_index;
|
||||
}
|
||||
|
||||
pub fn deleteDeclExport(
|
||||
pub fn deleteExport(
|
||||
zig_object: *ZigObject,
|
||||
wasm_file: *Wasm,
|
||||
decl_index: InternPool.DeclIndex,
|
||||
exported: Zcu.Exported,
|
||||
name: InternPool.NullTerminatedString,
|
||||
) void {
|
||||
const mod = wasm_file.base.comp.module.?;
|
||||
const decl_index = switch (exported) {
|
||||
.decl_index => |decl_index| decl_index,
|
||||
.value => @panic("TODO: implement Wasm linker code for exporting a constant value"),
|
||||
};
|
||||
const decl_info = zig_object.decls_map.getPtr(decl_index) orelse return;
|
||||
if (decl_info.@"export"(zig_object, name.toSlice(&mod.intern_pool))) |sym_index| {
|
||||
const sym = zig_object.symbol(sym_index);
|
||||
@ -856,7 +860,7 @@ pub fn updateExports(
|
||||
wasm_file: *Wasm,
|
||||
mod: *Module,
|
||||
exported: Module.Exported,
|
||||
exports: []const *Module.Export,
|
||||
export_indices: []const u32,
|
||||
) !void {
|
||||
const decl_index = switch (exported) {
|
||||
.decl_index => |i| i,
|
||||
@ -873,11 +877,12 @@ pub fn updateExports(
|
||||
const gpa = mod.gpa;
|
||||
log.debug("Updating exports for decl '{}'", .{decl.name.fmt(&mod.intern_pool)});
|
||||
|
||||
for (exports) |exp| {
|
||||
for (export_indices) |export_idx| {
|
||||
const exp = mod.all_exports.items[export_idx];
|
||||
if (exp.opts.section.toSlice(&mod.intern_pool)) |section| {
|
||||
try mod.failed_exports.putNoClobber(gpa, exp, try Module.ErrorMsg.create(
|
||||
try mod.failed_exports.putNoClobber(gpa, export_idx, try Module.ErrorMsg.create(
|
||||
gpa,
|
||||
decl.navSrcLoc(mod).upgrade(mod),
|
||||
decl.navSrcLoc(mod),
|
||||
"Unimplemented: ExportOptions.section '{s}'",
|
||||
.{section},
|
||||
));
|
||||
@ -908,9 +913,9 @@ pub fn updateExports(
|
||||
},
|
||||
.strong => {}, // symbols are strong by default
|
||||
.link_once => {
|
||||
try mod.failed_exports.putNoClobber(gpa, exp, try Module.ErrorMsg.create(
|
||||
try mod.failed_exports.putNoClobber(gpa, export_idx, try Module.ErrorMsg.create(
|
||||
gpa,
|
||||
decl.navSrcLoc(mod).upgrade(mod),
|
||||
decl.navSrcLoc(mod),
|
||||
"Unimplemented: LinkOnce",
|
||||
.{},
|
||||
));
|
||||
@ -1247,7 +1252,8 @@ const Zcu = @import("../../Zcu.zig");
|
||||
const Module = Zcu;
|
||||
const StringTable = @import("../StringTable.zig");
|
||||
const Symbol = @import("Symbol.zig");
|
||||
const Type = @import("../../type.zig").Type;
|
||||
const Type = @import("../../Type.zig");
|
||||
const Value = @import("../../Value.zig");
|
||||
const Wasm = @import("../Wasm.zig");
|
||||
const AnalUnit = InternPool.AnalUnit;
|
||||
const ZigObject = @This();
|
||||
|
||||
@ -3,7 +3,7 @@ const assert = std.debug.assert;
|
||||
const Allocator = std.mem.Allocator;
|
||||
const Zcu = @import("Zcu.zig");
|
||||
const InternPool = @import("InternPool.zig");
|
||||
const Type = @import("type.zig").Type;
|
||||
const Type = @import("Type.zig");
|
||||
const Value = @import("Value.zig");
|
||||
|
||||
/// We use a tagged union here because while it wastes a few bytes for some tags, having a fixed
|
||||
|
||||
@ -4,7 +4,7 @@ const fmtIntSizeBin = std.fmt.fmtIntSizeBin;
|
||||
|
||||
const Zcu = @import("Zcu.zig");
|
||||
const Value = @import("Value.zig");
|
||||
const Type = @import("type.zig").Type;
|
||||
const Type = @import("Type.zig");
|
||||
const Air = @import("Air.zig");
|
||||
const Liveness = @import("Liveness.zig");
|
||||
const InternPool = @import("InternPool.zig");
|
||||
|
||||
@ -2,7 +2,7 @@
|
||||
//! It is a thin wrapper around a `Value` which also, redundantly, stores its `Type`.
|
||||
|
||||
const std = @import("std");
|
||||
const Type = @import("type.zig").Type;
|
||||
const Type = @import("Type.zig");
|
||||
const Value = @import("Value.zig");
|
||||
const Zcu = @import("Zcu.zig");
|
||||
/// Deprecated.
|
||||
@ -81,12 +81,12 @@ pub fn print(
|
||||
}),
|
||||
.int => |int| switch (int.storage) {
|
||||
inline .u64, .i64, .big_int => |x| try writer.print("{}", .{x}),
|
||||
.lazy_align => |ty| if (opt_sema) |sema| {
|
||||
const a = (try Type.fromInterned(ty).abiAlignmentAdvanced(mod, .{ .sema = sema })).scalar;
|
||||
.lazy_align => |ty| if (opt_sema != null) {
|
||||
const a = (try Type.fromInterned(ty).abiAlignmentAdvanced(mod, .sema)).scalar;
|
||||
try writer.print("{}", .{a.toByteUnits() orelse 0});
|
||||
} else try writer.print("@alignOf({})", .{Type.fromInterned(ty).fmt(mod)}),
|
||||
.lazy_size => |ty| if (opt_sema) |sema| {
|
||||
const s = (try Type.fromInterned(ty).abiSizeAdvanced(mod, .{ .sema = sema })).scalar;
|
||||
.lazy_size => |ty| if (opt_sema != null) {
|
||||
const s = (try Type.fromInterned(ty).abiSizeAdvanced(mod, .sema)).scalar;
|
||||
try writer.print("{}", .{s});
|
||||
} else try writer.print("@sizeOf({})", .{Type.fromInterned(ty).fmt(mod)}),
|
||||
},
|
||||
|
||||
@ -5,7 +5,7 @@ const assert = std.debug.assert;
|
||||
const Allocator = std.mem.Allocator;
|
||||
const Air = @import("Air.zig");
|
||||
const StaticBitSet = std.bit_set.StaticBitSet;
|
||||
const Type = @import("type.zig").Type;
|
||||
const Type = @import("Type.zig");
|
||||
const Zcu = @import("Zcu.zig");
|
||||
/// Deprecated.
|
||||
const Module = Zcu;
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
const std = @import("std");
|
||||
const Type = @import("type.zig").Type;
|
||||
const Type = @import("Type.zig");
|
||||
const AddressSpace = std.builtin.AddressSpace;
|
||||
const Alignment = @import("InternPool.zig").Alignment;
|
||||
const Feature = @import("Zcu.zig").Feature;
|
||||
|
||||
3617
src/type.zig
File diff suppressed because it is too large
@ -16,6 +16,7 @@ pub export fn entry() void {
|
||||
// target=native
|
||||
//
|
||||
// :6:5: error: found compile log statement
|
||||
// :6:5: note: also here
|
||||
//
|
||||
// Compile Log Output:
|
||||
// @as(tmp.Bar, .{ .X = 123 })
|
||||
|
||||
@ -18,6 +18,7 @@ export fn baz() void {
|
||||
//
|
||||
// :6:5: error: found compile log statement
|
||||
// :12:5: note: also here
|
||||
// :6:5: note: also here
|
||||
//
|
||||
// Compile Log Output:
|
||||
// @as(*const [5:0]u8, "begin")
|
||||
|
||||
@ -10,4 +10,3 @@ export fn entry() usize {
|
||||
// target=native
|
||||
//
|
||||
// :1:11: error: struct 'tmp.A' depends on itself
|
||||
// :2:5: note: while checking this field
|
||||
|
||||
@ -16,6 +16,3 @@ export fn entry() usize {
|
||||
// target=native
|
||||
//
|
||||
// :1:11: error: struct 'tmp.A' depends on itself
|
||||
// :8:5: note: while checking this field
|
||||
// :5:5: note: while checking this field
|
||||
// :2:5: note: while checking this field
|
||||
|
||||
@ -13,4 +13,3 @@ export fn entry() usize {
|
||||
// target=native
|
||||
//
|
||||
// :1:13: error: struct 'tmp.Foo' depends on itself
|
||||
// :2:5: note: while checking this field
|
||||
|
||||
@ -13,4 +13,3 @@ export fn entry() usize {
|
||||
// target=native
|
||||
//
|
||||
// :1:13: error: union 'tmp.Foo' depends on itself
|
||||
// :2:5: note: while checking this field
|
||||
|
||||
@ -16,4 +16,3 @@ comptime {
|
||||
// target=native
|
||||
//
|
||||
// :6:21: error: struct layout depends on it having runtime bits
|
||||
// :4:13: note: while checking this field
|
||||
|
||||
@ -15,5 +15,3 @@ export fn entry() void {
|
||||
// target=native
|
||||
//
|
||||
// :1:17: error: struct 'tmp.LhsExpr' depends on itself
|
||||
// :5:5: note: while checking this field
|
||||
// :2:5: note: while checking this field
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
pub export fn entry(param: usize) usize {
|
||||
return struct { param };
|
||||
return struct { @TypeOf(param) };
|
||||
}
|
||||
|
||||
// error
|
||||
|
||||
@ -395,10 +395,7 @@ fn addFromDirInner(
|
||||
if (entry.kind != .file) continue;
|
||||
|
||||
// Ignore stuff such as .swp files
|
||||
switch (Compilation.classifyFileExt(entry.basename)) {
|
||||
.unknown => continue,
|
||||
else => {},
|
||||
}
|
||||
if (!knownFileExtension(entry.basename)) continue;
|
||||
try filenames.append(try ctx.arena.dupe(u8, entry.path));
|
||||
}
|
||||
|
||||
@ -623,8 +620,6 @@ pub fn lowerToBuildSteps(
|
||||
b: *std.Build,
|
||||
parent_step: *std.Build.Step,
|
||||
test_filters: []const []const u8,
|
||||
cases_dir_path: []const u8,
|
||||
incremental_exe: *std.Build.Step.Compile,
|
||||
) void {
|
||||
const host = std.zig.system.resolveTargetQuery(.{}) catch |err|
|
||||
std.debug.panic("unable to detect native host: {s}\n", .{@errorName(err)});
|
||||
@ -637,20 +632,11 @@ pub fn lowerToBuildSteps(
|
||||
// compilation is in a happier state.
|
||||
continue;
|
||||
}
|
||||
for (test_filters) |test_filter| {
|
||||
if (std.mem.indexOf(u8, incr_case.base_path, test_filter)) |_| break;
|
||||
} else if (test_filters.len > 0) continue;
|
||||
const case_base_path_with_dir = std.fs.path.join(b.allocator, &.{
|
||||
cases_dir_path, incr_case.base_path,
|
||||
}) catch @panic("OOM");
|
||||
const run = b.addRunArtifact(incremental_exe);
|
||||
run.setName(incr_case.base_path);
|
||||
run.addArgs(&.{
|
||||
case_base_path_with_dir,
|
||||
b.graph.zig_exe,
|
||||
});
|
||||
run.expectStdOutEqual("");
|
||||
parent_step.dependOn(&run.step);
|
||||
// TODO: the logic for running these was bad, so I've ripped it out. Rewrite this
|
||||
// in a way that actually spawns the compiler, communicating with it over the
|
||||
// compiler server protocol.
|
||||
_ = incr_case;
|
||||
@panic("TODO implement incremental test case executor");
|
||||
}
|
||||
|
||||
for (self.cases.items) |case| {
|
||||
@ -1236,192 +1222,6 @@ const assert = std.debug.assert;
|
||||
const Allocator = std.mem.Allocator;
|
||||
const getExternalExecutor = std.zig.system.getExternalExecutor;
|
||||
|
||||
const Compilation = @import("../../src/Compilation.zig");
|
||||
const zig_h = @import("../../src/link.zig").File.C.zig_h;
|
||||
const introspect = @import("../../src/introspect.zig");
|
||||
const ThreadPool = std.Thread.Pool;
|
||||
const WaitGroup = std.Thread.WaitGroup;
|
||||
const build_options = @import("build_options");
|
||||
const Package = @import("../../src/Package.zig");
|
||||
|
||||
pub const std_options = .{
|
||||
.log_level = .err,
|
||||
};
|
||||
|
||||
var general_purpose_allocator = std.heap.GeneralPurposeAllocator(.{
|
||||
.stack_trace_frames = build_options.mem_leak_frames,
|
||||
}){};
|
||||
|
||||
// TODO: instead of embedding the compiler in this process, spawn the compiler
|
||||
// as a sub-process and communicate the updates using the compiler protocol.
|
||||
pub fn main() !void {
|
||||
const use_gpa = build_options.force_gpa or !builtin.link_libc;
|
||||
const gpa = gpa: {
|
||||
if (use_gpa) {
|
||||
break :gpa general_purpose_allocator.allocator();
|
||||
}
|
||||
// We would prefer to use raw libc allocator here, but cannot
|
||||
// use it if it won't support the alignment we need.
|
||||
if (@alignOf(std.c.max_align_t) < @alignOf(i128)) {
|
||||
break :gpa std.heap.c_allocator;
|
||||
}
|
||||
break :gpa std.heap.raw_c_allocator;
|
||||
};
|
||||
|
||||
var single_threaded_arena = std.heap.ArenaAllocator.init(gpa);
|
||||
defer single_threaded_arena.deinit();
|
||||
|
||||
var thread_safe_arena: std.heap.ThreadSafeAllocator = .{
|
||||
.child_allocator = single_threaded_arena.allocator(),
|
||||
};
|
||||
const arena = thread_safe_arena.allocator();
|
||||
|
||||
const args = try std.process.argsAlloc(arena);
|
||||
const case_file_path = args[1];
|
||||
const zig_exe_path = args[2];
|
||||
|
||||
var filenames = std.ArrayList([]const u8).init(arena);
|
||||
|
||||
const case_dirname = std.fs.path.dirname(case_file_path).?;
|
||||
var iterable_dir = try std.fs.cwd().openDir(case_dirname, .{ .iterate = true });
|
||||
defer iterable_dir.close();
|
||||
|
||||
if (std.mem.endsWith(u8, case_file_path, ".0.zig")) {
|
||||
const stem = case_file_path[case_dirname.len + 1 .. case_file_path.len - "0.zig".len];
|
||||
var it = iterable_dir.iterate();
|
||||
while (try it.next()) |entry| {
|
||||
if (entry.kind != .file) continue;
|
||||
if (!std.mem.startsWith(u8, entry.name, stem)) continue;
|
||||
try filenames.append(try std.fs.path.join(arena, &.{ case_dirname, entry.name }));
|
||||
}
|
||||
} else {
|
||||
try filenames.append(case_file_path);
|
||||
}
|
||||
|
||||
if (filenames.items.len == 0) {
|
||||
std.debug.print("failed to find the input source file(s) from '{s}'\n", .{
|
||||
case_file_path,
|
||||
});
|
||||
std.process.exit(1);
|
||||
}
|
||||
|
||||
// Sort filenames, so that incremental tests are contiguous and in-order
|
||||
sortTestFilenames(filenames.items);
|
||||
|
||||
var ctx = Cases.init(gpa, arena);
|
||||
|
||||
var test_it = TestIterator{ .filenames = filenames.items };
|
||||
while (try test_it.next()) |batch| {
|
||||
const strategy: TestStrategy = if (batch.len > 1) .incremental else .independent;
|
||||
var cases = std.ArrayList(usize).init(arena);
|
||||
|
||||
for (batch) |filename| {
|
||||
const max_file_size = 10 * 1024 * 1024;
|
||||
const src = try iterable_dir.readFileAllocOptions(arena, filename, max_file_size, null, 1, 0);
|
||||
|
||||
// Parse the manifest
|
||||
var manifest = try TestManifest.parse(arena, src);
|
||||
|
||||
if (cases.items.len == 0) {
|
||||
const backends = try manifest.getConfigForKeyAlloc(arena, "backend", Backend);
|
||||
const targets = try manifest.getConfigForKeyAlloc(arena, "target", std.Target.Query);
|
||||
const c_frontends = try manifest.getConfigForKeyAlloc(ctx.arena, "c_frontend", CFrontend);
|
||||
const is_test = try manifest.getConfigForKeyAssertSingle("is_test", bool);
|
||||
const link_libc = try manifest.getConfigForKeyAssertSingle("link_libc", bool);
|
||||
const output_mode = try manifest.getConfigForKeyAssertSingle("output_mode", std.builtin.OutputMode);
|
||||
|
||||
if (manifest.type == .translate_c) {
|
||||
for (c_frontends) |c_frontend| {
|
||||
for (targets) |target_query| {
|
||||
const output = try manifest.trailingLinesSplit(ctx.arena);
|
||||
try ctx.translate.append(.{
|
||||
.name = std.fs.path.stem(filename),
|
||||
.c_frontend = c_frontend,
|
||||
.target = resolveTargetQuery(target_query),
|
||||
.is_test = is_test,
|
||||
.link_libc = link_libc,
|
||||
.input = src,
|
||||
.kind = .{ .translate = output },
|
||||
});
|
||||
}
|
||||
}
|
||||
continue;
|
||||
}
|
||||
if (manifest.type == .run_translated_c) {
|
||||
for (c_frontends) |c_frontend| {
|
||||
for (targets) |target_query| {
|
||||
const output = try manifest.trailingSplit(ctx.arena);
|
||||
try ctx.translate.append(.{
|
||||
.name = std.fs.path.stem(filename),
|
||||
.c_frontend = c_frontend,
|
||||
.target = resolveTargetQuery(target_query),
|
||||
.is_test = is_test,
|
||||
.link_libc = link_libc,
|
||||
.output = output,
|
||||
.input = src,
|
||||
.kind = .{ .run = output },
|
||||
});
|
||||
}
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
                // Cross-product to get all possible test combinations
                for (backends) |backend| {
                    for (targets) |target| {
                        const next = ctx.cases.items.len;
                        try ctx.cases.append(.{
                            .name = std.fs.path.stem(filename),
                            .target = target,
                            .backend = backend,
                            .updates = std.ArrayList(Cases.Update).init(ctx.cases.allocator),
                            .is_test = is_test,
                            .output_mode = output_mode,
                            .link_libc = backend == .llvm,
                            .deps = std.ArrayList(DepModule).init(ctx.cases.allocator),
                        });
                        try cases.append(next);
                    }
                }
            }

            for (cases.items) |case_index| {
                const case = &ctx.cases.items[case_index];
                if (strategy == .incremental and case.backend == .stage2 and case.target.getCpuArch() == .x86_64 and !case.link_libc and case.target.getOsTag() != .plan9) {
                    // https://github.com/ziglang/zig/issues/15174
                    continue;
                }

                switch (manifest.type) {
                    .compile => {
                        case.addCompile(src);
                    },
                    .@"error" => {
                        const errors = try manifest.trailingLines(arena);
                        switch (strategy) {
                            .independent => {
                                case.addError(src, errors);
                            },
                            .incremental => {
                                case.addErrorNamed("update", src, errors);
                            },
                        }
                    },
                    .run => {
                        const output = try manifest.trailingSplit(ctx.arena);
                        case.addCompareOutput(src, output);
                    },
                    .translate_c => @panic("c_frontend specified for compile case"),
                    .run_translated_c => @panic("c_frontend specified for compile case"),
                    .cli => @panic("TODO cli tests"),
                }
            }
        }
    }

    return runCases(&ctx, zig_exe_path);
}

fn resolveTargetQuery(query: std.Target.Query) std.Build.ResolvedTarget {
    return .{
        .query = query,
@ -1430,470 +1230,33 @@ fn resolveTargetQuery(query: std.Target.Query) std.Build.ResolvedTarget {
    };
}

fn runCases(self: *Cases, zig_exe_path: []const u8) !void {
    const host = try std.zig.system.resolveTargetQuery(.{});

    var progress = std.Progress{};
    const root_node = progress.start("compiler", self.cases.items.len);
    progress.terminal = null;
    defer root_node.end();

    var zig_lib_directory = try introspect.findZigLibDirFromSelfExe(self.gpa, zig_exe_path);
    defer zig_lib_directory.handle.close();
    defer self.gpa.free(zig_lib_directory.path.?);

    var aux_thread_pool: ThreadPool = undefined;
    try aux_thread_pool.init(.{ .allocator = self.gpa });
    defer aux_thread_pool.deinit();

    // Use the same global cache dir for all the tests, such that we for example don't have to
    // rebuild musl libc for every case (when LLVM backend is enabled).
    var global_tmp = std.testing.tmpDir(.{});
    defer global_tmp.cleanup();

    var cache_dir = try global_tmp.dir.makeOpenPath(".zig-cache", .{});
    defer cache_dir.close();
    const tmp_dir_path = try std.fs.path.join(self.gpa, &[_][]const u8{ ".", ".zig-cache", "tmp", &global_tmp.sub_path });
    defer self.gpa.free(tmp_dir_path);

    const global_cache_directory: Compilation.Directory = .{
        .handle = cache_dir,
        .path = try std.fs.path.join(self.gpa, &[_][]const u8{ tmp_dir_path, ".zig-cache" }),
    };
    defer self.gpa.free(global_cache_directory.path.?);

    {
        for (self.cases.items) |*case| {
            if (build_options.skip_non_native) {
                if (case.target.getCpuArch() != builtin.cpu.arch)
                    continue;
                if (case.target.getObjectFormat() != builtin.object_format)
                    continue;
            }

            // Skip tests that require LLVM backend when it is not available
            if (!build_options.have_llvm and case.backend == .llvm)
                continue;

            assert(case.backend != .stage1);

            for (build_options.test_filters) |test_filter| {
                if (std.mem.indexOf(u8, case.name, test_filter)) |_| break;
            } else if (build_options.test_filters.len > 0) continue;

            var prg_node = root_node.start(case.name, case.updates.items.len);
            prg_node.activate();
            defer prg_node.end();

            try runOneCase(
                self.gpa,
                &prg_node,
                case.*,
                zig_lib_directory,
                zig_exe_path,
                &aux_thread_pool,
                global_cache_directory,
                host,
            );
        }

        for (self.translate.items) |*case| {
            _ = case;
            @panic("TODO is this even used?");
        }
fn knownFileExtension(filename: []const u8) bool {
    // List taken from `Compilation.classifyFileExt` in the compiler.
    for ([_][]const u8{
        ".c", ".C", ".cc", ".cpp",
        ".cxx", ".stub", ".m", ".mm",
        ".ll", ".bc", ".s", ".S",
        ".h", ".zig", ".so", ".dll",
        ".dylib", ".tbd", ".a", ".lib",
        ".o", ".obj", ".cu", ".def",
        ".rc", ".res", ".manifest",
    }) |ext| {
        if (std.mem.endsWith(u8, filename, ext)) return true;
    }
    // Final check for .so.X, .so.X.Y, .so.X.Y.Z.
    // From `Compilation.hasSharedLibraryExt`.
    var it = std.mem.splitScalar(u8, filename, '.');
    _ = it.first();
    var so_txt = it.next() orelse return false;
    while (!std.mem.eql(u8, so_txt, "so")) {
        so_txt = it.next() orelse return false;
    }
    const n1 = it.next() orelse return false;
    const n2 = it.next();
    const n3 = it.next();
    _ = std.fmt.parseInt(u32, n1, 10) catch return false;
    if (n2) |x| _ = std.fmt.parseInt(u32, x, 10) catch return false;
    if (n3) |x| _ = std.fmt.parseInt(u32, x, 10) catch return false;
    if (it.next() != null) return false;
    return true;
}

fn runOneCase(
    allocator: Allocator,
    root_node: *std.Progress.Node,
    case: Case,
    zig_lib_directory: Compilation.Directory,
    zig_exe_path: []const u8,
    thread_pool: *ThreadPool,
    global_cache_directory: Compilation.Directory,
    host: std.Target,
) !void {
    const tmp_src_path = "tmp.zig";
    const enable_rosetta = build_options.enable_rosetta;
    const enable_qemu = build_options.enable_qemu;
    const enable_wine = build_options.enable_wine;
    const enable_wasmtime = build_options.enable_wasmtime;
    const enable_darling = build_options.enable_darling;
    const glibc_runtimes_dir: ?[]const u8 = build_options.glibc_runtimes_dir;

    const target = try std.zig.system.resolveTargetQuery(case.target);

    var arena_allocator = std.heap.ArenaAllocator.init(allocator);
    defer arena_allocator.deinit();
    const arena = arena_allocator.allocator();

    var tmp = std.testing.tmpDir(.{});
    defer tmp.cleanup();

    var cache_dir = try tmp.dir.makeOpenPath(".zig-cache", .{});
    defer cache_dir.close();

    const tmp_dir_path = try std.fs.path.join(
        arena,
        &[_][]const u8{ ".", ".zig-cache", "tmp", &tmp.sub_path },
    );
    const local_cache_path = try std.fs.path.join(
        arena,
        &[_][]const u8{ tmp_dir_path, ".zig-cache" },
    );

    const zig_cache_directory: Compilation.Directory = .{
        .handle = cache_dir,
        .path = local_cache_path,
    };

    var main_pkg: Package = .{
        .root_src_directory = .{ .path = tmp_dir_path, .handle = tmp.dir },
        .root_src_path = tmp_src_path,
    };
    defer {
        var it = main_pkg.table.iterator();
        while (it.next()) |kv| {
            allocator.free(kv.key_ptr.*);
            kv.value_ptr.*.destroy(allocator);
        }
        main_pkg.table.deinit(allocator);
    }

    for (case.deps.items) |dep| {
        var pkg = try Package.create(
            allocator,
            tmp_dir_path,
            dep.path,
        );
        errdefer pkg.destroy(allocator);
        try main_pkg.add(allocator, dep.name, pkg);
    }

    const bin_name = try std.zig.binNameAlloc(arena, .{
        .root_name = "test_case",
        .target = target,
        .output_mode = case.output_mode,
    });

    const emit_directory: Compilation.Directory = .{
        .path = tmp_dir_path,
        .handle = tmp.dir,
    };
    const emit_bin: Compilation.EmitLoc = .{
        .directory = emit_directory,
        .basename = bin_name,
    };
    const emit_h: ?Compilation.EmitLoc = if (case.emit_h) .{
        .directory = emit_directory,
        .basename = "test_case.h",
    } else null;
    const use_llvm: bool = switch (case.backend) {
        .llvm => true,
        else => false,
    };
    const comp = try Compilation.create(allocator, .{
        .local_cache_directory = zig_cache_directory,
        .global_cache_directory = global_cache_directory,
        .zig_lib_directory = zig_lib_directory,
        .thread_pool = thread_pool,
        .root_name = "test_case",
        .target = target,
        // TODO: support tests for object file building, and library builds
        // and linking. This will require a rework to support multi-file
        // tests.
        .output_mode = case.output_mode,
        .is_test = case.is_test,
        .optimize_mode = case.optimize_mode,
        .emit_bin = emit_bin,
        .emit_h = emit_h,
        .main_pkg = &main_pkg,
        .keep_source_files_loaded = true,
        .is_native_os = case.target.isNativeOs(),
        .is_native_abi = case.target.isNativeAbi(),
        .dynamic_linker = target.dynamic_linker.get(),
        .link_libc = case.link_libc,
        .use_llvm = use_llvm,
        .self_exe_path = zig_exe_path,
        // TODO instead of turning off color, pass in a std.Progress.Node
        .color = .off,
        .reference_trace = 0,
        // TODO: force self-hosted linkers with stage2 backend to avoid LLD creeping in
        // until the auto-select mechanism deems them worthy
        .use_lld = switch (case.backend) {
            .stage2 => false,
            else => null,
        },
    });
    defer comp.destroy();

    update: for (case.updates.items, 0..) |update, update_index| {
        var update_node = root_node.start(update.name, 3);
        update_node.activate();
        defer update_node.end();

        var sync_node = update_node.start("write", 0);
        sync_node.activate();
        for (update.files.items) |file| {
            try tmp.dir.writeFile(.{ .sub_path = file.path, .data = file.src });
        }
        sync_node.end();

        var module_node = update_node.start("parse/analysis/codegen", 0);
        module_node.activate();
        try comp.makeBinFileWritable();
        try comp.update(&module_node);
        module_node.end();

        if (update.case != .Error) {
            var all_errors = try comp.getAllErrorsAlloc();
            defer all_errors.deinit(allocator);
            if (all_errors.errorMessageCount() > 0) {
                all_errors.renderToStdErr(.{
                    .ttyconf = std.io.tty.detectConfig(std.io.getStdErr()),
                });
                // TODO print generated C code
                return error.UnexpectedCompileErrors;
            }
        }

        switch (update.case) {
            .Header => |expected_output| {
                var file = try tmp.dir.openFile("test_case.h", .{ .mode = .read_only });
                defer file.close();
                const out = try file.reader().readAllAlloc(arena, 5 * 1024 * 1024);

                try std.testing.expectEqualStrings(expected_output, out);
            },
            .CompareObjectFile => |expected_output| {
                var file = try tmp.dir.openFile(bin_name, .{ .mode = .read_only });
                defer file.close();
                const out = try file.reader().readAllAlloc(arena, 5 * 1024 * 1024);

                try std.testing.expectEqualStrings(expected_output, out);
            },
            .Compile => {},
            .Error => |expected_errors| {
                var test_node = update_node.start("assert", 0);
                test_node.activate();
                defer test_node.end();

                var error_bundle = try comp.getAllErrorsAlloc();
                defer error_bundle.deinit(allocator);

                if (error_bundle.errorMessageCount() == 0) {
                    return error.ExpectedCompilationErrors;
                }

                var actual_stderr = std.ArrayList(u8).init(arena);
                try error_bundle.renderToWriter(.{
                    .ttyconf = .no_color,
                    .include_reference_trace = false,
                    .include_source_line = false,
                }, actual_stderr.writer());

                // Render the expected lines into a string that we can compare verbatim.
                var expected_generated = std.ArrayList(u8).init(arena);

                var actual_line_it = std.mem.splitScalar(u8, actual_stderr.items, '\n');
                for (expected_errors) |expect_line| {
                    const actual_line = actual_line_it.next() orelse {
                        try expected_generated.appendSlice(expect_line);
                        try expected_generated.append('\n');
                        continue;
                    };
                    if (std.mem.endsWith(u8, actual_line, expect_line)) {
                        try expected_generated.appendSlice(actual_line);
                        try expected_generated.append('\n');
                        continue;
                    }
                    if (std.mem.startsWith(u8, expect_line, ":?:?: ")) {
                        if (std.mem.endsWith(u8, actual_line, expect_line[":?:?: ".len..])) {
                            try expected_generated.appendSlice(actual_line);
                            try expected_generated.append('\n');
                            continue;
                        }
                    }
                    try expected_generated.appendSlice(expect_line);
                    try expected_generated.append('\n');
                }

                try std.testing.expectEqualStrings(expected_generated.items, actual_stderr.items);
            },
            .Execution => |expected_stdout| {
                if (!std.process.can_spawn) {
                    std.debug.print("Unable to spawn child processes on {s}, skipping test.\n", .{@tagName(builtin.os.tag)});
                    continue :update; // Pass test.
                }

                update_node.setEstimatedTotalItems(4);

                var argv = std.ArrayList([]const u8).init(allocator);
                defer argv.deinit();

                const exec_result = x: {
                    var exec_node = update_node.start("execute", 0);
                    exec_node.activate();
                    defer exec_node.end();

                    // We go out of our way here to use the unique temporary directory name in
                    // the exe_path so that it makes its way into the cache hash, avoiding
                    // cache collisions from multiple threads doing `zig run` at the same time
                    // on the same test_case.c input filename.
                    const ss = std.fs.path.sep_str;
                    const exe_path = try std.fmt.allocPrint(
                        arena,
                        ".." ++ ss ++ "{s}" ++ ss ++ "{s}",
                        .{ &tmp.sub_path, bin_name },
                    );
                    if (case.target.ofmt != null and case.target.ofmt.? == .c) {
                        if (getExternalExecutor(host, &target, .{ .link_libc = true }) != .native) {
                            // We wouldn't be able to run the compiled C code.
                            continue :update; // Pass test.
                        }
                        try argv.appendSlice(&[_][]const u8{
                            zig_exe_path,
                            "run",
                            "-cflags",
                            "-std=c99",
                            "-pedantic",
                            "-Werror",
                            "-Wno-incompatible-library-redeclaration", // https://github.com/ziglang/zig/issues/875
                            "--",
                            "-lc",
                            exe_path,
                        });
                        if (zig_lib_directory.path) |p| {
                            try argv.appendSlice(&.{ "-I", p });
                        }
                    } else switch (getExternalExecutor(host, &target, .{ .link_libc = case.link_libc })) {
                        .native => {
                            if (case.backend == .stage2 and case.target.getCpuArch().isArmOrThumb()) {
                                // https://github.com/ziglang/zig/issues/13623
                                continue :update; // Pass test.
                            }
                            try argv.append(exe_path);
                        },
                        .bad_dl, .bad_os_or_cpu => continue :update, // Pass test.

                        .rosetta => if (enable_rosetta) {
                            try argv.append(exe_path);
                        } else {
                            continue :update; // Rosetta not available, pass test.
                        },

                        .qemu => |qemu_bin_name| if (enable_qemu) {
                            const need_cross_glibc = target.isGnuLibC() and case.link_libc;
                            const glibc_dir_arg: ?[]const u8 = if (need_cross_glibc)
                                glibc_runtimes_dir orelse continue :update // glibc dir not available; pass test
                            else
                                null;
                            try argv.append(qemu_bin_name);
                            if (glibc_dir_arg) |dir| {
                                const linux_triple = try target.linuxTriple(arena);
                                const full_dir = try std.fs.path.join(arena, &[_][]const u8{
                                    dir,
                                    linux_triple,
                                });

                                try argv.append("-L");
                                try argv.append(full_dir);
                            }
                            try argv.append(exe_path);
                        } else {
                            continue :update; // QEMU not available; pass test.
                        },

                        .wine => |wine_bin_name| if (enable_wine) {
                            try argv.append(wine_bin_name);
                            try argv.append(exe_path);
                        } else {
                            continue :update; // Wine not available; pass test.
                        },

                        .wasmtime => |wasmtime_bin_name| if (enable_wasmtime) {
                            try argv.append(wasmtime_bin_name);
                            try argv.append("--dir=.");
                            try argv.append(exe_path);
                        } else {
                            continue :update; // wasmtime not available; pass test.
                        },

                        .darling => |darling_bin_name| if (enable_darling) {
                            try argv.append(darling_bin_name);
                            // Since we use relative to cwd here, we invoke darling with
                            // "shell" subcommand.
                            try argv.append("shell");
                            try argv.append(exe_path);
                        } else {
                            continue :update; // Darling not available; pass test.
                        },
                    }

                    try comp.makeBinFileExecutable();

                    while (true) {
                        break :x std.process.Child.run(.{
                            .allocator = allocator,
                            .argv = argv.items,
                            .cwd_dir = tmp.dir,
                            .cwd = tmp_dir_path,
                        }) catch |err| switch (err) {
                            error.FileBusy => {
                                // There is a fundamental design flaw in Unix systems with how
                                // ETXTBSY interacts with fork+exec.
                                // https://github.com/golang/go/issues/22315
                                // https://bugs.openjdk.org/browse/JDK-8068370
                                // Unfortunately, this could be a real error, but we can't
                                // tell the difference here.
                                continue;
                            },
                            else => {
                                std.debug.print("\n{s}.{d} The following command failed with {s}:\n", .{
                                    case.name, update_index, @errorName(err),
                                });
                                dumpArgs(argv.items);
                                return error.ChildProcessExecution;
                            },
                        };
                    }
                };
                var test_node = update_node.start("test", 0);
                test_node.activate();
                defer test_node.end();
                defer allocator.free(exec_result.stdout);
                defer allocator.free(exec_result.stderr);
                switch (exec_result.term) {
                    .Exited => |code| {
                        if (code != 0) {
                            std.debug.print("\n{s}\n{s}: execution exited with code {d}:\n", .{
                                exec_result.stderr, case.name, code,
                            });
                            dumpArgs(argv.items);
                            return error.ChildProcessExecution;
                        }
                    },
                    else => {
                        std.debug.print("\n{s}\n{s}: execution crashed:\n", .{
                            exec_result.stderr, case.name,
                        });
                        dumpArgs(argv.items);
                        return error.ChildProcessExecution;
                    },
                }
                try std.testing.expectEqualStrings(expected_stdout, exec_result.stdout);
                // We allow stderr to have garbage in it because wasmtime prints a
                // warning about --invoke even though we don't pass it.
                //std.testing.expectEqualStrings("", exec_result.stderr);
            },
        }
    }
}

fn dumpArgs(argv: []const []const u8) void {
    for (argv) |arg| {
        std.debug.print("{s} ", .{arg});
    }
    std.debug.print("\n", .{});
}

@ -1250,7 +1250,6 @@ pub fn addCases(
    b: *std.Build,
    parent_step: *Step,
    test_filters: []const []const u8,
    check_case_exe: *std.Build.Step.Compile,
    target: std.Build.ResolvedTarget,
    translate_c_options: @import("src/Cases.zig").TranslateCOptions,
    build_options: @import("cases.zig").BuildOptions,
@ -1268,12 +1267,9 @@ pub fn addCases(

    cases.lowerToTranslateCSteps(b, parent_step, test_filters, target, translate_c_options);

    const cases_dir_path = try b.build_root.join(b.allocator, &.{ "test", "cases" });
    cases.lowerToBuildSteps(
        b,
        parent_step,
        test_filters,
        cases_dir_path,
        check_case_exe,
    );
}