From 1b8ed7842cc09ff687aa7386bf3af8565055a8d1 Mon Sep 17 00:00:00 2001 From: Jakub Konka Date: Fri, 25 Feb 2022 15:26:24 +0100 Subject: [PATCH 1/3] macho: redo selection of segment/section for decls and consts * fix alignment issues for consts with natural ABI alignment not matching that of the `ldr` instruction in `aarch64` - solved by preceding the `ldr` with an additional `add` instruction to form the full address before dereferencing the pointer. * redo selection of segment/section for decls and consts based on combined type and value --- src/arch/aarch64/Emit.zig | 26 ++++-- src/link/MachO.zig | 173 ++++++++++++++++++-------------------- src/link/MachO/Atom.zig | 10 +-- test/behavior/align.zig | 1 - test/behavior/basic.zig | 3 - test/behavior/union.zig | 11 --- 6 files changed, 110 insertions(+), 114 deletions(-) diff --git a/src/arch/aarch64/Emit.zig b/src/arch/aarch64/Emit.zig index 31e7ea81c8..d98aa09e56 100644 --- a/src/arch/aarch64/Emit.zig +++ b/src/arch/aarch64/Emit.zig @@ -208,8 +208,8 @@ fn instructionSize(emit: *Emit, inst: Mir.Inst.Index) usize { } switch (tag) { + .load_memory_direct => return 3 * 4, .load_memory_got, - .load_memory_direct, .load_memory_ptr_got, .load_memory_ptr_direct, => return 2 * 4, @@ -654,15 +654,31 @@ fn mirLoadMemoryPie(emit: *Emit, inst: Mir.Inst.Index) !void { const data = emit.mir.extraData(Mir.LoadMemoryPie, payload).data; const reg = @intToEnum(Register, data.register); - // PC-relative displacement to the entry in the GOT table. + // PC-relative displacement to the entry in memory. // adrp const offset = @intCast(u32, emit.code.items.len); try emit.writeInstruction(Instruction.adrp(reg, 0)); switch (tag) { - .load_memory_got, - .load_memory_direct, - => { + .load_memory_got => { // ldr reg, reg, offset try emit.writeInstruction(Instruction.ldr( reg, reg, Instruction.LoadStoreOffset.imm(0), )); }, + .load_memory_direct => { + // We cannot load the offset directly as it may not be aligned properly. + // For example, a load for a 64-bit register will require the target address offset + // to be 8-byte aligned, while the value might have non-8-byte natural alignment, + // meaning the linker might have put it at a non-8-byte aligned address. To circumvent + // this, we use `adrp, add` to form the address value which we then dereference with + // `ldr`. + // Note that this can potentially be optimised out by the codegen/linker if the + // target address is appropriately aligned.
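+            // As an illustration (hypothetical symbol `_target`, x0 as the example register),
+            // once the linker resolves the page relocations, the sequence emitted below
+            // corresponds roughly to:
+            //   adrp x0, _target@PAGE
+            //   add  x0, x0, _target@PAGEOFF
+            //   ldr  x0, [x0]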
+ // add reg, reg, offset + try emit.writeInstruction(Instruction.add(reg, reg, 0, false)); // ldr reg, reg, offset try emit.writeInstruction(Instruction.ldr( reg, diff --git a/src/link/MachO.zig b/src/link/MachO.zig index 00019879ad..351acaeb17 100644 --- a/src/link/MachO.zig +++ b/src/link/MachO.zig @@ -3797,10 +3797,11 @@ pub fn lowerUnnamedConst(self: *MachO, typed_value: TypedValue, decl: *Module.De atom.code.clearRetainingCapacity(); try atom.code.appendSlice(self.base.allocator, code); - const match = try self.getMatchingSectionAtom(atom, typed_value.ty, typed_value.val); + const match = try self.getMatchingSectionAtom(atom, decl_name, typed_value.ty, typed_value.val); const addr = try self.allocateAtom(atom, code.len, required_alignment, match); log.debug("allocated atom for {s} at 0x{x}", .{ name, addr }); + log.debug(" (required alignment 0x{x})", .{required_alignment}); errdefer self.freeAtom(atom, match, true); @@ -3903,28 +3904,60 @@ pub fn updateDecl(self: *MachO, module: *Module, decl: *Module.Decl) !void { try self.updateDeclExports(module, decl, decl_exports); } -fn isElemTyPointer(ty: Type) bool { +/// Checks if the value, or any of its embedded values stores a pointer, and thus requires +/// a rebase opcode for the dynamic linker. +fn needsPointerRebase(ty: Type, val: Value) bool { + if (ty.zigTypeTag() == .Fn) { + return false; + } + if (val.pointerDecl()) |_| { + return true; + } + switch (ty.zigTypeTag()) { - .Fn => return false, + .Fn => unreachable, .Pointer => return true, - .Array => { - const elem_ty = ty.elemType(); - return isElemTyPointer(elem_ty); + .Array, .Vector => { + if (ty.arrayLen() == 0) return false; + const elem_ty = ty.childType(); + var elem_value_buf: Value.ElemValueBuffer = undefined; + const elem_val = val.elemValueBuffer(0, &elem_value_buf); + return needsPointerRebase(elem_ty, elem_val); }, - .Struct, .Union => { - const len = ty.structFieldCount(); - var i: usize = 0; - while (i < len) : (i += 1) { - const field_ty = ty.structFieldType(i); - if (isElemTyPointer(field_ty)) return true; - } - return false; + .Struct => { + const fields = ty.structFields().values(); + if (fields.len == 0) return false; + if (val.castTag(.@"struct")) |payload| { + const field_values = payload.data; + for (field_values) |field_val, i| { + if (needsPointerRebase(fields[i].ty, field_val)) return true; + } else return false; + } else return false; + }, + .Optional => { + if (val.castTag(.opt_payload)) |payload| { + const sub_val = payload.data; + var buffer: Type.Payload.ElemType = undefined; + const sub_ty = ty.optionalChild(&buffer); + return needsPointerRebase(sub_ty, sub_val); + } else return false; + }, + .Union => { + const union_obj = val.cast(Value.Payload.Union).?.data; + const active_field_ty = ty.unionFieldType(union_obj.tag); + return needsPointerRebase(active_field_ty, union_obj.val); + }, + .ErrorUnion => { + if (val.castTag(.eu_payload)) |payload| { + const payload_ty = ty.errorUnionPayload(); + return needsPointerRebase(payload_ty, payload.data); + } else return false; }, else => return false, } } -fn getMatchingSectionAtom(self: *MachO, atom: *Atom, ty: Type, val: Value) !MatchingSection { +fn getMatchingSectionAtom(self: *MachO, atom: *Atom, name: []const u8, ty: Type, val: Value) !MatchingSection { const code = atom.code.items; const alignment = ty.abiAlignment(self.base.options.target); const align_log_2 = math.log2(alignment); @@ -3938,10 +3971,25 @@ fn getMatchingSectionAtom(self: *MachO, atom: *Atom, ty: Type, val: Value) !Matc .seg = 
self.data_segment_cmd_index.?, .sect = self.bss_section_index.?, }; + } else { + break :blk MatchingSection{ + .seg = self.data_segment_cmd_index.?, + .sect = self.data_section_index.?, + }; } + } + + if (val.castTag(.variable)) |_| { + break :blk MatchingSection{ + .seg = self.data_segment_cmd_index.?, + .sect = self.data_section_index.?, + }; + } + + if (needsPointerRebase(ty, val)) { break :blk (try self.getMatchingSection(.{ - .segname = makeStaticString("__DATA"), - .sectname = makeStaticString("__data"), + .segname = makeStaticString("__DATA_CONST"), + .sectname = makeStaticString("__const"), .size = code.len, .@"align" = align_log_2, })).?; @@ -3954,8 +4002,8 @@ fn getMatchingSectionAtom(self: *MachO, atom: *Atom, ty: Type, val: Value) !Matc .sect = self.text_section_index.?, }; }, - .Array => switch (val.tag()) { - .bytes => { + .Array => { + if (val.tag() == .bytes) { switch (ty.tag()) { .array_u8_sentinel_0, .const_slice_u8_sentinel_0, @@ -3969,79 +4017,23 @@ fn getMatchingSectionAtom(self: *MachO, atom: *Atom, ty: Type, val: Value) !Matc .@"align" = align_log_2, })).?; }, - else => { - break :blk (try self.getMatchingSection(.{ - .segname = makeStaticString("__TEXT"), - .sectname = makeStaticString("__const"), - .size = code.len, - .@"align" = align_log_2, - })).?; - }, + else => {}, } - }, - .array => { - if (isElemTyPointer(ty)) { - break :blk (try self.getMatchingSection(.{ - .segname = makeStaticString("__DATA_CONST"), - .sectname = makeStaticString("__const"), - .size = code.len, - .@"align" = align_log_2, - })).?; - } else { - break :blk (try self.getMatchingSection(.{ - .segname = makeStaticString("__TEXT"), - .sectname = makeStaticString("__const"), - .size = code.len, - .@"align" = align_log_2, - })).?; - } - }, - else => { - break :blk (try self.getMatchingSection(.{ - .segname = makeStaticString("__TEXT"), - .sectname = makeStaticString("__const"), - .size = code.len, - .@"align" = align_log_2, - })).?; - }, - }, - .Pointer => { - if (val.castTag(.variable)) |_| { - break :blk MatchingSection{ - .seg = self.data_segment_cmd_index.?, - .sect = self.data_section_index.?, - }; - } else { - break :blk (try self.getMatchingSection(.{ - .segname = makeStaticString("__DATA_CONST"), - .sectname = makeStaticString("__const"), - .size = code.len, - .@"align" = align_log_2, - })).?; - } - }, - else => { - if (val.castTag(.variable)) |_| { - break :blk MatchingSection{ - .seg = self.data_segment_cmd_index.?, - .sect = self.data_section_index.?, - }; - } else { - break :blk (try self.getMatchingSection(.{ - .segname = makeStaticString("__TEXT"), - .sectname = makeStaticString("__const"), - .size = code.len, - .@"align" = align_log_2, - })).?; } }, + else => {}, } + break :blk (try self.getMatchingSection(.{ + .segname = makeStaticString("__TEXT"), + .sectname = makeStaticString("__const"), + .size = code.len, + .@"align" = align_log_2, + })).?; }; - const local = self.locals.items[atom.local_sym_index]; const seg = self.load_commands.items[match.seg].segment; const sect = seg.sections.items[match.sect]; log.debug(" allocating atom '{s}' in '{s},{s}' ({d},{d})", .{ - self.getString(local.n_strx), + name, sect.segName(), sect.sectName(), match.seg, @@ -4055,14 +4047,15 @@ fn placeDecl(self: *MachO, decl: *Module.Decl, code_len: usize) !*macho.nlist_64 assert(decl.link.macho.local_sym_index != 0); // Caller forgot to call allocateDeclIndexes() const symbol = &self.locals.items[decl.link.macho.local_sym_index]; - const decl_ptr = self.decls.getPtr(decl).?; - if (decl_ptr.* == null) { 
- decl_ptr.* = try self.getMatchingSectionAtom(&decl.link.macho, decl.ty, decl.val); - } - const match = decl_ptr.*.?; const sym_name = try decl.getFullyQualifiedName(self.base.allocator); defer self.base.allocator.free(sym_name); + const decl_ptr = self.decls.getPtr(decl).?; + if (decl_ptr.* == null) { + decl_ptr.* = try self.getMatchingSectionAtom(&decl.link.macho, sym_name, decl.ty, decl.val); + } + const match = decl_ptr.*.?; + if (decl.link.macho.size != 0) { const capacity = decl.link.macho.capacity(self.*); const need_realloc = code_len > capacity or !mem.isAlignedGeneric(u64, symbol.n_value, required_alignment); @@ -4071,6 +4064,7 @@ fn placeDecl(self: *MachO, decl: *Module.Decl, code_len: usize) !*macho.nlist_64 const vaddr = try self.growAtom(&decl.link.macho, code_len, required_alignment, match); log.debug("growing {s} and moving from 0x{x} to 0x{x}", .{ sym_name, symbol.n_value, vaddr }); + log.debug(" (required alignment 0x{x})", .{required_alignment}); if (vaddr != symbol.n_value) { log.debug(" (writing new GOT entry)", .{}); @@ -4105,6 +4099,7 @@ fn placeDecl(self: *MachO, decl: *Module.Decl, code_len: usize) !*macho.nlist_64 const addr = try self.allocateAtom(&decl.link.macho, code_len, required_alignment, match); log.debug("allocated atom for {s} at 0x{x}", .{ sym_name, addr }); + log.debug(" (required alignment 0x{x})", .{required_alignment}); errdefer self.freeAtom(&decl.link.macho, match, false); diff --git a/src/link/MachO/Atom.zig b/src/link/MachO/Atom.zig index fae1ff4eba..d0d82a12af 100644 --- a/src/link/MachO/Atom.zig +++ b/src/link/MachO/Atom.zig @@ -691,11 +691,11 @@ pub fn resolveRelocs(self: *Atom, macho_file: *MachO) !void { if (is_via_got) { const got_index = macho_file.got_entries_table.get(rel.target) orelse { - const n_strx = switch (rel.target) { - .local => |sym_index| macho_file.locals.items[sym_index].n_strx, - .global => |n_strx| n_strx, - }; - log.err("expected GOT entry for symbol '{s}'", .{macho_file.getString(n_strx)}); + log.err("expected GOT entry for symbol", .{}); + switch (rel.target) { + .local => |sym_index| log.err(" local @{d}", .{sym_index}), + .global => |n_strx| log.err(" global @'{s}'", .{macho_file.getString(n_strx)}), + } log.err(" this is an internal linker error", .{}); return error.FailedToResolveRelocationTarget; }; diff --git a/test/behavior/align.zig b/test/behavior/align.zig index 5a617b84d5..28dcd7a508 100644 --- a/test/behavior/align.zig +++ b/test/behavior/align.zig @@ -7,7 +7,6 @@ var foo: u8 align(4) = 100; test "global variable alignment" { if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; - if (builtin.zig_backend == .stage2_x86_64 and builtin.os.tag == .macos) return error.SkipZigTest; comptime try expect(@typeInfo(@TypeOf(&foo)).Pointer.alignment == 4); comptime try expect(@TypeOf(&foo) == *align(4) u8); diff --git a/test/behavior/basic.zig b/test/behavior/basic.zig index 304b4a83f7..dd3e85be67 100644 --- a/test/behavior/basic.zig +++ b/test/behavior/basic.zig @@ -195,9 +195,6 @@ test "multiline string comments at multiple places" { } test "string concatenation" { - if (builtin.zig_backend == .stage2_aarch64 and builtin.os.tag == .macos) return error.SkipZigTest; - if (builtin.zig_backend == .stage2_x86_64 and builtin.os.tag == .macos) return error.SkipZigTest; - try expect(mem.eql(u8, "OK" ++ " IT " ++ "WORKED", "OK IT WORKED")); } diff --git a/test/behavior/union.zig b/test/behavior/union.zig index 5a7fbbd98e..7f78ca9adf 100644 --- a/test/behavior/union.zig +++ b/test/behavior/union.zig @@ -44,7 
+44,6 @@ fn setInt(foo: *Foo, x: i32) void { test "comptime union field access" { if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; - if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; comptime { var foo = Foo{ .int = 0 }; @@ -77,14 +76,12 @@ const ExternPtrOrInt = extern union { }; test "extern union size" { if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; - if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; comptime try expect(@sizeOf(ExternPtrOrInt) == 8); } test "0-sized extern union definition" { if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; - if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; const U = extern union { a: void, @@ -115,9 +112,7 @@ const err = @as(anyerror!Agg, Agg{ const array = [_]Value{ v1, v2, v1, v2 }; test "unions embedded in aggregate types" { - if (builtin.zig_backend == .stage2_x86_64 and builtin.os.tag == .macos) return error.SkipZigTest; if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; - if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; switch (array[1]) { Value.Array => |arr| try expect(arr[4] == 3), @@ -131,7 +126,6 @@ test "unions embedded in aggregate types" { test "access a member of tagged union with conflicting enum tag name" { if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; - if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; const Bar = union(enum) { A: A, @@ -176,7 +170,6 @@ const TaggedUnionWithPayload = union(enum) { test "union alignment" { if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; - if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; comptime { try expect(@alignOf(AlignTestTaggedUnion) >= @alignOf([9]u8)); @@ -276,7 +269,6 @@ fn testCastUnionToTag() !void { test "union field access gives the enum values" { if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; - if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; try expect(TheUnion.A == TheTag.A); try expect(TheUnion.B == TheTag.B); @@ -352,7 +344,6 @@ const PackedPtrOrInt = packed union { }; test "packed union size" { if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; - if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; comptime try expect(@sizeOf(PackedPtrOrInt) == 8); } @@ -362,7 +353,6 @@ const ZeroBits = union { }; test "union with only 1 field which is void should be zero bits" { if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; - if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; comptime try expect(@sizeOf(ZeroBits) == 0); } @@ -422,7 +412,6 @@ test "union with only 1 field casted to its enum type" { test "union with one member defaults to u0 tag type" { if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; - if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; const U0 = union(enum) { X: u32, From 4b14384989362f93cd014628810c63b5cd9d3fff Mon Sep 17 00:00:00 2001 From: Jakub Konka Date: Fri, 25 Feb 2022 16:46:40 +0100 Subject: [PATCH 2/3] aarch64: check if type has runtime bits before allocating mem ptr --- src/arch/aarch64/CodeGen.zig | 5 +++++ src/arch/x86_64/CodeGen.zig | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/src/arch/aarch64/CodeGen.zig b/src/arch/aarch64/CodeGen.zig index 2b863fd359..dc64e77f81 100644 --- a/src/arch/aarch64/CodeGen.zig +++ b/src/arch/aarch64/CodeGen.zig @@ -786,6 +786,11 @@ fn allocMem(self: 
*Self, inst: Air.Inst.Index, abi_size: u32, abi_align: u32) !u /// Use a pointer instruction as the basis for allocating stack memory. fn allocMemPtr(self: *Self, inst: Air.Inst.Index) !u32 { const elem_ty = self.air.typeOfIndex(inst).elemType(); + + if (!elem_ty.hasRuntimeBits()) { + return self.allocMem(inst, @sizeOf(usize), @alignOf(usize)); + } + const abi_size = math.cast(u32, elem_ty.abiSize(self.target.*)) catch { return self.fail("type '{}' too big to fit into stack frame", .{elem_ty}); }; diff --git a/src/arch/x86_64/CodeGen.zig b/src/arch/x86_64/CodeGen.zig index f7713a4e69..25dc8d81aa 100644 --- a/src/arch/x86_64/CodeGen.zig +++ b/src/arch/x86_64/CodeGen.zig @@ -852,7 +852,7 @@ fn allocMemPtr(self: *Self, inst: Air.Inst.Index) !u32 { const elem_ty = ptr_ty.elemType(); if (!elem_ty.hasRuntimeBits()) { - return self.allocMem(inst, 8, 8); + return self.allocMem(inst, @sizeOf(usize), @alignOf(usize)); } const abi_size = math.cast(u32, elem_ty.abiSize(self.target.*)) catch { From e0f5627d4a9cb1b3a1361d70d40043c8c170b2af Mon Sep 17 00:00:00 2001 From: Jakub Konka Date: Fri, 25 Feb 2022 21:53:46 +0100 Subject: [PATCH 3/3] x64+aarch64: check for pointer to zero-bit type when lowering decl Unless the pointer is a pointer to a function, if the pointee type has zero-bits, we need to return `MCValue.none` as the `Decl` has not been lowered to memory, and therefore, any GOT reference will be wrong. --- src/arch/aarch64/CodeGen.zig | 10 ++++++++++ src/arch/x86_64/CodeGen.zig | 9 +++++++++ src/arch/x86_64/Emit.zig | 1 + src/link/MachO.zig | 7 +++++++ test/behavior/basic.zig | 2 -- test/behavior/struct.zig | 2 -- 6 files changed, 27 insertions(+), 4 deletions(-) diff --git a/src/arch/aarch64/CodeGen.zig b/src/arch/aarch64/CodeGen.zig index dc64e77f81..0a3070e881 100644 --- a/src/arch/aarch64/CodeGen.zig +++ b/src/arch/aarch64/CodeGen.zig @@ -3550,6 +3550,15 @@ fn getResolvedInstValue(self: *Self, inst: Air.Inst.Index) MCValue { fn lowerDeclRef(self: *Self, tv: TypedValue, decl: *Module.Decl) InnerError!MCValue { const ptr_bits = self.target.cpu.arch.ptrBitWidth(); const ptr_bytes: u64 = @divExact(ptr_bits, 8); + + // TODO this feels clunky. Perhaps we should check for it in `genTypedValue`? + if (tv.ty.zigTypeTag() == .Pointer) blk: { + if (tv.ty.castPtrToFn()) |_| break :blk; + if (!tv.ty.elemType2().hasRuntimeBits()) { + return MCValue.none; + } + } + decl.alive = true; if (self.bin_file.cast(link.File.Elf)) |elf_file| { const got = &elf_file.program_headers.items[elf_file.phdr_got_index.?]; @@ -3558,6 +3567,7 @@ fn lowerDeclRef(self: *Self, tv: TypedValue, decl: *Module.Decl) InnerError!MCVa } else if (self.bin_file.cast(link.File.MachO)) |_| { // Because MachO is PIE-always-on, we defer memory address resolution until // the linker has enough info to perform relocations. + assert(decl.link.macho.local_sym_index != 0); return MCValue{ .got_load = decl.link.macho.local_sym_index }; } else if (self.bin_file.cast(link.File.Coff)) |coff_file| { const got_addr = coff_file.offset_table_virtual_address + decl.link.coff.offset_table_index * ptr_bytes; diff --git a/src/arch/x86_64/CodeGen.zig b/src/arch/x86_64/CodeGen.zig index 25dc8d81aa..b30a38fc40 100644 --- a/src/arch/x86_64/CodeGen.zig +++ b/src/arch/x86_64/CodeGen.zig @@ -5333,6 +5333,14 @@ fn lowerDeclRef(self: *Self, tv: TypedValue, decl: *Module.Decl) InnerError!MCVa const ptr_bits = self.target.cpu.arch.ptrBitWidth(); const ptr_bytes: u64 = @divExact(ptr_bits, 8); + // TODO this feels clunky. 
Perhaps we should check for it in `genTypedValue`? + if (tv.ty.zigTypeTag() == .Pointer) blk: { + if (tv.ty.castPtrToFn()) |_| break :blk; + if (!tv.ty.elemType2().hasRuntimeBits()) { + return MCValue.none; + } + } + decl.alive = true; if (self.bin_file.cast(link.File.Elf)) |elf_file| { const got = &elf_file.program_headers.items[elf_file.phdr_got_index.?]; @@ -5341,6 +5349,7 @@ fn lowerDeclRef(self: *Self, tv: TypedValue, decl: *Module.Decl) InnerError!MCVa } else if (self.bin_file.cast(link.File.MachO)) |_| { // Because MachO is PIE-always-on, we defer memory address resolution until // the linker has enough info to perform relocations. + assert(decl.link.macho.local_sym_index != 0); return MCValue{ .got_load = decl.link.macho.local_sym_index }; } else if (self.bin_file.cast(link.File.Coff)) |coff_file| { const got_addr = coff_file.offset_table_virtual_address + decl.link.coff.offset_table_index * ptr_bytes; diff --git a/src/arch/x86_64/Emit.zig b/src/arch/x86_64/Emit.zig index 2a66513670..fc43e61c17 100644 --- a/src/arch/x86_64/Emit.zig +++ b/src/arch/x86_64/Emit.zig @@ -857,6 +857,7 @@ fn mirLeaPie(emit: *Emit, inst: Mir.Inst.Index) InnerError!void { else => return emit.fail("TODO unused LEA PIE variants 0b10 and 0b11", .{}), }; const atom = macho_file.atom_by_index_table.get(load_reloc.atom_index).?; + log.debug("adding reloc of type {} to local @{d}", .{ reloc_type, load_reloc.sym_index }); try atom.relocs.append(emit.bin_file.allocator, .{ .offset = @intCast(u32, end_offset - 4), .target = .{ .local = load_reloc.sym_index }, diff --git a/src/link/MachO.zig b/src/link/MachO.zig index 351acaeb17..9f038d9b9f 100644 --- a/src/link/MachO.zig +++ b/src/link/MachO.zig @@ -4286,6 +4286,7 @@ pub fn deleteExport(self: *MachO, exp: Export) void { } fn freeUnnamedConsts(self: *MachO, decl: *Module.Decl) void { + log.debug("freeUnnamedConsts for decl {*}", .{decl}); const unnamed_consts = self.unnamed_const_atoms.getPtr(decl) orelse return; for (unnamed_consts.items) |atom| { self.freeAtom(atom, .{ @@ -4295,6 +4296,7 @@ fn freeUnnamedConsts(self: *MachO, decl: *Module.Decl) void { self.locals_free_list.append(self.base.allocator, atom.local_sym_index) catch {}; self.locals.items[atom.local_sym_index].n_type = 0; _ = self.atom_by_index_table.remove(atom.local_sym_index); + log.debug(" adding local symbol index {d} to free list", .{atom.local_sym_index}); atom.local_sym_index = 0; } unnamed_consts.clearAndFree(self.base.allocator); @@ -4319,10 +4321,15 @@ pub fn freeDecl(self: *MachO, decl: *Module.Decl) void { self.got_entries_free_list.append(self.base.allocator, @intCast(u32, got_index)) catch {}; self.got_entries.items[got_index] = .{ .target = .{ .local = 0 }, .atom = undefined }; _ = self.got_entries_table.swapRemove(.{ .local = decl.link.macho.local_sym_index }); + log.debug(" adding GOT index {d} to free list (target local@{d})", .{ + got_index, + decl.link.macho.local_sym_index, + }); } self.locals.items[decl.link.macho.local_sym_index].n_type = 0; _ = self.atom_by_index_table.remove(decl.link.macho.local_sym_index); + log.debug(" adding local symbol index {d} to free list", .{decl.link.macho.local_sym_index}); decl.link.macho.local_sym_index = 0; } if (self.d_sym) |*d_sym| { diff --git a/test/behavior/basic.zig b/test/behavior/basic.zig index dd3e85be67..d2d943940f 100644 --- a/test/behavior/basic.zig +++ b/test/behavior/basic.zig @@ -399,8 +399,6 @@ fn testTakeAddressOfParameter(f: f32) !void { test "pointer to void return type" { if (builtin.zig_backend == .stage2_c) return 
error.SkipZigTest; // TODO - if (builtin.zig_backend == .stage2_aarch64 and builtin.os.tag == .macos) return error.SkipZigTest; - if (builtin.zig_backend == .stage2_x86_64 and builtin.os.tag == .macos) return error.SkipZigTest; try testPointerToVoidReturnType(); } diff --git a/test/behavior/struct.zig b/test/behavior/struct.zig index e3b0bb193b..a1d60632a9 100644 --- a/test/behavior/struct.zig +++ b/test/behavior/struct.zig @@ -370,8 +370,6 @@ test "empty struct method call" { if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO if (builtin.zig_backend == .stage2_c) return error.SkipZigTest; // TODO if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO - if (builtin.zig_backend == .stage2_aarch64 and builtin.os.tag == .macos) return error.SkipZigTest; // TODO - if (builtin.zig_backend == .stage2_x86_64 and builtin.os.tag == .macos) return error.SkipZigTest; // TODO const es = EmptyStruct{}; try expect(es.method() == 1234);
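As a rough illustration of the placement rules introduced by patch 1/3, the following Zig sketch (hypothetical declaration names, not code from this series) shows the kinds of values the new getMatchingSectionAtom/needsPointerRebase logic is meant to distinguish; the section names in the comments reflect the intended placement, not a guaranteed layout.

const std = @import("std");

// No embedded pointers: a candidate for __TEXT,__const.
const ints = [4]u32{ 1, 2, 3, 4 };

// The slice field embeds a pointer the dynamic linker must rebase,
// so this value is expected to be routed to __DATA_CONST,__const.
const Named = struct { name: []const u8, id: usize };
const with_ptr = Named{ .name = "hello", .id = 5 };

// Mutable with a non-zero initializer: __DATA,__data.
var counter: u32 = 1;

// Mutable and zero-initialized: __DATA,__bss.
var zeroed = [_]u8{0} ** 16;

test "placement candidates behave as ordinary Zig values" {
    try std.testing.expectEqual(@as(u32, 3), ints[2]);
    try std.testing.expect(std.mem.eql(u8, with_ptr.name, "hello"));
    counter += 1;
    try std.testing.expectEqual(@as(u32, 2), counter);
    try std.testing.expectEqual(@as(u8, 0), zeroed[0]);
}

The key distinction is that any value embedding a pointer to another decl needs a rebase opcode from the dynamic linker, so it must be placed in __DATA_CONST rather than __TEXT, while mutable variables continue to go to __DATA (or __bss when zero-initialized).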