Mirror of https://github.com/ziglang/zig.git

Merge pull request #15185 from ziglang/macho-tls

macho: add TLS support

commit 5e19250a12
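For orientation before the hunks below (an illustration, not part of the commit): this change teaches the self-hosted MachO linker and the x86_64 backend to handle Zig `threadlocal` variables, so a program like the following can link and run on macOS:

```zig
const std = @import("std");

// Each thread observes its own copy of `counter` once TLS is supported.
threadlocal var counter: u32 = 0;

fn bump() void {
    counter += 1;
}

pub fn main() !void {
    const t = try std.Thread.spawn(.{}, bump, .{});
    t.join();
    bump();
    // The spawned thread incremented its own copy, so this prints 1.
    std.debug.print("main-thread counter = {d}\n", .{counter});
}
```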
@@ -51,7 +51,6 @@ stage3/bin/zig build test docs \
--zig-lib-dir "$(pwd)/../lib" \
-Denable-macos-sdk \
-Dstatic-llvm \
-Dskip-non-native \
--search-prefix "$PREFIX"

# Produce the experimental std lib documentation.

@@ -51,7 +51,6 @@ stage3/bin/zig build test docs \
--zig-lib-dir "$(pwd)/../lib" \
-Denable-macos-sdk \
-Dstatic-llvm \
-Dskip-non-native \
--search-prefix "$PREFIX"

# Produce the experimental std lib documentation.
@@ -6171,6 +6171,7 @@ fn genTypedValue(self: *Self, arg_tv: TypedValue) InnerError!MCValue {
.linker_load => |ll| .{ .linker_load = ll },
.immediate => |imm| .{ .immediate = imm },
.memory => |addr| .{ .memory = addr },
+ .tlv_reloc => unreachable, // TODO
},
.fail => |msg| {
self.err_msg = msg;

@@ -673,7 +673,7 @@ fn mirCallExtern(emit: *Emit, inst: Mir.Inst.Index) !void {
const atom_index = macho_file.getAtomIndexForSymbol(.{ .sym_index = relocation.atom_index, .file = null }).?;
const target = macho_file.getGlobalByIndex(relocation.sym_index);
try link.File.MachO.Atom.addRelocation(macho_file, atom_index, .{
- .type = @enumToInt(std.macho.reloc_type_arm64.ARM64_RELOC_BRANCH26),
+ .type = .branch,
.target = target,
.offset = offset,
.addend = 0,
@@ -883,41 +883,32 @@ fn mirLoadMemoryPie(emit: *Emit, inst: Mir.Inst.Index) !void {
}

if (emit.bin_file.cast(link.File.MachO)) |macho_file| {
+ const Atom = link.File.MachO.Atom;
+ const Relocation = Atom.Relocation;
const atom_index = macho_file.getAtomIndexForSymbol(.{ .sym_index = data.atom_index, .file = null }).?;
- // TODO this causes segfault in stage1
- // try atom.addRelocations(macho_file, 2, .{
- try link.File.MachO.Atom.addRelocation(macho_file, atom_index, .{
+ try Atom.addRelocations(macho_file, atom_index, &[_]Relocation{ .{
.target = .{ .sym_index = data.sym_index, .file = null },
.offset = offset,
.addend = 0,
.pcrel = true,
.length = 2,
.type = switch (tag) {
- .load_memory_got,
- .load_memory_ptr_got,
- => @enumToInt(std.macho.reloc_type_arm64.ARM64_RELOC_GOT_LOAD_PAGE21),
- .load_memory_direct,
- .load_memory_ptr_direct,
- => @enumToInt(std.macho.reloc_type_arm64.ARM64_RELOC_PAGE21),
+ .load_memory_got, .load_memory_ptr_got => Relocation.Type.got_page,
+ .load_memory_direct, .load_memory_ptr_direct => Relocation.Type.page,
else => unreachable,
},
- });
- try link.File.MachO.Atom.addRelocation(macho_file, atom_index, .{
+ }, .{
.target = .{ .sym_index = data.sym_index, .file = null },
.offset = offset + 4,
.addend = 0,
.pcrel = false,
.length = 2,
.type = switch (tag) {
- .load_memory_got,
- .load_memory_ptr_got,
- => @enumToInt(std.macho.reloc_type_arm64.ARM64_RELOC_GOT_LOAD_PAGEOFF12),
- .load_memory_direct,
- .load_memory_ptr_direct,
- => @enumToInt(std.macho.reloc_type_arm64.ARM64_RELOC_PAGEOFF12),
+ .load_memory_got, .load_memory_ptr_got => Relocation.Type.got_pageoff,
+ .load_memory_direct, .load_memory_ptr_direct => Relocation.Type.pageoff,
else => unreachable,
},
- });
+ } });
} else if (emit.bin_file.cast(link.File.Coff)) |coff_file| {
const atom_index = coff_file.getAtomIndexForSymbol(.{ .sym_index = data.atom_index, .file = null }).?;
const target = switch (tag) {
@@ -6114,7 +6114,7 @@ fn genTypedValue(self: *Self, arg_tv: TypedValue) InnerError!MCValue {
.mcv => |mcv| switch (mcv) {
.none => .none,
.undef => .undef,
- .linker_load => unreachable, // TODO
+ .tlv_reloc, .linker_load => unreachable, // TODO
.immediate => |imm| .{ .immediate = @truncate(u32, imm) },
.memory => |addr| .{ .memory = addr },
},

@@ -2572,7 +2572,7 @@ fn genTypedValue(self: *Self, typed_value: TypedValue) InnerError!MCValue {
.mcv => |mcv| switch (mcv) {
.none => .none,
.undef => .undef,
- .linker_load => unreachable, // TODO
+ .tlv_reloc, .linker_load => unreachable, // TODO
.immediate => |imm| .{ .immediate = imm },
.memory => |addr| .{ .memory = addr },
},

@@ -3931,7 +3931,7 @@ fn genTypedValue(self: *Self, typed_value: TypedValue) InnerError!MCValue {
.mcv => |mcv| switch (mcv) {
.none => .none,
.undef => .undef,
- .linker_load => unreachable, // TODO
+ .tlv_reloc, .linker_load => unreachable, // TODO
.immediate => |imm| .{ .immediate = imm },
.memory => |addr| .{ .memory = addr },
},
@@ -132,6 +132,10 @@ pub const MCValue = union(enum) {
memory: u64,
/// The value is in memory but requires a linker relocation fixup.
linker_load: codegen.LinkerLoad,
+ /// Pointer to a threadlocal variable.
+ /// The address resolution will be deferred until the linker allocates everything in virtual memory.
+ /// Payload is a symbol index.
+ tlv_reloc: u32,
/// The value is one of the stack variables.
/// If the type is a pointer, it means the pointer address is in the stack at this offset.
stack_offset: i32,
@@ -146,6 +150,7 @@ pub const MCValue = union(enum) {
.stack_offset,
.ptr_stack_offset,
.linker_load,
+ .tlv_reloc,
=> true,
else => false,
};
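Background for the new `tlv_reloc` tag (inferred from the Mach-O TLV scheme, not stated in the diff): the symbol index ultimately names a descriptor in the `__thread_vars` section. A sketch of that descriptor's layout, mirroring `tlv_descriptor` from `<mach-o/loader.h>`:

```zig
// Field names follow dyld's tlv_descriptor; dyld installs `key` at load
// time, and calling `thunk` with the descriptor yields this thread's copy.
const TlvDescriptor = extern struct {
    thunk: *const fn (*TlvDescriptor) callconv(.C) *anyopaque,
    key: usize,
    offset: usize, // offset of the variable inside the thread-local template
};
```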
@@ -731,6 +736,40 @@ fn asmMemoryRegisterImmediate(
});
}

+ fn asmMovLinker(self: *Self, reg: Register, atom_index: u32, linker_load: codegen.LinkerLoad) !void {
+ const ops: Mir.Inst.Ops = switch (linker_load.type) {
+ .got => .got_reloc,
+ .direct => .direct_reloc,
+ .import => .import_reloc,
+ };
+ _ = try self.addInst(.{
+ .tag = .mov_linker,
+ .ops = ops,
+ .data = .{ .payload = try self.addExtra(Mir.LeaRegisterReloc{
+ .reg = @enumToInt(reg),
+ .atom_index = atom_index,
+ .sym_index = linker_load.sym_index,
+ }) },
+ });
+ }
+
+ fn asmLeaLinker(self: *Self, reg: Register, atom_index: u32, linker_load: codegen.LinkerLoad) !void {
+ const ops: Mir.Inst.Ops = switch (linker_load.type) {
+ .got => .got_reloc,
+ .direct => .direct_reloc,
+ .import => .import_reloc,
+ };
+ _ = try self.addInst(.{
+ .tag = .lea_linker,
+ .ops = ops,
+ .data = .{ .payload = try self.addExtra(Mir.LeaRegisterReloc{
+ .reg = @enumToInt(reg),
+ .atom_index = atom_index,
+ .sym_index = linker_load.sym_index,
+ }) },
+ });
+ }
+
fn gen(self: *Self) InnerError!void {
const cc = self.fn_type.fnCallingConvention();
if (cc != .Naked) {
@@ -2863,7 +2902,7 @@ fn airArrayElemVal(self: *Self, inst: Air.Inst.Index) !void {
const offset_reg_lock = self.register_manager.lockRegAssumeUnused(offset_reg);
defer self.register_manager.unlockReg(offset_reg_lock);

- const addr_reg = try self.register_manager.allocReg(null, gp);
+ const addr_reg = (try self.register_manager.allocReg(null, gp)).to64();
switch (array) {
.register => {
const off = @intCast(i32, try self.allocMem(
@@ -2872,19 +2911,33 @@ fn airArrayElemVal(self: *Self, inst: Air.Inst.Index) !void {
array_ty.abiAlignment(self.target.*),
));
try self.genSetStack(array_ty, off, array, .{});
- try self.asmRegisterMemory(.lea, addr_reg.to64(), Memory.sib(.qword, .{
+ try self.asmRegisterMemory(.lea, addr_reg, Memory.sib(.qword, .{
.base = .rbp,
.disp = -off,
}));
},
.stack_offset => |off| {
- try self.asmRegisterMemory(.lea, addr_reg.to64(), Memory.sib(.qword, .{
+ try self.asmRegisterMemory(.lea, addr_reg, Memory.sib(.qword, .{
.base = .rbp,
.disp = -off,
}));
},
- .memory, .linker_load => {
- try self.loadMemPtrIntoRegister(addr_reg, Type.usize, array);
+ .memory => |addr| try self.genSetReg(Type.usize, addr_reg, .{ .immediate = addr }),
+ .tlv_reloc => try self.genSetReg(array_ty, addr_reg, array),
+ .linker_load => |load_struct| {
+ const atom_index = if (self.bin_file.cast(link.File.MachO)) |macho_file| blk: {
+ const atom = try macho_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl);
+ break :blk macho_file.getAtom(atom).getSymbolIndex().?;
+ } else if (self.bin_file.cast(link.File.Coff)) |coff_file| blk: {
+ const atom = try coff_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl);
+ break :blk coff_file.getAtom(atom).getSymbolIndex().?;
+ } else unreachable;
+
+ switch (load_struct.type) {
+ .import => unreachable,
+ .got => try self.asmMovLinker(addr_reg, atom_index, load_struct),
+ .direct => try self.asmLeaLinker(addr_reg, atom_index, load_struct),
+ }
},
else => return self.fail("TODO implement array_elem_val when array is {}", .{array}),
}
@@ -3597,10 +3650,30 @@ fn load(self: *Self, dst_mcv: MCValue, ptr: MCValue, ptr_ty: Type) InnerError!vo
else => return self.fail("TODO implement loading from register into {}", .{dst_mcv}),
}
},
- .memory, .linker_load => {
+ .memory, .tlv_reloc => {
const reg = try self.copyToTmpRegister(ptr_ty, ptr);
try self.load(dst_mcv, .{ .register = reg }, ptr_ty);
},
+ .linker_load => |load_struct| {
+ const addr_reg = (try self.register_manager.allocReg(null, gp)).to64();
+ const addr_reg_lock = self.register_manager.lockRegAssumeUnused(addr_reg);
+ defer self.register_manager.unlockReg(addr_reg_lock);
+
+ const atom_index = if (self.bin_file.cast(link.File.MachO)) |macho_file| blk: {
+ const atom = try macho_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl);
+ break :blk macho_file.getAtom(atom).getSymbolIndex().?;
+ } else if (self.bin_file.cast(link.File.Coff)) |coff_file| blk: {
+ const atom = try coff_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl);
+ break :blk coff_file.getAtom(atom).getSymbolIndex().?;
+ } else unreachable;
+
+ switch (load_struct.type) {
+ .import => unreachable,
+ .got, .direct => try self.asmMovLinker(addr_reg, atom_index, load_struct),
+ }
+
+ try self.load(dst_mcv, .{ .register = addr_reg }, ptr_ty);
+ },
}
}

@@ -3630,41 +3703,6 @@ fn airLoad(self: *Self, inst: Air.Inst.Index) !void {
return self.finishAir(inst, result, .{ ty_op.operand, .none, .none });
}

- fn loadMemPtrIntoRegister(self: *Self, reg: Register, ptr_ty: Type, ptr: MCValue) InnerError!void {
- switch (ptr) {
- .linker_load => |load_struct| {
- const abi_size = @intCast(u32, ptr_ty.abiSize(self.target.*));
- const atom_index = if (self.bin_file.cast(link.File.MachO)) |macho_file| blk: {
- const atom = try macho_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl);
- break :blk macho_file.getAtom(atom).getSymbolIndex().?;
- } else if (self.bin_file.cast(link.File.Coff)) |coff_file| blk: {
- const atom = try coff_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl);
- break :blk coff_file.getAtom(atom).getSymbolIndex().?;
- } else unreachable;
- const ops: Mir.Inst.Ops = switch (load_struct.type) {
- .got => .got_reloc,
- .direct => .direct_reloc,
- .import => .import_reloc,
- };
- _ = try self.addInst(.{
- .tag = .lea_linker,
- .ops = ops,
- .data = .{ .payload = try self.addExtra(Mir.LeaRegisterReloc{
- .reg = @enumToInt(registerAlias(reg, abi_size)),
- .atom_index = atom_index,
- .sym_index = load_struct.sym_index,
- }) },
- });
- },
- .memory => |addr| {
- // TODO: in case the address fits in an imm32 we can use [ds:imm32]
- // instead of wasting an instruction copying the address to a register
- try self.genSetReg(ptr_ty, reg, .{ .immediate = addr });
- },
- else => unreachable,
- }
- }

fn store(self: *Self, ptr: MCValue, value: MCValue, ptr_ty: Type, value_ty: Type) InnerError!void {
const abi_size = @intCast(u32, value_ty.abiSize(self.target.*));
switch (ptr) {
@@ -3775,11 +3813,29 @@ fn store(self: *Self, ptr: MCValue, value: MCValue, ptr_ty: Type, value_ty: Type

try self.store(ptr, .{ .register = tmp_reg }, ptr_ty, value_ty);
} else {
- const addr_reg = try self.register_manager.allocReg(null, gp);
+ const addr_reg = (try self.register_manager.allocReg(null, gp)).to64();
const addr_lock = self.register_manager.lockRegAssumeUnused(addr_reg);
defer self.register_manager.unlockReg(addr_lock);

- try self.loadMemPtrIntoRegister(addr_reg, Type.usize, value);
+ switch (value) {
+ .memory => |addr| try self.genSetReg(Type.usize, addr_reg, .{ .immediate = addr }),
+ .linker_load => |load_struct| {
+ const atom_index = if (self.bin_file.cast(link.File.MachO)) |macho_file| blk: {
+ const atom = try macho_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl);
+ break :blk macho_file.getAtom(atom).getSymbolIndex().?;
+ } else if (self.bin_file.cast(link.File.Coff)) |coff_file| blk: {
+ const atom = try coff_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl);
+ break :blk coff_file.getAtom(atom).getSymbolIndex().?;
+ } else unreachable;
+ switch (load_struct.type) {
+ .import => unreachable,
+ .got => try self.asmMovLinker(addr_reg, atom_index, load_struct),
+ .direct => try self.asmLeaLinker(addr_reg, atom_index, load_struct),
+ }
+ },
+ else => unreachable,
+ }

try self.genInlineMemcpy(
ptr,
.{ .register = addr_reg },
@@ -3799,7 +3855,7 @@ fn store(self: *Self, ptr: MCValue, value: MCValue, ptr_ty: Type, value_ty: Type
.{ .immediate = abi_size },
.{},
),
- .ptr_stack_offset => {
+ .ptr_stack_offset, .tlv_reloc => {
const tmp_reg = try self.copyToTmpRegister(value_ty, value);
const tmp_lock = self.register_manager.lockRegAssumeUnused(tmp_reg);
defer self.register_manager.unlockReg(tmp_lock);
@@ -3815,19 +3871,41 @@ fn store(self: *Self, ptr: MCValue, value: MCValue, ptr_ty: Type, value_ty: Type
};
defer if (value_lock) |lock| self.register_manager.unlockReg(lock);

- const addr_reg = try self.register_manager.allocReg(null, gp);
+ const addr_reg = (try self.register_manager.allocReg(null, gp)).to64();
const addr_reg_lock = self.register_manager.lockRegAssumeUnused(addr_reg);
defer self.register_manager.unlockReg(addr_reg_lock);

- try self.loadMemPtrIntoRegister(addr_reg, ptr_ty, ptr);
- // Load the pointer, which is stored in memory
- try self.asmRegisterMemory(
- .mov,
- addr_reg.to64(),
- Memory.sib(.qword, .{ .base = addr_reg.to64() }),
- );
+ switch (ptr) {
+ .memory => |addr| {
+ try self.genSetReg(ptr_ty, addr_reg, .{ .immediate = addr });
+ // Load the pointer, which is stored in memory
+ try self.asmRegisterMemory(.mov, addr_reg, Memory.sib(.qword, .{ .base = addr_reg }));
+ },
+ .linker_load => |load_struct| {
+ const atom_index = if (self.bin_file.cast(link.File.MachO)) |macho_file| blk: {
+ const atom = try macho_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl);
+ break :blk macho_file.getAtom(atom).getSymbolIndex().?;
+ } else if (self.bin_file.cast(link.File.Coff)) |coff_file| blk: {
+ const atom = try coff_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl);
+ break :blk coff_file.getAtom(atom).getSymbolIndex().?;
+ } else unreachable;
+ switch (load_struct.type) {
+ .import => unreachable,
+ .got, .direct => try self.asmMovLinker(addr_reg, atom_index, load_struct),
+ }
+ },
+ else => unreachable,
+ }

- const new_ptr = MCValue{ .register = addr_reg.to64() };
+ const new_ptr = MCValue{ .register = addr_reg };
try self.store(new_ptr, value, ptr_ty, value_ty);
},
+ .tlv_reloc => {
+ const addr_reg = try self.copyToTmpRegister(Type.usize, ptr);
+ const addr_reg_lock = self.register_manager.lockRegAssumeUnused(addr_reg);
+ defer self.register_manager.unlockReg(addr_reg_lock);
+
+ const new_ptr = MCValue{ .register = addr_reg };
+ try self.store(new_ptr, value, ptr_ty, value_ty);
+ },
}
@@ -3875,7 +3953,7 @@ fn fieldPtr(self: *Self, inst: Air.Inst.Index, operand: Air.Inst.Ref, index: u32

const dst_mcv: MCValue = result: {
switch (mcv) {
- .stack_offset => {
+ .stack_offset, .tlv_reloc => {
const offset_reg = try self.copyToTmpRegister(ptr_ty, .{
.immediate = field_offset,
});
@@ -4157,12 +4235,31 @@ fn genUnOpMir(self: *Self, mir_tag: Mir.Inst.Tag, dst_ty: Type, dst_mcv: MCValue
}));
},
.ptr_stack_offset => unreachable,
+ .tlv_reloc => unreachable,
.memory, .linker_load => {
const addr_reg = (try self.register_manager.allocReg(null, gp)).to64();
const addr_reg_lock = self.register_manager.lockRegAssumeUnused(addr_reg);
defer self.register_manager.unlockReg(addr_reg_lock);

- try self.loadMemPtrIntoRegister(addr_reg, Type.usize, dst_mcv);
+ switch (dst_mcv) {
+ .memory => |addr| try self.genSetReg(Type.usize, addr_reg, .{ .immediate = addr }),
+ .linker_load => |load_struct| {
+ const atom_index = if (self.bin_file.cast(link.File.MachO)) |macho_file| blk: {
+ const atom = try macho_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl);
+ break :blk macho_file.getAtom(atom).getSymbolIndex().?;
+ } else if (self.bin_file.cast(link.File.Coff)) |coff_file| blk: {
+ const atom = try coff_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl);
+ break :blk coff_file.getAtom(atom).getSymbolIndex().?;
+ } else unreachable;
+
+ switch (load_struct.type) {
+ .import => unreachable,
+ .got => try self.asmMovLinker(addr_reg, atom_index, load_struct),
+ .direct => try self.asmLeaLinker(addr_reg, atom_index, load_struct),
+ }
+ },
+ else => unreachable,
+ }
try self.asmMemory(
mir_tag,
Memory.sib(Memory.PtrSize.fromSize(abi_size), .{ .base = addr_reg }),
@@ -4800,6 +4897,7 @@ fn genBinOp(
.eflags,
.register_overflow,
.ptr_stack_offset,
+ .tlv_reloc,
=> unreachable,
.register => |src_reg| try self.asmCmovccRegisterRegister(
registerAlias(tmp_reg, cmov_abi_size),
@@ -4819,7 +4917,26 @@ fn genBinOp(
const addr_reg_lock = self.register_manager.lockRegAssumeUnused(addr_reg);
defer self.register_manager.unlockReg(addr_reg_lock);

- try self.loadMemPtrIntoRegister(addr_reg, Type.usize, mat_src_mcv);
+ switch (mat_src_mcv) {
+ .memory => |addr| try self.genSetReg(Type.usize, addr_reg, .{ .immediate = addr }),
+ .linker_load => |load_struct| {
+ const atom_index = if (self.bin_file.cast(link.File.MachO)) |macho_file| blk: {
+ const atom = try macho_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl);
+ break :blk macho_file.getAtom(atom).getSymbolIndex().?;
+ } else if (self.bin_file.cast(link.File.Coff)) |coff_file| blk: {
+ const atom = try coff_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl);
+ break :blk coff_file.getAtom(atom).getSymbolIndex().?;
+ } else unreachable;
+
+ switch (load_struct.type) {
+ .import => unreachable,
+ .got => try self.asmMovLinker(addr_reg, atom_index, load_struct),
+ .direct => try self.asmLeaLinker(addr_reg, atom_index, load_struct),
+ }
+ },
+ else => unreachable,
+ }

try self.asmCmovccRegisterMemory(
registerAlias(tmp_reg, cmov_abi_size),
Memory.sib(Memory.PtrSize.fromSize(cmov_abi_size), .{ .base = addr_reg }),
@@ -4865,7 +4982,7 @@ fn genBinOpMir(self: *Self, mir_tag: Mir.Inst.Tag, ty: Type, dst_mcv: MCValue, s
.undef => unreachable,
.dead, .unreach => unreachable,
.register_overflow => unreachable,
- .ptr_stack_offset => {
+ .ptr_stack_offset, .tlv_reloc => {
const dst_reg_lock = self.register_manager.lockReg(dst_reg);
defer if (dst_reg_lock) |lock| self.register_manager.unlockReg(lock);

@@ -4942,11 +5059,30 @@ fn genBinOpMir(self: *Self, mir_tag: Mir.Inst.Tag, ty: Type, dst_mcv: MCValue, s
} = switch (dst_mcv) {
else => unreachable,
.memory, .linker_load => dst: {
- const dst_addr_reg = try self.register_manager.allocReg(null, gp);
+ const dst_addr_reg = (try self.register_manager.allocReg(null, gp)).to64();
const dst_addr_lock = self.register_manager.lockRegAssumeUnused(dst_addr_reg);
errdefer self.register_manager.unlockReg(dst_addr_lock);

- try self.loadMemPtrIntoRegister(dst_addr_reg, Type.usize, dst_mcv);
+ switch (dst_mcv) {
+ .memory => |addr| try self.genSetReg(Type.usize, dst_addr_reg, .{ .immediate = addr }),
+ .linker_load => |load_struct| {
+ const atom_index = if (self.bin_file.cast(link.File.MachO)) |macho_file| blk: {
+ const atom = try macho_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl);
+ break :blk macho_file.getAtom(atom).getSymbolIndex().?;
+ } else if (self.bin_file.cast(link.File.Coff)) |coff_file| blk: {
+ const atom = try coff_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl);
+ break :blk coff_file.getAtom(atom).getSymbolIndex().?;
+ } else unreachable;
+
+ switch (load_struct.type) {
+ .import => unreachable,
+ .got => try self.asmMovLinker(dst_addr_reg, atom_index, load_struct),
+ .direct => try self.asmLeaLinker(dst_addr_reg, atom_index, load_struct),
+ }
+ },
+ else => unreachable,
+ }

break :dst .{
.addr_reg = dst_addr_reg,
.addr_lock = dst_addr_lock,
@@ -4968,11 +5104,30 @@ fn genBinOpMir(self: *Self, mir_tag: Mir.Inst.Tag, ty: Type, dst_mcv: MCValue, s
const src_limb_lock = self.register_manager.lockRegAssumeUnused(src_limb_reg);
errdefer self.register_manager.unlockReg(src_limb_lock);

- const src_addr_reg = try self.register_manager.allocReg(null, gp);
+ const src_addr_reg = (try self.register_manager.allocReg(null, gp)).to64();
const src_addr_lock = self.register_manager.lockRegAssumeUnused(src_addr_reg);
errdefer self.register_manager.unlockReg(src_addr_lock);

- try self.loadMemPtrIntoRegister(src_addr_reg, Type.usize, src_mcv);
+ switch (src_mcv) {
+ .memory => |addr| try self.genSetReg(Type.usize, src_addr_reg, .{ .immediate = addr }),
+ .linker_load => |load_struct| {
+ const atom_index = if (self.bin_file.cast(link.File.MachO)) |macho_file| blk: {
+ const atom = try macho_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl);
+ break :blk macho_file.getAtom(atom).getSymbolIndex().?;
+ } else if (self.bin_file.cast(link.File.Coff)) |coff_file| blk: {
+ const atom = try coff_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl);
+ break :blk coff_file.getAtom(atom).getSymbolIndex().?;
+ } else unreachable;
+
+ switch (load_struct.type) {
+ .import => unreachable,
+ .got => try self.asmMovLinker(src_addr_reg, atom_index, load_struct),
+ .direct => try self.asmLeaLinker(src_addr_reg, atom_index, load_struct),
+ }
+ },
+ else => unreachable,
+ }

break :src .{
.addr_reg = src_addr_reg,
.addr_lock = src_addr_lock,
@@ -5078,7 +5233,7 @@ fn genBinOpMir(self: *Self, mir_tag: Mir.Inst.Tag, ty: Type, dst_mcv: MCValue, s
else => unreachable,
}
},
- .memory, .linker_load => {
+ .memory, .linker_load, .tlv_reloc => {
try self.asmRegisterMemory(
.mov,
registerAlias(src.?.limb_reg, limb_abi_size),
@@ -5117,6 +5272,7 @@ fn genBinOpMir(self: *Self, mir_tag: Mir.Inst.Tag, ty: Type, dst_mcv: MCValue, s
}
},
.ptr_stack_offset => unreachable,
+ .tlv_reloc => unreachable,
}
}

@@ -5130,6 +5286,7 @@ fn genIntMulComplexOpMir(self: *Self, dst_ty: Type, dst_mcv: MCValue, src_mcv: M
.dead, .unreach, .immediate => unreachable,
.eflags => unreachable,
.ptr_stack_offset => unreachable,
+ .tlv_reloc => unreachable,
.register_overflow => unreachable,
.register => |dst_reg| {
const dst_alias = registerAlias(dst_reg, abi_size);
@@ -5141,6 +5298,7 @@ fn genIntMulComplexOpMir(self: *Self, dst_ty: Type, dst_mcv: MCValue, src_mcv: M
.undef => try self.genSetReg(dst_ty, dst_reg, .undef),
.dead, .unreach => unreachable,
.ptr_stack_offset => unreachable,
+ .tlv_reloc => unreachable,
.register_overflow => unreachable,
.register => |src_reg| try self.asmRegisterRegister(
.imul,
@@ -5184,6 +5342,7 @@ fn genIntMulComplexOpMir(self: *Self, dst_ty: Type, dst_mcv: MCValue, src_mcv: M
.undef => return self.genSetStack(dst_ty, off, .undef, .{}),
.dead, .unreach => unreachable,
.ptr_stack_offset => unreachable,
+ .tlv_reloc => unreachable,
.register_overflow => unreachable,
.register => |src_reg| {
// copy dst to a register
@@ -5406,6 +5565,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
.linker_load => unreachable,
.eflags => unreachable,
.register_overflow => unreachable,
+ .tlv_reloc => unreachable,
}
}

@@ -5444,6 +5604,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
.linker_load => unreachable,
.eflags => unreachable,
.register_overflow => unreachable,
+ .tlv_reloc => unreachable,
}
}

@@ -5965,6 +6126,7 @@ fn isNull(self: *Self, inst: Air.Inst.Index, opt_ty: Type, opt_mcv: MCValue) !MC
.register_overflow,
.ptr_stack_offset,
.eflags,
+ .tlv_reloc,
=> unreachable,

.register => |opt_reg| {
@@ -5990,7 +6152,25 @@ fn isNull(self: *Self, inst: Air.Inst.Index, opt_ty: Type, opt_mcv: MCValue) !MC
const addr_reg_lock = self.register_manager.lockRegAssumeUnused(addr_reg);
defer self.register_manager.unlockReg(addr_reg_lock);

- try self.loadMemPtrIntoRegister(addr_reg, Type.usize, opt_mcv);
+ switch (opt_mcv) {
+ .memory => |addr| try self.genSetReg(Type.usize, addr_reg, .{ .immediate = addr }),
+ .linker_load => |load_struct| {
+ const atom_index = if (self.bin_file.cast(link.File.MachO)) |macho_file| blk: {
+ const atom = try macho_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl);
+ break :blk macho_file.getAtom(atom).getSymbolIndex().?;
+ } else if (self.bin_file.cast(link.File.Coff)) |coff_file| blk: {
+ const atom = try coff_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl);
+ break :blk coff_file.getAtom(atom).getSymbolIndex().?;
+ } else unreachable;
+
+ switch (load_struct.type) {
+ .import => unreachable,
+ .got => try self.asmMovLinker(addr_reg, atom_index, load_struct),
+ .direct => try self.asmLeaLinker(addr_reg, atom_index, load_struct),
+ }
+ },
+ else => unreachable,
+ }

const some_abi_size = @intCast(u32, some_info.ty.abiSize(self.target.*));
try self.asmMemoryImmediate(.cmp, Memory.sib(
@@ -6925,11 +7105,30 @@ fn genSetStackArg(self: *Self, ty: Type, stack_offset: i32, mcv: MCValue) InnerE
return self.genSetStackArg(ty, stack_offset, MCValue{ .register = reg });
}

- const addr_reg = try self.register_manager.allocReg(null, gp);
+ const addr_reg = (try self.register_manager.allocReg(null, gp)).to64();
const addr_lock = self.register_manager.lockRegAssumeUnused(addr_reg);
defer self.register_manager.unlockReg(addr_lock);

- try self.loadMemPtrIntoRegister(addr_reg, Type.usize, mcv);
+ switch (mcv) {
+ .memory => |addr| try self.genSetReg(Type.usize, addr_reg, .{ .immediate = addr }),
+ .linker_load => |load_struct| {
+ const atom_index = if (self.bin_file.cast(link.File.MachO)) |macho_file| blk: {
+ const atom = try macho_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl);
+ break :blk macho_file.getAtom(atom).getSymbolIndex().?;
+ } else if (self.bin_file.cast(link.File.Coff)) |coff_file| blk: {
+ const atom = try coff_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl);
+ break :blk coff_file.getAtom(atom).getSymbolIndex().?;
+ } else unreachable;
+
+ switch (load_struct.type) {
+ .import => unreachable,
+ .got => try self.asmMovLinker(addr_reg, atom_index, load_struct),
+ .direct => try self.asmLeaLinker(addr_reg, atom_index, load_struct),
+ }
+ },
+ else => unreachable,
+ }

try self.genInlineMemcpy(
.{ .ptr_stack_offset = stack_offset },
.{ .register = addr_reg },
@@ -6971,7 +7170,7 @@ fn genSetStackArg(self: *Self, ty: Type, stack_offset: i32, mcv: MCValue) InnerE
},
}
},
- .ptr_stack_offset => {
+ .ptr_stack_offset, .tlv_reloc => {
const reg = try self.copyToTmpRegister(ty, mcv);
return self.genSetStackArg(ty, stack_offset, MCValue{ .register = reg });
},
@@ -7133,11 +7332,30 @@ fn genSetStack(self: *Self, ty: Type, stack_offset: i32, mcv: MCValue, opts: Inl
const reg = try self.copyToTmpRegister(ty, mcv);
return self.genSetStack(ty, stack_offset, MCValue{ .register = reg }, opts);
} else {
- const addr_reg = try self.register_manager.allocReg(null, gp);
+ const addr_reg = (try self.register_manager.allocReg(null, gp)).to64();
const addr_lock = self.register_manager.lockRegAssumeUnused(addr_reg);
defer self.register_manager.unlockReg(addr_lock);

- try self.loadMemPtrIntoRegister(addr_reg, Type.usize, mcv);
+ switch (mcv) {
+ .memory => |addr| try self.genSetReg(Type.usize, addr_reg, .{ .immediate = addr }),
+ .linker_load => |load_struct| {
+ const atom_index = if (self.bin_file.cast(link.File.MachO)) |macho_file| blk: {
+ const atom = try macho_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl);
+ break :blk macho_file.getAtom(atom).getSymbolIndex().?;
+ } else if (self.bin_file.cast(link.File.Coff)) |coff_file| blk: {
+ const atom = try coff_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl);
+ break :blk coff_file.getAtom(atom).getSymbolIndex().?;
+ } else unreachable;
+
+ switch (load_struct.type) {
+ .import => unreachable,
+ .got => try self.asmMovLinker(addr_reg, atom_index, load_struct),
+ .direct => try self.asmLeaLinker(addr_reg, atom_index, load_struct),
+ }
+ },
+ else => unreachable,
+ }

try self.genInlineMemcpy(
.{ .ptr_stack_offset = stack_offset },
.{ .register = addr_reg },
@@ -7157,7 +7375,7 @@ fn genSetStack(self: *Self, ty: Type, stack_offset: i32, mcv: MCValue, opts: Inl
.{ .immediate = abi_size },
.{},
),
- .ptr_stack_offset => {
+ .ptr_stack_offset, .tlv_reloc => {
const tmp_reg = try self.copyToTmpRegister(ty, mcv);
const tmp_lock = self.register_manager.lockRegAssumeUnused(tmp_reg);
defer self.register_manager.unlockReg(tmp_lock);
@@ -7247,11 +7465,26 @@ fn genInlineMemcpy(
try self.spillRegisters(&.{ .rdi, .rsi, .rcx });

switch (dst_ptr) {
- .memory, .linker_load => {
- try self.loadMemPtrIntoRegister(.rdi, Type.usize, dst_ptr);
+ .memory => |addr| {
+ try self.genSetReg(Type.usize, .rdi, .{ .immediate = addr });
// Load the pointer, which is stored in memory
try self.asmRegisterMemory(.mov, .rdi, Memory.sib(.qword, .{ .base = .rdi }));
},
+ .linker_load => |load_struct| {
+ const atom_index = if (self.bin_file.cast(link.File.MachO)) |macho_file| blk: {
+ const atom = try macho_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl);
+ break :blk macho_file.getAtom(atom).getSymbolIndex().?;
+ } else if (self.bin_file.cast(link.File.Coff)) |coff_file| blk: {
+ const atom = try coff_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl);
+ break :blk coff_file.getAtom(atom).getSymbolIndex().?;
+ } else unreachable;
+
+ switch (load_struct.type) {
+ .import => unreachable,
+ .got, .direct => try self.asmMovLinker(.rdi, atom_index, load_struct),
+ }
+ },
+ .tlv_reloc => try self.genSetReg(Type.usize, .rdi, dst_ptr),
.stack_offset, .ptr_stack_offset => |off| {
try self.asmRegisterMemory(switch (dst_ptr) {
.stack_offset => .mov,
@@ -7275,11 +7508,26 @@ fn genInlineMemcpy(
}

switch (src_ptr) {
- .memory, .linker_load => {
- try self.loadMemPtrIntoRegister(.rsi, Type.usize, src_ptr);
+ .memory => |addr| {
+ try self.genSetReg(Type.usize, .rsi, .{ .immediate = addr });
// Load the pointer, which is stored in memory
try self.asmRegisterMemory(.mov, .rsi, Memory.sib(.qword, .{ .base = .rsi }));
},
+ .linker_load => |load_struct| {
+ const atom_index = if (self.bin_file.cast(link.File.MachO)) |macho_file| blk: {
+ const atom = try macho_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl);
+ break :blk macho_file.getAtom(atom).getSymbolIndex().?;
+ } else if (self.bin_file.cast(link.File.Coff)) |coff_file| blk: {
+ const atom = try coff_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl);
+ break :blk coff_file.getAtom(atom).getSymbolIndex().?;
+ } else unreachable;
+
+ switch (load_struct.type) {
+ .import => unreachable,
+ .got, .direct => try self.asmMovLinker(.rsi, atom_index, load_struct),
+ }
+ },
+ .tlv_reloc => try self.genSetReg(Type.usize, .rsi, src_ptr),
.stack_offset, .ptr_stack_offset => |off| {
try self.asmRegisterMemory(switch (src_ptr) {
.stack_offset => .mov,
@@ -7326,11 +7574,26 @@ fn genInlineMemset(
try self.spillRegisters(&.{ .rdi, .al, .rcx });

switch (dst_ptr) {
- .memory, .linker_load => {
- try self.loadMemPtrIntoRegister(.rdi, Type.usize, dst_ptr);
+ .memory => |addr| {
+ try self.genSetReg(Type.usize, .rdi, .{ .immediate = addr });
// Load the pointer, which is stored in memory
try self.asmRegisterMemory(.mov, .rdi, Memory.sib(.qword, .{ .base = .rdi }));
},
+ .linker_load => |load_struct| {
+ const atom_index = if (self.bin_file.cast(link.File.MachO)) |macho_file| blk: {
+ const atom = try macho_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl);
+ break :blk macho_file.getAtom(atom).getSymbolIndex().?;
+ } else if (self.bin_file.cast(link.File.Coff)) |coff_file| blk: {
+ const atom = try coff_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl);
+ break :blk coff_file.getAtom(atom).getSymbolIndex().?;
+ } else unreachable;
+
+ switch (load_struct.type) {
+ .import => unreachable,
+ .got, .direct => try self.asmMovLinker(.rdi, atom_index, load_struct),
+ }
+ },
+ .tlv_reloc => try self.genSetReg(Type.usize, .rdi, dst_ptr),
.stack_offset, .ptr_stack_offset => |off| {
try self.asmRegisterMemory(switch (dst_ptr) {
.stack_offset => .mov,
@@ -7454,10 +7717,10 @@ fn genSetReg(self: *Self, ty: Type, reg: Register, mcv: MCValue) InnerError!void

try self.asmRegisterRegister(.mov, registerAlias(reg, abi_size), registerAlias(src_reg, abi_size));
},
- .memory, .linker_load => switch (ty.zigTypeTag()) {
+ .memory => |addr| switch (ty.zigTypeTag()) {
.Float => {
- const base_reg = try self.register_manager.allocReg(null, gp);
- try self.loadMemPtrIntoRegister(base_reg, Type.usize, mcv);
+ const base_reg = (try self.register_manager.allocReg(null, gp)).to64();
+ try self.genSetReg(Type.usize, base_reg, .{ .immediate = addr });

if (intrinsicsAllowed(self.target.*, ty)) {
return self.asmRegisterMemory(
@@ -7469,29 +7732,20 @@ fn genSetReg(self: *Self, ty: Type, reg: Register, mcv: MCValue) InnerError!void
}),
},
reg.to128(),
- Memory.sib(Memory.PtrSize.fromSize(abi_size), .{ .base = base_reg.to64() }),
+ Memory.sib(Memory.PtrSize.fromSize(abi_size), .{ .base = base_reg }),
);
}

return self.fail("TODO genSetReg from memory for float with no intrinsics", .{});
},
- else => switch (mcv) {
- else => unreachable,
- .linker_load => {
- try self.loadMemPtrIntoRegister(reg, Type.usize, mcv);
- try self.asmRegisterMemory(
- .mov,
- registerAlias(reg, abi_size),
- Memory.sib(Memory.PtrSize.fromSize(abi_size), .{ .base = reg.to64() }),
- );
- },
- .memory => |x| if (x <= math.maxInt(i32)) {
+ else => {
+ if (addr <= math.maxInt(i32)) {
try self.asmRegisterMemory(
.mov,
registerAlias(reg, abi_size),
Memory.sib(Memory.PtrSize.fromSize(abi_size), .{
.base = .ds,
- .disp = @intCast(i32, x),
+ .disp = @intCast(i32, addr),
}),
);
} else {
@@ -7501,20 +7755,77 @@ fn genSetReg(self: *Self, ty: Type, reg: Register, mcv: MCValue) InnerError!void
_ = try self.addInst(.{
.tag = .mov_moffs,
.ops = .rax_moffs,
- .data = .{ .payload = try self.addExtra(Mir.MemoryMoffs.encode(.ds, x)) },
+ .data = .{ .payload = try self.addExtra(Mir.MemoryMoffs.encode(.ds, addr)) },
});
} else {
// Rather than duplicate the logic used for the move, we just use a self-call with a new MCValue.
- try self.genSetReg(ty, reg, MCValue{ .immediate = x });
+ try self.genSetReg(Type.usize, reg, MCValue{ .immediate = addr });
try self.asmRegisterMemory(
.mov,
registerAlias(reg, abi_size),
Memory.sib(Memory.PtrSize.fromSize(abi_size), .{ .base = reg.to64() }),
);
}
},
}
},
},
+ .tlv_reloc => |sym_index| {
+ const atom_index = if (self.bin_file.cast(link.File.MachO)) |macho_file| blk: {
+ const atom = try macho_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl);
+ break :blk macho_file.getAtom(atom).getSymbolIndex().?;
+ } else if (self.bin_file.cast(link.File.Coff)) |coff_file| blk: {
+ const atom = try coff_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl);
+ break :blk coff_file.getAtom(atom).getSymbolIndex().?;
+ } else unreachable;
+
+ if (self.bin_file.cast(link.File.MachO)) |_| {
+ _ = try self.addInst(.{
+ .tag = .mov_linker,
+ .ops = .tlv_reloc,
+ .data = .{ .payload = try self.addExtra(Mir.LeaRegisterReloc{
+ .reg = @enumToInt(Register.rdi),
+ .atom_index = atom_index,
+ .sym_index = sym_index,
+ }) },
+ });
+ // TODO: spill registers before calling
+ try self.asmMemory(.call, Memory.sib(.qword, .{ .base = .rdi }));
+ try self.genSetReg(Type.usize, reg, .{ .register = .rax });
+ } else return self.fail("TODO emit ptr to TLV sequence on {s}", .{@tagName(self.bin_file.tag)});
+ },
+ .linker_load => |load_struct| {
+ const atom_index = if (self.bin_file.cast(link.File.MachO)) |macho_file| blk: {
+ const atom = try macho_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl);
+ break :blk macho_file.getAtom(atom).getSymbolIndex().?;
+ } else if (self.bin_file.cast(link.File.Coff)) |coff_file| blk: {
+ const atom = try coff_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl);
+ break :blk coff_file.getAtom(atom).getSymbolIndex().?;
+ } else unreachable;
+
+ switch (ty.zigTypeTag()) {
+ .Float => {
+ const base_reg = (try self.register_manager.allocReg(null, gp)).to64();
+ try self.asmLeaLinker(base_reg, atom_index, load_struct);
+
+ if (intrinsicsAllowed(self.target.*, ty)) {
+ return self.asmRegisterMemory(
+ switch (ty.tag()) {
+ .f32 => .movss,
+ .f64 => .movsd,
+ else => return self.fail("TODO genSetReg from memory for {}", .{
+ ty.fmt(self.bin_file.options.module.?),
+ }),
+ },
+ reg.to128(),
+ Memory.sib(Memory.PtrSize.fromSize(abi_size), .{ .base = base_reg.to64() }),
+ );
+ }
+
+ return self.fail("TODO genSetReg from memory for float with no intrinsics", .{});
+ },
+ else => try self.asmMovLinker(registerAlias(reg, abi_size), atom_index, load_struct),
+ }
+ },
.stack_offset => |off| {
switch (ty.zigTypeTag()) {
.Int => switch (ty.intInfo(self.target.*).signedness) {
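A note on the `.tlv_reloc` arm of `genSetReg` above (an inference from the darwin TLV ABI, not spelled out in the commit): the descriptor address is materialized in %rdi, the indirect `call` goes through the descriptor's first word (normally `__tlv_get_addr`), and the variable's address comes back in %rax — hence the hard-coded `Register.rdi`, the `.call` through `.base = .rdi`, and the final copy out of `.rax`. Modeled in Zig:

```zig
// Runtime effect of `lea rdi, [descriptor]; call [rdi]`, reusing the
// descriptor sketch from earlier; everything here is illustrative.
const TlvDescriptor = extern struct {
    thunk: *const fn (*TlvDescriptor) callconv(.C) *anyopaque,
    key: usize,
    offset: usize,
};

fn tlvGetAddr(desc: *TlvDescriptor) *anyopaque {
    return desc.thunk(desc); // result lands in rax per the C calling convention
}
```

Because this is a genuine function call, caller-saved registers are clobbered, which is what the `// TODO: spill registers before calling` comment in that arm acknowledges.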
@@ -8468,6 +8779,7 @@ fn genTypedValue(self: *Self, arg_tv: TypedValue) InnerError!MCValue {
.linker_load => |ll| .{ .linker_load = ll },
.immediate => |imm| .{ .immediate = imm },
.memory => |addr| .{ .memory = addr },
+ .tlv_reloc => |sym_index| .{ .tlv_reloc = sym_index },
},
.fail => |msg| {
self.err_msg = msg;

@@ -42,7 +42,7 @@ pub fn emitMir(emit: *Emit) Error!void {
).?;
const target = macho_file.getGlobalByIndex(inst.data.relocation.sym_index);
try link.File.MachO.Atom.addRelocation(macho_file, atom_index, .{
- .type = @enumToInt(std.macho.reloc_type_x86_64.X86_64_RELOC_BRANCH),
+ .type = .branch,
.target = target,
.offset = end_offset - 4,
.addend = 0,
@@ -65,20 +65,20 @@ pub fn emitMir(emit: *Emit) Error!void {
});
} else return emit.fail("TODO implement {} for {}", .{ inst.tag, emit.bin_file.tag }),

- .lea_linker => if (emit.bin_file.cast(link.File.MachO)) |macho_file| {
+ .mov_linker, .lea_linker => if (emit.bin_file.cast(link.File.MachO)) |macho_file| {
const metadata =
emit.lower.mir.extraData(Mir.LeaRegisterReloc, inst.data.payload).data;
- const reloc_type = switch (inst.ops) {
- .got_reloc => @enumToInt(std.macho.reloc_type_x86_64.X86_64_RELOC_GOT),
- .direct_reloc => @enumToInt(std.macho.reloc_type_x86_64.X86_64_RELOC_SIGNED),
- else => unreachable,
- };
const atom_index = macho_file.getAtomIndexForSymbol(.{
.sym_index = metadata.atom_index,
.file = null,
}).?;
try link.File.MachO.Atom.addRelocation(macho_file, atom_index, .{
- .type = reloc_type,
+ .type = switch (inst.ops) {
+ .got_reloc => .got,
+ .direct_reloc => .signed,
+ .tlv_reloc => .tlv,
+ else => unreachable,
+ },
.target = .{ .sym_index = metadata.sym_index, .file = null },
.offset = @intCast(u32, end_offset - 4),
.addend = 0,

@@ -127,6 +127,7 @@ pub fn lowerMir(lower: *Lower, inst: Mir.Inst) Error![]const Instruction {
.call_extern => try lower.emit(.none, .call, &.{.{ .imm = Immediate.s(0) }}),

.lea_linker => try lower.mirLeaLinker(inst),
+ .mov_linker => try lower.mirMovLinker(inst),

.mov_moffs => try lower.mirMovMoffs(inst),

@@ -444,6 +445,15 @@ fn mirLeaLinker(lower: *Lower, inst: Mir.Inst) Error!void {
});
}

+ fn mirMovLinker(lower: *Lower, inst: Mir.Inst) Error!void {
+ const metadata = lower.mir.extraData(Mir.LeaRegisterReloc, inst.data.payload).data;
+ const reg = @intToEnum(Register, metadata.reg);
+ try lower.emit(.none, .mov, &.{
+ .{ .reg = reg },
+ .{ .mem = Memory.rip(Memory.PtrSize.fromBitSize(reg.bitSize()), 0) },
+ });
+ }

const abi = @import("abi.zig");
const assert = std.debug.assert;
const bits = @import("bits.zig");

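The `mirMovLinker` lowering above encodes `mov reg, [rip + 0]`: the zero displacement is a placeholder, and the relocation recorded in `emitMir` points the linker at those four bytes. A self-contained sketch of the eventual fixup (helper name hypothetical):

```zig
const std = @import("std");

// Patch a RIP-relative disp32 once the target's virtual address is known.
// The displacement is measured from the end of the instruction, which for
// `mov reg, [rip + disp32]` coincides with the end of the 4-byte field.
fn patchRipDisp32(code: []u8, disp_offset: usize, disp_end_vaddr: u64, target_vaddr: u64) void {
    const disp = @intCast(i32, @intCast(i64, target_vaddr) - @intCast(i64, disp_end_vaddr));
    std.mem.writeIntLittle(i32, code[disp_offset..][0..4], disp);
}
```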
@@ -233,6 +233,8 @@ pub const Inst = struct {

/// Load effective address of a symbol not yet allocated in VM.
lea_linker,
+ /// Move address of a symbol not yet allocated in VM.
+ mov_linker,

/// End of prologue
dbg_prologue_end,
@@ -402,6 +404,9 @@ pub const Inst = struct {
/// Linker relocation - imports table indirection (binding).
/// Uses `payload` payload with extra data of type `LeaRegisterReloc`.
import_reloc,
+ /// Linker relocation - threadlocal variable via GOT indirection.
+ /// Uses `payload` payload with extra data of type `LeaRegisterReloc`.
+ tlv_reloc,
};

pub const Data = union {

@@ -493,7 +493,7 @@ pub fn generateSymbol(
bin_file.allocator,
src_loc,
"TODO implement generateSymbol for big int enums ('{}')",
- .{typed_value.ty.fmtDebug()},
+ .{typed_value.ty.fmt(mod)},
),
};
}
@@ -932,6 +932,10 @@ pub const GenResult = union(enum) {
/// such as ARM, the immediate will never exceed 32-bits.
immediate: u64,
linker_load: LinkerLoad,
+ /// Pointer to a threadlocal variable.
+ /// The address resolution will be deferred until the linker allocates everything in virtual memory.
+ /// Payload is a symbol index.
+ tlv_reloc: u32,
/// Direct by-address reference to memory location.
memory: u64,
};
@@ -957,13 +961,13 @@ fn genDeclRef(
tv: TypedValue,
decl_index: Module.Decl.Index,
) CodeGenError!GenResult {
- log.debug("genDeclRef: ty = {}, val = {}", .{ tv.ty.fmtDebug(), tv.val.fmtDebug() });
+ const module = bin_file.options.module.?;
+ log.debug("genDeclRef: ty = {}, val = {}", .{ tv.ty.fmt(module), tv.val.fmtValue(tv.ty, module) });

const target = bin_file.options.target;
const ptr_bits = target.cpu.arch.ptrBitWidth();
const ptr_bytes: u64 = @divExact(ptr_bits, 8);

- const module = bin_file.options.module.?;
const decl = module.declPtr(decl_index);

if (!decl.ty.isFnOrHasRuntimeBitsIgnoreComptime()) {
@@ -991,6 +995,8 @@ fn genDeclRef(

module.markDeclAlive(decl);

+ const is_threadlocal = tv.val.isPtrToThreadLocal(module) and !bin_file.options.single_threaded;
+
if (bin_file.cast(link.File.Elf)) |elf_file| {
const atom_index = try elf_file.getOrCreateAtomForDecl(decl_index);
const atom = elf_file.getAtom(atom_index);
@@ -998,6 +1004,9 @@ fn genDeclRef(
} else if (bin_file.cast(link.File.MachO)) |macho_file| {
const atom_index = try macho_file.getOrCreateAtomForDecl(decl_index);
const sym_index = macho_file.getAtom(atom_index).getSymbolIndex().?;
+ if (is_threadlocal) {
+ return GenResult.mcv(.{ .tlv_reloc = sym_index });
+ }
return GenResult.mcv(.{ .linker_load = .{
.type = .got,
.sym_index = sym_index,
@@ -1025,7 +1034,8 @@ fn genUnnamedConst(
tv: TypedValue,
owner_decl_index: Module.Decl.Index,
) CodeGenError!GenResult {
- log.debug("genUnnamedConst: ty = {}, val = {}", .{ tv.ty.fmtDebug(), tv.val.fmtDebug() });
+ const mod = bin_file.options.module.?;
+ log.debug("genUnnamedConst: ty = {}, val = {}", .{ tv.ty.fmt(mod), tv.val.fmtValue(tv.ty, mod) });

const target = bin_file.options.target;
const local_sym_index = bin_file.lowerUnnamedConst(tv, owner_decl_index) catch |err| {
@@ -1065,7 +1075,11 @@ pub fn genTypedValue(
typed_value.val = rt.data;
}

- log.debug("genTypedValue: ty = {}, val = {}", .{ typed_value.ty.fmtDebug(), typed_value.val.fmtDebug() });
+ const mod = bin_file.options.module.?;
+ log.debug("genTypedValue: ty = {}, val = {}", .{
+ typed_value.ty.fmt(mod),
+ typed_value.val.fmtValue(typed_value.ty, mod),
+ });

if (typed_value.val.isUndef())
return GenResult.mcv(.undef);

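The `genDeclRef` changes above boil down to a two-way choice on the MachO path; a hedged restatement with the repo's types elided:

```zig
// Threadlocal decls become `tlv_reloc`; everything else keeps the
// GOT-indirect path. Note the `single_threaded` opt-out in the real code:
// without threads, a "thread local" is effectively just a global.
const DeclMcv = union(enum) { tlv_reloc: u32, got_load: u32 };

fn classifyMachoDecl(is_threadlocal: bool, sym_index: u32) DeclMcv {
    return if (is_threadlocal) .{ .tlv_reloc = sym_index } else .{ .got_load = sym_index };
}
```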
@@ -540,7 +540,7 @@ pub const File = struct {
/// May be called before or after updateDeclExports for any given Decl.
pub fn updateDecl(base: *File, module: *Module, decl_index: Module.Decl.Index) UpdateDeclError!void {
const decl = module.declPtr(decl_index);
- log.debug("updateDecl {*} ({s}), type={}", .{ decl, decl.name, decl.ty.fmtDebug() });
+ log.debug("updateDecl {*} ({s}), type={}", .{ decl, decl.name, decl.ty.fmt(module) });
assert(decl.has_tv);
if (build_options.only_c) {
assert(base.tag == .c);
@@ -564,7 +564,7 @@ pub const File = struct {
pub fn updateFunc(base: *File, module: *Module, func: *Module.Fn, air: Air, liveness: Liveness) UpdateDeclError!void {
const owner_decl = module.declPtr(func.owner_decl);
log.debug("updateFunc {*} ({s}), type={}", .{
- owner_decl, owner_decl.name, owner_decl.ty.fmtDebug(),
+ owner_decl, owner_decl.name, owner_decl.ty.fmt(module),
});
if (build_options.only_c) {
assert(base.tag == .c);

@@ -150,7 +150,7 @@ pub const DeclState = struct {
.type = ty,
.offset = undefined,
});
- log.debug("%{d}: {}", .{ sym_index, ty.fmtDebug() });
+ log.debug("%{d}: {}", .{ sym_index, ty.fmt(self.mod) });
try self.abbrev_resolver.putNoClobberContext(self.gpa, ty, sym_index, .{
.mod = self.mod,
});
@@ -570,7 +570,7 @@ pub const DeclState = struct {
try dbg_info_buffer.append(0);
},
else => {
- log.debug("TODO implement .debug_info for type '{}'", .{ty.fmtDebug()});
+ log.debug("TODO implement .debug_info for type '{}'", .{ty.fmt(self.mod)});
try dbg_info_buffer.append(@enumToInt(AbbrevKind.pad1));
},
}
@@ -1055,6 +1055,10 @@ pub fn commitDeclState(
},
}
{
+ log.debug("relocating subprogram high PC value: {x} => {x}", .{
+ self.getRelocDbgInfoSubprogramHighPC(),
+ sym_size,
+ });
const ptr = dbg_info_buffer.items[self.getRelocDbgInfoSubprogramHighPC()..][0..4];
mem.writeInt(u32, ptr, @intCast(u32, sym_size), target_endian);
}
@@ -1263,7 +1267,12 @@ pub fn commitDeclState(
} else {
const atom = self.getAtom(.di_atom, symbol.atom_index);
const value = atom.off + symbol.offset + reloc.addend;
- log.debug("{x}: [() => {x}] (%{d}, '{}')", .{ reloc.offset, value, target, ty.fmtDebug() });
+ log.debug("{x}: [() => {x}] (%{d}, '{}')", .{
+ reloc.offset,
+ value,
+ target,
+ ty.fmt(module),
+ });
mem.writeInt(
u32,
dbg_info_buffer.items[reloc.offset..][0..@sizeOf(u32)],

@@ -5,7 +5,6 @@ const build_options = @import("build_options");
const builtin = @import("builtin");
const assert = std.debug.assert;
const dwarf = std.dwarf;
- const fmt = std.fmt;
const fs = std.fs;
const log = std.log.scoped(.link);
const macho = std.macho;
@@ -138,6 +137,8 @@ got_section_index: ?u8 = null,
data_const_section_index: ?u8 = null,
la_symbol_ptr_section_index: ?u8 = null,
data_section_index: ?u8 = null,
+ thread_vars_section_index: ?u8 = null,
+ thread_data_section_index: ?u8 = null,

locals: std.ArrayListUnmanaged(macho.nlist_64) = .{},
globals: std.ArrayListUnmanaged(SymbolWithLoc) = .{},
@@ -153,13 +154,9 @@ stub_helper_preamble_atom_index: ?Atom.Index = null,

strtab: StringTable(.strtab) = .{},

- got_entries: std.ArrayListUnmanaged(Entry) = .{},
- got_entries_free_list: std.ArrayListUnmanaged(u32) = .{},
- got_entries_table: std.AutoHashMapUnmanaged(SymbolWithLoc, u32) = .{},
-
- stubs: std.ArrayListUnmanaged(Entry) = .{},
- stubs_free_list: std.ArrayListUnmanaged(u32) = .{},
- stubs_table: std.AutoHashMapUnmanaged(SymbolWithLoc, u32) = .{},
+ got_table: SectionTable = .{},
+ stubs_table: SectionTable = .{},
+ tlv_table: SectionTable = .{},

error_flags: File.ErrorFlags = File.ErrorFlags{},

@ -268,26 +265,120 @@ const DeclMetadata = struct {
|
||||
}
|
||||
};
|
||||
|
||||
const Entry = struct {
|
||||
target: SymbolWithLoc,
|
||||
// Index into the synthetic symbol table (i.e., file == null).
|
||||
sym_index: u32,
|
||||
const SectionTable = struct {
entries: std.ArrayListUnmanaged(Entry) = .{},
free_list: std.ArrayListUnmanaged(u32) = .{},
lookup: std.AutoHashMapUnmanaged(SymbolWithLoc, u32) = .{},

pub fn getSymbol(entry: Entry, macho_file: *MachO) macho.nlist_64 {
return macho_file.getSymbol(.{ .sym_index = entry.sym_index, .file = null });
pub fn deinit(st: *ST, allocator: Allocator) void {
st.entries.deinit(allocator);
st.free_list.deinit(allocator);
st.lookup.deinit(allocator);
}

pub fn getSymbolPtr(entry: Entry, macho_file: *MachO) *macho.nlist_64 {
return macho_file.getSymbolPtr(.{ .sym_index = entry.sym_index, .file = null });
pub fn allocateEntry(st: *ST, allocator: Allocator, target: SymbolWithLoc) !u32 {
try st.entries.ensureUnusedCapacity(allocator, 1);
const index = blk: {
if (st.free_list.popOrNull()) |index| {
log.debug("  (reusing entry index {d})", .{index});
break :blk index;
} else {
log.debug("  (allocating entry at index {d})", .{st.entries.items.len});
const index = @intCast(u32, st.entries.items.len);
_ = st.entries.addOneAssumeCapacity();
break :blk index;
}
};
st.entries.items[index] = .{ .target = target, .sym_index = 0 };
try st.lookup.putNoClobber(allocator, target, index);
return index;
}

pub fn getAtomIndex(entry: Entry, macho_file: *MachO) ?Atom.Index {
return macho_file.getAtomIndexForSymbol(.{ .sym_index = entry.sym_index, .file = null });
pub fn freeEntry(st: *ST, allocator: Allocator, target: SymbolWithLoc) void {
const index = st.lookup.get(target) orelse return;
st.free_list.append(allocator, index) catch {};
st.entries.items[index] = .{
.target = .{ .sym_index = 0 },
.sym_index = 0,
};
_ = st.lookup.remove(target);
}

pub fn getName(entry: Entry, macho_file: *MachO) []const u8 {
return macho_file.getSymbolName(.{ .sym_index = entry.sym_index, .file = null });
pub fn getAtomIndex(st: *const ST, macho_file: *MachO, target: SymbolWithLoc) ?Atom.Index {
const index = st.lookup.get(target) orelse return null;
return st.entries.items[index].getAtomIndex(macho_file);
}

const FormatContext = struct {
macho_file: *MachO,
st: *const ST,
};

fn fmt(
ctx: FormatContext,
comptime unused_format_string: []const u8,
options: std.fmt.FormatOptions,
writer: anytype,
) @TypeOf(writer).Error!void {
_ = options;
comptime assert(unused_format_string.len == 0);
try writer.writeAll("SectionTable:\n");
for (ctx.st.entries.items, 0..) |entry, i| {
const atom_sym = entry.getSymbol(ctx.macho_file);
const target_sym = ctx.macho_file.getSymbol(entry.target);
try writer.print("  {d}@{x} => ", .{ i, atom_sym.n_value });
if (target_sym.undf()) {
try writer.print("import('{s}')", .{
ctx.macho_file.getSymbolName(entry.target),
});
} else {
try writer.print("local(%{d}) in object({?d})", .{
entry.target.sym_index,
entry.target.file,
});
}
try writer.writeByte('\n');
}
}

fn format(st: *const ST, comptime unused_format_string: []const u8, options: std.fmt.FormatOptions, writer: anytype) !void {
_ = st;
_ = unused_format_string;
_ = options;
_ = writer;
@compileError("do not format SectionTable directly; use st.fmtDebug()");
}

pub fn fmtDebug(st: *const ST, macho_file: *MachO) std.fmt.Formatter(fmt) {
return .{ .data = .{
.macho_file = macho_file,
.st = st,
} };
}

const ST = @This();

const Entry = struct {
target: SymbolWithLoc,
// Index into the synthetic symbol table (i.e., file == null).
sym_index: u32,

pub fn getSymbol(entry: Entry, macho_file: *MachO) macho.nlist_64 {
return macho_file.getSymbol(.{ .sym_index = entry.sym_index });
}

pub fn getSymbolPtr(entry: Entry, macho_file: *MachO) *macho.nlist_64 {
return macho_file.getSymbolPtr(.{ .sym_index = entry.sym_index });
}

pub fn getAtomIndex(entry: Entry, macho_file: *MachO) ?Atom.Index {
return macho_file.getAtomIndexForSymbol(.{ .sym_index = entry.sym_index });
}

pub fn getName(entry: Entry, macho_file: *MachO) []const u8 {
return macho_file.getSymbolName(.{ .sym_index = entry.sym_index });
}
};
};

const BindingTable = std.AutoArrayHashMapUnmanaged(Atom.Index, std.ArrayListUnmanaged(Atom.Binding));
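SectionTable generalizes the hand-rolled GOT and stubs bookkeeping (removed further down in deinit and freeAtom) into one reusable entries + free_list + lookup structure. A minimal standalone sketch of the same slot-reuse pattern, using a hypothetical TestTable keyed by plain integers rather than SymbolWithLoc:

const std = @import("std");

const Entry = struct { value: u32 };

const TestTable = struct {
    entries: std.ArrayListUnmanaged(Entry) = .{},
    free_list: std.ArrayListUnmanaged(u32) = .{},
    lookup: std.AutoHashMapUnmanaged(u32, u32) = .{},

    fn allocateEntry(t: *TestTable, gpa: std.mem.Allocator, key: u32) !u32 {
        try t.entries.ensureUnusedCapacity(gpa, 1);
        // Reuse a freed slot when possible so entry indices stay stable.
        const index = t.free_list.popOrNull() orelse blk: {
            const index = @intCast(u32, t.entries.items.len);
            _ = t.entries.addOneAssumeCapacity();
            break :blk index;
        };
        t.entries.items[index] = .{ .value = key };
        try t.lookup.putNoClobber(gpa, key, index);
        return index;
    }

    fn freeEntry(t: *TestTable, gpa: std.mem.Allocator, key: u32) void {
        const index = t.lookup.get(key) orelse return;
        t.free_list.append(gpa, index) catch {}; // Best effort; losing a slot only wastes space.
        t.entries.items[index] = .{ .value = 0 };
        _ = t.lookup.remove(key);
    }
};

test "slots are reused after free" {
    const gpa = std.testing.allocator;
    var t = TestTable{};
    defer {
        t.entries.deinit(gpa);
        t.free_list.deinit(gpa);
        t.lookup.deinit(gpa);
    }
    const a = try t.allocateEntry(gpa, 1);
    t.freeEntry(gpa, 1);
    const b = try t.allocateEntry(gpa, 2);
    try std.testing.expectEqual(a, b); // Freed index is handed out again.
}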
@ -397,7 +488,7 @@ pub fn openPath(allocator: Allocator, options: link.Options) !*MachO {
// Create dSYM bundle.
log.debug("creating {s}.dSYM bundle", .{sub_path});

const d_sym_path = try fmt.allocPrint(
const d_sym_path = try std.fmt.allocPrint(
allocator,
"{s}.dSYM" ++ fs.path.sep_str ++ "Contents" ++ fs.path.sep_str ++ "Resources" ++ fs.path.sep_str ++ "DWARF",
.{sub_path},
@ -611,6 +702,9 @@ pub fn flushModule(self: *MachO, comp: *Compilation, prog_node: *std.Progress.No
if (self.dyld_stub_binder_index == null) {
self.dyld_stub_binder_index = try self.addUndefined("dyld_stub_binder", .add_got);
}
if (!self.base.options.single_threaded) {
_ = try self.addUndefined("__tlv_bootstrap", .none);
}

try self.createMhExecuteHeaderSymbol();

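Roughly speaking, __tlv_bootstrap is the libSystem export that every TLV descriptor's thunk slot points at until dyld rewires it at load time, so any image containing thread-local variables must import it. A user program exercising the new path is as small as:

const std = @import("std");

threadlocal var counter: u32 = 0;

fn bump() void {
    // Each thread sees its own copy of `counter`, courtesy of the
    // __thread_vars/__thread_data machinery the linker now emits.
    counter += 1;
    std.debug.assert(counter == 1);
}

pub fn main() !void {
    const t1 = try std.Thread.spawn(.{}, bump, .{});
    const t2 = try std.Thread.spawn(.{}, bump, .{});
    t1.join();
    t2.join();
    bump(); // The main thread's copy is independent too.
}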
@ -619,6 +713,12 @@ pub fn flushModule(self: *MachO, comp: *Compilation, prog_node: *std.Progress.No
try self.resolveSymbolsInDylibs(&actions);

if (self.unresolved.count() > 0) {
for (self.unresolved.keys()) |index| {
// TODO: convert into compiler errors.
const global = self.globals.items[index];
const sym_name = self.getSymbolName(global);
log.err("undefined symbol reference '{s}'", .{sym_name});
}
return error.UndefinedSymbolReference;
}

@ -1237,22 +1337,17 @@ pub fn createAtom(self: *MachO) !Atom.Index {

pub fn createGotAtom(self: *MachO, target: SymbolWithLoc) !Atom.Index {
const atom_index = try self.createAtom();
const atom = self.getAtomPtr(atom_index);
atom.size = @sizeOf(u64);
self.getAtomPtr(atom_index).size = @sizeOf(u64);

const sym = atom.getSymbolPtr(self);
const sym = self.getAtom(atom_index).getSymbolPtr(self);
sym.n_type = macho.N_SECT;
sym.n_sect = self.got_section_index.? + 1;
sym.n_value = try self.allocateAtom(atom_index, atom.size, @alignOf(u64));
sym.n_value = try self.allocateAtom(atom_index, @sizeOf(u64), @alignOf(u64));

log.debug("allocated GOT atom at 0x{x}", .{sym.n_value});

try Atom.addRelocation(self, atom_index, .{
.type = switch (self.base.options.target.cpu.arch) {
.aarch64 => @enumToInt(macho.reloc_type_arm64.ARM64_RELOC_UNSIGNED),
.x86_64 => @enumToInt(macho.reloc_type_x86_64.X86_64_RELOC_UNSIGNED),
else => unreachable,
},
.type = .unsigned,
.target = target,
.offset = 0,
.addend = 0,
@ -1269,6 +1364,7 @@ pub fn createGotAtom(self: *MachO, target: SymbolWithLoc) !Atom.Index {
} else {
try Atom.addRebase(self, atom_index, 0);
}
try self.writePtrWidthAtom(atom_index);

return atom_index;
}
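A GOT atom is nothing more than a pointer-width slot that ends up holding the target's absolute address: the `.unsigned` relocation above patches it, and the bind or rebase entry keeps it honest under ASLR. Distilled into a hypothetical helper:

const std = @import("std");

/// What resolving an `.unsigned`, length == 3 relocation does to a GOT slot.
fn writeGotSlot(slot: *[8]u8, target_addr: u64) void {
    std.mem.writeIntLittle(u64, slot, target_addr);
}

test "writeGotSlot" {
    var slot: [8]u8 = undefined;
    writeGotSlot(&slot, 0x1000042a8); // Hypothetical resolved address.
    try std.testing.expectEqual(@as(u8, 0xa8), slot[0]); // Little-endian byte order.
}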
@ -1334,15 +1430,15 @@ fn createStubHelperPreambleAtom(self: *MachO) !void {
code[9] = 0xff;
code[10] = 0x25;

try Atom.addRelocations(self, atom_index, 2, .{ .{
.type = @enumToInt(macho.reloc_type_x86_64.X86_64_RELOC_SIGNED),
try Atom.addRelocations(self, atom_index, &[_]Relocation{ .{
.type = .signed,
.target = dyld_private,
.offset = 3,
.addend = 0,
.pcrel = true,
.length = 2,
}, .{
.type = @enumToInt(macho.reloc_type_x86_64.X86_64_RELOC_GOT),
.type = .got,
.target = dyld_stub_binder,
.offset = 11,
.addend = 0,
@ -1374,29 +1470,29 @@ fn createStubHelperPreambleAtom(self: *MachO) !void {
// br x16
mem.writeIntLittle(u32, code[20..][0..4], aarch64.Instruction.br(.x16).toU32());

try Atom.addRelocations(self, atom_index, 4, .{ .{
.type = @enumToInt(macho.reloc_type_arm64.ARM64_RELOC_PAGE21),
try Atom.addRelocations(self, atom_index, &[_]Relocation{ .{
.type = .page,
.target = dyld_private,
.offset = 0,
.addend = 0,
.pcrel = true,
.length = 2,
}, .{
.type = @enumToInt(macho.reloc_type_arm64.ARM64_RELOC_PAGEOFF12),
.type = .pageoff,
.target = dyld_private,
.offset = 4,
.addend = 0,
.pcrel = false,
.length = 2,
}, .{
.type = @enumToInt(macho.reloc_type_arm64.ARM64_RELOC_GOT_LOAD_PAGE21),
.type = .got_page,
.target = dyld_stub_binder,
.offset = 12,
.addend = 0,
.pcrel = true,
.length = 2,
}, .{
.type = @enumToInt(macho.reloc_type_arm64.ARM64_RELOC_GOT_LOAD_PAGEOFF12),
.type = .got_pageoff,
.target = dyld_stub_binder,
.offset = 16,
.addend = 0,
@ -1454,8 +1550,8 @@ fn createStubHelperAtom(self: *MachO) !Atom.Index {
code[5] = 0xe9;

try Atom.addRelocation(self, atom_index, .{
.type = @enumToInt(macho.reloc_type_x86_64.X86_64_RELOC_BRANCH),
.target = .{ .sym_index = stub_helper_preamble_atom_sym_index, .file = null },
.type = .branch,
.target = .{ .sym_index = stub_helper_preamble_atom_sym_index },
.offset = 6,
.addend = 0,
.pcrel = true,
@ -1477,8 +1573,8 @@ fn createStubHelperAtom(self: *MachO) !Atom.Index {
// Next 4 bytes 8..12 are just a placeholder populated in `populateLazyBindOffsetsInStubHelper`.

try Atom.addRelocation(self, atom_index, .{
.type = @enumToInt(macho.reloc_type_arm64.ARM64_RELOC_BRANCH26),
.target = .{ .sym_index = stub_helper_preamble_atom_sym_index, .file = null },
.type = .branch,
.target = .{ .sym_index = stub_helper_preamble_atom_sym_index },
.offset = 4,
.addend = 0,
.pcrel = true,
@ -1505,12 +1601,8 @@ fn createLazyPointerAtom(self: *MachO, stub_sym_index: u32, target: SymbolWithLo
sym.n_sect = self.la_symbol_ptr_section_index.? + 1;

try Atom.addRelocation(self, atom_index, .{
.type = switch (self.base.options.target.cpu.arch) {
.aarch64 => @enumToInt(macho.reloc_type_arm64.ARM64_RELOC_UNSIGNED),
.x86_64 => @enumToInt(macho.reloc_type_x86_64.X86_64_RELOC_UNSIGNED),
else => unreachable,
},
.target = .{ .sym_index = stub_sym_index, .file = null },
.type = .unsigned,
.target = .{ .sym_index = stub_sym_index },
.offset = 0,
.addend = 0,
.pcrel = false,
@ -1563,8 +1655,8 @@ fn createStubAtom(self: *MachO, laptr_sym_index: u32) !Atom.Index {
code[1] = 0x25;

try Atom.addRelocation(self, atom_index, .{
.type = @enumToInt(macho.reloc_type_x86_64.X86_64_RELOC_BRANCH),
.target = .{ .sym_index = laptr_sym_index, .file = null },
.type = .branch,
.target = .{ .sym_index = laptr_sym_index },
.offset = 2,
.addend = 0,
.pcrel = true,
@ -1583,18 +1675,18 @@ fn createStubAtom(self: *MachO, laptr_sym_index: u32) !Atom.Index {
// br x16
mem.writeIntLittle(u32, code[8..12], aarch64.Instruction.br(.x16).toU32());

try Atom.addRelocations(self, atom_index, 2, .{
try Atom.addRelocations(self, atom_index, &[_]Relocation{
.{
.type = @enumToInt(macho.reloc_type_arm64.ARM64_RELOC_PAGE21),
.target = .{ .sym_index = laptr_sym_index, .file = null },
.type = .page,
.target = .{ .sym_index = laptr_sym_index },
.offset = 0,
.addend = 0,
.pcrel = true,
.length = 2,
},
.{
.type = @enumToInt(macho.reloc_type_arm64.ARM64_RELOC_PAGEOFF12),
.target = .{ .sym_index = laptr_sym_index, .file = null },
.type = .pageoff,
.target = .{ .sym_index = laptr_sym_index },
.offset = 4,
.addend = 0,
.pcrel = false,
@ -1612,6 +1704,42 @@ fn createStubAtom(self: *MachO, laptr_sym_index: u32) !Atom.Index {
return atom_index;
}

fn createThreadLocalDescriptorAtom(self: *MachO, target: SymbolWithLoc) !Atom.Index {
const gpa = self.base.allocator;
const size = 3 * @sizeOf(u64);
const required_alignment: u32 = 1;
const atom_index = try self.createAtom();
self.getAtomPtr(atom_index).size = size;

const target_sym_name = self.getSymbolName(target);
const name_delimiter = mem.indexOf(u8, target_sym_name, "$").?;
const sym_name = try gpa.dupe(u8, target_sym_name[0..name_delimiter]);
defer gpa.free(sym_name);

const sym = self.getAtom(atom_index).getSymbolPtr(self);
sym.n_type = macho.N_SECT;
sym.n_sect = self.thread_vars_section_index.? + 1;
sym.n_strx = try self.strtab.insert(gpa, sym_name);
sym.n_value = try self.allocateAtom(atom_index, size, required_alignment);

log.debug("allocated threadlocal descriptor atom '{s}' at 0x{x}", .{ sym_name, sym.n_value });

try Atom.addRelocation(self, atom_index, .{
.type = .tlv_initializer,
.target = target,
.offset = 0x10,
.addend = 0,
.pcrel = false,
.length = 3,
});

var code: [size]u8 = undefined;
mem.set(u8, &code, 0);
try self.writeAtom(atom_index, &code);

return atom_index;
}

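The three-word atom built here mirrors dyld's tlv_descriptor: a thunk pointer, a per-image key, and the variable's offset inside the image's TLS initializer block. The `.tlv_initializer` relocation at offset 0x10 fills the third word. An illustration of the layout (field names follow Apple's dyld sources; this extern struct is not something the linker itself imports):

const std = @import("std");

/// What dyld expects to find in a __thread_vars section (LP64).
const TlvDescriptor = extern struct {
    thunk: usize, // Bound to __tlv_bootstrap, rewired to dyld's tlv_get_addr at load time.
    key: usize, // Per-image TLS key, filled in by dyld.
    offset: usize, // At 0x10 into the descriptor: offset of the variable within __thread_data.
};

comptime {
    std.debug.assert(@sizeOf(TlvDescriptor) == 3 * @sizeOf(u64)); // Matches `size` above.
}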
fn createMhExecuteHeaderSymbol(self: *MachO) !void {
if (self.base.options.output_mode != .Exe) return;
if (self.getGlobal("__mh_execute_header")) |global| {
@ -1760,12 +1888,9 @@ pub fn deinit(self: *MachO) void {
d_sym.deinit();
}

self.got_entries.deinit(gpa);
self.got_entries_free_list.deinit(gpa);
self.got_entries_table.deinit(gpa);
self.stubs.deinit(gpa);
self.stubs_free_list.deinit(gpa);
self.got_table.deinit(gpa);
self.stubs_table.deinit(gpa);
self.tlv_table.deinit(gpa);
self.strtab.deinit(gpa);

self.locals.deinit(gpa);
@ -1898,20 +2023,10 @@ fn freeAtom(self: *MachO, atom_index: Atom.Index) void {
self.locals_free_list.append(gpa, sym_index) catch {};

// Try freeing GOT atom if this decl had one
const got_target = SymbolWithLoc{ .sym_index = sym_index, .file = null };
if (self.got_entries_table.get(got_target)) |got_index| {
self.got_entries_free_list.append(gpa, @intCast(u32, got_index)) catch {};
self.got_entries.items[got_index] = .{
.target = .{ .sym_index = 0, .file = null },
.sym_index = 0,
};
_ = self.got_entries_table.remove(got_target);
self.got_table.freeEntry(gpa, .{ .sym_index = sym_index });

if (self.d_sym) |*d_sym| {
d_sym.swapRemoveRelocs(sym_index);
}

log.debug("  adding GOT index {d} to free list (target local@{d})", .{ got_index, sym_index });
if (self.d_sym) |*d_sym| {
d_sym.swapRemoveRelocs(sym_index);
}

self.locals.items[sym_index].n_type = 0;
@ -1986,70 +2101,34 @@ fn allocateGlobal(self: *MachO) !u32 {
return index;
}

fn allocateGotEntry(self: *MachO, target: SymbolWithLoc) !u32 {
const gpa = self.base.allocator;
try self.got_entries.ensureUnusedCapacity(gpa, 1);

const index = blk: {
if (self.got_entries_free_list.popOrNull()) |index| {
log.debug("  (reusing GOT entry index {d})", .{index});
break :blk index;
} else {
log.debug("  (allocating GOT entry at index {d})", .{self.got_entries.items.len});
const index = @intCast(u32, self.got_entries.items.len);
_ = self.got_entries.addOneAssumeCapacity();
break :blk index;
}
};

self.got_entries.items[index] = .{ .target = target, .sym_index = 0 };
try self.got_entries_table.putNoClobber(gpa, target, index);

return index;
}

fn addGotEntry(self: *MachO, target: SymbolWithLoc) !void {
if (self.got_entries_table.contains(target)) return;

const got_index = try self.allocateGotEntry(target);
if (self.got_table.lookup.contains(target)) return;
const got_index = try self.got_table.allocateEntry(self.base.allocator, target);
const got_atom_index = try self.createGotAtom(target);
const got_atom = self.getAtom(got_atom_index);
self.got_entries.items[got_index].sym_index = got_atom.getSymbolIndex().?;
try self.writePtrWidthAtom(got_atom_index);
}

fn allocateStubEntry(self: *MachO, target: SymbolWithLoc) !u32 {
try self.stubs.ensureUnusedCapacity(self.base.allocator, 1);

const index = blk: {
if (self.stubs_free_list.popOrNull()) |index| {
log.debug("  (reusing stub entry index {d})", .{index});
break :blk index;
} else {
log.debug("  (allocating stub entry at index {d})", .{self.stubs.items.len});
const index = @intCast(u32, self.stubs.items.len);
_ = self.stubs.addOneAssumeCapacity();
break :blk index;
}
};

self.stubs.items[index] = .{ .target = target, .sym_index = 0 };
try self.stubs_table.putNoClobber(self.base.allocator, target, index);

return index;
self.got_table.entries.items[got_index].sym_index = got_atom.getSymbolIndex().?;
self.markRelocsDirtyByTarget(target);
}

fn addStubEntry(self: *MachO, target: SymbolWithLoc) !void {
if (self.stubs_table.contains(target)) return;

const stub_index = try self.allocateStubEntry(target);
if (self.stubs_table.lookup.contains(target)) return;
const stub_index = try self.stubs_table.allocateEntry(self.base.allocator, target);
const stub_helper_atom_index = try self.createStubHelperAtom();
const stub_helper_atom = self.getAtom(stub_helper_atom_index);
const laptr_atom_index = try self.createLazyPointerAtom(stub_helper_atom.getSymbolIndex().?, target);
const laptr_atom = self.getAtom(laptr_atom_index);
const stub_atom_index = try self.createStubAtom(laptr_atom.getSymbolIndex().?);
const stub_atom = self.getAtom(stub_atom_index);
self.stubs.items[stub_index].sym_index = stub_atom.getSymbolIndex().?;
self.stubs_table.entries.items[stub_index].sym_index = stub_atom.getSymbolIndex().?;
self.markRelocsDirtyByTarget(target);
}

fn addTlvEntry(self: *MachO, target: SymbolWithLoc) !void {
if (self.tlv_table.lookup.contains(target)) return;
const tlv_index = try self.tlv_table.allocateEntry(self.base.allocator, target);
const tlv_atom_index = try self.createThreadLocalDescriptorAtom(target);
const tlv_atom = self.getAtom(tlv_atom_index);
self.tlv_table.entries.items[tlv_index].sym_index = tlv_atom.getSymbolIndex().?;
self.markRelocsDirtyByTarget(target);
}

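addGotEntry, addStubEntry, and the new addTlvEntry now share one shape: bail if the table already knows the target, allocate a slot, synthesize the backing atom, record the atom's symbol index, and mark relocations against the target dirty so they get re-resolved. Condensed into a single hypothetical helper (sketch only; the real code keeps three copies because each synthesizes a different atom):

fn addEntryGeneric(
    self: *MachO,
    table: *SectionTable,
    target: SymbolWithLoc,
    comptime makeAtom: fn (*MachO, SymbolWithLoc) anyerror!Atom.Index,
) !void {
    if (table.lookup.contains(target)) return; // Already materialized.
    const index = try table.allocateEntry(self.base.allocator, target);
    const atom_index = try makeAtom(self, target);
    table.entries.items[index].sym_index = self.getAtom(atom_index).getSymbolIndex().?;
    self.markRelocsDirtyByTarget(target);
}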
@ -2070,8 +2149,6 @@ pub fn updateFunc(self: *MachO, module: *Module, func: *Module.Fn, air: Air, liv
self.freeUnnamedConsts(decl_index);
Atom.freeRelocations(self, atom_index);

const atom = self.getAtom(atom_index);

var code_buffer = std.ArrayList(u8).init(self.base.allocator);
defer code_buffer.deinit();

@ -2100,7 +2177,13 @@ pub fn updateFunc(self: *MachO, module: *Module, func: *Module.Fn, air: Air, liv
const addr = try self.updateDeclCode(decl_index, code);

if (decl_state) |*ds| {
try self.d_sym.?.dwarf.commitDeclState(module, decl_index, addr, atom.size, ds);
try self.d_sym.?.dwarf.commitDeclState(
module,
decl_index,
addr,
self.getAtom(atom_index).size,
ds,
);
}

// Since we updated the vaddr and the size, each corresponding export symbol also
@ -2196,8 +2279,8 @@ pub fn updateDecl(self: *MachO, module: *Module, decl_index: Module.Decl.Index)
}

const atom_index = try self.getOrCreateAtomForDecl(decl_index);
const sym_index = self.getAtom(atom_index).getSymbolIndex().?;
Atom.freeRelocations(self, atom_index);
const atom = self.getAtom(atom_index);

var code_buffer = std.ArrayList(u8).init(self.base.allocator);
defer code_buffer.deinit();
@ -2216,14 +2299,14 @@ pub fn updateDecl(self: *MachO, module: *Module, decl_index: Module.Decl.Index)
}, &code_buffer, .{
.dwarf = ds,
}, .{
.parent_atom_index = atom.getSymbolIndex().?,
.parent_atom_index = sym_index,
})
else
try codegen.generateSymbol(&self.base, decl.srcLoc(), .{
.ty = decl.ty,
.val = decl_val,
}, &code_buffer, .none, .{
.parent_atom_index = atom.getSymbolIndex().?,
.parent_atom_index = sym_index,
});

var code = switch (res) {
@ -2237,7 +2320,13 @@ pub fn updateDecl(self: *MachO, module: *Module, decl_index: Module.Decl.Index)
const addr = try self.updateDeclCode(decl_index, code);

if (decl_state) |*ds| {
try self.d_sym.?.dwarf.commitDeclState(module, decl_index, addr, atom.size, ds);
try self.d_sym.?.dwarf.commitDeclState(
module,
decl_index,
addr,
self.getAtom(atom_index).size,
ds,
);
}

// Since we updated the vaddr and the size, each corresponding export symbol also
@ -2322,7 +2411,6 @@ fn updateLazySymbolAtom(
symbol.n_value = vaddr;

try self.addGotEntry(.{ .sym_index = local_sym_index });
self.markRelocsDirtyByTarget(atom.getSymbolWithLoc());
try self.writeAtom(atom_index, code);
}

@ -2356,6 +2444,7 @@ fn getDeclOutputSection(self: *MachO, decl_index: Module.Decl.Index) u8 {
const val = decl.val;
const zig_ty = ty.zigTypeTag();
const mode = self.base.options.optimize_mode;
const single_threaded = self.base.options.single_threaded;
const sect_id: u8 = blk: {
// TODO finish and audit this function
if (val.isUndefDeep()) {
@ -2366,7 +2455,10 @@ fn getDeclOutputSection(self: *MachO, decl_index: Module.Decl.Index) u8 {
}
}

if (val.castTag(.variable)) |_| {
if (val.castTag(.variable)) |variable| {
if (variable.data.is_threadlocal and !single_threaded) {
break :blk self.thread_data_section_index.?;
}
break :blk self.data_section_index.?;
}

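A mutable global's section now depends on both its threadlocal-ness and the single_threaded mode. Illustrative declarations (hypothetical names; the section comments reflect the indices assigned above):

var hits: u64 = 0; // -> __data (data_section_index)
threadlocal var depth: u32 = 0; // -> __thread_data (thread_data_section_index); with
// -fsingle-threaded it falls back to __data like any other mutable global.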
@ -2391,16 +2483,28 @@ fn updateDeclCode(self: *MachO, decl_index: Module.Decl.Index, code: []u8) !u64

const required_alignment = decl.getAlignment(self.base.options.target);

const sym_name = try decl.getFullyQualifiedName(mod);
defer self.base.allocator.free(sym_name);
const decl_name = try decl.getFullyQualifiedName(mod);
defer gpa.free(decl_name);

const decl_metadata = self.decls.get(decl_index).?;
const atom_index = decl_metadata.atom;
const atom = self.getAtom(atom_index);
const sym_index = atom.getSymbolIndex().?;
const sect_id = decl_metadata.section;
const header = &self.sections.items(.header)[sect_id];
const segment = self.getSegment(sect_id);
const is_threadlocal = if (!self.base.options.single_threaded)
header.flags == macho.S_THREAD_LOCAL_REGULAR or header.flags == macho.S_THREAD_LOCAL_ZEROFILL
else
false;
const code_len = code.len;

const sym_name = if (is_threadlocal)
try std.fmt.allocPrint(gpa, "{s}$tlv$init", .{decl_name})
else
decl_name;
defer if (is_threadlocal) gpa.free(sym_name);

if (atom.size != 0) {
const sym = atom.getSymbolPtr(self);
sym.n_strx = try self.strtab.insert(gpa, sym_name);
@ -2418,25 +2522,29 @@ fn updateDeclCode(self: *MachO, decl_index: Module.Decl.Index, code: []u8) !u64

if (vaddr != sym.n_value) {
sym.n_value = vaddr;
// TODO: I think we should update the offset to the initializer here too.
const target: SymbolWithLoc = if (is_threadlocal) blk: {
const tlv_atom_index = self.tlv_table.getAtomIndex(self, .{
.sym_index = sym_index,
}).?;
const tlv_atom = self.getAtom(tlv_atom_index);
break :blk tlv_atom.getSymbolWithLoc();
} else .{ .sym_index = sym_index };
self.markRelocsDirtyByTarget(target);
log.debug("  (updating GOT entry)", .{});
const got_target = SymbolWithLoc{ .sym_index = sym_index, .file = null };
const got_atom_index = self.getGotAtomIndexForSymbol(got_target).?;
self.markRelocsDirtyByTarget(got_target);
const got_atom_index = self.got_table.getAtomIndex(self, target).?;
try self.writePtrWidthAtom(got_atom_index);
}
} else if (code_len < atom.size) {
self.shrinkAtom(atom_index, code_len);
} else if (atom.next_index == null) {
const header = &self.sections.items(.header)[sect_id];
const segment = self.getSegment(sect_id);
const needed_size = (sym.n_value + code_len) - segment.vmaddr;
header.size = needed_size;
}
self.getAtomPtr(atom_index).size = code_len;
} else {
const name_str_index = try self.strtab.insert(gpa, sym_name);
const sym = atom.getSymbolPtr(self);
sym.n_strx = name_str_index;
sym.n_strx = try self.strtab.insert(gpa, sym_name);
sym.n_type = macho.N_SECT;
sym.n_sect = sect_id + 1;
sym.n_desc = 0;
@ -2450,10 +2558,17 @@ fn updateDeclCode(self: *MachO, decl_index: Module.Decl.Index, code: []u8) !u64
self.getAtomPtr(atom_index).size = code_len;
sym.n_value = vaddr;

try self.addGotEntry(.{ .sym_index = sym_index });
if (is_threadlocal) {
try self.addTlvEntry(.{ .sym_index = sym_index });
}
const target: SymbolWithLoc = if (is_threadlocal) blk: {
const tlv_atom_index = self.tlv_table.getAtomIndex(self, .{ .sym_index = sym_index }).?;
const tlv_atom = self.getAtom(tlv_atom_index);
break :blk tlv_atom.getSymbolWithLoc();
} else .{ .sym_index = sym_index };
try self.addGotEntry(target);
}

self.markRelocsDirtyByTarget(atom.getSymbolWithLoc());
try self.writeAtom(atom_index, code);

return atom.getSymbol(self).n_value;
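The naming convention ties the TLS pieces together: the decl's own atom, the initializer image in __thread_data, gets the `$tlv$init` suffix, while the descriptor atom in __thread_vars takes the plain name back by splitting at the first `$` (see createThreadLocalDescriptorAtom above). A spot check of that round trip:

const std = @import("std");

test "tlv symbol naming round-trips" {
    const decl_name = "foo.counter"; // Hypothetical fully qualified decl name.
    // updateDeclCode names the initializer atom:
    const init_name = decl_name ++ "$tlv$init";
    // createThreadLocalDescriptorAtom recovers the plain name by cutting at the first '$':
    const delim = std.mem.indexOf(u8, init_name, "$").?;
    try std.testing.expectEqualStrings(decl_name, init_name[0..delim]);
}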
@ -2647,11 +2762,7 @@ pub fn getDeclVAddr(self: *MachO, decl_index: Module.Decl.Index, reloc_info: Fil
const sym_index = self.getAtom(this_atom_index).getSymbolIndex().?;
const atom_index = self.getAtomIndexForSymbol(.{ .sym_index = reloc_info.parent_atom_index, .file = null }).?;
try Atom.addRelocation(self, atom_index, .{
.type = switch (self.base.options.target.cpu.arch) {
.aarch64 => @enumToInt(macho.reloc_type_arm64.ARM64_RELOC_UNSIGNED),
.x86_64 => @enumToInt(macho.reloc_type_x86_64.X86_64_RELOC_UNSIGNED),
else => unreachable,
},
.type = .unsigned,
.target = .{ .sym_index = sym_index, .file = null },
.offset = @intCast(u32, reloc_info.offset),
.addend = reloc_info.addend,
@ -2790,6 +2901,28 @@ fn populateMissingMetadata(self: *MachO) !void {
self.segment_table_dirty = true;
}

if (!self.base.options.single_threaded) {
if (self.thread_vars_section_index == null) {
self.thread_vars_section_index = try self.allocateSection("__DATA2", "__thread_vars", .{
.size = @sizeOf(u64) * 3,
.alignment = @sizeOf(u64),
.flags = macho.S_THREAD_LOCAL_VARIABLES,
.prot = macho.PROT.READ | macho.PROT.WRITE,
});
self.segment_table_dirty = true;
}

if (self.thread_data_section_index == null) {
self.thread_data_section_index = try self.allocateSection("__DATA3", "__thread_data", .{
.size = @sizeOf(u64),
.alignment = @alignOf(u64),
.flags = macho.S_THREAD_LOCAL_REGULAR,
.prot = macho.PROT.READ | macho.PROT.WRITE,
});
self.segment_table_dirty = true;
}
}

if (self.linkedit_segment_cmd_index == null) {
self.linkedit_segment_cmd_index = @intCast(u8, self.segments.items.len);

@ -3077,28 +3210,6 @@ fn allocateAtom(self: *MachO, atom_index: Atom.Index, new_atom_size: u64, alignm
return vaddr;
}

fn getSectionPrecedence(header: macho.section_64) u4 {
if (header.isCode()) {
if (mem.eql(u8, "__text", header.sectName())) return 0x0;
if (header.type() == macho.S_SYMBOL_STUBS) return 0x1;
return 0x2;
}
switch (header.type()) {
macho.S_NON_LAZY_SYMBOL_POINTERS,
macho.S_LAZY_SYMBOL_POINTERS,
=> return 0x0,
macho.S_MOD_INIT_FUNC_POINTERS => return 0x1,
macho.S_MOD_TERM_FUNC_POINTERS => return 0x2,
macho.S_ZEROFILL => return 0xf,
macho.S_THREAD_LOCAL_REGULAR => return 0xd,
macho.S_THREAD_LOCAL_ZEROFILL => return 0xe,
else => if (mem.eql(u8, "__eh_frame", header.sectName()))
return 0xf
else
return 0x3,
}
}

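Among the precedences above, __thread_data (0xd) and thread-local zerofill (0xe) sort just ahead of plain zerofill (0xf), keeping the TLS sections adjacent at the tail of the segment, presumably so the TLS initializer image stays contiguous for dyld. A spot check of the ordering:

const std = @import("std");

test "TLS sections sort just before zerofill" {
    // Hypothetical precedences as returned by getSectionPrecedence.
    const thread_data: u4 = 0xd;
    const thread_zerofill: u4 = 0xe;
    const zerofill: u4 = 0xf;
    try std.testing.expect(thread_data < thread_zerofill);
    try std.testing.expect(thread_zerofill < zerofill);
}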
pub fn getGlobalSymbol(self: *MachO, name: []const u8, lib_name: ?[]const u8) !u32 {
_ = lib_name;
const gpa = self.base.allocator;
@ -3474,8 +3585,8 @@ const SymtabCtx = struct {

fn writeDysymtab(self: *MachO, ctx: SymtabCtx) !void {
const gpa = self.base.allocator;
const nstubs = @intCast(u32, self.stubs_table.count());
const ngot_entries = @intCast(u32, self.got_entries_table.count());
const nstubs = @intCast(u32, self.stubs_table.lookup.count());
const ngot_entries = @intCast(u32, self.got_table.lookup.count());
const nindirectsyms = nstubs * 2 + ngot_entries;
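// Note on the arithmetic: every stub occupies two indirect-symbol-table
// slots, one for its __stubs entry and one for its __la_symbol_ptr entry
// (both written out below), while each GOT slot occupies exactly one;
// hence nstubs * 2 + ngot_entries.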
const iextdefsym = ctx.nlocalsym;
const iundefsym = iextdefsym + ctx.nextdefsym;
@ -3497,7 +3608,7 @@ fn writeDysymtab(self: *MachO, ctx: SymtabCtx) !void {
if (self.stubs_section_index) |sect_id| {
const stubs = &self.sections.items(.header)[sect_id];
stubs.reserved1 = 0;
for (self.stubs.items) |entry| {
for (self.stubs_table.entries.items) |entry| {
if (entry.sym_index == 0) continue;
const target_sym = self.getSymbol(entry.target);
assert(target_sym.undf());
@ -3508,7 +3619,7 @@ fn writeDysymtab(self: *MachO, ctx: SymtabCtx) !void {
if (self.got_section_index) |sect_id| {
const got = &self.sections.items(.header)[sect_id];
got.reserved1 = nstubs;
for (self.got_entries.items) |entry| {
for (self.got_table.entries.items) |entry| {
if (entry.sym_index == 0) continue;
const target_sym = self.getSymbol(entry.target);
if (target_sym.undf()) {
@ -3522,7 +3633,7 @@ fn writeDysymtab(self: *MachO, ctx: SymtabCtx) !void {
if (self.la_symbol_ptr_section_index) |sect_id| {
const la_symbol_ptr = &self.sections.items(.header)[sect_id];
la_symbol_ptr.reserved1 = nstubs + ngot_entries;
for (self.stubs.items) |entry| {
for (self.stubs_table.entries.items) |entry| {
if (entry.sym_index == 0) continue;
const target_sym = self.getSymbol(entry.target);
assert(target_sym.undf());
@ -3593,6 +3704,10 @@ fn writeHeader(self: *MachO, ncmds: u32, sizeofcmds: u32) !void {
var header: macho.mach_header_64 = .{};
header.flags = macho.MH_NOUNDEFS | macho.MH_DYLDLINK | macho.MH_PIE | macho.MH_TWOLEVEL;

if (!self.base.options.single_threaded) {
header.flags |= macho.MH_HAS_TLV_DESCRIPTORS;
}

switch (self.base.options.target.cpu.arch) {
.aarch64 => {
header.cputype = macho.CPU_TYPE_ARM64;
@ -3617,12 +3732,6 @@ fn writeHeader(self: *MachO, ncmds: u32, sizeofcmds: u32) !void {
else => unreachable,
}

if (self.getSectionByName("__DATA", "__thread_vars")) |sect_id| {
if (self.sections.items(.header)[sect_id].size > 0) {
header.flags |= macho.MH_HAS_TLV_DESCRIPTORS;
}
}

header.ncmds = ncmds;
header.sizeofcmds = sizeofcmds;

@ -3802,8 +3911,7 @@ pub fn getSymbol(self: *const MachO, sym_with_loc: SymbolWithLoc) macho.nlist_64

/// Returns name of the symbol described by `sym_with_loc` descriptor.
pub fn getSymbolName(self: *const MachO, sym_with_loc: SymbolWithLoc) []const u8 {
assert(sym_with_loc.file == null);
const sym = self.locals.items[sym_with_loc.sym_index];
const sym = self.getSymbol(sym_with_loc);
return self.strtab.get(sym.n_strx).?;
}

@ -3867,20 +3975,6 @@ pub fn getAtomIndexForSymbol(self: *MachO, sym_with_loc: SymbolWithLoc) ?Atom.In
return self.atom_by_index_table.get(sym_with_loc.sym_index);
}

/// Returns GOT atom that references `sym_with_loc` if one exists.
/// Returns null otherwise.
pub fn getGotAtomIndexForSymbol(self: *MachO, sym_with_loc: SymbolWithLoc) ?Atom.Index {
const got_index = self.got_entries_table.get(sym_with_loc) orelse return null;
return self.got_entries.items[got_index].getAtomIndex(self);
}

/// Returns stubs atom that references `sym_with_loc` if one exists.
/// Returns null otherwise.
pub fn getStubsAtomIndexForSymbol(self: *MachO, sym_with_loc: SymbolWithLoc) ?Atom.Index {
const stubs_index = self.stubs_table.get(sym_with_loc) orelse return null;
return self.stubs.items[stubs_index].getAtomIndex(self);
}

/// Returns symbol location corresponding to the set entrypoint.
/// Asserts output mode is executable.
pub fn getEntryPoint(self: MachO) error{MissingMainEntrypoint}!SymbolWithLoc {
@ -4227,37 +4321,13 @@ pub fn logSymtab(self: *MachO) void {
}

log.debug("GOT entries:", .{});
for (self.got_entries.items, 0..) |entry, i| {
const atom_sym = entry.getSymbol(self);
const target_sym = self.getSymbol(entry.target);
if (target_sym.undf()) {
log.debug("  {d}@{x} => import('{s}')", .{
i,
atom_sym.n_value,
self.getSymbolName(entry.target),
});
} else {
log.debug("  {d}@{x} => local(%{d}) in object({?d}) {s}", .{
i,
atom_sym.n_value,
entry.target.sym_index,
entry.target.file,
logSymAttributes(target_sym, &buf),
});
}
}
log.debug("{}", .{self.got_table.fmtDebug(self)});

log.debug("stubs entries:", .{});
for (self.stubs.items, 0..) |entry, i| {
const target_sym = self.getSymbol(entry.target);
const atom_sym = entry.getSymbol(self);
assert(target_sym.undf());
log.debug("  {d}@{x} => import('{s}')", .{
i,
atom_sym.n_value,
self.getSymbolName(entry.target),
});
}
log.debug("{}", .{self.stubs_table.fmtDebug(self)});

log.debug("threadlocal entries:", .{});
log.debug("{}", .{self.tlv_table.fmtDebug(self)});
}

pub fn logAtoms(self: *MachO) void {

@ -14,7 +14,7 @@ const trace = @import("../../tracy.zig").trace;
const Allocator = mem.Allocator;
const Arch = std.Target.Cpu.Arch;
const MachO = @import("../MachO.zig");
const Relocation = @import("Relocation.zig");
pub const Relocation = @import("Relocation.zig");
const SymbolWithLoc = MachO.SymbolWithLoc;

/// Each decl always gets a local symbol with the fully qualified name.
@ -113,25 +113,19 @@ pub fn freeListEligible(self: Atom, macho_file: *MachO) bool {
}

pub fn addRelocation(macho_file: *MachO, atom_index: Index, reloc: Relocation) !void {
return addRelocations(macho_file, atom_index, 1, .{reloc});
return addRelocations(macho_file, atom_index, &[_]Relocation{reloc});
}

pub fn addRelocations(
macho_file: *MachO,
atom_index: Index,
comptime count: comptime_int,
relocs: [count]Relocation,
) !void {
pub fn addRelocations(macho_file: *MachO, atom_index: Index, relocs: []const Relocation) !void {
const gpa = macho_file.base.allocator;
const target = macho_file.base.options.target;
const gop = try macho_file.relocs.getOrPut(gpa, atom_index);
if (!gop.found_existing) {
gop.value_ptr.* = .{};
}
try gop.value_ptr.ensureUnusedCapacity(gpa, count);
try gop.value_ptr.ensureUnusedCapacity(gpa, relocs.len);
for (relocs) |reloc| {
log.debug("  (adding reloc of type {s} to target %{d})", .{
reloc.fmtType(target),
@tagName(reloc.type),
reloc.target.sym_index,
});
gop.value_ptr.appendAssumeCapacity(reloc);

@ -226,26 +226,20 @@ pub fn flushModule(self: *DebugSymbols, macho_file: *MachO) !void {

for (self.relocs.items) |*reloc| {
const sym = switch (reloc.type) {
.direct_load => macho_file.getSymbol(.{ .sym_index = reloc.target, .file = null }),
.direct_load => macho_file.getSymbol(.{ .sym_index = reloc.target }),
.got_load => blk: {
const got_index = macho_file.got_entries_table.get(.{
.sym_index = reloc.target,
.file = null,
}).?;
const got_entry = macho_file.got_entries.items[got_index];
const got_index = macho_file.got_table.lookup.get(.{ .sym_index = reloc.target }).?;
const got_entry = macho_file.got_table.entries.items[got_index];
break :blk got_entry.getSymbol(macho_file);
},
};
if (sym.n_value == reloc.prev_vaddr) continue;

const sym_name = switch (reloc.type) {
.direct_load => macho_file.getSymbolName(.{ .sym_index = reloc.target, .file = null }),
.direct_load => macho_file.getSymbolName(.{ .sym_index = reloc.target }),
.got_load => blk: {
const got_index = macho_file.got_entries_table.get(.{
.sym_index = reloc.target,
.file = null,
}).?;
const got_entry = macho_file.got_entries.items[got_index];
const got_index = macho_file.got_table.lookup.get(.{ .sym_index = reloc.target }).?;
const got_entry = macho_file.got_table.entries.items[got_index];
break :blk got_entry.getName(macho_file);
},
};

@ -1,19 +1,7 @@
const Relocation = @This();
//! Relocation used by the self-hosted backends to instruct the linker where and how to
//! fixup the values when flushing the contents to file and/or memory.

const std = @import("std");
const aarch64 = @import("../../arch/aarch64/bits.zig");
const assert = std.debug.assert;
const log = std.log.scoped(.link);
const macho = std.macho;
const math = std.math;
const mem = std.mem;
const meta = std.meta;

const Atom = @import("Atom.zig");
const MachO = @import("../MachO.zig");
const SymbolWithLoc = MachO.SymbolWithLoc;

type: u4,
type: Type,
target: SymbolWithLoc,
offset: u32,
addend: i64,
@ -21,39 +9,55 @@ pcrel: bool,
length: u2,
dirty: bool = true,

pub const Type = enum {
// x86, x86_64
/// RIP-relative displacement to a GOT pointer
got,
/// RIP-relative displacement
signed,
/// RIP-relative displacement to GOT pointer to TLV thunk
tlv,

// aarch64
/// PC-relative distance to target page in GOT section
got_page,
/// Offset to a GOT pointer relative to the start of a page in GOT section
got_pageoff,
/// PC-relative distance to target page in a section
page,
/// Offset to a pointer relative to the start of a page in a section
pageoff,

// common
/// PC/RIP-relative displacement B/BL/CALL
branch,
/// Absolute pointer value
unsigned,
/// Relative offset to TLV initializer
tlv_initializer,
};
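Type replaces the raw @enumToInt values that used to live in `type: u4`. For reference, the abstract tags correspond to the native Mach-O relocations the old code spelled out; a hedged sketch of the arm64 half of that mapping (hypothetical helper, with `tlv_initializer` being internal to the incremental linker and having no native counterpart):

fn toNativeArm64(t: Type) ?macho.reloc_type_arm64 {
    return switch (t) {
        .branch => .ARM64_RELOC_BRANCH26,
        .page => .ARM64_RELOC_PAGE21,
        .pageoff => .ARM64_RELOC_PAGEOFF12,
        .got_page => .ARM64_RELOC_GOT_LOAD_PAGE21,
        .got_pageoff => .ARM64_RELOC_GOT_LOAD_PAGEOFF12,
        .unsigned => .ARM64_RELOC_UNSIGNED,
        .got, .signed, .tlv => null, // x86_64-only spellings.
        .tlv_initializer => null, // Linker-internal, resolved in-place.
    };
}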
/// Returns true if and only if the reloc is dirty AND the target address is available.
pub fn isResolvable(self: Relocation, macho_file: *MachO) bool {
_ = self.getTargetAtomIndex(macho_file) orelse return false;
return self.dirty;
}

pub fn fmtType(self: Relocation, target: std.Target) []const u8 {
switch (target.cpu.arch) {
.aarch64 => return @tagName(@intToEnum(macho.reloc_type_arm64, self.type)),
.x86_64 => return @tagName(@intToEnum(macho.reloc_type_x86_64, self.type)),
else => unreachable,
}
}

pub fn getTargetAtomIndex(self: Relocation, macho_file: *MachO) ?Atom.Index {
switch (macho_file.base.options.target.cpu.arch) {
.aarch64 => switch (@intToEnum(macho.reloc_type_arm64, self.type)) {
.ARM64_RELOC_GOT_LOAD_PAGE21,
.ARM64_RELOC_GOT_LOAD_PAGEOFF12,
.ARM64_RELOC_POINTER_TO_GOT,
=> return macho_file.getGotAtomIndexForSymbol(self.target),
else => {},
return switch (self.type) {
.got, .got_page, .got_pageoff => macho_file.got_table.getAtomIndex(macho_file, self.target),
.tlv => {
const thunk_atom_index = macho_file.tlv_table.getAtomIndex(macho_file, self.target) orelse
return null;
const thunk_atom = macho_file.getAtom(thunk_atom_index);
return macho_file.got_table.getAtomIndex(macho_file, thunk_atom.getSymbolWithLoc());
},
.x86_64 => switch (@intToEnum(macho.reloc_type_x86_64, self.type)) {
.X86_64_RELOC_GOT,
.X86_64_RELOC_GOT_LOAD,
=> return macho_file.getGotAtomIndexForSymbol(self.target),
else => {},
},
else => unreachable,
}
if (macho_file.getStubsAtomIndexForSymbol(self.target)) |stubs_atom| return stubs_atom;
return macho_file.getAtomIndexForSymbol(self.target);
.branch => if (macho_file.stubs_table.getAtomIndex(macho_file, self.target)) |index|
index
else
macho_file.getAtomIndexForSymbol(self.target),
else => macho_file.getAtomIndexForSymbol(self.target),
};
}

pub fn resolve(self: Relocation, macho_file: *MachO, atom_index: Atom.Index, code: []u8) void {
@ -64,13 +68,22 @@ pub fn resolve(self: Relocation, macho_file: *MachO, atom_index: Atom.Index, cod

const target_atom_index = self.getTargetAtomIndex(macho_file).?; // Oops, you didn't check if the relocation can be resolved with isResolvable().
const target_atom = macho_file.getAtom(target_atom_index);
const target_addr = @intCast(i64, target_atom.getSymbol(macho_file).n_value) + self.addend;

const target_addr: i64 = switch (self.type) {
.tlv_initializer => blk: {
assert(self.addend == 0); // Addend here makes no sense.
const header = macho_file.sections.items(.header)[macho_file.thread_data_section_index.?];
const target_sym = target_atom.getSymbol(macho_file);
break :blk @intCast(i64, target_sym.n_value - header.addr);
},
else => @intCast(i64, target_atom.getSymbol(macho_file).n_value) + self.addend,
};

log.debug("  ({x}: [() => 0x{x} ({s})) ({s})", .{
source_addr,
target_addr,
macho_file.getSymbolName(self.target),
self.fmtType(macho_file.base.options.target),
@tagName(self.type),
});

switch (arch) {
@ -81,18 +94,9 @@ pub fn resolve(self: Relocation, macho_file: *MachO, atom_index: Atom.Index, cod
}

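For `.tlv_initializer` the resolved value is not an absolute address: it is the initializer's offset from the start of __thread_data, the same offset dyld later adds to each thread's TLS block base. A worked instance with hypothetical addresses:

test "tlv_initializer stores a section-relative offset" {
    const thread_data_addr: u64 = 0x100008000; // __thread_data section start (hypothetical).
    const init_sym_value: u64 = 0x100008010; // foo.counter$tlv$init (hypothetical).
    const word = @intCast(i64, init_sym_value - thread_data_addr);
    try @import("std").testing.expectEqual(@as(i64, 0x10), word);
}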
fn resolveAarch64(self: Relocation, source_addr: u64, target_addr: i64, code: []u8) void {
const rel_type = @intToEnum(macho.reloc_type_arm64, self.type);
if (rel_type == .ARM64_RELOC_UNSIGNED) {
return switch (self.length) {
2 => mem.writeIntLittle(u32, code[self.offset..][0..4], @truncate(u32, @bitCast(u64, target_addr))),
3 => mem.writeIntLittle(u64, code[self.offset..][0..8], @bitCast(u64, target_addr)),
else => unreachable,
};
}

var buffer = code[self.offset..][0..4];
switch (rel_type) {
.ARM64_RELOC_BRANCH26 => {
var buffer = code[self.offset..];
switch (self.type) {
.branch => {
const displacement = math.cast(
i28,
@intCast(i64, target_addr) - @intCast(i64, source_addr),
@ -101,15 +105,12 @@ fn resolveAarch64(self: Relocation, source_addr: u64, target_addr: i64, code: []
.unconditional_branch_immediate = mem.bytesToValue(meta.TagPayload(
aarch64.Instruction,
aarch64.Instruction.unconditional_branch_immediate,
), buffer),
), buffer[0..4]),
};
inst.unconditional_branch_immediate.imm26 = @truncate(u26, @bitCast(u28, displacement >> 2));
mem.writeIntLittle(u32, buffer, inst.toU32());
mem.writeIntLittle(u32, buffer[0..4], inst.toU32());
},
.ARM64_RELOC_PAGE21,
.ARM64_RELOC_GOT_LOAD_PAGE21,
.ARM64_RELOC_TLVP_LOAD_PAGE21,
=> {
.page, .got_page => {
const source_page = @intCast(i32, source_addr >> 12);
const target_page = @intCast(i32, target_addr >> 12);
const pages = @bitCast(u21, @intCast(i21, target_page - source_page));
@ -117,31 +118,29 @@ fn resolveAarch64(self: Relocation, source_addr: u64, target_addr: i64, code: []
.pc_relative_address = mem.bytesToValue(meta.TagPayload(
aarch64.Instruction,
aarch64.Instruction.pc_relative_address,
), buffer),
), buffer[0..4]),
};
inst.pc_relative_address.immhi = @truncate(u19, pages >> 2);
inst.pc_relative_address.immlo = @truncate(u2, pages);
mem.writeIntLittle(u32, buffer, inst.toU32());
mem.writeIntLittle(u32, buffer[0..4], inst.toU32());
},
.ARM64_RELOC_PAGEOFF12,
.ARM64_RELOC_GOT_LOAD_PAGEOFF12,
=> {
.pageoff, .got_pageoff => {
const narrowed = @truncate(u12, @intCast(u64, target_addr));
if (isArithmeticOp(buffer)) {
if (isArithmeticOp(buffer[0..4])) {
var inst = aarch64.Instruction{
.add_subtract_immediate = mem.bytesToValue(meta.TagPayload(
aarch64.Instruction,
aarch64.Instruction.add_subtract_immediate,
), buffer),
), buffer[0..4]),
};
inst.add_subtract_immediate.imm12 = narrowed;
mem.writeIntLittle(u32, buffer, inst.toU32());
mem.writeIntLittle(u32, buffer[0..4], inst.toU32());
} else {
var inst = aarch64.Instruction{
.load_store_register = mem.bytesToValue(meta.TagPayload(
aarch64.Instruction,
aarch64.Instruction.load_store_register,
), buffer),
), buffer[0..4]),
};
const offset: u12 = blk: {
if (inst.load_store_register.size == 0) {
@ -157,89 +156,25 @@ fn resolveAarch64(self: Relocation, source_addr: u64, target_addr: i64, code: []
}
};
inst.load_store_register.offset = offset;
mem.writeIntLittle(u32, buffer, inst.toU32());
mem.writeIntLittle(u32, buffer[0..4], inst.toU32());
}
},
.ARM64_RELOC_TLVP_LOAD_PAGEOFF12 => {
const RegInfo = struct {
rd: u5,
rn: u5,
size: u2,
};
const reg_info: RegInfo = blk: {
if (isArithmeticOp(buffer)) {
const inst = mem.bytesToValue(meta.TagPayload(
aarch64.Instruction,
aarch64.Instruction.add_subtract_immediate,
), buffer);
break :blk .{
.rd = inst.rd,
.rn = inst.rn,
.size = inst.sf,
};
} else {
const inst = mem.bytesToValue(meta.TagPayload(
aarch64.Instruction,
aarch64.Instruction.load_store_register,
), buffer);
break :blk .{
.rd = inst.rt,
.rn = inst.rn,
.size = inst.size,
};
}
};
const narrowed = @truncate(u12, @intCast(u64, target_addr));
var inst = aarch64.Instruction{
.add_subtract_immediate = .{
.rd = reg_info.rd,
.rn = reg_info.rn,
.imm12 = narrowed,
.sh = 0,
.s = 0,
.op = 0,
.sf = @truncate(u1, reg_info.size),
},
};
mem.writeIntLittle(u32, buffer, inst.toU32());
.tlv_initializer, .unsigned => switch (self.length) {
2 => mem.writeIntLittle(u32, buffer[0..4], @truncate(u32, @bitCast(u64, target_addr))),
3 => mem.writeIntLittle(u64, buffer[0..8], @bitCast(u64, target_addr)),
else => unreachable,
},
.ARM64_RELOC_POINTER_TO_GOT => {
const result = @intCast(i32, @intCast(i64, target_addr) - @intCast(i64, source_addr));
mem.writeIntLittle(i32, buffer, result);
},
.ARM64_RELOC_SUBTRACTOR => unreachable,
.ARM64_RELOC_ADDEND => unreachable,
.ARM64_RELOC_UNSIGNED => unreachable,
.got, .signed, .tlv => unreachable, // Invalid target architecture.
}
}

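ADRP materializes bits [32:12] of a PC-relative address, so the page/pageoff pair splits the work: the page delta goes in the ADRP immediate and the low 12 bits in the following add or load. A spot check of the arithmetic above, with hypothetical addresses:

test "adrp page delta" {
    const std = @import("std");
    const source_addr: u64 = 0x100003f80; // The ADRP instruction itself (hypothetical).
    const target_addr: u64 = 0x100008010; // A __thread_vars descriptor, say (hypothetical).
    const pages = @intCast(i21, @intCast(i32, target_addr >> 12) - @intCast(i32, source_addr >> 12));
    const pageoff = @truncate(u12, target_addr);
    try std.testing.expectEqual(@as(i21, 5), pages); // 0x100008 - 0x100003
    try std.testing.expectEqual(@as(u12, 0x10), pageoff);
}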
fn resolveX8664(self: Relocation, source_addr: u64, target_addr: i64, code: []u8) void {
const rel_type = @intToEnum(macho.reloc_type_x86_64, self.type);
switch (rel_type) {
.X86_64_RELOC_BRANCH,
.X86_64_RELOC_GOT,
.X86_64_RELOC_GOT_LOAD,
.X86_64_RELOC_TLV,
=> {
switch (self.type) {
.branch, .got, .tlv, .signed => {
const displacement = @intCast(i32, @intCast(i64, target_addr) - @intCast(i64, source_addr) - 4);
mem.writeIntLittle(u32, code[self.offset..][0..4], @bitCast(u32, displacement));
},
.X86_64_RELOC_SIGNED,
.X86_64_RELOC_SIGNED_1,
.X86_64_RELOC_SIGNED_2,
.X86_64_RELOC_SIGNED_4,
=> {
const correction: u3 = switch (rel_type) {
.X86_64_RELOC_SIGNED => 0,
.X86_64_RELOC_SIGNED_1 => 1,
.X86_64_RELOC_SIGNED_2 => 2,
.X86_64_RELOC_SIGNED_4 => 4,
else => unreachable,
};
const displacement = @intCast(i32, target_addr - @intCast(i64, source_addr + correction + 4));
mem.writeIntLittle(u32, code[self.offset..][0..4], @bitCast(u32, displacement));
},
.X86_64_RELOC_UNSIGNED => {
.tlv_initializer, .unsigned => {
switch (self.length) {
2 => {
mem.writeIntLittle(u32, code[self.offset..][0..4], @truncate(u32, @bitCast(u64, target_addr)));
@ -250,7 +185,7 @@ fn resolveX8664(self: Relocation, source_addr: u64, target_addr: i64, code: []u8
else => unreachable,
}
},
.X86_64_RELOC_SUBTRACTOR => unreachable,
.got_page, .got_pageoff, .page, .pageoff => unreachable, // Invalid target architecture.
}
}

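On x86_64 every PC-relative fixup above collapses to a signed 32-bit displacement measured from the end of the 4-byte immediate, hence the `- 4` (the old SIGNED_1/2/4 variants folded an extra 1, 2, or 4 byte correction into the same formula). For example, with hypothetical addresses:

test "rip-relative displacement" {
    const std = @import("std");
    const source_addr: u64 = 0x100000f00; // Address of the 4-byte immediate (hypothetical).
    const target_addr: i64 = 0x100000f80; // A GOT slot, say (hypothetical).
    const disp = @intCast(i32, target_addr - @intCast(i64, source_addr) - 4);
    try std.testing.expectEqual(@as(i32, 0x7c), disp);
}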
@ -258,3 +193,18 @@ inline fn isArithmeticOp(inst: *const [4]u8) bool {
const group_decode = @truncate(u5, inst[3]);
return ((group_decode >> 2) == 4);
}

const Relocation = @This();

const std = @import("std");
const aarch64 = @import("../../arch/aarch64/bits.zig");
const assert = std.debug.assert;
const log = std.log.scoped(.link);
const macho = std.macho;
const math = std.math;
const mem = std.mem;
const meta = std.meta;

const Atom = @import("Atom.zig");
const MachO = @import("../MachO.zig");
const SymbolWithLoc = MachO.SymbolWithLoc;