zld: resolve target addresses for relocs
parent b667fe2c62
commit db44a7803f
@@ -191,11 +191,13 @@ pub fn readLoadCommands(self: *Object, reader: anytype) !void {
 }
 
 pub fn parseSections(self: *Object) !void {
+    log.warn("parsing sections in {s}", .{self.name.?});
     const seg = self.load_commands.items[self.segment_cmd_index.?].Segment;
 
     try self.sections.ensureCapacity(self.allocator, seg.sections.items.len);
 
     for (seg.sections.items) |sect| {
+        log.warn("parsing section '{s},{s}'", .{ parseName(&sect.segname), parseName(&sect.sectname) });
         // Read sections' code
         var code = try self.allocator.alloc(u8, sect.size);
         _ = try self.file.?.preadAll(code, sect.offset);
@@ -215,7 +217,7 @@ pub fn parseSections(self: *Object) !void {
 
             break :relocs try reloc.parse(
                 self.allocator,
-                &section.code,
+                section.code,
                 mem.bytesAsSlice(macho.relocation_info, raw_relocs),
             );
         } else null;
 
@@ -16,6 +16,7 @@ const Allocator = mem.Allocator;
 const Archive = @import("Archive.zig");
 const CodeSignature = @import("CodeSignature.zig");
 const Object = @import("Object.zig");
+const Relocation = @import("reloc.zig").Relocation;
 const Symbol = @import("Symbol.zig");
 const Trie = @import("Trie.zig");
 
@@ -77,7 +78,7 @@ symtab: std.StringArrayHashMapUnmanaged(Symbol) = .{},
 strtab: std.ArrayListUnmanaged(u8) = .{},
 
 threadlocal_offsets: std.ArrayListUnmanaged(u64) = .{},
 rebases: std.ArrayListUnmanaged(Pointer) = .{},
 local_rebases: std.ArrayListUnmanaged(Pointer) = .{},
 stubs: std.StringArrayHashMapUnmanaged(u32) = .{},
 got_entries: std.StringArrayHashMapUnmanaged(GotEntry) = .{},
 
@@ -186,7 +187,7 @@ pub fn init(allocator: *Allocator) Zld {
 
 pub fn deinit(self: *Zld) void {
     self.threadlocal_offsets.deinit(self.allocator);
     self.rebases.deinit(self.allocator);
     self.local_rebases.deinit(self.allocator);
 
     for (self.stubs.items()) |entry| {
         self.allocator.free(entry.key);
@@ -280,9 +281,8 @@ pub fn link(self: *Zld, files: []const []const u8, out_path: []const u8) !void {
     self.allocateLinkeditSegment();
     try self.allocateSymbols();
     try self.allocateStubsAndGotEntries();
-    self.printDebug();
-    // try self.writeStubHelperCommon();
-    // try self.resolveRelocsAndWriteSections();
+    try self.writeStubHelperCommon();
+    try self.resolveRelocsAndWriteSections();
     // try self.flush();
 }
 
@@ -1051,7 +1051,7 @@ fn writeStubHelperCommon(self: *Zld) !void {
             code[9] = 0xff;
             code[10] = 0x25;
             {
-                const dyld_stub_binder = self.nonlazy_imports.get("dyld_stub_binder").?;
+                const dyld_stub_binder = self.got_entries.get("dyld_stub_binder").?;
                 const addr = (got.addr + dyld_stub_binder.index * @sizeOf(u64));
                 const displacement = try math.cast(u32, addr - stub_helper.addr - code_size);
                 mem.writeIntLittle(u32, code[11..], displacement);
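For orientation: the `0xff 0x25` bytes written just above encode an x86_64 indirect `jmp` through a RIP-relative address, so the 32-bit displacement that follows is measured from the end of the instruction to the GOT slot being bound. A minimal sketch of that arithmetic with made-up addresses (not the linker's real layout):

```zig
const std = @import("std");
const math = std.math;

test "RIP-relative displacement to a GOT slot" {
    // Hypothetical layout: the GOT slot lives above the stub helper.
    const stub_helper_addr: u64 = 0x100001000; // start of the jmp instruction
    const code_size: u64 = 15; // bytes from the instruction start to its end
    const got_slot_addr: u64 = 0x100002000;

    // x86_64 RIP-relative addressing is measured from the *next* instruction,
    // i.e. stub_helper_addr + code_size.
    const displacement = try math.cast(u32, got_slot_addr - stub_helper_addr - code_size);
    std.debug.assert(displacement == 0xFF1);
}
```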
@@ -1095,7 +1095,7 @@ fn writeStubHelperCommon(self: *Zld) !void {
             code[10] = 0xbf;
             code[11] = 0xa9;
             binder_blk_outer: {
-                const dyld_stub_binder = self.nonlazy_imports.get("dyld_stub_binder").?;
+                const dyld_stub_binder = self.got_entries.get("dyld_stub_binder").?;
                 const this_addr = stub_helper.addr + 3 * @sizeOf(u32);
                 const target_addr = (got.addr + dyld_stub_binder.index * @sizeOf(u64));
                 binder_blk: {
@@ -1113,7 +1113,6 @@ fn writeStubHelperCommon(self: *Zld) !void {
                     const new_this_addr = this_addr + @sizeOf(u32);
                     const displacement = math.divExact(u64, target_addr - new_this_addr, 4) catch |_| break :binder_blk;
                     const literal = math.cast(u18, displacement) catch |_| break :binder_blk;
-                    log.debug("2: disp=0x{x}, literal=0x{x}", .{ displacement, literal });
                     // Pad with nop to please division.
                     // nop
                     mem.writeIntLittle(u32, code[12..16], aarch64.Instruction.nop().toU32());
@@ -1149,8 +1148,8 @@ fn writeStubHelperCommon(self: *Zld) !void {
         }
     };
 
-    for (self.lazy_imports.items()) |_, i| {
-        const index = @intCast(u32, i);
+    for (self.stubs.items()) |entry| {
+        const index = entry.value;
         try self.writeLazySymbolPointer(index);
         try self.writeStub(index);
         try self.writeStubInStubHelper(index);
@@ -1458,6 +1457,7 @@ fn resolveStubsAndGotEntries(self: *Zld) !void {
 
         if (self.got_entries.contains(sym_name)) continue;
 
+        // TODO clean this up
        const is_import = self.symtab.get(sym_name).?.tag == .Import;
        var name = try self.allocator.dupe(u8, sym_name);
        const index = @intCast(u32, self.got_entries.items().len);
@@ -1509,8 +1509,7 @@ fn resolveStubsAndGotEntries(self: *Zld) !void {
 
 fn resolveRelocsAndWriteSections(self: *Zld) !void {
     for (self.objects.items) |object, object_id| {
-        log.debug("\n\n", .{});
-        log.debug("relocating object {s}", .{object.name});
+        log.warn("relocating object {s}", .{object.name});
 
         for (object.sections.items) |sect, source_sect_id| {
             const segname = parseName(&sect.inner.segname);
@@ -1529,78 +1528,86 @@ fn resolveRelocsAndWriteSections(self: *Zld) !void {
             const target_sect_addr = target_sect.addr + target_mapping.offset;
             const target_sect_off = target_sect.offset + target_mapping.offset;
 
-            for (sect.relocs) |reloc| {
-                const source_addr = target_sect_addr + reloc.offset;
-
-                var args: Relocation.ResolveArgs = .{
-                    .source_addr = source_addr,
-                    .target_addr = undefined,
-                };
-
-                if (reloc.cast(Relocation.Unsigned)) |unsigned| {
-                    // TODO resolve target addr
-
-                    if (unsigned.subtractor) |subtractor| {
-                        args.subtractor = undefined; // TODO resolve
-                    }
-
-                    rebases: {
-                        var hit: bool = false;
-                        if (target_mapping.target_seg_id == self.data_segment_cmd_index.?) {
-                            if (self.data_section_index) |index| {
-                                if (index == target_mapping.target_sect_id) hit = true;
-                            }
-                        }
-                        if (target_mapping.target_seg_id == self.data_const_segment_cmd_index.?) {
-                            if (self.data_const_section_index) |index| {
-                                if (index == target_mapping.target_sect_id) hit = true;
-                            }
-                        }
-
-                        if (!hit) break :rebases;
-
-                        try self.local_rebases.append(self.allocator, .{
-                            .offset = source_addr - target_seg.inner.vmaddr,
-                            .segment_id = target_mapping.target_seg_id,
-                        });
-                    }
-                    // TLV is handled via a separate offset mechanism.
-                    // Calculate the offset to the initializer.
-                    if (target_sect.flags == macho.S_THREAD_LOCAL_VARIABLES) tlv: {
-                        const sym = object.symtab.items[reloc.target.symbol];
-                        const sym_name = object.getString(sym.inner.n_strx);
-
-                        // TODO we don't want to save offset to tlv_bootstrap
-                        if (mem.eql(u8, sym_name, "__tlv_boostrap")) break :tlv;
-
-                        const base_addr = blk: {
-                            if (self.tlv_data_section_index) |index| {
-                                const tlv_data = target_seg.sections.items[index];
-                                break :blk tlv_data.addr;
-                            } else {
-                                const tlv_bss = target_seg.sections.items[self.tlv_bss_section_index.?];
-                                break :blk tlv_bss.addr;
-                            }
-                        };
-                        // Since we require TLV data to always precede TLV bss section, we calculate
-                        // offsets wrt the former if it is defined; otherwise, wrt the latter.
-                        try self.threadlocal_offsets.append(self.allocator, target_addr - base_addr);
-                    }
-                } else if (reloc.cast(Relocation.GotPageOff)) |page_off| {
-                    // TODO here we need to work out the indirection to GOT.
-                } else {
-                    // TODO resolve target addr.
-                }
-
-                log.debug("{s}", .{reloc.@"type"});
-                log.debug(" | offset 0x{x}", .{reloc.offset});
-                log.debug(" | source address 0x{x}", .{args.source_addr});
-                log.debug(" | target address 0x{x}", .{args.target_addr});
-
-                try reloc.resolve(args);
-            }
+            if (sect.relocs) |relocs| {
+                for (relocs) |rel| {
+                    const source_addr = target_sect_addr + rel.offset;
+
+                    var args: Relocation.ResolveArgs = .{
+                        .source_addr = source_addr,
+                        .target_addr = undefined,
+                        .subtractor = null,
+                    };
+
+                    switch (rel.@"type") {
+                        .unsigned => {
+                            args.target_addr = try self.relocTargetAddr(@intCast(u16, object_id), rel.target);
+
+                            const unsigned = rel.cast(Relocation.Unsigned) orelse unreachable;
+                            if (unsigned.subtractor) |subtractor| {
+                                args.subtractor = try self.relocTargetAddr(@intCast(u16, object_id), subtractor);
+                            }
+
+                            rebases: {
+                                var hit: bool = false;
+                                if (target_mapping.target_seg_id == self.data_segment_cmd_index.?) {
+                                    if (self.data_section_index) |index| {
+                                        if (index == target_mapping.target_sect_id) hit = true;
+                                    }
+                                }
+                                if (target_mapping.target_seg_id == self.data_const_segment_cmd_index.?) {
+                                    if (self.data_const_section_index) |index| {
+                                        if (index == target_mapping.target_sect_id) hit = true;
+                                    }
+                                }
+
+                                if (!hit) break :rebases;
+
+                                try self.local_rebases.append(self.allocator, .{
+                                    .offset = source_addr - target_seg.inner.vmaddr,
+                                    .segment_id = target_mapping.target_seg_id,
+                                });
+                            }
+                            // TLV is handled via a separate offset mechanism.
+                            // Calculate the offset to the initializer.
+                            if (target_sect.flags == macho.S_THREAD_LOCAL_VARIABLES) tlv: {
+                                const sym = object.symtab.items[rel.target.symbol];
+                                const sym_name = object.getString(sym.inner.n_strx);
+
+                                // TODO we don't want to save offset to tlv_bootstrap
+                                if (mem.eql(u8, sym_name, "__tlv_boostrap")) break :tlv;
+
+                                const base_addr = blk: {
+                                    if (self.tlv_data_section_index) |index| {
+                                        const tlv_data = target_seg.sections.items[index];
+                                        break :blk tlv_data.addr;
+                                    } else {
+                                        const tlv_bss = target_seg.sections.items[self.tlv_bss_section_index.?];
+                                        break :blk tlv_bss.addr;
+                                    }
+                                };
+                                // Since we require TLV data to always precede TLV bss section, we calculate
+                                // offsets wrt the former if it is defined; otherwise, wrt the latter.
+                                try self.threadlocal_offsets.append(self.allocator, args.target_addr - base_addr);
+                            }
+                        },
+                        .got_page, .got_page_off => {
+                            const dc_seg = self.load_commands.items[self.data_const_segment_cmd_index.?].Segment;
+                            const got = dc_seg.sections.items[self.got_section_index.?];
+                            const sym = object.symtab.items[rel.target.symbol];
+                            const sym_name = object.getString(sym.inner.n_strx);
+                            const entry = self.got_entries.get(sym_name) orelse unreachable;
+                            args.target_addr = got.addr + entry.index * @sizeOf(u64);
+                        },
+                        else => {
+                            args.target_addr = try self.relocTargetAddr(@intCast(u16, object_id), rel.target);
+                        },
+                    }
+
+                    try rel.resolve(args);
+                }
+            }
 
-            log.debug("writing contents of '{s},{s}' section from '{s}' from 0x{x} to 0x{x}", .{
+            log.warn("writing contents of '{s},{s}' section from '{s}' from 0x{x} to 0x{x}", .{
                 segname,
                 sectname,
                 object.name,
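One detail worth calling out in the new `.got_page`/`.got_page_off` arm above: those relocations resolve against the GOT slot in the `__got` section, not against the symbol itself, and slots are laid out one 8-byte pointer per entry. A toy version of that address computation, with hypothetical values:

```zig
const std = @import("std");

test "GOT-relative relocations target the slot, not the symbol" {
    // Hypothetical __DATA_CONST,__got layout: one 8-byte slot per entry.
    const got_addr: u64 = 0x100004000;
    const entry_index: u64 = 3;

    // What ADRP/LDR pairs carrying .got_page/.got_page_off get bound against.
    const target_addr = got_addr + entry_index * @sizeOf(u64);
    std.debug.assert(target_addr == 0x100004018);
}
```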
@@ -1612,7 +1619,7 @@ fn resolveRelocsAndWriteSections(self: *Zld) !void {
                 target_sect.flags == macho.S_THREAD_LOCAL_ZEROFILL or
                 target_sect.flags == macho.S_THREAD_LOCAL_VARIABLES)
             {
-                log.debug("zeroing out '{s},{s}' from 0x{x} to 0x{x}", .{
+                log.warn("zeroing out '{s},{s}' from 0x{x} to 0x{x}", .{
                     parseName(&target_sect.segname),
                     parseName(&target_sect.sectname),
                     target_sect_off,
@@ -1630,88 +1637,60 @@ fn resolveRelocsAndWriteSections(self: *Zld) !void {
         }
     }
 
-fn relocTargetAddr(self: *Zld, object_id: u16, rel: macho.relocation_info) !u64 {
+fn relocTargetAddr(self: *Zld, object_id: u16, target: Relocation.Target) !u64 {
     const object = self.objects.items[object_id];
-    const seg = object.load_commands.items[object.segment_cmd_index.?].Segment;
-
-    const is_got: bool = is_got: {
-        switch (self.arch.?) {
-            .x86_64 => {
-                const rel_type = @intToEnum(macho.reloc_type_x86_64, rel.r_type);
-                break :is_got switch (rel_type) {
-                    .X86_64_RELOC_GOT, .X86_64_RELOC_GOT_LOAD => true,
-                    else => false,
-                };
-            },
-            .aarch64 => {
-                const rel_type = @intToEnum(macho.reloc_type_aarch64, rel.r_type);
-                break :is_got switch (rel_type) {
-                    .ARM64_RELOC_GOT_LOAD_PAGE21,
-                    .ARM64_RELOC_GOT_LOAD_PAGEOFF12,
-                    .ARM64_RELOC_POINTER_TO_GOT,
-                    => true,
-                    else => false,
-                };
-            },
-        }
-    };
-
     const target_addr = blk: {
-        if (rel.r_extern == 1) {
-            const sym = object.symtab.items[rel.r_symbolnum];
-            if (sym.isSect()) {
-                // Relocate using section offsets only.
-                const target_mapping = self.mappings.get(.{
-                    .object_id = object_id,
-                    .source_sect_id = sym.n_sect - 1,
-                }) orelse unreachable;
-                const source_sect = seg.sections.items[target_mapping.source_sect_id];
-                const target_seg = self.load_commands.items[target_mapping.target_seg_id].Segment;
-                const target_sect = target_seg.sections.items[target_mapping.target_sect_id];
-                const target_sect_addr = target_sect.addr + target_mapping.offset;
-                log.debug(" | symbol local to object", .{});
-                break :blk target_sect_addr + sym.n_value - source_sect.addr;
-            } else if (sym.isUndf()) {
-                // Relocate to either the global symbol, or an import from
-                // shared library.
-                const sym_name = object.getString(sym.n_strx);
-                if (self.globals.get(sym_name)) |glob| {
-                    break :blk glob.inner.n_value;
-                } else if (self.externs.getEntry(sym_name)) |ext| {
-                    const segment = self.load_commands.items[self.text_segment_cmd_index.?].Segment;
-                    const stubs = segment.sections.items[self.stubs_section_index.?];
-                    break :blk stubs.addr + ext.index * stubs.reserved2;
-                } else if (self.nonlazy_imports.get(sym_name)) |ext| {
-                    const segment = self.load_commands.items[self.data_const_segment_cmd_index.?].Segment;
-                    const got = segment.sections.items[self.got_section_index.?];
-                    break :blk got.addr + ext.index * @sizeOf(u64);
-                } else if (mem.eql(u8, sym_name, "__tlv_bootstrap")) {
-                    const segment = self.load_commands.items[self.data_segment_cmd_index.?].Segment;
-                    const tlv = segment.sections.items[self.tlv_section_index.?];
-                    break :blk tlv.addr;
-                } else {
-                    log.err("failed to resolve symbol '{s}' as a relocation target", .{sym_name});
-                    return error.FailedToResolveRelocationTarget;
-                }
-            } else {
-                log.err("unexpected symbol {}, {s}", .{ sym, object.getString(sym.n_strx) });
-                return error.UnexpectedSymbolWhenRelocating;
-            }
-        } else {
-            // TODO I think we need to reparse the relocation_info as scattered_relocation_info
-            // here to get the actual section plus offset into that section of the relocated
-            // symbol. Unless the fine-grained location is encoded within the cell in the code
-            // buffer?
-            const target_mapping = self.mappings.get(.{
-                .object_id = object_id,
-                .source_sect_id = @intCast(u16, rel.r_symbolnum - 1),
-            }) orelse unreachable;
-            const target_seg = self.load_commands.items[target_mapping.target_seg_id].Segment;
-            const target_sect = target_seg.sections.items[target_mapping.target_sect_id];
-            break :blk target_sect.addr + target_mapping.offset;
-        }
+        switch (target) {
+            .symbol => |sym_id| {
+                const sym = object.symtab.items[sym_id];
+                const sym_name = object.getString(sym.inner.n_strx);
+
+                switch (sym.tag) {
+                    .Stab => unreachable, // TODO is this even allowed to happen?
+                    .Local, .Weak, .Strong => {
+                        // Relocate using section offsets only.
+                        const target_mapping = self.mappings.get(.{
+                            .object_id = object_id,
+                            .source_sect_id = sym.inner.n_sect - 1,
+                        }) orelse unreachable;
+                        const source_sect = object.sections.items[target_mapping.source_sect_id];
+                        const target_seg = self.load_commands.items[target_mapping.target_seg_id].Segment;
+                        const target_sect = target_seg.sections.items[target_mapping.target_sect_id];
+                        const target_sect_addr = target_sect.addr + target_mapping.offset;
+                        log.warn(" | symbol local to object", .{});
+                        break :blk target_sect_addr + sym.inner.n_value - source_sect.inner.addr;
+                    },
+                    else => {
+                        if (self.stubs.get(sym_name)) |index| {
+                            log.warn(" | symbol stub {s}", .{sym_name});
+                            const segment = self.load_commands.items[self.text_segment_cmd_index.?].Segment;
+                            const stubs = segment.sections.items[self.stubs_section_index.?];
+                            break :blk stubs.addr + index * stubs.reserved2;
+                        } else if (mem.eql(u8, sym_name, "__tlv_bootstrap")) {
+                            const segment = self.load_commands.items[self.data_segment_cmd_index.?].Segment;
+                            const tlv = segment.sections.items[self.tlv_section_index.?];
+                            break :blk tlv.addr;
+                        } else {
+                            const global = self.symtab.get(sym_name) orelse {
+                                log.err("failed to resolve symbol '{s}' as a relocation target", .{sym_name});
+                                return error.FailedToResolveRelocationTarget;
+                            };
+                            break :blk global.inner.n_value;
+                        }
+                    },
+                }
+            },
+            .section => |sect_id| {
+                const target_mapping = self.mappings.get(.{
+                    .object_id = object_id,
+                    .source_sect_id = sect_id,
+                }) orelse unreachable;
+                const target_seg = self.load_commands.items[target_mapping.target_seg_id].Segment;
+                const target_sect = target_seg.sections.items[target_mapping.target_sect_id];
+                break :blk target_sect.addr + target_mapping.offset;
+            },
+        }
     };
 
     return target_addr;
 }
 
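The new `relocTargetAddr` keys off the parsed `Relocation.Target` instead of raw `macho.relocation_info`. A reduced model of that tagged union and the conversion the diff calls `Target.from_reloc` (field layout assumed from the hunks; the real type lives in reloc.zig):

```zig
const std = @import("std");
const macho = std.macho;

// Reduced stand-in: a relocation either points at a symbol-table entry
// (r_extern == 1) or at a section (r_extern == 0).
const Target = union(enum) {
    symbol: u32,
    section: u16,

    fn fromReloc(reloc: macho.relocation_info) Target {
        return if (reloc.r_extern == 1)
            .{ .symbol = reloc.r_symbolnum }
        else
            // For section relocs, r_symbolnum holds the 1-based section ordinal.
            .{ .section = @intCast(u16, reloc.r_symbolnum - 1) };
    }
};

test "classifying a relocation target" {
    const rel = macho.relocation_info{
        .r_address = 0,
        .r_symbolnum = 4,
        .r_pcrel = 0,
        .r_length = 3,
        .r_extern = 1,
        .r_type = 0,
    };
    std.debug.assert(Target.fromReloc(rel) == .symbol);
}
```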
@@ -3,6 +3,7 @@ const aarch64 = @import("../../codegen/aarch64.zig");
 const assert = std.debug.assert;
+const log = std.log.scoped(.reloc);
 const macho = std.macho;
 const math = std.math;
 const mem = std.mem;
 const meta = std.meta;
 
@@ -10,7 +11,7 @@ const Allocator = mem.Allocator;
 
 pub const Relocation = struct {
     @"type": Type,
-    code: *[]u8,
+    code: []u8,
     offset: u32,
     target: Target,
 
@@ -24,20 +25,26 @@ pub const Relocation = struct {
     pub const ResolveArgs = struct {
         source_addr: u64,
         target_addr: u64,
-        subtractor: i64 = undefined,
+        subtractor: ?u64,
     };
 
     pub fn resolve(base: *Relocation, args: ResolveArgs) !void {
-        switch (base.@"type") {
-            .branch => try base.cast(Branch).?.resolve(args.source_addr, args.target_addr),
-            .unsigned => try base.cast(Unsigned).?.resolve(args.target_addr, args.subtractor),
-            .page => try base.cast(Page).?.resolve(args.source_addr, args.target_addr),
-            .page_off => try base.cast(PageOff).?.resolve(args.target_addr),
-            .got_page => try base.cast(GotPage).?.resolve(args.source_addr, args.target_addr),
-            .got_page_off => try base.cast(GotPageOff).?.resolve(args.target_addr),
-            .tlvp_page => try base.cast(TlvpPage).?.resolve(args.source_addr, args.target_addr),
-            .tlvp_page_off => try base.cast(TlvpPageOff).?.resolve(args.target_addr),
-        }
+        log.warn("{s}", .{base.@"type"});
+        log.warn(" | offset 0x{x}", .{base.offset});
+        log.warn(" | source address 0x{x}", .{args.source_addr});
+        log.warn(" | target address 0x{x}", .{args.target_addr});
+        log.warn(" | subtractor address 0x{x}", .{args.subtractor});
+
+        return switch (base.@"type") {
+            .branch => @fieldParentPtr(Branch, "base", base).resolve(args.source_addr, args.target_addr),
+            .unsigned => @fieldParentPtr(Unsigned, "base", base).resolve(args.target_addr, args.subtractor),
+            .page => @fieldParentPtr(Page, "base", base).resolve(args.source_addr, args.target_addr),
+            .page_off => @fieldParentPtr(PageOff, "base", base).resolve(args.target_addr),
+            .got_page => @fieldParentPtr(GotPage, "base", base).resolve(args.source_addr, args.target_addr),
+            .got_page_off => @fieldParentPtr(GotPageOff, "base", base).resolve(args.target_addr),
+            .tlvp_page => @fieldParentPtr(TlvpPage, "base", base).resolve(args.source_addr, args.target_addr),
+            .tlvp_page_off => @fieldParentPtr(TlvpPageOff, "base", base).resolve(args.target_addr),
+        };
     }
 
     pub const Type = enum {
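`resolve` now downcasts through Zig's `@fieldParentPtr`, recovering the concrete relocation from a pointer to its embedded `base` field. A minimal standalone sketch of the pattern (0.8-era builtin signature, as used throughout this diff):

```zig
const std = @import("std");

const Base = struct {
    tag: enum { branch },
};

const Branch = struct {
    base: Base,
    displacement: i28,
};

test "@fieldParentPtr recovers the outer struct from a field pointer" {
    var branch = Branch{ .base = .{ .tag = .branch }, .displacement = 16 };
    const base: *Base = &branch.base;

    // Walk back from the embedded field to its parent struct.
    const parent = @fieldParentPtr(Branch, "base", base);
    std.debug.assert(parent.displacement == 16);
}
```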
@@ -73,9 +80,12 @@ pub const Relocation = struct {
 
         pub fn resolve(branch: Branch, source_addr: u64, target_addr: u64) !void {
             const displacement = try math.cast(i28, @intCast(i64, target_addr) - @intCast(i64, source_addr));
 
+            log.warn(" | displacement 0x{x}", .{displacement});
+
             var inst = branch.inst;
-            inst.AddSubtractImmediate.imm26 = @truncate(u26, @bitCast(u28, displacement) >> 2);
-            mem.writeIntLittle(u32, branch.base.code.*[branch.base.offset..], inst.toU32());
+            inst.UnconditionalBranchImmediate.imm26 = @truncate(u26, @bitCast(u28, displacement) >> 2);
+            mem.writeIntLittle(u32, branch.base.code[0..4], inst.toU32());
         }
     };
 
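The imm26 field of an AArch64 B/BL instruction holds a word-scaled displacement, which is why the byte displacement above is shifted right by 2 before truncation. A small worked example:

```zig
const std = @import("std");
const math = std.math;

test "encoding a B/BL word-scaled displacement" {
    const source_addr: u64 = 0x100000000;
    const target_addr: u64 = 0x100000010; // 16 bytes ahead

    // The byte displacement must fit the signed 28-bit branch range (+-128 MiB).
    const displacement = try math.cast(i28, @intCast(i64, target_addr) - @intCast(i64, source_addr));

    // Instructions are 4-byte aligned, so imm26 stores displacement / 4.
    const imm26 = @truncate(u26, @bitCast(u28, displacement) >> 2);
    std.debug.assert(imm26 == 4);
}
```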
@@ -92,22 +102,25 @@ pub const Relocation = struct {
 
         pub const base_type: Relocation.Type = .unsigned;
 
-        pub fn resolve(unsigned: Unsigned, target_addr: u64, subtractor: i64) !void {
-            const result = @intCast(i64, target_addr) - subtractor + unsigned.addend;
+        pub fn resolve(unsigned: Unsigned, target_addr: u64, subtractor: ?u64) !void {
+            const result = if (subtractor) |sub|
+                @intCast(i64, target_addr) - @intCast(i64, sub) + unsigned.addend
+            else
+                @intCast(i64, target_addr) + unsigned.addend;
 
-            log.debug(" | calculated addend 0x{x}", .{unsigned.addend});
-            log.debug(" | calculated unsigned value 0x{x}", .{result});
+            log.warn(" | calculated addend 0x{x}", .{unsigned.addend});
+            log.warn(" | calculated unsigned value 0x{x}", .{result});
 
             if (unsigned.is_64bit) {
                 mem.writeIntLittle(
                     u64,
-                    unsigned.base.code.*[unsigned.base.offset..],
+                    unsigned.base.code[0..8],
                     @bitCast(u64, result),
                 );
             } else {
                 mem.writeIntLittle(
                     u32,
-                    unsigned.base.code.*[unsigned.base.offset..],
+                    unsigned.base.code[0..4],
                     @truncate(u32, @bitCast(u64, result)),
                 );
             }
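An unsigned relocation with a subtractor encodes the difference between two addresses (the `_foo - _bar` pattern in Mach-O); without one, the cell gets an absolute address. A toy version of the arithmetic with hypothetical addresses:

```zig
const std = @import("std");

test "unsigned relocation with an optional subtractor" {
    const target_addr: u64 = 0x100001000;
    const subtractor: ?u64 = 0x100000800;
    const addend: i64 = 8;

    // With a subtractor the cell receives a delta, not an absolute address.
    const result = if (subtractor) |sub|
        @intCast(i64, target_addr) - @intCast(i64, sub) + addend
    else
        @intCast(i64, target_addr) + addend;

    std.debug.assert(result == 0x808);
}
```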
@@ -128,13 +141,14 @@ pub const Relocation = struct {
             const target_page = @intCast(i32, ta >> 12);
             const pages = @bitCast(u21, @intCast(i21, target_page - source_page));
 
-            log.debug(" | moving by {} pages", .{pages});
+            log.warn(" | calculated addend 0x{x}", .{page.addend});
+            log.warn(" | moving by {} pages", .{pages});
 
             var inst = page.inst;
             inst.PCRelativeAddress.immhi = @truncate(u19, pages >> 2);
             inst.PCRelativeAddress.immlo = @truncate(u2, pages);
 
-            mem.writeIntLittle(u32, page.base.code.*[page.base.offset..], inst.toU32());
+            mem.writeIntLittle(u32, page.base.code[0..4], inst.toU32());
         }
     };
 
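ADRP works in 4 KiB pages: the instruction adds a page delta to the PC with the low 12 bits cleared, and the 21-bit page count is split across immhi/immlo in the instruction word. A worked sketch of the computation above, with made-up addresses:

```zig
const std = @import("std");

test "ADRP page displacement and immhi/immlo split" {
    const source_addr: u64 = 0x100000FFC;
    const target_addr: u64 = 0x100008000;

    // Compare addresses with the low 12 bits dropped.
    const source_page = @intCast(i32, source_addr >> 12);
    const target_page = @intCast(i32, target_addr >> 12);
    const pages = @bitCast(u21, @intCast(i21, target_page - source_page));
    std.debug.assert(pages == 8);

    // The low two bits of the page count go to immlo, the rest to immhi.
    const immhi = @truncate(u19, pages >> 2);
    const immlo = @truncate(u2, pages);
    std.debug.assert(immhi == 2);
    std.debug.assert(immlo == 0);
}
```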
@@ -155,8 +169,8 @@ pub const Relocation = struct {
             const ta = if (page_off.addend) |a| target_addr + a else target_addr;
             const narrowed = @truncate(u12, ta);
 
-            log.debug(" | narrowed address within the page 0x{x}", .{narrowed});
-            log.debug(" | {s} opcode", .{page_off.op_kind});
+            log.warn(" | narrowed address within the page 0x{x}", .{narrowed});
+            log.warn(" | {s} opcode", .{page_off.op_kind});
 
             var inst = page_off.inst;
             if (page_off.op_kind == .arithmetic) {
@@ -178,7 +192,7 @@ pub const Relocation = struct {
                 inst.LoadStoreRegister.offset = offset;
             }
 
-            mem.writeIntLittle(u32, page_off.base.code.*[page_off.base.offset..], inst.toU32());
+            mem.writeIntLittle(u32, page_off.base.code[0..4], inst.toU32());
         }
     };
 
@@ -194,13 +208,13 @@ pub const Relocation = struct {
             const target_page = @intCast(i32, target_addr >> 12);
             const pages = @bitCast(u21, @intCast(i21, target_page - source_page));
 
-            log.debug(" | moving by {} pages", .{pages});
+            log.warn(" | moving by {} pages", .{pages});
 
             var inst = page.inst;
             inst.PCRelativeAddress.immhi = @truncate(u19, pages >> 2);
             inst.PCRelativeAddress.immlo = @truncate(u2, pages);
 
-            mem.writeIntLittle(u32, page.base.code.*[page.base.offset..], inst.toU32());
+            mem.writeIntLittle(u32, page.base.code[0..4], inst.toU32());
         }
     };
 
@@ -214,13 +228,13 @@ pub const Relocation = struct {
         pub fn resolve(page_off: GotPageOff, target_addr: u64) !void {
             const narrowed = @truncate(u12, target_addr);
 
-            log.debug(" | narrowed address within the page 0x{x}", .{narrowed});
+            log.warn(" | narrowed address within the page 0x{x}", .{narrowed});
 
             var inst = page_off.inst;
             const offset = try math.divExact(u12, narrowed, 8);
             inst.LoadStoreRegister.offset = offset;
 
-            mem.writeIntLittle(u32, page_off.base.code.*[page_off.base.offset..], inst.toU32());
+            mem.writeIntLittle(u32, page_off.base.code[0..4], inst.toU32());
         }
     };
 
@@ -236,13 +250,13 @@ pub const Relocation = struct {
             const target_page = @intCast(i32, target_addr >> 12);
             const pages = @bitCast(u21, @intCast(i21, target_page - source_page));
 
-            log.debug(" | moving by {} pages", .{pages});
+            log.warn(" | moving by {} pages", .{pages});
 
             var inst = page.inst;
             inst.PCRelativeAddress.immhi = @truncate(u19, pages >> 2);
             inst.PCRelativeAddress.immlo = @truncate(u2, pages);
 
-            mem.writeIntLittle(u32, page.base.code.*[page.base.offset..], inst.toU32());
+            mem.writeIntLittle(u32, page.base.code[0..4], inst.toU32());
         }
     };
 
@@ -258,17 +272,17 @@ pub const Relocation = struct {
         pub fn resolve(page_off: TlvpPageOff, target_addr: u64) !void {
             const narrowed = @truncate(u12, target_addr);
 
-            log.debug(" | narrowed address within the page 0x{x}", .{narrowed});
+            log.warn(" | narrowed address within the page 0x{x}", .{narrowed});
 
             var inst = page_off.inst;
             inst.AddSubtractImmediate.imm12 = narrowed;
 
-            mem.writeIntLittle(u32, page_off.base.code.*[page_off.base.offset..], inst.toU32());
+            mem.writeIntLittle(u32, page_off.base.code[0..4], inst.toU32());
         }
     };
 };
 
-pub fn parse(allocator: *Allocator, code: *[]u8, relocs: []const macho.relocation_info) ![]*Relocation {
+pub fn parse(allocator: *Allocator, code: []u8, relocs: []const macho.relocation_info) ![]*Relocation {
     var it = RelocIterator{
         .buffer = relocs,
     };
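`parse` now keeps ownership handling in one place: the `Parser` accumulates heap-allocated relocations in an `ArrayList`, and `toOwnedSlice` hands the buffer to the caller while the deferred `deinit` remains a harmless no-op on the emptied list. A usage-shaped sketch of that pattern (with a hypothetical `collectEvens` standing in for the parser):

```zig
const std = @import("std");
const Allocator = std.mem.Allocator;

fn collectEvens(allocator: *Allocator, input: []const u32) ![]u32 {
    var list = std.ArrayList(u32).init(allocator);
    // Mirrors `defer parser.deinit()` + `parser.parsed.toOwnedSlice()`:
    // deinit after toOwnedSlice only sees an empty list.
    defer list.deinit();

    for (input) |x| {
        if (x % 2 == 0) try list.append(x);
    }
    return list.toOwnedSlice();
}

test "toOwnedSlice transfers ownership out of the builder" {
    const evens = try collectEvens(std.testing.allocator, &[_]u32{ 1, 2, 3, 4 });
    defer std.testing.allocator.free(evens);
    std.debug.assert(evens.len == 2);
}
```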
@@ -280,8 +294,9 @@ pub fn parse(allocator: *Allocator, code: *[]u8, relocs: []const macho.relocation_info) ![]*Relocation {
         .parsed = std.ArrayList(*Relocation).init(allocator),
     };
     defer parser.deinit();
+    try parser.parse();
 
-    return parser.parse();
+    return parser.parsed.toOwnedSlice();
 }
 
 const RelocIterator = struct {
@@ -292,12 +307,12 @@ const RelocIterator = struct {
         self.index += 1;
         if (self.index < self.buffer.len) {
             const reloc = self.buffer[@intCast(u64, self.index)];
-            log.debug("{s}", .{@intToEnum(macho.reloc_type_arm64, reloc.r_type)});
-            log.debug(" | offset = {}", .{reloc.r_address});
-            log.debug(" | PC = {}", .{reloc.r_pcrel == 1});
-            log.debug(" | length = {}", .{reloc.r_length});
-            log.debug(" | symbolnum = {}", .{reloc.r_symbolnum});
-            log.debug(" | extern = {}", .{reloc.r_extern == 1});
+            log.warn("{s}", .{@intToEnum(macho.reloc_type_arm64, reloc.r_type)});
+            log.warn(" | offset = {}", .{reloc.r_address});
+            log.warn(" | PC = {}", .{reloc.r_pcrel == 1});
+            log.warn(" | length = {}", .{reloc.r_length});
+            log.warn(" | symbolnum = {}", .{reloc.r_symbolnum});
+            log.warn(" | extern = {}", .{reloc.r_extern == 1});
             return reloc;
         }
         return null;
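The `RelocIterator.next` change is pure logging, but the shape is worth noting: a cursor that starts at -1 and pre-increments, so look-ahead helpers can inspect the next record without consuming it. A minimal sketch of that iterator shape (not the linker's exact struct):

```zig
const std = @import("std");

const Iterator = struct {
    buffer: []const u32,
    index: i64 = -1,

    // Pre-increment, then bounds-check: returns null once exhausted.
    fn next(self: *Iterator) ?u32 {
        self.index += 1;
        if (self.index < self.buffer.len) {
            return self.buffer[@intCast(u64, self.index)];
        }
        return null;
    }
};

test "pre-increment iterator" {
    var it = Iterator{ .buffer = &[_]u32{ 10, 20 } };
    std.debug.assert(it.next().? == 10);
    std.debug.assert(it.next().? == 20);
    std.debug.assert(it.next() == null);
}
```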
@@ -316,7 +331,7 @@
 const Parser = struct {
     allocator: *Allocator,
     it: *RelocIterator,
-    code: *[]u8,
+    code: []u8,
     parsed: std.ArrayList(*Relocation),
     addend: ?u32 = null,
     subtractor: ?Relocation.Target = null,
@@ -325,7 +340,7 @@ const Parser = struct {
         parser.parsed.deinit();
     }
 
-    fn parse(parser: *Parser) ![]*Relocation {
+    fn parse(parser: *Parser) !void {
         while (parser.it.next()) |reloc| {
             switch (@intToEnum(macho.reloc_type_arm64, reloc.r_type)) {
                 .ARM64_RELOC_BRANCH26 => {
@@ -360,8 +375,6 @@ const Parser = struct {
                 },
             }
         }
-
-        return parser.parsed.toOwnedSlice();
     }
 
     fn parseAddend(parser: *Parser, reloc: macho.relocation_info) !void {
@@ -395,7 +408,7 @@ const Parser = struct {
         assert(reloc.r_length == 2);
 
         const offset = @intCast(u32, reloc.r_address);
-        const inst = parser.code.*[offset..][0..4];
+        const inst = parser.code[offset..][0..4];
         const parsed_inst = aarch64.Instruction{ .UnconditionalBranchImmediate = mem.bytesToValue(
             meta.TagPayload(
                 aarch64.Instruction,
@@ -412,14 +425,14 @@ const Parser = struct {
         branch.* = .{
             .base = .{
                 .@"type" = .branch,
-                .code = parser.code,
+                .code = inst,
                 .offset = @intCast(u32, reloc.r_address),
                 .target = target,
             },
             .inst = parsed_inst,
         };
 
-        log.debug(" | emitting {}", .{branch});
+        log.warn(" | emitting {}", .{branch});
         try parser.parsed.append(&branch.base);
     }
 
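`parseBranch` reinterprets the 4 instruction bytes at the relocation offset as the branch payload; with `code` now a plain slice, each reloc simply keeps the 4-byte sub-slice it will later rewrite. A reduced sketch of viewing little-endian code bytes as a packed instruction word (the field layout here is a stand-in for the codegen `aarch64.Instruction` payload, assuming the 0.8-era packed-struct layout):

```zig
const std = @import("std");
const mem = std.mem;

// Reduced stand-in for an unconditional branch immediate (B/BL) word:
// imm26 in the low bits, fixed opcode bits above, op (0=B, 1=BL) at bit 31.
const BranchImmediate = packed struct {
    imm26: u26,
    fixed: u5 = 0b00101,
    op: u1,
};

test "reinterpreting code bytes as an instruction payload" {
    // "BL #0" encodes as 0x94000000, stored little-endian.
    const code = [_]u8{ 0x00, 0x00, 0x00, 0x94 };
    const inst = mem.bytesToValue(BranchImmediate, code[0..4]);
    std.debug.assert(inst.op == 1);
    std.debug.assert(inst.imm26 == 0);
}
```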
@@ -431,7 +444,7 @@ const Parser = struct {
         const target = Relocation.Target.from_reloc(reloc);
 
         const offset = @intCast(u32, reloc.r_address);
-        const inst = parser.code.*[offset..][0..4];
+        const inst = parser.code[offset..][0..4];
         const parsed_inst = aarch64.Instruction{ .PCRelativeAddress = mem.bytesToValue(meta.TagPayload(
             aarch64.Instruction,
             aarch64.Instruction.PCRelativeAddress,
@@ -450,7 +463,7 @@ const Parser = struct {
                 page.* = .{
                     .base = .{
                         .@"type" = .page,
-                        .code = parser.code,
+                        .code = inst,
                         .offset = offset,
                         .target = target,
                     },
@@ -458,7 +471,7 @@ const Parser = struct {
                     .inst = parsed_inst,
                 };
 
-                log.debug(" | emitting {}", .{page});
+                log.warn(" | emitting {}", .{page});
 
                 break :ptr &page.base;
             },
@@ -469,14 +482,14 @@ const Parser = struct {
                 page.* = .{
                     .base = .{
                         .@"type" = .got_page,
-                        .code = parser.code,
+                        .code = inst,
                         .offset = offset,
                         .target = target,
                     },
                     .inst = parsed_inst,
                 };
 
-                log.debug(" | emitting {}", .{page});
+                log.warn(" | emitting {}", .{page});
 
                 break :ptr &page.base;
             },
@@ -487,14 +500,14 @@ const Parser = struct {
                 page.* = .{
                     .base = .{
                         .@"type" = .tlvp_page,
-                        .code = parser.code,
+                        .code = inst,
                         .offset = offset,
                         .target = target,
                     },
                     .inst = parsed_inst,
                 };
 
-                log.debug(" | emitting {}", .{page});
+                log.warn(" | emitting {}", .{page});
 
                 break :ptr &page.base;
             },
@@ -517,7 +530,7 @@ const Parser = struct {
         assert(reloc.r_length == 2);
 
         const offset = @intCast(u32, reloc.r_address);
-        const inst = parser.code.*[offset..][0..4];
+        const inst = parser.code[offset..][0..4];
 
         var op_kind: Relocation.PageOff.OpKind = undefined;
         var parsed_inst: aarch64.Instruction = undefined;
@@ -542,7 +555,7 @@ const Parser = struct {
         page_off.* = .{
             .base = .{
                 .@"type" = .page_off,
-                .code = parser.code,
+                .code = inst,
                 .offset = offset,
                 .target = target,
             },
@@ -551,7 +564,7 @@ const Parser = struct {
             .op_kind = op_kind,
             .addend = parser.addend,
         };
 
-        log.debug(" | emitting {}", .{page_off});
+        log.warn(" | emitting {}", .{page_off});
         try parser.parsed.append(&page_off.base);
     }
 
@@ -562,7 +575,7 @@ const Parser = struct {
         assert(reloc.r_length == 2);
 
         const offset = @intCast(u32, reloc.r_address);
-        const inst = parser.code.*[offset..][0..4];
+        const inst = parser.code[offset..][0..4];
         assert(!isArithmeticOp(inst));
 
         const parsed_inst = mem.bytesToValue(meta.TagPayload(
@@ -579,7 +592,7 @@ const Parser = struct {
         page_off.* = .{
             .base = .{
                 .@"type" = .got_page_off,
-                .code = parser.code,
+                .code = inst,
                 .offset = offset,
                 .target = target,
             },
@@ -588,7 +601,7 @@ const Parser = struct {
             },
         };
 
-        log.debug(" | emitting {}", .{page_off});
+        log.warn(" | emitting {}", .{page_off});
         try parser.parsed.append(&page_off.base);
     }
 
@@ -605,7 +618,7 @@ const Parser = struct {
         };
 
         const offset = @intCast(u32, reloc.r_address);
-        const inst = parser.code.*[offset..][0..4];
+        const inst = parser.code[offset..][0..4];
         const parsed: RegInfo = parsed: {
             if (isArithmeticOp(inst)) {
                 const parsed_inst = mem.bytesAsValue(meta.TagPayload(
@@ -638,7 +651,7 @@ const Parser = struct {
         page_off.* = .{
             .base = .{
                 .@"type" = .tlvp_page_off,
-                .code = parser.code,
+                .code = inst,
                 .offset = @intCast(u32, reloc.r_address),
                 .target = target,
             },
@@ -655,7 +668,7 @@ const Parser = struct {
             },
         };
 
-        log.debug(" | emitting {}", .{page_off});
+        log.warn(" | emitting {}", .{page_off});
         try parser.parsed.append(&page_off.base);
     }
 
@@ -700,14 +713,14 @@ const Parser = struct {
         };
         const offset = @intCast(u32, reloc.r_address);
         const addend: i64 = if (is_64bit)
-            mem.readIntLittle(i64, parser.code.*[offset..][0..8])
+            mem.readIntLittle(i64, parser.code[offset..][0..8])
         else
-            mem.readIntLittle(i32, parser.code.*[offset..][0..4]);
+            mem.readIntLittle(i32, parser.code[offset..][0..4]);
 
         unsigned.* = .{
             .base = .{
                 .@"type" = .unsigned,
-                .code = parser.code,
+                .code = if (is_64bit) parser.code[offset..][0..8] else parser.code[offset..][0..4],
                 .offset = offset,
                 .target = target,
             },
@@ -716,7 +729,7 @@ const Parser = struct {
             .addend = addend,
         };
 
-        log.debug(" | emitting {}", .{unsigned});
+        log.warn(" | emitting {}", .{unsigned});
         try parser.parsed.append(&unsigned.base);
     }
 };
 
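For unsigned relocations, the addend is whatever already sits in the relocated cell: 8 bytes for pointers, 4 for 32-bit fields, selected by `r_length` in the real parser. A toy version of that read, assuming little-endian code bytes:

```zig
const std = @import("std");
const mem = std.mem;

fn readAddend(code: []const u8, is_64bit: bool) i64 {
    // The cell's current contents are the compiler-provided addend.
    return if (is_64bit)
        mem.readIntLittle(i64, code[0..8])
    else
        mem.readIntLittle(i32, code[0..4]);
}

test "reading an unsigned relocation's inline addend" {
    // An 8-byte cell pre-populated with addend 0x10.
    var code = [_]u8{ 0x10, 0, 0, 0, 0, 0, 0, 0 };
    std.debug.assert(readAddend(&code, true) == 0x10);
}
```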