zld: fix alloc alignment and resolve relocs

This commit is contained in:
Jakub Konka 2021-07-08 14:37:33 +02:00
parent 961b463fad
commit 12187586d1
3 changed files with 173 additions and 102 deletions

View File

@ -535,8 +535,11 @@ pub fn parseTextBlocks(self: *Object, zld: *Zld) !void {
// How does it tie with incremental space allocs?
const tseg = &zld.load_commands.items[match.seg].Segment;
const tsect = &tseg.sections.items[match.sect];
tsect.size += block.size;
tsect.@"align" = math.max(tsect.@"align", block.alignment);
const new_alignment = math.max(tsect.@"align", block.alignment);
const new_alignment_pow_2 = try math.powi(u32, 2, new_alignment);
const new_size = mem.alignForwardGeneric(u64, tsect.size + block.size, new_alignment_pow_2);
tsect.size = new_size;
tsect.@"align" = new_alignment;
if (zld.blocks.getPtr(match)) |last| {
last.*.next = block;
@ -621,8 +624,11 @@ pub fn parseTextBlocks(self: *Object, zld: *Zld) !void {
// How does it tie with incremental space allocs?
const tseg = &zld.load_commands.items[match.seg].Segment;
const tsect = &tseg.sections.items[match.sect];
tsect.size += block.size;
tsect.@"align" = math.max(tsect.@"align", block.alignment);
const new_alignment = math.max(tsect.@"align", block.alignment);
const new_alignment_pow_2 = try math.powi(u32, 2, new_alignment);
const new_size = mem.alignForwardGeneric(u64, tsect.size + block.size, new_alignment_pow_2);
tsect.size = new_size;
tsect.@"align" = new_alignment;
if (zld.blocks.getPtr(match)) |last| {
last.*.next = block;

View File

@ -173,6 +173,12 @@ pub const TextBlock = struct {
self.tlv_offsets.deinit();
}
pub fn resolveRelocs(self: *TextBlock, zld: *Zld) !void {
for (self.relocs.items) |rel| {
try rel.resolve(zld);
}
}
pub fn print_this(self: *const TextBlock, zld: *Zld) void {
log.warn("TextBlock", .{});
log.warn(" | {}: {}", .{ self.local_sym_index, zld.locals.items[self.local_sym_index] });
@ -328,11 +334,10 @@ pub fn link(self: *Zld, files: []const []const u8, output: Output, args: LinkArg
const sect = seg.sections.items[entry.key_ptr.sect];
log.warn("\n\n{s},{s} contents:", .{ segmentName(sect), sectionName(sect) });
log.warn("{}", .{sect});
log.warn(" {}", .{sect});
entry.value_ptr.*.print(self);
}
return error.TODO;
// try self.flush();
try self.flush();
}
fn parseInputFiles(self: *Zld, files: []const []const u8, syslibroot: ?[]const u8) !void {
@ -1041,6 +1046,8 @@ fn allocateSegment(self: *Zld, index: u16, offset: u64) !void {
}
fn allocateTextBlocks(self: *Zld) !void {
log.warn("allocating text blocks", .{});
var it = self.blocks.iterator();
while (it.next()) |entry| {
const match = entry.key_ptr.*;
@ -1050,13 +1057,34 @@ fn allocateTextBlocks(self: *Zld) !void {
const sect = seg.sections.items[match.sect];
var base_addr: u64 = sect.addr + sect.size;
log.warn(" within section {s},{s}", .{ segmentName(sect), sectionName(sect) });
log.warn(" {}", .{sect});
while (true) {
base_addr -= block.size;
const block_alignment = try math.powi(u32, 2, block.alignment);
base_addr = mem.alignBackwardGeneric(u64, base_addr - block.size, block_alignment);
const sym = self.locals.items[block.local_sym_index];
assert(sym.payload == .regular);
sym.payload.regular.address = base_addr;
log.warn(" {s}: start=0x{x}, end=0x{x}, size={}, align={}", .{
sym.name,
base_addr,
base_addr + block.size,
block.size,
block.alignment,
});
// Update each alias (if any)
if (block.aliases) |aliases| {
for (aliases) |index| {
const alias_sym = self.locals.items[index];
assert(alias_sym.payload == .regular);
alias_sym.payload.regular.address = base_addr;
}
}
// Update each symbol contained within the TextBlock
if (block.contained) |contained| {
for (contained) |sym_at_off| {
@ -1073,6 +1101,37 @@ fn allocateTextBlocks(self: *Zld) !void {
}
}
fn writeTextBlocks(self: *Zld) !void {
var it = self.blocks.iterator();
while (it.next()) |entry| {
const match = entry.key_ptr.*;
var block: *TextBlock = entry.value_ptr.*;
const seg = self.load_commands.items[match.seg].Segment;
const sect = seg.sections.items[match.sect];
log.warn("writing text blocks for section {s},{s}", .{ segmentName(sect), sectionName(sect) });
var code = try self.allocator.alloc(u8, sect.size);
defer self.allocator.free(code);
var base_off: u64 = sect.size;
while (true) {
base_off -= block.size;
try block.resolveRelocs(self);
mem.copy(u8, code[base_off..][0..block.size], block.code);
if (block.prev) |prev| {
block = prev;
} else break;
}
try self.file.?.pwriteAll(code, sect.offset);
}
}
fn writeStubHelperCommon(self: *Zld) !void {
const text_segment = &self.load_commands.items[self.text_segment_cmd_index.?].Segment;
const stub_helper = &text_segment.sections.items[self.stub_helper_section_index.?];
@ -1941,104 +2000,105 @@ fn addRpaths(self: *Zld, rpaths: []const []const u8) !void {
}
fn flush(self: *Zld) !void {
try self.writeStubHelperCommon();
try self.writeTextBlocks();
// try self.writeStubHelperCommon();
if (self.common_section_index) |index| {
const seg = &self.load_commands.items[self.data_segment_cmd_index.?].Segment;
const sect = &seg.sections.items[index];
sect.offset = 0;
}
// if (self.common_section_index) |index| {
// const seg = &self.load_commands.items[self.data_segment_cmd_index.?].Segment;
// const sect = &seg.sections.items[index];
// sect.offset = 0;
// }
if (self.bss_section_index) |index| {
const seg = &self.load_commands.items[self.data_segment_cmd_index.?].Segment;
const sect = &seg.sections.items[index];
sect.offset = 0;
}
// if (self.bss_section_index) |index| {
// const seg = &self.load_commands.items[self.data_segment_cmd_index.?].Segment;
// const sect = &seg.sections.items[index];
// sect.offset = 0;
// }
if (self.tlv_bss_section_index) |index| {
const seg = &self.load_commands.items[self.data_segment_cmd_index.?].Segment;
const sect = &seg.sections.items[index];
sect.offset = 0;
}
// if (self.tlv_bss_section_index) |index| {
// const seg = &self.load_commands.items[self.data_segment_cmd_index.?].Segment;
// const sect = &seg.sections.items[index];
// sect.offset = 0;
// }
if (self.tlv_section_index) |index| {
const seg = self.load_commands.items[self.data_segment_cmd_index.?].Segment;
const sect = &seg.sections.items[index];
// if (self.tlv_section_index) |index| {
// const seg = self.load_commands.items[self.data_segment_cmd_index.?].Segment;
// const sect = &seg.sections.items[index];
var buffer = try self.allocator.alloc(u8, @intCast(usize, sect.size));
defer self.allocator.free(buffer);
_ = try self.file.?.preadAll(buffer, sect.offset);
// var buffer = try self.allocator.alloc(u8, @intCast(usize, sect.size));
// defer self.allocator.free(buffer);
// _ = try self.file.?.preadAll(buffer, sect.offset);
var stream = std.io.fixedBufferStream(buffer);
var writer = stream.writer();
// var stream = std.io.fixedBufferStream(buffer);
// var writer = stream.writer();
std.sort.sort(TlvOffset, self.threadlocal_offsets.items, {}, TlvOffset.cmp);
// std.sort.sort(TlvOffset, self.threadlocal_offsets.items, {}, TlvOffset.cmp);
const seek_amt = 2 * @sizeOf(u64);
for (self.threadlocal_offsets.items) |tlv| {
try writer.context.seekBy(seek_amt);
try writer.writeIntLittle(u64, tlv.offset);
}
// const seek_amt = 2 * @sizeOf(u64);
// for (self.threadlocal_offsets.items) |tlv| {
// try writer.context.seekBy(seek_amt);
// try writer.writeIntLittle(u64, tlv.offset);
// }
try self.file.?.pwriteAll(buffer, sect.offset);
}
// try self.file.?.pwriteAll(buffer, sect.offset);
// }
if (self.mod_init_func_section_index) |index| {
const seg = self.load_commands.items[self.data_const_segment_cmd_index.?].Segment;
const sect = &seg.sections.items[index];
// if (self.mod_init_func_section_index) |index| {
// const seg = self.load_commands.items[self.data_const_segment_cmd_index.?].Segment;
// const sect = &seg.sections.items[index];
var initializers = std.ArrayList(u64).init(self.allocator);
defer initializers.deinit();
// var initializers = std.ArrayList(u64).init(self.allocator);
// defer initializers.deinit();
for (self.objects.items) |object| {
for (object.initializers.items) |sym_id| {
const address = object.symbols.items[sym_id].payload.regular.address;
try initializers.append(address);
}
}
// for (self.objects.items) |object| {
// for (object.initializers.items) |sym_id| {
// const address = object.symbols.items[sym_id].payload.regular.address;
// try initializers.append(address);
// }
// }
_ = try self.file.?.pwriteAll(mem.sliceAsBytes(initializers.items), sect.offset);
sect.size = @intCast(u32, initializers.items.len * @sizeOf(u64));
}
// _ = try self.file.?.pwriteAll(mem.sliceAsBytes(initializers.items), sect.offset);
// sect.size = @intCast(u32, initializers.items.len * @sizeOf(u64));
// }
try self.writeGotEntries();
try self.setEntryPoint();
try self.writeRebaseInfoTable();
try self.writeBindInfoTable();
try self.writeLazyBindInfoTable();
try self.writeExportInfo();
try self.writeDataInCode();
// try self.writeGotEntries();
// try self.setEntryPoint();
// try self.writeRebaseInfoTable();
// try self.writeBindInfoTable();
// try self.writeLazyBindInfoTable();
// try self.writeExportInfo();
// try self.writeDataInCode();
{
const seg = &self.load_commands.items[self.linkedit_segment_cmd_index.?].Segment;
const symtab = &self.load_commands.items[self.symtab_cmd_index.?].Symtab;
symtab.symoff = @intCast(u32, seg.inner.fileoff + seg.inner.filesize);
}
// {
// const seg = &self.load_commands.items[self.linkedit_segment_cmd_index.?].Segment;
// const symtab = &self.load_commands.items[self.symtab_cmd_index.?].Symtab;
// symtab.symoff = @intCast(u32, seg.inner.fileoff + seg.inner.filesize);
// }
try self.writeSymbolTable();
try self.writeStringTable();
// try self.writeSymbolTable();
// try self.writeStringTable();
{
// Seal __LINKEDIT size
const seg = &self.load_commands.items[self.linkedit_segment_cmd_index.?].Segment;
seg.inner.vmsize = mem.alignForwardGeneric(u64, seg.inner.filesize, self.page_size.?);
}
// {
// // Seal __LINKEDIT size
// const seg = &self.load_commands.items[self.linkedit_segment_cmd_index.?].Segment;
// seg.inner.vmsize = mem.alignForwardGeneric(u64, seg.inner.filesize, self.page_size.?);
// }
if (self.target.?.cpu.arch == .aarch64) {
try self.writeCodeSignaturePadding();
}
// if (self.target.?.cpu.arch == .aarch64) {
// try self.writeCodeSignaturePadding();
// }
try self.writeLoadCommands();
try self.writeHeader();
// try self.writeLoadCommands();
// try self.writeHeader();
if (self.target.?.cpu.arch == .aarch64) {
try self.writeCodeSignature();
}
// if (self.target.?.cpu.arch == .aarch64) {
// try self.writeCodeSignature();
// }
if (comptime std.Target.current.isDarwin() and std.Target.current.cpu.arch == .aarch64) {
const out_path = self.output.?.path;
try fs.cwd().copyFile(out_path, fs.cwd(), out_path, .{});
}
// if (comptime std.Target.current.isDarwin() and std.Target.current.cpu.arch == .aarch64) {
// const out_path = self.output.?.path;
// try fs.cwd().copyFile(out_path, fs.cwd(), out_path, .{});
// }
}
fn writeGotEntries(self: *Zld) !void {

View File

@ -52,7 +52,7 @@ pub const Relocation = struct {
pub fn resolve(self: Unsigned, base: Relocation, source_addr: u64, target_addr: u64) !void {
const addend = if (self.source_sect_addr) |addr|
self.addend - addr
self.addend - @intCast(i64, addr)
else
self.addend;
@ -86,13 +86,13 @@ pub const Relocation = struct {
arch: Arch,
pub fn resolve(self: Branch, base: Relocation, source_addr: u64, target_addr: u64) !void {
switch (arch) {
switch (self.arch) {
.aarch64 => {
const displacement = try math.cast(i28, @intCast(i64, target_addr) - @intCast(i64, source_addr));
var inst = aarch64.Instruction{
.unconditional_branch_immediate = mem.bytesToValue(
meta.TagPayload(
aarch.Instruction,
aarch64.Instruction,
aarch64.Instruction.unconditional_branch_immediate,
),
base.block.code[base.offset..][0..4],
@ -236,13 +236,15 @@ pub const Relocation = struct {
},
.got => {
const narrowed = @truncate(u12, target_addr);
var inst = mem.bytesToValue(
meta.TagPayload(
aarch64.Instruction,
aarch64.Instruction.load_store_register,
var inst: aarch64.Instruction = .{
.load_store_register = mem.bytesToValue(
meta.TagPayload(
aarch64.Instruction,
aarch64.Instruction.load_store_register,
),
base.block.code[base.offset..][0..4],
),
base.block.code[base.offset..][0..4],
);
};
const offset = try math.divExact(u12, narrowed, 8);
inst.load_store_register.offset = offset;
mem.writeIntLittle(u32, base.block.code[base.offset..][0..4], inst.toU32());
@ -408,14 +410,12 @@ pub const Relocation = struct {
break :blk sym.payload.regular.address + self.offset;
};
const target_addr = blk: {
const is_via_got = inner: {
switch (self.payload) {
.pointer_to_got => break :inner true,
.page => |page| page.kind == .got,
.page_off => |page_off| page_off == .got,
.load => {},
else => break :inner false,
}
const is_via_got = switch (self.payload) {
.pointer_to_got => true,
.page => |page| page.kind == .got,
.page_off => |page_off| page_off.kind == .got,
.load => |load| load.kind == .got,
else => false,
};
if (is_via_got) {
@ -459,6 +459,11 @@ pub const Relocation = struct {
},
}
};
log.warn("relocating {}", .{self});
log.warn(" | source_addr = 0x{x}", .{source_addr});
log.warn(" | target_addr = 0x{x}", .{target_addr});
switch (self.payload) {
.unsigned => |unsigned| try unsigned.resolve(self, source_addr, target_addr),
.branch => |branch| try branch.resolve(self, source_addr, target_addr),