macho: sort sections when linking stage1 binary

commit 397a11c107
parent c86f2402d0
Author: Jakub Konka
Date:   2021-12-05 22:20:13 +01:00


@@ -920,6 +920,7 @@ pub fn flushModule(self: *MachO, comp: *Compilation) !void {
    try self.parseObjectsIntoAtoms();
    if (use_stage1) {
        try self.sortSections();
        try self.allocateTextSegment();
        try self.allocateDataConstSegment();
        try self.allocateDataSegment();
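Note the placement: the new sortSections call runs before the allocateTextSegment / allocateDataConstSegment / allocateDataSegment passes, so segment allocation already sees the final section order. The bulk of the change, in the second hunk below, implements the sort; a standalone sketch of the reorder-and-remap pattern it uses follows the diff.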
@@ -4849,6 +4850,160 @@ fn nextSegmentAddressAndOffset(self: *MachO) NextSegmentAddressAndOffset {
    };
}

/// Reorders the sections in each segment into a fixed, well-known order,
/// rewriting the cached section indices as it goes. Per-segment mappings
/// record old -> new section indices so that structures keyed on the old
/// indices can be remapped afterwards.
fn sortSections(self: *MachO) !void {
    var text_index_mapping = std.AutoHashMap(u16, u16).init(self.base.allocator);
    defer text_index_mapping.deinit();
    var data_const_index_mapping = std.AutoHashMap(u16, u16).init(self.base.allocator);
    defer data_const_index_mapping.deinit();
    var data_index_mapping = std.AutoHashMap(u16, u16).init(self.base.allocator);
    defer data_index_mapping.deinit();

    {
        // __TEXT segment
        const seg = &self.load_commands.items[self.text_segment_cmd_index.?].Segment;
        var sections = seg.sections.toOwnedSlice(self.base.allocator);
        defer self.base.allocator.free(sections);
        try seg.sections.ensureTotalCapacity(self.base.allocator, sections.len);

        const indices = &[_]*?u16{
            &self.text_section_index,
            &self.stubs_section_index,
            &self.stub_helper_section_index,
            &self.gcc_except_tab_section_index,
            &self.cstring_section_index,
            &self.ustring_section_index,
            &self.text_const_section_index,
            &self.objc_methlist_section_index,
            &self.objc_methname_section_index,
            &self.objc_methtype_section_index,
            &self.objc_classname_section_index,
            &self.eh_frame_section_index,
        };
        for (indices) |maybe_index| {
            const new_index: u16 = if (maybe_index.*) |index| blk: {
                const idx = @intCast(u16, seg.sections.items.len);
                seg.sections.appendAssumeCapacity(sections[index]);
                try text_index_mapping.putNoClobber(index, idx);
                break :blk idx;
            } else continue;
            maybe_index.* = new_index;
        }
    }

    {
        // __DATA_CONST segment
        const seg = &self.load_commands.items[self.data_const_segment_cmd_index.?].Segment;
        var sections = seg.sections.toOwnedSlice(self.base.allocator);
        defer self.base.allocator.free(sections);
        try seg.sections.ensureTotalCapacity(self.base.allocator, sections.len);

        const indices = &[_]*?u16{
            &self.got_section_index,
            &self.mod_init_func_section_index,
            &self.mod_term_func_section_index,
            &self.data_const_section_index,
            &self.objc_cfstring_section_index,
            &self.objc_classlist_section_index,
            &self.objc_imageinfo_section_index,
        };
        for (indices) |maybe_index| {
            const new_index: u16 = if (maybe_index.*) |index| blk: {
                const idx = @intCast(u16, seg.sections.items.len);
                seg.sections.appendAssumeCapacity(sections[index]);
                try data_const_index_mapping.putNoClobber(index, idx);
                break :blk idx;
            } else continue;
            maybe_index.* = new_index;
        }
    }

    {
        // __DATA segment
        const seg = &self.load_commands.items[self.data_segment_cmd_index.?].Segment;
        var sections = seg.sections.toOwnedSlice(self.base.allocator);
        defer self.base.allocator.free(sections);
        try seg.sections.ensureTotalCapacity(self.base.allocator, sections.len);
        const indices = &[_]*?u16{
            &self.la_symbol_ptr_section_index,
            &self.objc_const_section_index,
            &self.objc_selrefs_section_index,
            &self.objc_classrefs_section_index,
            &self.objc_data_section_index,
            &self.data_section_index,
            &self.tlv_section_index,
            &self.tlv_data_section_index,
            &self.tlv_bss_section_index,
            &self.bss_section_index,
        };
        for (indices) |maybe_index| {
            const new_index: u16 = if (maybe_index.*) |index| blk: {
                const idx = @intCast(u16, seg.sections.items.len);
                seg.sections.appendAssumeCapacity(sections[index]);
                try data_index_mapping.putNoClobber(index, idx);
                break :blk idx;
            } else continue;
            maybe_index.* = new_index;
        }
    }

    {
        // Rewrite the atom map so that each key carries the new index of
        // its matching section.
        var transient: std.AutoHashMapUnmanaged(MatchingSection, *Atom) = .{};
        try transient.ensureTotalCapacity(self.base.allocator, self.atoms.count());

        var it = self.atoms.iterator();
        while (it.next()) |entry| {
            const old = entry.key_ptr.*;
            const sect = if (old.seg == self.text_segment_cmd_index.?)
                text_index_mapping.get(old.sect).?
            else if (old.seg == self.data_const_segment_cmd_index.?)
                data_const_index_mapping.get(old.sect).?
            else
                data_index_mapping.get(old.sect).?;
            transient.putAssumeCapacityNoClobber(.{
                .seg = old.seg,
                .sect = sect,
            }, entry.value_ptr.*);
        }

        self.atoms.clearAndFree(self.base.allocator);
        self.atoms.deinit(self.base.allocator);
        self.atoms = transient;
    }

    {
        // Create new section ordinals.
        self.section_ordinals.clearRetainingCapacity();
        const text_seg = self.load_commands.items[self.text_segment_cmd_index.?].Segment;
        for (text_seg.sections.items) |_, sect_id| {
            const res = self.section_ordinals.getOrPutAssumeCapacity(.{
                .seg = self.text_segment_cmd_index.?,
                .sect = @intCast(u16, sect_id),
            });
            assert(!res.found_existing);
        }
        const data_const_seg = self.load_commands.items[self.data_const_segment_cmd_index.?].Segment;
        for (data_const_seg.sections.items) |_, sect_id| {
            const res = self.section_ordinals.getOrPutAssumeCapacity(.{
                .seg = self.data_const_segment_cmd_index.?,
                .sect = @intCast(u16, sect_id),
            });
            assert(!res.found_existing);
        }
        const data_seg = self.load_commands.items[self.data_segment_cmd_index.?].Segment;
        for (data_seg.sections.items) |_, sect_id| {
            const res = self.section_ordinals.getOrPutAssumeCapacity(.{
                .seg = self.data_segment_cmd_index.?,
                .sect = @intCast(u16, sect_id),
            });
            assert(!res.found_existing);
        }
    }

    self.sections_order_dirty = false;
}

fn updateSectionOrdinals(self: *MachO) !void {
    if (!self.sections_order_dirty) return;
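
The core of sortSections is a reorder-and-remap pattern: rebuild each segment's section list in a fixed priority order, record the old -> new index mapping, then rewrite every structure keyed on the old indices (the atom map and the section ordinals above). A minimal, self-contained sketch of that pattern, with hypothetical names and plain u8 "sections" standing in for macho.section_64 entries — era-appropriate Zig, not part of the commit:

const std = @import("std");

test "reorder by priority with index remapping" {
    const allocator = std.testing.allocator;

    // Old section order, e.g. as discovered while parsing objects.
    var sections = std.ArrayListUnmanaged(u8){};
    defer sections.deinit(allocator);
    try sections.appendSlice(allocator, &[_]u8{ 'c', 'a', 'b' });

    // Cached optional per-section indices, as MachO keeps them; null means
    // the section was never created. The order of this list is the priority.
    var a_index: ?u16 = 1;
    var b_index: ?u16 = 2;
    var c_index: ?u16 = 0;
    const indices = &[_]*?u16{ &a_index, &b_index, &c_index };

    var mapping = std.AutoHashMap(u16, u16).init(allocator);
    defer mapping.deinit();

    // Take ownership of the old list and rebuild it in priority order.
    const old = sections.toOwnedSlice(allocator);
    defer allocator.free(old);
    try sections.ensureTotalCapacity(allocator, old.len);

    for (indices) |maybe_index| {
        const index = maybe_index.* orelse continue;
        const new_index = @intCast(u16, sections.items.len);
        sections.appendAssumeCapacity(old[index]);
        try mapping.putNoClobber(index, new_index);
        maybe_index.* = new_index; // update the cached index in place
    }

    try std.testing.expectEqualSlices(u8, &[_]u8{ 'a', 'b', 'c' }, sections.items);
    // Any structure keyed on old indices can now be rewritten via `mapping`,
    // as the commit does for self.atoms and self.section_ordinals.
    try std.testing.expectEqual(@as(u16, 0), mapping.get(1).?);
}

Keeping the priority table as a list of pointers to the cached optional indices lets the same pass both define the order and update the caches in place; null entries (sections that were never created) simply drop out of the rebuilt list.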