macho: report basic __eh_frame problems as errors

Jakub Konka 2023-08-30 21:48:24 +02:00
parent ba710ec09d
commit ebe371b757
7 changed files with 55 additions and 43 deletions
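For context, a minimal sketch of the pattern this commit introduces: __eh_frame problems found while splitting an object into atoms surface as named errors (MissingEhFrameSection, BadDwarfCfi) instead of being logged inside Object, and the top-level link step converts them into user-facing diagnostics via reportParseError. The stand-in functions, simplified signatures, and the object name "example.o" below are illustrative only, not the linker's real implementation:

    const std = @import("std");

    // Mirrors the new error set on Object.splitIntoAtoms (names taken from the diff).
    const SplitIntoAtomsError = error{
        OutOfMemory,
        EndOfStream,
        MissingEhFrameSection,
        BadDwarfCfi,
    };

    // Stand-in for Object.splitIntoAtoms: rather than logging and returning a
    // generic error.MissingSection, it returns a named, caller-reportable error.
    fn splitIntoAtoms(has_eh_frame_records: bool) SplitIntoAtomsError!void {
        if (!has_eh_frame_records) return error.MissingEhFrameSection;
    }

    // Stand-in for MachO.reportParseError; the real method is fallible (it
    // allocates the diagnostic) and is therefore called with `try` in the diff.
    fn reportParseError(path: []const u8, comptime format: []const u8, args: anytype) void {
        std.debug.print("error: " ++ format ++ "\n", args);
        std.debug.print("  in object {s}\n", .{path});
    }

    pub fn main() !void {
        const object_name = "example.o"; // hypothetical object file
        splitIntoAtoms(false) catch |err| switch (err) {
            error.MissingEhFrameSection => reportParseError(
                object_name,
                "missing section: '__TEXT,__eh_frame' is required but could not be found",
                .{},
            ),
            error.BadDwarfCfi => reportParseError(
                object_name,
                "invalid DWARF: failed to parse '__TEXT,__eh_frame' section",
                .{},
            ),
            else => |e| return e,
        };
    }

The real call site in linkWithZld (last hunk below) performs exactly this dispatch, letting any unexpected error keep propagating via the else prong.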


@@ -693,7 +693,6 @@ pub const File = struct {
/// TODO audit this error set. most of these should be collapsed into one error,
/// and ErrorFlags should be updated to convey the meaning to the user.
pub const FlushError = error{
BadDwarfCfi,
CacheUnavailable,
CurrentWorkingDirectoryUnlinked,
DivisionByZero,
@@ -728,8 +727,6 @@ pub const File = struct {
MissAlignment,
MissingEndForBody,
MissingEndForExpression,
/// TODO: this should be removed from the error set in favor of using ErrorFlags
MissingSection,
MissingSymbol,
MissingTableSymbols,
ModuleNameMismatch,


@@ -4946,7 +4946,7 @@ fn reportDependencyError(
});
}
fn reportParseError(
pub fn reportParseError(
self: *MachO,
path: []const u8,
comptime format: []const u8,


@@ -334,7 +334,14 @@ fn sectionLessThanByAddress(ctx: void, lhs: SortedSection, rhs: SortedSection) b
return lhs.header.addr < rhs.header.addr;
}
pub fn splitIntoAtoms(self: *Object, macho_file: *MachO, object_id: u32) !void {
pub const SplitIntoAtomsError = error{
OutOfMemory,
EndOfStream,
MissingEhFrameSection,
BadDwarfCfi,
};
pub fn splitIntoAtoms(self: *Object, macho_file: *MachO, object_id: u32) SplitIntoAtomsError!void {
log.debug("splitting object({d}, {s}) into atoms", .{ object_id, self.name });
try self.splitRegularSections(macho_file, object_id);
@@ -788,11 +795,7 @@ fn parseUnwindInfo(self: *Object, macho_file: *MachO, object_id: u32) !void {
if (UnwindInfo.UnwindEncoding.isDwarf(record.compactUnwindEncoding, cpu_arch)) break true;
} else false;
if (needs_eh_frame and !self.hasEhFrameRecords()) {
log.err("missing __TEXT,__eh_frame section", .{});
log.err(" in object {s}", .{self.name});
return error.MissingSection;
}
if (needs_eh_frame and !self.hasEhFrameRecords()) return error.MissingEhFrameSection;
try self.parseRelocs(gpa, sect_id);
const relocs = self.getRelocs(sect_id);


@@ -240,7 +240,7 @@ pub fn collect(info: *UnwindInfo, macho_file: *MachO) !void {
var record = unwind_records[record_id];
if (UnwindEncoding.isDwarf(record.compactUnwindEncoding, cpu_arch)) {
try info.collectPersonalityFromDwarf(macho_file, @as(u32, @intCast(object_id)), symbol, &record);
info.collectPersonalityFromDwarf(macho_file, @as(u32, @intCast(object_id)), symbol, &record);
} else {
if (getPersonalityFunctionReloc(
macho_file,
@@ -288,7 +288,7 @@ pub fn collect(info: *UnwindInfo, macho_file: *MachO) !void {
if (object.eh_frame_records_lookup.get(symbol)) |fde_offset| {
if (object.eh_frame_relocs_lookup.get(fde_offset).?.dead) continue;
var record = nullRecord();
try info.collectPersonalityFromDwarf(macho_file, @as(u32, @intCast(object_id)), symbol, &record);
info.collectPersonalityFromDwarf(macho_file, @as(u32, @intCast(object_id)), symbol, &record);
switch (cpu_arch) {
.aarch64 => UnwindEncoding.setMode(&record.compactUnwindEncoding, macho.UNWIND_ARM64_MODE.DWARF),
.x86_64 => UnwindEncoding.setMode(&record.compactUnwindEncoding, macho.UNWIND_X86_64_MODE.DWARF),
@@ -500,16 +500,16 @@ fn collectPersonalityFromDwarf(
object_id: u32,
sym_loc: SymbolWithLoc,
record: *macho.compact_unwind_entry,
) !void {
) void {
const object = &macho_file.objects.items[object_id];
var it = object.getEhFrameRecordsIterator();
const fde_offset = object.eh_frame_records_lookup.get(sym_loc).?;
it.seekTo(fde_offset);
const fde = (try it.next()).?;
const fde = (it.next() catch return).?; // We don't care about the error since we already handled it
const cie_ptr = fde.getCiePointerSource(object_id, macho_file, fde_offset);
const cie_offset = fde_offset + 4 - cie_ptr;
it.seekTo(cie_offset);
const cie = (try it.next()).?;
const cie = (it.next() catch return).?; // We don't care about the error since we already handled it
if (cie.getPersonalityPointerReloc(
macho_file,
@@ -528,7 +528,7 @@ fn collectPersonalityFromDwarf(
}
}
pub fn calcSectionSize(info: UnwindInfo, macho_file: *MachO) !void {
pub fn calcSectionSize(info: UnwindInfo, macho_file: *MachO) void {
const sect_id = macho_file.unwind_info_section_index orelse return;
const sect = &macho_file.sections.items(.header)[sect_id];
sect.@"align" = 2;


@@ -13,7 +13,7 @@ pub fn gcAtoms(macho_file: *MachO) !void {
try alive.ensureTotalCapacity(@as(u32, @intCast(macho_file.atoms.items.len)));
try collectRoots(macho_file, &roots);
try mark(macho_file, roots, &alive);
mark(macho_file, roots, &alive);
prune(macho_file, alive);
}
@@ -227,7 +227,7 @@ fn refersLive(macho_file: *MachO, atom_index: Atom.Index, alive: AtomTable) bool
return false;
}
fn mark(macho_file: *MachO, roots: AtomTable, alive: *AtomTable) !void {
fn mark(macho_file: *MachO, roots: AtomTable, alive: *AtomTable) void {
var it = roots.keyIterator();
while (it.next()) |root| {
markLive(macho_file, root.*, alive);
@@ -264,11 +264,11 @@ fn mark(macho_file: *MachO, roots: AtomTable, alive: *AtomTable) !void {
for (macho_file.objects.items, 0..) |_, object_id| {
// Traverse unwind and eh_frame records noting if the source symbol has been marked, and if so,
// marking all references as live.
try markUnwindRecords(macho_file, @as(u32, @intCast(object_id)), alive);
markUnwindRecords(macho_file, @as(u32, @intCast(object_id)), alive);
}
}
fn markUnwindRecords(macho_file: *MachO, object_id: u32, alive: *AtomTable) !void {
fn markUnwindRecords(macho_file: *MachO, object_id: u32, alive: *AtomTable) void {
const object = &macho_file.objects.items[object_id];
const cpu_arch = macho_file.base.options.target.cpu.arch;
@@ -280,7 +280,7 @@ fn markUnwindRecords(macho_file: *MachO, object_id: u32, alive: *AtomTable) !voi
if (!object.hasUnwindRecords()) {
if (alive.contains(atom_index)) {
// Mark references live and continue.
try markEhFrameRecords(macho_file, object_id, atom_index, alive);
markEhFrameRecords(macho_file, object_id, atom_index, alive);
} else {
while (inner_syms_it.next()) |sym| {
if (object.eh_frame_records_lookup.get(sym)) |fde_offset| {
@@ -306,7 +306,7 @@ fn markUnwindRecords(macho_file: *MachO, object_id: u32, alive: *AtomTable) !voi
const record = unwind_records[record_id];
if (UnwindInfo.UnwindEncoding.isDwarf(record.compactUnwindEncoding, cpu_arch)) {
try markEhFrameRecords(macho_file, object_id, atom_index, alive);
markEhFrameRecords(macho_file, object_id, atom_index, alive);
} else {
if (UnwindInfo.getPersonalityFunctionReloc(macho_file, object_id, record_id)) |rel| {
const target = Atom.parseRelocTarget(macho_file, .{
@@ -339,7 +339,7 @@ fn markUnwindRecords(macho_file: *MachO, object_id: u32, alive: *AtomTable) !voi
}
}
fn markEhFrameRecords(macho_file: *MachO, object_id: u32, atom_index: Atom.Index, alive: *AtomTable) !void {
fn markEhFrameRecords(macho_file: *MachO, object_id: u32, atom_index: Atom.Index, alive: *AtomTable) void {
const cpu_arch = macho_file.base.options.target.cpu.arch;
const object = &macho_file.objects.items[object_id];
var it = object.getEhFrameRecordsIterator();
@@ -348,12 +348,12 @@ fn markEhFrameRecords(macho_file: *MachO, object_id: u32, atom_index: Atom.Index
while (inner_syms_it.next()) |sym| {
const fde_offset = object.eh_frame_records_lookup.get(sym) orelse continue; // Continue in case we hit a temp symbol alias
it.seekTo(fde_offset);
const fde = (try it.next()).?;
const fde = (it.next() catch continue).?; // We don't care about the error at this point since it was already handled
const cie_ptr = fde.getCiePointerSource(object_id, macho_file, fde_offset);
const cie_offset = fde_offset + 4 - cie_ptr;
it.seekTo(cie_offset);
const cie = (try it.next()).?;
const cie = (it.next() catch continue).?; // We don't care about the error at this point since it was already handled
switch (cpu_arch) {
.aarch64 => {
@@ -377,10 +377,10 @@ fn markEhFrameRecords(macho_file: *MachO, object_id: u32, atom_index: Atom.Index
},
.x86_64 => {
const sect = object.getSourceSection(object.eh_frame_sect_id.?);
const lsda_ptr = try fde.getLsdaPointer(cie, .{
const lsda_ptr = fde.getLsdaPointer(cie, .{
.base_addr = sect.addr,
.base_offset = fde_offset,
});
}) catch continue; // We don't care about the error at this point since it was already handled
if (lsda_ptr) |lsda_address| {
// Mark LSDA record as live
const sym_index = object.getSymbolByAddress(lsda_address, null);


@@ -13,7 +13,7 @@ pub fn scanRelocs(macho_file: *MachO) !void {
const fde_offset = object.eh_frame_records_lookup.get(sym) orelse continue;
if (object.eh_frame_relocs_lookup.get(fde_offset).?.dead) continue;
it.seekTo(fde_offset);
const fde = (try it.next()).?;
const fde = (it.next() catch continue).?; // We don't care about this error since we already handled it
const cie_ptr = fde.getCiePointerSource(@intCast(object_id), macho_file, fde_offset);
const cie_offset = fde_offset + 4 - cie_ptr;
@@ -21,7 +21,7 @@ pub fn scanRelocs(macho_file: *MachO) !void {
if (!cies.contains(cie_offset)) {
try cies.putNoClobber(cie_offset, {});
it.seekTo(cie_offset);
const cie = (try it.next()).?;
const cie = (it.next() catch continue).?; // We don't care about this error since we already handled it
try cie.scanRelocs(macho_file, @as(u32, @intCast(object_id)), cie_offset);
}
}
@@ -29,7 +29,7 @@ pub fn scanRelocs(macho_file: *MachO) !void {
}
}
pub fn calcSectionSize(macho_file: *MachO, unwind_info: *const UnwindInfo) !void {
pub fn calcSectionSize(macho_file: *MachO, unwind_info: *const UnwindInfo) error{OutOfMemory}!void {
const sect_id = macho_file.eh_frame_section_index orelse return;
const sect = &macho_file.sections.items(.header)[sect_id];
sect.@"align" = 3;
@@ -59,7 +59,7 @@ pub fn calcSectionSize(macho_file: *MachO, unwind_info: *const UnwindInfo) !void
if (!is_dwarf) continue;
eh_it.seekTo(fde_record_offset);
const source_fde_record = (try eh_it.next()).?;
const source_fde_record = (eh_it.next() catch continue).?; // We already handled this error
const cie_ptr = source_fde_record.getCiePointerSource(@intCast(object_id), macho_file, fde_record_offset);
const cie_offset = fde_record_offset + 4 - cie_ptr;
@@ -67,7 +67,7 @@ pub fn calcSectionSize(macho_file: *MachO, unwind_info: *const UnwindInfo) !void
const gop = try cies.getOrPut(cie_offset);
if (!gop.found_existing) {
eh_it.seekTo(cie_offset);
const source_cie_record = (try eh_it.next()).?;
const source_cie_record = (eh_it.next() catch continue).?; // We already handled this error
gop.value_ptr.* = size;
size += source_cie_record.getSize();
}
@@ -121,7 +121,7 @@ pub fn write(macho_file: *MachO, unwind_info: *UnwindInfo) !void {
if (!is_dwarf) continue;
eh_it.seekTo(fde_record_offset);
const source_fde_record = (try eh_it.next()).?;
const source_fde_record = (eh_it.next() catch continue).?; // We already handled this error
const cie_ptr = source_fde_record.getCiePointerSource(@intCast(object_id), macho_file, fde_record_offset);
const cie_offset = fde_record_offset + 4 - cie_ptr;
@@ -129,7 +129,7 @@ pub fn write(macho_file: *MachO, unwind_info: *UnwindInfo) !void {
const gop = try cies.getOrPut(cie_offset);
if (!gop.found_existing) {
eh_it.seekTo(cie_offset);
const source_cie_record = (try eh_it.next()).?;
const source_cie_record = (eh_it.next() catch continue).?; // We already handled this error
var cie_record = try source_cie_record.toOwned(gpa);
try cie_record.relocate(macho_file, @as(u32, @intCast(object_id)), .{
.source_offset = cie_offset,
@@ -164,17 +164,17 @@ pub fn write(macho_file: *MachO, unwind_info: *UnwindInfo) !void {
eh_frame_offset + 4 - fde_record.getCiePointer(),
).?;
const eh_frame_sect = object.getSourceSection(object.eh_frame_sect_id.?);
const source_lsda_ptr = try fde_record.getLsdaPointer(cie_record, .{
const source_lsda_ptr = fde_record.getLsdaPointer(cie_record, .{
.base_addr = eh_frame_sect.addr,
.base_offset = fde_record_offset,
});
}) catch continue; // We already handled this error
if (source_lsda_ptr) |ptr| {
const sym_index = object.getSymbolByAddress(ptr, null);
const sym = object.symtab[sym_index];
try fde_record.setLsdaPointer(cie_record, sym.n_value, .{
fde_record.setLsdaPointer(cie_record, sym.n_value, .{
.base_addr = sect.addr,
.base_offset = eh_frame_offset,
});
}) catch continue; // We already handled this error
}
},
else => unreachable,
@@ -191,10 +191,10 @@ pub fn write(macho_file: *MachO, unwind_info: *UnwindInfo) !void {
const cie_record = eh_records.get(
eh_frame_offset + 4 - fde_record.getCiePointer(),
).?;
const lsda_ptr = try fde_record.getLsdaPointer(cie_record, .{
const lsda_ptr = fde_record.getLsdaPointer(cie_record, .{
.base_addr = sect.addr,
.base_offset = eh_frame_offset,
});
}) catch continue; // We already handled this error
if (lsda_ptr) |ptr| {
record.lsda = ptr - seg.vmaddr;
}
@@ -588,7 +588,7 @@ pub const Iterator = struct {
var size = try reader.readIntLittle(u32);
if (size == 0xFFFFFFFF) {
log.err("MachO doesn't support 64bit DWARF CFI __eh_frame records", .{});
log.debug("MachO doesn't support 64bit DWARF CFI __eh_frame records", .{});
return error.BadDwarfCfi;
}


@@ -388,7 +388,19 @@ pub fn linkWithZld(
}
for (macho_file.objects.items, 0..) |*object, object_id| {
try object.splitIntoAtoms(macho_file, @as(u32, @intCast(object_id)));
object.splitIntoAtoms(macho_file, @as(u32, @intCast(object_id))) catch |err| switch (err) {
error.MissingEhFrameSection => try macho_file.reportParseError(
object.name,
"missing section: '__TEXT,__eh_frame' is required but could not be found",
.{},
),
error.BadDwarfCfi => try macho_file.reportParseError(
object.name,
"invalid DWARF: failed to parse '__TEXT,__eh_frame' section",
.{},
),
else => |e| return e,
};
}
if (gc_sections) {
@@ -433,7 +445,7 @@ pub fn linkWithZld(
try unwind_info.collect(macho_file);
try eh_frame.calcSectionSize(macho_file, &unwind_info);
try unwind_info.calcSectionSize(macho_file);
unwind_info.calcSectionSize(macho_file);
try pruneAndSortSections(macho_file);
try createSegments(macho_file);