macho: redo input file parsing in prep for multithreading
commit cba3389d90 (parent 1fc42ed3e7)

src/Compilation.zig

@@ -105,8 +105,9 @@ win32_resource_table: if (dev.env.supports(.win32_resource)) std.AutoArrayHashMa
     pub fn deinit(_: @This(), _: Allocator) void {}
 } = .{},

-link_error_flags: link.File.ErrorFlags = .{},
 link_errors: std.ArrayListUnmanaged(link.File.ErrorMsg) = .{},
+link_errors_mutex: std.Thread.Mutex = .{},
+link_error_flags: link.File.ErrorFlags = .{},
 lld_errors: std.ArrayListUnmanaged(LldError) = .{},

 work_queues: [

@@ -3067,7 +3068,6 @@ pub fn totalErrorCount(comp: *Compilation) u32 {
         total += @intFromBool(comp.link_error_flags.no_entry_point_found);
     }
     total += @intFromBool(comp.link_error_flags.missing_libc);
-
    total += comp.link_errors.items.len;

    // Compile log errors only count if there are no other errors.

src/link.zig (68 changes)

@@ -439,6 +439,58 @@ pub const File = struct {
         }
     }

+    pub const ErrorWithNotes = struct {
+        base: *const File,
+
+        /// Allocated index in base.errors array.
+        index: usize,
+
+        /// Next available note slot.
+        note_slot: usize = 0,
+
+        pub fn addMsg(
+            err: ErrorWithNotes,
+            comptime format: []const u8,
+            args: anytype,
+        ) error{OutOfMemory}!void {
+            const gpa = err.base.comp.gpa;
+            const err_msg = &err.base.comp.link_errors.items[err.index];
+            err_msg.msg = try std.fmt.allocPrint(gpa, format, args);
+        }
+
+        pub fn addNote(
+            err: *ErrorWithNotes,
+            comptime format: []const u8,
+            args: anytype,
+        ) error{OutOfMemory}!void {
+            const gpa = err.base.comp.gpa;
+            const err_msg = &err.base.comp.link_errors.items[err.index];
+            assert(err.note_slot < err_msg.notes.len);
+            err_msg.notes[err.note_slot] = .{ .msg = try std.fmt.allocPrint(gpa, format, args) };
+            err.note_slot += 1;
+        }
+    };
+
+    pub fn addErrorWithNotes(base: *const File, note_count: usize) error{OutOfMemory}!ErrorWithNotes {
+        base.comp.link_errors_mutex.lock();
+        defer base.comp.link_errors_mutex.unlock();
+        const gpa = base.comp.gpa;
+        try base.comp.link_errors.ensureUnusedCapacity(gpa, 1);
+        return base.addErrorWithNotesAssumeCapacity(note_count);
+    }
+
+    pub fn addErrorWithNotesAssumeCapacity(base: *const File, note_count: usize) error{OutOfMemory}!ErrorWithNotes {
+        const gpa = base.comp.gpa;
+        const index = base.comp.link_errors.items.len;
+        const err = base.comp.link_errors.addOneAssumeCapacity();
+        err.* = .{ .msg = undefined, .notes = try gpa.alloc(ErrorMsg, note_count) };
+        return .{ .base = base, .index = index };
+    }
+
+    pub fn hasErrors(base: *const File) bool {
+        return base.comp.link_errors.items.len > 0 or base.comp.link_error_flags.isSet();
+    }
+
     pub fn releaseLock(self: *File) void {
         if (self.lock) |*lock| {
             lock.release();

@@ -874,9 +926,23 @@ pub const File = struct {
         }
     };

-    pub const ErrorFlags = struct {
+    pub const ErrorFlags = packed struct {
         no_entry_point_found: bool = false,
         missing_libc: bool = false,
+
+        const Int = blk: {
+            const bits = @typeInfo(@This()).Struct.fields.len;
+            break :blk @Type(.{
+                .Int = .{
+                    .signedness = .unsigned,
+                    .bits = bits,
+                },
+            });
+        };
+
+        fn isSet(ef: ErrorFlags) bool {
+            return @as(Int, @bitCast(ef)) > 0;
+        }
     };

     pub const ErrorMsg = struct {

src/link/Elf.zig (117 changes)

@@ -995,12 +995,12 @@ pub fn growAllocSection(self: *Elf, shdr_index: u32, needed_size: u64) !void {
     if (maybe_phdr) |phdr| {
         const mem_capacity = self.allocatedVirtualSize(phdr.p_vaddr);
         if (needed_size > mem_capacity) {
-            var err = try self.addErrorWithNotes(2);
-            try err.addMsg(self, "fatal linker error: cannot expand load segment phdr({d}) in virtual memory", .{
+            var err = try self.base.addErrorWithNotes(2);
+            try err.addMsg("fatal linker error: cannot expand load segment phdr({d}) in virtual memory", .{
                 self.phdr_to_shdr_table.get(shdr_index).?,
             });
-            try err.addNote(self, "TODO: emit relocations to memory locations in self-hosted backends", .{});
-            try err.addNote(self, "as a workaround, try increasing pre-allocated virtual memory of each segment", .{});
+            try err.addNote("TODO: emit relocations to memory locations in self-hosted backends", .{});
+            try err.addNote("as a workaround, try increasing pre-allocated virtual memory of each segment", .{});
         }

         phdr.p_memsz = needed_size;

@@ -1276,7 +1276,7 @@ pub fn flushModule(self: *Elf, arena: Allocator, tid: Zcu.PerThread.Id, prog_nod
         };
     }

-    if (comp.link_errors.items.len > 0) return error.FlushFailure;
+    if (self.base.hasErrors()) return error.FlushFailure;

     // Dedup shared objects
     {

@@ -1423,7 +1423,7 @@ pub fn flushModule(self: *Elf, arena: Allocator, tid: Zcu.PerThread.Id, prog_nod
         try self.writeElfHeader();
     }

-    if (comp.link_errors.items.len > 0) return error.FlushFailure;
+    if (self.base.hasErrors()) return error.FlushFailure;
 }

 /// --verbose-link output

@@ -2852,9 +2852,9 @@ fn writePhdrTable(self: *Elf) !void {
 }

 pub fn writeElfHeader(self: *Elf) !void {
-    const comp = self.base.comp;
-    if (comp.link_errors.items.len > 0) return; // We had errors, so skip flushing to render the output unusable
+    if (self.base.hasErrors()) return; // We had errors, so skip flushing to render the output unusable

+    const comp = self.base.comp;
     var hdr_buf: [@sizeOf(elf.Elf64_Ehdr)]u8 = undefined;

     var index: usize = 0;

@@ -4298,9 +4298,9 @@ fn allocatePhdrTable(self: *Elf) error{OutOfMemory}!void {
         // (revisit getMaxNumberOfPhdrs())
         // 2. shift everything in file to free more space for EHDR + PHDR table
         // TODO verify `getMaxNumberOfPhdrs()` is accurate and convert this into no-op
-        var err = try self.addErrorWithNotes(1);
-        try err.addMsg(self, "fatal linker error: not enough space reserved for EHDR and PHDR table", .{});
-        try err.addNote(self, "required 0x{x}, available 0x{x}", .{ needed_size, available_space });
+        var err = try self.base.addErrorWithNotes(1);
+        try err.addMsg("fatal linker error: not enough space reserved for EHDR and PHDR table", .{});
+        try err.addNote("required 0x{x}, available 0x{x}", .{ needed_size, available_space });
     }

     phdr_table_load.p_filesz = needed_size + ehsize;

@@ -5863,56 +5863,6 @@ pub fn tlsAddress(self: *Elf) i64 {
     return @intCast(phdr.p_vaddr);
 }

-const ErrorWithNotes = struct {
-    /// Allocated index in comp.link_errors array.
-    index: usize,
-
-    /// Next available note slot.
-    note_slot: usize = 0,
-
-    pub fn addMsg(
-        err: ErrorWithNotes,
-        elf_file: *Elf,
-        comptime format: []const u8,
-        args: anytype,
-    ) error{OutOfMemory}!void {
-        const comp = elf_file.base.comp;
-        const gpa = comp.gpa;
-        const err_msg = &comp.link_errors.items[err.index];
-        err_msg.msg = try std.fmt.allocPrint(gpa, format, args);
-    }
-
-    pub fn addNote(
-        err: *ErrorWithNotes,
-        elf_file: *Elf,
-        comptime format: []const u8,
-        args: anytype,
-    ) error{OutOfMemory}!void {
-        const comp = elf_file.base.comp;
-        const gpa = comp.gpa;
-        const err_msg = &comp.link_errors.items[err.index];
-        assert(err.note_slot < err_msg.notes.len);
-        err_msg.notes[err.note_slot] = .{ .msg = try std.fmt.allocPrint(gpa, format, args) };
-        err.note_slot += 1;
-    }
-};
-
-pub fn addErrorWithNotes(self: *Elf, note_count: usize) error{OutOfMemory}!ErrorWithNotes {
-    const comp = self.base.comp;
-    const gpa = comp.gpa;
-    try comp.link_errors.ensureUnusedCapacity(gpa, 1);
-    return self.addErrorWithNotesAssumeCapacity(note_count);
-}
-
-fn addErrorWithNotesAssumeCapacity(self: *Elf, note_count: usize) error{OutOfMemory}!ErrorWithNotes {
-    const comp = self.base.comp;
-    const gpa = comp.gpa;
-    const index = comp.link_errors.items.len;
-    const err = comp.link_errors.addOneAssumeCapacity();
-    err.* = .{ .msg = undefined, .notes = try gpa.alloc(link.File.ErrorMsg, note_count) };
-    return .{ .index = index };
-}
-
 pub fn getShString(self: Elf, off: u32) [:0]const u8 {
     assert(off < self.shstrtab.items.len);
     return mem.sliceTo(@as([*:0]const u8, @ptrCast(self.shstrtab.items.ptr + off)), 0);

@@ -5940,11 +5890,10 @@ pub fn insertDynString(self: *Elf, name: []const u8) error{OutOfMemory}!u32 {
 }

 fn reportUndefinedSymbols(self: *Elf, undefs: anytype) !void {
-    const comp = self.base.comp;
-    const gpa = comp.gpa;
+    const gpa = self.base.comp.gpa;
     const max_notes = 4;

-    try comp.link_errors.ensureUnusedCapacity(gpa, undefs.count());
+    try self.base.comp.link_errors.ensureUnusedCapacity(gpa, undefs.count());

     var it = undefs.iterator();
     while (it.next()) |entry| {

@@ -5953,18 +5902,18 @@ fn reportUndefinedSymbols(self: *Elf, undefs: anytype) !void {
         const natoms = @min(atoms.len, max_notes);
         const nnotes = natoms + @intFromBool(atoms.len > max_notes);

-        var err = try self.addErrorWithNotesAssumeCapacity(nnotes);
-        try err.addMsg(self, "undefined symbol: {s}", .{self.symbol(undef_index).name(self)});
+        var err = try self.base.addErrorWithNotesAssumeCapacity(nnotes);
+        try err.addMsg("undefined symbol: {s}", .{self.symbol(undef_index).name(self)});

         for (atoms[0..natoms]) |atom_index| {
             const atom_ptr = self.atom(atom_index).?;
             const file_ptr = self.file(atom_ptr.file_index).?;
-            try err.addNote(self, "referenced by {s}:{s}", .{ file_ptr.fmtPath(), atom_ptr.name(self) });
+            try err.addNote("referenced by {s}:{s}", .{ file_ptr.fmtPath(), atom_ptr.name(self) });
         }

         if (atoms.len > max_notes) {
             const remaining = atoms.len - max_notes;
-            try err.addNote(self, "referenced {d} more times", .{remaining});
+            try err.addNote("referenced {d} more times", .{remaining});
         }
     }
 }

@@ -5978,19 +5927,19 @@ fn reportDuplicates(self: *Elf, dupes: anytype) error{ HasDuplicates, OutOfMemor
         const notes = entry.value_ptr.*;
         const nnotes = @min(notes.items.len, max_notes) + @intFromBool(notes.items.len > max_notes);

-        var err = try self.addErrorWithNotes(nnotes + 1);
-        try err.addMsg(self, "duplicate symbol definition: {s}", .{sym.name(self)});
-        try err.addNote(self, "defined by {}", .{sym.file(self).?.fmtPath()});
+        var err = try self.base.addErrorWithNotes(nnotes + 1);
+        try err.addMsg("duplicate symbol definition: {s}", .{sym.name(self)});
+        try err.addNote("defined by {}", .{sym.file(self).?.fmtPath()});

         var inote: usize = 0;
         while (inote < @min(notes.items.len, max_notes)) : (inote += 1) {
             const file_ptr = self.file(notes.items[inote]).?;
-            try err.addNote(self, "defined by {}", .{file_ptr.fmtPath()});
+            try err.addNote("defined by {}", .{file_ptr.fmtPath()});
         }

         if (notes.items.len > max_notes) {
             const remaining = notes.items.len - max_notes;
-            try err.addNote(self, "defined {d} more times", .{remaining});
+            try err.addNote("defined {d} more times", .{remaining});
         }

         has_dupes = true;

@@ -6005,16 +5954,16 @@ fn reportMissingLibraryError(
     comptime format: []const u8,
     args: anytype,
 ) error{OutOfMemory}!void {
-    var err = try self.addErrorWithNotes(checked_paths.len);
-    try err.addMsg(self, format, args);
+    var err = try self.base.addErrorWithNotes(checked_paths.len);
+    try err.addMsg(format, args);
     for (checked_paths) |path| {
-        try err.addNote(self, "tried {s}", .{path});
+        try err.addNote("tried {s}", .{path});
     }
 }

 pub fn reportUnsupportedCpuArch(self: *Elf) error{OutOfMemory}!void {
-    var err = try self.addErrorWithNotes(0);
-    try err.addMsg(self, "fatal linker error: unsupported CPU architecture {s}", .{
+    var err = try self.base.addErrorWithNotes(0);
+    try err.addMsg("fatal linker error: unsupported CPU architecture {s}", .{
         @tagName(self.getTarget().cpu.arch),
     });
 }

@@ -6025,9 +5974,9 @@ pub fn reportParseError(
     comptime format: []const u8,
     args: anytype,
 ) error{OutOfMemory}!void {
-    var err = try self.addErrorWithNotes(1);
-    try err.addMsg(self, format, args);
-    try err.addNote(self, "while parsing {s}", .{path});
+    var err = try self.base.addErrorWithNotes(1);
+    try err.addMsg(format, args);
+    try err.addNote("while parsing {s}", .{path});
 }

 pub fn reportParseError2(

@@ -6036,9 +5985,9 @@ pub fn reportParseError2(
     comptime format: []const u8,
     args: anytype,
 ) error{OutOfMemory}!void {
-    var err = try self.addErrorWithNotes(1);
-    try err.addMsg(self, format, args);
-    try err.addNote(self, "while parsing {}", .{self.file(file_index).?.fmtPath()});
+    var err = try self.base.addErrorWithNotes(1);
+    try err.addMsg(format, args);
+    try err.addNote("while parsing {}", .{self.file(file_index).?.fmtPath()});
 }

 const FormatShdrCtx = struct {

src/link/Elf/Atom.zig

@@ -631,15 +631,12 @@ fn dataType(symbol: *const Symbol, elf_file: *Elf) u2 {
 }

 fn reportUnhandledRelocError(self: Atom, rel: elf.Elf64_Rela, elf_file: *Elf) RelocError!void {
-    var err = try elf_file.addErrorWithNotes(1);
-    try err.addMsg(elf_file, "fatal linker error: unhandled relocation type {} at offset 0x{x}", .{
+    var err = try elf_file.base.addErrorWithNotes(1);
+    try err.addMsg("fatal linker error: unhandled relocation type {} at offset 0x{x}", .{
         relocation.fmtRelocType(rel.r_type(), elf_file.getTarget().cpu.arch),
         rel.r_offset,
     });
-    try err.addNote(elf_file, "in {}:{s}", .{
-        self.file(elf_file).?.fmtPath(),
-        self.name(elf_file),
-    });
+    try err.addNote("in {}:{s}", .{ self.file(elf_file).?.fmtPath(), self.name(elf_file) });
     return error.RelocFailure;
 }

@@ -649,15 +646,12 @@ fn reportTextRelocError(
     rel: elf.Elf64_Rela,
     elf_file: *Elf,
 ) RelocError!void {
-    var err = try elf_file.addErrorWithNotes(1);
-    try err.addMsg(elf_file, "relocation at offset 0x{x} against symbol '{s}' cannot be used", .{
+    var err = try elf_file.base.addErrorWithNotes(1);
+    try err.addMsg("relocation at offset 0x{x} against symbol '{s}' cannot be used", .{
         rel.r_offset,
         symbol.name(elf_file),
     });
-    try err.addNote(elf_file, "in {}:{s}", .{
-        self.file(elf_file).?.fmtPath(),
-        self.name(elf_file),
-    });
+    try err.addNote("in {}:{s}", .{ self.file(elf_file).?.fmtPath(), self.name(elf_file) });
     return error.RelocFailure;
 }

@@ -667,16 +661,13 @@ fn reportPicError(
     rel: elf.Elf64_Rela,
     elf_file: *Elf,
 ) RelocError!void {
-    var err = try elf_file.addErrorWithNotes(2);
-    try err.addMsg(elf_file, "relocation at offset 0x{x} against symbol '{s}' cannot be used", .{
+    var err = try elf_file.base.addErrorWithNotes(2);
+    try err.addMsg("relocation at offset 0x{x} against symbol '{s}' cannot be used", .{
         rel.r_offset,
         symbol.name(elf_file),
     });
-    try err.addNote(elf_file, "in {}:{s}", .{
-        self.file(elf_file).?.fmtPath(),
-        self.name(elf_file),
-    });
-    try err.addNote(elf_file, "recompile with -fPIC", .{});
+    try err.addNote("in {}:{s}", .{ self.file(elf_file).?.fmtPath(), self.name(elf_file) });
+    try err.addNote("recompile with -fPIC", .{});
     return error.RelocFailure;
 }

@@ -686,16 +677,13 @@ fn reportNoPicError(
     rel: elf.Elf64_Rela,
     elf_file: *Elf,
 ) RelocError!void {
-    var err = try elf_file.addErrorWithNotes(2);
-    try err.addMsg(elf_file, "relocation at offset 0x{x} against symbol '{s}' cannot be used", .{
+    var err = try elf_file.base.addErrorWithNotes(2);
+    try err.addMsg("relocation at offset 0x{x} against symbol '{s}' cannot be used", .{
         rel.r_offset,
         symbol.name(elf_file),
     });
-    try err.addNote(elf_file, "in {}:{s}", .{
-        self.file(elf_file).?.fmtPath(),
-        self.name(elf_file),
-    });
-    try err.addNote(elf_file, "recompile with -fno-PIC", .{});
+    try err.addNote("in {}:{s}", .{ self.file(elf_file).?.fmtPath(), self.name(elf_file) });
+    try err.addNote("recompile with -fno-PIC", .{});
     return error.RelocFailure;
 }

@@ -1332,9 +1320,9 @@ const x86_64 = struct {
                 try cwriter.writeInt(i32, @as(i32, @intCast(S_ + A - P)), .little);
             } else {
                 x86_64.relaxGotPcTlsDesc(code[r_offset - 3 ..]) catch {
-                    var err = try elf_file.addErrorWithNotes(1);
-                    try err.addMsg(elf_file, "could not relax {s}", .{@tagName(r_type)});
-                    try err.addNote(elf_file, "in {}:{s} at offset 0x{x}", .{
+                    var err = try elf_file.base.addErrorWithNotes(1);
+                    try err.addMsg("could not relax {s}", .{@tagName(r_type)});
+                    try err.addNote("in {}:{s} at offset 0x{x}", .{
                         atom.file(elf_file).?.fmtPath(),
                         atom.name(elf_file),
                         rel.r_offset,

@@ -1479,12 +1467,12 @@ const x86_64 = struct {
             },

             else => {
-                var err = try elf_file.addErrorWithNotes(1);
-                try err.addMsg(elf_file, "TODO: rewrite {} when followed by {}", .{
+                var err = try elf_file.base.addErrorWithNotes(1);
+                try err.addMsg("TODO: rewrite {} when followed by {}", .{
                     relocation.fmtRelocType(rels[0].r_type(), .x86_64),
                     relocation.fmtRelocType(rels[1].r_type(), .x86_64),
                 });
-                try err.addNote(elf_file, "in {}:{s} at offset 0x{x}", .{
+                try err.addNote("in {}:{s} at offset 0x{x}", .{
                     self.file(elf_file).?.fmtPath(),
                     self.name(elf_file),
                     rels[0].r_offset,

@@ -1534,12 +1522,12 @@ const x86_64 = struct {
             },

             else => {
-                var err = try elf_file.addErrorWithNotes(1);
-                try err.addMsg(elf_file, "TODO: rewrite {} when followed by {}", .{
+                var err = try elf_file.base.addErrorWithNotes(1);
+                try err.addMsg("TODO: rewrite {} when followed by {}", .{
                     relocation.fmtRelocType(rels[0].r_type(), .x86_64),
                     relocation.fmtRelocType(rels[1].r_type(), .x86_64),
                 });
-                try err.addNote(elf_file, "in {}:{s} at offset 0x{x}", .{
+                try err.addNote("in {}:{s} at offset 0x{x}", .{
                     self.file(elf_file).?.fmtPath(),
                     self.name(elf_file),
                     rels[0].r_offset,

@@ -1630,12 +1618,12 @@ const x86_64 = struct {
             },

             else => {
-                var err = try elf_file.addErrorWithNotes(1);
-                try err.addMsg(elf_file, "fatal linker error: rewrite {} when followed by {}", .{
+                var err = try elf_file.base.addErrorWithNotes(1);
+                try err.addMsg("fatal linker error: rewrite {} when followed by {}", .{
                     relocation.fmtRelocType(rels[0].r_type(), .x86_64),
                     relocation.fmtRelocType(rels[1].r_type(), .x86_64),
                 });
-                try err.addNote(elf_file, "in {}:{s} at offset 0x{x}", .{
+                try err.addNote("in {}:{s} at offset 0x{x}", .{
                     self.file(elf_file).?.fmtPath(),
                     self.name(elf_file),
                     rels[0].r_offset,

@@ -1824,9 +1812,9 @@ const aarch64 = struct {
                 aarch64_util.writeAdrpInst(pages, code);
             } else {
                 // TODO: relax
-                var err = try elf_file.addErrorWithNotes(1);
-                try err.addMsg(elf_file, "TODO: relax ADR_GOT_PAGE", .{});
-                try err.addNote(elf_file, "in {}:{s} at offset 0x{x}", .{
+                var err = try elf_file.base.addErrorWithNotes(1);
+                try err.addMsg("TODO: relax ADR_GOT_PAGE", .{});
+                try err.addNote("in {}:{s} at offset 0x{x}", .{
                     atom.file(elf_file).?.fmtPath(),
                     atom.name(elf_file),
                     r_offset,

@@ -2118,9 +2106,9 @@ const riscv = struct {
             if (S == atom_addr + @as(i64, @intCast(pair.r_offset))) break pair;
         } else {
             // TODO: implement searching forward
-            var err = try elf_file.addErrorWithNotes(1);
-            try err.addMsg(elf_file, "TODO: find HI20 paired reloc scanning forward", .{});
-            try err.addNote(elf_file, "in {}:{s} at offset 0x{x}", .{
+            var err = try elf_file.base.addErrorWithNotes(1);
+            try err.addMsg("TODO: find HI20 paired reloc scanning forward", .{});
+            try err.addNote("in {}:{s} at offset 0x{x}", .{
                 atom.file(elf_file).?.fmtPath(),
                 atom.name(elf_file),
                 rel.r_offset,

src/link/Elf/Object.zig

@@ -704,9 +704,9 @@ pub fn initMergeSections(self: *Object, elf_file: *Elf) !void {
                 var end = start;
                 while (end < data.len - sh_entsize and !isNull(data[end .. end + sh_entsize])) : (end += sh_entsize) {}
                 if (!isNull(data[end .. end + sh_entsize])) {
-                    var err = try elf_file.addErrorWithNotes(1);
-                    try err.addMsg(elf_file, "string not null terminated", .{});
-                    try err.addNote(elf_file, "in {}:{s}", .{ self.fmtPath(), atom_ptr.name(elf_file) });
+                    var err = try elf_file.base.addErrorWithNotes(1);
+                    try err.addMsg("string not null terminated", .{});
+                    try err.addNote("in {}:{s}", .{ self.fmtPath(), atom_ptr.name(elf_file) });
                     return error.MalformedObject;
                 }
                 end += sh_entsize;

@@ -719,9 +719,9 @@ pub fn initMergeSections(self: *Object, elf_file: *Elf) !void {
             const sh_entsize: u32 = @intCast(shdr.sh_entsize);
             if (sh_entsize == 0) continue; // Malformed, don't split but don't error out
             if (shdr.sh_size % sh_entsize != 0) {
-                var err = try elf_file.addErrorWithNotes(1);
-                try err.addMsg(elf_file, "size not a multiple of sh_entsize", .{});
-                try err.addNote(elf_file, "in {}:{s}", .{ self.fmtPath(), atom_ptr.name(elf_file) });
+                var err = try elf_file.base.addErrorWithNotes(1);
+                try err.addMsg("size not a multiple of sh_entsize", .{});
+                try err.addNote("in {}:{s}", .{ self.fmtPath(), atom_ptr.name(elf_file) });
                 return error.MalformedObject;
             }

@@ -779,10 +779,10 @@ pub fn resolveMergeSubsections(self: *Object, elf_file: *Elf) !void {
         const imsec = elf_file.inputMergeSection(imsec_index) orelse continue;
         if (imsec.offsets.items.len == 0) continue;
         const msub_index, const offset = imsec.findSubsection(@intCast(esym.st_value)) orelse {
-            var err = try elf_file.addErrorWithNotes(2);
-            try err.addMsg(elf_file, "invalid symbol value: {x}", .{esym.st_value});
-            try err.addNote(elf_file, "for symbol {s}", .{sym.name(elf_file)});
-            try err.addNote(elf_file, "in {}", .{self.fmtPath()});
+            var err = try elf_file.base.addErrorWithNotes(2);
+            try err.addMsg("invalid symbol value: {x}", .{esym.st_value});
+            try err.addNote("for symbol {s}", .{sym.name(elf_file)});
+            try err.addNote("in {}", .{self.fmtPath()});
             return error.MalformedObject;
         };

@@ -804,9 +804,9 @@ pub fn resolveMergeSubsections(self: *Object, elf_file: *Elf) !void {
         const imsec = elf_file.inputMergeSection(imsec_index) orelse continue;
         if (imsec.offsets.items.len == 0) continue;
         const msub_index, const offset = imsec.findSubsection(@intCast(@as(i64, @intCast(esym.st_value)) + rel.r_addend)) orelse {
-            var err = try elf_file.addErrorWithNotes(1);
-            try err.addMsg(elf_file, "invalid relocation at offset 0x{x}", .{rel.r_offset});
-            try err.addNote(elf_file, "in {}:{s}", .{ self.fmtPath(), atom_ptr.name(elf_file) });
+            var err = try elf_file.base.addErrorWithNotes(1);
+            try err.addMsg("invalid relocation at offset 0x{x}", .{rel.r_offset});
+            try err.addNote("in {}:{s}", .{ self.fmtPath(), atom_ptr.name(elf_file) });
             return error.MalformedObject;
         };
         const msub = elf_file.mergeSubsection(msub_index);

src/link/Elf/eh_frame.zig

@@ -591,12 +591,12 @@ const riscv = struct {
 };

 fn reportInvalidReloc(rec: anytype, elf_file: *Elf, rel: elf.Elf64_Rela) !void {
-    var err = try elf_file.addErrorWithNotes(1);
-    try err.addMsg(elf_file, "invalid relocation type {} at offset 0x{x}", .{
+    var err = try elf_file.base.addErrorWithNotes(1);
+    try err.addMsg("invalid relocation type {} at offset 0x{x}", .{
         relocation.fmtRelocType(rel.r_type(), elf_file.getTarget().cpu.arch),
         rel.r_offset,
     });
-    try err.addNote(elf_file, "in {}:.eh_frame", .{elf_file.file(rec.file_index).?.fmtPath()});
+    try err.addNote("in {}:.eh_frame", .{elf_file.file(rec.file_index).?.fmtPath()});
     return error.RelocFailure;
 }

src/link/Elf/relocatable.zig

@@ -29,7 +29,7 @@ pub fn flushStaticLib(elf_file: *Elf, comp: *Compilation, module_obj_path: ?[]co
         };
     }

-    if (comp.link_errors.items.len > 0) return error.FlushFailure;
+    if (elf_file.base.hasErrors()) return error.FlushFailure;

     // First, we flush relocatable object file generated with our backends.
     if (elf_file.zigObjectPtr()) |zig_object| {

@@ -146,7 +146,7 @@ pub fn flushStaticLib(elf_file: *Elf, comp: *Compilation, module_obj_path: ?[]co
     try elf_file.base.file.?.setEndPos(total_size);
     try elf_file.base.file.?.pwriteAll(buffer.items, 0);

-    if (comp.link_errors.items.len > 0) return error.FlushFailure;
+    if (elf_file.base.hasErrors()) return error.FlushFailure;
 }

 pub fn flushObject(elf_file: *Elf, comp: *Compilation, module_obj_path: ?[]const u8) link.File.FlushError!void {

@@ -177,7 +177,7 @@ pub fn flushObject(elf_file: *Elf, comp: *Compilation, module_obj_path: ?[]const
         };
     }

-    if (comp.link_errors.items.len > 0) return error.FlushFailure;
+    if (elf_file.base.hasErrors()) return error.FlushFailure;

     // Now, we are ready to resolve the symbols across all input files.
     // We will first resolve the files in the ZigObject, next in the parsed

@@ -216,7 +216,7 @@ pub fn flushObject(elf_file: *Elf, comp: *Compilation, module_obj_path: ?[]const
     try elf_file.writeShdrTable();
     try elf_file.writeElfHeader();

-    if (comp.link_errors.items.len > 0) return error.FlushFailure;
+    if (elf_file.base.hasErrors()) return error.FlushFailure;
 }

 fn parsePositional(elf_file: *Elf, path: []const u8) Elf.ParseError!void {

src/link/MachO.zig

@@ -395,17 +395,11 @@ pub fn flushModule(self: *MachO, arena: Allocator, tid: Zcu.PerThread.Id, prog_n
     }

     for (positionals.items) |obj| {
-        self.parsePositional(obj.path, obj.must_link) catch |err| switch (err) {
-            error.MalformedObject,
-            error.MalformedArchive,
-            error.MalformedDylib,
-            error.InvalidCpuArch,
-            error.InvalidTarget,
-            => continue, // already reported
-            error.UnknownFileType => try self.reportParseError(obj.path, "unknown file type for an object file", .{}),
+        self.classifyInputFile(obj.path, .{ .path = obj.path }, obj.must_link) catch |err| switch (err) {
+            error.UnknownFileType => try self.reportParseError(obj.path, "unknown file type for an input file", .{}),
             else => |e| try self.reportParseError(
                 obj.path,
-                "unexpected error: parsing input file failed with error {s}",
+                "unexpected error: reading input file failed with error {s}",
                 .{@errorName(e)},
             ),
         };

@@ -448,15 +442,11 @@ pub fn flushModule(self: *MachO, arena: Allocator, tid: Zcu.PerThread.Id, prog_n
     };

     for (system_libs.items) |lib| {
-        self.parseLibrary(lib, false) catch |err| switch (err) {
-            error.MalformedArchive,
-            error.MalformedDylib,
-            error.InvalidCpuArch,
-            => continue, // already reported
-            error.UnknownFileType => try self.reportParseError(lib.path, "unknown file type for a library", .{}),
+        self.classifyInputFile(lib.path, lib, false) catch |err| switch (err) {
+            error.UnknownFileType => try self.reportParseError(lib.path, "unknown file type for an input file", .{}),
             else => |e| try self.reportParseError(
                 lib.path,
-                "unexpected error: parsing library failed with error {s}",
+                "unexpected error: parsing input file failed with error {s}",
                 .{@errorName(e)},
             ),
         };

@@ -469,13 +459,8 @@ pub fn flushModule(self: *MachO, arena: Allocator, tid: Zcu.PerThread.Id, prog_n
         break :blk null;
     };
    if (compiler_rt_path) |path| {
-        self.parsePositional(path, false) catch |err| switch (err) {
-            error.MalformedObject,
-            error.MalformedArchive,
-            error.InvalidCpuArch,
-            error.InvalidTarget,
-            => {}, // already reported
-            error.UnknownFileType => try self.reportParseError(path, "unknown file type for a library", .{}),
+        self.classifyInputFile(path, .{ .path = path }, false) catch |err| switch (err) {
+            error.UnknownFileType => try self.reportParseError(path, "unknown file type for an input file", .{}),
             else => |e| try self.reportParseError(
                 path,
                 "unexpected error: parsing input file failed with error {s}",

@@ -484,30 +469,20 @@ pub fn flushModule(self: *MachO, arena: Allocator, tid: Zcu.PerThread.Id, prog_n
        };
    }

-    if (comp.link_errors.items.len > 0) return error.FlushFailure;
+    if (self.base.hasErrors()) return error.FlushFailure;

-    for (self.dylibs.items) |index| {
-        self.getFile(index).?.dylib.umbrella = index;
-    }
-
-    if (self.dylibs.items.len > 0) {
-        self.parseDependentDylibs() catch |err| {
-            switch (err) {
-                error.MissingLibraryDependencies => {},
-                else => |e| try self.reportUnexpectedError(
-                    "unexpected error while parsing dependent libraries: {s}",
-                    .{@errorName(e)},
-                ),
-            }
-            return error.FlushFailure;
-        };
-    }
-
-    for (self.dylibs.items) |index| {
-        const dylib = self.getFile(index).?.dylib;
-        if (!dylib.explicit and !dylib.hoisted) continue;
-        try dylib.initSymbols(self);
-    }
+    try self.parseInputFiles();
+    self.parseDependentDylibs() catch |err| {
+        switch (err) {
+            error.MissingLibraryDependencies => {},
+            else => |e| try self.reportUnexpectedError(
+                "unexpected error while parsing dependent libraries: {s}",
+                .{@errorName(e)},
+            ),
+        }
+    };
+
+    if (self.base.hasErrors()) return error.FlushFailure;

    {
        const index = @as(File.Index, @intCast(try self.files.addOne(gpa)));

@@ -841,181 +816,173 @@ pub fn resolveLibSystem(
     });
 }

 pub const ParseError = error{
     MalformedObject,
     MalformedArchive,
     MalformedDylib,
     MalformedTbd,
     NotLibStub,
     InvalidCpuArch,
     InvalidTarget,
     InvalidTargetFatLibrary,
     IncompatibleDylibVersion,
     OutOfMemory,
     Overflow,
     InputOutput,
     EndOfStream,
     FileSystem,
     NotSupported,
     Unhandled,
     UnknownFileType,
 } || fs.File.SeekError || fs.File.OpenError || fs.File.ReadError || tapi.TapiError;

-pub fn parsePositional(self: *MachO, path: []const u8, must_link: bool) ParseError!void {
+pub fn classifyInputFile(self: *MachO, path: []const u8, lib: SystemLib, must_link: bool) !void {
     const tracy = trace(@src());
     defer tracy.end();
-    if (try Object.isObject(path)) {
-        try self.parseObject(path);
-    } else {
-        try self.parseLibrary(.{ .path = path }, must_link);
-    }
-}
-
-fn parseLibrary(self: *MachO, lib: SystemLib, must_link: bool) ParseError!void {
-    const tracy = trace(@src());
-    defer tracy.end();
-    if (try fat.isFatLibrary(lib.path)) {
-        const fat_arch = try self.parseFatLibrary(lib.path);
-        if (try Archive.isArchive(lib.path, fat_arch)) {
-            try self.parseArchive(lib, must_link, fat_arch);
-        } else if (try Dylib.isDylib(lib.path, fat_arch)) {
-            _ = try self.parseDylib(lib, true, fat_arch);
-        } else return error.UnknownFileType;
-    } else if (try Archive.isArchive(lib.path, null)) {
-        try self.parseArchive(lib, must_link, null);
-    } else if (try Dylib.isDylib(lib.path, null)) {
-        _ = try self.parseDylib(lib, true, null);
-    } else {
-        _ = self.parseTbd(lib, true) catch |err| switch (err) {
-            error.MalformedTbd => return error.UnknownFileType,
-            else => |e| return e,
-        };
-    }
+
+    log.debug("classifying input file {s}", .{path});
+
+    const file = try std.fs.cwd().openFile(path, .{});
+    const fh = try self.addFileHandle(file);
+    var buffer: [Archive.SARMAG]u8 = undefined;
+
+    const fat_arch: ?fat.Arch = try self.parseFatFile(file, path);
+    const offset = if (fat_arch) |fa| fa.offset else 0;
+
+    if (readMachHeader(file, offset) catch null) |h| blk: {
+        if (h.magic != macho.MH_MAGIC_64) break :blk;
+        switch (h.filetype) {
+            macho.MH_OBJECT => try self.addObject(path, fh, offset),
+            macho.MH_DYLIB => _ = try self.addDylib(lib, true, fh, offset),
+            else => return error.UnknownFileType,
+        }
+        return;
+    }
+    if (readArMagic(file, offset, &buffer) catch null) |ar_magic| blk: {
+        if (!mem.eql(u8, ar_magic, Archive.ARMAG)) break :blk;
+        try self.addArchive(lib, must_link, fh, fat_arch);
+        return;
+    }
+    _ = try self.addTbd(lib, true, fh);
 }

-fn parseObject(self: *MachO, path: []const u8) ParseError!void {
-    const tracy = trace(@src());
-    defer tracy.end();
-
-    const gpa = self.base.comp.gpa;
-    const file = try fs.cwd().openFile(path, .{});
-    const handle = try self.addFileHandle(file);
-    const mtime: u64 = mtime: {
-        const stat = file.stat() catch break :mtime 0;
-        break :mtime @as(u64, @intCast(@divFloor(stat.mtime, 1_000_000_000)));
-    };
-    const index = @as(File.Index, @intCast(try self.files.addOne(gpa)));
-    self.files.set(index, .{
-        .object = .{
-            .offset = 0, // TODO FAT objects
-            .path = try gpa.dupe(u8, path),
-            .file_handle = handle,
-            .mtime = mtime,
-            .index = index,
-        },
-    });
-    try self.objects.append(gpa, index);
-
-    const object = self.getFile(index).?.object;
-    try object.parse(self);
-}
-
-pub fn parseFatLibrary(self: *MachO, path: []const u8) !fat.Arch {
-    var buffer: [2]fat.Arch = undefined;
-    const fat_archs = try fat.parseArchs(path, &buffer);
+fn parseFatFile(self: *MachO, file: std.fs.File, path: []const u8) !?fat.Arch {
+    const fat_h = fat.readFatHeader(file) catch return null;
+    if (fat_h.magic != macho.FAT_MAGIC and fat_h.magic != macho.FAT_MAGIC_64) return null;
+    var fat_archs_buffer: [2]fat.Arch = undefined;
+    const fat_archs = try fat.parseArchs(file, fat_h, &fat_archs_buffer);
     const cpu_arch = self.getTarget().cpu.arch;
     for (fat_archs) |arch| {
         if (arch.tag == cpu_arch) return arch;
     }
-    try self.reportParseError(path, "missing arch in universal file: expected {s}", .{@tagName(cpu_arch)});
-    return error.InvalidCpuArch;
+    try self.reportParseError(path, "missing arch in universal file: expected {s}", .{
+        @tagName(cpu_arch),
+    });
+    return error.MissingCpuArch;
 }

-fn parseArchive(self: *MachO, lib: SystemLib, must_link: bool, fat_arch: ?fat.Arch) ParseError!void {
+pub fn readMachHeader(file: std.fs.File, offset: usize) !macho.mach_header_64 {
+    var buffer: [@sizeOf(macho.mach_header_64)]u8 = undefined;
+    const nread = try file.preadAll(&buffer, offset);
+    if (nread != buffer.len) return error.InputOutput;
+    const hdr = @as(*align(1) const macho.mach_header_64, @ptrCast(&buffer)).*;
+    return hdr;
+}
+
+pub fn readArMagic(file: std.fs.File, offset: usize, buffer: *[Archive.SARMAG]u8) ![]const u8 {
+    const nread = try file.preadAll(buffer, offset);
+    if (nread != buffer.len) return error.InputOutput;
+    return buffer[0..Archive.SARMAG];
+}
+
+fn addObject(self: *MachO, path: []const u8, handle: File.HandleIndex, offset: u64) !void {
+    const tracy = trace(@src());
+    defer tracy.end();
+
+    const gpa = self.base.comp.gpa;
+    const mtime: u64 = mtime: {
+        const file = self.getFileHandle(handle);
+        const stat = file.stat() catch break :mtime 0;
+        break :mtime @as(u64, @intCast(@divFloor(stat.mtime, 1_000_000_000)));
+    };
+    const index = @as(File.Index, @intCast(try self.files.addOne(gpa)));
+    self.files.set(index, .{ .object = .{
+        .offset = offset,
+        .path = try gpa.dupe(u8, path),
+        .file_handle = handle,
+        .mtime = mtime,
+        .index = index,
+    } });
+    try self.objects.append(gpa, index);
+}
+
+pub fn parseInputFiles(self: *MachO) !void {
+    const tracy = trace(@src());
+    defer tracy.end();
+
+    for (self.objects.items) |index| {
+        self.getFile(index).?.parse(self) catch |err| switch (err) {
+            error.MalformedObject,
+            error.InvalidCpuArch,
+            error.InvalidTarget,
+            => {}, // already reported
+            else => |e| try self.reportParseError2(index, "unexpected error: parsing input file failed with error {s}", .{@errorName(e)}),
+        };
+    }
+    for (self.dylibs.items) |index| {
+        self.getFile(index).?.parse(self) catch |err| switch (err) {
+            error.MalformedDylib,
+            error.InvalidCpuArch,
+            error.InvalidTarget,
+            => {}, // already reported
+            else => |e| try self.reportParseError2(index, "unexpected error: parsing input file failed with error {s}", .{@errorName(e)}),
+        };
+    }
+}
+
+fn addArchive(self: *MachO, lib: SystemLib, must_link: bool, handle: File.HandleIndex, fat_arch: ?fat.Arch) !void {
     const tracy = trace(@src());
     defer tracy.end();

     const gpa = self.base.comp.gpa;

-    const file = try fs.cwd().openFile(lib.path, .{});
-    const handle = try self.addFileHandle(file);
-
     var archive = Archive{};
     defer archive.deinit(gpa);
-    try archive.parse(self, lib.path, handle, fat_arch);
+    try archive.unpack(self, lib.path, handle, fat_arch);

-    var has_parse_error = false;
-    for (archive.objects.items) |extracted| {
-        const index = @as(File.Index, @intCast(try self.files.addOne(gpa)));
-        self.files.set(index, .{ .object = extracted });
+    for (archive.objects.items) |unpacked| {
+        const index: File.Index = @intCast(try self.files.addOne(gpa));
+        self.files.set(index, .{ .object = unpacked });
         const object = &self.files.items(.data)[index].object;
         object.index = index;
         object.alive = must_link or lib.needed; // TODO: or self.options.all_load;
         object.hidden = lib.hidden;
-        object.parse(self) catch |err| switch (err) {
-            error.MalformedObject,
-            error.InvalidCpuArch,
-            error.InvalidTarget,
-            => has_parse_error = true,
-            else => |e| return e,
-        };
         try self.objects.append(gpa, index);
-
-        // Finally, we do a post-parse check for -ObjC to see if we need to force load this member
-        // anyhow.
-        object.alive = object.alive or (self.force_load_objc and object.hasObjc());
     }
-    if (has_parse_error) return error.MalformedArchive;
 }

-fn parseDylib(self: *MachO, lib: SystemLib, explicit: bool, fat_arch: ?fat.Arch) ParseError!File.Index {
+fn addDylib(self: *MachO, lib: SystemLib, explicit: bool, handle: File.HandleIndex, offset: u64) !File.Index {
     const tracy = trace(@src());
     defer tracy.end();

     const gpa = self.base.comp.gpa;

-    const file = try fs.cwd().openFile(lib.path, .{});
-    defer file.close();
-
-    const index = @as(File.Index, @intCast(try self.files.addOne(gpa)));
+    const index: File.Index = @intCast(try self.files.addOne(gpa));
     self.files.set(index, .{ .dylib = .{
+        .offset = offset,
+        .file_handle = handle,
+        .tag = .dylib,
         .path = try gpa.dupe(u8, lib.path),
         .index = index,
         .needed = lib.needed,
         .weak = lib.weak,
         .reexport = lib.reexport,
         .explicit = explicit,
+        .umbrella = index,
     } });
-    const dylib = &self.files.items(.data)[index].dylib;
-    try dylib.parse(self, file, fat_arch);
-
     try self.dylibs.append(gpa, index);

     return index;
 }

-fn parseTbd(self: *MachO, lib: SystemLib, explicit: bool) ParseError!File.Index {
+fn addTbd(self: *MachO, lib: SystemLib, explicit: bool, handle: File.HandleIndex) !File.Index {
     const tracy = trace(@src());
     defer tracy.end();

     const gpa = self.base.comp.gpa;
-    const file = try fs.cwd().openFile(lib.path, .{});
-    defer file.close();
-
-    var lib_stub = LibStub.loadFromFile(gpa, file) catch return error.MalformedTbd; // TODO actually handle different errors
-    defer lib_stub.deinit();
-
-    const index = @as(File.Index, @intCast(try self.files.addOne(gpa)));
+    const index: File.Index = @intCast(try self.files.addOne(gpa));
     self.files.set(index, .{ .dylib = .{
+        .offset = 0,
+        .file_handle = handle,
+        .tag = .tbd,
         .path = try gpa.dupe(u8, lib.path),
         .index = index,
         .needed = lib.needed,
         .weak = lib.weak,
         .reexport = lib.reexport,
         .explicit = explicit,
+        .umbrella = index,
     } });
-    const dylib = &self.files.items(.data)[index].dylib;
-    try dylib.parseTbd(self.getTarget().cpu.arch, self.platform, lib_stub, self);
     try self.dylibs.append(gpa, index);

     return index;

@@ -1092,6 +1059,8 @@ fn parseDependentDylibs(self: *MachO) !void {
     const tracy = trace(@src());
     defer tracy.end();

+    if (self.dylibs.items.len == 0) return;
+
     const gpa = self.base.comp.gpa;
     const lib_dirs = self.lib_dirs;
     const framework_dirs = self.framework_dirs;

@@ -1108,7 +1077,7 @@ fn parseDependentDylibs(self: *MachO) !void {
     while (index < self.dylibs.items.len) : (index += 1) {
         const dylib_index = self.dylibs.items[index];

-        var dependents = std.ArrayList(struct { id: Dylib.Id, file: File.Index }).init(gpa);
+        var dependents = std.ArrayList(File.Index).init(gpa);
         defer dependents.deinit();
         try dependents.ensureTotalCapacityPrecise(self.getFile(dylib_index).?.dylib.dependents.items.len);

@@ -1199,38 +1168,34 @@ fn parseDependentDylibs(self: *MachO) !void {
                 .path = full_path,
                 .weak = is_weak,
             };
+            const file = try std.fs.cwd().openFile(lib.path, .{});
+            const fh = try self.addFileHandle(file);
+            const fat_arch = try self.parseFatFile(file, lib.path);
+            const offset = if (fat_arch) |fa| fa.offset else 0;
             const file_index = file_index: {
-                if (try fat.isFatLibrary(lib.path)) {
-                    const fat_arch = try self.parseFatLibrary(lib.path);
-                    if (try Dylib.isDylib(lib.path, fat_arch)) {
-                        break :file_index try self.parseDylib(lib, false, fat_arch);
-                    } else break :file_index @as(File.Index, 0);
-                } else if (try Dylib.isDylib(lib.path, null)) {
-                    break :file_index try self.parseDylib(lib, false, null);
-                } else {
-                    const file_index = self.parseTbd(lib, false) catch |err| switch (err) {
-                        error.MalformedTbd => @as(File.Index, 0),
-                        else => |e| return e,
-                    };
-                    break :file_index file_index;
+                if (readMachHeader(file, offset) catch null) |h| blk: {
+                    if (h.magic != macho.MH_MAGIC_64) break :blk;
+                    switch (h.filetype) {
+                        macho.MH_DYLIB => break :file_index try self.addDylib(lib, false, fh, offset),
+                        else => break :file_index @as(File.Index, 0),
+                    }
                 }
+                break :file_index try self.addTbd(lib, false, fh);
             };
-            dependents.appendAssumeCapacity(.{ .id = id, .file = file_index });
+            dependents.appendAssumeCapacity(file_index);
         }

         const dylib = self.getFile(dylib_index).?.dylib;
-        for (dependents.items) |entry| {
-            const id = entry.id;
-            const file_index = entry.file;
+        for (dylib.dependents.items, dependents.items) |id, file_index| {
             if (self.getFile(file_index)) |file| {
                 const dep_dylib = file.dylib;
+                try dep_dylib.parse(self); // TODO in parallel
                 dep_dylib.hoisted = self.isHoisted(id.name);
-                if (self.getFile(dep_dylib.umbrella) == null) {
-                    dep_dylib.umbrella = dylib.umbrella;
-                }
+                dep_dylib.umbrella = dylib.umbrella;
                 if (!dep_dylib.hoisted) {
                     const umbrella = dep_dylib.getUmbrella(self);
                     for (dep_dylib.exports.items(.name), dep_dylib.exports.items(.flags)) |off, flags| {
                         // TODO rethink this entire algorithm
                         try umbrella.addExport(gpa, dep_dylib.getString(off), flags);
                     }
                     try umbrella.rpaths.ensureUnusedCapacity(gpa, dep_dylib.rpaths.keys().len);

@@ -1238,15 +1203,13 @@ fn parseDependentDylibs(self: *MachO) !void {
                     umbrella.rpaths.putAssumeCapacity(try gpa.dupe(u8, rpath), {});
                 }
             }
-            } else {
-                try self.reportDependencyError(
-                    dylib.getUmbrella(self).index,
-                    id.name,
-                    "unable to resolve dependency",
-                    .{},
-                );
-                has_errors = true;
-            }
+            } else try self.reportDependencyError(
+                dylib.getUmbrella(self).index,
+                id.name,
+                "unable to resolve dependency",
+                .{},
+            );
+            has_errors = true;
        }
    }

@@ -1533,8 +1496,8 @@ fn reportUndefs(self: *MachO) !void {
         const notes = entry.value_ptr.*;
         const nnotes = @min(notes.items.len, max_notes) + @intFromBool(notes.items.len > max_notes);

-        var err = try self.addErrorWithNotes(nnotes);
-        try err.addMsg(self, "undefined symbol: {s}", .{undef_sym.getName(self)});
+        var err = try self.base.addErrorWithNotes(nnotes);
+        try err.addMsg("undefined symbol: {s}", .{undef_sym.getName(self)});
         has_undefs = true;

         var inote: usize = 0;

@@ -1542,12 +1505,12 @@ fn reportUndefs(self: *MachO) !void {
             const note = notes.items[inote];
             const file = self.getFile(note.file).?;
             const atom = note.getAtom(self).?;
-            try err.addNote(self, "referenced by {}:{s}", .{ file.fmtPath(), atom.getName(self) });
+            try err.addNote("referenced by {}:{s}", .{ file.fmtPath(), atom.getName(self) });
         }

         if (notes.items.len > max_notes) {
             const remaining = notes.items.len - max_notes;
-            try err.addNote(self, "referenced {d} more times", .{remaining});
+            try err.addNote("referenced {d} more times", .{remaining});
         }
     }
     if (has_undefs) return error.HasUndefinedSymbols;

@@ -3323,13 +3286,13 @@ fn growSectionNonRelocatable(self: *MachO, sect_index: u8, needed_size: u64) !vo

     const mem_capacity = self.allocatedSizeVirtual(seg.vmaddr);
     if (needed_size > mem_capacity) {
-        var err = try self.addErrorWithNotes(2);
-        try err.addMsg(self, "fatal linker error: cannot expand segment seg({d})({s}) in virtual memory", .{
+        var err = try self.base.addErrorWithNotes(2);
+        try err.addMsg("fatal linker error: cannot expand segment seg({d})({s}) in virtual memory", .{
             seg_id,
             seg.segName(),
         });
-        try err.addNote(self, "TODO: emit relocations to memory locations in self-hosted backends", .{});
-        try err.addNote(self, "as a workaround, try increasing pre-allocated virtual memory of each segment", .{});
+        try err.addNote("TODO: emit relocations to memory locations in self-hosted backends", .{});
+        try err.addNote("as a workaround, try increasing pre-allocated virtual memory of each segment", .{});
     }

     seg.vmsize = needed_size;

@@ -3618,65 +3581,15 @@ pub fn eatPrefix(path: []const u8, prefix: []const u8) ?[]const u8 {
     return null;
 }

-const ErrorWithNotes = struct {
-    /// Allocated index in comp.link_errors array.
-    index: usize,
-
-    /// Next available note slot.
-    note_slot: usize = 0,
-
-    pub fn addMsg(
-        err: ErrorWithNotes,
-        macho_file: *MachO,
-        comptime format: []const u8,
-        args: anytype,
-    ) error{OutOfMemory}!void {
-        const comp = macho_file.base.comp;
-        const gpa = comp.gpa;
-        const err_msg = &comp.link_errors.items[err.index];
-        err_msg.msg = try std.fmt.allocPrint(gpa, format, args);
-    }
-
-    pub fn addNote(
-        err: *ErrorWithNotes,
-        macho_file: *MachO,
-        comptime format: []const u8,
-        args: anytype,
-    ) error{OutOfMemory}!void {
-        const comp = macho_file.base.comp;
-        const gpa = comp.gpa;
-        const err_msg = &comp.link_errors.items[err.index];
-        assert(err.note_slot < err_msg.notes.len);
-        err_msg.notes[err.note_slot] = .{ .msg = try std.fmt.allocPrint(gpa, format, args) };
-        err.note_slot += 1;
-    }
-};
-
-pub fn addErrorWithNotes(self: *MachO, note_count: usize) error{OutOfMemory}!ErrorWithNotes {
-    const comp = self.base.comp;
-    const gpa = comp.gpa;
-    try comp.link_errors.ensureUnusedCapacity(gpa, 1);
-    return self.addErrorWithNotesAssumeCapacity(note_count);
-}
-
-fn addErrorWithNotesAssumeCapacity(self: *MachO, note_count: usize) error{OutOfMemory}!ErrorWithNotes {
-    const comp = self.base.comp;
-    const gpa = comp.gpa;
-    const index = comp.link_errors.items.len;
-    const err = comp.link_errors.addOneAssumeCapacity();
-    err.* = .{ .msg = undefined, .notes = try gpa.alloc(link.File.ErrorMsg, note_count) };
-    return .{ .index = index };
-}
-
 pub fn reportParseError(
     self: *MachO,
     path: []const u8,
     comptime format: []const u8,
     args: anytype,
 ) error{OutOfMemory}!void {
-    var err = try self.addErrorWithNotes(1);
-    try err.addMsg(self, format, args);
-    try err.addNote(self, "while parsing {s}", .{path});
+    var err = try self.base.addErrorWithNotes(1);
+    try err.addMsg(format, args);
+    try err.addNote("while parsing {s}", .{path});
 }

 pub fn reportParseError2(

@@ -3685,9 +3598,9 @@ pub fn reportParseError2(
     comptime format: []const u8,
     args: anytype,
 ) error{OutOfMemory}!void {
-    var err = try self.addErrorWithNotes(1);
-    try err.addMsg(self, format, args);
-    try err.addNote(self, "while parsing {}", .{self.getFile(file_index).?.fmtPath()});
+    var err = try self.base.addErrorWithNotes(1);
+    try err.addMsg(format, args);
+    try err.addNote("while parsing {}", .{self.getFile(file_index).?.fmtPath()});
 }

 fn reportMissingLibraryError(

@@ -3696,10 +3609,10 @@ fn reportMissingLibraryError(
     comptime format: []const u8,
     args: anytype,
 ) error{OutOfMemory}!void {
-    var err = try self.addErrorWithNotes(checked_paths.len);
-    try err.addMsg(self, format, args);
+    var err = try self.base.addErrorWithNotes(checked_paths.len);
+    try err.addMsg(format, args);
     for (checked_paths) |path| {
-        try err.addNote(self, "tried {s}", .{path});
+        try err.addNote("tried {s}", .{path});
     }
 }

@@ -3711,12 +3624,12 @@ fn reportMissingDependencyError(
     comptime format: []const u8,
     args: anytype,
 ) error{OutOfMemory}!void {
-    var err = try self.addErrorWithNotes(2 + checked_paths.len);
-    try err.addMsg(self, format, args);
-    try err.addNote(self, "while resolving {s}", .{path});
-    try err.addNote(self, "a dependency of {}", .{self.getFile(parent).?.fmtPath()});
+    var err = try self.base.addErrorWithNotes(2 + checked_paths.len);
+    try err.addMsg(format, args);
+    try err.addNote("while resolving {s}", .{path});
+    try err.addNote("a dependency of {}", .{self.getFile(parent).?.fmtPath()});
     for (checked_paths) |p| {
-        try err.addNote(self, "tried {s}", .{p});
+        try err.addNote("tried {s}", .{p});
     }
 }

@@ -3727,16 +3640,16 @@ fn reportDependencyError(
     comptime format: []const u8,
     args: anytype,
 ) error{OutOfMemory}!void {
-    var err = try self.addErrorWithNotes(2);
-    try err.addMsg(self, format, args);
-    try err.addNote(self, "while parsing {s}", .{path});
-    try err.addNote(self, "a dependency of {}", .{self.getFile(parent).?.fmtPath()});
+    var err = try self.base.addErrorWithNotes(2);
+    try err.addMsg(format, args);
+    try err.addNote("while parsing {s}", .{path});
+    try err.addNote("a dependency of {}", .{self.getFile(parent).?.fmtPath()});
 }

 pub fn reportUnexpectedError(self: *MachO, comptime format: []const u8, args: anytype) error{OutOfMemory}!void {
-    var err = try self.addErrorWithNotes(1);
-    try err.addMsg(self, format, args);
-    try err.addNote(self, "please report this as a linker bug on https://github.com/ziglang/zig/issues/new/choose", .{});
+    var err = try self.base.addErrorWithNotes(1);
+    try err.addMsg(format, args);
+    try err.addNote("please report this as a linker bug on https://github.com/ziglang/zig/issues/new/choose", .{});
 }

 fn reportDuplicates(self: *MachO) error{ HasDuplicates, OutOfMemory }!void {

@@ -3752,20 +3665,20 @@ fn reportDuplicates(self: *MachO) error{ HasDuplicates, OutOfMemory }!void {
         const notes = entry.value_ptr.*;
         const nnotes = @min(notes.items.len, max_notes) + @intFromBool(notes.items.len > max_notes);

-        var err = try self.addErrorWithNotes(nnotes + 1);
-        try err.addMsg(self, "duplicate symbol definition: {s}", .{sym.getName(self)});
-        try err.addNote(self, "defined by {}", .{sym.getFile(self).?.fmtPath()});
+        var err = try self.base.addErrorWithNotes(nnotes + 1);
+        try err.addMsg("duplicate symbol definition: {s}", .{sym.getName(self)});
+        try err.addNote("defined by {}", .{sym.getFile(self).?.fmtPath()});
         has_dupes = true;

         var inote: usize = 0;
         while (inote < @min(notes.items.len, max_notes)) : (inote += 1) {
             const file = self.getFile(notes.items[inote]).?;
-            try err.addNote(self, "defined by {}", .{file.fmtPath()});
+            try err.addNote("defined by {}", .{file.fmtPath()});
         }

         if (notes.items.len > max_notes) {
             const remaining = notes.items.len - max_notes;
-            try err.addNote(self, "defined {d} more times", .{remaining});
+            try err.addNote("defined {d} more times", .{remaining});
         }
     }
     if (has_dupes) return error.HasDuplicates;
@ -1,21 +1,10 @@
objects: std.ArrayListUnmanaged(Object) = .{},

pub fn isArchive(path: []const u8, fat_arch: ?fat.Arch) !bool {
const file = try std.fs.cwd().openFile(path, .{});
defer file.close();
if (fat_arch) |arch| {
try file.seekTo(arch.offset);
}
const magic = file.reader().readBytesNoEof(SARMAG) catch return false;
if (!mem.eql(u8, &magic, ARMAG)) return false;
return true;
}

pub fn deinit(self: *Archive, allocator: Allocator) void {
self.objects.deinit(allocator);
}

pub fn parse(self: *Archive, macho_file: *MachO, path: []const u8, handle_index: File.HandleIndex, fat_arch: ?fat.Arch) !void {
pub fn unpack(self: *Archive, macho_file: *MachO, path: []const u8, handle_index: File.HandleIndex, fat_arch: ?fat.Arch) !void {
const gpa = macho_file.base.comp.gpa;

var arena = std.heap.ArenaAllocator.init(gpa);

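The isArchive probe removed above seeked the file before reading the magic. A small sketch of the same check written pread-style, in line with where the commit is heading (the constant mirrors ar(5); this is illustrative, not the commit's replacement code):

const std = @import("std");

const ARMAG = "!<arch>\n"; // classic ar(5) magic; SARMAG is its length

fn looksLikeArchive(file: std.fs.File, offset: u64) bool {
    var magic: [ARMAG.len]u8 = undefined;
    // pread leaves the fd's seek position alone, which matters once
    // several files are probed concurrently.
    const n = file.preadAll(&magic, offset) catch return false;
    return n == magic.len and std.mem.eql(u8, &magic, ARMAG);
}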
@ -906,15 +906,15 @@ const x86_64 = struct {
encode(&.{inst}, code) catch return error.RelaxFail;
},
else => |x| {
var err = try macho_file.addErrorWithNotes(2);
try err.addMsg(macho_file, "{s}: 0x{x}: 0x{x}: failed to relax relocation of type {}", .{
var err = try macho_file.base.addErrorWithNotes(2);
try err.addMsg("{s}: 0x{x}: 0x{x}: failed to relax relocation of type {}", .{
self.getName(macho_file),
self.getAddress(macho_file),
rel.offset,
rel.fmtPretty(.x86_64),
});
try err.addNote(macho_file, "expected .mov instruction but found .{s}", .{@tagName(x)});
try err.addNote(macho_file, "while parsing {}", .{self.getFile(macho_file).fmtPath()});
try err.addNote("expected .mov instruction but found .{s}", .{@tagName(x)});
try err.addNote("while parsing {}", .{self.getFile(macho_file).fmtPath()});
return error.RelaxFailUnexpectedInstruction;
},
}

@ -1,5 +1,9 @@
/// Non-zero for fat dylibs
offset: u64,
path: []const u8,
index: File.Index,
file_handle: File.HandleIndex,
tag: enum { dylib, tbd },

exports: std.MultiArrayList(Export) = .{},
strtab: std.ArrayListUnmanaged(u8) = .{},
@ -11,7 +15,7 @@ symbols_extra: std.ArrayListUnmanaged(u32) = .{},
globals: std.ArrayListUnmanaged(MachO.SymbolResolver.Index) = .{},
dependents: std.ArrayListUnmanaged(Id) = .{},
rpaths: std.StringArrayHashMapUnmanaged(void) = .{},
umbrella: File.Index = 0,
umbrella: File.Index,
platform: ?MachO.Platform = null,

needed: bool,
@ -23,16 +27,6 @@ referenced: bool = false,

output_symtab_ctx: MachO.SymtabCtx = .{},

pub fn isDylib(path: []const u8, fat_arch: ?fat.Arch) !bool {
const file = try std.fs.cwd().openFile(path, .{});
defer file.close();
if (fat_arch) |arch| {
try file.seekTo(arch.offset);
}
const header = file.reader().readStruct(macho.mach_header_64) catch return false;
return header.filetype == macho.MH_DYLIB;
}

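Like the archive probe, the isDylib check deleted above relied on seekTo before reading the mach header. An equivalent pread-flavoured sketch (illustrative only; the commit replaces the probe with up-front classification rather than with this helper):

const std = @import("std");
const macho = std.macho;

fn readsAsDylib(file: std.fs.File, offset: u64) bool {
    var buf: [@sizeOf(macho.mach_header_64)]u8 = undefined;
    const n = file.preadAll(&buf, offset) catch return false;
    if (n != buf.len) return false;
    const hdr = @as(*align(1) const macho.mach_header_64, @ptrCast(&buf)).*;
    // Object files and dylibs share the Mach-O magic; filetype tells them apart.
    return hdr.magic == macho.MH_MAGIC_64 and hdr.filetype == macho.MH_DYLIB;
}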
pub fn deinit(self: *Dylib, allocator: Allocator) void {
allocator.free(self.path);
self.exports.deinit(allocator);
@ -51,12 +45,21 @@ pub fn deinit(self: *Dylib, allocator: Allocator) void {
self.rpaths.deinit(allocator);
}

pub fn parse(self: *Dylib, macho_file: *MachO, file: std.fs.File, fat_arch: ?fat.Arch) !void {
pub fn parse(self: *Dylib, macho_file: *MachO) !void {
switch (self.tag) {
.tbd => try self.parseTbd(macho_file),
.dylib => try self.parseBinary(macho_file),
}
try self.initSymbols(macho_file);
}

fn parseBinary(self: *Dylib, macho_file: *MachO) !void {
const tracy = trace(@src());
defer tracy.end();

const gpa = macho_file.base.comp.gpa;
const offset = if (fat_arch) |ar| ar.offset else 0;
const file = macho_file.getFileHandle(self.file_handle);
const offset = self.offset;

log.debug("parsing dylib from binary: {s}", .{self.path});

@ -258,13 +261,7 @@ fn parseTrie(self: *Dylib, data: []const u8, macho_file: *MachO) !void {
try self.parseTrieNode(&it, gpa, arena.allocator(), "");
}

pub fn parseTbd(
self: *Dylib,
cpu_arch: std.Target.Cpu.Arch,
platform: MachO.Platform,
lib_stub: LibStub,
macho_file: *MachO,
) !void {
fn parseTbd(self: *Dylib, macho_file: *MachO) !void {
const tracy = trace(@src());
defer tracy.end();

@ -272,6 +269,9 @@ pub fn parseTbd(

log.debug("parsing dylib from stub: {s}", .{self.path});

const file = macho_file.getFileHandle(self.file_handle);
var lib_stub = LibStub.loadFromFile(gpa, file) catch return error.NotLibStub;
defer lib_stub.deinit();
const umbrella_lib = lib_stub.inner[0];

{
@ -290,7 +290,8 @@ pub fn parseTbd(

log.debug(" (install_name '{s}')", .{umbrella_lib.installName()});

self.platform = platform;
const cpu_arch = macho_file.getTarget().cpu.arch;
self.platform = macho_file.platform;

var matcher = try TargetMatcher.init(gpa, cpu_arch, self.platform.?.toApplePlatform());
defer matcher.deinit();
@ -495,7 +496,7 @@ fn addObjCExport(
try self.addExport(allocator, full_name, .{});
}

pub fn initSymbols(self: *Dylib, macho_file: *MachO) !void {
fn initSymbols(self: *Dylib, macho_file: *MachO) !void {
const gpa = macho_file.base.comp.gpa;

const nsyms = self.exports.items(.name).len;

@ -38,13 +38,6 @@ compact_unwind_ctx: CompactUnwindCtx = .{},
output_symtab_ctx: MachO.SymtabCtx = .{},
output_ar_state: Archive.ArState = .{},

pub fn isObject(path: []const u8) !bool {
const file = try std.fs.cwd().openFile(path, .{});
defer file.close();
const header = file.reader().readStruct(macho.mach_header_64) catch return false;
return header.filetype == macho.MH_OBJECT;
}

pub fn deinit(self: *Object, allocator: Allocator) void {
if (self.in_archive) |*ar| allocator.free(ar.path);
allocator.free(self.path);
@ -273,6 +266,9 @@ pub fn parse(self: *Object, macho_file: *MachO) !void {
atom.flags.alive = false;
}
}

// Finally, we do a post-parse check for -ObjC to see if we need to force load this member anyhow.
self.alive = self.alive or (macho_file.force_load_objc and self.hasObjC());
}

pub fn isCstringLiteral(sect: macho.section_64) bool {
@ -2325,7 +2321,7 @@ fn hasSymbolStabs(self: Object) bool {
return self.stab_files.items.len > 0;
}

pub fn hasObjc(self: Object) bool {
fn hasObjC(self: Object) bool {
for (self.symtab.items(.nlist)) |nlist| {
const name = self.getString(nlist.n_strx);
if (mem.startsWith(u8, name, "_OBJC_CLASS_$_")) return true;

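The -ObjC force-load check above scans symbol names for the Objective-C class prefix. A standalone sketch of that scan (the _OBJC_CLASS_$_ prefix is from the hunk; extending it to categories or section checks would be an assumption beyond what this diff shows):

const std = @import("std");

fn definesObjCClass(symbol_names: []const []const u8) bool {
    for (symbol_names) |name| {
        if (std.mem.startsWith(u8, name, "_OBJC_CLASS_$_")) return true;
    }
    return false;
}

test definesObjCClass {
    try std.testing.expect(definesObjCClass(&.{ "_main", "_OBJC_CLASS_$_Foo" }));
    try std.testing.expect(!definesObjCClass(&.{"_main"}));
}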
@ -8,11 +8,17 @@ const native_endian = builtin.target.cpu.arch.endian();

const MachO = @import("../MachO.zig");

pub fn isFatLibrary(path: []const u8) !bool {
const file = try std.fs.cwd().openFile(path, .{});
defer file.close();
const hdr = file.reader().readStructEndian(macho.fat_header, .big) catch return false;
return hdr.magic == macho.FAT_MAGIC;
pub fn readFatHeader(file: std.fs.File) !macho.fat_header {
return readFatHeaderGeneric(macho.fat_header, file, 0);
}

fn readFatHeaderGeneric(comptime Hdr: type, file: std.fs.File, offset: usize) !Hdr {
var buffer: [@sizeOf(Hdr)]u8 = undefined;
const nread = try file.preadAll(&buffer, offset);
if (nread != buffer.len) return error.InputOutput;
var hdr = @as(*align(1) const Hdr, @ptrCast(&buffer)).*;
mem.byteSwapAllFields(Hdr, &hdr);
return hdr;
}

pub const Arch = struct {
@ -21,17 +27,12 @@ pub const Arch = struct {
size: u32,
};

pub fn parseArchs(path: []const u8, buffer: *[2]Arch) ![]const Arch {
const file = try std.fs.cwd().openFile(path, .{});
defer file.close();
const reader = file.reader();
const fat_header = try reader.readStructEndian(macho.fat_header, .big);
assert(fat_header.magic == macho.FAT_MAGIC);

pub fn parseArchs(file: std.fs.File, fat_header: macho.fat_header, out: *[2]Arch) ![]const Arch {
var count: usize = 0;
var fat_arch_index: u32 = 0;
while (fat_arch_index < fat_header.nfat_arch) : (fat_arch_index += 1) {
const fat_arch = try reader.readStructEndian(macho.fat_arch, .big);
while (fat_arch_index < fat_header.nfat_arch and count < out.len) : (fat_arch_index += 1) {
const offset = @sizeOf(macho.fat_header) + @sizeOf(macho.fat_arch) * fat_arch_index;
const fat_arch = try readFatHeaderGeneric(macho.fat_arch, file, offset);
// If we come across an architecture that we do not know how to handle, that's
// fine because we can keep looking for one that might match.
const arch: std.Target.Cpu.Arch = switch (fat_arch.cputype) {
@ -39,9 +40,9 @@ pub fn parseArchs(path: []const u8, buffer: *[2]Arch) ![]const Arch {
macho.CPU_TYPE_X86_64 => if (fat_arch.cpusubtype == macho.CPU_SUBTYPE_X86_64_ALL) .x86_64 else continue,
else => continue,
};
buffer[count] = .{ .tag = arch, .offset = fat_arch.offset, .size = fat_arch.size };
out[count] = .{ .tag = arch, .offset = fat_arch.offset, .size = fat_arch.size };
count += 1;
}

return buffer[0..count];
return out[0..count];
}

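readFatHeaderGeneric above is the core of the new stateless reading style: fetch a big-endian struct at an absolute offset with pread and byte-swap it. A self-contained sketch of the same idea (not the commit's code; like the hunk, it swaps unconditionally, which assumes a little-endian host, so a fully portable version would check native endianness first):

const std = @import("std");

fn readBigEndianStruct(comptime T: type, file: std.fs.File, offset: u64) !T {
    var buf: [@sizeOf(T)]u8 = undefined;
    if (try file.preadAll(&buf, offset) != buf.len) return error.InputOutput;
    var value = @as(*align(1) const T, @ptrCast(&buf)).*;
    // fat headers are big-endian on disk; swap every field for the host.
    std.mem.byteSwapAllFields(T, &value);
    return value;
}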
@ -335,6 +335,21 @@ pub const File = union(enum) {
};
}

pub fn parse(file: File, macho_file: *MachO) !void {
return switch (file) {
.internal, .zig_object => unreachable,
.object => |x| x.parse(macho_file),
.dylib => |x| x.parse(macho_file),
};
}

pub fn parseAr(file: File, macho_file: *MachO) !void {
return switch (file) {
.internal, .zig_object, .dylib => unreachable,
.object => |x| x.parseAr(macho_file),
};
}

pub const Index = u32;

pub const Entry = union(enum) {

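File.parse and File.parseAr above dispatch on the input-file kind and rule out kinds that can never reach that stage. A minimal standalone model of the shape (types here are illustrative placeholders, not the linker's):

const std = @import("std");

const Input = union(enum) {
    object: u32, // payloads stand in for real parser state
    dylib: u32,
    internal: void,

    fn parse(input: Input) void {
        switch (input) {
            // internal files are generated by the linker, never parsed from disk
            .internal => unreachable,
            .object => |idx| std.log.debug("parsing object #{d}", .{idx}),
            .dylib => |idx| std.log.debug("parsing dylib #{d}", .{idx}),
        }
    }
};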
@ -27,22 +27,21 @@ pub fn flushObject(macho_file: *MachO, comp: *Compilation, module_obj_path: ?[]c
}

for (positionals.items) |obj| {
macho_file.parsePositional(obj.path, obj.must_link) catch |err| switch (err) {
error.MalformedObject,
error.MalformedArchive,
error.InvalidCpuArch,
error.InvalidTarget,
=> continue, // already reported
error.UnknownFileType => try macho_file.reportParseError(obj.path, "unknown file type for an object file", .{}),
macho_file.classifyInputFile(obj.path, .{ .path = obj.path }, obj.must_link) catch |err| switch (err) {
error.UnknownFileType => try macho_file.reportParseError(obj.path, "unknown file type for an input file", .{}),
else => |e| try macho_file.reportParseError(
obj.path,
"unexpected error: parsing input file failed with error {s}",
"unexpected error: reading input file failed with error {s}",
.{@errorName(e)},
),
};
}

if (comp.link_errors.items.len > 0) return error.FlushFailure;
if (macho_file.base.hasErrors()) return error.FlushFailure;

try macho_file.parseInputFiles();

if (macho_file.base.hasErrors()) return error.FlushFailure;

try macho_file.resolveSymbols();
try macho_file.dedupLiterals();
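This hunk is the heart of the commit: parsePositional (probe and parse in one step) becomes classifyInputFile followed by a separate parseInputFiles pass. A sketch of the classification half with hypothetical names (deliberately simplified; the real linker also inspects the mach header filetype, fat wrappers, and TBD stubs):

const std = @import("std");
const macho = std.macho;

const Kind = enum { object_or_dylib, archive };

// Phase 1: cheap, serial classification from magic bytes only.
fn classify(magic: []const u8) ?Kind {
    if (std.mem.startsWith(u8, magic, "!<ar")) return .archive;
    if (magic.len >= 4 and
        std.mem.readInt(u32, magic[0..4], .little) == macho.MH_MAGIC_64)
        return .object_or_dylib;
    return null; // UnknownFileType
}

Phase 2 then iterates the recorded inputs and parses each one; that second loop is the part that can later be fanned out across a thread pool.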
@ -93,22 +92,21 @@ pub fn flushStaticLib(macho_file: *MachO, comp: *Compilation, module_obj_path: ?
}

for (positionals.items) |obj| {
parsePositional(macho_file, obj.path) catch |err| switch (err) {
error.MalformedObject,
error.MalformedArchive,
error.InvalidCpuArch,
error.InvalidTarget,
=> continue, // already reported
error.UnknownFileType => try macho_file.reportParseError(obj.path, "unknown file type for an object file", .{}),
macho_file.classifyInputFile(obj.path, .{ .path = obj.path }, obj.must_link) catch |err| switch (err) {
error.UnknownFileType => try macho_file.reportParseError(obj.path, "unknown file type for an input file", .{}),
else => |e| try macho_file.reportParseError(
obj.path,
"unexpected error: parsing input file failed with error {s}",
"unexpected error: reading input file failed with error {s}",
.{@errorName(e)},
),
};
}

if (comp.link_errors.items.len > 0) return error.FlushFailure;
if (macho_file.base.hasErrors()) return error.FlushFailure;

try parseInputFilesAr(macho_file);

if (macho_file.base.hasErrors()) return error.FlushFailure;

// First, we flush the relocatable object file generated with our backends.
if (macho_file.getZigObject()) |zo| {
@ -225,79 +223,19 @@ pub fn flushStaticLib(macho_file: *MachO, comp: *Compilation, module_obj_path: ?
try macho_file.base.file.?.setEndPos(total_size);
try macho_file.base.file.?.pwriteAll(buffer.items, 0);

if (comp.link_errors.items.len > 0) return error.FlushFailure;
if (macho_file.base.hasErrors()) return error.FlushFailure;
}

fn parsePositional(macho_file: *MachO, path: []const u8) MachO.ParseError!void {
const tracy = trace(@src());
defer tracy.end();
if (try Object.isObject(path)) {
try parseObject(macho_file, path);
} else if (try fat.isFatLibrary(path)) {
const fat_arch = try macho_file.parseFatLibrary(path);
if (try Archive.isArchive(path, fat_arch)) {
try parseArchive(macho_file, path, fat_arch);
} else return error.UnknownFileType;
} else if (try Archive.isArchive(path, null)) {
try parseArchive(macho_file, path, null);
} else return error.UnknownFileType;
}

fn parseObject(macho_file: *MachO, path: []const u8) MachO.ParseError!void {
fn parseInputFilesAr(macho_file: *MachO) !void {
const tracy = trace(@src());
defer tracy.end();

const gpa = macho_file.base.comp.gpa;
const file = try std.fs.cwd().openFile(path, .{});
errdefer file.close();
const handle = try macho_file.addFileHandle(file);
const mtime: u64 = mtime: {
const stat = file.stat() catch break :mtime 0;
break :mtime @as(u64, @intCast(@divFloor(stat.mtime, 1_000_000_000)));
};
const index = @as(File.Index, @intCast(try macho_file.files.addOne(gpa)));
macho_file.files.set(index, .{
.object = .{
.offset = 0, // TODO FAT objects
.path = try gpa.dupe(u8, path),
.file_handle = handle,
.mtime = mtime,
.index = index,
},
});
try macho_file.objects.append(gpa, index);

const object = macho_file.getFile(index).?.object;
try object.parseAr(macho_file);
}

fn parseArchive(macho_file: *MachO, path: []const u8, fat_arch: ?fat.Arch) MachO.ParseError!void {
const tracy = trace(@src());
defer tracy.end();

const gpa = macho_file.base.comp.gpa;

const file = try std.fs.cwd().openFile(path, .{});
errdefer file.close();
const handle = try macho_file.addFileHandle(file);

var archive = Archive{};
defer archive.deinit(gpa);
try archive.parse(macho_file, path, handle, fat_arch);

var has_parse_error = false;
for (archive.objects.items) |extracted| {
const index = @as(File.Index, @intCast(try macho_file.files.addOne(gpa)));
macho_file.files.set(index, .{ .object = extracted });
const object = &macho_file.files.items(.data)[index].object;
object.index = index;
object.parseAr(macho_file) catch |err| switch (err) {
error.InvalidCpuArch => has_parse_error = true,
else => |e| return e,
for (macho_file.objects.items) |index| {
macho_file.getFile(index).?.parseAr(macho_file) catch |err| switch (err) {
error.InvalidCpuArch => {}, // already reported
else => |e| try macho_file.reportParseError2(index, "unexpected error: parsing input file failed with error {s}", .{@errorName(e)}),
};
try macho_file.objects.append(gpa, index);
}
if (has_parse_error) return error.MalformedArchive;
}

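One detail worth noting in the removed parseObject: the stored mtime is the stat timestamp floored from nanoseconds down to whole seconds. A sketch of just that conversion (std.time.ns_per_s substituted for the literal 1_000_000_000):

const std = @import("std");

fn mtimeSeconds(stat_mtime: i128) u64 {
    // File.Stat.mtime is nanoseconds since the epoch (i128).
    return @intCast(@divFloor(stat_mtime, std.time.ns_per_s));
}

test mtimeSeconds {
    try std.testing.expectEqual(@as(u64, 2), mtimeSeconds(2_500_000_000));
}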
fn markExports(macho_file: *MachO) void {

@ -658,9 +658,9 @@ fn parseObjectFile(wasm: *Wasm, path: []const u8) !bool {
var object = Object.create(wasm, obj_file, path, null) catch |err| switch (err) {
error.InvalidMagicByte, error.NotObjectFile => return false,
else => |e| {
var err_note = try wasm.addErrorWithNotes(1);
try err_note.addMsg(wasm, "Failed parsing object file: {s}", .{@errorName(e)});
try err_note.addNote(wasm, "while parsing '{s}'", .{path});
var err_note = try wasm.base.addErrorWithNotes(1);
try err_note.addMsg("Failed parsing object file: {s}", .{@errorName(e)});
try err_note.addNote("while parsing '{s}'", .{path});
return error.FlushFailure;
},
};
@ -714,9 +714,9 @@ fn parseArchive(wasm: *Wasm, path: []const u8, force_load: bool) !bool {
return false;
},
else => |e| {
var err_note = try wasm.addErrorWithNotes(1);
try err_note.addMsg(wasm, "Failed parsing archive: {s}", .{@errorName(e)});
try err_note.addNote(wasm, "while parsing archive {s}", .{path});
var err_note = try wasm.base.addErrorWithNotes(1);
try err_note.addMsg("Failed parsing archive: {s}", .{@errorName(e)});
try err_note.addNote("while parsing archive {s}", .{path});
return error.FlushFailure;
},
};
@ -741,9 +741,9 @@ fn parseArchive(wasm: *Wasm, path: []const u8, force_load: bool) !bool {

for (offsets.keys()) |file_offset| {
var object = archive.parseObject(wasm, file_offset) catch |e| {
var err_note = try wasm.addErrorWithNotes(1);
try err_note.addMsg(wasm, "Failed parsing object: {s}", .{@errorName(e)});
try err_note.addNote(wasm, "while parsing object in archive {s}", .{path});
var err_note = try wasm.base.addErrorWithNotes(1);
try err_note.addMsg("Failed parsing object: {s}", .{@errorName(e)});
try err_note.addNote("while parsing object in archive {s}", .{path});
return error.FlushFailure;
};
object.index = @enumFromInt(wasm.files.len);
@ -779,9 +779,9 @@ fn resolveSymbolsInObject(wasm: *Wasm, file_index: File.Index) !void {

if (symbol.isLocal()) {
if (symbol.isUndefined()) {
var err = try wasm.addErrorWithNotes(1);
try err.addMsg(wasm, "Local symbols are not allowed to reference imports", .{});
try err.addNote(wasm, "symbol '{s}' defined in '{s}'", .{ sym_name, obj_file.path() });
var err = try wasm.base.addErrorWithNotes(1);
try err.addMsg("Local symbols are not allowed to reference imports", .{});
try err.addNote("symbol '{s}' defined in '{s}'", .{ sym_name, obj_file.path() });
}
try wasm.resolved_symbols.putNoClobber(gpa, location, {});
continue;
@ -816,10 +816,10 @@ fn resolveSymbolsInObject(wasm: *Wasm, file_index: File.Index) !void {
break :outer; // existing is weak, while new one isn't. Replace it.
}
// both are defined and weak, we have a symbol collision.
var err = try wasm.addErrorWithNotes(2);
try err.addMsg(wasm, "symbol '{s}' defined multiple times", .{sym_name});
try err.addNote(wasm, "first definition in '{s}'", .{existing_file_path});
try err.addNote(wasm, "next definition in '{s}'", .{obj_file.path()});
var err = try wasm.base.addErrorWithNotes(2);
try err.addMsg("symbol '{s}' defined multiple times", .{sym_name});
try err.addNote("first definition in '{s}'", .{existing_file_path});
try err.addNote("next definition in '{s}'", .{obj_file.path()});
}

try wasm.discarded.put(gpa, location, existing_loc);
@ -827,10 +827,10 @@ fn resolveSymbolsInObject(wasm: *Wasm, file_index: File.Index) !void {
}

if (symbol.tag != existing_sym.tag) {
var err = try wasm.addErrorWithNotes(2);
try err.addMsg(wasm, "symbol '{s}' mismatching types '{s}' and '{s}'", .{ sym_name, @tagName(symbol.tag), @tagName(existing_sym.tag) });
try err.addNote(wasm, "first definition in '{s}'", .{existing_file_path});
try err.addNote(wasm, "next definition in '{s}'", .{obj_file.path()});
var err = try wasm.base.addErrorWithNotes(2);
try err.addMsg("symbol '{s}' mismatching types '{s}' and '{s}'", .{ sym_name, @tagName(symbol.tag), @tagName(existing_sym.tag) });
try err.addNote("first definition in '{s}'", .{existing_file_path});
try err.addNote("next definition in '{s}'", .{obj_file.path()});
}

if (existing_sym.isUndefined() and symbol.isUndefined()) {
@ -847,14 +847,14 @@ fn resolveSymbolsInObject(wasm: *Wasm, file_index: File.Index) !void {
const imp = obj_file.import(sym_index);
const module_name = obj_file.string(imp.module_name);
if (!mem.eql(u8, existing_name, module_name)) {
var err = try wasm.addErrorWithNotes(2);
try err.addMsg(wasm, "symbol '{s}' module name mismatch. Expected '{s}', but found '{s}'", .{
var err = try wasm.base.addErrorWithNotes(2);
try err.addMsg("symbol '{s}' module name mismatch. Expected '{s}', but found '{s}'", .{
sym_name,
existing_name,
module_name,
});
try err.addNote(wasm, "first definition in '{s}'", .{existing_file_path});
try err.addNote(wasm, "next definition in '{s}'", .{obj_file.path()});
try err.addNote("first definition in '{s}'", .{existing_file_path});
try err.addNote("next definition in '{s}'", .{obj_file.path()});
}
}

@ -867,10 +867,10 @@ fn resolveSymbolsInObject(wasm: *Wasm, file_index: File.Index) !void {
const existing_ty = wasm.getGlobalType(existing_loc);
const new_ty = wasm.getGlobalType(location);
if (existing_ty.mutable != new_ty.mutable or existing_ty.valtype != new_ty.valtype) {
var err = try wasm.addErrorWithNotes(2);
try err.addMsg(wasm, "symbol '{s}' mismatching global types", .{sym_name});
try err.addNote(wasm, "first definition in '{s}'", .{existing_file_path});
try err.addNote(wasm, "next definition in '{s}'", .{obj_file.path()});
var err = try wasm.base.addErrorWithNotes(2);
try err.addMsg("symbol '{s}' mismatching global types", .{sym_name});
try err.addNote("first definition in '{s}'", .{existing_file_path});
try err.addNote("next definition in '{s}'", .{obj_file.path()});
}
}

@ -878,11 +878,11 @@ fn resolveSymbolsInObject(wasm: *Wasm, file_index: File.Index) !void {
const existing_ty = wasm.getFunctionSignature(existing_loc);
const new_ty = wasm.getFunctionSignature(location);
if (!existing_ty.eql(new_ty)) {
var err = try wasm.addErrorWithNotes(3);
try err.addMsg(wasm, "symbol '{s}' mismatching function signatures.", .{sym_name});
try err.addNote(wasm, "expected signature {}, but found signature {}", .{ existing_ty, new_ty });
try err.addNote(wasm, "first definition in '{s}'", .{existing_file_path});
try err.addNote(wasm, "next definition in '{s}'", .{obj_file.path()});
var err = try wasm.base.addErrorWithNotes(3);
try err.addMsg("symbol '{s}' mismatching function signatures.", .{sym_name});
try err.addNote("expected signature {}, but found signature {}", .{ existing_ty, new_ty });
try err.addNote("first definition in '{s}'", .{existing_file_path});
try err.addNote("next definition in '{s}'", .{obj_file.path()});
}
}

@ -930,9 +930,9 @@ fn resolveSymbolsInArchives(wasm: *Wasm) !void {
// Parse object and resolve symbols again before we check remaining
// undefined symbols.
var object = archive.parseObject(wasm, offset.items[0]) catch |e| {
var err_note = try wasm.addErrorWithNotes(1);
try err_note.addMsg(wasm, "Failed parsing object: {s}", .{@errorName(e)});
try err_note.addNote(wasm, "while parsing object in archive {s}", .{archive.name});
var err_note = try wasm.base.addErrorWithNotes(1);
try err_note.addMsg("Failed parsing object: {s}", .{@errorName(e)});
try err_note.addNote("while parsing object in archive {s}", .{archive.name});
return error.FlushFailure;
};
object.index = @enumFromInt(wasm.files.len);
@ -1237,9 +1237,9 @@ fn validateFeatures(
allowed[used_index] = is_enabled;
emit_features_count.* += @intFromBool(is_enabled);
} else if (is_enabled and !allowed[used_index]) {
var err = try wasm.addErrorWithNotes(1);
try err.addMsg(wasm, "feature '{}' not allowed, but used by linked object", .{@as(types.Feature.Tag, @enumFromInt(used_index))});
try err.addNote(wasm, "defined in '{s}'", .{wasm.files.items(.data)[used_set >> 1].object.path});
var err = try wasm.base.addErrorWithNotes(1);
try err.addMsg("feature '{}' not allowed, but used by linked object", .{@as(types.Feature.Tag, @enumFromInt(used_index))});
try err.addNote("defined in '{s}'", .{wasm.files.items(.data)[used_set >> 1].object.path});
valid_feature_set = false;
}
}
@ -1251,7 +1251,8 @@ fn validateFeatures(
if (shared_memory) {
const disallowed_feature = disallowed[@intFromEnum(types.Feature.Tag.shared_mem)];
if (@as(u1, @truncate(disallowed_feature)) != 0) {
try wasm.addErrorWithoutNotes(
var err = try wasm.base.addErrorWithNotes(0);
try err.addMsg(
"shared-memory is disallowed by '{s}' because it wasn't compiled with 'atomics' and 'bulk-memory' features enabled",
.{wasm.files.items(.data)[disallowed_feature >> 1].object.path},
);
@ -1260,7 +1261,8 @@ fn validateFeatures(

for ([_]types.Feature.Tag{ .atomics, .bulk_memory }) |feature| {
if (!allowed[@intFromEnum(feature)]) {
try wasm.addErrorWithoutNotes("feature '{}' is not used but is required for shared-memory", .{feature});
var err = try wasm.base.addErrorWithNotes(0);
try err.addMsg("feature '{}' is not used but is required for shared-memory", .{feature});
}
}
}
@ -1268,7 +1270,8 @@ fn validateFeatures(
if (has_tls) {
for ([_]types.Feature.Tag{ .atomics, .bulk_memory }) |feature| {
if (!allowed[@intFromEnum(feature)]) {
try wasm.addErrorWithoutNotes("feature '{}' is not used but is required for thread-local storage", .{feature});
var err = try wasm.base.addErrorWithNotes(0);
try err.addMsg("feature '{}' is not used but is required for thread-local storage", .{feature});
}
}
}
@ -1281,10 +1284,10 @@ fn validateFeatures(
// from here a feature is always used
const disallowed_feature = disallowed[@intFromEnum(feature.tag)];
if (@as(u1, @truncate(disallowed_feature)) != 0) {
var err = try wasm.addErrorWithNotes(2);
try err.addMsg(wasm, "feature '{}' is disallowed, but used by linked object", .{feature.tag});
try err.addNote(wasm, "disallowed by '{s}'", .{wasm.files.items(.data)[disallowed_feature >> 1].object.path});
try err.addNote(wasm, "used in '{s}'", .{object.path});
var err = try wasm.base.addErrorWithNotes(2);
try err.addMsg("feature '{}' is disallowed, but used by linked object", .{feature.tag});
try err.addNote("disallowed by '{s}'", .{wasm.files.items(.data)[disallowed_feature >> 1].object.path});
try err.addNote("used in '{s}'", .{object.path});
valid_feature_set = false;
}

@ -1295,10 +1298,10 @@ fn validateFeatures(
for (required, 0..) |required_feature, feature_index| {
const is_required = @as(u1, @truncate(required_feature)) != 0;
if (is_required and !object_used_features[feature_index]) {
var err = try wasm.addErrorWithNotes(2);
try err.addMsg(wasm, "feature '{}' is required but not used in linked object", .{@as(types.Feature.Tag, @enumFromInt(feature_index))});
try err.addNote(wasm, "required by '{s}'", .{wasm.files.items(.data)[required_feature >> 1].object.path});
try err.addNote(wasm, "missing in '{s}'", .{object.path});
var err = try wasm.base.addErrorWithNotes(2);
try err.addMsg("feature '{}' is required but not used in linked object", .{@as(types.Feature.Tag, @enumFromInt(feature_index))});
try err.addNote("required by '{s}'", .{wasm.files.items(.data)[required_feature >> 1].object.path});
try err.addNote("missing in '{s}'", .{object.path});
valid_feature_set = false;
}
}
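The `>> 1` and `@truncate` in these feature hunks read a packed entry: judging by how it is used, the low bit flags whether the feature was recorded and the upper bits carry the index of the object file that recorded it. A sketch of that inferred layout (not spelled out in the diff itself):

const std = @import("std");

fn packEntry(file_index: usize, recorded: bool) usize {
    return (file_index << 1) | @intFromBool(recorded);
}

test packEntry {
    const entry = packEntry(7, true);
    try std.testing.expect(@as(u1, @truncate(entry)) != 0); // recorded bit
    try std.testing.expectEqual(@as(usize, 7), entry >> 1); // file index
}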
@ -1376,9 +1379,9 @@ fn checkUndefinedSymbols(wasm: *const Wasm) !void {
else
wasm.name;
const symbol_name = undef.getName(wasm);
var err = try wasm.addErrorWithNotes(1);
try err.addMsg(wasm, "could not resolve undefined symbol '{s}'", .{symbol_name});
try err.addNote(wasm, "defined in '{s}'", .{file_name});
var err = try wasm.base.addErrorWithNotes(1);
try err.addMsg("could not resolve undefined symbol '{s}'", .{symbol_name});
try err.addNote("defined in '{s}'", .{file_name});
}
}
if (found_undefined_symbols) {
@ -1757,7 +1760,8 @@ fn setupInitFunctions(wasm: *Wasm) !void {
break :ty object.func_types[func.type_index];
};
if (ty.params.len != 0) {
try wasm.addErrorWithoutNotes("constructor functions cannot take arguments: '{s}'", .{object.string_table.get(symbol.name)});
var err = try wasm.base.addErrorWithNotes(0);
try err.addMsg("constructor functions cannot take arguments: '{s}'", .{object.string_table.get(symbol.name)});
}
log.debug("appended init func '{s}'\n", .{object.string_table.get(symbol.name)});
wasm.init_funcs.appendAssumeCapacity(.{
@ -2140,7 +2144,8 @@ fn checkExportNames(wasm: *Wasm) !void {

for (force_exp_names) |exp_name| {
const loc = wasm.findGlobalSymbol(exp_name) orelse {
try wasm.addErrorWithoutNotes("could not export '{s}', symbol not found", .{exp_name});
var err = try wasm.base.addErrorWithNotes(0);
try err.addMsg("could not export '{s}', symbol not found", .{exp_name});
failed_exports = true;
continue;
};
@ -2203,13 +2208,15 @@ fn setupStart(wasm: *Wasm) !void {
const entry_name = wasm.entry_name orelse return;

const symbol_loc = wasm.findGlobalSymbol(entry_name) orelse {
try wasm.addErrorWithoutNotes("Entry symbol '{s}' missing, use '-fno-entry' to suppress", .{entry_name});
var err = try wasm.base.addErrorWithNotes(0);
try err.addMsg("Entry symbol '{s}' missing, use '-fno-entry' to suppress", .{entry_name});
return error.FlushFailure;
};

const symbol = symbol_loc.getSymbol(wasm);
if (symbol.tag != .function) {
try wasm.addErrorWithoutNotes("Entry symbol '{s}' is not a function", .{entry_name});
var err = try wasm.base.addErrorWithNotes(0);
try err.addMsg("Entry symbol '{s}' is not a function", .{entry_name});
return error.FlushFailure;
}

@ -2314,13 +2321,16 @@ fn setupMemory(wasm: *Wasm) !void {

if (wasm.initial_memory) |initial_memory| {
if (!std.mem.isAlignedGeneric(u64, initial_memory, page_size)) {
try wasm.addErrorWithoutNotes("Initial memory must be {d}-byte aligned", .{page_size});
var err = try wasm.base.addErrorWithNotes(0);
try err.addMsg("Initial memory must be {d}-byte aligned", .{page_size});
}
if (memory_ptr > initial_memory) {
try wasm.addErrorWithoutNotes("Initial memory too small, must be at least {d} bytes", .{memory_ptr});
var err = try wasm.base.addErrorWithNotes(0);
try err.addMsg("Initial memory too small, must be at least {d} bytes", .{memory_ptr});
}
if (initial_memory > max_memory_allowed) {
try wasm.addErrorWithoutNotes("Initial memory exceeds maximum memory {d}", .{max_memory_allowed});
var err = try wasm.base.addErrorWithNotes(0);
try err.addMsg("Initial memory exceeds maximum memory {d}", .{max_memory_allowed});
}
memory_ptr = initial_memory;
}
@ -2337,13 +2347,16 @@ fn setupMemory(wasm: *Wasm) !void {

if (wasm.max_memory) |max_memory| {
if (!std.mem.isAlignedGeneric(u64, max_memory, page_size)) {
try wasm.addErrorWithoutNotes("Maximum memory must be {d}-byte aligned", .{page_size});
var err = try wasm.base.addErrorWithNotes(0);
try err.addMsg("Maximum memory must be {d}-byte aligned", .{page_size});
}
if (memory_ptr > max_memory) {
try wasm.addErrorWithoutNotes("Maximum memory too small, must be at least {d} bytes", .{memory_ptr});
var err = try wasm.base.addErrorWithNotes(0);
try err.addMsg("Maximum memory too small, must be at least {d} bytes", .{memory_ptr});
}
if (max_memory > max_memory_allowed) {
try wasm.addErrorWithoutNotes("Maximum memory exceeds maximum amount {d}", .{max_memory_allowed});
var err = try wasm.base.addErrorWithNotes(0);
try err.addMsg("Maximum memory exceeds maximum amount {d}", .{max_memory_allowed});
}
wasm.memories.limits.max = @as(u32, @intCast(max_memory / page_size));
wasm.memories.limits.setFlag(.WASM_LIMITS_FLAG_HAS_MAX);
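The memory validation above checks everything against the wasm page size. A compact sketch of the same three checks for the initial-memory case (64 KiB is the wasm page size; the cap and error names here are illustrative):

const std = @import("std");

const page_size: u64 = 64 * 1024; // wasm page size
const max_memory_allowed: u64 = (1 << 32) - 1; // hypothetical cap for the sketch

fn checkInitialMemory(initial: u64, data_end: u64) !void {
    if (!std.mem.isAlignedGeneric(u64, initial, page_size)) return error.Misaligned;
    if (data_end > initial) return error.TooSmall;
    if (initial > max_memory_allowed) return error.TooLarge;
}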
@ -2446,9 +2459,9 @@ pub fn getMatchingSegment(wasm: *Wasm, file_index: File.Index, symbol_index: Sym
break :blk index;
};
} else {
var err = try wasm.addErrorWithNotes(1);
try err.addMsg(wasm, "found unknown section '{s}'", .{section_name});
try err.addNote(wasm, "defined in '{s}'", .{obj_file.path()});
var err = try wasm.base.addErrorWithNotes(1);
try err.addMsg("found unknown section '{s}'", .{section_name});
try err.addNote("defined in '{s}'", .{obj_file.path()});
return error.UnexpectedValue;
}
},
@ -2564,23 +2577,23 @@ pub fn flushModule(wasm: *Wasm, arena: Allocator, tid: Zcu.PerThread.Id, prog_no
if (wasm.zig_object_index != .null) {
try wasm.resolveSymbolsInObject(wasm.zig_object_index);
}
if (comp.link_errors.items.len > 0) return error.FlushFailure;
if (wasm.base.hasErrors()) return error.FlushFailure;
for (wasm.objects.items) |object_index| {
try wasm.resolveSymbolsInObject(object_index);
}
if (comp.link_errors.items.len > 0) return error.FlushFailure;
if (wasm.base.hasErrors()) return error.FlushFailure;

var emit_features_count: u32 = 0;
var enabled_features: [@typeInfo(types.Feature.Tag).Enum.fields.len]bool = undefined;
try wasm.validateFeatures(&enabled_features, &emit_features_count);
try wasm.resolveSymbolsInArchives();
if (comp.link_errors.items.len > 0) return error.FlushFailure;
if (wasm.base.hasErrors()) return error.FlushFailure;
try wasm.resolveLazySymbols();
try wasm.checkUndefinedSymbols();
try wasm.checkExportNames();

try wasm.setupInitFunctions();
if (comp.link_errors.items.len > 0) return error.FlushFailure;
if (wasm.base.hasErrors()) return error.FlushFailure;
try wasm.setupStart();

try wasm.markReferences();
@ -2589,7 +2602,7 @@ pub fn flushModule(wasm: *Wasm, arena: Allocator, tid: Zcu.PerThread.Id, prog_no
try wasm.mergeTypes();
try wasm.allocateAtoms();
try wasm.setupMemory();
if (comp.link_errors.items.len > 0) return error.FlushFailure;
if (wasm.base.hasErrors()) return error.FlushFailure;
wasm.allocateVirtualAddresses();
wasm.mapFunctionTable();
try wasm.initializeCallCtorsFunction();
@ -2599,7 +2612,7 @@ pub fn flushModule(wasm: *Wasm, arena: Allocator, tid: Zcu.PerThread.Id, prog_no
try wasm.setupStartSection();
try wasm.setupExports();
try wasm.writeToFile(enabled_features, emit_features_count, arena);
if (comp.link_errors.items.len > 0) return error.FlushFailure;
if (wasm.base.hasErrors()) return error.FlushFailure;
}

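flushModule above repeats one checkpoint pattern: run a phase, then abort with error.FlushFailure as soon as any diagnostics are queued, so later phases never run against half-resolved state. Reduced to its skeleton (hypothetical signatures, not the linker's):

const std = @import("std");

fn runPhases(
    phases: []const *const fn () anyerror!void,
    hasErrors: *const fn () bool,
) !void {
    for (phases) |phase| {
        try phase(); // hard errors propagate immediately
        if (hasErrors()) return error.FlushFailure; // queued diagnostics stop the pipeline
    }
}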
/// Writes the WebAssembly in-memory module to the file
@ -2997,7 +3010,10 @@ fn writeToFile(
}) catch unreachable;
try emitBuildIdSection(&binary_bytes, str);
},
else => |mode| try wasm.addErrorWithoutNotes("build-id '{s}' is not supported for WebAssembly", .{@tagName(mode)}),
else => |mode| {
var err = try wasm.base.addErrorWithNotes(0);
try err.addMsg("build-id '{s}' is not supported for WebAssembly", .{@tagName(mode)});
},
}

var debug_bytes = std.ArrayList(u8).init(gpa);
@ -4086,57 +4102,3 @@ fn defaultEntrySymbolName(wasi_exec_model: std.builtin.WasiExecModel) []const u8
.command => "_start",
};
}

const ErrorWithNotes = struct {
/// Allocated index in comp.link_errors array.
index: usize,

/// Next available note slot.
note_slot: usize = 0,

pub fn addMsg(
err: ErrorWithNotes,
wasm_file: *const Wasm,
comptime format: []const u8,
args: anytype,
) error{OutOfMemory}!void {
const comp = wasm_file.base.comp;
const gpa = comp.gpa;
const err_msg = &comp.link_errors.items[err.index];
err_msg.msg = try std.fmt.allocPrint(gpa, format, args);
}

pub fn addNote(
err: *ErrorWithNotes,
wasm_file: *const Wasm,
comptime format: []const u8,
args: anytype,
) error{OutOfMemory}!void {
const comp = wasm_file.base.comp;
const gpa = comp.gpa;
const err_msg = &comp.link_errors.items[err.index];
err_msg.notes[err.note_slot] = .{ .msg = try std.fmt.allocPrint(gpa, format, args) };
err.note_slot += 1;
}
};

pub fn addErrorWithNotes(wasm: *const Wasm, note_count: usize) error{OutOfMemory}!ErrorWithNotes {
const comp = wasm.base.comp;
const gpa = comp.gpa;
try comp.link_errors.ensureUnusedCapacity(gpa, 1);
return wasm.addErrorWithNotesAssumeCapacity(note_count);
}

pub fn addErrorWithoutNotes(wasm: *const Wasm, comptime fmt: []const u8, args: anytype) !void {
const err = try wasm.addErrorWithNotes(0);
try err.addMsg(wasm, fmt, args);
}

fn addErrorWithNotesAssumeCapacity(wasm: *const Wasm, note_count: usize) error{OutOfMemory}!ErrorWithNotes {
const comp = wasm.base.comp;
const gpa = comp.gpa;
const index = comp.link_errors.items.len;
const err = comp.link_errors.addOneAssumeCapacity();
err.* = .{ .msg = undefined, .notes = try gpa.alloc(link.File.ErrorMsg, note_count) };
return .{ .index = index };
}

@ -235,27 +235,27 @@ fn checkLegacyIndirectFunctionTable(object: *Object, wasm_file: *const Wasm) !?S
if (object.imported_tables_count == table_count) return null;

if (table_count != 0) {
var err = try wasm_file.addErrorWithNotes(1);
try err.addMsg(wasm_file, "Expected a table entry symbol for each of the {d} table(s), but instead got {d} symbols.", .{
var err = try wasm_file.base.addErrorWithNotes(1);
try err.addMsg("Expected a table entry symbol for each of the {d} table(s), but instead got {d} symbols.", .{
object.imported_tables_count,
table_count,
});
try err.addNote(wasm_file, "defined in '{s}'", .{object.path});
try err.addNote("defined in '{s}'", .{object.path});
return error.MissingTableSymbols;
}

// MVP object files cannot have any table definitions, only imports (for the indirect function table).
if (object.tables.len > 0) {
var err = try wasm_file.addErrorWithNotes(1);
try err.addMsg(wasm_file, "Unexpected table definition without representing table symbols.", .{});
try err.addNote(wasm_file, "defined in '{s}'", .{object.path});
var err = try wasm_file.base.addErrorWithNotes(1);
try err.addMsg("Unexpected table definition without representing table symbols.", .{});
try err.addNote("defined in '{s}'", .{object.path});
return error.UnexpectedTable;
}

if (object.imported_tables_count != 1) {
var err = try wasm_file.addErrorWithNotes(1);
try err.addMsg(wasm_file, "Found more than one table import, but no representing table symbols", .{});
try err.addNote(wasm_file, "defined in '{s}'", .{object.path});
var err = try wasm_file.base.addErrorWithNotes(1);
try err.addMsg("Found more than one table import, but no representing table symbols", .{});
try err.addNote("defined in '{s}'", .{object.path});
return error.MissingTableSymbols;
}

@ -266,9 +266,9 @@ fn checkLegacyIndirectFunctionTable(object: *Object, wasm_file: *const Wasm) !?S
} else unreachable;

if (!std.mem.eql(u8, object.string_table.get(table_import.name), "__indirect_function_table")) {
var err = try wasm_file.addErrorWithNotes(1);
try err.addMsg(wasm_file, "Non-indirect function table import '{s}' is missing a corresponding symbol", .{object.string_table.get(table_import.name)});
try err.addNote(wasm_file, "defined in '{s}'", .{object.path});
var err = try wasm_file.base.addErrorWithNotes(1);
try err.addMsg("Non-indirect function table import '{s}' is missing a corresponding symbol", .{object.string_table.get(table_import.name)});
try err.addNote("defined in '{s}'", .{object.path});
return error.MissingTableSymbols;
}

|
||||
try reader.readNoEof(name);
|
||||
|
||||
const tag = types.known_features.get(name) orelse {
|
||||
var err = try parser.wasm_file.addErrorWithNotes(1);
|
||||
try err.addMsg(parser.wasm_file, "Object file contains unknown feature: {s}", .{name});
|
||||
try err.addNote(parser.wasm_file, "defined in '{s}'", .{parser.object.path});
|
||||
var err = try parser.wasm_file.base.addErrorWithNotes(1);
|
||||
try err.addMsg("Object file contains unknown feature: {s}", .{name});
|
||||
try err.addNote("defined in '{s}'", .{parser.object.path});
|
||||
return error.UnknownFeature;
|
||||
};
|
||||
feature.* = .{
|
||||
|
||||