link: consolidate diagnostics

By organizing linker diagnostics into this struct, it becomes possible
to share more code between linker backends, and more importantly it
becomes possible to pass only the Diag struct to some functions, rather
than passing the entire linker state object in. This makes data
dependencies more obvious, making it easier to rearrange code and to
multithread.

Also fix MachO code abusing an atomic variable. Not only was it using
the wrong atomic operation, but it was also unnecessary additional
state, since that state is already protected by a mutex.
This commit is contained in:
This commit is contained in:
Andrew Kelley 2024-10-11 01:15:30 -07:00
parent 5e53203e82
commit 13fb68c064
15 changed files with 513 additions and 440 deletions

View File

@ -106,10 +106,7 @@ win32_resource_table: if (dev.env.supports(.win32_resource)) std.AutoArrayHashMa
pub fn deinit(_: @This(), _: Allocator) void {} pub fn deinit(_: @This(), _: Allocator) void {}
} = .{}, } = .{},
link_errors: std.ArrayListUnmanaged(link.File.ErrorMsg) = .empty, link_diags: link.Diags,
link_errors_mutex: std.Thread.Mutex = .{},
link_error_flags: link.File.ErrorFlags = .{},
lld_errors: std.ArrayListUnmanaged(LldError) = .empty,
work_queues: [ work_queues: [
len: { len: {
@ -842,21 +839,6 @@ pub const MiscError = struct {
} }
}; };
/// A single diagnostic captured from LLD's stderr output.
pub const LldError = struct {
    /// Allocated with gpa.
    msg: []const u8,
    /// ">>> " context lines emitted by LLD after the error line.
    /// Each line, and the slice itself, is allocated with gpa.
    context_lines: []const []const u8 = &.{},

    pub fn deinit(self: *LldError, gpa: Allocator) void {
        for (self.context_lines) |line| {
            gpa.free(line);
        }
        gpa.free(self.context_lines);
        gpa.free(self.msg);
    }
};
pub const EmitLoc = struct { pub const EmitLoc = struct {
/// If this is `null` it means the file will be output to the cache directory. /// If this is `null` it means the file will be output to the cache directory.
/// When provided, both the open file handle and the path name must outlive the `Compilation`. /// When provided, both the open file handle and the path name must outlive the `Compilation`.
@ -1558,6 +1540,7 @@ pub fn create(gpa: Allocator, arena: Allocator, options: CreateOptions) !*Compil
.global_cc_argv = options.global_cc_argv, .global_cc_argv = options.global_cc_argv,
.file_system_inputs = options.file_system_inputs, .file_system_inputs = options.file_system_inputs,
.parent_whole_cache = options.parent_whole_cache, .parent_whole_cache = options.parent_whole_cache,
.link_diags = .init(gpa),
}; };
// Prevent some footguns by making the "any" fields of config reflect // Prevent some footguns by making the "any" fields of config reflect
@ -1999,13 +1982,7 @@ pub fn destroy(comp: *Compilation) void {
} }
comp.failed_win32_resources.deinit(gpa); comp.failed_win32_resources.deinit(gpa);
for (comp.link_errors.items) |*item| item.deinit(gpa); comp.link_diags.deinit();
comp.link_errors.deinit(gpa);
for (comp.lld_errors.items) |*lld_error| {
lld_error.deinit(gpa);
}
comp.lld_errors.deinit(gpa);
comp.clearMiscFailures(); comp.clearMiscFailures();
@ -2304,7 +2281,7 @@ pub fn update(comp: *Compilation, main_progress_node: std.Progress.Node) !void {
if (anyErrors(comp)) { if (anyErrors(comp)) {
// Skip flushing and keep source files loaded for error reporting. // Skip flushing and keep source files loaded for error reporting.
comp.link_error_flags = .{}; comp.link_diags.flags = .{};
return; return;
} }
@ -2451,7 +2428,7 @@ fn flush(
if (comp.bin_file) |lf| { if (comp.bin_file) |lf| {
// This is needed before reading the error flags. // This is needed before reading the error flags.
lf.flush(arena, tid, prog_node) catch |err| switch (err) { lf.flush(arena, tid, prog_node) catch |err| switch (err) {
error.FlushFailure, error.LinkFailure => {}, // error reported through link_error_flags error.FlushFailure, error.LinkFailure => {}, // error reported through link_diags.flags
error.LLDReportedFailure => {}, // error reported via lockAndParseLldStderr error.LLDReportedFailure => {}, // error reported via lockAndParseLldStderr
else => |e| return e, else => |e| return e,
}; };
@ -3070,7 +3047,7 @@ pub fn getAllErrorsAlloc(comp: *Compilation) !ErrorBundle {
try bundle.addBundleAsRoots(error_bundle); try bundle.addBundleAsRoots(error_bundle);
} }
for (comp.lld_errors.items) |lld_error| { for (comp.link_diags.lld.items) |lld_error| {
const notes_len = @as(u32, @intCast(lld_error.context_lines.len)); const notes_len = @as(u32, @intCast(lld_error.context_lines.len));
try bundle.addRootErrorMessage(.{ try bundle.addRootErrorMessage(.{
@ -3091,7 +3068,7 @@ pub fn getAllErrorsAlloc(comp: *Compilation) !ErrorBundle {
}); });
if (value.children) |b| try bundle.addBundleAsNotes(b); if (value.children) |b| try bundle.addBundleAsNotes(b);
} }
if (comp.alloc_failure_occurred) { if (comp.alloc_failure_occurred or comp.link_diags.flags.alloc_failure_occurred) {
try bundle.addRootErrorMessage(.{ try bundle.addRootErrorMessage(.{
.msg = try bundle.addString("memory allocation failure"), .msg = try bundle.addString("memory allocation failure"),
}); });
@ -3220,14 +3197,14 @@ pub fn getAllErrorsAlloc(comp: *Compilation) !ErrorBundle {
} }
if (bundle.root_list.items.len == 0) { if (bundle.root_list.items.len == 0) {
if (comp.link_error_flags.no_entry_point_found) { if (comp.link_diags.flags.no_entry_point_found) {
try bundle.addRootErrorMessage(.{ try bundle.addRootErrorMessage(.{
.msg = try bundle.addString("no entry point found"), .msg = try bundle.addString("no entry point found"),
}); });
} }
} }
if (comp.link_error_flags.missing_libc) { if (comp.link_diags.flags.missing_libc) {
try bundle.addRootErrorMessage(.{ try bundle.addRootErrorMessage(.{
.msg = try bundle.addString("libc not available"), .msg = try bundle.addString("libc not available"),
.notes_len = 2, .notes_len = 2,
@ -3241,7 +3218,7 @@ pub fn getAllErrorsAlloc(comp: *Compilation) !ErrorBundle {
})); }));
} }
for (comp.link_errors.items) |link_err| { for (comp.link_diags.msgs.items) |link_err| {
try bundle.addRootErrorMessage(.{ try bundle.addRootErrorMessage(.{
.msg = try bundle.addString(link_err.msg), .msg = try bundle.addString(link_err.msg),
.notes_len = @intCast(link_err.notes.len), .notes_len = @intCast(link_err.notes.len),
@ -6161,6 +6138,7 @@ fn wantBuildLibUnwindFromSource(comp: *Compilation) bool {
} }
fn setAllocFailure(comp: *Compilation) void { fn setAllocFailure(comp: *Compilation) void {
@branchHint(.cold);
log.debug("memory allocation failure", .{}); log.debug("memory allocation failure", .{});
comp.alloc_failure_occurred = true; comp.alloc_failure_occurred = true;
} }
@ -6195,54 +6173,6 @@ pub fn lockAndSetMiscFailure(
return setMiscFailure(comp, tag, format, args); return setMiscFailure(comp, tag, format, args);
} }
/// Splits LLD's stderr into individual error messages plus their ">>> "
/// context lines, appending each as an `LldError` to `comp.lld_errors`.
/// Not thread-safe on its own; see `lockAndParseLldStderr`.
fn parseLldStderr(comp: *Compilation, prefix: []const u8, stderr: []const u8) Allocator.Error!void {
    var context_lines = std.ArrayList([]const u8).init(comp.gpa);
    defer context_lines.deinit();

    var current_err: ?*LldError = null;
    // LLD terminates lines with "\r\n" on Windows hosts.
    var lines = mem.splitSequence(u8, stderr, if (builtin.os.tag == .windows) "\r\n" else "\n");
    while (lines.next()) |line| {
        // A new diagnostic starts with "<prefix>:".
        if (line.len > prefix.len + ":".len and
            mem.eql(u8, line[0..prefix.len], prefix) and line[prefix.len] == ':')
        {
            // Flush context lines accumulated so far into the previous error.
            if (current_err) |err| {
                err.context_lines = try context_lines.toOwnedSlice();
            }

            // Keep only the text after "error: ", re-prefixed with `prefix`.
            var split = mem.splitSequence(u8, line, "error: ");
            _ = split.first();

            const duped_msg = try std.fmt.allocPrint(comp.gpa, "{s}: {s}", .{ prefix, split.rest() });
            errdefer comp.gpa.free(duped_msg);

            current_err = try comp.lld_errors.addOne(comp.gpa);
            current_err.?.* = .{ .msg = duped_msg };
        } else if (current_err != null) {
            // Lines following a diagnostic are context; strip the ">>> "
            // marker and trailing whitespace before recording them.
            const context_prefix = ">>> ";
            var trimmed = mem.trimRight(u8, line, &std.ascii.whitespace);
            if (mem.startsWith(u8, trimmed, context_prefix)) {
                trimmed = trimmed[context_prefix.len..];
            }

            if (trimmed.len > 0) {
                const duped_line = try comp.gpa.dupe(u8, trimmed);
                try context_lines.append(duped_line);
            }
        }
    }

    // Attach any remaining context lines to the final error.
    if (current_err) |err| {
        err.context_lines = try context_lines.toOwnedSlice();
    }
}

/// Thread-safe wrapper around `parseLldStderr`; allocation failure is
/// recorded via `setAllocFailure` rather than propagated.
pub fn lockAndParseLldStderr(comp: *Compilation, prefix: []const u8, stderr: []const u8) void {
    comp.mutex.lock();
    defer comp.mutex.unlock();
    comp.parseLldStderr(prefix, stderr) catch comp.setAllocFailure();
}
pub fn dump_argv(argv: []const []const u8) void { pub fn dump_argv(argv: []const []const u8) void {
std.debug.lockStdErr(); std.debug.lockStdErr();
defer std.debug.unlockStdErr(); defer std.debug.unlockStdErr();

View File

@ -37,6 +37,252 @@ pub const SystemLib = struct {
path: ?Path, path: ?Path,
}; };
/// Consolidated linker diagnostics state, shared between linker backends.
/// Passing only this struct (rather than the whole linker state) makes data
/// dependencies explicit at error-reporting call sites.
pub const Diags = struct {
    /// Stored here so that function definitions can distinguish between
    /// needing an allocator for things besides error reporting.
    gpa: Allocator,
    /// Serializes concurrent error reporting (see `addError`,
    /// `addErrorWithNotes`, `lockAndParseLldStderr`).
    mutex: std.Thread.Mutex,
    /// Accumulated error messages, each possibly carrying notes.
    msgs: std.ArrayListUnmanaged(Msg),
    /// Boolean error conditions with fixed user-facing messages.
    flags: Flags,
    /// Errors parsed out of LLD's stderr output.
    lld: std.ArrayListUnmanaged(Lld),

    pub const Flags = packed struct {
        no_entry_point_found: bool = false,
        missing_libc: bool = false,
        alloc_failure_occurred: bool = false,

        // Unsigned integer type with exactly one bit per flag field, used
        // to test all flags at once via @bitCast.
        const Int = blk: {
            const bits = @typeInfo(@This()).@"struct".fields.len;
            break :blk @Type(.{ .int = .{
                .signedness = .unsigned,
                .bits = bits,
            } });
        };

        /// Returns true if any flag is set.
        pub fn anySet(ef: Flags) bool {
            return @as(Int, @bitCast(ef)) > 0;
        }
    };

    /// A single diagnostic captured from LLD's stderr output.
    pub const Lld = struct {
        /// Allocated with gpa.
        msg: []const u8,
        /// ">>> " context lines following the error; each line, and the
        /// slice itself, is allocated with gpa.
        context_lines: []const []const u8 = &.{},

        pub fn deinit(self: *Lld, gpa: Allocator) void {
            for (self.context_lines) |line| gpa.free(line);
            gpa.free(self.context_lines);
            gpa.free(self.msg);
            self.* = undefined;
        }
    };

    /// A linker error message with optional notes; all memory allocated
    /// with gpa.
    pub const Msg = struct {
        msg: []const u8,
        notes: []Msg = &.{},

        pub fn deinit(self: *Msg, gpa: Allocator) void {
            for (self.notes) |*note| note.deinit(gpa);
            gpa.free(self.notes);
            gpa.free(self.msg);
        }
    };

    /// Builder returned by `addErrorWithNotes`: fills in the reserved
    /// message entry in `msgs` and its preallocated note slots.
    pub const ErrorWithNotes = struct {
        diags: *Diags,

        /// Allocated index in diags.msgs array.
        index: usize,

        /// Next available note slot.
        note_slot: usize = 0,

        /// Sets the main message of the reserved error entry.
        pub fn addMsg(
            err: ErrorWithNotes,
            comptime format: []const u8,
            args: anytype,
        ) error{OutOfMemory}!void {
            const gpa = err.diags.gpa;
            const err_msg = &err.diags.msgs.items[err.index];
            err_msg.msg = try std.fmt.allocPrint(gpa, format, args);
        }

        /// Fills the next note slot. Notes were preallocated by
        /// `addErrorWithNotes`; exceeding the reserved count is a bug.
        pub fn addNote(
            err: *ErrorWithNotes,
            comptime format: []const u8,
            args: anytype,
        ) error{OutOfMemory}!void {
            const gpa = err.diags.gpa;
            const err_msg = &err.diags.msgs.items[err.index];
            assert(err.note_slot < err_msg.notes.len);
            err_msg.notes[err.note_slot] = .{ .msg = try std.fmt.allocPrint(gpa, format, args) };
            err.note_slot += 1;
        }
    };

    pub fn init(gpa: Allocator) Diags {
        return .{
            .gpa = gpa,
            .mutex = .{},
            .msgs = .empty,
            .flags = .{},
            .lld = .empty,
        };
    }

    /// Frees all stored messages. The struct is unusable afterwards.
    pub fn deinit(diags: *Diags) void {
        const gpa = diags.gpa;

        for (diags.msgs.items) |*item| item.deinit(gpa);
        diags.msgs.deinit(gpa);

        for (diags.lld.items) |*item| item.deinit(gpa);
        diags.lld.deinit(gpa);

        diags.* = undefined;
    }

    /// Returns true if any error message or error flag has been recorded.
    pub fn hasErrors(diags: *Diags) bool {
        return diags.msgs.items.len > 0 or diags.flags.anySet();
    }

    /// Thread-safe wrapper around `parseLldStderr`; allocation failure is
    /// recorded via `setAllocFailure` rather than propagated.
    pub fn lockAndParseLldStderr(diags: *Diags, prefix: []const u8, stderr: []const u8) void {
        diags.mutex.lock();
        defer diags.mutex.unlock();
        diags.parseLldStderr(prefix, stderr) catch diags.setAllocFailure();
    }

    /// Splits LLD's stderr into individual error messages plus their ">>> "
    /// context lines, appending each as an `Lld` entry. Caller holds `mutex`.
    fn parseLldStderr(
        diags: *Diags,
        prefix: []const u8,
        stderr: []const u8,
    ) Allocator.Error!void {
        const gpa = diags.gpa;

        var context_lines = std.ArrayList([]const u8).init(gpa);
        defer context_lines.deinit();

        var current_err: ?*Lld = null;
        // LLD terminates lines with "\r\n" on Windows hosts.
        var lines = mem.splitSequence(u8, stderr, if (builtin.os.tag == .windows) "\r\n" else "\n");
        while (lines.next()) |line| {
            // A new diagnostic starts with "<prefix>:".
            if (line.len > prefix.len + ":".len and
                mem.eql(u8, line[0..prefix.len], prefix) and line[prefix.len] == ':')
            {
                // Flush context lines accumulated so far into the previous error.
                if (current_err) |err| {
                    err.context_lines = try context_lines.toOwnedSlice();
                }

                // Keep only the text after "error: ", re-prefixed with `prefix`.
                var split = mem.splitSequence(u8, line, "error: ");
                _ = split.first();

                const duped_msg = try std.fmt.allocPrint(gpa, "{s}: {s}", .{ prefix, split.rest() });
                errdefer gpa.free(duped_msg);

                current_err = try diags.lld.addOne(gpa);
                current_err.?.* = .{ .msg = duped_msg };
            } else if (current_err != null) {
                // Lines following a diagnostic are context; strip the ">>> "
                // marker and trailing whitespace before recording them.
                const context_prefix = ">>> ";
                var trimmed = mem.trimRight(u8, line, &std.ascii.whitespace);
                if (mem.startsWith(u8, trimmed, context_prefix)) {
                    trimmed = trimmed[context_prefix.len..];
                }

                if (trimmed.len > 0) {
                    const duped_line = try gpa.dupe(u8, trimmed);
                    try context_lines.append(duped_line);
                }
            }
        }

        // Attach any remaining context lines to the final error.
        if (current_err) |err| {
            err.context_lines = try context_lines.toOwnedSlice();
        }
    }

    /// Records an error message and returns `error.LinkFailure`, for use
    /// directly at error-return sites.
    pub fn fail(diags: *Diags, comptime format: []const u8, args: anytype) error{LinkFailure} {
        @branchHint(.cold);
        addError(diags, format, args);
        return error.LinkFailure;
    }

    /// Thread-safe. Records an error message; on allocation failure the
    /// `alloc_failure_occurred` flag is set instead of propagating.
    pub fn addError(diags: *Diags, comptime format: []const u8, args: anytype) void {
        @branchHint(.cold);
        const gpa = diags.gpa;
        diags.mutex.lock();
        defer diags.mutex.unlock();
        // Capacity is reserved before formatting so the formatted string
        // cannot be orphaned by a failed append.
        diags.msgs.ensureUnusedCapacity(gpa, 1) catch |err| switch (err) {
            error.OutOfMemory => {
                diags.flags.alloc_failure_occurred = true;
                return;
            },
        };
        const err_msg: Msg = .{
            .msg = std.fmt.allocPrint(gpa, format, args) catch |err| switch (err) {
                error.OutOfMemory => {
                    diags.flags.alloc_failure_occurred = true;
                    return;
                },
            },
        };
        diags.msgs.appendAssumeCapacity(err_msg);
    }

    /// Thread-safe. Reserves one error entry with `note_count` empty notes
    /// and returns a builder for filling it in.
    pub fn addErrorWithNotes(diags: *Diags, note_count: usize) error{OutOfMemory}!ErrorWithNotes {
        @branchHint(.cold);
        const gpa = diags.gpa;
        diags.mutex.lock();
        defer diags.mutex.unlock();
        try diags.msgs.ensureUnusedCapacity(gpa, 1);
        return addErrorWithNotesAssumeCapacity(diags, note_count);
    }

    /// Like `addErrorWithNotes` but assumes the caller already ensured
    /// capacity in `msgs` and handles any required locking.
    pub fn addErrorWithNotesAssumeCapacity(diags: *Diags, note_count: usize) error{OutOfMemory}!ErrorWithNotes {
        @branchHint(.cold);
        const gpa = diags.gpa;
        const index = diags.msgs.items.len;
        const err = diags.msgs.addOneAssumeCapacity();
        err.* = .{
            .msg = undefined,
            .notes = try gpa.alloc(Diags.Msg, note_count),
        };
        return .{
            .diags = diags,
            .index = index,
        };
    }

    /// Reports an error with one "tried ..." note per checked path.
    pub fn reportMissingLibraryError(
        diags: *Diags,
        checked_paths: []const []const u8,
        comptime format: []const u8,
        args: anytype,
    ) error{OutOfMemory}!void {
        @branchHint(.cold);
        var err = try diags.addErrorWithNotes(checked_paths.len);
        try err.addMsg(format, args);
        for (checked_paths) |path| {
            try err.addNote("tried {s}", .{path});
        }
    }

    /// Reports an error with a "while parsing <path>" note.
    pub fn reportParseError(
        diags: *Diags,
        path: Path,
        comptime format: []const u8,
        args: anytype,
    ) error{OutOfMemory}!void {
        @branchHint(.cold);
        var err = try diags.addErrorWithNotes(1);
        try err.addMsg(format, args);
        try err.addNote("while parsing {}", .{path});
    }

    /// Records that a memory allocation failure occurred; the failure is
    /// surfaced later as a root error message.
    pub fn setAllocFailure(diags: *Diags) void {
        @branchHint(.cold);
        log.debug("memory allocation failure", .{});
        diags.flags.alloc_failure_occurred = true;
    }
};
pub fn hashAddSystemLibs( pub fn hashAddSystemLibs(
man: *Cache.Manifest, man: *Cache.Manifest,
hm: std.StringArrayHashMapUnmanaged(SystemLib), hm: std.StringArrayHashMapUnmanaged(SystemLib),
@ -446,58 +692,6 @@ pub const File = struct {
} }
} }
/// Builder obtained from `addErrorWithNotes`: fills in the reserved
/// entry in `comp.link_errors` and its preallocated note slots.
pub const ErrorWithNotes = struct {
    base: *const File,

    /// Allocated index in base.errors array.
    index: usize,

    /// Next available note slot.
    note_slot: usize = 0,

    /// Sets the main message of the reserved error entry.
    pub fn addMsg(
        err: ErrorWithNotes,
        comptime format: []const u8,
        args: anytype,
    ) error{OutOfMemory}!void {
        const gpa = err.base.comp.gpa;
        const err_msg = &err.base.comp.link_errors.items[err.index];
        err_msg.msg = try std.fmt.allocPrint(gpa, format, args);
    }

    /// Fills the next note slot. Notes were preallocated by
    /// `addErrorWithNotes*`; exceeding the reserved count is a bug.
    pub fn addNote(
        err: *ErrorWithNotes,
        comptime format: []const u8,
        args: anytype,
    ) error{OutOfMemory}!void {
        const gpa = err.base.comp.gpa;
        const err_msg = &err.base.comp.link_errors.items[err.index];
        assert(err.note_slot < err_msg.notes.len);
        err_msg.notes[err.note_slot] = .{ .msg = try std.fmt.allocPrint(gpa, format, args) };
        err.note_slot += 1;
    }
};

/// Thread-safe. Reserves one entry in `link_errors` with `note_count`
/// empty notes and returns a builder for filling it in.
pub fn addErrorWithNotes(base: *const File, note_count: usize) error{OutOfMemory}!ErrorWithNotes {
    base.comp.link_errors_mutex.lock();
    defer base.comp.link_errors_mutex.unlock();
    const gpa = base.comp.gpa;
    try base.comp.link_errors.ensureUnusedCapacity(gpa, 1);
    return base.addErrorWithNotesAssumeCapacity(note_count);
}

/// Like `addErrorWithNotes` but assumes the caller already ensured
/// capacity in `link_errors` and handles any required locking.
pub fn addErrorWithNotesAssumeCapacity(base: *const File, note_count: usize) error{OutOfMemory}!ErrorWithNotes {
    const gpa = base.comp.gpa;
    const index = base.comp.link_errors.items.len;
    const err = base.comp.link_errors.addOneAssumeCapacity();
    err.* = .{ .msg = undefined, .notes = try gpa.alloc(ErrorMsg, note_count) };
    return .{ .base = base, .index = index };
}

/// Returns true if any linker error message or error flag has been recorded.
pub fn hasErrors(base: *const File) bool {
    return base.comp.link_errors.items.len > 0 or base.comp.link_error_flags.isSet();
}
pub fn releaseLock(self: *File) void { pub fn releaseLock(self: *File) void {
if (self.lock) |*lock| { if (self.lock) |*lock| {
lock.release(); lock.release();
@ -523,7 +717,7 @@ pub const File = struct {
} }
/// TODO audit this error set. most of these should be collapsed into one error, /// TODO audit this error set. most of these should be collapsed into one error,
/// and ErrorFlags should be updated to convey the meaning to the user. /// and Diags.Flags should be updated to convey the meaning to the user.
pub const FlushError = error{ pub const FlushError = error{
CacheUnavailable, CacheUnavailable,
CurrentWorkingDirectoryUnlinked, CurrentWorkingDirectoryUnlinked,
@ -939,36 +1133,6 @@ pub const File = struct {
} }
}; };
/// Boolean error conditions that carry fixed user-facing messages
/// rather than allocated error text.
pub const ErrorFlags = packed struct {
    no_entry_point_found: bool = false,
    missing_libc: bool = false,

    // Unsigned integer type with exactly one bit per flag field, used
    // to test all flags at once via @bitCast.
    const Int = blk: {
        const bits = @typeInfo(@This()).@"struct".fields.len;
        break :blk @Type(.{ .int = .{
            .signedness = .unsigned,
            .bits = bits,
        } });
    };

    /// Returns true if any flag is set.
    fn isSet(ef: ErrorFlags) bool {
        return @as(Int, @bitCast(ef)) > 0;
    }
};

/// A linker error message with optional notes; all memory allocated
/// with gpa.
pub const ErrorMsg = struct {
    msg: []const u8,
    notes: []ErrorMsg = &.{},

    pub fn deinit(self: *ErrorMsg, gpa: Allocator) void {
        for (self.notes) |*note| {
            note.deinit(gpa);
        }
        gpa.free(self.notes);
        gpa.free(self.msg);
    }
};
pub const LazySymbol = struct { pub const LazySymbol = struct {
pub const Kind = enum { code, const_data }; pub const Kind = enum { code, const_data };
@ -1154,7 +1318,8 @@ pub fn spawnLld(
switch (term) { switch (term) {
.Exited => |code| if (code != 0) { .Exited => |code| if (code != 0) {
if (comp.clang_passthrough_mode) std.process.exit(code); if (comp.clang_passthrough_mode) std.process.exit(code);
comp.lockAndParseLldStderr(argv[1], stderr); const diags = &comp.link_diags;
diags.lockAndParseLldStderr(argv[1], stderr);
return error.LLDReportedFailure; return error.LLDReportedFailure;
}, },
else => { else => {

View File

@ -1679,6 +1679,7 @@ pub fn flushModule(self: *Coff, arena: Allocator, tid: Zcu.PerThread.Id, prog_no
const comp = self.base.comp; const comp = self.base.comp;
const gpa = comp.gpa; const gpa = comp.gpa;
const diags = &comp.link_diags;
if (self.llvm_object) |llvm_object| { if (self.llvm_object) |llvm_object| {
try self.base.emitLlvmObject(arena, llvm_object, prog_node); try self.base.emitLlvmObject(arena, llvm_object, prog_node);
@ -1796,10 +1797,10 @@ pub fn flushModule(self: *Coff, arena: Allocator, tid: Zcu.PerThread.Id, prog_no
if (self.entry_addr == null and comp.config.output_mode == .Exe) { if (self.entry_addr == null and comp.config.output_mode == .Exe) {
log.debug("flushing. no_entry_point_found = true\n", .{}); log.debug("flushing. no_entry_point_found = true\n", .{});
comp.link_error_flags.no_entry_point_found = true; diags.flags.no_entry_point_found = true;
} else { } else {
log.debug("flushing. no_entry_point_found = false\n", .{}); log.debug("flushing. no_entry_point_found = false\n", .{});
comp.link_error_flags.no_entry_point_found = false; diags.flags.no_entry_point_found = false;
try self.writeHeader(); try self.writeHeader();
} }

View File

@ -769,6 +769,7 @@ pub fn flushModule(self: *Elf, arena: Allocator, tid: Zcu.PerThread.Id, prog_nod
const comp = self.base.comp; const comp = self.base.comp;
const gpa = comp.gpa; const gpa = comp.gpa;
const diags = &comp.link_diags;
if (self.llvm_object) |llvm_object| { if (self.llvm_object) |llvm_object| {
try self.base.emitLlvmObject(arena, llvm_object, prog_node); try self.base.emitLlvmObject(arena, llvm_object, prog_node);
@ -848,7 +849,7 @@ pub fn flushModule(self: *Elf, arena: Allocator, tid: Zcu.PerThread.Id, prog_nod
} }
// libc dep // libc dep
comp.link_error_flags.missing_libc = false; diags.flags.missing_libc = false;
if (comp.config.link_libc) { if (comp.config.link_libc) {
if (comp.libc_installation) |lc| { if (comp.libc_installation) |lc| {
const flags = target_util.libcFullLinkFlags(target); const flags = target_util.libcFullLinkFlags(target);
@ -868,7 +869,7 @@ pub fn flushModule(self: *Elf, arena: Allocator, tid: Zcu.PerThread.Id, prog_nod
if (try self.accessLibPath(arena, &test_path, &checked_paths, lc.crt_dir.?, lib_name, .static)) if (try self.accessLibPath(arena, &test_path, &checked_paths, lc.crt_dir.?, lib_name, .static))
break :success; break :success;
try self.reportMissingLibraryError( try diags.reportMissingLibraryError(
checked_paths.items, checked_paths.items,
"missing system library: '{s}' was not found", "missing system library: '{s}' was not found",
.{lib_name}, .{lib_name},
@ -901,7 +902,7 @@ pub fn flushModule(self: *Elf, arena: Allocator, tid: Zcu.PerThread.Id, prog_nod
}); });
try self.parseLibraryReportingFailure(.{ .path = path }, false); try self.parseLibraryReportingFailure(.{ .path = path }, false);
} else { } else {
comp.link_error_flags.missing_libc = true; diags.flags.missing_libc = true;
} }
} }
@ -920,7 +921,7 @@ pub fn flushModule(self: *Elf, arena: Allocator, tid: Zcu.PerThread.Id, prog_nod
if (csu.crtend) |path| try parseObjectReportingFailure(self, path); if (csu.crtend) |path| try parseObjectReportingFailure(self, path);
if (csu.crtn) |path| try parseObjectReportingFailure(self, path); if (csu.crtn) |path| try parseObjectReportingFailure(self, path);
if (self.base.hasErrors()) return error.FlushFailure; if (diags.hasErrors()) return error.FlushFailure;
// Dedup shared objects // Dedup shared objects
{ {
@ -1078,14 +1079,14 @@ pub fn flushModule(self: *Elf, arena: Allocator, tid: Zcu.PerThread.Id, prog_nod
if (self.base.isExe() and self.linkerDefinedPtr().?.entry_index == null) { if (self.base.isExe() and self.linkerDefinedPtr().?.entry_index == null) {
log.debug("flushing. no_entry_point_found = true", .{}); log.debug("flushing. no_entry_point_found = true", .{});
comp.link_error_flags.no_entry_point_found = true; diags.flags.no_entry_point_found = true;
} else { } else {
log.debug("flushing. no_entry_point_found = false", .{}); log.debug("flushing. no_entry_point_found = false", .{});
comp.link_error_flags.no_entry_point_found = false; diags.flags.no_entry_point_found = false;
try self.writeElfHeader(); try self.writeElfHeader();
} }
if (self.base.hasErrors()) return error.FlushFailure; if (diags.hasErrors()) return error.FlushFailure;
} }
/// --verbose-link output /// --verbose-link output
@ -1358,7 +1359,7 @@ fn dumpArgv(self: *Elf, comp: *Compilation) !void {
} }
pub const ParseError = error{ pub const ParseError = error{
/// Indicates the error is already reported on `Compilation.link_errors`. /// Indicates the error is already reported on `Compilation.link_diags`.
LinkFailure, LinkFailure,
OutOfMemory, OutOfMemory,
@ -1484,7 +1485,10 @@ fn parseLdScript(self: *Elf, lib: SystemLib) ParseError!void {
const tracy = trace(@src()); const tracy = trace(@src());
defer tracy.end(); defer tracy.end();
const gpa = self.base.comp.gpa; const comp = self.base.comp;
const gpa = comp.gpa;
const diags = &comp.link_diags;
const in_file = try lib.path.root_dir.handle.openFile(lib.path.sub_path, .{}); const in_file = try lib.path.root_dir.handle.openFile(lib.path.sub_path, .{});
defer in_file.close(); defer in_file.close();
const data = try in_file.readToEndAlloc(gpa, std.math.maxInt(u32)); const data = try in_file.readToEndAlloc(gpa, std.math.maxInt(u32));
@ -1533,7 +1537,7 @@ fn parseLdScript(self: *Elf, lib: SystemLib) ParseError!void {
} }
} }
try self.reportMissingLibraryError( try diags.reportMissingLibraryError(
checked_paths.items, checked_paths.items,
"missing library dependency: GNU ld script '{}' requires '{s}', but file not found", "missing library dependency: GNU ld script '{}' requires '{s}', but file not found",
.{ @as(Path, lib.path), script_arg.path }, .{ @as(Path, lib.path), script_arg.path },
@ -1856,6 +1860,7 @@ fn linkWithLLD(self: *Elf, arena: Allocator, tid: Zcu.PerThread.Id, prog_node: s
const comp = self.base.comp; const comp = self.base.comp;
const gpa = comp.gpa; const gpa = comp.gpa;
const diags = &comp.link_diags;
const directory = self.base.emit.root_dir; // Just an alias to make it shorter to type. const directory = self.base.emit.root_dir; // Just an alias to make it shorter to type.
const full_out_path = try directory.join(arena, &[_][]const u8{self.base.emit.sub_path}); const full_out_path = try directory.join(arena, &[_][]const u8{self.base.emit.sub_path});
@ -2376,7 +2381,7 @@ fn linkWithLLD(self: *Elf, arena: Allocator, tid: Zcu.PerThread.Id, prog_node: s
} }
// libc dep // libc dep
comp.link_error_flags.missing_libc = false; diags.flags.missing_libc = false;
if (comp.config.link_libc) { if (comp.config.link_libc) {
if (comp.libc_installation != null) { if (comp.libc_installation != null) {
const needs_grouping = link_mode == .static; const needs_grouping = link_mode == .static;
@ -2401,7 +2406,7 @@ fn linkWithLLD(self: *Elf, arena: Allocator, tid: Zcu.PerThread.Id, prog_node: s
.dynamic => "libc.so", .dynamic => "libc.so",
})); }));
} else { } else {
comp.link_error_flags.missing_libc = true; diags.flags.missing_libc = true;
} }
} }
} }
@ -2546,7 +2551,8 @@ fn writePhdrTable(self: *Elf) !void {
} }
pub fn writeElfHeader(self: *Elf) !void { pub fn writeElfHeader(self: *Elf) !void {
if (self.base.hasErrors()) return; // We had errors, so skip flushing to render the output unusable const diags = &self.base.comp.link_diags;
if (diags.hasErrors()) return; // We had errors, so skip flushing to render the output unusable
const comp = self.base.comp; const comp = self.base.comp;
var hdr_buf: [@sizeOf(elf.Elf64_Ehdr)]u8 = undefined; var hdr_buf: [@sizeOf(elf.Elf64_Ehdr)]u8 = undefined;
@ -3700,6 +3706,7 @@ fn addLoadPhdrs(self: *Elf) error{OutOfMemory}!void {
/// Allocates PHDR table in virtual memory and in file. /// Allocates PHDR table in virtual memory and in file.
fn allocatePhdrTable(self: *Elf) error{OutOfMemory}!void { fn allocatePhdrTable(self: *Elf) error{OutOfMemory}!void {
const diags = &self.base.comp.link_diags;
const phdr_table = &self.phdrs.items[self.phdr_indexes.table.int().?]; const phdr_table = &self.phdrs.items[self.phdr_indexes.table.int().?];
const phdr_table_load = &self.phdrs.items[self.phdr_indexes.table_load.int().?]; const phdr_table_load = &self.phdrs.items[self.phdr_indexes.table_load.int().?];
@ -3720,7 +3727,7 @@ fn allocatePhdrTable(self: *Elf) error{OutOfMemory}!void {
// (revisit getMaxNumberOfPhdrs()) // (revisit getMaxNumberOfPhdrs())
// 2. shift everything in file to free more space for EHDR + PHDR table // 2. shift everything in file to free more space for EHDR + PHDR table
// TODO verify `getMaxNumberOfPhdrs()` is accurate and convert this into no-op // TODO verify `getMaxNumberOfPhdrs()` is accurate and convert this into no-op
var err = try self.base.addErrorWithNotes(1); var err = try diags.addErrorWithNotes(1);
try err.addMsg("fatal linker error: not enough space reserved for EHDR and PHDR table", .{}); try err.addMsg("fatal linker error: not enough space reserved for EHDR and PHDR table", .{});
try err.addNote("required 0x{x}, available 0x{x}", .{ needed_size, available_space }); try err.addNote("required 0x{x}, available 0x{x}", .{ needed_size, available_space });
} }
@ -4855,16 +4862,17 @@ pub fn insertDynString(self: *Elf, name: []const u8) error{OutOfMemory}!u32 {
fn reportUndefinedSymbols(self: *Elf, undefs: anytype) !void { fn reportUndefinedSymbols(self: *Elf, undefs: anytype) !void {
const gpa = self.base.comp.gpa; const gpa = self.base.comp.gpa;
const diags = &self.base.comp.link_diags;
const max_notes = 4; const max_notes = 4;
try self.base.comp.link_errors.ensureUnusedCapacity(gpa, undefs.count()); try diags.msgs.ensureUnusedCapacity(gpa, undefs.count());
for (undefs.keys(), undefs.values()) |key, refs| { for (undefs.keys(), undefs.values()) |key, refs| {
const undef_sym = self.resolver.keys.items[key - 1]; const undef_sym = self.resolver.keys.items[key - 1];
const nrefs = @min(refs.items.len, max_notes); const nrefs = @min(refs.items.len, max_notes);
const nnotes = nrefs + @intFromBool(refs.items.len > max_notes); const nnotes = nrefs + @intFromBool(refs.items.len > max_notes);
var err = try self.base.addErrorWithNotesAssumeCapacity(nnotes); var err = try diags.addErrorWithNotesAssumeCapacity(nnotes);
try err.addMsg("undefined symbol: {s}", .{undef_sym.name(self)}); try err.addMsg("undefined symbol: {s}", .{undef_sym.name(self)});
for (refs.items[0..nrefs]) |ref| { for (refs.items[0..nrefs]) |ref| {
@ -4882,6 +4890,7 @@ fn reportUndefinedSymbols(self: *Elf, undefs: anytype) !void {
fn reportDuplicates(self: *Elf, dupes: anytype) error{ HasDuplicates, OutOfMemory }!void { fn reportDuplicates(self: *Elf, dupes: anytype) error{ HasDuplicates, OutOfMemory }!void {
if (dupes.keys().len == 0) return; // Nothing to do if (dupes.keys().len == 0) return; // Nothing to do
const diags = &self.base.comp.link_diags;
const max_notes = 3; const max_notes = 3;
@ -4889,7 +4898,7 @@ fn reportDuplicates(self: *Elf, dupes: anytype) error{ HasDuplicates, OutOfMemor
const sym = self.resolver.keys.items[key - 1]; const sym = self.resolver.keys.items[key - 1];
const nnotes = @min(notes.items.len, max_notes) + @intFromBool(notes.items.len > max_notes); const nnotes = @min(notes.items.len, max_notes) + @intFromBool(notes.items.len > max_notes);
var err = try self.base.addErrorWithNotes(nnotes + 1); var err = try diags.addErrorWithNotes(nnotes + 1);
try err.addMsg("duplicate symbol definition: {s}", .{sym.name(self)}); try err.addMsg("duplicate symbol definition: {s}", .{sym.name(self)});
try err.addNote("defined by {}", .{sym.file(self).?.fmtPath()}); try err.addNote("defined by {}", .{sym.file(self).?.fmtPath()});
@ -4908,21 +4917,9 @@ fn reportDuplicates(self: *Elf, dupes: anytype) error{ HasDuplicates, OutOfMemor
return error.HasDuplicates; return error.HasDuplicates;
} }
fn reportMissingLibraryError(
self: *Elf,
checked_paths: []const []const u8,
comptime format: []const u8,
args: anytype,
) error{OutOfMemory}!void {
var err = try self.base.addErrorWithNotes(checked_paths.len);
try err.addMsg(format, args);
for (checked_paths) |path| {
try err.addNote("tried {s}", .{path});
}
}
fn reportUnsupportedCpuArch(self: *Elf) error{OutOfMemory}!void { fn reportUnsupportedCpuArch(self: *Elf) error{OutOfMemory}!void {
var err = try self.base.addErrorWithNotes(0); const diags = &self.base.comp.link_diags;
var err = try diags.addErrorWithNotes(0);
try err.addMsg("fatal linker error: unsupported CPU architecture {s}", .{ try err.addMsg("fatal linker error: unsupported CPU architecture {s}", .{
@tagName(self.getTarget().cpu.arch), @tagName(self.getTarget().cpu.arch),
}); });
@ -4934,7 +4931,8 @@ pub fn addParseError(
comptime format: []const u8, comptime format: []const u8,
args: anytype, args: anytype,
) error{OutOfMemory}!void { ) error{OutOfMemory}!void {
var err = try self.base.addErrorWithNotes(1); const diags = &self.base.comp.link_diags;
var err = try diags.addErrorWithNotes(1);
try err.addMsg(format, args); try err.addMsg(format, args);
try err.addNote("while parsing {}", .{path}); try err.addNote("while parsing {}", .{path});
} }
@ -4945,7 +4943,8 @@ pub fn addFileError(
comptime format: []const u8, comptime format: []const u8,
args: anytype, args: anytype,
) error{OutOfMemory}!void { ) error{OutOfMemory}!void {
var err = try self.base.addErrorWithNotes(1); const diags = &self.base.comp.link_diags;
var err = try diags.addErrorWithNotes(1);
try err.addMsg(format, args); try err.addMsg(format, args);
try err.addNote("while parsing {}", .{self.file(file_index).?.fmtPath()}); try err.addNote("while parsing {}", .{self.file(file_index).?.fmtPath()});
} }

View File

@ -519,7 +519,8 @@ fn dataType(symbol: *const Symbol, elf_file: *Elf) u2 {
} }
fn reportUnhandledRelocError(self: Atom, rel: elf.Elf64_Rela, elf_file: *Elf) RelocError!void { fn reportUnhandledRelocError(self: Atom, rel: elf.Elf64_Rela, elf_file: *Elf) RelocError!void {
var err = try elf_file.base.addErrorWithNotes(1); const diags = &elf_file.base.comp.link_diags;
var err = try diags.addErrorWithNotes(1);
try err.addMsg("fatal linker error: unhandled relocation type {} at offset 0x{x}", .{ try err.addMsg("fatal linker error: unhandled relocation type {} at offset 0x{x}", .{
relocation.fmtRelocType(rel.r_type(), elf_file.getTarget().cpu.arch), relocation.fmtRelocType(rel.r_type(), elf_file.getTarget().cpu.arch),
rel.r_offset, rel.r_offset,
@ -534,7 +535,8 @@ fn reportTextRelocError(
rel: elf.Elf64_Rela, rel: elf.Elf64_Rela,
elf_file: *Elf, elf_file: *Elf,
) RelocError!void { ) RelocError!void {
var err = try elf_file.base.addErrorWithNotes(1); const diags = &elf_file.base.comp.link_diags;
var err = try diags.addErrorWithNotes(1);
try err.addMsg("relocation at offset 0x{x} against symbol '{s}' cannot be used", .{ try err.addMsg("relocation at offset 0x{x} against symbol '{s}' cannot be used", .{
rel.r_offset, rel.r_offset,
symbol.name(elf_file), symbol.name(elf_file),
@ -549,7 +551,8 @@ fn reportPicError(
rel: elf.Elf64_Rela, rel: elf.Elf64_Rela,
elf_file: *Elf, elf_file: *Elf,
) RelocError!void { ) RelocError!void {
var err = try elf_file.base.addErrorWithNotes(2); const diags = &elf_file.base.comp.link_diags;
var err = try diags.addErrorWithNotes(2);
try err.addMsg("relocation at offset 0x{x} against symbol '{s}' cannot be used", .{ try err.addMsg("relocation at offset 0x{x} against symbol '{s}' cannot be used", .{
rel.r_offset, rel.r_offset,
symbol.name(elf_file), symbol.name(elf_file),
@ -565,7 +568,8 @@ fn reportNoPicError(
rel: elf.Elf64_Rela, rel: elf.Elf64_Rela,
elf_file: *Elf, elf_file: *Elf,
) RelocError!void { ) RelocError!void {
var err = try elf_file.base.addErrorWithNotes(2); const diags = &elf_file.base.comp.link_diags;
var err = try diags.addErrorWithNotes(2);
try err.addMsg("relocation at offset 0x{x} against symbol '{s}' cannot be used", .{ try err.addMsg("relocation at offset 0x{x} against symbol '{s}' cannot be used", .{
rel.r_offset, rel.r_offset,
symbol.name(elf_file), symbol.name(elf_file),
@ -1082,6 +1086,7 @@ const x86_64 = struct {
stream: anytype, stream: anytype,
) (error{ InvalidInstruction, CannotEncode } || RelocError)!void { ) (error{ InvalidInstruction, CannotEncode } || RelocError)!void {
dev.check(.x86_64_backend); dev.check(.x86_64_backend);
const diags = &elf_file.base.comp.link_diags;
const r_type: elf.R_X86_64 = @enumFromInt(rel.r_type()); const r_type: elf.R_X86_64 = @enumFromInt(rel.r_type());
const r_offset = std.math.cast(usize, rel.r_offset) orelse return error.Overflow; const r_offset = std.math.cast(usize, rel.r_offset) orelse return error.Overflow;
@ -1176,7 +1181,7 @@ const x86_64 = struct {
try cwriter.writeInt(i32, @as(i32, @intCast(S_ + A - P)), .little); try cwriter.writeInt(i32, @as(i32, @intCast(S_ + A - P)), .little);
} else { } else {
x86_64.relaxGotPcTlsDesc(code[r_offset - 3 ..]) catch { x86_64.relaxGotPcTlsDesc(code[r_offset - 3 ..]) catch {
var err = try elf_file.base.addErrorWithNotes(1); var err = try diags.addErrorWithNotes(1);
try err.addMsg("could not relax {s}", .{@tagName(r_type)}); try err.addMsg("could not relax {s}", .{@tagName(r_type)});
try err.addNote("in {}:{s} at offset 0x{x}", .{ try err.addNote("in {}:{s} at offset 0x{x}", .{
atom.file(elf_file).?.fmtPath(), atom.file(elf_file).?.fmtPath(),
@ -1301,6 +1306,7 @@ const x86_64 = struct {
) !void { ) !void {
dev.check(.x86_64_backend); dev.check(.x86_64_backend);
assert(rels.len == 2); assert(rels.len == 2);
const diags = &elf_file.base.comp.link_diags;
const writer = stream.writer(); const writer = stream.writer();
const rel: elf.R_X86_64 = @enumFromInt(rels[1].r_type()); const rel: elf.R_X86_64 = @enumFromInt(rels[1].r_type());
switch (rel) { switch (rel) {
@ -1317,7 +1323,7 @@ const x86_64 = struct {
}, },
else => { else => {
var err = try elf_file.base.addErrorWithNotes(1); var err = try diags.addErrorWithNotes(1);
try err.addMsg("TODO: rewrite {} when followed by {}", .{ try err.addMsg("TODO: rewrite {} when followed by {}", .{
relocation.fmtRelocType(rels[0].r_type(), .x86_64), relocation.fmtRelocType(rels[0].r_type(), .x86_64),
relocation.fmtRelocType(rels[1].r_type(), .x86_64), relocation.fmtRelocType(rels[1].r_type(), .x86_64),
@ -1341,6 +1347,7 @@ const x86_64 = struct {
) !void { ) !void {
dev.check(.x86_64_backend); dev.check(.x86_64_backend);
assert(rels.len == 2); assert(rels.len == 2);
const diags = &elf_file.base.comp.link_diags;
const writer = stream.writer(); const writer = stream.writer();
const rel: elf.R_X86_64 = @enumFromInt(rels[1].r_type()); const rel: elf.R_X86_64 = @enumFromInt(rels[1].r_type());
switch (rel) { switch (rel) {
@ -1372,7 +1379,7 @@ const x86_64 = struct {
}, },
else => { else => {
var err = try elf_file.base.addErrorWithNotes(1); var err = try diags.addErrorWithNotes(1);
try err.addMsg("TODO: rewrite {} when followed by {}", .{ try err.addMsg("TODO: rewrite {} when followed by {}", .{
relocation.fmtRelocType(rels[0].r_type(), .x86_64), relocation.fmtRelocType(rels[0].r_type(), .x86_64),
relocation.fmtRelocType(rels[1].r_type(), .x86_64), relocation.fmtRelocType(rels[1].r_type(), .x86_64),
@ -1446,6 +1453,7 @@ const x86_64 = struct {
) !void { ) !void {
dev.check(.x86_64_backend); dev.check(.x86_64_backend);
assert(rels.len == 2); assert(rels.len == 2);
const diags = &elf_file.base.comp.link_diags;
const writer = stream.writer(); const writer = stream.writer();
const rel: elf.R_X86_64 = @enumFromInt(rels[1].r_type()); const rel: elf.R_X86_64 = @enumFromInt(rels[1].r_type());
switch (rel) { switch (rel) {
@ -1468,7 +1476,7 @@ const x86_64 = struct {
}, },
else => { else => {
var err = try elf_file.base.addErrorWithNotes(1); var err = try diags.addErrorWithNotes(1);
try err.addMsg("fatal linker error: rewrite {} when followed by {}", .{ try err.addMsg("fatal linker error: rewrite {} when followed by {}", .{
relocation.fmtRelocType(rels[0].r_type(), .x86_64), relocation.fmtRelocType(rels[0].r_type(), .x86_64),
relocation.fmtRelocType(rels[1].r_type(), .x86_64), relocation.fmtRelocType(rels[1].r_type(), .x86_64),
@ -1603,6 +1611,7 @@ const aarch64 = struct {
) (error{ UnexpectedRemainder, DivisionByZero } || RelocError)!void { ) (error{ UnexpectedRemainder, DivisionByZero } || RelocError)!void {
_ = it; _ = it;
const diags = &elf_file.base.comp.link_diags;
const r_type: elf.R_AARCH64 = @enumFromInt(rel.r_type()); const r_type: elf.R_AARCH64 = @enumFromInt(rel.r_type());
const r_offset = std.math.cast(usize, rel.r_offset) orelse return error.Overflow; const r_offset = std.math.cast(usize, rel.r_offset) orelse return error.Overflow;
const cwriter = stream.writer(); const cwriter = stream.writer();
@ -1657,7 +1666,7 @@ const aarch64 = struct {
aarch64_util.writeAdrpInst(pages, code); aarch64_util.writeAdrpInst(pages, code);
} else { } else {
// TODO: relax // TODO: relax
var err = try elf_file.base.addErrorWithNotes(1); var err = try diags.addErrorWithNotes(1);
try err.addMsg("TODO: relax ADR_GOT_PAGE", .{}); try err.addMsg("TODO: relax ADR_GOT_PAGE", .{});
try err.addNote("in {}:{s} at offset 0x{x}", .{ try err.addNote("in {}:{s} at offset 0x{x}", .{
atom.file(elf_file).?.fmtPath(), atom.file(elf_file).?.fmtPath(),
@ -1882,6 +1891,7 @@ const riscv = struct {
code: []u8, code: []u8,
stream: anytype, stream: anytype,
) !void { ) !void {
const diags = &elf_file.base.comp.link_diags;
const r_type: elf.R_RISCV = @enumFromInt(rel.r_type()); const r_type: elf.R_RISCV = @enumFromInt(rel.r_type());
const r_offset = std.math.cast(usize, rel.r_offset) orelse return error.Overflow; const r_offset = std.math.cast(usize, rel.r_offset) orelse return error.Overflow;
const cwriter = stream.writer(); const cwriter = stream.writer();
@ -1943,7 +1953,7 @@ const riscv = struct {
if (S == atom_addr + @as(i64, @intCast(pair.r_offset))) break pair; if (S == atom_addr + @as(i64, @intCast(pair.r_offset))) break pair;
} else { } else {
// TODO: implement searching forward // TODO: implement searching forward
var err = try elf_file.base.addErrorWithNotes(1); var err = try diags.addErrorWithNotes(1);
try err.addMsg("TODO: find HI20 paired reloc scanning forward", .{}); try err.addMsg("TODO: find HI20 paired reloc scanning forward", .{});
try err.addNote("in {}:{s} at offset 0x{x}", .{ try err.addNote("in {}:{s} at offset 0x{x}", .{
atom.file(elf_file).?.fmtPath(), atom.file(elf_file).?.fmtPath(),

View File

@ -644,6 +644,7 @@ pub fn checkDuplicates(self: *Object, dupes: anytype, elf_file: *Elf) error{OutO
pub fn initInputMergeSections(self: *Object, elf_file: *Elf) !void { pub fn initInputMergeSections(self: *Object, elf_file: *Elf) !void {
const gpa = elf_file.base.comp.gpa; const gpa = elf_file.base.comp.gpa;
const diags = &elf_file.base.comp.link_diags;
try self.input_merge_sections.ensureUnusedCapacity(gpa, self.shdrs.items.len); try self.input_merge_sections.ensureUnusedCapacity(gpa, self.shdrs.items.len);
try self.input_merge_sections_indexes.resize(gpa, self.shdrs.items.len); try self.input_merge_sections_indexes.resize(gpa, self.shdrs.items.len);
@ -685,7 +686,7 @@ pub fn initInputMergeSections(self: *Object, elf_file: *Elf) !void {
var end = start; var end = start;
while (end < data.len - sh_entsize and !isNull(data[end .. end + sh_entsize])) : (end += sh_entsize) {} while (end < data.len - sh_entsize and !isNull(data[end .. end + sh_entsize])) : (end += sh_entsize) {}
if (!isNull(data[end .. end + sh_entsize])) { if (!isNull(data[end .. end + sh_entsize])) {
var err = try elf_file.base.addErrorWithNotes(1); var err = try diags.addErrorWithNotes(1);
try err.addMsg("string not null terminated", .{}); try err.addMsg("string not null terminated", .{});
try err.addNote("in {}:{s}", .{ self.fmtPath(), atom_ptr.name(elf_file) }); try err.addNote("in {}:{s}", .{ self.fmtPath(), atom_ptr.name(elf_file) });
return error.LinkFailure; return error.LinkFailure;
@ -700,7 +701,7 @@ pub fn initInputMergeSections(self: *Object, elf_file: *Elf) !void {
const sh_entsize: u32 = @intCast(shdr.sh_entsize); const sh_entsize: u32 = @intCast(shdr.sh_entsize);
if (sh_entsize == 0) continue; // Malformed, don't split but don't error out if (sh_entsize == 0) continue; // Malformed, don't split but don't error out
if (shdr.sh_size % sh_entsize != 0) { if (shdr.sh_size % sh_entsize != 0) {
var err = try elf_file.base.addErrorWithNotes(1); var err = try diags.addErrorWithNotes(1);
try err.addMsg("size not a multiple of sh_entsize", .{}); try err.addMsg("size not a multiple of sh_entsize", .{});
try err.addNote("in {}:{s}", .{ self.fmtPath(), atom_ptr.name(elf_file) }); try err.addNote("in {}:{s}", .{ self.fmtPath(), atom_ptr.name(elf_file) });
return error.LinkFailure; return error.LinkFailure;
@ -738,6 +739,7 @@ pub fn resolveMergeSubsections(self: *Object, elf_file: *Elf) error{
Overflow, Overflow,
}!void { }!void {
const gpa = elf_file.base.comp.gpa; const gpa = elf_file.base.comp.gpa;
const diags = &elf_file.base.comp.link_diags;
for (self.input_merge_sections_indexes.items) |index| { for (self.input_merge_sections_indexes.items) |index| {
const imsec = self.inputMergeSection(index) orelse continue; const imsec = self.inputMergeSection(index) orelse continue;
@ -776,7 +778,7 @@ pub fn resolveMergeSubsections(self: *Object, elf_file: *Elf) error{
const imsec = self.inputMergeSection(imsec_index) orelse continue; const imsec = self.inputMergeSection(imsec_index) orelse continue;
if (imsec.offsets.items.len == 0) continue; if (imsec.offsets.items.len == 0) continue;
const res = imsec.findSubsection(@intCast(esym.st_value)) orelse { const res = imsec.findSubsection(@intCast(esym.st_value)) orelse {
var err = try elf_file.base.addErrorWithNotes(2); var err = try diags.addErrorWithNotes(2);
try err.addMsg("invalid symbol value: {x}", .{esym.st_value}); try err.addMsg("invalid symbol value: {x}", .{esym.st_value});
try err.addNote("for symbol {s}", .{sym.name(elf_file)}); try err.addNote("for symbol {s}", .{sym.name(elf_file)});
try err.addNote("in {}", .{self.fmtPath()}); try err.addNote("in {}", .{self.fmtPath()});
@ -802,7 +804,7 @@ pub fn resolveMergeSubsections(self: *Object, elf_file: *Elf) error{
if (imsec.offsets.items.len == 0) continue; if (imsec.offsets.items.len == 0) continue;
const msec = elf_file.mergeSection(imsec.merge_section_index); const msec = elf_file.mergeSection(imsec.merge_section_index);
const res = imsec.findSubsection(@intCast(@as(i64, @intCast(esym.st_value)) + rel.r_addend)) orelse { const res = imsec.findSubsection(@intCast(@as(i64, @intCast(esym.st_value)) + rel.r_addend)) orelse {
var err = try elf_file.base.addErrorWithNotes(1); var err = try diags.addErrorWithNotes(1);
try err.addMsg("invalid relocation at offset 0x{x}", .{rel.r_offset}); try err.addMsg("invalid relocation at offset 0x{x}", .{rel.r_offset});
try err.addNote("in {}:{s}", .{ self.fmtPath(), atom_ptr.name(elf_file) }); try err.addNote("in {}:{s}", .{ self.fmtPath(), atom_ptr.name(elf_file) });
return error.LinkFailure; return error.LinkFailure;

View File

@ -611,7 +611,8 @@ const riscv = struct {
}; };
fn reportInvalidReloc(rec: anytype, elf_file: *Elf, rel: elf.Elf64_Rela) !void { fn reportInvalidReloc(rec: anytype, elf_file: *Elf, rel: elf.Elf64_Rela) !void {
var err = try elf_file.base.addErrorWithNotes(1); const diags = &elf_file.base.comp.link_diags;
var err = try diags.addErrorWithNotes(1);
try err.addMsg("invalid relocation type {} at offset 0x{x}", .{ try err.addMsg("invalid relocation type {} at offset 0x{x}", .{
relocation.fmtRelocType(rel.r_type(), elf_file.getTarget().cpu.arch), relocation.fmtRelocType(rel.r_type(), elf_file.getTarget().cpu.arch),
rel.r_offset, rel.r_offset,

View File

@ -1,5 +1,6 @@
pub fn flushStaticLib(elf_file: *Elf, comp: *Compilation, module_obj_path: ?Path) link.File.FlushError!void { pub fn flushStaticLib(elf_file: *Elf, comp: *Compilation, module_obj_path: ?Path) link.File.FlushError!void {
const gpa = comp.gpa; const gpa = comp.gpa;
const diags = &comp.link_diags;
for (comp.objects) |obj| { for (comp.objects) |obj| {
switch (Compilation.classifyFileExt(obj.path.sub_path)) { switch (Compilation.classifyFileExt(obj.path.sub_path)) {
@ -21,7 +22,7 @@ pub fn flushStaticLib(elf_file: *Elf, comp: *Compilation, module_obj_path: ?Path
try parseObjectStaticLibReportingFailure(elf_file, comp.compiler_rt_obj.?.full_object_path); try parseObjectStaticLibReportingFailure(elf_file, comp.compiler_rt_obj.?.full_object_path);
} }
if (elf_file.base.hasErrors()) return error.FlushFailure; if (diags.hasErrors()) return error.FlushFailure;
// First, we flush relocatable object file generated with our backends. // First, we flush relocatable object file generated with our backends.
if (elf_file.zigObjectPtr()) |zig_object| { if (elf_file.zigObjectPtr()) |zig_object| {
@ -146,10 +147,12 @@ pub fn flushStaticLib(elf_file: *Elf, comp: *Compilation, module_obj_path: ?Path
try elf_file.base.file.?.setEndPos(total_size); try elf_file.base.file.?.setEndPos(total_size);
try elf_file.base.file.?.pwriteAll(buffer.items, 0); try elf_file.base.file.?.pwriteAll(buffer.items, 0);
if (elf_file.base.hasErrors()) return error.FlushFailure; if (diags.hasErrors()) return error.FlushFailure;
} }
pub fn flushObject(elf_file: *Elf, comp: *Compilation, module_obj_path: ?Path) link.File.FlushError!void { pub fn flushObject(elf_file: *Elf, comp: *Compilation, module_obj_path: ?Path) link.File.FlushError!void {
const diags = &comp.link_diags;
for (comp.objects) |obj| { for (comp.objects) |obj| {
if (obj.isObject()) { if (obj.isObject()) {
try elf_file.parseObjectReportingFailure(obj.path); try elf_file.parseObjectReportingFailure(obj.path);
@ -167,7 +170,7 @@ pub fn flushObject(elf_file: *Elf, comp: *Compilation, module_obj_path: ?Path) l
if (module_obj_path) |path| try elf_file.parseObjectReportingFailure(path); if (module_obj_path) |path| try elf_file.parseObjectReportingFailure(path);
if (elf_file.base.hasErrors()) return error.FlushFailure; if (diags.hasErrors()) return error.FlushFailure;
// Now, we are ready to resolve the symbols across all input files. // Now, we are ready to resolve the symbols across all input files.
// We will first resolve the files in the ZigObject, next in the parsed // We will first resolve the files in the ZigObject, next in the parsed
@ -213,7 +216,7 @@ pub fn flushObject(elf_file: *Elf, comp: *Compilation, module_obj_path: ?Path) l
try elf_file.writeShdrTable(); try elf_file.writeShdrTable();
try elf_file.writeElfHeader(); try elf_file.writeElfHeader();
if (elf_file.base.hasErrors()) return error.FlushFailure; if (diags.hasErrors()) return error.FlushFailure;
} }
fn parseObjectStaticLibReportingFailure(elf_file: *Elf, path: Path) error{OutOfMemory}!void { fn parseObjectStaticLibReportingFailure(elf_file: *Elf, path: Path) error{OutOfMemory}!void {

View File

@ -100,7 +100,6 @@ debug_rnglists_sect_index: ?u8 = null,
has_tlv: AtomicBool = AtomicBool.init(false), has_tlv: AtomicBool = AtomicBool.init(false),
binds_to_weak: AtomicBool = AtomicBool.init(false), binds_to_weak: AtomicBool = AtomicBool.init(false),
weak_defines: AtomicBool = AtomicBool.init(false), weak_defines: AtomicBool = AtomicBool.init(false),
has_errors: AtomicBool = AtomicBool.init(false),
/// Options /// Options
/// SDK layout /// SDK layout
@ -347,6 +346,7 @@ pub fn flushModule(self: *MachO, arena: Allocator, tid: Zcu.PerThread.Id, prog_n
const comp = self.base.comp; const comp = self.base.comp;
const gpa = comp.gpa; const gpa = comp.gpa;
const diags = &self.base.comp.link_diags;
if (self.llvm_object) |llvm_object| { if (self.llvm_object) |llvm_object| {
try self.base.emitLlvmObject(arena, llvm_object, prog_node); try self.base.emitLlvmObject(arena, llvm_object, prog_node);
@ -397,8 +397,8 @@ pub fn flushModule(self: *MachO, arena: Allocator, tid: Zcu.PerThread.Id, prog_n
for (positionals.items) |obj| { for (positionals.items) |obj| {
self.classifyInputFile(obj.path, .{ .path = obj.path }, obj.must_link) catch |err| switch (err) { self.classifyInputFile(obj.path, .{ .path = obj.path }, obj.must_link) catch |err| switch (err) {
error.UnknownFileType => try self.reportParseError(obj.path, "unknown file type for an input file", .{}), error.UnknownFileType => try diags.reportParseError(obj.path, "unknown file type for an input file", .{}),
else => |e| try self.reportParseError( else => |e| try diags.reportParseError(
obj.path, obj.path,
"unexpected error: reading input file failed with error {s}", "unexpected error: reading input file failed with error {s}",
.{@errorName(e)}, .{@errorName(e)},
@ -444,8 +444,8 @@ pub fn flushModule(self: *MachO, arena: Allocator, tid: Zcu.PerThread.Id, prog_n
for (system_libs.items) |lib| { for (system_libs.items) |lib| {
self.classifyInputFile(lib.path, lib, false) catch |err| switch (err) { self.classifyInputFile(lib.path, lib, false) catch |err| switch (err) {
error.UnknownFileType => try self.reportParseError(lib.path, "unknown file type for an input file", .{}), error.UnknownFileType => try diags.reportParseError(lib.path, "unknown file type for an input file", .{}),
else => |e| try self.reportParseError( else => |e| try diags.reportParseError(
lib.path, lib.path,
"unexpected error: parsing input file failed with error {s}", "unexpected error: parsing input file failed with error {s}",
.{@errorName(e)}, .{@errorName(e)},
@ -461,8 +461,8 @@ pub fn flushModule(self: *MachO, arena: Allocator, tid: Zcu.PerThread.Id, prog_n
}; };
if (compiler_rt_path) |path| { if (compiler_rt_path) |path| {
self.classifyInputFile(path, .{ .path = path }, false) catch |err| switch (err) { self.classifyInputFile(path, .{ .path = path }, false) catch |err| switch (err) {
error.UnknownFileType => try self.reportParseError(path, "unknown file type for an input file", .{}), error.UnknownFileType => try diags.reportParseError(path, "unknown file type for an input file", .{}),
else => |e| try self.reportParseError( else => |e| try diags.reportParseError(
path, path,
"unexpected error: parsing input file failed with error {s}", "unexpected error: parsing input file failed with error {s}",
.{@errorName(e)}, .{@errorName(e)},
@ -474,14 +474,11 @@ pub fn flushModule(self: *MachO, arena: Allocator, tid: Zcu.PerThread.Id, prog_n
self.parseDependentDylibs() catch |err| { self.parseDependentDylibs() catch |err| {
switch (err) { switch (err) {
error.MissingLibraryDependencies => {}, error.MissingLibraryDependencies => {},
else => |e| try self.reportUnexpectedError( else => |e| return diags.fail("failed to parse dependent libraries: {s}", .{@errorName(e)}),
"unexpected error while parsing dependent libraries: {s}",
.{@errorName(e)},
),
} }
}; };
if (self.base.hasErrors()) return error.FlushFailure; if (diags.hasErrors()) return error.FlushFailure;
{ {
const index = @as(File.Index, @intCast(try self.files.addOne(gpa))); const index = @as(File.Index, @intCast(try self.files.addOne(gpa)));
@ -502,10 +499,7 @@ pub fn flushModule(self: *MachO, arena: Allocator, tid: Zcu.PerThread.Id, prog_n
self.checkDuplicates() catch |err| switch (err) { self.checkDuplicates() catch |err| switch (err) {
error.HasDuplicates => return error.FlushFailure, error.HasDuplicates => return error.FlushFailure,
else => |e| { else => |e| return diags.fail("failed to check for duplicate symbol definitions: {s}", .{@errorName(e)}),
try self.reportUnexpectedError("unexpected error while checking for duplicate symbol definitions", .{});
return e;
},
}; };
self.markImportsAndExports(); self.markImportsAndExports();
@ -520,10 +514,7 @@ pub fn flushModule(self: *MachO, arena: Allocator, tid: Zcu.PerThread.Id, prog_n
self.scanRelocs() catch |err| switch (err) { self.scanRelocs() catch |err| switch (err) {
error.HasUndefinedSymbols => return error.FlushFailure, error.HasUndefinedSymbols => return error.FlushFailure,
else => |e| { else => |e| return diags.fail("failed to scan relocations: {s}", .{@errorName(e)}),
try self.reportUnexpectedError("unexpected error while scanning relocations", .{});
return e;
},
}; };
try self.initOutputSections(); try self.initOutputSections();
@ -784,6 +775,8 @@ pub fn resolveLibSystem(
comp: *Compilation, comp: *Compilation,
out_libs: anytype, out_libs: anytype,
) !void { ) !void {
const diags = &self.base.comp.link_diags;
var test_path = std.ArrayList(u8).init(arena); var test_path = std.ArrayList(u8).init(arena);
var checked_paths = std.ArrayList([]const u8).init(arena); var checked_paths = std.ArrayList([]const u8).init(arena);
@ -803,7 +796,7 @@ pub fn resolveLibSystem(
if (try accessLibPath(arena, &test_path, &checked_paths, dir, "System")) break :success; if (try accessLibPath(arena, &test_path, &checked_paths, dir, "System")) break :success;
} }
try self.reportMissingLibraryError(checked_paths.items, "unable to find libSystem system library", .{}); try diags.reportMissingLibraryError(checked_paths.items, "unable to find libSystem system library", .{});
return error.MissingLibSystem; return error.MissingLibSystem;
} }
@ -845,6 +838,7 @@ pub fn classifyInputFile(self: *MachO, path: Path, lib: SystemLib, must_link: bo
} }
fn parseFatFile(self: *MachO, file: std.fs.File, path: Path) !?fat.Arch { fn parseFatFile(self: *MachO, file: std.fs.File, path: Path) !?fat.Arch {
const diags = &self.base.comp.link_diags;
const fat_h = fat.readFatHeader(file) catch return null; const fat_h = fat.readFatHeader(file) catch return null;
if (fat_h.magic != macho.FAT_MAGIC and fat_h.magic != macho.FAT_MAGIC_64) return null; if (fat_h.magic != macho.FAT_MAGIC and fat_h.magic != macho.FAT_MAGIC_64) return null;
var fat_archs_buffer: [2]fat.Arch = undefined; var fat_archs_buffer: [2]fat.Arch = undefined;
@ -853,7 +847,7 @@ fn parseFatFile(self: *MachO, file: std.fs.File, path: Path) !?fat.Arch {
for (fat_archs) |arch| { for (fat_archs) |arch| {
if (arch.tag == cpu_arch) return arch; if (arch.tag == cpu_arch) return arch;
} }
try self.reportParseError(path, "missing arch in universal file: expected {s}", .{ try diags.reportParseError(path, "missing arch in universal file: expected {s}", .{
@tagName(cpu_arch), @tagName(cpu_arch),
}); });
return error.MissingCpuArch; return error.MissingCpuArch;
@ -901,6 +895,7 @@ pub fn parseInputFiles(self: *MachO) !void {
const tracy = trace(@src()); const tracy = trace(@src());
defer tracy.end(); defer tracy.end();
const diags = &self.base.comp.link_diags;
const tp = self.base.comp.thread_pool; const tp = self.base.comp.thread_pool;
var wg: WaitGroup = .{}; var wg: WaitGroup = .{};
@ -916,7 +911,7 @@ pub fn parseInputFiles(self: *MachO) !void {
} }
} }
if (self.has_errors.swap(false, .seq_cst)) return error.FlushFailure; if (diags.hasErrors()) return error.LinkFailure;
} }
fn parseInputFileWorker(self: *MachO, file: File) void { fn parseInputFileWorker(self: *MachO, file: File) void {
@ -928,9 +923,9 @@ fn parseInputFileWorker(self: *MachO, file: File) void {
error.InvalidMachineType, error.InvalidMachineType,
error.InvalidTarget, error.InvalidTarget,
=> {}, // already reported => {}, // already reported
else => |e| self.reportParseError2(file.getIndex(), "unexpected error: parsing input file failed with error {s}", .{@errorName(e)}) catch {}, else => |e| self.reportParseError2(file.getIndex(), "unexpected error: parsing input file failed with error {s}", .{@errorName(e)}) catch {},
} }
_ = self.has_errors.swap(true, .seq_cst);
}; };
} }
@ -1296,6 +1291,7 @@ fn markLive(self: *MachO) void {
fn convertTentativeDefsAndResolveSpecialSymbols(self: *MachO) !void { fn convertTentativeDefsAndResolveSpecialSymbols(self: *MachO) !void {
const tp = self.base.comp.thread_pool; const tp = self.base.comp.thread_pool;
const diags = &self.base.comp.link_diags;
var wg: WaitGroup = .{}; var wg: WaitGroup = .{};
{ {
wg.reset(); wg.reset();
@ -1307,7 +1303,7 @@ fn convertTentativeDefsAndResolveSpecialSymbols(self: *MachO) !void {
tp.spawnWg(&wg, resolveSpecialSymbolsWorker, .{ self, obj }); tp.spawnWg(&wg, resolveSpecialSymbolsWorker, .{ self, obj });
} }
} }
if (self.has_errors.swap(false, .seq_cst)) return error.FlushFailure; if (diags.hasErrors()) return error.LinkFailure;
} }
fn convertTentativeDefinitionsWorker(self: *MachO, object: *Object) void { fn convertTentativeDefinitionsWorker(self: *MachO, object: *Object) void {
@ -1319,26 +1315,19 @@ fn convertTentativeDefinitionsWorker(self: *MachO, object: *Object) void {
"unexpected error occurred while converting tentative symbols into defined symbols: {s}", "unexpected error occurred while converting tentative symbols into defined symbols: {s}",
.{@errorName(err)}, .{@errorName(err)},
) catch {}; ) catch {};
_ = self.has_errors.swap(true, .seq_cst);
}; };
} }
fn resolveSpecialSymbolsWorker(self: *MachO, obj: *InternalObject) void { fn resolveSpecialSymbolsWorker(self: *MachO, obj: *InternalObject) void {
const tracy = trace(@src()); const tracy = trace(@src());
defer tracy.end(); defer tracy.end();
obj.resolveBoundarySymbols(self) catch |err| {
self.reportUnexpectedError("unexpected error occurred while resolving boundary symbols: {s}", .{ const diags = &self.base.comp.link_diags;
@errorName(err),
}) catch {}; obj.resolveBoundarySymbols(self) catch |err|
_ = self.has_errors.swap(true, .seq_cst); return diags.addError("failed to resolve boundary symbols: {s}", .{@errorName(err)});
return; obj.resolveObjcMsgSendSymbols(self) catch |err|
}; return diags.addError("failed to resolve ObjC msgsend stubs: {s}", .{@errorName(err)});
obj.resolveObjcMsgSendSymbols(self) catch |err| {
self.reportUnexpectedError("unexpected error occurred while resolving ObjC msgsend stubs: {s}", .{
@errorName(err),
}) catch {};
_ = self.has_errors.swap(true, .seq_cst);
};
} }
pub fn dedupLiterals(self: *MachO) !void { pub fn dedupLiterals(self: *MachO) !void {
@ -1390,6 +1379,8 @@ fn checkDuplicates(self: *MachO) !void {
defer tracy.end(); defer tracy.end();
const tp = self.base.comp.thread_pool; const tp = self.base.comp.thread_pool;
const diags = &self.base.comp.link_diags;
var wg: WaitGroup = .{}; var wg: WaitGroup = .{};
{ {
wg.reset(); wg.reset();
@ -1405,7 +1396,7 @@ fn checkDuplicates(self: *MachO) !void {
} }
} }
if (self.has_errors.swap(false, .seq_cst)) return error.FlushFailure; if (diags.hasErrors()) return error.LinkFailure;
try self.reportDuplicates(); try self.reportDuplicates();
} }
@ -1417,7 +1408,6 @@ fn checkDuplicatesWorker(self: *MachO, file: File) void {
self.reportParseError2(file.getIndex(), "failed to check for duplicate definitions: {s}", .{ self.reportParseError2(file.getIndex(), "failed to check for duplicate definitions: {s}", .{
@errorName(err), @errorName(err),
}) catch {}; }) catch {};
_ = self.has_errors.swap(true, .seq_cst);
}; };
} }
@ -1460,6 +1450,8 @@ fn scanRelocs(self: *MachO) !void {
defer tracy.end(); defer tracy.end();
const tp = self.base.comp.thread_pool; const tp = self.base.comp.thread_pool;
const diags = &self.base.comp.link_diags;
var wg: WaitGroup = .{}; var wg: WaitGroup = .{};
{ {
@ -1477,7 +1469,7 @@ fn scanRelocs(self: *MachO) !void {
} }
} }
if (self.has_errors.swap(false, .seq_cst)) return error.FlushFailure; if (diags.hasErrors()) return error.LinkFailure;
if (self.getInternalObject()) |obj| { if (self.getInternalObject()) |obj| {
try obj.checkUndefs(self); try obj.checkUndefs(self);
@ -1503,7 +1495,6 @@ fn scanRelocsWorker(self: *MachO, file: File) void {
self.reportParseError2(file.getIndex(), "failed to scan relocations: {s}", .{ self.reportParseError2(file.getIndex(), "failed to scan relocations: {s}", .{
@errorName(err), @errorName(err),
}) catch {}; }) catch {};
_ = self.has_errors.swap(true, .seq_cst);
}; };
} }
@ -1527,6 +1518,7 @@ fn reportUndefs(self: *MachO) !void {
if (self.undefs.keys().len == 0) return; // Nothing to do if (self.undefs.keys().len == 0) return; // Nothing to do
const gpa = self.base.comp.gpa; const gpa = self.base.comp.gpa;
const diags = &self.base.comp.link_diags;
const max_notes = 4; const max_notes = 4;
// We will sort by name, and then by file to ensure deterministic output. // We will sort by name, and then by file to ensure deterministic output.
@ -1558,7 +1550,7 @@ fn reportUndefs(self: *MachO) !void {
break :nnotes @min(nnotes, max_notes) + @intFromBool(nnotes > max_notes); break :nnotes @min(nnotes, max_notes) + @intFromBool(nnotes > max_notes);
}; };
var err = try self.base.addErrorWithNotes(nnotes); var err = try diags.addErrorWithNotes(nnotes);
try err.addMsg("undefined symbol: {s}", .{undef_sym.getName(self)}); try err.addMsg("undefined symbol: {s}", .{undef_sym.getName(self)});
switch (notes) { switch (notes) {
@ -1908,6 +1900,7 @@ fn calcSectionSizes(self: *MachO) !void {
const tracy = trace(@src()); const tracy = trace(@src());
defer tracy.end(); defer tracy.end();
const diags = &self.base.comp.link_diags;
const cpu_arch = self.getTarget().cpu.arch; const cpu_arch = self.getTarget().cpu.arch;
if (self.data_sect_index) |idx| { if (self.data_sect_index) |idx| {
@ -1951,7 +1944,7 @@ fn calcSectionSizes(self: *MachO) !void {
} }
} }
if (self.has_errors.swap(false, .seq_cst)) return error.FlushFailure; if (diags.hasErrors()) return error.LinkFailure;
try self.calcSymtabSize(); try self.calcSymtabSize();
@ -2003,6 +1996,9 @@ fn calcSectionSizes(self: *MachO) !void {
fn calcSectionSizeWorker(self: *MachO, sect_id: u8) void { fn calcSectionSizeWorker(self: *MachO, sect_id: u8) void {
const tracy = trace(@src()); const tracy = trace(@src());
defer tracy.end(); defer tracy.end();
const diags = &self.base.comp.link_diags;
const doWork = struct { const doWork = struct {
fn doWork(macho_file: *MachO, header: *macho.section_64, atoms: []const Ref) !void { fn doWork(macho_file: *MachO, header: *macho.section_64, atoms: []const Ref) !void {
for (atoms) |ref| { for (atoms) |ref| {
@ -2020,26 +2016,21 @@ fn calcSectionSizeWorker(self: *MachO, sect_id: u8) void {
const header = &slice.items(.header)[sect_id]; const header = &slice.items(.header)[sect_id];
const atoms = slice.items(.atoms)[sect_id].items; const atoms = slice.items(.atoms)[sect_id].items;
doWork(self, header, atoms) catch |err| { doWork(self, header, atoms) catch |err| {
self.reportUnexpectedError("failed to calculate size of section '{s},{s}': {s}", .{ try diags.addError("failed to calculate size of section '{s},{s}': {s}", .{
header.segName(), header.segName(), header.sectName(), @errorName(err),
header.sectName(), });
@errorName(err),
}) catch {};
_ = self.has_errors.swap(true, .seq_cst);
}; };
} }
fn createThunksWorker(self: *MachO, sect_id: u8) void { fn createThunksWorker(self: *MachO, sect_id: u8) void {
const tracy = trace(@src()); const tracy = trace(@src());
defer tracy.end(); defer tracy.end();
const diags = &self.base.comp.link_diags;
self.createThunks(sect_id) catch |err| { self.createThunks(sect_id) catch |err| {
const header = self.sections.items(.header)[sect_id]; const header = self.sections.items(.header)[sect_id];
self.reportUnexpectedError("failed to create thunks and calculate size of section '{s},{s}': {s}", .{ diags.addError("failed to create thunks and calculate size of section '{s},{s}': {s}", .{
header.segName(), header.segName(), header.sectName(), @errorName(err),
header.sectName(), });
@errorName(err),
}) catch {};
_ = self.has_errors.swap(true, .seq_cst);
}; };
} }
@ -2047,6 +2038,8 @@ fn generateUnwindInfo(self: *MachO) !void {
const tracy = trace(@src()); const tracy = trace(@src());
defer tracy.end(); defer tracy.end();
const diags = &self.base.comp.link_diags;
if (self.eh_frame_sect_index) |index| { if (self.eh_frame_sect_index) |index| {
const sect = &self.sections.items(.header)[index]; const sect = &self.sections.items(.header)[index];
sect.size = try eh_frame.calcSize(self); sect.size = try eh_frame.calcSize(self);
@ -2055,10 +2048,7 @@ fn generateUnwindInfo(self: *MachO) !void {
if (self.unwind_info_sect_index) |index| { if (self.unwind_info_sect_index) |index| {
const sect = &self.sections.items(.header)[index]; const sect = &self.sections.items(.header)[index];
self.unwind_info.generate(self) catch |err| switch (err) { self.unwind_info.generate(self) catch |err| switch (err) {
error.TooManyPersonalities => return self.reportUnexpectedError( error.TooManyPersonalities => return diags.fail("too many personalities in unwind info", .{}),
"too many personalities in unwind info",
.{},
),
else => |e| return e, else => |e| return e,
}; };
sect.size = self.unwind_info.calcSize(); sect.size = self.unwind_info.calcSize();
@ -2427,6 +2417,7 @@ fn writeSectionsAndUpdateLinkeditSizes(self: *MachO) !void {
defer tracy.end(); defer tracy.end();
const gpa = self.base.comp.gpa; const gpa = self.base.comp.gpa;
const diags = &self.base.comp.link_diags;
const cmd = self.symtab_cmd; const cmd = self.symtab_cmd;
try self.symtab.resize(gpa, cmd.nsyms); try self.symtab.resize(gpa, cmd.nsyms);
@ -2495,7 +2486,7 @@ fn writeSectionsAndUpdateLinkeditSizes(self: *MachO) !void {
}; };
} }
if (self.has_errors.swap(false, .seq_cst)) return error.FlushFailure; if (diags.hasErrors()) return error.LinkFailure;
} }
fn writeAtomsWorker(self: *MachO, file: File) void { fn writeAtomsWorker(self: *MachO, file: File) void {
@ -2505,13 +2496,15 @@ fn writeAtomsWorker(self: *MachO, file: File) void {
self.reportParseError2(file.getIndex(), "failed to resolve relocations and write atoms: {s}", .{ self.reportParseError2(file.getIndex(), "failed to resolve relocations and write atoms: {s}", .{
@errorName(err), @errorName(err),
}) catch {}; }) catch {};
_ = self.has_errors.swap(true, .seq_cst);
}; };
} }
fn writeThunkWorker(self: *MachO, thunk: Thunk) void { fn writeThunkWorker(self: *MachO, thunk: Thunk) void {
const tracy = trace(@src()); const tracy = trace(@src());
defer tracy.end(); defer tracy.end();
const diags = &self.base.comp.link_diags;
const doWork = struct { const doWork = struct {
fn doWork(th: Thunk, buffer: []u8, macho_file: *MachO) !void { fn doWork(th: Thunk, buffer: []u8, macho_file: *MachO) !void {
const off = math.cast(usize, th.value) orelse return error.Overflow; const off = math.cast(usize, th.value) orelse return error.Overflow;
@ -2522,8 +2515,7 @@ fn writeThunkWorker(self: *MachO, thunk: Thunk) void {
}.doWork; }.doWork;
const out = self.sections.items(.out)[thunk.out_n_sect].items; const out = self.sections.items(.out)[thunk.out_n_sect].items;
doWork(thunk, out, self) catch |err| { doWork(thunk, out, self) catch |err| {
self.reportUnexpectedError("failed to write contents of thunk: {s}", .{@errorName(err)}) catch {}; diags.addError("failed to write contents of thunk: {s}", .{@errorName(err)});
_ = self.has_errors.swap(true, .seq_cst);
}; };
} }
@ -2531,6 +2523,8 @@ fn writeSyntheticSectionWorker(self: *MachO, sect_id: u8, out: []u8) void {
const tracy = trace(@src()); const tracy = trace(@src());
defer tracy.end(); defer tracy.end();
const diags = &self.base.comp.link_diags;
const Tag = enum { const Tag = enum {
eh_frame, eh_frame,
unwind_info, unwind_info,
@ -2575,18 +2569,18 @@ fn writeSyntheticSectionWorker(self: *MachO, sect_id: u8, out: []u8) void {
unreachable; unreachable;
}; };
doWork(self, tag, out) catch |err| { doWork(self, tag, out) catch |err| {
self.reportUnexpectedError("could not write section '{s},{s}': {s}", .{ diags.addError("could not write section '{s},{s}': {s}", .{
header.segName(), header.segName(), header.sectName(), @errorName(err),
header.sectName(), });
@errorName(err),
}) catch {};
_ = self.has_errors.swap(true, .seq_cst);
}; };
} }
fn updateLazyBindSizeWorker(self: *MachO) void { fn updateLazyBindSizeWorker(self: *MachO) void {
const tracy = trace(@src()); const tracy = trace(@src());
defer tracy.end(); defer tracy.end();
const diags = &self.base.comp.link_diags;
const doWork = struct { const doWork = struct {
fn doWork(macho_file: *MachO) !void { fn doWork(macho_file: *MachO) !void {
try macho_file.lazy_bind_section.updateSize(macho_file); try macho_file.lazy_bind_section.updateSize(macho_file);
@ -2596,12 +2590,8 @@ fn updateLazyBindSizeWorker(self: *MachO) void {
try macho_file.stubs_helper.write(macho_file, stream.writer()); try macho_file.stubs_helper.write(macho_file, stream.writer());
} }
}.doWork; }.doWork;
doWork(self) catch |err| { doWork(self) catch |err|
self.reportUnexpectedError("could not calculate size of lazy binding section: {s}", .{ diags.addError("could not calculate size of lazy binding section: {s}", .{@errorName(err)});
@errorName(err),
}) catch {};
_ = self.has_errors.swap(true, .seq_cst);
};
} }
pub fn updateLinkeditSizeWorker(self: *MachO, tag: enum { pub fn updateLinkeditSizeWorker(self: *MachO, tag: enum {
@ -2611,6 +2601,7 @@ pub fn updateLinkeditSizeWorker(self: *MachO, tag: enum {
export_trie, export_trie,
data_in_code, data_in_code,
}) void { }) void {
const diags = &self.base.comp.link_diags;
const res = switch (tag) { const res = switch (tag) {
.rebase => self.rebase_section.updateSize(self), .rebase => self.rebase_section.updateSize(self),
.bind => self.bind_section.updateSize(self), .bind => self.bind_section.updateSize(self),
@ -2618,13 +2609,8 @@ pub fn updateLinkeditSizeWorker(self: *MachO, tag: enum {
.export_trie => self.export_trie.updateSize(self), .export_trie => self.export_trie.updateSize(self),
.data_in_code => self.data_in_code.updateSize(self), .data_in_code => self.data_in_code.updateSize(self),
}; };
res catch |err| { res catch |err|
self.reportUnexpectedError("could not calculate size of {s} section: {s}", .{ diags.addError("could not calculate size of {s} section: {s}", .{ @tagName(tag), @errorName(err) });
@tagName(tag),
@errorName(err),
}) catch {};
_ = self.has_errors.swap(true, .seq_cst);
};
} }
fn writeSectionsToFile(self: *MachO) !void { fn writeSectionsToFile(self: *MachO) !void {
@ -3432,6 +3418,7 @@ pub fn growSection(self: *MachO, sect_index: u8, needed_size: u64) !void {
} }
fn growSectionNonRelocatable(self: *MachO, sect_index: u8, needed_size: u64) !void { fn growSectionNonRelocatable(self: *MachO, sect_index: u8, needed_size: u64) !void {
const diags = &self.base.comp.link_diags;
const sect = &self.sections.items(.header)[sect_index]; const sect = &self.sections.items(.header)[sect_index];
const seg_id = self.sections.items(.segment_id)[sect_index]; const seg_id = self.sections.items(.segment_id)[sect_index];
@ -3467,7 +3454,7 @@ fn growSectionNonRelocatable(self: *MachO, sect_index: u8, needed_size: u64) !vo
const mem_capacity = self.allocatedSizeVirtual(seg.vmaddr); const mem_capacity = self.allocatedSizeVirtual(seg.vmaddr);
if (needed_size > mem_capacity) { if (needed_size > mem_capacity) {
var err = try self.base.addErrorWithNotes(2); var err = try diags.addErrorWithNotes(2);
try err.addMsg("fatal linker error: cannot expand segment seg({d})({s}) in virtual memory", .{ try err.addMsg("fatal linker error: cannot expand segment seg({d})({s}) in virtual memory", .{
seg_id, seg_id,
seg.segName(), seg.segName(),
@ -3766,41 +3753,18 @@ pub fn eatPrefix(path: []const u8, prefix: []const u8) ?[]const u8 {
return null; return null;
} }
pub fn reportParseError(
self: *MachO,
path: Path,
comptime format: []const u8,
args: anytype,
) error{OutOfMemory}!void {
var err = try self.base.addErrorWithNotes(1);
try err.addMsg(format, args);
try err.addNote("while parsing {}", .{path});
}
pub fn reportParseError2( pub fn reportParseError2(
self: *MachO, self: *MachO,
file_index: File.Index, file_index: File.Index,
comptime format: []const u8, comptime format: []const u8,
args: anytype, args: anytype,
) error{OutOfMemory}!void { ) error{OutOfMemory}!void {
var err = try self.base.addErrorWithNotes(1); const diags = &self.base.comp.link_diags;
var err = try diags.addErrorWithNotes(1);
try err.addMsg(format, args); try err.addMsg(format, args);
try err.addNote("while parsing {}", .{self.getFile(file_index).?.fmtPath()}); try err.addNote("while parsing {}", .{self.getFile(file_index).?.fmtPath()});
} }
fn reportMissingLibraryError(
self: *MachO,
checked_paths: []const []const u8,
comptime format: []const u8,
args: anytype,
) error{OutOfMemory}!void {
var err = try self.base.addErrorWithNotes(checked_paths.len);
try err.addMsg(format, args);
for (checked_paths) |path| {
try err.addNote("tried {s}", .{path});
}
}
fn reportMissingDependencyError( fn reportMissingDependencyError(
self: *MachO, self: *MachO,
parent: File.Index, parent: File.Index,
@ -3809,7 +3773,8 @@ fn reportMissingDependencyError(
comptime format: []const u8, comptime format: []const u8,
args: anytype, args: anytype,
) error{OutOfMemory}!void { ) error{OutOfMemory}!void {
var err = try self.base.addErrorWithNotes(2 + checked_paths.len); const diags = &self.base.comp.link_diags;
var err = try diags.addErrorWithNotes(2 + checked_paths.len);
try err.addMsg(format, args); try err.addMsg(format, args);
try err.addNote("while resolving {s}", .{path}); try err.addNote("while resolving {s}", .{path});
try err.addNote("a dependency of {}", .{self.getFile(parent).?.fmtPath()}); try err.addNote("a dependency of {}", .{self.getFile(parent).?.fmtPath()});
@ -3825,18 +3790,13 @@ fn reportDependencyError(
comptime format: []const u8, comptime format: []const u8,
args: anytype, args: anytype,
) error{OutOfMemory}!void { ) error{OutOfMemory}!void {
var err = try self.base.addErrorWithNotes(2); const diags = &self.base.comp.link_diags;
var err = try diags.addErrorWithNotes(2);
try err.addMsg(format, args); try err.addMsg(format, args);
try err.addNote("while parsing {s}", .{path}); try err.addNote("while parsing {s}", .{path});
try err.addNote("a dependency of {}", .{self.getFile(parent).?.fmtPath()}); try err.addNote("a dependency of {}", .{self.getFile(parent).?.fmtPath()});
} }
pub fn reportUnexpectedError(self: *MachO, comptime format: []const u8, args: anytype) error{OutOfMemory}!void {
var err = try self.base.addErrorWithNotes(1);
try err.addMsg(format, args);
try err.addNote("please report this as a linker bug on https://github.com/ziglang/zig/issues/new/choose", .{});
}
fn reportDuplicates(self: *MachO) error{ HasDuplicates, OutOfMemory }!void { fn reportDuplicates(self: *MachO) error{ HasDuplicates, OutOfMemory }!void {
const tracy = trace(@src()); const tracy = trace(@src());
defer tracy.end(); defer tracy.end();
@ -3844,6 +3804,7 @@ fn reportDuplicates(self: *MachO) error{ HasDuplicates, OutOfMemory }!void {
if (self.dupes.keys().len == 0) return; // Nothing to do if (self.dupes.keys().len == 0) return; // Nothing to do
const gpa = self.base.comp.gpa; const gpa = self.base.comp.gpa;
const diags = &self.base.comp.link_diags;
const max_notes = 3; const max_notes = 3;
// We will sort by name, and then by file to ensure deterministic output. // We will sort by name, and then by file to ensure deterministic output.
@ -3861,7 +3822,7 @@ fn reportDuplicates(self: *MachO) error{ HasDuplicates, OutOfMemory }!void {
const notes = self.dupes.get(key).?; const notes = self.dupes.get(key).?;
const nnotes = @min(notes.items.len, max_notes) + @intFromBool(notes.items.len > max_notes); const nnotes = @min(notes.items.len, max_notes) + @intFromBool(notes.items.len > max_notes);
var err = try self.base.addErrorWithNotes(nnotes + 1); var err = try diags.addErrorWithNotes(nnotes + 1);
try err.addMsg("duplicate symbol definition: {s}", .{sym.getName(self)}); try err.addMsg("duplicate symbol definition: {s}", .{sym.getName(self)});
try err.addNote("defined by {}", .{sym.getFile(self).?.fmtPath()}); try err.addNote("defined by {}", .{sym.getFile(self).?.fmtPath()});

View File

@ -6,6 +6,7 @@ pub fn deinit(self: *Archive, allocator: Allocator) void {
pub fn unpack(self: *Archive, macho_file: *MachO, path: Path, handle_index: File.HandleIndex, fat_arch: ?fat.Arch) !void { pub fn unpack(self: *Archive, macho_file: *MachO, path: Path, handle_index: File.HandleIndex, fat_arch: ?fat.Arch) !void {
const gpa = macho_file.base.comp.gpa; const gpa = macho_file.base.comp.gpa;
const diags = &macho_file.base.comp.link_diags;
var arena = std.heap.ArenaAllocator.init(gpa); var arena = std.heap.ArenaAllocator.init(gpa);
defer arena.deinit(); defer arena.deinit();
@ -28,7 +29,7 @@ pub fn unpack(self: *Archive, macho_file: *MachO, path: Path, handle_index: File
pos += @sizeOf(ar_hdr); pos += @sizeOf(ar_hdr);
if (!mem.eql(u8, &hdr.ar_fmag, ARFMAG)) { if (!mem.eql(u8, &hdr.ar_fmag, ARFMAG)) {
try macho_file.reportParseError(path, "invalid header delimiter: expected '{s}', found '{s}'", .{ try diags.reportParseError(path, "invalid header delimiter: expected '{s}', found '{s}'", .{
std.fmt.fmtSliceEscapeLower(ARFMAG), std.fmt.fmtSliceEscapeLower(&hdr.ar_fmag), std.fmt.fmtSliceEscapeLower(ARFMAG), std.fmt.fmtSliceEscapeLower(&hdr.ar_fmag),
}); });
return error.MalformedArchive; return error.MalformedArchive;

View File

@ -893,6 +893,7 @@ fn resolveRelocInner(
const x86_64 = struct { const x86_64 = struct {
fn relaxGotLoad(self: Atom, code: []u8, rel: Relocation, macho_file: *MachO) ResolveError!void { fn relaxGotLoad(self: Atom, code: []u8, rel: Relocation, macho_file: *MachO) ResolveError!void {
dev.check(.x86_64_backend); dev.check(.x86_64_backend);
const diags = &macho_file.base.comp.link_diags;
const old_inst = disassemble(code) orelse return error.RelaxFail; const old_inst = disassemble(code) orelse return error.RelaxFail;
switch (old_inst.encoding.mnemonic) { switch (old_inst.encoding.mnemonic) {
.mov => { .mov => {
@ -901,7 +902,7 @@ const x86_64 = struct {
encode(&.{inst}, code) catch return error.RelaxFail; encode(&.{inst}, code) catch return error.RelaxFail;
}, },
else => |x| { else => |x| {
var err = try macho_file.base.addErrorWithNotes(2); var err = try diags.addErrorWithNotes(2);
try err.addMsg("{s}: 0x{x}: 0x{x}: failed to relax relocation of type {}", .{ try err.addMsg("{s}: 0x{x}: 0x{x}: failed to relax relocation of type {}", .{
self.getName(macho_file), self.getName(macho_file),
self.getAddress(macho_file), self.getAddress(macho_file),

View File

@ -364,6 +364,8 @@ pub fn scanRelocs(self: *ZigObject, macho_file: *MachO) !void {
pub fn resolveRelocs(self: *ZigObject, macho_file: *MachO) !void { pub fn resolveRelocs(self: *ZigObject, macho_file: *MachO) !void {
const gpa = macho_file.base.comp.gpa; const gpa = macho_file.base.comp.gpa;
const diags = &macho_file.base.comp.link_diags;
var has_error = false; var has_error = false;
for (self.getAtoms()) |atom_index| { for (self.getAtoms()) |atom_index| {
const atom = self.getAtom(atom_index) orelse continue; const atom = self.getAtom(atom_index) orelse continue;
@ -379,17 +381,12 @@ pub fn resolveRelocs(self: *ZigObject, macho_file: *MachO) !void {
defer gpa.free(code); defer gpa.free(code);
self.getAtomData(macho_file, atom.*, code) catch |err| { self.getAtomData(macho_file, atom.*, code) catch |err| {
switch (err) { switch (err) {
error.InputOutput => { error.InputOutput => return diags.fail("fetching code for '{s}' failed", .{
try macho_file.reportUnexpectedError("fetching code for '{s}' failed", .{
atom.getName(macho_file), atom.getName(macho_file),
}); }),
}, else => |e| return diags.fail("failed to fetch code for '{s}': {s}", .{
else => |e| { atom.getName(macho_file), @errorName(e),
try macho_file.reportUnexpectedError("unexpected error while fetching code for '{s}': {s}", .{ }),
atom.getName(macho_file),
@errorName(e),
});
},
} }
has_error = true; has_error = true;
continue; continue;
@ -398,9 +395,7 @@ pub fn resolveRelocs(self: *ZigObject, macho_file: *MachO) !void {
atom.resolveRelocs(macho_file, code) catch |err| { atom.resolveRelocs(macho_file, code) catch |err| {
switch (err) { switch (err) {
error.ResolveFailed => {}, error.ResolveFailed => {},
else => |e| { else => |e| return diags.fail("failed to resolve relocations: {s}", .{@errorName(e)}),
try macho_file.reportUnexpectedError("unexpected error while resolving relocations: {s}", .{@errorName(e)});
},
} }
has_error = true; has_error = true;
continue; continue;
@ -426,6 +421,7 @@ pub fn calcNumRelocs(self: *ZigObject, macho_file: *MachO) void {
pub fn writeRelocs(self: *ZigObject, macho_file: *MachO) !void { pub fn writeRelocs(self: *ZigObject, macho_file: *MachO) !void {
const gpa = macho_file.base.comp.gpa; const gpa = macho_file.base.comp.gpa;
const diags = &macho_file.base.comp.link_diags;
for (self.getAtoms()) |atom_index| { for (self.getAtoms()) |atom_index| {
const atom = self.getAtom(atom_index) orelse continue; const atom = self.getAtom(atom_index) orelse continue;
@ -439,21 +435,8 @@ pub fn writeRelocs(self: *ZigObject, macho_file: *MachO) !void {
const atom_size = std.math.cast(usize, atom.size) orelse return error.Overflow; const atom_size = std.math.cast(usize, atom.size) orelse return error.Overflow;
const code = try gpa.alloc(u8, atom_size); const code = try gpa.alloc(u8, atom_size);
defer gpa.free(code); defer gpa.free(code);
self.getAtomData(macho_file, atom.*, code) catch |err| switch (err) { self.getAtomData(macho_file, atom.*, code) catch |err|
error.InputOutput => { return diags.fail("failed to fetch code for '{s}': {s}", .{ atom.getName(macho_file), @errorName(err) });
try macho_file.reportUnexpectedError("fetching code for '{s}' failed", .{
atom.getName(macho_file),
});
return error.FlushFailure;
},
else => |e| {
try macho_file.reportUnexpectedError("unexpected error while fetching code for '{s}': {s}", .{
atom.getName(macho_file),
@errorName(e),
});
return error.FlushFailure;
},
};
const file_offset = header.offset + atom.value; const file_offset = header.offset + atom.value;
try atom.writeRelocs(macho_file, code, relocs[extra.rel_out_index..][0..extra.rel_out_count]); try atom.writeRelocs(macho_file, code, relocs[extra.rel_out_index..][0..extra.rel_out_count]);
try macho_file.base.file.?.pwriteAll(code, file_offset); try macho_file.base.file.?.pwriteAll(code, file_offset);

View File

@ -1,5 +1,6 @@
pub fn flushObject(macho_file: *MachO, comp: *Compilation, module_obj_path: ?Path) link.File.FlushError!void { pub fn flushObject(macho_file: *MachO, comp: *Compilation, module_obj_path: ?Path) link.File.FlushError!void {
const gpa = macho_file.base.comp.gpa; const gpa = macho_file.base.comp.gpa;
const diags = &macho_file.base.comp.link_diags;
// TODO: "positional arguments" is a CLI concept, not a linker concept. Delete this unnecessary array list. // TODO: "positional arguments" is a CLI concept, not a linker concept. Delete this unnecessary array list.
var positionals = std.ArrayList(Compilation.LinkObject).init(gpa); var positionals = std.ArrayList(Compilation.LinkObject).init(gpa);
@ -29,8 +30,8 @@ pub fn flushObject(macho_file: *MachO, comp: *Compilation, module_obj_path: ?Pat
for (positionals.items) |obj| { for (positionals.items) |obj| {
macho_file.classifyInputFile(obj.path, .{ .path = obj.path }, obj.must_link) catch |err| switch (err) { macho_file.classifyInputFile(obj.path, .{ .path = obj.path }, obj.must_link) catch |err| switch (err) {
error.UnknownFileType => try macho_file.reportParseError(obj.path, "unknown file type for an input file", .{}), error.UnknownFileType => try diags.reportParseError(obj.path, "unknown file type for an input file", .{}),
else => |e| try macho_file.reportParseError( else => |e| try diags.reportParseError(
obj.path, obj.path,
"unexpected error: reading input file failed with error {s}", "unexpected error: reading input file failed with error {s}",
.{@errorName(e)}, .{@errorName(e)},
@ -38,11 +39,11 @@ pub fn flushObject(macho_file: *MachO, comp: *Compilation, module_obj_path: ?Pat
}; };
} }
if (macho_file.base.hasErrors()) return error.FlushFailure; if (diags.hasErrors()) return error.FlushFailure;
try macho_file.parseInputFiles(); try macho_file.parseInputFiles();
if (macho_file.base.hasErrors()) return error.FlushFailure; if (diags.hasErrors()) return error.FlushFailure;
try macho_file.resolveSymbols(); try macho_file.resolveSymbols();
try macho_file.dedupLiterals(); try macho_file.dedupLiterals();
@ -75,6 +76,7 @@ pub fn flushObject(macho_file: *MachO, comp: *Compilation, module_obj_path: ?Pat
pub fn flushStaticLib(macho_file: *MachO, comp: *Compilation, module_obj_path: ?Path) link.File.FlushError!void { pub fn flushStaticLib(macho_file: *MachO, comp: *Compilation, module_obj_path: ?Path) link.File.FlushError!void {
const gpa = comp.gpa; const gpa = comp.gpa;
const diags = &macho_file.base.comp.link_diags;
var positionals = std.ArrayList(Compilation.LinkObject).init(gpa); var positionals = std.ArrayList(Compilation.LinkObject).init(gpa);
defer positionals.deinit(); defer positionals.deinit();
@ -94,8 +96,8 @@ pub fn flushStaticLib(macho_file: *MachO, comp: *Compilation, module_obj_path: ?
for (positionals.items) |obj| { for (positionals.items) |obj| {
macho_file.classifyInputFile(obj.path, .{ .path = obj.path }, obj.must_link) catch |err| switch (err) { macho_file.classifyInputFile(obj.path, .{ .path = obj.path }, obj.must_link) catch |err| switch (err) {
error.UnknownFileType => try macho_file.reportParseError(obj.path, "unknown file type for an input file", .{}), error.UnknownFileType => try diags.reportParseError(obj.path, "unknown file type for an input file", .{}),
else => |e| try macho_file.reportParseError( else => |e| try diags.reportParseError(
obj.path, obj.path,
"unexpected error: reading input file failed with error {s}", "unexpected error: reading input file failed with error {s}",
.{@errorName(e)}, .{@errorName(e)},
@ -103,11 +105,11 @@ pub fn flushStaticLib(macho_file: *MachO, comp: *Compilation, module_obj_path: ?
}; };
} }
if (macho_file.base.hasErrors()) return error.FlushFailure; if (diags.hasErrors()) return error.FlushFailure;
try parseInputFilesAr(macho_file); try parseInputFilesAr(macho_file);
if (macho_file.base.hasErrors()) return error.FlushFailure; if (diags.hasErrors()) return error.FlushFailure;
// First, we flush relocatable object file generated with our backends. // First, we flush relocatable object file generated with our backends.
if (macho_file.getZigObject()) |zo| { if (macho_file.getZigObject()) |zo| {
@ -228,7 +230,7 @@ pub fn flushStaticLib(macho_file: *MachO, comp: *Compilation, module_obj_path: ?
try macho_file.base.file.?.setEndPos(total_size); try macho_file.base.file.?.setEndPos(total_size);
try macho_file.base.file.?.pwriteAll(buffer.items, 0); try macho_file.base.file.?.pwriteAll(buffer.items, 0);
if (macho_file.base.hasErrors()) return error.FlushFailure; if (diags.hasErrors()) return error.FlushFailure;
} }
fn parseInputFilesAr(macho_file: *MachO) !void { fn parseInputFilesAr(macho_file: *MachO) !void {
@ -293,6 +295,8 @@ fn calcSectionSizes(macho_file: *MachO) !void {
const tracy = trace(@src()); const tracy = trace(@src());
defer tracy.end(); defer tracy.end();
const diags = &macho_file.base.comp.link_diags;
if (macho_file.getZigObject()) |zo| { if (macho_file.getZigObject()) |zo| {
// TODO this will create a race as we need to track merging of debug sections which we currently don't // TODO this will create a race as we need to track merging of debug sections which we currently don't
zo.calcNumRelocs(macho_file); zo.calcNumRelocs(macho_file);
@ -337,7 +341,7 @@ fn calcSectionSizes(macho_file: *MachO) !void {
} }
try calcSymtabSize(macho_file); try calcSymtabSize(macho_file);
if (macho_file.has_errors.swap(false, .seq_cst)) return error.FlushFailure; if (diags.hasErrors()) return error.LinkFailure;
} }
fn calcSectionSizeWorker(macho_file: *MachO, sect_id: u8) void { fn calcSectionSizeWorker(macho_file: *MachO, sect_id: u8) void {
@ -365,6 +369,8 @@ fn calcEhFrameSizeWorker(macho_file: *MachO) void {
const tracy = trace(@src()); const tracy = trace(@src());
defer tracy.end(); defer tracy.end();
const diags = &macho_file.base.comp.link_diags;
const doWork = struct { const doWork = struct {
fn doWork(mfile: *MachO, header: *macho.section_64) !void { fn doWork(mfile: *MachO, header: *macho.section_64) !void {
header.size = try eh_frame.calcSize(mfile); header.size = try eh_frame.calcSize(mfile);
@ -374,12 +380,8 @@ fn calcEhFrameSizeWorker(macho_file: *MachO) void {
}.doWork; }.doWork;
const header = &macho_file.sections.items(.header)[macho_file.eh_frame_sect_index.?]; const header = &macho_file.sections.items(.header)[macho_file.eh_frame_sect_index.?];
doWork(macho_file, header) catch |err| { doWork(macho_file, header) catch |err|
macho_file.reportUnexpectedError("failed to calculate size of section '__TEXT,__eh_frame': {s}", .{ diags.addError("failed to calculate size of section '__TEXT,__eh_frame': {s}", .{@errorName(err)});
@errorName(err),
}) catch {};
_ = macho_file.has_errors.swap(true, .seq_cst);
};
} }
fn calcCompactUnwindSize(macho_file: *MachO) void { fn calcCompactUnwindSize(macho_file: *MachO) void {
@ -592,6 +594,7 @@ fn writeSections(macho_file: *MachO) !void {
defer tracy.end(); defer tracy.end();
const gpa = macho_file.base.comp.gpa; const gpa = macho_file.base.comp.gpa;
const diags = &macho_file.base.comp.link_diags;
const cpu_arch = macho_file.getTarget().cpu.arch; const cpu_arch = macho_file.getTarget().cpu.arch;
const slice = macho_file.sections.slice(); const slice = macho_file.sections.slice();
for (slice.items(.header), slice.items(.out), slice.items(.relocs), 0..) |header, *out, *relocs, n_sect| { for (slice.items(.header), slice.items(.out), slice.items(.relocs), 0..) |header, *out, *relocs, n_sect| {
@ -637,7 +640,7 @@ fn writeSections(macho_file: *MachO) !void {
} }
} }
if (macho_file.has_errors.swap(false, .seq_cst)) return error.FlushFailure; if (diags.hasErrors()) return error.LinkFailure;
if (macho_file.getZigObject()) |zo| { if (macho_file.getZigObject()) |zo| {
try zo.writeRelocs(macho_file); try zo.writeRelocs(macho_file);
@ -651,33 +654,28 @@ fn writeAtomsWorker(macho_file: *MachO, file: File) void {
macho_file.reportParseError2(file.getIndex(), "failed to write atoms: {s}", .{ macho_file.reportParseError2(file.getIndex(), "failed to write atoms: {s}", .{
@errorName(err), @errorName(err),
}) catch {}; }) catch {};
_ = macho_file.has_errors.swap(true, .seq_cst);
}; };
} }
fn writeEhFrameWorker(macho_file: *MachO) void { fn writeEhFrameWorker(macho_file: *MachO) void {
const tracy = trace(@src()); const tracy = trace(@src());
defer tracy.end(); defer tracy.end();
const diags = &macho_file.base.comp.link_diags;
const sect_index = macho_file.eh_frame_sect_index.?; const sect_index = macho_file.eh_frame_sect_index.?;
const buffer = macho_file.sections.items(.out)[sect_index]; const buffer = macho_file.sections.items(.out)[sect_index];
const relocs = macho_file.sections.items(.relocs)[sect_index]; const relocs = macho_file.sections.items(.relocs)[sect_index];
eh_frame.writeRelocs(macho_file, buffer.items, relocs.items) catch |err| { eh_frame.writeRelocs(macho_file, buffer.items, relocs.items) catch |err|
macho_file.reportUnexpectedError("failed to write '__LD,__eh_frame' section: {s}", .{ diags.addError("failed to write '__LD,__eh_frame' section: {s}", .{@errorName(err)});
@errorName(err),
}) catch {};
_ = macho_file.has_errors.swap(true, .seq_cst);
};
} }
fn writeCompactUnwindWorker(macho_file: *MachO, object: *Object) void { fn writeCompactUnwindWorker(macho_file: *MachO, object: *Object) void {
const tracy = trace(@src()); const tracy = trace(@src());
defer tracy.end(); defer tracy.end();
object.writeCompactUnwindRelocatable(macho_file) catch |err| {
macho_file.reportUnexpectedError("failed to write '__LD,__eh_frame' section: {s}", .{ const diags = &macho_file.base.comp.link_diags;
@errorName(err), object.writeCompactUnwindRelocatable(macho_file) catch |err|
}) catch {}; diags.addError("failed to write '__LD,__eh_frame' section: {s}", .{@errorName(err)});
_ = macho_file.has_errors.swap(true, .seq_cst);
};
} }
fn writeSectionsToFile(macho_file: *MachO) !void { fn writeSectionsToFile(macho_file: *MachO) !void {

View File

@ -649,6 +649,8 @@ fn parseInputFiles(wasm: *Wasm, files: []const []const u8) !void {
/// file and parsed successfully. Returns false when file is not an object file. /// file and parsed successfully. Returns false when file is not an object file.
/// May return an error instead when parsing failed. /// May return an error instead when parsing failed.
fn parseObjectFile(wasm: *Wasm, path: []const u8) !bool { fn parseObjectFile(wasm: *Wasm, path: []const u8) !bool {
const diags = &wasm.base.comp.link_diags;
const obj_file = try fs.cwd().openFile(path, .{}); const obj_file = try fs.cwd().openFile(path, .{});
errdefer obj_file.close(); errdefer obj_file.close();
@ -656,7 +658,7 @@ fn parseObjectFile(wasm: *Wasm, path: []const u8) !bool {
var object = Object.create(wasm, obj_file, path, null) catch |err| switch (err) { var object = Object.create(wasm, obj_file, path, null) catch |err| switch (err) {
error.InvalidMagicByte, error.NotObjectFile => return false, error.InvalidMagicByte, error.NotObjectFile => return false,
else => |e| { else => |e| {
var err_note = try wasm.base.addErrorWithNotes(1); var err_note = try diags.addErrorWithNotes(1);
try err_note.addMsg("Failed parsing object file: {s}", .{@errorName(e)}); try err_note.addMsg("Failed parsing object file: {s}", .{@errorName(e)});
try err_note.addNote("while parsing '{s}'", .{path}); try err_note.addNote("while parsing '{s}'", .{path});
return error.FlushFailure; return error.FlushFailure;
@ -698,6 +700,7 @@ pub inline fn getAtomPtr(wasm: *Wasm, index: Atom.Index) *Atom {
/// are referenced by other object files or Zig code. /// are referenced by other object files or Zig code.
fn parseArchive(wasm: *Wasm, path: []const u8, force_load: bool) !bool { fn parseArchive(wasm: *Wasm, path: []const u8, force_load: bool) !bool {
const gpa = wasm.base.comp.gpa; const gpa = wasm.base.comp.gpa;
const diags = &wasm.base.comp.link_diags;
const archive_file = try fs.cwd().openFile(path, .{}); const archive_file = try fs.cwd().openFile(path, .{});
errdefer archive_file.close(); errdefer archive_file.close();
@ -712,7 +715,7 @@ fn parseArchive(wasm: *Wasm, path: []const u8, force_load: bool) !bool {
return false; return false;
}, },
else => |e| { else => |e| {
var err_note = try wasm.base.addErrorWithNotes(1); var err_note = try diags.addErrorWithNotes(1);
try err_note.addMsg("Failed parsing archive: {s}", .{@errorName(e)}); try err_note.addMsg("Failed parsing archive: {s}", .{@errorName(e)});
try err_note.addNote("while parsing archive {s}", .{path}); try err_note.addNote("while parsing archive {s}", .{path});
return error.FlushFailure; return error.FlushFailure;
@ -739,7 +742,7 @@ fn parseArchive(wasm: *Wasm, path: []const u8, force_load: bool) !bool {
for (offsets.keys()) |file_offset| { for (offsets.keys()) |file_offset| {
var object = archive.parseObject(wasm, file_offset) catch |e| { var object = archive.parseObject(wasm, file_offset) catch |e| {
var err_note = try wasm.base.addErrorWithNotes(1); var err_note = try diags.addErrorWithNotes(1);
try err_note.addMsg("Failed parsing object: {s}", .{@errorName(e)}); try err_note.addMsg("Failed parsing object: {s}", .{@errorName(e)});
try err_note.addNote("while parsing object in archive {s}", .{path}); try err_note.addNote("while parsing object in archive {s}", .{path});
return error.FlushFailure; return error.FlushFailure;
@ -763,6 +766,7 @@ fn requiresTLSReloc(wasm: *const Wasm) bool {
fn resolveSymbolsInObject(wasm: *Wasm, file_index: File.Index) !void { fn resolveSymbolsInObject(wasm: *Wasm, file_index: File.Index) !void {
const gpa = wasm.base.comp.gpa; const gpa = wasm.base.comp.gpa;
const diags = &wasm.base.comp.link_diags;
const obj_file = wasm.file(file_index).?; const obj_file = wasm.file(file_index).?;
log.debug("Resolving symbols in object: '{s}'", .{obj_file.path()}); log.debug("Resolving symbols in object: '{s}'", .{obj_file.path()});
@ -777,7 +781,7 @@ fn resolveSymbolsInObject(wasm: *Wasm, file_index: File.Index) !void {
if (symbol.isLocal()) { if (symbol.isLocal()) {
if (symbol.isUndefined()) { if (symbol.isUndefined()) {
var err = try wasm.base.addErrorWithNotes(1); var err = try diags.addErrorWithNotes(1);
try err.addMsg("Local symbols are not allowed to reference imports", .{}); try err.addMsg("Local symbols are not allowed to reference imports", .{});
try err.addNote("symbol '{s}' defined in '{s}'", .{ sym_name, obj_file.path() }); try err.addNote("symbol '{s}' defined in '{s}'", .{ sym_name, obj_file.path() });
} }
@ -814,7 +818,7 @@ fn resolveSymbolsInObject(wasm: *Wasm, file_index: File.Index) !void {
break :outer; // existing is weak, while new one isn't. Replace it. break :outer; // existing is weak, while new one isn't. Replace it.
} }
// both are defined and weak, we have a symbol collision. // both are defined and weak, we have a symbol collision.
var err = try wasm.base.addErrorWithNotes(2); var err = try diags.addErrorWithNotes(2);
try err.addMsg("symbol '{s}' defined multiple times", .{sym_name}); try err.addMsg("symbol '{s}' defined multiple times", .{sym_name});
try err.addNote("first definition in '{s}'", .{existing_file_path}); try err.addNote("first definition in '{s}'", .{existing_file_path});
try err.addNote("next definition in '{s}'", .{obj_file.path()}); try err.addNote("next definition in '{s}'", .{obj_file.path()});
@ -825,7 +829,7 @@ fn resolveSymbolsInObject(wasm: *Wasm, file_index: File.Index) !void {
} }
if (symbol.tag != existing_sym.tag) { if (symbol.tag != existing_sym.tag) {
var err = try wasm.base.addErrorWithNotes(2); var err = try diags.addErrorWithNotes(2);
try err.addMsg("symbol '{s}' mismatching types '{s}' and '{s}'", .{ sym_name, @tagName(symbol.tag), @tagName(existing_sym.tag) }); try err.addMsg("symbol '{s}' mismatching types '{s}' and '{s}'", .{ sym_name, @tagName(symbol.tag), @tagName(existing_sym.tag) });
try err.addNote("first definition in '{s}'", .{existing_file_path}); try err.addNote("first definition in '{s}'", .{existing_file_path});
try err.addNote("next definition in '{s}'", .{obj_file.path()}); try err.addNote("next definition in '{s}'", .{obj_file.path()});
@ -845,7 +849,7 @@ fn resolveSymbolsInObject(wasm: *Wasm, file_index: File.Index) !void {
const imp = obj_file.import(sym_index); const imp = obj_file.import(sym_index);
const module_name = obj_file.string(imp.module_name); const module_name = obj_file.string(imp.module_name);
if (!mem.eql(u8, existing_name, module_name)) { if (!mem.eql(u8, existing_name, module_name)) {
var err = try wasm.base.addErrorWithNotes(2); var err = try diags.addErrorWithNotes(2);
try err.addMsg("symbol '{s}' module name mismatch. Expected '{s}', but found '{s}'", .{ try err.addMsg("symbol '{s}' module name mismatch. Expected '{s}', but found '{s}'", .{
sym_name, sym_name,
existing_name, existing_name,
@ -865,7 +869,7 @@ fn resolveSymbolsInObject(wasm: *Wasm, file_index: File.Index) !void {
const existing_ty = wasm.getGlobalType(existing_loc); const existing_ty = wasm.getGlobalType(existing_loc);
const new_ty = wasm.getGlobalType(location); const new_ty = wasm.getGlobalType(location);
if (existing_ty.mutable != new_ty.mutable or existing_ty.valtype != new_ty.valtype) { if (existing_ty.mutable != new_ty.mutable or existing_ty.valtype != new_ty.valtype) {
var err = try wasm.base.addErrorWithNotes(2); var err = try diags.addErrorWithNotes(2);
try err.addMsg("symbol '{s}' mismatching global types", .{sym_name}); try err.addMsg("symbol '{s}' mismatching global types", .{sym_name});
try err.addNote("first definition in '{s}'", .{existing_file_path}); try err.addNote("first definition in '{s}'", .{existing_file_path});
try err.addNote("next definition in '{s}'", .{obj_file.path()}); try err.addNote("next definition in '{s}'", .{obj_file.path()});
@ -876,7 +880,7 @@ fn resolveSymbolsInObject(wasm: *Wasm, file_index: File.Index) !void {
const existing_ty = wasm.getFunctionSignature(existing_loc); const existing_ty = wasm.getFunctionSignature(existing_loc);
const new_ty = wasm.getFunctionSignature(location); const new_ty = wasm.getFunctionSignature(location);
if (!existing_ty.eql(new_ty)) { if (!existing_ty.eql(new_ty)) {
var err = try wasm.base.addErrorWithNotes(3); var err = try diags.addErrorWithNotes(3);
try err.addMsg("symbol '{s}' mismatching function signatures.", .{sym_name}); try err.addMsg("symbol '{s}' mismatching function signatures.", .{sym_name});
try err.addNote("expected signature {}, but found signature {}", .{ existing_ty, new_ty }); try err.addNote("expected signature {}, but found signature {}", .{ existing_ty, new_ty });
try err.addNote("first definition in '{s}'", .{existing_file_path}); try err.addNote("first definition in '{s}'", .{existing_file_path});
@ -909,6 +913,7 @@ fn resolveSymbolsInObject(wasm: *Wasm, file_index: File.Index) !void {
fn resolveSymbolsInArchives(wasm: *Wasm) !void { fn resolveSymbolsInArchives(wasm: *Wasm) !void {
const gpa = wasm.base.comp.gpa; const gpa = wasm.base.comp.gpa;
const diags = &wasm.base.comp.link_diags;
if (wasm.archives.items.len == 0) return; if (wasm.archives.items.len == 0) return;
log.debug("Resolving symbols in archives", .{}); log.debug("Resolving symbols in archives", .{});
@ -928,7 +933,7 @@ fn resolveSymbolsInArchives(wasm: *Wasm) !void {
// Parse object and and resolve symbols again before we check remaining // Parse object and and resolve symbols again before we check remaining
// undefined symbols. // undefined symbols.
var object = archive.parseObject(wasm, offset.items[0]) catch |e| { var object = archive.parseObject(wasm, offset.items[0]) catch |e| {
var err_note = try wasm.base.addErrorWithNotes(1); var err_note = try diags.addErrorWithNotes(1);
try err_note.addMsg("Failed parsing object: {s}", .{@errorName(e)}); try err_note.addMsg("Failed parsing object: {s}", .{@errorName(e)});
try err_note.addNote("while parsing object in archive {s}", .{archive.name}); try err_note.addNote("while parsing object in archive {s}", .{archive.name});
return error.FlushFailure; return error.FlushFailure;
@ -1172,6 +1177,7 @@ fn validateFeatures(
emit_features_count: *u32, emit_features_count: *u32,
) !void { ) !void {
const comp = wasm.base.comp; const comp = wasm.base.comp;
const diags = &wasm.base.comp.link_diags;
const target = comp.root_mod.resolved_target.result; const target = comp.root_mod.resolved_target.result;
const shared_memory = comp.config.shared_memory; const shared_memory = comp.config.shared_memory;
const cpu_features = target.cpu.features; const cpu_features = target.cpu.features;
@ -1235,7 +1241,7 @@ fn validateFeatures(
allowed[used_index] = is_enabled; allowed[used_index] = is_enabled;
emit_features_count.* += @intFromBool(is_enabled); emit_features_count.* += @intFromBool(is_enabled);
} else if (is_enabled and !allowed[used_index]) { } else if (is_enabled and !allowed[used_index]) {
var err = try wasm.base.addErrorWithNotes(1); var err = try diags.addErrorWithNotes(1);
try err.addMsg("feature '{}' not allowed, but used by linked object", .{@as(types.Feature.Tag, @enumFromInt(used_index))}); try err.addMsg("feature '{}' not allowed, but used by linked object", .{@as(types.Feature.Tag, @enumFromInt(used_index))});
try err.addNote("defined in '{s}'", .{wasm.files.items(.data)[used_set >> 1].object.path}); try err.addNote("defined in '{s}'", .{wasm.files.items(.data)[used_set >> 1].object.path});
valid_feature_set = false; valid_feature_set = false;
@ -1249,7 +1255,7 @@ fn validateFeatures(
if (shared_memory) { if (shared_memory) {
const disallowed_feature = disallowed[@intFromEnum(types.Feature.Tag.shared_mem)]; const disallowed_feature = disallowed[@intFromEnum(types.Feature.Tag.shared_mem)];
if (@as(u1, @truncate(disallowed_feature)) != 0) { if (@as(u1, @truncate(disallowed_feature)) != 0) {
var err = try wasm.base.addErrorWithNotes(0); var err = try diags.addErrorWithNotes(0);
try err.addMsg( try err.addMsg(
"shared-memory is disallowed by '{s}' because it wasn't compiled with 'atomics' and 'bulk-memory' features enabled", "shared-memory is disallowed by '{s}' because it wasn't compiled with 'atomics' and 'bulk-memory' features enabled",
.{wasm.files.items(.data)[disallowed_feature >> 1].object.path}, .{wasm.files.items(.data)[disallowed_feature >> 1].object.path},
@ -1259,7 +1265,7 @@ fn validateFeatures(
for ([_]types.Feature.Tag{ .atomics, .bulk_memory }) |feature| { for ([_]types.Feature.Tag{ .atomics, .bulk_memory }) |feature| {
if (!allowed[@intFromEnum(feature)]) { if (!allowed[@intFromEnum(feature)]) {
var err = try wasm.base.addErrorWithNotes(0); var err = try diags.addErrorWithNotes(0);
try err.addMsg("feature '{}' is not used but is required for shared-memory", .{feature}); try err.addMsg("feature '{}' is not used but is required for shared-memory", .{feature});
} }
} }
@ -1268,7 +1274,7 @@ fn validateFeatures(
if (has_tls) { if (has_tls) {
for ([_]types.Feature.Tag{ .atomics, .bulk_memory }) |feature| { for ([_]types.Feature.Tag{ .atomics, .bulk_memory }) |feature| {
if (!allowed[@intFromEnum(feature)]) { if (!allowed[@intFromEnum(feature)]) {
var err = try wasm.base.addErrorWithNotes(0); var err = try diags.addErrorWithNotes(0);
try err.addMsg("feature '{}' is not used but is required for thread-local storage", .{feature}); try err.addMsg("feature '{}' is not used but is required for thread-local storage", .{feature});
} }
} }
@ -1282,7 +1288,7 @@ fn validateFeatures(
// from here a feature is always used // from here a feature is always used
const disallowed_feature = disallowed[@intFromEnum(feature.tag)]; const disallowed_feature = disallowed[@intFromEnum(feature.tag)];
if (@as(u1, @truncate(disallowed_feature)) != 0) { if (@as(u1, @truncate(disallowed_feature)) != 0) {
var err = try wasm.base.addErrorWithNotes(2); var err = try diags.addErrorWithNotes(2);
try err.addMsg("feature '{}' is disallowed, but used by linked object", .{feature.tag}); try err.addMsg("feature '{}' is disallowed, but used by linked object", .{feature.tag});
try err.addNote("disallowed by '{s}'", .{wasm.files.items(.data)[disallowed_feature >> 1].object.path}); try err.addNote("disallowed by '{s}'", .{wasm.files.items(.data)[disallowed_feature >> 1].object.path});
try err.addNote("used in '{s}'", .{object.path}); try err.addNote("used in '{s}'", .{object.path});
@ -1296,7 +1302,7 @@ fn validateFeatures(
for (required, 0..) |required_feature, feature_index| { for (required, 0..) |required_feature, feature_index| {
const is_required = @as(u1, @truncate(required_feature)) != 0; const is_required = @as(u1, @truncate(required_feature)) != 0;
if (is_required and !object_used_features[feature_index]) { if (is_required and !object_used_features[feature_index]) {
var err = try wasm.base.addErrorWithNotes(2); var err = try diags.addErrorWithNotes(2);
try err.addMsg("feature '{}' is required but not used in linked object", .{@as(types.Feature.Tag, @enumFromInt(feature_index))}); try err.addMsg("feature '{}' is required but not used in linked object", .{@as(types.Feature.Tag, @enumFromInt(feature_index))});
try err.addNote("required by '{s}'", .{wasm.files.items(.data)[required_feature >> 1].object.path}); try err.addNote("required by '{s}'", .{wasm.files.items(.data)[required_feature >> 1].object.path});
try err.addNote("missing in '{s}'", .{object.path}); try err.addNote("missing in '{s}'", .{object.path});
@ -1364,6 +1370,7 @@ pub fn findGlobalSymbol(wasm: *Wasm, name: []const u8) ?SymbolLoc {
fn checkUndefinedSymbols(wasm: *const Wasm) !void { fn checkUndefinedSymbols(wasm: *const Wasm) !void {
const comp = wasm.base.comp; const comp = wasm.base.comp;
const diags = &wasm.base.comp.link_diags;
if (comp.config.output_mode == .Obj) return; if (comp.config.output_mode == .Obj) return;
if (wasm.import_symbols) return; if (wasm.import_symbols) return;
@ -1377,7 +1384,7 @@ fn checkUndefinedSymbols(wasm: *const Wasm) !void {
else else
wasm.name; wasm.name;
const symbol_name = undef.getName(wasm); const symbol_name = undef.getName(wasm);
var err = try wasm.base.addErrorWithNotes(1); var err = try diags.addErrorWithNotes(1);
try err.addMsg("could not resolve undefined symbol '{s}'", .{symbol_name}); try err.addMsg("could not resolve undefined symbol '{s}'", .{symbol_name});
try err.addNote("defined in '{s}'", .{file_name}); try err.addNote("defined in '{s}'", .{file_name});
} }
@ -1736,6 +1743,7 @@ fn sortDataSegments(wasm: *Wasm) !void {
/// contain any parameters. /// contain any parameters.
fn setupInitFunctions(wasm: *Wasm) !void { fn setupInitFunctions(wasm: *Wasm) !void {
const gpa = wasm.base.comp.gpa; const gpa = wasm.base.comp.gpa;
const diags = &wasm.base.comp.link_diags;
// There's no constructors for Zig so we can simply search through linked object files only. // There's no constructors for Zig so we can simply search through linked object files only.
for (wasm.objects.items) |file_index| { for (wasm.objects.items) |file_index| {
const object: Object = wasm.files.items(.data)[@intFromEnum(file_index)].object; const object: Object = wasm.files.items(.data)[@intFromEnum(file_index)].object;
@ -1751,7 +1759,7 @@ fn setupInitFunctions(wasm: *Wasm) !void {
break :ty object.func_types[func.type_index]; break :ty object.func_types[func.type_index];
}; };
if (ty.params.len != 0) { if (ty.params.len != 0) {
var err = try wasm.base.addErrorWithNotes(0); var err = try diags.addErrorWithNotes(0);
try err.addMsg("constructor functions cannot take arguments: '{s}'", .{object.string_table.get(symbol.name)}); try err.addMsg("constructor functions cannot take arguments: '{s}'", .{object.string_table.get(symbol.name)});
} }
log.debug("appended init func '{s}'\n", .{object.string_table.get(symbol.name)}); log.debug("appended init func '{s}'\n", .{object.string_table.get(symbol.name)});
@ -2130,12 +2138,13 @@ fn mergeTypes(wasm: *Wasm) !void {
fn checkExportNames(wasm: *Wasm) !void { fn checkExportNames(wasm: *Wasm) !void {
const force_exp_names = wasm.export_symbol_names; const force_exp_names = wasm.export_symbol_names;
const diags = &wasm.base.comp.link_diags;
if (force_exp_names.len > 0) { if (force_exp_names.len > 0) {
var failed_exports = false; var failed_exports = false;
for (force_exp_names) |exp_name| { for (force_exp_names) |exp_name| {
const loc = wasm.findGlobalSymbol(exp_name) orelse { const loc = wasm.findGlobalSymbol(exp_name) orelse {
var err = try wasm.base.addErrorWithNotes(0); var err = try diags.addErrorWithNotes(0);
try err.addMsg("could not export '{s}', symbol not found", .{exp_name}); try err.addMsg("could not export '{s}', symbol not found", .{exp_name});
failed_exports = true; failed_exports = true;
continue; continue;
@ -2195,18 +2204,19 @@ fn setupExports(wasm: *Wasm) !void {
fn setupStart(wasm: *Wasm) !void { fn setupStart(wasm: *Wasm) !void {
const comp = wasm.base.comp; const comp = wasm.base.comp;
const diags = &wasm.base.comp.link_diags;
// do not export entry point if user set none or no default was set. // do not export entry point if user set none or no default was set.
const entry_name = wasm.entry_name orelse return; const entry_name = wasm.entry_name orelse return;
const symbol_loc = wasm.findGlobalSymbol(entry_name) orelse { const symbol_loc = wasm.findGlobalSymbol(entry_name) orelse {
var err = try wasm.base.addErrorWithNotes(0); var err = try diags.addErrorWithNotes(0);
try err.addMsg("Entry symbol '{s}' missing, use '-fno-entry' to suppress", .{entry_name}); try err.addMsg("Entry symbol '{s}' missing, use '-fno-entry' to suppress", .{entry_name});
return error.FlushFailure; return error.FlushFailure;
}; };
const symbol = symbol_loc.getSymbol(wasm); const symbol = symbol_loc.getSymbol(wasm);
if (symbol.tag != .function) { if (symbol.tag != .function) {
var err = try wasm.base.addErrorWithNotes(0); var err = try diags.addErrorWithNotes(0);
try err.addMsg("Entry symbol '{s}' is not a function", .{entry_name}); try err.addMsg("Entry symbol '{s}' is not a function", .{entry_name});
return error.FlushFailure; return error.FlushFailure;
} }
@ -2220,6 +2230,7 @@ fn setupStart(wasm: *Wasm) !void {
/// Sets up the memory section of the wasm module, as well as the stack. /// Sets up the memory section of the wasm module, as well as the stack.
fn setupMemory(wasm: *Wasm) !void { fn setupMemory(wasm: *Wasm) !void {
const comp = wasm.base.comp; const comp = wasm.base.comp;
const diags = &wasm.base.comp.link_diags;
const shared_memory = comp.config.shared_memory; const shared_memory = comp.config.shared_memory;
log.debug("Setting up memory layout", .{}); log.debug("Setting up memory layout", .{});
const page_size = std.wasm.page_size; // 64kb const page_size = std.wasm.page_size; // 64kb
@ -2312,15 +2323,15 @@ fn setupMemory(wasm: *Wasm) !void {
if (wasm.initial_memory) |initial_memory| { if (wasm.initial_memory) |initial_memory| {
if (!std.mem.isAlignedGeneric(u64, initial_memory, page_size)) { if (!std.mem.isAlignedGeneric(u64, initial_memory, page_size)) {
var err = try wasm.base.addErrorWithNotes(0); var err = try diags.addErrorWithNotes(0);
try err.addMsg("Initial memory must be {d}-byte aligned", .{page_size}); try err.addMsg("Initial memory must be {d}-byte aligned", .{page_size});
} }
if (memory_ptr > initial_memory) { if (memory_ptr > initial_memory) {
var err = try wasm.base.addErrorWithNotes(0); var err = try diags.addErrorWithNotes(0);
try err.addMsg("Initial memory too small, must be at least {d} bytes", .{memory_ptr}); try err.addMsg("Initial memory too small, must be at least {d} bytes", .{memory_ptr});
} }
if (initial_memory > max_memory_allowed) { if (initial_memory > max_memory_allowed) {
var err = try wasm.base.addErrorWithNotes(0); var err = try diags.addErrorWithNotes(0);
try err.addMsg("Initial memory exceeds maximum memory {d}", .{max_memory_allowed}); try err.addMsg("Initial memory exceeds maximum memory {d}", .{max_memory_allowed});
} }
memory_ptr = initial_memory; memory_ptr = initial_memory;
@ -2338,15 +2349,15 @@ fn setupMemory(wasm: *Wasm) !void {
if (wasm.max_memory) |max_memory| { if (wasm.max_memory) |max_memory| {
if (!std.mem.isAlignedGeneric(u64, max_memory, page_size)) { if (!std.mem.isAlignedGeneric(u64, max_memory, page_size)) {
var err = try wasm.base.addErrorWithNotes(0); var err = try diags.addErrorWithNotes(0);
try err.addMsg("Maximum memory must be {d}-byte aligned", .{page_size}); try err.addMsg("Maximum memory must be {d}-byte aligned", .{page_size});
} }
if (memory_ptr > max_memory) { if (memory_ptr > max_memory) {
var err = try wasm.base.addErrorWithNotes(0); var err = try diags.addErrorWithNotes(0);
try err.addMsg("Maximum memory too small, must be at least {d} bytes", .{memory_ptr}); try err.addMsg("Maximum memory too small, must be at least {d} bytes", .{memory_ptr});
} }
if (max_memory > max_memory_allowed) { if (max_memory > max_memory_allowed) {
var err = try wasm.base.addErrorWithNotes(0); var err = try diags.addErrorWithNotes(0);
try err.addMsg("Maximum memory exceeds maximum amount {d}", .{max_memory_allowed}); try err.addMsg("Maximum memory exceeds maximum amount {d}", .{max_memory_allowed});
} }
wasm.memories.limits.max = @as(u32, @intCast(max_memory / page_size)); wasm.memories.limits.max = @as(u32, @intCast(max_memory / page_size));
@ -2364,6 +2375,7 @@ fn setupMemory(wasm: *Wasm) !void {
pub fn getMatchingSegment(wasm: *Wasm, file_index: File.Index, symbol_index: Symbol.Index) !u32 { pub fn getMatchingSegment(wasm: *Wasm, file_index: File.Index, symbol_index: Symbol.Index) !u32 {
const comp = wasm.base.comp; const comp = wasm.base.comp;
const gpa = comp.gpa; const gpa = comp.gpa;
const diags = &wasm.base.comp.link_diags;
const obj_file = wasm.file(file_index).?; const obj_file = wasm.file(file_index).?;
const symbol = obj_file.symbols()[@intFromEnum(symbol_index)]; const symbol = obj_file.symbols()[@intFromEnum(symbol_index)];
const index: u32 = @intCast(wasm.segments.items.len); const index: u32 = @intCast(wasm.segments.items.len);
@ -2450,7 +2462,7 @@ pub fn getMatchingSegment(wasm: *Wasm, file_index: File.Index, symbol_index: Sym
break :blk index; break :blk index;
}; };
} else { } else {
var err = try wasm.base.addErrorWithNotes(1); var err = try diags.addErrorWithNotes(1);
try err.addMsg("found unknown section '{s}'", .{section_name}); try err.addMsg("found unknown section '{s}'", .{section_name});
try err.addNote("defined in '{s}'", .{obj_file.path()}); try err.addNote("defined in '{s}'", .{obj_file.path()});
return error.UnexpectedValue; return error.UnexpectedValue;
@ -2487,6 +2499,7 @@ pub fn flushModule(wasm: *Wasm, arena: Allocator, tid: Zcu.PerThread.Id, prog_no
defer tracy.end(); defer tracy.end();
const comp = wasm.base.comp; const comp = wasm.base.comp;
const diags = &comp.link_diags;
if (wasm.llvm_object) |llvm_object| { if (wasm.llvm_object) |llvm_object| {
try wasm.base.emitLlvmObject(arena, llvm_object, prog_node); try wasm.base.emitLlvmObject(arena, llvm_object, prog_node);
const use_lld = build_options.have_llvm and comp.config.use_lld; const use_lld = build_options.have_llvm and comp.config.use_lld;
@ -2569,23 +2582,23 @@ pub fn flushModule(wasm: *Wasm, arena: Allocator, tid: Zcu.PerThread.Id, prog_no
if (wasm.zig_object_index != .null) { if (wasm.zig_object_index != .null) {
try wasm.resolveSymbolsInObject(wasm.zig_object_index); try wasm.resolveSymbolsInObject(wasm.zig_object_index);
} }
if (wasm.base.hasErrors()) return error.FlushFailure; if (diags.hasErrors()) return error.FlushFailure;
for (wasm.objects.items) |object_index| { for (wasm.objects.items) |object_index| {
try wasm.resolveSymbolsInObject(object_index); try wasm.resolveSymbolsInObject(object_index);
} }
if (wasm.base.hasErrors()) return error.FlushFailure; if (diags.hasErrors()) return error.FlushFailure;
var emit_features_count: u32 = 0; var emit_features_count: u32 = 0;
var enabled_features: [@typeInfo(types.Feature.Tag).@"enum".fields.len]bool = undefined; var enabled_features: [@typeInfo(types.Feature.Tag).@"enum".fields.len]bool = undefined;
try wasm.validateFeatures(&enabled_features, &emit_features_count); try wasm.validateFeatures(&enabled_features, &emit_features_count);
try wasm.resolveSymbolsInArchives(); try wasm.resolveSymbolsInArchives();
if (wasm.base.hasErrors()) return error.FlushFailure; if (diags.hasErrors()) return error.FlushFailure;
try wasm.resolveLazySymbols(); try wasm.resolveLazySymbols();
try wasm.checkUndefinedSymbols(); try wasm.checkUndefinedSymbols();
try wasm.checkExportNames(); try wasm.checkExportNames();
try wasm.setupInitFunctions(); try wasm.setupInitFunctions();
if (wasm.base.hasErrors()) return error.FlushFailure; if (diags.hasErrors()) return error.FlushFailure;
try wasm.setupStart(); try wasm.setupStart();
try wasm.markReferences(); try wasm.markReferences();
@ -2594,7 +2607,7 @@ pub fn flushModule(wasm: *Wasm, arena: Allocator, tid: Zcu.PerThread.Id, prog_no
try wasm.mergeTypes(); try wasm.mergeTypes();
try wasm.allocateAtoms(); try wasm.allocateAtoms();
try wasm.setupMemory(); try wasm.setupMemory();
if (wasm.base.hasErrors()) return error.FlushFailure; if (diags.hasErrors()) return error.FlushFailure;
wasm.allocateVirtualAddresses(); wasm.allocateVirtualAddresses();
wasm.mapFunctionTable(); wasm.mapFunctionTable();
try wasm.initializeCallCtorsFunction(); try wasm.initializeCallCtorsFunction();
@ -2604,7 +2617,7 @@ pub fn flushModule(wasm: *Wasm, arena: Allocator, tid: Zcu.PerThread.Id, prog_no
try wasm.setupStartSection(); try wasm.setupStartSection();
try wasm.setupExports(); try wasm.setupExports();
try wasm.writeToFile(enabled_features, emit_features_count, arena); try wasm.writeToFile(enabled_features, emit_features_count, arena);
if (wasm.base.hasErrors()) return error.FlushFailure; if (diags.hasErrors()) return error.FlushFailure;
} }
/// Writes the WebAssembly in-memory module to the file /// Writes the WebAssembly in-memory module to the file
@ -2615,6 +2628,7 @@ fn writeToFile(
arena: Allocator, arena: Allocator,
) !void { ) !void {
const comp = wasm.base.comp; const comp = wasm.base.comp;
const diags = &comp.link_diags;
const gpa = comp.gpa; const gpa = comp.gpa;
const use_llvm = comp.config.use_llvm; const use_llvm = comp.config.use_llvm;
const use_lld = build_options.have_llvm and comp.config.use_lld; const use_lld = build_options.have_llvm and comp.config.use_lld;
@ -3003,7 +3017,7 @@ fn writeToFile(
try emitBuildIdSection(&binary_bytes, str); try emitBuildIdSection(&binary_bytes, str);
}, },
else => |mode| { else => |mode| {
var err = try wasm.base.addErrorWithNotes(0); var err = try diags.addErrorWithNotes(0);
try err.addMsg("build-id '{s}' is not supported for WebAssembly", .{@tagName(mode)}); try err.addMsg("build-id '{s}' is not supported for WebAssembly", .{@tagName(mode)});
}, },
} }
@ -3684,7 +3698,8 @@ fn linkWithLLD(wasm: *Wasm, arena: Allocator, tid: Zcu.PerThread.Id, prog_node:
switch (term) { switch (term) {
.Exited => |code| { .Exited => |code| {
if (code != 0) { if (code != 0) {
comp.lockAndParseLldStderr(linker_command, stderr); const diags = &comp.link_diags;
diags.lockAndParseLldStderr(linker_command, stderr);
return error.LLDReportedFailure; return error.LLDReportedFailure;
} }
}, },

View File

@ -226,6 +226,8 @@ pub fn findImport(object: *const Object, sym: Symbol) types.Import {
/// ///
/// When the object file is *NOT* MVP, we return `null`. /// When the object file is *NOT* MVP, we return `null`.
fn checkLegacyIndirectFunctionTable(object: *Object, wasm_file: *const Wasm) !?Symbol { fn checkLegacyIndirectFunctionTable(object: *Object, wasm_file: *const Wasm) !?Symbol {
const diags = &wasm_file.base.comp.link_diags;
var table_count: usize = 0; var table_count: usize = 0;
for (object.symtable) |sym| { for (object.symtable) |sym| {
if (sym.tag == .table) table_count += 1; if (sym.tag == .table) table_count += 1;
@ -235,7 +237,7 @@ fn checkLegacyIndirectFunctionTable(object: *Object, wasm_file: *const Wasm) !?S
if (object.imported_tables_count == table_count) return null; if (object.imported_tables_count == table_count) return null;
if (table_count != 0) { if (table_count != 0) {
var err = try wasm_file.base.addErrorWithNotes(1); var err = try diags.addErrorWithNotes(1);
try err.addMsg("Expected a table entry symbol for each of the {d} table(s), but instead got {d} symbols.", .{ try err.addMsg("Expected a table entry symbol for each of the {d} table(s), but instead got {d} symbols.", .{
object.imported_tables_count, object.imported_tables_count,
table_count, table_count,
@ -246,14 +248,14 @@ fn checkLegacyIndirectFunctionTable(object: *Object, wasm_file: *const Wasm) !?S
// MVP object files cannot have any table definitions, only imports (for the indirect function table). // MVP object files cannot have any table definitions, only imports (for the indirect function table).
if (object.tables.len > 0) { if (object.tables.len > 0) {
var err = try wasm_file.base.addErrorWithNotes(1); var err = try diags.addErrorWithNotes(1);
try err.addMsg("Unexpected table definition without representing table symbols.", .{}); try err.addMsg("Unexpected table definition without representing table symbols.", .{});
try err.addNote("defined in '{s}'", .{object.path}); try err.addNote("defined in '{s}'", .{object.path});
return error.UnexpectedTable; return error.UnexpectedTable;
} }
if (object.imported_tables_count != 1) { if (object.imported_tables_count != 1) {
var err = try wasm_file.base.addErrorWithNotes(1); var err = try diags.addErrorWithNotes(1);
try err.addMsg("Found more than one table import, but no representing table symbols", .{}); try err.addMsg("Found more than one table import, but no representing table symbols", .{});
try err.addNote("defined in '{s}'", .{object.path}); try err.addNote("defined in '{s}'", .{object.path});
return error.MissingTableSymbols; return error.MissingTableSymbols;
@ -266,7 +268,7 @@ fn checkLegacyIndirectFunctionTable(object: *Object, wasm_file: *const Wasm) !?S
} else unreachable; } else unreachable;
if (!std.mem.eql(u8, object.string_table.get(table_import.name), "__indirect_function_table")) { if (!std.mem.eql(u8, object.string_table.get(table_import.name), "__indirect_function_table")) {
var err = try wasm_file.base.addErrorWithNotes(1); var err = try diags.addErrorWithNotes(1);
try err.addMsg("Non-indirect function table import '{s}' is missing a corresponding symbol", .{object.string_table.get(table_import.name)}); try err.addMsg("Non-indirect function table import '{s}' is missing a corresponding symbol", .{object.string_table.get(table_import.name)});
try err.addNote("defined in '{s}'", .{object.path}); try err.addNote("defined in '{s}'", .{object.path});
return error.MissingTableSymbols; return error.MissingTableSymbols;
@ -587,6 +589,7 @@ fn Parser(comptime ReaderType: type) type {
/// to be able to link. /// to be able to link.
/// Logs an info message when an undefined feature is detected. /// Logs an info message when an undefined feature is detected.
fn parseFeatures(parser: *ObjectParser, gpa: Allocator) !void { fn parseFeatures(parser: *ObjectParser, gpa: Allocator) !void {
const diags = &parser.wasm_file.base.comp.link_diags;
const reader = parser.reader.reader(); const reader = parser.reader.reader();
for (try readVec(&parser.object.features, reader, gpa)) |*feature| { for (try readVec(&parser.object.features, reader, gpa)) |*feature| {
const prefix = try readEnum(types.Feature.Prefix, reader); const prefix = try readEnum(types.Feature.Prefix, reader);
@ -596,7 +599,7 @@ fn Parser(comptime ReaderType: type) type {
try reader.readNoEof(name); try reader.readNoEof(name);
const tag = types.known_features.get(name) orelse { const tag = types.known_features.get(name) orelse {
var err = try parser.wasm_file.base.addErrorWithNotes(1); var err = try diags.addErrorWithNotes(1);
try err.addMsg("Object file contains unknown feature: {s}", .{name}); try err.addMsg("Object file contains unknown feature: {s}", .{name});
try err.addNote("defined in '{s}'", .{parser.object.path}); try err.addNote("defined in '{s}'", .{parser.object.path});
return error.UnknownFeature; return error.UnknownFeature;