Merge pull request #14386 from ziglang/macho-fixes
macho: fix nondeterministic failures in the macOS CI
Commit 74b72a766d
@@ -238,10 +238,10 @@ const Entry = struct {
     }
 };
 
-const BindingTable = std.AutoHashMapUnmanaged(*Atom, std.ArrayListUnmanaged(Atom.Binding));
-const UnnamedConstTable = std.AutoHashMapUnmanaged(Module.Decl.Index, std.ArrayListUnmanaged(*Atom));
-const RebaseTable = std.AutoHashMapUnmanaged(*Atom, std.ArrayListUnmanaged(u32));
-const RelocationTable = std.AutoHashMapUnmanaged(*Atom, std.ArrayListUnmanaged(Relocation));
+const BindingTable = std.AutoArrayHashMapUnmanaged(*Atom, std.ArrayListUnmanaged(Atom.Binding));
+const UnnamedConstTable = std.AutoArrayHashMapUnmanaged(Module.Decl.Index, std.ArrayListUnmanaged(*Atom));
+const RebaseTable = std.AutoArrayHashMapUnmanaged(*Atom, std.ArrayListUnmanaged(u32));
+const RelocationTable = std.AutoArrayHashMapUnmanaged(*Atom, std.ArrayListUnmanaged(Relocation));
 
 const PendingUpdate = union(enum) {
     resolve_undef: u32,
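The four atom-keyed tables move from std.AutoHashMapUnmanaged to std.AutoArrayHashMapUnmanaged. The array-backed variant stores keys and values in contiguous slices and iterates them in insertion order, whereas the plain hash map's iteration order depends on hashing and table capacity, which is where the nondeterministic output came from. A minimal standalone sketch of that property (the map type and contents below are invented for illustration, not taken from the linker):

const std = @import("std");

pub fn main() !void {
    const gpa = std.heap.page_allocator;

    // Array-backed map: keys()/values() are plain slices kept in insertion order.
    var ordered: std.AutoArrayHashMapUnmanaged(u32, []const u8) = .{};
    defer ordered.deinit(gpa);

    try ordered.put(gpa, 30, "third");
    try ordered.put(gpa, 10, "first");
    try ordered.put(gpa, 20, "second");

    // Always prints 30 10 20 -- the insertion order, on every run.
    for (ordered.keys()) |key| {
        std.debug.print("{d} ", .{key});
    }
    std.debug.print("\n", .{});
}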
@@ -469,10 +469,11 @@ pub fn flushModule(self: *MachO, comp: *Compilation, prog_node: *std.Progress.No
 
     const cache_dir_handle = module.zig_cache_artifact_directory.handle;
     var man: Cache.Manifest = undefined;
-    defer if (!self.base.options.disable_lld_caching) man.deinit();
+    defer man.deinit();
 
     var digest: [Cache.hex_digest_len]u8 = undefined;
     man = comp.cache_parent.obtain();
+    man.want_shared_lock = false;
     self.base.releaseLock();
 
     man.hash.addListOfBytes(libs.keys());
@@ -546,11 +547,8 @@ pub fn flushModule(self: *MachO, comp: *Compilation, prog_node: *std.Progress.No
 
     try self.allocateSpecialSymbols();
 
-    {
-        var it = self.relocs.keyIterator();
-        while (it.next()) |atom| {
-            try atom.*.resolveRelocations(self);
-        }
+    for (self.relocs.keys()) |atom| {
+        try atom.resolveRelocations(self);
     }
 
     if (build_options.enable_logging) {
@@ -1017,8 +1015,7 @@ fn writePtrWidthAtom(self: *MachO, atom: *Atom) !void {
 
 fn markRelocsDirtyByTarget(self: *MachO, target: SymbolWithLoc) void {
     // TODO: reverse-lookup might come in handy here
-    var it = self.relocs.valueIterator();
-    while (it.next()) |relocs| {
+    for (self.relocs.values()) |*relocs| {
         for (relocs.items) |*reloc| {
             if (!reloc.target.eql(target)) continue;
             reloc.dirty = true;
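markRelocsDirtyByTarget (and markRelocsDirtyByAddress below) drop the valueIterator in favor of a for loop over values() with a pointer capture, which walks the stored entries in a stable order and mutates them in place. A standalone sketch of that pattern, using a made-up Reloc struct and u32 keys rather than the linker's real types:

const std = @import("std");

const Reloc = struct {
    target: u32,
    dirty: bool = false,
};

pub fn main() !void {
    const gpa = std.heap.page_allocator;

    var relocs: std.AutoArrayHashMapUnmanaged(u32, Reloc) = .{};
    defer relocs.deinit(gpa);

    try relocs.put(gpa, 1, .{ .target = 7 });
    try relocs.put(gpa, 2, .{ .target = 9 });

    // values() hands back a mutable slice, so a pointer capture updates
    // the stored entries directly -- the same shape as the new loop,
    // minus the per-atom relocation lists.
    const wanted: u32 = 7;
    for (relocs.values()) |*reloc| {
        if (reloc.target != wanted) continue;
        reloc.dirty = true;
    }
}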
@@ -1027,8 +1024,7 @@ fn markRelocsDirtyByTarget(self: *MachO, target: SymbolWithLoc) void {
 }
 
 fn markRelocsDirtyByAddress(self: *MachO, addr: u64) void {
-    var it = self.relocs.valueIterator();
-    while (it.next()) |relocs| {
+    for (self.relocs.values()) |*relocs| {
         for (relocs.items) |*reloc| {
             const target_atom = reloc.getTargetAtom(self) orelse continue;
             const target_sym = target_atom.getSymbol(self);
@@ -1392,7 +1388,7 @@ pub fn createLazyPointerAtom(self: *MachO, stub_sym_index: u32, target: SymbolWi
     try self.atom_by_index_table.putNoClobber(gpa, sym_index, atom);
 
     sym.n_value = try self.allocateAtom(atom, atom.size, @alignOf(u64));
-    log.debug("allocated lazy pointer atom at 0x{x}", .{sym.n_value});
+    log.debug("allocated lazy pointer atom at 0x{x} ({s})", .{ sym.n_value, self.getSymbolName(target) });
     try self.writePtrWidthAtom(atom);
 
     return atom;
@@ -1784,47 +1780,32 @@ pub fn deinit(self: *MachO) void {
         assert(self.decls.count() == 0);
     }
 
-    {
-        var it = self.unnamed_const_atoms.valueIterator();
-        while (it.next()) |atoms| {
-            atoms.deinit(gpa);
-        }
-        self.unnamed_const_atoms.deinit(gpa);
+    for (self.unnamed_const_atoms.values()) |*atoms| {
+        atoms.deinit(gpa);
     }
+    self.unnamed_const_atoms.deinit(gpa);
 
     self.atom_by_index_table.deinit(gpa);
 
-    {
-        var it = self.relocs.valueIterator();
-        while (it.next()) |relocs| {
-            relocs.deinit(gpa);
-        }
-        self.relocs.deinit(gpa);
+    for (self.relocs.values()) |*relocs| {
+        relocs.deinit(gpa);
     }
+    self.relocs.deinit(gpa);
 
-    {
-        var it = self.rebases.valueIterator();
-        while (it.next()) |rebases| {
-            rebases.deinit(gpa);
-        }
-        self.rebases.deinit(gpa);
+    for (self.rebases.values()) |*rebases| {
+        rebases.deinit(gpa);
    }
+    self.rebases.deinit(gpa);
 
-    {
-        var it = self.bindings.valueIterator();
-        while (it.next()) |bindings| {
-            bindings.deinit(gpa);
-        }
-        self.bindings.deinit(gpa);
+    for (self.bindings.values()) |*bindings| {
+        bindings.deinit(gpa);
     }
+    self.bindings.deinit(gpa);
 
-    {
-        var it = self.lazy_bindings.valueIterator();
-        while (it.next()) |bindings| {
-            bindings.deinit(gpa);
-        }
-        self.lazy_bindings.deinit(gpa);
+    for (self.lazy_bindings.values()) |*bindings| {
+        bindings.deinit(gpa);
     }
+    self.lazy_bindings.deinit(gpa);
 }
 
 fn freeAtom(self: *MachO, atom: *Atom) void {
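Each iterator-plus-block in deinit collapses into the same two-step shape: free every value's backing list through a pointer capture over values(), then free the map itself. A standalone sketch of that tear-down order with an invented u32-keyed table:

const std = @import("std");

pub fn main() !void {
    const gpa = std.heap.page_allocator;

    var table: std.AutoArrayHashMapUnmanaged(u32, std.ArrayListUnmanaged(u32)) = .{};

    var offsets: std.ArrayListUnmanaged(u32) = .{};
    try offsets.append(gpa, 0x10);
    try table.put(gpa, 1, offsets);

    // Free every value's storage first, then the map's own storage.
    for (table.values()) |*list| {
        list.deinit(gpa);
    }
    table.deinit(gpa);
}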
@@ -2579,13 +2560,13 @@ pub fn deleteExport(self: *MachO, exp: Export) void {
 }
 
 fn freeRelocationsForAtom(self: *MachO, atom: *Atom) void {
-    var removed_relocs = self.relocs.fetchRemove(atom);
+    var removed_relocs = self.relocs.fetchOrderedRemove(atom);
     if (removed_relocs) |*relocs| relocs.value.deinit(self.base.allocator);
-    var removed_rebases = self.rebases.fetchRemove(atom);
+    var removed_rebases = self.rebases.fetchOrderedRemove(atom);
     if (removed_rebases) |*rebases| rebases.value.deinit(self.base.allocator);
-    var removed_bindings = self.bindings.fetchRemove(atom);
+    var removed_bindings = self.bindings.fetchOrderedRemove(atom);
     if (removed_bindings) |*bindings| bindings.value.deinit(self.base.allocator);
-    var removed_lazy_bindings = self.lazy_bindings.fetchRemove(atom);
+    var removed_lazy_bindings = self.lazy_bindings.fetchOrderedRemove(atom);
     if (removed_lazy_bindings) |*lazy_bindings| lazy_bindings.value.deinit(self.base.allocator);
 }
 
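fetchRemove becomes fetchOrderedRemove, the array-hash-map removal that shifts later entries down so the surviving entries keep their insertion order; fetchSwapRemove, the cheaper alternative, would move the last entry into the hole and scramble that order. A standalone sketch with a made-up u32 map:

const std = @import("std");

pub fn main() !void {
    const gpa = std.heap.page_allocator;

    var map: std.AutoArrayHashMapUnmanaged(u32, u32) = .{};
    defer map.deinit(gpa);

    try map.put(gpa, 1, 100);
    try map.put(gpa, 2, 200);
    try map.put(gpa, 3, 300);

    // Removing the middle entry leaves the remaining keys in insertion
    // order: the loop below prints 1 3.
    if (map.fetchOrderedRemove(2)) |removed| {
        std.debug.print("removed {d} -> {d}\n", .{ removed.key, removed.value });
    }

    for (map.keys()) |key| {
        std.debug.print("{d} ", .{key});
    }
    std.debug.print("\n", .{});
}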
@@ -3198,11 +3179,8 @@ fn writeLinkeditSegmentData(self: *MachO) !void {
 fn collectRebaseData(self: *MachO, rebase: *Rebase) !void {
     const gpa = self.base.allocator;
     const slice = self.sections.slice();
-    var it = self.rebases.keyIterator();
 
-    while (it.next()) |key_ptr| {
-        const atom = key_ptr.*;
-
+    for (self.rebases.keys()) |atom, i| {
         log.debug(" ATOM(%{d}, '{s}')", .{ atom.sym_index, atom.getName(self) });
 
         const sym = atom.getSymbol(self);
@@ -3211,7 +3189,7 @@ fn collectRebaseData(self: *MachO, rebase: *Rebase) !void {
 
         const base_offset = sym.n_value - seg.vmaddr;
 
-        const rebases = self.rebases.get(atom).?;
+        const rebases = self.rebases.values()[i];
         try rebase.entries.ensureUnusedCapacity(gpa, rebases.items.len);
 
         for (rebases.items) |offset| {
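values()[i] can replace the get(atom).? lookup because an array hash map keeps keys() and values() as parallel slices: position i in one corresponds to position i in the other. A standalone sketch (map contents invented), written with an index loop so it stays independent of for-loop capture syntax:

const std = @import("std");

pub fn main() !void {
    const gpa = std.heap.page_allocator;

    var map: std.AutoArrayHashMapUnmanaged(u32, []const u8) = .{};
    defer map.deinit(gpa);

    try map.put(gpa, 10, "ten");
    try map.put(gpa, 20, "twenty");

    // keys()[i] and values()[i] always refer to the same entry, so an
    // index in hand makes the extra hash lookup unnecessary.
    const keys = map.keys();
    var i: usize = 0;
    while (i < keys.len) : (i += 1) {
        std.debug.print("{d} -> {s}\n", .{ keys[i], map.values()[i] });
    }
}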
@@ -3230,11 +3208,8 @@ fn collectRebaseData(self: *MachO, rebase: *Rebase) !void {
 fn collectBindData(self: *MachO, bind: anytype, raw_bindings: anytype) !void {
     const gpa = self.base.allocator;
     const slice = self.sections.slice();
-    var it = raw_bindings.keyIterator();
 
-    while (it.next()) |key_ptr| {
-        const atom = key_ptr.*;
-
+    for (raw_bindings.keys()) |atom, i| {
         log.debug(" ATOM(%{d}, '{s}')", .{ atom.sym_index, atom.getName(self) });
 
         const sym = atom.getSymbol(self);
@@ -3243,7 +3218,7 @@ fn collectBindData(self: *MachO, bind: anytype, raw_bindings: anytype) !void {
 
         const base_offset = sym.n_value - seg.vmaddr;
 
-        const bindings = raw_bindings.get(atom).?;
+        const bindings = raw_bindings.values()[i];
         try bind.entries.ensureUnusedCapacity(gpa, bindings.items.len);
 
         for (bindings.items) |binding| {
@@ -3421,13 +3396,17 @@ fn populateLazyBindOffsetsInStubHelper(self: *MachO, lazy_bind: LazyBind) !void
     const header = section.header;
     var atom = section.last_atom.?;
 
-    var index: usize = 0;
-    while (index < lazy_bind.offsets.items.len) : (index += 1) {
+    var index: usize = lazy_bind.offsets.items.len;
+    while (index > 0) : (index -= 1) {
         const sym = atom.getSymbol(self);
         const file_offset = header.offset + sym.n_value - header.addr + stub_offset;
-        const bind_offset = lazy_bind.offsets.items[index];
+        const bind_offset = lazy_bind.offsets.items[index - 1];
 
-        log.debug("writing lazy bind offset 0x{x} in stub helper at 0x{x}", .{ bind_offset, file_offset });
+        log.debug("writing lazy bind offset 0x{x} ({s}) in stub helper at 0x{x}", .{
+            bind_offset,
+            self.getSymbolName(lazy_bind.entries.items[index - 1].target),
+            file_offset,
+        });
 
         try self.base.file.?.pwriteAll(mem.asBytes(&bind_offset), file_offset);
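The lazy-bind offsets are now written back to front, starting from section.last_atom, so the loop walks a count-down index and reads items[index - 1]; starting at len and stopping at zero keeps the unsigned index from underflowing. A standalone sketch of the same idiom over a made-up array:

const std = @import("std");

pub fn main() void {
    const offsets = [_]u32{ 0x10, 0x20, 0x30 };

    // index starts at len and the element is read at index - 1, so the
    // loop visits the last element first and stops cleanly at zero
    // without usize underflow. Prints 0x30, 0x20, 0x10.
    var index: usize = offsets.len;
    while (index > 0) : (index -= 1) {
        std.debug.print("0x{x}\n", .{offsets[index - 1]});
    }
}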