Merge pull request #12802 from ziglang/macho-refactor
Self-hosted backends and linkers refactor: x86_64 + aarch64 + macho + coff
Commit: 69da56b36c
@@ -139,21 +139,10 @@ const MCValue = union(enum) {
/// If the type is a pointer, it means the pointer address is at
/// this memory location.
memory: u64,
/// The value is in memory referenced indirectly via a GOT entry
/// index.
///
/// If the type is a pointer, it means the pointer is referenced
/// indirectly via GOT. When lowered, linker will emit
/// relocations of type ARM64_RELOC_GOT_LOAD_PAGE21 and
/// ARM64_RELOC_GOT_LOAD_PAGEOFF12.
got_load: u32,
/// The value is in memory referenced directly via symbol index.
///
/// If the type is a pointer, it means the pointer is referenced
/// directly via symbol index. When lowered, linker will emit a
/// relocation of type ARM64_RELOC_PAGE21 and
/// ARM64_RELOC_PAGEOFF12.
direct_load: u32,
/// The value is in memory but requires a linker relocation fixup:
/// * got - the value is referenced indirectly via GOT entry index (the linker emits a got-type reloc)
/// * direct - the value is referenced directly via symbol index (the linker emits a displacement reloc)
linker_load: struct { @"type": enum { got, direct }, sym_index: u32 },
/// The value is one of the stack variables.
///
/// If the type is a pointer, it means the pointer address is in
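
Note (illustrative, not part of the diff): a stand-alone sketch of the shape described above, where a single linker_load payload carries both the load kind and the symbol index. The type and function names here are hypothetical stand-ins for the real MCValue/Mir definitions.

const std = @import("std");

// Hypothetical stand-in for the new MCValue.linker_load payload.
const LinkerLoad = struct {
    @"type": enum { got, direct },
    sym_index: u32,
};

// Names the relocation pair analogous to the ones listed in the doc comments above.
fn relocKind(load: LinkerLoad) []const u8 {
    return switch (load.@"type") {
        .got => "ARM64_RELOC_GOT_LOAD_PAGE21/PAGEOFF12",
        .direct => "ARM64_RELOC_PAGE21/PAGEOFF12",
    };
}

pub fn main() void {
    const mcv = LinkerLoad{ .@"type" = .got, .sym_index = 7 };
    std.debug.print("sym #{d} lowered with {s}\n", .{ mcv.sym_index, relocKind(mcv) });
}
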
@@ -2959,8 +2948,7 @@ fn load(self: *Self, dst_mcv: MCValue, ptr: MCValue, ptr_ty: Type) InnerError!vo
.memory,
.stack_offset,
.stack_argument_offset,
.got_load,
.direct_load,
.linker_load,
=> {
const addr_reg = try self.copyToTmpRegister(ptr_ty, ptr);
try self.load(dst_mcv, .{ .register = addr_reg }, ptr_ty);
@@ -3197,8 +3185,7 @@ fn store(self: *Self, ptr: MCValue, value: MCValue, ptr_ty: Type, value_ty: Type
.memory,
.stack_offset,
.stack_argument_offset,
.got_load,
.direct_load,
.linker_load,
=> {
const addr_reg = try self.copyToTmpRegister(ptr_ty, ptr);
try self.store(.{ .register = addr_reg }, value, ptr_ty, value_ty);
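
The arms above all collapse into one fallback: materialize the pointer into a temporary register, then recurse with the .register case. A stand-alone sketch of that normalize-then-recurse pattern, with hypothetical simplified types in place of MCValue and copyToTmpRegister:

const std = @import("std");

const Ptr = union(enum) {
    register: u8,
    memory: u64,
    linker_load: u32, // simplified: just a symbol index
};

fn load(dst: *u64, ptr: Ptr) void {
    switch (ptr) {
        // Base case: a pointer already in a register can be dereferenced directly.
        .register => |reg| {
            std.debug.print("ldr from x{d}\n", .{reg});
            dst.* = 0xaa;
        },
        // Everything else: move the address into a temp register, then recurse.
        .memory, .linker_load => {
            const tmp: u8 = 9; // stand-in for copyToTmpRegister()
            std.debug.print("materialize address into x{d}\n", .{tmp});
            load(dst, .{ .register = tmp });
        },
    }
}

pub fn main() void {
    var v: u64 = 0;
    load(&v, .{ .linker_load = 42 });
}
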
@@ -3493,7 +3480,10 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallOptions.
const func = func_payload.data;
const fn_owner_decl = mod.declPtr(func.owner_decl);
try self.genSetReg(Type.initTag(.u64), .x30, .{
.got_load = fn_owner_decl.link.macho.sym_index,
.linker_load = .{
.@"type" = .got,
.sym_index = fn_owner_decl.link.macho.sym_index,
},
});
// blr x30
_ = try self.addInst(.{
@@ -4427,8 +4417,7 @@ fn genSetStack(self: *Self, ty: Type, stack_offset: u32, mcv: MCValue) InnerErro
.register = cond_reg,
});
},
.got_load,
.direct_load,
.linker_load,
.memory,
.stack_argument_offset,
.stack_offset,
@@ -4479,13 +4468,10 @@ fn genSetStack(self: *Self, ty: Type, stack_offset: u32, mcv: MCValue) InnerErro
});
},
.memory => |addr| try self.genSetReg(Type.usize, src_reg, .{ .immediate = addr }),
.got_load,
.direct_load,
=> |sym_index| {
const tag: Mir.Inst.Tag = switch (mcv) {
.got_load => .load_memory_ptr_got,
.direct_load => .load_memory_ptr_direct,
else => unreachable,
.linker_load => |load_struct| {
const tag: Mir.Inst.Tag = switch (load_struct.@"type") {
.got => .load_memory_ptr_got,
.direct => .load_memory_ptr_direct,
};
const mod = self.bin_file.options.module.?;
_ = try self.addInst(.{
@@ -4494,7 +4480,7 @@ fn genSetStack(self: *Self, ty: Type, stack_offset: u32, mcv: MCValue) InnerErro
.payload = try self.addExtra(Mir.LoadMemoryPie{
.register = @enumToInt(src_reg),
.atom_index = mod.declPtr(self.mod_fn.owner_decl).link.macho.sym_index,
.sym_index = sym_index,
.sym_index = load_struct.sym_index,
}),
},
});
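
The addExtra(Mir.LoadMemoryPie{ ... }) call above stores the record out of line in MIR's extra array and keeps only a u32 payload index on the instruction. A minimal sketch of that mechanism under the assumption of a struct made of u32 fields; this mirrors the idea, not the actual Mir implementation:

const std = @import("std");

const LoadMemoryPie = struct { register: u32, atom_index: u32, sym_index: u32 };

fn addExtra(extra: *std.ArrayList(u32), record: anytype) !u32 {
    const index = @intCast(u32, extra.items.len);
    inline for (std.meta.fields(@TypeOf(record))) |field| {
        try extra.append(@field(record, field.name));
    }
    return index;
}

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    var extra = std.ArrayList(u32).init(gpa.allocator());
    defer extra.deinit();

    const payload = try addExtra(&extra, LoadMemoryPie{ .register = 1, .atom_index = 5, .sym_index = 42 });
    std.debug.print("payload index {d}, extra = {any}\n", .{ payload, extra.items });
}
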
@@ -4594,13 +4580,10 @@ fn genSetReg(self: *Self, ty: Type, reg: Register, mcv: MCValue) InnerError!void
});
},
.register_with_overflow => unreachable, // doesn't fit into a register
.got_load,
.direct_load,
=> |sym_index| {
const tag: Mir.Inst.Tag = switch (mcv) {
.got_load => .load_memory_got,
.direct_load => .load_memory_direct,
else => unreachable,
.linker_load => |load_struct| {
const tag: Mir.Inst.Tag = switch (load_struct.@"type") {
.got => .load_memory_got,
.direct => .load_memory_direct,
};
const mod = self.bin_file.options.module.?;
_ = try self.addInst(.{
@@ -4609,7 +4592,7 @@ fn genSetReg(self: *Self, ty: Type, reg: Register, mcv: MCValue) InnerError!void
.payload = try self.addExtra(Mir.LoadMemoryPie{
.register = @enumToInt(reg),
.atom_index = mod.declPtr(self.mod_fn.owner_decl).link.macho.sym_index,
.sym_index = sym_index,
.sym_index = load_struct.sym_index,
}),
},
});
@@ -4741,8 +4724,7 @@ fn genSetStackArgument(self: *Self, ty: Type, stack_offset: u32, mcv: MCValue) I
.register_with_overflow => {
return self.fail("TODO implement genSetStackArgument {}", .{mcv});
},
.got_load,
.direct_load,
.linker_load,
.memory,
.stack_argument_offset,
.stack_offset,
@@ -4785,13 +4767,10 @@ fn genSetStackArgument(self: *Self, ty: Type, stack_offset: u32, mcv: MCValue) I
});
},
.memory => |addr| try self.genSetReg(ptr_ty, src_reg, .{ .immediate = @intCast(u32, addr) }),
.got_load,
.direct_load,
=> |sym_index| {
const tag: Mir.Inst.Tag = switch (mcv) {
.got_load => .load_memory_ptr_got,
.direct_load => .load_memory_ptr_direct,
else => unreachable,
.linker_load => |load_struct| {
const tag: Mir.Inst.Tag = switch (load_struct.@"type") {
.got => .load_memory_ptr_got,
.direct => .load_memory_ptr_direct,
};
const mod = self.bin_file.options.module.?;
_ = try self.addInst(.{
@@ -4800,7 +4779,7 @@ fn genSetStackArgument(self: *Self, ty: Type, stack_offset: u32, mcv: MCValue) I
.payload = try self.addExtra(Mir.LoadMemoryPie{
.register = @enumToInt(src_reg),
.atom_index = mod.declPtr(self.mod_fn.owner_decl).link.macho.sym_index,
.sym_index = sym_index,
.sym_index = load_struct.sym_index,
}),
},
});
@@ -5107,7 +5086,10 @@ fn lowerDeclRef(self: *Self, tv: TypedValue, decl_index: Module.Decl.Index) Inne
// Because MachO is PIE-always-on, we defer memory address resolution until
// the linker has enough info to perform relocations.
assert(decl.link.macho.sym_index != 0);
return MCValue{ .got_load = decl.link.macho.sym_index };
return MCValue{ .linker_load = .{
.@"type" = .got,
.sym_index = decl.link.macho.sym_index,
} };
} else if (self.bin_file.cast(link.File.Coff)) |_| {
return self.fail("TODO codegen COFF const Decl pointer", .{});
} else if (self.bin_file.cast(link.File.Plan9)) |p9| {
@@ -5129,7 +5111,10 @@ fn lowerUnnamedConst(self: *Self, tv: TypedValue) InnerError!MCValue {
const vaddr = elf_file.local_symbols.items[local_sym_index].st_value;
return MCValue{ .memory = vaddr };
} else if (self.bin_file.cast(link.File.MachO)) |_| {
return MCValue{ .direct_load = local_sym_index };
return MCValue{ .linker_load = .{
.@"type" = .direct,
.sym_index = local_sym_index,
} };
} else if (self.bin_file.cast(link.File.Coff)) |_| {
return self.fail("TODO lower unnamed const in COFF", .{});
} else if (self.bin_file.cast(link.File.Plan9)) |_| {

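A compact sketch of the policy these two hunks encode: references to named decls are routed through the GOT, since their final addresses are only known to the linker under PIE, while freshly emitted unnamed constants are referenced directly by their local symbol index. Names below are illustrative.

const std = @import("std");

const LoadKind = enum { got, direct };

fn loadKindFor(is_unnamed_const: bool) LoadKind {
    // Mirrors lowerDeclRef (-> .got) vs lowerUnnamedConst (-> .direct) above.
    return if (is_unnamed_const) .direct else .got;
}

pub fn main() void {
    std.debug.print("decl ref: {s}\n", .{@tagName(loadKindFor(false))});
    std.debug.print("unnamed const: {s}\n", .{@tagName(loadKindFor(true))});
}
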
@@ -681,12 +681,10 @@ fn mirCallExtern(emit: *Emit, inst: Mir.Inst.Index) !void {
};
// Add relocation to the decl.
const atom = macho_file.atom_by_index_table.get(relocation.atom_index).?;
const target = macho_file.getGlobalByIndex(relocation.sym_index);
try atom.relocs.append(emit.bin_file.allocator, .{
.offset = offset,
.target = .{
.sym_index = relocation.sym_index,
.file = null,
},
.target = target,
.addend = 0,
.subtractor = null,
.pcrel = true,

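The change above makes the extern-call relocation target the resolved global, looked up via getGlobalByIndex, which may live in another file, instead of assuming a local .file = null symbol. A stand-alone sketch of that lookup, with hypothetical stand-ins for SymbolWithLoc and the globals table:

const std = @import("std");

const SymbolWithLoc = struct { sym_index: u32, file: ?u32 };

// Stand-in for MachO.globals: a flat array addressed by global index.
const globals = [_]SymbolWithLoc{
    .{ .sym_index = 3, .file = null }, // defined in the Zig module
    .{ .sym_index = 8, .file = 1 }, // defined in object file #1
};

// Stand-in for getGlobalByIndex: the reloc target is whatever the global resolves to.
fn getGlobalByIndex(index: u32) SymbolWithLoc {
    return globals[index];
}

pub fn main() void {
    const target = getGlobalByIndex(1);
    std.debug.print("reloc target: sym {d}, file {any}\n", .{ target.sym_index, target.file });
}
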
@@ -128,15 +128,11 @@ pub const MCValue = union(enum) {
/// The value is in memory at a hard-coded address.
/// If the type is a pointer, it means the pointer address is at this memory location.
memory: u64,
/// The value is in memory referenced indirectly via a GOT entry index.
/// If the type is a pointer, it means the pointer is referenced indirectly via GOT.
/// When lowered, linker will emit a relocation of type X86_64_RELOC_GOT.
got_load: u32,
imports_load: u32,
/// The value is in memory referenced directly via symbol index.
/// If the type is a pointer, it means the pointer is referenced directly via symbol index.
/// When lowered, linker will emit a relocation of type X86_64_RELOC_SIGNED.
direct_load: u32,
/// The value is in memory but requires a linker relocation fixup:
/// * got - the value is referenced indirectly via GOT entry index (the linker emits a got-type reloc)
/// * direct - the value is referenced directly via symbol index (the linker emits a displacement reloc)
/// * import - the value is referenced indirectly via import entry index (the linker emits an import-type reloc)
linker_load: struct { @"type": enum { got, direct, import }, sym_index: u32 },
/// The value is one of the stack variables.
/// If the type is a pointer, it means the pointer address is in the stack at this offset.
stack_offset: i32,
@@ -150,9 +146,7 @@ pub const MCValue = union(enum) {
.memory,
.stack_offset,
.ptr_stack_offset,
.direct_load,
.got_load,
.imports_load,
.linker_load,
=> true,
else => false,
};
@@ -165,26 +159,6 @@ pub const MCValue = union(enum) {
};
}

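Elsewhere in this diff (loadMemPtrIntoRegister and mirLeaPic) the three load kinds are packed into a 2-bit flags field: .got = 0b00, .direct = 0b01, .import = 0b10. A round-trip sketch of that encoding (illustrative only):

const std = @import("std");

const LoadType = enum { got, direct, import };

fn encode(t: LoadType) u2 {
    return switch (t) {
        .got => 0b00,
        .direct => 0b01,
        .import => 0b10,
    };
}

fn decode(flags: u2) LoadType {
    return switch (flags) {
        0b00 => .got,
        0b01 => .direct,
        0b10 => .import,
        else => unreachable, // 0b11 is unused by this scheme
    };
}

pub fn main() void {
    inline for (.{ LoadType.got, LoadType.direct, LoadType.import }) |t| {
        std.debug.print("{s} -> {b:0>2} -> {s}\n", .{ @tagName(t), encode(t), @tagName(decode(encode(t))) });
    }
}
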
fn isMutable(mcv: MCValue) bool {
|
||||
return switch (mcv) {
|
||||
.none => unreachable,
|
||||
.unreach => unreachable,
|
||||
.dead => unreachable,
|
||||
|
||||
.immediate,
|
||||
.memory,
|
||||
.eflags,
|
||||
.ptr_stack_offset,
|
||||
.undef,
|
||||
.register_overflow,
|
||||
=> false,
|
||||
|
||||
.register,
|
||||
.stack_offset,
|
||||
=> true,
|
||||
};
|
||||
}
|
||||
|
||||
fn isRegister(mcv: MCValue) bool {
|
||||
return switch (mcv) {
|
||||
.register => true,
|
||||
@ -2307,11 +2281,7 @@ fn airArrayElemVal(self: *Self, inst: Air.Inst.Index) !void {
|
||||
.data = .{ .imm = @bitCast(u32, -off) },
|
||||
});
|
||||
},
|
||||
.memory,
|
||||
.got_load,
|
||||
.direct_load,
|
||||
.imports_load,
|
||||
=> {
|
||||
.memory, .linker_load => {
|
||||
try self.loadMemPtrIntoRegister(addr_reg, Type.usize, array);
|
||||
},
|
||||
else => return self.fail("TODO implement array_elem_val when array is {}", .{array}),
|
||||
@ -2652,11 +2622,7 @@ fn load(self: *Self, dst_mcv: MCValue, ptr: MCValue, ptr_ty: Type) InnerError!vo
|
||||
else => return self.fail("TODO implement loading from register into {}", .{dst_mcv}),
|
||||
}
|
||||
},
|
||||
.memory,
|
||||
.got_load,
|
||||
.direct_load,
|
||||
.imports_load,
|
||||
=> {
|
||||
.memory, .linker_load => {
|
||||
const reg = try self.copyToTmpRegister(ptr_ty, ptr);
|
||||
try self.load(dst_mcv, .{ .register = reg }, ptr_ty);
|
||||
},
|
||||
@ -2691,10 +2657,7 @@ fn airLoad(self: *Self, inst: Air.Inst.Index) !void {
|
||||
|
||||
fn loadMemPtrIntoRegister(self: *Self, reg: Register, ptr_ty: Type, ptr: MCValue) InnerError!void {
|
||||
switch (ptr) {
|
||||
.got_load,
|
||||
.direct_load,
|
||||
.imports_load,
|
||||
=> |sym_index| {
|
||||
.linker_load => |load_struct| {
|
||||
const abi_size = @intCast(u32, ptr_ty.abiSize(self.target.*));
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const fn_owner_decl = mod.declPtr(self.mod_fn.owner_decl);
|
||||
@ -2702,11 +2665,10 @@ fn loadMemPtrIntoRegister(self: *Self, reg: Register, ptr_ty: Type, ptr: MCValue
|
||||
fn_owner_decl.link.macho.sym_index
|
||||
else
|
||||
fn_owner_decl.link.coff.sym_index;
|
||||
const flags: u2 = switch (ptr) {
|
||||
.got_load => 0b00,
|
||||
.direct_load => 0b01,
|
||||
.imports_load => 0b10,
|
||||
else => unreachable,
|
||||
const flags: u2 = switch (load_struct.@"type") {
|
||||
.got => 0b00,
|
||||
.direct => 0b01,
|
||||
.import => 0b10,
|
||||
};
|
||||
_ = try self.addInst(.{
|
||||
.tag = .lea_pic,
|
||||
@ -2717,7 +2679,7 @@ fn loadMemPtrIntoRegister(self: *Self, reg: Register, ptr_ty: Type, ptr: MCValue
|
||||
.data = .{
|
||||
.relocation = .{
|
||||
.atom_index = atom_index,
|
||||
.sym_index = sym_index,
|
||||
.sym_index = load_struct.sym_index,
|
||||
},
|
||||
},
|
||||
});
|
||||
@ -2801,9 +2763,7 @@ fn store(self: *Self, ptr: MCValue, value: MCValue, ptr_ty: Type, value_ty: Type
|
||||
.register => |src_reg| {
|
||||
try self.genInlineMemcpyRegisterRegister(value_ty, reg, src_reg, 0);
|
||||
},
|
||||
.got_load,
|
||||
.direct_load,
|
||||
.imports_load,
|
||||
.linker_load,
|
||||
.memory,
|
||||
.stack_offset,
|
||||
=> {
|
||||
@ -2822,11 +2782,7 @@ fn store(self: *Self, ptr: MCValue, value: MCValue, ptr_ty: Type, value_ty: Type
|
||||
},
|
||||
}
|
||||
},
|
||||
.got_load,
|
||||
.direct_load,
|
||||
.imports_load,
|
||||
.memory,
|
||||
=> {
|
||||
.linker_load, .memory => {
|
||||
const value_lock: ?RegisterLock = switch (value) {
|
||||
.register => |reg| self.register_manager.lockReg(reg),
|
||||
else => null,
|
||||
@ -2894,11 +2850,7 @@ fn store(self: *Self, ptr: MCValue, value: MCValue, ptr_ty: Type, value_ty: Type
|
||||
.register => {
|
||||
return self.store(new_ptr, value, ptr_ty, value_ty);
|
||||
},
|
||||
.got_load,
|
||||
.direct_load,
|
||||
.imports_load,
|
||||
.memory,
|
||||
=> {
|
||||
.linker_load, .memory => {
|
||||
if (abi_size <= 8) {
|
||||
const tmp_reg = try self.register_manager.allocReg(null, gp);
|
||||
const tmp_reg_lock = self.register_manager.lockRegAssumeUnused(tmp_reg);
|
||||
@ -3606,9 +3558,7 @@ fn genBinOpMir(self: *Self, mir_tag: Mir.Inst.Tag, dst_ty: Type, dst_mcv: MCValu
|
||||
});
|
||||
},
|
||||
.memory,
|
||||
.got_load,
|
||||
.direct_load,
|
||||
.imports_load,
|
||||
.linker_load,
|
||||
.eflags,
|
||||
=> {
|
||||
assert(abi_size <= 8);
|
||||
@ -3694,10 +3644,7 @@ fn genBinOpMir(self: *Self, mir_tag: Mir.Inst.Tag, dst_ty: Type, dst_mcv: MCValu
|
||||
=> {
|
||||
return self.fail("TODO implement x86 ADD/SUB/CMP source memory", .{});
|
||||
},
|
||||
.got_load,
|
||||
.direct_load,
|
||||
.imports_load,
|
||||
=> {
|
||||
.linker_load => {
|
||||
return self.fail("TODO implement x86 ADD/SUB/CMP source symbol at index in linker", .{});
|
||||
},
|
||||
.eflags => {
|
||||
@ -3708,10 +3655,7 @@ fn genBinOpMir(self: *Self, mir_tag: Mir.Inst.Tag, dst_ty: Type, dst_mcv: MCValu
|
||||
.memory => {
|
||||
return self.fail("TODO implement x86 ADD/SUB/CMP destination memory", .{});
|
||||
},
|
||||
.got_load,
|
||||
.direct_load,
|
||||
.imports_load,
|
||||
=> {
|
||||
.linker_load => {
|
||||
return self.fail("TODO implement x86 ADD/SUB/CMP destination symbol at index", .{});
|
||||
},
|
||||
}
|
||||
@ -3779,10 +3723,7 @@ fn genIntMulComplexOpMir(self: *Self, dst_ty: Type, dst_mcv: MCValue, src_mcv: M
|
||||
.memory => {
|
||||
return self.fail("TODO implement x86 multiply source memory", .{});
|
||||
},
|
||||
.got_load,
|
||||
.direct_load,
|
||||
.imports_load,
|
||||
=> {
|
||||
.linker_load => {
|
||||
return self.fail("TODO implement x86 multiply source symbol at index in linker", .{});
|
||||
},
|
||||
.eflags => {
|
||||
@ -3826,10 +3767,7 @@ fn genIntMulComplexOpMir(self: *Self, dst_ty: Type, dst_mcv: MCValue, src_mcv: M
|
||||
.memory, .stack_offset => {
|
||||
return self.fail("TODO implement x86 multiply source memory", .{});
|
||||
},
|
||||
.got_load,
|
||||
.direct_load,
|
||||
.imports_load,
|
||||
=> {
|
||||
.linker_load => {
|
||||
return self.fail("TODO implement x86 multiply source symbol at index in linker", .{});
|
||||
},
|
||||
.eflags => {
|
||||
@ -3840,10 +3778,7 @@ fn genIntMulComplexOpMir(self: *Self, dst_ty: Type, dst_mcv: MCValue, src_mcv: M
|
||||
.memory => {
|
||||
return self.fail("TODO implement x86 multiply destination memory", .{});
|
||||
},
|
||||
.got_load,
|
||||
.direct_load,
|
||||
.imports_load,
|
||||
=> {
|
||||
.linker_load => {
|
||||
return self.fail("TODO implement x86 multiply destination symbol at index in linker", .{});
|
||||
},
|
||||
}
|
||||
@ -4006,9 +3941,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallOptions.
|
||||
.unreach => unreachable,
|
||||
.dead => unreachable,
|
||||
.memory => unreachable,
|
||||
.got_load => unreachable,
|
||||
.direct_load => unreachable,
|
||||
.imports_load => unreachable,
|
||||
.linker_load => unreachable,
|
||||
.eflags => unreachable,
|
||||
.register_overflow => unreachable,
|
||||
}
|
||||
@ -4066,7 +3999,10 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallOptions.
|
||||
const func = func_payload.data;
|
||||
const fn_owner_decl = mod.declPtr(func.owner_decl);
|
||||
try self.genSetReg(Type.initTag(.usize), .rax, .{
|
||||
.got_load = fn_owner_decl.link.coff.sym_index,
|
||||
.linker_load = .{
|
||||
.@"type" = .got,
|
||||
.sym_index = fn_owner_decl.link.coff.sym_index,
|
||||
},
|
||||
});
|
||||
_ = try self.addInst(.{
|
||||
.tag = .call,
|
||||
@ -4087,7 +4023,10 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallOptions.
|
||||
}
|
||||
const sym_index = try coff_file.getGlobalSymbol(mem.sliceTo(decl_name, 0));
|
||||
try self.genSetReg(Type.initTag(.usize), .rax, .{
|
||||
.imports_load = sym_index,
|
||||
.linker_load = .{
|
||||
.@"type" = .import,
|
||||
.sym_index = sym_index,
|
||||
},
|
||||
});
|
||||
_ = try self.addInst(.{
|
||||
.tag = .call,
|
||||
@ -4119,7 +4058,12 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallOptions.
|
||||
const func = func_payload.data;
|
||||
const fn_owner_decl = mod.declPtr(func.owner_decl);
|
||||
const sym_index = fn_owner_decl.link.macho.sym_index;
|
||||
try self.genSetReg(Type.initTag(.usize), .rax, .{ .got_load = sym_index });
|
||||
try self.genSetReg(Type.initTag(.usize), .rax, .{
|
||||
.linker_load = .{
|
||||
.@"type" = .got,
|
||||
.sym_index = sym_index,
|
||||
},
|
||||
});
|
||||
// callq *%rax
|
||||
_ = try self.addInst(.{
|
||||
.tag = .call,
|
||||
@ -4505,11 +4449,7 @@ fn genVarDbgInfo(
|
||||
leb128.writeILEB128(dbg_info.writer(), -off) catch unreachable;
|
||||
dbg_info.items[fixup] += @intCast(u8, dbg_info.items.len - fixup - 2);
|
||||
},
|
||||
.memory,
|
||||
.got_load,
|
||||
.direct_load,
|
||||
.imports_load,
|
||||
=> {
|
||||
.memory, .linker_load => {
|
||||
const ptr_width = @intCast(u8, @divExact(self.target.cpu.arch.ptrBitWidth(), 8));
|
||||
const is_ptr = switch (tag) {
|
||||
.dbg_var_ptr => true,
|
||||
@ -4540,10 +4480,11 @@ fn genVarDbgInfo(
|
||||
try dbg_info.append(DW.OP.deref);
|
||||
}
|
||||
switch (mcv) {
|
||||
.got_load,
|
||||
.direct_load,
|
||||
.imports_load,
|
||||
=> |index| try dw.addExprlocReloc(index, offset, is_ptr),
|
||||
.linker_load => |load_struct| try dw.addExprlocReloc(
|
||||
load_struct.sym_index,
|
||||
offset,
|
||||
is_ptr,
|
||||
),
|
||||
else => {},
|
||||
}
|
||||
},
|
||||
@ -5587,11 +5528,7 @@ fn genSetStackArg(self: *Self, ty: Type, stack_offset: i32, mcv: MCValue) InnerE
|
||||
else => return self.fail("TODO implement inputs on stack for {} with abi size > 8", .{mcv}),
|
||||
}
|
||||
},
|
||||
.memory,
|
||||
.direct_load,
|
||||
.got_load,
|
||||
.imports_load,
|
||||
=> {
|
||||
.memory, .linker_load => {
|
||||
if (abi_size <= 8) {
|
||||
const reg = try self.copyToTmpRegister(ty, mcv);
|
||||
return self.genSetStackArg(ty, stack_offset, MCValue{ .register = reg });
|
||||
@ -5835,11 +5772,7 @@ fn genSetStack(self: *Self, ty: Type, stack_offset: i32, mcv: MCValue, opts: Inl
|
||||
},
|
||||
}
|
||||
},
|
||||
.memory,
|
||||
.got_load,
|
||||
.direct_load,
|
||||
.imports_load,
|
||||
=> {
|
||||
.memory, .linker_load => {
|
||||
if (abi_size <= 8) {
|
||||
const reg = try self.copyToTmpRegister(ty, mcv);
|
||||
return self.genSetStack(ty, stack_offset, MCValue{ .register = reg }, opts);
|
||||
@ -5959,11 +5892,7 @@ fn genInlineMemcpy(
|
||||
const tmp_reg = regs[4].to8();
|
||||
|
||||
switch (dst_ptr) {
|
||||
.memory,
|
||||
.got_load,
|
||||
.direct_load,
|
||||
.imports_load,
|
||||
=> {
|
||||
.memory, .linker_load => {
|
||||
try self.loadMemPtrIntoRegister(dst_addr_reg, Type.usize, dst_ptr);
|
||||
},
|
||||
.ptr_stack_offset, .stack_offset => |off| {
|
||||
@ -5992,11 +5921,7 @@ fn genInlineMemcpy(
|
||||
}
|
||||
|
||||
switch (src_ptr) {
|
||||
.memory,
|
||||
.got_load,
|
||||
.direct_load,
|
||||
.imports_load,
|
||||
=> {
|
||||
.memory, .linker_load => {
|
||||
try self.loadMemPtrIntoRegister(src_addr_reg, Type.usize, src_ptr);
|
||||
},
|
||||
.ptr_stack_offset, .stack_offset => |off| {
|
||||
@ -6120,11 +6045,7 @@ fn genInlineMemset(
|
||||
const index_reg = regs[1].to64();
|
||||
|
||||
switch (dst_ptr) {
|
||||
.memory,
|
||||
.got_load,
|
||||
.direct_load,
|
||||
.imports_load,
|
||||
=> {
|
||||
.memory, .linker_load => {
|
||||
try self.loadMemPtrIntoRegister(addr_reg, Type.usize, dst_ptr);
|
||||
},
|
||||
.ptr_stack_offset, .stack_offset => |off| {
|
||||
@ -6356,10 +6277,7 @@ fn genSetReg(self: *Self, ty: Type, reg: Register, mcv: MCValue) InnerError!void
|
||||
.data = undefined,
|
||||
});
|
||||
},
|
||||
.direct_load,
|
||||
.got_load,
|
||||
.imports_load,
|
||||
=> {
|
||||
.linker_load => {
|
||||
switch (ty.zigTypeTag()) {
|
||||
.Float => {
|
||||
const base_reg = try self.register_manager.allocReg(null, gp);
|
||||
@ -6753,11 +6671,7 @@ fn airMemcpy(self: *Self, inst: Air.Inst.Index) !void {
|
||||
// TODO Is this the only condition for pointer dereference for memcpy?
|
||||
const src: MCValue = blk: {
|
||||
switch (src_ptr) {
|
||||
.got_load,
|
||||
.direct_load,
|
||||
.imports_load,
|
||||
.memory,
|
||||
=> {
|
||||
.linker_load, .memory => {
|
||||
const reg = try self.register_manager.allocReg(null, gp);
|
||||
try self.loadMemPtrIntoRegister(reg, src_ty, src_ptr);
|
||||
_ = try self.addInst(.{
|
||||
@ -6997,10 +6911,16 @@ fn lowerDeclRef(self: *Self, tv: TypedValue, decl_index: Module.Decl.Index) Inne
|
||||
return MCValue{ .memory = got_addr };
|
||||
} else if (self.bin_file.cast(link.File.MachO)) |_| {
|
||||
assert(decl.link.macho.sym_index != 0);
|
||||
return MCValue{ .got_load = decl.link.macho.sym_index };
|
||||
return MCValue{ .linker_load = .{
|
||||
.@"type" = .got,
|
||||
.sym_index = decl.link.macho.sym_index,
|
||||
} };
|
||||
} else if (self.bin_file.cast(link.File.Coff)) |_| {
|
||||
assert(decl.link.coff.sym_index != 0);
|
||||
return MCValue{ .got_load = decl.link.coff.sym_index };
|
||||
return MCValue{ .linker_load = .{
|
||||
.@"type" = .got,
|
||||
.sym_index = decl.link.coff.sym_index,
|
||||
} };
|
||||
} else if (self.bin_file.cast(link.File.Plan9)) |p9| {
|
||||
try p9.seeDecl(decl_index);
|
||||
const got_addr = p9.bases.data + decl.link.plan9.got_index.? * ptr_bytes;
|
||||
@ -7019,9 +6939,15 @@ fn lowerUnnamedConst(self: *Self, tv: TypedValue) InnerError!MCValue {
|
||||
const vaddr = elf_file.local_symbols.items[local_sym_index].st_value;
|
||||
return MCValue{ .memory = vaddr };
|
||||
} else if (self.bin_file.cast(link.File.MachO)) |_| {
|
||||
return MCValue{ .direct_load = local_sym_index };
|
||||
return MCValue{ .linker_load = .{
|
||||
.@"type" = .direct,
|
||||
.sym_index = local_sym_index,
|
||||
} };
|
||||
} else if (self.bin_file.cast(link.File.Coff)) |_| {
|
||||
return MCValue{ .direct_load = local_sym_index };
|
||||
return MCValue{ .linker_load = .{
|
||||
.@"type" = .direct,
|
||||
.sym_index = local_sym_index,
|
||||
} };
|
||||
} else if (self.bin_file.cast(link.File.Plan9)) |_| {
|
||||
return self.fail("TODO lower unnamed const in Plan9", .{});
|
||||
} else {
|
||||
|
||||
@ -1021,10 +1021,14 @@ fn mirLeaPic(emit: *Emit, inst: Mir.Inst.Index) InnerError!void {
|
||||
.@"type" = switch (ops.flags) {
|
||||
0b00 => .got,
|
||||
0b01 => .direct,
|
||||
0b10 => .imports,
|
||||
0b10 => .import,
|
||||
else => unreachable,
|
||||
},
|
||||
.target = switch (ops.flags) {
|
||||
0b00, 0b01 => .{ .sym_index = relocation.sym_index, .file = null },
|
||||
0b10 => coff_file.getGlobalByIndex(relocation.sym_index),
|
||||
else => unreachable,
|
||||
},
|
||||
.target = .{ .sym_index = relocation.sym_index, .file = null },
|
||||
.offset = @intCast(u32, end_offset - 4),
|
||||
.addend = 0,
|
||||
.pcrel = true,
|
||||
@ -1142,12 +1146,10 @@ fn mirCallExtern(emit: *Emit, inst: Mir.Inst.Index) InnerError!void {
|
||||
if (emit.bin_file.cast(link.File.MachO)) |macho_file| {
|
||||
// Add relocation to the decl.
|
||||
const atom = macho_file.atom_by_index_table.get(relocation.atom_index).?;
|
||||
const target = macho_file.getGlobalByIndex(relocation.sym_index);
|
||||
try atom.relocs.append(emit.bin_file.allocator, .{
|
||||
.offset = offset,
|
||||
.target = .{
|
||||
.sym_index = relocation.sym_index,
|
||||
.file = null,
|
||||
},
|
||||
.target = target,
|
||||
.addend = 0,
|
||||
.subtractor = null,
|
||||
.pcrel = true,
|
||||
@ -1157,16 +1159,17 @@ fn mirCallExtern(emit: *Emit, inst: Mir.Inst.Index) InnerError!void {
|
||||
} else if (emit.bin_file.cast(link.File.Coff)) |coff_file| {
|
||||
// Add relocation to the decl.
|
||||
const atom = coff_file.atom_by_index_table.get(relocation.atom_index).?;
|
||||
const target = coff_file.getGlobalByIndex(relocation.sym_index);
|
||||
try atom.addRelocation(coff_file, .{
|
||||
.@"type" = .direct,
|
||||
.target = .{ .sym_index = relocation.sym_index, .file = null },
|
||||
.target = target,
|
||||
.offset = offset,
|
||||
.addend = 0,
|
||||
.pcrel = true,
|
||||
.length = 2,
|
||||
});
|
||||
} else {
|
||||
return emit.fail("TODO implement call_extern for linking backends different than MachO", .{});
|
||||
return emit.fail("TODO implement call_extern for linking backends different than MachO and COFF", .{});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -127,7 +127,7 @@ pub const Reloc = struct {
@"type": enum {
got,
direct,
imports,
import,
},
target: SymbolWithLoc,
offset: u32,
@@ -141,7 +141,7 @@ pub const Reloc = struct {
switch (self.@"type") {
.got => return coff_file.getGotAtomForSymbol(self.target),
.direct => return coff_file.getAtomForSymbol(self.target),
.imports => return coff_file.getImportAtomForSymbol(self.target),
.import => return coff_file.getImportAtomForSymbol(self.target),
}
}
};
@@ -1423,23 +1423,22 @@ fn resolveGlobalSymbol(self: *Coff, current: SymbolWithLoc) !void {
const sym = self.getSymbol(current);
const sym_name = self.getSymbolName(current);

const global_index = self.resolver.get(sym_name) orelse {
const name = try gpa.dupe(u8, sym_name);
const global_index = try self.allocateGlobal();
self.globals.items[global_index] = current;
try self.resolver.putNoClobber(gpa, name, global_index);
const gop = try self.getOrPutGlobalPtr(sym_name);
if (!gop.found_existing) {
gop.value_ptr.* = current;
if (sym.section_number == .UNDEFINED) {
try self.unresolved.putNoClobber(gpa, global_index, false);
try self.unresolved.putNoClobber(gpa, self.getGlobalIndex(sym_name).?, false);
}
return;
};
}

log.debug("TODO finish resolveGlobalSymbols implementation", .{});

if (sym.section_number == .UNDEFINED) return;

_ = self.unresolved.swapRemove(global_index);
self.globals.items[global_index] = current;
_ = self.unresolved.swapRemove(self.getGlobalIndex(sym_name).?);

gop.value_ptr.* = current;
}

pub fn flush(self: *Coff, comp: *Compilation, prog_node: *std.Progress.Node) !void {
@@ -1544,25 +1543,26 @@ pub fn getDeclVAddr(
}

pub fn getGlobalSymbol(self: *Coff, name: []const u8) !u32 {
if (self.resolver.get(name)) |global_index| {
return self.globals.items[global_index].sym_index;
const gop = try self.getOrPutGlobalPtr(name);
const global_index = self.getGlobalIndex(name).?;

if (gop.found_existing) {
return global_index;
}

const gpa = self.base.allocator;
const sym_index = try self.allocateSymbol();
const global_index = try self.allocateGlobal();
const sym_loc = SymbolWithLoc{ .sym_index = sym_index, .file = null };
self.globals.items[global_index] = sym_loc;
gop.value_ptr.* = sym_loc;

const gpa = self.base.allocator;
const sym_name = try gpa.dupe(u8, name);
const sym = self.getSymbolPtr(sym_loc);
try self.setSymbolName(sym, sym_name);
sym.storage_class = .EXTERNAL;

try self.resolver.putNoClobber(gpa, sym_name, global_index);
try self.unresolved.putNoClobber(gpa, global_index, true);

return sym_index;
return global_index;
}

pub fn updateDeclLineNumber(self: *Coff, module: *Module, decl: *Module.Decl) !void {
@@ -2061,6 +2061,49 @@ pub fn getSymbolName(self: *const Coff, sym_loc: SymbolWithLoc) []const u8 {
return self.strtab.get(offset).?;
}

/// Returns pointer to the global entry for `name` if one exists.
pub fn getGlobalPtr(self: *Coff, name: []const u8) ?*SymbolWithLoc {
const global_index = self.resolver.get(name) orelse return null;
return &self.globals.items[global_index];
}

/// Returns the global entry for `name` if one exists.
pub fn getGlobal(self: *const Coff, name: []const u8) ?SymbolWithLoc {
const global_index = self.resolver.get(name) orelse return null;
return self.globals.items[global_index];
}

/// Returns the index of the global entry for `name` if one exists.
pub fn getGlobalIndex(self: *const Coff, name: []const u8) ?u32 {
return self.resolver.get(name);
}

/// Returns global entry at `index`.
pub fn getGlobalByIndex(self: *const Coff, index: u32) SymbolWithLoc {
assert(index < self.globals.items.len);
return self.globals.items[index];
}

const GetOrPutGlobalPtrResult = struct {
found_existing: bool,
value_ptr: *SymbolWithLoc,
};

/// Return pointer to the global entry for `name` if one exists.
/// Puts a new global entry for `name` if one doesn't exist, and
/// returns a pointer to it.
pub fn getOrPutGlobalPtr(self: *Coff, name: []const u8) !GetOrPutGlobalPtrResult {
if (self.getGlobalPtr(name)) |ptr| {
return GetOrPutGlobalPtrResult{ .found_existing = true, .value_ptr = ptr };
}
const gpa = self.base.allocator;
const global_index = try self.allocateGlobal();
const global_name = try gpa.dupe(u8, name);
_ = try self.resolver.put(gpa, global_name, global_index);
const ptr = &self.globals.items[global_index];
return GetOrPutGlobalPtrResult{ .found_existing = false, .value_ptr = ptr };
}

/// Returns atom if there is an atom referenced by the symbol described by `sym_loc` descriptor.
/// Returns null on failure.
pub fn getAtomForSymbol(self: *Coff, sym_loc: SymbolWithLoc) ?*Atom {

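The helpers added above split the old name-keyed globals map into two structures: globals, an array of SymbolWithLoc addressed by a stable global index, and resolver, a name-to-index map. A condensed, self-contained sketch of the get-or-put pattern they implement, simplified by dropping the free list and using u32 in place of SymbolWithLoc (a real linker would also dupe the name, since the map stores the slice):

const std = @import("std");

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    const allocator = gpa.allocator();

    var globals = std.ArrayList(u32).init(allocator); // index -> symbol (stand-in for SymbolWithLoc)
    defer globals.deinit();
    var resolver = std.StringHashMap(u32).init(allocator); // name -> index into globals
    defer resolver.deinit();

    const name = "_main";
    // getOrPutGlobalPtr: reuse the existing slot, or allocate a new global index and record it.
    const gop = try resolver.getOrPut(name);
    if (!gop.found_existing) {
        gop.value_ptr.* = @intCast(u32, globals.items.len);
        try globals.append(0); // placeholder symbol, filled in by the caller
    }
    const global_index = gop.value_ptr.*;
    globals.items[global_index] = 7; // caller stores the resolved symbol here

    std.debug.print("'{s}' -> global index {d}, symbol {d}\n", .{ name, global_index, globals.items[global_index] });
}
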
@ -111,13 +111,3 @@ pub fn addBaseRelocation(self: *Atom, coff_file: *Coff, offset: u32) !void {
|
||||
}
|
||||
try gop.value_ptr.append(gpa, offset);
|
||||
}
|
||||
|
||||
pub fn addBinding(self: *Atom, coff_file: *Coff, target: SymbolWithLoc) !void {
|
||||
const gpa = coff_file.base.allocator;
|
||||
log.debug(" (adding binding to target %{d} in %{d})", .{ target.sym_index, self.sym_index });
|
||||
const gop = try coff_file.bindings.getOrPut(gpa, self);
|
||||
if (!gop.found_existing) {
|
||||
gop.value_ptr.* = .{};
|
||||
}
|
||||
try gop.value_ptr.append(gpa, target);
|
||||
}
|
||||
|
||||
@ -131,17 +131,12 @@ la_symbol_ptr_section_index: ?u8 = null,
|
||||
data_section_index: ?u8 = null,
|
||||
|
||||
locals: std.ArrayListUnmanaged(macho.nlist_64) = .{},
|
||||
globals: std.StringArrayHashMapUnmanaged(SymbolWithLoc) = .{},
|
||||
// FIXME Jakub
|
||||
// TODO storing index into globals might be dangerous if we delete a global
|
||||
// while not having everything resolved. Actually, perhaps `unresolved`
|
||||
// should not be stored at the global scope? Is this possible?
|
||||
// Otherwise, audit if this can be a problem.
|
||||
// An alternative, which I still need to investigate for perf reasons is to
|
||||
// store all global names in an adapted with context strtab.
|
||||
globals: std.ArrayListUnmanaged(SymbolWithLoc) = .{},
|
||||
resolver: std.StringHashMapUnmanaged(u32) = .{},
|
||||
unresolved: std.AutoArrayHashMapUnmanaged(u32, bool) = .{},
|
||||
|
||||
locals_free_list: std.ArrayListUnmanaged(u32) = .{},
|
||||
globals_free_list: std.ArrayListUnmanaged(u32) = .{},
|
||||
|
||||
dyld_stub_binder_index: ?u32 = null,
|
||||
dyld_private_atom: ?*Atom = null,
|
||||
@ -1917,7 +1912,7 @@ fn allocateSpecialSymbols(self: *MachO) !void {
|
||||
"___dso_handle",
|
||||
"__mh_execute_header",
|
||||
}) |name| {
|
||||
const global = self.globals.get(name) orelse continue;
|
||||
const global = self.getGlobal(name) orelse continue;
|
||||
if (global.file != null) continue;
|
||||
const sym = self.getSymbolPtr(global);
|
||||
const seg = self.segments.items[self.text_segment_cmd_index.?];
|
||||
@ -2048,16 +2043,11 @@ fn writeAtomsIncremental(self: *MachO) !void {
|
||||
|
||||
pub fn createGotAtom(self: *MachO, target: SymbolWithLoc) !*Atom {
|
||||
const gpa = self.base.allocator;
|
||||
const sym_index = @intCast(u32, self.locals.items.len);
|
||||
try self.locals.append(gpa, .{
|
||||
.n_strx = 0,
|
||||
.n_type = macho.N_SECT,
|
||||
.n_sect = 0,
|
||||
.n_desc = 0,
|
||||
.n_value = 0,
|
||||
});
|
||||
|
||||
const sym_index = try self.allocateSymbol();
|
||||
const atom = try MachO.createEmptyAtom(gpa, sym_index, @sizeOf(u64), 3);
|
||||
const sym = atom.getSymbolPtr(self);
|
||||
sym.n_type = macho.N_SECT;
|
||||
|
||||
try atom.relocs.append(gpa, .{
|
||||
.offset = 0,
|
||||
.target = target,
|
||||
@ -2074,7 +2064,7 @@ pub fn createGotAtom(self: *MachO, target: SymbolWithLoc) !*Atom {
|
||||
|
||||
const target_sym = self.getSymbol(target);
|
||||
if (target_sym.undf()) {
|
||||
const global = self.globals.get(self.getSymbolName(target)).?;
|
||||
const global = self.getGlobal(self.getSymbolName(target)).?;
|
||||
try atom.bindings.append(gpa, .{
|
||||
.target = global,
|
||||
.offset = 0,
|
||||
@ -2093,20 +2083,15 @@ pub fn createGotAtom(self: *MachO, target: SymbolWithLoc) !*Atom {
|
||||
|
||||
pub fn createTlvPtrAtom(self: *MachO, target: SymbolWithLoc) !*Atom {
|
||||
const gpa = self.base.allocator;
|
||||
const sym_index = @intCast(u32, self.locals.items.len);
|
||||
try self.locals.append(gpa, .{
|
||||
.n_strx = 0,
|
||||
.n_type = macho.N_SECT,
|
||||
.n_sect = 0,
|
||||
.n_desc = 0,
|
||||
.n_value = 0,
|
||||
});
|
||||
|
||||
const sym_index = try self.allocateSymbol();
|
||||
const atom = try MachO.createEmptyAtom(gpa, sym_index, @sizeOf(u64), 3);
|
||||
const sym = atom.getSymbolPtr(self);
|
||||
sym.n_type = macho.N_SECT;
|
||||
|
||||
const target_sym = self.getSymbol(target);
|
||||
assert(target_sym.undf());
|
||||
|
||||
const global = self.globals.get(self.getSymbolName(target)).?;
|
||||
const global = self.getGlobal(self.getSymbolName(target)).?;
|
||||
try atom.bindings.append(gpa, .{
|
||||
.target = global,
|
||||
.offset = 0,
|
||||
@ -2130,15 +2115,10 @@ fn createDyldPrivateAtom(self: *MachO) !void {
|
||||
if (self.dyld_private_atom != null) return;
|
||||
|
||||
const gpa = self.base.allocator;
|
||||
const sym_index = @intCast(u32, self.locals.items.len);
|
||||
try self.locals.append(gpa, .{
|
||||
.n_strx = 0,
|
||||
.n_type = macho.N_SECT,
|
||||
.n_sect = 0,
|
||||
.n_desc = 0,
|
||||
.n_value = 0,
|
||||
});
|
||||
const sym_index = try self.allocateSymbol();
|
||||
const atom = try MachO.createEmptyAtom(gpa, sym_index, @sizeOf(u64), 3);
|
||||
const sym = atom.getSymbolPtr(self);
|
||||
sym.n_type = macho.N_SECT;
|
||||
self.dyld_private_atom = atom;
|
||||
|
||||
try self.allocateAtomCommon(atom, self.data_section_index.?);
|
||||
@ -2163,15 +2143,11 @@ fn createStubHelperPreambleAtom(self: *MachO) !void {
|
||||
.aarch64 => 2,
|
||||
else => unreachable,
|
||||
};
|
||||
const sym_index = @intCast(u32, self.locals.items.len);
|
||||
try self.locals.append(gpa, .{
|
||||
.n_strx = 0,
|
||||
.n_type = macho.N_SECT,
|
||||
.n_sect = 0,
|
||||
.n_desc = 0,
|
||||
.n_value = 0,
|
||||
});
|
||||
const sym_index = try self.allocateSymbol();
|
||||
const atom = try MachO.createEmptyAtom(gpa, sym_index, size, alignment);
|
||||
const sym = atom.getSymbolPtr(self);
|
||||
sym.n_type = macho.N_SECT;
|
||||
|
||||
const dyld_private_sym_index = self.dyld_private_atom.?.sym_index;
|
||||
switch (arch) {
|
||||
.x86_64 => {
|
||||
@ -2288,15 +2264,11 @@ pub fn createStubHelperAtom(self: *MachO) !*Atom {
|
||||
.aarch64 => 2,
|
||||
else => unreachable,
|
||||
};
|
||||
const sym_index = @intCast(u32, self.locals.items.len);
|
||||
try self.locals.append(gpa, .{
|
||||
.n_strx = 0,
|
||||
.n_type = macho.N_SECT,
|
||||
.n_sect = 0,
|
||||
.n_desc = 0,
|
||||
.n_value = 0,
|
||||
});
|
||||
const sym_index = try self.allocateSymbol();
|
||||
const atom = try MachO.createEmptyAtom(gpa, sym_index, stub_size, alignment);
|
||||
const sym = atom.getSymbolPtr(self);
|
||||
sym.n_type = macho.N_SECT;
|
||||
|
||||
try atom.relocs.ensureTotalCapacity(gpa, 1);
|
||||
|
||||
switch (arch) {
|
||||
@ -2352,15 +2324,11 @@ pub fn createStubHelperAtom(self: *MachO) !*Atom {
|
||||
|
||||
pub fn createLazyPointerAtom(self: *MachO, stub_sym_index: u32, target: SymbolWithLoc) !*Atom {
|
||||
const gpa = self.base.allocator;
|
||||
const sym_index = @intCast(u32, self.locals.items.len);
|
||||
try self.locals.append(gpa, .{
|
||||
.n_strx = 0,
|
||||
.n_type = macho.N_SECT,
|
||||
.n_sect = 0,
|
||||
.n_desc = 0,
|
||||
.n_value = 0,
|
||||
});
|
||||
const sym_index = try self.allocateSymbol();
|
||||
const atom = try MachO.createEmptyAtom(gpa, sym_index, @sizeOf(u64), 3);
|
||||
const sym = atom.getSymbolPtr(self);
|
||||
sym.n_type = macho.N_SECT;
|
||||
|
||||
try atom.relocs.append(gpa, .{
|
||||
.offset = 0,
|
||||
.target = .{ .sym_index = stub_sym_index, .file = null },
|
||||
@ -2376,7 +2344,7 @@ pub fn createLazyPointerAtom(self: *MachO, stub_sym_index: u32, target: SymbolWi
|
||||
});
|
||||
try atom.rebases.append(gpa, 0);
|
||||
|
||||
const global = self.globals.get(self.getSymbolName(target)).?;
|
||||
const global = self.getGlobal(self.getSymbolName(target)).?;
|
||||
try atom.lazy_bindings.append(gpa, .{
|
||||
.target = global,
|
||||
.offset = 0,
|
||||
@ -2403,15 +2371,11 @@ pub fn createStubAtom(self: *MachO, laptr_sym_index: u32) !*Atom {
|
||||
.aarch64 => 3 * @sizeOf(u32),
|
||||
else => unreachable, // unhandled architecture type
|
||||
};
|
||||
const sym_index = @intCast(u32, self.locals.items.len);
|
||||
try self.locals.append(gpa, .{
|
||||
.n_strx = 0,
|
||||
.n_type = macho.N_SECT,
|
||||
.n_sect = 0,
|
||||
.n_desc = 0,
|
||||
.n_value = 0,
|
||||
});
|
||||
const sym_index = try self.allocateSymbol();
|
||||
const atom = try MachO.createEmptyAtom(gpa, sym_index, stub_size, alignment);
|
||||
const sym = atom.getSymbolPtr(self);
|
||||
sym.n_type = macho.N_SECT;
|
||||
|
||||
switch (arch) {
|
||||
.x86_64 => {
|
||||
// jmp
|
||||
@ -2472,7 +2436,7 @@ pub fn createStubAtom(self: *MachO, laptr_sym_index: u32) !*Atom {
|
||||
fn createTentativeDefAtoms(self: *MachO) !void {
|
||||
const gpa = self.base.allocator;
|
||||
|
||||
for (self.globals.values()) |global| {
|
||||
for (self.globals.items) |global| {
|
||||
const sym = self.getSymbolPtr(global);
|
||||
if (!sym.tentative()) continue;
|
||||
|
||||
@ -2516,51 +2480,44 @@ fn createTentativeDefAtoms(self: *MachO) !void {
|
||||
|
||||
fn createMhExecuteHeaderSymbol(self: *MachO) !void {
|
||||
if (self.base.options.output_mode != .Exe) return;
|
||||
if (self.globals.get("__mh_execute_header")) |global| {
|
||||
if (self.getGlobal("__mh_execute_header")) |global| {
|
||||
const sym = self.getSymbol(global);
|
||||
if (!sym.undf() and !(sym.pext() or sym.weakDef())) return;
|
||||
}
|
||||
|
||||
const gpa = self.base.allocator;
|
||||
const n_strx = try self.strtab.insert(gpa, "__mh_execute_header");
|
||||
const sym_index = @intCast(u32, self.locals.items.len);
|
||||
try self.locals.append(gpa, .{
|
||||
.n_strx = n_strx,
|
||||
const sym_index = try self.allocateSymbol();
|
||||
const sym_loc = SymbolWithLoc{ .sym_index = sym_index, .file = null };
|
||||
const sym = self.getSymbolPtr(sym_loc);
|
||||
sym.* = .{
|
||||
.n_strx = try self.strtab.insert(gpa, "__mh_execute_header"),
|
||||
.n_type = macho.N_SECT | macho.N_EXT,
|
||||
.n_sect = 0,
|
||||
.n_desc = macho.REFERENCED_DYNAMICALLY,
|
||||
.n_value = 0,
|
||||
});
|
||||
|
||||
const name = try gpa.dupe(u8, "__mh_execute_header");
|
||||
const gop = try self.globals.getOrPut(gpa, name);
|
||||
defer if (gop.found_existing) gpa.free(name);
|
||||
gop.value_ptr.* = .{
|
||||
.sym_index = sym_index,
|
||||
.file = null,
|
||||
};
|
||||
|
||||
const gop = try self.getOrPutGlobalPtr("__mh_execute_header");
|
||||
gop.value_ptr.* = sym_loc;
|
||||
}
|
||||
|
||||
fn createDsoHandleSymbol(self: *MachO) !void {
|
||||
const global = self.globals.getPtr("___dso_handle") orelse return;
|
||||
const sym = self.getSymbolPtr(global.*);
|
||||
if (!sym.undf()) return;
|
||||
const global = self.getGlobalPtr("___dso_handle") orelse return;
|
||||
if (!self.getSymbol(global.*).undf()) return;
|
||||
|
||||
const gpa = self.base.allocator;
|
||||
const n_strx = try self.strtab.insert(gpa, "___dso_handle");
|
||||
const sym_index = @intCast(u32, self.locals.items.len);
|
||||
try self.locals.append(gpa, .{
|
||||
.n_strx = n_strx,
|
||||
const sym_index = try self.allocateSymbol();
|
||||
const sym_loc = SymbolWithLoc{ .sym_index = sym_index, .file = null };
|
||||
const sym = self.getSymbolPtr(sym_loc);
|
||||
sym.* = .{
|
||||
.n_strx = try self.strtab.insert(gpa, "___dso_handle"),
|
||||
.n_type = macho.N_SECT | macho.N_EXT,
|
||||
.n_sect = 0,
|
||||
.n_desc = macho.N_WEAK_DEF,
|
||||
.n_value = 0,
|
||||
});
|
||||
global.* = .{
|
||||
.sym_index = sym_index,
|
||||
.file = null,
|
||||
};
|
||||
_ = self.unresolved.swapRemove(@intCast(u32, self.globals.getIndex("___dso_handle").?));
|
||||
global.* = sym_loc;
|
||||
_ = self.unresolved.swapRemove(self.getGlobalIndex("___dso_handle").?);
|
||||
}
|
||||
|
||||
fn resolveGlobalSymbol(self: *MachO, current: SymbolWithLoc) !void {
|
||||
@ -2568,19 +2525,14 @@ fn resolveGlobalSymbol(self: *MachO, current: SymbolWithLoc) !void {
|
||||
const sym = self.getSymbol(current);
|
||||
const sym_name = self.getSymbolName(current);
|
||||
|
||||
const name = try gpa.dupe(u8, sym_name);
|
||||
const global_index = @intCast(u32, self.globals.values().len);
|
||||
const gop = try self.globals.getOrPut(gpa, name);
|
||||
defer if (gop.found_existing) gpa.free(name);
|
||||
|
||||
const gop = try self.getOrPutGlobalPtr(sym_name);
|
||||
if (!gop.found_existing) {
|
||||
gop.value_ptr.* = current;
|
||||
if (sym.undf() and !sym.tentative()) {
|
||||
try self.unresolved.putNoClobber(gpa, global_index, false);
|
||||
try self.unresolved.putNoClobber(gpa, self.getGlobalIndex(sym_name).?, false);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
const global = gop.value_ptr.*;
|
||||
const global_sym = self.getSymbol(global);
|
||||
|
||||
@ -2619,7 +2571,7 @@ fn resolveGlobalSymbol(self: *MachO, current: SymbolWithLoc) !void {
|
||||
}
|
||||
if (sym.undf() and !sym.tentative()) return;
|
||||
|
||||
_ = self.unresolved.swapRemove(@intCast(u32, self.globals.getIndex(name).?));
|
||||
_ = self.unresolved.swapRemove(self.getGlobalIndex(sym_name).?);
|
||||
|
||||
gop.value_ptr.* = current;
|
||||
}
|
||||
@ -2664,7 +2616,7 @@ fn resolveSymbolsInObject(self: *MachO, object_id: u16) !void {
|
||||
const sym_loc = SymbolWithLoc{ .sym_index = sym_index, .file = object_id };
|
||||
self.resolveGlobalSymbol(sym_loc) catch |err| switch (err) {
|
||||
error.MultipleSymbolDefinitions => {
|
||||
const global = self.globals.get(sym_name).?;
|
||||
const global = self.getGlobal(sym_name).?;
|
||||
log.err("symbol '{s}' defined multiple times", .{sym_name});
|
||||
if (global.file) |file| {
|
||||
log.err(" first definition in '{s}'", .{self.objects.items[file].name});
|
||||
@ -2684,7 +2636,8 @@ fn resolveSymbolsInArchives(self: *MachO) !void {
|
||||
const cpu_arch = self.base.options.target.cpu.arch;
|
||||
var next_sym: usize = 0;
|
||||
loop: while (next_sym < self.unresolved.count()) {
|
||||
const global = self.globals.values()[self.unresolved.keys()[next_sym]];
|
||||
const global_index = self.unresolved.keys()[next_sym];
|
||||
const global = self.globals.items[global_index];
|
||||
const sym_name = self.getSymbolName(global);
|
||||
|
||||
for (self.archives.items) |archive| {
|
||||
@ -2710,10 +2663,11 @@ fn resolveSymbolsInArchives(self: *MachO) !void {
|
||||
fn resolveSymbolsInDylibs(self: *MachO) !void {
|
||||
if (self.dylibs.items.len == 0) return;
|
||||
|
||||
const gpa = self.base.allocator;
|
||||
var next_sym: usize = 0;
|
||||
loop: while (next_sym < self.unresolved.count()) {
|
||||
const global_index = self.unresolved.keys()[next_sym];
|
||||
const global = self.globals.values()[global_index];
|
||||
const global = self.globals.items[global_index];
|
||||
const sym = self.getSymbolPtr(global);
|
||||
const sym_name = self.getSymbolName(global);
|
||||
|
||||
@ -2722,7 +2676,7 @@ fn resolveSymbolsInDylibs(self: *MachO) !void {
|
||||
|
||||
const dylib_id = @intCast(u16, id);
|
||||
if (!self.referenced_dylibs.contains(dylib_id)) {
|
||||
try self.referenced_dylibs.putNoClobber(self.base.allocator, dylib_id, {});
|
||||
try self.referenced_dylibs.putNoClobber(gpa, dylib_id, {});
|
||||
}
|
||||
|
||||
const ordinal = self.referenced_dylibs.getIndex(dylib_id) orelse unreachable;
|
||||
@ -2760,7 +2714,7 @@ fn resolveSymbolsAtLoading(self: *MachO) !void {
|
||||
var next_sym: usize = 0;
|
||||
while (next_sym < self.unresolved.count()) {
|
||||
const global_index = self.unresolved.keys()[next_sym];
|
||||
const global = self.globals.values()[global_index];
|
||||
const global = self.globals.items[global_index];
|
||||
const sym = self.getSymbolPtr(global);
|
||||
const sym_name = self.getSymbolName(global);
|
||||
|
||||
@ -2800,26 +2754,27 @@ fn resolveDyldStubBinder(self: *MachO) !void {
|
||||
if (self.unresolved.count() == 0) return; // no need for a stub binder if we don't have any imports
|
||||
|
||||
const gpa = self.base.allocator;
|
||||
const n_strx = try self.strtab.insert(gpa, "dyld_stub_binder");
|
||||
const sym_index = @intCast(u32, self.locals.items.len);
|
||||
try self.locals.append(gpa, .{
|
||||
.n_strx = n_strx,
|
||||
const sym_index = try self.allocateSymbol();
|
||||
const sym_loc = SymbolWithLoc{ .sym_index = sym_index, .file = null };
|
||||
const sym = self.getSymbolPtr(sym_loc);
|
||||
const sym_name = "dyld_stub_binder";
|
||||
sym.* = .{
|
||||
.n_strx = try self.strtab.insert(gpa, sym_name),
|
||||
.n_type = macho.N_UNDF,
|
||||
.n_sect = 0,
|
||||
.n_desc = 0,
|
||||
.n_value = 0,
|
||||
});
|
||||
const sym_name = try gpa.dupe(u8, "dyld_stub_binder");
|
||||
const global = SymbolWithLoc{ .sym_index = sym_index, .file = null };
|
||||
try self.globals.putNoClobber(gpa, sym_name, global);
|
||||
const sym = &self.locals.items[sym_index];
|
||||
};
|
||||
const gop = try self.getOrPutGlobalPtr(sym_name);
|
||||
gop.value_ptr.* = sym_loc;
|
||||
const global = gop.value_ptr.*;
|
||||
|
||||
for (self.dylibs.items) |dylib, id| {
|
||||
if (!dylib.symbols.contains(sym_name)) continue;
|
||||
|
||||
const dylib_id = @intCast(u16, id);
|
||||
if (!self.referenced_dylibs.contains(dylib_id)) {
|
||||
try self.referenced_dylibs.putNoClobber(self.base.allocator, dylib_id, {});
|
||||
try self.referenced_dylibs.putNoClobber(gpa, dylib_id, {});
|
||||
}
|
||||
|
||||
const ordinal = self.referenced_dylibs.getIndex(dylib_id) orelse unreachable;
|
||||
@ -3050,14 +3005,20 @@ pub fn deinit(self: *MachO) void {
|
||||
self.stubs_free_list.deinit(gpa);
|
||||
self.stubs_table.deinit(gpa);
|
||||
self.strtab.deinit(gpa);
|
||||
|
||||
self.locals.deinit(gpa);
|
||||
self.globals.deinit(gpa);
|
||||
self.locals_free_list.deinit(gpa);
|
||||
self.globals_free_list.deinit(gpa);
|
||||
self.unresolved.deinit(gpa);
|
||||
|
||||
for (self.globals.keys()) |key| {
|
||||
gpa.free(key);
|
||||
{
|
||||
var it = self.resolver.keyIterator();
|
||||
while (it.next()) |key_ptr| {
|
||||
gpa.free(key_ptr.*);
|
||||
}
|
||||
self.resolver.deinit(gpa);
|
||||
}
|
||||
self.globals.deinit(gpa);
|
||||
|
||||
for (self.objects.items) |*object| {
|
||||
object.deinit(gpa);
|
||||
@@ -3211,6 +3172,29 @@ fn allocateSymbol(self: *MachO) !u32 {
return index;
}

fn allocateGlobal(self: *MachO) !u32 {
try self.globals.ensureUnusedCapacity(self.base.allocator, 1);

const index = blk: {
if (self.globals_free_list.popOrNull()) |index| {
log.debug(" (reusing global index {d})", .{index});
break :blk index;
} else {
log.debug(" (allocating symbol index {d})", .{self.globals.items.len});
const index = @intCast(u32, self.globals.items.len);
_ = self.globals.addOneAssumeCapacity();
break :blk index;
}
};

self.globals.items[index] = .{
.sym_index = 0,
.file = null,
};

return index;
}

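allocateGlobal above reuses slots from globals_free_list before growing the array, so existing global indices stay stable when globals are deleted. A minimal sketch of that reuse-or-grow allocation with generic ArrayLists standing in for the linker's tables:

const std = @import("std");

fn allocateSlot(list: *std.ArrayList(u32), free_list: *std.ArrayList(u32)) !u32 {
    try list.ensureUnusedCapacity(1);
    if (free_list.popOrNull()) |index| {
        return index; // reuse a previously freed slot; other indices stay valid
    }
    const index = @intCast(u32, list.items.len);
    _ = list.addOneAssumeCapacity();
    return index;
}

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    const allocator = gpa.allocator();
    var globals = std.ArrayList(u32).init(allocator);
    defer globals.deinit();
    var free_list = std.ArrayList(u32).init(allocator);
    defer free_list.deinit();

    const a = try allocateSlot(&globals, &free_list);
    try free_list.append(a); // "delete" the global, keeping its slot around
    const b = try allocateSlot(&globals, &free_list);
    std.debug.print("first = {d}, reused = {d}\n", .{ a, b });
}
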
pub fn allocateGotEntry(self: *MachO, target: SymbolWithLoc) !u32 {
|
||||
const gpa = self.base.allocator;
|
||||
try self.got_entries.ensureUnusedCapacity(gpa, 1);
|
||||
@ -3832,7 +3816,7 @@ pub fn updateDeclExports(
|
||||
|
||||
self.resolveGlobalSymbol(sym_loc) catch |err| switch (err) {
|
||||
error.MultipleSymbolDefinitions => {
|
||||
const global = self.globals.get(exp_name).?;
|
||||
const global = self.getGlobal(exp_name).?;
|
||||
if (sym_loc.sym_index != global.sym_index and global.file != null) {
|
||||
_ = try module.failed_exports.put(module.gpa, exp, try Module.ErrorMsg.create(
|
||||
gpa,
|
||||
@ -3869,11 +3853,13 @@ pub fn deleteExport(self: *MachO, exp: Export) void {
|
||||
};
|
||||
self.locals_free_list.append(gpa, sym_index) catch {};
|
||||
|
||||
if (self.globals.get(sym_name)) |global| blk: {
|
||||
if (global.sym_index != sym_index) break :blk;
|
||||
if (global.file != null) break :blk;
|
||||
const kv = self.globals.fetchSwapRemove(sym_name);
|
||||
gpa.free(kv.?.key);
|
||||
if (self.resolver.fetchRemove(sym_name)) |entry| {
|
||||
defer gpa.free(entry.key);
|
||||
self.globals_free_list.append(gpa, entry.value) catch {};
|
||||
self.globals.items[entry.value] = .{
|
||||
.sym_index = 0,
|
||||
.file = null,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@ -4864,32 +4850,26 @@ pub fn addAtomToSection(self: *MachO, atom: *Atom, sect_id: u8) !void {
|
||||
|
||||
pub fn getGlobalSymbol(self: *MachO, name: []const u8) !u32 {
|
||||
const gpa = self.base.allocator;
|
||||
|
||||
const sym_name = try std.fmt.allocPrint(gpa, "_{s}", .{name});
|
||||
const global_index = @intCast(u32, self.globals.values().len);
|
||||
const gop = try self.globals.getOrPut(gpa, sym_name);
|
||||
defer if (gop.found_existing) gpa.free(sym_name);
|
||||
defer gpa.free(sym_name);
|
||||
const gop = try self.getOrPutGlobalPtr(sym_name);
|
||||
const global_index = self.getGlobalIndex(sym_name).?;
|
||||
|
||||
if (gop.found_existing) {
|
||||
// TODO audit this: can we ever reference anything from outside the Zig module?
|
||||
assert(gop.value_ptr.file == null);
|
||||
return gop.value_ptr.sym_index;
|
||||
return global_index;
|
||||
}
|
||||
|
||||
const sym_index = @intCast(u32, self.locals.items.len);
|
||||
try self.locals.append(gpa, .{
|
||||
.n_strx = try self.strtab.insert(gpa, sym_name),
|
||||
.n_type = macho.N_UNDF,
|
||||
.n_sect = 0,
|
||||
.n_desc = 0,
|
||||
.n_value = 0,
|
||||
});
|
||||
gop.value_ptr.* = .{
|
||||
.sym_index = sym_index,
|
||||
.file = null,
|
||||
};
|
||||
const sym_index = try self.allocateSymbol();
|
||||
const sym_loc = SymbolWithLoc{ .sym_index = sym_index, .file = null };
|
||||
gop.value_ptr.* = sym_loc;
|
||||
|
||||
const sym = self.getSymbolPtr(sym_loc);
|
||||
sym.n_strx = try self.strtab.insert(gpa, sym_name);
|
||||
|
||||
try self.unresolved.putNoClobber(gpa, global_index, true);
|
||||
|
||||
return sym_index;
|
||||
return global_index;
|
||||
}
|
||||
|
||||
fn getSegmentAllocBase(self: MachO, indices: []const ?u8) struct { vmaddr: u64, fileoff: u64 } {
|
||||
@ -5055,7 +5035,7 @@ fn writeDyldInfoData(self: *MachO, ncmds: *u32, lc_writer: anytype) !void {
|
||||
if (self.base.options.output_mode == .Exe) {
|
||||
for (&[_]SymbolWithLoc{
|
||||
try self.getEntryPoint(),
|
||||
self.globals.get("__mh_execute_header").?,
|
||||
self.getGlobal("__mh_execute_header").?,
|
||||
}) |global| {
|
||||
const sym = self.getSymbol(global);
|
||||
const sym_name = self.getSymbolName(global);
|
||||
@ -5068,7 +5048,7 @@ fn writeDyldInfoData(self: *MachO, ncmds: *u32, lc_writer: anytype) !void {
|
||||
}
|
||||
} else {
|
||||
assert(self.base.options.output_mode == .Lib);
|
||||
for (self.globals.values()) |global| {
|
||||
for (self.globals.items) |global| {
|
||||
const sym = self.getSymbol(global);
|
||||
|
||||
if (sym.undf()) continue;
|
||||
@ -5271,9 +5251,9 @@ fn writeFunctionStarts(self: *MachO, ncmds: *u32, lc_writer: anytype) !void {
// We need to sort by address first
var addresses = std.ArrayList(u64).init(gpa);
defer addresses.deinit();
try addresses.ensureTotalCapacityPrecise(self.globals.count());
try addresses.ensureTotalCapacityPrecise(self.globals.items.len);

for (self.globals.values()) |global| {
for (self.globals.items) |global| {
const sym = self.getSymbol(global);
if (sym.undf()) continue;
if (sym.n_desc == N_DESC_GCED) continue;
@ -5453,7 +5433,7 @@ fn writeSymtab(self: *MachO, lc: *macho.symtab_command) !SymtabCtx {
if (sym.n_desc == N_DESC_GCED) continue; // GCed, skip
const sym_loc = SymbolWithLoc{ .sym_index = @intCast(u32, sym_id), .file = null };
if (self.symbolIsTemp(sym_loc)) continue; // local temp symbol, skip
if (self.globals.contains(self.getSymbolName(sym_loc))) continue; // global symbol is either an export or import, skip
if (self.getGlobal(self.getSymbolName(sym_loc)) != null) continue; // global symbol is either an export or import, skip
try locals.append(sym);
}

@ -5463,7 +5443,7 @@ fn writeSymtab(self: *MachO, lc: *macho.symtab_command) !SymtabCtx {
if (sym.n_desc == N_DESC_GCED) continue; // GCed, skip
const sym_loc = SymbolWithLoc{ .sym_index = @intCast(u32, sym_id), .file = @intCast(u32, object_id) };
if (self.symbolIsTemp(sym_loc)) continue; // local temp symbol, skip
if (self.globals.contains(self.getSymbolName(sym_loc))) continue; // global symbol is either an export or import, skip
if (self.getGlobal(self.getSymbolName(sym_loc)) != null) continue; // global symbol is either an export or import, skip
var out_sym = sym;
out_sym.n_strx = try self.strtab.insert(gpa, self.getSymbolName(sym_loc));
try locals.append(out_sym);
@ -5477,7 +5457,7 @@ fn writeSymtab(self: *MachO, lc: *macho.symtab_command) !SymtabCtx {
var exports = std.ArrayList(macho.nlist_64).init(gpa);
defer exports.deinit();

for (self.globals.values()) |global| {
for (self.globals.items) |global| {
const sym = self.getSymbol(global);
if (sym.undf()) continue; // import, skip
if (sym.n_desc == N_DESC_GCED) continue; // GCed, skip
@ -5491,7 +5471,7 @@ fn writeSymtab(self: *MachO, lc: *macho.symtab_command) !SymtabCtx {

var imports_table = std.AutoHashMap(SymbolWithLoc, u32).init(gpa);

for (self.globals.values()) |global| {
for (self.globals.items) |global| {
const sym = self.getSymbol(global);
if (sym.n_strx == 0) continue; // no name, skip
if (!sym.undf()) continue; // not an import, skip
@ -5798,6 +5778,49 @@ pub fn getSymbolName(self: *MachO, sym_with_loc: SymbolWithLoc) []const u8 {
}
}

/// Returns pointer to the global entry for `name` if one exists.
pub fn getGlobalPtr(self: *MachO, name: []const u8) ?*SymbolWithLoc {
const global_index = self.resolver.get(name) orelse return null;
return &self.globals.items[global_index];
}

/// Returns the global entry for `name` if one exists.
pub fn getGlobal(self: *const MachO, name: []const u8) ?SymbolWithLoc {
const global_index = self.resolver.get(name) orelse return null;
return self.globals.items[global_index];
}

/// Returns the index of the global entry for `name` if one exists.
pub fn getGlobalIndex(self: *const MachO, name: []const u8) ?u32 {
return self.resolver.get(name);
}

/// Returns global entry at `index`.
pub fn getGlobalByIndex(self: *const MachO, index: u32) SymbolWithLoc {
assert(index < self.globals.items.len);
return self.globals.items[index];
}

const GetOrPutGlobalPtrResult = struct {
found_existing: bool,
value_ptr: *SymbolWithLoc,
};

/// Return pointer to the global entry for `name` if one exists.
/// Puts a new global entry for `name` if one doesn't exist, and
/// returns a pointer to it.
pub fn getOrPutGlobalPtr(self: *MachO, name: []const u8) !GetOrPutGlobalPtrResult {
if (self.getGlobalPtr(name)) |ptr| {
return GetOrPutGlobalPtrResult{ .found_existing = true, .value_ptr = ptr };
}
const gpa = self.base.allocator;
const global_index = try self.allocateGlobal();
const global_name = try gpa.dupe(u8, name);
_ = try self.resolver.put(gpa, global_name, global_index);
const ptr = &self.globals.items[global_index];
return GetOrPutGlobalPtrResult{ .found_existing = false, .value_ptr = ptr };
}

/// Returns atom if there is an atom referenced by the symbol described by `sym_with_loc` descriptor.
/// Returns null on failure.
pub fn getAtomForSymbol(self: *MachO, sym_with_loc: SymbolWithLoc) ?*Atom {
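
As a rough usage sketch, the helpers added above compose with the symbol-allocation sequence near the top of this diff roughly as follows. This is a sketch only, assumed to live alongside the functions above in the MachO struct; the wrapper name ensureGlobal is illustrative and not part of the change:

    // Hedged sketch: mirrors the gop/allocateSymbol sequence shown earlier in
    // this diff. `ensureGlobal` is a hypothetical wrapper, not a function
    // introduced by this commit.
    fn ensureGlobal(self: *MachO, sym_name: []const u8) !u32 {
        const gpa = self.base.allocator;
        const gop = try self.getOrPutGlobalPtr(sym_name);
        const global_index = self.getGlobalIndex(sym_name).?;
        if (!gop.found_existing) {
            const sym_index = try self.allocateSymbol();
            gop.value_ptr.* = SymbolWithLoc{ .sym_index = sym_index, .file = null };
            const sym = self.getSymbolPtr(gop.value_ptr.*);
            sym.n_strx = try self.strtab.insert(gpa, sym_name);
            try self.unresolved.putNoClobber(gpa, global_index, true);
        }
        return global_index;
    }
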
@ -5834,7 +5857,7 @@ pub fn getTlvPtrAtomForSymbol(self: *MachO, sym_with_loc: SymbolWithLoc) ?*Atom
/// Asserts output mode is executable.
pub fn getEntryPoint(self: MachO) error{MissingMainEntrypoint}!SymbolWithLoc {
const entry_name = self.base.options.entry orelse "_main";
const global = self.globals.get(entry_name) orelse {
const global = self.getGlobal(entry_name) orelse {
log.err("entrypoint '{s}' not found", .{entry_name});
return error.MissingMainEntrypoint;
};
@ -6342,9 +6365,9 @@ fn logSymtab(self: *MachO) void {
}

log.debug("globals table:", .{});
for (self.globals.keys()) |name, id| {
const value = self.globals.values()[id];
log.debug(" {s} => %{d} in object({?d})", .{ name, value.sym_index, value.file });
for (self.globals.items) |global| {
const name = self.getSymbolName(global);
log.debug(" {s} => %{d} in object({?d})", .{ name, global.sym_index, global.file });
}

log.debug("GOT entries:", .{});

@ -272,7 +272,7 @@ pub fn parseRelocs(self: *Atom, relocs: []align(1) const macho.relocation_info,
subtractor = sym_loc;
} else {
const sym_name = context.macho_file.getSymbolName(sym_loc);
subtractor = context.macho_file.globals.get(sym_name).?;
subtractor = context.macho_file.getGlobal(sym_name).?;
}
// Verify that *_SUBTRACTOR is followed by *_UNSIGNED.
if (relocs.len <= i + 1) {
@ -339,7 +339,7 @@ pub fn parseRelocs(self: *Atom, relocs: []align(1) const macho.relocation_info,
break :target sym_loc;
} else {
const sym_name = context.macho_file.getSymbolName(sym_loc);
break :target context.macho_file.globals.get(sym_name).?;
break :target context.macho_file.getGlobal(sym_name).?;
}
};
const offset = @intCast(u32, rel.r_address - context.base_offset);
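
The two parseRelocs hunks above follow the same shape: a symbol defined in the current context is used directly as the subtractor or relocation target, anything else is first canonicalized by name through getGlobal. A minimal sketch of that shape, where isLocallyDefined stands in for whatever check the surrounding code performs (it is an assumption, not a function from this diff):

    // Hedged sketch of the canonicalization pattern from parseRelocs.
    // `isLocallyDefined` is a hypothetical stand-in for the real check.
    fn canonicalTarget(macho_file: *MachO, sym_loc: MachO.SymbolWithLoc) MachO.SymbolWithLoc {
        if (isLocallyDefined(macho_file, sym_loc)) return sym_loc;
        const sym_name = macho_file.getSymbolName(sym_loc);
        return macho_file.getGlobal(sym_name).?;
    }
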
@ -579,7 +579,7 @@ pub fn resolveRelocs(self: *Atom, macho_file: *MachO) !void {
// If there is no atom for target, we still need to check for special, atom-less
// symbols such as `___dso_handle`.
const target_name = macho_file.getSymbolName(rel.target);
assert(macho_file.globals.contains(target_name));
assert(macho_file.getGlobal(target_name) != null);
const atomless_sym = macho_file.getSymbol(rel.target);
log.debug(" | atomless target '{s}'", .{target_name});
break :blk atomless_sym.n_value;

@ -480,7 +480,7 @@ fn writeSymtab(self: *DebugSymbols, lc: *macho.symtab_command) !void {
if (sym.n_desc == MachO.N_DESC_GCED) continue; // GCed, skip
const sym_loc = MachO.SymbolWithLoc{ .sym_index = @intCast(u32, sym_id), .file = null };
if (self.base.symbolIsTemp(sym_loc)) continue; // local temp symbol, skip
if (self.base.globals.contains(self.base.getSymbolName(sym_loc))) continue; // global symbol is either an export or import, skip
if (self.base.getGlobal(self.base.getSymbolName(sym_loc)) != null) continue; // global symbol is either an export or import, skip
var out_sym = sym;
out_sym.n_strx = try self.strtab.insert(gpa, self.base.getSymbolName(sym_loc));
try locals.append(out_sym);
@ -489,7 +489,7 @@ fn writeSymtab(self: *DebugSymbols, lc: *macho.symtab_command) !void {
var exports = std.ArrayList(macho.nlist_64).init(gpa);
defer exports.deinit();

for (self.base.globals.values()) |global| {
for (self.base.globals.items) |global| {
const sym = self.base.getSymbol(global);
if (sym.undf()) continue; // import, skip
if (sym.n_desc == MachO.N_DESC_GCED) continue; // GCed, skip

@ -62,7 +62,7 @@ fn collectRoots(roots: *std.AutoHashMap(*Atom, void), macho_file: *MachO) !void
else => |other| {
assert(other == .Lib);
// Add exports as GC roots
for (macho_file.globals.values()) |global| {
for (macho_file.globals.items) |global| {
const sym = macho_file.getSymbol(global);
if (!sym.sect()) continue;
const atom = macho_file.getAtomForSymbol(global) orelse {
@ -77,7 +77,7 @@ fn collectRoots(roots: *std.AutoHashMap(*Atom, void), macho_file: *MachO) !void
}

// TODO just a temp until we learn how to parse unwind records
if (macho_file.globals.get("___gxx_personality_v0")) |global| {
if (macho_file.getGlobal("___gxx_personality_v0")) |global| {
if (macho_file.getAtomForSymbol(global)) |atom| {
_ = try roots.getOrPut(atom);
log.debug("adding root", .{});