Merge pull request #17146 from ziglang/elf-linker

elf: upstream zld/ELF functionality, part 2
This commit is contained in:
Jakub Konka 2023-09-14 01:45:23 +02:00 committed by GitHub
commit 8fb4a4efba
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
15 changed files with 954 additions and 47 deletions

View File

@ -555,6 +555,7 @@ set(ZIG_STAGE2_SOURCES
"${CMAKE_SOURCE_DIR}/src/arch/wasm/Emit.zig"
"${CMAKE_SOURCE_DIR}/src/arch/wasm/Mir.zig"
"${CMAKE_SOURCE_DIR}/src/arch/x86_64/CodeGen.zig"
"${CMAKE_SOURCE_DIR}/src/arch/x86_64/Disassembler.zig"
"${CMAKE_SOURCE_DIR}/src/arch/x86_64/Emit.zig"
"${CMAKE_SOURCE_DIR}/src/arch/x86_64/Encoding.zig"
"${CMAKE_SOURCE_DIR}/src/arch/x86_64/Mir.zig"
@ -584,6 +585,7 @@ set(ZIG_STAGE2_SOURCES
"${CMAKE_SOURCE_DIR}/src/link/Coff/Object.zig"
"${CMAKE_SOURCE_DIR}/src/link/Coff/lld.zig"
"${CMAKE_SOURCE_DIR}/src/link/Elf.zig"
"${CMAKE_SOURCE_DIR}/src/link/Elf/Archive.zig"
"${CMAKE_SOURCE_DIR}/src/link/Elf/Atom.zig"
"${CMAKE_SOURCE_DIR}/src/link/Elf/LinkerDefined.zig"
"${CMAKE_SOURCE_DIR}/src/link/Elf/Object.zig"

View File

@ -4316,7 +4316,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
if (self.bin_file.cast(link.File.Elf)) |elf_file| {
const sym_index = try elf_file.getOrCreateMetadataForDecl(func.owner_decl);
const sym = elf_file.symbol(sym_index);
_ = try sym.getOrCreateGotEntry(elf_file);
_ = try sym.getOrCreateGotEntry(sym_index, elf_file);
const got_addr = @as(u32, @intCast(sym.gotAddress(elf_file)));
try self.genSetReg(Type.usize, .x30, .{ .memory = got_addr });
} else if (self.bin_file.cast(link.File.MachO)) |macho_file| {

View File

@ -4296,7 +4296,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
if (self.bin_file.cast(link.File.Elf)) |elf_file| {
const sym_index = try elf_file.getOrCreateMetadataForDecl(func.owner_decl);
const sym = elf_file.symbol(sym_index);
_ = try sym.getOrCreateGotEntry(elf_file);
_ = try sym.getOrCreateGotEntry(sym_index, elf_file);
const got_addr = @as(u32, @intCast(sym.gotAddress(elf_file)));
try self.genSetReg(Type.usize, .lr, .{ .memory = got_addr });
} else if (self.bin_file.cast(link.File.MachO)) |_| {

View File

@ -1749,7 +1749,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
.func => |func| {
const sym_index = try elf_file.getOrCreateMetadataForDecl(func.owner_decl);
const sym = elf_file.symbol(sym_index);
_ = try sym.getOrCreateGotEntry(elf_file);
_ = try sym.getOrCreateGotEntry(sym_index, elf_file);
const got_addr = @as(u32, @intCast(sym.gotAddress(elf_file)));
try self.genSetReg(Type.usize, .ra, .{ .memory = got_addr });
_ = try self.addInst(.{

View File

@ -1351,7 +1351,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
const got_addr = if (self.bin_file.cast(link.File.Elf)) |elf_file| blk: {
const sym_index = try elf_file.getOrCreateMetadataForDecl(func.owner_decl);
const sym = elf_file.symbol(sym_index);
_ = try sym.getOrCreateGotEntry(elf_file);
_ = try sym.getOrCreateGotEntry(sym_index, elf_file);
break :blk @as(u32, @intCast(sym.gotAddress(elf_file)));
} else unreachable;

View File

@ -8157,7 +8157,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
const sym_index = try elf_file.getOrCreateMetadataForDecl(owner_decl);
const sym = elf_file.symbol(sym_index);
sym.flags.needs_got = true;
_ = try sym.getOrCreateGotEntry(elf_file);
_ = try sym.getOrCreateGotEntry(sym_index, elf_file);
const got_addr = sym.gotAddress(elf_file);
try self.asmMemory(.{ ._, .call }, Memory.sib(.qword, .{
.base = .{ .reg = .ds },
@ -10236,7 +10236,7 @@ fn genLazySymbolRef(
return self.fail("{s} creating lazy symbol", .{@errorName(err)});
const sym = elf_file.symbol(sym_index);
sym.flags.needs_got = true;
_ = try sym.getOrCreateGotEntry(elf_file);
_ = try sym.getOrCreateGotEntry(sym_index, elf_file);
const got_addr = sym.gotAddress(elf_file);
const got_mem =
Memory.sib(.qword, .{ .base = .{ .reg = .ds }, .disp = @intCast(got_addr) });

View File

@ -0,0 +1,475 @@
const Disassembler = @This();
const std = @import("std");
const assert = std.debug.assert;
const math = std.math;
const bits = @import("bits.zig");
const encoder = @import("encoder.zig");
const Encoding = @import("Encoding.zig");
const Immediate = bits.Immediate;
const Instruction = encoder.Instruction;
const LegacyPrefixes = encoder.LegacyPrefixes;
const Memory = bits.Memory;
const Register = bits.Register;
const Rex = encoder.Rex;
/// Errors that instruction decoding can surface to the caller.
pub const Error = error{
    EndOfStream,
    LegacyPrefixAfterRex,
    UnknownOpcode,
    Overflow,
    Todo,
};

/// The byte stream being disassembled.
code: []const u8,
/// Current read offset into `code`.
pos: usize = 0,

/// Creates a disassembler positioned at the start of `code`.
pub fn init(code: []const u8) Disassembler {
    return .{ .code = code };
}
/// Decodes the next instruction from the byte stream, advancing `pos` past
/// every consumed byte. Returns null when the stream ends cleanly before an
/// opcode is started, error.UnknownOpcode when no encoding matches the
/// opcode bytes, and error.Todo for operand encodings not yet implemented.
pub fn next(dis: *Disassembler) Error!?Instruction {
    const prefixes = dis.parsePrefixes() catch |err| switch (err) {
        // Running out of bytes before any opcode means a clean end of input.
        error.EndOfStream => return null,
        else => |e| return e,
    };

    const enc = try dis.parseEncoding(prefixes) orelse return error.UnknownOpcode;
    switch (enc.data.op_en) {
        .np => return inst(enc, .{}),
        .d, .i => {
            const imm = try dis.parseImm(enc.data.ops[0]);
            return inst(enc, .{
                .op1 = .{ .imm = imm },
            });
        },
        .zi => {
            const imm = try dis.parseImm(enc.data.ops[1]);
            return inst(enc, .{
                .op1 = .{ .reg = Register.rax.toBitSize(enc.data.ops[0].regBitSize()) },
                .op2 = .{ .imm = imm },
            });
        },
        .o, .oi => {
            // The low 3 bits of the (already consumed) last opcode byte
            // select the register.
            const reg_low_enc = @as(u3, @truncate(dis.code[dis.pos - 1]));
            const op2: Instruction.Operand = if (enc.data.op_en == .oi) .{
                .imm = try dis.parseImm(enc.data.ops[1]),
            } else .none;
            return inst(enc, .{
                .op1 = .{ .reg = parseGpRegister(reg_low_enc, prefixes.rex.b, prefixes.rex, enc.data.ops[0].regBitSize()) },
                .op2 = op2,
            });
        },
        .m, .mi, .m1, .mc => {
            const modrm = try dis.parseModRmByte();
            // ModRM.reg doubles as an opcode extension here, so the exact
            // encoding can only be resolved after reading the ModRM byte.
            const act_enc = Encoding.findByOpcode(enc.opcode(), .{
                .legacy = prefixes.legacy,
                .rex = prefixes.rex,
            }, modrm.op1) orelse return error.UnknownOpcode;
            const sib = if (modrm.sib()) try dis.parseSibByte() else null;

            if (modrm.direct()) {
                const op2: Instruction.Operand = switch (act_enc.data.op_en) {
                    .mi => .{ .imm = try dis.parseImm(act_enc.data.ops[1]) },
                    .m1 => .{ .imm = Immediate.u(1) },
                    .mc => .{ .reg = .cl },
                    .m => .none,
                    else => unreachable,
                };
                return inst(act_enc, .{
                    .op1 = .{ .reg = parseGpRegister(modrm.op2, prefixes.rex.b, prefixes.rex, act_enc.data.ops[0].regBitSize()) },
                    .op2 = op2,
                });
            }

            const disp = try dis.parseDisplacement(modrm, sib);
            const op2: Instruction.Operand = switch (act_enc.data.op_en) {
                .mi => .{ .imm = try dis.parseImm(act_enc.data.ops[1]) },
                .m1 => .{ .imm = Immediate.u(1) },
                .mc => .{ .reg = .cl },
                .m => .none,
                else => unreachable,
            };

            if (modrm.rip()) {
                return inst(act_enc, .{
                    .op1 = .{ .mem = Memory.rip(Memory.PtrSize.fromBitSize(act_enc.data.ops[0].memBitSize()), disp) },
                    .op2 = op2,
                });
            }

            const scale_index = if (sib) |info| info.scaleIndex(prefixes.rex) else null;
            const base = if (sib) |info|
                info.baseReg(modrm, prefixes)
            else
                parseGpRegister(modrm.op2, prefixes.rex.b, prefixes.rex, 64);
            return inst(act_enc, .{
                .op1 = .{ .mem = Memory.sib(Memory.PtrSize.fromBitSize(act_enc.data.ops[0].memBitSize()), .{
                    .base = if (base) |base_reg| .{ .reg = base_reg } else .none,
                    .scale_index = scale_index,
                    .disp = disp,
                }) },
                .op2 = op2,
            });
        },
        .fd => {
            const seg = segmentRegister(prefixes.legacy);
            const offset = try dis.parseOffset();
            return inst(enc, .{
                .op1 = .{ .reg = Register.rax.toBitSize(enc.data.ops[0].regBitSize()) },
                .op2 = .{ .mem = Memory.moffs(seg, offset) },
            });
        },
        .td => {
            const seg = segmentRegister(prefixes.legacy);
            const offset = try dis.parseOffset();
            return inst(enc, .{
                .op1 = .{ .mem = Memory.moffs(seg, offset) },
                .op2 = .{ .reg = Register.rax.toBitSize(enc.data.ops[1].regBitSize()) },
            });
        },
        .mr, .mri, .mrc => {
            const modrm = try dis.parseModRmByte();
            const sib = if (modrm.sib()) try dis.parseSibByte() else null;
            const src_bit_size = enc.data.ops[1].regBitSize();

            if (modrm.direct()) {
                return inst(enc, .{
                    .op1 = .{ .reg = parseGpRegister(modrm.op2, prefixes.rex.b, prefixes.rex, enc.data.ops[0].regBitSize()) },
                    // Fix: ModRM.reg is extended by REX.R (REX.X only ever
                    // extends the SIB index field); this matches the
                    // indirect paths below, which already use rex.r.
                    .op2 = .{ .reg = parseGpRegister(modrm.op1, prefixes.rex.r, prefixes.rex, src_bit_size) },
                });
            }

            const dst_bit_size = enc.data.ops[0].memBitSize();
            const disp = try dis.parseDisplacement(modrm, sib);
            const op3: Instruction.Operand = switch (enc.data.op_en) {
                .mri => .{ .imm = try dis.parseImm(enc.data.ops[2]) },
                .mrc => .{ .reg = .cl },
                .mr => .none,
                else => unreachable,
            };

            if (modrm.rip()) {
                return inst(enc, .{
                    .op1 = .{ .mem = Memory.rip(Memory.PtrSize.fromBitSize(dst_bit_size), disp) },
                    .op2 = .{ .reg = parseGpRegister(modrm.op1, prefixes.rex.r, prefixes.rex, src_bit_size) },
                    .op3 = op3,
                });
            }

            const scale_index = if (sib) |info| info.scaleIndex(prefixes.rex) else null;
            const base = if (sib) |info|
                info.baseReg(modrm, prefixes)
            else
                parseGpRegister(modrm.op2, prefixes.rex.b, prefixes.rex, 64);
            return inst(enc, .{
                .op1 = .{ .mem = Memory.sib(Memory.PtrSize.fromBitSize(dst_bit_size), .{
                    .base = if (base) |base_reg| .{ .reg = base_reg } else .none,
                    .scale_index = scale_index,
                    .disp = disp,
                }) },
                .op2 = .{ .reg = parseGpRegister(modrm.op1, prefixes.rex.r, prefixes.rex, src_bit_size) },
                .op3 = op3,
            });
        },
        .rm, .rmi => {
            const modrm = try dis.parseModRmByte();
            const sib = if (modrm.sib()) try dis.parseSibByte() else null;
            const dst_bit_size = enc.data.ops[0].regBitSize();

            if (modrm.direct()) {
                const op3: Instruction.Operand = switch (enc.data.op_en) {
                    .rm => .none,
                    .rmi => .{ .imm = try dis.parseImm(enc.data.ops[2]) },
                    else => unreachable,
                };
                return inst(enc, .{
                    // Fix: ModRM.reg is extended by REX.R, not REX.X —
                    // consistent with the indirect paths below.
                    .op1 = .{ .reg = parseGpRegister(modrm.op1, prefixes.rex.r, prefixes.rex, dst_bit_size) },
                    .op2 = .{ .reg = parseGpRegister(modrm.op2, prefixes.rex.b, prefixes.rex, enc.data.ops[1].regBitSize()) },
                    .op3 = op3,
                });
            }

            const src_bit_size = if (enc.data.ops[1] == .m) dst_bit_size else enc.data.ops[1].memBitSize();
            const disp = try dis.parseDisplacement(modrm, sib);
            const op3: Instruction.Operand = switch (enc.data.op_en) {
                .rmi => .{ .imm = try dis.parseImm(enc.data.ops[2]) },
                .rm => .none,
                else => unreachable,
            };

            if (modrm.rip()) {
                return inst(enc, .{
                    .op1 = .{ .reg = parseGpRegister(modrm.op1, prefixes.rex.r, prefixes.rex, dst_bit_size) },
                    .op2 = .{ .mem = Memory.rip(Memory.PtrSize.fromBitSize(src_bit_size), disp) },
                    .op3 = op3,
                });
            }

            const scale_index = if (sib) |info| info.scaleIndex(prefixes.rex) else null;
            const base = if (sib) |info|
                info.baseReg(modrm, prefixes)
            else
                parseGpRegister(modrm.op2, prefixes.rex.b, prefixes.rex, 64);
            return inst(enc, .{
                .op1 = .{ .reg = parseGpRegister(modrm.op1, prefixes.rex.r, prefixes.rex, dst_bit_size) },
                .op2 = .{ .mem = Memory.sib(Memory.PtrSize.fromBitSize(src_bit_size), .{
                    .base = if (base) |base_reg| .{ .reg = base_reg } else .none,
                    .scale_index = scale_index,
                    .disp = disp,
                }) },
                .op3 = op3,
            });
        },

        .rm0, .vmi, .rvm, .rvmr, .rvmi, .mvr => unreachable, // TODO
    }
}
/// Builds an Instruction from an encoding and up to four operands.
/// Operands not supplied by the caller default to .none.
fn inst(encoding: Encoding, args: struct {
    prefix: Instruction.Prefix = .none,
    op1: Instruction.Operand = .none,
    op2: Instruction.Operand = .none,
    op3: Instruction.Operand = .none,
    op4: Instruction.Operand = .none,
}) Instruction {
    return .{
        .encoding = encoding,
        .prefix = args.prefix,
        .ops = .{ args.op1, args.op2, args.op3, args.op4 },
    };
}
/// Prefix bytes collected before the opcode: any number of legacy prefixes
/// followed by an optional REX byte.
const Prefixes = struct {
    legacy: LegacyPrefixes = .{},
    rex: Rex = .{},
    // TODO add support for VEX prefix
};
/// Consumes any legacy prefixes followed by an optional REX prefix,
/// advancing `pos` past each consumed byte. Stops (without consuming) at
/// the first non-prefix byte. Returns error.LegacyPrefixAfterRex when a
/// legacy prefix appears after a REX byte.
fn parsePrefixes(dis: *Disassembler) !Prefixes {
    // A REX prefix is any byte of the form 0b0100_WRXB.
    const rex_prefix_mask: u4 = 0b0100;
    var stream = std.io.fixedBufferStream(dis.code[dis.pos..]);
    const reader = stream.reader();

    var res: Prefixes = .{};

    while (true) {
        const next_byte = try reader.readByte();
        dis.pos += 1;

        switch (next_byte) {
            0xf0, 0xf2, 0xf3, 0x2e, 0x36, 0x26, 0x64, 0x65, 0x3e, 0x66, 0x67 => {
                // Legacy prefix
                if (res.rex.present) return error.LegacyPrefixAfterRex;
                switch (next_byte) {
                    0xf0 => res.legacy.prefix_f0 = true,
                    0xf2 => res.legacy.prefix_f2 = true,
                    0xf3 => res.legacy.prefix_f3 = true,
                    0x2e => res.legacy.prefix_2e = true,
                    0x36 => res.legacy.prefix_36 = true,
                    0x26 => res.legacy.prefix_26 = true,
                    0x64 => res.legacy.prefix_64 = true,
                    0x65 => res.legacy.prefix_65 = true,
                    0x3e => res.legacy.prefix_3e = true,
                    0x66 => res.legacy.prefix_66 = true,
                    0x67 => res.legacy.prefix_67 = true,
                    else => unreachable,
                }
            },
            else => {
                if (rex_prefix_mask == @as(u4, @truncate(next_byte >> 4))) {
                    // REX prefix: low nibble holds the W, R, X, B bits.
                    res.rex.w = next_byte & 0b1000 != 0;
                    res.rex.r = next_byte & 0b100 != 0;
                    res.rex.x = next_byte & 0b10 != 0;
                    res.rex.b = next_byte & 0b1 != 0;
                    res.rex.present = true;
                    continue;
                }

                // TODO VEX prefix
                // Not a prefix: put the byte back and stop scanning.
                dis.pos -= 1;
                break;
            },
        }
    }

    return res;
}
/// Reads up to three opcode bytes and looks up a matching encoding,
/// advancing `pos` past each byte actually consumed. Returns null when no
/// mnemonic matches after three bytes.
fn parseEncoding(dis: *Disassembler, prefixes: Prefixes) !?Encoding {
    // Mask that strips the low register bits of an O-form (opcode+rd) byte.
    const o_mask: u8 = 0b1111_1000;

    var opcode: [3]u8 = .{ 0, 0, 0 };

    var stream = std.io.fixedBufferStream(dis.code[dis.pos..]);
    const reader = stream.reader();

    comptime var opc_count = 0;
    inline while (opc_count < 3) : (opc_count += 1) {
        const byte = try reader.readByte();
        opcode[opc_count] = byte;
        dis.pos += 1;

        if (byte == 0x0f) {
            // 0x0f escape byte: part of a multi-byte opcode, keep reading.
        } else if (opc_count > 0) {
            // Multi-byte opcode
            if (Encoding.findByOpcode(opcode[0 .. opc_count + 1], .{
                .legacy = prefixes.legacy,
                .rex = prefixes.rex,
            }, null)) |mnemonic| {
                return mnemonic;
            }
        } else {
            // Single-byte opcode
            if (Encoding.findByOpcode(opcode[0..1], .{
                .legacy = prefixes.legacy,
                .rex = prefixes.rex,
            }, null)) |mnemonic| {
                return mnemonic;
            } else {
                // Try O* encoding
                return Encoding.findByOpcode(&.{opcode[0] & o_mask}, .{
                    .legacy = prefixes.legacy,
                    .rex = prefixes.rex,
                }, null);
            }
        }
    }
    return null;
}
/// Decodes a general-purpose register from its low 3 encoding bits plus the
/// relevant REX extension bit. Without any REX prefix, the 8-bit encodings
/// for spl/dil/bpl/sil select the legacy high-byte registers ah/bh/ch/dh
/// instead.
fn parseGpRegister(low_enc: u3, is_extended: bool, rex: Rex, bit_size: u64) Register {
    const ext_bit: u4 = @intFromBool(is_extended);
    const candidate = @as(Register, @enumFromInt(ext_bit << 3 | low_enc)).toBitSize(bit_size);
    const rex_active = rex.present or rex.isSet();
    return switch (candidate) {
        .spl => if (rex_active) candidate else .ah,
        .dil => if (rex_active) candidate else .bh,
        .bpl => if (rex_active) candidate else .ch,
        .sil => if (rex_active) candidate else .dh,
        else => candidate,
    };
}
/// Reads an immediate operand of the width implied by `kind`, advancing
/// `pos` by exactly the number of bytes consumed. Signed kinds (and rel*)
/// are built with Immediate.s, unsigned kinds with Immediate.u.
fn parseImm(dis: *Disassembler, kind: Encoding.Op) !Immediate {
    var stream = std.io.fixedBufferStream(dis.code[dis.pos..]);
    // The counting reader tracks how many bytes the chosen width consumed.
    var creader = std.io.countingReader(stream.reader());
    const reader = creader.reader();
    const imm = switch (kind) {
        .imm8s, .rel8 => Immediate.s(try reader.readInt(i8, .Little)),
        .imm16s, .rel16 => Immediate.s(try reader.readInt(i16, .Little)),
        .imm32s, .rel32 => Immediate.s(try reader.readInt(i32, .Little)),
        .imm8 => Immediate.u(try reader.readInt(u8, .Little)),
        .imm16 => Immediate.u(try reader.readInt(u16, .Little)),
        .imm32 => Immediate.u(try reader.readInt(u32, .Little)),
        .imm64 => Immediate.u(try reader.readInt(u64, .Little)),
        else => unreachable,
    };
    dis.pos += std.math.cast(usize, creader.bytes_read) orelse return error.Overflow;
    return imm;
}
/// Reads an 8-byte little-endian absolute offset (moffs operand) and
/// advances the cursor past it.
fn parseOffset(dis: *Disassembler) !u64 {
    var stream = std.io.fixedBufferStream(dis.code[dis.pos..]);
    const offset = try stream.reader().readInt(u64, .Little);
    dis.pos += 8;
    return offset;
}
/// Decoded ModRM byte. `op1` holds the reg/opcode-extension field and
/// `op2` the r/m field (bit positions are assigned in parseModRmByte).
const ModRm = packed struct {
    mod: u2,
    op1: u3,
    op2: u3,

    /// mod == 0b11: the r/m operand is a register, no memory access.
    inline fn direct(self: ModRm) bool {
        return self.mod == 0b11;
    }

    /// mod == 0 with r/m == 0b101 selects RIP-relative addressing.
    inline fn rip(self: ModRm) bool {
        return self.mod == 0 and self.op2 == 0b101;
    }

    /// r/m == 0b100 in any indirect mode means a SIB byte follows.
    inline fn sib(self: ModRm) bool {
        return !self.direct() and self.op2 == 0b100;
    }
};
/// Consumes one byte and splits it into ModRM fields:
/// mod (bits 7:6), op1/reg (bits 5:3), op2/rm (bits 2:0).
fn parseModRmByte(dis: *Disassembler) !ModRm {
    if (dis.code[dis.pos..].len == 0) return error.EndOfStream;
    const byte = dis.code[dis.pos];
    dis.pos += 1;
    return .{
        .mod = @as(u2, @truncate(byte >> 6)),
        .op1 = @as(u3, @truncate(byte >> 3)),
        .op2 = @as(u3, @truncate(byte)),
    };
}
/// Maps a segment-override legacy prefix to its segment register,
/// defaulting to .ds when no override is present. If several override
/// prefixes are set, the first match in the order below wins.
fn segmentRegister(prefixes: LegacyPrefixes) Register {
    if (prefixes.prefix_2e) return .cs;
    if (prefixes.prefix_36) return .ss;
    if (prefixes.prefix_26) return .es;
    if (prefixes.prefix_64) return .fs;
    if (prefixes.prefix_65) return .gs;
    return .ds;
}
/// Decoded SIB byte (bit positions are assigned in parseSibByte).
const Sib = packed struct {
    scale: u2,
    index: u3,
    base: u3,

    /// Returns the scale/index pair, or null when the index field encodes
    /// "no index" (0b100 without REX.X).
    fn scaleIndex(self: Sib, rex: Rex) ?Memory.ScaleIndex {
        if (self.index == 0b100 and !rex.x) return null;
        return .{
            // The scale field stores a power of two: 0..3 -> 1,2,4,8.
            .scale = @as(u4, 1) << self.scale,
            .index = parseGpRegister(self.index, rex.x, rex, 64),
        };
    }

    /// Returns the base register, or null for the no-base encoding
    /// (base == 0b101, mod == 0, with a scaled index present).
    /// NOTE(review): when base == 0b101, mod == 0 and there is NO scaled
    /// index, this returns the *segment* register from the legacy
    /// prefixes as the base — confirm callers expect that.
    fn baseReg(self: Sib, modrm: ModRm, prefixes: Prefixes) ?Register {
        if (self.base == 0b101 and modrm.mod == 0) {
            if (self.scaleIndex(prefixes.rex)) |_| return null;
            return segmentRegister(prefixes.legacy);
        }
        return parseGpRegister(self.base, prefixes.rex.b, prefixes.rex, 64);
    }
};
/// Consumes one byte and splits it into SIB fields:
/// scale (bits 7:6), index (bits 5:3), base (bits 2:0).
fn parseSibByte(dis: *Disassembler) !Sib {
    if (dis.code[dis.pos..].len == 0) return error.EndOfStream;
    const byte = dis.code[dis.pos];
    dis.pos += 1;
    return .{
        .scale = @as(u2, @truncate(byte >> 6)),
        .index = @as(u3, @truncate(byte >> 3)),
        .base = @as(u3, @truncate(byte)),
    };
}
/// Reads the displacement that the ModRM/SIB combination calls for,
/// advancing `pos` by exactly the bytes consumed. Yields 0 when the
/// addressing mode carries no displacement.
fn parseDisplacement(dis: *Disassembler, modrm: ModRm, sib: ?Sib) !i32 {
    var stream = std.io.fixedBufferStream(dis.code[dis.pos..]);
    var creader = std.io.countingReader(stream.reader());
    const reader = creader.reader();
    const disp = disp: {
        // SIB with base == 0b101 and mod == 0 always carries a disp32.
        if (sib) |info| {
            if (info.base == 0b101 and modrm.mod == 0) {
                break :disp try reader.readInt(i32, .Little);
            }
        }
        // RIP-relative addressing always carries a disp32.
        if (modrm.rip()) {
            break :disp try reader.readInt(i32, .Little);
        }
        break :disp switch (modrm.mod) {
            0b00 => 0,
            0b01 => try reader.readInt(i8, .Little),
            0b10 => try reader.readInt(i32, .Little),
            0b11 => unreachable, // direct mode: callers never ask for a displacement
        };
    };
    dis.pos += std.math.cast(usize, creader.bytes_read) orelse return error.Overflow;
    return disp;
}

View File

@ -861,7 +861,7 @@ fn genDeclRef(
const sym_index = try elf_file.getOrCreateMetadataForDecl(decl_index);
const sym = elf_file.symbol(sym_index);
sym.flags.needs_got = true;
_ = try sym.getOrCreateGotEntry(elf_file);
_ = try sym.getOrCreateGotEntry(sym_index, elf_file);
return GenResult.mcv(.{ .memory = sym.gotAddress(elf_file) });
} else if (bin_file.cast(link.File.MachO)) |macho_file| {
const atom_index = try macho_file.getOrCreateAtomForDecl(decl_index);

View File

@ -40,6 +40,8 @@ phdr_got_index: ?u16 = null,
phdr_load_ro_index: ?u16 = null,
/// The index into the program headers of a PT_LOAD program header with Write flag
phdr_load_rw_index: ?u16 = null,
/// The index into the program headers of a PT_LOAD program header with zerofill data.
phdr_load_zerofill_index: ?u16 = null,
entry_addr: ?u64 = null,
page_size: u32,
@ -56,6 +58,7 @@ got: GotSection = .{},
text_section_index: ?u16 = null,
rodata_section_index: ?u16 = null,
data_section_index: ?u16 = null,
bss_section_index: ?u16 = null,
eh_frame_section_index: ?u16 = null,
eh_frame_hdr_section_index: ?u16 = null,
dynamic_section_index: ?u16 = null,
@ -532,6 +535,26 @@ pub fn populateMissingMetadata(self: *Elf) !void {
self.phdr_table_dirty = true;
}
if (self.phdr_load_zerofill_index == null) {
self.phdr_load_zerofill_index = @as(u16, @intCast(self.phdrs.items.len));
const p_align = if (self.base.options.target.os.tag == .linux) self.page_size else @as(u16, ptr_size);
const off = self.phdrs.items[self.phdr_load_rw_index.?].p_offset;
log.debug("found PT_LOAD zerofill free space 0x{x} to 0x{x}", .{ off, off });
// TODO Same as for GOT
const addr: u32 = if (self.base.options.target.ptrBitWidth() >= 32) 0x14000000 else 0xf000;
try self.phdrs.append(gpa, .{
.p_type = elf.PT_LOAD,
.p_offset = off,
.p_filesz = 0,
.p_vaddr = addr,
.p_paddr = addr,
.p_memsz = 0,
.p_align = p_align,
.p_flags = elf.PF_R | elf.PF_W,
});
self.phdr_table_dirty = true;
}
if (self.shstrtab_section_index == null) {
self.shstrtab_section_index = @as(u16, @intCast(self.shdrs.items.len));
assert(self.shstrtab.buffer.items.len == 0);
@ -655,6 +678,26 @@ pub fn populateMissingMetadata(self: *Elf) !void {
self.shdr_table_dirty = true;
}
if (self.bss_section_index == null) {
self.bss_section_index = @as(u16, @intCast(self.shdrs.items.len));
const phdr = &self.phdrs.items[self.phdr_load_zerofill_index.?];
try self.shdrs.append(gpa, .{
.sh_name = try self.shstrtab.insert(gpa, ".bss"),
.sh_type = elf.SHT_NOBITS,
.sh_flags = elf.SHF_WRITE | elf.SHF_ALLOC,
.sh_addr = phdr.p_vaddr,
.sh_offset = phdr.p_offset,
.sh_size = phdr.p_filesz,
.sh_link = 0,
.sh_info = 0,
.sh_addralign = @as(u16, ptr_size),
.sh_entsize = 0,
});
try self.phdr_to_shdr_table.putNoClobber(gpa, self.bss_section_index.?, self.phdr_load_zerofill_index.?);
try self.last_atom_and_free_list_table.putNoClobber(gpa, self.bss_section_index.?, .{});
self.shdr_table_dirty = true;
}
if (self.symtab_section_index == null) {
self.symtab_section_index = @as(u16, @intCast(self.shdrs.items.len));
const min_align: u16 = if (small_ptr) @alignOf(elf.Elf32_Sym) else @alignOf(elf.Elf64_Sym);
@ -868,8 +911,9 @@ pub fn growAllocSection(self: *Elf, shdr_index: u16, needed_size: u64) !void {
const shdr = &self.shdrs.items[shdr_index];
const phdr_index = self.phdr_to_shdr_table.get(shdr_index).?;
const phdr = &self.phdrs.items[phdr_index];
const is_zerofill = shdr.sh_type == elf.SHT_NOBITS;
if (needed_size > self.allocatedSize(shdr.sh_offset)) {
if (needed_size > self.allocatedSize(shdr.sh_offset) and !is_zerofill) {
// Must move the entire section.
const new_offset = self.findFreeSpace(needed_size, self.page_size);
const existing_size = if (self.last_atom_and_free_list_table.get(shdr_index)) |meta| blk: {
@ -893,7 +937,10 @@ pub fn growAllocSection(self: *Elf, shdr_index: u16, needed_size: u64) !void {
shdr.sh_size = needed_size;
phdr.p_memsz = needed_size;
phdr.p_filesz = needed_size;
if (!is_zerofill) {
phdr.p_filesz = needed_size;
}
self.markDirty(shdr_index, phdr_index);
}
@ -1005,13 +1052,6 @@ pub fn flushModule(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node
const directory = self.base.options.emit.?.directory; // Just an alias to make it shorter to type.
const full_out_path = try directory.join(arena, &[_][]const u8{self.base.options.emit.?.sub_path});
const compiler_rt_path: ?[]const u8 = blk: {
if (comp.compiler_rt_lib) |x| break :blk x.full_object_path;
if (comp.compiler_rt_obj) |x| break :blk x.full_object_path;
break :blk null;
};
_ = compiler_rt_path;
// Here we will parse input positional and library files (if referenced).
// This will roughly match in any linker backend we support.
var positionals = std.ArrayList(Compilation.LinkObject).init(arena);
@ -1037,6 +1077,15 @@ pub fn flushModule(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node
try positionals.append(.{ .path = key.status.success.object_path });
}
const compiler_rt_path: ?[]const u8 = blk: {
if (comp.compiler_rt_lib) |x| break :blk x.full_object_path;
if (comp.compiler_rt_obj) |x| break :blk x.full_object_path;
break :blk null;
};
if (compiler_rt_path) |path| {
try positionals.append(.{ .path = path });
}
for (positionals.items) |obj| {
const in_file = try std.fs.cwd().openFile(obj.path, .{});
defer in_file.close();
@ -1093,7 +1142,7 @@ pub fn flushModule(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node
// input Object files.
// Any qualifing unresolved symbol will be upgraded to an absolute, weak
// symbol for potential resolution at load-time.
self.resolveSymbols();
try self.resolveSymbols();
self.markImportsExports();
self.claimUnresolved();
@ -1356,6 +1405,7 @@ const ParseError = error{
EndOfStream,
FileSystem,
NotSupported,
InvalidCharacter,
} || std.os.SeekError || std.fs.File.OpenError || std.fs.File.ReadError;
fn parsePositional(
@ -1367,10 +1417,32 @@ fn parsePositional(
) ParseError!void {
const tracy = trace(@src());
defer tracy.end();
_ = must_link;
if (Object.isObject(in_file)) {
try self.parseObject(in_file, path, ctx);
} else {
try self.parseLibrary(in_file, path, .{
.path = null,
.needed = false,
.weak = false,
}, must_link, ctx);
}
}
fn parseLibrary(
self: *Elf,
in_file: std.fs.File,
path: []const u8,
lib: link.SystemLib,
must_link: bool,
ctx: *ParseErrorCtx,
) ParseError!void {
const tracy = trace(@src());
defer tracy.end();
_ = lib;
if (Archive.isArchive(in_file)) {
try self.parseArchive(in_file, path, must_link, ctx);
} else return error.UnknownFileType;
}
@ -1395,15 +1467,109 @@ fn parseObject(self: *Elf, in_file: std.fs.File, path: []const u8, ctx: *ParseEr
if (ctx.detected_cpu_arch != self.base.options.target.cpu.arch) return error.InvalidCpuArch;
}
fn resolveSymbols(self: *Elf) void {
if (self.zig_module_index) |index| {
const zig_module = self.file(index).?.zig_module;
zig_module.resolveSymbols(self);
fn parseArchive(
self: *Elf,
in_file: std.fs.File,
path: []const u8,
must_link: bool,
ctx: *ParseErrorCtx,
) ParseError!void {
const tracy = trace(@src());
defer tracy.end();
const gpa = self.base.allocator;
const data = try in_file.readToEndAlloc(gpa, std.math.maxInt(u32));
var archive = Archive{ .path = path, .data = data };
defer archive.deinit(gpa);
try archive.parse(self);
for (archive.objects.items) |extracted| {
const index = @as(File.Index, @intCast(try self.files.addOne(gpa)));
self.files.set(index, .{ .object = extracted });
const object = &self.files.items(.data)[index].object;
object.index = index;
object.alive = must_link;
try object.parse(self);
try self.objects.append(gpa, index);
ctx.detected_cpu_arch = object.header.?.e_machine.toTargetCpuArch().?;
if (ctx.detected_cpu_arch != self.base.options.target.cpu.arch) return error.InvalidCpuArch;
}
}
/// When resolving symbols, we approach the problem similarly to `mold`.
/// 1. Resolve symbols across all objects (including those preemptively extracted archives).
/// 2. Resolve symbols across all shared objects.
/// 3. Mark live objects (see `Elf.markLive`)
/// 4. Reset state of all resolved globals since we will redo this bit on the pruned set.
/// 5. Remove references to dead objects/shared objects
/// 6. Re-run symbol resolution on pruned objects and shared objects sets.
fn resolveSymbols(self: *Elf) error{Overflow}!void {
// Resolve symbols in the ZigModule. For now, we assume that it's always live.
if (self.zig_module_index) |index| self.file(index).?.resolveSymbols(self);
// Resolve symbols on the set of all objects and shared objects (even if some are unneeded).
for (self.objects.items) |index| self.file(index).?.resolveSymbols(self);
// Mark live objects.
self.markLive();
// Reset state of all globals after marking live objects.
if (self.zig_module_index) |index| self.file(index).?.resetGlobals(self);
for (self.objects.items) |index| self.file(index).?.resetGlobals(self);
// Prune dead objects and shared objects.
var i: usize = 0;
while (i < self.objects.items.len) {
const index = self.objects.items[i];
if (!self.file(index).?.isAlive()) {
_ = self.objects.orderedRemove(i);
} else i += 1;
}
// Dedup comdat groups.
for (self.objects.items) |index| {
const object = self.file(index).?.object;
for (object.comdat_groups.items) |cg_index| {
const cg = self.comdatGroup(cg_index);
const cg_owner = self.comdatGroupOwner(cg.owner);
const owner_file_index = if (self.file(cg_owner.file)) |file_ptr|
file_ptr.object.index
else
std.math.maxInt(File.Index);
cg_owner.file = @min(owner_file_index, index);
}
}
for (self.objects.items) |index| {
const object = self.file(index).?.object;
object.resolveSymbols(self);
for (object.comdat_groups.items) |cg_index| {
const cg = self.comdatGroup(cg_index);
const cg_owner = self.comdatGroupOwner(cg.owner);
if (cg_owner.file != index) {
for (try object.comdatGroupMembers(cg.shndx)) |shndx| {
const atom_index = object.atoms.items[shndx];
if (self.atom(atom_index)) |atom_ptr| {
atom_ptr.alive = false;
// atom_ptr.markFdesDead(self);
}
}
}
}
}
// Re-resolve the symbols.
if (self.zig_module_index) |index| self.file(index).?.resolveSymbols(self);
for (self.objects.items) |index| self.file(index).?.resolveSymbols(self);
}
/// Traverses all objects and shared objects marking any object referenced by
/// a live object/shared object as alive itself.
/// This routine will prune unneeded objects extracted from archives and
/// unneeded shared objects.
fn markLive(self: *Elf) void {
for (self.objects.items) |index| {
const file_ptr = self.file(index).?;
if (file_ptr.isAlive()) file_ptr.markLive(self);
}
}
@ -1477,11 +1643,11 @@ fn scanRelocs(self: *Elf) !void {
try self.reportUndefined(&undefs);
for (self.symbols.items) |*sym| {
for (self.symbols.items, 0..) |*sym, sym_index| {
if (sym.flags.needs_got) {
log.debug("'{s}' needs GOT", .{sym.name(self)});
// TODO how can we tell we need to write it again, aka the entry is dirty?
const gop = try sym.getOrCreateGotEntry(self);
const gop = try sym.getOrCreateGotEntry(@intCast(sym_index), self);
try self.got.writeEntry(self, gop.index);
}
}
@ -1500,7 +1666,7 @@ fn allocateObjects(self: *Elf) !void {
const local = self.symbol(local_index);
const atom_ptr = local.atom(self) orelse continue;
if (!atom_ptr.alive) continue;
local.value = atom_ptr.value;
local.value += atom_ptr.value;
}
for (object.globals()) |global_index| {
@ -1508,7 +1674,7 @@ fn allocateObjects(self: *Elf) !void {
const atom_ptr = global.atom(self) orelse continue;
if (!atom_ptr.alive) continue;
if (global.file_index == index) {
global.value = atom_ptr.value;
global.value += atom_ptr.value;
}
}
}
@ -1524,7 +1690,11 @@ fn writeObjects(self: *Elf) !void {
if (!atom_ptr.alive) continue;
const shdr = &self.shdrs.items[atom_ptr.output_section_index];
if (shdr.sh_type == elf.SHT_NOBITS) continue;
if (shdr.sh_flags & elf.SHF_ALLOC == 0) continue; // TODO we don't yet know how to handle non-alloc sections
const file_offset = shdr.sh_offset + atom_ptr.value - shdr.sh_addr;
log.debug("writing atom({d}) at 0x{x}", .{ atom_ptr.atom_index, file_offset });
const code = try atom_ptr.codeInObjectUncompressAlloc(self);
defer gpa.free(code);
@ -2529,7 +2699,7 @@ fn updateDeclCode(
esym.st_value = atom_ptr.value;
sym.flags.needs_got = true;
const gop = try sym.getOrCreateGotEntry(self);
const gop = try sym.getOrCreateGotEntry(sym_index, self);
try self.got.writeEntry(self, gop.index);
}
@ -2764,7 +2934,7 @@ fn updateLazySymbol(self: *Elf, sym: link.File.LazySymbol, symbol_index: Symbol.
local_esym.st_value = atom_ptr.value;
local_sym.flags.needs_got = true;
const gop = try local_sym.getOrCreateGotEntry(self);
const gop = try local_sym.getOrCreateGotEntry(symbol_index, self);
try self.got.writeEntry(self, gop.index);
const section_offset = atom_ptr.value - self.phdrs.items[phdr_index].p_vaddr;
@ -3635,7 +3805,7 @@ pub fn addSymbol(self: *Elf) !Symbol.Index {
break :blk index;
}
};
self.symbols.items[index] = .{ .index = index };
self.symbols.items[index] = .{};
return index;
}
@ -4012,6 +4182,7 @@ const synthetic_sections = @import("Elf/synthetic_sections.zig");
const Air = @import("../Air.zig");
const Allocator = std.mem.Allocator;
const Archive = @import("Elf/Archive.zig");
pub const Atom = @import("Elf/Atom.zig");
const Cache = std.Build.Cache;
const Compilation = @import("../Compilation.zig");

153
src/link/Elf/Archive.zig Normal file
View File

@ -0,0 +1,153 @@
/// File system path this archive was opened from (used in diagnostics).
path: []const u8,
/// Entire raw contents of the archive file; freed in `deinit`.
data: []const u8,
/// Objects extracted from archive members by `parse`.
objects: std.ArrayListUnmanaged(Object) = .{},
/// Long-name string table (the "//" member), referenced by "/<offset>"
/// member names.
strtab: []const u8 = &[0]u8{},

// Archive files start with the ARMAG identifying string. Then follows a
// `struct ar_hdr', and as many bytes of member file data as its `ar_size'
// member indicates, for each member file.

/// String that begins an archive file.
pub const ARMAG: *const [SARMAG:0]u8 = "!<arch>\n";
/// Size of that string.
pub const SARMAG: u4 = 8;
/// String in ar_fmag at the end of each header.
const ARFMAG: *const [2:0]u8 = "`\n";
/// Member name of a 64-bit symbol table.
const SYM64NAME: *const [7:0]u8 = "/SYM64/";
/// Per-member archive header as stored on disk; every field is fixed-width
/// space-padded ASCII.
const ar_hdr = extern struct {
    /// Member file name, sometimes / terminated.
    ar_name: [16]u8,
    /// File date, decimal seconds since Epoch.
    ar_date: [12]u8,
    /// User ID, in ASCII format.
    ar_uid: [6]u8,
    /// Group ID, in ASCII format.
    ar_gid: [6]u8,
    /// File mode, in ASCII octal.
    ar_mode: [8]u8,
    /// File size, in ASCII decimal.
    ar_size: [10]u8,
    /// Always contains ARFMAG.
    ar_fmag: [2]u8,

    /// Parses ar_date as a decimal integer.
    fn date(self: ar_hdr) !u64 {
        const value = getValue(&self.ar_date);
        return std.fmt.parseInt(u64, value, 10);
    }

    /// Parses ar_size (size in bytes of the member's data) as decimal.
    fn size(self: ar_hdr) !u32 {
        const value = getValue(&self.ar_size);
        return std.fmt.parseInt(u32, value, 10);
    }

    /// Strips the trailing ASCII-space padding from a header field.
    fn getValue(raw: []const u8) []const u8 {
        return mem.trimRight(u8, raw, &[_]u8{@as(u8, 0x20)});
    }

    /// "//" names the long-name string table member.
    fn isStrtab(self: ar_hdr) bool {
        return mem.eql(u8, getValue(&self.ar_name), "//");
    }

    /// "/" names the symbol table member.
    fn isSymtab(self: ar_hdr) bool {
        return mem.eql(u8, getValue(&self.ar_name), "/");
    }
};
/// Returns true if `file` begins with the archive magic `ARMAG` ("!<arch>\n").
/// On success (and on magic mismatch) the cursor is rewound to offset 0; if
/// the initial read fails the cursor is left where the read stopped, matching
/// the original behavior.
pub fn isArchive(file: std.fs.File) bool {
    const magic = file.reader().readBytesNoEof(Archive.SARMAG) catch return false;
    defer file.seekTo(0) catch {};
    return mem.eql(u8, &magic, ARMAG);
}
/// Releases memory owned by this archive: the object list and the raw file
/// bytes (which `strtab` merely slices into).
/// NOTE(review): `path` and the per-object duped buffers are not freed here —
/// presumably owned/released elsewhere; confirm with callers.
pub fn deinit(self: *Archive, allocator: Allocator) void {
    self.objects.deinit(allocator);
    allocator.free(self.data);
}
/// Walks the archive in `self.data`, recording the long-name string table
/// ("//" member) in `self.strtab` and appending every object member to
/// `self.objects` with `alive = false`. Member name and data are duped with
/// `gpa` and owned by the appended `Object`.
/// Symbol-table members ("/", "/SYM64/", BSD "__.SYMDEF" variants) are skipped.
pub fn parse(self: *Archive, elf_file: *Elf) !void {
    const gpa = elf_file.base.allocator;

    var stream = std.io.fixedBufferStream(self.data);
    const reader = stream.reader();
    _ = try reader.readBytesNoEof(SARMAG);

    while (true) {
        // Member headers start on even byte boundaries.
        if (stream.pos % 2 != 0) {
            stream.pos += 1;
        }

        // A short read means we ran off the end of the archive: done.
        const hdr = reader.readStruct(ar_hdr) catch break;

        if (!mem.eql(u8, &hdr.ar_fmag, ARFMAG)) {
            // TODO convert into an error
            log.debug(
                "{s}: invalid header delimiter: expected '{s}', found '{s}'",
                .{ self.path, std.fmt.fmtSliceEscapeLower(ARFMAG), std.fmt.fmtSliceEscapeLower(&hdr.ar_fmag) },
            );
            return;
        }

        const size = try hdr.size();
        // Whatever branch handles this member, always advance past its data.
        defer {
            _ = stream.seekBy(size) catch {};
        }

        if (hdr.isSymtab()) continue;
        if (hdr.isStrtab()) {
            // Zero-copy slice into `self.data`; consulted by `getString`.
            self.strtab = self.data[stream.pos..][0..size];
            continue;
        }

        const name = ar_hdr.getValue(&hdr.ar_name);
        // Fix: skip the 64-bit symbol table member ("/SYM64/") just like the
        // 32-bit one ("/"). Previously it fell through to the long-name branch
        // below, where parseInt on "SYM64/" errored and aborted parsing of any
        // 64-bit archive.
        if (mem.eql(u8, name, SYM64NAME)) continue;
        if (mem.eql(u8, name, "__.SYMDEF") or mem.eql(u8, name, "__.SYMDEF SORTED")) continue;

        const object_name = blk: {
            if (name[0] == '/') {
                // "/<decimal offset>": long name lives in the string table.
                const off = try std.fmt.parseInt(u32, name[1..], 10);
                break :blk self.getString(off);
            }
            break :blk name;
        };

        const object = Object{
            .archive = self.path,
            .path = try gpa.dupe(u8, object_name[0 .. object_name.len - 1]), // To account for trailing '/'
            .data = try gpa.dupe(u8, self.data[stream.pos..][0..size]),
            .index = undefined,
            .alive = false,
        };

        log.debug("extracting object '{s}' from archive '{s}'", .{ object.path, self.path });

        try self.objects.append(gpa, object);
    }
}
/// Returns the long member name stored at byte offset `off` inside the "//"
/// string table, excluding its terminator.
/// NOTE(review): the slice stops at a 0 byte, bounded by the '\n' sentinel of
/// the pointer type — for GNU-style tables this stops at the '\n' terminator.
fn getString(self: Archive, off: u32) []const u8 {
    assert(off < self.strtab.len);
    const entry: [*:'\n']const u8 = @ptrCast(self.strtab.ptr + off);
    return mem.sliceTo(entry, 0);
}
const std = @import("std");
const assert = std.debug.assert;
const elf = std.elf;
const fs = std.fs;
const log = std.log.scoped(.link);
const mem = std.mem;
const Allocator = mem.Allocator;
const Archive = @This();
const Elf = @import("../Elf.zig");
const Object = @import("Object.zig");

View File

@ -322,11 +322,12 @@ pub fn scanRelocs(self: Atom, elf_file: *Elf, undefs: anytype) !void {
if (rel.r_type() == elf.R_X86_64_NONE) continue;
const symbol = switch (file_ptr) {
.zig_module => |x| elf_file.symbol(x.symbol(rel.r_sym())),
.object => |x| elf_file.symbol(x.symbols.items[rel.r_sym()]),
const symbol_index = switch (file_ptr) {
.zig_module => |x| x.symbol(rel.r_sym()),
.object => |x| x.symbols.items[rel.r_sym()],
else => unreachable,
};
const symbol = elf_file.symbol(symbol_index);
// Check for violation of One Definition Rule for COMDATs.
if (symbol.file(elf_file) == null) {
@ -340,7 +341,7 @@ pub fn scanRelocs(self: Atom, elf_file: *Elf, undefs: anytype) !void {
}
// Report an undefined symbol.
try self.reportUndefined(elf_file, symbol, rel, undefs);
try self.reportUndefined(elf_file, symbol, symbol_index, rel, undefs);
// While traversing relocations, mark symbols that require special handling such as
// pointer indirection via GOT, or a stub trampoline via PLT.
@ -379,7 +380,14 @@ pub fn scanRelocs(self: Atom, elf_file: *Elf, undefs: anytype) !void {
}
// This function will report any undefined non-weak symbols that are not imports.
fn reportUndefined(self: Atom, elf_file: *Elf, sym: *const Symbol, rel: elf.Elf64_Rela, undefs: anytype) !void {
fn reportUndefined(
self: Atom,
elf_file: *Elf,
sym: *const Symbol,
sym_index: Symbol.Index,
rel: elf.Elf64_Rela,
undefs: anytype,
) !void {
const rel_esym = switch (elf_file.file(self.file_index).?) {
.zig_module => |x| x.elfSym(rel.r_sym()).*,
.object => |x| x.symtab[rel.r_sym()],
@ -392,7 +400,7 @@ fn reportUndefined(self: Atom, elf_file: *Elf, sym: *const Symbol, rel: elf.Elf6
!sym.flags.import and
esym.st_shndx == elf.SHN_UNDEF)
{
const gop = try undefs.getOrPut(sym.index);
const gop = try undefs.getOrPut(sym_index);
if (!gop.found_existing) {
gop.value_ptr.* = std.ArrayList(Atom.Index).init(elf_file.base.allocator);
}
@ -417,6 +425,7 @@ pub fn resolveRelocs(self: Atom, elf_file: *Elf, code: []u8) !void {
.object => |x| elf_file.symbol(x.symbols.items[rel.r_sym()]),
else => unreachable,
};
const r_offset = std.math.cast(usize, rel.r_offset) orelse return error.Overflow;
// We will use equation format to resolve relocations:
// https://intezer.com/blog/malware-analysis/executable-and-linkable-format-101-part-3-relocations/
@ -446,24 +455,49 @@ pub fn resolveRelocs(self: Atom, elf_file: *Elf, code: []u8) !void {
relocs_log.debug(" {s}: {x}: [{x} => {x}] G({x}) ({s})", .{
fmtRelocType(r_type),
rel.r_offset,
r_offset,
P,
S + A,
G + GOT + A,
target.name(elf_file),
});
try stream.seekTo(rel.r_offset);
try stream.seekTo(r_offset);
switch (rel.r_type()) {
elf.R_X86_64_NONE => unreachable,
elf.R_X86_64_64 => try cwriter.writeIntLittle(i64, S + A),
elf.R_X86_64_32 => try cwriter.writeIntLittle(u32, @as(u32, @truncate(@as(u64, @intCast(S + A))))),
elf.R_X86_64_32S => try cwriter.writeIntLittle(i32, @as(i32, @truncate(S + A))),
elf.R_X86_64_PLT32,
elf.R_X86_64_PC32,
=> try cwriter.writeIntLittle(i32, @as(i32, @intCast(S + A - P))),
elf.R_X86_64_GOTPCREL => try cwriter.writeIntLittle(i32, @as(i32, @intCast(G + GOT + A - P))),
elf.R_X86_64_GOTPC32 => try cwriter.writeIntLittle(i32, @as(i32, @intCast(GOT + A - P))),
elf.R_X86_64_GOTPC64 => try cwriter.writeIntLittle(i64, GOT + A - P),
elf.R_X86_64_GOTPCRELX => {
if (!target.flags.import and !target.isIFunc(elf_file) and !target.isAbs(elf_file)) blk: {
x86_64.relaxGotpcrelx(code[r_offset - 2 ..]) catch break :blk;
try cwriter.writeIntLittle(i32, @as(i32, @intCast(S + A - P)));
continue;
}
try cwriter.writeIntLittle(i32, @as(i32, @intCast(G + GOT + A - P)));
},
elf.R_X86_64_REX_GOTPCRELX => {
if (!target.flags.import and !target.isIFunc(elf_file) and !target.isAbs(elf_file)) blk: {
x86_64.relaxRexGotpcrelx(code[r_offset - 3 ..]) catch break :blk;
try cwriter.writeIntLittle(i32, @as(i32, @intCast(S + A - P)));
continue;
}
try cwriter.writeIntLittle(i32, @as(i32, @intCast(G + GOT + A - P)));
},
else => {
log.err("TODO: unhandled relocation type {}", .{fmtRelocType(rel.r_type())});
@panic("TODO unhandled relocation type");
@ -591,6 +625,58 @@ fn format2(
// future.
pub const Index = u16;
/// x86_64 relocation relaxation helpers: rewrite GOT-indirect instruction
/// sequences in place when the linker has decided the GOT round-trip is
/// unnecessary (see the GOTPCRELX handling in `resolveRelocs`).
const x86_64 = struct {
    /// Relaxes the instruction for an R_X86_64_GOTPCRELX site: re-encodes a
    /// GOT-indirect `call`/`jmp` as a one-byte `nop` followed by the direct
    /// form, written back over `code`. Any other mnemonic, or a failure to
    /// disassemble/re-encode in place, yields `error.RelaxFail` so the caller
    /// can keep the GOT-indirect form.
    pub fn relaxGotpcrelx(code: []u8) !void {
        const old_inst = disassemble(code) orelse return error.RelaxFail;
        const inst = switch (old_inst.encoding.mnemonic) {
            .call => try Instruction.new(old_inst.prefix, .call, &.{
                // TODO: hack to force imm32s in the assembler
                .{ .imm = Immediate.s(-129) },
            }),
            .jmp => try Instruction.new(old_inst.prefix, .jmp, &.{
                // TODO: hack to force imm32s in the assembler
                .{ .imm = Immediate.s(-129) },
            }),
            else => return error.RelaxFail,
        };
        relocs_log.debug("    relaxing {} => {}", .{ old_inst.encoding, inst.encoding });
        const nop = try Instruction.new(.none, .nop, &.{});
        encode(&.{ nop, inst }, code) catch return error.RelaxFail;
    }

    /// Relaxes the instruction for an R_X86_64_REX_GOTPCRELX site: rewrites
    /// `mov` (load via GOT) into `lea` with the same operands, re-encoded in
    /// place. Non-`mov` mnemonics or encode failures yield `error.RelaxFail`.
    pub fn relaxRexGotpcrelx(code: []u8) !void {
        const old_inst = disassemble(code) orelse return error.RelaxFail;
        switch (old_inst.encoding.mnemonic) {
            .mov => {
                const inst = try Instruction.new(old_inst.prefix, .lea, &old_inst.ops);
                relocs_log.debug("    relaxing {} => {}", .{ old_inst.encoding, inst.encoding });
                encode(&.{inst}, code) catch return error.RelaxFail;
            },
            else => return error.RelaxFail,
        }
    }

    /// Decodes the first instruction in `code`, or null if it cannot be
    /// disassembled.
    fn disassemble(code: []const u8) ?Instruction {
        var disas = Disassembler.init(code);
        const inst = disas.next() catch return null;
        return inst;
    }

    /// Encodes `insts` back-to-back into `code`; errors if they do not fit.
    fn encode(insts: []const Instruction, code: []u8) !void {
        var stream = std.io.fixedBufferStream(code);
        const writer = stream.writer();
        for (insts) |inst| {
            try inst.encode(writer, .{});
        }
    }

    const bits = @import("../../arch/x86_64/bits.zig");
    const encoder = @import("../../arch/x86_64/encoder.zig");
    const Disassembler = @import("../../arch/x86_64/Disassembler.zig");
    const Immediate = bits.Immediate;
    const Instruction = encoder.Instruction;
};
const std = @import("std");
const assert = std.debug.assert;
const elf = std.elf;

View File

@ -485,9 +485,9 @@ pub fn claimUnresolved(self: *Object, elf_file: *Elf) void {
pub fn resetGlobals(self: *Object, elf_file: *Elf) void {
for (self.globals()) |index| {
const global = elf_file.symbol(index);
const name = global.name;
const off = global.name_offset;
global.* = .{};
global.name = name;
global.name_offset = off;
}
}
@ -499,7 +499,7 @@ pub fn markLive(self: *Object, elf_file: *Elf) void {
if (sym.st_bind() == elf.STB_WEAK) continue;
const global = elf_file.symbol(index);
const file = global.getFile(elf_file) orelse continue;
const file = global.file(elf_file) orelse continue;
const should_keep = sym.st_shndx == elf.SHN_UNDEF or
(sym.st_shndx == elf.SHN_COMMON and global.elfSym(elf_file).st_shndx != elf.SHN_COMMON);
if (should_keep and !file.isAlive()) {

View File

@ -1,7 +1,5 @@
//! Represents a defined symbol.
index: Index = 0,
/// Allocated address value of this symbol.
value: u64 = 0,
@ -117,10 +115,10 @@ const GetOrCreateGotEntryResult = struct {
index: GotSection.Index,
};
pub fn getOrCreateGotEntry(symbol: *Symbol, elf_file: *Elf) !GetOrCreateGotEntryResult {
pub fn getOrCreateGotEntry(symbol: *Symbol, symbol_index: Index, elf_file: *Elf) !GetOrCreateGotEntryResult {
assert(symbol.flags.needs_got);
if (symbol.flags.has_got) return .{ .found_existing = true, .index = symbol.extra(elf_file).?.got };
const index = try elf_file.got.addGotSymbol(symbol.index, elf_file);
const index = try elf_file.got.addGotSymbol(symbol_index, elf_file);
symbol.flags.has_got = true;
return .{ .found_existing = false, .index = index };
}
@ -270,7 +268,7 @@ fn format2(
_ = options;
_ = unused_fmt_string;
const symbol = ctx.symbol;
try writer.print("%{d} : {s} : @{x}", .{ symbol.index, symbol.fmtName(ctx.elf_file), symbol.value });
try writer.print("%{d} : {s} : @{x}", .{ symbol.esym_index, symbol.fmtName(ctx.elf_file), symbol.value });
if (symbol.file(ctx.elf_file)) |file_ptr| {
if (symbol.isAbs(ctx.elf_file)) {
if (symbol.elfSym(ctx.elf_file).st_shndx == elf.SHN_UNDEF) {

View File

@ -148,6 +148,15 @@ pub fn scanRelocs(self: *ZigModule, elf_file: *Elf, undefs: anytype) !void {
}
}
/// Resets every global symbol owned by this module back to its default state,
/// keeping only its string-table name offset so the symbol stays identifiable
/// across another resolution pass.
pub fn resetGlobals(self: *ZigModule, elf_file: *Elf) void {
    for (self.globals()) |sym_index| {
        const sym = elf_file.symbol(sym_index);
        const name_offset = sym.name_offset;
        sym.* = .{};
        sym.name_offset = name_offset;
    }
}
pub fn updateSymtabSize(self: *ZigModule, elf_file: *Elf) void {
for (self.locals()) |local_index| {
const local = elf_file.symbol(local_index);

View File

@ -62,6 +62,19 @@ pub const File = union(enum) {
return (@as(u32, base) << 24) + file.index();
}
/// Dispatches symbol resolution to the concrete input-file variant
/// (comptime-dispatched via `inline else`).
pub fn resolveSymbols(file: File, elf_file: *Elf) void {
    switch (file) {
        inline else => |x| x.resolveSymbols(elf_file),
    }
}
/// Dispatches `resetGlobals` to the concrete input-file variant.
/// NOTE(review): `.linker_defined` is asserted unreachable — presumably the
/// resolver never resets linker-defined globals; confirm with the caller in
/// the symbol-resolution loop.
pub fn resetGlobals(file: File, elf_file: *Elf) void {
    switch (file) {
        .linker_defined => unreachable,
        inline else => |x| x.resetGlobals(elf_file),
    }
}
pub fn setAlive(file: File) void {
switch (file) {
.zig_module, .linker_defined => {},
@ -71,7 +84,7 @@ pub const File = union(enum) {
pub fn markLive(file: File, elf_file: *Elf) void {
switch (file) {
.zig_module, .linker_defined => {},
.zig_module, .linker_defined => unreachable,
inline else => |x| x.markLive(elf_file),
}
}