Merge pull request #13444 from ziglang/arm64-coff

aarch64,coff: lift-off!
Jakub Konka 2022-11-05 12:19:45 +01:00 committed by GitHub
commit 28288dcbbf
5 changed files with 340 additions and 58 deletions

View File

@ -36,7 +36,7 @@ pub const default_mode: ModeOverride = if (is_async) Mode.evented else .blocking
fn getStdOutHandle() os.fd_t {
if (builtin.os.tag == .windows) {
if (builtin.zig_backend == .stage2_x86_64) {
if (builtin.zig_backend == .stage2_x86_64 or builtin.zig_backend == .stage2_aarch64) {
// TODO: this is just a temporary workaround until we advance the self-hosted backends further along.
return os.windows.GetStdHandle(os.windows.STD_OUTPUT_HANDLE) catch os.windows.INVALID_HANDLE_VALUE;
}
@ -62,7 +62,7 @@ pub fn getStdOut() File {
fn getStdErrHandle() os.fd_t {
if (builtin.os.tag == .windows) {
if (builtin.zig_backend == .stage2_x86_64) {
if (builtin.zig_backend == .stage2_x86_64 or builtin.zig_backend == .stage2_aarch64) {
// TODO: this is just a temporary workaround until we advance the self-hosted backends further along.
return os.windows.GetStdHandle(os.windows.STD_ERROR_HANDLE) catch os.windows.INVALID_HANDLE_VALUE;
}
@ -88,7 +88,7 @@ pub fn getStdErr() File {
fn getStdInHandle() os.fd_t {
if (builtin.os.tag == .windows) {
if (builtin.zig_backend == .stage2_x86_64) {
if (builtin.zig_backend == .stage2_x86_64 or builtin.zig_backend == .stage2_aarch64) {
// TODO: this is just a temporary workaround until we advance the self-hosted backends further along.
return os.windows.GetStdHandle(os.windows.STD_INPUT_HANDLE) catch os.windows.INVALID_HANDLE_VALUE;
}

View File

@ -142,7 +142,8 @@ const MCValue = union(enum) {
/// The value is in memory but requires a linker relocation fixup:
/// * got - the value is referenced indirectly via GOT entry index (the linker emits a got-type reloc)
/// * direct - the value is referenced directly via its symbol index (the linker emits a displacement reloc)
linker_load: struct { @"type": enum { got, direct }, sym_index: u32 },
/// * import - the value is referenced indirectly via import entry index (the linker emits an import-type reloc)
linker_load: struct { @"type": enum { got, direct, import }, sym_index: u32 },
/// The value is one of the stack variables.
///
/// If the type is a pointer, it means the pointer address is in
@ -1117,13 +1118,11 @@ fn truncRegister(
});
},
32, 64 => {
assert(dest_reg.size() == operand_reg.size());
_ = try self.addInst(.{
.tag = .mov_register,
.data = .{ .rr = .{
.rd = dest_reg,
.rn = operand_reg,
.rd = if (int_bits == 32) dest_reg.toW() else dest_reg.toX(),
.rn = if (int_bits == 32) operand_reg.toW() else operand_reg.toX(),
} },
});
},
@ -3719,14 +3718,21 @@ fn store(self: *Self, ptr: MCValue, value: MCValue, ptr_ty: Type, value_ty: Type
const tag: Mir.Inst.Tag = switch (load_struct.@"type") {
.got => .load_memory_ptr_got,
.direct => .load_memory_ptr_direct,
.import => unreachable,
};
const mod = self.bin_file.options.module.?;
const owner_decl = mod.declPtr(self.mod_fn.owner_decl);
const atom_index = switch (self.bin_file.tag) {
.macho => owner_decl.link.macho.sym_index,
.coff => owner_decl.link.coff.sym_index,
else => unreachable, // unsupported target format
};
_ = try self.addInst(.{
.tag = tag,
.data = .{
.payload = try self.addExtra(Mir.LoadMemoryPie{
.register = @enumToInt(src_reg),
.atom_index = mod.declPtr(self.mod_fn.owner_decl).link.macho.sym_index,
.atom_index = atom_index,
.sym_index = load_struct.sym_index,
}),
},
@ -4057,6 +4063,45 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallOptions.
} else {
return self.fail("TODO implement calling bitcasted functions", .{});
}
} else if (self.bin_file.cast(link.File.Coff)) |coff_file| {
if (func_value.castTag(.function)) |func_payload| {
const func = func_payload.data;
const fn_owner_decl = mod.declPtr(func.owner_decl);
try self.genSetReg(Type.initTag(.u64), .x30, .{
.linker_load = .{
.@"type" = .got,
.sym_index = fn_owner_decl.link.coff.sym_index,
},
});
// blr x30
_ = try self.addInst(.{
.tag = .blr,
.data = .{ .reg = .x30 },
});
} else if (func_value.castTag(.extern_fn)) |func_payload| {
const extern_fn = func_payload.data;
const decl_name = mod.declPtr(extern_fn.owner_decl).name;
if (extern_fn.lib_name) |lib_name| {
log.debug("TODO enforce that '{s}' is expected in '{s}' library", .{
decl_name,
lib_name,
});
}
const sym_index = try coff_file.getGlobalSymbol(mem.sliceTo(decl_name, 0));
try self.genSetReg(Type.initTag(.u64), .x30, .{
.linker_load = .{
.@"type" = .import,
.sym_index = sym_index,
},
});
// blr x30
_ = try self.addInst(.{
.tag = .blr,
.data = .{ .reg = .x30 },
});
} else {
return self.fail("TODO implement calling bitcasted functions", .{});
}
} else if (self.bin_file.cast(link.File.Plan9)) |p9| {
if (func_value.castTag(.function)) |func_payload| {
try p9.seeDecl(func_payload.data.owner_decl);
@ -4077,8 +4122,6 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallOptions.
} else {
return self.fail("TODO implement calling bitcasted functions", .{});
}
} else if (self.bin_file.cast(link.File.Coff)) |_| {
return self.fail("TODO implement calling in COFF for {}", .{self.target.cpu.arch});
} else unreachable;
} else {
assert(ty.zigTypeTag() == .Pointer);
@ -5161,14 +5204,21 @@ fn genSetStack(self: *Self, ty: Type, stack_offset: u32, mcv: MCValue) InnerErro
const tag: Mir.Inst.Tag = switch (load_struct.@"type") {
.got => .load_memory_ptr_got,
.direct => .load_memory_ptr_direct,
.import => unreachable,
};
const mod = self.bin_file.options.module.?;
const owner_decl = mod.declPtr(self.mod_fn.owner_decl);
const atom_index = switch (self.bin_file.tag) {
.macho => owner_decl.link.macho.sym_index,
.coff => owner_decl.link.coff.sym_index,
else => unreachable, // unsupported target format
};
_ = try self.addInst(.{
.tag = tag,
.data = .{
.payload = try self.addExtra(Mir.LoadMemoryPie{
.register = @enumToInt(src_reg),
.atom_index = mod.declPtr(self.mod_fn.owner_decl).link.macho.sym_index,
.atom_index = atom_index,
.sym_index = load_struct.sym_index,
}),
},
@ -5268,14 +5318,21 @@ fn genSetReg(self: *Self, ty: Type, reg: Register, mcv: MCValue) InnerError!void
const tag: Mir.Inst.Tag = switch (load_struct.@"type") {
.got => .load_memory_got,
.direct => .load_memory_direct,
.import => .load_memory_import,
};
const mod = self.bin_file.options.module.?;
const owner_decl = mod.declPtr(self.mod_fn.owner_decl);
const atom_index = switch (self.bin_file.tag) {
.macho => owner_decl.link.macho.sym_index,
.coff => owner_decl.link.coff.sym_index,
else => unreachable, // unsupported target format
};
_ = try self.addInst(.{
.tag = tag,
.data = .{
.payload = try self.addExtra(Mir.LoadMemoryPie{
.register = @enumToInt(reg),
.atom_index = mod.declPtr(self.mod_fn.owner_decl).link.macho.sym_index,
.atom_index = atom_index,
.sym_index = load_struct.sym_index,
}),
},
@ -5455,14 +5512,21 @@ fn genSetStackArgument(self: *Self, ty: Type, stack_offset: u32, mcv: MCValue) I
const tag: Mir.Inst.Tag = switch (load_struct.@"type") {
.got => .load_memory_ptr_got,
.direct => .load_memory_ptr_direct,
.import => unreachable,
};
const mod = self.bin_file.options.module.?;
const owner_decl = mod.declPtr(self.mod_fn.owner_decl);
const atom_index = switch (self.bin_file.tag) {
.macho => owner_decl.link.macho.sym_index,
.coff => owner_decl.link.coff.sym_index,
else => unreachable, // unsupported target format
};
_ = try self.addInst(.{
.tag = tag,
.data = .{
.payload = try self.addExtra(Mir.LoadMemoryPie{
.register = @enumToInt(src_reg),
.atom_index = mod.declPtr(self.mod_fn.owner_decl).link.macho.sym_index,
.atom_index = atom_index,
.sym_index = load_struct.sym_index,
}),
},
@ -5775,7 +5839,13 @@ fn lowerDeclRef(self: *Self, tv: TypedValue, decl_index: Module.Decl.Index) Inne
.sym_index = decl.link.macho.sym_index,
} };
} else if (self.bin_file.cast(link.File.Coff)) |_| {
return self.fail("TODO codegen COFF const Decl pointer", .{});
// Because COFF is PIE-always-on, we defer memory address resolution until
// the linker has enough info to perform relocations.
assert(decl.link.coff.sym_index != 0);
return MCValue{ .linker_load = .{
.@"type" = .got,
.sym_index = decl.link.coff.sym_index,
} };
} else if (self.bin_file.cast(link.File.Plan9)) |p9| {
try p9.seeDecl(decl_index);
const got_addr = p9.bases.data + decl.link.plan9.got_index.? * ptr_bytes;
@ -5799,7 +5869,10 @@ fn lowerUnnamedConst(self: *Self, tv: TypedValue) InnerError!MCValue {
.sym_index = local_sym_index,
} };
} else if (self.bin_file.cast(link.File.Coff)) |_| {
return self.fail("TODO lower unnamed const in COFF", .{});
return MCValue{ .linker_load = .{
.@"type" = .direct,
.sym_index = local_sym_index,
} };
} else if (self.bin_file.cast(link.File.Plan9)) |_| {
return self.fail("TODO lower unnamed const in Plan9", .{});
} else {

View File

@ -145,6 +145,7 @@ pub fn emitMir(
.load_memory_got => try emit.mirLoadMemoryPie(inst),
.load_memory_direct => try emit.mirLoadMemoryPie(inst),
.load_memory_import => try emit.mirLoadMemoryPie(inst),
.load_memory_ptr_got => try emit.mirLoadMemoryPie(inst),
.load_memory_ptr_direct => try emit.mirLoadMemoryPie(inst),
@ -674,13 +675,14 @@ fn mirCallExtern(emit: *Emit, inst: Mir.Inst.Index) !void {
assert(emit.mir.instructions.items(.tag)[inst] == .call_extern);
const relocation = emit.mir.instructions.items(.data)[inst].relocation;
const offset = blk: {
const offset = @intCast(u32, emit.code.items.len);
// bl
try emit.writeInstruction(Instruction.bl(0));
break :blk offset;
};
if (emit.bin_file.cast(link.File.MachO)) |macho_file| {
const offset = blk: {
const offset = @intCast(u32, emit.code.items.len);
// bl
try emit.writeInstruction(Instruction.bl(0));
break :blk offset;
};
// Add relocation to the decl.
const atom = macho_file.getAtomForSymbol(.{ .sym_index = relocation.atom_index, .file = null }).?;
const target = macho_file.getGlobalByIndex(relocation.sym_index);
@ -692,8 +694,10 @@ fn mirCallExtern(emit: *Emit, inst: Mir.Inst.Index) !void {
.pcrel = true,
.length = 2,
});
} else if (emit.bin_file.cast(link.File.Coff)) |_| {
unreachable; // Calling imports is handled via `.load_memory_import`
} else {
return emit.fail("Implement call_extern for linking backends != MachO", .{});
return emit.fail("Implement call_extern for linking backends != {{ COFF, MachO }}", .{});
}
}
@ -855,7 +859,9 @@ fn mirLoadMemoryPie(emit: *Emit, inst: Mir.Inst.Index) !void {
try emit.writeInstruction(Instruction.adrp(reg.toX(), 0));
switch (tag) {
.load_memory_got => {
.load_memory_got,
.load_memory_import,
=> {
// ldr reg, reg, offset
try emit.writeInstruction(Instruction.ldr(
reg,
@ -926,6 +932,51 @@ fn mirLoadMemoryPie(emit: *Emit, inst: Mir.Inst.Index) !void {
else => unreachable,
},
});
} else if (emit.bin_file.cast(link.File.Coff)) |coff_file| {
const atom = coff_file.getAtomForSymbol(.{ .sym_index = data.atom_index, .file = null }).?;
const target = switch (tag) {
.load_memory_got,
.load_memory_ptr_got,
.load_memory_direct,
.load_memory_ptr_direct,
=> link.File.Coff.SymbolWithLoc{ .sym_index = data.sym_index, .file = null },
.load_memory_import => coff_file.getGlobalByIndex(data.sym_index),
else => unreachable,
};
try atom.addRelocation(coff_file, .{
.target = target,
.offset = offset,
.addend = 0,
.pcrel = true,
.length = 2,
.@"type" = switch (tag) {
.load_memory_got,
.load_memory_ptr_got,
=> .got_page,
.load_memory_direct,
.load_memory_ptr_direct,
=> .page,
.load_memory_import => .import_page,
else => unreachable,
},
});
try atom.addRelocation(coff_file, .{
.target = target,
.offset = offset + 4,
.addend = 0,
.pcrel = false,
.length = 2,
.@"type" = switch (tag) {
.load_memory_got,
.load_memory_ptr_got,
=> .got_pageoff,
.load_memory_direct,
.load_memory_ptr_direct,
=> .pageoff,
.load_memory_import => .import_pageoff,
else => unreachable,
},
});
} else {
return emit.fail("TODO implement load_memory for PIE GOT indirection on this platform", .{});
}

View File

@ -84,6 +84,10 @@ pub const Inst = struct {
///
/// Payload is `LoadMemoryPie`
load_memory_direct,
/// Loads the contents into a register via an import table entry
///
/// Payload is `LoadMemoryPie`
load_memory_import,
/// Loads the address into a register
///
/// Payload is `LoadMemoryPie`

View File

@ -9,9 +9,11 @@ const fmt = std.fmt;
const log = std.log.scoped(.link);
const math = std.math;
const mem = std.mem;
const meta = std.meta;
const Allocator = std.mem.Allocator;
const aarch64 = @import("../arch/aarch64/bits.zig");
const codegen = @import("../codegen.zig");
const link = @import("../link.zig");
const lld = @import("Coff/lld.zig");
@ -125,9 +127,29 @@ const Entry = struct {
pub const Reloc = struct {
@"type": enum {
// x86, x86_64
/// RIP-relative displacement to a GOT pointer
got,
direct,
/// RIP-relative displacement to an import pointer
import,
// aarch64
/// PC-relative distance to target page in the GOT section
got_page,
/// Offset to a GOT pointer relative to the start of a page in the GOT section
got_pageoff,
/// PC-relative distance to target page in a section (e.g., .rdata)
page,
/// Offset to a pointer relative to the start of a page in a section (e.g., .rdata)
pageoff,
/// PC-relative distance to target page in an import section
import_page,
/// Offset to a pointer relative to the start of a page in an import section (e.g., .rdata)
import_pageoff,
// common
/// Absolute pointer value
direct,
},
target: SymbolWithLoc,
offset: u32,
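
For readers new to these relocation kinds: on aarch64 an address is materialized by an adrp instruction, which covers the 4 KiB-page delta between the instruction and the target, followed by an add or ldr carrying the low 12 bits. A minimal sketch of that split, mirroring the arithmetic resolveRelocs applies further down (the function and test names here are illustrative, not part of the commit):

const std = @import("std");

/// adrp encodes the signed 21-bit page delta between the relocation site and the target.
fn pageDelta(source_vaddr: u64, target_vaddr: u64) i21 {
    const source_page = @intCast(i32, source_vaddr >> 12);
    const target_page = @intCast(i32, target_vaddr >> 12);
    return @intCast(i21, target_page - source_page);
}

/// The paired add/ldr carries the low 12 bits of the target address.
fn pageOffset(target_vaddr: u64) u12 {
    return @truncate(u12, target_vaddr);
}

test "page/pageoff split" {
    try std.testing.expectEqual(@as(i21, 2), pageDelta(0x1000, 0x3008));
    try std.testing.expectEqual(@as(u12, 0x008), pageOffset(0x3008));
}
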
@ -139,9 +161,20 @@ pub const Reloc = struct {
/// Returns an Atom which is the target node of this relocation edge (if any).
fn getTargetAtom(self: Reloc, coff_file: *Coff) ?*Atom {
switch (self.@"type") {
.got => return coff_file.getGotAtomForSymbol(self.target),
.direct => return coff_file.getAtomForSymbol(self.target),
.import => return coff_file.getImportAtomForSymbol(self.target),
.got,
.got_page,
.got_pageoff,
=> return coff_file.getGotAtomForSymbol(self.target),
.direct,
.page,
.pageoff,
=> return coff_file.getAtomForSymbol(self.target),
.import,
.import_page,
.import_pageoff,
=> return coff_file.getImportAtomForSymbol(self.target),
}
}
};
@ -151,8 +184,6 @@ const BaseRelocationTable = std.AutoHashMapUnmanaged(*Atom, std.ArrayListUnmanag
const UnnamedConstTable = std.AutoHashMapUnmanaged(Module.Decl.Index, std.ArrayListUnmanaged(*Atom));
const default_file_alignment: u16 = 0x200;
const default_image_base_dll: u64 = 0x10000000;
const default_image_base_exe: u64 = 0x400000;
const default_size_of_stack_reserve: u32 = 0x1000000;
const default_size_of_stack_commit: u32 = 0x1000;
const default_size_of_heap_reserve: u32 = 0x100000;
@ -866,12 +897,14 @@ fn resolveRelocs(self: *Coff, atom: *Atom) !void {
for (relocs.items) |*reloc| {
if (!reloc.dirty) continue;
const source_vaddr = source_sym.value + reloc.offset;
const target_atom = reloc.getTargetAtom(self) orelse continue;
const target_vaddr = target_atom.getSymbol(self).value;
const target_vaddr_with_addend = target_vaddr + reloc.addend;
const image_base = self.getImageBase();
log.debug(" ({x}: [() => 0x{x} ({s})) ({s}) (in file at 0x{x})", .{
source_sym.value + reloc.offset,
source_vaddr,
target_vaddr_with_addend,
self.getSymbolName(reloc.target),
@tagName(reloc.@"type"),
@ -880,30 +913,137 @@ fn resolveRelocs(self: *Coff, atom: *Atom) !void {
reloc.dirty = false;
if (reloc.pcrel) {
const source_vaddr = source_sym.value + reloc.offset;
const disp =
@intCast(i32, target_vaddr_with_addend) - @intCast(i32, source_vaddr) - 4;
try self.base.file.?.pwriteAll(mem.asBytes(&disp), file_offset + reloc.offset);
continue;
}
switch (self.base.options.target.cpu.arch) {
.aarch64 => {
var buffer: [@sizeOf(u64)]u8 = undefined;
switch (reloc.length) {
2 => {
const amt = try self.base.file.?.preadAll(buffer[0..4], file_offset + reloc.offset);
if (amt != 4) return error.InputOutput;
},
3 => {
const amt = try self.base.file.?.preadAll(&buffer, file_offset + reloc.offset);
if (amt != 8) return error.InputOutput;
},
else => unreachable,
}
switch (self.ptr_width) {
.p32 => try self.base.file.?.pwriteAll(
mem.asBytes(&@intCast(u32, target_vaddr_with_addend + default_image_base_exe)),
file_offset + reloc.offset,
),
.p64 => switch (reloc.length) {
2 => try self.base.file.?.pwriteAll(
mem.asBytes(&@truncate(u32, target_vaddr_with_addend + default_image_base_exe)),
file_offset + reloc.offset,
),
3 => try self.base.file.?.pwriteAll(
mem.asBytes(&(target_vaddr_with_addend + default_image_base_exe)),
file_offset + reloc.offset,
),
else => unreachable,
switch (reloc.@"type") {
.got_page, .import_page, .page => {
const source_page = @intCast(i32, source_vaddr >> 12);
const target_page = @intCast(i32, target_vaddr_with_addend >> 12);
const pages = @bitCast(u21, @intCast(i21, target_page - source_page));
var inst = aarch64.Instruction{
.pc_relative_address = mem.bytesToValue(meta.TagPayload(
aarch64.Instruction,
aarch64.Instruction.pc_relative_address,
), buffer[0..4]),
};
inst.pc_relative_address.immhi = @truncate(u19, pages >> 2);
inst.pc_relative_address.immlo = @truncate(u2, pages);
mem.writeIntLittle(u32, buffer[0..4], inst.toU32());
},
.got_pageoff, .import_pageoff, .pageoff => {
assert(!reloc.pcrel);
const narrowed = @truncate(u12, @intCast(u64, target_vaddr_with_addend));
if (isArithmeticOp(buffer[0..4])) {
var inst = aarch64.Instruction{
.add_subtract_immediate = mem.bytesToValue(meta.TagPayload(
aarch64.Instruction,
aarch64.Instruction.add_subtract_immediate,
), buffer[0..4]),
};
inst.add_subtract_immediate.imm12 = narrowed;
mem.writeIntLittle(u32, buffer[0..4], inst.toU32());
} else {
var inst = aarch64.Instruction{
.load_store_register = mem.bytesToValue(meta.TagPayload(
aarch64.Instruction,
aarch64.Instruction.load_store_register,
), buffer[0..4]),
};
const offset: u12 = blk: {
if (inst.load_store_register.size == 0) {
if (inst.load_store_register.v == 1) {
// 128-bit SIMD is scaled by 16.
break :blk @divExact(narrowed, 16);
}
// Otherwise, 8-bit SIMD or ldrb.
break :blk narrowed;
} else {
const denom: u4 = math.powi(u4, 2, inst.load_store_register.size) catch unreachable;
break :blk @divExact(narrowed, denom);
}
};
inst.load_store_register.offset = offset;
mem.writeIntLittle(u32, buffer[0..4], inst.toU32());
}
},
.direct => {
assert(!reloc.pcrel);
switch (reloc.length) {
2 => mem.writeIntLittle(
u32,
buffer[0..4],
@truncate(u32, target_vaddr_with_addend + image_base),
),
3 => mem.writeIntLittle(u64, &buffer, target_vaddr_with_addend + image_base),
else => unreachable,
}
},
.got => unreachable,
.import => unreachable,
}
switch (reloc.length) {
2 => try self.base.file.?.pwriteAll(buffer[0..4], file_offset + reloc.offset),
3 => try self.base.file.?.pwriteAll(&buffer, file_offset + reloc.offset),
else => unreachable,
}
},
.x86_64, .x86 => {
switch (reloc.@"type") {
.got_page => unreachable,
.got_pageoff => unreachable,
.page => unreachable,
.pageoff => unreachable,
.import_page => unreachable,
.import_pageoff => unreachable,
.got, .import => {
assert(reloc.pcrel);
const disp = @intCast(i32, target_vaddr_with_addend) - @intCast(i32, source_vaddr) - 4;
try self.base.file.?.pwriteAll(mem.asBytes(&disp), file_offset + reloc.offset);
},
.direct => {
if (reloc.pcrel) {
const disp = @intCast(i32, target_vaddr_with_addend) - @intCast(i32, source_vaddr) - 4;
try self.base.file.?.pwriteAll(mem.asBytes(&disp), file_offset + reloc.offset);
} else switch (self.ptr_width) {
.p32 => try self.base.file.?.pwriteAll(
mem.asBytes(&@intCast(u32, target_vaddr_with_addend + image_base)),
file_offset + reloc.offset,
),
.p64 => switch (reloc.length) {
2 => try self.base.file.?.pwriteAll(
mem.asBytes(&@truncate(u32, target_vaddr_with_addend + image_base)),
file_offset + reloc.offset,
),
3 => try self.base.file.?.pwriteAll(
mem.asBytes(&(target_vaddr_with_addend + image_base)),
file_offset + reloc.offset,
),
else => unreachable,
},
}
},
}
},
else => unreachable, // unhandled target architecture
}
}
}
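
The load_store_register branch above divides the page offset because the LDR/STR unsigned-offset form encodes imm12 in units of the access size. A minimal sketch of that scaling under the same assumptions as the code above (aligned offsets, the size/v fields of load_store_register); names are illustrative:

const std = @import("std");
const math = std.math;

/// Converts a byte offset into the imm12 field of an LDR/STR (unsigned offset) encoding.
fn scaledLdrOffset(byte_offset: u12, size: u2, v: u1) u12 {
    if (size == 0 and v == 1) return @divExact(byte_offset, 16); // 128-bit SIMD is scaled by 16
    if (size == 0) return byte_offset; // 8-bit SIMD and ldrb are unscaled
    const denom: u4 = math.powi(u4, 2, size) catch unreachable;
    return @divExact(byte_offset, denom);
}

test "imm12 is in units of the access size" {
    try std.testing.expectEqual(@as(u12, 0x3f), scaledLdrOffset(0x1f8, 3, 0)); // 64-bit ldr
    try std.testing.expectEqual(@as(u12, 0x1f8), scaledLdrOffset(0x1f8, 0, 0)); // ldrb
}
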
@ -1831,11 +1971,7 @@ fn writeHeader(self: *Coff) !void {
const subsystem: coff.Subsystem = .WINDOWS_CUI;
const size_of_image: u32 = self.getSizeOfImage();
const size_of_headers: u32 = mem.alignForwardGeneric(u32, self.getSizeOfHeaders(), default_file_alignment);
const image_base = self.base.options.image_base_override orelse switch (self.base.options.output_mode) {
.Exe => default_image_base_exe,
.Lib => default_image_base_dll,
else => unreachable,
};
const image_base = self.getImageBase();
const base_of_code = self.sections.get(self.text_section_index.?).header.virtual_address;
const base_of_data = self.sections.get(self.data_section_index.?).header.virtual_address;
@ -2042,6 +2178,19 @@ pub fn getEntryPoint(self: Coff) ?SymbolWithLoc {
return self.globals.items[global_index];
}
pub fn getImageBase(self: Coff) u64 {
const image_base: u64 = self.base.options.image_base_override orelse switch (self.base.options.output_mode) {
.Exe => switch (self.base.options.target.cpu.arch) {
.aarch64 => @as(u64, 0x140000000),
.x86_64, .x86 => 0x400000,
else => unreachable, // unsupported target architecture
},
.Lib => 0x10000000,
.Obj => 0,
};
return image_base;
}
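
A note on how this base interacts with the fixups above (illustrative arithmetic only): the symbol values the linker tracks are image-relative, so the PC-relative page/pageoff relocations never need the base (it is page-aligned, so it cancels out of the delta), while the absolute .direct case must add it back to produce a pointer that is valid at the default load address.

test "image base matters only for absolute fixups" {
    const std = @import("std");
    const image_base: u64 = 0x140000000; // aarch64 .Exe default above
    const source_vaddr: u64 = 0x1000; // image-relative, as resolveRelocs uses
    const target_vaddr: u64 = 0x3008;
    // A page-aligned base applied to both sides leaves the page delta unchanged.
    try std.testing.expectEqual(
        (target_vaddr >> 12) - (source_vaddr >> 12),
        ((image_base + target_vaddr) >> 12) - ((image_base + source_vaddr) >> 12),
    );
    // The .direct fixup writes an absolute pointer, so the base is included.
    try std.testing.expectEqual(@as(u64, 0x140003008), image_base + target_vaddr);
}
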
/// Returns pointer-to-symbol described by `sym_loc` descriptor.
pub fn getSymbolPtr(self: *Coff, sym_loc: SymbolWithLoc) *coff.Symbol {
assert(sym_loc.file == null); // TODO linking object files
@ -2248,3 +2397,8 @@ fn logSections(self: *Coff) void {
});
}
}
inline fn isArithmeticOp(inst: *const [4]u8) bool {
const group_decode = @truncate(u5, inst[3]);
return ((group_decode >> 2) == 4);
}
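
What this helper decodes (a standalone sketch for illustration; the instruction encodings in the test are standard A64 and not taken from the commit): bits 28..26 of an A64 instruction select the major encoding group, and 0b100 is the Data Processing -- Immediate group, which is how the pageoff fixup above knows whether to patch an add immediate or the offset field of a load/store.

const std = @import("std");

// Copied from above so the sketch is self-contained.
inline fn isArithmeticOp(inst: *const [4]u8) bool {
    const group_decode = @truncate(u5, inst[3]);
    return ((group_decode >> 2) == 4);
}

test "group decode distinguishes add-immediate from ldr" {
    var buf: [4]u8 = undefined;
    std.mem.writeIntLittle(u32, &buf, 0x91002000); // add x0, x0, #8
    try std.testing.expect(isArithmeticOp(&buf));
    std.mem.writeIntLittle(u32, &buf, 0xf9400400); // ldr x0, [x0, #8]
    try std.testing.expect(!isArithmeticOp(&buf));
}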