x86_64: finish rolling out all MIR assembly helpers

Jakub Konka 2023-03-09 23:56:55 +01:00
parent 022b308d6a
commit d0e7212539
3 changed files with 225 additions and 321 deletions


@ -491,6 +491,37 @@ fn asmRegisterImmediate(self: *Self, tag: Mir.Inst.Tag, reg: Register, imm: Imme
});
}
fn asmRegisterRegisterImmediate(
self: *Self,
tag: Mir.Inst.Tag,
reg1: Register,
reg2: Register,
imm: Immediate,
) !void {
const ops: Mir.Inst.Ops = switch (imm) {
.signed => .rri_s,
.unsigned => .rri_u,
};
const data: Mir.Inst.Data = switch (ops) {
.rri_s => .{ .rri_s = .{
.r1 = reg1,
.r2 = reg2,
.imm = imm.signed,
} },
.rri_u => .{ .rri_u = .{
.r1 = reg1,
.r2 = reg2,
.imm = @intCast(u32, imm.unsigned),
} },
else => unreachable,
};
_ = try self.addInst(.{
.tag = tag,
.ops = ops,
.data = data,
});
}
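As an aside, here is a minimal self-contained sketch of the dispatch used in the helper above, with stand-in types rather than the compiler's own Mir definitions (illustrative only): the Immediate tag picks the Ops variant, and that variant decides which Data payload gets written.

const std = @import("std");

// Stand-ins for the MIR types; illustrative only.
const Immediate = union(enum) {
    signed: i32,
    unsigned: u64,
};

const Ops = enum { rri_s, rri_u };

fn selectOps(imm: Immediate) Ops {
    return switch (imm) {
        .signed => .rri_s,
        .unsigned => .rri_u,
    };
}

test "immediate tag selects the rri variant" {
    try std.testing.expectEqual(Ops.rri_s, selectOps(.{ .signed = -1 }));
    try std.testing.expectEqual(Ops.rri_u, selectOps(.{ .unsigned = 16 }));
}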
fn asmMemory(self: *Self, tag: Mir.Inst.Tag, m: Memory) !void {
const ops: Mir.Inst.Ops = switch (m) {
.sib => .m_sib,
@ -2767,27 +2798,20 @@ fn loadMemPtrIntoRegister(self: *Self, reg: Register, ptr_ty: Type, ptr: MCValue
const atom = try coff_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl);
break :blk coff_file.getAtom(atom).getSymbolIndex().?;
} else unreachable;
-const flags: u2 = switch (load_struct.type) {
-.got => 0b00,
-.direct => 0b01,
-.import => 0b10,
-};
const ops: Mir.Inst.Ops = switch (load_struct.type) {
.got => .got_reloc,
.direct => .direct_reloc,
.import => .import_reloc,
};
-_ = abi_size;
-_ = atom_index;
-_ = flags;
-// _ = try self.addInst(.{
-// .tag = .lea_pic,
-// .ops = Mir.Inst.Ops.encode(.{
-// .reg1 = registerAlias(reg, abi_size),
-// .flags = flags,
-// }),
-// .data = .{
-// .relocation = .{
-// .atom_index = atom_index,
-// .sym_index = load_struct.sym_index,
-// },
-// },
-// });
_ = try self.addInst(.{
.tag = .lea_linker,
.ops = ops,
.data = .{ .payload = try self.addExtra(Mir.LeaRegisterReloc{
.reg = @enumToInt(registerAlias(reg, abi_size)),
.atom_index = atom_index,
.sym_index = load_struct.sym_index,
}) },
});
},
.memory => |addr| {
// TODO: in case the address fits in an imm32 we can use [ds:imm32]
@ -3690,18 +3714,15 @@ fn genIntMulComplexOpMir(self: *Self, dst_ty: Type, dst_mcv: MCValue, src_mcv: M
registerAlias(src_reg, abi_size),
),
.immediate => |imm| {
-// TODO take into account the type's ABI size when selecting the register alias
-// register, immediate
if (math.minInt(i32) <= imm and imm <= math.maxInt(i32)) {
-// _ = try self.addInst(.{
-// .tag = .imul_complex,
-// .ops = Mir.Inst.Ops.encode(.{
-// .reg1 = dst_reg.to32(),
-// .reg2 = dst_reg.to32(),
-// .flags = 0b10,
-// }),
-// .data = .{ .imm = @intCast(u32, imm) },
-// });
// TODO take into account the type's ABI size when selecting the register alias
// register, immediate
try self.asmRegisterRegisterImmediate(
.imul,
dst_reg.to32(),
dst_reg.to32(),
Immediate.u(@intCast(u32, imm)),
);
} else {
// TODO verify we don't spill and assign to the same register as dst_mcv
const src_reg = try self.copyToTmpRegister(dst_ty, src_mcv);
@ -4034,16 +4055,14 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
const sym_index = try macho_file.getGlobalSymbol(mem.sliceTo(decl_name, 0));
const atom = try macho_file.getOrCreateAtomForDecl(self.mod_fn.owner_decl);
const atom_index = macho_file.getAtom(atom).getSymbolIndex().?;
-_ = sym_index;
-_ = atom_index;
-// _ = try self.addInst(.{
-// .tag = .call_extern,
-// .ops = undefined,
-// .data = .{ .relocation = .{
-// .atom_index = atom_index,
-// .sym_index = sym_index,
-// } },
-// });
_ = try self.addInst(.{
.tag = .call_extern,
.ops = undefined,
.data = .{ .relocation = .{
.atom_index = atom_index,
.sym_index = sym_index,
} },
});
} else {
return self.fail("TODO implement calling extern functions", .{});
}
@ -5528,7 +5547,6 @@ fn genInlineMemcpy(
const index_reg = regs[2].to64();
const count_reg = regs[3].to64();
const tmp_reg = regs[4].to8();
-_ = tmp_reg;
switch (dst_ptr) {
.memory, .linker_load => {
@ -5575,7 +5593,6 @@ fn genInlineMemcpy(
}
try self.genSetReg(Type.usize, count_reg, len);
try self.asmRegisterImmediate(.mov, index_reg, Immediate.u(0));
const loop_start = try self.addInst(.{
@ -5595,26 +5612,22 @@ fn genInlineMemcpy(
} },
});
// mov tmp, [addr + index_reg]
-// _ = try self.addInst(.{
-// .tag = .mov_scale_src,
-// .ops = Mir.Inst.Ops.encode(.{
-// .reg1 = tmp_reg.to8(),
-// .reg2 = src_addr_reg,
-// }),
-// .data = .{ .payload = try self.addExtra(Mir.IndexRegisterDisp.encode(index_reg, 0)) },
-// });
// mov [stack_offset + index_reg], tmp
-// _ = try self.addInst(.{
-// .tag = .mov_scale_dst,
-// .ops = Mir.Inst.Ops.encode(.{
-// .reg1 = dst_addr_reg,
-// .reg2 = tmp_reg.to8(),
-// }),
-// .data = .{ .payload = try self.addExtra(Mir.IndexRegisterDisp.encode(index_reg, 0)) },
-// });
try self.asmRegisterMemory(.mov, tmp_reg.to8(), Memory.sib(.byte, .{
.base = src_addr_reg,
.scale_index = .{
.scale = 1,
.index = index_reg,
},
.disp = 0,
}));
try self.asmMemoryRegister(.mov, Memory.sib(.byte, .{
.base = dst_addr_reg,
.scale_index = .{
.scale = 1,
.index = index_reg,
},
.disp = 0,
}), tmp_reg.to8());
try self.asmRegisterImmediate(.add, index_reg, Immediate.u(1));
try self.asmRegisterImmediate(.sub, count_reg, Immediate.u(1));
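For orientation, the two helper calls above describe byte-granular SIB operands. Below is a hedged, self-contained sketch of the address arithmetic and the copy loop they encode, written in plain Zig rather than the backend's Memory type:

const std = @import("std");

// Illustrative only: the effective address a SIB operand describes.
fn effectiveAddress(base: i64, scale: i64, index: i64, disp: i64) i64 {
    return base + scale * index + disp;
}

test "byte copy loop mirrors [base + 1*index + 0] addressing" {
    const src = "hello";
    var dst: [5]u8 = undefined;
    var index: usize = 0;
    var count: usize = src.len;
    while (count > 0) {
        // mov tmp, [src + index]; mov [dst + index], tmp
        dst[index] = src[index];
        index += 1;
        count -= 1;
    }
    try std.testing.expectEqualSlices(u8, src, &dst);
    try std.testing.expectEqual(@as(i64, 0x1005), effectiveAddress(0x1000, 1, 5, 0));
}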
@ -5655,15 +5668,10 @@ fn genInlineMemset(
try self.loadMemPtrIntoRegister(addr_reg, Type.usize, dst_ptr);
},
.ptr_stack_offset, .stack_offset => |off| {
-_ = off;
-// _ = try self.addInst(.{
-// .tag = .lea,
-// .ops = Mir.Inst.Ops.encode(.{
-// .reg1 = addr_reg.to64(),
-// .reg2 = opts.dest_stack_base orelse .rbp,
-// }),
-// .data = .{ .disp = -off },
-// });
try self.asmRegisterMemory(.lea, addr_reg.to64(), Memory.sib(.qword, .{
.base = opts.dest_stack_base orelse .rbp,
.disp = -off,
}));
},
.register => |reg| {
try self.asmRegisterRegister(
@ -5703,18 +5711,14 @@ fn genInlineMemset(
if (x > math.maxInt(i32)) {
return self.fail("TODO inline memset for value immediate larger than 32bits", .{});
}
-// mov byte ptr [rbp + index_reg + stack_offset], imm
-// _ = try self.addInst(.{
-// .tag = .mov_mem_index_imm,
-// .ops = Mir.Inst.Ops.encode(.{
-// .reg1 = addr_reg,
-// }),
-// .data = .{ .payload = try self.addExtra(Mir.IndexRegisterDispImm.encode(
-// index_reg,
-// 0,
-// @intCast(u32, x),
-// )) },
-// });
try self.asmMemoryImmediate(.mov, Memory.sib(.byte, .{
.base = addr_reg,
.scale_index = .{
.scale = 1,
.index = index_reg,
},
.disp = 0,
}), Immediate.u(@intCast(u8, x)));
},
else => return self.fail("TODO inline memset for value of type {}", .{value}),
}
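Similarly, a minimal sketch of the fallback loop the inline memset encodes: reject values that do not fit a signed 32-bit immediate, then store the low byte at [addr + index] each iteration. Illustrative plain Zig, using the same pre-0.11 @intCast style as the surrounding diff.

const std = @import("std");
const math = std.math;

// Illustrative only; like the MIR above, assumes the value already fits in a byte.
fn inlineMemsetByte(buf: []u8, value: u64) !void {
    if (value > math.maxInt(i32)) return error.ImmediateTooLarge;
    const byte = @intCast(u8, value);
    var index: usize = 0;
    while (index < buf.len) : (index += 1) {
        // mov byte ptr [addr + index], imm
        buf[index] = byte;
    }
}

test "memset stores the byte value" {
    var buf: [4]u8 = undefined;
    try inlineMemsetByte(&buf, 0xAA);
    try std.testing.expectEqualSlices(u8, &([_]u8{0xAA} ** 4), &buf);
}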


@ -120,6 +120,10 @@ pub fn lowerMir(emit: *Emit) InnerError!void {
.jmp_reloc => try emit.mirJmpReloc(inst),
.call_extern => try emit.mirCallExtern(inst),
.lea_linker => try emit.mirLeaLinker(inst),
.mov_moffs => try emit.mirMovMoffs(inst),
.movsx => try emit.mirMovsx(inst),
@ -213,6 +217,16 @@ fn mirEncodeGeneric(emit: *Emit, tag: Mir.Inst.Tag, inst: Mir.Inst.Index) InnerE
.{ .imm = Immediate.u(Mir.Imm64.decode(imm64)) },
};
},
.rri_s => operands[0..3].* = .{
.{ .reg = data.rri_s.r1 },
.{ .reg = data.rri_s.r2 },
.{ .imm = Immediate.s(data.rri_s.imm) },
},
.rri_u => operands[0..3].* = .{
.{ .reg = data.rri_u.r1 },
.{ .reg = data.rri_u.r2 },
.{ .imm = Immediate.u(data.rri_u.imm) },
},
.m_sib => {
const msib = emit.mir.extraData(Mir.MemorySib, data.payload).data;
operands[0] = .{ .mem = Mir.MemorySib.decode(msib) };
@ -402,47 +416,44 @@ fn mirJmpReloc(emit: *Emit, inst: Mir.Inst.Index) InnerError!void {
}
}
-// fn mirCallExtern(emit: *Emit, inst: Mir.Inst.Index) InnerError!void {
-// const tag = emit.mir.instructions.items(.tag)[inst];
-// assert(tag == .call_extern);
-// const relocation = emit.mir.instructions.items(.data)[inst].relocation;
fn mirCallExtern(emit: *Emit, inst: Mir.Inst.Index) InnerError!void {
const relocation = emit.mir.instructions.items(.data)[inst].relocation;
-// const offset = blk: {
-// // callq
-// try emit.encode(.call, .{
-// .op1 = .{ .imm = Immediate.s(0) },
-// });
-// break :blk @intCast(u32, emit.code.items.len) - 4;
-// };
const offset = blk: {
try emit.encode(.call, .{
.op1 = .{ .imm = Immediate.s(0) },
});
break :blk @intCast(u32, emit.code.items.len) - 4;
};
-// if (emit.bin_file.cast(link.File.MachO)) |macho_file| {
-// // Add relocation to the decl.
-// const atom_index = macho_file.getAtomIndexForSymbol(.{ .sym_index = relocation.atom_index, .file = null }).?;
-// const target = macho_file.getGlobalByIndex(relocation.sym_index);
-// try link.File.MachO.Atom.addRelocation(macho_file, atom_index, .{
-// .type = @enumToInt(std.macho.reloc_type_x86_64.X86_64_RELOC_BRANCH),
-// .target = target,
-// .offset = offset,
-// .addend = 0,
-// .pcrel = true,
-// .length = 2,
-// });
-// } else if (emit.bin_file.cast(link.File.Coff)) |coff_file| {
-// // Add relocation to the decl.
-// const atom_index = coff_file.getAtomIndexForSymbol(.{ .sym_index = relocation.atom_index, .file = null }).?;
-// const target = coff_file.getGlobalByIndex(relocation.sym_index);
-// try link.File.Coff.Atom.addRelocation(coff_file, atom_index, .{
-// .type = .direct,
-// .target = target,
-// .offset = offset,
-// .addend = 0,
-// .pcrel = true,
-// .length = 2,
-// });
-// } else {
-// return emit.fail("TODO implement call_extern for linking backends different than MachO and COFF", .{});
-// }
-// }
if (emit.bin_file.cast(link.File.MachO)) |macho_file| {
// Add relocation to the decl.
const atom_index = macho_file.getAtomIndexForSymbol(.{ .sym_index = relocation.atom_index, .file = null }).?;
const target = macho_file.getGlobalByIndex(relocation.sym_index);
try link.File.MachO.Atom.addRelocation(macho_file, atom_index, .{
.type = @enumToInt(std.macho.reloc_type_x86_64.X86_64_RELOC_BRANCH),
.target = target,
.offset = offset,
.addend = 0,
.pcrel = true,
.length = 2,
});
} else if (emit.bin_file.cast(link.File.Coff)) |coff_file| {
// Add relocation to the decl.
const atom_index = coff_file.getAtomIndexForSymbol(.{ .sym_index = relocation.atom_index, .file = null }).?;
const target = coff_file.getGlobalByIndex(relocation.sym_index);
try link.File.Coff.Atom.addRelocation(coff_file, atom_index, .{
.type = .direct,
.target = target,
.offset = offset,
.addend = 0,
.pcrel = true,
.length = 2,
});
} else {
return emit.fail("TODO implement call_extern for linking backends different than MachO and COFF", .{});
}
}
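A hedged standalone sketch of the placeholder pattern used above: `call rel32` (opcode 0xE8) is emitted with a zero displacement, and the relocation offset is the position of those last four bytes, i.e. `code.items.len - 4`. The real emitter goes through `emit.encode`; this only illustrates the offset arithmetic.

const std = @import("std");

// Illustrative only: emit `call rel32` with a zero placeholder and return the
// offset of the disp32 field that the linker relocation will later patch.
fn emitCallPlaceholder(code: *std.ArrayList(u8)) !u32 {
    try code.appendSlice(&[_]u8{ 0xe8, 0x00, 0x00, 0x00, 0x00 });
    return @intCast(u32, code.items.len) - 4;
}

test "relocation offset points at the rel32 field" {
    var code = std.ArrayList(u8).init(std.testing.allocator);
    defer code.deinit();
    const offset = try emitCallPlaceholder(&code);
    try std.testing.expectEqual(@as(u32, 1), offset);
    try std.testing.expectEqual(@as(usize, 5), code.items.len);
}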
fn mirPushPopRegisterList(emit: *Emit, tag: Mir.Inst.Tag, inst: Mir.Inst.Index) InnerError!void {
const payload = emit.mir.instructions.items(.data)[inst].payload;
@ -474,194 +485,63 @@ fn mirPushPopRegisterList(emit: *Emit, tag: Mir.Inst.Tag, inst: Mir.Inst.Index)
}
}
-// fn mirJmpCall(emit: *Emit, mnemonic: Instruction.Mnemonic, inst: Mir.Inst.Index) InnerError!void {
-// const ops = emit.mir.instructions.items(.ops)[inst].decode();
-// switch (ops.flags) {
-// 0b00 => {
-// const target = emit.mir.instructions.items(.data)[inst].inst;
-// const source = emit.code.items.len;
-// try emit.encode(mnemonic, .{
-// .op1 = .{ .imm = Immediate.s(0) },
-// });
-// try emit.relocs.append(emit.bin_file.allocator, .{
-// .source = source,
-// .target = target,
-// .offset = emit.code.items.len - 4,
-// .length = 5,
-// });
-// },
-// 0b01 => {
-// if (ops.reg1 == .none) {
-// const disp = emit.mir.instructions.items(.data)[inst].disp;
-// return emit.encode(mnemonic, .{
-// .op1 = .{ .mem = Memory.sib(.qword, .{ .disp = disp }) },
-// });
-// }
-// return emit.encode(mnemonic, .{
-// .op1 = .{ .reg = ops.reg1 },
-// });
-// },
-// 0b10 => {
-// const disp = emit.mir.instructions.items(.data)[inst].disp;
-// return emit.encode(mnemonic, .{
-// .op1 = .{ .mem = Memory.sib(.qword, .{
-// .base = ops.reg1,
-// .disp = disp,
-// }) },
-// });
-// },
-// 0b11 => return emit.fail("TODO unused variant jmp/call 0b11", .{}),
-// }
-// }
fn mirLeaLinker(emit: *Emit, inst: Mir.Inst.Index) InnerError!void {
const ops = emit.mir.instructions.items(.ops)[inst];
const payload = emit.mir.instructions.items(.data)[inst].payload;
const metadata = emit.mir.extraData(Mir.LeaRegisterReloc, payload).data;
const reg = @intToEnum(Register, metadata.reg);
-// fn mirLea(emit: *Emit, inst: Mir.Inst.Index) InnerError!void {
-// const tag = emit.mir.instructions.items(.tag)[inst];
-// assert(tag == .lea);
-// const ops = emit.mir.instructions.items(.ops)[inst].decode();
-// switch (ops.flags) {
-// 0b00 => {
-// const disp = emit.mir.instructions.items(.data)[inst].disp;
-// const src_reg: ?Register = if (ops.reg2 != .none) ops.reg2 else null;
-// return emit.encode(.lea, .{
-// .op1 = .{ .reg = ops.reg1 },
-// .op2 = .{ .mem = Memory.sib(Memory.PtrSize.fromBitSize(ops.reg1.bitSize()), .{
-// .base = src_reg,
-// .disp = disp,
-// }) },
-// });
-// },
-// 0b01 => {
-// const start_offset = emit.code.items.len;
-// try emit.encode(.lea, .{
-// .op1 = .{ .reg = ops.reg1 },
-// .op2 = .{ .mem = Memory.rip(Memory.PtrSize.fromBitSize(ops.reg1.bitSize()), 0) },
-// });
-// const end_offset = emit.code.items.len;
-// // Backpatch the displacement
-// const payload = emit.mir.instructions.items(.data)[inst].payload;
-// const imm = emit.mir.extraData(Mir.Imm64, payload).data.decode();
-// const disp = @intCast(i32, @intCast(i64, imm) - @intCast(i64, end_offset - start_offset));
-// mem.writeIntLittle(i32, emit.code.items[end_offset - 4 ..][0..4], disp);
-// },
-// 0b10 => {
-// const payload = emit.mir.instructions.items(.data)[inst].payload;
-// const index_reg_disp = emit.mir.extraData(Mir.IndexRegisterDisp, payload).data.decode();
-// const src_reg: ?Register = if (ops.reg2 != .none) ops.reg2 else null;
-// const scale_index = Memory.ScaleIndex{
-// .scale = 1,
-// .index = index_reg_disp.index,
-// };
-// return emit.encode(.lea, .{
-// .op1 = .{ .reg = ops.reg1 },
-// .op2 = .{ .mem = Memory.sib(Memory.PtrSize.fromBitSize(ops.reg1.bitSize()), .{
-// .base = src_reg,
-// .scale_index = scale_index,
-// .disp = index_reg_disp.disp,
-// }) },
-// });
-// },
-// 0b11 => return emit.fail("TODO unused LEA variant 0b11", .{}),
-// }
-// }
try emit.encode(.lea, .{
.op1 = .{ .reg = reg },
.op2 = .{ .mem = Memory.rip(Memory.PtrSize.fromBitSize(reg.bitSize()), 0) },
});
-// fn mirLeaPic(emit: *Emit, inst: Mir.Inst.Index) InnerError!void {
-// const tag = emit.mir.instructions.items(.tag)[inst];
-// assert(tag == .lea_pic);
-// const ops = emit.mir.instructions.items(.ops)[inst].decode();
-// const relocation = emit.mir.instructions.items(.data)[inst].relocation;
const end_offset = emit.code.items.len;
-// switch (ops.flags) {
-// 0b00, 0b01, 0b10 => {},
-// else => return emit.fail("TODO unused LEA PIC variant 0b11", .{}),
-// }
-// try emit.encode(.lea, .{
-// .op1 = .{ .reg = ops.reg1 },
-// .op2 = .{ .mem = Memory.rip(Memory.PtrSize.fromBitSize(ops.reg1.bitSize()), 0) },
-// });
-// const end_offset = emit.code.items.len;
-// if (emit.bin_file.cast(link.File.MachO)) |macho_file| {
-// const reloc_type = switch (ops.flags) {
-// 0b00 => @enumToInt(std.macho.reloc_type_x86_64.X86_64_RELOC_GOT),
-// 0b01 => @enumToInt(std.macho.reloc_type_x86_64.X86_64_RELOC_SIGNED),
-// else => unreachable,
-// };
-// const atom_index = macho_file.getAtomIndexForSymbol(.{ .sym_index = relocation.atom_index, .file = null }).?;
-// try link.File.MachO.Atom.addRelocation(macho_file, atom_index, .{
-// .type = reloc_type,
-// .target = .{ .sym_index = relocation.sym_index, .file = null },
-// .offset = @intCast(u32, end_offset - 4),
-// .addend = 0,
-// .pcrel = true,
-// .length = 2,
-// });
-// } else if (emit.bin_file.cast(link.File.Coff)) |coff_file| {
-// const atom_index = coff_file.getAtomIndexForSymbol(.{ .sym_index = relocation.atom_index, .file = null }).?;
-// try link.File.Coff.Atom.addRelocation(coff_file, atom_index, .{
-// .type = switch (ops.flags) {
-// 0b00 => .got,
-// 0b01 => .direct,
-// 0b10 => .import,
-// else => unreachable,
-// },
-// .target = switch (ops.flags) {
-// 0b00, 0b01 => .{ .sym_index = relocation.sym_index, .file = null },
-// 0b10 => coff_file.getGlobalByIndex(relocation.sym_index),
-// else => unreachable,
-// },
-// .offset = @intCast(u32, end_offset - 4),
-// .addend = 0,
-// .pcrel = true,
-// .length = 2,
-// });
-// } else {
-// return emit.fail("TODO implement lea reg, [rip + reloc] for linking backends different than MachO", .{});
-// }
-// }
-// fn mirCallExtern(emit: *Emit, inst: Mir.Inst.Index) InnerError!void {
-// const tag = emit.mir.instructions.items(.tag)[inst];
-// assert(tag == .call_extern);
-// const relocation = emit.mir.instructions.items(.data)[inst].relocation;
-// const offset = blk: {
-// // callq
-// try emit.encode(.call, .{
-// .op1 = .{ .imm = Immediate.s(0) },
-// });
-// break :blk @intCast(u32, emit.code.items.len) - 4;
-// };
-// if (emit.bin_file.cast(link.File.MachO)) |macho_file| {
-// // Add relocation to the decl.
-// const atom_index = macho_file.getAtomIndexForSymbol(.{ .sym_index = relocation.atom_index, .file = null }).?;
-// const target = macho_file.getGlobalByIndex(relocation.sym_index);
-// try link.File.MachO.Atom.addRelocation(macho_file, atom_index, .{
-// .type = @enumToInt(std.macho.reloc_type_x86_64.X86_64_RELOC_BRANCH),
-// .target = target,
-// .offset = offset,
-// .addend = 0,
-// .pcrel = true,
-// .length = 2,
-// });
-// } else if (emit.bin_file.cast(link.File.Coff)) |coff_file| {
-// // Add relocation to the decl.
-// const atom_index = coff_file.getAtomIndexForSymbol(.{ .sym_index = relocation.atom_index, .file = null }).?;
-// const target = coff_file.getGlobalByIndex(relocation.sym_index);
-// try link.File.Coff.Atom.addRelocation(coff_file, atom_index, .{
-// .type = .direct,
-// .target = target,
-// .offset = offset,
-// .addend = 0,
-// .pcrel = true,
-// .length = 2,
-// });
-// } else {
-// return emit.fail("TODO implement call_extern for linking backends different than MachO and COFF", .{});
-// }
-// }
if (emit.bin_file.cast(link.File.MachO)) |macho_file| {
const reloc_type = switch (ops) {
.got_reloc => @enumToInt(std.macho.reloc_type_x86_64.X86_64_RELOC_GOT),
.direct_reloc => @enumToInt(std.macho.reloc_type_x86_64.X86_64_RELOC_SIGNED),
else => unreachable,
};
const atom_index = macho_file.getAtomIndexForSymbol(.{
.sym_index = metadata.atom_index,
.file = null,
}).?;
try link.File.MachO.Atom.addRelocation(macho_file, atom_index, .{
.type = reloc_type,
.target = .{ .sym_index = metadata.sym_index, .file = null },
.offset = @intCast(u32, end_offset - 4),
.addend = 0,
.pcrel = true,
.length = 2,
});
} else if (emit.bin_file.cast(link.File.Coff)) |coff_file| {
const atom_index = coff_file.getAtomIndexForSymbol(.{
.sym_index = metadata.atom_index,
.file = null,
}).?;
try link.File.Coff.Atom.addRelocation(coff_file, atom_index, .{
.type = switch (ops) {
.got_reloc => .got,
.direct_reloc => .direct,
.import_reloc => .import,
else => unreachable,
},
.target = switch (ops) {
.got_reloc, .direct_reloc => .{ .sym_index = metadata.sym_index, .file = null },
.import_reloc => coff_file.getGlobalByIndex(metadata.sym_index),
else => unreachable,
},
.offset = @intCast(u32, end_offset - 4),
.addend = 0,
.pcrel = true,
.length = 2,
});
} else {
return emit.fail("TODO implement lea reg, [rip + reloc] for linking backends different than MachO", .{});
}
}
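For reference, a small sketch of how the three reloc Ops variants map to Mach-O relocation types in the branch above; `import_reloc` only exists for the COFF path, so it has no Mach-O counterpart here. Illustrative only, with a stand-in enum rather than the compiler's Mir type.

const std = @import("std");
const macho = std.macho;

const RelocOps = enum { got_reloc, direct_reloc, import_reloc };

// Illustrative only: mirrors the MachO switch in mirLeaLinker above.
fn machoRelocType(ops: RelocOps) macho.reloc_type_x86_64 {
    return switch (ops) {
        .got_reloc => .X86_64_RELOC_GOT,
        .direct_reloc => .X86_64_RELOC_SIGNED,
        .import_reloc => unreachable, // COFF-only load kind
    };
}

test "GOT loads lower to X86_64_RELOC_GOT" {
    try std.testing.expectEqual(macho.reloc_type_x86_64.X86_64_RELOC_GOT, machoRelocType(.got_reloc));
    try std.testing.expectEqual(macho.reloc_type_x86_64.X86_64_RELOC_SIGNED, machoRelocType(.direct_reloc));
}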
fn mirDbgLine(emit: *Emit, inst: Mir.Inst.Index) InnerError!void {
const payload = emit.mir.instructions.items(.data)[inst].payload;


@ -142,6 +142,13 @@ pub const Inst = struct {
/// Jump with relocation to another local MIR instruction
jmp_reloc,
/// Call to an extern symbol via linker relocation.
/// Uses `relocation` payload.
call_extern,
/// Load effective address of a symbol not yet allocated in VM.
lea_linker,
/// End of prologue
dbg_prologue_end,
/// Start of epilogue
@ -169,6 +176,12 @@ pub const Inst = struct {
/// Register, register, register operands.
/// Uses `rrr` payload.
rrr,
/// Register, register, immediate (sign-extended) operands.
/// Uses `rri_s` payload.
rri_s,
/// Register, register, immediate (unsigned) operands.
/// Uses `rri_u` payload.
rri_u,
/// Register with condition code (CC).
/// Uses `r_c` payload.
r_c,
@ -199,12 +212,6 @@ pub const Inst = struct {
/// Register, memory (RIP) operands.
/// Uses `rx` payload.
rm_rip,
-/// Register, memory, immediate (unsigned) operands
-/// Uses `rx` payload.
-rmi_u,
-/// Register, memory, immediate (sign-extended) operands
-/// Uses `rx` payload.
-rmi_s,
/// Single memory (SIB) operand.
/// Uses `payload` with extra data of type `MemorySib`.
m_sib,
@ -250,6 +257,15 @@ pub const Inst = struct {
rm_cc,
/// Uses `reloc` payload.
reloc,
/// Linker relocation - GOT indirection.
/// Uses `payload` payload with extra data of type `LeaRegisterReloc`.
got_reloc,
/// Linker relocation - direct reference.
/// Uses `payload` payload with extra data of type `LeaRegisterReloc`.
direct_reloc,
/// Linker relocation - imports table indirection (binding).
/// Uses `payload` payload with extra data of type `LeaRegisterReloc`.
import_reloc,
};
pub const Data = union {
@ -279,6 +295,16 @@ pub const Inst = struct {
r2: Register,
r3: Register,
},
rri_s: struct {
r1: Register,
r2: Register,
imm: i32,
},
rri_u: struct {
r1: Register,
r2: Register,
imm: u32,
},
/// Register with condition code (CC).
r_c: struct {
r1: Register,
@ -339,13 +365,7 @@ pub const Inst = struct {
pub const LeaRegisterReloc = struct {
/// Destination register.
-reg: Register,
-/// Type of the load.
-load_type: enum(u2) {
-got,
-direct,
-import,
-},
reg: u32,
/// Index of the containing atom.
atom_index: u32,
/// Index into the linker's symbol table.