stage2 AArch64: introduce MCValue.condition_flags

Follows 9747303d16dfca61316a292d1e05ac901191e3a3 for AArch64
This commit is contained in:
joachimschmidt557 2022-06-07 11:08:45 +02:00 committed by Andrew Kelley
parent 70dc910086
commit 61844b6bd4

View File

@ -35,6 +35,7 @@ const RegisterManager = abi.RegisterManager;
const RegisterLock = RegisterManager.RegisterLock;
const Register = bits.Register;
const Instruction = bits.Instruction;
const Condition = bits.Instruction.Condition;
const callee_preserved_regs = abi.callee_preserved_regs;
const c_abi_int_param_regs = abi.c_abi_int_param_regs;
const c_abi_int_return_regs = abi.c_abi_int_return_regs;
@ -90,7 +91,7 @@ register_manager: RegisterManager = .{},
/// Maps offset to what is stored there.
stack: std.AutoHashMapUnmanaged(u32, StackAllocation) = .{},
/// Tracks the current instruction allocated to the compare flags
compare_flags_inst: ?Air.Inst.Index = null,
condition_flags_inst: ?Air.Inst.Index = null,
/// Offset from the stack base, representing the end of the stack frame.
max_end_stack: u32 = 0,
@ -161,12 +162,10 @@ const MCValue = union(enum) {
/// The value is a pointer to one of the stack variables (payload
/// is stack offset).
ptr_stack_offset: u32,
/// The value is in the compare flags assuming an unsigned
/// operation, with this operator applied on top of it.
compare_flags_unsigned: math.CompareOperator,
/// The value is in the compare flags assuming a signed operation,
/// with this operator applied on top of it.
compare_flags_signed: math.CompareOperator,
/// The value resides in the N, Z, C, V flags. The value is 1 (if
/// the type is u1) or true (if the type is bool) iff the
/// specified condition is true.
condition_flags: Condition,
fn isMemory(mcv: MCValue) bool {
return switch (mcv) {
@ -190,8 +189,7 @@ const MCValue = union(enum) {
.immediate,
.memory,
.compare_flags_unsigned,
.compare_flags_signed,
.condition_flags,
.ptr_stack_offset,
.undef,
=> false,
@ -758,10 +756,10 @@ fn processDeath(self: *Self, inst: Air.Inst.Index) void {
},
.register_with_overflow => |rwo| {
self.register_manager.freeReg(rwo.reg);
self.compare_flags_inst = null;
self.condition_flags_inst = null;
},
.compare_flags_signed, .compare_flags_unsigned => {
self.compare_flags_inst = null;
.condition_flags => {
self.condition_flags_inst = null;
},
else => {}, // TODO process stack allocation death
}
@ -911,12 +909,10 @@ pub fn spillInstruction(self: *Self, reg: Register, inst: Air.Inst.Index) !void
/// Save the current instruction stored in the compare flags if
/// occupied
fn spillCompareFlagsIfOccupied(self: *Self) !void {
if (self.compare_flags_inst) |inst_to_save| {
if (self.condition_flags_inst) |inst_to_save| {
const mcv = self.getResolvedInstValue(inst_to_save);
const new_mcv = switch (mcv) {
.compare_flags_signed,
.compare_flags_unsigned,
=> try self.allocRegOrMem(inst_to_save, true),
.condition_flags => try self.allocRegOrMem(inst_to_save, true),
.register_with_overflow => try self.allocRegOrMem(inst_to_save, false),
else => unreachable, // mcv doesn't occupy the compare flags
};
@ -927,7 +923,7 @@ fn spillCompareFlagsIfOccupied(self: *Self) !void {
const branch = &self.branch_stack.items[self.branch_stack.items.len - 1];
try branch.inst_table.put(self.gpa, inst_to_save, new_mcv);
self.compare_flags_inst = null;
self.condition_flags_inst = null;
// TODO consolidate with register manager and spillInstruction
// this call should really belong in the register manager!
@ -1109,32 +1105,7 @@ fn airNot(self: *Self, inst: Air.Inst.Index) !void {
switch (operand) {
.dead => unreachable,
.unreach => unreachable,
.compare_flags_unsigned => |op| {
const r = MCValue{
.compare_flags_unsigned = switch (op) {
.gte => .lt,
.gt => .lte,
.neq => .eq,
.lt => .gte,
.lte => .gt,
.eq => .neq,
},
};
break :result r;
},
.compare_flags_signed => |op| {
const r = MCValue{
.compare_flags_signed = switch (op) {
.gte => .lt,
.gt => .lte,
.neq => .eq,
.lt => .gte,
.lte => .gt,
.eq => .neq,
},
};
break :result r;
},
.condition_flags => |cond| break :result MCValue{ .condition_flags = cond.negate() },
else => {
switch (operand_ty.zigTypeTag()) {
.Bool => {
@ -1851,7 +1822,7 @@ fn airOverflow(self: *Self, inst: Air.Inst.Index) !void {
const stack_offset = try self.allocMem(inst, tuple_size, tuple_align);
try self.spillCompareFlagsIfOccupied();
self.compare_flags_inst = null;
self.condition_flags_inst = null;
const base_tag: Air.Inst.Tag = switch (tag) {
.add_with_overflow => .add,
@ -1875,7 +1846,7 @@ fn airOverflow(self: *Self, inst: Air.Inst.Index) !void {
_ = try self.binOp(.cmp_eq, dest, .{ .register = truncated_reg }, Type.usize, Type.usize, null);
try self.genSetStack(lhs_ty, stack_offset, .{ .register = truncated_reg });
try self.genSetStack(Type.initTag(.u1), stack_offset - overflow_bit_offset, .{ .compare_flags_unsigned = .neq });
try self.genSetStack(Type.initTag(.u1), stack_offset - overflow_bit_offset, .{ .condition_flags = .ne });
break :result MCValue{ .stack_offset = stack_offset };
},
@ -1907,7 +1878,7 @@ fn airOverflow(self: *Self, inst: Air.Inst.Index) !void {
};
try self.spillCompareFlagsIfOccupied();
self.compare_flags_inst = inst;
self.condition_flags_inst = inst;
const dest = blk: {
if (rhs_immediate_ok) {
@ -1966,7 +1937,7 @@ fn airMulWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
const stack_offset = try self.allocMem(inst, tuple_size, tuple_align);
try self.spillCompareFlagsIfOccupied();
self.compare_flags_inst = null;
self.condition_flags_inst = null;
const base_tag: Mir.Inst.Tag = switch (int_info.signedness) {
.signed => .smull,
@ -2015,16 +1986,14 @@ fn airMulWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
}
try self.genSetStack(lhs_ty, stack_offset, .{ .register = truncated_reg });
try self.genSetStack(Type.initTag(.u1), stack_offset - overflow_bit_offset, .{
.compare_flags_unsigned = .neq,
});
try self.genSetStack(Type.initTag(.u1), stack_offset - overflow_bit_offset, .{ .condition_flags = .ne });
break :result MCValue{ .stack_offset = stack_offset };
} else if (int_info.bits <= 64) {
const stack_offset = try self.allocMem(inst, tuple_size, tuple_align);
try self.spillCompareFlagsIfOccupied();
self.compare_flags_inst = null;
self.condition_flags_inst = null;
// TODO this should really be put in a helper similar to `binOpRegister`
const lhs_is_register = lhs == .register;
@ -2194,9 +2163,7 @@ fn airMulWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
try self.truncRegister(dest_reg, truncated_reg, int_info.signedness, int_info.bits);
try self.genSetStack(lhs_ty, stack_offset, .{ .register = truncated_reg });
try self.genSetStack(Type.initTag(.u1), stack_offset - overflow_bit_offset, .{
.compare_flags_unsigned = .neq,
});
try self.genSetStack(Type.initTag(.u1), stack_offset - overflow_bit_offset, .{ .condition_flags = .ne });
break :result MCValue{ .stack_offset = stack_offset };
} else return self.fail("TODO implement mul_with_overflow for integers > u64/i64", .{});
@ -2236,7 +2203,7 @@ fn airShlWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
defer if (lhs_lock) |reg| self.register_manager.unlockReg(reg);
try self.spillCompareFlagsIfOccupied();
self.compare_flags_inst = null;
self.condition_flags_inst = null;
// lsl dest, lhs, rhs
const dest = try self.binOp(.shl, lhs, rhs, lhs_ty, rhs_ty, null);
@ -2251,9 +2218,7 @@ fn airShlWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
_ = try self.binOp(.cmp_eq, lhs, reconstructed, lhs_ty, lhs_ty, null);
try self.genSetStack(lhs_ty, stack_offset, dest);
try self.genSetStack(Type.initTag(.u1), stack_offset - overflow_bit_offset, .{
.compare_flags_unsigned = .neq,
});
try self.genSetStack(Type.initTag(.u1), stack_offset - overflow_bit_offset, .{ .condition_flags = .ne });
break :result MCValue{ .stack_offset = stack_offset };
} else {
@ -2681,8 +2646,7 @@ fn load(self: *Self, dst_mcv: MCValue, ptr: MCValue, ptr_ty: Type) InnerError!vo
.undef => unreachable,
.unreach => unreachable,
.dead => unreachable,
.compare_flags_unsigned,
.compare_flags_signed,
.condition_flags,
.register_with_overflow,
=> unreachable, // cannot hold an address
.immediate => |imm| try self.setRegOrMem(elem_ty, dst_mcv, .{ .memory = imm }),
@ -2694,7 +2658,7 @@ fn load(self: *Self, dst_mcv: MCValue, ptr: MCValue, ptr_ty: Type) InnerError!vo
switch (dst_mcv) {
.dead => unreachable,
.undef => unreachable,
.compare_flags_signed, .compare_flags_unsigned => unreachable,
.condition_flags => unreachable,
.register => |dst_reg| {
try self.genLdrRegister(dst_reg, addr_reg, elem_ty);
},
@ -2903,8 +2867,7 @@ fn store(self: *Self, ptr: MCValue, value: MCValue, ptr_ty: Type, value_ty: Type
.undef => unreachable,
.unreach => unreachable,
.dead => unreachable,
.compare_flags_unsigned,
.compare_flags_signed,
.condition_flags,
.register_with_overflow,
=> unreachable, // cannot hold an address
.immediate => |imm| {
@ -3370,11 +3333,11 @@ fn airCmp(self: *Self, inst: Air.Inst.Index, op: math.CompareOperator) !void {
});
try self.spillCompareFlagsIfOccupied();
self.compare_flags_inst = inst;
self.condition_flags_inst = inst;
break :result switch (int_info.signedness) {
.signed => MCValue{ .compare_flags_signed = op },
.unsigned => MCValue{ .compare_flags_unsigned = op },
.signed => MCValue{ .condition_flags = Condition.fromCompareOperatorSigned(op) },
.unsigned => MCValue{ .condition_flags = Condition.fromCompareOperatorUnsigned(op) },
};
} else {
return self.fail("TODO AArch64 cmp for ints > 64 bits", .{});
@ -3434,26 +3397,13 @@ fn airDbgVar(self: *Self, inst: Air.Inst.Index) !void {
fn condBr(self: *Self, condition: MCValue) !Mir.Inst.Index {
switch (condition) {
.compare_flags_signed,
.compare_flags_unsigned,
=> return try self.addInst(.{
.condition_flags => |cond| return try self.addInst(.{
.tag = .b_cond,
.data = .{
.inst_cond = .{
.inst = undefined, // populated later through performReloc
.cond = switch (condition) {
.compare_flags_signed => |cmp_op| blk: {
// Here we map to the opposite condition because the jump is to the false branch.
const condition_code = Instruction.Condition.fromCompareOperatorSigned(cmp_op);
break :blk condition_code.negate();
},
.compare_flags_unsigned => |cmp_op| blk: {
// Here we map to the opposite condition because the jump is to the false branch.
const condition_code = Instruction.Condition.fromCompareOperatorUnsigned(cmp_op);
break :blk condition_code.negate();
},
else => unreachable,
},
// Here we map to the opposite condition because the jump is to the false branch.
.cond = cond.negate(),
},
},
}),
@ -3503,7 +3453,7 @@ fn airCondBr(self: *Self, inst: Air.Inst.Index) !void {
var parent_stack = try self.stack.clone(self.gpa);
defer parent_stack.deinit(self.gpa);
const parent_registers = self.register_manager.registers;
const parent_compare_flags_inst = self.compare_flags_inst;
const parent_condition_flags_inst = self.condition_flags_inst;
try self.branch_stack.append(.{});
errdefer {
@ -3522,7 +3472,7 @@ fn airCondBr(self: *Self, inst: Air.Inst.Index) !void {
defer saved_then_branch.deinit(self.gpa);
self.register_manager.registers = parent_registers;
self.compare_flags_inst = parent_compare_flags_inst;
self.condition_flags_inst = parent_condition_flags_inst;
self.stack.deinit(self.gpa);
self.stack = parent_stack;
@ -3672,15 +3622,15 @@ fn isErr(self: *Self, ty: Type, operand: MCValue) !MCValue {
else => return self.fail("TODO implement isErr for {}", .{operand}),
}
return MCValue{ .compare_flags_unsigned = .gt };
return MCValue{ .condition_flags = .hi };
}
fn isNonErr(self: *Self, ty: Type, operand: MCValue) !MCValue {
const is_err_result = try self.isErr(ty, operand);
switch (is_err_result) {
.compare_flags_unsigned => |op| {
assert(op == .gt);
return MCValue{ .compare_flags_unsigned = .lte };
.condition_flags => |cond| {
assert(cond == .hi);
return MCValue{ .condition_flags = cond.negate() };
},
.immediate => |imm| {
assert(imm == 0);
@ -3889,7 +3839,7 @@ fn br(self: *Self, block: Air.Inst.Index, operand: Air.Inst.Ref) !void {
block_data.mcv = switch (operand_mcv) {
.none, .dead, .unreach => unreachable,
.register, .stack_offset, .memory => operand_mcv,
.immediate => blk: {
.immediate, .condition_flags => blk: {
const new_mcv = try self.allocRegOrMem(block, true);
try self.setRegOrMem(self.air.typeOfIndex(block), new_mcv, operand_mcv);
break :blk new_mcv;
@ -4072,8 +4022,7 @@ fn genSetStack(self: *Self, ty: Type, stack_offset: u32, mcv: MCValue) InnerErro
else => return self.fail("TODO implement memset", .{}),
}
},
.compare_flags_unsigned,
.compare_flags_signed,
.condition_flags,
.immediate,
.ptr_stack_offset,
=> {
@ -4235,15 +4184,7 @@ fn genSetReg(self: *Self, ty: Type, reg: Register, mcv: MCValue) InnerError!void
} },
});
},
.compare_flags_unsigned,
.compare_flags_signed,
=> |op| {
const condition = switch (mcv) {
.compare_flags_unsigned => Instruction.Condition.fromCompareOperatorUnsigned(op),
.compare_flags_signed => Instruction.Condition.fromCompareOperatorSigned(op),
else => unreachable,
};
.condition_flags => |condition| {
_ = try self.addInst(.{
.tag = .cset,
.data = .{ .r_cond = .{