From cd7cbed651524cdbdddcb69436ce348b1aa7a036 Mon Sep 17 00:00:00 2001
From: Jakub Konka
Date: Mon, 7 Nov 2022 10:51:52 +0100
Subject: [PATCH 1/9] aarch64: partially implement isNull()

---
 src/arch/aarch64/CodeGen.zig | 39 +++++++++++++++++++++----------------
 1 file changed, 23 insertions(+), 16 deletions(-)

diff --git a/src/arch/aarch64/CodeGen.zig b/src/arch/aarch64/CodeGen.zig
index 613bdbd3fd..be83fd5894 100644
--- a/src/arch/aarch64/CodeGen.zig
+++ b/src/arch/aarch64/CodeGen.zig
@@ -4562,18 +4562,21 @@ fn airCondBr(self: *Self, inst: Air.Inst.Index) !void {
     return self.finishAir(inst, .unreach, .{ .none, .none, .none });
 }
 
-fn isNull(self: *Self, operand: MCValue) !MCValue {
-    _ = operand;
-    // Here you can specialize this instruction if it makes sense to, otherwise the default
-    // will call isNonNull and invert the result.
-    return self.fail("TODO call isNonNull and invert the result", .{});
+fn isNull(self: *Self, operand_bind: ReadArg.Bind, operand_ty: Type) !MCValue {
+    if (operand_ty.isPtrLikeOptional()) {
+        assert(operand_ty.abiSize(self.target.*) == 8);
+
+        const imm_bind: ReadArg.Bind = .{ .mcv = .{ .immediate = 0 } };
+        return self.cmp(operand_bind, imm_bind, Type.usize, .eq);
+    } else {
+        return self.fail("TODO implement non-pointer optionals", .{});
+    }
 }
 
-fn isNonNull(self: *Self, operand: MCValue) !MCValue {
-    _ = operand;
-    // Here you can specialize this instruction if it makes sense to, otherwise the default
-    // will call isNull and invert the result.
-    return self.fail("TODO call isNull and invert the result", .{});
+fn isNonNull(self: *Self, operand_bind: ReadArg.Bind, operand_ty: Type) !MCValue {
+    const is_null_res = try self.isNull(operand_bind, operand_ty);
+    assert(is_null_res.compare_flags == .eq);
+    return MCValue{ .compare_flags = is_null_res.compare_flags.negate() };
 }
 
 fn isErr(self: *Self, ty: Type, operand: MCValue) !MCValue {
@@ -4605,8 +4608,10 @@ fn isNonErr(self: *Self, ty: Type, operand: MCValue) !MCValue {
 fn airIsNull(self: *Self, inst: Air.Inst.Index) !void {
     const un_op = self.air.instructions.items(.data)[inst].un_op;
     const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
-        const operand = try self.resolveInst(un_op);
-        break :result try self.isNull(operand);
+        const operand_bind: ReadArg.Bind = .{ .inst = un_op };
+        const operand_ty = self.air.typeOf(un_op);
+
+        break :result try self.isNull(operand_bind, operand_ty);
     };
     return self.finishAir(inst, result, .{ un_op, .none, .none });
 }
@@ -4621,7 +4626,7 @@ fn airIsNullPtr(self: *Self, inst: Air.Inst.Index) !void {
         const operand = try self.allocRegOrMem(elem_ty, true, null);
         try self.load(operand, operand_ptr, ptr_ty);
 
-        break :result try self.isNull(operand);
+        break :result try self.isNull(.{ .mcv = operand }, elem_ty);
     };
     return self.finishAir(inst, result, .{ un_op, .none, .none });
 }
@@ -4629,8 +4634,10 @@
 fn airIsNonNull(self: *Self, inst: Air.Inst.Index) !void {
     const un_op = self.air.instructions.items(.data)[inst].un_op;
     const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
-        const operand = try self.resolveInst(un_op);
-        break :result try self.isNonNull(operand);
+        const operand_bind: ReadArg.Bind = .{ .inst = un_op };
+        const operand_ty = self.air.typeOf(un_op);
+
+        break :result try self.isNonNull(operand_bind, operand_ty);
     };
     return self.finishAir(inst, result, .{ un_op, .none, .none });
 }
@@ -4645,7 +4652,7 @@ fn airIsNonNullPtr(self: *Self, inst: Air.Inst.Index) !void {
         const operand = try self.allocRegOrMem(elem_ty, true, null);
         try self.load(operand, operand_ptr, ptr_ty);
 
-        break :result try self.isNonNull(operand);
+        break :result try self.isNonNull(.{ .mcv = operand }, elem_ty);
     };
     return self.finishAir(inst, result, .{ un_op, .none, .none });
 }

From 0de56d1722a50435696d7bdc8900ca4206e12d70 Mon Sep 17 00:00:00 2001
From: Jakub Konka
Date: Mon, 7 Nov 2022 11:54:06 +0100
Subject: [PATCH 2/9] aarch64: partially implement optionalPayload()

---
 src/arch/aarch64/CodeGen.zig | 34 +++++++++++++++++++++++++++++++++-
 1 file changed, 33 insertions(+), 1 deletion(-)

diff --git a/src/arch/aarch64/CodeGen.zig b/src/arch/aarch64/CodeGen.zig
index be83fd5894..d677d0659c 100644
--- a/src/arch/aarch64/CodeGen.zig
+++ b/src/arch/aarch64/CodeGen.zig
@@ -2882,10 +2882,42 @@ fn airShlSat(self: *Self, inst: Air.Inst.Index) !void {
 
 fn airOptionalPayload(self: *Self, inst: Air.Inst.Index) !void {
     const ty_op = self.air.instructions.items(.data)[inst].ty_op;
-    const result: MCValue = if (self.liveness.isUnused(inst)) .dead else return self.fail("TODO implement .optional_payload for {}", .{self.target.cpu.arch});
+    const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
+        const optional_ty = self.air.typeOf(ty_op.operand);
+        const mcv = try self.resolveInst(ty_op.operand);
+        break :result try self.optionalPayload(inst, mcv, optional_ty);
+    };
     return self.finishAir(inst, result, .{ ty_op.operand, .none, .none });
 }
 
+fn optionalPayload(self: *Self, inst: Air.Inst.Index, mcv: MCValue, optional_ty: Type) !MCValue {
+    var opt_buf: Type.Payload.ElemType = undefined;
+    const payload_ty = optional_ty.optionalChild(&opt_buf);
+    if (!payload_ty.hasRuntimeBits()) return MCValue.none;
+    if (optional_ty.isPtrLikeOptional()) {
+        // TODO should we reuse the operand here?
+        const raw_reg = try self.register_manager.allocReg(inst, gp);
+        const reg = self.registerAlias(raw_reg, payload_ty);
+        try self.genSetReg(payload_ty, reg, mcv);
+        return MCValue{ .register = reg };
+    }
+
+    const offset = @intCast(u32, optional_ty.abiSize(self.target.*) - payload_ty.abiSize(self.target.*));
+    switch (mcv) {
+        .register => return self.fail("TODO optionalPayload for registers", .{}),
+        .stack_argument_offset => |off| {
+            return MCValue{ .stack_argument_offset = off + offset };
+        },
+        .stack_offset => |off| {
+            return MCValue{ .stack_offset = off - offset };
+        },
+        .memory => |addr| {
+            return MCValue{ .memory = addr + offset };
+        },
+        else => unreachable, // invalid MCValue for an optional
+    }
+}
+
 fn airOptionalPayloadPtr(self: *Self, inst: Air.Inst.Index) !void {
     const ty_op = self.air.instructions.items(.data)[inst].ty_op;
     const result: MCValue = if (self.liveness.isUnused(inst)) .dead else return self.fail("TODO implement .optional_payload_ptr for {}", .{self.target.cpu.arch});
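Note on the layout these first two patches build on: Zig lowers pointer-like optionals (?*T, ?[*]T and similar) to a bare pointer with address zero reserved for null, which is why isNull() can compare the operand against an immediate 0 at Type.usize, while other optionals carry the payload plus a separate flag that optionalPayload() has to step over. A minimal sketch of that distinction, written as an ordinary Zig test rather than backend code (the exact padding of ?u32 is target-defined):

const std = @import("std");

test "optional representations the backend distinguishes" {
    // Pointer-like optionals reserve address zero for null, so they stay
    // pointer-sized and a null check is a single compare against 0.
    try std.testing.expect(@sizeOf(?*u32) == @sizeOf(*u32));

    // Non-pointer optionals carry the payload plus a separate flag, so
    // their ABI size is strictly larger than the payload alone.
    try std.testing.expect(@sizeOf(?u32) > @sizeOf(u32));
}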
From 35bd5363eed17270f1d3f96b8b1579126b12048a Mon Sep 17 00:00:00 2001
From: Jakub Konka
Date: Mon, 7 Nov 2022 12:05:05 +0100
Subject: [PATCH 3/9] aarch64: implement isNull() for non-pointer optionals

---
 src/arch/aarch64/CodeGen.zig | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/src/arch/aarch64/CodeGen.zig b/src/arch/aarch64/CodeGen.zig
index d677d0659c..0f34f4bbc6 100644
--- a/src/arch/aarch64/CodeGen.zig
+++ b/src/arch/aarch64/CodeGen.zig
@@ -4601,7 +4601,12 @@ fn isNull(self: *Self, operand_bind: ReadArg.Bind, operand_ty: Type) !MCValue {
         const imm_bind: ReadArg.Bind = .{ .mcv = .{ .immediate = 0 } };
         return self.cmp(operand_bind, imm_bind, Type.usize, .eq);
     } else {
-        return self.fail("TODO implement non-pointer optionals", .{});
+        var buf: Type.Payload.ElemType = undefined;
+        const payload_ty = operand_ty.optionalChild(&buf);
+        const sentinel_ty = if (payload_ty.hasRuntimeBitsIgnoreComptime()) Type.bool else operand_ty;
+
+        const imm_bind: ReadArg.Bind = .{ .mcv = .{ .immediate = 0 } };
+        return self.cmp(operand_bind, imm_bind, sentinel_ty, .eq);
     }
 }
 

From a07449450f460dacd3ad4daa480e00c7bc9e9d7f Mon Sep 17 00:00:00 2001
From: Jakub Konka
Date: Mon, 7 Nov 2022 15:16:54 +0100
Subject: [PATCH 4/9] aarch64: implement optionalPayload when mcv is register

---
 src/arch/aarch64/CodeGen.zig | 25 ++++++++++++++++++++++++-
 1 file changed, 24 insertions(+), 1 deletion(-)

diff --git a/src/arch/aarch64/CodeGen.zig b/src/arch/aarch64/CodeGen.zig
index 0f34f4bbc6..29a01ae8b8 100644
--- a/src/arch/aarch64/CodeGen.zig
+++ b/src/arch/aarch64/CodeGen.zig
@@ -2904,7 +2904,30 @@ fn optionalPayload(self: *Self, inst: Air.Inst.Index, mcv: MCValue, optional_ty:
 
     const offset = @intCast(u32, optional_ty.abiSize(self.target.*) - payload_ty.abiSize(self.target.*));
     switch (mcv) {
-        .register => return self.fail("TODO optionalPayload for registers", .{}),
+        .register => |source_reg| {
+            // TODO should we reuse the operand here?
+            const raw_reg = try self.register_manager.allocReg(inst, gp);
+            const dest_reg = raw_reg.toX();
+
+            const shift = @intCast(u6, offset * 8);
+            if (shift == 0) {
+                try self.genSetReg(payload_ty, dest_reg, mcv);
+            } else {
+                _ = try self.addInst(.{
+                    .tag = if (payload_ty.isSignedInt())
+                        Mir.Inst.Tag.asr_immediate
+                    else
+                        Mir.Inst.Tag.lsr_immediate,
+                    .data = .{ .rr_shift = .{
+                        .rd = dest_reg,
+                        .rn = source_reg,
+                        .shift = shift,
+                    } },
+                });
+            }
+
+            return MCValue{ .register = dest_reg };
+        },
         .stack_argument_offset => |off| {
             return MCValue{ .stack_argument_offset = off + offset };
        },
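The register case above reduces payload extraction to a single shift: assuming, as the patch does, that the payload sits offset * 8 bits up in the optional's bit pattern, one asr (signed payload) or lsr (otherwise) moves it into the low bits. The same operation in plain Zig, with a hypothetical bit pattern (extractUnsigned is an illustration, not a compiler function):

const std = @import("std");

// One logical shift right, the userspace analogue of the lsr_immediate
// emitted above; a signed payload would use an arithmetic shift instead,
// so the sign bit gets replicated into the vacated high bits.
fn extractUnsigned(opt_bits: u64, shift: u6) u64 {
    return opt_bits >> shift;
}

test "shift-based payload extraction" {
    const payload: u64 = 0xdeadbeef;
    const opt_bits = payload << 32; // hypothetical: payload in the high word
    try std.testing.expectEqual(payload, extractUnsigned(opt_bits, 32));
}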
From 0d556877af4d12e1e0d5c0146ab80273e91f211c Mon Sep 17 00:00:00 2001
From: Jakub Konka
Date: Mon, 7 Nov 2022 15:17:34 +0100
Subject: [PATCH 5/9] aarch64: implement .wrap_optional always saving to the stack

---
 src/arch/aarch64/CodeGen.zig | 34 +++++++++++++++++++++++++++++-----
 1 file changed, 29 insertions(+), 5 deletions(-)

diff --git a/src/arch/aarch64/CodeGen.zig b/src/arch/aarch64/CodeGen.zig
index 29a01ae8b8..5adf81148b 100644
--- a/src/arch/aarch64/CodeGen.zig
+++ b/src/arch/aarch64/CodeGen.zig
@@ -3068,13 +3068,37 @@ fn airSaveErrReturnTraceIndex(self: *Self, inst: Air.Inst.Index) !void {
 fn airWrapOptional(self: *Self, inst: Air.Inst.Index) !void {
     const ty_op = self.air.instructions.items(.data)[inst].ty_op;
     const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
-        const optional_ty = self.air.typeOfIndex(inst);
-
-        // Optional with a zero-bit payload type is just a boolean true
-        if (optional_ty.abiSize(self.target.*) == 1)
+        const payload_ty = self.air.typeOf(ty_op.operand);
+        if (!payload_ty.hasRuntimeBits()) {
             break :result MCValue{ .immediate = 1 };
+        }
 
-        return self.fail("TODO implement wrap optional for {}", .{self.target.cpu.arch});
+        const optional_ty = self.air.typeOfIndex(inst);
+        const operand = try self.resolveInst(ty_op.operand);
+        const operand_lock: ?RegisterLock = switch (operand) {
+            .register => |reg| self.register_manager.lockRegAssumeUnused(reg),
+            else => null,
+        };
+        defer if (operand_lock) |lock| self.register_manager.unlockReg(lock);
+
+        if (optional_ty.isPtrLikeOptional()) {
+            // TODO should we check if we can reuse the operand?
+            const raw_reg = try self.register_manager.allocReg(inst, gp);
+            const reg = self.registerAlias(raw_reg, payload_ty);
+            try self.genSetReg(payload_ty, raw_reg, operand);
+            break :result MCValue{ .register = reg };
+        }
+
+        const optional_abi_size = @intCast(u32, optional_ty.abiSize(self.target.*));
+        const optional_abi_align = optional_ty.abiAlignment(self.target.*);
+        const payload_abi_size = @intCast(u32, payload_ty.abiSize(self.target.*));
+        const offset = optional_abi_size - payload_abi_size;
+
+        const stack_offset = try self.allocMem(optional_abi_size, optional_abi_align, inst);
+        try self.genSetStack(Type.bool, stack_offset, .{ .immediate = 1 });
+        try self.genSetStack(payload_ty, stack_offset - @intCast(u32, offset), operand);
+
+        break :result MCValue{ .stack_offset = stack_offset };
     };
     return self.finishAir(inst, result, .{ ty_op.operand, .none, .none });
 }

From 45f65c84457f3b52209fd4f4595130d6528ebecd Mon Sep 17 00:00:00 2001
From: Jakub Konka
Date: Mon, 7 Nov 2022 20:50:00 +0100
Subject: [PATCH 6/9] aarch64: fix implementation of .is_null and .is_non_null

---
 src/arch/aarch64/CodeGen.zig | 40 ++++++++++++++++------------------------
 1 file changed, 16 insertions(+), 24 deletions(-)

diff --git a/src/arch/aarch64/CodeGen.zig b/src/arch/aarch64/CodeGen.zig
index 5adf81148b..7a8d6d3796 100644
--- a/src/arch/aarch64/CodeGen.zig
+++ b/src/arch/aarch64/CodeGen.zig
@@ -1548,7 +1548,7 @@ fn allocRegs(
         };
         const raw_reg = try self.register_manager.allocReg(track_inst, gp);
         arg.reg.* = self.registerAlias(raw_reg, arg.ty);
-        read_locks[i] = self.register_manager.lockReg(arg.reg.*);
+        read_locks[i] = self.register_manager.lockRegAssumeUnused(arg.reg.*);
     }
 }
 
@@ -2920,13 +2920,13 @@ fn optionalPayload(self: *Self, inst: Air.Inst.Index, mcv: MCValue, optional_ty:
                         Mir.Inst.Tag.lsr_immediate,
                     .data = .{ .rr_shift = .{
                         .rd = dest_reg,
-                        .rn = source_reg,
+                        .rn = source_reg.toX(),
                         .shift = shift,
                     } },
                 });
             }
 
-            return MCValue{ .register = dest_reg };
+            return MCValue{ .register = self.registerAlias(dest_reg, payload_ty) };
         },
         .stack_argument_offset => |off| {
             return MCValue{ .stack_argument_offset = off + offset };
@@ -4215,18 +4215,16 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallOptions.
 
     const result: MCValue = result: {
         switch (info.return_value) {
-            .register => |reg| {
-                if (RegisterManager.indexOfReg(&callee_preserved_regs, reg) == null) {
-                    // Save function return value in a callee saved register
-                    break :result try self.copyToNewRegister(inst, info.return_value);
-                }
+            .register => {
+                // Save function return value in a callee saved register
+                break :result try self.copyToNewRegister(inst, info.return_value);
             },
             else => {},
         }
         break :result info.return_value;
     };
 
-    if (args.len + 1 <= Liveness.bpi - 1) {
+    if (args.len <= Liveness.bpi - 2) {
         var buf = [1]Air.Inst.Ref{.none} ** (Liveness.bpi - 1);
         buf[0] = callee;
         std.mem.copy(Air.Inst.Ref, buf[1..], args);
@@ -4640,19 +4638,13 @@ fn airCondBr(self: *Self, inst: Air.Inst.Index) !void {
 }
 
 fn isNull(self: *Self, operand_bind: ReadArg.Bind, operand_ty: Type) !MCValue {
-    if (operand_ty.isPtrLikeOptional()) {
-        assert(operand_ty.abiSize(self.target.*) == 8);
-
-        const imm_bind: ReadArg.Bind = .{ .mcv = .{ .immediate = 0 } };
-        return self.cmp(operand_bind, imm_bind, Type.usize, .eq);
-    } else {
+    const sentinel_ty: Type = if (!operand_ty.isPtrLikeOptional()) blk: {
         var buf: Type.Payload.ElemType = undefined;
         const payload_ty = operand_ty.optionalChild(&buf);
-        const sentinel_ty = if (payload_ty.hasRuntimeBitsIgnoreComptime()) Type.bool else operand_ty;
-
-        const imm_bind: ReadArg.Bind = .{ .mcv = .{ .immediate = 0 } };
-        return self.cmp(operand_bind, imm_bind, sentinel_ty, .eq);
-    }
+        break :blk if (payload_ty.hasRuntimeBitsIgnoreComptime()) Type.bool else operand_ty;
+    } else operand_ty;
+    const imm_bind: ReadArg.Bind = .{ .mcv = .{ .immediate = 0 } };
+    return self.cmp(operand_bind, imm_bind, sentinel_ty, .eq);
 }
 
 fn isNonNull(self: *Self, operand_bind: ReadArg.Bind, operand_ty: Type) !MCValue {
@@ -4692,10 +4684,10 @@
 fn airIsNull(self: *Self, inst: Air.Inst.Index) !void {
     const un_op = self.air.instructions.items(.data)[inst].un_op;
     const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
-        const operand_bind: ReadArg.Bind = .{ .inst = un_op };
+        const operand = try self.resolveInst(un_op);
         const operand_ty = self.air.typeOf(un_op);
 
-        break :result try self.isNull(operand_bind, operand_ty);
+        break :result try self.isNull(.{ .mcv = operand }, operand_ty);
     };
     return self.finishAir(inst, result, .{ un_op, .none, .none });
 }
@@ -4718,10 +4710,10 @@
 fn airIsNonNull(self: *Self, inst: Air.Inst.Index) !void {
     const un_op = self.air.instructions.items(.data)[inst].un_op;
     const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
-        const operand_bind: ReadArg.Bind = .{ .inst = un_op };
+        const operand = try self.resolveInst(un_op);
         const operand_ty = self.air.typeOf(un_op);
 
-        break :result try self.isNonNull(operand_bind, operand_ty);
+        break :result try self.isNonNull(.{ .mcv = operand }, operand_ty);
     };
     return self.finishAir(inst, result, .{ un_op, .none, .none });
 }
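Patch 6 also pins down the contract between the two null checks: isNull() emits the only compare and yields an .eq condition, and isNonNull() just negates that condition, so no second compare is needed. A toy model of the flag negation (the real compare_flags covers the full AArch64 condition set, not only these two values):

const std = @import("std");

const CompareFlags = enum {
    eq,
    neq,

    fn negate(self: CompareFlags) CompareFlags {
        return switch (self) {
            .eq => .neq,
            .neq => .eq,
        };
    }
};

test "is_non_null as a negated is_null" {
    const is_null: CompareFlags = .eq; // operand == sentinel
    try std.testing.expectEqual(CompareFlags.neq, is_null.negate());
}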
From 32ad218f5aa389d5827af6367d180efe2f5bba38 Mon Sep 17 00:00:00 2001
From: Jakub Konka
Date: Tue, 8 Nov 2022 13:50:30 +0100
Subject: [PATCH 7/9] aarch64: revert changes to .call

---
 src/arch/aarch64/CodeGen.zig | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/src/arch/aarch64/CodeGen.zig b/src/arch/aarch64/CodeGen.zig
index 7a8d6d3796..532409c279 100644
--- a/src/arch/aarch64/CodeGen.zig
+++ b/src/arch/aarch64/CodeGen.zig
@@ -4215,16 +4215,18 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallOptions.
 
     const result: MCValue = result: {
         switch (info.return_value) {
-            .register => {
-                // Save function return value in a callee saved register
-                break :result try self.copyToNewRegister(inst, info.return_value);
+            .register => |reg| {
+                if (RegisterManager.indexOfReg(&callee_preserved_regs, reg) == null) {
+                    // Save function return value in a callee saved register
+                    break :result try self.copyToNewRegister(inst, info.return_value);
+                }
             },
             else => {},
         }
         break :result info.return_value;
     };
 
-    if (args.len <= Liveness.bpi - 2) {
+    if (args.len + 1 <= Liveness.bpi - 1) {
         var buf = [1]Air.Inst.Ref{.none} ** (Liveness.bpi - 1);
         buf[0] = callee;
         std.mem.copy(Air.Inst.Ref, buf[1..], args);

From 179f16904f47a0455c656a49bd7dd5ee8ceb54fe Mon Sep 17 00:00:00 2001
From: Jakub Konka
Date: Tue, 8 Nov 2022 13:59:06 +0100
Subject: [PATCH 8/9] aarch64: circumvent zig0 inference problems

---
 src/arch/aarch64/CodeGen.zig | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/src/arch/aarch64/CodeGen.zig b/src/arch/aarch64/CodeGen.zig
index 532409c279..3bea24f57a 100644
--- a/src/arch/aarch64/CodeGen.zig
+++ b/src/arch/aarch64/CodeGen.zig
@@ -3067,7 +3067,12 @@ fn airSaveErrReturnTraceIndex(self: *Self, inst: Air.Inst.Index) !void {
 
 fn airWrapOptional(self: *Self, inst: Air.Inst.Index) !void {
     const ty_op = self.air.instructions.items(.data)[inst].ty_op;
-    const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
+
+    if (self.liveness.isUnused(inst)) {
+        return self.finishAir(inst, .dead, .{ ty_op.operand, .none, .none });
+    }
+
+    const result: MCValue = result: {
         const payload_ty = self.air.typeOf(ty_op.operand);
         if (!payload_ty.hasRuntimeBits()) {
             break :result MCValue{ .immediate = 1 };
@@ -3100,6 +3105,7 @@ fn airWrapOptional(self: *Self, inst: Air.Inst.Index) !void {
 
         break :result MCValue{ .stack_offset = stack_offset };
     };
+
     return self.finishAir(inst, result, .{ ty_op.operand, .none, .none });
 }
 

From e83590d0e887e3b856e2e638c608fa821c1efa2e Mon Sep 17 00:00:00 2001
From: Jakub Konka
Date: Tue, 8 Nov 2022 13:59:47 +0100
Subject: [PATCH 9/9] aarch64: pass some tests dealing with optionals

---
 test/behavior/optional.zig | 7 -------
 1 file changed, 7 deletions(-)

diff --git a/test/behavior/optional.zig b/test/behavior/optional.zig
index d04e7fdb62..5bc79ed31c 100644
--- a/test/behavior/optional.zig
+++ b/test/behavior/optional.zig
@@ -6,7 +6,6 @@ const expectEqual = testing.expectEqual;
 const expectEqualStrings = std.testing.expectEqualStrings;
 
 test "passing an optional integer as a parameter" {
-    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
 
     const S = struct {
@@ -26,8 +25,6 @@
 pub const EmptyStruct = struct {};
 
 test "optional pointer to size zero struct" {
-    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
-
     var e = EmptyStruct{};
     var o: ?*EmptyStruct = &e;
     try expect(o != null);
@@ -142,7 +139,6 @@ fn test_cmp_optional_non_optional() !void {
 }
 
 test "unwrap function call with optional pointer return value" {
-    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
    if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
 
     const S = struct {
@@ -163,7 +159,6 @@
 }
 
 test "nested orelse" {
-    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
 
     const S = struct {
@@ -189,7 +184,6 @@
 }
 
 test "self-referential struct through a slice of optional" {
-    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_c) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
 
@@ -270,7 +264,6 @@ test "0-bit child type coerced to optional return ptr result location" {
 
 test "0-bit child type coerced to optional" {
     if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
 
     const S = struct {
         fn doTheTest() !void {
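Taken together, the series lets the aarch64 backend handle the whole wrap/check/unwrap chain, which is what the newly un-skipped behavior tests exercise. A representative pattern in the style of test/behavior/optional.zig, annotated with the AIR instructions each line lowers to:

const std = @import("std");

test "wrap, check, and unwrap an optional" {
    var x: u32 = 42;
    var opt: ?u32 = x; // .wrap_optional
    try std.testing.expect(opt != null); // .is_non_null
    if (opt) |payload| { // .optional_payload
        try std.testing.expectEqual(@as(u32, 42), payload);
    }
}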