x86_64: implement @memmove

Author: Jacob Young
Date:   2025-05-12 09:13:20 -04:00
parent 6d68a494c8
commit 025611629f
3 changed files with 107 additions and 12 deletions
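
For context, @memmove copies between memory regions that are allowed to overlap, which @memcpy forbids; this commit teaches the self-hosted x86_64 backend to lower it. A minimal behavior check (not part of this commit, purely illustrative):

const std = @import("std");

test "memmove with overlapping regions" {
    var buf: [8]u8 = "abcdefgh".*;
    // dest (buf[2..7]) overlaps source (buf[0..5]); @memcpy would be illegal here.
    @memmove(buf[2..7], buf[0..5]);
    try std.testing.expectEqualSlices(u8, "ababcdeh", &buf);
}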


@@ -101399,8 +101399,66 @@ fn genBody(cg: *CodeGen, body: []const Air.Inst.Index) InnerError!void {
.memset => try cg.airMemset(inst, false),
.memset_safe => try cg.airMemset(inst, true),
.memcpy => try cg.airMemcpy(inst),
.memmove => try cg.airMemmove(inst),
.memcpy, .memmove => |air_tag| if (use_old) switch (air_tag) {
else => unreachable,
.memcpy => try cg.airMemcpy(inst),
.memmove => return cg.fail("TODO implement airMemmove for {}", .{cg.target.cpu.arch}),
} else {
const bin_op = air_datas[@intFromEnum(inst)].bin_op;
var ops = try cg.tempsFromOperands(inst, .{ bin_op.lhs, bin_op.rhs }) ++ .{undefined};
ops[2] = ops[0].getByteLen(cg) catch |err| switch (err) {
error.SelectFailed => return cg.fail("failed to select {s} {} {} {} {}", .{
@tagName(air_tag),
cg.typeOf(bin_op.lhs).fmt(pt),
cg.typeOf(bin_op.rhs).fmt(pt),
ops[0].tracking(cg),
ops[1].tracking(cg),
}),
else => |e| return e,
};
try ops[0].toSlicePtr(cg);
cg.select(&.{}, &.{}, &ops, switch (air_tag) {
else => unreachable,
inline .memcpy, .memmove => |symbol| comptime &.{.{
.patterns = &.{
.{ .src = .{
.{ .to_param_gpr = .{ .cc = .ccc, .index = 0 } },
.{ .to_param_gpr = .{ .cc = .ccc, .index = 1 } },
.{ .to_param_gpr = .{ .cc = .ccc, .index = 2 } },
} },
},
.call_frame = .{ .alignment = .@"16" },
.extra_temps = .{
.{ .type = .usize, .kind = .{ .symbol = &.{ .name = @tagName(symbol) } } },
.unused,
.unused,
.unused,
.unused,
.unused,
.unused,
.unused,
.unused,
.unused,
.unused,
},
.clobbers = .{ .eflags = true, .caller_preserved = .ccc },
.each = .{ .once = &.{
.{ ._, ._, .call, .tmp0d, ._, ._, ._ },
} },
}},
}) catch |err| switch (err) {
error.SelectFailed => return cg.fail("failed to select {s} {} {} {} {} {}", .{
@tagName(air_tag),
cg.typeOf(bin_op.lhs).fmt(pt),
cg.typeOf(bin_op.rhs).fmt(pt),
ops[0].tracking(cg),
ops[1].tracking(cg),
ops[2].tracking(cg),
}),
else => |e| return e,
};
for (ops) |op| try op.die(cg);
},
.cmpxchg_weak, .cmpxchg_strong => try cg.airCmpxchg(inst),
.atomic_load => try cg.airAtomicLoad(inst),
.atomic_store_unordered => try cg.airAtomicStore(inst, .unordered),
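
With use_old disabled, .memcpy and .memmove now share one lowering: select emits a call to the like-named runtime symbol, with the destination pointer, source pointer, and byte count placed in the first three C-calling-convention parameter registers (rdi, rsi, rdx on SysV x86_64), and eflags plus the caller-preserved registers marked as clobbered. A rough sketch of what the emitted call amounts to, assuming the usual libc-style signature (illustrative only; the backend produces this through select and register allocation, not Zig source):

const std = @import("std");

// Hypothetical helper: the real lowering loads dest, src, and the byte count
// into parameter registers and emits `call memmove` directly.
extern fn memmove(dest: [*]u8, src: [*]const u8, len: usize) [*]u8;

fn lowerMemmove(dest: []u8, src: []const u8) void {
    std.debug.assert(dest.len == src.len);
    _ = memmove(dest.ptr, src.ptr, dest.len); // byte count comes from getByteLen below
}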
@@ -118458,11 +118516,6 @@ fn airMemcpy(self: *CodeGen, inst: Air.Inst.Index) !void {
return self.finishAir(inst, .unreach, .{ bin_op.lhs, bin_op.rhs, .none });
}
fn airMemmove(self: *CodeGen, inst: Air.Inst.Index) !void {
_ = inst;
return self.fail("TODO implement airMemmove for {}", .{self.target.cpu.arch});
}
fn airTagName(self: *CodeGen, inst: Air.Inst.Index, only_safety: bool) !void {
const pt = self.pt;
const zcu = pt.zcu;
@@ -122137,6 +122190,49 @@ const Temp = struct {
return .{ .index = new_temp_index.toIndex() };
}
fn getByteLen(temp: *Temp, cg: *CodeGen) Select.Error!Temp {
const zcu = cg.pt.zcu;
const ip = &zcu.intern_pool;
const ptr_info = ip.indexToKey(temp.typeOf(cg).toIntern()).ptr_type;
switch (ptr_info.flags.size) {
.one => {
const array_info = ip.indexToKey(ptr_info.child).array_type;
return cg.tempInit(.usize, .{
.immediate = Type.fromInterned(array_info.child).abiSize(zcu) * array_info.len,
});
},
.many, .c => unreachable,
.slice => {
const elem_size = Type.fromInterned(ptr_info.child).abiSize(zcu);
var len = try temp.getLimb(.usize, 1, cg);
while (try len.toRegClass(true, .general_purpose, cg)) {}
const len_reg = len.tracking(cg).short.register.to64();
if (!std.math.isPowerOfTwo(elem_size)) {
try cg.spillEflagsIfOccupied();
try cg.asmRegisterRegisterImmediate(
.{ .i_, .mul },
len_reg,
len_reg,
.u(elem_size),
);
} else if (elem_size > 8) {
try cg.spillEflagsIfOccupied();
try cg.asmRegisterImmediate(
.{ ._l, .sh },
len_reg,
.u(std.math.log2_int(u64, elem_size)),
);
} else if (elem_size != 1) try cg.asmRegisterMemory(.{ ._, .lea }, len_reg, .{
.mod = .{ .rm = .{
.index = len_reg,
.scale = .fromFactor(@intCast(elem_size)),
} },
});
return len;
},
}
}
fn toLimb(temp: *Temp, limb_ty: Type, limb_index: u28, cg: *CodeGen) InnerError!void {
switch (temp.unwrap(cg)) {
.ref => {},
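
getByteLen turns the destination operand into a byte count: a pointer-to-array produces a comptime immediate (element ABI size times array length), while a slice's runtime length is scaled by the element size, using imul for non-power-of-two sizes, shl for powers of two above 8, and lea with a scale factor for 2, 4, or 8. A small sketch of that instruction choice (assumed helper name; the backend emits machine instructions, not this Zig arithmetic):

const std = @import("std");

fn scaleLenToBytes(len: u64, elem_size: u64) u64 {
    if (elem_size == 1) return len; // the length is already the byte count
    if (!std.math.isPowerOfTwo(elem_size)) return len * elem_size; // imul reg, reg, imm
    if (elem_size > 8) return len << std.math.log2_int(u64, elem_size); // shl reg, imm
    return len * elem_size; // 2, 4, or 8: folded into lea [reg * scale]
}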


@@ -6,7 +6,6 @@ var x: u8 = 1;
// This excludes builtin functions that return void or noreturn that cannot be tested.
test {
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO


@@ -3,13 +3,13 @@ const builtin = @import("builtin");
const expect = std.testing.expect;
test "memmove and memset intrinsics" {
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf and builtin.target.ofmt != .macho) return error.SkipZigTest;
try testMemmoveMemset();
try comptime testMemmoveMemset();
@@ -33,13 +33,13 @@ fn testMemmoveMemset() !void {
}
test "@memmove with both operands single-ptr-to-array, one is null-terminated" {
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf and builtin.target.ofmt != .macho) return error.SkipZigTest;
try testMemmoveBothSinglePtrArrayOneIsNullTerminated();
try comptime testMemmoveBothSinglePtrArrayOneIsNullTerminated();
@@ -79,13 +79,13 @@ fn testMemmoveBothSinglePtrArrayOneIsNullTerminated() !void {
}
test "@memmove dest many pointer" {
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf and builtin.target.ofmt != .macho) return error.SkipZigTest;
try testMemmoveDestManyPtr();
try comptime testMemmoveDestManyPtr();
@@ -123,13 +123,13 @@ fn testMemmoveDestManyPtr() !void {
}
test "@memmove slice" {
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf and builtin.target.ofmt != .macho) return error.SkipZigTest;
try testMemmoveSlice();
try comptime testMemmoveSlice();