Mirror of https://github.com/ziglang/zig.git (synced 2026-02-14 21:38:33 +00:00)
x86_64: implement struct_field_ptr for packed containers
commit 0cfc0d0d13
parent d9ce69dc39
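The diff below renames the backend helper structFieldPtr to fieldPtr and teaches it to compute field offsets for packed containers instead of bailing out with a TODO. As an illustrative sketch (not part of the commit), code along these lines — taking the address of a byte-aligned field of a packed struct, mirroring the "overaligned pointer to packed struct" behavior test whose x86_64 skip line is removed further down — is the kind of thing the self-hosted x86_64 backend can now lower:

const std = @import("std");

// Illustrative only (not from this commit): `b` sits at byte offset 4 inside
// the packed struct, so its field pointer can be materialized as the base
// pointer plus a plain byte offset.
const S = packed struct { a: u32, b: u32 };

test "pointer to a byte-aligned packed struct field" {
    var s: S = .{ .a = 123, .b = 456 };
    const ptr = &s.b;
    ptr.* = 789;
    try std.testing.expectEqual(@as(u32, 789), s.b);
}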
@@ -3659,34 +3659,37 @@ fn airStore(self: *Self, inst: Air.Inst.Index) !void {
 fn airStructFieldPtr(self: *Self, inst: Air.Inst.Index) !void {
     const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
     const extra = self.air.extraData(Air.StructField, ty_pl.payload).data;
-    const result = try self.structFieldPtr(inst, extra.struct_operand, extra.field_index);
+    const result = try self.fieldPtr(inst, extra.struct_operand, extra.field_index);
     return self.finishAir(inst, result, .{ extra.struct_operand, .none, .none });
 }

 fn airStructFieldPtrIndex(self: *Self, inst: Air.Inst.Index, index: u8) !void {
     const ty_op = self.air.instructions.items(.data)[inst].ty_op;
-    const result = try self.structFieldPtr(inst, ty_op.operand, index);
+    const result = try self.fieldPtr(inst, ty_op.operand, index);
     return self.finishAir(inst, result, .{ ty_op.operand, .none, .none });
 }

-fn structFieldPtr(self: *Self, inst: Air.Inst.Index, operand: Air.Inst.Ref, index: u32) !MCValue {
+fn fieldPtr(self: *Self, inst: Air.Inst.Index, operand: Air.Inst.Ref, index: u32) !MCValue {
     if (self.liveness.isUnused(inst)) {
         return MCValue.dead;
     }

     const mcv = try self.resolveInst(operand);
     const ptr_ty = self.air.typeOf(operand);
-    const struct_ty = ptr_ty.childType();
-    if (struct_ty.zigTypeTag() == .Struct and struct_ty.containerLayout() == .Packed) {
-        return self.fail("TODO structFieldPtr implement packed structs", .{});
-    }
-    const struct_field_offset = @intCast(u32, struct_ty.structFieldOffset(index, self.target.*));
+    const container_ty = ptr_ty.childType();
+    const field_offset = switch (container_ty.containerLayout()) {
+        .Auto, .Extern => @intCast(u32, container_ty.structFieldOffset(index, self.target.*)),
+        .Packed => if (container_ty.zigTypeTag() == .Struct and ptr_ty.ptrInfo().data.host_size == 0)
+            container_ty.packedStructFieldByteOffset(index, self.target.*)
+        else
+            0,
+    };

     const dst_mcv: MCValue = result: {
         switch (mcv) {
             .stack_offset => {
                 const offset_reg = try self.copyToTmpRegister(ptr_ty, .{
-                    .immediate = struct_field_offset,
+                    .immediate = field_offset,
                 });
                 const offset_reg_lock = self.register_manager.lockRegAssumeUnused(offset_reg);
                 defer self.register_manager.unlockReg(offset_reg_lock);
@@ -3696,7 +3699,7 @@ fn structFieldPtr(self: *Self, inst: Air.Inst.Index, operand: Air.Inst.Ref, inde
                 break :result dst_mcv;
             },
             .ptr_stack_offset => |off| {
-                const ptr_stack_offset = off - @intCast(i32, struct_field_offset);
+                const ptr_stack_offset = off - @intCast(i32, field_offset);
                 break :result MCValue{ .ptr_stack_offset = ptr_stack_offset };
             },
             .register => |reg| {
@@ -3704,7 +3707,7 @@ fn structFieldPtr(self: *Self, inst: Air.Inst.Index, operand: Air.Inst.Ref, inde
                 defer self.register_manager.unlockReg(reg_lock);

                 const offset_reg = try self.copyToTmpRegister(ptr_ty, .{
-                    .immediate = struct_field_offset,
+                    .immediate = field_offset,
                 });
                 const offset_reg_lock = self.register_manager.lockRegAssumeUnused(offset_reg);
                 defer self.register_manager.unlockReg(offset_reg_lock);
@@ -3725,7 +3728,7 @@ fn structFieldPtr(self: *Self, inst: Air.Inst.Index, operand: Air.Inst.Ref, inde
                 try self.genBinOpMir(.add, ptr_ty, .{ .register = result_reg }, .{ .register = offset_reg });
                 break :result MCValue{ .register = result_reg };
             },
-            else => return self.fail("TODO implement codegen struct_field_ptr for {}", .{mcv}),
+            else => return self.fail("TODO implement fieldPtr for {}", .{mcv}),
         }
     };
     return dst_mcv;
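A reading of the new offset logic above (my gloss, not text from the commit): for .Auto and .Extern layouts, fieldPtr still adds the field's ABI byte offset. For a packed struct whose field pointer has host_size == 0 — that is, the field can be addressed as whole bytes — it adds packedStructFieldByteOffset; in the remaining packed cases it adds 0, since the bit offset is carried in the pointer type rather than in the address. A rough Zig illustration of the two packed cases, with an assumed field layout:

const std = @import("std");

// Rough illustration (assumed semantics, not code from the commit): P packs
// into a 16-bit backing integer; `c` starts at bit 8 and spans whole bytes,
// while `b` starts at bit 3.
const P = packed struct { a: u3, b: u5, c: u8 };

test "byte-addressable vs bit-addressable packed fields" {
    var p: P = .{ .a = 1, .b = 2, .c = 3 };
    // `c` is byte-addressable, so its field pointer is base + 1 byte
    // (the host_size == 0 branch above).
    const pc = &p.c;
    pc.* = 7;
    // `b` is not byte-addressable, so its pointer carries a bit offset in its
    // type and fieldPtr adds a byte offset of 0.
    const pb = &p.b;
    pb.* = 4;
    try std.testing.expect(p.c == 7 and p.b == 4);
}

The remaining hunks drop the x86_64 skip lines from behavior tests that this lowering unblocks.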
@@ -10,7 +10,6 @@ var buffer: [256]u8 = undefined;

 test {
     if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_x86) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
@@ -14,7 +14,6 @@ fn bar(pointer: ?*anyopaque) void {
 test "fixed" {
     if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO

     bar(t);
@@ -4,7 +4,6 @@ const builtin = @import("builtin");
 test "@ptrCast from const to nullable" {
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO

     const c: u8 = 4;
@@ -15,7 +14,6 @@ test "@ptrCast from const to nullable" {
 test "@ptrCast from var in empty struct to nullable" {
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO

     const container = struct {
@@ -96,7 +96,6 @@ test "discard the result of a function that returns a struct" {
 }

 test "inline function call that calls optional function pointer, return pointer at callsite interacts correctly with callsite return type" {
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
@@ -571,7 +571,6 @@ test "packed struct passed to callconv(.C) function" {
 test "overaligned pointer to packed struct" {
     if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;

     const S = packed struct { a: u32, b: u32 };
     var foo: S align(4) = .{ .a = 123, .b = 456 };
@@ -827,7 +827,6 @@ test "non-packed struct with u128 entry in union" {
 }

 test "packed struct field passed to generic function" {
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO