mirror of
https://github.com/ziglang/zig.git
synced 2026-02-14 21:38:33 +00:00
stage2: move many Type encodings to InternPool
Notably, `vector`. Additionally, all alternate encodings of `pointer`, `optional`, and `array`.
This commit is contained in:
parent
9d422bff18
commit
5e636643d2
13
src/Air.zig
13
src/Air.zig
@ -1375,7 +1375,7 @@ pub fn typeOfIndex(air: Air, inst: Air.Inst.Index, ip: InternPool) Type {
|
||||
|
||||
.bool_to_int => return Type.u1,
|
||||
|
||||
.tag_name, .error_name => return Type.initTag(.const_slice_u8_sentinel_0),
|
||||
.tag_name, .error_name => return Type.const_slice_u8_sentinel_0,
|
||||
|
||||
.call, .call_always_tail, .call_never_tail, .call_never_inline => {
|
||||
const callee_ty = air.typeOf(datas[inst].pl_op.operand, ip);
|
||||
@ -1384,18 +1384,21 @@ pub fn typeOfIndex(air: Air, inst: Air.Inst.Index, ip: InternPool) Type {
|
||||
|
||||
.slice_elem_val, .ptr_elem_val, .array_elem_val => {
|
||||
const ptr_ty = air.typeOf(datas[inst].bin_op.lhs, ip);
|
||||
return ptr_ty.elemType();
|
||||
return ptr_ty.childTypeIp(ip);
|
||||
},
|
||||
.atomic_load => {
|
||||
const ptr_ty = air.typeOf(datas[inst].atomic_load.ptr, ip);
|
||||
return ptr_ty.elemType();
|
||||
return ptr_ty.childTypeIp(ip);
|
||||
},
|
||||
.atomic_rmw => {
|
||||
const ptr_ty = air.typeOf(datas[inst].pl_op.operand, ip);
|
||||
return ptr_ty.elemType();
|
||||
return ptr_ty.childTypeIp(ip);
|
||||
},
|
||||
|
||||
.reduce, .reduce_optimized => return air.typeOf(datas[inst].reduce.operand, ip).childType(),
|
||||
.reduce, .reduce_optimized => {
|
||||
const operand_ty = air.typeOf(datas[inst].reduce.operand, ip);
|
||||
return ip.indexToKey(operand_ty.ip_index).vector_type.child.toType();
|
||||
},
|
||||
|
||||
.mul_add => return air.typeOf(datas[inst].pl_op.operand, ip),
|
||||
.select => {
|
||||
|
||||
@ -31,28 +31,10 @@ const KeyAdapter = struct {
|
||||
|
||||
pub const Key = union(enum) {
|
||||
int_type: IntType,
|
||||
ptr_type: struct {
|
||||
elem_type: Index,
|
||||
sentinel: Index = .none,
|
||||
alignment: u16 = 0,
|
||||
size: std.builtin.Type.Pointer.Size,
|
||||
is_const: bool = false,
|
||||
is_volatile: bool = false,
|
||||
is_allowzero: bool = false,
|
||||
address_space: std.builtin.AddressSpace = .generic,
|
||||
},
|
||||
array_type: struct {
|
||||
len: u64,
|
||||
child: Index,
|
||||
sentinel: Index,
|
||||
},
|
||||
vector_type: struct {
|
||||
len: u32,
|
||||
child: Index,
|
||||
},
|
||||
optional_type: struct {
|
||||
payload_type: Index,
|
||||
},
|
||||
ptr_type: PtrType,
|
||||
array_type: ArrayType,
|
||||
vector_type: VectorType,
|
||||
opt_type: Index,
|
||||
error_union_type: struct {
|
||||
error_set_type: Index,
|
||||
payload_type: Index,
|
||||
@ -87,6 +69,47 @@ pub const Key = union(enum) {
|
||||
|
||||
pub const IntType = std.builtin.Type.Int;
|
||||
|
||||
pub const PtrType = struct {
|
||||
elem_type: Index,
|
||||
sentinel: Index = .none,
|
||||
/// If zero use pointee_type.abiAlignment()
|
||||
/// When creating pointer types, if alignment is equal to pointee type
|
||||
/// abi alignment, this value should be set to 0 instead.
|
||||
alignment: u16 = 0,
|
||||
/// If this is non-zero it means the pointer points to a sub-byte
|
||||
/// range of data, which is backed by a "host integer" with this
|
||||
/// number of bytes.
|
||||
/// When host_size=pointee_abi_size and bit_offset=0, this must be
|
||||
/// represented with host_size=0 instead.
|
||||
host_size: u16 = 0,
|
||||
bit_offset: u16 = 0,
|
||||
vector_index: VectorIndex = .none,
|
||||
size: std.builtin.Type.Pointer.Size = .One,
|
||||
is_const: bool = false,
|
||||
is_volatile: bool = false,
|
||||
is_allowzero: bool = false,
|
||||
/// See src/target.zig defaultAddressSpace function for how to obtain
|
||||
/// an appropriate value for this field.
|
||||
address_space: std.builtin.AddressSpace = .generic,
|
||||
|
||||
pub const VectorIndex = enum(u32) {
|
||||
none = std.math.maxInt(u32),
|
||||
runtime = std.math.maxInt(u32) - 1,
|
||||
_,
|
||||
};
|
||||
};
|
||||
|
||||
pub const ArrayType = struct {
|
||||
len: u64,
|
||||
child: Index,
|
||||
sentinel: Index,
|
||||
};
|
||||
|
||||
pub const VectorType = struct {
|
||||
len: u32,
|
||||
child: Index,
|
||||
};
|
||||
|
||||
pub fn hash32(key: Key) u32 {
|
||||
return @truncate(u32, key.hash64());
|
||||
}
|
||||
@ -106,7 +129,7 @@ pub const Key = union(enum) {
|
||||
.ptr_type,
|
||||
.array_type,
|
||||
.vector_type,
|
||||
.optional_type,
|
||||
.opt_type,
|
||||
.error_union_type,
|
||||
.simple_type,
|
||||
.simple_value,
|
||||
@ -159,8 +182,8 @@ pub const Key = union(enum) {
|
||||
const b_info = b.vector_type;
|
||||
return std.meta.eql(a_info, b_info);
|
||||
},
|
||||
.optional_type => |a_info| {
|
||||
const b_info = b.optional_type;
|
||||
.opt_type => |a_info| {
|
||||
const b_info = b.opt_type;
|
||||
return std.meta.eql(a_info, b_info);
|
||||
},
|
||||
.error_union_type => |a_info| {
|
||||
@ -220,7 +243,7 @@ pub const Key = union(enum) {
|
||||
.ptr_type,
|
||||
.array_type,
|
||||
.vector_type,
|
||||
.optional_type,
|
||||
.opt_type,
|
||||
.error_union_type,
|
||||
.simple_type,
|
||||
.struct_type,
|
||||
@ -630,6 +653,7 @@ pub const Tag = enum(u8) {
|
||||
/// data is payload to Vector.
|
||||
type_vector,
|
||||
/// A fully explicitly specified pointer type.
|
||||
/// TODO actually this is missing some stuff like bit_offset
|
||||
/// data is payload to Pointer.
|
||||
type_pointer,
|
||||
/// An optional type.
|
||||
@ -893,7 +917,7 @@ pub fn indexToKey(ip: InternPool, index: Index) Key {
|
||||
} };
|
||||
},
|
||||
|
||||
.type_optional => .{ .optional_type = .{ .payload_type = @intToEnum(Index, data) } },
|
||||
.type_optional => .{ .opt_type = @intToEnum(Index, data) },
|
||||
|
||||
.type_error_union => @panic("TODO"),
|
||||
.type_enum_simple => @panic("TODO"),
|
||||
@ -971,10 +995,10 @@ pub fn get(ip: *InternPool, gpa: Allocator, key: Key) Allocator.Error!Index {
|
||||
}),
|
||||
});
|
||||
},
|
||||
.optional_type => |optional_type| {
|
||||
.opt_type => |opt_type| {
|
||||
ip.items.appendAssumeCapacity(.{
|
||||
.tag = .type_optional,
|
||||
.data = @enumToInt(optional_type.payload_type),
|
||||
.data = @enumToInt(opt_type),
|
||||
});
|
||||
},
|
||||
.error_union_type => |error_union_type| {
|
||||
@ -1192,3 +1216,13 @@ test "basic usage" {
|
||||
} });
|
||||
try std.testing.expect(another_array_i32 == array_i32);
|
||||
}
|
||||
|
||||
pub fn childType(ip: InternPool, i: Index) Index {
|
||||
return switch (ip.indexToKey(i)) {
|
||||
.ptr_type => |ptr_type| ptr_type.elem_type,
|
||||
.vector_type => |vector_type| vector_type.child,
|
||||
.array_type => |array_type| array_type.child,
|
||||
.opt_type => |child| child,
|
||||
else => unreachable,
|
||||
};
|
||||
}
|
||||
|
||||
@ -225,6 +225,7 @@ pub fn categorizeOperand(
|
||||
air: Air,
|
||||
inst: Air.Inst.Index,
|
||||
operand: Air.Inst.Index,
|
||||
ip: InternPool,
|
||||
) OperandCategory {
|
||||
const air_tags = air.instructions.items(.tag);
|
||||
const air_datas = air.instructions.items(.data);
|
||||
@ -534,7 +535,7 @@ pub fn categorizeOperand(
|
||||
.aggregate_init => {
|
||||
const ty_pl = air_datas[inst].ty_pl;
|
||||
const aggregate_ty = air.getRefType(ty_pl.ty);
|
||||
const len = @intCast(usize, aggregate_ty.arrayLen());
|
||||
const len = @intCast(usize, aggregate_ty.arrayLenIp(ip));
|
||||
const elements = @ptrCast([]const Air.Inst.Ref, air.extra[ty_pl.payload..][0..len]);
|
||||
|
||||
if (elements.len <= bpi - 1) {
|
||||
@ -625,7 +626,7 @@ pub fn categorizeOperand(
|
||||
|
||||
var operand_live: bool = true;
|
||||
for (air.extra[cond_extra.end..][0..2]) |cond_inst| {
|
||||
if (l.categorizeOperand(air, cond_inst, operand) == .tomb)
|
||||
if (l.categorizeOperand(air, cond_inst, operand, ip) == .tomb)
|
||||
operand_live = false;
|
||||
|
||||
switch (air_tags[cond_inst]) {
|
||||
@ -872,6 +873,7 @@ fn analyzeInst(
|
||||
data: *LivenessPassData(pass),
|
||||
inst: Air.Inst.Index,
|
||||
) Allocator.Error!void {
|
||||
const ip = a.intern_pool;
|
||||
const inst_tags = a.air.instructions.items(.tag);
|
||||
const inst_datas = a.air.instructions.items(.data);
|
||||
|
||||
@ -1140,7 +1142,7 @@ fn analyzeInst(
|
||||
.aggregate_init => {
|
||||
const ty_pl = inst_datas[inst].ty_pl;
|
||||
const aggregate_ty = a.air.getRefType(ty_pl.ty);
|
||||
const len = @intCast(usize, aggregate_ty.arrayLen());
|
||||
const len = @intCast(usize, aggregate_ty.arrayLenIp(ip.*));
|
||||
const elements = @ptrCast([]const Air.Inst.Ref, a.air.extra[ty_pl.payload..][0..len]);
|
||||
|
||||
if (elements.len <= bpi - 1) {
|
||||
|
||||
@ -325,7 +325,7 @@ fn verifyBody(self: *Verify, body: []const Air.Inst.Index) Error!void {
|
||||
.aggregate_init => {
|
||||
const ty_pl = data[inst].ty_pl;
|
||||
const aggregate_ty = self.air.getRefType(ty_pl.ty);
|
||||
const len = @intCast(usize, aggregate_ty.arrayLen());
|
||||
const len = @intCast(usize, aggregate_ty.arrayLenIp(ip.*));
|
||||
const elements = @ptrCast([]const Air.Inst.Ref, self.air.extra[ty_pl.payload..][0..len]);
|
||||
|
||||
var bt = self.liveness.iterateBigTomb(inst);
|
||||
|
||||
@ -5805,7 +5805,7 @@ pub fn analyzeFnBody(mod: *Module, func: *Fn, arena: Allocator) SemaError!Air {
|
||||
// is unused so it just has to be a no-op.
|
||||
sema.air_instructions.set(ptr_inst.*, .{
|
||||
.tag = .alloc,
|
||||
.data = .{ .ty = Type.initTag(.single_const_pointer_to_comptime_int) },
|
||||
.data = .{ .ty = Type.single_const_pointer_to_comptime_int },
|
||||
});
|
||||
}
|
||||
}
|
||||
@ -6545,7 +6545,7 @@ pub fn populateTestFunctions(
|
||||
}
|
||||
const decl = mod.declPtr(decl_index);
|
||||
var buf: Type.SlicePtrFieldTypeBuffer = undefined;
|
||||
const tmp_test_fn_ty = decl.ty.slicePtrFieldType(&buf).elemType();
|
||||
const tmp_test_fn_ty = decl.ty.slicePtrFieldType(&buf).childType(mod);
|
||||
|
||||
const array_decl_index = d: {
|
||||
// Add mod.test_functions to an array decl then make the test_functions
|
||||
@ -6575,7 +6575,7 @@ pub fn populateTestFunctions(
|
||||
errdefer name_decl_arena.deinit();
|
||||
const bytes = try name_decl_arena.allocator().dupe(u8, test_name_slice);
|
||||
const test_name_decl_index = try mod.createAnonymousDeclFromDecl(array_decl, array_decl.src_namespace, null, .{
|
||||
.ty = try Type.Tag.array_u8.create(name_decl_arena.allocator(), bytes.len),
|
||||
.ty = try Type.array(name_decl_arena.allocator(), bytes.len, null, Type.u8, mod),
|
||||
.val = try Value.Tag.bytes.create(name_decl_arena.allocator(), bytes),
|
||||
});
|
||||
try mod.declPtr(test_name_decl_index).finalizeNewArena(&name_decl_arena);
|
||||
@ -6609,7 +6609,12 @@ pub fn populateTestFunctions(
|
||||
|
||||
{
|
||||
// This copy accesses the old Decl Type/Value so it must be done before `clearValues`.
|
||||
const new_ty = try Type.Tag.const_slice.create(arena, try tmp_test_fn_ty.copy(arena));
|
||||
const new_ty = try Type.ptr(arena, mod, .{
|
||||
.size = .Slice,
|
||||
.pointee_type = try tmp_test_fn_ty.copy(arena),
|
||||
.mutable = false,
|
||||
.@"addrspace" = .generic,
|
||||
});
|
||||
const new_var = try gpa.create(Var);
|
||||
errdefer gpa.destroy(new_var);
|
||||
new_var.* = decl.val.castTag(.variable).?.data.*;
|
||||
@ -6819,6 +6824,34 @@ pub fn intType(mod: *Module, signedness: std.builtin.Signedness, bits: u16) Allo
|
||||
return i.toType();
|
||||
}
|
||||
|
||||
pub fn arrayType(mod: *Module, info: InternPool.Key.ArrayType) Allocator.Error!Type {
|
||||
const i = try intern(mod, .{ .array_type = info });
|
||||
return i.toType();
|
||||
}
|
||||
|
||||
pub fn vectorType(mod: *Module, info: InternPool.Key.VectorType) Allocator.Error!Type {
|
||||
const i = try intern(mod, .{ .vector_type = info });
|
||||
return i.toType();
|
||||
}
|
||||
|
||||
pub fn optionalType(mod: *Module, child_type: InternPool.Index) Allocator.Error!Type {
|
||||
const i = try intern(mod, .{ .opt_type = child_type });
|
||||
return i.toType();
|
||||
}
|
||||
|
||||
pub fn ptrType(mod: *Module, info: InternPool.Key.PtrType) Allocator.Error!Type {
|
||||
const i = try intern(mod, .{ .ptr_type = info });
|
||||
return i.toType();
|
||||
}
|
||||
|
||||
pub fn singleMutPtrType(mod: *Module, child_type: Type) Allocator.Error!Type {
|
||||
return ptrType(mod, .{ .elem_type = child_type.ip_index });
|
||||
}
|
||||
|
||||
pub fn singleConstPtrType(mod: *Module, child_type: Type) Allocator.Error!Type {
|
||||
return ptrType(mod, .{ .elem_type = child_type.ip_index, .is_const = true });
|
||||
}
|
||||
|
||||
pub fn smallestUnsignedInt(mod: *Module, max: u64) Allocator.Error!Type {
|
||||
return intType(mod, .unsigned, Type.smallestUnsignedBits(max));
|
||||
}
|
||||
|
||||
1073
src/Sema.zig
1073
src/Sema.zig
File diff suppressed because it is too large
Load Diff
@ -77,15 +77,6 @@ pub fn print(
|
||||
return writer.writeAll("(variable)");
|
||||
|
||||
while (true) switch (val.tag()) {
|
||||
.single_const_pointer_to_comptime_int_type => return writer.writeAll("*const comptime_int"),
|
||||
.const_slice_u8_type => return writer.writeAll("[]const u8"),
|
||||
.const_slice_u8_sentinel_0_type => return writer.writeAll("[:0]const u8"),
|
||||
.anyerror_void_error_union_type => return writer.writeAll("anyerror!void"),
|
||||
|
||||
.manyptr_u8_type => return writer.writeAll("[*]u8"),
|
||||
.manyptr_const_u8_type => return writer.writeAll("[*]const u8"),
|
||||
.manyptr_const_u8_sentinel_0_type => return writer.writeAll("[*:0]const u8"),
|
||||
|
||||
.empty_struct_value, .aggregate => {
|
||||
if (level == 0) {
|
||||
return writer.writeAll(".{ ... }");
|
||||
@ -112,7 +103,7 @@ pub fn print(
|
||||
return writer.writeAll("}");
|
||||
} else {
|
||||
const elem_ty = ty.elemType2(mod);
|
||||
const len = ty.arrayLen();
|
||||
const len = ty.arrayLen(mod);
|
||||
|
||||
if (elem_ty.eql(Type.u8, mod)) str: {
|
||||
const max_len = @intCast(usize, std.math.min(len, max_string_len));
|
||||
@ -288,7 +279,7 @@ pub fn print(
|
||||
.ty = ty.elemType2(mod),
|
||||
.val = val.castTag(.repeated).?.data,
|
||||
};
|
||||
const len = ty.arrayLen();
|
||||
const len = ty.arrayLen(mod);
|
||||
const max_len = std.math.min(len, max_aggregate_items);
|
||||
while (i < max_len) : (i += 1) {
|
||||
if (i != 0) try writer.writeAll(", ");
|
||||
@ -306,7 +297,7 @@ pub fn print(
|
||||
try writer.writeAll(".{ ");
|
||||
try print(.{
|
||||
.ty = ty.elemType2(mod),
|
||||
.val = ty.sentinel().?,
|
||||
.val = ty.sentinel(mod).?,
|
||||
}, writer, level - 1, mod);
|
||||
return writer.writeAll(" }");
|
||||
},
|
||||
@ -364,8 +355,7 @@ pub fn print(
|
||||
},
|
||||
.opt_payload => {
|
||||
val = val.castTag(.opt_payload).?.data;
|
||||
var buf: Type.Payload.ElemType = undefined;
|
||||
ty = ty.optionalChild(&buf);
|
||||
ty = ty.optionalChild(mod);
|
||||
return print(.{ .ty = ty, .val = val }, writer, level, mod);
|
||||
},
|
||||
.eu_payload_ptr => {
|
||||
@ -386,13 +376,8 @@ pub fn print(
|
||||
|
||||
try writer.writeAll(", &(payload of ");
|
||||
|
||||
var ptr_ty: Type.Payload.ElemType = .{
|
||||
.base = .{ .tag = .single_mut_pointer },
|
||||
.data = data.container_ty,
|
||||
};
|
||||
|
||||
try print(.{
|
||||
.ty = Type.initPayload(&ptr_ty.base),
|
||||
.ty = mod.singleMutPtrType(data.container_ty) catch @panic("OOM"),
|
||||
.val = data.container_ptr,
|
||||
}, writer, level - 1, mod);
|
||||
|
||||
@ -415,13 +400,8 @@ pub fn print(
|
||||
|
||||
try writer.writeAll(", &(payload of ");
|
||||
|
||||
var ptr_ty: Type.Payload.ElemType = .{
|
||||
.base = .{ .tag = .single_mut_pointer },
|
||||
.data = data.container_ty,
|
||||
};
|
||||
|
||||
try print(.{
|
||||
.ty = Type.initPayload(&ptr_ty.base),
|
||||
.ty = mod.singleMutPtrType(data.container_ty) catch @panic("OOM"),
|
||||
.val = data.container_ptr,
|
||||
}, writer, level - 1, mod);
|
||||
|
||||
|
||||
@ -1030,7 +1030,7 @@ fn allocMem(
|
||||
/// Use a pointer instruction as the basis for allocating stack memory.
|
||||
fn allocMemPtr(self: *Self, inst: Air.Inst.Index) !u32 {
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const elem_ty = self.typeOfIndex(inst).elemType();
|
||||
const elem_ty = self.typeOfIndex(inst).childType(mod);
|
||||
|
||||
if (!elem_ty.hasRuntimeBits(mod)) {
|
||||
// return the stack offset 0. Stack offset 0 will be where all
|
||||
@ -1140,17 +1140,14 @@ fn airAlloc(self: *Self, inst: Air.Inst.Index) !void {
|
||||
}
|
||||
|
||||
fn airRetPtr(self: *Self, inst: Air.Inst.Index) !void {
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const result: MCValue = switch (self.ret_mcv) {
|
||||
.none, .register => .{ .ptr_stack_offset = try self.allocMemPtr(inst) },
|
||||
.stack_offset => blk: {
|
||||
// self.ret_mcv is an address to where this function
|
||||
// should store its result into
|
||||
const ret_ty = self.fn_type.fnReturnType();
|
||||
var ptr_ty_payload: Type.Payload.ElemType = .{
|
||||
.base = .{ .tag = .single_mut_pointer },
|
||||
.data = ret_ty,
|
||||
};
|
||||
const ptr_ty = Type.initPayload(&ptr_ty_payload.base);
|
||||
const ptr_ty = try mod.singleMutPtrType(ret_ty);
|
||||
|
||||
// addr_reg will contain the address of where to store the
|
||||
// result into
|
||||
@ -2406,9 +2403,9 @@ fn ptrArithmetic(
|
||||
assert(rhs_ty.eql(Type.usize, mod));
|
||||
|
||||
const ptr_ty = lhs_ty;
|
||||
const elem_ty = switch (ptr_ty.ptrSize()) {
|
||||
.One => ptr_ty.childType().childType(), // ptr to array, so get array element type
|
||||
else => ptr_ty.childType(),
|
||||
const elem_ty = switch (ptr_ty.ptrSize(mod)) {
|
||||
.One => ptr_ty.childType(mod).childType(mod), // ptr to array, so get array element type
|
||||
else => ptr_ty.childType(mod),
|
||||
};
|
||||
const elem_size = elem_ty.abiSize(mod);
|
||||
|
||||
@ -3024,8 +3021,7 @@ fn airOptionalPayload(self: *Self, inst: Air.Inst.Index) !void {
|
||||
|
||||
fn optionalPayload(self: *Self, inst: Air.Inst.Index, mcv: MCValue, optional_ty: Type) !MCValue {
|
||||
const mod = self.bin_file.options.module.?;
|
||||
var opt_buf: Type.Payload.ElemType = undefined;
|
||||
const payload_ty = optional_ty.optionalChild(&opt_buf);
|
||||
const payload_ty = optional_ty.optionalChild(mod);
|
||||
if (!payload_ty.hasRuntimeBits(mod)) return MCValue.none;
|
||||
if (optional_ty.isPtrLikeOptional(mod)) {
|
||||
// TODO should we reuse the operand here?
|
||||
@ -3459,7 +3455,7 @@ fn ptrElemVal(
|
||||
maybe_inst: ?Air.Inst.Index,
|
||||
) !MCValue {
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const elem_ty = ptr_ty.childType();
|
||||
const elem_ty = ptr_ty.childType(mod);
|
||||
const elem_size = @intCast(u32, elem_ty.abiSize(mod));
|
||||
|
||||
// TODO optimize for elem_sizes of 1, 2, 4, 8
|
||||
@ -3617,7 +3613,7 @@ fn reuseOperand(
|
||||
|
||||
fn load(self: *Self, dst_mcv: MCValue, ptr: MCValue, ptr_ty: Type) InnerError!void {
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const elem_ty = ptr_ty.elemType();
|
||||
const elem_ty = ptr_ty.childType(mod);
|
||||
const elem_size = elem_ty.abiSize(mod);
|
||||
|
||||
switch (ptr) {
|
||||
@ -3773,7 +3769,7 @@ fn genInlineMemset(
|
||||
) !void {
|
||||
const dst_reg = switch (dst) {
|
||||
.register => |r| r,
|
||||
else => try self.copyToTmpRegister(Type.initTag(.manyptr_u8), dst),
|
||||
else => try self.copyToTmpRegister(Type.manyptr_u8, dst),
|
||||
};
|
||||
const dst_reg_lock = self.register_manager.lockReg(dst_reg);
|
||||
defer if (dst_reg_lock) |lock| self.register_manager.unlockReg(lock);
|
||||
@ -4096,7 +4092,7 @@ fn structFieldPtr(self: *Self, inst: Air.Inst.Index, operand: Air.Inst.Ref, inde
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const mcv = try self.resolveInst(operand);
|
||||
const ptr_ty = self.typeOf(operand);
|
||||
const struct_ty = ptr_ty.childType();
|
||||
const struct_ty = ptr_ty.childType(mod);
|
||||
const struct_field_offset = @intCast(u32, struct_ty.structFieldOffset(index, mod));
|
||||
switch (mcv) {
|
||||
.ptr_stack_offset => |off| {
|
||||
@ -4173,7 +4169,7 @@ fn airFieldParentPtr(self: *Self, inst: Air.Inst.Index) !void {
|
||||
const extra = self.air.extraData(Air.FieldParentPtr, ty_pl.payload).data;
|
||||
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
|
||||
const field_ptr = try self.resolveInst(extra.field_ptr);
|
||||
const struct_ty = self.air.getRefType(ty_pl.ty).childType();
|
||||
const struct_ty = self.air.getRefType(ty_pl.ty).childType(mod);
|
||||
const struct_field_offset = @intCast(u32, struct_ty.structFieldOffset(extra.field_index, mod));
|
||||
switch (field_ptr) {
|
||||
.ptr_stack_offset => |off| {
|
||||
@ -4254,7 +4250,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
|
||||
|
||||
const fn_ty = switch (ty.zigTypeTag(mod)) {
|
||||
.Fn => ty,
|
||||
.Pointer => ty.childType(),
|
||||
.Pointer => ty.childType(mod),
|
||||
else => unreachable,
|
||||
};
|
||||
|
||||
@ -4280,11 +4276,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
|
||||
|
||||
const ret_ptr_reg = self.registerAlias(.x0, Type.usize);
|
||||
|
||||
var ptr_ty_payload: Type.Payload.ElemType = .{
|
||||
.base = .{ .tag = .single_mut_pointer },
|
||||
.data = ret_ty,
|
||||
};
|
||||
const ptr_ty = Type.initPayload(&ptr_ty_payload.base);
|
||||
const ptr_ty = try mod.singleMutPtrType(ret_ty);
|
||||
try self.register_manager.getReg(ret_ptr_reg, null);
|
||||
try self.genSetReg(ptr_ty, ret_ptr_reg, .{ .ptr_stack_offset = stack_offset });
|
||||
|
||||
@ -4453,11 +4445,7 @@ fn airRet(self: *Self, inst: Air.Inst.Index) !void {
|
||||
//
|
||||
// self.ret_mcv is an address to where this function
|
||||
// should store its result into
|
||||
var ptr_ty_payload: Type.Payload.ElemType = .{
|
||||
.base = .{ .tag = .single_mut_pointer },
|
||||
.data = ret_ty,
|
||||
};
|
||||
const ptr_ty = Type.initPayload(&ptr_ty_payload.base);
|
||||
const ptr_ty = try mod.singleMutPtrType(ret_ty);
|
||||
try self.store(self.ret_mcv, operand, ptr_ty, ret_ty);
|
||||
},
|
||||
else => unreachable,
|
||||
@ -4533,8 +4521,7 @@ fn cmp(
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const int_ty = switch (lhs_ty.zigTypeTag(mod)) {
|
||||
.Optional => blk: {
|
||||
var opt_buffer: Type.Payload.ElemType = undefined;
|
||||
const payload_ty = lhs_ty.optionalChild(&opt_buffer);
|
||||
const payload_ty = lhs_ty.optionalChild(mod);
|
||||
if (!payload_ty.hasRuntimeBitsIgnoreComptime(mod)) {
|
||||
break :blk Type.u1;
|
||||
} else if (lhs_ty.isPtrLikeOptional(mod)) {
|
||||
@ -4850,8 +4837,7 @@ fn airCondBr(self: *Self, inst: Air.Inst.Index) !void {
|
||||
fn isNull(self: *Self, operand_bind: ReadArg.Bind, operand_ty: Type) !MCValue {
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const sentinel: struct { ty: Type, bind: ReadArg.Bind } = if (!operand_ty.isPtrLikeOptional(mod)) blk: {
|
||||
var buf: Type.Payload.ElemType = undefined;
|
||||
const payload_ty = operand_ty.optionalChild(&buf);
|
||||
const payload_ty = operand_ty.optionalChild(mod);
|
||||
if (!payload_ty.hasRuntimeBitsIgnoreComptime(mod))
|
||||
break :blk .{ .ty = operand_ty, .bind = operand_bind };
|
||||
|
||||
@ -4947,11 +4933,12 @@ fn airIsNull(self: *Self, inst: Air.Inst.Index) !void {
|
||||
}
|
||||
|
||||
fn airIsNullPtr(self: *Self, inst: Air.Inst.Index) !void {
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const un_op = self.air.instructions.items(.data)[inst].un_op;
|
||||
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
|
||||
const operand_ptr = try self.resolveInst(un_op);
|
||||
const ptr_ty = self.typeOf(un_op);
|
||||
const elem_ty = ptr_ty.elemType();
|
||||
const elem_ty = ptr_ty.childType(mod);
|
||||
|
||||
const operand = try self.allocRegOrMem(elem_ty, true, null);
|
||||
try self.load(operand, operand_ptr, ptr_ty);
|
||||
@ -4973,11 +4960,12 @@ fn airIsNonNull(self: *Self, inst: Air.Inst.Index) !void {
|
||||
}
|
||||
|
||||
fn airIsNonNullPtr(self: *Self, inst: Air.Inst.Index) !void {
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const un_op = self.air.instructions.items(.data)[inst].un_op;
|
||||
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
|
||||
const operand_ptr = try self.resolveInst(un_op);
|
||||
const ptr_ty = self.typeOf(un_op);
|
||||
const elem_ty = ptr_ty.elemType();
|
||||
const elem_ty = ptr_ty.childType(mod);
|
||||
|
||||
const operand = try self.allocRegOrMem(elem_ty, true, null);
|
||||
try self.load(operand, operand_ptr, ptr_ty);
|
||||
@ -4999,11 +4987,12 @@ fn airIsErr(self: *Self, inst: Air.Inst.Index) !void {
|
||||
}
|
||||
|
||||
fn airIsErrPtr(self: *Self, inst: Air.Inst.Index) !void {
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const un_op = self.air.instructions.items(.data)[inst].un_op;
|
||||
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
|
||||
const operand_ptr = try self.resolveInst(un_op);
|
||||
const ptr_ty = self.typeOf(un_op);
|
||||
const elem_ty = ptr_ty.elemType();
|
||||
const elem_ty = ptr_ty.childType(mod);
|
||||
|
||||
const operand = try self.allocRegOrMem(elem_ty, true, null);
|
||||
try self.load(operand, operand_ptr, ptr_ty);
|
||||
@ -5025,11 +5014,12 @@ fn airIsNonErr(self: *Self, inst: Air.Inst.Index) !void {
|
||||
}
|
||||
|
||||
fn airIsNonErrPtr(self: *Self, inst: Air.Inst.Index) !void {
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const un_op = self.air.instructions.items(.data)[inst].un_op;
|
||||
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
|
||||
const operand_ptr = try self.resolveInst(un_op);
|
||||
const ptr_ty = self.typeOf(un_op);
|
||||
const elem_ty = ptr_ty.elemType();
|
||||
const elem_ty = ptr_ty.childType(mod);
|
||||
|
||||
const operand = try self.allocRegOrMem(elem_ty, true, null);
|
||||
try self.load(operand, operand_ptr, ptr_ty);
|
||||
@ -5511,11 +5501,7 @@ fn genSetStack(self: *Self, ty: Type, stack_offset: u32, mcv: MCValue) InnerErro
|
||||
const reg = try self.copyToTmpRegister(ty, mcv);
|
||||
return self.genSetStack(ty, stack_offset, MCValue{ .register = reg });
|
||||
} else {
|
||||
var ptr_ty_payload: Type.Payload.ElemType = .{
|
||||
.base = .{ .tag = .single_mut_pointer },
|
||||
.data = ty,
|
||||
};
|
||||
const ptr_ty = Type.initPayload(&ptr_ty_payload.base);
|
||||
const ptr_ty = try mod.singleMutPtrType(ty);
|
||||
|
||||
// TODO call extern memcpy
|
||||
const regs = try self.register_manager.allocRegs(5, .{ null, null, null, null, null }, gp);
|
||||
@ -5833,11 +5819,7 @@ fn genSetStackArgument(self: *Self, ty: Type, stack_offset: u32, mcv: MCValue) I
|
||||
const reg = try self.copyToTmpRegister(ty, mcv);
|
||||
return self.genSetStackArgument(ty, stack_offset, MCValue{ .register = reg });
|
||||
} else {
|
||||
var ptr_ty_payload: Type.Payload.ElemType = .{
|
||||
.base = .{ .tag = .single_mut_pointer },
|
||||
.data = ty,
|
||||
};
|
||||
const ptr_ty = Type.initPayload(&ptr_ty_payload.base);
|
||||
const ptr_ty = try mod.singleMutPtrType(ty);
|
||||
|
||||
// TODO call extern memcpy
|
||||
const regs = try self.register_manager.allocRegs(5, .{ null, null, null, null, null }, gp);
|
||||
@ -5957,12 +5939,13 @@ fn airBitCast(self: *Self, inst: Air.Inst.Index) !void {
|
||||
}
|
||||
|
||||
fn airArrayToSlice(self: *Self, inst: Air.Inst.Index) !void {
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
|
||||
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
|
||||
const ptr_ty = self.typeOf(ty_op.operand);
|
||||
const ptr = try self.resolveInst(ty_op.operand);
|
||||
const array_ty = ptr_ty.childType();
|
||||
const array_len = @intCast(u32, array_ty.arrayLen());
|
||||
const array_ty = ptr_ty.childType(mod);
|
||||
const array_len = @intCast(u32, array_ty.arrayLen(mod));
|
||||
|
||||
const ptr_bits = self.target.ptrBitWidth();
|
||||
const ptr_bytes = @divExact(ptr_bits, 8);
|
||||
@ -6079,8 +6062,9 @@ fn airReduce(self: *Self, inst: Air.Inst.Index) !void {
|
||||
}
|
||||
|
||||
fn airAggregateInit(self: *Self, inst: Air.Inst.Index) !void {
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const vector_ty = self.typeOfIndex(inst);
|
||||
const len = vector_ty.vectorLen();
|
||||
const len = vector_ty.vectorLen(mod);
|
||||
const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
|
||||
const elements = @ptrCast([]const Air.Inst.Ref, self.air.extra[ty_pl.payload..][0..len]);
|
||||
const result: MCValue = res: {
|
||||
|
||||
@ -1010,7 +1010,7 @@ fn allocMem(
|
||||
/// Use a pointer instruction as the basis for allocating stack memory.
|
||||
fn allocMemPtr(self: *Self, inst: Air.Inst.Index) !u32 {
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const elem_ty = self.typeOfIndex(inst).elemType();
|
||||
const elem_ty = self.typeOfIndex(inst).childType(mod);
|
||||
|
||||
if (!elem_ty.hasRuntimeBits(mod)) {
|
||||
// As this stack item will never be dereferenced at runtime,
|
||||
@ -1117,17 +1117,14 @@ fn airAlloc(self: *Self, inst: Air.Inst.Index) !void {
|
||||
}
|
||||
|
||||
fn airRetPtr(self: *Self, inst: Air.Inst.Index) !void {
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const result: MCValue = switch (self.ret_mcv) {
|
||||
.none, .register => .{ .ptr_stack_offset = try self.allocMemPtr(inst) },
|
||||
.stack_offset => blk: {
|
||||
// self.ret_mcv is an address to where this function
|
||||
// should store its result into
|
||||
const ret_ty = self.fn_type.fnReturnType();
|
||||
var ptr_ty_payload: Type.Payload.ElemType = .{
|
||||
.base = .{ .tag = .single_mut_pointer },
|
||||
.data = ret_ty,
|
||||
};
|
||||
const ptr_ty = Type.initPayload(&ptr_ty_payload.base);
|
||||
const ptr_ty = try mod.singleMutPtrType(ret_ty);
|
||||
|
||||
// addr_reg will contain the address of where to store the
|
||||
// result into
|
||||
@ -2372,8 +2369,8 @@ fn ptrElemVal(
|
||||
ptr_ty: Type,
|
||||
maybe_inst: ?Air.Inst.Index,
|
||||
) !MCValue {
|
||||
const elem_ty = ptr_ty.childType();
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const elem_ty = ptr_ty.childType(mod);
|
||||
const elem_size = @intCast(u32, elem_ty.abiSize(mod));
|
||||
|
||||
switch (elem_size) {
|
||||
@ -2474,7 +2471,8 @@ fn arrayElemVal(
|
||||
array_ty: Type,
|
||||
maybe_inst: ?Air.Inst.Index,
|
||||
) InnerError!MCValue {
|
||||
const elem_ty = array_ty.childType();
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const elem_ty = array_ty.childType(mod);
|
||||
|
||||
const mcv = try array_bind.resolveToMcv(self);
|
||||
switch (mcv) {
|
||||
@ -2508,11 +2506,7 @@ fn arrayElemVal(
|
||||
|
||||
const base_bind: ReadArg.Bind = .{ .mcv = ptr_to_mcv };
|
||||
|
||||
var ptr_ty_payload: Type.Payload.ElemType = .{
|
||||
.base = .{ .tag = .single_mut_pointer },
|
||||
.data = elem_ty,
|
||||
};
|
||||
const ptr_ty = Type.initPayload(&ptr_ty_payload.base);
|
||||
const ptr_ty = try mod.singleMutPtrType(elem_ty);
|
||||
|
||||
return try self.ptrElemVal(base_bind, index_bind, ptr_ty, maybe_inst);
|
||||
},
|
||||
@ -2659,8 +2653,8 @@ fn reuseOperand(
|
||||
}
|
||||
|
||||
fn load(self: *Self, dst_mcv: MCValue, ptr: MCValue, ptr_ty: Type) InnerError!void {
|
||||
const elem_ty = ptr_ty.elemType();
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const elem_ty = ptr_ty.childType(mod);
|
||||
const elem_size = @intCast(u32, elem_ty.abiSize(mod));
|
||||
|
||||
switch (ptr) {
|
||||
@ -2888,7 +2882,7 @@ fn structFieldPtr(self: *Self, inst: Air.Inst.Index, operand: Air.Inst.Ref, inde
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const mcv = try self.resolveInst(operand);
|
||||
const ptr_ty = self.typeOf(operand);
|
||||
const struct_ty = ptr_ty.childType();
|
||||
const struct_ty = ptr_ty.childType(mod);
|
||||
const struct_field_offset = @intCast(u32, struct_ty.structFieldOffset(index, mod));
|
||||
switch (mcv) {
|
||||
.ptr_stack_offset => |off| {
|
||||
@ -3004,7 +2998,7 @@ fn airFieldParentPtr(self: *Self, inst: Air.Inst.Index) !void {
|
||||
const extra = self.air.extraData(Air.FieldParentPtr, ty_pl.payload).data;
|
||||
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
|
||||
const field_ptr = try self.resolveInst(extra.field_ptr);
|
||||
const struct_ty = self.air.getRefType(ty_pl.ty).childType();
|
||||
const struct_ty = self.air.getRefType(ty_pl.ty).childType(mod);
|
||||
|
||||
if (struct_ty.zigTypeTag(mod) == .Union) {
|
||||
return self.fail("TODO implement @fieldParentPtr codegen for unions", .{});
|
||||
@ -3898,9 +3892,9 @@ fn ptrArithmetic(
|
||||
assert(rhs_ty.eql(Type.usize, mod));
|
||||
|
||||
const ptr_ty = lhs_ty;
|
||||
const elem_ty = switch (ptr_ty.ptrSize()) {
|
||||
.One => ptr_ty.childType().childType(), // ptr to array, so get array element type
|
||||
else => ptr_ty.childType(),
|
||||
const elem_ty = switch (ptr_ty.ptrSize(mod)) {
|
||||
.One => ptr_ty.childType(mod).childType(mod), // ptr to array, so get array element type
|
||||
else => ptr_ty.childType(mod),
|
||||
};
|
||||
const elem_size = @intCast(u32, elem_ty.abiSize(mod));
|
||||
|
||||
@ -4079,7 +4073,7 @@ fn genInlineMemset(
|
||||
) !void {
|
||||
const dst_reg = switch (dst) {
|
||||
.register => |r| r,
|
||||
else => try self.copyToTmpRegister(Type.initTag(.manyptr_u8), dst),
|
||||
else => try self.copyToTmpRegister(Type.manyptr_u8, dst),
|
||||
};
|
||||
const dst_reg_lock = self.register_manager.lockReg(dst_reg);
|
||||
defer if (dst_reg_lock) |lock| self.register_manager.unlockReg(lock);
|
||||
@ -4229,7 +4223,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
|
||||
|
||||
const fn_ty = switch (ty.zigTypeTag(mod)) {
|
||||
.Fn => ty,
|
||||
.Pointer => ty.childType(),
|
||||
.Pointer => ty.childType(mod),
|
||||
else => unreachable,
|
||||
};
|
||||
|
||||
@ -4259,11 +4253,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
|
||||
const ret_abi_align = @intCast(u32, ret_ty.abiAlignment(mod));
|
||||
const stack_offset = try self.allocMem(ret_abi_size, ret_abi_align, inst);
|
||||
|
||||
var ptr_ty_payload: Type.Payload.ElemType = .{
|
||||
.base = .{ .tag = .single_mut_pointer },
|
||||
.data = ret_ty,
|
||||
};
|
||||
const ptr_ty = Type.initPayload(&ptr_ty_payload.base);
|
||||
const ptr_ty = try mod.singleMutPtrType(ret_ty);
|
||||
try self.register_manager.getReg(.r0, null);
|
||||
try self.genSetReg(ptr_ty, .r0, .{ .ptr_stack_offset = stack_offset });
|
||||
|
||||
@ -4401,11 +4391,7 @@ fn airRet(self: *Self, inst: Air.Inst.Index) !void {
|
||||
//
|
||||
// self.ret_mcv is an address to where this function
|
||||
// should store its result into
|
||||
var ptr_ty_payload: Type.Payload.ElemType = .{
|
||||
.base = .{ .tag = .single_mut_pointer },
|
||||
.data = ret_ty,
|
||||
};
|
||||
const ptr_ty = Type.initPayload(&ptr_ty_payload.base);
|
||||
const ptr_ty = try mod.singleMutPtrType(ret_ty);
|
||||
try self.store(self.ret_mcv, operand, ptr_ty, ret_ty);
|
||||
},
|
||||
else => unreachable, // invalid return result
|
||||
@ -4482,8 +4468,7 @@ fn cmp(
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const int_ty = switch (lhs_ty.zigTypeTag(mod)) {
|
||||
.Optional => blk: {
|
||||
var opt_buffer: Type.Payload.ElemType = undefined;
|
||||
const payload_ty = lhs_ty.optionalChild(&opt_buffer);
|
||||
const payload_ty = lhs_ty.optionalChild(mod);
|
||||
if (!payload_ty.hasRuntimeBitsIgnoreComptime(mod)) {
|
||||
break :blk Type.u1;
|
||||
} else if (lhs_ty.isPtrLikeOptional(mod)) {
|
||||
@ -4837,11 +4822,12 @@ fn airIsNull(self: *Self, inst: Air.Inst.Index) !void {
|
||||
}
|
||||
|
||||
fn airIsNullPtr(self: *Self, inst: Air.Inst.Index) !void {
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const un_op = self.air.instructions.items(.data)[inst].un_op;
|
||||
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
|
||||
const operand_ptr = try self.resolveInst(un_op);
|
||||
const ptr_ty = self.typeOf(un_op);
|
||||
const elem_ty = ptr_ty.elemType();
|
||||
const elem_ty = ptr_ty.childType(mod);
|
||||
|
||||
const operand = try self.allocRegOrMem(elem_ty, true, null);
|
||||
try self.load(operand, operand_ptr, ptr_ty);
|
||||
@ -4863,11 +4849,12 @@ fn airIsNonNull(self: *Self, inst: Air.Inst.Index) !void {
|
||||
}
|
||||
|
||||
fn airIsNonNullPtr(self: *Self, inst: Air.Inst.Index) !void {
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const un_op = self.air.instructions.items(.data)[inst].un_op;
|
||||
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
|
||||
const operand_ptr = try self.resolveInst(un_op);
|
||||
const ptr_ty = self.typeOf(un_op);
|
||||
const elem_ty = ptr_ty.elemType();
|
||||
const elem_ty = ptr_ty.childType(mod);
|
||||
|
||||
const operand = try self.allocRegOrMem(elem_ty, true, null);
|
||||
try self.load(operand, operand_ptr, ptr_ty);
|
||||
@ -4924,11 +4911,12 @@ fn airIsErr(self: *Self, inst: Air.Inst.Index) !void {
|
||||
}
|
||||
|
||||
fn airIsErrPtr(self: *Self, inst: Air.Inst.Index) !void {
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const un_op = self.air.instructions.items(.data)[inst].un_op;
|
||||
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
|
||||
const operand_ptr = try self.resolveInst(un_op);
|
||||
const ptr_ty = self.typeOf(un_op);
|
||||
const elem_ty = ptr_ty.elemType();
|
||||
const elem_ty = ptr_ty.childType(mod);
|
||||
|
||||
const operand = try self.allocRegOrMem(elem_ty, true, null);
|
||||
try self.load(operand, operand_ptr, ptr_ty);
|
||||
@ -4950,11 +4938,12 @@ fn airIsNonErr(self: *Self, inst: Air.Inst.Index) !void {
|
||||
}
|
||||
|
||||
fn airIsNonErrPtr(self: *Self, inst: Air.Inst.Index) !void {
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const un_op = self.air.instructions.items(.data)[inst].un_op;
|
||||
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
|
||||
const operand_ptr = try self.resolveInst(un_op);
|
||||
const ptr_ty = self.typeOf(un_op);
|
||||
const elem_ty = ptr_ty.elemType();
|
||||
const elem_ty = ptr_ty.childType(mod);
|
||||
|
||||
const operand = try self.allocRegOrMem(elem_ty, true, null);
|
||||
try self.load(operand, operand_ptr, ptr_ty);
|
||||
@ -5455,11 +5444,7 @@ fn genSetStack(self: *Self, ty: Type, stack_offset: u32, mcv: MCValue) InnerErro
|
||||
const reg = try self.copyToTmpRegister(ty, mcv);
|
||||
return self.genSetStack(ty, stack_offset, MCValue{ .register = reg });
|
||||
} else {
|
||||
var ptr_ty_payload: Type.Payload.ElemType = .{
|
||||
.base = .{ .tag = .single_mut_pointer },
|
||||
.data = ty,
|
||||
};
|
||||
const ptr_ty = Type.initPayload(&ptr_ty_payload.base);
|
||||
const ptr_ty = try mod.singleMutPtrType(ty);
|
||||
|
||||
// TODO call extern memcpy
|
||||
const regs = try self.register_manager.allocRegs(5, .{ null, null, null, null, null }, gp);
|
||||
@ -5816,11 +5801,7 @@ fn genSetStackArgument(self: *Self, ty: Type, stack_offset: u32, mcv: MCValue) I
|
||||
const reg = try self.copyToTmpRegister(ty, mcv);
|
||||
return self.genSetStackArgument(ty, stack_offset, MCValue{ .register = reg });
|
||||
} else {
|
||||
var ptr_ty_payload: Type.Payload.ElemType = .{
|
||||
.base = .{ .tag = .single_mut_pointer },
|
||||
.data = ty,
|
||||
};
|
||||
const ptr_ty = Type.initPayload(&ptr_ty_payload.base);
|
||||
const ptr_ty = try mod.singleMutPtrType(ty);
|
||||
|
||||
// TODO call extern memcpy
|
||||
const regs = try self.register_manager.allocRegs(5, .{ null, null, null, null, null }, gp);
|
||||
@ -5908,12 +5889,13 @@ fn airBitCast(self: *Self, inst: Air.Inst.Index) !void {
|
||||
}
|
||||
|
||||
fn airArrayToSlice(self: *Self, inst: Air.Inst.Index) !void {
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
|
||||
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
|
||||
const ptr_ty = self.typeOf(ty_op.operand);
|
||||
const ptr = try self.resolveInst(ty_op.operand);
|
||||
const array_ty = ptr_ty.childType();
|
||||
const array_len = @intCast(u32, array_ty.arrayLen());
|
||||
const array_ty = ptr_ty.childType(mod);
|
||||
const array_len = @intCast(u32, array_ty.arrayLen(mod));
|
||||
|
||||
const stack_offset = try self.allocMem(8, 8, inst);
|
||||
try self.genSetStack(ptr_ty, stack_offset, ptr);
|
||||
@ -6026,8 +6008,9 @@ fn airReduce(self: *Self, inst: Air.Inst.Index) !void {
|
||||
}
|
||||
|
||||
fn airAggregateInit(self: *Self, inst: Air.Inst.Index) !void {
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const vector_ty = self.typeOfIndex(inst);
|
||||
const len = vector_ty.vectorLen();
|
||||
const len = vector_ty.vectorLen(mod);
|
||||
const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
|
||||
const elements = @ptrCast([]const Air.Inst.Ref, self.air.extra[ty_pl.payload..][0..len]);
|
||||
const result: MCValue = res: {
|
||||
|
||||
@ -807,7 +807,7 @@ fn allocMem(self: *Self, inst: Air.Inst.Index, abi_size: u32, abi_align: u32) !u
|
||||
/// Use a pointer instruction as the basis for allocating stack memory.
|
||||
fn allocMemPtr(self: *Self, inst: Air.Inst.Index) !u32 {
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const elem_ty = self.typeOfIndex(inst).elemType();
|
||||
const elem_ty = self.typeOfIndex(inst).childType(mod);
|
||||
const abi_size = math.cast(u32, elem_ty.abiSize(mod)) orelse {
|
||||
return self.fail("type '{}' too big to fit into stack frame", .{elem_ty.fmt(mod)});
|
||||
};
|
||||
@ -1099,9 +1099,9 @@ fn binOp(
|
||||
switch (lhs_ty.zigTypeTag(mod)) {
|
||||
.Pointer => {
|
||||
const ptr_ty = lhs_ty;
|
||||
const elem_ty = switch (ptr_ty.ptrSize()) {
|
||||
.One => ptr_ty.childType().childType(), // ptr to array, so get array element type
|
||||
else => ptr_ty.childType(),
|
||||
const elem_ty = switch (ptr_ty.ptrSize(mod)) {
|
||||
.One => ptr_ty.childType(mod).childType(mod), // ptr to array, so get array element type
|
||||
else => ptr_ty.childType(mod),
|
||||
};
|
||||
const elem_size = elem_ty.abiSize(mod);
|
||||
|
||||
@ -1502,7 +1502,8 @@ fn reuseOperand(self: *Self, inst: Air.Inst.Index, operand: Air.Inst.Ref, op_ind
|
||||
}
|
||||
|
||||
fn load(self: *Self, dst_mcv: MCValue, ptr: MCValue, ptr_ty: Type) InnerError!void {
|
||||
const elem_ty = ptr_ty.elemType();
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const elem_ty = ptr_ty.childType(mod);
|
||||
switch (ptr) {
|
||||
.none => unreachable,
|
||||
.undef => unreachable,
|
||||
@ -2496,8 +2497,9 @@ fn airReduce(self: *Self, inst: Air.Inst.Index) !void {
|
||||
}
|
||||
|
||||
fn airAggregateInit(self: *Self, inst: Air.Inst.Index) !void {
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const vector_ty = self.typeOfIndex(inst);
|
||||
const len = vector_ty.vectorLen();
|
||||
const len = vector_ty.vectorLen(mod);
|
||||
const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
|
||||
const elements = @ptrCast([]const Air.Inst.Ref, self.air.extra[ty_pl.payload..][0..len]);
|
||||
const result: MCValue = res: {
|
||||
|
||||
@ -838,8 +838,9 @@ fn airAddSubWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
|
||||
}
|
||||
|
||||
fn airAggregateInit(self: *Self, inst: Air.Inst.Index) !void {
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const vector_ty = self.typeOfIndex(inst);
|
||||
const len = vector_ty.vectorLen();
|
||||
const len = vector_ty.vectorLen(mod);
|
||||
const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
|
||||
const elements = @ptrCast([]const Air.Inst.Ref, self.air.extra[ty_pl.payload..][0..len]);
|
||||
const result: MCValue = res: {
|
||||
@ -871,12 +872,13 @@ fn airArrayElemVal(self: *Self, inst: Air.Inst.Index) !void {
|
||||
}
|
||||
|
||||
fn airArrayToSlice(self: *Self, inst: Air.Inst.Index) !void {
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
|
||||
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
|
||||
const ptr_ty = self.typeOf(ty_op.operand);
|
||||
const ptr = try self.resolveInst(ty_op.operand);
|
||||
const array_ty = ptr_ty.childType();
|
||||
const array_len = @intCast(u32, array_ty.arrayLen());
|
||||
const array_ty = ptr_ty.childType(mod);
|
||||
const array_len = @intCast(u32, array_ty.arrayLen(mod));
|
||||
|
||||
const ptr_bits = self.target.ptrBitWidth();
|
||||
const ptr_bytes = @divExact(ptr_bits, 8);
|
||||
@ -1300,7 +1302,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const fn_ty = switch (ty.zigTypeTag(mod)) {
|
||||
.Fn => ty,
|
||||
.Pointer => ty.childType(),
|
||||
.Pointer => ty.childType(mod),
|
||||
else => unreachable,
|
||||
};
|
||||
|
||||
@ -1440,8 +1442,7 @@ fn airCmp(self: *Self, inst: Air.Inst.Index, op: math.CompareOperator) !void {
|
||||
.Pointer => Type.usize,
|
||||
.ErrorSet => Type.u16,
|
||||
.Optional => blk: {
|
||||
var opt_buffer: Type.Payload.ElemType = undefined;
|
||||
const payload_ty = lhs_ty.optionalChild(&opt_buffer);
|
||||
const payload_ty = lhs_ty.optionalChild(mod);
|
||||
if (!payload_ty.hasRuntimeBitsIgnoreComptime(mod)) {
|
||||
break :blk Type.u1;
|
||||
} else if (lhs_ty.isPtrLikeOptional(mod)) {
|
||||
@ -2447,6 +2448,7 @@ fn airSlice(self: *Self, inst: Air.Inst.Index) !void {
|
||||
}
|
||||
|
||||
fn airSliceElemVal(self: *Self, inst: Air.Inst.Index) !void {
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const is_volatile = false; // TODO
|
||||
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
|
||||
|
||||
@ -2456,8 +2458,7 @@ fn airSliceElemVal(self: *Self, inst: Air.Inst.Index) !void {
|
||||
const index_mcv = try self.resolveInst(bin_op.rhs);
|
||||
|
||||
const slice_ty = self.typeOf(bin_op.lhs);
|
||||
const elem_ty = slice_ty.childType();
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const elem_ty = slice_ty.childType(mod);
|
||||
const elem_size = elem_ty.abiSize(mod);
|
||||
|
||||
var buf: Type.SlicePtrFieldTypeBuffer = undefined;
|
||||
@ -2797,7 +2798,7 @@ fn allocMem(self: *Self, inst: Air.Inst.Index, abi_size: u32, abi_align: u32) !u
|
||||
/// Use a pointer instruction as the basis for allocating stack memory.
|
||||
fn allocMemPtr(self: *Self, inst: Air.Inst.Index) !u32 {
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const elem_ty = self.typeOfIndex(inst).elemType();
|
||||
const elem_ty = self.typeOfIndex(inst).childType(mod);
|
||||
|
||||
if (!elem_ty.hasRuntimeBits(mod)) {
|
||||
// As this stack item will never be dereferenced at runtime,
|
||||
@ -3001,9 +3002,9 @@ fn binOp(
|
||||
switch (lhs_ty.zigTypeTag(mod)) {
|
||||
.Pointer => {
|
||||
const ptr_ty = lhs_ty;
|
||||
const elem_ty = switch (ptr_ty.ptrSize()) {
|
||||
.One => ptr_ty.childType().childType(), // ptr to array, so get array element type
|
||||
else => ptr_ty.childType(),
|
||||
const elem_ty = switch (ptr_ty.ptrSize(mod)) {
|
||||
.One => ptr_ty.childType(mod).childType(mod), // ptr to array, so get array element type
|
||||
else => ptr_ty.childType(mod),
|
||||
};
|
||||
const elem_size = elem_ty.abiSize(mod);
|
||||
|
||||
@ -3019,7 +3020,7 @@ fn binOp(
|
||||
// multiplying it with elem_size
|
||||
|
||||
const offset = try self.binOp(.mul, rhs, .{ .immediate = elem_size }, Type.usize, Type.usize, null);
|
||||
const addr = try self.binOp(tag, lhs, offset, Type.initTag(.manyptr_u8), Type.usize, null);
|
||||
const addr = try self.binOp(tag, lhs, offset, Type.manyptr_u8, Type.usize, null);
|
||||
return addr;
|
||||
}
|
||||
},
|
||||
@ -4042,11 +4043,7 @@ fn genSetStack(self: *Self, ty: Type, stack_offset: u32, mcv: MCValue) InnerErro
|
||||
const reg = try self.copyToTmpRegister(ty, mcv);
|
||||
return self.genSetStack(ty, stack_offset, MCValue{ .register = reg });
|
||||
} else {
|
||||
var ptr_ty_payload: Type.Payload.ElemType = .{
|
||||
.base = .{ .tag = .single_mut_pointer },
|
||||
.data = ty,
|
||||
};
|
||||
const ptr_ty = Type.initPayload(&ptr_ty_payload.base);
|
||||
const ptr_ty = try mod.singleMutPtrType(ty);
|
||||
|
||||
const regs = try self.register_manager.allocRegs(4, .{ null, null, null, null }, gp);
|
||||
const regs_locks = self.register_manager.lockRegsAssumeUnused(4, regs);
|
||||
@ -4269,7 +4266,7 @@ fn jump(self: *Self, inst: Mir.Inst.Index) !void {
|
||||
|
||||
fn load(self: *Self, dst_mcv: MCValue, ptr: MCValue, ptr_ty: Type) InnerError!void {
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const elem_ty = ptr_ty.elemType();
|
||||
const elem_ty = ptr_ty.childType(mod);
|
||||
const elem_size = elem_ty.abiSize(mod);
|
||||
|
||||
switch (ptr) {
|
||||
@ -4729,7 +4726,7 @@ fn structFieldPtr(self: *Self, inst: Air.Inst.Index, operand: Air.Inst.Ref, inde
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const mcv = try self.resolveInst(operand);
|
||||
const ptr_ty = self.typeOf(operand);
|
||||
const struct_ty = ptr_ty.childType();
|
||||
const struct_ty = ptr_ty.childType(mod);
|
||||
const struct_field_offset = @intCast(u32, struct_ty.structFieldOffset(index, mod));
|
||||
switch (mcv) {
|
||||
.ptr_stack_offset => |off| {
|
||||
|
||||
@ -1542,7 +1542,7 @@ fn allocStack(func: *CodeGen, ty: Type) !WValue {
|
||||
fn allocStackPtr(func: *CodeGen, inst: Air.Inst.Index) !WValue {
|
||||
const mod = func.bin_file.base.options.module.?;
|
||||
const ptr_ty = func.typeOfIndex(inst);
|
||||
const pointee_ty = ptr_ty.childType();
|
||||
const pointee_ty = ptr_ty.childType(mod);
|
||||
|
||||
if (func.initial_stack_value == .none) {
|
||||
try func.initializeStack();
|
||||
@ -1766,8 +1766,7 @@ fn isByRef(ty: Type, mod: *const Module) bool {
|
||||
},
|
||||
.Optional => {
|
||||
if (ty.isPtrLikeOptional(mod)) return false;
|
||||
var buf: Type.Payload.ElemType = undefined;
|
||||
const pl_type = ty.optionalChild(&buf);
|
||||
const pl_type = ty.optionalChild(mod);
|
||||
if (pl_type.zigTypeTag(mod) == .ErrorSet) return false;
|
||||
return pl_type.hasRuntimeBitsIgnoreComptime(mod);
|
||||
},
|
||||
@ -2139,7 +2138,7 @@ fn airRet(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
|
||||
fn airRetPtr(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
const mod = func.bin_file.base.options.module.?;
|
||||
const child_type = func.typeOfIndex(inst).childType();
|
||||
const child_type = func.typeOfIndex(inst).childType(mod);
|
||||
|
||||
var result = result: {
|
||||
if (!child_type.isFnOrHasRuntimeBitsIgnoreComptime(mod)) {
|
||||
@ -2161,7 +2160,7 @@ fn airRetLoad(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
const mod = func.bin_file.base.options.module.?;
|
||||
const un_op = func.air.instructions.items(.data)[inst].un_op;
|
||||
const operand = try func.resolveInst(un_op);
|
||||
const ret_ty = func.typeOf(un_op).childType();
|
||||
const ret_ty = func.typeOf(un_op).childType(mod);
|
||||
|
||||
const fn_info = func.decl.ty.fnInfo();
|
||||
if (!ret_ty.hasRuntimeBitsIgnoreComptime(mod)) {
|
||||
@ -2188,7 +2187,7 @@ fn airCall(func: *CodeGen, inst: Air.Inst.Index, modifier: std.builtin.CallModif
|
||||
const mod = func.bin_file.base.options.module.?;
|
||||
const fn_ty = switch (ty.zigTypeTag(mod)) {
|
||||
.Fn => ty,
|
||||
.Pointer => ty.childType(),
|
||||
.Pointer => ty.childType(mod),
|
||||
else => unreachable,
|
||||
};
|
||||
const ret_ty = fn_ty.fnReturnType();
|
||||
@ -2301,8 +2300,8 @@ fn airStore(func: *CodeGen, inst: Air.Inst.Index, safety: bool) InnerError!void
|
||||
const lhs = try func.resolveInst(bin_op.lhs);
|
||||
const rhs = try func.resolveInst(bin_op.rhs);
|
||||
const ptr_ty = func.typeOf(bin_op.lhs);
|
||||
const ptr_info = ptr_ty.ptrInfo().data;
|
||||
const ty = ptr_ty.childType();
|
||||
const ptr_info = ptr_ty.ptrInfo(mod);
|
||||
const ty = ptr_ty.childType(mod);
|
||||
|
||||
if (ptr_info.host_size == 0) {
|
||||
try func.store(lhs, rhs, ty, 0);
|
||||
@ -2360,8 +2359,7 @@ fn store(func: *CodeGen, lhs: WValue, rhs: WValue, ty: Type, offset: u32) InnerE
|
||||
if (ty.isPtrLikeOptional(mod)) {
|
||||
return func.store(lhs, rhs, Type.usize, 0);
|
||||
}
|
||||
var buf: Type.Payload.ElemType = undefined;
|
||||
const pl_ty = ty.optionalChild(&buf);
|
||||
const pl_ty = ty.optionalChild(mod);
|
||||
if (!pl_ty.hasRuntimeBitsIgnoreComptime(mod)) {
|
||||
return func.store(lhs, rhs, Type.u8, 0);
|
||||
}
|
||||
@ -2454,7 +2452,7 @@ fn airLoad(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
const operand = try func.resolveInst(ty_op.operand);
|
||||
const ty = func.air.getRefType(ty_op.ty);
|
||||
const ptr_ty = func.typeOf(ty_op.operand);
|
||||
const ptr_info = ptr_ty.ptrInfo().data;
|
||||
const ptr_info = ptr_ty.ptrInfo(mod);
|
||||
|
||||
if (!ty.hasRuntimeBitsIgnoreComptime(mod)) return func.finishAir(inst, .none, &.{ty_op.operand});
|
||||
|
||||
@ -2971,7 +2969,7 @@ fn lowerParentPtr(func: *CodeGen, ptr_val: Value, offset: u32) InnerError!WValue
|
||||
break :blk field_offset;
|
||||
},
|
||||
},
|
||||
.Pointer => switch (parent_ty.ptrSize()) {
|
||||
.Pointer => switch (parent_ty.ptrSize(mod)) {
|
||||
.Slice => switch (field_ptr.field_index) {
|
||||
0 => 0,
|
||||
1 => func.ptrSize(),
|
||||
@ -3001,11 +2999,7 @@ fn lowerParentPtrDecl(func: *CodeGen, ptr_val: Value, decl_index: Module.Decl.In
|
||||
const mod = func.bin_file.base.options.module.?;
|
||||
const decl = mod.declPtr(decl_index);
|
||||
mod.markDeclAlive(decl);
|
||||
var ptr_ty_payload: Type.Payload.ElemType = .{
|
||||
.base = .{ .tag = .single_mut_pointer },
|
||||
.data = decl.ty,
|
||||
};
|
||||
const ptr_ty = Type.initPayload(&ptr_ty_payload.base);
|
||||
const ptr_ty = try mod.singleMutPtrType(decl.ty);
|
||||
return func.lowerDeclRefValue(.{ .ty = ptr_ty, .val = ptr_val }, decl_index, offset);
|
||||
}
|
||||
|
||||
@ -3145,8 +3139,7 @@ fn lowerConstant(func: *CodeGen, arg_val: Value, ty: Type) InnerError!WValue {
|
||||
return func.fail("Wasm TODO: lowerConstant error union with non-zero-bit payload type", .{});
|
||||
},
|
||||
.Optional => if (ty.optionalReprIsPayload(mod)) {
|
||||
var buf: Type.Payload.ElemType = undefined;
|
||||
const pl_ty = ty.optionalChild(&buf);
|
||||
const pl_ty = ty.optionalChild(mod);
|
||||
if (val.castTag(.opt_payload)) |payload| {
|
||||
return func.lowerConstant(payload.data, pl_ty);
|
||||
} else if (val.isNull(mod)) {
|
||||
@ -3217,8 +3210,7 @@ fn emitUndefined(func: *CodeGen, ty: Type) InnerError!WValue {
|
||||
else => unreachable,
|
||||
},
|
||||
.Optional => {
|
||||
var buf: Type.Payload.ElemType = undefined;
|
||||
const pl_ty = ty.optionalChild(&buf);
|
||||
const pl_ty = ty.optionalChild(mod);
|
||||
if (ty.optionalReprIsPayload(mod)) {
|
||||
return func.emitUndefined(pl_ty);
|
||||
}
|
||||
@ -3403,8 +3395,7 @@ fn cmp(func: *CodeGen, lhs: WValue, rhs: WValue, ty: Type, op: std.math.CompareO
|
||||
assert(!(lhs != .stack and rhs == .stack));
|
||||
const mod = func.bin_file.base.options.module.?;
|
||||
if (ty.zigTypeTag(mod) == .Optional and !ty.optionalReprIsPayload(mod)) {
|
||||
var buf: Type.Payload.ElemType = undefined;
|
||||
const payload_ty = ty.optionalChild(&buf);
|
||||
const payload_ty = ty.optionalChild(mod);
|
||||
if (payload_ty.hasRuntimeBitsIgnoreComptime(mod)) {
|
||||
// When we hit this case, we must check the value of optionals
|
||||
// that are not pointers. This means first checking against non-null for
|
||||
@ -3609,19 +3600,21 @@ fn bitcast(func: *CodeGen, wanted_ty: Type, given_ty: Type, operand: WValue) Inn
|
||||
}
|
||||
|
||||
fn airStructFieldPtr(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
const mod = func.bin_file.base.options.module.?;
|
||||
const ty_pl = func.air.instructions.items(.data)[inst].ty_pl;
|
||||
const extra = func.air.extraData(Air.StructField, ty_pl.payload);
|
||||
|
||||
const struct_ptr = try func.resolveInst(extra.data.struct_operand);
|
||||
const struct_ty = func.typeOf(extra.data.struct_operand).childType();
|
||||
const struct_ty = func.typeOf(extra.data.struct_operand).childType(mod);
|
||||
const result = try func.structFieldPtr(inst, extra.data.struct_operand, struct_ptr, struct_ty, extra.data.field_index);
|
||||
func.finishAir(inst, result, &.{extra.data.struct_operand});
|
||||
}
|
||||
|
||||
fn airStructFieldPtrIndex(func: *CodeGen, inst: Air.Inst.Index, index: u32) InnerError!void {
|
||||
const mod = func.bin_file.base.options.module.?;
|
||||
const ty_op = func.air.instructions.items(.data)[inst].ty_op;
|
||||
const struct_ptr = try func.resolveInst(ty_op.operand);
|
||||
const struct_ty = func.typeOf(ty_op.operand).childType();
|
||||
const struct_ty = func.typeOf(ty_op.operand).childType(mod);
|
||||
|
||||
const result = try func.structFieldPtr(inst, ty_op.operand, struct_ptr, struct_ty, index);
|
||||
func.finishAir(inst, result, &.{ty_op.operand});
|
||||
@ -3640,7 +3633,7 @@ fn structFieldPtr(
|
||||
const offset = switch (struct_ty.containerLayout()) {
|
||||
.Packed => switch (struct_ty.zigTypeTag(mod)) {
|
||||
.Struct => offset: {
|
||||
if (result_ty.ptrInfo().data.host_size != 0) {
|
||||
if (result_ty.ptrInfo(mod).host_size != 0) {
|
||||
break :offset @as(u32, 0);
|
||||
}
|
||||
break :offset struct_ty.packedStructFieldByteOffset(index, mod);
|
||||
@ -3981,7 +3974,7 @@ fn airUnwrapErrUnionPayload(func: *CodeGen, inst: Air.Inst.Index, op_is_ptr: boo
|
||||
|
||||
const operand = try func.resolveInst(ty_op.operand);
|
||||
const op_ty = func.typeOf(ty_op.operand);
|
||||
const err_ty = if (op_is_ptr) op_ty.childType() else op_ty;
|
||||
const err_ty = if (op_is_ptr) op_ty.childType(mod) else op_ty;
|
||||
const payload_ty = err_ty.errorUnionPayload();
|
||||
|
||||
const result = result: {
|
||||
@ -4009,7 +4002,7 @@ fn airUnwrapErrUnionError(func: *CodeGen, inst: Air.Inst.Index, op_is_ptr: bool)
|
||||
|
||||
const operand = try func.resolveInst(ty_op.operand);
|
||||
const op_ty = func.typeOf(ty_op.operand);
|
||||
const err_ty = if (op_is_ptr) op_ty.childType() else op_ty;
|
||||
const err_ty = if (op_is_ptr) op_ty.childType(mod) else op_ty;
|
||||
const payload_ty = err_ty.errorUnionPayload();
|
||||
|
||||
const result = result: {
|
||||
@ -4156,11 +4149,12 @@ fn intcast(func: *CodeGen, operand: WValue, given: Type, wanted: Type) InnerErro
|
||||
}
|
||||
|
||||
fn airIsNull(func: *CodeGen, inst: Air.Inst.Index, opcode: wasm.Opcode, op_kind: enum { value, ptr }) InnerError!void {
|
||||
const mod = func.bin_file.base.options.module.?;
|
||||
const un_op = func.air.instructions.items(.data)[inst].un_op;
|
||||
const operand = try func.resolveInst(un_op);
|
||||
|
||||
const op_ty = func.typeOf(un_op);
|
||||
const optional_ty = if (op_kind == .ptr) op_ty.childType() else op_ty;
|
||||
const optional_ty = if (op_kind == .ptr) op_ty.childType(mod) else op_ty;
|
||||
const is_null = try func.isNull(operand, optional_ty, opcode);
|
||||
const result = try is_null.toLocal(func, optional_ty);
|
||||
func.finishAir(inst, result, &.{un_op});
|
||||
@ -4171,8 +4165,7 @@ fn airIsNull(func: *CodeGen, inst: Air.Inst.Index, opcode: wasm.Opcode, op_kind:
|
||||
fn isNull(func: *CodeGen, operand: WValue, optional_ty: Type, opcode: wasm.Opcode) InnerError!WValue {
|
||||
const mod = func.bin_file.base.options.module.?;
|
||||
try func.emitWValue(operand);
|
||||
var buf: Type.Payload.ElemType = undefined;
|
||||
const payload_ty = optional_ty.optionalChild(&buf);
|
||||
const payload_ty = optional_ty.optionalChild(mod);
|
||||
if (!optional_ty.optionalReprIsPayload(mod)) {
|
||||
// When payload is zero-bits, we can treat operand as a value, rather than
|
||||
// a pointer to the stack value
|
||||
@ -4221,14 +4214,13 @@ fn airOptionalPayload(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
}
|
||||
|
||||
fn airOptionalPayloadPtr(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
const mod = func.bin_file.base.options.module.?;
|
||||
const ty_op = func.air.instructions.items(.data)[inst].ty_op;
|
||||
const operand = try func.resolveInst(ty_op.operand);
|
||||
const opt_ty = func.typeOf(ty_op.operand).childType();
|
||||
const opt_ty = func.typeOf(ty_op.operand).childType(mod);
|
||||
|
||||
const mod = func.bin_file.base.options.module.?;
|
||||
const result = result: {
|
||||
var buf: Type.Payload.ElemType = undefined;
|
||||
const payload_ty = opt_ty.optionalChild(&buf);
|
||||
const payload_ty = opt_ty.optionalChild(mod);
|
||||
if (!payload_ty.hasRuntimeBitsIgnoreComptime(mod) or opt_ty.optionalReprIsPayload(mod)) {
|
||||
break :result func.reuseOperand(ty_op.operand, operand);
|
||||
}
|
||||
@ -4242,9 +4234,8 @@ fn airOptionalPayloadPtrSet(func: *CodeGen, inst: Air.Inst.Index) InnerError!voi
|
||||
const mod = func.bin_file.base.options.module.?;
|
||||
const ty_op = func.air.instructions.items(.data)[inst].ty_op;
|
||||
const operand = try func.resolveInst(ty_op.operand);
|
||||
const opt_ty = func.typeOf(ty_op.operand).childType();
|
||||
var buf: Type.Payload.ElemType = undefined;
|
||||
const payload_ty = opt_ty.optionalChild(&buf);
|
||||
const opt_ty = func.typeOf(ty_op.operand).childType(mod);
|
||||
const payload_ty = opt_ty.optionalChild(mod);
|
||||
if (!payload_ty.hasRuntimeBitsIgnoreComptime(mod)) {
|
||||
return func.fail("TODO: Implement OptionalPayloadPtrSet for optional with zero-sized type {}", .{payload_ty.fmtDebug()});
|
||||
}
|
||||
@ -4325,13 +4316,13 @@ fn airSliceLen(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
}
|
||||
|
||||
fn airSliceElemVal(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
const mod = func.bin_file.base.options.module.?;
|
||||
const bin_op = func.air.instructions.items(.data)[inst].bin_op;
|
||||
|
||||
const slice_ty = func.typeOf(bin_op.lhs);
|
||||
const slice = try func.resolveInst(bin_op.lhs);
|
||||
const index = try func.resolveInst(bin_op.rhs);
|
||||
const elem_ty = slice_ty.childType();
|
||||
const mod = func.bin_file.base.options.module.?;
|
||||
const elem_ty = slice_ty.childType(mod);
|
||||
const elem_size = elem_ty.abiSize(mod);
|
||||
|
||||
// load pointer onto stack
|
||||
@ -4355,11 +4346,11 @@ fn airSliceElemVal(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
}
|
||||
|
||||
fn airSliceElemPtr(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
const mod = func.bin_file.base.options.module.?;
|
||||
const ty_pl = func.air.instructions.items(.data)[inst].ty_pl;
|
||||
const bin_op = func.air.extraData(Air.Bin, ty_pl.payload).data;
|
||||
|
||||
const elem_ty = func.air.getRefType(ty_pl.ty).childType();
|
||||
const mod = func.bin_file.base.options.module.?;
|
||||
const elem_ty = func.air.getRefType(ty_pl.ty).childType(mod);
|
||||
const elem_size = elem_ty.abiSize(mod);
|
||||
|
||||
const slice = try func.resolveInst(bin_op.lhs);
|
||||
@ -4436,7 +4427,7 @@ fn airArrayToSlice(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
const ty_op = func.air.instructions.items(.data)[inst].ty_op;
|
||||
|
||||
const operand = try func.resolveInst(ty_op.operand);
|
||||
const array_ty = func.typeOf(ty_op.operand).childType();
|
||||
const array_ty = func.typeOf(ty_op.operand).childType(mod);
|
||||
const slice_ty = func.air.getRefType(ty_op.ty);
|
||||
|
||||
// create a slice on the stack
|
||||
@ -4448,7 +4439,7 @@ fn airArrayToSlice(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
}
|
||||
|
||||
// store the length of the array in the slice
|
||||
const len = WValue{ .imm32 = @intCast(u32, array_ty.arrayLen()) };
|
||||
const len = WValue{ .imm32 = @intCast(u32, array_ty.arrayLen(mod)) };
|
||||
try func.store(slice_local, len, Type.usize, func.ptrSize());
|
||||
|
||||
func.finishAir(inst, slice_local, &.{ty_op.operand});
|
||||
@ -4470,13 +4461,13 @@ fn airPtrToInt(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
}
|
||||
|
||||
fn airPtrElemVal(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
const mod = func.bin_file.base.options.module.?;
|
||||
const bin_op = func.air.instructions.items(.data)[inst].bin_op;
|
||||
|
||||
const ptr_ty = func.typeOf(bin_op.lhs);
|
||||
const ptr = try func.resolveInst(bin_op.lhs);
|
||||
const index = try func.resolveInst(bin_op.rhs);
|
||||
const elem_ty = ptr_ty.childType();
|
||||
const mod = func.bin_file.base.options.module.?;
|
||||
const elem_ty = ptr_ty.childType(mod);
|
||||
const elem_size = elem_ty.abiSize(mod);
|
||||
|
||||
// load pointer onto the stack
|
||||
@ -4507,12 +4498,12 @@ fn airPtrElemVal(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
}
|
||||
|
||||
fn airPtrElemPtr(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
const mod = func.bin_file.base.options.module.?;
|
||||
const ty_pl = func.air.instructions.items(.data)[inst].ty_pl;
|
||||
const bin_op = func.air.extraData(Air.Bin, ty_pl.payload).data;
|
||||
|
||||
const ptr_ty = func.typeOf(bin_op.lhs);
|
||||
const elem_ty = func.air.getRefType(ty_pl.ty).childType();
|
||||
const mod = func.bin_file.base.options.module.?;
|
||||
const elem_ty = func.air.getRefType(ty_pl.ty).childType(mod);
|
||||
const elem_size = elem_ty.abiSize(mod);
|
||||
|
||||
const ptr = try func.resolveInst(bin_op.lhs);
|
||||
@ -4544,9 +4535,9 @@ fn airPtrBinOp(func: *CodeGen, inst: Air.Inst.Index, op: Op) InnerError!void {
|
||||
const ptr = try func.resolveInst(bin_op.lhs);
|
||||
const offset = try func.resolveInst(bin_op.rhs);
|
||||
const ptr_ty = func.typeOf(bin_op.lhs);
|
||||
const pointee_ty = switch (ptr_ty.ptrSize()) {
|
||||
.One => ptr_ty.childType().childType(), // ptr to array, so get array element type
|
||||
else => ptr_ty.childType(),
|
||||
const pointee_ty = switch (ptr_ty.ptrSize(mod)) {
|
||||
.One => ptr_ty.childType(mod).childType(mod), // ptr to array, so get array element type
|
||||
else => ptr_ty.childType(mod),
|
||||
};
|
||||
|
||||
const valtype = typeToValtype(Type.usize, mod);
|
||||
@ -4565,6 +4556,7 @@ fn airPtrBinOp(func: *CodeGen, inst: Air.Inst.Index, op: Op) InnerError!void {
|
||||
}
|
||||
|
||||
fn airMemset(func: *CodeGen, inst: Air.Inst.Index, safety: bool) InnerError!void {
|
||||
const mod = func.bin_file.base.options.module.?;
|
||||
if (safety) {
|
||||
// TODO if the value is undef, write 0xaa bytes to dest
|
||||
} else {
|
||||
@ -4575,16 +4567,16 @@ fn airMemset(func: *CodeGen, inst: Air.Inst.Index, safety: bool) InnerError!void
|
||||
const ptr = try func.resolveInst(bin_op.lhs);
|
||||
const ptr_ty = func.typeOf(bin_op.lhs);
|
||||
const value = try func.resolveInst(bin_op.rhs);
|
||||
const len = switch (ptr_ty.ptrSize()) {
|
||||
const len = switch (ptr_ty.ptrSize(mod)) {
|
||||
.Slice => try func.sliceLen(ptr),
|
||||
.One => @as(WValue, .{ .imm32 = @intCast(u32, ptr_ty.childType().arrayLen()) }),
|
||||
.One => @as(WValue, .{ .imm32 = @intCast(u32, ptr_ty.childType(mod).arrayLen(mod)) }),
|
||||
.C, .Many => unreachable,
|
||||
};
|
||||
|
||||
const elem_ty = if (ptr_ty.ptrSize() == .One)
|
||||
ptr_ty.childType().childType()
|
||||
const elem_ty = if (ptr_ty.ptrSize(mod) == .One)
|
||||
ptr_ty.childType(mod).childType(mod)
|
||||
else
|
||||
ptr_ty.childType();
|
||||
ptr_ty.childType(mod);
|
||||
|
||||
const dst_ptr = try func.sliceOrArrayPtr(ptr, ptr_ty);
|
||||
try func.memset(elem_ty, dst_ptr, len, value);
|
||||
@ -4686,13 +4678,13 @@ fn memset(func: *CodeGen, elem_ty: Type, ptr: WValue, len: WValue, value: WValue
|
||||
}
|
||||
|
||||
fn airArrayElemVal(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
const mod = func.bin_file.base.options.module.?;
|
||||
const bin_op = func.air.instructions.items(.data)[inst].bin_op;
|
||||
|
||||
const array_ty = func.typeOf(bin_op.lhs);
|
||||
const array = try func.resolveInst(bin_op.lhs);
|
||||
const index = try func.resolveInst(bin_op.rhs);
|
||||
const elem_ty = array_ty.childType();
|
||||
const mod = func.bin_file.base.options.module.?;
|
||||
const elem_ty = array_ty.childType(mod);
|
||||
const elem_size = elem_ty.abiSize(mod);
|
||||
|
||||
if (isByRef(array_ty, mod)) {
|
||||
@ -4810,7 +4802,7 @@ fn airSplat(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
const ty_op = func.air.instructions.items(.data)[inst].ty_op;
|
||||
const operand = try func.resolveInst(ty_op.operand);
|
||||
const ty = func.typeOfIndex(inst);
|
||||
const elem_ty = ty.childType();
|
||||
const elem_ty = ty.childType(mod);
|
||||
|
||||
if (determineSimdStoreStrategy(ty, mod) == .direct) blk: {
|
||||
switch (operand) {
|
||||
@ -4859,7 +4851,7 @@ fn airSplat(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
}
|
||||
}
|
||||
const elem_size = elem_ty.bitSize(mod);
|
||||
const vector_len = @intCast(usize, ty.vectorLen());
|
||||
const vector_len = @intCast(usize, ty.vectorLen(mod));
|
||||
if ((!std.math.isPowerOfTwo(elem_size) or elem_size % 8 != 0) and vector_len > 1) {
|
||||
return func.fail("TODO: WebAssembly `@splat` for arbitrary element bitsize {d}", .{elem_size});
|
||||
}
|
||||
@ -4895,7 +4887,7 @@ fn airShuffle(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
const mask = func.air.values[extra.mask];
|
||||
const mask_len = extra.mask_len;
|
||||
|
||||
const child_ty = inst_ty.childType();
|
||||
const child_ty = inst_ty.childType(mod);
|
||||
const elem_size = child_ty.abiSize(mod);
|
||||
|
||||
// TODO: One of them could be by ref; handle in loop
|
||||
@ -4959,16 +4951,16 @@ fn airAggregateInit(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
const mod = func.bin_file.base.options.module.?;
|
||||
const ty_pl = func.air.instructions.items(.data)[inst].ty_pl;
|
||||
const result_ty = func.typeOfIndex(inst);
|
||||
const len = @intCast(usize, result_ty.arrayLen());
|
||||
const len = @intCast(usize, result_ty.arrayLen(mod));
|
||||
const elements = @ptrCast([]const Air.Inst.Ref, func.air.extra[ty_pl.payload..][0..len]);
|
||||
|
||||
const result: WValue = result_value: {
|
||||
switch (result_ty.zigTypeTag(mod)) {
|
||||
.Array => {
|
||||
const result = try func.allocStack(result_ty);
|
||||
const elem_ty = result_ty.childType();
|
||||
const elem_ty = result_ty.childType(mod);
|
||||
const elem_size = @intCast(u32, elem_ty.abiSize(mod));
|
||||
const sentinel = if (result_ty.sentinel()) |sent| blk: {
|
||||
const sentinel = if (result_ty.sentinel(mod)) |sent| blk: {
|
||||
break :blk try func.lowerConstant(sent, elem_ty);
|
||||
} else null;
|
||||
|
||||
@ -5190,8 +5182,7 @@ fn cmpOptionals(func: *CodeGen, lhs: WValue, rhs: WValue, operand_ty: Type, op:
|
||||
const mod = func.bin_file.base.options.module.?;
|
||||
assert(operand_ty.hasRuntimeBitsIgnoreComptime(mod));
|
||||
assert(op == .eq or op == .neq);
|
||||
var buf: Type.Payload.ElemType = undefined;
|
||||
const payload_ty = operand_ty.optionalChild(&buf);
|
||||
const payload_ty = operand_ty.optionalChild(mod);
|
||||
|
||||
// We store the final result in here that will be validated
|
||||
// if the optional is truly equal.
|
||||
@ -5268,7 +5259,7 @@ fn cmpBigInt(func: *CodeGen, lhs: WValue, rhs: WValue, operand_ty: Type, op: std
|
||||
fn airSetUnionTag(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
const mod = func.bin_file.base.options.module.?;
|
||||
const bin_op = func.air.instructions.items(.data)[inst].bin_op;
|
||||
const un_ty = func.typeOf(bin_op.lhs).childType();
|
||||
const un_ty = func.typeOf(bin_op.lhs).childType(mod);
|
||||
const tag_ty = func.typeOf(bin_op.rhs);
|
||||
const layout = un_ty.unionGetLayout(mod);
|
||||
if (layout.tag_size == 0) return func.finishAir(inst, .none, &.{ bin_op.lhs, bin_op.rhs });
|
||||
@ -5398,7 +5389,7 @@ fn airErrUnionPayloadPtrSet(func: *CodeGen, inst: Air.Inst.Index) InnerError!voi
|
||||
const mod = func.bin_file.base.options.module.?;
|
||||
const ty_op = func.air.instructions.items(.data)[inst].ty_op;
|
||||
|
||||
const err_set_ty = func.typeOf(ty_op.operand).childType();
|
||||
const err_set_ty = func.typeOf(ty_op.operand).childType(mod);
|
||||
const payload_ty = err_set_ty.errorUnionPayload();
|
||||
const operand = try func.resolveInst(ty_op.operand);
|
||||
|
||||
@ -5426,7 +5417,7 @@ fn airFieldParentPtr(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
const extra = func.air.extraData(Air.FieldParentPtr, ty_pl.payload).data;
|
||||
|
||||
const field_ptr = try func.resolveInst(extra.field_ptr);
|
||||
const parent_ty = func.air.getRefType(ty_pl.ty).childType();
|
||||
const parent_ty = func.air.getRefType(ty_pl.ty).childType(mod);
|
||||
const field_offset = parent_ty.structFieldOffset(extra.field_index, mod);
|
||||
|
||||
const result = if (field_offset != 0) result: {
|
||||
@ -5455,10 +5446,10 @@ fn airMemcpy(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
const bin_op = func.air.instructions.items(.data)[inst].bin_op;
|
||||
const dst = try func.resolveInst(bin_op.lhs);
|
||||
const dst_ty = func.typeOf(bin_op.lhs);
|
||||
const ptr_elem_ty = dst_ty.childType();
|
||||
const ptr_elem_ty = dst_ty.childType(mod);
|
||||
const src = try func.resolveInst(bin_op.rhs);
|
||||
const src_ty = func.typeOf(bin_op.rhs);
|
||||
const len = switch (dst_ty.ptrSize()) {
|
||||
const len = switch (dst_ty.ptrSize(mod)) {
|
||||
.Slice => blk: {
|
||||
const slice_len = try func.sliceLen(dst);
|
||||
if (ptr_elem_ty.abiSize(mod) != 1) {
|
||||
@ -5470,7 +5461,7 @@ fn airMemcpy(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
break :blk slice_len;
|
||||
},
|
||||
.One => @as(WValue, .{
|
||||
.imm32 = @intCast(u32, ptr_elem_ty.arrayLen() * ptr_elem_ty.childType().abiSize(mod)),
|
||||
.imm32 = @intCast(u32, ptr_elem_ty.arrayLen(mod) * ptr_elem_ty.childType(mod).abiSize(mod)),
|
||||
}),
|
||||
.C, .Many => unreachable,
|
||||
};
|
||||
@ -5551,7 +5542,7 @@ fn airErrorName(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
// As the names are global and the slice elements are constant, we do not have
|
||||
// to make a copy of the ptr+value but can point towards them directly.
|
||||
const error_table_symbol = try func.bin_file.getErrorTableSymbol();
|
||||
const name_ty = Type.initTag(.const_slice_u8_sentinel_0);
|
||||
const name_ty = Type.const_slice_u8_sentinel_0;
|
||||
const mod = func.bin_file.base.options.module.?;
|
||||
const abi_size = name_ty.abiSize(mod);
|
||||
|
||||
@ -5857,7 +5848,7 @@ fn airMulWithOverflow(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
try func.addLabel(.local_set, overflow_bit.local.value);
|
||||
break :blk try func.wrapOperand(bin_op, lhs_ty);
|
||||
} else if (int_info.bits == 64 and int_info.signedness == .unsigned) blk: {
|
||||
const new_ty = Type.initTag(.u128);
|
||||
const new_ty = Type.u128;
|
||||
var lhs_upcast = try (try func.intcast(lhs, lhs_ty, new_ty)).toLocal(func, lhs_ty);
|
||||
defer lhs_upcast.free(func);
|
||||
var rhs_upcast = try (try func.intcast(rhs, lhs_ty, new_ty)).toLocal(func, lhs_ty);
|
||||
@ -5878,7 +5869,7 @@ fn airMulWithOverflow(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
const bin_op = try func.callIntrinsic(
|
||||
"__multi3",
|
||||
&[_]Type{Type.i64} ** 4,
|
||||
Type.initTag(.i128),
|
||||
Type.i128,
|
||||
&.{ lhs, lhs_shifted, rhs, rhs_shifted },
|
||||
);
|
||||
const res = try func.allocLocal(lhs_ty);
|
||||
@ -5902,19 +5893,19 @@ fn airMulWithOverflow(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
const mul1 = try func.callIntrinsic(
|
||||
"__multi3",
|
||||
&[_]Type{Type.i64} ** 4,
|
||||
Type.initTag(.i128),
|
||||
Type.i128,
|
||||
&.{ lhs_lsb, zero, rhs_msb, zero },
|
||||
);
|
||||
const mul2 = try func.callIntrinsic(
|
||||
"__multi3",
|
||||
&[_]Type{Type.i64} ** 4,
|
||||
Type.initTag(.i128),
|
||||
Type.i128,
|
||||
&.{ rhs_lsb, zero, lhs_msb, zero },
|
||||
);
|
||||
const mul3 = try func.callIntrinsic(
|
||||
"__multi3",
|
||||
&[_]Type{Type.i64} ** 4,
|
||||
Type.initTag(.i128),
|
||||
Type.i128,
|
||||
&.{ lhs_msb, zero, rhs_msb, zero },
|
||||
);
|
||||
|
||||
@ -5942,7 +5933,7 @@ fn airMulWithOverflow(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
_ = try func.binOp(lsb_or, mul_add_lt, Type.bool, .@"or");
|
||||
try func.addLabel(.local_set, overflow_bit.local.value);
|
||||
|
||||
const tmp_result = try func.allocStack(Type.initTag(.u128));
|
||||
const tmp_result = try func.allocStack(Type.u128);
|
||||
try func.emitWValue(tmp_result);
|
||||
const mul3_msb = try func.load(mul3, Type.u64, 0);
|
||||
try func.store(.stack, mul3_msb, Type.u64, tmp_result.offset());
|
||||
@ -6191,11 +6182,12 @@ fn airTry(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
}
|
||||
|
||||
fn airTryPtr(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
const mod = func.bin_file.base.options.module.?;
|
||||
const ty_pl = func.air.instructions.items(.data)[inst].ty_pl;
|
||||
const extra = func.air.extraData(Air.TryPtr, ty_pl.payload);
|
||||
const err_union_ptr = try func.resolveInst(extra.data.ptr);
|
||||
const body = func.air.extra[extra.end..][0..extra.data.body_len];
|
||||
const err_union_ty = func.typeOf(extra.data.ptr).childType();
|
||||
const err_union_ty = func.typeOf(extra.data.ptr).childType(mod);
|
||||
const result = try lowerTry(func, inst, err_union_ptr, body, err_union_ty, true);
|
||||
func.finishAir(inst, result, &.{extra.data.ptr});
|
||||
}
|
||||
@ -6845,11 +6837,11 @@ fn getTagNameFunction(func: *CodeGen, enum_ty: Type) InnerError!u32 {
|
||||
for (enum_ty.enumFields().keys(), 0..) |tag_name, field_index| {
|
||||
// for each tag name, create an unnamed const,
|
||||
// and then get a pointer to its value.
|
||||
var name_ty_payload: Type.Payload.Len = .{
|
||||
.base = .{ .tag = .array_u8_sentinel_0 },
|
||||
.data = @intCast(u64, tag_name.len),
|
||||
};
|
||||
const name_ty = Type.initPayload(&name_ty_payload.base);
|
||||
const name_ty = try mod.arrayType(.{
|
||||
.len = tag_name.len,
|
||||
.child = .u8_type,
|
||||
.sentinel = .zero_u8,
|
||||
});
|
||||
const string_bytes = &mod.string_literal_bytes;
|
||||
try string_bytes.ensureUnusedCapacity(mod.gpa, tag_name.len);
|
||||
const gop = try mod.string_literal_table.getOrPutContextAdapted(mod.gpa, tag_name, Module.StringLiteralAdapter{
|
||||
@ -6972,7 +6964,7 @@ fn getTagNameFunction(func: *CodeGen, enum_ty: Type) InnerError!u32 {
|
||||
// finish function body
|
||||
try writer.writeByte(std.wasm.opcode(.end));
|
||||
|
||||
const slice_ty = Type.initTag(.const_slice_u8_sentinel_0);
|
||||
const slice_ty = Type.const_slice_u8_sentinel_0;
|
||||
const func_type = try genFunctype(arena, .Unspecified, &.{int_tag_ty}, slice_ty, mod);
|
||||
return func.bin_file.createFunction(func_name, func_type, &body_list, &relocs);
|
||||
}
|
||||
@ -7068,7 +7060,7 @@ fn airCmpxchg(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
const extra = func.air.extraData(Air.Cmpxchg, ty_pl.payload).data;
|
||||
|
||||
const ptr_ty = func.typeOf(extra.ptr);
|
||||
const ty = ptr_ty.childType();
|
||||
const ty = ptr_ty.childType(mod);
|
||||
const result_ty = func.typeOfIndex(inst);
|
||||
|
||||
const ptr_operand = try func.resolveInst(extra.ptr);
|
||||
@ -7355,7 +7347,7 @@ fn airAtomicStore(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
const ptr = try func.resolveInst(bin_op.lhs);
|
||||
const operand = try func.resolveInst(bin_op.rhs);
|
||||
const ptr_ty = func.typeOf(bin_op.lhs);
|
||||
const ty = ptr_ty.childType();
|
||||
const ty = ptr_ty.childType(mod);
|
||||
|
||||
if (func.useAtomicFeature()) {
|
||||
const tag: wasm.AtomicsOpcode = switch (ty.abiSize(mod)) {
|
||||
|
||||
@ -2259,7 +2259,7 @@ fn allocFrameIndex(self: *Self, alloc: FrameAlloc) !FrameIndex {
|
||||
fn allocMemPtr(self: *Self, inst: Air.Inst.Index) !FrameIndex {
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const ptr_ty = self.typeOfIndex(inst);
|
||||
const val_ty = ptr_ty.childType();
|
||||
const val_ty = ptr_ty.childType(mod);
|
||||
return self.allocFrameIndex(FrameAlloc.init(.{
|
||||
.size = math.cast(u32, val_ty.abiSize(mod)) orelse {
|
||||
return self.fail("type '{}' too big to fit into stack frame", .{val_ty.fmt(mod)});
|
||||
@ -2289,8 +2289,8 @@ fn allocRegOrMemAdvanced(self: *Self, ty: Type, inst: ?Air.Inst.Index, reg_ok: b
|
||||
80 => break :need_mem,
|
||||
else => unreachable,
|
||||
},
|
||||
.Vector => switch (ty.childType().zigTypeTag(mod)) {
|
||||
.Float => switch (ty.childType().floatBits(self.target.*)) {
|
||||
.Vector => switch (ty.childType(mod).zigTypeTag(mod)) {
|
||||
.Float => switch (ty.childType(mod).floatBits(self.target.*)) {
|
||||
16, 32, 64, 128 => if (self.hasFeature(.avx)) 32 else 16,
|
||||
80 => break :need_mem,
|
||||
else => unreachable,
|
||||
@ -2727,12 +2727,12 @@ fn airTrunc(self: *Self, inst: Air.Inst.Index) !void {
|
||||
try self.copyToRegisterWithInstTracking(inst, dst_ty, src_mcv);
|
||||
|
||||
if (dst_ty.zigTypeTag(mod) == .Vector) {
|
||||
assert(src_ty.zigTypeTag(mod) == .Vector and dst_ty.vectorLen() == src_ty.vectorLen());
|
||||
const dst_info = dst_ty.childType().intInfo(mod);
|
||||
const src_info = src_ty.childType().intInfo(mod);
|
||||
assert(src_ty.zigTypeTag(mod) == .Vector and dst_ty.vectorLen(mod) == src_ty.vectorLen(mod));
|
||||
const dst_info = dst_ty.childType(mod).intInfo(mod);
|
||||
const src_info = src_ty.childType(mod).intInfo(mod);
|
||||
const mir_tag = if (@as(?Mir.Inst.FixedTag, switch (dst_info.bits) {
|
||||
8 => switch (src_info.bits) {
|
||||
16 => switch (dst_ty.vectorLen()) {
|
||||
16 => switch (dst_ty.vectorLen(mod)) {
|
||||
1...8 => if (self.hasFeature(.avx)) .{ .vp_b, .ackusw } else .{ .p_b, .ackusw },
|
||||
9...16 => if (self.hasFeature(.avx2)) .{ .vp_b, .ackusw } else null,
|
||||
else => null,
|
||||
@ -2740,7 +2740,7 @@ fn airTrunc(self: *Self, inst: Air.Inst.Index) !void {
|
||||
else => null,
|
||||
},
|
||||
16 => switch (src_info.bits) {
|
||||
32 => switch (dst_ty.vectorLen()) {
|
||||
32 => switch (dst_ty.vectorLen(mod)) {
|
||||
1...4 => if (self.hasFeature(.avx))
|
||||
.{ .vp_w, .ackusd }
|
||||
else if (self.hasFeature(.sse4_1))
|
||||
@ -2769,14 +2769,10 @@ fn airTrunc(self: *Self, inst: Air.Inst.Index) !void {
|
||||
};
|
||||
const splat_val = Value.initPayload(&splat_pl.base);
|
||||
|
||||
var full_pl = Type.Payload.Array{
|
||||
.base = .{ .tag = .vector },
|
||||
.data = .{
|
||||
.len = @divExact(@as(u64, if (src_abi_size > 16) 256 else 128), src_info.bits),
|
||||
.elem_type = src_ty.childType(),
|
||||
},
|
||||
};
|
||||
const full_ty = Type.initPayload(&full_pl.base);
|
||||
const full_ty = try mod.vectorType(.{
|
||||
.len = @intCast(u32, @divExact(@as(u64, if (src_abi_size > 16) 256 else 128), src_info.bits)),
|
||||
.child = src_ty.childType(mod).ip_index,
|
||||
});
|
||||
const full_abi_size = @intCast(u32, full_ty.abiSize(mod));
|
||||
|
||||
const splat_mcv = try self.genTypedValue(.{ .ty = full_ty, .val = splat_val });
|
||||
@ -3587,7 +3583,7 @@ fn airOptionalPayloadPtrSet(self: *Self, inst: Air.Inst.Index) !void {
|
||||
const result = result: {
|
||||
const dst_ty = self.typeOfIndex(inst);
|
||||
const src_ty = self.typeOf(ty_op.operand);
|
||||
const opt_ty = src_ty.childType();
|
||||
const opt_ty = src_ty.childType(mod);
|
||||
const src_mcv = try self.resolveInst(ty_op.operand);
|
||||
|
||||
if (opt_ty.optionalReprIsPayload(mod)) {
|
||||
@ -3607,7 +3603,7 @@ fn airOptionalPayloadPtrSet(self: *Self, inst: Air.Inst.Index) !void {
|
||||
else
|
||||
try self.copyToRegisterWithInstTracking(inst, dst_ty, src_mcv);
|
||||
|
||||
const pl_ty = dst_ty.childType();
|
||||
const pl_ty = dst_ty.childType(mod);
|
||||
const pl_abi_size = @intCast(i32, pl_ty.abiSize(mod));
|
||||
try self.genSetMem(.{ .reg = dst_mcv.getReg().? }, pl_abi_size, Type.bool, .{ .immediate = 1 });
|
||||
break :result if (self.liveness.isUnused(inst)) .unreach else dst_mcv;
|
||||
@ -3737,7 +3733,7 @@ fn airUnwrapErrUnionErrPtr(self: *Self, inst: Air.Inst.Index) !void {
|
||||
const dst_lock = self.register_manager.lockRegAssumeUnused(dst_reg);
|
||||
defer self.register_manager.unlockReg(dst_lock);
|
||||
|
||||
const eu_ty = src_ty.childType();
|
||||
const eu_ty = src_ty.childType(mod);
|
||||
const pl_ty = eu_ty.errorUnionPayload();
|
||||
const err_ty = eu_ty.errorUnionSet();
|
||||
const err_off = @intCast(i32, errUnionErrorOffset(pl_ty, mod));
|
||||
@ -3777,7 +3773,7 @@ fn airUnwrapErrUnionPayloadPtr(self: *Self, inst: Air.Inst.Index) !void {
|
||||
const dst_lock = self.register_manager.lockReg(dst_reg);
|
||||
defer if (dst_lock) |lock| self.register_manager.unlockReg(lock);
|
||||
|
||||
const eu_ty = src_ty.childType();
|
||||
const eu_ty = src_ty.childType(mod);
|
||||
const pl_ty = eu_ty.errorUnionPayload();
|
||||
const pl_off = @intCast(i32, errUnionPayloadOffset(pl_ty, mod));
|
||||
const dst_abi_size = @intCast(u32, dst_ty.abiSize(mod));
|
||||
@ -3803,7 +3799,7 @@ fn airErrUnionPayloadPtrSet(self: *Self, inst: Air.Inst.Index) !void {
|
||||
const src_lock = self.register_manager.lockRegAssumeUnused(src_reg);
|
||||
defer self.register_manager.unlockReg(src_lock);
|
||||
|
||||
const eu_ty = src_ty.childType();
|
||||
const eu_ty = src_ty.childType(mod);
|
||||
const pl_ty = eu_ty.errorUnionPayload();
|
||||
const err_ty = eu_ty.errorUnionSet();
|
||||
const err_off = @intCast(i32, errUnionErrorOffset(pl_ty, mod));
|
||||
@ -4057,7 +4053,7 @@ fn genSliceElemPtr(self: *Self, lhs: Air.Inst.Ref, rhs: Air.Inst.Ref) !MCValue {
|
||||
};
|
||||
defer if (slice_mcv_lock) |lock| self.register_manager.unlockReg(lock);
|
||||
|
||||
const elem_ty = slice_ty.childType();
|
||||
const elem_ty = slice_ty.childType(mod);
|
||||
const elem_size = elem_ty.abiSize(mod);
|
||||
var buf: Type.SlicePtrFieldTypeBuffer = undefined;
|
||||
const slice_ptr_field_type = slice_ty.slicePtrFieldType(&buf);
|
||||
@ -4116,7 +4112,7 @@ fn airArrayElemVal(self: *Self, inst: Air.Inst.Index) !void {
|
||||
};
|
||||
defer if (array_lock) |lock| self.register_manager.unlockReg(lock);
|
||||
|
||||
const elem_ty = array_ty.childType();
|
||||
const elem_ty = array_ty.childType(mod);
|
||||
const elem_abi_size = elem_ty.abiSize(mod);
|
||||
|
||||
const index_ty = self.typeOf(bin_op.rhs);
|
||||
@ -4253,7 +4249,7 @@ fn airSetUnionTag(self: *Self, inst: Air.Inst.Index) !void {
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
|
||||
const ptr_union_ty = self.typeOf(bin_op.lhs);
|
||||
const union_ty = ptr_union_ty.childType();
|
||||
const union_ty = ptr_union_ty.childType(mod);
|
||||
const tag_ty = self.typeOf(bin_op.rhs);
|
||||
const layout = union_ty.unionGetLayout(mod);
|
||||
|
||||
@ -4287,7 +4283,9 @@ fn airSetUnionTag(self: *Self, inst: Air.Inst.Index) !void {
|
||||
break :blk MCValue{ .register = reg };
|
||||
} else ptr;
|
||||
|
||||
var ptr_tag_pl = ptr_union_ty.ptrInfo();
|
||||
var ptr_tag_pl: Type.Payload.Pointer = .{
|
||||
.data = ptr_union_ty.ptrInfo(mod),
|
||||
};
|
||||
ptr_tag_pl.data.pointee_type = tag_ty;
|
||||
const ptr_tag_ty = Type.initPayload(&ptr_tag_pl.base);
|
||||
try self.store(ptr_tag_ty, adjusted_ptr, tag);
|
||||
@ -4924,14 +4922,11 @@ fn airFloatSign(self: *Self, inst: Air.Inst.Index) !void {
|
||||
var stack align(@alignOf(ExpectedContents)) =
|
||||
std.heap.stackFallback(@sizeOf(ExpectedContents), arena.allocator());
|
||||
|
||||
var vec_pl = Type.Payload.Array{
|
||||
.base = .{ .tag = .vector },
|
||||
.data = .{
|
||||
.len = @divExact(abi_size * 8, scalar_bits),
|
||||
.elem_type = try mod.intType(.signed, scalar_bits),
|
||||
},
|
||||
};
|
||||
const vec_ty = Type.initPayload(&vec_pl.base);
|
||||
const vec_ty = try mod.vectorType(.{
|
||||
.len = @divExact(abi_size * 8, scalar_bits),
|
||||
.child = (try mod.intType(.signed, scalar_bits)).ip_index,
|
||||
});
|
||||
|
||||
const sign_val = switch (tag) {
|
||||
.neg => try vec_ty.minInt(stack.get(), mod),
|
||||
.fabs => try vec_ty.maxInt(stack.get(), mod),
|
||||
@ -5034,15 +5029,15 @@ fn genRound(self: *Self, ty: Type, dst_reg: Register, src_mcv: MCValue, mode: u4
|
||||
16, 80, 128 => null,
|
||||
else => unreachable,
|
||||
},
|
||||
.Vector => switch (ty.childType().zigTypeTag(mod)) {
|
||||
.Float => switch (ty.childType().floatBits(self.target.*)) {
|
||||
32 => switch (ty.vectorLen()) {
|
||||
.Vector => switch (ty.childType(mod).zigTypeTag(mod)) {
|
||||
.Float => switch (ty.childType(mod).floatBits(self.target.*)) {
|
||||
32 => switch (ty.vectorLen(mod)) {
|
||||
1 => if (self.hasFeature(.avx)) .{ .v_ss, .round } else .{ ._ss, .round },
|
||||
2...4 => if (self.hasFeature(.avx)) .{ .v_ps, .round } else .{ ._ps, .round },
|
||||
5...8 => if (self.hasFeature(.avx)) .{ .v_ps, .round } else null,
|
||||
else => null,
|
||||
},
|
||||
64 => switch (ty.vectorLen()) {
|
||||
64 => switch (ty.vectorLen(mod)) {
|
||||
1 => if (self.hasFeature(.avx)) .{ .v_sd, .round } else .{ ._sd, .round },
|
||||
2 => if (self.hasFeature(.avx)) .{ .v_pd, .round } else .{ ._pd, .round },
|
||||
3...4 => if (self.hasFeature(.avx)) .{ .v_pd, .round } else null,
|
||||
@ -5131,9 +5126,9 @@ fn airSqrt(self: *Self, inst: Air.Inst.Index) !void {
|
||||
80, 128 => null,
|
||||
else => unreachable,
|
||||
},
|
||||
.Vector => switch (ty.childType().zigTypeTag(mod)) {
|
||||
.Float => switch (ty.childType().floatBits(self.target.*)) {
|
||||
16 => if (self.hasFeature(.f16c)) switch (ty.vectorLen()) {
|
||||
.Vector => switch (ty.childType(mod).zigTypeTag(mod)) {
|
||||
.Float => switch (ty.childType(mod).floatBits(self.target.*)) {
|
||||
16 => if (self.hasFeature(.f16c)) switch (ty.vectorLen(mod)) {
|
||||
1 => {
|
||||
try self.asmRegisterRegister(
|
||||
.{ .v_ps, .cvtph2 },
|
||||
@ -5184,13 +5179,13 @@ fn airSqrt(self: *Self, inst: Air.Inst.Index) !void {
|
||||
},
|
||||
else => null,
|
||||
} else null,
|
||||
32 => switch (ty.vectorLen()) {
|
||||
32 => switch (ty.vectorLen(mod)) {
|
||||
1 => if (self.hasFeature(.avx)) .{ .v_ss, .sqrt } else .{ ._ss, .sqrt },
|
||||
2...4 => if (self.hasFeature(.avx)) .{ .v_ps, .sqrt } else .{ ._ps, .sqrt },
|
||||
5...8 => if (self.hasFeature(.avx)) .{ .v_ps, .sqrt } else null,
|
||||
else => null,
|
||||
},
|
||||
64 => switch (ty.vectorLen()) {
|
||||
64 => switch (ty.vectorLen(mod)) {
|
||||
1 => if (self.hasFeature(.avx)) .{ .v_sd, .sqrt } else .{ ._sd, .sqrt },
|
||||
2 => if (self.hasFeature(.avx)) .{ .v_pd, .sqrt } else .{ ._pd, .sqrt },
|
||||
3...4 => if (self.hasFeature(.avx)) .{ .v_pd, .sqrt } else null,
|
||||
@ -5292,7 +5287,7 @@ fn reuseOperandAdvanced(
|
||||
|
||||
fn packedLoad(self: *Self, dst_mcv: MCValue, ptr_ty: Type, ptr_mcv: MCValue) InnerError!void {
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const ptr_info = ptr_ty.ptrInfo().data;
|
||||
const ptr_info = ptr_ty.ptrInfo(mod);
|
||||
|
||||
const val_ty = ptr_info.pointee_type;
|
||||
const val_abi_size = @intCast(u32, val_ty.abiSize(mod));
|
||||
@ -5365,7 +5360,8 @@ fn packedLoad(self: *Self, dst_mcv: MCValue, ptr_ty: Type, ptr_mcv: MCValue) Inn
|
||||
}
|
||||
|
||||
fn load(self: *Self, dst_mcv: MCValue, ptr_ty: Type, ptr_mcv: MCValue) InnerError!void {
|
||||
const dst_ty = ptr_ty.childType();
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const dst_ty = ptr_ty.childType(mod);
|
||||
switch (ptr_mcv) {
|
||||
.none,
|
||||
.unreach,
|
||||
@ -5424,7 +5420,7 @@ fn airLoad(self: *Self, inst: Air.Inst.Index) !void {
|
||||
else
|
||||
try self.allocRegOrMem(inst, true);
|
||||
|
||||
if (ptr_ty.ptrInfo().data.host_size > 0) {
|
||||
if (ptr_ty.ptrInfo(mod).host_size > 0) {
|
||||
try self.packedLoad(dst_mcv, ptr_ty, ptr_mcv);
|
||||
} else {
|
||||
try self.load(dst_mcv, ptr_ty, ptr_mcv);
|
||||
@ -5436,8 +5432,8 @@ fn airLoad(self: *Self, inst: Air.Inst.Index) !void {
|
||||
|
||||
fn packedStore(self: *Self, ptr_ty: Type, ptr_mcv: MCValue, src_mcv: MCValue) InnerError!void {
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const ptr_info = ptr_ty.ptrInfo().data;
|
||||
const src_ty = ptr_ty.childType();
|
||||
const ptr_info = ptr_ty.ptrInfo(mod);
|
||||
const src_ty = ptr_ty.childType(mod);
|
||||
|
||||
const limb_abi_size: u16 = @min(ptr_info.host_size, 8);
|
||||
const limb_abi_bits = limb_abi_size * 8;
|
||||
@ -5509,7 +5505,8 @@ fn packedStore(self: *Self, ptr_ty: Type, ptr_mcv: MCValue, src_mcv: MCValue) In
|
||||
}
|
||||
|
||||
fn store(self: *Self, ptr_ty: Type, ptr_mcv: MCValue, src_mcv: MCValue) InnerError!void {
|
||||
const src_ty = ptr_ty.childType();
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const src_ty = ptr_ty.childType(mod);
|
||||
switch (ptr_mcv) {
|
||||
.none,
|
||||
.unreach,
|
||||
@ -5544,6 +5541,7 @@ fn store(self: *Self, ptr_ty: Type, ptr_mcv: MCValue, src_mcv: MCValue) InnerErr
|
||||
}
|
||||
|
||||
fn airStore(self: *Self, inst: Air.Inst.Index, safety: bool) !void {
|
||||
const mod = self.bin_file.options.module.?;
|
||||
if (safety) {
|
||||
// TODO if the value is undef, write 0xaa bytes to dest
|
||||
} else {
|
||||
@ -5553,7 +5551,7 @@ fn airStore(self: *Self, inst: Air.Inst.Index, safety: bool) !void {
|
||||
const ptr_mcv = try self.resolveInst(bin_op.lhs);
|
||||
const ptr_ty = self.typeOf(bin_op.lhs);
|
||||
const src_mcv = try self.resolveInst(bin_op.rhs);
|
||||
if (ptr_ty.ptrInfo().data.host_size > 0) {
|
||||
if (ptr_ty.ptrInfo(mod).host_size > 0) {
|
||||
try self.packedStore(ptr_ty, ptr_mcv, src_mcv);
|
||||
} else {
|
||||
try self.store(ptr_ty, ptr_mcv, src_mcv);
|
||||
@ -5578,11 +5576,11 @@ fn fieldPtr(self: *Self, inst: Air.Inst.Index, operand: Air.Inst.Ref, index: u32
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const ptr_field_ty = self.typeOfIndex(inst);
|
||||
const ptr_container_ty = self.typeOf(operand);
|
||||
const container_ty = ptr_container_ty.childType();
|
||||
const container_ty = ptr_container_ty.childType(mod);
|
||||
const field_offset = @intCast(i32, switch (container_ty.containerLayout()) {
|
||||
.Auto, .Extern => container_ty.structFieldOffset(index, mod),
|
||||
.Packed => if (container_ty.zigTypeTag(mod) == .Struct and
|
||||
ptr_field_ty.ptrInfo().data.host_size == 0)
|
||||
ptr_field_ty.ptrInfo(mod).host_size == 0)
|
||||
container_ty.packedStructFieldByteOffset(index, mod)
|
||||
else
|
||||
0,
|
||||
@ -5760,7 +5758,7 @@ fn airFieldParentPtr(self: *Self, inst: Air.Inst.Index) !void {
|
||||
const extra = self.air.extraData(Air.FieldParentPtr, ty_pl.payload).data;
|
||||
|
||||
const inst_ty = self.typeOfIndex(inst);
|
||||
const parent_ty = inst_ty.childType();
|
||||
const parent_ty = inst_ty.childType(mod);
|
||||
const field_offset = @intCast(i32, parent_ty.structFieldOffset(extra.field_index, mod));
|
||||
|
||||
const src_mcv = try self.resolveInst(extra.field_ptr);
|
||||
@ -6680,10 +6678,10 @@ fn genBinOp(
|
||||
80, 128 => null,
|
||||
else => unreachable,
|
||||
},
|
||||
.Vector => switch (lhs_ty.childType().zigTypeTag(mod)) {
|
||||
.Vector => switch (lhs_ty.childType(mod).zigTypeTag(mod)) {
|
||||
else => null,
|
||||
.Int => switch (lhs_ty.childType().intInfo(mod).bits) {
|
||||
8 => switch (lhs_ty.vectorLen()) {
|
||||
.Int => switch (lhs_ty.childType(mod).intInfo(mod).bits) {
|
||||
8 => switch (lhs_ty.vectorLen(mod)) {
|
||||
1...16 => switch (air_tag) {
|
||||
.add,
|
||||
.addwrap,
|
||||
@ -6694,7 +6692,7 @@ fn genBinOp(
|
||||
.bit_and => if (self.hasFeature(.avx)) .{ .vp_, .@"and" } else .{ .p_, .@"and" },
|
||||
.bit_or => if (self.hasFeature(.avx)) .{ .vp_, .@"or" } else .{ .p_, .@"or" },
|
||||
.xor => if (self.hasFeature(.avx)) .{ .vp_, .xor } else .{ .p_, .xor },
|
||||
.min => switch (lhs_ty.childType().intInfo(mod).signedness) {
|
||||
.min => switch (lhs_ty.childType(mod).intInfo(mod).signedness) {
|
||||
.signed => if (self.hasFeature(.avx))
|
||||
.{ .vp_b, .mins }
|
||||
else if (self.hasFeature(.sse4_1))
|
||||
@ -6708,7 +6706,7 @@ fn genBinOp(
|
||||
else
|
||||
null,
|
||||
},
|
||||
.max => switch (lhs_ty.childType().intInfo(mod).signedness) {
|
||||
.max => switch (lhs_ty.childType(mod).intInfo(mod).signedness) {
|
||||
.signed => if (self.hasFeature(.avx))
|
||||
.{ .vp_b, .maxs }
|
||||
else if (self.hasFeature(.sse4_1))
|
||||
@ -6734,11 +6732,11 @@ fn genBinOp(
|
||||
.bit_and => if (self.hasFeature(.avx2)) .{ .vp_, .@"and" } else null,
|
||||
.bit_or => if (self.hasFeature(.avx2)) .{ .vp_, .@"or" } else null,
|
||||
.xor => if (self.hasFeature(.avx2)) .{ .vp_, .xor } else null,
|
||||
.min => switch (lhs_ty.childType().intInfo(mod).signedness) {
|
||||
.min => switch (lhs_ty.childType(mod).intInfo(mod).signedness) {
|
||||
.signed => if (self.hasFeature(.avx2)) .{ .vp_b, .mins } else null,
|
||||
.unsigned => if (self.hasFeature(.avx)) .{ .vp_b, .minu } else null,
|
||||
},
|
||||
.max => switch (lhs_ty.childType().intInfo(mod).signedness) {
|
||||
.max => switch (lhs_ty.childType(mod).intInfo(mod).signedness) {
|
||||
.signed => if (self.hasFeature(.avx2)) .{ .vp_b, .maxs } else null,
|
||||
.unsigned => if (self.hasFeature(.avx2)) .{ .vp_b, .maxu } else null,
|
||||
},
|
||||
@ -6746,7 +6744,7 @@ fn genBinOp(
|
||||
},
|
||||
else => null,
|
||||
},
|
||||
16 => switch (lhs_ty.vectorLen()) {
|
||||
16 => switch (lhs_ty.vectorLen(mod)) {
|
||||
1...8 => switch (air_tag) {
|
||||
.add,
|
||||
.addwrap,
|
||||
@ -6760,7 +6758,7 @@ fn genBinOp(
|
||||
.bit_and => if (self.hasFeature(.avx)) .{ .vp_, .@"and" } else .{ .p_, .@"and" },
|
||||
.bit_or => if (self.hasFeature(.avx)) .{ .vp_, .@"or" } else .{ .p_, .@"or" },
|
||||
.xor => if (self.hasFeature(.avx)) .{ .vp_, .xor } else .{ .p_, .xor },
|
||||
.min => switch (lhs_ty.childType().intInfo(mod).signedness) {
|
||||
.min => switch (lhs_ty.childType(mod).intInfo(mod).signedness) {
|
||||
.signed => if (self.hasFeature(.avx))
|
||||
.{ .vp_w, .mins }
|
||||
else
|
||||
@ -6770,7 +6768,7 @@ fn genBinOp(
|
||||
else
|
||||
.{ .p_w, .minu },
|
||||
},
|
||||
.max => switch (lhs_ty.childType().intInfo(mod).signedness) {
|
||||
.max => switch (lhs_ty.childType(mod).intInfo(mod).signedness) {
|
||||
.signed => if (self.hasFeature(.avx))
|
||||
.{ .vp_w, .maxs }
|
||||
else
|
||||
@ -6795,11 +6793,11 @@ fn genBinOp(
|
||||
.bit_and => if (self.hasFeature(.avx2)) .{ .vp_, .@"and" } else null,
|
||||
.bit_or => if (self.hasFeature(.avx2)) .{ .vp_, .@"or" } else null,
|
||||
.xor => if (self.hasFeature(.avx2)) .{ .vp_, .xor } else null,
|
||||
.min => switch (lhs_ty.childType().intInfo(mod).signedness) {
|
||||
.min => switch (lhs_ty.childType(mod).intInfo(mod).signedness) {
|
||||
.signed => if (self.hasFeature(.avx2)) .{ .vp_w, .mins } else null,
|
||||
.unsigned => if (self.hasFeature(.avx)) .{ .vp_w, .minu } else null,
|
||||
},
|
||||
.max => switch (lhs_ty.childType().intInfo(mod).signedness) {
|
||||
.max => switch (lhs_ty.childType(mod).intInfo(mod).signedness) {
|
||||
.signed => if (self.hasFeature(.avx2)) .{ .vp_w, .maxs } else null,
|
||||
.unsigned => if (self.hasFeature(.avx2)) .{ .vp_w, .maxu } else null,
|
||||
},
|
||||
@ -6807,7 +6805,7 @@ fn genBinOp(
|
||||
},
|
||||
else => null,
|
||||
},
|
||||
32 => switch (lhs_ty.vectorLen()) {
|
||||
32 => switch (lhs_ty.vectorLen(mod)) {
|
||||
1...4 => switch (air_tag) {
|
||||
.add,
|
||||
.addwrap,
|
||||
@ -6826,7 +6824,7 @@ fn genBinOp(
|
||||
.bit_and => if (self.hasFeature(.avx)) .{ .vp_, .@"and" } else .{ .p_, .@"and" },
|
||||
.bit_or => if (self.hasFeature(.avx)) .{ .vp_, .@"or" } else .{ .p_, .@"or" },
|
||||
.xor => if (self.hasFeature(.avx)) .{ .vp_, .xor } else .{ .p_, .xor },
|
||||
.min => switch (lhs_ty.childType().intInfo(mod).signedness) {
|
||||
.min => switch (lhs_ty.childType(mod).intInfo(mod).signedness) {
|
||||
.signed => if (self.hasFeature(.avx))
|
||||
.{ .vp_d, .mins }
|
||||
else if (self.hasFeature(.sse4_1))
|
||||
@ -6840,7 +6838,7 @@ fn genBinOp(
|
||||
else
|
||||
null,
|
||||
},
|
||||
.max => switch (lhs_ty.childType().intInfo(mod).signedness) {
|
||||
.max => switch (lhs_ty.childType(mod).intInfo(mod).signedness) {
|
||||
.signed => if (self.hasFeature(.avx))
|
||||
.{ .vp_d, .maxs }
|
||||
else if (self.hasFeature(.sse4_1))
|
||||
@ -6869,11 +6867,11 @@ fn genBinOp(
|
||||
.bit_and => if (self.hasFeature(.avx2)) .{ .vp_, .@"and" } else null,
|
||||
.bit_or => if (self.hasFeature(.avx2)) .{ .vp_, .@"or" } else null,
|
||||
.xor => if (self.hasFeature(.avx2)) .{ .vp_, .xor } else null,
|
||||
.min => switch (lhs_ty.childType().intInfo(mod).signedness) {
|
||||
.min => switch (lhs_ty.childType(mod).intInfo(mod).signedness) {
|
||||
.signed => if (self.hasFeature(.avx2)) .{ .vp_d, .mins } else null,
|
||||
.unsigned => if (self.hasFeature(.avx)) .{ .vp_d, .minu } else null,
|
||||
},
|
||||
.max => switch (lhs_ty.childType().intInfo(mod).signedness) {
|
||||
.max => switch (lhs_ty.childType(mod).intInfo(mod).signedness) {
|
||||
.signed => if (self.hasFeature(.avx2)) .{ .vp_d, .maxs } else null,
|
||||
.unsigned => if (self.hasFeature(.avx2)) .{ .vp_d, .maxu } else null,
|
||||
},
|
||||
@ -6881,7 +6879,7 @@ fn genBinOp(
|
||||
},
|
||||
else => null,
|
||||
},
|
||||
64 => switch (lhs_ty.vectorLen()) {
|
||||
64 => switch (lhs_ty.vectorLen(mod)) {
|
||||
1...2 => switch (air_tag) {
|
||||
.add,
|
||||
.addwrap,
|
||||
@ -6910,8 +6908,8 @@ fn genBinOp(
|
||||
},
|
||||
else => null,
|
||||
},
|
||||
.Float => switch (lhs_ty.childType().floatBits(self.target.*)) {
|
||||
16 => if (self.hasFeature(.f16c)) switch (lhs_ty.vectorLen()) {
|
||||
.Float => switch (lhs_ty.childType(mod).floatBits(self.target.*)) {
|
||||
16 => if (self.hasFeature(.f16c)) switch (lhs_ty.vectorLen(mod)) {
|
||||
1 => {
|
||||
const tmp_reg = (try self.register_manager.allocReg(null, sse)).to128();
|
||||
const tmp_lock = self.register_manager.lockRegAssumeUnused(tmp_reg);
|
||||
@ -7086,7 +7084,7 @@ fn genBinOp(
|
||||
},
|
||||
else => null,
|
||||
} else null,
|
||||
32 => switch (lhs_ty.vectorLen()) {
|
||||
32 => switch (lhs_ty.vectorLen(mod)) {
|
||||
1 => switch (air_tag) {
|
||||
.add => if (self.hasFeature(.avx)) .{ .v_ss, .add } else .{ ._ss, .add },
|
||||
.sub => if (self.hasFeature(.avx)) .{ .v_ss, .sub } else .{ ._ss, .sub },
|
||||
@ -7124,7 +7122,7 @@ fn genBinOp(
|
||||
} else null,
|
||||
else => null,
|
||||
},
|
||||
64 => switch (lhs_ty.vectorLen()) {
|
||||
64 => switch (lhs_ty.vectorLen(mod)) {
|
||||
1 => switch (air_tag) {
|
||||
.add => if (self.hasFeature(.avx)) .{ .v_sd, .add } else .{ ._sd, .add },
|
||||
.sub => if (self.hasFeature(.avx)) .{ .v_sd, .sub } else .{ ._sd, .sub },
|
||||
@ -7236,14 +7234,14 @@ fn genBinOp(
|
||||
16, 80, 128 => null,
|
||||
else => unreachable,
|
||||
},
|
||||
.Vector => switch (lhs_ty.childType().zigTypeTag(mod)) {
|
||||
.Float => switch (lhs_ty.childType().floatBits(self.target.*)) {
|
||||
32 => switch (lhs_ty.vectorLen()) {
|
||||
.Vector => switch (lhs_ty.childType(mod).zigTypeTag(mod)) {
|
||||
.Float => switch (lhs_ty.childType(mod).floatBits(self.target.*)) {
|
||||
32 => switch (lhs_ty.vectorLen(mod)) {
|
||||
1 => .{ .v_ss, .cmp },
|
||||
2...8 => .{ .v_ps, .cmp },
|
||||
else => null,
|
||||
},
|
||||
64 => switch (lhs_ty.vectorLen()) {
|
||||
64 => switch (lhs_ty.vectorLen(mod)) {
|
||||
1 => .{ .v_sd, .cmp },
|
||||
2...4 => .{ .v_pd, .cmp },
|
||||
else => null,
|
||||
@ -7270,13 +7268,13 @@ fn genBinOp(
|
||||
16, 80, 128 => null,
|
||||
else => unreachable,
|
||||
},
|
||||
.Vector => switch (lhs_ty.childType().zigTypeTag(mod)) {
|
||||
.Float => switch (lhs_ty.childType().floatBits(self.target.*)) {
|
||||
32 => switch (lhs_ty.vectorLen()) {
|
||||
.Vector => switch (lhs_ty.childType(mod).zigTypeTag(mod)) {
|
||||
.Float => switch (lhs_ty.childType(mod).floatBits(self.target.*)) {
|
||||
32 => switch (lhs_ty.vectorLen(mod)) {
|
||||
1...8 => .{ .v_ps, .blendv },
|
||||
else => null,
|
||||
},
|
||||
64 => switch (lhs_ty.vectorLen()) {
|
||||
64 => switch (lhs_ty.vectorLen(mod)) {
|
||||
1...4 => .{ .v_pd, .blendv },
|
||||
else => null,
|
||||
},
|
||||
@ -7304,14 +7302,14 @@ fn genBinOp(
|
||||
16, 80, 128 => null,
|
||||
else => unreachable,
|
||||
},
|
||||
.Vector => switch (lhs_ty.childType().zigTypeTag(mod)) {
|
||||
.Float => switch (lhs_ty.childType().floatBits(self.target.*)) {
|
||||
32 => switch (lhs_ty.vectorLen()) {
|
||||
.Vector => switch (lhs_ty.childType(mod).zigTypeTag(mod)) {
|
||||
.Float => switch (lhs_ty.childType(mod).floatBits(self.target.*)) {
|
||||
32 => switch (lhs_ty.vectorLen(mod)) {
|
||||
1 => .{ ._ss, .cmp },
|
||||
2...4 => .{ ._ps, .cmp },
|
||||
else => null,
|
||||
},
|
||||
64 => switch (lhs_ty.vectorLen()) {
|
||||
64 => switch (lhs_ty.vectorLen(mod)) {
|
||||
1 => .{ ._sd, .cmp },
|
||||
2 => .{ ._pd, .cmp },
|
||||
else => null,
|
||||
@ -7337,13 +7335,13 @@ fn genBinOp(
|
||||
16, 80, 128 => null,
|
||||
else => unreachable,
|
||||
},
|
||||
.Vector => switch (lhs_ty.childType().zigTypeTag(mod)) {
|
||||
.Float => switch (lhs_ty.childType().floatBits(self.target.*)) {
|
||||
32 => switch (lhs_ty.vectorLen()) {
|
||||
.Vector => switch (lhs_ty.childType(mod).zigTypeTag(mod)) {
|
||||
.Float => switch (lhs_ty.childType(mod).floatBits(self.target.*)) {
|
||||
32 => switch (lhs_ty.vectorLen(mod)) {
|
||||
1...4 => .{ ._ps, .blendv },
|
||||
else => null,
|
||||
},
|
||||
64 => switch (lhs_ty.vectorLen()) {
|
||||
64 => switch (lhs_ty.vectorLen(mod)) {
|
||||
1...2 => .{ ._pd, .blendv },
|
||||
else => null,
|
||||
},
|
||||
@ -7368,13 +7366,13 @@ fn genBinOp(
|
||||
16, 80, 128 => null,
|
||||
else => unreachable,
|
||||
},
|
||||
.Vector => switch (lhs_ty.childType().zigTypeTag(mod)) {
|
||||
.Float => switch (lhs_ty.childType().floatBits(self.target.*)) {
|
||||
32 => switch (lhs_ty.vectorLen()) {
|
||||
.Vector => switch (lhs_ty.childType(mod).zigTypeTag(mod)) {
|
||||
.Float => switch (lhs_ty.childType(mod).floatBits(self.target.*)) {
|
||||
32 => switch (lhs_ty.vectorLen(mod)) {
|
||||
1...4 => .{ ._ps, .@"and" },
|
||||
else => null,
|
||||
},
|
||||
64 => switch (lhs_ty.vectorLen()) {
|
||||
64 => switch (lhs_ty.vectorLen(mod)) {
|
||||
1...2 => .{ ._pd, .@"and" },
|
||||
else => null,
|
||||
},
|
||||
@ -7398,13 +7396,13 @@ fn genBinOp(
|
||||
16, 80, 128 => null,
|
||||
else => unreachable,
|
||||
},
|
||||
.Vector => switch (lhs_ty.childType().zigTypeTag(mod)) {
|
||||
.Float => switch (lhs_ty.childType().floatBits(self.target.*)) {
|
||||
32 => switch (lhs_ty.vectorLen()) {
|
||||
.Vector => switch (lhs_ty.childType(mod).zigTypeTag(mod)) {
|
||||
.Float => switch (lhs_ty.childType(mod).floatBits(self.target.*)) {
|
||||
32 => switch (lhs_ty.vectorLen(mod)) {
|
||||
1...4 => .{ ._ps, .andn },
|
||||
else => null,
|
||||
},
|
||||
64 => switch (lhs_ty.vectorLen()) {
|
||||
64 => switch (lhs_ty.vectorLen(mod)) {
|
||||
1...2 => .{ ._pd, .andn },
|
||||
else => null,
|
||||
},
|
||||
@ -7428,13 +7426,13 @@ fn genBinOp(
|
||||
16, 80, 128 => null,
|
||||
else => unreachable,
|
||||
},
|
||||
.Vector => switch (lhs_ty.childType().zigTypeTag(mod)) {
|
||||
.Float => switch (lhs_ty.childType().floatBits(self.target.*)) {
|
||||
32 => switch (lhs_ty.vectorLen()) {
|
||||
.Vector => switch (lhs_ty.childType(mod).zigTypeTag(mod)) {
|
||||
.Float => switch (lhs_ty.childType(mod).floatBits(self.target.*)) {
|
||||
32 => switch (lhs_ty.vectorLen(mod)) {
|
||||
1...4 => .{ ._ps, .@"or" },
|
||||
else => null,
|
||||
},
|
||||
64 => switch (lhs_ty.vectorLen()) {
|
||||
64 => switch (lhs_ty.vectorLen(mod)) {
|
||||
1...2 => .{ ._pd, .@"or" },
|
||||
else => null,
|
||||
},
|
||||
@ -7586,11 +7584,7 @@ fn genBinOpMir(
|
||||
.load_got,
|
||||
.load_tlv,
|
||||
=> {
|
||||
var ptr_pl = Type.Payload.ElemType{
|
||||
.base = .{ .tag = .single_const_pointer },
|
||||
.data = ty,
|
||||
};
|
||||
const ptr_ty = Type.initPayload(&ptr_pl.base);
|
||||
const ptr_ty = try mod.singleConstPtrType(ty);
|
||||
const addr_reg = try self.copyToTmpRegister(ptr_ty, src_mcv.address());
|
||||
return self.genBinOpMir(mir_tag, ty, dst_mcv, .{
|
||||
.indirect = .{ .reg = addr_reg },
|
||||
@ -8058,7 +8052,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallModifier
|
||||
|
||||
const fn_ty = switch (ty.zigTypeTag(mod)) {
|
||||
.Fn => ty,
|
||||
.Pointer => ty.childType(),
|
||||
.Pointer => ty.childType(mod),
|
||||
else => unreachable,
|
||||
};
|
||||
|
||||
@ -8506,10 +8500,11 @@ fn airTry(self: *Self, inst: Air.Inst.Index) !void {
|
||||
}
|
||||
|
||||
fn airTryPtr(self: *Self, inst: Air.Inst.Index) !void {
|
||||
const mod = self.bin_file.options.module.?;
|
||||
const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
|
||||
const extra = self.air.extraData(Air.TryPtr, ty_pl.payload);
|
||||
const body = self.air.extra[extra.end..][0..extra.data.body_len];
|
||||
const err_union_ty = self.typeOf(extra.data.ptr).childType();
|
||||
const err_union_ty = self.typeOf(extra.data.ptr).childType(mod);
|
||||
const result = try self.genTry(inst, extra.data.ptr, body, err_union_ty, true);
|
||||
return self.finishAir(inst, result, .{ .none, .none, .none });
|
||||
}
|
||||
@ -8683,8 +8678,7 @@ fn isNull(self: *Self, inst: Air.Inst.Index, opt_ty: Type, opt_mcv: MCValue) !MC
|
||||
try self.spillEflagsIfOccupied();
|
||||
self.eflags_inst = inst;
|
||||
|
||||
var pl_buf: Type.Payload.ElemType = undefined;
|
||||
const pl_ty = opt_ty.optionalChild(&pl_buf);
|
||||
const pl_ty = opt_ty.optionalChild(mod);
|
||||
|
||||
var ptr_buf: Type.SlicePtrFieldTypeBuffer = undefined;
|
||||
const some_info: struct { off: i32, ty: Type } = if (opt_ty.optionalReprIsPayload(mod))
|
||||
@ -8775,9 +8769,8 @@ fn isNullPtr(self: *Self, inst: Air.Inst.Index, ptr_ty: Type, ptr_mcv: MCValue)
|
||||
try self.spillEflagsIfOccupied();
|
||||
self.eflags_inst = inst;
|
||||
|
||||
const opt_ty = ptr_ty.childType();
|
||||
var pl_buf: Type.Payload.ElemType = undefined;
|
||||
const pl_ty = opt_ty.optionalChild(&pl_buf);
|
||||
const opt_ty = ptr_ty.childType(mod);
|
||||
const pl_ty = opt_ty.optionalChild(mod);
|
||||
|
||||
var ptr_buf: Type.SlicePtrFieldTypeBuffer = undefined;
|
||||
const some_info: struct { off: i32, ty: Type } = if (opt_ty.optionalReprIsPayload(mod))
|
||||
@@ -8919,6 +8912,7 @@ fn airIsErr(self: *Self, inst: Air.Inst.Index) !void {
}

fn airIsErrPtr(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const un_op = self.air.instructions.items(.data)[inst].un_op;

const operand_ptr = try self.resolveInst(un_op);
@@ -8939,7 +8933,7 @@ fn airIsErrPtr(self: *Self, inst: Air.Inst.Index) !void {
const ptr_ty = self.typeOf(un_op);
try self.load(operand, ptr_ty, operand_ptr);

const result = try self.isErr(inst, ptr_ty.childType(), operand);
const result = try self.isErr(inst, ptr_ty.childType(mod), operand);

return self.finishAir(inst, result, .{ un_op, .none, .none });
}
@@ -8953,6 +8947,7 @@ fn airIsNonErr(self: *Self, inst: Air.Inst.Index) !void {
}

fn airIsNonErrPtr(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const un_op = self.air.instructions.items(.data)[inst].un_op;

const operand_ptr = try self.resolveInst(un_op);
@@ -8973,7 +8968,7 @@ fn airIsNonErrPtr(self: *Self, inst: Air.Inst.Index) !void {
const ptr_ty = self.typeOf(un_op);
try self.load(operand, ptr_ty, operand_ptr);

const result = try self.isNonErr(inst, ptr_ty.childType(), operand);
const result = try self.isNonErr(inst, ptr_ty.childType(mod), operand);

return self.finishAir(inst, result, .{ un_op, .none, .none });
}
@@ -9452,9 +9447,9 @@ fn moveStrategy(self: *Self, ty: Type, aligned: bool) !MoveStrategy {
else if (aligned) .{ ._, .movdqa } else .{ ._, .movdqu } },
else => {},
},
.Vector => switch (ty.childType().zigTypeTag(mod)) {
.Int => switch (ty.childType().intInfo(mod).bits) {
8 => switch (ty.vectorLen()) {
.Vector => switch (ty.childType(mod).zigTypeTag(mod)) {
.Int => switch (ty.childType(mod).intInfo(mod).bits) {
8 => switch (ty.vectorLen(mod)) {
1 => if (self.hasFeature(.avx)) return .{ .vex_insert_extract = .{
.insert = .{ .vp_b, .insr },
.extract = .{ .vp_b, .extr },
@@ -9484,7 +9479,7 @@ fn moveStrategy(self: *Self, ty: Type, aligned: bool) !MoveStrategy {
return .{ .move = if (aligned) .{ .v_, .movdqa } else .{ .v_, .movdqu } },
else => {},
},
16 => switch (ty.vectorLen()) {
16 => switch (ty.vectorLen(mod)) {
1 => return if (self.hasFeature(.avx)) .{ .vex_insert_extract = .{
.insert = .{ .vp_w, .insr },
.extract = .{ .vp_w, .extr },
@@ -9507,7 +9502,7 @@ fn moveStrategy(self: *Self, ty: Type, aligned: bool) !MoveStrategy {
return .{ .move = if (aligned) .{ .v_, .movdqa } else .{ .v_, .movdqu } },
else => {},
},
32 => switch (ty.vectorLen()) {
32 => switch (ty.vectorLen(mod)) {
1 => return .{ .move = if (self.hasFeature(.avx))
.{ .v_d, .mov }
else
@@ -9523,7 +9518,7 @@ fn moveStrategy(self: *Self, ty: Type, aligned: bool) !MoveStrategy {
return .{ .move = if (aligned) .{ .v_, .movdqa } else .{ .v_, .movdqu } },
else => {},
},
64 => switch (ty.vectorLen()) {
64 => switch (ty.vectorLen(mod)) {
1 => return .{ .move = if (self.hasFeature(.avx))
.{ .v_q, .mov }
else
@@ -9535,7 +9530,7 @@ fn moveStrategy(self: *Self, ty: Type, aligned: bool) !MoveStrategy {
return .{ .move = if (aligned) .{ .v_, .movdqa } else .{ .v_, .movdqu } },
else => {},
},
128 => switch (ty.vectorLen()) {
128 => switch (ty.vectorLen(mod)) {
1 => return .{ .move = if (self.hasFeature(.avx))
if (aligned) .{ .v_, .movdqa } else .{ .v_, .movdqu }
else if (aligned) .{ ._, .movdqa } else .{ ._, .movdqu } },
@@ -9543,15 +9538,15 @@ fn moveStrategy(self: *Self, ty: Type, aligned: bool) !MoveStrategy {
return .{ .move = if (aligned) .{ .v_, .movdqa } else .{ .v_, .movdqu } },
else => {},
},
256 => switch (ty.vectorLen()) {
256 => switch (ty.vectorLen(mod)) {
1 => if (self.hasFeature(.avx))
return .{ .move = if (aligned) .{ .v_, .movdqa } else .{ .v_, .movdqu } },
else => {},
},
else => {},
},
.Float => switch (ty.childType().floatBits(self.target.*)) {
16 => switch (ty.vectorLen()) {
.Float => switch (ty.childType(mod).floatBits(self.target.*)) {
16 => switch (ty.vectorLen(mod)) {
1 => return if (self.hasFeature(.avx)) .{ .vex_insert_extract = .{
.insert = .{ .vp_w, .insr },
.extract = .{ .vp_w, .extr },
@@ -9574,7 +9569,7 @@ fn moveStrategy(self: *Self, ty: Type, aligned: bool) !MoveStrategy {
return .{ .move = if (aligned) .{ .v_, .movdqa } else .{ .v_, .movdqu } },
else => {},
},
32 => switch (ty.vectorLen()) {
32 => switch (ty.vectorLen(mod)) {
1 => return .{ .move = if (self.hasFeature(.avx))
.{ .v_ss, .mov }
else
@@ -9590,7 +9585,7 @@ fn moveStrategy(self: *Self, ty: Type, aligned: bool) !MoveStrategy {
return .{ .move = if (aligned) .{ .v_ps, .mova } else .{ .v_ps, .movu } },
else => {},
},
64 => switch (ty.vectorLen()) {
64 => switch (ty.vectorLen(mod)) {
1 => return .{ .move = if (self.hasFeature(.avx))
.{ .v_sd, .mov }
else
@@ -9602,7 +9597,7 @@ fn moveStrategy(self: *Self, ty: Type, aligned: bool) !MoveStrategy {
return .{ .move = if (aligned) .{ .v_pd, .mova } else .{ .v_pd, .movu } },
else => {},
},
128 => switch (ty.vectorLen()) {
128 => switch (ty.vectorLen(mod)) {
1 => return .{ .move = if (self.hasFeature(.avx))
if (aligned) .{ .v_, .movdqa } else .{ .v_, .movdqu }
else if (aligned) .{ ._, .movdqa } else .{ ._, .movdqu } },
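Note: the hunks above are one instance of the mechanical change this commit applies across the backends. Type queries that used to decode a legacy Type tag in place now have to consult the InternPool, so they take the Module. A minimal sketch of the call-site shape, with hypothetical locals (`ptr_ty`, `vec_ty`) standing in for whatever type is being inspected inside a CodeGen method; this only compiles inside the compiler itself and is not a standalone program:

    // `mod` is threaded into the type queries so they can read the InternPool.
    const mod = self.bin_file.options.module.?;
    const elem_ty = ptr_ty.childType(mod); // previously: ptr_ty.childType()
    const vec_len = vec_ty.vectorLen(mod); // previously: vec_ty.vectorLen()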
@@ -10248,8 +10243,8 @@ fn airArrayToSlice(self: *Self, inst: Air.Inst.Index) !void {
const slice_ty = self.typeOfIndex(inst);
const ptr_ty = self.typeOf(ty_op.operand);
const ptr = try self.resolveInst(ty_op.operand);
const array_ty = ptr_ty.childType();
const array_len = array_ty.arrayLen();
const array_ty = ptr_ty.childType(mod);
const array_len = array_ty.arrayLen(mod);

const frame_index = try self.allocFrameIndex(FrameAlloc.initType(slice_ty, mod));
try self.genSetMem(.{ .frame = frame_index }, 0, ptr_ty, ptr);
@@ -10790,16 +10785,16 @@ fn airMemset(self: *Self, inst: Air.Inst.Index, safety: bool) !void {
const elem_abi_size = @intCast(u31, elem_ty.abiSize(mod));

if (elem_abi_size == 1) {
const ptr: MCValue = switch (dst_ptr_ty.ptrSize()) {
const ptr: MCValue = switch (dst_ptr_ty.ptrSize(mod)) {
// TODO: this only handles slices stored in the stack
.Slice => dst_ptr,
.One => dst_ptr,
.C, .Many => unreachable,
};
const len: MCValue = switch (dst_ptr_ty.ptrSize()) {
const len: MCValue = switch (dst_ptr_ty.ptrSize(mod)) {
// TODO: this only handles slices stored in the stack
.Slice => dst_ptr.address().offset(8).deref(),
.One => .{ .immediate = dst_ptr_ty.childType().arrayLen() },
.One => .{ .immediate = dst_ptr_ty.childType(mod).arrayLen(mod) },
.C, .Many => unreachable,
};
const len_lock: ?RegisterLock = switch (len) {
@@ -10815,7 +10810,7 @@ fn airMemset(self: *Self, inst: Air.Inst.Index, safety: bool) !void {
// Store the first element, and then rely on memcpy copying forwards.
// Length zero requires a runtime check - so we handle arrays specially
// here to elide it.
switch (dst_ptr_ty.ptrSize()) {
switch (dst_ptr_ty.ptrSize(mod)) {
.Slice => {
var buf: Type.SlicePtrFieldTypeBuffer = undefined;
const slice_ptr_ty = dst_ptr_ty.slicePtrFieldType(&buf);
@@ -10858,13 +10853,9 @@ fn airMemset(self: *Self, inst: Air.Inst.Index, safety: bool) !void {
try self.performReloc(skip_reloc);
},
.One => {
var elem_ptr_pl = Type.Payload.ElemType{
.base = .{ .tag = .single_mut_pointer },
.data = elem_ty,
};
const elem_ptr_ty = Type.initPayload(&elem_ptr_pl.base);
const elem_ptr_ty = try mod.singleMutPtrType(elem_ty);

const len = dst_ptr_ty.childType().arrayLen();
const len = dst_ptr_ty.childType(mod).arrayLen(mod);

assert(len != 0); // prevented by Sema
try self.store(elem_ptr_ty, dst_ptr, src_val);
@@ -10889,6 +10880,7 @@ fn airMemset(self: *Self, inst: Air.Inst.Index, safety: bool) !void {
}

fn airMemcpy(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const bin_op = self.air.instructions.items(.data)[inst].bin_op;

const dst_ptr = try self.resolveInst(bin_op.lhs);
@@ -10906,9 +10898,9 @@ fn airMemcpy(self: *Self, inst: Air.Inst.Index) !void {
};
defer if (src_ptr_lock) |lock| self.register_manager.unlockReg(lock);

const len: MCValue = switch (dst_ptr_ty.ptrSize()) {
const len: MCValue = switch (dst_ptr_ty.ptrSize(mod)) {
.Slice => dst_ptr.address().offset(8).deref(),
.One => .{ .immediate = dst_ptr_ty.childType().arrayLen() },
.One => .{ .immediate = dst_ptr_ty.childType(mod).arrayLen(mod) },
.C, .Many => unreachable,
};
const len_lock: ?RegisterLock = switch (len) {
@@ -11059,7 +11051,7 @@ fn airSplat(self: *Self, inst: Air.Inst.Index) !void {
switch (scalar_ty.zigTypeTag(mod)) {
else => {},
.Float => switch (scalar_ty.floatBits(self.target.*)) {
32 => switch (vector_ty.vectorLen()) {
32 => switch (vector_ty.vectorLen(mod)) {
1 => {
if (self.reuseOperand(inst, ty_op.operand, 0, src_mcv)) break :result src_mcv;
const dst_reg = try self.register_manager.allocReg(inst, dst_rc);
@@ -11139,7 +11131,7 @@ fn airSplat(self: *Self, inst: Air.Inst.Index) !void {
},
else => {},
},
64 => switch (vector_ty.vectorLen()) {
64 => switch (vector_ty.vectorLen(mod)) {
1 => {
if (self.reuseOperand(inst, ty_op.operand, 0, src_mcv)) break :result src_mcv;
const dst_reg = try self.register_manager.allocReg(inst, dst_rc);
@@ -11205,7 +11197,7 @@ fn airSplat(self: *Self, inst: Air.Inst.Index) !void {
},
else => {},
},
128 => switch (vector_ty.vectorLen()) {
128 => switch (vector_ty.vectorLen(mod)) {
1 => {
if (self.reuseOperand(inst, ty_op.operand, 0, src_mcv)) break :result src_mcv;
const dst_reg = try self.register_manager.allocReg(inst, dst_rc);
@@ -11271,7 +11263,7 @@ fn airReduce(self: *Self, inst: Air.Inst.Index) !void {
fn airAggregateInit(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
const result_ty = self.typeOfIndex(inst);
const len = @intCast(usize, result_ty.arrayLen());
const len = @intCast(usize, result_ty.arrayLen(mod));
const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
const elements = @ptrCast([]const Air.Inst.Ref, self.air.extra[ty_pl.payload..][0..len]);
const result: MCValue = result: {
@@ -11375,7 +11367,7 @@ fn airAggregateInit(self: *Self, inst: Air.Inst.Index) !void {
.Array => {
const frame_index =
try self.allocFrameIndex(FrameAlloc.initType(result_ty, mod));
const elem_ty = result_ty.childType();
const elem_ty = result_ty.childType(mod);
const elem_size = @intCast(u32, elem_ty.abiSize(mod));

for (elements, 0..) |elem, elem_i| {
@@ -11387,7 +11379,7 @@ fn airAggregateInit(self: *Self, inst: Air.Inst.Index) !void {
const elem_off = @intCast(i32, elem_size * elem_i);
try self.genSetMem(.{ .frame = frame_index }, elem_off, elem_ty, mat_elem_mcv);
}
if (result_ty.sentinel()) |sentinel| try self.genSetMem(
if (result_ty.sentinel(mod)) |sentinel| try self.genSetMem(
.{ .frame = frame_index },
@intCast(i32, elem_size * elements.len),
elem_ty,
@@ -11512,14 +11504,14 @@ fn airMulAdd(self: *Self, inst: Air.Inst.Index) !void {
16, 80, 128 => null,
else => unreachable,
},
.Vector => switch (ty.childType().zigTypeTag(mod)) {
.Float => switch (ty.childType().floatBits(self.target.*)) {
32 => switch (ty.vectorLen()) {
.Vector => switch (ty.childType(mod).zigTypeTag(mod)) {
.Float => switch (ty.childType(mod).floatBits(self.target.*)) {
32 => switch (ty.vectorLen(mod)) {
1 => .{ .v_ss, .fmadd132 },
2...8 => .{ .v_ps, .fmadd132 },
else => null,
},
64 => switch (ty.vectorLen()) {
64 => switch (ty.vectorLen(mod)) {
1 => .{ .v_sd, .fmadd132 },
2...4 => .{ .v_pd, .fmadd132 },
else => null,
@@ -11539,14 +11531,14 @@ fn airMulAdd(self: *Self, inst: Air.Inst.Index) !void {
16, 80, 128 => null,
else => unreachable,
},
.Vector => switch (ty.childType().zigTypeTag(mod)) {
.Float => switch (ty.childType().floatBits(self.target.*)) {
32 => switch (ty.vectorLen()) {
.Vector => switch (ty.childType(mod).zigTypeTag(mod)) {
.Float => switch (ty.childType(mod).floatBits(self.target.*)) {
32 => switch (ty.vectorLen(mod)) {
1 => .{ .v_ss, .fmadd213 },
2...8 => .{ .v_ps, .fmadd213 },
else => null,
},
64 => switch (ty.vectorLen()) {
64 => switch (ty.vectorLen(mod)) {
1 => .{ .v_sd, .fmadd213 },
2...4 => .{ .v_pd, .fmadd213 },
else => null,
@@ -11566,14 +11558,14 @@ fn airMulAdd(self: *Self, inst: Air.Inst.Index) !void {
16, 80, 128 => null,
else => unreachable,
},
.Vector => switch (ty.childType().zigTypeTag(mod)) {
.Float => switch (ty.childType().floatBits(self.target.*)) {
32 => switch (ty.vectorLen()) {
.Vector => switch (ty.childType(mod).zigTypeTag(mod)) {
.Float => switch (ty.childType(mod).floatBits(self.target.*)) {
32 => switch (ty.vectorLen(mod)) {
1 => .{ .v_ss, .fmadd231 },
2...8 => .{ .v_ps, .fmadd231 },
else => null,
},
64 => switch (ty.vectorLen()) {
64 => switch (ty.vectorLen(mod)) {
1 => .{ .v_sd, .fmadd231 },
2...4 => .{ .v_pd, .fmadd231 },
else => null,
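The airMemset hunk above also shows the second recurring pattern: single-item pointer types are no longer assembled from a stack-allocated payload but requested from the Module, which interns them. A sketch of the two forms side by side, assuming `elem_ty` and `mod` are in scope as in the surrounding function:

    // Old encoding: build *T out of a Type.Payload.ElemType on the stack.
    var elem_ptr_pl = Type.Payload.ElemType{
        .base = .{ .tag = .single_mut_pointer },
        .data = elem_ty,
    };
    const old_ptr_ty = Type.initPayload(&elem_ptr_pl.base);

    // New encoding: the Module hands back the interned pointer type.
    const elem_ptr_ty = try mod.singleMutPtrType(elem_ty);

The new call is fallible, hence the `try` added at the call sites.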
@@ -76,7 +76,7 @@ pub fn classifySystemV(ty: Type, mod: *const Module, ctx: Context) [8]Class {
};
var result = [1]Class{.none} ** 8;
switch (ty.zigTypeTag(mod)) {
.Pointer => switch (ty.ptrSize()) {
.Pointer => switch (ty.ptrSize(mod)) {
.Slice => {
result[0] = .integer;
result[1] = .integer;
@@ -158,8 +158,8 @@ pub fn classifySystemV(ty: Type, mod: *const Module, ctx: Context) [8]Class {
else => unreachable,
},
.Vector => {
const elem_ty = ty.childType();
const bits = elem_ty.bitSize(mod) * ty.arrayLen();
const elem_ty = ty.childType(mod);
const bits = elem_ty.bitSize(mod) * ty.arrayLen(mod);
if (bits <= 64) return .{
.sse, .none, .none, .none,
.none, .none, .none, .none,

@@ -230,7 +230,7 @@ pub fn generateSymbol(
.Array => switch (typed_value.val.tag()) {
.bytes => {
const bytes = typed_value.val.castTag(.bytes).?.data;
const len = @intCast(usize, typed_value.ty.arrayLenIncludingSentinel());
const len = @intCast(usize, typed_value.ty.arrayLenIncludingSentinel(mod));
// The bytes payload already includes the sentinel, if any
try code.ensureUnusedCapacity(len);
code.appendSliceAssumeCapacity(bytes[0..len]);
@@ -241,7 +241,7 @@ pub fn generateSymbol(
const bytes = mod.string_literal_bytes.items[str_lit.index..][0..str_lit.len];
try code.ensureUnusedCapacity(bytes.len + 1);
code.appendSliceAssumeCapacity(bytes);
if (typed_value.ty.sentinel()) |sent_val| {
if (typed_value.ty.sentinel(mod)) |sent_val| {
const byte = @intCast(u8, sent_val.toUnsignedInt(mod));
code.appendAssumeCapacity(byte);
}
@@ -249,8 +249,8 @@ pub fn generateSymbol(
},
.aggregate => {
const elem_vals = typed_value.val.castTag(.aggregate).?.data;
const elem_ty = typed_value.ty.elemType();
const len = @intCast(usize, typed_value.ty.arrayLenIncludingSentinel());
const elem_ty = typed_value.ty.childType(mod);
const len = @intCast(usize, typed_value.ty.arrayLenIncludingSentinel(mod));
for (elem_vals[0..len]) |elem_val| {
switch (try generateSymbol(bin_file, src_loc, .{
.ty = elem_ty,
@@ -264,9 +264,9 @@ pub fn generateSymbol(
},
.repeated => {
const array = typed_value.val.castTag(.repeated).?.data;
const elem_ty = typed_value.ty.childType();
const sentinel = typed_value.ty.sentinel();
const len = typed_value.ty.arrayLen();
const elem_ty = typed_value.ty.childType(mod);
const sentinel = typed_value.ty.sentinel(mod);
const len = typed_value.ty.arrayLen(mod);

var index: u64 = 0;
while (index < len) : (index += 1) {
@@ -292,8 +292,8 @@ pub fn generateSymbol(
return Result.ok;
},
.empty_array_sentinel => {
const elem_ty = typed_value.ty.childType();
const sentinel_val = typed_value.ty.sentinel().?;
const elem_ty = typed_value.ty.childType(mod);
const sentinel_val = typed_value.ty.sentinel(mod).?;
switch (try generateSymbol(bin_file, src_loc, .{
.ty = elem_ty,
.val = sentinel_val,
@@ -618,8 +618,7 @@ pub fn generateSymbol(
return Result.ok;
},
.Optional => {
var opt_buf: Type.Payload.ElemType = undefined;
const payload_type = typed_value.ty.optionalChild(&opt_buf);
const payload_type = typed_value.ty.optionalChild(mod);
const is_pl = !typed_value.val.isNull(mod);
const abi_size = math.cast(usize, typed_value.ty.abiSize(mod)) orelse return error.Overflow;

@@ -751,7 +750,7 @@ pub fn generateSymbol(
.Vector => switch (typed_value.val.tag()) {
.bytes => {
const bytes = typed_value.val.castTag(.bytes).?.data;
const len = math.cast(usize, typed_value.ty.arrayLen()) orelse return error.Overflow;
const len = math.cast(usize, typed_value.ty.arrayLen(mod)) orelse return error.Overflow;
const padding = math.cast(usize, typed_value.ty.abiSize(mod) - len) orelse
return error.Overflow;
try code.ensureUnusedCapacity(len + padding);
@@ -761,8 +760,8 @@ pub fn generateSymbol(
},
.aggregate => {
const elem_vals = typed_value.val.castTag(.aggregate).?.data;
const elem_ty = typed_value.ty.elemType();
const len = math.cast(usize, typed_value.ty.arrayLen()) orelse return error.Overflow;
const elem_ty = typed_value.ty.childType(mod);
const len = math.cast(usize, typed_value.ty.arrayLen(mod)) orelse return error.Overflow;
const padding = math.cast(usize, typed_value.ty.abiSize(mod) -
(math.divCeil(u64, elem_ty.bitSize(mod) * len, 8) catch |err| switch (err) {
error.DivisionByZero => unreachable,
@@ -782,8 +781,8 @@ pub fn generateSymbol(
},
.repeated => {
const array = typed_value.val.castTag(.repeated).?.data;
const elem_ty = typed_value.ty.childType();
const len = typed_value.ty.arrayLen();
const elem_ty = typed_value.ty.childType(mod);
const len = typed_value.ty.arrayLen(mod);
const padding = math.cast(usize, typed_value.ty.abiSize(mod) -
(math.divCeil(u64, elem_ty.bitSize(mod) * len, 8) catch |err| switch (err) {
error.DivisionByZero => unreachable,
@@ -1188,7 +1187,7 @@ pub fn genTypedValue(

switch (typed_value.ty.zigTypeTag(mod)) {
.Void => return GenResult.mcv(.none),
.Pointer => switch (typed_value.ty.ptrSize()) {
.Pointer => switch (typed_value.ty.ptrSize(mod)) {
.Slice => {},
else => {
switch (typed_value.val.tag()) {
@@ -1219,9 +1218,8 @@ pub fn genTypedValue(
if (typed_value.ty.isPtrLikeOptional(mod)) {
if (typed_value.val.tag() == .null_value) return GenResult.mcv(.{ .immediate = 0 });

var buf: Type.Payload.ElemType = undefined;
return genTypedValue(bin_file, src_loc, .{
.ty = typed_value.ty.optionalChild(&buf),
.ty = typed_value.ty.optionalChild(mod),
.val = if (typed_value.val.castTag(.opt_payload)) |pl| pl.data else typed_value.val,
}, owner_decl_index);
} else if (typed_value.ty.abiSize(mod) == 1) {
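The Optional hunks in generateSymbol and genTypedValue show the same idea for optionals: the payload type used to be materialized into a caller-provided `Type.Payload.ElemType` buffer and is now looked up through the Module. A sketch, assuming `ty` is an optional type and `mod` is the Module:

    // Old: the caller supplies scratch storage for the child type.
    var opt_buf: Type.Payload.ElemType = undefined;
    const old_payload_ty = ty.optionalChild(&opt_buf);

    // New: the child type is read out of the InternPool.
    const payload_ty = ty.optionalChild(mod);

Dropping the buffer is what lets the diff delete the `var opt_buf`/`var buf` lines wherever this pattern appears.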
@@ -625,7 +625,9 @@ pub const DeclGen = struct {
// Ensure complete type definition is visible before accessing fields.
_ = try dg.typeToIndex(field_ptr.container_ty, .complete);

var container_ptr_pl = ptr_ty.ptrInfo();
var container_ptr_pl: Type.Payload.Pointer = .{
.data = ptr_ty.ptrInfo(mod),
};
container_ptr_pl.data.pointee_type = field_ptr.container_ty;
const container_ptr_ty = Type.initPayload(&container_ptr_pl.base);

@@ -653,7 +655,9 @@ pub const DeclGen = struct {
try dg.writeCValue(writer, field);
},
.byte_offset => |byte_offset| {
var u8_ptr_pl = ptr_ty.ptrInfo();
var u8_ptr_pl: Type.Payload.Pointer = .{
.data = ptr_ty.ptrInfo(mod),
};
u8_ptr_pl.data.pointee_type = Type.u8;
const u8_ptr_ty = Type.initPayload(&u8_ptr_pl.base);

@@ -692,11 +696,10 @@ pub const DeclGen = struct {
},
.elem_ptr => {
const elem_ptr = ptr_val.castTag(.elem_ptr).?.data;
var elem_ptr_ty_pl: Type.Payload.ElemType = .{
.base = .{ .tag = .c_mut_pointer },
.data = elem_ptr.elem_ty,
};
const elem_ptr_ty = Type.initPayload(&elem_ptr_ty_pl.base);
const elem_ptr_ty = try mod.ptrType(.{
.size = .C,
.elem_type = elem_ptr.elem_ty.ip_index,
});

try writer.writeAll("&(");
try dg.renderParentPtr(writer, elem_ptr.array_ptr, elem_ptr_ty, location);
@@ -704,11 +707,10 @@ pub const DeclGen = struct {
},
.opt_payload_ptr, .eu_payload_ptr => {
const payload_ptr = ptr_val.cast(Value.Payload.PayloadPtr).?.data;
var container_ptr_ty_pl: Type.Payload.ElemType = .{
.base = .{ .tag = .c_mut_pointer },
.data = payload_ptr.container_ty,
};
const container_ptr_ty = Type.initPayload(&container_ptr_ty_pl.base);
const container_ptr_ty = try mod.ptrType(.{
.elem_type = payload_ptr.container_ty.ip_index,
.size = .C,
});

// Ensure complete type definition is visible before accessing fields.
_ = try dg.typeToIndex(payload_ptr.container_ty, .complete);
@@ -794,8 +796,7 @@ pub const DeclGen = struct {
return writer.print("){x})", .{try dg.fmtIntLiteral(Type.usize, val, .Other)});
},
.Optional => {
var opt_buf: Type.Payload.ElemType = undefined;
const payload_ty = ty.optionalChild(&opt_buf);
const payload_ty = ty.optionalChild(mod);

if (!payload_ty.hasRuntimeBitsIgnoreComptime(mod)) {
return dg.renderValue(writer, Type.bool, val, location);
@@ -889,11 +890,11 @@ pub const DeclGen = struct {
return writer.writeAll(" }");
},
.Array, .Vector => {
const ai = ty.arrayInfo();
const ai = ty.arrayInfo(mod);
if (ai.elem_type.eql(Type.u8, dg.module)) {
var literal = stringLiteral(writer);
try literal.start();
const c_len = ty.arrayLenIncludingSentinel();
const c_len = ty.arrayLenIncludingSentinel(mod);
var index: u64 = 0;
while (index < c_len) : (index += 1)
try literal.writeChar(0xaa);
@@ -906,11 +907,11 @@ pub const DeclGen = struct {
}

try writer.writeByte('{');
const c_len = ty.arrayLenIncludingSentinel();
const c_len = ty.arrayLenIncludingSentinel(mod);
var index: u64 = 0;
while (index < c_len) : (index += 1) {
if (index > 0) try writer.writeAll(", ");
try dg.renderValue(writer, ty.childType(), val, initializer_type);
try dg.renderValue(writer, ty.childType(mod), val, initializer_type);
}
return writer.writeByte('}');
}
@@ -1110,7 +1111,7 @@ pub const DeclGen = struct {
// First try specific tag representations for more efficiency.
switch (val.tag()) {
.undef, .empty_struct_value, .empty_array => {
const ai = ty.arrayInfo();
const ai = ty.arrayInfo(mod);
try writer.writeByte('{');
if (ai.sentinel) |s| {
try dg.renderValue(writer, ai.elem_type, s, initializer_type);
@@ -1128,9 +1129,9 @@ pub const DeclGen = struct {
},
else => unreachable,
};
const sentinel = if (ty.sentinel()) |sentinel| @intCast(u8, sentinel.toUnsignedInt(mod)) else null;
const sentinel = if (ty.sentinel(mod)) |sentinel| @intCast(u8, sentinel.toUnsignedInt(mod)) else null;
try writer.print("{s}", .{
fmtStringLiteral(bytes[0..@intCast(usize, ty.arrayLen())], sentinel),
fmtStringLiteral(bytes[0..@intCast(usize, ty.arrayLen(mod))], sentinel),
});
},
else => {
@@ -1142,7 +1143,7 @@ pub const DeclGen = struct {
// MSVC throws C2078 if an array of size 65536 or greater is initialized with a string literal
const max_string_initializer_len = 65535;

const ai = ty.arrayInfo();
const ai = ty.arrayInfo(mod);
if (ai.elem_type.eql(Type.u8, dg.module)) {
if (ai.len <= max_string_initializer_len) {
var literal = stringLiteral(writer);
@@ -1198,8 +1199,7 @@ pub const DeclGen = struct {
}
},
.Optional => {
var opt_buf: Type.Payload.ElemType = undefined;
const payload_ty = ty.optionalChild(&opt_buf);
const payload_ty = ty.optionalChild(mod);

const is_null_val = Value.makeBool(val.tag() == .null_value);
if (!payload_ty.hasRuntimeBitsIgnoreComptime(mod))
@@ -2410,12 +2410,13 @@ pub fn genGlobalAsm(mod: *Module, writer: anytype) !void {
}

pub fn genErrDecls(o: *Object) !void {
const mod = o.dg.module;
const writer = o.writer();

try writer.writeAll("enum {\n");
o.indent_writer.pushIndent();
var max_name_len: usize = 0;
for (o.dg.module.error_name_list.items, 0..) |name, value| {
for (mod.error_name_list.items, 0..) |name, value| {
max_name_len = std.math.max(name.len, max_name_len);
var err_pl = Value.Payload.Error{ .data = .{ .name = name } };
try o.dg.renderValue(writer, Type.anyerror, Value.initPayload(&err_pl.base), .Other);
@@ -2430,12 +2431,15 @@ pub fn genErrDecls(o: *Object) !void {
defer o.dg.gpa.free(name_buf);

@memcpy(name_buf[0..name_prefix.len], name_prefix);
for (o.dg.module.error_name_list.items) |name| {
for (mod.error_name_list.items) |name| {
@memcpy(name_buf[name_prefix.len..][0..name.len], name);
const identifier = name_buf[0 .. name_prefix.len + name.len];

var name_ty_pl = Type.Payload.Len{ .base = .{ .tag = .array_u8_sentinel_0 }, .data = name.len };
const name_ty = Type.initPayload(&name_ty_pl.base);
const name_ty = try mod.arrayType(.{
.len = name.len,
.child = .u8_type,
.sentinel = .zero_u8,
});

var name_pl = Value.Payload.Bytes{ .base = .{ .tag = .bytes }, .data = name };
const name_val = Value.initPayload(&name_pl.base);
@@ -2448,15 +2452,15 @@ pub fn genErrDecls(o: *Object) !void {
}

var name_array_ty_pl = Type.Payload.Array{ .base = .{ .tag = .array }, .data = .{
.len = o.dg.module.error_name_list.items.len,
.elem_type = Type.initTag(.const_slice_u8_sentinel_0),
.len = mod.error_name_list.items.len,
.elem_type = Type.const_slice_u8_sentinel_0,
} };
const name_array_ty = Type.initPayload(&name_array_ty_pl.base);

try writer.writeAll("static ");
try o.dg.renderTypeAndName(writer, name_array_ty, .{ .identifier = array_identifier }, Const, 0, .complete);
try writer.writeAll(" = {");
for (o.dg.module.error_name_list.items, 0..) |name, value| {
for (mod.error_name_list.items, 0..) |name, value| {
if (value != 0) try writer.writeByte(',');

var len_pl = Value.Payload.U64{ .base = .{ .tag = .int_u64 }, .data = name.len };
@@ -2487,6 +2491,7 @@ fn genExports(o: *Object) !void {
}

pub fn genLazyFn(o: *Object, lazy_fn: LazyFnMap.Entry) !void {
const mod = o.dg.module;
const w = o.writer();
const key = lazy_fn.key_ptr.*;
const val = lazy_fn.value_ptr;
@@ -2495,7 +2500,7 @@ pub fn genLazyFn(o: *Object, lazy_fn: LazyFnMap.Entry) !void {
.tag_name => {
const enum_ty = val.data.tag_name;

const name_slice_ty = Type.initTag(.const_slice_u8_sentinel_0);
const name_slice_ty = Type.const_slice_u8_sentinel_0;

try w.writeAll("static ");
try o.dg.renderType(w, name_slice_ty);
@@ -2514,11 +2519,11 @@ pub fn genLazyFn(o: *Object, lazy_fn: LazyFnMap.Entry) !void {
var int_pl: Value.Payload.U64 = undefined;
const int_val = tag_val.enumToInt(enum_ty, &int_pl);

var name_ty_pl = Type.Payload.Len{
.base = .{ .tag = .array_u8_sentinel_0 },
.data = name.len,
};
const name_ty = Type.initPayload(&name_ty_pl.base);
const name_ty = try mod.arrayType(.{
.len = name.len,
.child = .u8_type,
.sentinel = .zero_u8,
});

var name_pl = Value.Payload.Bytes{ .base = .{ .tag = .bytes }, .data = name };
const name_val = Value.initPayload(&name_pl.base);
@@ -2547,7 +2552,7 @@ pub fn genLazyFn(o: *Object, lazy_fn: LazyFnMap.Entry) !void {
try w.writeAll("}\n");
},
.never_tail, .never_inline => |fn_decl_index| {
const fn_decl = o.dg.module.declPtr(fn_decl_index);
const fn_decl = mod.declPtr(fn_decl_index);
const fn_cty = try o.dg.typeToCType(fn_decl.ty, .complete);
const fn_info = fn_cty.cast(CType.Payload.Function).?.data;
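genErrDecls and genLazyFn above replace two more legacy encodings: `[len:0]u8` array types built from a `Type.Payload.Len`, and the `Type.initTag(.const_slice_u8_sentinel_0)` shorthand, which becomes the pre-interned constant `Type.const_slice_u8_sentinel_0`. A sketch of the array case, assuming `name` and `mod` as in the surrounding loop:

    // Old: a stack payload tagged .array_u8_sentinel_0.
    var name_ty_pl = Type.Payload.Len{ .base = .{ .tag = .array_u8_sentinel_0 }, .data = name.len };
    const old_name_ty = Type.initPayload(&name_ty_pl.base);

    // New: intern the array type through the Module.
    const name_ty = try mod.arrayType(.{
        .len = name.len,
        .child = .u8_type,
        .sentinel = .zero_u8,
    });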
@@ -3150,7 +3155,7 @@ fn airPtrElemPtr(f: *Function, inst: Air.Inst.Index) !CValue {

const inst_ty = f.typeOfIndex(inst);
const ptr_ty = f.typeOf(bin_op.lhs);
const elem_ty = ptr_ty.childType();
const elem_ty = ptr_ty.childType(mod);
const elem_has_bits = elem_ty.hasRuntimeBitsIgnoreComptime(mod);

const ptr = try f.resolveInst(bin_op.lhs);
@@ -3166,7 +3171,7 @@ fn airPtrElemPtr(f: *Function, inst: Air.Inst.Index) !CValue {
try f.renderType(writer, inst_ty);
try writer.writeByte(')');
if (elem_has_bits) try writer.writeByte('&');
if (elem_has_bits and ptr_ty.ptrSize() == .One) {
if (elem_has_bits and ptr_ty.ptrSize(mod) == .One) {
// It's a pointer to an array, so we need to de-reference.
try f.writeCValueDeref(writer, ptr);
} else try f.writeCValue(writer, ptr, .Other);
@@ -3264,7 +3269,7 @@ fn airArrayElemVal(f: *Function, inst: Air.Inst.Index) !CValue {
fn airAlloc(f: *Function, inst: Air.Inst.Index) !CValue {
const mod = f.object.dg.module;
const inst_ty = f.typeOfIndex(inst);
const elem_type = inst_ty.elemType();
const elem_type = inst_ty.childType(mod);
if (!elem_type.isFnOrHasRuntimeBitsIgnoreComptime(mod)) return .{ .undef = inst_ty };

const local = try f.allocLocalValue(
@@ -3280,7 +3285,7 @@ fn airAlloc(f: *Function, inst: Air.Inst.Index) !CValue {
fn airRetPtr(f: *Function, inst: Air.Inst.Index) !CValue {
const mod = f.object.dg.module;
const inst_ty = f.typeOfIndex(inst);
const elem_ty = inst_ty.elemType();
const elem_ty = inst_ty.childType(mod);
if (!elem_ty.isFnOrHasRuntimeBitsIgnoreComptime(mod)) return .{ .undef = inst_ty };

const local = try f.allocLocalValue(
@@ -3323,7 +3328,7 @@ fn airLoad(f: *Function, inst: Air.Inst.Index) !CValue {

const ptr_ty = f.typeOf(ty_op.operand);
const ptr_scalar_ty = ptr_ty.scalarType(mod);
const ptr_info = ptr_scalar_ty.ptrInfo().data;
const ptr_info = ptr_scalar_ty.ptrInfo(mod);
const src_ty = ptr_info.pointee_type;

if (!src_ty.hasRuntimeBitsIgnoreComptime(mod)) {
@@ -3412,7 +3417,7 @@ fn airRet(f: *Function, inst: Air.Inst.Index, is_ptr: bool) !CValue {
const writer = f.object.writer();
const op_inst = Air.refToIndex(un_op);
const op_ty = f.typeOf(un_op);
const ret_ty = if (is_ptr) op_ty.childType() else op_ty;
const ret_ty = if (is_ptr) op_ty.childType(mod) else op_ty;
var lowered_ret_buf: LowerFnRetTyBuffer = undefined;
const lowered_ret_ty = lowerFnRetTy(ret_ty, &lowered_ret_buf, mod);

@@ -3601,7 +3606,7 @@ fn airStore(f: *Function, inst: Air.Inst.Index, safety: bool) !CValue {

const ptr_ty = f.typeOf(bin_op.lhs);
const ptr_scalar_ty = ptr_ty.scalarType(mod);
const ptr_info = ptr_scalar_ty.ptrInfo().data;
const ptr_info = ptr_scalar_ty.ptrInfo(mod);

const ptr_val = try f.resolveInst(bin_op.lhs);
const src_ty = f.typeOf(bin_op.rhs);
@@ -4156,7 +4161,7 @@ fn airCall(
const callee_ty = f.typeOf(pl_op.operand);
const fn_ty = switch (callee_ty.zigTypeTag(mod)) {
.Fn => callee_ty,
.Pointer => callee_ty.childType(),
.Pointer => callee_ty.childType(mod),
else => unreachable,
};

@@ -4331,10 +4336,11 @@ fn airTry(f: *Function, inst: Air.Inst.Index) !CValue {
}

fn airTryPtr(f: *Function, inst: Air.Inst.Index) !CValue {
const mod = f.object.dg.module;
const ty_pl = f.air.instructions.items(.data)[inst].ty_pl;
const extra = f.air.extraData(Air.TryPtr, ty_pl.payload);
const body = f.air.extra[extra.end..][0..extra.data.body_len];
const err_union_ty = f.typeOf(extra.data.ptr).childType();
const err_union_ty = f.typeOf(extra.data.ptr).childType(mod);
return lowerTry(f, inst, extra.data.ptr, body, err_union_ty, true);
}
@@ -4826,7 +4832,7 @@ fn airAsm(f: *Function, inst: Air.Inst.Index) !CValue {

const is_reg = constraint[1] == '{';
if (is_reg) {
const output_ty = if (output == .none) inst_ty else f.typeOf(output).childType();
const output_ty = if (output == .none) inst_ty else f.typeOf(output).childType(mod);
try writer.writeAll("register ");
const alignment = 0;
const local_value = try f.allocLocalValue(output_ty, alignment);
@@ -5061,9 +5067,8 @@ fn airIsNull(
}

const operand_ty = f.typeOf(un_op);
const optional_ty = if (is_ptr) operand_ty.childType() else operand_ty;
var payload_buf: Type.Payload.ElemType = undefined;
const payload_ty = optional_ty.optionalChild(&payload_buf);
const optional_ty = if (is_ptr) operand_ty.childType(mod) else operand_ty;
const payload_ty = optional_ty.optionalChild(mod);
var slice_ptr_buf: Type.SlicePtrFieldTypeBuffer = undefined;

const rhs = if (!payload_ty.hasRuntimeBitsIgnoreComptime(mod))
@@ -5097,8 +5102,7 @@ fn airOptionalPayload(f: *Function, inst: Air.Inst.Index) !CValue {
try reap(f, inst, &.{ty_op.operand});
const opt_ty = f.typeOf(ty_op.operand);

var buf: Type.Payload.ElemType = undefined;
const payload_ty = opt_ty.optionalChild(&buf);
const payload_ty = opt_ty.optionalChild(mod);

if (!payload_ty.hasRuntimeBitsIgnoreComptime(mod)) {
return .none;
@@ -5132,10 +5136,10 @@ fn airOptionalPayloadPtr(f: *Function, inst: Air.Inst.Index) !CValue {
const operand = try f.resolveInst(ty_op.operand);
try reap(f, inst, &.{ty_op.operand});
const ptr_ty = f.typeOf(ty_op.operand);
const opt_ty = ptr_ty.childType();
const opt_ty = ptr_ty.childType(mod);
const inst_ty = f.typeOfIndex(inst);

if (!inst_ty.childType().hasRuntimeBitsIgnoreComptime(mod)) {
if (!inst_ty.childType(mod).hasRuntimeBitsIgnoreComptime(mod)) {
return .{ .undef = inst_ty };
}

@@ -5163,7 +5167,7 @@ fn airOptionalPayloadPtrSet(f: *Function, inst: Air.Inst.Index) !CValue {
try reap(f, inst, &.{ty_op.operand});
const operand_ty = f.typeOf(ty_op.operand);

const opt_ty = operand_ty.elemType();
const opt_ty = operand_ty.childType(mod);

const inst_ty = f.typeOfIndex(inst);

@@ -5221,7 +5225,7 @@ fn fieldLocation(
else
.{ .identifier = container_ty.structFieldName(next_field_index) } };
} else if (container_ty.hasRuntimeBitsIgnoreComptime(mod)) .end else .begin,
.Packed => if (field_ptr_ty.ptrInfo().data.host_size == 0)
.Packed => if (field_ptr_ty.ptrInfo(mod).host_size == 0)
.{ .byte_offset = container_ty.packedStructFieldByteOffset(field_index, mod) }
else
.begin,
@@ -5243,7 +5247,7 @@ fn fieldLocation(
},
.Packed => .begin,
},
.Pointer => switch (container_ty.ptrSize()) {
.Pointer => switch (container_ty.ptrSize(mod)) {
.Slice => switch (field_index) {
0 => .{ .field = .{ .identifier = "ptr" } },
1 => .{ .field = .{ .identifier = "len" } },
@@ -5280,7 +5284,7 @@ fn airFieldParentPtr(f: *Function, inst: Air.Inst.Index) !CValue {
const extra = f.air.extraData(Air.FieldParentPtr, ty_pl.payload).data;

const container_ptr_ty = f.typeOfIndex(inst);
const container_ty = container_ptr_ty.childType();
const container_ty = container_ptr_ty.childType(mod);

const field_ptr_ty = f.typeOf(extra.field_ptr);
const field_ptr_val = try f.resolveInst(extra.field_ptr);
@@ -5296,7 +5300,9 @@ fn airFieldParentPtr(f: *Function, inst: Air.Inst.Index) !CValue {
switch (fieldLocation(container_ty, field_ptr_ty, extra.field_index, mod)) {
.begin => try f.writeCValue(writer, field_ptr_val, .Initializer),
.field => |field| {
var u8_ptr_pl = field_ptr_ty.ptrInfo();
var u8_ptr_pl: Type.Payload.Pointer = .{
.data = field_ptr_ty.ptrInfo(mod),
};
u8_ptr_pl.data.pointee_type = Type.u8;
const u8_ptr_ty = Type.initPayload(&u8_ptr_pl.base);

@@ -5311,7 +5317,9 @@ fn airFieldParentPtr(f: *Function, inst: Air.Inst.Index) !CValue {
try writer.writeAll("))");
},
.byte_offset => |byte_offset| {
var u8_ptr_pl = field_ptr_ty.ptrInfo();
var u8_ptr_pl: Type.Payload.Pointer = .{
.data = field_ptr_ty.ptrInfo(mod),
};
u8_ptr_pl.data.pointee_type = Type.u8;
const u8_ptr_ty = Type.initPayload(&u8_ptr_pl.base);

@@ -5345,7 +5353,7 @@ fn fieldPtr(
field_index: u32,
) !CValue {
const mod = f.object.dg.module;
const container_ty = container_ptr_ty.elemType();
const container_ty = container_ptr_ty.childType(mod);
const field_ptr_ty = f.typeOfIndex(inst);

// Ensure complete type definition is visible before accessing fields.
@@ -5365,7 +5373,9 @@ fn fieldPtr(
try f.writeCValueDerefMember(writer, container_ptr_val, field);
},
.byte_offset => |byte_offset| {
var u8_ptr_pl = field_ptr_ty.ptrInfo();
var u8_ptr_pl: Type.Payload.Pointer = .{
.data = field_ptr_ty.ptrInfo(mod),
};
u8_ptr_pl.data.pointee_type = Type.u8;
const u8_ptr_ty = Type.initPayload(&u8_ptr_pl.base);
@@ -5532,7 +5542,7 @@ fn airUnwrapErrUnionErr(f: *Function, inst: Air.Inst.Index) !CValue {
try reap(f, inst, &.{ty_op.operand});

const operand_is_ptr = operand_ty.zigTypeTag(mod) == .Pointer;
const error_union_ty = if (operand_is_ptr) operand_ty.childType() else operand_ty;
const error_union_ty = if (operand_is_ptr) operand_ty.childType(mod) else operand_ty;
const error_ty = error_union_ty.errorUnionSet();
const payload_ty = error_union_ty.errorUnionPayload();
const local = try f.allocLocal(inst, inst_ty);
@@ -5569,7 +5579,7 @@ fn airUnwrapErrUnionPay(f: *Function, inst: Air.Inst.Index, is_ptr: bool) !CValu
const operand = try f.resolveInst(ty_op.operand);
try reap(f, inst, &.{ty_op.operand});
const operand_ty = f.typeOf(ty_op.operand);
const error_union_ty = if (is_ptr) operand_ty.childType() else operand_ty;
const error_union_ty = if (is_ptr) operand_ty.childType(mod) else operand_ty;

const writer = f.object.writer();
if (!error_union_ty.errorUnionPayload().hasRuntimeBits(mod)) {
@@ -5673,7 +5683,7 @@ fn airErrUnionPayloadPtrSet(f: *Function, inst: Air.Inst.Index) !CValue {
const writer = f.object.writer();
const ty_op = f.air.instructions.items(.data)[inst].ty_op;
const operand = try f.resolveInst(ty_op.operand);
const error_union_ty = f.typeOf(ty_op.operand).childType();
const error_union_ty = f.typeOf(ty_op.operand).childType(mod);

const error_ty = error_union_ty.errorUnionSet();
const payload_ty = error_union_ty.errorUnionPayload();
@@ -5761,7 +5771,7 @@ fn airIsErr(f: *Function, inst: Air.Inst.Index, is_ptr: bool, operator: []const
try reap(f, inst, &.{un_op});
const operand_ty = f.typeOf(un_op);
const local = try f.allocLocal(inst, Type.bool);
const err_union_ty = if (is_ptr) operand_ty.childType() else operand_ty;
const err_union_ty = if (is_ptr) operand_ty.childType(mod) else operand_ty;
const payload_ty = err_union_ty.errorUnionPayload();
const error_ty = err_union_ty.errorUnionSet();

@@ -5795,7 +5805,7 @@ fn airArrayToSlice(f: *Function, inst: Air.Inst.Index) !CValue {
const inst_ty = f.typeOfIndex(inst);
const writer = f.object.writer();
const local = try f.allocLocal(inst, inst_ty);
const array_ty = f.typeOf(ty_op.operand).childType();
const array_ty = f.typeOf(ty_op.operand).childType(mod);

try f.writeCValueMember(writer, local, .{ .identifier = "ptr" });
try writer.writeAll(" = ");
@@ -5811,7 +5821,7 @@ fn airArrayToSlice(f: *Function, inst: Air.Inst.Index) !CValue {
} else try f.writeCValue(writer, operand, .Initializer);
try writer.writeAll("; ");

const array_len = array_ty.arrayLen();
const array_len = array_ty.arrayLen(mod);
var len_pl: Value.Payload.U64 = .{ .base = .{ .tag = .int_u64 }, .data = array_len };
const len_val = Value.initPayload(&len_pl.base);
try f.writeCValueMember(writer, local, .{ .identifier = "len" });
@@ -6050,7 +6060,7 @@ fn airCmpxchg(f: *Function, inst: Air.Inst.Index, flavor: [*:0]const u8) !CValue
const expected_value = try f.resolveInst(extra.expected_value);
const new_value = try f.resolveInst(extra.new_value);
const ptr_ty = f.typeOf(extra.ptr);
const ty = ptr_ty.childType();
const ty = ptr_ty.childType(mod);

const writer = f.object.writer();
const new_value_mat = try Materialize.start(f, inst, writer, ty, new_value);
@@ -6152,7 +6162,7 @@ fn airAtomicRmw(f: *Function, inst: Air.Inst.Index) !CValue {
const extra = f.air.extraData(Air.AtomicRmw, pl_op.payload).data;
const inst_ty = f.typeOfIndex(inst);
const ptr_ty = f.typeOf(pl_op.operand);
const ty = ptr_ty.childType();
const ty = ptr_ty.childType(mod);
const ptr = try f.resolveInst(pl_op.operand);
const operand = try f.resolveInst(extra.operand);

@@ -6207,7 +6217,7 @@ fn airAtomicLoad(f: *Function, inst: Air.Inst.Index) !CValue {
const ptr = try f.resolveInst(atomic_load.ptr);
try reap(f, inst, &.{atomic_load.ptr});
const ptr_ty = f.typeOf(atomic_load.ptr);
const ty = ptr_ty.childType();
const ty = ptr_ty.childType(mod);

const repr_ty = if (ty.isRuntimeFloat())
mod.intType(.unsigned, @intCast(u16, ty.abiSize(mod) * 8)) catch unreachable
@@ -6241,7 +6251,7 @@ fn airAtomicStore(f: *Function, inst: Air.Inst.Index, order: [*:0]const u8) !CVa
const mod = f.object.dg.module;
const bin_op = f.air.instructions.items(.data)[inst].bin_op;
const ptr_ty = f.typeOf(bin_op.lhs);
const ty = ptr_ty.childType();
const ty = ptr_ty.childType(mod);
const ptr = try f.resolveInst(bin_op.lhs);
const element = try f.resolveInst(bin_op.rhs);
@@ -6299,7 +6309,7 @@ fn airMemset(f: *Function, inst: Air.Inst.Index, safety: bool) !CValue {
}

try writer.writeAll("memset(");
switch (dest_ty.ptrSize()) {
switch (dest_ty.ptrSize(mod)) {
.Slice => {
try f.writeCValueMember(writer, dest_slice, .{ .identifier = "ptr" });
try writer.writeAll(", 0xaa, ");
@@ -6311,8 +6321,8 @@ fn airMemset(f: *Function, inst: Air.Inst.Index, safety: bool) !CValue {
}
},
.One => {
const array_ty = dest_ty.childType();
const len = array_ty.arrayLen() * elem_abi_size;
const array_ty = dest_ty.childType(mod);
const len = array_ty.arrayLen(mod) * elem_abi_size;

try f.writeCValue(writer, dest_slice, .FunctionArgument);
try writer.print(", 0xaa, {d});\n", .{len});
@@ -6327,11 +6337,10 @@ fn airMemset(f: *Function, inst: Air.Inst.Index, safety: bool) !CValue {
// For the assignment in this loop, the array pointer needs to get
// casted to a regular pointer, otherwise an error like this occurs:
// error: array type 'uint32_t[20]' (aka 'unsigned int[20]') is not assignable
var elem_ptr_ty_pl: Type.Payload.ElemType = .{
.base = .{ .tag = .c_mut_pointer },
.data = elem_ty,
};
const elem_ptr_ty = Type.initPayload(&elem_ptr_ty_pl.base);
const elem_ptr_ty = try mod.ptrType(.{
.size = .C,
.elem_type = elem_ty.ip_index,
});

const index = try f.allocLocal(inst, Type.usize);

@@ -6342,13 +6351,13 @@ fn airMemset(f: *Function, inst: Air.Inst.Index, safety: bool) !CValue {
try writer.writeAll("; ");
try f.writeCValue(writer, index, .Other);
try writer.writeAll(" != ");
switch (dest_ty.ptrSize()) {
switch (dest_ty.ptrSize(mod)) {
.Slice => {
try f.writeCValueMember(writer, dest_slice, .{ .identifier = "len" });
},
.One => {
const array_ty = dest_ty.childType();
try writer.print("{d}", .{array_ty.arrayLen()});
const array_ty = dest_ty.childType(mod);
try writer.print("{d}", .{array_ty.arrayLen(mod)});
},
.Many, .C => unreachable,
}
@@ -6377,7 +6386,7 @@ fn airMemset(f: *Function, inst: Air.Inst.Index, safety: bool) !CValue {
const bitcasted = try bitcast(f, Type.u8, value, elem_ty);

try writer.writeAll("memset(");
switch (dest_ty.ptrSize()) {
switch (dest_ty.ptrSize(mod)) {
.Slice => {
try f.writeCValueMember(writer, dest_slice, .{ .identifier = "ptr" });
try writer.writeAll(", ");
@@ -6387,8 +6396,8 @@ fn airMemset(f: *Function, inst: Air.Inst.Index, safety: bool) !CValue {
try writer.writeAll(");\n");
},
.One => {
const array_ty = dest_ty.childType();
const len = array_ty.arrayLen() * elem_abi_size;
const array_ty = dest_ty.childType(mod);
const len = array_ty.arrayLen(mod) * elem_abi_size;

try f.writeCValue(writer, dest_slice, .FunctionArgument);
try writer.writeAll(", ");
@@ -6416,9 +6425,9 @@ fn airMemcpy(f: *Function, inst: Air.Inst.Index) !CValue {
try writer.writeAll(", ");
try writeSliceOrPtr(f, writer, src_ptr, src_ty);
try writer.writeAll(", ");
switch (dest_ty.ptrSize()) {
switch (dest_ty.ptrSize(mod)) {
.Slice => {
const elem_ty = dest_ty.childType();
const elem_ty = dest_ty.childType(mod);
const elem_abi_size = elem_ty.abiSize(mod);
try f.writeCValueMember(writer, dest_ptr, .{ .identifier = "len" });
if (elem_abi_size > 1) {
@@ -6428,10 +6437,10 @@ fn airMemcpy(f: *Function, inst: Air.Inst.Index) !CValue {
}
},
.One => {
const array_ty = dest_ty.childType();
const elem_ty = array_ty.childType();
const array_ty = dest_ty.childType(mod);
const elem_ty = array_ty.childType(mod);
const elem_abi_size = elem_ty.abiSize(mod);
const len = array_ty.arrayLen() * elem_abi_size;
const len = array_ty.arrayLen(mod) * elem_abi_size;
try writer.print("{d});\n", .{len});
},
.Many, .C => unreachable,
@@ -6448,7 +6457,7 @@ fn airSetUnionTag(f: *Function, inst: Air.Inst.Index) !CValue {
const new_tag = try f.resolveInst(bin_op.rhs);
try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });

const union_ty = f.typeOf(bin_op.lhs).childType();
const union_ty = f.typeOf(bin_op.lhs).childType(mod);
const layout = union_ty.unionGetLayout(mod);
if (layout.tag_size == 0) return .none;
const tag_ty = union_ty.unionTagTypeSafety().?;
@@ -6777,7 +6786,7 @@ fn airAggregateInit(f: *Function, inst: Air.Inst.Index) !CValue {
const mod = f.object.dg.module;
const ty_pl = f.air.instructions.items(.data)[inst].ty_pl;
const inst_ty = f.typeOfIndex(inst);
const len = @intCast(usize, inst_ty.arrayLen());
const len = @intCast(usize, inst_ty.arrayLen(mod));
const elements = @ptrCast([]const Air.Inst.Ref, f.air.extra[ty_pl.payload..][0..len]);
const gpa = f.object.dg.gpa;
const resolved_elements = try gpa.alloc(CValue, elements.len);
@@ -6796,7 +6805,7 @@ fn airAggregateInit(f: *Function, inst: Air.Inst.Index) !CValue {
const local = try f.allocLocal(inst, inst_ty);
switch (inst_ty.zigTypeTag(mod)) {
.Array, .Vector => {
const elem_ty = inst_ty.childType();
const elem_ty = inst_ty.childType(mod);
const a = try Assignment.init(f, elem_ty);
for (resolved_elements, 0..) |element, i| {
try a.restart(f, writer);
@@ -6806,7 +6815,7 @@ fn airAggregateInit(f: *Function, inst: Air.Inst.Index) !CValue {
try f.writeCValue(writer, element, .Other);
try a.end(f, writer);
}
if (inst_ty.sentinel()) |sentinel| {
if (inst_ty.sentinel(mod)) |sentinel| {
try a.restart(f, writer);
try f.writeCValue(writer, local, .Other);
try writer.print("[{d}]", .{resolved_elements.len});
@@ -7708,7 +7717,7 @@ const Vectorize = struct {
pub fn start(f: *Function, inst: Air.Inst.Index, writer: anytype, ty: Type) !Vectorize {
const mod = f.object.dg.module;
return if (ty.zigTypeTag(mod) == .Vector) index: {
var len_pl = Value.Payload.U64{ .base = .{ .tag = .int_u64 }, .data = ty.vectorLen() };
var len_pl = Value.Payload.U64{ .base = .{ .tag = .int_u64 }, .data = ty.vectorLen(mod) };

const local = try f.allocLocal(inst, Type.usize);
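The C backend's airMemset above routes `[*c]T` through the InternPool as well; here the key is built from the element type's intern-pool index. A sketch, assuming `elem_ty` and `mod` are in scope as in the surrounding loop body:

    // Old: [*c]T spelled as a c_mut_pointer payload on the stack.
    var elem_ptr_ty_pl: Type.Payload.ElemType = .{
        .base = .{ .tag = .c_mut_pointer },
        .data = elem_ty,
    };
    const old_elem_ptr_ty = Type.initPayload(&elem_ptr_ty_pl.base);

    // New: ask the Module for the interned pointer type, keyed by ip_index.
    const elem_ptr_ty = try mod.ptrType(.{
        .size = .C,
        .elem_type = elem_ty.ip_index,
    });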
@@ -1423,7 +1423,7 @@ pub const CType = extern union {
}),

.Pointer => {
const info = ty.ptrInfo().data;
const info = ty.ptrInfo(mod);
switch (info.size) {
.Slice => {
if (switch (kind) {
@@ -1625,9 +1625,9 @@ pub const CType = extern union {
.Vector => .vector,
else => unreachable,
};
if (try lookup.typeToIndex(ty.childType(), kind)) |child_idx| {
if (try lookup.typeToIndex(ty.childType(mod), kind)) |child_idx| {
self.storage = .{ .seq = .{ .base = .{ .tag = t }, .data = .{
.len = ty.arrayLenIncludingSentinel(),
.len = ty.arrayLenIncludingSentinel(mod),
.elem_type = child_idx,
} } };
self.value = .{ .cty = initPayload(&self.storage.seq) };
@@ -1639,8 +1639,7 @@ pub const CType = extern union {
},

.Optional => {
var buf: Type.Payload.ElemType = undefined;
const payload_ty = ty.optionalChild(&buf);
const payload_ty = ty.optionalChild(mod);
if (payload_ty.hasRuntimeBitsIgnoreComptime(mod)) {
if (ty.optionalReprIsPayload(mod)) {
try self.initType(payload_ty, kind, lookup);
File diff suppressed because it is too large
@@ -625,20 +625,20 @@ pub const DeclGen = struct {
.Array => switch (val.tag()) {
.aggregate => {
const elem_vals = val.castTag(.aggregate).?.data;
const elem_ty = ty.elemType();
const len = @intCast(u32, ty.arrayLenIncludingSentinel()); // TODO: limit spir-v to 32 bit arrays in a more elegant way.
const elem_ty = ty.childType(mod);
const len = @intCast(u32, ty.arrayLenIncludingSentinel(mod)); // TODO: limit spir-v to 32 bit arrays in a more elegant way.
for (elem_vals[0..len]) |elem_val| {
try self.lower(elem_ty, elem_val);
}
},
.repeated => {
const elem_val = val.castTag(.repeated).?.data;
const elem_ty = ty.elemType();
const len = @intCast(u32, ty.arrayLen());
const elem_ty = ty.childType(mod);
const len = @intCast(u32, ty.arrayLen(mod));
for (0..len) |_| {
try self.lower(elem_ty, elem_val);
}
if (ty.sentinel()) |sentinel| {
if (ty.sentinel(mod)) |sentinel| {
try self.lower(elem_ty, sentinel);
}
},
@@ -646,7 +646,7 @@ pub const DeclGen = struct {
const str_lit = val.castTag(.str_lit).?.data;
const bytes = dg.module.string_literal_bytes.items[str_lit.index..][0..str_lit.len];
try self.addBytes(bytes);
if (ty.sentinel()) |sentinel| {
if (ty.sentinel(mod)) |sentinel| {
try self.addByte(@intCast(u8, sentinel.toUnsignedInt(mod)));
}
},
@@ -706,8 +706,7 @@ pub const DeclGen = struct {
}
},
.Optional => {
var opt_buf: Type.Payload.ElemType = undefined;
const payload_ty = ty.optionalChild(&opt_buf);
const payload_ty = ty.optionalChild(mod);
const has_payload = !val.isNull(mod);
const abi_size = ty.abiSize(mod);

@@ -1216,10 +1215,10 @@ pub const DeclGen = struct {
return try self.spv.resolve(.{ .float_type = .{ .bits = bits } });
},
.Array => {
const elem_ty = ty.childType();
const elem_ty = ty.childType(mod);
const elem_ty_ref = try self.resolveType(elem_ty, .direct);
const total_len = std.math.cast(u32, ty.arrayLenIncludingSentinel()) orelse {
return self.fail("array type of {} elements is too large", .{ty.arrayLenIncludingSentinel()});
const total_len = std.math.cast(u32, ty.arrayLenIncludingSentinel(mod)) orelse {
return self.fail("array type of {} elements is too large", .{ty.arrayLenIncludingSentinel(mod)});
};
return self.spv.arrayType(total_len, elem_ty_ref);
},
@@ -1248,7 +1247,7 @@ pub const DeclGen = struct {
},
},
.Pointer => {
const ptr_info = ty.ptrInfo().data;
const ptr_info = ty.ptrInfo(mod);

const storage_class = spvStorageClass(ptr_info.@"addrspace");
const child_ty_ref = try self.resolveType(ptr_info.pointee_type, .indirect);
@@ -1280,8 +1279,8 @@ pub const DeclGen = struct {
// TODO: Properly verify sizes and child type.

return try self.spv.resolve(.{ .vector_type = .{
.component_type = try self.resolveType(ty.elemType(), repr),
.component_count = @intCast(u32, ty.vectorLen()),
.component_type = try self.resolveType(ty.childType(mod), repr),
.component_count = @intCast(u32, ty.vectorLen(mod)),
} });
},
.Struct => {
@@ -1335,8 +1334,7 @@ pub const DeclGen = struct {
} });
},
.Optional => {
var buf: Type.Payload.ElemType = undefined;
const payload_ty = ty.optionalChild(&buf);
const payload_ty = ty.optionalChild(mod);
if (!payload_ty.hasRuntimeBitsIgnoreComptime(mod)) {
// Just use a bool.
// Note: Always generate the bool with indirect format, to save on some sanity
@@ -1685,7 +1683,8 @@ pub const DeclGen = struct {
}

fn load(self: *DeclGen, ptr_ty: Type, ptr_id: IdRef) !IdRef {
const value_ty = ptr_ty.childType();
const mod = self.module;
const value_ty = ptr_ty.childType(mod);
const indirect_value_ty_ref = try self.resolveType(value_ty, .indirect);
const result_id = self.spv.allocId();
const access = spec.MemoryAccess.Extended{
@@ -1701,7 +1700,8 @@ pub const DeclGen = struct {
}

fn store(self: *DeclGen, ptr_ty: Type, ptr_id: IdRef, value_id: IdRef) !void {
const value_ty = ptr_ty.childType();
const mod = self.module;
const value_ty = ptr_ty.childType(mod);
const indirect_value_id = try self.convertToIndirect(value_ty, value_id);
const access = spec.MemoryAccess.Extended{
.Volatile = ptr_ty.isVolatilePtr(),
@@ -2072,7 +2072,7 @@ pub const DeclGen = struct {
const b = try self.resolve(extra.b);
const mask = self.air.values[extra.mask];
const mask_len = extra.mask_len;
const a_len = self.typeOf(extra.a).vectorLen();
const a_len = self.typeOf(extra.a).vectorLen(mod);

const result_id = self.spv.allocId();
const result_type_id = try self.resolveTypeId(ty);
@@ -2138,9 +2138,10 @@ pub const DeclGen = struct {
}
fn ptrAdd(self: *DeclGen, result_ty: Type, ptr_ty: Type, ptr_id: IdRef, offset_id: IdRef) !IdRef {
|
||||
const mod = self.module;
|
||||
const result_ty_ref = try self.resolveType(result_ty, .direct);
|
||||
|
||||
switch (ptr_ty.ptrSize()) {
|
||||
switch (ptr_ty.ptrSize(mod)) {
|
||||
.One => {
|
||||
// Pointer to array
|
||||
// TODO: Is this correct?
|
||||
@ -2498,7 +2499,7 @@ pub const DeclGen = struct {
|
||||
// Construct new pointer type for the resulting pointer
|
||||
const elem_ty = ptr_ty.elemType2(mod); // use elemType() so that we get T for *[N]T.
|
||||
const elem_ty_ref = try self.resolveType(elem_ty, .direct);
|
||||
const elem_ptr_ty_ref = try self.spv.ptrType(elem_ty_ref, spvStorageClass(ptr_ty.ptrAddressSpace()));
|
||||
const elem_ptr_ty_ref = try self.spv.ptrType(elem_ty_ref, spvStorageClass(ptr_ty.ptrAddressSpace(mod)));
|
||||
if (ptr_ty.isSinglePointer(mod)) {
|
||||
// Pointer-to-array. In this case, the resulting pointer is not of the same type
|
||||
// as the ptr_ty (we want a *T, not a *[N]T), and hence we need to use accessChain.
|
||||
@ -2516,7 +2517,7 @@ pub const DeclGen = struct {
|
||||
const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
|
||||
const bin_op = self.air.extraData(Air.Bin, ty_pl.payload).data;
|
||||
const ptr_ty = self.typeOf(bin_op.lhs);
|
||||
const elem_ty = ptr_ty.childType();
|
||||
const elem_ty = ptr_ty.childType(mod);
|
||||
// TODO: Make this return a null ptr or something
|
||||
if (!elem_ty.hasRuntimeBitsIgnoreComptime(mod)) return null;
|
||||
|
||||
@ -2526,6 +2527,7 @@ pub const DeclGen = struct {
|
||||
}
|
||||
|
||||
fn airPtrElemVal(self: *DeclGen, inst: Air.Inst.Index) !?IdRef {
|
||||
const mod = self.module;
|
||||
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
|
||||
const ptr_ty = self.typeOf(bin_op.lhs);
|
||||
const ptr_id = try self.resolve(bin_op.lhs);
|
||||
@ -2536,9 +2538,9 @@ pub const DeclGen = struct {
|
||||
// If we have a pointer-to-array, construct an element pointer to use with load()
|
||||
// If we pass ptr_ty directly, it will attempt to load the entire array rather than
|
||||
// just an element.
|
||||
var elem_ptr_info = ptr_ty.ptrInfo();
|
||||
elem_ptr_info.data.size = .One;
|
||||
const elem_ptr_ty = Type.initPayload(&elem_ptr_info.base);
|
||||
var elem_ptr_info = ptr_ty.ptrInfo(mod);
|
||||
elem_ptr_info.size = .One;
|
||||
const elem_ptr_ty = try Type.ptr(undefined, mod, elem_ptr_info);
|
||||
|
||||
return try self.load(elem_ptr_ty, elem_ptr_id);
|
||||
}
|
||||
@ -2586,7 +2588,7 @@ pub const DeclGen = struct {
|
||||
field_index: u32,
|
||||
) !?IdRef {
|
||||
const mod = self.module;
|
||||
const object_ty = object_ptr_ty.childType();
|
||||
const object_ty = object_ptr_ty.childType(mod);
|
||||
switch (object_ty.zigTypeTag(mod)) {
|
||||
.Struct => switch (object_ty.containerLayout()) {
|
||||
.Packed => unreachable, // TODO
|
||||
@ -2662,9 +2664,10 @@ pub const DeclGen = struct {
|
||||
|
||||
fn airAlloc(self: *DeclGen, inst: Air.Inst.Index) !?IdRef {
|
||||
if (self.liveness.isUnused(inst)) return null;
|
||||
const mod = self.module;
|
||||
const ptr_ty = self.typeOfIndex(inst);
|
||||
assert(ptr_ty.ptrAddressSpace() == .generic);
|
||||
const child_ty = ptr_ty.childType();
|
||||
assert(ptr_ty.ptrAddressSpace(mod) == .generic);
|
||||
const child_ty = ptr_ty.childType(mod);
|
||||
const child_ty_ref = try self.resolveType(child_ty, .indirect);
|
||||
return try self.alloc(child_ty_ref, null);
|
||||
}
|
||||
@ -2834,7 +2837,7 @@ pub const DeclGen = struct {
|
||||
const mod = self.module;
|
||||
const un_op = self.air.instructions.items(.data)[inst].un_op;
|
||||
const ptr_ty = self.typeOf(un_op);
|
||||
const ret_ty = ptr_ty.childType();
|
||||
const ret_ty = ptr_ty.childType(mod);
|
||||
|
||||
if (!ret_ty.hasRuntimeBitsIgnoreComptime(mod)) {
|
||||
try self.func.body.emit(self.spv.gpa, .OpReturn, {});
|
||||
@ -2971,8 +2974,7 @@ pub const DeclGen = struct {
|
||||
const operand_id = try self.resolve(un_op);
|
||||
const optional_ty = self.typeOf(un_op);
|
||||
|
||||
var buf: Type.Payload.ElemType = undefined;
|
||||
const payload_ty = optional_ty.optionalChild(&buf);
|
||||
const payload_ty = optional_ty.optionalChild(mod);
|
||||
|
||||
const bool_ty_ref = try self.resolveType(Type.bool, .direct);
|
||||
|
||||
|
||||
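The pattern running through this backend is mechanical: type queries such as childType, ptrSize, ptrInfo, and optionalChild now take the Module (which owns the InternPool), and the Type.Payload.ElemType scratch buffers disappear, because a child type is just another interned index rather than a payload hanging off the Type value. A minimal, hypothetical sketch of that shape, with made-up names rather than the compiler's real API:

const std = @import("std");

// Illustrative only: an interned type is an index, and any structural
// query has to go through the pool that owns the encodings.
const Index = u32;

const Pool = struct {
    // children[i] is the child type of type i (0 stands for "none").
    children: []const Index,

    fn childType(pool: Pool, ty: Index) Index {
        // The Index alone carries no payload; the pool does.
        return pool.children[ty];
    }
};

pub fn main() void {
    const pool = Pool{ .children = &.{ 0, 0, 1 } };
    const ptr_ty: Index = 2;
    std.debug.print("child of type {d} is type {d}\n", .{ ptr_ty, pool.childType(ptr_ty) });
}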
@ -11,7 +11,8 @@ const std = @import("std");
const Allocator = std.mem.Allocator;
const assert = std.debug.assert;

const ZigDecl = @import("../../Module.zig").Decl;
const ZigModule = @import("../../Module.zig");
const ZigDecl = ZigModule.Decl;

const spec = @import("spec.zig");
const Word = spec.Word;

@ -219,8 +219,7 @@ pub const DeclState = struct {
try dbg_info_buffer.writer().print("{}\x00", .{ty.fmt(mod)});
} else {
// Non-pointer optionals are structs: struct { .maybe = *, .val = * }
var buf = try arena.create(Type.Payload.ElemType);
const payload_ty = ty.optionalChild(buf);
const payload_ty = ty.optionalChild(mod);
// DW.AT.structure_type
try dbg_info_buffer.append(@enumToInt(AbbrevKind.struct_type));
// DW.AT.byte_size, DW.FORM.udata
@ -304,7 +303,7 @@ pub const DeclState = struct {
// DW.AT.type, DW.FORM.ref4
const index = dbg_info_buffer.items.len;
try dbg_info_buffer.resize(index + 4);
try self.addTypeRelocGlobal(atom_index, ty.childType(), @intCast(u32, index));
try self.addTypeRelocGlobal(atom_index, ty.childType(mod), @intCast(u32, index));
}
},
.Array => {
@ -315,7 +314,7 @@ pub const DeclState = struct {
// DW.AT.type, DW.FORM.ref4
var index = dbg_info_buffer.items.len;
try dbg_info_buffer.resize(index + 4);
try self.addTypeRelocGlobal(atom_index, ty.childType(), @intCast(u32, index));
try self.addTypeRelocGlobal(atom_index, ty.childType(mod), @intCast(u32, index));
// DW.AT.subrange_type
try dbg_info_buffer.append(@enumToInt(AbbrevKind.array_dim));
// DW.AT.type, DW.FORM.ref4
@ -323,7 +322,7 @@ pub const DeclState = struct {
try dbg_info_buffer.resize(index + 4);
try self.addTypeRelocGlobal(atom_index, Type.usize, @intCast(u32, index));
// DW.AT.count, DW.FORM.udata
const len = ty.arrayLenIncludingSentinel();
const len = ty.arrayLenIncludingSentinel(mod);
try leb128.writeULEB128(dbg_info_buffer.writer(), len);
// DW.AT.array_type delimit children
try dbg_info_buffer.append(0);
@ -688,7 +687,7 @@ pub const DeclState = struct {
const mod = self.mod;
const target = mod.getTarget();
const endian = target.cpu.arch.endian();
const child_ty = if (is_ptr) ty.childType() else ty;
const child_ty = if (is_ptr) ty.childType(mod) else ty;

switch (loc) {
.register => |reg| {

@ -2931,7 +2931,7 @@ pub fn getErrorTableSymbol(wasm: *Wasm) !u32 {

const atom_index = try wasm.createAtom();
const atom = wasm.getAtomPtr(atom_index);
const slice_ty = Type.initTag(.const_slice_u8_sentinel_0);
const slice_ty = Type.const_slice_u8_sentinel_0;
const mod = wasm.base.options.module.?;
atom.alignment = slice_ty.abiAlignment(mod);
const sym_index = atom.sym_index;
@ -2988,7 +2988,7 @@ fn populateErrorNameTable(wasm: *Wasm) !void {
for (mod.error_name_list.items) |error_name| {
const len = @intCast(u32, error_name.len + 1); // names are 0-termianted

const slice_ty = Type.initTag(.const_slice_u8_sentinel_0);
const slice_ty = Type.const_slice_u8_sentinel_0;
const offset = @intCast(u32, atom.code.items.len);
// first we create the data for the slice of the name
try atom.code.appendNTimes(wasm.base.allocator, 0, 4); // ptr to name, will be relocated

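The linker changes above swap Type.initTag(.const_slice_u8_sentinel_0) for the pre-interned constant Type.const_slice_u8_sentinel_0. The idea, sketched loosely here with invented names, is that frequently used types occupy fixed slots in the pool, so call sites can reference them as plain constants with no allocation:

const std = @import("std");

// Illustrative sketch: well-known types get reserved indices so code can
// name them directly instead of constructing a tagged payload.
const TypeIndex = enum(u32) {
    u8_type,
    const_slice_u8_sentinel_0,
    _, // everything else is interned at runtime
};

fn abiAlignment(ty: TypeIndex) u32 {
    return switch (ty) {
        .u8_type => 1,
        .const_slice_u8_sentinel_0 => @alignOf(usize), // a slice is (ptr, len)
        _ => 1, // placeholder for runtime-interned types
    };
}

pub fn main() void {
    std.debug.print("alignment: {d}\n", .{abiAlignment(.const_slice_u8_sentinel_0)});
}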
@ -433,9 +433,10 @@ const Writer = struct {
}

fn writeAggregateInit(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void {
const mod = w.module;
const ty_pl = w.air.instructions.items(.data)[inst].ty_pl;
const vector_ty = w.air.getRefType(ty_pl.ty);
const len = @intCast(usize, vector_ty.arrayLen());
const len = @intCast(usize, vector_ty.arrayLen(mod));
const elements = @ptrCast([]const Air.Inst.Ref, w.air.extra[ty_pl.payload..][0..len]);

try w.writeType(s, vector_ty);
@ -512,10 +513,11 @@ const Writer = struct {
}

fn writeSelect(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void {
const mod = w.module;
const pl_op = w.air.instructions.items(.data)[inst].pl_op;
const extra = w.air.extraData(Air.Bin, pl_op.payload).data;

const elem_ty = w.typeOfIndex(inst).childType();
const elem_ty = w.typeOfIndex(inst).childType(mod);
try w.writeType(s, elem_ty);
try s.writeAll(", ");
try w.writeOperand(s, inst, 0, pl_op.operand);

1770
src/type.zig
1770
src/type.zig
File diff suppressed because it is too large
208
src/value.zig
208
src/value.zig
@ -33,14 +33,6 @@ pub const Value = struct {
// Keep in sync with tools/stage2_pretty_printers_common.py
pub const Tag = enum(usize) {
// The first section of this enum are tags that require no payload.
manyptr_u8_type,
manyptr_const_u8_type,
manyptr_const_u8_sentinel_0_type,
single_const_pointer_to_comptime_int_type,
const_slice_u8_type,
const_slice_u8_sentinel_0_type,
anyerror_void_error_union_type,

undef,
zero,
one,
@ -140,11 +132,6 @@ pub const Value = struct {

pub fn Type(comptime t: Tag) type {
return switch (t) {
.single_const_pointer_to_comptime_int_type,
.const_slice_u8_type,
.const_slice_u8_sentinel_0_type,
.anyerror_void_error_union_type,

.undef,
.zero,
.one,
@ -153,9 +140,6 @@ pub const Value = struct {
.empty_struct_value,
.empty_array,
.null_value,
.manyptr_u8_type,
.manyptr_const_u8_type,
.manyptr_const_u8_sentinel_0_type,
=> @compileError("Value Tag " ++ @tagName(t) ++ " has no payload"),

.int_big_positive,
@ -280,9 +264,7 @@ pub const Value = struct {
}

pub fn castTag(self: Value, comptime t: Tag) ?*t.Type() {
if (self.ip_index != .none) {
return null;
}
assert(self.ip_index == .none);

if (@enumToInt(self.legacy.tag_if_small_enough) < Tag.no_payload_count)
return null;
@ -305,11 +287,6 @@ pub const Value = struct {
.legacy = .{ .tag_if_small_enough = self.legacy.tag_if_small_enough },
};
} else switch (self.legacy.ptr_otherwise.tag) {
.single_const_pointer_to_comptime_int_type,
.const_slice_u8_type,
.const_slice_u8_sentinel_0_type,
.anyerror_void_error_union_type,

.undef,
.zero,
.one,
@ -318,9 +295,6 @@ pub const Value = struct {
.empty_array,
.null_value,
.empty_struct_value,
.manyptr_u8_type,
.manyptr_const_u8_type,
.manyptr_const_u8_sentinel_0_type,
=> unreachable,

.ty, .lazy_align, .lazy_size => {
@ -553,14 +527,6 @@ pub const Value = struct {
}
var val = start_val;
while (true) switch (val.tag()) {
.single_const_pointer_to_comptime_int_type => return out_stream.writeAll("*const comptime_int"),
.const_slice_u8_type => return out_stream.writeAll("[]const u8"),
.const_slice_u8_sentinel_0_type => return out_stream.writeAll("[:0]const u8"),
.anyerror_void_error_union_type => return out_stream.writeAll("anyerror!void"),
.manyptr_u8_type => return out_stream.writeAll("[*]u8"),
.manyptr_const_u8_type => return out_stream.writeAll("[*]const u8"),
.manyptr_const_u8_sentinel_0_type => return out_stream.writeAll("[*:0]const u8"),

.empty_struct_value => return out_stream.writeAll("struct {}{}"),
.aggregate => {
return out_stream.writeAll("(aggregate)");
@ -674,7 +640,7 @@ pub const Value = struct {
switch (val.tag()) {
.bytes => {
const bytes = val.castTag(.bytes).?.data;
const adjusted_len = bytes.len - @boolToInt(ty.sentinel() != null);
const adjusted_len = bytes.len - @boolToInt(ty.sentinel(mod) != null);
const adjusted_bytes = bytes[0..adjusted_len];
return allocator.dupe(u8, adjusted_bytes);
},
@ -686,7 +652,7 @@ pub const Value = struct {
.enum_literal => return allocator.dupe(u8, val.castTag(.enum_literal).?.data),
.repeated => {
const byte = @intCast(u8, val.castTag(.repeated).?.data.toUnsignedInt(mod));
const result = try allocator.alloc(u8, @intCast(usize, ty.arrayLen()));
const result = try allocator.alloc(u8, @intCast(usize, ty.arrayLen(mod)));
@memset(result, byte);
return result;
},
@ -701,7 +667,7 @@ pub const Value = struct {
const slice = val.castTag(.slice).?.data;
return arrayToAllocatedBytes(slice.ptr, slice.len.toUnsignedInt(mod), allocator, mod);
},
else => return arrayToAllocatedBytes(val, ty.arrayLen(), allocator, mod),
else => return arrayToAllocatedBytes(val, ty.arrayLen(mod), allocator, mod),
}
}

@ -720,13 +686,6 @@ pub const Value = struct {
if (self.ip_index != .none) return self.ip_index.toType();
return switch (self.tag()) {
.ty => self.castTag(.ty).?.data,
.single_const_pointer_to_comptime_int_type => Type.initTag(.single_const_pointer_to_comptime_int),
.const_slice_u8_type => Type.initTag(.const_slice_u8),
.const_slice_u8_sentinel_0_type => Type.initTag(.const_slice_u8_sentinel_0),
.anyerror_void_error_union_type => Type.initTag(.anyerror_void_error_union),
.manyptr_u8_type => Type.initTag(.manyptr_u8),
.manyptr_const_u8_type => Type.initTag(.manyptr_const_u8),
.manyptr_const_u8_sentinel_0_type => Type.initTag(.manyptr_const_u8_sentinel_0),

else => unreachable,
};
@ -1096,8 +1055,8 @@ pub const Value = struct {
else => unreachable,
},
.Array => {
const len = ty.arrayLen();
const elem_ty = ty.childType();
const len = ty.arrayLen(mod);
const elem_ty = ty.childType(mod);
const elem_size = @intCast(usize, elem_ty.abiSize(mod));
var elem_i: usize = 0;
var elem_value_buf: ElemValueBuffer = undefined;
@ -1150,8 +1109,7 @@ pub const Value = struct {
},
.Optional => {
if (!ty.isPtrLikeOptional(mod)) return error.IllDefinedMemoryLayout;
var buf: Type.Payload.ElemType = undefined;
const child = ty.optionalChild(&buf);
const child = ty.optionalChild(mod);
const opt_val = val.optionalValue(mod);
if (opt_val) |some| {
return some.writeToMemory(child, mod, buffer);
@ -1220,9 +1178,9 @@ pub const Value = struct {
else => unreachable,
},
.Vector => {
const elem_ty = ty.childType();
const elem_ty = ty.childType(mod);
const elem_bit_size = @intCast(u16, elem_ty.bitSize(mod));
const len = @intCast(usize, ty.arrayLen());
const len = @intCast(usize, ty.arrayLen(mod));

var bits: u16 = 0;
var elem_i: usize = 0;
@ -1267,8 +1225,7 @@ pub const Value = struct {
},
.Optional => {
assert(ty.isPtrLikeOptional(mod));
var buf: Type.Payload.ElemType = undefined;
const child = ty.optionalChild(&buf);
const child = ty.optionalChild(mod);
const opt_val = val.optionalValue(mod);
if (opt_val) |some| {
return some.writeToPackedMemory(child, mod, buffer, bit_offset);
@ -1335,9 +1292,9 @@ pub const Value = struct {
else => unreachable,
},
.Array => {
const elem_ty = ty.childType();
const elem_ty = ty.childType(mod);
const elem_size = elem_ty.abiSize(mod);
const elems = try arena.alloc(Value, @intCast(usize, ty.arrayLen()));
const elems = try arena.alloc(Value, @intCast(usize, ty.arrayLen(mod)));
var offset: usize = 0;
for (elems) |*elem| {
elem.* = try readFromMemory(elem_ty, mod, buffer[offset..], arena);
@ -1386,8 +1343,7 @@ pub const Value = struct {
},
.Optional => {
assert(ty.isPtrLikeOptional(mod));
var buf: Type.Payload.ElemType = undefined;
const child = ty.optionalChild(&buf);
const child = ty.optionalChild(mod);
return readFromMemory(child, mod, buffer, arena);
},
else => @panic("TODO implement readFromMemory for more types"),
@ -1449,8 +1405,8 @@ pub const Value = struct {
else => unreachable,
},
.Vector => {
const elem_ty = ty.childType();
const elems = try arena.alloc(Value, @intCast(usize, ty.arrayLen()));
const elem_ty = ty.childType(mod);
const elems = try arena.alloc(Value, @intCast(usize, ty.arrayLen(mod)));

var bits: u16 = 0;
const elem_bit_size = @intCast(u16, elem_ty.bitSize(mod));
@ -1483,8 +1439,7 @@ pub const Value = struct {
},
.Optional => {
assert(ty.isPtrLikeOptional(mod));
var buf: Type.Payload.ElemType = undefined;
const child = ty.optionalChild(&buf);
const child = ty.optionalChild(mod);
return readFromPackedMemory(child, mod, buffer, bit_offset, arena);
},
else => @panic("TODO implement readFromPackedMemory for more types"),
@ -1956,7 +1911,7 @@ pub const Value = struct {
pub fn compareAll(lhs: Value, op: std.math.CompareOperator, rhs: Value, ty: Type, mod: *Module) bool {
if (ty.zigTypeTag(mod) == .Vector) {
var i: usize = 0;
while (i < ty.vectorLen()) : (i += 1) {
while (i < ty.vectorLen(mod)) : (i += 1) {
var lhs_buf: Value.ElemValueBuffer = undefined;
var rhs_buf: Value.ElemValueBuffer = undefined;
const lhs_elem = lhs.elemValueBuffer(mod, i, &lhs_buf);
@ -2092,8 +2047,7 @@ pub const Value = struct {
.opt_payload => {
const a_payload = a.castTag(.opt_payload).?.data;
const b_payload = b.castTag(.opt_payload).?.data;
var buffer: Type.Payload.ElemType = undefined;
const payload_ty = ty.optionalChild(&buffer);
const payload_ty = ty.optionalChild(mod);
return eqlAdvanced(a_payload, payload_ty, b_payload, payload_ty, mod, opt_sema);
},
.slice => {
@ -2175,7 +2129,7 @@ pub const Value = struct {
return true;
}

const elem_ty = ty.childType();
const elem_ty = ty.childType(mod);
for (a_field_vals, 0..) |a_elem, i| {
const b_elem = b_field_vals[i];

@ -2239,8 +2193,8 @@ pub const Value = struct {
return eqlAdvanced(a_val, int_ty, b_val, int_ty, mod, opt_sema);
},
.Array, .Vector => {
const len = ty.arrayLen();
const elem_ty = ty.childType();
const len = ty.arrayLen(mod);
const elem_ty = ty.childType(mod);
var i: usize = 0;
var a_buf: ElemValueBuffer = undefined;
var b_buf: ElemValueBuffer = undefined;
@ -2253,11 +2207,11 @@ pub const Value = struct {
}
return true;
},
.Pointer => switch (ty.ptrSize()) {
.Pointer => switch (ty.ptrSize(mod)) {
.Slice => {
const a_len = switch (a_ty.ptrSize()) {
const a_len = switch (a_ty.ptrSize(mod)) {
.Slice => a.sliceLen(mod),
.One => a_ty.childType().arrayLen(),
.One => a_ty.childType(mod).arrayLen(mod),
else => unreachable,
};
if (a_len != b.sliceLen(mod)) {
@ -2266,7 +2220,7 @@ pub const Value = struct {

var ptr_buf: Type.SlicePtrFieldTypeBuffer = undefined;
const ptr_ty = ty.slicePtrFieldType(&ptr_buf);
const a_ptr = switch (a_ty.ptrSize()) {
const a_ptr = switch (a_ty.ptrSize(mod)) {
.Slice => a.slicePtr(),
.One => a,
else => unreachable,
@ -2412,8 +2366,8 @@ pub const Value = struct {
else => return hashPtr(val, hasher, mod),
},
.Array, .Vector => {
const len = ty.arrayLen();
const elem_ty = ty.childType();
const len = ty.arrayLen(mod);
const elem_ty = ty.childType(mod);
var index: usize = 0;
var elem_value_buf: ElemValueBuffer = undefined;
while (index < len) : (index += 1) {
@ -2438,8 +2392,7 @@ pub const Value = struct {
if (val.castTag(.opt_payload)) |payload| {
std.hash.autoHash(hasher, true); // non-null
const sub_val = payload.data;
var buffer: Type.Payload.ElemType = undefined;
const sub_ty = ty.optionalChild(&buffer);
const sub_ty = ty.optionalChild(mod);
sub_val.hash(sub_ty, hasher, mod);
} else {
std.hash.autoHash(hasher, false); // null
@ -2534,8 +2487,8 @@ pub const Value = struct {
else => val.hashPtr(hasher, mod),
},
.Array, .Vector => {
const len = ty.arrayLen();
const elem_ty = ty.childType();
const len = ty.arrayLen(mod);
const elem_ty = ty.childType(mod);
var index: usize = 0;
var elem_value_buf: ElemValueBuffer = undefined;
while (index < len) : (index += 1) {
@ -2544,8 +2497,7 @@ pub const Value = struct {
}
},
.Optional => if (val.castTag(.opt_payload)) |payload| {
var buf: Type.Payload.ElemType = undefined;
const child_ty = ty.optionalChild(&buf);
const child_ty = ty.optionalChild(mod);
payload.data.hashUncoerced(child_ty, hasher, mod);
} else std.hash.autoHash(hasher, std.builtin.TypeId.Null),
.ErrorSet, .ErrorUnion => if (val.getError()) |err| hasher.update(err) else {
@ -2720,7 +2672,7 @@ pub const Value = struct {
const decl_index = val.castTag(.decl_ref).?.data;
const decl = mod.declPtr(decl_index);
if (decl.ty.zigTypeTag(mod) == .Array) {
return decl.ty.arrayLen();
return decl.ty.arrayLen(mod);
} else {
return 1;
}
@ -2729,7 +2681,7 @@ pub const Value = struct {
const decl_index = val.castTag(.decl_ref_mut).?.data.decl_index;
const decl = mod.declPtr(decl_index);
if (decl.ty.zigTypeTag(mod) == .Array) {
return decl.ty.arrayLen();
return decl.ty.arrayLen(mod);
} else {
return 1;
}
@ -2737,7 +2689,7 @@ pub const Value = struct {
.comptime_field_ptr => {
const payload = val.castTag(.comptime_field_ptr).?.data;
if (payload.field_ty.zigTypeTag(mod) == .Array) {
return payload.field_ty.arrayLen();
return payload.field_ty.arrayLen(mod);
} else {
return 1;
}
@ -3137,7 +3089,7 @@ pub const Value = struct {

pub fn intToFloatAdvanced(val: Value, arena: Allocator, int_ty: Type, float_ty: Type, mod: *Module, opt_sema: ?*Sema) !Value {
if (int_ty.zigTypeTag(mod) == .Vector) {
const result_data = try arena.alloc(Value, int_ty.vectorLen());
const result_data = try arena.alloc(Value, int_ty.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var buf: Value.ElemValueBuffer = undefined;
const elem_val = val.elemValueBuffer(mod, i, &buf);
@ -3250,7 +3202,7 @@ pub const Value = struct {
mod: *Module,
) !Value {
if (ty.zigTypeTag(mod) == .Vector) {
const result_data = try arena.alloc(Value, ty.vectorLen());
const result_data = try arena.alloc(Value, ty.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var lhs_buf: Value.ElemValueBuffer = undefined;
var rhs_buf: Value.ElemValueBuffer = undefined;
@ -3298,7 +3250,7 @@ pub const Value = struct {
mod: *Module,
) !Value {
if (ty.zigTypeTag(mod) == .Vector) {
const result_data = try arena.alloc(Value, ty.vectorLen());
const result_data = try arena.alloc(Value, ty.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var lhs_buf: Value.ElemValueBuffer = undefined;
var rhs_buf: Value.ElemValueBuffer = undefined;
@ -3345,8 +3297,8 @@ pub const Value = struct {
mod: *Module,
) !OverflowArithmeticResult {
if (ty.zigTypeTag(mod) == .Vector) {
const overflowed_data = try arena.alloc(Value, ty.vectorLen());
const result_data = try arena.alloc(Value, ty.vectorLen());
const overflowed_data = try arena.alloc(Value, ty.vectorLen(mod));
const result_data = try arena.alloc(Value, ty.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var lhs_buf: Value.ElemValueBuffer = undefined;
var rhs_buf: Value.ElemValueBuffer = undefined;
@ -3408,7 +3360,7 @@ pub const Value = struct {
mod: *Module,
) !Value {
if (ty.zigTypeTag(mod) == .Vector) {
const result_data = try arena.alloc(Value, ty.vectorLen());
const result_data = try arena.alloc(Value, ty.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var lhs_buf: Value.ElemValueBuffer = undefined;
var rhs_buf: Value.ElemValueBuffer = undefined;
@ -3452,7 +3404,7 @@ pub const Value = struct {
mod: *Module,
) !Value {
if (ty.zigTypeTag(mod) == .Vector) {
const result_data = try arena.alloc(Value, ty.vectorLen());
const result_data = try arena.alloc(Value, ty.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var lhs_buf: Value.ElemValueBuffer = undefined;
var rhs_buf: Value.ElemValueBuffer = undefined;
@ -3527,7 +3479,7 @@ pub const Value = struct {
/// operands must be (vectors of) integers; handles undefined scalars.
pub fn bitwiseNot(val: Value, ty: Type, arena: Allocator, mod: *Module) !Value {
if (ty.zigTypeTag(mod) == .Vector) {
const result_data = try arena.alloc(Value, ty.vectorLen());
const result_data = try arena.alloc(Value, ty.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var buf: Value.ElemValueBuffer = undefined;
const elem_val = val.elemValueBuffer(mod, i, &buf);
@ -3565,7 +3517,7 @@ pub const Value = struct {
/// operands must be (vectors of) integers; handles undefined scalars.
pub fn bitwiseAnd(lhs: Value, rhs: Value, ty: Type, allocator: Allocator, mod: *Module) !Value {
if (ty.zigTypeTag(mod) == .Vector) {
const result_data = try allocator.alloc(Value, ty.vectorLen());
const result_data = try allocator.alloc(Value, ty.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var lhs_buf: Value.ElemValueBuffer = undefined;
var rhs_buf: Value.ElemValueBuffer = undefined;
@ -3601,7 +3553,7 @@ pub const Value = struct {
/// operands must be (vectors of) integers; handles undefined scalars.
pub fn bitwiseNand(lhs: Value, rhs: Value, ty: Type, arena: Allocator, mod: *Module) !Value {
if (ty.zigTypeTag(mod) == .Vector) {
const result_data = try arena.alloc(Value, ty.vectorLen());
const result_data = try arena.alloc(Value, ty.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var lhs_buf: Value.ElemValueBuffer = undefined;
var rhs_buf: Value.ElemValueBuffer = undefined;
@ -3631,7 +3583,7 @@ pub const Value = struct {
/// operands must be (vectors of) integers; handles undefined scalars.
pub fn bitwiseOr(lhs: Value, rhs: Value, ty: Type, allocator: Allocator, mod: *Module) !Value {
if (ty.zigTypeTag(mod) == .Vector) {
const result_data = try allocator.alloc(Value, ty.vectorLen());
const result_data = try allocator.alloc(Value, ty.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var lhs_buf: Value.ElemValueBuffer = undefined;
var rhs_buf: Value.ElemValueBuffer = undefined;
@ -3666,7 +3618,7 @@ pub const Value = struct {
/// operands must be (vectors of) integers; handles undefined scalars.
pub fn bitwiseXor(lhs: Value, rhs: Value, ty: Type, allocator: Allocator, mod: *Module) !Value {
if (ty.zigTypeTag(mod) == .Vector) {
const result_data = try allocator.alloc(Value, ty.vectorLen());
const result_data = try allocator.alloc(Value, ty.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var lhs_buf: Value.ElemValueBuffer = undefined;
var rhs_buf: Value.ElemValueBuffer = undefined;
@ -3701,7 +3653,7 @@ pub const Value = struct {

pub fn intDiv(lhs: Value, rhs: Value, ty: Type, allocator: Allocator, mod: *Module) !Value {
if (ty.zigTypeTag(mod) == .Vector) {
const result_data = try allocator.alloc(Value, ty.vectorLen());
const result_data = try allocator.alloc(Value, ty.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var lhs_buf: Value.ElemValueBuffer = undefined;
var rhs_buf: Value.ElemValueBuffer = undefined;
@ -3741,7 +3693,7 @@ pub const Value = struct {

pub fn intDivFloor(lhs: Value, rhs: Value, ty: Type, allocator: Allocator, mod: *Module) !Value {
if (ty.zigTypeTag(mod) == .Vector) {
const result_data = try allocator.alloc(Value, ty.vectorLen());
const result_data = try allocator.alloc(Value, ty.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var lhs_buf: Value.ElemValueBuffer = undefined;
var rhs_buf: Value.ElemValueBuffer = undefined;
@ -3781,7 +3733,7 @@ pub const Value = struct {

pub fn intMod(lhs: Value, rhs: Value, ty: Type, allocator: Allocator, mod: *Module) !Value {
if (ty.zigTypeTag(mod) == .Vector) {
const result_data = try allocator.alloc(Value, ty.vectorLen());
const result_data = try allocator.alloc(Value, ty.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var lhs_buf: Value.ElemValueBuffer = undefined;
var rhs_buf: Value.ElemValueBuffer = undefined;
@ -3857,7 +3809,7 @@ pub const Value = struct {
pub fn floatRem(lhs: Value, rhs: Value, float_type: Type, arena: Allocator, mod: *Module) !Value {
const target = mod.getTarget();
if (float_type.zigTypeTag(mod) == .Vector) {
const result_data = try arena.alloc(Value, float_type.vectorLen());
const result_data = try arena.alloc(Value, float_type.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var lhs_buf: Value.ElemValueBuffer = undefined;
var rhs_buf: Value.ElemValueBuffer = undefined;
@ -3904,7 +3856,7 @@ pub const Value = struct {
pub fn floatMod(lhs: Value, rhs: Value, float_type: Type, arena: Allocator, mod: *Module) !Value {
const target = mod.getTarget();
if (float_type.zigTypeTag(mod) == .Vector) {
const result_data = try arena.alloc(Value, float_type.vectorLen());
const result_data = try arena.alloc(Value, float_type.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var lhs_buf: Value.ElemValueBuffer = undefined;
var rhs_buf: Value.ElemValueBuffer = undefined;
@ -3950,7 +3902,7 @@ pub const Value = struct {

pub fn intMul(lhs: Value, rhs: Value, ty: Type, allocator: Allocator, mod: *Module) !Value {
if (ty.zigTypeTag(mod) == .Vector) {
const result_data = try allocator.alloc(Value, ty.vectorLen());
const result_data = try allocator.alloc(Value, ty.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var lhs_buf: Value.ElemValueBuffer = undefined;
var rhs_buf: Value.ElemValueBuffer = undefined;
@ -3986,7 +3938,7 @@ pub const Value = struct {

pub fn intTrunc(val: Value, ty: Type, allocator: Allocator, signedness: std.builtin.Signedness, bits: u16, mod: *Module) !Value {
if (ty.zigTypeTag(mod) == .Vector) {
const result_data = try allocator.alloc(Value, ty.vectorLen());
const result_data = try allocator.alloc(Value, ty.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var buf: Value.ElemValueBuffer = undefined;
const elem_val = val.elemValueBuffer(mod, i, &buf);
@ -4007,7 +3959,7 @@ pub const Value = struct {
mod: *Module,
) !Value {
if (ty.zigTypeTag(mod) == .Vector) {
const result_data = try allocator.alloc(Value, ty.vectorLen());
const result_data = try allocator.alloc(Value, ty.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var buf: Value.ElemValueBuffer = undefined;
const elem_val = val.elemValueBuffer(mod, i, &buf);
@ -4038,7 +3990,7 @@ pub const Value = struct {

pub fn shl(lhs: Value, rhs: Value, ty: Type, allocator: Allocator, mod: *Module) !Value {
if (ty.zigTypeTag(mod) == .Vector) {
const result_data = try allocator.alloc(Value, ty.vectorLen());
const result_data = try allocator.alloc(Value, ty.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var lhs_buf: Value.ElemValueBuffer = undefined;
var rhs_buf: Value.ElemValueBuffer = undefined;
@ -4078,8 +4030,8 @@ pub const Value = struct {
mod: *Module,
) !OverflowArithmeticResult {
if (ty.zigTypeTag(mod) == .Vector) {
const overflowed_data = try allocator.alloc(Value, ty.vectorLen());
const result_data = try allocator.alloc(Value, ty.vectorLen());
const overflowed_data = try allocator.alloc(Value, ty.vectorLen(mod));
const result_data = try allocator.alloc(Value, ty.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var lhs_buf: Value.ElemValueBuffer = undefined;
var rhs_buf: Value.ElemValueBuffer = undefined;
@ -4136,7 +4088,7 @@ pub const Value = struct {
mod: *Module,
) !Value {
if (ty.zigTypeTag(mod) == .Vector) {
const result_data = try arena.alloc(Value, ty.vectorLen());
const result_data = try arena.alloc(Value, ty.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var lhs_buf: Value.ElemValueBuffer = undefined;
var rhs_buf: Value.ElemValueBuffer = undefined;
@ -4184,7 +4136,7 @@ pub const Value = struct {
mod: *Module,
) !Value {
if (ty.zigTypeTag(mod) == .Vector) {
const result_data = try arena.alloc(Value, ty.vectorLen());
const result_data = try arena.alloc(Value, ty.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var lhs_buf: Value.ElemValueBuffer = undefined;
var rhs_buf: Value.ElemValueBuffer = undefined;
@ -4212,7 +4164,7 @@ pub const Value = struct {

pub fn shr(lhs: Value, rhs: Value, ty: Type, allocator: Allocator, mod: *Module) !Value {
if (ty.zigTypeTag(mod) == .Vector) {
const result_data = try allocator.alloc(Value, ty.vectorLen());
const result_data = try allocator.alloc(Value, ty.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var lhs_buf: Value.ElemValueBuffer = undefined;
var rhs_buf: Value.ElemValueBuffer = undefined;
@ -4264,7 +4216,7 @@ pub const Value = struct {
) !Value {
const target = mod.getTarget();
if (float_type.zigTypeTag(mod) == .Vector) {
const result_data = try arena.alloc(Value, float_type.vectorLen());
const result_data = try arena.alloc(Value, float_type.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var buf: Value.ElemValueBuffer = undefined;
const elem_val = val.elemValueBuffer(mod, i, &buf);
@ -4300,7 +4252,7 @@ pub const Value = struct {
) !Value {
const target = mod.getTarget();
if (float_type.zigTypeTag(mod) == .Vector) {
const result_data = try arena.alloc(Value, float_type.vectorLen());
const result_data = try arena.alloc(Value, float_type.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var lhs_buf: Value.ElemValueBuffer = undefined;
var rhs_buf: Value.ElemValueBuffer = undefined;
@ -4359,7 +4311,7 @@ pub const Value = struct {
) !Value {
const target = mod.getTarget();
if (float_type.zigTypeTag(mod) == .Vector) {
const result_data = try arena.alloc(Value, float_type.vectorLen());
const result_data = try arena.alloc(Value, float_type.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var lhs_buf: Value.ElemValueBuffer = undefined;
var rhs_buf: Value.ElemValueBuffer = undefined;
@ -4418,7 +4370,7 @@ pub const Value = struct {
) !Value {
const target = mod.getTarget();
if (float_type.zigTypeTag(mod) == .Vector) {
const result_data = try arena.alloc(Value, float_type.vectorLen());
const result_data = try arena.alloc(Value, float_type.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var lhs_buf: Value.ElemValueBuffer = undefined;
var rhs_buf: Value.ElemValueBuffer = undefined;
@ -4477,7 +4429,7 @@ pub const Value = struct {
) !Value {
const target = mod.getTarget();
if (float_type.zigTypeTag(mod) == .Vector) {
const result_data = try arena.alloc(Value, float_type.vectorLen());
const result_data = try arena.alloc(Value, float_type.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var lhs_buf: Value.ElemValueBuffer = undefined;
var rhs_buf: Value.ElemValueBuffer = undefined;
@ -4530,7 +4482,7 @@ pub const Value = struct {
pub fn sqrt(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Value {
const target = mod.getTarget();
if (float_type.zigTypeTag(mod) == .Vector) {
const result_data = try arena.alloc(Value, float_type.vectorLen());
const result_data = try arena.alloc(Value, float_type.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var buf: Value.ElemValueBuffer = undefined;
const elem_val = val.elemValueBuffer(mod, i, &buf);
@ -4570,7 +4522,7 @@ pub const Value = struct {
pub fn sin(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Value {
const target = mod.getTarget();
if (float_type.zigTypeTag(mod) == .Vector) {
const result_data = try arena.alloc(Value, float_type.vectorLen());
const result_data = try arena.alloc(Value, float_type.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var buf: Value.ElemValueBuffer = undefined;
const elem_val = val.elemValueBuffer(mod, i, &buf);
@ -4610,7 +4562,7 @@ pub const Value = struct {
pub fn cos(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Value {
const target = mod.getTarget();
if (float_type.zigTypeTag(mod) == .Vector) {
const result_data = try arena.alloc(Value, float_type.vectorLen());
const result_data = try arena.alloc(Value, float_type.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var buf: Value.ElemValueBuffer = undefined;
const elem_val = val.elemValueBuffer(mod, i, &buf);
@ -4650,7 +4602,7 @@ pub const Value = struct {
pub fn tan(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Value {
const target = mod.getTarget();
if (float_type.zigTypeTag(mod) == .Vector) {
const result_data = try arena.alloc(Value, float_type.vectorLen());
const result_data = try arena.alloc(Value, float_type.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var buf: Value.ElemValueBuffer = undefined;
const elem_val = val.elemValueBuffer(mod, i, &buf);
@ -4690,7 +4642,7 @@ pub const Value = struct {
pub fn exp(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Value {
const target = mod.getTarget();
if (float_type.zigTypeTag(mod) == .Vector) {
const result_data = try arena.alloc(Value, float_type.vectorLen());
const result_data = try arena.alloc(Value, float_type.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var buf: Value.ElemValueBuffer = undefined;
const elem_val = val.elemValueBuffer(mod, i, &buf);
@ -4730,7 +4682,7 @@ pub const Value = struct {
pub fn exp2(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Value {
const target = mod.getTarget();
if (float_type.zigTypeTag(mod) == .Vector) {
const result_data = try arena.alloc(Value, float_type.vectorLen());
const result_data = try arena.alloc(Value, float_type.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var buf: Value.ElemValueBuffer = undefined;
const elem_val = val.elemValueBuffer(mod, i, &buf);
@ -4770,7 +4722,7 @@ pub const Value = struct {
pub fn log(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Value {
const target = mod.getTarget();
if (float_type.zigTypeTag(mod) == .Vector) {
const result_data = try arena.alloc(Value, float_type.vectorLen());
const result_data = try arena.alloc(Value, float_type.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var buf: Value.ElemValueBuffer = undefined;
const elem_val = val.elemValueBuffer(mod, i, &buf);
@ -4810,7 +4762,7 @@ pub const Value = struct {
pub fn log2(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Value {
const target = mod.getTarget();
if (float_type.zigTypeTag(mod) == .Vector) {
const result_data = try arena.alloc(Value, float_type.vectorLen());
const result_data = try arena.alloc(Value, float_type.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var buf: Value.ElemValueBuffer = undefined;
const elem_val = val.elemValueBuffer(mod, i, &buf);
@ -4850,7 +4802,7 @@ pub const Value = struct {
pub fn log10(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Value {
const target = mod.getTarget();
if (float_type.zigTypeTag(mod) == .Vector) {
const result_data = try arena.alloc(Value, float_type.vectorLen());
const result_data = try arena.alloc(Value, float_type.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var buf: Value.ElemValueBuffer = undefined;
const elem_val = val.elemValueBuffer(mod, i, &buf);
@ -4890,7 +4842,7 @@ pub const Value = struct {
pub fn fabs(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Value {
const target = mod.getTarget();
if (float_type.zigTypeTag(mod) == .Vector) {
const result_data = try arena.alloc(Value, float_type.vectorLen());
const result_data = try arena.alloc(Value, float_type.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var buf: Value.ElemValueBuffer = undefined;
const elem_val = val.elemValueBuffer(mod, i, &buf);
@ -4930,7 +4882,7 @@ pub const Value = struct {
pub fn floor(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Value {
const target = mod.getTarget();
if (float_type.zigTypeTag(mod) == .Vector) {
const result_data = try arena.alloc(Value, float_type.vectorLen());
const result_data = try arena.alloc(Value, float_type.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var buf: Value.ElemValueBuffer = undefined;
const elem_val = val.elemValueBuffer(mod, i, &buf);
@ -4970,7 +4922,7 @@ pub const Value = struct {
pub fn ceil(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Value {
const target = mod.getTarget();
if (float_type.zigTypeTag(mod) == .Vector) {
const result_data = try arena.alloc(Value, float_type.vectorLen());
const result_data = try arena.alloc(Value, float_type.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var buf: Value.ElemValueBuffer = undefined;
const elem_val = val.elemValueBuffer(mod, i, &buf);
@ -5010,7 +4962,7 @@ pub const Value = struct {
pub fn round(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Value {
const target = mod.getTarget();
if (float_type.zigTypeTag(mod) == .Vector) {
const result_data = try arena.alloc(Value, float_type.vectorLen());
const result_data = try arena.alloc(Value, float_type.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var buf: Value.ElemValueBuffer = undefined;
const elem_val = val.elemValueBuffer(mod, i, &buf);
@ -5050,7 +5002,7 @@ pub const Value = struct {
pub fn trunc(val: Value, float_type: Type, arena: Allocator, mod: *Module) !Value {
const target = mod.getTarget();
if (float_type.zigTypeTag(mod) == .Vector) {
const result_data = try arena.alloc(Value, float_type.vectorLen());
const result_data = try arena.alloc(Value, float_type.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var buf: Value.ElemValueBuffer = undefined;
const elem_val = val.elemValueBuffer(mod, i, &buf);
@ -5097,7 +5049,7 @@ pub const Value = struct {
) !Value {
const target = mod.getTarget();
if (float_type.zigTypeTag(mod) == .Vector) {
const result_data = try arena.alloc(Value, float_type.vectorLen());
const result_data = try arena.alloc(Value, float_type.vectorLen(mod));
for (result_data, 0..) |*scalar, i| {
var mulend1_buf: Value.ElemValueBuffer = undefined;
const mulend1_elem = mulend1.elemValueBuffer(mod, i, &mulend1_buf);

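Almost every arithmetic helper in value.zig follows the same elementwise shape: if the operand type is a vector, allocate vectorLen(mod) result slots and apply the scalar operation per lane; the only change in this commit is that the length now comes from the InternPool via mod. A standalone sketch of that loop shape, using plain slices in place of Value arrays:

const std = @import("std");

// Sketch of the elementwise pattern used by intMul, shl, sqrt, etc.:
// one result slot per lane, scalar op applied lane by lane.
fn elementwiseAdd(allocator: std.mem.Allocator, lhs: []const i64, rhs: []const i64) ![]i64 {
    const result = try allocator.alloc(i64, lhs.len);
    for (result, 0..) |*scalar, i| {
        scalar.* = lhs[i] + rhs[i];
    }
    return result;
}

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    const allocator = gpa.allocator();

    const out = try elementwiseAdd(allocator, &.{ 1, 2, 3 }, &.{ 10, 20, 30 });
    defer allocator.free(out);
    std.debug.print("{any}\n", .{out});
}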